From 2aa4a82499d4becd2284cdb482213d541b8804dd Mon Sep 17 00:00:00 2001 From: Daniel Baumann Date: Sun, 28 Apr 2024 16:29:10 +0200 Subject: Adding upstream version 86.0.1. Signed-off-by: Daniel Baumann --- build/.gdbinit | 204 + build/.gdbinit.loader | 29 + build/.gdbinit.py.in | 19 + build/.lldbinit.in | 20 + build/RunCbindgen.py | 95 + build/__init__.py | 0 build/appini_header.py | 90 + build/application.ini.in | 56 + build/autoconf/acgeneral.m4 | 2607 +++ build/autoconf/acoldnames.m4 | 80 + build/autoconf/acspecific.m4 | 2758 +++ build/autoconf/alloc.m4 | 57 + build/autoconf/altoptions.m4 | 77 + build/autoconf/android.m4 | 113 + build/autoconf/arch.m4 | 15 + build/autoconf/autoconf.m4 | 28 + build/autoconf/autoconf.sh | 158 + build/autoconf/clang-plugin.m4 | 107 + build/autoconf/codeset.m4 | 25 + build/autoconf/compiler-opts.m4 | 162 + build/autoconf/config.guess | 1687 ++ build/autoconf/config.status.m4 | 173 + build/autoconf/config.sub | 1851 ++ build/autoconf/expandlibs.m4 | 52 + build/autoconf/hooks.m4 | 31 + build/autoconf/hotfixes.m4 | 23 + build/autoconf/install-sh | 123 + build/autoconf/mozheader.m4 | 32 + build/autoconf/mozprog.m4 | 42 + build/autoconf/pkg.m4 | 61 + build/autoconf/sanitize.m4 | 135 + build/autoconf/toolchain.m4 | 131 + build/binary-location.mk | 19 + build/build-clang/README | 57 + build/build-clang/android-mangling-error.patch | 34 + .../bug47258-extract-symbols-mbcs.patch | 13 + build/build-clang/build-clang.py | 1067 ++ build/build-clang/clang-10-linux64.json | 27 + build/build-clang/clang-11-android.json | 55 + .../clang-11-linux64-aarch64-cross.json | 21 + build/build-clang/clang-11-linux64.json | 24 + build/build-clang/clang-11-macosx64.json | 22 + build/build-clang/clang-11-mingw.json | 14 + build/build-clang/clang-11-win64-2stage.json | 14 + build/build-clang/clang-11-win64.json | 18 + build/build-clang/clang-5.0-linux64.json | 12 + build/build-clang/clang-7-linux64.json | 19 + build/build-clang/clang-linux64.json | 28 + build/build-clang/clang-tidy-ci.patch | 26 + build/build-clang/clang-tidy-external-linux64.json | 17 + build/build-clang/clang-tidy-linux64.json | 16 + build/build-clang/clang-tidy-macosx64.json | 23 + build/build-clang/clang-tidy-no-errors.patch | 12 + build/build-clang/clang-tidy-win64.json | 15 + build/build-clang/compiler-rt-cross-compile.patch | 15 + build/build-clang/compiler-rt-no-codesign.patch | 21 + ...ical_section_on_gcov_flush-rG02ce9d8ef5a8.patch | 75 + build/build-clang/downgrade-mangling-error.patch | 23 + build/build-clang/find_symbolizer_linux.patch | 58 + .../find_symbolizer_linux_clang_10.patch | 58 + ...86-gfc937806efd-dont-jump-to-landing-pads.patch | 100 + .../llvmorg-11-init-4265-g2dcbdba8540.patch | 106 + ...lvmorg-11-init-4265-g2dcbdba8540_clang_10.patch | 106 + ...t-10926-gb79e990f401-LTO-new-pass-manager.patch | 66 + build/build-clang/loosen-msvc-detection.patch | 22 + build/build-clang/r350774.patch | 14 + build/build-clang/rG7e18aeba5062.patch | 255 + build/build-clang/rG7e18aeba5062_clang_10.patch | 249 + build/build-clang/rename_gcov_flush.patch | 40 + build/build-clang/rename_gcov_flush_7.patch | 14 + build/build-clang/rename_gcov_flush_clang_10.patch | 42 + build/build-clang/rename_gcov_flush_clang_11.patch | 26 + build/build-clang/revert-r362047-and-r362065.patch | 62 + build/build-clang/static-llvm-symbolizer.patch | 12 + build/build-clang/tsan-hang-be41a98ac222.patch | 100 + .../tsan-hang-be41a98ac222_clang_10.patch | 100 + build/build-clang/unpoison-thread-stacks.patch | 62 + 
.../unpoison-thread-stacks_clang_10.patch | 64 + build/build-infer/README | 36 + build/build-infer/build-infer.py | 152 + build/build-infer/infer-linux64.json | 5 + build/build-rust/README | 3 + build/build-rust/example.patch | 12 + build/build_virtualenv_packages.txt | 3 + build/buildconfig.py | 18 + build/cargo-host-linker | 3 + build/cargo-host-linker.bat | 3 + build/cargo-linker | 22 + build/cargo-linker.bat | 3 + build/checksums.py | 156 + build/clang-plugin/.clang-format | 1 + build/clang-plugin/ArithmeticArgChecker.cpp | 60 + build/clang-plugin/ArithmeticArgChecker.h | 18 + build/clang-plugin/AssertAssignmentChecker.cpp | 20 + build/clang-plugin/AssertAssignmentChecker.h | 18 + build/clang-plugin/BaseCheck.h | 34 + build/clang-plugin/CanRunScriptChecker.cpp | 380 + build/clang-plugin/CanRunScriptChecker.h | 31 + build/clang-plugin/Checks.inc | 43 + build/clang-plugin/ChecksIncludes.inc | 44 + build/clang-plugin/CustomAttributes.cpp | 119 + build/clang-plugin/CustomAttributes.h | 41 + build/clang-plugin/CustomAttributes.inc | 31 + build/clang-plugin/CustomMatchers.h | 426 + build/clang-plugin/CustomTypeAnnotation.cpp | 173 + build/clang-plugin/CustomTypeAnnotation.h | 75 + build/clang-plugin/DanglingOnTemporaryChecker.cpp | 256 + build/clang-plugin/DanglingOnTemporaryChecker.h | 19 + build/clang-plugin/DiagnosticsMatcher.cpp | 17 + build/clang-plugin/DiagnosticsMatcher.h | 31 + build/clang-plugin/ExplicitImplicitChecker.cpp | 36 + build/clang-plugin/ExplicitImplicitChecker.h | 18 + build/clang-plugin/ExplicitOperatorBoolChecker.cpp | 34 + build/clang-plugin/ExplicitOperatorBoolChecker.h | 19 + build/clang-plugin/FopenUsageChecker.cpp | 73 + build/clang-plugin/FopenUsageChecker.h | 18 + build/clang-plugin/KungFuDeathGripChecker.cpp | 114 + build/clang-plugin/KungFuDeathGripChecker.h | 18 + build/clang-plugin/LoadLibraryUsageChecker.cpp | 34 + build/clang-plugin/LoadLibraryUsageChecker.h | 18 + build/clang-plugin/Makefile.in | 19 + build/clang-plugin/MemMoveAnnotation.h | 80 + build/clang-plugin/MozCheckAction.cpp | 27 + build/clang-plugin/MozillaTidyModule.cpp | 45 + build/clang-plugin/MustOverrideChecker.cpp | 60 + build/clang-plugin/MustOverrideChecker.h | 22 + build/clang-plugin/MustReturnFromCallerChecker.cpp | 136 + build/clang-plugin/MustReturnFromCallerChecker.h | 27 + build/clang-plugin/MustUseChecker.cpp | 64 + build/clang-plugin/MustUseChecker.h | 21 + build/clang-plugin/NaNExprChecker.cpp | 56 + build/clang-plugin/NaNExprChecker.h | 18 + build/clang-plugin/NeedsNoVTableTypeChecker.cpp | 39 + build/clang-plugin/NeedsNoVTableTypeChecker.h | 18 + .../NoAddRefReleaseOnReturnChecker.cpp | 32 + .../clang-plugin/NoAddRefReleaseOnReturnChecker.h | 19 + build/clang-plugin/NoAutoTypeChecker.cpp | 21 + build/clang-plugin/NoAutoTypeChecker.h | 18 + .../NoDuplicateRefCntMemberChecker.cpp | 65 + .../clang-plugin/NoDuplicateRefCntMemberChecker.h | 19 + .../NoExplicitMoveConstructorChecker.cpp | 23 + .../NoExplicitMoveConstructorChecker.h | 19 + build/clang-plugin/NoNewThreadsChecker.cpp | 36 + build/clang-plugin/NoNewThreadsChecker.h | 18 + build/clang-plugin/NoPrincipalGetURI.cpp | 27 + build/clang-plugin/NoPrincipalGetURI.h | 18 + .../NoUsingNamespaceMozillaJavaChecker.cpp | 24 + .../NoUsingNamespaceMozillaJavaChecker.h | 19 + build/clang-plugin/NonMemMovableMemberChecker.cpp | 34 + build/clang-plugin/NonMemMovableMemberChecker.h | 19 + .../NonMemMovableTemplateArgChecker.cpp | 51 + .../clang-plugin/NonMemMovableTemplateArgChecker.h | 19 + .../NonParamInsideFunctionDeclChecker.cpp | 117 
+ .../NonParamInsideFunctionDeclChecker.h | 19 + build/clang-plugin/NonTrivialTypeInFfiChecker.cpp | 56 + build/clang-plugin/NonTrivialTypeInFfiChecker.h | 19 + build/clang-plugin/OverrideBaseCallChecker.cpp | 109 + build/clang-plugin/OverrideBaseCallChecker.h | 27 + .../clang-plugin/OverrideBaseCallUsageChecker.cpp | 21 + build/clang-plugin/OverrideBaseCallUsageChecker.h | 23 + build/clang-plugin/ParamTraitsEnumChecker.cpp | 38 + build/clang-plugin/ParamTraitsEnumChecker.h | 18 + build/clang-plugin/RecurseGuard.h | 56 + .../RefCountedCopyConstructorChecker.cpp | 34 + .../RefCountedCopyConstructorChecker.h | 19 + .../clang-plugin/RefCountedInsideLambdaChecker.cpp | 152 + build/clang-plugin/RefCountedInsideLambdaChecker.h | 33 + build/clang-plugin/ScopeChecker.cpp | 180 + build/clang-plugin/ScopeChecker.h | 18 + build/clang-plugin/SprintfLiteralChecker.cpp | 84 + build/clang-plugin/SprintfLiteralChecker.h | 18 + build/clang-plugin/StmtToBlockMap.h | 90 + .../clang-plugin/TemporaryLifetimeBoundChecker.cpp | 91 + build/clang-plugin/TemporaryLifetimeBoundChecker.h | 22 + build/clang-plugin/ThirdPartyPaths.h | 17 + build/clang-plugin/ThirdPartyPaths.py | 40 + build/clang-plugin/ThreadAllows.py | 59 + build/clang-plugin/ThreadAllows.txt | 95 + build/clang-plugin/ThreadFileAllows.txt | 11 + build/clang-plugin/TrivialCtorDtorChecker.cpp | 29 + build/clang-plugin/TrivialCtorDtorChecker.h | 18 + build/clang-plugin/TrivialDtorChecker.cpp | 23 + build/clang-plugin/TrivialDtorChecker.h | 18 + build/clang-plugin/Utils.h | 492 + build/clang-plugin/VariableUsageHelpers.cpp | 275 + build/clang-plugin/VariableUsageHelpers.h | 63 + build/clang-plugin/alpha/AlphaChecks.inc | 9 + build/clang-plugin/alpha/AlphaIncludes.inc | 1 + build/clang-plugin/alpha/TempRefPtrChecker.cpp | 57 + build/clang-plugin/alpha/TempRefPtrChecker.h | 21 + build/clang-plugin/alpha/sources.mozbuild | 10 + build/clang-plugin/alpha/tests/TestTempRefPtr.cpp | 52 + build/clang-plugin/alpha/tests/sources.mozbuild | 10 + build/clang-plugin/external/CustomAttributes.inc | 0 build/clang-plugin/external/ExternalChecks.inc | 8 + build/clang-plugin/external/ExternalIncludes.inc | 9 + build/clang-plugin/external/sources.mozbuild | 10 + build/clang-plugin/external/tests/sources.mozbuild | 10 + build/clang-plugin/import_mozilla_checks.py | 171 + build/clang-plugin/moz.build | 122 + .../mozsearch-plugin/FileOperations.cpp | 150 + .../clang-plugin/mozsearch-plugin/FileOperations.h | 68 + .../mozsearch-plugin/JSONFormatter.cpp | 119 + .../clang-plugin/mozsearch-plugin/JSONFormatter.h | 53 + .../mozsearch-plugin/MozsearchIndexer.cpp | 1896 ++ build/clang-plugin/mozsearch-plugin/README | 12 + .../mozsearch-plugin/StringOperations.cpp | 42 + .../mozsearch-plugin/StringOperations.h | 25 + build/clang-plugin/plugin.h | 57 + build/clang-plugin/tests/Makefile.in | 13 + build/clang-plugin/tests/NonParameterTestCases.h | 61 + .../tests/TestAssertWithAssignment.cpp | 68 + .../tests/TestBadImplicitConversionCtor.cpp | 50 + build/clang-plugin/tests/TestCanRunScript.cpp | 621 + build/clang-plugin/tests/TestCustomHeap.cpp | 29 + .../clang-plugin/tests/TestDanglingOnTemporary.cpp | 45 + .../tests/TestExplicitOperatorBool.cpp | 11 + build/clang-plugin/tests/TestFopenUsage.cpp | 50 + build/clang-plugin/tests/TestGlobalClass.cpp | 52 + build/clang-plugin/tests/TestHeapClass.cpp | 64 + .../TestInheritTypeAnnotationsFromTemplateArgs.cpp | 46 + build/clang-plugin/tests/TestKungFuDeathGrip.cpp | 142 + build/clang-plugin/tests/TestLoadLibraryUsage.cpp | 20 + 
.../clang-plugin/tests/TestMultipleAnnotations.cpp | 17 + build/clang-plugin/tests/TestMustOverride.cpp | 63 + .../tests/TestMustReturnFromCaller.cpp | 270 + build/clang-plugin/tests/TestMustUse.cpp | 201 + build/clang-plugin/tests/TestNANTestingExpr.cpp | 24 + build/clang-plugin/tests/TestNANTestingExprC.c | 17 + build/clang-plugin/tests/TestNeedsNoVTableType.cpp | 94 + .../tests/TestNoAddRefReleaseOnReturn.cpp | 110 + .../tests/TestNoArithmeticExprInArgument.cpp | 32 + build/clang-plugin/tests/TestNoAutoType.cpp | 41 + .../tests/TestNoDuplicateRefCntMember.cpp | 49 + .../tests/TestNoExplicitMoveConstructor.cpp | 25 + .../clang-plugin/tests/TestNoNewThreadsChecker.cpp | 9 + build/clang-plugin/tests/TestNoPrincipalGetUri.cpp | 31 + .../tests/TestNoRefcountedInsideLambdas.cpp | 677 + .../tests/TestNoUsingNamespaceMozillaJava.cpp | 29 + build/clang-plugin/tests/TestNonHeapClass.cpp | 62 + build/clang-plugin/tests/TestNonMemMovable.cpp | 830 + build/clang-plugin/tests/TestNonMemMovableStd.cpp | 21 + .../tests/TestNonMemMovableStdAtomic.cpp | 30 + .../clang-plugin/tests/TestNonParameterChecker.cpp | 189 + build/clang-plugin/tests/TestNonTemporaryClass.cpp | 70 + .../clang-plugin/tests/TestNonTrivialTypeInFfi.cpp | 65 + build/clang-plugin/tests/TestOverrideBaseCall.cpp | 175 + .../tests/TestOverrideBaseCallAnnotation.cpp | 47 + build/clang-plugin/tests/TestParamTraitsEnum.cpp | 94 + .../tests/TestRefCountedCopyConstructor.cpp | 25 + build/clang-plugin/tests/TestSprintfLiteral.cpp | 41 + build/clang-plugin/tests/TestStackClass.cpp | 50 + build/clang-plugin/tests/TestStaticLocalClass.cpp | 54 + build/clang-plugin/tests/TestTemporaryClass.cpp | 72 + .../tests/TestTemporaryLifetimeBound.cpp | 126 + build/clang-plugin/tests/TestTrivialCtorDtor.cpp | 83 + build/clang-plugin/tests/TestTrivialDtor.cpp | 52 + build/clang-plugin/tests/moz.build | 91 + build/common_virtualenv_packages.txt | 110 + build/compare-mozconfig/compare-mozconfigs.py | 178 + build/compare-mozconfig/python.ini | 5 + build/debian-packages/cmake-jessie.diff | 70 + build/debian-packages/gdb-jessie.diff | 37 + build/debian-packages/python-zstandard-jessie.diff | 27 + build/debian-packages/python3.6-jessie.diff | 165 + build/debian-packages/valgrind-jessie.diff | 61 + build/defines.sh | 3 + build/docs/build-overview.rst | 117 + build/docs/build-targets.rst | 62 + build/docs/cppeclipse.rst | 54 + build/docs/defining-binaries.rst | 345 + build/docs/defining-xpcom-components.rst | 305 + build/docs/environment-variables.rst | 31 + build/docs/files-metadata.rst | 178 + build/docs/glossary.rst | 47 + build/docs/gn.rst | 52 + build/docs/index.rst | 57 + build/docs/jar-manifests.rst | 97 + build/docs/locales.rst | 331 + build/docs/mozbuild-files.rst | 176 + build/docs/mozbuild-symbols.rst | 7 + build/docs/mozbuild/index.rst | 41 + build/docs/mozconfigs.rst | 69 + build/docs/mozinfo.rst | 176 + build/docs/pgo.rst | 28 + build/docs/preprocessor.rst | 219 + build/docs/python.rst | 178 + build/docs/rust.rst | 180 + build/docs/sccache-dist.rst | 220 + build/docs/slow.rst | 177 + build/docs/sparse.rst | 157 + build/docs/supported-configurations.rst | 95 + build/docs/telemetry.rst | 393 + build/docs/test_certificates.rst | 40 + build/docs/test_manifests.rst | 226 + build/docs/toolchains.rst | 220 + build/docs/unified-builds.rst | 43 + build/docs/visualstudio.rst | 100 + build/dumbmake-dependencies | 72 + build/gecko_templates.mozbuild | 124 + build/gen_symverscript.py | 23 + build/gen_test_packages_manifest.py | 124 + build/genrc.sh | 13 + 
build/glean_requirements.in | 2 + build/glean_requirements.txt | 170 + build/gn.mozbuild | 36 + build/gyp.mozbuild | 126 + build/gyp_base.mozbuild | 38 + build/gyp_includes/common.gypi | 3591 ++++ build/gyp_includes/filename_rules.gypi | 96 + build/gyp_includes/internal/release_defaults.gypi | 18 + build/gyp_includes/internal/release_impl.gypi | 17 + .../internal/release_impl_official.gypi | 43 + build/gyp_includes/release.gypi | 17 + build/liblowercase/Cargo.lock | 237 + build/liblowercase/Cargo.toml | 22 + build/liblowercase/lib.rs | 252 + build/mach_bootstrap.py | 597 + build/mach_virtualenv_packages.txt | 2 + build/macosx/cross-mozconfig.common | 43 + build/macosx/llvm-dsymutil | 75 + build/macosx/local-mozconfig.common | 28 + build/macosx/mozconfig.common | 15 + build/macosx/permissions/chown_revert.c | 18 + build/macosx/permissions/chown_root.c | 12 + build/midl.py | 93 + build/moz-automation.mk | 110 + build/moz.build | 129 + build/moz.configure/android-ndk.configure | 407 + build/moz.configure/android-sdk.configure | 132 + build/moz.configure/arm.configure | 292 + build/moz.configure/bindgen.configure | 371 + build/moz.configure/checks.configure | 189 + build/moz.configure/compile-checks.configure | 287 + build/moz.configure/compilers-util.configure | 135 + build/moz.configure/flags.configure | 71 + build/moz.configure/headers.configure | 119 + build/moz.configure/init.configure | 1408 ++ build/moz.configure/java.configure | 66 + build/moz.configure/keyfiles.configure | 68 + build/moz.configure/lto-pgo.configure | 331 + build/moz.configure/memory.configure | 98 + build/moz.configure/node.configure | 71 + build/moz.configure/nspr.configure | 117 + build/moz.configure/nss.configure | 30 + build/moz.configure/old.configure | 381 + build/moz.configure/pkg.configure | 103 + build/moz.configure/rust.configure | 552 + build/moz.configure/toolchain.configure | 2842 +++ build/moz.configure/update-programs.configure | 83 + build/moz.configure/util.configure | 494 + build/moz.configure/warnings.configure | 253 + build/moz.configure/windows.configure | 535 + build/mozconfig.artifact | 11 + build/mozconfig.artifact.automation | 6 + build/mozconfig.automation | 22 + build/mozconfig.cache | 84 + build/mozconfig.clang-cl | 25 + build/mozconfig.comm-support | 49 + build/mozconfig.common | 28 + build/mozconfig.common.override | 15 + build/mozconfig.lld-link | 6 + build/mozconfig.no-compile | 33 + build/mozconfig.rust | 1 + build/mozconfig.wasm-sandboxing | 9 + build/mozconfig.win-common | 12 + build/non-unified-compat | 123 + build/package/mac_osx/make-diskimage | 47 + build/package/mac_osx/mozilla-background.jpg | Bin 0 -> 16591 bytes build/package/mac_osx/mozilla.dsstore | Bin 0 -> 6148 bytes build/package/mac_osx/unpack-diskimage | 54 + build/pgo/blueprint/LICENSE | 314 + build/pgo/blueprint/elements.html | 250 + build/pgo/blueprint/fancytype-screen.css | 75 + build/pgo/blueprint/forms.html | 104 + build/pgo/blueprint/grid.html | 210 + build/pgo/blueprint/grid.png | Bin 0 -> 206 bytes build/pgo/blueprint/print.css | 29 + build/pgo/blueprint/sample.html | 91 + build/pgo/blueprint/screen.css | 226 + build/pgo/blueprint/test-small.jpg | Bin 0 -> 1886 bytes build/pgo/blueprint/test.jpg | Bin 0 -> 35467 bytes build/pgo/certs/README | 5 + build/pgo/certs/alternateroot.ca | 18 + build/pgo/certs/alternateroot.ca.keyspec | 1 + build/pgo/certs/alternateroot.certspec | 7 + build/pgo/certs/badCertDomain.certspec | 3 + build/pgo/certs/bug413909cert.certspec | 3 + build/pgo/certs/cert9.db | Bin 0 -> 229376 
bytes build/pgo/certs/dynamicPinningBad.certspec | 5 + build/pgo/certs/dynamicPinningBad.server.keyspec | 1 + build/pgo/certs/dynamicPinningGood.certspec | 3 + build/pgo/certs/escapeattack1.certspec | 3 + build/pgo/certs/evintermediate.ca | 26 + build/pgo/certs/evintermediate.ca.keyspec | 1 + build/pgo/certs/evintermediate.certspec | 7 + build/pgo/certs/expired.certspec | 4 + build/pgo/certs/imminently_distrusted.certspec | 4 + build/pgo/certs/key4.db | Bin 0 -> 294912 bytes build/pgo/certs/mochitest.certspec | 3 + build/pgo/certs/mochitest.client | Bin 0 -> 2448 bytes build/pgo/certs/mochitest.client.keyspec | 1 + build/pgo/certs/noSubjectAltName.certspec | 2 + build/pgo/certs/pgoca.ca | 21 + build/pgo/certs/pgoca.ca.keyspec | 1 + build/pgo/certs/pgoca.certspec | 5 + build/pgo/certs/selfsigned.certspec | 3 + build/pgo/certs/sha1_end_entity.certspec | 4 + build/pgo/certs/sha256_end_entity.certspec | 4 + build/pgo/certs/staticPinningBad.certspec | 5 + build/pgo/certs/staticPinningBad.server.keyspec | 1 + build/pgo/certs/unknown_ca.certspec | 5 + build/pgo/certs/untrusted.certspec | 3 + build/pgo/certs/untrustedandexpired.certspec | 4 + build/pgo/favicon.ico | Bin 0 -> 1406 bytes build/pgo/genpgocert.py | 256 + build/pgo/index.html | 109 + build/pgo/js-input/3d-thingy.html | 390 + build/pgo/js-input/crypto-otp.html | 1344 ++ build/pgo/js-input/key.gif | Bin 0 -> 1119 bytes build/pgo/js-input/sunspider/3d-cube.html | 387 + build/pgo/js-input/sunspider/3d-morph.html | 104 + build/pgo/js-input/sunspider/3d-raytrace.html | 490 + .../js-input/sunspider/access-binary-trees.html | 100 + build/pgo/js-input/sunspider/access-fannkuch.html | 116 + build/pgo/js-input/sunspider/access-nbody.html | 219 + build/pgo/js-input/sunspider/access-nsieve.html | 88 + .../sunspider/bitops-3bit-bits-in-byte.html | 82 + .../js-input/sunspider/bitops-bits-in-byte.html | 72 + .../pgo/js-input/sunspider/bitops-bitwise-and.html | 78 + .../pgo/js-input/sunspider/bitops-nsieve-bits.html | 82 + .../js-input/sunspider/controlflow-recursive.html | 75 + build/pgo/js-input/sunspider/crypto-aes.html | 472 + build/pgo/js-input/sunspider/crypto-md5.html | 336 + build/pgo/js-input/sunspider/crypto-sha1.html | 274 + .../pgo/js-input/sunspider/date-format-tofte.html | 349 + .../pgo/js-input/sunspider/date-format-xparb.html | 467 + build/pgo/js-input/sunspider/math-cordic.html | 145 + .../pgo/js-input/sunspider/math-partial-sums.html | 83 + .../pgo/js-input/sunspider/math-spectral-norm.html | 101 + build/pgo/js-input/sunspider/regexp-dna.html | 1762 ++ build/pgo/js-input/sunspider/string-base64.html | 151 + build/pgo/js-input/sunspider/string-fasta.html | 135 + build/pgo/js-input/sunspider/string-tagcloud.html | 315 + .../pgo/js-input/sunspider/string-unpack-code.html | 117 + .../js-input/sunspider/string-validate-input.html | 139 + build/pgo/js-input/valid-xhtml10.png | Bin 0 -> 2414 bytes build/pgo/profileserver.py | 234 + build/pgo/server-locations.txt | 338 + build/psutil_requirements.in | 2 + build/psutil_requirements.txt | 19 + build/qemu-wrap | 24 + build/sanitizers/asan_blacklist_win.txt | 28 + build/sanitizers/ubsan_enum_blacklist.txt | 17 + build/sanitizers/ubsan_object_size_blacklist.txt | 7 + .../ubsan_pointer_overflow_blacklist.txt | 28 + .../sanitizers/ubsan_signed_overflow_blacklist.txt | 258 + .../ubsan_unsigned_overflow_blacklist.txt | 265 + build/sparse-profiles/docker-image | 26 + build/sparse-profiles/github-sync | 8 + build/sparse-profiles/liblowercase | 3 + build/sparse-profiles/mach | 23 + 
build/sparse-profiles/mozharness | 4 + build/sparse-profiles/perftest | 7 + build/sparse-profiles/profile-generate | 9 + build/sparse-profiles/push-to-try | 5 + build/sparse-profiles/sphinx-docs | 39 + build/sparse-profiles/taskgraph | 95 + build/sparse-profiles/toolchain-build | 9 + build/sparse-profiles/tps | 5 + build/sparse-profiles/update-verify | 4 + build/sparse-profiles/upload-generated-sources | 4 + build/sparse-profiles/upload-symbols | 4 + build/sparse-profiles/webrender | 6 + build/sparse-profiles/wgpu | 4 + build/submit_telemetry_data.py | 153 + build/templates.mozbuild | 220 + build/test_templates.mozbuild | 33 + build/tests/cram/cram.ini | 1 + build/tests/cram/test_configure_help.t | 14 + build/unix/aix.exp | 5 + .../3A24BC1E8FB409FA9F14371813FCEF89DD9E3C4F.key | 51 + build/unix/build-binutils/build-binutils.sh | 94 + .../07F3DBBECC1A39605078094D980C197698C3739D.key | 53 + .../13975A70E63C361C73AE69EF6EEB81F8981C74C7.key | 82 + .../33C235A34C46AA3FFB293709A328C3A2C3C45C06.key | 33 + .../343C2FF0FBEE5EC2EDBEF399F3599FF828C67298.key | 35 + .../5ED46A6721D365587791E2AA783FCD8E58BCAFBA.key | 38 + .../7F74F97C103468EE5D750B583AB00996FC26A641.key | 54 + .../AD17A21EF8AED8F1CC02DBD9F7D5C9BF765C61E3.key | 57 + .../DA23579A74D4AD9AF9D3F945CEFAC8EAAF17519D.key | 52 + .../EAF1C276A747E9ED86210CBAC3126D3B4AE55E93.key | 29 + build/unix/build-gcc/build-gcc.sh | 118 + build/unix/build-hfsplus/build-hfsplus.sh | 49 + build/unix/elfhack/Makefile.in | 44 + build/unix/elfhack/README | 28 + build/unix/elfhack/dummy.c | 7 + build/unix/elfhack/elf.cpp | 934 + build/unix/elfhack/elfhack.cpp | 1325 ++ build/unix/elfhack/elfxx.h | 691 + build/unix/elfhack/inject.c | 124 + build/unix/elfhack/inject/copy_source.py | 10 + build/unix/elfhack/inject/moz.build | 40 + build/unix/elfhack/moz.build | 32 + build/unix/elfhack/test-array.c | 8 + build/unix/elfhack/test-ctors.c | 16 + build/unix/elfhack/test.c | 170 + build/unix/moz.build | 15 + build/unix/mozconfig.asan | 14 + build/unix/mozconfig.linux | 18 + build/unix/mozconfig.linux32 | 8 + build/unix/mozconfig.stdcxx | 17 + build/unix/mozconfig.tsan | 19 + build/unix/mozconfig.unix | 38 + build/unix/mozilla.in | 108 + build/unix/print-non-newline.sh | 35 + build/unix/rewrite_asan_dylib.py | 131 + build/unix/run-gprof.sh | 17 + build/unix/run-hiprof.sh | 25 + build/unix/run-mozilla.sh | 356 + build/unix/run-third.sh | 25 + build/unix/stdc++compat/hide_std.ld | 5 + build/unix/stdc++compat/moz.build | 27 + build/unix/stdc++compat/stdc++compat.cpp | 180 + build/update-settings.ini | 11 + build/upload.py | 106 + build/upload_generated_sources.py | 175 + build/valgrind/__init__.py | 0 build/valgrind/cross-architecture.sup | 173 + build/valgrind/i386-pc-linux-gnu.sup | 53 + build/valgrind/mach_commands.py | 243 + build/valgrind/output_handler.py | 119 + build/valgrind/x86_64-pc-linux-gnu.sup | 1008 ++ build/variables.py | 113 + build/win32/__init__.py | 0 build/win32/autowinchecksec.py | 84 + build/win32/crashinject.cpp | 94 + build/win32/crashinjectdll/crashinjectdll.cpp | 29 + build/win32/crashinjectdll/crashinjectdll.def | 7 + build/win32/crashinjectdll/moz.build | 16 + build/win32/moz.build | 38 + build/win32/mozconfig.vs-latest | 3 + build/win32/mozconfig.vs2017 | 16 + build/win32/nsis-no-insert-timestamp.patch | 27 + build/win32/orderfile.txt | 17849 +++++++++++++++++++ build/win64-aarch64/mozconfig.vs-latest | 3 + build/win64-aarch64/mozconfig.vs2017 | 13 + build/win64/mozconfig.asan | 24 + build/win64/mozconfig.vs-latest | 3 + build/win64/mozconfig.vs2017 
| 14 + build/win64/orderfile.txt | 17536 ++++++++++++++++++ build/windows_toolchain.py | 277 + build/workspace-hack/Cargo.toml | 76 + build/workspace-hack/src/lib.rs | 11 + build/zstandard_requirements.in | 2 + build/zstandard_requirements.txt | 98 + 556 files changed, 105270 insertions(+) create mode 100644 build/.gdbinit create mode 100644 build/.gdbinit.loader create mode 100644 build/.gdbinit.py.in create mode 100644 build/.lldbinit.in create mode 100644 build/RunCbindgen.py create mode 100644 build/__init__.py create mode 100644 build/appini_header.py create mode 100644 build/application.ini.in create mode 100644 build/autoconf/acgeneral.m4 create mode 100644 build/autoconf/acoldnames.m4 create mode 100644 build/autoconf/acspecific.m4 create mode 100644 build/autoconf/alloc.m4 create mode 100644 build/autoconf/altoptions.m4 create mode 100644 build/autoconf/android.m4 create mode 100644 build/autoconf/arch.m4 create mode 100644 build/autoconf/autoconf.m4 create mode 100644 build/autoconf/autoconf.sh create mode 100644 build/autoconf/clang-plugin.m4 create mode 100644 build/autoconf/codeset.m4 create mode 100644 build/autoconf/compiler-opts.m4 create mode 100755 build/autoconf/config.guess create mode 100644 build/autoconf/config.status.m4 create mode 100755 build/autoconf/config.sub create mode 100644 build/autoconf/expandlibs.m4 create mode 100644 build/autoconf/hooks.m4 create mode 100644 build/autoconf/hotfixes.m4 create mode 100755 build/autoconf/install-sh create mode 100644 build/autoconf/mozheader.m4 create mode 100644 build/autoconf/mozprog.m4 create mode 100644 build/autoconf/pkg.m4 create mode 100644 build/autoconf/sanitize.m4 create mode 100644 build/autoconf/toolchain.m4 create mode 100644 build/binary-location.mk create mode 100644 build/build-clang/README create mode 100644 build/build-clang/android-mangling-error.patch create mode 100644 build/build-clang/bug47258-extract-symbols-mbcs.patch create mode 100755 build/build-clang/build-clang.py create mode 100644 build/build-clang/clang-10-linux64.json create mode 100644 build/build-clang/clang-11-android.json create mode 100644 build/build-clang/clang-11-linux64-aarch64-cross.json create mode 100644 build/build-clang/clang-11-linux64.json create mode 100644 build/build-clang/clang-11-macosx64.json create mode 100755 build/build-clang/clang-11-mingw.json create mode 100644 build/build-clang/clang-11-win64-2stage.json create mode 100644 build/build-clang/clang-11-win64.json create mode 100644 build/build-clang/clang-5.0-linux64.json create mode 100644 build/build-clang/clang-7-linux64.json create mode 100644 build/build-clang/clang-linux64.json create mode 100644 build/build-clang/clang-tidy-ci.patch create mode 100644 build/build-clang/clang-tidy-external-linux64.json create mode 100644 build/build-clang/clang-tidy-linux64.json create mode 100644 build/build-clang/clang-tidy-macosx64.json create mode 100644 build/build-clang/clang-tidy-no-errors.patch create mode 100644 build/build-clang/clang-tidy-win64.json create mode 100644 build/build-clang/compiler-rt-cross-compile.patch create mode 100644 build/build-clang/compiler-rt-no-codesign.patch create mode 100644 build/build-clang/critical_section_on_gcov_flush-rG02ce9d8ef5a8.patch create mode 100644 build/build-clang/downgrade-mangling-error.patch create mode 100644 build/build-clang/find_symbolizer_linux.patch create mode 100644 build/build-clang/find_symbolizer_linux_clang_10.patch create mode 100644 
build/build-clang/llvmorg-11-init-15486-gfc937806efd-dont-jump-to-landing-pads.patch create mode 100644 build/build-clang/llvmorg-11-init-4265-g2dcbdba8540.patch create mode 100644 build/build-clang/llvmorg-11-init-4265-g2dcbdba8540_clang_10.patch create mode 100644 build/build-clang/llvmorg-12-init-10926-gb79e990f401-LTO-new-pass-manager.patch create mode 100644 build/build-clang/loosen-msvc-detection.patch create mode 100644 build/build-clang/r350774.patch create mode 100644 build/build-clang/rG7e18aeba5062.patch create mode 100644 build/build-clang/rG7e18aeba5062_clang_10.patch create mode 100644 build/build-clang/rename_gcov_flush.patch create mode 100644 build/build-clang/rename_gcov_flush_7.patch create mode 100644 build/build-clang/rename_gcov_flush_clang_10.patch create mode 100644 build/build-clang/rename_gcov_flush_clang_11.patch create mode 100644 build/build-clang/revert-r362047-and-r362065.patch create mode 100644 build/build-clang/static-llvm-symbolizer.patch create mode 100644 build/build-clang/tsan-hang-be41a98ac222.patch create mode 100644 build/build-clang/tsan-hang-be41a98ac222_clang_10.patch create mode 100644 build/build-clang/unpoison-thread-stacks.patch create mode 100644 build/build-clang/unpoison-thread-stacks_clang_10.patch create mode 100644 build/build-infer/README create mode 100755 build/build-infer/build-infer.py create mode 100644 build/build-infer/infer-linux64.json create mode 100644 build/build-rust/README create mode 100644 build/build-rust/example.patch create mode 100644 build/build_virtualenv_packages.txt create mode 100644 build/buildconfig.py create mode 100755 build/cargo-host-linker create mode 100644 build/cargo-host-linker.bat create mode 100755 build/cargo-linker create mode 100644 build/cargo-linker.bat create mode 100755 build/checksums.py create mode 100644 build/clang-plugin/.clang-format create mode 100644 build/clang-plugin/ArithmeticArgChecker.cpp create mode 100644 build/clang-plugin/ArithmeticArgChecker.h create mode 100644 build/clang-plugin/AssertAssignmentChecker.cpp create mode 100644 build/clang-plugin/AssertAssignmentChecker.h create mode 100644 build/clang-plugin/BaseCheck.h create mode 100644 build/clang-plugin/CanRunScriptChecker.cpp create mode 100644 build/clang-plugin/CanRunScriptChecker.h create mode 100644 build/clang-plugin/Checks.inc create mode 100644 build/clang-plugin/ChecksIncludes.inc create mode 100644 build/clang-plugin/CustomAttributes.cpp create mode 100644 build/clang-plugin/CustomAttributes.h create mode 100644 build/clang-plugin/CustomAttributes.inc create mode 100644 build/clang-plugin/CustomMatchers.h create mode 100644 build/clang-plugin/CustomTypeAnnotation.cpp create mode 100644 build/clang-plugin/CustomTypeAnnotation.h create mode 100644 build/clang-plugin/DanglingOnTemporaryChecker.cpp create mode 100644 build/clang-plugin/DanglingOnTemporaryChecker.h create mode 100644 build/clang-plugin/DiagnosticsMatcher.cpp create mode 100644 build/clang-plugin/DiagnosticsMatcher.h create mode 100644 build/clang-plugin/ExplicitImplicitChecker.cpp create mode 100644 build/clang-plugin/ExplicitImplicitChecker.h create mode 100644 build/clang-plugin/ExplicitOperatorBoolChecker.cpp create mode 100644 build/clang-plugin/ExplicitOperatorBoolChecker.h create mode 100644 build/clang-plugin/FopenUsageChecker.cpp create mode 100644 build/clang-plugin/FopenUsageChecker.h create mode 100644 build/clang-plugin/KungFuDeathGripChecker.cpp create mode 100644 build/clang-plugin/KungFuDeathGripChecker.h create mode 100644 
build/clang-plugin/LoadLibraryUsageChecker.cpp create mode 100644 build/clang-plugin/LoadLibraryUsageChecker.h create mode 100644 build/clang-plugin/Makefile.in create mode 100644 build/clang-plugin/MemMoveAnnotation.h create mode 100644 build/clang-plugin/MozCheckAction.cpp create mode 100644 build/clang-plugin/MozillaTidyModule.cpp create mode 100644 build/clang-plugin/MustOverrideChecker.cpp create mode 100644 build/clang-plugin/MustOverrideChecker.h create mode 100644 build/clang-plugin/MustReturnFromCallerChecker.cpp create mode 100644 build/clang-plugin/MustReturnFromCallerChecker.h create mode 100644 build/clang-plugin/MustUseChecker.cpp create mode 100644 build/clang-plugin/MustUseChecker.h create mode 100644 build/clang-plugin/NaNExprChecker.cpp create mode 100644 build/clang-plugin/NaNExprChecker.h create mode 100644 build/clang-plugin/NeedsNoVTableTypeChecker.cpp create mode 100644 build/clang-plugin/NeedsNoVTableTypeChecker.h create mode 100644 build/clang-plugin/NoAddRefReleaseOnReturnChecker.cpp create mode 100644 build/clang-plugin/NoAddRefReleaseOnReturnChecker.h create mode 100644 build/clang-plugin/NoAutoTypeChecker.cpp create mode 100644 build/clang-plugin/NoAutoTypeChecker.h create mode 100644 build/clang-plugin/NoDuplicateRefCntMemberChecker.cpp create mode 100644 build/clang-plugin/NoDuplicateRefCntMemberChecker.h create mode 100644 build/clang-plugin/NoExplicitMoveConstructorChecker.cpp create mode 100644 build/clang-plugin/NoExplicitMoveConstructorChecker.h create mode 100644 build/clang-plugin/NoNewThreadsChecker.cpp create mode 100644 build/clang-plugin/NoNewThreadsChecker.h create mode 100644 build/clang-plugin/NoPrincipalGetURI.cpp create mode 100644 build/clang-plugin/NoPrincipalGetURI.h create mode 100644 build/clang-plugin/NoUsingNamespaceMozillaJavaChecker.cpp create mode 100644 build/clang-plugin/NoUsingNamespaceMozillaJavaChecker.h create mode 100644 build/clang-plugin/NonMemMovableMemberChecker.cpp create mode 100644 build/clang-plugin/NonMemMovableMemberChecker.h create mode 100644 build/clang-plugin/NonMemMovableTemplateArgChecker.cpp create mode 100644 build/clang-plugin/NonMemMovableTemplateArgChecker.h create mode 100644 build/clang-plugin/NonParamInsideFunctionDeclChecker.cpp create mode 100644 build/clang-plugin/NonParamInsideFunctionDeclChecker.h create mode 100644 build/clang-plugin/NonTrivialTypeInFfiChecker.cpp create mode 100644 build/clang-plugin/NonTrivialTypeInFfiChecker.h create mode 100644 build/clang-plugin/OverrideBaseCallChecker.cpp create mode 100644 build/clang-plugin/OverrideBaseCallChecker.h create mode 100644 build/clang-plugin/OverrideBaseCallUsageChecker.cpp create mode 100644 build/clang-plugin/OverrideBaseCallUsageChecker.h create mode 100644 build/clang-plugin/ParamTraitsEnumChecker.cpp create mode 100644 build/clang-plugin/ParamTraitsEnumChecker.h create mode 100644 build/clang-plugin/RecurseGuard.h create mode 100644 build/clang-plugin/RefCountedCopyConstructorChecker.cpp create mode 100644 build/clang-plugin/RefCountedCopyConstructorChecker.h create mode 100644 build/clang-plugin/RefCountedInsideLambdaChecker.cpp create mode 100644 build/clang-plugin/RefCountedInsideLambdaChecker.h create mode 100644 build/clang-plugin/ScopeChecker.cpp create mode 100644 build/clang-plugin/ScopeChecker.h create mode 100644 build/clang-plugin/SprintfLiteralChecker.cpp create mode 100644 build/clang-plugin/SprintfLiteralChecker.h create mode 100644 build/clang-plugin/StmtToBlockMap.h create mode 100644 
build/clang-plugin/TemporaryLifetimeBoundChecker.cpp create mode 100644 build/clang-plugin/TemporaryLifetimeBoundChecker.h create mode 100644 build/clang-plugin/ThirdPartyPaths.h create mode 100644 build/clang-plugin/ThirdPartyPaths.py create mode 100644 build/clang-plugin/ThreadAllows.py create mode 100644 build/clang-plugin/ThreadAllows.txt create mode 100644 build/clang-plugin/ThreadFileAllows.txt create mode 100644 build/clang-plugin/TrivialCtorDtorChecker.cpp create mode 100644 build/clang-plugin/TrivialCtorDtorChecker.h create mode 100644 build/clang-plugin/TrivialDtorChecker.cpp create mode 100644 build/clang-plugin/TrivialDtorChecker.h create mode 100644 build/clang-plugin/Utils.h create mode 100644 build/clang-plugin/VariableUsageHelpers.cpp create mode 100644 build/clang-plugin/VariableUsageHelpers.h create mode 100644 build/clang-plugin/alpha/AlphaChecks.inc create mode 100644 build/clang-plugin/alpha/AlphaIncludes.inc create mode 100644 build/clang-plugin/alpha/TempRefPtrChecker.cpp create mode 100644 build/clang-plugin/alpha/TempRefPtrChecker.h create mode 100644 build/clang-plugin/alpha/sources.mozbuild create mode 100644 build/clang-plugin/alpha/tests/TestTempRefPtr.cpp create mode 100644 build/clang-plugin/alpha/tests/sources.mozbuild create mode 100644 build/clang-plugin/external/CustomAttributes.inc create mode 100644 build/clang-plugin/external/ExternalChecks.inc create mode 100644 build/clang-plugin/external/ExternalIncludes.inc create mode 100644 build/clang-plugin/external/sources.mozbuild create mode 100644 build/clang-plugin/external/tests/sources.mozbuild create mode 100755 build/clang-plugin/import_mozilla_checks.py create mode 100644 build/clang-plugin/moz.build create mode 100644 build/clang-plugin/mozsearch-plugin/FileOperations.cpp create mode 100644 build/clang-plugin/mozsearch-plugin/FileOperations.h create mode 100644 build/clang-plugin/mozsearch-plugin/JSONFormatter.cpp create mode 100644 build/clang-plugin/mozsearch-plugin/JSONFormatter.h create mode 100644 build/clang-plugin/mozsearch-plugin/MozsearchIndexer.cpp create mode 100644 build/clang-plugin/mozsearch-plugin/README create mode 100644 build/clang-plugin/mozsearch-plugin/StringOperations.cpp create mode 100644 build/clang-plugin/mozsearch-plugin/StringOperations.h create mode 100644 build/clang-plugin/plugin.h create mode 100644 build/clang-plugin/tests/Makefile.in create mode 100644 build/clang-plugin/tests/NonParameterTestCases.h create mode 100644 build/clang-plugin/tests/TestAssertWithAssignment.cpp create mode 100644 build/clang-plugin/tests/TestBadImplicitConversionCtor.cpp create mode 100644 build/clang-plugin/tests/TestCanRunScript.cpp create mode 100644 build/clang-plugin/tests/TestCustomHeap.cpp create mode 100644 build/clang-plugin/tests/TestDanglingOnTemporary.cpp create mode 100644 build/clang-plugin/tests/TestExplicitOperatorBool.cpp create mode 100644 build/clang-plugin/tests/TestFopenUsage.cpp create mode 100644 build/clang-plugin/tests/TestGlobalClass.cpp create mode 100644 build/clang-plugin/tests/TestHeapClass.cpp create mode 100644 build/clang-plugin/tests/TestInheritTypeAnnotationsFromTemplateArgs.cpp create mode 100644 build/clang-plugin/tests/TestKungFuDeathGrip.cpp create mode 100644 build/clang-plugin/tests/TestLoadLibraryUsage.cpp create mode 100644 build/clang-plugin/tests/TestMultipleAnnotations.cpp create mode 100644 build/clang-plugin/tests/TestMustOverride.cpp create mode 100644 build/clang-plugin/tests/TestMustReturnFromCaller.cpp create mode 100644 
build/clang-plugin/tests/TestMustUse.cpp create mode 100644 build/clang-plugin/tests/TestNANTestingExpr.cpp create mode 100644 build/clang-plugin/tests/TestNANTestingExprC.c create mode 100644 build/clang-plugin/tests/TestNeedsNoVTableType.cpp create mode 100644 build/clang-plugin/tests/TestNoAddRefReleaseOnReturn.cpp create mode 100644 build/clang-plugin/tests/TestNoArithmeticExprInArgument.cpp create mode 100644 build/clang-plugin/tests/TestNoAutoType.cpp create mode 100644 build/clang-plugin/tests/TestNoDuplicateRefCntMember.cpp create mode 100644 build/clang-plugin/tests/TestNoExplicitMoveConstructor.cpp create mode 100644 build/clang-plugin/tests/TestNoNewThreadsChecker.cpp create mode 100644 build/clang-plugin/tests/TestNoPrincipalGetUri.cpp create mode 100644 build/clang-plugin/tests/TestNoRefcountedInsideLambdas.cpp create mode 100644 build/clang-plugin/tests/TestNoUsingNamespaceMozillaJava.cpp create mode 100644 build/clang-plugin/tests/TestNonHeapClass.cpp create mode 100644 build/clang-plugin/tests/TestNonMemMovable.cpp create mode 100644 build/clang-plugin/tests/TestNonMemMovableStd.cpp create mode 100644 build/clang-plugin/tests/TestNonMemMovableStdAtomic.cpp create mode 100644 build/clang-plugin/tests/TestNonParameterChecker.cpp create mode 100644 build/clang-plugin/tests/TestNonTemporaryClass.cpp create mode 100644 build/clang-plugin/tests/TestNonTrivialTypeInFfi.cpp create mode 100644 build/clang-plugin/tests/TestOverrideBaseCall.cpp create mode 100644 build/clang-plugin/tests/TestOverrideBaseCallAnnotation.cpp create mode 100644 build/clang-plugin/tests/TestParamTraitsEnum.cpp create mode 100644 build/clang-plugin/tests/TestRefCountedCopyConstructor.cpp create mode 100644 build/clang-plugin/tests/TestSprintfLiteral.cpp create mode 100644 build/clang-plugin/tests/TestStackClass.cpp create mode 100644 build/clang-plugin/tests/TestStaticLocalClass.cpp create mode 100644 build/clang-plugin/tests/TestTemporaryClass.cpp create mode 100644 build/clang-plugin/tests/TestTemporaryLifetimeBound.cpp create mode 100644 build/clang-plugin/tests/TestTrivialCtorDtor.cpp create mode 100644 build/clang-plugin/tests/TestTrivialDtor.cpp create mode 100644 build/clang-plugin/tests/moz.build create mode 100644 build/common_virtualenv_packages.txt create mode 100644 build/compare-mozconfig/compare-mozconfigs.py create mode 100644 build/compare-mozconfig/python.ini create mode 100644 build/debian-packages/cmake-jessie.diff create mode 100644 build/debian-packages/gdb-jessie.diff create mode 100644 build/debian-packages/python-zstandard-jessie.diff create mode 100644 build/debian-packages/python3.6-jessie.diff create mode 100644 build/debian-packages/valgrind-jessie.diff create mode 100644 build/defines.sh create mode 100644 build/docs/build-overview.rst create mode 100644 build/docs/build-targets.rst create mode 100644 build/docs/cppeclipse.rst create mode 100644 build/docs/defining-binaries.rst create mode 100644 build/docs/defining-xpcom-components.rst create mode 100644 build/docs/environment-variables.rst create mode 100644 build/docs/files-metadata.rst create mode 100644 build/docs/glossary.rst create mode 100644 build/docs/gn.rst create mode 100644 build/docs/index.rst create mode 100644 build/docs/jar-manifests.rst create mode 100644 build/docs/locales.rst create mode 100644 build/docs/mozbuild-files.rst create mode 100644 build/docs/mozbuild-symbols.rst create mode 100644 build/docs/mozbuild/index.rst create mode 100644 build/docs/mozconfigs.rst create mode 100644 build/docs/mozinfo.rst 
create mode 100644 build/docs/pgo.rst create mode 100644 build/docs/preprocessor.rst create mode 100644 build/docs/python.rst create mode 100644 build/docs/rust.rst create mode 100644 build/docs/sccache-dist.rst create mode 100644 build/docs/slow.rst create mode 100644 build/docs/sparse.rst create mode 100644 build/docs/supported-configurations.rst create mode 100644 build/docs/telemetry.rst create mode 100644 build/docs/test_certificates.rst create mode 100644 build/docs/test_manifests.rst create mode 100644 build/docs/toolchains.rst create mode 100644 build/docs/unified-builds.rst create mode 100644 build/docs/visualstudio.rst create mode 100644 build/dumbmake-dependencies create mode 100644 build/gecko_templates.mozbuild create mode 100644 build/gen_symverscript.py create mode 100644 build/gen_test_packages_manifest.py create mode 100755 build/genrc.sh create mode 100644 build/glean_requirements.in create mode 100644 build/glean_requirements.txt create mode 100644 build/gn.mozbuild create mode 100644 build/gyp.mozbuild create mode 100644 build/gyp_base.mozbuild create mode 100644 build/gyp_includes/common.gypi create mode 100644 build/gyp_includes/filename_rules.gypi create mode 100644 build/gyp_includes/internal/release_defaults.gypi create mode 100644 build/gyp_includes/internal/release_impl.gypi create mode 100644 build/gyp_includes/internal/release_impl_official.gypi create mode 100644 build/gyp_includes/release.gypi create mode 100644 build/liblowercase/Cargo.lock create mode 100644 build/liblowercase/Cargo.toml create mode 100644 build/liblowercase/lib.rs create mode 100644 build/mach_bootstrap.py create mode 100644 build/mach_virtualenv_packages.txt create mode 100644 build/macosx/cross-mozconfig.common create mode 100755 build/macosx/llvm-dsymutil create mode 100644 build/macosx/local-mozconfig.common create mode 100644 build/macosx/mozconfig.common create mode 100644 build/macosx/permissions/chown_revert.c create mode 100644 build/macosx/permissions/chown_root.c create mode 100644 build/midl.py create mode 100644 build/moz-automation.mk create mode 100644 build/moz.build create mode 100644 build/moz.configure/android-ndk.configure create mode 100644 build/moz.configure/android-sdk.configure create mode 100644 build/moz.configure/arm.configure create mode 100644 build/moz.configure/bindgen.configure create mode 100644 build/moz.configure/checks.configure create mode 100755 build/moz.configure/compile-checks.configure create mode 100644 build/moz.configure/compilers-util.configure create mode 100644 build/moz.configure/flags.configure create mode 100644 build/moz.configure/headers.configure create mode 100644 build/moz.configure/init.configure create mode 100644 build/moz.configure/java.configure create mode 100644 build/moz.configure/keyfiles.configure create mode 100644 build/moz.configure/lto-pgo.configure create mode 100644 build/moz.configure/memory.configure create mode 100644 build/moz.configure/node.configure create mode 100644 build/moz.configure/nspr.configure create mode 100644 build/moz.configure/nss.configure create mode 100644 build/moz.configure/old.configure create mode 100644 build/moz.configure/pkg.configure create mode 100644 build/moz.configure/rust.configure create mode 100755 build/moz.configure/toolchain.configure create mode 100644 build/moz.configure/update-programs.configure create mode 100644 build/moz.configure/util.configure create mode 100755 build/moz.configure/warnings.configure create mode 100644 build/moz.configure/windows.configure create mode 
100644 build/mozconfig.artifact create mode 100644 build/mozconfig.artifact.automation create mode 100644 build/mozconfig.automation create mode 100644 build/mozconfig.cache create mode 100644 build/mozconfig.clang-cl create mode 100644 build/mozconfig.comm-support create mode 100644 build/mozconfig.common create mode 100644 build/mozconfig.common.override create mode 100644 build/mozconfig.lld-link create mode 100644 build/mozconfig.no-compile create mode 100644 build/mozconfig.rust create mode 100644 build/mozconfig.wasm-sandboxing create mode 100644 build/mozconfig.win-common create mode 100644 build/non-unified-compat create mode 100755 build/package/mac_osx/make-diskimage create mode 100644 build/package/mac_osx/mozilla-background.jpg create mode 100644 build/package/mac_osx/mozilla.dsstore create mode 100755 build/package/mac_osx/unpack-diskimage create mode 100644 build/pgo/blueprint/LICENSE create mode 100644 build/pgo/blueprint/elements.html create mode 100644 build/pgo/blueprint/fancytype-screen.css create mode 100644 build/pgo/blueprint/forms.html create mode 100644 build/pgo/blueprint/grid.html create mode 100644 build/pgo/blueprint/grid.png create mode 100644 build/pgo/blueprint/print.css create mode 100644 build/pgo/blueprint/sample.html create mode 100644 build/pgo/blueprint/screen.css create mode 100644 build/pgo/blueprint/test-small.jpg create mode 100644 build/pgo/blueprint/test.jpg create mode 100644 build/pgo/certs/README create mode 100644 build/pgo/certs/alternateroot.ca create mode 100644 build/pgo/certs/alternateroot.ca.keyspec create mode 100644 build/pgo/certs/alternateroot.certspec create mode 100644 build/pgo/certs/badCertDomain.certspec create mode 100644 build/pgo/certs/bug413909cert.certspec create mode 100644 build/pgo/certs/cert9.db create mode 100644 build/pgo/certs/dynamicPinningBad.certspec create mode 100644 build/pgo/certs/dynamicPinningBad.server.keyspec create mode 100644 build/pgo/certs/dynamicPinningGood.certspec create mode 100644 build/pgo/certs/escapeattack1.certspec create mode 100644 build/pgo/certs/evintermediate.ca create mode 100644 build/pgo/certs/evintermediate.ca.keyspec create mode 100644 build/pgo/certs/evintermediate.certspec create mode 100644 build/pgo/certs/expired.certspec create mode 100644 build/pgo/certs/imminently_distrusted.certspec create mode 100644 build/pgo/certs/key4.db create mode 100644 build/pgo/certs/mochitest.certspec create mode 100644 build/pgo/certs/mochitest.client create mode 100644 build/pgo/certs/mochitest.client.keyspec create mode 100644 build/pgo/certs/noSubjectAltName.certspec create mode 100644 build/pgo/certs/pgoca.ca create mode 100644 build/pgo/certs/pgoca.ca.keyspec create mode 100644 build/pgo/certs/pgoca.certspec create mode 100644 build/pgo/certs/selfsigned.certspec create mode 100644 build/pgo/certs/sha1_end_entity.certspec create mode 100644 build/pgo/certs/sha256_end_entity.certspec create mode 100644 build/pgo/certs/staticPinningBad.certspec create mode 100644 build/pgo/certs/staticPinningBad.server.keyspec create mode 100644 build/pgo/certs/unknown_ca.certspec create mode 100644 build/pgo/certs/untrusted.certspec create mode 100644 build/pgo/certs/untrustedandexpired.certspec create mode 100644 build/pgo/favicon.ico create mode 100644 build/pgo/genpgocert.py create mode 100644 build/pgo/index.html create mode 100644 build/pgo/js-input/3d-thingy.html create mode 100644 build/pgo/js-input/crypto-otp.html create mode 100644 build/pgo/js-input/key.gif create mode 100644 
build/pgo/js-input/sunspider/3d-cube.html create mode 100644 build/pgo/js-input/sunspider/3d-morph.html create mode 100644 build/pgo/js-input/sunspider/3d-raytrace.html create mode 100644 build/pgo/js-input/sunspider/access-binary-trees.html create mode 100644 build/pgo/js-input/sunspider/access-fannkuch.html create mode 100644 build/pgo/js-input/sunspider/access-nbody.html create mode 100644 build/pgo/js-input/sunspider/access-nsieve.html create mode 100644 build/pgo/js-input/sunspider/bitops-3bit-bits-in-byte.html create mode 100644 build/pgo/js-input/sunspider/bitops-bits-in-byte.html create mode 100644 build/pgo/js-input/sunspider/bitops-bitwise-and.html create mode 100644 build/pgo/js-input/sunspider/bitops-nsieve-bits.html create mode 100644 build/pgo/js-input/sunspider/controlflow-recursive.html create mode 100644 build/pgo/js-input/sunspider/crypto-aes.html create mode 100644 build/pgo/js-input/sunspider/crypto-md5.html create mode 100644 build/pgo/js-input/sunspider/crypto-sha1.html create mode 100644 build/pgo/js-input/sunspider/date-format-tofte.html create mode 100644 build/pgo/js-input/sunspider/date-format-xparb.html create mode 100644 build/pgo/js-input/sunspider/math-cordic.html create mode 100644 build/pgo/js-input/sunspider/math-partial-sums.html create mode 100644 build/pgo/js-input/sunspider/math-spectral-norm.html create mode 100644 build/pgo/js-input/sunspider/regexp-dna.html create mode 100644 build/pgo/js-input/sunspider/string-base64.html create mode 100644 build/pgo/js-input/sunspider/string-fasta.html create mode 100644 build/pgo/js-input/sunspider/string-tagcloud.html create mode 100644 build/pgo/js-input/sunspider/string-unpack-code.html create mode 100644 build/pgo/js-input/sunspider/string-validate-input.html create mode 100644 build/pgo/js-input/valid-xhtml10.png create mode 100755 build/pgo/profileserver.py create mode 100644 build/pgo/server-locations.txt create mode 100644 build/psutil_requirements.in create mode 100644 build/psutil_requirements.txt create mode 100755 build/qemu-wrap create mode 100644 build/sanitizers/asan_blacklist_win.txt create mode 100644 build/sanitizers/ubsan_enum_blacklist.txt create mode 100644 build/sanitizers/ubsan_object_size_blacklist.txt create mode 100644 build/sanitizers/ubsan_pointer_overflow_blacklist.txt create mode 100644 build/sanitizers/ubsan_signed_overflow_blacklist.txt create mode 100644 build/sanitizers/ubsan_unsigned_overflow_blacklist.txt create mode 100644 build/sparse-profiles/docker-image create mode 100644 build/sparse-profiles/github-sync create mode 100644 build/sparse-profiles/liblowercase create mode 100644 build/sparse-profiles/mach create mode 100644 build/sparse-profiles/mozharness create mode 100644 build/sparse-profiles/perftest create mode 100644 build/sparse-profiles/profile-generate create mode 100644 build/sparse-profiles/push-to-try create mode 100644 build/sparse-profiles/sphinx-docs create mode 100644 build/sparse-profiles/taskgraph create mode 100644 build/sparse-profiles/toolchain-build create mode 100644 build/sparse-profiles/tps create mode 100644 build/sparse-profiles/update-verify create mode 100644 build/sparse-profiles/upload-generated-sources create mode 100644 build/sparse-profiles/upload-symbols create mode 100644 build/sparse-profiles/webrender create mode 100644 build/sparse-profiles/wgpu create mode 100644 build/submit_telemetry_data.py create mode 100644 build/templates.mozbuild create mode 100644 build/test_templates.mozbuild create mode 100644 build/tests/cram/cram.ini 
create mode 100644 build/tests/cram/test_configure_help.t create mode 100644 build/unix/aix.exp create mode 100644 build/unix/build-binutils/3A24BC1E8FB409FA9F14371813FCEF89DD9E3C4F.key create mode 100755 build/unix/build-binutils/build-binutils.sh create mode 100644 build/unix/build-gcc/07F3DBBECC1A39605078094D980C197698C3739D.key create mode 100644 build/unix/build-gcc/13975A70E63C361C73AE69EF6EEB81F8981C74C7.key create mode 100644 build/unix/build-gcc/33C235A34C46AA3FFB293709A328C3A2C3C45C06.key create mode 100644 build/unix/build-gcc/343C2FF0FBEE5EC2EDBEF399F3599FF828C67298.key create mode 100644 build/unix/build-gcc/5ED46A6721D365587791E2AA783FCD8E58BCAFBA.key create mode 100644 build/unix/build-gcc/7F74F97C103468EE5D750B583AB00996FC26A641.key create mode 100644 build/unix/build-gcc/AD17A21EF8AED8F1CC02DBD9F7D5C9BF765C61E3.key create mode 100644 build/unix/build-gcc/DA23579A74D4AD9AF9D3F945CEFAC8EAAF17519D.key create mode 100644 build/unix/build-gcc/EAF1C276A747E9ED86210CBAC3126D3B4AE55E93.key create mode 100755 build/unix/build-gcc/build-gcc.sh create mode 100755 build/unix/build-hfsplus/build-hfsplus.sh create mode 100644 build/unix/elfhack/Makefile.in create mode 100644 build/unix/elfhack/README create mode 100644 build/unix/elfhack/dummy.c create mode 100644 build/unix/elfhack/elf.cpp create mode 100644 build/unix/elfhack/elfhack.cpp create mode 100644 build/unix/elfhack/elfxx.h create mode 100644 build/unix/elfhack/inject.c create mode 100644 build/unix/elfhack/inject/copy_source.py create mode 100644 build/unix/elfhack/inject/moz.build create mode 100644 build/unix/elfhack/moz.build create mode 100644 build/unix/elfhack/test-array.c create mode 100644 build/unix/elfhack/test-ctors.c create mode 100644 build/unix/elfhack/test.c create mode 100644 build/unix/moz.build create mode 100644 build/unix/mozconfig.asan create mode 100644 build/unix/mozconfig.linux create mode 100644 build/unix/mozconfig.linux32 create mode 100644 build/unix/mozconfig.stdcxx create mode 100644 build/unix/mozconfig.tsan create mode 100644 build/unix/mozconfig.unix create mode 100644 build/unix/mozilla.in create mode 100755 build/unix/print-non-newline.sh create mode 100644 build/unix/rewrite_asan_dylib.py create mode 100644 build/unix/run-gprof.sh create mode 100644 build/unix/run-hiprof.sh create mode 100755 build/unix/run-mozilla.sh create mode 100644 build/unix/run-third.sh create mode 100644 build/unix/stdc++compat/hide_std.ld create mode 100644 build/unix/stdc++compat/moz.build create mode 100644 build/unix/stdc++compat/stdc++compat.cpp create mode 100644 build/update-settings.ini create mode 100644 build/upload.py create mode 100644 build/upload_generated_sources.py create mode 100644 build/valgrind/__init__.py create mode 100644 build/valgrind/cross-architecture.sup create mode 100644 build/valgrind/i386-pc-linux-gnu.sup create mode 100644 build/valgrind/mach_commands.py create mode 100644 build/valgrind/output_handler.py create mode 100644 build/valgrind/x86_64-pc-linux-gnu.sup create mode 100644 build/variables.py create mode 100644 build/win32/__init__.py create mode 100644 build/win32/autowinchecksec.py create mode 100644 build/win32/crashinject.cpp create mode 100644 build/win32/crashinjectdll/crashinjectdll.cpp create mode 100644 build/win32/crashinjectdll/crashinjectdll.def create mode 100644 build/win32/crashinjectdll/moz.build create mode 100644 build/win32/moz.build create mode 100644 build/win32/mozconfig.vs-latest create mode 100644 build/win32/mozconfig.vs2017 create mode 100644 
build/win32/nsis-no-insert-timestamp.patch create mode 100644 build/win32/orderfile.txt create mode 100644 build/win64-aarch64/mozconfig.vs-latest create mode 100644 build/win64-aarch64/mozconfig.vs2017 create mode 100644 build/win64/mozconfig.asan create mode 100644 build/win64/mozconfig.vs-latest create mode 100644 build/win64/mozconfig.vs2017 create mode 100644 build/win64/orderfile.txt create mode 100644 build/windows_toolchain.py create mode 100644 build/workspace-hack/Cargo.toml create mode 100644 build/workspace-hack/src/lib.rs create mode 100644 build/zstandard_requirements.in create mode 100644 build/zstandard_requirements.txt (limited to 'build')

diff --git a/build/.gdbinit b/build/.gdbinit
new file mode 100644
index 0000000000..dc156f5679
--- /dev/null
+++ b/build/.gdbinit
@@ -0,0 +1,204 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# .gdbinit file for debugging Mozilla
+
+# You may need to put an 'add-auto-load-safe-path' command in your
+# $HOME/.gdbinit file to get GDB to trust this file. If your builds are
+# generally in $HOME/moz, then you can say:
+#
+# add-auto-load-safe-path ~/moz
+
+# Don't stop for the SIG32/33/etc signals that Flash produces
+handle SIG32 noprint nostop pass
+handle SIG33 noprint nostop pass
+handle SIGPIPE noprint nostop pass
+
+# Don't stop for certain other signals where it's not useful,
+# such as the SIG64 signals triggered by the Linux
+# sandboxing code on older kernels.
+handle SIG38 noprint nostop pass
+handle SIG64 noprint nostop pass
+handle SIGSYS noprint nostop pass
+
+# Show the concrete types behind nsIFoo
+set print object on
+
+# run when using the auto-solib-add trick
+define prun
+  tbreak main
+  run
+  set auto-solib-add 0
+  cont
+end
+
+# run -mail, when using the auto-solib-add trick
+define pmail
+  tbreak main
+  run -mail
+  set auto-solib-add 0
+  cont
+end
+
+# Define a "pu" command to display PRUnichar * strings (100 chars max)
+# Also allows an optional argument for how many chars to print as long as
+# it's less than 100.
+define pu
+  set $uni = $arg0
+  if $argc == 2
+    set $limit = $arg1
+    if $limit > 100
+      set $limit = 100
+    end
+  else
+    set $limit = 100
+  end
+  # scratch array with space for 100 chars plus null terminator. Make
+  # sure to not use ' ' as the char so this copy/pastes well.
+  set $scratch = "____________________________________________________________________________________________________"
+  set $i = 0
+  set $scratch_idx = 0
+  while (*$uni && $i++ < $limit)
+    if (*$uni < 0x80)
+      set $scratch[$scratch_idx++] = *(char*)$uni++
+    else
+      if ($scratch_idx > 0)
+        set $scratch[$scratch_idx] = '\0'
+        print $scratch
+        set $scratch_idx = 0
+      end
+      print /x *(short*)$uni++
+    end
+  end
+  if ($scratch_idx > 0)
+    set $scratch[$scratch_idx] = '\0'
+    print $scratch
+  end
+end
+
+# Define a "ps" command to display subclasses of nsAC?String. Note that
+# this assumes strings as of Gecko 1.9 (well, and probably a few
+# releases before that as well); going back far enough will get you
+# to string classes that this function doesn't work for.
+define ps + set $str = $arg0 + if (sizeof(*$str.mData) == 1 && ($str.mFlags & 1) != 0) + print $str.mData + else + pu $str.mData $str.mLength + end +end + +# Define a "pa" command to display the string value for an nsAtom +define pa + set $atom = $arg0 + if (sizeof(*((&*$atom)->mString)) == 2) + pu (&*$atom)->mString + end +end + +# define a "pxul" command to display the type of a XUL element from +# an nsXULElement* pointer. +define pxul + set $p = $arg0 + print $p->mNodeInfo.mRawPtr->mInner.mName->mStaticAtom->mString +end + +# define a "prefcnt" command to display the refcount of an XPCOM obj +define prefcnt + set $p = $arg0 + print ((nsPurpleBufferEntry*)$p->mRefCnt.mTagged)->mRefCnt +end + +# define a "ptag" command to display the tag name of a content node +define ptag + set $p = $arg0 + pa $p->mNodeInfo.mRawPtr->mInner.mName +end + +## +## nsTArray +## +define ptarray + if $argc == 0 + help ptarray + else + set $size = $arg0.mHdr->mLength + set $capacity = $arg0.mHdr->mCapacity + set $size_max = $size - 1 + set $elts = $arg0.Elements() + end + if $argc == 1 + set $i = 0 + while $i < $size + printf "elem[%u]: ", $i + p *($elts + $i) + set $i++ + end + end + if $argc == 2 + set $idx = $arg1 + if $idx < 0 || $idx > $size_max + printf "idx1, idx2 are not in acceptable range: [0..%u].\n", $size_max + else + printf "elem[%u]: ", $idx + p *($elts + $idx) + end + end + if $argc == 3 + set $start_idx = $arg1 + set $stop_idx = $arg2 + if $start_idx > $stop_idx + set $tmp_idx = $start_idx + set $start_idx = $stop_idx + set $stop_idx = $tmp_idx + end + if $start_idx < 0 || $stop_idx < 0 || $start_idx > $size_max || $stop_idx > $size_max + printf "idx1, idx2 are not in acceptable range: [0..%u].\n", $size_max + else + set $i = $start_idx + while $i <= $stop_idx + printf "elem[%u]: ", $i + p *($elts + $i) + set $i++ + end + end + end + if $argc > 0 + printf "nsTArray length = %u\n", $size + printf "nsTArray capacity = %u\n", $capacity + printf "Element " + whatis *$elts + end +end + +document ptarray + Prints nsTArray information. + Syntax: ptarray + Note: idx, idx1 and idx2 must be in acceptable range [0...size()-1]. + Examples: + ptarray a - Prints tarray content, size, capacity and T typedef + ptarray a 0 - Prints element[idx] from tarray + ptarray a 1 2 - Prints elements in range [idx1..idx2] from tarray +end + +define js + call DumpJSStack() +end + +define ft + call $arg0->DumpFrameTree() +end + +define ftp + call $arg0->DumpFrameTreeInCSSPixels() +end + +define ftl + call $arg0->DumpFrameTreeLimited() +end + +define ftlp + call $arg0->DumpFrameTreeLimitedInCSSPixels() +end diff --git a/build/.gdbinit.loader b/build/.gdbinit.loader new file mode 100644 index 0000000000..e8a13432f7 --- /dev/null +++ b/build/.gdbinit.loader @@ -0,0 +1,29 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# loader for .gdbinit file + +# This file provides a guard against multiple inclusion. GDB command syntax is +# rather limited in that you cannot have a `documentation` command inside of an +# `if`. So we use a separate loader file that sources `.gdbinit` within the +# `if`. + +# You may need to put an 'add-auto-load-safe-path' command in your +# $HOME/.gdbinit file to get GDB to trust this file. 
If your builds are +# generally in $HOME/moz, then you can say: +# +# add-auto-load-safe-path ~/moz + +# Multiple include guard +if $_moz_gdbinit_loaded + # already loaded +else + set $_moz_gdbinit_loaded=1 + + source -s build/.gdbinit + + # This requires $objdir to have been added to gdb's source directory search + # path. Normally this will be done by libxul.so-gdb.py or js-gdb.py. + source -s build/.gdbinit.py +end diff --git a/build/.gdbinit.py.in b/build/.gdbinit.py.in new file mode 100644 index 0000000000..ec8c11b51b --- /dev/null +++ b/build/.gdbinit.py.in @@ -0,0 +1,19 @@ +#filter substitution + +import os +import sys + +sys.path.append(os.path.join('@topsrcdir@', 'js', 'src', 'gdb')) +sys.path.append(os.path.join('@topsrcdir@', 'python', 'gdbpp')) + +# JS prettyprinters + +import mozilla.autoload +mozilla.autoload.register(gdb.current_objfile()) + +import mozilla.asmjs +mozilla.asmjs.install() + +# Gecko prettyprinters + +import gdbpp diff --git a/build/.lldbinit.in b/build/.lldbinit.in new file mode 100644 index 0000000000..3c566e0b99 --- /dev/null +++ b/build/.lldbinit.in @@ -0,0 +1,20 @@ +# This must be the first Python variable set in this file +script ignore__see_bug_1605268 = True + +#filter substitution +script topsrcdir = "@topsrcdir@"; lldb.debugger.HandleCommand("command source -s true '%s'" % os.path.join(topsrcdir, ".lldbinit")) + +#ifdef MOZ_WIDGET_ANDROID +settings set symbols.enable-external-lookup true + +# This is where libxul.so and libmozglue.so are produced in full builds. +settings append target.exec-search-paths @topobjdir@/toolkit/library +settings append target.exec-search-paths @topobjdir@/mozglue/build + +# This is where artifact builds unpacks "crashreporter-symbols-full" uncompressed ELF debug symbols. +settings append target.debug-file-search-paths @topobjdir@/dist/crashreporter-symbols + +# These are specific paths encoded into Mozilla's automation outputs. +settings append target.source-map /builds/worker/workspace/build/src/obj-firefox @topobjdir@ +settings append target.source-map /builds/worker/workspace/build/src @topsrcdir@ +#endif diff --git a/build/RunCbindgen.py b/build/RunCbindgen.py new file mode 100644 index 0000000000..0c941eaaca --- /dev/null +++ b/build/RunCbindgen.py @@ -0,0 +1,95 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +from __future__ import print_function +import buildconfig +import mozpack.path as mozpath +import os +import six +import subprocess +import pytoml + + +# Try to read the package name or otherwise assume same name as the crate path. 
+def _get_crate_name(crate_path): + try: + with open(mozpath.join(crate_path, "Cargo.toml")) as f: + return pytoml.load(f)["package"]["name"] + except Exception: + return mozpath.basename(crate_path) + + +CARGO_LOCK = mozpath.join(buildconfig.topsrcdir, "Cargo.lock") +CARGO_TOML = mozpath.join(buildconfig.topsrcdir, "Cargo.toml") + + +def _run_process(args): + env = os.environ.copy() + env["CARGO"] = str(buildconfig.substs["CARGO"]) + env["RUSTC"] = str(buildconfig.substs["RUSTC"]) + + p = subprocess.Popen(args, env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + + stdout, stderr = p.communicate() + stdout = six.ensure_text(stdout) + stderr = six.ensure_text(stderr) + if p.returncode != 0: + print(stdout) + print(stderr) + return (stdout, p.returncode) + + +def generate_metadata(output, cargo_config): + stdout, returncode = _run_process( + [ + buildconfig.substs["CARGO"], + "metadata", + "--all-features", + "--format-version", + "1", + "--manifest-path", + CARGO_TOML, + ] + ) + + if returncode != 0: + return returncode + + output.write(stdout) + + # This is not quite accurate, but cbindgen only cares about a subset of the + # data which, when changed, causes these files to change. + return set([CARGO_LOCK, CARGO_TOML]) + + +def generate(output, metadata_path, cbindgen_crate_path, *in_tree_dependencies): + stdout, returncode = _run_process( + [ + buildconfig.substs["CBINDGEN"], + buildconfig.topsrcdir, + "--lockfile", + CARGO_LOCK, + "--crate", + _get_crate_name(cbindgen_crate_path), + "--metadata", + metadata_path, + "--cpp-compat", + ] + ) + + if returncode != 0: + return returncode + + output.write(stdout) + + deps = set() + deps.add(CARGO_LOCK) + deps.add(mozpath.join(cbindgen_crate_path, "cbindgen.toml")) + for directory in in_tree_dependencies + (cbindgen_crate_path,): + for path, dirs, files in os.walk(directory): + for file in files: + if os.path.splitext(file)[1] == ".rs": + deps.add(mozpath.join(path, file)) + + return deps diff --git a/build/__init__.py b/build/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/build/appini_header.py b/build/appini_header.py new file mode 100644 index 0000000000..08bd22721d --- /dev/null +++ b/build/appini_header.py @@ -0,0 +1,90 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
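Stepping back to RunCbindgen.py above: generate() simply shells out to cbindgen with the crate name, the top-level Cargo.lock and --cpp-compat, and writes the captured stdout into the generated header. A rough standalone sketch of that call (an illustration only, not part of the patched file; in the tree the cbindgen path comes from buildconfig.substs, here it is assumed to come from $CBINDGEN or PATH):

    import os
    import subprocess
    import sys

    def run_cbindgen(topsrcdir, crate_name, out_path):
        # Assumed fallback: take the binary from the environment.
        cbindgen = os.environ.get("CBINDGEN", "cbindgen")
        cmd = [
            cbindgen,
            topsrcdir,
            "--lockfile", os.path.join(topsrcdir, "Cargo.lock"),
            "--crate", crate_name,
            "--cpp-compat",
        ]
        proc = subprocess.run(cmd, capture_output=True, text=True)
        if proc.returncode != 0:
            # On failure, surface cbindgen's output, as the build script does.
            sys.stderr.write(proc.stdout + proc.stderr)
            return proc.returncode
        with open(out_path, "w") as f:
            f.write(proc.stdout)
        return 0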
+ +"""Parses a given application.ini file and outputs the corresponding + StaticXREAppData structure as a C++ header file""" + +import configparser +import sys + + +def main(output, file): + config = configparser.RawConfigParser() + config.read(file) + flags = set() + try: + if config.getint("XRE", "EnableProfileMigrator") == 1: + flags.add("NS_XRE_ENABLE_PROFILE_MIGRATOR") + except Exception: + pass + try: + if config.getint("Crash Reporter", "Enabled") == 1: + flags.add("NS_XRE_ENABLE_CRASH_REPORTER") + except Exception: + pass + appdata = dict( + ("%s:%s" % (s, o), config.get(s, o)) + for s in config.sections() + for o in config.options(s) + ) + appdata["flags"] = " | ".join(sorted(flags)) if flags else "0" + appdata["App:profile"] = ( + '"%s"' % appdata["App:profile"] if "App:profile" in appdata else "NULL" + ) + expected = ( + "App:vendor", + "App:name", + "App:remotingname", + "App:version", + "App:buildid", + "App:id", + "Gecko:minversion", + "Gecko:maxversion", + ) + missing = [var for var in expected if var not in appdata] + if missing: + print("Missing values in %s: %s" % (file, ", ".join(missing)), file=sys.stderr) + sys.exit(1) + + if "Crash Reporter:serverurl" not in appdata: + appdata["Crash Reporter:serverurl"] = "" + + if "App:sourcerepository" in appdata and "App:sourcestamp" in appdata: + appdata["App:sourceurl"] = ( + '"%(App:sourcerepository)s/rev/%(App:sourcestamp)s"' % appdata + ) + else: + appdata["App:sourceurl"] = "NULL" + + if "AppUpdate:url" not in appdata: + appdata["AppUpdate:url"] = "" + + output.write( + """#include "mozilla/XREAppData.h" + static const mozilla::StaticXREAppData sAppData = { + "%(App:vendor)s", + "%(App:name)s", + "%(App:remotingname)s", + "%(App:version)s", + "%(App:buildid)s", + "%(App:id)s", + NULL, // copyright + %(flags)s, + "%(Gecko:minversion)s", + "%(Gecko:maxversion)s", + "%(Crash Reporter:serverurl)s", + %(App:profile)s, + NULL, // UAName + %(App:sourceurl)s, + "%(AppUpdate:url)s" + };""" + % appdata + ) + + +if __name__ == "__main__": + if len(sys.argv) != 1: + main(sys.stdout, sys.argv[1]) + else: + print("Usage: %s /path/to/application.ini" % sys.argv[0], file=sys.stderr) diff --git a/build/application.ini.in b/build/application.ini.in new file mode 100644 index 0000000000..a6141de0be --- /dev/null +++ b/build/application.ini.in @@ -0,0 +1,56 @@ +#ifdef MOZ_BUILD_APP_IS_BROWSER +; This file is not used. If you modify it and want the application to use +; your modifications, move it under the browser/ subdirectory and start with +; the "-app /path/to/browser/application.ini" argument. +#else +; This file is not used. If you modify it and want the application to use +; your modifications, start with the "-app /path/to/application.ini" +; argument. +#endif +#if 0 +; This Source Code Form is subject to the terms of the Mozilla Public +; License, v. 2.0. If a copy of the MPL was not distributed with this +; file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+#endif +#filter substitution +#include @TOPOBJDIR@/buildid.h +#include @TOPOBJDIR@/source-repo.h +[App] +Vendor=@MOZ_APP_VENDOR@ +Name=@MOZ_APP_BASENAME@ +RemotingName=@MOZ_APP_REMOTINGNAME@ +#ifdef MOZ_APP_DISPLAYNAME +CodeName=@MOZ_APP_DISPLAYNAME@ +#endif +Version=@MOZ_APP_VERSION@ +#ifdef MOZ_APP_PROFILE +Profile=@MOZ_APP_PROFILE@ +#endif +BuildID=@MOZ_BUILDID@ +#ifdef MOZ_SOURCE_REPO +SourceRepository=@MOZ_SOURCE_REPO@ +#endif +#ifdef MOZ_SOURCE_STAMP +SourceStamp=@MOZ_SOURCE_STAMP@ +#endif +ID=@MOZ_APP_ID@ + +[Gecko] +MinVersion=@GRE_MILESTONE@ +MaxVersion=@GRE_MILESTONE@ + +[XRE] +#ifdef MOZ_PROFILE_MIGRATOR +EnableProfileMigrator=1 +#endif + +#if MOZ_CRASHREPORTER +[Crash Reporter] +Enabled=1 +ServerURL=https://crash-reports.mozilla.com/submit?id=@MOZ_APP_ID@&version=@MOZ_APP_VERSION@&buildid=@MOZ_BUILDID@ +#endif + +#if MOZ_UPDATER +[AppUpdate] +URL=https://@MOZ_APPUPDATE_HOST@/update/6/%PRODUCT%/%VERSION%/%BUILD_ID%/%BUILD_TARGET%/%LOCALE%/%CHANNEL%/%OS_VERSION%/%SYSTEM_CAPABILITIES%/%DISTRIBUTION%/%DISTRIBUTION_VERSION%/update.xml +#endif diff --git a/build/autoconf/acgeneral.m4 b/build/autoconf/acgeneral.m4 new file mode 100644 index 0000000000..ae971de139 --- /dev/null +++ b/build/autoconf/acgeneral.m4 @@ -0,0 +1,2607 @@ +dnl Parameterized macros. +dnl Requires GNU m4. +dnl This file is part of Autoconf. +dnl Copyright (C) 1992, 93, 94, 95, 96, 1998 Free Software Foundation, Inc. +dnl +dnl This program is free software; you can redistribute it and/or modify +dnl it under the terms of the GNU General Public License as published by +dnl the Free Software Foundation; either version 2, or (at your option) +dnl any later version. +dnl +dnl This program is distributed in the hope that it will be useful, +dnl but WITHOUT ANY WARRANTY; without even the implied warranty of +dnl MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +dnl GNU General Public License for more details. +dnl +dnl You should have received a copy of the GNU General Public License +dnl along with this program; if not, write to the Free Software +dnl Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA +dnl 02111-1307, USA. +dnl +dnl As a special exception, the Free Software Foundation gives unlimited +dnl permission to copy, distribute and modify the configure scripts that +dnl are the output of Autoconf. You need not follow the terms of the GNU +dnl General Public License when using or distributing such scripts, even +dnl though portions of the text of Autoconf appear in them. The GNU +dnl General Public License (GPL) does govern all other use of the material +dnl that constitutes the Autoconf program. +dnl +dnl Certain portions of the Autoconf source text are designed to be copied +dnl (in certain cases, depending on the input) into the output of +dnl Autoconf. We call these the "data" portions. The rest of the Autoconf +dnl source text consists of comments plus executable code that decides which +dnl of the data portions to output in any given case. We call these +dnl comments and executable code the "non-data" portions. Autoconf never +dnl copies any of the non-data portions into its output. +dnl +dnl This special exception to the GPL applies to versions of Autoconf +dnl released by the Free Software Foundation. 
When you make and +dnl distribute a modified version of Autoconf, you may extend this special +dnl exception to the GPL to apply to your modified version as well, *unless* +dnl your modified version has the potential to copy into its output some +dnl of the text that was the non-data portion of the version that you started +dnl with. (In other words, unless your change moves or copies text from +dnl the non-data portions to the data portions.) If your modification has +dnl such potential, you must delete any notice of this special exception +dnl to the GPL from your modified version. +dnl +dnl Written by David MacKenzie, with help from +dnl Franc,ois Pinard, Karl Berry, Richard Pixley, Ian Lance Taylor, +dnl Roland McGrath, Noah Friedman, david d zuhn, and many others. +dnl +divert(-1)dnl Throw away output until AC_INIT is called. +changequote([, ]) + +define(AC_ACVERSION, 2.13) + +dnl Some old m4's don't support m4exit. But they provide +dnl equivalent functionality by core dumping because of the +dnl long macros we define. +ifdef([__gnu__], , [errprint(Autoconf requires GNU m4. +Install it before installing Autoconf or set the +M4 environment variable to its path name. +)m4exit(2)]) + +undefine([eval]) +undefine([include]) +undefine([shift]) +undefine([format]) + + +dnl ### Defining macros + + +dnl m4 output diversions. We let m4 output them all in order at the end, +dnl except that we explicitly undivert AC_DIVERSION_SED, AC_DIVERSION_CMDS, +dnl and AC_DIVERSION_ICMDS. + +dnl AC_DIVERSION_NOTICE - 1 (= 0) AC_REQUIRE'd #! /bin/sh line +define(AC_DIVERSION_NOTICE, 1)dnl copyright notice & option help strings +define(AC_DIVERSION_INIT, 2)dnl initialization code +define(AC_DIVERSION_NORMAL_4, 3)dnl AC_REQUIRE'd code, 4 level deep +define(AC_DIVERSION_NORMAL_3, 4)dnl AC_REQUIRE'd code, 3 level deep +define(AC_DIVERSION_NORMAL_2, 5)dnl AC_REQUIRE'd code, 2 level deep +define(AC_DIVERSION_NORMAL_1, 6)dnl AC_REQUIRE'd code, 1 level deep +define(AC_DIVERSION_NORMAL, 7)dnl the tests and output code +define(AC_DIVERSION_SED, 8)dnl variable substitutions in config.status +define(AC_DIVERSION_CMDS, 9)dnl extra shell commands in config.status +define(AC_DIVERSION_ICMDS, 10)dnl extra initialization in config.status + +dnl Change the diversion stream to STREAM, while stacking old values. +dnl AC_DIVERT_PUSH(STREAM) +define(AC_DIVERT_PUSH, +[pushdef([AC_DIVERSION_CURRENT], $1)dnl +divert(AC_DIVERSION_CURRENT)dnl +]) + +dnl Change the diversion stream to its previous value, unstacking it. +dnl AC_DIVERT_POP() +define(AC_DIVERT_POP, +[popdef([AC_DIVERSION_CURRENT])dnl +divert(AC_DIVERSION_CURRENT)dnl +]) + +dnl Initialize the diversion setup. +define([AC_DIVERSION_CURRENT], AC_DIVERSION_NORMAL) +dnl This will be popped by AC_REQUIRE in AC_INIT. +pushdef([AC_DIVERSION_CURRENT], AC_DIVERSION_NOTICE) + +dnl The prologue for Autoconf macros. +dnl AC_PRO(MACRO-NAME) +define(AC_PRO, +[define([AC_PROVIDE_$1], )dnl +ifelse(AC_DIVERSION_CURRENT, AC_DIVERSION_NORMAL, +[AC_DIVERT_PUSH(builtin(eval, AC_DIVERSION_CURRENT - 1))], +[pushdef([AC_DIVERSION_CURRENT], AC_DIVERSION_CURRENT)])dnl +]) + +dnl The Epilogue for Autoconf macros. +dnl AC_EPI() +define(AC_EPI, +[AC_DIVERT_POP()dnl +ifelse(AC_DIVERSION_CURRENT, AC_DIVERSION_NORMAL, +[undivert(AC_DIVERSION_NORMAL_4)dnl +undivert(AC_DIVERSION_NORMAL_3)dnl +undivert(AC_DIVERSION_NORMAL_2)dnl +undivert(AC_DIVERSION_NORMAL_1)dnl +])dnl +]) + +dnl Define a macro which automatically provides itself. 
Add machinery +dnl so the macro automatically switches expansion to the diversion +dnl stack if it is not already using it. In this case, once finished, +dnl it will bring back all the code accumulated in the diversion stack. +dnl This, combined with AC_REQUIRE, achieves the topological ordering of +dnl macros. We don't use this macro to define some frequently called +dnl macros that are not involved in ordering constraints, to save m4 +dnl processing. +dnl AC_DEFUN(NAME, EXPANSION) +define([AC_DEFUN], +[define($1, [AC_PRO([$1])$2[]AC_EPI()])]) + + +dnl ### Initialization + + +dnl AC_INIT_NOTICE() +AC_DEFUN(AC_INIT_NOTICE, +[# Guess values for system-dependent variables and create Makefiles. +# Generated automatically using autoconf version] AC_ACVERSION [ +# Copyright (C) 1992, 93, 94, 95, 96 Free Software Foundation, Inc. +# +# This configure script is free software; the Free Software Foundation +# gives unlimited permission to copy, distribute and modify it. + +# Defaults: +ac_help= +ac_default_prefix=/usr/local +[#] Any additions from configure.in:]) + +dnl AC_PREFIX_DEFAULT(PREFIX) +AC_DEFUN(AC_PREFIX_DEFAULT, +[AC_DIVERT_PUSH(AC_DIVERSION_NOTICE)dnl +ac_default_prefix=$1 +AC_DIVERT_POP()]) + +dnl AC_INIT_PARSE_ARGS() +AC_DEFUN(AC_INIT_PARSE_ARGS, +[ +# Initialize some variables set by options. +# The variables have the same names as the options, with +# dashes changed to underlines. +build=NONE +cache_file=./config.cache +exec_prefix=NONE +host=NONE +no_create= +nonopt=NONE +no_recursion= +prefix=NONE +program_prefix=NONE +program_suffix=NONE +program_transform_name=s,x,x, +silent= +site= +srcdir= +target=NONE +verbose= +x_includes=NONE +x_libraries=NONE +dnl Installation directory options. +dnl These are left unexpanded so users can "make install exec_prefix=/foo" +dnl and all the variables that are supposed to be based on exec_prefix +dnl by default will actually change. +dnl Use braces instead of parens because sh, perl, etc. also accept them. +bindir='${exec_prefix}/bin' +sbindir='${exec_prefix}/sbin' +libexecdir='${exec_prefix}/libexec' +datadir='${prefix}/share' +sysconfdir='${prefix}/etc' +sharedstatedir='${prefix}/com' +localstatedir='${prefix}/var' +libdir='${exec_prefix}/lib' +includedir='${prefix}/include' +oldincludedir='/usr/include' +infodir='${prefix}/info' +mandir='${prefix}/man' + +# Initialize some other variables. +subdirs= +MFLAGS= MAKEFLAGS= +SHELL=${CONFIG_SHELL-/bin/sh} +# Maximum number of lines to put in a shell here document. +ac_max_here_lines=12 + +ac_prev= +for ac_option +do + + # If the previous option needs an argument, assign it. + if test -n "$ac_prev"; then + eval "$ac_prev=\$ac_option" + ac_prev= + continue + fi + + case "$ac_option" in +changequote(, )dnl + -*=*) ac_optarg=`echo "$ac_option" | sed 's/[-_a-zA-Z0-9]*=//'` ;; +changequote([, ])dnl + *) ac_optarg= ;; + esac + + # Accept the important Cygnus configure options, so we can diagnose typos. 
+ + case "$ac_option" in + + -bindir | --bindir | --bindi | --bind | --bin | --bi) + ac_prev=bindir ;; + -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*) + bindir="$ac_optarg" ;; + + -build | --build | --buil | --bui | --bu) + ac_prev=build ;; + -build=* | --build=* | --buil=* | --bui=* | --bu=*) + build="$ac_optarg" ;; + + -cache-file | --cache-file | --cache-fil | --cache-fi \ + | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c) + ac_prev=cache_file ;; + -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \ + | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*) + cache_file="$ac_optarg" ;; + + -datadir | --datadir | --datadi | --datad | --data | --dat | --da) + ac_prev=datadir ;; + -datadir=* | --datadir=* | --datadi=* | --datad=* | --data=* | --dat=* \ + | --da=*) + datadir="$ac_optarg" ;; + + -disable-* | --disable-*) + ac_feature=`echo $ac_option|sed -e 's/-*disable-//'` + # Reject names that are not valid shell variable names. +changequote(, )dnl + if test -n "`echo $ac_feature| sed 's/[-a-zA-Z0-9_]//g'`"; then +changequote([, ])dnl + AC_MSG_ERROR($ac_feature: invalid feature name) + fi + ac_feature=`echo $ac_feature| sed 's/-/_/g'` + eval "enable_${ac_feature}=no" ;; + + -enable-* | --enable-*) + ac_feature=`echo $ac_option|sed -e 's/-*enable-//' -e 's/=.*//'` + # Reject names that are not valid shell variable names. +changequote(, )dnl + if test -n "`echo $ac_feature| sed 's/[-_a-zA-Z0-9]//g'`"; then +changequote([, ])dnl + AC_MSG_ERROR($ac_feature: invalid feature name) + fi + ac_feature=`echo $ac_feature| sed 's/-/_/g'` + case "$ac_option" in + *=*) ;; + *) ac_optarg=yes ;; + esac + eval "enable_${ac_feature}='$ac_optarg'" ;; + + -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \ + | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \ + | --exec | --exe | --ex) + ac_prev=exec_prefix ;; + -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \ + | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \ + | --exec=* | --exe=* | --ex=*) + exec_prefix="$ac_optarg" ;; + + -gas | --gas | --ga | --g) + # Obsolete; use --with-gas. + with_gas=yes ;; + + -help | --help | --hel | --he) + # Omit some internal or obsolete options to make the list less imposing. + # This message is too long to be a string in the A/UX 3.1 sh. + cat << EOF +changequote(, )dnl +Usage: configure [options] [host] +Options: [defaults in brackets after descriptions] +Configuration: + --cache-file=FILE cache test results in FILE + --help print this message + --no-create do not create output files + --quiet, --silent do not print \`checking...' 
messages + --version print the version of autoconf that created configure +Directory and file names: + --prefix=PREFIX install architecture-independent files in PREFIX + [$ac_default_prefix] + --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX + [same as prefix] + --bindir=DIR user executables in DIR [EPREFIX/bin] + --sbindir=DIR system admin executables in DIR [EPREFIX/sbin] + --libexecdir=DIR program executables in DIR [EPREFIX/libexec] + --datadir=DIR read-only architecture-independent data in DIR + [PREFIX/share] + --sysconfdir=DIR read-only single-machine data in DIR [PREFIX/etc] + --sharedstatedir=DIR modifiable architecture-independent data in DIR + [PREFIX/com] + --localstatedir=DIR modifiable single-machine data in DIR [PREFIX/var] + --libdir=DIR object code libraries in DIR [EPREFIX/lib] + --includedir=DIR C header files in DIR [PREFIX/include] + --oldincludedir=DIR C header files for non-gcc in DIR [/usr/include] + --infodir=DIR info documentation in DIR [PREFIX/info] + --mandir=DIR man documentation in DIR [PREFIX/man] + --srcdir=DIR find the sources in DIR [configure dir or ..] + --program-prefix=PREFIX prepend PREFIX to installed program names + --program-suffix=SUFFIX append SUFFIX to installed program names + --program-transform-name=PROGRAM + run sed PROGRAM on installed program names +EOF + cat << EOF +Host type: + --build=BUILD configure for building on BUILD [BUILD=HOST] + --host=HOST configure for HOST [guessed] + --target=TARGET configure for TARGET [TARGET=HOST] +Features and packages: + --disable-FEATURE do not include FEATURE (same as --enable-FEATURE=no) + --enable-FEATURE[=ARG] include FEATURE [ARG=yes] + --with-PACKAGE[=ARG] use PACKAGE [ARG=yes] + --without-PACKAGE do not use PACKAGE (same as --with-PACKAGE=no) + --x-includes=DIR X include files are in DIR + --x-libraries=DIR X library files are in DIR +changequote([, ])dnl +EOF + if test -n "$ac_help"; then + echo "--enable and --with options recognized:$ac_help" + fi + exit 0 ;; + + -host | --host | --hos | --ho) + ac_prev=host ;; + -host=* | --host=* | --hos=* | --ho=*) + host="$ac_optarg" ;; + + -includedir | --includedir | --includedi | --included | --include \ + | --includ | --inclu | --incl | --inc) + ac_prev=includedir ;; + -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \ + | --includ=* | --inclu=* | --incl=* | --inc=*) + includedir="$ac_optarg" ;; + + -infodir | --infodir | --infodi | --infod | --info | --inf) + ac_prev=infodir ;; + -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*) + infodir="$ac_optarg" ;; + + -libdir | --libdir | --libdi | --libd) + ac_prev=libdir ;; + -libdir=* | --libdir=* | --libdi=* | --libd=*) + libdir="$ac_optarg" ;; + + -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \ + | --libexe | --libex | --libe) + ac_prev=libexecdir ;; + -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \ + | --libexe=* | --libex=* | --libe=*) + libexecdir="$ac_optarg" ;; + + -localstatedir | --localstatedir | --localstatedi | --localstated \ + | --localstate | --localstat | --localsta | --localst \ + | --locals | --local | --loca | --loc | --lo) + ac_prev=localstatedir ;; + -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \ + | --localstate=* | --localstat=* | --localsta=* | --localst=* \ + | --locals=* | --local=* | --loca=* | --loc=* | --lo=*) + localstatedir="$ac_optarg" ;; + + -mandir | --mandir | --mandi | --mand | --man | --ma | --m) + ac_prev=mandir ;; + 
-mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*) + mandir="$ac_optarg" ;; + + -nfp | --nfp | --nf) + # Obsolete; use --without-fp. + with_fp=no ;; + + -no-create | --no-create | --no-creat | --no-crea | --no-cre \ + | --no-cr | --no-c) + no_create=yes ;; + + -no-recursion | --no-recursion | --no-recursio | --no-recursi \ + | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r) + no_recursion=yes ;; + + -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \ + | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \ + | --oldin | --oldi | --old | --ol | --o) + ac_prev=oldincludedir ;; + -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \ + | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \ + | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*) + oldincludedir="$ac_optarg" ;; + + -prefix | --prefix | --prefi | --pref | --pre | --pr | --p) + ac_prev=prefix ;; + -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*) + prefix="$ac_optarg" ;; + + -program-prefix | --program-prefix | --program-prefi | --program-pref \ + | --program-pre | --program-pr | --program-p) + ac_prev=program_prefix ;; + -program-prefix=* | --program-prefix=* | --program-prefi=* \ + | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*) + program_prefix="$ac_optarg" ;; + + -program-suffix | --program-suffix | --program-suffi | --program-suff \ + | --program-suf | --program-su | --program-s) + ac_prev=program_suffix ;; + -program-suffix=* | --program-suffix=* | --program-suffi=* \ + | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*) + program_suffix="$ac_optarg" ;; + + -program-transform-name | --program-transform-name \ + | --program-transform-nam | --program-transform-na \ + | --program-transform-n | --program-transform- \ + | --program-transform | --program-transfor \ + | --program-transfo | --program-transf \ + | --program-trans | --program-tran \ + | --progr-tra | --program-tr | --program-t) + ac_prev=program_transform_name ;; + -program-transform-name=* | --program-transform-name=* \ + | --program-transform-nam=* | --program-transform-na=* \ + | --program-transform-n=* | --program-transform-=* \ + | --program-transform=* | --program-transfor=* \ + | --program-transfo=* | --program-transf=* \ + | --program-trans=* | --program-tran=* \ + | --progr-tra=* | --program-tr=* | --program-t=*) + program_transform_name="$ac_optarg" ;; + + -q | -quiet | --quiet | --quie | --qui | --qu | --q \ + | -silent | --silent | --silen | --sile | --sil) + silent=yes ;; + + -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb) + ac_prev=sbindir ;; + -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \ + | --sbi=* | --sb=*) + sbindir="$ac_optarg" ;; + + -sharedstatedir | --sharedstatedir | --sharedstatedi \ + | --sharedstated | --sharedstate | --sharedstat | --sharedsta \ + | --sharedst | --shareds | --shared | --share | --shar \ + | --sha | --sh) + ac_prev=sharedstatedir ;; + -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \ + | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \ + | --sharedst=* | --shareds=* | --shared=* | --share=* | --shar=* \ + | --sha=* | --sh=*) + sharedstatedir="$ac_optarg" ;; + + -site | --site | --sit) + ac_prev=site ;; + -site=* | --site=* | --sit=*) + site="$ac_optarg" ;; + + -srcdir | --srcdir | --srcdi | --srcd | --src | --sr) + ac_prev=srcdir ;; + -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | 
--src=* | --sr=*) + srcdir="$ac_optarg" ;; + + -sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \ + | --syscon | --sysco | --sysc | --sys | --sy) + ac_prev=sysconfdir ;; + -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \ + | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*) + sysconfdir="$ac_optarg" ;; + + -target | --target | --targe | --targ | --tar | --ta | --t) + ac_prev=target ;; + -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*) + target="$ac_optarg" ;; + + -v | -verbose | --verbose | --verbos | --verbo | --verb) + verbose=yes ;; + + -version | --version | --versio | --versi | --vers) + echo "configure generated by autoconf version AC_ACVERSION" + exit 0 ;; + + -with-* | --with-*) + ac_package=`echo $ac_option|sed -e 's/-*with-//' -e 's/=.*//'` + # Reject names that are not valid shell variable names. +changequote(, )dnl + if test -n "`echo $ac_package| sed 's/[-_a-zA-Z0-9]//g'`"; then +changequote([, ])dnl + AC_MSG_ERROR($ac_package: invalid package name) + fi + ac_package=`echo $ac_package| sed 's/-/_/g'` + case "$ac_option" in + *=*) ;; + *) ac_optarg=yes ;; + esac + eval "with_${ac_package}='$ac_optarg'" ;; + + -without-* | --without-*) + ac_package=`echo $ac_option|sed -e 's/-*without-//'` + # Reject names that are not valid shell variable names. +changequote(, )dnl + if test -n "`echo $ac_package| sed 's/[-a-zA-Z0-9_]//g'`"; then +changequote([, ])dnl + AC_MSG_ERROR($ac_package: invalid package name) + fi + ac_package=`echo $ac_package| sed 's/-/_/g'` + eval "with_${ac_package}=no" ;; + + --x) + # Obsolete; use --with-x. + with_x=yes ;; + + -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \ + | --x-incl | --x-inc | --x-in | --x-i) + ac_prev=x_includes ;; + -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \ + | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*) + x_includes="$ac_optarg" ;; + + -x-libraries | --x-libraries | --x-librarie | --x-librari \ + | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l) + ac_prev=x_libraries ;; + -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \ + | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*) + x_libraries="$ac_optarg" ;; + + -*) AC_MSG_ERROR([$ac_option: invalid option; use --help to show usage]) + ;; + + *) +changequote(, )dnl + if test -n "`echo $ac_option| sed 's/[-a-z0-9.]//g'`"; then +changequote([, ])dnl + AC_MSG_WARN($ac_option: invalid host type) + fi + if test "x$nonopt" != xNONE; then + AC_MSG_ERROR(can only configure for one host and one target at a time) + fi + nonopt="$ac_option" + ;; + + esac +done + +if test -n "$ac_prev"; then + AC_MSG_ERROR(missing argument to --`echo $ac_prev | sed 's/_/-/g'`) +fi +]) + +dnl Try to have only one #! line, so the script doesn't look funny +dnl for users of AC_REVISION. +dnl AC_INIT_BINSH() +AC_DEFUN(AC_INIT_BINSH, +[#! 
/bin/sh +]) + +dnl AC_INIT(UNIQUE-FILE-IN-SOURCE-DIR) +AC_DEFUN(AC_INIT, +[sinclude(acsite.m4)dnl +sinclude(./aclocal.m4)dnl +AC_REQUIRE([AC_INIT_BINSH])dnl +AC_INIT_NOTICE +AC_DIVERT_POP()dnl to NORMAL +AC_DIVERT_PUSH(AC_DIVERSION_INIT)dnl +AC_INIT_PARSE_ARGS +AC_INIT_PREPARE($1)dnl +AC_DIVERT_POP()dnl to NORMAL +]) + +dnl AC_INIT_PREPARE(UNIQUE-FILE-IN-SOURCE-DIR) +AC_DEFUN(AC_INIT_PREPARE, +[trap 'rm -fr conftest* confdefs* core core.* *.core $ac_clean_files; exit 1' 1 2 15 + +# File descriptor usage: +# 0 standard input +# 1 file creation +# 2 errors and warnings +# 3 some systems may open it to /dev/tty +# 4 used on the Kubota Titan +define(AC_FD_MSG, 6)dnl +[#] AC_FD_MSG checking for... messages and results +define(AC_FD_CC, 5)dnl +[#] AC_FD_CC compiler messages saved in config.log +if test "$silent" = yes; then + exec AC_FD_MSG>/dev/null +else + exec AC_FD_MSG>&1 +fi +exec AC_FD_CC>./config.log + +echo "\ +This file contains any messages produced by compilers while +running configure, to aid debugging if configure makes a mistake. +" 1>&AC_FD_CC + +# Strip out --no-create and --no-recursion so they do not pile up. +# Also quote any args containing shell metacharacters. +ac_configure_args= +for ac_arg +do + case "$ac_arg" in + -no-create | --no-create | --no-creat | --no-crea | --no-cre \ + | --no-cr | --no-c) ;; + -no-recursion | --no-recursion | --no-recursio | --no-recursi \ + | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r) ;; +changequote(<<, >>)dnl +dnl If you change this globbing pattern, test it on an old shell -- +dnl it's sensitive. Putting any kind of quote in it causes syntax errors. + *" "*|*" "*|*[\[\]\~\<<#>>\$\^\&\*\(\)\{\}\\\|\;\<\>\?]*) + ac_configure_args="$ac_configure_args '$ac_arg'" ;; +changequote([, ])dnl + *) ac_configure_args="$ac_configure_args $ac_arg" ;; + esac +done + +# NLS nuisances. +# Only set these to C if already set. These must not be set unconditionally +# because not all systems understand e.g. LANG=C (notably SCO). +# Fixing LC_MESSAGES prevents Solaris sh from translating var values in `set'! +# Non-C LC_CTYPE values break the ctype check. +if test "${LANG+set}" = set; then LANG=C; export LANG; fi +if test "${LC_ALL+set}" = set; then LC_ALL=C; export LC_ALL; fi +if test "${LC_MESSAGES+set}" = set; then LC_MESSAGES=C; export LC_MESSAGES; fi +if test "${LC_CTYPE+set}" = set; then LC_CTYPE=C; export LC_CTYPE; fi + +# confdefs.h avoids OS command line length limits that DEFS can exceed. +rm -rf conftest* confdefs.h +# AIX cpp loses on an empty file, so make sure it contains at least a newline. +echo > confdefs.h + +# A filename unique to this package, relative to the directory that +# configure is in, which we can look for to find out if srcdir is correct. +ac_unique_file=$1 + +# Find the source files, if location was not specified. +if test -z "$srcdir"; then + ac_srcdir_defaulted=yes + # Try the directory containing this script, then its parent. + ac_prog=[$]0 +changequote(, )dnl + ac_confdir=`echo $ac_prog|sed 's%/[^/][^/]*$%%'` +changequote([, ])dnl + test "x$ac_confdir" = "x$ac_prog" && ac_confdir=. + srcdir=$ac_confdir + if test ! -r $srcdir/$ac_unique_file; then + srcdir=.. + fi +else + ac_srcdir_defaulted=no +fi +if test ! -r $srcdir/$ac_unique_file; then + if test "$ac_srcdir_defaulted" = yes; then + AC_MSG_ERROR(can not find sources in $ac_confdir or ..) + else + AC_MSG_ERROR(can not find sources in $srcdir) + fi +fi +dnl Double slashes in pathnames in object file debugging info +dnl mess up M-x gdb in Emacs. 
+changequote(, )dnl +srcdir=`echo "${srcdir}" | sed 's%\([^/]\)/*$%\1%'` +changequote([, ])dnl + +dnl Let the site file select an alternate cache file if it wants to. +AC_SITE_LOAD +AC_CACHE_LOAD +AC_LANG_C +dnl By default always use an empty string as the executable +dnl extension. Only change it if the script calls AC_EXEEXT. +ac_exeext= +dnl By default assume that objects files use an extension of .o. Only +dnl change it if the script calls AC_OBJEXT. +ac_objext=o +AC_PROG_ECHO_N +dnl Substitute for predefined variables. +AC_SUBST(SHELL)dnl +AC_SUBST(CFLAGS)dnl +AC_SUBST(CPPFLAGS)dnl +AC_SUBST(CXXFLAGS)dnl +AC_SUBST(FFLAGS)dnl +AC_SUBST(DEFS)dnl +AC_SUBST(LDFLAGS)dnl +AC_SUBST(LIBS)dnl +AC_SUBST(exec_prefix)dnl +AC_SUBST(prefix)dnl +AC_SUBST(program_transform_name)dnl +dnl Installation directory options. +AC_SUBST(bindir)dnl +AC_SUBST(sbindir)dnl +AC_SUBST(libexecdir)dnl +AC_SUBST(datadir)dnl +AC_SUBST(sysconfdir)dnl +AC_SUBST(sharedstatedir)dnl +AC_SUBST(localstatedir)dnl +AC_SUBST(libdir)dnl +AC_SUBST(includedir)dnl +AC_SUBST(oldincludedir)dnl +AC_SUBST(infodir)dnl +AC_SUBST(mandir)dnl +]) + + +dnl ### Selecting optional features + + +dnl AC_ARG_ENABLE(FEATURE, HELP-STRING, ACTION-IF-TRUE [, ACTION-IF-FALSE]) +AC_DEFUN(AC_ARG_ENABLE, +[AC_DIVERT_PUSH(AC_DIVERSION_NOTICE)dnl +ac_help="$ac_help +[$2]" +AC_DIVERT_POP()dnl +[#] Check whether --enable-[$1] or --disable-[$1] was given. +if test "[${enable_]patsubst([$1], -, _)+set}" = set; then + enableval="[$enable_]patsubst([$1], -, _)" + ifelse([$3], , :, [$3]) +ifelse([$4], , , [else + $4 +])dnl +fi +]) + +AC_DEFUN(AC_ENABLE, +[AC_OBSOLETE([$0], [; instead use AC_ARG_ENABLE])dnl +AC_ARG_ENABLE([$1], [ --enable-$1], [$2], [$3])dnl +]) + + +dnl ### Working with optional software + + +dnl AC_ARG_WITH(PACKAGE, HELP-STRING, ACTION-IF-TRUE [, ACTION-IF-FALSE]) +AC_DEFUN(AC_ARG_WITH, +[AC_DIVERT_PUSH(AC_DIVERSION_NOTICE)dnl +ac_help="$ac_help +[$2]" +AC_DIVERT_POP()dnl +[#] Check whether --with-[$1] or --without-[$1] was given. +if test "[${with_]patsubst([$1], -, _)+set}" = set; then + withval="[$with_]patsubst([$1], -, _)" + ifelse([$3], , :, [$3]) +ifelse([$4], , , [else + $4 +])dnl +fi +]) + +AC_DEFUN(AC_WITH, +[AC_OBSOLETE([$0], [; instead use AC_ARG_WITH])dnl +AC_ARG_WITH([$1], [ --with-$1], [$2], [$3])dnl +]) + + +dnl ### Transforming program names. + + +dnl AC_ARG_PROGRAM() +AC_DEFUN(AC_ARG_PROGRAM, +[if test "$program_transform_name" = s,x,x,; then + program_transform_name= +else + # Double any \ or $. echo might interpret backslashes. + cat <<\EOF_SED > conftestsed +s,\\,\\\\,g; s,\$,$$,g +EOF_SED + program_transform_name="`echo $program_transform_name|sed -f conftestsed`" + rm -f conftestsed +fi +test "$program_prefix" != NONE && + program_transform_name="s,^,${program_prefix},; $program_transform_name" +# Use a double $ so make ignores it. +test "$program_suffix" != NONE && + program_transform_name="s,\$\$,${program_suffix},; $program_transform_name" + +# sed with no file args requires a program. +test "$program_transform_name" = "" && program_transform_name="s,x,x," +]) + + +dnl ### Version numbers + + +dnl AC_REVISION(REVISION-INFO) +AC_DEFUN(AC_REVISION, +[AC_REQUIRE([AC_INIT_BINSH])dnl +[# From configure.in] translit([$1], $")]) + +dnl Subroutines of AC_PREREQ. + +dnl Change the dots in NUMBER into commas. +dnl AC_PREREQ_SPLIT(NUMBER) +define(AC_PREREQ_SPLIT, +[translit($1, ., [, ])]) + +dnl Default the ternary version number to 0 (e.g., 1, 7 -> 1, 7, 0). 
+dnl AC_PREREQ_CANON(MAJOR, MINOR [,TERNARY]) +define(AC_PREREQ_CANON, +[$1, $2, ifelse([$3], , 0, [$3])]) + +dnl Complain and exit if version number 1 is less than version number 2. +dnl PRINTABLE2 is the printable version of version number 2. +dnl AC_PREREQ_COMPARE(MAJOR1, MINOR1, TERNARY1, MAJOR2, MINOR2, TERNARY2, +dnl PRINTABLE2) +define(AC_PREREQ_COMPARE, +[ifelse(builtin([eval], +[$3 + $2 * 1000 + $1 * 1000000 < $6 + $5 * 1000 + $4 * 1000000]), 1, +[errprint(dnl +FATAL ERROR: Autoconf version $7 or higher is required for this script +)m4exit(3)])]) + +dnl Complain and exit if the Autoconf version is less than VERSION. +dnl AC_PREREQ(VERSION) +define(AC_PREREQ, +[AC_PREREQ_COMPARE(AC_PREREQ_CANON(AC_PREREQ_SPLIT(AC_ACVERSION)), +AC_PREREQ_CANON(AC_PREREQ_SPLIT([$1])), [$1])]) + + +dnl ### Getting the canonical system type + + +dnl Find install-sh, config.sub, config.guess, and Cygnus configure +dnl in directory DIR. These are auxiliary files used in configuration. +dnl DIR can be either absolute or relative to $srcdir. +dnl AC_CONFIG_AUX_DIR(DIR) +AC_DEFUN(AC_CONFIG_AUX_DIR, +[AC_CONFIG_AUX_DIRS($1 $srcdir/$1)]) + +dnl The default is `$srcdir' or `$srcdir/..' or `$srcdir/../..'. +dnl There's no need to call this macro explicitly; just AC_REQUIRE it. +AC_DEFUN(AC_CONFIG_AUX_DIR_DEFAULT, +[AC_CONFIG_AUX_DIRS($srcdir $srcdir/.. $srcdir/../..)]) + +dnl Internal subroutine. +dnl Search for the configuration auxiliary files in directory list $1. +dnl We look only for install-sh, so users of AC_PROG_INSTALL +dnl do not automatically need to distribute the other auxiliary files. +dnl AC_CONFIG_AUX_DIRS(DIR ...) +AC_DEFUN(AC_CONFIG_AUX_DIRS, +[ac_aux_dir= +for ac_dir in $1; do + if test -f $ac_dir/install-sh; then + ac_aux_dir=$ac_dir + ac_install_sh="$ac_aux_dir/install-sh -c" + break + elif test -f $ac_dir/install.sh; then + ac_aux_dir=$ac_dir + ac_install_sh="$ac_aux_dir/install.sh -c" + break + fi +done +if test -z "$ac_aux_dir"; then + AC_MSG_ERROR([can not find install-sh or install.sh in $1]) +fi +ac_config_guess=$ac_aux_dir/config.guess +ac_config_sub=$ac_aux_dir/config.sub +ac_configure=$ac_aux_dir/configure # This should be Cygnus configure. +AC_PROVIDE([AC_CONFIG_AUX_DIR_DEFAULT])dnl +]) + +dnl Canonicalize the host, target, and build system types. +AC_DEFUN(AC_CANONICAL_SYSTEM, +[AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT])dnl +AC_BEFORE([$0], [AC_ARG_PROGRAM]) +# Do some error checking and defaulting for the host and target type. +# The inputs are: +# configure --host=HOST --target=TARGET --build=BUILD NONOPT +# +# The rules are: +# 1. You are not allowed to specify --host, --target, and nonopt at the +# same time. +# 2. Host defaults to nonopt. +# 3. If nonopt is not specified, then host defaults to the current host, +# as determined by config.guess. +# 4. Target and build default to nonopt. +# 5. If nonopt is not specified, then target and build default to host. + +# The aliases save the names the user supplied, while $host etc. +# will get canonicalized. +case $host---$target---$nonopt in +NONE---*---* | *---NONE---* | *---*---NONE) ;; +*) AC_MSG_ERROR(can only configure for one host and one target at a time) ;; +esac + +AC_CANONICAL_HOST +AC_CANONICAL_TARGET +AC_CANONICAL_BUILD +test "$host_alias" != "$target_alias" && + test "$program_prefix$program_suffix$program_transform_name" = \ + NONENONEs,x,x, && + program_prefix=${target_alias}- +]) + +dnl Subroutines of AC_CANONICAL_SYSTEM. 
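Before the canonicalization subroutines, a note on the AC_PREREQ machinery above: it compares version numbers by canonicalizing them to three components and packing them into one integer, major * 1000000 + minor * 1000 + ternary. The same check in a few lines of Python (an illustration only, not part of the patched file):

    def encode(version):
        # Split on dots and default the missing minor/ternary parts to 0,
        # as AC_PREREQ_CANON does.
        parts = (version.split(".") + ["0", "0"])[:3]
        major, minor, ternary = (int(p) for p in parts)
        return major * 1000000 + minor * 1000 + ternary

    def prereq_ok(have, required):
        # AC_PREREQ_COMPARE aborts (m4exit) when the running Autoconf is
        # older than what configure.in asks for.
        return encode(have) >= encode(required)

    assert prereq_ok("2.13", "2.12")
    assert not prereq_ok("2.13", "2.50")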
+ +AC_DEFUN(AC_CANONICAL_HOST, +[AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT])dnl + +# Make sure we can run config.sub. +if ${CONFIG_SHELL-/bin/sh} $ac_config_sub sun4 >/dev/null 2>&1; then : +else AC_MSG_ERROR(can not run $ac_config_sub) +fi + +AC_MSG_CHECKING(host system type) + +dnl Set host_alias. +host_alias=$host +case "$host_alias" in +NONE) + case $nonopt in + NONE) + if host_alias=`${CONFIG_SHELL-/bin/sh} $ac_config_guess`; then : + else AC_MSG_ERROR(can not guess host type; you must specify one) + fi ;; + *) host_alias=$nonopt ;; + esac ;; +esac + +dnl Set the other host vars. +changequote(<<, >>)dnl +host=`${CONFIG_SHELL-/bin/sh} $ac_config_sub $host_alias` +host_cpu=`echo $host | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\1/'` +host_vendor=`echo $host | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\2/'` +host_os=`echo $host | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\3/'` +changequote([, ])dnl +AC_MSG_RESULT($host) +AC_SUBST(host)dnl +AC_SUBST(host_alias)dnl +AC_SUBST(host_cpu)dnl +AC_SUBST(host_vendor)dnl +AC_SUBST(host_os)dnl +]) + +dnl Internal use only. +AC_DEFUN(AC_CANONICAL_TARGET, +[AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT])dnl +AC_MSG_CHECKING(target system type) + +dnl Set target_alias. +target_alias=$target +case "$target_alias" in +NONE) + case $nonopt in + NONE) target_alias=$host_alias ;; + *) target_alias=$nonopt ;; + esac ;; +esac + +dnl Set the other target vars. +changequote(<<, >>)dnl +target=`${CONFIG_SHELL-/bin/sh} $ac_config_sub $target_alias` +target_cpu=`echo $target | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\1/'` +target_vendor=`echo $target | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\2/'` +target_os=`echo $target | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\3/'` +changequote([, ])dnl +AC_MSG_RESULT($target) +AC_SUBST(target)dnl +AC_SUBST(target_alias)dnl +AC_SUBST(target_cpu)dnl +AC_SUBST(target_vendor)dnl +AC_SUBST(target_os)dnl +]) + +dnl Internal use only. +AC_DEFUN(AC_CANONICAL_BUILD, +[AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT])dnl +AC_MSG_CHECKING(build system type) + +dnl Set build_alias. +build_alias=$build +case "$build_alias" in +NONE) + case $nonopt in + NONE) build_alias=$host_alias ;; + *) build_alias=$nonopt ;; + esac ;; +esac + +dnl Set the other build vars. +changequote(<<, >>)dnl +build=`${CONFIG_SHELL-/bin/sh} $ac_config_sub $build_alias` +build_cpu=`echo $build | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\1/'` +build_vendor=`echo $build | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\2/'` +build_os=`echo $build | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\3/'` +changequote([, ])dnl +AC_MSG_RESULT($build) +AC_SUBST(build)dnl +AC_SUBST(build_alias)dnl +AC_SUBST(build_cpu)dnl +AC_SUBST(build_vendor)dnl +AC_SUBST(build_os)dnl +]) + + +dnl AC_VALIDATE_CACHED_SYSTEM_TUPLE[(cmd)] +dnl if the cache file is inconsistent with the current host, +dnl target and build system types, execute CMD or print a default +dnl error message. 
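AC_CANONICAL_HOST, AC_CANONICAL_TARGET and AC_CANONICAL_BUILD above all split a canonicalized triplet such as x86_64-pc-linux-gnu into cpu, vendor and os with the sed pattern ^\([^-]*\)-\([^-]*\)-\(.*\)$. The equivalent split in Python (an illustration only, not part of the patched file):

    import re

    def split_triplet(triplet):
        # First two dash-free fields are cpu and vendor; the rest, dashes
        # included, is the os (e.g. "linux-gnu").
        m = re.match(r"^([^-]*)-([^-]*)-(.*)$", triplet)
        if not m:
            raise ValueError("not a cpu-vendor-os triplet: %s" % triplet)
        return m.groups()

    print(split_triplet("x86_64-pc-linux-gnu"))   # ('x86_64', 'pc', 'linux-gnu')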
+AC_DEFUN(AC_VALIDATE_CACHED_SYSTEM_TUPLE, [ + AC_REQUIRE([AC_CANONICAL_SYSTEM]) + AC_MSG_CHECKING([cached system tuple]) + if { test x"${ac_cv_host_system_type+set}" = x"set" && + test x"$ac_cv_host_system_type" != x"$host"; } || + { test x"${ac_cv_build_system_type+set}" = x"set" && + test x"$ac_cv_build_system_type" != x"$build"; } || + { test x"${ac_cv_target_system_type+set}" = x"set" && + test x"$ac_cv_target_system_type" != x"$target"; }; then + AC_MSG_RESULT([different]) + ifelse($#, 1, [$1], + [AC_MSG_ERROR([remove config.cache and re-run configure])]) + else + AC_MSG_RESULT(ok) + fi + ac_cv_host_system_type="$host" + ac_cv_build_system_type="$build" + ac_cv_target_system_type="$target" +]) + + +dnl ### Caching test results + + +dnl Look for site or system specific initialization scripts. +dnl AC_SITE_LOAD() +define(AC_SITE_LOAD, +[# Prefer explicitly selected file to automatically selected ones. +if test -z "$CONFIG_SITE"; then + if test "x$prefix" != xNONE; then + CONFIG_SITE="$prefix/share/config.site $prefix/etc/config.site" + else + CONFIG_SITE="$ac_default_prefix/share/config.site $ac_default_prefix/etc/config.site" + fi +fi +for ac_site_file in $CONFIG_SITE; do + if test -r "$ac_site_file"; then + echo "loading site script $ac_site_file" + . "$ac_site_file" + fi +done +]) + +dnl AC_CACHE_LOAD() +define(AC_CACHE_LOAD, +[if test -r "$cache_file"; then + echo "loading cache $cache_file" + . $cache_file +else + echo "creating cache $cache_file" + > $cache_file +fi +]) + +dnl AC_CACHE_SAVE() +define(AC_CACHE_SAVE, +[cat > confcache <<\EOF +# This file is a shell script that caches the results of configure +# tests run on this system so they can be shared between configure +# scripts and configure runs. It is not useful on other systems. +# If it contains results you don't want to keep, you may remove or edit it. +# +# By default, configure uses ./config.cache as the cache file, +# creating it if it does not exist already. You can give configure +# the --cache-file=FILE option to use a different cache file; that is +# what configure does when it calls configure scripts in +# subdirectories, so they share the cache. +# Giving --cache-file=/dev/null disables caching, for debugging configure. +# config.status only pays attention to the cache file if you give it the +# --recheck option to rerun configure. +# +EOF +dnl Allow a site initialization script to override cache values. +# The following way of writing the cache mishandles newlines in values, +# but we know of no workaround that is simple, portable, and efficient. +# So, don't put newlines in cache variables' values. +# Ultrix sh set writes to stderr and can't be redirected directly, +# and sets the high bit in the cache file unless we assign to the vars. +changequote(, )dnl +(set) 2>&1 | + case `(ac_space=' '; set | grep ac_space) 2>&1` in + *ac_space=\ *) + # `set' does not quote correctly, so add quotes (double-quote substitution + # turns \\\\ into \\, and sed turns \\ into \). + sed -n \ + -e "s/'/'\\\\''/g" \ + -e "s/^\\([a-zA-Z0-9_]*_cv_[a-zA-Z0-9_]*\\)=\\(.*\\)/\\1=\${\\1='\\2'}/p" + ;; + *) + # `set' quotes correctly as required by POSIX, so do not add quotes. 
+ sed -n -e 's/^\([a-zA-Z0-9_]*_cv_[a-zA-Z0-9_]*\)=\(.*\)/\1=${\1=\2}/p' + ;; + esac >> confcache +changequote([, ])dnl +if cmp -s $cache_file confcache; then + : +else + if test -w $cache_file; then + echo "updating cache $cache_file" + cat confcache > $cache_file + else + echo "not updating unwritable cache $cache_file" + fi +fi +rm -f confcache +]) + +dnl The name of shell var CACHE-ID must contain `_cv_' in order to get saved. +dnl AC_CACHE_VAL(CACHE-ID, COMMANDS-TO-SET-IT) +define(AC_CACHE_VAL, +[dnl We used to use the below line, but it fails if the 1st arg is a +dnl shell variable, so we need the eval. +dnl if test "${$1+set}" = set; then +dnl the '' avoids an AIX 4.1 sh bug ("invalid expansion"). +if eval "test \"`echo '$''{'$1'+set}'`\" = set"; then + echo $ac_n "(cached) $ac_c" 1>&AC_FD_MSG +else + $2 +fi +]) + +dnl AC_CACHE_CHECK(MESSAGE, CACHE-ID, COMMANDS) +define(AC_CACHE_CHECK, +[AC_MSG_CHECKING([$1]) +AC_CACHE_VAL([$2], [$3]) +AC_MSG_RESULT([$]$2)]) + + +dnl ### Defining symbols + + +dnl Set VARIABLE to VALUE, verbatim, or 1. +dnl AC_DEFINE(VARIABLE [, VALUE]) +define(AC_DEFINE, +[cat >> confdefs.h <<\EOF +[#define] $1 ifelse($#, 2, [$2], $#, 3, [$2], 1) +EOF +]) + +dnl Similar, but perform shell substitutions $ ` \ once on VALUE. +define(AC_DEFINE_UNQUOTED, +[cat >> confdefs.h <&AC_FD_MSG +echo "configure:__oline__: checking $1" >&AC_FD_CC]) + +dnl AC_CHECKING(FEATURE-DESCRIPTION) +define(AC_CHECKING, +[echo "checking $1" 1>&AC_FD_MSG +echo "configure:__oline__: checking $1" >&AC_FD_CC]) + +dnl AC_MSG_RESULT(RESULT-DESCRIPTION) +define(AC_MSG_RESULT, +[echo "$ac_t""$1" 1>&AC_FD_MSG]) + +dnl AC_VERBOSE(RESULT-DESCRIPTION) +define(AC_VERBOSE, +[AC_OBSOLETE([$0], [; instead use AC_MSG_RESULT])dnl +echo " $1" 1>&AC_FD_MSG]) + +dnl AC_MSG_WARN(PROBLEM-DESCRIPTION) +define(AC_MSG_WARN, +[echo "configure: warning: $1" 1>&2]) + +dnl AC_MSG_ERROR(ERROR-DESCRIPTION) +define(AC_MSG_ERROR, +[{ echo "configure: error: $1" 1>&2; exit 1; }]) + + +dnl ### Selecting which language to use for testing + + +dnl AC_LANG_C() +AC_DEFUN(AC_LANG_C, +[define([AC_LANG], [C])dnl +ac_ext=c +# CFLAGS is not in ac_cpp because -g, -O, etc. are not valid cpp options. +ac_cpp='$CPP $CPPFLAGS' +ac_compile='${CC-cc} -c $CFLAGS $CPPFLAGS conftest.$ac_ext 1>&AC_FD_CC' +ac_link='${CC-cc} -o conftest${ac_exeext} $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS 1>&AC_FD_CC' +cross_compiling=$ac_cv_prog_cc_cross +]) + +dnl AC_LANG_CPLUSPLUS() +AC_DEFUN(AC_LANG_CPLUSPLUS, +[define([AC_LANG], [CPLUSPLUS])dnl +ac_ext=C +# CXXFLAGS is not in ac_cpp because -g, -O, etc. are not valid cpp options. +ac_cpp='$CXXCPP $CPPFLAGS' +ac_compile='${CXX-g++} -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext 1>&AC_FD_CC' +ac_link='${CXX-g++} -o conftest${ac_exeext} $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS 1>&AC_FD_CC' +cross_compiling=$ac_cv_prog_cxx_cross +]) + +dnl AC_LANG_FORTRAN77() +AC_DEFUN(AC_LANG_FORTRAN77, +[define([AC_LANG], [FORTRAN77])dnl +ac_ext=f +ac_compile='${F77-f77} -c $FFLAGS conftest.$ac_ext 1>&AC_FD_CC' +ac_link='${F77-f77} -o conftest${ac_exeext} $FFLAGS $LDFLAGS conftest.$ac_ext $LIBS 1>&AC_FD_CC' +cross_compiling=$ac_cv_prog_f77_cross +]) + +dnl Push the current language on a stack. +dnl AC_LANG_SAVE() +define(AC_LANG_SAVE, +[pushdef([AC_LANG_STACK], AC_LANG)]) + +dnl Restore the current language from the stack. 
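AC_CACHE_VAL and AC_CACHE_CHECK above only run a test when its *_cv_* variable is still unset, and config.cache is nothing more than a shell script that re-assigns those variables on the next run. A rough Python analogue of that memoization (an illustration only, not part of the patched file):

    cache = {}   # stands in for the sourced config.cache

    def cache_check(message, cache_id, compute):
        # AC_CACHE_CHECK prints the "checking ..." message, consults the
        # cache variable, and only runs the real test on a miss.
        print("checking %s..." % message, end=" ")
        if cache_id in cache:
            print("(cached)", end=" ")
        else:
            cache[cache_id] = compute()
        print(cache[cache_id])
        return cache[cache_id]

    cache_check("for ranlib", "ac_cv_prog_RANLIB", lambda: "ranlib")
    cache_check("for ranlib", "ac_cv_prog_RANLIB", lambda: "ranlib")  # hits cache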
+dnl AC_LANG_RESTORE() +pushdef([AC_LANG_RESTORE], +[ifelse(AC_LANG_STACK, [C], [AC_LANG_C],dnl +AC_LANG_STACK, [CPLUSPLUS], [AC_LANG_CPLUSPLUS],dnl +AC_LANG_STACK, [FORTRAN77], [AC_LANG_FORTRAN77])[]popdef([AC_LANG_STACK])]) + + +dnl ### Compiler-running mechanics + + +dnl The purpose of this macro is to "configure:123: command line" +dnl written into config.log for every test run. +dnl AC_TRY_EVAL(VARIABLE) +AC_DEFUN(AC_TRY_EVAL, +[{ (eval echo configure:__oline__: \"[$]$1\") 1>&AC_FD_CC; dnl +(eval [$]$1) 2>&AC_FD_CC; }]) + +dnl AC_TRY_COMMAND(COMMAND) +AC_DEFUN(AC_TRY_COMMAND, +[{ ac_try='$1'; AC_TRY_EVAL(ac_try); }]) + + +dnl ### Dependencies between macros + + +dnl AC_BEFORE(THIS-MACRO-NAME, CALLED-MACRO-NAME) +define(AC_BEFORE, +[ifdef([AC_PROVIDE_$2], [errprint(__file__:__line__: [$2 was called before $1 +])])]) + +dnl AC_REQUIRE(MACRO-NAME) +define(AC_REQUIRE, +[ifdef([AC_PROVIDE_$1], , +[AC_DIVERT_PUSH(builtin(eval, AC_DIVERSION_CURRENT - 1))dnl +indir([$1]) +AC_DIVERT_POP()dnl +])]) + +dnl AC_PROVIDE(MACRO-NAME) +define(AC_PROVIDE, +[define([AC_PROVIDE_$1], )]) + +dnl AC_OBSOLETE(THIS-MACRO-NAME [, SUGGESTION]) +define(AC_OBSOLETE, +[errprint(__file__:__line__: warning: [$1] is obsolete[$2] +)]) + + +dnl ### Checking for programs + + +dnl AC_CHECK_PROG(VARIABLE, PROG-TO-CHECK-FOR, VALUE-IF-FOUND +dnl [, [VALUE-IF-NOT-FOUND] [, [PATH] [, [REJECT]]]]) +AC_DEFUN(AC_CHECK_PROG, +[# Extract the first word of "$2", so it can be a program name with args. +set dummy $2; ac_word=[$]2 +AC_MSG_CHECKING([for $ac_word]) +AC_CACHE_VAL(ac_cv_prog_$1, +[if test -n "[$]$1"; then + ac_cv_prog_$1="[$]$1" # Let the user override the test. +else + IFS="${IFS= }"; ac_save_ifs="$IFS"; IFS=":" +ifelse([$6], , , [ ac_prog_rejected=no +])dnl +dnl $ac_dummy forces splitting on constant user-supplied paths. +dnl POSIX.2 word splitting is done only on the output of word expansions, +dnl not every word. This closes a longstanding sh security hole. + ac_dummy="ifelse([$5], , $PATH, [$5])" + for ac_dir in $ac_dummy; do + test -z "$ac_dir" && ac_dir=. + if test -f $ac_dir/$ac_word; then +ifelse([$6], , , dnl +[ if test "[$ac_dir/$ac_word]" = "$6"; then + ac_prog_rejected=yes + continue + fi +])dnl + ac_cv_prog_$1="$3" + break + fi + done + IFS="$ac_save_ifs" +ifelse([$6], , , [if test $ac_prog_rejected = yes; then + # We found a bogon in the path, so make sure we never use it. + set dummy [$]ac_cv_prog_$1 + shift + if test [$]# -gt 0; then + # We chose a different compiler from the bogus one. + # However, it has the same basename, so the bogon will be chosen + # first if we set $1 to just the basename; use the full file name. + shift + set dummy "$ac_dir/$ac_word" "[$]@" + shift + ac_cv_prog_$1="[$]@" +ifelse([$2], [$4], dnl +[ else + # Default is a loser. + AC_MSG_ERROR([$1=$6 unacceptable, but no other $4 found in dnl +ifelse([$5], , [\$]PATH, [$5])]) +])dnl + fi +fi +])dnl +dnl If no 4th arg is given, leave the cache variable unset, +dnl so AC_CHECK_PROGS will keep looking. +ifelse([$4], , , [ test -z "[$]ac_cv_prog_$1" && ac_cv_prog_$1="$4" +])dnl +fi])dnl +$1="$ac_cv_prog_$1" +if test -n "[$]$1"; then + AC_MSG_RESULT([$]$1) +else + AC_MSG_RESULT(no) +fi +AC_SUBST($1)dnl +]) + +dnl AC_PATH_PROG(VARIABLE, PROG-TO-CHECK-FOR [, VALUE-IF-NOT-FOUND [, PATH]]) +AC_DEFUN(AC_PATH_PROG, +[# Extract the first word of "$2", so it can be a program name with args. 
+set dummy $2; ac_word=[$]2 +AC_MSG_CHECKING([for $ac_word]) +AC_CACHE_VAL(ac_cv_path_$1, +[case "[$]$1" in + /*) + ac_cv_path_$1="[$]$1" # Let the user override the test with a path. + ;; + ?:/*) + ac_cv_path_$1="[$]$1" # Let the user override the test with a dos path. + ;; + *) + IFS="${IFS= }"; ac_save_ifs="$IFS"; IFS=":" +dnl $ac_dummy forces splitting on constant user-supplied paths. +dnl POSIX.2 word splitting is done only on the output of word expansions, +dnl not every word. This closes a longstanding sh security hole. + ac_dummy="ifelse([$4], , $PATH, [$4])" + for ac_dir in $ac_dummy; do + test -z "$ac_dir" && ac_dir=. + if test -f $ac_dir/$ac_word; then + ac_cv_path_$1="$ac_dir/$ac_word" + break + fi + done + IFS="$ac_save_ifs" +dnl If no 3rd arg is given, leave the cache variable unset, +dnl so AC_PATH_PROGS will keep looking. +ifelse([$3], , , [ test -z "[$]ac_cv_path_$1" && ac_cv_path_$1="$3" +])dnl + ;; +esac])dnl +$1="$ac_cv_path_$1" +if test -n "[$]$1"; then + AC_MSG_RESULT([$]$1) +else + AC_MSG_RESULT(no) +fi +AC_SUBST($1)dnl +]) + +dnl AC_CHECK_PROGS(VARIABLE, PROGS-TO-CHECK-FOR [, VALUE-IF-NOT-FOUND +dnl [, PATH]]) +AC_DEFUN(AC_CHECK_PROGS, +[for ac_prog in $2 +do +AC_CHECK_PROG($1, [$]ac_prog, [$]ac_prog, , $4) +test -n "[$]$1" && break +done +ifelse([$3], , , [test -n "[$]$1" || $1="$3" +])]) + +dnl AC_PATH_PROGS(VARIABLE, PROGS-TO-CHECK-FOR [, VALUE-IF-NOT-FOUND +dnl [, PATH]]) +AC_DEFUN(AC_PATH_PROGS, +[for ac_prog in $2 +do +AC_PATH_PROG($1, [$]ac_prog, , $4) +test -n "[$]$1" && break +done +ifelse([$3], , , [test -n "[$]$1" || $1="$3" +])]) + +dnl Internal subroutine. +AC_DEFUN(AC_CHECK_TOOL_PREFIX, +[AC_REQUIRE([AC_CANONICAL_HOST])AC_REQUIRE([AC_CANONICAL_BUILD])dnl +if test $host != $build; then + ac_tool_prefix=${host_alias}- +else + ac_tool_prefix= +fi +]) + +dnl AC_CHECK_TOOL(VARIABLE, PROG-TO-CHECK-FOR[, VALUE-IF-NOT-FOUND [, PATH]]) +AC_DEFUN(AC_CHECK_TOOL, +[AC_REQUIRE([AC_CHECK_TOOL_PREFIX])dnl +AC_CHECK_PROG($1, ${ac_tool_prefix}$2, ${ac_tool_prefix}$2, + ifelse([$3], , [$2], ), $4) +ifelse([$3], , , [ +if test -z "$ac_cv_prog_$1"; then +if test -n "$ac_tool_prefix"; then + AC_CHECK_PROG($1, $2, $2, $3) +else + $1="$3" +fi +fi]) +]) + +dnl Guess the value for the `prefix' variable by looking for +dnl the argument program along PATH and taking its parent. +dnl Example: if the argument is `gcc' and we find /usr/local/gnu/bin/gcc, +dnl set `prefix' to /usr/local/gnu. +dnl This comes too late to find a site file based on the prefix, +dnl and it might use a cached value for the path. +dnl No big loss, I think, since most configures don't use this macro anyway. +dnl AC_PREFIX_PROGRAM(PROGRAM) +AC_DEFUN(AC_PREFIX_PROGRAM, +[if test "x$prefix" = xNONE; then +changequote(<<, >>)dnl +define(<>, translit($1, [a-z], [A-Z]))dnl +changequote([, ])dnl +dnl We reimplement AC_MSG_CHECKING (mostly) to avoid the ... in the middle. +echo $ac_n "checking for prefix by $ac_c" 1>&AC_FD_MSG +AC_PATH_PROG(AC_VAR_NAME, $1) +changequote(<<, >>)dnl + if test -n "$ac_cv_path_<<>>AC_VAR_NAME"; then + prefix=`echo $ac_cv_path_<<>>AC_VAR_NAME|sed 's%/[^/][^/]*//*[^/][^/]*$%%'` +changequote([, ])dnl + fi +fi +undefine([AC_VAR_NAME])dnl +]) + +dnl Try to compile, link and execute TEST-PROGRAM. Set WORKING-VAR to +dnl `yes' if the current compiler works, otherwise set it ti `no'. Set +dnl CROSS-VAR to `yes' if the compiler and linker produce non-native +dnl executables, otherwise set it to `no'. 
Before calling +dnl `AC_TRY_COMPILER()', call `AC_LANG_*' to set-up for the right +dnl language. +dnl +dnl AC_TRY_COMPILER(TEST-PROGRAM, WORKING-VAR, CROSS-VAR) +AC_DEFUN(AC_TRY_COMPILER, +[cat > conftest.$ac_ext << EOF +ifelse(AC_LANG, [FORTRAN77], , +[ +[#]line __oline__ "configure" +#include "confdefs.h" +]) +[$1] +EOF +if AC_TRY_EVAL(ac_link) && test -s conftest${ac_exeext}; then + [$2]=yes + # If we can't run a trivial program, we are probably using a cross compiler. + if (./conftest; exit) 2>/dev/null; then + [$3]=no + else + [$3]=yes + fi +else + echo "configure: failed program was:" >&AC_FD_CC + cat conftest.$ac_ext >&AC_FD_CC + [$2]=no +fi +rm -fr conftest*]) + + +dnl ### Checking for libraries + + +dnl AC_TRY_LINK_FUNC(func, action-if-found, action-if-not-found) +dnl Try to link a program that calls FUNC, handling GCC builtins. If +dnl the link succeeds, execute ACTION-IF-FOUND; otherwise, execute +dnl ACTION-IF-NOT-FOUND. + +AC_DEFUN(AC_TRY_LINK_FUNC, +AC_TRY_LINK(dnl +ifelse([$1], [main], , dnl Avoid conflicting decl of main. +[/* Override any gcc2 internal prototype to avoid an error. */ +]ifelse(AC_LANG, CPLUSPLUS, [#ifdef __cplusplus +extern "C" +#endif +])dnl +[/* We use char because int might match the return type of a gcc2 + builtin and then its argument prototype would still apply. */ +char $1(); +]), +[$1()], +[$2], +[$3])) + + +dnl AC_SEARCH_LIBS(FUNCTION, SEARCH-LIBS [, ACTION-IF-FOUND +dnl [, ACTION-IF-NOT-FOUND [, OTHER-LIBRARIES]]]) +dnl Search for a library defining FUNC, if it's not already available. + +AC_DEFUN(AC_SEARCH_LIBS, +[AC_PREREQ([2.13]) +AC_CACHE_CHECK([for library containing $1], [ac_cv_search_$1], +[ac_func_search_save_LIBS="$LIBS" +ac_cv_search_$1="no" +AC_TRY_LINK_FUNC([$1], [ac_cv_search_$1="none required"]) +test "$ac_cv_search_$1" = "no" && for i in $2; do +LIBS="-l$i $5 $ac_func_search_save_LIBS" +AC_TRY_LINK_FUNC([$1], +[ac_cv_search_$1="-l$i" +break]) +done +LIBS="$ac_func_search_save_LIBS"]) +if test "$ac_cv_search_$1" != "no"; then + test "$ac_cv_search_$1" = "none required" || LIBS="$ac_cv_search_$1 $LIBS" + $3 +else : + $4 +fi]) + + + +dnl AC_CHECK_LIB(LIBRARY, FUNCTION [, ACTION-IF-FOUND [, ACTION-IF-NOT-FOUND +dnl [, OTHER-LIBRARIES]]]) +AC_DEFUN(AC_CHECK_LIB, +[AC_MSG_CHECKING([for $2 in -l$1]) +dnl Use a cache variable name containing both the library and function name, +dnl because the test really is for library $1 defining function $2, not +dnl just for library $1. Separate tests with the same $1 and different $2s +dnl may have different results. +ac_lib_var=`echo $1['_']$2 | sed 'y%./+-%__p_%'` +AC_CACHE_VAL(ac_cv_lib_$ac_lib_var, +[ac_save_LIBS="$LIBS" +LIBS="-l$1 $5 $LIBS" +AC_TRY_LINK(dnl +ifelse(AC_LANG, [FORTRAN77], , +ifelse([$2], [main], , dnl Avoid conflicting decl of main. +[/* Override any gcc2 internal prototype to avoid an error. */ +]ifelse(AC_LANG, CPLUSPLUS, [#ifdef __cplusplus +extern "C" +#endif +])dnl +[/* We use char because int might match the return type of a gcc2 + builtin and then its argument prototype would still apply. 
*/ +char $2(); +])), + [$2()], + eval "ac_cv_lib_$ac_lib_var=yes", + eval "ac_cv_lib_$ac_lib_var=no") +LIBS="$ac_save_LIBS" +])dnl +if eval "test \"`echo '$ac_cv_lib_'$ac_lib_var`\" = yes"; then + AC_MSG_RESULT(yes) + ifelse([$3], , +[changequote(, )dnl + ac_tr_lib=HAVE_LIB`echo $1 | sed -e 's/[^a-zA-Z0-9_]/_/g' \ + -e 'y/abcdefghijklmnopqrstuvwxyz/ABCDEFGHIJKLMNOPQRSTUVWXYZ/'` +changequote([, ])dnl + AC_DEFINE_UNQUOTED($ac_tr_lib) + LIBS="-l$1 $LIBS" +], [$3]) +else + AC_MSG_RESULT(no) +ifelse([$4], , , [$4 +])dnl +fi +]) + +dnl AC_HAVE_LIBRARY(LIBRARY, [, ACTION-IF-FOUND [, ACTION-IF-NOT-FOUND +dnl [, OTHER-LIBRARIES]]]) +AC_DEFUN(AC_HAVE_LIBRARY, +[AC_OBSOLETE([$0], [; instead use AC_CHECK_LIB])dnl +changequote(<<, >>)dnl +define(<>, dnl +patsubst(patsubst($1, <>, <<\1>>), <<-l>>, <<>>))dnl +define(<>, ac_cv_lib_<<>>AC_LIB_NAME)dnl +changequote([, ])dnl +AC_MSG_CHECKING([for -l[]AC_LIB_NAME]) +AC_CACHE_VAL(AC_CV_NAME, +[ac_save_LIBS="$LIBS" +LIBS="-l[]AC_LIB_NAME[] $4 $LIBS" +AC_TRY_LINK( , [main()], AC_CV_NAME=yes, AC_CV_NAME=no) +LIBS="$ac_save_LIBS" +])dnl +AC_MSG_RESULT($AC_CV_NAME) +if test "$AC_CV_NAME" = yes; then + ifelse([$2], , +[AC_DEFINE([HAVE_LIB]translit(AC_LIB_NAME, [a-z], [A-Z])) + LIBS="-l[]AC_LIB_NAME[] $LIBS" +], [$2]) +ifelse([$3], , , [else + $3 +])dnl +fi +undefine([AC_LIB_NAME])dnl +undefine([AC_CV_NAME])dnl +]) + + +dnl ### Examining declarations + + +dnl AC_TRY_CPP(INCLUDES, [ACTION-IF-TRUE [, ACTION-IF-FALSE]]) +AC_DEFUN(AC_TRY_CPP, +[AC_REQUIRE_CPP()dnl +cat > conftest.$ac_ext <&AC_FD_CC + echo "configure: failed program was:" >&AC_FD_CC + cat conftest.$ac_ext >&AC_FD_CC +ifelse([$3], , , [ rm -rf conftest* + $3 +])dnl +fi +rm -f conftest*]) + +dnl AC_EGREP_HEADER(PATTERN, HEADER-FILE, ACTION-IF-FOUND [, +dnl ACTION-IF-NOT-FOUND]) +AC_DEFUN(AC_EGREP_HEADER, +[AC_EGREP_CPP([$1], [#include <$2>], [$3], [$4])]) + +dnl Because this macro is used by AC_PROG_GCC_TRADITIONAL, which must +dnl come early, it is not included in AC_BEFORE checks. 
+dnl AC_EGREP_CPP(PATTERN, PROGRAM, [ACTION-IF-FOUND [, +dnl ACTION-IF-NOT-FOUND]]) +AC_DEFUN(AC_EGREP_CPP, +[AC_REQUIRE_CPP()dnl +cat > conftest.$ac_ext <&AC_FD_CC | +dnl Prevent m4 from eating character classes: +changequote(, )dnl + egrep "$1" >/dev/null 2>&1; then +changequote([, ])dnl + ifelse([$3], , :, [rm -rf conftest* + $3]) +ifelse([$4], , , [else + rm -rf conftest* + $4 +])dnl +fi +rm -f conftest* +]) + + +dnl ### Examining syntax + + +dnl AC_TRY_COMPILE(INCLUDES, FUNCTION-BODY, +dnl [ACTION-IF-FOUND [, ACTION-IF-NOT-FOUND]]) +AC_DEFUN(AC_TRY_COMPILE, +[cat > conftest.$ac_ext <&AC_FD_CC + cat conftest.$ac_ext >&AC_FD_CC +ifelse([$4], , , [ rm -rf conftest* + $4 +])dnl +fi +rm -f conftest*]) + + +dnl ### Examining libraries + + +dnl AC_COMPILE_CHECK(ECHO-TEXT, INCLUDES, FUNCTION-BODY, +dnl ACTION-IF-FOUND [, ACTION-IF-NOT-FOUND]) +AC_DEFUN(AC_COMPILE_CHECK, +[AC_OBSOLETE([$0], [; instead use AC_TRY_COMPILE or AC_TRY_LINK, and AC_MSG_CHECKING and AC_MSG_RESULT])dnl +ifelse([$1], , , [AC_CHECKING([for $1]) +])dnl +AC_TRY_LINK([$2], [$3], [$4], [$5]) +]) + +dnl AC_TRY_LINK(INCLUDES, FUNCTION-BODY, +dnl [ACTION-IF-FOUND [, ACTION-IF-NOT-FOUND]]) +AC_DEFUN(AC_TRY_LINK, +[cat > conftest.$ac_ext <&AC_FD_CC + cat conftest.$ac_ext >&AC_FD_CC +ifelse([$4], , , [ rm -rf conftest* + $4 +])dnl +fi +rm -f conftest*]) + + +dnl ### Checking for run-time features + + +dnl AC_TRY_RUN(PROGRAM, [ACTION-IF-TRUE [, ACTION-IF-FALSE +dnl [, ACTION-IF-CROSS-COMPILING]]]) +AC_DEFUN(AC_TRY_RUN, +[if test "$cross_compiling" = yes; then + ifelse([$4], , + [errprint(__file__:__line__: warning: [AC_TRY_RUN] called without default to allow cross compiling +)dnl + AC_MSG_ERROR(can not run test program while cross compiling)], + [$4]) +else + AC_TRY_RUN_NATIVE([$1], [$2], [$3]) +fi +]) + +dnl Like AC_TRY_RUN but assumes a native-environment (non-cross) compiler. +dnl AC_TRY_RUN_NATIVE(PROGRAM, [ACTION-IF-TRUE [, ACTION-IF-FALSE]]) +AC_DEFUN(AC_TRY_RUN_NATIVE, +[cat > conftest.$ac_ext </dev/null +then +dnl Don't remove the temporary files here, so they can be examined. + ifelse([$2], , :, [$2]) +else + echo "configure: failed program was:" >&AC_FD_CC + cat conftest.$ac_ext >&AC_FD_CC +ifelse([$3], , , [ rm -fr conftest* + $3 +])dnl +fi +rm -fr conftest*]) + + +dnl ### Checking for header files + + +dnl AC_CHECK_HEADER(HEADER-FILE, [ACTION-IF-FOUND [, ACTION-IF-NOT-FOUND]]) +AC_DEFUN(AC_CHECK_HEADER, +[dnl Do the transliteration at runtime so arg 1 can be a shell variable. +ac_safe=`echo "$1" | sed 'y%./+-%__p_%'` +AC_MSG_CHECKING([for $1]) +AC_CACHE_VAL(ac_cv_header_$ac_safe, +[AC_TRY_CPP([#include <$1>], eval "ac_cv_header_$ac_safe=yes", + eval "ac_cv_header_$ac_safe=no")])dnl +if eval "test \"`echo '$ac_cv_header_'$ac_safe`\" = yes"; then + AC_MSG_RESULT(yes) + ifelse([$2], , :, [$2]) +else + AC_MSG_RESULT(no) +ifelse([$3], , , [$3 +])dnl +fi +]) + +dnl AC_CHECK_HEADERS(HEADER-FILE... [, ACTION-IF-FOUND [, ACTION-IF-NOT-FOUND]]) +AC_DEFUN(AC_CHECK_HEADERS, +[for ac_hdr in $1 +do +AC_CHECK_HEADER($ac_hdr, +[changequote(, )dnl + ac_tr_hdr=HAVE_`echo $ac_hdr | sed 'y%abcdefghijklmnopqrstuvwxyz./-%ABCDEFGHIJKLMNOPQRSTUVWXYZ___%'` +changequote([, ])dnl + AC_DEFINE_UNQUOTED($ac_tr_hdr) $2], $3)dnl +done +]) + + +dnl ### Checking for the existence of files + +dnl AC_CHECK_FILE(FILE, [ACTION-IF-FOUND [, ACTION-IF-NOT-FOUND]]) +AC_DEFUN(AC_CHECK_FILE, +[AC_REQUIRE([AC_PROG_CC]) +dnl Do the transliteration at runtime so arg 1 can be a shell variable. 
+ac_safe=`echo "$1" | sed 'y%./+-%__p_%'` +AC_MSG_CHECKING([for $1]) +AC_CACHE_VAL(ac_cv_file_$ac_safe, +[if test "$cross_compiling" = yes; then + errprint(__file__:__line__: warning: Cannot check for file existence when cross compiling +)dnl + AC_MSG_ERROR(Cannot check for file existence when cross compiling) +else + if test -r $1; then + eval "ac_cv_file_$ac_safe=yes" + else + eval "ac_cv_file_$ac_safe=no" + fi +fi])dnl +if eval "test \"`echo '$ac_cv_file_'$ac_safe`\" = yes"; then + AC_MSG_RESULT(yes) + ifelse([$2], , :, [$2]) +else + AC_MSG_RESULT(no) +ifelse([$3], , , [$3]) +fi +]) + +dnl AC_CHECK_FILES(FILE... [, ACTION-IF-FOUND [, ACTION-IF-NOT-FOUND]]) +AC_DEFUN(AC_CHECK_FILES, +[for ac_file in $1 +do +AC_CHECK_FILE($ac_file, +[changequote(, )dnl + ac_tr_file=HAVE_`echo $ac_file | sed 'y%abcdefghijklmnopqrstuvwxyz./-%ABCDEFGHIJKLMNOPQRSTUVWXYZ___%'` +changequote([, ])dnl + AC_DEFINE_UNQUOTED($ac_tr_file) $2], $3)dnl +done +]) + + +dnl ### Checking for library functions + + +dnl AC_CHECK_FUNC(FUNCTION, [ACTION-IF-FOUND [, ACTION-IF-NOT-FOUND]]) +AC_DEFUN(AC_CHECK_FUNC, +[AC_MSG_CHECKING([for $1]) +AC_CACHE_VAL(ac_cv_func_$1, +[AC_TRY_LINK( +dnl Don't include because on OSF/1 3.0 it includes +dnl which includes which contains a prototype for +dnl select. Similarly for bzero. +[/* System header to define __stub macros and hopefully few prototypes, + which can conflict with char $1(); below. */ +#include +/* Override any gcc2 internal prototype to avoid an error. */ +]ifelse(AC_LANG, CPLUSPLUS, [#ifdef __cplusplus +extern "C" +#endif +])dnl +[/* We use char because int might match the return type of a gcc2 + builtin and then its argument prototype would still apply. */ +char $1(); +], [ +/* The GNU C library defines this for functions which it implements + to always fail with ENOSYS. Some functions are actually named + something starting with __ and the normal name is an alias. */ +#if defined (__stub_$1) || defined (__stub___$1) +choke me +#else +$1(); +#endif +], eval "ac_cv_func_$1=yes", eval "ac_cv_func_$1=no")]) +if eval "test \"`echo '$ac_cv_func_'$1`\" = yes"; then + AC_MSG_RESULT(yes) + ifelse([$2], , :, [$2]) +else + AC_MSG_RESULT(no) +ifelse([$3], , , [$3 +])dnl +fi +]) + +dnl AC_CHECK_FUNCS(FUNCTION... [, ACTION-IF-FOUND [, ACTION-IF-NOT-FOUND]]) +AC_DEFUN(AC_CHECK_FUNCS, +[for ac_func in $1 +do +AC_CHECK_FUNC($ac_func, +[changequote(, )dnl + ac_tr_func=HAVE_`echo $ac_func | tr 'abcdefghijklmnopqrstuvwxyz' 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'` +changequote([, ])dnl + AC_DEFINE_UNQUOTED($ac_tr_func) $2], $3)dnl +done +]) + +dnl AC_REPLACE_FUNCS(FUNCTION...) +AC_DEFUN(AC_REPLACE_FUNCS, +[AC_CHECK_FUNCS([$1], , [LIBOBJS="$LIBOBJS ${ac_func}.${ac_objext}"]) +AC_SUBST(LIBOBJS)dnl +]) + + +dnl ### Checking compiler characteristics + + +dnl AC_CHECK_SIZEOF(TYPE [, CROSS-SIZE]) +AC_DEFUN(AC_CHECK_SIZEOF, +[changequote(<<, >>)dnl +dnl The name to #define. +define(<>, translit(sizeof_$1, [a-z *], [A-Z_P]))dnl +dnl The cache variable name. 
+define(<>, translit(ac_cv_sizeof_$1, [ *], [_p]))dnl +changequote([, ])dnl +AC_MSG_CHECKING(size of $1) +AC_CACHE_VAL(AC_CV_NAME, +[AC_TRY_RUN([#include +main() +{ + FILE *f=fopen("conftestval", "w"); + if (!f) exit(1); + fprintf(f, "%d\n", sizeof($1)); + exit(0); +}], AC_CV_NAME=`cat conftestval`, AC_CV_NAME=0, ifelse([$2], , , AC_CV_NAME=$2))])dnl +AC_MSG_RESULT($AC_CV_NAME) +AC_DEFINE_UNQUOTED(AC_TYPE_NAME, $AC_CV_NAME) +undefine([AC_TYPE_NAME])dnl +undefine([AC_CV_NAME])dnl +]) + + +dnl ### Checking for typedefs + + +dnl AC_CHECK_TYPE(TYPE, DEFAULT) +AC_DEFUN(AC_CHECK_TYPE, +[AC_REQUIRE([AC_HEADER_STDC])dnl +AC_MSG_CHECKING(for $1) +AC_CACHE_VAL(ac_cv_type_$1, +[AC_EGREP_CPP(dnl +changequote(<<,>>)dnl +<<(^|[^a-zA-Z_0-9])$1[^a-zA-Z_0-9]>>dnl +changequote([,]), [#include +#if STDC_HEADERS +#include +#include +#endif], ac_cv_type_$1=yes, ac_cv_type_$1=no)])dnl +AC_MSG_RESULT($ac_cv_type_$1) +if test $ac_cv_type_$1 = no; then + AC_DEFINE($1, $2) +fi +]) + + +dnl ### Creating output files + + +dnl AC_CONFIG_HEADER(HEADER-TO-CREATE ...) +AC_DEFUN(AC_CONFIG_HEADER, +[define(AC_LIST_HEADER, $1)]) + +dnl Link each of the existing files SOURCE... to the corresponding +dnl link name in DEST... +dnl AC_LINK_FILES(SOURCE..., DEST...) +AC_DEFUN(AC_LINK_FILES, +[dnl +define([AC_LIST_FILES], ifdef([AC_LIST_FILES], [AC_LIST_FILES ],)[$1])dnl +define([AC_LIST_LINKS], ifdef([AC_LIST_LINKS], [AC_LIST_LINKS ],)[$2])]) + +dnl Add additional commands for AC_OUTPUT to put into config.status. +dnl Use diversions instead of macros so we can be robust in the +dnl presence of commas in $1 and/or $2. +dnl AC_OUTPUT_COMMANDS(EXTRA-CMDS, INIT-CMDS) +AC_DEFUN(AC_OUTPUT_COMMANDS, +[AC_DIVERT_PUSH(AC_DIVERSION_CMDS)dnl +[$1] +AC_DIVERT_POP()dnl +AC_DIVERT_PUSH(AC_DIVERSION_ICMDS)dnl +[$2] +AC_DIVERT_POP()]) + +dnl AC_CONFIG_SUBDIRS(DIR ...) +AC_DEFUN(AC_CONFIG_SUBDIRS, +[AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT])dnl +define([AC_LIST_SUBDIRS], ifdef([AC_LIST_SUBDIRS], [AC_LIST_SUBDIRS ],)[$1])dnl +subdirs="AC_LIST_SUBDIRS" +AC_SUBST(subdirs)dnl +]) + +dnl The big finish. +dnl Produce config.status, config.h, and links; and configure subdirs. +dnl AC_OUTPUT([FILE...] [, EXTRA-CMDS] [, INIT-CMDS]) +define(AC_OUTPUT, +[trap '' 1 2 15 +AC_CACHE_SAVE +trap 'rm -fr conftest* confdefs* core core.* *.core $ac_clean_files; exit 1' 1 2 15 + +test "x$prefix" = xNONE && prefix=$ac_default_prefix +# Let make expand exec_prefix. +test "x$exec_prefix" = xNONE && exec_prefix='${prefix}' + +# Any assignment to VPATH causes Sun make to only execute +# the first set of double-colon rules, so remove it if not needed. +# If there is a colon in the path, we need to keep it. +if test "x$srcdir" = x.; then +changequote(, )dnl + ac_vpsub='/^[ ]*VPATH[ ]*=[^:]*$/d' +changequote([, ])dnl +fi + +trap 'rm -f $CONFIG_STATUS conftest*; exit 1' 1 2 15 + +ifdef([AC_LIST_HEADER], [DEFS=-DHAVE_CONFIG_H], [AC_OUTPUT_MAKE_DEFS()]) + +# Without the "./", some shells look in PATH for config.status. +: ${CONFIG_STATUS=./config.status} + +echo creating $CONFIG_STATUS +rm -f $CONFIG_STATUS +cat > $CONFIG_STATUS </dev/null | sed 1q`: +# +[#] [$]0 [$]ac_configure_args +# +# Compiler output produced by configure, useful for debugging +# configure, is in ./config.log if it exists. 
+ +changequote(, )dnl +ac_cs_usage="Usage: $CONFIG_STATUS [--recheck] [--version] [--help]" +changequote([, ])dnl +for ac_option +do + case "[\$]ac_option" in + -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r) + echo "running [\$]{CONFIG_SHELL-/bin/sh} [$]0 [$]ac_configure_args --no-create --no-recursion" + exec [\$]{CONFIG_SHELL-/bin/sh} [$]0 [$]ac_configure_args --no-create --no-recursion ;; + -version | --version | --versio | --versi | --vers | --ver | --ve | --v) + echo "$CONFIG_STATUS generated by autoconf version AC_ACVERSION" + exit 0 ;; + -help | --help | --hel | --he | --h) + echo "[\$]ac_cs_usage"; exit 0 ;; + *) echo "[\$]ac_cs_usage"; exit 1 ;; + esac +done + +ac_given_srcdir=$srcdir +ifdef([AC_PROVIDE_AC_PROG_INSTALL], [ac_given_INSTALL="$INSTALL" +])dnl + +changequote(<<, >>)dnl +ifdef(<>, +<>, +<>) +changequote([, ])dnl +EOF +cat >> $CONFIG_STATUS <> $CONFIG_STATUS <> $CONFIG_STATUS <<\EOF +undivert(AC_DIVERSION_CMDS)dnl +$2 +exit 0 +EOF +chmod +x $CONFIG_STATUS +rm -fr confdefs* $ac_clean_files +test "$no_create" = yes || ${CONFIG_SHELL-/bin/sh} $CONFIG_STATUS || exit 1 +dnl config.status should not do recursion. +ifdef([AC_LIST_SUBDIRS], [AC_OUTPUT_SUBDIRS(AC_LIST_SUBDIRS)])dnl +])dnl + +dnl Set the DEFS variable to the -D options determined earlier. +dnl This is a subroutine of AC_OUTPUT. +dnl It is called inside configure, outside of config.status. +dnl AC_OUTPUT_MAKE_DEFS() +define(AC_OUTPUT_MAKE_DEFS, +[# Transform confdefs.h into DEFS. +dnl Using a here document instead of a string reduces the quoting nightmare. +# Protect against shell expansion while executing Makefile rules. +# Protect against Makefile macro expansion. +cat > conftest.defs <<\EOF +changequote(<<, >>)dnl +s%<<#define>> \([A-Za-z_][A-Za-z0-9_]*\) *\(.*\)%-D\1=\2%g +s%[ `~<<#>>$^&*(){}\\|;'"<>?]%\\&%g +s%\[%\\&%g +s%\]%\\&%g +s%\$%$$%g +changequote([, ])dnl +EOF +DEFS=`sed -f conftest.defs confdefs.h | tr '\012' ' '` +rm -f conftest.defs +]) + +dnl Do the variable substitutions to create the Makefiles or whatever. +dnl This is a subroutine of AC_OUTPUT. It is called inside an unquoted +dnl here document whose contents are going into config.status, but +dnl upon returning, the here document is being quoted. +dnl AC_OUTPUT_FILES(FILE...) +define(AC_OUTPUT_FILES, +[# Protect against being on the right side of a sed subst in config.status. +changequote(, )dnl +sed 's/%@/@@/; s/@%/@@/; s/%g\$/@g/; /@g\$/s/[\\\\&%]/\\\\&/g; + s/@@/%@/; s/@@/@%/; s/@g\$/%g/' > conftest.subs <<\\CEOF +changequote([, ])dnl +dnl These here document variables are unquoted when configure runs +dnl but quoted when config.status runs, so variables are expanded once. +$ac_vpsub +dnl Shell code in configure.in might set extrasub. +$extrasub +dnl Insert the sed substitutions of variables. +undivert(AC_DIVERSION_SED) +CEOF +EOF + +cat >> $CONFIG_STATUS <<\EOF + +# Split the substitutions into bite-sized pieces for seds with +# small command number limits, like on Digital OSF/1 and HP-UX. +ac_max_sed_cmds=90 # Maximum number of lines to put in a sed script. +ac_file=1 # Number of current file. +ac_beg=1 # First line for current file. +ac_end=$ac_max_sed_cmds # Line after last line for current file. +ac_more_lines=: +ac_sed_cmds="" +while $ac_more_lines; do + if test $ac_beg -gt 1; then + sed "1,${ac_beg}d; ${ac_end}q" conftest.subs > conftest.s$ac_file + else + sed "${ac_end}q" conftest.subs > conftest.s$ac_file + fi + if test ! 
-s conftest.s$ac_file; then + ac_more_lines=false + rm -f conftest.s$ac_file + else + if test -z "$ac_sed_cmds"; then + ac_sed_cmds="sed -f conftest.s$ac_file" + else + ac_sed_cmds="$ac_sed_cmds | sed -f conftest.s$ac_file" + fi + ac_file=`expr $ac_file + 1` + ac_beg=$ac_end + ac_end=`expr $ac_end + $ac_max_sed_cmds` + fi +done +if test -z "$ac_sed_cmds"; then + ac_sed_cmds=cat +fi +EOF + +cat >> $CONFIG_STATUS <> $CONFIG_STATUS <<\EOF +for ac_file in .. $CONFIG_FILES; do if test "x$ac_file" != x..; then +changequote(, )dnl + # Support "outfile[:infile[:infile...]]", defaulting infile="outfile.in". + case "$ac_file" in + *:*) ac_file_in=`echo "$ac_file"|sed 's%[^:]*:%%'` + ac_file=`echo "$ac_file"|sed 's%:.*%%'` ;; + *) ac_file_in="${ac_file}.in" ;; + esac + + # Adjust a relative srcdir, top_srcdir, and INSTALL for subdirectories. + + # Remove last slash and all that follows it. Not all systems have dirname. + ac_dir=`echo $ac_file|sed 's%/[^/][^/]*$%%'` +changequote([, ])dnl + if test "$ac_dir" != "$ac_file" && test "$ac_dir" != .; then + # The file is in a subdirectory. + test ! -d "$ac_dir" && mkdir "$ac_dir" + ac_dir_suffix="/`echo $ac_dir|sed 's%^\./%%'`" + # A "../" for each directory in $ac_dir_suffix. +changequote(, )dnl + ac_dots=`echo $ac_dir_suffix|sed 's%/[^/]*%../%g'` +changequote([, ])dnl + else + ac_dir_suffix= ac_dots= + fi + + case "$ac_given_srcdir" in + .) srcdir=. + if test -z "$ac_dots"; then top_srcdir=. + else top_srcdir=`echo $ac_dots|sed 's%/$%%'`; fi ;; + /*) srcdir="$ac_given_srcdir$ac_dir_suffix"; top_srcdir="$ac_given_srcdir" ;; + *) # Relative path. + srcdir="$ac_dots$ac_given_srcdir$ac_dir_suffix" + top_srcdir="$ac_dots$ac_given_srcdir" ;; + esac + +ifdef([AC_PROVIDE_AC_PROG_INSTALL], +[ case "$ac_given_INSTALL" in +changequote(, )dnl + [/$]*) INSTALL="$ac_given_INSTALL" ;; +changequote([, ])dnl + *) INSTALL="$ac_dots$ac_given_INSTALL" ;; + esac +])dnl + + echo creating "$ac_file" + rm -f "$ac_file" + configure_input="Generated automatically from `echo $ac_file_in|sed 's%.*/%%'` by configure." + case "$ac_file" in + *Makefile*) ac_comsub="1i\\ +# $configure_input" ;; + *) ac_comsub= ;; + esac + + ac_file_inputs=`echo $ac_file_in|sed -e "s%^%$ac_given_srcdir/%" -e "s%:% $ac_given_srcdir/%g"` + sed -e "$ac_comsub +s%@configure_input@%$configure_input%g +s%@srcdir@%$srcdir%g +s%@top_srcdir@%$top_srcdir%g +ifdef([AC_PROVIDE_AC_PROG_INSTALL], [s%@INSTALL@%$INSTALL%g +])dnl +dnl The parens around the eval prevent an "illegal io" in Ultrix sh. +" $ac_file_inputs | (eval "$ac_sed_cmds") > $ac_file +dnl This would break Makefile dependencies. +dnl if cmp -s $ac_file conftest.out 2>/dev/null; then +dnl echo "$ac_file is unchanged" +dnl rm -f conftest.out +dnl else +dnl rm -f $ac_file +dnl mv conftest.out $ac_file +dnl fi +fi; done +rm -f conftest.s* +]) + +dnl Create the config.h files from the config.h.in files. +dnl This is a subroutine of AC_OUTPUT. It is called inside a quoted +dnl here document whose contents are going into config.status. +dnl AC_OUTPUT_HEADER(HEADER-FILE...) +define(AC_OUTPUT_HEADER, +[changequote(<<, >>)dnl +# These sed commands are passed to sed as "A NAME B NAME C VALUE D", where +# NAME is the cpp macro being defined and VALUE is the value it is being given. +# +# ac_d sets the value in "#define NAME VALUE" lines. +ac_dA='s%^\([ ]*\)#\([ ]*define[ ][ ]*\)' +ac_dB='\([ ][ ]*\)[^ ]*%\1#\2' +ac_dC='\3' +ac_dD='%g' +# ac_u turns "#undef NAME" with trailing blanks into "#define NAME VALUE". 
+ac_uA='s%^\([ ]*\)#\([ ]*\)undef\([ ][ ]*\)' +ac_uB='\([ ]\)%\1#\2define\3' +ac_uC=' ' +ac_uD='\4%g' +# ac_e turns "#undef NAME" without trailing blanks into "#define NAME VALUE". +ac_eA='s%^\([ ]*\)#\([ ]*\)undef\([ ][ ]*\)' +ac_eB='<<$>>%\1#\2define\3' +ac_eC=' ' +ac_eD='%g' +changequote([, ])dnl + +if test "${CONFIG_HEADERS+set}" != set; then +EOF +dnl Support passing AC_CONFIG_HEADER a value containing shell variables. +cat >> $CONFIG_STATUS <> $CONFIG_STATUS <<\EOF +fi +for ac_file in .. $CONFIG_HEADERS; do if test "x$ac_file" != x..; then +changequote(, )dnl + # Support "outfile[:infile[:infile...]]", defaulting infile="outfile.in". + case "$ac_file" in + *:*) ac_file_in=`echo "$ac_file"|sed 's%[^:]*:%%'` + ac_file=`echo "$ac_file"|sed 's%:.*%%'` ;; + *) ac_file_in="${ac_file}.in" ;; + esac +changequote([, ])dnl + + echo creating $ac_file + + rm -f conftest.frag conftest.in conftest.out + ac_file_inputs=`echo $ac_file_in|sed -e "s%^%$ac_given_srcdir/%" -e "s%:% $ac_given_srcdir/%g"` + cat $ac_file_inputs > conftest.in + +EOF + +# Transform confdefs.h into a sed script conftest.vals that substitutes +# the proper values into config.h.in to produce config.h. And first: +# Protect against being on the right side of a sed subst in config.status. +# Protect against being in an unquoted here document in config.status. +rm -f conftest.vals +dnl Using a here document instead of a string reduces the quoting nightmare. +dnl Putting comments in sed scripts is not portable. +cat > conftest.hdr <<\EOF +changequote(<<, >>)dnl +s/[\\&%]/\\&/g +s%[\\$`]%\\&%g +s%<<#define>> \([A-Za-z_][A-Za-z0-9_]*\) *\(.*\)%${ac_dA}\1${ac_dB}\1${ac_dC}\2${ac_dD}%gp +s%ac_d%ac_u%gp +s%ac_u%ac_e%gp +changequote([, ])dnl +EOF +sed -n -f conftest.hdr confdefs.h > conftest.vals +rm -f conftest.hdr + +# This sed command replaces #undef with comments. This is necessary, for +# example, in the case of _POSIX_SOURCE, which is predefined and required +# on some systems where configure will not decide to define it. +cat >> conftest.vals <<\EOF +changequote(, )dnl +s%^[ ]*#[ ]*undef[ ][ ]*[a-zA-Z_][a-zA-Z_0-9]*%/* & */% +changequote([, ])dnl +EOF + +# Break up conftest.vals because some shells have a limit on +# the size of here documents, and old seds have small limits too. + +rm -f conftest.tail +while : +do + ac_lines=`grep -c . conftest.vals` + # grep -c gives empty output for an empty file on some AIX systems. + if test -z "$ac_lines" || test "$ac_lines" -eq 0; then break; fi + # Write a limited-size here document to conftest.frag. + echo ' cat > conftest.frag <> $CONFIG_STATUS + sed ${ac_max_here_lines}q conftest.vals >> $CONFIG_STATUS + echo 'CEOF + sed -f conftest.frag conftest.in > conftest.out + rm -f conftest.in + mv conftest.out conftest.in +' >> $CONFIG_STATUS + sed 1,${ac_max_here_lines}d conftest.vals > conftest.tail + rm -f conftest.vals + mv conftest.tail conftest.vals +done +rm -f conftest.vals + +dnl Now back to your regularly scheduled config.status. +cat >> $CONFIG_STATUS <<\EOF + rm -f conftest.frag conftest.h + echo "/* $ac_file. Generated automatically by configure. */" > conftest.h + cat conftest.in >> conftest.h + rm -f conftest.in + if cmp -s $ac_file conftest.h 2>/dev/null; then + echo "$ac_file is unchanged" + rm -f conftest.h + else + # Remove last slash and all that follows it. Not all systems have dirname. + changequote(, )dnl + ac_dir=`echo $ac_file|sed 's%/[^/][^/]*$%%'` + changequote([, ])dnl + if test "$ac_dir" != "$ac_file" && test "$ac_dir" != .; then + # The file is in a subdirectory. 
+ test ! -d "$ac_dir" && mkdir "$ac_dir" + fi + rm -f $ac_file + mv conftest.h $ac_file + fi +fi; done + +]) + +dnl This is a subroutine of AC_OUTPUT. It is called inside a quoted +dnl here document whose contents are going into config.status. +dnl AC_OUTPUT_LINKS(SOURCE..., DEST...) +define(AC_OUTPUT_LINKS, +[EOF + +cat >> $CONFIG_STATUS <> $CONFIG_STATUS <<\EOF +srcdir=$ac_given_srcdir +while test -n "$ac_sources"; do + set $ac_dests; ac_dest=[$]1; shift; ac_dests=[$]* + set $ac_sources; ac_source=[$]1; shift; ac_sources=[$]* + + echo "linking $srcdir/$ac_source to $ac_dest" + + if test ! -r $srcdir/$ac_source; then + AC_MSG_ERROR($srcdir/$ac_source: File not found) + fi + rm -f $ac_dest + + # Make relative symlinks. + # Remove last slash and all that follows it. Not all systems have dirname. +changequote(, )dnl + ac_dest_dir=`echo $ac_dest|sed 's%/[^/][^/]*$%%'` +changequote([, ])dnl + if test "$ac_dest_dir" != "$ac_dest" && test "$ac_dest_dir" != .; then + # The dest file is in a subdirectory. + test ! -d "$ac_dest_dir" && mkdir "$ac_dest_dir" + ac_dest_dir_suffix="/`echo $ac_dest_dir|sed 's%^\./%%'`" + # A "../" for each directory in $ac_dest_dir_suffix. +changequote(, )dnl + ac_dots=`echo $ac_dest_dir_suffix|sed 's%/[^/]*%../%g'` +changequote([, ])dnl + else + ac_dest_dir_suffix= ac_dots= + fi + + case "$srcdir" in +changequote(, )dnl + [/$]*) ac_rel_source="$srcdir/$ac_source" ;; +changequote([, ])dnl + *) ac_rel_source="$ac_dots$srcdir/$ac_source" ;; + esac + + # Make a symlink if possible; otherwise try a hard link. + if ln -s $ac_rel_source $ac_dest 2>/dev/null || + ln $srcdir/$ac_source $ac_dest; then : + else + AC_MSG_ERROR(can not link $ac_dest to $srcdir/$ac_source) + fi +done +]) + +dnl This is a subroutine of AC_OUTPUT. +dnl It is called after running config.status. +dnl AC_OUTPUT_SUBDIRS(DIRECTORY...) +define(AC_OUTPUT_SUBDIRS, +[ +if test "$no_recursion" != yes; then + + # Remove --cache-file and --srcdir arguments so they do not pile up. + ac_sub_configure_args= + ac_prev= + for ac_arg in $ac_configure_args; do + if test -n "$ac_prev"; then + ac_prev= + continue + fi + case "$ac_arg" in + -cache-file | --cache-file | --cache-fil | --cache-fi \ + | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c) + ac_prev=cache_file ;; + -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \ + | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*) + ;; + -srcdir | --srcdir | --srcdi | --srcd | --src | --sr) + ac_prev=srcdir ;; + -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*) + ;; + *) ac_sub_configure_args="$ac_sub_configure_args $ac_arg" ;; + esac + done + + for ac_config_dir in $1; do + + # Do not complain, so a configure script can configure whichever + # parts of a large source tree are present. + if test ! -d $srcdir/$ac_config_dir; then + continue + fi + + echo configuring in $ac_config_dir + + case "$srcdir" in + .) ;; + *) + if test -d ./$ac_config_dir || mkdir ./$ac_config_dir; then :; + else + AC_MSG_ERROR(can not create `pwd`/$ac_config_dir) + fi + ;; + esac + + ac_popdir=`pwd` + cd $ac_config_dir + +changequote(, )dnl + # A "../" for each directory in /$ac_config_dir. + ac_dots=`echo $ac_config_dir|sed -e 's%^\./%%' -e 's%[^/]$%&/%' -e 's%[^/]*/%../%g'` +changequote([, ])dnl + + case "$srcdir" in + .) # No --srcdir option. We are building in place. + ac_sub_srcdir=$srcdir ;; + /*) # Absolute path. + ac_sub_srcdir=$srcdir/$ac_config_dir ;; + *) # Relative path. 
+ ac_sub_srcdir=$ac_dots$srcdir/$ac_config_dir ;; + esac + + # Check for guested configure; otherwise get Cygnus style configure. + if test -f $ac_sub_srcdir/configure; then + ac_sub_configure=$ac_sub_srcdir/configure + elif test -f $ac_sub_srcdir/configure.in; then + ac_sub_configure=$ac_configure + else + AC_MSG_WARN(no configuration information is in $ac_config_dir) + ac_sub_configure= + fi + + # The recursion is here. + if test -n "$ac_sub_configure"; then + + # Make the cache file name correct relative to the subdirectory. + case "$cache_file" in + /*) ac_sub_cache_file=$cache_file ;; + *) # Relative path. + ac_sub_cache_file="$ac_dots$cache_file" ;; + esac +ifdef([AC_PROVIDE_AC_PROG_INSTALL], + [ case "$ac_given_INSTALL" in +changequote(, )dnl + [/$]*) INSTALL="$ac_given_INSTALL" ;; +changequote([, ])dnl + *) INSTALL="$ac_dots$ac_given_INSTALL" ;; + esac +])dnl + + echo "[running ${CONFIG_SHELL-/bin/sh} $ac_sub_configure $ac_sub_configure_args --cache-file=$ac_sub_cache_file] --srcdir=$ac_sub_srcdir" + # The eval makes quoting arguments work. + if eval ${CONFIG_SHELL-/bin/sh} $ac_sub_configure $ac_sub_configure_args --cache-file=$ac_sub_cache_file --srcdir=$ac_sub_srcdir + then : + else + AC_MSG_ERROR($ac_sub_configure failed for $ac_config_dir) + fi + fi + + cd $ac_popdir + done +fi +]) diff --git a/build/autoconf/acoldnames.m4 b/build/autoconf/acoldnames.m4 new file mode 100644 index 0000000000..d31cdd754f --- /dev/null +++ b/build/autoconf/acoldnames.m4 @@ -0,0 +1,80 @@ +dnl Map old names of Autoconf macros to new regularized names. +dnl This file is part of Autoconf. +dnl Copyright (C) 1994 Free Software Foundation, Inc. +dnl +dnl This program is free software; you can redistribute it and/or modify +dnl it under the terms of the GNU General Public License as published by +dnl the Free Software Foundation; either version 2, or (at your option) +dnl any later version. +dnl +dnl This program is distributed in the hope that it will be useful, +dnl but WITHOUT ANY WARRANTY; without even the implied warranty of +dnl MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +dnl GNU General Public License for more details. +dnl +dnl You should have received a copy of the GNU General Public License +dnl along with this program; if not, write to the Free Software +dnl Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA +dnl 02111-1307, USA. +dnl +dnl General macros. +dnl +define(AC_WARN, [indir([AC_MSG_WARN], $@)])dnl +define(AC_ERROR, [indir([AC_MSG_ERROR], $@)])dnl +AC_DEFUN(AC_PROGRAM_CHECK, [indir([AC_CHECK_PROG], $@)])dnl +AC_DEFUN(AC_PROGRAM_PATH, [indir([AC_PATH_PROG], $@)])dnl +AC_DEFUN(AC_PROGRAMS_CHECK, [indir([AC_CHECK_PROGS], $@)])dnl +AC_DEFUN(AC_PROGRAMS_PATH, [indir([AC_PATH_PROGS], $@)])dnl +AC_DEFUN(AC_PREFIX, [indir([AC_PREFIX_PROGRAM], $@)])dnl +AC_DEFUN(AC_HEADER_EGREP, [indir([AC_EGREP_HEADER], $@)])dnl +AC_DEFUN(AC_PROGRAM_EGREP, [indir([AC_EGREP_CPP], $@)])dnl +AC_DEFUN(AC_TEST_PROGRAM, [indir([AC_TRY_RUN], $@)])dnl +AC_DEFUN(AC_TEST_CPP, [indir([AC_TRY_CPP], $@)])dnl +AC_DEFUN(AC_HEADER_CHECK, [indir([AC_CHECK_HEADER], $@)])dnl +AC_DEFUN(AC_FUNC_CHECK, [indir([AC_CHECK_FUNC], $@)])dnl +AC_DEFUN(AC_HAVE_FUNCS, [indir([AC_CHECK_FUNCS], $@)])dnl +AC_DEFUN(AC_HAVE_HEADERS, [indir([AC_CHECK_HEADERS], $@)])dnl +AC_DEFUN(AC_SIZEOF_TYPE, [indir([AC_CHECK_SIZEOF], $@)])dnl +dnl +dnl Specific macros. 
+dnl +AC_DEFUN(AC_GCC_TRADITIONAL, [indir([AC_PROG_GCC_TRADITIONAL])])dnl +AC_DEFUN(AC_MINUS_C_MINUS_O, [indir([AC_PROG_CC_C_O])])dnl +AC_DEFUN(AC_SET_MAKE, [indir([AC_PROG_MAKE_SET])])dnl +AC_DEFUN(AC_YYTEXT_POINTER, [indir([AC_DECL_YYTEXT])])dnl +AC_DEFUN(AC_LN_S, [indir([AC_PROG_LN_S])])dnl +AC_DEFUN(AC_STDC_HEADERS, [indir([AC_HEADER_STDC])])dnl +AC_DEFUN(AC_MAJOR_HEADER, [indir([AC_HEADER_MAJOR])])dnl +AC_DEFUN(AC_STAT_MACROS_BROKEN, [indir([AC_HEADER_STAT])])dnl +AC_DEFUN(AC_SYS_SIGLIST_DECLARED, [indir([AC_DECL_SYS_SIGLIST])])dnl +AC_DEFUN(AC_GETGROUPS_T, [indir([AC_TYPE_GETGROUPS])])dnl +AC_DEFUN(AC_UID_T, [indir([AC_TYPE_UID_T])])dnl +AC_DEFUN(AC_SIZE_T, [indir([AC_TYPE_SIZE_T])])dnl +AC_DEFUN(AC_PID_T, [indir([AC_TYPE_PID_T])])dnl +AC_DEFUN(AC_OFF_T, [indir([AC_TYPE_OFF_T])])dnl +AC_DEFUN(AC_MODE_T, [indir([AC_TYPE_MODE_T])])dnl +AC_DEFUN(AC_RETSIGTYPE, [indir([AC_TYPE_SIGNAL])])dnl +AC_DEFUN(AC_MMAP, [indir([AC_FUNC_MMAP])])dnl +AC_DEFUN(AC_VPRINTF, [indir([AC_FUNC_VPRINTF])])dnl +AC_DEFUN(AC_VFORK, [indir([AC_FUNC_VFORK])])dnl +AC_DEFUN(AC_WAIT3, [indir([AC_FUNC_WAIT3])])dnl +AC_DEFUN(AC_ALLOCA, [indir([AC_FUNC_ALLOCA])])dnl +AC_DEFUN(AC_GETLOADAVG, [indir([AC_FUNC_GETLOADAVG])])dnl +AC_DEFUN(AC_UTIME_NULL, [indir([AC_FUNC_UTIME_NULL])])dnl +AC_DEFUN(AC_STRCOLL, [indir([AC_FUNC_STRCOLL])])dnl +AC_DEFUN(AC_SETVBUF_REVERSED, [indir([AC_FUNC_SETVBUF_REVERSED])])dnl +AC_DEFUN(AC_TIME_WITH_SYS_TIME, [indir([AC_HEADER_TIME])])dnl +AC_DEFUN(AC_TIMEZONE, [indir([AC_STRUCT_TIMEZONE])])dnl +AC_DEFUN(AC_ST_BLOCKS, [indir([AC_STRUCT_ST_BLOCKS])])dnl +AC_DEFUN(AC_ST_BLKSIZE, [indir([AC_STRUCT_ST_BLKSIZE])])dnl +AC_DEFUN(AC_ST_RDEV, [indir([AC_STRUCT_ST_RDEV])])dnl +AC_DEFUN(AC_CROSS_CHECK, [indir([AC_C_CROSS])])dnl +AC_DEFUN(AC_CHAR_UNSIGNED, [indir([AC_C_CHAR_UNSIGNED])])dnl +AC_DEFUN(AC_LONG_DOUBLE, [indir([AC_C_LONG_DOUBLE])])dnl +AC_DEFUN(AC_WORDS_BIGENDIAN, [indir([AC_C_BIGENDIAN])])dnl +AC_DEFUN(AC_INLINE, [indir([AC_C_INLINE])])dnl +AC_DEFUN(AC_CONST, [indir([AC_C_CONST])])dnl +AC_DEFUN(AC_LONG_FILE_NAMES, [indir([AC_SYS_LONG_FILE_NAMES])])dnl +AC_DEFUN(AC_RESTARTABLE_SYSCALLS, [indir([AC_SYS_RESTARTABLE_SYSCALLS])])dnl +AC_DEFUN(AC_FIND_X, [indir([AC_PATH_X])])dnl +AC_DEFUN(AC_FIND_XTRA, [indir([AC_PATH_XTRA])])dnl diff --git a/build/autoconf/acspecific.m4 b/build/autoconf/acspecific.m4 new file mode 100644 index 0000000000..5c6f1c9e5f --- /dev/null +++ b/build/autoconf/acspecific.m4 @@ -0,0 +1,2758 @@ +dnl Macros that test for specific features. +dnl This file is part of Autoconf. +dnl Copyright (C) 1992, 93, 94, 95, 96, 1998 Free Software Foundation, Inc. +dnl +dnl This program is free software; you can redistribute it and/or modify +dnl it under the terms of the GNU General Public License as published by +dnl the Free Software Foundation; either version 2, or (at your option) +dnl any later version. +dnl +dnl This program is distributed in the hope that it will be useful, +dnl but WITHOUT ANY WARRANTY; without even the implied warranty of +dnl MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +dnl GNU General Public License for more details. +dnl +dnl You should have received a copy of the GNU General Public License +dnl along with this program; if not, write to the Free Software +dnl Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA +dnl 02111-1307, USA. +dnl +dnl As a special exception, the Free Software Foundation gives unlimited +dnl permission to copy, distribute and modify the configure scripts that +dnl are the output of Autoconf. 
You need not follow the terms of the GNU +dnl General Public License when using or distributing such scripts, even +dnl though portions of the text of Autoconf appear in them. The GNU +dnl General Public License (GPL) does govern all other use of the material +dnl that constitutes the Autoconf program. +dnl +dnl Certain portions of the Autoconf source text are designed to be copied +dnl (in certain cases, depending on the input) into the output of +dnl Autoconf. We call these the "data" portions. The rest of the Autoconf +dnl source text consists of comments plus executable code that decides which +dnl of the data portions to output in any given case. We call these +dnl comments and executable code the "non-data" portions. Autoconf never +dnl copies any of the non-data portions into its output. +dnl +dnl This special exception to the GPL applies to versions of Autoconf +dnl released by the Free Software Foundation. When you make and +dnl distribute a modified version of Autoconf, you may extend this special +dnl exception to the GPL to apply to your modified version as well, *unless* +dnl your modified version has the potential to copy into its output some +dnl of the text that was the non-data portion of the version that you started +dnl with. (In other words, unless your change moves or copies text from +dnl the non-data portions to the data portions.) If your modification has +dnl such potential, you must delete any notice of this special exception +dnl to the GPL from your modified version. +dnl +dnl Written by David MacKenzie, with help from +dnl Franc,ois Pinard, Karl Berry, Richard Pixley, Ian Lance Taylor, +dnl Roland McGrath, Noah Friedman, david d zuhn, and many others. + + +dnl ### Checks for programs + + +dnl Check whether to use -n, \c, or newline-tab to separate +dnl checking messages from result messages. +dnl Idea borrowed from dist 3.0. +dnl Internal use only. +AC_DEFUN(AC_PROG_ECHO_N, +[if (echo "testing\c"; echo 1,2,3) | grep c >/dev/null; then + # Stardent Vistra SVR4 grep lacks -e, says ghazi@caip.rutgers.edu. + if (echo -n testing; echo 1,2,3) | sed s/-n/xn/ | grep xn >/dev/null; then + ac_n= ac_c=' +' ac_t=' ' + else + ac_n=-n ac_c= ac_t= + fi +else + ac_n= ac_c='\c' ac_t= +fi +]) + +AC_DEFUN(AC_PROG_CC, +[AC_BEFORE([$0], [AC_PROG_CPP])dnl +AC_CHECK_PROG(CC, gcc, gcc) +if test -z "$CC"; then + AC_CHECK_PROG(CC, cc, cc, , , /usr/ucb/cc) + if test -z "$CC"; then + case "`uname -s`" in + *win32* | *WIN32*) + AC_CHECK_PROG(CC, cl, cl) ;; + esac + fi + test -z "$CC" && AC_MSG_ERROR([no acceptable cc found in \$PATH]) +fi + +AC_PROG_CC_WORKS +AC_PROG_CC_GNU + +if test $ac_cv_prog_gcc = yes; then + GCC=yes +else + GCC= +fi + +dnl Check whether -g works, even if CFLAGS is set, in case the package +dnl plays around with CFLAGS (such as to build both debugging and +dnl normal versions of a library), tasteless as that idea is. 
+ac_test_CFLAGS="${CFLAGS+set}" +ac_save_CFLAGS="$CFLAGS" +CFLAGS= +AC_PROG_CC_G +if test "$ac_test_CFLAGS" = set; then + CFLAGS="$ac_save_CFLAGS" +elif test $ac_cv_prog_cc_g = yes; then + if test "$GCC" = yes; then + CFLAGS="-g -O2" + else + CFLAGS="-g" + fi +else + if test "$GCC" = yes; then + CFLAGS="-O2" + else + CFLAGS= + fi +fi +]) + +AC_DEFUN(AC_PROG_CXX, +[AC_BEFORE([$0], [AC_PROG_CXXCPP])dnl +AC_CHECK_PROGS(CXX, $CCC c++ g++ gcc CC cxx cc++ cl, gcc) + +AC_PROG_CXX_WORKS +AC_PROG_CXX_GNU + +if test $ac_cv_prog_gxx = yes; then + GXX=yes +else + GXX= +fi + +dnl Check whether -g works, even if CXXFLAGS is set, in case the package +dnl plays around with CXXFLAGS (such as to build both debugging and +dnl normal versions of a library), tasteless as that idea is. +ac_test_CXXFLAGS="${CXXFLAGS+set}" +ac_save_CXXFLAGS="$CXXFLAGS" +CXXFLAGS= +AC_PROG_CXX_G +if test "$ac_test_CXXFLAGS" = set; then + CXXFLAGS="$ac_save_CXXFLAGS" +elif test $ac_cv_prog_cxx_g = yes; then + if test "$GXX" = yes; then + CXXFLAGS="-g -O2" + else + CXXFLAGS="-g" + fi +else + if test "$GXX" = yes; then + CXXFLAGS="-O2" + else + CXXFLAGS= + fi +fi +]) + +dnl Determine a Fortran 77 compiler to use. If `F77' is not already set +dnl in the environment, check for `g77', `f77' and `f2c', in that order. +dnl Set the output variable `F77' to the name of the compiler found. +dnl +dnl If using `g77' (the GNU Fortran 77 compiler), then `AC_PROG_F77' +dnl will set the shell variable `G77' to `yes', and empty otherwise. If +dnl the output variable `FFLAGS' was not already set in the environment, +dnl then set it to `-g -02' for `g77' (or `-O2' where `g77' does not +dnl accept `-g'). Otherwise, set `FFLAGS' to `-g' for all other Fortran +dnl 77 compilers. +dnl +dnl AC_PROG_F77() +AC_DEFUN(AC_PROG_F77, +[AC_BEFORE([$0], [AC_PROG_CPP])dnl +if test -z "$F77"; then + AC_CHECK_PROGS(F77, g77 f77 f2c) + test -z "$F77" && AC_MSG_ERROR([no acceptable Fortran 77 compiler found in \$PATH]) +fi + +AC_PROG_F77_WORKS +AC_PROG_F77_GNU + +if test $ac_cv_prog_g77 = yes; then + G77=yes +dnl Check whether -g works, even if FFLAGS is set, in case the package +dnl plays around with FFLAGS (such as to build both debugging and +dnl normal versions of a library), tasteless as that idea is. 
+ ac_test_FFLAGS="${FFLAGS+set}" + ac_save_FFLAGS="$FFLAGS" + FFLAGS= + AC_PROG_F77_G + if test "$ac_test_FFLAGS" = set; then + FFLAGS="$ac_save_FFLAGS" + elif test $ac_cv_prog_f77_g = yes; then + FFLAGS="-g -O2" + else + FFLAGS="-O2" + fi +else + G77= + test "${FFLAGS+set}" = set || FFLAGS="-g" +fi +]) + +AC_DEFUN(AC_PROG_CC_WORKS, +[AC_MSG_CHECKING([whether the C compiler ($CC $CFLAGS $LDFLAGS) works]) +AC_LANG_SAVE +AC_LANG_C +AC_TRY_COMPILER([main(){return(0);}], ac_cv_prog_cc_works, ac_cv_prog_cc_cross) +AC_LANG_RESTORE +AC_MSG_RESULT($ac_cv_prog_cc_works) +if test $ac_cv_prog_cc_works = no; then + AC_MSG_ERROR([installation or configuration problem: C compiler cannot create executables.]) +fi +AC_MSG_CHECKING([whether the C compiler ($CC $CFLAGS $LDFLAGS) is a cross-compiler]) +AC_MSG_RESULT($ac_cv_prog_cc_cross) +cross_compiling=$ac_cv_prog_cc_cross +]) + +AC_DEFUN(AC_PROG_CXX_WORKS, +[AC_MSG_CHECKING([whether the C++ compiler ($CXX $CXXFLAGS $LDFLAGS) works]) +AC_LANG_SAVE +AC_LANG_CPLUSPLUS +AC_TRY_COMPILER([int main(){return(0);}], ac_cv_prog_cxx_works, ac_cv_prog_cxx_cross) +AC_LANG_RESTORE +AC_MSG_RESULT($ac_cv_prog_cxx_works) +if test $ac_cv_prog_cxx_works = no; then + AC_MSG_ERROR([installation or configuration problem: C++ compiler cannot create executables.]) +fi +AC_MSG_CHECKING([whether the C++ compiler ($CXX $CXXFLAGS $LDFLAGS) is a cross-compiler]) +AC_MSG_RESULT($ac_cv_prog_cxx_cross) +cross_compiling=$ac_cv_prog_cxx_cross +]) + +dnl Test whether the Fortran 77 compiler can compile and link a trivial +dnl Fortran program. Also, test whether the Fortran 77 compiler is a +dnl cross-compiler (which may realistically be the case if the Fortran +dnl compiler is `g77'). +dnl +dnl AC_PROG_F77_WORKS() +AC_DEFUN(AC_PROG_F77_WORKS, +[AC_MSG_CHECKING([whether the Fortran 77 compiler ($F77 $FFLAGS $LDFLAGS) works]) +AC_LANG_SAVE +AC_LANG_FORTRAN77 +AC_TRY_COMPILER(dnl +[ program conftest + end +], ac_cv_prog_f77_works, ac_cv_prog_f77_cross) +AC_LANG_RESTORE +AC_MSG_RESULT($ac_cv_prog_f77_works) +if test $ac_cv_prog_f77_works = no; then + AC_MSG_ERROR([installation or configuration problem: Fortran 77 compiler cannot create executables.]) +fi +AC_MSG_CHECKING([whether the Fortran 77 compiler ($F77 $FFLAGS $LDFLAGS) is a cross-compiler]) +AC_MSG_RESULT($ac_cv_prog_f77_cross) +cross_compiling=$ac_cv_prog_f77_cross +]) + +AC_DEFUN(AC_PROG_CC_GNU, +[AC_CACHE_CHECK(whether we are using GNU C, ac_cv_prog_gcc, +[dnl The semicolon is to pacify NeXT's syntax-checking cpp. +cat > conftest.c </dev/null 2>&1; then + ac_cv_prog_gcc=yes +else + ac_cv_prog_gcc=no +fi])]) + +AC_DEFUN(AC_PROG_CXX_GNU, +[AC_CACHE_CHECK(whether we are using GNU C++, ac_cv_prog_gxx, +[dnl The semicolon is to pacify NeXT's syntax-checking cpp. +cat > conftest.C </dev/null 2>&1; then + ac_cv_prog_gxx=yes +else + ac_cv_prog_gxx=no +fi])]) + +dnl Test whether for Fortran 77 compiler is `g77' (the GNU Fortran 77 +dnl Compiler). This test depends on whether the Fortran 77 compiler can +dnl do CPP pre-processing. 
+dnl +dnl AC_PROG_F77_GNU() +AC_DEFUN(AC_PROG_F77_GNU, +[AC_CACHE_CHECK(whether we are using GNU Fortran 77, ac_cv_prog_g77, +[cat > conftest.fpp </dev/null 2>&1; then + ac_cv_prog_g77=yes +else + ac_cv_prog_g77=no +fi])]) + +AC_DEFUN(AC_PROG_CC_G, +[AC_CACHE_CHECK(whether ${CC-cc} accepts -g, ac_cv_prog_cc_g, +[echo 'void f(){}' > conftest.c +if test -z "`${CC-cc} -g -c conftest.c 2>&1`"; then + ac_cv_prog_cc_g=yes +else + ac_cv_prog_cc_g=no +fi +rm -f conftest* +])]) + +AC_DEFUN(AC_PROG_CXX_G, +[AC_CACHE_CHECK(whether ${CXX-g++} accepts -g, ac_cv_prog_cxx_g, +[echo 'void f(){}' > conftest.cc +if test -z "`${CXX-g++} -g -c conftest.cc 2>&1`"; then + ac_cv_prog_cxx_g=yes +else + ac_cv_prog_cxx_g=no +fi +rm -f conftest* +])]) + +dnl Test whether the Fortran 77 compiler can accept the `-g' option to +dnl enable debugging. +dnl +dnl AC_PROG_F77_G() +AC_DEFUN(AC_PROG_F77_G, +[AC_CACHE_CHECK(whether $F77 accepts -g, ac_cv_prog_f77_g, +[cat > conftest.f << EOF + program conftest + end +EOF +if test -z "`$F77 -g -c conftest.f 2>&1`"; then + ac_cv_prog_f77_g=yes +else + ac_cv_prog_f77_g=no +fi +rm -f conftest* +])]) + +AC_DEFUN(AC_PROG_GCC_TRADITIONAL, +[AC_REQUIRE([AC_PROG_CC])dnl +AC_REQUIRE([AC_PROG_CPP])dnl +if test $ac_cv_prog_gcc = yes; then + AC_CACHE_CHECK(whether ${CC-cc} needs -traditional, + ac_cv_prog_gcc_traditional, +[ ac_pattern="Autoconf.*'x'" + AC_EGREP_CPP($ac_pattern, [#include +Autoconf TIOCGETP], + ac_cv_prog_gcc_traditional=yes, ac_cv_prog_gcc_traditional=no) + + if test $ac_cv_prog_gcc_traditional = no; then + AC_EGREP_CPP($ac_pattern, [#include +Autoconf TCGETA], + ac_cv_prog_gcc_traditional=yes) + fi]) + if test $ac_cv_prog_gcc_traditional = yes; then + CC="$CC -traditional" + fi +fi +]) + +AC_DEFUN(AC_PROG_CC_C_O, +[if test "x$CC" != xcc; then + AC_MSG_CHECKING(whether $CC and cc understand -c and -o together) +else + AC_MSG_CHECKING(whether cc understands -c and -o together) +fi +set dummy $CC; ac_cc="`echo [$]2 | +changequote(, )dnl + sed -e 's/[^a-zA-Z0-9_]/_/g' -e 's/^[0-9]/_/'`" +changequote([, ])dnl +AC_CACHE_VAL(ac_cv_prog_cc_${ac_cc}_c_o, +[echo 'foo(){}' > conftest.c +# Make sure it works both with $CC and with simple cc. +# We do the test twice because some compilers refuse to overwrite an +# existing .o file with -o, though they will create one. +ac_try='${CC-cc} -c conftest.c -o conftest.o 1>&AC_FD_CC' +if AC_TRY_EVAL(ac_try) && + test -f conftest.o && AC_TRY_EVAL(ac_try); +then + eval ac_cv_prog_cc_${ac_cc}_c_o=yes + if test "x$CC" != xcc; then + # Test first that cc exists at all. + if AC_TRY_COMMAND(cc -c conftest.c 1>&AC_FD_CC); then + ac_try='cc -c conftest.c -o conftest.o 1>&AC_FD_CC' + if AC_TRY_EVAL(ac_try) && + test -f conftest.o && AC_TRY_EVAL(ac_try); + then + # cc works too. + : + else + # cc exists but doesn't like -o. + eval ac_cv_prog_cc_${ac_cc}_c_o=no + fi + fi + fi +else + eval ac_cv_prog_cc_${ac_cc}_c_o=no +fi +rm -f conftest* +])dnl +if eval "test \"`echo '$ac_cv_prog_cc_'${ac_cc}_c_o`\" = yes"; then + AC_MSG_RESULT(yes) +else + AC_MSG_RESULT(no) + AC_DEFINE(NO_MINUS_C_MINUS_O) +fi +]) + +dnl Test if the Fortran 77 compiler accepts the options `-c' and `-o' +dnl simultaneously, and define `F77_NO_MINUS_C_MINUS_O' if it does not. +dnl +dnl The usefulness of this macro is questionable, as I can't really see +dnl why anyone would use it. The only reason I include it is for +dnl completeness, since a similar test exists for the C compiler. 
+dnl +dnl AC_PROG_F77_C_O +AC_DEFUN(AC_PROG_F77_C_O, +[AC_BEFORE([$0], [AC_PROG_F77])dnl +AC_MSG_CHECKING(whether $F77 understand -c and -o together) +set dummy $F77; ac_f77="`echo [$]2 | +changequote(, )dnl +sed -e 's/[^a-zA-Z0-9_]/_/g' -e 's/^[0-9]/_/'`" +changequote([, ])dnl +AC_CACHE_VAL(ac_cv_prog_f77_${ac_f77}_c_o, +[cat > conftest.f << EOF + program conftest + end +EOF +# We do the `AC_TRY_EVAL' test twice because some compilers refuse to +# overwrite an existing `.o' file with `-o', although they will create +# one. +ac_try='$F77 $FFLAGS -c conftest.f -o conftest.o 1>&AC_FD_CC' +if AC_TRY_EVAL(ac_try) && test -f conftest.o && AC_TRY_EVAL(ac_try); then + eval ac_cv_prog_f77_${ac_f77}_c_o=yes +else + eval ac_cv_prog_f77_${ac_f77}_c_o=no +fi +rm -f conftest* +])dnl +if eval "test \"`echo '$ac_cv_prog_f77_'${ac_f77}_c_o`\" = yes"; then + AC_MSG_RESULT(yes) +else + AC_MSG_RESULT(no) + AC_DEFINE(F77_NO_MINUS_C_MINUS_O) +fi +]) + +dnl Define SET_MAKE to set ${MAKE} if make doesn't. +AC_DEFUN(AC_PROG_MAKE_SET, +[AC_MSG_CHECKING(whether ${MAKE-make} sets \${MAKE}) +set dummy ${MAKE-make}; ac_make=`echo "[$]2" | sed 'y%./+-%__p_%'` +AC_CACHE_VAL(ac_cv_prog_make_${ac_make}_set, +[cat > conftestmake <<\EOF +all: + @echo 'ac_maketemp="${MAKE}"' +EOF +changequote(, )dnl +# GNU make sometimes prints "make[1]: Entering...", which would confuse us. +eval `${MAKE-make} -f conftestmake 2>/dev/null | grep temp=` +changequote([, ])dnl +if test -n "$ac_maketemp"; then + eval ac_cv_prog_make_${ac_make}_set=yes +else + eval ac_cv_prog_make_${ac_make}_set=no +fi +rm -f conftestmake])dnl +if eval "test \"`echo '$ac_cv_prog_make_'${ac_make}_set`\" = yes"; then + AC_MSG_RESULT(yes) + SET_MAKE= +else + AC_MSG_RESULT(no) + SET_MAKE="MAKE=${MAKE-make}" +fi +AC_SUBST([SET_MAKE])dnl +]) + +AC_DEFUN(AC_PROG_RANLIB, +[AC_CHECK_PROG(RANLIB, ranlib, ranlib, :)]) + +dnl Check for mawk first since it's generally faster. +AC_DEFUN(AC_PROG_AWK, +[AC_CHECK_PROGS(AWK, mawk gawk nawk awk, )]) + +AC_DEFUN(AC_PROG_YACC, +[AC_CHECK_PROGS(YACC, 'bison -y' byacc, yacc)]) + +AC_DEFUN(AC_PROG_CPP, +[AC_MSG_CHECKING(how to run the C preprocessor) +# On Suns, sometimes $CPP names a directory. +if test -n "$CPP" && test -d "$CPP"; then + CPP= +fi +if test -z "$CPP"; then +AC_CACHE_VAL(ac_cv_prog_CPP, +[ # This must be in double quotes, not single quotes, because CPP may get + # substituted into the Makefile and "${CC-cc}" will confuse make. + CPP="${CC-cc} -E" + # On the NeXT, cc -E runs the code through the compiler's parser, + # not just through cpp. +dnl Use a header file that comes with gcc, so configuring glibc +dnl with a fresh cross-compiler works. + AC_TRY_CPP([#include +Syntax Error], , + CPP="${CC-cc} -E -traditional-cpp" + AC_TRY_CPP([#include +Syntax Error], , + CPP="${CC-cc} -nologo -E" + AC_TRY_CPP([#include +Syntax Error], , CPP=/lib/cpp))) + ac_cv_prog_CPP="$CPP"])dnl + CPP="$ac_cv_prog_CPP" +else + ac_cv_prog_CPP="$CPP" +fi +AC_MSG_RESULT($CPP) +AC_SUBST(CPP)dnl +]) + +AC_DEFUN(AC_PROG_CXXCPP, +[AC_MSG_CHECKING(how to run the C++ preprocessor) +if test -z "$CXXCPP"; then +AC_CACHE_VAL(ac_cv_prog_CXXCPP, +[AC_LANG_SAVE[]dnl +AC_LANG_CPLUSPLUS[]dnl + CXXCPP="${CXX-g++} -E" + AC_TRY_CPP([#include ], , CXXCPP=/lib/cpp) + ac_cv_prog_CXXCPP="$CXXCPP" +AC_LANG_RESTORE[]dnl +fi])dnl +CXXCPP="$ac_cv_prog_CXXCPP" +AC_MSG_RESULT($CXXCPP) +AC_SUBST(CXXCPP)dnl +]) + +dnl Require finding the C or C++ preprocessor, whichever is the +dnl current language. 
+AC_DEFUN(AC_REQUIRE_CPP, +[ifelse(AC_LANG, C, [AC_REQUIRE([AC_PROG_CPP])], [AC_REQUIRE([AC_PROG_CXXCPP])])]) + +AC_DEFUN(AC_PROG_LEX, +[AC_CHECK_PROG(LEX, flex, flex, lex) +if test -z "$LEXLIB" +then + case "$LEX" in + flex*) ac_lib=fl ;; + *) ac_lib=l ;; + esac + AC_CHECK_LIB($ac_lib, yywrap, LEXLIB="-l$ac_lib") +fi +AC_SUBST(LEXLIB)]) + +dnl Check if lex declares yytext as a char * by default, not a char[]. +undefine([AC_DECL_YYTEXT]) +AC_DEFUN(AC_DECL_YYTEXT, +[AC_REQUIRE_CPP()dnl +AC_REQUIRE([AC_PROG_LEX])dnl +AC_CACHE_CHECK(lex output file root, ac_cv_prog_lex_root, +[# The minimal lex program is just a single line: %%. But some broken lexes +# (Solaris, I think it was) want two %% lines, so accommodate them. +echo '%% +%%' | $LEX +if test -f lex.yy.c; then + ac_cv_prog_lex_root=lex.yy +elif test -f lexyy.c; then + ac_cv_prog_lex_root=lexyy +else + AC_MSG_ERROR(cannot find output from $LEX; giving up) +fi]) +LEX_OUTPUT_ROOT=$ac_cv_prog_lex_root +AC_SUBST(LEX_OUTPUT_ROOT)dnl + +AC_CACHE_CHECK(whether yytext is a pointer, ac_cv_prog_lex_yytext_pointer, +[# POSIX says lex can declare yytext either as a pointer or an array; the +# default is implementation-dependent. Figure out which it is, since +# not all implementations provide the %pointer and %array declarations. +ac_cv_prog_lex_yytext_pointer=no +echo 'extern char *yytext;' >>$LEX_OUTPUT_ROOT.c +ac_save_LIBS="$LIBS" +LIBS="$LIBS $LEXLIB" +AC_TRY_LINK(`cat $LEX_OUTPUT_ROOT.c`, , ac_cv_prog_lex_yytext_pointer=yes) +LIBS="$ac_save_LIBS" +rm -f "${LEX_OUTPUT_ROOT}.c" +]) +dnl +if test $ac_cv_prog_lex_yytext_pointer = yes; then + AC_DEFINE(YYTEXT_POINTER) +fi +]) + +AC_DEFUN(AC_PROG_INSTALL, +[AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT])dnl +# Find a good install program. We prefer a C program (faster), +# so one script is as good as another. But avoid the broken or +# incompatible versions: +# SysV /etc/install, /usr/sbin/install +# SunOS /usr/etc/install +# IRIX /sbin/install +# AIX /bin/install +# AIX 4 /usr/bin/installbsd, which doesn't work without a -g flag +# AFS /usr/afsws/bin/install, which mishandles nonexistent args +# SVR4 /usr/ucb/install, which tries to use the nonexistent group "staff" +# ./install, which can be erroneously created by make from ./install.sh. +AC_MSG_CHECKING(for a BSD compatible install) +if test -z "$INSTALL"; then +AC_CACHE_VAL(ac_cv_path_install, +[ IFS="${IFS= }"; ac_save_IFS="$IFS"; IFS=":" + for ac_dir in $PATH; do + # Account for people who put trailing slashes in PATH elements. + case "$ac_dir/" in + /|./|.//|/etc/*|/usr/sbin/*|/usr/etc/*|/sbin/*|/usr/afsws/bin/*|/usr/ucb/*) ;; + *) + # OSF1 and SCO ODT 3.0 have their own names for install. + # Don't use installbsd from OSF since it installs stuff as root + # by default. + for ac_prog in ginstall scoinst install; do + if test -f $ac_dir/$ac_prog; then + if test $ac_prog = install && + grep dspmsg $ac_dir/$ac_prog >/dev/null 2>&1; then + # AIX install. It has an incompatible calling convention. + : + else + ac_cv_path_install="$ac_dir/$ac_prog -c" + break 2 + fi + fi + done + ;; + esac + done + IFS="$ac_save_IFS" +])dnl + if test "${ac_cv_path_install+set}" = set; then + INSTALL="$ac_cv_path_install" + else + # As a last resort, use the slow shell script. We don't cache a + # path for INSTALL within a source directory, because that will + # break other packages using the cache if that directory is + # removed, or if the path is relative. 
+    INSTALL="$ac_install_sh"
+  fi
+fi
+dnl We do special magic for INSTALL instead of AC_SUBST, to get
+dnl relative paths right.
+AC_MSG_RESULT($INSTALL)
+
+# Use test -z because SunOS4 sh mishandles braces in ${var-val}.
+# It thinks the first close brace ends the variable substitution.
+test -z "$INSTALL_PROGRAM" && INSTALL_PROGRAM='${INSTALL}'
+AC_SUBST(INSTALL_PROGRAM)dnl
+
+test -z "$INSTALL_SCRIPT" && INSTALL_SCRIPT='${INSTALL_PROGRAM}'
+AC_SUBST(INSTALL_SCRIPT)dnl
+
+test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644'
+AC_SUBST(INSTALL_DATA)dnl
+])
+
+AC_DEFUN(AC_PROG_LN_S,
+[AC_MSG_CHECKING(whether ln -s works)
+AC_CACHE_VAL(ac_cv_prog_LN_S,
+[rm -f conftestdata
+if ln -s X conftestdata 2>/dev/null
+then
+  rm -f conftestdata
+  ac_cv_prog_LN_S="ln -s"
+else
+  ac_cv_prog_LN_S=ln
+fi])dnl
+LN_S="$ac_cv_prog_LN_S"
+if test "$ac_cv_prog_LN_S" = "ln -s"; then
+  AC_MSG_RESULT(yes)
+else
+  AC_MSG_RESULT(no)
+fi
+AC_SUBST(LN_S)dnl
+])
+
+define(AC_RSH,
+[errprint(__file__:__line__: [$0] has been removed; replace it with equivalent code
+)m4exit(4)])
+
+
+dnl ### Checks for header files
+
+
+AC_DEFUN(AC_HEADER_STDC,
+[AC_REQUIRE_CPP()dnl
+AC_CACHE_CHECK(for ANSI C header files, ac_cv_header_stdc,
+[AC_TRY_CPP([#include <stdlib.h>
+#include <stdarg.h>
+#include <string.h>
+#include <float.h>], ac_cv_header_stdc=yes, ac_cv_header_stdc=no)
+
+if test $ac_cv_header_stdc = yes; then
+  # SunOS 4.x string.h does not declare mem*, contrary to ANSI.
+AC_EGREP_HEADER(memchr, string.h, , ac_cv_header_stdc=no)
+fi
+
+if test $ac_cv_header_stdc = yes; then
+  # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI.
+AC_EGREP_HEADER(free, stdlib.h, , ac_cv_header_stdc=no)
+fi
+
+if test $ac_cv_header_stdc = yes; then
+  # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi.
+AC_TRY_RUN([#include <ctype.h>
+#define ISLOWER(c) ('a' <= (c) && (c) <= 'z')
+#define TOUPPER(c) (ISLOWER(c) ? 'A' + ((c) - 'a') : (c))
+#define XOR(e, f) (((e) && !(f)) || (!(e) && (f)))
+int main () { int i; for (i = 0; i < 256; i++)
+if (XOR (islower (i), ISLOWER (i)) || toupper (i) != TOUPPER (i)) exit(2);
+exit (0); }
+], , ac_cv_header_stdc=no, :)
+fi])
+if test $ac_cv_header_stdc = yes; then
+  AC_DEFINE(STDC_HEADERS)
+fi
+])
+
+AC_DEFUN(AC_UNISTD_H,
+[AC_OBSOLETE([$0], [; instead use AC_CHECK_HEADERS(unistd.h)])dnl
+AC_CHECK_HEADER(unistd.h, AC_DEFINE(HAVE_UNISTD_H))])
+
+AC_DEFUN(AC_USG,
+[AC_OBSOLETE([$0],
+  [; instead use AC_CHECK_HEADERS(string.h) and HAVE_STRING_H])dnl
+AC_MSG_CHECKING([for BSD string and memory functions])
+AC_TRY_LINK([#include <strings.h>], [rindex(0, 0); bzero(0, 0);],
+  [AC_MSG_RESULT(yes)], [AC_MSG_RESULT(no); AC_DEFINE(USG)])])
+
+
+dnl If memchr and the like aren't declared in <string.h>, include <memory.h>.
+dnl To avoid problems, don't check for gcc2 built-ins.
+AC_DEFUN(AC_MEMORY_H, +[AC_OBSOLETE([$0], [; instead use AC_CHECK_HEADERS(memory.h) and HAVE_MEMORY_H])dnl +AC_MSG_CHECKING(whether string.h declares mem functions) +AC_EGREP_HEADER(memchr, string.h, ac_found=yes, ac_found=no) +AC_MSG_RESULT($ac_found) +if test $ac_found = no; then + AC_CHECK_HEADER(memory.h, [AC_DEFINE(NEED_MEMORY_H)]) +fi +]) + +AC_DEFUN(AC_HEADER_MAJOR, +[AC_CACHE_CHECK(whether sys/types.h defines makedev, + ac_cv_header_sys_types_h_makedev, +[AC_TRY_LINK([#include ], [return makedev(0, 0);], + ac_cv_header_sys_types_h_makedev=yes, ac_cv_header_sys_types_h_makedev=no) +]) + +if test $ac_cv_header_sys_types_h_makedev = no; then +AC_CHECK_HEADER(sys/mkdev.h, [AC_DEFINE(MAJOR_IN_MKDEV)]) + + if test $ac_cv_header_sys_mkdev_h = no; then +AC_CHECK_HEADER(sys/sysmacros.h, [AC_DEFINE(MAJOR_IN_SYSMACROS)]) + fi +fi +]) + +AC_DEFUN(AC_HEADER_DIRENT, +[ac_header_dirent=no +AC_CHECK_HEADERS_DIRENT(dirent.h sys/ndir.h sys/dir.h ndir.h, + [ac_header_dirent=$ac_hdr; break]) +# Two versions of opendir et al. are in -ldir and -lx on SCO Xenix. +if test $ac_header_dirent = dirent.h; then +AC_CHECK_LIB(dir, opendir, LIBS="$LIBS -ldir") +else +AC_CHECK_LIB(x, opendir, LIBS="$LIBS -lx") +fi +]) + +dnl Like AC_CHECK_HEADER, except also make sure that HEADER-FILE +dnl defines the type `DIR'. dirent.h on NextStep 3.2 doesn't. +dnl AC_CHECK_HEADER_DIRENT(HEADER-FILE, ACTION-IF-FOUND) +AC_DEFUN(AC_CHECK_HEADER_DIRENT, +[ac_safe=`echo "$1" | sed 'y%./+-%__p_%'` +AC_MSG_CHECKING([for $1 that defines DIR]) +AC_CACHE_VAL(ac_cv_header_dirent_$ac_safe, +[AC_TRY_COMPILE([#include +#include <$1>], [DIR *dirp = 0;], + eval "ac_cv_header_dirent_$ac_safe=yes", + eval "ac_cv_header_dirent_$ac_safe=no")])dnl +if eval "test \"`echo '$ac_cv_header_dirent_'$ac_safe`\" = yes"; then + AC_MSG_RESULT(yes) + $2 +else + AC_MSG_RESULT(no) +fi +]) + +dnl Like AC_CHECK_HEADERS, except succeed only for a HEADER-FILE that +dnl defines `DIR'. +dnl AC_CHECK_HEADERS_DIRENT(HEADER-FILE... [, ACTION]) +define(AC_CHECK_HEADERS_DIRENT, +[for ac_hdr in $1 +do +AC_CHECK_HEADER_DIRENT($ac_hdr, +[changequote(, )dnl + ac_tr_hdr=HAVE_`echo $ac_hdr | sed 'y%abcdefghijklmnopqrstuvwxyz./-%ABCDEFGHIJKLMNOPQRSTUVWXYZ___%'` +changequote([, ])dnl + AC_DEFINE_UNQUOTED($ac_tr_hdr) $2])dnl +done]) + +AC_DEFUN(AC_DIR_HEADER, +[AC_OBSOLETE([$0], [; instead use AC_HEADER_DIRENT])dnl +ac_header_dirent=no +for ac_hdr in dirent.h sys/ndir.h sys/dir.h ndir.h; do + AC_CHECK_HEADER_DIRENT($ac_hdr, [ac_header_dirent=$ac_hdr; break]) +done + +case "$ac_header_dirent" in +dirent.h) AC_DEFINE(DIRENT) ;; +sys/ndir.h) AC_DEFINE(SYSNDIR) ;; +sys/dir.h) AC_DEFINE(SYSDIR) ;; +ndir.h) AC_DEFINE(NDIR) ;; +esac + +AC_CACHE_CHECK(whether closedir returns void, ac_cv_func_closedir_void, +[AC_TRY_RUN([#include +#include <$ac_header_dirent> +int closedir(); main() { exit(closedir(opendir(".")) != 0); }], + ac_cv_func_closedir_void=no, ac_cv_func_closedir_void=yes, ac_cv_func_closedir_void=yes)]) +if test $ac_cv_func_closedir_void = yes; then + AC_DEFINE(VOID_CLOSEDIR) +fi +]) + +AC_DEFUN(AC_HEADER_STAT, +[AC_CACHE_CHECK(whether stat file-mode macros are broken, + ac_cv_header_stat_broken, +[AC_EGREP_CPP([You lose], [#include +#include + +#if defined(S_ISBLK) && defined(S_IFDIR) +# if S_ISBLK (S_IFDIR) +You lose. +# endif +#endif + +#if defined(S_ISBLK) && defined(S_IFCHR) +# if S_ISBLK (S_IFCHR) +You lose. +# endif +#endif + +#if defined(S_ISLNK) && defined(S_IFREG) +# if S_ISLNK (S_IFREG) +You lose. 
+# endif +#endif + +#if defined(S_ISSOCK) && defined(S_IFREG) +# if S_ISSOCK (S_IFREG) +You lose. +# endif +#endif +], ac_cv_header_stat_broken=yes, ac_cv_header_stat_broken=no)]) +if test $ac_cv_header_stat_broken = yes; then + AC_DEFINE(STAT_MACROS_BROKEN) +fi +]) + +AC_DEFUN(AC_DECL_SYS_SIGLIST, +[AC_CACHE_CHECK([for sys_siglist declaration in signal.h or unistd.h], + ac_cv_decl_sys_siglist, +[AC_TRY_COMPILE([#include +#include +/* NetBSD declares sys_siglist in unistd.h. */ +#ifdef HAVE_UNISTD_H +#include +#endif], [char *msg = *(sys_siglist + 1);], + ac_cv_decl_sys_siglist=yes, ac_cv_decl_sys_siglist=no)]) +if test $ac_cv_decl_sys_siglist = yes; then + AC_DEFINE(SYS_SIGLIST_DECLARED) +fi +]) + +AC_DEFUN(AC_HEADER_SYS_WAIT, +[AC_CACHE_CHECK([for sys/wait.h that is POSIX.1 compatible], + ac_cv_header_sys_wait_h, +[AC_TRY_COMPILE([#include +#include +#ifndef WEXITSTATUS +#define WEXITSTATUS(stat_val) ((unsigned)(stat_val) >> 8) +#endif +#ifndef WIFEXITED +#define WIFEXITED(stat_val) (((stat_val) & 255) == 0) +#endif], [int s; +wait (&s); +s = WIFEXITED (s) ? WEXITSTATUS (s) : 1;], +ac_cv_header_sys_wait_h=yes, ac_cv_header_sys_wait_h=no)]) +if test $ac_cv_header_sys_wait_h = yes; then + AC_DEFINE(HAVE_SYS_WAIT_H) +fi +]) + + +dnl ### Checks for typedefs + + +AC_DEFUN(AC_TYPE_GETGROUPS, +[AC_REQUIRE([AC_TYPE_UID_T])dnl +AC_CACHE_CHECK(type of array argument to getgroups, ac_cv_type_getgroups, +[AC_TRY_RUN( +changequote(<<, >>)dnl +<< +/* Thanks to Mike Rendell for this test. */ +#include +#define NGID 256 +#undef MAX +#define MAX(x, y) ((x) > (y) ? (x) : (y)) +main() +{ + gid_t gidset[NGID]; + int i, n; + union { gid_t gval; long lval; } val; + + val.lval = -1; + for (i = 0; i < NGID; i++) + gidset[i] = val.gval; + n = getgroups (sizeof (gidset) / MAX (sizeof (int), sizeof (gid_t)) - 1, + gidset); + /* Exit non-zero if getgroups seems to require an array of ints. This + happens when gid_t is short but getgroups modifies an array of ints. */ + exit ((n > 0 && gidset[n] != val.gval) ? 1 : 0); +} +>>, +changequote([, ])dnl + ac_cv_type_getgroups=gid_t, ac_cv_type_getgroups=int, + ac_cv_type_getgroups=cross) +if test $ac_cv_type_getgroups = cross; then + dnl When we can't run the test program (we are cross compiling), presume + dnl that has either an accurate prototype for getgroups or none. + dnl Old systems without prototypes probably use int. + AC_EGREP_HEADER([getgroups.*int.*gid_t], unistd.h, + ac_cv_type_getgroups=gid_t, ac_cv_type_getgroups=int) +fi]) +AC_DEFINE_UNQUOTED(GETGROUPS_T, $ac_cv_type_getgroups) +]) + +AC_DEFUN(AC_TYPE_UID_T, +[AC_CACHE_CHECK(for uid_t in sys/types.h, ac_cv_type_uid_t, +[AC_EGREP_HEADER(uid_t, sys/types.h, + ac_cv_type_uid_t=yes, ac_cv_type_uid_t=no)]) +if test $ac_cv_type_uid_t = no; then + AC_DEFINE(uid_t, int) + AC_DEFINE(gid_t, int) +fi +]) + +AC_DEFUN(AC_TYPE_SIZE_T, +[AC_CHECK_TYPE(size_t, unsigned)]) + +AC_DEFUN(AC_TYPE_PID_T, +[AC_CHECK_TYPE(pid_t, int)]) + +AC_DEFUN(AC_TYPE_OFF_T, +[AC_CHECK_TYPE(off_t, long)]) + +AC_DEFUN(AC_TYPE_MODE_T, +[AC_CHECK_TYPE(mode_t, int)]) + +dnl Note that identifiers starting with SIG are reserved by ANSI C. 
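+dnl For illustration only (an assumed usage, not taken from the sources): code
+dnl that uses the result of the signal check below typically declares handlers
+dnl as
+dnl   RETSIGTYPE my_handler (int sig) { ... }
+dnl so the declaration is correct whether handlers return void or int.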
+AC_DEFUN(AC_TYPE_SIGNAL, +[AC_CACHE_CHECK([return type of signal handlers], ac_cv_type_signal, +[AC_TRY_COMPILE([#include +#include +#ifdef signal +#undef signal +#endif +#ifdef __cplusplus +extern "C" void (*signal (int, void (*)(int)))(int); +#else +void (*signal ()) (); +#endif +], +[int i;], ac_cv_type_signal=void, ac_cv_type_signal=int)]) +AC_DEFINE_UNQUOTED(RETSIGTYPE, $ac_cv_type_signal) +]) + + +dnl ### Checks for functions + + +AC_DEFUN(AC_FUNC_CLOSEDIR_VOID, +[AC_REQUIRE([AC_HEADER_DIRENT])dnl +AC_CACHE_CHECK(whether closedir returns void, ac_cv_func_closedir_void, +[AC_TRY_RUN([#include +#include <$ac_header_dirent> +int closedir(); main() { exit(closedir(opendir(".")) != 0); }], + ac_cv_func_closedir_void=no, ac_cv_func_closedir_void=yes, ac_cv_func_closedir_void=yes)]) +if test $ac_cv_func_closedir_void = yes; then + AC_DEFINE(CLOSEDIR_VOID) +fi +]) + +AC_DEFUN(AC_FUNC_FNMATCH, +[AC_CACHE_CHECK(for working fnmatch, ac_cv_func_fnmatch_works, +# Some versions of Solaris or SCO have a broken fnmatch function. +# So we run a test program. If we are cross-compiling, take no chance. +# Thanks to John Oleynick and Franc,ois Pinard for this test. +[AC_TRY_RUN([main() { exit (fnmatch ("a*", "abc", 0) != 0); }], +ac_cv_func_fnmatch_works=yes, ac_cv_func_fnmatch_works=no, +ac_cv_func_fnmatch_works=no)]) +if test $ac_cv_func_fnmatch_works = yes; then + AC_DEFINE(HAVE_FNMATCH) +fi +]) + +AC_DEFUN(AC_FUNC_MMAP, +[AC_CHECK_HEADERS(unistd.h) +AC_CHECK_FUNCS(getpagesize) +AC_CACHE_CHECK(for working mmap, ac_cv_func_mmap_fixed_mapped, +[AC_TRY_RUN([ +/* Thanks to Mike Haertel and Jim Avera for this test. + Here is a matrix of mmap possibilities: + mmap private not fixed + mmap private fixed at somewhere currently unmapped + mmap private fixed at somewhere already mapped + mmap shared not fixed + mmap shared fixed at somewhere currently unmapped + mmap shared fixed at somewhere already mapped + For private mappings, we should verify that changes cannot be read() + back from the file, nor mmap's back from the file at a different + address. (There have been systems where private was not correctly + implemented like the infamous i386 svr4.0, and systems where the + VM page cache was not coherent with the filesystem buffer cache + like early versions of FreeBSD and possibly contemporary NetBSD.) + For shared mappings, we should conversely verify that changes get + propogated back to all the places they're supposed to be. + + Grep wants private fixed already mapped. + The main things grep needs to know about mmap are: + * does it exist and is it safe to write into the mmap'd area + * how to use it (BSD variants) */ +#include +#include +#include + +/* This mess was copied from the GNU getpagesize.h. */ +#ifndef HAVE_GETPAGESIZE +# ifdef HAVE_UNISTD_H +# include +# endif + +/* Assume that all systems that can run configure have sys/param.h. 
*/ +# ifndef HAVE_SYS_PARAM_H +# define HAVE_SYS_PARAM_H 1 +# endif + +# ifdef _SC_PAGESIZE +# define getpagesize() sysconf(_SC_PAGESIZE) +# else /* no _SC_PAGESIZE */ +# ifdef HAVE_SYS_PARAM_H +# include +# ifdef EXEC_PAGESIZE +# define getpagesize() EXEC_PAGESIZE +# else /* no EXEC_PAGESIZE */ +# ifdef NBPG +# define getpagesize() NBPG * CLSIZE +# ifndef CLSIZE +# define CLSIZE 1 +# endif /* no CLSIZE */ +# else /* no NBPG */ +# ifdef NBPC +# define getpagesize() NBPC +# else /* no NBPC */ +# ifdef PAGESIZE +# define getpagesize() PAGESIZE +# endif /* PAGESIZE */ +# endif /* no NBPC */ +# endif /* no NBPG */ +# endif /* no EXEC_PAGESIZE */ +# else /* no HAVE_SYS_PARAM_H */ +# define getpagesize() 8192 /* punt totally */ +# endif /* no HAVE_SYS_PARAM_H */ +# endif /* no _SC_PAGESIZE */ + +#endif /* no HAVE_GETPAGESIZE */ + +#ifdef __cplusplus +extern "C" { void *malloc(unsigned); } +#else +char *malloc(); +#endif + +int +main() +{ + char *data, *data2, *data3; + int i, pagesize; + int fd; + + pagesize = getpagesize(); + + /* + * First, make a file with some known garbage in it. + */ + data = malloc(pagesize); + if (!data) + exit(1); + for (i = 0; i < pagesize; ++i) + *(data + i) = rand(); + umask(0); + fd = creat("conftestmmap", 0600); + if (fd < 0) + exit(1); + if (write(fd, data, pagesize) != pagesize) + exit(1); + close(fd); + + /* + * Next, try to mmap the file at a fixed address which + * already has something else allocated at it. If we can, + * also make sure that we see the same garbage. + */ + fd = open("conftestmmap", O_RDWR); + if (fd < 0) + exit(1); + data2 = malloc(2 * pagesize); + if (!data2) + exit(1); + data2 += (pagesize - ((int) data2 & (pagesize - 1))) & (pagesize - 1); + if (data2 != mmap(data2, pagesize, PROT_READ | PROT_WRITE, + MAP_PRIVATE | MAP_FIXED, fd, 0L)) + exit(1); + for (i = 0; i < pagesize; ++i) + if (*(data + i) != *(data2 + i)) + exit(1); + + /* + * Finally, make sure that changes to the mapped area + * do not percolate back to the file as seen by read(). + * (This is a bug on some variants of i386 svr4.0.) + */ + for (i = 0; i < pagesize; ++i) + *(data2 + i) = *(data2 + i) + 1; + data3 = malloc(pagesize); + if (!data3) + exit(1); + if (read(fd, data3, pagesize) != pagesize) + exit(1); + for (i = 0; i < pagesize; ++i) + if (*(data + i) != *(data3 + i)) + exit(1); + close(fd); + unlink("conftestmmap"); + exit(0); +} +], ac_cv_func_mmap_fixed_mapped=yes, ac_cv_func_mmap_fixed_mapped=no, +ac_cv_func_mmap_fixed_mapped=no)]) +if test $ac_cv_func_mmap_fixed_mapped = yes; then + AC_DEFINE(HAVE_MMAP) +fi +]) + +AC_DEFUN(AC_FUNC_GETPGRP, +[AC_CACHE_CHECK(whether getpgrp takes no argument, ac_cv_func_getpgrp_void, +[AC_TRY_RUN([ +/* + * If this system has a BSD-style getpgrp(), + * which takes a pid argument, exit unsuccessfully. + * + * Snarfed from Chet Ramey's bash pgrp.c test program + */ +#include +#include + +int pid; +int pg1, pg2, pg3, pg4; +int ng, np, s, child; + +main() +{ + pid = getpid(); + pg1 = getpgrp(0); + pg2 = getpgrp(); + pg3 = getpgrp(pid); + pg4 = getpgrp(1); + + /* + * If all of these values are the same, it's pretty sure that + * we're on a system that ignores getpgrp's first argument. + */ + if (pg2 == pg4 && pg1 == pg3 && pg2 == pg3) + exit(0); + + child = fork(); + if (child < 0) + exit(1); + else if (child == 0) { + np = getpid(); + /* + * If this is Sys V, this will not work; pgrp will be + * set to np because setpgrp just changes a pgrp to be + * the same as the pid. 
+ */ + setpgrp(np, pg1); + ng = getpgrp(0); /* Same result for Sys V and BSD */ + if (ng == pg1) { + exit(1); + } else { + exit(0); + } + } else { + wait(&s); + exit(s>>8); + } +} +], ac_cv_func_getpgrp_void=yes, ac_cv_func_getpgrp_void=no, + AC_MSG_ERROR(cannot check getpgrp if cross compiling)) +]) +if test $ac_cv_func_getpgrp_void = yes; then + AC_DEFINE(GETPGRP_VOID) +fi +]) + +AC_DEFUN(AC_FUNC_SETPGRP, +[AC_CACHE_CHECK(whether setpgrp takes no argument, ac_cv_func_setpgrp_void, +AC_TRY_RUN([ +#ifdef HAVE_UNISTD_H +#include +#endif + +/* + * If this system has a BSD-style setpgrp, which takes arguments, exit + * successfully. + */ +main() +{ + if (setpgrp(1,1) == -1) + exit(0); + else + exit(1); +} +], ac_cv_func_setpgrp_void=no, ac_cv_func_setpgrp_void=yes, + AC_MSG_ERROR(cannot check setpgrp if cross compiling)) +) +if test $ac_cv_func_setpgrp_void = yes; then + AC_DEFINE(SETPGRP_VOID) +fi +]) + +AC_DEFUN(AC_FUNC_VPRINTF, +[AC_CHECK_FUNC(vprintf, AC_DEFINE(HAVE_VPRINTF)) +if test "$ac_cv_func_vprintf" != yes; then +AC_CHECK_FUNC(_doprnt, AC_DEFINE(HAVE_DOPRNT)) +fi +]) + +AC_DEFUN(AC_FUNC_VFORK, +[AC_REQUIRE([AC_TYPE_PID_T])dnl +AC_CHECK_HEADER(vfork.h, AC_DEFINE(HAVE_VFORK_H)) +AC_CACHE_CHECK(for working vfork, ac_cv_func_vfork_works, +[AC_TRY_RUN([/* Thanks to Paul Eggert for this test. */ +#include +#include +#include +#ifdef HAVE_UNISTD_H +#include +#endif +#ifdef HAVE_VFORK_H +#include +#endif +/* On some sparc systems, changes by the child to local and incoming + argument registers are propagated back to the parent. + The compiler is told about this with #include , + but some compilers (e.g. gcc -O) don't grok . + Test for this by using a static variable whose address + is put into a register that is clobbered by the vfork. */ +static +#ifdef __cplusplus +sparc_address_test (int arg) +#else +sparc_address_test (arg) int arg; +#endif +{ + static pid_t child; + if (!child) { + child = vfork (); + if (child < 0) { + perror ("vfork"); + _exit(2); + } + if (!child) { + arg = getpid(); + write(-1, "", 0); + _exit (arg); + } + } +} +main() { + pid_t parent = getpid (); + pid_t child; + + sparc_address_test (); + + child = vfork (); + + if (child == 0) { + /* Here is another test for sparc vfork register problems. + This test uses lots of local variables, at least + as many local variables as main has allocated so far + including compiler temporaries. 4 locals are enough for + gcc 1.40.3 on a Solaris 4.1.3 sparc, but we use 8 to be safe. + A buggy compiler should reuse the register of parent + for one of the local variables, since it will think that + parent can't possibly be used any more in this routine. + Assigning to the local variable will thus munge parent + in the parent process. */ + pid_t + p = getpid(), p1 = getpid(), p2 = getpid(), p3 = getpid(), + p4 = getpid(), p5 = getpid(), p6 = getpid(), p7 = getpid(); + /* Convince the compiler that p..p7 are live; otherwise, it might + use the same hardware register for all 8 local variables. */ + if (p != p1 || p != p2 || p != p3 || p != p4 + || p != p5 || p != p6 || p != p7) + _exit(1); + + /* On some systems (e.g. IRIX 3.3), + vfork doesn't separate parent from child file descriptors. + If the child closes a descriptor before it execs or exits, + this munges the parent's descriptor as well. + Test for this by closing stdout in the child. */ + _exit(close(fileno(stdout)) != 0); + } else { + int status; + struct stat st; + + while (wait(&status) != child) + ; + exit( + /* Was there some problem with vforking? 
*/ + child < 0 + + /* Did the child fail? (This shouldn't happen.) */ + || status + + /* Did the vfork/compiler bug occur? */ + || parent != getpid() + + /* Did the file descriptor bug occur? */ + || fstat(fileno(stdout), &st) != 0 + ); + } +}], +ac_cv_func_vfork_works=yes, ac_cv_func_vfork_works=no, AC_CHECK_FUNC(vfork) +ac_cv_func_vfork_works=$ac_cv_func_vfork)]) +if test $ac_cv_func_vfork_works = no; then + AC_DEFINE(vfork, fork) +fi +]) + +AC_DEFUN(AC_FUNC_WAIT3, +[AC_CACHE_CHECK(for wait3 that fills in rusage, ac_cv_func_wait3_rusage, +[AC_TRY_RUN([#include +#include +#include +#include +/* HP-UX has wait3 but does not fill in rusage at all. */ +main() { + struct rusage r; + int i; + /* Use a field that we can force nonzero -- + voluntary context switches. + For systems like NeXT and OSF/1 that don't set it, + also use the system CPU time. And page faults (I/O) for Linux. */ + r.ru_nvcsw = 0; + r.ru_stime.tv_sec = 0; + r.ru_stime.tv_usec = 0; + r.ru_majflt = r.ru_minflt = 0; + switch (fork()) { + case 0: /* Child. */ + sleep(1); /* Give up the CPU. */ + _exit(0); + case -1: _exit(0); /* What can we do? */ + default: /* Parent. */ + wait3(&i, 0, &r); + sleep(2); /* Avoid "text file busy" from rm on fast HP-UX machines. */ + exit(r.ru_nvcsw == 0 && r.ru_majflt == 0 && r.ru_minflt == 0 + && r.ru_stime.tv_sec == 0 && r.ru_stime.tv_usec == 0); + } +}], ac_cv_func_wait3_rusage=yes, ac_cv_func_wait3_rusage=no, +ac_cv_func_wait3_rusage=no)]) +if test $ac_cv_func_wait3_rusage = yes; then + AC_DEFINE(HAVE_WAIT3) +fi +]) + +AC_DEFUN(AC_FUNC_ALLOCA, +[AC_REQUIRE_CPP()dnl Set CPP; we run AC_EGREP_CPP conditionally. +# The Ultrix 4.2 mips builtin alloca declared by alloca.h only works +# for constant arguments. Useless! +AC_CACHE_CHECK([for working alloca.h], ac_cv_header_alloca_h, +[AC_TRY_LINK([#include ], [char *p = alloca(2 * sizeof(int));], + ac_cv_header_alloca_h=yes, ac_cv_header_alloca_h=no)]) +if test $ac_cv_header_alloca_h = yes; then + AC_DEFINE(HAVE_ALLOCA_H) +fi + +AC_CACHE_CHECK([for alloca], ac_cv_func_alloca_works, +[AC_TRY_LINK([ +#ifdef __GNUC__ +# define alloca __builtin_alloca +#else +# ifdef _MSC_VER +# include +# define alloca _alloca +# else +# if HAVE_ALLOCA_H +# include +# else +# ifdef _AIX + #pragma alloca +# else +# ifndef alloca /* predefined by HP cc +Olibcalls */ +char *alloca (); +# endif +# endif +# endif +# endif +#endif +], [char *p = (char *) alloca(1);], + ac_cv_func_alloca_works=yes, ac_cv_func_alloca_works=no)]) +if test $ac_cv_func_alloca_works = yes; then + AC_DEFINE(HAVE_ALLOCA) +fi + +if test $ac_cv_func_alloca_works = no; then + # The SVR3 libPW and SVR4 libucb both contain incompatible functions + # that cause trouble. Some versions do not even contain alloca or + # contain a buggy version. If you still want to use their alloca, + # use ar to extract alloca.o from them instead of compiling alloca.c. + ALLOCA=alloca.${ac_objext} + AC_DEFINE(C_ALLOCA) + +AC_CACHE_CHECK(whether alloca needs Cray hooks, ac_cv_os_cray, +[AC_EGREP_CPP(webecray, +[#if defined(CRAY) && ! 
defined(CRAY2) +webecray +#else +wenotbecray +#endif +], ac_cv_os_cray=yes, ac_cv_os_cray=no)]) +if test $ac_cv_os_cray = yes; then +for ac_func in _getb67 GETB67 getb67; do + AC_CHECK_FUNC($ac_func, [AC_DEFINE_UNQUOTED(CRAY_STACKSEG_END, $ac_func) + break]) +done +fi + +AC_CACHE_CHECK(stack direction for C alloca, ac_cv_c_stack_direction, +[AC_TRY_RUN([find_stack_direction () +{ + static char *addr = 0; + auto char dummy; + if (addr == 0) + { + addr = &dummy; + return find_stack_direction (); + } + else + return (&dummy > addr) ? 1 : -1; +} +main () +{ + exit (find_stack_direction() < 0); +}], ac_cv_c_stack_direction=1, ac_cv_c_stack_direction=-1, + ac_cv_c_stack_direction=0)]) +AC_DEFINE_UNQUOTED(STACK_DIRECTION, $ac_cv_c_stack_direction) +fi +AC_SUBST(ALLOCA)dnl +]) + +AC_DEFUN(AC_FUNC_GETLOADAVG, +[ac_have_func=no # yes means we've found a way to get the load average. + +# Some systems with -lutil have (and need) -lkvm as well, some do not. +# On Solaris, -lkvm requires nlist from -lelf, so check that first +# to get the right answer into the cache. +AC_CHECK_LIB(elf, elf_begin, LIBS="-lelf $LIBS") +AC_CHECK_LIB(kvm, kvm_open, LIBS="-lkvm $LIBS") +# Check for the 4.4BSD definition of getloadavg. +AC_CHECK_LIB(util, getloadavg, + [LIBS="-lutil $LIBS" ac_have_func=yes ac_cv_func_getloadavg_setgid=yes]) + +if test $ac_have_func = no; then + # There is a commonly available library for RS/6000 AIX. + # Since it is not a standard part of AIX, it might be installed locally. + ac_getloadavg_LIBS="$LIBS"; LIBS="-L/usr/local/lib $LIBS" + AC_CHECK_LIB(getloadavg, getloadavg, + LIBS="-lgetloadavg $LIBS", LIBS="$ac_getloadavg_LIBS") +fi + +# Make sure it is really in the library, if we think we found it. +AC_REPLACE_FUNCS(getloadavg) + +if test $ac_cv_func_getloadavg = yes; then + AC_DEFINE(HAVE_GETLOADAVG) + ac_have_func=yes +else + # Figure out what our getloadavg.c needs. + ac_have_func=no + AC_CHECK_HEADER(sys/dg_sys_info.h, + [ac_have_func=yes; AC_DEFINE(DGUX) + AC_CHECK_LIB(dgc, dg_sys_info)]) + + # We cannot check for , because Solaris 2 does not use dwarf (it + # uses stabs), but it is still SVR4. We cannot check for because + # Irix 4.0.5F has the header but not the library. + if test $ac_have_func = no && test $ac_cv_lib_elf_elf_begin = yes; then + ac_have_func=yes; AC_DEFINE(SVR4) + fi + + if test $ac_have_func = no; then + AC_CHECK_HEADER(inq_stats/cpustats.h, + [ac_have_func=yes; AC_DEFINE(UMAX) + AC_DEFINE(UMAX4_3)]) + fi + + if test $ac_have_func = no; then + AC_CHECK_HEADER(sys/cpustats.h, + [ac_have_func=yes; AC_DEFINE(UMAX)]) + fi + + if test $ac_have_func = no; then + AC_CHECK_HEADERS(mach/mach.h) + fi + + AC_CHECK_HEADER(nlist.h, + [AC_DEFINE(NLIST_STRUCT) + AC_CACHE_CHECK([for n_un in struct nlist], ac_cv_struct_nlist_n_un, + [AC_TRY_COMPILE([#include ], + [struct nlist n; n.n_un.n_name = 0;], + ac_cv_struct_nlist_n_un=yes, ac_cv_struct_nlist_n_un=no)]) + if test $ac_cv_struct_nlist_n_un = yes; then + AC_DEFINE(NLIST_NAME_UNION) + fi + ])dnl +fi # Do not have getloadavg in system libraries. + +# Some definitions of getloadavg require that the program be installed setgid. +dnl FIXME Don't hardwire the path of getloadavg.c in the top-level directory. 
+AC_CACHE_CHECK(whether getloadavg requires setgid, + ac_cv_func_getloadavg_setgid, +[AC_EGREP_CPP([Yowza Am I SETGID yet], +[#include "$srcdir/getloadavg.c" +#ifdef LDAV_PRIVILEGED +Yowza Am I SETGID yet +#endif], + ac_cv_func_getloadavg_setgid=yes, ac_cv_func_getloadavg_setgid=no)]) +if test $ac_cv_func_getloadavg_setgid = yes; then + NEED_SETGID=true; AC_DEFINE(GETLOADAVG_PRIVILEGED) +else + NEED_SETGID=false +fi +AC_SUBST(NEED_SETGID)dnl + +if test $ac_cv_func_getloadavg_setgid = yes; then + AC_CACHE_CHECK(group of /dev/kmem, ac_cv_group_kmem, +[changequote(, )dnl + # On Solaris, /dev/kmem is a symlink. Get info on the real file. + ac_ls_output=`ls -lgL /dev/kmem 2>/dev/null` + # If we got an error (system does not support symlinks), try without -L. + test -z "$ac_ls_output" && ac_ls_output=`ls -lg /dev/kmem` + ac_cv_group_kmem=`echo $ac_ls_output \ + | sed -ne 's/[ ][ ]*/ /g; + s/^.[sSrwx-]* *[0-9]* *\([^0-9]*\) *.*/\1/; + / /s/.* //;p;'` +changequote([, ])dnl +]) + KMEM_GROUP=$ac_cv_group_kmem +fi +AC_SUBST(KMEM_GROUP)dnl +]) + +AC_DEFUN(AC_FUNC_UTIME_NULL, +[AC_CACHE_CHECK(whether utime accepts a null argument, ac_cv_func_utime_null, +[rm -f conftestdata; > conftestdata +# Sequent interprets utime(file, 0) to mean use start of epoch. Wrong. +AC_TRY_RUN([#include +#include +main() { +struct stat s, t; +exit(!(stat ("conftestdata", &s) == 0 && utime("conftestdata", (long *)0) == 0 +&& stat("conftestdata", &t) == 0 && t.st_mtime >= s.st_mtime +&& t.st_mtime - s.st_mtime < 120)); +}], ac_cv_func_utime_null=yes, ac_cv_func_utime_null=no, + ac_cv_func_utime_null=no) +rm -f core core.* *.core]) +if test $ac_cv_func_utime_null = yes; then + AC_DEFINE(HAVE_UTIME_NULL) +fi +]) + +AC_DEFUN(AC_FUNC_STRCOLL, +[AC_CACHE_CHECK(for working strcoll, ac_cv_func_strcoll_works, +[AC_TRY_RUN([#include +main () +{ + exit (strcoll ("abc", "def") >= 0 || + strcoll ("ABC", "DEF") >= 0 || + strcoll ("123", "456") >= 0); +}], ac_cv_func_strcoll_works=yes, ac_cv_func_strcoll_works=no, +ac_cv_func_strcoll_works=no)]) +if test $ac_cv_func_strcoll_works = yes; then + AC_DEFINE(HAVE_STRCOLL) +fi +]) + +AC_DEFUN(AC_FUNC_SETVBUF_REVERSED, +[AC_CACHE_CHECK(whether setvbuf arguments are reversed, + ac_cv_func_setvbuf_reversed, +[AC_TRY_RUN([#include +/* If setvbuf has the reversed format, exit 0. */ +main () { + /* This call has the arguments reversed. + A reversed system may check and see that the address of main + is not _IOLBF, _IONBF, or _IOFBF, and return nonzero. */ + if (setvbuf(stdout, _IOLBF, (char *) main, BUFSIZ) != 0) + exit(1); + putc('\r', stdout); + exit(0); /* Non-reversed systems segv here. */ +}], ac_cv_func_setvbuf_reversed=yes, ac_cv_func_setvbuf_reversed=no) +rm -f core core.* *.core]) +if test $ac_cv_func_setvbuf_reversed = yes; then + AC_DEFINE(SETVBUF_REVERSED) +fi +]) + +AC_DEFUN(AC_FUNC_GETMNTENT, +[# getmntent is in -lsun on Irix 4, -lseq on Dynix/PTX, -lgen on Unixware. +AC_CHECK_LIB(sun, getmntent, LIBS="-lsun $LIBS", + [AC_CHECK_LIB(seq, getmntent, LIBS="-lseq $LIBS", + [AC_CHECK_LIB(gen, getmntent, LIBS="-lgen $LIBS")])]) +AC_CHECK_FUNC(getmntent, [AC_DEFINE(HAVE_GETMNTENT)])]) + +AC_DEFUN(AC_FUNC_STRFTIME, +[AC_CHECK_FUNC(strftime, [AC_DEFINE(HAVE_STRFTIME)], +[# strftime is in -lintl on SCO UNIX. 
+AC_CHECK_LIB(intl, strftime, +[AC_DEFINE(HAVE_STRFTIME) +LIBS="-lintl $LIBS"])])]) + +AC_DEFUN(AC_FUNC_MEMCMP, +[AC_CACHE_CHECK(for 8-bit clean memcmp, ac_cv_func_memcmp_clean, +[AC_TRY_RUN([ +main() +{ + char c0 = 0x40, c1 = 0x80, c2 = 0x81; + exit(memcmp(&c0, &c2, 1) < 0 && memcmp(&c1, &c2, 1) < 0 ? 0 : 1); +} +], ac_cv_func_memcmp_clean=yes, ac_cv_func_memcmp_clean=no, +ac_cv_func_memcmp_clean=no)]) +test $ac_cv_func_memcmp_clean = no && LIBOBJS="$LIBOBJS memcmp.${ac_objext}" +AC_SUBST(LIBOBJS)dnl +]) + +AC_DEFUN(AC_FUNC_SELECT_ARGTYPES, +[AC_MSG_CHECKING([types of arguments for select()]) + AC_CACHE_VAL(ac_cv_func_select_arg234,dnl + [AC_CACHE_VAL(ac_cv_func_select_arg1,dnl + [AC_CACHE_VAL(ac_cv_func_select_arg5,dnl + [for ac_cv_func_select_arg234 in 'fd_set *' 'int *' 'void *'; do + for ac_cv_func_select_arg1 in 'int' 'size_t' 'unsigned long' 'unsigned'; do + for ac_cv_func_select_arg5 in 'struct timeval *' 'const struct timeval *'; do + AC_TRY_COMPILE(dnl +[#ifdef HAVE_SYS_TYPES_H +#include +#endif +#ifdef HAVE_SYS_TIME_H +#include +#endif +#ifdef HAVE_SYS_SELECT_H +#include +#endif +#ifdef HAVE_SYS_SOCKET_H +#include +#endif +extern select ($ac_cv_func_select_arg1,$ac_cv_func_select_arg234,$ac_cv_func_select_arg234,$ac_cv_func_select_arg234,$ac_cv_func_select_arg5);],,dnl + [ac_not_found=no ; break 3],ac_not_found=yes) + done + done + done + ])dnl AC_CACHE_VAL + ])dnl AC_CACHE_VAL + ])dnl AC_CACHE_VAL + if test "$ac_not_found" = yes; then + ac_cv_func_select_arg1=int + ac_cv_func_select_arg234='int *' + ac_cv_func_select_arg5='struct timeval *' + fi + AC_MSG_RESULT([$ac_cv_func_select_arg1,$ac_cv_func_select_arg234,$ac_cv_func_select_arg5]) + AC_DEFINE_UNQUOTED(SELECT_TYPE_ARG1,$ac_cv_func_select_arg1) + AC_DEFINE_UNQUOTED(SELECT_TYPE_ARG234,($ac_cv_func_select_arg234)) + AC_DEFINE_UNQUOTED(SELECT_TYPE_ARG5,($ac_cv_func_select_arg5)) +]) + + +dnl ### Checks for structure members + + +AC_DEFUN(AC_HEADER_TIME, +[AC_CACHE_CHECK([whether time.h and sys/time.h may both be included], + ac_cv_header_time, +[AC_TRY_COMPILE([#include +#include +#include ], +[struct tm *tp;], ac_cv_header_time=yes, ac_cv_header_time=no)]) +if test $ac_cv_header_time = yes; then + AC_DEFINE(TIME_WITH_SYS_TIME) +fi +]) + +AC_DEFUN(AC_STRUCT_TM, +[AC_CACHE_CHECK([whether struct tm is in sys/time.h or time.h], + ac_cv_struct_tm, +[AC_TRY_COMPILE([#include +#include ], +[struct tm *tp; tp->tm_sec;], + ac_cv_struct_tm=time.h, ac_cv_struct_tm=sys/time.h)]) +if test $ac_cv_struct_tm = sys/time.h; then + AC_DEFINE(TM_IN_SYS_TIME) +fi +]) + +AC_DEFUN(AC_STRUCT_TIMEZONE, +[AC_REQUIRE([AC_STRUCT_TM])dnl +AC_CACHE_CHECK([for tm_zone in struct tm], ac_cv_struct_tm_zone, +[AC_TRY_COMPILE([#include +#include <$ac_cv_struct_tm>], [struct tm tm; tm.tm_zone;], + ac_cv_struct_tm_zone=yes, ac_cv_struct_tm_zone=no)]) +if test "$ac_cv_struct_tm_zone" = yes; then + AC_DEFINE(HAVE_TM_ZONE) +else + AC_CACHE_CHECK(for tzname, ac_cv_var_tzname, +[AC_TRY_LINK( +changequote(<<, >>)dnl +<<#include +#ifndef tzname /* For SGI. */ +extern char *tzname[]; /* RS6000 and others reject char **tzname. 
*/ +#endif>>, +changequote([, ])dnl +[atoi(*tzname);], ac_cv_var_tzname=yes, ac_cv_var_tzname=no)]) + if test $ac_cv_var_tzname = yes; then + AC_DEFINE(HAVE_TZNAME) + fi +fi +]) + +AC_DEFUN(AC_STRUCT_ST_BLOCKS, +[AC_CACHE_CHECK([for st_blocks in struct stat], ac_cv_struct_st_blocks, +[AC_TRY_COMPILE([#include +#include ], [struct stat s; s.st_blocks;], +ac_cv_struct_st_blocks=yes, ac_cv_struct_st_blocks=no)]) +if test $ac_cv_struct_st_blocks = yes; then + AC_DEFINE(HAVE_ST_BLOCKS) +else + LIBOBJS="$LIBOBJS fileblocks.${ac_objext}" +fi +AC_SUBST(LIBOBJS)dnl +]) + +AC_DEFUN(AC_STRUCT_ST_BLKSIZE, +[AC_CACHE_CHECK([for st_blksize in struct stat], ac_cv_struct_st_blksize, +[AC_TRY_COMPILE([#include +#include ], [struct stat s; s.st_blksize;], +ac_cv_struct_st_blksize=yes, ac_cv_struct_st_blksize=no)]) +if test $ac_cv_struct_st_blksize = yes; then + AC_DEFINE(HAVE_ST_BLKSIZE) +fi +]) + +AC_DEFUN(AC_STRUCT_ST_RDEV, +[AC_CACHE_CHECK([for st_rdev in struct stat], ac_cv_struct_st_rdev, +[AC_TRY_COMPILE([#include +#include ], [struct stat s; s.st_rdev;], +ac_cv_struct_st_rdev=yes, ac_cv_struct_st_rdev=no)]) +if test $ac_cv_struct_st_rdev = yes; then + AC_DEFINE(HAVE_ST_RDEV) +fi +]) + + +dnl ### Checks for compiler characteristics + + +AC_DEFUN(AC_C_CROSS, +[AC_OBSOLETE([$0], [; it has been merged into AC_PROG_CC])]) + +AC_DEFUN(AC_C_CHAR_UNSIGNED, +[AC_CACHE_CHECK(whether char is unsigned, ac_cv_c_char_unsigned, +[if test "$GCC" = yes; then + # GCC predefines this symbol on systems where it applies. +AC_EGREP_CPP(yes, +[#ifdef __CHAR_UNSIGNED__ + yes +#endif +], ac_cv_c_char_unsigned=yes, ac_cv_c_char_unsigned=no) +else +AC_TRY_RUN( +[/* volatile prevents gcc2 from optimizing the test away on sparcs. */ +#if !defined(__STDC__) || __STDC__ != 1 +#define volatile +#endif +main() { + volatile char c = 255; exit(c < 0); +}], ac_cv_c_char_unsigned=yes, ac_cv_c_char_unsigned=no) +fi]) +if test $ac_cv_c_char_unsigned = yes && test "$GCC" != yes; then + AC_DEFINE(__CHAR_UNSIGNED__) +fi +]) + +AC_DEFUN(AC_C_LONG_DOUBLE, +[AC_CACHE_CHECK(for long double, ac_cv_c_long_double, +[if test "$GCC" = yes; then + ac_cv_c_long_double=yes +else +AC_TRY_RUN([int main() { +/* The Stardent Vistra knows sizeof(long double), but does not support it. */ +long double foo = 0.0; +/* On Ultrix 4.3 cc, long double is 4 and double is 8. */ +exit(sizeof(long double) < sizeof(double)); }], +ac_cv_c_long_double=yes, ac_cv_c_long_double=no) +fi]) +if test $ac_cv_c_long_double = yes; then + AC_DEFINE(HAVE_LONG_DOUBLE) +fi +]) + +AC_DEFUN(AC_INT_16_BITS, +[AC_OBSOLETE([$0], [; instead use AC_CHECK_SIZEOF(int)])dnl +AC_MSG_CHECKING(whether int is 16 bits) +AC_TRY_RUN([main() { exit(sizeof(int) != 2); }], + [AC_MSG_RESULT(yes) + AC_DEFINE(INT_16_BITS)], AC_MSG_RESULT(no)) +]) + +AC_DEFUN(AC_LONG_64_BITS, +[AC_OBSOLETE([$0], [; instead use AC_CHECK_SIZEOF(long)])dnl +AC_MSG_CHECKING(whether long int is 64 bits) +AC_TRY_RUN([main() { exit(sizeof(long int) != 8); }], + [AC_MSG_RESULT(yes) + AC_DEFINE(LONG_64_BITS)], AC_MSG_RESULT(no)) +]) + +AC_DEFUN(AC_C_BIGENDIAN, +[AC_CACHE_CHECK(whether byte ordering is bigendian, ac_cv_c_bigendian, +[ac_cv_c_bigendian=unknown +# See if sys/param.h defines the BYTE_ORDER macro. +AC_TRY_COMPILE([#include +#include ], [ +#if !BYTE_ORDER || !BIG_ENDIAN || !LITTLE_ENDIAN + bogus endian macros +#endif], [# It does; now see whether it defined to BIG_ENDIAN or not. 
+AC_TRY_COMPILE([#include +#include ], [ +#if BYTE_ORDER != BIG_ENDIAN + not big endian +#endif], ac_cv_c_bigendian=yes, ac_cv_c_bigendian=no)]) +if test $ac_cv_c_bigendian = unknown; then +AC_TRY_RUN([main () { + /* Are we little or big endian? From Harbison&Steele. */ + union + { + long l; + char c[sizeof (long)]; + } u; + u.l = 1; + exit (u.c[sizeof (long) - 1] == 1); +}], ac_cv_c_bigendian=no, ac_cv_c_bigendian=yes) +fi]) +if test $ac_cv_c_bigendian = yes; then + AC_DEFINE(WORDS_BIGENDIAN) +fi +]) + +dnl Do nothing if the compiler accepts the inline keyword. +dnl Otherwise define inline to __inline__ or __inline if one of those work, +dnl otherwise define inline to be empty. +AC_DEFUN(AC_C_INLINE, +[AC_CACHE_CHECK([for inline], ac_cv_c_inline, +[ac_cv_c_inline=no +for ac_kw in inline __inline__ __inline; do + AC_TRY_COMPILE(, [} $ac_kw foo() {], [ac_cv_c_inline=$ac_kw; break]) +done +]) +case "$ac_cv_c_inline" in + inline | yes) ;; + no) AC_DEFINE(inline, ) ;; + *) AC_DEFINE_UNQUOTED(inline, $ac_cv_c_inline) ;; +esac +]) + +AC_DEFUN(AC_C_CONST, +[dnl This message is consistent in form with the other checking messages, +dnl and with the result message. +AC_CACHE_CHECK([for working const], ac_cv_c_const, +[AC_TRY_COMPILE(, +changequote(<<, >>)dnl +<< +/* Ultrix mips cc rejects this. */ +typedef int charset[2]; const charset x; +/* SunOS 4.1.1 cc rejects this. */ +char const *const *ccp; +char **p; +/* NEC SVR4.0.2 mips cc rejects this. */ +struct point {int x, y;}; +static struct point const zero = {0,0}; +/* AIX XL C 1.02.0.0 rejects this. + It does not let you subtract one const X* pointer from another in an arm + of an if-expression whose if-part is not a constant expression */ +const char *g = "string"; +ccp = &g + (g ? g-g : 0); +/* HPUX 7.0 cc rejects these. */ +++ccp; +p = (char**) ccp; +ccp = (char const *const *) p; +{ /* SCO 3.2v4 cc rejects this. */ + char *t; + char const *s = 0 ? (char *) 0 : (char const *) 0; + + *t++ = 0; +} +{ /* Someone thinks the Sun supposedly-ANSI compiler will reject this. */ + int x[] = {25, 17}; + const int *foo = &x[0]; + ++foo; +} +{ /* Sun SC1.0 ANSI compiler rejects this -- but not the above. */ + typedef const int *iptr; + iptr p = 0; + ++p; +} +{ /* AIX XL C 1.02.0.0 rejects this saying + "k.c", line 2.27: 1506-025 (S) Operand must be a modifiable lvalue. */ + struct s { int j; const int *ap[3]; }; + struct s *b; b->j = 5; +} +{ /* ULTRIX-32 V3.1 (Rev 9) vcc rejects this */ + const int foo = 10; +} +>>, +changequote([, ])dnl +ac_cv_c_const=yes, ac_cv_c_const=no)]) +if test $ac_cv_c_const = no; then + AC_DEFINE(const, ) +fi +]) + +AC_DEFUN(AC_C_STRINGIZE, [ +AC_REQUIRE([AC_PROG_CPP]) +AC_MSG_CHECKING([for preprocessor stringizing operator]) +AC_CACHE_VAL(ac_cv_c_stringize, +AC_EGREP_CPP([#teststring],[ +#define x(y) #y + +char *s = x(teststring); +], ac_cv_c_stringize=no, ac_cv_c_stringize=yes)) +if test "${ac_cv_c_stringize}" = yes +then + AC_DEFINE(HAVE_STRINGIZE) +fi +AC_MSG_RESULT([${ac_cv_c_stringize}]) +])dnl + +define(AC_ARG_ARRAY, +[errprint(__file__:__line__: [$0] has been removed; don't do unportable things with arguments +)m4exit(4)]) + +dnl Check the object extension used by the compiler: typically .o or +dnl .obj. If this is called, some other behaviour will change, +dnl determined by ac_objext. 
+AC_DEFUN(AC_OBJEXT, +[AC_MSG_CHECKING([for object suffix]) +AC_CACHE_VAL(ac_cv_objext, +[rm -f conftest* +echo 'int i = 1;' > conftest.$ac_ext +if AC_TRY_EVAL(ac_compile); then + for ac_file in conftest.*; do + case $ac_file in + *.c) ;; + *) ac_cv_objext=`echo $ac_file | sed -e s/conftest.//` ;; + esac + done +else + AC_MSG_ERROR([installation or configuration problem; compiler does not work]) +fi +rm -f conftest*]) +AC_MSG_RESULT($ac_cv_objext) +OBJEXT=$ac_cv_objext +ac_objext=$ac_cv_objext +AC_SUBST(OBJEXT)]) + +dnl Determine the linker flags (e.g. `-L' and `-l') for the Fortran 77 +dnl intrinsic and run-time libraries that are required to successfully +dnl link a Fortran 77 program or shared library. The output variable +dnl FLIBS is set to these flags. +dnl +dnl This macro is intended to be used in those situations when it is +dnl necessary to mix, e.g. C++ and Fortran 77, source code into a single +dnl program or shared library. +dnl +dnl For example, if object files from a C++ and Fortran 77 compiler must +dnl be linked together, then the C++ compiler/linker must be used for +dnl linking (since special C++-ish things need to happen at link time +dnl like calling global constructors, instantiating templates, enabling +dnl exception support, etc.). +dnl +dnl However, the Fortran 77 intrinsic and run-time libraries must be +dnl linked in as well, but the C++ compiler/linker doesn't know how to +dnl add these Fortran 77 libraries. Hence, the macro +dnl `AC_F77_LIBRARY_LDFLAGS' was created to determine these Fortran 77 +dnl libraries. +dnl +dnl This macro was packaged in its current form by Matthew D. Langston +dnl . However, nearly all of this macro +dnl came from the `OCTAVE_FLIBS' macro in `octave-2.0.13/aclocal.m4', +dnl and full credit should go to John W. Eaton for writing this +dnl extremely useful macro. Thank you John. +dnl +dnl AC_F77_LIBRARY_LDFLAGS() +AC_DEFUN(AC_F77_LIBRARY_LDFLAGS, +[AC_MSG_CHECKING([for Fortran 77 libraries]) +AC_REQUIRE([AC_PROG_F77]) +AC_REQUIRE([AC_CANONICAL_HOST]) +AC_CACHE_VAL(ac_cv_flibs, +[changequote(, )dnl +dnl Write a minimal program and compile it with -v. I don't know what +dnl to do if your compiler doesn't have -v... +echo " END" > conftest.f +foutput=`${F77} -v -o conftest conftest.f 2>&1` +dnl +dnl The easiest thing to do for xlf output is to replace all the commas +dnl with spaces. Try to only do that if the output is really from xlf, +dnl since doing that causes problems on other systems. +dnl +xlf_p=`echo $foutput | grep xlfentry` +if test -n "$xlf_p"; then + foutput=`echo $foutput | sed 's/,/ /g'` +fi +dnl +ld_run_path=`echo $foutput | \ + sed -n -e 's/^.*LD_RUN_PATH *= *\([^ ]*\).*/\1/p'` +dnl +dnl We are only supposed to find this on Solaris systems... +dnl Uh, the run path should be absolute, shouldn't it? +dnl +case "$ld_run_path" in + /*) + if test "$ac_cv_prog_gcc" = yes; then + ld_run_path="-Xlinker -R -Xlinker $ld_run_path" + else + ld_run_path="-R $ld_run_path" + fi + ;; + *) + ld_run_path= + ;; +esac +dnl +flibs= +lflags= +dnl +dnl If want_arg is set, we know we want the arg to be added to the list, +dnl so we don't have to examine it. +dnl +want_arg= +dnl +for arg in $foutput; do + old_want_arg=$want_arg + want_arg= +dnl +dnl None of the options that take arguments expect the argument to +dnl start with a -, so pretend we didn't see anything special. 
+dnl + if test -n "$old_want_arg"; then + case "$arg" in + -*) + old_want_arg= + ;; + esac + fi + case "$old_want_arg" in + '') + case $arg in + /*.a) + exists=false + for f in $lflags; do + if test x$arg = x$f; then + exists=true + fi + done + if $exists; then + arg= + else + lflags="$lflags $arg" + fi + ;; + -bI:*) + exists=false + for f in $lflags; do + if test x$arg = x$f; then + exists=true + fi + done + if $exists; then + arg= + else + if test "$ac_cv_prog_gcc" = yes; then + lflags="$lflags -Xlinker $arg" + else + lflags="$lflags $arg" + fi + fi + ;; + -lang* | -lcrt0.o | -lc | -lgcc) + arg= + ;; + -[lLR]) + want_arg=$arg + arg= + ;; + -[lLR]*) + exists=false + for f in $lflags; do + if test x$arg = x$f; then + exists=true + fi + done + if $exists; then + arg= + else + case "$arg" in + -lkernel32) + case "$canonical_host_type" in + *-*-cygwin*) + arg= + ;; + *) + lflags="$lflags $arg" + ;; + esac + ;; + -lm) + ;; + *) + lflags="$lflags $arg" + ;; + esac + fi + ;; + -u) + want_arg=$arg + arg= + ;; + -Y) + want_arg=$arg + arg= + ;; + *) + arg= + ;; + esac + ;; + -[lLR]) + arg="$old_want_arg $arg" + ;; + -u) + arg="-u $arg" + ;; + -Y) +dnl +dnl Should probably try to ensure unique directory options here too. +dnl This probably only applies to Solaris systems, and then will only +dnl work with gcc... +dnl + arg=`echo $arg | sed -e 's%^P,%%'` + SAVE_IFS=$IFS + IFS=: + list= + for elt in $arg; do + list="$list -L$elt" + done + IFS=$SAVE_IFS + arg="$list" + ;; + esac +dnl + if test -n "$arg"; then + flibs="$flibs $arg" + fi +done +if test -n "$ld_run_path"; then + flibs_result="$ld_run_path $flibs" +else + flibs_result="$flibs" +fi +changequote([, ])dnl +ac_cv_flibs="$flibs_result"]) +FLIBS="$ac_cv_flibs" +AC_SUBST(FLIBS)dnl +AC_MSG_RESULT($FLIBS) +]) + + +dnl ### Checks for operating system services + + +AC_DEFUN(AC_SYS_INTERPRETER, +[# Pull the hash mark out of the macro call to avoid m4 problems. +ac_msg="whether #! works in shell scripts" +AC_CACHE_CHECK($ac_msg, ac_cv_sys_interpreter, +[echo '#! /bin/cat +exit 69 +' > conftest +chmod u+x conftest +(SHELL=/bin/sh; export SHELL; ./conftest >/dev/null) +if test $? -ne 69; then + ac_cv_sys_interpreter=yes +else + ac_cv_sys_interpreter=no +fi +rm -f conftest]) +interpval="$ac_cv_sys_interpreter" +]) + +define(AC_HAVE_POUNDBANG, +[errprint(__file__:__line__: [$0 has been replaced by AC_SYS_INTERPRETER, taking no arguments +])m4exit(4)]) + +AC_DEFUN(AC_SYS_LONG_FILE_NAMES, +[AC_CACHE_CHECK(for long file names, ac_cv_sys_long_file_names, +[ac_cv_sys_long_file_names=yes +# Test for long file names in all the places we know might matter: +# . the current directory, where building will happen +# $prefix/lib where we will be installing things +# $exec_prefix/lib likewise +# eval it to expand exec_prefix. +# $TMPDIR if set, where it might want to write temporary files +# if $TMPDIR is not set: +# /tmp where it might want to write temporary files +# /var/tmp likewise +# /usr/tmp likewise +if test -n "$TMPDIR" && test -d "$TMPDIR" && test -w "$TMPDIR"; then + ac_tmpdirs="$TMPDIR" +else + ac_tmpdirs='/tmp /var/tmp /usr/tmp' +fi +for ac_dir in . $ac_tmpdirs `eval echo $prefix/lib $exec_prefix/lib` ; do + test -d $ac_dir || continue + test -w $ac_dir || continue # It is less confusing to not echo anything here. + (echo 1 > $ac_dir/conftest9012345) 2>/dev/null + (echo 2 > $ac_dir/conftest9012346) 2>/dev/null + val=`cat $ac_dir/conftest9012345 2>/dev/null` + if test ! 
-f $ac_dir/conftest9012345 || test "$val" != 1; then + ac_cv_sys_long_file_names=no + rm -f $ac_dir/conftest9012345 $ac_dir/conftest9012346 2>/dev/null + break + fi + rm -f $ac_dir/conftest9012345 $ac_dir/conftest9012346 2>/dev/null +done]) +if test $ac_cv_sys_long_file_names = yes; then + AC_DEFINE(HAVE_LONG_FILE_NAMES) +fi +]) + +AC_DEFUN(AC_SYS_RESTARTABLE_SYSCALLS, +[AC_CACHE_CHECK(for restartable system calls, ac_cv_sys_restartable_syscalls, +[AC_TRY_RUN( +[/* Exit 0 (true) if wait returns something other than -1, + i.e. the pid of the child, which means that wait was restarted + after getting the signal. */ +#include +#include +ucatch (isig) { } +main () { + int i = fork (), status; + if (i == 0) { sleep (3); kill (getppid (), SIGINT); sleep (3); exit (0); } + signal (SIGINT, ucatch); + status = wait(&i); + if (status == -1) wait(&i); + exit (status == -1); +} +], ac_cv_sys_restartable_syscalls=yes, ac_cv_sys_restartable_syscalls=no)]) +if test $ac_cv_sys_restartable_syscalls = yes; then + AC_DEFINE(HAVE_RESTARTABLE_SYSCALLS) +fi +]) + +AC_DEFUN(AC_PATH_X, +[AC_REQUIRE_CPP()dnl Set CPP; we run AC_PATH_X_DIRECT conditionally. +# If we find X, set shell vars x_includes and x_libraries to the +# paths, otherwise set no_x=yes. +# Uses ac_ vars as temps to allow command line to override cache and checks. +# --without-x overrides everything else, but does not touch the cache. +AC_MSG_CHECKING(for X) + +AC_ARG_WITH(x, [ --with-x use the X Window System]) +# $have_x is `yes', `no', `disabled', or empty when we do not yet know. +if test "x$with_x" = xno; then + # The user explicitly disabled X. + have_x=disabled +else + if test "x$x_includes" != xNONE && test "x$x_libraries" != xNONE; then + # Both variables are already set. + have_x=yes + else +AC_CACHE_VAL(ac_cv_have_x, +[# One or both of the vars are not set, and there is no cached value. +ac_x_includes=NO ac_x_libraries=NO +AC_PATH_X_XMKMF +AC_PATH_X_DIRECT +if test "$ac_x_includes" = NO || test "$ac_x_libraries" = NO; then + # Didn't find X anywhere. Cache the known absence of X. + ac_cv_have_x="have_x=no" +else + # Record where we found X for the cache. + ac_cv_have_x="have_x=yes \ + ac_x_includes=$ac_x_includes ac_x_libraries=$ac_x_libraries" +fi])dnl + fi + eval "$ac_cv_have_x" +fi # $with_x != no + +if test "$have_x" != yes; then + AC_MSG_RESULT($have_x) + no_x=yes +else + # If each of the values was on the command line, it overrides each guess. + test "x$x_includes" = xNONE && x_includes=$ac_x_includes + test "x$x_libraries" = xNONE && x_libraries=$ac_x_libraries + # Update the cache value to reflect the command line values. + ac_cv_have_x="have_x=yes \ + ac_x_includes=$x_includes ac_x_libraries=$x_libraries" + AC_MSG_RESULT([libraries $x_libraries, headers $x_includes]) +fi +]) + +dnl Internal subroutine of AC_PATH_X. +dnl Set ac_x_includes and/or ac_x_libraries. +AC_DEFUN(AC_PATH_X_XMKMF, +[rm -fr conftestdir +if mkdir conftestdir; then + cd conftestdir + # Make sure to not put "make" in the Imakefile rules, since we grep it out. + cat > Imakefile <<'EOF' +acfindx: + @echo 'ac_im_incroot="${INCROOT}"; ac_im_usrlibdir="${USRLIBDIR}"; ac_im_libdir="${LIBDIR}"' +EOF + if (xmkmf) >/dev/null 2>/dev/null && test -f Makefile; then + # GNU make sometimes prints "make[1]: Entering...", which would confuse us. + eval `${MAKE-make} acfindx 2>/dev/null | grep -v make` + # Open Windows xmkmf reportedly sets LIBDIR instead of USRLIBDIR. + for ac_extension in a so sl; do + if test ! 
-f $ac_im_usrlibdir/libX11.$ac_extension && + test -f $ac_im_libdir/libX11.$ac_extension; then + ac_im_usrlibdir=$ac_im_libdir; break + fi + done + # Screen out bogus values from the imake configuration. They are + # bogus both because they are the default anyway, and because + # using them would break gcc on systems where it needs fixed includes. + case "$ac_im_incroot" in + /usr/include) ;; + *) test -f "$ac_im_incroot/X11/Xos.h" && ac_x_includes="$ac_im_incroot" ;; + esac + case "$ac_im_usrlibdir" in + /usr/lib | /lib) ;; + *) test -d "$ac_im_usrlibdir" && ac_x_libraries="$ac_im_usrlibdir" ;; + esac + fi + cd .. + rm -fr conftestdir +fi +]) + +dnl Internal subroutine of AC_PATH_X. +dnl Set ac_x_includes and/or ac_x_libraries. +AC_DEFUN(AC_PATH_X_DIRECT, +[if test "$ac_x_includes" = NO; then + # Guess where to find include files, by looking for this one X11 .h file. + test -z "$x_direct_test_include" && x_direct_test_include=X11/Intrinsic.h + + # First, try using that file with no special directory specified. +AC_TRY_CPP([#include <$x_direct_test_include>], +[# We can compile using X headers with no special include directory. +ac_x_includes=], +[# Look for the header file in a standard set of common directories. +# Check X11 before X11Rn because it is often a symlink to the current release. + for ac_dir in \ + /usr/X11/include \ + /usr/X11R6/include \ + /usr/X11R5/include \ + /usr/X11R4/include \ + \ + /usr/include/X11 \ + /usr/include/X11R6 \ + /usr/include/X11R5 \ + /usr/include/X11R4 \ + \ + /usr/local/X11/include \ + /usr/local/X11R6/include \ + /usr/local/X11R5/include \ + /usr/local/X11R4/include \ + \ + /usr/local/include/X11 \ + /usr/local/include/X11R6 \ + /usr/local/include/X11R5 \ + /usr/local/include/X11R4 \ + \ + /usr/X386/include \ + /usr/x386/include \ + /usr/XFree86/include/X11 \ + \ + /usr/include \ + /usr/local/include \ + /usr/unsupported/include \ + /usr/athena/include \ + /usr/local/x11r5/include \ + /usr/lpp/Xamples/include \ + \ + /usr/openwin/include \ + /usr/openwin/share/include \ + ; \ + do + if test -r "$ac_dir/$x_direct_test_include"; then + ac_x_includes=$ac_dir + break + fi + done]) +fi # $ac_x_includes = NO + +if test "$ac_x_libraries" = NO; then + # Check for the libraries. + + test -z "$x_direct_test_library" && x_direct_test_library=Xt + test -z "$x_direct_test_function" && x_direct_test_function=XtMalloc + + # See if we find them without any special options. + # Don't add to $LIBS permanently. + ac_save_LIBS="$LIBS" + LIBS="-l$x_direct_test_library $LIBS" +AC_TRY_LINK(, [${x_direct_test_function}()], +[LIBS="$ac_save_LIBS" +# We can link X programs with no special library path. +ac_x_libraries=], +[LIBS="$ac_save_LIBS" +# First see if replacing the include by lib works. +# Check X11 before X11Rn because it is often a symlink to the current release. 
+for ac_dir in `echo "$ac_x_includes" | sed s/include/lib/` \ + /usr/X11/lib \ + /usr/X11R6/lib \ + /usr/X11R5/lib \ + /usr/X11R4/lib \ + \ + /usr/lib/X11 \ + /usr/lib/X11R6 \ + /usr/lib/X11R5 \ + /usr/lib/X11R4 \ + \ + /usr/local/X11/lib \ + /usr/local/X11R6/lib \ + /usr/local/X11R5/lib \ + /usr/local/X11R4/lib \ + \ + /usr/local/lib/X11 \ + /usr/local/lib/X11R6 \ + /usr/local/lib/X11R5 \ + /usr/local/lib/X11R4 \ + \ + /usr/X386/lib \ + /usr/x386/lib \ + /usr/XFree86/lib/X11 \ + \ + /usr/lib \ + /usr/local/lib \ + /usr/unsupported/lib \ + /usr/athena/lib \ + /usr/local/x11r5/lib \ + /usr/lpp/Xamples/lib \ + /lib/usr/lib/X11 \ + \ + /usr/openwin/lib \ + /usr/openwin/share/lib \ + ; \ +do +dnl Don't even attempt the hair of trying to link an X program! + for ac_extension in a so sl; do + if test -r $ac_dir/lib${x_direct_test_library}.$ac_extension; then + ac_x_libraries=$ac_dir + break 2 + fi + done +done]) +fi # $ac_x_libraries = NO +]) + +dnl Find additional X libraries, magic flags, etc. +AC_DEFUN(AC_PATH_XTRA, +[AC_REQUIRE([AC_PATH_X])dnl +if test "$no_x" = yes; then + # Not all programs may use this symbol, but it does not hurt to define it. + AC_DEFINE(X_DISPLAY_MISSING) + X_CFLAGS= X_PRE_LIBS= X_LIBS= X_EXTRA_LIBS= +else + if test -n "$x_includes"; then + X_CFLAGS="$X_CFLAGS -I$x_includes" + fi + + # It would also be nice to do this for all -L options, not just this one. + if test -n "$x_libraries"; then + X_LIBS="$X_LIBS -L$x_libraries" +dnl FIXME banish uname from this macro! + # For Solaris; some versions of Sun CC require a space after -R and + # others require no space. Words are not sufficient . . . . + case "`(uname -sr) 2>/dev/null`" in + "SunOS 5"*) + AC_MSG_CHECKING(whether -R must be followed by a space) + ac_xsave_LIBS="$LIBS"; LIBS="$LIBS -R$x_libraries" + AC_TRY_LINK(, , ac_R_nospace=yes, ac_R_nospace=no) + if test $ac_R_nospace = yes; then + AC_MSG_RESULT(no) + X_LIBS="$X_LIBS -R$x_libraries" + else + LIBS="$ac_xsave_LIBS -R $x_libraries" + AC_TRY_LINK(, , ac_R_space=yes, ac_R_space=no) + if test $ac_R_space = yes; then + AC_MSG_RESULT(yes) + X_LIBS="$X_LIBS -R $x_libraries" + else + AC_MSG_RESULT(neither works) + fi + fi + LIBS="$ac_xsave_LIBS" + esac + fi + + # Check for system-dependent libraries X programs must link with. + # Do this before checking for the system-independent R6 libraries + # (-lICE), since we may need -lsocket or whatever for X linking. + + if test "$ISC" = yes; then + X_EXTRA_LIBS="$X_EXTRA_LIBS -lnsl_s -linet" + else + # Martyn.Johnson@cl.cam.ac.uk says this is needed for Ultrix, if the X + # libraries were built with DECnet support. And karl@cs.umb.edu says + # the Alpha needs dnet_stub (dnet does not exist). + AC_CHECK_LIB(dnet, dnet_ntoa, [X_EXTRA_LIBS="$X_EXTRA_LIBS -ldnet"]) + if test $ac_cv_lib_dnet_dnet_ntoa = no; then + AC_CHECK_LIB(dnet_stub, dnet_ntoa, + [X_EXTRA_LIBS="$X_EXTRA_LIBS -ldnet_stub"]) + fi + + # msh@cis.ufl.edu says -lnsl (and -lsocket) are needed for his 386/AT, + # to get the SysV transport functions. + # chad@anasazi.com says the Pyramis MIS-ES running DC/OSx (SVR4) + # needs -lnsl. + # The nsl library prevents programs from opening the X display + # on Irix 5.2, according to dickey@clark.net. + AC_CHECK_FUNC(gethostbyname) + if test $ac_cv_func_gethostbyname = no; then + AC_CHECK_LIB(nsl, gethostbyname, X_EXTRA_LIBS="$X_EXTRA_LIBS -lnsl") + fi + + # lieder@skyler.mavd.honeywell.com says without -lsocket, + # socket/setsockopt and other routines are undefined under SCO ODT + # 2.0. 
But -lsocket is broken on IRIX 5.2 (and is not necessary + # on later versions), says simon@lia.di.epfl.ch: it contains + # gethostby* variants that don't use the nameserver (or something). + # -lsocket must be given before -lnsl if both are needed. + # We assume that if connect needs -lnsl, so does gethostbyname. + AC_CHECK_FUNC(connect) + if test $ac_cv_func_connect = no; then + AC_CHECK_LIB(socket, connect, X_EXTRA_LIBS="-lsocket $X_EXTRA_LIBS", , + $X_EXTRA_LIBS) + fi + + # gomez@mi.uni-erlangen.de says -lposix is necessary on A/UX. + AC_CHECK_FUNC(remove) + if test $ac_cv_func_remove = no; then + AC_CHECK_LIB(posix, remove, X_EXTRA_LIBS="$X_EXTRA_LIBS -lposix") + fi + + # BSDI BSD/OS 2.1 needs -lipc for XOpenDisplay. + AC_CHECK_FUNC(shmat) + if test $ac_cv_func_shmat = no; then + AC_CHECK_LIB(ipc, shmat, X_EXTRA_LIBS="$X_EXTRA_LIBS -lipc") + fi + fi + + # Check for libraries that X11R6 Xt/Xaw programs need. + ac_save_LDFLAGS="$LDFLAGS" + test -n "$x_libraries" && LDFLAGS="$LDFLAGS -L$x_libraries" + # SM needs ICE to (dynamically) link under SunOS 4.x (so we have to + # check for ICE first), but we must link in the order -lSM -lICE or + # we get undefined symbols. So assume we have SM if we have ICE. + # These have to be linked with before -lX11, unlike the other + # libraries we check for below, so use a different variable. + # --interran@uluru.Stanford.EDU, kb@cs.umb.edu. + AC_CHECK_LIB(ICE, IceConnectionNumber, + [X_PRE_LIBS="$X_PRE_LIBS -lSM -lICE"], , $X_EXTRA_LIBS) + LDFLAGS="$ac_save_LDFLAGS" + +fi +AC_SUBST(X_CFLAGS)dnl +AC_SUBST(X_PRE_LIBS)dnl +AC_SUBST(X_LIBS)dnl +AC_SUBST(X_EXTRA_LIBS)dnl +]) + +dnl The old Cygwin32 macro is deprecated. +AC_DEFUN(AC_CYGWIN32, +[AC_OBSOLETE([$0], [; instead use AC_CYGWIN])dnl +AC_CYGWIN]) + +dnl Check for Cygwin. This is a way to set the right value for +dnl EXEEXT. +AC_DEFUN(AC_CYGWIN, +[AC_CACHE_CHECK(for Cygwin environment, ac_cv_cygwin, +[AC_TRY_COMPILE(,[ +#ifndef __CYGWIN__ +#define __CYGWIN__ __CYGWIN32__ +#endif +return __CYGWIN__;], +ac_cv_cygwin=yes, ac_cv_cygwin=no) +rm -f conftest*]) +CYGWIN= +test "$ac_cv_cygwin" = yes && CYGWIN=yes]) + +dnl Check for mingw32. This is another way to set the right value for +dnl EXEEXT. +AC_DEFUN(AC_MINGW32, +[AC_CACHE_CHECK(for mingw32 environment, ac_cv_mingw32, +[AC_TRY_COMPILE(,[return __MINGW32__;], +ac_cv_mingw32=yes, ac_cv_mingw32=no) +rm -f conftest*]) +MINGW32= +test "$ac_cv_mingw32" = yes && MINGW32=yes]) + +dnl Check for the extension used for executables. This knows that we +dnl add .exe for Cygwin or mingw32. Otherwise, it compiles a test +dnl executable. If this is called, the executable extensions will be +dnl automatically used by link commands run by the configure script. 
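+dnl For illustration only (an assumed usage, not taken from the sources): a
+dnl configure.in of this vintage typically runs, once the compiler is known,
+dnl   AC_PROG_CC
+dnl   AC_OBJEXT
+dnl   AC_EXEEXT
+dnl and its Makefile.in then refers to foo.$(OBJEXT) and foo$(EXEEXT) so the
+dnl same rules work on Unix and on Cygwin or mingw32.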
+AC_DEFUN(AC_EXEEXT, +[AC_REQUIRE([AC_CYGWIN]) +AC_REQUIRE([AC_MINGW32]) +AC_MSG_CHECKING([for executable suffix]) +AC_CACHE_VAL(ac_cv_exeext, +[if test "$CYGWIN" = yes || test "$MINGW32" = yes; then + ac_cv_exeext=.exe +else + rm -f conftest* + echo 'int main () { return 0; }' > conftest.$ac_ext + ac_cv_exeext= + if AC_TRY_EVAL(ac_link); then + for file in conftest.*; do + case $file in + *.c | *.o | *.obj) ;; + *) ac_cv_exeext=`echo $file | sed -e s/conftest//` ;; + esac + done + else + AC_MSG_ERROR([installation or configuration problem: compiler cannot create executables.]) + fi + rm -f conftest* + test x"${ac_cv_exeext}" = x && ac_cv_exeext=no +fi]) +EXEEXT="" +test x"${ac_cv_exeext}" != xno && EXEEXT=${ac_cv_exeext} +AC_MSG_RESULT(${ac_cv_exeext}) +dnl Setting ac_exeext will implicitly change the ac_link command. +ac_exeext=$EXEEXT +AC_SUBST(EXEEXT)]) + + +dnl ### Checks for UNIX variants +dnl These are kludges which should be replaced by a single POSIX check. +dnl They aren't cached, to discourage their use. + + +AC_DEFUN(AC_AIX, +[AC_BEFORE([$0], [AC_TRY_COMPILE])dnl +AC_BEFORE([$0], [AC_TRY_RUN])dnl +AC_MSG_CHECKING(for AIX) +AC_EGREP_CPP(yes, +[#ifdef _AIX + yes +#endif +], [AC_MSG_RESULT(yes); AC_DEFINE(_ALL_SOURCE)], AC_MSG_RESULT(no)) +]) + +AC_DEFUN(AC_MINIX, +[AC_BEFORE([$0], [AC_TRY_COMPILE])dnl +AC_BEFORE([$0], [AC_TRY_RUN])dnl +AC_CHECK_HEADER(minix/config.h, MINIX=yes, MINIX=) +if test "$MINIX" = yes; then + AC_DEFINE(_POSIX_SOURCE) + AC_DEFINE(_POSIX_1_SOURCE, 2) + AC_DEFINE(_MINIX) +fi +]) + +AC_DEFUN(AC_ISC_POSIX, +[AC_REQUIRE([AC_PROG_CC])dnl +AC_BEFORE([$0], [AC_TRY_COMPILE])dnl +AC_BEFORE([$0], [AC_TRY_RUN])dnl +AC_MSG_CHECKING(for POSIXized ISC) +if test -d /etc/conf/kconfig.d && + grep _POSIX_VERSION [/usr/include/sys/unistd.h] >/dev/null 2>&1 +then + AC_MSG_RESULT(yes) + ISC=yes # If later tests want to check for ISC. + AC_DEFINE(_POSIX_SOURCE) + if test "$GCC" = yes; then + CC="$CC -posix" + else + CC="$CC -Xp" + fi +else + AC_MSG_RESULT(no) + ISC= +fi +]) + +AC_DEFUN(AC_XENIX_DIR, +[AC_OBSOLETE([$0], [; instead use AC_HEADER_DIRENT])dnl +AC_REQUIRE([AC_DIR_HEADER])dnl +AC_MSG_CHECKING(for Xenix) +AC_EGREP_CPP(yes, +[#if defined(M_XENIX) && !defined(M_UNIX) + yes +#endif +], [AC_MSG_RESULT(yes); XENIX=yes], [AC_MSG_RESULT(no); XENIX=]) +if test "$XENIX" = yes; then + # Make sure -ldir precedes -lx. + test $ac_header_dirent = dirent.h && LIBS="-ldir $LIBS" + LIBS="$LIBS -lx" +fi +]) + +AC_DEFUN(AC_DYNIX_SEQ, +[AC_OBSOLETE([$0], [; instead use AC_FUNC_GETMNTENT])dnl +AC_CHECK_LIB(seq, getmntent, LIBS="-lseq $LIBS") +]) + +AC_DEFUN(AC_IRIX_SUN, +[AC_OBSOLETE([$0], [; instead use AC_FUNC_GETMNTENT or AC_CHECK_LIB(sun, getpwnam)])dnl +AC_CHECK_LIB(sun, getmntent, LIBS="-lsun $LIBS") +]) + +AC_DEFUN(AC_SCO_INTL, +[AC_OBSOLETE([$0], [; instead use AC_FUNC_STRFTIME])dnl +AC_CHECK_LIB(intl, strftime, LIBS="-lintl $LIBS") +]) diff --git a/build/autoconf/alloc.m4 b/build/autoconf/alloc.m4 new file mode 100644 index 0000000000..4972013b62 --- /dev/null +++ b/build/autoconf/alloc.m4 @@ -0,0 +1,57 @@ +dnl This Source Code Form is subject to the terms of the Mozilla Public +dnl License, v. 2.0. If a copy of the MPL was not distributed with this +dnl file, You can obtain one at http://mozilla.org/MPL/2.0/. 
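The MOZ_CHECK_ALLOCATOR macro defined below records the first allocator header it finds in the MALLOC_H define (as a bracketed header name) and sets MALLOC_USABLE_SIZE_CONST_PTR to either "const" or empty. A rough sketch of how C/C++ code is expected to consume those defines — an assumption about consumers that live elsewhere in the tree, not part of this file:

    /* consumer sketch, assuming MALLOC_H and MALLOC_USABLE_SIZE_CONST_PTR
       come from the checks below */
    #include <stddef.h>
    #ifdef MALLOC_H
    #  include MALLOC_H   /* expands to <malloc.h>, <malloc/malloc.h>, ... */
    #endif
    size_t malloc_usable_size(MALLOC_USABLE_SIZE_CONST_PTR void *ptr);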
+ +dnl Check for the existence of various allocation headers/functions +AC_DEFUN([MOZ_CHECK_ALLOCATOR],[ + +MALLOC_HEADERS="malloc.h malloc_np.h malloc/malloc.h sys/malloc.h" +MALLOC_H= + +for file in $MALLOC_HEADERS; do + MOZ_CHECK_HEADER($file, [MALLOC_H=$file]) + if test "$MALLOC_H" != ""; then + AC_DEFINE_UNQUOTED(MALLOC_H, <$MALLOC_H>) + break + fi +done + +AC_CHECK_FUNCS(strndup posix_memalign memalign) + +AC_CHECK_FUNCS(malloc_usable_size) +MALLOC_USABLE_SIZE_CONST_PTR=const +if test -n "$HAVE_MALLOC_H"; then + AC_MSG_CHECKING([whether malloc_usable_size definition can use const argument]) + AC_TRY_COMPILE([#include + #include + size_t malloc_usable_size(const void *ptr);], + [return malloc_usable_size(0);], + AC_MSG_RESULT([yes]), + AC_MSG_RESULT([no]) + MALLOC_USABLE_SIZE_CONST_PTR=) +fi +AC_DEFINE_UNQUOTED([MALLOC_USABLE_SIZE_CONST_PTR],[$MALLOC_USABLE_SIZE_CONST_PTR]) + + +dnl In newer bionic headers, valloc is built but not defined, +dnl so we check more carefully here. +AC_MSG_CHECKING([for valloc in malloc.h]) +AC_EGREP_HEADER(valloc, malloc.h, + AC_DEFINE(HAVE_VALLOC) + AC_MSG_RESULT([yes]), + AC_MSG_RESULT([no])) + +AC_MSG_CHECKING([for valloc in unistd.h]) +AC_EGREP_HEADER(valloc, unistd.h, + AC_DEFINE(HAVE_VALLOC) + AC_MSG_RESULT([yes]), + AC_MSG_RESULT([no])) + +AC_MSG_CHECKING([for _aligned_malloc in malloc.h]) +AC_EGREP_HEADER(_aligned_malloc, malloc.h, + AC_DEFINE(HAVE_ALIGNED_MALLOC) + AC_MSG_RESULT([yes]), + AC_MSG_RESULT([no])) + + +]) diff --git a/build/autoconf/altoptions.m4 b/build/autoconf/altoptions.m4 new file mode 100644 index 0000000000..ac016f4b91 --- /dev/null +++ b/build/autoconf/altoptions.m4 @@ -0,0 +1,77 @@ +dnl This Source Code Form is subject to the terms of the Mozilla Public +dnl License, v. 2.0. If a copy of the MPL was not distributed with this +dnl file, You can obtain one at http://mozilla.org/MPL/2.0/. + +dnl altoptions.m4 - An alternative way of specifying command-line options. +dnl These macros are needed to support a menu-based configurator. +dnl This file also includes the macro, AM_READ_MYCONFIG, for reading +dnl the 'myconfig.m4' file. + +dnl Send comments, improvements, bugs to Steve Lamm (slamm@netscape.com). 
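The MOZ_ARG_* option macros documented and defined below are called from configure.in. A minimal sketch of typical usage, where the option name and variable are illustrative and not taken from this patch:

    MOZ_ARG_ENABLE_BOOL(tests,
    [  --enable-tests          Build the test programs],
        ENABLE_TESTS=1,
        ENABLE_TESTS= )

This accepts --enable-tests / --disable-tests on the configure command line and runs the IF-YES or IF-NO argument accordingly; MOZ_ARG_DISABLE_BOOL works the same way with the default sense inverted.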
+ + +dnl MOZ_ARG_ENABLE_BOOL( NAME, HELP, IF-YES [, IF-NO [, ELSE]]) +dnl MOZ_ARG_DISABLE_BOOL( NAME, HELP, IF-NO [, IF-YES [, ELSE]]) +dnl MOZ_ARG_ENABLE_STRING( NAME, HELP, IF-SET [, ELSE]) +dnl MOZ_ARG_WITH_BOOL( NAME, HELP, IF-YES [, IF-NO [, ELSE]) +dnl MOZ_ARG_WITH_STRING( NAME, HELP, IF-SET [, ELSE]) +dnl MOZ_ARG_HEADER(Comment) +dnl MOZ_READ_MYCONFIG() - Read in 'myconfig.sh' file + +define([MOZ_DIVERSION_ARGS], 12) + +AC_DEFUN([MOZ_ARG],[dnl +AC_DIVERT_PUSH(MOZ_DIVERSION_ARGS)dnl + '$1', +AC_DIVERT_POP()dnl +]) +AC_DEFUN([MOZ_AC_ARG_ENABLE],[MOZ_ARG([--enable-]translit([$1],[_],[-]))AC_ARG_ENABLE([$1], [$2], [$3], [$4])]) +AC_DEFUN([MOZ_AC_ARG_WITH],[MOZ_ARG([--with-]translit([$1],[_],[-]))AC_ARG_WITH([$1], [$2], [$3], [$4])]) + +dnl MOZ_TWO_STRING_TEST(NAME, VAL, STR1, IF-STR1, STR2, IF-STR2 [, ELSE]) +AC_DEFUN([MOZ_TWO_STRING_TEST], +[if test "[$2]" = "[$3]"; then + ifelse([$4], , :, [$4]) + elif test "[$2]" = "[$5]"; then + ifelse([$6], , :, [$6]) + else + ifelse([$7], , + [AC_MSG_ERROR([Option, [$1], does not take an argument ([$2]).])], + [$7]) + fi]) + +dnl MOZ_ARG_ENABLE_BOOL(NAME, HELP, IF-YES [, IF-NO [, ELSE]]) +AC_DEFUN([MOZ_ARG_ENABLE_BOOL], +[MOZ_AC_ARG_ENABLE([$1], [$2], + [MOZ_TWO_STRING_TEST([$1], [$enableval], yes, [$3], no, [$4])], + [$5])]) + +dnl MOZ_ARG_DISABLE_BOOL(NAME, HELP, IF-NO [, IF-YES [, ELSE]]) +AC_DEFUN([MOZ_ARG_DISABLE_BOOL], +[MOZ_AC_ARG_ENABLE([$1], [$2], + [MOZ_TWO_STRING_TEST([$1], [$enableval], no, [$3], yes, [$4])], + [$5])]) + +dnl MOZ_ARG_ENABLE_STRING(NAME, HELP, IF-SET [, ELSE]) +AC_DEFUN([MOZ_ARG_ENABLE_STRING], +[MOZ_AC_ARG_ENABLE([$1], [$2], [$3], [$4])]) + +dnl MOZ_ARG_WITH_BOOL(NAME, HELP, IF-YES [, IF-NO [, ELSE]) +AC_DEFUN([MOZ_ARG_WITH_BOOL], +[MOZ_AC_ARG_WITH([$1], [$2], + [MOZ_TWO_STRING_TEST([$1], [$withval], yes, [$3], no, [$4])], + [$5])]) + +dnl MOZ_ARG_WITH_STRING(NAME, HELP, IF-SET [, ELSE]) +AC_DEFUN([MOZ_ARG_WITH_STRING], +[MOZ_AC_ARG_WITH([$1], [$2], [$3], [$4])]) + +dnl MOZ_ARG_HEADER(Comment) +dnl This is used by webconfig to group options +define(MOZ_ARG_HEADER, [# $1]) + +dnl MOZ_READ_MYCONFIG() - Read in 'myconfig.sh' file +AC_DEFUN([MOZ_READ_MOZCONFIG], +[AC_REQUIRE([AC_INIT_BINSH])dnl +. $OLD_CONFIGURE_VARS +]) diff --git a/build/autoconf/android.m4 b/build/autoconf/android.m4 new file mode 100644 index 0000000000..425def2023 --- /dev/null +++ b/build/autoconf/android.m4 @@ -0,0 +1,113 @@ +dnl This Source Code Form is subject to the terms of the Mozilla Public +dnl License, v. 2.0. If a copy of the MPL was not distributed with this +dnl file, You can obtain one at http://mozilla.org/MPL/2.0/. + +AC_DEFUN([MOZ_ANDROID_NDK], +[ + +case "$target" in +*-android*|*-linuxandroid*) + dnl $android_platform will be set for us by Python configure. + directory_include_args="-isystem $android_system -isystem $android_sysroot/usr/include" + + # clang will do any number of interesting things with host tools unless we tell + # it to use the NDK tools. 
+ extra_opts="-gcc-toolchain $(dirname $(dirname $TOOLCHAIN_PREFIX))" + CPPFLAGS="$extra_opts -D__ANDROID_API__=$android_version $CPPFLAGS" + ASFLAGS="$extra_opts $ASFLAGS" + LDFLAGS="$extra_opts $LDFLAGS" + + CPPFLAGS="$directory_include_args $CPPFLAGS" + CFLAGS="-fno-short-enums -fno-exceptions $CFLAGS" + CXXFLAGS="-fno-short-enums -fno-exceptions $CXXFLAGS $stlport_cppflags" + ASFLAGS="$directory_include_args -DANDROID $ASFLAGS" + + LDFLAGS="-L$android_platform/usr/lib -Wl,-rpath-link=$android_platform/usr/lib --sysroot=$android_platform $LDFLAGS" + ;; +esac + +]) + +AC_DEFUN([MOZ_ANDROID_CPU_ARCH], +[ + +if test "$OS_TARGET" = "Android"; then + case "${CPU_ARCH}" in + arm) + ANDROID_CPU_ARCH=armeabi-v7a + ;; + x86) + ANDROID_CPU_ARCH=x86 + ;; + x86_64) + ANDROID_CPU_ARCH=x86_64 + ;; + aarch64) + ANDROID_CPU_ARCH=arm64-v8a + ;; + esac + + AC_SUBST(ANDROID_CPU_ARCH) +fi +]) + +AC_DEFUN([MOZ_ANDROID_STLPORT], +[ + +if test "$OS_TARGET" = "Android"; then + if test -z "$STLPORT_LIBS"; then + # android-ndk-r8b and later + cxx_libs="$android_ndk/sources/cxx-stl/llvm-libc++/libs/$ANDROID_CPU_ARCH" + # NDK r12 removed the arm/thumb library split and just made + # everything thumb by default. Attempt to compensate. + if test "$MOZ_THUMB2" = 1 -a -d "$cxx_libs/thumb"; then + cxx_libs="$cxx_libs/thumb" + fi + + if ! test -e "$cxx_libs/libc++_static.a"; then + AC_MSG_ERROR([Couldn't find path to llvm-libc++ in the android ndk]) + fi + + STLPORT_LIBS="-L$cxx_libs -lc++_static" + # NDK r12 split the libc++ runtime libraries into pieces. + for lib in c++abi unwind android_support; do + if test -e "$cxx_libs/lib${lib}.a"; then + STLPORT_LIBS="$STLPORT_LIBS -l${lib}" + fi + done + fi +fi +AC_SUBST_LIST([STLPORT_LIBS]) + +]) + + +dnl Configure an Android SDK. +AC_DEFUN([MOZ_ANDROID_SDK], +[ + +MOZ_ARG_WITH_STRING(android-min-sdk, +[ --with-android-min-sdk=[VER] Impose a minimum Firefox for Android SDK version], +[ MOZ_ANDROID_MIN_SDK_VERSION=$withval ]) + +MOZ_ARG_WITH_STRING(android-max-sdk, +[ --with-android-max-sdk=[VER] Impose a maximum Firefox for Android SDK version], +[ MOZ_ANDROID_MAX_SDK_VERSION=$withval ]) + +if test -n "$MOZ_ANDROID_MIN_SDK_VERSION"; then + if test -n "$MOZ_ANDROID_MAX_SDK_VERSION"; then + if test $MOZ_ANDROID_MAX_SDK_VERSION -lt $MOZ_ANDROID_MIN_SDK_VERSION ; then + AC_MSG_ERROR([--with-android-max-sdk must be at least the value of --with-android-min-sdk.]) + fi + fi + + if test $MOZ_ANDROID_MIN_SDK_VERSION -gt $ANDROID_TARGET_SDK ; then + AC_MSG_ERROR([--with-android-min-sdk is expected to be less than $ANDROID_TARGET_SDK]) + fi + + AC_SUBST(MOZ_ANDROID_MIN_SDK_VERSION) +fi + +AC_SUBST(MOZ_ANDROID_MAX_SDK_VERSION) + +]) diff --git a/build/autoconf/arch.m4 b/build/autoconf/arch.m4 new file mode 100644 index 0000000000..45c671082d --- /dev/null +++ b/build/autoconf/arch.m4 @@ -0,0 +1,15 @@ +dnl This Source Code Form is subject to the terms of the Mozilla Public +dnl License, v. 2.0. If a copy of the MPL was not distributed with this +dnl file, You can obtain one at http://mozilla.org/MPL/2.0/. + +AC_DEFUN([MOZ_ARCH_OPTS], +[ +if test -n "$_ARM_FLAGS"; then + CFLAGS="$CFLAGS $_ARM_FLAGS" + CXXFLAGS="$CXXFLAGS $_ARM_FLAGS" + ASFLAGS="$ASFLAGS $_ARM_FLAGS" + if test -n "$_THUMB_FLAGS"; then + LDFLAGS="$LDFLAGS $_THUMB_FLAGS" + fi +fi +]) diff --git a/build/autoconf/autoconf.m4 b/build/autoconf/autoconf.m4 new file mode 100644 index 0000000000..dde59ab380 --- /dev/null +++ b/build/autoconf/autoconf.m4 @@ -0,0 +1,28 @@ +dnl Driver that loads the Autoconf macro files. 
+dnl Requires GNU m4. +dnl This file is part of Autoconf. +dnl Copyright (C) 1994 Free Software Foundation, Inc. +dnl +dnl This program is free software; you can redistribute it and/or modify +dnl it under the terms of the GNU General Public License as published by +dnl the Free Software Foundation; either version 2, or (at your option) +dnl any later version. +dnl +dnl This program is distributed in the hope that it will be useful, +dnl but WITHOUT ANY WARRANTY; without even the implied warranty of +dnl MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +dnl GNU General Public License for more details. +dnl +dnl You should have received a copy of the GNU General Public License +dnl along with this program; if not, write to the Free Software +dnl Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA +dnl 02111-1307, USA. +dnl +dnl Written by David MacKenzie. +dnl +include(acgeneral.m4)dnl +builtin(include, acspecific.m4)dnl +builtin(include, acoldnames.m4)dnl +dnl Do not sinclude acsite.m4 here, because it may not be installed +dnl yet when Autoconf is frozen. +dnl Do not sinclude ./aclocal.m4 here, to prevent it from being frozen. diff --git a/build/autoconf/autoconf.sh b/build/autoconf/autoconf.sh new file mode 100644 index 0000000000..ceb8a25b00 --- /dev/null +++ b/build/autoconf/autoconf.sh @@ -0,0 +1,158 @@ +#! @SHELL@ +# autoconf -- create `configure' using m4 macros +# Copyright (C) 1992, 1993, 1994, 1996 Free Software Foundation, Inc. + +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2, or (at your option) +# any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. + +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA +# 02111-1307, USA. + +# If given no args, create `configure' from template file `configure.in'. +# With one arg, create a configure script on standard output from +# the given template file. + +usage="\ +Usage: autoconf [-h] [--help] [-m dir] [--macrodir=dir] + [-l dir] [--localdir=dir] [--version] [template-file]" + +# NLS nuisances. +# Only set these to C if already set. These must not be set unconditionally +# because not all systems understand e.g. LANG=C (notably SCO). +# Fixing LC_MESSAGES prevents Solaris sh from translating var values in `set'! +# Non-C LC_CTYPE values break the ctype check. +if test "${LANG+set}" = set; then LANG=C; export LANG; fi +if test "${LC_ALL+set}" = set; then LC_ALL=C; export LC_ALL; fi +if test "${LC_MESSAGES+set}" = set; then LC_MESSAGES=C; export LC_MESSAGES; fi +if test "${LC_CTYPE+set}" = set; then LC_CTYPE=C; export LC_CTYPE; fi + +: ${AC_MACRODIR=@datadir@} +: ${M4=@M4@} +: ${AWK=@AWK@} +case "${M4}" in +/*) # Handle the case that m4 has moved since we were configured. + # It may have been found originally in a build directory. 
+ test -f "${M4}" || M4=m4 ;; +esac + +: ${TMPDIR=/tmp} +tmpout=${TMPDIR}/acout.$$ +localdir= +show_version=no + +while test $# -gt 0 ; do + case "${1}" in + -h | --help | --h* ) + echo "${usage}" 1>&2; exit 0 ;; + --localdir=* | --l*=* ) + localdir="`echo \"${1}\" | sed -e 's/^[^=]*=//'`" + shift ;; + -l | --localdir | --l*) + shift + test $# -eq 0 && { echo "${usage}" 1>&2; exit 1; } + localdir="${1}" + shift ;; + --macrodir=* | --m*=* ) + AC_MACRODIR="`echo \"${1}\" | sed -e 's/^[^=]*=//'`" + shift ;; + -m | --macrodir | --m* ) + shift + test $# -eq 0 && { echo "${usage}" 1>&2; exit 1; } + AC_MACRODIR="${1}" + shift ;; + --version | --v* ) + show_version=yes; shift ;; + -- ) # Stop option processing + shift; break ;; + - ) # Use stdin as input. + break ;; + -* ) + echo "${usage}" 1>&2; exit 1 ;; + * ) + break ;; + esac +done + +if test $show_version = yes; then + version=`sed -n 's/define.AC_ACVERSION.[ ]*\([0-9.]*\).*/\1/p' \ + $AC_MACRODIR/acgeneral.m4` + echo "Autoconf version $version" + exit 0 +fi + +case $# in + 0) infile=configure.in ;; + 1) infile="$1" ;; + *) echo "$usage" >&2; exit 1 ;; +esac + +trap 'rm -f $tmpin $tmpout; exit 1' 1 2 15 + +tmpin=${TMPDIR}/acin.$$ # Always set this, to avoid bogus errors from some rm's. +if test z$infile = z-; then + infile=$tmpin + cat > $infile +elif test ! -r "$infile"; then + echo "autoconf: ${infile}: No such file or directory" >&2 + exit 1 +fi + +if test -n "$localdir"; then + use_localdir="-I$localdir -DAC_LOCALDIR=$localdir" +else + use_localdir= +fi + +# Use the frozen version of Autoconf if available. +r= f= +# Some non-GNU m4's don't reject the --help option, so give them /dev/null. +case `$M4 --help < /dev/null 2>&1` in +*reload-state*) test -r $AC_MACRODIR/autoconf.m4f && { r=--reload f=f; } ;; +*traditional*) ;; +*) echo Autoconf requires GNU m4 1.1 or later >&2; rm -f $tmpin; exit 1 ;; +esac + +$M4 -I$AC_MACRODIR $use_localdir $r autoconf.m4$f $infile > $tmpout || + { rm -f $tmpin $tmpout; exit 2; } + +# You could add your own prefixes to pattern if you wanted to check for +# them too, e.g. pattern='\(AC_\|ILT_\)', except that UNIX sed doesn't do +# alternation. +pattern="AC_" + +status=0 +if grep "^[^#]*${pattern}" $tmpout > /dev/null 2>&1; then + echo "autoconf: Undefined macros:" >&2 + sed -n "s/^[^#]*\\(${pattern}[_A-Za-z0-9]*\\).*/\\1/p" $tmpout | + while read macro; do + grep -n "^[^#]*$macro" $infile /dev/null + test $? -eq 1 && echo >&2 "***BUG in Autoconf--please report*** $macro" + done | sort -u >&2 + status=1 +fi + +if test $# -eq 0; then + echo "This case should not be reached." + exit 1 +fi + +# Put the real line numbers into the output to make config.log more helpful. +$AWK ' +/__oline__/ { printf "%d:", NR + 1 } + { print } +' $tmpout | sed ' +/__oline__/s/^\([0-9][0-9]*\):\(.*\)__oline__/\2\1/ +' + +rm -f $tmpout + +exit $status diff --git a/build/autoconf/clang-plugin.m4 b/build/autoconf/clang-plugin.m4 new file mode 100644 index 0000000000..2bbb471e5b --- /dev/null +++ b/build/autoconf/clang-plugin.m4 @@ -0,0 +1,107 @@ +dnl This Source Code Form is subject to the terms of the Mozilla Public +dnl License, v. 2.0. If a copy of the MPL was not distributed with this +dnl file, You can obtain one at http://mozilla.org/MPL/2.0/. + +AC_DEFUN([MOZ_CONFIG_CLANG_PLUGIN], [ + +if test -n "$ENABLE_CLANG_PLUGIN"; then + dnl For some reason the llvm-config downloaded from clang.llvm.org for clang3_8 + dnl produces a -isysroot flag for a sysroot which might not ship when passed + dnl --cxxflags. 
We use sed to remove this argument so that builds work on OSX + dnl + dnl For a similar reason, we remove any -gcc-toolchain arguments, since the + dnl directories specified by such arguments might not exist on the current + dnl machine. + LLVM_CXXFLAGS=`$LLVM_CONFIG --cxxflags | sed -e 's/-isysroot [[^ ]]*//' -e 's/-gcc-toolchain [[^ ]]*//'` + + LLVM_LDFLAGS=`$LLVM_CONFIG --ldflags | tr '\n' ' '` + + if test "${HOST_OS_ARCH}" = "Darwin"; then + dnl We need to make sure that we use the symbols coming from the clang + dnl binary. In order to do this, we need to pass -flat_namespace and + dnl -undefined suppress to the linker. This makes sure that we link the + dnl symbols into the flat namespace provided by clang, and thus get + dnl access to all of the symbols which are undefined in our dylib as we + dnl are building it right now, and also that we don't fail the build + dnl due to undefined symbols (which will be provided by clang). + CLANG_LDFLAGS="-Wl,-flat_namespace -Wl,-undefined,suppress" + dnl We are loaded into clang, so we don't need to link to very many things, + dnl we just need to link to clangASTMatchers because it is not used by clang + CLANG_LDFLAGS="$CLANG_LDFLAGS `$LLVM_CONFIG --prefix`/lib/libclangASTMatchers.a" + dnl We need to remove -L/path/to/clang/lib from LDFLAGS to ensure that we + dnl don't accidentally link against the libc++ there which is a newer + dnl version that what our build machines have installed. + LLVM_LDFLAGS=`echo "$LLVM_LDFLAGS" | sed -E 's/-L[[^ ]]+\/clang\/lib//'` + elif test "${HOST_OS_ARCH}" = "WINNT"; then + CLANG_LDFLAGS="clangASTMatchers.lib clang.lib" + else + CLANG_LDFLAGS="-lclangASTMatchers" + fi + + if test -n "$CLANG_CL"; then + dnl The llvm-config coming with clang-cl may give us arguments in the + dnl /ARG form, which in msys will be interpreted as a path name. So we + dnl need to split the args and convert the leading slashes that we find + dnl into a dash. + LLVM_REPLACE_CXXFLAGS='' + for arg in $LLVM_CXXFLAGS; do + dnl The following expression replaces a leading slash with a dash. + dnl Also replace any backslashes with forward slash. + arg=`echo "$arg"|sed -e 's/^\//-/' -e 's/\\\\/\//g'` + LLVM_REPLACE_CXXFLAGS="$LLVM_REPLACE_CXXFLAGS $arg" + done + LLVM_CXXFLAGS="$LLVM_REPLACE_CXXFLAGS" + dnl We'll also want to replace `-std:` with `-Xclang -std=` so that + dnl LLVM_CXXFLAGS can correctly override the `-Xclang -std=` set by + dnl toolchain.configure. + LLVM_CXXFLAGS=`echo "$LLVM_CXXFLAGS"|sed -e 's/ \(-Xclang \|\)-std[[:=]]/ -Xclang -std=/'` + + LLVM_REPLACE_LDFLAGS='' + for arg in $LLVM_LDFLAGS; do + dnl The following expression replaces a leading slash with a dash. + dnl Also replace any backslashes with forward slash. + arg=`echo "$arg"|sed -e 's/^\//-/' -e 's/\\\\/\//g'` + LLVM_REPLACE_LDFLAGS="$LLVM_REPLACE_LDFLAGS $arg" + done + LLVM_LDFLAGS="$LLVM_REPLACE_LDFLAGS" + + CLANG_REPLACE_LDFLAGS='' + for arg in $CLANG_LDFLAGS; do + dnl The following expression replaces a leading slash with a dash. + dnl Also replace any backslashes with forward slash. 
+ arg=`echo "$arg"|sed -e 's/^\//-/' -e 's/\\\\/\//g'` + CLANG_REPLACE_LDFLAGS="$CLANG_REPLACE_LDFLAGS $arg" + done + CLANG_LDFLAGS="$CLANG_REPLACE_LDFLAGS" + fi + + CLANG_PLUGIN_FLAGS="-Xclang -load -Xclang $CLANG_PLUGIN -Xclang -add-plugin -Xclang moz-check" + + AC_DEFINE(MOZ_CLANG_PLUGIN) +fi + +if test -n "$ENABLE_MOZSEARCH_PLUGIN"; then + if test -z "${ENABLE_CLANG_PLUGIN}"; then + AC_MSG_ERROR([Can't use mozsearch plugin without --enable-clang-plugin.]) + fi + + CLANG_PLUGIN_FLAGS="$CLANG_PLUGIN_FLAGS -Xclang -add-plugin -Xclang mozsearch-index" + + dnl Parameters are: srcdir, outdir (path where output JSON is stored), objdir. + CLANG_PLUGIN_FLAGS="$CLANG_PLUGIN_FLAGS -Xclang -plugin-arg-mozsearch-index -Xclang $_topsrcdir" + CLANG_PLUGIN_FLAGS="$CLANG_PLUGIN_FLAGS -Xclang -plugin-arg-mozsearch-index -Xclang $_objdir/mozsearch_index" + CLANG_PLUGIN_FLAGS="$CLANG_PLUGIN_FLAGS -Xclang -plugin-arg-mozsearch-index -Xclang $_objdir" + + AC_DEFINE(MOZ_MOZSEARCH_PLUGIN) +fi + +AC_SUBST_LIST(CLANG_PLUGIN_FLAGS) +AC_SUBST_LIST(LLVM_CXXFLAGS) +AC_SUBST_LIST(LLVM_LDFLAGS) +AC_SUBST_LIST(CLANG_LDFLAGS) + +AC_SUBST(ENABLE_CLANG_PLUGIN) +AC_SUBST(ENABLE_CLANG_PLUGIN_ALPHA) +AC_SUBST(ENABLE_MOZSEARCH_PLUGIN) + +]) diff --git a/build/autoconf/codeset.m4 b/build/autoconf/codeset.m4 new file mode 100644 index 0000000000..3a25c42961 --- /dev/null +++ b/build/autoconf/codeset.m4 @@ -0,0 +1,25 @@ +# codeset.m4 serial AM1 (gettext-0.10.40) +dnl Copyright (C) 2000-2002 Free Software Foundation, Inc. +dnl This file is free software, distributed under the terms of the GNU +dnl General Public License. As a special exception to the GNU General +dnl Public License, this file may be distributed as part of a program +dnl that contains a configuration script generated by Autoconf, under +dnl the same distribution terms as the rest of that program. + +dnl From Bruno Haible. + +AC_DEFUN([AM_LANGINFO_CODESET], +[ + AC_CACHE_CHECK([for nl_langinfo and CODESET], am_cv_langinfo_codeset, + [AC_TRY_LINK([#include ], + [char* cs = nl_langinfo(CODESET);], + am_cv_langinfo_codeset=yes, + am_cv_langinfo_codeset=no) + ]) + if test $am_cv_langinfo_codeset = yes; then + AC_DEFINE(HAVE_LANGINFO_CODESET, 1, + [Define if you have and nl_langinfo(CODESET).]) + HAVE_LANGINFO_CODESET=1 + fi + AC_SUBST(HAVE_LANGINFO_CODESET) +]) diff --git a/build/autoconf/compiler-opts.m4 b/build/autoconf/compiler-opts.m4 new file mode 100644 index 0000000000..f29697ff20 --- /dev/null +++ b/build/autoconf/compiler-opts.m4 @@ -0,0 +1,162 @@ +dnl This Source Code Form is subject to the terms of the Mozilla Public +dnl License, v. 2.0. If a copy of the MPL was not distributed with this +dnl file, You can obtain one at http://mozilla.org/MPL/2.0/. + +dnl Add compiler specific options + +dnl ============================================================================ +dnl C++ rtti +dnl We don't use it in the code, but it can be usefull for debugging, so give +dnl the user the option of enabling it. 
+dnl ============================================================================ +AC_DEFUN([MOZ_RTTI], +[ +if test -z "$_MOZ_USE_RTTI"; then + if test "$GNU_CC"; then + CXXFLAGS="$CXXFLAGS -fno-rtti" + else + case "$target" in + *-mingw*) + CXXFLAGS="$CXXFLAGS -GR-" + esac + fi +fi +]) + +dnl ======================================================== +dnl = +dnl = Debugging Options +dnl = +dnl ======================================================== +AC_DEFUN([MOZ_DEBUGGING_OPTS], +[ + +if test -z "$MOZ_DEBUG" -o -n "$MOZ_ASAN"; then + MOZ_NO_DEBUG_RTL=1 +fi + +AC_SUBST(MOZ_NO_DEBUG_RTL) + +if test -n "$MOZ_DEBUG"; then + if test -n "$COMPILE_ENVIRONMENT"; then + AC_MSG_CHECKING([for valid debug flags]) + _SAVE_CFLAGS=$CFLAGS + CFLAGS="$CFLAGS $MOZ_DEBUG_FLAGS" + AC_TRY_COMPILE([#include ], + [printf("Hello World\n");], + _results=yes, + _results=no) + AC_MSG_RESULT([$_results]) + if test "$_results" = "no"; then + AC_MSG_ERROR([These compiler flags are invalid: $MOZ_DEBUG_FLAGS]) + fi + CFLAGS=$_SAVE_CFLAGS + fi +fi +]) + +dnl A high level macro for selecting compiler options. +AC_DEFUN([MOZ_COMPILER_OPTS], +[ + MOZ_DEBUGGING_OPTS + MOZ_RTTI +if test "$CLANG_CXX"; then + _WARNINGS_CXXFLAGS="${_WARNINGS_CXXFLAGS} -Wno-unknown-warning-option" +fi + +if test "$GNU_CC"; then + if test -z "$DEVELOPER_OPTIONS"; then + CFLAGS="$CFLAGS -ffunction-sections -fdata-sections" + CXXFLAGS="$CXXFLAGS -ffunction-sections -fdata-sections" + fi + + CFLAGS="$CFLAGS -fno-math-errno" + CXXFLAGS="$CXXFLAGS -fno-exceptions -fno-math-errno" +fi + +dnl ======================================================== +dnl = Identical Code Folding +dnl ======================================================== + +if test "$GNU_CC" -a "$GCC_USE_GNU_LD" -a -z "$MOZ_DISABLE_ICF" -a -z "$DEVELOPER_OPTIONS"; then + AC_CACHE_CHECK([whether the linker supports Identical Code Folding], + LD_SUPPORTS_ICF, + [echo 'int foo() {return 42;}' \ + 'int bar() {return 42;}' \ + 'int main() {return foo() - bar();}' > conftest.${ac_ext} + # If the linker supports ICF, foo and bar symbols will have + # the same address + if AC_TRY_COMMAND([${CC-cc} -o conftest${ac_exeext} $LDFLAGS -Wl,--icf=safe -ffunction-sections conftest.${ac_ext} $LIBS 1>&2]) && + test -s conftest${ac_exeext} && + $LLVM_OBJDUMP -t conftest${ac_exeext} | awk changequote(<<, >>)'{a[<<$>>6] = <<$>>1} END {if (a["foo"] && (a["foo"] != a["bar"])) { exit 1 }}'changequote([, ]); then + LD_SUPPORTS_ICF=yes + else + LD_SUPPORTS_ICF=no + fi + rm -rf conftest*]) + if test "$LD_SUPPORTS_ICF" = yes; then + _SAVE_LDFLAGS="$LDFLAGS -Wl,--icf=safe" + LDFLAGS="$LDFLAGS -Wl,--icf=safe -Wl,--print-icf-sections" + AC_TRY_LINK([], [], + [LD_PRINT_ICF_SECTIONS=-Wl,--print-icf-sections], + [LD_PRINT_ICF_SECTIONS=]) + AC_SUBST([LD_PRINT_ICF_SECTIONS]) + LDFLAGS="$_SAVE_LDFLAGS" + fi +fi + +dnl ======================================================== +dnl = Automatically remove dead symbols +dnl ======================================================== + +if test "$GNU_CC" -a "$GCC_USE_GNU_LD" -a -z "$DEVELOPER_OPTIONS" -a -z "$MOZ_PROFILE_GENERATE"; then + if test -n "$MOZ_DEBUG_FLAGS"; then + dnl See bug 670659 + AC_CACHE_CHECK([whether removing dead symbols breaks debugging], + GC_SECTIONS_BREAKS_DEBUG_RANGES, + [echo 'int foo() {return 42;}' \ + 'int bar() {return 1;}' \ + 'int main() {return foo();}' > conftest.${ac_ext} + if AC_TRY_COMMAND([${CC-cc} -o conftest.${ac_objext} $CFLAGS $MOZ_DEBUG_FLAGS -c conftest.${ac_ext} 1>&2]) && + AC_TRY_COMMAND([${CC-cc} -o conftest${ac_exeext} $LDFLAGS 
$MOZ_DEBUG_FLAGS -Wl,--gc-sections conftest.${ac_objext} $LIBS 1>&2]) && + test -s conftest${ac_exeext} -a -s conftest.${ac_objext}; then + if test "`$PYTHON3 -m mozbuild.configure.check_debug_ranges conftest.${ac_objext} conftest.${ac_ext}`" = \ + "`$PYTHON3 -m mozbuild.configure.check_debug_ranges conftest${ac_exeext} conftest.${ac_ext}`"; then + GC_SECTIONS_BREAKS_DEBUG_RANGES=no + else + GC_SECTIONS_BREAKS_DEBUG_RANGES=yes + fi + else + dnl We really don't expect to get here, but just in case + GC_SECTIONS_BREAKS_DEBUG_RANGES="no, but it's broken in some other way" + fi + rm -rf conftest*]) + if test "$GC_SECTIONS_BREAKS_DEBUG_RANGES" = no; then + DSO_LDOPTS="$DSO_LDOPTS -Wl,--gc-sections" + fi + else + DSO_LDOPTS="$DSO_LDOPTS -Wl,--gc-sections" + fi +fi + +if test "$GNU_CC$CLANG_CC"; then + MOZ_PROGRAM_LDFLAGS="$MOZ_PROGRAM_LDFLAGS -pie" +fi + +AC_SUBST(MOZ_PROGRAM_LDFLAGS) + +dnl ASan assumes no symbols are being interposed, and when that happens, +dnl it's not happy with it. Unconveniently, since Firefox is exporting +dnl libffi symbols and Gtk+3 pulls system libffi via libwayland-client, +dnl system libffi interposes libffi symbols that ASan assumes are in +dnl libxul, so it barfs about buffer overflows. +dnl Using -Wl,-Bsymbolic ensures no exported symbol can be interposed. +if test -n "$GCC_USE_GNU_LD"; then + case "$LDFLAGS" in + *-fsanitize=address*) + LDFLAGS="$LDFLAGS -Wl,-Bsymbolic" + ;; + esac +fi + +]) diff --git a/build/autoconf/config.guess b/build/autoconf/config.guess new file mode 100755 index 0000000000..e94095c5fb --- /dev/null +++ b/build/autoconf/config.guess @@ -0,0 +1,1687 @@ +#! /bin/sh +# Attempt to guess a canonical system name. +# Copyright 1992-2020 Free Software Foundation, Inc. + +timestamp='2020-07-12' + +# This file is free software; you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, see . +# +# As a special exception to the GNU General Public License, if you +# distribute this file as part of a program that contains a +# configuration script generated by Autoconf, you may include it under +# the same distribution terms that you use for the rest of that +# program. This Exception is an additional permission under section 7 +# of the GNU General Public License, version 3 ("GPLv3"). +# +# Originally written by Per Bothner; maintained since 2000 by Ben Elliston. +# +# You can get the latest version of this script from: +# https://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess +# +# Please send patches to . + + +me=`echo "$0" | sed -e 's,.*/,,'` + +usage="\ +Usage: $0 [OPTION] + +Output the configuration name of the system \`$me' is run on. + +Options: + -h, --help print this help, then exit + -t, --time-stamp print date of last modification, then exit + -v, --version print version number, then exit + +Report bugs and patches to ." + +version="\ +GNU config.guess ($timestamp) + +Originally written by Per Bothner. +Copyright 1992-2020 Free Software Foundation, Inc. 
+ +This is free software; see the source for copying conditions. There is NO +warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." + +help=" +Try \`$me --help' for more information." + +# Parse command line +while test $# -gt 0 ; do + case $1 in + --time-stamp | --time* | -t ) + echo "$timestamp" ; exit ;; + --version | -v ) + echo "$version" ; exit ;; + --help | --h* | -h ) + echo "$usage"; exit ;; + -- ) # Stop option processing + shift; break ;; + - ) # Use stdin as input. + break ;; + -* ) + echo "$me: invalid option $1$help" >&2 + exit 1 ;; + * ) + break ;; + esac +done + +if test $# != 0; then + echo "$me: too many arguments$help" >&2 + exit 1 +fi + +# CC_FOR_BUILD -- compiler used by this script. Note that the use of a +# compiler to aid in system detection is discouraged as it requires +# temporary files to be created and, as you can see below, it is a +# headache to deal with in a portable fashion. + +# Historically, `CC_FOR_BUILD' used to be named `HOST_CC'. We still +# use `HOST_CC' if defined, but it is deprecated. + +# Portable tmp directory creation inspired by the Autoconf team. + +tmp= +# shellcheck disable=SC2172 +trap 'test -z "$tmp" || rm -fr "$tmp"' 0 1 2 13 15 + +set_cc_for_build() { + # prevent multiple calls if $tmp is already set + test "$tmp" && return 0 + : "${TMPDIR=/tmp}" + # shellcheck disable=SC2039 + { tmp=`(umask 077 && mktemp -d "$TMPDIR/cgXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" ; } || + { test -n "$RANDOM" && tmp=$TMPDIR/cg$$-$RANDOM && (umask 077 && mkdir "$tmp" 2>/dev/null) ; } || + { tmp=$TMPDIR/cg-$$ && (umask 077 && mkdir "$tmp" 2>/dev/null) && echo "Warning: creating insecure temp directory" >&2 ; } || + { echo "$me: cannot create a temporary directory in $TMPDIR" >&2 ; exit 1 ; } + dummy=$tmp/dummy + case ${CC_FOR_BUILD-},${HOST_CC-},${CC-} in + ,,) echo "int x;" > "$dummy.c" + for driver in cc gcc c89 c99 ; do + if ($driver -c -o "$dummy.o" "$dummy.c") >/dev/null 2>&1 ; then + CC_FOR_BUILD="$driver" + break + fi + done + if test x"$CC_FOR_BUILD" = x ; then + CC_FOR_BUILD=no_compiler_found + fi + ;; + ,,*) CC_FOR_BUILD=$CC ;; + ,*,*) CC_FOR_BUILD=$HOST_CC ;; + esac +} + +# This is needed to find uname on a Pyramid OSx when run in the BSD universe. +# (ghazi@noc.rutgers.edu 1994-08-24) +if test -f /.attbin/uname ; then + PATH=$PATH:/.attbin ; export PATH +fi + +UNAME_MACHINE=`(uname -m) 2>/dev/null` || UNAME_MACHINE=unknown +UNAME_RELEASE=`(uname -r) 2>/dev/null` || UNAME_RELEASE=unknown +UNAME_SYSTEM=`(uname -s) 2>/dev/null` || UNAME_SYSTEM=unknown +UNAME_VERSION=`(uname -v) 2>/dev/null` || UNAME_VERSION=unknown + +case "$UNAME_SYSTEM" in +Linux|GNU|GNU/*) + # If the system lacks a compiler, then just pick glibc. + # We could probably try harder. + LIBC=gnu + + set_cc_for_build + cat <<-EOF > "$dummy.c" + #include + #if defined(__UCLIBC__) + LIBC=uclibc + #elif defined(__dietlibc__) + LIBC=dietlibc + #else + LIBC=gnu + #endif + EOF + eval "`$CC_FOR_BUILD -E "$dummy.c" 2>/dev/null | grep '^LIBC' | sed 's, ,,g'`" + + # If ldd exists, use it to detect musl libc. + if command -v ldd >/dev/null && \ + ldd --version 2>&1 | grep -q ^musl + then + LIBC=musl + fi + ;; +esac + +# Note: order is significant - the case branches are not exclusive. + +case "$UNAME_MACHINE:$UNAME_SYSTEM:$UNAME_RELEASE:$UNAME_VERSION" in + *:NetBSD:*:*) + # NetBSD (nbsd) targets should (where applicable) match one or + # more of the tuples: *-*-netbsdelf*, *-*-netbsdaout*, + # *-*-netbsdecoff* and *-*-netbsd*. 
For targets that recently + # switched to ELF, *-*-netbsd* would select the old + # object file format. This provides both forward + # compatibility and a consistent mechanism for selecting the + # object file format. + # + # Note: NetBSD doesn't particularly care about the vendor + # portion of the name. We always set it to "unknown". + sysctl="sysctl -n hw.machine_arch" + UNAME_MACHINE_ARCH=`(uname -p 2>/dev/null || \ + "/sbin/$sysctl" 2>/dev/null || \ + "/usr/sbin/$sysctl" 2>/dev/null || \ + echo unknown)` + case "$UNAME_MACHINE_ARCH" in + armeb) machine=armeb-unknown ;; + arm*) machine=arm-unknown ;; + sh3el) machine=shl-unknown ;; + sh3eb) machine=sh-unknown ;; + sh5el) machine=sh5le-unknown ;; + earmv*) + arch=`echo "$UNAME_MACHINE_ARCH" | sed -e 's,^e\(armv[0-9]\).*$,\1,'` + endian=`echo "$UNAME_MACHINE_ARCH" | sed -ne 's,^.*\(eb\)$,\1,p'` + machine="${arch}${endian}"-unknown + ;; + *) machine="$UNAME_MACHINE_ARCH"-unknown ;; + esac + # The Operating System including object format, if it has switched + # to ELF recently (or will in the future) and ABI. + case "$UNAME_MACHINE_ARCH" in + earm*) + os=netbsdelf + ;; + arm*|i386|m68k|ns32k|sh3*|sparc|vax) + set_cc_for_build + if echo __ELF__ | $CC_FOR_BUILD -E - 2>/dev/null \ + | grep -q __ELF__ + then + # Once all utilities can be ECOFF (netbsdecoff) or a.out (netbsdaout). + # Return netbsd for either. FIX? + os=netbsd + else + os=netbsdelf + fi + ;; + *) + os=netbsd + ;; + esac + # Determine ABI tags. + case "$UNAME_MACHINE_ARCH" in + earm*) + expr='s/^earmv[0-9]/-eabi/;s/eb$//' + abi=`echo "$UNAME_MACHINE_ARCH" | sed -e "$expr"` + ;; + esac + # The OS release + # Debian GNU/NetBSD machines have a different userland, and + # thus, need a distinct triplet. However, they do not need + # kernel version information, so it can be replaced with a + # suitable tag, in the style of linux-gnu. + case "$UNAME_VERSION" in + Debian*) + release='-gnu' + ;; + *) + release=`echo "$UNAME_RELEASE" | sed -e 's/[-_].*//' | cut -d. -f1,2` + ;; + esac + # Since CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM: + # contains redundant information, the shorter form: + # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM is used. 
+ echo "$machine-${os}${release}${abi-}" + exit ;; + *:Bitrig:*:*) + UNAME_MACHINE_ARCH=`arch | sed 's/Bitrig.//'` + echo "$UNAME_MACHINE_ARCH"-unknown-bitrig"$UNAME_RELEASE" + exit ;; + *:OpenBSD:*:*) + UNAME_MACHINE_ARCH=`arch | sed 's/OpenBSD.//'` + echo "$UNAME_MACHINE_ARCH"-unknown-openbsd"$UNAME_RELEASE" + exit ;; + *:LibertyBSD:*:*) + UNAME_MACHINE_ARCH=`arch | sed 's/^.*BSD\.//'` + echo "$UNAME_MACHINE_ARCH"-unknown-libertybsd"$UNAME_RELEASE" + exit ;; + *:MidnightBSD:*:*) + echo "$UNAME_MACHINE"-unknown-midnightbsd"$UNAME_RELEASE" + exit ;; + *:ekkoBSD:*:*) + echo "$UNAME_MACHINE"-unknown-ekkobsd"$UNAME_RELEASE" + exit ;; + *:SolidBSD:*:*) + echo "$UNAME_MACHINE"-unknown-solidbsd"$UNAME_RELEASE" + exit ;; + *:OS108:*:*) + echo "$UNAME_MACHINE"-unknown-os108_"$UNAME_RELEASE" + exit ;; + macppc:MirBSD:*:*) + echo powerpc-unknown-mirbsd"$UNAME_RELEASE" + exit ;; + *:MirBSD:*:*) + echo "$UNAME_MACHINE"-unknown-mirbsd"$UNAME_RELEASE" + exit ;; + *:Sortix:*:*) + echo "$UNAME_MACHINE"-unknown-sortix + exit ;; + *:Twizzler:*:*) + echo "$UNAME_MACHINE"-unknown-twizzler + exit ;; + *:Redox:*:*) + echo "$UNAME_MACHINE"-unknown-redox + exit ;; + mips:OSF1:*.*) + echo mips-dec-osf1 + exit ;; + alpha:OSF1:*:*) + case $UNAME_RELEASE in + *4.0) + UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $3}'` + ;; + *5.*) + UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $4}'` + ;; + esac + # According to Compaq, /usr/sbin/psrinfo has been available on + # OSF/1 and Tru64 systems produced since 1995. I hope that + # covers most systems running today. This code pipes the CPU + # types through head -n 1, so we only detect the type of CPU 0. + ALPHA_CPU_TYPE=`/usr/sbin/psrinfo -v | sed -n -e 's/^ The alpha \(.*\) processor.*$/\1/p' | head -n 1` + case "$ALPHA_CPU_TYPE" in + "EV4 (21064)") + UNAME_MACHINE=alpha ;; + "EV4.5 (21064)") + UNAME_MACHINE=alpha ;; + "LCA4 (21066/21068)") + UNAME_MACHINE=alpha ;; + "EV5 (21164)") + UNAME_MACHINE=alphaev5 ;; + "EV5.6 (21164A)") + UNAME_MACHINE=alphaev56 ;; + "EV5.6 (21164PC)") + UNAME_MACHINE=alphapca56 ;; + "EV5.7 (21164PC)") + UNAME_MACHINE=alphapca57 ;; + "EV6 (21264)") + UNAME_MACHINE=alphaev6 ;; + "EV6.7 (21264A)") + UNAME_MACHINE=alphaev67 ;; + "EV6.8CB (21264C)") + UNAME_MACHINE=alphaev68 ;; + "EV6.8AL (21264B)") + UNAME_MACHINE=alphaev68 ;; + "EV6.8CX (21264D)") + UNAME_MACHINE=alphaev68 ;; + "EV6.9A (21264/EV69A)") + UNAME_MACHINE=alphaev69 ;; + "EV7 (21364)") + UNAME_MACHINE=alphaev7 ;; + "EV7.9 (21364A)") + UNAME_MACHINE=alphaev79 ;; + esac + # A Pn.n version is a patched version. + # A Vn.n version is a released version. + # A Tn.n version is a released field test version. + # A Xn.n version is an unreleased experimental baselevel. + # 1.2 uses "1.2" for uname -r. + echo "$UNAME_MACHINE"-dec-osf"`echo "$UNAME_RELEASE" | sed -e 's/^[PVTX]//' | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz`" + # Reset EXIT trap before exiting to avoid spurious non-zero exit code. + exitcode=$? 
+ trap '' 0 + exit $exitcode ;; + Amiga*:UNIX_System_V:4.0:*) + echo m68k-unknown-sysv4 + exit ;; + *:[Aa]miga[Oo][Ss]:*:*) + echo "$UNAME_MACHINE"-unknown-amigaos + exit ;; + *:[Mm]orph[Oo][Ss]:*:*) + echo "$UNAME_MACHINE"-unknown-morphos + exit ;; + *:OS/390:*:*) + echo i370-ibm-openedition + exit ;; + *:z/VM:*:*) + echo s390-ibm-zvmoe + exit ;; + *:OS400:*:*) + echo powerpc-ibm-os400 + exit ;; + arm:RISC*:1.[012]*:*|arm:riscix:1.[012]*:*) + echo arm-acorn-riscix"$UNAME_RELEASE" + exit ;; + arm*:riscos:*:*|arm*:RISCOS:*:*) + echo arm-unknown-riscos + exit ;; + SR2?01:HI-UX/MPP:*:* | SR8000:HI-UX/MPP:*:*) + echo hppa1.1-hitachi-hiuxmpp + exit ;; + Pyramid*:OSx*:*:* | MIS*:OSx*:*:* | MIS*:SMP_DC-OSx*:*:*) + # akee@wpdis03.wpafb.af.mil (Earle F. Ake) contributed MIS and NILE. + if test "`(/bin/universe) 2>/dev/null`" = att ; then + echo pyramid-pyramid-sysv3 + else + echo pyramid-pyramid-bsd + fi + exit ;; + NILE*:*:*:dcosx) + echo pyramid-pyramid-svr4 + exit ;; + DRS?6000:unix:4.0:6*) + echo sparc-icl-nx6 + exit ;; + DRS?6000:UNIX_SV:4.2*:7* | DRS?6000:isis:4.2*:7*) + case `/usr/bin/uname -p` in + sparc) echo sparc-icl-nx7; exit ;; + esac ;; + s390x:SunOS:*:*) + echo "$UNAME_MACHINE"-ibm-solaris2"`echo "$UNAME_RELEASE" | sed -e 's/[^.]*//'`" + exit ;; + sun4H:SunOS:5.*:*) + echo sparc-hal-solaris2"`echo "$UNAME_RELEASE"|sed -e 's/[^.]*//'`" + exit ;; + sun4*:SunOS:5.*:* | tadpole*:SunOS:5.*:*) + echo sparc-sun-solaris2"`echo "$UNAME_RELEASE" | sed -e 's/[^.]*//'`" + exit ;; + i86pc:AuroraUX:5.*:* | i86xen:AuroraUX:5.*:*) + echo i386-pc-auroraux"$UNAME_RELEASE" + exit ;; + i86pc:SunOS:5.*:* | i86xen:SunOS:5.*:*) + set_cc_for_build + SUN_ARCH=i386 + # If there is a compiler, see if it is configured for 64-bit objects. + # Note that the Sun cc does not turn __LP64__ into 1 like gcc does. + # This test works for both compilers. + if [ "$CC_FOR_BUILD" != no_compiler_found ]; then + if (echo '#ifdef __amd64'; echo IS_64BIT_ARCH; echo '#endif') | \ + (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \ + grep IS_64BIT_ARCH >/dev/null + then + SUN_ARCH=x86_64 + fi + fi + echo "$SUN_ARCH"-pc-solaris2"`echo "$UNAME_RELEASE"|sed -e 's/[^.]*//'`" + exit ;; + sun4*:SunOS:6*:*) + # According to config.sub, this is the proper way to canonicalize + # SunOS6. Hard to guess exactly what SunOS6 will be like, but + # it's likely to be more like Solaris than SunOS4. + echo sparc-sun-solaris3"`echo "$UNAME_RELEASE"|sed -e 's/[^.]*//'`" + exit ;; + sun4*:SunOS:*:*) + case "`/usr/bin/arch -k`" in + Series*|S4*) + UNAME_RELEASE=`uname -v` + ;; + esac + # Japanese Language versions have a version number like `4.1.3-JL'. + echo sparc-sun-sunos"`echo "$UNAME_RELEASE"|sed -e 's/-/_/'`" + exit ;; + sun3*:SunOS:*:*) + echo m68k-sun-sunos"$UNAME_RELEASE" + exit ;; + sun*:*:4.2BSD:*) + UNAME_RELEASE=`(sed 1q /etc/motd | awk '{print substr($5,1,3)}') 2>/dev/null` + test "x$UNAME_RELEASE" = x && UNAME_RELEASE=3 + case "`/bin/arch`" in + sun3) + echo m68k-sun-sunos"$UNAME_RELEASE" + ;; + sun4) + echo sparc-sun-sunos"$UNAME_RELEASE" + ;; + esac + exit ;; + aushp:SunOS:*:*) + echo sparc-auspex-sunos"$UNAME_RELEASE" + exit ;; + # The situation for MiNT is a little confusing. The machine name + # can be virtually everything (everything which is not + # "atarist" or "atariste" at least should have a processor + # > m68000). The system name ranges from "MiNT" over "FreeMiNT" + # to the lowercase version "mint" (or "freemint"). Finally + # the system name "TOS" denotes a system which is actually not + # MiNT. 
But MiNT is downward compatible to TOS, so this should + # be no problem. + atarist[e]:*MiNT:*:* | atarist[e]:*mint:*:* | atarist[e]:*TOS:*:*) + echo m68k-atari-mint"$UNAME_RELEASE" + exit ;; + atari*:*MiNT:*:* | atari*:*mint:*:* | atarist[e]:*TOS:*:*) + echo m68k-atari-mint"$UNAME_RELEASE" + exit ;; + *falcon*:*MiNT:*:* | *falcon*:*mint:*:* | *falcon*:*TOS:*:*) + echo m68k-atari-mint"$UNAME_RELEASE" + exit ;; + milan*:*MiNT:*:* | milan*:*mint:*:* | *milan*:*TOS:*:*) + echo m68k-milan-mint"$UNAME_RELEASE" + exit ;; + hades*:*MiNT:*:* | hades*:*mint:*:* | *hades*:*TOS:*:*) + echo m68k-hades-mint"$UNAME_RELEASE" + exit ;; + *:*MiNT:*:* | *:*mint:*:* | *:*TOS:*:*) + echo m68k-unknown-mint"$UNAME_RELEASE" + exit ;; + m68k:machten:*:*) + echo m68k-apple-machten"$UNAME_RELEASE" + exit ;; + powerpc:machten:*:*) + echo powerpc-apple-machten"$UNAME_RELEASE" + exit ;; + RISC*:Mach:*:*) + echo mips-dec-mach_bsd4.3 + exit ;; + RISC*:ULTRIX:*:*) + echo mips-dec-ultrix"$UNAME_RELEASE" + exit ;; + VAX*:ULTRIX*:*:*) + echo vax-dec-ultrix"$UNAME_RELEASE" + exit ;; + 2020:CLIX:*:* | 2430:CLIX:*:*) + echo clipper-intergraph-clix"$UNAME_RELEASE" + exit ;; + mips:*:*:UMIPS | mips:*:*:RISCos) + set_cc_for_build + sed 's/^ //' << EOF > "$dummy.c" +#ifdef __cplusplus +#include /* for printf() prototype */ + int main (int argc, char *argv[]) { +#else + int main (argc, argv) int argc; char *argv[]; { +#endif + #if defined (host_mips) && defined (MIPSEB) + #if defined (SYSTYPE_SYSV) + printf ("mips-mips-riscos%ssysv\\n", argv[1]); exit (0); + #endif + #if defined (SYSTYPE_SVR4) + printf ("mips-mips-riscos%ssvr4\\n", argv[1]); exit (0); + #endif + #if defined (SYSTYPE_BSD43) || defined(SYSTYPE_BSD) + printf ("mips-mips-riscos%sbsd\\n", argv[1]); exit (0); + #endif + #endif + exit (-1); + } +EOF + $CC_FOR_BUILD -o "$dummy" "$dummy.c" && + dummyarg=`echo "$UNAME_RELEASE" | sed -n 's/\([0-9]*\).*/\1/p'` && + SYSTEM_NAME=`"$dummy" "$dummyarg"` && + { echo "$SYSTEM_NAME"; exit; } + echo mips-mips-riscos"$UNAME_RELEASE" + exit ;; + Motorola:PowerMAX_OS:*:*) + echo powerpc-motorola-powermax + exit ;; + Motorola:*:4.3:PL8-*) + echo powerpc-harris-powermax + exit ;; + Night_Hawk:*:*:PowerMAX_OS | Synergy:PowerMAX_OS:*:*) + echo powerpc-harris-powermax + exit ;; + Night_Hawk:Power_UNIX:*:*) + echo powerpc-harris-powerunix + exit ;; + m88k:CX/UX:7*:*) + echo m88k-harris-cxux7 + exit ;; + m88k:*:4*:R4*) + echo m88k-motorola-sysv4 + exit ;; + m88k:*:3*:R3*) + echo m88k-motorola-sysv3 + exit ;; + AViiON:dgux:*:*) + # DG/UX returns AViiON for all architectures + UNAME_PROCESSOR=`/usr/bin/uname -p` + if [ "$UNAME_PROCESSOR" = mc88100 ] || [ "$UNAME_PROCESSOR" = mc88110 ] + then + if [ "$TARGET_BINARY_INTERFACE"x = m88kdguxelfx ] || \ + [ "$TARGET_BINARY_INTERFACE"x = x ] + then + echo m88k-dg-dgux"$UNAME_RELEASE" + else + echo m88k-dg-dguxbcs"$UNAME_RELEASE" + fi + else + echo i586-dg-dgux"$UNAME_RELEASE" + fi + exit ;; + M88*:DolphinOS:*:*) # DolphinOS (SVR3) + echo m88k-dolphin-sysv3 + exit ;; + M88*:*:R3*:*) + # Delta 88k system running SVR3 + echo m88k-motorola-sysv3 + exit ;; + XD88*:*:*:*) # Tektronix XD88 system running UTekV (SVR3) + echo m88k-tektronix-sysv3 + exit ;; + Tek43[0-9][0-9]:UTek:*:*) # Tektronix 4300 system running UTek (BSD) + echo m68k-tektronix-bsd + exit ;; + *:IRIX*:*:*) + echo mips-sgi-irix"`echo "$UNAME_RELEASE"|sed -e 's/-/_/g'`" + exit ;; + ????????:AIX?:[12].1:2) # AIX 2.2.1 or AIX 2.1.1 is RT/PC AIX. 
+ echo romp-ibm-aix # uname -m gives an 8 hex-code CPU id + exit ;; # Note that: echo "'`uname -s`'" gives 'AIX ' + i*86:AIX:*:*) + echo i386-ibm-aix + exit ;; + ia64:AIX:*:*) + if [ -x /usr/bin/oslevel ] ; then + IBM_REV=`/usr/bin/oslevel` + else + IBM_REV="$UNAME_VERSION.$UNAME_RELEASE" + fi + echo "$UNAME_MACHINE"-ibm-aix"$IBM_REV" + exit ;; + *:AIX:2:3) + if grep bos325 /usr/include/stdio.h >/dev/null 2>&1; then + set_cc_for_build + sed 's/^ //' << EOF > "$dummy.c" + #include + + main() + { + if (!__power_pc()) + exit(1); + puts("powerpc-ibm-aix3.2.5"); + exit(0); + } +EOF + if $CC_FOR_BUILD -o "$dummy" "$dummy.c" && SYSTEM_NAME=`"$dummy"` + then + echo "$SYSTEM_NAME" + else + echo rs6000-ibm-aix3.2.5 + fi + elif grep bos324 /usr/include/stdio.h >/dev/null 2>&1; then + echo rs6000-ibm-aix3.2.4 + else + echo rs6000-ibm-aix3.2 + fi + exit ;; + *:AIX:*:[4567]) + IBM_CPU_ID=`/usr/sbin/lsdev -C -c processor -S available | sed 1q | awk '{ print $1 }'` + if /usr/sbin/lsattr -El "$IBM_CPU_ID" | grep ' POWER' >/dev/null 2>&1; then + IBM_ARCH=rs6000 + else + IBM_ARCH=powerpc + fi + if [ -x /usr/bin/lslpp ] ; then + IBM_REV=`/usr/bin/lslpp -Lqc bos.rte.libc | + awk -F: '{ print $3 }' | sed s/[0-9]*$/0/` + else + IBM_REV="$UNAME_VERSION.$UNAME_RELEASE" + fi + echo "$IBM_ARCH"-ibm-aix"$IBM_REV" + exit ;; + *:AIX:*:*) + echo rs6000-ibm-aix + exit ;; + ibmrt:4.4BSD:*|romp-ibm:4.4BSD:*) + echo romp-ibm-bsd4.4 + exit ;; + ibmrt:*BSD:*|romp-ibm:BSD:*) # covers RT/PC BSD and + echo romp-ibm-bsd"$UNAME_RELEASE" # 4.3 with uname added to + exit ;; # report: romp-ibm BSD 4.3 + *:BOSX:*:*) + echo rs6000-bull-bosx + exit ;; + DPX/2?00:B.O.S.:*:*) + echo m68k-bull-sysv3 + exit ;; + 9000/[34]??:4.3bsd:1.*:*) + echo m68k-hp-bsd + exit ;; + hp300:4.4BSD:*:* | 9000/[34]??:4.3bsd:2.*:*) + echo m68k-hp-bsd4.4 + exit ;; + 9000/[34678]??:HP-UX:*:*) + HPUX_REV=`echo "$UNAME_RELEASE"|sed -e 's/[^.]*.[0B]*//'` + case "$UNAME_MACHINE" in + 9000/31?) HP_ARCH=m68000 ;; + 9000/[34]??) HP_ARCH=m68k ;; + 9000/[678][0-9][0-9]) + if [ -x /usr/bin/getconf ]; then + sc_cpu_version=`/usr/bin/getconf SC_CPU_VERSION 2>/dev/null` + sc_kernel_bits=`/usr/bin/getconf SC_KERNEL_BITS 2>/dev/null` + case "$sc_cpu_version" in + 523) HP_ARCH=hppa1.0 ;; # CPU_PA_RISC1_0 + 528) HP_ARCH=hppa1.1 ;; # CPU_PA_RISC1_1 + 532) # CPU_PA_RISC2_0 + case "$sc_kernel_bits" in + 32) HP_ARCH=hppa2.0n ;; + 64) HP_ARCH=hppa2.0w ;; + '') HP_ARCH=hppa2.0 ;; # HP-UX 10.20 + esac ;; + esac + fi + if [ "$HP_ARCH" = "" ]; then + set_cc_for_build + sed 's/^ //' << EOF > "$dummy.c" + + #define _HPUX_SOURCE + #include + #include + + int main () + { + #if defined(_SC_KERNEL_BITS) + long bits = sysconf(_SC_KERNEL_BITS); + #endif + long cpu = sysconf (_SC_CPU_VERSION); + + switch (cpu) + { + case CPU_PA_RISC1_0: puts ("hppa1.0"); break; + case CPU_PA_RISC1_1: puts ("hppa1.1"); break; + case CPU_PA_RISC2_0: + #if defined(_SC_KERNEL_BITS) + switch (bits) + { + case 64: puts ("hppa2.0w"); break; + case 32: puts ("hppa2.0n"); break; + default: puts ("hppa2.0"); break; + } break; + #else /* !defined(_SC_KERNEL_BITS) */ + puts ("hppa2.0"); break; + #endif + default: puts ("hppa1.0"); break; + } + exit (0); + } +EOF + (CCOPTS="" $CC_FOR_BUILD -o "$dummy" "$dummy.c" 2>/dev/null) && HP_ARCH=`"$dummy"` + test -z "$HP_ARCH" && HP_ARCH=hppa + fi ;; + esac + if [ "$HP_ARCH" = hppa2.0w ] + then + set_cc_for_build + + # hppa2.0w-hp-hpux* has a 64-bit kernel and a compiler generating + # 32-bit code. hppa64-hp-hpux* has the same kernel and a compiler + # generating 64-bit code. 
GNU and HP use different nomenclature: + # + # $ CC_FOR_BUILD=cc ./config.guess + # => hppa2.0w-hp-hpux11.23 + # $ CC_FOR_BUILD="cc +DA2.0w" ./config.guess + # => hppa64-hp-hpux11.23 + + if echo __LP64__ | (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | + grep -q __LP64__ + then + HP_ARCH=hppa2.0w + else + HP_ARCH=hppa64 + fi + fi + echo "$HP_ARCH"-hp-hpux"$HPUX_REV" + exit ;; + ia64:HP-UX:*:*) + HPUX_REV=`echo "$UNAME_RELEASE"|sed -e 's/[^.]*.[0B]*//'` + echo ia64-hp-hpux"$HPUX_REV" + exit ;; + 3050*:HI-UX:*:*) + set_cc_for_build + sed 's/^ //' << EOF > "$dummy.c" + #include + int + main () + { + long cpu = sysconf (_SC_CPU_VERSION); + /* The order matters, because CPU_IS_HP_MC68K erroneously returns + true for CPU_PA_RISC1_0. CPU_IS_PA_RISC returns correct + results, however. */ + if (CPU_IS_PA_RISC (cpu)) + { + switch (cpu) + { + case CPU_PA_RISC1_0: puts ("hppa1.0-hitachi-hiuxwe2"); break; + case CPU_PA_RISC1_1: puts ("hppa1.1-hitachi-hiuxwe2"); break; + case CPU_PA_RISC2_0: puts ("hppa2.0-hitachi-hiuxwe2"); break; + default: puts ("hppa-hitachi-hiuxwe2"); break; + } + } + else if (CPU_IS_HP_MC68K (cpu)) + puts ("m68k-hitachi-hiuxwe2"); + else puts ("unknown-hitachi-hiuxwe2"); + exit (0); + } +EOF + $CC_FOR_BUILD -o "$dummy" "$dummy.c" && SYSTEM_NAME=`"$dummy"` && + { echo "$SYSTEM_NAME"; exit; } + echo unknown-hitachi-hiuxwe2 + exit ;; + 9000/7??:4.3bsd:*:* | 9000/8?[79]:4.3bsd:*:*) + echo hppa1.1-hp-bsd + exit ;; + 9000/8??:4.3bsd:*:*) + echo hppa1.0-hp-bsd + exit ;; + *9??*:MPE/iX:*:* | *3000*:MPE/iX:*:*) + echo hppa1.0-hp-mpeix + exit ;; + hp7??:OSF1:*:* | hp8?[79]:OSF1:*:*) + echo hppa1.1-hp-osf + exit ;; + hp8??:OSF1:*:*) + echo hppa1.0-hp-osf + exit ;; + i*86:OSF1:*:*) + if [ -x /usr/sbin/sysversion ] ; then + echo "$UNAME_MACHINE"-unknown-osf1mk + else + echo "$UNAME_MACHINE"-unknown-osf1 + fi + exit ;; + parisc*:Lites*:*:*) + echo hppa1.1-hp-lites + exit ;; + C1*:ConvexOS:*:* | convex:ConvexOS:C1*:*) + echo c1-convex-bsd + exit ;; + C2*:ConvexOS:*:* | convex:ConvexOS:C2*:*) + if getsysinfo -f scalar_acc + then echo c32-convex-bsd + else echo c2-convex-bsd + fi + exit ;; + C34*:ConvexOS:*:* | convex:ConvexOS:C34*:*) + echo c34-convex-bsd + exit ;; + C38*:ConvexOS:*:* | convex:ConvexOS:C38*:*) + echo c38-convex-bsd + exit ;; + C4*:ConvexOS:*:* | convex:ConvexOS:C4*:*) + echo c4-convex-bsd + exit ;; + CRAY*Y-MP:*:*:*) + echo ymp-cray-unicos"$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/' + exit ;; + CRAY*[A-Z]90:*:*:*) + echo "$UNAME_MACHINE"-cray-unicos"$UNAME_RELEASE" \ + | sed -e 's/CRAY.*\([A-Z]90\)/\1/' \ + -e y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/ \ + -e 's/\.[^.]*$/.X/' + exit ;; + CRAY*TS:*:*:*) + echo t90-cray-unicos"$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/' + exit ;; + CRAY*T3E:*:*:*) + echo alphaev5-cray-unicosmk"$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/' + exit ;; + CRAY*SV1:*:*:*) + echo sv1-cray-unicos"$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/' + exit ;; + *:UNICOS/mp:*:*) + echo craynv-cray-unicosmp"$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/' + exit ;; + F30[01]:UNIX_System_V:*:* | F700:UNIX_System_V:*:*) + FUJITSU_PROC=`uname -m | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz` + FUJITSU_SYS=`uname -p | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/\///'` + FUJITSU_REL=`echo "$UNAME_RELEASE" | sed -e 's/ /_/'` + echo "${FUJITSU_PROC}-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}" + exit ;; + 5000:UNIX_System_V:4.*:*) + FUJITSU_SYS=`uname -p | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/\///'` + 
FUJITSU_REL=`echo "$UNAME_RELEASE" | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/ /_/'` + echo "sparc-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}" + exit ;; + i*86:BSD/386:*:* | i*86:BSD/OS:*:* | *:Ascend\ Embedded/OS:*:*) + echo "$UNAME_MACHINE"-pc-bsdi"$UNAME_RELEASE" + exit ;; + sparc*:BSD/OS:*:*) + echo sparc-unknown-bsdi"$UNAME_RELEASE" + exit ;; + *:BSD/OS:*:*) + echo "$UNAME_MACHINE"-unknown-bsdi"$UNAME_RELEASE" + exit ;; + arm:FreeBSD:*:*) + UNAME_PROCESSOR=`uname -p` + set_cc_for_build + if echo __ARM_PCS_VFP | $CC_FOR_BUILD -E - 2>/dev/null \ + | grep -q __ARM_PCS_VFP + then + echo "${UNAME_PROCESSOR}"-unknown-freebsd"`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`"-gnueabi + else + echo "${UNAME_PROCESSOR}"-unknown-freebsd"`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`"-gnueabihf + fi + exit ;; + *:FreeBSD:*:*) + UNAME_PROCESSOR=`/usr/bin/uname -p` + case "$UNAME_PROCESSOR" in + amd64) + UNAME_PROCESSOR=x86_64 ;; + i386) + UNAME_PROCESSOR=i586 ;; + esac + echo "$UNAME_PROCESSOR"-unknown-freebsd"`echo "$UNAME_RELEASE"|sed -e 's/[-(].*//'`" + exit ;; + i*:CYGWIN*:*) + echo "$UNAME_MACHINE"-pc-cygwin + exit ;; + *:MINGW64*:*) + echo "$UNAME_MACHINE"-pc-mingw64 + exit ;; + *:MINGW*:*) + echo "$UNAME_MACHINE"-pc-mingw32 + exit ;; + *:MSYS*:*) + echo "$UNAME_MACHINE"-pc-msys + exit ;; + i*:PW*:*) + echo "$UNAME_MACHINE"-pc-pw32 + exit ;; + *:Interix*:*) + case "$UNAME_MACHINE" in + x86) + echo i586-pc-interix"$UNAME_RELEASE" + exit ;; + authenticamd | genuineintel | EM64T) + echo x86_64-unknown-interix"$UNAME_RELEASE" + exit ;; + IA64) + echo ia64-unknown-interix"$UNAME_RELEASE" + exit ;; + esac ;; + i*:UWIN*:*) + echo "$UNAME_MACHINE"-pc-uwin + exit ;; + amd64:CYGWIN*:*:* | x86_64:CYGWIN*:*:*) + echo x86_64-pc-cygwin + exit ;; + prep*:SunOS:5.*:*) + echo powerpcle-unknown-solaris2"`echo "$UNAME_RELEASE"|sed -e 's/[^.]*//'`" + exit ;; + *:GNU:*:*) + # the GNU system + echo "`echo "$UNAME_MACHINE"|sed -e 's,[-/].*$,,'`-unknown-$LIBC`echo "$UNAME_RELEASE"|sed -e 's,/.*$,,'`" + exit ;; + *:GNU/*:*:*) + # other systems with GNU libc and userland + echo "$UNAME_MACHINE-unknown-`echo "$UNAME_SYSTEM" | sed 's,^[^/]*/,,' | tr "[:upper:]" "[:lower:]"``echo "$UNAME_RELEASE"|sed -e 's/[-(].*//'`-$LIBC" + exit ;; + *:Minix:*:*) + echo "$UNAME_MACHINE"-unknown-minix + exit ;; + aarch64:Linux:*:*) + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" + exit ;; + aarch64_be:Linux:*:*) + UNAME_MACHINE=aarch64_be + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" + exit ;; + alpha:Linux:*:*) + case `sed -n '/^cpu model/s/^.*: \(.*\)/\1/p' /proc/cpuinfo 2>/dev/null` in + EV5) UNAME_MACHINE=alphaev5 ;; + EV56) UNAME_MACHINE=alphaev56 ;; + PCA56) UNAME_MACHINE=alphapca56 ;; + PCA57) UNAME_MACHINE=alphapca56 ;; + EV6) UNAME_MACHINE=alphaev6 ;; + EV67) UNAME_MACHINE=alphaev67 ;; + EV68*) UNAME_MACHINE=alphaev68 ;; + esac + objdump --private-headers /bin/sh | grep -q ld.so.1 + if test "$?" 
= 0 ; then LIBC=gnulibc1 ; fi + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" + exit ;; + arc:Linux:*:* | arceb:Linux:*:*) + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" + exit ;; + arm*:Linux:*:*) + set_cc_for_build + if echo __ARM_EABI__ | $CC_FOR_BUILD -E - 2>/dev/null \ + | grep -q __ARM_EABI__ + then + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" + else + if echo __ARM_PCS_VFP | $CC_FOR_BUILD -E - 2>/dev/null \ + | grep -q __ARM_PCS_VFP + then + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"eabi + else + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"eabihf + fi + fi + exit ;; + avr32*:Linux:*:*) + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" + exit ;; + cris:Linux:*:*) + echo "$UNAME_MACHINE"-axis-linux-"$LIBC" + exit ;; + crisv32:Linux:*:*) + echo "$UNAME_MACHINE"-axis-linux-"$LIBC" + exit ;; + e2k:Linux:*:*) + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" + exit ;; + frv:Linux:*:*) + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" + exit ;; + hexagon:Linux:*:*) + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" + exit ;; + i*86:Linux:*:*) + echo "$UNAME_MACHINE"-pc-linux-"$LIBC" + exit ;; + ia64:Linux:*:*) + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" + exit ;; + k1om:Linux:*:*) + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" + exit ;; + m32r*:Linux:*:*) + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" + exit ;; + m68*:Linux:*:*) + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" + exit ;; + mips:Linux:*:* | mips64:Linux:*:*) + set_cc_for_build + IS_GLIBC=0 + test x"${LIBC}" = xgnu && IS_GLIBC=1 + sed 's/^ //' << EOF > "$dummy.c" + #undef CPU + #undef mips + #undef mipsel + #undef mips64 + #undef mips64el + #if ${IS_GLIBC} && defined(_ABI64) + LIBCABI=gnuabi64 + #else + #if ${IS_GLIBC} && defined(_ABIN32) + LIBCABI=gnuabin32 + #else + LIBCABI=${LIBC} + #endif + #endif + + #if ${IS_GLIBC} && defined(__mips64) && defined(__mips_isa_rev) && __mips_isa_rev>=6 + CPU=mipsisa64r6 + #else + #if ${IS_GLIBC} && !defined(__mips64) && defined(__mips_isa_rev) && __mips_isa_rev>=6 + CPU=mipsisa32r6 + #else + #if defined(__mips64) + CPU=mips64 + #else + CPU=mips + #endif + #endif + #endif + + #if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL) + MIPS_ENDIAN=el + #else + #if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB) + MIPS_ENDIAN= + #else + MIPS_ENDIAN= + #endif + #endif +EOF + eval "`$CC_FOR_BUILD -E "$dummy.c" 2>/dev/null | grep '^CPU\|^MIPS_ENDIAN\|^LIBCABI'`" + test "x$CPU" != x && { echo "$CPU${MIPS_ENDIAN}-unknown-linux-$LIBCABI"; exit; } + ;; + mips64el:Linux:*:*) + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" + exit ;; + openrisc*:Linux:*:*) + echo or1k-unknown-linux-"$LIBC" + exit ;; + or32:Linux:*:* | or1k*:Linux:*:*) + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" + exit ;; + padre:Linux:*:*) + echo sparc-unknown-linux-"$LIBC" + exit ;; + parisc64:Linux:*:* | hppa64:Linux:*:*) + echo hppa64-unknown-linux-"$LIBC" + exit ;; + parisc:Linux:*:* | hppa:Linux:*:*) + # Look for CPU level + case `grep '^cpu[^a-z]*:' /proc/cpuinfo 2>/dev/null | cut -d' ' -f2` in + PA7*) echo hppa1.1-unknown-linux-"$LIBC" ;; + PA8*) echo hppa2.0-unknown-linux-"$LIBC" ;; + *) echo hppa-unknown-linux-"$LIBC" ;; + esac + exit ;; + ppc64:Linux:*:*) + echo powerpc64-unknown-linux-"$LIBC" + exit ;; + ppc:Linux:*:*) + echo powerpc-unknown-linux-"$LIBC" + exit ;; + ppc64le:Linux:*:*) + echo powerpc64le-unknown-linux-"$LIBC" + exit ;; + ppcle:Linux:*:*) + echo powerpcle-unknown-linux-"$LIBC" + exit ;; + riscv32:Linux:*:* | riscv64:Linux:*:*) + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" + 
exit ;; + s390:Linux:*:* | s390x:Linux:*:*) + echo "$UNAME_MACHINE"-ibm-linux-"$LIBC" + exit ;; + sh64*:Linux:*:*) + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" + exit ;; + sh*:Linux:*:*) + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" + exit ;; + sparc:Linux:*:* | sparc64:Linux:*:*) + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" + exit ;; + tile*:Linux:*:*) + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" + exit ;; + vax:Linux:*:*) + echo "$UNAME_MACHINE"-dec-linux-"$LIBC" + exit ;; + x86_64:Linux:*:*) + set_cc_for_build + LIBCABI=$LIBC + if [ "$CC_FOR_BUILD" != no_compiler_found ]; then + if (echo '#ifdef __ILP32__'; echo IS_X32; echo '#endif') | \ + (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \ + grep IS_X32 >/dev/null + then + LIBCABI="$LIBC"x32 + fi + fi + echo "$UNAME_MACHINE"-pc-linux-"$LIBCABI" + exit ;; + xtensa*:Linux:*:*) + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" + exit ;; + i*86:DYNIX/ptx:4*:*) + # ptx 4.0 does uname -s correctly, with DYNIX/ptx in there. + # earlier versions are messed up and put the nodename in both + # sysname and nodename. + echo i386-sequent-sysv4 + exit ;; + i*86:UNIX_SV:4.2MP:2.*) + # Unixware is an offshoot of SVR4, but it has its own version + # number series starting with 2... + # I am not positive that other SVR4 systems won't match this, + # I just have to hope. -- rms. + # Use sysv4.2uw... so that sysv4* matches it. + echo "$UNAME_MACHINE"-pc-sysv4.2uw"$UNAME_VERSION" + exit ;; + i*86:OS/2:*:*) + # If we were able to find `uname', then EMX Unix compatibility + # is probably installed. + echo "$UNAME_MACHINE"-pc-os2-emx + exit ;; + i*86:XTS-300:*:STOP) + echo "$UNAME_MACHINE"-unknown-stop + exit ;; + i*86:atheos:*:*) + echo "$UNAME_MACHINE"-unknown-atheos + exit ;; + i*86:syllable:*:*) + echo "$UNAME_MACHINE"-pc-syllable + exit ;; + i*86:LynxOS:2.*:* | i*86:LynxOS:3.[01]*:* | i*86:LynxOS:4.[02]*:*) + echo i386-unknown-lynxos"$UNAME_RELEASE" + exit ;; + i*86:*DOS:*:*) + echo "$UNAME_MACHINE"-pc-msdosdjgpp + exit ;; + i*86:*:4.*:*) + UNAME_REL=`echo "$UNAME_RELEASE" | sed 's/\/MP$//'` + if grep Novell /usr/include/link.h >/dev/null 2>/dev/null; then + echo "$UNAME_MACHINE"-univel-sysv"$UNAME_REL" + else + echo "$UNAME_MACHINE"-pc-sysv"$UNAME_REL" + fi + exit ;; + i*86:*:5:[678]*) + # UnixWare 7.x, OpenUNIX and OpenServer 6. + case `/bin/uname -X | grep "^Machine"` in + *486*) UNAME_MACHINE=i486 ;; + *Pentium) UNAME_MACHINE=i586 ;; + *Pent*|*Celeron) UNAME_MACHINE=i686 ;; + esac + echo "$UNAME_MACHINE-unknown-sysv${UNAME_RELEASE}${UNAME_SYSTEM}${UNAME_VERSION}" + exit ;; + i*86:*:3.2:*) + if test -f /usr/options/cb.name; then + UNAME_REL=`sed -n 's/.*Version //p' /dev/null >/dev/null ; then + UNAME_REL=`(/bin/uname -X|grep Release|sed -e 's/.*= //')` + (/bin/uname -X|grep i80486 >/dev/null) && UNAME_MACHINE=i486 + (/bin/uname -X|grep '^Machine.*Pentium' >/dev/null) \ + && UNAME_MACHINE=i586 + (/bin/uname -X|grep '^Machine.*Pent *II' >/dev/null) \ + && UNAME_MACHINE=i686 + (/bin/uname -X|grep '^Machine.*Pentium Pro' >/dev/null) \ + && UNAME_MACHINE=i686 + echo "$UNAME_MACHINE"-pc-sco"$UNAME_REL" + else + echo "$UNAME_MACHINE"-pc-sysv32 + fi + exit ;; + pc:*:*:*) + # Left here for compatibility: + # uname -m prints for DJGPP always 'pc', but it prints nothing about + # the processor, so we play safe by assuming i586. + # Note: whatever this is, it MUST be the same as what config.sub + # prints for the "djgpp" host, or else GDB configure will decide that + # this is a cross-build. 
+ echo i586-pc-msdosdjgpp + exit ;; + Intel:Mach:3*:*) + echo i386-pc-mach3 + exit ;; + paragon:*:*:*) + echo i860-intel-osf1 + exit ;; + i860:*:4.*:*) # i860-SVR4 + if grep Stardent /usr/include/sys/uadmin.h >/dev/null 2>&1 ; then + echo i860-stardent-sysv"$UNAME_RELEASE" # Stardent Vistra i860-SVR4 + else # Add other i860-SVR4 vendors below as they are discovered. + echo i860-unknown-sysv"$UNAME_RELEASE" # Unknown i860-SVR4 + fi + exit ;; + mini*:CTIX:SYS*5:*) + # "miniframe" + echo m68010-convergent-sysv + exit ;; + mc68k:UNIX:SYSTEM5:3.51m) + echo m68k-convergent-sysv + exit ;; + M680?0:D-NIX:5.3:*) + echo m68k-diab-dnix + exit ;; + M68*:*:R3V[5678]*:*) + test -r /sysV68 && { echo 'm68k-motorola-sysv'; exit; } ;; + 3[345]??:*:4.0:3.0 | 3[34]??A:*:4.0:3.0 | 3[34]??,*:*:4.0:3.0 | 3[34]??/*:*:4.0:3.0 | 4400:*:4.0:3.0 | 4850:*:4.0:3.0 | SKA40:*:4.0:3.0 | SDS2:*:4.0:3.0 | SHG2:*:4.0:3.0 | S7501*:*:4.0:3.0) + OS_REL='' + test -r /etc/.relid \ + && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid` + /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ + && { echo i486-ncr-sysv4.3"$OS_REL"; exit; } + /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \ + && { echo i586-ncr-sysv4.3"$OS_REL"; exit; } ;; + 3[34]??:*:4.0:* | 3[34]??,*:*:4.0:*) + /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ + && { echo i486-ncr-sysv4; exit; } ;; + NCR*:*:4.2:* | MPRAS*:*:4.2:*) + OS_REL='.3' + test -r /etc/.relid \ + && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid` + /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ + && { echo i486-ncr-sysv4.3"$OS_REL"; exit; } + /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \ + && { echo i586-ncr-sysv4.3"$OS_REL"; exit; } + /bin/uname -p 2>/dev/null | /bin/grep pteron >/dev/null \ + && { echo i586-ncr-sysv4.3"$OS_REL"; exit; } ;; + m68*:LynxOS:2.*:* | m68*:LynxOS:3.0*:*) + echo m68k-unknown-lynxos"$UNAME_RELEASE" + exit ;; + mc68030:UNIX_System_V:4.*:*) + echo m68k-atari-sysv4 + exit ;; + TSUNAMI:LynxOS:2.*:*) + echo sparc-unknown-lynxos"$UNAME_RELEASE" + exit ;; + rs6000:LynxOS:2.*:*) + echo rs6000-unknown-lynxos"$UNAME_RELEASE" + exit ;; + PowerPC:LynxOS:2.*:* | PowerPC:LynxOS:3.[01]*:* | PowerPC:LynxOS:4.[02]*:*) + echo powerpc-unknown-lynxos"$UNAME_RELEASE" + exit ;; + SM[BE]S:UNIX_SV:*:*) + echo mips-dde-sysv"$UNAME_RELEASE" + exit ;; + RM*:ReliantUNIX-*:*:*) + echo mips-sni-sysv4 + exit ;; + RM*:SINIX-*:*:*) + echo mips-sni-sysv4 + exit ;; + *:SINIX-*:*:*) + if uname -p 2>/dev/null >/dev/null ; then + UNAME_MACHINE=`(uname -p) 2>/dev/null` + echo "$UNAME_MACHINE"-sni-sysv4 + else + echo ns32k-sni-sysv + fi + exit ;; + PENTIUM:*:4.0*:*) # Unisys `ClearPath HMP IX 4000' SVR4/MP effort + # says + echo i586-unisys-sysv4 + exit ;; + *:UNIX_System_V:4*:FTX*) + # From Gerald Hewes . + # How about differentiating between stratus architectures? -djm + echo hppa1.1-stratus-sysv4 + exit ;; + *:*:*:FTX*) + # From seanf@swdc.stratus.com. + echo i860-stratus-sysv4 + exit ;; + i*86:VOS:*:*) + # From Paul.Green@stratus.com. + echo "$UNAME_MACHINE"-stratus-vos + exit ;; + *:VOS:*:*) + # From Paul.Green@stratus.com. + echo hppa1.1-stratus-vos + exit ;; + mc68*:A/UX:*:*) + echo m68k-apple-aux"$UNAME_RELEASE" + exit ;; + news*:NEWS-OS:6*:*) + echo mips-sony-newsos6 + exit ;; + R[34]000:*System_V*:*:* | R4000:UNIX_SYSV:*:* | R*000:UNIX_SV:*:*) + if [ -d /usr/nec ]; then + echo mips-nec-sysv"$UNAME_RELEASE" + else + echo mips-unknown-sysv"$UNAME_RELEASE" + fi + exit ;; + BeBox:BeOS:*:*) # BeOS running on hardware made by Be, PPC only. 
+ echo powerpc-be-beos + exit ;; + BeMac:BeOS:*:*) # BeOS running on Mac or Mac clone, PPC only. + echo powerpc-apple-beos + exit ;; + BePC:BeOS:*:*) # BeOS running on Intel PC compatible. + echo i586-pc-beos + exit ;; + BePC:Haiku:*:*) # Haiku running on Intel PC compatible. + echo i586-pc-haiku + exit ;; + x86_64:Haiku:*:*) + echo x86_64-unknown-haiku + exit ;; + SX-4:SUPER-UX:*:*) + echo sx4-nec-superux"$UNAME_RELEASE" + exit ;; + SX-5:SUPER-UX:*:*) + echo sx5-nec-superux"$UNAME_RELEASE" + exit ;; + SX-6:SUPER-UX:*:*) + echo sx6-nec-superux"$UNAME_RELEASE" + exit ;; + SX-7:SUPER-UX:*:*) + echo sx7-nec-superux"$UNAME_RELEASE" + exit ;; + SX-8:SUPER-UX:*:*) + echo sx8-nec-superux"$UNAME_RELEASE" + exit ;; + SX-8R:SUPER-UX:*:*) + echo sx8r-nec-superux"$UNAME_RELEASE" + exit ;; + SX-ACE:SUPER-UX:*:*) + echo sxace-nec-superux"$UNAME_RELEASE" + exit ;; + Power*:Rhapsody:*:*) + echo powerpc-apple-rhapsody"$UNAME_RELEASE" + exit ;; + *:Rhapsody:*:*) + echo "$UNAME_MACHINE"-apple-rhapsody"$UNAME_RELEASE" + exit ;; + arm64:Darwin:*:*) + echo aarch64-apple-darwin"$UNAME_RELEASE" + exit ;; + *:Darwin:*:*) + UNAME_PROCESSOR=`uname -p` + case $UNAME_PROCESSOR in + unknown) UNAME_PROCESSOR=powerpc ;; + esac + if command -v xcode-select > /dev/null 2> /dev/null && \ + ! xcode-select --print-path > /dev/null 2> /dev/null ; then + # Avoid executing cc if there is no toolchain installed as + # cc will be a stub that puts up a graphical alert + # prompting the user to install developer tools. + CC_FOR_BUILD=no_compiler_found + else + set_cc_for_build + fi + if [ "$CC_FOR_BUILD" != no_compiler_found ]; then + if (echo '#ifdef __LP64__'; echo IS_64BIT_ARCH; echo '#endif') | \ + (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \ + grep IS_64BIT_ARCH >/dev/null + then + case $UNAME_PROCESSOR in + i386) UNAME_PROCESSOR=x86_64 ;; + powerpc) UNAME_PROCESSOR=powerpc64 ;; + esac + fi + # On 10.4-10.6 one might compile for PowerPC via gcc -arch ppc + if (echo '#ifdef __POWERPC__'; echo IS_PPC; echo '#endif') | \ + (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \ + grep IS_PPC >/dev/null + then + UNAME_PROCESSOR=powerpc + fi + elif test "$UNAME_PROCESSOR" = i386 ; then + # uname -m returns i386 or x86_64 + UNAME_PROCESSOR=$UNAME_MACHINE + fi + echo "$UNAME_PROCESSOR"-apple-darwin"$UNAME_RELEASE" + exit ;; + *:procnto*:*:* | *:QNX:[0123456789]*:*) + UNAME_PROCESSOR=`uname -p` + if test "$UNAME_PROCESSOR" = x86; then + UNAME_PROCESSOR=i386 + UNAME_MACHINE=pc + fi + echo "$UNAME_PROCESSOR"-"$UNAME_MACHINE"-nto-qnx"$UNAME_RELEASE" + exit ;; + *:QNX:*:4*) + echo i386-pc-qnx + exit ;; + NEO-*:NONSTOP_KERNEL:*:*) + echo neo-tandem-nsk"$UNAME_RELEASE" + exit ;; + NSE-*:NONSTOP_KERNEL:*:*) + echo nse-tandem-nsk"$UNAME_RELEASE" + exit ;; + NSR-*:NONSTOP_KERNEL:*:*) + echo nsr-tandem-nsk"$UNAME_RELEASE" + exit ;; + NSV-*:NONSTOP_KERNEL:*:*) + echo nsv-tandem-nsk"$UNAME_RELEASE" + exit ;; + NSX-*:NONSTOP_KERNEL:*:*) + echo nsx-tandem-nsk"$UNAME_RELEASE" + exit ;; + *:NonStop-UX:*:*) + echo mips-compaq-nonstopux + exit ;; + BS2000:POSIX*:*:*) + echo bs2000-siemens-sysv + exit ;; + DS/*:UNIX_System_V:*:*) + echo "$UNAME_MACHINE"-"$UNAME_SYSTEM"-"$UNAME_RELEASE" + exit ;; + *:Plan9:*:*) + # "uname -m" is not consistent, so use $cputype instead. 386 + # is converted to i386 for consistency with other x86 + # operating systems. 
+ # shellcheck disable=SC2154 + if test "$cputype" = 386; then + UNAME_MACHINE=i386 + else + UNAME_MACHINE="$cputype" + fi + echo "$UNAME_MACHINE"-unknown-plan9 + exit ;; + *:TOPS-10:*:*) + echo pdp10-unknown-tops10 + exit ;; + *:TENEX:*:*) + echo pdp10-unknown-tenex + exit ;; + KS10:TOPS-20:*:* | KL10:TOPS-20:*:* | TYPE4:TOPS-20:*:*) + echo pdp10-dec-tops20 + exit ;; + XKL-1:TOPS-20:*:* | TYPE5:TOPS-20:*:*) + echo pdp10-xkl-tops20 + exit ;; + *:TOPS-20:*:*) + echo pdp10-unknown-tops20 + exit ;; + *:ITS:*:*) + echo pdp10-unknown-its + exit ;; + SEI:*:*:SEIUX) + echo mips-sei-seiux"$UNAME_RELEASE" + exit ;; + *:DragonFly:*:*) + echo "$UNAME_MACHINE"-unknown-dragonfly"`echo "$UNAME_RELEASE"|sed -e 's/[-(].*//'`" + exit ;; + *:*VMS:*:*) + UNAME_MACHINE=`(uname -p) 2>/dev/null` + case "$UNAME_MACHINE" in + A*) echo alpha-dec-vms ; exit ;; + I*) echo ia64-dec-vms ; exit ;; + V*) echo vax-dec-vms ; exit ;; + esac ;; + *:XENIX:*:SysV) + echo i386-pc-xenix + exit ;; + i*86:skyos:*:*) + echo "$UNAME_MACHINE"-pc-skyos"`echo "$UNAME_RELEASE" | sed -e 's/ .*$//'`" + exit ;; + i*86:rdos:*:*) + echo "$UNAME_MACHINE"-pc-rdos + exit ;; + i*86:AROS:*:*) + echo "$UNAME_MACHINE"-pc-aros + exit ;; + x86_64:VMkernel:*:*) + echo "$UNAME_MACHINE"-unknown-esx + exit ;; + amd64:Isilon\ OneFS:*:*) + echo x86_64-unknown-onefs + exit ;; + *:Unleashed:*:*) + echo "$UNAME_MACHINE"-unknown-unleashed"$UNAME_RELEASE" + exit ;; +esac + +# No uname command or uname output not recognized. +set_cc_for_build +cat > "$dummy.c" < +#include +#endif +#if defined(ultrix) || defined(_ultrix) || defined(__ultrix) || defined(__ultrix__) +#if defined (vax) || defined (__vax) || defined (__vax__) || defined(mips) || defined(__mips) || defined(__mips__) || defined(MIPS) || defined(__MIPS__) +#include +#if defined(_SIZE_T_) || defined(SIGLOST) +#include +#endif +#endif +#endif +main () +{ +#if defined (sony) +#if defined (MIPSEB) + /* BFD wants "bsd" instead of "newsos". Perhaps BFD should be changed, + I don't know.... */ + printf ("mips-sony-bsd\n"); exit (0); +#else +#include + printf ("m68k-sony-newsos%s\n", +#ifdef NEWSOS4 + "4" +#else + "" +#endif + ); exit (0); +#endif +#endif + +#if defined (NeXT) +#if !defined (__ARCHITECTURE__) +#define __ARCHITECTURE__ "m68k" +#endif + int version; + version=`(hostinfo | sed -n 's/.*NeXT Mach \([0-9]*\).*/\1/p') 2>/dev/null`; + if (version < 4) + printf ("%s-next-nextstep%d\n", __ARCHITECTURE__, version); + else + printf ("%s-next-openstep%d\n", __ARCHITECTURE__, version); + exit (0); +#endif + +#if defined (MULTIMAX) || defined (n16) +#if defined (UMAXV) + printf ("ns32k-encore-sysv\n"); exit (0); +#else +#if defined (CMU) + printf ("ns32k-encore-mach\n"); exit (0); +#else + printf ("ns32k-encore-bsd\n"); exit (0); +#endif +#endif +#endif + +#if defined (__386BSD__) + printf ("i386-pc-bsd\n"); exit (0); +#endif + +#if defined (sequent) +#if defined (i386) + printf ("i386-sequent-dynix\n"); exit (0); +#endif +#if defined (ns32000) + printf ("ns32k-sequent-dynix\n"); exit (0); +#endif +#endif + +#if defined (_SEQUENT_) + struct utsname un; + + uname(&un); + if (strncmp(un.version, "V2", 2) == 0) { + printf ("i386-sequent-ptx2\n"); exit (0); + } + if (strncmp(un.version, "V1", 2) == 0) { /* XXX is V1 correct? 
*/ + printf ("i386-sequent-ptx1\n"); exit (0); + } + printf ("i386-sequent-ptx\n"); exit (0); +#endif + +#if defined (vax) +#if !defined (ultrix) +#include +#if defined (BSD) +#if BSD == 43 + printf ("vax-dec-bsd4.3\n"); exit (0); +#else +#if BSD == 199006 + printf ("vax-dec-bsd4.3reno\n"); exit (0); +#else + printf ("vax-dec-bsd\n"); exit (0); +#endif +#endif +#else + printf ("vax-dec-bsd\n"); exit (0); +#endif +#else +#if defined(_SIZE_T_) || defined(SIGLOST) + struct utsname un; + uname (&un); + printf ("vax-dec-ultrix%s\n", un.release); exit (0); +#else + printf ("vax-dec-ultrix\n"); exit (0); +#endif +#endif +#endif +#if defined(ultrix) || defined(_ultrix) || defined(__ultrix) || defined(__ultrix__) +#if defined(mips) || defined(__mips) || defined(__mips__) || defined(MIPS) || defined(__MIPS__) +#if defined(_SIZE_T_) || defined(SIGLOST) + struct utsname *un; + uname (&un); + printf ("mips-dec-ultrix%s\n", un.release); exit (0); +#else + printf ("mips-dec-ultrix\n"); exit (0); +#endif +#endif +#endif + +#if defined (alliant) && defined (i860) + printf ("i860-alliant-bsd\n"); exit (0); +#endif + + exit (1); +} +EOF + +$CC_FOR_BUILD -o "$dummy" "$dummy.c" 2>/dev/null && SYSTEM_NAME=`$dummy` && + { echo "$SYSTEM_NAME"; exit; } + +# Apollos put the system type in the environment. +test -d /usr/apollo && { echo "$ISP-apollo-$SYSTYPE"; exit; } + +echo "$0: unable to guess system type" >&2 + +case "$UNAME_MACHINE:$UNAME_SYSTEM" in + mips:Linux | mips64:Linux) + # If we got here on MIPS GNU/Linux, output extra information. + cat >&2 <&2 <&2 </dev/null || echo unknown` +uname -r = `(uname -r) 2>/dev/null || echo unknown` +uname -s = `(uname -s) 2>/dev/null || echo unknown` +uname -v = `(uname -v) 2>/dev/null || echo unknown` + +/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null` +/bin/uname -X = `(/bin/uname -X) 2>/dev/null` + +hostinfo = `(hostinfo) 2>/dev/null` +/bin/universe = `(/bin/universe) 2>/dev/null` +/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null` +/bin/arch = `(/bin/arch) 2>/dev/null` +/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null` +/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null` + +UNAME_MACHINE = "$UNAME_MACHINE" +UNAME_RELEASE = "$UNAME_RELEASE" +UNAME_SYSTEM = "$UNAME_SYSTEM" +UNAME_VERSION = "$UNAME_VERSION" +EOF +fi + +exit 1 + +# Local variables: +# eval: (add-hook 'before-save-hook 'time-stamp) +# time-stamp-start: "timestamp='" +# time-stamp-format: "%:y-%02m-%02d" +# time-stamp-end: "'" +# End: diff --git a/build/autoconf/config.status.m4 b/build/autoconf/config.status.m4 new file mode 100644 index 0000000000..9be939b2b5 --- /dev/null +++ b/build/autoconf/config.status.m4 @@ -0,0 +1,173 @@ +dnl This Source Code Form is subject to the terms of the Mozilla Public +dnl License, v. 2.0. If a copy of the MPL was not distributed with this +dnl file, You can obtain one at http://mozilla.org/MPL/2.0/. + +dnl For use in AC_SUBST replacement +define([MOZ_DIVERSION_SUBST], 11) + +dnl Replace AC_SUBST to store values in a format suitable for python. 
+dnl The necessary comma after the tuple can't be put here because it +dnl can mess around with things like: +dnl AC_SOMETHING(foo,AC_SUBST(),bar) +define([AC_SUBST], +[ifdef([AC_SUBST_SET_$1], [m4_fatal([Cannot use AC_SUBST and AC_SUBST_SET on the same variable ($1)])], +[ifdef([AC_SUBST_LIST_$1], [m4_fatal([Cannot use AC_SUBST and AC_SUBST_LIST on the same variable ($1)])], +[ifdef([AC_SUBST_TOML_LIST_$1], [m4_fatal([Cannot use AC_SUBST and AC_SUBST_TOML_LIST on the same variable ($1)])], +[ifdef([AC_SUBST_$1], , +[define([AC_SUBST_$1], )dnl +AC_DIVERT_PUSH(MOZ_DIVERSION_SUBST)dnl + (''' $1 ''', r''' [$]$1 ''') +AC_DIVERT_POP()dnl +])])])])]) + +dnl Like AC_SUBST, but makes the value available as a set in python, +dnl with values got from the value of the environment variable, split on +dnl whitespaces. +define([AC_SUBST_SET], +[ifdef([AC_SUBST_$1], [m4_fatal([Cannot use AC_SUBST and AC_SUBST_SET on the same variable ($1)])], +[ifdef([AC_SUBST_LIST_$1], [m4_fatal([Cannot use AC_SUBST_LIST and AC_SUBST_SET on the same variable ($1)])], +[ifdef([AC_SUBST_TOML_LIST_$1], [m4_fatal([Cannot use AC_SUBST_TOML_LIST and AC_SUBST_SET on the same variable ($1)])], +[ifdef([AC_SUBST_SET_$1], , +[define([AC_SUBST_SET_$1], )dnl +AC_DIVERT_PUSH(MOZ_DIVERSION_SUBST)dnl + (''' $1 ''', unique_list(split(r''' [$]$1 '''))) +AC_DIVERT_POP()dnl +])])])])]) + +dnl Like AC_SUBST, but makes the value available as a list in python, +dnl with values got from the value of the environment variable, split on +dnl whitespaces. +define([AC_SUBST_LIST], +[ifdef([AC_SUBST_$1], [m4_fatal([Cannot use AC_SUBST and AC_SUBST_LIST on the same variable ($1)])], +[ifdef([AC_SUBST_SET_$1], [m4_fatal([Cannot use AC_SUBST_SET and AC_SUBST_LIST on the same variable ($1)])], +[ifdef([AC_SUBST_TOML_LIST_$1], [m4_fatal([Cannot use AC_SUBST_TOML_LIST and AC_SUBST_LIST on the same variable ($1)])], +[ifdef([AC_SUBST_LIST_$1], , +[define([AC_SUBST_LIST_$1], )dnl +AC_DIVERT_PUSH(MOZ_DIVERSION_SUBST)dnl + (''' $1 ''', list(split(r''' [$]$1 '''))) +AC_DIVERT_POP()dnl +])])])])]) + +dnl Like AC_SUBST, but makes the value available as a string of comma-separated +dnl quoted strings in python, with values got from the value of the environment +dnl variable, split on whitespaces. The value is suitable for embedding into a +dnl .toml list. +define([AC_SUBST_TOML_LIST], +[ifdef([AC_SUBST_$1], [m4_fatal([Cannot use AC_SUBST and AC_SUBST_TOML_LIST on the same variable ($1)])], +[ifdef([AC_SUBST_SET_$1], [m4_fatal([Cannot use AC_SUBST_SET and AC_SUBST_TOML_LIST on the same variable ($1)])], +[ifdef([AC_SUBST_LIST_$1], [m4_fatal([Cannot use AC_SUBST_LIST and AC_SUBST_TOML_LIST on the same variable ($1)])], +[ifdef([AC_SUBST_TOML_LIST_$1], , +[define([AC_SUBST_TOML_LIST_$1], )dnl +AC_DIVERT_PUSH(MOZ_DIVERSION_SUBST)dnl + (''' $1 ''', r''' %s ''' % str(', '.join("'%s'" % s for s in split(r''' [$]$1 ''')))) +AC_DIVERT_POP()dnl +])])])])]) + + +dnl Ignore AC_SUBSTs for variables we don't have use for but that autoconf +dnl itself exports. +define([AC_SUBST_CFLAGS], ) +define([AC_SUBST_CPPFLAGS], ) +define([AC_SUBST_CXXFLAGS], ) +define([AC_SUBST_FFLAGS], ) +define([AC_SUBST_DEFS], ) +define([AC_SUBST_LDFLAGS], ) +define([AC_SUBST_LIBS], ) + +dnl Wrap AC_DEFINE to store values in a format suitable for python. +dnl autoconf's AC_DEFINE still needs to be used to fill confdefs.h, +dnl which is #included during some compile checks. 
+dnl The necessary comma after the tuple can't be put here because it +dnl can mess around with things like: +dnl AC_SOMETHING(foo,AC_DEFINE(),bar) +define([_MOZ_AC_DEFINE], defn([AC_DEFINE])) +define([AC_DEFINE], +[cat >> confdefs.pytmp <<\EOF + (''' $1 ''', ifelse($#, 2, [r''' $2 '''], $#, 3, [r''' $2 '''], ' 1 ')) +EOF +ifelse($#, 2, _MOZ_AC_DEFINE([$1], [$2]), $#, 3, _MOZ_AC_DEFINE([$1], [$2], [$3]),_MOZ_AC_DEFINE([$1]))dnl +]) + +dnl Wrap AC_DEFINE_UNQUOTED to store values in a format suitable for +dnl python. +define([_MOZ_AC_DEFINE_UNQUOTED], defn([AC_DEFINE_UNQUOTED])) +define([AC_DEFINE_UNQUOTED], +[cat >> confdefs.pytmp <>>)dnl +echo creating $CONFIG_STATUS + +cat > $CONFIG_STATUS <> $CONFIG_STATUS +rm confdefs.pytmp confdefs.h + +cat >> $CONFIG_STATUS <<\EOF +] + +substs = [ +EOF + +dnl The MOZ_DIVERSION_SUBST output diversion contains AC_SUBSTs, in the +dnl expected format, but lacks the final comma (see above). +sed 's/$/,/' >> $CONFIG_STATUS <> $CONFIG_STATUS +done + +cat >> $CONFIG_STATUS <<\EOF +] + +flags = [ +undivert(MOZ_DIVERSION_ARGS)dnl +] +EOF + +changequote([, ]) +]) + +define([m4_fatal],[ +errprint([$1 +]) +m4exit(1) +]) + +define([AC_OUTPUT], [ifelse($#_$1, 1_, [MOZ_CREATE_CONFIG_STATUS() +MOZ_RUN_CONFIG_STATUS()], +[m4_fatal([Use CONFIGURE_SUBST_FILES in moz.build files to create substituted files.])] +)]) + +define([AC_CONFIG_HEADER], +[m4_fatal([Use CONFIGURE_DEFINE_FILES in moz.build files to produce header files.]) +]) diff --git a/build/autoconf/config.sub b/build/autoconf/config.sub new file mode 100755 index 0000000000..3d9a8dc3d5 --- /dev/null +++ b/build/autoconf/config.sub @@ -0,0 +1,1851 @@ +#! /bin/sh +# Configuration validation subroutine script. +# Copyright 1992-2020 Free Software Foundation, Inc. + +timestamp='2020-07-10' + +# This file is free software; you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, see . +# +# As a special exception to the GNU General Public License, if you +# distribute this file as part of a program that contains a +# configuration script generated by Autoconf, you may include it under +# the same distribution terms that you use for the rest of that +# program. This Exception is an additional permission under section 7 +# of the GNU General Public License, version 3 ("GPLv3"). + + +# Please send patches to . +# +# Configuration subroutine to validate and canonicalize a configuration type. +# Supply the specified configuration type as an argument. +# If it is invalid, we print an error message on stderr and exit with code 1. +# Otherwise, we print the canonical config type on stdout and succeed. + +# You can get the latest version of this script from: +# https://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub + +# This file is supposed to be the same for all GNU packages +# and recognize all the CPU types, system types and aliases +# that are meaningful with *any* GNU software. +# Each package is responsible for reporting which valid configurations +# it does not support. 
The user should be able to distinguish +# a failure to support a valid configuration from a meaningless +# configuration. + +# The goal of this file is to map all the various variations of a given +# machine specification into a single specification in the form: +# CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM +# or in some cases, the newer four-part form: +# CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM +# It is wrong to echo any other type of specification. + +me=`echo "$0" | sed -e 's,.*/,,'` + +usage="\ +Usage: $0 [OPTION] CPU-MFR-OPSYS or ALIAS + +Canonicalize a configuration name. + +Options: + -h, --help print this help, then exit + -t, --time-stamp print date of last modification, then exit + -v, --version print version number, then exit + +Report bugs and patches to ." + +version="\ +GNU config.sub ($timestamp) + +Copyright 1992-2020 Free Software Foundation, Inc. + +This is free software; see the source for copying conditions. There is NO +warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." + +help=" +Try \`$me --help' for more information." + +# Parse command line +while test $# -gt 0 ; do + case $1 in + --time-stamp | --time* | -t ) + echo "$timestamp" ; exit ;; + --version | -v ) + echo "$version" ; exit ;; + --help | --h* | -h ) + echo "$usage"; exit ;; + -- ) # Stop option processing + shift; break ;; + - ) # Use stdin as input. + break ;; + -* ) + echo "$me: invalid option $1$help" >&2 + exit 1 ;; + + *local*) + # First pass through any local machine types. + echo "$1" + exit ;; + + * ) + break ;; + esac +done + +case $# in + 0) echo "$me: missing argument$help" >&2 + exit 1;; + 1) ;; + *) echo "$me: too many arguments$help" >&2 + exit 1;; +esac + +# Split fields of configuration type +# shellcheck disable=SC2162 +IFS="-" read field1 field2 field3 field4 <&2 + exit 1 + ;; + *-*-*-*) + basic_machine=$field1-$field2 + basic_os=$field3-$field4 + ;; + *-*-*) + # Ambiguous whether COMPANY is present, or skipped and KERNEL-OS is two + # parts + maybe_os=$field2-$field3 + case $maybe_os in + nto-qnx* | linux-* | uclinux-uclibc* \ + | uclinux-gnu* | kfreebsd*-gnu* | knetbsd*-gnu* | netbsd*-gnu* \ + | netbsd*-eabi* | kopensolaris*-gnu* | cloudabi*-eabi* \ + | storm-chaos* | os2-emx* | rtmk-nova*) + basic_machine=$field1 + basic_os=$maybe_os + ;; + android-linux) + basic_machine=$field1-unknown + basic_os=linux-android + ;; + *) + basic_machine=$field1-$field2 + basic_os=$field3 + ;; + esac + ;; + *-*) + # A lone config we happen to match not fitting any pattern + case $field1-$field2 in + decstation-3100) + basic_machine=mips-dec + basic_os= + ;; + *-*) + # Second component is usually, but not always the OS + case $field2 in + # Prevent following clause from handling this valid os + sun*os*) + basic_machine=$field1 + basic_os=$field2 + ;; + # Manufacturers + dec* | mips* | sequent* | encore* | pc533* | sgi* | sony* \ + | att* | 7300* | 3300* | delta* | motorola* | sun[234]* \ + | unicom* | ibm* | next | hp | isi* | apollo | altos* \ + | convergent* | ncr* | news | 32* | 3600* | 3100* \ + | hitachi* | c[123]* | convex* | sun | crds | omron* | dg \ + | ultra | tti* | harris | dolphin | highlevel | gould \ + | cbm | ns | masscomp | apple | axis | knuth | cray \ + | microblaze* | sim | cisco \ + | oki | wec | wrs | winbond) + basic_machine=$field1-$field2 + basic_os= + ;; + *) + basic_machine=$field1 + basic_os=$field2 + ;; + esac + ;; + esac + ;; + *) + # Convert single-component short-hands not valid as part of + # multi-component configurations. 
+ case $field1 in + 386bsd) + basic_machine=i386-pc + basic_os=bsd + ;; + a29khif) + basic_machine=a29k-amd + basic_os=udi + ;; + adobe68k) + basic_machine=m68010-adobe + basic_os=scout + ;; + alliant) + basic_machine=fx80-alliant + basic_os= + ;; + altos | altos3068) + basic_machine=m68k-altos + basic_os= + ;; + am29k) + basic_machine=a29k-none + basic_os=bsd + ;; + amdahl) + basic_machine=580-amdahl + basic_os=sysv + ;; + amiga) + basic_machine=m68k-unknown + basic_os= + ;; + amigaos | amigados) + basic_machine=m68k-unknown + basic_os=amigaos + ;; + amigaunix | amix) + basic_machine=m68k-unknown + basic_os=sysv4 + ;; + apollo68) + basic_machine=m68k-apollo + basic_os=sysv + ;; + apollo68bsd) + basic_machine=m68k-apollo + basic_os=bsd + ;; + aros) + basic_machine=i386-pc + basic_os=aros + ;; + aux) + basic_machine=m68k-apple + basic_os=aux + ;; + balance) + basic_machine=ns32k-sequent + basic_os=dynix + ;; + blackfin) + basic_machine=bfin-unknown + basic_os=linux + ;; + cegcc) + basic_machine=arm-unknown + basic_os=cegcc + ;; + convex-c1) + basic_machine=c1-convex + basic_os=bsd + ;; + convex-c2) + basic_machine=c2-convex + basic_os=bsd + ;; + convex-c32) + basic_machine=c32-convex + basic_os=bsd + ;; + convex-c34) + basic_machine=c34-convex + basic_os=bsd + ;; + convex-c38) + basic_machine=c38-convex + basic_os=bsd + ;; + cray) + basic_machine=j90-cray + basic_os=unicos + ;; + crds | unos) + basic_machine=m68k-crds + basic_os= + ;; + da30) + basic_machine=m68k-da30 + basic_os= + ;; + decstation | pmax | pmin | dec3100 | decstatn) + basic_machine=mips-dec + basic_os= + ;; + delta88) + basic_machine=m88k-motorola + basic_os=sysv3 + ;; + dicos) + basic_machine=i686-pc + basic_os=dicos + ;; + djgpp) + basic_machine=i586-pc + basic_os=msdosdjgpp + ;; + ebmon29k) + basic_machine=a29k-amd + basic_os=ebmon + ;; + es1800 | OSE68k | ose68k | ose | OSE) + basic_machine=m68k-ericsson + basic_os=ose + ;; + gmicro) + basic_machine=tron-gmicro + basic_os=sysv + ;; + go32) + basic_machine=i386-pc + basic_os=go32 + ;; + h8300hms) + basic_machine=h8300-hitachi + basic_os=hms + ;; + h8300xray) + basic_machine=h8300-hitachi + basic_os=xray + ;; + h8500hms) + basic_machine=h8500-hitachi + basic_os=hms + ;; + harris) + basic_machine=m88k-harris + basic_os=sysv3 + ;; + hp300 | hp300hpux) + basic_machine=m68k-hp + basic_os=hpux + ;; + hp300bsd) + basic_machine=m68k-hp + basic_os=bsd + ;; + hppaosf) + basic_machine=hppa1.1-hp + basic_os=osf + ;; + hppro) + basic_machine=hppa1.1-hp + basic_os=proelf + ;; + i386mach) + basic_machine=i386-mach + basic_os=mach + ;; + isi68 | isi) + basic_machine=m68k-isi + basic_os=sysv + ;; + m68knommu) + basic_machine=m68k-unknown + basic_os=linux + ;; + magnum | m3230) + basic_machine=mips-mips + basic_os=sysv + ;; + merlin) + basic_machine=ns32k-utek + basic_os=sysv + ;; + mingw64) + basic_machine=x86_64-pc + basic_os=mingw64 + ;; + mingw32) + basic_machine=i686-pc + basic_os=mingw32 + ;; + mingw32ce) + basic_machine=arm-unknown + basic_os=mingw32ce + ;; + monitor) + basic_machine=m68k-rom68k + basic_os=coff + ;; + morphos) + basic_machine=powerpc-unknown + basic_os=morphos + ;; + moxiebox) + basic_machine=moxie-unknown + basic_os=moxiebox + ;; + msdos) + basic_machine=i386-pc + basic_os=msdos + ;; + msys) + basic_machine=i686-pc + basic_os=msys + ;; + mvs) + basic_machine=i370-ibm + basic_os=mvs + ;; + nacl) + basic_machine=le32-unknown + basic_os=nacl + ;; + ncr3000) + basic_machine=i486-ncr + basic_os=sysv4 + ;; + netbsd386) + basic_machine=i386-pc + basic_os=netbsd + ;; + 
netwinder) + basic_machine=armv4l-rebel + basic_os=linux + ;; + news | news700 | news800 | news900) + basic_machine=m68k-sony + basic_os=newsos + ;; + news1000) + basic_machine=m68030-sony + basic_os=newsos + ;; + necv70) + basic_machine=v70-nec + basic_os=sysv + ;; + nh3000) + basic_machine=m68k-harris + basic_os=cxux + ;; + nh[45]000) + basic_machine=m88k-harris + basic_os=cxux + ;; + nindy960) + basic_machine=i960-intel + basic_os=nindy + ;; + mon960) + basic_machine=i960-intel + basic_os=mon960 + ;; + nonstopux) + basic_machine=mips-compaq + basic_os=nonstopux + ;; + os400) + basic_machine=powerpc-ibm + basic_os=os400 + ;; + OSE68000 | ose68000) + basic_machine=m68000-ericsson + basic_os=ose + ;; + os68k) + basic_machine=m68k-none + basic_os=os68k + ;; + paragon) + basic_machine=i860-intel + basic_os=osf + ;; + parisc) + basic_machine=hppa-unknown + basic_os=linux + ;; + psp) + basic_machine=mipsallegrexel-sony + basic_os=psp + ;; + pw32) + basic_machine=i586-unknown + basic_os=pw32 + ;; + rdos | rdos64) + basic_machine=x86_64-pc + basic_os=rdos + ;; + rdos32) + basic_machine=i386-pc + basic_os=rdos + ;; + rom68k) + basic_machine=m68k-rom68k + basic_os=coff + ;; + sa29200) + basic_machine=a29k-amd + basic_os=udi + ;; + sei) + basic_machine=mips-sei + basic_os=seiux + ;; + sequent) + basic_machine=i386-sequent + basic_os= + ;; + sps7) + basic_machine=m68k-bull + basic_os=sysv2 + ;; + st2000) + basic_machine=m68k-tandem + basic_os= + ;; + stratus) + basic_machine=i860-stratus + basic_os=sysv4 + ;; + sun2) + basic_machine=m68000-sun + basic_os= + ;; + sun2os3) + basic_machine=m68000-sun + basic_os=sunos3 + ;; + sun2os4) + basic_machine=m68000-sun + basic_os=sunos4 + ;; + sun3) + basic_machine=m68k-sun + basic_os= + ;; + sun3os3) + basic_machine=m68k-sun + basic_os=sunos3 + ;; + sun3os4) + basic_machine=m68k-sun + basic_os=sunos4 + ;; + sun4) + basic_machine=sparc-sun + basic_os= + ;; + sun4os3) + basic_machine=sparc-sun + basic_os=sunos3 + ;; + sun4os4) + basic_machine=sparc-sun + basic_os=sunos4 + ;; + sun4sol2) + basic_machine=sparc-sun + basic_os=solaris2 + ;; + sun386 | sun386i | roadrunner) + basic_machine=i386-sun + basic_os= + ;; + sv1) + basic_machine=sv1-cray + basic_os=unicos + ;; + symmetry) + basic_machine=i386-sequent + basic_os=dynix + ;; + t3e) + basic_machine=alphaev5-cray + basic_os=unicos + ;; + t90) + basic_machine=t90-cray + basic_os=unicos + ;; + toad1) + basic_machine=pdp10-xkl + basic_os=tops20 + ;; + tpf) + basic_machine=s390x-ibm + basic_os=tpf + ;; + udi29k) + basic_machine=a29k-amd + basic_os=udi + ;; + ultra3) + basic_machine=a29k-nyu + basic_os=sym1 + ;; + v810 | necv810) + basic_machine=v810-nec + basic_os=none + ;; + vaxv) + basic_machine=vax-dec + basic_os=sysv + ;; + vms) + basic_machine=vax-dec + basic_os=vms + ;; + vsta) + basic_machine=i386-pc + basic_os=vsta + ;; + vxworks960) + basic_machine=i960-wrs + basic_os=vxworks + ;; + vxworks68) + basic_machine=m68k-wrs + basic_os=vxworks + ;; + vxworks29k) + basic_machine=a29k-wrs + basic_os=vxworks + ;; + xbox) + basic_machine=i686-pc + basic_os=mingw32 + ;; + ymp) + basic_machine=ymp-cray + basic_os=unicos + ;; + *) + basic_machine=$1 + basic_os= + ;; + esac + ;; +esac + +# Decode 1-component or ad-hoc basic machines +case $basic_machine in + # Here we handle the default manufacturer of certain CPU types. It is in + # some cases the only manufacturer, in others, it is the most popular. 
+ w89k) + cpu=hppa1.1 + vendor=winbond + ;; + op50n) + cpu=hppa1.1 + vendor=oki + ;; + op60c) + cpu=hppa1.1 + vendor=oki + ;; + ibm*) + cpu=i370 + vendor=ibm + ;; + orion105) + cpu=clipper + vendor=highlevel + ;; + mac | mpw | mac-mpw) + cpu=m68k + vendor=apple + ;; + pmac | pmac-mpw) + cpu=powerpc + vendor=apple + ;; + + # Recognize the various machine names and aliases which stand + # for a CPU type and a company and sometimes even an OS. + 3b1 | 7300 | 7300-att | att-7300 | pc7300 | safari | unixpc) + cpu=m68000 + vendor=att + ;; + 3b*) + cpu=we32k + vendor=att + ;; + bluegene*) + cpu=powerpc + vendor=ibm + basic_os=cnk + ;; + decsystem10* | dec10*) + cpu=pdp10 + vendor=dec + basic_os=tops10 + ;; + decsystem20* | dec20*) + cpu=pdp10 + vendor=dec + basic_os=tops20 + ;; + delta | 3300 | motorola-3300 | motorola-delta \ + | 3300-motorola | delta-motorola) + cpu=m68k + vendor=motorola + ;; + dpx2*) + cpu=m68k + vendor=bull + basic_os=sysv3 + ;; + encore | umax | mmax) + cpu=ns32k + vendor=encore + ;; + elxsi) + cpu=elxsi + vendor=elxsi + basic_os=${basic_os:-bsd} + ;; + fx2800) + cpu=i860 + vendor=alliant + ;; + genix) + cpu=ns32k + vendor=ns + ;; + h3050r* | hiux*) + cpu=hppa1.1 + vendor=hitachi + basic_os=hiuxwe2 + ;; + hp3k9[0-9][0-9] | hp9[0-9][0-9]) + cpu=hppa1.0 + vendor=hp + ;; + hp9k2[0-9][0-9] | hp9k31[0-9]) + cpu=m68000 + vendor=hp + ;; + hp9k3[2-9][0-9]) + cpu=m68k + vendor=hp + ;; + hp9k6[0-9][0-9] | hp6[0-9][0-9]) + cpu=hppa1.0 + vendor=hp + ;; + hp9k7[0-79][0-9] | hp7[0-79][0-9]) + cpu=hppa1.1 + vendor=hp + ;; + hp9k78[0-9] | hp78[0-9]) + # FIXME: really hppa2.0-hp + cpu=hppa1.1 + vendor=hp + ;; + hp9k8[67]1 | hp8[67]1 | hp9k80[24] | hp80[24] | hp9k8[78]9 | hp8[78]9 | hp9k893 | hp893) + # FIXME: really hppa2.0-hp + cpu=hppa1.1 + vendor=hp + ;; + hp9k8[0-9][13679] | hp8[0-9][13679]) + cpu=hppa1.1 + vendor=hp + ;; + hp9k8[0-9][0-9] | hp8[0-9][0-9]) + cpu=hppa1.0 + vendor=hp + ;; + i*86v32) + cpu=`echo "$1" | sed -e 's/86.*/86/'` + vendor=pc + basic_os=sysv32 + ;; + i*86v4*) + cpu=`echo "$1" | sed -e 's/86.*/86/'` + vendor=pc + basic_os=sysv4 + ;; + i*86v) + cpu=`echo "$1" | sed -e 's/86.*/86/'` + vendor=pc + basic_os=sysv + ;; + i*86sol2) + cpu=`echo "$1" | sed -e 's/86.*/86/'` + vendor=pc + basic_os=solaris2 + ;; + j90 | j90-cray) + cpu=j90 + vendor=cray + basic_os=${basic_os:-unicos} + ;; + iris | iris4d) + cpu=mips + vendor=sgi + case $basic_os in + irix*) + ;; + *) + basic_os=irix4 + ;; + esac + ;; + miniframe) + cpu=m68000 + vendor=convergent + ;; + *mint | mint[0-9]* | *MiNT | *MiNT[0-9]*) + cpu=m68k + vendor=atari + basic_os=mint + ;; + news-3600 | risc-news) + cpu=mips + vendor=sony + basic_os=newsos + ;; + next | m*-next) + cpu=m68k + vendor=next + case $basic_os in + openstep*) + ;; + nextstep*) + ;; + ns2*) + basic_os=nextstep2 + ;; + *) + basic_os=nextstep3 + ;; + esac + ;; + np1) + cpu=np1 + vendor=gould + ;; + op50n-* | op60c-*) + cpu=hppa1.1 + vendor=oki + basic_os=proelf + ;; + pa-hitachi) + cpu=hppa1.1 + vendor=hitachi + basic_os=hiuxwe2 + ;; + pbd) + cpu=sparc + vendor=tti + ;; + pbb) + cpu=m68k + vendor=tti + ;; + pc532) + cpu=ns32k + vendor=pc532 + ;; + pn) + cpu=pn + vendor=gould + ;; + power) + cpu=power + vendor=ibm + ;; + ps2) + cpu=i386 + vendor=ibm + ;; + rm[46]00) + cpu=mips + vendor=siemens + ;; + rtpc | rtpc-*) + cpu=romp + vendor=ibm + ;; + sde) + cpu=mipsisa32 + vendor=sde + basic_os=${basic_os:-elf} + ;; + simso-wrs) + cpu=sparclite + vendor=wrs + basic_os=vxworks + ;; + tower | tower-32) + cpu=m68k + vendor=ncr + ;; + vpp*|vx|vx-*) + cpu=f301 + 
vendor=fujitsu + ;; + w65) + cpu=w65 + vendor=wdc + ;; + w89k-*) + cpu=hppa1.1 + vendor=winbond + basic_os=proelf + ;; + none) + cpu=none + vendor=none + ;; + leon|leon[3-9]) + cpu=sparc + vendor=$basic_machine + ;; + leon-*|leon[3-9]-*) + cpu=sparc + vendor=`echo "$basic_machine" | sed 's/-.*//'` + ;; + + *-*) + # shellcheck disable=SC2162 + IFS="-" read cpu vendor <&2 + exit 1 + ;; + esac + ;; +esac + +# Here we canonicalize certain aliases for manufacturers. +case $vendor in + digital*) + vendor=dec + ;; + commodore*) + vendor=cbm + ;; + *) + ;; +esac + +# Decode manufacturer-specific aliases for certain operating systems. + +if [ x$basic_os != x ] +then + +# First recognize some ad-hoc caes, or perhaps split kernel-os, or else just +# set os. +case $basic_os in + gnu/linux*) + kernel=linux + os=`echo $basic_os | sed -e 's|gnu/linux|gnu|'` + ;; + nto-qnx*) + kernel=nto + os=`echo $basic_os | sed -e 's|nto-qnx|qnx|'` + ;; + *-*) + # shellcheck disable=SC2162 + IFS="-" read kernel os <&2 + exit 1 + ;; +esac + +# As a final step for OS-related things, validate the OS-kernel combination +# (given a valid OS), if there is a kernel. +case $kernel-$os in + linux-gnu* | linux-dietlibc* | linux-android* | linux-newlib* | linux-musl* | linux-uclibc* ) + ;; + -dietlibc* | -newlib* | -musl* | -uclibc* ) + # These are just libc implementations, not actual OSes, and thus + # require a kernel. + echo "Invalid configuration \`$1': libc \`$os' needs explicit kernel." 1>&2 + exit 1 + ;; + kfreebsd*-gnu* | kopensolaris*-gnu*) + ;; + nto-qnx*) + ;; + *-eabi* | *-gnueabi*) + ;; + -*) + # Blank kernel with real OS is always fine. + ;; + *-*) + echo "Invalid configuration \`$1': Kernel \`$kernel' not known to work with OS \`$os'." 1>&2 + exit 1 + ;; +esac + +# Here we handle the case where we know the os, and the CPU type, but not the +# manufacturer. We pick the logical manufacturer. +case $vendor in + unknown) + case $cpu-$os in + *-riscix*) + vendor=acorn + ;; + *-sunos*) + vendor=sun + ;; + *-cnk* | *-aix*) + vendor=ibm + ;; + *-beos*) + vendor=be + ;; + *-hpux*) + vendor=hp + ;; + *-mpeix*) + vendor=hp + ;; + *-hiux*) + vendor=hitachi + ;; + *-unos*) + vendor=crds + ;; + *-dgux*) + vendor=dg + ;; + *-luna*) + vendor=omron + ;; + *-genix*) + vendor=ns + ;; + *-clix*) + vendor=intergraph + ;; + *-mvs* | *-opened*) + vendor=ibm + ;; + *-os400*) + vendor=ibm + ;; + s390-* | s390x-*) + vendor=ibm + ;; + *-ptx*) + vendor=sequent + ;; + *-tpf*) + vendor=ibm + ;; + *-vxsim* | *-vxworks* | *-windiss*) + vendor=wrs + ;; + *-aux*) + vendor=apple + ;; + *-hms*) + vendor=hitachi + ;; + *-mpw* | *-macos*) + vendor=apple + ;; + *-*mint | *-mint[0-9]* | *-*MiNT | *-MiNT[0-9]*) + vendor=atari + ;; + *-vos*) + vendor=stratus + ;; + esac + ;; +esac + +echo "$cpu-$vendor-${kernel:+$kernel-}$os" +exit + +# Local variables: +# eval: (add-hook 'before-save-hook 'time-stamp) +# time-stamp-start: "timestamp='" +# time-stamp-format: "%:y-%02m-%02d" +# time-stamp-end: "'" +# End: diff --git a/build/autoconf/expandlibs.m4 b/build/autoconf/expandlibs.m4 new file mode 100644 index 0000000000..959ee6f1b9 --- /dev/null +++ b/build/autoconf/expandlibs.m4 @@ -0,0 +1,52 @@ +dnl This Source Code Form is subject to the terms of the Mozilla Public +dnl License, v. 2.0. If a copy of the MPL was not distributed with this +dnl file, You can obtain one at http://mozilla.org/MPL/2.0/. 
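The config.sub script above ends by echoing the reassembled cpu-vendor[-kernel]-os tuple. As a rough illustration of the canonicalization it performs, typical invocations look like this (the outputs are representative of what the script prints, not excerpted from this patch):

    $ sh build/autoconf/config.sub i686-linux
    i686-pc-linux-gnu
    $ sh build/autoconf/config.sub sun4
    sparc-sun-sunos4.1.1

Short-hands and vendor aliases are expanded to the same canonical spelling so every consumer of the triplet agrees on it.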
+ +AC_DEFUN([MOZ_EXPAND_LIBS], +[ +dnl ======================================================== +dnl = +dnl = Check what kind of list files are supported by the +dnl = linker +dnl = +dnl ======================================================== + +AC_CACHE_CHECK(what kind of list files are supported by the linker, + EXPAND_LIBS_LIST_STYLE, + [echo "int main() {return 0;}" > conftest.${ac_ext} + dnl Because BFD ld doesn't work with LTO + linker scripts, we + dnl must pass the LTO CFLAGS to the compile command, and the LTO + dnl LDFLAGS to all subsequent link commands. + dnl https://sourceware.org/bugzilla/show_bug.cgi?id=23600 + if AC_TRY_COMMAND(${CC-cc} -o conftest.${OBJ_SUFFIX} -c $MOZ_LTO_CFLAGS $CFLAGS $CPPFLAGS conftest.${ac_ext} 1>&5) && test -s conftest.${OBJ_SUFFIX}; then + echo "INPUT(conftest.${OBJ_SUFFIX})" > conftest.list + if test "$CC_TYPE" = "clang-cl"; then + link="$LINKER -OUT:conftest${ac_exeext}" + else + link="${CC-cc} -o conftest${ac_exeext}" + fi + if AC_TRY_COMMAND($link $MOZ_LTO_LDFLAGS $LDFLAGS conftest.list $LIBS 1>&5) && test -s conftest${ac_exeext}; then + EXPAND_LIBS_LIST_STYLE=linkerscript + else + echo "conftest.${OBJ_SUFFIX}" > conftest.list + dnl -filelist is for the OS X linker. We need to try -filelist + dnl first because clang understands @file, but may pass an + dnl oversized argument list to the linker depending on the + dnl contents of @file. + if AC_TRY_COMMAND($link $MOZ_LTO_LDFLAGS $LDFLAGS [-Wl,-filelist,conftest.list] $LIBS 1>&5) && test -s conftest${ac_exeext}; then + EXPAND_LIBS_LIST_STYLE=filelist + elif AC_TRY_COMMAND($link $MOZ_LTO_LDFLAGS $LDFLAGS @conftest.list $LIBS 1>&5) && test -s conftest${ac_exeext}; then + EXPAND_LIBS_LIST_STYLE=list + else + AC_ERROR([Couldn't find one that works]) + fi + fi + else + dnl We really don't expect to get here, but just in case + AC_ERROR([couldn't compile a simple C file]) + fi + rm -rf conftest*]) + +AC_SUBST(EXPAND_LIBS_LIST_STYLE) + +]) diff --git a/build/autoconf/hooks.m4 b/build/autoconf/hooks.m4 new file mode 100644 index 0000000000..84d58205c1 --- /dev/null +++ b/build/autoconf/hooks.m4 @@ -0,0 +1,31 @@ +dnl This Source Code Form is subject to the terms of the Mozilla Public +dnl License, v. 2.0. If a copy of the MPL was not distributed with this +dnl file, You can obtain one at http://mozilla.org/MPL/2.0/. + +dnl Wrap AC_INIT_PREPARE to add the above trap. +define([_MOZ_AC_INIT_PREPARE], defn([AC_INIT_PREPARE])) +define([AC_INIT_PREPARE], +[_MOZ_AC_INIT_PREPARE($1) + +test "x$prefix" = xNONE && prefix=$ac_default_prefix +# Let make expand exec_prefix. +test "x$exec_prefix" = xNONE && exec_prefix='${prefix}' +]) + +dnl Print error messages in config.log as well as stderr +define([AC_MSG_ERROR], +[{ echo "configure: error: $1" 1>&2; echo "configure: error: $1" 1>&5; exit 1; }]) + +dnl Divert AC_TRY_COMPILER to make ac_cv_prog_*_works actually cached. +dnl This will allow to just skip the test when python configure has set +dnl the value for us. But since ac_cv_prog_*_cross is calculated at the same +dnl time, and has a different meaning as in python configure, we only want to +dnl use its value to display whether a cross-compile is happening. We forbid +dnl configure tests that would rely on ac_cv_prog_*_cross autoconf meaning +dnl (being able to execute the product of compilation), which are already bad +dnl for cross compiles anyways, so it's a win to get rid of them. 
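A minimal sketch of what the AC_TRY_COMPILER caching described above buys in practice (the cache variable names follow the autoconf 2.13 convention referenced in the comment; this is illustrative, not part of the patch):

    # python configure exports its result into the environment/cache, e.g.
    ac_cv_prog_cc_works=yes
    # AC_TRY_COMPILER now expands to AC_CACHE_VAL(ac_cv_prog_cc_works, ...),
    # so the duplicate compile-and-link probe is skipped entirely, and
    # ac_cv_prog_cc_cross is consulted only to report whether this is a
    # cross build, never to gate a runtime test.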
+define([_MOZ_AC_TRY_COMPILER], defn([AC_TRY_COMPILER])) +define([AC_TRY_COMPILER], [AC_CACHE_VAL($2, _MOZ_AC_TRY_COMPILER($1, $2, $3))]) + +define([AC_TRY_RUN], [m4_fatal([AC_TRY_RUN is forbidden])]) +define([AC_CHECK_FILE], [m4_fatal([AC_CHECK_FILE is forbidden])]) diff --git a/build/autoconf/hotfixes.m4 b/build/autoconf/hotfixes.m4 new file mode 100644 index 0000000000..9c8362041f --- /dev/null +++ b/build/autoconf/hotfixes.m4 @@ -0,0 +1,23 @@ +dnl This Source Code Form is subject to the terms of the Mozilla Public +dnl dnl License, v. 2.0. If a copy of the MPL was not distributed with this +dnl dnl file, You can obtain one at http://mozilla.org/MPL/2.0/. + +dnl Set of hotfixes to address issues in autoconf 2.13 + +dnl Divert AC_CHECK_FUNC so that the #includes it uses can't interfere +dnl with the function it tests. +dnl So, when testing e.g. posix_memalign, any #include that AC_CHECK_FUNC +dnl prints is replaced with: +dnl #define posix_memalign innocuous_posix_memalign +dnl #include "theinclude" +dnl #undef posix_memalign +dnl This avoids double declaration of that function when the header normally +dnl declares it, while the test itself is just expecting the function not to be +dnl declared at all, and declares it differently (which doesn't matter for the +dnl test itself). +dnl More recent versions of autoconf are essentially doing this. +define([_AC_CHECK_FUNC],defn([AC_CHECK_FUNC]))dnl +define([AC_CHECK_FUNC], [dnl +patsubst(_AC_CHECK_FUNC($@), [#include.*$], [#define $1 innocuous_$1 +\& +#undef $1])])dnl diff --git a/build/autoconf/install-sh b/build/autoconf/install-sh new file mode 100755 index 0000000000..a4be13e59f --- /dev/null +++ b/build/autoconf/install-sh @@ -0,0 +1,123 @@ +#!/bin/sh +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + + +# +# install - install a program, script, or datafile +# This comes from X11R5; it is not part of GNU. +# +# $XConsortium: install.sh,v 1.2 89/12/18 14:47:22 jim Exp $ +# +# This script is compatible with the BSD install script, but was written +# from scratch. +# + + +# set DOITPROG to echo to test this script + +# Don't use :- since 4.3BSD and earlier shells don't like it. +doit="${DOITPROG-}" + + +# put in absolute paths if you don't have them in your path; or use env. vars. 
+ +mvprog="${MVPROG-mv}" +cpprog="${CPPROG-cp}" +chmodprog="${CHMODPROG-chmod}" +chownprog="${CHOWNPROG-chown}" +chgrpprog="${CHGRPPROG-chgrp}" +stripprog="${STRIPPROG-strip}" +rmprog="${RMPROG-rm}" + +instcmd="$mvprog" +chmodcmd="" +chowncmd="" +chgrpcmd="" +stripcmd="" +rmcmd="$rmprog -f" +mvcmd="$mvprog" +src="" +dst="" + +while [ x"$1" != x ]; do + case $1 in + -c) instcmd="$cpprog" + shift + continue;; + + -m) chmodcmd="$chmodprog $2" + shift + shift + continue;; + + -o) chowncmd="$chownprog $2" + shift + shift + continue;; + + -g) chgrpcmd="$chgrpprog $2" + shift + shift + continue;; + + -s) stripcmd="$stripprog" + shift + continue;; + + *) if [ x"$src" = x ] + then + src=$1 + else + dst=$1 + fi + shift + continue;; + esac +done + +if [ x"$src" = x ] +then + echo "install: no input file specified" + exit 1 +fi + +if [ x"$dst" = x ] +then + echo "install: no destination specified" + exit 1 +fi + + +# If destination is a directory, append the input filename; if your system +# does not like double slashes in filenames, you may need to add some logic + +if [ -d $dst ] +then + dst="$dst"/`basename $src` +fi + +# Make a temp file name in the proper directory. + +dstdir=`dirname $dst` +dsttmp=$dstdir/#inst.$$# + +# Move or copy the file name to the temp name + +$doit $instcmd $src $dsttmp + +# and set any options; do chmod last to preserve setuid bits + +if [ x"$chowncmd" != x ]; then $doit $chowncmd $dsttmp; fi +if [ x"$chgrpcmd" != x ]; then $doit $chgrpcmd $dsttmp; fi +if [ x"$stripcmd" != x ]; then $doit $stripcmd $dsttmp; fi +if [ x"$chmodcmd" != x ]; then $doit $chmodcmd $dsttmp; fi + +# Now rename the file to the real destination. + +$doit $rmcmd $dst +$doit $mvcmd $dsttmp $dst + + +exit 0 diff --git a/build/autoconf/mozheader.m4 b/build/autoconf/mozheader.m4 new file mode 100644 index 0000000000..e99e35a403 --- /dev/null +++ b/build/autoconf/mozheader.m4 @@ -0,0 +1,32 @@ +dnl This Source Code Form is subject to the terms of the Mozilla Public +dnl License, v. 2.0. If a copy of the MPL was not distributed with this +dnl file, You can obtain one at http://mozilla.org/MPL/2.0/. + +dnl MOZ_CHECK_HEADER(HEADER-FILE, [ACTION-IF-FOUND [, ACTION-IF-NOT-FOUND [, INCLUDES]]]) +AC_DEFUN([MOZ_CHECK_HEADER], +[ dnl Do the transliteration at runtime so arg 1 can be a shell variable. + ac_safe=`echo "$1" | sed 'y%./+-%__p_%'` + AC_MSG_CHECKING([for $1]) + AC_CACHE_VAL(ac_cv_header_$ac_safe, + [ AC_TRY_COMPILE([$4 +#include <$1>], , + eval "ac_cv_header_$ac_safe=yes", + eval "ac_cv_header_$ac_safe=no") ]) + if eval "test \"`echo '$ac_cv_header_'$ac_safe`\" = yes"; then + AC_MSG_RESULT(yes) + ifelse([$2], , :, [$2]) + else + AC_MSG_RESULT(no) + ifelse([$3], , , [$3]) + fi +]) + +dnl MOZ_CHECK_HEADERS(HEADER-FILE... [, ACTION-IF-FOUND [, ACTION-IF-NOT-FOUND [, INCLUDES]]]) +AC_DEFUN([MOZ_CHECK_HEADERS], +[ for ac_hdr in $1 + do + MOZ_CHECK_HEADER($ac_hdr, + [ ac_tr_hdr=HAVE_`echo $ac_hdr | sed 'y%abcdefghijklmnopqrstuvwxyz./-%ABCDEFGHIJKLMNOPQRSTUVWXYZ___%'` + AC_DEFINE_UNQUOTED($ac_tr_hdr) $2], $3, [$4]) + done +]) diff --git a/build/autoconf/mozprog.m4 b/build/autoconf/mozprog.m4 new file mode 100644 index 0000000000..08747b495f --- /dev/null +++ b/build/autoconf/mozprog.m4 @@ -0,0 +1,42 @@ +dnl This Source Code Form is subject to the terms of the Mozilla Public +dnl License, v. 2.0. If a copy of the MPL was not distributed with this +dnl file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +AC_DEFUN([MOZ_PROG_CHECKMSYS], +[AC_REQUIRE([AC_INIT_BINSH])dnl +if test `uname -s | grep -c MINGW 2>/dev/null` != "0"; then + msyshost=1 +fi +]) + +AC_DEFUN([MOZ_PATH_PROG], +[ AC_PATH_PROG($1,$2,$3,$4) + if test "$msyshost"; then + case "[$]$1" in + /*) + tmp_DIRNAME=`dirname "[$]$1"` + tmp_BASENAME=`basename "[$]$1"` + tmp_PWD=`cd "$tmp_DIRNAME" && pwd -W` + $1="$tmp_PWD/$tmp_BASENAME" + if test -e "[$]$1.exe"; then + $1="[$]$1.exe" + fi + esac + fi +]) + +AC_DEFUN([MOZ_PATH_PROGS], +[ AC_PATH_PROGS($1,$2,$3,$4) + if test "$msyshost"; then + case "[$]$1" in + /*) + tmp_DIRNAME=`dirname "[$]$1"` + tmp_BASENAME=`basename "[$]$1"` + tmp_PWD=`cd "$tmp_DIRNAME" && pwd -W` + $1="$tmp_PWD/$tmp_BASENAME" + if test -e "[$]$1.exe"; then + $1="[$]$1.exe" + fi + esac + fi +]) diff --git a/build/autoconf/pkg.m4 b/build/autoconf/pkg.m4 new file mode 100644 index 0000000000..ca8ef7e5a2 --- /dev/null +++ b/build/autoconf/pkg.m4 @@ -0,0 +1,61 @@ +dnl This Source Code Form is subject to the terms of the Mozilla Public +dnl License, v. 2.0. If a copy of the MPL was not distributed with this +dnl file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# PKG_CHECK_MODULES(GSTUFF, gtk+-2.0 >= 1.3 glib = 1.3.4, action-if, action-not) +# defines GSTUFF_LIBS, GSTUFF_CFLAGS, see pkg-config man page +# also defines GSTUFF_PKG_ERRORS on error +# PKG_CONFIG is set by Python configure, if it is empty here it could not +# be found. +AC_DEFUN([PKG_CHECK_MODULES], +[succeeded=no + + if test -z "$PKG_CONFIG"; then + echo "*** The pkg-config script could not be found. Make sure it is" + echo "*** in your path, or set the PKG_CONFIG environment variable" + echo "*** to the full path to pkg-config." + echo "*** Or see http://www.freedesktop.org/software/pkgconfig to get pkg-config." + else + PKG_CONFIG_MIN_VERSION=0.9.0 + if $PKG_CONFIG --atleast-pkgconfig-version $PKG_CONFIG_MIN_VERSION; then + AC_MSG_CHECKING(for $2) + + if $PKG_CONFIG --exists "$2" ; then + AC_MSG_RESULT(yes) + succeeded=yes + + AC_MSG_CHECKING($1_CFLAGS) + $1_CFLAGS=`$PKG_CONFIG --cflags "$2"` + AC_MSG_RESULT($$1_CFLAGS) + + AC_MSG_CHECKING($1_LIBS) + ## Remove evil flags like -Wl,--export-dynamic + $1_LIBS="`$PKG_CONFIG --libs \"$2\" |sed s/-Wl,--export-dynamic//g`" + AC_MSG_RESULT($$1_LIBS) + else + $1_CFLAGS="" + $1_LIBS="" + ## If we have a custom action on failure, don't print errors, but + ## do set a variable so people can do so. + $1_PKG_ERRORS=`$PKG_CONFIG --errors-to-stdout --print-errors "$2"` + ifelse([$4], ,echo $$1_PKG_ERRORS,) + fi + + AC_SUBST_LIST($1_CFLAGS) + AC_SUBST_LIST($1_LIBS) + else + echo "*** Your version of pkg-config is too old. You need version $PKG_CONFIG_MIN_VERSION or newer." + echo "*** See http://www.freedesktop.org/software/pkgconfig" + fi + fi + + if test $succeeded = yes; then + ifelse([$3], , :, [$3]) + else + if test "$COMPILE_ENVIRONMENT"; then + ifelse([$4], , AC_MSG_ERROR([Library requirements ($2) not met; consider adjusting the PKG_CONFIG_PATH environment variable if your libraries are in a nonstandard prefix so pkg-config can find them.]), [$4]) + fi + fi +]) + + diff --git a/build/autoconf/sanitize.m4 b/build/autoconf/sanitize.m4 new file mode 100644 index 0000000000..11fc6caebb --- /dev/null +++ b/build/autoconf/sanitize.m4 @@ -0,0 +1,135 @@ +dnl This Source Code Form is subject to the terms of the Mozilla Public +dnl License, v. 2.0. If a copy of the MPL was not distributed with this +dnl file, You can obtain one at http://mozilla.org/MPL/2.0/. 
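For orientation on the PKG_CHECK_MODULES macro above: its checks reduce to plain pkg-config invocations, roughly as follows (the gtk+-2.0/glib module spec is just the example given in the macro's own comment):

    pkg-config --atleast-pkgconfig-version 0.9.0
    pkg-config --exists 'gtk+-2.0 >= 1.3 glib = 1.3.4'   # the "checking for ..." test
    pkg-config --cflags 'gtk+-2.0 >= 1.3 glib = 1.3.4'   # becomes GSTUFF_CFLAGS
    pkg-config --libs   'gtk+-2.0 >= 1.3 glib = 1.3.4'   # becomes GSTUFF_LIBS, with
                                                         # -Wl,--export-dynamic stripped

If pkg-config is missing, too old, or the spec does not resolve, the macro leaves the variables empty and, when COMPILE_ENVIRONMENT is set, runs the action-if-not-found argument or aborts with an error.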
+ +AC_DEFUN([MOZ_CONFIG_SANITIZE], [ + +dnl ======================================================== +dnl = Use Address Sanitizer +dnl ======================================================== +if test -n "$MOZ_ASAN"; then + if test -n "$CLANG_CL"; then + # Look for the ASan runtime binary + if test "$CPU_ARCH" = "x86_64"; then + MOZ_CLANG_RT_ASAN_LIB=clang_rt.asan_dynamic-x86_64.dll + else + MOZ_CLANG_RT_ASAN_LIB=clang_rt.asan_dynamic-i386.dll + fi + # We use MOZ_PATH_PROG in order to get a Windows style path. + MOZ_PATH_PROG(MOZ_CLANG_RT_ASAN_LIB_PATH, $MOZ_CLANG_RT_ASAN_LIB) + if test -z "$MOZ_CLANG_RT_ASAN_LIB_PATH"; then + AC_MSG_ERROR([Couldn't find $MOZ_CLANG_RT_ASAN_LIB. It should be available in the same location as clang-cl.]) + fi + AC_SUBST(MOZ_CLANG_RT_ASAN_LIB_PATH) + # Suppressing errors in recompiled code. + if test "$OS_ARCH" = "WINNT"; then + CFLAGS="-fsanitize-blacklist=$_topsrcdir/build/sanitizers/asan_blacklist_win.txt $CFLAGS" + CXXFLAGS="-fsanitize-blacklist=$_topsrcdir/build/sanitizers/asan_blacklist_win.txt $CXXFLAGS" + fi + fi + CFLAGS="-fsanitize=address $CFLAGS" + CXXFLAGS="-fsanitize=address $CXXFLAGS" + if test -z "$CLANG_CL"; then + LDFLAGS="-fsanitize=address -rdynamic $LDFLAGS" + fi + AC_DEFINE(MOZ_ASAN) + MOZ_PATH_PROG(LLVM_SYMBOLIZER, llvm-symbolizer) +fi +AC_SUBST(MOZ_ASAN) + +dnl ======================================================== +dnl = Use Memory Sanitizer +dnl ======================================================== +if test -n "$MOZ_MSAN"; then + CFLAGS="-fsanitize=memory -fsanitize-memory-track-origins $CFLAGS" + CXXFLAGS="-fsanitize=memory -fsanitize-memory-track-origins $CXXFLAGS" + if test -z "$CLANG_CL"; then + LDFLAGS="-fsanitize=memory -fsanitize-memory-track-origins -rdynamic $LDFLAGS" + fi + AC_DEFINE(MOZ_MSAN) + MOZ_PATH_PROG(LLVM_SYMBOLIZER, llvm-symbolizer) +fi +AC_SUBST(MOZ_MSAN) + +dnl ======================================================== +dnl = Use Thread Sanitizer +dnl ======================================================== +if test -n "$MOZ_TSAN"; then + CFLAGS="-fsanitize=thread $CFLAGS" + CXXFLAGS="-fsanitize=thread $CXXFLAGS" + if test -z "$CLANG_CL"; then + LDFLAGS="-fsanitize=thread -rdynamic $LDFLAGS" + fi + AC_DEFINE(MOZ_TSAN) + MOZ_PATH_PROG(LLVM_SYMBOLIZER, llvm-symbolizer) +fi +AC_SUBST(MOZ_TSAN) + +dnl ======================================================== +dnl = Use UndefinedBehavior Sanitizer (with custom checks) +dnl ======================================================== +if test -n "$MOZ_UBSAN_CHECKS"; then + MOZ_UBSAN=1 + UBSAN_TXT="$_objdir/ubsan_blacklist.txt" + cat $_topsrcdir/build/sanitizers/ubsan_*_blacklist.txt > $UBSAN_TXT + UBSAN_FLAGS="-fsanitize=$MOZ_UBSAN_CHECKS -fno-sanitize-recover=$MOZ_UBSAN_CHECKS -fsanitize-blacklist=$UBSAN_TXT" + CFLAGS="$UBSAN_FLAGS $CFLAGS" + CXXFLAGS="$UBSAN_FLAGS $CXXFLAGS" + if test -z "$CLANG_CL"; then + LDFLAGS="-fsanitize=undefined -rdynamic $LDFLAGS" + fi + AC_DEFINE(MOZ_UBSAN) + MOZ_PATH_PROG(LLVM_SYMBOLIZER, llvm-symbolizer) +fi +AC_SUBST(MOZ_UBSAN) + +dnl ======================================================== +dnl = Use UndefinedBehavior Sanitizer to find integer overflows +dnl ======================================================== +if test -n "$MOZ_SIGNED_OVERFLOW_SANITIZE$MOZ_UNSIGNED_OVERFLOW_SANITIZE"; then + MOZ_UBSAN=1 + SANITIZER_BLACKLISTS="" + if test -n "$MOZ_SIGNED_OVERFLOW_SANITIZE"; then + SANITIZER_BLACKLISTS="-fsanitize-blacklist=$_topsrcdir/build/sanitizers/ubsan_signed_overflow_blacklist.txt $SANITIZER_BLACKLISTS" + 
CFLAGS="-fsanitize=signed-integer-overflow $CFLAGS" + CXXFLAGS="-fsanitize=signed-integer-overflow $CXXFLAGS" + if test -z "$CLANG_CL"; then + LDFLAGS="-fsanitize=signed-integer-overflow -rdynamic $LDFLAGS" + fi + AC_DEFINE(MOZ_SIGNED_OVERFLOW_SANITIZE) + fi + if test -n "$MOZ_UNSIGNED_OVERFLOW_SANITIZE"; then + SANITIZER_BLACKLISTS="-fsanitize-blacklist=$_topsrcdir/build/sanitizers/ubsan_unsigned_overflow_blacklist.txt $SANITIZER_BLACKLISTS" + CFLAGS="-fsanitize=unsigned-integer-overflow $CFLAGS" + CXXFLAGS="-fsanitize=unsigned-integer-overflow $CXXFLAGS" + if test -z "$CLANG_CL"; then + LDFLAGS="-fsanitize=unsigned-integer-overflow -rdynamic $LDFLAGS" + fi + AC_DEFINE(MOZ_UNSIGNED_OVERFLOW_SANITIZE) + fi + CFLAGS="$SANITIZER_BLACKLISTS $CFLAGS" + CXXFLAGS="$SANITIZER_BLACKLISTS $CXXFLAGS" + AC_DEFINE(MOZ_UBSAN) + MOZ_PATH_PROG(LLVM_SYMBOLIZER, llvm-symbolizer) +fi +AC_SUBST(MOZ_SIGNED_OVERFLOW_SANITIZE) +AC_SUBST(MOZ_UNSIGNED_OVERFLOW_SANITIZE) +AC_SUBST(MOZ_UBSAN) + +dnl ======================================================= +dnl = Required for stand-alone (sanitizer-less) libFuzzer. +dnl ======================================================= +if test -n "$LIBFUZZER"; then + LDFLAGS="$LIBFUZZER_FLAGS -rdynamic $LDFLAGS" +fi + +# The LLVM symbolizer is used by all sanitizers +AC_SUBST(LLVM_SYMBOLIZER) + +dnl ======================================================== +dnl = Test for whether the compiler is compatible with the +dnl = given sanitize options. +dnl ======================================================== +AC_TRY_LINK(,,,AC_MSG_ERROR([compiler is incompatible with sanitize options])) + +]) diff --git a/build/autoconf/toolchain.m4 b/build/autoconf/toolchain.m4 new file mode 100644 index 0000000000..2a8744610c --- /dev/null +++ b/build/autoconf/toolchain.m4 @@ -0,0 +1,131 @@ +dnl This Source Code Form is subject to the terms of the Mozilla Public +dnl License, v. 2.0. If a copy of the MPL was not distributed with this +dnl file, You can obtain one at http://mozilla.org/MPL/2.0/. + +dnl Several autoconf functions AC_REQUIRE AC_PROG_CPP/AC_PROG_CXXCPP +dnl or AC_HEADER_STDC, meaning they are called even when we don't call +dnl them explicitly. +dnl However, theses checks are not necessary and python configure sets +dnl the corresponding variables already, so just skip those tests +dnl entirely. +define([AC_PROG_CPP],[]) +define([AC_PROG_CXXCPP],[]) +define([AC_HEADER_STDC], []) + +dnl AC_LANG_* set ac_link to the C/C++ compiler, which works fine with +dnl gcc and clang, but not great with clang-cl, where the build system +dnl currently expects to run the linker independently. So LDFLAGS are not +dnl really adapted to be used with clang-cl, which then proceeds to +dnl execute link.exe rather than lld-link.exe. +dnl So when the compiler is clang-cl, we modify ac_link to use a separate +dnl linker call. 
+define([_MOZ_AC_LANG_C], defn([AC_LANG_C])) +define([AC_LANG_C], +[_MOZ_AC_LANG_C +if test "$CC_TYPE" = "clang-cl"; then + ac_link="$ac_compile"' && ${LINKER} -OUT:conftest${ac_exeext} $LDFLAGS conftest.obj $LIBS 1>&AC_FD_CC' +fi +]) + +define([_MOZ_AC_LANG_CPLUSPLUS], defn([AC_LANG_CPLUSPLUS])) +define([AC_LANG_CPLUSPLUS], +[_MOZ_AC_LANG_CPLUSPLUS +if test "$CC_TYPE" = "clang-cl"; then + ac_link="$ac_compile"' && ${LINKER} -OUT:conftest${ac_exeext} $LDFLAGS conftest.obj $LIBS 1>&AC_FD_CC' +fi +]) + +AC_DEFUN([MOZ_TOOL_VARIABLES], +[ +GNU_CC= +GNU_CXX= +if test "$CC_TYPE" = "gcc"; then + GNU_CC=1 + GNU_CXX=1 +fi + +CLANG_CC= +CLANG_CXX= +CLANG_CL= +if test "$CC_TYPE" = "clang"; then + GNU_CC=1 + GNU_CXX=1 + CLANG_CC=1 + CLANG_CXX=1 +fi +if test "$CC_TYPE" = "clang-cl"; then + CLANG_CL=1 +fi + +AC_SUBST(CLANG_CXX) +AC_SUBST(CLANG_CL) +]) + +AC_DEFUN([MOZ_CROSS_COMPILER], +[ +echo "cross compiling from $host to $target" + +dnl AC_CHECK_PROGS manually goes through $PATH, and as such fails to handle +dnl absolute or relative paths. Relative paths wouldn't work anyways, but +dnl absolute paths would. Trick AC_CHECK_PROGS into working in that case by +dnl adding / to PATH. This is temporary until this moves to moz.configure +dnl (soon). +_SAVE_PATH=$PATH +case "${TOOLCHAIN_PREFIX}" in +/*) + PATH="/:$PATH" + ;; +esac +AC_PROG_CC +AC_PROG_CXX + +AC_CHECK_PROGS(RANLIB, "${TOOLCHAIN_PREFIX}ranlib", :) +AC_CHECK_PROGS(AS, "${TOOLCHAIN_PREFIX}as", :) +AC_CHECK_PROGS(LIPO, "${TOOLCHAIN_PREFIX}lipo", :) +AC_CHECK_PROGS(STRIP, "${TOOLCHAIN_PREFIX}strip", :) +AC_CHECK_PROGS(OTOOL, "${TOOLCHAIN_PREFIX}otool", :) +AC_CHECK_PROGS(INSTALL_NAME_TOOL, "${TOOLCHAIN_PREFIX}install_name_tool", :) +AC_CHECK_PROGS(OBJCOPY, "${TOOLCHAIN_PREFIX}objcopy", :) +PATH=$_SAVE_PATH +]) + +AC_DEFUN([MOZ_CXX11], +[ +dnl Updates to the test below should be duplicated further below for the +dnl cross-compiling case. +AC_LANG_CPLUSPLUS +if test "$GNU_CXX"; then + AC_CACHE_CHECK([whether 64-bits std::atomic requires -latomic], + ac_cv_needs_atomic, + dnl x86 with clang is a little peculiar. std::atomic does not require + dnl linking with libatomic, but using atomic intrinsics does, so we + dnl force the setting on for such systems. + if test "$CC_TYPE" = "clang" -a "$CPU_ARCH" = "x86" -a "$OS_ARCH" = "Linux"; then + ac_cv_needs_atomic=yes + else + AC_TRY_LINK( + [#include + #include ], + [ std::atomic foo; foo = 1; ], + ac_cv_needs_atomic=no, + _SAVE_LIBS="$LIBS" + LIBS="$LIBS -latomic" + AC_TRY_LINK( + [#include + #include ], + [ std::atomic foo; foo = 1; ], + ac_cv_needs_atomic=yes, + ac_cv_needs_atomic="do not know; assuming no") + LIBS="$_SAVE_LIBS" + ) + fi + ) + if test "$ac_cv_needs_atomic" = yes; then + MOZ_NEEDS_LIBATOMIC=1 + else + MOZ_NEEDS_LIBATOMIC= + fi + AC_SUBST(MOZ_NEEDS_LIBATOMIC) +fi +AC_LANG_C +]) diff --git a/build/binary-location.mk b/build/binary-location.mk new file mode 100644 index 0000000000..19ae4b55f3 --- /dev/null +++ b/build/binary-location.mk @@ -0,0 +1,19 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +# finds the location of the browser and puts it in the variable $(browser_path) + +ifneq (,$(filter WINNT,$(OS_ARCH))) +program = $(MOZ_APP_NAME)$(BIN_SUFFIX) +else +program = $(MOZ_APP_NAME)-bin$(BIN_SUFFIX) +endif + +TARGET_DIST = $(TARGET_DEPTH)/dist + +ifeq ($(OS_ARCH),Darwin) +browser_path = $(TARGET_DIST)/$(MOZ_MACBUNDLE_NAME)/Contents/MacOS/$(program) +else +browser_path = $(TARGET_DIST)/bin/$(program) +endif diff --git a/build/build-clang/README b/build/build-clang/README new file mode 100644 index 0000000000..8906886b55 --- /dev/null +++ b/build/build-clang/README @@ -0,0 +1,57 @@ +build-clang.py +============== + +A script to build clang from source. + +``` +usage: build-clang.py [-h] -c CONFIG [--clean] + +optional arguments: + -h, --help show this help message and exit + -c CONFIG, --config CONFIG + Clang configuration file + --clean Clean the build directory +``` + +Pre-requisites +-------------- +* Working build toolchain. +* git +* CMake +* Ninja +* Python 2.7 and 3 + +Please use the latest available CMake for your platform to avoid surprises. + +Config file format +------------------ + +build-clang.py accepts a JSON config format with the following fields: + +* stages: Use 1, 2, 3 or 4 to select different compiler stages. The default is 3. +* python_path: Path to the Python 2.7 installation on the machine building clang. +* gcc_dir: Path to the gcc toolchain installation, only required on Linux. +* cc: Path to the bootsraping C Compiler. +* cxx: Path to the bootsraping C++ Compiler. +* as: Path to the assembler tool. +* ar: Path to the library archiver tool. +* ranlib: Path to the ranlib tool (optional). +* libtool: Path to the libtool tool (optional). +* ld: Path to the linker. +* patches: Optional list of patches to apply. +* build_type: The type of build to make. Supported types: Release, Debug, RelWithDebInfo or MinSizeRel. +* build_libcxx: Whether to build with libcxx. The default is false. +* build_clang_tidy: Whether to build clang-tidy with the Mozilla checks imported. The default is false. +* osx_cross_compile: Whether to invoke CMake for OS X cross compile builds. +* assertions: Whether to enable LLVM assertions. The default is false. +* pgo: Whether to build with PGO (requires stages == 4). The default is false. + +The revisions are defined in taskcluster/ci/fetch/toolchains.yml. They are usually commit sha1s corresponding to upstream tags. + +Environment Variables +--------------------- + +The following environment variables are used for cross-compile builds targeting OS X on Linux. + +* CROSS_CCTOOLS_PATH: Path to the cctools directory where the cross compiler toolchain is located. +* CROSS_SYSROOT: Path to the OS X SDK directory for cross compile builds. diff --git a/build/build-clang/android-mangling-error.patch b/build/build-clang/android-mangling-error.patch new file mode 100644 index 0000000000..af32f59c05 --- /dev/null +++ b/build/build-clang/android-mangling-error.patch @@ -0,0 +1,34 @@ +Workaround segfault in clang's mangling code that is tickled when +attempting to mangle the declaration: + std:__ndk1::__find_detail::__find_exactly_one_checked::__matches +in the header in the Android NDK. +This codepath is exercised by MozsearchIndexer.cpp (the searchfox +indexer) when indexing on Android. 
See also +https://bugs.llvm.org/show_bug.cgi?id=40747 + +diff --git a/clang/lib/AST/ItaniumMangle.cpp b/clang/lib/AST/ItaniumMangle.cpp +index 2dc04f2f3d8..054fc27003d 100644 +--- a/clang/lib/AST/ItaniumMangle.cpp ++++ b/clang/lib/AST/ItaniumMangle.cpp +@@ -3495,16 +3495,21 @@ void CXXNameMangler::mangleExpression(const Expr *E, unsigned Arity) { + // ::= + // ::= L E # integer literal + // ::= L E # floating literal + // ::= L E # external name + // ::= fpT # 'this' expression + QualType ImplicitlyConvertedToType; + + recurse: ++ if (!E) { ++ Out << "MOZ_WE_HACKED_AROUND_BUG_1500941"; ++ return; ++ } ++ + switch (E->getStmtClass()) { + case Expr::NoStmtClass: + #define ABSTRACT_STMT(Type) + #define EXPR(Type, Base) + #define STMT(Type, Base) \ + case Expr::Type##Class: + #include "clang/AST/StmtNodes.inc" + // fallthrough diff --git a/build/build-clang/bug47258-extract-symbols-mbcs.patch b/build/build-clang/bug47258-extract-symbols-mbcs.patch new file mode 100644 index 0000000000..69a95df072 --- /dev/null +++ b/build/build-clang/bug47258-extract-symbols-mbcs.patch @@ -0,0 +1,13 @@ +diff --git a/llvm/utils/extract_symbols.py b/llvm/utils/extract_symbols.py +index 43f603963a2..01fe10d36f0 100755 +--- a/llvm/utils/extract_symbols.py ++++ b/llvm/utils/extract_symbols.py +@@ -32,7 +32,7 @@ import argparse + def dumpbin_get_symbols(lib): + process = subprocess.Popen(['dumpbin','/symbols',lib], bufsize=1, + stdout=subprocess.PIPE, stdin=subprocess.PIPE, +- universal_newlines=True) ++ universal_newlines=True, encoding='mbcs') + process.stdin.close() + for line in process.stdout: + # Look for external symbols that are defined in some section diff --git a/build/build-clang/build-clang.py b/build/build-clang/build-clang.py new file mode 100755 index 0000000000..c935e3dfc8 --- /dev/null +++ b/build/build-clang/build-clang.py @@ -0,0 +1,1067 @@ +#!/usr/bin/python3 +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# Only necessary for flake8 to be happy... +from __future__ import print_function + +import os +import os.path +import shutil +import subprocess +import platform +import json +import argparse +import fnmatch +import glob +import errno +import re +import sys +import tarfile +from contextlib import contextmanager +from distutils.dir_util import copy_tree + +from shutil import which + +import zstandard + + +def symlink(source, link_name): + os_symlink = getattr(os, "symlink", None) + if callable(os_symlink): + os_symlink(source, link_name) + else: + if os.path.isdir(source): + # Fall back to copying the directory :( + copy_tree(source, link_name) + + +def check_run(args): + print(" ".join(args), file=sys.stderr, flush=True) + if args[0] == "cmake": + # CMake `message(STATUS)` messages, as appearing in failed source code + # compiles, appear on stdout, so we only capture that. 
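+        # Stream CMake's stdout live while also buffering it, so that on a
+        # failed configure the CMakeOutput.log / CMakeError.log paths it
+        # mentions can be located and dumped below; stderr stays attached
+        # to the parent process.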
+ p = subprocess.Popen(args, stdout=subprocess.PIPE) + lines = [] + for line in p.stdout: + lines.append(line) + sys.stdout.write(line.decode()) + sys.stdout.flush() + r = p.wait() + if r != 0: + cmake_output_re = re.compile(b'See also "(.*/CMakeOutput.log)"') + cmake_error_re = re.compile(b'See also "(.*/CMakeError.log)"') + + def find_first_match(re): + for l in lines: + match = re.search(l) + if match: + return match + + output_match = find_first_match(cmake_output_re) + error_match = find_first_match(cmake_error_re) + + def dump_file(log): + with open(log, "rb") as f: + print("\nContents of", log, "follow\n", file=sys.stderr) + print(f.read(), file=sys.stderr) + + if output_match: + dump_file(output_match.group(1)) + if error_match: + dump_file(error_match.group(1)) + else: + r = subprocess.call(args) + assert r == 0 + + +def run_in(path, args): + with chdir(path): + check_run(args) + + +@contextmanager +def chdir(path): + d = os.getcwd() + print('cd "%s"' % path, file=sys.stderr) + os.chdir(path) + try: + yield + finally: + print('cd "%s"' % d, file=sys.stderr) + os.chdir(d) + + +def patch(patch, srcdir): + patch = os.path.realpath(patch) + check_run(["patch", "-d", srcdir, "-p1", "-i", patch, "--fuzz=0", "-s"]) + + +def import_clang_tidy(source_dir, build_clang_tidy_alpha, build_clang_tidy_external): + clang_plugin_path = os.path.join(os.path.dirname(sys.argv[0]), "..", "clang-plugin") + clang_tidy_path = os.path.join(source_dir, "clang-tools-extra/clang-tidy") + sys.path.append(clang_plugin_path) + from import_mozilla_checks import do_import + + import_options = { + "alpha": build_clang_tidy_alpha, + "external": build_clang_tidy_external, + } + do_import(clang_plugin_path, clang_tidy_path, import_options) + + +def build_package(package_build_dir, cmake_args): + if not os.path.exists(package_build_dir): + os.mkdir(package_build_dir) + # If CMake has already been run, it may have been run with different + # arguments, so we need to re-run it. Make sure the cached copy of the + # previous CMake run is cleared before running it again. + if os.path.exists(package_build_dir + "/CMakeCache.txt"): + os.remove(package_build_dir + "/CMakeCache.txt") + if os.path.exists(package_build_dir + "/CMakeFiles"): + shutil.rmtree(package_build_dir + "/CMakeFiles") + + run_in(package_build_dir, ["cmake"] + cmake_args) + run_in(package_build_dir, ["ninja", "install", "-v"]) + + +@contextmanager +def updated_env(env): + old_env = os.environ.copy() + os.environ.update(env) + yield + os.environ.clear() + os.environ.update(old_env) + + +def build_tar_package(name, base, directory): + name = os.path.realpath(name) + print("tarring {} from {}/{}".format(name, base, directory), file=sys.stderr) + assert name.endswith(".tar.zst") + + cctx = zstandard.ZstdCompressor() + with open(name, "wb") as f, cctx.stream_writer(f) as z: + with tarfile.open(mode="w|", fileobj=z) as tf: + with chdir(base): + tf.add(directory) + + +def mkdir_p(path): + try: + os.makedirs(path) + except OSError as e: + if e.errno != errno.EEXIST or not os.path.isdir(path): + raise + + +def delete(path): + if os.path.isdir(path): + shutil.rmtree(path) + else: + try: + os.unlink(path) + except Exception: + pass + + +def install_libgcc(gcc_dir, clang_dir, is_final_stage): + gcc_bin_dir = os.path.join(gcc_dir, "bin") + + # Copy over gcc toolchain bits that clang looks for, to ensure that + # clang is using a consistent version of ld, since the system ld may + # be incompatible with the output clang produces. 
But copy it to a + # target-specific directory so a cross-compiler to Mac doesn't pick + # up the (Linux-specific) ld with disastrous results. + # + # Only install this for the bootstrap process; we expect any consumers of + # the newly-built toolchain to provide an appropriate ld themselves. + if not is_final_stage: + x64_bin_dir = os.path.join(clang_dir, "x86_64-unknown-linux-gnu", "bin") + mkdir_p(x64_bin_dir) + shutil.copy2(os.path.join(gcc_bin_dir, "ld"), x64_bin_dir) + + out = subprocess.check_output( + [os.path.join(gcc_bin_dir, "gcc"), "-print-libgcc-file-name"] + ) + + libgcc_dir = os.path.dirname(out.decode().rstrip()) + clang_lib_dir = os.path.join( + clang_dir, + "lib", + "gcc", + "x86_64-unknown-linux-gnu", + os.path.basename(libgcc_dir), + ) + mkdir_p(clang_lib_dir) + copy_tree(libgcc_dir, clang_lib_dir, preserve_symlinks=True) + libgcc_dir = os.path.join(gcc_dir, "lib64") + clang_lib_dir = os.path.join(clang_dir, "lib") + copy_tree(libgcc_dir, clang_lib_dir, preserve_symlinks=True) + libgcc_dir = os.path.join(gcc_dir, "lib32") + clang_lib_dir = os.path.join(clang_dir, "lib32") + copy_tree(libgcc_dir, clang_lib_dir, preserve_symlinks=True) + include_dir = os.path.join(gcc_dir, "include") + clang_include_dir = os.path.join(clang_dir, "include") + copy_tree(include_dir, clang_include_dir, preserve_symlinks=True) + + +def install_import_library(build_dir, clang_dir): + shutil.copy2( + os.path.join(build_dir, "lib", "clang.lib"), os.path.join(clang_dir, "lib") + ) + + +def install_asan_symbols(build_dir, clang_dir): + lib_path_pattern = os.path.join("lib", "clang", "*.*.*", "lib", "windows") + src_path = glob.glob( + os.path.join(build_dir, lib_path_pattern, "clang_rt.asan_dynamic-*.pdb") + ) + dst_path = glob.glob(os.path.join(clang_dir, lib_path_pattern)) + + if len(src_path) != 1: + raise Exception("Source path pattern did not resolve uniquely") + + if len(src_path) != 1: + raise Exception("Destination path pattern did not resolve uniquely") + + shutil.copy2(src_path[0], dst_path[0]) + + +def is_darwin(): + return platform.system() == "Darwin" + + +def is_linux(): + return platform.system() == "Linux" + + +def is_windows(): + return platform.system() == "Windows" + + +def build_one_stage( + cc, + cxx, + asm, + ld, + ar, + ranlib, + libtool, + src_dir, + stage_dir, + package_name, + build_libcxx, + osx_cross_compile, + build_type, + assertions, + python_path, + gcc_dir, + libcxx_include_dir, + build_wasm, + compiler_rt_source_dir=None, + runtimes_source_link=None, + compiler_rt_source_link=None, + is_final_stage=False, + android_targets=None, + extra_targets=None, + pgo_phase=None, +): + if is_final_stage and (android_targets or extra_targets): + # Linking compiler-rt under "runtimes" activates LLVM_RUNTIME_TARGETS + # and related arguments. + symlink(compiler_rt_source_dir, runtimes_source_link) + try: + os.unlink(compiler_rt_source_link) + except Exception: + pass + + if not os.path.exists(stage_dir): + os.mkdir(stage_dir) + + build_dir = stage_dir + "/build" + inst_dir = stage_dir + "/" + package_name + + # cmake doesn't deal well with backslashes in paths. 
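+    # e.g. C:\tools\clang\bin\clang-cl.exe -> C:/tools/clang/bin/clang-cl.exe
+    # (hypothetical path, for illustration)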
+ def slashify_path(path): + return path.replace("\\", "/") + + def cmake_base_args(cc, cxx, asm, ld, ar, ranlib, libtool, inst_dir): + machine_targets = "X86;ARM;AArch64" if is_final_stage else "X86" + cmake_args = [ + "-GNinja", + "-DCMAKE_C_COMPILER=%s" % slashify_path(cc[0]), + "-DCMAKE_CXX_COMPILER=%s" % slashify_path(cxx[0]), + "-DCMAKE_ASM_COMPILER=%s" % slashify_path(asm[0]), + "-DCMAKE_LINKER=%s" % slashify_path(ld[0]), + "-DCMAKE_AR=%s" % slashify_path(ar), + "-DCMAKE_C_FLAGS=%s" % " ".join(cc[1:]), + "-DCMAKE_CXX_FLAGS=%s" % " ".join(cxx[1:]), + "-DCMAKE_ASM_FLAGS=%s" % " ".join(asm[1:]), + "-DCMAKE_EXE_LINKER_FLAGS=%s" % " ".join(ld[1:]), + "-DCMAKE_SHARED_LINKER_FLAGS=%s" % " ".join(ld[1:]), + "-DCMAKE_BUILD_TYPE=%s" % build_type, + "-DCMAKE_INSTALL_PREFIX=%s" % inst_dir, + "-DLLVM_TARGETS_TO_BUILD=%s" % machine_targets, + "-DLLVM_ENABLE_ASSERTIONS=%s" % ("ON" if assertions else "OFF"), + "-DPYTHON_EXECUTABLE=%s" % slashify_path(python_path), + "-DLLVM_TOOL_LIBCXX_BUILD=%s" % ("ON" if build_libcxx else "OFF"), + "-DLLVM_ENABLE_BINDINGS=OFF", + ] + if "TASK_ID" in os.environ: + cmake_args += [ + "-DCLANG_REPOSITORY_STRING=taskcluster-%s" % os.environ["TASK_ID"], + ] + if not is_final_stage: + cmake_args += ["-DLLVM_ENABLE_PROJECTS=clang;compiler-rt"] + if build_wasm: + cmake_args += ["-DLLVM_EXPERIMENTAL_TARGETS_TO_BUILD=WebAssembly"] + if is_linux(): + cmake_args += ["-DLLVM_BINUTILS_INCDIR=%s/include" % gcc_dir] + cmake_args += ["-DLLVM_ENABLE_LIBXML2=FORCE_ON"] + if is_windows(): + cmake_args.insert(-1, "-DLLVM_EXPORT_SYMBOLS_FOR_PLUGINS=ON") + cmake_args.insert(-1, "-DLLVM_USE_CRT_RELEASE=MT") + else: + # libllvm as a shared library is not supported on Windows + cmake_args += ["-DLLVM_LINK_LLVM_DYLIB=ON"] + if ranlib is not None: + cmake_args += ["-DCMAKE_RANLIB=%s" % slashify_path(ranlib)] + if libtool is not None: + cmake_args += ["-DCMAKE_LIBTOOL=%s" % slashify_path(libtool)] + if osx_cross_compile: + cmake_args += [ + "-DCMAKE_SYSTEM_NAME=Darwin", + "-DCMAKE_SYSTEM_VERSION=10.10", + # Xray requires a OSX 10.12 SDK (https://bugs.llvm.org/show_bug.cgi?id=38959) + "-DCOMPILER_RT_BUILD_XRAY=OFF", + "-DLIBCXXABI_LIBCXX_INCLUDES=%s" % libcxx_include_dir, + "-DCMAKE_OSX_SYSROOT=%s" % slashify_path(os.getenv("CROSS_SYSROOT")), + "-DCMAKE_FIND_ROOT_PATH=%s" % slashify_path(os.getenv("CROSS_SYSROOT")), + "-DCMAKE_FIND_ROOT_PATH_MODE_PROGRAM=NEVER", + "-DCMAKE_FIND_ROOT_PATH_MODE_LIBRARY=ONLY", + "-DCMAKE_FIND_ROOT_PATH_MODE_INCLUDE=ONLY", + "-DCMAKE_MACOSX_RPATH=ON", + "-DCMAKE_OSX_ARCHITECTURES=x86_64", + "-DDARWIN_osx_ARCHS=x86_64", + "-DDARWIN_osx_SYSROOT=%s" % slashify_path(os.getenv("CROSS_SYSROOT")), + "-DLLVM_DEFAULT_TARGET_TRIPLE=x86_64-apple-darwin", + ] + # Starting in LLVM 11 (which requires SDK 10.12) the build tries to + # detect the SDK version by calling xcrun. Cross-compiles don't have + # an xcrun, so we have to set the version explicitly. 
+ if "MacOSX10.12.sdk" in os.getenv("CROSS_SYSROOT"): + cmake_args += [ + "-DDARWIN_macosx_OVERRIDE_SDK_VERSION=10.12", + ] + if pgo_phase == "gen": + # Per https://releases.llvm.org/10.0.0/docs/HowToBuildWithPGO.html + cmake_args += [ + "-DLLVM_BUILD_INSTRUMENTED=IR", + "-DLLVM_BUILD_RUNTIME=No", + ] + if pgo_phase == "use": + cmake_args += [ + "-DLLVM_PROFDATA_FILE=%s/merged.profdata" % stage_dir, + ] + return cmake_args + + cmake_args = [] + + runtime_targets = [] + if is_final_stage: + if android_targets: + runtime_targets = list(sorted(android_targets.keys())) + if extra_targets: + runtime_targets.extend(sorted(extra_targets)) + + if runtime_targets: + cmake_args += [ + "-DLLVM_BUILTIN_TARGETS=%s" % ";".join(runtime_targets), + "-DLLVM_RUNTIME_TARGETS=%s" % ";".join(runtime_targets), + ] + + for target in runtime_targets: + cmake_args += [ + "-DRUNTIMES_%s_COMPILER_RT_BUILD_PROFILE=ON" % target, + "-DRUNTIMES_%s_COMPILER_RT_BUILD_SANITIZERS=ON" % target, + "-DRUNTIMES_%s_COMPILER_RT_BUILD_XRAY=OFF" % target, + "-DRUNTIMES_%s_SANITIZER_ALLOW_CXXABI=OFF" % target, + "-DRUNTIMES_%s_COMPILER_RT_BUILD_LIBFUZZER=OFF" % target, + "-DRUNTIMES_%s_COMPILER_RT_INCLUDE_TESTS=OFF" % target, + "-DRUNTIMES_%s_LLVM_ENABLE_PER_TARGET_RUNTIME_DIR=OFF" % target, + "-DRUNTIMES_%s_LLVM_INCLUDE_TESTS=OFF" % target, + ] + + # The above code flipped switches to build various runtime libraries on + # Android; we now have to provide all the necessary compiler switches to + # make that work. + if is_final_stage and android_targets: + cmake_args += [ + "-DLLVM_LIBDIR_SUFFIX=64", + ] + + android_link_flags = "-fuse-ld=lld" + + for target, cfg in android_targets.items(): + sysroot_dir = cfg["ndk_sysroot"].format(**os.environ) + android_gcc_dir = cfg["ndk_toolchain"].format(**os.environ) + android_include_dirs = cfg["ndk_includes"] + api_level = cfg["api_level"] + + android_flags = [ + "-isystem %s" % d.format(**os.environ) for d in android_include_dirs + ] + android_flags += ["--gcc-toolchain=%s" % android_gcc_dir] + android_flags += ["-D__ANDROID_API__=%s" % api_level] + + # Our flags go last to override any --gcc-toolchain that may have + # been set earlier. + rt_c_flags = " ".join(cc[1:] + android_flags) + rt_cxx_flags = " ".join(cxx[1:] + android_flags) + rt_asm_flags = " ".join(asm[1:] + android_flags) + + for kind in ("BUILTINS", "RUNTIMES"): + for var, arg in ( + ("ANDROID", "1"), + ("CMAKE_ASM_FLAGS", rt_asm_flags), + ("CMAKE_CXX_FLAGS", rt_cxx_flags), + ("CMAKE_C_FLAGS", rt_c_flags), + ("CMAKE_EXE_LINKER_FLAGS", android_link_flags), + ("CMAKE_SHARED_LINKER_FLAGS", android_link_flags), + ("CMAKE_SYSROOT", sysroot_dir), + ("ANDROID_NATIVE_API_LEVEL", api_level), + ): + cmake_args += ["-D%s_%s_%s=%s" % (kind, target, var, arg)] + + cmake_args += cmake_base_args(cc, cxx, asm, ld, ar, ranlib, libtool, inst_dir) + cmake_args += [src_dir] + build_package(build_dir, cmake_args) + + if is_linux(): + install_libgcc(gcc_dir, inst_dir, is_final_stage) + # For some reasons the import library clang.lib of clang.exe is not + # installed, so we copy it by ourselves. + if is_windows(): + # The compiler-rt cmake scripts don't allow to build it for multiple + # targets at once on Windows, so manually build the 32-bits compiler-rt + # during the final stage. + build_32_bit = False + if is_final_stage: + # Only build the 32-bits compiler-rt when we originally built for + # 64-bits, which we detect through the contents of the LIB + # environment variable, which we also adjust for a 32-bits build + # at the same time. 
+ old_lib = os.environ["LIB"] + new_lib = [] + for l in old_lib.split(os.pathsep): + if l.endswith("x64"): + l = l[:-3] + "x86" + build_32_bit = True + elif l.endswith("amd64"): + l = l[:-5] + build_32_bit = True + new_lib.append(l) + if build_32_bit: + os.environ["LIB"] = os.pathsep.join(new_lib) + compiler_rt_build_dir = stage_dir + "/compiler-rt" + compiler_rt_inst_dir = inst_dir + "/lib/clang/" + subdirs = os.listdir(compiler_rt_inst_dir) + assert len(subdirs) == 1 + compiler_rt_inst_dir += subdirs[0] + cmake_args = cmake_base_args( + [os.path.join(inst_dir, "bin", "clang-cl.exe"), "-m32"] + cc[1:], + [os.path.join(inst_dir, "bin", "clang-cl.exe"), "-m32"] + cxx[1:], + [os.path.join(inst_dir, "bin", "clang-cl.exe"), "-m32"] + asm[1:], + ld, + ar, + ranlib, + libtool, + compiler_rt_inst_dir, + ) + cmake_args += [ + "-DLLVM_CONFIG_PATH=%s" + % slashify_path(os.path.join(inst_dir, "bin", "llvm-config")), + os.path.join(src_dir, "projects", "compiler-rt"), + ] + build_package(compiler_rt_build_dir, cmake_args) + os.environ["LIB"] = old_lib + if is_final_stage: + install_import_library(build_dir, inst_dir) + install_asan_symbols(build_dir, inst_dir) + + +# Return the absolute path of a build tool. We first look to see if the +# variable is defined in the config file, and if so we make sure it's an +# absolute path to an existing tool, otherwise we look for a program in +# $PATH named "key". +# +# This expects the name of the key in the config file to match the name of +# the tool in the default toolchain on the system (for example, "ld" on Unix +# and "link" on Windows). +def get_tool(config, key): + f = None + if key in config: + f = config[key].format(**os.environ) + if os.path.isabs(f): + if not os.path.exists(f): + raise ValueError("%s must point to an existing path" % key) + return f + + # Assume that we have the name of some program that should be on PATH. + tool = which(f) if f else which(key) + if not tool: + raise ValueError("%s not found on PATH" % (f or key)) + return tool + + +# This function is intended to be called on the final build directory when +# building clang-tidy. Also clang-format binaries are included that can be used +# in conjunction with clang-tidy. +# As a separate binary we also ship clangd for the language server protocol that +# can be used as a plugin in `vscode`. +# Its job is to remove all of the files which won't be used for clang-tidy or +# clang-format to reduce the download size. Currently when this function +# finishes its job, it will leave final_dir with a layout like this: +# +# clang/ +# bin/ +# clang-apply-replacements +# clang-format +# clang-tidy +# clangd +# include/ +# * (nothing will be deleted here) +# lib/ +# clang/ +# 4.0.0/ +# include/ +# * (nothing will be deleted here) +# share/ +# clang/ +# clang-format-diff.py +# clang-tidy-diff.py +# run-clang-tidy.py +def prune_final_dir_for_clang_tidy(final_dir, osx_cross_compile): + # Make sure we only have what we expect. 
+ dirs = [ + "bin", + "include", + "lib", + "lib32", + "libexec", + "msbuild-bin", + "share", + "tools", + ] + if is_linux(): + dirs.append("x86_64-unknown-linux-gnu") + for f in glob.glob("%s/*" % final_dir): + if os.path.basename(f) not in dirs: + raise Exception("Found unknown file %s in the final directory" % f) + if not os.path.isdir(f): + raise Exception("Expected %s to be a directory" % f) + + kept_binaries = ["clang-apply-replacements", "clang-format", "clang-tidy", "clangd"] + re_clang_tidy = re.compile(r"^(" + "|".join(kept_binaries) + r")(\.exe)?$", re.I) + for f in glob.glob("%s/bin/*" % final_dir): + if re_clang_tidy.search(os.path.basename(f)) is None: + delete(f) + + # Keep include/ intact. + + # Remove the target-specific files. + if is_linux(): + if os.path.exists(os.path.join(final_dir, "x86_64-unknown-linux-gnu")): + shutil.rmtree(os.path.join(final_dir, "x86_64-unknown-linux-gnu")) + + # In lib/, only keep lib/clang/N.M.O/include and the LLVM shared library. + re_ver_num = re.compile(r"^\d+\.\d+\.\d+$", re.I) + for f in glob.glob("%s/lib/*" % final_dir): + name = os.path.basename(f) + if name == "clang": + continue + if osx_cross_compile and name in ["libLLVM.dylib", "libclang-cpp.dylib"]: + continue + if is_linux() and ( + fnmatch.fnmatch(name, "libLLVM*.so") + or fnmatch.fnmatch(name, "libclang-cpp.so*") + ): + continue + delete(f) + for f in glob.glob("%s/lib/clang/*" % final_dir): + if re_ver_num.search(os.path.basename(f)) is None: + delete(f) + for f in glob.glob("%s/lib/clang/*/*" % final_dir): + if os.path.basename(f) != "include": + delete(f) + + # Completely remove libexec/, msbuild-bin and tools, if it exists. + shutil.rmtree(os.path.join(final_dir, "libexec")) + for d in ("msbuild-bin", "tools"): + d = os.path.join(final_dir, d) + if os.path.exists(d): + shutil.rmtree(d) + + # In share/, only keep share/clang/*tidy* + re_clang_tidy = re.compile(r"format|tidy", re.I) + for f in glob.glob("%s/share/*" % final_dir): + if os.path.basename(f) != "clang": + delete(f) + for f in glob.glob("%s/share/clang/*" % final_dir): + if re_clang_tidy.search(os.path.basename(f)) is None: + delete(f) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument( + "-c", + "--config", + required=True, + type=argparse.FileType("r"), + help="Clang configuration file", + ) + parser.add_argument( + "--clean", required=False, action="store_true", help="Clean the build directory" + ) + parser.add_argument( + "--skip-tar", + required=False, + action="store_true", + help="Skip tar packaging stage", + ) + parser.add_argument( + "--skip-checkout", + required=False, + action="store_true", + help="Do not checkout/revert source", + ) + + args = parser.parse_args() + + if not os.path.exists("llvm/README.txt"): + raise Exception( + "The script must be run from the root directory of the llvm-project tree" + ) + source_dir = os.getcwd() + build_dir = source_dir + "/build" + + if args.clean: + shutil.rmtree(build_dir) + os.sys.exit(0) + + llvm_source_dir = source_dir + "/llvm" + extra_source_dir = source_dir + "/clang-tools-extra" + clang_source_dir = source_dir + "/clang" + lld_source_dir = source_dir + "/lld" + compiler_rt_source_dir = source_dir + "/compiler-rt" + libcxx_source_dir = source_dir + "/libcxx" + libcxxabi_source_dir = source_dir + "/libcxxabi" + + exe_ext = "" + if is_windows(): + exe_ext = ".exe" + + cc_name = "clang" + cxx_name = "clang++" + if is_windows(): + cc_name = "clang-cl" + cxx_name = "clang-cl" + + config_dir = 
os.path.dirname(args.config.name) + config = json.load(args.config) + + stages = 3 + if "stages" in config: + stages = int(config["stages"]) + if stages not in (1, 2, 3, 4): + raise ValueError("We only know how to build 1, 2, 3, or 4 stages.") + pgo = False + if "pgo" in config: + pgo = config["pgo"] + if pgo not in (True, False): + raise ValueError("Only boolean values are accepted for pgo.") + if pgo and stages != 4: + raise ValueError("PGO is only supported in 4-stage builds.") + build_type = "Release" + if "build_type" in config: + build_type = config["build_type"] + if build_type not in ("Release", "Debug", "RelWithDebInfo", "MinSizeRel"): + raise ValueError( + "We only know how to do Release, Debug, RelWithDebInfo or " + "MinSizeRel builds" + ) + build_libcxx = False + if "build_libcxx" in config: + build_libcxx = config["build_libcxx"] + if build_libcxx not in (True, False): + raise ValueError("Only boolean values are accepted for build_libcxx.") + build_wasm = False + if "build_wasm" in config: + build_wasm = config["build_wasm"] + if build_wasm not in (True, False): + raise ValueError("Only boolean values are accepted for build_wasm.") + build_clang_tidy = False + if "build_clang_tidy" in config: + build_clang_tidy = config["build_clang_tidy"] + if build_clang_tidy not in (True, False): + raise ValueError("Only boolean values are accepted for build_clang_tidy.") + build_clang_tidy_alpha = False + # check for build_clang_tidy_alpha only if build_clang_tidy is true + if build_clang_tidy and "build_clang_tidy_alpha" in config: + build_clang_tidy_alpha = config["build_clang_tidy_alpha"] + if build_clang_tidy_alpha not in (True, False): + raise ValueError( + "Only boolean values are accepted for build_clang_tidy_alpha." + ) + build_clang_tidy_external = False + # check for build_clang_tidy_external only if build_clang_tidy is true + if build_clang_tidy and "build_clang_tidy_external" in config: + build_clang_tidy_external = config["build_clang_tidy_external"] + if build_clang_tidy_external not in (True, False): + raise ValueError( + "Only boolean values are accepted for build_clang_tidy_external." 
+ ) + osx_cross_compile = False + if "osx_cross_compile" in config: + osx_cross_compile = config["osx_cross_compile"] + if osx_cross_compile not in (True, False): + raise ValueError("Only boolean values are accepted for osx_cross_compile.") + if osx_cross_compile and not is_linux(): + raise ValueError("osx_cross_compile can only be used on Linux.") + assertions = False + if "assertions" in config: + assertions = config["assertions"] + if assertions not in (True, False): + raise ValueError("Only boolean values are accepted for assertions.") + python_path = None + if "python_path" not in config: + raise ValueError("Config file needs to set python_path") + python_path = config["python_path"] + gcc_dir = None + if "gcc_dir" in config: + gcc_dir = config["gcc_dir"].format(**os.environ) + if not os.path.exists(gcc_dir): + raise ValueError("gcc_dir must point to an existing path") + ndk_dir = None + android_targets = None + if "android_targets" in config: + android_targets = config["android_targets"] + for attr in ("ndk_toolchain", "ndk_sysroot", "ndk_includes", "api_level"): + for target, cfg in android_targets.items(): + if attr not in cfg: + raise ValueError( + "must specify '%s' as a key for android target: %s" + % (attr, target) + ) + extra_targets = None + if "extra_targets" in config: + extra_targets = config["extra_targets"] + if not isinstance(extra_targets, list): + raise ValueError("extra_targets must be a list") + if not all(isinstance(t, str) for t in extra_targets): + raise ValueError("members of extra_targets should be strings") + + if is_linux() and gcc_dir is None: + raise ValueError("Config file needs to set gcc_dir") + + if is_darwin() or osx_cross_compile: + os.environ["MACOSX_DEPLOYMENT_TARGET"] = "10.12" + + cc = get_tool(config, "cc") + cxx = get_tool(config, "cxx") + asm = get_tool(config, "ml" if is_windows() else "as") + ld = get_tool(config, "link" if is_windows() else "ld") + ar = get_tool(config, "lib" if is_windows() else "ar") + ranlib = None if is_windows() else get_tool(config, "ranlib") + libtool = None + if "libtool" in config: + libtool = get_tool(config, "libtool") + + if not os.path.exists(source_dir): + os.makedirs(source_dir) + + for p in config.get("patches", []): + patch(os.path.join(config_dir, p), source_dir) + + compiler_rt_source_link = llvm_source_dir + "/projects/compiler-rt" + + symlinks = [ + (clang_source_dir, llvm_source_dir + "/tools/clang"), + (extra_source_dir, llvm_source_dir + "/tools/clang/tools/extra"), + (lld_source_dir, llvm_source_dir + "/tools/lld"), + (compiler_rt_source_dir, compiler_rt_source_link), + (libcxx_source_dir, llvm_source_dir + "/projects/libcxx"), + (libcxxabi_source_dir, llvm_source_dir + "/projects/libcxxabi"), + ] + for l in symlinks: + # On Windows, we have to re-copy the whole directory every time. 
+ if not is_windows() and os.path.islink(l[1]): + continue + delete(l[1]) + if os.path.exists(l[0]): + symlink(l[0], l[1]) + + package_name = "clang" + if build_clang_tidy: + package_name = "clang-tidy" + import_clang_tidy(source_dir, build_clang_tidy_alpha, build_clang_tidy_external) + + if not os.path.exists(build_dir): + os.makedirs(build_dir) + + libcxx_include_dir = os.path.join(llvm_source_dir, "projects", "libcxx", "include") + + stage1_dir = build_dir + "/stage1" + stage1_inst_dir = stage1_dir + "/" + package_name + + final_stage_dir = stage1_dir + final_inst_dir = stage1_inst_dir + + if is_darwin(): + extra_cflags = [] + extra_cxxflags = ["-stdlib=libc++"] + extra_cflags2 = [] + extra_cxxflags2 = ["-stdlib=libc++"] + extra_asmflags = [] + extra_ldflags = [] + elif is_linux(): + extra_cflags = [] + extra_cxxflags = [] + # When building stage2 and stage3, we want the newly-built clang to pick + # up whatever headers were installed from the gcc we used to build stage1, + # always, rather than the system headers. Providing -gcc-toolchain + # encourages clang to do that. + extra_cflags2 = ["-fPIC", "-gcc-toolchain", stage1_inst_dir] + # Silence clang's warnings about arguments not being used in compilation. + extra_cxxflags2 = [ + "-fPIC", + "-Qunused-arguments", + "-gcc-toolchain", + stage1_inst_dir, + ] + extra_asmflags = [] + # Avoid libLLVM internal function calls going through the PLT. + extra_ldflags = ["-Wl,-Bsymbolic-functions"] + # For whatever reason, LLVM's build system will set things up to turn + # on -ffunction-sections and -fdata-sections, but won't turn on the + # corresponding option to strip unused sections. We do it explicitly + # here. LLVM's build system is also picky about turning on ICF, so + # we do that explicitly here, too. + extra_ldflags += ["-fuse-ld=gold", "-Wl,--gc-sections", "-Wl,--icf=safe"] + + if "LD_LIBRARY_PATH" in os.environ: + os.environ["LD_LIBRARY_PATH"] = "%s/lib64/:%s" % ( + gcc_dir, + os.environ["LD_LIBRARY_PATH"], + ) + else: + os.environ["LD_LIBRARY_PATH"] = "%s/lib64/" % gcc_dir + elif is_windows(): + extra_cflags = [] + extra_cxxflags = [] + # clang-cl would like to figure out what it's supposed to be emulating + # by looking at an MSVC install, but we don't really have that here. + # Force things on. + extra_cflags2 = [] + extra_cxxflags2 = [ + "-fms-compatibility-version=19.15.26726", + "-Xclang", + "-std=c++14", + ] + extra_asmflags = [] + extra_ldflags = [] + + if osx_cross_compile: + # undo the damage done in the is_linux() block above, and also simulate + # the is_darwin() block above. + extra_cflags = [] + extra_cxxflags = ["-stdlib=libc++"] + extra_cxxflags2 = ["-stdlib=libc++"] + + extra_flags = [ + "-target", + "x86_64-apple-darwin", + "-mlinker-version=137", + "-B", + "%s/bin" % os.getenv("CROSS_CCTOOLS_PATH"), + "-isysroot", + os.getenv("CROSS_SYSROOT"), + # technically the sysroot flag there should be enough to deduce this, + # but clang needs some help to figure this out. 
+ "-I%s/usr/include" % os.getenv("CROSS_SYSROOT"), + "-iframework", + "%s/System/Library/Frameworks" % os.getenv("CROSS_SYSROOT"), + ] + extra_cflags += extra_flags + extra_cxxflags += extra_flags + extra_cflags2 += extra_flags + extra_cxxflags2 += extra_flags + extra_asmflags += extra_flags + extra_ldflags = [ + "-Wl,-syslibroot,%s" % os.getenv("CROSS_SYSROOT"), + "-Wl,-dead_strip", + ] + + upload_dir = os.getenv("UPLOAD_DIR") + if assertions and upload_dir: + extra_cflags2 += ["-fcrash-diagnostics-dir=%s" % upload_dir] + extra_cxxflags2 += ["-fcrash-diagnostics-dir=%s" % upload_dir] + + build_one_stage( + [cc] + extra_cflags, + [cxx] + extra_cxxflags, + [asm] + extra_asmflags, + [ld] + extra_ldflags, + ar, + ranlib, + libtool, + llvm_source_dir, + stage1_dir, + package_name, + build_libcxx, + osx_cross_compile, + build_type, + assertions, + python_path, + gcc_dir, + libcxx_include_dir, + build_wasm, + is_final_stage=(stages == 1), + ) + + runtimes_source_link = llvm_source_dir + "/runtimes/compiler-rt" + + if stages >= 2: + stage2_dir = build_dir + "/stage2" + stage2_inst_dir = stage2_dir + "/" + package_name + final_stage_dir = stage2_dir + final_inst_dir = stage2_inst_dir + pgo_phase = "gen" if pgo else None + build_one_stage( + [stage1_inst_dir + "/bin/%s%s" % (cc_name, exe_ext)] + extra_cflags2, + [stage1_inst_dir + "/bin/%s%s" % (cxx_name, exe_ext)] + extra_cxxflags2, + [stage1_inst_dir + "/bin/%s%s" % (cc_name, exe_ext)] + extra_asmflags, + [ld] + extra_ldflags, + ar, + ranlib, + libtool, + llvm_source_dir, + stage2_dir, + package_name, + build_libcxx, + osx_cross_compile, + build_type, + assertions, + python_path, + gcc_dir, + libcxx_include_dir, + build_wasm, + compiler_rt_source_dir, + runtimes_source_link, + compiler_rt_source_link, + is_final_stage=(stages == 2), + android_targets=android_targets, + extra_targets=extra_targets, + pgo_phase=pgo_phase, + ) + + if stages >= 3: + stage3_dir = build_dir + "/stage3" + stage3_inst_dir = stage3_dir + "/" + package_name + final_stage_dir = stage3_dir + final_inst_dir = stage3_inst_dir + build_one_stage( + [stage2_inst_dir + "/bin/%s%s" % (cc_name, exe_ext)] + extra_cflags2, + [stage2_inst_dir + "/bin/%s%s" % (cxx_name, exe_ext)] + extra_cxxflags2, + [stage2_inst_dir + "/bin/%s%s" % (cc_name, exe_ext)] + extra_asmflags, + [ld] + extra_ldflags, + ar, + ranlib, + libtool, + llvm_source_dir, + stage3_dir, + package_name, + build_libcxx, + osx_cross_compile, + build_type, + assertions, + python_path, + gcc_dir, + libcxx_include_dir, + build_wasm, + compiler_rt_source_dir, + runtimes_source_link, + compiler_rt_source_link, + (stages == 3), + extra_targets=extra_targets, + ) + + if stages >= 4: + stage4_dir = build_dir + "/stage4" + stage4_inst_dir = stage4_dir + "/" + package_name + final_stage_dir = stage4_dir + final_inst_dir = stage4_inst_dir + pgo_phase = None + if pgo: + pgo_phase = "use" + llvm_profdata = stage3_inst_dir + "/bin/llvm-profdata%s" % exe_ext + merge_cmd = [llvm_profdata, "merge", "-o", "merged.profdata"] + profraw_files = glob.glob( + os.path.join(stage2_dir, "build", "profiles", "*.profraw") + ) + if not os.path.exists(stage4_dir): + os.mkdir(stage4_dir) + run_in(stage4_dir, merge_cmd + profraw_files) + build_one_stage( + [stage3_inst_dir + "/bin/%s%s" % (cc_name, exe_ext)] + extra_cflags2, + [stage3_inst_dir + "/bin/%s%s" % (cxx_name, exe_ext)] + extra_cxxflags2, + [stage3_inst_dir + "/bin/%s%s" % (cc_name, exe_ext)] + extra_asmflags, + [ld] + extra_ldflags, + ar, + ranlib, + libtool, + llvm_source_dir, + stage4_dir, + 
package_name, + build_libcxx, + osx_cross_compile, + build_type, + assertions, + python_path, + gcc_dir, + libcxx_include_dir, + build_wasm, + compiler_rt_source_dir, + runtimes_source_link, + compiler_rt_source_link, + (stages == 4), + extra_targets=extra_targets, + pgo_phase=pgo_phase, + ) + + if build_clang_tidy: + prune_final_dir_for_clang_tidy( + os.path.join(final_stage_dir, package_name), osx_cross_compile + ) + + # Copy the wasm32 builtins to the final_inst_dir if the archive is present. + if "wasi-sysroot" in config: + sysroot = config["wasi-sysroot"].format(**os.environ) + if os.path.isdir(sysroot): + for srcdir in glob.glob( + os.path.join(sysroot, "lib", "clang", "*", "lib", "wasi") + ): + print("Copying from wasi-sysroot srcdir %s" % srcdir) + # Copy the contents of the "lib/wasi" subdirectory to the + # appropriate location in final_inst_dir. + version = os.path.basename(os.path.dirname(os.path.dirname(srcdir))) + destdir = os.path.join( + final_inst_dir, "lib", "clang", version, "lib", "wasi" + ) + mkdir_p(destdir) + copy_tree(srcdir, destdir) + + if not args.skip_tar: + build_tar_package("%s.tar.zst" % package_name, final_stage_dir, package_name) diff --git a/build/build-clang/clang-10-linux64.json b/build/build-clang/clang-10-linux64.json new file mode 100644 index 0000000000..b79243b2bc --- /dev/null +++ b/build/build-clang/clang-10-linux64.json @@ -0,0 +1,27 @@ +{ + "stages": "4", + "pgo" : true, + "build_libcxx": true, + "build_wasm": true, + "build_type": "Release", + "assertions": false, + "python_path": "/usr/bin/python2.7", + "gcc_dir": "{MOZ_FETCHES_DIR}/gcc", + "cc": "{MOZ_FETCHES_DIR}/gcc/bin/gcc", + "cxx": "{MOZ_FETCHES_DIR}/gcc/bin/g++", + "as": "{MOZ_FETCHES_DIR}/gcc/bin/gcc", + "wasi-sysroot": "{MOZ_FETCHES_DIR}/wasi-sysroot", + "patches": [ + "static-llvm-symbolizer.patch", + "find_symbolizer_linux_clang_10.patch", + "rename_gcov_flush_clang_10.patch", + "critical_section_on_gcov_flush-rG02ce9d8ef5a8.patch", + "rG7e18aeba5062_clang_10.patch", + "llvmorg-11-init-4265-g2dcbdba8540_clang_10.patch", + "android-mangling-error.patch", + "unpoison-thread-stacks_clang_10.patch", + "downgrade-mangling-error.patch", + "tsan-hang-be41a98ac222_clang_10.patch", + "loosen-msvc-detection.patch" + ] +} diff --git a/build/build-clang/clang-11-android.json b/build/build-clang/clang-11-android.json new file mode 100644 index 0000000000..ff284f1212 --- /dev/null +++ b/build/build-clang/clang-11-android.json @@ -0,0 +1,55 @@ +{ + "stages": "2", + "build_libcxx": true, + "build_type": "Release", + "assertions": false, + "python_path": "/usr/bin/python2.7", + "gcc_dir": "{MOZ_FETCHES_DIR}/gcc", + "cc": "{MOZ_FETCHES_DIR}/gcc/bin/gcc", + "cxx": "{MOZ_FETCHES_DIR}/gcc/bin/g++", + "as": "{MOZ_FETCHES_DIR}/gcc/bin/gcc", + "android_targets": { + "armv7-linux-android": { + "ndk_toolchain": "{MOZ_FETCHES_DIR}/android-ndk/toolchains/arm-linux-androideabi-4.9/prebuilt/linux-x86_64", + "ndk_sysroot": "{MOZ_FETCHES_DIR}/android-ndk/platforms/android-16/arch-arm", + "ndk_includes": [ + "{MOZ_FETCHES_DIR}/android-ndk/sysroot/usr/include/arm-linux-androideabi", + "{MOZ_FETCHES_DIR}/android-ndk/sysroot/usr/include" + ], + "api_level": 16 + }, + "i686-linux-android": { + "ndk_toolchain": "{MOZ_FETCHES_DIR}/android-ndk/toolchains/x86-4.9/prebuilt/linux-x86_64", + "ndk_sysroot": "{MOZ_FETCHES_DIR}/android-ndk/platforms/android-16/arch-x86", + "ndk_includes": [ + "{MOZ_FETCHES_DIR}/android-ndk/sysroot/usr/include/i686-linux-android", + "{MOZ_FETCHES_DIR}/android-ndk/sysroot/usr/include" + ], + 
"api_level": 16 + }, + "aarch64-linux-android": { + "ndk_toolchain": "{MOZ_FETCHES_DIR}/android-ndk/toolchains/aarch64-linux-android-4.9/prebuilt/linux-x86_64", + "ndk_sysroot": "{MOZ_FETCHES_DIR}/android-ndk/platforms/android-21/arch-arm64", + "ndk_includes": [ + "{MOZ_FETCHES_DIR}/android-ndk/sysroot/usr/include/aarch64-linux-android", + "{MOZ_FETCHES_DIR}/android-ndk/sysroot/usr/include" + ], + "api_level": 21 + }, + "x86_64-linux-android": { + "ndk_toolchain": "{MOZ_FETCHES_DIR}/android-ndk/toolchains/x86_64-4.9/prebuilt/linux-x86_64", + "ndk_sysroot": "{MOZ_FETCHES_DIR}/android-ndk/platforms/android-21/arch-x86_64", + "ndk_includes": [ + "{MOZ_FETCHES_DIR}/android-ndk/sysroot/usr/include/x86_64-linux-android", + "{MOZ_FETCHES_DIR}/android-ndk/sysroot/usr/include" + ], + "api_level": 21 + } + }, + "patches": [ + "static-llvm-symbolizer.patch", + "find_symbolizer_linux_clang_10.patch", + "rename_gcov_flush_clang_11.patch", + "revert-r362047-and-r362065.patch" + ] +} diff --git a/build/build-clang/clang-11-linux64-aarch64-cross.json b/build/build-clang/clang-11-linux64-aarch64-cross.json new file mode 100644 index 0000000000..1a091815ed --- /dev/null +++ b/build/build-clang/clang-11-linux64-aarch64-cross.json @@ -0,0 +1,21 @@ +{ + "stages": "4", + "pgo" : true, + "build_libcxx": true, + "build_type": "Release", + "assertions": false, + "python_path": "/usr/bin/python2.7", + "gcc_dir": "{MOZ_FETCHES_DIR}/gcc", + "cc": "{MOZ_FETCHES_DIR}/gcc/bin/gcc", + "cxx": "{MOZ_FETCHES_DIR}/gcc/bin/g++", + "as": "{MOZ_FETCHES_DIR}/gcc/bin/gcc", + "extra_targets": [ + "aarch64-unknown-linux-gnu" + ], + "patches": [ + "static-llvm-symbolizer.patch", + "find_symbolizer_linux_clang_10.patch", + "rename_gcov_flush_clang_11.patch", + "android-mangling-error.patch" + ] +} diff --git a/build/build-clang/clang-11-linux64.json b/build/build-clang/clang-11-linux64.json new file mode 100644 index 0000000000..4e8f1f0098 --- /dev/null +++ b/build/build-clang/clang-11-linux64.json @@ -0,0 +1,24 @@ +{ + "stages": "4", + "pgo" : true, + "build_libcxx": true, + "build_wasm": true, + "build_type": "Release", + "assertions": false, + "python_path": "/usr/bin/python2.7", + "gcc_dir": "{MOZ_FETCHES_DIR}/gcc", + "cc": "{MOZ_FETCHES_DIR}/gcc/bin/gcc", + "cxx": "{MOZ_FETCHES_DIR}/gcc/bin/g++", + "as": "{MOZ_FETCHES_DIR}/gcc/bin/gcc", + "wasi-sysroot": "{MOZ_FETCHES_DIR}/wasi-sysroot", + "patches": [ + "static-llvm-symbolizer.patch", + "find_symbolizer_linux_clang_10.patch", + "rename_gcov_flush_clang_11.patch", + "android-mangling-error.patch", + "unpoison-thread-stacks_clang_10.patch", + "downgrade-mangling-error.patch", + "llvmorg-12-init-10926-gb79e990f401-LTO-new-pass-manager.patch", + "loosen-msvc-detection.patch" + ] +} diff --git a/build/build-clang/clang-11-macosx64.json b/build/build-clang/clang-11-macosx64.json new file mode 100644 index 0000000000..367c953e38 --- /dev/null +++ b/build/build-clang/clang-11-macosx64.json @@ -0,0 +1,22 @@ +{ + "stages": "1", + "build_libcxx": true, + "build_type": "Release", + "assertions": false, + "osx_cross_compile": true, + "python_path": "/usr/bin/python2.7", + "gcc_dir": "{MOZ_FETCHES_DIR}/gcc", + "cc": "{MOZ_FETCHES_DIR}/clang/bin/clang", + "cxx": "{MOZ_FETCHES_DIR}/clang/bin/clang++", + "as": "{MOZ_FETCHES_DIR}/clang/bin/clang", + "ar": "{MOZ_FETCHES_DIR}/cctools/bin/x86_64-apple-darwin-ar", + "ranlib": "{MOZ_FETCHES_DIR}/cctools/bin/x86_64-apple-darwin-ranlib", + "libtool": "{MOZ_FETCHES_DIR}/cctools/bin/x86_64-apple-darwin-libtool", + "ld": 
"{MOZ_FETCHES_DIR}/clang/bin/clang", + "patches": [ + "static-llvm-symbolizer.patch", + "rename_gcov_flush_clang_11.patch", + "compiler-rt-cross-compile.patch", + "compiler-rt-no-codesign.patch" + ] +} diff --git a/build/build-clang/clang-11-mingw.json b/build/build-clang/clang-11-mingw.json new file mode 100755 index 0000000000..4dcd9b26c4 --- /dev/null +++ b/build/build-clang/clang-11-mingw.json @@ -0,0 +1,14 @@ +{ + "stages": "4", + "pgo" : true, + "build_libcxx": true, + "build_type": "Release", + "assertions": false, + "python_path": "/usr/bin/python2.7", + "gcc_dir": "{MOZ_FETCHES_DIR}/gcc", + "cc": "{MOZ_FETCHES_DIR}/gcc/bin/gcc", + "cxx": "{MOZ_FETCHES_DIR}/gcc/bin/g++", + "as": "{MOZ_FETCHES_DIR}/gcc/bin/gcc", + "patches": [ + ] +} diff --git a/build/build-clang/clang-11-win64-2stage.json b/build/build-clang/clang-11-win64-2stage.json new file mode 100644 index 0000000000..10e6267dbd --- /dev/null +++ b/build/build-clang/clang-11-win64-2stage.json @@ -0,0 +1,14 @@ +{ + "stages": "2", + "build_libcxx": false, + "build_type": "Release", + "assertions": false, + "python_path": "c:/mozilla-build/python/python.exe", + "cc": "cl.exe", + "cxx": "cl.exe", + "ml": "ml64.exe", + "patches": [ + "unpoison-thread-stacks_clang_10.patch", + "bug47258-extract-symbols-mbcs.patch" + ] +} diff --git a/build/build-clang/clang-11-win64.json b/build/build-clang/clang-11-win64.json new file mode 100644 index 0000000000..ba225876a7 --- /dev/null +++ b/build/build-clang/clang-11-win64.json @@ -0,0 +1,18 @@ +{ + "stages": "4", + "pgo" : true, + "build_libcxx": false, + "build_type": "Release", + "assertions": false, + "python_path": "c:/mozilla-build/python/python.exe", + "cc": "cl.exe", + "cxx": "cl.exe", + "ml": "ml64.exe", + "patches": [ + "unpoison-thread-stacks_clang_10.patch", + "downgrade-mangling-error.patch", + "bug47258-extract-symbols-mbcs.patch", + "llvmorg-12-init-10926-gb79e990f401-LTO-new-pass-manager.patch", + "loosen-msvc-detection.patch" + ] +} diff --git a/build/build-clang/clang-5.0-linux64.json b/build/build-clang/clang-5.0-linux64.json new file mode 100644 index 0000000000..0d66e6b731 --- /dev/null +++ b/build/build-clang/clang-5.0-linux64.json @@ -0,0 +1,12 @@ +{ + "stages": "3", + "build_libcxx": true, + "build_type": "Release", + "assertions": false, + "python_path": "/usr/bin/python2.7", + "gcc_dir": "{MOZ_FETCHES_DIR}/gcc", + "cc": "{MOZ_FETCHES_DIR}/gcc/bin/gcc", + "cxx": "{MOZ_FETCHES_DIR}/gcc/bin/g++", + "as": "{MOZ_FETCHES_DIR}/gcc/bin/gcc", + "patches": [] +} diff --git a/build/build-clang/clang-7-linux64.json b/build/build-clang/clang-7-linux64.json new file mode 100644 index 0000000000..adddc0eb35 --- /dev/null +++ b/build/build-clang/clang-7-linux64.json @@ -0,0 +1,19 @@ +{ + "stages": "3", + "build_libcxx": true, + "build_type": "Release", + "assertions": false, + "python_path": "/usr/bin/python2.7", + "gcc_dir": "{MOZ_FETCHES_DIR}/gcc", + "cc": "{MOZ_FETCHES_DIR}/gcc/bin/gcc", + "cxx": "{MOZ_FETCHES_DIR}/gcc/bin/g++", + "as": "{MOZ_FETCHES_DIR}/gcc/bin/gcc", + "patches": [ + "static-llvm-symbolizer.patch", + "find_symbolizer_linux.patch", + "rename_gcov_flush_7.patch", + "critical_section_on_gcov_flush-rG02ce9d8ef5a8.patch", + "r350774.patch", + "android-mangling-error.patch" + ] +} diff --git a/build/build-clang/clang-linux64.json b/build/build-clang/clang-linux64.json new file mode 100644 index 0000000000..1a25656ea4 --- /dev/null +++ b/build/build-clang/clang-linux64.json @@ -0,0 +1,28 @@ +{ + "stages": "4", + "pgo" : true, + "build_libcxx": true, + "build_wasm": 
true, + "build_type": "Release", + "assertions": false, + "python_path": "/usr/bin/python2.7", + "gcc_dir": "{MOZ_FETCHES_DIR}/gcc", + "cc": "{MOZ_FETCHES_DIR}/gcc/bin/gcc", + "cxx": "{MOZ_FETCHES_DIR}/gcc/bin/g++", + "as": "{MOZ_FETCHES_DIR}/gcc/bin/gcc", + "wasi-sysroot": "{MOZ_FETCHES_DIR}/wasi-sysroot", + "patches": [ + "static-llvm-symbolizer.patch", + "find_symbolizer_linux.patch", + "rename_gcov_flush.patch", + "critical_section_on_gcov_flush-rG02ce9d8ef5a8.patch", + "rG7e18aeba5062.patch", + "llvmorg-11-init-4265-g2dcbdba8540.patch", + "android-mangling-error.patch", + "unpoison-thread-stacks.patch", + "downgrade-mangling-error.patch", + "tsan-hang-be41a98ac222.patch", + "llvmorg-11-init-15486-gfc937806efd-dont-jump-to-landing-pads.patch", + "loosen-msvc-detection.patch" + ] +} diff --git a/build/build-clang/clang-tidy-ci.patch b/build/build-clang/clang-tidy-ci.patch new file mode 100644 index 0000000000..8d5d807ddf --- /dev/null +++ b/build/build-clang/clang-tidy-ci.patch @@ -0,0 +1,26 @@ +diff --git a/clang-tools-extra/clang-tidy/ClangTidy.cpp b/clang-tools-extra/clang-tidy/ClangTidy.cpp +index d6913dfd3c07..d031a163fdd7 100644 +--- a/clang-tools-extra/clang-tidy/ClangTidy.cpp ++++ b/clang-tools-extra/clang-tidy/ClangTidy.cpp +@@ -418,6 +418,7 @@ ClangTidyASTConsumerFactory::CreateASTConsumer( + if (!Check->isLanguageVersionSupported(Context.getLangOpts())) + continue; + Check->registerMatchers(&*Finder); ++ Check->registerPPCallbacks(Compiler); + Check->registerPPCallbacks(*SM, PP, ModuleExpanderPP); + } + +diff --git a/clang-tools-extra/clang-tidy/ClangTidyCheck.h b/clang-tools-extra/clang-tidy/ClangTidyCheck.h +index 54b725126752..200780e86804 100644 +--- a/clang-tools-extra/clang-tidy/ClangTidyCheck.h ++++ b/clang-tools-extra/clang-tidy/ClangTidyCheck.h +@@ -130,6 +130,9 @@ public: + return true; + } + ++ /// This has been deprecated in clang 9 - needed by mozilla-must-override ++ virtual void registerPPCallbacks(CompilerInstance &Compiler) {} ++ + /// Override this to register ``PPCallbacks`` in the preprocessor. 
+ /// + /// This should be used for clang-tidy checks that analyze preprocessor- diff --git a/build/build-clang/clang-tidy-external-linux64.json b/build/build-clang/clang-tidy-external-linux64.json new file mode 100644 index 0000000000..55382875b1 --- /dev/null +++ b/build/build-clang/clang-tidy-external-linux64.json @@ -0,0 +1,17 @@ +{ + "stages": "1", + "build_libcxx": true, + "build_type": "Release", + "assertions": false, + "build_clang_tidy": true, + "python_path": "/usr/bin/python2.7", + "gcc_dir": "{MOZ_FETCHES_DIR}/gcc", + "cc": "{MOZ_FETCHES_DIR}/gcc/bin/gcc", + "cxx": "{MOZ_FETCHES_DIR}/gcc/bin/g++", + "as": "{MOZ_FETCHES_DIR}/gcc/bin/gcc", + "patches": [ + "clang-tidy-ci.patch", + "clang-tidy-no-errors.patch" + ], + "build_clang_tidy_external": true +} diff --git a/build/build-clang/clang-tidy-linux64.json b/build/build-clang/clang-tidy-linux64.json new file mode 100644 index 0000000000..dd5d85db25 --- /dev/null +++ b/build/build-clang/clang-tidy-linux64.json @@ -0,0 +1,16 @@ +{ + "stages": "1", + "build_libcxx": true, + "build_type": "Release", + "assertions": false, + "build_clang_tidy": true, + "python_path": "/usr/bin/python2.7", + "gcc_dir": "{MOZ_FETCHES_DIR}/gcc", + "cc": "{MOZ_FETCHES_DIR}/gcc/bin/gcc", + "cxx": "{MOZ_FETCHES_DIR}/gcc/bin/g++", + "as": "{MOZ_FETCHES_DIR}/gcc/bin/gcc", + "patches": [ + "clang-tidy-ci.patch", + "clang-tidy-no-errors.patch" + ] +} diff --git a/build/build-clang/clang-tidy-macosx64.json b/build/build-clang/clang-tidy-macosx64.json new file mode 100644 index 0000000000..1295b9e4a9 --- /dev/null +++ b/build/build-clang/clang-tidy-macosx64.json @@ -0,0 +1,23 @@ +{ + "stages": "1", + "build_libcxx": true, + "build_type": "Release", + "assertions": false, + "build_clang_tidy": true, + "osx_cross_compile": true, + "python_path": "/usr/bin/python2.7", + "gcc_dir": "{MOZ_FETCHES_DIR}/gcc", + "cc": "{MOZ_FETCHES_DIR}/clang/bin/clang", + "cxx": "{MOZ_FETCHES_DIR}/clang/bin/clang++", + "as": "{MOZ_FETCHES_DIR}/clang/bin/clang", + "ar": "{MOZ_FETCHES_DIR}/cctools/bin/x86_64-apple-darwin-ar", + "ranlib": "{MOZ_FETCHES_DIR}/cctools/bin/x86_64-apple-darwin-ranlib", + "libtool": "{MOZ_FETCHES_DIR}/cctools/bin/x86_64-apple-darwin-libtool", + "ld": "{MOZ_FETCHES_DIR}/clang/bin/clang", + "patches": [ + "clang-tidy-ci.patch", + "clang-tidy-no-errors.patch", + "compiler-rt-cross-compile.patch", + "compiler-rt-no-codesign.patch" + ] +} diff --git a/build/build-clang/clang-tidy-no-errors.patch b/build/build-clang/clang-tidy-no-errors.patch new file mode 100644 index 0000000000..57a8167021 --- /dev/null +++ b/build/build-clang/clang-tidy-no-errors.patch @@ -0,0 +1,12 @@ +diff --git a/clang-tools-extra/clang-tidy/ClangTidyCheck.cpp b/clang-tools-extra/clang-tidy/ClangTidyCheck.cpp +index fbf117688bb..dc7235b1450 100644 +--- a/clang-tools-extra/clang-tidy/ClangTidyCheck.cpp ++++ b/clang-tools-extra/clang-tidy/ClangTidyCheck.cpp +@@ -20,6 +20,7 @@ ClangTidyCheck::ClangTidyCheck(StringRef CheckName, ClangTidyContext *Context) + + DiagnosticBuilder ClangTidyCheck::diag(SourceLocation Loc, StringRef Message, + DiagnosticIDs::Level Level) { ++ Level = Level == DiagnosticIDs::Error ? 
DiagnosticIDs::Warning : Level; + return Context->diag(CheckName, Loc, Message, Level); + } + diff --git a/build/build-clang/clang-tidy-win64.json b/build/build-clang/clang-tidy-win64.json new file mode 100644 index 0000000000..3cf7038e98 --- /dev/null +++ b/build/build-clang/clang-tidy-win64.json @@ -0,0 +1,15 @@ +{ + "stages": "1", + "build_libcxx": false, + "build_type": "Release", + "assertions": false, + "build_clang_tidy": true, + "python_path": "c:/mozilla-build/python/python.exe", + "cc": "cl.exe", + "cxx": "cl.exe", + "ml": "ml64.exe", + "patches": [ + "clang-tidy-ci.patch", + "clang-tidy-no-errors.patch" + ] +} diff --git a/build/build-clang/compiler-rt-cross-compile.patch b/build/build-clang/compiler-rt-cross-compile.patch new file mode 100644 index 0000000000..4ab24952ac --- /dev/null +++ b/build/build-clang/compiler-rt-cross-compile.patch @@ -0,0 +1,15 @@ +Add `-target x86_64-apple-darwin' to the compiler-rt overridden CFLAGS + +diff --git a/compiler-rt/cmake/Modules/CompilerRTDarwinUtils.cmake b/compiler-rt/cmake/Modules/CompilerRTDarwinUtils.cmake +index 28d398672..aac68bf36 100644 +--- a/compiler-rt/cmake/Modules/CompilerRTDarwinUtils.cmake ++++ b/compiler-rt/cmake/Modules/CompilerRTDarwinUtils.cmake +@@ -265,7 +265,7 @@ endfunction() + macro(darwin_add_builtin_libraries) + set(DARWIN_EXCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/Darwin-excludes) + +- set(CFLAGS "-fPIC -O3 -fvisibility=hidden -DVISIBILITY_HIDDEN -Wall -fomit-frame-pointer") ++ set(CFLAGS "-fPIC -O3 -fvisibility=hidden -DVISIBILITY_HIDDEN -Wall -fomit-frame-pointer -target x86_64-apple-darwin -isysroot ${CMAKE_OSX_SYSROOT} -I${CMAKE_OSX_SYSROOT}/usr/include") + set(CMAKE_C_FLAGS "") + set(CMAKE_CXX_FLAGS "") + set(CMAKE_ASM_FLAGS "") diff --git a/build/build-clang/compiler-rt-no-codesign.patch b/build/build-clang/compiler-rt-no-codesign.patch new file mode 100644 index 0000000000..99d3f7e992 --- /dev/null +++ b/build/build-clang/compiler-rt-no-codesign.patch @@ -0,0 +1,21 @@ +Disable codesign for macosx cross-compile toolchain. Codesign only works on OSX. + +Index: cmake/Modules/AddCompilerRT.cmake +=================================================================== +--- a/compiler-rt/cmake/Modules/AddCompilerRT.cmake ++++ b/compiler-rt/cmake/Modules/AddCompilerRT.cmake +@@ -321,14 +321,6 @@ + set_target_properties(${libname} PROPERTIES IMPORT_PREFIX "") + set_target_properties(${libname} PROPERTIES IMPORT_SUFFIX ".lib") + endif() +- if(APPLE) +- # Ad-hoc sign the dylibs +- add_custom_command(TARGET ${libname} +- POST_BUILD +- COMMAND codesign --sign - $ +- WORKING_DIRECTORY ${COMPILER_RT_LIBRARY_OUTPUT_DIR} +- ) +- endif() + endif() + + set(parent_target_arg) diff --git a/build/build-clang/critical_section_on_gcov_flush-rG02ce9d8ef5a8.patch b/build/build-clang/critical_section_on_gcov_flush-rG02ce9d8ef5a8.patch new file mode 100644 index 0000000000..c5c533a915 --- /dev/null +++ b/build/build-clang/critical_section_on_gcov_flush-rG02ce9d8ef5a8.patch @@ -0,0 +1,75 @@ +From 02ce9d8ef5a84bc884de4105eae5f8736ef67634 Mon Sep 17 00:00:00 2001 +From: Calixte Denizet +Date: Tue, 10 Dec 2019 13:22:33 +0100 +Subject: [PATCH] [compiler-rt] Add a critical section when flushing gcov + counters + +Summary: +Counters can be flushed in a multi-threaded context for example when the process is forked in different threads (https://github.com/llvm/llvm-project/blob/master/llvm/lib/Transforms/Instrumentation/GCOVProfiling.cpp#L632-L663). +In order to avoid pretty bad things, a critical section is needed around the flush. 
+We had a lot of crashes in this code in Firefox CI when we switched to clang for linux ccov builds and those crashes disappeared with this patch. + +Reviewers: marco-c, froydnj, dmajor, davidxl, vsk + +Reviewed By: marco-c, dmajor + +Subscribers: ahatanak, froydnj, dmajor, dberris, jfb, #sanitizers, llvm-commits, sylvestre.ledru + +Tags: #sanitizers, #llvm + +Differential Revision: https://reviews.llvm.org/D70910 +--- + +diff --git a/compiler-rt/lib/profile/GCDAProfiling.c b/compiler-rt/lib/profile/GCDAProfiling.c +index b7257db10e7..d4abc4181ed 100644 +--- a/compiler-rt/lib/profile/GCDAProfiling.c ++++ b/compiler-rt/lib/profile/GCDAProfiling.c +@@ -62,8 +62,27 @@ typedef unsigned long long uint64_t; + #include "InstrProfiling.h" + #include "InstrProfilingUtil.h" + +-/* #define DEBUG_GCDAPROFILING */ ++#ifndef _WIN32 ++#include ++static pthread_mutex_t gcov_flush_mutex = PTHREAD_MUTEX_INITIALIZER; ++static __inline void gcov_flush_lock() { ++ pthread_mutex_lock(&gcov_flush_mutex); ++} ++static __inline void gcov_flush_unlock() { ++ pthread_mutex_unlock(&gcov_flush_mutex); ++} ++#else ++#include ++static SRWLOCK gcov_flush_mutex = SRWLOCK_INIT; ++static __inline void gcov_flush_lock() { ++ AcquireSRWLockExclusive(&gcov_flush_mutex); ++} ++static __inline void gcov_flush_unlock() { ++ ReleaseSRWLockExclusive(&gcov_flush_mutex); ++} ++#endif + ++/* #define DEBUG_GCDAPROFILING */ + /* + * --- GCOV file format I/O primitives --- + */ +@@ -620,12 +639,16 @@ void llvm_register_flush_function(fn_ptr fn) { + } + + void __custom_llvm_gcov_flush() { ++ gcov_flush_lock(); ++ + struct fn_node* curr = flush_fn_list.head; + + while (curr) { + curr->fn(); + curr = curr->next; + } ++ ++ gcov_flush_unlock(); + } + + COMPILER_RT_VISIBILITY +-- +2.24.0 + diff --git a/build/build-clang/downgrade-mangling-error.patch b/build/build-clang/downgrade-mangling-error.patch new file mode 100644 index 0000000000..69f46f4dd0 --- /dev/null +++ b/build/build-clang/downgrade-mangling-error.patch @@ -0,0 +1,23 @@ +Downgrade unimplemented mangling diagnostic from error to note. +This codepath is exercised by MozsearchIndexer.cpp (the searchfox +indexer) when indexing on Windows. We can do without having the +unimplemented bits for now as long the compiler doesn't fail the +build. See also https://bugs.llvm.org/show_bug.cgi?id=39294 + +diff --git a/clang/lib/AST/ItaniumMangle.cpp b/clang/lib/AST/ItaniumMangle.cpp +index 8b1419074df5..4436cd118f87 100644 +--- a/clang/lib/AST/ItaniumMangle.cpp ++++ b/clang/lib/AST/ItaniumMangle.cpp +@@ -3847,10 +3847,11 @@ recurse: + if (!NullOut) { + // As bad as this diagnostic is, it's better than crashing. + DiagnosticsEngine &Diags = Context.getDiags(); +- unsigned DiagID = Diags.getCustomDiagID(DiagnosticsEngine::Error, ++ unsigned DiagID = Diags.getCustomDiagID(DiagnosticsEngine::Remark, + "cannot yet mangle expression type %0"); + Diags.Report(E->getExprLoc(), DiagID) + << E->getStmtClassName() << E->getSourceRange(); ++ Out << "MOZ_WE_HACKED_AROUND_BUG_1418415"; + } + break; + } diff --git a/build/build-clang/find_symbolizer_linux.patch b/build/build-clang/find_symbolizer_linux.patch new file mode 100644 index 0000000000..c511401c32 --- /dev/null +++ b/build/build-clang/find_symbolizer_linux.patch @@ -0,0 +1,58 @@ +We currently need this patch because ASan only searches PATH to find the +llvm-symbolizer binary to symbolize ASan traces. On testing machines, this +can be installed in PATH easily. However, for e.g. 
the ASan Nightly Project, +where we ship an ASan build, including llvm-symbolizer, to the user, we +cannot expect llvm-symbolizer to be on PATH. Instead, we should try to look +it up next to the binary. This patch implements the functionality for Linux +only until there is similar functionality provided upstream. + +diff --git a/compiler-rt/lib/sanitizer_common/sanitizer_file.cc b/compiler-rt/lib/sanitizer_common/sanitizer_file.cc +index cde54bf..8daade1 100644 +--- a/compiler-rt/lib/sanitizer_common/sanitizer_file.cc ++++ b/compiler-rt/lib/sanitizer_common/sanitizer_file.cc +@@ -21,6 +21,10 @@ + #include "sanitizer_common.h" + #include "sanitizer_file.h" + ++#if SANITIZER_LINUX ++#include "sanitizer_posix.h" ++#endif ++ + namespace __sanitizer { + + void CatastrophicErrorWrite(const char *buffer, uptr length) { +@@ -156,6 +160,34 @@ char *FindPathToBinary(const char *name) { + if (*end == '\0') break; + beg = end + 1; + } ++ ++#if SANITIZER_LINUX ++ // If we cannot find the requested binary in PATH, we should try to locate ++ // it next to the binary, in case it is shipped with the build itself ++ // (e.g. llvm-symbolizer shipped with sanitizer build to symbolize on client. ++ if (internal_readlink("/proc/self/exe", buffer.data(), kMaxPathLength) < 0) ++ return nullptr; ++ ++ uptr buf_len = internal_strlen(buffer.data()); ++ ++ /* Avoid using dirname() here */ ++ while (buf_len > 0) { ++ if (buffer[buf_len - 1] == '/') ++ break; ++ buf_len--; ++ } ++ ++ if (!buf_len) ++ return nullptr; ++ ++ if (buf_len + name_len + 1 <= kMaxPathLength) { ++ internal_memcpy(&buffer[buf_len], name, name_len); ++ buffer[buf_len + name_len] = '\0'; ++ if (FileExists(buffer.data())) ++ return internal_strdup(buffer.data()); ++ } ++#endif ++ + return nullptr; + } + diff --git a/build/build-clang/find_symbolizer_linux_clang_10.patch b/build/build-clang/find_symbolizer_linux_clang_10.patch new file mode 100644 index 0000000000..1ddb02024d --- /dev/null +++ b/build/build-clang/find_symbolizer_linux_clang_10.patch @@ -0,0 +1,58 @@ +We currently need this patch because ASan only searches PATH to find the +llvm-symbolizer binary to symbolize ASan traces. On testing machines, this +can be installed in PATH easily. However, for e.g. the ASan Nightly Project, +where we ship an ASan build, including llvm-symbolizer, to the user, we +cannot expect llvm-symbolizer to be on PATH. Instead, we should try to look +it up next to the binary. This patch implements the functionality for Linux +only until there is similar functionality provided upstream. + +diff --git a/compiler-rt/lib/sanitizer_common/sanitizer_file.cpp b/compiler-rt/lib/sanitizer_common/sanitizer_file.cpp +index 79930d79425..cfb4f90c0d5 100644 +--- a/compiler-rt/lib/sanitizer_common/sanitizer_file.cpp ++++ b/compiler-rt/lib/sanitizer_common/sanitizer_file.cpp +@@ -20,6 +20,10 @@ + #include "sanitizer_common.h" + #include "sanitizer_file.h" + ++#if SANITIZER_LINUX ++#include "sanitizer_posix.h" ++#endif ++ + namespace __sanitizer { + + void CatastrophicErrorWrite(const char *buffer, uptr length) { +@@ -194,6 +198,34 @@ char *FindPathToBinary(const char *name) { + if (*end == '\0') break; + beg = end + 1; + } ++ ++#if SANITIZER_LINUX ++ // If we cannot find the requested binary in PATH, we should try to locate ++ // it next to the binary, in case it is shipped with the build itself ++ // (e.g. llvm-symbolizer shipped with sanitizer build to symbolize on client. 
++ if (internal_readlink("/proc/self/exe", buffer.data(), kMaxPathLength) < 0) ++ return nullptr; ++ ++ uptr buf_len = internal_strlen(buffer.data()); ++ ++ /* Avoid using dirname() here */ ++ while (buf_len > 0) { ++ if (buffer[buf_len - 1] == '/') ++ break; ++ buf_len--; ++ } ++ ++ if (!buf_len) ++ return nullptr; ++ ++ if (buf_len + name_len + 1 <= kMaxPathLength) { ++ internal_memcpy(&buffer[buf_len], name, name_len); ++ buffer[buf_len + name_len] = '\0'; ++ if (FileExists(buffer.data())) ++ return internal_strdup(buffer.data()); ++ } ++#endif ++ + return nullptr; + } + diff --git a/build/build-clang/llvmorg-11-init-15486-gfc937806efd-dont-jump-to-landing-pads.patch b/build/build-clang/llvmorg-11-init-15486-gfc937806efd-dont-jump-to-landing-pads.patch new file mode 100644 index 0000000000..fee6798c59 --- /dev/null +++ b/build/build-clang/llvmorg-11-init-15486-gfc937806efd-dont-jump-to-landing-pads.patch @@ -0,0 +1,100 @@ +From d1c09fb47e2778538c5b1f918724d31d05497883 Mon Sep 17 00:00:00 2001 +From: Arthur Eubanks +Date: Wed, 13 May 2020 16:33:09 -0700 +Subject: [PATCH] Don't jump to landing pads in Control Flow Optimizer + +Summary: Likely fixes https://bugs.llvm.org/show_bug.cgi?id=45858. + +Subscribers: hiraditya, llvm-commits + +Tags: #llvm + +Differential Revision: https://reviews.llvm.org/D80047 +--- + llvm/lib/CodeGen/BranchFolding.cpp | 18 ++++++------ + llvm/test/CodeGen/X86/branchfolding-ehpad.mir | 28 +++++++++++++++++++ + 2 files changed, 38 insertions(+), 8 deletions(-) + create mode 100644 llvm/test/CodeGen/X86/branchfolding-ehpad.mir + +diff --git a/llvm/lib/CodeGen/BranchFolding.cpp b/llvm/lib/CodeGen/BranchFolding.cpp +index fb54b5d6c8d..4a822b58446 100644 +--- a/llvm/lib/CodeGen/BranchFolding.cpp ++++ b/llvm/lib/CodeGen/BranchFolding.cpp +@@ -991,10 +991,10 @@ bool BranchFolder::TryTailMergeBlocks(MachineBasicBlock *SuccBB, + continue; + } + +- // If one of the blocks is the entire common tail (and not the entry +- // block, which we can't jump to), we can treat all blocks with this same +- // tail at once. Use PredBB if that is one of the possibilities, as that +- // will not introduce any extra branches. ++ // If one of the blocks is the entire common tail (and is not the entry ++ // block/an EH pad, which we can't jump to), we can treat all blocks with ++ // this same tail at once. Use PredBB if that is one of the possibilities, ++ // as that will not introduce any extra branches. + MachineBasicBlock *EntryBB = + &MergePotentials.front().getBlock()->getParent()->front(); + unsigned commonTailIndex = SameTails.size(); +@@ -1002,19 +1002,21 @@ bool BranchFolder::TryTailMergeBlocks(MachineBasicBlock *SuccBB, + // into the other. + if (SameTails.size() == 2 && + SameTails[0].getBlock()->isLayoutSuccessor(SameTails[1].getBlock()) && +- SameTails[1].tailIsWholeBlock()) ++ SameTails[1].tailIsWholeBlock() && !SameTails[1].getBlock()->isEHPad()) + commonTailIndex = 1; + else if (SameTails.size() == 2 && + SameTails[1].getBlock()->isLayoutSuccessor( +- SameTails[0].getBlock()) && +- SameTails[0].tailIsWholeBlock()) ++ SameTails[0].getBlock()) && ++ SameTails[0].tailIsWholeBlock() && ++ !SameTails[0].getBlock()->isEHPad()) + commonTailIndex = 0; + else { + // Otherwise just pick one, favoring the fall-through predecessor if + // there is one. 
+ for (unsigned i = 0, e = SameTails.size(); i != e; ++i) { + MachineBasicBlock *MBB = SameTails[i].getBlock(); +- if (MBB == EntryBB && SameTails[i].tailIsWholeBlock()) ++ if ((MBB == EntryBB || MBB->isEHPad()) && ++ SameTails[i].tailIsWholeBlock()) + continue; + if (MBB == PredBB) { + commonTailIndex = i; +diff --git a/llvm/test/CodeGen/X86/branchfolding-ehpad.mir b/llvm/test/CodeGen/X86/branchfolding-ehpad.mir +new file mode 100644 +index 00000000000..d445cd20680 +--- /dev/null ++++ b/llvm/test/CodeGen/X86/branchfolding-ehpad.mir +@@ -0,0 +1,28 @@ ++# RUN: llc -mtriple=x86_64-windows-msvc -verify-machineinstrs -run-pass branch-folder -o - %s | FileCheck %s ++ ++# Check that branch-folder does not create a fallthrough to a landing pad. ++# Also make sure that the landing pad still can be tail merged. ++--- ++name: foo ++body: | ++ ; CHECK-LABEL: name: foo ++ bb.0: ++ successors: %bb.1, %bb.3 ++ bb.1: ++ JCC_1 %bb.4, 5, implicit killed $eflags ++ bb.2: ++ MOV8mi $r13, 1, $noreg, 0, $noreg, 0 ++ JMP_1 %bb.5 ++ ; CHECK: bb.2: ++ ; CHECK-NOT: successors: {{.*}}bb.3 ++ ; CHECK: bb.3 (landing-pad): ++ ; CHECK-NOT: MOV8mi ++ bb.3(landing-pad): ++ MOV8mi $r13, 1, $noreg, 0, $noreg, 0 ++ JMP_1 %bb.5 ++ ; CHECK: bb.4: ++ bb.4: ++ MOV8mi $r13, 2, $noreg, 0, $noreg, 0 ++ bb.5: ++ RET 0 ++... +-- +2.24.1.windows.2 + diff --git a/build/build-clang/llvmorg-11-init-4265-g2dcbdba8540.patch b/build/build-clang/llvmorg-11-init-4265-g2dcbdba8540.patch new file mode 100644 index 0000000000..b03ae0640c --- /dev/null +++ b/build/build-clang/llvmorg-11-init-4265-g2dcbdba8540.patch @@ -0,0 +1,106 @@ +diff --git a/compiler-rt/lib/tsan/rtl/tsan_interceptors.cc b/compiler-rt/lib/tsan/rtl/tsan_interceptors.cc +index 9a184c79798..733decfe52c 100644 +--- a/compiler-rt/lib/tsan/rtl/tsan_interceptors.cc ++++ b/compiler-rt/lib/tsan/rtl/tsan_interceptors.cc +@@ -1021,7 +1021,7 @@ TSAN_INTERCEPTOR(int, pthread_create, + + TSAN_INTERCEPTOR(int, pthread_join, void *th, void **ret) { + SCOPED_INTERCEPTOR_RAW(pthread_join, th, ret); +- int tid = ThreadTid(thr, pc, (uptr)th); ++ int tid = ThreadConsumeTid(thr, pc, (uptr)th); + ThreadIgnoreBegin(thr, pc); + int res = BLOCK_REAL(pthread_join)(th, ret); + ThreadIgnoreEnd(thr, pc); +@@ -1034,8 +1034,8 @@ TSAN_INTERCEPTOR(int, pthread_join, void *th, void **ret) { + DEFINE_REAL_PTHREAD_FUNCTIONS + + TSAN_INTERCEPTOR(int, pthread_detach, void *th) { +- SCOPED_TSAN_INTERCEPTOR(pthread_detach, th); +- int tid = ThreadTid(thr, pc, (uptr)th); ++ SCOPED_INTERCEPTOR_RAW(pthread_detach, th); ++ int tid = ThreadConsumeTid(thr, pc, (uptr)th); + int res = REAL(pthread_detach)(th); + if (res == 0) { + ThreadDetach(thr, pc, tid); +@@ -1055,8 +1055,8 @@ TSAN_INTERCEPTOR(void, pthread_exit, void *retval) { + + #if SANITIZER_LINUX + TSAN_INTERCEPTOR(int, pthread_tryjoin_np, void *th, void **ret) { +- SCOPED_TSAN_INTERCEPTOR(pthread_tryjoin_np, th, ret); +- int tid = ThreadTid(thr, pc, (uptr)th); ++ SCOPED_INTERCEPTOR_RAW(pthread_tryjoin_np, th, ret); ++ int tid = ThreadConsumeTid(thr, pc, (uptr)th); + ThreadIgnoreBegin(thr, pc); + int res = REAL(pthread_tryjoin_np)(th, ret); + ThreadIgnoreEnd(thr, pc); +@@ -1069,8 +1069,8 @@ TSAN_INTERCEPTOR(int, pthread_tryjoin_np, void *th, void **ret) { + + TSAN_INTERCEPTOR(int, pthread_timedjoin_np, void *th, void **ret, + const struct timespec *abstime) { +- SCOPED_TSAN_INTERCEPTOR(pthread_timedjoin_np, th, ret, abstime); +- int tid = ThreadTid(thr, pc, (uptr)th); ++ SCOPED_INTERCEPTOR_RAW(pthread_timedjoin_np, th, ret, abstime); ++ int tid = 
ThreadConsumeTid(thr, pc, (uptr)th); + ThreadIgnoreBegin(thr, pc); + int res = BLOCK_REAL(pthread_timedjoin_np)(th, ret, abstime); + ThreadIgnoreEnd(thr, pc); +diff --git a/compiler-rt/lib/tsan/rtl/tsan_rtl.h b/compiler-rt/lib/tsan/rtl/tsan_rtl.h +index 3a8231bda9a..30e144fbd00 100644 +--- a/compiler-rt/lib/tsan/rtl/tsan_rtl.h ++++ b/compiler-rt/lib/tsan/rtl/tsan_rtl.h +@@ -772,7 +772,7 @@ int ThreadCreate(ThreadState *thr, uptr pc, uptr uid, bool detached); + void ThreadStart(ThreadState *thr, int tid, tid_t os_id, + ThreadType thread_type); + void ThreadFinish(ThreadState *thr); +-int ThreadTid(ThreadState *thr, uptr pc, uptr uid); ++int ThreadConsumeTid(ThreadState *thr, uptr pc, uptr uid); + void ThreadJoin(ThreadState *thr, uptr pc, int tid); + void ThreadDetach(ThreadState *thr, uptr pc, int tid); + void ThreadFinalize(ThreadState *thr); +diff --git a/compiler-rt/lib/tsan/rtl/tsan_rtl_thread.cc b/compiler-rt/lib/tsan/rtl/tsan_rtl_thread.cc +index fd95cfed4f5..13e457bd770 100644 +--- a/compiler-rt/lib/tsan/rtl/tsan_rtl_thread.cc ++++ b/compiler-rt/lib/tsan/rtl/tsan_rtl_thread.cc +@@ -285,19 +285,34 @@ void ThreadFinish(ThreadState *thr) { + ctx->thread_registry->FinishThread(thr->tid); + } + +-static bool FindThreadByUid(ThreadContextBase *tctx, void *arg) { +- uptr uid = (uptr)arg; +- if (tctx->user_id == uid && tctx->status != ThreadStatusInvalid) { ++struct ConsumeThreadContext { ++ uptr uid; ++ ThreadContextBase* tctx; ++}; ++ ++static bool ConsumeThreadByUid(ThreadContextBase *tctx, void *arg) { ++ ConsumeThreadContext *findCtx = (ConsumeThreadContext*)arg; ++ if (tctx->user_id == findCtx->uid && tctx->status != ThreadStatusInvalid) { ++ if (findCtx->tctx) { ++ // Ensure that user_id is unique. If it's not the case we are screwed. ++ // Something went wrong before, but now there is no way to recover. ++ // Returning a wrong thread is not an option, it may lead to very hard ++ // to debug false positives (e.g. if we join a wrong thread). ++ Report("ThreadSanitizer: dup thread with used id 0x%zx\n", findCtx->uid); ++ Die(); ++ } ++ findCtx->tctx = tctx; + tctx->user_id = 0; +- return true; + } + return false; + } + +-int ThreadTid(ThreadState *thr, uptr pc, uptr uid) { +- int res = ctx->thread_registry->FindThread(FindThreadByUid, (void*)uid); +- DPrintf("#%d: ThreadTid uid=%zu tid=%d\n", thr->tid, uid, res); +- return res; ++int ThreadConsumeTid(ThreadState *thr, uptr pc, uptr uid) { ++ ConsumeThreadContext findCtx = {uid, nullptr}; ++ ctx->thread_registry->FindThread(ConsumeThreadByUid, &findCtx); ++ int tid = findCtx.tctx ? 
findCtx.tctx->tid : ThreadRegistry::kUnknownTid; ++ DPrintf("#%d: ThreadTid uid=%zu tid=%d\n", thr->tid, uid, tid); ++ return tid; + } + + void ThreadJoin(ThreadState *thr, uptr pc, int tid) { diff --git a/build/build-clang/llvmorg-11-init-4265-g2dcbdba8540_clang_10.patch b/build/build-clang/llvmorg-11-init-4265-g2dcbdba8540_clang_10.patch new file mode 100644 index 0000000000..fb487e7801 --- /dev/null +++ b/build/build-clang/llvmorg-11-init-4265-g2dcbdba8540_clang_10.patch @@ -0,0 +1,106 @@ +diff --git a/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp b/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp +index 8aea1e4ec05..a623f4fe589 100644 +--- a/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp ++++ b/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp +@@ -1016,7 +1016,7 @@ TSAN_INTERCEPTOR(int, pthread_create, + + TSAN_INTERCEPTOR(int, pthread_join, void *th, void **ret) { + SCOPED_INTERCEPTOR_RAW(pthread_join, th, ret); +- int tid = ThreadTid(thr, pc, (uptr)th); ++ int tid = ThreadConsumeTid(thr, pc, (uptr)th); + ThreadIgnoreBegin(thr, pc); + int res = BLOCK_REAL(pthread_join)(th, ret); + ThreadIgnoreEnd(thr, pc); +@@ -1029,8 +1029,8 @@ TSAN_INTERCEPTOR(int, pthread_join, void *th, void **ret) { + DEFINE_REAL_PTHREAD_FUNCTIONS + + TSAN_INTERCEPTOR(int, pthread_detach, void *th) { +- SCOPED_TSAN_INTERCEPTOR(pthread_detach, th); +- int tid = ThreadTid(thr, pc, (uptr)th); ++ SCOPED_INTERCEPTOR_RAW(pthread_detach, th); ++ int tid = ThreadConsumeTid(thr, pc, (uptr)th); + int res = REAL(pthread_detach)(th); + if (res == 0) { + ThreadDetach(thr, pc, tid); +@@ -1050,8 +1050,8 @@ TSAN_INTERCEPTOR(void, pthread_exit, void *retval) { + + #if SANITIZER_LINUX + TSAN_INTERCEPTOR(int, pthread_tryjoin_np, void *th, void **ret) { +- SCOPED_TSAN_INTERCEPTOR(pthread_tryjoin_np, th, ret); +- int tid = ThreadTid(thr, pc, (uptr)th); ++ SCOPED_INTERCEPTOR_RAW(pthread_tryjoin_np, th, ret); ++ int tid = ThreadConsumeTid(thr, pc, (uptr)th); + ThreadIgnoreBegin(thr, pc); + int res = REAL(pthread_tryjoin_np)(th, ret); + ThreadIgnoreEnd(thr, pc); +@@ -1064,8 +1064,8 @@ TSAN_INTERCEPTOR(int, pthread_tryjoin_np, void *th, void **ret) { + + TSAN_INTERCEPTOR(int, pthread_timedjoin_np, void *th, void **ret, + const struct timespec *abstime) { +- SCOPED_TSAN_INTERCEPTOR(pthread_timedjoin_np, th, ret, abstime); +- int tid = ThreadTid(thr, pc, (uptr)th); ++ SCOPED_INTERCEPTOR_RAW(pthread_timedjoin_np, th, ret, abstime); ++ int tid = ThreadConsumeTid(thr, pc, (uptr)th); + ThreadIgnoreBegin(thr, pc); + int res = BLOCK_REAL(pthread_timedjoin_np)(th, ret, abstime); + ThreadIgnoreEnd(thr, pc); +diff --git a/compiler-rt/lib/tsan/rtl/tsan_rtl.h b/compiler-rt/lib/tsan/rtl/tsan_rtl.h +index c38fc43a9f8..20f7a99157a 100644 +--- a/compiler-rt/lib/tsan/rtl/tsan_rtl.h ++++ b/compiler-rt/lib/tsan/rtl/tsan_rtl.h +@@ -775,7 +775,7 @@ int ThreadCreate(ThreadState *thr, uptr pc, uptr uid, bool detached); + void ThreadStart(ThreadState *thr, int tid, tid_t os_id, + ThreadType thread_type); + void ThreadFinish(ThreadState *thr); +-int ThreadTid(ThreadState *thr, uptr pc, uptr uid); ++int ThreadConsumeTid(ThreadState *thr, uptr pc, uptr uid); + void ThreadJoin(ThreadState *thr, uptr pc, int tid); + void ThreadDetach(ThreadState *thr, uptr pc, int tid); + void ThreadFinalize(ThreadState *thr); +diff --git a/compiler-rt/lib/tsan/rtl/tsan_rtl_thread.cpp b/compiler-rt/lib/tsan/rtl/tsan_rtl_thread.cpp +index 0ac1ee99c47..f7068f0d331 100644 +--- a/compiler-rt/lib/tsan/rtl/tsan_rtl_thread.cpp ++++ 
b/compiler-rt/lib/tsan/rtl/tsan_rtl_thread.cpp +@@ -285,19 +285,34 @@ void ThreadFinish(ThreadState *thr) { + ctx->thread_registry->FinishThread(thr->tid); + } + +-static bool FindThreadByUid(ThreadContextBase *tctx, void *arg) { +- uptr uid = (uptr)arg; +- if (tctx->user_id == uid && tctx->status != ThreadStatusInvalid) { ++struct ConsumeThreadContext { ++ uptr uid; ++ ThreadContextBase* tctx; ++}; ++ ++static bool ConsumeThreadByUid(ThreadContextBase *tctx, void *arg) { ++ ConsumeThreadContext *findCtx = (ConsumeThreadContext*)arg; ++ if (tctx->user_id == findCtx->uid && tctx->status != ThreadStatusInvalid) { ++ if (findCtx->tctx) { ++ // Ensure that user_id is unique. If it's not the case we are screwed. ++ // Something went wrong before, but now there is no way to recover. ++ // Returning a wrong thread is not an option, it may lead to very hard ++ // to debug false positives (e.g. if we join a wrong thread). ++ Report("ThreadSanitizer: dup thread with used id 0x%zx\n", findCtx->uid); ++ Die(); ++ } ++ findCtx->tctx = tctx; + tctx->user_id = 0; +- return true; + } + return false; + } + +-int ThreadTid(ThreadState *thr, uptr pc, uptr uid) { +- int res = ctx->thread_registry->FindThread(FindThreadByUid, (void*)uid); +- DPrintf("#%d: ThreadTid uid=%zu tid=%d\n", thr->tid, uid, res); +- return res; ++int ThreadConsumeTid(ThreadState *thr, uptr pc, uptr uid) { ++ ConsumeThreadContext findCtx = {uid, nullptr}; ++ ctx->thread_registry->FindThread(ConsumeThreadByUid, &findCtx); ++ int tid = findCtx.tctx ? findCtx.tctx->tid : ThreadRegistry::kUnknownTid; ++ DPrintf("#%d: ThreadTid uid=%zu tid=%d\n", thr->tid, uid, tid); ++ return tid; + } + + void ThreadJoin(ThreadState *thr, uptr pc, int tid) { diff --git a/build/build-clang/llvmorg-12-init-10926-gb79e990f401-LTO-new-pass-manager.patch b/build/build-clang/llvmorg-12-init-10926-gb79e990f401-LTO-new-pass-manager.patch new file mode 100644 index 0000000000..61c0df9214 --- /dev/null +++ b/build/build-clang/llvmorg-12-init-10926-gb79e990f401-LTO-new-pass-manager.patch @@ -0,0 +1,66 @@ +diff --git a/lld/COFF/Config.h b/lld/COFF/Config.h +index 7c439176f3a4..ae969c6bdd8b 100644 +--- a/lld/COFF/Config.h ++++ b/lld/COFF/Config.h +@@ -155,6 +155,11 @@ struct Configuration { + // Used for /opt:lldltocachepolicy=policy + llvm::CachePruningPolicy ltoCachePolicy; + ++ // Used for /opt:[no]ltonewpassmanager ++ bool ltoNewPassManager = false; ++ // Used for /opt:[no]ltodebugpassmanager ++ bool ltoDebugPassManager = false; ++ + // Used for /merge:from=to (e.g. /merge:.rdata=.text) + std::map merge; + +diff --git a/lld/COFF/Driver.cpp b/lld/COFF/Driver.cpp +index 9ceccef86779..db2ae241dddf 100644 +--- a/lld/COFF/Driver.cpp ++++ b/lld/COFF/Driver.cpp +@@ -1418,6 +1418,8 @@ void LinkerDriver::link(ArrayRef argsArr) { + unsigned icfLevel = + args.hasArg(OPT_profile) ? 
0 : 1; // 0: off, 1: limited, 2: on + unsigned tailMerge = 1; ++ bool ltoNewPM = false; ++ bool ltoDebugPM = false; + for (auto *arg : args.filtered(OPT_opt)) { + std::string str = StringRef(arg->getValue()).lower(); + SmallVector vec; +@@ -1435,6 +1437,14 @@ void LinkerDriver::link(ArrayRef argsArr) { + tailMerge = 2; + } else if (s == "nolldtailmerge") { + tailMerge = 0; ++ } else if (s == "ltonewpassmanager") { ++ ltoNewPM = true; ++ } else if (s == "noltonewpassmanager") { ++ ltoNewPM = false; ++ } else if (s == "ltodebugpassmanager") { ++ ltoDebugPM = true; ++ } else if (s == "noltodebugpassmanager") { ++ ltoDebugPM = false; + } else if (s.startswith("lldlto=")) { + StringRef optLevel = s.substr(7); + if (optLevel.getAsInteger(10, config->ltoo) || config->ltoo > 3) +@@ -1464,6 +1474,8 @@ void LinkerDriver::link(ArrayRef argsArr) { + config->doGC = doGC; + config->doICF = icfLevel > 0; + config->tailMerge = (tailMerge == 1 && config->doICF) || tailMerge == 2; ++ config->ltoNewPassManager = ltoNewPM; ++ config->ltoDebugPassManager = ltoDebugPM; + + // Handle /lldsavetemps + if (args.hasArg(OPT_lldsavetemps)) +diff --git a/lld/COFF/LTO.cpp b/lld/COFF/LTO.cpp +index bb44819e60f8..e55fb544b050 100644 +--- a/lld/COFF/LTO.cpp ++++ b/lld/COFF/LTO.cpp +@@ -82,6 +82,8 @@ static lto::Config createConfig() { + c.MAttrs = getMAttrs(); + c.CGOptLevel = args::getCGOptLevel(config->ltoo); + c.AlwaysEmitRegularLTOObj = !config->ltoObjPath.empty(); ++ c.UseNewPM = config->ltoNewPassManager; ++ c.DebugPassManager = config->ltoDebugPassManager; + + if (config->saveTemps) + checkError(c.addSaveTemps(std::string(config->outputFile) + ".", diff --git a/build/build-clang/loosen-msvc-detection.patch b/build/build-clang/loosen-msvc-detection.patch new file mode 100644 index 0000000000..03cd72e929 --- /dev/null +++ b/build/build-clang/loosen-msvc-detection.patch @@ -0,0 +1,22 @@ +In a proper VS install, the path to cl.exe looks like: +...\VC\Tools\MSVC\14.11.25503\bin\HostX64\x64\cl.exe + +In our automation, the path is just: +...\VC\bin\HostX64\x64\cl.exe + +Clang tries to do some sanity-checking to make sure that the cl.exe it finds is the Microsoft compiler and not some other program. But the checks are a little too strict for us, so just look for "bin\Host*\*\cl.exe". + +diff --git a/clang/lib/Driver/ToolChains/MSVC.cpp b/clang/lib/Driver/ToolChains/MSVC.cpp +index 7978a6941cb..0159e89fa27 100644 +--- a/clang/lib/Driver/ToolChains/MSVC.cpp ++++ b/clang/lib/Driver/ToolChains/MSVC.cpp +@@ -152,8 +152,7 @@ static bool findVCToolChainViaEnvironment(std::string &Path, + // path components with these prefixes when walking backwards through + // the path. + // Note: empty strings match anything. 
+- llvm::StringRef ExpectedPrefixes[] = {"", "Host", "bin", "", +- "MSVC", "Tools", "VC"}; ++ llvm::StringRef ExpectedPrefixes[] = {"", "Host", "bin"}; + + auto It = llvm::sys::path::rbegin(PathEntry); + auto End = llvm::sys::path::rend(PathEntry); diff --git a/build/build-clang/r350774.patch b/build/build-clang/r350774.patch new file mode 100644 index 0000000000..6b8640f745 --- /dev/null +++ b/build/build-clang/r350774.patch @@ -0,0 +1,14 @@ +diff --git a/llvm/lib/Object/Binary.cpp b/llvm/lib/Object/Binary.cpp +index d7c25921ec3..fe41987f5c2 100644 +--- a/llvm/lib/Object/Binary.cpp ++++ b/llvm/lib/Object/Binary.cpp +@@ -88,7 +88,8 @@ Expected> object::createBinary(MemoryBufferRef Buffer, + + Expected> object::createBinary(StringRef Path) { + ErrorOr> FileOrErr = +- MemoryBuffer::getFileOrSTDIN(Path); ++ MemoryBuffer::getFileOrSTDIN(Path, /*FileSize=*/-1, ++ /*RequiresNullTerminator=*/false); + if (std::error_code EC = FileOrErr.getError()) + return errorCodeToError(EC); + std::unique_ptr &Buffer = FileOrErr.get(); diff --git a/build/build-clang/rG7e18aeba5062.patch b/build/build-clang/rG7e18aeba5062.patch new file mode 100644 index 0000000000..58947b6dd8 --- /dev/null +++ b/build/build-clang/rG7e18aeba5062.patch @@ -0,0 +1,255 @@ +From 779a169144581438d9e24b8b46a86704f6335e35 Mon Sep 17 00:00:00 2001 +From: Nikita Popov +Date: Sat, 16 Nov 2019 16:22:18 +0100 +Subject: [PATCH] [LVI] Restructure caching + +Variant on D70103. The caching is switched to always use a BB to +cache entry map, which then contains per-value caches. A separate +set contains value handles with a deletion callback. This allows us +to properly invalidate overdefined values. + +A possible alternative would be to always cache by value first and +have per-BB maps/sets in the each cache entry. In that case we could +use a ValueMap and would avoid the separate value handle set. I went +with the BB indexing at the top level to make it easier to integrate +D69914, but possibly that's not the right choice. + +Differential Revision: https://reviews.llvm.org/D70376 +--- + llvm/lib/Analysis/LazyValueInfo.cpp | 143 +++++++++------------------- + 1 file changed, 47 insertions(+), 96 deletions(-) + +diff --git a/llvm/lib/Analysis/LazyValueInfo.cpp b/llvm/lib/Analysis/LazyValueInfo.cpp +index 542ff709d47..eb51744aec3 100644 +--- a/llvm/lib/Analysis/LazyValueInfo.cpp ++++ b/llvm/lib/Analysis/LazyValueInfo.cpp +@@ -132,12 +132,9 @@ namespace { + /// A callback value handle updates the cache when values are erased. + class LazyValueInfoCache; + struct LVIValueHandle final : public CallbackVH { +- // Needs to access getValPtr(), which is protected. +- friend struct DenseMapInfo; +- + LazyValueInfoCache *Parent; + +- LVIValueHandle(Value *V, LazyValueInfoCache *P) ++ LVIValueHandle(Value *V, LazyValueInfoCache *P = nullptr) + : CallbackVH(V), Parent(P) { } + + void deleted() override; +@@ -151,89 +148,63 @@ namespace { + /// This is the cache kept by LazyValueInfo which + /// maintains information about queries across the clients' queries. + class LazyValueInfoCache { +- /// This is all of the cached block information for exactly one Value*. +- /// The entries are sorted by the BasicBlock* of the +- /// entries, allowing us to do a lookup with a binary search. +- /// Over-defined lattice values are recorded in OverDefinedCache to reduce +- /// memory overhead. 
+- struct ValueCacheEntryTy { +- ValueCacheEntryTy(Value *V, LazyValueInfoCache *P) : Handle(V, P) {} +- LVIValueHandle Handle; +- SmallDenseMap, ValueLatticeElement, 4> BlockVals; ++ /// This is all of the cached information for one basic block. It contains ++ /// the per-value lattice elements, as well as a separate set for ++ /// overdefined values to reduce memory usage. ++ struct BlockCacheEntryTy { ++ SmallDenseMap, ValueLatticeElement, 4> LatticeElements; ++ SmallDenseSet, 4> OverDefined; + }; + +- /// This tracks, on a per-block basis, the set of values that are +- /// over-defined at the end of that block. +- typedef DenseMap, SmallPtrSet> +- OverDefinedCacheTy; +- /// Keep track of all blocks that we have ever seen, so we +- /// don't spend time removing unused blocks from our caches. +- DenseSet > SeenBlocks; +- +- /// This is all of the cached information for all values, +- /// mapped from Value* to key information. +- DenseMap> ValueCache; +- OverDefinedCacheTy OverDefinedCache; +- ++ /// Cached information per basic block. ++ DenseMap, BlockCacheEntryTy> BlockCache; ++ /// Set of value handles used to erase values from the cache on deletion. ++ DenseSet> ValueHandles; + + public: + void insertResult(Value *Val, BasicBlock *BB, + const ValueLatticeElement &Result) { +- SeenBlocks.insert(BB); +- ++ auto &CacheEntry = BlockCache.try_emplace(BB).first->second; + // Insert over-defined values into their own cache to reduce memory + // overhead. + if (Result.isOverdefined()) +- OverDefinedCache[BB].insert(Val); +- else { +- auto It = ValueCache.find_as(Val); +- if (It == ValueCache.end()) { +- ValueCache[Val] = make_unique(Val, this); +- It = ValueCache.find_as(Val); +- assert(It != ValueCache.end() && "Val was just added to the map!"); +- } +- It->second->BlockVals[BB] = Result; +- } +- } +- +- bool isOverdefined(Value *V, BasicBlock *BB) const { +- auto ODI = OverDefinedCache.find(BB); +- +- if (ODI == OverDefinedCache.end()) +- return false; ++ CacheEntry.OverDefined.insert(Val); ++ else ++ CacheEntry.LatticeElements.insert({ Val, Result }); + +- return ODI->second.count(V); ++ auto HandleIt = ValueHandles.find_as(Val); ++ if (HandleIt == ValueHandles.end()) ++ ValueHandles.insert({ Val, this }); + } + + bool hasCachedValueInfo(Value *V, BasicBlock *BB) const { +- if (isOverdefined(V, BB)) +- return true; +- +- auto I = ValueCache.find_as(V); +- if (I == ValueCache.end()) ++ auto It = BlockCache.find(BB); ++ if (It == BlockCache.end()) + return false; + +- return I->second->BlockVals.count(BB); ++ return It->second.OverDefined.count(V) || ++ It->second.LatticeElements.count(V); + } + + ValueLatticeElement getCachedValueInfo(Value *V, BasicBlock *BB) const { +- if (isOverdefined(V, BB)) ++ auto It = BlockCache.find(BB); ++ if (It == BlockCache.end()) ++ return ValueLatticeElement(); ++ ++ if (It->second.OverDefined.count(V)) + return ValueLatticeElement::getOverdefined(); + +- auto I = ValueCache.find_as(V); +- if (I == ValueCache.end()) ++ auto LatticeIt = It->second.LatticeElements.find(V); ++ if (LatticeIt == It->second.LatticeElements.end()) + return ValueLatticeElement(); +- auto BBI = I->second->BlockVals.find(BB); +- if (BBI == I->second->BlockVals.end()) +- return ValueLatticeElement(); +- return BBI->second; ++ ++ return LatticeIt->second; + } + + /// clear - Empty the cache. 
+ void clear() { +- SeenBlocks.clear(); +- ValueCache.clear(); +- OverDefinedCache.clear(); ++ BlockCache.clear(); ++ ValueHandles.clear(); + } + + /// Inform the cache that a given value has been deleted. +@@ -247,23 +218,18 @@ namespace { + /// OldSucc might have (unless also overdefined in NewSucc). This just + /// flushes elements from the cache and does not add any. + void threadEdgeImpl(BasicBlock *OldSucc,BasicBlock *NewSucc); +- +- friend struct LVIValueHandle; + }; + } + + void LazyValueInfoCache::eraseValue(Value *V) { +- for (auto I = OverDefinedCache.begin(), E = OverDefinedCache.end(); I != E;) { +- // Copy and increment the iterator immediately so we can erase behind +- // ourselves. +- auto Iter = I++; +- SmallPtrSetImpl &ValueSet = Iter->second; +- ValueSet.erase(V); +- if (ValueSet.empty()) +- OverDefinedCache.erase(Iter); ++ for (auto &Pair : BlockCache) { ++ Pair.second.LatticeElements.erase(V); ++ Pair.second.OverDefined.erase(V); + } + +- ValueCache.erase(V); ++ auto HandleIt = ValueHandles.find_as(V); ++ if (HandleIt != ValueHandles.end()) ++ ValueHandles.erase(HandleIt); + } + + void LVIValueHandle::deleted() { +@@ -273,18 +239,7 @@ void LVIValueHandle::deleted() { + } + + void LazyValueInfoCache::eraseBlock(BasicBlock *BB) { +- // Shortcut if we have never seen this block. +- DenseSet >::iterator I = SeenBlocks.find(BB); +- if (I == SeenBlocks.end()) +- return; +- SeenBlocks.erase(I); +- +- auto ODI = OverDefinedCache.find(BB); +- if (ODI != OverDefinedCache.end()) +- OverDefinedCache.erase(ODI); +- +- for (auto &I : ValueCache) +- I.second->BlockVals.erase(BB); ++ BlockCache.erase(BB); + } + + void LazyValueInfoCache::threadEdgeImpl(BasicBlock *OldSucc, +@@ -302,10 +257,11 @@ void LazyValueInfoCache::threadEdgeImpl(BasicBlock *OldSucc, + std::vector worklist; + worklist.push_back(OldSucc); + +- auto I = OverDefinedCache.find(OldSucc); +- if (I == OverDefinedCache.end()) ++ auto I = BlockCache.find(OldSucc); ++ if (I == BlockCache.end() || I->second.OverDefined.empty()) + return; // Nothing to process here. +- SmallVector ValsToClear(I->second.begin(), I->second.end()); ++ SmallVector ValsToClear(I->second.OverDefined.begin(), ++ I->second.OverDefined.end()); + + // Use a worklist to perform a depth-first search of OldSucc's successors. + // NOTE: We do not need a visited list since any blocks we have already +@@ -319,10 +275,10 @@ void LazyValueInfoCache::threadEdgeImpl(BasicBlock *OldSucc, + if (ToUpdate == NewSucc) continue; + + // If a value was marked overdefined in OldSucc, and is here too... +- auto OI = OverDefinedCache.find(ToUpdate); +- if (OI == OverDefinedCache.end()) ++ auto OI = BlockCache.find(ToUpdate); ++ if (OI == BlockCache.end() || OI->second.OverDefined.empty()) + continue; +- SmallPtrSetImpl &ValueSet = OI->second; ++ auto &ValueSet = OI->second.OverDefined; + + bool changed = false; + for (Value *V : ValsToClear) { +@@ -332,11 +288,6 @@ void LazyValueInfoCache::threadEdgeImpl(BasicBlock *OldSucc, + // If we removed anything, then we potentially need to update + // blocks successors too. 
+ changed = true; +- +- if (ValueSet.empty()) { +- OverDefinedCache.erase(OI); +- break; +- } + } + + if (!changed) continue; +-- +2.24.0 + diff --git a/build/build-clang/rG7e18aeba5062_clang_10.patch b/build/build-clang/rG7e18aeba5062_clang_10.patch new file mode 100644 index 0000000000..0fc39a1b4d --- /dev/null +++ b/build/build-clang/rG7e18aeba5062_clang_10.patch @@ -0,0 +1,249 @@ +From 779a169144581438d9e24b8b46a86704f6335e35 Mon Sep 17 00:00:00 2001 +From: Nikita Popov +Date: Sat, 16 Nov 2019 16:22:18 +0100 +Subject: [PATCH] [LVI] Restructure caching + +Variant on D70103. The caching is switched to always use a BB to +cache entry map, which then contains per-value caches. A separate +set contains value handles with a deletion callback. This allows us +to properly invalidate overdefined values. + +A possible alternative would be to always cache by value first and +have per-BB maps/sets in the each cache entry. In that case we could +use a ValueMap and would avoid the separate value handle set. I went +with the BB indexing at the top level to make it easier to integrate +D69914, but possibly that's not the right choice. + +Differential Revision: https://reviews.llvm.org/D70376 + +diff --git a/llvm/lib/Analysis/LazyValueInfo.cpp b/llvm/lib/Analysis/LazyValueInfo.cpp +index bad2de9e5f5..33406a75d80 100644 +--- a/llvm/lib/Analysis/LazyValueInfo.cpp ++++ b/llvm/lib/Analysis/LazyValueInfo.cpp +@@ -136,12 +136,10 @@ namespace { + /// A callback value handle updates the cache when values are erased. + class LazyValueInfoCache; + struct LVIValueHandle final : public CallbackVH { +- // Needs to access getValPtr(), which is protected. +- friend struct DenseMapInfo; + + LazyValueInfoCache *Parent; + +- LVIValueHandle(Value *V, LazyValueInfoCache *P) ++ LVIValueHandle(Value *V, LazyValueInfoCache *P = nullptr) + : CallbackVH(V), Parent(P) { } + + void deleted() override; +@@ -155,89 +153,63 @@ namespace { + /// This is the cache kept by LazyValueInfo which + /// maintains information about queries across the clients' queries. + class LazyValueInfoCache { +- /// This is all of the cached block information for exactly one Value*. +- /// The entries are sorted by the BasicBlock* of the +- /// entries, allowing us to do a lookup with a binary search. +- /// Over-defined lattice values are recorded in OverDefinedCache to reduce +- /// memory overhead. +- struct ValueCacheEntryTy { +- ValueCacheEntryTy(Value *V, LazyValueInfoCache *P) : Handle(V, P) {} +- LVIValueHandle Handle; +- SmallDenseMap, ValueLatticeElement, 4> BlockVals; ++ /// This is all of the cached information for one basic block. It contains ++ /// the per-value lattice elements, as well as a separate set for ++ /// overdefined values to reduce memory usage. ++ struct BlockCacheEntryTy { ++ SmallDenseMap, ValueLatticeElement, 4> LatticeElements; ++ SmallDenseSet, 4> OverDefined; + }; + +- /// This tracks, on a per-block basis, the set of values that are +- /// over-defined at the end of that block. +- typedef DenseMap, SmallPtrSet> +- OverDefinedCacheTy; +- /// Keep track of all blocks that we have ever seen, so we +- /// don't spend time removing unused blocks from our caches. +- DenseSet > SeenBlocks; +- +- /// This is all of the cached information for all values, +- /// mapped from Value* to key information. +- DenseMap> ValueCache; +- OverDefinedCacheTy OverDefinedCache; +- ++ /// Cached information per basic block. ++ DenseMap, BlockCacheEntryTy> BlockCache; ++ /// Set of value handles used to erase values from the cache on deletion. 
++ DenseSet> ValueHandles; + + public: + void insertResult(Value *Val, BasicBlock *BB, + const ValueLatticeElement &Result) { +- SeenBlocks.insert(BB); +- ++ auto &CacheEntry = BlockCache.try_emplace(BB).first->second; + // Insert over-defined values into their own cache to reduce memory + // overhead. + if (Result.isOverdefined()) +- OverDefinedCache[BB].insert(Val); +- else { +- auto It = ValueCache.find_as(Val); +- if (It == ValueCache.end()) { +- ValueCache[Val] = std::make_unique(Val, this); +- It = ValueCache.find_as(Val); +- assert(It != ValueCache.end() && "Val was just added to the map!"); +- } +- It->second->BlockVals[BB] = Result; +- } +- } +- +- bool isOverdefined(Value *V, BasicBlock *BB) const { +- auto ODI = OverDefinedCache.find(BB); +- +- if (ODI == OverDefinedCache.end()) +- return false; ++ CacheEntry.OverDefined.insert(Val); ++ else ++ CacheEntry.LatticeElements.insert({ Val, Result }); + +- return ODI->second.count(V); ++ auto HandleIt = ValueHandles.find_as(Val); ++ if (HandleIt == ValueHandles.end()) ++ ValueHandles.insert({ Val, this }); + } + + bool hasCachedValueInfo(Value *V, BasicBlock *BB) const { +- if (isOverdefined(V, BB)) +- return true; +- +- auto I = ValueCache.find_as(V); +- if (I == ValueCache.end()) ++ auto It = BlockCache.find(BB); ++ if (It == BlockCache.end()) + return false; + +- return I->second->BlockVals.count(BB); ++ return It->second.OverDefined.count(V) || ++ It->second.LatticeElements.count(V); + } + + ValueLatticeElement getCachedValueInfo(Value *V, BasicBlock *BB) const { +- if (isOverdefined(V, BB)) ++ auto It = BlockCache.find(BB); ++ if (It == BlockCache.end()) ++ return ValueLatticeElement(); ++ ++ if (It->second.OverDefined.count(V)) + return ValueLatticeElement::getOverdefined(); + +- auto I = ValueCache.find_as(V); +- if (I == ValueCache.end()) +- return ValueLatticeElement(); +- auto BBI = I->second->BlockVals.find(BB); +- if (BBI == I->second->BlockVals.end()) ++ auto LatticeIt = It->second.LatticeElements.find(V); ++ if (LatticeIt == It->second.LatticeElements.end()) + return ValueLatticeElement(); +- return BBI->second; ++ ++ return LatticeIt->second; + } + + /// clear - Empty the cache. + void clear() { +- SeenBlocks.clear(); +- ValueCache.clear(); +- OverDefinedCache.clear(); ++ BlockCache.clear(); ++ ValueHandles.clear(); + } + + /// Inform the cache that a given value has been deleted. +@@ -251,23 +223,18 @@ namespace { + /// OldSucc might have (unless also overdefined in NewSucc). This just + /// flushes elements from the cache and does not add any. + void threadEdgeImpl(BasicBlock *OldSucc,BasicBlock *NewSucc); +- +- friend struct LVIValueHandle; + }; + } + + void LazyValueInfoCache::eraseValue(Value *V) { +- for (auto I = OverDefinedCache.begin(), E = OverDefinedCache.end(); I != E;) { +- // Copy and increment the iterator immediately so we can erase behind +- // ourselves. +- auto Iter = I++; +- SmallPtrSetImpl &ValueSet = Iter->second; +- ValueSet.erase(V); +- if (ValueSet.empty()) +- OverDefinedCache.erase(Iter); ++ for (auto &Pair : BlockCache) { ++ Pair.second.LatticeElements.erase(V); ++ Pair.second.OverDefined.erase(V); + } + +- ValueCache.erase(V); ++ auto HandleIt = ValueHandles.find_as(V); ++ if (HandleIt != ValueHandles.end()) ++ ValueHandles.erase(HandleIt); + } + + void LVIValueHandle::deleted() { +@@ -277,18 +244,7 @@ void LVIValueHandle::deleted() { + } + + void LazyValueInfoCache::eraseBlock(BasicBlock *BB) { +- // Shortcut if we have never seen this block. 
+- DenseSet >::iterator I = SeenBlocks.find(BB); +- if (I == SeenBlocks.end()) +- return; +- SeenBlocks.erase(I); +- +- auto ODI = OverDefinedCache.find(BB); +- if (ODI != OverDefinedCache.end()) +- OverDefinedCache.erase(ODI); +- +- for (auto &I : ValueCache) +- I.second->BlockVals.erase(BB); ++ BlockCache.erase(BB); + } + + void LazyValueInfoCache::threadEdgeImpl(BasicBlock *OldSucc, +@@ -306,10 +262,11 @@ void LazyValueInfoCache::threadEdgeImpl(BasicBlock *OldSucc, + std::vector worklist; + worklist.push_back(OldSucc); + +- auto I = OverDefinedCache.find(OldSucc); +- if (I == OverDefinedCache.end()) ++ auto I = BlockCache.find(OldSucc); ++ if (I == BlockCache.end() || I->second.OverDefined.empty()) + return; // Nothing to process here. +- SmallVector ValsToClear(I->second.begin(), I->second.end()); ++ SmallVector ValsToClear(I->second.OverDefined.begin(), ++ I->second.OverDefined.end()); + + // Use a worklist to perform a depth-first search of OldSucc's successors. + // NOTE: We do not need a visited list since any blocks we have already +@@ -323,10 +280,10 @@ void LazyValueInfoCache::threadEdgeImpl(BasicBlock *OldSucc, + if (ToUpdate == NewSucc) continue; + + // If a value was marked overdefined in OldSucc, and is here too... +- auto OI = OverDefinedCache.find(ToUpdate); +- if (OI == OverDefinedCache.end()) ++ auto OI = BlockCache.find(ToUpdate); ++ if (OI == BlockCache.end() || OI->second.OverDefined.empty()) + continue; +- SmallPtrSetImpl &ValueSet = OI->second; ++ auto &ValueSet = OI->second.OverDefined; + + bool changed = false; + for (Value *V : ValsToClear) { +@@ -336,11 +293,6 @@ void LazyValueInfoCache::threadEdgeImpl(BasicBlock *OldSucc, + // If we removed anything, then we potentially need to update + // blocks successors too. + changed = true; +- +- if (ValueSet.empty()) { +- OverDefinedCache.erase(OI); +- break; +- } + } + + if (!changed) continue; diff --git a/build/build-clang/rename_gcov_flush.patch b/build/build-clang/rename_gcov_flush.patch new file mode 100644 index 0000000000..c707c4423f --- /dev/null +++ b/build/build-clang/rename_gcov_flush.patch @@ -0,0 +1,40 @@ +Index: compiler-rt/lib/profile/GCDAProfiling.c +=================================================================== +diff --git a/compiler-rt/lib/profile/GCDAProfiling.c b/compiler-rt/lib/profile/GCDAProfiling.c +--- a/compiler-rt/lib/profile/GCDAProfiling.c ++++ b/compiler-rt/lib/profile/GCDAProfiling.c +@@ -619,7 +619,7 @@ + fn_list_insert(&flush_fn_list, fn); + } + +-void __gcov_flush() { ++void __custom_llvm_gcov_flush() { + struct fn_node* curr = flush_fn_list.head; + + while (curr) { +diff --git a/llvm/lib/Transforms/Instrumentation/GCOVProfiling.cpp b/llvm/lib/Transforms/Instrumentation/GCOVProfiling.cpp +index 9af64ed332c..bcebe303ff4 100644 +--- a/llvm/lib/Transforms/Instrumentation/GCOVProfiling.cpp ++++ b/llvm/lib/Transforms/Instrumentation/GCOVProfiling.cpp +@@ -647,7 +647,7 @@ + for (auto I : ForkAndExecs) { + IRBuilder<> Builder(I); + FunctionType *FTy = FunctionType::get(Builder.getVoidTy(), {}, false); +- FunctionCallee GCOVFlush = M->getOrInsertFunction("__gcov_flush", FTy); ++ FunctionCallee GCOVFlush = M->getOrInsertFunction("__custom_llvm_gcov_flush", FTy); + Builder.CreateCall(GCOVFlush); + I->getParent()->splitBasicBlock(I); + } +diff --git a/clang/lib/Driver/ToolChains/Darwin.cpp b/clang/lib/Driver/ToolChains/Darwin.cpp +index e113f9a679..b3a07b18c0 100644 +--- a/clang/lib/Driver/ToolChains/Darwin.cpp ++++ b/clang/lib/Driver/ToolChains/Darwin.cpp +@@ -1122,7 +1122,7 @@ + // 
runtime's functionality. + if (hasExportSymbolDirective(Args)) { + if (needsGCovInstrumentation(Args)) { +- addExportedSymbol(CmdArgs, "___gcov_flush"); ++ addExportedSymbol(CmdArgs, "___custom_llvm_gcov_flush"); + addExportedSymbol(CmdArgs, "_flush_fn_list"); + addExportedSymbol(CmdArgs, "_writeout_fn_list"); + } else { diff --git a/build/build-clang/rename_gcov_flush_7.patch b/build/build-clang/rename_gcov_flush_7.patch new file mode 100644 index 0000000000..ae7b922716 --- /dev/null +++ b/build/build-clang/rename_gcov_flush_7.patch @@ -0,0 +1,14 @@ +Index: compiler-rt/lib/profile/GCDAProfiling.c +=================================================================== +diff --git a/compiler-rt/lib/profile/GCDAProfiling.c b/compiler-rt/lib/profile/GCDAProfiling.c +--- a/compiler-rt/lib/profile/GCDAProfiling.c (revisione 336380) ++++ b/compiler-rt/lib/profile/GCDAProfiling.c (copia locale) +@@ -555,7 +555,7 @@ + fn_list_insert(&flush_fn_list, fn); + } + +-void __gcov_flush() { ++void __custom_llvm_gcov_flush() { + struct fn_node* curr = flush_fn_list.head; + + while (curr) { diff --git a/build/build-clang/rename_gcov_flush_clang_10.patch b/build/build-clang/rename_gcov_flush_clang_10.patch new file mode 100644 index 0000000000..1da3b653a5 --- /dev/null +++ b/build/build-clang/rename_gcov_flush_clang_10.patch @@ -0,0 +1,42 @@ +diff --git a/clang/lib/Driver/ToolChains/Darwin.cpp b/clang/lib/Driver/ToolChains/Darwin.cpp +index 220bc8f9835..4f7ce485777 100644 +--- a/clang/lib/Driver/ToolChains/Darwin.cpp ++++ b/clang/lib/Driver/ToolChains/Darwin.cpp +@@ -1143,7 +1143,7 @@ void Darwin::addProfileRTLibs(const ArgList &Args, + // runtime's functionality. + if (hasExportSymbolDirective(Args)) { + if (ForGCOV) { +- addExportedSymbol(CmdArgs, "___gcov_flush"); ++ addExportedSymbol(CmdArgs, "___custom_llvm_gcov_flush"); + addExportedSymbol(CmdArgs, "_flush_fn_list"); + addExportedSymbol(CmdArgs, "_writeout_fn_list"); + } else { +diff --git a/compiler-rt/lib/profile/GCDAProfiling.c b/compiler-rt/lib/profile/GCDAProfiling.c +index 498c05900bf..b7257db10e7 100644 +--- a/compiler-rt/lib/profile/GCDAProfiling.c ++++ b/compiler-rt/lib/profile/GCDAProfiling.c +@@ -619,7 +619,7 @@ void llvm_register_flush_function(fn_ptr fn) { + fn_list_insert(&flush_fn_list, fn); + } + +-void __gcov_flush() { ++void __custom_llvm_gcov_flush() { + struct fn_node* curr = flush_fn_list.head; + + while (curr) { +diff --git a/compiler-rt/test/tsan/pthread_atfork_deadlock2.c b/compiler-rt/test/tsan/pthread_atfork_deadlock2.c +new file mode 100644 +index 00000000000..e69de29bb2d +diff --git a/llvm/lib/Transforms/Instrumentation/GCOVProfiling.cpp b/llvm/lib/Transforms/Instrumentation/GCOVProfiling.cpp +index bf3e4ed3e31..37bdcfaeab8 100644 +--- a/llvm/lib/Transforms/Instrumentation/GCOVProfiling.cpp ++++ b/llvm/lib/Transforms/Instrumentation/GCOVProfiling.cpp +@@ -656,7 +656,7 @@ void GCOVProfiler::AddFlushBeforeForkAndExec() { + for (auto I : ForkAndExecs) { + IRBuilder<> Builder(I); + FunctionType *FTy = FunctionType::get(Builder.getVoidTy(), {}, false); +- FunctionCallee GCOVFlush = M->getOrInsertFunction("__gcov_flush", FTy); ++ FunctionCallee GCOVFlush = M->getOrInsertFunction("__custom_llvm_gcov_flush", FTy); + Builder.CreateCall(GCOVFlush); + I->getParent()->splitBasicBlock(I); + } diff --git a/build/build-clang/rename_gcov_flush_clang_11.patch b/build/build-clang/rename_gcov_flush_clang_11.patch new file mode 100644 index 0000000000..bd76477fd5 --- /dev/null +++ b/build/build-clang/rename_gcov_flush_clang_11.patch @@ -0,0 +1,26 
@@ +diff --git a/clang/lib/Driver/ToolChains/Darwin.cpp b/clang/lib/Driver/ToolChains/Darwin.cpp +index 7b879f8cb65..3810a2ceec2 100644 +--- a/clang/lib/Driver/ToolChains/Darwin.cpp ++++ b/clang/lib/Driver/ToolChains/Darwin.cpp +@@ -1196,7 +1196,7 @@ void Darwin::addProfileRTLibs(const ArgList &Args, + // runtime's functionality. + if (hasExportSymbolDirective(Args)) { + if (ForGCOV) { +- addExportedSymbol(CmdArgs, "___gcov_flush"); ++ addExportedSymbol(CmdArgs, "___custom_llvm_gcov_flush"); + addExportedSymbol(CmdArgs, "_flush_fn_list"); + addExportedSymbol(CmdArgs, "_writeout_fn_list"); + addExportedSymbol(CmdArgs, "_reset_fn_list"); +diff --git a/compiler-rt/lib/profile/GCDAProfiling.c b/compiler-rt/lib/profile/GCDAProfiling.c +index 57d8dec423c..2edfb6e19e9 100644 +--- a/compiler-rt/lib/profile/GCDAProfiling.c ++++ b/compiler-rt/lib/profile/GCDAProfiling.c +@@ -644,7 +644,7 @@ void llvm_register_flush_function(fn_ptr fn) { + fn_list_insert(&flush_fn_list, fn); + } + +-void __gcov_flush() { ++void __custom_llvm_gcov_flush() { + struct fn_node* curr = flush_fn_list.head; + + while (curr) { diff --git a/build/build-clang/revert-r362047-and-r362065.patch b/build/build-clang/revert-r362047-and-r362065.patch new file mode 100644 index 0000000000..c522c9ae02 --- /dev/null +++ b/build/build-clang/revert-r362047-and-r362065.patch @@ -0,0 +1,62 @@ +Bisection found that r362047 (and its followup build fix r362065) cause the +build to install the android PGO library into the following location: +stage2/clang/lib/linux/libclang_rt.profile-arm-android.a +rather than the expected: +stage2/clang/lib64/clang/$VERSION/lib/linux/libclang_rt.profile-arm-android.a + +For lack of any progress in debugging this, revert those two patches. + +--- a/llvm/runtimes/CMakeLists.txt ++++ b/llvm/runtimes/CMakeLists.txt +@@ -60,12 +60,11 @@ + project(Runtimes C CXX ASM) + +- find_package(LLVM PATHS "${LLVM_BINARY_DIR}" NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH) +- + # Add the root project's CMake modules, and the LLVM build's modules to the + # CMake module path. + list(INSERT CMAKE_MODULE_PATH 0 + "${CMAKE_CURRENT_SOURCE_DIR}/../cmake" + "${CMAKE_CURRENT_SOURCE_DIR}/../cmake/modules" ++ "${LLVM_LIBRARY_DIR}/cmake/llvm" + ) + + # Some of the runtimes will conditionally use the compiler-rt sanitizers +@@ -80,6 +79,11 @@ + endif() + endif() + ++ # LLVMConfig.cmake contains a bunch of CMake variables from the LLVM build. ++ # This file is installed as part of LLVM distributions, so this can be used ++ # either from a build directory or an installed LLVM. ++ include(LLVMConfig) ++ + # Setting these variables will allow the sub-build to put their outputs into + # the library and bin directories of the top-level build. + set(LLVM_LIBRARY_OUTPUT_INTDIR ${LLVM_LIBRARY_DIR}) +@@ -89,9 +93,6 @@ + set(LLVM_MAIN_SRC_DIR ${LLVM_BUILD_MAIN_SRC_DIR}) + set(LLVM_CMAKE_PATH ${LLVM_MAIN_SRC_DIR}/cmake/modules) + +- # This variable is used by individual runtimes to locate LLVM files. 
+- set(LLVM_PATH ${LLVM_BUILD_MAIN_SRC_DIR}) +- + if(APPLE) + set(LLVM_ENABLE_LIBCXX ON CACHE BOOL "") + endif() +@@ -381,4 +382,6 @@ + CMAKE_ARGS -DCOMPILER_RT_BUILD_BUILTINS=Off + -DLLVM_INCLUDE_TESTS=${LLVM_INCLUDE_TESTS} ++ -DLLVM_BINARY_DIR=${LLVM_BINARY_DIR} ++ -DLLVM_LIBRARY_DIR=${LLVM_LIBRARY_DIR} + -DLLVM_DEFAULT_TARGET_TRIPLE=${TARGET_TRIPLE} + -DLLVM_ENABLE_PROJECTS_USED=${LLVM_ENABLE_PROJECTS_USED} +@@ -470,6 +473,8 @@ + # Builtins were built separately above + CMAKE_ARGS -DCOMPILER_RT_BUILD_BUILTINS=Off + -DLLVM_INCLUDE_TESTS=${LLVM_INCLUDE_TESTS} ++ -DLLVM_BINARY_DIR=${LLVM_BINARY_DIR} ++ -DLLVM_LIBRARY_DIR=${LLVM_LIBRARY_DIR} + -DLLVM_DEFAULT_TARGET_TRIPLE=${target} + -DLLVM_ENABLE_PROJECTS_USED=${LLVM_ENABLE_PROJECTS_USED} + -DLLVM_ENABLE_PER_TARGET_RUNTIME_DIR=ON diff --git a/build/build-clang/static-llvm-symbolizer.patch b/build/build-clang/static-llvm-symbolizer.patch new file mode 100644 index 0000000000..ea8ebc322b --- /dev/null +++ b/build/build-clang/static-llvm-symbolizer.patch @@ -0,0 +1,12 @@ +diff --git a/llvm/tools/llvm-symbolizer/CMakeLists.txt b/llvm/tools/llvm-symbolizer/CMakeLists.txt +index 8185c296c50..13c7419fa47 100644 +--- a/llvm/tools/llvm-symbolizer/CMakeLists.txt ++++ b/llvm/tools/llvm-symbolizer/CMakeLists.txt +@@ -13,6 +13,7 @@ set(LLVM_LINK_COMPONENTS + ) + + add_llvm_tool(llvm-symbolizer ++ DISABLE_LLVM_LINK_LLVM_DYLIB + llvm-symbolizer.cpp + ) + diff --git a/build/build-clang/tsan-hang-be41a98ac222.patch b/build/build-clang/tsan-hang-be41a98ac222.patch new file mode 100644 index 0000000000..3e148e52b3 --- /dev/null +++ b/build/build-clang/tsan-hang-be41a98ac222.patch @@ -0,0 +1,100 @@ +From be41a98ac222f33ed5558d86e1cede67249e99b5 Mon Sep 17 00:00:00 2001 +From: Dmitry Vyukov <dvyukov@google.com> +Date: Sat, 21 Mar 2020 13:34:50 +0100 +Subject: [PATCH] tsan: fix deadlock with pthread_atfork callbacks + +This fixes the bug reported at: +https://groups.google.com/forum/#!topic/thread-sanitizer/e_zB9gYqFHM + +A pthread_atfork callback triggers a data race +and we deadlock on the report_mtx. Ignore memory access +in the pthread_atfork callbacks to prevent the deadlock. +--- + compiler-rt/lib/tsan/rtl/tsan_rtl.cc | 9 ++++ + .../test/tsan/pthread_atfork_deadlock2.c | 49 +++++++++++++++++++ + 2 files changed, 58 insertions(+) + create mode 100644 compiler-rt/test/tsan/pthread_atfork_deadlock2.c + +diff --git a/compiler-rt/lib/tsan/rtl/tsan_rtl.cc b/compiler-rt/lib/tsan/rtl/tsan_rtl.cc +index fe469faad2a2..13c9b770f50a 100644 +--- a/compiler-rt/lib/tsan/rtl/tsan_rtl.cc ++++ b/compiler-rt/lib/tsan/rtl/tsan_rtl.cc +@@ -495,14 +495,23 @@ int Finalize(ThreadState *thr) { + void ForkBefore(ThreadState *thr, uptr pc) { + ctx->thread_registry->Lock(); + ctx->report_mtx.Lock(); ++ // Ignore memory accesses in the pthread_atfork callbacks. ++ // If any of them triggers a data race we will deadlock ++ // on the report_mtx. ++ // We could ignore interceptors and sync operations as well, ++ // but so far it's unclear if it will do more good or harm. ++ // Unnecessarily ignoring things can lead to false positives later. ++ ThreadIgnoreBegin(thr, pc); + } + + void ForkParentAfter(ThreadState *thr, uptr pc) { ++ ThreadIgnoreEnd(thr, pc); // Begin is in ForkBefore. + ctx->report_mtx.Unlock(); + ctx->thread_registry->Unlock(); + } + + void ForkChildAfter(ThreadState *thr, uptr pc) { ++ ThreadIgnoreEnd(thr, pc); // Begin is in ForkBefore.
+ ctx->report_mtx.Unlock(); + ctx->thread_registry->Unlock(); + +diff --git a/compiler-rt/test/tsan/pthread_atfork_deadlock2.c b/compiler-rt/test/tsan/pthread_atfork_deadlock2.c +new file mode 100644 +index 000000000000..700507c1e637 +--- /dev/null ++++ b/compiler-rt/test/tsan/pthread_atfork_deadlock2.c +@@ -0,0 +1,49 @@ ++// RUN: %clang_tsan -O1 %s -o %t && %run %t 2>&1 | FileCheck %s ++// Regression test for ++// https://groups.google.com/d/msg/thread-sanitizer/e_zB9gYqFHM/DmAiTsrLAwAJ ++// pthread_atfork() callback triggers a data race and we deadlocked ++// on the report_mtx as we lock it around fork. ++#include "test.h" ++#include <sys/types.h> ++#include <sys/wait.h> ++#include <errno.h> ++ ++int glob = 0; ++ ++void *worker(void *unused) { ++ glob++; ++ barrier_wait(&barrier); ++ return NULL; ++} ++ ++void atfork() { ++ glob++; ++} ++ ++int main() { ++ barrier_init(&barrier, 2); ++ pthread_atfork(atfork, NULL, NULL); ++ pthread_t t; ++ pthread_create(&t, NULL, worker, NULL); ++ barrier_wait(&barrier); ++ pid_t pid = fork(); ++ if (pid < 0) { ++ fprintf(stderr, "fork failed: %d\n", errno); ++ return 1; ++ } ++ if (pid == 0) { ++ fprintf(stderr, "CHILD\n"); ++ return 0; ++ } ++ if (pid != waitpid(pid, NULL, 0)) { ++ fprintf(stderr, "waitpid failed: %d\n", errno); ++ return 1; ++ } ++ pthread_join(t, NULL); ++ fprintf(stderr, "PARENT\n"); ++ return 0; ++} ++ ++// CHECK-NOT: ThreadSanitizer: data race ++// CHECK: CHILD ++// CHECK: PARENT diff --git a/build/build-clang/tsan-hang-be41a98ac222_clang_10.patch b/build/build-clang/tsan-hang-be41a98ac222_clang_10.patch new file mode 100644 index 0000000000..e65335a1fd --- /dev/null +++ b/build/build-clang/tsan-hang-be41a98ac222_clang_10.patch @@ -0,0 +1,100 @@ +From be41a98ac222f33ed5558d86e1cede67249e99b5 Mon Sep 17 00:00:00 2001 +From: Dmitry Vyukov <dvyukov@google.com> +Date: Sat, 21 Mar 2020 13:34:50 +0100 +Subject: [PATCH] tsan: fix deadlock with pthread_atfork callbacks + +This fixes the bug reported at: +https://groups.google.com/forum/#!topic/thread-sanitizer/e_zB9gYqFHM + +A pthread_atfork callback triggers a data race +and we deadlock on the report_mtx. Ignore memory access +in the pthread_atfork callbacks to prevent the deadlock. +--- + compiler-rt/lib/tsan/rtl/tsan_rtl.cc | 9 ++++ + .../test/tsan/pthread_atfork_deadlock2.c | 49 +++++++++++++++++++ + 2 files changed, 58 insertions(+) + create mode 100644 compiler-rt/test/tsan/pthread_atfork_deadlock2.c + +diff --git a/compiler-rt/lib/tsan/rtl/tsan_rtl.cpp b/compiler-rt/lib/tsan/rtl/tsan_rtl.cpp +index 3f3c0cce119..5e324a0a5fd 100644 +--- a/compiler-rt/lib/tsan/rtl/tsan_rtl.cpp ++++ b/compiler-rt/lib/tsan/rtl/tsan_rtl.cpp +@@ -494,14 +494,23 @@ int Finalize(ThreadState *thr) { + void ForkBefore(ThreadState *thr, uptr pc) { + ctx->thread_registry->Lock(); + ctx->report_mtx.Lock(); ++ // Ignore memory accesses in the pthread_atfork callbacks. ++ // If any of them triggers a data race we will deadlock ++ // on the report_mtx. ++ // We could ignore interceptors and sync operations as well, ++ // but so far it's unclear if it will do more good or harm. ++ // Unnecessarily ignoring things can lead to false positives later. ++ ThreadIgnoreBegin(thr, pc); + } + + void ForkParentAfter(ThreadState *thr, uptr pc) { ++ ThreadIgnoreEnd(thr, pc); // Begin is in ForkBefore. + ctx->report_mtx.Unlock(); + ctx->thread_registry->Unlock(); + } + + void ForkChildAfter(ThreadState *thr, uptr pc) { ++ ThreadIgnoreEnd(thr, pc); // Begin is in ForkBefore.
+ ctx->report_mtx.Unlock(); + ctx->thread_registry->Unlock(); + +diff --git a/compiler-rt/test/tsan/pthread_atfork_deadlock2.c b/compiler-rt/test/tsan/pthread_atfork_deadlock2.c +new file mode 100644 +index 00000000000..700507c1e63 +--- /dev/null ++++ b/compiler-rt/test/tsan/pthread_atfork_deadlock2.c +@@ -0,0 +1,49 @@ ++// RUN: %clang_tsan -O1 %s -o %t && %run %t 2>&1 | FileCheck %s ++// Regression test for ++// https://groups.google.com/d/msg/thread-sanitizer/e_zB9gYqFHM/DmAiTsrLAwAJ ++// pthread_atfork() callback triggers a data race and we deadlocked ++// on the report_mtx as we lock it around fork. ++#include "test.h" ++#include <sys/types.h> ++#include <sys/wait.h> ++#include <errno.h> ++ ++int glob = 0; ++ ++void *worker(void *unused) { ++ glob++; ++ barrier_wait(&barrier); ++ return NULL; ++} ++ ++void atfork() { ++ glob++; ++} ++ ++int main() { ++ barrier_init(&barrier, 2); ++ pthread_atfork(atfork, NULL, NULL); ++ pthread_t t; ++ pthread_create(&t, NULL, worker, NULL); ++ barrier_wait(&barrier); ++ pid_t pid = fork(); ++ if (pid < 0) { ++ fprintf(stderr, "fork failed: %d\n", errno); ++ return 1; ++ } ++ if (pid == 0) { ++ fprintf(stderr, "CHILD\n"); ++ return 0; ++ } ++ if (pid != waitpid(pid, NULL, 0)) { ++ fprintf(stderr, "waitpid failed: %d\n", errno); ++ return 1; ++ } ++ pthread_join(t, NULL); ++ fprintf(stderr, "PARENT\n"); ++ return 0; ++} ++ ++// CHECK-NOT: ThreadSanitizer: data race ++// CHECK: CHILD ++// CHECK: PARENT diff --git a/build/build-clang/unpoison-thread-stacks.patch b/build/build-clang/unpoison-thread-stacks.patch new file mode 100644 index 0000000000..2fb7cafd90 --- /dev/null +++ b/build/build-clang/unpoison-thread-stacks.patch @@ -0,0 +1,62 @@ +[winasan] Unpoison the stack in NtTerminateThread + +In long-running builds we've seen some ASan complaints during thread creation +that we suspect are due to leftover poisoning from previous threads whose stacks +occupied that memory. This patch adds a hook that unpoisons the stack just +before the NtTerminateThread syscall. + +Differential Revision: https://reviews.llvm.org/D52091 + +** Update for clang 9 ** : After some backouts, this patch eventually landed +upstream in a different form, as the TLS handler `asan_thread_exit`, but that +variant causes failures in our test suite, so revert the TLS handler in favor of +the interceptor approach from the first patch. + +--- a/compiler-rt/lib/asan/asan_win.cc ++++ b/compiler-rt/lib/asan/asan_win.cc +@@ -154,6 +154,14 @@ + thr_flags, tid); + } + ++INTERCEPTOR_WINAPI(void, NtTerminateThread, void *rcx) { ++ // Unpoison the terminating thread's stack because the memory may be re-used. ++ NT_TIB *tib = (NT_TIB *)NtCurrentTeb(); ++ uptr stackSize = (uptr)tib->StackBase - (uptr)tib->StackLimit; ++ __asan_unpoison_memory_region(tib->StackLimit, stackSize); ++ return REAL(NtTerminateThread(rcx)); ++} ++ + // }}} + + namespace __asan { +@@ -168,7 +176,9 @@ + + ASAN_INTERCEPT_FUNC(CreateThread); + ASAN_INTERCEPT_FUNC(SetUnhandledExceptionFilter); +- ++ CHECK(::__interception::OverrideFunction("NtTerminateThread", ++ (uptr)WRAP(NtTerminateThread), ++ (uptr *)&REAL(NtTerminateThread))); + #ifdef _WIN64 + ASAN_INTERCEPT_FUNC(__C_specific_handler); + #else +@@ -380,19 +390,6 @@ + void *, unsigned long, void *) = asan_thread_init; + #endif + +-static void NTAPI asan_thread_exit(void *module, DWORD reason, void *reserved) { +- if (reason == DLL_THREAD_DETACH) { +- // Unpoison the thread's stack because the memory may be re-used.
+- NT_TIB *tib = (NT_TIB *)NtCurrentTeb(); +- uptr stackSize = (uptr)tib->StackBase - (uptr)tib->StackLimit; +- __asan_unpoison_memory_region(tib->StackLimit, stackSize); +- } +-} +- +-#pragma section(".CRT$XLY", long, read) // NOLINT +-__declspec(allocate(".CRT$XLY")) void(NTAPI *__asan_tls_exit)( +- void *, unsigned long, void *) = asan_thread_exit; +- + WIN_FORCE_LINK(__asan_dso_reg_hook) + + // }}} diff --git a/build/build-clang/unpoison-thread-stacks_clang_10.patch b/build/build-clang/unpoison-thread-stacks_clang_10.patch new file mode 100644 index 0000000000..563fa1d7bf --- /dev/null +++ b/build/build-clang/unpoison-thread-stacks_clang_10.patch @@ -0,0 +1,64 @@ +[winasan] Unpoison the stack in NtTerminateThread + +In long-running builds we've seen some ASan complaints during thread creation +that we suspect are due to leftover poisoning from previous threads whose stacks +occupied that memory. This patch adds a hook that unpoisons the stack just +before the NtTerminateThread syscall. + +Differential Revision: https://reviews.llvm.org/D52091 + +** Update for clang 9 ** : After some backouts, this patch eventually landed +upstream in a different form, as the TLS handler `asan_thread_exit`, but that +variant causes failures in our test suite, so revert the TLS handler in favor of +the interceptor approach from the first patch. + +diff --git a/compiler-rt/lib/asan/asan_win.cpp b/compiler-rt/lib/asan/asan_win.cpp +index 417892aaedd..5fe86db44f4 100644 +--- a/compiler-rt/lib/asan/asan_win.cpp ++++ b/compiler-rt/lib/asan/asan_win.cpp +@@ -154,6 +154,14 @@ INTERCEPTOR_WINAPI(HANDLE, CreateThread, LPSECURITY_ATTRIBUTES security, + thr_flags, tid); + } + ++INTERCEPTOR_WINAPI(void, NtTerminateThread, void *rcx) { ++ // Unpoison the terminating thread's stack because the memory may be re-used. ++ NT_TIB *tib = (NT_TIB *)NtCurrentTeb(); ++ uptr stackSize = (uptr)tib->StackBase - (uptr)tib->StackLimit; ++ __asan_unpoison_memory_region(tib->StackLimit, stackSize); ++ return REAL(NtTerminateThread(rcx)); ++} ++ + // }}} + + namespace __asan { +@@ -168,7 +176,9 @@ void InitializePlatformInterceptors() { + + ASAN_INTERCEPT_FUNC(CreateThread); + ASAN_INTERCEPT_FUNC(SetUnhandledExceptionFilter); +- ++ CHECK(::__interception::OverrideFunction("NtTerminateThread", ++ (uptr)WRAP(NtTerminateThread), ++ (uptr *)&REAL(NtTerminateThread))); + #ifdef _WIN64 + ASAN_INTERCEPT_FUNC(__C_specific_handler); + #else +@@ -380,19 +390,6 @@ __declspec(allocate(".CRT$XLAB")) void(NTAPI *__asan_tls_init)( + void *, unsigned long, void *) = asan_thread_init; + #endif + +-static void NTAPI asan_thread_exit(void *module, DWORD reason, void *reserved) { +- if (reason == DLL_THREAD_DETACH) { +- // Unpoison the thread's stack because the memory may be re-used. +- NT_TIB *tib = (NT_TIB *)NtCurrentTeb(); +- uptr stackSize = (uptr)tib->StackBase - (uptr)tib->StackLimit; +- __asan_unpoison_memory_region(tib->StackLimit, stackSize); +- } +-} +- +-#pragma section(".CRT$XLY", long, read) +-__declspec(allocate(".CRT$XLY")) void(NTAPI *__asan_tls_exit)( +- void *, unsigned long, void *) = asan_thread_exit; +- + WIN_FORCE_LINK(__asan_dso_reg_hook) + + // }}} diff --git a/build/build-infer/README b/build/build-infer/README new file mode 100644 index 0000000000..af11d9af3c --- /dev/null +++ b/build/build-infer/README @@ -0,0 +1,36 @@ +build-infer.py +============== + +A script to build infer from source. 
+ +``` +usage: build-infer.py [-h] -c CONFIG [-b BASE_DIR] [--clean] [--skip-tar] + +optional arguments: + -h, --help show this help message and exit + -c CONFIG, --config CONFIG + infer configuration file + -b BASE_DIR, --base-dir BASE_DIR + Base directory for code and build artifacts + --clean Clean the build directory + --skip-tar Skip tar packaging stage +``` + +Pre-requisites +-------------- +* Working build toolchain. +* opam +* git +* autoconf +* libsqlite3-dev +* CMake +* Ninja +* Python 3 + +Please use the latest available CMake for your platform to avoid surprises. + +Config file format +------------------ + +build-infer.py accepts a JSON config format with the following fields: + +* infer_revision: The infer revision to build. +* infer_repo: git repository for infer. +* patches: Optional list of patches to apply. \ No newline at end of file diff --git a/build/build-infer/build-infer.py b/build/build-infer/build-infer.py new file mode 100755 index 0000000000..e9a4804ce0 --- /dev/null +++ b/build/build-infer/build-infer.py @@ -0,0 +1,152 @@ +#!/usr/bin/env python3 +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import os +import subprocess +import json +import argparse +import sys +import shutil +from functools import reduce + + +def check_run(args, path): + print(" ".join(args) + " in " + path, file=sys.stderr) + subprocess.run(args, cwd=path, check=True) + + +def run_in(path, args, extra_env=None): + """ + Runs the given command in the directory specified by <path>, with + <extra_env> merged into the environment. + """ + env = dict(os.environ) + env.update(extra_env or {}) + print(" ".join(args) + " in " + path, file=sys.stderr) + subprocess.run(args, cwd=path, env=env, check=True) + + +def build_tar_package(tar, name, base, directories): + name = os.path.realpath(name) + run_in( + base, + [tar, "-c", "-%s" % ("J" if ".xz" in name else "j"), "-f", name] + directories, + ) + + +def is_git_repo(dir): + """Check whether the given directory is a git repository.""" + from subprocess import CalledProcessError + + try: + check_run(["git", "rev-parse"], dir) + return True + except CalledProcessError: + return False + + +def git_clone(main_dir, url, clone_dir, commit): + """ + Clones the repository from <url> into <clone_dir>, and brings the + repository to the state of <commit>. + """ + run_in(main_dir, ["git", "clone", url, clone_dir]) + run_in(clone_dir, ["git", "checkout", commit]) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument( + "-c", + "--config", + required=True, + type=argparse.FileType("r"), + help="Infer configuration file", + ) + parser.add_argument( + "-b", "--base-dir", help="Base directory for code and build artifacts" + ) + parser.add_argument( + "--clean", action="store_true", help="Clean the build directory" + ) + parser.add_argument( + "--skip-tar", action="store_true", help="Skip tar packaging stage" + ) + + args = parser.parse_args() + + # The directories end up in the debug info, so the easy way of getting + # a reproducible build is to run it in a known absolute directory. + # We use a directory that is registered as a volume in the Docker image.
+ if args.base_dir: + base_dir = args.base_dir + else: + base_dir = reduce( + os.path.join, [os.sep + "builds", "worker", "workspace", "moz-toolchain"] + ) + infer_dir = os.path.join(base_dir, "infer") + source_dir = os.path.join(infer_dir, "src") + build_dir = os.path.join(infer_dir, "build") + + if args.clean: + shutil.rmtree(build_dir) + os.sys.exit(0) + + config = json.load(args.config) + infer_revision = config["infer_revision"] + infer_repo = config["infer_repo"] + + for folder in [infer_dir, source_dir, build_dir]: + os.makedirs(folder, exist_ok=True) + + # clone infer + if not is_git_repo(source_dir): + # git doesn't like cloning into a non-empty folder. If src is not a git + # repo then just remove it in order to reclone + shutil.rmtree(source_dir) + git_clone(infer_dir, infer_repo, source_dir, infer_revision) + # apply a few patches + dir_path = os.path.dirname(os.path.realpath(__file__)) + # clean the git directory by reseting all changes + git_commands = [["clean", "-f"], ["reset", "--hard"]] + for command in git_commands: + run_in(source_dir, ["git"] + command) + for p in config.get("patches", []): + run_in(source_dir, ["git", "apply", os.path.join(dir_path, p)]) + # configure opam + run_in(source_dir, ["opam", "init", "--no-setup", "--disable-sandboxing"]) + # build infer + run_in(source_dir, ["./build-infer.sh", "java"], extra_env={"NO_CMAKE_STRIP": "1"}) + + package_name = "infer" + infer_package = os.path.join(os.getcwd(), package_name) + # We need to create a package with all of the depended libraries injected in it + run_in( + source_dir, + [ + "make", + "install-with-libs", + "BUILD_MODE=opt", + "PATCHELF=patchelf", + "DESTDIR={}".format(infer_package), + "libdir_relative_to_bindir=../lib", + ], + ) + + infer_package_with_pref = os.path.join(infer_package, "usr") + if not args.skip_tar: + os.rename( + os.path.join(infer_package_with_pref, "local"), + os.path.join(infer_package_with_pref, "infer"), + ) + build_tar_package( + "tar", + "%s.tar.xz" % (package_name), + infer_package_with_pref, + [ + os.path.join("infer", "bin"), + os.path.join("infer", "lib"), + os.path.join("infer", "share"), + ], + ) diff --git a/build/build-infer/infer-linux64.json b/build/build-infer/infer-linux64.json new file mode 100644 index 0000000000..b6331b1d7e --- /dev/null +++ b/build/build-infer/infer-linux64.json @@ -0,0 +1,5 @@ +{ + "infer_repo": "https://github.com/facebook/infer", + "infer_revision": "99464c01da5809e7159ed1a75ef10f60d34506a4", + "patches": [] +} diff --git a/build/build-rust/README b/build/build-rust/README new file mode 100644 index 0000000000..34f7af63a5 --- /dev/null +++ b/build/build-rust/README @@ -0,0 +1,3 @@ +This directory is for patches to rust toolchains, see these docs for details: + +https://firefox-source-docs.mozilla.org/build/buildsystem/toolchains.html \ No newline at end of file diff --git a/build/build-rust/example.patch b/build/build-rust/example.patch new file mode 100644 index 0000000000..09a00bf22f --- /dev/null +++ b/build/build-rust/example.patch @@ -0,0 +1,12 @@ +diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md +index 2a4c42ea0a4..c94b24e9cb1 100644 +--- a/CONTRIBUTING.md ++++ b/CONTRIBUTING.md +@@ -1,5 +1,7 @@ + # Contributing to Rust + ++Hello this is a harmless little example patch! ++ + Thank you for your interest in contributing to Rust! + + To get started, read the [Getting Started] guide in the [rustc-dev-guide]. 
diff --git a/build/build_virtualenv_packages.txt b/build/build_virtualenv_packages.txt new file mode 100644 index 0000000000..87b126012a --- /dev/null +++ b/build/build_virtualenv_packages.txt @@ -0,0 +1,3 @@ +packages.txt:build/common_virtualenv_packages.txt +python3:mozilla.pth:third_party/python/glean_parser +set-variable MOZBUILD_VIRTUALENV=1 diff --git a/build/buildconfig.py b/build/buildconfig.py new file mode 100644 index 0000000000..3fcc82ffdb --- /dev/null +++ b/build/buildconfig.py @@ -0,0 +1,18 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import sys +from mozbuild.base import MozbuildObject +from mozbuild.backend.configenvironment import PartialConfigEnvironment + +config = MozbuildObject.from_environment() +partial_config = PartialConfigEnvironment(config.topobjdir) + +for var in ("topsrcdir", "topobjdir"): + value = getattr(config, var) + setattr(sys.modules[__name__], var, value) + +for var in ("defines", "substs", "get_dependencies"): + value = getattr(partial_config, var) + setattr(sys.modules[__name__], var, value) diff --git a/build/cargo-host-linker b/build/cargo-host-linker new file mode 100755 index 0000000000..cbd0472bf7 --- /dev/null +++ b/build/cargo-host-linker @@ -0,0 +1,3 @@ +#!/bin/sh +# See comment in cargo-linker. +eval ${MOZ_CARGO_WRAP_HOST_LD} ${MOZ_CARGO_WRAP_HOST_LDFLAGS} '"$@"' diff --git a/build/cargo-host-linker.bat b/build/cargo-host-linker.bat new file mode 100644 index 0000000000..80e6eab273 --- /dev/null +++ b/build/cargo-host-linker.bat @@ -0,0 +1,3 @@ +@echo off +REM See comment in cargo-linker (without extension) +%MOZ_CARGO_WRAP_HOST_LD% %MOZ_CARGO_WRAP_HOST_LDFLAGS% %* diff --git a/build/cargo-linker b/build/cargo-linker new file mode 100755 index 0000000000..394dabcf7f --- /dev/null +++ b/build/cargo-linker @@ -0,0 +1,22 @@ +#!/bin/sh + +# If you want to use a custom linker with Cargo, Cargo requires that you +# specify it in Cargo.toml or via the matching environment variable. +# Passing extra options to the linker is possible with Cargo via +# RUSTFLAGS='-C link-args', but testing showed that doing this reliably +# was difficult. +# +# Our solution to these problems is to use this wrapper script. We pass +# in the LD and the LDFLAGS to use via environment variables. Note that +# we do *not* quote either MOZ_CARGO_WRAP variable: +# +# * MOZ_CARGO_WRAP_LD is equivalent to CC on Unix-y platforms, and CC +# frequently has additional arguments in addition to the compiler +# itself. +# * MOZ_CARGO_WRAP_LDFLAGS contains space-separated arguments to pass, +# and not quoting it ensures that each of those arguments is passed +# as a separate argument to the actual LD. +# +# $@ is doubly quoted for the eval. See bug 1418598. + +eval ${MOZ_CARGO_WRAP_LD} ${MOZ_CARGO_WRAP_LDFLAGS} '"$@"' diff --git a/build/cargo-linker.bat b/build/cargo-linker.bat new file mode 100644 index 0000000000..ccde26c8ef --- /dev/null +++ b/build/cargo-linker.bat @@ -0,0 +1,3 @@ +@echo off +REM See comment in cargo-linker (without extension) +%MOZ_CARGO_WRAP_LD% %MOZ_CARGO_WRAP_LDFLAGS% %* diff --git a/build/checksums.py b/build/checksums.py new file mode 100755 index 0000000000..d82c721117 --- /dev/null +++ b/build/checksums.py @@ -0,0 +1,156 @@ +#!/usr/bin/python +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. 
If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +from __future__ import with_statement + +from optparse import OptionParser +import hashlib +import logging +import os + +logger = logging.getLogger("checksums.py") + + +def digest_file(filename, digest, chunk_size=131072): + """Produce a checksum for the file specified by 'filename'. 'filename' + is a string path to a file that is opened and read in this function. The + checksum algorithm is specified by 'digest' and is a valid OpenSSL + algorithm. If the digest used is not valid or Python's hashlib doesn't + work, the None object will be returned instead. The size of blocks + that this function will read from the file object it opens based on + 'filename' can be specified by 'chunk_size', which defaults to 128K (131072 bytes)""" + assert not os.path.isdir(filename), "this function only works with files" + + logger.debug("Creating new %s object" % digest) + h = hashlib.new(digest) + with open(filename, "rb") as f: + while True: + data = f.read(chunk_size) + if not data: + logger.debug("Finished reading in file") + break + h.update(data) + hash = h.hexdigest() + logger.debug("Hash for %s is %s" % (filename, hash)) + return hash + + +def process_files(dirs, output_filename, digests): + """This function takes a list of directory names, 'dirs'. It will then + compute the checksum for each of the files in these directories by opening + the files. Once each file is read and its checksum is computed, this + function will write the information to the file specified by + 'output_filename'. The path written in the output file is relative to the + directory being walked. The output file is closed before the function + returns. The algorithms to compute checksums with are specified by + 'digests', which needs to be a list of valid OpenSSL algorithm names. + + The output file is written in the format: + <hash> <algorithm> <filesize> <filepath> + Example: + d1fa09ae4220 sha1 14250744 firefox-4.0b6pre.en-US.mac64.dmg + """ + + if os.path.exists(output_filename): + logger.debug('Overwriting existing checksums file "%s"' % output_filename) + else: + logger.debug('Creating a new checksums file "%s"' % output_filename) + with open(output_filename, "w+") as output: + for d in dirs: + for root, dirs, files in os.walk(d): + for f in files: + full = os.path.join(root, f) + rel = os.path.relpath(full, d) + + for digest in digests: + hash = digest_file(full, digest) + + output.write( + "%s %s %s %s\n" % (hash, digest, os.path.getsize(full), rel) + ) + + +def setup_logging(level=logging.DEBUG): + """This function sets up the logging module with a specifiable log level. + The default log level is DEBUG.
+ + The output is in the format: + - + Example: + DEBUG - Finished reading in file""" + + logger = logging.getLogger("checksums.py") + logger.setLevel(logging.DEBUG) + handler = logging.StreamHandler() + handler.setLevel(level) + formatter = logging.Formatter("%(levelname)s - %(message)s") + handler.setFormatter(formatter) + logger.addHandler(handler) + + +def main(): + """This is a main function that parses arguments, sets up logging + and generates a checksum file""" + # Parse command line arguments + parser = OptionParser() + parser.add_option( + "-d", + "--digest", + help="checksum algorithm to use", + action="append", + dest="digests", + ) + parser.add_option( + "-o", + "--output", + help="output file to use", + action="store", + dest="outfile", + default="checksums", + ) + parser.add_option( + "-v", + "--verbose", + help="Be noisy (takes precedence over quiet)", + action="store_true", + dest="verbose", + default=False, + ) + parser.add_option( + "-q", + "--quiet", + help="Be quiet", + action="store_true", + dest="quiet", + default=False, + ) + + options, args = parser.parse_args() + + # Figure out which logging level to use + if options.verbose: + loglevel = logging.DEBUG + elif options.quiet: + loglevel = logging.ERROR + else: + loglevel = logging.INFO + + # Set up logging + setup_logging(loglevel) + + # Validate the digest type to use + if not options.digests: + options.digests = ["sha1"] + + for i in args: + if not os.path.isdir(i): + logger.error("%s is not a directory" % i) + exit(1) + + process_files(args, options.outfile, options.digests) + + +if __name__ == "__main__": + main() diff --git a/build/clang-plugin/.clang-format b/build/clang-plugin/.clang-format new file mode 100644 index 0000000000..9b3aa8b721 --- /dev/null +++ b/build/clang-plugin/.clang-format @@ -0,0 +1 @@ +BasedOnStyle: LLVM diff --git a/build/clang-plugin/ArithmeticArgChecker.cpp b/build/clang-plugin/ArithmeticArgChecker.cpp new file mode 100644 index 0000000000..0042961b32 --- /dev/null +++ b/build/clang-plugin/ArithmeticArgChecker.cpp @@ -0,0 +1,60 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#include "ArithmeticArgChecker.h" +#include "CustomMatchers.h" + +void ArithmeticArgChecker::registerMatchers(MatchFinder *AstMatcher) { + AstMatcher->addMatcher( + callExpr(allOf(hasDeclaration(noArithmeticExprInArgs()), + anyOf(hasDescendant( + binaryOperator( + allOf(binaryArithmeticOperator(), + hasLHS(hasDescendant(declRefExpr())), + hasRHS(hasDescendant(declRefExpr())))) + .bind("node")), + hasDescendant( + unaryOperator( + allOf(unaryArithmeticOperator(), + hasUnaryOperand(allOf( + hasType(builtinType()), + anyOf(hasDescendant(declRefExpr()), + declRefExpr()))))) + .bind("node"))))) + .bind("call"), + this); + AstMatcher->addMatcher( + cxxConstructExpr( + allOf(hasDeclaration(noArithmeticExprInArgs()), + anyOf(hasDescendant( + binaryOperator( + allOf(binaryArithmeticOperator(), + hasLHS(hasDescendant(declRefExpr())), + hasRHS(hasDescendant(declRefExpr())))) + .bind("node")), + hasDescendant( + unaryOperator( + allOf(unaryArithmeticOperator(), + hasUnaryOperand(allOf( + hasType(builtinType()), + anyOf(hasDescendant(declRefExpr()), + declRefExpr()))))) + .bind("node"))))) + .bind("call"), + this); +} + +void ArithmeticArgChecker::check(const MatchFinder::MatchResult &Result) { + const char *Error = + "cannot pass an arithmetic expression of built-in types to %0"; + const Expr *Expression = Result.Nodes.getNodeAs("node"); + if (const CallExpr *Call = Result.Nodes.getNodeAs("call")) { + diag(Expression->getBeginLoc(), Error, DiagnosticIDs::Error) + << Call->getDirectCallee(); + } else if (const CXXConstructExpr *Ctr = + Result.Nodes.getNodeAs("call")) { + diag(Expression->getBeginLoc(), Error, DiagnosticIDs::Error) + << Ctr->getConstructor(); + } +} diff --git a/build/clang-plugin/ArithmeticArgChecker.h b/build/clang-plugin/ArithmeticArgChecker.h new file mode 100644 index 0000000000..62165b716b --- /dev/null +++ b/build/clang-plugin/ArithmeticArgChecker.h @@ -0,0 +1,18 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef ArithmeticArgChecker_h__ +#define ArithmeticArgChecker_h__ + +#include "plugin.h" + +class ArithmeticArgChecker : public BaseCheck { +public: + ArithmeticArgChecker(StringRef CheckName, ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; +}; + +#endif diff --git a/build/clang-plugin/AssertAssignmentChecker.cpp b/build/clang-plugin/AssertAssignmentChecker.cpp new file mode 100644 index 0000000000..467de28d63 --- /dev/null +++ b/build/clang-plugin/AssertAssignmentChecker.cpp @@ -0,0 +1,20 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#include "AssertAssignmentChecker.h" +#include "CustomMatchers.h" + +void AssertAssignmentChecker::registerMatchers(MatchFinder *AstMatcher) { + AstMatcher->addMatcher( + callExpr(isAssertAssignmentTestFunc()).bind("funcCall"), this); +} + +void AssertAssignmentChecker::check(const MatchFinder::MatchResult &Result) { + const CallExpr *FuncCall = Result.Nodes.getNodeAs("funcCall"); + + if (FuncCall && hasSideEffectAssignment(FuncCall)) { + diag(FuncCall->getBeginLoc(), "Forbidden assignment in assert expression", + DiagnosticIDs::Error); + } +} diff --git a/build/clang-plugin/AssertAssignmentChecker.h b/build/clang-plugin/AssertAssignmentChecker.h new file mode 100644 index 0000000000..5e47b62183 --- /dev/null +++ b/build/clang-plugin/AssertAssignmentChecker.h @@ -0,0 +1,18 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef AssertAssignmentChecker_h__ +#define AssertAssignmentChecker_h__ + +#include "plugin.h" + +class AssertAssignmentChecker : public BaseCheck { +public: + AssertAssignmentChecker(StringRef CheckName, ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; +}; + +#endif diff --git a/build/clang-plugin/BaseCheck.h b/build/clang-plugin/BaseCheck.h new file mode 100644 index 0000000000..867b82d2ad --- /dev/null +++ b/build/clang-plugin/BaseCheck.h @@ -0,0 +1,34 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef BaseCheck_h__ +#define BaseCheck_h__ + +class MozContext {}; +typedef MozContext ContextType; + +class BaseCheck : public MatchFinder::MatchCallback { +public: + BaseCheck(StringRef CheckName, ContextType *Context) {} + virtual void registerMatchers(MatchFinder *Finder) {} + virtual void registerPPCallbacks(CompilerInstance &CI) {} + virtual void check(const MatchFinder::MatchResult &Result) {} + DiagnosticBuilder diag(SourceLocation Loc, StringRef Description, + DiagnosticIDs::Level Level = DiagnosticIDs::Warning) { + DiagnosticsEngine &Diag = Context->getDiagnostics(); + unsigned ID = Diag.getDiagnosticIDs()->getCustomDiagID(Level, Description); + return Diag.Report(Loc, ID); + } + +private: + void run(const MatchFinder::MatchResult &Result) override { + Context = Result.Context; + check(Result); + } + +private: + ASTContext *Context; +}; + +#endif diff --git a/build/clang-plugin/CanRunScriptChecker.cpp b/build/clang-plugin/CanRunScriptChecker.cpp new file mode 100644 index 0000000000..f75f0380e0 --- /dev/null +++ b/build/clang-plugin/CanRunScriptChecker.cpp @@ -0,0 +1,380 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +/** + * This checker implements the "can run script" analysis. The idea is to detect + * functions that can run script that are being passed reference-counted + * arguments (including "this") whose refcount might go to zero as a result of + * the script running. We want to prevent that. 
+ * + * The approach is to attempt to enforce the following invariants on the call + * graph: + * + * 1) Any caller of a MOZ_CAN_RUN_SCRIPT function is itself MOZ_CAN_RUN_SCRIPT. + * 2) If a virtual MOZ_CAN_RUN_SCRIPT method overrides a base class method, + * that base class method is also MOZ_CAN_RUN_SCRIPT. + * + * Invariant 2 ensures that we don't accidentally call a MOZ_CAN_RUN_SCRIPT + * function via a base-class virtual call. Invariant 1 ensures that + * the property of being able to run script propagates up the callstack. There + * is an opt-out for invariant 1: A function (declaration _or_ implementation) + * can be decorated with MOZ_CAN_RUN_SCRIPT_BOUNDARY to indicate that we do not + * require it or any of its callers to be MOZ_CAN_RUN_SCRIPT even if it calls + * MOZ_CAN_RUN_SCRIPT functions. + * + * There are two known holes in invariant 1, apart from the + * MOZ_CAN_RUN_SCRIPT_BOUNDARY opt-out: + * + * - Functions called via function pointers can be MOZ_CAN_RUN_SCRIPT even if + * their caller is not, because we have no way to determine from the function + * pointer what function is being called. + * - MOZ_CAN_RUN_SCRIPT destructors can happen in functions that are not + * MOZ_CAN_RUN_SCRIPT. + * https://bugzilla.mozilla.org/show_bug.cgi?id=1535523 tracks this. + * + * Given those invariants we then require that when calling a MOZ_CAN_RUN_SCRIPT + * function all refcounted arguments (including "this") satisfy one of these + * conditions: + * a) The argument is held via a strong pointer on the stack. + * b) The argument is a const strong pointer member of "this". We know "this" + * is being kept alive, and a const strong pointer member can't drop its ref + * until "this" dies. + * c) The argument is an argument of the caller (and hence held by a strong + * pointer somewhere higher up the callstack). + * d) The argument is explicitly annotated with MOZ_KnownLive, which indicates + * that something is guaranteed to keep it alive (e.g. it's rooted via a JS + * reflector). + * e) The argument is constexpr and therefore cannot disappear. + */ + +#include "CanRunScriptChecker.h" +#include "CustomMatchers.h" +#include "clang/Lex/Lexer.h" + +void CanRunScriptChecker::registerMatchers(MatchFinder *AstMatcher) { + auto Refcounted = qualType(hasDeclaration(cxxRecordDecl(isRefCounted()))); + auto StackSmartPtr = + ignoreTrivials(declRefExpr(to(varDecl(hasAutomaticStorageDuration(), + hasType(isSmartPtrToRefCounted()))))); + auto ConstMemberOfThisSmartPtr = + memberExpr(hasType(isSmartPtrToRefCounted()), hasType(isConstQualified()), + hasObjectExpression(cxxThisExpr())); + // A smartptr can be known-live for three reasons: + // 1) It's declared on the stack. + // 2) It's a const member of "this". We know "this" is alive (recursively) + // and const members can't change their value hence can't drop their + // reference until "this" gets destroyed. + // 3) It's an immediate temporary being constructed at the point where the + // call is happening. + auto KnownLiveSmartPtr = anyOf( + StackSmartPtr, ConstMemberOfThisSmartPtr, + ignoreTrivials(cxxConstructExpr(hasType(isSmartPtrToRefCounted())))); + + auto MozKnownLiveCall = + ignoreTrivials(callExpr(callee(functionDecl(hasName("MOZ_KnownLive"))))); + + // Params of the calling function are presumed live, because it itself should + // be MOZ_CAN_RUN_SCRIPT. Note that this is subject to + // https://bugzilla.mozilla.org/show_bug.cgi?id=1537656 a the moment. 
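To make the invariants and known-live conditions above concrete, the sketch below shows the kind of caller-side code this checker accepts and rejects. It is illustrative only: the RefPtr, MOZ_KnownLive and EventTarget definitions are stand-ins invented for the example (in the real tree the annotations and smart pointers come from mfbt and carry the attributes the plugin reads), and the comments mark which condition each call relies on.

```
// Sketch only -- stand-in definitions so the example parses outside the tree.
#define MOZ_CAN_RUN_SCRIPT
template <typename T>
struct RefPtr {              // strong-reference smart pointer stand-in
  T* mRawPtr = nullptr;
  T* operator->() const { return mRawPtr; }
};
template <typename T>
T& MOZ_KnownLive(T& aValue) { return aValue; }  // real one vouches for liveness

struct EventTarget {
  void AddRef();
  void Release();
  MOZ_CAN_RUN_SCRIPT void DispatchTrustedEvent();  // may end up running script

  MOZ_CAN_RUN_SCRIPT void Notify(EventTarget* aParam) {  // invariant 1: caller annotated
    RefPtr<EventTarget> local = mOther;
    local->DispatchTrustedEvent();                  // OK: (a) strong ref on the stack
    aParam->DispatchTrustedEvent();                 // OK: (c) parameter of an annotated caller
    mConstMember->DispatchTrustedEvent();           // OK: (b) const strong member of "this"
    MOZ_KnownLive(mOther)->DispatchTrustedEvent();  // OK: (d) explicitly vouched for
    mOther->DispatchTrustedEvent();                 // flagged: refcount could go to zero mid-call
  }

  RefPtr<EventTarget> mOther;
  const RefPtr<EventTarget> mConstMember;
};

void NotAnnotated(EventTarget* aTarget) {
  aTarget->DispatchTrustedEvent();  // flagged: caller is not MOZ_CAN_RUN_SCRIPT (invariant 1)
}
```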
+ auto KnownLiveParam = anyOf( + // "this" is OK + cxxThisExpr(), + // A parameter of the calling function is OK. + declRefExpr(to(parmVarDecl()))); + + // A matcher that matches various things that are known to be live directly, + // without making any assumptions about operators. + auto KnownLiveBase = anyOf( + // Things that are known to be a stack or immutable refptr. + KnownLiveSmartPtr, + // MOZ_KnownLive() calls. + MozKnownLiveCall, + // Params of the caller function. + KnownLiveParam, + // Constexpr things. + declRefExpr(to(varDecl(isConstexpr())))); + + // A matcher that matches various known-live things that don't involve + // non-unary operators. + auto KnownLiveSimple = anyOf( + // Things that are just known live. + KnownLiveBase, + // Method calls on a live things that are smart ptrs. Note that we don't + // want to allow general method calls on live things, because those can + // return non-live objects (e.g. consider "live_pointer->foo()" as an + // example). For purposes of this analysis we are assuming the method + // calls on smart ptrs all just return the pointer inside, + cxxMemberCallExpr( + on(allOf(hasType(isSmartPtrToRefCounted()), KnownLiveBase))), + // operator* or operator-> on a thing that is already known to be live. + cxxOperatorCallExpr(anyOf(hasOverloadedOperatorName("*"), + hasOverloadedOperatorName("->")), + hasAnyArgument(KnownLiveBase), argumentCountIs(1)), + // A dereference on a thing that is known to be live. This is _not_ + // caught by the "operator* or operator->" clause above, because + // cxxOperatorCallExpr() only catches cases when a class defines + // operator*. The default (built-in) operator* matches unaryOperator() + // instead.), + unaryOperator( + unaryDereferenceOperator(), + hasUnaryOperand( + // If we're doing *someArg, the argument of the dereference is an + // ImplicitCastExpr LValueToRValue which has the DeclRefExpr as an + // argument. We could try to match that explicitly with a custom + // matcher (none of the built-in matchers seem to match on the + // thing being cast for an implicitCastExpr), but it's simpler to + // just use ignoreTrivials to strip off the cast. + ignoreTrivials(KnownLiveBase))), + // Taking a pointer to a live reference. We explicitly want to exclude + // things that are not of type reference-to-refcounted or type refcounted, + // because if someone takes a pointer to a pointer to refcounted or a + // pointer to a smart ptr and passes those in to a callee that definitely + // does not guarantee liveness; in fact the callee could modify those + // things! In practice they would be the wrong type anyway, though, so + // it's hard to add a test for this. + unaryOperator(hasOperatorName("&"), + hasUnaryOperand(allOf(anyOf(hasType(references(Refcounted)), + hasType(Refcounted)), + ignoreTrivials(KnownLiveBase))))); + + auto KnownLive = anyOf( + // Anything above, of course. + KnownLiveSimple, + // Conditional operators where both arms are live. + conditionalOperator(hasFalseExpression(ignoreTrivials(KnownLiveSimple)), + hasTrueExpression(ignoreTrivials(KnownLiveSimple))) + // We're not handling cases like a dereference of a conditional operator, + // mostly because handling a dereference in general is so ugly. I + // _really_ wish I could just write a recursive matcher here easily. + ); + + auto InvalidArg = ignoreTrivialsConditional( + // We want to consider things if there is anything refcounted involved, + // including in any of the trivials that we otherwise strip off. 
+ anyOf(hasType(Refcounted), hasType(pointsTo(Refcounted)), + hasType(references(Refcounted)), hasType(isSmartPtrToRefCounted())), + // We want to find any expression, + expr( + // which is not known live, + unless(KnownLive), + // and which is not a default arg with value nullptr, since those are + // always safe, + unless(cxxDefaultArgExpr(isNullDefaultArg())), + // and which is not a literal nullptr, + unless(cxxNullPtrLiteralExpr()), expr().bind("invalidArg"))); + + // A matcher which will mark the first invalid argument it finds invalid, but + // will always match, even if it finds no invalid arguments, so it doesn't + // preclude other matchers from running and maybe finding invalid args. + auto OptionalInvalidExplicitArg = anyOf( + // We want to find any argument which is invalid. + hasAnyArgument(InvalidArg), + + // This makes this matcher optional. + anything()); + + // Please note that the hasCanRunScriptAnnotation() matchers are not present + // directly in the cxxMemberCallExpr, callExpr and constructExpr matchers + // because we check that the corresponding functions can run script later in + // the checker code. + AstMatcher->addMatcher( + expr( + anyOf( + // We want to match a method call expression, + cxxMemberCallExpr( + // which optionally has an invalid arg, + OptionalInvalidExplicitArg, + // or which optionally has an invalid this argument, + anyOf(on(InvalidArg), anything()), expr().bind("callExpr")), + // or a regular call expression, + callExpr( + // which optionally has an invalid arg. + OptionalInvalidExplicitArg, expr().bind("callExpr")), + // or a construct expression, + cxxConstructExpr( + // which optionally has an invalid arg. + OptionalInvalidExplicitArg, expr().bind("constructExpr"))), + + anyOf( + // We want to match the parent function. + forFunction(functionDecl().bind("nonCanRunScriptParentFunction")), + + // ... optionally. + anything())), + this); +} + +void CanRunScriptChecker::onStartOfTranslationUnit() { + IsFuncSetBuilt = false; + CanRunScriptFuncs.clear(); +} + +namespace { +/// This class is a callback used internally to match function declarations with +/// the MOZ_CAN_RUN_SCRIPT annotation, adding these functions to the +/// can-run-script function set and making sure the functions they override (if +/// any) also have the annotation. +class FuncSetCallback : public MatchFinder::MatchCallback { +public: + FuncSetCallback(CanRunScriptChecker &Checker, + std::unordered_set &FuncSet) + : CanRunScriptFuncs(FuncSet), Checker(Checker) {} + + void run(const MatchFinder::MatchResult &Result) override; + +private: + /// This method checks the methods overriden by the given parameter. + void checkOverriddenMethods(const CXXMethodDecl *Method); + + std::unordered_set &CanRunScriptFuncs; + CanRunScriptChecker &Checker; +}; + +void FuncSetCallback::run(const MatchFinder::MatchResult &Result) { + const FunctionDecl *Func; + if (auto *Lambda = Result.Nodes.getNodeAs("lambda")) { + Func = Lambda->getCallOperator(); + if (!Func || !hasCustomAttribute(Func)) + return; + } else { + Func = Result.Nodes.getNodeAs("canRunScriptFunction"); + } + + CanRunScriptFuncs.insert(Func); + + // If this is a method, we check the methods it overrides. 
+ if (auto *Method = dyn_cast(Func)) { + checkOverriddenMethods(Method); + } +} + +void FuncSetCallback::checkOverriddenMethods(const CXXMethodDecl *Method) { + for (auto OverriddenMethod : Method->overridden_methods()) { + if (!hasCustomAttribute(OverriddenMethod)) { + const char *ErrorNonCanRunScriptOverridden = + "functions marked as MOZ_CAN_RUN_SCRIPT cannot override functions " + "that are not marked MOZ_CAN_RUN_SCRIPT"; + const char *NoteNonCanRunScriptOverridden = + "overridden function declared here"; + + Checker.diag(Method->getLocation(), ErrorNonCanRunScriptOverridden, + DiagnosticIDs::Error); + Checker.diag(OverriddenMethod->getLocation(), + NoteNonCanRunScriptOverridden, DiagnosticIDs::Note); + } + } +} +} // namespace + +void CanRunScriptChecker::buildFuncSet(ASTContext *Context) { + // We create a match finder. + MatchFinder Finder; + // We create the callback which will be called when we find a function with + // a MOZ_CAN_RUN_SCRIPT annotation. + FuncSetCallback Callback(*this, CanRunScriptFuncs); + // We add the matcher to the finder, linking it to our callback. + Finder.addMatcher( + functionDecl(hasCanRunScriptAnnotation()).bind("canRunScriptFunction"), + &Callback); + Finder.addMatcher(lambdaExpr().bind("lambda"), &Callback); + // We start the analysis, given the ASTContext our main checker is in. + Finder.matchAST(*Context); +} + +void CanRunScriptChecker::check(const MatchFinder::MatchResult &Result) { + + // If the set of functions which can run script is not yet built, then build + // it. + if (!IsFuncSetBuilt) { + buildFuncSet(Result.Context); + IsFuncSetBuilt = true; + } + + const char *ErrorInvalidArg = + "arguments must all be strong refs or caller's parameters when calling a " + "function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object " + "argument). '%0' is neither."; + + const char *ErrorNonCanRunScriptParent = + "functions marked as MOZ_CAN_RUN_SCRIPT can only be called from " + "functions also marked as MOZ_CAN_RUN_SCRIPT"; + const char *NoteNonCanRunScriptParent = "caller function declared here"; + + const Expr *InvalidArg; + if (const CXXDefaultArgExpr *defaultArg = + Result.Nodes.getNodeAs("invalidArg")) { + InvalidArg = defaultArg->getExpr(); + } else { + InvalidArg = Result.Nodes.getNodeAs("invalidArg"); + } + + const CallExpr *Call = Result.Nodes.getNodeAs("callExpr"); + // If we don't find the FunctionDecl linked to this call or if it's not marked + // as can-run-script, consider that we didn't find a match. + if (Call && (!Call->getDirectCallee() || + !CanRunScriptFuncs.count(Call->getDirectCallee()))) { + Call = nullptr; + } + + const CXXConstructExpr *Construct = + Result.Nodes.getNodeAs("constructExpr"); + + // If we don't find the CXXConstructorDecl linked to this construct expression + // or if it's not marked as can-run-script, consider that we didn't find a + // match. + if (Construct && (!Construct->getConstructor() || + !CanRunScriptFuncs.count(Construct->getConstructor()))) { + Construct = nullptr; + } + + const FunctionDecl *ParentFunction = + Result.Nodes.getNodeAs("nonCanRunScriptParentFunction"); + // If the parent function can run script, consider that we didn't find a match + // because we only care about parent functions which can't run script. + // + // In addition, If the parent function is annotated as a + // CAN_RUN_SCRIPT_BOUNDARY, we don't want to complain about it calling a + // CAN_RUN_SCRIPT function. 
This is a mechanism to opt out of the infectious + // nature of CAN_RUN_SCRIPT which is necessary in some tricky code like + // Bindings. + if (ParentFunction && + (CanRunScriptFuncs.count(ParentFunction) || + hasCustomAttribute(ParentFunction))) { + ParentFunction = nullptr; + } + + // Get the call range from either the CallExpr or the ConstructExpr. + SourceRange CallRange; + if (Call) { + CallRange = Call->getSourceRange(); + } else if (Construct) { + CallRange = Construct->getSourceRange(); + } else { + // If we have neither a Call nor a Construct, we have nothing do to here. + return; + } + + // If we have an invalid argument in the call, we emit the diagnostic to + // signal it. + if (InvalidArg) { + const StringRef invalidArgText = Lexer::getSourceText( + CharSourceRange::getTokenRange(InvalidArg->getSourceRange()), + Result.Context->getSourceManager(), Result.Context->getLangOpts()); + diag(InvalidArg->getExprLoc(), ErrorInvalidArg, DiagnosticIDs::Error) + << InvalidArg->getSourceRange() << invalidArgText; + } + + // If the parent function is not marked as MOZ_CAN_RUN_SCRIPT, we emit an + // error and a not indicating it. + if (ParentFunction) { + assert(!hasCustomAttribute(ParentFunction) && + "Matcher missed something"); + + diag(CallRange.getBegin(), ErrorNonCanRunScriptParent, DiagnosticIDs::Error) + << CallRange; + + diag(ParentFunction->getCanonicalDecl()->getLocation(), + NoteNonCanRunScriptParent, DiagnosticIDs::Note); + } +} diff --git a/build/clang-plugin/CanRunScriptChecker.h b/build/clang-plugin/CanRunScriptChecker.h new file mode 100644 index 0000000000..4516609999 --- /dev/null +++ b/build/clang-plugin/CanRunScriptChecker.h @@ -0,0 +1,31 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef CanRunScriptChecker_h__ +#define CanRunScriptChecker_h__ + +#include "plugin.h" +#include + +class CanRunScriptChecker : public BaseCheck { +public: + CanRunScriptChecker(StringRef CheckName, ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; + + // Simply initialize the can-run-script function set at the beginning of each + // translation unit. + void onStartOfTranslationUnit() override; + +private: + /// Runs the inner matcher on the AST to find all the can-run-script + /// functions using custom rules (not only the annotation). + void buildFuncSet(ASTContext *Context); + + bool IsFuncSetBuilt; + std::unordered_set CanRunScriptFuncs; +}; + +#endif diff --git a/build/clang-plugin/Checks.inc b/build/clang-plugin/Checks.inc new file mode 100644 index 0000000000..1630aa192c --- /dev/null +++ b/build/clang-plugin/Checks.inc @@ -0,0 +1,43 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +// The list of checker classes that are compatible with clang-tidy. 
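The Checks.inc entries that follow form an X-macro list: each consumer defines CHECK(cls, name) to whatever expansion it needs, includes the file, and undefines the macro again, so one list drives several pieces of generated code. The plugin's real consumers are not shown in this hunk; the snippet below is only a plausible, hypothetical consumer that turns the list into a table of check names, assuming Checks.inc is on the include path.

```
// Hypothetical consumer of Checks.inc (a sketch, not the plugin's actual code).
// Each CHECK(cls, "name") entry expands to the string literal "name" followed
// by a newline, and adjacent string literals concatenate into one big string.
#include <cstdio>

static const char gCheckNames[] =
#define CHECK(cls, name) name "\n"
#include "Checks.inc"
#undef CHECK
    ;

int main() {
  std::fputs(gCheckNames, stdout);  // prints one check name per line
  return 0;
}
```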
+ +CHECK(ArithmeticArgChecker, "arithmetic-argument") +CHECK(AssertAssignmentChecker, "assignment-in-assert") +CHECK(CanRunScriptChecker, "can-run-script") +CHECK(DanglingOnTemporaryChecker, "dangling-on-temporary") +CHECK(ExplicitImplicitChecker, "implicit-constructor") +CHECK(ExplicitOperatorBoolChecker, "explicit-operator-bool") +CHECK(KungFuDeathGripChecker, "kungfu-death-grip") +#ifdef TARGET_IS_WINDOWS +CHECK(LoadLibraryUsageChecker, "load-library-usage") +CHECK(FopenUsageChecker, "fopen-usage") +#endif +CHECK(MustOverrideChecker, "must-override") +CHECK(MustReturnFromCallerChecker, "must-return-from-caller") +CHECK(MustUseChecker, "must-use") +CHECK(NaNExprChecker, "nan-expr") +CHECK(NoPrincipalGetURI, "no-principal-geturi") +CHECK(NeedsNoVTableTypeChecker, "needs-no-vtable-type") +CHECK(NoAddRefReleaseOnReturnChecker, "no-addref-release-on-return") +CHECK(NoAutoTypeChecker, "no-auto-type") +CHECK(NoDuplicateRefCntMemberChecker, "no-duplicate-refcnt-member") +CHECK(NoExplicitMoveConstructorChecker, "no-explicit-move-constructor") +CHECK(NoNewThreadsChecker, "no-new-threads") +CHECK(NonMemMovableMemberChecker, "non-memmovable-member") +CHECK(NonMemMovableTemplateArgChecker, "non-memmovable-template-arg") +CHECK(NoUsingNamespaceMozillaJavaChecker, "no-using-namespace-mozilla-java") +CHECK(NonParamInsideFunctionDeclChecker, "non-memmovable-template-arg") +CHECK(NonTrivialTypeInFfiChecker, "non-trivial-type-in-ffi-boundary") +CHECK(OverrideBaseCallChecker, "override-base-call") +CHECK(OverrideBaseCallUsageChecker, "override-base-call-usage") +CHECK(ParamTraitsEnumChecker, "paramtraits-enum") +CHECK(RefCountedCopyConstructorChecker, "refcounted-copy-constructor") +CHECK(RefCountedInsideLambdaChecker, "refcounted-inside-lambda") +CHECK(ScopeChecker, "scope") +CHECK(SprintfLiteralChecker, "sprintf-literal") +CHECK(TemporaryLifetimeBoundChecker, "temporary-lifetime-bound") +CHECK(TrivialCtorDtorChecker, "trivial-constructor-destructor") +CHECK(TrivialDtorChecker, "trivial-destructor") diff --git a/build/clang-plugin/ChecksIncludes.inc b/build/clang-plugin/ChecksIncludes.inc new file mode 100644 index 0000000000..3f4ec4fe08 --- /dev/null +++ b/build/clang-plugin/ChecksIncludes.inc @@ -0,0 +1,44 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +// The list of #include directives necessary for the checker classes that +// are compatible with clang-tidy. 
+ +#include "ArithmeticArgChecker.h" +#include "AssertAssignmentChecker.h" +#include "CanRunScriptChecker.h" +#include "DanglingOnTemporaryChecker.h" +#include "ExplicitImplicitChecker.h" +#include "ExplicitOperatorBoolChecker.h" +#ifdef TARGET_IS_WINDOWS +#include "LoadLibraryUsageChecker.h" +#include "FopenUsageChecker.h" +#endif +#include "KungFuDeathGripChecker.h" +#include "MustOverrideChecker.h" +#include "MustReturnFromCallerChecker.h" +#include "MustUseChecker.h" +#include "NaNExprChecker.h" +#include "NoPrincipalGetURI.h" +#include "NeedsNoVTableTypeChecker.h" +#include "NoAddRefReleaseOnReturnChecker.h" +#include "NoAutoTypeChecker.h" +#include "NoDuplicateRefCntMemberChecker.h" +#include "NoExplicitMoveConstructorChecker.h" +#include "NoNewThreadsChecker.h" +#include "NonMemMovableMemberChecker.h" +#include "NonMemMovableTemplateArgChecker.h" +#include "NonParamInsideFunctionDeclChecker.h" +#include "NonTrivialTypeInFfiChecker.h" +#include "NoUsingNamespaceMozillaJavaChecker.h" +#include "OverrideBaseCallChecker.h" +#include "OverrideBaseCallUsageChecker.h" +#include "ParamTraitsEnumChecker.h" +#include "RefCountedCopyConstructorChecker.h" +#include "RefCountedInsideLambdaChecker.h" +#include "ScopeChecker.h" +#include "SprintfLiteralChecker.h" +#include "TemporaryLifetimeBoundChecker.h" +#include "TrivialCtorDtorChecker.h" +#include "TrivialDtorChecker.h" diff --git a/build/clang-plugin/CustomAttributes.cpp b/build/clang-plugin/CustomAttributes.cpp new file mode 100644 index 0000000000..886114e384 --- /dev/null +++ b/build/clang-plugin/CustomAttributes.cpp @@ -0,0 +1,119 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "CustomAttributes.h" +#include "plugin.h" +#include "clang/Frontend/FrontendPluginRegistry.h" +#include + +/* Having annotations in the AST unexpectedly impacts codegen. + * Ideally, we'd avoid having annotations at all, by using an API such as + * the one from https://reviews.llvm.org/D31338, and storing the attributes + * data separately from the AST on our own. Unfortunately, there is no such + * API currently in clang, so we must do without. + * We can do something similar, though, where we go through the AST before + * running the checks, create a mapping of AST nodes to attributes, and + * remove the attributes/annotations from the AST nodes. + * Not all declarations can be reached from the decl() AST matcher, though, + * so we do our best effort (getting the other declarations we look at in + * checks). We emit a warning when checks look at a note that still has + * annotations attached (aka, hasn't been seen during our first pass), + * so that those don't go unnoticed. 
(-Werror should then take care of + * making that an error) + */ + +using namespace clang; +using namespace llvm; + +static DenseMap AttributesCache; + +static CustomAttributesSet CacheAttributes(const Decl *D) { + CustomAttributesSet attrs = {}; + for (auto Attr : D->specific_attrs()) { + auto annotation = Attr->getAnnotation(); +#define ATTR(a) \ + if (annotation == #a) { \ + attrs.has_##a = true; \ + } else +#include "CustomAttributes.inc" +#include "external/CustomAttributes.inc" +#undef ATTR + {} + } + const_cast(D)->dropAttr(); + AttributesCache.insert(std::make_pair(D, attrs)); + return attrs; +} + +static void Report(const Decl *D, const char *message) { + ASTContext &Context = D->getASTContext(); + DiagnosticsEngine &Diag = Context.getDiagnostics(); + unsigned ID = + Diag.getDiagnosticIDs()->getCustomDiagID(DiagnosticIDs::Warning, message); + Diag.Report(D->getBeginLoc(), ID); +} + +CustomAttributesSet GetAttributes(const Decl *D) { + CustomAttributesSet attrs = {}; + if (D->hasAttr()) { + Report(D, "Declaration has unhandled annotations."); + attrs = CacheAttributes(D); + } else { + auto attributes = AttributesCache.find(D); + if (attributes != AttributesCache.end()) { + attrs = attributes->second; + } + } + return attrs; +} + +bool hasCustomAttribute(const clang::Decl *D, CustomAttributes A) { + CustomAttributesSet attrs = GetAttributes(D); + switch (A) { +#define ATTR(a) \ + case a: \ + return attrs.has_##a; +#include "CustomAttributes.inc" +#include "external/CustomAttributes.inc" +#undef ATTR + } + return false; +} + +class CustomAttributesMatcher + : public ast_matchers::MatchFinder::MatchCallback { +public: + void run(const ast_matchers::MatchFinder::MatchResult &Result) final { + if (auto D = Result.Nodes.getNodeAs("decl")) { + CacheAttributes(D); + } else if (auto L = Result.Nodes.getNodeAs("lambda")) { + CacheAttributes(L->getCallOperator()); + CacheAttributes(L->getLambdaClass()); + } + } +}; + +class CustomAttributesAction : public PluginASTAction { +public: + ASTConsumerPtr CreateASTConsumer(CompilerInstance &CI, + StringRef FileName) override { + auto &Context = CI.getASTContext(); + auto AstMatcher = new (Context.Allocate()) MatchFinder(); + auto Matcher = new (Context.Allocate()) + CustomAttributesMatcher(); + AstMatcher->addMatcher(decl().bind("decl"), Matcher); + AstMatcher->addMatcher(lambdaExpr().bind("lambda"), Matcher); + return AstMatcher->newASTConsumer(); + } + + bool ParseArgs(const CompilerInstance &CI, + const std::vector &Args) override { + return true; + } + + ActionType getActionType() override { return AddBeforeMainAction; } +}; + +static FrontendPluginRegistry::Add + X("moz-custom-attributes", "prepare custom attributes for moz-check"); diff --git a/build/clang-plugin/CustomAttributes.h b/build/clang-plugin/CustomAttributes.h new file mode 100644 index 0000000000..04c95b7184 --- /dev/null +++ b/build/clang-plugin/CustomAttributes.h @@ -0,0 +1,41 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#ifndef CustomAttributes_h__ +#define CustomAttributes_h__ + +#include "clang/AST/DeclBase.h" +#include "llvm/ADT/StringRef.h" + +enum CustomAttributes { +#define ATTR(a) a, +#include "CustomAttributes.inc" +#include "external/CustomAttributes.inc" +#undef ATTR +}; + +struct CustomAttributesSet { +#define ATTR(a) bool has_##a : 1; +#include "CustomAttributes.inc" +#include "external/CustomAttributes.inc" +#undef ATTR +}; + +template bool hasCustomAttribute(const clang::Decl *D) { + return false; +} + +extern CustomAttributesSet GetAttributes(const clang::Decl *D); + +#define ATTR(name) \ + template <> inline bool hasCustomAttribute(const clang::Decl *D) { \ + return GetAttributes(D).has_##name; \ + } +#include "CustomAttributes.inc" +#include "external/CustomAttributes.inc" +#undef ATTR + +extern bool hasCustomAttribute(const clang::Decl *D, CustomAttributes A); + +#endif /* CustomAttributes_h__ */ diff --git a/build/clang-plugin/CustomAttributes.inc b/build/clang-plugin/CustomAttributes.inc new file mode 100644 index 0000000000..2986ab9e4a --- /dev/null +++ b/build/clang-plugin/CustomAttributes.inc @@ -0,0 +1,31 @@ +ATTR(moz_allow_temporary) +ATTR(moz_can_run_script) +ATTR(moz_can_run_script_boundary) +ATTR(moz_global_class) +ATTR(moz_heap_allocator) +ATTR(moz_heap_class) +ATTR(moz_implicit) +ATTR(moz_inherit_type_annotations_from_template_args) +ATTR(moz_is_smartptr_to_refcounted) +ATTR(moz_may_call_after_must_return) +ATTR(moz_must_override) +ATTR(moz_must_return_from_caller_if_this_is_arg) +ATTR(moz_must_use_type) +ATTR(moz_needs_memmovable_members) +ATTR(moz_needs_memmovable_type) +ATTR(moz_needs_no_vtable_type) +ATTR(moz_no_addref_release_on_return) +ATTR(moz_no_arith_expr_in_arg) +ATTR(moz_no_dangling_on_temporaries) +ATTR(moz_non_autoable) +ATTR(moz_non_memmovable) +ATTR(moz_non_param) +ATTR(moz_non_temporary_class) +ATTR(moz_nonheap_class) +ATTR(moz_required_base_method) +ATTR(moz_stack_class) +ATTR(moz_static_local_class) +ATTR(moz_temporary_class) +ATTR(moz_lifetime_bound) +ATTR(moz_trivial_ctor_dtor) +ATTR(moz_trivial_dtor) diff --git a/build/clang-plugin/CustomMatchers.h b/build/clang-plugin/CustomMatchers.h new file mode 100644 index 0000000000..54bb0d02dd --- /dev/null +++ b/build/clang-plugin/CustomMatchers.h @@ -0,0 +1,426 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef CustomMatchers_h__ +#define CustomMatchers_h__ + +#include "MemMoveAnnotation.h" +#include "Utils.h" + +namespace clang { +namespace ast_matchers { + +/// This matcher will match any function declaration that is declared as a heap +/// allocator. +AST_MATCHER(FunctionDecl, heapAllocator) { + return hasCustomAttribute(&Node); +} + +/// This matcher will match any declaration that is marked as not accepting +/// arithmetic expressions in its arguments. +AST_MATCHER(Decl, noArithmeticExprInArgs) { + return hasCustomAttribute(&Node); +} + +/// This matcher will match any C++ class that is marked as having a trivial +/// constructor and destructor. +AST_MATCHER(CXXRecordDecl, hasTrivialCtorDtor) { + return hasCustomAttribute(&Node); +} + +/// This matcher will match any C++ class that is marked as having a trivial +/// destructor. 
+AST_MATCHER(CXXRecordDecl, hasTrivialDtor) { + return hasCustomAttribute(&Node); +} + +AST_MATCHER(CXXConstructExpr, allowsTemporary) { + return hasCustomAttribute(Node.getConstructor()); +} + +/// This matcher will match lvalue-ref-qualified methods. +AST_MATCHER(CXXMethodDecl, isLValueRefQualified) { + return Node.getRefQualifier() == RQ_LValue; +} + +/// This matcher will match rvalue-ref-qualified methods. +AST_MATCHER(CXXMethodDecl, isRValueRefQualified) { + return Node.getRefQualifier() == RQ_RValue; +} + +AST_POLYMORPHIC_MATCHER(isFirstParty, + AST_POLYMORPHIC_SUPPORTED_TYPES(Decl, Stmt)) { + return !inThirdPartyPath(&Node, &Finder->getASTContext()) && + !ASTIsInSystemHeader(Finder->getASTContext(), Node); +} + +/// This matcher will match temporary expressions. +/// We need this matcher for compatibility with clang 3.* (clang 4 and above +/// insert a MaterializeTemporaryExpr everywhere). +AST_MATCHER(Expr, isTemporary) { + return Node.isRValue() || Node.isXValue() || + isa(&Node); +} + +/// This matcher will match any method declaration that is marked as returning +/// a pointer deleted by the destructor of the class. +AST_MATCHER(CXXMethodDecl, noDanglingOnTemporaries) { + return hasCustomAttribute(&Node); +} + +/// This matcher will match any function declaration that is marked to prohibit +/// calling AddRef or Release on its return value. +AST_MATCHER(FunctionDecl, hasNoAddRefReleaseOnReturnAttr) { + return hasCustomAttribute(&Node); +} + +/// This matcher will match any function declaration that is marked as being +/// allowed to run script. +AST_MATCHER(FunctionDecl, hasCanRunScriptAnnotation) { + return hasCustomAttribute(&Node); +} + +/// This matcher will match all arithmetic binary operators. +AST_MATCHER(BinaryOperator, binaryArithmeticOperator) { + BinaryOperatorKind OpCode = Node.getOpcode(); + return OpCode == BO_Mul || OpCode == BO_Div || OpCode == BO_Rem || + OpCode == BO_Add || OpCode == BO_Sub || OpCode == BO_Shl || + OpCode == BO_Shr || OpCode == BO_And || OpCode == BO_Xor || + OpCode == BO_Or || OpCode == BO_MulAssign || OpCode == BO_DivAssign || + OpCode == BO_RemAssign || OpCode == BO_AddAssign || + OpCode == BO_SubAssign || OpCode == BO_ShlAssign || + OpCode == BO_ShrAssign || OpCode == BO_AndAssign || + OpCode == BO_XorAssign || OpCode == BO_OrAssign; +} + +/// This matcher will match all arithmetic unary operators. +AST_MATCHER(UnaryOperator, unaryArithmeticOperator) { + UnaryOperatorKind OpCode = Node.getOpcode(); + return OpCode == UO_PostInc || OpCode == UO_PostDec || OpCode == UO_PreInc || + OpCode == UO_PreDec || OpCode == UO_Plus || OpCode == UO_Minus || + OpCode == UO_Not; +} + +/// This matcher will match the unary dereference operator +AST_MATCHER(UnaryOperator, unaryDereferenceOperator) { + UnaryOperatorKind OpCode = Node.getOpcode(); + return OpCode == UO_Deref; +} + +/// This matcher will match == and != binary operators. +AST_MATCHER(BinaryOperator, binaryEqualityOperator) { + BinaryOperatorKind OpCode = Node.getOpcode(); + return OpCode == BO_EQ || OpCode == BO_NE; +} + +/// This matcher will match comma operator. +AST_MATCHER(BinaryOperator, binaryCommaOperator) { + BinaryOperatorKind OpCode = Node.getOpcode(); + return OpCode == BO_Comma; +} + +/// This matcher will match floating point types. +AST_MATCHER(QualType, isFloat) { return Node->isRealFloatingType(); } + +/// This matcher will match locations in system headers. 
This is adopted from +/// isExpansionInSystemHeader in newer clangs, but modified in order to work +/// with old clangs that we use on infra. +AST_POLYMORPHIC_MATCHER(isInSystemHeader, + AST_POLYMORPHIC_SUPPORTED_TYPES(Decl, Stmt)) { + return ASTIsInSystemHeader(Finder->getASTContext(), Node); +} + +/// This matcher will match a file "gtest-port.h". The file contains +/// known fopen usages that are OK. +AST_MATCHER(CallExpr, isInWhitelistForFopenUsage) { + static const char Whitelist[] = "gtest-port.h"; + SourceLocation Loc = Node.getBeginLoc(); + StringRef FileName = + getFilename(Finder->getASTContext().getSourceManager(), Loc); + + return llvm::sys::path::rbegin(FileName)->equals(Whitelist); +} + +/// This matcher will match a list of files. These files contain +/// known NaN-testing expressions which we would like to whitelist. +AST_MATCHER(BinaryOperator, isInWhitelistForNaNExpr) { + const char *whitelist[] = {"SkScalar.h", "json_writer.cpp", "State.cpp"}; + + SourceLocation Loc = Node.getOperatorLoc(); + StringRef FileName = + getFilename(Finder->getASTContext().getSourceManager(), Loc); + for (auto itr = std::begin(whitelist); itr != std::end(whitelist); itr++) { + if (llvm::sys::path::rbegin(FileName)->equals(*itr)) { + return true; + } + } + + return false; +} + +AST_MATCHER(CallExpr, isInWhiteListForPrincipalGetUri) { + const auto Whitelist = {"nsIPrincipal.h", "BasePrincipal.cpp", + "ContentPrincipal.cpp"}; + SourceLocation Loc = Node.getBeginLoc(); + StringRef Filename = + getFilename(Finder->getASTContext().getSourceManager(), Loc); + + for (auto Exclusion : Whitelist) { + if (Filename.find(Exclusion) != std::string::npos) { + return true; + } + } + return false; +} + +/// This matcher will match a list of files which contain NS_NewNamedThread +/// code or names of existing threads that we would like to ignore. +AST_MATCHER(CallExpr, isInAllowlistForThreads) { + + // Get the source location of the call + SourceLocation Loc = Node.getRParenLoc(); + StringRef FileName = + getFilename(Finder->getASTContext().getSourceManager(), Loc); + for (auto thread_file : allow_thread_files) { + if (llvm::sys::path::rbegin(FileName)->equals(thread_file)) { + return true; + } + } + + // Now we get the first arg (the name of the thread) and we check it. + const StringLiteral *nameArg = + dyn_cast(Node.getArg(0)->IgnoreImplicit()); + if (nameArg) { + const StringRef name = nameArg->getString(); + for (auto thread_name : allow_thread_names) { + if (name.equals(thread_name)) { + return true; + } + } + } + + return false; +} + +/// This matcher will match all accesses to AddRef or Release methods. +AST_MATCHER(MemberExpr, isAddRefOrRelease) { + ValueDecl *Member = Node.getMemberDecl(); + CXXMethodDecl *Method = dyn_cast(Member); + if (Method) { + const auto &Name = getNameChecked(Method); + return Name == "AddRef" || Name == "Release"; + } + return false; +} + +/// This matcher will select classes which are refcounted AND have an mRefCnt +/// member. +AST_MATCHER(CXXRecordDecl, hasRefCntMember) { + return isClassRefCounted(&Node) && getClassRefCntMember(&Node); +} + +/// This matcher will select classes which are refcounted. 
+AST_MATCHER(CXXRecordDecl, isRefCounted) { return isClassRefCounted(&Node); } + +AST_MATCHER(QualType, hasVTable) { return typeHasVTable(Node); } + +AST_MATCHER(CXXRecordDecl, hasNeedsNoVTableTypeAttr) { + return hasCustomAttribute(&Node); +} + +/// This matcher will select classes which are non-memmovable +AST_MATCHER(QualType, isNonMemMovable) { + return NonMemMovable.hasEffectiveAnnotation(Node); +} + +/// This matcher will select classes which require a memmovable template arg +AST_MATCHER(CXXRecordDecl, needsMemMovableTemplateArg) { + return hasCustomAttribute(&Node); +} + +/// This matcher will select classes which require all members to be memmovable +AST_MATCHER(CXXRecordDecl, needsMemMovableMembers) { + return hasCustomAttribute(&Node); +} + +AST_MATCHER(CXXConstructorDecl, isInterestingImplicitCtor) { + const CXXConstructorDecl *Declaration = Node.getCanonicalDecl(); + return + // Skip constructors in system headers + !ASTIsInSystemHeader(Declaration->getASTContext(), *Declaration) && + // Skip ignored namespaces and paths + !isInIgnoredNamespaceForImplicitCtor(Declaration) && + !inThirdPartyPath(Declaration) && + // We only want Converting constructors + Declaration->isConvertingConstructor(false) && + // We don't want copy of move constructors, as those are allowed to be + // implicit + !Declaration->isCopyOrMoveConstructor() && + // We don't want inheriting constructors, since using declarations can't + // have attributes + !Declaration->isInheritingConstructor() && + // We don't want deleted constructors. + !Declaration->isDeleted(); +} + +AST_MATCHER_P(Expr, ignoreTrivials, internal::Matcher, InnerMatcher) { + return InnerMatcher.matches(*IgnoreTrivials(&Node), Finder, Builder); +} + +// Takes two matchers: the first one is a condition; the second is a matcher to +// be applied once we are done unwrapping trivials. While the condition does +// not match and we're looking at a trivial, will keep unwrapping the trivial +// and trying again. Once the condition matches, we will go ahead and unwrap all +// trivials and apply the inner matcher to the result. +// +// The expected use here is if we want to condition a match on some typecheck +// but apply the match to only non-trivials, because there are trivials (e.g. +// casts) that can change types. +AST_MATCHER_P2(Expr, ignoreTrivialsConditional, internal::Matcher, + Condition, internal::Matcher, InnerMatcher) { + const Expr *node = &Node; + while (true) { + if (Condition.matches(*node, Finder, Builder)) { + return InnerMatcher.matches(*IgnoreTrivials(node), Finder, Builder); + } + const Expr *newNode = MaybeSkipOneTrivial(node); + if (newNode == node) { + return false; + } + node = newNode; + } +} + +// We can't call this "isImplicit" since it clashes with an existing matcher in +// clang. 
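// For illustration (assuming, as elsewhere in Gecko, that MOZ_IMPLICIT expands
// to __attribute__((annotate("moz_implicit"))) when the plugin is enabled; the
// declaration below is invented): a deliberately implicit converting
// constructor such as
//   MOZ_IMPLICIT Container(size_t aCapacity);
// carries the annotation this matcher looks for, which is how
// ExplicitImplicitChecker knows to skip it.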
+AST_MATCHER(CXXConstructorDecl, isMarkedImplicit) { + return hasCustomAttribute(&Node); +} + +AST_MATCHER(CXXRecordDecl, isConcreteClass) { return !Node.isAbstract(); } + +AST_MATCHER(QualType, autoNonAutoableType) { + if (const AutoType *T = Node->getContainedAutoType()) { + if (const CXXRecordDecl *Rec = T->getAsCXXRecordDecl()) { + return hasCustomAttribute(Rec); + } + } + return false; +} + +AST_MATCHER(CXXConstructorDecl, isExplicitMoveConstructor) { + return Node.isExplicit() && Node.isMoveConstructor(); +} + +AST_MATCHER(CXXConstructorDecl, isCompilerProvidedCopyConstructor) { + return !Node.isUserProvided() && Node.isCopyConstructor(); +} + +AST_MATCHER(CallExpr, isAssertAssignmentTestFunc) { + static const std::string AssertName = "MOZ_AssertAssignmentTest"; + const FunctionDecl *Method = Node.getDirectCallee(); + + return Method && Method->getDeclName().isIdentifier() && + Method->getName() == AssertName; +} + +AST_MATCHER(CallExpr, isSnprintfLikeFunc) { + static const std::string Snprintf = "snprintf"; + static const std::string Vsnprintf = "vsnprintf"; + const FunctionDecl *Func = Node.getDirectCallee(); + + if (!Func || isa(Func)) { + return false; + } + + StringRef Name = getNameChecked(Func); + if (Name != Snprintf && Name != Vsnprintf) { + return false; + } + + return !inThirdPartyPath(Node.getBeginLoc(), + Finder->getASTContext().getSourceManager()) && + !isIgnoredPathForSprintfLiteral( + &Node, Finder->getASTContext().getSourceManager()); +} + +AST_MATCHER(CXXRecordDecl, isLambdaDecl) { return Node.isLambda(); } + +AST_MATCHER(QualType, isRefPtr) { return typeIsRefPtr(Node); } + +AST_MATCHER(QualType, isSmartPtrToRefCounted) { + auto *D = getNonTemplateSpecializedCXXRecordDecl(Node); + if (!D) { + return false; + } + + D = D->getCanonicalDecl(); + + return D && hasCustomAttribute(D); +} + +AST_MATCHER(ClassTemplateSpecializationDecl, isSmartPtrToRefCountedDecl) { + auto *D = dyn_cast_or_null( + Node.getSpecializedTemplate()->getTemplatedDecl()); + if (!D) { + return false; + } + + D = D->getCanonicalDecl(); + + return D && hasCustomAttribute(D); +} + +AST_MATCHER(CXXRecordDecl, hasBaseClasses) { + const CXXRecordDecl *Decl = Node.getCanonicalDecl(); + + // Must have definition and should inherit other classes + return Decl && Decl->hasDefinition() && Decl->getNumBases(); +} + +AST_MATCHER(CXXMethodDecl, isRequiredBaseMethod) { + const CXXMethodDecl *Decl = Node.getCanonicalDecl(); + return Decl && hasCustomAttribute(Decl); +} + +AST_MATCHER(CXXMethodDecl, isNonVirtual) { + const CXXMethodDecl *Decl = Node.getCanonicalDecl(); + return Decl && !Decl->isVirtual(); +} + +AST_MATCHER(FunctionDecl, isMozMustReturnFromCaller) { + const FunctionDecl *Decl = Node.getCanonicalDecl(); + return Decl && + hasCustomAttribute(Decl); +} + +AST_MATCHER(FunctionDecl, isMozTemporaryLifetimeBound) { + const FunctionDecl *Decl = Node.getCanonicalDecl(); + return Decl && hasCustomAttribute(Decl); +} + +/// This matcher will select default args which have nullptr as the value. 
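/// For illustration (declaration invented): given
///   void HandleEvent(Event *aEvent, Listener *aListener = nullptr);
/// a call that relies on the default supplies a CXXDefaultArgExpr whose value
/// is a null pointer constant, which is what this matcher selects.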
+AST_MATCHER(CXXDefaultArgExpr, isNullDefaultArg) { + const Expr *Expr = Node.getExpr(); + return Expr && Expr->isNullPointerConstant(Finder->getASTContext(), + Expr::NPC_NeverValueDependent); +} + +AST_MATCHER(UsingDirectiveDecl, isUsingNamespaceMozillaJava) { + const NamespaceDecl *Namespace = Node.getNominatedNamespace(); + const std::string &FQName = Namespace->getQualifiedNameAsString(); + + static const char NAMESPACE[] = "mozilla::java"; + static const char PREFIX[] = "mozilla::java::"; + + // We match both the `mozilla::java` namespace itself as well as any other + // namespaces contained within the `mozilla::java` namespace. + return !FQName.compare(NAMESPACE) || + !FQName.compare(0, sizeof(PREFIX) - 1, PREFIX); +} + +} // namespace ast_matchers +} // namespace clang + +#endif diff --git a/build/clang-plugin/CustomTypeAnnotation.cpp b/build/clang-plugin/CustomTypeAnnotation.cpp new file mode 100644 index 0000000000..1291b39418 --- /dev/null +++ b/build/clang-plugin/CustomTypeAnnotation.cpp @@ -0,0 +1,173 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "CustomTypeAnnotation.h" +#include "Utils.h" + +CustomTypeAnnotation StackClass = + CustomTypeAnnotation(moz_stack_class, "stack"); +CustomTypeAnnotation GlobalClass = + CustomTypeAnnotation(moz_global_class, "global"); +CustomTypeAnnotation NonHeapClass = + CustomTypeAnnotation(moz_nonheap_class, "non-heap"); +CustomTypeAnnotation HeapClass = CustomTypeAnnotation(moz_heap_class, "heap"); +CustomTypeAnnotation NonTemporaryClass = + CustomTypeAnnotation(moz_non_temporary_class, "non-temporary"); +CustomTypeAnnotation TemporaryClass = + CustomTypeAnnotation(moz_temporary_class, "temporary"); +CustomTypeAnnotation StaticLocalClass = + CustomTypeAnnotation(moz_static_local_class, "static-local"); + +void CustomTypeAnnotation::dumpAnnotationReason(BaseCheck &Check, QualType T, + SourceLocation Loc) { + const char *Inherits = + "%1 is a %0 type because it inherits from a %0 type %2"; + const char *Member = "%1 is a %0 type because member %2 is a %0 type %3"; + const char *Array = "%1 is a %0 type because it is an array of %0 type %2"; + const char *Templ = + "%1 is a %0 type because it has a template argument %0 type %2"; + const char *Implicit = "%1 is a %0 type because %2"; + + AnnotationReason Reason = directAnnotationReason(T); + for (;;) { + switch (Reason.Kind) { + case RK_ArrayElement: + Check.diag(Loc, Array, DiagnosticIDs::Note) << Pretty << T << Reason.Type; + break; + case RK_BaseClass: { + const CXXRecordDecl *Declaration = T->getAsCXXRecordDecl(); + assert(Declaration && "This type should be a C++ class"); + + Check.diag(Declaration->getLocation(), Inherits, DiagnosticIDs::Note) + << Pretty << T << Reason.Type; + break; + } + case RK_Field: + Check.diag(Reason.Field->getLocation(), Member, DiagnosticIDs::Note) + << Pretty << T << Reason.Field << Reason.Type; + break; + case RK_TemplateInherited: { + const CXXRecordDecl *Declaration = T->getAsCXXRecordDecl(); + assert(Declaration && "This type should be a C++ class"); + + Check.diag(Declaration->getLocation(), Templ, DiagnosticIDs::Note) + << Pretty << T << Reason.Type; + break; + } + case RK_Implicit: { + const TagDecl *Declaration = T->getAsTagDecl(); + assert(Declaration && "This type should be a TagDecl"); + + Check.diag(Declaration->getLocation(), Implicit, DiagnosticIDs::Note) + << Pretty << T << 
Reason.ImplicitReason; + return; + } + default: + // FIXME (bug 1203263): note the original annotation. + return; + } + + T = Reason.Type; + Reason = directAnnotationReason(T); + } +} + +CustomTypeAnnotation::AnnotationReason +CustomTypeAnnotation::directAnnotationReason(QualType T) { + if (const TagDecl *D = T->getAsTagDecl()) { + if (hasCustomAttribute(D, Attribute)) { + AnnotationReason Reason = {T, RK_Direct, nullptr, ""}; + return Reason; + } + + std::string ImplAnnotReason = getImplicitReason(D); + if (!ImplAnnotReason.empty()) { + AnnotationReason Reason = {T, RK_Implicit, nullptr, ImplAnnotReason}; + return Reason; + } + } + + // Check if we have a cached answer + void *Key = T.getAsOpaquePtr(); + ReasonCache::iterator Cached = Cache.find(T.getAsOpaquePtr()); + if (Cached != Cache.end()) { + return Cached->second; + } + + // Check if we have a type which we can recurse into + if (const clang::ArrayType *Array = T->getAsArrayTypeUnsafe()) { + if (hasEffectiveAnnotation(Array->getElementType())) { + AnnotationReason Reason = {Array->getElementType(), RK_ArrayElement, + nullptr, ""}; + Cache[Key] = Reason; + return Reason; + } + } + + // Recurse into Base classes + if (const CXXRecordDecl *Declaration = T->getAsCXXRecordDecl()) { + if (Declaration->hasDefinition()) { + Declaration = Declaration->getDefinition(); + + for (const CXXBaseSpecifier &Base : Declaration->bases()) { + if (hasEffectiveAnnotation(Base.getType())) { + AnnotationReason Reason = {Base.getType(), RK_BaseClass, nullptr, ""}; + Cache[Key] = Reason; + return Reason; + } + } + + // Recurse into members + for (const FieldDecl *Field : Declaration->fields()) { + if (hasEffectiveAnnotation(Field->getType())) { + AnnotationReason Reason = {Field->getType(), RK_Field, Field, ""}; + Cache[Key] = Reason; + return Reason; + } + } + + // Recurse into template arguments if the annotation + // MOZ_INHERIT_TYPE_ANNOTATIONS_FROM_TEMPLATE_ARGS is present + if (hasCustomAttribute( + Declaration)) { + const ClassTemplateSpecializationDecl *Spec = + dyn_cast(Declaration); + if (Spec) { + const TemplateArgumentList &Args = Spec->getTemplateArgs(); + + AnnotationReason Reason = tmplArgAnnotationReason(Args.asArray()); + if (Reason.Kind != RK_None) { + Cache[Key] = Reason; + return Reason; + } + } + } + } + } + + AnnotationReason Reason = {QualType(), RK_None, nullptr, ""}; + Cache[Key] = Reason; + return Reason; +} + +CustomTypeAnnotation::AnnotationReason +CustomTypeAnnotation::tmplArgAnnotationReason(ArrayRef Args) { + for (const TemplateArgument &Arg : Args) { + if (Arg.getKind() == TemplateArgument::Type) { + QualType Type = Arg.getAsType(); + if (hasEffectiveAnnotation(Type)) { + AnnotationReason Reason = {Type, RK_TemplateInherited, nullptr, ""}; + return Reason; + } + } else if (Arg.getKind() == TemplateArgument::Pack) { + AnnotationReason Reason = tmplArgAnnotationReason(Arg.getPackAsArray()); + if (Reason.Kind != RK_None) { + return Reason; + } + } + } + + AnnotationReason Reason = {QualType(), RK_None, nullptr, ""}; + return Reason; +} diff --git a/build/clang-plugin/CustomTypeAnnotation.h b/build/clang-plugin/CustomTypeAnnotation.h new file mode 100644 index 0000000000..1fec12f69b --- /dev/null +++ b/build/clang-plugin/CustomTypeAnnotation.h @@ -0,0 +1,75 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#ifndef CustomTypeAnnotation_h__ +#define CustomTypeAnnotation_h__ + +#include "CustomAttributes.h" +#include "plugin.h" + +class CustomTypeAnnotation { + enum ReasonKind { + RK_None, + RK_Direct, + RK_ArrayElement, + RK_BaseClass, + RK_Field, + RK_TemplateInherited, + RK_Implicit, + }; + struct AnnotationReason { + QualType Type; + ReasonKind Kind; + const FieldDecl *Field; + std::string ImplicitReason; + + bool valid() const { return Kind != RK_None; } + }; + typedef DenseMap ReasonCache; + + CustomAttributes Attribute; + const char *Pretty; + ReasonCache Cache; + +public: + CustomTypeAnnotation(CustomAttributes Attribute, const char *Pretty) + : Attribute(Attribute), Pretty(Pretty){}; + + virtual ~CustomTypeAnnotation() {} + + // Checks if this custom annotation "effectively affects" the given type. + bool hasEffectiveAnnotation(QualType T) { + return directAnnotationReason(T).valid(); + } + void dumpAnnotationReason(BaseCheck &Check, QualType T, SourceLocation Loc); + + void reportErrorIfPresent(BaseCheck &Check, QualType T, SourceLocation Loc, + const char *Error, const char *Note) { + if (hasEffectiveAnnotation(T)) { + Check.diag(Loc, Error, DiagnosticIDs::Error) << T; + Check.diag(Loc, Note, DiagnosticIDs::Note); + dumpAnnotationReason(Check, T, Loc); + } + } + +private: + AnnotationReason directAnnotationReason(QualType T); + AnnotationReason tmplArgAnnotationReason(ArrayRef Args); + +protected: + // Allow subclasses to apply annotations for reasons other than a direct + // annotation. A non-empty string return value means that the object D is + // annotated, and should contain the reason why. + virtual std::string getImplicitReason(const TagDecl *D) const { return ""; } +}; + +extern CustomTypeAnnotation StackClass; +extern CustomTypeAnnotation GlobalClass; +extern CustomTypeAnnotation NonHeapClass; +extern CustomTypeAnnotation HeapClass; +extern CustomTypeAnnotation NonTemporaryClass; +extern CustomTypeAnnotation TemporaryClass; +extern CustomTypeAnnotation StaticLocalClass; + +#endif diff --git a/build/clang-plugin/DanglingOnTemporaryChecker.cpp b/build/clang-plugin/DanglingOnTemporaryChecker.cpp new file mode 100644 index 0000000000..96d85ef4c0 --- /dev/null +++ b/build/clang-plugin/DanglingOnTemporaryChecker.cpp @@ -0,0 +1,256 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "DanglingOnTemporaryChecker.h" +#include "CustomMatchers.h" +#include "VariableUsageHelpers.h" + +void DanglingOnTemporaryChecker::registerMatchers(MatchFinder *AstMatcher) { + //////////////////////////////////////// + // Quick annotation conflict checkers // + //////////////////////////////////////// + + AstMatcher->addMatcher( + // This is a matcher on a method declaration, + cxxMethodDecl( + // which is marked as no dangling on temporaries, + noDanglingOnTemporaries(), + + // and which is && ref-qualified. + isRValueRefQualified(), + + decl().bind("invalidMethodRefQualified")), + this); + + AstMatcher->addMatcher( + // This is a matcher on a method declaration, + cxxMethodDecl( + // which is marked as no dangling on temporaries, + noDanglingOnTemporaries(), + + // which returns a primitive type, + returns(builtinType()), + + // and which doesn't return a pointer. 
+ unless(returns(pointerType())), + + decl().bind("invalidMethodPointer")), + this); + + ////////////////// + // Main checker // + ////////////////// + + auto hasParentCall = hasParent( + expr(anyOf(cxxOperatorCallExpr( + // If we're in a lamda, we may have an operator call + // expression ancestor in the AST, but the temporary we're + // matching against is not going to have the same lifetime + // as the constructor call. + unless(has(expr(ignoreTrivials(lambdaExpr())))), + expr().bind("parentOperatorCallExpr")), + callExpr( + // If we're in a lamda, we may have a call expression + // ancestor in the AST, but the temporary we're matching + // against is not going to have the same lifetime as the + // function call. + unless(has(expr(ignoreTrivials(lambdaExpr())))), + expr().bind("parentCallExpr")), + objcMessageExpr( + // If we're in a lamda, we may have an objc message + // expression ancestor in the AST, but the temporary we're + // matching against is not going to have the same lifetime + // as the function call. + unless(has(expr(ignoreTrivials(lambdaExpr())))), + expr().bind("parentObjCMessageExpr")), + cxxConstructExpr( + // If we're in a lamda, we may have a construct expression + // ancestor in the AST, but the temporary we're matching + // against is not going to have the same lifetime as the + // constructor call. + unless(has(expr(ignoreTrivials(lambdaExpr())))), + expr().bind("parentConstructExpr"))))); + + AstMatcher->addMatcher( + // This is a matcher on a method call, + cxxMemberCallExpr( + // which is in first party code, + isFirstParty(), + + // and which is performed on a temporary, + on(allOf(unless(hasType(pointerType())), isTemporary(), + // but which is not `this`. + unless(cxxThisExpr()))), + + // and which is marked as no dangling on temporaries. + callee(cxxMethodDecl(noDanglingOnTemporaries())), + + expr().bind("memberCallExpr"), + + // We optionally match a parent call expression or a parent construct + // expression because using a temporary inside a call is fine as long + // as the pointer doesn't escape the function call. + anyOf( + // This is the case where the call is the direct parent, so we + // know that the member call expression is the argument. + allOf(hasParentCall, expr().bind("parentCallArg")), + + // This is the case where the call is not the direct parent, so we + // get its child to know in which argument tree we are. + hasAncestor(expr(hasParentCall, expr().bind("parentCallArg"))), + // To make it optional. 
+ anything())), + this); +} + +void DanglingOnTemporaryChecker::check(const MatchFinder::MatchResult &Result) { + /////////////////////////////////////// + // Quick annotation conflict checker // + /////////////////////////////////////// + + const char *ErrorInvalidRefQualified = "methods annotated with " + "MOZ_NO_DANGLING_ON_TEMPORARIES " + "cannot be && ref-qualified"; + + const char *ErrorInvalidPointer = "methods annotated with " + "MOZ_NO_DANGLING_ON_TEMPORARIES must " + "return a pointer"; + + if (auto InvalidRefQualified = + Result.Nodes.getNodeAs("invalidMethodRefQualified")) { + diag(InvalidRefQualified->getLocation(), ErrorInvalidRefQualified, + DiagnosticIDs::Error); + return; + } + + if (auto InvalidPointer = + Result.Nodes.getNodeAs("invalidMethodPointer")) { + diag(InvalidPointer->getLocation(), ErrorInvalidPointer, + DiagnosticIDs::Error); + return; + } + + ////////////////// + // Main checker // + ////////////////// + + const char *Error = "calling `%0` on a temporary, potentially allowing use " + "after free of the raw pointer"; + + const char *EscapeStmtNote = + "the raw pointer escapes the function scope here"; + + const ObjCMessageExpr *ParentObjCMessageExpr = + Result.Nodes.getNodeAs("parentObjCMessageExpr"); + + // We don't care about cases in ObjC message expressions. + if (ParentObjCMessageExpr) { + return; + } + + const CXXMemberCallExpr *MemberCall = + Result.Nodes.getNodeAs("memberCallExpr"); + + const CallExpr *ParentCallExpr = + Result.Nodes.getNodeAs("parentCallExpr"); + const CXXConstructExpr *ParentConstructExpr = + Result.Nodes.getNodeAs("parentConstructExpr"); + const CXXOperatorCallExpr *ParentOperatorCallExpr = + Result.Nodes.getNodeAs("parentOperatorCallExpr"); + const Expr *ParentCallArg = Result.Nodes.getNodeAs("parentCallArg"); + + // Just in case. + if (!MemberCall) { + return; + } + + // If we have a parent call, we check whether or not we escape the function + // being called. + if (ParentOperatorCallExpr || ParentCallExpr || ParentConstructExpr) { + // Just in case. + if (!ParentCallArg) { + return; + } + + // No default constructor so we can't construct it using if/else. + auto FunctionEscapeData = + ParentOperatorCallExpr + ? escapesFunction(ParentCallArg, ParentOperatorCallExpr) + : ParentCallExpr + ? escapesFunction(ParentCallArg, ParentCallExpr) + : escapesFunction(ParentCallArg, ParentConstructExpr); + + // If there was an error in the escapesFunction call. + if (std::error_code ec = FunctionEscapeData.getError()) { + // FIXME: For now we ignore the variadic case and just consider that the + // argument doesn't escape the function. Same for the case where we can't + // find the function declaration or if the function is builtin. + if (static_cast(ec.value()) == + EscapesFunctionError::FunctionIsVariadic || + static_cast(ec.value()) == + EscapesFunctionError::FunctionDeclNotFound || + static_cast(ec.value()) == + EscapesFunctionError::FunctionIsBuiltin) { + return; + } + + // We emit the internal checker error and return. + diag(MemberCall->getExprLoc(), + std::string(ec.category().name()) + " error: " + ec.message(), + DiagnosticIDs::Error); + return; + } + + // We deconstruct the function escape data. + const Stmt *EscapeStmt; + const Decl *EscapeDecl; + std::tie(EscapeStmt, EscapeDecl) = *FunctionEscapeData; + + // If we didn't escape a parent function, we're done: we don't emit any + // diagnostic. 
+ if (!EscapeStmt || !EscapeDecl) { + return; + } + + // We emit the error diagnostic indicating that we are calling the method + // temporary. + diag(MemberCall->getExprLoc(), Error, DiagnosticIDs::Error) + << MemberCall->getMethodDecl()->getName() + << MemberCall->getSourceRange(); + + // We indicate the escape statement. + diag(EscapeStmt->getBeginLoc(), EscapeStmtNote, DiagnosticIDs::Note) + << EscapeStmt->getSourceRange(); + + // We build the escape note along with its source range. + StringRef EscapeDeclNote; + SourceRange EscapeDeclRange; + if (isa(EscapeDecl)) { + EscapeDeclNote = "through the parameter declared here"; + EscapeDeclRange = EscapeDecl->getSourceRange(); + } else if (isa(EscapeDecl)) { + EscapeDeclNote = "through the variable declared here"; + EscapeDeclRange = EscapeDecl->getSourceRange(); + } else if (isa(EscapeDecl)) { + EscapeDeclNote = "through the field declared here"; + EscapeDeclRange = EscapeDecl->getSourceRange(); + } else if (auto FuncDecl = dyn_cast(EscapeDecl)) { + EscapeDeclNote = "through the return value of the function declared here"; + EscapeDeclRange = FuncDecl->getReturnTypeSourceRange(); + } else { + return; + } + + // We emit the declaration note indicating through which decl the argument + // escapes. + diag(EscapeDecl->getLocation(), EscapeDeclNote, DiagnosticIDs::Note) + << EscapeDeclRange; + } else { + // We emit the error diagnostic indicating that we are calling the method + // temporary. + diag(MemberCall->getExprLoc(), Error, DiagnosticIDs::Error) + << MemberCall->getMethodDecl()->getName() + << MemberCall->getSourceRange(); + } +} diff --git a/build/clang-plugin/DanglingOnTemporaryChecker.h b/build/clang-plugin/DanglingOnTemporaryChecker.h new file mode 100644 index 0000000000..43f19ebedc --- /dev/null +++ b/build/clang-plugin/DanglingOnTemporaryChecker.h @@ -0,0 +1,19 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef DanglingOnTemporaryChecker_h__ +#define DanglingOnTemporaryChecker_h__ + +#include "plugin.h" + +class DanglingOnTemporaryChecker : public BaseCheck { +public: + DanglingOnTemporaryChecker(StringRef CheckName, + ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; +}; + +#endif diff --git a/build/clang-plugin/DiagnosticsMatcher.cpp b/build/clang-plugin/DiagnosticsMatcher.cpp new file mode 100644 index 0000000000..96d2f60440 --- /dev/null +++ b/build/clang-plugin/DiagnosticsMatcher.cpp @@ -0,0 +1,17 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#include "DiagnosticsMatcher.h" + +DiagnosticsMatcher::DiagnosticsMatcher(CompilerInstance &CI) { +#define CHECK(cls, name) \ + cls##_.registerMatchers(&AstMatcher); \ + cls##_.registerPPCallbacks(CI); +#include "Checks.inc" +#include "external/ExternalChecks.inc" +#ifdef MOZ_CLANG_PLUGIN_ALPHA +#include "alpha/AlphaChecks.inc" +#endif +#undef CHECK +} diff --git a/build/clang-plugin/DiagnosticsMatcher.h b/build/clang-plugin/DiagnosticsMatcher.h new file mode 100644 index 0000000000..2738541f62 --- /dev/null +++ b/build/clang-plugin/DiagnosticsMatcher.h @@ -0,0 +1,31 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef DiagnosticsMatcher_h__ +#define DiagnosticsMatcher_h__ + +#include "ChecksIncludes.inc" +#include "external/ExternalIncludes.inc" +#ifdef MOZ_CLANG_PLUGIN_ALPHA +#include "alpha/AlphaIncludes.inc" +#endif + +class DiagnosticsMatcher { +public: + DiagnosticsMatcher(CompilerInstance &CI); + + ASTConsumerPtr makeASTConsumer() { return AstMatcher.newASTConsumer(); } + +private: +#define CHECK(cls, name) cls cls##_{name}; +#include "Checks.inc" +#include "external/ExternalChecks.inc" +#ifdef MOZ_CLANG_PLUGIN_ALPHA +#include "alpha/AlphaChecks.inc" +#endif +#undef CHECK + MatchFinder AstMatcher; +}; + +#endif diff --git a/build/clang-plugin/ExplicitImplicitChecker.cpp b/build/clang-plugin/ExplicitImplicitChecker.cpp new file mode 100644 index 0000000000..e0620f502f --- /dev/null +++ b/build/clang-plugin/ExplicitImplicitChecker.cpp @@ -0,0 +1,36 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "ExplicitImplicitChecker.h" +#include "CustomMatchers.h" + +void ExplicitImplicitChecker::registerMatchers(MatchFinder *AstMatcher) { + AstMatcher->addMatcher( + cxxConstructorDecl( + isInterestingImplicitCtor(), + ofClass(allOf(isConcreteClass(), decl().bind("class"))), + unless(isMarkedImplicit())) + .bind("ctor"), + this); +} + +void ExplicitImplicitChecker::check(const MatchFinder::MatchResult &Result) { + // We've already checked everything in the matcher, so we just have to report + // the error. + + const CXXConstructorDecl *Ctor = + Result.Nodes.getNodeAs("ctor"); + const CXXRecordDecl *Declaration = + Result.Nodes.getNodeAs("class"); + + FixItHint FixItHint = + FixItHint::CreateInsertion(Ctor->getLocation(), "explicit "); + diag(Ctor->getLocation(), "bad implicit conversion constructor for %0", + DiagnosticIDs::Error) + << Declaration->getDeclName(); + diag(Ctor->getLocation(), + "consider adding the explicit keyword to the constructor", + DiagnosticIDs::Note) + << FixItHint; +} diff --git a/build/clang-plugin/ExplicitImplicitChecker.h b/build/clang-plugin/ExplicitImplicitChecker.h new file mode 100644 index 0000000000..f1591c999e --- /dev/null +++ b/build/clang-plugin/ExplicitImplicitChecker.h @@ -0,0 +1,18 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#ifndef ExplicitImplicitChecker_h__ +#define ExplicitImplicitChecker_h__ + +#include "plugin.h" + +class ExplicitImplicitChecker : public BaseCheck { +public: + ExplicitImplicitChecker(StringRef CheckName, ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; +}; + +#endif diff --git a/build/clang-plugin/ExplicitOperatorBoolChecker.cpp b/build/clang-plugin/ExplicitOperatorBoolChecker.cpp new file mode 100644 index 0000000000..db2c19e517 --- /dev/null +++ b/build/clang-plugin/ExplicitOperatorBoolChecker.cpp @@ -0,0 +1,34 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "ExplicitOperatorBoolChecker.h" +#include "CustomMatchers.h" + +void ExplicitOperatorBoolChecker::registerMatchers(MatchFinder *AstMatcher) { + // Older clang versions such as the ones used on the infra recognize these + // conversions as 'operator _Bool', but newer clang versions recognize these + // as 'operator bool'. + AstMatcher->addMatcher( + cxxMethodDecl(anyOf(hasName("operator bool"), hasName("operator _Bool"))) + .bind("node"), + this); +} + +void ExplicitOperatorBoolChecker::check( + const MatchFinder::MatchResult &Result) { + const CXXConversionDecl *Method = + Result.Nodes.getNodeAs("node"); + const CXXRecordDecl *Clazz = Method->getParent(); + + if (!Method->isExplicit() && !hasCustomAttribute(Method) && + !ASTIsInSystemHeader(Method->getASTContext(), *Method) && + isInterestingDeclForImplicitConversion(Method)) { + diag(Method->getBeginLoc(), "bad implicit conversion operator for %0", + DiagnosticIDs::Error) + << Clazz; + diag(Method->getBeginLoc(), "consider adding the explicit keyword to %0", + DiagnosticIDs::Note) + << "'operator bool'"; + } +} diff --git a/build/clang-plugin/ExplicitOperatorBoolChecker.h b/build/clang-plugin/ExplicitOperatorBoolChecker.h new file mode 100644 index 0000000000..90909e6296 --- /dev/null +++ b/build/clang-plugin/ExplicitOperatorBoolChecker.h @@ -0,0 +1,19 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef ExplicitOperatorBoolChecker_h__ +#define ExplicitOperatorBoolChecker_h__ + +#include "plugin.h" + +class ExplicitOperatorBoolChecker : public BaseCheck { +public: + ExplicitOperatorBoolChecker(StringRef CheckName, + ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; +}; + +#endif diff --git a/build/clang-plugin/FopenUsageChecker.cpp b/build/clang-plugin/FopenUsageChecker.cpp new file mode 100644 index 0000000000..905fc91f47 --- /dev/null +++ b/build/clang-plugin/FopenUsageChecker.cpp @@ -0,0 +1,73 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#include "FopenUsageChecker.h" +#include "CustomMatchers.h" + +void FopenUsageChecker::registerMatchers(MatchFinder *AstMatcher) { + + auto hasConstCharPtrParam = [](const unsigned int Position) { + return hasParameter( + Position, hasType(hasCanonicalType(pointsTo(asString("const char"))))); + }; + + auto hasParamOfType = [](const unsigned int Position, const char *Name) { + return hasParameter(Position, hasType(asString(Name))); + }; + + auto hasIntegerParam = [](const unsigned int Position) { + return hasParameter(Position, hasType(isInteger())); + }; + + AstMatcher->addMatcher( + callExpr( + allOf( + isFirstParty(), + callee(functionDecl(allOf( + isInSystemHeader(), + anyOf( + allOf(anyOf(allOf(hasName("fopen"), + hasConstCharPtrParam(0)), + allOf(hasName("fopen_s"), + hasParameter( + 0, hasType(pointsTo(pointsTo( + asString("FILE"))))), + hasConstCharPtrParam(2))), + hasConstCharPtrParam(1)), + allOf(anyOf(hasName("open"), + allOf(hasName("_open"), hasIntegerParam(2)), + allOf(hasName("_sopen"), hasIntegerParam(3))), + hasConstCharPtrParam(0), hasIntegerParam(1)), + allOf(hasName("_sopen_s"), + hasParameter(0, hasType(pointsTo(isInteger()))), + hasConstCharPtrParam(1), hasIntegerParam(2), + hasIntegerParam(3), hasIntegerParam(4)), + allOf(hasName("OpenFile"), hasConstCharPtrParam(0), + hasParamOfType(1, "LPOFSTRUCT"), + hasIntegerParam(2)), + allOf(hasName("CreateFileA"), hasConstCharPtrParam(0), + hasIntegerParam(1), hasIntegerParam(2), + hasParamOfType(3, "LPSECURITY_ATTRIBUTES"), + hasIntegerParam(4), hasIntegerParam(5), + hasParamOfType(6, "HANDLE")))))), + unless(isInWhitelistForFopenUsage()))) + .bind("funcCall"), + this); +} + +void FopenUsageChecker::check(const MatchFinder::MatchResult &Result) { + const CallExpr *FuncCall = Result.Nodes.getNodeAs("funcCall"); + static const char *ExtraInfo = + "On Windows executed functions: fopen, fopen_s, open, _open, _sopen, " + "_sopen_s, OpenFile, CreateFileA should never be used due to lossy " + "conversion from UTF8 to ANSI."; + + if (FuncCall) { + diag(FuncCall->getBeginLoc(), + "Usage of ASCII file functions (here %0) is forbidden on Windows.", + DiagnosticIDs::Warning) + << FuncCall->getDirectCallee()->getName(); + diag(FuncCall->getBeginLoc(), ExtraInfo, DiagnosticIDs::Note); + } +} diff --git a/build/clang-plugin/FopenUsageChecker.h b/build/clang-plugin/FopenUsageChecker.h new file mode 100644 index 0000000000..9dc71831ab --- /dev/null +++ b/build/clang-plugin/FopenUsageChecker.h @@ -0,0 +1,18 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef FopenUsageChecker_h__ +#define FopenUsageChecker_h__ + +#include "plugin.h" + +class FopenUsageChecker : public BaseCheck { +public: + FopenUsageChecker(StringRef CheckName, ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; +}; + +#endif diff --git a/build/clang-plugin/KungFuDeathGripChecker.cpp b/build/clang-plugin/KungFuDeathGripChecker.cpp new file mode 100644 index 0000000000..03bb20514f --- /dev/null +++ b/build/clang-plugin/KungFuDeathGripChecker.cpp @@ -0,0 +1,114 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#include "KungFuDeathGripChecker.h" +#include "CustomMatchers.h" + +void KungFuDeathGripChecker::registerMatchers(MatchFinder *AstMatcher) { + AstMatcher->addMatcher(varDecl(allOf(hasType(isRefPtr()), hasLocalStorage(), + hasInitializer(anything()))) + .bind("decl"), + this); +} + +void KungFuDeathGripChecker::check(const MatchFinder::MatchResult &Result) { + const char *Error = "Unused \"kungFuDeathGrip\" %0 objects constructed from " + "%1 are prohibited"; + const char *Note = "Please switch all accesses to this %0 to go through " + "'%1', or explicitly pass '%1' to `mozilla::Unused`"; + + const VarDecl *D = Result.Nodes.getNodeAs("decl"); + if (D->isReferenced()) { + return; + } + + // Not interested in parameters. + if (isa(D) || isa(D)) { + return; + } + + const Expr *E = IgnoreTrivials(D->getInit()); + const CXXConstructExpr *CE = dyn_cast(E); + if (CE && CE->getNumArgs() == 0) { + // We don't report an error when we construct and don't use a nsCOMPtr / + // nsRefPtr with no arguments. We don't report it because the error is not + // related to the current check. In the future it may be reported through a + // more generic mechanism. + return; + } + + // We don't want to look at the single argument conversion constructors + // which are inbetween the declaration and the actual object which we are + // assigning into the nsCOMPtr/RefPtr. To do this, we repeatedly + // IgnoreTrivials, then look at the expression. If it is one of these + // conversion constructors, we ignore it and continue to dig. + while ((CE = dyn_cast(E)) && CE->getNumArgs() == 1) { + E = IgnoreTrivials(CE->getArg(0)); + } + + // If the argument expression is an xvalue, we are not taking a copy of + // anything. + if (E->isXValue()) { + return; + } + + // It is possible that the QualType doesn't point to a type yet so we are + // not interested. + if (E->getType().isNull()) { + return; + } + + // We allow taking a kungFuDeathGrip of `this` because it cannot change + // beneath us, so calling directly through `this` is OK. This is the same + // for local variable declarations. + // + // We also don't complain about unused RefPtrs which are constructed from + // the return value of a new expression, as these are required in order to + // immediately destroy the value created (which was presumably created for + // its side effects), and are not used as a death grip. + if (isa(E) || isa(E) || isa(E)) { + return; + } + + // These types are assigned into nsCOMPtr and RefPtr for their side effects, + // and not as a kungFuDeathGrip. We don't want to consider RefPtr and nsCOMPtr + // types which are initialized with these types as errors. 
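  // For illustration (interface and contract ID invented): a local such as
  //   nsCOMPtr<nsIFoo> foo = do_GetService("@mozilla.org/foo;1");
  // is initialized from one of the helper types listed below purely for the
  // side effect of obtaining the service, so it is not reported even if the
  // variable is never referenced again.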
+ const TagDecl *TD = E->getType()->getAsTagDecl(); + if (TD && TD->getIdentifier()) { + static const char *IgnoreTypes[] = { + "already_AddRefed", + "nsGetServiceByCID", + "nsGetServiceByCIDWithError", + "nsGetServiceByContractID", + "nsGetServiceByContractIDWithError", + "nsCreateInstanceByCID", + "nsCreateInstanceByContractID", + "nsCreateInstanceFromFactory", + }; + + for (uint32_t i = 0; i < sizeof(IgnoreTypes) / sizeof(IgnoreTypes[0]); + ++i) { + if (TD->getName() == IgnoreTypes[i]) { + return; + } + } + } + + // Report the error + const char *ErrThing; + const char *NoteThing; + if (isa(E)) { + ErrThing = "members"; + NoteThing = "member"; + } else { + ErrThing = "temporary values"; + NoteThing = "value"; + } + + // We cannot provide the note if we don't have an initializer + diag(D->getBeginLoc(), Error, DiagnosticIDs::Error) + << D->getType() << ErrThing; + diag(E->getBeginLoc(), Note, DiagnosticIDs::Note) + << NoteThing << getNameChecked(D); +} diff --git a/build/clang-plugin/KungFuDeathGripChecker.h b/build/clang-plugin/KungFuDeathGripChecker.h new file mode 100644 index 0000000000..6bb2c76835 --- /dev/null +++ b/build/clang-plugin/KungFuDeathGripChecker.h @@ -0,0 +1,18 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef KungFuDeathGripChecker_h__ +#define KungFuDeathGripChecker_h__ + +#include "plugin.h" + +class KungFuDeathGripChecker : public BaseCheck { +public: + KungFuDeathGripChecker(StringRef CheckName, ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; +}; + +#endif diff --git a/build/clang-plugin/LoadLibraryUsageChecker.cpp b/build/clang-plugin/LoadLibraryUsageChecker.cpp new file mode 100644 index 0000000000..1c7d336ea9 --- /dev/null +++ b/build/clang-plugin/LoadLibraryUsageChecker.cpp @@ -0,0 +1,34 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "LoadLibraryUsageChecker.h" +#include "CustomMatchers.h" + +// On MacOS the filesystem is UTF-8, on linux the canonical filename is 8-bit +// string. On Windows data loss conversion will occur. This checker restricts +// the use of ASCII file functions for loading libraries. 
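// For illustration (the variable is invented): a call like
//   LoadLibraryA(modulePath.get());
// is reported, while LoadLibraryA("kernel32.dll") is not, because the matcher
// below exempts calls whose first argument is a string literal.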
+ +void LoadLibraryUsageChecker::registerMatchers(MatchFinder *AstMatcher) { + AstMatcher->addMatcher( + callExpr( + allOf(isFirstParty(), + callee(functionDecl(anyOf( + allOf(isInSystemHeader(), anyOf(hasName("LoadLibraryA"), + hasName("LoadLibraryExA"))), + hasName("PR_LoadLibrary")))), + unless(hasArgument(0, stringLiteral())))) + .bind("funcCall"), + this); +} + +void LoadLibraryUsageChecker::check(const MatchFinder::MatchResult &Result) { + const CallExpr *FuncCall = Result.Nodes.getNodeAs("funcCall"); + + if (FuncCall) { + diag(FuncCall->getBeginLoc(), + "Usage of ASCII file functions (such as %0) is forbidden.", + DiagnosticIDs::Error) + << FuncCall->getDirectCallee()->getName(); + } +} diff --git a/build/clang-plugin/LoadLibraryUsageChecker.h b/build/clang-plugin/LoadLibraryUsageChecker.h new file mode 100644 index 0000000000..f7b60234be --- /dev/null +++ b/build/clang-plugin/LoadLibraryUsageChecker.h @@ -0,0 +1,18 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef LoadLibraryUsageChecker_h__ +#define LoadLibraryUsageChecker_h__ + +#include "plugin.h" + +class LoadLibraryUsageChecker : public BaseCheck { +public: + LoadLibraryUsageChecker(StringRef CheckName, ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; +}; + +#endif // !defined(LoadLibraryUsageChecker_h__) diff --git a/build/clang-plugin/Makefile.in b/build/clang-plugin/Makefile.in new file mode 100644 index 0000000000..39bcd37573 --- /dev/null +++ b/build/clang-plugin/Makefile.in @@ -0,0 +1,19 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +include $(topsrcdir)/config/config.mk + +HOST_LDFLAGS := $(LLVM_LDFLAGS) $(CLANG_LDFLAGS) + +ifeq ($(HOST_OS_ARCH),WINNT) +# clang-plugin.dll needs to be deterministic for sccache hashes +HOST_LDFLAGS += -brepro +else +HOST_LDFLAGS += -shared +endif + +# Use the default OS X deployment target to enable using the libc++ headers +# correctly. Note that the binary produced here is a host tool and doesn't need +# to be distributed. +MACOSX_DEPLOYMENT_TARGET := diff --git a/build/clang-plugin/MemMoveAnnotation.h b/build/clang-plugin/MemMoveAnnotation.h new file mode 100644 index 0000000000..b423383744 --- /dev/null +++ b/build/clang-plugin/MemMoveAnnotation.h @@ -0,0 +1,80 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef MemMoveAnnotation_h__ +#define MemMoveAnnotation_h__ + +#include "CustomMatchers.h" +#include "CustomTypeAnnotation.h" +#include "Utils.h" + +#include + +class MemMoveAnnotation final : public CustomTypeAnnotation { +public: + MemMoveAnnotation() + : CustomTypeAnnotation(moz_non_memmovable, "non-memmove()able") {} + + virtual ~MemMoveAnnotation() {} + +protected: + std::string getImplicitReason(const TagDecl *D) const override { + // Annotate everything in ::std, with a few exceptions; see bug + // 1201314 for discussion. 
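    // For illustration (struct invented): in
    //   struct Entry { std::string mName; };
    // the std::string member is treated as non-memmove()able by this rule (it
    // is not in the isNameExcepted list below) and the annotation then
    // propagates to Entry, whereas std::pair and std::atomic are excepted.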
+ if (getDeclarationNamespace(D) == "std") { + // This doesn't check that it's really ::std::pair and not + // ::std::something_else::pair, but should be good enough. + StringRef Name = getNameChecked(D); + if (isNameExcepted(Name.data())) { + return ""; + } + return "it is an stl-provided type not guaranteed to be memmove-able"; + } + return ""; + } + +private: + bool isNameExcepted(const char *Name) const { + static std::unordered_set NamesSet = { + {"pair"}, + {"atomic"}, + // libstdc++ specific names + {"__atomic_base"}, + {"atomic_bool"}, + {"__cxx_atomic_impl"}, + {"__cxx_atomic_base_impl"}, + {"__pair_base"}, + // MSVCRT specific names + {"_Atomic_impl"}, + {"_Atomic_base"}, + {"_Atomic_bool"}, + {"_Atomic_char"}, + {"_Atomic_schar"}, + {"_Atomic_uchar"}, + {"_Atomic_char16_t"}, + {"_Atomic_char32_t"}, + {"_Atomic_wchar_t"}, + {"_Atomic_short"}, + {"_Atomic_ushort"}, + {"_Atomic_int"}, + {"_Atomic_uint"}, + {"_Atomic_long"}, + {"_Atomic_ulong"}, + {"_Atomic_llong"}, + {"_Atomic_ullong"}, + {"_Atomic_address"}, + // MSVCRT 2019 + {"_Atomic_integral"}, + {"_Atomic_integral_facade"}, + {"_Atomic_padded"}, + {"_Atomic_pointer"}, + {"_Atomic_storage"}}; + + return NamesSet.find(Name) != NamesSet.end(); + } +}; + +extern MemMoveAnnotation NonMemMovable; + +#endif diff --git a/build/clang-plugin/MozCheckAction.cpp b/build/clang-plugin/MozCheckAction.cpp new file mode 100644 index 0000000000..6ec8c92bd8 --- /dev/null +++ b/build/clang-plugin/MozCheckAction.cpp @@ -0,0 +1,27 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "DiagnosticsMatcher.h" +#include "plugin.h" +#include "clang/Frontend/FrontendPluginRegistry.h" + +class MozCheckAction : public PluginASTAction { +public: + ASTConsumerPtr CreateASTConsumer(CompilerInstance &CI, + StringRef FileName) override { + void *Buffer = CI.getASTContext().Allocate(); + auto Matcher = new (Buffer) DiagnosticsMatcher(CI); + return Matcher->makeASTConsumer(); + } + + bool ParseArgs(const CompilerInstance &CI, + const std::vector &Args) override { + return true; + } +}; + +static FrontendPluginRegistry::Add X("moz-check", + "check moz action"); + +DenseMap InThirdPartyPathCache; diff --git a/build/clang-plugin/MozillaTidyModule.cpp b/build/clang-plugin/MozillaTidyModule.cpp new file mode 100644 index 0000000000..b1870e7454 --- /dev/null +++ b/build/clang-plugin/MozillaTidyModule.cpp @@ -0,0 +1,45 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#ifdef CLANG_TIDY + +#include "../ClangTidy.h" +#include "../ClangTidyModule.h" +#include "../ClangTidyModuleRegistry.h" +#include "ChecksIncludes.inc" +#include "external/ExternalIncludes.inc" +#ifdef MOZ_CLANG_PLUGIN_ALPHA +#include "alpha/AlphaIncludes.inc" +#endif + +using namespace clang::ast_matchers; + +namespace clang { +namespace tidy { + +class MozillaModule : public ClangTidyModule { +public: + void addCheckFactories(ClangTidyCheckFactories &CheckFactories) override { +#define CHECK(cls, name) CheckFactories.registerCheck("mozilla-" name); +#include "Checks.inc" +#include "external/ExternalChecks.inc" +#ifdef MOZ_CLANG_PLUGIN_ALPHA +#include "alpha/AlphaChecks.inc" +#endif +#undef CHECK + } +}; + +// Register the MozillaTidyModule using this statically initialized variable. +static ClangTidyModuleRegistry::Add + X("mozilla-module", "Adds Mozilla lint checks."); + +} // namespace tidy +} // namespace clang + +// This anchor is used to force the linker to link in the generated object file +// and thus register the MozillaModule. +volatile int MozillaModuleAnchorSource = 0; + +#endif diff --git a/build/clang-plugin/MustOverrideChecker.cpp b/build/clang-plugin/MustOverrideChecker.cpp new file mode 100644 index 0000000000..b19ae94aac --- /dev/null +++ b/build/clang-plugin/MustOverrideChecker.cpp @@ -0,0 +1,60 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "MustOverrideChecker.h" +#include "CustomMatchers.h" + +void MustOverrideChecker::registerMatchers(MatchFinder *AstMatcher) { + AstMatcher->addMatcher(cxxRecordDecl(isDefinition()).bind("class"), this); +} + +void MustOverrideChecker::registerPPCallbacks(CompilerInstance &CI) { + this->CI = &CI; +} + +void MustOverrideChecker::check(const MatchFinder::MatchResult &Result) { + auto D = Result.Nodes.getNodeAs("class"); + + // Look through all of our immediate bases to find methods that need to be + // overridden + typedef std::vector OverridesVector; + OverridesVector MustOverrides; + for (const auto &Base : D->bases()) { + // The base is either a class (CXXRecordDecl) or it's a templated class... + CXXRecordDecl *Parent = Base.getType() + .getDesugaredType(D->getASTContext()) + ->getAsCXXRecordDecl(); + // The parent might not be resolved to a type yet. In this case, we can't + // do any checking here. For complete correctness, we should visit + // template instantiations, but this case is likely to be rare, so we will + // ignore it until it becomes important. + if (!Parent) { + continue; + } + Parent = Parent->getDefinition(); + for (const auto &M : Parent->methods()) { + if (hasCustomAttribute(M)) + MustOverrides.push_back(M); + } + } + + for (auto &O : MustOverrides) { + bool Overridden = false; + for (const auto &M : D->methods()) { + // The way that Clang checks if a method M overrides its parent method + // is if the method has the same name but would not overload. 
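+      // For example (hypothetical signatures): against a MOZ_MUST_OVERRIDE
+      // base method `virtual void Frob(int)`, a derived `void Frob(int)` has
+      // the same name and is not an overload, so it counts as overriding,
+      // whereas a derived `void Frob(double)` merely overloads the name and
+      // leaves the requirement unmet.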
+ if (getNameChecked(M) == getNameChecked(O) && + !CI->getSema().IsOverload(M, O, false)) { + Overridden = true; + break; + } + } + if (!Overridden) { + diag(D->getLocation(), "%0 must override %1", DiagnosticIDs::Error) + << D->getDeclName() << O->getDeclName(); + diag(O->getLocation(), "function to override is here", + DiagnosticIDs::Note); + } + } +} diff --git a/build/clang-plugin/MustOverrideChecker.h b/build/clang-plugin/MustOverrideChecker.h new file mode 100644 index 0000000000..ed1835eb57 --- /dev/null +++ b/build/clang-plugin/MustOverrideChecker.h @@ -0,0 +1,22 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef MustOverrideChecker_h__ +#define MustOverrideChecker_h__ + +#include "plugin.h" + +class MustOverrideChecker : public BaseCheck { +public: + MustOverrideChecker(StringRef CheckName, ContextType *Context = nullptr) + : BaseCheck(CheckName, Context), CI(nullptr) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void registerPPCallbacks(CompilerInstance &CI) override; + void check(const MatchFinder::MatchResult &Result) override; + +private: + const CompilerInstance *CI; +}; + +#endif diff --git a/build/clang-plugin/MustReturnFromCallerChecker.cpp b/build/clang-plugin/MustReturnFromCallerChecker.cpp new file mode 100644 index 0000000000..bdf4332dc4 --- /dev/null +++ b/build/clang-plugin/MustReturnFromCallerChecker.cpp @@ -0,0 +1,136 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "MustReturnFromCallerChecker.h" +#include "CustomMatchers.h" + +void MustReturnFromCallerChecker::registerMatchers(MatchFinder *AstMatcher) { + // Look for a call to a MOZ_MUST_RETURN_FROM_CALLER member + AstMatcher->addMatcher( + cxxMemberCallExpr( + on(declRefExpr(to(parmVarDecl()))), + callee(functionDecl(isMozMustReturnFromCaller())), + anyOf(hasAncestor(lambdaExpr().bind("containing-lambda")), + hasAncestor(functionDecl().bind("containing-func")))) + .bind("call"), + this); +} + +void MustReturnFromCallerChecker::check( + const MatchFinder::MatchResult &Result) { + const auto *ContainingLambda = + Result.Nodes.getNodeAs("containing-lambda"); + const auto *ContainingFunc = + Result.Nodes.getNodeAs("containing-func"); + const auto *Call = Result.Nodes.getNodeAs("call"); + + Stmt *Body = nullptr; + if (ContainingLambda) { + Body = ContainingLambda->getBody(); + } else if (ContainingFunc) { + Body = ContainingFunc->getBody(); + } else { + return; + } + assert(Body && "Should have a body by this point"); + + // Generate the CFG for the enclosing function or decl. + CFG::BuildOptions Options; + std::unique_ptr TheCFG = + CFG::buildCFG(nullptr, Body, Result.Context, Options); + if (!TheCFG) { + return; + } + + // Determine which block in the CFG we want to look at the successors of. + StmtToBlockMap BlockMap(TheCFG.get(), Result.Context); + size_t CallIndex; + const auto *Block = BlockMap.blockContainingStmt(Call, &CallIndex); + if (!Block) { + // This statement is not within the CFG! 
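+    // Without a containing block there is no path to walk forward along, so
+    // there is nothing to verify and we simply accept the call.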
+ return; + } + + if (!immediatelyReturns(Block, Result.Context, CallIndex + 1)) { + diag(Call->getBeginLoc(), + "You must immediately return after calling this function", + DiagnosticIDs::Error); + } +} + +bool MustReturnFromCallerChecker::isIgnorable(const Stmt *S) { + auto AfterTrivials = IgnoreTrivials(S); + + // After a call to MOZ_MUST_RETURN_FROM_CALLER function it's ok to have any of + // these expressions. + if (isa(AfterTrivials) || isa(AfterTrivials) || + isa(AfterTrivials) || isa(AfterTrivials) || + isa(AfterTrivials) || + isa(AfterTrivials) || + isa(AfterTrivials) || + isa(AfterTrivials)) { + return true; + } + + // Solitary `this` should be permited, like in the context `return this;` + if (auto TE = dyn_cast(AfterTrivials)) { + if (TE->child_begin() == TE->child_end()) { + return true; + } + return false; + } + + // For UnaryOperator make sure we only accept arithmetic operations. + if (auto UO = dyn_cast(AfterTrivials)) { + if (!UO->isArithmeticOp()) { + return false; + } + return isIgnorable(UO->getSubExpr()); + } + + // It's also OK to call any function or method which is annotated with + // MOZ_MAY_CALL_AFTER_MUST_RETURN. We consider all CXXConversionDecls + // to be MOZ_MAY_CALL_AFTER_MUST_RETURN (like operator T*()). + if (auto CE = dyn_cast(AfterTrivials)) { + auto Callee = CE->getDirectCallee(); + if (Callee && hasCustomAttribute(Callee)) { + return true; + } + + if (Callee && isa(Callee)) { + return true; + } + } + return false; +} + +bool MustReturnFromCallerChecker::immediatelyReturns( + RecurseGuard Block, ASTContext *TheContext, + size_t FromIdx) { + if (Block.isRepeat()) { + return false; + } + + for (size_t I = FromIdx; I < Block->size(); ++I) { + Optional S = (*Block)[I].getAs(); + if (!S) { + continue; + } + + // Some statements should be ignored by default due to their CFG context. + if (isIgnorable(S->getStmt())) { + continue; + } + + // Otherwise, this expression is problematic. + return false; + } + + for (auto Succ = Block->succ_begin(); Succ != Block->succ_end(); ++Succ) { + if (!immediatelyReturns(Block.recurse(*Succ), TheContext, 0)) { + return false; + } + } + return true; +} diff --git a/build/clang-plugin/MustReturnFromCallerChecker.h b/build/clang-plugin/MustReturnFromCallerChecker.h new file mode 100644 index 0000000000..68630bf7a7 --- /dev/null +++ b/build/clang-plugin/MustReturnFromCallerChecker.h @@ -0,0 +1,27 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#ifndef MustReturnFromCallerChecker_h__ +#define MustReturnFromCallerChecker_h__ + +#include "RecurseGuard.h" +#include "StmtToBlockMap.h" +#include "Utils.h" +#include "plugin.h" + +class MustReturnFromCallerChecker : public BaseCheck { +public: + MustReturnFromCallerChecker(StringRef CheckName, + ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; + +private: + bool isIgnorable(const Stmt *S); + bool immediatelyReturns(RecurseGuard Block, + ASTContext *TheContext, size_t FromIdx); +}; + +#endif diff --git a/build/clang-plugin/MustUseChecker.cpp b/build/clang-plugin/MustUseChecker.cpp new file mode 100644 index 0000000000..92acb7ff5b --- /dev/null +++ b/build/clang-plugin/MustUseChecker.cpp @@ -0,0 +1,64 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "MustUseChecker.h" +#include "CustomMatchers.h" +#include "CustomTypeAnnotation.h" + +CustomTypeAnnotation MustUse = + CustomTypeAnnotation(moz_must_use_type, "must-use"); + +void MustUseChecker::registerMatchers(MatchFinder *AstMatcher) { + AstMatcher->addMatcher(switchCase().bind("switchcase"), this); + AstMatcher->addMatcher(compoundStmt().bind("compound"), this); + AstMatcher->addMatcher(ifStmt().bind("if"), this); + AstMatcher->addMatcher(whileStmt().bind("while"), this); + AstMatcher->addMatcher(doStmt().bind("do"), this); + AstMatcher->addMatcher(forStmt().bind("for"), this); + AstMatcher->addMatcher(binaryOperator(binaryCommaOperator()).bind("bin"), + this); +} + +void MustUseChecker::check(const MatchFinder::MatchResult &Result) { + if (auto SC = Result.Nodes.getNodeAs("switchcase")) { + handleUnusedExprResult(SC->getSubStmt()); + } + if (auto C = Result.Nodes.getNodeAs("compound")) { + for (const auto &S : C->body()) { + handleUnusedExprResult(S); + } + } + if (auto IF = Result.Nodes.getNodeAs("if")) { + handleUnusedExprResult(IF->getThen()); + handleUnusedExprResult(IF->getElse()); + } + if (auto W = Result.Nodes.getNodeAs("while")) { + handleUnusedExprResult(W->getBody()); + } + if (auto D = Result.Nodes.getNodeAs("do")) { + handleUnusedExprResult(D->getBody()); + } + if (auto F = Result.Nodes.getNodeAs("for")) { + handleUnusedExprResult(F->getBody()); + handleUnusedExprResult(F->getInit()); + handleUnusedExprResult(F->getInc()); + } + if (auto C = Result.Nodes.getNodeAs("bin")) { + handleUnusedExprResult(C->getLHS()); + } +} + +void MustUseChecker::handleUnusedExprResult(const Stmt *Statement) { + const Expr *E = dyn_cast_or_null(Statement); + if (E) { + E = E->IgnoreImplicit(); // Ignore ExprWithCleanup etc. implicit wrappers + QualType T = E->getType(); + if (MustUse.hasEffectiveAnnotation(T) && !isIgnoredExprForMustUse(E)) { + diag(E->getBeginLoc(), "Unused value of must-use type %0", + DiagnosticIDs::Error) + << T; + MustUse.dumpAnnotationReason(*this, T, E->getBeginLoc()); + } + } +} diff --git a/build/clang-plugin/MustUseChecker.h b/build/clang-plugin/MustUseChecker.h new file mode 100644 index 0000000000..42f1b1a24a --- /dev/null +++ b/build/clang-plugin/MustUseChecker.h @@ -0,0 +1,21 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#ifndef MustUseChecker_h__ +#define MustUseChecker_h__ + +#include "plugin.h" + +class MustUseChecker : public BaseCheck { +public: + MustUseChecker(StringRef CheckName, ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; + +private: + void handleUnusedExprResult(const Stmt *Statement); +}; + +#endif diff --git a/build/clang-plugin/NaNExprChecker.cpp b/build/clang-plugin/NaNExprChecker.cpp new file mode 100644 index 0000000000..6532443f23 --- /dev/null +++ b/build/clang-plugin/NaNExprChecker.cpp @@ -0,0 +1,56 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "NaNExprChecker.h" +#include "CustomMatchers.h" + +void NaNExprChecker::registerMatchers(MatchFinder *AstMatcher) { + AstMatcher->addMatcher( + binaryOperator( + allOf(binaryEqualityOperator(), + hasLHS(has(ignoringParenImpCasts( + declRefExpr(hasType(qualType((isFloat())))).bind("lhs")))), + hasRHS(has(ignoringParenImpCasts( + declRefExpr(hasType(qualType((isFloat())))).bind("rhs")))), + unless(anyOf(isInSystemHeader(), isInWhitelistForNaNExpr())))) + .bind("node"), + this); +} + +void NaNExprChecker::check(const MatchFinder::MatchResult &Result) { + if (!Result.Context->getLangOpts().CPlusPlus) { + // mozilla::IsNaN is not usable in C, so there is no point in issuing these + // warnings. + return; + } + + const BinaryOperator *Expression = + Result.Nodes.getNodeAs("node"); + const DeclRefExpr *LHS = Result.Nodes.getNodeAs("lhs"); + const DeclRefExpr *RHS = Result.Nodes.getNodeAs("rhs"); + const ImplicitCastExpr *LHSExpr = + dyn_cast(Expression->getLHS()); + const ImplicitCastExpr *RHSExpr = + dyn_cast(Expression->getRHS()); + // The AST subtree that we are looking for will look like this: + // -BinaryOperator ==/!= + // |-ImplicitCastExpr LValueToRValue + // | |-DeclRefExpr + // |-ImplicitCastExpr LValueToRValue + // |-DeclRefExpr + // The check below ensures that we are dealing with the correct AST subtree + // shape, and + // also that both of the found DeclRefExpr's point to the same declaration. + if (LHS->getFoundDecl() == RHS->getFoundDecl() && LHSExpr && RHSExpr && + std::distance(LHSExpr->child_begin(), LHSExpr->child_end()) == 1 && + std::distance(RHSExpr->child_begin(), RHSExpr->child_end()) == 1 && + *LHSExpr->child_begin() == LHS && *RHSExpr->child_begin() == RHS) { + diag(Expression->getBeginLoc(), + "comparing a floating point value to itself for " + "NaN checking can lead to incorrect results", + DiagnosticIDs::Error); + diag(Expression->getBeginLoc(), "consider using mozilla::IsNaN instead", + DiagnosticIDs::Note); + } +} diff --git a/build/clang-plugin/NaNExprChecker.h b/build/clang-plugin/NaNExprChecker.h new file mode 100644 index 0000000000..313c3cb4cc --- /dev/null +++ b/build/clang-plugin/NaNExprChecker.h @@ -0,0 +1,18 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#ifndef NaNExprChecker_h__ +#define NaNExprChecker_h__ + +#include "plugin.h" + +class NaNExprChecker : public BaseCheck { +public: + NaNExprChecker(StringRef CheckName, ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; +}; + +#endif diff --git a/build/clang-plugin/NeedsNoVTableTypeChecker.cpp b/build/clang-plugin/NeedsNoVTableTypeChecker.cpp new file mode 100644 index 0000000000..9d5ad039ba --- /dev/null +++ b/build/clang-plugin/NeedsNoVTableTypeChecker.cpp @@ -0,0 +1,39 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "NeedsNoVTableTypeChecker.h" +#include "CustomMatchers.h" + +void NeedsNoVTableTypeChecker::registerMatchers(MatchFinder *AstMatcher) { + AstMatcher->addMatcher( + classTemplateSpecializationDecl( + allOf(hasAnyTemplateArgument(refersToType(hasVTable())), + hasNeedsNoVTableTypeAttr())) + .bind("node"), + this); +} + +void NeedsNoVTableTypeChecker::check(const MatchFinder::MatchResult &Result) { + const ClassTemplateSpecializationDecl *Specialization = + Result.Nodes.getNodeAs("node"); + + // Get the offending template argument + QualType Offender; + const TemplateArgumentList &Args = + Specialization->getTemplateInstantiationArgs(); + for (unsigned i = 0; i < Args.size(); ++i) { + Offender = Args[i].getAsType(); + if (typeHasVTable(Offender)) { + break; + } + } + + diag(Specialization->getBeginLoc(), + "%0 cannot be instantiated because %1 has a VTable", + DiagnosticIDs::Error) + << Specialization << Offender; + diag(Specialization->getPointOfInstantiation(), + "bad instantiation of %0 requested here", DiagnosticIDs::Note) + << Specialization; +} diff --git a/build/clang-plugin/NeedsNoVTableTypeChecker.h b/build/clang-plugin/NeedsNoVTableTypeChecker.h new file mode 100644 index 0000000000..abff4d1554 --- /dev/null +++ b/build/clang-plugin/NeedsNoVTableTypeChecker.h @@ -0,0 +1,18 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef NeedsNoVTableTypeChecker_h__ +#define NeedsNoVTableTypeChecker_h__ + +#include "plugin.h" + +class NeedsNoVTableTypeChecker : public BaseCheck { +public: + NeedsNoVTableTypeChecker(StringRef CheckName, ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; +}; + +#endif diff --git a/build/clang-plugin/NoAddRefReleaseOnReturnChecker.cpp b/build/clang-plugin/NoAddRefReleaseOnReturnChecker.cpp new file mode 100644 index 0000000000..188169e655 --- /dev/null +++ b/build/clang-plugin/NoAddRefReleaseOnReturnChecker.cpp @@ -0,0 +1,32 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#include "NoAddRefReleaseOnReturnChecker.h" +#include "CustomMatchers.h" + +void NoAddRefReleaseOnReturnChecker::registerMatchers(MatchFinder *AstMatcher) { + // Look for all of the calls to AddRef() or Release() + AstMatcher->addMatcher( + memberExpr(isAddRefOrRelease(), hasParent(callExpr())).bind("member"), + this); +} + +void NoAddRefReleaseOnReturnChecker::check( + const MatchFinder::MatchResult &Result) { + const MemberExpr *Member = Result.Nodes.getNodeAs("member"); + const Expr *Base = IgnoreTrivials(Member->getBase()); + + // Check if the call to AddRef() or Release() was made on the result of a call + // to a MOZ_NO_ADDREF_RELEASE_ON_RETURN function or method. + if (auto *Call = dyn_cast(Base)) { + if (auto *Callee = Call->getDirectCallee()) { + if (hasCustomAttribute(Callee)) { + diag(Call->getBeginLoc(), + "%1 cannot be called on the return value of %0", + DiagnosticIDs::Error) + << Callee << dyn_cast(Member->getMemberDecl()); + } + } + } +} diff --git a/build/clang-plugin/NoAddRefReleaseOnReturnChecker.h b/build/clang-plugin/NoAddRefReleaseOnReturnChecker.h new file mode 100644 index 0000000000..525f769eff --- /dev/null +++ b/build/clang-plugin/NoAddRefReleaseOnReturnChecker.h @@ -0,0 +1,19 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef NoAddRefReleaseOnReturnChecker_h__ +#define NoAddRefReleaseOnReturnChecker_h__ + +#include "plugin.h" + +class NoAddRefReleaseOnReturnChecker : public BaseCheck { +public: + NoAddRefReleaseOnReturnChecker(StringRef CheckName, + ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; +}; + +#endif diff --git a/build/clang-plugin/NoAutoTypeChecker.cpp b/build/clang-plugin/NoAutoTypeChecker.cpp new file mode 100644 index 0000000000..937c7c5742 --- /dev/null +++ b/build/clang-plugin/NoAutoTypeChecker.cpp @@ -0,0 +1,21 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "NoAutoTypeChecker.h" +#include "CustomMatchers.h" + +void NoAutoTypeChecker::registerMatchers(MatchFinder *AstMatcher) { + AstMatcher->addMatcher(varDecl(hasType(autoNonAutoableType())).bind("node"), + this); +} + +void NoAutoTypeChecker::check(const MatchFinder::MatchResult &Result) { + const VarDecl *D = Result.Nodes.getNodeAs("node"); + + diag(D->getLocation(), "Cannot use auto to declare a variable of type %0", + DiagnosticIDs::Error) + << D->getType(); + diag(D->getLocation(), "Please write out this type explicitly", + DiagnosticIDs::Note); +} diff --git a/build/clang-plugin/NoAutoTypeChecker.h b/build/clang-plugin/NoAutoTypeChecker.h new file mode 100644 index 0000000000..0801503846 --- /dev/null +++ b/build/clang-plugin/NoAutoTypeChecker.h @@ -0,0 +1,18 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#ifndef NoAutoTypeChecker_h__ +#define NoAutoTypeChecker_h__ + +#include "plugin.h" + +class NoAutoTypeChecker : public BaseCheck { +public: + NoAutoTypeChecker(StringRef CheckName, ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; +}; + +#endif diff --git a/build/clang-plugin/NoDuplicateRefCntMemberChecker.cpp b/build/clang-plugin/NoDuplicateRefCntMemberChecker.cpp new file mode 100644 index 0000000000..eb78a3bd49 --- /dev/null +++ b/build/clang-plugin/NoDuplicateRefCntMemberChecker.cpp @@ -0,0 +1,65 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "NoDuplicateRefCntMemberChecker.h" +#include "CustomMatchers.h" + +void NoDuplicateRefCntMemberChecker::registerMatchers(MatchFinder *AstMatcher) { + AstMatcher->addMatcher(cxxRecordDecl().bind("decl"), this); +} + +void NoDuplicateRefCntMemberChecker::check( + const MatchFinder::MatchResult &Result) { + const CXXRecordDecl *D = Result.Nodes.getNodeAs("decl"); + const FieldDecl *RefCntMember = getClassRefCntMember(D); + const FieldDecl *FoundRefCntBase = nullptr; + + if (!D->hasDefinition()) + return; + D = D->getDefinition(); + + // If we don't have an mRefCnt member, and we have less than 2 superclasses, + // we don't have to run this loop, as neither case will ever apply. + if (!RefCntMember && D->getNumBases() < 2) { + return; + } + + // Check every superclass for whether it has a base with a refcnt member, and + // warn for those which do + for (auto &Base : D->bases()) { + // Determine if this base class has an mRefCnt member + const FieldDecl *BaseRefCntMember = getBaseRefCntMember(Base.getType()); + + if (BaseRefCntMember) { + if (RefCntMember) { + // We have an mRefCnt, and superclass has an mRefCnt + const char *Error = "Refcounted record %0 has multiple mRefCnt members"; + const char *Note1 = "Superclass %0 also has an mRefCnt member"; + const char *Note2 = + "Consider using the _INHERITED macros for AddRef and Release here"; + + diag(D->getBeginLoc(), Error, DiagnosticIDs::Error) << D; + diag(BaseRefCntMember->getBeginLoc(), Note1, DiagnosticIDs::Note) + << BaseRefCntMember->getParent(); + diag(RefCntMember->getBeginLoc(), Note2, DiagnosticIDs::Note); + } + + if (FoundRefCntBase) { + const char *Error = "Refcounted record %0 has multiple superclasses " + "with mRefCnt members"; + const char *Note = "Superclass %0 has an mRefCnt member"; + + // superclass has mRefCnt, and another superclass also has an mRefCnt + diag(D->getBeginLoc(), Error, DiagnosticIDs::Error) << D; + diag(BaseRefCntMember->getBeginLoc(), Note, DiagnosticIDs::Note) + << BaseRefCntMember->getParent(); + diag(FoundRefCntBase->getBeginLoc(), Note, DiagnosticIDs::Note) + << FoundRefCntBase->getParent(); + } + + // Record that we've found a base with a mRefCnt member + FoundRefCntBase = BaseRefCntMember; + } + } +} diff --git a/build/clang-plugin/NoDuplicateRefCntMemberChecker.h b/build/clang-plugin/NoDuplicateRefCntMemberChecker.h new file mode 100644 index 0000000000..e038ca873b --- /dev/null +++ b/build/clang-plugin/NoDuplicateRefCntMemberChecker.h @@ -0,0 +1,19 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef NoDuplicateRefCntMemberChecker_h__ +#define NoDuplicateRefCntMemberChecker_h__ + +#include "plugin.h" + +class NoDuplicateRefCntMemberChecker : public BaseCheck { +public: + NoDuplicateRefCntMemberChecker(StringRef CheckName, + ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; +}; + +#endif diff --git a/build/clang-plugin/NoExplicitMoveConstructorChecker.cpp b/build/clang-plugin/NoExplicitMoveConstructorChecker.cpp new file mode 100644 index 0000000000..3e46c55a71 --- /dev/null +++ b/build/clang-plugin/NoExplicitMoveConstructorChecker.cpp @@ -0,0 +1,23 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "NoExplicitMoveConstructorChecker.h" +#include "CustomMatchers.h" + +void NoExplicitMoveConstructorChecker::registerMatchers( + MatchFinder *AstMatcher) { + AstMatcher->addMatcher( + cxxConstructorDecl(isExplicitMoveConstructor()).bind("node"), this); +} + +void NoExplicitMoveConstructorChecker::check( + const MatchFinder::MatchResult &Result) { + // Everything we needed to know was checked in the matcher - we just report + // the error here + const CXXConstructorDecl *D = + Result.Nodes.getNodeAs("node"); + + diag(D->getLocation(), "Move constructors may not be marked explicit", + DiagnosticIDs::Error); +} diff --git a/build/clang-plugin/NoExplicitMoveConstructorChecker.h b/build/clang-plugin/NoExplicitMoveConstructorChecker.h new file mode 100644 index 0000000000..adc474c144 --- /dev/null +++ b/build/clang-plugin/NoExplicitMoveConstructorChecker.h @@ -0,0 +1,19 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef NoExplicitMoveConstructorChecker_h__ +#define NoExplicitMoveConstructorChecker_h__ + +#include "plugin.h" + +class NoExplicitMoveConstructorChecker : public BaseCheck { +public: + NoExplicitMoveConstructorChecker(StringRef CheckName, + ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; +}; + +#endif diff --git a/build/clang-plugin/NoNewThreadsChecker.cpp b/build/clang-plugin/NoNewThreadsChecker.cpp new file mode 100644 index 0000000000..90c190f8d1 --- /dev/null +++ b/build/clang-plugin/NoNewThreadsChecker.cpp @@ -0,0 +1,36 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#include "NoNewThreadsChecker.h" +#include "CustomMatchers.h" + +void NoNewThreadsChecker::registerMatchers(MatchFinder *AstMatcher) { + // The checker looks for: + // -Instances of NS_NewNamedThread that aren't in allowed files + // -Instances of NS_NewNamedThread that use names that aren't recognized + AstMatcher->addMatcher( + callExpr(allOf(isFirstParty(), + callee(functionDecl(hasName("NS_NewNamedThread"))), + unless(isInAllowlistForThreads()))) + .bind("funcCall"), + this); +} + +void NoNewThreadsChecker::check(const MatchFinder::MatchResult &Result) { + const CallExpr *FuncCall = Result.Nodes.getNodeAs("funcCall"); + + if (FuncCall) { + diag(FuncCall->getBeginLoc(), + "Thread name not recognized. Please use the background thread pool.", + DiagnosticIDs::Error) + << FuncCall->getDirectCallee()->getName(); + diag( + FuncCall->getBeginLoc(), + "NS_NewNamedThread has been deprecated in favor of background " + "task dispatch via NS_DispatchBackgroundTask and " + "NS_CreateBackgroundTaskQueue. If you must create a new ad-hoc thread, " + "have your thread name added to ThreadAllows.txt.", + DiagnosticIDs::Note); + } +} diff --git a/build/clang-plugin/NoNewThreadsChecker.h b/build/clang-plugin/NoNewThreadsChecker.h new file mode 100644 index 0000000000..e8dc13fece --- /dev/null +++ b/build/clang-plugin/NoNewThreadsChecker.h @@ -0,0 +1,18 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef NoNewThreadsChecker_h__ +#define NoNewThreadsChecker_h__ + +#include "plugin.h" + +class NoNewThreadsChecker : public BaseCheck { +public: + NoNewThreadsChecker(StringRef CheckName, ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; +}; + +#endif // !defined(NoNewThreadsChecker_h__) diff --git a/build/clang-plugin/NoPrincipalGetURI.cpp b/build/clang-plugin/NoPrincipalGetURI.cpp new file mode 100644 index 0000000000..2f3774b331 --- /dev/null +++ b/build/clang-plugin/NoPrincipalGetURI.cpp @@ -0,0 +1,27 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "NoPrincipalGetURI.h" +#include "CustomMatchers.h" + +void NoPrincipalGetURI::registerMatchers(MatchFinder *AstMatcher) { + + AstMatcher->addMatcher( + cxxMemberCallExpr( + allOf(callee(cxxMethodDecl(hasName("GetURI"))), + anyOf(on(hasType(asString("class nsIPrincipal *"))), + on(hasType(asString("class nsIPrincipal")))), + unless(isInWhiteListForPrincipalGetUri())), + argumentCountIs(1)) + .bind("id"), + this); +} + +void NoPrincipalGetURI::check(const MatchFinder::MatchResult &Result) { + const auto *MatchedDecl = Result.Nodes.getNodeAs("id"); + diag(MatchedDecl->getExprLoc(), + "Principal->GetURI is deprecated and will be removed soon. Please " + "consider using the new helper functions of nsIPrincipal", + DiagnosticIDs::Error); +} diff --git a/build/clang-plugin/NoPrincipalGetURI.h b/build/clang-plugin/NoPrincipalGetURI.h new file mode 100644 index 0000000000..2b39b74bed --- /dev/null +++ b/build/clang-plugin/NoPrincipalGetURI.h @@ -0,0 +1,18 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef NoPrincipalGetURI_h__ +#define NoPrincipalGetURI_h__ + +#include "plugin.h" + +class NoPrincipalGetURI : public BaseCheck { +public: + NoPrincipalGetURI(StringRef CheckName, ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; +}; + +#endif diff --git a/build/clang-plugin/NoUsingNamespaceMozillaJavaChecker.cpp b/build/clang-plugin/NoUsingNamespaceMozillaJavaChecker.cpp new file mode 100644 index 0000000000..20449dba92 --- /dev/null +++ b/build/clang-plugin/NoUsingNamespaceMozillaJavaChecker.cpp @@ -0,0 +1,24 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "NoUsingNamespaceMozillaJavaChecker.h" +#include "CustomMatchers.h" + +void NoUsingNamespaceMozillaJavaChecker::registerMatchers( + MatchFinder *AstMatcher) { + AstMatcher->addMatcher( + usingDirectiveDecl(isUsingNamespaceMozillaJava()).bind("directive"), + this); +} + +void NoUsingNamespaceMozillaJavaChecker::check( + const MatchFinder::MatchResult &Result) { + const UsingDirectiveDecl *Directive = + Result.Nodes.getNodeAs("directive"); + const NamespaceDecl *Namespace = Directive->getNominatedNamespace(); + + diag(Directive->getUsingLoc(), "using namespace %0 is forbidden", + DiagnosticIDs::Error) + << Namespace->getQualifiedNameAsString(); +} diff --git a/build/clang-plugin/NoUsingNamespaceMozillaJavaChecker.h b/build/clang-plugin/NoUsingNamespaceMozillaJavaChecker.h new file mode 100644 index 0000000000..7b68ba9333 --- /dev/null +++ b/build/clang-plugin/NoUsingNamespaceMozillaJavaChecker.h @@ -0,0 +1,19 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef NoUsingNamespaceMozillaJavaChecker_h__ +#define NoUsingNamespaceMozillaJavaChecker_h__ + +#include "plugin.h" + +class NoUsingNamespaceMozillaJavaChecker : public BaseCheck { +public: + NoUsingNamespaceMozillaJavaChecker(StringRef CheckName, + ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; +}; + +#endif diff --git a/build/clang-plugin/NonMemMovableMemberChecker.cpp b/build/clang-plugin/NonMemMovableMemberChecker.cpp new file mode 100644 index 0000000000..232c634534 --- /dev/null +++ b/build/clang-plugin/NonMemMovableMemberChecker.cpp @@ -0,0 +1,34 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#include "NonMemMovableMemberChecker.h" +#include "CustomMatchers.h" + +MemMoveAnnotation NonMemMovable = MemMoveAnnotation(); + +void NonMemMovableMemberChecker::registerMatchers(MatchFinder *AstMatcher) { + // Handle non-mem-movable members + AstMatcher->addMatcher(cxxRecordDecl(needsMemMovableMembers()).bind("decl"), + this); +} + +void NonMemMovableMemberChecker::check(const MatchFinder::MatchResult &Result) { + const char *Error = + "class %0 cannot have non-memmovable member %1 of type %2"; + + // Get the specialization + const CXXRecordDecl *Declaration = + Result.Nodes.getNodeAs("decl"); + + // Report an error for every member which is non-memmovable + for (const FieldDecl *Field : Declaration->fields()) { + QualType Type = Field->getType(); + if (NonMemMovable.hasEffectiveAnnotation(Type)) { + diag(Field->getLocation(), Error, DiagnosticIDs::Error) + << Declaration << Field << Type; + NonMemMovable.dumpAnnotationReason(*this, Type, + Declaration->getLocation()); + } + } +} diff --git a/build/clang-plugin/NonMemMovableMemberChecker.h b/build/clang-plugin/NonMemMovableMemberChecker.h new file mode 100644 index 0000000000..0fc9b1f87f --- /dev/null +++ b/build/clang-plugin/NonMemMovableMemberChecker.h @@ -0,0 +1,19 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef NonMemMovableMemberChecker_h__ +#define NonMemMovableMemberChecker_h__ + +#include "plugin.h" + +class NonMemMovableMemberChecker : public BaseCheck { +public: + NonMemMovableMemberChecker(StringRef CheckName, + ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; +}; + +#endif diff --git a/build/clang-plugin/NonMemMovableTemplateArgChecker.cpp b/build/clang-plugin/NonMemMovableTemplateArgChecker.cpp new file mode 100644 index 0000000000..65afe21006 --- /dev/null +++ b/build/clang-plugin/NonMemMovableTemplateArgChecker.cpp @@ -0,0 +1,51 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#include "NonMemMovableTemplateArgChecker.h" +#include "CustomMatchers.h" + +void NonMemMovableTemplateArgChecker::registerMatchers( + MatchFinder *AstMatcher) { + // Handle non-mem-movable template specializations + AstMatcher->addMatcher( + classTemplateSpecializationDecl( + allOf(needsMemMovableTemplateArg(), + hasAnyTemplateArgument(refersToType(isNonMemMovable())))) + .bind("specialization"), + this); +} + +void NonMemMovableTemplateArgChecker::check( + const MatchFinder::MatchResult &Result) { + const char *Error = + "Cannot instantiate %0 with non-memmovable template argument %1"; + const char *Note = "instantiation of %0 requested here"; + + // Get the specialization + const ClassTemplateSpecializationDecl *Specialization = + Result.Nodes.getNodeAs("specialization"); + SourceLocation RequestLoc = Specialization->getPointOfInstantiation(); + + // Report an error for every template argument which is non-memmovable + const TemplateArgumentList &Args = + Specialization->getTemplateInstantiationArgs(); + for (unsigned i = 0; i < Args.size(); ++i) { + QualType ArgType = Args[i].getAsType(); + if (NonMemMovable.hasEffectiveAnnotation(ArgType)) { + diag(Specialization->getLocation(), Error, DiagnosticIDs::Error) + << Specialization << ArgType; + // XXX It would be really nice if we could get the instantiation stack + // information + // from Sema such that we could print a full template instantiation stack, + // however, + // it seems as though that information is thrown out by the time we get + // here so we + // can only report one level of template specialization (which in many + // cases won't + // be useful) + diag(RequestLoc, Note, DiagnosticIDs::Note) << Specialization; + NonMemMovable.dumpAnnotationReason(*this, ArgType, RequestLoc); + } + } +} diff --git a/build/clang-plugin/NonMemMovableTemplateArgChecker.h b/build/clang-plugin/NonMemMovableTemplateArgChecker.h new file mode 100644 index 0000000000..cd94c95930 --- /dev/null +++ b/build/clang-plugin/NonMemMovableTemplateArgChecker.h @@ -0,0 +1,19 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef NonMemMovableTemplateArgChecker_h__ +#define NonMemMovableTemplateArgChecker_h__ + +#include "plugin.h" + +class NonMemMovableTemplateArgChecker : public BaseCheck { +public: + NonMemMovableTemplateArgChecker(StringRef CheckName, + ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; +}; + +#endif diff --git a/build/clang-plugin/NonParamInsideFunctionDeclChecker.cpp b/build/clang-plugin/NonParamInsideFunctionDeclChecker.cpp new file mode 100644 index 0000000000..fa12a8cf24 --- /dev/null +++ b/build/clang-plugin/NonParamInsideFunctionDeclChecker.cpp @@ -0,0 +1,117 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#include "NonParamInsideFunctionDeclChecker.h" +#include "CustomMatchers.h" + +class NonParamAnnotation : public CustomTypeAnnotation { +public: + NonParamAnnotation() : CustomTypeAnnotation(moz_non_param, "non-param"){}; + +protected: + // Adding alignas(_) on a struct implicitly marks it as MOZ_NON_PARAM, due to + // MSVC limitations which prevent passing explcitly aligned types by value as + // parameters. This overload of hasFakeAnnotation injects fake MOZ_NON_PARAM + // annotations onto these types. + std::string getImplicitReason(const TagDecl *D) const override { + // Check if the decl itself has an AlignedAttr on it. + for (const Attr *A : D->attrs()) { + if (isa(A)) { + return "it has an alignas(_) annotation"; + } + } + + // Check if any of the decl's fields have an AlignedAttr on them. + if (auto RD = dyn_cast(D)) { + for (auto F : RD->fields()) { + for (auto A : F->attrs()) { + if (isa(A)) { + return ("member '" + F->getName() + + "' has an alignas(_) annotation") + .str(); + } + } + } + } + + // We don't need to check the types of fields, as the CustomTypeAnnotation + // infrastructure will handle that for us. + return ""; + } +}; +NonParamAnnotation NonParam; + +void NonParamInsideFunctionDeclChecker::registerMatchers( + MatchFinder *AstMatcher) { + AstMatcher->addMatcher( + functionDecl( + anyOf(allOf(isDefinition(), + hasAncestor( + classTemplateSpecializationDecl().bind("spec"))), + isDefinition())) + .bind("func"), + this); + AstMatcher->addMatcher(lambdaExpr().bind("lambda"), this); +} + +void NonParamInsideFunctionDeclChecker::check( + const MatchFinder::MatchResult &Result) { + static DenseSet CheckedFunctionDecls; + + const FunctionDecl *func = Result.Nodes.getNodeAs("func"); + if (!func) { + const LambdaExpr *lambda = Result.Nodes.getNodeAs("lambda"); + if (lambda) { + func = lambda->getCallOperator(); + } + } + + if (!func) { + return; + } + + if (func->isDeleted()) { + return; + } + + // We need to skip decls which have these types as parameters in system + // headers, because presumably those headers act like an assertion that the + // alignment will be preserved in that situation. + if (getDeclarationNamespace(func) == "std") { + return; + } + + if (inThirdPartyPath(func)) { + return; + } + + // Don't report errors on the same declarations more than once. + if (CheckedFunctionDecls.count(func)) { + return; + } + CheckedFunctionDecls.insert(func); + + const ClassTemplateSpecializationDecl *Spec = + Result.Nodes.getNodeAs("spec"); + + for (ParmVarDecl *p : func->parameters()) { + QualType T = p->getType().withoutLocalFastQualifiers(); + if (NonParam.hasEffectiveAnnotation(T)) { + diag(p->getLocation(), "Type %0 must not be used as parameter", + DiagnosticIDs::Error) + << T; + diag(p->getLocation(), + "Please consider passing a const reference instead", + DiagnosticIDs::Note); + + if (Spec) { + diag(Spec->getPointOfInstantiation(), + "The bad argument was passed to %0 here", DiagnosticIDs::Note) + << Spec->getSpecializedTemplate(); + } + + NonParam.dumpAnnotationReason(*this, T, p->getLocation()); + } + } +} diff --git a/build/clang-plugin/NonParamInsideFunctionDeclChecker.h b/build/clang-plugin/NonParamInsideFunctionDeclChecker.h new file mode 100644 index 0000000000..ed11f3fe12 --- /dev/null +++ b/build/clang-plugin/NonParamInsideFunctionDeclChecker.h @@ -0,0 +1,19 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef NonParamInsideFunctionDeclChecker_h__ +#define NonParamInsideFunctionDeclChecker_h__ + +#include "plugin.h" + +class NonParamInsideFunctionDeclChecker : public BaseCheck { +public: + NonParamInsideFunctionDeclChecker(StringRef CheckName, + ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; +}; + +#endif diff --git a/build/clang-plugin/NonTrivialTypeInFfiChecker.cpp b/build/clang-plugin/NonTrivialTypeInFfiChecker.cpp new file mode 100644 index 0000000000..f482fb131f --- /dev/null +++ b/build/clang-plugin/NonTrivialTypeInFfiChecker.cpp @@ -0,0 +1,56 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "NonTrivialTypeInFfiChecker.h" +#include "CustomMatchers.h" + +void NonTrivialTypeInFfiChecker::registerMatchers(MatchFinder *AstMatcher) { + AstMatcher->addMatcher(functionDecl(isExternC()).bind("func"), this); +} + +void NonTrivialTypeInFfiChecker::check(const MatchFinder::MatchResult &Result) { + static DenseSet CheckedFunctionDecls; + + const FunctionDecl *func = Result.Nodes.getNodeAs("func"); + // Don't report errors on the same declarations more than once. + if (!CheckedFunctionDecls.insert(func).second) { + return; + } + + if (inThirdPartyPath(func)) { + return; + } + + auto NoteFor = [](const QualType &T) -> std::string { + std::string s = "Please consider using a pointer or reference"; + if (T->getAs()) { + s += ", or explicitly instantiating the template"; + } + return s + " instead"; + }; + + for (ParmVarDecl *p : func->parameters()) { + QualType T = p->getType().getUnqualifiedType(); + if (!T->isVoidType() && !T->isReferenceType() && + !T.isTriviallyCopyableType(*Result.Context)) { + diag(p->getLocation(), + "Type %0 must not be used as parameter to extern " + "\"C\" function", + DiagnosticIDs::Error) + << T; + diag(p->getLocation(), NoteFor(T), DiagnosticIDs::Note); + } + } + + QualType T = func->getReturnType().getUnqualifiedType(); + if (!T->isVoidType() && !T->isReferenceType() && + !T.isTriviallyCopyableType(*Result.Context)) { + diag(func->getLocation(), + "Type %0 must not be used as return type of " + "extern \"C\" function", + DiagnosticIDs::Error) + << T; + diag(func->getLocation(), NoteFor(T), DiagnosticIDs::Note); + } +} diff --git a/build/clang-plugin/NonTrivialTypeInFfiChecker.h b/build/clang-plugin/NonTrivialTypeInFfiChecker.h new file mode 100644 index 0000000000..106c64c021 --- /dev/null +++ b/build/clang-plugin/NonTrivialTypeInFfiChecker.h @@ -0,0 +1,19 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#ifndef NonTrivialTypeInFfiChecker_h__ +#define NonTrivialTypeInFfiChecker_h__ + +#include "plugin.h" + +class NonTrivialTypeInFfiChecker : public BaseCheck { +public: + NonTrivialTypeInFfiChecker(StringRef CheckName, + ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; +}; + +#endif diff --git a/build/clang-plugin/OverrideBaseCallChecker.cpp b/build/clang-plugin/OverrideBaseCallChecker.cpp new file mode 100644 index 0000000000..600d431335 --- /dev/null +++ b/build/clang-plugin/OverrideBaseCallChecker.cpp @@ -0,0 +1,109 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "OverrideBaseCallChecker.h" +#include "CustomMatchers.h" + +void OverrideBaseCallChecker::registerMatchers(MatchFinder *AstMatcher) { + AstMatcher->addMatcher(cxxRecordDecl(hasBaseClasses()).bind("class"), this); +} + +bool OverrideBaseCallChecker::isRequiredBaseMethod( + const CXXMethodDecl *Method) { + return hasCustomAttribute(Method); +} + +void OverrideBaseCallChecker::evaluateExpression( + const Stmt *StmtExpr, std::list &MethodList) { + // Continue while we have methods in our list + if (!MethodList.size()) { + return; + } + + if (auto MemberFuncCall = dyn_cast(StmtExpr)) { + if (auto Method = + dyn_cast(MemberFuncCall->getDirectCallee())) { + findBaseMethodCall(Method, MethodList); + } + } + + for (auto S : StmtExpr->children()) { + if (S) { + evaluateExpression(S, MethodList); + } + } +} + +void OverrideBaseCallChecker::getRequiredBaseMethod( + const CXXMethodDecl *Method, + std::list &MethodsList) { + + if (isRequiredBaseMethod(Method)) { + MethodsList.push_back(Method); + } else { + // Loop through all it's base methods. + for (auto BaseMethod = Method->begin_overridden_methods(); + BaseMethod != Method->end_overridden_methods(); BaseMethod++) { + getRequiredBaseMethod(*BaseMethod, MethodsList); + } + } +} + +void OverrideBaseCallChecker::findBaseMethodCall( + const CXXMethodDecl *Method, + std::list &MethodsList) { + + MethodsList.remove(Method); + // Loop also through all it's base methods; + for (auto BaseMethod = Method->begin_overridden_methods(); + BaseMethod != Method->end_overridden_methods(); BaseMethod++) { + findBaseMethodCall(*BaseMethod, MethodsList); + } +} + +void OverrideBaseCallChecker::check(const MatchFinder::MatchResult &Result) { + const char *Error = + "Method %0 must be called in all overrides, but is not called in " + "this override defined for class %1"; + const CXXRecordDecl *Decl = Result.Nodes.getNodeAs("class"); + + // Loop through the methods and look for the ones that are overridden. + for (auto Method : Decl->methods()) { + // If this method doesn't override other methods or it doesn't have a body, + // continue to the next declaration. 
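+    // Concretely (hypothetical names): for a base class declaring
+    //   MOZ_REQUIRED_BASE_METHOD virtual void Shutdown();
+    // an override whose body never calls Base::Shutdown() is what the walk
+    // below is meant to report; methods without a body in this translation
+    // unit are skipped here rather than reported.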
+ if (!Method->size_overridden_methods() || !Method->hasBody()) { + continue; + } + + // Preferred the usage of list instead of vector in order to avoid + // calling erase-remove when deleting items + std::list MethodsList; + // For each overridden method push it to a list if it meets our + // criteria + for (auto BaseMethod = Method->begin_overridden_methods(); + BaseMethod != Method->end_overridden_methods(); BaseMethod++) { + getRequiredBaseMethod(*BaseMethod, MethodsList); + } + + // If no method has been found then no annotation was used + // so checking is not needed + if (!MethodsList.size()) { + continue; + } + + // Loop through the body of our method and search for calls to + // base methods + evaluateExpression(Method->getBody(), MethodsList); + + // If list is not empty pop up errors + for (auto BaseMethod : MethodsList) { + std::string QualName; + raw_string_ostream OS(QualName); + BaseMethod->printQualifiedName(OS); + + diag(Method->getLocation(), Error, DiagnosticIDs::Error) + << OS.str() << Decl->getName(); + } + } +} diff --git a/build/clang-plugin/OverrideBaseCallChecker.h b/build/clang-plugin/OverrideBaseCallChecker.h new file mode 100644 index 0000000000..e919af6749 --- /dev/null +++ b/build/clang-plugin/OverrideBaseCallChecker.h @@ -0,0 +1,27 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef OverrideBaseCallChecker_h__ +#define OverrideBaseCallChecker_h__ + +#include "plugin.h" + +class OverrideBaseCallChecker : public BaseCheck { +public: + OverrideBaseCallChecker(StringRef CheckName, ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; + +private: + void evaluateExpression(const Stmt *StmtExpr, + std::list &MethodList); + void getRequiredBaseMethod(const CXXMethodDecl *Method, + std::list &MethodsList); + void findBaseMethodCall(const CXXMethodDecl *Method, + std::list &MethodsList); + bool isRequiredBaseMethod(const CXXMethodDecl *Method); +}; + +#endif diff --git a/build/clang-plugin/OverrideBaseCallUsageChecker.cpp b/build/clang-plugin/OverrideBaseCallUsageChecker.cpp new file mode 100644 index 0000000000..34b9cd16a9 --- /dev/null +++ b/build/clang-plugin/OverrideBaseCallUsageChecker.cpp @@ -0,0 +1,21 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#include "OverrideBaseCallUsageChecker.h" +#include "CustomMatchers.h" + +void OverrideBaseCallUsageChecker::registerMatchers(MatchFinder *AstMatcher) { + AstMatcher->addMatcher( + cxxMethodDecl(isNonVirtual(), isRequiredBaseMethod()).bind("method"), + this); +} + +void OverrideBaseCallUsageChecker::check( + const MatchFinder::MatchResult &Result) { + const char *Error = + "MOZ_REQUIRED_BASE_METHOD can be used only on virtual methods"; + const CXXMethodDecl *Method = Result.Nodes.getNodeAs("method"); + + diag(Method->getLocation(), Error, DiagnosticIDs::Error); +} diff --git a/build/clang-plugin/OverrideBaseCallUsageChecker.h b/build/clang-plugin/OverrideBaseCallUsageChecker.h new file mode 100644 index 0000000000..0e81d72387 --- /dev/null +++ b/build/clang-plugin/OverrideBaseCallUsageChecker.h @@ -0,0 +1,23 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef OverrideBaseCallUsageChecker_h__ +#define OverrideBaseCallUsageChecker_h__ + +#include "plugin.h" + +/* + * This is a companion checker for OverrideBaseCallChecker that rejects + * the usage of MOZ_REQUIRED_BASE_METHOD on non-virtual base methods. + */ +class OverrideBaseCallUsageChecker : public BaseCheck { +public: + OverrideBaseCallUsageChecker(StringRef CheckName = "override-base-call-usage", + ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; +}; + +#endif diff --git a/build/clang-plugin/ParamTraitsEnumChecker.cpp b/build/clang-plugin/ParamTraitsEnumChecker.cpp new file mode 100644 index 0000000000..7214e9fe5b --- /dev/null +++ b/build/clang-plugin/ParamTraitsEnumChecker.cpp @@ -0,0 +1,38 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "ParamTraitsEnumChecker.h" +#include "CustomMatchers.h" + +void ParamTraitsEnumChecker::registerMatchers(MatchFinder *AstMatcher) { + AstMatcher->addMatcher( + classTemplateSpecializationDecl(hasName("ParamTraits")).bind("decl"), + this); +} + +void ParamTraitsEnumChecker::check(const MatchFinder::MatchResult &Result) { + const ClassTemplateSpecializationDecl *Decl = + Result.Nodes.getNodeAs("decl"); + + for (auto &Inner : Decl->decls()) { + if (auto *Def = dyn_cast(Inner)) { + QualType UnderlyingType = Def->getUnderlyingType(); + QualType CanonicalType = UnderlyingType.getCanonicalType(); + + const clang::Type *TypePtr = CanonicalType.getTypePtrOrNull(); + if (!TypePtr) { + return; + } + + if (TypePtr->isEnumeralType()) { + diag(Decl->getBeginLoc(), + "Custom ParamTraits implementation for an enum type", + DiagnosticIDs::Error); + diag(Decl->getBeginLoc(), + "Please use a helper class for example ContiguousEnumSerializer", + DiagnosticIDs::Note); + } + } + } +} diff --git a/build/clang-plugin/ParamTraitsEnumChecker.h b/build/clang-plugin/ParamTraitsEnumChecker.h new file mode 100644 index 0000000000..e89a7d2895 --- /dev/null +++ b/build/clang-plugin/ParamTraitsEnumChecker.h @@ -0,0 +1,18 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#ifndef ParamTraitsEnumChecker_h__ +#define ParamTraitsEnumChecker_h__ + +#include "plugin.h" + +class ParamTraitsEnumChecker : public BaseCheck { +public: + ParamTraitsEnumChecker(StringRef CheckName, ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; +}; + +#endif diff --git a/build/clang-plugin/RecurseGuard.h b/build/clang-plugin/RecurseGuard.h new file mode 100644 index 0000000000..5daf55a9e8 --- /dev/null +++ b/build/clang-plugin/RecurseGuard.h @@ -0,0 +1,56 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef RecurseGuard_h__ +#define RecurseGuard_h__ + +#include "Utils.h" + +// This class acts as a tracker for avoiding infinite recursion when traversing +// chains in CFGs etc. +// +// Constructing a RecurseGuard sets up a shared backing store which tracks the +// currently observed objects. Whenever recursing, use RecurseGuard.recurse(T) +// to construct another RecurseGuard with the same backing store. +// +// The RecurseGuard object will unregister its object when it is destroyed, and +// has a method `isRepeat()` which will return `true` if the item was already +// seen. +template class RecurseGuard { +public: + RecurseGuard(T Thing) : Thing(Thing), Set(new DenseSet()), Repeat(false) { + Set->insert(Thing); + } + RecurseGuard(T Thing, std::shared_ptr> &Set) + : Thing(Thing), Set(Set), Repeat(false) { + Repeat = !Set->insert(Thing).second; + } + RecurseGuard(const RecurseGuard &) = delete; + RecurseGuard(RecurseGuard &&Other) + : Thing(Other.Thing), Set(Other.Set), Repeat(Other.Repeat) { + Other.Repeat = true; + } + ~RecurseGuard() { + if (!Repeat) { + Set->erase(Thing); + } + } + + bool isRepeat() { return Repeat; } + + T get() { return Thing; } + + operator T() { return Thing; } + + T operator->() { return Thing; } + + RecurseGuard recurse(T NewThing) { return RecurseGuard(NewThing, Set); } + +private: + T Thing; + std::shared_ptr> Set; + bool Repeat; +}; + +#endif // RecurseGuard_h__ diff --git a/build/clang-plugin/RefCountedCopyConstructorChecker.cpp b/build/clang-plugin/RefCountedCopyConstructorChecker.cpp new file mode 100644 index 0000000000..569d4eecec --- /dev/null +++ b/build/clang-plugin/RefCountedCopyConstructorChecker.cpp @@ -0,0 +1,34 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "RefCountedCopyConstructorChecker.h" +#include "CustomMatchers.h" + +void RefCountedCopyConstructorChecker::registerMatchers( + MatchFinder *AstMatcher) { + AstMatcher->addMatcher( + cxxConstructExpr( + hasDeclaration(cxxConstructorDecl(isCompilerProvidedCopyConstructor(), + ofClass(hasRefCntMember())))) + .bind("node"), + this); +} + +void RefCountedCopyConstructorChecker::check( + const MatchFinder::MatchResult &Result) { + const char *Error = + "Invalid use of compiler-provided copy constructor on refcounted type"; + const char *Note = "The default copy constructor also copies the " + "default mRefCnt property, leading to reference " + "count imbalance issues. 
Please provide your own " + "copy constructor which only copies the fields which " + "need to be copied"; + + // Everything we needed to know was checked in the matcher - we just report + // the error here + const CXXConstructExpr *E = Result.Nodes.getNodeAs("node"); + + diag(E->getLocation(), Error, DiagnosticIDs::Error); + diag(E->getLocation(), Note, DiagnosticIDs::Note); +} diff --git a/build/clang-plugin/RefCountedCopyConstructorChecker.h b/build/clang-plugin/RefCountedCopyConstructorChecker.h new file mode 100644 index 0000000000..edae63534a --- /dev/null +++ b/build/clang-plugin/RefCountedCopyConstructorChecker.h @@ -0,0 +1,19 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef RefCountedCopyConstructorChecker_h__ +#define RefCountedCopyConstructorChecker_h__ + +#include "plugin.h" + +class RefCountedCopyConstructorChecker : public BaseCheck { +public: + RefCountedCopyConstructorChecker(StringRef CheckName, + ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; +}; + +#endif diff --git a/build/clang-plugin/RefCountedInsideLambdaChecker.cpp b/build/clang-plugin/RefCountedInsideLambdaChecker.cpp new file mode 100644 index 0000000000..8a95b761de --- /dev/null +++ b/build/clang-plugin/RefCountedInsideLambdaChecker.cpp @@ -0,0 +1,152 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "RefCountedInsideLambdaChecker.h" +#include "CustomMatchers.h" + +RefCountedMap RefCountedClasses; + +void RefCountedInsideLambdaChecker::registerMatchers(MatchFinder *AstMatcher) { + // We want to reject any code which captures a pointer to an object of a + // refcounted type, and then lets that value escape. As a primitive analysis, + // we reject any occurances of the lambda as a template parameter to a class + // (which could allow it to escape), as well as any presence of such a lambda + // in a return value (either from lambdas, or in c++14, auto functions). + // + // We check these lambdas' capture lists for raw pointers to refcounted types. 
+ AstMatcher->addMatcher(functionDecl(returns(recordType(hasDeclaration( + cxxRecordDecl(isLambdaDecl()).bind("decl"))))), + this); + AstMatcher->addMatcher(lambdaExpr().bind("lambdaExpr"), this); + AstMatcher->addMatcher( + classTemplateSpecializationDecl( + hasAnyTemplateArgument(refersToType(recordType( + hasDeclaration(cxxRecordDecl(isLambdaDecl()).bind("decl")))))), + this); +} + +void RefCountedInsideLambdaChecker::emitDiagnostics(SourceLocation Loc, + StringRef Name, + QualType Type) { + diag(Loc, + "Refcounted variable '%0' of type %1 cannot be captured by a lambda", + DiagnosticIDs::Error) + << Name << Type; + diag(Loc, "Please consider using a smart pointer", DiagnosticIDs::Note); +} + +void RefCountedInsideLambdaChecker::check( + const MatchFinder::MatchResult &Result) { + static DenseSet CheckedDecls; + + const CXXRecordDecl *Lambda = Result.Nodes.getNodeAs("decl"); + + if (const LambdaExpr *OuterLambda = + Result.Nodes.getNodeAs("lambdaExpr")) { + const CXXMethodDecl *OpCall = OuterLambda->getCallOperator(); + QualType ReturnTy = OpCall->getReturnType(); + if (const CXXRecordDecl *Record = ReturnTy->getAsCXXRecordDecl()) { + Lambda = Record; + } + } + + if (!Lambda || !Lambda->isLambda()) { + return; + } + + // Don't report errors on the same declarations more than once. + if (CheckedDecls.count(Lambda)) { + return; + } + CheckedDecls.insert(Lambda); + + bool StrongRefToThisCaptured = false; + + for (const LambdaCapture &Capture : Lambda->captures()) { + // Check if any of the captures are ByRef. If they are, we have nothing to + // report, as it's OK to capture raw pointers to refcounted objects so long + // as the Lambda doesn't escape the current scope, which is required by + // ByRef captures already. + if (Capture.getCaptureKind() == LCK_ByRef) { + return; + } + + // Check if this capture is byvalue, and captures a strong reference to + // this. + // XXX: Do we want to make sure that this type which we are capturing is a + // "Smart Pointer" somehow? + if (!StrongRefToThisCaptured && Capture.capturesVariable() && + Capture.getCaptureKind() == LCK_ByCopy) { + const VarDecl *Var = Capture.getCapturedVar(); + if (Var->hasInit()) { + const Stmt *Init = Var->getInit(); + + // Ignore single argument constructors, and trivial nodes. + while (true) { + auto NewInit = IgnoreTrivials(Init); + if (auto ConstructExpr = dyn_cast(NewInit)) { + if (ConstructExpr->getNumArgs() == 1) { + NewInit = ConstructExpr->getArg(0); + } + } + if (Init == NewInit) { + break; + } + Init = NewInit; + } + + if (isa(Init)) { + StrongRefToThisCaptured = true; + } + } + } + } + + // Now we can go through and produce errors for any captured variables or this + // pointers. + for (const LambdaCapture &Capture : Lambda->captures()) { + if (Capture.capturesVariable()) { + QualType Pointee = Capture.getCapturedVar()->getType()->getPointeeType(); + + if (!Pointee.isNull() && isClassRefCounted(Pointee)) { + emitDiagnostics(Capture.getLocation(), + Capture.getCapturedVar()->getName(), Pointee); + return; + } + } + + // The situation with captures of `this` is more complex. All captures of + // `this` look the same-ish (they are LCK_This). We want to complain about + // captures of `this` where `this` is a refcounted type, and the capture is + // actually used in the body of the lambda (if the capture isn't used, then + // we don't care, because it's only being captured in order to give access + // to private methods). 
+ // + // In addition, we don't complain about this, even if it is used, if it was + // captured implicitly when the LambdaCaptureDefault was LCD_ByRef, as that + // expresses the intent that the lambda won't leave the enclosing scope. + bool ImplicitByRefDefaultedCapture = + Capture.isImplicit() && Lambda->getLambdaCaptureDefault() == LCD_ByRef; + if (Capture.capturesThis() && !ImplicitByRefDefaultedCapture && + !StrongRefToThisCaptured) { + ThisVisitor V(*this); + bool NotAborted = V.TraverseDecl( + const_cast(Lambda->getLambdaCallOperator())); + if (!NotAborted) { + return; + } + } + } +} + +bool RefCountedInsideLambdaChecker::ThisVisitor::VisitCXXThisExpr( + CXXThisExpr *This) { + QualType Pointee = This->getType()->getPointeeType(); + if (!Pointee.isNull() && isClassRefCounted(Pointee)) { + Checker.emitDiagnostics(This->getBeginLoc(), "this", Pointee); + return false; + } + + return true; +} diff --git a/build/clang-plugin/RefCountedInsideLambdaChecker.h b/build/clang-plugin/RefCountedInsideLambdaChecker.h new file mode 100644 index 0000000000..ed9419681a --- /dev/null +++ b/build/clang-plugin/RefCountedInsideLambdaChecker.h @@ -0,0 +1,33 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef RefCountedInsideLambdaChecker_h__ +#define RefCountedInsideLambdaChecker_h__ + +#include "plugin.h" + +class RefCountedInsideLambdaChecker : public BaseCheck { +public: + RefCountedInsideLambdaChecker(StringRef CheckName, + ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; + + void emitDiagnostics(SourceLocation Loc, StringRef Name, QualType Type); + +private: + class ThisVisitor : public RecursiveASTVisitor { + public: + explicit ThisVisitor(RefCountedInsideLambdaChecker &Checker) + : Checker(Checker) {} + + bool VisitCXXThisExpr(CXXThisExpr *This); + + private: + RefCountedInsideLambdaChecker &Checker; + }; +}; + +#endif diff --git a/build/clang-plugin/ScopeChecker.cpp b/build/clang-plugin/ScopeChecker.cpp new file mode 100644 index 0000000000..962c252105 --- /dev/null +++ b/build/clang-plugin/ScopeChecker.cpp @@ -0,0 +1,180 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "ScopeChecker.h" +#include "CustomMatchers.h" + +void ScopeChecker::registerMatchers(MatchFinder *AstMatcher) { + AstMatcher->addMatcher(varDecl().bind("node"), this); + AstMatcher->addMatcher(cxxNewExpr().bind("node"), this); + AstMatcher->addMatcher( + materializeTemporaryExpr( + unless(hasDescendant(cxxConstructExpr(allowsTemporary())))) + .bind("node"), + this); + AstMatcher->addMatcher( + callExpr(callee(functionDecl(heapAllocator()))).bind("node"), this); +} + +// These enum variants determine whether an allocation has occured in the code. +enum AllocationVariety { + AV_None, + AV_Global, + AV_Automatic, + AV_Temporary, + AV_Heap, +}; + +// XXX Currently the Decl* in the AutomaticTemporaryMap is unused, but it +// probably will be used at some point in the future, in order to produce better +// error messages. 
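// Editor's illustration, not part of the original patch: the map is filled in
// for default arguments such as the following hypothetical declaration, where
// the temporary materialized for the default argument is later classified as
// an automatic allocation rather than a temporary one:
//
//   void TakeValue(const SomeStackOnlyClass &aArg = SomeStackOnlyClass());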
+typedef DenseMap + AutomaticTemporaryMap; +AutomaticTemporaryMap AutomaticTemporaries; + +void ScopeChecker::check(const MatchFinder::MatchResult &Result) { + // There are a variety of different reasons why something could be allocated + AllocationVariety Variety = AV_None; + SourceLocation Loc; + QualType T; + bool IsStaticLocal = false; + + if (const ParmVarDecl *D = Result.Nodes.getNodeAs("node")) { + if (D->hasUnparsedDefaultArg() || D->hasUninstantiatedDefaultArg()) { + return; + } + if (const Expr *Default = D->getDefaultArg()) { + if (const MaterializeTemporaryExpr *E = + dyn_cast(Default)) { + // We have just found a ParmVarDecl which has, as its default argument, + // a MaterializeTemporaryExpr. We mark that MaterializeTemporaryExpr as + // automatic, by adding it to the AutomaticTemporaryMap. + // Reporting on this type will occur when the MaterializeTemporaryExpr + // is matched against. + AutomaticTemporaries[E] = D; + } + } + return; + } + + // Determine the type of allocation which we detected + if (const VarDecl *D = Result.Nodes.getNodeAs("node")) { + if (D->hasGlobalStorage()) { + Variety = AV_Global; + } else { + Variety = AV_Automatic; + } + T = D->getType(); + Loc = D->getBeginLoc(); + IsStaticLocal = D->isStaticLocal(); + } else if (const CXXNewExpr *E = Result.Nodes.getNodeAs("node")) { + // New allocates things on the heap. + // We don't consider placement new to do anything, as it doesn't actually + // allocate the storage, and thus gives us no useful information. + if (!isPlacementNew(E)) { + Variety = AV_Heap; + T = E->getAllocatedType(); + Loc = E->getBeginLoc(); + } + } else if (const MaterializeTemporaryExpr *E = + Result.Nodes.getNodeAs("node")) { + // Temporaries can actually have varying storage durations, due to temporary + // lifetime extension. We consider the allocation variety of this temporary + // to be the same as the allocation variety of its lifetime. + + // XXX We maybe should mark these lifetimes as being due to a temporary + // which has had its lifetime extended, to improve the error messages. + switch (E->getStorageDuration()) { + case SD_FullExpression: { + // Check if this temporary is allocated as a default argument! + // if it is, we want to pretend that it is automatic. + AutomaticTemporaryMap::iterator AutomaticTemporary = + AutomaticTemporaries.find(E); + if (AutomaticTemporary != AutomaticTemporaries.end()) { + Variety = AV_Automatic; + } else { + Variety = AV_Temporary; + } + } break; + case SD_Automatic: + Variety = AV_Automatic; + break; + case SD_Thread: + case SD_Static: + Variety = AV_Global; + break; + case SD_Dynamic: + assert(false && "I don't think that this ever should occur..."); + Variety = AV_Heap; + break; + } + T = E->getType().getUnqualifiedType(); + Loc = E->getBeginLoc(); + } else if (const CallExpr *E = Result.Nodes.getNodeAs("node")) { + T = E->getType()->getPointeeType(); + if (!T.isNull()) { + // This will always allocate on the heap, as the heapAllocator() check + // was made in the matcher + Variety = AV_Heap; + Loc = E->getBeginLoc(); + } + } + + // Error messages for incorrect allocations. 
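  // Editor's illustration, not part of the original patch: given the plugin's
  // annotations, the diagnostics below are aimed at code along these lines,
  // with Widget purely hypothetical:
  //
  //   class MOZ_STACK_CLASS Widget { /* ... */ };
  //   Widget *W = new Widget();  // reported: only valid on the stack,
  //                              // value incorrectly allocated on the heap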
+ const char *Stack = "variable of type %0 only valid on the stack"; + const char *Global = "variable of type %0 only valid as global"; + const char *Heap = "variable of type %0 only valid on the heap"; + const char *NonHeap = "variable of type %0 is not valid on the heap"; + const char *NonTemporary = "variable of type %0 is not valid in a temporary"; + const char *Temporary = "variable of type %0 is only valid as a temporary"; + const char *StaticLocal = "variable of type %0 is only valid as a static " + "local"; + + const char *StackNote = + "value incorrectly allocated in an automatic variable"; + const char *GlobalNote = "value incorrectly allocated in a global variable"; + const char *HeapNote = "value incorrectly allocated on the heap"; + const char *TemporaryNote = "value incorrectly allocated in a temporary"; + + // Report errors depending on the annotations on the input types. + switch (Variety) { + case AV_None: + return; + + case AV_Global: + StackClass.reportErrorIfPresent(*this, T, Loc, Stack, GlobalNote); + HeapClass.reportErrorIfPresent(*this, T, Loc, Heap, GlobalNote); + TemporaryClass.reportErrorIfPresent(*this, T, Loc, Temporary, GlobalNote); + if (!IsStaticLocal) { + StaticLocalClass.reportErrorIfPresent(*this, T, Loc, StaticLocal, + GlobalNote); + } + break; + + case AV_Automatic: + GlobalClass.reportErrorIfPresent(*this, T, Loc, Global, StackNote); + HeapClass.reportErrorIfPresent(*this, T, Loc, Heap, StackNote); + TemporaryClass.reportErrorIfPresent(*this, T, Loc, Temporary, StackNote); + StaticLocalClass.reportErrorIfPresent(*this, T, Loc, StaticLocal, + StackNote); + break; + + case AV_Temporary: + GlobalClass.reportErrorIfPresent(*this, T, Loc, Global, TemporaryNote); + HeapClass.reportErrorIfPresent(*this, T, Loc, Heap, TemporaryNote); + NonTemporaryClass.reportErrorIfPresent(*this, T, Loc, NonTemporary, + TemporaryNote); + StaticLocalClass.reportErrorIfPresent(*this, T, Loc, StaticLocal, + TemporaryNote); + break; + + case AV_Heap: + GlobalClass.reportErrorIfPresent(*this, T, Loc, Global, HeapNote); + StackClass.reportErrorIfPresent(*this, T, Loc, Stack, HeapNote); + NonHeapClass.reportErrorIfPresent(*this, T, Loc, NonHeap, HeapNote); + TemporaryClass.reportErrorIfPresent(*this, T, Loc, Temporary, HeapNote); + StaticLocalClass.reportErrorIfPresent(*this, T, Loc, StaticLocal, HeapNote); + break; + } +} diff --git a/build/clang-plugin/ScopeChecker.h b/build/clang-plugin/ScopeChecker.h new file mode 100644 index 0000000000..edab241f1c --- /dev/null +++ b/build/clang-plugin/ScopeChecker.h @@ -0,0 +1,18 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef ScopeChecker_h__ +#define ScopeChecker_h__ + +#include "plugin.h" + +class ScopeChecker : public BaseCheck { +public: + ScopeChecker(StringRef CheckName, ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; +}; + +#endif diff --git a/build/clang-plugin/SprintfLiteralChecker.cpp b/build/clang-plugin/SprintfLiteralChecker.cpp new file mode 100644 index 0000000000..94e8e2fd1b --- /dev/null +++ b/build/clang-plugin/SprintfLiteralChecker.cpp @@ -0,0 +1,84 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "SprintfLiteralChecker.h" +#include "CustomMatchers.h" + +void SprintfLiteralChecker::registerMatchers(MatchFinder *AstMatcher) { + AstMatcher->addMatcher( + callExpr( + isSnprintfLikeFunc(), + allOf(hasArgument( + 0, ignoringParenImpCasts(declRefExpr().bind("buffer"))), + anyOf(hasArgument(1, sizeOfExpr(has(ignoringParenImpCasts( + declRefExpr().bind("size"))))), + hasArgument(1, integerLiteral().bind("immediate")), + hasArgument(1, declRefExpr(to(varDecl( + hasType(isConstQualified()), + hasInitializer(integerLiteral().bind( + "constant"))))))))) + .bind("funcCall"), + this); +} + +void SprintfLiteralChecker::check(const MatchFinder::MatchResult &Result) { + if (!Result.Context->getLangOpts().CPlusPlus) { + // SprintfLiteral is not usable in C, so there is no point in issuing these + // warnings. + return; + } + + const char *Error = + "Use %1 instead of %0 when writing into a character array."; + const char *Note = + "This will prevent passing in the wrong size to %0 accidentally."; + + const CallExpr *D = Result.Nodes.getNodeAs("funcCall"); + + StringRef Name = D->getDirectCallee()->getName(); + const char *Replacement; + if (Name == "snprintf") { + Replacement = "SprintfLiteral"; + } else { + assert(Name == "vsnprintf"); + Replacement = "VsprintfLiteral"; + } + + const DeclRefExpr *Buffer = Result.Nodes.getNodeAs("buffer"); + const DeclRefExpr *Size = Result.Nodes.getNodeAs("size"); + if (Size) { + // Match calls like snprintf(x, sizeof(x), ...). + if (Buffer->getFoundDecl() != Size->getFoundDecl()) { + return; + } + + diag(D->getBeginLoc(), Error, DiagnosticIDs::Error) << Name << Replacement; + diag(D->getBeginLoc(), Note, DiagnosticIDs::Note) << Name; + return; + } + + const QualType QType = Buffer->getType(); + const ConstantArrayType *Type = + dyn_cast(QType.getTypePtrOrNull()); + if (Type) { + // Match calls like snprintf(x, 100, ...), where x is int[100]; + const IntegerLiteral *Literal = + Result.Nodes.getNodeAs("immediate"); + if (!Literal) { + // Match calls like: const int y = 100; snprintf(x, y, ...); + Literal = Result.Nodes.getNodeAs("constant"); + } + + // We're going to assume here that the bitwidth of both of these values fits + // within 64 bits. and zero-extend both values to 64-bits before comparing + // them. + uint64_t Size = Type->getSize().getZExtValue(); + uint64_t Lit = Literal->getValue().getZExtValue(); + if (Size <= Lit) { + diag(D->getBeginLoc(), Error, DiagnosticIDs::Error) + << Name << Replacement; + diag(D->getBeginLoc(), Note, DiagnosticIDs::Note) << Name; + } + } +} diff --git a/build/clang-plugin/SprintfLiteralChecker.h b/build/clang-plugin/SprintfLiteralChecker.h new file mode 100644 index 0000000000..bf407987b3 --- /dev/null +++ b/build/clang-plugin/SprintfLiteralChecker.h @@ -0,0 +1,18 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#ifndef SprintfLiteralChecker_h__ +#define SprintfLiteralChecker_h__ + +#include "plugin.h" + +class SprintfLiteralChecker : public BaseCheck { +public: + SprintfLiteralChecker(StringRef CheckName, ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; +}; + +#endif diff --git a/build/clang-plugin/StmtToBlockMap.h b/build/clang-plugin/StmtToBlockMap.h new file mode 100644 index 0000000000..f499cb91ec --- /dev/null +++ b/build/clang-plugin/StmtToBlockMap.h @@ -0,0 +1,90 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef StmtToBlockMap_h__ +#define StmtToBlockMap_h__ + +#include "Utils.h" + +// This method is copied from clang-tidy's ExprSequence.cpp. +// +// Returns the Stmt nodes that are parents of 'S', skipping any potential +// intermediate non-Stmt nodes. +// +// In almost all cases, this function returns a single parent or no parents at +// all. +inline SmallVector getParentStmts(const Stmt *S, + ASTContext *Context) { + SmallVector Result; + + auto Parents = Context->getParents(*S); + + SmallVector NodesToProcess(Parents.begin(), + Parents.end()); + + while (!NodesToProcess.empty()) { + clang::DynTypedNode Node = NodesToProcess.back(); + NodesToProcess.pop_back(); + + if (const auto *S = Node.get()) { + Result.push_back(S); + } else { + Parents = Context->getParents(Node); + NodesToProcess.append(Parents.begin(), Parents.end()); + } + } + + return Result; +} + +// This class is a modified version of the class from clang-tidy's +// ExprSequence.cpp +// +// Maps `Stmt`s to the `CFGBlock` that contains them. Some `Stmt`s may be +// contained in more than one `CFGBlock`; in this case, they are mapped to the +// innermost block (i.e. the one that is furthest from the root of the tree). +// An optional outparameter provides the index into the block where the `Stmt` +// was found. +class StmtToBlockMap { +public: + // Initializes the map for the given `CFG`. + StmtToBlockMap(const CFG *TheCFG, ASTContext *TheContext) + : Context(TheContext) { + for (const auto *B : *TheCFG) { + for (size_t I = 0; I < B->size(); ++I) { + if (Optional S = (*B)[I].getAs()) { + Map[S->getStmt()] = std::make_pair(B, I); + } + } + } + } + + // Returns the block that S is contained in. Some `Stmt`s may be contained + // in more than one `CFGBlock`; in this case, this function returns the + // innermost block (i.e. the one that is furthest from the root of the tree). + // + // The optional outparameter `Index` is set to the index into the block where + // the `Stmt` was found. 
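  // Editor's illustration, not part of the original patch: typical usage is
  // along these lines, assuming TheCFG, TheContext and SomeStmt already exist
  // in the caller:
  //
  //   StmtToBlockMap BlockMap(TheCFG, TheContext);
  //   size_t Index;
  //   if (const CFGBlock *Block = BlockMap.blockContainingStmt(SomeStmt,
  //                                                            &Index)) {
  //     // SomeStmt (or its nearest mapped ancestor) is element Index of Block.
  //   }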
+ const CFGBlock *blockContainingStmt(const Stmt *S, + size_t *Index = nullptr) const { + while (!Map.count(S)) { + SmallVector Parents = getParentStmts(S, Context); + if (Parents.empty()) + return nullptr; + S = Parents[0]; + } + + const auto &E = Map.lookup(S); + if (Index) + *Index = E.second; + return E.first; + } + +private: + ASTContext *Context; + + llvm::DenseMap> Map; +}; + +#endif // StmtToBlockMap_h__ diff --git a/build/clang-plugin/TemporaryLifetimeBoundChecker.cpp b/build/clang-plugin/TemporaryLifetimeBoundChecker.cpp new file mode 100644 index 0000000000..dc66f62b0d --- /dev/null +++ b/build/clang-plugin/TemporaryLifetimeBoundChecker.cpp @@ -0,0 +1,91 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "TemporaryLifetimeBoundChecker.h" +#include "CustomMatchers.h" +#include "clang/Lex/Lexer.h" + +void TemporaryLifetimeBoundChecker::registerMatchers(MatchFinder *AstMatcher) { + // Look for a call to a MOZ_LIFETIME_BOUND member function + auto isTemporaryLifetimeBoundCall = + cxxMemberCallExpr( + onImplicitObjectArgument(anyOf(has(cxxTemporaryObjectExpr()), + has(materializeTemporaryExpr()))), + callee(functionDecl(isMozTemporaryLifetimeBound()))) + .bind("call"); + + // XXX This definitely does not catch everything relevant. In particular, the + // matching on conditionalOperator would need to be recursive. But it's a + // start. + auto hasTemporaryLifetimeBoundCall = + anyOf(isTemporaryLifetimeBoundCall, + conditionalOperator( + anyOf(hasFalseExpression(isTemporaryLifetimeBoundCall), + hasTrueExpression(isTemporaryLifetimeBoundCall)))); + + AstMatcher->addMatcher( + returnStmt(hasReturnValue( + allOf(exprWithCleanups().bind("expr-with-cleanups"), + ignoringParenCasts(hasTemporaryLifetimeBoundCall)))) + .bind("return-stmt"), + this); + + AstMatcher->addMatcher( + varDecl(hasType(references(cxxRecordDecl())), + hasInitializer( + allOf(exprWithCleanups(), + ignoringParenCasts(hasTemporaryLifetimeBoundCall)))) + .bind("var-decl"), + this); +} + +void TemporaryLifetimeBoundChecker::check( + const MatchFinder::MatchResult &Result) { + const auto *Call = Result.Nodes.getNodeAs("call"); + const auto *ReturnStatement = + Result.Nodes.getNodeAs("return-stmt"); + const auto *ReferenceVarDecl = Result.Nodes.getNodeAs("var-decl"); + + const char ErrorReturn[] = + "cannot return result of lifetime-bound function %0 on " + "temporary of type %1"; + + const char ErrorBindToReference[] = + "cannot bind result of lifetime-bound function %0 on " + "temporary of type %1 to reference, does not extend lifetime"; + + const char NoteCalledFunction[] = "member function declared here"; + + // We are either a return statement... + if (ReturnStatement) { + const auto *ExprWithCleanups = + Result.Nodes.getNodeAs("expr-with-cleanups"); + if (!ExprWithCleanups->isLValue()) { + return; + } + + const auto Range = ReturnStatement->getSourceRange(); + + diag(Range.getBegin(), ErrorReturn, DiagnosticIDs::Error) + << Range << Call->getMethodDecl() + << Call->getImplicitObjectArgument() + ->getType() + .withoutLocalFastQualifiers(); + } + + // ... 
or a variable declaration that declare a reference + if (ReferenceVarDecl) { + const auto Range = ReferenceVarDecl->getSourceRange(); + + diag(Range.getBegin(), ErrorBindToReference, DiagnosticIDs::Error) + << Range << Call->getMethodDecl() + << Call->getImplicitObjectArgument() + ->getType() + .withoutLocalFastQualifiers(); + } + + const auto *MethodDecl = Call->getMethodDecl(); + diag(MethodDecl->getCanonicalDecl()->getLocation(), NoteCalledFunction, + DiagnosticIDs::Note); +} diff --git a/build/clang-plugin/TemporaryLifetimeBoundChecker.h b/build/clang-plugin/TemporaryLifetimeBoundChecker.h new file mode 100644 index 0000000000..712c0de9c0 --- /dev/null +++ b/build/clang-plugin/TemporaryLifetimeBoundChecker.h @@ -0,0 +1,22 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef TemporaryLifetimeBoundChecker_h__ +#define TemporaryLifetimeBoundChecker_h__ + +#include "plugin.h" + +class TemporaryLifetimeBoundChecker : public BaseCheck { +public: + TemporaryLifetimeBoundChecker(StringRef CheckName, + ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; + +private: +}; + +#endif diff --git a/build/clang-plugin/ThirdPartyPaths.h b/build/clang-plugin/ThirdPartyPaths.h new file mode 100644 index 0000000000..6a497923f2 --- /dev/null +++ b/build/clang-plugin/ThirdPartyPaths.h @@ -0,0 +1,17 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef ThirdPartyPaths_h__ +#define ThirdPartyPaths_h__ + +#include + +// These two values are defined in ThirdPartyPaths.cpp, which is a file +// generated by ThirdPartyPaths.py. + +extern const char *MOZ_THIRD_PARTY_PATHS[]; + +extern const uint32_t MOZ_THIRD_PARTY_PATHS_COUNT; + +#endif diff --git a/build/clang-plugin/ThirdPartyPaths.py b/build/clang-plugin/ThirdPartyPaths.py new file mode 100644 index 0000000000..caaa919d43 --- /dev/null +++ b/build/clang-plugin/ThirdPartyPaths.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python3 + +import json + + +def generate(output, *input_paths): + """ + This file generates a ThirdPartyPaths.cpp file from the ThirdPartyPaths.txt + file in /tools/rewriting, which is used by the Clang Plugin to help identify + sources which should be ignored. + """ + tpp_list = [] + lines = set() + + for path in input_paths: + with open(path) as f: + lines.update(f.readlines()) + + for line in lines: + line = line.strip() + if line.endswith("/"): + line = line[:-1] + tpp_list.append(line) + tpp_strings = ",\n ".join([json.dumps(tpp) for tpp in sorted(tpp_list)]) + + output.write( + """\ +/* THIS FILE IS GENERATED BY ThirdPartyPaths.py - DO NOT EDIT */ + +#include + +const char* MOZ_THIRD_PARTY_PATHS[] = { + %s +}; + +extern const uint32_t MOZ_THIRD_PARTY_PATHS_COUNT = %d; + +""" + % (tpp_strings, len(tpp_list)) + ) diff --git a/build/clang-plugin/ThreadAllows.py b/build/clang-plugin/ThreadAllows.py new file mode 100644 index 0000000000..e45f629254 --- /dev/null +++ b/build/clang-plugin/ThreadAllows.py @@ -0,0 +1,59 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. 
If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. +import json + +FIRST_LINE = "// This file was generated by generate_thread_allows.py. DO NOT EDIT." + + +def generate_allows(input_paths): + """ + This script reads in the ThreadAllows.txt and ThreadFileAllows.txt lists + and generates a header file containing a two arrays of allowed threads. + These can be the following formats: + -Files which the checker should ignore + These files either contain definitions of NS_NewNamedThread or + use args which the plugin can't cast (such as func args). + -Thread names which the checker should ignore + Specifies which individual thread names to ignore. + """ + file_list = [] + name_list = [] + lines = set() + + for path in input_paths: + with open(path) as file: + lines.update(file.readlines()) + + for line in sorted(lines): + """ + We are assuming lines ending in .cpp, .h are files. Threads should + NOT have names containing filenames. Please don't do that. + """ + line = line.strip() + if line.endswith(".cpp") or line.endswith(".h"): + file_list.append(line) + else: + name_list.append(line) + file_list_s = ",\n ".join(json.dumps(elem) for elem in file_list) + name_list_s = ",\n ".join(json.dumps(elem) for elem in name_list) + output_string = ( + FIRST_LINE + + """ + +static const char *allow_thread_files[] = { + %s +}; + +static const char *allow_thread_names[] = { + %s +}; + + """ + % (file_list_s, name_list_s) + ) + return output_string + + +def generate_file(output, *input_paths): + output.write(generate_allows(input_paths)) diff --git a/build/clang-plugin/ThreadAllows.txt b/build/clang-plugin/ThreadAllows.txt new file mode 100644 index 0000000000..8f1e1cd584 --- /dev/null +++ b/build/clang-plugin/ThreadAllows.txt @@ -0,0 +1,95 @@ +ApplyUpdates +AsyncShutdownPr +AsyncShutdownWt +Atom Test +AutoRefCnt Test +AutoTestThread +AwaitIdleMixed +AwaitIdlePaused +BGReadURLs +BHMgr Processor +COM Intcpt Log +COM MTA +Cache I/O +Cameras IPC +ChainedPipePump +ChainedPipeRecv +Checker Test +Compositor +Cookie +CrashRep Inject +DDMediaLogs +DOMCacheThread +DataChannel IO +DataStorage +DatabaseLocker +DecodeToSurface +Decoder Test +FileWatcher IO +Font Loader +FontEnumThread +Function Broker +GMPThread +Gamepad +GeckoProfGTest +GraphRunner +HTML5 Parser +ICS parser +IMAP +IPC Launch +IPDL Background +IdentityCrypto +ImageBridgeChld +LS Thread +LayerScope +MDCDMThread +MWQThread +MediaCache +MediaTelemetry +MediaTrackGrph +mtransport +NamedPipeSrv +Netlink Monitor +OSKeyStore +OutputDrain +PaintThread +Permission +PlayEventSound +ProcessHangMon +ProfSymbolTable +ProfilerChild +ProxyResolution +RemoteLzyStream +RWLockTester +RacingServMan +RemVidChild +Sandbox Testing +SaveScripts +Socket Thread +SpeechWorker +SpinEventLoop +StressRunner +SuicideManager +SuicideThread +TEQ AwaitIdle +Test Thread +Test thread +TestPipe +TestShortWrites +TestThreadsMain +Testing Thread +Timer Thread +ToastBgThread +TRR Background +URL Classifier +Update Watcher +VRService +VsyncIOThread +Wifi Monitor +Worker Launcher +speechd init +t1 +t2 +thread +thread shutdown +wifi tickler diff --git a/build/clang-plugin/ThreadFileAllows.txt b/build/clang-plugin/ThreadFileAllows.txt new file mode 100644 index 0000000000..7fe559ce70 --- /dev/null +++ b/build/clang-plugin/ThreadFileAllows.txt @@ -0,0 +1,11 @@ +ActorsParent.cpp +DecodePool.cpp +GeckoChildProcessHost.cpp +LazyIdleThread.cpp +LazyIdleThread.h +VRThread.cpp +mozStorageConnection.cpp +nr_socket_prsock.cpp +nsThreadPool.cpp 
+nsThreadUtils.cpp +nsThreadUtils.h diff --git a/build/clang-plugin/TrivialCtorDtorChecker.cpp b/build/clang-plugin/TrivialCtorDtorChecker.cpp new file mode 100644 index 0000000000..59576a5b64 --- /dev/null +++ b/build/clang-plugin/TrivialCtorDtorChecker.cpp @@ -0,0 +1,29 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "TrivialCtorDtorChecker.h" +#include "CustomMatchers.h" + +void TrivialCtorDtorChecker::registerMatchers(MatchFinder *AstMatcher) { + AstMatcher->addMatcher(cxxRecordDecl(hasTrivialCtorDtor()).bind("node"), + this); +} + +void TrivialCtorDtorChecker::check(const MatchFinder::MatchResult &Result) { + const char *Error = "class %0 must have trivial constructors and destructors"; + const CXXRecordDecl *Node = Result.Nodes.getNodeAs("node"); + + if (!Node->hasDefinition()) { + return; + } + + // We need to accept non-constexpr trivial constructors as well. This occurs + // when a struct contains pod members, which will not be initialized. As + // constexpr values are initialized, the constructor is non-constexpr. + bool BadCtor = !(Node->hasConstexprDefaultConstructor() || + Node->hasTrivialDefaultConstructor()); + bool BadDtor = !Node->hasTrivialDestructor(); + if (BadCtor || BadDtor) + diag(Node->getBeginLoc(), Error, DiagnosticIDs::Error) << Node; +} diff --git a/build/clang-plugin/TrivialCtorDtorChecker.h b/build/clang-plugin/TrivialCtorDtorChecker.h new file mode 100644 index 0000000000..6b44016781 --- /dev/null +++ b/build/clang-plugin/TrivialCtorDtorChecker.h @@ -0,0 +1,18 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef TrivialCtorDtorChecker_h__ +#define TrivialCtorDtorChecker_h__ + +#include "plugin.h" + +class TrivialCtorDtorChecker : public BaseCheck { +public: + TrivialCtorDtorChecker(StringRef CheckName, ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; +}; + +#endif diff --git a/build/clang-plugin/TrivialDtorChecker.cpp b/build/clang-plugin/TrivialDtorChecker.cpp new file mode 100644 index 0000000000..ffcd2ae101 --- /dev/null +++ b/build/clang-plugin/TrivialDtorChecker.cpp @@ -0,0 +1,23 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#include "TrivialDtorChecker.h" +#include "CustomMatchers.h" + +void TrivialDtorChecker::registerMatchers(MatchFinder *AstMatcher) { + AstMatcher->addMatcher(cxxRecordDecl(hasTrivialDtor()).bind("node"), this); +} + +void TrivialDtorChecker::check(const MatchFinder::MatchResult &Result) { + const char *Error = "class %0 must have a trivial destructor"; + const CXXRecordDecl *Node = Result.Nodes.getNodeAs("node"); + + if (!Node->hasDefinition()) { + return; + } + + bool BadDtor = !Node->hasTrivialDestructor(); + if (BadDtor) + diag(Node->getBeginLoc(), Error, DiagnosticIDs::Error) << Node; +} diff --git a/build/clang-plugin/TrivialDtorChecker.h b/build/clang-plugin/TrivialDtorChecker.h new file mode 100644 index 0000000000..dd0be727e6 --- /dev/null +++ b/build/clang-plugin/TrivialDtorChecker.h @@ -0,0 +1,18 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef TrivialDtorChecker_h__ +#define TrivialDtorChecker_h__ + +#include "plugin.h" + +class TrivialDtorChecker : public BaseCheck { +public: + TrivialDtorChecker(StringRef CheckName, ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; +}; + +#endif diff --git a/build/clang-plugin/Utils.h b/build/clang-plugin/Utils.h new file mode 100644 index 0000000000..d38f25d3b7 --- /dev/null +++ b/build/clang-plugin/Utils.h @@ -0,0 +1,492 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef Utils_h__ +#define Utils_h__ + +#include "CustomAttributes.h" +#include "ThirdPartyPaths.h" +#include "ThreadAllows.h" +#include "plugin.h" + +inline StringRef getFilename(const SourceManager &SM, SourceLocation Loc) { + // We use the presumed location to handle #line directives and such, so the + // plugin is friendly to icecc / sccache users. + auto PL = SM.getPresumedLoc(Loc); + if (PL.isValid()) { + return StringRef(PL.getFilename()); + } + return SM.getFilename(Loc); +} + +// Check if the given expression contains an assignment expression. +// This can either take the form of a Binary Operator or a +// Overloaded Operator Call. +inline bool hasSideEffectAssignment(const Expr *Expression) { + if (auto OpCallExpr = dyn_cast_or_null(Expression)) { + auto BinOp = OpCallExpr->getOperator(); + if (BinOp == OO_Equal || (BinOp >= OO_PlusEqual && BinOp <= OO_PipeEqual)) { + return true; + } + } else if (auto BinOpExpr = dyn_cast_or_null(Expression)) { + if (BinOpExpr->isAssignmentOp()) { + return true; + } + } + + // Recurse to children. + for (const Stmt *SubStmt : Expression->children()) { + auto ChildExpr = dyn_cast_or_null(SubStmt); + if (ChildExpr && hasSideEffectAssignment(ChildExpr)) { + return true; + } + } + + return false; +} + +template +inline bool ASTIsInSystemHeader(const ASTContext &AC, const T &D) { + auto &SourceManager = AC.getSourceManager(); + auto ExpansionLoc = SourceManager.getExpansionLoc(D.getBeginLoc()); + if (ExpansionLoc.isInvalid()) { + return false; + } + return SourceManager.isInSystemHeader(ExpansionLoc); +} + +template inline StringRef getNameChecked(const T &D) { + return D->getIdentifier() ? 
D->getName() : ""; +} + +/// A cached data of whether classes are refcounted or not. +typedef DenseMap> + RefCountedMap; +extern RefCountedMap RefCountedClasses; + +inline bool classHasAddRefRelease(const CXXRecordDecl *D) { + const RefCountedMap::iterator &It = RefCountedClasses.find(D); + if (It != RefCountedClasses.end()) { + return It->second.second; + } + + bool SeenAddRef = false; + bool SeenRelease = false; + for (CXXRecordDecl::method_iterator Method = D->method_begin(); + Method != D->method_end(); ++Method) { + const auto &Name = getNameChecked(Method); + if (Name == "AddRef") { + SeenAddRef = true; + } else if (Name == "Release") { + SeenRelease = true; + } + } + RefCountedClasses[D] = std::make_pair(D, SeenAddRef && SeenRelease); + return SeenAddRef && SeenRelease; +} + +inline bool isClassRefCounted(QualType T); + +inline bool isClassRefCounted(const CXXRecordDecl *D) { + // Normalize so that D points to the definition if it exists. + if (!D->hasDefinition()) + return false; + D = D->getDefinition(); + // Base class: anyone with AddRef/Release is obviously a refcounted class. + if (classHasAddRefRelease(D)) + return true; + + // Look through all base cases to figure out if the parent is a refcounted + // class. + for (CXXRecordDecl::base_class_const_iterator Base = D->bases_begin(); + Base != D->bases_end(); ++Base) { + bool Super = isClassRefCounted(Base->getType()); + if (Super) { + return true; + } + } + + return false; +} + +inline bool isClassRefCounted(QualType T) { + while (const clang::ArrayType *ArrTy = T->getAsArrayTypeUnsafe()) + T = ArrTy->getElementType(); + CXXRecordDecl *Clazz = T->getAsCXXRecordDecl(); + return Clazz ? isClassRefCounted(Clazz) : false; +} + +inline const FieldDecl *getClassRefCntMember(const CXXRecordDecl *D) { + for (RecordDecl::field_iterator Field = D->field_begin(), E = D->field_end(); + Field != E; ++Field) { + if (getNameChecked(Field) == "mRefCnt") { + return *Field; + } + } + return 0; +} + +inline bool typeHasVTable(QualType T) { + while (const clang::ArrayType *ArrTy = T->getAsArrayTypeUnsafe()) + T = ArrTy->getElementType(); + CXXRecordDecl *Offender = T->getAsCXXRecordDecl(); + return Offender && Offender->hasDefinition() && Offender->isDynamicClass(); +} + +inline StringRef getDeclarationNamespace(const Decl *Declaration) { + const DeclContext *DC = + Declaration->getDeclContext()->getEnclosingNamespaceContext(); + const NamespaceDecl *ND = dyn_cast(DC); + if (!ND) { + return ""; + } + + while (const DeclContext *ParentDC = ND->getParent()) { + if (!isa(ParentDC)) { + break; + } + ND = cast(ParentDC); + } + + const auto &Name = ND->getName(); + return Name; +} + +inline bool isInIgnoredNamespaceForImplicitCtor(const Decl *Declaration) { + StringRef Name = getDeclarationNamespace(Declaration); + if (Name == "") { + return false; + } + + return Name == "std" || // standard C++ lib + Name == "__gnu_cxx" || // gnu C++ lib + Name == "boost" || // boost + Name == "webrtc" || // upstream webrtc + Name == "rtc" || // upstream webrtc 'base' package + Name.startswith("icu_") || // icu + Name == "google" || // protobuf + Name == "google_breakpad" || // breakpad + Name == "soundtouch" || // libsoundtouch + Name == "stagefright" || // libstagefright + Name == "MacFileUtilities" || // MacFileUtilities + Name == "dwarf2reader" || // dwarf2reader + Name == "arm_ex_to_module" || // arm_ex_to_module + Name == "testing" || // gtest + Name == "Json" || // jsoncpp + Name == "rlbox" || // rlbox + Name == "v8"; // irregexp +} + +inline bool 
isInIgnoredNamespaceForImplicitConversion(const Decl *Declaration) { + StringRef Name = getDeclarationNamespace(Declaration); + if (Name == "") { + return false; + } + + return Name == "std" || // standard C++ lib + Name == "__gnu_cxx" || // gnu C++ lib + Name == "google_breakpad" || // breakpad + Name == "testing" || // gtest + Name == "rlbox"; // rlbox +} + +inline bool isIgnoredPathForImplicitConversion(const Decl *Declaration) { + Declaration = Declaration->getCanonicalDecl(); + SourceLocation Loc = Declaration->getLocation(); + const SourceManager &SM = Declaration->getASTContext().getSourceManager(); + SmallString<1024> FileName = getFilename(SM, Loc); + llvm::sys::fs::make_absolute(FileName); + llvm::sys::path::reverse_iterator Begin = llvm::sys::path::rbegin(FileName), + End = llvm::sys::path::rend(FileName); + for (; Begin != End; ++Begin) { + if (Begin->compare_lower(StringRef("graphite2")) == 0) { + return true; + } + if (Begin->compare_lower(StringRef("chromium")) == 0) { + // Ignore security/sandbox/chromium but not ipc/chromium. + ++Begin; + return Begin != End && Begin->compare_lower(StringRef("sandbox")) == 0; + } + } + return false; +} + +inline bool isIgnoredPathForSprintfLiteral(const CallExpr *Call, + const SourceManager &SM) { + SourceLocation Loc = Call->getBeginLoc(); + SmallString<1024> FileName = getFilename(SM, Loc); + llvm::sys::fs::make_absolute(FileName); + llvm::sys::path::reverse_iterator Begin = llvm::sys::path::rbegin(FileName), + End = llvm::sys::path::rend(FileName); + for (; Begin != End; ++Begin) { + if (Begin->compare_lower(StringRef("angle")) == 0 || + Begin->compare_lower(StringRef("chromium")) == 0 || + Begin->compare_lower(StringRef("crashreporter")) == 0 || + Begin->compare_lower(StringRef("google-breakpad")) == 0 || + Begin->compare_lower(StringRef("gflags")) == 0 || + Begin->compare_lower(StringRef("harfbuzz")) == 0 || + Begin->compare_lower(StringRef("icu")) == 0 || + Begin->compare_lower(StringRef("jsoncpp")) == 0 || + Begin->compare_lower(StringRef("libstagefright")) == 0 || + Begin->compare_lower(StringRef("transport")) == 0 || + Begin->compare_lower(StringRef("protobuf")) == 0 || + Begin->compare_lower(StringRef("skia")) == 0 || + Begin->compare_lower(StringRef("sfntly")) == 0 || + // Gtest uses snprintf as GTEST_SNPRINTF_ with sizeof + Begin->compare_lower(StringRef("testing")) == 0) { + return true; + } + if (Begin->compare_lower(StringRef("webrtc")) == 0) { + // Ignore trunk/webrtc, but not media/webrtc + ++Begin; + return Begin != End && Begin->compare_lower(StringRef("trunk")) == 0; + } + } + return false; +} + +inline bool isInterestingDeclForImplicitConversion(const Decl *Declaration) { + return !isInIgnoredNamespaceForImplicitConversion(Declaration) && + !isIgnoredPathForImplicitConversion(Declaration); +} + +inline bool isIgnoredExprForMustUse(const Expr *E) { + if (const CXXOperatorCallExpr *OpCall = dyn_cast(E)) { + switch (OpCall->getOperator()) { + case OO_Equal: + case OO_PlusEqual: + case OO_MinusEqual: + case OO_StarEqual: + case OO_SlashEqual: + case OO_PercentEqual: + case OO_CaretEqual: + case OO_AmpEqual: + case OO_PipeEqual: + case OO_LessLessEqual: + case OO_GreaterGreaterEqual: + return true; + default: + return false; + } + } + + if (const BinaryOperator *Op = dyn_cast(E)) { + return Op->isAssignmentOp(); + } + + return false; +} + +inline bool typeIsRefPtr(QualType Q) { + CXXRecordDecl *D = Q->getAsCXXRecordDecl(); + if (!D || !D->getIdentifier()) { + return false; + } + + StringRef name = D->getName(); + if (name 
== "RefPtr" || name == "nsCOMPtr") { + return true; + } + return false; +} + +// The method defined in clang for ignoring implicit nodes doesn't work with +// some AST trees. To get around this, we define our own implementation of +// IgnoreTrivials. +inline const Stmt *MaybeSkipOneTrivial(const Stmt *s) { + if (!s) { + return nullptr; + } + if (auto *ewc = dyn_cast(s)) { + return ewc->getSubExpr(); + } + if (auto *mte = dyn_cast(s)) { + // With clang 10 and up `getTemporary` has been replaced with the more + // versatile `getSubExpr`. +#if CLANG_VERSION_FULL >= 1000 + return mte->getSubExpr(); +#else + return mte->GetTemporaryExpr(); +#endif + } + if (auto *bte = dyn_cast(s)) { + return bte->getSubExpr(); + } + if (auto *ce = dyn_cast(s)) { + s = ce->getSubExpr(); + } + if (auto *pe = dyn_cast(s)) { + s = pe->getSubExpr(); + } + // Not a trivial. + return s; +} + +inline const Stmt *IgnoreTrivials(const Stmt *s) { + while (true) { + const Stmt *newS = MaybeSkipOneTrivial(s); + if (newS == s) { + return newS; + } + s = newS; + } + + // Unreachable + return nullptr; +} + +inline const Expr *IgnoreTrivials(const Expr *e) { + return cast_or_null(IgnoreTrivials(static_cast(e))); +} + +// Returns the input if the input is not a trivial. +inline const Expr *MaybeSkipOneTrivial(const Expr *e) { + return cast_or_null(MaybeSkipOneTrivial(static_cast(e))); +} + +const FieldDecl *getBaseRefCntMember(QualType T); + +inline const FieldDecl *getBaseRefCntMember(const CXXRecordDecl *D) { + const FieldDecl *RefCntMember = getClassRefCntMember(D); + if (RefCntMember && isClassRefCounted(D)) { + return RefCntMember; + } + + for (CXXRecordDecl::base_class_const_iterator Base = D->bases_begin(), + E = D->bases_end(); + Base != E; ++Base) { + RefCntMember = getBaseRefCntMember(Base->getType()); + if (RefCntMember) { + return RefCntMember; + } + } + return 0; +} + +inline const FieldDecl *getBaseRefCntMember(QualType T) { + while (const clang::ArrayType *ArrTy = T->getAsArrayTypeUnsafe()) + T = ArrTy->getElementType(); + CXXRecordDecl *Clazz = T->getAsCXXRecordDecl(); + return Clazz ? getBaseRefCntMember(Clazz) : 0; +} + +inline bool isPlacementNew(const CXXNewExpr *Expression) { + // Regular new expressions aren't placement new + if (Expression->getNumPlacementArgs() == 0) + return false; + const FunctionDecl *Declaration = Expression->getOperatorNew(); + if (Declaration && hasCustomAttribute(Declaration)) { + return false; + } + return true; +} + +extern DenseMap InThirdPartyPathCache; + +inline bool inThirdPartyPath(SourceLocation Loc, const SourceManager &SM) { + StringRef OriginalFileName = getFilename(SM, Loc); + auto pair = InThirdPartyPathCache.find(OriginalFileName); + if (pair != InThirdPartyPathCache.end()) { + return pair->second; + } + + SmallString<1024> FileName = OriginalFileName; + llvm::sys::fs::make_absolute(FileName); + + for (uint32_t i = 0; i < MOZ_THIRD_PARTY_PATHS_COUNT; ++i) { + auto PathB = sys::path::begin(FileName); + auto PathE = sys::path::end(FileName); + + auto ThirdPartyB = sys::path::begin(MOZ_THIRD_PARTY_PATHS[i]); + auto ThirdPartyE = sys::path::end(MOZ_THIRD_PARTY_PATHS[i]); + + for (; PathB != PathE; ++PathB) { + // Perform an inner loop to compare path segments, checking if the current + // segment is the start of the current third party path. 
+ auto IPathB = PathB; + auto IThirdPartyB = ThirdPartyB; + for (; IPathB != PathE && IThirdPartyB != ThirdPartyE; + ++IPathB, ++IThirdPartyB) { + if (IPathB->compare_lower(*IThirdPartyB) != 0) { + break; + } + } + + // We found a match! + if (IThirdPartyB == ThirdPartyE) { + InThirdPartyPathCache.insert(std::make_pair(OriginalFileName, true)); + return true; + } + } + } + + InThirdPartyPathCache.insert(std::make_pair(OriginalFileName, false)); + return false; +} + +inline bool inThirdPartyPath(const Decl *D, ASTContext *context) { + D = D->getCanonicalDecl(); + SourceLocation Loc = D->getLocation(); + const SourceManager &SM = context->getSourceManager(); + + return inThirdPartyPath(Loc, SM); +} + +inline CXXRecordDecl *getNonTemplateSpecializedCXXRecordDecl(QualType Q) { + auto *D = Q->getAsCXXRecordDecl(); + + if (!D) { + auto TemplateQ = Q->getAs(); + if (!TemplateQ) { + return nullptr; + } + + auto TemplateDecl = TemplateQ->getTemplateName().getAsTemplateDecl(); + if (!TemplateDecl) { + return nullptr; + } + + D = dyn_cast_or_null(TemplateDecl->getTemplatedDecl()); + if (!D) { + return nullptr; + } + } + + return D; +} + +inline bool inThirdPartyPath(const Decl *D) { + return inThirdPartyPath(D, &D->getASTContext()); +} + +inline bool inThirdPartyPath(const Stmt *S, ASTContext *context) { + SourceLocation Loc = S->getBeginLoc(); + const SourceManager &SM = context->getSourceManager(); + auto ExpansionLoc = SM.getExpansionLoc(Loc); + if (ExpansionLoc.isInvalid()) { + return inThirdPartyPath(Loc, SM); + } + return inThirdPartyPath(ExpansionLoc, SM); +} + +/// Polyfill for CXXOperatorCallExpr::isInfixBinaryOp() +inline bool isInfixBinaryOp(const CXXOperatorCallExpr *OpCall) { +#if CLANG_VERSION_FULL >= 400 + return OpCall->isInfixBinaryOp(); +#else + // Taken from clang source. + if (OpCall->getNumArgs() != 2) + return false; + + switch (OpCall->getOperator()) { + case OO_Call: + case OO_Subscript: + return false; + default: + return true; + } +#endif +} + +#endif diff --git a/build/clang-plugin/VariableUsageHelpers.cpp b/build/clang-plugin/VariableUsageHelpers.cpp new file mode 100644 index 0000000000..75479f1f0b --- /dev/null +++ b/build/clang-plugin/VariableUsageHelpers.cpp @@ -0,0 +1,275 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "VariableUsageHelpers.h" +#include "Utils.h" + +std::vector getUsageAsRvalue(const ValueDecl *ValueDeclaration, + const FunctionDecl *FuncDecl) { + std::vector UsageStatements; + + // We check the function declaration has a body. + auto Body = FuncDecl->getBody(); + if (!Body) { + return std::vector(); + } + + // We build a Control Flow Graph (CFG) fron the body of the function + // declaration. + std::unique_ptr StatementCFG = CFG::buildCFG( + FuncDecl, Body, &FuncDecl->getASTContext(), CFG::BuildOptions()); + + // We iterate through all the CFGBlocks, which basically means that we go over + // all the possible branches of the code and therefore cover all statements. + for (auto &Block : *StatementCFG) { + // We iterate through all the statements of the block. 
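    // Editor's note, not part of the original patch: only two usage shapes are
    // recognized below -- the declaration on the right-hand side of an
    // assignment, or as the returned expression. For a hypothetical parameter
    // `p`, both `mMember = p;` and `return p;` are collected, while a call
    // such as `p->Method();` is not.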
+ for (auto &BlockItem : *Block) { + Optional CFGStatement = BlockItem.getAs(); + if (!CFGStatement) { + continue; + } + + // FIXME: Right now this function/if chain is very basic and only covers + // the cases we need for escapesFunction() + if (auto BinOp = dyn_cast(CFGStatement->getStmt())) { + // We only care about assignments. + if (BinOp->getOpcode() != BO_Assign) { + continue; + } + + // We want our declaration to be used on the right hand side of the + // assignment. + auto DeclRef = dyn_cast(IgnoreTrivials(BinOp->getRHS())); + if (!DeclRef) { + continue; + } + + if (DeclRef->getDecl() != ValueDeclaration) { + continue; + } + } else if (auto Return = dyn_cast(CFGStatement->getStmt())) { + // We want our declaration to be used as the expression of the return + // statement. + auto DeclRef = dyn_cast_or_null( + IgnoreTrivials(Return->getRetValue())); + if (!DeclRef) { + continue; + } + + if (DeclRef->getDecl() != ValueDeclaration) { + continue; + } + } else { + continue; + } + + // We didn't early-continue, so we add the statement to the list. + UsageStatements.push_back(CFGStatement->getStmt()); + } + } + + return UsageStatements; +} + +// We declare our EscapesFunctionError enum to be an error code enum. +namespace std { +template <> struct is_error_code_enum : true_type {}; +} // namespace std + +// We define the EscapesFunctionErrorCategory which contains the error messages +// corresponding to each enum variant. +namespace { +struct EscapesFunctionErrorCategory : std::error_category { + const char *name() const noexcept override; + std::string message(int ev) const override; +}; + +const char *EscapesFunctionErrorCategory::name() const noexcept { + return "escapes function"; +} + +std::string EscapesFunctionErrorCategory::message(int ev) const { + switch (static_cast(ev)) { + case EscapesFunctionError::ConstructorDeclNotFound: + return "constructor declaration not found"; + + case EscapesFunctionError::FunctionDeclNotFound: + return "function declaration not found"; + + case EscapesFunctionError::FunctionIsBuiltin: + return "function is builtin"; + + case EscapesFunctionError::FunctionIsVariadic: + return "function is variadic"; + + case EscapesFunctionError::ExprNotInCall: + return "expression is not in call"; + + case EscapesFunctionError::NoParamForArg: + return "no parameter for argument"; + + case EscapesFunctionError::ArgAndParamNotPointers: + return "argument and parameter are not pointers"; + } +} + +const EscapesFunctionErrorCategory TheEscapesFunctionErrorCategory{}; +} // namespace + +std::error_code make_error_code(EscapesFunctionError e) { + return {static_cast(e), TheEscapesFunctionErrorCategory}; +} + +ErrorOr> +escapesFunction(const Expr *Arg, const CXXConstructExpr *Construct) { + // We get the function declaration corresponding to the call. + auto CtorDecl = Construct->getConstructor(); + if (!CtorDecl) { + return EscapesFunctionError::ConstructorDeclNotFound; + } + + return escapesFunction(Arg, CtorDecl, Construct->getArgs(), + Construct->getNumArgs()); +} + +ErrorOr> +escapesFunction(const Expr *Arg, const CallExpr *Call) { + // We get the function declaration corresponding to the call. + auto FuncDecl = Call->getDirectCallee(); + if (!FuncDecl) { + return EscapesFunctionError::FunctionDeclNotFound; + } + + return escapesFunction(Arg, FuncDecl, Call->getArgs(), Call->getNumArgs()); +} + +ErrorOr> +escapesFunction(const Expr *Arg, const CXXOperatorCallExpr *OpCall) { + // We get the function declaration corresponding to the operator call. 
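  // Editor's illustration, not part of the original patch: a concrete case for
  // this overload is a member operator call such as `aSink << aPtr`, where
  // `operator<<` is declared with a single parameter; the adjustment below
  // keeps the explicit argument aligned with that parameter.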
+ auto FuncDecl = OpCall->getDirectCallee(); + if (!FuncDecl) { + return EscapesFunctionError::FunctionDeclNotFound; + } + + auto Args = OpCall->getArgs(); + auto NumArgs = OpCall->getNumArgs(); + // If this is an infix binary operator defined as a one-param method, we + // remove the first argument as it is inserted explicitly and creates a + // mismatch with the parameters of the method declaration. + if (isInfixBinaryOp(OpCall) && FuncDecl->getNumParams() == 1) { + Args++; + NumArgs--; + } + + return escapesFunction(Arg, FuncDecl, Args, NumArgs); +} + +ErrorOr> +escapesFunction(const Expr *Arg, const FunctionDecl *FuncDecl, + const Expr *const *Arguments, unsigned NumArgs) { + if (!NumArgs) { + return std::make_tuple((const Stmt *)nullptr, (const Decl *)nullptr); + } + + if (FuncDecl->getBuiltinID() != 0 || + ASTIsInSystemHeader(FuncDecl->getASTContext(), *FuncDecl)) { + return EscapesFunctionError::FunctionIsBuiltin; + } + + // FIXME: should probably be handled at some point, but it's too annoying + // for now. + if (FuncDecl->isVariadic()) { + return EscapesFunctionError::FunctionIsVariadic; + } + + // We find the argument number corresponding to the Arg expression. + unsigned ArgNum = 0; + for (unsigned i = 0; i < NumArgs; i++) { + if (IgnoreTrivials(Arg) == IgnoreTrivials(Arguments[i])) { + break; + } + ++ArgNum; + } + // If we don't find it, we early-return NoneType. + if (ArgNum >= NumArgs) { + return EscapesFunctionError::ExprNotInCall; + } + + // Now we get the associated parameter. + if (ArgNum >= FuncDecl->getNumParams()) { + return EscapesFunctionError::NoParamForArg; + } + auto Param = FuncDecl->getParamDecl(ArgNum); + + // We want both the argument and the parameter to be of pointer type. + // FIXME: this is enough for the DanglingOnTemporaryChecker, because the + // analysed methods only return pointers, but more cases should probably be + // handled when we want to use this function more broadly. + if ((!Arg->getType().getNonReferenceType()->isPointerType() && + Arg->getType().getNonReferenceType()->isBuiltinType()) || + (!Param->getType().getNonReferenceType()->isPointerType() && + Param->getType().getNonReferenceType()->isBuiltinType())) { + return EscapesFunctionError::ArgAndParamNotPointers; + } + + // We retrieve the usages of the parameter in the function. + auto Usages = getUsageAsRvalue(Param, FuncDecl); + + // For each usage, we check if it doesn't allow the parameter to escape the + // function scope. + for (auto Usage : Usages) { + // In the case of an assignment. + if (auto BinOp = dyn_cast(Usage)) { + // We retrieve the declaration the parameter is assigned to. + auto DeclRef = dyn_cast(BinOp->getLHS()); + if (!DeclRef) { + continue; + } + + if (auto ParamDeclaration = dyn_cast(DeclRef->getDecl())) { + // This is the case where the parameter escapes through another + // parameter. + + // FIXME: for now we only care about references because we only detect + // trivial LHS with just a DeclRefExpr, and not more complex cases like: + // void func(Type* param1, Type** param2) { + // *param2 = param1; + // } + // This should be fixed when we have better/more helper functions to + // help deal with this kind of lvalue expressions. + if (!ParamDeclaration->getType()->isReferenceType()) { + continue; + } + + return std::make_tuple(Usage, (const Decl *)ParamDeclaration); + } else if (auto VarDeclaration = dyn_cast(DeclRef->getDecl())) { + // This is the case where the parameter escapes through a global/static + // variable. 
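For orientation, hypothetical callee bodies (not part of the checker) and the classification the surrounding loop gives them when `p` is the parameter matched to the argument:

int *gEscape;  // hypothetical global

void viaGlobal(int *p) { gEscape = p; }             // reported: escapes through a global/static
void viaOutParam(int *p, int *&aOut) { aOut = p; }  // reported: escapes through a reference parameter
int *viaReturn(int *p) { return p; }                // reported: escapes through the return value
void staysLocal(int *p) {
  int *local = nullptr;
  local = p;  // assignment to a plain local: collected as a usage, but not an escape
}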
+        if (!VarDeclaration->hasGlobalStorage()) {
+          continue;
+        }
+
+        return std::make_tuple(Usage, (const Decl *)VarDeclaration);
+      } else if (auto FieldDeclaration =
+                     dyn_cast<FieldDecl>(DeclRef->getDecl())) {
+        // This is the case where the parameter escapes through a field.
+
+        return std::make_tuple(Usage, (const Decl *)FieldDeclaration);
+      }
+    } else if (isa<ReturnStmt>(Usage)) {
+      // This is the case where the parameter escapes through the return value
+      // of the function.
+      if (!FuncDecl->getReturnType()->isPointerType() &&
+          !FuncDecl->getReturnType()->isReferenceType()) {
+        continue;
+      }
+
+      return std::make_tuple(Usage, (const Decl *)FuncDecl);
+    }
+  }
+
+  // No early-return, this means that we haven't found any case of function
+  // escaping and that therefore the parameter remains in the function scope.
+  return std::make_tuple((const Stmt *)nullptr, (const Decl *)nullptr);
+}
diff --git a/build/clang-plugin/VariableUsageHelpers.h b/build/clang-plugin/VariableUsageHelpers.h
new file mode 100644
index 0000000000..d498857eea
--- /dev/null
+++ b/build/clang-plugin/VariableUsageHelpers.h
@@ -0,0 +1,63 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef VariableUsageHelpers_h__
+#define VariableUsageHelpers_h__
+
+#include "plugin.h"
+
+/// Returns a list of the statements where the given declaration is used as an
+/// rvalue (within the provided function).
+///
+/// WARNING: incomplete behaviour/implementation for general-purpose use outside
+/// of escapesFunction(). This only detects very basic usages (see
+/// implementation for more details).
+std::vector<const Stmt *> getUsageAsRvalue(const ValueDecl *ValueDeclaration,
+                                           const FunctionDecl *FuncDecl);
+
+/// This is the error enumeration for escapesFunction(), describing all the
+/// possible error cases.
+enum class EscapesFunctionError {
+  ConstructorDeclNotFound = 1,
+  FunctionDeclNotFound,
+  FunctionIsBuiltin,
+  FunctionIsVariadic,
+  ExprNotInCall,
+  NoParamForArg,
+  ArgAndParamNotPointers
+};
+
+/// Required by the std::error_code system to convert our enum into a general
+/// error code.
+std::error_code make_error_code(EscapesFunctionError);
+
+/// Returns a (statement, decl) tuple if an argument from an argument list
+/// escapes the function scope through globals/statics/other things. The
+/// statement is where the value escapes the function, while the declaration
+/// points to what it escapes through. If the argument doesn't escape the
+/// function, the tuple will only contain nullptrs.
+/// If the analysis runs into an unexpected error or into an unimplemented
+/// configuration, it will return an error_code of type EscapesFunctionError
+/// representing the precise issue.
+///
+/// WARNING: incomplete behaviour/implementation for general-purpose use outside
+/// of DanglingOnTemporaryChecker. This only covers a limited set of cases,
+/// mainly in terms of arguments and parameter types.
+ErrorOr<std::tuple<const Stmt *, const Decl *>>
+escapesFunction(const Expr *Arg, const FunctionDecl *FuncDecl,
+                const Expr *const *Arguments, unsigned NumArgs);
+
+/// Helper function taking a call expression.
+ErrorOr<std::tuple<const Stmt *, const Decl *>>
+escapesFunction(const Expr *Arg, const CallExpr *Call);
+
+/// Helper function taking a construct expression.
+ErrorOr<std::tuple<const Stmt *, const Decl *>>
+escapesFunction(const Expr *Arg, const CXXConstructExpr *Construct);
+
+/// Helper function taking an operator call expression.
+ErrorOr> +escapesFunction(const Expr *Arg, const CXXOperatorCallExpr *OpCall); + +#endif diff --git a/build/clang-plugin/alpha/AlphaChecks.inc b/build/clang-plugin/alpha/AlphaChecks.inc new file mode 100644 index 0000000000..8b9f819e81 --- /dev/null +++ b/build/clang-plugin/alpha/AlphaChecks.inc @@ -0,0 +1,9 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +// The list of checker classes that are compatible with clang-tidy and are considered +// to be in alpha stage development. + +// CHECK(AlphaChecker, "alpha-checker") +CHECK(TempRefPtrChecker, "performance-temp-refptr") diff --git a/build/clang-plugin/alpha/AlphaIncludes.inc b/build/clang-plugin/alpha/AlphaIncludes.inc new file mode 100644 index 0000000000..4e158f59c3 --- /dev/null +++ b/build/clang-plugin/alpha/AlphaIncludes.inc @@ -0,0 +1 @@ +#include "TempRefPtrChecker.h" diff --git a/build/clang-plugin/alpha/TempRefPtrChecker.cpp b/build/clang-plugin/alpha/TempRefPtrChecker.cpp new file mode 100644 index 0000000000..0a4d078368 --- /dev/null +++ b/build/clang-plugin/alpha/TempRefPtrChecker.cpp @@ -0,0 +1,57 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "TempRefPtrChecker.h" +#include "CustomMatchers.h" + +constexpr const char *kCallExpr = "call-expr"; +constexpr const char *kOperatorCallExpr = "operator-call"; + +void TempRefPtrChecker::registerMatchers(MatchFinder *AstMatcher) { + AstMatcher->addMatcher( + cxxOperatorCallExpr( + hasOverloadedOperatorName("->"), + hasAnyArgument(implicitCastExpr( + hasSourceExpression(materializeTemporaryExpr(anyOf( + hasDescendant(callExpr().bind(kCallExpr)), anything()))))), + callee(hasDeclContext(classTemplateSpecializationDecl( + isSmartPtrToRefCountedDecl(), + // ignore any calls on temporary RefPtr>, + // since these typically need to be locally ref-counted, + // e.g. in Then chains where the promise might be resolved + // concurrently + unless(hasTemplateArgument( + 0, refersToType(hasDeclaration( + cxxRecordDecl(hasName("mozilla::MozPromise")))))))))) + .bind(kOperatorCallExpr), + this); +} + +void TempRefPtrChecker::check(const MatchFinder::MatchResult &Result) { + const auto *OCE = + Result.Nodes.getNodeAs(kOperatorCallExpr); + + const auto *refPtrDecl = + dyn_cast(OCE->getCalleeDecl()->getDeclContext()); + + diag(OCE->getOperatorLoc(), + "performance issue: temporary %0 is only dereferenced here once which " + "involves short-lived AddRef/Release calls") + << refPtrDecl; + + const auto *InnerCE = Result.Nodes.getNodeAs(kCallExpr); + if (InnerCE) { + const auto functionName = + InnerCE->getCalleeDecl()->getAsFunction()->getQualifiedNameAsString(); + + if (functionName != "mozilla::MakeRefPtr") { + diag( + OCE->getOperatorLoc(), + "consider changing function %0 to return a raw reference instead (be " + "sure that the pointee is held alive by someone else though!)", + DiagnosticIDs::Note) + << functionName; + } + } +} diff --git a/build/clang-plugin/alpha/TempRefPtrChecker.h b/build/clang-plugin/alpha/TempRefPtrChecker.h new file mode 100644 index 0000000000..ebed50c3a0 --- /dev/null +++ b/build/clang-plugin/alpha/TempRefPtrChecker.h @@ -0,0 +1,21 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef TempRefPtrChecker_h__ +#define TempRefPtrChecker_h__ + +#include "plugin.h" + +class TempRefPtrChecker final : public BaseCheck { +public: + TempRefPtrChecker(StringRef CheckName, ContextType *Context = nullptr) + : BaseCheck(CheckName, Context) {} + void registerMatchers(MatchFinder *AstMatcher) override; + void check(const MatchFinder::MatchResult &Result) override; + +private: + CompilerInstance *CI; +}; + +#endif diff --git a/build/clang-plugin/alpha/sources.mozbuild b/build/clang-plugin/alpha/sources.mozbuild new file mode 100644 index 0000000000..738b25a581 --- /dev/null +++ b/build/clang-plugin/alpha/sources.mozbuild @@ -0,0 +1,10 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +HOST_SOURCES += [ + # 'AlphaChecker.cpp', + 'TempRefPtrChecker.cpp', +] \ No newline at end of file diff --git a/build/clang-plugin/alpha/tests/TestTempRefPtr.cpp b/build/clang-plugin/alpha/tests/TestTempRefPtr.cpp new file mode 100644 index 0000000000..51f756b8e6 --- /dev/null +++ b/build/clang-plugin/alpha/tests/TestTempRefPtr.cpp @@ -0,0 +1,52 @@ +#include + +using namespace mozilla; + +struct RefCountedBase { + void AddRef(); + void Release(); + + void method_test(); +}; + +struct RefCountedBaseHolder { + RefPtr GetRefCountedBase() const { + return mRefCountedBase; + } + +private: + RefPtr mRefCountedBase = MakeRefPtr(); +}; + + +void test_arrow_temporary_new_refptr_function_style_cast() { + RefPtr(new RefCountedBase())->method_test(); // expected-warning {{performance issue: temporary 'RefPtr' is only dereferenced here once which involves short-lived AddRef/Release calls}} +} + +void test_arrow_temporary_new_refptr_brace() { + RefPtr{new RefCountedBase()}->method_test(); // expected-warning {{performance issue: temporary 'RefPtr' is only dereferenced here once which involves short-lived AddRef/Release calls}} +} + +void test_arrow_temporary_new_c_style_cast() { + ((RefPtr)(new RefCountedBase()))->method_test(); // expected-warning {{performance issue: temporary 'RefPtr' is only dereferenced here once which involves short-lived AddRef/Release calls}} +} + +void test_arrow_temporary_new_static_cast() { + static_cast>(new RefCountedBase())->method_test(); // expected-warning {{performance issue: temporary 'RefPtr' is only dereferenced here once which involves short-lived AddRef/Release calls}} +} + +void test_arrow_temporary_new_refptr_makerefptr() { + MakeRefPtr()->method_test(); // expected-warning {{performance issue: temporary 'RefPtr' is only dereferenced here once which involves short-lived AddRef/Release calls}} +} + +void test_arrow_temporary_get_refptr_from_member_function() { + const RefCountedBaseHolder holder; + holder.GetRefCountedBase()->method_test(); // expected-warning {{performance issue: temporary 'RefPtr' is only dereferenced here once which involves short-lived AddRef/Release calls}} expected-note {{consider changing function RefCountedBaseHolder::GetRefCountedBase to return a raw reference instead}} +} + +void test_ref(RefCountedBase &aRefCountedBase); + +void test_star_temporary_new_refptr_function_style_cast() { + // TODO: Should we warn about operator* as well? 
+ test_ref(*RefPtr(new RefCountedBase())); +} diff --git a/build/clang-plugin/alpha/tests/sources.mozbuild b/build/clang-plugin/alpha/tests/sources.mozbuild new file mode 100644 index 0000000000..96c7341478 --- /dev/null +++ b/build/clang-plugin/alpha/tests/sources.mozbuild @@ -0,0 +1,10 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +SOURCES += [ + # 'AlphaTest.cpp', + 'TestTempRefPtr.cpp', +] \ No newline at end of file diff --git a/build/clang-plugin/external/CustomAttributes.inc b/build/clang-plugin/external/CustomAttributes.inc new file mode 100644 index 0000000000..e69de29bb2 diff --git a/build/clang-plugin/external/ExternalChecks.inc b/build/clang-plugin/external/ExternalChecks.inc new file mode 100644 index 0000000000..d5f0b0334c --- /dev/null +++ b/build/clang-plugin/external/ExternalChecks.inc @@ -0,0 +1,8 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +// Placeholder file to be overwritten with external checks during build +// The list of checker classes that are compatible with clang-tidy-external. + +// CHECK(ExternalChecker, "external-checker") diff --git a/build/clang-plugin/external/ExternalIncludes.inc b/build/clang-plugin/external/ExternalIncludes.inc new file mode 100644 index 0000000000..9fda16de8a --- /dev/null +++ b/build/clang-plugin/external/ExternalIncludes.inc @@ -0,0 +1,9 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +// Placeholder file to be overwritten with external checks during build +// The list of #include directives necessary for the checker classes that +// are compatible with clang-tidy-external. + +// #include "ExternalChecker.h" diff --git a/build/clang-plugin/external/sources.mozbuild b/build/clang-plugin/external/sources.mozbuild new file mode 100644 index 0000000000..01daf87080 --- /dev/null +++ b/build/clang-plugin/external/sources.mozbuild @@ -0,0 +1,10 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# Placeholder file to be overwritten with external checks during build +HOST_SOURCES += [ + # 'ExternalChecker.cpp', +] diff --git a/build/clang-plugin/external/tests/sources.mozbuild b/build/clang-plugin/external/tests/sources.mozbuild new file mode 100644 index 0000000000..1f3b4a61c9 --- /dev/null +++ b/build/clang-plugin/external/tests/sources.mozbuild @@ -0,0 +1,10 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
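Tying back to the TempRefPtrChecker tests above, a hypothetical sketch (reusing the test fixtures; not part of the patch) of the two remediations the checker's diagnostics point toward: keep the RefPtr alive in a named local, or have the getter return a reference to an object that is owned elsewhere.

void test_fixed_named_local(const RefCountedBaseHolder &aHolder) {
  RefPtr<RefCountedBase> base = aHolder.GetRefCountedBase();
  base->method_test();  // no warning expected: `base` is not a temporary
}

// Alternative shape for the getter: return a reference; the holder still owns
// the object, so no AddRef/Release pair happens at the call site.
struct RefCountedBaseRefHolder {
  RefCountedBase &GetRefCountedBaseRef() { return *mRefCountedBase; }

private:
  RefPtr<RefCountedBase> mRefCountedBase = MakeRefPtr<RefCountedBase>();
};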
+ +# Placeholder file to be overwritten with external checks during build +SOURCES += [ + # 'ExternalTest.cpp', +] diff --git a/build/clang-plugin/import_mozilla_checks.py b/build/clang-plugin/import_mozilla_checks.py new file mode 100755 index 0000000000..2c2c5a42b9 --- /dev/null +++ b/build/clang-plugin/import_mozilla_checks.py @@ -0,0 +1,171 @@ +#!/usr/bin/python3 +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import os +import glob +import shutil +import errno + +import ThirdPartyPaths +import ThreadAllows + + +def copy_dir_contents(src, dest): + for f in glob.glob("%s/*" % src): + try: + destname = "%s/%s" % (dest, os.path.basename(f)) + if os.path.isdir(f): + shutil.copytree(f, destname) + else: + shutil.copy2(f, destname) + except OSError as e: + if e.errno == errno.ENOTDIR: + shutil.copy2(f, destname) + elif e.errno == errno.EEXIST: + if os.path.isdir(f): + copy_dir_contents(f, destname) + else: + os.remove(destname) + shutil.copy2(f, destname) + else: + raise Exception("Directory not copied. Error: %s" % e) + + +def write_cmake(module_path, import_options): + names = [" " + os.path.basename(f) for f in glob.glob("%s/*.cpp" % module_path)] + + if import_options["external"]: + names += [ + " " + os.path.join("external", os.path.basename(f)) + for f in glob.glob("%s/external/*.cpp" % (module_path)) + ] + + if import_options["alpha"]: + names += [ + " " + os.path.join("alpha", os.path.basename(f)) + for f in glob.glob("%s/alpha/*.cpp" % (module_path)) + ] + + with open(os.path.join(module_path, "CMakeLists.txt"), "w") as f: + f.write( + """set(LLVM_LINK_COMPONENTS support) + +add_definitions( -DCLANG_TIDY ) + +add_clang_library(clangTidyMozillaModule + ThirdPartyPaths.cpp +%(names)s + + LINK_LIBS + clangAST + clangASTMatchers + clangBasic + clangLex + clangTidy + clangTidyReadabilityModule + clangTidyUtils + clangTidyMPIModule + )""" + % {"names": "\n".join(names)} + ) + + +def add_moz_module(cmake_path): + with open(cmake_path, "r") as f: + lines = f.readlines() + f.close() + + try: + idx = lines.index("set(ALL_CLANG_TIDY_CHECKS\n") + lines.insert(idx + 1, " clangTidyMozillaModule\n") + + with open(cmake_path, "w") as f: + for line in lines: + f.write(line) + except ValueError: + raise Exception("Unable to find ALL_CLANG_TIDY_CHECKS in {}".format(cmake_path)) + + +def write_third_party_paths(mozilla_path, module_path): + tpp_txt = os.path.join(mozilla_path, "../../tools/rewriting/ThirdPartyPaths.txt") + generated_txt = os.path.join(mozilla_path, "../../tools/rewriting/Generated.txt") + with open(os.path.join(module_path, "ThirdPartyPaths.cpp"), "w") as f: + ThirdPartyPaths.generate(f, tpp_txt, generated_txt) + + +def generate_thread_allows(mozilla_path, module_path): + names = os.path.join(mozilla_path, "../../build/clang-plugin/ThreadAllows.txt") + files = os.path.join(mozilla_path, "../../build/clang-plugin/ThreadFileAllows.txt") + with open(os.path.join(module_path, "ThreadAllows.h"), "w") as f: + f.write(ThreadAllows.generate_allows({files, names})) + + +def do_import(mozilla_path, clang_tidy_path, import_options): + module = "mozilla" + module_path = os.path.join(clang_tidy_path, module) + try: + os.makedirs(module_path) + except OSError as e: + if e.errno != errno.EEXIST: + raise + + copy_dir_contents(mozilla_path, module_path) + write_third_party_paths(mozilla_path, module_path) + generate_thread_allows(mozilla_path, 
module_path) + write_cmake(module_path, import_options) + add_moz_module(os.path.join(module_path, "..", "CMakeLists.txt")) + with open(os.path.join(module_path, "..", "CMakeLists.txt"), "a") as f: + f.write("add_subdirectory(%s)\n" % module) + # A better place for this would be in `ClangTidyForceLinker.h` but `ClangTidyMain.cpp` + # is also OK. + with open(os.path.join(module_path, "..", "tool", "ClangTidyMain.cpp"), "a") as f: + f.write( + """ +// This anchor is used to force the linker to link the MozillaModule. +extern volatile int MozillaModuleAnchorSource; +static int LLVM_ATTRIBUTE_UNUSED MozillaModuleAnchorDestination = + MozillaModuleAnchorSource; +""" + ) + + +def main(): + import argparse + + parser = argparse.ArgumentParser( + usage="import_mozilla_checks.py [option]", + description="Imports the Mozilla static analysis checks into a clang-tidy source tree.", + ) + parser.add_argument( + "mozilla_path", help="Full path to mozilla-central/build/clang-plugin" + ) + parser.add_argument( + "clang_tidy_path", help="Full path to llvm-project/clang-tools-extra/clang-tidy" + ) + parser.add_argument( + "--import-alpha", + help="Enable import of in-tree alpha checks", + action="store_true", + ) + parser.add_argument( + "--import-external", + help="Enable import of in-tree external checks", + action="store_true", + ) + args = parser.parse_args() + + if not os.path.isdir(args.mozilla_path): + print("Invalid path to mozilla clang plugin") + + if not os.path.isdir(args.clang_tidy_path): + print("Invalid path to clang-tidy source directory") + + import_options = {"alpha": args.import_alpha, "external": args.import_external} + + do_import(args.mozilla_path, args.clang_tidy_path, import_options) + + +if __name__ == "__main__": + main() diff --git a/build/clang-plugin/moz.build b/build/clang-plugin/moz.build new file mode 100644 index 0000000000..47d780054c --- /dev/null +++ b/build/clang-plugin/moz.build @@ -0,0 +1,122 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +HostSharedLibrary("clang-plugin") + +HOST_SOURCES += ["!ThirdPartyPaths.cpp"] + +HOST_SOURCES += [ + "ArithmeticArgChecker.cpp", + "AssertAssignmentChecker.cpp", + "CanRunScriptChecker.cpp", + "CustomAttributes.cpp", + "CustomTypeAnnotation.cpp", + "DanglingOnTemporaryChecker.cpp", + "DiagnosticsMatcher.cpp", + "ExplicitImplicitChecker.cpp", + "ExplicitOperatorBoolChecker.cpp", + "KungFuDeathGripChecker.cpp", + "MozCheckAction.cpp", + "MustOverrideChecker.cpp", + "MustReturnFromCallerChecker.cpp", + "MustUseChecker.cpp", + "NaNExprChecker.cpp", + "NeedsNoVTableTypeChecker.cpp", + "NoAddRefReleaseOnReturnChecker.cpp", + "NoAutoTypeChecker.cpp", + "NoDuplicateRefCntMemberChecker.cpp", + "NoExplicitMoveConstructorChecker.cpp", + "NoNewThreadsChecker.cpp", + "NonMemMovableMemberChecker.cpp", + "NonMemMovableTemplateArgChecker.cpp", + "NonParamInsideFunctionDeclChecker.cpp", + "NonTrivialTypeInFfiChecker.cpp", + "NoPrincipalGetURI.cpp", + "NoUsingNamespaceMozillaJavaChecker.cpp", + "OverrideBaseCallChecker.cpp", + "OverrideBaseCallUsageChecker.cpp", + "ParamTraitsEnumChecker.cpp", + "RefCountedCopyConstructorChecker.cpp", + "RefCountedInsideLambdaChecker.cpp", + "ScopeChecker.cpp", + "SprintfLiteralChecker.cpp", + "TemporaryLifetimeBoundChecker.cpp", + "TrivialCtorDtorChecker.cpp", + "TrivialDtorChecker.cpp", + "VariableUsageHelpers.cpp", +] + +# Ideally, we wouldn't have compile-time choices wrt checkes. bug 1617153. +if CONFIG["OS_ARCH"] == "WINNT": + HOST_DEFINES["TARGET_IS_WINDOWS"] = True + HOST_SOURCES += [ + "FopenUsageChecker.cpp", + "LoadLibraryUsageChecker.cpp", + ] + +if CONFIG["ENABLE_MOZSEARCH_PLUGIN"]: + HOST_SOURCES += [ + "mozsearch-plugin/FileOperations.cpp", + "mozsearch-plugin/JSONFormatter.cpp", + "mozsearch-plugin/MozsearchIndexer.cpp", + "mozsearch-plugin/StringOperations.cpp", + ] + +GeneratedFile( + "ThirdPartyPaths.cpp", + script="ThirdPartyPaths.py", + entry_point="generate", + inputs=[ + "/tools/rewriting/ThirdPartyPaths.txt", + "/tools/rewriting/Generated.txt", + ], +) + +GeneratedFile( + "ThreadAllows.h", + script="ThreadAllows.py", + entry_point="generate_file", + inputs=[ + "/build/clang-plugin/ThreadAllows.txt", + "/build/clang-plugin/ThreadFileAllows.txt", + ], +) + +HOST_COMPILE_FLAGS["STL"] = [] +HOST_COMPILE_FLAGS["VISIBILITY"] = [] + +# libc++ is required to build plugins against clang on OS X. +if CONFIG["HOST_OS_ARCH"] == "Darwin": + HOST_CXXFLAGS += ["-stdlib=libc++"] + +# As of clang 8, llvm-config doesn't output the flags used to build clang +# itself, so we don't end up with -fPIC as a side effect. llvm.org/PR8220 +if CONFIG["HOST_OS_ARCH"] != "WINNT": + HOST_CXXFLAGS += ["-fPIC"] + +DIRS += [ + "tests", +] + +include("external/sources.mozbuild") + +if CONFIG["ENABLE_CLANG_PLUGIN_ALPHA"]: + HOST_DEFINES["MOZ_CLANG_PLUGIN_ALPHA"] = "1" + include("alpha/sources.mozbuild") + +# In the current moz.build world, we need to override essentially every +# variable to limit ourselves to what we need to build the clang plugin. +if CONFIG["HOST_OS_ARCH"] == "WINNT": + extra_cxxflags = ["-GR-", "-EHsc"] +else: + extra_cxxflags = ["-fno-rtti", "-fno-exceptions"] + +if CONFIG["LLVM_CXXFLAGS"]: + HOST_COMPILE_FLAGS["HOST_CXXFLAGS"] = CONFIG["LLVM_CXXFLAGS"] + extra_cxxflags + +# Avoid -DDEBUG=1 on the command line, which conflicts with a #define +# DEBUG(...) in llvm headers. 
+DEFINES["DEBUG"] = False diff --git a/build/clang-plugin/mozsearch-plugin/FileOperations.cpp b/build/clang-plugin/mozsearch-plugin/FileOperations.cpp new file mode 100644 index 0000000000..2468672b9f --- /dev/null +++ b/build/clang-plugin/mozsearch-plugin/FileOperations.cpp @@ -0,0 +1,150 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "FileOperations.h" + +#include +#include + +#if defined(_WIN32) || defined(_WIN64) +#include +#include +#include +#include "StringOperations.h" +#else +#include +#include +#include +#endif + +#include +#include +#include + +// Make sure that all directories on path exist, excluding the final element of +// the path. +void ensurePath(std::string Path) { + size_t Pos = 0; + if (Path[0] == PATHSEP_CHAR) { + Pos++; + } + + while ((Pos = Path.find(PATHSEP_CHAR, Pos)) != std::string::npos) { + std::string Portion = Path.substr(0, Pos); + if (!Portion.empty()) { +#if defined(_WIN32) || defined(_WIN64) + int Err = _mkdir(Portion.c_str()); +#else + int Err = mkdir(Portion.c_str(), 0775); +#endif + if (Err == -1 && errno != EEXIST) { + perror("mkdir failed"); + exit(1); + } + } + + Pos++; + } +} + +#if defined(_WIN32) || defined(_WIN64) +AutoLockFile::AutoLockFile(const std::string &SrcFile, const std::string &DstFile) { + this->Filename = DstFile; + std::string Hash = hash(SrcFile); + std::string MutexName = std::string("Local\\searchfox-") + Hash; + std::wstring WideMutexName; + WideMutexName.assign(MutexName.begin(), MutexName.end()); + Handle = CreateMutex(nullptr, false, WideMutexName.c_str()); + if (Handle == NULL) { + return; + } + + if (WaitForSingleObject(Handle, INFINITE) != WAIT_OBJECT_0) { + return; + } +} + +AutoLockFile::~AutoLockFile() { + ReleaseMutex(Handle); + CloseHandle(Handle); +} + +bool AutoLockFile::success() { + return Handle != NULL; +} + +FILE *AutoLockFile::openFile() { + int DstDescriptor = _open(Filename.c_str(), _O_RDONLY | _O_CREAT | _O_BINARY, 0666); + return _fdopen(DstDescriptor, "rb"); +} + +FILE *AutoLockFile::openTmp() { + int TmpDescriptor = _open((Filename + ".tmp").c_str(), _O_WRONLY | _O_APPEND | _O_CREAT | _O_BINARY, 0666); + return _fdopen(TmpDescriptor, "ab"); +} + +bool AutoLockFile::moveTmp() { + if (_unlink(Filename.c_str()) == -1) { + if (errno != ENOENT) { + return false; + } + } + return rename((Filename + ".tmp").c_str(), Filename.c_str()) == 0; +} + +std::string getAbsolutePath(const std::string &Filename) { + char Full[_MAX_PATH]; + if (!_fullpath(Full, Filename.c_str(), _MAX_PATH)) { + return std::string(""); + } + return std::string(Full); +} +#else +AutoLockFile::AutoLockFile(const std::string &SrcFile, const std::string &DstFile) { + this->Filename = DstFile; + FileDescriptor = open(SrcFile.c_str(), O_RDONLY); + if (FileDescriptor == -1) { + return; + } + + do { + int rv = flock(FileDescriptor, LOCK_EX); + if (rv == 0) { + break; + } + } while (true); +} + +AutoLockFile::~AutoLockFile() { close(FileDescriptor); } + +bool AutoLockFile::success() { return FileDescriptor != -1; } + +FILE *AutoLockFile::openFile() { + int DstDescriptor = open(Filename.c_str(), O_RDONLY | O_CREAT, 0666); + return fdopen(DstDescriptor, "rb"); +} + +FILE* AutoLockFile::openTmp() { + int TmpDescriptor = open((Filename + ".tmp").c_str(), O_WRONLY | O_APPEND | O_CREAT, 0666); + return 
fdopen(TmpDescriptor, "ab"); +} + +bool AutoLockFile::moveTmp() { + if (unlink(Filename.c_str()) == -1) { + if (errno != ENOENT) { + return false; + } + } + return rename((Filename + ".tmp").c_str(), Filename.c_str()) == 0; +} + +std::string getAbsolutePath(const std::string &Filename) { + char Full[4096]; + if (!realpath(Filename.c_str(), Full)) { + return std::string(""); + } + return std::string(Full); +} +#endif diff --git a/build/clang-plugin/mozsearch-plugin/FileOperations.h b/build/clang-plugin/mozsearch-plugin/FileOperations.h new file mode 100644 index 0000000000..472c835b23 --- /dev/null +++ b/build/clang-plugin/mozsearch-plugin/FileOperations.h @@ -0,0 +1,68 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef FileOperations_h +#define FileOperations_h + +#include +#include + +#if defined(_WIN32) || defined(_WIN64) +#include +#define PATHSEP_CHAR '\\' +#define PATHSEP_STRING "\\" +#else +#define PATHSEP_CHAR '/' +#define PATHSEP_STRING "/" +#endif + +// Make sure that all directories on path exist, excluding the final element of +// the path. +void ensurePath(std::string Path); + +std::string getAbsolutePath(const std::string &Filename); + +// Used to synchronize access when writing to an analysis file, so that +// concurrently running clang instances don't clobber each other's data. +// On Windows, we use a named mutex. On POSIX platforms, we use flock on the +// source files. flock is advisory locking, and doesn't interfere with clang's +// own opening of the source files (i.e. to interfere, clang would have to be +// using flock itself, which it does not). +struct AutoLockFile { + // Absolute path to the analysis file + std::string Filename; + +#if defined(_WIN32) || defined(_WIN64) + // Handle for the named Mutex + HANDLE Handle = NULL; +#else + // fd for the *source* file that corresponds to the analysis file. We use + // the source file because it doesn't change while the analysis file gets + // repeatedly replaced by a new version written to a separate tmp file. + // This fd is used when using flock to synchronize access. + int FileDescriptor = -1; +#endif + + // SrcFile should be the absolute path to the source code file, and DstFile + // the absolute path to the corresponding analysis file. This constructor + // will block until exclusive access has been obtained. + AutoLockFile(const std::string &SrcFile, const std::string &DstFile); + ~AutoLockFile(); + + // Check after constructing to ensure the mutex was properly set up. + bool success(); + + // Open the existing analysis file for reading (an empty one is created if + // it doesn't already exist). Caller is responsible for fclose'ing it. + FILE *openFile(); + // Open a new tmp file for writing the new analysis data to. Caller is + // responsible for fclose'ing it. + FILE *openTmp(); + // Replace the existing analysis file with the new "tmp" one that has the new + // data. Returns false on error. 
+ bool moveTmp(); +}; + +#endif diff --git a/build/clang-plugin/mozsearch-plugin/JSONFormatter.cpp b/build/clang-plugin/mozsearch-plugin/JSONFormatter.cpp new file mode 100644 index 0000000000..ec77fc9b2d --- /dev/null +++ b/build/clang-plugin/mozsearch-plugin/JSONFormatter.cpp @@ -0,0 +1,119 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "JSONFormatter.h" + +#include +#include +#include + +static std::string replaceAll(std::string Mangled, std::string Pattern, + std::string Replacement) { + size_t Pos = 0; + while ((Pos = Mangled.find(Pattern, Pos)) != std::string::npos) { + Mangled = Mangled.replace(Pos, Pattern.length(), Replacement); + Pos += Replacement.length(); + } + return Mangled; +} + +/** + * Hacky escaping logic with the goal of not upsetting the much more thorough + * rust JSON parsing library that actually understands UTF-8. Double-quote + * and (escaping) backslash are escaped, as is tab (\t), with newlines (\r\n + * and \n) normalized to escaped \n. + * + * Additionally, everything that's not printable ASCII is simply erased. The + * motivating file is media/openmax_il/il112/OMX_Other.h#93 which has a + * corrupted apostrophe as <92> in there. The better course of action would + * be a validating UTF-8 parse that discards corrupt/non-printable characters. + * Since this is motivated by a commenting proof-of-concept and builds are + * already slow, I'm punting on that. + */ +std::string JSONFormatter::escape(std::string Input) { + bool NeedsEscape = false; + for (char C : Input) { + if (C == '\\' || C == '"' || C < 32 || C > 126) { + NeedsEscape = true; + break; + } + } + + if (!NeedsEscape) { + return Input; + } + + std::string Cur = Input; + Cur = replaceAll(Cur, "\\", "\\\\"); + Cur = replaceAll(Cur, "\"", "\\\""); + Cur = replaceAll(Cur, "\t", "\\t"); + Cur = replaceAll(Cur, "\r\n", "\\n"); + Cur = replaceAll(Cur, "\n", "\\n"); + Cur.erase(std::remove_if(Cur.begin(), Cur.end(), + [](char C){ return C < 32 || C > 126; }), + Cur.end()); + return Cur; +} + +void JSONFormatter::add(const char *Name, const char *Value) { + assert(PropertyCount < kMaxProperties); + Properties[PropertyCount] = Property(Name, std::string(Value)); + PropertyCount++; + + // `"Name":"Value",` + Length += strlen(Name) + 3 + strlen(Value) + 2 + 1; +} + +void JSONFormatter::add(const char *Name, std::string Value) { + std::string Escaped = escape(std::move(Value)); + + // `"Name":"Escaped",` + Length += strlen(Name) + 3 + Escaped.length() + 2 + 1; + + assert(PropertyCount < kMaxProperties); + Properties[PropertyCount] = Property(Name, std::move(Escaped)); + PropertyCount++; +} + +void JSONFormatter::add(const char *Name, int Value) { + // 1 digit + assert(Value >= 0 && Value < 10); + + assert(PropertyCount < kMaxProperties); + Properties[PropertyCount] = Property(Name, Value); + PropertyCount++; + + // `"Name":V,` + Length += strlen(Name) + 3 + 2; +} + +void JSONFormatter::format(std::string &Result) { + Result.reserve(Length + 2); + + Result.push_back('{'); + for (int I = 0; I < PropertyCount; I++) { + Result.push_back('"'); + Result.append(Properties[I].Name); + Result.push_back('"'); + Result.push_back(':'); + + if (Properties[I].IsString) { + Result.push_back('"'); + Result.append(Properties[I].StringValue); + Result.push_back('"'); + } else { + 
Result.push_back(Properties[I].IntValue + '0'); + } + + if (I + 1 != PropertyCount) { + Result.push_back(','); + } + } + + Result.push_back('}'); + Result.push_back('\n'); + + assert(Result.length() == Length + 2); +} diff --git a/build/clang-plugin/mozsearch-plugin/JSONFormatter.h b/build/clang-plugin/mozsearch-plugin/JSONFormatter.h new file mode 100644 index 0000000000..7d4c7e292e --- /dev/null +++ b/build/clang-plugin/mozsearch-plugin/JSONFormatter.h @@ -0,0 +1,53 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef JSONFormatter_h +#define JSONFormatter_h + +#include +#include + +// A very basic JSON formatter that records key/value pairs and outputs a JSON +// object that contains only non-object data. +class JSONFormatter { + // Of these fields, only mEscapedStringValue is owned by this class. All the + // others are expected to outlive the class (which is typically allocated + // on-stack). + struct Property { + const char *Name; + std::string StringValue; + int IntValue; + bool IsString; + + Property() {} + + Property(const char* Name, std::string String) + : Name(Name), StringValue(std::move(String)), IsString(true) {} + + Property(const char* Name, int Int) + : Name(Name), IntValue(Int), IsString(false) {} + }; + + static const int kMaxProperties = 32; + + Property Properties[kMaxProperties]; + int PropertyCount; + + // Length of the generated JSON output. + size_t Length; + + std::string escape(std::string Input); + +public: + JSONFormatter() : PropertyCount(0), Length(0) {} + + void add(const char *Name, const char *Value); + void add(const char *Name, std::string Value); + void add(const char *Name, int Value); + + void format(std::string &Result); +}; + +#endif diff --git a/build/clang-plugin/mozsearch-plugin/MozsearchIndexer.cpp b/build/clang-plugin/mozsearch-plugin/MozsearchIndexer.cpp new file mode 100644 index 0000000000..9d2cda29d7 --- /dev/null +++ b/build/clang-plugin/mozsearch-plugin/MozsearchIndexer.cpp @@ -0,0 +1,1896 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "clang/AST/AST.h" +#include "clang/AST/ASTConsumer.h" +#include "clang/AST/ASTContext.h" +#include "clang/AST/Expr.h" +#include "clang/AST/ExprCXX.h" +#include "clang/AST/Mangle.h" +#include "clang/AST/RecursiveASTVisitor.h" +#include "clang/Basic/FileManager.h" +#include "clang/Basic/SourceManager.h" +#include "clang/Basic/Version.h" +#include "clang/Frontend/CompilerInstance.h" +#include "clang/Frontend/FrontendPluginRegistry.h" +#include "clang/Lex/Lexer.h" +#include "clang/Lex/PPCallbacks.h" +#include "clang/Lex/Preprocessor.h" +#include "llvm/ADT/SmallString.h" +#include "llvm/Support/raw_ostream.h" + +#include +#include +#include +#include +#include +#include + +#include +#include + +#include "FileOperations.h" +#include "JSONFormatter.h" +#include "StringOperations.h" + +#if CLANG_VERSION_MAJOR < 8 +// Starting with Clang 8.0 some basic functions have been renamed +#define getBeginLoc getLocStart +#define getEndLoc getLocEnd +#endif +// We want std::make_unique, but that's only available in c++14. 
In versions +// prior to that, we need to fall back to llvm's make_unique. It's also the +// case that we expect clang 10 to build with c++14 and clang 9 and earlier to +// build with c++11, at least as suggested by the llvm-config --cxxflags on +// non-windows platforms. mozilla-central seems to build with -std=c++17 on +// windows so we need to make this decision based on __cplusplus instead of +// the CLANG_VERSION_MAJOR. +#if __cplusplus < 201402L +using llvm::make_unique; +#else +using std::make_unique; +#endif + +using namespace clang; + +const std::string GENERATED("__GENERATED__" PATHSEP_STRING); + +// Absolute path to directory containing source code. +std::string Srcdir; + +// Absolute path to objdir (including generated code). +std::string Objdir; + +// Absolute path where analysis JSON output will be stored. +std::string Outdir; + +enum class FileType { + // The file was either in the source tree nor objdir. It might be a system + // include, for example. + Unknown, + // A file from the source tree. + Source, + // A file from the objdir. + Generated, +}; + +// Takes an absolute path to a file, and returns the type of file it is. If +// it's a Source or Generated file, the provided inout path argument is modified +// in-place so that it is relative to the source dir or objdir, respectively. +FileType relativizePath(std::string& path) { + if (path.compare(0, Objdir.length(), Objdir) == 0) { + path.replace(0, Objdir.length(), GENERATED); + return FileType::Generated; + } + // Empty filenames can get turned into Srcdir when they are resolved as + // absolute paths, so we should exclude files that are exactly equal to + // Srcdir or anything outside Srcdir. + if (path.length() > Srcdir.length() && path.compare(0, Srcdir.length(), Srcdir) == 0) { + // Remove the trailing `/' as well. + path.erase(0, Srcdir.length() + 1); + return FileType::Source; + } + return FileType::Unknown; +} + +#if !defined(_WIN32) && !defined(_WIN64) +#include + +static double time() { + struct timeval Tv; + gettimeofday(&Tv, nullptr); + return double(Tv.tv_sec) + double(Tv.tv_usec) / 1000000.; +} +#endif + +// Return true if |input| is a valid C++ identifier. We don't want to generate +// analysis information for operators, string literals, etc. by accident since +// it trips up consumers of the data. +static bool isValidIdentifier(std::string Input) { + for (char C : Input) { + if (!(isalpha(C) || isdigit(C) || C == '_')) { + return false; + } + } + return true; +} + +struct RAIITracer { + RAIITracer(const char *log) : mLog(log) { + printf("<%s>\n", mLog); + } + + ~RAIITracer() { + printf("\n", mLog); + } + + const char* mLog; +}; + +#define TRACEFUNC RAIITracer tracer(__FUNCTION__); + +class IndexConsumer; + +// For each C++ file seen by the analysis (.cpp or .h), we track a +// FileInfo. This object tracks whether the file is "interesting" (i.e., whether +// it's in the source dir or the objdir). We also store the analysis output +// here. 
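A worked reading of relativizePath() above, with illustrative directory values (both the directories and the file paths are hypothetical):

// Assuming Srcdir = "/builds/worker/src" and
// Objdir = "/builds/worker/obj/" (note the trailing separator):
//   "/builds/worker/obj/dom/XrayWrapper.h"       -> "__GENERATED__/dom/XrayWrapper.h" (FileType::Generated)
//   "/builds/worker/src/dom/base/nsDocument.cpp" -> "dom/base/nsDocument.cpp"         (FileType::Source)
//   "/usr/include/stdio.h"                       -> unchanged                         (FileType::Unknown)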
+struct FileInfo { + FileInfo(std::string &Rname) : Realname(Rname) { + switch (relativizePath(Realname)) { + case FileType::Generated: + Interesting = true; + Generated = true; + break; + case FileType::Source: + Interesting = true; + Generated = false; + break; + case FileType::Unknown: + Interesting = false; + Generated = false; + break; + } + } + std::string Realname; + std::vector Output; + bool Interesting; + bool Generated; +}; + +class IndexConsumer; + +class PreprocessorHook : public PPCallbacks { + IndexConsumer *Indexer; + +public: + PreprocessorHook(IndexConsumer *C) : Indexer(C) {} + + virtual void FileChanged(SourceLocation Loc, FileChangeReason Reason, + SrcMgr::CharacteristicKind FileType, + FileID PrevFID) override; + + virtual void InclusionDirective(SourceLocation HashLoc, + const Token &IncludeTok, + StringRef FileName, + bool IsAngled, + CharSourceRange FileNameRange, + const FileEntry *File, + StringRef SearchPath, + StringRef RelativePath, + const Module *Imported, + SrcMgr::CharacteristicKind FileType) override; + + virtual void MacroDefined(const Token &Tok, + const MacroDirective *Md) override; + + virtual void MacroExpands(const Token &Tok, const MacroDefinition &Md, + SourceRange Range, const MacroArgs *Ma) override; + virtual void MacroUndefined(const Token &Tok, const MacroDefinition &Md, + const MacroDirective *Undef) override; + virtual void Defined(const Token &Tok, const MacroDefinition &Md, + SourceRange Range) override; + virtual void Ifdef(SourceLocation Loc, const Token &Tok, + const MacroDefinition &Md) override; + virtual void Ifndef(SourceLocation Loc, const Token &Tok, + const MacroDefinition &Md) override; +}; + +class IndexConsumer : public ASTConsumer, + public RecursiveASTVisitor, + public DiagnosticConsumer { +private: + CompilerInstance &CI; + SourceManager &SM; + LangOptions &LO; + std::map> FileMap; + MangleContext *CurMangleContext; + ASTContext *AstContext; + + typedef RecursiveASTVisitor Super; + + // Tracks the set of declarations that the current expression/statement is + // nested inside of. + struct AutoSetContext { + AutoSetContext(IndexConsumer *Self, NamedDecl *Context, bool VisitImplicit = false) + : Self(Self), Prev(Self->CurDeclContext), Decl(Context) { + this->VisitImplicit = VisitImplicit || (Prev ? Prev->VisitImplicit : false); + Self->CurDeclContext = this; + } + + ~AutoSetContext() { Self->CurDeclContext = Prev; } + + IndexConsumer *Self; + AutoSetContext *Prev; + NamedDecl *Decl; + bool VisitImplicit; + }; + AutoSetContext *CurDeclContext; + + FileInfo *getFileInfo(SourceLocation Loc) { + FileID Id = SM.getFileID(Loc); + + std::map>::iterator It; + It = FileMap.find(Id); + if (It == FileMap.end()) { + // We haven't seen this file before. We need to make the FileInfo + // structure information ourselves + std::string Filename = std::string(SM.getFilename(Loc)); + std::string Absolute; + // If Loc is a macro id rather than a file id, it Filename might be + // empty. Also for some types of file locations that are clang-internal + // like "" it can return an empty Filename. In these cases we + // want to leave Absolute as empty. + if (!Filename.empty()) { + Absolute = getAbsolutePath(Filename); + if (Absolute.empty()) { + Absolute = Filename; + } + } + std::unique_ptr Info = make_unique(Absolute); + It = FileMap.insert(std::make_pair(Id, std::move(Info))).first; + } + return It->second.get(); + } + + // Helpers for processing declarations + // Should we ignore this location? 
+ bool isInterestingLocation(SourceLocation Loc) { + if (Loc.isInvalid()) { + return false; + } + + return getFileInfo(Loc)->Interesting; + } + + // Convert location to "line:column" or "line:column-column" given length. + // In resulting string rep, line is 1-based and zero-padded to 5 digits, while + // column is 0-based and unpadded. + std::string locationToString(SourceLocation Loc, size_t Length = 0) { + std::pair Pair = SM.getDecomposedLoc(Loc); + + bool IsInvalid; + unsigned Line = SM.getLineNumber(Pair.first, Pair.second, &IsInvalid); + if (IsInvalid) { + return ""; + } + unsigned Column = SM.getColumnNumber(Pair.first, Pair.second, &IsInvalid); + if (IsInvalid) { + return ""; + } + + if (Length) { + return stringFormat("%05d:%d-%d", Line, Column - 1, Column - 1 + Length); + } else { + return stringFormat("%05d:%d", Line, Column - 1); + } + } + + // Convert SourceRange to "line-line". + // In the resulting string rep, line is 1-based. + std::string lineRangeToString(SourceRange Range) { + std::pair Begin = SM.getDecomposedLoc(Range.getBegin()); + std::pair End = SM.getDecomposedLoc(Range.getEnd()); + + bool IsInvalid; + unsigned Line1 = SM.getLineNumber(Begin.first, Begin.second, &IsInvalid); + if (IsInvalid) { + return ""; + } + unsigned Line2 = SM.getLineNumber(End.first, End.second, &IsInvalid); + if (IsInvalid) { + return ""; + } + + return stringFormat("%d-%d", Line1, Line2); + } + + // Convert SourceRange to "line:column-line:column". + // In the resulting string rep, line is 1-based, column is 0-based. + std::string fullRangeToString(SourceRange Range) { + std::pair Begin = SM.getDecomposedLoc(Range.getBegin()); + std::pair End = SM.getDecomposedLoc(Range.getEnd()); + + bool IsInvalid; + unsigned Line1 = SM.getLineNumber(Begin.first, Begin.second, &IsInvalid); + if (IsInvalid) { + return ""; + } + unsigned Column1 = SM.getColumnNumber(Begin.first, Begin.second, &IsInvalid); + if (IsInvalid) { + return ""; + } + unsigned Line2 = SM.getLineNumber(End.first, End.second, &IsInvalid); + if (IsInvalid) { + return ""; + } + unsigned Column2 = SM.getColumnNumber(End.first, End.second, &IsInvalid); + if (IsInvalid) { + return ""; + } + + return stringFormat("%d:%d-%d:%d", Line1, Column1 - 1, Line2, Column2 - 1); + } + + // Returns the qualified name of `d` without considering template parameters. + std::string getQualifiedName(const NamedDecl *D) { + const DeclContext *Ctx = D->getDeclContext(); + if (Ctx->isFunctionOrMethod()) { + return D->getQualifiedNameAsString(); + } + + std::vector Contexts; + + // Collect contexts. 
+ while (Ctx && isa(Ctx)) { + Contexts.push_back(Ctx); + Ctx = Ctx->getParent(); + } + + std::string Result; + + std::reverse(Contexts.begin(), Contexts.end()); + + for (const DeclContext *DC : Contexts) { + if (const auto *Spec = dyn_cast(DC)) { + Result += Spec->getNameAsString(); + + if (Spec->getSpecializationKind() == TSK_ExplicitSpecialization) { + std::string Backing; + llvm::raw_string_ostream Stream(Backing); + const TemplateArgumentList &TemplateArgs = Spec->getTemplateArgs(); + printTemplateArgumentList( + Stream, TemplateArgs.asArray(), PrintingPolicy(CI.getLangOpts())); + Result += Stream.str(); + } + } else if (const auto *Nd = dyn_cast(DC)) { + if (Nd->isAnonymousNamespace() || Nd->isInline()) { + continue; + } + Result += Nd->getNameAsString(); + } else if (const auto *Rd = dyn_cast(DC)) { + if (!Rd->getIdentifier()) { + Result += "(anonymous)"; + } else { + Result += Rd->getNameAsString(); + } + } else if (const auto *Fd = dyn_cast(DC)) { + Result += Fd->getNameAsString(); + } else if (const auto *Ed = dyn_cast(DC)) { + // C++ [dcl.enum]p10: Each enum-name and each unscoped + // enumerator is declared in the scope that immediately contains + // the enum-specifier. Each scoped enumerator is declared in the + // scope of the enumeration. + if (Ed->isScoped() || Ed->getIdentifier()) + Result += Ed->getNameAsString(); + else + continue; + } else { + Result += cast(DC)->getNameAsString(); + } + Result += "::"; + } + + if (D->getDeclName()) + Result += D->getNameAsString(); + else + Result += "(anonymous)"; + + return Result; + } + + std::string mangleLocation(SourceLocation Loc, + std::string Backup = std::string()) { + FileInfo *F = getFileInfo(Loc); + std::string Filename = F->Realname; + if (Filename.length() == 0 && Backup.length() != 0) { + return Backup; + } + if (F->Generated) { + // Since generated files may be different on different platforms, + // we need to include a platform-specific thing in the hash. Otherwise + // we can end up with hash collisions where different symbols from + // different platforms map to the same thing. + char* Platform = getenv("MOZSEARCH_PLATFORM"); + Filename = std::string(Platform ? Platform : "") + std::string("@") + Filename; + } + return hash(Filename + std::string("@") + locationToString(Loc)); + } + + bool isAcceptableSymbolChar(char c) { + return isalpha(c) || isdigit(c) || c == '_' || c == '/'; + } + + std::string mangleFile(std::string Filename, FileType Type) { + // "Mangle" the file path, such that: + // 1. The majority of paths will still be mostly human-readable. + // 2. The sanitization algorithm doesn't produce collisions where two + // different unsanitized paths can result in the same sanitized paths. + // 3. The produced symbol doesn't cause problems with downstream consumers. + // In order to accomplish this, we keep alphanumeric chars, underscores, + // and slashes, and replace everything else with an "@xx" hex encoding. + // The majority of path characters are letters and slashes which don't get + // encoded, so that satisifies (1). Since "@" characters in the unsanitized + // path get encoded, there should be no "@" characters in the sanitized path + // that got preserved from the unsanitized input, so that should satisfy (2). + // And (3) was done by trial-and-error. Note in particular the dot (.) + // character needs to be encoded, or the symbol-search feature of mozsearch + // doesn't work correctly, as all dot characters in the symbol query get + // replaced by #. 
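As a concrete illustration of the sanitization described above (the file names are hypothetical):

// Worked examples of the "@xx" hex encoding:
//   "dom/base/nsDocument.cpp"             -> "dom/base/nsDocument@2Ecpp"               ('.' is 0x2E)
//   "toolkit/mozapps/update-settings.ini" -> "toolkit/mozapps/update@2Dsettings@2Eini" ('-' is 0x2D, '.' is 0x2E)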
+ for (size_t i = 0; i < Filename.length(); i++) { + char c = Filename[i]; + if (isAcceptableSymbolChar(c)) { + continue; + } + char hex[4]; + sprintf(hex, "@%02X", ((int)c) & 0xFF); + Filename.replace(i, 1, hex); + i += 2; + } + + if (Type == FileType::Generated) { + // Since generated files may be different on different platforms, + // we need to include a platform-specific thing in the hash. Otherwise + // we can end up with hash collisions where different symbols from + // different platforms map to the same thing. + char* Platform = getenv("MOZSEARCH_PLATFORM"); + Filename = std::string(Platform ? Platform : "") + std::string("@") + Filename; + } + return Filename; + } + + std::string mangleQualifiedName(std::string Name) { + std::replace(Name.begin(), Name.end(), ' ', '_'); + return Name; + } + + std::string getMangledName(clang::MangleContext *Ctx, + const clang::NamedDecl *Decl) { + if (isa(Decl) && cast(Decl)->isExternC()) { + return cast(Decl)->getNameAsString(); + } + + if (isa(Decl) || isa(Decl)) { + const DeclContext *DC = Decl->getDeclContext(); + if (isa(DC) || isa(DC) || + isa(DC) || + // isa(DC) || + isa(DC)) { + llvm::SmallVector Output; + llvm::raw_svector_ostream Out(Output); +#if CLANG_VERSION_MAJOR >= 11 + // This code changed upstream in version 11: + // https://github.com/llvm/llvm-project/commit/29e1a16be8216066d1ed733a763a749aed13ff47 + GlobalDecl GD; + if (const CXXConstructorDecl *D = dyn_cast(Decl)) { + GD = GlobalDecl(D, Ctor_Complete); + } else if (const CXXDestructorDecl *D = + dyn_cast(Decl)) { + GD = GlobalDecl(D, Dtor_Complete); + } else { + GD = GlobalDecl(Decl); + } + Ctx->mangleName(GD, Out); +#else + if (const CXXConstructorDecl *D = dyn_cast(Decl)) { + Ctx->mangleCXXCtor(D, CXXCtorType::Ctor_Complete, Out); + } else if (const CXXDestructorDecl *D = + dyn_cast(Decl)) { + Ctx->mangleCXXDtor(D, CXXDtorType::Dtor_Complete, Out); + } else { + Ctx->mangleName(Decl, Out); + } +#endif + return Out.str().str(); + } else { + return std::string("V_") + mangleLocation(Decl->getLocation()) + + std::string("_") + hash(std::string(Decl->getName())); + } + } else if (isa(Decl) || isa(Decl) || + isa(Decl)) { + if (!Decl->getIdentifier()) { + // Anonymous. + return std::string("T_") + mangleLocation(Decl->getLocation()); + } + + return std::string("T_") + mangleQualifiedName(getQualifiedName(Decl)); + } else if (isa(Decl) || isa(Decl)) { + if (!Decl->getIdentifier()) { + // Anonymous. 
+ return std::string("NS_") + mangleLocation(Decl->getLocation()); + } + + return std::string("NS_") + mangleQualifiedName(getQualifiedName(Decl)); + } else if (const ObjCIvarDecl *D2 = dyn_cast(Decl)) { + const ObjCInterfaceDecl *Iface = D2->getContainingInterface(); + return std::string("F_<") + getMangledName(Ctx, Iface) + ">_" + + D2->getNameAsString(); + } else if (const FieldDecl *D2 = dyn_cast(Decl)) { + const RecordDecl *Record = D2->getParent(); + return std::string("F_<") + getMangledName(Ctx, Record) + ">_" + + D2->getNameAsString(); + } else if (const EnumConstantDecl *D2 = dyn_cast(Decl)) { + const DeclContext *DC = Decl->getDeclContext(); + if (const NamedDecl *Named = dyn_cast(DC)) { + return std::string("E_<") + getMangledName(Ctx, Named) + ">_" + + D2->getNameAsString(); + } + } + + assert(false); + return std::string(""); + } + + void debugLocation(SourceLocation Loc) { + std::string S = locationToString(Loc); + StringRef Filename = SM.getFilename(Loc); + printf("--> %s %s\n", std::string(Filename).c_str(), S.c_str()); + } + + void debugRange(SourceRange Range) { + printf("Range\n"); + debugLocation(Range.getBegin()); + debugLocation(Range.getEnd()); + } + +public: + IndexConsumer(CompilerInstance &CI) + : CI(CI), SM(CI.getSourceManager()), LO(CI.getLangOpts()), CurMangleContext(nullptr), + AstContext(nullptr), CurDeclContext(nullptr), TemplateStack(nullptr) { + CI.getPreprocessor().addPPCallbacks( + make_unique(this)); + } + + virtual DiagnosticConsumer *clone(DiagnosticsEngine &Diags) const { + return new IndexConsumer(CI); + } + +#if !defined(_WIN32) && !defined(_WIN64) + struct AutoTime { + AutoTime(double *Counter) : Counter(Counter), Start(time()) {} + ~AutoTime() { + if (Start) { + *Counter += time() - Start; + } + } + void stop() { + *Counter += time() - Start; + Start = 0; + } + double *Counter; + double Start; + }; +#endif + + // All we need is to follow the final declaration. + virtual void HandleTranslationUnit(ASTContext &Ctx) { + CurMangleContext = + clang::ItaniumMangleContext::create(Ctx, CI.getDiagnostics()); + + AstContext = &Ctx; + TraverseDecl(Ctx.getTranslationUnitDecl()); + + // Emit the JSON data for all files now. + std::map>::iterator It; + for (It = FileMap.begin(); It != FileMap.end(); It++) { + if (!It->second->Interesting) { + continue; + } + + FileInfo &Info = *It->second; + + std::string Filename = Outdir + Info.Realname; + std::string SrcFilename = Info.Generated + ? Objdir + Info.Realname.substr(GENERATED.length()) + : Srcdir + PATHSEP_STRING + Info.Realname; + + ensurePath(Filename); + + // We lock the output file in case some other clang process is trying to + // write to it at the same time. + AutoLockFile Lock(SrcFilename, Filename); + + if (!Lock.success()) { + fprintf(stderr, "Unable to lock file %s\n", Filename.c_str()); + exit(1); + } + + // Merge our results with the existing lines from the output file. + // This ensures that header files that are included multiple times + // in different ways are analyzed completely. 
+ + FILE *Fp = Lock.openFile(); + if (!Fp) { + fprintf(stderr, "Unable to open input file %s\n", Filename.c_str()); + exit(1); + } + FILE *OutFp = Lock.openTmp(); + if (!OutFp) { + fprintf(stderr, "Unable to open tmp out file for %s\n", Filename.c_str()); + exit(1); + } + + // Sort our new results and get an iterator to them + std::sort(Info.Output.begin(), Info.Output.end()); + std::vector::const_iterator NewLinesIter = Info.Output.begin(); + std::string LastNewWritten; + + // Loop over the existing (sorted) lines in the analysis output file. + char Buffer[65536]; + while (fgets(Buffer, sizeof(Buffer), Fp)) { + std::string OldLine(Buffer); + + // Write any results from Info.Output that are lexicographically + // smaller than OldLine (read from the existing file), but make sure + // to skip duplicates. Keep advacing NewLinesIter until we reach an + // entry that is lexicographically greater than OldLine. + for (; NewLinesIter != Info.Output.end(); NewLinesIter++) { + if (*NewLinesIter > OldLine) { + break; + } + if (*NewLinesIter == OldLine) { + continue; + } + if (*NewLinesIter == LastNewWritten) { + // dedupe the new entries being written + continue; + } + if (fwrite(NewLinesIter->c_str(), NewLinesIter->length(), 1, OutFp) != 1) { + fprintf(stderr, "Unable to write to tmp output file for %s\n", Filename.c_str()); + exit(1); + } + LastNewWritten = *NewLinesIter; + } + + // Write the entry read from the existing file. + if (fwrite(OldLine.c_str(), OldLine.length(), 1, OutFp) != 1) { + fprintf(stderr, "Unable to write to tmp output file for %s\n", Filename.c_str()); + exit(1); + } + } + + // We finished reading from Fp + fclose(Fp); + + // Finish iterating our new results, discarding duplicates + for (; NewLinesIter != Info.Output.end(); NewLinesIter++) { + if (*NewLinesIter == LastNewWritten) { + continue; + } + if (fwrite(NewLinesIter->c_str(), NewLinesIter->length(), 1, OutFp) != 1) { + fprintf(stderr, "Unable to write to tmp output file for %s\n", Filename.c_str()); + exit(1); + } + LastNewWritten = *NewLinesIter; + } + + // Done writing all the things, close it and replace the old output file + // with the new one. + fclose(OutFp); + if (!Lock.moveTmp()) { + fprintf(stderr, "Unable to move tmp output file into place for %s (err %d)\n", Filename.c_str(), errno); + exit(1); + } + } + } + + // Return a list of mangled names of all the methods that the given method + // overrides. + void findOverriddenMethods(const CXXMethodDecl *Method, + std::vector &Symbols) { + std::string Mangled = getMangledName(CurMangleContext, Method); + Symbols.push_back(Mangled); + + CXXMethodDecl::method_iterator Iter = Method->begin_overridden_methods(); + CXXMethodDecl::method_iterator End = Method->end_overridden_methods(); + for (; Iter != End; Iter++) { + const CXXMethodDecl *Decl = *Iter; + if (Decl->isTemplateInstantiation()) { + Decl = dyn_cast(Decl->getTemplateInstantiationPattern()); + } + return findOverriddenMethods(Decl, Symbols); + } + } + + // Unfortunately, we have to override all these methods in order to track the + // context we're inside. 
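
The HandleTranslationUnit loop above interleaves freshly generated analysis lines with whatever a previous compile already wrote for the same header. A minimal sketch of that merge, with the file locking and FILE* I/O stripped out and with names of my own choosing, looks like this:

#include <algorithm>
#include <string>
#include <vector>

// Sketch of the merge performed above: new records are sorted, then
// interleaved with the already-sorted lines of the existing analysis file,
// skipping lines that duplicate either the existing file or the last new
// line written.
static std::vector<std::string>
mergeAnalysisLines(std::vector<std::string> NewLines,
                   const std::vector<std::string> &ExistingLines) {
  std::sort(NewLines.begin(), NewLines.end());

  std::vector<std::string> Out;
  auto NewIt = NewLines.begin();
  std::string LastNewWritten;

  for (const std::string &OldLine : ExistingLines) {
    // Emit new lines that sort at or before the current existing line.
    for (; NewIt != NewLines.end() && *NewIt <= OldLine; ++NewIt) {
      if (*NewIt == OldLine || *NewIt == LastNewWritten) {
        continue; // skip duplicates
      }
      Out.push_back(*NewIt);
      LastNewWritten = *NewIt;
    }
    Out.push_back(OldLine);
  }

  // Flush whatever new lines remain, still deduplicating.
  for (; NewIt != NewLines.end(); ++NewIt) {
    if (*NewIt != LastNewWritten) {
      Out.push_back(*NewIt);
      LastNewWritten = *NewIt;
    }
  }
  return Out;
}
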
+ + bool TraverseEnumDecl(EnumDecl *D) { + AutoSetContext Asc(this, D); + return Super::TraverseEnumDecl(D); + } + bool TraverseRecordDecl(RecordDecl *D) { + AutoSetContext Asc(this, D); + return Super::TraverseRecordDecl(D); + } + bool TraverseCXXRecordDecl(CXXRecordDecl *D) { + AutoSetContext Asc(this, D); + return Super::TraverseCXXRecordDecl(D); + } + bool TraverseFunctionDecl(FunctionDecl *D) { + AutoSetContext Asc(this, D); + const FunctionDecl *Def; + // (See the larger AutoTemplateContext comment for more information.) If a + // method on a templated class is declared out-of-line, we need to analyze + // the definition inside the scope of the template or else we won't properly + // handle member access on the templated type. + if (TemplateStack && D->isDefined(Def) && Def && D != Def) { + TraverseFunctionDecl(const_cast(Def)); + } + return Super::TraverseFunctionDecl(D); + } + bool TraverseCXXMethodDecl(CXXMethodDecl *D) { + AutoSetContext Asc(this, D); + const FunctionDecl *Def; + // See TraverseFunctionDecl. + if (TemplateStack && D->isDefined(Def) && Def && D != Def) { + TraverseFunctionDecl(const_cast(Def)); + } + return Super::TraverseCXXMethodDecl(D); + } + bool TraverseCXXConstructorDecl(CXXConstructorDecl *D) { + AutoSetContext Asc(this, D, /*VisitImplicit=*/true); + const FunctionDecl *Def; + // See TraverseFunctionDecl. + if (TemplateStack && D->isDefined(Def) && Def && D != Def) { + TraverseFunctionDecl(const_cast(Def)); + } + return Super::TraverseCXXConstructorDecl(D); + } + bool TraverseCXXConversionDecl(CXXConversionDecl *D) { + AutoSetContext Asc(this, D); + const FunctionDecl *Def; + // See TraverseFunctionDecl. + if (TemplateStack && D->isDefined(Def) && Def && D != Def) { + TraverseFunctionDecl(const_cast(Def)); + } + return Super::TraverseCXXConversionDecl(D); + } + bool TraverseCXXDestructorDecl(CXXDestructorDecl *D) { + AutoSetContext Asc(this, D); + const FunctionDecl *Def; + // See TraverseFunctionDecl. + if (TemplateStack && D->isDefined(Def) && Def && D != Def) { + TraverseFunctionDecl(const_cast(Def)); + } + return Super::TraverseCXXDestructorDecl(D); + } + + // Used to keep track of the context in which a token appears. + struct Context { + // Ultimately this becomes the "context" JSON property. + std::string Name; + + // Ultimately this becomes the "contextsym" JSON property. + std::vector Symbols; + + Context() {} + Context(std::string Name, std::vector Symbols) + : Name(Name), Symbols(Symbols) {} + }; + + Context translateContext(NamedDecl *D) { + const FunctionDecl *F = dyn_cast(D); + if (F && F->isTemplateInstantiation()) { + D = F->getTemplateInstantiationPattern(); + } + + std::vector Symbols = {getMangledName(CurMangleContext, D)}; + if (CXXMethodDecl::classof(D)) { + Symbols.clear(); + findOverriddenMethods(dyn_cast(D), Symbols); + } + return Context(D->getQualifiedNameAsString(), Symbols); + } + + Context getContext(SourceLocation Loc) { + if (SM.isMacroBodyExpansion(Loc)) { + // If we're inside a macro definition, we don't return any context. It + // will probably not be what the user expects if we do. + return Context(); + } + + if (CurDeclContext) { + return translateContext(CurDeclContext->Decl); + } + return Context(); + } + + // Similar to GetContext(SourceLocation), but it skips the declaration passed + // in. This is useful if we want the context of a declaration that's already + // on the stack. 
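
To make the Context/translateContext machinery above concrete, here is a small, invented input and the context it would yield: the call to Helper() is visited while Outer::Run is the current declaration context, so the emitted use record carries "context":"Outer::Run" and a "contextsym" holding the mangled symbol of Outer::Run.

void Helper();

struct Outer {
  void Run() {
    Helper();  // reported with context "Outer::Run"
  }
};
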
+ Context getContext(Decl *D) { + if (SM.isMacroBodyExpansion(D->getLocation())) { + // If we're inside a macro definition, we don't return any context. It + // will probably not be what the user expects if we do. + return Context(); + } + + AutoSetContext *Ctxt = CurDeclContext; + while (Ctxt) { + if (Ctxt->Decl != D) { + return translateContext(Ctxt->Decl); + } + Ctxt = Ctxt->Prev; + } + return Context(); + } + + static std::string concatSymbols(const std::vector Symbols) { + if (Symbols.empty()) { + return ""; + } + + size_t Total = 0; + for (auto It = Symbols.begin(); It != Symbols.end(); It++) { + Total += It->length(); + } + Total += Symbols.size() - 1; + + std::string SymbolList; + SymbolList.reserve(Total); + + for (auto It = Symbols.begin(); It != Symbols.end(); It++) { + std::string Symbol = *It; + + if (It != Symbols.begin()) { + SymbolList.push_back(','); + } + SymbolList.append(Symbol); + } + + return SymbolList; + } + + // Analyzing template code is tricky. Suppose we have this code: + // + // template + // bool Foo(T* ptr) { return T::StaticMethod(ptr); } + // + // If we analyze the body of Foo without knowing the type T, then we will not + // be able to generate any information for StaticMethod. However, analyzing + // Foo for every possible instantiation is inefficient and it also generates + // too much data in some cases. For example, the following code would generate + // one definition of Baz for every instantiation, which is undesirable: + // + // template + // class Bar { struct Baz { ... }; }; + // + // To solve this problem, we analyze templates only once. We do so in a + // GatherDependent mode where we look for "dependent scoped member + // expressions" (i.e., things like StaticMethod). We keep track of the + // locations of these expressions. If we find one or more of them, we analyze + // the template for each instantiation, in an AnalyzeDependent mode. This mode + // ignores all source locations except for the ones where we found dependent + // scoped member expressions before. For these locations, we generate a + // separate JSON result for each instantiation. + // + // We inherit our parent's mode if it is exists. This is because if our + // parent is in analyze mode, it means we've already lived a full life in + // gather mode and we must not restart in gather mode or we'll cause the + // indexer to visit EVERY identifier, which is way too much data. + struct AutoTemplateContext { + AutoTemplateContext(IndexConsumer *Self) + : Self(Self) + , CurMode(Self->TemplateStack ? Self->TemplateStack->CurMode : Mode::GatherDependent) + , Parent(Self->TemplateStack) { + Self->TemplateStack = this; + } + + ~AutoTemplateContext() { Self->TemplateStack = Parent; } + + // We traverse templates in two modes: + enum class Mode { + // Gather mode does not traverse into specializations. It looks for + // locations where it would help to have more info from template + // specializations. + GatherDependent, + + // Analyze mode traverses into template specializations and records + // information about token locations saved in gather mode. + AnalyzeDependent, + }; + + // We found a dependent scoped member expression! Keep track of it for + // later. 
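
The "dependent scoped member expression" case described above can be spelled out with a tiny, self-contained example (the types are invented for illustration). In gather mode the indexer only notes the location of the T::StaticMethod call; in analyze mode it revisits that one location for each instantiation and emits a separate use record per resolved target.

template <typename T>
bool Foo(T *Ptr) {
  return T::StaticMethod(Ptr);  // dependent: target unknown until T is fixed
}

struct WithStatic {
  static bool StaticMethod(WithStatic *) { return true; }
};

struct OtherStatic {
  static bool StaticMethod(OtherStatic *) { return false; }
};

// Two instantiations, so the call site above resolves to two different
// symbols and is indexed once per instantiation.
bool UseBoth(WithStatic *A, OtherStatic *B) {
  return Foo(A) && Foo(B);
}
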
+ void visitDependent(SourceLocation Loc) { + if (CurMode == Mode::AnalyzeDependent) { + return; + } + + DependentLocations.insert(Loc.getRawEncoding()); + if (Parent) { + Parent->visitDependent(Loc); + } + } + + bool inGatherMode() { + return CurMode == Mode::GatherDependent; + } + + // Do we need to perform the extra AnalyzeDependent passes (one per + // instantiation)? + bool needsAnalysis() const { + if (!DependentLocations.empty()) { + return true; + } + if (Parent) { + return Parent->needsAnalysis(); + } + return false; + } + + void switchMode() { CurMode = Mode::AnalyzeDependent; } + + // Do we want to analyze each template instantiation separately? + bool shouldVisitTemplateInstantiations() const { + if (CurMode == Mode::AnalyzeDependent) { + return true; + } + if (Parent) { + return Parent->shouldVisitTemplateInstantiations(); + } + return false; + } + + // For a given expression/statement, should we emit JSON data for it? + bool shouldVisit(SourceLocation Loc) { + if (CurMode == Mode::GatherDependent) { + return true; + } + if (DependentLocations.find(Loc.getRawEncoding()) != + DependentLocations.end()) { + return true; + } + if (Parent) { + return Parent->shouldVisit(Loc); + } + return false; + } + + private: + IndexConsumer *Self; + Mode CurMode; + std::unordered_set DependentLocations; + AutoTemplateContext *Parent; + }; + + AutoTemplateContext *TemplateStack; + + bool shouldVisitTemplateInstantiations() const { + if (TemplateStack) { + return TemplateStack->shouldVisitTemplateInstantiations(); + } + return false; + } + + bool shouldVisitImplicitCode() const { + return CurDeclContext && CurDeclContext->VisitImplicit; + } + + bool TraverseClassTemplateDecl(ClassTemplateDecl *D) { + AutoTemplateContext Atc(this); + Super::TraverseClassTemplateDecl(D); + + if (!Atc.needsAnalysis()) { + return true; + } + + Atc.switchMode(); + + if (D != D->getCanonicalDecl()) { + return true; + } + + for (auto *Spec : D->specializations()) { + for (auto *Rd : Spec->redecls()) { + // We don't want to visit injected-class-names in this traversal. + if (cast(Rd)->isInjectedClassName()) + continue; + + TraverseDecl(Rd); + } + } + + return true; + } + + bool TraverseFunctionTemplateDecl(FunctionTemplateDecl *D) { + AutoTemplateContext Atc(this); + if (Atc.inGatherMode()) { + Super::TraverseFunctionTemplateDecl(D); + } + + if (!Atc.needsAnalysis()) { + return true; + } + + Atc.switchMode(); + + if (D != D->getCanonicalDecl()) { + return true; + } + + for (auto *Spec : D->specializations()) { + for (auto *Rd : Spec->redecls()) { + TraverseDecl(Rd); + } + } + + return true; + } + + bool shouldVisit(SourceLocation Loc) { + if (TemplateStack) { + return TemplateStack->shouldVisit(Loc); + } + return true; + } + + enum { + // Flag to omit the identifier from being cross-referenced across files. + // This is usually desired for local variables. + NoCrossref = 1 << 0, + // Flag to indicate the token with analysis data is not an identifier. Indicates + // we want to skip the check that tries to ensure a sane identifier token. + NotIdentifierToken = 1 << 1, + // This indicates that the end of the provided SourceRange is valid and + // should be respected. If this flag is not set, the visitIdentifier + // function should use only the start of the SourceRange and auto-detect + // the end based on whatever token is found at the start. + LocRangeEndValid = 1 << 2 + }; + + // This is the only function that emits analysis JSON data. It should be + // called for each identifier that corresponds to a symbol. 
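
The NoCrossref flag above is the one most visitors set for locals. As a made-up illustration: in the function below, the parameter and the local are VarDecls for which the visitors pass Flags = NoCrossref, so their records carry "no_crossref":1 and no "target":1 entries are emitted, keeping them out of the cross-file index as the flag's comment describes.

int Accumulate(int aStep) {
  int total = 0;   // local VarDecl: recorded with NoCrossref
  total += aStep;  // both references: recorded with NoCrossref
  return total;
}
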
+ void visitIdentifier(const char *Kind, const char *SyntaxKind, + llvm::StringRef QualName, SourceRange LocRange, + const std::vector &Symbols, + Context TokenContext = Context(), int Flags = 0, + SourceRange PeekRange = SourceRange(), + SourceRange NestingRange = SourceRange()) { + SourceLocation Loc = LocRange.getBegin(); + if (!shouldVisit(Loc)) { + return; + } + + // Find the file positions corresponding to the token. + unsigned StartOffset = SM.getFileOffset(Loc); + unsigned EndOffset = (Flags & LocRangeEndValid) + ? SM.getFileOffset(LocRange.getEnd()) + : StartOffset + Lexer::MeasureTokenLength(Loc, SM, CI.getLangOpts()); + + std::string LocStr = locationToString(Loc, EndOffset - StartOffset); + std::string RangeStr = locationToString(Loc, EndOffset - StartOffset); + std::string PeekRangeStr; + + if (!(Flags & NotIdentifierToken)) { + // Get the token's characters so we can make sure it's a valid token. + const char *StartChars = SM.getCharacterData(Loc); + std::string Text(StartChars, EndOffset - StartOffset); + if (!isValidIdentifier(Text)) { + return; + } + } + + FileInfo *F = getFileInfo(Loc); + + std::string SymbolList; + + // Reserve space in symbolList for everything in `symbols`. `symbols` can + // contain some very long strings. + size_t Total = 0; + for (auto It = Symbols.begin(); It != Symbols.end(); It++) { + Total += It->length(); + } + + // Space for commas. + Total += Symbols.size() - 1; + SymbolList.reserve(Total); + + // For each symbol, generate one "target":1 item. We want to find this line + // if someone searches for any one of these symbols. + for (auto It = Symbols.begin(); It != Symbols.end(); It++) { + std::string Symbol = *It; + + if (!(Flags & NoCrossref)) { + JSONFormatter Fmt; + + Fmt.add("loc", LocStr); + Fmt.add("target", 1); + Fmt.add("kind", Kind); + Fmt.add("pretty", QualName.data()); + Fmt.add("sym", Symbol); + if (!TokenContext.Name.empty()) { + Fmt.add("context", TokenContext.Name); + } + std::string ContextSymbol = concatSymbols(TokenContext.Symbols); + if (!ContextSymbol.empty()) { + Fmt.add("contextsym", ContextSymbol); + } + if (PeekRange.isValid()) { + PeekRangeStr = lineRangeToString(PeekRange); + if (!PeekRangeStr.empty()) { + Fmt.add("peekRange", PeekRangeStr); + } + } + + std::string S; + Fmt.format(S); + F->Output.push_back(std::move(S)); + } + + if (It != Symbols.begin()) { + SymbolList.push_back(','); + } + SymbolList.append(Symbol); + } + + // Generate a single "source":1 for all the symbols. If we search from here, + // we want to union the results for every symbol in `symbols`. 
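
Putting the two halves of visitIdentifier together: for each symbol it emits one target record (unless NoCrossref is set), then a single source record naming all the symbols at that token. Purely as an illustration, with every value invented and the exact key layout and loc format left to JSONFormatter, locationToString and getMangledName, the pair of lines for a method definition might look roughly like:

{"loc":"42:7-10","target":1,"kind":"def","pretty":"Widget::Run","sym":"_ZN6Widget3RunEv","context":"Widget","contextsym":"T_Widget"}
{"loc":"42:7-10","source":1,"syntax":"def,function","pretty":"function Widget::Run","sym":"_ZN6Widget3RunEv"}
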
+ JSONFormatter Fmt; + + Fmt.add("loc", RangeStr); + Fmt.add("source", 1); + + if (NestingRange.isValid()) { + std::string NestingRangeStr = fullRangeToString(NestingRange); + if (!NestingRangeStr.empty()) { + Fmt.add("nestingRange", NestingRangeStr); + } + } + + std::string Syntax; + if (Flags & NoCrossref) { + Fmt.add("syntax", ""); + } else { + Syntax = Kind; + Syntax.push_back(','); + Syntax.append(SyntaxKind); + Fmt.add("syntax", Syntax); + } + + std::string Pretty(SyntaxKind); + Pretty.push_back(' '); + Pretty.append(QualName.data()); + Fmt.add("pretty", Pretty); + + Fmt.add("sym", SymbolList); + + if (Flags & NoCrossref) { + Fmt.add("no_crossref", 1); + } + + std::string Buf; + Fmt.format(Buf); + F->Output.push_back(std::move(Buf)); + } + + void visitIdentifier(const char *Kind, const char *SyntaxKind, + llvm::StringRef QualName, SourceLocation Loc, std::string Symbol, + Context TokenContext = Context(), int Flags = 0, + SourceRange PeekRange = SourceRange(), + SourceRange NestingRange = SourceRange()) { + std::vector V = {Symbol}; + visitIdentifier(Kind, SyntaxKind, QualName, SourceRange(Loc), V, TokenContext, Flags, + PeekRange, NestingRange); + } + + void normalizeLocation(SourceLocation *Loc) { + *Loc = SM.getSpellingLoc(*Loc); + } + + // For cases where the left-brace is not directly accessible from the AST, + // helper to use the lexer to find the brace. Make sure you're picking the + // start location appropriately! + SourceLocation findLeftBraceFromLoc(SourceLocation Loc) { + return Lexer::findLocationAfterToken(Loc, tok::l_brace, SM, LO, false); + } + + // If the provided statement is compound, return its range. + SourceRange getCompoundStmtRange(Stmt* D) { + if (!D) { + return SourceRange(); + } + + CompoundStmt *D2 = dyn_cast(D); + if (D2) { + return D2->getSourceRange(); + } + + return SourceRange(); + } + + SourceRange getFunctionPeekRange(FunctionDecl* D) { + // We always start at the start of the function decl, which may include the + // return type on a separate line. + SourceLocation Start = D->getBeginLoc(); + + // By default, we end at the line containing the function's name. + SourceLocation End = D->getLocation(); + + std::pair FuncLoc = SM.getDecomposedLoc(End); + + // But if there are parameters, we want to include those as well. + for (ParmVarDecl* Param : D->parameters()) { + std::pair ParamLoc = SM.getDecomposedLoc(Param->getLocation()); + + // It's possible there are macros involved or something. We don't include + // the parameters in that case. + if (ParamLoc.first == FuncLoc.first) { + // Assume parameters are in order, so we always take the last one. + End = Param->getEndLoc(); + } + } + + return SourceRange(Start, End); + } + + SourceRange getTagPeekRange(TagDecl* D) { + SourceLocation Start = D->getBeginLoc(); + + // By default, we end at the line containing the name. + SourceLocation End = D->getLocation(); + + std::pair FuncLoc = SM.getDecomposedLoc(End); + + if (CXXRecordDecl* D2 = dyn_cast(D)) { + // But if there are parameters, we want to include those as well. + for (CXXBaseSpecifier& Base : D2->bases()) { + std::pair Loc = SM.getDecomposedLoc(Base.getEndLoc()); + + // It's possible there are macros involved or something. We don't include + // the parameters in that case. + if (Loc.first == FuncLoc.first) { + // Assume parameters are in order, so we always take the last one. 
+ End = Base.getEndLoc(); + } + } + } + + return SourceRange(Start, End); + } + + SourceRange getCommentRange(NamedDecl* D) { + const RawComment* RC = + AstContext->getRawCommentForDeclNoCache(D); + if (!RC) { + return SourceRange(); + } + + return RC->getSourceRange(); + } + + // Sanity checks that all ranges are in the same file, returning the first if + // they're in different files. Unions the ranges based on which is first. + SourceRange combineRanges(SourceRange Range1, SourceRange Range2) { + if (Range1.isInvalid()) { + return Range2; + } + if (Range2.isInvalid()) { + return Range1; + } + + std::pair Begin1 = SM.getDecomposedLoc(Range1.getBegin()); + std::pair End1 = SM.getDecomposedLoc(Range1.getEnd()); + std::pair Begin2 = SM.getDecomposedLoc(Range2.getBegin()); + std::pair End2 = SM.getDecomposedLoc(Range2.getEnd()); + + if (End1.first != Begin2.first) { + // Something weird is probably happening with the preprocessor. Just + // return the first range. + return Range1; + } + + // See which range comes first. + if (Begin1.second <= End2.second) { + return SourceRange(Range1.getBegin(), Range2.getEnd()); + } else { + return SourceRange(Range2.getBegin(), Range1.getEnd()); + } + } + + // Given a location and a range, returns the range if: + // - The location and the range live in the same file. + // - The range is well ordered (end is not before begin). + // Returns an empty range otherwise. + SourceRange validateRange(SourceLocation Loc, SourceRange Range) { + std::pair Decomposed = SM.getDecomposedLoc(Loc); + std::pair Begin = SM.getDecomposedLoc(Range.getBegin()); + std::pair End = SM.getDecomposedLoc(Range.getEnd()); + + if (Begin.first != Decomposed.first || End.first != Decomposed.first) { + return SourceRange(); + } + + if (Begin.second >= End.second) { + return SourceRange(); + } + + return Range; + } + + bool VisitNamedDecl(NamedDecl *D) { + SourceLocation Loc = D->getLocation(); + + // If the token is from a macro expansion and the expansion location + // is interesting, use that instead as it tends to be more useful. + SourceLocation expandedLoc = Loc; + if (SM.isMacroBodyExpansion(Loc)) { + Loc = SM.getFileLoc(Loc); + } + + normalizeLocation(&Loc); + if (!isInterestingLocation(Loc)) { + return true; + } + + if (isa(D) && !D->getDeclName().getAsIdentifierInfo()) { + // Unnamed parameter in function proto. + return true; + } + + int Flags = 0; + const char *Kind = "def"; + const char *PrettyKind = "?"; + SourceRange PeekRange(D->getBeginLoc(), D->getEndLoc()); + // The nesting range identifies the left brace and right brace, which + // heavily depends on the AST node type. + SourceRange NestingRange; + if (FunctionDecl *D2 = dyn_cast(D)) { + if (D2->isTemplateInstantiation()) { + D = D2->getTemplateInstantiationPattern(); + } + Kind = D2->isThisDeclarationADefinition() ? "def" : "decl"; + PrettyKind = "function"; + PeekRange = getFunctionPeekRange(D2); + + // Only emit the nesting range if: + // - This is a definition AND + // - This isn't a template instantiation. Function templates' + // instantiations can end up as a definition with a Loc at their point + // of declaration but with the CompoundStmt of the template's + // point of definition. This really messes up the nesting range logic. + // At the time of writing this, the test repo's `big_header.h`'s + // `WhatsYourVector_impl::forwardDeclaredTemplateThingInlinedBelow` as + // instantiated by `big_cpp.cpp` triggers this phenomenon. 
+ // + // Note: As covered elsewhere, template processing is tricky and it's + // conceivable that we may change traversal patterns in the future, + // mooting this guard. + if (D2->isThisDeclarationADefinition() && + !D2->isTemplateInstantiation()) { + // The CompoundStmt range is the brace range. + NestingRange = getCompoundStmtRange(D2->getBody()); + } + } else if (TagDecl *D2 = dyn_cast(D)) { + Kind = D2->isThisDeclarationADefinition() ? "def" : "decl"; + PrettyKind = "type"; + + if (D2->isThisDeclarationADefinition() && D2->getDefinition() == D2) { + PeekRange = getTagPeekRange(D2); + NestingRange = D2->getBraceRange(); + } else { + PeekRange = SourceRange(); + } + } else if (isa(D)) { + Kind = "def"; + PrettyKind = "type"; + PeekRange = SourceRange(Loc, Loc); + } else if (VarDecl *D2 = dyn_cast(D)) { + if (D2->isLocalVarDeclOrParm()) { + Flags = NoCrossref; + } + + Kind = D2->isThisDeclarationADefinition() == VarDecl::DeclarationOnly + ? "decl" + : "def"; + PrettyKind = "variable"; + } else if (isa(D) || isa(D)) { + Kind = "def"; + PrettyKind = "namespace"; + PeekRange = SourceRange(Loc, Loc); + NamespaceDecl *D2 = dyn_cast(D); + if (D2) { + // There's no exposure of the left brace so we have to find it. + NestingRange = SourceRange( + findLeftBraceFromLoc(D2->isAnonymousNamespace() ? D2->getBeginLoc() : Loc), + D2->getRBraceLoc()); + } + } else if (isa(D)) { + Kind = "def"; + PrettyKind = "field"; + } else if (isa(D)) { + Kind = "def"; + PrettyKind = "enum constant"; + } else { + return true; + } + + SourceRange CommentRange = getCommentRange(D); + PeekRange = combineRanges(PeekRange, CommentRange); + PeekRange = validateRange(Loc, PeekRange); + NestingRange = validateRange(Loc, NestingRange); + + std::vector Symbols = {getMangledName(CurMangleContext, D)}; + if (CXXMethodDecl::classof(D)) { + Symbols.clear(); + findOverriddenMethods(dyn_cast(D), Symbols); + } + + // In the case of destructors, Loc might point to the ~ character. In that + // case we want to skip to the name of the class. However, Loc might also + // point to other places that generate destructors, such as the use site of + // a macro that expands to generate a destructor, or a lambda (apparently + // clang 8 creates a destructor declaration for at least some lambdas). In + // the former case we'll use the macro use site as the location, and in the + // latter we'll just drop the declaration. + if (isa(D)) { + PrettyKind = "destructor"; + const char *P = SM.getCharacterData(Loc); + if (*P == '~') { + // Advance Loc to the class name + P++; + + unsigned Skipped = 1; + while (*P == ' ' || *P == '\t' || *P == '\r' || *P == '\n') { + P++; + Skipped++; + } + + Loc = Loc.getLocWithOffset(Skipped); + } else { + // See if the destructor is coming from a macro expansion + P = SM.getCharacterData(expandedLoc); + if (*P != '~') { + // It's not + return true; + } + // It is, so just use Loc as-is + } + } + + visitIdentifier(Kind, PrettyKind, getQualifiedName(D), SourceRange(Loc), Symbols, + getContext(D), Flags, PeekRange, NestingRange); + + return true; + } + + bool VisitCXXConstructExpr(CXXConstructExpr *E) { + SourceLocation Loc = E->getBeginLoc(); + normalizeLocation(&Loc); + if (!isInterestingLocation(Loc)) { + return true; + } + + FunctionDecl *Ctor = E->getConstructor(); + if (Ctor->isTemplateInstantiation()) { + Ctor = Ctor->getTemplateInstantiationPattern(); + } + std::string Mangled = getMangledName(CurMangleContext, Ctor); + + // FIXME: Need to do something different for list initialization. 
+ + visitIdentifier("use", "constructor", getQualifiedName(Ctor), Loc, Mangled, + getContext(Loc)); + + return true; + } + + bool VisitCallExpr(CallExpr *E) { + Decl *Callee = E->getCalleeDecl(); + if (!Callee || !FunctionDecl::classof(Callee)) { + return true; + } + + const NamedDecl *NamedCallee = dyn_cast(Callee); + + SourceLocation Loc; + + const FunctionDecl *F = dyn_cast(NamedCallee); + if (F->isTemplateInstantiation()) { + NamedCallee = F->getTemplateInstantiationPattern(); + } + + std::string Mangled = getMangledName(CurMangleContext, NamedCallee); + int Flags = 0; + + Expr *CalleeExpr = E->getCallee()->IgnoreParenImpCasts(); + + if (CXXOperatorCallExpr::classof(E)) { + // Just take the first token. + CXXOperatorCallExpr *Op = dyn_cast(E); + Loc = Op->getOperatorLoc(); + Flags |= NotIdentifierToken; + } else if (MemberExpr::classof(CalleeExpr)) { + MemberExpr *Member = dyn_cast(CalleeExpr); + Loc = Member->getMemberLoc(); + } else if (DeclRefExpr::classof(CalleeExpr)) { + // We handle this in VisitDeclRefExpr. + return true; + } else { + return true; + } + + normalizeLocation(&Loc); + + if (!isInterestingLocation(Loc)) { + return true; + } + + visitIdentifier("use", "function", getQualifiedName(NamedCallee), Loc, Mangled, + getContext(Loc), Flags); + + return true; + } + + bool VisitTagTypeLoc(TagTypeLoc L) { + SourceLocation Loc = L.getBeginLoc(); + normalizeLocation(&Loc); + if (!isInterestingLocation(Loc)) { + return true; + } + + TagDecl *Decl = L.getDecl(); + std::string Mangled = getMangledName(CurMangleContext, Decl); + visitIdentifier("use", "type", getQualifiedName(Decl), Loc, Mangled, + getContext(Loc)); + return true; + } + + bool VisitTypedefTypeLoc(TypedefTypeLoc L) { + SourceLocation Loc = L.getBeginLoc(); + normalizeLocation(&Loc); + if (!isInterestingLocation(Loc)) { + return true; + } + + NamedDecl *Decl = L.getTypedefNameDecl(); + std::string Mangled = getMangledName(CurMangleContext, Decl); + visitIdentifier("use", "type", getQualifiedName(Decl), Loc, Mangled, + getContext(Loc)); + return true; + } + + bool VisitInjectedClassNameTypeLoc(InjectedClassNameTypeLoc L) { + SourceLocation Loc = L.getBeginLoc(); + normalizeLocation(&Loc); + if (!isInterestingLocation(Loc)) { + return true; + } + + NamedDecl *Decl = L.getDecl(); + std::string Mangled = getMangledName(CurMangleContext, Decl); + visitIdentifier("use", "type", getQualifiedName(Decl), Loc, Mangled, + getContext(Loc)); + return true; + } + + bool VisitTemplateSpecializationTypeLoc(TemplateSpecializationTypeLoc L) { + SourceLocation Loc = L.getBeginLoc(); + normalizeLocation(&Loc); + if (!isInterestingLocation(Loc)) { + return true; + } + + TemplateDecl *Td = L.getTypePtr()->getTemplateName().getAsTemplateDecl(); + if (ClassTemplateDecl *D = dyn_cast(Td)) { + NamedDecl *Decl = D->getTemplatedDecl(); + std::string Mangled = getMangledName(CurMangleContext, Decl); + visitIdentifier("use", "type", getQualifiedName(Decl), Loc, Mangled, + getContext(Loc)); + } else if (TypeAliasTemplateDecl *D = dyn_cast(Td)) { + NamedDecl *Decl = D->getTemplatedDecl(); + std::string Mangled = getMangledName(CurMangleContext, Decl); + visitIdentifier("use", "type", getQualifiedName(Decl), Loc, Mangled, + getContext(Loc)); + } + + return true; + } + + bool VisitDeclRefExpr(DeclRefExpr *E) { + SourceLocation Loc = E->getExprLoc(); + normalizeLocation(&Loc); + if (!isInterestingLocation(Loc)) { + return true; + } + + if (E->hasQualifier()) { + Loc = E->getNameInfo().getLoc(); + normalizeLocation(&Loc); + } + + NamedDecl *Decl = 
E->getDecl(); + if (const VarDecl *D2 = dyn_cast(Decl)) { + int Flags = 0; + if (D2->isLocalVarDeclOrParm()) { + Flags = NoCrossref; + } + std::string Mangled = getMangledName(CurMangleContext, Decl); + visitIdentifier("use", "variable", getQualifiedName(Decl), Loc, Mangled, + getContext(Loc), Flags); + } else if (isa(Decl)) { + const FunctionDecl *F = dyn_cast(Decl); + if (F->isTemplateInstantiation()) { + Decl = F->getTemplateInstantiationPattern(); + } + + std::string Mangled = getMangledName(CurMangleContext, Decl); + visitIdentifier("use", "function", getQualifiedName(Decl), Loc, Mangled, + getContext(Loc)); + } else if (isa(Decl)) { + std::string Mangled = getMangledName(CurMangleContext, Decl); + visitIdentifier("use", "enum", getQualifiedName(Decl), Loc, Mangled, + getContext(Loc)); + } + + return true; + } + + bool VisitCXXConstructorDecl(CXXConstructorDecl *D) { + if (!isInterestingLocation(D->getLocation())) { + return true; + } + + for (CXXConstructorDecl::init_const_iterator It = D->init_begin(); + It != D->init_end(); ++It) { + const CXXCtorInitializer *Ci = *It; + if (!Ci->getMember() || !Ci->isWritten()) { + continue; + } + + SourceLocation Loc = Ci->getMemberLocation(); + normalizeLocation(&Loc); + if (!isInterestingLocation(Loc)) { + continue; + } + + FieldDecl *Member = Ci->getMember(); + std::string Mangled = getMangledName(CurMangleContext, Member); + visitIdentifier("use", "field", getQualifiedName(Member), Loc, Mangled, + getContext(D)); + } + + return true; + } + + bool VisitMemberExpr(MemberExpr *E) { + SourceLocation Loc = E->getExprLoc(); + normalizeLocation(&Loc); + if (!isInterestingLocation(Loc)) { + return true; + } + + ValueDecl *Decl = E->getMemberDecl(); + if (FieldDecl *Field = dyn_cast(Decl)) { + std::string Mangled = getMangledName(CurMangleContext, Field); + visitIdentifier("use", "field", getQualifiedName(Field), Loc, Mangled, + getContext(Loc)); + } + return true; + } + + bool VisitCXXDependentScopeMemberExpr(CXXDependentScopeMemberExpr *E) { + SourceLocation Loc = E->getMemberLoc(); + normalizeLocation(&Loc); + if (!isInterestingLocation(Loc)) { + return true; + } + + if (TemplateStack) { + TemplateStack->visitDependent(Loc); + } + return true; + } + + void enterSourceFile(SourceLocation Loc) { + normalizeLocation(&Loc); + FileInfo* newFile = getFileInfo(Loc); + if (!newFile->Interesting) { + return; + } + FileType type = newFile->Generated ? FileType::Generated : FileType::Source; + std::vector symbols = { + std::string("FILE_") + mangleFile(newFile->Realname, type) + }; + // We use an explicit zero-length source range at the start of the file. If we + // don't set the LocRangeEndValid flag, the visitIdentifier code will use the + // entire first token, which could be e.g. a long multiline-comment. 
+ visitIdentifier("def", "file", newFile->Realname, SourceRange(Loc), + symbols, Context(), NotIdentifierToken | LocRangeEndValid); + } + + void inclusionDirective(SourceRange FileNameRange, const FileEntry* File) { + std::string includedFile(File->tryGetRealPathName()); + FileType type = relativizePath(includedFile); + if (type == FileType::Unknown) { + return; + } + std::vector symbols = { + std::string("FILE_") + mangleFile(includedFile, type) + }; + visitIdentifier("use", "file", includedFile, FileNameRange, symbols, + Context(), NotIdentifierToken | LocRangeEndValid); + } + + void macroDefined(const Token &Tok, const MacroDirective *Macro) { + if (Macro->getMacroInfo()->isBuiltinMacro()) { + return; + } + SourceLocation Loc = Tok.getLocation(); + normalizeLocation(&Loc); + if (!isInterestingLocation(Loc)) { + return; + } + + IdentifierInfo *Ident = Tok.getIdentifierInfo(); + if (Ident) { + std::string Mangled = + std::string("M_") + mangleLocation(Loc, std::string(Ident->getName())); + visitIdentifier("def", "macro", Ident->getName(), Loc, Mangled); + } + } + + void macroUsed(const Token &Tok, const MacroInfo *Macro) { + if (!Macro) { + return; + } + if (Macro->isBuiltinMacro()) { + return; + } + SourceLocation Loc = Tok.getLocation(); + normalizeLocation(&Loc); + if (!isInterestingLocation(Loc)) { + return; + } + + IdentifierInfo *Ident = Tok.getIdentifierInfo(); + if (Ident) { + std::string Mangled = + std::string("M_") + + mangleLocation(Macro->getDefinitionLoc(), std::string(Ident->getName())); + visitIdentifier("use", "macro", Ident->getName(), Loc, Mangled); + } + } +}; + +void PreprocessorHook::FileChanged(SourceLocation Loc, FileChangeReason Reason, + SrcMgr::CharacteristicKind FileType, + FileID PrevFID = FileID()) { + switch (Reason) { + case PPCallbacks::RenameFile: + case PPCallbacks::SystemHeaderPragma: + // Don't care about these, since we want the actual on-disk filenames + break; + case PPCallbacks::EnterFile: + Indexer->enterSourceFile(Loc); + break; + case PPCallbacks::ExitFile: + // Don't care about exiting files + break; + } +} + +void PreprocessorHook::InclusionDirective(SourceLocation HashLoc, + const Token &IncludeTok, + StringRef FileName, + bool IsAngled, + CharSourceRange FileNameRange, + const FileEntry *File, + StringRef SearchPath, + StringRef RelativePath, + const Module *Imported, + SrcMgr::CharacteristicKind FileType) { + Indexer->inclusionDirective(FileNameRange.getAsRange(), File); +} + +void PreprocessorHook::MacroDefined(const Token &Tok, + const MacroDirective *Md) { + Indexer->macroDefined(Tok, Md); +} + +void PreprocessorHook::MacroExpands(const Token &Tok, const MacroDefinition &Md, + SourceRange Range, const MacroArgs *Ma) { + Indexer->macroUsed(Tok, Md.getMacroInfo()); +} + +void PreprocessorHook::MacroUndefined(const Token &Tok, + const MacroDefinition &Md, + const MacroDirective *Undef) +{ + Indexer->macroUsed(Tok, Md.getMacroInfo()); +} + +void PreprocessorHook::Defined(const Token &Tok, const MacroDefinition &Md, + SourceRange Range) { + Indexer->macroUsed(Tok, Md.getMacroInfo()); +} + +void PreprocessorHook::Ifdef(SourceLocation Loc, const Token &Tok, + const MacroDefinition &Md) { + Indexer->macroUsed(Tok, Md.getMacroInfo()); +} + +void PreprocessorHook::Ifndef(SourceLocation Loc, const Token &Tok, + const MacroDefinition &Md) { + Indexer->macroUsed(Tok, Md.getMacroInfo()); +} + +class IndexAction : public PluginASTAction { +protected: + std::unique_ptr CreateASTConsumer(CompilerInstance &CI, + llvm::StringRef F) { + return 
make_unique(CI); + } + + bool ParseArgs(const CompilerInstance &CI, + const std::vector &Args) { + if (Args.size() != 3) { + DiagnosticsEngine &D = CI.getDiagnostics(); + unsigned DiagID = D.getCustomDiagID( + DiagnosticsEngine::Error, + "Need arguments for the source, output, and object directories"); + D.Report(DiagID); + return false; + } + + // Load our directories + Srcdir = getAbsolutePath(Args[0]); + if (Srcdir.empty()) { + DiagnosticsEngine &D = CI.getDiagnostics(); + unsigned DiagID = D.getCustomDiagID( + DiagnosticsEngine::Error, "Source directory '%0' does not exist"); + D.Report(DiagID) << Args[0]; + return false; + } + + ensurePath(Args[1] + PATHSEP_STRING); + Outdir = getAbsolutePath(Args[1]); + Outdir += PATHSEP_STRING; + + Objdir = getAbsolutePath(Args[2]); + if (Objdir.empty()) { + DiagnosticsEngine &D = CI.getDiagnostics(); + unsigned DiagID = D.getCustomDiagID(DiagnosticsEngine::Error, + "Objdir '%0' does not exist"); + D.Report(DiagID) << Args[2]; + return false; + } + Objdir += PATHSEP_STRING; + + printf("MOZSEARCH: %s %s %s\n", Srcdir.c_str(), Outdir.c_str(), + Objdir.c_str()); + + return true; + } + + void printHelp(llvm::raw_ostream &Ros) { + Ros << "Help for mozsearch plugin goes here\n"; + } +}; + +static FrontendPluginRegistry::Add + Y("mozsearch-index", "create the mozsearch index database"); diff --git a/build/clang-plugin/mozsearch-plugin/README b/build/clang-plugin/mozsearch-plugin/README new file mode 100644 index 0000000000..d948e9aca3 --- /dev/null +++ b/build/clang-plugin/mozsearch-plugin/README @@ -0,0 +1,12 @@ +This clang plugin code generates a JSON file for each compiler input +file. The JSON file contains information about the C++ symbols that +are referenced by the input file. The data is eventually consumed by +Searchfox. See https://github.com/mozsearch/mozsearch for more +information. + +This plugin is enabled with the --enable-clang-plugin and +--enable-mozsearch-plugin mozconfig options. The output of the plugin +is stored in $OBJDIR/mozsearch_index. + +This code is not a checker, unlike other parts of the Mozilla clang +plugin. It cannot be used with clang-tidy. diff --git a/build/clang-plugin/mozsearch-plugin/StringOperations.cpp b/build/clang-plugin/mozsearch-plugin/StringOperations.cpp new file mode 100644 index 0000000000..a2e60e42c6 --- /dev/null +++ b/build/clang-plugin/mozsearch-plugin/StringOperations.cpp @@ -0,0 +1,42 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "StringOperations.h" + +static unsigned long djbHash(const char *Str) { + unsigned long Hash = 5381; + + for (const char *P = Str; *P; P++) { + // Hash * 33 + c + Hash = ((Hash << 5) + Hash) + *P; + } + + return Hash; +} + +// This doesn't actually return a hex string of |hash|, but it +// does... something. It doesn't really matter what. 
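
For concreteness, a worked example of the hashing pair (djbHash above, hashToString just below), with the arithmetic spelled out; the input string is arbitrary:

  djbHash("ab") = (5381 * 33 + 'a') * 33 + 'b'
                = (177573 + 97) * 33 + 98
                = 5863208, i.e. 0x597728

hashToString() emits the hex digits least-significant nibble first, so hash("ab") comes out as "827795", the hex form of 0x597728 written backwards. The exact spelling is irrelevant, as the comment above notes; it only needs to be stable.
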
+static void hashToString(unsigned long Hash, char *Buffer) { + const char Table[] = {"0123456789abcdef"}; + char *P = Buffer; + while (Hash) { + *P = Table[Hash & 0xf]; + Hash >>= 4; + P++; + } + + *P = 0; +} + +std::string hash(const std::string &Str) { + static char HashStr[41]; + unsigned long H = djbHash(Str.c_str()); + hashToString(H, HashStr); + return std::string(HashStr); +} + +std::string toString(int N) { + return stringFormat("%d", N); +} diff --git a/build/clang-plugin/mozsearch-plugin/StringOperations.h b/build/clang-plugin/mozsearch-plugin/StringOperations.h new file mode 100644 index 0000000000..4aa5b31962 --- /dev/null +++ b/build/clang-plugin/mozsearch-plugin/StringOperations.h @@ -0,0 +1,25 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef StringOperations_h +#define StringOperations_h + +#include +#include +#include + +std::string hash(const std::string &Str); + +template +inline std::string stringFormat(const std::string &Format, Args... ArgList) { + size_t Len = snprintf(nullptr, 0, Format.c_str(), ArgList...); + std::unique_ptr Buf(new char[Len + 1]); + snprintf(Buf.get(), Len + 1, Format.c_str(), ArgList...); + return std::string(Buf.get(), Buf.get() + Len); +} + +std::string toString(int N); + +#endif diff --git a/build/clang-plugin/plugin.h b/build/clang-plugin/plugin.h new file mode 100644 index 0000000000..15e7560455 --- /dev/null +++ b/build/clang-plugin/plugin.h @@ -0,0 +1,57 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef plugin_h__ +#define plugin_h__ + +#include "clang/AST/ASTConsumer.h" +#include "clang/AST/ASTContext.h" +#include "clang/AST/RecursiveASTVisitor.h" +#include "clang/ASTMatchers/ASTMatchFinder.h" +#include "clang/ASTMatchers/ASTMatchers.h" +#include "clang/Analysis/CFG.h" +#include "clang/Basic/Version.h" +#include "clang/Frontend/CompilerInstance.h" +#include "clang/Frontend/MultiplexConsumer.h" +#include "clang/Sema/Sema.h" +#include "llvm/ADT/DenseMap.h" +#include "llvm/Support/FileSystem.h" +#include "llvm/Support/Path.h" +#include +#include + +#define CLANG_VERSION_FULL (CLANG_VERSION_MAJOR * 100 + CLANG_VERSION_MINOR) + +using namespace llvm; +using namespace clang; +using namespace clang::ast_matchers; + +#if CLANG_VERSION_FULL >= 306 +typedef std::unique_ptr ASTConsumerPtr; +#else +typedef ASTConsumer *ASTConsumerPtr; +#endif + +#if CLANG_VERSION_FULL < 800 +// Starting with Clang 8.0 some basic functions have been renamed +#define getBeginLoc getLocStart +#define getEndLoc getLocEnd +#endif + +// In order to support running our checks using clang-tidy, we implement a +// source compatible base check class called BaseCheck, and we use the +// preprocessor to decide which base class to pick. 
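
A small usage note on the stringFormat helper above: it runs snprintf once with a null buffer to size the result, then formats into an exactly-sized buffer, so any printf-style conversion works. A hypothetical call:

  std::string Line = stringFormat("%s:%d", "Widget.cpp", 42);
  // Line == "Widget.cpp:42"
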
+#ifdef CLANG_TIDY +#if CLANG_VERSION_FULL >= 900 +#include "../ClangTidyCheck.h" +#else +#include "../ClangTidy.h" +#endif +typedef clang::tidy::ClangTidyCheck BaseCheck; +typedef clang::tidy::ClangTidyContext ContextType; +#else +#include "BaseCheck.h" +#endif + +#endif // plugin_h__ diff --git a/build/clang-plugin/tests/Makefile.in b/build/clang-plugin/tests/Makefile.in new file mode 100644 index 0000000000..ae5691bdc2 --- /dev/null +++ b/build/clang-plugin/tests/Makefile.in @@ -0,0 +1,13 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +include $(topsrcdir)/config/rules.mk + +target:: $(OBJS) + +# We don't actually build anything. +.PHONY: $(OBJS) + +# Don't actually build a library, since we don't actually build objects. +$(LIBRARY): EXPAND_LIBS_GEN=true diff --git a/build/clang-plugin/tests/NonParameterTestCases.h b/build/clang-plugin/tests/NonParameterTestCases.h new file mode 100644 index 0000000000..d38a14d944 --- /dev/null +++ b/build/clang-plugin/tests/NonParameterTestCases.h @@ -0,0 +1,61 @@ +MAYBE_STATIC void raw(Param x) {} + +MAYBE_STATIC void raw(NonParam x) {} //expected-error {{Type 'NonParam' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} +MAYBE_STATIC void raw(NonParamUnion x) {} //expected-error {{Type 'NonParamUnion' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} +MAYBE_STATIC void raw(NonParamClass x) {} //expected-error {{Type 'NonParamClass' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} +MAYBE_STATIC void raw(NonParamEnum x) {} //expected-error {{Type 'NonParamEnum' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} +MAYBE_STATIC void raw(NonParamEnumClass x) {} //expected-error {{Type 'NonParamEnumClass' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} +MAYBE_STATIC void raw(HasNonParamStruct x) {} //expected-error {{Type 'HasNonParamStruct' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} +MAYBE_STATIC void raw(HasNonParamUnion x) {} //expected-error {{Type 'HasNonParamUnion' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} +MAYBE_STATIC void raw(HasNonParamStructUnion x) {} //expected-error {{Type 'HasNonParamStructUnion' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} + +MAYBE_STATIC void const_(const NonParam x) {} //expected-error {{Type 'NonParam' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} +MAYBE_STATIC void const_(const NonParamUnion x) {} //expected-error {{Type 'NonParamUnion' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} +MAYBE_STATIC void const_(const NonParamClass x) {} //expected-error {{Type 'NonParamClass' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} +MAYBE_STATIC void const_(const NonParamEnum x) {} //expected-error {{Type 'NonParamEnum' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} +MAYBE_STATIC void const_(const NonParamEnumClass x) {} //expected-error {{Type 
'NonParamEnumClass' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} +MAYBE_STATIC void const_(const HasNonParamStruct x) {} //expected-error {{Type 'HasNonParamStruct' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} +MAYBE_STATIC void const_(const HasNonParamUnion x) {} //expected-error {{Type 'HasNonParamUnion' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} +MAYBE_STATIC void const_(const HasNonParamStructUnion x) {} //expected-error {{Type 'HasNonParamStructUnion' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} + +MAYBE_STATIC void array(NonParam x[]) {} +MAYBE_STATIC void array(NonParamUnion x[]) {} +MAYBE_STATIC void array(NonParamClass x[]) {} +MAYBE_STATIC void array(NonParamEnum x[]) {} +MAYBE_STATIC void array(NonParamEnumClass x[]) {} +MAYBE_STATIC void array(HasNonParamStruct x[]) {} +MAYBE_STATIC void array(HasNonParamUnion x[]) {} +MAYBE_STATIC void array(HasNonParamStructUnion x[]) {} + +MAYBE_STATIC void ptr(NonParam* x) {} +MAYBE_STATIC void ptr(NonParamUnion* x) {} +MAYBE_STATIC void ptr(NonParamClass* x) {} +MAYBE_STATIC void ptr(NonParamEnum* x) {} +MAYBE_STATIC void ptr(NonParamEnumClass* x) {} +MAYBE_STATIC void ptr(HasNonParamStruct* x) {} +MAYBE_STATIC void ptr(HasNonParamUnion* x) {} +MAYBE_STATIC void ptr(HasNonParamStructUnion* x) {} + +MAYBE_STATIC void ref(NonParam& x) {} +MAYBE_STATIC void ref(NonParamUnion& x) {} +MAYBE_STATIC void ref(NonParamClass& x) {} +MAYBE_STATIC void ref(NonParamEnum& x) {} +MAYBE_STATIC void ref(NonParamEnumClass& x) {} +MAYBE_STATIC void ref(HasNonParamStruct& x) {} +MAYBE_STATIC void ref(HasNonParamUnion& x) {} +MAYBE_STATIC void ref(HasNonParamStructUnion& x) {} + +MAYBE_STATIC void constRef(const NonParam& x) {} +MAYBE_STATIC void constRef(const NonParamUnion& x) {} +MAYBE_STATIC void constRef(const NonParamClass& x) {} +MAYBE_STATIC void constRef(const NonParamEnum& x) {} +MAYBE_STATIC void constRef(const NonParamEnumClass& x) {} +MAYBE_STATIC void constRef(const HasNonParamStruct& x) {} +MAYBE_STATIC void constRef(const HasNonParamUnion& x) {} +MAYBE_STATIC void constRef(const HasNonParamStructUnion& x) {} + +MAYBE_STATIC inline void inlineRaw(NonParam x) {} //expected-error {{Type 'NonParam' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} +MAYBE_STATIC inline void inlineRaw(NonParamUnion x) {} //expected-error {{Type 'NonParamUnion' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} +MAYBE_STATIC inline void inlineRaw(NonParamClass x) {} //expected-error {{Type 'NonParamClass' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} +MAYBE_STATIC inline void inlineRaw(NonParamEnum x) {} //expected-error {{Type 'NonParamEnum' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} +MAYBE_STATIC inline void inlineRaw(NonParamEnumClass x) {} //expected-error {{Type 'NonParamEnumClass' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} diff --git a/build/clang-plugin/tests/TestAssertWithAssignment.cpp b/build/clang-plugin/tests/TestAssertWithAssignment.cpp new file mode 100644 index 0000000000..f0f049e4a3 --- /dev/null +++ b/build/clang-plugin/tests/TestAssertWithAssignment.cpp @@ -0,0 +1,68 @@ 
+#include "mozilla/MacroArgs.h" + +static __attribute__((always_inline)) bool MOZ_AssertAssignmentTest(bool expr) { + return expr; +} + +#define MOZ_UNLIKELY(x) (__builtin_expect(!!(x), 0)) +#define MOZ_CRASH() do { } while(0) +#define MOZ_CHECK_ASSERT_ASSIGNMENT(expr) MOZ_AssertAssignmentTest(!!(expr)) + +#define MOZ_ASSERT_HELPER1(expr) \ + do { \ + if (MOZ_UNLIKELY(!MOZ_CHECK_ASSERT_ASSIGNMENT(expr))) { \ + MOZ_CRASH();\ + } \ + } while(0) \ + +/* Now the two-argument form. */ +#define MOZ_ASSERT_HELPER2(expr, explain) \ + do { \ + if (MOZ_UNLIKELY(!MOZ_CHECK_ASSERT_ASSIGNMENT(expr))) { \ + MOZ_CRASH();\ + } \ + } while(0) \ + +#define MOZ_RELEASE_ASSERT_GLUE(a, b) a b +#define MOZ_RELEASE_ASSERT(...) \ + MOZ_RELEASE_ASSERT_GLUE( \ + MOZ_PASTE_PREFIX_AND_ARG_COUNT(MOZ_ASSERT_HELPER, __VA_ARGS__), \ + (__VA_ARGS__)) + +#define MOZ_ASSERT(...) MOZ_RELEASE_ASSERT(__VA_ARGS__) + +void FunctionTest(int p) { + MOZ_ASSERT(p = 1); // expected-error {{Forbidden assignment in assert expression}} +} + +void FunctionTest2(int p) { + MOZ_ASSERT(((p = 1))); // expected-error {{Forbidden assignment in assert expression}} +} + +void FunctionTest3(int p) { + MOZ_ASSERT(p != 3); +} + +class TestOverloading { + int value; +public: + explicit TestOverloading(int _val) : value(_val) {} + // different operators + explicit operator bool() const { return true; } + TestOverloading& operator=(const int _val) { value = _val; return *this; } + + int& GetInt() {return value;} +}; + +void TestOverloadingFunc() { + TestOverloading p(2); + int f; + + MOZ_ASSERT(p); + MOZ_ASSERT(p = 3); // expected-error {{Forbidden assignment in assert expression}} + MOZ_ASSERT(p, "p is not valid"); + MOZ_ASSERT(p = 3, "p different than 3"); // expected-error {{Forbidden assignment in assert expression}} + MOZ_ASSERT(p.GetInt() = 2); // expected-error {{Forbidden assignment in assert expression}} + MOZ_ASSERT(p.GetInt() == 2); + MOZ_ASSERT(p.GetInt() == 2, f = 3); +} diff --git a/build/clang-plugin/tests/TestBadImplicitConversionCtor.cpp b/build/clang-plugin/tests/TestBadImplicitConversionCtor.cpp new file mode 100644 index 0000000000..ca2472582e --- /dev/null +++ b/build/clang-plugin/tests/TestBadImplicitConversionCtor.cpp @@ -0,0 +1,50 @@ +#define MOZ_IMPLICIT __attribute__((annotate("moz_implicit"))) + +struct Foo { + Foo(int); // expected-error {{bad implicit conversion constructor for 'Foo'}} expected-note {{consider adding the explicit keyword to the constructor}} + Foo(int, char=0); // expected-error {{bad implicit conversion constructor for 'Foo'}} expected-note {{consider adding the explicit keyword to the constructor}} + Foo(...); // expected-error {{bad implicit conversion constructor for 'Foo'}} expected-note {{consider adding the explicit keyword to the constructor}} + template + Foo(float); // expected-error {{bad implicit conversion constructor for 'Foo'}} expected-note {{consider adding the explicit keyword to the constructor}} + Foo(int, unsigned); + Foo(Foo&); + Foo(const Foo&); + Foo(volatile Foo&); + Foo(const volatile Foo&); + Foo(Foo&&); + Foo(const Foo&&); + Foo(volatile Foo&&); + Foo(const volatile Foo&&); +}; + +struct Bar { + explicit Bar(int); + explicit Bar(int, char=0); + explicit Bar(...); +}; + +struct Baz { + MOZ_IMPLICIT Baz(int); + MOZ_IMPLICIT Baz(int, char=0); + MOZ_IMPLICIT Baz(...); +}; + +struct Barn { + Barn(int) = delete; + Barn(int, char=0) = delete; + Barn(...) 
= delete; +}; + +struct Abstract { + Abstract(int); + Abstract(int, char=0); + Abstract(...); + virtual void f() = 0; +}; + +template +struct Template { + Template(int); // expected-error {{bad implicit conversion constructor for 'Template'}} expected-note {{consider adding the explicit keyword to the constructor}} + template + Template(float); // expected-error {{bad implicit conversion constructor for 'Template'}} expected-note {{consider adding the explicit keyword to the constructor}} +}; diff --git a/build/clang-plugin/tests/TestCanRunScript.cpp b/build/clang-plugin/tests/TestCanRunScript.cpp new file mode 100644 index 0000000000..01ca514f6c --- /dev/null +++ b/build/clang-plugin/tests/TestCanRunScript.cpp @@ -0,0 +1,621 @@ +#include +#include + +#define MOZ_CAN_RUN_SCRIPT __attribute__((annotate("moz_can_run_script"))) +#define MOZ_CAN_RUN_SCRIPT_BOUNDARY __attribute__((annotate("moz_can_run_script_boundary"))) + +MOZ_CAN_RUN_SCRIPT void test() { + +} + +void test_parent() { // expected-note {{caller function declared here}} + test(); // expected-error {{functions marked as MOZ_CAN_RUN_SCRIPT can only be called from functions also marked as MOZ_CAN_RUN_SCRIPT}} +} + +MOZ_CAN_RUN_SCRIPT void test_parent2() { + test(); +} + +struct RefCountedBase; +MOZ_CAN_RUN_SCRIPT void test2(RefCountedBase* param) { + +} + +struct RefCountedBase { + void AddRef(); + void Release(); + + MOZ_CAN_RUN_SCRIPT void method_test() { + test(); + } + + MOZ_CAN_RUN_SCRIPT void method_test2() { + test2(this); + } + + virtual void method_test3() { // expected-note {{caller function declared here}} + test(); // expected-error {{functions marked as MOZ_CAN_RUN_SCRIPT can only be called from functions also marked as MOZ_CAN_RUN_SCRIPT}} + } + + MOZ_CAN_RUN_SCRIPT void method_test4() { + method_test(); + } + + MOZ_CAN_RUN_SCRIPT void method_test5() { + this->method_test(); + } +}; + +MOZ_CAN_RUN_SCRIPT void testLambda() { + auto doIt = []() MOZ_CAN_RUN_SCRIPT { + test(); + }; + + auto doItWrong = []() { // expected-note {{caller function declared here}} + test(); // expected-error {{functions marked as MOZ_CAN_RUN_SCRIPT can only be called from functions also marked as MOZ_CAN_RUN_SCRIPT}} + }; + + doIt(); + doItWrong(); +} + +void test2_parent() { // expected-note {{caller function declared here}} + test2(new RefCountedBase); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 'new RefCountedBase' is neither.}} \ + // expected-error {{functions marked as MOZ_CAN_RUN_SCRIPT can only be called from functions also marked as MOZ_CAN_RUN_SCRIPT}} +} + +MOZ_CAN_RUN_SCRIPT void test2_parent2() { + test2(new RefCountedBase); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 
'new RefCountedBase' is neither.}} +} + +MOZ_CAN_RUN_SCRIPT void test2_parent3(RefCountedBase* param) { + test2(param); +} + +MOZ_CAN_RUN_SCRIPT void test2_parent4() { + RefPtr refptr = new RefCountedBase; + test2(refptr); +} + +MOZ_CAN_RUN_SCRIPT void test2_parent5() { + test2(MOZ_KnownLive(new RefCountedBase)); +} + +MOZ_CAN_RUN_SCRIPT void test2_parent6() { + RefPtr refptr = new RefCountedBase; + refptr->method_test(); + refptr->method_test2(); +} + +MOZ_CAN_RUN_SCRIPT void test2_parent7() { + RefCountedBase* t = new RefCountedBase; + t->method_test(); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 't' is neither.}} + t->method_test2(); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 't' is neither.}} +} + +MOZ_CAN_RUN_SCRIPT void test2_parent8() { + test2(nullptr); +} + +MOZ_CAN_RUN_SCRIPT void test3(int* param) {} + +MOZ_CAN_RUN_SCRIPT void test3_parent() { + test3(new int); +} + +struct RefCountedChild : public RefCountedBase { + virtual void method_test3() override; // expected-note {{overridden function declared here}} expected-note {{overridden function declared here}} expected-note {{caller function declared here}} +}; + +void RefCountedChild::method_test3() { + test(); // expected-error {{functions marked as MOZ_CAN_RUN_SCRIPT can only be called from functions also marked as MOZ_CAN_RUN_SCRIPT}} +} + +struct RefCountedSubChild : public RefCountedChild { + MOZ_CAN_RUN_SCRIPT void method_test3() override; // expected-error {{functions marked as MOZ_CAN_RUN_SCRIPT cannot override functions that are not marked MOZ_CAN_RUN_SCRIPT}} +}; + +void RefCountedSubChild::method_test3() { // expected-error {{functions marked as MOZ_CAN_RUN_SCRIPT cannot override functions that are not marked MOZ_CAN_RUN_SCRIPT}} + test(); +} + +MOZ_CAN_RUN_SCRIPT void test4() { + RefPtr refptr1 = new RefCountedChild; + refptr1->method_test3(); + + RefPtr refptr2 = new RefCountedSubChild; + refptr2->method_test3(); + + RefPtr refptr3 = new RefCountedSubChild; + refptr3->method_test3(); + + RefPtr refptr4 = new RefCountedSubChild; + refptr4->method_test3(); +} + +MOZ_CAN_RUN_SCRIPT_BOUNDARY void test5() { + RefPtr refptr1 = new RefCountedChild; + refptr1->method_test3(); + + RefPtr refptr2 = new RefCountedSubChild; + refptr2->method_test3(); + + RefPtr refptr3 = new RefCountedSubChild; + refptr3->method_test3(); + + RefPtr refptr4 = new RefCountedSubChild; + refptr4->method_test3(); +} + +// We should be able to call test5 from a non-can_run_script function. +void test5_b() { + test5(); +} + +MOZ_CAN_RUN_SCRIPT void test6() { + void* x = new RefCountedBase(); + test2((RefCountedBase*)x); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 'x' is neither.}} +} + +MOZ_CAN_RUN_SCRIPT void test_ref(const RefCountedBase&) { + +} + +MOZ_CAN_RUN_SCRIPT void test_ref_1() { + RefCountedBase* t = new RefCountedBase; + test_ref(*t); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 
'*t' is neither.}} +} + +MOZ_CAN_RUN_SCRIPT void test_ref_2() { + RefCountedBase* t = new RefCountedBase; + (*t).method_test(); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). '*t' is neither.}} +} + +MOZ_CAN_RUN_SCRIPT void test_ref_3() { + RefCountedBase* t = new RefCountedBase; + auto& ref = *t; + test_ref(ref); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 'ref' is neither.}} +} + +MOZ_CAN_RUN_SCRIPT void test_ref_4() { + RefCountedBase* t = new RefCountedBase; + auto& ref = *t; + ref.method_test(); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 'ref' is neither.}} +} + +MOZ_CAN_RUN_SCRIPT void test_ref_5() { + RefPtr t = new RefCountedBase; + test_ref(*t); +} + +MOZ_CAN_RUN_SCRIPT void test_ref_6() { + RefPtr t = new RefCountedBase; + (*t).method_test(); +} + +MOZ_CAN_RUN_SCRIPT void test_ref_7() { + RefPtr t = new RefCountedBase; + auto& ref = *t; + MOZ_KnownLive(ref).method_test(); +} + +MOZ_CAN_RUN_SCRIPT void test_ref_8() { + RefPtr t = new RefCountedBase; + auto& ref = *t; + test_ref(MOZ_KnownLive(ref)); +} + +MOZ_CAN_RUN_SCRIPT void test_ref_9() { + void* x = new RefCountedBase(); + test_ref(*(RefCountedBase*)x); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). '*(RefCountedBase*)x' is neither.}} +} + +// Ignore warning not related to static analysis here +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wvoid-ptr-dereference" +MOZ_CAN_RUN_SCRIPT void test_ref_10() { + void* x = new RefCountedBase(); + test_ref((RefCountedBase&)*x); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). '*x' is neither.}} +} +#pragma GCC diagnostic pop + +MOZ_CAN_RUN_SCRIPT void test_maybe() { + mozilla::Maybe unsafe; + unsafe.emplace(new RefCountedBase); + (*unsafe)->method_test(); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). '*unsafe' is neither.}} +} + +MOZ_CAN_RUN_SCRIPT void test_maybe_2() { + // FIXME(bz): This should not generate an error! + mozilla::Maybe> safe; + safe.emplace(new RefCountedBase); + (*safe)->method_test(); // expected-error-re {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). '(*safe){{(->)?}}' is neither.}} +} + +MOZ_CAN_RUN_SCRIPT void test_defaults_helper_1(RefCountedBase* arg = nullptr) { +} + +MOZ_CAN_RUN_SCRIPT void test_defaults_1() { + test_defaults_helper_1(); +} + +MOZ_CAN_RUN_SCRIPT void test_defaults_2() { + RefCountedBase* t = new RefCountedBase; + test_defaults_helper_1(t); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 
't' is neither.}} +} + +MOZ_CAN_RUN_SCRIPT void test_defaults_3() { + RefPtr t = new RefCountedBase; + test_defaults_helper_1(t); +} + +MOZ_CAN_RUN_SCRIPT void test_defaults_helper_2(RefCountedBase* arg = new RefCountedBase()) { // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 'new RefCountedBase()' is neither.}} +} + +MOZ_CAN_RUN_SCRIPT void test_defaults_4() { + test_defaults_helper_2(); +} + +MOZ_CAN_RUN_SCRIPT void test_defaults_5() { + RefCountedBase* t = new RefCountedBase; + test_defaults_helper_2(t); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 't' is neither.}} +} + +MOZ_CAN_RUN_SCRIPT void test_defaults_6() { + RefPtr t = new RefCountedBase; + test_defaults_helper_2(t); +} + +MOZ_CAN_RUN_SCRIPT void test_arg_deref_helper(RefCountedBase&) { +} + +MOZ_CAN_RUN_SCRIPT void test_arg_deref(RefCountedBase* arg) { + test_arg_deref_helper(*arg); +} + +struct RefCountedDerefTester : public RefCountedBase { + MOZ_CAN_RUN_SCRIPT void foo() { + test_arg_deref_helper(*this); + } +}; + +struct DisallowMemberArgs { + RefPtr mRefCounted; + MOZ_CAN_RUN_SCRIPT void foo() { + mRefCounted->method_test(); // expected-error-re {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 'mRefCounted{{(->)?}}' is neither.}} + } + MOZ_CAN_RUN_SCRIPT void bar() { + test2(mRefCounted); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 'mRefCounted' is neither.}} + } +}; + +struct DisallowMemberArgsWithGet { + RefPtr mRefCounted; + MOZ_CAN_RUN_SCRIPT void foo() { + mRefCounted.get()->method_test(); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 'mRefCounted.get()' is neither.}} + } + MOZ_CAN_RUN_SCRIPT void bar() { + test2(mRefCounted.get()); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 'mRefCounted.get()' is neither.}} + } +}; + +struct AllowKnownLiveMemberArgs { + RefPtr mRefCounted; + MOZ_CAN_RUN_SCRIPT void foo() { + MOZ_KnownLive(mRefCounted)->method_test(); + } + MOZ_CAN_RUN_SCRIPT void bar() { + test2(MOZ_KnownLive(mRefCounted)); + } +}; + +struct WeakPtrReturner : public RefCountedBase { + RefCountedBase* getWeakPtr() { return new RefCountedBase(); } +}; + +struct DisallowMemberCallsOnRandomKnownLive { + RefPtr mWeakPtrReturner1; + WeakPtrReturner* mWeakPtrReturner2; + + MOZ_CAN_RUN_SCRIPT void test_refptr_method() { + MOZ_KnownLive(mWeakPtrReturner1)->getWeakPtr()->method_test(); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 'MOZ_KnownLive(mWeakPtrReturner1)->getWeakPtr()' is neither.}} + } + + MOZ_CAN_RUN_SCRIPT void test_refptr_function() { + test2(MOZ_KnownLive(mWeakPtrReturner1)->getWeakPtr()); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 
'MOZ_KnownLive(mWeakPtrReturner1)->getWeakPtr()' is neither.}} + } + + MOZ_CAN_RUN_SCRIPT void test_raw_method() { + MOZ_KnownLive(mWeakPtrReturner2)->getWeakPtr()->method_test(); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 'MOZ_KnownLive(mWeakPtrReturner2)->getWeakPtr()' is neither.}} + } + + MOZ_CAN_RUN_SCRIPT void test_raw_function() { + test2(MOZ_KnownLive(mWeakPtrReturner2)->getWeakPtr()); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 'MOZ_KnownLive(mWeakPtrReturner2)->getWeakPtr()' is neither.}} + } +}; + +struct AllowConstMemberArgs { + const RefPtr mRefCounted; + MOZ_CAN_RUN_SCRIPT void foo() { + mRefCounted->method_test(); + } + MOZ_CAN_RUN_SCRIPT void bar() { + test2(mRefCounted); + } +}; + +struct AllowConstMemberArgsWithExplicitThis { + const RefPtr mRefCounted; + MOZ_CAN_RUN_SCRIPT void foo() { + this->mRefCounted->method_test(); + } + MOZ_CAN_RUN_SCRIPT void bar() { + test2(this->mRefCounted); + } +}; + +struct DisallowConstMemberArgsOfMembers { + RefPtr mMember; + MOZ_CAN_RUN_SCRIPT void foo() { + mMember->mRefCounted->method_test(); // expected-error-re {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 'mMember->mRefCounted{{(->)?}}' is neither.}} + } + MOZ_CAN_RUN_SCRIPT void bar() { + test2(mMember->mRefCounted); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 'mMember->mRefCounted' is neither.}} + } +}; + +struct DisallowConstNonRefPtrMemberArgs { + RefCountedBase* const mRefCounted; + MOZ_CAN_RUN_SCRIPT void foo() { + mRefCounted->method_test(); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 'mRefCounted' is neither.}} + } + MOZ_CAN_RUN_SCRIPT void bar() { + test2(mRefCounted); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 'mRefCounted' is neither.}} + } +}; + +MOZ_CAN_RUN_SCRIPT void test_temporary_1() { +#ifdef MOZ_CLANG_PLUGIN_ALPHA + RefPtr(new RefCountedBase())->method_test(); // expected-warning {{performance issue: temporary 'RefPtr' is only dereferenced here once which involves short-lived AddRef/Release calls}} +#else + RefPtr(new RefCountedBase())->method_test(); +#endif +} + +MOZ_CAN_RUN_SCRIPT void test_temporary_2() { + test_ref(*RefPtr(new RefCountedBase())); +} + +struct WeakSmartPtr { + RefCountedBase* member; + + explicit WeakSmartPtr(RefCountedBase* arg) : member(arg) {} + + RefCountedBase* operator->() const { + return member; + } + + RefCountedBase& operator*() const { + return *member; + } + + operator RefCountedBase*() const { + return member; + } +}; + +MOZ_CAN_RUN_SCRIPT void test_temporary_3() { + WeakSmartPtr(new RefCountedBase())->method_test(); // expected-error-re {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 
'WeakSmartPtr(new RefCountedBase()){{(->)?}}' is neither.}} +} + +MOZ_CAN_RUN_SCRIPT void test_temporary_4() { + test_ref(*WeakSmartPtr(new RefCountedBase())); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). '*WeakSmartPtr(new RefCountedBase())' is neither.}} +} + +MOZ_CAN_RUN_SCRIPT void test_temporary_5() { + test2(WeakSmartPtr(new RefCountedBase())); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 'WeakSmartPtr(new RefCountedBase())' is neither.}} +} + + +template +struct TArray { + TArray() { + mArray[0] = new RefCountedBase(); + } + T& operator[](unsigned int index) { return mArray[index]; } + T mArray[1]; +}; + +struct DisallowRawTArrayElement { + TArray mArray; + MOZ_CAN_RUN_SCRIPT void foo() { + mArray[0]->method_test(); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 'mArray[0]' is neither.}} + } + MOZ_CAN_RUN_SCRIPT void bar() { + test2(mArray[0]); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 'mArray[0]' is neither.}} + } +}; + +struct DisallowRefPtrTArrayElement { + TArray> mArray; + MOZ_CAN_RUN_SCRIPT void foo() { + mArray[0]->method_test(); // expected-error-re {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 'mArray[0]{{(->)?}}' is neither.}} + } + MOZ_CAN_RUN_SCRIPT void bar() { + test2(mArray[0]); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 'mArray[0]' is neither.}} + } +}; + +struct AllowConstexprMembers { + static constexpr RefCountedBase* mRefCounted = nullptr; + static constexpr RefCountedBase* mRefCounted2 = nullptr; + MOZ_CAN_RUN_SCRIPT void foo() { + mRefCounted->method_test(); + } + MOZ_CAN_RUN_SCRIPT void bar() { + test2(mRefCounted); + } + MOZ_CAN_RUN_SCRIPT void baz() { + test_ref(*mRefCounted); + } +}; + +MOZ_CAN_RUN_SCRIPT void test_constexpr_1() { + AllowConstexprMembers::mRefCounted->method_test(); +} + +MOZ_CAN_RUN_SCRIPT void test_constexpr_2() { + test2(AllowConstexprMembers::mRefCounted); +} + +MOZ_CAN_RUN_SCRIPT void test_constexpr_3() { + test_ref(*AllowConstexprMembers::mRefCounted); +} + +MOZ_CAN_RUN_SCRIPT void test_ternary_1(RefCountedBase* arg1, RefCountedBase* arg2) { + (arg1 ? arg1 : arg2)->method_test(); +} + +MOZ_CAN_RUN_SCRIPT void test_ternary_2(RefCountedBase* arg1, RefCountedBase* arg2) { + test2(arg1 ? arg1 : arg2); +} + +MOZ_CAN_RUN_SCRIPT void test_ternary_3(RefCountedBase* arg1, RefCountedBase& arg2) { + (arg1 ? *arg1 : arg2).method_test(); +} + +MOZ_CAN_RUN_SCRIPT void test_ternary_4(RefCountedBase* arg1, RefCountedBase& arg2) { + test_ref(arg1 ? *arg1 : arg2); +} + +MOZ_CAN_RUN_SCRIPT void test_ternary_5(RefCountedBase* arg) { + RefPtr local = new RefCountedBase(); + (arg ? arg : local.get())->method_test(); +} + +MOZ_CAN_RUN_SCRIPT void test_ternary_6(RefCountedBase* arg) { + RefPtr local = new RefCountedBase(); + test2(arg ? 
arg : local.get()); +} + +MOZ_CAN_RUN_SCRIPT void test_ternary_7(RefCountedBase* arg) { + RefPtr local = new RefCountedBase(); + (arg ? *arg : *local).method_test(); +} + +MOZ_CAN_RUN_SCRIPT void test_ternary_8(RefCountedBase* arg) { + RefPtr local = new RefCountedBase(); + test_ref(arg ? *arg : *local); +} + +MOZ_CAN_RUN_SCRIPT void test_ternary_9(RefCountedBase* arg) { + (arg ? arg : AllowConstexprMembers::mRefCounted)->method_test(); +} + +MOZ_CAN_RUN_SCRIPT void test_ternary_10(RefCountedBase* arg) { + test2(arg ? arg : AllowConstexprMembers::mRefCounted); +} + +MOZ_CAN_RUN_SCRIPT void test_ternary_11(RefCountedBase* arg) { + (arg ? *arg : *AllowConstexprMembers::mRefCounted).method_test(); +} + +MOZ_CAN_RUN_SCRIPT void test_ternary_12(RefCountedBase* arg) { + test_ref(arg ? *arg : *AllowConstexprMembers::mRefCounted); +} + +MOZ_CAN_RUN_SCRIPT void test_ternary_13(RefCountedBase* arg1, RefCountedBase& arg2) { + (arg1 ? arg1 : &arg2)->method_test(); +} + +MOZ_CAN_RUN_SCRIPT void test_ternary_44(RefCountedBase* arg1, RefCountedBase& arg2) { + test2(arg1 ? arg1 : &arg2); +} + +MOZ_CAN_RUN_SCRIPT void test_ternary_13(bool arg) { + (arg ? + AllowConstexprMembers::mRefCounted : + AllowConstexprMembers::mRefCounted2)->method_test(); +} + +MOZ_CAN_RUN_SCRIPT void test_ternary_14(bool arg) { + test2(arg ? + AllowConstexprMembers::mRefCounted : + AllowConstexprMembers::mRefCounted2); +} + +MOZ_CAN_RUN_SCRIPT void test_ternary_15(bool arg) { + (arg ? + *AllowConstexprMembers::mRefCounted : + *AllowConstexprMembers::mRefCounted2).method_test(); +} + +MOZ_CAN_RUN_SCRIPT void test_ternary_16(bool arg) { + test_ref(arg ? + *AllowConstexprMembers::mRefCounted : + *AllowConstexprMembers::mRefCounted2); +} + +MOZ_CAN_RUN_SCRIPT void test_pointer_to_ref_1(RefCountedBase& arg) { + (&arg)->method_test(); +} + +MOZ_CAN_RUN_SCRIPT void test_pointer_to_ref_2(RefCountedBase& arg) { + test2(&arg); +} + +struct DisallowMemberArgsViaReferenceAlias { + RefPtr mRefCounted; + MOZ_CAN_RUN_SCRIPT void foo() { + RefPtr& bogus = mRefCounted; + bogus->method_test(); // expected-error-re {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 'bogus{{(->)?}}' is neither.}} + } + MOZ_CAN_RUN_SCRIPT void bar() { + RefPtr& bogus = mRefCounted; + test2(bogus); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 'bogus' is neither.}} + } +}; + +struct DisallowMemberArgsViaReferenceAlias2 { + RefPtr mRefCountedArr[2]; + MOZ_CAN_RUN_SCRIPT void foo1() { + for (RefPtr& item : mRefCountedArr) { + item->method_test(); // expected-error-re {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 'item{{(->)?}}' is neither.}} + } + } + MOZ_CAN_RUN_SCRIPT void foo2() { + for (auto& item : mRefCountedArr) { + item->method_test(); // expected-error-re {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 
'item{{(->)?}}' is neither.}} + } + } + MOZ_CAN_RUN_SCRIPT void foo3() { + for (RefPtr item : mRefCountedArr) { + item->method_test(); + } + } + MOZ_CAN_RUN_SCRIPT void foo4() { + for (auto item : mRefCountedArr) { + item->method_test(); + } + } + MOZ_CAN_RUN_SCRIPT void bar1() { + for (RefPtr& item : mRefCountedArr) { + test2(item); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 'item' is neither.}} + } + } + MOZ_CAN_RUN_SCRIPT void bar2() { + for (auto& item : mRefCountedArr) { + test2(item); // expected-error {{arguments must all be strong refs or caller's parameters when calling a function marked as MOZ_CAN_RUN_SCRIPT (including the implicit object argument). 'item' is neither.}} + } + } + MOZ_CAN_RUN_SCRIPT void bar3() { + for (RefPtr item : mRefCountedArr) { + test2(item); + } + } + MOZ_CAN_RUN_SCRIPT void bar4() { + for (auto item : mRefCountedArr) { + test2(item); + } + } +}; diff --git a/build/clang-plugin/tests/TestCustomHeap.cpp b/build/clang-plugin/tests/TestCustomHeap.cpp new file mode 100644 index 0000000000..c1e82f2fa7 --- /dev/null +++ b/build/clang-plugin/tests/TestCustomHeap.cpp @@ -0,0 +1,29 @@ +#define MOZ_NONHEAP_CLASS __attribute__((annotate("moz_nonheap_class"))) +#ifndef MOZ_HEAP_ALLOCATOR +#define MOZ_HEAP_ALLOCATOR \ + _Pragma("GCC diagnostic push") \ + _Pragma("GCC diagnostic ignored \"-Wgcc-compat\"") \ + __attribute__((annotate("moz_heap_allocator"))) \ + _Pragma("GCC diagnostic pop") +#endif + +#include +#include + +struct MOZ_NONHEAP_CLASS X { +}; + +void *operator new(size_t x, int qual) MOZ_HEAP_ALLOCATOR { + return ::operator new(x); +} + +template +T *customAlloc() MOZ_HEAP_ALLOCATOR { + T *arg = static_cast(malloc(sizeof(T))); + return new (arg) T(); +} + +void misuseX() { + X *foo = customAlloc(); // expected-error {{variable of type 'X' is not valid on the heap}} expected-note {{value incorrectly allocated on the heap}} + X *foo2 = new (100) X(); // expected-error {{variable of type 'X' is not valid on the heap}} expected-note {{value incorrectly allocated on the heap}} +} diff --git a/build/clang-plugin/tests/TestDanglingOnTemporary.cpp b/build/clang-plugin/tests/TestDanglingOnTemporary.cpp new file mode 100644 index 0000000000..62a7755ece --- /dev/null +++ b/build/clang-plugin/tests/TestDanglingOnTemporary.cpp @@ -0,0 +1,45 @@ +#define MOZ_NO_DANGLING_ON_TEMPORARIES \ + __attribute__((annotate("moz_no_dangling_on_temporaries"))) + +class AnnotateConflict { + MOZ_NO_DANGLING_ON_TEMPORARIES int *get() && { return nullptr; } // expected-error {{methods annotated with MOZ_NO_DANGLING_ON_TEMPORARIES cannot be && ref-qualified}} + MOZ_NO_DANGLING_ON_TEMPORARIES int test() { return 0; } // expected-error {{methods annotated with MOZ_NO_DANGLING_ON_TEMPORARIES must return a pointer}} +}; + +class NS_ConvertUTF8toUTF16 { +public: + MOZ_NO_DANGLING_ON_TEMPORARIES int *get() { return nullptr; } + operator int*() + { + return get(); // This should be ignored because the call is implcitly on this + } +}; + +NS_ConvertUTF8toUTF16 TemporaryFunction() { return NS_ConvertUTF8toUTF16(); } + +void UndefinedFunction(int* test); + +void NoEscapeFunction(int *test) {} + +int *glob; // expected-note {{through the variable declared here}} +void EscapeFunction1(int *test) { glob = test; } // expected-note {{the raw pointer escapes the function scope here}} + +void EscapeFunction2(int *test, int *&escape) { escape = test; } // expected-note {{the raw 
pointer escapes the function scope here}} \ + expected-note {{through the parameter declared here}} + +int *EscapeFunction3(int *test) { return test; } // expected-note {{the raw pointer escapes the function scope here}} \ + expected-note {{through the return value of the function declared here}} + +int main() { + int *test = TemporaryFunction().get(); // expected-error {{calling `get` on a temporary, potentially allowing use after free of the raw pointer}} + int *test2 = NS_ConvertUTF8toUTF16().get(); // expected-error {{calling `get` on a temporary, potentially allowing use after free of the raw pointer}} + + UndefinedFunction(NS_ConvertUTF8toUTF16().get()); + + NoEscapeFunction(TemporaryFunction().get()); + EscapeFunction1(TemporaryFunction().get()); // expected-error {{calling `get` on a temporary, potentially allowing use after free of the raw pointer}} + + int *escape; + EscapeFunction2(TemporaryFunction().get(), escape); // expected-error {{calling `get` on a temporary, potentially allowing use after free of the raw pointer}} + int *escape2 = EscapeFunction3(TemporaryFunction().get()); // expected-error {{calling `get` on a temporary, potentially allowing use after free of the raw pointer}} +} diff --git a/build/clang-plugin/tests/TestExplicitOperatorBool.cpp b/build/clang-plugin/tests/TestExplicitOperatorBool.cpp new file mode 100644 index 0000000000..bc4b43a7d0 --- /dev/null +++ b/build/clang-plugin/tests/TestExplicitOperatorBool.cpp @@ -0,0 +1,11 @@ +#define MOZ_IMPLICIT __attribute__((annotate("moz_implicit"))) + +struct Bad { + operator bool(); // expected-error {{bad implicit conversion operator for 'Bad'}} expected-note {{consider adding the explicit keyword to 'operator bool'}} +}; +struct Good { + explicit operator bool(); +}; +struct Okay { + MOZ_IMPLICIT operator bool(); +}; diff --git a/build/clang-plugin/tests/TestFopenUsage.cpp b/build/clang-plugin/tests/TestFopenUsage.cpp new file mode 100644 index 0000000000..19a89f88c9 --- /dev/null +++ b/build/clang-plugin/tests/TestFopenUsage.cpp @@ -0,0 +1,50 @@ +#include +#include +#include +#include +#include + +void func_fopen() { + FILE *f1 = fopen("dummy.txt", "rt"); // expected-warning {{Usage of ASCII file functions (here fopen) is forbidden on Windows.}} expected-note {{On Windows executed functions: fopen, fopen_s, open, _open, _sopen, _sopen_s, OpenFile, CreateFileA should never be used due to lossy conversion from UTF8 to ANSI.}} + FILE *f2; + fopen_s(&f2, "dummy.txt", "rt"); // expected-warning {{Usage of ASCII file functions (here fopen_s) is forbidden on Windows.}} expected-note {{On Windows executed functions: fopen, fopen_s, open, _open, _sopen, _sopen_s, OpenFile, CreateFileA should never be used due to lossy conversion from UTF8 to ANSI.}} + + int fh1 = _open("dummy.txt", _O_RDONLY); // expected-warning {{Usage of ASCII file functions (here _open) is forbidden on Windows.}} expected-note {{On Windows executed functions: fopen, fopen_s, open, _open, _sopen, _sopen_s, OpenFile, CreateFileA should never be used due to lossy conversion from UTF8 to ANSI.}} + int fh2 = open("dummy.txt", _O_RDONLY); // expected-warning {{Usage of ASCII file functions (here open) is forbidden on Windows.}} expected-note {{On Windows executed functions: fopen, fopen_s, open, _open, _sopen, _sopen_s, OpenFile, CreateFileA should never be used due to lossy conversion from UTF8 to ANSI.}} + int fh3 = _sopen("dummy.txt", _O_RDONLY, _SH_DENYRW); // expected-warning {{Usage of ASCII file functions (here _sopen) is forbidden on Windows.}} 
expected-note {{On Windows executed functions: fopen, fopen_s, open, _open, _sopen, _sopen_s, OpenFile, CreateFileA should never be used due to lossy conversion from UTF8 to ANSI.}} + int fd4; + errno_t err = _sopen_s(&fd4, "dummy.txt", _O_RDONLY, _SH_DENYRW, 0); // expected-warning {{Usage of ASCII file functions (here _sopen_s) is forbidden on Windows.}} expected-note {{On Windows executed functions: fopen, fopen_s, open, _open, _sopen, _sopen_s, OpenFile, CreateFileA should never be used due to lossy conversion from UTF8 to ANSI.}} + + std::fstream fs1; + fs1.open("dummy.txt"); // expected-warning {{Usage of ASCII file functions (here open) is forbidden on Windows.}} expected-note {{On Windows executed functions: fopen, fopen_s, open, _open, _sopen, _sopen_s, OpenFile, CreateFileA should never be used due to lossy conversion from UTF8 to ANSI.}} + std::ifstream ifs1; + ifs1.open("dummy.txt"); // expected-warning {{Usage of ASCII file functions (here open) is forbidden on Windows.}} expected-note {{On Windows executed functions: fopen, fopen_s, open, _open, _sopen, _sopen_s, OpenFile, CreateFileA should never be used due to lossy conversion from UTF8 to ANSI.}} + std::ofstream ofs1; + ofs1.open("dummy.txt"); // expected-warning {{Usage of ASCII file functions (here open) is forbidden on Windows.}} expected-note {{On Windows executed functions: fopen, fopen_s, open, _open, _sopen, _sopen_s, OpenFile, CreateFileA should never be used due to lossy conversion from UTF8 to ANSI.}} +#ifdef _MSC_VER + std::fstream fs2; + fs2.open(L"dummy.txt"); + std::ifstream ifs2; + ifs2.open(L"dummy.txt"); + std::ofstream ofs2; + ofs2.open(L"dummy.txt"); +#endif + + LPOFSTRUCT buffer; + HFILE hFile1 = OpenFile("dummy.txt", buffer, OF_READ); // expected-warning {{Usage of ASCII file functions (here OpenFile) is forbidden on Windows.}} expected-note {{On Windows executed functions: fopen, fopen_s, open, _open, _sopen, _sopen_s, OpenFile, CreateFileA should never be used due to lossy conversion from UTF8 to ANSI.}} + +#ifndef UNICODE + // CreateFile is just an alias of CreateFileA + LPCSTR buffer2; + HANDLE hFile2 = CreateFile(buffer2, GENERIC_WRITE, 0, NULL, CREATE_NEW, // expected-warning {{Usage of ASCII file functions (here CreateFileA) is forbidden on Windows.}} expected-note {{On Windows executed functions: fopen, fopen_s, open, _open, _sopen, _sopen_s, OpenFile, CreateFileA should never be used due to lossy conversion from UTF8 to ANSI.}} + FILE_ATTRIBUTE_NORMAL, NULL); +#else + // CreateFile is just an alias of CreateFileW and should not be matched + LPCWSTR buffer2; + HANDLE hFile2 = CreateFile(buffer2, GENERIC_WRITE, 0, NULL, CREATE_NEW, + FILE_ATTRIBUTE_NORMAL, NULL); +#endif + LPCSTR buffer3; + HANDLE hFile3 = CreateFileA(buffer3, GENERIC_WRITE, 0, NULL, CREATE_NEW, // expected-warning {{Usage of ASCII file functions (here CreateFileA) is forbidden on Windows.}} expected-note {{On Windows executed functions: fopen, fopen_s, open, _open, _sopen, _sopen_s, OpenFile, CreateFileA should never be used due to lossy conversion from UTF8 to ANSI.}} + FILE_ATTRIBUTE_NORMAL, NULL); +} diff --git a/build/clang-plugin/tests/TestGlobalClass.cpp b/build/clang-plugin/tests/TestGlobalClass.cpp new file mode 100644 index 0000000000..1825b97078 --- /dev/null +++ b/build/clang-plugin/tests/TestGlobalClass.cpp @@ -0,0 +1,52 @@ +#define MOZ_GLOBAL_CLASS __attribute__((annotate("moz_global_class"))) +#include + +struct MOZ_GLOBAL_CLASS Global { + int i; + void *operator new(size_t x) throw() { return 0; } + void 
*operator new(size_t blah, char *buffer) { return buffer; } +}; + +template +struct MOZ_GLOBAL_CLASS TemplateClass { + T i; +}; + +void gobble(void *) { } + +void misuseGlobalClass(int len) { + Global notValid; // expected-error {{variable of type 'Global' only valid as global}} expected-note {{value incorrectly allocated in an automatic variable}} + Global alsoNotValid[2]; // expected-error {{variable of type 'Global [2]' only valid as global}} expected-note {{'Global [2]' is a global type because it is an array of global type 'Global'}} expected-note {{value incorrectly allocated in an automatic variable}} + static Global valid; + static Global alsoValid[2]; + + gobble(¬Valid); + gobble(&valid); + gobble(&alsoValid[0]); + + gobble(new Global); // expected-error {{variable of type 'Global' only valid as global}} expected-note {{value incorrectly allocated on the heap}} + gobble(new Global[10]); // expected-error {{variable of type 'Global' only valid as global}} expected-note {{value incorrectly allocated on the heap}} + gobble(new TemplateClass); // expected-error {{variable of type 'TemplateClass' only valid as global}} expected-note {{value incorrectly allocated on the heap}} + gobble(len <= 5 ? &valid : new Global); // expected-error {{variable of type 'Global' only valid as global}} expected-note {{value incorrectly allocated on the heap}} + + char buffer[sizeof(Global)]; + gobble(new (buffer) Global); +} + +Global valid; +struct RandomClass { + Global nonstaticMember; // expected-note {{'RandomClass' is a global type because member 'nonstaticMember' is a global type 'Global'}} + static Global staticMember; +}; +struct MOZ_GLOBAL_CLASS RandomGlobalClass { + Global nonstaticMember; + static Global staticMember; +}; + +struct BadInherit : Global {}; // expected-note {{'BadInherit' is a global type because it inherits from a global type 'Global'}} +struct MOZ_GLOBAL_CLASS GoodInherit : Global {}; + +void misuseGlobalClassEvenMore(int len) { + BadInherit moreInvalid; // expected-error {{variable of type 'BadInherit' only valid as global}} expected-note {{value incorrectly allocated in an automatic variable}} + RandomClass evenMoreInvalid; // expected-error {{variable of type 'RandomClass' only valid as global}} expected-note {{value incorrectly allocated in an automatic variable}} +} diff --git a/build/clang-plugin/tests/TestHeapClass.cpp b/build/clang-plugin/tests/TestHeapClass.cpp new file mode 100644 index 0000000000..36e7629737 --- /dev/null +++ b/build/clang-plugin/tests/TestHeapClass.cpp @@ -0,0 +1,64 @@ +#define MOZ_HEAP_CLASS __attribute__((annotate("moz_heap_class"))) +#define MOZ_IMPLICIT __attribute__((annotate("moz_implicit"))) + +#include + +struct MOZ_HEAP_CLASS Heap { + int i; + Heap() {} + MOZ_IMPLICIT Heap(int a) {} + Heap(int a, int b) {} + void *operator new(size_t x) throw() { return 0; } + void *operator new(size_t blah, char *buffer) { return buffer; } +}; + +template +struct MOZ_HEAP_CLASS TemplateClass { + T i; +}; + +void gobble(void *) { } + +void gobbleref(const Heap&) { } + +void misuseHeapClass(int len) { + Heap invalid; // expected-error {{variable of type 'Heap' only valid on the heap}} expected-note {{value incorrectly allocated in an automatic variable}} + Heap alsoInvalid[2]; // expected-error {{variable of type 'Heap [2]' only valid on the heap}} expected-note {{value incorrectly allocated in an automatic variable}} expected-note {{'Heap [2]' is a heap type because it is an array of heap type 'Heap'}} + static Heap invalidStatic; // expected-error 
{{variable of type 'Heap' only valid on the heap}} expected-note {{value incorrectly allocated in a global variable}} + static Heap alsoInvalidStatic[2]; // expected-error {{variable of type 'Heap [2]' only valid on the heap}} expected-note {{value incorrectly allocated in a global variable}} expected-note {{'Heap [2]' is a heap type because it is an array of heap type 'Heap'}} + + gobble(&invalid); + gobble(&invalidStatic); + gobble(&alsoInvalid[0]); + + gobbleref(Heap()); // expected-error {{variable of type 'Heap' only valid on the heap}} expected-note {{value incorrectly allocated in a temporary}} + gobbleref(Heap(10, 20)); // expected-error {{variable of type 'Heap' only valid on the heap}} expected-note {{value incorrectly allocated in a temporary}} + gobbleref(Heap(10)); // expected-error {{variable of type 'Heap' only valid on the heap}} expected-note {{value incorrectly allocated in a temporary}} + gobbleref(10); // expected-error {{variable of type 'Heap' only valid on the heap}} expected-note {{value incorrectly allocated in a temporary}} + + gobble(new Heap); + gobble(new Heap[10]); + gobble(new TemplateClass); + gobble(len <= 5 ? &invalid : new Heap); + + char buffer[sizeof(Heap)]; + gobble(new (buffer) Heap); +} + +Heap invalidStatic; // expected-error {{variable of type 'Heap' only valid on the heap}} expected-note {{value incorrectly allocated in a global variable}} +struct RandomClass { + Heap nonstaticMember; // expected-note {{'RandomClass' is a heap type because member 'nonstaticMember' is a heap type 'Heap'}} + static Heap staticMember; // expected-error {{variable of type 'Heap' only valid on the heap}} expected-note {{value incorrectly allocated in a global variable}} +}; +struct MOZ_HEAP_CLASS RandomHeapClass { + Heap nonstaticMember; + static Heap staticMember; // expected-error {{variable of type 'Heap' only valid on the heap}} expected-note {{value incorrectly allocated in a global variable}} +}; + +struct BadInherit : Heap {}; // expected-note {{'BadInherit' is a heap type because it inherits from a heap type 'Heap'}} +struct MOZ_HEAP_CLASS GoodInherit : Heap {}; + +void useStuffWrongly() { + BadInherit i; // expected-error {{variable of type 'BadInherit' only valid on the heap}} expected-note {{value incorrectly allocated in an automatic variable}} + RandomClass r; // expected-error {{variable of type 'RandomClass' only valid on the heap}} expected-note {{value incorrectly allocated in an automatic variable}} +} diff --git a/build/clang-plugin/tests/TestInheritTypeAnnotationsFromTemplateArgs.cpp b/build/clang-plugin/tests/TestInheritTypeAnnotationsFromTemplateArgs.cpp new file mode 100644 index 0000000000..0c04c3b2bd --- /dev/null +++ b/build/clang-plugin/tests/TestInheritTypeAnnotationsFromTemplateArgs.cpp @@ -0,0 +1,46 @@ +#define MOZ_INHERIT_TYPE_ANNOTATIONS_FROM_TEMPLATE_ARGS \ + __attribute__((annotate("moz_inherit_type_annotations_from_template_args"))) +#define MOZ_STACK_CLASS __attribute__((annotate("moz_stack_class"))) +#define MOZ_NON_MEMMOVABLE __attribute__((annotate("moz_non_memmovable"))) +#define MOZ_NEEDS_MEMMOVABLE_TYPE __attribute__((annotate("moz_needs_memmovable_type"))) + +class Normal {}; +class MOZ_STACK_CLASS Stack {}; +class IndirectStack : Stack {}; // expected-note {{'IndirectStack' is a stack type because it inherits from a stack type 'Stack'}} +class ContainsStack { Stack m; }; // expected-note {{'ContainsStack' is a stack type because member 'm' is a stack type 'Stack'}} +class MOZ_NON_MEMMOVABLE Pointery {}; +class 
IndirectPointery : Pointery {}; // expected-note {{'IndirectPointery' is a non-memmove()able type because it inherits from a non-memmove()able type 'Pointery'}} +class ContainsPointery { Pointery m; }; // expected-note {{'ContainsPointery' is a non-memmove()able type because member 'm' is a non-memmove()able type 'Pointery'}} + +template +class MOZ_INHERIT_TYPE_ANNOTATIONS_FROM_TEMPLATE_ARGS Template {}; // expected-note-re 5 {{'Template<{{.*}}>' is a stack type because it has a template argument stack type '{{.*}}'}} expected-note-re 5 {{'Template<{{.*}}>' is a non-memmove()able type because it has a template argument non-memmove()able type '{{.*}}'}} +class IndirectTemplate : Template {}; // expected-note {{'IndirectTemplate' is a stack type because it inherits from a stack type 'Template'}} +class ContainsTemplate { Template m; }; // expected-note {{'ContainsTemplate' is a stack type because member 'm' is a stack type 'Template'}} + +static Template a; // expected-error {{variable of type 'Template' only valid on the stack}} expected-note {{value incorrectly allocated in a global variable}} +static Template b; // expected-error {{variable of type 'Template' only valid on the stack}} expected-note {{value incorrectly allocated in a global variable}} +static Template c; // expected-error {{variable of type 'Template' only valid on the stack}} expected-note {{value incorrectly allocated in a global variable}} +static IndirectTemplate d; // expected-error {{variable of type 'IndirectTemplate' only valid on the stack}} expected-note {{value incorrectly allocated in a global variable}} +static ContainsTemplate e; // expected-error {{variable of type 'ContainsTemplate' only valid on the stack}} expected-note {{value incorrectly allocated in a global variable}} +static Template f; + +template +class MOZ_NEEDS_MEMMOVABLE_TYPE Mover { // expected-error-re 8 {{Cannot instantiate 'Mover<{{.*}}>' with non-memmovable template argument '{{.*}}'}} + char mForceInstantiation[sizeof(T)]; +}; +class IndirectTemplatePointery : Template {}; // expected-note {{'IndirectTemplatePointery' is a non-memmove()able type because it inherits from a non-memmove()able type 'Template'}} +class ContainsTemplatePointery { Template m; }; // expected-note {{'ContainsTemplatePointery' is a non-memmove()able type because member 'm' is a non-memmove()able type 'Template'}} + +static Mover> n; // expected-note-re {{instantiation of 'Mover{{ ?}}>' requested here}} +static Mover> o; // expected-note-re {{instantiation of 'Mover{{ ?}}>' requested here}} +static Mover> p; // expected-note-re {{instantiation of 'Mover{{ ?}}>' requested here}} +static Mover q; // expected-note {{instantiation of 'Mover' requested here}} +static Mover r; // expected-note {{instantiation of 'Mover' requested here}} +static Mover> s; + +template +class MOZ_INHERIT_TYPE_ANNOTATIONS_FROM_TEMPLATE_ARGS ManyTs {}; // expected-note-re 3 {{'ManyTs<{{.*}}>' is a non-memmove()able type because it has a template argument non-memmove()able type '{{.*}}'}} + +static Mover> t; // expected-note-re {{instantiation of 'Mover{{ ?}}>' requested here}} +static Mover> u; // expected-note-re {{instantiation of 'Mover{{ ?}}>' requested here}} +static Mover> v; // expected-note-re {{instantiation of 'Mover{{ ?}}>' requested here}} diff --git a/build/clang-plugin/tests/TestKungFuDeathGrip.cpp b/build/clang-plugin/tests/TestKungFuDeathGrip.cpp new file mode 100644 index 0000000000..0218015807 --- /dev/null +++ b/build/clang-plugin/tests/TestKungFuDeathGrip.cpp @@ -0,0 +1,142 
@@ +#include + +#define MOZ_IMPLICIT __attribute__((annotate("moz_implicit"))) + +template +class already_AddRefed { +public: + already_AddRefed(); + T* mPtr; +}; + +template +class RefPtr { +public: + RefPtr(); + MOZ_IMPLICIT RefPtr(T* aIn); + MOZ_IMPLICIT RefPtr(already_AddRefed aIn); + + RefPtr(const RefPtr& aOther) = default; + RefPtr& operator=(const RefPtr&) = default; + + // We must define non-defaulted move operations as in the real RefPtr to make + // the type non-trivially-copyable. + RefPtr(RefPtr&&); + RefPtr& operator=(RefPtr&&); + + void swap(RefPtr& aOther); + + ~RefPtr(); + T* mPtr; +}; + +template +class nsCOMPtr { +public: + nsCOMPtr(); + MOZ_IMPLICIT nsCOMPtr(T* aIn); + MOZ_IMPLICIT nsCOMPtr(already_AddRefed aIn); + ~nsCOMPtr(); + T* mPtr; +}; + +class Type { +public: + static nsCOMPtr someStaticCOMPtr; + + void f(nsCOMPtr ignoredArgument, Type *param) { + nsCOMPtr never_referenced; + nsCOMPtr kfdg_t1(this); + nsCOMPtr kfdg_t2 = this; + nsCOMPtr kfdg_t3 = (this); + + nsCOMPtr kfdg_m1(p); // expected-error {{Unused "kungFuDeathGrip" 'nsCOMPtr' objects constructed from members are prohibited}} expected-note {{Please switch all accesses to this member to go through 'kfdg_m1', or explicitly pass 'kfdg_m1' to `mozilla::Unused`}} + nsCOMPtr kfdg_m2 = p; // expected-error {{Unused "kungFuDeathGrip" 'nsCOMPtr' objects constructed from members are prohibited}} expected-note {{Please switch all accesses to this member to go through 'kfdg_m2', or explicitly pass 'kfdg_m2' to `mozilla::Unused`}} + nsCOMPtr kfdg_m3(p); + kfdg_m3.mPtr->f(nullptr, nullptr); + nsCOMPtr kfdg_m4 = p; + kfdg_m4.mPtr->f(nullptr, nullptr); + + nsCOMPtr kfdg_a1((already_AddRefed())); + nsCOMPtr kfdg_a2 = already_AddRefed(); + + nsCOMPtr kfdg_p1(param); + nsCOMPtr kfdg_p2 = param; + + + RefPtr never_referenced2; + RefPtr kfdg_t4(this); + RefPtr kfdg_t5 = this; + RefPtr kfdg_t6 = (this); + + RefPtr kfdg_m5(p); // expected-error {{Unused "kungFuDeathGrip" 'RefPtr' objects constructed from members are prohibited}} expected-note {{Please switch all accesses to this member to go through 'kfdg_m5', or explicitly pass 'kfdg_m5' to `mozilla::Unused`}} + RefPtr kfdg_m6 = p; // expected-error {{Unused "kungFuDeathGrip" 'RefPtr' objects constructed from members are prohibited}} expected-note {{Please switch all accesses to this member to go through 'kfdg_m6', or explicitly pass 'kfdg_m6' to `mozilla::Unused`}} + RefPtr kfdg_m7(p); + kfdg_m7.mPtr->f(nullptr, nullptr); + RefPtr kfdg_m8 = p; + kfdg_m8.mPtr->f(nullptr, nullptr); + + RefPtr kfdg_a3((already_AddRefed())); + RefPtr kfdg_a4 = already_AddRefed(); + + RefPtr kfdg_p3(param); + RefPtr kfdg_p4 = param; + } + + Type *p; +}; + +struct Type2 { + void f() { + mWeakRef->f(nullptr, nullptr); + } + + void g() { + RefPtr kfdg; + kfdg.swap(mStrongRef); + f(); + } + + void h() { + RefPtr kfdg = std::move(mStrongRef); + f(); + } + + RefPtr mStrongRef; + Type* mWeakRef; +}; + +void f(nsCOMPtr ignoredArgument, Type *param) { + nsCOMPtr never_referenced; + Type t; + // Type *p = nullptr; + nsCOMPtr kfdg_m1(t.p); // expected-error {{Unused "kungFuDeathGrip" 'nsCOMPtr' objects constructed from members are prohibited}} expected-note {{Please switch all accesses to this member to go through 'kfdg_m1', or explicitly pass 'kfdg_m1' to `mozilla::Unused`}} + nsCOMPtr kfdg_m2 = t.p; // expected-error {{Unused "kungFuDeathGrip" 'nsCOMPtr' objects constructed from members are prohibited}} expected-note {{Please switch all accesses to this member to go through 'kfdg_m2', or explicitly pass 
'kfdg_m2' to `mozilla::Unused`}} + nsCOMPtr kfdg_m3(t.p); + kfdg_m3.mPtr->f(nullptr, nullptr); + nsCOMPtr kfdg_m4 = t.p; + kfdg_m4.mPtr->f(nullptr, nullptr); + + nsCOMPtr kfdg_a1((already_AddRefed())); + nsCOMPtr kfdg_a2 = already_AddRefed(); + + nsCOMPtr kfdg_p1(param); + nsCOMPtr kfdg_p2 = param; + + + RefPtr never_referenced2; + RefPtr kfdg_m5(t.p); // expected-error {{Unused "kungFuDeathGrip" 'RefPtr' objects constructed from members are prohibited}} expected-note {{Please switch all accesses to this member to go through 'kfdg_m5', or explicitly pass 'kfdg_m5' to `mozilla::Unused`}} + RefPtr kfdg_m6 = t.p; // expected-error {{Unused "kungFuDeathGrip" 'RefPtr' objects constructed from members are prohibited}} expected-note {{Please switch all accesses to this member to go through 'kfdg_m6', or explicitly pass 'kfdg_m6' to `mozilla::Unused`}} + RefPtr kfdg_m7(t.p); + kfdg_m7.mPtr->f(nullptr, nullptr); + RefPtr kfdg_m8 = t.p; + kfdg_m8.mPtr->f(nullptr, nullptr); + + RefPtr kfdg_a3((already_AddRefed())); + RefPtr kfdg_a4 = already_AddRefed(); + + RefPtr kfdg_p3(param); + RefPtr kfdg_p4 = param; +} + +nsCOMPtr Type::someStaticCOMPtr(nullptr); diff --git a/build/clang-plugin/tests/TestLoadLibraryUsage.cpp b/build/clang-plugin/tests/TestLoadLibraryUsage.cpp new file mode 100644 index 0000000000..319c9d6b2a --- /dev/null +++ b/build/clang-plugin/tests/TestLoadLibraryUsage.cpp @@ -0,0 +1,20 @@ +#include +#include "prlink.h" + +void Func() { + auto h1 = PR_LoadLibrary(nullptr); // expected-error {{Usage of ASCII file functions (such as PR_LoadLibrary) is forbidden.}} + auto h2 = PR_LoadLibrary("C:\\Some\\Path"); + auto h3 = LoadLibraryA(nullptr); // expected-error {{Usage of ASCII file functions (such as LoadLibraryA) is forbidden.}} + auto h4 = LoadLibraryA("C:\\Some\\Path"); + auto h5 = LoadLibraryExA(nullptr, nullptr, 0); // expected-error {{Usage of ASCII file functions (such as LoadLibraryExA) is forbidden.}} + auto h6 = LoadLibraryExA("C:\\Some\\Path", nullptr, 0); + +#ifndef UNICODE + // LoadLibrary is a defnine for LoadLibraryA + auto h7 = LoadLibrary(nullptr); // expected-error {{Usage of ASCII file functions (such as LoadLibraryA) is forbidden.}} + auto h8 = LoadLibrary("C:\\Some\\Path"); + // LoadLibraryEx is a define for LoadLibraryExA + auto h9 = LoadLibraryEx(nullptr, nullptr, 0); // expected-error {{Usage of ASCII file functions (such as LoadLibraryExA) is forbidden.}} + auto h10 = LoadLibraryEx("C:\\Some\\Path", nullptr, 0); +#endif +} diff --git a/build/clang-plugin/tests/TestMultipleAnnotations.cpp b/build/clang-plugin/tests/TestMultipleAnnotations.cpp new file mode 100644 index 0000000000..aa927259db --- /dev/null +++ b/build/clang-plugin/tests/TestMultipleAnnotations.cpp @@ -0,0 +1,17 @@ +#define MOZ_MUST_USE_TYPE __attribute__((annotate("moz_must_use_type"))) +#define MOZ_STACK_CLASS __attribute__((annotate("moz_stack_class"))) + +class MOZ_MUST_USE_TYPE MOZ_STACK_CLASS TestClass {}; + +TestClass foo; // expected-error {{variable of type 'TestClass' only valid on the stack}} expected-note {{value incorrectly allocated in a global variable}} + +TestClass f() +{ + TestClass bar; + return bar; +} + +void g() +{ + f(); // expected-error {{Unused value of must-use type 'TestClass'}} +} diff --git a/build/clang-plugin/tests/TestMustOverride.cpp b/build/clang-plugin/tests/TestMustOverride.cpp new file mode 100644 index 0000000000..8e053f6c23 --- /dev/null +++ b/build/clang-plugin/tests/TestMustOverride.cpp @@ -0,0 +1,63 @@ +#define MOZ_MUST_OVERRIDE 
__attribute__((annotate("moz_must_override"))) +// Ignore warnings not related to static analysis here +#pragma GCC diagnostic ignored "-Woverloaded-virtual" + +struct S { + virtual void f() MOZ_MUST_OVERRIDE; // expected-note {{function to override is here}} + virtual void g() MOZ_MUST_OVERRIDE; + virtual void h() MOZ_MUST_OVERRIDE; // expected-note {{function to override is here}} +}; +struct C : S { // expected-error {{'C' must override 'f'}} expected-error {{'C' must override 'h'}} + virtual void g() MOZ_MUST_OVERRIDE; // expected-note {{function to override is here}} + virtual void h(int); + void q() MOZ_MUST_OVERRIDE; // expected-note {{function to override is here}} +}; +struct D : C { // expected-error {{'D' must override 'g'}} expected-error {{'D' must override 'q'}} + virtual void f(); +}; + +struct Base { + virtual void VirtMethod() MOZ_MUST_OVERRIDE; // expected-note {{function to override is here}} + void NonVirtMethod() MOZ_MUST_OVERRIDE; // expected-note {{function to override is here}} + static void StaticMethod() MOZ_MUST_OVERRIDE; +}; + +struct DoesNotPropagate : Base { + virtual void VirtMethod(); + void NonVirtMethod(); + static void StaticMethod(); +}; + +struct Final : DoesNotPropagate { }; + +struct Propagates : Base { + virtual void VirtMethod() MOZ_MUST_OVERRIDE; // expected-note {{function to override is here}} + void NonVirtMethod() MOZ_MUST_OVERRIDE; // expected-note {{function to override is here}} + static void StaticMethod() MOZ_MUST_OVERRIDE; // expected-note {{function to override is here}} +}; + +struct FailsFinal : Propagates { }; // expected-error {{'FailsFinal' must override 'VirtMethod'}} expected-error {{'FailsFinal' must override 'NonVirtMethod'}} expected-error {{'FailsFinal' must override 'StaticMethod'}} + +struct WrongOverload : Base { // expected-error {{'WrongOverload' must override 'VirtMethod'}} expected-error {{'WrongOverload' must override 'NonVirtMethod'}} + virtual void VirtMethod() const; + void NonVirtMethod(int param); + static void StaticMethod(); +}; + +namespace A { namespace B { namespace C { + struct Param {}; + struct Base { + void f(Param p) MOZ_MUST_OVERRIDE; // expected-note {{function to override is here}} + }; +}}} + +struct Param {}; + +struct Derived : A::B::C::Base { + typedef A::B::C::Param Typedef; + void f(Typedef t); +}; + +struct BadDerived : A::B::C::Base { // expected-error {{'BadDerived' must override 'f'}} + void f(Param p); +}; diff --git a/build/clang-plugin/tests/TestMustReturnFromCaller.cpp b/build/clang-plugin/tests/TestMustReturnFromCaller.cpp new file mode 100644 index 0000000000..c935be3cf8 --- /dev/null +++ b/build/clang-plugin/tests/TestMustReturnFromCaller.cpp @@ -0,0 +1,270 @@ +#include +#include + +#define MOZ_MUST_RETURN_FROM_CALLER_IF_THIS_IS_ARG __attribute__((annotate("moz_must_return_from_caller_if_this_is_arg"))) +#define MOZ_MAY_CALL_AFTER_MUST_RETURN __attribute__((annotate("moz_may_call_after_must_return"))) + +struct Thrower { + void MOZ_MUST_RETURN_FROM_CALLER_IF_THIS_IS_ARG Throw() {} +}; + +void DoAnythingElse(); +int MakeAnInt(); +int MOZ_MAY_CALL_AFTER_MUST_RETURN SafeMakeInt(); +bool Condition(); + +// It might be nicer to #include "mozilla/ScopeExit.h" and use that here -- but +// doing so also will #define the two attribute-macros defined above, running a +// risk of redefinition errors. Just stick to the normal clang-plugin test +// style and use as little external code as possible. 
+ +template +class ScopeExit { + Func exitFunction; + bool callOnDestruction; +public: + explicit ScopeExit(Func&& func) + : exitFunction(std::move(func)) + , callOnDestruction(true) + {} + + ~ScopeExit() { + if (callOnDestruction) { + exitFunction(); + } + } + + void release() { callOnDestruction = false; } +}; + +template +ScopeExit +MakeScopeExit(ExitFunction&& func) +{ + return ScopeExit(std::move(func)); +} + +class Foo { +public: + __attribute__((annotate("moz_implicit"))) Foo(std::nullptr_t); + Foo(); +}; + +void a1(Thrower& thrower) { + thrower.Throw(); +} + +int a2(Thrower& thrower) { + thrower.Throw(); // expected-error {{You must immediately return after calling this function}} + return MakeAnInt(); +} + +int a3(Thrower& thrower) { + // RAII operations happening after a must-immediately-return are fine. + auto atExit = MakeScopeExit([] { DoAnythingElse(); }); + thrower.Throw(); + return 5; +} + +int a4(Thrower& thrower) { + thrower.Throw(); // expected-error {{You must immediately return after calling this function}} + return Condition() ? MakeAnInt() : MakeAnInt(); +} + +void a5(Thrower& thrower) { + thrower.Throw(); // expected-error {{You must immediately return after calling this function}} + DoAnythingElse(); +} + +int a6(Thrower& thrower) { + thrower.Throw(); // expected-error {{You must immediately return after calling this function}} + DoAnythingElse(); + return MakeAnInt(); +} + +int a7(Thrower& thrower) { + thrower.Throw(); // expected-error {{You must immediately return after calling this function}} + DoAnythingElse(); + return Condition() ? MakeAnInt() : MakeAnInt(); +} + +int a8(Thrower& thrower) { + thrower.Throw(); + return SafeMakeInt(); +} + +int a9(Thrower& thrower) { + if (Condition()) { + thrower.Throw(); + } + return SafeMakeInt(); +} + +int a10(Thrower& thrower) { + auto atExit = MakeScopeExit([] { DoAnythingElse(); }); + + if (Condition()) { + thrower.Throw(); + return SafeMakeInt(); + } + + atExit.release(); + DoAnythingElse(); + return 5; +} + +void b1(Thrower& thrower) { + if (Condition()) { + thrower.Throw(); + } +} + +int b2(Thrower& thrower) { + if (Condition()) { + thrower.Throw(); // expected-error {{You must immediately return after calling this function}} + } + return MakeAnInt(); +} + +int b3(Thrower& thrower) { + if (Condition()) { + thrower.Throw(); + } + return 5; +} + +// Explicit test in orer to also verify the `UnaryOperator` node in the `CFG` +int b3a(Thrower& thrower) { + if (Condition()) { + thrower.Throw(); + } + return -1; +} + +float b3b(Thrower& thrower) { + if (Condition()) { + thrower.Throw(); + } + return 1.0f; +} + +bool b3c(Thrower& thrower) { + if (Condition()) { + thrower.Throw(); + } + return false; +} + +int b4(Thrower& thrower) { + if (Condition()) { + thrower.Throw(); // expected-error {{You must immediately return after calling this function}} + } + return Condition() ? 
MakeAnInt() : MakeAnInt(); +} + +void b5(Thrower& thrower) { + if (Condition()) { + thrower.Throw(); // expected-error {{You must immediately return after calling this function}} + } + DoAnythingElse(); +} + +void b6(Thrower& thrower) { + if (Condition()) { + thrower.Throw(); // expected-error {{You must immediately return after calling this function}} + DoAnythingElse(); + } +} + +void b7(Thrower& thrower) { + if (Condition()) { + thrower.Throw(); + return; + } + DoAnythingElse(); +} + +void b8(Thrower& thrower) { + if (Condition()) { + thrower.Throw(); // expected-error {{You must immediately return after calling this function}} + DoAnythingElse(); + return; + } + DoAnythingElse(); +} + +void b9(Thrower& thrower) { + while (Condition()) { + thrower.Throw(); // expected-error {{You must immediately return after calling this function}} + } +} + +void b10(Thrower& thrower) { + while (Condition()) { + thrower.Throw(); + return; + } +} + +void b11(Thrower& thrower) { + thrower.Throw(); // expected-error {{You must immediately return after calling this function}} + if (Condition()) { + return; + } else { + return; + } +} + +void b12(Thrower& thrower) { + switch (MakeAnInt()) { + case 1: + break; + default: + thrower.Throw(); + return; + } +} + +void b13(Thrower& thrower) { + if (Condition()) { + thrower.Throw(); + } + return; +} + +Foo b14(Thrower& thrower) { + if (Condition()) { + thrower.Throw(); + return nullptr; + } + return nullptr; +} + +Foo b15(Thrower& thrower) { + if (Condition()) { + thrower.Throw(); + } + return nullptr; +} + +Foo b16(Thrower& thrower) { + if (Condition()) { + thrower.Throw(); + } + return Foo(); +} + +void c1() { + Thrower thrower; + thrower.Throw(); + DoAnythingElse(); // Should be allowed, since our thrower is not an arg +} + +class TestRet { + TestRet *b13(Thrower &thrower) { + if (Condition()) { + thrower.Throw(); + } + return this; + } +}; diff --git a/build/clang-plugin/tests/TestMustUse.cpp b/build/clang-plugin/tests/TestMustUse.cpp new file mode 100644 index 0000000000..7878a4cde5 --- /dev/null +++ b/build/clang-plugin/tests/TestMustUse.cpp @@ -0,0 +1,201 @@ +#define MOZ_MUST_USE_TYPE __attribute__((annotate("moz_must_use_type"))) + +struct Temporary { ~Temporary(); }; +class MOZ_MUST_USE_TYPE MustUse {}; +class MayUse {}; + +MustUse producesMustUse(); +MustUse *producesMustUsePointer(); +MustUse &producesMustUseRef(); + +MustUse producesMustUse(const Temporary& t); +MustUse *producesMustUsePointer(const Temporary& t); +MustUse &producesMustUseRef(const Temporary& t); + +MayUse producesMayUse(); +MayUse *producesMayUsePointer(); +MayUse &producesMayUseRef(); + +void use(MustUse*); +void use(MustUse&); +void use(MustUse&&); +void use(MayUse*); +void use(MayUse&); +void use(MayUse&&); +void use(bool); + +void foo() { + MustUse u; + + producesMustUse(); // expected-error {{Unused value of must-use type 'MustUse'}} + producesMustUsePointer(); + producesMustUseRef(); // expected-error {{Unused value of must-use type 'MustUse'}} + producesMayUse(); + producesMayUsePointer(); + producesMayUseRef(); + u = producesMustUse(); + u = producesMustUse(Temporary()); + { + producesMustUse(); // expected-error {{Unused value of must-use type 'MustUse'}} + producesMustUsePointer(); + producesMustUseRef(); // expected-error {{Unused value of must-use type 'MustUse'}} + producesMayUse(); + producesMayUsePointer(); + producesMayUseRef(); + u = producesMustUse(); + u = producesMustUse(Temporary()); + } + if (true) { + producesMustUse(); // expected-error {{Unused value of 
must-use type 'MustUse'}} + producesMustUsePointer(); + producesMustUseRef(); // expected-error {{Unused value of must-use type 'MustUse'}} + producesMayUse(); + producesMayUsePointer(); + producesMayUseRef(); + u = producesMustUse(); + u = producesMustUse(Temporary()); + } else { + producesMustUse(); // expected-error {{Unused value of must-use type 'MustUse'}} + producesMustUsePointer(); + producesMustUseRef(); // expected-error {{Unused value of must-use type 'MustUse'}} + producesMayUse(); + producesMayUsePointer(); + producesMayUseRef(); + u = producesMustUse(); + u = producesMustUse(Temporary()); + } + + if(true) producesMustUse(); // expected-error {{Unused value of must-use type 'MustUse'}} + else producesMustUse(); // expected-error {{Unused value of must-use type 'MustUse'}} + if(true) producesMustUsePointer(); + else producesMustUsePointer(); + if(true) producesMustUseRef(); // expected-error {{Unused value of must-use type 'MustUse'}} + else producesMustUseRef(); // expected-error {{Unused value of must-use type 'MustUse'}} + if(true) producesMayUse(); + else producesMayUse(); + if(true) producesMayUsePointer(); + else producesMayUsePointer(); + if(true) producesMayUseRef(); + else producesMayUseRef(); + if(true) u = producesMustUse(); + else u = producesMustUse(); + if(true) u = producesMustUse(Temporary()); + else u = producesMustUse(Temporary()); + + while (true) producesMustUse(); // expected-error {{Unused value of must-use type 'MustUse'}} + while (true) producesMustUsePointer(); + while (true) producesMustUseRef(); // expected-error {{Unused value of must-use type 'MustUse'}} + while (true) producesMayUse(); + while (true) producesMayUsePointer(); + while (true) producesMayUseRef(); + while (true) u = producesMustUse(); + while (true) u = producesMustUse(Temporary()); + + do producesMustUse(); // expected-error {{Unused value of must-use type 'MustUse'}} + while (true); + do producesMustUsePointer(); + while (true); + do producesMustUseRef(); // expected-error {{Unused value of must-use type 'MustUse'}} + while (true); + do producesMayUse(); + while (true); + do producesMayUsePointer(); + while (true); + do producesMayUseRef(); + while (true); + do u = producesMustUse(); + while (true); + do u = producesMustUse(Temporary()); + while (true); + + for (;;) producesMustUse(); // expected-error {{Unused value of must-use type 'MustUse'}} + for (;;) producesMustUsePointer(); + for (;;) producesMustUseRef(); // expected-error {{Unused value of must-use type 'MustUse'}} + for (;;) producesMayUse(); + for (;;) producesMayUsePointer(); + for (;;) producesMayUseRef(); + for (;;) u = producesMustUse(); + for (;;) u = producesMustUse(Temporary()); + + for (producesMustUse();;); // expected-error {{Unused value of must-use type 'MustUse'}} + for (producesMustUsePointer();;); + for (producesMustUseRef();;); // expected-error {{Unused value of must-use type 'MustUse'}} + for (producesMayUse();;); + for (producesMayUsePointer();;); + for (producesMayUseRef();;); + for (u = producesMustUse();;); + for (u = producesMustUse(Temporary());;); + + for (;;producesMustUse()); // expected-error {{Unused value of must-use type 'MustUse'}} + for (;;producesMustUsePointer()); + for (;;producesMustUseRef()); // expected-error {{Unused value of must-use type 'MustUse'}} + for (;;producesMayUse()); + for (;;producesMayUsePointer()); + for (;;producesMayUseRef()); + for (;;u = producesMustUse()); + for (;;u = producesMustUse(Temporary())); + + use((producesMustUse(), false)); // expected-error {{Unused 
value of must-use type 'MustUse'}} + use((producesMustUsePointer(), false)); + use((producesMustUseRef(), false)); // expected-error {{Unused value of must-use type 'MustUse'}} + use((producesMayUse(), false)); + use((producesMayUsePointer(), false)); + use((producesMayUseRef(), false)); + use((u = producesMustUse(), false)); + use((u = producesMustUse(Temporary()), false)); + + switch (1) { + case 1: + producesMustUse(); // expected-error {{Unused value of must-use type 'MustUse'}} + producesMustUsePointer(); + producesMustUseRef(); // expected-error {{Unused value of must-use type 'MustUse'}} + producesMayUse(); + producesMayUsePointer(); + producesMayUseRef(); + u = producesMustUse(); + u = producesMustUse(Temporary()); + case 2: + producesMustUse(); // expected-error {{Unused value of must-use type 'MustUse'}} + case 3: + producesMustUsePointer(); + case 4: + producesMustUseRef(); // expected-error {{Unused value of must-use type 'MustUse'}} + case 5: + producesMayUse(); + case 6: + producesMayUsePointer(); + case 7: + producesMayUseRef(); + case 8: + u = producesMustUse(); + case 9: + u = producesMustUse(Temporary()); + default: + producesMustUse(); // expected-error {{Unused value of must-use type 'MustUse'}} + producesMustUsePointer(); + producesMustUseRef(); // expected-error {{Unused value of must-use type 'MustUse'}} + producesMayUse(); + producesMayUsePointer(); + producesMayUseRef(); + u = producesMustUse(); + u = producesMustUse(Temporary()); + } + + use(producesMustUse()); + use(producesMustUsePointer()); + use(producesMustUseRef()); + use(producesMayUse()); + use(producesMayUsePointer()); + use(producesMayUseRef()); + use(u = producesMustUse()); + use(u = producesMustUse(Temporary())); + + MustUse a = producesMustUse(); + MustUse *b = producesMustUsePointer(); + MustUse &c = producesMustUseRef(); + MayUse d = producesMayUse(); + MayUse *e = producesMayUsePointer(); + MayUse &f = producesMayUseRef(); + MustUse g = u = producesMustUse(); + MustUse h = u = producesMustUse(Temporary()); +} diff --git a/build/clang-plugin/tests/TestNANTestingExpr.cpp b/build/clang-plugin/tests/TestNANTestingExpr.cpp new file mode 100644 index 0000000000..aee4532742 --- /dev/null +++ b/build/clang-plugin/tests/TestNANTestingExpr.cpp @@ -0,0 +1,24 @@ +#line 1 "tests/SkScalar.h" +// This checks that the whitelist accounts for #line directives and such. If you +// remove SkScalar from the whitelist, please change the filename here instead +// of adding expected diagnostics. 
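As context for the diagnostics exercised in this test: the checker flags floating-point self-comparisons used as NaN tests, and its note recommends mozilla::IsNaN as the replacement. The short standalone sketch below contrasts the two patterns; it uses std::isnan from <cmath> as a stand-in for the Mozilla helper, since mozilla/FloatingPoint.h is not part of this minimal test fixture (that substitution is an assumption for illustration, not part of the patch).

#include <cmath>

// The pattern the plugin diagnoses: a value compared against itself to detect
// NaN. It works because NaN is the only value that compares unequal to itself,
// but the plugin warns it can lead to incorrect results and is easy to misread.
bool isNaNViaSelfCompare(double v) {
  return v != v;
}

// The preferred shape: call a dedicated NaN predicate. std::isnan stands in
// here for mozilla::IsNaN, which the plugin's note suggests.
bool isNaNViaHelper(double v) {
  return std::isnan(v);
}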
+inline int headerSays(double x) { + return x != x; +} +#line 9 "TestNANTestingExpr.cpp" +void test(bool x); +void foo() { + float f, f2; + typedef double mydouble; + mydouble d; + double d2; + test(f == f); // expected-error{{comparing a floating point value to itself for NaN checking can lead to incorrect results}} expected-note{{consider using mozilla::IsNaN instead}} + test(d == d); // expected-error{{comparing a floating point value to itself for NaN checking can lead to incorrect results}} expected-note{{consider using mozilla::IsNaN instead}} + test(f != f); // expected-error{{comparing a floating point value to itself for NaN checking can lead to incorrect results}} expected-note{{consider using mozilla::IsNaN instead}} + test(d != d); // expected-error{{comparing a floating point value to itself for NaN checking can lead to incorrect results}} expected-note{{consider using mozilla::IsNaN instead}} + test(f != d); + test(d == (d - f)); + test(f == f2); + test(d == d2); + test(d + 1 == d); +} diff --git a/build/clang-plugin/tests/TestNANTestingExprC.c b/build/clang-plugin/tests/TestNANTestingExprC.c new file mode 100644 index 0000000000..ab2fead22a --- /dev/null +++ b/build/clang-plugin/tests/TestNANTestingExprC.c @@ -0,0 +1,17 @@ +/* expected-no-diagnostics */ +void test(int x); +void foo() { + float f, f2; + typedef double mydouble; + mydouble d; + double d2; + test(f == f); + test(d == d); + test(f != f); + test(d != d); + test(f != d); + test(d == (d - f)); + test(f == f2); + test(d == d2); + test(d + 1 == d); +} diff --git a/build/clang-plugin/tests/TestNeedsNoVTableType.cpp b/build/clang-plugin/tests/TestNeedsNoVTableType.cpp new file mode 100644 index 0000000000..9b7c405d80 --- /dev/null +++ b/build/clang-plugin/tests/TestNeedsNoVTableType.cpp @@ -0,0 +1,94 @@ +#define MOZ_NEEDS_NO_VTABLE_TYPE __attribute__((annotate("moz_needs_no_vtable_type"))) + +template +struct MOZ_NEEDS_NO_VTABLE_TYPE PickyConsumer { // expected-error {{'PickyConsumer' cannot be instantiated because 'B' has a VTable}} expected-error {{'PickyConsumer' cannot be instantiated because 'E' has a VTable}} expected-error {{'PickyConsumer' cannot be instantiated because 'F' has a VTable}} expected-error {{'PickyConsumer' cannot be instantiated because 'G' has a VTable}} + T *m; +}; + +template +struct MOZ_NEEDS_NO_VTABLE_TYPE PickyConsumer_A { // expected-error {{'PickyConsumer_A' cannot be instantiated because 'B' has a VTable}} expected-error {{'PickyConsumer_A' cannot be instantiated because 'E' has a VTable}} expected-error {{'PickyConsumer_A' cannot be instantiated because 'F' has a VTable}} expected-error {{'PickyConsumer_A' cannot be instantiated because 'G' has a VTable}} + T *m; +}; +template +struct PickyConsumerWrapper { + PickyConsumer_A m; // expected-note {{bad instantiation of 'PickyConsumer_A' requested here}} expected-note {{bad instantiation of 'PickyConsumer_A' requested here}} expected-note {{bad instantiation of 'PickyConsumer_A' requested here}} expected-note {{bad instantiation of 'PickyConsumer_A' requested here}} +}; + +template +struct MOZ_NEEDS_NO_VTABLE_TYPE PickyConsumer_B { // expected-error {{'PickyConsumer_B' cannot be instantiated because 'B' has a VTable}} expected-error {{'PickyConsumer_B' cannot be instantiated because 'E' has a VTable}} expected-error {{'PickyConsumer_B' cannot be instantiated because 'F' has a VTable}} expected-error {{'PickyConsumer_B' cannot be instantiated because 'G' has a VTable}} + T *m; +}; +template +struct PickyConsumerSubclass : PickyConsumer_B 
{}; // expected-note {{bad instantiation of 'PickyConsumer_B' requested here}} expected-note {{bad instantiation of 'PickyConsumer_B' requested here}} expected-note {{bad instantiation of 'PickyConsumer_B' requested here}} expected-note {{bad instantiation of 'PickyConsumer_B' requested here}} + +template +struct NonPickyConsumer { + T *m; +}; + +struct A {}; +struct B : virtual A {}; +struct C : A {}; +struct D { + void d(); +}; +struct E { + virtual void e(); +}; +struct F : E { + void e() final; +}; +struct G { + virtual void e() = 0; +}; + +void f() { + { + PickyConsumer a1; + PickyConsumerWrapper a2; + PickyConsumerSubclass a3; + NonPickyConsumer a4; + } + + { + PickyConsumer a1; // expected-note {{bad instantiation of 'PickyConsumer' requested here}} + PickyConsumerWrapper a2; + PickyConsumerSubclass a3; + NonPickyConsumer a4; + } + + { + PickyConsumer a1; + PickyConsumerWrapper a2; + PickyConsumerSubclass a3; + NonPickyConsumer a4; + } + + { + PickyConsumer a1; + PickyConsumerWrapper a2; + PickyConsumerSubclass a3; + NonPickyConsumer a4; + } + + { + PickyConsumer a1; // expected-note {{bad instantiation of 'PickyConsumer' requested here}} + PickyConsumerWrapper a2; + PickyConsumerSubclass a3; + NonPickyConsumer a4; + } + + { + PickyConsumer a1; // expected-note {{bad instantiation of 'PickyConsumer' requested here}} + PickyConsumerWrapper a2; + PickyConsumerSubclass a3; + NonPickyConsumer a4; + } + + { + PickyConsumer a1; // expected-note {{bad instantiation of 'PickyConsumer' requested here}} + PickyConsumerWrapper a2; + PickyConsumerSubclass a3; + NonPickyConsumer a4; + } +} diff --git a/build/clang-plugin/tests/TestNoAddRefReleaseOnReturn.cpp b/build/clang-plugin/tests/TestNoAddRefReleaseOnReturn.cpp new file mode 100644 index 0000000000..2e1f83377e --- /dev/null +++ b/build/clang-plugin/tests/TestNoAddRefReleaseOnReturn.cpp @@ -0,0 +1,110 @@ +#define MOZ_NO_ADDREF_RELEASE_ON_RETURN __attribute__((annotate("moz_no_addref_release_on_return"))) + +struct Test { + void AddRef(); + void Release(); + void foo(); +}; + +struct TestD : Test {}; + +struct S { + Test* f() MOZ_NO_ADDREF_RELEASE_ON_RETURN; + Test& g() MOZ_NO_ADDREF_RELEASE_ON_RETURN; + Test h() MOZ_NO_ADDREF_RELEASE_ON_RETURN; +}; + +struct SD { + TestD* f() MOZ_NO_ADDREF_RELEASE_ON_RETURN; + TestD& g() MOZ_NO_ADDREF_RELEASE_ON_RETURN; + TestD h() MOZ_NO_ADDREF_RELEASE_ON_RETURN; +}; + +template +struct X { + T* f() MOZ_NO_ADDREF_RELEASE_ON_RETURN; + T& g() MOZ_NO_ADDREF_RELEASE_ON_RETURN; + T h() MOZ_NO_ADDREF_RELEASE_ON_RETURN; +}; + +template +struct SP { + T* operator->() MOZ_NO_ADDREF_RELEASE_ON_RETURN; +}; + +Test* f() MOZ_NO_ADDREF_RELEASE_ON_RETURN; +Test& g() MOZ_NO_ADDREF_RELEASE_ON_RETURN; +Test h() MOZ_NO_ADDREF_RELEASE_ON_RETURN; + +TestD* fd() MOZ_NO_ADDREF_RELEASE_ON_RETURN; +TestD& gd() MOZ_NO_ADDREF_RELEASE_ON_RETURN; +TestD hd() MOZ_NO_ADDREF_RELEASE_ON_RETURN; + +void test() { + S s; + s.f()->AddRef(); // expected-error{{'AddRef' cannot be called on the return value of 'f'}} + s.f()->Release(); // expected-error{{'Release' cannot be called on the return value of 'f'}} + s.f()->foo(); + s.g().AddRef(); // expected-error{{'AddRef' cannot be called on the return value of 'g'}} + s.g().Release(); // expected-error{{'Release' cannot be called on the return value of 'g'}} + s.g().foo(); + s.h().AddRef(); // expected-error{{'AddRef' cannot be called on the return value of 'h'}} + s.h().Release(); // expected-error{{'Release' cannot be called on the return value of 'h'}} + s.h().foo(); + SD sd; + 
sd.f()->AddRef(); // expected-error{{'AddRef' cannot be called on the return value of 'f'}} + sd.f()->Release(); // expected-error{{'Release' cannot be called on the return value of 'f'}} + sd.f()->foo(); + sd.g().AddRef(); // expected-error{{'AddRef' cannot be called on the return value of 'g'}} + sd.g().Release(); // expected-error{{'Release' cannot be called on the return value of 'g'}} + sd.g().foo(); + sd.h().AddRef(); // expected-error{{'AddRef' cannot be called on the return value of 'h'}} + sd.h().Release(); // expected-error{{'Release' cannot be called on the return value of 'h'}} + sd.h().foo(); + X x; + x.f()->AddRef(); // expected-error{{'AddRef' cannot be called on the return value of 'f'}} + x.f()->Release(); // expected-error{{'Release' cannot be called on the return value of 'f'}} + x.f()->foo(); + x.g().AddRef(); // expected-error{{'AddRef' cannot be called on the return value of 'g'}} + x.g().Release(); // expected-error{{'Release' cannot be called on the return value of 'g'}} + x.g().foo(); + x.h().AddRef(); // expected-error{{'AddRef' cannot be called on the return value of 'h'}} + x.h().Release(); // expected-error{{'Release' cannot be called on the return value of 'h'}} + x.h().foo(); + X xd; + xd.f()->AddRef(); // expected-error{{'AddRef' cannot be called on the return value of 'f'}} + xd.f()->Release(); // expected-error{{'Release' cannot be called on the return value of 'f'}} + xd.f()->foo(); + xd.g().AddRef(); // expected-error{{'AddRef' cannot be called on the return value of 'g'}} + xd.g().Release(); // expected-error{{'Release' cannot be called on the return value of 'g'}} + xd.g().foo(); + xd.h().AddRef(); // expected-error{{'AddRef' cannot be called on the return value of 'h'}} + xd.h().Release(); // expected-error{{'Release' cannot be called on the return value of 'h'}} + xd.h().foo(); + SP sp; + sp->AddRef(); // expected-error{{'AddRef' cannot be called on the return value of 'operator->'}} + sp->Release(); // expected-error{{'Release' cannot be called on the return value of 'operator->'}} + sp->foo(); + SP spd; + spd->AddRef(); // expected-error{{'AddRef' cannot be called on the return value of 'operator->'}} + spd->Release(); // expected-error{{'Release' cannot be called on the return value of 'operator->'}} + spd->foo(); + f()->AddRef(); // expected-error{{'AddRef' cannot be called on the return value of 'f'}} + f()->Release(); // expected-error{{'Release' cannot be called on the return value of 'f'}} + f()->foo(); + g().AddRef(); // expected-error{{'AddRef' cannot be called on the return value of 'g'}} + g().Release(); // expected-error{{'Release' cannot be called on the return value of 'g'}} + g().foo(); + h().AddRef(); // expected-error{{'AddRef' cannot be called on the return value of 'h'}} + h().Release(); // expected-error{{'Release' cannot be called on the return value of 'h'}} + h().foo(); + fd()->AddRef(); // expected-error{{'AddRef' cannot be called on the return value of 'fd'}} + fd()->Release(); // expected-error{{'Release' cannot be called on the return value of 'fd'}} + fd()->foo(); + gd().AddRef(); // expected-error{{'AddRef' cannot be called on the return value of 'gd'}} + gd().Release(); // expected-error{{'Release' cannot be called on the return value of 'gd'}} + gd().foo(); + hd().AddRef(); // expected-error{{'AddRef' cannot be called on the return value of 'hd'}} + hd().Release(); // expected-error{{'Release' cannot be called on the return value of 'hd'}} + hd().foo(); +} diff --git 
a/build/clang-plugin/tests/TestNoArithmeticExprInArgument.cpp b/build/clang-plugin/tests/TestNoArithmeticExprInArgument.cpp new file mode 100644 index 0000000000..d147b17012 --- /dev/null +++ b/build/clang-plugin/tests/TestNoArithmeticExprInArgument.cpp @@ -0,0 +1,32 @@ +#define MOZ_NO_ARITHMETIC_EXPR_IN_ARGUMENT __attribute__((annotate("moz_no_arith_expr_in_arg"))) + +struct X { + explicit X(int) MOZ_NO_ARITHMETIC_EXPR_IN_ARGUMENT; + void baz(int) MOZ_NO_ARITHMETIC_EXPR_IN_ARGUMENT; +}; + +int operator+(int, X); +int operator+(X, int); +int operator++(X); + +void badArithmeticsInArgs() { + int a = 1; + typedef int myint; + myint b = 2; + X goodObj1(a); + goodObj1.baz(b); + X badObj1(a + b); // expected-error{{cannot pass an arithmetic expression of built-in types to 'X'}} + X badObj2 = X(a ? 0 : ++a); // expected-error{{cannot pass an arithmetic expression of built-in types to 'X'}} + X badObj3(~a); // expected-error{{cannot pass an arithmetic expression of built-in types to 'X'}} + badObj1.baz(a - 1 - b); // expected-error{{cannot pass an arithmetic expression of built-in types to 'baz'}} + badObj1.baz(++a); // expected-error{{cannot pass an arithmetic expression of built-in types to 'baz'}} + badObj1.baz(a++); // expected-error{{cannot pass an arithmetic expression of built-in types to 'baz'}} + badObj1.baz(a || b); + badObj1.baz(a + goodObj1); + badObj1.baz(goodObj1 + a); + badObj1.baz(++goodObj1); + badObj1.baz(-1); + badObj1.baz(-1.0); + badObj1.baz(1 + 2); + badObj1.baz(1 << (sizeof(int)/2)); +} diff --git a/build/clang-plugin/tests/TestNoAutoType.cpp b/build/clang-plugin/tests/TestNoAutoType.cpp new file mode 100644 index 0000000000..6c6e65f243 --- /dev/null +++ b/build/clang-plugin/tests/TestNoAutoType.cpp @@ -0,0 +1,41 @@ +#define MOZ_NON_AUTOABLE __attribute__((annotate("moz_non_autoable"))) + +template +struct MOZ_NON_AUTOABLE ExplicitTypeTemplate {}; +struct MOZ_NON_AUTOABLE ExplicitType {}; +struct NonExplicitType {}; + +void f() { + { + ExplicitType a; + auto b = a; // expected-error {{Cannot use auto to declare a variable of type 'ExplicitType'}} expected-note {{Please write out this type explicitly}} + auto &br = a; // expected-error {{Cannot use auto to declare a variable of type 'ExplicitType &'}} expected-note {{Please write out this type explicitly}} + const auto &brc = a; // expected-error {{Cannot use auto to declare a variable of type 'const ExplicitType &'}} expected-note {{Please write out this type explicitly}} + auto *bp = &a; // expected-error {{Cannot use auto to declare a variable of type 'ExplicitType *'}} expected-note {{Please write out this type explicitly}} + const auto *bpc = &a; // expected-error {{Cannot use auto to declare a variable of type 'const ExplicitType *'}} expected-note {{Please write out this type explicitly}} + } + + { + ExplicitTypeTemplate a; + auto b = a; // expected-error {{Cannot use auto to declare a variable of type 'ExplicitTypeTemplate'}} expected-note {{Please write out this type explicitly}} + auto &br = a; // expected-error {{Cannot use auto to declare a variable of type 'ExplicitTypeTemplate &'}} expected-note {{Please write out this type explicitly}} + const auto &brc = a; // expected-error {{Cannot use auto to declare a variable of type 'const ExplicitTypeTemplate &'}} expected-note {{Please write out this type explicitly}} + auto *bp = &a; // expected-error {{Cannot use auto to declare a variable of type 'ExplicitTypeTemplate *'}} expected-note {{Please write out this type explicitly}} + const auto *bpc = &a; // 
expected-error {{Cannot use auto to declare a variable of type 'const ExplicitTypeTemplate *'}} expected-note {{Please write out this type explicitly}} + } + + { + NonExplicitType c; + auto d = c; + auto &dr = c; + const auto &drc = c; + auto *dp = &c; + const auto *dpc = &c; + } +} + +ExplicitType A; +auto B = A; // expected-error {{Cannot use auto to declare a variable of type 'ExplicitType'}} expected-note {{Please write out this type explicitly}} + +NonExplicitType C; +auto D = C; diff --git a/build/clang-plugin/tests/TestNoDuplicateRefCntMember.cpp b/build/clang-plugin/tests/TestNoDuplicateRefCntMember.cpp new file mode 100644 index 0000000000..ff68e4fc7c --- /dev/null +++ b/build/clang-plugin/tests/TestNoDuplicateRefCntMember.cpp @@ -0,0 +1,49 @@ +class C1 {}; + +class RC1 { +public: + virtual void AddRef(); + virtual void Release(); + +private: + int mRefCnt; // expected-note 2 {{Superclass 'RC1' also has an mRefCnt member}} expected-note 3 {{Superclass 'RC1' has an mRefCnt member}} +}; + +class RC2 : public RC1 { // expected-error {{Refcounted record 'RC2' has multiple mRefCnt members}} +public: + virtual void AddRef(); + virtual void Release(); + +private: + int mRefCnt; // expected-note {{Consider using the _INHERITED macros for AddRef and Release here}} +}; + +class C2 : public RC1 {}; + +class RC3 : public RC1 {}; + +class RC4 : public RC3, public C2 {}; // expected-error {{Refcounted record 'RC4' has multiple superclasses with mRefCnt members}} + +class RC5 : public RC1 {}; + +class RC6 : public C1, public RC5 { // expected-error {{Refcounted record 'RC6' has multiple mRefCnt members}} +public: + virtual void AddRef(); + virtual void Release(); + +private: + int mRefCnt; // expected-note {{Consider using the _INHERITED macros for AddRef and Release here}} +}; + +class Predecl; + +class OtherRC { +public: + virtual void AddRef(); + virtual void Release(); + +private: + int mRefCnt; // expected-note {{Superclass 'OtherRC' has an mRefCnt member}} +}; + +class MultRCSuper : public RC1, public OtherRC {}; // expected-error {{Refcounted record 'MultRCSuper' has multiple superclasses with mRefCnt members}} diff --git a/build/clang-plugin/tests/TestNoExplicitMoveConstructor.cpp b/build/clang-plugin/tests/TestNoExplicitMoveConstructor.cpp new file mode 100644 index 0000000000..5aea6b1a7f --- /dev/null +++ b/build/clang-plugin/tests/TestNoExplicitMoveConstructor.cpp @@ -0,0 +1,25 @@ +class Foo { + Foo(Foo&& f); +}; + +class Bar { + explicit Bar(Bar&& f); // expected-error {{Move constructors may not be marked explicit}} +}; + +class Baz { + template + explicit Baz(T&& f) {}; +}; + +class Quxx { + Quxx(); + Quxx(Quxx& q) = delete; + template + explicit Quxx(T&& f) {}; +}; + +void f() { + // Move a quxx into a quxx! (This specializes Quxx's constructor to look like + // a move constructor - to make sure it doesn't trigger) + Quxx(Quxx()); +} diff --git a/build/clang-plugin/tests/TestNoNewThreadsChecker.cpp b/build/clang-plugin/tests/TestNoNewThreadsChecker.cpp new file mode 100644 index 0000000000..c10277c1c4 --- /dev/null +++ b/build/clang-plugin/tests/TestNoNewThreadsChecker.cpp @@ -0,0 +1,9 @@ +// Dummy NS_NewNamedThread. +void NS_NewNamedThread(const char *aName) {} + +void func_threads() { + // Test to see if the checker recognizes a bad name, and if it recognizes a + // name from the ThreadAllows.txt. + NS_NewNamedThread("A bad name"); // expected-error {{Thread name not recognized.
Please use the background thread pool.}} expected-note {{NS_NewNamedThread has been deprecated in favor of background task dispatch via NS_DispatchBackgroundTask and NS_CreateBackgroundTaskQueue. If you must create a new ad-hoc thread, have your thread name added to ThreadAllows.txt.}} + NS_NewNamedThread("Checker Test"); +} diff --git a/build/clang-plugin/tests/TestNoPrincipalGetUri.cpp b/build/clang-plugin/tests/TestNoPrincipalGetUri.cpp new file mode 100644 index 0000000000..6442778ef1 --- /dev/null +++ b/build/clang-plugin/tests/TestNoPrincipalGetUri.cpp @@ -0,0 +1,31 @@ +class nsIPrincipal { +public: + void GetURI(int foo){}; +}; + +class SomePrincipal : public nsIPrincipal { +public: + void GetURI(int foo) {} +}; + +class NullPrincipal : public SomePrincipal {}; + +class SomeURI { +public: + void GetURI(int foo) {} +}; + +void f() { + nsIPrincipal *a = new SomePrincipal(); + a->GetURI(0); // expected-error {{Principal->GetURI is deprecated and will be removed soon. Please consider using the new helper functions of nsIPrincipal}} + + nsIPrincipal *b = new NullPrincipal(); + b->GetURI(0); // expected-error {{Principal->GetURI is deprecated and will be removed soon. Please consider using the new helper functions of nsIPrincipal}} + + SomeURI *c = new SomeURI(); + c->GetURI(0); + + SomePrincipal *d = new SomePrincipal(); + d->GetURI(0); + +} diff --git a/build/clang-plugin/tests/TestNoRefcountedInsideLambdas.cpp b/build/clang-plugin/tests/TestNoRefcountedInsideLambdas.cpp new file mode 100644 index 0000000000..4b4b814751 --- /dev/null +++ b/build/clang-plugin/tests/TestNoRefcountedInsideLambdas.cpp @@ -0,0 +1,677 @@ +#include +#define MOZ_STRONG_REF +#define MOZ_IMPLICIT __attribute__((annotate("moz_implicit"))) + +// Ensure that warnings about returning stack addresses of local variables are +// errors, so our `expected-error` annotations below work correctly. 
+#pragma GCC diagnostic error "-Wreturn-stack-address" + +struct RefCountedBase { + void AddRef(); + void Release(); +}; + +template +struct SmartPtr { + SmartPtr(); + MOZ_IMPLICIT SmartPtr(T*); + T* MOZ_STRONG_REF t; + T* operator->() const; +}; + +struct R : RefCountedBase { + void method(); +private: + void privateMethod(); +}; + +void take(...); +void foo() { + R* ptr; + SmartPtr sp; + take([&](R* argptr) { + R* localptr; + ptr->method(); + argptr->method(); + localptr->method(); + }); + take([&](SmartPtr argsp) { + SmartPtr localsp; + sp->method(); + argsp->method(); + localsp->method(); + }); + take([&](R* argptr) { + R* localptr; + take(ptr); + take(argptr); + take(localptr); + }); + take([&](SmartPtr argsp) { + SmartPtr localsp; + take(sp); + take(argsp); + take(localsp); + }); + take([=](R* argptr) { + R* localptr; + ptr->method(); + argptr->method(); + localptr->method(); + }); + take([=](SmartPtr argsp) { + SmartPtr localsp; + sp->method(); + argsp->method(); + localsp->method(); + }); + take([=](R* argptr) { + R* localptr; + take(ptr); + take(argptr); + take(localptr); + }); + take([=](SmartPtr argsp) { + SmartPtr localsp; + take(sp); + take(argsp); + take(localsp); + }); + take([ptr](R* argptr) { + R* localptr; + ptr->method(); + argptr->method(); + localptr->method(); + }); + take([sp](SmartPtr argsp) { + SmartPtr localsp; + sp->method(); + argsp->method(); + localsp->method(); + }); + take([ptr](R* argptr) { + R* localptr; + take(ptr); + take(argptr); + take(localptr); + }); + take([sp](SmartPtr argsp) { + SmartPtr localsp; + take(sp); + take(argsp); + take(localsp); + }); + take([&ptr](R* argptr) { + R* localptr; + ptr->method(); + argptr->method(); + localptr->method(); + }); + take([&sp](SmartPtr argsp) { + SmartPtr localsp; + sp->method(); + argsp->method(); + localsp->method(); + }); + take([&ptr](R* argptr) { + R* localptr; + take(ptr); + take(argptr); + take(localptr); + }); + take([&sp](SmartPtr argsp) { + SmartPtr localsp; + take(sp); + take(argsp); + take(localsp); + }); +} + +void b() { + R* ptr; + SmartPtr sp; + std::function([&](R* argptr) { + R* localptr; + ptr->method(); + argptr->method(); + localptr->method(); + }); + std::function)>([&](SmartPtr argsp) { + SmartPtr localsp; + sp->method(); + argsp->method(); + localsp->method(); + }); + std::function([&](R* argptr) { + R* localptr; + take(ptr); + take(argptr); + take(localptr); + }); + std::function)>([&](SmartPtr argsp) { + SmartPtr localsp; + take(sp); + take(argsp); + take(localsp); + }); + std::function([=](R* argptr) { + R* localptr; + ptr->method(); // expected-error{{Refcounted variable 'ptr' of type 'R' cannot be captured by a lambda}} expected-note{{Please consider using a smart pointer}} + argptr->method(); + localptr->method(); + }); + std::function)>([=](SmartPtr argsp) { + SmartPtr localsp; + sp->method(); + argsp->method(); + localsp->method(); + }); + std::function([=](R* argptr) { + R* localptr; + take(ptr); // expected-error{{Refcounted variable 'ptr' of type 'R' cannot be captured by a lambda}} expected-note{{Please consider using a smart pointer}} + take(argptr); + take(localptr); + }); + std::function)>([=](SmartPtr argsp) { + SmartPtr localsp; + take(sp); + take(argsp); + take(localsp); + }); + std::function([ptr](R* argptr) { // expected-error{{Refcounted variable 'ptr' of type 'R' cannot be captured by a lambda}} expected-note{{Please consider using a smart pointer}} + R* localptr; + ptr->method(); + argptr->method(); + localptr->method(); + }); + std::function)>([sp](SmartPtr argsp) 
{ + SmartPtr localsp; + sp->method(); + argsp->method(); + localsp->method(); + }); + std::function([ptr](R* argptr) { // expected-error{{Refcounted variable 'ptr' of type 'R' cannot be captured by a lambda}} expected-note{{Please consider using a smart pointer}} + R* localptr; + take(ptr); + take(argptr); + take(localptr); + }); + std::function)>([sp](SmartPtr argsp) { + SmartPtr localsp; + take(sp); + take(argsp); + take(localsp); + }); + std::function([&ptr](R* argptr) { + R* localptr; + ptr->method(); + argptr->method(); + localptr->method(); + }); + std::function)>([&sp](SmartPtr argsp) { + SmartPtr localsp; + sp->method(); + argsp->method(); + localsp->method(); + }); + std::function([&ptr](R* argptr) { + R* localptr; + take(ptr); + take(argptr); + take(localptr); + }); + std::function)>([&sp](SmartPtr argsp) { + SmartPtr localsp; + take(sp); + take(argsp); + take(localsp); + }); +} + +// These tests would check c++14 deduced return types, if they were supported in +// our codebase. They are being kept here for convenience in the future if we do +// add support for c++14 deduced return types +#if 0 +auto d1() { + R* ptr; + SmartPtr sp; + return ([&](R* argptr) { + R* localptr; + ptr->method(); + argptr->method(); + localptr->method(); + }); +} +auto d2() { + R* ptr; + SmartPtr sp; + return ([&](SmartPtr argsp) { + SmartPtr localsp; + sp->method(); + argsp->method(); + localsp->method(); + }); +} +auto d3() { + R* ptr; + SmartPtr sp; + return ([&](R* argptr) { + R* localptr; + take(ptr); + take(argptr); + take(localptr); + }); +} +auto d4() { + R* ptr; + SmartPtr sp; + return ([&](SmartPtr argsp) { + SmartPtr localsp; + take(sp); + take(argsp); + take(localsp); + }); +} +auto d5() { + R* ptr; + SmartPtr sp; + return ([=](R* argptr) { + R* localptr; + ptr->method(); // expected-error{{Refcounted variable 'ptr' of type 'R' cannot be captured by a lambda}} expected-note{{Please consider using a smart pointer}} + argptr->method(); + localptr->method(); + }); +} +auto d6() { + R* ptr; + SmartPtr sp; + return ([=](SmartPtr argsp) { + SmartPtr localsp; + sp->method(); + argsp->method(); + localsp->method(); + }); +} +auto d8() { + R* ptr; + SmartPtr sp; + return ([=](R* argptr) { + R* localptr; + take(ptr); // expected-error{{Refcounted variable 'ptr' of type 'R' cannot be captured by a lambda}} expected-note{{Please consider using a smart pointer}} + take(argptr); + take(localptr); + }); +} +auto d9() { + R* ptr; + SmartPtr sp; + return ([=](SmartPtr argsp) { + SmartPtr localsp; + take(sp); + take(argsp); + take(localsp); + }); +} +auto d10() { + R* ptr; + SmartPtr sp; + return ([ptr](R* argptr) { // expected-error{{Refcounted variable 'ptr' of type 'R' cannot be captured by a lambda}} expected-note{{Please consider using a smart pointer}} + R* localptr; + ptr->method(); + argptr->method(); + localptr->method(); + }); +} +auto d11() { + R* ptr; + SmartPtr sp; + return ([sp](SmartPtr argsp) { + SmartPtr localsp; + sp->method(); + argsp->method(); + localsp->method(); + }); +} +auto d12() { + R* ptr; + SmartPtr sp; + return ([ptr](R* argptr) { // expected-error{{Refcounted variable 'ptr' of type 'R' cannot be captured by a lambda}} expected-note{{Please consider using a smart pointer}} + R* localptr; + take(ptr); + take(argptr); + take(localptr); + }); +} +auto d13() { + R* ptr; + SmartPtr sp; + return ([sp](SmartPtr argsp) { + SmartPtr localsp; + take(sp); + take(argsp); + take(localsp); + }); +} +auto d14() { + R* ptr; + SmartPtr sp; + return ([&ptr](R* argptr) { + R* localptr; + 
ptr->method(); + argptr->method(); + localptr->method(); + }); +} +auto d15() { + R* ptr; + SmartPtr sp; + return ([&sp](SmartPtr argsp) { + SmartPtr localsp; + sp->method(); + argsp->method(); + localsp->method(); + }); +} +auto d16() { + R* ptr; + SmartPtr sp; + return ([&ptr](R* argptr) { + R* localptr; + take(ptr); + take(argptr); + take(localptr); + }); +} +auto d17() { + R* ptr; + SmartPtr sp; + return ([&sp](SmartPtr argsp) { + SmartPtr localsp; + take(sp); + take(argsp); + take(localsp); + }); +} +#endif + +void e() { + auto e1 = []() { + R* ptr; + SmartPtr sp; + return ([&](R* argptr) { // expected-error{{address of stack memory associated with local variable 'ptr' returned}} + R* localptr; +#if __clang_major__ >= 12 + ptr->method(); // expected-note{{implicitly captured by reference due to use here}} +#else + ptr->method(); +#endif + argptr->method(); + localptr->method(); + }); + }; + auto e2 = []() { + R* ptr; + SmartPtr sp; + return ([&](SmartPtr argsp) { // expected-error{{address of stack memory associated with local variable 'sp' returned}} + SmartPtr localsp; +#if __clang_major__ >= 12 + sp->method(); // expected-note{{implicitly captured by reference due to use here}} +#else + sp->method(); +#endif + argsp->method(); + localsp->method(); + }); + }; + auto e3 = []() { + R* ptr; + SmartPtr sp; + return ([&](R* argptr) { // expected-error{{address of stack memory associated with local variable 'ptr' returned}} + R* localptr; +#if __clang_major__ >= 12 + take(ptr); // expected-note{{implicitly captured by reference due to use here}} +#else + take(ptr); +#endif + take(argptr); + take(localptr); + }); + }; + auto e4 = []() { + R* ptr; + SmartPtr sp; + return ([&](SmartPtr argsp) { // expected-error{{address of stack memory associated with local variable 'sp' returned}} + SmartPtr localsp; +#if __clang_major__ >= 12 + take(sp); // expected-note{{implicitly captured by reference due to use here}} +#else + take(sp); +#endif + take(argsp); + take(localsp); + }); + }; + auto e5 = []() { + R* ptr; + SmartPtr sp; + return ([=](R* argptr) { + R* localptr; + ptr->method(); // expected-error{{Refcounted variable 'ptr' of type 'R' cannot be captured by a lambda}} expected-note{{Please consider using a smart pointer}} + argptr->method(); + localptr->method(); + }); + }; + auto e6 = []() { + R* ptr; + SmartPtr sp; + return ([=](SmartPtr argsp) { + SmartPtr localsp; + sp->method(); + argsp->method(); + localsp->method(); + }); + }; + auto e8 = []() { + R* ptr; + SmartPtr sp; + return ([=](R* argptr) { + R* localptr; + take(ptr); // expected-error{{Refcounted variable 'ptr' of type 'R' cannot be captured by a lambda}} expected-note{{Please consider using a smart pointer}} + take(argptr); + take(localptr); + }); + }; + auto e9 = []() { + R* ptr; + SmartPtr sp; + return ([=](SmartPtr argsp) { + SmartPtr localsp; + take(sp); + take(argsp); + take(localsp); + }); + }; + auto e10 = []() { + R* ptr; + SmartPtr sp; + return ([ptr](R* argptr) { // expected-error{{Refcounted variable 'ptr' of type 'R' cannot be captured by a lambda}} expected-note{{Please consider using a smart pointer}} + R* localptr; + ptr->method(); + argptr->method(); + localptr->method(); + }); + }; + auto e11 = []() { + R* ptr; + SmartPtr sp; + return ([sp](SmartPtr argsp) { + SmartPtr localsp; + sp->method(); + argsp->method(); + localsp->method(); + }); + }; + auto e12 = []() { + R* ptr; + SmartPtr sp; + return ([ptr](R* argptr) { // expected-error{{Refcounted variable 'ptr' of type 'R' cannot be captured by a lambda}} 
expected-note{{Please consider using a smart pointer}} + R* localptr; + take(ptr); + take(argptr); + take(localptr); + }); + }; + auto e13 = []() { + R* ptr; + SmartPtr sp; + return ([sp](SmartPtr argsp) { + SmartPtr localsp; + take(sp); + take(argsp); + take(localsp); + }); + }; + auto e14 = []() { + R* ptr; + SmartPtr sp; +#if __clang_major__ >= 12 + return ([&ptr](R* argptr) { // expected-error{{address of stack memory associated with local variable 'ptr' returned}} expected-note{{captured by reference here}} + R* localptr; + ptr->method(); + argptr->method(); + localptr->method(); + }); +#else + return ([&ptr](R* argptr) { // expected-error{{address of stack memory associated with local variable 'ptr' returned}} + R* localptr; + ptr->method(); + argptr->method(); + localptr->method(); + }); +#endif + }; + auto e15 = []() { + R* ptr; + SmartPtr sp; +#if __clang_major__ >= 12 + return ([&sp](SmartPtr argsp) { // expected-error{{address of stack memory associated with local variable 'sp' returned}} expected-note{{captured by reference here}} + SmartPtr localsp; + sp->method(); + argsp->method(); + localsp->method(); + }); +#else + return ([&sp](SmartPtr argsp) { // expected-error{{address of stack memory associated with local variable 'sp' returned}} + SmartPtr localsp; + sp->method(); + argsp->method(); + localsp->method(); + }); +#endif + }; + auto e16 = []() { + R* ptr; + SmartPtr sp; +#if __clang_major__ >= 12 + return ([&ptr](R* argptr) { // expected-error{{address of stack memory associated with local variable 'ptr' returned}} expected-note{{captured by reference here}} + R* localptr; + take(ptr); + take(argptr); + take(localptr); + }); +#else + return ([&ptr](R* argptr) { // expected-error{{address of stack memory associated with local variable 'ptr' returned}} + R* localptr; + take(ptr); + take(argptr); + take(localptr); + }); +#endif + }; + auto e17 = []() { + R* ptr; + SmartPtr sp; +#if __clang_major__ >= 12 + return ([&sp](SmartPtr argsp) { // expected-error{{address of stack memory associated with local variable 'sp' returned}} expected-note{{captured by reference here}} + SmartPtr localsp; + take(sp); + take(argsp); + take(localsp); + }); +#else + return ([&sp](SmartPtr argsp) { // expected-error{{address of stack memory associated with local variable 'sp' returned}} + SmartPtr localsp; + take(sp); + take(argsp); + take(localsp); + }); +#endif + }; +} + +void +R::privateMethod() { + SmartPtr self = this; + std::function([&]() { + self->method(); + }); + std::function([&]() { + self->privateMethod(); + }); + std::function([&]() { + this->method(); + }); + std::function([&]() { + this->privateMethod(); + }); + std::function([=]() { + self->method(); + }); + std::function([=]() { + self->privateMethod(); + }); + std::function([=]() { + this->method(); // expected-error{{Refcounted variable 'this' of type 'R' cannot be captured by a lambda}} expected-note{{Please consider using a smart pointer}} + }); + std::function([=]() { + this->privateMethod(); // expected-error{{Refcounted variable 'this' of type 'R' cannot be captured by a lambda}} expected-note{{Please consider using a smart pointer}} + }); + std::function([self]() { + self->method(); + }); + std::function([self]() { + self->privateMethod(); + }); + std::function([this]() { + this->method(); // expected-error{{Refcounted variable 'this' of type 'R' cannot be captured by a lambda}} expected-note{{Please consider using a smart pointer}} + }); + std::function([this]() { + this->privateMethod(); // expected-error{{Refcounted 
variable 'this' of type 'R' cannot be captured by a lambda}} expected-note{{Please consider using a smart pointer}} + }); + std::function([this]() { + method(); // expected-error{{Refcounted variable 'this' of type 'R' cannot be captured by a lambda}} expected-note{{Please consider using a smart pointer}} + }); + std::function([this]() { + privateMethod(); // expected-error{{Refcounted variable 'this' of type 'R' cannot be captured by a lambda}} expected-note{{Please consider using a smart pointer}} + }); + std::function([=]() { + method(); // expected-error{{Refcounted variable 'this' of type 'R' cannot be captured by a lambda}} expected-note{{Please consider using a smart pointer}} + }); + std::function([=]() { + privateMethod(); // expected-error{{Refcounted variable 'this' of type 'R' cannot be captured by a lambda}} expected-note{{Please consider using a smart pointer}} + }); + std::function([&]() { + method(); + }); + std::function([&]() { + privateMethod(); + }); + + // It should be OK to go through `this` if we have captured a reference to it. + std::function([this, self]() { + this->method(); + this->privateMethod(); + method(); + privateMethod(); + }); +} diff --git a/build/clang-plugin/tests/TestNoUsingNamespaceMozillaJava.cpp b/build/clang-plugin/tests/TestNoUsingNamespaceMozillaJava.cpp new file mode 100644 index 0000000000..70cfbe1827 --- /dev/null +++ b/build/clang-plugin/tests/TestNoUsingNamespaceMozillaJava.cpp @@ -0,0 +1,29 @@ +namespace mozilla { +namespace java { +namespace sdk { +} // namespace sdk + +namespace future { +} // namespace future +} // namespace java +} // namespace mozilla + +namespace mozilla { + using namespace java; // expected-error{{using namespace mozilla::java is forbidden}} + using namespace java::future; // expected-error{{using namespace mozilla::java::future is forbidden}} +} + +using namespace mozilla::java::sdk; // expected-error{{using namespace mozilla::java::sdk is forbidden}} + +namespace shouldPass { + namespace java { + } + + using namespace java; +} + +using namespace shouldPass::java; + + +void test() { +} diff --git a/build/clang-plugin/tests/TestNonHeapClass.cpp b/build/clang-plugin/tests/TestNonHeapClass.cpp new file mode 100644 index 0000000000..26fe6404e0 --- /dev/null +++ b/build/clang-plugin/tests/TestNonHeapClass.cpp @@ -0,0 +1,62 @@ +#define MOZ_NONHEAP_CLASS __attribute__((annotate("moz_nonheap_class"))) +#define MOZ_STACK_CLASS __attribute__((annotate("moz_stack_class"))) +#include + +struct MOZ_NONHEAP_CLASS NonHeap { + int i; + void *operator new(size_t x) throw() { return 0; } + void *operator new(size_t blah, char *buffer) { return buffer; } +}; + +template +struct MOZ_NONHEAP_CLASS TemplateClass { + T i; +}; + +void gobble(void *) { } + +void misuseNonHeapClass(int len) { + NonHeap valid; + NonHeap alsoValid[2]; + static NonHeap validStatic; + static NonHeap alsoValidStatic[2]; + + gobble(&valid); + gobble(&validStatic); + gobble(&alsoValid[0]); + + gobble(new NonHeap); // expected-error {{variable of type 'NonHeap' is not valid on the heap}} expected-note {{value incorrectly allocated on the heap}} + gobble(new NonHeap[10]); // expected-error {{variable of type 'NonHeap' is not valid on the heap}} expected-note {{value incorrectly allocated on the heap}} + gobble(new TemplateClass); // expected-error {{variable of type 'TemplateClass' is not valid on the heap}} expected-note {{value incorrectly allocated on the heap}} + gobble(len <= 5 ? 
&valid : new NonHeap); // expected-error {{variable of type 'NonHeap' is not valid on the heap}} expected-note {{value incorrectly allocated on the heap}} + + char buffer[sizeof(NonHeap)]; + gobble(new (buffer) NonHeap); +} + +NonHeap validStatic; +struct RandomClass { + NonHeap nonstaticMember; // expected-note {{'RandomClass' is a non-heap type because member 'nonstaticMember' is a non-heap type 'NonHeap'}} + static NonHeap staticMember; +}; +struct MOZ_NONHEAP_CLASS RandomNonHeapClass { + NonHeap nonstaticMember; + static NonHeap staticMember; +}; + +struct BadInherit : NonHeap {}; // expected-note {{'BadInherit' is a non-heap type because it inherits from a non-heap type 'NonHeap'}} +struct MOZ_NONHEAP_CLASS GoodInherit : NonHeap {}; + +void useStuffWrongly() { + gobble(new BadInherit); // expected-error {{variable of type 'BadInherit' is not valid on the heap}} expected-note {{value incorrectly allocated on the heap}} + gobble(new RandomClass); // expected-error {{variable of type 'RandomClass' is not valid on the heap}} expected-note {{value incorrectly allocated on the heap}} +} + +// Stack class overrides non-heap types. +struct MOZ_STACK_CLASS StackClass {}; +struct MOZ_NONHEAP_CLASS InferredStackClass : GoodInherit { + NonHeap nonstaticMember; + StackClass stackClass; // expected-note {{'InferredStackClass' is a stack type because member 'stackClass' is a stack type 'StackClass'}} +}; + +InferredStackClass global; // expected-error {{variable of type 'InferredStackClass' only valid on the stack}} expected-note {{value incorrectly allocated in a global variable}} diff --git a/build/clang-plugin/tests/TestNonMemMovable.cpp b/build/clang-plugin/tests/TestNonMemMovable.cpp new file mode 100644 index 0000000000..dfbb5a6c65 --- /dev/null +++ b/build/clang-plugin/tests/TestNonMemMovable.cpp @@ -0,0 +1,830 @@ +#define MOZ_NON_MEMMOVABLE __attribute__((annotate("moz_non_memmovable"))) +#define MOZ_NEEDS_MEMMOVABLE_TYPE __attribute__((annotate("moz_needs_memmovable_type"))) +#define MOZ_NEEDS_MEMMOVABLE_MEMBERS __attribute__((annotate("moz_needs_memmovable_members"))) + +/* + These are a bunch of structs with variable levels of memmovability.
+ They will be used as template parameters to the various NeedyTemplates +*/ +struct MOZ_NON_MEMMOVABLE NonMovable {}; +struct Movable {}; + +// Subclasses +struct S_NonMovable : NonMovable {}; // expected-note 51 {{'S_NonMovable' is a non-memmove()able type because it inherits from a non-memmove()able type 'NonMovable'}} +struct S_Movable : Movable {}; + +// Members +struct W_NonMovable { + NonMovable m; // expected-note 34 {{'W_NonMovable' is a non-memmove()able type because member 'm' is a non-memmove()able type 'NonMovable'}} +}; +struct W_Movable { + Movable m; +}; + +// Wrapped Subclasses +struct WS_NonMovable { + S_NonMovable m; // expected-note 34 {{'WS_NonMovable' is a non-memmove()able type because member 'm' is a non-memmove()able type 'S_NonMovable'}} +}; +struct WS_Movable { + S_Movable m; +}; + +// Combinations of the above +struct SW_NonMovable : W_NonMovable {}; // expected-note 17 {{'SW_NonMovable' is a non-memmove()able type because it inherits from a non-memmove()able type 'W_NonMovable'}} +struct SW_Movable : W_Movable {}; + +struct SWS_NonMovable : WS_NonMovable {}; // expected-note 17 {{'SWS_NonMovable' is a non-memmove()able type because it inherits from a non-memmove()able type 'WS_NonMovable'}} +struct SWS_Movable : WS_Movable {}; + +// Basic templated wrapper +template +struct Template_Inline { + T m; // expected-note-re 56 {{'Template_Inline<{{.*}}>' is a non-memmove()able type because member 'm' is a non-memmove()able type '{{.*}}'}} +}; + +template +struct Template_Ref { + T* m; +}; + +template +struct Template_Unused {}; + +template +struct MOZ_NON_MEMMOVABLE Template_NonMovable {}; + +/* + These tests take the following form: + DECLARATIONS => Declarations of the templates which are either marked with MOZ_NEEDS_MEMMOVABLE_TYPE + or which instantiate a MOZ_NEEDS_MEMMOVABLE_TYPE through some mechanism. + BAD N => Instantiations of the wrapper template with each of the non-memmovable types. + The prefix S_ means subclass, W_ means wrapped. Each of these rows should produce an error + on the NeedyTemplate in question, and a note at the instantiation location of that template. + Unfortunately, in every case more complicated than bad1, the instantiation location is + within another template. Thus, the notes are expected on the template in question which + actually instantiates the MOZ_NEEDS_MEMMOVABLE_TYPE template. + GOOD N => Instantiations of the wrapper template with each of the memmovable types. + This is meant as a sanity check to ensure that we don't reject valid instantiations of + templates. + + + Note 1: Each set uses its own types to ensure that they don't re-use each other's template specializations. + If they did, then some of the error messages would not be emitted (as error messages are emitted for template + specializations, rather than for variable declarations) + + Note 2: Every instance of NeedyTemplate contains a member of type T. This is to ensure that T is actually + instantiated (if T is a template) by clang. If T isn't instantiated, then we can't actually tell if it is + NON_MEMMOVABLE. (This is OK in practice, as you cannot memmove a type which you don't know the size of). + + Note 3: There is a set of tests for specializations of NeedyTemplate at the bottom. For each set of tests, + these tests contribute two expected errors to the templates.
+*/ + +// +// 1 - Unwrapped MOZ_NEEDS_MEMMOVABLE_TYPE +// + +template +struct MOZ_NEEDS_MEMMOVABLE_TYPE NeedyTemplate1 {T m;}; // expected-error-re 26 {{Cannot instantiate 'NeedyTemplate1<{{.*}}>' with non-memmovable template argument '{{.*}}'}} + +void bad1() { + NeedyTemplate1 a1; // expected-note-re {{instantiation of 'NeedyTemplate1<{{.*}}>' requested here}} + NeedyTemplate1 a2; // expected-note-re {{instantiation of 'NeedyTemplate1<{{.*}}>' requested here}} + NeedyTemplate1 a3; // expected-note-re {{instantiation of 'NeedyTemplate1<{{.*}}>' requested here}} + NeedyTemplate1 a4; // expected-note-re {{instantiation of 'NeedyTemplate1<{{.*}}>' requested here}} + NeedyTemplate1 a5; // expected-note-re {{instantiation of 'NeedyTemplate1<{{.*}}>' requested here}} + NeedyTemplate1 a6; // expected-note-re {{instantiation of 'NeedyTemplate1<{{.*}}>' requested here}} + + NeedyTemplate1 > b1; // expected-note-re {{instantiation of 'NeedyTemplate1<{{.*}}>' requested here}} + NeedyTemplate1 > b2; // expected-note-re {{instantiation of 'NeedyTemplate1<{{.*}}>' requested here}} + NeedyTemplate1 > b3; // expected-note-re {{instantiation of 'NeedyTemplate1<{{.*}}>' requested here}} + NeedyTemplate1 > b4; // expected-note-re {{instantiation of 'NeedyTemplate1<{{.*}}>' requested here}} + NeedyTemplate1 > b5; // expected-note-re {{instantiation of 'NeedyTemplate1<{{.*}}>' requested here}} + NeedyTemplate1 > b6; // expected-note-re {{instantiation of 'NeedyTemplate1<{{.*}}>' requested here}} + + NeedyTemplate1 > c1; // expected-note-re {{instantiation of 'NeedyTemplate1<{{.*}}>' requested here}} + NeedyTemplate1 > c2; // expected-note-re {{instantiation of 'NeedyTemplate1<{{.*}}>' requested here}} + NeedyTemplate1 > c3; // expected-note-re {{instantiation of 'NeedyTemplate1<{{.*}}>' requested here}} + NeedyTemplate1 > c4; // expected-note-re {{instantiation of 'NeedyTemplate1<{{.*}}>' requested here}} + NeedyTemplate1 > c5; // expected-note-re {{instantiation of 'NeedyTemplate1<{{.*}}>' requested here}} + NeedyTemplate1 > c6; // expected-note-re {{instantiation of 'NeedyTemplate1<{{.*}}>' requested here}} + NeedyTemplate1 > c7; // expected-note-re {{instantiation of 'NeedyTemplate1<{{.*}}>' requested here}} + NeedyTemplate1 > c8; // expected-note-re {{instantiation of 'NeedyTemplate1<{{.*}}>' requested here}} + NeedyTemplate1 > c9; // expected-note-re {{instantiation of 'NeedyTemplate1<{{.*}}>' requested here}} + NeedyTemplate1 > c10; // expected-note-re {{instantiation of 'NeedyTemplate1<{{.*}}>' requested here}} + NeedyTemplate1 > c11; // expected-note-re {{instantiation of 'NeedyTemplate1<{{.*}}>' requested here}} + NeedyTemplate1 > c12; // expected-note-re {{instantiation of 'NeedyTemplate1<{{.*}}>' requested here}} +} + +void good1() { + NeedyTemplate1 a1; + NeedyTemplate1 a2; + NeedyTemplate1 a3; + NeedyTemplate1 a4; + NeedyTemplate1 a5; + NeedyTemplate1 a6; + + NeedyTemplate1 > b1; + NeedyTemplate1 > b2; + NeedyTemplate1 > b3; + NeedyTemplate1 > b4; + NeedyTemplate1 > b5; + NeedyTemplate1 > b6; + + NeedyTemplate1 > c1; + NeedyTemplate1 > c2; + NeedyTemplate1 > c3; + NeedyTemplate1 > c4; + NeedyTemplate1 > c5; + NeedyTemplate1 > c6; + NeedyTemplate1 > c7; + NeedyTemplate1 > c8; + NeedyTemplate1 > c9; + NeedyTemplate1 > c10; + NeedyTemplate1 > c11; + NeedyTemplate1 > c12; + + NeedyTemplate1 > d1; + NeedyTemplate1 > d2; + NeedyTemplate1 > d3; + NeedyTemplate1 > d4; + NeedyTemplate1 > d5; + NeedyTemplate1 > d6; + NeedyTemplate1 > d7; + NeedyTemplate1 > d8; + NeedyTemplate1 > d9; + NeedyTemplate1 > d10; 
+ NeedyTemplate1 > d11; + NeedyTemplate1 > d12; +} + +// +// 2 - Subclassed MOZ_NEEDS_MEMMOVABLE_TYPE +// + +template +struct MOZ_NEEDS_MEMMOVABLE_TYPE NeedyTemplate2 {T m;}; // expected-error-re 26 {{Cannot instantiate 'NeedyTemplate2<{{.*}}>' with non-memmovable template argument '{{.*}}'}} +template +struct S_NeedyTemplate2 : NeedyTemplate2 {}; // expected-note-re 26 {{instantiation of 'NeedyTemplate2<{{.*}}>' requested here}} + +void bad2() { + S_NeedyTemplate2 a1; + S_NeedyTemplate2 a2; + S_NeedyTemplate2 a3; + S_NeedyTemplate2 a4; + S_NeedyTemplate2 a5; + S_NeedyTemplate2 a6; + + S_NeedyTemplate2 > b1; + S_NeedyTemplate2 > b2; + S_NeedyTemplate2 > b3; + S_NeedyTemplate2 > b4; + S_NeedyTemplate2 > b5; + S_NeedyTemplate2 > b6; + + S_NeedyTemplate2 > c1; + S_NeedyTemplate2 > c2; + S_NeedyTemplate2 > c3; + S_NeedyTemplate2 > c4; + S_NeedyTemplate2 > c5; + S_NeedyTemplate2 > c6; + S_NeedyTemplate2 > c7; + S_NeedyTemplate2 > c8; + S_NeedyTemplate2 > c9; + S_NeedyTemplate2 > c10; + S_NeedyTemplate2 > c11; + S_NeedyTemplate2 > c12; +} + +void good2() { + S_NeedyTemplate2 a1; + S_NeedyTemplate2 a2; + S_NeedyTemplate2 a3; + S_NeedyTemplate2 a4; + S_NeedyTemplate2 a5; + S_NeedyTemplate2 a6; + + S_NeedyTemplate2 > b1; + S_NeedyTemplate2 > b2; + S_NeedyTemplate2 > b3; + S_NeedyTemplate2 > b4; + S_NeedyTemplate2 > b5; + S_NeedyTemplate2 > b6; + + S_NeedyTemplate2 > c1; + S_NeedyTemplate2 > c2; + S_NeedyTemplate2 > c3; + S_NeedyTemplate2 > c4; + S_NeedyTemplate2 > c5; + S_NeedyTemplate2 > c6; + S_NeedyTemplate2 > c7; + S_NeedyTemplate2 > c8; + S_NeedyTemplate2 > c9; + S_NeedyTemplate2 > c10; + S_NeedyTemplate2 > c11; + S_NeedyTemplate2 > c12; + + S_NeedyTemplate2 > d1; + S_NeedyTemplate2 > d2; + S_NeedyTemplate2 > d3; + S_NeedyTemplate2 > d4; + S_NeedyTemplate2 > d5; + S_NeedyTemplate2 > d6; + S_NeedyTemplate2 > d7; + S_NeedyTemplate2 > d8; + S_NeedyTemplate2 > d9; + S_NeedyTemplate2 > d10; + S_NeedyTemplate2 > d11; + S_NeedyTemplate2 > d12; +} + +// +// 3 - Wrapped MOZ_NEEDS_MEMMOVABLE_TYPE +// + +template +struct MOZ_NEEDS_MEMMOVABLE_TYPE NeedyTemplate3 {T m;}; // expected-error-re 26 {{Cannot instantiate 'NeedyTemplate3<{{.*}}>' with non-memmovable template argument '{{.*}}'}} +template +struct W_NeedyTemplate3 { + NeedyTemplate3 m; // expected-note-re 26 {{instantiation of 'NeedyTemplate3<{{.*}}>' requested here}} +}; +void bad3() { + W_NeedyTemplate3 a1; + W_NeedyTemplate3 a2; + W_NeedyTemplate3 a3; + W_NeedyTemplate3 a4; + W_NeedyTemplate3 a5; + W_NeedyTemplate3 a6; + + W_NeedyTemplate3 > b1; + W_NeedyTemplate3 > b2; + W_NeedyTemplate3 > b3; + W_NeedyTemplate3 > b4; + W_NeedyTemplate3 > b5; + W_NeedyTemplate3 > b6; + + W_NeedyTemplate3 > c1; + W_NeedyTemplate3 > c2; + W_NeedyTemplate3 > c3; + W_NeedyTemplate3 > c4; + W_NeedyTemplate3 > c5; + W_NeedyTemplate3 > c6; + W_NeedyTemplate3 > c7; + W_NeedyTemplate3 > c8; + W_NeedyTemplate3 > c9; + W_NeedyTemplate3 > c10; + W_NeedyTemplate3 > c11; + W_NeedyTemplate3 > c12; +} + +void good3() { + W_NeedyTemplate3 a1; + W_NeedyTemplate3 a2; + W_NeedyTemplate3 a3; + W_NeedyTemplate3 a4; + W_NeedyTemplate3 a5; + W_NeedyTemplate3 a6; + + W_NeedyTemplate3 > b1; + W_NeedyTemplate3 > b2; + W_NeedyTemplate3 > b3; + W_NeedyTemplate3 > b4; + W_NeedyTemplate3 > b5; + W_NeedyTemplate3 > b6; + + W_NeedyTemplate3 > c1; + W_NeedyTemplate3 > c2; + W_NeedyTemplate3 > c3; + W_NeedyTemplate3 > c4; + W_NeedyTemplate3 > c5; + W_NeedyTemplate3 > c6; + W_NeedyTemplate3 > c7; + W_NeedyTemplate3 > c8; + W_NeedyTemplate3 > c9; + W_NeedyTemplate3 > c10; + W_NeedyTemplate3 > c11; 
+ W_NeedyTemplate3 > c12; + + W_NeedyTemplate3 > d1; + W_NeedyTemplate3 > d2; + W_NeedyTemplate3 > d3; + W_NeedyTemplate3 > d4; + W_NeedyTemplate3 > d5; + W_NeedyTemplate3 > d6; + W_NeedyTemplate3 > d7; + W_NeedyTemplate3 > d8; + W_NeedyTemplate3 > d9; + W_NeedyTemplate3 > d10; + W_NeedyTemplate3 > d11; + W_NeedyTemplate3 > d12; +} + +// +// 4 - Wrapped Subclassed MOZ_NEEDS_MEMMOVABLE_TYPE +// + +template +struct MOZ_NEEDS_MEMMOVABLE_TYPE NeedyTemplate4 {T m;}; // expected-error-re 26 {{Cannot instantiate 'NeedyTemplate4<{{.*}}>' with non-memmovable template argument '{{.*}}'}} +template +struct S_NeedyTemplate4 : NeedyTemplate4 {}; // expected-note-re 26 {{instantiation of 'NeedyTemplate4<{{.*}}>' requested here}} +template +struct WS_NeedyTemplate4 { + S_NeedyTemplate4 m; +}; +void bad4() { + WS_NeedyTemplate4 a1; + WS_NeedyTemplate4 a2; + WS_NeedyTemplate4 a3; + WS_NeedyTemplate4 a4; + WS_NeedyTemplate4 a5; + WS_NeedyTemplate4 a6; + + WS_NeedyTemplate4 > b1; + WS_NeedyTemplate4 > b2; + WS_NeedyTemplate4 > b3; + WS_NeedyTemplate4 > b4; + WS_NeedyTemplate4 > b5; + WS_NeedyTemplate4 > b6; + + WS_NeedyTemplate4 > c1; + WS_NeedyTemplate4 > c2; + WS_NeedyTemplate4 > c3; + WS_NeedyTemplate4 > c4; + WS_NeedyTemplate4 > c5; + WS_NeedyTemplate4 > c6; + WS_NeedyTemplate4 > c7; + WS_NeedyTemplate4 > c8; + WS_NeedyTemplate4 > c9; + WS_NeedyTemplate4 > c10; + WS_NeedyTemplate4 > c11; + WS_NeedyTemplate4 > c12; +} + +void good4() { + WS_NeedyTemplate4 a1; + WS_NeedyTemplate4 a2; + WS_NeedyTemplate4 a3; + WS_NeedyTemplate4 a4; + WS_NeedyTemplate4 a5; + WS_NeedyTemplate4 a6; + + WS_NeedyTemplate4 > b1; + WS_NeedyTemplate4 > b2; + WS_NeedyTemplate4 > b3; + WS_NeedyTemplate4 > b4; + WS_NeedyTemplate4 > b5; + WS_NeedyTemplate4 > b6; + + WS_NeedyTemplate4 > c1; + WS_NeedyTemplate4 > c2; + WS_NeedyTemplate4 > c3; + WS_NeedyTemplate4 > c4; + WS_NeedyTemplate4 > c5; + WS_NeedyTemplate4 > c6; + WS_NeedyTemplate4 > c7; + WS_NeedyTemplate4 > c8; + WS_NeedyTemplate4 > c9; + WS_NeedyTemplate4 > c10; + WS_NeedyTemplate4 > c11; + WS_NeedyTemplate4 > c12; + + WS_NeedyTemplate4 > d1; + WS_NeedyTemplate4 > d2; + WS_NeedyTemplate4 > d3; + WS_NeedyTemplate4 > d4; + WS_NeedyTemplate4 > d5; + WS_NeedyTemplate4 > d6; + WS_NeedyTemplate4 > d7; + WS_NeedyTemplate4 > d8; + WS_NeedyTemplate4 > d9; + WS_NeedyTemplate4 > d10; + WS_NeedyTemplate4 > d11; + WS_NeedyTemplate4 > d12; +} + +// +// 5 - Subclassed Wrapped MOZ_NEEDS_MEMMOVABLE_TYPE +// + +template +struct MOZ_NEEDS_MEMMOVABLE_TYPE NeedyTemplate5 {T m;}; // expected-error-re 26 {{Cannot instantiate 'NeedyTemplate5<{{.*}}>' with non-memmovable template argument '{{.*}}'}} +template +struct W_NeedyTemplate5 { + NeedyTemplate5 m; // expected-note-re 26 {{instantiation of 'NeedyTemplate5<{{.*}}>' requested here}} +}; +template +struct SW_NeedyTemplate5 : W_NeedyTemplate5 {}; +void bad5() { + SW_NeedyTemplate5 a1; + SW_NeedyTemplate5 a2; + SW_NeedyTemplate5 a3; + SW_NeedyTemplate5 a4; + SW_NeedyTemplate5 a5; + SW_NeedyTemplate5 a6; + + SW_NeedyTemplate5 > b1; + SW_NeedyTemplate5 > b2; + SW_NeedyTemplate5 > b3; + SW_NeedyTemplate5 > b4; + SW_NeedyTemplate5 > b5; + SW_NeedyTemplate5 > b6; + + SW_NeedyTemplate5 > c1; + SW_NeedyTemplate5 > c2; + SW_NeedyTemplate5 > c3; + SW_NeedyTemplate5 > c4; + SW_NeedyTemplate5 > c5; + SW_NeedyTemplate5 > c6; + SW_NeedyTemplate5 > c7; + SW_NeedyTemplate5 > c8; + SW_NeedyTemplate5 > c9; + SW_NeedyTemplate5 > c10; + SW_NeedyTemplate5 > c11; + SW_NeedyTemplate5 > c12; +} + +void good5() { + SW_NeedyTemplate5 a1; + SW_NeedyTemplate5 a2; + 
SW_NeedyTemplate5 a3; + SW_NeedyTemplate5 a4; + SW_NeedyTemplate5 a5; + SW_NeedyTemplate5 a6; + + SW_NeedyTemplate5 > b1; + SW_NeedyTemplate5 > b2; + SW_NeedyTemplate5 > b3; + SW_NeedyTemplate5 > b4; + SW_NeedyTemplate5 > b5; + SW_NeedyTemplate5 > b6; + + SW_NeedyTemplate5 > c1; + SW_NeedyTemplate5 > c2; + SW_NeedyTemplate5 > c3; + SW_NeedyTemplate5 > c4; + SW_NeedyTemplate5 > c5; + SW_NeedyTemplate5 > c6; + SW_NeedyTemplate5 > c7; + SW_NeedyTemplate5 > c8; + SW_NeedyTemplate5 > c9; + SW_NeedyTemplate5 > c10; + SW_NeedyTemplate5 > c11; + SW_NeedyTemplate5 > c12; + + SW_NeedyTemplate5 > d1; + SW_NeedyTemplate5 > d2; + SW_NeedyTemplate5 > d3; + SW_NeedyTemplate5 > d4; + SW_NeedyTemplate5 > d5; + SW_NeedyTemplate5 > d6; + SW_NeedyTemplate5 > d7; + SW_NeedyTemplate5 > d8; + SW_NeedyTemplate5 > d9; + SW_NeedyTemplate5 > d10; + SW_NeedyTemplate5 > d11; + SW_NeedyTemplate5 > d12; +} + +// +// 6 - MOZ_NEEDS_MEMMOVABLE_TYPE instantiated with default template argument +// +// Note: This has an extra error, because it also includes a test with the default template argument. +// + +template +struct MOZ_NEEDS_MEMMOVABLE_TYPE NeedyTemplate6 {T m;}; // expected-error-re 27 {{Cannot instantiate 'NeedyTemplate6<{{.*}}>' with non-memmovable template argument '{{.*}}'}} +template +struct W_NeedyTemplate6 { + NeedyTemplate6 m; // expected-note-re 27 {{instantiation of 'NeedyTemplate6<{{.*}}>' requested here}} +}; +template +struct SW_NeedyTemplate6 : W_NeedyTemplate6 {}; +// We create a different NonMovable type here, as NeedyTemplate6 will already be instantiated with NonMovable +struct MOZ_NON_MEMMOVABLE NonMovable2 {}; +template +struct Defaulted_SW_NeedyTemplate6 { + SW_NeedyTemplate6 m; +}; +void bad6() { + Defaulted_SW_NeedyTemplate6 a1; + Defaulted_SW_NeedyTemplate6 a2; + Defaulted_SW_NeedyTemplate6 a3; + Defaulted_SW_NeedyTemplate6 a4; + Defaulted_SW_NeedyTemplate6 a5; + Defaulted_SW_NeedyTemplate6 a6; + + Defaulted_SW_NeedyTemplate6 > b1; + Defaulted_SW_NeedyTemplate6 > b2; + Defaulted_SW_NeedyTemplate6 > b3; + Defaulted_SW_NeedyTemplate6 > b4; + Defaulted_SW_NeedyTemplate6 > b5; + Defaulted_SW_NeedyTemplate6 > b6; + + Defaulted_SW_NeedyTemplate6 > c1; + Defaulted_SW_NeedyTemplate6 > c2; + Defaulted_SW_NeedyTemplate6 > c3; + Defaulted_SW_NeedyTemplate6 > c4; + Defaulted_SW_NeedyTemplate6 > c5; + Defaulted_SW_NeedyTemplate6 > c6; + Defaulted_SW_NeedyTemplate6 > c7; + Defaulted_SW_NeedyTemplate6 > c8; + Defaulted_SW_NeedyTemplate6 > c9; + Defaulted_SW_NeedyTemplate6 > c10; + Defaulted_SW_NeedyTemplate6 > c11; + Defaulted_SW_NeedyTemplate6 > c12; + + Defaulted_SW_NeedyTemplate6<> c13; +} + +void good6() { + Defaulted_SW_NeedyTemplate6 a1; + Defaulted_SW_NeedyTemplate6 a2; + Defaulted_SW_NeedyTemplate6 a3; + Defaulted_SW_NeedyTemplate6 a4; + Defaulted_SW_NeedyTemplate6 a5; + Defaulted_SW_NeedyTemplate6 a6; + + Defaulted_SW_NeedyTemplate6 > b1; + Defaulted_SW_NeedyTemplate6 > b2; + Defaulted_SW_NeedyTemplate6 > b3; + Defaulted_SW_NeedyTemplate6 > b4; + Defaulted_SW_NeedyTemplate6 > b5; + Defaulted_SW_NeedyTemplate6 > b6; + + Defaulted_SW_NeedyTemplate6 > c1; + Defaulted_SW_NeedyTemplate6 > c2; + Defaulted_SW_NeedyTemplate6 > c3; + Defaulted_SW_NeedyTemplate6 > c4; + Defaulted_SW_NeedyTemplate6 > c5; + Defaulted_SW_NeedyTemplate6 > c6; + Defaulted_SW_NeedyTemplate6 > c7; + Defaulted_SW_NeedyTemplate6 > c8; + Defaulted_SW_NeedyTemplate6 > c9; + Defaulted_SW_NeedyTemplate6 > c10; + Defaulted_SW_NeedyTemplate6 > c11; + Defaulted_SW_NeedyTemplate6 > c12; + + Defaulted_SW_NeedyTemplate6 > d1; + 
Defaulted_SW_NeedyTemplate6 > d2; + Defaulted_SW_NeedyTemplate6 > d3; + Defaulted_SW_NeedyTemplate6 > d4; + Defaulted_SW_NeedyTemplate6 > d5; + Defaulted_SW_NeedyTemplate6 > d6; + Defaulted_SW_NeedyTemplate6 > d7; + Defaulted_SW_NeedyTemplate6 > d8; + Defaulted_SW_NeedyTemplate6 > d9; + Defaulted_SW_NeedyTemplate6 > d10; + Defaulted_SW_NeedyTemplate6 > d11; + Defaulted_SW_NeedyTemplate6 > d12; +} + +// +// 7 - MOZ_NEEDS_MEMMOVABLE_TYPE instantiated as default template argument +// + +template +struct MOZ_NEEDS_MEMMOVABLE_TYPE NeedyTemplate7 {T m;}; // expected-error-re 26 {{Cannot instantiate 'NeedyTemplate7<{{.*}}>' with non-memmovable template argument '{{.*}}'}} +template > +struct Defaulted_Templated_NeedyTemplate7 {Q m;}; // expected-note-re 26 {{instantiation of 'NeedyTemplate7<{{.*}}>' requested here}} +void bad7() { + Defaulted_Templated_NeedyTemplate7 a1; + Defaulted_Templated_NeedyTemplate7 a2; + Defaulted_Templated_NeedyTemplate7 a3; + Defaulted_Templated_NeedyTemplate7 a4; + Defaulted_Templated_NeedyTemplate7 a5; + Defaulted_Templated_NeedyTemplate7 a6; + + Defaulted_Templated_NeedyTemplate7 > b1; + Defaulted_Templated_NeedyTemplate7 > b2; + Defaulted_Templated_NeedyTemplate7 > b3; + Defaulted_Templated_NeedyTemplate7 > b4; + Defaulted_Templated_NeedyTemplate7 > b5; + Defaulted_Templated_NeedyTemplate7 > b6; + + Defaulted_Templated_NeedyTemplate7 > c1; + Defaulted_Templated_NeedyTemplate7 > c2; + Defaulted_Templated_NeedyTemplate7 > c3; + Defaulted_Templated_NeedyTemplate7 > c4; + Defaulted_Templated_NeedyTemplate7 > c5; + Defaulted_Templated_NeedyTemplate7 > c6; + Defaulted_Templated_NeedyTemplate7 > c7; + Defaulted_Templated_NeedyTemplate7 > c8; + Defaulted_Templated_NeedyTemplate7 > c9; + Defaulted_Templated_NeedyTemplate7 > c10; + Defaulted_Templated_NeedyTemplate7 > c11; + Defaulted_Templated_NeedyTemplate7 > c12; +} + +void good7() { + Defaulted_Templated_NeedyTemplate7 a1; + Defaulted_Templated_NeedyTemplate7 a2; + Defaulted_Templated_NeedyTemplate7 a3; + Defaulted_Templated_NeedyTemplate7 a4; + Defaulted_Templated_NeedyTemplate7 a5; + Defaulted_Templated_NeedyTemplate7 a6; + + Defaulted_Templated_NeedyTemplate7 > b1; + Defaulted_Templated_NeedyTemplate7 > b2; + Defaulted_Templated_NeedyTemplate7 > b3; + Defaulted_Templated_NeedyTemplate7 > b4; + Defaulted_Templated_NeedyTemplate7 > b5; + Defaulted_Templated_NeedyTemplate7 > b6; + + Defaulted_Templated_NeedyTemplate7 > c1; + Defaulted_Templated_NeedyTemplate7 > c2; + Defaulted_Templated_NeedyTemplate7 > c3; + Defaulted_Templated_NeedyTemplate7 > c4; + Defaulted_Templated_NeedyTemplate7 > c5; + Defaulted_Templated_NeedyTemplate7 > c6; + Defaulted_Templated_NeedyTemplate7 > c7; + Defaulted_Templated_NeedyTemplate7 > c8; + Defaulted_Templated_NeedyTemplate7 > c9; + Defaulted_Templated_NeedyTemplate7 > c10; + Defaulted_Templated_NeedyTemplate7 > c11; + Defaulted_Templated_NeedyTemplate7 > c12; + + Defaulted_Templated_NeedyTemplate7 > d1; + Defaulted_Templated_NeedyTemplate7 > d2; + Defaulted_Templated_NeedyTemplate7 > d3; + Defaulted_Templated_NeedyTemplate7 > d4; + Defaulted_Templated_NeedyTemplate7 > d5; + Defaulted_Templated_NeedyTemplate7 > d6; + Defaulted_Templated_NeedyTemplate7 > d7; + Defaulted_Templated_NeedyTemplate7 > d8; + Defaulted_Templated_NeedyTemplate7 > d9; + Defaulted_Templated_NeedyTemplate7 > d10; + Defaulted_Templated_NeedyTemplate7 > d11; + Defaulted_Templated_NeedyTemplate7 > d12; +} + +// +// 8 - Wrapped MOZ_NEEDS_MEMMOVABLE_TYPE instantiated as default template argument +// + +template +struct 
MOZ_NEEDS_MEMMOVABLE_TYPE NeedyTemplate8 {T m;}; // expected-error-re 26 {{Cannot instantiate 'NeedyTemplate8<{{.*}}>' with non-memmovable template argument '{{.*}}'}} +template > +struct Defaulted_Templated_NeedyTemplate8 {Q m;}; // expected-note-re 26 {{instantiation of 'NeedyTemplate8<{{.*}}>' requested here}} +template +struct W_Defaulted_Templated_NeedyTemplate8 { + Defaulted_Templated_NeedyTemplate8 m; +}; +void bad8() { + W_Defaulted_Templated_NeedyTemplate8 a1; + W_Defaulted_Templated_NeedyTemplate8 a2; + W_Defaulted_Templated_NeedyTemplate8 a3; + W_Defaulted_Templated_NeedyTemplate8 a4; + W_Defaulted_Templated_NeedyTemplate8 a5; + W_Defaulted_Templated_NeedyTemplate8 a6; + + W_Defaulted_Templated_NeedyTemplate8 > b1; + W_Defaulted_Templated_NeedyTemplate8 > b2; + W_Defaulted_Templated_NeedyTemplate8 > b3; + W_Defaulted_Templated_NeedyTemplate8 > b4; + W_Defaulted_Templated_NeedyTemplate8 > b5; + W_Defaulted_Templated_NeedyTemplate8 > b6; + + W_Defaulted_Templated_NeedyTemplate8 > c1; + W_Defaulted_Templated_NeedyTemplate8 > c2; + W_Defaulted_Templated_NeedyTemplate8 > c3; + W_Defaulted_Templated_NeedyTemplate8 > c4; + W_Defaulted_Templated_NeedyTemplate8 > c5; + W_Defaulted_Templated_NeedyTemplate8 > c6; + W_Defaulted_Templated_NeedyTemplate8 > c7; + W_Defaulted_Templated_NeedyTemplate8 > c8; + W_Defaulted_Templated_NeedyTemplate8 > c9; + W_Defaulted_Templated_NeedyTemplate8 > c10; + W_Defaulted_Templated_NeedyTemplate8 > c11; + W_Defaulted_Templated_NeedyTemplate8 > c12; +} + +void good8() { + W_Defaulted_Templated_NeedyTemplate8 a1; + W_Defaulted_Templated_NeedyTemplate8 a2; + W_Defaulted_Templated_NeedyTemplate8 a3; + W_Defaulted_Templated_NeedyTemplate8 a4; + W_Defaulted_Templated_NeedyTemplate8 a5; + W_Defaulted_Templated_NeedyTemplate8 a6; + + W_Defaulted_Templated_NeedyTemplate8 > b1; + W_Defaulted_Templated_NeedyTemplate8 > b2; + W_Defaulted_Templated_NeedyTemplate8 > b3; + W_Defaulted_Templated_NeedyTemplate8 > b4; + W_Defaulted_Templated_NeedyTemplate8 > b5; + W_Defaulted_Templated_NeedyTemplate8 > b6; + + W_Defaulted_Templated_NeedyTemplate8 > c1; + W_Defaulted_Templated_NeedyTemplate8 > c2; + W_Defaulted_Templated_NeedyTemplate8 > c3; + W_Defaulted_Templated_NeedyTemplate8 > c4; + W_Defaulted_Templated_NeedyTemplate8 > c5; + W_Defaulted_Templated_NeedyTemplate8 > c6; + W_Defaulted_Templated_NeedyTemplate8 > c7; + W_Defaulted_Templated_NeedyTemplate8 > c8; + W_Defaulted_Templated_NeedyTemplate8 > c9; + W_Defaulted_Templated_NeedyTemplate8 > c10; + W_Defaulted_Templated_NeedyTemplate8 > c11; + W_Defaulted_Templated_NeedyTemplate8 > c12; + + W_Defaulted_Templated_NeedyTemplate8 > d1; + W_Defaulted_Templated_NeedyTemplate8 > d2; + W_Defaulted_Templated_NeedyTemplate8 > d3; + W_Defaulted_Templated_NeedyTemplate8 > d4; + W_Defaulted_Templated_NeedyTemplate8 > d5; + W_Defaulted_Templated_NeedyTemplate8 > d6; + W_Defaulted_Templated_NeedyTemplate8 > d7; + W_Defaulted_Templated_NeedyTemplate8 > d8; + W_Defaulted_Templated_NeedyTemplate8 > d9; + W_Defaulted_Templated_NeedyTemplate8 > d10; + W_Defaulted_Templated_NeedyTemplate8 > d11; + W_Defaulted_Templated_NeedyTemplate8 > d12; +} + +/* + SpecializedNonMovable is a non-movable class which has an explicit specialization of NeedyTemplate + for it. Instantiations of NeedyTemplateN should be legal as the explicit + specialization isn't annotated with MOZ_NEEDS_MEMMOVABLE_TYPE. + + However, as it is MOZ_NON_MEMMOVABLE, derived classes and members shouldn't be able to be used to + instantiate NeedyTemplate. 
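  To make that escape hatch concrete, here is a minimal sketch of the same pattern
  (hypothetical names, assuming the usual annotation macros; it illustrates the idea
  and is not one of the declarations used below):

    #define MOZ_NEEDS_MEMMOVABLE_TYPE __attribute__((annotate("moz_needs_memmovable_type")))
    #define MOZ_NON_MEMMOVABLE __attribute__((annotate("moz_non_memmovable")))

    template <class T>
    struct MOZ_NEEDS_MEMMOVABLE_TYPE Needy { T m; };

    struct MOZ_NON_MEMMOVABLE Special {};

    // Explicit specialization: instantiating Needy with Special is fine, because
    // this specialization carries no MOZ_NEEDS_MEMMOVABLE_TYPE annotation.
    template <>
    struct Needy<Special> {};

    // There is no specialization covering a subclass or a wrapper of Special,
    // so using either of these as the template argument would still be rejected:
    // the derived class inherits non-memmovability, and the wrapper picks it up
    // through its member.
    struct DerivedSpecial : Special {};
    struct WrapsSpecial { Special s; };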
+*/ + +struct MOZ_NON_MEMMOVABLE SpecializedNonMovable {}; +struct S_SpecializedNonMovable : SpecializedNonMovable {}; // expected-note 8 {{'S_SpecializedNonMovable' is a non-memmove()able type because it inherits from a non-memmove()able type 'SpecializedNonMovable'}} + +// Specialize all of the NeedyTemplates with SpecializedNonMovable. +template <> +struct NeedyTemplate1 {}; +template <> +struct NeedyTemplate2 {}; +template <> +struct NeedyTemplate3 {}; +template <> +struct NeedyTemplate4 {}; +template <> +struct NeedyTemplate5 {}; +template <> +struct NeedyTemplate6 {}; +template <> +struct NeedyTemplate7 {}; +template <> +struct NeedyTemplate8 {}; + +void specialization() { + /* + SpecializedNonMovable has a specialization for every variant of NeedyTemplate, + so these templates are valid, even though SpecializedNonMovable isn't + memmovable + */ + NeedyTemplate1 a1; + S_NeedyTemplate2 a2; + W_NeedyTemplate3 a3; + WS_NeedyTemplate4 a4; + SW_NeedyTemplate5 a5; + Defaulted_SW_NeedyTemplate6 a6; + Defaulted_Templated_NeedyTemplate7 a7; + W_Defaulted_Templated_NeedyTemplate8 a8; + + /* + These entries contain an element which is SpecializedNonMovable, and are non-movable + as there is no valid specialization, and their member is non-memmovable + */ + NeedyTemplate1 > b1; // expected-note-re {{instantiation of 'NeedyTemplate1{{ ?}}>' requested here}} + S_NeedyTemplate2 > b2; + W_NeedyTemplate3 > b3; + WS_NeedyTemplate4 > b4; + SW_NeedyTemplate5 > b5; + Defaulted_SW_NeedyTemplate6 > b6; + Defaulted_Templated_NeedyTemplate7 > b7; + W_Defaulted_Templated_NeedyTemplate8 > b8; + + /* + The subclass of SpecializedNonMovable, is also non-memmovable, + as there is no valid specialization. + */ + NeedyTemplate1 c1; // expected-note {{instantiation of 'NeedyTemplate1' requested here}} + S_NeedyTemplate2 c2; + W_NeedyTemplate3 c3; + WS_NeedyTemplate4 c4; + SW_NeedyTemplate5 c5; + Defaulted_SW_NeedyTemplate6 c6; + Defaulted_Templated_NeedyTemplate7 c7; + W_Defaulted_Templated_NeedyTemplate8 c8; +} + +class MOZ_NEEDS_MEMMOVABLE_MEMBERS NeedsMemMovableMembers { + Movable m1; + NonMovable m2; // expected-error {{class 'NeedsMemMovableMembers' cannot have non-memmovable member 'm2' of type 'NonMovable'}} + S_Movable sm1; + S_NonMovable sm2; // expected-error {{class 'NeedsMemMovableMembers' cannot have non-memmovable member 'sm2' of type 'S_NonMovable'}} + W_Movable wm1; + W_NonMovable wm2; // expected-error {{class 'NeedsMemMovableMembers' cannot have non-memmovable member 'wm2' of type 'W_NonMovable'}} + SW_Movable swm1; + SW_NonMovable swm2; // expected-error {{class 'NeedsMemMovableMembers' cannot have non-memmovable member 'swm2' of type 'SW_NonMovable'}} + WS_Movable wsm1; + WS_NonMovable wsm2; // expected-error {{class 'NeedsMemMovableMembers' cannot have non-memmovable member 'wsm2' of type 'WS_NonMovable'}} + SWS_Movable swsm1; + SWS_NonMovable swsm2; // expected-error {{class 'NeedsMemMovableMembers' cannot have non-memmovable member 'swsm2' of type 'SWS_NonMovable'}} +}; + +class NeedsMemMovableMembersDerived : public NeedsMemMovableMembers {}; diff --git a/build/clang-plugin/tests/TestNonMemMovableStd.cpp b/build/clang-plugin/tests/TestNonMemMovableStd.cpp new file mode 100644 index 0000000000..c821e6db15 --- /dev/null +++ b/build/clang-plugin/tests/TestNonMemMovableStd.cpp @@ -0,0 +1,21 @@ +#define MOZ_NEEDS_MEMMOVABLE_TYPE __attribute__((annotate("moz_needs_memmovable_type"))) + +template +class MOZ_NEEDS_MEMMOVABLE_TYPE Mover { T mForceInst; }; // expected-error-re 4 {{Cannot instantiate 
'Mover<{{.*}}>' with non-memmovable template argument '{{.*}}'}} + +namespace std { +// In theory defining things in std:: like this invokes undefined +// behavior, but in practice it's good enough for this test case. +template class basic_string { }; // expected-note 2 {{'std::basic_string' is a non-memmove()able type because it is an stl-provided type not guaranteed to be memmove-able}} expected-note {{'std::string' (aka 'basic_string') is a non-memmove()able type because it is an stl-provided type not guaranteed to be memmove-able}} +typedef basic_string string; +template class pair { T mT; U mU; }; // expected-note-re {{std::pair{{ ?}}>' is a non-memmove()able type because member 'mU' is a non-memmove()able type 'std::basic_string'}} +class arbitrary_name { }; // expected-note {{'std::arbitrary_name' is a non-memmove()able type because it is an stl-provided type not guaranteed to be memmove-able}} +} + +class HasString { std::string m; }; // expected-note {{'HasString' is a non-memmove()able type because member 'm' is a non-memmove()able type 'std::string' (aka 'basic_string')}} + +static Mover bad; // expected-note-re {{instantiation of 'Mover{{ ?}}>' requested here}} +static Mover bad_mem; // expected-note {{instantiation of 'Mover' requested here}} +static Mover assumed_bad; // expected-note {{instantiation of 'Mover' requested here}} +static Mover> good; +static Mover> not_good; // expected-note-re {{instantiation of 'Mover{{ ?}}>{{ ?}}>' requested here}} diff --git a/build/clang-plugin/tests/TestNonMemMovableStdAtomic.cpp b/build/clang-plugin/tests/TestNonMemMovableStdAtomic.cpp new file mode 100644 index 0000000000..b8aef2eacd --- /dev/null +++ b/build/clang-plugin/tests/TestNonMemMovableStdAtomic.cpp @@ -0,0 +1,30 @@ +// expected-no-diagnostics + +#define MOZ_NEEDS_MEMMOVABLE_TYPE __attribute__((annotate("moz_needs_memmovable_type"))) + +template +class MOZ_NEEDS_MEMMOVABLE_TYPE Mover { T mForceInst; }; + +#include +#include +struct CustomType{}; +static struct { + Mover> m1; + Mover> m2; + Mover> m3; + Mover> m4; + Mover> m5; + Mover> m6; + Mover> m7; + Mover> m8; + Mover> m9; + Mover> m10; + Mover> m11; + Mover> m12; + Mover> m13; + Mover> m14; + Mover> m15; + Mover> m16; + Mover> m17; + Mover> m18; +} good; diff --git a/build/clang-plugin/tests/TestNonParameterChecker.cpp b/build/clang-plugin/tests/TestNonParameterChecker.cpp new file mode 100644 index 0000000000..d3c2b9b379 --- /dev/null +++ b/build/clang-plugin/tests/TestNonParameterChecker.cpp @@ -0,0 +1,189 @@ +#define MOZ_NON_PARAM __attribute__((annotate("moz_non_param"))) + +struct Param {}; +struct MOZ_NON_PARAM NonParam {}; +union MOZ_NON_PARAM NonParamUnion {}; +class MOZ_NON_PARAM NonParamClass {}; +enum MOZ_NON_PARAM NonParamEnum { X, Y, Z }; +enum class MOZ_NON_PARAM NonParamEnumClass { X, Y, Z }; + +struct HasNonParamStruct { NonParam x; int y; }; // expected-note 14 {{'HasNonParamStruct' is a non-param type because member 'x' is a non-param type 'NonParam'}} +union HasNonParamUnion { NonParam x; int y; }; // expected-note 18 {{'HasNonParamUnion' is a non-param type because member 'x' is a non-param type 'NonParam'}} +struct HasNonParamStructUnion { HasNonParamUnion z; }; // expected-note 9 {{'HasNonParamStructUnion' is a non-param type because member 'z' is a non-param type 'HasNonParamUnion'}} + +#define MAYBE_STATIC +#include "NonParameterTestCases.h" +#undef MAYBE_STATIC + +// Do not check typedef and using. 
+typedef void (*funcTypeParam)(Param x); +typedef void (*funcTypeNonParam)(NonParam x); + +using usingFuncTypeParam = void (*)(Param x); +using usingFuncTypeNonParam = void (*)(NonParam x); + +class class_ +{ + explicit class_(Param x) {} + explicit class_(NonParam x) {} //expected-error {{Type 'NonParam' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} + explicit class_(HasNonParamStruct x) {} //expected-error {{Type 'HasNonParamStruct' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} + explicit class_(HasNonParamUnion x) {} //expected-error {{Type 'HasNonParamUnion' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} + explicit class_(HasNonParamStructUnion x) {} //expected-error {{Type 'HasNonParamStructUnion' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} + +#define MAYBE_STATIC +#include "NonParameterTestCases.h" +#undef MAYBE_STATIC +}; + +class classWithStatic +{ +#define MAYBE_STATIC static +#include "NonParameterTestCases.h" +#undef MAYBE_STATIC +}; + +template +class tmplClassForParam +{ +public: + void raw(T x) {} + void rawDefault(T x = T()) {} + void const_(const T x) {} + void ptr(T* x) {} + void ref(T& x) {} + void constRef(const T& x) {} + + void notCalled(T x) {} +}; + +template +class tmplClassForNonParam +{ +public: + void raw(T x) {} //expected-error {{Type 'NonParam' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} + void rawDefault(T x = T()) {} //expected-error {{Type 'NonParam' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} + void const_(const T x) {} //expected-error {{Type 'NonParam' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} + void ptr(T* x) {} + void ref(T& x) {} + void constRef(const T& x) {} + + void notCalled(T x) {} +}; + +template +class tmplClassForHasNonParamStruct +{ +public: + void raw(T x) {} //expected-error {{Type 'HasNonParamStruct' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} + void rawDefault(T x = T()) {} //expected-error {{Type 'HasNonParamStruct' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} + void const_(const T x) {} //expected-error {{Type 'HasNonParamStruct' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} + void ptr(T* x) {} + void ref(T& x) {} + void constRef(const T& x) {} + + void notCalled(T x) {} +}; + +void testTemplateClass() +{ + tmplClassForParam paramClass; + Param param; + paramClass.raw(param); + paramClass.rawDefault(); + paramClass.const_(param); + paramClass.ptr(¶m); + paramClass.ref(param); + paramClass.constRef(param); + + tmplClassForNonParam nonParamClass; //expected-note 3 {{The bad argument was passed to 'tmplClassForNonParam' here}} + NonParam nonParam; + nonParamClass.raw(nonParam); + nonParamClass.rawDefault(); + nonParamClass.const_(nonParam); + nonParamClass.ptr(&nonParam); + nonParamClass.ref(nonParam); + nonParamClass.constRef(nonParam); + + tmplClassForHasNonParamStruct hasNonParamStructClass;//expected-note 3 {{The bad argument was passed to 'tmplClassForHasNonParamStruct' here}} + HasNonParamStruct hasNonParamStruct; + hasNonParamStructClass.raw(hasNonParamStruct); + 
hasNonParamStructClass.rawDefault(); + hasNonParamStructClass.const_(hasNonParamStruct); + hasNonParamStructClass.ptr(&hasNonParamStruct); + hasNonParamStructClass.ref(hasNonParamStruct); + hasNonParamStructClass.constRef(hasNonParamStruct); +} + +template +class NestedTemplateInner +{ +public: + void raw(T x) {} //expected-error {{Type 'NonParam' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} +}; + +template +class nestedTemplateOuter +{ +public: + void constRef(const T& x) { + NestedTemplateInner inner; //expected-note {{The bad argument was passed to 'NestedTemplateInner' here}} + inner.raw(x); + } +}; + +void testNestedTemplateClass() +{ + nestedTemplateOuter outer; + NonParam nonParam; + outer.constRef(nonParam); // FIXME: this line needs note "The bad argument was passed to 'constRef' here" +} + +template +void tmplFuncForParam(T x) {} +template +void tmplFuncForNonParam(T x) {} //expected-error {{Type 'NonParam' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} +template +void tmplFuncForNonParamImplicit(T x) {} //expected-error {{Type 'NonParam' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} +template +void tmplFuncForHasNonParamStruct(T x) {} //expected-error {{Type 'HasNonParamStruct' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} +template +void tmplFuncForHasNonParamStructImplicit(T x) {} //expected-error {{Type 'HasNonParamStruct' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} + +void testTemplateFunc() +{ + Param param; + tmplFuncForParam(param); + + NonParam nonParam; + tmplFuncForNonParam(nonParam); // FIXME: this line needs note "The bad argument was passed to 'tmplFuncForNonParam' here" + tmplFuncForNonParamImplicit(nonParam); // FIXME: this line needs note "The bad argument was passed to 'tmplFuncForNonParamImplicit' here" + + HasNonParamStruct hasNonParamStruct; + tmplFuncForHasNonParamStruct(hasNonParamStruct); // FIXME: this line needs note "The bad argument was passed to 'tmplFuncForHasNonParamStruct' here" + tmplFuncForHasNonParamStructImplicit(hasNonParamStruct); // FIXME: this line needs note "The bad argument was passed to 'tmplFuncForHasNonParamStructImplicit' here" +} + +void testLambda() +{ + auto paramLambda = [](Param x) -> void {}; + auto nonParamLambda = [](NonParam x) -> void {}; //expected-error {{Type 'NonParam' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} + auto nonParamStructLambda = [](HasNonParamStruct x) -> void {}; //expected-error {{Type 'HasNonParamStruct' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} + auto nonParamUnionLambda = [](HasNonParamUnion x) -> void {}; //expected-error {{Type 'HasNonParamUnion' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} + auto nonParamStructUnionLambda = [](HasNonParamStructUnion x) -> void {}; //expected-error {{Type 'HasNonParamStructUnion' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} + + (void)[](Param x) -> void {}; + (void)[](NonParam x) -> void {}; //expected-error {{Type 'NonParam' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} + (void)[](HasNonParamStruct x) -> void {}; 
//expected-error {{Type 'HasNonParamStruct' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} + (void)[](HasNonParamUnion x) -> void {}; //expected-error {{Type 'HasNonParamUnion' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} + (void)[](HasNonParamStructUnion x) -> void {}; //expected-error {{Type 'HasNonParamStructUnion' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} +} + +// Check that alignas() implies the MOZ_NON_PARAM attribute. + +struct alignas(8) AlignasStruct { char a; }; // expected-note {{'AlignasStruct' is a non-param type because it has an alignas(_) annotation}} +void takesAlignasStruct(AlignasStruct x) { } // expected-error {{Type 'AlignasStruct' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} +void takesAlignasStructByRef(const AlignasStruct& x) { } + +struct AlignasMember { alignas(8) char a; }; // expected-note {{'AlignasMember' is a non-param type because member 'a' has an alignas(_) annotation}} +void takesAlignasMember(AlignasMember x) { } // expected-error {{Type 'AlignasMember' must not be used as parameter}} expected-note {{Please consider passing a const reference instead}} +void takesAlignasMemberByRef(const AlignasMember& x) { } diff --git a/build/clang-plugin/tests/TestNonTemporaryClass.cpp b/build/clang-plugin/tests/TestNonTemporaryClass.cpp new file mode 100644 index 0000000000..682c8ad530 --- /dev/null +++ b/build/clang-plugin/tests/TestNonTemporaryClass.cpp @@ -0,0 +1,70 @@ +#define MOZ_NON_TEMPORARY_CLASS __attribute__((annotate("moz_non_temporary_class"))) +#define MOZ_IMPLICIT __attribute__((annotate("moz_implicit"))) + +#include + +struct MOZ_NON_TEMPORARY_CLASS NonTemporary { + int i; + NonTemporary() {} + MOZ_IMPLICIT NonTemporary(int a) {} + NonTemporary(int a, int b) {} + void *operator new(size_t x) throw() { return 0; } + void *operator new(size_t blah, char *buffer) { return buffer; } +}; + +template +struct MOZ_NON_TEMPORARY_CLASS TemplateClass { + T i; +}; + +void gobble(void *) { } + +void gobbleref(const NonTemporary&) { } + +template +void gobbleanyref(const T&) { } + +void misuseNonTemporaryClass(int len) { + NonTemporary invalid; + NonTemporary alsoInvalid[2]; + static NonTemporary invalidStatic; + static NonTemporary alsoInvalidStatic[2]; + + gobble(&invalid); + gobble(&invalidStatic); + gobble(&alsoInvalid[0]); + + gobbleref(NonTemporary()); // expected-error {{variable of type 'NonTemporary' is not valid in a temporary}} expected-note {{value incorrectly allocated in a temporary}} + gobbleref(NonTemporary(10, 20)); // expected-error {{variable of type 'NonTemporary' is not valid in a temporary}} expected-note {{value incorrectly allocated in a temporary}} + gobbleref(NonTemporary(10)); // expected-error {{variable of type 'NonTemporary' is not valid in a temporary}} expected-note {{value incorrectly allocated in a temporary}} + gobbleref(10); // expected-error {{variable of type 'NonTemporary' is not valid in a temporary}} expected-note {{value incorrectly allocated in a temporary}} + gobbleanyref(TemplateClass()); // expected-error {{variable of type 'TemplateClass' is not valid in a temporary}} expected-note {{value incorrectly allocated in a temporary}} + + gobble(new NonTemporary); + gobble(new NonTemporary[10]); + gobble(new TemplateClass); + gobble(len <= 5 ? 
&invalid : new NonTemporary); + + char buffer[sizeof(NonTemporary)]; + gobble(new (buffer) NonTemporary); +} + +void defaultArg(const NonTemporary& arg = NonTemporary()) { +} + +NonTemporary invalidStatic; +struct RandomClass { + NonTemporary nonstaticMember; // expected-note {{'RandomClass' is a non-temporary type because member 'nonstaticMember' is a non-temporary type 'NonTemporary'}} + static NonTemporary staticMember; +}; +struct MOZ_NON_TEMPORARY_CLASS RandomNonTemporaryClass { + NonTemporary nonstaticMember; + static NonTemporary staticMember; +}; + +struct BadInherit : NonTemporary {}; // expected-note {{'BadInherit' is a non-temporary type because it inherits from a non-temporary type 'NonTemporary'}} + +void useStuffWrongly() { + gobbleanyref(BadInherit()); // expected-error {{variable of type 'BadInherit' is not valid in a temporary}} expected-note {{value incorrectly allocated in a temporary}} + gobbleanyref(RandomClass()); // expected-error {{variable of type 'RandomClass' is not valid in a temporary}} expected-note {{value incorrectly allocated in a temporary}} +} diff --git a/build/clang-plugin/tests/TestNonTrivialTypeInFfi.cpp b/build/clang-plugin/tests/TestNonTrivialTypeInFfi.cpp new file mode 100644 index 0000000000..e491122b99 --- /dev/null +++ b/build/clang-plugin/tests/TestNonTrivialTypeInFfi.cpp @@ -0,0 +1,65 @@ +// clang warns for some of these on its own, but we're not testing that, plus +// some of them (TrivialT) is a false positive (clang doesn't realize the +// type is fully specialized below). +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wreturn-type-c-linkage" + +struct Opaque; +struct Trivial { + int foo; + char bar; + Opaque* baz; +}; + +template +struct TrivialT { + int foo; + char bar; + T* baz; +}; + +struct NonTrivial { + ~NonTrivial() { + } + + Opaque* ptr; +}; + +template +struct NonTrivialT { + ~NonTrivialT() { + delete ptr; + } + + T* ptr; +}; + +struct TransitivelyNonTrivial { + NonTrivial nontrivial; +}; + +extern "C" void Foo(); +extern "C" Trivial Foo1(); +extern "C" NonTrivial Foo2(); // expected-error {{Type 'NonTrivial' must not be used as return type of extern "C" function}} expected-note {{Please consider using a pointer or reference instead}} +extern "C" NonTrivialT Foo3(); // expected-error {{Type 'NonTrivialT' must not be used as return type of extern "C" function}} expected-note {{Please consider using a pointer or reference, or explicitly instantiating the template instead}} +extern "C" NonTrivialT Foo4(); // expected-error {{Type 'NonTrivialT' must not be used as return type of extern "C" function}} expected-note {{Please consider using a pointer or reference, or explicitly instantiating the template instead}} + +extern "C" NonTrivial* Foo5(); + +extern "C" TrivialT Foo6(); +extern "C" TrivialT Foo7(); // expected-error {{Type 'TrivialT' must not be used as return type of extern "C" function}} expected-note {{Please consider using a pointer or reference, or explicitly instantiating the template instead}} +extern "C" Trivial* Foo8(); + +extern "C" void Foo9(Trivial); +extern "C" void Foo10(NonTrivial); // expected-error {{Type 'NonTrivial' must not be used as parameter to extern "C" function}} expected-note {{Please consider using a pointer or reference instead}} +extern "C" void Foo11(NonTrivial*); +extern "C" void Foo12(NonTrivialT); // expected-error {{Type 'NonTrivialT' must not be used as parameter to extern "C" function}} expected-note {{Please consider using a pointer or reference, or explicitly instantiating 
the template instead}} +extern "C" void Foo13(TrivialT); +extern "C" void Foo14(TrivialT); // expected-error {{Type 'TrivialT' must not be used as parameter to extern "C" function}} expected-note {{Please consider using a pointer or reference, or explicitly instantiating the template instead}} + +extern "C" TransitivelyNonTrivial Foo15(); // expected-error {{Type 'TransitivelyNonTrivial' must not be used as return type of extern "C" function}} expected-note {{Please consider using a pointer or reference instead}} +extern "C" void Foo16(TransitivelyNonTrivial); // expected-error {{Type 'TransitivelyNonTrivial' must not be used as parameter to extern "C" function}} expected-note {{Please consider using a pointer or reference instead}} + +template struct TrivialT; + +#pragma GCC diagnostic pop diff --git a/build/clang-plugin/tests/TestOverrideBaseCall.cpp b/build/clang-plugin/tests/TestOverrideBaseCall.cpp new file mode 100644 index 0000000000..6fdaaad04e --- /dev/null +++ b/build/clang-plugin/tests/TestOverrideBaseCall.cpp @@ -0,0 +1,175 @@ +#define MOZ_REQUIRED_BASE_METHOD __attribute__((annotate("moz_required_base_method"))) + +class Base { +public: + virtual void fo() MOZ_REQUIRED_BASE_METHOD { + } + + virtual int foRet() MOZ_REQUIRED_BASE_METHOD { + return 0; + } +}; + +class BaseOne : public Base { +public: + virtual void fo() MOZ_REQUIRED_BASE_METHOD { + Base::fo(); + } +}; + +class BaseSecond : public Base { +public: + virtual void fo() MOZ_REQUIRED_BASE_METHOD { + Base::fo(); + } +}; + +class Deriv : public BaseOne, public BaseSecond { +public: + void func() { + } + + void fo() { + func(); + BaseSecond::fo(); + BaseOne::fo(); + } +}; + +class DerivSimple : public Base { +public: + void fo() { // expected-error {{Method Base::fo must be called in all overrides, but is not called in this override defined for class DerivSimple}} + } +}; + +class BaseVirtualOne : public virtual Base { +}; + +class BaseVirtualSecond: public virtual Base { +}; + +class DerivVirtual : public BaseVirtualOne, public BaseVirtualSecond { +public: + void fo() { + Base::fo(); + } +}; + +class DerivIf : public Base { +public: + void fo() { + if (true) { + Base::fo(); + } + } +}; + +class DerivIfElse : public Base { +public: + void fo() { + if (true) { + Base::fo(); + } else { + Base::fo(); + } + } +}; + +class DerivFor : public Base { +public: + void fo() { + for (int i = 0; i < 10; i++) { + Base::fo(); + } + } +}; + +class DerivDoWhile : public Base { +public: + void fo() { + do { + Base::fo(); + } while(false); + } +}; + +class DerivWhile : public Base { +public: + void fo() { + while (true) { + Base::fo(); + break; + } + } +}; + +class DerivAssignment : public Base { +public: + int foRet() { + return foRet(); + } +}; + +class BaseOperator { +private: + int value; +public: + BaseOperator() : value(0) { + } + virtual BaseOperator& operator++() MOZ_REQUIRED_BASE_METHOD { + value++; + return *this; + } +}; + +class DerivOperatorErr : public BaseOperator { +private: + int value; +public: + DerivOperatorErr() : value(0) { + } + DerivOperatorErr& operator++() { // expected-error {{Method BaseOperator::operator++ must be called in all overrides, but is not called in this override defined for class DerivOperatorErr}} + value++; + return *this; + } +}; + +class DerivOperator : public BaseOperator { +private: + int value; +public: + DerivOperator() : value(0) { + } + DerivOperator& operator++() { + BaseOperator::operator++(); + value++; + return *this; + } +}; + +class DerivPrime : public Base { +public: + void fo() { + 
Base::fo(); + } +}; + +class DerivSecondErr : public DerivPrime { +public: + void fo() { // expected-error {{Method Base::fo must be called in all overrides, but is not called in this override defined for class DerivSecondErr}} + } +}; + +class DerivSecond : public DerivPrime { +public: + void fo() { + Base::fo(); + } +}; + +class DerivSecondIndirect : public DerivPrime { +public: + void fo() { + DerivPrime::fo(); + } +}; diff --git a/build/clang-plugin/tests/TestOverrideBaseCallAnnotation.cpp b/build/clang-plugin/tests/TestOverrideBaseCallAnnotation.cpp new file mode 100644 index 0000000000..e268122c69 --- /dev/null +++ b/build/clang-plugin/tests/TestOverrideBaseCallAnnotation.cpp @@ -0,0 +1,47 @@ +#define MOZ_REQUIRED_BASE_METHOD __attribute__((annotate("moz_required_base_method"))) + +class Base { +public: + virtual void fo() MOZ_REQUIRED_BASE_METHOD { + } +}; + +class BaseNonVirtual { +public: + void fo() MOZ_REQUIRED_BASE_METHOD { // expected-error {{MOZ_REQUIRED_BASE_METHOD can be used only on virtual methods}} + } +}; + +class Deriv : public BaseNonVirtual { +public: + virtual void fo() MOZ_REQUIRED_BASE_METHOD { + } +}; + +class DerivVirtual : public Base { +public: + void fo() MOZ_REQUIRED_BASE_METHOD { + Base::fo(); + } +}; + +class BaseOperator { +public: + BaseOperator& operator++() MOZ_REQUIRED_BASE_METHOD { // expected-error {{MOZ_REQUIRED_BASE_METHOD can be used only on virtual methods}} + return *this; + } +}; + +class DerivOperator : public BaseOperator { +public: + virtual DerivOperator& operator++() { + return *this; + } +}; + +class DerivPrimeOperator : public DerivOperator { +public: + DerivPrimeOperator& operator++() { + return *this; + } +}; diff --git a/build/clang-plugin/tests/TestParamTraitsEnum.cpp b/build/clang-plugin/tests/TestParamTraitsEnum.cpp new file mode 100644 index 0000000000..a250250bfe --- /dev/null +++ b/build/clang-plugin/tests/TestParamTraitsEnum.cpp @@ -0,0 +1,94 @@ +typedef enum { + BadFirst, + BadSecond, + BadThird +} BadEnum; + +typedef enum { + NestedFirst, + NestedSecond +} NestedBadEnum; + +typedef enum { + GoodFirst, + GoodSecond, + GoodLast +} GoodEnum; + +enum RawEnum { + RawFirst, + RawLast +}; + +enum class ClassEnum { + ClassFirst, + ClassLast +}; + +template struct ParamTraits; + +// Simplified EnumSerializer etc. 
from IPCMessageUtils.h +template +struct EnumSerializer { + typedef E paramType; +}; + +template +class ContiguousEnumValidator +{}; + +template +struct ContiguousEnumSerializer + : EnumSerializer> +{}; + +// Typical ParamTraits implementation that should be avoided +template<> +struct ParamTraits // expected-error {{Custom ParamTraits implementation for an enum type}} expected-note {{Please use a helper class for example ContiguousEnumSerializer}} +{ + typedef ClassEnum paramType; +}; + +template<> +struct ParamTraits // expected-error {{Custom ParamTraits implementation for an enum type}} expected-note {{Please use a helper class for example ContiguousEnumSerializer}} +{ + typedef enum RawEnum paramType; +}; + +template<> +struct ParamTraits // expected-error {{Custom ParamTraits implementation for an enum type}} expected-note {{Please use a helper class for example ContiguousEnumSerializer}} +{ + typedef BadEnum paramType; +}; + +// Make sure the analysis catches nested typedefs +typedef NestedBadEnum NestedDefLevel1; +typedef NestedDefLevel1 NestedDefLevel2; + +template<> +struct ParamTraits // expected-error {{Custom ParamTraits implementation for an enum type}} expected-note {{Please use a helper class for example ContiguousEnumSerializer}} +{ + typedef NestedDefLevel2 paramType; +}; + +// Make sure a non enum typedef is not accidentally flagged +typedef int IntTypedef; + +template<> +struct ParamTraits +{ + typedef IntTypedef paramType; +}; + +// Make sure ParamTraits using helper classes are not flagged +template<> +struct ParamTraits +: public ContiguousEnumSerializer +{}; diff --git a/build/clang-plugin/tests/TestRefCountedCopyConstructor.cpp b/build/clang-plugin/tests/TestRefCountedCopyConstructor.cpp new file mode 100644 index 0000000000..d3bd73084c --- /dev/null +++ b/build/clang-plugin/tests/TestRefCountedCopyConstructor.cpp @@ -0,0 +1,25 @@ +// Implicit copy construct which is unused +class RC1 { + void AddRef(); + void Release(); + int mRefCnt; +}; + +// Explicit copy constructor which is used +class RC2 { +public: + RC2(); + RC2(const RC2&); +private: + void AddRef(); + void Release(); + int mRefCnt; +}; + +void f() { + RC1* r1 = new RC1(); + RC1* r1p = new RC1(*r1); // expected-error {{Invalid use of compiler-provided copy constructor on refcounted type}} expected-note {{The default copy constructor also copies the default mRefCnt property, leading to reference count imbalance issues. 
Please provide your own copy constructor which only copies the fields which need to be copied}} + + RC2* r2 = new RC2(); + RC2* r2p = new RC2(*r2); +} diff --git a/build/clang-plugin/tests/TestSprintfLiteral.cpp b/build/clang-plugin/tests/TestSprintfLiteral.cpp new file mode 100644 index 0000000000..a8dac4009c --- /dev/null +++ b/build/clang-plugin/tests/TestSprintfLiteral.cpp @@ -0,0 +1,41 @@ +#include + +void bad() { + char x[100]; + snprintf(x, sizeof(x), "bar"); // expected-error {{Use SprintfLiteral instead of snprintf when writing into a character array.}} expected-note {{This will prevent passing in the wrong size to snprintf accidentally.}} + snprintf(x, 100, "bar"); // expected-error {{Use SprintfLiteral instead of snprintf when writing into a character array.}} expected-note {{This will prevent passing in the wrong size to snprintf accidentally.}} + const int hundred = 100; + snprintf(x, hundred, "bar"); // expected-error {{Use SprintfLiteral instead of snprintf when writing into a character array.}} expected-note {{This will prevent passing in the wrong size to snprintf accidentally.}} +} + +void ok() { + char x[100]; + int y; + snprintf(x, sizeof(y), "what"); + + snprintf(x, 50, "what"); + + int nothundred = 100; + nothundred = 99; + snprintf(x, nothundred, "what"); +} + +void vargs_bad(va_list args) { + char x[100]; + vsnprintf(x, sizeof(x), "bar", args); // expected-error {{Use VsprintfLiteral instead of vsnprintf when writing into a character array.}} expected-note {{This will prevent passing in the wrong size to vsnprintf accidentally.}} + vsnprintf(x, 100, "bar", args); // expected-error {{Use VsprintfLiteral instead of vsnprintf when writing into a character array.}} expected-note {{This will prevent passing in the wrong size to vsnprintf accidentally.}} + const int hundred = 100; + vsnprintf(x, hundred, "bar", args); // expected-error {{Use VsprintfLiteral instead of vsnprintf when writing into a character array.}} expected-note {{This will prevent passing in the wrong size to vsnprintf accidentally.}} +} + +void vargs_good(va_list args) { + char x[100]; + int y; + vsnprintf(x, sizeof(y), "what", args); + + vsnprintf(x, 50, "what", args); + + int nothundred = 100; + nothundred = 99; + vsnprintf(x, nothundred, "what", args); +} diff --git a/build/clang-plugin/tests/TestStackClass.cpp b/build/clang-plugin/tests/TestStackClass.cpp new file mode 100644 index 0000000000..41afa39e12 --- /dev/null +++ b/build/clang-plugin/tests/TestStackClass.cpp @@ -0,0 +1,50 @@ +#define MOZ_STACK_CLASS __attribute__((annotate("moz_stack_class"))) +#include + +struct MOZ_STACK_CLASS Stack { + int i; + void *operator new(size_t x) throw() { return 0; } + void *operator new(size_t blah, char *buffer) { return buffer; } +}; + +template +struct MOZ_STACK_CLASS TemplateClass { + T i; +}; + +void gobble(void *) { } + +void misuseStackClass(int len) { + Stack valid; + Stack alsoValid[2]; + static Stack notValid; // expected-error {{variable of type 'Stack' only valid on the stack}} expected-note {{value incorrectly allocated in a global variable}} + static Stack alsoNotValid[2]; // expected-error {{variable of type 'Stack [2]' only valid on the stack}} expected-note {{'Stack [2]' is a stack type because it is an array of stack type 'Stack'}} expected-note {{value incorrectly allocated in a global variable}} + + gobble(&valid); + gobble(¬Valid); + gobble(&alsoValid[0]); + + gobble(new Stack); // expected-error {{variable of type 'Stack' only valid on the stack}} expected-note {{value incorrectly 
allocated on the heap}} + gobble(new Stack[10]); // expected-error {{variable of type 'Stack' only valid on the stack}} expected-note {{value incorrectly allocated on the heap}} + gobble(new TemplateClass); // expected-error {{variable of type 'TemplateClass' only valid on the stack}} expected-note {{value incorrectly allocated on the heap}} + gobble(len <= 5 ? &valid : new Stack); // expected-error {{variable of type 'Stack' only valid on the stack}} expected-note {{value incorrectly allocated on the heap}} + + char buffer[sizeof(Stack)]; + gobble(new (buffer) Stack); +} + +Stack notValid; // expected-error {{variable of type 'Stack' only valid on the stack}} expected-note {{value incorrectly allocated in a global variable}} +struct RandomClass { + Stack nonstaticMember; // expected-note {{'RandomClass' is a stack type because member 'nonstaticMember' is a stack type 'Stack'}} + static Stack staticMember; // expected-error {{variable of type 'Stack' only valid on the stack}} expected-note {{value incorrectly allocated in a global variable}} +}; +struct MOZ_STACK_CLASS RandomStackClass { + Stack nonstaticMember; + static Stack staticMember; // expected-error {{variable of type 'Stack' only valid on the stack}} expected-note {{value incorrectly allocated in a global variable}} +}; + +struct BadInherit : Stack {}; // expected-note {{'BadInherit' is a stack type because it inherits from a stack type 'Stack'}} +struct MOZ_STACK_CLASS GoodInherit : Stack {}; + +BadInherit moreInvalid; // expected-error {{variable of type 'BadInherit' only valid on the stack}} expected-note {{value incorrectly allocated in a global variable}} +RandomClass evenMoreInvalid; // expected-error {{variable of type 'RandomClass' only valid on the stack}} expected-note {{value incorrectly allocated in a global variable}} diff --git a/build/clang-plugin/tests/TestStaticLocalClass.cpp b/build/clang-plugin/tests/TestStaticLocalClass.cpp new file mode 100644 index 0000000000..9b29337de0 --- /dev/null +++ b/build/clang-plugin/tests/TestStaticLocalClass.cpp @@ -0,0 +1,54 @@ +#define MOZ_STATIC_LOCAL_CLASS __attribute__((annotate("moz_static_local_class"))) +#include + +struct MOZ_STATIC_LOCAL_CLASS StaticLocal { + int i; + void *operator new(size_t x) throw() { return 0; } + void *operator new(size_t blah, char *buffer) { return buffer; } +}; + +template +struct MOZ_STATIC_LOCAL_CLASS TemplateClass { + T i; +}; + +void gobble(void *) { } + +void misuseStaticLocalClass(int len) { + StaticLocal notValid; // expected-error {{variable of type 'StaticLocal' is only valid as a static local}} expected-note {{value incorrectly allocated in an automatic variable}} + StaticLocal alsoNotValid[2]; // expected-error {{variable of type 'StaticLocal [2]' is only valid as a static local}} expected-note {{'StaticLocal [2]' is a static-local type because it is an array of static-local type 'StaticLocal'}} expected-note {{value incorrectly allocated in an automatic variable}} + static StaticLocal valid; + static StaticLocal alsoValid[2]; + + gobble(¬Valid); + gobble(&valid); + gobble(&alsoValid[0]); + + gobble(new StaticLocal); // expected-error {{variable of type 'StaticLocal' is only valid as a static local}} expected-note {{value incorrectly allocated on the heap}} + gobble(new StaticLocal[10]); // expected-error {{variable of type 'StaticLocal' is only valid as a static local}} expected-note {{value incorrectly allocated on the heap}} + gobble(new TemplateClass); // expected-error {{variable of type 'TemplateClass' is only valid as a static 
local}} expected-note {{value incorrectly allocated on the heap}} + gobble(len <= 5 ? &valid : new StaticLocal); // expected-error {{variable of type 'StaticLocal' is only valid as a static local}} expected-note {{value incorrectly allocated on the heap}} + + char buffer[sizeof(StaticLocal)]; + gobble(new (buffer) StaticLocal); +} + +StaticLocal notValid; // expected-error {{variable of type 'StaticLocal' is only valid as a static local}} expected-note {{value incorrectly allocated in a global variable}} + +struct RandomClass { + StaticLocal nonstaticMember; // expected-note {{'RandomClass' is a static-local type because member 'nonstaticMember' is a static-local type 'StaticLocal'}} + static StaticLocal staticMember; // expected-error {{variable of type 'StaticLocal' is only valid as a static local}} expected-note {{value incorrectly allocated in a global variable}} +}; + +struct MOZ_STATIC_LOCAL_CLASS RandomStaticLocalClass { + StaticLocal nonstaticMember; + static StaticLocal staticMember; // expected-error {{variable of type 'StaticLocal' is only valid as a static local}} expected-note {{value incorrectly allocated in a global variable}} +}; + +struct BadInherit : StaticLocal {}; // expected-note {{'BadInherit' is a static-local type because it inherits from a static-local type 'StaticLocal'}} +struct MOZ_STATIC_LOCAL_CLASS GoodInherit : StaticLocal {}; + +void misuseStaticLocalClassEvenMore(int len) { + BadInherit moreInvalid; // expected-error {{variable of type 'BadInherit' is only valid as a static local}} expected-note {{value incorrectly allocated in an automatic variable}} + RandomClass evenMoreInvalid; // expected-error {{variable of type 'RandomClass' is only valid as a static local}} expected-note {{value incorrectly allocated in an automatic variable}} +} diff --git a/build/clang-plugin/tests/TestTemporaryClass.cpp b/build/clang-plugin/tests/TestTemporaryClass.cpp new file mode 100644 index 0000000000..e7f1e0ee7c --- /dev/null +++ b/build/clang-plugin/tests/TestTemporaryClass.cpp @@ -0,0 +1,72 @@ +#define MOZ_TEMPORARY_CLASS __attribute__((annotate("moz_temporary_class"))) +#define MOZ_IMPLICIT __attribute__((annotate("moz_implicit"))) + +#include + +struct MOZ_TEMPORARY_CLASS Temporary { + int i; + Temporary() {} + MOZ_IMPLICIT Temporary(int a) {} + Temporary(int a, int b) {} + void *operator new(size_t x) throw() { return 0; } + void *operator new(size_t blah, char *buffer) { return buffer; } +}; + +template +struct MOZ_TEMPORARY_CLASS TemplateClass { + T i; +}; + +void gobble(void *) { } + +void gobbleref(const Temporary&) { } + +template +void gobbleanyref(const T&) { } + +void misuseNonTemporaryClass(int len) { + // All of these should error. 
+ Temporary invalid; // expected-error {{variable of type 'Temporary' is only valid as a temporary}} expected-note {{value incorrectly allocated in an automatic variable}} + Temporary alsoInvalid[2]; // expected-error {{variable of type 'Temporary [2]' is only valid as a temporary}} expected-note {{value incorrectly allocated in an automatic variable}} expected-note {{'Temporary [2]' is a temporary type because it is an array of temporary type 'Temporary'}} + static Temporary invalidStatic; // expected-error {{variable of type 'Temporary' is only valid as a temporary}} expected-note {{value incorrectly allocated in a global variable}} + static Temporary alsoInvalidStatic[2]; // expected-error {{variable of type 'Temporary [2]' is only valid as a temporary}} expected-note {{value incorrectly allocated in a global variable}} expected-note {{'Temporary [2]' is a temporary type because it is an array of temporary type 'Temporary'}} + + gobble(&invalid); + gobble(&invalidStatic); + gobble(&alsoInvalid[0]); + + // All of these should be fine. + gobbleref(Temporary()); + gobbleref(Temporary(10, 20)); + gobbleref(Temporary(10)); + gobbleref(10); + gobbleanyref(TemplateClass()); + + // All of these should error. + gobble(new Temporary); // expected-error {{variable of type 'Temporary' is only valid as a temporary}} expected-note {{value incorrectly allocated on the heap}} + gobble(new Temporary[10]); // expected-error {{variable of type 'Temporary' is only valid as a temporary}} expected-note {{value incorrectly allocated on the heap}} + gobble(new TemplateClass); // expected-error {{variable of type 'TemplateClass' is only valid as a temporary}} expected-note {{value incorrectly allocated on the heap}} + gobble(len <= 5 ? &invalid : new Temporary); // expected-error {{variable of type 'Temporary' is only valid as a temporary}} expected-note {{value incorrectly allocated on the heap}} + + // Placement new is odd, but okay. + char buffer[sizeof(Temporary)]; + gobble(new (buffer) Temporary); +} + +void defaultArg(const Temporary& arg = Temporary()) { // expected-error {{variable of type 'Temporary' is only valid as a temporary}} expected-note {{value incorrectly allocated in an automatic variable}} +} + +// Can't be a global, this should error. +Temporary invalidStatic; // expected-error {{variable of type 'Temporary' is only valid as a temporary}} expected-note {{value incorrectly allocated in a global variable}} + +struct RandomClass { + Temporary nonstaticMember; // This is okay if RandomClass is only used as a temporary. 
+ static Temporary staticMember; // expected-error {{variable of type 'Temporary' is only valid as a temporary}} expected-note {{value incorrectly allocated in a global variable}} +}; + +struct BadInherit : Temporary {}; + +void useStuffWrongly() { + gobbleanyref(BadInherit()); + gobbleanyref(RandomClass()); +} diff --git a/build/clang-plugin/tests/TestTemporaryLifetimeBound.cpp b/build/clang-plugin/tests/TestTemporaryLifetimeBound.cpp new file mode 100644 index 0000000000..0c51182cab --- /dev/null +++ b/build/clang-plugin/tests/TestTemporaryLifetimeBound.cpp @@ -0,0 +1,126 @@ +#define MOZ_LIFETIME_BOUND __attribute__((annotate("moz_lifetime_bound"))) + +struct Foo {}; + +struct Bar { + MOZ_LIFETIME_BOUND const Foo &AsFoo() const; // expected-note {{member function declared here}} expected-note {{member function declared here}} expected-note {{member function declared here}} expected-note {{member function declared here}} expected-note {{member function declared here}} + MOZ_LIFETIME_BOUND operator const Foo &() const; // expected-note {{member function declared here}} expected-note {{member function declared here}} expected-note {{member function declared here}} expected-note {{member function declared here}} expected-note {{member function declared here}} expected-note {{member function declared here}} +}; + +Bar MakeBar() { return Bar(); } + +Bar testReturnsInstance_Constructed() { return Bar(); } + +const Foo &testReturnsReference_Static() { + static constexpr auto bar = Bar{}; + return bar; +} + +/* TODO This is bad as well... but not related to a temporary. +const Foo& testReturnsReference_Local() { + constexpr auto bar = Bar{}; + return bar; +} +*/ + +const Foo &testReturnsReferenceToTemporaryViaLifetimeBound_Constructed() { + return Bar(); // expected-error {{cannot return result of lifetime-bound function 'operator const Foo &' on temporary of type 'Bar'}} +} + +const Foo &testReturnsReferenceToTemporaryViaLifetimeBound2_Constructed() { + return static_cast(Bar()); // expected-error {{cannot return result of lifetime-bound function 'operator const Foo &' on temporary of type 'Bar'}} +} + +const Foo &testReturnsReferenceToTemporaryViaLifetimeBound3_Constructed() { + return Bar().AsFoo(); // expected-error {{cannot return result of lifetime-bound function 'AsFoo' on temporary of type 'Bar'}} +} + +const Foo & +testReturnsReferenceToTemporaryViaLifetimeBound4_Constructed(bool aCond) { + static constexpr Foo foo; + return aCond ? 
foo : Bar().AsFoo(); // expected-error {{cannot return result of lifetime-bound function 'AsFoo' on temporary of type 'Bar'}} +} + +Foo testReturnsValueViaLifetimeBoundFunction_Constructed() { return Bar(); } + +Foo testReturnsValueViaLifetimeBoundFunction2_Constructed() { + return static_cast(Bar()); +} + +Foo testReturnsValueViaLifetimeBoundFunction3_Constructed() { + return Bar().AsFoo(); +} + +Bar testReturnInstance_Returned() { return MakeBar(); } + +const Foo &testReturnsReferenceToTemporaryViaLifetimeBound_Returned() { + return MakeBar(); // expected-error {{cannot return result of lifetime-bound function 'operator const Foo &' on temporary of type 'Bar'}} +} + +const Foo &testReturnsReferenceToTemporaryViaLifetimeBound2_Returned() { + return static_cast(MakeBar()); // expected-error {{cannot return result of lifetime-bound function 'operator const Foo &' on temporary of type 'Bar'}} +} + +const Foo &testReturnsReferenceToTemporaryViaLifetimeBound3_Returned() { + return MakeBar().AsFoo(); // expected-error {{cannot return result of lifetime-bound function 'AsFoo' on temporary of type 'Bar'}} +} + +Foo testReturnsValueViaLifetimeBoundFunction_Returned() { return MakeBar(); } + +Foo testReturnsValueViaLifetimeBoundFunction2_Returned() { + return static_cast(MakeBar()); +} + +Foo testReturnsValueViaLifetimeBoundFunction3_Returned() { + return MakeBar().AsFoo(); +} + +void testNoLifetimeExtension() { + const Foo &foo = Bar(); // expected-error {{cannot bind result of lifetime-bound function 'operator const Foo &' on temporary of type 'Bar' to reference, does not extend lifetime}} +} + +void testNoLifetimeExtension2() { + const auto &foo = static_cast(MakeBar()); // expected-error {{cannot bind result of lifetime-bound function 'operator const Foo &' on temporary of type 'Bar' to reference, does not extend lifetime}} +} + +void testNoLifetimeExtension3() { + const Foo &foo = Bar().AsFoo(); // expected-error {{cannot bind result of lifetime-bound function 'AsFoo' on temporary of type 'Bar' to reference, does not extend lifetime}} +} + +void testNoLifetimeExtension4(bool arg) { + const Foo foo; + const Foo &fooRef = arg ? foo : Bar().AsFoo(); // expected-error {{cannot bind result of lifetime-bound function 'AsFoo' on temporary of type 'Bar' to reference, does not extend lifetime}} +} + +// While this looks similar to testNoLifetimeExtension4, this is actually fine, +// as the coerced type of the conditional operator is `Foo` here rather than +// `const Foo&`, and thus an implicit copy of `Bar().AsFoo()` is created, whose +// lifetime is actually extended. +void testLifetimeExtension(bool arg) { + const Foo &foo = arg ? Foo() : Bar().AsFoo(); +} + +void testConvertToValue() { const Foo foo = Bar(); } + +Foo testReturnConvertToValue() { + return static_cast(Bar()); +} + +void FooFunc(const Foo &aFoo); + +// We want to allow binding to parameters of the target reference type though. +// This is the very reason the annotation is required, and the function cannot +// be restricted to lvalues. Lifetime is not an issue here, as the temporary's +// lifetime is until the end of the full expression anyway. + +void testBindToParameter() { + FooFunc(Bar()); + FooFunc(static_cast(Bar())); + FooFunc(Bar().AsFoo()); + FooFunc(MakeBar()); +} + +// This should be OK, because the return value isn't necessarily coming from the +// argument (and it should be OK for any type). 
+const Foo &RandomFunctionCall(const Foo &aFoo); +const Foo &testReturnFunctionCall() { return RandomFunctionCall(Bar()); } diff --git a/build/clang-plugin/tests/TestTrivialCtorDtor.cpp b/build/clang-plugin/tests/TestTrivialCtorDtor.cpp new file mode 100644 index 0000000000..cef06b0acc --- /dev/null +++ b/build/clang-plugin/tests/TestTrivialCtorDtor.cpp @@ -0,0 +1,83 @@ +#define MOZ_TRIVIAL_CTOR_DTOR __attribute__((annotate("moz_trivial_ctor_dtor"))) + +struct MOZ_TRIVIAL_CTOR_DTOR EmptyClass{}; + +template +struct MOZ_TRIVIAL_CTOR_DTOR TemplateEmptyClass{}; + +struct MOZ_TRIVIAL_CTOR_DTOR NonEmptyClass { + void *m; +}; + +template +struct MOZ_TRIVIAL_CTOR_DTOR TemplateNonEmptyClass { + T* m; +}; + +struct MOZ_TRIVIAL_CTOR_DTOR BadUserDefinedCtor { // expected-error {{class 'BadUserDefinedCtor' must have trivial constructors and destructors}} + BadUserDefinedCtor() {} +}; + +struct MOZ_TRIVIAL_CTOR_DTOR BadUserDefinedDtor { // expected-error {{class 'BadUserDefinedDtor' must have trivial constructors and destructors}} + ~BadUserDefinedDtor() {} +}; + +struct MOZ_TRIVIAL_CTOR_DTOR BadVirtualDtor { // expected-error {{class 'BadVirtualDtor' must have trivial constructors and destructors}} + virtual ~BadVirtualDtor() {} +}; + +struct MOZ_TRIVIAL_CTOR_DTOR OkVirtualMember { + virtual void f(); +}; + +void foo(); +struct MOZ_TRIVIAL_CTOR_DTOR BadNonEmptyCtorDtor { // expected-error {{class 'BadNonEmptyCtorDtor' must have trivial constructors and destructors}} + BadNonEmptyCtorDtor() { foo(); } + ~BadNonEmptyCtorDtor() { foo(); } +}; + +struct NonTrivialCtor { + NonTrivialCtor() { foo(); } +}; + +struct NonTrivialDtor { + ~NonTrivialDtor() { foo(); } +}; + +struct VirtualMember { + virtual void f(); +}; + +struct MOZ_TRIVIAL_CTOR_DTOR BadNonTrivialCtorInBase : NonTrivialCtor { // expected-error {{class 'BadNonTrivialCtorInBase' must have trivial constructors and destructors}} +}; + +struct MOZ_TRIVIAL_CTOR_DTOR BadNonTrivialDtorInBase : NonTrivialDtor { // expected-error {{class 'BadNonTrivialDtorInBase' must have trivial constructors and destructors}} +}; + +struct MOZ_TRIVIAL_CTOR_DTOR BadNonTrivialCtorInMember { // expected-error {{class 'BadNonTrivialCtorInMember' must have trivial constructors and destructors}} + NonTrivialCtor m; +}; + +struct MOZ_TRIVIAL_CTOR_DTOR BadNonTrivialDtorInMember { // expected-error {{class 'BadNonTrivialDtorInMember' must have trivial constructors and destructors}} + NonTrivialDtor m; +}; + +struct MOZ_TRIVIAL_CTOR_DTOR OkVirtualMemberInMember { + VirtualMember m; +}; + +struct MOZ_TRIVIAL_CTOR_DTOR OkConstExprConstructor { + constexpr OkConstExprConstructor() {} +}; + +struct MOZ_TRIVIAL_CTOR_DTOR OkConstExprConstructorInMember { + OkConstExprConstructor m; +}; + +// XXX: This error is unfortunate, but is unlikely to come up in real code. +// In this situation, it should be possible to define a constexpr constructor +// which explicitly initializes the members. 
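// A sketch of the workaround the comment above alludes to (hypothetical type
// name, and assuming the checker keeps accepting user-provided constexpr
// default constructors, as it does for OkConstExprConstructor above):
//
//   struct MOZ_TRIVIAL_CTOR_DTOR WorkedAroundUnfortunateError {
//     constexpr WorkedAroundUnfortunateError() : m(), n(nullptr) {}
//     OkConstExprConstructor m;
//     void *n;
//   };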
+struct MOZ_TRIVIAL_CTOR_DTOR BadUnfortunateError { // expected-error {{class 'BadUnfortunateError' must have trivial constructors and destructors}} + OkConstExprConstructor m; + void *n; +}; diff --git a/build/clang-plugin/tests/TestTrivialDtor.cpp b/build/clang-plugin/tests/TestTrivialDtor.cpp new file mode 100644 index 0000000000..f86d41b238 --- /dev/null +++ b/build/clang-plugin/tests/TestTrivialDtor.cpp @@ -0,0 +1,52 @@ +#define MOZ_TRIVIAL_DTOR __attribute__((annotate("moz_trivial_dtor"))) + +struct MOZ_TRIVIAL_DTOR EmptyClass{}; + +template +struct MOZ_TRIVIAL_DTOR TemplateEmptyClass{}; + +struct MOZ_TRIVIAL_DTOR NonEmptyClass { + void *m; +}; + +template +struct MOZ_TRIVIAL_DTOR TemplateNonEmptyClass { + T* m; +}; + +struct MOZ_TRIVIAL_DTOR BadUserDefinedDtor { // expected-error {{class 'BadUserDefinedDtor' must have a trivial destructor}} + ~BadUserDefinedDtor() {} +}; + +struct MOZ_TRIVIAL_DTOR BadVirtualDtor { // expected-error {{class 'BadVirtualDtor' must have a trivial destructor}} + virtual ~BadVirtualDtor() {} +}; + +struct MOZ_TRIVIAL_DTOR OkVirtualMember { + virtual void f(); +}; + +void foo(); +struct MOZ_TRIVIAL_DTOR BadNonEmptyCtorDtor { // expected-error {{class 'BadNonEmptyCtorDtor' must have a trivial destructor}} + BadNonEmptyCtorDtor() { foo(); } + ~BadNonEmptyCtorDtor() { foo(); } +}; + +struct NonTrivialDtor { + ~NonTrivialDtor() { foo(); } +}; + +struct VirtualMember { + virtual void f(); +}; + +struct MOZ_TRIVIAL_DTOR BadNonTrivialDtorInBase : NonTrivialDtor { // expected-error {{class 'BadNonTrivialDtorInBase' must have a trivial destructor}} +}; + +struct MOZ_TRIVIAL_DTOR BadNonTrivialDtorInMember { // expected-error {{class 'BadNonTrivialDtorInMember' must have a trivial destructor}} + NonTrivialDtor m; +}; + +struct MOZ_TRIVIAL_DTOR OkVirtualMemberInMember { + VirtualMember m; +}; diff --git a/build/clang-plugin/tests/moz.build b/build/clang-plugin/tests/moz.build new file mode 100644 index 0000000000..f826d74d45 --- /dev/null +++ b/build/clang-plugin/tests/moz.build @@ -0,0 +1,91 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# dummy library name to avoid skipping building the sources here. 
+Library("clang-plugin-tests") + +SOURCES += [ + "TestAssertWithAssignment.cpp", + "TestBadImplicitConversionCtor.cpp", + "TestCanRunScript.cpp", + "TestCustomHeap.cpp", + "TestDanglingOnTemporary.cpp", + "TestExplicitOperatorBool.cpp", + "TestGlobalClass.cpp", + "TestHeapClass.cpp", + "TestInheritTypeAnnotationsFromTemplateArgs.cpp", + "TestKungFuDeathGrip.cpp", + "TestMultipleAnnotations.cpp", + "TestMustOverride.cpp", + "TestMustReturnFromCaller.cpp", + "TestMustUse.cpp", + "TestNANTestingExpr.cpp", + "TestNANTestingExprC.c", + "TestNeedsNoVTableType.cpp", + "TestNoAddRefReleaseOnReturn.cpp", + "TestNoArithmeticExprInArgument.cpp", + "TestNoAutoType.cpp", + "TestNoDuplicateRefCntMember.cpp", + "TestNoExplicitMoveConstructor.cpp", + "TestNoNewThreadsChecker.cpp", + "TestNonHeapClass.cpp", + "TestNonMemMovable.cpp", + "TestNonMemMovableStd.cpp", + "TestNonMemMovableStdAtomic.cpp", + "TestNonParameterChecker.cpp", + "TestNonTemporaryClass.cpp", + "TestNonTrivialTypeInFfi.cpp", + "TestNoPrincipalGetUri.cpp", + "TestNoRefcountedInsideLambdas.cpp", + "TestNoUsingNamespaceMozillaJava.cpp", + "TestOverrideBaseCall.cpp", + "TestOverrideBaseCallAnnotation.cpp", + "TestParamTraitsEnum.cpp", + "TestRefCountedCopyConstructor.cpp", + "TestSprintfLiteral.cpp", + "TestStackClass.cpp", + "TestStaticLocalClass.cpp", + "TestTemporaryClass.cpp", + "TestTemporaryLifetimeBound.cpp", + "TestTrivialCtorDtor.cpp", + "TestTrivialDtor.cpp", +] + +if CONFIG["OS_ARCH"] == "WINNT": + SOURCES += [ + "TestFopenUsage.cpp", + "TestLoadLibraryUsage.cpp", + ] + +include("../external/tests/sources.mozbuild") + +if CONFIG["ENABLE_CLANG_PLUGIN_ALPHA"]: + DEFINES["MOZ_CLANG_PLUGIN_ALPHA"] = "1" + include("../alpha/tests/sources.mozbuild") + +DisableStlWrapping() +NoVisibilityFlags() + +# Build without any warning flags, and with clang verify flag for a +# syntax-only build (no codegen), without a limit on the number of errors. +COMPILE_FLAGS["OS_CXXFLAGS"] = [ + f for f in COMPILE_FLAGS.get("OS_CXXFLAGS", []) if not f.startswith("-W") +] + ["-fsyntax-only", "-Xclang", "-verify", "-ferror-limit=0", "-Wno-invalid-noreturn"] +COMPILE_FLAGS["OS_CFLAGS"] = [ + f for f in COMPILE_FLAGS.get("OS_CFLAGS", []) if not f.startswith("-W") +] + [ + "-fsyntax-only", + "-Xclang", + "-verify", + "-ferror-limit=0", + "-Xclang", + "-std=c11", + "-Wno-invalid-noreturn", +] + +# Don't reflect WARNINGS_CFLAGS into CFLAGS, as the warnings flags should be +# as specified in OS_CFLAGS above. 
+DisableCompilerWarnings() diff --git a/build/common_virtualenv_packages.txt b/build/common_virtualenv_packages.txt new file mode 100644 index 0000000000..b25fbd3308 --- /dev/null +++ b/build/common_virtualenv_packages.txt @@ -0,0 +1,110 @@ +mozilla.pth:python/mach +mozilla.pth:python/mozboot +mozilla.pth:python/mozbuild +mozilla.pth:python/mozlint +mozilla.pth:python/mozperftest +mozilla.pth:python/mozrelease +mozilla.pth:python/mozterm +mozilla.pth:python/mozversioncontrol +mozilla.pth:python/l10n +mozilla.pth:third_party/python/appdirs +mozilla.pth:third_party/python/atomicwrites +mozilla.pth:third_party/python/attrs/src +python2:mozilla.pth:third_party/python/backports +mozilla.pth:third_party/python/biplist +mozilla.pth:third_party/python/blessings +mozilla.pth:third_party/python/Click +mozilla.pth:third_party/python/compare-locales +mozilla.pth:third_party/python/configobj +mozilla.pth:third_party/python/cookies +mozilla.pth:third_party/python/cram +mozilla.pth:third_party/python/diskcache +mozilla.pth:third_party/python/distro +mozilla.pth:third_party/python/dlmanager +mozilla.pth:third_party/python/ecdsa/src +python2:mozilla.pth:third_party/python/enum34 +mozilla.pth:third_party/python/esprima +mozilla.pth:third_party/python/fluent.migrate +mozilla.pth:third_party/python/fluent.syntax +mozilla.pth:third_party/python/funcsigs +python2:mozilla.pth:third_party/python/futures +mozilla.pth:third_party/python/importlib_metadata +mozilla.pth:third_party/python/iso8601 +mozilla.pth:third_party/python/Jinja2/src +mozilla.pth:third_party/python/jsonschema +mozilla.pth:third_party/python/MarkupSafe/src +mozilla.pth:third_party/python/mohawk +mozilla.pth:third_party/python/more-itertools +mozilla.pth:third_party/python/mozilla-version +mozilla.pth:third_party/python/pathlib2 +mozilla.pth:third_party/python/pathspec +mozilla.pth:third_party/python/pep487/lib +mozilla.pth:third_party/python/gyp/pylib +mozilla.pth:third_party/python/pyrsistent +mozilla.pth:third_party/python/python-hglib +mozilla.pth:third_party/python/pluggy +mozilla.pth:third_party/python/jsmin +mozilla.pth:third_party/python/pylru +mozilla.pth:third_party/python/pystache +python2:mozilla.pth:third_party/python/PyYAML/lib +python3:mozilla.pth:third_party/python/PyYAML/lib3/ +mozilla.pth:third_party/python/requests +mozilla.pth:third_party/python/requests-unixsocket +python2:mozilla.pth:third_party/python/scandir +mozilla.pth:third_party/python/slugid +mozilla.pth:third_party/python/taskcluster +mozilla.pth:third_party/python/taskcluster-urls +mozilla.pth:third_party/python/py +mozilla.pth:third_party/python/pytest/src +mozilla.pth:third_party/python/pytoml +mozilla.pth:third_party/python/redo +mozilla.pth:third_party/python/responses +mozilla.pth:third_party/python/sentry-sdk +mozilla.pth:third_party/python/six +mozilla.pth:third_party/python/taskcluster-urls +mozilla.pth:third_party/python/urllib3/src +mozilla.pth:third_party/python/voluptuous +mozilla.pth:third_party/python/json-e +mozilla.pth:third_party/python/yamllint +mozilla.pth:third_party/python/zipp +mozilla.pth:build +mozilla.pth:config +mozilla.pth:config/mozunit +mozilla.pth:dom/bindings +mozilla.pth:dom/bindings/parser +mozilla.pth:layout/tools/reftest +mozilla.pth:third_party/python/ply +mozilla.pth:taskcluster +mozilla.pth:testing +mozilla.pth:testing/condprofile +mozilla.pth:testing/firefox-ui/harness +mozilla.pth:testing/marionette/client +mozilla.pth:testing/marionette/harness 
+mozilla.pth:testing/marionette/harness/marionette_harness/runner/mixins/browsermob-proxy-py +mozilla.pth:testing/marionette/puppeteer/firefox +mozilla.pth:testing/raptor +mozilla.pth:testing/talos +packages.txt:testing/mozbase/packages.txt +mozilla.pth:tools +mozilla.pth:testing/web-platform +mozilla.pth:testing/web-platform/tests/tools/third_party/html5lib +mozilla.pth:testing/web-platform/tests/tools/third_party/webencodings +mozilla.pth:testing/web-platform/tests/tools/third_party/h2 +mozilla.pth:testing/web-platform/tests/tools/third_party/hpack +mozilla.pth:testing/web-platform/tests/tools/third_party/hyperframe +mozilla.pth:testing/web-platform/tests/tools/third_party/certifi +mozilla.pth:testing/web-platform/tests/tools/third_party/pywebsocket3 +mozilla.pth:testing/web-platform/tests/tools/wptserve +mozilla.pth:testing/web-platform/tests/tools/wptrunner +mozilla.pth:testing/web-platform/tests/tools/six +mozilla.pth:testing/xpcshell +mozilla.pth:third_party/python/mock-1.0.0 +mozilla.pth:xpcom/ds/tools +mozilla.pth:tools/moztreedocs +mozilla.pth:third_party/python/cbor2 +mozilla.pth:third_party/python/pyasn1 +mozilla.pth:third_party/python/pyasn1-modules +mozilla.pth:third_party/python/rsa +mozilla.pth:toolkit/components/telemetry/tests/marionette/harness +mozilla.pth:xpcom/idl-parser +optional:packages.txt:comm/build/virtualenv_packages.txt diff --git a/build/compare-mozconfig/compare-mozconfigs.py b/build/compare-mozconfig/compare-mozconfigs.py new file mode 100644 index 0000000000..7e39d9c071 --- /dev/null +++ b/build/compare-mozconfig/compare-mozconfigs.py @@ -0,0 +1,178 @@ +#!/usr/bin/python +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# originally from https://hg.mozilla.org/build/tools/file/4ab9c1a4e05b/scripts/release/compare-mozconfigs.py # NOQA: E501 + +from __future__ import unicode_literals + +import logging +import os +import difflib +import unittest + +import buildconfig +import mozunit + +FAILURE_CODE = 1 +SUCCESS_CODE = 0 + +PLATFORMS = ( + "linux32", + "linux64", + "macosx64", + "win32", + "win64", + "win64-aarch64", +) + +log = logging.getLogger(__name__) + + +class ConfigError(Exception): + pass + + +def readConfig(configfile): + c = {} + execfile(configfile, c) + return c["whitelist"] + + +def verify_mozconfigs( + mozconfig_pair, nightly_mozconfig_pair, platform, mozconfigWhitelist +): + """Compares mozconfig to nightly_mozconfig and compare to an optional + whitelist of known differences. 
mozconfig_pair and nightly_mozconfig_pair + are pairs containing the mozconfig's identifier and the list of lines in + the mozconfig.""" + + # unpack the pairs to get the names, the names are just for + # identifying the mozconfigs when logging the error messages + mozconfig_name, mozconfig_lines = mozconfig_pair + nightly_mozconfig_name, nightly_mozconfig_lines = nightly_mozconfig_pair + + if not mozconfig_lines or not nightly_mozconfig_lines: + log.info("Missing mozconfigs to compare for %s" % platform) + return False + + success = True + + diff_instance = difflib.Differ() + diff_result = diff_instance.compare(mozconfig_lines, nightly_mozconfig_lines) + diff_list = list(diff_result) + + for line in diff_list: + clean_line = line[1:].strip() + if (line[0] == "-" or line[0] == "+") and len(clean_line) > 1: + # skip comment lines + if clean_line.startswith("#"): + continue + # compare to whitelist + message = "" + if line[0] == "-": + # handle lines that move around in diff + if "+" + line[1:] in diff_list: + continue + if platform in mozconfigWhitelist.get("release", {}): + if clean_line in mozconfigWhitelist["release"][platform]: + continue + elif line[0] == "+": + if "-" + line[1:] in diff_list: + continue + if platform in mozconfigWhitelist.get("nightly", {}): + if clean_line in mozconfigWhitelist["nightly"][platform]: + continue + else: + log.warning( + "%s not in %s %s!" + % ( + clean_line, + platform, + mozconfigWhitelist["nightly"][platform], + ) + ) + else: + log.error("Skipping line %s!" % line) + continue + message = "found in %s but not in %s: %s" + if line[0] == "-": + log.error( + message % (mozconfig_name, nightly_mozconfig_name, clean_line) + ) + else: + log.error( + message % (nightly_mozconfig_name, mozconfig_name, clean_line) + ) + success = False + return success + + +def get_mozconfig(path): + """Consumes a path and returns a list of lines from the mozconfig file.""" + with open(path, "rb") as fh: + return fh.readlines() + + +def compare(topsrcdir): + app = os.path.join(topsrcdir, "browser") + whitelist = readConfig(os.path.join(app, "config", "mozconfigs", "whitelist")) + + success = True + + def normalize_lines(lines): + return {l.strip() for l in lines} + + for platform in PLATFORMS: + log.info("Comparing platform %s" % platform) + + mozconfigs_path = os.path.join(app, "config", "mozconfigs", platform) + + nightly_path = os.path.join(mozconfigs_path, "nightly") + beta_path = os.path.join(mozconfigs_path, "beta") + release_path = os.path.join(mozconfigs_path, "release") + + nightly_lines = get_mozconfig(nightly_path) + beta_lines = get_mozconfig(beta_path) + release_lines = get_mozconfig(release_path) + + # Validate that entries in whitelist['nightly'][platform] are actually + # present. 
+ whitelist_normalized = normalize_lines(whitelist["nightly"].get(platform, [])) + nightly_normalized = normalize_lines(nightly_lines) + + for line in sorted(whitelist_normalized - nightly_normalized): + log.error("extra line in nightly whitelist: %s" % line) + success = False + + log.info("Comparing beta and nightly mozconfigs") + passed = verify_mozconfigs( + (beta_path, beta_lines), (nightly_path, nightly_lines), platform, whitelist + ) + + if not passed: + success = False + + log.info("Comparing release and nightly mozconfigs") + passed = verify_mozconfigs( + (release_path, release_lines), + (nightly_path, nightly_lines), + platform, + whitelist, + ) + if not passed: + success = False + + return success + + +class TestCompareMozconfigs(unittest.TestCase): + def test_compare_mozconfigs(self): + topsrcdir = buildconfig.substs["top_srcdir"] + self.assertTrue(compare(topsrcdir)) + + +if __name__ == "__main__": + logging.basicConfig(level=logging.INFO) + mozunit.main() diff --git a/build/compare-mozconfig/python.ini b/build/compare-mozconfig/python.ini new file mode 100644 index 0000000000..f90050c03e --- /dev/null +++ b/build/compare-mozconfig/python.ini @@ -0,0 +1,5 @@ +[DEFAULT] +skip-if = python == 3 +subsuite = mozbuild + +[compare-mozconfigs.py] diff --git a/build/debian-packages/cmake-jessie.diff b/build/debian-packages/cmake-jessie.diff new file mode 100644 index 0000000000..7fd301f908 --- /dev/null +++ b/build/debian-packages/cmake-jessie.diff @@ -0,0 +1,70 @@ +diff -Nru cmake-3.16.3/debian/changelog cmake-3.16.3/debian/changelog +--- cmake-3.16.3/debian/changelog 2020-03-16 19:09:14.000000000 +0900 ++++ cmake-3.16.3/debian/changelog 2020-06-24 14:59:20.000000000 +0900 +@@ -1,3 +1,17 @@ ++cmake (3.16.3-1.deb9moz1) jessie-backports; urgency=medium ++ ++ * Mozilla backport for jessie. ++ * debian/rules, debian/control: ++ - Don't build against system libraries. libuv is missing on jessie, and ++ it's just simpler to disable the use of system libraries altogether. ++ - But still use system curl. ++ - Don't build a dbgsym package through dh_strip, that's not supported on ++ jessie. ++ * debian/compat: Restore a compat level of 10. ++ * debian/control: Remove versioned dependency on debhelper. ++ ++ -- Mike Hommey Wed, 24 Jun 2020 14:59:20 +0900 ++ + cmake (3.16.3-1~bpo9+1) stretch-backports; urgency=medium + + * Rebuild for stretch-backports. 
+diff -Nru cmake-3.16.3/debian/compat cmake-3.16.3/debian/compat +--- cmake-3.16.3/debian/compat 1970-01-01 09:00:00.000000000 +0900 ++++ cmake-3.16.3/debian/compat 2020-06-24 14:57:28.000000000 +0900 +@@ -0,0 +1 @@ ++10 +diff -Nru cmake-3.16.3/debian/control cmake-3.16.3/debian/control +--- cmake-3.16.3/debian/control 2020-03-16 19:09:14.000000000 +0900 ++++ cmake-3.16.3/debian/control 2020-06-24 14:59:20.000000000 +0900 +@@ -4,17 +4,10 @@ + Maintainer: Debian CMake Team + Uploaders: Lisandro Damián Nicanor Pérez Meyer , + Felix Geyer +-Build-Depends: debhelper-compat (= 12), ++Build-Depends: debhelper, + freebsd-glue [kfreebsd-any], +- libarchive-dev (>= 3.3.3), +- libbz2-dev, + libcurl4-openssl-dev | libcurl-ssl-dev, +- libexpat1-dev, +- libjsoncpp-dev, +- liblzma-dev, + libncurses5-dev, +- librhash-dev, +- libuv1-dev (>= 1.10), + procps [!hurd-any], + python3-sphinx, + qtbase5-dev , +diff -Nru cmake-3.16.3/debian/rules cmake-3.16.3/debian/rules +--- cmake-3.16.3/debian/rules 2020-03-16 19:09:14.000000000 +0900 ++++ cmake-3.16.3/debian/rules 2020-06-24 14:59:20.000000000 +0900 +@@ -45,7 +45,7 @@ + override_dh_auto_configure: $(BUILD_FLAGS_FILE) + rm -rf Build && mkdir -p Build + cd Build && ../bootstrap --prefix=/usr --docdir=/share/doc/cmake --mandir=/share/man \ +- --init=../$(BUILD_FLAGS_FILE) --system-libs \ ++ --init=../$(BUILD_FLAGS_FILE) --system-curl \ + --sphinx-man --sphinx-html --sphinx-flags="-D today=\"$(BUILD_DATE)\"" \ + $(BOOTSTRAP_PARALLEL) --verbose + +@@ -71,9 +71,6 @@ + override_dh_sphinxdoc: + dh_sphinxdoc -pcmake-doc + +-override_dh_strip: +- dh_strip --dbgsym-migration='cmake-dbg (<< 3.5.0-1~)' +- + %: + dh $@ --with=sphinxdoc --builddirectory=Build + diff --git a/build/debian-packages/gdb-jessie.diff b/build/debian-packages/gdb-jessie.diff new file mode 100644 index 0000000000..a363ab8a1a --- /dev/null +++ b/build/debian-packages/gdb-jessie.diff @@ -0,0 +1,37 @@ +diff -Nru gdb-7.12/debian/changelog gdb-7.12/debian/changelog +--- gdb-7.12/debian/changelog 2017-01-19 19:28:25.000000000 +0900 ++++ gdb-7.12/debian/changelog 2018-02-07 15:36:15.000000000 +0900 +@@ -1,3 +1,11 @@ ++gdb (7.12-6.deb8moz1) jessie; urgency=medium ++ ++ * Mozilla backport for jessie. ++ * debian/rules: Don't pass --dbgsym-migration to dh_strip, it's not ++ supported on jessie's debhelper. 
++ ++ -- Mike Hommey Tue, 16 Jul 2019 18:28:20 +0900 ++ + gdb (7.12-6) unstable; urgency=medium + + * debian/patches: import 7.12 branch fixes +diff -Nru gdb-7.12/debian/rules gdb-7.12/debian/rules +--- gdb-7.12/debian/rules 2016-12-15 09:31:54.000000000 +0900 ++++ gdb-7.12/debian/rules 2018-02-07 15:36:15.000000000 +0900 +@@ -4,7 +4,6 @@ + + DEB_BUILDDIR := $(ALL_BUILDDIR)/objdir + DEB_DH_INSTALL_SOURCEDIR := $(shell pwd)/debian/tmp +-DEB_DH_STRIP_ARGS_gdb = --dbgsym-migration='gdb-dbg (<< 7.12-1~)' + + # Override CDBS's default CFLAGS, which also includes -Wall; gdb + # does not handle -Wunused well with -Werror, but defaults to +@@ -89,10 +88,6 @@ + arch_config_args := --with-libunwind-ia64 + endif + +-ifneq (,$(filter $(DEB_HOST_ARCH),amd64 armel armhf i386 kfreebsd-amd64 kfreebsd-i386 mips mipsel powerpc s390x)) +- arch_config_args += --with-babeltrace +-endif +- + ifdef GDB_TARGET + run_tests := no + diff --git a/build/debian-packages/python-zstandard-jessie.diff b/build/debian-packages/python-zstandard-jessie.diff new file mode 100644 index 0000000000..fa38d309b8 --- /dev/null +++ b/build/debian-packages/python-zstandard-jessie.diff @@ -0,0 +1,27 @@ +diff --git a/debian/changelog b/debian/changelog +index 84028db..d6c86c4 100644 +--- a/debian/changelog ++++ b/debian/changelog +@@ -1,3 +1,9 @@ ++python-zstandard (0.11.1-1.deb8moz1) jessie; urgency=low ++ ++ * Remove build dependencies so package builds on jessie. ++ ++ -- Mike Hommey Thu, 25 Jul 2019 14:43:17 +0900 ++ + python-zstandard (0.9.1-1) unstable; urgency=low + + * Initial Debian packaging definition. +diff --git a/debian/control b/debian/control +index 43bbd46..720082f 100644 +--- a/debian/control ++++ b/debian/control +@@ -7,8 +7,6 @@ Build-Depends: + dh-python, + python-all-dev, + python3-all-dev, +- python-hypothesis, +- python3-hypothesis, + python-nose, + python3-nose, + python-setuptools, diff --git a/build/debian-packages/python3.6-jessie.diff b/build/debian-packages/python3.6-jessie.diff new file mode 100644 index 0000000000..c95cb7b884 --- /dev/null +++ b/build/debian-packages/python3.6-jessie.diff @@ -0,0 +1,165 @@ +diff -Nru python3.6-3.6.3/debian/apt_preferences python3.6-3.6.3/debian/apt_preferences +--- python3.6-3.6.3/debian/apt_preferences 1970-01-01 09:00:00.000000000 +0900 ++++ python3.6-3.6.3/debian/apt_preferences 2020-05-07 11:20:37.000000000 +0900 +@@ -0,0 +1,7 @@ ++Package: python3-pkg-resources ++Pin: release n=jessie-backports ++Pin-Priority: 900 ++ ++Package: python3-setuptools ++Pin: release n=jessie-backports ++Pin-Priority: 900 +diff -Nru python3.6-3.6.3/debian/changelog python3.6-3.6.3/debian/changelog +--- python3.6-3.6.3/debian/changelog 2017-12-05 19:22:15.000000000 +0900 ++++ python3.6-3.6.3/debian/changelog 2020-05-07 11:20:37.000000000 +0900 +@@ -1,3 +1,19 @@ ++python3.6 (3.6.3-2.deb8moz1) jessie; urgency=medium ++ ++ * Mozilla backport for jessie. ++ * debian/control.in: Remove libmpdec-dev dependency; Jessie has a ++ broken version. ++ * debian/rules: ++ - Remove --with-system-libmpdec because jessie doesn't have it. ++ - Disable PGO builds because they segfault. ++ * debian/rules, debian/apt_preferences: Prefer python3-pkg-resources and ++ python3-setuptools being installed from jessie-backports when this package ++ is installed. ++ * debian/control.in, debian/rules: Don't generate the -doc package, sphinx ++ is too old in jessie. ++ ++ -- Mike Hommey Tue, 7 May 2020 11:20:37 +0900 ++ + python3.6 (3.6.3-2) unstable; urgency=medium + + * Update to 20171205 from the 3.6 branch. 
+diff -Nru python3.6-3.6.3/debian/control.in python3.6-3.6.3/debian/control.in +--- python3.6-3.6.3/debian/control.in 2017-12-05 19:22:15.000000000 +0900 ++++ python3.6-3.6.3/debian/control.in 2020-05-07 11:20:37.000000000 +0900 +@@ -9,7 +9,7 @@ + zlib1g-dev, libbz2-dev, liblzma-dev, + libgdbm-dev, libdb-dev, + tk-dev, blt-dev (>= 2.4z), libssl-dev, +- libexpat1-dev, libmpdec-dev (>= 2.4), ++ libexpat1-dev, + libbluetooth-dev [!hurd-i386 !kfreebsd-i386 !kfreebsd-amd64], + locales [!armel !avr32 !hppa !ia64 !mipsel], + libsqlite3-dev, libffi-dev (>= 3.0.5) [!or1k !avr32], +@@ -27,7 +27,7 @@ + Multi-Arch: allowed + Priority: @PRIO@ + Depends: @PVER@-minimal (= ${binary:Version}), lib@PVER@-stdlib (= ${binary:Version}), mime-support, ${shlibs:Depends}, ${misc:Depends} +-Suggests: @PVER@-venv, @PVER@-doc, binutils ++Suggests: @PVER@-venv, binutils + Description: Interactive high-level object-oriented language (version @VER@) + Python is a high-level, interactive, object-oriented language. Its @VER@ version + includes an extensive class library with lots of goodies for +@@ -53,7 +53,7 @@ + Priority: @PRIO@ + Pre-Depends: ${misc:Pre-Depends} + Depends: lib@PVER@-minimal (= ${binary:Version}), mime-support, ${shlibs:Depends}, ${misc:Depends} +-Breaks: libmpdec2 (<< 2.4.2), PYFPE_BREAKS ++Breaks: PYFPE_BREAKS + Replaces: lib@PVER@-testsuite (<< 3.6.2-2) + Description: Interactive high-level object-oriented language (standard library, version @VER@) + Python is a high-level, interactive, object-oriented language. Its @VER@ version +@@ -164,28 +164,6 @@ + IDLE is an Integrated Development Environment for Python (v@VER@). + IDLE is written using Tkinter and therefore quite platform-independent. + +-Package: @PVER@-doc +-Section: doc +-Architecture: all +-Multi-Arch: foreign +-Depends: libjs-jquery, libjs-underscore, ${misc:Depends} +-Suggests: @PVER@ +-Description: Documentation for the high-level object-oriented language Python (v@VER@) +- These is the official set of documentation for the interactive high-level +- object-oriented language Python (v@VER@). All documents are provided +- in HTML format. The package consists of ten documents: +- . 
+- * What's New in Python@VER@ +- * Tutorial +- * Python Library Reference +- * Macintosh Module Reference +- * Python Language Reference +- * Extending and Embedding Python +- * Python/C API Reference +- * Installing Python Modules +- * Documenting Python +- * Distributing Python Modules +- + Package: @PVER@-dbg + Section: debug + Architecture: any +diff -Nru python3.6-3.6.3/debian/rules python3.6-3.6.3/debian/rules +--- python3.6-3.6.3/debian/rules 2017-12-05 19:22:15.000000000 +0900 ++++ python3.6-3.6.3/debian/rules 2020-05-07 11:20:37.000000000 +0900 +@@ -180,14 +180,6 @@ + DEBUG_CFLAGS += -fexceptions + endif + +-ifeq ($(DEB_HOST_GNU_TYPE),$(DEB_BUILD_GNU_TYPE)) +- ifeq ($(DEB_HOST_ARCH_OS),linux) +- ifneq (,$(findstring $(DEB_HOST_ARCH), amd64 armel armhf i386 powerpc ppc64 ppc64el s390x)) +- with_pgo := yes +- endif +- endif +-endif +- + ifneq (,$(findstring $(DEB_HOST_ARCH), amd64 armel armhf i386 powerpc ppc64 ppc64el s390x)) + with_lto := yes + endif +@@ -347,8 +339,7 @@ + --with-dbmliborder=bdb:gdbm \ + --with-computed-gotos \ + --without-ensurepip \ +- --with-system-expat \ +- --with-system-libmpdec \ ++ --with-system-expat + + ifneq (,$(filter $(DEB_HOST_ARCH), avr32 or1k)) + common_configure_args += --without-ffi +@@ -684,7 +675,6 @@ + + stamps/stamp-doc-html: + dh_testdir +- $(MAKE) -C Doc html + @mkdir -p stamps + touch stamps/stamp-doc-html + +@@ -1310,6 +1300,8 @@ + install -D -m 644 $$i debian/$$b/usr/share/lintian/overrides/$$b; \ + done + ++ install -D -m 644 debian/apt_preferences $(d_min)/etc/apt/preferences.d/python3.6.pref ++ + touch stamps/stamp-install + + # Build architecture-independent files here. +@@ -1317,26 +1309,6 @@ + dh_testdir -i + dh_testroot -i + +- : # $(p_doc) package +- dh_installdirs -p$(p_doc) \ +- usr/share/doc/$(p_base) \ +- usr/share/doc/$(p_doc) +- dh_installdocs -p$(p_doc) +- cp -a Doc/build/html $(d_doc)/usr/share/doc/$(p_base)/ +- rm -f $(d_doc)/usr/share/doc/$(p_base)/html/_static/jquery.js +- dh_link -p$(p_doc) \ +- /usr/share/doc/$(p_base)/html /usr/share/doc/$(p_doc)/html \ +- /usr/share/javascript/jquery/jquery.js /usr/share/doc/$(p_base)/html/_static/jquery.js \ +- /usr/share/javascript/underscore/underscore.js /usr/share/doc/$(p_base)/html/_static/underscore.js +- +- : # devhelp docs +- cd $(buildd_static) && ./python ../debian/pyhtml2devhelp.py \ +- ../$(d_doc)/usr/share/doc/$(p_base)/html index.html $(VER) \ +- > ../$(d_doc)/usr/share/doc/$(p_base)/html/$(PVER).devhelp +- gzip -9nv $(d_doc)/usr/share/doc/$(p_base)/html/$(PVER).devhelp +- dh_link -p$(p_doc) \ +- /usr/share/doc/$(p_base)/html /usr/share/devhelp/books/$(PVER) +- + for i in $(p_ltst); do \ + rm -rf debian/$$i/usr/share/doc/$$i; \ + ln -s $(p_base) debian/$$i/usr/share/doc/$$i; \ diff --git a/build/debian-packages/valgrind-jessie.diff b/build/debian-packages/valgrind-jessie.diff new file mode 100644 index 0000000000..aae89c02aa --- /dev/null +++ b/build/debian-packages/valgrind-jessie.diff @@ -0,0 +1,61 @@ +diff -Nru valgrind-3.16.1/debian/changelog valgrind-3.16.1/debian/changelog +--- valgrind-3.16.1/debian/changelog 2020-06-29 02:49:18.000000000 +0900 ++++ valgrind-3.16.1/debian/changelog 2020-12-09 09:17:53.000000000 +0900 +@@ -1,3 +1,16 @@ ++valgrind (1:3.16.1-1.deb8moz1) jessie; urgency=medium ++ ++ * Mozilla backport for jessie. ++ * debian/control, debian/compat: Drop debhelper compat back to 9, which ++ requires adding back an explicit dependency on dh-autoreconf. 
++ * debian/rules: ++ - Debhelper only defaulted to --parallel in compat >= 10, so add ++ --parallel back. ++ - Add an explicit --libexecdir to match that of debhelper compat level >= ++ 12. ++ ++ -- Mike Hommey Wed, 9 Dec 2020 09:17:53 +0900 ++ + valgrind (1:3.16.1-1) unstable; urgency=medium + + * New upstream release +diff -Nru valgrind-3.16.1/debian/compat valgrind-3.16.1/debian/compat +--- valgrind-3.16.1/debian/compat 1970-01-01 09:00:00.000000000 +0900 ++++ valgrind-3.16.1/debian/compat 2020-12-09 09:15:49.000000000 +0900 +@@ -0,0 +1 @@ ++9 +diff -Nru valgrind-3.16.1/debian/control valgrind-3.16.1/debian/control +--- valgrind-3.16.1/debian/control 2020-06-29 02:49:18.000000000 +0900 ++++ valgrind-3.16.1/debian/control 2020-12-09 09:17:53.000000000 +0900 +@@ -2,7 +2,8 @@ + Section: devel + Priority: optional + Maintainer: Alessandro Ghedini +-Build-Depends: debhelper-compat (= 13), ++Build-Depends: debhelper (>= 9), ++ dh-autoreconf, + gdb, + gcc-multilib [amd64], + libc6-dev-i386 [amd64], +diff -Nru valgrind-3.16.1/debian/rules valgrind-3.16.1/debian/rules +--- valgrind-3.16.1/debian/rules 2020-06-29 02:49:18.000000000 +0900 ++++ valgrind-3.16.1/debian/rules 2020-12-09 09:17:53.000000000 +0900 +@@ -11,16 +11,16 @@ + LDFLAGS = $(shell dpkg-buildflags --get LDFLAGS) + + %: +- dh $@ --with=autoreconf ++ dh $@ --parallel --with=autoreconf + + override_dh_auto_configure: +- dh_auto_configure -- --enable-tls CFLAGS="$(CFLAGS)" LDFLAGS="$(LDFLAGS)" ++ dh_auto_configure -- --libexecdir=/usr/libexec --enable-tls CFLAGS="$(CFLAGS)" LDFLAGS="$(LDFLAGS)" + + override_dh_auto_test: + : # do nothing for now + + override_dh_auto_build: +- dh_auto_build ++ dh_auto_build --parallel + $(MAKE) -C docs FAQ.txt + $(MAKE) -C docs html-docs + $(MAKE) -C docs man-pages diff --git a/build/defines.sh b/build/defines.sh new file mode 100644 index 0000000000..cf98c3d8bf --- /dev/null +++ b/build/defines.sh @@ -0,0 +1,3 @@ +# Define indicating that this build is prior to one of the early betas. To be +# unset mid-way through the beta cycle. +EARLY_BETA_OR_EARLIER= diff --git a/build/docs/build-overview.rst b/build/docs/build-overview.rst new file mode 100644 index 0000000000..a7784e7b1a --- /dev/null +++ b/build/docs/build-overview.rst @@ -0,0 +1,117 @@ +.. _build_overview: + +===================== +Build System Overview +===================== + +This document provides an overview on how the build system works. It is +targeted at people wanting to learn about internals of the build system. +It is not meant for persons who casually interact with the build system. +That being said, knowledge empowers, so consider reading on. + +The build system is composed of many different components working in +harmony to build the source tree. We begin with a graphic overview. + +.. graphviz:: + + digraph build_components { + rankdir="LR"; + "configure" -> "config.status" -> "build backend" -> "build output" + } + +Phase 1: Configuration +====================== + +Phase 1 centers around the ``configure`` script, which is a bash shell script. +The file is generated from a file called ``configure.in`` which is written in M4 +and processed using Autoconf 2.13 to create the final configure script. +You don't have to worry about how you obtain a ``configure`` file: the build +system does this for you. + +The primary job of ``configure`` is to determine characteristics of the system +and compiler, apply options passed into it, and validate everything looks OK to +build. 
The primary output of the ``configure`` script is an executable file +in the object directory called ``config.status``. ``configure`` also produces +some additional files (like ``autoconf.mk``). However, the most important file +in terms of architecture is ``config.status``. + +The existence of a ``config.status`` file may be familiar to those who have worked +with Autoconf before. However, Mozilla's ``config.status`` is different from almost +any other ``config.status`` you've ever seen: it's written in Python! Instead of +having our ``configure`` script produce a shell script, we have it generating +Python. + +Now is as good a time as any to mention that Python is prevalent in our build +system. If we need to write code for the build system, we do it in Python. +That's just how we roll. For more, see :ref:`python`. + +``config.status`` contains two parts: data structures representing the output of +``configure`` and a command-line interface for preparing/configuring/generating +an appropriate build backend. (A build backend is merely a tool used to build +the tree - like GNU Make or Tup). These data structures essentially describe +the current state of the system and what the existing build configuration looks +like. For example, it defines which compiler to use, how to invoke it, which +application features are enabled, etc. You are encouraged to open up +``config.status`` to have a look for yourself! + +Once we have emitted a ``config.status`` file, we pass into the realm of +phase 2. + +Phase 2: Build Backend Preparation and the Build Definition +=========================================================== + +Once ``configure`` has determined what the current build configuration is, +we need to apply this to the source tree so we can actually build. + +What essentially happens is that the automatically-produced ``config.status`` Python +script is executed as soon as ``configure`` has generated it. ``config.status`` +is charged with the task of telling a tool how to build the tree. To do this, +``config.status`` must first scan the build system definition. + +The build system definition consists of various ``moz.build`` files in the tree. +There is roughly one ``moz.build`` file per directory or per set of related directories. +Each ``moz.build`` file defines how its part of the build config works. For +example, it says *I want these C++ files compiled* or *look for additional +information in these directories.* config.status starts with the ``moz.build`` +file from the root directory and then descends into referenced ``moz.build`` +files by following ``DIRS`` variables or similar. + +As the ``moz.build`` files are read, data structures describing the overall +build system definition are emitted. These data structures are then fed into a +build backend, which then performs actions, such as writing out files to +be read by a build tool, e.g. a ``make`` backend will write a +``Makefile``. + +When ``config.status`` runs, you'll see the following output:: + + Reticulating splines... + Finished reading 1096 moz.build files into 1276 descriptors in 2.40s + Backend executed in 2.39s + 2188 total backend files. 0 created; 1 updated; 2187 unchanged + Total wall time: 5.03s; CPU time: 3.79s; Efficiency: 75% + +What this is saying is that a total of *1096* ``moz.build`` files were read. +Altogether, *1276* data structures describing the build configuration were +derived from them. It took *2.40s* wall time to just read these files and +produce the data structures.
The *1276* data structures were fed into the +build backend which then determined it had to manage *2188* files derived +from those data structures. Most of them already existed and didn't need to be +changed. However, *1* was updated as a result of the new configuration. +The whole process took *5.03s*, although only *3.79s* was +CPU time. That likely means we spent roughly *25%* of the time waiting on +I/O. + +For more on how ``moz.build`` files work, see :ref:`mozbuild-files`. + +Phase 3: Invocation of the Build Backend +======================================== + +When most people think of the build system, they think of phase 3. This is +where we take all the code in the tree and produce Firefox or whatever +application you are creating. Phase 3 effectively takes whatever was +generated by phase 2 and runs it. Since the dawn of Mozilla, this has been +make consuming Makefiles. However, with the transition to moz.build files, +you may soon see non-Make build backends, such as Tup or Visual Studio. + +When building the tree, most of the time is spent in phase 3. This is when +header files are installed, C++ files are compiled, files are preprocessed, etc. diff --git a/build/docs/build-targets.rst b/build/docs/build-targets.rst new file mode 100644 index 0000000000..dacd46c7f4 --- /dev/null +++ b/build/docs/build-targets.rst @@ -0,0 +1,62 @@ +.. _build_targets: + +============= +Build Targets +============= + +When you build with ``mach build``, there are some special targets that can be +built. This page attempts to document them. + +Partial Tree Targets +==================== + +The targets in this section only build part of the tree. Please note that +partial tree builds can be unreliable. Use at your own risk. + +export + Build the *export* tier. The *export* tier builds everything that is + required for C/C++ compilation. It stages all header files, processes + IDLs, etc. + +compile + Build the *compile* tier. The *compile* tier compiles all C/C++ files. + +libs + Build the *libs* tier. The *libs* tier performs linking and performs + most build steps which aren't related to compilation. + +tools + Build the *tools* tier. The *tools* tier mostly deals with supplementary + tools and compiled tests. It will link tools against libXUL, including + compiled test binaries. + +binaries + Recompiles and relinks C/C++ files. Only works after a complete normal + build, but allows for much faster rebuilds of C/C++ code. For performance + reasons, however, it skips nss, nspr, icu and ffi. This is targeted to + improve local developer workflow when touching C/C++ code. + +install-manifests + Process install manifests. Install manifests handle the installation of + files into the object directory. + + Unless ``NO_REMOVE=1`` is defined in the environment, files not accounted for + in the install manifests will be deleted from the object directory. + +install-tests + Processes the tests install manifest. + +Common Actions +============== + +The targets in this section correspond to common build-related actions. Many +of the actions in this section are effectively frontends to shell scripts. +These actions will likely all be replaced by mach commands someday. + +buildsymbols + Create a symbols archive for the current build. + + This must be performed after a successful build. + +check + Run build system tests. diff --git a/build/docs/cppeclipse.rst b/build/docs/cppeclipse.rst new file mode 100644 index 0000000000..1604b16ca2 --- /dev/null +++ b/build/docs/cppeclipse.rst @@ -0,0 +1,54 @@ +.. 
_build_cppeclipse: + +===================== +Cpp Eclipse Projects +===================== + +For additional information on using Eclipse CDT see +`the MDN page +`_. + +The build system contains alpha support for generating C++ Eclipse +project files to aid with development. + +Please report bugs to bugzilla and make them depend on bug 973770. + +To generate C++ Eclipse project files, you'll need to have a fully +built tree:: + + mach build + +Then, simply generate the C++ Eclipse build backend:: + + mach build-backend -b CppEclipse + +If all goes well, the path to the generated workspace should be +printed. + +To use the generated C++ Eclipse project files, you'll need to +have an Eclipse CDT 8.3 (we plan to follow the latest Eclipse release) +`Eclipse CDT plugin +`_ +installed. You can then import all the projects into Eclipse using +*File > Import ... > General > Existing Projects into Workspace* +-only- if you have not run the background indexer. + +Updating Project Files +====================== + +As you pull and update the source tree, your C++ Eclipse files may +fall out of sync with the build configuration. The tree should still +build fine from within Eclipse, but source files may be missing and in +rare circumstances Eclipse's index may not have the proper build +configuration. + +To account for this, you'll want to periodically regenerate the +C++ Eclipse project files. You can do this by running ``mach build +&& mach build-backend -b CppEclipse`` from the +command line. + +Currently, regeneration rewrites the original project files. **If +you've made any customizations to the projects, they will likely get +overwritten.** We would like to improve this user experience in the +future. + diff --git a/build/docs/defining-binaries.rst b/build/docs/defining-binaries.rst new file mode 100644 index 0000000000..fdac27e26a --- /dev/null +++ b/build/docs/defining-binaries.rst @@ -0,0 +1,345 @@ +.. _defining_binaries: + +====================================== +Defining Binaries for the Build System +====================================== + +One part of what the build system does is compile C/C++ and link the resulting +objects to produce executables and/or libraries. This document describes the +basics of defining what is going to be built and how. All the following +describes constructs to use in moz.build files. + + +Source files +============ + +Source files to be used in a given directory are registered in the ``SOURCES`` +and ``UNIFIED_SOURCES`` variables. ``UNIFIED_SOURCES`` have a special behavior +in that they are aggregated by batches of 16, requiring, for example, that there +are no conflicting variables in those source files. + +``SOURCES`` and ``UNIFIED_SOURCES`` are lists which must be appended to, and +each append requires the given list to be alphanumerically ordered. + +.. code-block:: python + + UNIFIED_SOURCES += [ + 'FirstSource.cpp', + 'SecondSource.cpp', + 'ThirdSource.cpp', + ] + + SOURCES += [ + 'OtherSource.cpp', + ] + +``SOURCES`` and ``UNIFIED_SOURCES`` can contain a mix of different file types, +for C, C++, and Objective C. + + +Static Libraries +================ + +To build a static library, other than defining the source files (see above), one +just needs to define a library name with the ``Library`` template. + +.. code-block:: python + + Library('foo') + +The library file name will be ``libfoo.a`` on UNIX systems and ``foo.lib`` on +Windows.
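+ +Putting the two constructs together, a hypothetical ``moz.build`` for a small +static library might look like the following sketch (the file and library names +are purely illustrative): + +.. code-block:: python + + UNIFIED_SOURCES += [ + 'FirstSource.cpp', + 'SecondSource.cpp', + ] + + Library('foo')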
+ +If the static library needs to aggregate other static libraries, a list of +``Library`` names can be added to the ``USE_LIBS`` variable. Like ``SOURCES``, it +requires the appended list to be alphanumerically ordered. + +.. code-block:: python + + USE_LIBS += ['bar', 'baz'] + +If there are multiple directories containing the same ``Library`` name, it is +possible to disambiguate by prefixing with the path to the wanted one (relative +or absolute): + +.. code-block:: python + + USE_LIBS += [ + '/path/from/topsrcdir/to/bar', + '../relative/baz', + ] + +Note that the leaf name in those paths is the ``Library`` name, not an actual +file name. + +Note that currently, the build system may not create an actual library for +static libraries. It is an implementation detail that shouldn't need to be +worried about. + +As a special rule, ``USE_LIBS`` is allowed to contain references to shared +libraries. In such cases, programs and shared libraries linking this static +library will inherit those shared library dependencies. + + +Intermediate (Static) Libraries +=============================== + +In many cases in the tree, static libraries are built with the only purpose +of being linked into another, bigger one (like libxul). Instead of adding all +required libraries to ``USE_LIBS`` for the bigger one, it is possible to tell +the build system that the library built in the current directory is meant to +be linked to that bigger library, with the ``FINAL_LIBRARY`` variable. + +.. code-block:: python + + FINAL_LIBRARY = 'xul' + +The ``FINAL_LIBRARY`` value must match a unique ``Library`` name somewhere +in the tree. + +As a special rule, those intermediate libraries don't need a ``Library`` name +for themselves. + + +Shared Libraries +================ + +Sometimes, we want shared libraries, a.k.a. dynamic libraries. Such libraries +are defined similarly to static libraries, using the ``SharedLibrary`` template +instead of ``Library``. + +.. code-block:: python + + SharedLibrary('foo') + +When this template is used, no static library is built. See further below to +build both types of libraries. + +With a ``SharedLibrary`` name of ``foo``, the library file name will be +``libfoo.dylib`` on OSX, ``libfoo.so`` on ELF systems (Linux, etc.), and +``foo.dll`` on Windows. On Windows, there is also an import library named +``foo.lib``, used on the linker command line. ``libfoo.dylib`` and +``libfoo.so`` are considered the import library name for, resp. OSX and ELF +systems. + +On OSX, one may want to create a special kind of dynamic library: frameworks. +This is done with the ``Framework`` template. + +.. code-block:: python + + Framework('foo') + +With a ``Framework`` name of ``foo``, the framework file name will be ``foo``. +This template however affects the behavior on all platforms, so it needs to +be set only on OSX. + + +Executables +=========== + +Executables, a.k.a. programs, are, in the simplest form, defined with the +``Program`` template. + +.. code-block:: python + + Program('foobar') + +On UNIX systems, the executable file name will be ``foobar``, while on Windows, +it will be ``foobar.exe``. + +Like static and shared libraries, the build system can be instructed to link +libraries to the executable with ``USE_LIBS``, listing various ``Library`` +names. + +In some cases, we want to create an executable per source file in the current +directory, in which case we can use the ``SimplePrograms`` template + +.. 
code-block:: python + + SimplePrograms([ + 'FirstProgram', + 'SecondProgram', + ]) + +Contrary to ``Program``, which requires corresponding ``SOURCES``, when using +``SimplePrograms``, the corresponding ``SOURCES`` are implied. If the +corresponding ``sources`` have an extension different from ``.cpp``, it is +possible to specify the proper extension: + +.. code-block:: python + + SimplePrograms([ + 'ThirdProgram', + 'FourthProgram', + ], ext='.c') + +Please note this construct was added for compatibility with what already lives +in the mozilla tree; it is recommended not to add new simple programs with +sources with an extension other than ``.cpp``. + +Similar to ``SimplePrograms`` is the ``CppUnitTests`` template, which defines, +with the same rules, C++ unit test programs. Like ``SimplePrograms``, it takes +an ``ext`` argument to specify the extension for the corresponding ``SOURCES``, +if it's different from ``.cpp``. + + +Linking with system libraries +============================= + +Programs and libraries usually need to link with system libraries, such as a +widget toolkit, etc. Those required dependencies can be given with the +``OS_LIBS`` variable. + +.. code-block:: python + + OS_LIBS += [ + 'foo', + 'bar', + ] + +This expands to ``foo.lib bar.lib`` when building with MSVC, and +``-lfoo -lbar`` otherwise. + +For convenience with ``pkg-config``, ``OS_LIBS`` can also take linker flags +such as ``-L/some/path`` and ``-llib``, such that it is possible to directly +assign ``LIBS`` variables from ``CONFIG``, such as: + +.. code-block:: python + + OS_LIBS += CONFIG['MOZ_PANGO_LIBS'] + +(assuming ``CONFIG['MOZ_PANGO_LIBS']`` is a list, not a string) + +Like ``USE_LIBS``, this variable applies to static and shared libraries, as +well as programs. + + +Libraries from third party build system +======================================= + +Some libraries in the tree are not built by the moz.build-governed build +system, and there is no ``Library`` corresponding to them. + +However, ``USE_LIBS`` allows referencing such libraries by giving a full +path (like when disambiguating identical ``Library`` names). The same naming +rules apply as other uses of ``USE_LIBS``, so only the library name without +prefix and suffix shall be given. + +.. code-block:: python + + USE_LIBS += [ + '/path/from/topsrcdir/to/third-party/bar', + '../relative/third-party/baz', + ] + +Note that ``/path/from/topsrcdir/to/third-party`` and +``../relative/third-party/baz`` must lead under a subconfigured directory (a +directory with an AC_OUTPUT_SUBDIRS in configure.in), or ``security/nss``. + + +Building both static and shared libraries +========================================= + +When both types of libraries are required, one needs to set both +``FORCE_SHARED_LIB`` and ``FORCE_STATIC_LIB`` boolean variables. + +.. code-block:: python + + FORCE_SHARED_LIB = True + FORCE_STATIC_LIB = True + +But because static libraries and Windows import libraries have the same file +names, either the static or the shared library name needs to be different +from the name given to the ``Library`` template. + +The ``STATIC_LIBRARY_NAME`` and ``SHARED_LIBRARY_NAME`` variables can be used +to change either the static or the shared library name. + +.. code-block:: python + + Library('foo') + STATIC_LIBRARY_NAME = 'foo_s' + +With the above, on Windows, ``foo_s.lib`` will be the static library, +``foo.dll`` the shared library, and ``foo.lib`` the import library.
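+ +Other directories referring to this library through ``USE_LIBS`` keep using the +``Library`` name rather than ``STATIC_LIBRARY_NAME``; as a sketch (``client`` is +a made-up program name): + +.. code-block:: python + + Program('client') + + USE_LIBS += [ + 'foo', + ]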
+ +In some cases, for convenience, it is possible to set both +``STATIC_LIBRARY_NAME`` and ``SHARED_LIBRARY_NAME``. For example: + +.. code-block:: python + + Library('mylib') + STATIC_LIBRARY_NAME = 'mylib_s' + SHARED_LIBRARY_NAME = CONFIG['SHARED_NAME'] + +This allows to use ``mylib`` in the ``USE_LIBS`` of another library or +executable. + +When referring to a ``Library`` name building both types of libraries in +``USE_LIBS``, the shared library is chosen to be linked. But sometimes, +it is wanted to link the static version, in which case the ``Library`` name +needs to be prefixed with ``static:`` in ``USE_LIBS`` + +:: + + a/moz.build: + Library('mylib') + FORCE_SHARED_LIB = True + FORCE_STATIC_LIB = True + STATIC_LIBRARY_NAME = 'mylib_s' + b/moz.build: + Program('myprog') + USE_LIBS += [ + 'static:mylib', + ] + + +Miscellaneous +============= + +The ``SONAME`` variable declares a "shared object name" for the library. It +defaults to the ``Library`` name or the ``SHARED_LIBRARY_NAME`` if set. When +linking to a library with a ``SONAME``, the resulting library or program will +have a dependency on the library with the name corresponding to the ``SONAME`` +instead of the ``Library`` name. This only impacts ELF systems. + +:: + + a/moz.build: + Library('mylib') + b/moz.build: + Library('otherlib') + SONAME = 'foo' + c/moz.build: + Program('myprog') + USE_LIBS += [ + 'mylib', + 'otherlib', + ] + +On e.g. Linux, the above ``myprog`` will have DT_NEEDED markers for +``libmylib.so`` and ``libfoo.so`` instead of ``libmylib.so`` and +``libotherlib.so`` if there weren't a ``SONAME``. This means the runtime +requirement for ``myprog`` is ``libfoo.so`` instead of ``libotherlib.so``. + + +Gecko-related binaries +====================== + +Some programs or libraries are totally independent of Gecko, and can use the +above mentioned templates. Others are Gecko-related in some way, and may +need XPCOM linkage, mozglue. These things are tedious. A set of additional +templates exists to ease defining such programs and libraries. They are +essentially the same as the above mentioned templates, prefixed with "Gecko": + + - ``GeckoProgram`` + - ``GeckoSimplePrograms`` + - ``GeckoCppUnitTests`` + - ``GeckoSharedLibrary`` + - ``GeckoFramework`` + +All the Gecko-prefixed templates take the same arguments as their +non-Gecko-prefixed counterparts, and can take a few more arguments +for non-standard cases. See the definition of ``GeckoBinary`` in +build/gecko_templates.mozbuild for more details, but most usecases +should not require these additional arguments. diff --git a/build/docs/defining-xpcom-components.rst b/build/docs/defining-xpcom-components.rst new file mode 100644 index 0000000000..e5735c5af9 --- /dev/null +++ b/build/docs/defining-xpcom-components.rst @@ -0,0 +1,305 @@ +.. _defining_xpcom_components: + +========================================= +Defining XPCOM C++-implemented Components +========================================= + +Native XPCOM components are registered at build time, and compiled into static +data structures which allow them to be accessed with little runtime overhead. +Each module which wishes to register components must provide a manifest +describing each component it implements, its type, and how it should be +constructed. + +Manifest files are Python data files registered in ``moz.build`` files in a +``XPCOM_MANIFESTS`` file list: + +.. code-block:: python + + XPCOM_MANIFESTS += [ + 'components.conf', + ] + +The files may define any of the following special variables: + +.. 
code-block:: python + + # Optional: A function to be called once, the first time any component + # listed in this manifest is instantiated. + InitFunc = 'nsInitFooModule' + # Optional: A function to be called at shutdown if any component listed in + # this manifest has been instantiated. + UnloadFunc = 'nsUnloadFooModule' + + # Optional: A processing priority, to determine how early or late the + # manifest is processed. Defaults to 50. In practice, this mainly affects + # the order in which unload functions are called at shutdown, with higher + # priority numbers being called later. + Priority = 10 + + # Optional: A list of header files to include before calling init or + # unload functions, or any legacy constructor functions. + # + # Any header path beginning with a `/` is loaded relative to the root of + # the source tree, and must not rely on any local includes. + # + # Any relative header path must be exported. + Headers = [ + '/foo/nsFooModule.h', + 'nsFoo.h', + ] + + # A list of component classes provided by this module. + Classes = [ + { + # ... + }, + # ... + ] + + # A list of category registrations + Categories = { + 'category': { + 'name': 'value', + 'other-name': ('value', ProcessSelector.MAIN_PROCESS_ONLY), + # ... + }, + # ... + } + +Class definitions may have the following properties: + +``name`` (optional) + If present, this component will generate an entry with the given name in the + ``mozilla::components`` namespace in ``mozilla/Components.h``, which gives + easy access to its CID, service, and instance constructors as (e.g.,) + ``components::Foo::CID()``, ``components::Foo::Service()``, and + ``components::Foo::Create()``, respectively. + +``cid`` + A UUID string containing this component's CID, in the form + ``'{xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx}'``. + +``contract_ids`` (optional) + A list of contract IDs to register for this class. + +``categories`` (optional) + A dict of category entries to register for this component's contract ID. + Each key in the dict is the name of the category. Each value is either a + string containing a single entry name, or a list of entry name strings. + +``type`` (optional, default=``nsISupports``) + The fully-qualified type of the class implementing this component. Defaults + to ``nsISupports``, but **must** be provided if the ``init_method`` property + is specified, or if neither the ``constructor`` nor ``legacy_constructor`` + properties are provided. + +``headers`` (optional) + A list of headers to include in order to call this component's constructor, + in the same format as the global ``Headers`` property. + +``init_method`` (optional) + The name of a method to call on newly-created instances of this class before + returning them. The method must take no arguments, and must return an + ``nsresult``. If it returns failure, that failure is propagated to the + ``getService`` or ``createInstance`` caller. + +``constructor`` (optional) + The fully-qualified name of a constructor function to call in order to + create instances of this class. This function must be declared in one of the + headers listed in the ``headers`` property, must take no arguments, and must + return ``already_AddRefed<iface>``, where ``iface`` is the interface provided + in the ``type`` property. + + This property is incompatible with ``legacy_constructor``. + +``jsm`` (optional) + If provided, must be the URL of a JavaScript module which contains a + JavaScript implementation of the component.
The ``constructor`` property + must contain the name of an exported function which can be constructed to + create a new instance of the component. + +``legacy_constructor`` (optional) + This property is deprecated, and should not be used in new code. + + The fully-qualified name of a constructor function to call in order to + create instances of this class. This function must be declared in one of the + headers listed in the ``headers`` property, and must have the signature + ``nsresult(nsISupports* aOuter, const nsID& aIID, void** aResult)``, and + behave equivalently to ``nsIFactory::CreateInstance``. + + This property is incompatible with ``constructor``. + +``singleton`` (optional, default=``False``) + If true, this component's constructor is expected to return the same + singleton for every call, and no ``mozilla::components::<name>::Create()`` + method will be generated for it. + +``overridable`` (optional, default=``False``) + If true, this component's contract ID is expected to be overridden by some + tests, and its ``mozilla::components::<name>::Service()`` getter will + therefore look it up by contract ID for every call. This component must, + therefore, provide at least one contract ID in its ``contract_ids`` array. + + If false, the ``Service()`` getter will always retrieve the service based on + its static data, and it cannot be overridden. + + Note: Enabling this option is expensive, and should not be done when it can + be avoided, or when the getter is used by any hot code. + +``external`` (optional, default=``False`` if any ``headers`` are provided, ``True`` otherwise) + If true, a constructor for this component's ``type`` must be defined in + another translation unit, using ``NS_IMPL_COMPONENT_FACTORY(type)``. The + constructor must return an ``already_AddRefed<nsISupports>``, and will be + used to construct instances of this type. + + This option should only be used in cases where the headers which define the + component's concrete type cannot be easily included without local includes. + + Note: External constructors may not specify an ``init_method``, since the + generated code will not have the necessary type information required to call + it. This option is also incompatible with ``constructor`` and + ``legacy_constructor``. + +``processes`` (optional, default=``ProcessSelector.ANY_PROCESS``) + An optional specifier restricting which types of process this component may + be loaded in. This must be a property of ``ProcessSelector`` with the same + name as one of the values in the ``Module::ProcessSelector`` enum. + + +Conditional Compilation +======================= + +This manifest may run any appropriate Python code to customize the values of +the ``Classes`` array based on build configuration. To simplify this process, +the following globals are available: + +``defined`` + A function which returns true if the given build config setting is defined + and true. + +``buildconfig`` + The ``buildconfig`` Python module, with a ``substs`` property containing a + dict of all available build substitutions. + + +Component Constructors +====================== + +There are several ways to define component constructors, which vary mostly +depending on how old the code that uses them is: + +Class Constructors +------------------ + +The simplest way to define a component is to include a header defining a +concrete type, and let the component manager call that class's constructor: + +.. 
.. code-block:: python

    'type': 'mozilla::foo::Foo',
    'headers': ['mozilla/Foo.h'],

This is generally the preferred method of defining non-singleton constructors,
but may not be practicable for classes which rely on local includes for their
definitions.

Singleton Constructors
----------------------

Singleton classes are generally expected to provide their own constructor
function which caches a singleton instance the first time it is called, and
returns the same instance on subsequent calls. This requires declaring the
constructor in an included header, and implementing it in a separate source
file:

.. code-block:: python

    'type': 'mozilla::foo::Foo',
    'headers': ['mozilla/Foo.h'],
    'constructor': 'mozilla::Foo::GetSingleton',

``Foo.h``

.. code-block:: c++

    class Foo final : public nsISupports {
     public:
      static already_AddRefed<Foo> GetSingleton();
    };

``Foo.cpp``

.. code-block:: c++

    already_AddRefed<Foo> Foo::GetSingleton() {
      // ...
    }

External Constructors
---------------------

For types whose headers can't easily be included, constructors can be defined
using a template specialization on an incomplete type:

.. code-block:: python

    'type': 'mozilla::foo::Foo',
    'external': True,

``Foo.cpp``

.. code-block:: c++

    NS_IMPL_COMPONENT_FACTORY(Foo) {
      return do_AddRef(new Foo()).downcast<nsISupports>();
    }

Legacy Constructors
-------------------

These should not be used in new code, and are left as an exercise for the
reader.


Registering Categories
======================

Classes which need to define category entries with the same value as their
contract ID may do so using the following:

.. code-block:: python

    'contract_ids': ['@mozilla.org/foo;1'],
    'categories': {
        'content-policy': 'm-foo',
        'Gecko-Content-Viewers': ['image/jpeg', 'image/png'],
    },

This will define each of the following category entries:

* ``"content-policy"`` ``"m-foo"`` ``"@mozilla.org/foo;1"``
* ``"Gecko-Content-Viewers"`` ``"image/jpeg"`` ``"@mozilla.org/foo;1"``
* ``"Gecko-Content-Viewers"`` ``"image/png"`` ``"@mozilla.org/foo;1"``

Some category entries do not have a contract ID as a value. These entries can
be specified by adding to a global ``Categories`` dictionary:

.. code-block:: python

    Categories = {
        'app-startup': {
            'Mapi Support': 'service,@mozilla.org/mapisupport;1',
        }
    }

It is possible to limit these on a per-process basis by using a tuple as the
value:

.. code-block:: python

    Categories = {
        'app-startup': {
            'MainProcessSingleton': ('service,@mozilla.org/main-process-singleton;1', ProcessSelector.MAIN_PROCESS_ONLY),
        }
    }

diff --git a/build/docs/environment-variables.rst b/build/docs/environment-variables.rst
new file mode 100644
index 0000000000..c463391596
--- /dev/null
+++ b/build/docs/environment-variables.rst
@@ -0,0 +1,31 @@
.. _environment_variables:

================================================
Environment Variables Impacting the Build System
================================================

Various environment variables have an impact on the behavior of the
build system. This document attempts to document them.

AUTOCLOBBER
    If defined, the build system will automatically clobber as needed.
    The default behavior is to print a message and error out when a
    clobber is needed.

    This variable is typically defined in a :ref:`mozconfig <mozconfig>`
    file via ``mk_add_options``.
+ +REBUILD_CHECK + If defined, the build system will print information about why + certain files were rebuilt. + + This feature is disabled by default because it makes the build slower. + +MACH_NO_TERMINAL_FOOTER + If defined, the terminal footer displayed when building with mach in + a TTY is disabled. + +MACH_NO_WRITE_TIMES + If defined, mach commands will not prefix output lines with the + elapsed time since program start. This option is equivalent to + passing ``--log-no-times`` to mach. diff --git a/build/docs/files-metadata.rst b/build/docs/files-metadata.rst new file mode 100644 index 0000000000..6a7290c55e --- /dev/null +++ b/build/docs/files-metadata.rst @@ -0,0 +1,178 @@ +.. _mozbuild_files_metadata: + +============== +Files Metadata +============== + +:ref:`mozbuild-files` provide a mechanism for attaching metadata to +files. Essentially, you define some flags to set on a file or file +pattern. Later, some tool or process queries for metadata attached to a +file of interest and it does something intelligent with that data. + +Defining Metadata +================= + +Files metadata is defined by using the +:ref:`Files Sub-Context ` in ``moz.build`` +files. e.g.:: + + with Files('**/Makefile.in'): + BUG_COMPONENT = ('Firefox Build System', 'General') + +This working example says, *for all Makefile.in files in every directory +underneath this one - including this directory - set the Bugzilla +component to Firefox Build System :: General*. + +For more info, read the +:ref:`docs on Files `. + +How Metadata is Read +==================== + +``Files`` metadata is extracted in :ref:`mozbuild_fs_reading_mode`. + +Reading starts by specifying a set of files whose metadata you are +interested in. For each file, the filesystem is walked to the root +of the source directory. Any ``moz.build`` encountered during this +walking are marked as relevant to the file. + +Let's say you have the following filesystem content:: + + /moz.build + /root_file + /dir1/moz.build + /dir1/foo + /dir1/subdir1/foo + /dir2/foo + +For ``/root_file``, the relevant ``moz.build`` files are just +``/moz.build``. + +For ``/dir1/foo`` and ``/dir1/subdir1/foo``, the relevant files are +``/moz.build`` and ``/dir1/moz.build``. + +For ``/dir2``, the relevant file is just ``/moz.build``. + +Once the list of relevant ``moz.build`` files is obtained, each +``moz.build`` file is evaluated. Root ``moz.build`` file first, +leaf-most files last. This follows the rules of +:ref:`mozbuild_fs_reading_mode`, with the set of evaluated ``moz.build`` +files being controlled by filesystem content, not ``DIRS`` variables. + +The file whose metadata is being resolved maps to a set of ``moz.build`` +files which in turn evaluates to a list of contexts. For file metadata, +we only care about one of these contexts: +:ref:`Files `. + +We start with an empty ``Files`` instance to represent the file. As +we encounter a *files sub-context*, we see if it is appropriate to +this file. If it is, we apply its values. This process is repeated +until all *files sub-contexts* have been applied or skipped. The final +state of the ``Files`` instance is used to represent the metadata for +this particular file. + +It may help to visualize this. 
Say we have 2 ``moz.build`` files:: + + # /moz.build + with Files('*.cpp'): + BUG_COMPONENT = ('Core', 'XPCOM') + + with Files('**/*.js'): + BUG_COMPONENT = ('Firefox', 'General') + + # /foo/moz.build + with Files('*.js'): + BUG_COMPONENT = ('Another', 'Component') + +Querying for metadata for the file ``/foo/test.js`` will reveal 3 +relevant ``Files`` sub-contexts. They are evaluated as follows: + +1. ``/moz.build - Files('*.cpp')``. Does ``/*.cpp`` match + ``/foo/test.js``? **No**. Ignore this context. +2. ``/moz.build - Files('**/*.js')``. Does ``/**/*.js`` match + ``/foo/test.js``? **Yes**. Apply ``BUG_COMPONENT = ('Firefox', 'General')`` + to us. +3. ``/foo/moz.build - Files('*.js')``. Does ``/foo/*.js`` match + ``/foo/test.js``? **Yes**. Apply + ``BUG_COMPONENT = ('Another', 'Component')``. + +At the end of execution, we have +``BUG_COMPONENT = ('Another', 'Component')`` as the metadata for +``/foo/test.js``. + +One way to look at file metadata is as a stack of data structures. +Each ``Files`` sub-context relevant to a given file is applied on top +of the previous state, starting from an empty state. The final state +wins. + +.. _mozbuild_files_metadata_finalizing: + +Finalizing Values +================= + +The default behavior of ``Files`` sub-context evaluation is to apply new +values on top of old. In most circumstances, this results in desired +behavior. However, there are circumstances where this may not be +desired. There is thus a mechanism to *finalize* or *freeze* values. + +Finalizing values is useful for scenarios where you want to prevent +wildcard matches from overwriting previously-set values. This is useful +for one-off files. + +Let's take ``Makefile.in`` files as an example. The build system module +policy dictates that ``Makefile.in`` files are part of the ``Build +Config`` module and should be reviewed by peers of that module. However, +there exist ``Makefile.in`` files in many directories in the source +tree. Without finalization, a ``*`` or ``**`` wildcard matching rule +would match ``Makefile.in`` files and overwrite their metadata. + +Finalizing of values is performed by setting the ``FINAL`` variable +on ``Files`` sub-contexts. See the +:ref:`Files documentation ` for more. + +Here is an example with ``Makefile.in`` files, showing how it is +possible to finalize the ``BUG_COMPONENT`` value.:: + + # /moz.build + with Files('**/Makefile.in'): + BUG_COMPONENT = ('Firefox Build System', 'General') + FINAL = True + + # /foo/moz.build + with Files('**'): + BUG_COMPONENT = ('Another', 'Component') + +If we query for metadata of ``/foo/Makefile.in``, both ``Files`` +sub-contexts match the file pattern. However, since ``BUG_COMPONENT`` is +marked as finalized by ``/moz.build``, the assignment from +``/foo/moz.build`` is ignored. The final value for ``BUG_COMPONENT`` +is ``('Firefox Build System', 'General')``. + +Here is another example:: + + with Files('*.cpp'): + BUG_COMPONENT = ('One-Off', 'For C++') + FINAL = True + + with Files('**'): + BUG_COMPONENT = ('Regular', 'Component') + +For every files except ``foo.cpp``, the bug component will be resolved +as ``Regular :: Component``. However, ``foo.cpp`` has its value of +``One-Off :: For C++`` preserved because it is finalized. + +.. important:: + + ``FINAL`` only applied to variables defined in a context. + + If you want to mark one variable as finalized but want to leave + another mutable, you'll need to use 2 ``Files`` contexts. 
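As a sketch of that two-context pattern (the patterns and values here are
illustrative, and ``SCHEDULES`` is used only as an example of a second,
independently-set variable), the assignments can be split so that one
variable is frozen while the other remains mutable::

    with Files('**/Makefile.in'):
        BUG_COMPONENT = ('Firefox Build System', 'General')
        FINAL = True

    with Files('**/Makefile.in'):
        SCHEDULES.inclusive += ['docs']

Because ``FINAL`` is set in the first context, ``BUG_COMPONENT`` is frozen for
these files, while the variable set in the second context can still be
adjusted by more specific ``moz.build`` files deeper in the tree.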
+ +Guidelines for Defining Metadata +================================ + +In general, values defined towards the root of the source tree are +generic and become more specific towards the leaves. For example, +the ``BUG_COMPONENT`` for ``/browser`` might be ``Firefox :: General`` +whereas ``/browser/components/preferences`` would list +``Firefox :: Preferences``. diff --git a/build/docs/glossary.rst b/build/docs/glossary.rst new file mode 100644 index 0000000000..d610f07443 --- /dev/null +++ b/build/docs/glossary.rst @@ -0,0 +1,47 @@ +Build Glossary +============== + +.. glossary:: + :sorted: + + object directory + A directory holding the output of the build system. The build + system attempts to isolate all file modifications to this + directory. By convention, object directories are commonly + directories under the source directory prefixed with **obj-**. + e.g. **obj-firefox**. + + mozconfig + A shell script used to configure the build system. + + configure + A generated shell script which detects the current system + environment, applies a requested set of build configuration + options, and writes out metadata to be consumed by the build + system. + + config.status + An executable file produced by **configure** that takes the + generated build config and writes out files used to build the + tree. Traditionally, config.status writes out a bunch of + Makefiles. + + install manifest + A file containing metadata describing file installation rules. + A large part of the build system consists of copying files + around to appropriate places. We write out special files + describing the set of required operations so we can process the + actions efficiently. These files are install manifests. + + clobber build + A build performed with an initially empty object directory. All + build actions must be performed. + + incremental build + A build performed with the result of a previous build in an + object directory. The build should not have to work as hard because + it will be able to reuse the work from previous builds. + + mozinfo + An API for accessing a common and limited subset of the build and + run-time configuration. See :ref:`mozinfo`. diff --git a/build/docs/gn.rst b/build/docs/gn.rst new file mode 100644 index 0000000000..bf5ad8509c --- /dev/null +++ b/build/docs/gn.rst @@ -0,0 +1,52 @@ +.. _gn: + +============================== +GN support in the build system +============================== + +:abbr:`GN (Generated Ninja)` is a third-party build tool used by chromium and +some related projects that are vendored in mozilla-central. Rather than +requiring ``GN`` to build or writing our own build definitions for these projects, +we have support in the build system for translating GN configuration +files into moz.build files. In most cases these moz.build files will be like any +others in the tree (except that they shouldn't be modified by hand), however +those updating vendored code or building on platforms not supported by +Mozilla automation may need to re-generate these files. This is a two-step +process, described below. 
+ +Generating GN configs as JSON +============================= + +The first step must take place on a machine with access to the ``GN`` tool, which +is specified in a mozconfig with:: + + export GN= + +With this specified, and the tree configured, run:: + + $ ./mach build-backend -b GnConfigGen + +This will run ``gn gen`` on projects found in ``GN_DIRS`` specified in moz.build +files, injecting variables from the current build configuration and specifying +the result be written to a JSON file in ``$objdir/gn-output``. The file will +have a name derived from the arguments passed to ``gn gen``, for instance +``x64_False_x64_linux.json``. + +If updating upstream sources or vendoring a new project, this step must be +performed for each supported build configuration. If adding support for a +specific configuration, the generated configuration may be added to existing +configs before re-generating the ``moz.build`` files, which should be found under +the ``gn-configs`` directory under the vendored project's top-level directory. + +Generating moz.build files from GN JSON configs +=============================================== + +Once the relevant JSON configs are present under a project's ``gn-configs`` +directory, run:: + + $ ./mach build-backend -b GnMozbuildWriter + +This will combine the configuration files present into a set of moz.build files +that will build the given project. Once the result is verified, the resulting +moz.build files should be checked in and should build like any other part of +mozilla-central. diff --git a/build/docs/index.rst b/build/docs/index.rst new file mode 100644 index 0000000000..f2692605ba --- /dev/null +++ b/build/docs/index.rst @@ -0,0 +1,57 @@ +============ +Build System +============ + +Important Concepts +================== +.. toctree:: + :maxdepth: 1 + + glossary + build-overview + supported-configurations + Mozconfig Files + mozbuild-files + mozbuild-symbols + files-metadata + Profile Guided Optimization + slow + environment-variables + build-targets + python + test_manifests + mozinfo + preprocessor + jar-manifests + defining-binaries + defining-xpcom-components + toolchains + locales + unified-builds + rust + sparse + Support for projects building with GN + telemetry + sccache-dist + test_certificates + +integrated development environment (IDE) +======================================== +.. toctree:: + :maxdepth: 1 + + ../../contributing/vscode + androideclipse + cppeclipse + visualstudio + +mozbuild +======== + +mozbuild is a Python package containing a lot of the code for the +Mozilla build system. + +.. toctree:: + :maxdepth: 1 + + mozbuild/index diff --git a/build/docs/jar-manifests.rst b/build/docs/jar-manifests.rst new file mode 100644 index 0000000000..946026fe86 --- /dev/null +++ b/build/docs/jar-manifests.rst @@ -0,0 +1,97 @@ +.. _jar_manifests: + +============= +JAR Manifests +============= + +JAR Manifests are plaintext files in the tree that are used to package chrome +files into the correct JARs, and create +`Chrome Registration `_ +manifests. JAR Manifests are commonly named ``jar.mn``. They are +declared in ``moz.build`` files using the ``JAR_MANIFESTS`` variable. + +``jar.mn`` files are automatically processed by the build system when building a +source directory that contains one. The ``jar``.mn is run through the +:ref:`preprocessor` before being passed to the manifest processor. 
In order to +have ``@variables@`` expanded (such as ``@AB_CD@``) throughout the file, add +the line ``#filter substitution`` at the top of your ``jar.mn`` file. + +The format of a jar.mn is fairly simple; it consists of a heading specifying +which JAR file is being packaged, followed by indented lines listing files and +chrome registration instructions. + +To see a simple ``jar.mn`` file at work, see ``toolkit/profile/jar.mn``. A much +more complex ``jar.mn`` is at ``toolkit/locales/jar.mn``. + +Shipping Chrome Files +===================== + +To ship chrome files in a JAR, an indented line indicates a file to be packaged:: + + .jar: + path/in/jar/file_name.xul (source/tree/location/file_name.xul) + +The JAR location may be preceded with a base path between square brackets:: + [base/path] .jar: + path/in/jar/file_name.xul (source/tree/location/file_name.xul) + +In this case, the jar will be directly located under the given ``base/path``, +while without a base path, it will be under a ``chrome`` directory. + +If the JAR manifest and packaged file live in the same directory, the path and +parenthesis can be omitted. In other words, the following two lines are +equivalent:: + + path/in/jar/same_place.xhtml (same_place.xhtml) + path/in/jar/same_place.xhtml + +The source tree location may also be an *absolute* path (taken from the +top of the source tree:: + + path/in/jar/file_name.xul (/path/in/sourcetree/file_name.xul) + +An asterisk marker (``*``) at the beginning of the line indicates that the +file should be processed by the :ref:`preprocessor` before being packaged:: + + * path/in/jar/preprocessed.xul (source/tree/location/file_name.xul) + +Preprocessed files always replace existing files, to ensure that changes in +``#expand`` or ``#include`` directives are picked up. + +There is a special source-directory format for localized files (note the +percent sign in the source file location): this format reads ``localized.dtd`` +from the ``en-US`` directory if building an English version, and reads the +file from the alternate localization source tree +``/l10n//path/localized.dtd`` if building a localized version:: + + locale/path/localized.dtd (%localized/path/localized.dtd) + +The source tree location can also use wildcards, in which case the path in +jar is expected to be a base directory. Paths before the wildcard are not +made part of the destination path:: + + path/in/jar/ (source/tree/location/*.xul) + +The above will install all xul files under ``source/tree/location`` as +``path/in/jar/*.xul``. + +Register Chrome +=============== + +`Chrome Registration `_ +instructions are marked with a percent sign (``%``) at the beginning of the +line, and must be part of the definition of a JAR file. Any additional percents +signs are replaced with an appropriate relative URL of the JAR file being +packaged:: + + % content global %path/in/jar/ + % overlay chrome://blah/content/blah.xul chrome://foo/content/overlay.xul + +There are two possible locations for a manifest file. If the chrome is being +built into a standalone application, the ``jar.mn`` processor creates a +``.manifest`` next to the JAR file itself. This is the default +behavior. + +If the build specifies ``USE_EXTENSION_MANIFEST = 1``, the ``jar.mn`` processor +creates a single ``chrome.manifest`` file suitable for registering chrome as +an extension. diff --git a/build/docs/locales.rst b/build/docs/locales.rst new file mode 100644 index 0000000000..cde26e30fe --- /dev/null +++ b/build/docs/locales.rst @@ -0,0 +1,331 @@ +.. 
_localization: + +================ +Localized Builds +================ + +Localization repacks +==================== + +To save on build time, the build system and automation collaborate to allow +downloading a packaged en-US Firefox, performing some locale-specific +post-processing, and re-packaging a locale-specific Firefox. Such artifacts +are termed "single-locale language repacks". There is another concept of a +"multi-locale language build", which is more like a regular build and less +like a re-packaging post-processing step. + +.. note:: + + These builds rely on make targets that don't work for + `artifact builds `_. + +Instructions for single-locale repacks for developers +----------------------------------------------------- + +This assumes that ``$AB_CD`` is the locale you want to repack with; you +find the available localizations on `l10n-central `_. + +#. You must have a built and packaged object directory, or a pre-built + ``en-US`` package. + + .. code-block:: shell + + ./mach build + ./mach package + +#. Repackage using the locale-specific changes. + + .. code-block:: shell + + ./mach build installers-$AB_CD + +You should find a re-packaged build at ``OBJDIR/dist/``, and a +runnable binary in ``OBJDIR/dist/l10n-stage/``. +The ``installers`` target runs quite a few things for you, including getting +the repository for the requested locale from +https://hg.mozilla.org/l10n-central/. It will clone them into +``~/.mozbuild/l10n-central``. If you have an existing repository there, you +may want to occasionally update that via ``hg pull -u``. If you prefer +to have the l10n repositories at a different location on your disk, you +can point to the directory via + + .. code-block:: shell + + ac_add_options --with-l10n-base=/make/this/a/absolute/path + +This build also packages a language pack. + +Instructions for language packs +------------------------------- + +Language packs are extensions that contain just the localized resources. Building +them doesn't require an actual build, but they're only compatible with the +``mozilla-central`` source they're built with. + + +.. code-block:: shell + + ./mach build langpack-$AB_CD + +This target shares much of the logic of the ``installers-$AB_CD`` target above, +and does the check-out of the localization repository etc. It doesn't require +a package or a build, though. The generated language pack is in +``OBJDIR/dist/$(MOZ_PKG_PLATFORM)/xpi/``. + +.. note:: + + Despite the platform-dependent location in the build directory, language packs + are platform independent, and the content that goes into them needs to be + built in a platform-independent way. + +Instructions for multi-locale builds +------------------------------------ + +If you want to create a single build with multiple locales, you will do + +#. Create a build and package + + .. code-block:: shell + + ./mach build + ./mach package + +#. For each locale you want to include in the build: + + .. code-block:: shell + + export MOZ_CHROME_MULTILOCALE="de it zh-TW" + for AB_CD in $MOZ_CHROME_MULTILOCALE; do + ./mach build chrome-$AB_CD + done + +#. Create the multilingual package: + + .. code-block:: shell + + AB_CD=multi ./mach package + +General flow of repacks +----------------------- + +The general flow of the locale repacks is controlled by +``$MOZ_BUILD_APP/locales/Makefile.in`` and ``toolkit/locales/l10n.mk``, plus +the packaging build system. The three main entry points above all trigger +related build flows: + +#. Get the localization repository, if needed +#. 
Run l10n-merge with a prior clobber of the merge dir +#. Copy l10n files to ``dist``, with minor differences here between ``l10n-%`` and ``chrome-%`` +#. Repackage and package + +Details on l10n-merge are described in its own section below. +The copying of files is mainly controlled by ``jar.mn``, in the few source +directories that include localizable files. ``l10n-%`` is used for repacks, +``chrome-%`` for multi-locale packages. The repackaging is dedicated +Python code in ``toolkit/mozapps/installer/l10n-repack.py``, using an existing +package. It strips existing ``chrome`` l10n resources, and adds localizations +and metadata. + +Language packs don't require repackaging. The windows installers are generated +by merely packaging an existing repackaged zip into to an installer. + +Exposing strings +================ + +The localization flow handles a few file formats in well-known locations in the +source tree. + +Alongside being built by including the directory in ``$MOZ_BUILD_APP/locales/Makefile.in`` +and respective entries in a ``jar.mn``, we also have configuration files tailored +to localization tools and infrastructure. They're also controlling which +files l10n-merge handles, and how. + +These configurations are TOML files. They're part of the bigger +localization ecosystem at Mozilla, and `the documentation about the +file format `_ +explains how to set them up, and what the entries mean. In short, you find + +.. code-block:: toml + + [[paths]] + reference = browser/locales/en-US/** + l10n = {l}browser/** + +to add a directory for all localizations. Changes to these files are best +submitted for review by :Pike or :flod. + +These configuration files are the future, and right now, we still have +support for the previous way to configuring l10n, which is described below. + +The locations are commonly in directories like + + :file:`browser/`\ ``locales/en-US/``\ :file:`subdir/file.ext` + +The first thing to note is that only files beneath :file:`locales/en-US` are +exposed to localizers. The second thing to note is that only a few directories +are exposed. Which directories are exposed is defined in files called +``l10n.ini``, which are at a +`few places `_ +in the source code. + +An example looks like this + +.. code-block:: ini + + [general] + depth = ../.. + + [compare] + dirs = browser + browser/branding/official + + [includes] + toolkit = toolkit/locales/l10n.ini + +This tells the l10n infrastructure three things: + +* resolve the paths against the directory two levels up +* include files in :file:`browser/locales/en-US` and + :file:`browser/branding/official/locales/en-US` +* load more data from :file:`toolkit/locales/l10n.ini` + +For projects like Thunderbird and SeaMonkey in ``comm-central``, additional +data needs to be provided when including an ``l10n.ini`` from a different +repository: + +.. code-block:: ini + + [include_toolkit] + type = hg + mozilla = mozilla-central + repo = https://hg.mozilla.org/ + l10n.ini = toolkit/locales/l10n.ini + +This tells the l10n infrastructure where to find the repository, and where inside +that repository the ``l10n.ini`` file is. This is needed because for local +builds, :file:`mail/locales/l10n.ini` references +:file:`mozilla/toolkit/locales/l10n.ini`, which is where the comm-central +build setup expects toolkit to be. + +Now that the directories exposed to l10n are known, we can talk about the +supported file formats. 
+ +File formats +------------ + +The following file formats are known to the l10n tool chains: + +Fluent + Used in Firefox UI, both declarative and programmatically. +DTD + Deprecated. Used in XUL and XHTML. +Properties + Used from JavaScript and C++. When used from js, also comes with + `plural support `_. +ini + Used by the crashreporter and updater, avoid if possible. +inc + Used during builds, for example to create metadata for + language packs or bookmarks. + +Adding new formats involves changing various different tools, and is strongly +discouraged. + +Exceptions +---------- +Generally, anything that exists in ``en-US`` needs a one-to-one mapping in +all localizations. There are a few cases where that's not wanted, notably +around locale configuration and locale-dependent metadata. + +For optional strings and files, l10n-merge won't add ``en-US`` content if +the localization doesn't have that content. + +For the TOML files, the +`[[filters]] documentation `_ +is a good reference. In short, filters match the localized source code, optionally +a ``key``, and an action. An example like + +.. code-block:: toml + + [[filters]] + path = "{l}browser/defines.inc" + key = "MOZ_LANGPACK_CONTRIBUTORS" + action = "ignore" + +indicates that the ``MOZ_LANGPACK_CONTRIBUTORS`` in ``browser/defines.inc`` +is optional. + +For the legacy ini configuration files, there's a Python module +``filter.py`` next to the main ``l10n.ini``, implementing :py:func:`test`, with the following +signature + +.. code-block:: python + + def test(mod, path, entity = None): + if does_not_matter: + return "ignore" + if show_but_do_not_merge: + return "report" + # default behavior, localizer or build need to do something + return "error" + +For any missing file, this function is called with ``mod`` being +the *module*, and ``path`` being the relative path inside +:file:`locales/en-US`. The module is the top-level dir as referenced in +:file:`l10n.ini`. + +For missing strings, the :py:data:`entity` parameter is the key of the string +in the en-US file. + +l10n-merge +========== + +The chrome registry in Gecko doesn't support fallback from a localization to ``en-US`` at runtime. +Thus, the build needs to ensure that the localization as it's built into +the package has all required strings, and that the strings don't contain +errors. To ensure that, we're *merging* the localization and ``en-US`` +at build time, nick-named l10n-merge. + +For Fluent, we're also removing erroneous messages. For many errors in Fluent, +that's cosmetic, but when a localization has different values or attributes +on a message, that's actually important so that the DOM bindings of Fluent +can apply the translation without having to load the ``en-US`` source to +compare against. + +The process can be manually triggered via + +.. code-block:: bash + + $> ./mach build merge-$AB_CD + +It creates another directory in the object dir, :file:`browser/locales/merge-dir/$AB_CD`, in +which the sanitized files are stored. The actual repackaging process only looks +in the merged directory, so the preparation steps of l10n-merge need to ensure +that all files are generated or copied. + +l10n-merge modifies a file if it supports the particular file type, and there +are missing strings which are not filtered out, or if an existing string +shows an error. See the Checks section below for details. If the files are +not modified, l10n-merge copies them over to the respective location in the +merge dir. 
+ +Checks +------ + +As part of the build and other localization tool chains, we run a variety +of source-based checks. Think of them as linters. + +The suite of checks is usually determined by file type, i.e., there's a +suite of checks for DTD files and one for properties files, etc. + +Localizations +------------- + +Now that we talked in-depth about how to expose content to localizers, +where are the localizations? + +We host a mercurial repository per locale. All of our +localizations can be found on https://hg.mozilla.org/l10n-central/. + +You can search inside our localized files on +`Transvision `_. diff --git a/build/docs/mozbuild-files.rst b/build/docs/mozbuild-files.rst new file mode 100644 index 0000000000..9d69404732 --- /dev/null +++ b/build/docs/mozbuild-files.rst @@ -0,0 +1,176 @@ +.. _mozbuild-files: + +=============== +moz.build Files +=============== + +``moz.build`` files are the mechanism by which tree metadata (notably +the build configuration) is defined. + +Directories in the tree contain ``moz.build`` files which declare +functionality for their respective part of the tree. This includes +things such as the list of C++ files to compile, where to find tests, +etc. + +``moz.build`` files are actually Python scripts. However, their +execution is governed by special rules. This is explained below. + +moz.build Python Sandbox +======================== + +As mentioned above, ``moz.build`` files are Python scripts. However, +they are executed in a special Python *sandbox* that significantly +changes and limits the execution environment. The environment is so +different, it's doubtful most ``moz.build`` files would execute without +error if executed by a vanilla Python interpreter (e.g. ``python +moz.build``. + +The following properties make execution of ``moz.build`` files special: + +1. The execution environment exposes a limited subset of Python. +2. There is a special set of global symbols and an enforced naming + convention of symbols. +3. Some symbols are inherited from previously-executed ``moz.build`` + files. + +The limited subset of Python is actually an extremely limited subset. +Only a few symbols from ``__builtin__`` are exposed. These include +``True``, ``False``, ``None``, ``sorted``, ``int``, and ``set``. Global +functions like ``import``, ``print``, and ``open`` aren't available. +Without these, ``moz.build`` files can do very little. *This is by design*. + +The execution sandbox treats all ``UPPERCASE`` variables specially. Any +``UPPERCASE`` variable must be known to the sandbox before the script +executes. Any attempt to read or write to an unknown ``UPPERCASE`` +variable will result in an exception being raised. Furthermore, the +types of all ``UPPERCASE`` variables is strictly enforced. Attempts to +assign an incompatible type to an ``UPPERCASE`` variable will result in +an exception being raised. + +The strictness of behavior with ``UPPERCASE`` variables is a very +intentional design decision. By ensuring strict behavior, any operation +involving an ``UPPERCASE`` variable is guaranteed to have well-defined +side-effects. Previously, when the build configuration was defined in +``Makefiles``, assignments to variables that did nothing would go +unnoticed. ``moz.build`` files fix this problem by eliminating the +potential for false promises. + +After a ``moz.build`` file has completed execution, only the +``UPPERCASE`` variables are used to retrieve state. 
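As a brief, hypothetical illustration (the file and directory names are
invented), a ``moz.build`` file may mix ordinary Python with the special
``UPPERCASE`` assignments, but only the latter survive evaluation:

.. code-block:: python

    # Lowercase names are plain Python locals inside the sandbox; they are
    # discarded once this file has been evaluated.
    generated_tests = ['TestBar.cpp', 'TestFoo.cpp']

    # UPPERCASE variables are the only state the build system reads back,
    # and their types are strictly enforced.
    DIRS += ['subdir']
    UNIFIED_SOURCES += ['Foo.cpp']
    UNIFIED_SOURCES += sorted(generated_tests)

Note that ``sorted`` is one of the few builtins exposed to the sandbox, as
described above.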
+ +The set of variables and functions available to the Python sandbox is +defined by the :py:mod:`mozbuild.frontend.context` module. The +data structures in this module are consumed by the +:py:class:`mozbuild.frontend.reader.MozbuildSandbox` class to construct +the sandbox. There are tests to ensure that the set of symbols exposed +to an empty sandbox are all defined in the ``context`` module. +This module also contains documentation for each symbol, so nothing can +sneak into the sandbox without being explicitly defined and documented. + +Reading and Traversing moz.build Files +====================================== + +The process for reading ``moz.build`` files roughly consists of: + +1. Start at the root ``moz.build`` (``/moz.build``). +2. Evaluate the ``moz.build`` file in a new sandbox. +3. Emit the main *context* and any *sub-contexts* from the executed + sandbox. +4. Extract a set of ``moz.build`` files to execute next. +5. For each additional ``moz.build`` file, goto #2 and repeat until all + referenced files have executed. + +From the perspective of the consumer, the output of reading is a stream +of :py:class:`mozbuild.frontend.reader.context.Context` instances. Each +``Context`` defines a particular aspect of data. Consumers iterate over +these objects and do something with the data inside. Each object is +essentially a dictionary of all the ``UPPERCASE`` variables populated +during its execution. + +.. note:: + + Historically, there was only one ``context`` per ``moz.build`` file. + As the number of things tracked by ``moz.build`` files grew and more + and more complex processing was desired, it was necessary to split these + contexts into multiple logical parts. It is now common to emit + multiple contexts per ``moz.build`` file. + +Build System Reading Mode +------------------------- + +The traditional mode of evaluation of ``moz.build`` files is what's +called *build system traversal mode.* In this mode, the ``CONFIG`` +variable in each ``moz.build`` sandbox is populated from data coming +from ``config.status``, which is produced by ``configure``. + +During evaluation, ``moz.build`` files often make decisions conditional +on the state of the build configuration. e.g. *only compile foo.cpp if +feature X is enabled*. + +In this mode, traversal of ``moz.build`` files is governed by variables +like ``DIRS`` and ``TEST_DIRS``. For example, to execute a child +directory, ``foo``, you would add ``DIRS += ['foo']`` to a ``moz.build`` +file and ``foo/moz.build`` would be evaluated. + +.. _mozbuild_fs_reading_mode: + +Filesystem Reading Mode +----------------------- + +There is an alternative reading mode that doesn't involve the build +system and doesn't use ``DIRS`` variables to control traversal into +child directories. This mode is called *filesystem reading mode*. + +In this reading mode, the ``CONFIG`` variable is a dummy, mostly empty +object. Accessing all but a few special variables will return an empty +value. This means that nearly all ``if CONFIG['FOO']:`` branches will +not be taken. + +Instead of using content from within the evaluated ``moz.build`` +file to drive traversal into subsequent ``moz.build`` files, the set +of files to evaluate is controlled by the thing doing the reading. + +A single ``moz.build`` file is not guaranteed to be executable in +isolation. Instead, we must evaluate all *parent* ``moz.build`` files +first. 
For example, in order to evaluate ``/foo/moz.build``, one must +execute ``/moz.build`` and have its state influence the execution of +``/foo/moz.build``. + +Filesystem reading mode is utilized to power the +:ref:`mozbuild_files_metadata` feature. + +Technical Details +----------------- + +The code for reading ``moz.build`` files lives in +:py:mod:`mozbuild.frontend.reader`. The Python sandboxes evaluation results +(:py:class:`mozbuild.frontend.context.Context`) are passed into +:py:mod:`mozbuild.frontend.emitter`, which converts them to classes defined +in :py:mod:`mozbuild.frontend.data`. Each class in this module defines a +domain-specific component of tree metadata. e.g. there will be separate +classes that represent a JavaScript file vs a compiled C++ file or test +manifests. This means downstream consumers of this data can filter on class +types to only consume what they are interested in. + +There is no well-defined mapping between ``moz.build`` file instances +and the number of :py:mod:`mozbuild.frontend.data` classes derived from +each. Depending on the content of the ``moz.build`` file, there may be 1 +object derived or 100. + +The purpose of the ``emitter`` layer between low-level sandbox execution +and metadata representation is to facilitate a unified normalization and +verification step. There are multiple downstream consumers of the +``moz.build``-derived data and many will perform the same actions. This +logic can be complicated, so we have a component dedicated to it. + +:py:class:`mozbuild.frontend.reader.BuildReader`` and +:py:class:`mozbuild.frontend.reader.TreeMetadataEmitter`` have a +stream-based API courtesy of generators. When you hook them up properly, +the :py:mod:`mozbuild.frontend.data` classes are emitted before all +``moz.build`` files have been read. This means that downstream errors +are raised soon after sandbox execution. + +Lots of the code for evaluating Python sandboxes is applicable to +non-Mozilla systems. In theory, it could be extracted into a standalone +and generic package. However, until there is a need, there will +likely be some tightly coupled bits. diff --git a/build/docs/mozbuild-symbols.rst b/build/docs/mozbuild-symbols.rst new file mode 100644 index 0000000000..4e9a8853a0 --- /dev/null +++ b/build/docs/mozbuild-symbols.rst @@ -0,0 +1,7 @@ +.. _mozbuild_symbols: + +======================== +mozbuild Sandbox Symbols +======================== + +.. mozbuildsymbols:: mozbuild.frontend.context diff --git a/build/docs/mozbuild/index.rst b/build/docs/mozbuild/index.rst new file mode 100644 index 0000000000..86f38940b4 --- /dev/null +++ b/build/docs/mozbuild/index.rst @@ -0,0 +1,41 @@ +======== +mozbuild +======== + +mozbuild is a Python package providing functionality used by Mozilla's +build system. + +Modules Overview +================ + +* mozbuild.backend -- Functionality for producing and interacting with build + backends. A build backend is an entity that consumes build system metadata + (from mozbuild.frontend) and does something useful with it (typically writing + out files that can be used by a build tool to build the tree). +* mozbuild.compilation -- Functionality related to compiling. This + includes managing compiler warnings. +* mozbuild.frontend -- Functionality for reading build frontend files + (what defines the build system) and converting them to data structures + which are fed into build backends to produce backend configurations. +* mozpack -- Functionality related to packaging builds. 
+ +Overview +======== + +The build system consists of frontend files that define what to do. They +say things like "compile X" "copy Y." + +The mozbuild.frontend package contains code for reading these frontend +files and converting them to static data structures. The set of produced +static data structures for the tree constitute the current build +configuration. + +There exist entities called build backends. From a high level, build +backends consume the build configuration and do something with it. They +typically produce tool-specific files such as make files which can be used +to build the tree. + +Piecing it all together, we have frontend files that are parsed into data +structures. These data structures are fed into a build backend. The output +from build backends is used by builders to build the tree. + diff --git a/build/docs/mozconfigs.rst b/build/docs/mozconfigs.rst new file mode 100644 index 0000000000..1859b87875 --- /dev/null +++ b/build/docs/mozconfigs.rst @@ -0,0 +1,69 @@ +.. _mozconfig: + +=============== +mozconfig Files +=============== + +mozconfig files are used to configure how a build works. + +mozconfig files are actually shell scripts. They are executed in a +special context with specific variables and functions exposed to them. + +API +=== + +Functions +--------- + +The following special functions are available to a mozconfig script. + +ac_add_options +^^^^^^^^^^^^^^ + +This function is used to declare extra options/arguments to pass into +configure. + +e.g.:: + + ac_add_options --disable-tests + ac_add_options --enable-optimize + +mk_add_options +^^^^^^^^^^^^^^ + +This function is used to inject statements into client.mk for execution. +It is typically used to define variables, notably the object directory. + +e.g.:: + + mk_add_options AUTOCLOBBER=1 + +Special mk_add_options Variables +-------------------------------- + +For historical reasons, the method for communicating certain +well-defined variables is via mk_add_options(). In this section, we +document what those special variables are. + +MOZ_OBJDIR +^^^^^^^^^^ + +This variable is used to define the :term:`object directory` for the current +build. + +Finding the active mozconfig +============================ + +Multiple mozconfig files can exist to provide different configuration +options for different tasks. The rules for finding the active mozconfig +are defined in the +:py:func:`mozboot.mozconfig.find_mozconfig` method. + +.. automodule:: mozboot.mozconfig + :members: find_mozconfig + +Loading the active mozconfig +---------------------------- + +.. autoclass:: mozbuild.mozconfig.MozconfigLoader + :members: read_mozconfig diff --git a/build/docs/mozinfo.rst b/build/docs/mozinfo.rst new file mode 100644 index 0000000000..795ee3c219 --- /dev/null +++ b/build/docs/mozinfo.rst @@ -0,0 +1,176 @@ +.. _mozinfo: + +======= +mozinfo +======= + +``mozinfo`` is a solution for representing a subset of build +configuration and run-time data. + +``mozinfo`` data is typically accessed through a ``mozinfo.json`` file +which is written to the :term:`object directory` during build +configuration. The code for writing this file lives in +:py:mod:`mozbuild.mozinfo`. + +``mozinfo.json`` is an object/dictionary of simple string values. + +The attributes in ``mozinfo.json`` are used for many purposes. One use +is to filter tests for applicability to the current build. For more on +this, see :ref:`test_manifests`. + +.. 
_mozinfo_attributes: + +mozinfo.json Attributes +================================= + +``mozinfo`` currently records the following attributes. + +appname + The application being built. + + Value comes from ``MOZ_APP_NAME`` from ``config.status``. + + Optional. + +asan + Whether address sanitization is enabled. + + Values are ``true`` and ``false``. + + Always defined. + +bin_suffix + The file suffix for binaries produced with this build. + + Values may be an empty string, as not all platforms have a binary + suffix. + + Always defined. + +bits + The number of bits in the CPU this build targets. + + Values are typically ``32`` or ``64``. + + Universal Mac builds do not have this key defined. + + Unknown processor architectures (see ``processor`` below) may not have + this key defined. + + Optional. + +buildapp + The path to the XUL application being built. + + For desktop Firefox, this is ``browser``. For Fennec, it's + ``mobile/android``. + +crashreporter + Whether the crash reporter is enabled for this build. + + Values are ``true`` and ``false``. + + Always defined. + +datareporting + Whether data reporting (MOZ_DATA_REPORTING) is enabled for this build. + + Values are ``true`` and ``false``. + + Always defined. + +debug + Whether this is a debug build. + + Values are ``true`` and ``false``. + + Always defined. + +devedition + Whether this is a devedition build. + + Values are ``true`` and ``false``. + + Always defined. + +healthreport + Whether the Health Report feature is enabled. + + Values are ``true`` and ``false``. + + Always defined. + +mozconfig + The path of the :ref:`mozconfig file ` used to produce this build. + + Optional. + +nightly_build + Whether this is a nightly build. + + Values are ``true`` and ``false``. + + Always defined. + +os + The operating system the build is produced for. Values for tier-1 + supported platforms are ``linux``, ``win``, ``mac``, and + ``android``. For other platforms, the value is the lowercase version + of the ``OS_TARGET`` variable from ``config.status``. + + Always defined. + +processor + Information about the processor architecture this build targets. + + Values come from ``TARGET_CPU``, however some massaging may be + performed. + + If the build is a universal build on Mac (it targets both 32-bit and + 64-bit), the value is ``universal-x86-x86_64``. + + If the value starts with ``arm``, the value is ``arm``. + + If the value starts with a string of the form ``i[3-9]86]``, the + value is ``x86``. + + Always defined. + +release_or_beta + Whether this is a release or beta build. + + Values are ``true`` and ``false``. + + Always defined. + +stylo + Whether the Stylo styling system is being used. + + Values are ``true`` and ``false``. + + Always defined. + +tests_enabled + Whether tests are enabled for this build. + + Values are ``true`` and ``false``. + + Always defined. + +toolkit + The widget toolkit in case. The value comes from the + ``MOZ_WIDGET_TOOLKIT`` ``config.status`` variable. + + Always defined. + +topsrcdir + The path to the source directory the build came from. + + Always defined. + +webrender + Whether or not WebRender is enabled as the Gecko compositor. + + Values are ``true`` and ``false``. + + Always defined. diff --git a/build/docs/pgo.rst b/build/docs/pgo.rst new file mode 100644 index 0000000000..722056c727 --- /dev/null +++ b/build/docs/pgo.rst @@ -0,0 +1,28 @@ +.. 
_pgo: + +=========================== +Profile Guided Optimization +=========================== + +:abbr:`PGO (Profile Guided Optimization)` is the process of adding +probes to a compiled binary, running said binary, then using the +run-time information to *recompile* the binary to (hopefully) make it +faster. + +How PGO Builds Work +=================== + +The supported interface for invoking a PGO build is to add ``MOZ_PGO=1`` to +configure flags and then build. e.g. in your mozconfig:: + + ac_add_options MOZ_PGO=1 + +Then:: + + $ ./mach build + +This is roughly equivalent to:: + +#. Perform a build with *--enable-profile-generate* in $topobjdir/instrumented +#. Perform a run of the instrumented binaries with build/pgo/profileserver.py +#. Perform a build with *--enable-profile-use* in $topobjdir diff --git a/build/docs/preprocessor.rst b/build/docs/preprocessor.rst new file mode 100644 index 0000000000..5ce9092ed9 --- /dev/null +++ b/build/docs/preprocessor.rst @@ -0,0 +1,219 @@ +.. _preprocessor: + +================= +Text Preprocessor +================= + +The build system contains a text preprocessor similar to the C preprocessor, +meant for processing files which have no built-in preprocessor such as XUL +and JavaScript documents. It is implemented at ``python/mozbuild/mozbuild/preprocessor.py`` and +is typically invoked via :ref:`jar_manifests`. + +While used to preprocess CSS files, the directives are changed to begin with +``%`` instead of ``#`` to avoid conflict of the id selectors. + +Directives +========== + +Variable Definition +------------------- + +define +^^^^^^ + +:: + + #define variable + #define variable value + +Defines a preprocessor variable. + +Note that, unlike the C preprocessor, instances of this variable later in the +source are not automatically replaced (see #filter). If value is not supplied, +it defaults to ``1``. + +Note that whitespace is significant, so ``"#define foo one"`` and +``"#define foo one "`` is different (in the second case, ``foo`` is defined to +be a four-character string). + +undef +^^^^^ + +:: + + #undef variable + +Undefines a preprocessor variable. + +Conditionals +------------ + +if +^^ + +:: + + #if variable + #if !variable + #if variable == string + #if variable != string + +Disables output if the conditional is false. This can be nested to arbitrary +depths. Note that in the equality checks, the variable must come first. + +else +^^^^ + +:: + + #else + +Reverses the state of the previous conditional block; for example, if the +last ``#if`` was true (output was enabled), an ``#else`` makes it off +(output gets disabled). + +endif +^^^^^ + +:: + + #endif + +Ends the conditional block. + +ifdef / ifndef +^^^^^^^^^^^^^^ + +:: + + #ifdef variable + #ifndef variable + +An ``#if`` conditional that is true only if the preprocessor variable +variable is defined (in the case of ``ifdef``) or not defined (``ifndef``). + +elif / elifdef / elifndef +^^^^^^^^^^^^^^^^^^^^^^^^^ + +:: + + #elif variable + #elif !variable + #elif variable == string + #elif variable != string + #elifdef variable + #elifndef variable + +A shorthand to mean an ``#else`` combined with the relevant conditional. +The following two blocks are equivalent:: + + #ifdef foo + block 1 + #elifdef bar + block 2 + #endif + +:: + + #ifdef foo + block 1 + #else + #ifdef bar + block 2 + #endif + #endif + +File Inclusion +-------------- + +include +^^^^^^^ + +:: + + #include filename + +The file specified by filename is processed as if the contents was placed +at this position. 
This also means that preprocessor conditionals can even +be started in one file and ended in another (but is highly discouraged). +There is no limit on depth of inclusion, or repeated inclusion of the same +file, or self inclusion; thus, care should be taken to avoid infinite loops. + +includesubst +^^^^^^^^^^^^ + +:: + + #includesubst @variable@filename + +Same as a ``#include`` except that all instances of variable in the included +file is also expanded as in ``#filter`` substitution + +expand +^^^^^^ + +:: + + #expand string + +All variables wrapped in ``__`` are replaced with their value, for this line +only. If the variable is not defined, it expands to an empty string. For +example, if ``foo`` has the value ``bar``, and ``baz`` is not defined, then:: + + #expand This <__foo__> <__baz__> gets expanded + +Is expanded to:: + + This <> gets expanded + +filter / unfilter +^^^^^^^^^^^^^^^^^ + +:: + + #filter filter1 filter2 ... filterN + #unfilter filter1 filter2 ... filterN + +``#filter`` turns on the given filter. + +Filters are run in alphabetical order on a per-line basis. + +``#unfilter`` turns off the given filter. Available filters are: + +emptyLines + strips blank lines from the output +dumbComments + dumbComments: empties out any line that consists of optional whitespace + followed by a ``//``. Good for getting rid of comments that are on their + own lines, and being smarter with a simple regexp filter is impossible +substitution + all variables wrapped in @ are replaced with their value. If the + variable is not defined, it is a fatal error. Similar to ``#expand`` + and ``#filter`` +attemptSubstitution + all variables wrapped in ``@`` are replaced with their value, or an + empty string if the variable is not defined. Similar to ``#expand``. + +literal +^^^^^^^ + +:: + + #literal string + +Output the string (i.e. the rest of the line) literally, with no other fixups. +This is useful to output lines starting with ``#``, or to temporarily +disable filters. + +Other +----- + +#error +^^^^^^ + +:: + + #error string + +Cause a fatal error at this point, with the error message being the +given string. diff --git a/build/docs/python.rst b/build/docs/python.rst new file mode 100644 index 0000000000..200c48ce21 --- /dev/null +++ b/build/docs/python.rst @@ -0,0 +1,178 @@ +.. _python: + +=========================== +Python and the Build System +=========================== + +The Python programming language is used significantly in the build +system. If we need to write code for the build system or for a tool +related to the build system, Python is typically the first choice. + +Python Requirements +=================== + +The tree requires Python 2.7.3 or greater but not Python 3 to build. +All Python packages not in the Python distribution are included in the +source tree. So all you should need is a vanilla Python install and you +should be good to go. + +Only CPython (the Python distribution available from www.python.org) is +supported. + +We require Python 2.7.3 (and not say 2.7.2) to build because Python +2.7.3 contains numerous bug fixes, especially around the area of Unicode +handling. These bug fixes are extremely annoying and have to be worked +around. The build maintainers were tired of doing this, so the minimum +version requirement was upped (bug 870420). + +We intend to eventually support Python 3. This will come by way of dual +2.7/3.x compatibility because a single flag day conversion to 3.x will +be too cumbersome given the amount of Python that would need converted. 
+We will not know which 3.x minor release we are targeting until this +effort is underway. This is tracked in bug 636155. + +Compiled Python Packages +======================== + +There are some features of the build that rely on compiled Python packages +(packages containing C source). These features are currently all +optional because not every system contains the Python development +headers required to build these extensions. + +We recommend you have the Python development headers installed (``mach +bootstrap`` should do this for you) so you can take advantage of these +features. + +Issues with OS X System Python +============================== + +The Python that ships with OS X has historically been littered with +subtle bugs and suboptimalities. Furthermore, OS X up through 10.8 don't +ship with Python 2.7.3 (10.8 ships with 2.7.2). + +OS X 10.8 and below users will be required to install a new Python +distribution. This may not be necessary for OS X 10.9+. However, we +still recommend installing a separate Python because of the history with +OS X's system Python issues. + +We recommend installing Python through Homebrew or MacPorts. If you run +``mach bootstrap``, this should be done for you. + +Virtualenvs +=========== + +The build system relies heavily on +`virtualenvs `_. Virtualenvs are +standalone and isolated Python environments. The problem a virtualenv +solves is that of dependencies across multiple Python components. If two +components on a system relied on different versions of a package, there +could be a conflict. Instead of managing multiple versions of a package +simultaneously, Python and virtualenvs take the route that it is easier +to just keep them separate so there is no potential for conflicts. + +Very early in the build process, a virtualenv is created inside the +:term:`object directory`. The virtualenv is configured such that it can +find all the Python packages in the source tree. The code for this lives +in :py:mod:`mozbuild.virtualenv`. + +Deficiencies +------------ + +There are numerous deficiencies with the way virtualenvs are handled in +the build system. + +* mach reinvents the virtualenv. + + There is code in ``build/mach_bootstrap.py`` that configures ``sys.path`` + much the same way the virtualenv does. There are various bugs tracking + this. However, no clear solution has yet been devised. It's not a huge + problem and thus not a huge priority. + +* They aren't preserved across copies and packaging. + + If you attempt to copy an entire tree from one machine to another or + from one directory to another, chances are the virtualenv will fall + apart. It would be nice if we could preserve it somehow. Instead of + actually solving portable virtualenvs, all we really need to solve is + encapsulating the logic for populating the virtualenv along with all + dependent files in the appropriate place. + +* .pyc files written to source directory. + + We rely heavily on ``.pth`` files in our virtualenv. A ``.pth`` file + is a special file that contains a list of paths. Python will take the + set of listed paths encountered in ``.pth`` files and add them to + ``sys.path``. + + When Python compiles a ``.py`` file to bytecode, it writes out a + ``.pyc`` file so it doesn't have to perform this compilation again. + It puts these ``.pyc`` files alongside the ``.pyc`` file. Python + provides very little control for determining where these ``.pyc`` files + go, even in Python 3 (which offers customer importers). 
+ + With ``.pth`` files pointing back to directories in the source tree + and not the object directory, ``.pyc`` files are created in the source + tree. This is bad because when Python imports a module, it first looks + for a ``.pyc`` file before the ``.py`` file. If there is a ``.pyc`` + file but no ``.py`` file, it will happily import the module. This + wreaks havoc during file moves, refactoring, etc. + + There are various proposals for fixing this. See bug 795995. + +Installing Python Manually +========================== + +We highly recommend you use your system's package manager or a +well-supported 3rd party package manager to install Python for you. If +these are not available to you, we recommend the following tools for +installing Python: + +* `buildout.python `_ +* `pyenv `_ +* An official installer from http://www.python.org. + +If all else fails, consider compiling Python from source manually. But this +should be viewed as the least desirable option. + +Common Issues with Python +========================= + +Upgrading your Python distribution breaks the virtualenv +-------------------------------------------------------- + +If you upgrade the Python distribution (e.g. install Python 2.7.5 +from 2.7.3, chances are parts of the virtualenv will break. +This commonly manifests as a cryptic ``Cannot import XXX`` exception. +More often than not, the module being imported contains binary/compiled +components. + +If you upgrade or reinstall your Python distribution, we recommend +clobbering your build. + +Packages installed at the system level conflict with build system's +------------------------------------------------------------------- + +It is common for people to install Python packages using ``sudo`` (e.g. +``sudo pip install psutil``) or with the system's package manager +(e.g. ``apt-get install python-mysql``. + +A problem with this is that packages installed at the system level may +conflict with the package provided by the source tree. As of bug 907902 +and changeset f18eae7c3b27 (September 16, 2013), this should no longer +be an issue since the virtualenv created as part of the build doesn't +add the system's ``site-packages`` directory to ``sys.path``. However, +poorly installed packages may still find a way to creep into the mix and +interfere with our virtualenv. + +As a general principle, we recommend against using your system's package +manager or using ``sudo`` to install Python packages. Instead, create +virtualenvs and isolated Python environments for all of your Python +projects. + +Python on $PATH is not appropriate +---------------------------------- + +Tools like ``mach`` will look for Python by performing ``/usr/bin/env +python`` or equivalent. Please be sure the appropriate Python 2.7.3+ +path is on $PATH. On OS X, this likely means you'll need to modify your +shell's init script to put something ahead of ``/usr/bin``. diff --git a/build/docs/rust.rst b/build/docs/rust.rst new file mode 100644 index 0000000000..c3d377d160 --- /dev/null +++ b/build/docs/rust.rst @@ -0,0 +1,180 @@ +.. _rust: + +============================== +Including Rust Code in Firefox +============================== + +This page explains how to add, build, link, and vendor Rust crates. + +The `code documentation <../../writing-rust-code>`_ explains how to write and +work with Rust code in Firefox. The +`test documentation <../../testing-rust-code>`_ explains how to test and debug +Rust code in Firefox. 
+ +Linking Rust crates into libxul +=============================== + +Rust crates that you want to link into libxul should be listed in the +``dependencies`` section of +`toolkit/library/rust/shared/Cargo.toml `_. +You must also add an ``extern crate`` reference to +`toolkit/library/rust/shared/lib.rs `_. +This ensures that the Rust code will be linked properly into libxul as well +as the copy of libxul used for gtests. (Even though Rust 2018 mostly doesn't +require ``extern crate`` declarations, these ones are necessary because the +gkrust setup is non-typical.) + +After adding your crate, execute ``cargo update -p gkrust-shared`` to update +the ``Cargo.lock`` file. You will also need to do this any time you change the +dependencies in a ``Cargo.toml`` file. If you don't, you will get a build error +saying **"error: the lock file /home/njn/moz/mc3/Cargo.lock needs to be updated +but --frozen was passed to prevent this"**. + +By default, all Cargo packages in the mozilla-central repository are part of +the same +`workspace `_ +and will share the ``Cargo.lock`` file and ``target`` directory in the root of +the repository. You can change this behavior by adding a path to the +``exclude`` list in the top-level ``Cargo.toml`` file. You may want to do +this if your package's development workflow includes dev-dependencies that +aren't needed by general Firefox developers or test infrastructure. + +The actual build mechanism is as follows. The build system generates a special +'Rust unified library' crate, compiles that to a static library +(``libgkrust.a``), and links that into libxul, so all public symbols will be +available to C++ code. Building a static library that is linked into a dynamic +library is easier than building dynamic libraries directly, and it also avoids +some subtle issues around how mozalloc works that make the Rust dynamic library +path a little wonky. + +Linking Rust crates into something else +======================================= + +To link Rust code into libraries other than libxul, create a directory with a +``Cargo.toml`` file for your crate, and a ``moz.build`` file that contains: + +.. code-block:: python + + RustLibrary('crate_name') + +where ``crate_name`` matches the name from the ``[package]`` section of your +``Cargo.toml``. You can refer to `the moz.build file `_ and `the Cargo.toml file `_ that are used for libxul. + +You can then add ``USE_LIBS += ['crate_name']`` to the ``moz.build`` file +that defines the binary as you would with any other library in the tree. + +.. important:: + + You cannot link a Rust crate into an intermediate library that will be + eventually linked into libxul. The build system enforces that only a single + ``RustLibrary`` may be linked into a binary. If you need to do this, you + will have to add a ``RustLibrary`` to link to any standalone binaries that + link the intermediate library, and also add the Rust crate to the libxul + dependencies as in `linking Rust Crates into libxul`_. + +Conditional compilation +======================== + +Edit `tool/library/rust/gkrust-features.mozbuild +`_ +to expose build flags as Cargo features. + +Standalone Rust programs +======================== + +It is also possible to build standalone Rust programs. First, put the Rust +program (including the ``Cargo.toml`` file and the ``src`` directory) in its +own directory, and add an empty ``moz.build`` file to the same directory. + +Then, if the standalone Rust program must run on the compile target (e.g. 
+because it's shipped with Firefox) then add this rule to the ``moz.build`` +file: + +.. code-block:: python + + RUST_PROGRAMS = ['prog_name'] + +where *prog_name* is the name of the executable as specified in the +``Cargo.toml`` (and probably also matches the name of the directory). + +Otherwise, if the standalone Rust program must run on the compile host (e.g. +because it's used to build Firefox but not shipped with Firefox) then do the +same thing, but use ``HOST_RUST_PROGRAMS`` instead of ``RUST_PROGRAMS``. + +Where should I put my crate? +============================ + +If your crate's canonical home is mozilla-central, you can put it next to the +related code in the appropriate directory. + +If your crate is mirrored into mozilla-central from another repository, and +will not be actively developed in mozilla-central, you can simply list it +as a ``crates.io``-style dependency with a version number, and let it be +vendored into the ``third_party/rust`` directory. + +If your crate is mirrored into mozilla-central from another repository, but +will be actively developed in both locations, you should send mail to the +dev-builds mailing list to start a discussion on how to meet your needs. + +Third-party crate dependencies +============================== + +Third-party dependencies for in-tree Rust crates are *vendored* into the +``third_party/rust`` directory of mozilla-central. This means that a copy of +each third-party crate's code is committed into mozilla-central. As a result, +building Firefox does not involve downloading any third-party crates. + +If you add a dependency on a new crate you must run ``mach vendor rust`` to +vendor the dependencies into that directory. (Note that ``mach vendor rust`` +`may not work as well on Windows `_ +as on other platforms.) + +When it comes to checking the suitability of third-party code for inclusion +into mozilla-central, keep the following in mind. + +- ``mach vendor rust`` will check that the licenses of all crates are suitable. +- You should review the crate code to some degree to check that it looks + reasonable (especially for unsafe code) and that it has reasonable tests. +- Third-party crate tests aren't run, which means that large test fixtures will + bloat mozilla-central. Consider working with upstream to mark those test + fixtures with ``[package] exclude = ...`` as described + `here `_. +- If you specify a dependency on a branch, pin it to a specific revision, + otherwise other people will get unexpected changes when they run ``./mach + vendor rust`` any time the branch gets updated. See `bug 1612619 + `_ for a case where such a problem was fixed. +- Other than that, there is no formal sign-off procedure, but one may be added + in the future. + +Note that all dependencies will be vendored, even ones that aren't used due to +disabled features. It's possible that multiple versions of a crate will end up +vendored into mozilla-central. + +Patching third-party crates +=========================== + +Sometimes you might want to temporarily patch a third-party crate, for local +builds or for a try push. + +To do this, first add an entry to the ``[patch.crates-io]`` section of the +top-level ``Cargo.toml`` that points to the crate within ``third_party``. For +example + +.. code-block:: toml + + bitflags = { path = "third_party/rust/bitflags" } + +Next, run ``cargo update -p $CRATE_NAME --precise $VERSION``, where +``$CRATE_NAME`` is the name of the patched crate, and ``$VERSION`` is its +version number. 
This will update the ``Cargo.lock`` file. + +Then, make the local changes to the crate. + +Finally, make sure you don't accidentally land the changes to the crate or the +``Cargo.lock`` file. + +For an example of a more complex workflow involving a third-party crate, see +`mp4parse-rust/README.md `_. +It describes the workflow for a crate that is hosted on GitHub, and for which +changes are made via GitHub pull requests, but all pull requests must also be +tested within mozilla-central before being merged. diff --git a/build/docs/sccache-dist.rst b/build/docs/sccache-dist.rst new file mode 100644 index 0000000000..f98a0f5d35 --- /dev/null +++ b/build/docs/sccache-dist.rst @@ -0,0 +1,220 @@ +.. _sccache_dist: + +================================== +Distributed sccache (sccache-dist) +================================== + +`sccache `_ is a ccache-like tool written in +rust. + +Distributed sccache (also referred to as sccache-dist) is being rolled out to +Mozilla offices as a replacement for icecc. The steps for setting up your +machine as an sccache-dist server as well as distributing your build to servers +in your office are detailed below. + +In addition to improved security properties, distributed sccache offers +distribution and caching of rust compilation, so it should be an improvement +above and beyond what we see with icecc. Build servers run on Linux and +distributing builds is currently supported from Linux, macOS, and Windows. + + +Steps for distributing a build as an sccache-dist client +======================================================== + +Start by following the instructions at https://github.com/mozilla/sccache/blob/master/docs/DistributedQuickstart.md#configure-a-client +to configure your sccache distributed client. +*NOTE* If you're distributing from Linux a toolchain will be packaged +automatically and provided to the build server. If you're distributing from +Windows or macOS, start by using the cross-toolchains provided by +``./mach bootstrap`` rather than attempting to use ``icecc-create-env``. +sccache 0.2.12 or above is recommended, and the auth section of your config +must read:: + + [dist.auth] + type = "mozilla" + +* The scheduler url to use is: ``https://sccache1.corpdmz..mozilla.com``, + where is, for instance, sfo1. A complete list of office short names + to be used can be found in the `Office Addressing Schemes spreadsheet `_. + +* To use distributed sccache from a Mozilla office, you must be on the corporate + network. Use the ``Mozilla`` ssid for wireless. The corp vlan is the default + if wired. + +* If you're compiling from a macOS client, there are a handful of additional + considerations outlined here: + https://github.com/mozilla/sccache/blob/master/docs/DistributedQuickstart.md#considerations-when-distributing-from-macos. + + Run ``./mach bootstrap`` to download prebuilt toolchains to + ``~/.mozbuild/clang-dist-toolchain.tar.xz`` and + ``~/.mozbuild/rustc-dist-toolchain.tar.xz``. 
This is an example of the paths + that should be added to your client config to specify toolchains to build on + macOS, located at ``~/Library/Application Support/Mozilla.sccache/config``:: + + [[dist.toolchains]] + type = "path_override" + compiler_executable = "/path/to/home/.rustup/toolchains/stable-x86_64-apple-darwin/bin/rustc" + archive = "/path/to/home/.mozbuild/rustc-dist-toolchain.tar.xz" + archive_compiler_executable = "/builds/worker/toolchains/rustc/bin/rustc" + + [[dist.toolchains]] + type = "path_override" + compiler_executable = "/path/to/home/.mozbuild/clang/bin/clang" + archive = "/path/to/home/.mozbuild/clang-dist-toolchain.tar.xz" + archive_compiler_executable = "/builds/worker/toolchains/clang/bin/clang" + + [[dist.toolchains]] + type = "path_override" + compiler_executable = "/path/to/home/.mozbuild/clang/bin/clang++" + archive = "/path/to/home/.mozbuild/clang-dist-toolchain.tar.xz" + archive_compiler_executable = "/builds/worker/toolchains/clang/bin/clang" + + Note that the version of ``rustc`` found in ``rustc-dist-toolchain.tar.xz`` + must match the version of ``rustc`` used locally. The distributed archive + will contain the version of ``rustc`` used by automation builds, which may + lag behind stable for a few days after Rust releases, which is specified by + the task definition in + `this file `_. + For instance, to specify 1.37.0 rather than the current stable, run + ``rustup toolchain add 1.37.0`` and point to + ``/path/to/home/.rustup/toolchains/1.37.0-x86_64-apple-darwin/bin/rustc`` in your + client config. + + The build system currently requires an explicit target to be passed with + ``HOST_CFLAGS`` and ``HOST_CXXFLAGS`` e.g.:: + + export HOST_CFLAGS="--target=x86_64-apple-darwin16.0.0" + export HOST_CXXFLAGS="--target=x86_64-apple-darwin16.0.0" + +* Compiling from a Windows client is supported but hasn't seen as much testing + as other platforms. 
The following example mozconfig can be used as a guide:: + + ac_add_options CCACHE="C:/Users//.mozbuild/sccache/sccache.exe" + + export CC="C:/Users//.mozbuild/clang/bin/clang-cl.exe --driver-mode=cl" + export CXX="C:/Users//.mozbuild/clang/bin/clang-cl.exe --driver-mode=cl" + export HOST_CC="C:/Users//.mozbuild/clang/bin/clang-cl.exe --driver-mode=cl" + export HOST_CXX="C:/Users//.mozbuild/clang/bin/clang-cl.exe --driver-mode=cl" + + The client config should be located at + ``~/AppData/Roaming/Mozilla/sccache/config/config``, and as on macOS custom + toolchains should be obtained with ``./mach bootstrap`` and specified in the + client config, for example:: + + [[dist.toolchains]] + type = "path_override" + compiler_executable = "C:/Users//.mozbuild/clang/bin/clang-cl.exe" + archive = "C:/Users//.mozbuild/clang-dist-toolchain.tar.xz" + archive_compiler_executable = "/builds/worker/toolchains/clang/bin/clang" + + [[dist.toolchains]] + type = "path_override" + compiler_executable = "C:/Users//.rustup/toolchains/stable-x86_64-pc-windows-msvc/bin/rustc.exe" + archive = "C:/Users//.mozbuild/rustc-dist-toolchain.tar.xz" + archive_compiler_executable = "/builds/worker/toolchains/rustc/bin/rustc" + +* Add the following to your mozconfig:: + + ac_add_options CCACHE=/path/to/home/.mozbuild/sccache/sccache + + If you're compiling from a macOS client, you might need some additional configuration:: + + # Set the target flag to Darwin + export CFLAGS="--target=x86_64-apple-darwin16.0.0" + export CXXFLAGS="--target=x86_64-apple-darwin16.0.0" + export HOST_CFLAGS="--target=x86_64-apple-darwin16.0.0" + export HOST_CXXFLAGS="--target=x86_64-apple-darwin16.0.0" + + # Specify the macOS SDK to use + ac_add_options --with-macos-sdk=/path/to/MacOSX-SDKs/MacOSX10.12.sdk + + You can get the right macOS SDK by downloading an old version of XCode from + `developer.apple.com `_ and unpacking the SDK + from it. + +* When attempting to get your client running, the output of ``sccache -s`` should + be consulted to confirm compilations are being distributed. To receive helpful + logging from the local daemon in case they aren't, run + ``SCCACHE_NO_DAEMON=1 SCCACHE_START_SERVER=1 SCCACHE_LOG=sccache=trace path/to/sccache`` + in a terminal window separate from your build prior to building. *NOTE* use + ``RUST_LOG`` instead of ``SCCACHE_LOG`` if your build of ``sccache`` does not + include `pull request 822 + `_. (``sccache`` binaries from + ``mach bootstrap`` do include this PR.) + +* Run ``./mach build -j`` with an appropriately large ````. + ``sccache --dist-status`` should provide the number of cores available to you + (or a message if you're not connected). In the future this will be integrated + with the build system to automatically select an appropriate value. + +This should be enough to distribute your build and replace your use of icecc. +Bear in mind there may be a few speedbumps, and please ensure your version of +sccache is current before investigating further. Please see the common questions +section below and ask for help if anything is preventing you from using it over +email (dev-builds), on slack in #sccache, or in #build on irc. + +Steps for setting up a server +============================= + +Build servers must run linux and use bubblewrap 0.3.0+ for sandboxing of compile +processes. This requires a kernel 4.6 or greater, so Ubuntu 18+, RHEL 8, or +similar. + +* Run ``./mach bootstrap`` or + ``./mach artifact toolchain --from-build linux64-sccache`` to acquire a recent + version of ``sccache-dist``. 
Please use a ``sccache-dist`` binary acquired in + this fashion to ensure compatibility with statically linked dependencies. + +* Collect the IP of your builder and request assignment of a static IP in a bug + filed in + `NetOps :: Other `_ + This bug should include your office (SFO, YVR, etc.), your MAC address, and a + description of why you want a static IP (“To serve as an sccache builder” + should be sufficient). + +* Visit the ``sccache`` section of https://login.mozilla.com to generate an auth + token for your builder. + +* The instructions at https://github.com/mozilla/sccache/blob/master/docs/DistributedQuickstart.md#configure-a-build-server + should contain everything else required to configure and run the server. + + *NOTE* Port 10500 will be used by convention for builders in offices. + Please use port 10500 in the ``public_addr`` section of your builder config. + + Extra logging may be helpful when setting up a server. To enable logging, + run your server with + ``sudo env SCCACHE_LOG=sccache=trace ~/.mozbuild/sccache/sccache-dist server --config ~/.config/sccache/server.conf`` + (or similar). *NOTE* ``sudo`` *must* come before setting environment variables + for this to work. *NOTE* use ``RUST_LOG`` instead of ``SCCACHE_LOG`` if your + build of ``sccache`` does not include `pull request 822 + `_. (``sccache`` binaries from + ``mach bootstrap`` do include this PR.) + + As when configuring a client, the scheduler url to use is: + ``https://sccache1.corpdmz..mozilla.com``, where is an + office abbreviation found + `here `_. + + +Common questions/considerations +=============================== + +* My build is still slow: scache-dist can only do so much with parts of the + build that aren't able to be parallelized. To start debugging a slow build, + ensure the "Successful distributed compilations" line in the output of + ``sccache -s`` dominates other counts. For a full build, at least a 2-3x + improvement should be observed. + +* My build output is incomprehensible due to a flood of warnings: clang will + treat some warnings differently when it's fed preprocessed code in a separate + invocation (preprocessing occurs locally with sccache-dist). Adding + ``rewrite_includes_only = true`` to the ``dist`` section of your client config + will improve this; however, setting this will cause build failures with a + commonly deployed version of ``glibc``. This option will default to ``true`` + once the fix is more widely available. Details of this fix can be found in + `this patch `_. + +* My build fails with a message about incompatible versions of rustc between + dependent crates: if you're using a custom toolchain check that the version + of rustc in your ``rustc-dist-toolchain.tar.xz`` is the same as the version + you're running locally. diff --git a/build/docs/slow.rst b/build/docs/slow.rst new file mode 100644 index 0000000000..ea45197d05 --- /dev/null +++ b/build/docs/slow.rst @@ -0,0 +1,177 @@ +.. _slow: + +============================ +Why the Build System is Slow +============================ + +A common complaint about the build system is that it's slow. There are +many reasons contributing to its slowness. We will attempt to document +them here. + +First, it is important to distinguish between a :term:`clobber build` +and an :term:`incremental build`. The reasons for why each are slow can +be different. + +The build does a lot of work +============================ + +It may not be obvious, but the main reason the build system is slow is +because it does a lot of work! 
The source tree consists of a few +thousand C++ files. On a modern machine, we spend over 120 minutes of CPU +core time compiling files! So, if you are looking for the root cause of +slow clobber builds, look at the sheer volume of C++ files in the tree. + +You don't have enough CPU cores and MHz +======================================= + +The build should be CPU bound. If the build system maintainers are +optimizing the build system perfectly, every CPU core in your machine +should be 100% saturated during a build. While this isn't currently the +case (keep reading below), generally speaking, the more CPU cores you +have in your machine and the more total MHz in your machine, the better. + +**We highly recommend building with no fewer than 4 physical CPU +cores.** Please note the *physical* in this sentence. Hyperthreaded +cores (an Intel Core i7 will report 8 CPU cores but only 4 are physical +for example) only yield at most a 1.25x speedup per core. + +We also recommend using the most modern CPU model possible. Haswell +chips deliver much more performance per CPU cycle than say Sandy Bridge +CPUs. + +This cause impacts both clobber and incremental builds. + +You are building with a slow I/O layer +====================================== + +The build system can be I/O bound if your I/O layer is slow. Linking +libxul on some platforms and build architectures can perform gigabytes +of I/O. + +To minimize the impact of slow I/O on build performance, **we highly +recommend building with an SSD.** Power users with enough memory may opt +to build from a RAM disk. Mechanical disks should be avoided if at all +possible. + +Some may dispute the importance of an SSD on build times. It is true +that the beneficial impact of an SSD can be mitigated if your system has +lots of memory and the build files stay in the page cache. However, +operating system memory management is complicated. You don't really have +control over what or when something is evicted from the page cache. +Therefore, unless your machine is a dedicated build machine or you have +more memory than is needed by everything running on your machine, +chances are you'll run into page cache eviction and you I/O layer will +impact build performance. That being said, an SSD certainly doesn't +hurt build times. And, anyone who has used a machine with an SSD will +tell you how great of an investment it is for performance all around the +operating system. On top of that, some automated tests are I/O bound +(like those touching SQLite databases), so an SSD will make tests +faster. + +This cause impacts both clobber and incremental builds. + +You don't have enough memory +============================ + +The build system allocates a lot of memory, especially when building +many things in parallel. If you don't have enough free system memory, +the build will cause swap activity, slowing down your system and the +build. Even if you never get to the point of swapping, the build system +performs a lot of I/O and having all accessed files in memory and the +page cache can significantly reduce the influence of the I/O layer on +the build system. + +**We recommend building with no less than 8 GB of system memory.** As +always, the more memory you have, the better. For a bare bones machine +doing nothing more than building the source tree, anything more than 16 +GB is likely entering the point of diminishing returns. + +This cause impacts both clobber and incremental builds. 
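+
+If you are unsure what your machine has, the optional ``psutil`` package
+(see :ref:`python`) can report it. The snippet below is only an
+illustrative check against the hardware recommendations above, not
+something the build system runs:
+
+.. code-block:: python
+
+   import psutil
+
+   # Compare against the recommendations above: >= 4 physical cores
+   # and >= 8 GB of memory.
+   print("physical cores:", psutil.cpu_count(logical=False))
+   print("logical cores:", psutil.cpu_count())
+   print("memory (GB):", round(psutil.virtual_memory().total / 1024 ** 3, 1))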
+ +You are building on Windows +=========================== + +New processes on Windows are about a magnitude slower to spawn than on +UNIX-y systems such as Linux. This is because Windows has optimized new +threads while the \*NIX platforms typically optimize new processes. +Anyway, the build system spawns thousands of new processes during a +build. Parts of the build that rely on rapid spawning of new processes +are slow on Windows as a result. This is most pronounced when running +*configure*. The configure file is a giant shell script and shell +scripts rely heavily on new processes. This is why configure +can run over a minute slower on Windows. + +Another reason Windows builds are slower is because Windows lacks proper +symlink support. On systems that support symlinks, we can generate a +file into a staging area then symlink it into the final directory very +quickly. On Windows, we have to perform a full file copy. This incurs +much more I/O. And if done poorly, can muck with file modification +times, messing up build dependencies. As of the summer of 2013, the +impact of symlinks is being mitigated through the use +of an :term:`install manifest`. + +These issues impact both clobber and incremental builds. + +Recursive make traversal is slow +================================ + +The build system has traditionally been built by employing recursive +make. Recursive make involves make iterating through directories / make +files sequentially and executing each in turn. This is inefficient for +directories containing few targets/tasks because make could be *starved* +for work when processing these directories. Any time make is starved, +the build isn't using all available CPU cycles and the build is slower +as a result. + +Work has started in bug 907365 to fix this issue by changing the way +make traverses all the make files. + +The impact of slow recursive make traversal is mostly felt on +incremental builds. Traditionally, most of the wall time during a +no-op build is spent in make traversal. + +make is inefficient +=================== + +Compared to modern build backends like Tup or Ninja, `make` is slow and +inefficient. We can only make `make` so fast. At some point, we'll hit a +performance plateau and will need to use a different tool to make builds +faster. + +Please note that clobber and incremental builds are different. A clobber build +with `make` will likely be as fast as a clobber build with a modern build +system. + +C++ header dependency hell +========================== + +Modifying a *.h* file can have significant impact on the build system. +If you modify a *.h* that is used by 1000 C++ files, all of those 1000 +C++ files will be recompiled. + +Our code base has traditionally been sloppy managing the impact of +changed headers on build performance. Bug 785103 tracks improving the +situation. + +This issue mostly impacts the times of an :term:`incremental build`. + +A search/indexing service on your machine is running +==================================================== + +Many operating systems have a background service that automatically +indexes filesystem content to make searching faster. On Windows, you +have the Windows Search Service. On OS X, you have Finder. + +These background services sometimes take a keen interest in the files +being produced as part of the build. Since the build system produces +hundreds of megabytes or even a few gigabytes of file data, you can +imagine how much work this is to index! 
If this work is being performed +while the build is running, your build will be slower. + +OS X's Finder is notorious for indexing when the build is running. And, +it has a tendency to suck up a whole CPU core. This can make builds +several minutes slower. If you build with ``mach`` and have the optional +``psutil`` package built (it requires Python development headers - see +:ref:`python` for more) and Finder is running during a build, mach will +print a warning at the end of the build, complete with instructions on +how to fix it. diff --git a/build/docs/sparse.rst b/build/docs/sparse.rst new file mode 100644 index 0000000000..6dcf548334 --- /dev/null +++ b/build/docs/sparse.rst @@ -0,0 +1,157 @@ +.. _build_sparse: + +================ +Sparse Checkouts +================ + +The Firefox repository is large: over 230,000 files. That many files +can put a lot of strain on machines, tools, and processes. + +Some version control tools have the ability to only populate a +working directory / checkout with a subset of files in the repository. +This is called *sparse checkout*. + +Various tools in the Firefox repository are configured to work +when a sparse checkout is being used. + +Sparse Checkouts in Mercurial +============================= + +Mercurial 4.3 introduced **experimental** support for sparse checkouts +in the official distribution (a Facebook-authored extension has +implemented the feature as a 3rd party extension for years). + +To enable sparse checkout support in Mercurial, enable the ``sparse`` +extension:: + + [extensions] + sparse = + +The *sparseness* of the working directory is managed using +``hg debugsparse``. Run ``hg help debugsparse`` and ``hg help -e sparse`` +for more info on the feature. + +When a *sparse config* is enabled, the working directory only contains +files matching that config. You cannot ``hg add`` or ``hg remove`` files +outside the *sparse config*. + +.. warning:: + + Sparse support in Mercurial 4.3 does not have any backwards + compatibility guarantees. Expect things to change. Scripting against + commands or relying on behavior is strongly discouraged. + +In-Tree Sparse Profiles +======================= + +Mercurial supports defining the sparse config using files under version +control. These are called *sparse profiles*. + +Essentially, the sparse profiles are managed just like any other file in +the repository. When you ``hg update``, the sparse configuration is +evaluated against the sparse profile at the revision being updated to. +From an end-user perspective, you just need to *activate* a profile once +and files will be added or removed as appropriate whenever the versioned +profile file updates. + +In the Firefox repository, the ``build/sparse-profiles`` directory +contains Mercurial *sparse profiles* files. + +Each *sparse profile* essentially defines a list of file patterns +(see ``hg help patterns``) to include or exclude. See +``hg help -e sparse`` for more. + +Mach Support for Sparse Checkouts +================================= + +``mach`` detects when a sparse checkout is being used and its +behavior may vary to accommodate this. + +By default it is a fatal error if ``mach`` can't load one of the +``mach_commands.py`` files it was told to. But if a sparse checkout +is being used, ``mach`` assumes that file isn't part of the sparse +checkout and to ignore missing file errors. This means that +running ``mach`` inside a sparse checkout will only have access +to the commands defined in files in the sparse checkout. 
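+
+As a sketch of the idea (this is illustrative only, not the actual ``mach``
+code, and the function and variable names here are made up):
+
+.. code-block:: python
+
+   import os
+
+   def load_command_files(paths, is_sparse_checkout):
+       """Load mach_commands.py files, tolerating gaps in a sparse checkout."""
+       found = []
+       for path in paths:
+           if not os.path.exists(path):
+               if is_sparse_checkout:
+                   # Assume the file was deliberately excluded from the
+                   # sparse config and skip it instead of aborting.
+                   continue
+               raise RuntimeError("missing mach_commands file: %s" % path)
+           found.append(path)
+       return found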
+ +Sparse Checkouts in Automation +============================== + +``hg robustcheckout`` (the extension/command used to perform clones +and working directory operations in automation) supports sparse checkout. +However, it has a number of limitations over Mercurial's default sparse +checkout implementation: + +* Only supports 1 profile at a time +* Does not support non-profile sparse configs +* Does not allow transitioning from a non-sparse to sparse checkout or + vice-versa + +These restrictions ensure that any sparse working directory populated by +``hg robustcheckout`` is as consistent and robust as possible. + +``run-task`` (the low-level script for *bootstrapping* tasks in +automation) has support for sparse checkouts. + +TaskGraph tasks using ``run-task`` can specify a ``sparse-profile`` +attribute in YAML (or in code) to denote the sparse profile file to +use. e.g.:: + + run: + using: run-command + command: + sparse-profile: taskgraph + +This automagically results in ``run-task`` and ``hg robustcheckout`` +using the sparse profile defined in ``build/sparse-profiles/``. + +Pros and Cons of Sparse Checkouts +================================= + +The benefits of sparse checkout are that it makes the repository appear +to be smaller. This means: + +* Less time performing working directory operations -> faster version + control operations +* Fewer files to consult -> faster operations +* Working directories only contain what is needed -> easier to understand + what everything does + +Fewer files in the working directory also contributes to disadvantages: + +* Searching may not yield hits because a file isn't in the sparse + checkout. e.g. a *global* search and replace may not actually be + *global* after all. +* Tools performing filesystem walking or path globbing (e.g. + ``**/*.js``) may fail to find files because they don't exist. +* Various tools and processes make assumptions that all files in the + repository are always available. + +There can also be problems caused by mixing sparse and non-sparse +checkouts. For example, if a process in automation is using sparse +and a local developer is not using sparse, things may work for the +local developer but fail in automation (because a file isn't included +in the sparse configuration and not available to automation. +Furthermore, if environments aren't using exactly the same sparse +configuration, differences can contribute to varying behavior. + +When Should Sparse Checkouts Be Used? +===================================== + +Developers are discouraged from using sparse checkouts for local work +until tools for handling sparse checkouts have improved. In particular, +Mercurial's support for sparse is still experimental and various Firefox +tools make assumptions that all files are available. Developers should +use sparse checkout at their own risk. + +The use of sparse checkouts in automation is a performance versus +robustness trade-off. Use of sparse checkouts will make automation +faster because machines will only have to manage a few thousand files +in a checkout instead of a few hundred thousand. This can potentially +translate to minutes saved per machine day. At the scale of thousands +of machines, the savings can be significant. But adopting sparse +checkouts will open up new avenues for failures. (See section above.) +If a process is isolated (in terms of file access) and well-understood, +sparse checkout can likely be leveraged with little risk. 
But if a +process is doing things like walking the filesystem and performing +lots of wildcard matching, the dangers are higher. diff --git a/build/docs/supported-configurations.rst b/build/docs/supported-configurations.rst new file mode 100644 index 0000000000..4df06d6600 --- /dev/null +++ b/build/docs/supported-configurations.rst @@ -0,0 +1,95 @@ +Supported build targets +======================= + + .. role:: strikethrough + +There are three tiers of **supported Firefox build targets** at this +time. These tiers represent the shared engineering priorities of the +Mozilla project. + +.. note:: + + Sheriffs are in charge of monitoring the tree. Their definition for tiers + is for automation jobs, which tells a developer what is expected of them when + they land code. This document is about the tiers of supported build targets, + which tells a person compiling/using Firefox what they can expect from Mozilla. + See the `job tier definition `__ for more information. + +.. _tier-1: + +The term **"Tier-1 platform"** refers to those platforms - CPU +architectures and operating systems - that are the primary focus of +Firefox development efforts. Tier-1 platforms are fully supported by +Mozilla's `continuous integration processes `__ and the +:ref:`Try Server`. Any proposed change to Firefox on these +platforms that results in build failures, test failures, performance +regressions or other major problems **will be reverted immediately**. + + +The **Tier-1 Firefox platforms** and their supported compilers are: + +- Android on Linux x86, x86-64, ARMv7 and ARMv8-A (clang) +- Linux/x86 and x86-64 (gcc and clang) +- macOS 10.12 and later on x86-64 and AArch64 (clang) +- Windows/x86, x86-64 and AArch64 (clang-cl) + +Prior to Firefox 63, Windows/x86 and Windows/x86-64 relied on the MSVC +compiler; from **Firefox 63 onward MSVC is not supported**. Older 32-bit +x86 CPUs without SSE2 instructions such as the Pentium III and Athlon XP +are also **not considered Tier-1 platforms, and are not supported**. +Note also that while Windows/x86 and ARM/AArch64 are supported *as build +targets*, it is not possible to build Firefox *on* Windows/x86 or +Windows/AArch64 systems. + +.. _tier-2: + +**Tier-2 platforms** are actively maintained by the Mozilla community, +though with less rigorous requirements. Proposed changes resulting in +breakage or regressions limited to these platforms **may not immediately +result in reversion**. However, developers who break these platforms are +expected to work with platform maintainers to fix problems, and **may be +required to revert their changes** if a fix cannot be found. + +The **Tier-2 Firefox platforms** and their supported compilers are: + +- Linux/AArch64 (clang) +- Windows/x86 (mingw-clang) - maintained by Tom Ritter and Jacek Caban + - + + - *Note that some features of this platform are disabled, as they + require MS COM or the w32api project doesn't expose the necessary + Windows APIs.* + +.. _tier-3: + +**Tier-3 platforms** have a maintainer or community which attempt to +keep the platform working. These platforms are **not supported by our +continuous integration processes**, and **Mozilla does not routinely +test on these platforms**, nor do we block further development on the +outcomes of those tests. + +At any given time a Firefox built from mozilla-central for these +platforms may or may not work correctly or build at all. 
+ +**Tier-3 Firefox platforms** include:  + +- Linux on various CPU architectures including ARM variants not listed + above, PowerPC, and x86 CPUs without SSE2 support - maintained by + various Linux distributions +- FreeBSD/x86, x86-64, Aarch64 (clang) - maintained by Jan Beich +- OpenBSD/x86, x86-64 (clang) - maintained by Landry Breuil +- NetBSD/x86-64 (gcc) - maintained by David Laight +- Solaris/x86-64, sparc64 (gcc) - maintained by Petr Sumbera +- :strikethrough:`Windows/x86-64 (mingw-gcc)` - Unsupported due to + requirements for clang-bindgen + +If you're filing a bug against Firefox on a Tier-3 platform (or any +combination of OS, CPU and compiler not listed above) please bear in +mind that Mozilla developers do not reliably have access to non-Tier-1 +platforms or build environments. To be actionable bug reports against +non-Tier-1 platforms should include as much information as possible to +help the owner of the bug determine the cause of the problem and the +proper solution. If you can provide a patch, a regression range or +assist in verifying that the developer's patches work for your platform, +that would help a lot towards getting your bugs fixed and checked into +the tree. diff --git a/build/docs/telemetry.rst b/build/docs/telemetry.rst new file mode 100644 index 0000000000..a036cd1871 --- /dev/null +++ b/build/docs/telemetry.rst @@ -0,0 +1,393 @@ +.. _buildtelemetry: + +=============== +Build Telemetry +=============== + +The build system (specifically, all the build tooling hooked +up to ``./mach``) has been configured to collect metrics data +points and errors for various build system actions. This data +helps drive team planning for the build team and ensure that +resources are applied to build processes that need them most. +You can opt-in to send telemetry to Mozilla during +``./mach bootstrap`` or by editing your ``.mozbuild/machrc`` +file. + +Telemetry +========= + +The build telemetry schema can be found in-tree under +``python/mozbuild/mozbuild/telemetry.py`` in Voluptuous schema +format. You can use the ``export_telemetry_schema.py`` script in +that same directory to get the schema in JSON-schema format. +Details of the schema are specified below: + +.. _telemetry.json#/: + +:type: ``object`` + +:Required: :ref:`telemetry.json#/properties/argv`, :ref:`telemetry.json#/properties/build_opts`, :ref:`telemetry.json#/properties/client_id`, :ref:`telemetry.json#/properties/command`, :ref:`telemetry.json#/properties/duration_ms`, :ref:`telemetry.json#/properties/success`, :ref:`telemetry.json#/properties/system`, :ref:`telemetry.json#/properties/time` + +**Properties:** :ref:`telemetry.json#/properties/argv`, :ref:`telemetry.json#/properties/build_opts`, :ref:`telemetry.json#/properties/client_id`, :ref:`telemetry.json#/properties/command`, :ref:`telemetry.json#/properties/duration_ms`, :ref:`telemetry.json#/properties/exception`, :ref:`telemetry.json#/properties/file_types_changed`, :ref:`telemetry.json#/properties/success`, :ref:`telemetry.json#/properties/system`, :ref:`telemetry.json#/properties/time` + + +.. _telemetry.json#/properties/argv: + +argv +++++ + +Full mach commandline. If the commandline contains absolute paths they will be sanitized. + +:type: ``array`` + +.. container:: sub-title + + Every element of **argv** is: + +:type: ``string`` + + +.. 
_telemetry.json#/properties/build_opts: + +build_opts +++++++++++ + +Selected build options + +:type: ``object`` + +**Properties:** :ref:`telemetry.json#/properties/build_opts/properties/artifact`, :ref:`telemetry.json#/properties/build_opts/properties/ccache`, :ref:`telemetry.json#/properties/build_opts/properties/compiler`, :ref:`telemetry.json#/properties/build_opts/properties/debug`, :ref:`telemetry.json#/properties/build_opts/properties/icecream`, :ref:`telemetry.json#/properties/build_opts/properties/opt`, :ref:`telemetry.json#/properties/build_opts/properties/sccache` + + +.. _telemetry.json#/properties/build_opts/properties/artifact: + +artifact +######## + +true if --enable-artifact-builds + +:type: ``boolean`` + + +.. _telemetry.json#/properties/build_opts/properties/ccache: + +ccache +###### + +true if ccache is in use (--with-ccache) + +:type: ``boolean`` + + +.. _telemetry.json#/properties/build_opts/properties/compiler: + +compiler +######## + +The compiler type in use (CC_TYPE) + +**Allowed values:** + +- clang +- clang-cl +- gcc +- msvc + + +.. _telemetry.json#/properties/build_opts/properties/debug: + +debug +##### + +true if build is debug (--enable-debug) + +:type: ``boolean`` + + +.. _telemetry.json#/properties/build_opts/properties/icecream: + +icecream +######## + +true if icecream in use + +:type: ``boolean`` + + +.. _telemetry.json#/properties/build_opts/properties/opt: + +opt +### + +true if build is optimized (--enable-optimize) + +:type: ``boolean`` + + +.. _telemetry.json#/properties/build_opts/properties/sccache: + +sccache +####### + +true if ccache in use is sccache + +:type: ``boolean`` + + +.. _telemetry.json#/properties/build_attrs: + +build_attrs ++++++++++++ + +Selected runtime attributes of the build + +:type: ``object`` + +**Properties:** :ref:`telemetry.json#/properties/build_attrs/properties/cpu_percent`, :ref:`telemetry.json#/properties/build_attrs/properties/clobber` + +.. _telemetry.json#/properties/build_attrs/properties/cpu_percent: + +cpu_percent +########### + +cpu utilization observed during the build + +:type: ``number`` + +.. _telemetry.json#/properties/build_attrs/properties/clobber: + +clobber +####### + +true if the build was a clobber/full build + +:type: ``boolean`` + + +.. _telemetry.json#/properties/client_id: + +client_id ++++++++++ + +A UUID to uniquely identify a client + +:type: ``string`` + + +.. _telemetry.json#/properties/command: + +command ++++++++ + +The mach command that was invoked + +:type: ``string`` + + +.. _telemetry.json#/properties/duration_ms: + +duration_ms ++++++++++++ + +Command duration in milliseconds + +:type: ``number`` + + +.. _telemetry.json#/properties/exception: + +exception ++++++++++ + +If a Python exception was encountered during the execution of the command, this value contains the result of calling `repr` on the exception object. + +:type: ``string`` + + +.. _telemetry.json#/properties/file_types_changed: + +file_types_changed +++++++++++++++++++ + +This array contains a list of objects with {ext, count} properties giving the count of files changed since the last invocation grouped by file type + +:type: ``array`` + +.. 
container:: sub-title + + Every element of **file_types_changed** is: + +:type: ``object`` + +:Required: :ref:`telemetry.json#/properties/file_types_changed/items/properties/count`, :ref:`telemetry.json#/properties/file_types_changed/items/properties/ext` + +**Properties:** :ref:`telemetry.json#/properties/file_types_changed/items/properties/count`, :ref:`telemetry.json#/properties/file_types_changed/items/properties/ext` + + +.. _telemetry.json#/properties/file_types_changed/items/properties/count: + +count +##### + +Count of changed files with this extension + +:type: ``number`` + + +.. _telemetry.json#/properties/file_types_changed/items/properties/ext: + +ext +### + +File extension + +:type: ``string`` + + +.. _telemetry.json#/properties/success: + +success ++++++++ + +true if the command succeeded + +:type: ``boolean`` + + +.. _telemetry.json#/properties/system: + +system +++++++ + +:type: ``object`` + +:Required: :ref:`telemetry.json#/properties/system/properties/os` + +**Properties:** :ref:`telemetry.json#/properties/system/properties/cpu_brand`, :ref:`telemetry.json#/properties/system/properties/drive_is_ssd`, :ref:`telemetry.json#/properties/system/properties/logical_cores`, :ref:`telemetry.json#/properties/system/properties/memory_gb`, :ref:`telemetry.json#/properties/system/properties/os`, :ref:`telemetry.json#/properties/system/properties/physical_cores`, :ref:`telemetry.json#/properties/system/properties/virtual_machine` + + +.. _telemetry.json#/properties/system/properties/cpu_brand: + +cpu_brand +######### + +CPU brand string from CPUID + +:type: ``string`` + + +.. _telemetry.json#/properties/system/properties/drive_is_ssd: + +drive_is_ssd +############ + +true if the source directory is on a solid-state disk + +:type: ``boolean`` + + +.. _telemetry.json#/properties/system/properties/logical_cores: + +logical_cores +############# + +Number of logical CPU cores present + +:type: ``number`` + + +.. _telemetry.json#/properties/system/properties/memory_gb: + +memory_gb +######### + +System memory in GB + +:type: ``number`` + + +.. _telemetry.json#/properties/system/properties/os: + +os +## + +Operating system + +**Allowed values:** + +- windows +- macos +- linux +- other + + +.. _telemetry.json#/properties/system/properties/physical_cores: + +physical_cores +############## + +Number of physical CPU cores present + +:type: ``number`` + + +.. _telemetry.json#/properties/system/properties/virtual_machine: + +virtual_machine +############### + +true if the OS appears to be running in a virtual machine + +:type: ``boolean`` + + +.. _telemetry.json#/properties/time: + +time +++++ + +Time at which this event happened + +:type: ``string`` + +:format: ``date-time`` + + +Glean Telemetry +=============== + +In addition to the existing build-specific telemetry, Mozbuild is also reporting data using +`Glean `_ via :ref:`mach_telemetry`. +The metrics collected are documented :ref:`here`. +As Python 2 is phased out, the old telemetry will be replaced by the new Glean implementation. + + +Error Reporting +=============== + +``./mach`` uses `Sentry `_ +to automatically report errors to `our issue-tracking dashboard +`_. + +Information captured +++++++++++++++++++++ + +Sentry automatically collects useful information surrounding +the error to help the build team discover what caused the +issue and how to reproduce it. 
This information includes:
+
+* Environmental information, such as the computer name, timestamp, Python runtime and Python module versions
+* Process arguments
+* The stack trace of the error, including contextual information:
+
+  * The data contained in the exception
+  * Functions and their respective source file names, line numbers
+  * Variables in each frame
+* `Sentry "Breadcrumbs" `_,
+  which are important events that help contextualize the error, such as:
+
+  * An HTTP request has occurred
+  * A subprocess has been spawned
+  * Logging has occurred
+
+Note that file paths may be captured, which may include absolute paths (potentially including usernames).
diff --git a/build/docs/test_certificates.rst b/build/docs/test_certificates.rst
new file mode 100644
index 0000000000..c8394f7785
--- /dev/null
+++ b/build/docs/test_certificates.rst
@@ -0,0 +1,40 @@
+.. _test_certificates:
+
+===============================
+Adding Certificates for Testing
+===============================
+
+Sometimes we need to write tests for scenarios that require custom client, server, or certificate authority (CA) certificates. For that purpose, you can generate such certificates using ``build/pgo/genpgocert.py``.
+
+The certificate specifications (and key specifications) are located in ``build/pgo/certs/``.
+
+To add a new **server certificate**, add a ``${cert_name}.certspec`` file to that folder.
+If it needs a non-default private key, add a corresponding ``${cert_name}.server.keyspec``.
+
+For a new **client certificate**, add a ``${cert_name}.client.keyspec`` and corresponding ``${cert_name}.certspec``.
+
+To add a new **CA**, add a ``${cert_name}.ca.keyspec`` as well as a corresponding ``${cert_name}.certspec`` to that folder.
+
+.. hint::
+
+   * The full syntax for .certspec files is documented at https://searchfox.org/mozilla-central/source/security/manager/ssl/tests/unit/pycert.py
+
+   * The full syntax for .keyspec files is documented at https://searchfox.org/mozilla-central/source/security/manager/ssl/tests/unit/pykey.py
+
+Then regenerate the certificates by running::
+
+  ./mach python build/pgo/genpgocert.py
+
+These commands will modify cert9.db and key4.db, and, if you have added a .keyspec file, will generate a ``${cert_name}.client`` or ``${cert_name}.ca`` file.
+
+**These files need to be committed.**
+
+If you've created a new server certificate, you probably want to modify ``build/pgo/server-locations.txt`` to add a location with your specified certificate::
+
+  https://my-test.example.com:443 cert=${cert_name}
+
+You will need to run ``./mach build`` again afterwards.
+
+.. important::
+
+   Make sure to exactly follow the naming conventions and use the same ``cert_name`` in all places.
diff --git a/build/docs/test_manifests.rst b/build/docs/test_manifests.rst
new file mode 100644
index 0000000000..89a7ded2aa
--- /dev/null
+++ b/build/docs/test_manifests.rst
@@ -0,0 +1,226 @@
+.. _test_manifests:
+
+==============
+Test Manifests
+==============
+
+Many test suites have their test metadata defined in files called
+**test manifests**.
+
+Test manifests are divided into two flavors: :ref:`manifestparser_manifests`
+and :ref:`reftest_manifests`.
+
+Naming Convention
+=================
+
+The build system does not enforce file naming for test manifest files.
+However, the following convention is used.
+
+mochitest.ini
+    For the *plain* flavor of mochitests.
+
+chrome.ini
+    For the *chrome* flavor of mochitests.
+
+browser.ini
+    For the *browser chrome* flavor of mochitests.
+ +a11y.ini + For the *a11y* flavor of mochitests. + +xpcshell.ini + For *xpcshell* tests. + +.. _manifestparser_manifests: + +ManifestParser Manifests +========================== + +ManifestParser manifests are essentially ini files that conform to a basic +set of assumptions. + +The :doc:`reference documentation ` +for manifestparser manifests describes the basic format of test manifests. + +In summary, manifests are ini files with section names describing test files:: + + [test_foo.js] + [test_bar.js] + +Keys under sections can hold metadata about each test:: + + [test_foo.js] + skip-if = os == "win" + [test_foo.js] + skip-if = os == "linux" && debug + [test_baz.js] + fail-if = os == "mac" || os == "android" + +There is a special **DEFAULT** section whose keys/metadata apply to all +sections/tests:: + + [DEFAULT] + property = value + + [test_foo.js] + +In the above example, **test_foo.js** inherits the metadata **property = value** +from the **DEFAULT** section. + +Recognized Metadata +------------------- + +Test manifests can define some common keys/metadata to influence behavior. +Those keys are as follows: + +head + List of files that will be executed before the test file. (Used in + xpcshell tests.) + +tail + List of files that will be executed after the test file. (Used in + xpcshell tests.) + +support-files + List of additional files required to run tests. This is typically + defined in the **DEFAULT** section. + + Unlike other file lists, *support-files* supports a globbing mechanism + to facilitate pulling in many files with minimal typing. This globbing + mechanism is activated if an entry in this value contains a ``*`` + character. A single ``*`` will wildcard match all files in a directory. + A double ``**`` will descend into child directories. For example, + ``data/*`` will match ``data/foo`` but not ``data/subdir/bar`` where + ``data/**`` will match ``data/foo`` and ``data/subdir/bar``. + + Support files starting with ``/`` are placed in a root directory, rather + than a location determined by the manifest location. For mochitests, + this allows for the placement of files at the server root. The source + file is selected from the base name (e.g., ``foo`` for ``/path/foo``). + Files starting with ``/`` cannot be selected using globbing. + + Some support files are used by tests across multiple directories. In + this case, a test depending on a support file from another directory + must note that dependency with the path to the required support file + in its own **support-files** entry. These use a syntax where paths + starting with ``!/`` will indicate the beginning of the path to a + shared support file starting from the root of the srcdir. For example, + if a manifest at ``dom/base/test/mochitest.ini`` has a support file, + ``dom/base/test/server-script.sjs``, and a mochitest in + ``dom/workers/test`` depends on that support file, the test manifest + at ``dom/workers/test/mochitest.ini`` must include + ``!/dom/base/test/server-script.sjs`` in its **support-files** entry. + +generated-files + List of files that are generated as part of the build and don't exist in + the source tree. + + The build system assumes that each manifest file, test file, and file + listed in **head**, **tail**, and **support-files** is static and + provided by the source tree (and not automatically generated as part + of the build). This variable tells the build system not to make this + assumption. 
+ + This variable will likely go away sometime once all generated files are + accounted for in the build config. + + If a generated file is not listed in this key, a clobber build will + likely fail. + +dupe-manifest + Record that this manifest duplicates another manifest. + + The common scenario is two manifest files will include a shared + manifest file via the ``[include:file]`` special section. The build + system enforces that each test file is only provided by a single + manifest. Having this key present bypasses that check. + + The value of this key is ignored. + +skip-if + Skip this test if the specified condition is true. + See :ref:`manifest_filter_language`. + + Conditions can be specified on multiple lines, where each line is implicitly + joined by a logical OR (``||``). This makes it easier to add comments to + distinct failures. For example: + + .. parsed-literal:: + + [test_foo.js] + skip-if = + os == "mac" && fission # bug 123 - fails on fission + os == "windows" && debug # bug 456 - hits an assertion + +fail-if + Expect test failure if the specified condition is true. + See :ref:`manifest_filter_language`. + + Conditions can be specified on multiple lines (see ``skip-if``). + +run-sequentially + If present, the test should not be run in parallel with other tests. + + Some test harnesses support parallel test execution on separate processes + and/or threads (behavior varies by test harness). If this key is present, + the test harness should not attempt to run this test in parallel with any + other test. + + By convention, the value of this key is a string describing why the test + can't be run in parallel. + +scheme + Changes the scheme and domain from which the test runs. (Only used in mochitest suites) + + There are two possible values: + - ``http`` (default): The test will run from http://mochi.test:8888 + - ``https``: The test will run from https://example.com:443 + +.. _manifest_filter_language: + +Manifest Filter Language +------------------------ + +Some manifest keys accept a special filter syntax as their values. These +values are essentially boolean expressions that are evaluated at test +execution time. + +The expressions can reference a well-defined set of variables, such as +``os`` and ``debug``. These variables are populated from the +``mozinfo.json`` file. For the full list of available variables, see +the :ref:`mozinfo documentation `. + +See +`the source `_ for the full documentation of the +expression syntax until it is documented here. + +.. todo:: + + Document manifest filter language. + +.. _manifest_file_installation: + +File Installation +----------------- + +Files referenced by manifests are automatically installed into the object +directory into paths defined in +:py:func:`mozbuild.frontend.emitter.TreeMetadataEmitter._process_test_manifest`. + +Relative paths resolving to parent directory (e.g. +``support-files = ../foo.txt`` have special behavior. + +For ``support-files``, the file will be installed to the default destination +for that manifest. Only the file's base name is used to construct the final +path: directories are irrelevant. Files starting with ``/`` are an exception, +these are installed relative to the root of the destination; the base name is +instead used to select the file.. + +For all other entry types, the file installation is skipped. + +.. _reftest_manifests: + +Reftest Manifests +================= + +See `MDN `_. 
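+
+For orientation only, a reftest manifest (conventionally a file named
+``reftest.list``) is a plain list of comparisons. The file names below are
+hypothetical; the MDN page above documents the full syntax::
+
+  # Hypothetical reftest.list entries
+  == green-box.html green-box-ref.html
+  != red-box.html green-box-ref.html
+  skip-if(Android) == fancy-box.html fancy-box-ref.html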
diff --git a/build/docs/toolchains.rst b/build/docs/toolchains.rst new file mode 100644 index 0000000000..2a2f87c182 --- /dev/null +++ b/build/docs/toolchains.rst @@ -0,0 +1,220 @@ +.. _build_toolchains: + +=========================== +Creating Toolchain Archives +=========================== + +There are various scripts in the repository for producing archives +of the build tools (e.g. compilers and linkers) required to build. + +Clang and Rust +============== + +To modify the toolchains used for a particular task, you may need several +things: + +1. A `build task`_ + +2. Which uses a toolchain task + + - `clang toolchain`_ + - `rust toolchain`_ + +3. Which uses a git fetch + + - `clang fetch`_ + - (from-source ``dev`` builds only) `rust fetch`_ + +4. (clang only) Which uses a `config json`_ + +5. Which takes patches_ you may want to apply. + +For the most part, you should be able to accomplish what you want by +copying/editing the existing examples in those files. + +.. _build task: https://searchfox.org/mozilla-central/rev/168c45a7acc44e9904cfd4eebcb9eb080e05699c/taskcluster/ci/build/linux.yml#5-45 +.. _clang toolchain: https://searchfox.org/mozilla-central/rev/168c45a7acc44e9904cfd4eebcb9eb080e05699c/taskcluster/ci/toolchain/clang.yml#51-72 +.. _rust toolchain: https://searchfox.org/mozilla-central/rev/168c45a7acc44e9904cfd4eebcb9eb080e05699c/taskcluster/ci/toolchain/rust.yml#57-74 +.. _clang fetch: https://searchfox.org/mozilla-central/rev/168c45a7acc44e9904cfd4eebcb9eb080e05699c/taskcluster/ci/fetch/toolchains.yml#413-418 +.. _rust fetch: https://searchfox.org/mozilla-central/rev/168c45a7acc44e9904cfd4eebcb9eb080e05699c/taskcluster/ci/fetch/toolchains.yml#434-439 +.. _config json: https://searchfox.org/mozilla-central/rev/168c45a7acc44e9904cfd4eebcb9eb080e05699c/build/build-clang/clang-linux64.json +.. _patches: https://searchfox.org/mozilla-central/rev/168c45a7acc44e9904cfd4eebcb9eb080e05699c/build/build-clang/static-llvm-symbolizer.patch + +Clang +----- + +Building clang is handled by `build-clang.py`_, which uses several resources +in the `build-clang`_ directory. Read the `build-clang README`_ for more +details. + +Note for local builds: build-clang.py can be run on developer machines but its +lengthy multi-stage build process is unnecessary for most local development. The +upstream `LLVM Getting Started Guide`_ has instructions on how to build +clang more directly. + +.. _build-clang.py: https://searchfox.org/mozilla-central/source/build/build-clang/build-clang.py +.. _build-clang README: https://searchfox.org/mozilla-central/source/build/build-clang/README +.. _build-clang: https://searchfox.org/mozilla-central/source/build/build-clang/ +.. _LLVM Getting Started Guide: https://llvm.org/docs/GettingStarted.html + +Rust +---- + +Rust builds are handled by `repack_rust.py`_. The primary purpose of +that script is to download prebuilt tarballs from the Rust project. + +It uses the same basic format as `rustup` for specifying the toolchain +(via ``--channel``): + +- request a stable build with ``1.xx.y`` (e.g. ``1.47.0``) +- request a beta build with ``beta-yyyy-mm-dd`` (e.g. ``beta-2020-08-26``) +- request a nightly build with ``nightly-yyyy-mm-dd`` (e.g. ``nightly-2020-08-26``) +- request a build from `Rust's ci`_ with ``bors-$sha`` (e.g. ``bors-796a2a9bbe7614610bd67d4cd0cf0dfff0468778``) +- request a from-source build with ``dev`` + +Rust From Source +---------------- + +As of this writing, from-source builds for Rust are a new feature, and not +used anywhere by default. 
The feature was added so that we can test patches +to rustc against the tree. Expect things to be a bit hacky and limited. + +Most importantly, building from source requires your toolchain to have a +`fetch of the rust tree`_ as well as `clang and binutils toolchains`_. It is also +recommended to upgrade the worker-type to e.g. ``b-linux-large``. + +Rust's build dependencies are fairly minimal, and it has a sanity check +that should catch any missing or too-old dependencies. See the `Rust README`_ +for more details. + +Patches are set via `the --patch flag`_ (passed via ``toolchain/rust.yml``). +Patch paths are assumed to be relative to ``/build/build-rust/``, and may be +optionally prefixed with ``module-path:`` to specify they apply to that git +submodule in the Rust source. e.g. ``--patch src/llvm-project:mypatch.diff`` +patches rust's llvm with ``/build/build-rust/mypatch.diff``. There are no +currently checked in rust patches to use as an example, but they should be +the same format as `the clang ones`_. + +Rust builds are not currently configurable, and uses a `hardcoded config.toml`_, +which you may need to edit for your purposes. See Rust's `example config`_ for +details/defaults. Note that these options do occasionally change, so be sure +you're using options for the version you're targeting. For instance, there was +a large change around Rust ~1.48, and the currently checked in config was for +1.47, so it may not work properly when building the latest version of Rust. + +Rust builds are currently limited to targeting only the host platform. +Although the machinery is in place to request additional targets, the +cross-compilation fails for some unknown reason. We have not yet investigated +what needs to be done to get this working. + +While Rust generally maintains a clean tree for building ``rustc`` and +``cargo``, other tools like ``rustfmt`` or ``miri`` are allowed to be +transiently broken. This means not every commit in the Rust tree will be +able to build the `tools we require`_. + +Although ``repack_rust`` considers ``rustfmt`` an optional package, Rust builds +do not currently implement this and will fail if ``rustfmt`` is busted. Some +attempt was made to work around it, but `more work is needed`_. + +.. _Rust's ci: https://github.com/rust-lang/rust/pull/77875#issuecomment-736092083 +.. _repack_rust.py: https://searchfox.org/mozilla-central/source/taskcluster/scripts/misc/repack_rust.py +.. _fetch of the rust tree: https://searchfox.org/mozilla-central/rev/168c45a7acc44e9904cfd4eebcb9eb080e05699c/taskcluster/ci/toolchain/rust.yml#69-71 +.. _clang and binutils toolchains: https://searchfox.org/mozilla-central/rev/168c45a7acc44e9904cfd4eebcb9eb080e05699c/taskcluster/ci/toolchain/rust.yml#72-74 +.. _the --patch flag: https://searchfox.org/mozilla-central/rev/168c45a7acc44e9904cfd4eebcb9eb080e05699c/taskcluster/scripts/misc/repack_rust.py#667-675 +.. _the clang ones: https://searchfox.org/mozilla-central/rev/168c45a7acc44e9904cfd4eebcb9eb080e05699c/build/build-clang/static-llvm-symbolizer.patch +.. _Rust README: https://github.com/rust-lang/rust/#building-on-a-unix-like-system +.. _hardcoded config.toml: https://searchfox.org/mozilla-central/rev/168c45a7acc44e9904cfd4eebcb9eb080e05699c/taskcluster/scripts/misc/repack_rust.py#384-421 +.. _example config: https://github.com/rust-lang/rust/blob/b7ebc6b0c1ba3c27ebb17c0b496ece778ef11e18/config.toml.example +.. 
_tools we require: https://searchfox.org/mozilla-central/rev/168c45a7acc44e9904cfd4eebcb9eb080e05699c/taskcluster/scripts/misc/repack_rust.py#398 +.. _more work is needed: https://github.com/rust-lang/rust/issues/79249 + + +Windows +======= + +The ``build/windows_toolchain.py`` script is used to build and manage +Windows toolchain archives containing Visual Studio executables, SDKs, +etc. + +The way Firefox build automation works is an archive containing the +toolchain is produced and uploaded to an internal Mozilla server. The +build automation will download, verify, and extract this archive before +building. The archive is self-contained so machines don't need to install +Visual Studio, SDKs, or various other dependencies. Unfortunately, +Microsoft's terms don't allow Mozilla to distribute this archive +publicly. However, the same tool can be used to create your own copy. + +Configuring Your System +----------------------- + +It is **highly** recommended to perform this process on a fresh installation +of Windows 7 or 10 (such as in a VM). Installing all updates through +Windows Update is not only acceptable - it is encouraged. Although it +shouldn't matter. + +Next, install Visual Studio 2017 Community. The download link can be found +at https://www.visualstudio.com/vs/community/. +Be sure to follow these install instructions: + +1. Choose a ``Custom`` installation and click ``Next`` +2. Select ``Programming Languages`` -> ``Visual C++`` (make sure all sub items are + selected) +3. Under ``Windows and Web Development`` uncheck everything except + ``Universal Windows App Development Tools`` and the items under it + (should be ``Tools (1.3.1)...`` and the ``Windows 10 SDK``). + +Once Visual Studio 2017 Community has been installed, from a checkout +of mozilla-central, run something like the following to produce a ZIP +archive:: + + $ ./mach python build/windows_toolchain.py create-zip vs2017_15.8.4 + +The produced archive will be the argument to ``create-zip`` + ``.zip``. + +Firefox for Android with Gradle +=============================== + +To build Firefox for Android with Gradle in automation, archives +containing both the Gradle executable and a Maven repository +comprising the exact build dependencies are produced and uploaded to +an internal Mozilla server. The build automation will download, +verify, and extract these archive before building. These archives +provide a self-contained Gradle and Maven repository so that machines +don't need to fetch additional Maven dependencies at build time. +(Gradle and the downloaded Maven dependencies can be both +redistributed publicly.) + +Archiving the Gradle executable is straight-forward, but archiving a +local Maven repository is not. Therefore a toolchain job exists for +producing the required archives, `android-gradle-dependencies`. The +job runs in a container based on a custom Docker image and spawns a +Sonatype Nexus proxying Maven repository process in the background. +The job builds Firefox for Android using Gradle and the in-tree Gradle +configuration rooted at ``build.gradle``. The spawned proxying Maven +repository downloads external dependencies and collects them. After +the Gradle build completes, the job archives the Gradle version used +to build, and the downloaded Maven repository, and exposes them as +Task Cluster artifacts. + +To update the version of Gradle in the archive produced, update +``gradle/wrapper/gradle-wrapper.properties``. Be sure to also update +the SHA256 checksum to prevent poisoning the build machines! 
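+
+For reference, the checksum can be computed locally before updating the
+properties file. This is only a sketch; the distribution file name below
+is hypothetical and simply refers to whatever archive ``distributionUrl``
+points at::
+
+    import hashlib
+
+    # Print the SHA256 of the downloaded Gradle distribution so it can be
+    # pasted into gradle/wrapper/gradle-wrapper.properties.
+    with open("gradle-6.6.1-all.zip", "rb") as f:
+        print(hashlib.sha256(f.read()).hexdigest())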
+ +To update the versions of Gradle dependencies used, update +``dependencies`` sections in the in-tree Gradle configuration rooted +at ``build.gradle``. Once you are confident your changes build +locally, push a fresh build to try. The `android-gradle-dependencies` +toolchain should run automatically, fetching your new dependencies and +wiring them into the appropriate try build jobs. + +To update the version of Sonatype Nexus, update the `sonatype-nexus` +`fetch` task definition. + +To modify the Sonatype Nexus configuration, typically to proxy a new +remote Maven repository, modify +`taskcluster/scripts/misc/android-gradle-dependencies/nexus.xml`. + +There is also a toolchain job that fetches the Android SDK and related +packages. To update the versions of packaged fetched, modify +`python/mozboot/mozboot/android-packages.txt` and update the various +in-tree versions accordingly. diff --git a/build/docs/unified-builds.rst b/build/docs/unified-builds.rst new file mode 100644 index 0000000000..876ef9544c --- /dev/null +++ b/build/docs/unified-builds.rst @@ -0,0 +1,43 @@ +.. _unified-builds: + +============== +Unified Builds +============== + +The Firefox build system uses the technique of "unified builds" (or elsewhere +called "`unity builds `_") to +improve compilation performance. Rather than compiling source files individually, +groups of files in the same directory are concatenated together, then compiled once +in a single batch. + +Unified builds can be configured using the ``UNIFIED_SOURCES`` variable in ``moz.build`` files. + +.. _unified_build_compilation_failures: + +Why are there unrelated compilation failures when I change files? +================================================================= + +Since multiple files are concatenated together in a unified build, it's possible for a change +in one file to cause the compilation of a seemingly unrelated file to fail. +This is usually because source files become implicitly dependent on each other for: + +* ``#include`` statements +* ``using namespace ...;`` statements +* Other symbol imports or definitions + +One of the more common cases of unexpected failures are when source code files are added or +removed, and the "chunking" is changed. There's a limit on the number of files that are combined +together for a single compilation, so sometimes the addition of a new file will cause another one +to be bumped into a different chunk. If that other chunk doesn't meet the implicit requirements +of the bumped file, there will be a tough-to-debug compilation failure. + +Other notes: +============ + +* Some IDEs (such as VSCode with ``clangd``) build files in standalone mode, so they may show + more failures than a ``mach build``. +* The amount of files per chunk can be adjusted in ``moz.build`` files with the + ``FILES_PER_UNIFIED_FILE`` variable. Note that changing the chunk size can introduce + compilation failures as described :ref:`above`. +* We are happy to accept patches that fix problematic unified build chunks (such as by adding + includes or namespace annotations). diff --git a/build/docs/visualstudio.rst b/build/docs/visualstudio.rst new file mode 100644 index 0000000000..3fbf28e94b --- /dev/null +++ b/build/docs/visualstudio.rst @@ -0,0 +1,100 @@ +.. _build_visualstudio: + +====================== +Visual Studio Projects +====================== + +The build system contains alpha support for generating Visual Studio +project files to aid with development. 
+ +To generate Visual Studio project files, you'll need to have a configured tree:: + + mach configure + +(If you have built recently, your tree is already configured.) + +Then, simply generate the Visual Studio build backend:: + + mach build-backend -b VisualStudio + +If all goes well, the path to the generated Solution (``.sln``) file should be +printed. You should be able to open that solution with Visual Studio 2010 or +newer. + +Currently, output is hard-coded to the Visual Studio 2010 format. If you open +the solution in a newer Visual Studio release, you will be prompted to upgrade +projects. Simply click through the wizard to do that. + +Structure of Solution +===================== + +The Visual Studio solution consists of hundreds of projects spanning thousands +of files. To help with organization, the solution is divided into the following +trees/folders: + +Build Targets + This folder contains common build targets. The *full* project is used to + perform a full build. The *binaries* project is used to build just binaries. + The *visual-studio* project can be built to regenerate the Visual Studio + project files. + + Performing the *clean* action on any of these targets will clean the + *entire* build output. + +Binaries + This folder contains common binaries that can be executed from within + Visual Studio. If you are building the Firefox desktop application, + the *firefox* project will launch firefox.exe. You probably want one of + these set to your startup project. + +Libraries + This folder contains entries for each static library that is produced as + part of the build. These roughly correspond to each directory in the tree + containing C/C++. e.g. code from ``dom/base`` will be contained in the + ``dom_base`` project. + + These projects don't do anything when built. If you build a project here, + the *binaries* build target project is built. + +Updating Project Files +====================== + +As you pull and update the source tree, your Visual Studio files may fall out +of sync with the build configuration. The tree should still build fine from +within Visual Studio. But source files may be missing and IntelliSense may not +have the proper build configuration. + +To account for this, you'll want to periodically regenerate the Visual Studio +project files. You can do this within Visual Studio by building the +``Build Targets :: visual-studio`` project or by running +``mach build-backend -b VisualStudio`` from the command line. + +Currently, regeneration rewrites the original project files. **If you've made +any customizations to the solution or projects, they will likely get +overwritten.** We would like to improve this user experience in the +future. + +Moving Project Files Around +=========================== + +The produced Visual Studio solution and project files should be portable. +If you want to move them to a non-default directory, they should continue +to work from wherever they are. If they don't, please file a bug. + +Invoking mach through Visual Studio +=================================== + +It's possible to build the tree via Visual Studio. There is some light magic +involved here. + +Alongside the Visual Studio project files is a batch script named ``mach.bat``. +This batch script sets the environment variables present in your *MozillaBuild* +development environment at the time of Visual Studio project generation +and invokes *mach* inside an msys shell with the arguments specified to the +batch script. 
This script essentially allows you to invoke mach commands +inside the MozillaBuild environment without having to load MozillaBuild. + +While projects currently only utilize the ``mach build`` command, the batch +script does not limit it's use: any mach command can be invoked. Developers +may abuse this fact to add custom projects and commands that invoke other +mach commands. diff --git a/build/dumbmake-dependencies b/build/dumbmake-dependencies new file mode 100644 index 0000000000..880877637a --- /dev/null +++ b/build/dumbmake-dependencies @@ -0,0 +1,72 @@ +toolkit/library + dom + ipc + security/sandbox + ipc + netwerk/build + netwerk + storage/build + storage + xpcom + chrome + extensions + docshell/build + docshell + uriloader + modules + widget + gfx + toolkit/components/build + toolkit/components + security/manager + security/certverifier + security/build + accessible + dom + content + layout + editor + parser + js/src + mfbt + js/xpconnect + js/xpconnect/loader + view + caps + xpfe/appshell + xpfe/components + js + toolkit + rdf/build + embedding + hal + image/build + image + intl/build + intl + media + profile + services + startupcache + devtools/platform + devtools/server + devtools/shared +browser/app + browser/base + browser/components + devtools/client + browser/locales + browser/modules + browser/themes + toolkit + toolkit/components + toolkit/components/downloads + toolkit/content + toolkit/crashreporter + toolkit/forgetaboutsite + toolkit/identity + toolkit/modules + toolkit/mozapps/extensions + toolkit/profile + toolkit/themes + toolkit/webapps diff --git a/build/gecko_templates.mozbuild b/build/gecko_templates.mozbuild new file mode 100644 index 0000000000..51aae5b870 --- /dev/null +++ b/build/gecko_templates.mozbuild @@ -0,0 +1,124 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +@template +def GeckoBinary(linkage='dependent', mozglue=None): + '''Template for Gecko-related binaries. + + This template is meant to be used in other templates. + + `linkage` indicates the wanted xpcom linkage type. Valid values are + 'dependent', 'standalone' or None. 'dependent' is the default. It is + used for e.g. XPCOM components and executables with direct dependencies + on libxul. Most executables should use the 'standalone' linkage, which + uses the standalone XPCOM glue to load libxul. None means no XPCOM glue + or libxul linkage at all. + + `mozglue` indicates whether to link against the mozglue library, and if + so, what linkage to apply. Valid values are None (mozglue not linked), + 'program' (mozglue linked to an executable program), or 'library' (mozglue + linked to a shared library). 
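+
+    As illustration only (a hypothetical caller, not an in-tree example),
+    an executable using the standalone XPCOM glue and mozglue would call
+    GeckoBinary(linkage='standalone', mozglue='program').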
+ ''' + if linkage == 'dependent': + USE_LIBS += [ + 'nspr', + 'xul-real', + ] + elif linkage == 'standalone': + DEFINES['XPCOM_GLUE'] = True + + USE_LIBS += [ + 'xpcomglue', + ] + elif linkage != None: + error('`linkage` must be "dependent", "standalone" or None') + + if mozglue: + if mozglue == 'program': + USE_LIBS += ['mozglue'] + DEFINES['MOZ_HAS_MOZGLUE'] = True + if CONFIG['MOZ_GLUE_IN_PROGRAM'] and CONFIG['CC_TYPE'] in ('clang', 'gcc'): + LDFLAGS += ['-rdynamic'] + elif mozglue == 'library': + LIBRARY_DEFINES['MOZ_HAS_MOZGLUE'] = True + if not CONFIG['MOZ_GLUE_IN_PROGRAM']: + USE_LIBS += ['mozglue'] + else: + error('`mozglue` must be "program" or "library"') + + +@template +def GeckoProgram(name, linkage='standalone', **kwargs): + '''Template for program executables related to Gecko. + + `name` identifies the executable base name. + + See the documentation for `GeckoBinary` for other possible arguments, + with the notable difference that the default for `linkage` is 'standalone'. + ''' + Program(name) + + kwargs.setdefault('mozglue', 'program') + + GeckoBinary(linkage=linkage, **kwargs) + + +@template +def GeckoSimplePrograms(names, **kwargs): + '''Template for simple program executables related to Gecko. + + `names` identifies the executable base names for each executable. + + See the documentation for `GeckoBinary` for other possible arguments. + ''' + SimplePrograms(names) + + kwargs.setdefault('mozglue', 'program') + + GeckoBinary(**kwargs) + + +@template +def GeckoCppUnitTests(names, **kwargs): + '''Template for C++ unit tests related to Gecko. + + `names` identifies the executable base names for each executable. + + See the documentation for `GeckoBinary` for other possible arguments. + ''' + CppUnitTests(names) + + kwargs.setdefault('mozglue', 'program') + + GeckoBinary(**kwargs) + + +@template +def GeckoSharedLibrary(name, output_category=None, **kwargs): + '''Template for shared libraries related to Gecko. + + `name` identifies the library base name. + See the documentation for `GeckoBinary` for other possible arguments. + ''' + SharedLibrary(name, output_category) + + kwargs.setdefault('mozglue', 'library') + + GeckoBinary(**kwargs) + + +@template +def GeckoFramework(name, output_category=None, **kwargs): + '''Template for OSX frameworks related to Gecko. + + `name` identifies the library base name. + See the documentation for `GeckoBinary` for other possible arguments. + ''' + Framework(name, output_category) + + kwargs.setdefault('mozglue', 'library') + + GeckoBinary(**kwargs) diff --git a/build/gen_symverscript.py b/build/gen_symverscript.py new file mode 100644 index 0000000000..f32554abc8 --- /dev/null +++ b/build/gen_symverscript.py @@ -0,0 +1,23 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distibuted with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
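+
+# Runs the mozbuild preprocessor over a symbol version script template,
+# exposing the version passed on the command line as VERSION in the
+# template's context, and writes the expanded script to the output file.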
+
+import sys
+from mozbuild.preprocessor import Preprocessor
+
+
+def main(output, input_file, version):
+    pp = Preprocessor()
+    pp.context.update(
+        {
+            "VERSION": version,
+        }
+    )
+    pp.out = output
+    pp.do_include(input_file)
+
+
+if __name__ == "__main__":
+    main(*sys.argv[1:])
diff --git a/build/gen_test_packages_manifest.py b/build/gen_test_packages_manifest.py
new file mode 100644
index 0000000000..afaae8c56a
--- /dev/null
+++ b/build/gen_test_packages_manifest.py
@@ -0,0 +1,124 @@
+#!/usr/bin/python
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+
+from argparse import ArgumentParser
+
+ALL_HARNESSES = [
+    "common",  # Harnesses without a specific package will look here.
+    "condprof",
+    "mochitest",
+    "reftest",
+    "xpcshell",
+    "cppunittest",
+    "jittest",
+    "mozbase",
+    "web-platform",
+    "talos",
+    "raptor",
+    "awsy",
+    "gtest",
+    "updater-dep",
+    "jsreftest",
+    "perftests",
+    "fuzztest",
+]
+
+PACKAGE_SPECIFIED_HARNESSES = [
+    "condprof",
+    "cppunittest",
+    "mochitest",
+    "reftest",
+    "xpcshell",
+    "web-platform",
+    "talos",
+    "raptor",
+    "awsy",
+    "updater-dep",
+    "jittest",
+    "jsreftest",
+    "perftests",
+    "fuzztest",
+]
+
+# These packages are not present for every build configuration.
+OPTIONAL_PACKAGES = [
+    "gtest",
+]
+
+
+def parse_args():
+    parser = ArgumentParser(
+        description="Generate a test_packages.json file to tell automation which harnesses "
+        "require which test packages."
+    )
+    parser.add_argument(
+        "--common",
+        required=True,
+        action="store",
+        dest="tests_common",
+        help='Name of the "common" archive, a package to be used by all ' "harnesses.",
+    )
+    parser.add_argument(
+        "--jsshell",
+        required=True,
+        action="store",
+        dest="jsshell",
+        help="Name of the jsshell zip.",
+    )
+    for harness in PACKAGE_SPECIFIED_HARNESSES:
+        parser.add_argument(
+            "--%s" % harness,
+            required=True,
+            action="store",
+            dest=harness,
+            help="Name of the %s zip." % harness,
+        )
+    for harness in OPTIONAL_PACKAGES:
+        parser.add_argument(
+            "--%s" % harness,
+            required=False,
+            action="store",
+            dest=harness,
+            help="Name of the %s zip." % harness,
+        )
+    parser.add_argument(
+        "--dest-file",
+        required=True,
+        action="store",
+        dest="destfile",
+        help="Path to the output file to be written.",
+    )
+    return parser.parse_args()
+
+
+def generate_package_data(args):
+    # Generate a dictionary mapping test harness names (exactly as they're known to
+    # mozharness and testsuite-targets.mk, ideally) to the set of archive names that
+    # harness depends on to run.
+    # mozharness will use this file to determine what test zips to download,
+    # which will be an optimization once parts of the main zip are split to harness
+    # specific zips.
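+    # For example (the archive names here are hypothetical), the returned
+    # mapping looks like:
+    #   {"mochitest": ["target.common.tests.tar.gz",
+    #                  "target.mochitest.tests.tar.gz"], ...}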
+ tests_common = args.tests_common + jsshell = args.jsshell + + harness_requirements = dict([(k, [tests_common]) for k in ALL_HARNESSES]) + harness_requirements["jittest"].append(jsshell) + harness_requirements["jsreftest"].append(args.reftest) + for harness in PACKAGE_SPECIFIED_HARNESSES + OPTIONAL_PACKAGES: + pkg_name = getattr(args, harness, None) + if pkg_name is None: + continue + harness_requirements[harness].append(pkg_name) + return harness_requirements + + +if __name__ == "__main__": + args = parse_args() + packages_data = generate_package_data(args) + with open(args.destfile, "w") as of: + json.dump(packages_data, of, indent=4) diff --git a/build/genrc.sh b/build/genrc.sh new file mode 100755 index 0000000000..c0c6194624 --- /dev/null +++ b/build/genrc.sh @@ -0,0 +1,13 @@ +#!/bin/sh +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +DATATYPE="$1" +INFILE="$2" + +echo "${DATATYPE} RCDATA" +sed 's/"/""/g' ${INFILE} | awk 'BEGIN { printf("BEGIN\n") } { printf("\"%s\\r\\n\",\n", $0) } END { printf("\"\\0\"\nEND\n") }' + +exit 0 diff --git a/build/glean_requirements.in b/build/glean_requirements.in new file mode 100644 index 0000000000..cbedafe32d --- /dev/null +++ b/build/glean_requirements.in @@ -0,0 +1,2 @@ +glean_sdk==33.7.0 + diff --git a/build/glean_requirements.txt b/build/glean_requirements.txt new file mode 100644 index 0000000000..b463c7741c --- /dev/null +++ b/build/glean_requirements.txt @@ -0,0 +1,170 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --generate-hashes --output-file=glean_requirements.txt glean_requirements.in +# +appdirs==1.4.4 \ + --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41 \ + --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128 \ + # via glean-parser +attrs==20.3.0 \ + --hash=sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6 \ + --hash=sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700 \ + # via jsonschema +cffi==1.14.4 \ + --hash=sha256:00a1ba5e2e95684448de9b89888ccd02c98d512064b4cb987d48f4b40aa0421e \ + --hash=sha256:00e28066507bfc3fe865a31f325c8391a1ac2916219340f87dfad602c3e48e5d \ + --hash=sha256:045d792900a75e8b1e1b0ab6787dd733a8190ffcf80e8c8ceb2fb10a29ff238a \ + --hash=sha256:0638c3ae1a0edfb77c6765d487fee624d2b1ee1bdfeffc1f0b58c64d149e7eec \ + --hash=sha256:105abaf8a6075dc96c1fe5ae7aae073f4696f2905fde6aeada4c9d2926752362 \ + --hash=sha256:155136b51fd733fa94e1c2ea5211dcd4c8879869008fc811648f16541bf99668 \ + --hash=sha256:1a465cbe98a7fd391d47dce4b8f7e5b921e6cd805ef421d04f5f66ba8f06086c \ + --hash=sha256:1d2c4994f515e5b485fd6d3a73d05526aa0fcf248eb135996b088d25dfa1865b \ + --hash=sha256:2c24d61263f511551f740d1a065eb0212db1dbbbbd241db758f5244281590c06 \ + --hash=sha256:51a8b381b16ddd370178a65360ebe15fbc1c71cf6f584613a7ea08bfad946698 \ + --hash=sha256:594234691ac0e9b770aee9fcdb8fa02c22e43e5c619456efd0d6c2bf276f3eb2 \ + --hash=sha256:5cf4be6c304ad0b6602f5c4e90e2f59b47653ac1ed9c662ed379fe48a8f26b0c \ + --hash=sha256:64081b3f8f6f3c3de6191ec89d7dc6c86a8a43911f7ecb422c60e90c70be41c7 \ + --hash=sha256:6bc25fc545a6b3d57b5f8618e59fc13d3a3a68431e8ca5fd4c13241cd70d0009 \ + --hash=sha256:798caa2a2384b1cbe8a2a139d80734c9db54f9cc155c99d7cc92441a23871c03 \ + --hash=sha256:7c6b1dece89874d9541fc974917b631406233ea0440d0bdfbb8e03bf39a49b3b \ + 
--hash=sha256:840793c68105fe031f34d6a086eaea153a0cd5c491cde82a74b420edd0a2b909 \ + --hash=sha256:8d6603078baf4e11edc4168a514c5ce5b3ba6e3e9c374298cb88437957960a53 \ + --hash=sha256:9cc46bc107224ff5b6d04369e7c595acb700c3613ad7bcf2e2012f62ece80c35 \ + --hash=sha256:9f7a31251289b2ab6d4012f6e83e58bc3b96bd151f5b5262467f4bb6b34a7c26 \ + --hash=sha256:9ffb888f19d54a4d4dfd4b3f29bc2c16aa4972f1c2ab9c4ab09b8ab8685b9c2b \ + --hash=sha256:a5ed8c05548b54b998b9498753fb9cadbfd92ee88e884641377d8a8b291bcc01 \ + --hash=sha256:a7711edca4dcef1a75257b50a2fbfe92a65187c47dab5a0f1b9b332c5919a3fb \ + --hash=sha256:af5c59122a011049aad5dd87424b8e65a80e4a6477419c0c1015f73fb5ea0293 \ + --hash=sha256:b18e0a9ef57d2b41f5c68beefa32317d286c3d6ac0484efd10d6e07491bb95dd \ + --hash=sha256:b4e248d1087abf9f4c10f3c398896c87ce82a9856494a7155823eb45a892395d \ + --hash=sha256:ba4e9e0ae13fc41c6b23299545e5ef73055213e466bd107953e4a013a5ddd7e3 \ + --hash=sha256:c6332685306b6417a91b1ff9fae889b3ba65c2292d64bd9245c093b1b284809d \ + --hash=sha256:d5ff0621c88ce83a28a10d2ce719b2ee85635e85c515f12bac99a95306da4b2e \ + --hash=sha256:d9efd8b7a3ef378dd61a1e77367f1924375befc2eba06168b6ebfa903a5e59ca \ + --hash=sha256:df5169c4396adc04f9b0a05f13c074df878b6052430e03f50e68adf3a57aa28d \ + --hash=sha256:ebb253464a5d0482b191274f1c8bf00e33f7e0b9c66405fbffc61ed2c839c775 \ + --hash=sha256:ec80dc47f54e6e9a78181ce05feb71a0353854cc26999db963695f950b5fb375 \ + --hash=sha256:f032b34669220030f905152045dfa27741ce1a6db3324a5bc0b96b6c7420c87b \ + --hash=sha256:f60567825f791c6f8a592f3c6e3bd93dd2934e3f9dac189308426bd76b00ef3b \ + --hash=sha256:f803eaa94c2fcda012c047e62bc7a51b0bdabda1cad7a92a522694ea2d76e49f \ + # via glean-sdk +click==7.1.2 \ + --hash=sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a \ + --hash=sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc \ + # via glean-parser +diskcache==5.1.0 \ + --hash=sha256:bc7928df986dbc8a8d6e34c33b0da89d668cfa65e7fcc91298a6959a35076993 \ + --hash=sha256:d8d608363f1b0ecbc216b5b34b6c8e269a5d4cce338269f3b1c1f8f11816f71d \ + # via glean-parser +glean-parser==1.29.0 \ + --hash=sha256:7cf1b02ef87fad57bf0f6b9711a98c1fd8f89c9df702245d16c09bf1b042a255 \ + --hash=sha256:df7436e164148594176ec55f7d7c3c5c944daca67c3cc30428514628625b214b \ + # via glean-sdk +glean_sdk==33.7.0 \ + --hash=sha256:2fc5e8d2ec668fae2235e0cae6cb59a6611a1cbdc9cf667319bf01fe1f53e8ce \ + --hash=sha256:93f943ffc10bee45942a0b9e7262e44e2ab30fa71c185e079b5cabca4c7f64c2 \ + --hash=sha256:aa655ff576b8fe209be973f6f6920f3bdb5e1ea1388bfcf569a10b6d29843b26 \ + --hash=sha256:c92ef6cf3ba3604dd5597e9f2ff3158a7b853757882ecfc9a33a2f2c941cb171 \ + --hash=sha256:e038e3ae8c29d6e5047b755120c8846fcb79aca76531f9210f738dbc21808d99 \ + # via -r glean_requirements.in +importlib-metadata==3.3.0 \ + --hash=sha256:5c5a2720817414a6c41f0a49993908068243ae02c1635a228126519b509c8aed \ + --hash=sha256:bf792d480abbd5eda85794e4afb09dd538393f7d6e6ffef6e9f03d2014cf9450 \ + # via jsonschema +iso8601==0.1.13 \ + --hash=sha256:694be0743e9f1535ea873bfc7bd6fb62380c62b75822761859428073a17fd39c \ + --hash=sha256:6f02f01dd13320a7f280e58516dc8d1950dfaf77527cc365a398cd9de4d3c692 \ + --hash=sha256:f7dec22af52025d4526be94cc1303c7d8f5379b746a3f54a8c8446384392eeb1 \ + # via glean-parser, glean-sdk +jinja2==2.11.2 \ + --hash=sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0 \ + --hash=sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035 \ + # via glean-parser +jsonschema==3.2.0 \ + 
--hash=sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163 \ + --hash=sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a \ + # via glean-parser +markupsafe==1.1.1 \ + --hash=sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473 \ + --hash=sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161 \ + --hash=sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235 \ + --hash=sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5 \ + --hash=sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42 \ + --hash=sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff \ + --hash=sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b \ + --hash=sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1 \ + --hash=sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e \ + --hash=sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183 \ + --hash=sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66 \ + --hash=sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b \ + --hash=sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1 \ + --hash=sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15 \ + --hash=sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1 \ + --hash=sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e \ + --hash=sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b \ + --hash=sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905 \ + --hash=sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735 \ + --hash=sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d \ + --hash=sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e \ + --hash=sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d \ + --hash=sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c \ + --hash=sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21 \ + --hash=sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2 \ + --hash=sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5 \ + --hash=sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b \ + --hash=sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6 \ + --hash=sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f \ + --hash=sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f \ + --hash=sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2 \ + --hash=sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7 \ + --hash=sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be \ + # via jinja2 +pathspec==0.8.1 \ + --hash=sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd \ + --hash=sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d \ + # via yamllint +pycparser==2.20 \ + --hash=sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0 \ + --hash=sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705 \ + # via cffi +pyrsistent==0.17.3 \ + --hash=sha256:2e636185d9eb976a18a8a8e96efce62f2905fea90041958d8cc2a189756ebf3e \ + # via jsonschema +pyyaml==5.3.1 \ + 
--hash=sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97 \ + --hash=sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76 \ + --hash=sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2 \ + --hash=sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e \ + --hash=sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648 \ + --hash=sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf \ + --hash=sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f \ + --hash=sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2 \ + --hash=sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee \ + --hash=sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a \ + --hash=sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d \ + --hash=sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c \ + --hash=sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a \ + # via glean-parser, yamllint +six==1.15.0 \ + --hash=sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259 \ + --hash=sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced \ + # via jsonschema +typing-extensions==3.7.4.3 \ + --hash=sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918 \ + --hash=sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c \ + --hash=sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f \ + # via importlib-metadata +yamllint==1.25.0 \ + --hash=sha256:b1549cbe5b47b6ba67bdeea31720f5c51431a4d0c076c1557952d841f7223519 \ + --hash=sha256:c7be4d0d2584a1b561498fa9acb77ad22eb434a109725c7781373ae496d823b3 \ + # via glean-parser +zipp==3.4.0 \ + --hash=sha256:102c24ef8f171fd729d46599845e95c7ab894a4cf45f5de11a44cc7444fb1108 \ + --hash=sha256:ed5eee1974372595f9e416cc7bbeeb12335201d8081ca8a0743c954d4446e5cb \ + # via importlib-metadata + +# WARNING: The following packages were not pinned, but pip requires them to be +# pinned when the requirements file includes hashes. Consider using the --allow-unsafe flag. +# setuptools diff --git a/build/gn.mozbuild b/build/gn.mozbuild new file mode 100644 index 0000000000..2351aca4a3 --- /dev/null +++ b/build/gn.mozbuild @@ -0,0 +1,36 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
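+# Translate the configured build (CONFIG) into the variables GN expects:
+# debug vs. release, the target OS flavor, and the host/target CPU names.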
+ +gn_vars = {} + +if CONFIG['MOZ_DEBUG']: + gn_vars['is_debug'] = True +else: + gn_vars['is_debug'] = False + +os = CONFIG['OS_TARGET'] + +flavors = { + 'WINNT': 'win', + 'Android': 'android', + 'Linux': 'linux', + 'Darwin': 'mac' if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa' else 'ios', + 'SunOS': 'solaris', + 'GNU/kFreeBSD': 'freebsd', + 'DragonFly': 'dragonfly', + 'FreeBSD': 'freebsd', + 'NetBSD': 'netbsd', + 'OpenBSD': 'openbsd', +} +gn_vars['target_os'] = flavors.get(os) + +arches = { + 'x86_64': 'x64', + 'aarch64': 'arm64', +} + +gn_vars['host_cpu'] = arches.get(CONFIG['HOST_CPU_ARCH'], CONFIG['HOST_CPU_ARCH']) +gn_vars['target_cpu'] = arches.get(CONFIG['CPU_ARCH'], CONFIG['CPU_ARCH']) diff --git a/build/gyp.mozbuild b/build/gyp.mozbuild new file mode 100644 index 0000000000..7499f6c460 --- /dev/null +++ b/build/gyp.mozbuild @@ -0,0 +1,126 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +include('gyp_base.mozbuild') + +gyp_vars.update({ + 'lsan': 0, + 'asan': 0, + 'tsan': 1 if CONFIG['MOZ_TSAN'] else 0, + 'ubsan' : 0, + 'fuzzing' : 1 if CONFIG['FUZZING'] else 0, + 'libfuzzer' : 1 if CONFIG['LIBFUZZER'] else 0, + 'libfuzzer_fuzzer_no_link_flag' : 1 if CONFIG['HAVE_LIBFUZZER_FLAG_FUZZER_NO_LINK'] else 0, + 'build_with_mozilla': 1, + 'build_with_chromium': 0, + # 10.9 once we move to TC cross-compiles - bug 1270217 + 'mac_sdk_min': '10.9', + 'mac_deployment_target': '10.9', + 'use_official_google_api_keys': 0, + 'have_clock_monotonic': 1 if CONFIG['HAVE_CLOCK_MONOTONIC'] else 0, + 'have_ethtool_cmd_speed_hi': 1 if CONFIG['MOZ_WEBRTC_HAVE_ETHTOOL_SPEED_HI'] else 0, + 'include_alsa_audio': 1 if CONFIG['MOZ_ALSA'] else 0, + 'include_pulse_audio': 1 if CONFIG['MOZ_PULSEAUDIO'] else 0, + # basic stuff for everything + 'include_internal_video_render': 0, + 'clang': 1 if CONFIG['CC_TYPE'] == 'clang' else 0, + 'clang_cl': 1 if CONFIG['CC_TYPE'] == 'clang-cl' else 0, + 'clang_use_chrome_plugins': 0, + 'enable_protobuf': 0, + 'include_tests': 0, + 'enable_android_opensl': 1, + 'enable_android_opensl_output': 0, + # use_system_lib* still seems to be in use in trunk/build + 'use_system_libjpeg': 0, + 'use_system_libvpx': 0, + 'build_json': 0, + 'build_libjpeg': 0, + 'build_libyuv': 0, + 'build_libvpx': 0, + 'build_libevent': 0, + 'build_ssl': 0, + 'build_json': 0, + 'build_icu': 0, + 'build_opus': 0, + 'libyuv_dir': '/media/libyuv/libyuv', + 'yuv_disable_avx2': 0 if CONFIG['HAVE_X86_AVX2'] else 1, + # don't use openssl + 'use_openssl': 0, + # Must match build/gyp.mozbuild WEBRTC_BUILD_LIBEVENT + #'enable_libevent': 0, default according to OS + + 'debug': 1 if CONFIG['DEBUG'] else 0, + + 'use_x11': 1 if CONFIG['MOZ_X11'] else 0, + 'use_glib': 1 if CONFIG['GLIB_LIBS'] else 0, + # bug 1373485 - avoid pkg-config for gtk2 in webrtc + 'use_gtk': 0, + + # turn off mandatory use of NEON and instead use NEON detection + 'arm_neon': 0, + 'arm_neon_optional': 1, + + 'moz_webrtc_mediacodec': 0, + + # Turn off multi monitor screen share + 'multi_monitor_screenshare%' : 0, + + # (for vp8) chromium sets to 0 also + 'use_temporal_layers': 0, + + # Creates AEC internal sample dump files in current directory + 'aec_debug_dump': 1, + + # codec enable/disables: + 'include_g711': 1, + 'include_opus': 1, + 'include_g722': 1, + 'include_ilbc': 0, + # We turn on ISAC because the 
AGC uses parts of it, and depend on the + # linker to throw away uneeded bits. + 'include_isac': 1, + 'include_pcm16b': 1, + + #'rtc_opus_variable_complexity': 1, + + 'apm_debug_dump': 1, +}) + +if os == 'Android': + gyp_vars.update( + gtest_target_type='executable', + moz_webrtc_mediacodec=1, + android_toolchain=CONFIG.get('ANDROID_TOOLCHAIN', ''), + ) + +if CONFIG['ARM_ARCH']: + if int(CONFIG['ARM_ARCH']) < 7: + gyp_vars['armv7'] = 0 + gyp_vars['arm_neon_optional'] = 0 + elif os == 'Android': + gyp_vars['armv7'] = 1 + gyp_vars['arm_neon'] = 1 + gyp_vars['build_with_neon'] = 1 + else: + # CPU detection for ARM works on Android only. armv7 always uses CPU + # detection, so we have to set armv7=0 for non-Android target + gyp_vars['armv7'] = 0 + # For libyuv + gyp_vars['arm_version'] = int(CONFIG['ARM_ARCH']) + +# Don't try to compile ssse3/sse4.1 code if toolchain doesn't support +if CONFIG['INTEL_ARCHITECTURE']: + if not CONFIG['HAVE_TOOLCHAIN_SUPPORT_MSSSE3'] or not CONFIG['HAVE_TOOLCHAIN_SUPPORT_MSSE4_1']: + gyp_vars['yuv_disable_asm'] = 1 + +if CONFIG['MACOS_SDK_DIR']: + gyp_vars['mac_sdk_path'] = CONFIG['MACOS_SDK_DIR'] + +if not CONFIG['MOZ_SYSTEM_LIBVPX']: + gyp_vars['libvpx_dir'] = '/media/libvpx/libvpx' + +if not CONFIG['MOZ_SYSTEM_LIBEVENT']: + gyp_vars['libevent_dir'] = '/ipc/chromium/src/third_party/libevent' diff --git a/build/gyp_base.mozbuild b/build/gyp_base.mozbuild new file mode 100644 index 0000000000..9344cc4e7f --- /dev/null +++ b/build/gyp_base.mozbuild @@ -0,0 +1,38 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +gyp_vars = {} + +os = CONFIG['OS_TARGET'] + +if os == 'WINNT': + gyp_vars.update( + MSVS_VERSION=CONFIG['_MSVS_VERSION'], + MSVS_OS_BITS=64 if CONFIG['HAVE_64BIT_BUILD'] else 32, + ) + +flavors = { + 'WINNT': 'win', + 'Android': 'android', + 'Linux': 'linux', + 'Darwin': 'mac' if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa' else 'ios', + 'SunOS': 'solaris', + 'GNU/kFreeBSD': 'freebsd', + 'DragonFly': 'dragonfly', + 'FreeBSD': 'freebsd', + 'NetBSD': 'netbsd', + 'OpenBSD': 'openbsd', +} +gyp_vars['OS'] = flavors.get(os) + +arches = { + 'x86_64': 'x64', + 'x86': 'ia32', + 'aarch64': 'arm64', +} + +gyp_vars['host_arch'] = arches.get(CONFIG['HOST_CPU_ARCH'], CONFIG['HOST_CPU_ARCH']) +gyp_vars['target_arch'] = arches.get(CONFIG['CPU_ARCH'], CONFIG['CPU_ARCH']) diff --git a/build/gyp_includes/common.gypi b/build/gyp_includes/common.gypi new file mode 100644 index 0000000000..8cd6ef7557 --- /dev/null +++ b/build/gyp_includes/common.gypi @@ -0,0 +1,3591 @@ +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# IMPORTANT: +# Please don't directly include this file if you are building via gyp_chromium, +# since gyp_chromium is automatically forcing its inclusion. +{ + # Variables expected to be overriden on the GYP command line (-D) or by + # ~/.gyp/include.gypi. + 'variables': { + # Putting a variables dict inside another variables dict looks kind of + # weird. This is done so that 'host_arch', 'chromeos', etc are defined as + # variables within the outer variables dict here. This is necessary + # to get these variables defined for the conditions within this variables + # dict that operate on these variables. 
+ 'variables': { + 'variables': { + 'variables': { + 'variables': { + # Whether we're building a ChromeOS build. + 'chromeos%': 0, + + # Whether or not we are using the Aura windowing framework. + 'use_aura%': 0, + + # Whether or not we are building the Ash shell. + 'use_ash%': 0, + }, + # Copy conditionally-set variables out one scope. + 'chromeos%': '<(chromeos)', + 'use_aura%': '<(use_aura)', + 'use_ash%': '<(use_ash)', + + # Whether we are using Views Toolkit + 'toolkit_views%': 0, + + # Use OpenSSL instead of NSS. Under development: see http://crbug.com/62803 + 'use_openssl%': 0, + + 'use_ibus%': 0, + + # Disable viewport meta tag by default. + 'enable_viewport%': 0, + + # Enable HiDPI support. + 'enable_hidpi%': 0, + + # Enable touch optimized art assets and metrics. + 'enable_touch_ui%': 0, + + # Is this change part of the android upstream bringup? + # Allows us to *temporarily* disable certain things for + # staging. Only set to 1 in a GYP_DEFINES. + 'android_upstream_bringup%': 0, + + # Override buildtype to select the desired build flavor. + # Dev - everyday build for development/testing + # Official - release build (generally implies additional processing) + # TODO(mmoss) Once 'buildtype' is fully supported (e.g. Windows gyp + # conversion is done), some of the things which are now controlled by + # 'branding', such as symbol generation, will need to be refactored + # based on 'buildtype' (i.e. we don't care about saving symbols for + # non-Official # builds). + 'buildtype%': 'Dev', + + 'conditions': [ + # ChromeOS implies ash. + ['chromeos==1', { + 'use_ash%': 1, + 'use_aura%': 1, + }], + + # For now, Windows builds that |use_aura| should also imply using + # ash. This rule should be removed for the future when Windows is + # using the aura windows without the ash interface. + ['use_aura==1 and OS=="win"', { + 'use_ash%': 1, + }], + ['use_ash==1', { + 'use_aura%': 1, + }], + + # A flag for BSD platforms + ['OS=="dragonfly" or OS=="freebsd" or OS=="netbsd" or \ + OS=="openbsd"', { + 'os_bsd%': 1, + }, { + 'os_bsd%': 0, + }], + ], + }, + # Copy conditionally-set variables out one scope. + 'chromeos%': '<(chromeos)', + 'use_aura%': '<(use_aura)', + 'use_ash%': '<(use_ash)', + 'os_bsd%': '<(os_bsd)', + 'use_openssl%': '<(use_openssl)', + 'use_ibus%': '<(use_ibus)', + 'enable_viewport%': '<(enable_viewport)', + 'enable_hidpi%': '<(enable_hidpi)', + 'enable_touch_ui%': '<(enable_touch_ui)', + 'android_upstream_bringup%': '<(android_upstream_bringup)', + 'buildtype%': '<(buildtype)', + + # Sets whether we're building with the Android SDK/NDK (and hence with + # Ant, value 0), or as part of the Android system (and hence with the + # Android build system, value 1). + 'android_build_type%': 0, + + # Compute the architecture that we're building on. + 'conditions': [ + ['OS=="win" or OS=="ios"', { + 'host_arch%': 'ia32', + }, { + # This handles the Unix platforms for which there is some support. + # Anything else gets passed through, which probably won't work very + # well; such hosts should pass an explicit target_arch to gyp. + 'host_arch%': + '. Additional + # documentation on these macros is available at + # http://developer.apple.com/mac/library/technotes/tn2002/tn2064.html#SECTION3 + # Chrome normally builds with the Mac OS X 10.6 SDK and sets the + # deployment target to 10.6. Other projects, such as O3D, may + # override these defaults. + + # Normally, mac_sdk_min is used to find an SDK that Xcode knows + # about that is at least the specified version. 
In official builds, + # the SDK must match mac_sdk_min exactly. If the SDK is installed + # someplace that Xcode doesn't know about, set mac_sdk_path to the + # path to the SDK; when set to a non-empty string, SDK detection + # based on mac_sdk_min will be bypassed entirely. + 'mac_sdk_min%': '10.6', + 'mac_sdk_path%': '', + + 'mac_deployment_target%': '10.6', + }, + + 'mac_sdk_min': '<(mac_sdk_min)', + 'mac_sdk_path': '<(mac_sdk_path)', + 'mac_deployment_target': '<(mac_deployment_target)', + + # Enable clang on mac by default! + 'clang%': 1, + + # Compile in Breakpad support by default so that it can be + # tested, even if it is not enabled by default at runtime. + 'mac_breakpad_compiled_in%': 1, + 'conditions': [ + # mac_product_name is set to the name of the .app bundle as it should + # appear on disk. This duplicates data from + # chrome/app/theme/chromium/BRANDING and + # chrome/app/theme/google_chrome/BRANDING, but is necessary to get + # these names into the build system. + ['branding=="Chrome"', { + 'mac_product_name%': 'Google Chrome', + }, { # else: branding!="Chrome" + 'mac_product_name%': 'Chromium', + }], + + ['branding=="Chrome" and buildtype=="Official"', { + 'mac_sdk%': ', where + # typically changes with each launch. This in turn + # means that breakpoints in Chrome.dll don't stick from one launch + # to the next. For this reason, we turn ASLR off in debug builds. + # Note that this is a three-way bool, where 0 means to pick up + # the default setting, 1 is off and 2 is on. + 'RandomizedBaseAddress': 1, + }, + 'VCResourceCompilerTool': { + 'PreprocessorDefinitions': ['_DEBUG'], + }, + }, + 'conditions': [ + ['OS=="linux" or OS=="android"', { + 'target_conditions': [ + ['_toolset=="target"', { + 'cflags': [ + '<@(debug_extra_cflags)', + ], + }], + ], + }], + # Disabled on iOS because it was causing a crash on startup. + # TODO(michelea): investigate, create a reduced test and possibly + # submit a radar. + ['release_valgrind_build==0 and OS!="ios"', { + 'xcode_settings': { + 'OTHER_CFLAGS': [ + '-fstack-protector-all', # Implies -fstack-protector + ], + }, + }], + ], + }, + 'Release_Base': { + 'abstract': 1, + 'defines': [ + 'NDEBUG', + ], + 'xcode_settings': { + 'DEAD_CODE_STRIPPING': 'YES', # -Wl,-dead_strip + 'GCC_OPTIMIZATION_LEVEL': '<(mac_release_optimization)', + 'OTHER_CFLAGS': [ '<@(release_extra_cflags)', ], + }, + 'msvs_settings': { + 'VCCLCompilerTool': { + 'RuntimeLibrary': '<(win_release_RuntimeLibrary)', + 'conditions': [ + # In official builds, each target will self-select + # an optimization level. + ['buildtype!="Official"', { + 'Optimization': '<(win_release_Optimization)', + }, + ], + # According to MSVS, InlineFunctionExpansion=0 means + # "default inlining", not "/Ob0". + # Thus, we have to handle InlineFunctionExpansion==0 separately. + ['win_release_InlineFunctionExpansion==0', { + 'AdditionalOptions': ['/Ob0'], + }], + ['win_release_InlineFunctionExpansion!=""', { + 'InlineFunctionExpansion': + '<(win_release_InlineFunctionExpansion)', + }], + + # if win_release_OmitFramePointers is blank, leave as default + ['win_release_OmitFramePointers==1', { + 'OmitFramePointers': 'true', + }], + ['win_release_OmitFramePointers==0', { + 'OmitFramePointers': 'false', + # The above is not sufficient (http://crbug.com/106711): it + # simply eliminates an explicit "/Oy", but both /O2 and /Ox + # perform FPO regardless, so we must explicitly disable. + # We still want the false setting above to avoid having + # "/Oy /Oy-" and warnings about overriding. 
+ 'AdditionalOptions': ['/Oy-'], + }], + ], + 'AdditionalOptions': [ '<@(win_release_extra_cflags)', ], + }, + 'VCLinkerTool': { + # LinkIncremental is a tri-state boolean, where 0 means default + # (i.e., inherit from parent solution), 1 means false, and + # 2 means true. + 'LinkIncremental': '1', + # This corresponds to the /PROFILE flag which ensures the PDB + # file contains FIXUP information (growing the PDB file by about + # 5%) but does not otherwise alter the output binary. This + # information is used by the Syzygy optimization tool when + # decomposing the release image. + 'Profile': 'true', + }, + }, + 'conditions': [ + ['msvs_use_common_release', { + 'includes': ['release.gypi'], + }], + ['release_valgrind_build==0', { + 'defines': [ + 'NVALGRIND', + 'DYNAMIC_ANNOTATIONS_ENABLED=0', + ], + }, { + 'defines': [ + 'DYNAMIC_ANNOTATIONS_ENABLED=1', + 'WTF_USE_DYNAMIC_ANNOTATIONS=1', + ], + }], + ['win_use_allocator_shim==0', { + 'defines': ['NO_TCMALLOC'], + }], + ['OS=="linux"', { + 'target_conditions': [ + ['_toolset=="target"', { + 'cflags': [ + '<@(release_extra_cflags)', + ], + }], + ], + }], + ], + }, + # + # Concrete configurations + # + 'Debug': { + 'inherit_from': ['Common_Base', 'x86_Base', 'Debug_Base'], + }, + 'Release': { + 'inherit_from': ['Common_Base', 'x86_Base', 'Release_Base'], + }, + 'conditions': [ + [ 'OS=="win"', { + # TODO(bradnelson): add a gyp mechanism to make this more graceful. + 'Debug_x64': { + 'inherit_from': ['Common_Base', 'x64_Base', 'Debug_Base'], + }, + 'Release_x64': { + 'inherit_from': ['Common_Base', 'x64_Base', 'Release_Base'], + }, + }], + ], + }, + }, + 'conditions': [ + ['os_posix==1 and OS!="mac" and OS!="ios"', { + 'target_defaults': { + # Enable -Werror by default, but put it in a variable so it can + # be disabled in ~/.gyp/include.gypi on the valgrind builders. + 'variables': { + 'werror%': '-Werror', + 'libraries_for_target%': '', + }, + 'defines': [ + '_FILE_OFFSET_BITS=64', + ], + 'cflags': [ + '<(werror)', # See note above about the werror variable. + '-pthread', + '-fno-exceptions', + '-fno-strict-aliasing', # See http://crbug.com/32204 + '-Wall', + # TODO(evan): turn this back on once all the builds work. + # '-Wextra', + # Don't warn about unused function params. We use those everywhere. + '-Wno-unused-parameter', + # Don't warn about the "struct foo f = {0};" initialization pattern. + '-Wno-missing-field-initializers', + # Don't export any symbols (for example, to plugins we dlopen()). + # Note: this is *required* to make some plugins work. + '-fvisibility=hidden', + '-pipe', + ], + 'cflags_cc': [ + '-fno-rtti', + '-fno-threadsafe-statics', + # Make inline functions have hidden visiblity by default. + # Surprisingly, not covered by -fvisibility=hidden. + '-fvisibility-inlines-hidden', + # GCC turns on -Wsign-compare for C++ under -Wall, but clang doesn't, + # so we specify it explicitly. + # TODO(fischman): remove this if http://llvm.org/PR10448 obsoletes it. + # http://code.google.com/p/chromium/issues/detail?id=90453 + '-Wsign-compare', + ], + 'ldflags': [ + '-pthread', '-Wl,-z,noexecstack', + ], + 'libraries' : [ + '<(libraries_for_target)', + ], + 'configurations': { + 'Debug_Base': { + 'variables': { + 'debug_optimize%': '0', + }, + 'defines': [ + '_DEBUG', + ], + 'cflags': [ + '-O>(debug_optimize)', + '-g', + ], + 'conditions' : [ + ['OS=="android" and android_full_debug==0', { + # Some configurations are copied from Release_Base to reduce + # the binary size. 
+ 'variables': { + 'debug_optimize%': 's', + }, + 'cflags': [ + '-fomit-frame-pointer', + '-fdata-sections', + '-ffunction-sections', + ], + 'ldflags': [ + '-Wl,-O1', + '-Wl,--as-needed', + '-Wl,--gc-sections', + ], + }], + ], + }, + 'Release_Base': { + 'variables': { + 'release_optimize%': '2', + # Binaries become big and gold is unable to perform GC + # and remove unused sections for some of test targets + # on 32 bit platform. + # (This is currently observed only in chromeos valgrind bots) + # The following flag is to disable --gc-sections linker + # option for these bots. + 'no_gc_sections%': 0, + + # TODO(bradnelson): reexamine how this is done if we change the + # expansion of configurations + 'release_valgrind_build%': 0, + }, + 'cflags': [ + '-O<(release_optimize)', + # Don't emit the GCC version ident directives, they just end up + # in the .comment section taking up binary size. + '-fno-ident', + # Put data and code in their own sections, so that unused symbols + # can be removed at link time with --gc-sections. + '-fdata-sections', + '-ffunction-sections', + ], + 'ldflags': [ + # Specifically tell the linker to perform optimizations. + # See http://lwn.net/Articles/192624/ . + '-Wl,-O1', + '-Wl,--as-needed', + ], + 'conditions' : [ + ['no_gc_sections==0', { + 'ldflags': [ + '-Wl,--gc-sections', + ], + }], + ['OS=="android"', { + 'variables': { + 'release_optimize%': 's', + }, + 'cflags': [ + '-fomit-frame-pointer', + ], + }], + ['clang==1', { + 'cflags!': [ + '-fno-ident', + ], + }], + ['profiling==1', { + 'cflags': [ + '-fno-omit-frame-pointer', + '-g', + ], + }], + ], + }, + }, + 'variants': { + 'coverage': { + 'cflags': ['-fprofile-arcs', '-ftest-coverage'], + 'ldflags': ['-fprofile-arcs'], + }, + 'profile': { + 'cflags': ['-pg', '-g'], + 'ldflags': ['-pg'], + }, + 'symbols': { + 'cflags': ['-g'], + }, + }, + 'conditions': [ + ['target_arch=="ia32"', { + 'target_conditions': [ + ['_toolset=="target"', { + 'asflags': [ + # Needed so that libs with .s files (e.g. libicudata.a) + # are compatible with the general 32-bit-ness. + '-32', + ], + # All floating-point computations on x87 happens in 80-bit + # precision. Because the C and C++ language standards allow + # the compiler to keep the floating-point values in higher + # precision than what's specified in the source and doing so + # is more efficient than constantly rounding up to 64-bit or + # 32-bit precision as specified in the source, the compiler, + # especially in the optimized mode, tries very hard to keep + # values in x87 floating-point stack (in 80-bit precision) + # as long as possible. This has important side effects, that + # the real value used in computation may change depending on + # how the compiler did the optimization - that is, the value + # kept in 80-bit is different than the value rounded down to + # 64-bit or 32-bit. There are possible compiler options to + # make this behavior consistent (e.g. -ffloat-store would keep + # all floating-values in the memory, thus force them to be + # rounded to its original precision) but they have significant + # runtime performance penalty. + # + # -mfpmath=sse -msse2 makes the compiler use SSE instructions + # which keep floating-point values in SSE registers in its + # native precision (32-bit for single precision, and 64-bit + # for double precision values). This means the floating-point + # value used during computation does not change depending on + # how the compiler optimized the code, since the value is + # always kept in its specified precision. 
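The long comment above comes down to intermediate precision: with x87 the compiler may keep temporaries in 80-bit registers, so the result depends on when values happen to be spilled and rounded, whereas -mfpmath=sse rounds every operation to the declared width. A small Python sketch (purely illustrative, not part of the build) that emulates that difference for 32-bit floats:

import struct

def round32(x):
    # Round to single precision, as if the value were stored to a 32-bit variable.
    return struct.unpack('f', struct.pack('f', x))[0]

a, b, c = 1e8, 1.0, -1e8
wide = a + b + c                      # intermediates kept in wider precision
narrow = round32(round32(a + b) + c)  # every intermediate rounded to 32 bits
print(wide, narrow)                   # 1.0 vs 0.0: same expression, different results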
+ 'conditions': [ + ['branding=="Chromium" and disable_sse2==0', { + 'cflags': [ + '-march=pentium4', + '-msse2', + '-mfpmath=sse', + ], + }], + # ChromeOS targets Pinetrail, which is sse3, but most of the + # benefit comes from sse2 so this setting allows ChromeOS + # to build on other CPUs. In the future -march=atom would + # help but requires a newer compiler. + ['chromeos==1 and disable_sse2==0', { + 'cflags': [ + '-msse2', + ], + }], + # Install packages have started cropping up with + # different headers between the 32-bit and 64-bit + # versions, so we have to shadow those differences off + # and make sure a 32-bit-on-64-bit build picks up the + # right files. + # For android build, use NDK headers instead of host headers + ['host_arch!="ia32" and OS!="android"', { + 'include_dirs+': [ + '/usr/include32', + ], + }], + ], + 'target_conditions': [ + ['_toolset=="target" and OS!="android"', { + # -mmmx allows mmintrin.h to be used for mmx intrinsics. + # video playback is mmx and sse2 optimized. + 'cflags': [ + '-m32', + '-mmmx', + ], + 'ldflags': [ + '-m32', + ], + 'cflags_mozilla': [ + '-m32', + '-mmmx', + ], + }], + ], + }], + ], + }], + ['target_arch=="arm"', { + 'target_conditions': [ + ['_toolset=="target"', { + 'cflags_cc': [ + # The codesourcery arm-2009q3 toolchain warns at that the ABI + # has changed whenever it encounters a varargs function. This + # silences those warnings, as they are not helpful and + # clutter legitimate warnings. + '-Wno-abi', + ], + 'conditions': [ + ['arm_thumb==1', { + 'cflags': [ + '-mthumb', + ] + }], + ['armv7==1', { + 'cflags': [ + '-march=armv7-a', + '-mtune=cortex-a8', + '-mfloat-abi=<(arm_float_abi)', + ], + 'conditions': [ + ['arm_neon==1', { + 'cflags': [ '-mfpu=neon', ], + }, { + 'cflags': [ '-mfpu=<(arm_fpu)', ], + }], + ], + }], + ['OS=="android"', { + # Most of the following flags are derived from what Android + # uses by default when building for arm, reference for which + # can be found in the following file in the Android NDK: + # toolchains/arm-linux-androideabi-4.4.3/setup.mk + 'cflags': [ + # The tree-sra optimization (scalar replacement for + # aggregates enabling subsequent optimizations) leads to + # invalid code generation when using the Android NDK's + # compiler (r5-r7). This can be verified using + # TestWebKitAPI's WTF.Checked_int8_t test. + '-fno-tree-sra', + '-fuse-ld=gold', + '-Wno-psabi', + ], + # Android now supports .relro sections properly. + # NOTE: While these flags enable the generation of .relro + # sections, the generated libraries can still be loaded on + # older Android platform versions. + 'ldflags': [ + '-Wl,-z,relro', + '-Wl,-z,now', + '-fuse-ld=gold', + ], + 'conditions': [ + ['arm_thumb == 1', { + # Android toolchain doesn't support -mimplicit-it=thumb + 'cflags!': [ '-Wa,-mimplicit-it=thumb', ], + 'cflags': [ '-mthumb-interwork', ], + }], + ['armv7==0', { + # Flags suitable for Android emulator + 'cflags': [ + '-march=armv5te', + '-mtune=xscale', + '-msoft-float', + ], + 'defines': [ + '__ARM_ARCH_5__', + '__ARM_ARCH_5T__', + '__ARM_ARCH_5E__', + '__ARM_ARCH_5TE__', + ], + }], + ['clang==1', { + 'cflags!': [ + # Clang does not support the following options. 
+ '-mthumb-interwork', + '-finline-limit=64', + '-fno-tree-sra', + '-fuse-ld=gold', + '-Wno-psabi', + ], + }], + ], + }], + ], + }], + ], + }], + ['linux_fpic==1', { + 'cflags': [ + '-fPIC', + ], + 'ldflags': [ + '-fPIC', + ], + }], + ['sysroot!=""', { + 'target_conditions': [ + ['_toolset=="target"', { + 'cflags': [ + '--sysroot=<(sysroot)', + ], + 'ldflags': [ + '--sysroot=<(sysroot)', + ], + }]] + }], + ['clang==1', { + 'cflags': [ + '-Wheader-hygiene', + # Clang spots more unused functions. + '-Wno-unused-function', + # Don't die on dtoa code that uses a char as an array index. + '-Wno-char-subscripts', + # Especially needed for gtest macros using enum values from Mac + # system headers. + # TODO(pkasting): In C++11 this is legal, so this should be + # removed when we change to that. (This is also why we don't + # bother fixing all these cases today.) + '-Wno-unnamed-type-template-args', + # This (rightyfully) complains about 'override', which we use + # heavily. + '-Wno-c++11-extensions', + + # Warns on switches on enums that cover all enum values but + # also contain a default: branch. Chrome is full of that. + '-Wno-covered-switch-default', + + # TODO(thakis): Remove this. + '-Wno-implicit-conversion-floating-point-to-bool', + ], + 'cflags!': [ + # Clang doesn't seem to know know this flag. + '-mfpmath=sse', + ], + }], + ['clang==1 and clang_use_chrome_plugins==1', { + 'cflags': [ + '<@(clang_chrome_plugins_flags)', + ], + }], + ['clang==1 and clang_load!=""', { + 'cflags': [ + '-Xclang', '-load', '-Xclang', '<(clang_load)', + ], + }], + ['clang==1 and clang_add_plugin!=""', { + 'cflags': [ + '-Xclang', '-add-plugin', '-Xclang', '<(clang_add_plugin)', + ], + }], + ['clang==1 and "<(GENERATOR)"=="ninja"', { + 'cflags': [ + # See http://crbug.com/110262 + '-fcolor-diagnostics', + ], + }], + ['asan==1', { + 'target_conditions': [ + ['_toolset=="target"', { + 'cflags': [ + '-faddress-sanitizer', + '-fno-omit-frame-pointer', + ], + 'ldflags': [ + '-faddress-sanitizer', + ], + 'defines': [ + 'ADDRESS_SANITIZER', + ], + }], + ], + }], + ['tsan==1', { + 'target_conditions': [ + ['_toolset=="target"', { + 'cflags': [ + '-fthread-sanitizer', + '-fno-omit-frame-pointer', + '-fPIE', + ], + 'ldflags': [ + '-fthread-sanitizer', + ], + 'defines': [ + 'THREAD_SANITIZER', + 'DYNAMIC_ANNOTATIONS_EXTERNAL_IMPL=1', + ], + 'target_conditions': [ + ['_type=="executable"', { + 'ldflags': [ + '-pie', + ], + }], + ], + }], + ], + }], + ['order_profiling!=0 and (chromeos==1 or OS=="linux")', { + 'target_conditions' : [ + ['_toolset=="target"', { + 'cflags': [ + '-finstrument-functions', + # Allow mmx intrinsics to inline, so that the + # compiler can expand the intrinsics. + '-finstrument-functions-exclude-file-list=mmintrin.h', + ], + }], + ], + }], + ['linux_breakpad==1', { + 'cflags': [ '-g' ], + 'defines': ['USE_LINUX_BREAKPAD'], + }], + ['linux_use_heapchecker==1', { + 'variables': {'linux_use_tcmalloc%': 1}, + 'defines': ['USE_HEAPCHECKER'], + }], + ['linux_use_tcmalloc==0', { + 'defines': ['NO_TCMALLOC'], + }], + ['linux_keep_shadow_stacks==1', { + 'defines': ['KEEP_SHADOW_STACKS'], + 'cflags': [ + '-finstrument-functions', + # Allow mmx intrinsics to inline, so that the compiler can expand + # the intrinsics. + '-finstrument-functions-exclude-file-list=mmintrin.h', + ], + }], + ['linux_use_gold_flags==1', { + 'ldflags': [ + # Experimentation found that using four linking threads + # saved ~20% of link time. 
+ # https://groups.google.com/a/chromium.org/group/chromium-dev/browse_thread/thread/281527606915bb36 + '-Wl,--threads', + '-Wl,--thread-count=4', + ], + 'conditions': [ + ['release_valgrind_build==0', { + 'target_conditions': [ + ['_toolset=="target"', { + 'ldflags': [ + # There seems to be a conflict of --icf and -pie + # in gold which can generate crashy binaries. As + # a security measure, -pie takes precendence for + # now. + #'-Wl,--icf=safe', + '-Wl,--icf=none', + ], + }], + ], + }], + ], + }], + ['linux_use_gold_binary==1', { + 'variables': { + 'conditions': [ + ['inside_chromium_build==1', { + # We pass the path to gold to the compiler. gyp leaves + # unspecified what the cwd is when running the compiler, + # so the normal gyp path-munging fails us. This hack + # gets the right path. + 'gold_path': '<(PRODUCT_DIR)/../../third_party/gold', + }, { + 'gold_path': '<(PRODUCT_DIR)/../../Source/WebKit/chromium/third_party/gold', + }] + ] + }, + 'ldflags': [ + # Put our gold binary in the search path for the linker. + '-B<(gold_path)', + ], + }], + ], + }, + }], + # FreeBSD-specific options; note that most FreeBSD options are set above, + # with Linux. + ['OS=="freebsd"', { + 'target_defaults': { + 'ldflags': [ + '-Wl,--no-keep-memory', + ], + }, + }], + # Android-specific options; note that most are set above with Linux. + ['OS=="android"', { + 'variables': { + # This is the id for the archived chrome symbols. Each build that + # archives symbols is assigned an id which is then added to GYP_DEFINES. + # This is written to the device log on crashes just prior to dropping a + # tombstone. Tools can determine the location of the archived symbols + # from the id. + 'chrome_symbols_id%': '', + 'conditions': [ + # Use shared stlport library when system one used. + # Figure this out early since it needs symbols from libgcc.a, so it + # has to be before that in the set of libraries. + ['use_system_stlport==1', { + 'android_stlport_library': 'stlport', + }, { + 'android_stlport_library': 'stlport_static', + }], + ], + + # Placing this variable here prevents from forking libvpx, used + # by remoting. Remoting is off, so it needn't built, + # so forking it's deps seems like overkill. + # But this variable need defined to properly run gyp. + # A proper solution is to have an OS==android conditional + # in third_party/libvpx/libvpx.gyp to define it. + 'libvpx_path': 'lib/linux/arm', + }, + 'target_defaults': { + 'variables': { + 'release_extra_cflags%': '', + }, + + 'target_conditions': [ + # Settings for building device targets using Android's toolchain. + # These are based on the setup.mk file from the Android NDK. + # + # The NDK Android executable link step looks as follows: + # $LDFLAGS + # $(TARGET_CRTBEGIN_DYNAMIC_O) <-- crtbegin.o + # $(PRIVATE_OBJECTS) <-- The .o that we built + # $(PRIVATE_STATIC_LIBRARIES) <-- The .a that we built + # $(TARGET_LIBGCC) <-- libgcc.a + # $(PRIVATE_SHARED_LIBRARIES) <-- The .so that we built + # $(PRIVATE_LDLIBS) <-- System .so + # $(TARGET_CRTEND_O) <-- crtend.o + # + # For now the above are approximated for executables by adding + # crtbegin.o to the end of the ldflags and 'crtend.o' to the end + # of 'libraries'. 
+ # + # The NDK Android shared library link step looks as follows: + # $LDFLAGS + # $(PRIVATE_OBJECTS) <-- The .o that we built + # -l,--whole-archive + # $(PRIVATE_WHOLE_STATIC_LIBRARIES) + # -l,--no-whole-archive + # $(PRIVATE_STATIC_LIBRARIES) <-- The .a that we built + # $(TARGET_LIBGCC) <-- libgcc.a + # $(PRIVATE_SHARED_LIBRARIES) <-- The .so that we built + # $(PRIVATE_LDLIBS) <-- System .so + # + # For now, assume that whole static libraries are not needed. + # + # For both executables and shared libraries, add the proper + # libgcc.a to the start of libraries which puts it in the + # proper spot after .o and .a files get linked in. + # + # TODO: The proper thing to do longer-tem would be proper gyp + # support for a custom link command line. + ['_toolset=="target"', { + 'conditions': [ + ['build_with_mozilla==0', { + 'cflags!': [ + '-pthread', # Not supported by Android toolchain. + ], + 'cflags': [ + '-ffunction-sections', + '-funwind-tables', + '-g', + '-fstack-protector', + '-fno-short-enums', + '-finline-limit=64', + '-Wa,--noexecstack', + '<@(release_extra_cflags)', + ], + 'ldflags!': [ + '-pthread', # Not supported by Android toolchain. + ], + 'ldflags': [ + '-nostdlib', + '-Wl,--no-undefined', + # Don't export symbols from statically linked libraries. + '-Wl,--exclude-libs=ALL', + ], + 'libraries': [ + '-l<(android_stlport_library)', + # Manually link the libgcc.a that the cross compiler uses. + '@(change_mach_o_flags_options)', + ], + }, + ], + 'conditions': [ + ['asan==1', { + 'variables': { + 'asan_saves_file': 'asan.saves', + }, + 'xcode_settings': { + 'CHROMIUM_STRIP_SAVE_FILE': '<(asan_saves_file)', + }, + }], + ], + 'target_conditions': [ + ['mac_pie==1 and release_valgrind_build==0', { + # Turn on position-independence (ASLR) for executables. When + # PIE is on for the Chrome executables, the framework will + # also be subject to ASLR. + # Don't do this when building for Valgrind, because Valgrind + # doesn't understand slide. TODO: Make Valgrind on Mac OS X + # understand slide, and get rid of the Valgrind check. + 'xcode_settings': { + 'OTHER_LDFLAGS': [ + '-Wl,-pie', # Position-independent executable (MH_PIE) + ], + }, + }], + ], + }], + ['(_type=="executable" or _type=="shared_library" or \ + _type=="loadable_module") and mac_strip!=0', { + 'target_conditions': [ + ['mac_real_dsym == 1', { + # To get a real .dSYM bundle produced by dsymutil, set the + # debug information format to dwarf-with-dsym. Since + # strip_from_xcode will not be used, set Xcode to do the + # stripping as well. + 'configurations': { + 'Release_Base': { + 'xcode_settings': { + 'DEBUG_INFORMATION_FORMAT': 'dwarf-with-dsym', + 'DEPLOYMENT_POSTPROCESSING': 'YES', + 'STRIP_INSTALLED_PRODUCT': 'YES', + 'target_conditions': [ + ['_type=="shared_library" or _type=="loadable_module"', { + # The Xcode default is to strip debugging symbols + # only (-S). Local symbols should be stripped as + # well, which will be handled by -x. Xcode will + # continue to insert -S when stripping even when + # additional flags are added with STRIPFLAGS. + 'STRIPFLAGS': '-x', + }], # _type=="shared_library" or _type=="loadable_module"' + ], # target_conditions + }, # xcode_settings + }, # configuration "Release" + }, # configurations + }, { # mac_real_dsym != 1 + # To get a fast fake .dSYM bundle, use a post-build step to + # produce the .dSYM and strip the executable. strip_from_xcode + # only operates in the Release configuration. 
+ 'postbuilds': [ + { + 'variables': { + # Define strip_from_xcode in a variable ending in _path + # so that gyp understands it's a path and performs proper + # relativization during dict merging. + 'strip_from_xcode': 'mac/strip_from_xcode', + }, + 'postbuild_name': 'Strip If Needed', + 'action': ['$(srcdir)$(os_sep)build$(os_sep)<(strip_from_xcode)'], + }, + ], # postbuilds + }], # mac_real_dsym + ], # target_conditions + }], # (_type=="executable" or _type=="shared_library" or + # _type=="loadable_module") and mac_strip!=0 + ], # target_conditions + }, # target_defaults + }], # OS=="mac" + ['OS=="ios"', { + 'target_defaults': { + 'xcode_settings' : { + 'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0', + + # This next block is mostly common with the 'mac' section above, + # but keying off (or setting) 'clang' isn't valid for iOS as it + # also seems to mean using the custom build of clang. + + # Don't use -Wc++0x-extensions, which Xcode 4 enables by default + # when buliding with clang. This warning is triggered when the + # override keyword is used via the OVERRIDE macro from + # base/compiler_specific.h. + 'CLANG_WARN_CXX0X_EXTENSIONS': 'NO', + 'WARNING_CFLAGS': [ + '-Wheader-hygiene', + # Don't die on dtoa code that uses a char as an array index. + # This is required solely for base/third_party/dmg_fp/dtoa.cc. + '-Wno-char-subscripts', + # Clang spots more unused functions. + '-Wno-unused-function', + # See comments on this flag higher up in this file. + '-Wno-unnamed-type-template-args', + # This (rightyfully) complains about 'override', which we use + # heavily. + '-Wno-c++11-extensions', + ], + }, + 'target_conditions': [ + ['_type=="executable"', { + 'configurations': { + 'Release_Base': { + 'xcode_settings': { + 'DEPLOYMENT_POSTPROCESSING': 'YES', + 'STRIP_INSTALLED_PRODUCT': 'YES', + }, + }, + }, + 'xcode_settings': { + 'conditions': [ + ['chromium_ios_signing', { + # iOS SDK wants everything for device signed. + 'CODE_SIGN_IDENTITY[sdk=iphoneos*]': 'iPhone Developer', + }, { + 'CODE_SIGNING_REQUIRED': 'NO', + 'CODE_SIGN_IDENTITY[sdk=iphoneos*]': '', + }], + ], + }, + }], + ], # target_conditions + }, # target_defaults + }], # OS=="ios" + ['OS=="win"', { + 'target_defaults': { + 'defines': [ + 'WIN32', + '_WINDOWS', + 'NOMINMAX', + '_CRT_RAND_S', + 'CERT_CHAIN_PARA_HAS_EXTRA_FIELDS', + 'WIN32_LEAN_AND_MEAN', + '_ATL_NO_OPENGL', + ], + 'conditions': [ + ['build_with_mozilla==0', { + 'defines': [ + '_WIN32_WINNT=0x0602', + 'WINVER=0x0602', + ], + }], + ['buildtype=="Official"', { + # In official builds, targets can self-select an optimization + # level by defining a variable named 'optimize', and setting it + # to one of + # - "size", optimizes for minimal code size - the default. + # - "speed", optimizes for speed over code size. + # - "max", whole program optimization and link-time code + # generation. This is very expensive and should be used + # sparingly. 
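The variables and target_conditions that follow wire the 'optimize' values described above to concrete MSVC flags. As an illustration (the target name and source file are hypothetical, not taken from this patch), a target opting into speed optimization in Official builds would look roughly like:

{
  'targets': [
    {
      'target_name': 'hot_path',             # hypothetical
      'type': 'static_library',
      'sources': ['hot_path.cc'],            # hypothetical
      'variables': { 'optimize': 'speed' },  # picks /O2 + /Ot instead of the default 'size'
    },
  ],
}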
+ 'variables': { + 'optimize%': 'size', + }, + 'target_conditions': [ + ['optimize=="size"', { + 'msvs_settings': { + 'VCCLCompilerTool': { + # 1, optimizeMinSpace, Minimize Size (/O1) + 'Optimization': '1', + # 2, favorSize - Favor small code (/Os) + 'FavorSizeOrSpeed': '2', + }, + }, + }, + ], + ['optimize=="speed"', { + 'msvs_settings': { + 'VCCLCompilerTool': { + # 2, optimizeMaxSpeed, Maximize Speed (/O2) + 'Optimization': '2', + # 1, favorSpeed - Favor fast code (/Ot) + 'FavorSizeOrSpeed': '1', + }, + }, + }, + ], + ['optimize=="max"', { + 'msvs_settings': { + 'VCCLCompilerTool': { + # 2, optimizeMaxSpeed, Maximize Speed (/O2) + 'Optimization': '2', + # 1, favorSpeed - Favor fast code (/Ot) + 'FavorSizeOrSpeed': '1', + # This implies link time code generation. + 'WholeProgramOptimization': 'true', + }, + }, + }, + ], + ], + }, + ], + ['component=="static_library"', { + 'defines': [ + '_HAS_EXCEPTIONS=0', + ], + }], + ['MSVS_VERSION=="2008"', { + 'defines': [ + '_HAS_TR1=0', + ], + }], + ['secure_atl', { + 'defines': [ + '_SECURE_ATL', + ], + }], + ], + 'msvs_system_include_dirs': [ + '<(windows_sdk_path)/Include/shared', + '<(windows_sdk_path)/Include/um', + '<(windows_sdk_path)/Include/winrt', +# '<(directx_sdk_path)/Include', + '$(VSInstallDir)/VC/atlmfc/include', + ], + 'msvs_cygwin_dirs': ['<(DEPTH)/third_party/cygwin'], + 'msvs_disabled_warnings': [4351, 4396, 4503, 4819, + # TODO(maruel): These warnings are level 4. They will be slowly + # removed as code is fixed. + 4100, 4121, 4125, 4127, 4130, 4131, 4189, 4201, 4238, 4244, 4245, + 4310, 4355, 4428, 4481, 4505, 4510, 4512, 4530, 4610, 4611, 4701, + 4702, 4706, + ], + 'msvs_settings': { + 'VCCLCompilerTool': { + 'AdditionalOptions': ['/MP'], + 'MinimalRebuild': 'false', + 'BufferSecurityCheck': 'true', + 'EnableFunctionLevelLinking': 'true', + 'RuntimeTypeInfo': 'false', + 'WarningLevel': '4', + 'WarnAsError': 'true', + 'DebugInformationFormat': '3', + 'conditions': [ + ['component=="shared_library"', { + 'ExceptionHandling': '1', # /EHsc + }, { + 'ExceptionHandling': '0', + }], + ], + }, + 'VCLibrarianTool': { + 'AdditionalOptions': ['/ignore:4221'], + 'AdditionalLibraryDirectories': [ +# '<(directx_sdk_path)/Lib/x86', + '<(windows_sdk_path)/Lib/win8/um/x86', + ], + }, + 'VCLinkerTool': { + 'AdditionalDependencies': [ + 'wininet.lib', + 'dnsapi.lib', + 'version.lib', + 'msimg32.lib', + 'ws2_32.lib', + 'usp10.lib', + 'dbghelp.lib', + 'winmm.lib', + 'shlwapi.lib', + ], + + 'conditions': [ + ['msvs_express', { + # Explicitly required when using the ATL with express + 'AdditionalDependencies': [ + 'atlthunk.lib', + ], + + # ATL 8.0 included in WDK 7.1 makes the linker to generate + # almost eight hundred LNK4254 and LNK4078 warnings: + # - warning LNK4254: section 'ATL' (50000040) merged into + # '.rdata' (40000040) with different attributes + # - warning LNK4078: multiple 'ATL' sections found with + # different attributes + 'AdditionalOptions': ['/ignore:4254', '/ignore:4078'], + }], + ['MSVS_VERSION=="2005e"', { + # Non-express versions link automatically to these + 'AdditionalDependencies': [ + 'advapi32.lib', + 'comdlg32.lib', + 'ole32.lib', + 'shell32.lib', + 'user32.lib', + 'winspool.lib', + ], + }], + ], + 'AdditionalLibraryDirectories': [ +# '<(directx_sdk_path)/Lib/x86', XXXX + '<(windows_sdk_path)/Lib/win8/um/x86', + ], + 'GenerateDebugInformation': 'true', + 'MapFileName': '$(OutDir)\\$(TargetName).map', + 'ImportLibrary': '$(OutDir)\\lib\\$(TargetName).lib', + 'FixedBaseAddress': '1', + # SubSystem values: + # 0 == 
not set + # 1 == /SUBSYSTEM:CONSOLE + # 2 == /SUBSYSTEM:WINDOWS + # Most of the executables we'll ever create are tests + # and utilities with console output. + 'SubSystem': '1', + }, + 'VCMIDLTool': { + 'GenerateStublessProxies': 'true', + 'TypeLibraryName': '$(InputName).tlb', + 'OutputDirectory': '$(IntDir)', + 'HeaderFileName': '$(InputName).h', + 'DLLDataFileName': '$(InputName).dlldata.c', + 'InterfaceIdentifierFileName': '$(InputName)_i.c', + 'ProxyFileName': '$(InputName)_p.c', + }, + 'VCResourceCompilerTool': { + 'Culture' : '1033', + 'AdditionalIncludeDirectories': [ + '<(DEPTH)', + '<(SHARED_INTERMEDIATE_DIR)', + ], + }, + }, + }, + }], + ['disable_nacl==1', { + 'target_defaults': { + 'defines': [ + 'DISABLE_NACL', + ], + }, + }], + ['OS=="win" and msvs_use_common_linker_extras', { + 'target_defaults': { + 'msvs_settings': { + 'VCLinkerTool': { + 'DelayLoadDLLs': [ + 'dbghelp.dll', + 'dwmapi.dll', + 'shell32.dll', + 'uxtheme.dll', + ], + }, + }, + 'configurations': { + 'x86_Base': { + 'msvs_settings': { + 'VCLinkerTool': { + 'AdditionalOptions': [ + '/safeseh', + '/dynamicbase', + '/ignore:4199', + '/ignore:4221', + '/nxcompat', + ], + }, + }, + }, + 'x64_Base': { + 'msvs_settings': { + 'VCLinkerTool': { + 'AdditionalOptions': [ + # safeseh is not compatible with x64 + '/dynamicbase', + '/ignore:4199', + '/ignore:4221', + '/nxcompat', + ], + }, + }, + }, + }, + }, + }], + ['enable_new_npdevice_api==1', { + 'target_defaults': { + 'defines': [ + 'ENABLE_NEW_NPDEVICE_API', + ], + }, + }], + ], + 'xcode_settings': { + # DON'T ADD ANYTHING NEW TO THIS BLOCK UNLESS YOU REALLY REALLY NEED IT! + # This block adds *project-wide* configuration settings to each project + # file. It's almost always wrong to put things here. Specify your + # custom xcode_settings in target_defaults to add them to targets instead. + + 'conditions': [ + # In an Xcode Project Info window, the "Base SDK for All Configurations" + # setting sets the SDK on a project-wide basis. In order to get the + # configured SDK to show properly in the Xcode UI, SDKROOT must be set + # here at the project level. + ['OS=="mac"', { + 'conditions': [ + ['mac_sdk_path==""', { + 'SDKROOT': 'macosx<(mac_sdk)', # -isysroot + }, { + 'SDKROOT': '<(mac_sdk_path)', # -isysroot + }], + ], + }], + ['OS=="ios"', { + 'conditions': [ + ['ios_sdk_path==""', { + 'SDKROOT': 'iphoneos<(ios_sdk)', # -isysroot + }, { + 'SDKROOT': '<(ios_sdk_path)', # -isysroot + }], + ], + }], + ['OS=="ios"', { + # Just build armv7 since iOS 4.3+ only supports armv7. + 'ARCHS': '$(ARCHS_UNIVERSAL_IPHONE_OS)', + 'IPHONEOS_DEPLOYMENT_TARGET': '<(ios_deployment_target)', + # Target both iPhone and iPad. + 'TARGETED_DEVICE_FAMILY': '1,2', + }], + ], + + # The Xcode generator will look for an xcode_settings section at the root + # of each dict and use it to apply settings on a file-wide basis. Most + # settings should not be here, they should be in target-specific + # xcode_settings sections, or better yet, should use non-Xcode-specific + # settings in target dicts. SYMROOT is a special case, because many other + # Xcode variables depend on it, including variables such as + # PROJECT_DERIVED_FILE_DIR. When a source group corresponding to something + # like PROJECT_DERIVED_FILE_DIR is added to a project, in order for the + # files to appear (when present) in the UI as actual files and not red + # red "missing file" proxies, the correct path to PROJECT_DERIVED_FILE_DIR, + # and therefore SYMROOT, needs to be set at the project level. 
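Looking back at the mac SDK conditions in this block: when mac_sdk_path is non-empty it is used verbatim as SDKROOT (-isysroot), and SDK detection based on mac_sdk_min is bypassed. A hedged sketch of supplying it from the environment, assuming a stock Chromium-style gyp driver (the script name and SDK path are illustrative, not from this patch):

import os
import subprocess

env = dict(os.environ)
env['GYP_DEFINES'] = (env.get('GYP_DEFINES', '') + ' mac_sdk_path=/opt/sdks/MacOSX10.6.sdk').strip()
subprocess.check_call(['python', 'build/gyp_chromium'], env=env)  # hypothetical entry point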
+ 'SYMROOT': '<(DEPTH)/xcodebuild', + }, +} diff --git a/build/gyp_includes/filename_rules.gypi b/build/gyp_includes/filename_rules.gypi new file mode 100644 index 0000000000..7b16a15595 --- /dev/null +++ b/build/gyp_includes/filename_rules.gypi @@ -0,0 +1,96 @@ +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This gypi file defines the patterns used for determining whether a +# file is excluded from the build on a given platform. It is +# included by common.gypi for chromium_code. + +{ + 'target_conditions': [ + ['OS!="win" or >(nacl_untrusted_build)==1', { + 'sources/': [ ['exclude', '_win(_unittest)?\\.(h|cc)$'], + ['exclude', '(^|/)win/'], + ['exclude', '(^|/)win_[^/]*\\.(h|cc)$'] ], + }], + ['OS!="mac" or >(nacl_untrusted_build)==1', { + 'sources/': [ ['exclude', '_(cocoa|mac)(_unittest)?\\.(h|cc|mm?)$'], + ['exclude', '(^|/)(cocoa|mac)/'] ], + }], + ['OS!="ios" or >(nacl_untrusted_build)==1', { + 'sources/': [ ['exclude', '_ios(_unittest)?\\.(h|cc|mm?)$'], + ['exclude', '(^|/)ios/'] ], + }], + ['(OS!="mac" and OS!="ios") or >(nacl_untrusted_build)==1', { + 'sources/': [ ['exclude', '\\.mm?$' ] ], + }], + # Do not exclude the linux files on *BSD since most of them can be + # shared at this point. + # In case a file is not needed, it is going to be excluded later on. + # TODO(evan): the above is not correct; we shouldn't build _linux + # files on non-linux. + ['OS!="linux" and OS!="solaris" and <(os_bsd)!=1 or >(nacl_untrusted_build)==1', { + 'sources/': [ + ['exclude', '_linux(_unittest)?\\.(h|cc)$'], + ['exclude', '(^|/)linux/'], + ], + }], + ['OS!="android"', { + 'sources/': [ + ['exclude', '_android(_unittest)?\\.cc$'], + ['exclude', '(^|/)android/'], + ], + }], + ['OS=="win" and >(nacl_untrusted_build)==0', { + 'sources/': [ + ['exclude', '_posix(_unittest)?\\.(h|cc)$'], + ['exclude', '(^|/)posix/'], + ], + }], + ['<(chromeos)!=1 or >(nacl_untrusted_build)==1', { + 'sources/': [ ['exclude', '_chromeos(_unittest)?\\.(h|cc)$'] ] + }], + ['>(nacl_untrusted_build)==0', { + 'sources/': [ + ['exclude', '_nacl(_unittest)?\\.(h|cc)$'], + ], + }], + ['OS!="linux" and OS!="solaris" and <(os_bsd)!=1 or >(nacl_untrusted_build)==1', { + 'sources/': [ + ['exclude', '_xdg(_unittest)?\\.(h|cc)$'], + ], + }], + ['<(use_x11)!=1 or >(nacl_untrusted_build)==1', { + 'sources/': [ + ['exclude', '_(x|x11)(_unittest)?\\.(h|cc)$'], + ['exclude', '(^|/)x11_[^/]*\\.(h|cc)$'], + ], + }], + ['(<(toolkit_uses_gtk)!=1 or >(nacl_untrusted_build)==1) and (build_with_mozilla==0)', { + 'sources/': [ + ['exclude', '_gtk(_browsertest|_unittest)?\\.(h|cc)$'], + ['exclude', '(^|/)gtk/'], + ['exclude', '(^|/)gtk_[^/]*\\.(h|cc)$'], + ], + }], + ['<(toolkit_views)==0 or >(nacl_untrusted_build)==1', { + 'sources/': [ ['exclude', '_views\\.(h|cc)$'] ] + }], + ['<(use_aura)==0 or >(nacl_untrusted_build)==1', { + 'sources/': [ ['exclude', '_aura(_unittest)?\\.(h|cc)$'], + ['exclude', '(^|/)aura/'], + ] + }], + ['<(use_aura)==0 or <(use_x11)==0 or >(nacl_untrusted_build)==1', { + 'sources/': [ ['exclude', '_aurax11\\.(h|cc)$'] ] + }], + ['<(use_aura)==0 or OS!="win" or >(nacl_untrusted_build)==1', { + 'sources/': [ ['exclude', '_aurawin\\.(h|cc)$'] ] + }], + ['<(use_ash)==0 or >(nacl_untrusted_build)==1', { + 'sources/': [ ['exclude', '_ash(_unittest)?\\.(h|cc)$'], + ['exclude', '(^|/)ash/'], + ] + }], + ] +} diff --git a/build/gyp_includes/internal/release_defaults.gypi 
b/build/gyp_includes/internal/release_defaults.gypi new file mode 100644 index 0000000000..1bf674ac12 --- /dev/null +++ b/build/gyp_includes/internal/release_defaults.gypi @@ -0,0 +1,18 @@ +# Copyright (c) 2011 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +{ + 'msvs_settings': { + 'VCCLCompilerTool': { + 'StringPooling': 'true', + }, + 'VCLinkerTool': { + # No incremental linking. + 'LinkIncremental': '1', + # Eliminate Unreferenced Data (/OPT:REF). + 'OptimizeReferences': '2', + # Folding on (/OPT:ICF). + 'EnableCOMDATFolding': '2', + }, + }, +} diff --git a/build/gyp_includes/internal/release_impl.gypi b/build/gyp_includes/internal/release_impl.gypi new file mode 100644 index 0000000000..5ac0e09d1e --- /dev/null +++ b/build/gyp_includes/internal/release_impl.gypi @@ -0,0 +1,17 @@ +# Copyright (c) 2011 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +{ + 'includes': ['release_defaults.gypi'], + 'msvs_settings': { + 'VCCLCompilerTool': { + 'OmitFramePointers': 'false', + # The above is not sufficient (http://crbug.com/106711): it + # simply eliminates an explicit "/Oy", but both /O2 and /Ox + # perform FPO regardless, so we must explicitly disable. + # We still want the false setting above to avoid having + # "/Oy /Oy-" and warnings about overriding. + 'AdditionalOptions': ['/Oy-'], + }, + }, +} diff --git a/build/gyp_includes/internal/release_impl_official.gypi b/build/gyp_includes/internal/release_impl_official.gypi new file mode 100644 index 0000000000..d084ae32cf --- /dev/null +++ b/build/gyp_includes/internal/release_impl_official.gypi @@ -0,0 +1,43 @@ +# Copyright (c) 2011 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +{ + 'includes': ['release_defaults.gypi'], + 'defines': ['OFFICIAL_BUILD'], + 'msvs_settings': { + 'VCCLCompilerTool': { + 'InlineFunctionExpansion': '2', + 'EnableIntrinsicFunctions': 'true', + 'EnableFiberSafeOptimizations': 'true', + 'OmitFramePointers': 'false', + # The above is not sufficient (http://crbug.com/106711): it + # simply eliminates an explicit "/Oy", but both /O2 and /Ox + # perform FPO regardless, so we must explicitly disable. + # We still want the false setting above to avoid having + # "/Oy /Oy-" and warnings about overriding. + 'AdditionalOptions': ['/Oy-'], + }, + 'VCLibrarianTool': { + 'AdditionalOptions': [ + '/ltcg', + '/expectedoutputsize:120000000' + ], + }, + 'VCLinkerTool': { + 'AdditionalOptions': [ + '/time', + # This may reduce memory fragmentation during linking. + # The expected size is 40*1024*1024, which gives us about 10M of + # headroom as of Dec 16, 2011. + '/expectedoutputsize:41943040', + ], + 'LinkTimeCodeGeneration': '1', + # The /PROFILE flag causes the linker to add a "FIXUP" debug stream to + # the generated PDB. According to MSDN documentation, this flag is only + # available (or perhaps supported) in the Enterprise (team development) + # version of Visual Studio. If this blocks your official build, simply + # comment out this line, then re-run "gclient runhooks". 
+ 'Profile': 'true', + }, + }, +} diff --git a/build/gyp_includes/release.gypi b/build/gyp_includes/release.gypi new file mode 100644 index 0000000000..7595ef5a29 --- /dev/null +++ b/build/gyp_includes/release.gypi @@ -0,0 +1,17 @@ +{ + 'conditions': [ + # Handle build types. + ['buildtype=="Dev"', { + 'includes': ['internal/release_impl.gypi'], + }], + ['buildtype=="Official"', { + 'includes': ['internal/release_impl_official.gypi'], + }], + # TODO(bradnelson): may also need: + # checksenabled + # coverage + # dom_stats + # pgo_instrument + # pgo_optimize + ], +} diff --git a/build/liblowercase/Cargo.lock b/build/liblowercase/Cargo.lock new file mode 100644 index 0000000000..4449d1ebc7 --- /dev/null +++ b/build/liblowercase/Cargo.lock @@ -0,0 +1,237 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +[[package]] +name = "c2-chacha" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "214238caa1bf3a496ec3392968969cab8549f96ff30652c9e56885329315f6bb" +dependencies = [ + "ppv-lite86", +] + +[[package]] +name = "cfg-if" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" + +[[package]] +name = "getrandom" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7abc8dd8451921606d809ba32e95b6111925cd2906060d2dcc29c070220503eb" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "libc" +version = "0.2.67" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb147597cdf94ed43ab7a9038716637d2d1bf2bc571da995d0028dec06bd3018" + +[[package]] +name = "lowercase" +version = "0.1.0" +dependencies = [ + "libc", + "once_cell", + "paste", + "path-dedot", + "tempfile", +] + +[[package]] +name = "once_cell" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1c601810575c99596d4afc46f78a678c80105117c379eb3650cf99b8a21ce5b" + +[[package]] +name = "paste" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63e1afe738d71b1ebab5f1207c055054015427dbfc7bbe9ee1266894156ec046" +dependencies = [ + "paste-impl", + "proc-macro-hack", +] + +[[package]] +name = "paste-impl" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d4dc4a7f6f743211c5aab239640a65091535d97d43d92a52bca435a640892bb" +dependencies = [ + "proc-macro-hack", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "path-dedot" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cf32f6a3b529384739d9c11c230ad760aeb553061e7834f58de63a7c507f24f" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "ppv-lite86" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74490b50b9fbe561ac330df47c08f3f33073d2d00c150f719147d7c54522fa1b" + +[[package]] +name = "proc-macro-hack" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecd45702f76d6d3c75a80564378ae228a85f0b59d2f3ed43c91b4a69eb2ebfc5" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "proc-macro2" 
+version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c09721c6781493a2a492a96b5a5bf19b65917fe6728884e7c44dd0c60ca3435" +dependencies = [ + "unicode-xid", +] + +[[package]] +name = "quote" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" +dependencies = [ + "getrandom", + "libc", + "rand_chacha", + "rand_core", + "rand_hc", +] + +[[package]] +name = "rand_chacha" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03a2a90da8c7523f554344f921aa97283eadf6ac484a6d2a7d0212fa7f8d6853" +dependencies = [ + "c2-chacha", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" +dependencies = [ + "getrandom", +] + +[[package]] +name = "rand_hc" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" +dependencies = [ + "rand_core", +] + +[[package]] +name = "redox_syscall" +version = "0.1.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2439c63f3f6139d1b57529d16bc3b8bb855230c8efcc5d3a896c8bea7c3b1e84" + +[[package]] +name = "remove_dir_all" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e" +dependencies = [ + "winapi", +] + +[[package]] +name = "syn" +version = "1.0.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "123bd9499cfb380418d509322d7a6d52e5315f064fe4b3ad18a53d6b92c07859" +dependencies = [ + "proc-macro2", + "quote", + "unicode-xid", +] + +[[package]] +name = "tempfile" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9" +dependencies = [ + "cfg-if", + "libc", + "rand", + "redox_syscall", + "remove_dir_all", + "winapi", +] + +[[package]] +name = "unicode-xid" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c" + +[[package]] +name = "wasi" +version = "0.9.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" + +[[package]] +name = "winapi" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8093091eeb260906a183e6ae1abdba2ef5ef2257a21801128899c3fc699229c6" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" diff --git a/build/liblowercase/Cargo.toml b/build/liblowercase/Cargo.toml new file mode 100644 index 0000000000..2f26edd718 --- /dev/null +++ b/build/liblowercase/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "lowercase" +version = "0.1.0" +authors = ["Mike Hommey "] +edition = "2018" +license = "MPL-2.0" + +[lib] +crate-type = ["cdylib"] +path = "lib.rs" + +[dependencies] +libc = "0.2" +once_cell = "1" +paste = "0.1" +path-dedot = "1" + +[dev-dependencies] +tempfile = "3" + +[profile.release] +lto = true diff --git a/build/liblowercase/lib.rs b/build/liblowercase/lib.rs new file mode 100644 index 0000000000..9e068a8c38 --- /dev/null +++ b/build/liblowercase/lib.rs @@ -0,0 +1,252 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +/* LD_PRELOAD library that intercepts some libc functions and lowercases + * paths under a given set of directories before calling the real libc + * functions. + * + * The set of directories is defined with the LOWERCASE_DIRS environment + * variable, separated with a `:`. + * + * Only the parts of the directories below the LOWERCASE_DIRS directories + * are lowercased. + * + * For example, with LOWERCASE_DIRS=/Foo:/Bar: + * `/home/QuX` is unchanged. + * `/Foo/QuX` becomes `/Foo/qux`. + * `/foo/QuX` is unchanged. + * `/Bar/QuX` becomes `/Bar/qux`. + * etc. + * + * This is, by no means, supposed to be a generic LD_PRELOAD library. It + * only intercepts the libc functions that matter in order to build Firefox. + */ + +use std::borrow::Cow; +use std::env::{self, current_dir}; +use std::ffi::{c_void, CStr, CString, OsStr, OsString}; +use std::mem::transmute; +use std::os::raw::{c_char, c_int}; +use std::os::unix::ffi::{OsStrExt, OsStringExt}; +use std::path::{Path, PathBuf}; +use std::ptr::null; + +use once_cell::sync::Lazy; +use path_dedot::ParseDot; + +#[cfg(not(all(target_arch = "x86_64", target_os = "linux", target_env = "gnu")))] +compile_error!("Platform is not supported"); + +static LOWERCASE_DIRS: Lazy> = Lazy::new(|| match env::var_os("LOWERCASE_DIRS") { + None => Vec::new(), + Some(value) => value + .as_bytes() + .split(|&c| c == b':') + .map(|p| canonicalize_path(Path::new(OsStr::from_bytes(p))).into_owned()) + .collect(), +}); + +fn canonicalize_path(path: &Path) -> Cow { + let path = if path.is_absolute() { + Cow::Borrowed(path) + } else { + match current_dir() { + Ok(cwd) => Cow::Owned(cwd.join(path)), + Err(_) => Cow::Borrowed(path), + } + }; + + // TODO: avoid allocation when the path doesn't need .. / . removals. 
+ Cow::Owned(path.parse_dot().unwrap()) +} + +#[test] +fn test_canonicalize_path() { + use std::env::set_current_dir; + use std::iter::repeat; + use tempfile::tempdir; + + fn do_test(curdir: &Path) { + let foobarbaz = curdir.join("foo/bar/baz"); + + assert_eq!(foobarbaz, canonicalize_path(Path::new("foo/bar/baz"))); + assert_eq!(foobarbaz, canonicalize_path(Path::new("./foo/bar/baz"))); + assert_eq!(foobarbaz, canonicalize_path(Path::new("foo/./bar/baz"))); + assert_eq!(foobarbaz, canonicalize_path(Path::new("foo/././bar/baz"))); + assert_eq!( + foobarbaz, + canonicalize_path(Path::new("foo/././bar/qux/../baz")) + ); + assert_eq!( + foobarbaz, + canonicalize_path(Path::new("foo/./bar/../qux/../bar/baz")) + ); + assert_eq!( + foobarbaz, + canonicalize_path(Path::new("foo/bar/./../../foo/bar/baz")) + ); + + let depth = curdir.components().count(); + for depth in depth..=depth + 1 { + let path = repeat("..").take(depth).collect::<Vec<_>>(); + let mut path = path.join("/"); + path.push_str("/foo/bar/baz"); + + assert_eq!( + Path::new("/foo/bar/baz"), + canonicalize_path(Path::new(&path)) + ); + } + } + + let orig_curdir = current_dir().unwrap(); + + do_test(&orig_curdir); + + let tempdir = tempdir().unwrap(); + set_current_dir(&tempdir).unwrap(); + + do_test(tempdir.path()); + + set_current_dir(orig_curdir).unwrap(); +} + +fn normalize_path(path: &CStr) -> Cow<CStr> { + let orig_path = path; + let path = Path::new(OsStr::from_bytes(orig_path.to_bytes())); + match normalize_path_for_dirs(&path, &LOWERCASE_DIRS) { + Cow::Borrowed(_) => Cow::Borrowed(orig_path), + Cow::Owned(p) => Cow::Owned(CString::new(p.into_os_string().into_vec()).unwrap()), + } +} + +fn normalize_path_for_dirs<'a>(path: &'a Path, dirs: &[PathBuf]) -> Cow<'a, Path> { + let orig_path = path; + let path = canonicalize_path(path); + + for lowercase_dir in dirs.iter() { + if path.starts_with(lowercase_dir) { + // TODO: avoid allocation when the string doesn't actually need + // modification.
+ let mut lowercased_path = path.into_owned().into_os_string().into_vec(); + lowercased_path[lowercase_dir.as_os_str().as_bytes().len()..].make_ascii_lowercase(); + return Cow::Owned(OsString::from_vec(lowercased_path).into()); + } + } + + Cow::Borrowed(orig_path) +} + +#[test] +fn test_normalize_path() { + let paths = vec![ + Path::new("/Foo/Bar").to_owned(), + Path::new("/Qux").to_owned(), + current_dir().unwrap().join("Fuga"), + ]; + + assert_eq!( + normalize_path_for_dirs(Path::new("/foo/bar/Baz"), &paths), + Path::new("/foo/bar/Baz") + ); + assert_eq!( + normalize_path_for_dirs(Path::new("/Foo/Bar/Baz"), &paths), + Path::new("/Foo/Bar/baz") + ); + assert_eq!( + normalize_path_for_dirs(Path::new("/Foo/BarBaz"), &paths), + Path::new("/Foo/BarBaz") + ); + assert_eq!( + normalize_path_for_dirs(Path::new("/Foo/Bar"), &paths), + Path::new("/Foo/Bar") + ); + assert_eq!( + normalize_path_for_dirs(Path::new("/Foo/Bar/Baz/../Qux"), &paths), + Path::new("/Foo/Bar/qux") + ); + assert_eq!( + normalize_path_for_dirs(Path::new("/Foo/Bar/Baz/../../Qux"), &paths), + Path::new("/Foo/Bar/Baz/../../Qux") + ); + assert_eq!( + normalize_path_for_dirs(Path::new("/Qux/Foo/Bar/Baz"), &paths), + Path::new("/Qux/foo/bar/baz") + ); + assert_eq!( + normalize_path_for_dirs(Path::new("/foo/../Qux/Baz"), &paths), + Path::new("/Qux/baz") + ); + assert_eq!( + normalize_path_for_dirs(Path::new("fuga/Foo/Bar"), &paths), + Path::new("fuga/Foo/Bar") + ); + assert_eq!( + normalize_path_for_dirs(Path::new("Fuga/Foo/Bar"), &paths), + current_dir().unwrap().join("Fuga/foo/bar") + ); + assert_eq!( + normalize_path_for_dirs(Path::new("Fuga/../Foo/Bar"), &paths), + Path::new("Fuga/../Foo/Bar") + ); +} + +macro_rules! wrappers { + ($(fn $name:ident($( $a:ident : $t:ty ),*) $( -> $ret:ty)?;)*) => { + $( + paste::item! { + #[allow(non_upper_case_globals)] + static [< real $name >]: Lazy<unsafe extern "C" fn($($t),*) $(-> $ret)?> = + Lazy::new(|| unsafe { + transmute(libc::dlsym( + libc::RTLD_NEXT, + concat!(stringify!($name), "\0").as_ptr() as _ + )) + }); + #[no_mangle] + unsafe extern "C" fn $name($($a : $t),*) $(-> $ret)? { + $( wrappers!(@normalize ($a: $t)); )* + [< real $name >]($($a),*) + } + } + )* + }; + (@normalize ($a:ident: *const c_char)) => { + let $a = if $a.is_null() { + None + } else { + Some(normalize_path(CStr::from_ptr($a))) + }; + let $a = $a.as_ref().map(|p| p.as_ptr()).unwrap_or(null()); + }; + (@normalize ($a:ident: $t:ty)) => {} +} + +// Note: actual definitions for e.g. fopen/fopen64 would be using c_char +// instead of c_void for mode, but the wrappers macro treats all `*const c_char`s +// as "to maybe be lowercased". +wrappers!
{ + fn open(path: *const c_char, flags: c_int, mode: libc::mode_t) -> c_int; + fn open64(path: *const c_char, flags: c_int, mode: libc::mode_t) -> c_int; + fn fopen(path: *const c_char, mode: *const c_void) -> *mut libc::FILE; + fn fopen64(path: *const c_char, mode: *const c_void) -> *mut libc::FILE; + + fn opendir(path: *const c_char) -> *mut libc::DIR; + + fn __xstat(ver: c_int, path: *const c_char, buf: *mut libc::stat) -> c_int; + fn __xstat64(ver: c_int, path: *const c_char, buf: *mut libc::stat64) -> c_int; + + fn __lxstat(ver: c_int, path: *const c_char, buf: *mut libc::stat) -> c_int; + fn __lxstat64(ver: c_int, path: *const c_char, buf: *mut libc::stat64) -> c_int; + fn __fxstatat(ver: c_int, fd: c_int, path: *const c_char, buf: *mut libc::stat, flag: c_int) -> c_int; + fn __fxstatat64(ver: c_int, fd: c_int, path: *const c_char, buf: *mut libc::stat64, flag: c_int) -> c_int; + + fn access(path: *const c_char, mode: c_int) -> c_int; + + fn mkdir(path: *const c_char, mode: libc::mode_t) -> c_int; + + fn chdir(path: *const c_char) -> c_int; + + fn symlink(target: *const c_char, linkpath: *const c_char) -> c_int; +} diff --git a/build/mach_bootstrap.py b/build/mach_bootstrap.py new file mode 100644 index 0000000000..7cda9b8d19 --- /dev/null +++ b/build/mach_bootstrap.py @@ -0,0 +1,597 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +from __future__ import division, print_function, unicode_literals + +import errno +import json +import math +import os +import platform +import shutil +import subprocess +import sys +import uuid + +if sys.version_info[0] < 3: + import __builtin__ as builtins +else: + import builtins + +from types import ModuleType + + +STATE_DIR_FIRST_RUN = """ +mach and the build system store shared state in a common directory on the +filesystem. The following directory will be created: + + {userdir} + +If you would like to use a different directory, hit CTRL+c and set the +MOZBUILD_STATE_PATH environment variable to the directory you would like to +use and re-run mach. For this change to take effect forever, you'll likely +want to export this environment variable from your shell's init scripts. + +Press ENTER/RETURN to continue or CTRL+c to abort. +""".lstrip() + + +# Individual files providing mach commands. 
+MACH_MODULES = [ + "build/valgrind/mach_commands.py", + "devtools/shared/css/generated/mach_commands.py", + "dom/bindings/mach_commands.py", + "js/src/devtools/rootAnalysis/mach_commands.py", + "layout/tools/reftest/mach_commands.py", + "mobile/android/mach_commands.py", + "python/mach/mach/commands/commandinfo.py", + "python/mach/mach/commands/settings.py", + "python/mach_commands.py", + "python/mozboot/mozboot/mach_commands.py", + "python/mozbuild/mozbuild/artifact_commands.py", + "python/mozbuild/mozbuild/backend/mach_commands.py", + "python/mozbuild/mozbuild/build_commands.py", + "python/mozbuild/mozbuild/code_analysis/mach_commands.py", + "python/mozbuild/mozbuild/compilation/codecomplete.py", + "python/mozbuild/mozbuild/frontend/mach_commands.py", + "python/mozbuild/mozbuild/vendor/mach_commands.py", + "python/mozbuild/mozbuild/mach_commands.py", + "python/mozperftest/mozperftest/mach_commands.py", + "python/mozrelease/mozrelease/mach_commands.py", + "remote/mach_commands.py", + "taskcluster/mach_commands.py", + "testing/awsy/mach_commands.py", + "testing/condprofile/mach_commands.py", + "testing/firefox-ui/mach_commands.py", + "testing/geckodriver/mach_commands.py", + "testing/mach_commands.py", + "testing/marionette/mach_commands.py", + "testing/mochitest/mach_commands.py", + "testing/mozharness/mach_commands.py", + "testing/raptor/mach_commands.py", + "testing/talos/mach_commands.py", + "testing/tps/mach_commands.py", + "testing/web-platform/mach_commands.py", + "testing/xpcshell/mach_commands.py", + "toolkit/components/telemetry/tests/marionette/mach_commands.py", + "tools/browsertime/mach_commands.py", + "tools/compare-locales/mach_commands.py", + "tools/lint/mach_commands.py", + "tools/mach_commands.py", + "tools/moztreedocs/mach_commands.py", + "tools/phabricator/mach_commands.py", + "tools/power/mach_commands.py", + "tools/tryselect/mach_commands.py", + "tools/vcs/mach_commands.py", +] + + +CATEGORIES = { + "build": { + "short": "Build Commands", + "long": "Interact with the build system", + "priority": 80, + }, + "post-build": { + "short": "Post-build Commands", + "long": "Common actions performed after completing a build.", + "priority": 70, + }, + "testing": { + "short": "Testing", + "long": "Run tests.", + "priority": 60, + }, + "ci": { + "short": "CI", + "long": "Taskcluster commands", + "priority": 59, + }, + "devenv": { + "short": "Development Environment", + "long": "Set up and configure your development environment.", + "priority": 50, + }, + "build-dev": { + "short": "Low-level Build System Interaction", + "long": "Interact with specific parts of the build system.", + "priority": 20, + }, + "misc": { + "short": "Potpourri", + "long": "Potent potables and assorted snacks.", + "priority": 10, + }, + "release": { + "short": "Release automation", + "long": "Commands for used in release automation.", + "priority": 5, + }, + "disabled": { + "short": "Disabled", + "long": "The disabled commands are hidden by default. Use -v to display them. 
" + "These commands are unavailable for your current context, " + 'run "mach " to see why.', + "priority": 0, + }, +} + + +def search_path(mozilla_dir, packages_txt): + with open(os.path.join(mozilla_dir, packages_txt)) as f: + packages = [line.rstrip().split(":") for line in f] + + def handle_package(package): + if package[0] == "optional": + try: + for path in handle_package(package[1:]): + yield path + except Exception: + pass + + if package[0] in ("windows", "!windows"): + for_win = not package[0].startswith("!") + is_win = sys.platform == "win32" + if is_win == for_win: + for path in handle_package(package[1:]): + yield path + + if package[0] in ("python2", "python3"): + for_python3 = package[0].endswith("3") + is_python3 = sys.version_info[0] > 2 + if is_python3 == for_python3: + for path in handle_package(package[1:]): + yield path + + if package[0] == "packages.txt": + assert len(package) == 2 + for p in search_path(mozilla_dir, package[1]): + yield os.path.join(mozilla_dir, p) + + if package[0].endswith(".pth"): + assert len(package) == 2 + yield os.path.join(mozilla_dir, package[1]) + + for package in packages: + for path in handle_package(package): + yield path + + +def mach_sys_path(mozilla_dir): + return [ + os.path.join(mozilla_dir, path) + for path in search_path(mozilla_dir, "build/mach_virtualenv_packages.txt") + ] + + +def bootstrap(topsrcdir, mozilla_dir=None): + if mozilla_dir is None: + mozilla_dir = topsrcdir + + # Ensure we are running Python 2.7 or 3.5+. We put this check here so we + # generate a user-friendly error message rather than a cryptic stack trace + # on module import. + major, minor = sys.version_info[:2] + if (major == 2 and minor < 7) or (major == 3 and minor < 5): + print("Python 2.7 or Python 3.5+ is required to run mach.") + print("You are running Python", platform.python_version()) + sys.exit(1) + + # This directory was deleted in bug 1666345, but there may be some ignored + # files here. We can safely just delete it for the user so they don't have + # to clean the repo themselves. + deleted_dir = os.path.join(topsrcdir, "third_party", "python", "psutil") + if os.path.exists(deleted_dir): + shutil.rmtree(deleted_dir, ignore_errors=True) + + # Global build system and mach state is stored in a central directory. By + # default, this is ~/.mozbuild. However, it can be defined via an + # environment variable. We detect first run (by lack of this directory + # existing) and notify the user that it will be created. The logic for + # creation is much simpler for the "advanced" environment variable use + # case. For default behavior, we educate users and give them an opportunity + # to react. We always exit after creating the directory because users don't + # like surprises. + sys.path[0:0] = mach_sys_path(mozilla_dir) + import mach.base + import mach.main + from mach.util import setenv + from mozboot.util import get_state_dir + + # Set a reasonable limit to the number of open files. + # + # Some linux systems set `ulimit -n` to a very high number, which works + # well for systems that run servers, but this setting causes performance + # problems when programs close file descriptors before forking, like + # Python's `subprocess.Popen(..., close_fds=True)` (close_fds=True is the + # default in Python 3), or Rust's stdlib. In some cases, Firefox does the + # same thing when spawning processes. We would prefer to lower this limit + # to avoid such performance problems; processes spawned by `mach` will + # inherit the limit set here. 
+ # + # The Firefox build defaults the soft limit to 1024, except for builds that + # do LTO, where the soft limit is 8192. We're going to default to the + # latter, since people do occasionally do LTO builds on their local + # machines, and requiring them to discover another magical setting after + # setting up an LTO build in the first place doesn't seem good. + # + # This code mimics the code in taskcluster/scripts/run-task. + try: + import resource + + # Keep the hard limit the same, though, allowing processes to change + # their soft limit if they need to (Firefox does, for instance). + (soft, hard) = resource.getrlimit(resource.RLIMIT_NOFILE) + # Permit people to override our default limit if necessary via + # MOZ_LIMIT_NOFILE, which is the same variable `run-task` uses. + limit = os.environ.get("MOZ_LIMIT_NOFILE") + if limit: + limit = int(limit) + else: + # If no explicit limit is given, use our default if it's less than + # the current soft limit. For instance, the default on macOS is + # 256, so we'd pick that rather than our default. + limit = min(soft, 8192) + # Now apply the limit, if it's different from the original one. + if limit != soft: + resource.setrlimit(resource.RLIMIT_NOFILE, (limit, hard)) + except ImportError: + # The resource module is UNIX only. + pass + + from mozbuild.util import patch_main + + patch_main() + + def resolve_repository(): + import mozversioncontrol + + try: + # This API doesn't respect the vcs binary choices from configure. + # If we ever need to use the VCS binary here, consider something + # more robust. + return mozversioncontrol.get_repository_object(path=mozilla_dir) + except (mozversioncontrol.InvalidRepoPath, mozversioncontrol.MissingVCSTool): + return None + + def pre_dispatch_handler(context, handler, args): + # If --disable-tests flag was enabled in the mozconfig used to compile + # the build, tests will be disabled. Instead of trying to run + # nonexistent tests then reporting a failure, this will prevent mach + # from progressing beyond this point. + if handler.category == "testing" and not handler.ok_if_tests_disabled: + from mozbuild.base import BuildEnvironmentNotFoundException + + try: + from mozbuild.base import MozbuildObject + + # all environments should have an instance of build object. + build = MozbuildObject.from_environment() + if build is not None and hasattr(build, "mozconfig"): + ac_options = build.mozconfig["configure_args"] + if ac_options and "--disable-tests" in ac_options: + print( + "Tests have been disabled by mozconfig with the flag " + + '"ac_add_options --disable-tests".\n' + + "Remove the flag, and re-compile to enable tests." + ) + sys.exit(1) + except BuildEnvironmentNotFoundException: + # likely automation environment, so do nothing. + pass + + def post_dispatch_handler( + context, handler, instance, success, start_time, end_time, depth, args + ): + """Perform global operations after command dispatch. + + + For now, we will use this to handle build system telemetry. + """ + + # Don't finalize telemetry data if this mach command was invoked as part of + # another mach command. 
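+ # depth is 1 for the outermost dispatch; nested dispatches see a larger
+ # depth and skip finalization here (the MACH_MAIN_PID marker set later in
+ # bootstrap() plays the same role across separate mach processes).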
+ if depth != 1: + return + + _finalize_telemetry_glean( + context.telemetry, handler.name == "bootstrap", success + ) + _finalize_telemetry_legacy( + context, instance, handler, success, start_time, end_time, topsrcdir + ) + + def populate_context(key=None): + if key is None: + return + if key == "state_dir": + state_dir = get_state_dir() + if state_dir == os.environ.get("MOZBUILD_STATE_PATH"): + if not os.path.exists(state_dir): + print( + "Creating global state directory from environment variable: %s" + % state_dir + ) + os.makedirs(state_dir, mode=0o770) + else: + if not os.path.exists(state_dir): + if not os.environ.get("MOZ_AUTOMATION"): + print(STATE_DIR_FIRST_RUN.format(userdir=state_dir)) + try: + sys.stdin.readline() + except KeyboardInterrupt: + sys.exit(1) + + print("\nCreating default state directory: %s" % state_dir) + os.makedirs(state_dir, mode=0o770) + + return state_dir + + if key == "local_state_dir": + return get_state_dir(srcdir=True) + + if key == "topdir": + return topsrcdir + + if key == "pre_dispatch_handler": + return pre_dispatch_handler + + if key == "post_dispatch_handler": + return post_dispatch_handler + + if key == "repository": + return resolve_repository() + + raise AttributeError(key) + + # Note which process is top-level so that recursive mach invocations can avoid writing + # telemetry data. + if "MACH_MAIN_PID" not in os.environ: + setenv("MACH_MAIN_PID", str(os.getpid())) + + driver = mach.main.Mach(os.getcwd()) + driver.populate_context_handler = populate_context + + if not driver.settings_paths: + # default global machrc location + driver.settings_paths.append(get_state_dir()) + # always load local repository configuration + driver.settings_paths.append(mozilla_dir) + + for category, meta in CATEGORIES.items(): + driver.define_category(category, meta["short"], meta["long"], meta["priority"]) + + repo = resolve_repository() + + for path in MACH_MODULES: + # Sparse checkouts may not have all mach_commands.py files. Ignore + # errors from missing files. + try: + driver.load_commands_from_file(os.path.join(mozilla_dir, path)) + except mach.base.MissingFileError: + if not repo or not repo.sparse_checkout_present(): + raise + + return driver + + +def _finalize_telemetry_legacy( + context, instance, handler, success, start_time, end_time, topsrcdir +): + """Record and submit legacy telemetry. + + Parameterized by the raw gathered telemetry, this function handles persisting and + submission of the data. + + This has been designated as "legacy" telemetry because modern telemetry is being + submitted with "Glean". + """ + from mozboot.util import get_state_dir + from mozbuild.base import MozbuildObject + from mozbuild.telemetry import gather_telemetry + from mach.telemetry import is_telemetry_enabled, is_applicable_telemetry_environment + + if not ( + is_applicable_telemetry_environment() and is_telemetry_enabled(context.settings) + ): + return + + if not isinstance(instance, MozbuildObject): + instance = MozbuildObject.from_environment() + + command_attrs = getattr(context, "command_attrs", {}) + + # We gather telemetry for every operation. 
+ data = gather_telemetry( + command=handler.name, + success=success, + start_time=start_time, + end_time=end_time, + mach_context=context, + instance=instance, + command_attrs=command_attrs, + ) + if data: + telemetry_dir = os.path.join(get_state_dir(), "telemetry") + try: + os.mkdir(telemetry_dir) + except OSError as e: + if e.errno != errno.EEXIST: + raise + outgoing_dir = os.path.join(telemetry_dir, "outgoing") + try: + os.mkdir(outgoing_dir) + except OSError as e: + if e.errno != errno.EEXIST: + raise + + with open(os.path.join(outgoing_dir, str(uuid.uuid4()) + ".json"), "w") as f: + json.dump(data, f, sort_keys=True) + + # The user is performing a maintenance command, skip the upload + if handler.name in ( + "bootstrap", + "doctor", + "mach-commands", + "vcs-setup", + "create-mach-environment", + "install-moz-phab", + # We call mach environment in client.mk which would cause the + # data submission to block the forward progress of make. + "environment", + ): + return False + + if "TEST_MACH_TELEMETRY_NO_SUBMIT" in os.environ: + # In our telemetry tests, we want telemetry to be collected for analysis, but + # we don't want it submitted. + return False + + state_dir = get_state_dir() + + machpath = os.path.join(instance.topsrcdir, "mach") + with open(os.devnull, "wb") as devnull: + subprocess.Popen( + [ + sys.executable, + machpath, + "python", + "--no-virtualenv", + os.path.join(topsrcdir, "build", "submit_telemetry_data.py"), + state_dir, + ], + stdout=devnull, + stderr=devnull, + ) + + +def _finalize_telemetry_glean(telemetry, is_bootstrap, success): + """Submit telemetry collected by Glean. + + Finalizes some metrics (command success state and duration, system information) and + requests Glean to send the collected data. + """ + + from mach.telemetry import MACH_METRICS_PATH + from mozbuild.telemetry import ( + get_cpu_brand, + get_distro_and_version, + get_psutil_stats, + ) + + mach_metrics = telemetry.metrics(MACH_METRICS_PATH) + mach_metrics.mach.duration.stop() + mach_metrics.mach.success.set(success) + system_metrics = mach_metrics.mach.system + system_metrics.cpu_brand.set(get_cpu_brand()) + distro, version = get_distro_and_version() + system_metrics.distro.set(distro) + system_metrics.distro_version.set(version) + + has_psutil, logical_cores, physical_cores, memory_total = get_psutil_stats() + if has_psutil: + # psutil may not be available (we allow `mach create-mach-environment` + # to fail to install it). + system_metrics.logical_cores.add(logical_cores) + system_metrics.physical_cores.add(physical_cores) + if memory_total is not None: + system_metrics.memory.accumulate( + int(math.ceil(float(memory_total) / (1024 * 1024 * 1024))) + ) + telemetry.submit(is_bootstrap) + + +# Hook import such that .pyc/.pyo files without a corresponding .py file in +# the source directory are essentially ignored. See further below for details +# and caveats. +# Objdirs outside the source directory are ignored because in most cases, if +# a .pyc/.pyo file exists there, a .py file will be next to it anyways. +class ImportHook(object): + def __init__(self, original_import): + self._original_import = original_import + # Assume the source directory is the parent directory of the one + # containing this file. 
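+ # (For this module, which lives under build/, that parent is normally the
+ # top source directory.)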
+ self._source_dir = ( + os.path.normcase( + os.path.abspath(os.path.dirname(os.path.dirname(__file__))) + ) + + os.sep + ) + self._modules = set() + + def __call__(self, name, globals=None, locals=None, fromlist=None, level=-1): + if sys.version_info[0] >= 3 and level < 0: + level = 0 + + # name might be a relative import. Instead of figuring out what that + # resolves to, which is complex, just rely on the real import. + # Since we don't know the full module name, we can't check sys.modules, + # so we need to keep track of which modules we've already seen to avoid + # to stat() them again when they are imported multiple times. + module = self._original_import(name, globals, locals, fromlist, level) + + # Some tests replace modules in sys.modules with non-module instances. + if not isinstance(module, ModuleType): + return module + + resolved_name = module.__name__ + if resolved_name in self._modules: + return module + self._modules.add(resolved_name) + + # Builtin modules don't have a __file__ attribute. + if not getattr(module, "__file__", None): + return module + + # Note: module.__file__ is not always absolute. + path = os.path.normcase(os.path.abspath(module.__file__)) + # Note: we could avoid normcase and abspath above for non pyc/pyo + # files, but those are actually rare, so it doesn't really matter. + if not path.endswith((".pyc", ".pyo")): + return module + + # Ignore modules outside our source directory + if not path.startswith(self._source_dir): + return module + + # If there is no .py corresponding to the .pyc/.pyo module we're + # loading, remove the .pyc/.pyo file, and reload the module. + # Since we already loaded the .pyc/.pyo module, if it had side + # effects, they will have happened already, and loading the module + # with the same name, from another directory may have the same side + # effects (or different ones). We assume it's not a problem for the + # python modules under our source directory (either because it + # doesn't happen or because it doesn't matter). + if not os.path.exists(module.__file__[:-1]): + if os.path.exists(module.__file__): + os.remove(module.__file__) + del sys.modules[module.__name__] + module = self(name, globals, locals, fromlist, level) + + return module + + +# Install our hook. This can be deleted when the Python 3 migration is complete. +if sys.version_info[0] < 3: + builtins.__import__ = ImportHook(builtins.__import__) diff --git a/build/mach_virtualenv_packages.txt b/build/mach_virtualenv_packages.txt new file mode 100644 index 0000000000..bdc4387be4 --- /dev/null +++ b/build/mach_virtualenv_packages.txt @@ -0,0 +1,2 @@ +packages.txt:build/common_virtualenv_packages.txt +set-variable MACH_VIRTUALENV=1 diff --git a/build/macosx/cross-mozconfig.common b/build/macosx/cross-mozconfig.common new file mode 100644 index 0000000000..5e9902f7af --- /dev/null +++ b/build/macosx/cross-mozconfig.common @@ -0,0 +1,43 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +. "$topsrcdir/build/mozconfig.common" + +# cctools for ld, ar, and other related tools ; dsymutil for rust. +mk_add_options "export PATH=$MOZ_FETCHES_DIR/cctools/bin:$MOZ_FETCHES_DIR/binutils/bin:$MOZ_FETCHES_DIR/llvm-dsymutil/bin:$PATH" + +# dsymutil needs a libstdc++ more recent than what's on the system. 
+mk_add_options "export LD_LIBRARY_PATH=$MOZ_FETCHES_DIR/clang/lib" + +# This SDK was copied from a local XCode install and uploaded to tooltool. +# Generate the tarball by running this command with the proper SDK version: +# sdk_path=$(xcrun --sdk macosx10.12 --show-sdk-path) +# tar -C $(dirname ${sdk_path}) -cHjf /tmp/$(basename ${sdk_path}).tar.bz2 $(basename ${sdk_path}) +# Upload the resulting tarball from /tmp to tooltool, and change the entry in +# `browser/config/tooltool-manifests/macosx64/cross-releng.manifest`. +CROSS_SYSROOT=$topsrcdir/MacOSX10.12.sdk + +export CFLAGS="$CFLAGS -fcrash-diagnostics-dir=${UPLOAD_PATH}" +export CXXFLAGS="$CXXFLAGS -fcrash-diagnostics-dir=${UPLOAD_PATH}" +export DSYMUTIL=$topsrcdir/build/macosx/llvm-dsymutil +mk_add_options "export REAL_DSYMUTIL=$MOZ_FETCHES_DIR/llvm-dsymutil/bin/dsymutil" +export MKFSHFS=$MOZ_FETCHES_DIR/hfsplus-tools/newfs_hfs +export DMG_TOOL=$MOZ_FETCHES_DIR/dmg/dmg +export HFS_TOOL=$MOZ_FETCHES_DIR/dmg/hfsplus + +export HOST_CFLAGS="-g" +export HOST_CXXFLAGS="-g" +export HOST_LDFLAGS="-g" + +ac_add_options --target=x86_64-apple-darwin +export MACOS_SDK_DIR=$CROSS_SYSROOT + +if [ "x$MOZ_PKG_SPECIAL" != "xasan" -a -z "$MOZ_AUTOMATION_ARTIFACT_BUILDS" ]; then + # Enable static analysis checks by default on OSX cross builds. + # Exception is ASan, where this breaks. + # The option is not valid on artifact builds, so don't add it there either. + ac_add_options --enable-clang-plugin +fi + +unset MOZ_STDCXX_COMPAT diff --git a/build/macosx/llvm-dsymutil b/build/macosx/llvm-dsymutil new file mode 100755 index 0000000000..2deb78f4b6 --- /dev/null +++ b/build/macosx/llvm-dsymutil @@ -0,0 +1,75 @@ +#!/bin/sh +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +"$REAL_DSYMUTIL" "$@" +ret=$? +if [ $ret -ne 139 ]; then + exit $ret +fi + +echo "$REAL_DSYMUTIL crashed. Trying to get a reduced testcase." >&2 +tmpdir=$(mktemp -d) +trap "rm -rf $tmpdir" EXIT + +# Get the library file name from the command line arguments. We assume +# it's the last argument that doesn't start with a dash. +for arg in "$@"; do + case "$arg" in + -*) + ;; + *) + lib="$arg" + ;; + esac +done + +last_obj=$("$REAL_DSYMUTIL" --verbose "$@" 2> /dev/null | sed -n "/trying to open/s/trying to open '\(.*\)'/\1/p" | tail -1) + +case "$last_obj" in +"") + echo "Could not produce a reduced testcase. Aborting." >&2 + # Ideally, we'd produce an archive with every .o and .a involved, but so + # far, this case has never happened, so, meh. + exit 139 + ;; +*.a\(*.o\)) + # The crash likely happened while reading one particular object in a library. + # Create a new library with just that one object. + archive=$(readlink -f "${last_obj%(*}") + obj="${last_obj#*.a(}" + obj="${obj%)}" + (cd "$tmpdir"; ar x "$archive" "$obj") + mkdir -p $tmpdir/crasher/$(dirname "$archive") + (cd "$tmpdir"; ar cr "$tmpdir/crasher/$archive" "$obj") + rm "$tmpdir/$obj" + ;; +*) + # The crash likely happened while reading one particular object. + obj=$(readlink -f "$last_obj") + mkdir -p "$tmpdir/crasher/$(dirname "$obj")" + cp "$obj" "$tmpdir/crasher/$obj" + ;; +esac +cp "$lib" "$tmpdir/crasher" +cat > "$tmpdir/crasher/run-me.sh" < /dev/null 2>&1) +if [ $? -eq 139 ]; then + echo "Could reproduce with a reduced testcase. Creating an artifact." 
>&2 + mkdir -p "$HOME/artifacts" + artifact=dsymutil-crasher.tar.xz + tar -Jcf "$HOME/artifacts/$artifact" -C "$tmpdir" crasher/ + echo "Check the $artifact artifact." >&2 +else + echo "Could not reproduce with a reduced testcase. Sorry." >&2 +fi + +exit 139 diff --git a/build/macosx/local-mozconfig.common b/build/macosx/local-mozconfig.common new file mode 100644 index 0000000000..8103d4a927 --- /dev/null +++ b/build/macosx/local-mozconfig.common @@ -0,0 +1,28 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +. "$topsrcdir/build/mozconfig.common" + +if [ -d "$MOZ_FETCHES_DIR/clang" ]; then + # mozilla-central based build + export DSYMUTIL=$MOZ_FETCHES_DIR/clang/bin/llvm-dsymutil + # Use an updated linker. + ldflags="-B$MOZ_FETCHES_DIR/cctools/bin" + export AR=$MOZ_FETCHES_DIR/cctools/bin/ar +fi + +# Ensure the updated linker doesn't generate things our older build tools +# don't understand. +ldflags="$ldflags -Wl,-no_data_in_code_info" +export LDFLAGS="$ldflags" + +# If not set use the system default clang +if [ -z "$CC" ]; then + export CC=clang +fi + +# If not set use the system default clang++ +if [ -z "$CXX" ]; then + export CXX=clang++ +fi diff --git a/build/macosx/mozconfig.common b/build/macosx/mozconfig.common new file mode 100644 index 0000000000..ad234d290f --- /dev/null +++ b/build/macosx/mozconfig.common @@ -0,0 +1,15 @@ +if test `uname -s` = Linux; then + . $topsrcdir/build/macosx/cross-mozconfig.common +else + . $topsrcdir/build/macosx/local-mozconfig.common +fi + +if [ -z "$USE_ARTIFACT" ]; then + if [ -n "$TASKCLUSTER_PGO_PROFILE_USE" ]; then + export MOZ_LTO=cross + ac_add_options --enable-profile-use=cross + ac_add_options --with-pgo-jarlog=${MOZ_FETCHES_DIR}/en-US.log + ac_add_options --with-pgo-profile-path=${MOZ_FETCHES_DIR}/merged.profdata + fi +fi + diff --git a/build/macosx/permissions/chown_revert.c b/build/macosx/permissions/chown_revert.c new file mode 100644 index 0000000000..72dc1e64d5 --- /dev/null +++ b/build/macosx/permissions/chown_revert.c @@ -0,0 +1,18 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include +#include + +int main(int argc, char** argv) { + if (argc != 2) return 1; + + uid_t realuser = getuid(); + char uidstring[20]; + snprintf(uidstring, 19, "%i", realuser); + uidstring[19] = '\0'; + + return execl("/usr/sbin/chown", "/usr/sbin/chown", "-R", "-h", uidstring, + argv[1], (char*)0); +} diff --git a/build/macosx/permissions/chown_root.c b/build/macosx/permissions/chown_root.c new file mode 100644 index 0000000000..e2ef111c7f --- /dev/null +++ b/build/macosx/permissions/chown_root.c @@ -0,0 +1,12 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include + +int main(int argc, char** argv) { + if (argc != 2) return 1; + + return execl("/usr/sbin/chown", "/usr/sbin/chown", "-R", "-h", "root:admin", + argv[1], (char*)0); +} diff --git a/build/midl.py b/build/midl.py new file mode 100644 index 0000000000..add17006d6 --- /dev/null +++ b/build/midl.py @@ -0,0 +1,93 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. 
If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import buildconfig +import subprocess +import os +import sys + + +def relativize(path, base=None): + # For absolute path in Unix builds, we need relative paths because + # Windows programs run via Wine don't like these Unix absolute paths + # (they look like command line arguments). + if path.startswith("/"): + return os.path.relpath(path, base) + # For Windows absolute paths, we can just use the unmodified path. + # And if the path starts with '-', it's a command line argument. + if os.path.isabs(path) or path.startswith("-"): + return path + # Remaining case is relative paths, which may be relative to a different + # directory (os.getcwd()) than the needed `base`, so we "rebase" it. + return os.path.relpath(path, base) + + +def midl(out, input, *flags): + out.avoid_writing_to_file() + midl = buildconfig.substs["MIDL"] + wine = buildconfig.substs.get("WINE") + base = os.path.dirname(out.name) or "." + if midl.lower().endswith(".exe") and wine: + command = [wine, midl] + else: + command = [midl] + command.extend(buildconfig.substs["MIDL_FLAGS"]) + command.extend([relativize(f, base) for f in flags]) + command.append("-Oicf") + command.append(relativize(input, base)) + print("Executing:", " ".join(command)) + result = subprocess.run(command, cwd=base) + return result.returncode + + +# midl outputs dlldata to a single dlldata.c file by default. This prevents running +# midl in parallel in the same directory for idl files that would generate dlldata.c +# because of race conditions updating the file. Instead, we ask midl to create +# separate files, and we merge them manually. +def merge_dlldata(out, *inputs): + inputs = [open(i) for i in inputs] + read_a_line = [True] * len(inputs) + while True: + lines = [ + f.readline() if read_a_line[n] else lines[n] for n, f in enumerate(inputs) + ] + unique_lines = set(lines) + if len(unique_lines) == 1: + # All the lines are identical + if not lines[0]: + break + out.write(lines[0]) + read_a_line = [True] * len(inputs) + elif ( + len(unique_lines) == 2 + and len([l for l in unique_lines if "#define" in l]) == 1 + ): + # Most lines are identical. When they aren't, it's typically because some + # files have an extra #define that others don't. When that happens, we + # print out the #define, and get a new input line from the files that had + # a #define on the next iteration. We expect that next line to match what + # the other files had on this iteration. + # Note: we explicitly don't support the case where there are different + # defines across different files, except when there's a different one + # for each file, in which case it's handled further below. + a = unique_lines.pop() + if "#define" in a: + out.write(a) + else: + out.write(unique_lines.pop()) + read_a_line = ["#define" in l for l in lines] + elif len(unique_lines) != len(lines): + # If for some reason, we don't get lines that are entirely different + # from each other, we have some unexpected input. + print( + "Error while merging dlldata. Last lines read: {}".format(lines), + file=sys.stderr, + ) + return 1 + else: + for line in lines: + out.write(line) + read_a_line = [True] * len(inputs) + + return 0 diff --git a/build/moz-automation.mk b/build/moz-automation.mk new file mode 100644 index 0000000000..28628065e1 --- /dev/null +++ b/build/moz-automation.mk @@ -0,0 +1,110 @@ +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. 
If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +ifndef ENABLE_TESTS +# We can't package tests if they aren't enabled. +MOZ_AUTOMATION_PACKAGE_TESTS = 0 +endif + +ifdef CROSS_COMPILE +# Narrow the definition of cross compilation to not include win32 builds +# on win64 and linux32 builds on linux64. +ifeq ($(HOST_OS_ARCH),$(OS_TARGET)) +ifneq (,$(filter x86%,$(CPU_ARCH))) +FUZZY_CROSS_COMPILE = +else +FUZZY_CROSS_COMPILE = 1 +endif +else +FUZZY_CROSS_COMPILE = 1 +endif +endif + +# Don't run make check when cross compiling, when doing artifact builds +# or when building instrumented builds for PGO. +ifneq (,$(USE_ARTIFACT)$(FUZZY_CROSS_COMPILE)$(MOZ_PROFILE_GENERATE)) +MOZ_AUTOMATION_CHECK := 0 +endif + +ifneq (,$(filter automation/%,$(MAKECMDGOALS))) +ifeq (4.0,$(firstword $(sort 4.0 $(MAKE_VERSION)))) +MAKEFLAGS += --output-sync=target +else +.NOTPARALLEL: +endif +endif + +ifndef JS_STANDALONE +include $(topsrcdir)/toolkit/mozapps/installer/package-name.mk +include $(topsrcdir)/toolkit/mozapps/installer/upload-files.mk + +# Clear out DIST_FILES if it was set by upload-files.mk (for Android builds) +DIST_FILES = +endif + +# Helper variables to convert from MOZ_AUTOMATION_* variables to the +# corresponding the make target +tier_MOZ_AUTOMATION_BUILD_SYMBOLS = buildsymbols +tier_MOZ_AUTOMATION_PACKAGE = package +tier_MOZ_AUTOMATION_PACKAGE_TESTS = package-tests +tier_MOZ_AUTOMATION_PACKAGE_GENERATED_SOURCES = package-generated-sources +tier_MOZ_AUTOMATION_UPLOAD_SYMBOLS = uploadsymbols +tier_MOZ_AUTOMATION_UPLOAD = upload +tier_MOZ_AUTOMATION_CHECK = check + +# Automation build steps. Everything in MOZ_AUTOMATION_TIERS also gets used in +# TIERS for mach display. As such, the MOZ_AUTOMATION_TIERS are roughly sorted +# here in the order that they will be executed (since mach doesn't know of the +# dependencies between them). +moz_automation_symbols = \ + MOZ_AUTOMATION_PACKAGE_TESTS \ + MOZ_AUTOMATION_UPLOAD \ + $(NULL) + +ifneq (,$(COMPILE_ENVIRONMENT)$(MOZ_ARTIFACT_BUILDS)) +moz_automation_symbols += \ + MOZ_AUTOMATION_BUILD_SYMBOLS \ + MOZ_AUTOMATION_UPLOAD_SYMBOLS \ + MOZ_AUTOMATION_PACKAGE \ + MOZ_AUTOMATION_PACKAGE_GENERATED_SOURCES \ + MOZ_AUTOMATION_CHECK \ + $(NULL) +endif +MOZ_AUTOMATION_TIERS := $(foreach sym,$(moz_automation_symbols),$(if $(filter 1,$($(sym))),$(tier_$(sym)))) + +# Dependencies between automation build steps +automation-start/uploadsymbols: automation/buildsymbols + +automation-start/upload: automation/package +automation-start/upload: automation/package-tests +automation-start/upload: automation/buildsymbols +automation-start/upload: automation/package-generated-sources + +# Run the check tier after everything else. +automation-start/check: $(addprefix automation/,$(filter-out check,$(MOZ_AUTOMATION_TIERS))) + +automation/build: $(addprefix automation/,$(MOZ_AUTOMATION_TIERS)) + @echo Automation steps completed. + +# Run as many tests as possible, even in case of one of them failing. +AUTOMATION_EXTRA_CMDLINE-check = --keep-going + +# The commands only run if the corresponding MOZ_AUTOMATION_* variable is +# enabled. This means, for example, if we enable MOZ_AUTOMATION_UPLOAD, then +# 'buildsymbols' will only run if MOZ_AUTOMATION_BUILD_SYMBOLS is also set. +# However, the target automation/buildsymbols will still be executed in this +# case because it is a prerequisite of automation/upload. 
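+#
+# For example (hypothetical settings): with MOZ_AUTOMATION_UPLOAD=1 but
+# MOZ_AUTOMATION_BUILD_SYMBOLS unset, `make automation/build` still walks
+# through automation/buildsymbols because it is a prerequisite of
+# automation/upload, but the pattern rule below expands to nothing for it
+# since 'buildsymbols' is not in MOZ_AUTOMATION_TIERS, so only the enabled
+# steps actually execute.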
+define automation_commands +@+$(PYTHON3) $(topsrcdir)/config/run-and-prefix.py $1 $(MAKE) $1 $(AUTOMATION_EXTRA_CMDLINE-$1) +$(call BUILDSTATUS,TIER_FINISH $1) +endef + +# The tier start message is in a separate target so make doesn't buffer it +# until the step completes with output syncing enabled. +automation-start/%: + $(if $(filter $*,$(MOZ_AUTOMATION_TIERS)),$(call BUILDSTATUS,TIER_START $*)) + +automation/%: automation-start/% + $(if $(filter $*,$(MOZ_AUTOMATION_TIERS)),$(call automation_commands,$*)) diff --git a/build/moz.build b/build/moz.build new file mode 100644 index 0000000000..59a5d56ce8 --- /dev/null +++ b/build/moz.build @@ -0,0 +1,129 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +with Files("**"): + BUG_COMPONENT = ("Firefox Build System", "General") + +# This cannot be named "build" because of bug 922191. +SPHINX_TREES["buildsystem"] = "docs" + +with Files("docs/**"): + SCHEDULES.exclusive = ["docs"] + +if CONFIG["OS_ARCH"] == "WINNT": + DIRS += ["win32"] +else: + DIRS += ["unix"] + +CRAMTEST_MANIFESTS += [ + "tests/cram/cram.ini", +] + +DEFINES["ACCEPTED_MAR_CHANNEL_IDS"] = CONFIG["ACCEPTED_MAR_CHANNEL_IDS"] + +if CONFIG["MOZ_BUILD_APP"] == "browser": + PYTHON_UNITTEST_MANIFESTS += [ + "compare-mozconfig/python.ini", + ] + +if CONFIG["ENABLE_TESTS"] or CONFIG["MOZ_DMD"]: + FINAL_TARGET_FILES += ["/tools/rb/fix_stacks.py"] + +if CONFIG["MOZ_DMD"]: + FINAL_TARGET_FILES += ["/memory/replace/dmd/dmd.py"] + +# Put a useful .gdbinit and .gdbinit.py in $objdir/build, to be picked up +# automatically by GDB via either libxul.so-gdb.py or js-gdb.py. +OBJDIR_PP_FILES.build += [".gdbinit.py.in"] +OBJDIR_FILES.build += [".gdbinit.loader"] +OBJDIR_FILES.build += [".gdbinit"] + +# Install the clang-cl runtime library for ASAN next to the binaries we produce. +if CONFIG["MOZ_ASAN"] and CONFIG["CC_TYPE"] == "clang-cl": + FINAL_TARGET_FILES += ["%" + CONFIG["MOZ_CLANG_RT_ASAN_LIB_PATH"]] + FINAL_TARGET_FILES += [ + "%" + CONFIG["MOZ_CLANG_RT_ASAN_LIB_PATH"].replace(".dll", ".pdb") + ] + +# Install the clang runtime library for ASAN next to the binaries we produce. 
+if CONFIG["MOZ_WIDGET_TOOLKIT"] == "android" and CONFIG["MOZ_ASAN"]: + FINAL_TARGET_FILES += ["%" + CONFIG["MOZ_CLANG_RT_ASAN_LIB_PATH"]] + +if CONFIG["MOZ_WIDGET_TOOLKIT"] != "android" and CONFIG["LLVM_SYMBOLIZER"]: + FINAL_TARGET_FILES += ["/" + CONFIG["LLVM_SYMBOLIZER"]] + +if CONFIG["MOZ_APP_BASENAME"]: + appini_defines = { + "TOPOBJDIR": TOPOBJDIR, + } + + for var in ( + "GRE_MILESTONE", + "MOZ_APP_VERSION", + "MOZ_APP_BASENAME", + "MOZ_APP_VENDOR", + "MOZ_APP_ID", + "MAR_CHANNEL_ID", + "MOZ_APP_REMOTINGNAME", + ): + appini_defines[var] = CONFIG[var] + + if CONFIG["MOZ_APP_DISPLAYNAME"] != CONFIG["MOZ_APP_BASENAME"]: + appini_defines["MOZ_APP_DISPLAYNAME"] = CONFIG["MOZ_APP_DISPLAYNAME"] + + if CONFIG["MOZ_BUILD_APP"] == "browser": + appini_defines["MOZ_BUILD_APP_IS_BROWSER"] = True + + if CONFIG["MOZ_APP_PROFILE"]: + appini_defines["MOZ_APP_PROFILE"] = CONFIG["MOZ_APP_PROFILE"] + + for var in ("MOZ_CRASHREPORTER", "MOZ_PROFILE_MIGRATOR", "MOZ_UPDATER"): + if CONFIG[var]: + appini_defines[var] = True + + appini_defines["MOZ_APPUPDATE_HOST"] = "aus5.mozilla.org" + if CONFIG["MOZ_APPUPDATE_HOST"]: + appini_defines["MOZ_APPUPDATE_HOST"] = CONFIG["MOZ_APPUPDATE_HOST"] + + GeneratedFile( + "application.ini", + script="../python/mozbuild/mozbuild/action/preprocessor.py", + entry_point="generate", + inputs=["application.ini.in"], + flags=[ + "-D%s=%s" % (k, "1" if v is True else v) + for k, v in sorted(appini_defines.items(), key=lambda t: t[0]) + ], + ) + + FINAL_TARGET_FILES += ["!application.ini"] + if CONFIG["MOZ_WIDGET_TOOLKIT"] != "android" and CONFIG["MOZ_UPDATER"]: + FINAL_TARGET_PP_FILES += ["update-settings.ini"] + + GeneratedFile( + "application.ini.h", script="appini_header.py", inputs=["!application.ini"] + ) + + +# Put a .lldbinit in the bin directory and the objdir, to be picked up +# automatically by LLDB when we debug executables using either of those two +# directories as the current working directory. The .lldbinit file will +# load $(topsrcdir)/.lldbinit, which is where the actual debugging commands are. +DEFINES["topsrcdir"] = TOPSRCDIR +DEFINES["topobjdir"] = TOPOBJDIR +FINAL_TARGET_PP_FILES += [".lldbinit.in"] +OBJDIR_FILES += ["!/dist/bin/.lldbinit"] + +# Put the .ycm_extra_conf.py file at the root of the objdir. It is used by +# the vim plugin YouCompleteMe. +OBJDIR_FILES += ["/.ycm_extra_conf.py"] + +if CONFIG["MOZ_VALGRIND"]: + OBJDIR_FILES._valgrind += [ + "valgrind/cross-architecture.sup", + "valgrind/i386-pc-linux-gnu.sup", + "valgrind/x86_64-pc-linux-gnu.sup", + ] diff --git a/build/moz.configure/android-ndk.configure b/build/moz.configure/android-ndk.configure new file mode 100644 index 0000000000..51683d8d33 --- /dev/null +++ b/build/moz.configure/android-ndk.configure @@ -0,0 +1,407 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
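+# The checks below locate the Android NDK and derive the platform, sysroot,
+# system-header, toolchain and STL include paths from it. By default the NDK
+# is looked for as android-ndk-<version> (falling back to android-ndk) under
+# the toolchains base directory (typically ~/.mozbuild); --with-android-ndk
+# overrides this.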
+ + +@depends(toolchains_base_dir, "--help") +@imports(_from="os.path", _import="isdir") +@imports(_from="mozboot.android", _import="NDK_VERSION") +def default_android_ndk_root(toolchains_base_dir, _): + for ndk in ("android-ndk-%s" % NDK_VERSION, "android-ndk"): + path = os.path.join(toolchains_base_dir, ndk) + if isdir(path): + return path + + +option( + "--with-android-ndk", + nargs=1, + default=default_android_ndk_root, + help="location where the Android NDK can be found{|}", +) + +option("--with-android-toolchain", nargs=1, help="location of the Android toolchain") + +option( + "--with-android-googlevr-sdk", nargs=1, help="location of the Android GoogleVR SDK" +) + + +@depends(target) +def min_android_version(target): + if target.cpu in ["aarch64", "x86_64"]: + # 64-bit support was added in API 21. + return "21" + return "16" + + +option( + "--with-android-version", + nargs=1, + help="android platform version{|}", + default=min_android_version, +) + + +@depends("--with-android-version", min_android_version) +@imports(_from="__builtin__", _import="ValueError") +def android_version(value, min_version): + if not value: + # Someone has passed --without-android-version. + die("--with-android-version cannot be disabled.") + + try: + version = int(value[0]) + except ValueError: + die("--with-android-version expects an integer value") + + if version < int(min_version): + die( + "--with-android-version must be at least %s (got %s)", min_version, value[0] + ) + + return version + + +add_old_configure_assignment("android_version", android_version) + + +@depends("--with-android-ndk") +@imports(_from="os.path", _import="isdir") +def ndk(value): + if value: + if not isdir(value[0]): + die( + "The path you specified with --with-android-ndk (%s) is not " + "a directory" % value[0] + ) + return value[0] + + die( + "You must specify --with-android-ndk=/path/to/ndk when targeting Android, " + "or try |mach bootstrap|." + ) + + +set_config("ANDROID_NDK", ndk) +add_old_configure_assignment("android_ndk", ndk) + + +@depends(ndk) +@checking("for android ndk version") +@imports(_from="__builtin__", _import="open") +@imports(_from="mozboot.android", _import="NDK_VERSION") +@imports(_from="mozboot.android", _import="get_ndk_version") +@imports(_from="mozboot.android", _import="GetNdkVersionError") +def ndk_version(ndk): + if not ndk: + # Building 'js/src' for non-Android. + return + + try: + major, minor, human = get_ndk_version(ndk) + except GetNdkVersionError as e: + die(str(e)) + + if NDK_VERSION != human: + die( + "The only supported version of the NDK is %s (have %s)\n" + "Please run |mach bootstrap| " + "to install the correct NDK." % (NDK_VERSION, human) + ) + return namespace( + major=major, + minor=minor, + ) + + +set_config("ANDROID_NDK_MAJOR_VERSION", ndk_version.major) +set_config("ANDROID_NDK_MINOR_VERSION", ndk_version.minor) + + +@depends(target, android_version, ndk) +@checking("for android platform directory") +@imports(_from="os.path", _import="isdir") +def android_platform(target, android_version, ndk): + if target.os != "Android": + return + + if "aarch64" == target.cpu: + target_dir_name = "arm64" + else: + target_dir_name = target.cpu + + # Not all Android releases have their own platform release. We use + # the next lower platform version in these cases. 
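+ # For example, API levels 10 and 11 map to the android-9 platform
+ # directory, and 20 and 22 map to 19 and 21 respectively.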
+ if android_version in (11, 10): + platform_version = 9 + elif android_version in (20, 22): + platform_version = android_version - 1 + else: + platform_version = android_version + + platform_dir = os.path.join( + ndk, "platforms", "android-%s" % platform_version, "arch-%s" % target_dir_name + ) + + if not isdir(platform_dir): + die( + "Android platform directory not found. With the current " + "configuration, it should be in %s" % platform_dir + ) + + return platform_dir + + +add_old_configure_assignment("android_platform", android_platform) +set_config("ANDROID_PLATFORM", android_platform) + + +@depends(android_platform, ndk, target) +@checking("for android sysroot directory") +@imports(_from="os.path", _import="isdir") +def android_sysroot(android_platform, ndk, target): + if target.os != "Android": + return + + # NDK r15 has both unified and non-unified headers, but we only support + # non-unified for that NDK, so look for that first. + search_dirs = [ + # (, ) + (os.path.join(android_platform, "usr", "include"), android_platform), + (os.path.join(ndk, "sysroot"), os.path.join(ndk, "sysroot")), + ] + + for test_dir, sysroot_dir in search_dirs: + if isdir(test_dir): + return sysroot_dir + + die( + "Android sysroot directory not found in %s." + % str([sysroot_dir for test_dir, sysroot_dir in search_dirs]) + ) + + +add_old_configure_assignment("android_sysroot", android_sysroot) + + +@depends(android_platform, ndk, target) +@checking("for android system directory") +@imports(_from="os.path", _import="isdir") +def android_system(android_platform, ndk, target): + if target.os != "Android": + return + + # NDK r15 has both unified and non-unified headers, but we only support + # non-unified for that NDK, so look for that first. + search_dirs = [ + os.path.join(android_platform, "usr", "include"), + os.path.join(ndk, "sysroot", "usr", "include", target.toolchain), + ] + + for system_dir in search_dirs: + if isdir(system_dir): + return system_dir + + die("Android system directory not found in %s." 
% str(search_dirs)) + + +add_old_configure_assignment("android_system", android_system) + + +@depends(target, host, ndk, "--with-android-toolchain") +@checking("for the Android toolchain directory", lambda x: x or "not found") +@imports(_from="os.path", _import="isdir") +@imports(_from="mozbuild.shellutil", _import="quote") +def android_toolchain(target, host, ndk, toolchain): + if not ndk: + return + if toolchain: + return toolchain[0] + else: + if target.cpu == "arm" and target.endianness == "little": + target_base = "arm-linux-androideabi" + elif target.cpu == "x86": + target_base = "x86" + elif target.cpu == "x86_64": + target_base = "x86_64" + elif target.cpu == "aarch64" and target.endianness == "little": + target_base = "aarch64-linux-android" + else: + die("Target cpu is not supported.") + + toolchain_format = "%s/toolchains/%s-4.9/prebuilt/%s-%s" + host_kernel = "windows" if host.kernel == "WINNT" else host.kernel.lower() + + toolchain = toolchain_format % (ndk, target_base, host_kernel, host.cpu) + log.debug("Trying %s" % quote(toolchain)) + if not isdir(toolchain) and host.cpu == "x86_64": + toolchain = toolchain_format % (ndk, target_base, host_kernel, "x86") + log.debug("Trying %s" % quote(toolchain)) + if isdir(toolchain): + return toolchain + die("You have to specify --with-android-toolchain=" "/path/to/ndk/toolchain.") + + +set_config("ANDROID_TOOLCHAIN", android_toolchain) + + +@depends(target) +def android_toolchain_prefix_base(target): + if target.cpu == "x86": + # Ideally, the --target should just have the right x86 variant + # in the first place. + return "i686-linux-android" + return target.toolchain + + +option( + env="STLPORT_CPPFLAGS", + nargs=1, + help="Options compiler should pass for standard C++ library", +) + + +@depends("STLPORT_CPPFLAGS", ndk) +@imports(_from="os.path", _import="isdir") +def stlport_cppflags(value, ndk): + if value and len(value): + return value.split() + if not ndk: + return + + ndk_base = os.path.join(ndk, "sources", "cxx-stl") + cxx_base = os.path.join(ndk_base, "llvm-libc++") + cxx_include = os.path.join(cxx_base, "libcxx", "include") + cxxabi_base = os.path.join(ndk_base, "llvm-libc++abi") + cxxabi_include = os.path.join(cxxabi_base, "libcxxabi", "include") + + if not isdir(cxx_include): + # NDK r13 removes the inner "libcxx" directory. + cxx_include = os.path.join(cxx_base, "include") + if not isdir(cxx_include): + die("Couldn't find path to libc++ includes in the android ndk") + + if not isdir(cxxabi_include): + # NDK r13 removes the inner "libcxxabi" directory. + cxxabi_include = os.path.join(cxxabi_base, "include") + if not isdir(cxxabi_include): + die("Couldn't find path to libc++abi includes in the android ndk") + + # Add android/support/include/ for prototyping long double math + # functions, locale-specific C library functions, multibyte support, + # etc. + return [ + # You'd think we'd want to use -stdlib=libc++, but this doesn't work + # (cf. https://bugzilla.mozilla.org/show_bug.cgi?id=1510897#c2) + # Using -stdlib=libc++ and removing some of the -I below also doesn't + # work because not everything that is in cxx_include comes in the C++ + # header directory that comes with clang. 
+ "-stdlib=libstdc++", + "-I%s" % cxx_include, + "-I%s" % os.path.join(ndk, "sources", "android", "support", "include"), + "-I%s" % cxxabi_include, + ] + + +add_old_configure_assignment("stlport_cppflags", stlport_cppflags) + + +@depends(android_system, android_sysroot, android_toolchain, android_version) +def extra_toolchain_flags( + android_system, android_sysroot, toolchain_dir, android_version +): + if not android_sysroot: + return [] + flags = [ + "-isystem", + android_system, + "-isystem", + os.path.join(android_sysroot, "usr", "include"), + "-gcc-toolchain", + toolchain_dir, + "-D__ANDROID_API__=%d" % android_version, + ] + return flags + + +@depends(android_toolchain_prefix_base, android_toolchain) +def android_toolchain_prefix(prefix_base, toolchain): + if toolchain: + return "%s/bin/%s-" % (toolchain, prefix_base) + + +imply_option( + "--with-toolchain-prefix", android_toolchain_prefix, reason="--with-android-ndk" +) + + +@depends( + extra_toolchain_flags, + stlport_cppflags, + android_toolchain, + android_toolchain_prefix_base, +) +@imports(_from="os.path", _import="isdir") +def bindgen_cflags_android(toolchain_flags, stlport_flags, toolchain, toolchain_prefix): + if not toolchain_flags: + return + + gcc_include = os.path.join(toolchain, "lib", "gcc", toolchain_prefix, "4.9.x") + if not isdir(gcc_include): + gcc_include = os.path.join(toolchain, "lib", "gcc", toolchain_prefix, "4.9") + + return ( + toolchain_flags + + stlport_flags + + [ + "-I%s" % os.path.join(gcc_include, "include"), + "-I%s" % os.path.join(gcc_include, "include-fixed"), + ] + ) + + +@depends("--with-android-googlevr-sdk", target) +@checking("for GoogleVR SDK", lambda x: x.result) +@imports(_from="os.path", _import="exists") +@imports(_from="os.path", _import="abspath") +def googlevr_sdk(value, target): + if not value: + return namespace(result="Not specified") + path = abspath(value[0]) + if not exists(path): + die("Could not find GoogleVR SDK %s", path) + include = "%s/libraries/headers/" % path + if "arm" == target.cpu: + arch = "armeabi-v7a" + elif "aarch64" == target.cpu: + arch = "arm64-v8a" + elif "x86" == target.cpu: + arch = "x86" + else: + die("Unsupported GoogleVR cpu architecture %s" % target.cpu) + + libs = "{0}/libraries/jni/{1}/".format(path, arch) + + if not exists(libs): + die( + "Could not find GoogleVR NDK at %s. Did you try running " + "'./gradlew :extractNdk' in %s?", + libs, + path, + ) + + return namespace( + result=path, + include=include, + libs=libs, + enabled=True, + ) + + +set_define("MOZ_ANDROID_GOOGLE_VR", googlevr_sdk.enabled) +set_config("MOZ_ANDROID_GOOGLE_VR", googlevr_sdk.enabled) +set_config("MOZ_ANDROID_GOOGLE_VR_INCLUDE", googlevr_sdk.include) +set_config("MOZ_ANDROID_GOOGLE_VR_LIBS", googlevr_sdk.libs) diff --git a/build/moz.configure/android-sdk.configure b/build/moz.configure/android-sdk.configure new file mode 100644 index 0000000000..129c11525a --- /dev/null +++ b/build/moz.configure/android-sdk.configure @@ -0,0 +1,132 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# Ensure Android SDK and build-tools versions depending on mobile target. 
+ + +@depends(host, toolchains_base_dir, "--help") +@imports(_from="os.path", _import="isdir") +def default_android_sdk_root(host, toolchains_base_dir, _): + sdk_basename = { + "Darwin": "android-sdk-macosx", + "Linux": "android-sdk-linux", + "WINNT": "android-sdk-windows", + }.get(host.kernel, "android-sdk") + for sdk_basename in (sdk_basename, "android-sdk"): + path = os.path.join(toolchains_base_dir, sdk_basename) + if isdir(path): + return path + + +option( + "--with-android-sdk", + nargs=1, + default=default_android_sdk_root, + help="location where the Android SDK can be found (like ~/.mozbuild/android-sdk-linux){|}", +) + + +@depends("--with-android-sdk") +@imports(_from="os.path", _import="isdir") +def android_sdk_root(value): + if value: + if not isdir(value[0]): + die( + "The path you specified with --with-android-sdk (%s) is not " + "a directory" % value[0] + ) + return value[0] + + die( + "You must specify --with-android-sdk=/path/to/sdk when targeting Android, " + "or try |mach bootstrap|." + ) + + +@depends("--help") +def android_sdk_version(_): + return namespace(build_tools_version="29.0.3", target_sdk_version="29") + + +@depends(android_sdk_root, android_sdk_version) +@checking("for Android build-tools") +@imports(_from="os.path", _import="exists") +@imports(_from="os.path", _import="isdir") +def android_build_tools(sdk_root, sdk_version): + android_build_tools_base = os.path.join(sdk_root, "build-tools") + version = sdk_version.build_tools_version + if isdir(os.path.join(android_build_tools_base, version)): + tools = os.path.join(android_build_tools_base, version) + for zipalign in ("zipalign", "zipalign.exe"): + if exists(os.path.join(tools, zipalign)): + return [tools] + + die( + "You must install the Android build-tools version %s. " + "Try |mach bootstrap|. (Looked for %s/%s)" + % (version, android_build_tools_base, version) + ) + + +@depends(android_sdk_root) +@checking("for Android tools") +@imports(_from="os.path", _import="isdir") +def android_tools(sdk_root): + tools = os.path.join(sdk_root, "tools") + if isdir(tools): + return tools + + die("You must install the Android tools. Try |mach bootstrap|") + + +@depends(android_sdk_root) +@checking("for Android platform-tools") +@imports(_from="os.path", _import="exists") +@imports(_from="os.path", _import="isdir") +def android_platform_tools(sdk_root): + tools = os.path.join(sdk_root, "platform-tools") + for adb in ("adb", "adb.exe"): + if exists(os.path.join(tools, adb)): + return [tools] + + die( + "You must install the Android platform-tools. Try |mach bootstrap|. (Looked for %s)" + % tools + ) + + +@depends(android_sdk_root) +def android_emulator_path(sdk_root): + return [os.path.join(sdk_root, "emulator")] + + +@template +def check_android_tools(tool, tool_dir): + check = check_prog( + tool.upper(), (tool, tool + ".exe"), paths=tool_dir, allow_missing=True + ) + + @depends(check) + def require_tool(result): + if result is None: + die("The program %s was not found. 
Try |mach bootstrap|" % tool) + return result + + return require_tool + + +check_android_tools("zipalign", android_build_tools) +check_android_tools("adb", android_platform_tools) +check_android_tools("emulator", android_emulator_path) + +set_config("ANDROID_SDK_ROOT", android_sdk_root) +set_config("ANDROID_TOOLS", android_tools) + +set_config("ANDROID_BUILD_TOOLS_VERSION", android_sdk_version.build_tools_version) +set_config("ANDROID_TARGET_SDK", android_sdk_version.target_sdk_version) +add_old_configure_assignment( + "ANDROID_TARGET_SDK", android_sdk_version.target_sdk_version +) diff --git a/build/moz.configure/arm.configure b/build/moz.configure/arm.configure new file mode 100644 index 0000000000..2082fa640f --- /dev/null +++ b/build/moz.configure/arm.configure @@ -0,0 +1,292 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + + +@depends(target.os, "--help") +def arm_option_defaults(os, _): + if os == "Android": + arch = "armv7-a" + thumb = "yes" + fpu = "neon" + float_abi = "softfp" + else: + arch = thumb = fpu = float_abi = "toolchain-default" + return namespace( + arch=arch, + thumb=thumb, + fpu=fpu, + float_abi=float_abi, + ) + + +# Note: '{...|}' in the help of all options with a non-constant default to +# make the lint happy. The first arm is always going to be used, because a +# default is always returned. The lint is fooled by this file being +# conditional. If it weren't conditional, the lint wouldn't ask for '{|}' to +# be there. +option( + "--with-arch", + nargs=1, + default=arm_option_defaults.arch, + help="{Use specific CPU features (-march=type). Resets thumb, fpu, " + "float-abi, etc. 
defaults when set|}", +) + + +@depends("--with-arch") +def arch_option(value): + if value: + if value[0] != "toolchain-default": + return ["-march={}".format(value[0])] + return [] + + +option( + "--with-thumb", + choices=("yes", "no", "toolchain-default"), + default=arm_option_defaults.thumb, + nargs="?", + help="{Use Thumb instruction set (-mthumb)|}", +) + + +def normalize_arm_option(value): + if value: + if len(value): + if value[0] == "yes": + return True + elif value[0] == "no": + return False + else: + return value[0] + return True + return False + + +@depends("--with-thumb") +def thumb_option(value): + value = normalize_arm_option(value) + if value is True: + return ["-mthumb"] + if value is False: + return ["-marm"] + return [] + + +option( + "--with-thumb-interwork", + choices=("yes", "no", "toolchain-default"), + default="toolchain-default", + nargs="?", + help="Use Thumb/ARM instuctions interwork (-mthumb-interwork)", +) + + +@depends("--with-thumb-interwork") +def thumb_interwork_option(value): + value = normalize_arm_option(value) + if value is True: + return ["-mthumb-interwork"] + if value is False: + return ["-mno-thumb-interwork"] + return [] + + +option( + "--with-fpu", + nargs=1, + default=arm_option_defaults.fpu, + help="{Use specific FPU type (-mfpu=type)|}", +) + + +@depends("--with-fpu") +def fpu_option(value): + if value: + if value[0] != "toolchain-default": + return ["-mfpu={}".format(value[0])] + return [] + + +option( + "--with-float-abi", + nargs=1, + default=arm_option_defaults.float_abi, + help="{Use specific arm float ABI (-mfloat-abi=type)|}", +) + + +@depends("--with-float-abi") +def float_abi_option(value): + if value: + if value[0] != "toolchain-default": + return ["-mfloat-abi={}".format(value[0])] + return [] + + +option( + "--with-soft-float", + choices=("yes", "no", "toolchain-default"), + default="toolchain-default", + nargs="?", + help="Use soft float library (-msoft-float)", +) + + +@depends("--with-soft-float") +def soft_float_option(value): + value = normalize_arm_option(value) + if value is True: + return ["-msoft-float"] + if value is False: + return ["-mno-soft-float"] + return [] + + +check_and_add_gcc_flag( + "-mno-unaligned-access", when=depends(target.os)(lambda os: os == "Android") +) + + +@depends( + arch_option, + thumb_option, + thumb_interwork_option, + fpu_option, + float_abi_option, + soft_float_option, +) +def all_flags(arch, thumb, interwork, fpu, float_abi, soft_float): + return arch + thumb + interwork + fpu + float_abi + soft_float + + +add_old_configure_assignment("_ARM_FLAGS", all_flags) +add_old_configure_assignment("_THUMB_FLAGS", thumb_option) + + +@depends(c_compiler, all_flags) +@checking("ARM version support in compiler", lambda x: x.arm_arch) +@imports(_from="textwrap", _import="dedent") +def arm_target(compiler, all_flags): + # We're going to preprocess the following source to figure out some details + # about the arm target options we have enabled. + source = dedent( + """\ + %ARM_ARCH __ARM_ARCH + #if __thumb2__ + %THUMB2 yes + #else + %THUMB2 no + #endif + // Confusingly, the __SOFTFP__ preprocessor variable indicates the + // "softfloat" ABI, not the "softfp" ABI. + #if __SOFTFP__ + %FLOAT_ABI soft + #elif __ARM_PCS_VFP + %FLOAT_ABI hard + #else + %FLOAT_ABI softfp + #endif + // There is more subtlety to it than this preprocessor test, but MOZ_FPU doesn't + // need to be too fine-grained. 
+ #if __ARM_NEON + %FPU neon + #elif __ARM_VFPV2__ || __ARM_FP == 12 + %FPU vfpv2 + #elif __ARM_VFPV3__ + %FPU vfpv3 + #elif __ARM_VFPV4__ || __ARM_FP == 14 + %FPU vfpv4 + #elif __ARM_FPV5__ + %FPU fp-armv8 + #endif + """ + ) + result = try_invoke_compiler( + compiler.wrapper + [compiler.compiler] + compiler.flags, + compiler.language, + source, + ["-E"] + all_flags, + ) + # Metadata emitted by preprocessors such as GCC with LANG=ja_JP.utf-8 may + # have non-ASCII characters. Treat the output as bytearray. + data = {"fpu": None} # fpu may not get a value from the preprocessor. + for line in result.splitlines(): + if line.startswith("%"): + k, _, v = line.partition(" ") + k = k.lstrip("%").lower() + if k == "arm_arch": + data[k] = int(v) + else: + data[k] = { + "yes": True, + "no": False, + }.get(v, v) + log.debug("%s = %s", k, data[k]) + + return namespace(**data) + + +@depends(arm_target.arm_arch, when=depends(target.os)(lambda os: os == "Android")) +def armv7(arch): + if arch < 7: + die("Android/armv6 and earlier are not supported") + + +set_config("MOZ_THUMB2", True, when=arm_target.thumb2) +set_define("MOZ_THUMB2", True, when=arm_target.thumb2) +add_old_configure_assignment("MOZ_THUMB2", True, when=arm_target.thumb2) + + +have_arm_simd = c_compiler.try_compile( + body='asm("uqadd8 r1, r1, r2");', check_msg="for ARM SIMD support in compiler" +) + +set_config("HAVE_ARM_SIMD", have_arm_simd) +set_define("HAVE_ARM_SIMD", have_arm_simd) + +have_arm_neon = c_compiler.try_compile( + body='asm(".fpu neon\\n vadd.i8 d0, d0, d0");', + check_msg="for ARM NEON support in compiler", +) + +set_config("HAVE_ARM_NEON", have_arm_neon) +set_define("HAVE_ARM_NEON", have_arm_neon) + + +# We don't need to build NEON support if we're targetting a non-NEON device. +# This matches media/webrtc/trunk/webrtc/build/common.gypi. +@depends(arm_target.arm_arch, when=have_arm_neon) +def build_arm_neon(arm_arch): + return arm_arch >= 7 + + +set_config("BUILD_ARM_NEON", build_arm_neon) +set_define("BUILD_ARM_NEON", build_arm_neon) + + +set_config("ARM_ARCH", depends(arm_target.arm_arch)(lambda x: str(x))) +add_old_configure_assignment("ARM_ARCH", depends(arm_target.arm_arch)(lambda x: str(x))) +set_config("MOZ_FPU", arm_target.fpu) + + +@depends(arm_target.float_abi) +def neon_flags(float_abi): + # Building with -mfpu=neon requires either the "softfp" or the + # "hardfp" ABI. Depending on the compiler's default target, and the + # CFLAGS, the default ABI might be neither, in which case it is the + # "softfloat" ABI. + # The "softfloat" ABI is binary-compatible with the "softfp" ABI, so + # we can safely mix code built with both ABIs. So, if we detect + # that compiling uses the "softfloat" ABI, force the use of the + # "softfp" ABI instead. + flags = ["-mfpu=neon"] + if float_abi == "soft": + flags.append("-mfloat-abi=softfp") + return tuple(flags) + + +set_config("NEON_FLAGS", neon_flags) diff --git a/build/moz.configure/bindgen.configure b/build/moz.configure/bindgen.configure new file mode 100644 index 0000000000..e5e49d8df8 --- /dev/null +++ b/build/moz.configure/bindgen.configure @@ -0,0 +1,371 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
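+# cbindgen is located via the CBINDGEN environment variable, then the
+# bootstrap and Rust search paths, and must be at least version 0.16.0.
+# bindgen additionally needs a clang binary (3.9 or newer) and a libclang
+# (5.0 or newer), either pointed to with --with-clang-path /
+# --with-libclang-path or derived from the target compiler when that
+# compiler is already clang or clang-cl.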
+ + +@depends(build_project, "--enable-smoosh") +def cbindgen_is_needed(build_project, js_enable_smoosh): + if build_project != "js": + # cbindgen is needed by the style system build and webrender. + return True + + # cbindgen is needed by SmooshMonkey. + return js_enable_smoosh + + +option(env="CBINDGEN", nargs=1, when=cbindgen_is_needed, help="Path to cbindgen") + + +@imports(_from="textwrap", _import="dedent") +def check_cbindgen_version(cbindgen, fatal=False): + log.debug("trying cbindgen: %s" % cbindgen) + + cbindgen_min_version = Version("0.16.0") + + # cbindgen x.y.z + version = Version(check_cmd_output(cbindgen, "--version").strip().split(" ")[1]) + log.debug("%s has version %s" % (cbindgen, version)) + if version >= cbindgen_min_version: + return True + if not fatal: + return False + + die( + dedent( + """\ + cbindgen version {} is too old. At least version {} is required. + + Please update using 'cargo install cbindgen --force' or running + './mach bootstrap', after removing the existing executable located at + {}. + """.format( + version, cbindgen_min_version, cbindgen + ) + ) + ) + + +@depends_if( + "CBINDGEN", + bootstrap_search_path("cbindgen"), + rust_search_path, + when=cbindgen_is_needed, +) +@checking("for cbindgen") +@imports(_from="textwrap", _import="dedent") +def cbindgen(cbindgen_override, bootstrap_search_path, rust_search_path): + if cbindgen_override: + check_cbindgen_version(cbindgen_override[0], fatal=True) + return cbindgen_override[0] + + candidates = [] + for path in bootstrap_search_path + rust_search_path: + candidate = find_program("cbindgen", [path]) + if not candidate: + continue + if check_cbindgen_version(candidate): + return candidate + candidates.append(candidate) + + if not candidates: + raise FatalCheckError( + dedent( + """\ + Cannot find cbindgen. Please run `mach bootstrap`, + `cargo install cbindgen`, ensure that `cbindgen` is on your PATH, + or point at an executable with `CBINDGEN`. + """ + ) + ) + check_cbindgen_version(candidates[0], fatal=True) + + +set_config("CBINDGEN", cbindgen) + +# Bindgen can use rustfmt to format Rust file, but it's not required. +option(env="RUSTFMT", nargs=1, help="Path to the rustfmt program") + +rustfmt = check_prog( + "RUSTFMT", + ["rustfmt"], + paths=rust_search_path, + input="RUSTFMT", + allow_missing=True, +) + + +option( + "--with-libclang-path", + nargs=1, + help="Absolute path to a directory containing Clang/LLVM libraries for bindgen (version 3.9.x or above)", +) +option( + "--with-clang-path", + nargs=1, + help="Absolute path to a Clang binary for bindgen (version 3.9.x or above)", +) + + +@depends( + "--with-clang-path", + c_compiler, + cxx_compiler, + clang_search_path, + target, + macos_sdk, +) +@checking("for clang for bindgen", lambda x: x.path if x else "not found") +def bindgen_clang_compiler( + clang_path, c_compiler, cxx_compiler, clang_search_path, target, macos_sdk +): + # When the target compiler is clang, use that, including flags. + if cxx_compiler.type == "clang": + if clang_path and clang_path[0] not in ( + c_compiler.compiler, + cxx_compiler.compiler, + ): + die( + "--with-clang-path is not valid when the target compiler is %s", + cxx_compiler.type, + ) + return namespace( + path=cxx_compiler.compiler, + flags=cxx_compiler.flags, + ) + # When the target compiler is clang-cl, use clang in the same directory, + # and figure the right flags to use. 
+ if cxx_compiler.type == "clang-cl": + if clang_path and os.path.dirname(clang_path[0]) != os.path.dirname( + cxx_compiler.compiler + ): + die( + "--with-clang-path must point to clang in the same directory " + "as the target compiler" + ) + if not clang_path: + clang_path = [os.path.join(os.path.dirname(cxx_compiler.compiler), "clang")] + + clang_path = find_program( + clang_path[0] if clang_path else "clang++", clang_search_path + ) + if not clang_path: + return + # Hack before bug 1617793: if the compiler is clang-cl, hack the target + if cxx_compiler.type == "clang-cl": + target = split_triplet("%s-pc-windows-msvc" % target.raw_cpu, allow_msvc=True) + flags = prepare_flags(target, macos_sdk) + info = check_compiler([clang_path] + flags, "C++", target) + return namespace( + path=clang_path, + flags=flags + info.flags, + ) + + +@depends("--with-libclang-path", bindgen_clang_compiler, host_library_name_info, host) +@checking("for libclang for bindgen", lambda x: x if x else "not found") +@imports("glob") +@imports(_from="os", _import="pathsep") +@imports(_from="os.path", _import="split", _as="pathsplit") +@imports("re") +def bindgen_libclang_path(libclang_path, clang, library_name_info, host): + if not clang: + if libclang_path: + die( + "--with-libclang-path is not valid without a clang compiler " + "for bindgen" + ) + return + + # Try to ensure that the clang shared library that bindgen is going + # to look for is actually present. The files that we search for + # mirror the logic in clang-sys/build.rs. + libclang_choices = [] + if host.os == "WINNT": + libclang_choices.append("libclang.dll") + libclang_choices.append( + "%sclang%s" % (library_name_info.dll.prefix, library_name_info.dll.suffix) + ) + if host.kernel == "Linux": + libclang_choices.append("libclang.so.*") + + if host.os == "OpenBSD": + libclang_choices.append("libclang.so.*.*") + + candidates = [] + if not libclang_path: + # Try to find libclang_path based on clang search dirs. + clang_search_dirs = check_cmd_output(clang.path, "-print-search-dirs") + for line in clang_search_dirs.splitlines(): + name, _, value = line.partition(": =") + if host.os == "WINNT" and name == "programs": + # On Windows, libclang.dll is in bin/ rather than lib/, + # so scan the programs search dirs. + # To make matters complicated, clang before version 9 uses `:` + # separate between paths (and `;` in newer versions) + if pathsep in value: + candidates.extend(value.split(pathsep)) + else: + for part in value.split(":"): + # Assume that if previous "candidate" was of length 1, + # it's a drive letter and the current part is the rest of + # the corresponding full path. + if candidates and len(candidates[-1]) == 1: + candidates[-1] += ":" + part + else: + candidates.append(part) + elif host.os != "WINNT" and name == "libraries": + # On other platforms, use the directories from the libraries + # search dirs that looks like $something/clang/$version. 
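+                # (Illustrative example: a "libraries" entry such as
+                # /usr/lib64/clang/11.0.0 contributes /usr/lib64 as a
+                # candidate directory in which to look for libclang.so.*.)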
+ for dir in value.split(pathsep): + dir, version = pathsplit(dir) + if re.match(r"[0-9.]+", version): + dir, name = pathsplit(dir) + if name == "clang": + candidates.append(dir) + else: + candidates.append(libclang_path[0]) + + for dir in candidates: + for pattern in libclang_choices: + log.debug('Trying "%s" in "%s"', pattern, dir) + libs = glob.glob(os.path.join(dir, pattern)) + if libs: + return libs[0] + + +@depends(bindgen_clang_compiler, bindgen_libclang_path, build_project) +def bindgen_config_paths(clang, libclang, build_project): + # XXX: we want this code to be run for both Gecko and JS, but we don't + # necessarily want to force a bindgen/Rust dependency on JS just yet. + # Actually, we don't want to force an error if we're not building the + # browser generally. We therefore whitelist the projects that require + # bindgen facilities at this point and leave it at that. + if build_project in ("browser", "mobile/android"): + if not clang: + die( + "Could not find clang to generate run bindings for C/C++. " + "Please install the necessary packages, run `mach bootstrap`, " + "or use --with-clang-path to give the location of clang." + ) + + if not libclang: + die( + "Could not find libclang to generate rust bindings for C/C++. " + "Please install the necessary packages, run `mach bootstrap`, " + "or use --with-libclang-path to give the path containing it." + ) + + if clang and libclang: + return namespace( + libclang=libclang, + libclang_path=os.path.dirname(libclang), + clang_path=clang.path, + clang_flags=clang.flags, + ) + + +@depends(bindgen_config_paths.libclang, when=bindgen_config_paths) +@checking("that libclang is new enough", lambda s: "yes" if s else "no") +@imports(_from="ctypes", _import="CDLL") +@imports(_from="textwrap", _import="dedent") +def min_libclang_version(libclang): + try: + lib = CDLL(libclang) + # We want at least 5.0. The API we test below is enough for that. + # Just accessing it should throw if not found. + fun = lib.clang_getAddressSpace + return True + except: + die( + dedent( + """\ + The libclang located at {} is too old (need at least 5.0). + + Please make sure to update it or point to a newer libclang using + --with-libclang-path. + """.format( + libclang + ) + ) + ) + return False + + +set_config("MOZ_LIBCLANG_PATH", bindgen_config_paths.libclang_path) +set_config("MOZ_CLANG_PATH", bindgen_config_paths.clang_path) + + +@depends( + target, + target_is_unix, + cxx_compiler, + bindgen_cflags_android, + bindgen_config_paths.clang_flags, +) +def basic_bindgen_cflags(target, is_unix, compiler_info, android_cflags, clang_flags): + args = [ + "-x", + "c++", + "-fno-sized-deallocation", + "-fno-aligned-new", + "-DTRACING=1", + "-DIMPL_LIBXUL", + "-DMOZILLA_INTERNAL_API", + "-DRUST_BINDGEN", + ] + + if is_unix: + args += ["-DOS_POSIX=1"] + + if target.os == "Android": + args += android_cflags + + args += { + "Android": ["-DOS_ANDROID=1"], + "DragonFly": ["-DOS_BSD=1", "-DOS_DRAGONFLY=1"], + "FreeBSD": ["-DOS_BSD=1", "-DOS_FREEBSD=1"], + "GNU": ["-DOS_LINUX=1"], + "NetBSD": ["-DOS_BSD=1", "-DOS_NETBSD=1"], + "OpenBSD": ["-DOS_BSD=1", "-DOS_OPENBSD=1"], + "OSX": ["-DOS_MACOSX=1", "-stdlib=libc++"], + "SunOS": ["-DOS_SOLARIS=1"], + "WINNT": [ + "-DOS_WIN=1", + "-DWIN32=1", + ], + }.get(target.os, []) + + if compiler_info.type == "clang-cl": + args += [ + # To enable the builtin __builtin_offsetof so that CRT wouldn't + # use reinterpret_cast in offsetof() which is not allowed inside + # static_assert(). 
+ "-D_CRT_USE_BUILTIN_OFFSETOF", + # Enable hidden attribute (which is not supported by MSVC and + # thus not enabled by default with a MSVC-compatibile build) + # to exclude hidden symbols from the generated file. + "-DHAVE_VISIBILITY_HIDDEN_ATTRIBUTE=1", + ] + + return args + (clang_flags or []) + + +option( + env="BINDGEN_CFLAGS", + nargs=1, + help="Options bindgen should pass to the C/C++ parser", +) + + +@depends(basic_bindgen_cflags, "BINDGEN_CFLAGS") +@checking("bindgen cflags", lambda s: s if s else "no") +def bindgen_cflags(base_flags, extra_flags): + flags = base_flags + if extra_flags and len(extra_flags): + flags += extra_flags[0].split() + return " ".join(flags) + + +add_old_configure_assignment("_BINDGEN_CFLAGS", bindgen_cflags) diff --git a/build/moz.configure/checks.configure b/build/moz.configure/checks.configure new file mode 100644 index 0000000000..9269addbb8 --- /dev/null +++ b/build/moz.configure/checks.configure @@ -0,0 +1,189 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# Templates implementing some generic checks. +# ============================================================== + +# Declare some exceptions. This is cumbersome, but since we shouldn't need a +# lot of them, let's stack them all here. When adding a new one, put it in the +# _declare_exceptions template, and add it to the return statement. Then +# destructure in the assignment below the function declaration. + + +@template +@imports(_from="__builtin__", _import="Exception") +def _declare_exceptions(): + class FatalCheckError(Exception): + """An exception to throw from a function decorated with @checking. + It will result in calling die() with the given message. + Debugging messages emitted from the decorated function will also be + printed out.""" + + return (FatalCheckError,) + + +(FatalCheckError,) = _declare_exceptions() + +del _declare_exceptions + +# Helper to display "checking" messages +# @checking('for foo') +# def foo(): +# return 'foo' +# is equivalent to: +# def foo(): +# log.info('checking for foo... ') +# ret = foo +# log.info(ret) +# return ret +# This can be combined with e.g. @depends: +# @depends(some_option) +# @checking('for something') +# def check(value): +# ... +# An optional callback can be given, that will be used to format the returned +# value when displaying it. + + +@template +def checking(what, callback=None): + def decorator(func): + def wrapped(*args, **kwargs): + log.info("checking %s... ", what) + with log.queue_debug(): + error, ret = None, None + try: + ret = func(*args, **kwargs) + except FatalCheckError as e: + error = str(e) + display_ret = callback(ret) if callback else ret + if display_ret is True: + log.info("yes") + elif display_ret is False or display_ret is None: + log.info("no") + else: + log.info(display_ret) + if error is not None: + die(error) + return ret + + return wrapped + + return decorator + + +# Template to check for programs in $PATH. +# - `var` is the name of the variable that will be set with `set_config` when +# the program is found. +# - `progs` is a list (or tuple) of program names that will be searched for. +# It can also be a reference to a @depends function that returns such a +# list. If the list is empty and there is no input, the check is skipped. 
+# - `what` is a human readable description of what is being looked for. It +# defaults to the lowercase version of `var`. +# - `input` is a string reference to an existing option or a reference to a +# @depends function resolving to explicit input for the program check. +# The default is to create an option for the environment variable `var`. +# This argument allows to use a different kind of option (possibly using a +# configure flag), or doing some pre-processing with a @depends function. +# - `allow_missing` indicates whether not finding the program is an error. +# - `paths` is a list of paths or @depends function returning a list of paths +# that will cause the given path(s) to be searched rather than $PATH. Input +# paths may either be individual paths or delimited by os.pathsep, to allow +# passing $PATH (for example) as an element. +# - `paths_have_priority` means that any programs found early in the PATH +# will be prioritized over programs found later in the PATH. The default is +# False, meaning that any of the programs earlier in the program list will be +# given priority, no matter where in the PATH they are found. +# +# The simplest form is: +# check_prog('PROG', ('a', 'b')) +# This will look for 'a' or 'b' in $PATH, and set_config PROG to the one +# it can find. If PROG is already set from the environment or command line, +# use that value instead. +@template +@imports(_from="mozbuild.shellutil", _import="quote") +def check_prog( + var, + progs, + what=None, + input=None, + allow_missing=False, + paths=None, + paths_have_priority=False, + when=None, +): + if input is not None: + # Wrap input with type checking and normalization. + @depends(input, when=when) + def input(value): + if not value: + return + if isinstance(value, str): + return (value,) + if isinstance(value, (tuple, list)) and len(value) == 1: + return value + configure_error( + "input must resolve to a tuple or a list with a " + "single element, or a string" + ) + + else: + option( + env=var, + nargs=1, + when=when, + help="Path to %s" % (what or "the %s program" % var.lower()), + ) + input = var + what = what or var.lower() + + # Trick to make a @depends function out of an immediate value. + progs = dependable(progs) + paths = dependable(paths) + allow_missing = dependable(allow_missing) + + # Avoid displaying the "Checking for" message when the inputs are such + # that we don't actually want anything to be checked. It is a bit + # convoluted because of how `when` works. + # We first wrap all the inputs except allow_missing (which doesn't count + # for whether to display the "Checking for" message). 
+ @depends_if(input, progs, paths, when=when) + def inputs(input, progs, paths): + if progs is None: + progs = () + + if not isinstance(progs, (tuple, list)): + configure_error("progs must resolve to a list or tuple!") + + return namespace(value=input, progs=progs, paths=paths) + + @depends(inputs, allow_missing, when=inputs) + @checking("for %s" % what, lambda x: quote(x) if x else "not found") + def check(inputs, allow_missing): + value = inputs.value + progs = inputs.progs + paths = inputs.paths + + if paths_have_priority: + for path in paths: + for prog in value or progs: + log.debug("%s: Looking for %s", var.lower(), quote(prog)) + result = find_program(prog, [path]) + if result: + return result + else: + for prog in value or progs: + log.debug("%s: Looking for %s", var.lower(), quote(prog)) + result = find_program(prog, paths) + if result: + return result + + if not allow_missing or value: + raise FatalCheckError("Cannot find %s" % what) + + set_config(var, check) + + return check diff --git a/build/moz.configure/compile-checks.configure b/build/moz.configure/compile-checks.configure new file mode 100755 index 0000000000..25e4f80bf5 --- /dev/null +++ b/build/moz.configure/compile-checks.configure @@ -0,0 +1,287 @@ +# -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + + +# Generates a test program and attempts to compile it. In case of failure, the +# resulting check will return None. If the test program succeeds, it will return +# the output of the test program. +# - `includes` are the includes (as file names) that will appear at the top of +# the generated test program. +# - `body` is the code that will appear in the main function of the generated +# test program. `return 0;` is appended to the function body automatically. +# - `language` is the language selection, so that the appropriate compiler is +# used. +# - `flags` are the flags to be passed to the compiler, in addition to `-c`. +# - `check_msg` is the message to be printed to accompany compiling the test +# program. +@template +def try_compile( + includes=None, + body="", + language="C++", + flags=None, + check_msg=None, + when=None, + onerror=lambda: None, +): + compiler = { + "C": c_compiler, + "C++": cxx_compiler, + }[language] + + return compiler.try_compile( + includes, body, flags, check_msg, when=when, onerror=onerror + ) + + +# Checks for the presence of the given header on the target system by compiling +# a test program including that header. The return value of the template is a +# check function returning True if the header is present, and None if it is not. +# The value of this check function is also used to set a variable (with set_define) +# corresponding to the checked header. For instance, HAVE_MALLOC_H will be set in +# defines if check_header if called with 'malloc.h' as input and malloc.h is +# present on the target. +# - `header` is the header, as a file name, to check for. +# - `language` is the language selection, so that the appropriate compiler is +# used. +# - `flags` are the flags to be passed to the compiler, in addition to `-c`. +# - `includes` are additional includes, as file names, to appear before the +# header checked for. 
+# - `when` is a depends function that if present will make performing the check +# conditional on the value of that function. +@template +def check_header( + header, language="C++", flags=None, includes=None, when=None, onerror=lambda: None +): + if when is None: + when = always + + if includes: + includes = includes[:] + else: + includes = [] + includes.append(header) + + have_header = try_compile( + includes=includes, + language=language, + flags=flags, + check_msg="for %s" % header, + when=when, + onerror=onerror, + ) + header_var = "HAVE_%s" % ( + header.upper().replace("-", "_").replace("/", "_").replace(".", "_") + ) + set_define(header_var, have_header) + return have_header + + +# A convenience wrapper for check_header for checking multiple headers. +# returns an array of the resulting checks in order corresponding to the +# provided headers. +# - `headers` are the headers to be checked. +# - `kwargs` are keyword arguments passed verbatim to check_header. + + +@template +def check_headers(*headers, **kwargs): + checks = [] + for header in headers: + checks.append(check_header(header, **kwargs)) + return checks + + +# Checks for the presence of the given symbol on the target system by compiling +# a test program. The return value of the template is a check function +# returning True if the symbol can be found, and None if it is not. +@template +def check_symbol(symbol, language="C", flags=None, when=None, onerror=lambda: None): + if when is None: + when = always + + compiler = { + "C": c_compiler, + "C++": cxx_compiler, + }[language] + + # Stolen from autoconf 2.13 ; might be irrelevant now, but it doesn't hurt to + # keep using a char return type. + comment = [ + "/* Override any gcc2 internal prototype to avoid an error. */", + "/* We use char because int might match the return type of a gcc2", + " builtin and then its argument prototype would still apply. */", + ] + + return compiler.try_run( + header=comment + ["char %s();" % symbol], + body="%s();" % symbol, + flags=flags, + check_msg="for %s" % symbol, + when=when, + onerror=onerror, + ) + + +# Determine whether to add a given flag to the given lists of flags for C or +# C++ compilation. +# - `flag` is the flag to test +# - `flags_collection` is a @depends function for a namespace of lists of +# C/C++ compiler flags to add to. +# - `test_flags` is a list of flags to pass to the compiler instead of merely +# passing `flag`. This is especially useful for checking warning flags. If +# this list is empty, `flag` will be passed on its own. +# - `compiler` (optional) is the compiler to test against (c_compiler or +# cxx_compiler, from toolchain.configure). When omitted, both compilers +# are tested; the list of flags added to is dependent on the compiler tested. +# - `when` (optional) is a @depends function or option name conditioning +# when the warning flag is wanted. +# - `check`, when not set, skips checking whether the flag is supported and +# adds it to the list of flags unconditionally. 
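+# The simplest form is (illustrative only, using the compilation_flags
+# collection defined further down):
+#   check_and_add_flags('-fno-math-errno', compilation_flags,
+#                       ['-Werror', '-fno-math-errno'])
+# which tests the flag against both compilers and, where accepted, appends
+# it to compilation_flags.cflags / compilation_flags.cxxflags.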
+@template +def check_and_add_flags( + flag, flags_collection, test_flags, compiler=None, when=None, check=True +): + if compiler is not None: + compilers = (compiler,) + else: + compilers = (c_compiler, cxx_compiler) + + if when is None: + when = always + + results = [] + + if test_flags: + flags = test_flags + else: + flags = [flag] + + for c in compilers: + assert c in {c_compiler, cxx_compiler, host_c_compiler, host_cxx_compiler} + lang, list_of_flags = { + c_compiler: ("C", flags_collection.cflags), + cxx_compiler: ("C++", flags_collection.cxxflags), + host_c_compiler: ("host C", flags_collection.host_cflags), + host_cxx_compiler: ("host C++", flags_collection.host_cxxflags), + }[c] + + @depends(c, when) + def result(c, when): + if when and c.type in ("clang", "gcc"): + return True + + if check: + + @depends(c, dependable(flags)) + def flags(c, flags): + # Don't error out just because clang complains about other things. + if c.type == "clang": + flags += ["-Wno-error=unused-command-line-argument"] + + return flags + + result = c.try_compile( + flags=flags, + when=result, + check_msg="whether the %s compiler supports %s" % (lang, flag), + ) + + @depends(result, list_of_flags) + def maybe_add_flag(result, list_of_flags): + if result: + list_of_flags.append(flag) + + results.append(result) + + return tuple(results) + + +@dependable +def warnings_flags(): + return namespace(cflags=[], cxxflags=[], host_cflags=[], host_cxxflags=[]) + + +# Tests whether GCC or clang support the given warning flag, and if it is, +# add it to the list of warning flags for the build. +# - `warning` is the warning flag (e.g. -Wfoo) +# - `compiler` (optional) is the compiler to test against (c_compiler or +# cxx_compiler, from toolchain.configure). When omitted, both compilers +# are tested. +# - `when` (optional) is a @depends function or option name conditioning +# when the warning flag is wanted. +# - `check`, when not set, skips checking whether the flag is supported and +# adds it to the list of warning flags unconditionally. This is only meant +# for add_gcc_warning(). +@template +def check_and_add_gcc_warning(warning, compiler=None, when=None, check=True): + # GCC and clang will fail if given an unknown warning option like + # -Wfoobar. But later versions won't fail if given an unknown negated + # warning option like -Wno-foobar. So when we are checking for support + # of a negated warning option, we actually test the positive form, but + # add the negated form to the flags variable. + if warning.startswith("-Wno-") and not warning.startswith("-Wno-error="): + flags = ["-Werror", "-W" + warning[5:]] + elif warning.startswith("-Werror="): + flags = [warning] + else: + flags = ["-Werror", warning] + + return check_and_add_flags( + warning, warnings_flags, flags, compiler=compiler, when=when, check=check + ) + + +# Add the given warning to the list of warning flags for the build. +# - `warning` is the warning flag (e.g. -Wfoo) +# - `compiler` (optional) is the compiler to add the flag for (c_compiler or +# cxx_compiler, from toolchain.configure). When omitted, the warning flag +# is added for both compilers. +# - `when` (optional) is a @depends function or option name conditioning +# when the warning flag is wanted. + + +@template +def add_gcc_warning(warning, compiler=None, when=None): + check_and_add_gcc_warning(warning, compiler, when, check=False) + + +# Like the warning checks above, but for general compilation flags. 
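+# For instance (illustrative only), add_gcc_warning('-Wall') records -Wall
+# in warnings_flags without testing it, while check_and_add_gcc_flag(
+# '-fno-exceptions') first verifies the flag with -Werror and only records
+# it in compilation_flags when the compiler accepts it.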
+@dependable +def compilation_flags(): + return namespace(cflags=[], cxxflags=[], host_cflags=[], host_cxxflags=[]) + + +# Tests whether GCC or clang support the given compilation flag; if the flag +# is supported, add it to the list of compilation flags for the build. +# - `flag` is the flag to test +# - `compiler` (optional) is the compiler to test against (c_compiler or +# cxx_compiler, from toolchain.configure). When omitted, both compilers +# are tested. +# - `when` (optional) is a @depends function or option name conditioning +# when the warning flag is wanted. +# - `check`, when not set, skips checking whether the flag is supported and +# adds it to the list of flags unconditionally. This is only meant for +# add_gcc_flag(). +@template +def check_and_add_gcc_flag(flag, compiler=None, when=None, check=True): + flags = ["-Werror", flag] + + return check_and_add_flags( + flag, compilation_flags, flags, compiler=compiler, when=when, check=check + ) + + +# Add the given flag to the list of flags for the build. +# - `flag` is the flag (e.g. -fno-sized-deallocation) +# - `compiler` (optional) is the compiler to add the flag for (c_compiler or +# cxx_compiler, from toolchain.configure). When omitted, the flag is added +# for both compilers. +# - `when` (optional) is a @depends function or option name conditioning +# when the flag is wanted. +@template +def add_gcc_flag(warning, compiler=None, when=None): + check_and_add_gcc_flag(warning, compiler, when, check=False) diff --git a/build/moz.configure/compilers-util.configure b/build/moz.configure/compilers-util.configure new file mode 100644 index 0000000000..1d8930347f --- /dev/null +++ b/build/moz.configure/compilers-util.configure @@ -0,0 +1,135 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + + +@template +@imports("textwrap") +@imports(_from="mozbuild.configure", _import="SandboxDependsFunction") +def compiler_class(compiler, host_or_target): + is_target = host_or_target is target + + class Compiler(SandboxDependsFunction): + # Generates a test program and attempts to compile it. In case of + # failure, the resulting check will return None. If the test program + # succeeds, it will return the output of the test program. + # - `includes` are the includes (as file names) that will appear at the + # top of the generated test program. + # - `body` is the code that will appear in the main function of the + # generated test program. `return 0;` is appended to the function + # body automatically. + # - `flags` are the flags to be passed to the compiler, in addition to + # `-c`. + # - `check_msg` is the message to be printed to accompany compiling the + # test program. + def try_compile( + self, + includes=None, + body="", + flags=None, + check_msg=None, + when=None, + onerror=lambda: None, + ): + @depends(dependable(flags)) + def flags(flags): + flags = list(flags or []) + flags.append("-c") + return flags + + @depends(dependable(includes)) + def header(includes): + includes = includes or [] + return ["#include <%s>" % f for f in includes] + + return self.try_run( + header=header, + body=body, + flags=flags, + check_msg=check_msg, + when=when, + onerror=onerror, + ) + + # Generates a test program and run the compiler against it. In case of + # failure, the resulting check will return None. 
+ # - `header` is code that will appear at the top of the generated test + # program. + # - `body` is the code that will appear in the main function of the + # generated test program. `return 0;` is appended to the function + # body automatically. + # - `flags` are the flags to be passed to the compiler. + # - `check_msg` is the message to be printed to accompany compiling the + # test program. + # - `onerror` is a function called when the check fails. + def try_run( + self, + header=None, + body="", + flags=None, + check_msg=None, + when=None, + onerror=lambda: None, + ): + source = textwrap.dedent( + """\ + int + main(void) + { + %s + ; + return 0; + } + """ + % body + ) + + if check_msg: + + def checking_fn(fn): + return checking(check_msg)(fn) + + else: + + def checking_fn(fn): + return fn + + @depends( + self, + dependable(flags), + extra_toolchain_flags, + stlport_cppflags, + dependable(header), + when=when, + ) + @checking_fn + def func(compiler, flags, extra_flags, stlport_flags, header): + flags = list(flags or []) + if is_target: + flags += extra_flags or [] + if compiler.language == "C++": + flags += stlport_flags or [] + header = header or "" + if isinstance(header, (list, tuple)): + header = "\n".join(header) + if header: + header += "\n" + + if ( + try_invoke_compiler( + compiler.wrapper + [compiler.compiler] + compiler.flags, + compiler.language, + header + source, + flags, + onerror=onerror, + ) + is not None + ): + return True + + return func + + compiler.__class__ = Compiler + return compiler diff --git a/build/moz.configure/flags.configure b/build/moz.configure/flags.configure new file mode 100644 index 0000000000..3cad466237 --- /dev/null +++ b/build/moz.configure/flags.configure @@ -0,0 +1,71 @@ +# -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# We support C++14, but we don't want to enable the sized deallocation +# facilities in C++14 yet. +check_and_add_gcc_flag("-fno-sized-deallocation", compiler=cxx_compiler) +# Likewise for C++17 and aligned allocation. It's not immediately obvious +# from the clang and GCC documentation, but they both support this. +check_and_add_gcc_flag("-fno-aligned-new", compiler=cxx_compiler) + +# Please keep these last in this file. +add_old_configure_assignment("_COMPILATION_CFLAGS", compilation_flags.cflags) +add_old_configure_assignment("_COMPILATION_CXXFLAGS", compilation_flags.cxxflags) +add_old_configure_assignment("_COMPILATION_HOST_CFLAGS", compilation_flags.host_cflags) +add_old_configure_assignment( + "_COMPILATION_HOST_CXXFLAGS", compilation_flags.host_cxxflags +) + + +@depends(rust_compile_flags, rust_warning_flags) +def rust_flags(compile_flags, warning_flags): + return compile_flags + warning_flags + + +set_config("MOZ_RUST_DEFAULT_FLAGS", rust_flags) + + +option( + "--disable-new-pass-manager", + help="Use the legacy LLVM pass manager in clang builds", +) + + +@depends( + "--enable-new-pass-manager", + c_compiler, + host, + target, + "MOZ_PGO", + enable_fuzzing, + ubsan, +) +def new_pass_manager_flags(enabled, compiler, host, target, pgo, enable_fuzzing, ubsan): + if host.os == "OSX": + # Some native Mac builds hang with the new pass manager. Given the + # inability to test in CI, don't take the risk of further breakage. 
+ return None + if target.os == "OSX" and not pgo: + # Also disable when cross-compiling to Mac, because plain-ish opt + # builds hang. Variants like asan and ccov work fine, but it would be + # too tedious to test them all here. PGO is the only thing that matters + # enough to make an exception for. + return None + if enable_fuzzing and compiler.version < "10.0.0": + # Clang 9 does not seem to play well with libFuzzer + return None + if ubsan and compiler.version >= "10.0.0": + # Temporary until https://bugs.llvm.org/show_bug.cgi?id=45835 gets a + # real fix: clang 10 hangs with some ubsan-inserted code constructs. + return None + if enabled and compiler.version >= "9.0.0": + if compiler.type == "clang": + return ["-fexperimental-new-pass-manager"] + elif compiler.type == "clang-cl": + return ["-Xclang", "-fexperimental-new-pass-manager"] + + +set_config("MOZ_NEW_PASS_MANAGER_FLAGS", new_pass_manager_flags) diff --git a/build/moz.configure/headers.configure b/build/moz.configure/headers.configure new file mode 100644 index 0000000000..5332c7365f --- /dev/null +++ b/build/moz.configure/headers.configure @@ -0,0 +1,119 @@ +# -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# Check for headers defining standard int types. +check_header("stdint.h") +have_inttypes = check_header("inttypes.h") + +# Assume we have ansi C header files available. +set_define("STDC_HEADERS", True) + +set_config("HAVE_INTTYPES_H", have_inttypes) + +building_linux = depends(target)(lambda target: target.kernel == "Linux") + +have_malloc = check_header("malloc.h") + +check_header("alloca.h") + +add_old_configure_assignment("HAVE_MALLOC_H", have_malloc) + +check_headers( + "sys/byteorder.h", + "getopt.h", + "unistd.h", + "nl_types.h", + "cpuid.h", + "fts.h", +) + +# These are all the places some variant of statfs can be hiding. +check_headers( + "sys/statvfs.h", + "sys/statfs.h", + "sys/vfs.h", + "sys/mount.h", +) + +# Quota support +# Check for both the header and quotactl() because Android headers can have the +# header but not quotactl(). +set_define( + "HAVE_SYS_QUOTA_H", + try_compile( + includes=["sys/quota.h"], + body="quotactl(0, nullptr, 0, (caddr_t)nullptr);", + check_msg="for sys/quota.h", + ), +) +check_header("linux/quota.h", includes=["sys/socket.h"], when=building_linux) + +# SCTP support - needs various network include headers +check_headers( + "linux/if_addr.h", + "linux/rtnetlink.h", + includes=["sys/socket.h"], + when=building_linux, +) + +check_header("sys/queue.h") + +check_headers( + "sys/types.h", + "netinet/in.h", + "byteswap.h", +) + +# memfd_create(2) -- Note that older versions of the Linux man-pages +# project incorrectly cite , which doesn't exist; this +# was fixed in the man-pages-5.00 release. +set_define( + "HAVE_MEMFD_CREATE", + try_compile( + includes=["sys/mman.h"], + body='memfd_create("", 0);', + check_msg="for memfd_create in sys/mman.h", + ), +) + +# TODO: Move these checks to file specific to --enable-project=js. 
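+# (The checks above all follow the same try_compile/set_define pattern; for
+# example, a hypothetical probe for eventfd(2) would look like:
+#   set_define(
+#       "HAVE_EVENTFD",
+#       try_compile(
+#           includes=["sys/eventfd.h"],
+#           body="eventfd(0, 0);",
+#           check_msg="for eventfd in sys/eventfd.h",
+#       ),
+#   )
+# This sketch is illustrative only and is not part of the build.)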
+have_perf_event_h = check_header("linux/perf_event.h", when=building_linux) + +option( + "--with-linux-headers", + help="location where the Linux kernel headers can be found", + nargs=1, +) + +passed_linux_header_flags = depends_if("--with-linux-headers")( + lambda v: ["-I%s" % v[0]] +) + + +@depends( + try_compile( + includes=["asm/unistd.h"], + body="return sizeof(__NR_perf_event_open);", + flags=passed_linux_header_flags, + check_msg="for perf_event_open system call", + ), + when=have_perf_event_h, +) +def have_perf_event_open(have_perf_event_open): + if have_perf_event_open: + return True + + +set_config("HAVE_LINUX_PERF_EVENT_H", have_perf_event_open) + + +@depends(passed_linux_header_flags, have_perf_event_open) +def linux_headers_includes(passed_linux_header_flags, have_perf_event_open): + if have_perf_event_open and passed_linux_header_flags: + return passed_linux_header_flags[0] + + +set_config("LINUX_HEADERS_INCLUDES", linux_headers_includes) diff --git a/build/moz.configure/init.configure b/build/moz.configure/init.configure new file mode 100644 index 0000000000..7435bdeaad --- /dev/null +++ b/build/moz.configure/init.configure @@ -0,0 +1,1408 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +include("util.configure") +include("checks.configure") + +# Make `toolkit` available when toolkit/moz.configure is not included. +toolkit = dependable(None) +# Likewise with `bindgen_config_paths` when +# build/moz.configure/bindgen.configure is not included. +bindgen_config_paths = dependable(None) + +option(env="DIST", nargs=1, help="DIST directory") + + +# Do not allow objdir == srcdir builds. +# ============================================================== +@depends("--help", "DIST") +@imports(_from="__builtin__", _import="open") +@imports(_from="os.path", _import="exists") +@imports(_from="six", _import="ensure_text") +def check_build_environment(help, dist): + topobjdir = os.path.realpath(".") + topsrcdir = os.path.realpath(os.path.join(os.path.dirname(__file__), "..", "..")) + + if dist: + dist = normsep(dist[0]) + else: + dist = os.path.join(topobjdir, "dist") + + result = namespace( + topsrcdir=topsrcdir, + topobjdir=topobjdir, + dist=dist, + ) + + if help: + return result + + # This limitation has mostly to do with GNU make. Since make can't represent + # variables with spaces without correct quoting and many paths are used + # without proper quoting, using paths with spaces commonly results in + # targets or dependencies being treated as multiple paths. This, of course, + # undermines the ability for make to perform up-to-date checks and makes + # the build system not work very efficiently. In theory, a non-make build + # backend will make this limitation go away. But there is likely a long tail + # of things that will need fixing due to e.g. lack of proper path quoting. 
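+    # (For example, with a srcdir of "/home/user/my src", a generated rule
+    # like
+    #     /home/user/my src/foo.o: foo.c
+    # is read by make as two separate targets, "/home/user/my" and
+    # "src/foo.o", so up-to-date checks no longer work.)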
+ if len(topsrcdir.split()) > 1: + die("Source directory cannot be located in a path with spaces: %s" % topsrcdir) + if len(topobjdir.split()) > 1: + die("Object directory cannot be located in a path with spaces: %s" % topobjdir) + + if topsrcdir == topobjdir: + die( + " ***\n" + " * Building directly in the main source directory is not allowed.\n" + " *\n" + " * To build, you must run configure from a separate directory\n" + " * (referred to as an object directory).\n" + " *\n" + " * If you are building with a mozconfig, you will need to change your\n" + " * mozconfig to point to a different object directory.\n" + " ***" + ) + + # Check for CRLF line endings. + with open(os.path.join(topsrcdir, "configure.py"), "r") as fh: + data = ensure_text(fh.read()) + if "\r" in data: + die( + "\n ***\n" + " * The source tree appears to have Windows-style line endings.\n" + " *\n" + " * If using Git, Git is likely configured to use Windows-style\n" + " * line endings.\n" + " *\n" + " * To convert the working copy to UNIX-style line endings, run\n" + " * the following:\n" + " *\n" + " * $ git config core.autocrlf false\n" + " * $ git config core.eof lf\n" + " * $ git rm --cached -r .\n" + " * $ git reset --hard\n" + " *\n" + " * If not using Git, the tool you used to obtain the source\n" + " * code likely converted files to Windows line endings. See\n" + " * usage information for that tool for more.\n" + " ***" + ) + + # Check for a couple representative files in the source tree + conflict_files = [ + "* %s" % f + for f in ("Makefile", "config/autoconf.mk") + if exists(os.path.join(topsrcdir, f)) + ] + if conflict_files: + die( + " ***\n" + " * Your source tree contains these files:\n" + " %s\n" + " * This indicates that you previously built in the source tree.\n" + " * A source tree build can confuse the separate objdir build.\n" + " *\n" + " * To clean up the source tree:\n" + " * 1. cd %s\n" + " * 2. gmake distclean\n" + " ***" % ("\n ".join(conflict_files), topsrcdir) + ) + + return result + + +set_config("TOPSRCDIR", check_build_environment.topsrcdir) +set_config("TOPOBJDIR", check_build_environment.topobjdir) +set_config("DIST", check_build_environment.dist) + +add_old_configure_assignment("_topsrcdir", check_build_environment.topsrcdir) +add_old_configure_assignment("_objdir", check_build_environment.topobjdir) +add_old_configure_assignment("DIST", check_build_environment.dist) + +option(env="MOZ_AUTOMATION", help="Enable options for automated builds") +set_config("MOZ_AUTOMATION", depends_if("MOZ_AUTOMATION")(lambda x: True)) + + +option(env="OLD_CONFIGURE", nargs=1, help="Path to the old configure script") + +option(env="MOZCONFIG", nargs=1, help="Mozconfig location") + + +# Read user mozconfig +# ============================================================== +# Note: the dependency on --help is only there to always read the mozconfig, +# even when --help is passed. Without this dependency, the function wouldn't +# be called when --help is passed, and the mozconfig wouldn't be read. 
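+# (For reference, the result consumed below is a dict shaped roughly like:
+#   {'path': '/path/to/mozconfig',
+#    'configure_args': [...],
+#    'env':  {'added': {name: value}, 'modified': {name: (old, new)}},
+#    'vars': {'added': {name: value}, 'modified': {name: (old, new)}}}
+# as the uses in mozconfig_options and virtualenv_python3 below show.)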
+ + +@depends("MOZCONFIG", "OLD_CONFIGURE", check_build_environment, "--help") +@imports(_from="mozbuild.mozconfig", _import="MozconfigLoader") +@imports(_from="mozboot.mozconfig", _import="find_mozconfig") +def mozconfig(mozconfig, old_configure, build_env, help): + if not old_configure and not help: + die("The OLD_CONFIGURE environment variable must be set") + + # Don't read the mozconfig for the js configure (yay backwards + # compatibility) + # While the long term goal is that js and top-level use the same configure + # and the same overall setup, including the possibility to use mozconfigs, + # figuring out what we want to do wrt mozconfig vs. command line and + # environment variable is not a clear-cut case, and it's more important to + # fix the immediate problem mozconfig causes to js developers by + # "temporarily" returning to the previous behavior of not loading the + # mozconfig for the js configure. + # Separately to the immediate problem for js developers, there is also the + # need to not load a mozconfig when running js configure as a subconfigure. + # Unfortunately, there is no direct way to tell whether the running + # configure is the js configure. The indirect way is to look at the + # OLD_CONFIGURE path, which points to js/src/old-configure. + # I expect we'll have figured things out for mozconfigs well before + # old-configure dies. + if old_configure and os.path.dirname(os.path.abspath(old_configure[0])).endswith( + "/js/src" + ): + return {"path": None} + + topsrcdir = build_env.topsrcdir + loader = MozconfigLoader(topsrcdir) + mozconfig = mozconfig[0] if mozconfig else None + mozconfig = find_mozconfig(topsrcdir, env={"MOZCONFIG": mozconfig}) + mozconfig = loader.read_mozconfig(mozconfig) + + return mozconfig + + +set_config("MOZCONFIG", depends(mozconfig)(lambda m: m["path"])) + + +# Mozilla-Build +# ============================================================== +option(env="MOZILLABUILD", nargs=1, help="Path to Mozilla Build (Windows-only)") + +option(env="CONFIG_SHELL", nargs=1, help="Path to a POSIX shell") + +# It feels dirty replicating this from python/mozbuild/mozbuild/mozconfig.py, +# but the end goal being that the configure script would go away... + + +@depends("CONFIG_SHELL", "MOZILLABUILD") +@checking("for a shell") +@imports("sys") +def shell(value, mozillabuild): + if value: + return find_program(value[0]) + shell = "sh" + if mozillabuild: + shell = mozillabuild[0] + "/msys/bin/sh" + if sys.platform == "win32": + shell = shell + ".exe" + return find_program(shell) + + +# This defines a reasonable shell for when running with --help. +# If one was passed in the environment, though, fall back to that. 
+@depends("--help", "CONFIG_SHELL") +def help_shell(help, shell): + if help and not shell: + return "sh" + + +shell = help_shell | shell + + +# Python 3 +# ======== + +option(env="PYTHON3", nargs=1, help="Python 3 interpreter (3.6 or later)") + +option( + env="VIRTUALENV_NAME", + nargs=1, + default="init_py3", + help="Name of the in-objdir virtualenv", +) + + +@depends("PYTHON3", "VIRTUALENV_NAME", check_build_environment, mozconfig, "--help") +@imports(_from="__builtin__", _import="Exception") +@imports("os") +@imports("sys") +@imports("subprocess") +@imports("distutils.sysconfig") +@imports(_from="mozbuild.configure.util", _import="LineIO") +@imports(_from="mozbuild.virtualenv", _import="VirtualenvManager") +@imports(_from="mozbuild.virtualenv", _import="verify_python_version") +@imports(_from="mozbuild.pythonutil", _import="find_python3_executable") +@imports(_from="mozbuild.pythonutil", _import="python_executable_version") +@imports(_from="six", _import="ensure_text") +def virtualenv_python3(env_python, virtualenv_name, build_env, mozconfig, help): + # Avoid re-executing python when running configure --help. + if help: + return + + # NOTE: We cannot assume the Python we are calling this code with is the + # Python we want to set up a virtualenv for. + # + # We also cannot assume that the Python the caller is configuring meets our + # build requirements. + # + # Because of this the code is written to re-execute itself with the correct + # interpreter if required. + + log.debug("python3: running with pid %r" % os.getpid()) + log.debug("python3: sys.executable: %r" % sys.executable) + + python = env_python[0] if env_python else None + virtualenv_name = virtualenv_name[0] + + # Did our python come from mozconfig? Overrides environment setting. + # Ideally we'd rely on the mozconfig injection from mozconfig_options, + # but we'd rather avoid the verbosity when we need to reexecute with + # a different python. + if mozconfig["path"]: + if "PYTHON3" in mozconfig["env"]["added"]: + python = mozconfig["env"]["added"]["PYTHON3"] + elif "PYTHON3" in mozconfig["env"]["modified"]: + python = mozconfig["env"]["modified"]["PYTHON3"][1] + elif "PYTHON3" in mozconfig["vars"]["added"]: + python = mozconfig["vars"]["added"]["PYTHON3"] + elif "PYTHON3" in mozconfig["vars"]["modified"]: + python = mozconfig["vars"]["modified"]["PYTHON3"][1] + + log.debug("python3: executable from configuration: %r" % python) + + # Verify that the Python version we executed this code with is the minimum + # required version to handle all project code. + with LineIO(lambda l: log.error(l)) as out: + verify_python_version(out) + + # If this is a mozilla-central build, we'll find the virtualenv in the top + # source directory. If this is a SpiderMonkey build, we assume we're at + # js/src and try to find the virtualenv from the mozilla-central root. + # See mozilla-central changeset d2cce982a7c809815d86d5daecefe2e7a563ecca + # Bug 784841 + topsrcdir, topobjdir = build_env.topsrcdir, build_env.topobjdir + if topobjdir.endswith("/js/src"): + topobjdir = topobjdir[:-7] + + virtualenvs_root = os.path.join(topobjdir, "_virtualenvs") + with LineIO(lambda l: log.info(l), "replace") as out: + manager = VirtualenvManager( + topsrcdir, + os.path.join(virtualenvs_root, virtualenv_name), + out, + os.path.join(topsrcdir, "build", "build_virtualenv_packages.txt"), + ) + + # If we're not in the virtualenv, we need to update the path to include some + # necessary modules for find_program. 
+ if "MOZBUILD_VIRTUALENV" in os.environ: + python = sys.executable + else: + sys.path.insert(0, os.path.join(topsrcdir, "testing", "mozbase", "mozfile")) + sys.path.insert( + 0, os.path.join(topsrcdir, "third_party", "python", "backports") + ) + + # If we know the Python executable the caller is asking for then verify its + # version. If the caller did not ask for a specific executable then find + # a reasonable default. + if python: + found_python = find_program(python) + if not found_python: + die( + "The PYTHON3 environment variable does not contain " + "a valid path. Cannot find %s", + python, + ) + python = found_python + try: + version = python_executable_version(python).version + except Exception as e: + raise FatalCheckError( + "could not determine version of PYTHON3 " "(%s): %s" % (python, e) + ) + else: + # Fall back to the search routine. + python, version = find_python3_executable(min_version="3.6.0") + + # The API returns a bytes whereas everything in configure is unicode. + if python: + python = ensure_text(python) + + if not python: + raise FatalCheckError( + "Python 3.6 or newer is required to build. " + "Ensure a `python3.x` executable is in your " + "PATH or define PYTHON3 to point to a Python " + "3.6 executable." + ) + + if version < (3, 6, 0): + raise FatalCheckError( + "Python 3.6 or newer is required to build; " + "%s is Python %d.%d" % (python, version[0], version[1]) + ) + + log.debug("python3: found executable: %r" % python) + + if not manager.up_to_date(python): + log.info("Creating Python 3 environment") + manager.build(python) + else: + log.debug("python3: venv is up to date") + + python = normsep(manager.python_path) + + if not normsep(sys.executable).startswith(normsep(virtualenvs_root)): + log.debug( + "python3: executing as %s, should be running as %s" + % (sys.executable, manager.python_path) + ) + log.info("Re-executing in the virtualenv") + if env_python: + del os.environ["PYTHON3"] + # Homebrew on macOS will change Python's sys.executable to a custom + # value which messes with mach's virtualenv handling code. Override + # Homebrew's changes with the correct sys.executable value. + os.environ["PYTHONEXECUTABLE"] = python + # Another quirk on macOS, with the system python, the virtualenv is + # not fully operational (missing entries in sys.path) if + # __PYVENV_LAUNCHER__ is set. + os.environ.pop("__PYVENV_LAUNCHER__", None) + # One would prefer to use os.execl, but that's completely borked on + # Windows. + sys.exit(subprocess.call([python] + sys.argv)) + + # We are now in the virtualenv + if not distutils.sysconfig.get_python_lib(): + die("Could not determine python site packages directory") + + # We may have set PYTHONEXECUTABLE above, and that affects python + # subprocesses we may invoke as part of configure (e.g. hg), so + # unset it. 
+ os.environ.pop("PYTHONEXECUTABLE", None) + + str_version = ".".join(str(v) for v in version) + + return namespace( + path=python, + version=version, + str_version=str_version, + ) + + +@depends(virtualenv_python3) +@checking("for Python 3", callback=lambda x: "%s (%s)" % (x.path, x.str_version)) +def virtualenv_python3(venv): + return venv + + +set_config("PYTHON3", virtualenv_python3.path) +set_config("PYTHON3_VERSION", virtualenv_python3.str_version) +add_old_configure_assignment("PYTHON3", virtualenv_python3.path) + + +# Inject mozconfig options +# ============================================================== +# All options defined above this point can't be injected in mozconfig_options +# below, so collect them. + + +@template +def early_options(): + @depends("--help") + @imports("__sandbox__") + @imports(_from="six", _import="itervalues") + def early_options(_): + return set( + option.env for option in itervalues(__sandbox__._options) if option.env + ) + + return early_options + + +early_options = early_options() + + +@depends(mozconfig, early_options, "MOZ_AUTOMATION", "--help") +# This gives access to the sandbox. Don't copy this blindly. +@imports("__sandbox__") +@imports("os") +@imports("six") +def mozconfig_options(mozconfig, early_options, automation, help): + if mozconfig["path"]: + if "MOZ_AUTOMATION_MOZCONFIG" in mozconfig["env"]["added"]: + if not automation: + log.error( + "%s directly or indirectly includes an in-tree " "mozconfig.", + mozconfig["path"], + ) + log.error( + "In-tree mozconfigs make strong assumptions about " + "and are only meant to be used by Mozilla " + "automation." + ) + die("Please don't use them.") + helper = __sandbox__._helper + log.info("Adding configure options from %s" % mozconfig["path"]) + for arg in mozconfig["configure_args"]: + log.info(" %s" % arg) + # We could be using imply_option() here, but it has other + # contraints that don't really apply to the command-line + # emulation that mozconfig provides. + helper.add(arg, origin="mozconfig", args=helper._args) + + def add(key, value): + if key.isupper(): + arg = "%s=%s" % (key, value) + log.info(" %s" % arg) + if key not in early_options: + helper.add(arg, origin="mozconfig", args=helper._args) + + for key, value in six.iteritems(mozconfig["env"]["added"]): + add(key, value) + os.environ[key] = value + for key, (_, value) in six.iteritems(mozconfig["env"]["modified"]): + add(key, value) + os.environ[key] = value + for key, value in six.iteritems(mozconfig["vars"]["added"]): + add(key, value) + for key, (_, value) in six.iteritems(mozconfig["vars"]["modified"]): + add(key, value) + + +# Source checkout and version control integration. +# ================================================ + + +@depends(check_build_environment, "MOZ_AUTOMATION", "--help") +@checking("for vcs source checkout") +@imports("os") +def vcs_checkout_type(build_env, automation, help): + if os.path.exists(os.path.join(build_env.topsrcdir, ".hg")): + return "hg" + elif os.path.exists(os.path.join(build_env.topsrcdir, ".git")): + return "git" + elif automation and not help: + raise FatalCheckError( + "unable to resolve VCS type; must run " + "from a source checkout when MOZ_AUTOMATION " + "is set" + ) + + +# Resolve VCS binary for detected repository type. + + +# TODO remove hg.exe once bug 1382940 addresses ambiguous executables case. 
+hg = check_prog( + "HG", + ( + "hg.exe", + "hg", + ), + allow_missing=True, + when=depends(vcs_checkout_type)(lambda x: x == "hg"), +) +git = check_prog( + "GIT", + ("git",), + allow_missing=True, + when=depends(vcs_checkout_type)(lambda x: x == "git"), +) + + +@depends_if(hg) +@checking("for Mercurial version") +@imports("os") +@imports("re") +def hg_version(hg): + # HGPLAIN in Mercurial 1.5+ forces stable output, regardless of set + # locale or encoding. + env = dict(os.environ) + env["HGPLAIN"] = "1" + + out = check_cmd_output(hg, "--version", env=env) + + match = re.search(r"Mercurial Distributed SCM \(version ([^\)]+)", out) + + if not match: + raise FatalCheckError("unable to determine Mercurial version: %s" % out) + + # The version string may be "unknown" for Mercurial run out of its own + # source checkout or for bad builds. But LooseVersion handles it. + + return Version(match.group(1)) + + +# Resolve Mercurial config items so other checks have easy access. +# Do NOT set this in the config because it may contain sensitive data +# like API keys. + + +@depends_all(check_build_environment, hg, hg_version) +@imports("os") +def hg_config(build_env, hg, version): + env = dict(os.environ) + env["HGPLAIN"] = "1" + + # Warnings may get sent to stderr. But check_cmd_output() ignores + # stderr if exit code is 0. And the command should always succeed if + # `hg version` worked. + out = check_cmd_output(hg, "config", env=env, cwd=build_env.topsrcdir) + + config = {} + + for line in out.strip().splitlines(): + key, value = [s.strip() for s in line.split("=", 1)] + config[key] = value + + return config + + +@depends_if(git) +@checking("for Git version") +@imports("re") +def git_version(git): + out = check_cmd_output(git, "--version").rstrip() + + match = re.search("git version (.*)$", out) + + if not match: + raise FatalCheckError("unable to determine Git version: %s" % out) + + return Version(match.group(1)) + + +# Only set VCS_CHECKOUT_TYPE if we resolved the VCS binary. +# Require resolved VCS info when running in automation so automation's +# environment is more well-defined. + + +@depends(vcs_checkout_type, hg_version, git_version, "MOZ_AUTOMATION") +def exposed_vcs_checkout_type(vcs_checkout_type, hg, git, automation): + if vcs_checkout_type == "hg": + if hg: + return "hg" + + if automation: + raise FatalCheckError("could not resolve Mercurial binary info") + + elif vcs_checkout_type == "git": + if git: + return "git" + + if automation: + raise FatalCheckError("could not resolve Git binary info") + elif vcs_checkout_type: + raise FatalCheckError("unhandled VCS type: %s" % vcs_checkout_type) + + +set_config("VCS_CHECKOUT_TYPE", exposed_vcs_checkout_type) + +# Obtain a Repository interface for the current VCS repository. 
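+# (Concretely, get_repository_object(topsrcdir, hg=...) or (..., git=...) is
+# called below, and the returned object's sparse_checkout_present() feeds
+# the VCS_SPARSE_CHECKOUT config.)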
+ + +@depends(check_build_environment, exposed_vcs_checkout_type, hg, git) +@imports(_from="mozversioncontrol", _import="get_repository_object") +def vcs_repository(build_env, vcs_checkout_type, hg, git): + if vcs_checkout_type == "hg": + return get_repository_object(build_env.topsrcdir, hg=hg) + elif vcs_checkout_type == "git": + return get_repository_object(build_env.topsrcdir, git=git) + elif vcs_checkout_type: + raise FatalCheckError("unhandled VCS type: %s" % vcs_checkout_type) + + +@depends_if(vcs_repository) +@checking("for sparse checkout") +def vcs_sparse_checkout(repo): + return repo.sparse_checkout_present() + + +set_config("VCS_SPARSE_CHECKOUT", vcs_sparse_checkout) + +# The application/project to build +# ============================================================== +option( + "--enable-application", + nargs=1, + env="MOZ_BUILD_APP", + help="Application to build. Same as --enable-project.", +) + + +@depends("--enable-application") +def application(app): + if app: + return app + + +imply_option("--enable-project", application) + + +@depends(check_build_environment) +def default_project(build_env): + if build_env.topobjdir.endswith("/js/src"): + return "js" + return "browser" + + +option("--enable-project", nargs=1, default=default_project, help="Project to build") + + +# Host and target systems +# ============================================================== +option("--host", nargs=1, help="Define the system type performing the build") + +option( + "--target", + nargs=1, + help="Define the system type where the resulting executables will be " "used", +) + + +@imports(_from="mozbuild.configure.constants", _import="CPU") +@imports(_from="mozbuild.configure.constants", _import="CPU_bitness") +@imports(_from="mozbuild.configure.constants", _import="Endianness") +@imports(_from="mozbuild.configure.constants", _import="Kernel") +@imports(_from="mozbuild.configure.constants", _import="OS") +@imports(_from="__builtin__", _import="ValueError") +def split_triplet(triplet, allow_msvc=False): + # The standard triplet is defined as + # CPU_TYPE-VENDOR-OPERATING_SYSTEM + # There is also a quartet form: + # CPU_TYPE-VENDOR-KERNEL-OPERATING_SYSTEM + # But we can consider the "KERNEL-OPERATING_SYSTEM" as one. + # Additionally, some may omit "unknown" when the vendor + # is not specified and emit + # CPU_TYPE-OPERATING_SYSTEM + vendor = "unknown" + parts = triplet.split("-", 2) + if len(parts) == 3: + cpu, vendor, os = parts + elif len(parts) == 2: + cpu, os = parts + else: + raise ValueError("Unexpected triplet string: %s" % triplet) + + # Autoconf uses config.sub to validate and canonicalize those triplets, + # but the granularity of its results has never been satisfying to our + # use, so we've had our own, different, canonicalization. We've also + # historically not been very consistent with how we use the canonicalized + # values. Hopefully, this will help us make things better. + # The tests are inherited from our decades-old autoconf-based configure, + # which can probably be improved/cleaned up because they are based on a + # mix of uname and config.guess output, while we now only use the latter, + # which presumably has a cleaner and leaner output. Let's refine later. 
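+    # (Worked examples: "x86_64-pc-linux-gnu" splits into cpu="x86_64",
+    # vendor="pc", os="linux-gnu"; "arm-unknown-linux-androideabi" splits
+    # into cpu="arm", vendor="unknown", os="linux-androideabi". The os and
+    # cpu parts are canonicalized below.)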
+ os = os.replace("/", "_") + if "android" in os: + canonical_os = "Android" + canonical_kernel = "Linux" + elif os.startswith("linux"): + canonical_os = "GNU" + canonical_kernel = "Linux" + elif os.startswith("kfreebsd") and os.endswith("-gnu"): + canonical_os = "GNU" + canonical_kernel = "kFreeBSD" + elif os.startswith("gnu"): + canonical_os = canonical_kernel = "GNU" + elif os.startswith("mingw") or (allow_msvc and os == "windows-msvc"): + # windows-msvc is only opt-in for the caller of this function until + # full support in bug 1617793. + canonical_os = canonical_kernel = "WINNT" + elif os.startswith("darwin"): + canonical_kernel = "Darwin" + canonical_os = "OSX" + elif os.startswith("dragonfly"): + canonical_os = canonical_kernel = "DragonFly" + elif os.startswith("freebsd"): + canonical_os = canonical_kernel = "FreeBSD" + elif os.startswith("netbsd"): + canonical_os = canonical_kernel = "NetBSD" + elif os.startswith("openbsd"): + canonical_os = canonical_kernel = "OpenBSD" + elif os.startswith("solaris"): + canonical_os = canonical_kernel = "SunOS" + else: + raise ValueError("Unknown OS: %s" % os) + + # The CPU granularity is probably not enough. Moving more things from + # old-configure will tell us if we need more + if cpu.endswith("86") or (cpu.startswith("i") and "86" in cpu): + canonical_cpu = "x86" + endianness = "little" + elif cpu in ("x86_64", "ia64"): + canonical_cpu = cpu + endianness = "little" + elif cpu in ("s390", "s390x"): + canonical_cpu = cpu + endianness = "big" + elif cpu in ("powerpc64", "ppc64", "powerpc64le", "ppc64le"): + canonical_cpu = "ppc64" + endianness = "little" if "le" in cpu else "big" + elif cpu in ("powerpc", "ppc", "rs6000") or cpu.startswith("powerpc"): + canonical_cpu = "ppc" + endianness = "big" + elif cpu in ("Alpha", "alpha", "ALPHA"): + canonical_cpu = "Alpha" + endianness = "little" + elif cpu.startswith("hppa") or cpu == "parisc": + canonical_cpu = "hppa" + endianness = "big" + elif cpu.startswith("sparc64") or cpu.startswith("sparcv9"): + canonical_cpu = "sparc64" + endianness = "big" + elif cpu.startswith("sparc") or cpu == "sun4u": + canonical_cpu = "sparc" + endianness = "big" + elif cpu.startswith("arm"): + canonical_cpu = "arm" + endianness = "big" if cpu.startswith(("armeb", "armbe")) else "little" + elif cpu in ("m68k"): + canonical_cpu = "m68k" + endianness = "big" + elif cpu in ("mips", "mipsel"): + canonical_cpu = "mips32" + endianness = "little" if "el" in cpu else "big" + elif cpu in ("mips64", "mips64el"): + canonical_cpu = "mips64" + endianness = "little" if "el" in cpu else "big" + elif cpu.startswith("aarch64"): + canonical_cpu = "aarch64" + endianness = "little" + elif cpu in ("riscv64", "riscv64gc"): + canonical_cpu = "riscv64" + endianness = "little" + elif cpu == "sh4": + canonical_cpu = "sh4" + endianness = "little" + else: + raise ValueError("Unknown CPU type: %s" % cpu) + + # Toolchains, most notably for cross compilation may use cpu-os + # prefixes. We need to be more specific about the LLVM target on Mac + # so cross-language LTO will work correctly. 
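+ # For example (illustrative): "x86_64-apple-darwin19.6.0" keeps the vendor and
+ # yields the toolchain "x86_64-apple-darwin19.6.0", while "x86_64-pc-linux-gnu"
+ # drops the vendor and yields "x86_64-linux-gnu".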
+ + if os.startswith("darwin"): + toolchain = "%s-apple-%s" % (cpu, os) + elif canonical_cpu == "aarch64" and canonical_os == "WINNT": + toolchain = "aarch64-windows-msvc" + else: + toolchain = "%s-%s" % (cpu, os) + + return namespace( + alias=triplet, + cpu=CPU(canonical_cpu), + bitness=CPU_bitness[canonical_cpu], + kernel=Kernel(canonical_kernel), + os=OS(canonical_os), + endianness=Endianness(endianness), + raw_cpu=cpu, + raw_os=os, + toolchain=toolchain, + vendor=vendor, + ) + + +# This defines a fake target/host namespace for when running with --help +# If either --host or --target is passed on the command line, then fall +# back to the real deal. +@depends("--help", "--host", "--target") +def help_host_target(help, host, target): + if help and not host and not target: + return namespace( + alias="unknown-unknown-unknown", + cpu="unknown", + bitness="unknown", + kernel="unknown", + os="unknown", + endianness="unknown", + raw_cpu="unknown", + raw_os="unknown", + toolchain="unknown-unknown", + ) + + +def config_sub(shell, triplet): + config_sub = os.path.join(os.path.dirname(__file__), "..", "autoconf", "config.sub") + return check_cmd_output(shell, config_sub, triplet).strip() + + +@depends("--host", shell) +@checking("for host system type", lambda h: h.alias) +@imports("os") +@imports("sys") +@imports(_from="__builtin__", _import="ValueError") +def real_host(value, shell): + if not value and sys.platform == "win32": + arch = os.environ.get("PROCESSOR_ARCHITEW6432") or os.environ.get( + "PROCESSOR_ARCHITECTURE" + ) + if arch == "AMD64": + return split_triplet("x86_64-pc-mingw32") + elif arch == "x86": + return split_triplet("i686-pc-mingw32") + + if not value: + config_guess = os.path.join( + os.path.dirname(__file__), "..", "autoconf", "config.guess" + ) + + # Ensure that config.guess is determining the host triplet, not the target + # triplet + env = os.environ.copy() + env.pop("CC_FOR_BUILD", None) + env.pop("HOST_CC", None) + env.pop("CC", None) + + host = check_cmd_output(shell, config_guess, env=env).strip() + try: + return split_triplet(host) + except ValueError: + pass + else: + host = value[0] + + host = config_sub(shell, host) + + try: + return split_triplet(host) + except ValueError as e: + die(e) + + +host = help_host_target | real_host + + +@depends("--target", real_host, shell, "--enable-project", "--enable-application") +@checking("for target system type", lambda t: t.alias) +@imports(_from="__builtin__", _import="ValueError") +def real_target(value, host, shell, project, application): + # Because --enable-project is implied by --enable-application, and + # implied options are not currently handled during --help, which is + # used to get the build target in mozbuild.base, we manually check + # whether --enable-application was given, and fall back to + # --enable-project if not. Both can't be given contradictory values + # under normal circumstances, so it's fine. + if application: + project = application[0] + elif project: + project = project[0] + if not value: + if project == "mobile/android": + if host.raw_os == "mingw32": + log.warning( + "Building Firefox for Android on Windows is not fully " + "supported. See https://bugzilla.mozilla.org/show_bug.cgi?" + "id=1169873 for details." + ) + return split_triplet("arm-unknown-linux-androideabi") + return host + # If --target was only given a cpu arch, expand it with the + # non-cpu part of the host. For mobile/android, expand it with + # unknown-linux-android.
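+ # For instance (illustrative): `--target=aarch64` on an x86_64-pc-linux-gnu host
+ # expands to "aarch64-pc-linux-gnu", and `--target=arm` with
+ # --enable-project=mobile/android expands to "arm-unknown-linux-androideabi".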
+ target = value[0] + if "-" not in target: + if project == "mobile/android": + rest = "unknown-linux-android" + if target.startswith("arm"): + rest += "eabi" + else: + cpu, rest = host.alias.split("-", 1) + target = "-".join((target, rest)) + try: + return split_triplet(target) + except ValueError: + pass + + try: + return split_triplet(config_sub(shell, target)) + except ValueError as e: + die(e) + + +target = help_host_target | real_target + + +@depends(host, target) +@checking("whether cross compiling") +def cross_compiling(host, target): + return host != target + + +set_config("CROSS_COMPILE", cross_compiling) +set_define("CROSS_COMPILE", cross_compiling) +add_old_configure_assignment("CROSS_COMPILE", cross_compiling) + + +@depends(target) +def have_64_bit(target): + if target.bitness == 64: + return True + + +set_config("HAVE_64BIT_BUILD", have_64_bit) +set_define("HAVE_64BIT_BUILD", have_64_bit) +add_old_configure_assignment("HAVE_64BIT_BUILD", have_64_bit) + + +@depends(host) +def host_os_kernel_major_version(host): + versions = host.raw_os.split(".") + version = "".join(x for x in versions[0] if x.isdigit()) + return version + + +set_config("HOST_MAJOR_VERSION", host_os_kernel_major_version) + +# Autoconf needs these set + + +@depends(host) +def host_for_sub_configure(host): + return "--host=%s" % host.alias + + +@depends(target) +def target_for_sub_configure(target): + target_alias = target.alias + return "--target=%s" % target_alias + + +# These variables are for compatibility with the current moz.builds and +# old-configure. Eventually, we'll want to canonicalize better. +@depends(target) +def target_variables(target): + if target.kernel == "kFreeBSD": + os_target = "GNU/kFreeBSD" + os_arch = "GNU_kFreeBSD" + elif target.kernel == "Darwin" or (target.kernel == "Linux" and target.os == "GNU"): + os_target = target.kernel + os_arch = target.kernel + else: + os_target = target.os + os_arch = target.kernel + + return namespace( + OS_TARGET=os_target, + OS_ARCH=os_arch, + INTEL_ARCHITECTURE=target.cpu in ("x86", "x86_64") or None, + ) + + +set_config("OS_TARGET", target_variables.OS_TARGET) +add_old_configure_assignment("OS_TARGET", target_variables.OS_TARGET) +set_config("OS_ARCH", target_variables.OS_ARCH) +add_old_configure_assignment("OS_ARCH", target_variables.OS_ARCH) +set_config("CPU_ARCH", target.cpu) +add_old_configure_assignment("CPU_ARCH", target.cpu) +set_config("INTEL_ARCHITECTURE", target_variables.INTEL_ARCHITECTURE) +set_config("TARGET_CPU", target.raw_cpu) +set_config("TARGET_OS", target.raw_os) +set_config("TARGET_ENDIANNESS", target.endianness) + + +@depends(host) +def host_variables(host): + if host.kernel == "kFreeBSD": + os_arch = "GNU_kFreeBSD" + else: + os_arch = host.kernel + return namespace( + HOST_OS_ARCH=os_arch, + ) + + +set_config("HOST_CPU_ARCH", host.cpu) +set_config("HOST_OS_ARCH", host_variables.HOST_OS_ARCH) +add_old_configure_assignment("HOST_OS_ARCH", host_variables.HOST_OS_ARCH) + + +@depends(target) +def target_is_windows(target): + if target.kernel == "WINNT": + return True + + +set_define("_WINDOWS", target_is_windows) +set_define("WIN32", target_is_windows) +set_define("XP_WIN", target_is_windows) + + +@depends(target) +def target_is_unix(target): + if target.kernel != "WINNT": + return True + + +set_define("XP_UNIX", target_is_unix) + + +@depends(target) +def target_is_darwin(target): + if target.kernel == "Darwin": + return True + + +set_define("XP_DARWIN", target_is_darwin) + + +@depends(target) +def target_is_osx(target): + if 
target.kernel == "Darwin" and target.os == "OSX": + return True + + +set_define("XP_MACOSX", target_is_osx) + + +@depends(target) +def target_is_linux(target): + if target.kernel == "Linux": + return True + + +set_define("XP_LINUX", target_is_linux) + + +@depends(target) +def target_is_android(target): + if target.os == "Android": + return True + + +set_define("ANDROID", target_is_android) + + +@depends(target) +def target_is_openbsd(target): + if target.kernel == "OpenBSD": + return True + + +set_define("XP_OPENBSD", target_is_openbsd) + + +@depends(target) +def target_is_netbsd(target): + if target.kernel == "NetBSD": + return True + + +set_define("XP_NETBSD", target_is_netbsd) + + +@depends(target) +def target_is_freebsd(target): + if target.kernel == "FreeBSD": + return True + + +set_define("XP_FREEBSD", target_is_freebsd) + + +@depends(target) +def target_is_solaris(target): + if target.kernel == "SunOS": + return True + + +set_define("XP_SOLARIS", target_is_solaris) + + +@depends(target) +def target_is_sparc(target): + if target.cpu == "sparc64": + return True + + +set_define("SPARC64", target_is_sparc) + + +@depends("--enable-project", check_build_environment, "--help") +@imports(_from="os.path", _import="exists") +def include_project_configure(project, build_env, help): + if not project: + die("--enable-project is required.") + + base_dir = build_env.topsrcdir + path = os.path.join(base_dir, project[0], "moz.configure") + if not exists(path): + die("Cannot find project %s", project[0]) + return path + + +@depends(include_project_configure, check_build_environment) +def build_project(include_project_configure, build_env): + ret = os.path.dirname( + os.path.relpath(include_project_configure, build_env.topsrcdir) + ) + return ret + + +set_config("MOZ_BUILD_APP", build_project) +set_define("MOZ_BUILD_APP", build_project) +add_old_configure_assignment("MOZ_BUILD_APP", build_project) + + +option(env="MOZILLA_OFFICIAL", help="Build an official release") + + +@depends("MOZILLA_OFFICIAL") +def mozilla_official(official): + if official: + return True + + +set_config("MOZILLA_OFFICIAL", mozilla_official) +set_define("MOZILLA_OFFICIAL", mozilla_official) +add_old_configure_assignment("MOZILLA_OFFICIAL", mozilla_official) + + +# Allow specifying custom paths to the version files used by the milestone() function below. 
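+# (When this option is not given, milestone() below falls back to the version
+# files under <project>/config/ in the source tree.)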
+option( + "--with-version-file-path", + nargs=1, + help="Specify a custom path to app version files instead of auto-detecting", + default=None, +) + + +@depends("--with-version-file-path") +def version_path(path): + return path + + +# set RELEASE_OR_BETA and NIGHTLY_BUILD variables depending on the cycle we're in +# The logic works like this: +# - if we have "a1" in GRE_MILESTONE, we're building Nightly (define NIGHTLY_BUILD) +# - otherwise, if we have "a" in GRE_MILESTONE, we're building Nightly or Aurora +# - otherwise, we're building Release/Beta (define RELEASE_OR_BETA) +@depends(check_build_environment, build_project, version_path, "--help") +@imports(_from="__builtin__", _import="open") +@imports("os") +@imports("re") +def milestone(build_env, build_project, version_path, _): + versions = [] + paths = ["config/milestone.txt"] + if build_project == "js": + paths = paths * 3 + else: + paths += [ + "browser/config/version.txt", + "browser/config/version_display.txt", + ] + if version_path: + version_path = version_path[0] + else: + version_path = os.path.join(build_project, "config") + for f in ("version.txt", "version_display.txt"): + f = os.path.join(version_path, f) + if not os.path.exists(os.path.join(build_env.topsrcdir, f)): + break + paths.append(f) + + for p in paths: + with open(os.path.join(build_env.topsrcdir, p), "r") as fh: + content = fh.read().splitlines() + if not content: + die("Could not find a version number in {}".format(p)) + versions.append(content[-1]) + + milestone, firefox_version, firefox_version_display = versions[:3] + + # version.txt content from the project directory if there is one, otherwise + # the firefox version. + app_version = versions[3] if len(versions) > 3 else firefox_version + # version_display.txt content from the project directory if there is one, + # otherwise version.txt content from the project directory, otherwise the + # firefox version for display. + app_version_display = versions[-1] if len(versions) > 3 else firefox_version_display + + is_nightly = is_release_or_beta = is_early_beta_or_earlier = None + + if "a1" in milestone: + is_nightly = True + elif "a" not in milestone: + is_release_or_beta = True + + major_version = milestone.split(".")[0] + m = re.search(r"([ab]\d+)", milestone) + ab_patch = m.group(1) if m else "" + + defines = os.path.join(build_env.topsrcdir, "build", "defines.sh") + with open(defines, "r") as fh: + for line in fh.read().splitlines(): + line = line.strip() + if not line or line.startswith("#"): + continue + name, _, value = line.partition("=") + name = name.strip() + value = value.strip() + if name != "EARLY_BETA_OR_EARLIER": + die( + "Only the EARLY_BETA_OR_EARLIER variable can be set in build/defines.sh" + ) + if value: + is_early_beta_or_earlier = True + + # Only expose the major version milestone in the UA string and hide the + # patch level (bugs 572659 and 870868). + # + # Only expose major milestone and alpha version in the symbolversion + # string; as the name suggests, we use it for symbol versioning on Linux.
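+ # For example (illustrative): a milestone of "86.0a1" yields uaversion "86.0"
+ # and symbolversion "86a1".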
+ return namespace( + version=milestone, + uaversion="%s.0" % major_version, + symbolversion="%s%s" % (major_version, ab_patch), + is_nightly=is_nightly, + is_release_or_beta=is_release_or_beta, + is_early_beta_or_earlier=is_early_beta_or_earlier, + app_version=app_version, + app_version_display=app_version_display, + ) + + +set_config("GRE_MILESTONE", milestone.version) +set_config("NIGHTLY_BUILD", milestone.is_nightly) +set_define("NIGHTLY_BUILD", milestone.is_nightly) +set_config("RELEASE_OR_BETA", milestone.is_release_or_beta) +set_define("RELEASE_OR_BETA", milestone.is_release_or_beta) +add_old_configure_assignment("RELEASE_OR_BETA", milestone.is_release_or_beta) +set_config("EARLY_BETA_OR_EARLIER", milestone.is_early_beta_or_earlier) +set_define("EARLY_BETA_OR_EARLIER", milestone.is_early_beta_or_earlier) +add_old_configure_assignment( + "EARLY_BETA_OR_EARLIER", milestone.is_early_beta_or_earlier +) +set_define("MOZILLA_VERSION", depends(milestone)(lambda m: '"%s"' % m.version)) +set_config("MOZILLA_VERSION", milestone.version) +set_define("MOZILLA_VERSION_U", milestone.version) +set_define("MOZILLA_UAVERSION", depends(milestone)(lambda m: '"%s"' % m.uaversion)) +set_config("MOZILLA_SYMBOLVERSION", milestone.symbolversion) +# JS configure still wants to look at these. +add_old_configure_assignment("MOZILLA_VERSION", milestone.version) +add_old_configure_assignment("MOZILLA_SYMBOLVERSION", milestone.symbolversion) + +set_config("MOZ_APP_VERSION", milestone.app_version) +set_config("MOZ_APP_VERSION_DISPLAY", milestone.app_version_display) +add_old_configure_assignment("MOZ_APP_VERSION", milestone.app_version) + + +# Dummy function for availability in toolkit/moz.configure. Overridden in +# mobile/android/moz.configure. +@depends(milestone.is_nightly) +def fennec_nightly(is_nightly): + return is_nightly + + +# The app update channel is 'default' when not supplied. The value is used in +# the application's confvars.sh (and is made available to a project specific +# moz.configure). +option( + "--enable-update-channel", + nargs=1, + help="Select application update channel", + default="default", +) + + +@depends("--enable-update-channel") +def update_channel(channel): + if not channel or channel[0] == "": + return "default" + return channel[0].lower() + + +set_config("MOZ_UPDATE_CHANNEL", update_channel) +set_define("MOZ_UPDATE_CHANNEL", update_channel) +add_old_configure_assignment("MOZ_UPDATE_CHANNEL", update_channel) + + +option( + env="MOZBUILD_STATE_PATH", + nargs=1, + help="Path to a persistent state directory for the build system " + "and related tools", +) + + +@depends("MOZBUILD_STATE_PATH", "--help") +@imports("os") +def mozbuild_state_path(path, _): + if path: + return path[0] + return os.path.expanduser(os.path.join("~", ".mozbuild")) + + +# A template providing a shorthand for setting a variable. The created +# option will only be settable with imply_option. +# It is expected that a project-specific moz.configure will call imply_option +# to set a value other than the default. +# If required, the set_as_define and set_for_old_configure arguments +# will additionally cause the variable to be set using set_define and +# add_old_configure_assignment. util.configure would be an appropriate place for +# this, but it uses add_old_configure_assignment, which is defined in this file. 
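+# A hypothetical use from a project-specific moz.configure might look like:
+#   project_flag("MOZ_FOO", help="Enable Foo", default=False, set_as_define=True)
+#   imply_option("MOZ_FOO", True)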
+@template +def project_flag(env=None, set_for_old_configure=False, set_as_define=False, **kwargs): + + if not env: + configure_error("A project_flag must be passed a variable name to set.") + + opt = option(env=env, possible_origins=("implied",), **kwargs) + + @depends(opt.option) + def option_implementation(value): + if value: + if len(value): + return value + return bool(value) + + set_config(env, option_implementation) + if set_as_define: + set_define(env, option_implementation) + if set_for_old_configure: + add_old_configure_assignment(env, option_implementation) + + +# milestone.is_nightly corresponds to cases NIGHTLY_BUILD is set. + + +@depends(milestone) +def enabled_in_nightly(milestone): + return milestone.is_nightly + + +# Branding +# ============================================================== +option( + "--with-app-basename", + env="MOZ_APP_BASENAME", + nargs=1, + help="Typically stays consistent for multiple branded versions of a " + 'given application (e.g. Aurora and Firefox both use "Firefox"), but ' + "may vary for full rebrandings (e.g. Iceweasel). Used for " + 'application.ini\'s "Name" field, which controls profile location in ' + 'the absence of a "Profile" field (see below), and various system ' + "integration hooks (Unix remoting, Windows MessageWindow name, etc.", +) + + +@depends("--with-app-basename", target_is_android) +def moz_app_basename(value, target_is_android): + if value: + return value[0] + if target_is_android: + return "Fennec" + return "Firefox" + + +set_config( + "MOZ_APP_BASENAME", + moz_app_basename, + when=depends(build_project)(lambda p: p != "js"), +) diff --git a/build/moz.configure/java.configure b/build/moz.configure/java.configure new file mode 100644 index 0000000000..1840baf8ec --- /dev/null +++ b/build/moz.configure/java.configure @@ -0,0 +1,66 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + + +# Java detection +# ======================================================== +option( + "--with-java-bin-path", + nargs=1, + help="Location of Java binaries", +) + + +@depends("--with-java-bin-path") +@imports(_from="mozboot.util", _import="locate_java_bin_path") +@imports(_from="mozboot.util", _import="JavaLocationFailedException") +def java_search_paths(path): + if path: + # Look for javac and jar in the specified path. + return path + + try: + return [locate_java_bin_path()] + except JavaLocationFailedException as e: + die(str(e)) + + +# Finds the given java tool, failing with a custom error message if we can't +# find it. + + +@template +def check_java_tool(tool): + check = check_prog( + tool.upper(), (tool,), paths=java_search_paths, allow_missing=True + ) + + @depends(check) + def require_tool(result): + if result is None: + die( + "The program %s was not found. 
Set $JAVA_HOME to your Java " + "SDK directory or use '--with-java-bin-path={java-bin-dir}'" % tool + ) + return result + + return require_tool + + +check_java_tool("java") + + +# Java Code Coverage +# ======================================================== +option( + "--enable-java-coverage", + env="MOZ_JAVA_CODE_COVERAGE", + help="Enable Java code coverage", +) + +set_config( + "MOZ_JAVA_CODE_COVERAGE", depends("--enable-java-coverage")(lambda v: bool(v)) +) diff --git a/build/moz.configure/keyfiles.configure b/build/moz.configure/keyfiles.configure new file mode 100644 index 0000000000..242a773aac --- /dev/null +++ b/build/moz.configure/keyfiles.configure @@ -0,0 +1,68 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + + +@template +def keyfile(desc, default=None, help=None, callback=lambda x: x): + help = help or ( + "Use the secret key contained in the given keyfile " "for %s requests" % desc + ) + name = desc.lower().replace(" ", "-") + no_key = callback("no-%s-key" % name) + + option("--with-%s-keyfile" % name, nargs=1, default=default, help=help) + + @depends("--with-%s-keyfile" % name) + @checking("for the %s key" % desc, lambda x: x and x is not no_key) + @imports(_from="__builtin__", _import="open") + @imports(_from="__builtin__", _import="IOError") + def keyfile(value): + if value: + try: + with open(value[0]) as fh: + result = fh.read().strip() + if result: + return callback(result) + raise FatalCheckError("'%s' is empty." % value[0]) + except IOError as e: + raise FatalCheckError("'%s': %s." % (value[0], e.strerror)) + return no_key + + return keyfile + + +@template +def simple_keyfile(desc, default=None): + value = keyfile(desc, default=default) + set_config("MOZ_%s_KEY" % desc.upper().replace(" ", "_"), value) + + +@template +def id_and_secret_keyfile(desc, default=None): + def id_and_secret(value): + if value.startswith("no-") and value.endswith("-key"): + id = value[:-3] + "clientid" + secret = value + elif " " in value: + id, secret = value.split(" ", 1) + else: + raise FatalCheckError("%s key file has an invalid format." % desc) + return namespace( + id=id, + secret=secret, + ) + + content = keyfile( + desc, + help="Use the client id and secret key contained " + "in the given keyfile for %s requests" % desc, + default=default, + callback=id_and_secret, + ) + + name = desc.upper().replace(" ", "_") + set_config("MOZ_%s_CLIENTID" % name, content.id) + set_config("MOZ_%s_KEY" % name, content.secret) diff --git a/build/moz.configure/lto-pgo.configure b/build/moz.configure/lto-pgo.configure new file mode 100644 index 0000000000..1fe5a1ab73 --- /dev/null +++ b/build/moz.configure/lto-pgo.configure @@ -0,0 +1,331 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +# PGO +# ============================================================== +llvm_profdata = check_prog( + "LLVM_PROFDATA", ["llvm-profdata"], allow_missing=True, paths=clang_search_path +) + +option( + "--enable-profile-generate", + env="MOZ_PROFILE_GENERATE", + nargs="?", + choices=("cross",), + help="Build a PGO instrumented binary", +) + +imply_option("MOZ_PGO", depends_if("--enable-profile-generate")(lambda _: True)) + +set_config( + "MOZ_PROFILE_GENERATE", depends_if("--enable-profile-generate")(lambda _: True) +) + +set_define( + "MOZ_PROFILE_GENERATE", depends_if("--enable-profile-generate")(lambda _: True) +) + +add_old_configure_assignment( + "MOZ_PROFILE_GENERATE", 1, when="--enable-profile-generate" +) + +option( + "--enable-profile-use", + env="MOZ_PROFILE_USE", + nargs="?", + choices=("cross",), + help="Use a generated profile during the build", +) + +option( + "--with-pgo-profile-path", + help="Path to the directory with unmerged profile data to use during the build", + nargs=1, +) + +imply_option("MOZ_PGO", depends_if("--enable-profile-use")(lambda _: True)) + +set_config("MOZ_PROFILE_USE", depends_if("--enable-profile-use")(lambda _: True)) + + +@depends( + "--with-pgo-profile-path", + "--enable-profile-use", + llvm_profdata, + check_build_environment, +) +@imports("os") +def pgo_profile_path(path, pgo_use, profdata, build_env): + topobjdir = build_env.topobjdir + if topobjdir.endswith("/js/src"): + topobjdir = topobjdir[:-7] + + if not path: + return os.path.join(topobjdir, "instrumented", "merged.profdata") + if path and not pgo_use: + die("Pass --enable-profile-use to use --with-pgo-profile-path.") + if path and not profdata: + die("LLVM_PROFDATA must be set to process the pgo profile.") + if not os.path.isfile(path[0]): + die("Argument to --with-pgo-profile-path must be a file.") + if not os.path.isabs(path[0]): + die("Argument to --with-pgo-profile-path must be an absolute path.") + return path[0] + + +set_config("PGO_PROFILE_PATH", pgo_profile_path) + + +@depends(c_compiler, pgo_profile_path, target_is_windows) +@imports("multiprocessing") +@imports(_from="__builtin__", _import="min") +def pgo_flags(compiler, profdata, target_is_windows): + if compiler.type == "gcc": + return namespace( + gen_cflags=["-fprofile-generate"], + gen_ldflags=["-fprofile-generate"], + use_cflags=["-fprofile-use", "-fprofile-correction", "-Wcoverage-mismatch"], + use_ldflags=["-fprofile-use"], + ) + + if compiler.type in ("clang-cl", "clang"): + prefix = "" + if compiler.type == "clang-cl": + prefix = "/clang:" + gen_ldflags = None + else: + gen_ldflags = ["-fprofile-generate"] + + gen_cflags = [prefix + "-fprofile-generate"] + if target_is_windows: + # native llvm-profdata.exe on Windows can't read profile data + # if name compression is enabled (which cross-compiling enables + # by default) + gen_cflags += ["-mllvm", "-enable-name-compression=false"] + + return namespace( + gen_cflags=gen_cflags, + gen_ldflags=gen_ldflags, + use_cflags=[ + prefix + "-fprofile-use=%s" % profdata, + # Some error messages about mismatched profile data + # come in via -Wbackend-plugin, so disable those too. 
+ "-Wno-error=backend-plugin", + ], + use_ldflags=[], + ) + + +set_config("PROFILE_GEN_CFLAGS", pgo_flags.gen_cflags) +set_config("PROFILE_GEN_LDFLAGS", pgo_flags.gen_ldflags) +set_config("PROFILE_USE_CFLAGS", pgo_flags.use_cflags) +set_config("PROFILE_USE_LDFLAGS", pgo_flags.use_ldflags) + +option( + "--with-pgo-jarlog", + help="Use the provided jarlog file when packaging during a profile-use " "build", + nargs=1, +) + +set_config("PGO_JARLOG_PATH", depends_if("--with-pgo-jarlog")(lambda p: p)) + + +@depends("MOZ_PGO", "--enable-profile-use", "--enable-profile-generate", c_compiler) +def moz_pgo_rust(pgo, profile_use, profile_generate, c_compiler): + if not pgo: + return + + # Enabling PGO through MOZ_PGO only and not --enable* flags. + if not profile_use and not profile_generate: + return + + if profile_use and profile_generate: + die("Cannot build with --enable-profile-use and --enable-profile-generate.") + + want_cross = (len(profile_use) and profile_use[0] == "cross") or ( + len(profile_generate) and profile_generate[0] == "cross" + ) + + if not want_cross: + return + + if c_compiler.type == "gcc": + die("Cannot use cross-language PGO with GCC.") + + return True + + +set_config("MOZ_PGO_RUST", moz_pgo_rust) + +# LTO +# ============================================================== + +option( + "--enable-lto", + env="MOZ_LTO", + nargs="?", + choices=("full", "thin", "cross"), + help="Enable LTO", +) + +option( + env="MOZ_LD64_KNOWN_GOOD", + nargs=1, + help="Indicate that ld64 is free of symbol aliasing bugs.", +) + +imply_option("MOZ_LD64_KNOWN_GOOD", depends_if("MOZ_AUTOMATION")(lambda _: True)) + + +@depends( + "--enable-lto", + c_compiler, + select_linker, + "MOZ_AUTOMATION", + "MOZ_LD64_KNOWN_GOOD", + target, + "--enable-profile-generate", + new_pass_manager_flags, +) +@imports("multiprocessing") +def lto( + value, + c_compiler, + select_linker, + automation, + ld64_known_good, + target, + instrumented_build, + newpm_flags, +): + cflags = [] + ldflags = [] + enabled = None + rust_lto = False + + if value: + if instrumented_build: + log.warning("Disabling LTO because --enable-profile-generate is specified") + return + + enabled = True + # `cross` implies `thin`, but with Rust code participating in LTO + # as well. Make that a little more explicit. + if len(value) and value[0].lower() == "cross": + if c_compiler.type == "gcc": + die("Cross-language LTO is not supported with GCC.") + + rust_lto = True + value = ["thin"] + + if ( + target.kernel == "Darwin" + and target.os == "OSX" + and len(value) + and value[0].lower() == "cross" + and not ld64_known_good + ): + die( + "The Mac linker is known to have a bug that affects cross-language " + "LTO. If you know that your linker is free from this bug, please " + "set the environment variable `MOZ_LD64_KNOWN_GOOD=1` and re-run " + "configure." + ) + + if c_compiler.type == "clang": + if len(value) and value[0].lower() == "full": + cflags.append("-flto") + ldflags.append("-flto") + else: + cflags.append("-flto=thin") + ldflags.append("-flto=thin") + elif c_compiler.type == "clang-cl": + if len(value) and value[0].lower() == "full": + cflags.append("-flto") + else: + cflags.append("-flto=thin") + # With clang-cl, -flto can only be used with -c or -fuse-ld=lld. + # AC_TRY_LINKs during configure don't have -c, so pass -fuse-ld=lld. + cflags.append("-fuse-ld=lld") + + # Explicitly set the CPU to optimize for so the linker doesn't + # choose a poor default. 
Rust compilation by default uses the + # pentium4 CPU on x86: + # + # https://github.com/rust-lang/rust/blob/master/src/librustc_target/spec/i686_pc_windows_msvc.rs#L5 + # + # which specifically supports "long" (multi-byte) nops. See + # https://bugzilla.mozilla.org/show_bug.cgi?id=1568450#c8 for details. + # + # The pentium4 seems like kind of a weird CPU to optimize for, but + # it seems to have worked out OK thus far. LLVM does not seem to + # specifically schedule code for the pentium4's deep pipeline, so + # that probably contributes to it being an OK default for our + # purposes. + if target.cpu == "x86": + ldflags.append("-mllvm:-mcpu=pentium4") + # This is also the CPU that Rust uses. The LLVM source code + # recommends this as the "generic 64-bit specific x86 processor model": + # + # https://github.com/llvm/llvm-project/blob/e7694f34ab6a12b8bb480cbfcb396d0a64fe965f/llvm/lib/Target/X86/X86.td#L1165-L1187 + if target.cpu == "x86_64": + ldflags.append("-mllvm:-mcpu=x86-64") + # We do not need special flags for arm64. Hooray for fixed-length + # instruction sets. + else: + num_cores = multiprocessing.cpu_count() + if len(value) and value[0].lower() == "thin": + die( + "gcc does not support thin LTO. Use `--enable-lto` " + "to enable full LTO for gcc." + ) + else: + cflags.append("-flto") + cflags.append("-flifetime-dse=1") + + ldflags.append("-flto=%s" % num_cores) + ldflags.append("-flifetime-dse=1") + + # Tell LTO not to inline functions above a certain size, to mitigate + # binary size growth while still getting good performance. + # (For hot functions, PGO will put a multiplier on this limit.) + if target.os == "WINNT": + ldflags.append("-mllvm:-import-instr-limit=10") + elif target.os == "OSX": + ldflags.append("-Wl,-mllvm,-import-instr-limit=10") + elif c_compiler.type == "clang": + ldflags.append("-Wl,-plugin-opt=-import-instr-limit=10") + + # If we're using the new pass manager, we can also enable the new PM + # during LTO. Further we can use the resulting size savings to increase + # the import limit in hot functions. + if newpm_flags: + if target.os == "WINNT": + # On Windows, this flag requires a change from clang-12, which + # is applied as a patch to our automation toolchain. + if automation or c_compiler.version >= "12.0.0": + ldflags.append("-opt:ltonewpassmanager") + ldflags.append("-mllvm:-import-hot-multiplier=30") + elif select_linker.KIND != "ld64" and c_compiler.type == "clang": + ldflags.append("-Wl,-plugin-opt=new-pass-manager") + ldflags.append("-Wl,-plugin-opt=-import-hot-multiplier=30") + + return namespace( + enabled=enabled, + cflags=cflags, + ldflags=ldflags, + rust_lto=rust_lto, + ) + + +add_old_configure_assignment("MOZ_LTO", lto.enabled) +set_config("MOZ_LTO", lto.enabled) +set_define("MOZ_LTO", lto.enabled) +set_config("MOZ_LTO_CFLAGS", lto.cflags) +set_config("MOZ_LTO_LDFLAGS", lto.ldflags) +set_config("MOZ_LTO_RUST_CROSS", lto.rust_lto) +add_old_configure_assignment("MOZ_LTO_CFLAGS", lto.cflags) +add_old_configure_assignment("MOZ_LTO_LDFLAGS", lto.ldflags) diff --git a/build/moz.configure/memory.configure b/build/moz.configure/memory.configure new file mode 100644 index 0000000000..855706b457 --- /dev/null +++ b/build/moz.configure/memory.configure @@ -0,0 +1,98 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. 
If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + + +@depends(target) +def jemalloc_default(target): + return target.kernel in ("Darwin", "Linux", "WINNT") + + +option( + "--enable-jemalloc", + env="MOZ_MEMORY", + default=jemalloc_default, + help="{Replace|Do not replace} memory allocator with jemalloc", +) + + +set_config("MOZ_MEMORY", True, when="--enable-jemalloc") +set_define("MOZ_MEMORY", True, when="--enable-jemalloc") +add_old_configure_assignment("MOZ_MEMORY", True, when="--enable-jemalloc") + + +@depends(milestone, build_project) +def replace_malloc_default(milestone, build_project): + if build_project == "memory": + return True + if milestone.is_early_beta_or_earlier and build_project != "js": + return True + + +option( + "--enable-replace-malloc", + default=replace_malloc_default, + when="--enable-jemalloc", + help="{Enable|Disable} ability to dynamically replace the malloc implementation", +) + + +set_config("MOZ_REPLACE_MALLOC", True, when="--enable-replace-malloc") +set_define("MOZ_REPLACE_MALLOC", True, when="--enable-replace-malloc") + + +@depends(build_project, when="--enable-replace-malloc") +def replace_malloc_static(build_project): + # Default to statically linking replace-malloc libraries that can be + # statically linked, except when building with --enable-project=memory. + if build_project != "memory": + return True + + +set_config("MOZ_REPLACE_MALLOC_STATIC", replace_malloc_static) + +# PHC (Probabilistic Heap Checker) +# ============================================================== + +# In general, it only makes sense for PHC to run on the platforms that have a +# crash reporter. +@depends( + milestone, + target, + replace_malloc_default, + "--enable-replace-malloc", + when="--enable-jemalloc", +) +def phc_default(milestone, target, replace_malloc_default, replace_malloc): + if not replace_malloc_default or ( + replace_malloc.origin != "default" and not replace_malloc + ): + return False + # Nightly or early beta only because PHC has a non-negligible performance cost. + if not milestone.is_early_beta_or_earlier: + return False + # Both Linux32 and Win32 have frequent crashes when stack tracing (for + # unclear reasons), so PHC is enabled only on 64-bit only in both cases. + # + # XXX: PHC is implemented but not yet enabled on Mac. Bug 1576515 is about + # enabling it on Mac, but it is blocked by bug 1035892. + return ( + target.os == "GNU" and target.kernel == "Linux" and target.bitness == 64 + ) or (target.kernel == "WINNT" and target.bitness == 64) + + +option( + "--enable-phc", + env="MOZ_PHC", + default=phc_default, + when="--enable-jemalloc", + help="{Enable|Disable} PHC (Probabilistic Memory Checker). " + "Also enables replace-malloc and frame pointers", +) +imply_option("--enable-replace-malloc", True, when="--enable-phc") +imply_option("--enable-frame-pointers", True, when="--enable-phc") + + +set_config("MOZ_PHC", True, when="--enable-phc") diff --git a/build/moz.configure/node.configure b/build/moz.configure/node.configure new file mode 100644 index 0000000000..ba2003d197 --- /dev/null +++ b/build/moz.configure/node.configure @@ -0,0 +1,71 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +option("--disable-nodejs", help="Require Node.js to build") +option(env="NODEJS", nargs=1, help="Path to nodejs") + + +@depends("--enable-nodejs", "NODEJS", bootstrap_search_path("node")) +@checking( + "for nodejs", callback=lambda x: "%s (%s)" % (x.path, x.str_version) if x else "no" +) +@imports(_from="mozbuild.nodeutil", _import="find_node_executable") +@imports(_from="mozbuild.nodeutil", _import="NODE_MIN_VERSION") +def nodejs(require, env_node, search_path): + # We don't use the dependency directly, but having it ensures the + # auto-upgrade code in bootstrap_search_path is triggered, while + # find_node_executable will use more or less the same search path. + # We do however need to use the variable for the configure lint + # not to fail. + search_path + + node_exe = env_node[0] if env_node else None + + nodejs, version = find_node_executable(node_exe) + + MAYBE_FILE_A_BUG = """ + + Executing `mach bootstrap --no-system-changes` should + install a compatible version in ~/.mozbuild on most platforms. + If you believe this is a bug, is a good way + to file. More details: + """ + + if not nodejs: + msg = ( + "could not find Node.js executable later than %s; ensure " + "`node` or `nodejs` is in PATH or set NODEJS in environment " + "to point to an executable.%s" % (NODE_MIN_VERSION, MAYBE_FILE_A_BUG) + ) + + if require: + raise FatalCheckError(msg) + else: + log.warning(msg) + log.warning("(This will become an error in the near future.)") + return + + if not version: + msg = "NODEJS must point to node %s or newer; found node location: %s. %s" % ( + NODE_MIN_VERSION, + nodejs, + MAYBE_FILE_A_BUG, + ) + + if require: + raise FatalCheckError(msg) + else: + log.warning(msg) + return + + return namespace( + path=nodejs, + version=version, + str_version=".".join(str(v) for v in version), + ) + + +set_config("NODEJS", depends_if(nodejs)(lambda p: p.path)) diff --git a/build/moz.configure/nspr.configure b/build/moz.configure/nspr.configure new file mode 100644 index 0000000000..2b21a66f03 --- /dev/null +++ b/build/moz.configure/nspr.configure @@ -0,0 +1,117 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# Top-level configure defaults to building NSPR from source. Standalone JS +# doesn't. 
+option( + "--enable-nspr-build", + when=js_standalone, + help="{Build|Do not build} NSPR from source tree", +) + + +@depends("--enable-nspr-build", when=js_standalone) +def enable_nspr_build(enable): + if enable: + return enable + + +option("--with-system-nspr", help="Use system NSPR") + + +@depends(enable_nspr_build, "--with-system-nspr", js_standalone) +def build_nspr(nspr_build, system_nspr, js_standalone): + if nspr_build is not None and nspr_build.origin != "default": + if nspr_build and system_nspr: + die("Cannot use both --enable-nspr-build and --with-system-nspr") + if js_standalone: + return nspr_build + return not system_nspr + + +set_config("MOZ_BUILD_NSPR", True, when=build_nspr) +set_config("MOZ_SYSTEM_NSPR", True, when="--with-system-nspr") + + +@depends(build_nspr, "--with-system-nspr", js_standalone) +def js_without_nspr(build_nspr, system_nspr, js_standalone): + if js_standalone: + return not build_nspr and not system_nspr + + +set_config("JS_WITHOUT_NSPR", True, when=js_without_nspr) +set_define("JS_WITHOUT_NSPR", True, when=js_without_nspr) + + +@depends(js_standalone) +def nspr_minver(js_standalone): + if js_standalone: + return "nspr >= 4.10" + return "nspr >= 4.26" + + +nspr_pkg = pkg_check_modules("NSPR", nspr_minver, when="--with-system-nspr") + + +@depends_if(nspr_pkg) +def nspr_pkg(nspr_pkg): + def extract(prefix, list): + for item in list: + if item.startswith(prefix): + return item[len(prefix) :] + return "" + + include_dir = extract("-I", nspr_pkg.cflags) + lib_dir = extract("-L", nspr_pkg.libs) + return namespace( + cflags=nspr_pkg.cflags, + include_dir=include_dir, + libs=nspr_pkg.libs, + lib_dir=lib_dir, + ) + + +@depends("--with-system-nspr", nspr_minver) +def pkgconf_requires_private(system_nspr, nspr_minver): + if not system_nspr: + return "" + return "Requires.private: %s" % nspr_minver + + +set_config("PKGCONF_REQUIRES_PRIVATE", pkgconf_requires_private) + +# pkg_check_modules takes care of NSPR_CFLAGS and NSPR_LIBS when using --with-system-nspr. 
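+# The nspr_config block below covers the in-tree build instead, pointing the
+# flags at dist/include/nspr and dist/lib (or dist/bin when libraries are not
+# folded).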
+@depends(check_build_environment, c_compiler, fold_libs, when=build_nspr) +def nspr_config(build_env, c_compiler, fold_libs): + libs = ["nspr4", "plc4", "plds4"] + if c_compiler.type == "clang-cl": + lib_dir = os.path.join(build_env.dist, "lib") + libs = [os.path.join(lib_dir, "%s.lib" % lib) for lib in libs] + else: + lib_dir = os.path.join(build_env.dist, "lib" if fold_libs else "bin") + libs = ["-L%s" % lib_dir] + ["-l%s" % lib for lib in libs] + + include_dir = os.path.join(build_env.dist, "include", "nspr") + return namespace( + cflags=["-I%s" % include_dir], + include_dir=include_dir, + libs=libs, + lib_dir=lib_dir, + ) + + +set_config("NSPR_CFLAGS", nspr_config.cflags, when=nspr_config) +set_config("NSPR_LIBS", nspr_config.libs, when=nspr_config) + +set_config("NSPR_INCLUDE_DIR", nspr_config.include_dir, when=nspr_config) +set_config("NSPR_LIB_DIR", nspr_config.lib_dir, when=nspr_config) +set_config("NSPR_INCLUDE_DIR", nspr_pkg.include_dir, when=nspr_pkg) +set_config("NSPR_LIB_DIR", nspr_pkg.lib_dir, when=nspr_pkg) + +add_old_configure_assignment("NSPR_CFLAGS", nspr_config.cflags, when=nspr_config) +add_old_configure_assignment("NSPR_LIBS", nspr_config.libs, when=nspr_config) +add_old_configure_assignment("NSPR_CFLAGS", nspr_pkg.cflags, when=nspr_pkg) +add_old_configure_assignment("NSPR_LIBS", nspr_pkg.libs, when=nspr_pkg) diff --git a/build/moz.configure/nss.configure b/build/moz.configure/nss.configure new file mode 100644 index 0000000000..3cdee33061 --- /dev/null +++ b/build/moz.configure/nss.configure @@ -0,0 +1,30 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +option("--with-system-nss", help="Use system NSS") + +imply_option("--with-system-nspr", True, when="--with-system-nss") + +nss_pkg = pkg_check_modules( + "NSS", "nss >= 3.61", when="--with-system-nss", config=False +) + +set_config("MOZ_SYSTEM_NSS", True, when="--with-system-nss") + + +@depends(nss_pkg, check_build_environment) +def nss_config(nss_pkg, build_env): + cflags = ["-I%s" % os.path.join(build_env.dist, "include", "nss")] + libs = None + if nss_pkg: + cflags = list(nss_pkg.cflags) + cflags + libs = nss_pkg.libs + return namespace(cflags=cflags, libs=libs) + + +set_config("NSS_CFLAGS", nss_config.cflags) +set_config("NSS_LIBS", nss_config.libs) +add_old_configure_assignment("NSS_CFLAGS", nss_config.cflags) diff --git a/build/moz.configure/old.configure b/build/moz.configure/old.configure new file mode 100644 index 0000000000..f1ebd2c35f --- /dev/null +++ b/build/moz.configure/old.configure @@ -0,0 +1,381 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ + +m4 = check_prog( + "M4", + ( + "gm4", + "m4", + ), +) + + +@depends(mozconfig) +def prepare_mozconfig(mozconfig): + if mozconfig["path"]: + items = {} + for key, value in mozconfig["vars"]["added"].items(): + items[key] = (value, "added") + for key, (old, value) in mozconfig["vars"]["modified"].items(): + items[key] = (value, "modified") + for t in ("env", "vars"): + for key in mozconfig[t]["removed"].keys(): + items[key] = (None, "removed " + t) + return items + + +@depends("OLD_CONFIGURE", build_project) +def old_configure(old_configure, build_project): + # os.path.abspath in the sandbox will ensure forward slashes on Windows, + # which is actually necessary because this path actually ends up literally + # as $0, and backslashes there breaks autoconf's detection of the source + # directory. + old_configure = os.path.abspath(old_configure[0]) + if build_project == "js": + old_configure_dir = os.path.dirname(old_configure) + if not old_configure_dir.endswith("/js/src"): + old_configure = os.path.join( + old_configure_dir, "js", "src", os.path.basename(old_configure) + ) + return old_configure + + +@depends(prepare_mozconfig, old_configure_assignments) +@imports(_from="__builtin__", _import="open") +@imports(_from="__builtin__", _import="print") +@imports(_from="__builtin__", _import="sorted") +@imports(_from="mozbuild.shellutil", _import="quote") +def prepare_configure(mozconfig, old_configure_assignments): + with open("old-configure.vars", "w") as out: + log.debug("Injecting the following to old-configure:") + + def inject(command): + print(command, file=out) # noqa Python 2vs3 + log.debug("| %s", command) + + if mozconfig: + inject("# start of mozconfig values") + for key, (value, action) in sorted(mozconfig.items()): + if action.startswith("removed "): + inject("unset %s # from %s" % (key, action[len("removed ") :])) + else: + inject("%s=%s # %s" % (key, quote(value), action)) + + inject("# end of mozconfig values") + + for k, v in old_configure_assignments: + inject("%s=%s" % (k, quote(v))) + + +@template +def old_configure_options(*options): + for opt in options: + option(opt, nargs="*", help="Help missing for old configure options") + + @dependable + def all_options(): + return list(options) + + return depends( + host_for_sub_configure, target_for_sub_configure, all_options, *options + ) + + +@old_configure_options( + "--cache-file", + "--datadir", + "--enable-crashreporter", + "--enable-dbus", + "--enable-debug-js-modules", + "--enable-dump-painting", + "--enable-extensions", + "--enable-libproxy", + "--enable-logrefcnt", + "--enable-necko-wifi", + "--enable-negotiateauth", + "--enable-official-branding", + "--enable-parental-controls", + "--enable-sandbox", + "--enable-system-cairo", + "--enable-system-extension-dirs", + "--enable-system-pixman", + "--enable-universalchardet", + "--enable-updater", + "--enable-xul", + "--enable-zipwriter", + "--includedir", + "--libdir", + "--prefix", + "--with-android-max-sdk", + "--with-android-min-sdk", + "--with-branding", + "--with-distribution-id", + "--with-macbundlename-prefix", + "--with-system-libevent", + "--with-system-png", + "--with-user-appdir", + "--x-includes", + "--x-libraries", +) +def prepare_configure_options(host, target, all_options, *options): + # old-configure only supports the options listed in @old_configure_options + # so we don't need to pass it every single option we've been passed. Only + # the ones that are not supported by python configure need to. 
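+ # For instance (illustrative): if only --enable-crashreporter and --prefix were
+ # given explicitly, the sub-configure receives just those two flags, plus the
+ # --host/--target values appended below.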
+ options = [ + value.format(name) + for name, value in zip(all_options, options) + if value.origin != "default" + ] + [host, target] + + return namespace(options=options, all_options=all_options) + + +@template +def old_configure_for(old_configure_path, extra_env=None): + if extra_env is None: + extra_env = dependable(None) + + @depends( + prepare_configure, + prepare_configure_options, + altered_path, + extra_env, + check_build_environment, + old_configure_path, + "MOZILLABUILD", + awk, + m4, + shell, + ) + @imports(_from="__builtin__", _import="compile") + @imports(_from="__builtin__", _import="open") + @imports(_from="__builtin__", _import="OSError") + @imports("glob") + @imports("itertools") + @imports("logging") + @imports("os") + @imports("subprocess") + @imports("sys") + @imports(_from="mozbuild.shellutil", _import="quote") + @imports(_from="mozbuild.shellutil", _import="split") + @imports(_from="tempfile", _import="NamedTemporaryFile") + @imports(_from="subprocess", _import="CalledProcessError") + @imports(_from="six", _import="exec_") + @imports(_from="six", _import="iteritems") + @imports(_from="six", _import="string_types") + def old_configure( + prepare_configure, + prepare_configure_options, + altered_path, + extra_env, + build_env, + old_configure, + mozillabuild, + awk, + m4, + shell, + ): + # Use prepare_configure to make lint happy + prepare_configure + refresh = True + if os.path.exists(old_configure): + mtime = os.path.getmtime(old_configure) + aclocal = os.path.join(build_env.topsrcdir, "build", "autoconf", "*.m4") + for input in itertools.chain( + ( + old_configure + ".in", + os.path.join(os.path.dirname(old_configure), "aclocal.m4"), + ), + glob.iglob(aclocal), + ): + if os.path.getmtime(input) > mtime: + break + else: + refresh = False + + if refresh: + autoconf = os.path.join( + build_env.topsrcdir, "build", "autoconf", "autoconf.sh" + ) + log.info("Refreshing %s with %s", old_configure, autoconf) + env = dict(os.environ) + env["M4"] = m4 + env["AWK"] = awk + env["AC_MACRODIR"] = os.path.join(build_env.topsrcdir, "build", "autoconf") + + try: + script = subprocess.check_output( + [ + shell, + autoconf, + "--localdir=%s" % os.path.dirname(old_configure), + old_configure + ".in", + ], + # Fix the working directory, so that when m4 is called, that + # includes of relative paths are deterministically resolved + # relative to the directory containing old-configure. + cwd=os.path.dirname(old_configure), + env=env, + ) + except CalledProcessError as exc: + # Autoconf on win32 may break due to a bad $PATH. Let the user know + # their $PATH is suspect. + if mozillabuild: + mozillabuild_path = normsep(mozillabuild[0]) + sh_path = normsep(find_program("sh")) + if mozillabuild_path not in sh_path: + log.warning( + "The '{}msys/bin' directory is not first in $PATH. " + "This may cause autoconf to fail. ($PATH is currently " + "set to: {})".format(mozillabuild_path, os.environ["PATH"]) + ) + die("autoconf exited with return code {}".format(exc.returncode)) + + if not script: + die( + "Generated old-configure is empty! Check that your autoconf 2.13 program works!" + ) + + # Make old-configure append to config.log, where we put our own log. 
+ # This could be done with a m4 macro, but it's way easier this way + script = script.replace(b">./config.log", b">>${CONFIG_LOG=./config.log}") + + with NamedTemporaryFile( + mode="wb", + prefix=os.path.basename(old_configure), + dir=os.path.dirname(old_configure), + delete=False, + ) as fh: + fh.write(script) + + try: + os.rename(fh.name, old_configure) + except OSError: + try: + # Likely the file already existed (on Windows). Retry after removing it. + os.remove(old_configure) + os.rename(fh.name, old_configure) + except OSError as e: + die("Failed re-creating old-configure: %s" % e.message) + + cmd = [shell, old_configure] + prepare_configure_options.options + + env = dict(os.environ) + + # For debugging purpose, in case it's not what we'd expect. + log.debug("Running %s", quote(*cmd)) + + # Our logging goes to config.log, the same file old.configure uses. + # We can't share the handle on the file, so close it. + logger = logging.getLogger("moz.configure") + config_log = None + for handler in logger.handlers: + if isinstance(handler, logging.FileHandler): + config_log = handler + config_log.close() + logger.removeHandler(config_log) + env["CONFIG_LOG"] = config_log.baseFilename + log_size = os.path.getsize(config_log.baseFilename) + break + + if altered_path: + env["PATH"] = altered_path + + if extra_env: + env.update(extra_env) + + env["OLD_CONFIGURE_VARS"] = os.path.join( + build_env.topobjdir, "old-configure.vars" + ) + proc = subprocess.Popen( + cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env + ) + while True: + line = proc.stdout.readline() + if not line: + break + log.info(line.rstrip()) + + ret = proc.wait() + if ret: + with log.queue_debug(): + if config_log: + with open(config_log.baseFilename, "r") as fh: + fh.seek(log_size) + for line in fh: + log.debug(line.rstrip()) + log.error("old-configure failed") + sys.exit(ret) + + if config_log: + # Create a new handler in append mode + handler = logging.FileHandler(config_log.baseFilename, mode="a", delay=True) + handler.setFormatter(config_log.formatter) + logger.addHandler(handler) + + raw_config = { + "split": split, + "unique_list": unique_list, + } + with open("config.data", "r") as fh: + code = compile(fh.read(), "config.data", "exec") + exec_(code, raw_config) + + # Ensure all the flags known to old-configure appear in the + # @old_configure_options above. + all_options = set(prepare_configure_options.all_options) + for flag in raw_config["flags"]: + if flag not in all_options: + die( + "Missing option in `@old_configure_options` in %s: %s", + __file__, + flag, + ) + + # If the code execution above fails, we want to keep the file around for + # debugging. + os.remove("config.data") + + return namespace( + **{ + c: [ + (k[1:-1], v[1:-1] if isinstance(v, string_types) else v) + for k, v in raw_config[c] + ] + for c in ("substs", "defines") + } + ) + + return old_configure + + +old_configure = old_configure_for(old_configure) +set_config("OLD_CONFIGURE_SUBSTS", old_configure.substs) +set_config("OLD_CONFIGURE_DEFINES", old_configure.defines) + + +# Assuming no other option is declared after this function, handle the +# env options that were injected by mozconfig_options by creating dummy +# Option instances and having the sandbox's CommandLineHelper handle +# them. We only do so for options that haven't been declared so far, +# which should be a proxy for the options that old-configure handles +# and that we don't know anything about. 
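+# For example (hypothetical): a mozconfig line such as `export FOO_BAR=1` that no
+# declared option consumes would get a dummy Option(env="FOO_BAR", nargs="*")
+# here, so the configure sandbox does not reject it.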
+@depends("--help") +@imports("__sandbox__") +@imports(_from="mozbuild.configure.options", _import="Option") +def remaining_mozconfig_options(_): + helper = __sandbox__._helper + for arg in list(helper): + if helper._origins[arg] != "mozconfig": + continue + name = arg.split("=", 1)[0] + if name.isupper() and name not in __sandbox__._options: + option = Option(env=name, nargs="*", help=name) + helper.handle(option) + + +# Please do not add anything after remaining_mozconfig_options() diff --git a/build/moz.configure/pkg.configure b/build/moz.configure/pkg.configure new file mode 100644 index 0000000000..20d90f17fd --- /dev/null +++ b/build/moz.configure/pkg.configure @@ -0,0 +1,103 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + + +@depends(toolchain_prefix, when=compile_environment) +def pkg_config(prefixes): + return tuple("{}pkg-config".format(p) for p in (prefixes or ()) + ("",)) + + +pkg_config = check_prog("PKG_CONFIG", pkg_config, allow_missing=True) + + +@depends_if(pkg_config) +@checking("for pkg-config version") +def pkg_config_version(pkg_config): + return Version(check_cmd_output(pkg_config, "--version").rstrip()) + + +# Locates the given module using pkg-config. +# - `var` determines the name of variables to set when the package is found. +# _CFLAGS and _LIBS are set with corresponding values. +# - `package_desc` package name and version requirement string, list of +# strings describing packages to locate, or depends function that will +# resolve to such a string or list of strings. +# - `when` a depends function that will determine whether to perform +# any checks (default is to always perform checks). +# - `allow_missing` If set, failure to fulfill the package description +# will not result in an error or logged message, and any error message +# will be returned to the caller. +# Returns `True` when the package description is fulfilled. + + +@template +def pkg_check_modules(var, package_desc, when=always, allow_missing=False, config=True): + if isinstance(package_desc, (tuple, list)): + package_desc = " ".join(package_desc) + package_desc = dependable(package_desc) + allow_missing = dependable(allow_missing) + + @depends(when, "--enable-compile-environment") + def when_and_compile_environment(when, compile_environment): + return when and compile_environment + + @depends(pkg_config, pkg_config_version, when=when_and_compile_environment) + def check_pkg_config(pkg_config, version): + min_version = "0.9.0" + if pkg_config is None: + die( + "*** The pkg-config script could not be found. Make sure it is\n" + "*** in your path, or set the PKG_CONFIG environment variable\n" + "*** to the full path to pkg-config." + ) + if version < min_version: + die( + "*** Your version of pkg-config is too old. You need version %s or newer.", + min_version, + ) + + @depends(pkg_config, package_desc, allow_missing, when=when_and_compile_environment) + @imports("sys") + @imports(_from="mozbuild.configure.util", _import="LineIO") + def package(pkg_config, package_desc, allow_missing): + # package_desc may start as a depends function, so we can't use + # @checking here. + log.info("checking for %s... 
" % package_desc) + retcode, stdout, stderr = get_cmd_output( + pkg_config, "--errors-to-stdout", "--print-errors", package_desc + ) + if retcode == 0: + log.info("yes") + return True + log.info("no") + log_writer = log.warning if allow_missing else log.error + with LineIO(lambda l: log_writer(l)) as o: + o.write(stdout) + if not allow_missing: + sys.exit(1) + + @depends(pkg_config, package_desc, when=package) + @checking("%s_CFLAGS" % var, callback=lambda t: " ".join(t)) + def pkg_cflags(pkg_config, package_desc): + flags = check_cmd_output(pkg_config, "--cflags", package_desc) + return tuple(flags.split()) + + @depends(pkg_config, package_desc, when=package) + @checking("%s_LIBS" % var, callback=lambda t: " ".join(t)) + def pkg_libs(pkg_config, package_desc): + libs = check_cmd_output(pkg_config, "--libs", package_desc) + # Remove evil flags like -Wl,--export-dynamic + return tuple(libs.replace("-Wl,--export-dynamic", "").split()) + + @depends(pkg_cflags, pkg_libs, when=package) + def pkg_info(cflags, libs): + return namespace(cflags=cflags, libs=libs) + + if config: + set_config("%s_CFLAGS" % var, pkg_cflags) + set_config("%s_LIBS" % var, pkg_libs) + + return pkg_info diff --git a/build/moz.configure/rust.configure b/build/moz.configure/rust.configure new file mode 100644 index 0000000000..ff3dbe066e --- /dev/null +++ b/build/moz.configure/rust.configure @@ -0,0 +1,552 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + + +# Rust is required by `rust_compiler` below. We allow_missing here +# to propagate failures to the better error message there. +option(env="RUSTC", nargs=1, help="Path to the rust compiler") +option(env="CARGO", nargs=1, help="Path to the Cargo package manager") + +rustc = check_prog( + "_RUSTC", + ["rustc"], + what="rustc", + paths=rust_search_path, + input="RUSTC", + allow_missing=True, +) +cargo = check_prog( + "_CARGO", + ["cargo"], + what="cargo", + paths=rust_search_path, + input="CARGO", + allow_missing=True, +) + + +@template +def unwrap_rustup(prog, name): + # rustc and cargo can either be rustup wrappers, or they can be the actual, + # plain executables. For cargo, on OSX, rustup sets DYLD_LIBRARY_PATH (at + # least until https://github.com/rust-lang/rustup.rs/pull/1752 is merged + # and shipped) and that can wreak havoc (see bug 1536486). Similarly, for + # rustc, rustup silently honors toolchain overrides set by vendored crates + # (see bug 1547196). + # + # In either case, we need to find the plain executables. + # + # To achieve that, try to run `PROG +stable`. When the rustup wrapper is in + # use, it either prints PROG's help and exits with status 0, or prints + # an error message (error: toolchain 'stable' is not installed) and exits + # with status 1. In the cargo case, when plain cargo is in use, it exits + # with a different error message (e.g. "error: no such subcommand: + # `+stable`"), and exits with status 101. + # + # Unfortunately, in the rustc case, when plain rustc is in use, + # `rustc +stable` will exit with status 1, complaining about a missing + # "+stable" file. We'll examine the error output to try and distinguish + # between failing rustup and failing rustc. 
+ @depends(prog, dependable(name)) + @imports(_from="__builtin__", _import="open") + @imports("os") + def unwrap(prog, name): + if not prog: + return + + def from_rustup_which(): + out = check_cmd_output("rustup", "which", name, executable=prog).rstrip() + # If for some reason the above failed to return something, keep the + # PROG we found originally. + if out: + log.info("Actually using '%s'", out) + return out + + log.info("No `rustup which` output, using '%s'", prog) + return prog + + (retcode, stdout, stderr) = get_cmd_output(prog, "+stable") + + if name == "cargo" and retcode != 101: + prog = from_rustup_which() + elif name == "rustc": + if retcode == 0: + prog = from_rustup_which() + elif "+stable" in stderr: + # PROG looks like plain `rustc`. + pass + else: + # Assume PROG looks like `rustup`. This case is a little weird, + # insofar as the user doesn't have the "stable" toolchain + # installed, but go ahead and unwrap anyway: the user might + # have only certain versions, beta, or nightly installed, and + # we'll catch invalid versions later. + prog = from_rustup_which() + + return prog + + return unwrap + + +rustc = unwrap_rustup(rustc, "rustc") +cargo = unwrap_rustup(cargo, "cargo") + + +set_config("CARGO", cargo) +set_config("RUSTC", rustc) + + +@depends_if(rustc) +@checking("rustc version", lambda info: info.version) +def rustc_info(rustc): + if not rustc: + return + out = check_cmd_output(rustc, "--version", "--verbose").splitlines() + info = dict((s.strip() for s in line.split(":", 1)) for line in out[1:]) + return namespace( + version=Version(info.get("release", "0")), + commit=info.get("commit-hash", "unknown"), + host=info["host"], + llvm_version=Version(info.get("LLVM version", "0")), + ) + + +set_config( + "RUSTC_VERSION", + depends(rustc_info)(lambda info: str(info.version) if info else None), +) + + +@depends_if(cargo) +@checking("cargo version", lambda info: info.version) +@imports("re") +def cargo_info(cargo): + if not cargo: + return + out = check_cmd_output(cargo, "--version", "--verbose").splitlines() + info = dict((s.strip() for s in line.split(":", 1)) for line in out[1:]) + version = info.get("release") + # Older versions of cargo didn't support --verbose, in which case, they + # only output a not-really-pleasant-to-parse output. Fortunately, they + # don't error out, so we can just try some regexp matching on the output + # we already got. + if version is None: + VERSION_FORMAT = r"^cargo (\d\.\d+\.\d+).*" + + m = re.search(VERSION_FORMAT, out[0]) + # Fail fast if cargo changes its output on us. + if not m: + die("Could not determine cargo version from output: %s", out) + version = m.group(1) + + return namespace( + version=Version(version), + ) + + +@depends(rustc_info, cargo_info, build_project) +@imports(_from="mozboot.util", _import="MINIMUM_RUST_VERSION") +@imports(_from="textwrap", _import="dedent") +def rust_compiler(rustc_info, cargo_info, build_project): + if not rustc_info: + die( + dedent( + """\ + Rust compiler not found. + To compile rust language sources, you must have 'rustc' in your path. + See https://www.rust-lang.org/ for more information. 
+ + You can install rust by running './mach bootstrap' + or by directly running the installer from https://rustup.rs/ + """ + ) + ) + if build_project == "tools/crashreporter": + rustc_min_version = Version("1.47.0") + else: + rustc_min_version = Version(MINIMUM_RUST_VERSION) + cargo_min_version = rustc_min_version + + version = rustc_info.version + is_nightly = "nightly" in version.version + is_version_number_match = ( + version.major == rustc_min_version.major + and version.minor == rustc_min_version.minor + and version.patch == rustc_min_version.patch + ) + + if version < rustc_min_version or (is_version_number_match and is_nightly): + die( + dedent( + """\ + Rust compiler {} is too old. + + To compile Rust language sources please install at least + version {} of the 'rustc' toolchain (or, if using nightly, + at least one version newer than {}) and make sure it is + first in your path. + + You can verify this by typing 'rustc --version'. + + If you have the 'rustup' tool installed you can upgrade + to the latest release by typing 'rustup update'. The + installer is available from https://rustup.rs/ + """.format( + version, rustc_min_version, rustc_min_version + ) + ) + ) + + if not cargo_info: + die( + dedent( + """\ + Cargo package manager not found. + To compile Rust language sources, you must have 'cargo' in your path. + See https://www.rust-lang.org/ for more information. + + You can install cargo by running './mach bootstrap' + or by directly running the installer from https://rustup.rs/ + """ + ) + ) + + version = cargo_info.version + if version < cargo_min_version: + die( + dedent( + """\ + Cargo package manager {} is too old. + + To compile Rust language sources please install at least + version {} of 'cargo' and make sure it is first in your path. + + You can verify this by typing 'cargo --version'. + """ + ).format(version, cargo_min_version) + ) + + return True + + +@depends(rustc, when=rust_compiler) +@imports(_from="__builtin__", _import="ValueError") +def rust_supported_targets(rustc): + out = check_cmd_output(rustc, "--print", "target-list").splitlines() + data = {} + for t in out: + try: + info = split_triplet(t) + except ValueError: + if t.startswith("thumb"): + cpu, rest = t.split("-", 1) + retry = "-".join(("arm", rest)) + elif t.endswith("-windows-msvc"): + retry = t[: -len("windows-msvc")] + "mingw32" + elif t.endswith("-windows-gnu"): + retry = t[: -len("windows-gnu")] + "mingw32" + else: + continue + try: + info = split_triplet(retry) + except ValueError: + continue + key = (info.cpu, info.endianness, info.os) + data.setdefault(key, []).append(namespace(rust_target=t, target=info)) + return data + + +def detect_rustc_target( + host_or_target, compiler_info, arm_target, rust_supported_targets +): + # Rust's --target options are similar to, but not exactly the same + # as, the autoconf-derived targets we use. An example would be that + # Rust uses distinct target triples for targetting the GNU C++ ABI + # and the MSVC C++ ABI on Win32, whereas autoconf has a single + # triple and relies on the user to ensure that everything is + # compiled for the appropriate ABI. We need to perform appropriate + # munging to get the correct option to rustc. + # We correlate the autoconf-derived targets with the list of targets + # rustc gives us with --print target-list. 
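+ # A concrete (illustrative) case: for an autoconf target like
+ # x86_64-pc-mingw32, rustc offers both x86_64-pc-windows-gnu and
+ # x86_64-pc-windows-msvc. The C compiler type is what disambiguates below:
+ # clang-cl selects the -msvc triple, while gcc or clang select the -gnu one.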
+ candidates = rust_supported_targets.get( + (host_or_target.cpu, host_or_target.endianness, host_or_target.os), [] + ) + + def find_candidate(candidates): + if len(candidates) == 1: + return candidates[0].rust_target + elif not candidates: + return None + + # We have multiple candidates. There are two cases where we can try to + # narrow further down using extra information from the build system. + # - For windows targets, correlate with the C compiler type + if host_or_target.kernel == "WINNT": + if compiler_info.type in ("gcc", "clang"): + suffix = "windows-gnu" + else: + suffix = "windows-msvc" + narrowed = [ + c for c in candidates if c.rust_target.endswith("-{}".format(suffix)) + ] + if len(narrowed) == 1: + return narrowed[0].rust_target + elif narrowed: + candidates = narrowed + + vendor_aliases = {"pc": ("w64", "windows")} + narrowed = [ + c + for c in candidates + if host_or_target.vendor in vendor_aliases.get(c.target.vendor, ()) + ] + + if len(narrowed) == 1: + return narrowed[0].rust_target + + # - For arm targets, correlate with arm_target + # we could be more thorough with the supported rust targets, but they + # don't support OSes that are supported to build Gecko anyways. + # Also, sadly, the only interface to check the rust target cpu features + # is --print target-spec-json, and it's unstable, so we have to rely on + # our own knowledge of what each arm target means. + if host_or_target.cpu == "arm" and host_or_target.endianness == "little": + prefixes = [] + if arm_target.arm_arch >= 7: + if arm_target.thumb2 and arm_target.fpu == "neon": + prefixes.append("thumbv7neon") + if arm_target.thumb2: + prefixes.append("thumbv7a") + prefixes.append("armv7") + if arm_target.arm_arch >= 6: + prefixes.append("armv6") + if host_or_target.os != "Android": + # arm-* rust targets are armv6... except arm-linux-androideabi + prefixes.append("arm") + if arm_target.arm_arch >= 5: + prefixes.append("armv5te") + if host_or_target.os == "Android": + # arm-* rust targets are armv6... except arm-linux-androideabi + prefixes.append("arm") + if arm_target.arm_arch >= 4: + prefixes.append("armv4t") + # rust freebsd targets are the only ones that don't have a 'hf' suffix + # for hard-float. Technically, that means if the float abi ever is not + # hard-float, this will pick a wrong target, but since rust only + # supports hard-float, let's assume that means freebsd only support + # hard-float. + if arm_target.float_abi == "hard" and host_or_target.os != "FreeBSD": + suffix = "hf" + else: + suffix = "" + for p in prefixes: + for c in candidates: + if c.rust_target.startswith( + "{}-".format(p) + ) and c.rust_target.endswith(suffix): + return c.rust_target + + # See if we can narrow down on the exact alias + narrowed = [c for c in candidates if c.target.alias == host_or_target.alias] + if len(narrowed) == 1: + return narrowed[0].rust_target + elif narrowed: + candidates = narrowed + + # See if we can narrow down with the raw OS + narrowed = [c for c in candidates if c.target.raw_os == host_or_target.raw_os] + if len(narrowed) == 1: + return narrowed[0].rust_target + elif narrowed: + candidates = narrowed + + # See if we can narrow down with the raw OS and raw CPU + narrowed = [ + c + for c in candidates + if c.target.raw_os == host_or_target.raw_os + and c.target.raw_cpu == host_or_target.raw_cpu + ] + if len(narrowed) == 1: + return narrowed[0].rust_target + + # Finally, see if the vendor can be used to disambiguate. 
+ narrowed = [c for c in candidates if c.target.vendor == host_or_target.vendor] + if len(narrowed) == 1: + return narrowed[0].rust_target + + return None + + rustc_target = find_candidate(candidates) + + if rustc_target is None: + die("Don't know how to translate {} for rustc".format(host_or_target.alias)) + + return rustc_target + + +@imports("os") +@imports(_from="six", _import="ensure_binary") +@imports(_from="tempfile", _import="mkstemp") +@imports(_from="textwrap", _import="dedent") +@imports(_from="mozbuild.configure.util", _import="LineIO") +def assert_rust_compile(host_or_target, rustc_target, rustc): + # Check to see whether our rustc has a reasonably functional stdlib + # for our chosen target. + target_arg = "--target=" + rustc_target + in_fd, in_path = mkstemp(prefix="conftest", suffix=".rs", text=True) + out_fd, out_path = mkstemp(prefix="conftest", suffix=".rlib") + os.close(out_fd) + try: + source = 'pub extern fn hello() { println!("Hello world"); }' + log.debug("Creating `%s` with content:", in_path) + with LineIO(lambda l: log.debug("| %s", l)) as out: + out.write(source) + + os.write(in_fd, ensure_binary(source)) + os.close(in_fd) + + cmd = [ + rustc, + "--crate-type", + "staticlib", + target_arg, + "-o", + out_path, + in_path, + ] + + def failed(): + die( + dedent( + """\ + Cannot compile for {} with {} + The target may be unsupported, or you may not have + a rust std library for that target installed. Try: + + rustup target add {} + """.format( + host_or_target.alias, rustc, rustc_target + ) + ) + ) + + check_cmd_output(*cmd, onerror=failed) + if not os.path.exists(out_path) or os.path.getsize(out_path) == 0: + failed() + finally: + os.remove(in_path) + os.remove(out_path) + + +@depends( + rustc, + host, + host_c_compiler, + rustc_info.host, + rust_supported_targets, + arm_target, + when=rust_compiler, +) +@checking("for rust host triplet") +@imports(_from="textwrap", _import="dedent") +def rust_host_triple( + rustc, host, compiler_info, rustc_host, rust_supported_targets, arm_target +): + rustc_target = detect_rustc_target( + host, compiler_info, arm_target, rust_supported_targets + ) + if rustc_target != rustc_host: + if host.alias == rustc_target: + configure_host = host.alias + else: + configure_host = "{}/{}".format(host.alias, rustc_target) + die( + dedent( + """\ + The rust compiler host ({rustc}) is not suitable for the configure host ({configure}). + + You can solve this by: + * Set your configure host to match the rust compiler host by editing your + mozconfig and adding "ac_add_options --host={rustc}". + * Or, install the rust toolchain for {configure}, if supported, by running + "rustup default stable-{rustc_target}" + """.format( + rustc=rustc_host, + configure=configure_host, + rustc_target=rustc_target, + ) + ) + ) + assert_rust_compile(host, rustc_target, rustc) + return rustc_target + + +@depends( + rustc, target, c_compiler, rust_supported_targets, arm_target, when=rust_compiler +) +@checking("for rust target triplet") +def rust_target_triple( + rustc, target, compiler_info, rust_supported_targets, arm_target +): + rustc_target = detect_rustc_target( + target, compiler_info, arm_target, rust_supported_targets + ) + assert_rust_compile(target, rustc_target, rustc) + return rustc_target + + +set_config("RUST_TARGET", rust_target_triple) +set_config("RUST_HOST_TARGET", rust_host_triple) + + +# This is used for putting source info into symbol files. 
+set_config("RUSTC_COMMIT", depends(rustc_info)(lambda i: i.commit)) + +# Rustdoc is required by Rust tests below. +option(env="RUSTDOC", nargs=1, help="Path to the rustdoc program") + +rustdoc = check_prog( + "RUSTDOC", + ["rustdoc"], + paths=rust_search_path, + input="RUSTDOC", + allow_missing=True, +) + +# This option is separate from --enable-tests because Rust tests are particularly +# expensive in terms of compile time (especially for code in libxul). +option( + "--enable-rust-tests", + help="Enable building and running of Rust tests during `make check`", +) + + +@depends("--enable-rust-tests", rustdoc) +def rust_tests(enable_rust_tests, rustdoc): + if enable_rust_tests and not rustdoc: + die("--enable-rust-tests requires rustdoc") + return bool(enable_rust_tests) + + +set_config("MOZ_RUST_TESTS", rust_tests) + + +@depends(target, c_compiler, rustc) +@imports("os") +def rustc_natvis_ldflags(target, compiler_info, rustc): + if target.kernel == "WINNT" and compiler_info.type == "clang-cl": + sysroot = check_cmd_output(rustc, "--print", "sysroot").strip() + etc = os.path.join(sysroot, "lib/rustlib/etc") + ldflags = [] + if os.path.isdir(etc): + for f in os.listdir(etc): + if f.endswith(".natvis"): + ldflags.append("-NATVIS:" + normsep(os.path.join(etc, f))) + return ldflags + + +set_config("RUSTC_NATVIS_LDFLAGS", rustc_natvis_ldflags) diff --git a/build/moz.configure/toolchain.configure b/build/moz.configure/toolchain.configure new file mode 100755 index 0000000000..2ed03d4587 --- /dev/null +++ b/build/moz.configure/toolchain.configure @@ -0,0 +1,2842 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# Code optimization +# ============================================================== + +option("--disable-optimize", nargs="?", help="Disable optimizations via compiler flags") + + +@depends("--enable-optimize", "--help") +def moz_optimize(option, _): + flags = None + + if len(option): + val = "2" + flags = option[0] + elif option: + val = "1" + else: + val = None + + return namespace( + optimize=val, + flags=flags, + ) + + +set_config("MOZ_OPTIMIZE", moz_optimize.optimize) +add_old_configure_assignment("MOZ_OPTIMIZE", moz_optimize.optimize) +add_old_configure_assignment("MOZ_CONFIGURE_OPTIMIZE_FLAGS", moz_optimize.flags) + +# yasm detection +# ============================================================== +yasm = check_prog("YASM", ["yasm"], allow_missing=True) + + +@depends_if(yasm) +@checking("yasm version") +def yasm_version(yasm): + version = ( + check_cmd_output( + yasm, "--version", onerror=lambda: die("Failed to get yasm version.") + ) + .splitlines()[0] + .split()[1] + ) + return Version(version) + + +@depends(yasm, target) +def yasm_asflags(yasm, target): + if yasm: + asflags = { + ("OSX", "x86"): ["-f", "macho32"], + ("OSX", "x86_64"): ["-f", "macho64"], + ("WINNT", "x86"): ["-f", "win32"], + ("WINNT", "x86_64"): ["-f", "x64"], + }.get((target.os, target.cpu), None) + if asflags is None: + # We're assuming every x86 platform we support that's + # not Windows or Mac is ELF. 
+ if target.cpu == "x86": + asflags = ["-f", "elf32"] + elif target.cpu == "x86_64": + asflags = ["-f", "elf64"] + if asflags: + asflags += ["-rnasm", "-pnasm"] + return asflags + + +set_config("YASM_ASFLAGS", yasm_asflags) + + +# Android NDK +# ============================================================== + + +@depends("--disable-compile-environment", target) +def compiling_android(compile_env, target): + return compile_env and target.os == "Android" + + +include("android-ndk.configure", when=compiling_android) + +with only_when(target_is_osx): + # MacOS deployment target version + # ============================================================== + # This needs to happen before any compilation test is done. + + option( + "--enable-macos-target", + env="MACOSX_DEPLOYMENT_TARGET", + nargs=1, + default=depends(target)(lambda t: "11.0" if t.cpu == "aarch64" else "10.12"), + help="Set the minimum MacOS version needed at runtime{|}", + ) + + @depends("--enable-macos-target") + @imports(_from="os", _import="environ") + def macos_target(value): + if value: + # Ensure every compiler process we spawn uses this value. + environ["MACOSX_DEPLOYMENT_TARGET"] = value[0] + return value[0] + + set_config("MACOSX_DEPLOYMENT_TARGET", macos_target) + add_old_configure_assignment("MACOSX_DEPLOYMENT_TARGET", macos_target) + + +@depends(host) +def host_is_osx(host): + if host.os == "OSX": + return True + + +with only_when(host_is_osx | target_is_osx): + # MacOS SDK + # ========= + option( + "--with-macos-sdk", + env="MACOS_SDK_DIR", + nargs=1, + help="Location of platform SDK to use", + ) + + @depends("--with-macos-sdk", host) + @imports(_from="__builtin__", _import="open") + @imports(_from="os.path", _import="isdir") + @imports("plistlib") + def macos_sdk(sdk, host): + sdk_min_version = Version("10.12") + sdk_max_version = Version("11.1") + + if sdk: + sdk = sdk[0] + elif host.os == "OSX": + sdk = check_cmd_output( + "xcrun", "--show-sdk-path", onerror=lambda: "" + ).rstrip() + if not sdk: + die( + "Could not find the macOS SDK. Please use --with-macos-sdk to give " + "the path to a macOS SDK." + ) + else: + die( + "Need a macOS SDK when targeting macOS. Please use --with-macos-sdk " + "to give the path to a macOS SDK." + ) + + if not isdir(sdk): + die( + "SDK not found in %s. When using --with-macos-sdk, you must specify a " + "valid SDK. SDKs are installed when the optional cross-development " + "tools are selected during the Xcode/Developer Tools installation." + % sdk + ) + with open(os.path.join(sdk, "SDKSettings.plist"), "rb") as plist: + obj = plistlib.load(plist) + if not obj: + die("Error parsing SDKSettings.plist in the SDK directory: %s" % sdk) + if "Version" not in obj: + die( + "Error finding Version information in SDKSettings.plist from the SDK: %s" + % sdk + ) + version = Version(obj["Version"]) + if version < sdk_min_version: + die( + 'SDK version "%s" is too old. Please upgrade to at least %s. ' + "You may need to point to it using --with-macos-sdk= in your " + "mozconfig." % (version, sdk_min_version) + ) + if version > sdk_max_version: + die( + 'SDK version "%s" is unsupported. Please downgrade to version ' + "%s. You may need to point to it using --with-macos-sdk= in " + "your mozconfig." 
% (version, sdk_max_version) + ) + return sdk + + set_config("MACOS_SDK_DIR", macos_sdk) + + +with only_when(target_is_osx): + with only_when(cross_compiling): + option( + "--with-macos-private-frameworks", + env="MACOS_PRIVATE_FRAMEWORKS_DIR", + nargs=1, + help="Location of private frameworks to use", + ) + + @depends_if("--with-macos-private-frameworks") + @imports(_from="os.path", _import="isdir") + def macos_private_frameworks(value): + if value and not isdir(value[0]): + die( + "PrivateFrameworks not found not found in %s. When using " + "--with-macos-private-frameworks, you must specify a valid " + "directory", + value[0], + ) + return value[0] + + @depends(macos_private_frameworks, macos_sdk) + def macos_private_frameworks(value, sdk): + if value: + return value + return os.path.join(sdk or "/", "System/Library/PrivateFrameworks") + + set_config("MACOS_PRIVATE_FRAMEWORKS_DIR", macos_private_frameworks) + + +# GC rooting and hazard analysis. +# ============================================================== +option(env="MOZ_HAZARD", help="Build for the GC rooting hazard analysis") + + +@depends("MOZ_HAZARD") +def hazard_analysis(value): + if value: + return True + + +set_config("MOZ_HAZARD", hazard_analysis) + + +# Cross-compilation related things. +# ============================================================== +option( + "--with-toolchain-prefix", + env="TOOLCHAIN_PREFIX", + nargs=1, + help="Prefix for the target toolchain", +) + + +@depends("--with-toolchain-prefix", host, target, cross_compiling) +def toolchain_prefix(value, host, target, cross_compiling): + if value: + return tuple(value) + # We don't want a toolchain prefix by default when building on mac for mac. + if cross_compiling and not (target.os == "OSX" and host.os == "OSX"): + return ("%s-" % target.toolchain, "%s-" % target.alias) + + +@depends(toolchain_prefix, target) +def first_toolchain_prefix(toolchain_prefix, target): + # Pass TOOLCHAIN_PREFIX down to the build system if it was given from the + # command line/environment (in which case there's only one value in the tuple), + # or when cross-compiling for Android or OSX. + if toolchain_prefix and ( + target.os in ("Android", "OSX") or len(toolchain_prefix) == 1 + ): + return toolchain_prefix[0] + + +set_config("TOOLCHAIN_PREFIX", first_toolchain_prefix) +add_old_configure_assignment("TOOLCHAIN_PREFIX", first_toolchain_prefix) + + +# Compilers +# ============================================================== +include("compilers-util.configure") + + +def try_preprocess(compiler, language, source, onerror=None): + return try_invoke_compiler(compiler, language, source, ["-E"], onerror) + + +@imports(_from="mozbuild.configure.constants", _import="CompilerType") +@imports(_from="mozbuild.configure.constants", _import="CPU_preprocessor_checks") +@imports(_from="mozbuild.configure.constants", _import="kernel_preprocessor_checks") +@imports(_from="mozbuild.configure.constants", _import="OS_preprocessor_checks") +@imports(_from="six", _import="iteritems") +@imports(_from="textwrap", _import="dedent") +@imports(_from="__builtin__", _import="Exception") +def get_compiler_info(compiler, language): + """Returns information about the given `compiler` (command line in the + form of a list or tuple), in the given `language`. 
+ + The returned information includes: + - the compiler type (clang-cl, clang or gcc) + - the compiler version + - the compiler supported language + - the compiler supported language version + """ + # Xcode clang versions are different from the underlying llvm version (they + # instead are aligned with the Xcode version). Fortunately, we can tell + # apart plain clang from Xcode clang, and convert the Xcode clang version + # into the more or less corresponding plain clang version. + check = dedent( + """\ + #if defined(_MSC_VER) && defined(__clang__) && defined(_MT) + %COMPILER "clang-cl" + %VERSION __clang_major__.__clang_minor__.__clang_patchlevel__ + #elif defined(__clang__) + %COMPILER "clang" + %VERSION __clang_major__.__clang_minor__.__clang_patchlevel__ + # ifdef __apple_build_version__ + %XCODE 1 + # endif + #elif defined(__GNUC__) && !defined(__MINGW32__) + %COMPILER "gcc" + %VERSION __GNUC__.__GNUC_MINOR__.__GNUC_PATCHLEVEL__ + #endif + + #if __cplusplus + %cplusplus __cplusplus + #elif __STDC_VERSION__ + %STDC_VERSION __STDC_VERSION__ + #endif + """ + ) + + # While we're doing some preprocessing, we might as well do some more + # preprocessor-based tests at the same time, to check the toolchain + # matches what we want. + for name, preprocessor_checks in ( + ("CPU", CPU_preprocessor_checks), + ("KERNEL", kernel_preprocessor_checks), + ("OS", OS_preprocessor_checks), + ): + for n, (value, condition) in enumerate(iteritems(preprocessor_checks)): + check += dedent( + """\ + #%(if)s %(condition)s + %%%(name)s "%(value)s" + """ + % { + "if": "elif" if n else "if", + "condition": condition, + "name": name, + "value": value, + } + ) + check += "#endif\n" + + # Also check for endianness. The advantage of living in modern times is + # that all the modern compilers we support now have __BYTE_ORDER__ defined + # by the preprocessor. + check += dedent( + """\ + #if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__ + %ENDIANNESS "little" + #elif __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__ + %ENDIANNESS "big" + #endif + """ + ) + + result = try_preprocess(compiler, language, check) + + if not result: + raise FatalCheckError("Unknown compiler or compiler not supported.") + + # Metadata emitted by preprocessors such as GCC with LANG=ja_JP.utf-8 may + # have non-ASCII characters. Treat the output as bytearray. 
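+ # As an illustration (the values are made up), the surviving preprocessor
+ # output for a clang check on a 64-bit little-endian Linux target would
+ # contain lines such as:
+ #   %COMPILER "clang"
+ #   %VERSION 11.0.0
+ #   %CPU "x86_64"
+ #   %KERNEL "Linux"
+ #   %ENDIANNESS "little"
+ # The loop below strips any stray spaces and the surrounding quotes and
+ # collects the values into the `data` dictionary.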
+ data = {} + for line in result.splitlines(): + if line.startswith("%"): + k, _, v = line.partition(" ") + k = k.lstrip("%") + data[k] = v.replace(" ", "").lstrip('"').rstrip('"') + log.debug("%s = %s", k, data[k]) + + try: + type = CompilerType(data["COMPILER"]) + except Exception: + raise FatalCheckError("Unknown compiler or compiler not supported.") + + cplusplus = int(data.get("cplusplus", "0L").rstrip("L")) + stdc_version = int(data.get("STDC_VERSION", "0L").rstrip("L")) + + version = data.get("VERSION") + if version: + version = Version(version) + if data.get("XCODE"): + # Derived from https://en.wikipedia.org/wiki/Xcode#Toolchain_versions + # with enough granularity for major.minor version checks further + # down the line + if version < "9.1": + version = Version("4.0.0.or.less") + elif version < "10.0": + version = Version("5.0.2") + elif version < "10.0.1": + version = Version("6.0.1") + elif version < "11.0": + version = Version("7.0.0") + elif version < "11.0.3": + version = Version("8.0.0") + elif version < "12.0": + version = Version("9.0.0") + elif version < "12.0.1": + version = Version("10.0.0") + else: + version = Version("10.0.0.or.more") + + return namespace( + type=type, + version=version, + cpu=data.get("CPU"), + kernel=data.get("KERNEL"), + endianness=data.get("ENDIANNESS"), + os=data.get("OS"), + language="C++" if cplusplus else "C", + language_version=cplusplus if cplusplus else stdc_version, + xcode=bool(data.get("XCODE")), + ) + + +def same_arch_different_bits(): + return ( + ("x86", "x86_64"), + ("ppc", "ppc64"), + ("sparc", "sparc64"), + ) + + +@imports(_from="mozbuild.shellutil", _import="quote") +@imports(_from="mozbuild.configure.constants", _import="OS_preprocessor_checks") +def check_compiler(compiler, language, target): + info = get_compiler_info(compiler, language) + + flags = [] + + # Check language standards + # -------------------------------------------------------------------- + if language != info.language: + raise FatalCheckError( + "`%s` is not a %s compiler." % (quote(*compiler), language) + ) + + # Note: We do a strict version check because there sometimes are backwards + # incompatible changes in the standard, and not all code that compiles as + # C99 compiles as e.g. C11 (as of writing, this is true of libnestegg, for + # example) + if info.language == "C" and info.language_version != 199901: + if info.type == "clang-cl": + flags.append("-Xclang") + flags.append("-std=gnu99") + + cxx17_version = 201703 + if info.language == "C++": + if info.language_version != cxx17_version: + # MSVC headers include C++17 features, but don't guard them + # with appropriate checks. + if info.type == "clang-cl": + flags.append("-Xclang") + flags.append("-std=c++17") + else: + flags.append("-std=gnu++17") + + # Check compiler target + # -------------------------------------------------------------------- + has_target = False + if info.type == "clang": + # Add the target explicitly when the target is aarch64 macosx, because + # the Xcode clang target is named differently, and we need to work around + # https://github.com/rust-lang/rust-bindgen/issues/1871 and + # https://github.com/alexcrichton/cc-rs/issues/542 so we always want + # the target on the command line, even if the compiler would default to + # that. 
+ if info.xcode and target.os == "OSX" and target.cpu == "aarch64": + if "--target=arm64-apple-darwin" not in compiler: + flags.append("--target=arm64-apple-darwin") + has_target = True + + elif ( + not info.kernel + or info.kernel != target.kernel + or not info.endianness + or info.endianness != target.endianness + ): + flags.append("--target=%s" % target.toolchain) + has_target = True + + # Add target flag when there is an OS mismatch (e.g. building for Android on + # Linux). However, only do this if the target OS is in our whitelist, to + # keep things the same on other platforms. + elif target.os in OS_preprocessor_checks and ( + not info.os or info.os != target.os + ): + flags.append("--target=%s" % target.toolchain) + has_target = True + + if not has_target and (not info.cpu or info.cpu != target.cpu): + same_arch = same_arch_different_bits() + if (target.cpu, info.cpu) in same_arch: + flags.append("-m32") + elif (info.cpu, target.cpu) in same_arch: + flags.append("-m64") + elif info.type == "clang-cl" and target.cpu == "aarch64": + flags.append("--target=%s" % target.toolchain) + elif info.type == "clang": + flags.append("--target=%s" % target.toolchain) + + return namespace( + type=info.type, + version=info.version, + target_cpu=info.cpu, + target_kernel=info.kernel, + target_endianness=info.endianness, + target_os=info.os, + flags=flags, + ) + + +@imports(_from="__builtin__", _import="open") +@imports("json") +@imports("os") +def get_vc_paths(topsrcdir): + def vswhere(args): + program_files = os.environ.get("PROGRAMFILES(X86)") or os.environ.get( + "PROGRAMFILES" + ) + if not program_files: + return [] + vswhere = os.path.join( + program_files, "Microsoft Visual Studio", "Installer", "vswhere.exe" + ) + if not os.path.exists(vswhere): + return [] + return json.loads(check_cmd_output(vswhere, "-format", "json", *args)) + + for install in vswhere( + [ + "-products", + "*", + "-requires", + "Microsoft.VisualStudio.Component.VC.Tools.x86.x64", + ] + ): + path = install["installationPath"] + tools_version = ( + open( + os.path.join( + path, r"VC\Auxiliary\Build\Microsoft.VCToolsVersion.default.txt" + ), + "r", + ) + .read() + .strip() + ) + tools_path = os.path.join(path, r"VC\Tools\MSVC", tools_version) + yield (Version(install["installationVersion"]), tools_path) + + +@depends(host) +def host_is_windows(host): + if host.kernel == "WINNT": + return True + + +option( + "--with-visual-studio-version", + nargs=1, + choices=("2017",), + when=host_is_windows, + help="Select a specific Visual Studio version to use", +) + + +@depends("--with-visual-studio-version", when=host_is_windows) +def vs_major_version(value): + if value: + return {"2017": 15}[value[0]] + + +option( + env="VC_PATH", + nargs=1, + when=host_is_windows, + help="Path to the Microsoft Visual C/C++ compiler", +) + + +@depends( + host, + vs_major_version, + check_build_environment, + "VC_PATH", + "--with-visual-studio-version", + when=host_is_windows, +) +@imports(_from="__builtin__", _import="sorted") +@imports(_from="operator", _import="itemgetter") +def vc_compiler_paths_for_version( + host, vs_major_version, env, vc_path, vs_release_name +): + if vc_path and vs_release_name: + die("VC_PATH and --with-visual-studio-version cannot be used together.") + if vc_path: + # Use an arbitrary version, it doesn't matter. 
+ all_versions = [(Version("15"), vc_path[0])] + else: + all_versions = sorted(get_vc_paths(env.topsrcdir), key=itemgetter(0)) + if not all_versions: + return + if vs_major_version: + versions = [d for (v, d) in all_versions if v.major == vs_major_version] + if not versions: + die("Visual Studio %s could not be found!" % vs_release_name) + path = versions[0] + else: + # Choose the newest version. + path = all_versions[-1][1] + host_dir = { + "x86_64": "HostX64", + "x86": "HostX86", + }.get(host.cpu) + if host_dir: + path = os.path.join(path, "bin", host_dir) + return { + "x64": [os.path.join(path, "x64")], + # The cross toolchains require DLLs from the native x64 toolchain. + "x86": [os.path.join(path, "x86"), os.path.join(path, "x64")], + "arm64": [os.path.join(path, "arm64"), os.path.join(path, "x64")], + } + + +@depends(target, vc_compiler_paths_for_version, when=host_is_windows) +def vc_compiler_path(target, paths): + vc_target = { + "x86": "x86", + "x86_64": "x64", + "arm": "arm", + "aarch64": "arm64", + }.get(target.cpu) + if not paths: + return + return paths.get(vc_target) + + +@depends(vc_compiler_path, original_path) +@imports("os") +@imports(_from="os", _import="environ") +def vc_toolchain_search_path(vc_compiler_path, original_path): + result = list(original_path) + + if vc_compiler_path: + # The second item, if there is one, is necessary to have in $PATH for + # Windows to load the required DLLs from there. + if len(vc_compiler_path) > 1: + environ["PATH"] = os.pathsep.join(result + vc_compiler_path[1:]) + + # The first item is where the programs are going to be + result.append(vc_compiler_path[0]) + + return result + + +clang_search_path = bootstrap_search_path("clang", "bin") + + +@depends(bootstrap_search_path("rustc", "bin", when="MOZ_AUTOMATION"), original_path) +@imports("os") +@imports(_from="os", _import="environ") +def rust_search_path(rust_path, original_path): + result = list(rust_path or original_path) + # Also add the rustup install directory for cargo/rustc. + cargo_home = environ.get("CARGO_HOME", "") + if cargo_home: + cargo_home = os.path.abspath(cargo_home) + else: + cargo_home = os.path.expanduser(os.path.join("~", ".cargo")) + rustup_path = os.path.join(cargo_home, "bin") + result.append(rustup_path) + return result + + +# As a workaround until bug 1516228 and bug 1516253 are fixed, set the PATH +# variable for the build to contain the toolchain search path. +@depends(vc_toolchain_search_path) +@imports("os") +@imports(_from="os", _import="environ") +def altered_path(vc_toolchain_search_path): + path = environ["PATH"].split(os.pathsep) + altered_path = list(vc_toolchain_search_path) + for p in path: + if p not in altered_path: + altered_path.append(p) + return os.pathsep.join(altered_path) + + +set_config("PATH", altered_path) + + +# Compiler wrappers +# ============================================================== +option( + "--with-compiler-wrapper", + env="COMPILER_WRAPPER", + nargs=1, + help="Enable compiling with wrappers such as distcc and ccache", +) + +option("--with-ccache", env="CCACHE", nargs="?", help="Enable compiling with ccache") + + +@depends_if("--with-ccache") +def ccache(value): + if len(value): + return value + # If --with-ccache was given without an explicit value, we default to + # 'ccache'. 
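+ # e.g. `ac_add_options --with-ccache` in a mozconfig resolves to plain
+ # "ccache", while --with-ccache=/path/to/sccache (path illustrative) keeps
+ # the value that was given.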
+ return "ccache" + + +ccache = check_prog( + "CCACHE", + progs=(), + input=ccache, + paths=bootstrap_search_path("sccache"), + allow_missing=True, +) + +option(env="CCACHE_PREFIX", nargs=1, help="Compiler prefix to use when using ccache") + +ccache_prefix = depends_if("CCACHE_PREFIX")(lambda prefix: prefix[0]) +set_config("CCACHE_PREFIX", ccache_prefix) + +# Distinguish ccache from sccache. + + +@depends_if(ccache) +def ccache_is_sccache(ccache): + return check_cmd_output(ccache, "--version").startswith("sccache") + + +@depends(ccache, ccache_is_sccache) +def using_ccache(ccache, ccache_is_sccache): + return ccache and not ccache_is_sccache + + +@depends_if(ccache, ccache_is_sccache) +def using_sccache(ccache, ccache_is_sccache): + return ccache and ccache_is_sccache + + +option(env="RUSTC_WRAPPER", nargs=1, help="Wrap rust compilation with given tool") + + +@depends(ccache, ccache_is_sccache, "RUSTC_WRAPPER") +@imports(_from="textwrap", _import="dedent") +@imports("os") +def check_sccache_version(ccache, ccache_is_sccache, rustc_wrapper): + sccache_min_version = Version("0.2.13") + + def check_version(path): + out = check_cmd_output(path, "--version") + version = Version(out.rstrip().split()[-1]) + if version < sccache_min_version: + die( + dedent( + """\ + sccache %s or later is required. sccache in use at %s has + version %s. + + Please upgrade or acquire a new version with |./mach bootstrap|. + """ + ), + sccache_min_version, + path, + version, + ) + + if ccache and ccache_is_sccache: + check_version(ccache) + + if rustc_wrapper and ( + os.path.splitext(os.path.basename(rustc_wrapper[0]))[0].lower() == "sccache" + ): + check_version(rustc_wrapper[0]) + + +set_config("MOZ_USING_CCACHE", using_ccache) +set_config("MOZ_USING_SCCACHE", using_sccache) + +option(env="SCCACHE_VERBOSE_STATS", help="Print verbose sccache stats after build") + + +@depends(using_sccache, "SCCACHE_VERBOSE_STATS") +def sccache_verbose_stats(using_sccache, verbose_stats): + return using_sccache and bool(verbose_stats) + + +set_config("SCCACHE_VERBOSE_STATS", sccache_verbose_stats) + + +@depends("--with-compiler-wrapper", ccache) +@imports(_from="mozbuild.shellutil", _import="split", _as="shell_split") +def compiler_wrapper(wrapper, ccache): + if wrapper: + raw_wrapper = wrapper[0] + wrapper = shell_split(raw_wrapper) + wrapper_program = find_program(wrapper[0]) + if not wrapper_program: + die( + "Cannot find `%s` from the given compiler wrapper `%s`", + wrapper[0], + raw_wrapper, + ) + wrapper[0] = wrapper_program + + if ccache: + if wrapper: + return tuple([ccache] + wrapper) + else: + return (ccache,) + elif wrapper: + return tuple(wrapper) + + +@depends_if(compiler_wrapper) +def using_compiler_wrapper(compiler_wrapper): + return True + + +set_config("MOZ_USING_COMPILER_WRAPPER", using_compiler_wrapper) + + +@template +def default_c_compilers(host_or_target, other_c_compiler=None): + """Template defining the set of default C compilers for the host and + target platforms. + `host_or_target` is either `host` or `target` (the @depends functions + from init.configure. + `other_c_compiler` is the `target` C compiler when `host_or_target` is `host`. 
+ """ + assert host_or_target in {host, target} + + other_c_compiler = () if other_c_compiler is None else (other_c_compiler,) + + @depends(host_or_target, target, toolchain_prefix, *other_c_compiler) + def default_c_compilers( + host_or_target, target, toolchain_prefix, *other_c_compiler + ): + if host_or_target.kernel == "WINNT": + supported = types = ("clang-cl", "clang") + elif host_or_target.kernel == "Darwin": + types = ("clang",) + supported = ("clang", "gcc") + else: + supported = types = ("clang", "gcc") + + info = other_c_compiler[0] if other_c_compiler else None + if info and info.type in supported: + # When getting default C compilers for the host, we prioritize the + # same compiler as the target C compiler. + prioritized = info.compiler + if info.type == "gcc": + same_arch = same_arch_different_bits() + if ( + target.cpu != host_or_target.cpu + and (target.cpu, host_or_target.cpu) not in same_arch + and (host_or_target.cpu, target.cpu) not in same_arch + ): + # If the target C compiler is GCC, and it can't be used with + # -m32/-m64 for the host, it's probably toolchain-prefixed, + # so we prioritize a raw 'gcc' instead. + prioritized = info.type + + types = [prioritized] + [t for t in types if t != info.type] + + gcc = ("gcc",) + if toolchain_prefix and host_or_target is target: + gcc = tuple("%sgcc" % p for p in toolchain_prefix) + gcc + + result = [] + for type in types: + if type == "gcc": + result.extend(gcc) + else: + result.append(type) + + return tuple(result) + + return default_c_compilers + + +@template +def default_cxx_compilers(c_compiler, other_c_compiler=None, other_cxx_compiler=None): + """Template defining the set of default C++ compilers for the host and + target platforms. + `c_compiler` is the @depends function returning a Compiler instance for + the desired platform. + + Because the build system expects the C and C++ compilers to be from the + same compiler suite, we derive the default C++ compilers from the C + compiler that was found if none was provided. + + We also factor in the target C++ compiler when getting the default host + C++ compiler, using the target C++ compiler if the host and target C + compilers are the same. + """ + + assert (other_c_compiler is None) == (other_cxx_compiler is None) + if other_c_compiler is not None: + other_compilers = (other_c_compiler, other_cxx_compiler) + else: + other_compilers = () + + @depends(c_compiler, *other_compilers) + def default_cxx_compilers(c_compiler, *other_compilers): + if other_compilers: + other_c_compiler, other_cxx_compiler = other_compilers + if other_c_compiler.compiler == c_compiler.compiler: + return (other_cxx_compiler.compiler,) + + dir = os.path.dirname(c_compiler.compiler) + file = os.path.basename(c_compiler.compiler) + + if c_compiler.type == "gcc": + return (os.path.join(dir, file.replace("gcc", "g++")),) + + if c_compiler.type == "clang": + return (os.path.join(dir, file.replace("clang", "clang++")),) + + return (c_compiler.compiler,) + + return default_cxx_compilers + + +@template +def provided_program(env_var, when=None): + """Template handling cases where a program can be specified either as a + path or as a path with applicable arguments. 
+ """ + + @depends_if(env_var, when=when) + @imports(_from="itertools", _import="takewhile") + @imports(_from="mozbuild.shellutil", _import="split", _as="shell_split") + def provided(cmd): + # Assume the first dash-prefixed item (and any subsequent items) are + # command-line options, the item before the dash-prefixed item is + # the program we're looking for, and anything before that is a wrapper + # of some kind (e.g. sccache). + cmd = shell_split(cmd[0]) + + without_flags = list(takewhile(lambda x: not x.startswith("-"), cmd)) + + return namespace( + wrapper=without_flags[:-1], + program=without_flags[-1], + flags=cmd[len(without_flags) :], + ) + + return provided + + +def prepare_flags(host_or_target, macos_sdk): + if macos_sdk and host_or_target.os == "OSX": + return ["-isysroot", macos_sdk] + return [] + + +def minimum_gcc_version(): + return Version("7.1.0") + + +@template +def compiler( + language, + host_or_target, + c_compiler=None, + other_compiler=None, + other_c_compiler=None, +): + """Template handling the generic base checks for the compiler for the + given `language` on the given platform (`host_or_target`). + `host_or_target` is either `host` or `target` (the @depends functions + from init.configure. + When the language is 'C++', `c_compiler` is the result of the `compiler` + template for the language 'C' for the same `host_or_target`. + When `host_or_target` is `host`, `other_compiler` is the result of the + `compiler` template for the same `language` for `target`. + When `host_or_target` is `host` and the language is 'C++', + `other_c_compiler` is the result of the `compiler` template for the + language 'C' for `target`. + """ + assert host_or_target in {host, target} + assert language in ("C", "C++") + assert language == "C" or c_compiler is not None + assert host_or_target is target or other_compiler is not None + assert language == "C" or host_or_target is target or other_c_compiler is not None + + host_or_target_str = { + host: "host", + target: "target", + }[host_or_target] + + var = { + ("C", target): "CC", + ("C++", target): "CXX", + ("C", host): "HOST_CC", + ("C++", host): "HOST_CXX", + }[language, host_or_target] + + default_compilers = { + "C": lambda: default_c_compilers(host_or_target, other_compiler), + "C++": lambda: default_cxx_compilers( + c_compiler, other_c_compiler, other_compiler + ), + }[language]() + + what = "the %s %s compiler" % (host_or_target_str, language) + + option(env=var, nargs=1, help="Path to %s" % what) + + # Handle the compiler given by the user through one of the CC/CXX/HOST_CC/ + # HOST_CXX variables. 
+ provided_compiler = provided_program(var) + + # Normally, we'd use `var` instead of `_var`, but the interaction with + # old-configure complicates things, and for now, we a) can't take the plain + # result from check_prog as CC/CXX/HOST_CC/HOST_CXX and b) have to let + # old-configure AC_SUBST it (because it's autoconf doing it, not us) + compiler = check_prog( + "_%s" % var, + what=what, + progs=default_compilers, + input=provided_compiler.program, + paths=clang_search_path, + ) + + @depends(compiler, provided_compiler, compiler_wrapper, host_or_target, macos_sdk) + @checking("whether %s can be used" % what, lambda x: bool(x)) + @imports(_from="mozbuild.shellutil", _import="quote") + def valid_compiler( + compiler, provided_compiler, compiler_wrapper, host_or_target, macos_sdk + ): + wrapper = list(compiler_wrapper or ()) + flags = prepare_flags(host_or_target, macos_sdk) + if provided_compiler: + provided_wrapper = list(provided_compiler.wrapper) + # When doing a subconfigure, the compiler is set by old-configure + # and it contains the wrappers from --with-compiler-wrapper and + # --with-ccache. + if provided_wrapper[: len(wrapper)] == wrapper: + provided_wrapper = provided_wrapper[len(wrapper) :] + wrapper.extend(provided_wrapper) + flags.extend(provided_compiler.flags) + + info = check_compiler(wrapper + [compiler] + flags, language, host_or_target) + + # Check that the additional flags we got are enough to not require any + # more flags. If we get an exception, just ignore it; it's liable to be + # invalid command-line flags, which means the compiler we're checking + # doesn't support those command-line flags and will fail one or more of + # the checks below. + try: + if info.flags: + flags += info.flags + info = check_compiler( + wrapper + [compiler] + flags, language, host_or_target + ) + except FatalCheckError: + pass + + if not info.target_cpu or info.target_cpu != host_or_target.cpu: + raise FatalCheckError( + "%s %s compiler target CPU (%s) does not match --%s CPU (%s)" + % ( + host_or_target_str.capitalize(), + language, + info.target_cpu or "unknown", + host_or_target_str, + host_or_target.raw_cpu, + ) + ) + + if not info.target_kernel or (info.target_kernel != host_or_target.kernel): + raise FatalCheckError( + "%s %s compiler target kernel (%s) does not match --%s kernel (%s)" + % ( + host_or_target_str.capitalize(), + language, + info.target_kernel or "unknown", + host_or_target_str, + host_or_target.kernel, + ) + ) + + if not info.target_endianness or ( + info.target_endianness != host_or_target.endianness + ): + raise FatalCheckError( + "%s %s compiler target endianness (%s) does not match --%s " + "endianness (%s)" + % ( + host_or_target_str.capitalize(), + language, + info.target_endianness or "unknown", + host_or_target_str, + host_or_target.endianness, + ) + ) + + # Compiler version checks + # =================================================== + # Check the compiler version here instead of in `compiler_version` so + # that the `checking` message doesn't pretend the compiler can be used + # to then bail out one line later. + if info.type == "gcc": + if host_or_target.os == "Android": + raise FatalCheckError( + "GCC is not supported on Android.\n" + "Please use clang from the Android NDK instead." + ) + gcc_version = minimum_gcc_version() + if info.version < gcc_version: + raise FatalCheckError( + "Only GCC %d.%d or newer is supported (found version %s)." 
+ % (gcc_version.major, gcc_version.minor, info.version) + ) + + if info.type == "clang-cl": + if info.version < "8.0.0": + raise FatalCheckError( + "Only clang-cl 8.0 or newer is supported (found version %s)" + % info.version + ) + + # If you want to bump the version check here ensure the version + # is known for Xcode in get_compiler_info. + if info.type == "clang" and info.version < "5.0": + raise FatalCheckError( + "Only clang/llvm 5.0 or newer is supported (found version %s)." + % info.version + ) + + if info.flags: + raise FatalCheckError("Unknown compiler or compiler not supported.") + + return namespace( + wrapper=wrapper, + compiler=compiler, + flags=flags, + type=info.type, + version=info.version, + language=language, + ) + + @depends(valid_compiler) + @checking("%s version" % what) + def compiler_version(compiler): + return compiler.version + + if language == "C++": + + @depends(valid_compiler, c_compiler) + def valid_compiler(compiler, c_compiler): + if compiler.type != c_compiler.type: + die( + "The %s C compiler is %s, while the %s C++ compiler is " + "%s. Need to use the same compiler suite.", + host_or_target_str, + c_compiler.type, + host_or_target_str, + compiler.type, + ) + + if compiler.version != c_compiler.version: + die( + "The %s C compiler is version %s, while the %s C++ " + "compiler is version %s. Need to use the same compiler " + "version.", + host_or_target_str, + c_compiler.version, + host_or_target_str, + compiler.version, + ) + return compiler + + # Set CC/CXX/HOST_CC/HOST_CXX for old-configure, which needs the wrapper + # and the flags that were part of the user input for those variables to + # be provided. + add_old_configure_assignment( + var, + depends_if(valid_compiler)( + lambda x: list(x.wrapper) + [x.compiler] + list(x.flags) + ), + ) + + if host_or_target is target: + add_old_configure_assignment( + "ac_cv_prog_%s" % var, + depends_if(valid_compiler)( + lambda x: list(x.wrapper) + [x.compiler] + list(x.flags) + ), + ) + # We check that it works in python configure already. + add_old_configure_assignment("ac_cv_prog_%s_works" % var.lower(), "yes") + add_old_configure_assignment( + "ac_cv_prog_%s_cross" % var.lower(), + depends(cross_compiling)(lambda x: "yes" if x else "no"), + ) + gcc_like = depends(valid_compiler.type)( + lambda x: "yes" if x in ("gcc", "clang") else "no" + ) + add_old_configure_assignment("ac_cv_prog_%s_g" % var.lower(), gcc_like) + if language == "C": + add_old_configure_assignment("ac_cv_prog_gcc", gcc_like) + if language == "C++": + add_old_configure_assignment("ac_cv_prog_gxx", gcc_like) + + # Set CC_TYPE/CC_VERSION/HOST_CC_TYPE/HOST_CC_VERSION to allow + # old-configure to do some of its still existing checks. + if language == "C": + set_config("%s_TYPE" % var, valid_compiler.type) + add_old_configure_assignment("%s_TYPE" % var, valid_compiler.type) + set_config( + "%s_VERSION" % var, depends(valid_compiler.version)(lambda v: str(v)) + ) + + valid_compiler = compiler_class(valid_compiler, host_or_target) + + def compiler_error(): + raise FatalCheckError( + "Failed compiling a simple %s source with %s" % (language, what) + ) + + valid_compiler.try_compile(check_msg="%s works" % what, onerror=compiler_error) + + set_config("%s_BASE_FLAGS" % var, valid_compiler.flags) + + # Set CPP/CXXCPP for both the build system and old-configure. We don't + # need to check this works for preprocessing, because we already relied + # on $CC -E/$CXX -E doing preprocessing work to validate the compiler + # in the first place. 
+ if host_or_target is target: + pp_var = { + "C": "CPP", + "C++": "CXXCPP", + }[language] + + preprocessor = depends_if(valid_compiler)( + lambda x: list(x.wrapper) + [x.compiler, "-E"] + list(x.flags) + ) + + set_config(pp_var, preprocessor) + add_old_configure_assignment(pp_var, preprocessor) + + if language == "C": + linker_var = { + target: "LD", + host: "HOST_LD", + }[host_or_target] + + @deprecated_option(env=linker_var, nargs=1) + def linker(value): + if value: + return value[0] + + @depends(linker) + def unused_linker(linker): + if linker: + log.warning( + "The value of %s is not used by this build system." % linker_var + ) + + return valid_compiler + + +c_compiler = compiler("C", target) +cxx_compiler = compiler("C++", target, c_compiler=c_compiler) +host_c_compiler = compiler("C", host, other_compiler=c_compiler) +host_cxx_compiler = compiler( + "C++", + host, + c_compiler=host_c_compiler, + other_compiler=cxx_compiler, + other_c_compiler=c_compiler, +) + +# Generic compiler-based conditions. +building_with_gcc = depends(c_compiler)(lambda info: info.type == "gcc") + + +@depends(cxx_compiler, ccache_prefix) +@imports("os") +def cxx_is_icecream(info, ccache_prefix): + if ( + os.path.islink(info.compiler) + and os.path.basename(os.readlink(info.compiler)) == "icecc" + ): + return True + if ccache_prefix and os.path.basename(ccache_prefix) == "icecc": + return True + + +set_config("CXX_IS_ICECREAM", cxx_is_icecream) + + +@depends(c_compiler) +def msvs_version(info): + # clang-cl emulates the same version scheme as cl. And MSVS_VERSION needs to + # be set for GYP on Windows. + if info.type == "clang-cl": + return "2017" + + return "" + + +set_config("MSVS_VERSION", msvs_version) + +include("compile-checks.configure") +include("arm.configure", when=depends(target.cpu)(lambda cpu: cpu == "arm")) + + +@depends(host, host_os_kernel_major_version, target) +def needs_macos_sdk_headers_check(host, version, target): + # Only an issue on Mac OS X 10.14 (and probably above). + if host.kernel != "Darwin" or target.kernel != "Darwin" or version < "18": + return + + return True + + +@depends( + cxx_compiler.try_run( + header="#include_next ", + check_msg="for macOS SDK headers", + when=needs_macos_sdk_headers_check, + ), + when=needs_macos_sdk_headers_check, +) +def check_have_mac_10_14_sdk(value): + if value: + return + + die( + "System inttypes.h not found. Please try running " + "`open /Library/Developer/CommandLineTools/Packages/macOS_SDK_headers_for_macOS_10.14.pkg` " + "and following the instructions to install the necessary headers" + ) + + +@depends( + have_64_bit, + try_compile( + body='static_assert(sizeof(void *) == 8, "")', check_msg="for 64-bit OS" + ), +) +def check_have_64_bit(have_64_bit, compiler_have_64_bit): + if have_64_bit != compiler_have_64_bit: + configure_error( + "The target compiler does not agree with configure " + "about the target bitness." + ) + + +@depends(cxx_compiler, target) +def needs_libstdcxx_newness_check(cxx_compiler, target): + # We only have to care about this on Linux and MinGW. + if cxx_compiler.type == "clang-cl": + return + + if target.kernel not in ("Linux", "WINNT"): + return + + if target.os == "Android": + return + + return True + + +def die_on_old_libstdcxx(): + die( + "The libstdc++ in use is not new enough. Please run " + "./mach bootstrap to update your compiler, or update your system " + "libstdc++ installation." + ) + + +try_compile( + includes=["cstddef"], + body="\n".join( + [ + # _GLIBCXX_RELEASE showed up in libstdc++ 7. 
+ "#if defined(__GLIBCXX__) && !defined(_GLIBCXX_RELEASE)", + "# error libstdc++ not new enough", + "#endif", + "#if defined(_GLIBCXX_RELEASE)", + "# if _GLIBCXX_RELEASE < %d" % minimum_gcc_version().major, + "# error libstdc++ not new enough", + "# else", + " (void) 0", + "# endif", + "#endif", + ] + ), + check_msg="for new enough STL headers from libstdc++", + when=needs_libstdcxx_newness_check, + onerror=die_on_old_libstdcxx, +) + + +@depends(c_compiler, target) +def default_debug_flags(compiler_info, target): + # Debug info is ON by default. + if compiler_info.type == "clang-cl": + return "-Z7" + elif target.kernel == "WINNT" and compiler_info.type == "clang": + return "-g -gcodeview" + return "-g" + + +option(env="MOZ_DEBUG_FLAGS", nargs=1, help="Debug compiler flags") + +imply_option("--enable-debug-symbols", depends_if("--enable-debug")(lambda v: v)) + +option( + "--disable-debug-symbols", + nargs="?", + help="Disable debug symbols using the given compiler flags", +) + +set_config("MOZ_DEBUG_SYMBOLS", depends_if("--enable-debug-symbols")(lambda _: True)) + + +@depends("MOZ_DEBUG_FLAGS", "--enable-debug-symbols", default_debug_flags) +def debug_flags(env_debug_flags, enable_debug_flags, default_debug_flags): + # If MOZ_DEBUG_FLAGS is set, and --enable-debug-symbols is set to a value, + # --enable-debug-symbols takes precedence. Note, the value of + # --enable-debug-symbols may be implied by --enable-debug. + if len(enable_debug_flags): + return enable_debug_flags[0] + if env_debug_flags: + return env_debug_flags[0] + return default_debug_flags + + +set_config("MOZ_DEBUG_FLAGS", debug_flags) +add_old_configure_assignment("MOZ_DEBUG_FLAGS", debug_flags) + + +@depends(c_compiler) +def color_cflags(info): + # We could test compiling with flags. By why incur the overhead when + # color support should always be present in a specific toolchain + # version? + + # Code for auto-adding this flag to compiler invocations needs to + # determine if an existing flag isn't already present. That is likely + # using exact string matching on the returned value. So if the return + # value changes to e.g. "=always", exact string match may fail and + # multiple color flags could be added. So examine downstream consumers + # before adding flags to return values. + if info.type == "gcc": + return "-fdiagnostics-color" + elif info.type == "clang": + return "-fcolor-diagnostics" + else: + return "" + + +set_config("COLOR_CFLAGS", color_cflags) + +# Some standard library headers (notably bionic on Android) declare standard +# functions (e.g. getchar()) and also #define macros for those standard +# functions. libc++ deals with this by doing something like the following +# (explanatory comments added): +# +# #ifdef FUNC +# // Capture the definition of FUNC. +# inline _LIBCPP_INLINE_VISIBILITY int __libcpp_FUNC(...) { return FUNC(...); } +# #undef FUNC +# // Use a real inline definition. +# inline _LIBCPP_INLINE_VISIBILITY int FUNC(...) { return _libcpp_FUNC(...); } +# #endif +# +# _LIBCPP_INLINE_VISIBILITY is typically defined as: +# +# __attribute__((__visibility__("hidden"), __always_inline__)) +# +# Unfortunately, this interacts badly with our system header wrappers, as the: +# +# #pragma GCC visibility push(default) +# +# that they do prior to including the actual system header is treated by the +# compiler as an explicit declaration of visibility on every function declared +# in the header. 
Therefore, when the libc++ code above is encountered, it is +# as though the compiler has effectively seen: +# +# int FUNC(...) __attribute__((__visibility__("default"))); +# int FUNC(...) __attribute__((__visibility__("hidden"))); +# +# and the compiler complains about the mismatched visibility declarations. +# +# However, libc++ will only define _LIBCPP_INLINE_VISIBILITY if there is no +# existing definition. We can therefore define it to the empty string (since +# we are properly managing visibility ourselves) and avoid this whole mess. +# Note that we don't need to do this with gcc, as libc++ detects gcc and +# effectively does the same thing we are doing here. +# +# _LIBCPP_ALWAYS_INLINE needs a similar workarounds, since it too declares +# hidden visibility. +# +# _LIBCPP_HIDE_FROM_ABI is a macro in libc++ versions in NDKs >=r19. It too +# declares hidden visibility, but it also declares functions as excluded from +# explicit instantiation (roughly: the function can be unused in the current +# compilation, but does not then trigger an actual definition of the function; +# it is assumed the real definition comes from elsewhere). We need to replicate +# this setup. + + +@depends(c_compiler, target) +def libcxx_override_visibility(c_compiler, target): + if c_compiler.type == "clang" and target.os == "Android": + return namespace( + empty="", + hide_from_abi="__attribute__((__exclude_from_explicit_instantiation__))", + ) + + +set_define("_LIBCPP_INLINE_VISIBILITY", libcxx_override_visibility.empty) +set_define("_LIBCPP_ALWAYS_INLINE", libcxx_override_visibility.empty) + +set_define("_LIBCPP_HIDE_FROM_ABI", libcxx_override_visibility.hide_from_abi) + + +@depends(target, check_build_environment) +def visibility_flags(target, env): + if target.os != "WINNT": + if target.kernel == "Darwin": + return ("-fvisibility=hidden", "-fvisibility-inlines-hidden") + return ( + "-I%s/system_wrappers" % os.path.join(env.dist), + "-include", + "%s/config/gcc_hidden.h" % env.topsrcdir, + ) + + +@depends(target, visibility_flags) +def wrap_system_includes(target, visibility_flags): + if visibility_flags and target.kernel != "Darwin": + return True + + +set_define( + "HAVE_VISIBILITY_HIDDEN_ATTRIBUTE", + depends(visibility_flags)(lambda v: bool(v) or None), +) +set_define( + "HAVE_VISIBILITY_ATTRIBUTE", depends(visibility_flags)(lambda v: bool(v) or None) +) +set_config("WRAP_SYSTEM_INCLUDES", wrap_system_includes) +set_config("VISIBILITY_FLAGS", visibility_flags) + + +@template +def depend_cflags(host_or_target_c_compiler): + @depends(host_or_target_c_compiler) + def depend_cflags(host_or_target_c_compiler): + if host_or_target_c_compiler.type != "clang-cl": + return ["-MD", "-MP", "-MF $(MDDEPDIR)/$(@F).pp"] + else: + # clang-cl doesn't accept the normal -MD -MP -MF options that clang + # does, but the underlying cc1 binary understands how to generate + # dependency files. These options are based on analyzing what the + # normal clang driver sends to cc1 when given the "correct" + # dependency options. + return [ + "-Xclang", + "-MP", + "-Xclang", + "-dependency-file", + "-Xclang", + "$(MDDEPDIR)/$(@F).pp", + "-Xclang", + "-MT", + "-Xclang", + "$@", + ] + + return depend_cflags + + +set_config("_DEPEND_CFLAGS", depend_cflags(c_compiler)) +set_config("_HOST_DEPEND_CFLAGS", depend_cflags(host_c_compiler)) + + +@depends(c_compiler) +def preprocess_option(compiler): + # The uses of PREPROCESS_OPTION depend on the spacing for -o/-Fi. 
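# [Illustrative aside (hypothetical file names, not from the actual build files):
# build rules append the output file name directly to this string, so an
# expansion such as "$(PREPROCESS_OPTION)out.i" must produce "-E -o out.i"
# for gcc/clang but "-P -Fiout.i" for clang-cl, whose -Fi takes its argument
# without a separating space. That is why the gcc/clang value below keeps a
# trailing space.]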
+    if compiler.type in ("gcc", "clang"):
+        return "-E -o "
+    else:
+        return "-P -Fi"
+
+
+set_config("PREPROCESS_OPTION", preprocess_option)
+
+
+# We only want to include windows.configure when we are compiling on
+# Windows, or for Windows.
+
+
+@depends(target, host)
+def is_windows(target, host):
+    return host.kernel == "WINNT" or target.kernel == "WINNT"
+
+
+include("windows.configure", when=is_windows)
+
+
+# On Power ISA, determine compiler flags for VMX, VSX and VSX-3.
+
+set_config(
+    "PPC_VMX_FLAGS",
+    ["-maltivec"],
+    when=depends(target.cpu)(lambda cpu: cpu.startswith("ppc")),
+)
+
+set_config(
+    "PPC_VSX_FLAGS",
+    ["-mvsx"],
+    when=depends(target.cpu)(lambda cpu: cpu.startswith("ppc")),
+)
+
+set_config(
+    "PPC_VSX3_FLAGS",
+    ["-mvsx", "-mcpu=power9"],
+    when=depends(target.cpu)(lambda cpu: cpu.startswith("ppc")),
+)
+
+# ASAN
+# ==============================================================
+
+option("--enable-address-sanitizer", help="Enable Address Sanitizer")
+
+
+@depends(when="--enable-address-sanitizer")
+def asan():
+    return True
+
+
+add_old_configure_assignment("MOZ_ASAN", asan)
+
+# MSAN
+# ==============================================================
+
+option("--enable-memory-sanitizer", help="Enable Memory Sanitizer")
+
+
+@depends(when="--enable-memory-sanitizer")
+def msan():
+    return True
+
+
+add_old_configure_assignment("MOZ_MSAN", msan)
+
+# TSAN
+# ==============================================================
+
+option("--enable-thread-sanitizer", help="Enable Thread Sanitizer")
+
+
+@depends(when="--enable-thread-sanitizer")
+def tsan():
+    return True
+
+
+add_old_configure_assignment("MOZ_TSAN", tsan)
+
+# UBSAN
+# ==============================================================
+
+option(
+    "--enable-undefined-sanitizer", nargs="*", help="Enable UndefinedBehavior Sanitizer"
+)
+
+
+@depends_if("--enable-undefined-sanitizer")
+def ubsan(options):
+    default_checks = [
+        "bool",
+        "bounds",
+        "enum",
+        "integer-divide-by-zero",
+        "object-size",
+        "pointer-overflow",
+        "return",
+        "vla-bound",
+    ]
+
+    checks = options if len(options) else default_checks
+
+    return ",".join(checks)
+
+
+add_old_configure_assignment("MOZ_UBSAN_CHECKS", ubsan)
+
+
+option(
+    "--enable-signed-overflow-sanitizer",
+    help="Enable UndefinedBehavior Sanitizer (Signed Integer Overflow Parts)",
+)
+
+
+@depends(when="--enable-signed-overflow-sanitizer")
+def ub_signed_overflow_san():
+    return True
+
+
+add_old_configure_assignment("MOZ_SIGNED_OVERFLOW_SANITIZE", ub_signed_overflow_san)
+
+
+option(
+    "--enable-unsigned-overflow-sanitizer",
+    help="Enable UndefinedBehavior Sanitizer (Unsigned Integer Overflow Parts)",
+)
+
+
+@depends(when="--enable-unsigned-overflow-sanitizer")
+def ub_unsigned_overflow_san():
+    return True
+
+
+add_old_configure_assignment("MOZ_UNSIGNED_OVERFLOW_SANITIZE", ub_unsigned_overflow_san)
+
+# Security Hardening
+# ==============================================================
+
+option(
+    "--enable-hardening",
+    env="MOZ_SECURITY_HARDENING",
+    help="Enables security hardening compiler options",
+)
+
+
+# This function is a bit confusing. It adds or removes hardening flags in
+# three situations: if --enable-hardening is passed, if --disable-hardening
+# is passed, and if no flag is passed.
+#
+# At the time of this writing, all flags are actually added in the
+# default no-flag case, making --enable-hardening the same as omitting the
+# flag. --disable-hardening will omit the security flags.
(However, not all +# possible security flags will be omitted by --disable-hardening, as many are +# compiler-default options we do not explicitly enable.) +@depends( + "--enable-hardening", + "--enable-address-sanitizer", + "--enable-debug", + "--enable-optimize", + c_compiler, + target, +) +def security_hardening_cflags( + hardening_flag, asan, debug, optimize, c_compiler, target +): + compiler_is_gccish = c_compiler.type in ("gcc", "clang") + mingw_clang = c_compiler.type == "clang" and target.os == "WINNT" + + flags = [] + ldflags = [] + js_flags = [] + js_ldflags = [] + + # ---------------------------------------------------------- + # If hardening is explicitly enabled, or not explicitly disabled + if hardening_flag.origin == "default" or hardening_flag: + # FORTIFY_SOURCE ------------------------------------ + # Require optimization for FORTIFY_SOURCE. See Bug 1417452 + # Also, undefine it before defining it just in case a distro adds it, see Bug 1418398 + if compiler_is_gccish and optimize and not asan: + # Don't enable FORTIFY_SOURCE on Android on the top-level, but do enable in js/ + if target.os != "Android": + flags.append("-U_FORTIFY_SOURCE") + flags.append("-D_FORTIFY_SOURCE=2") + js_flags.append("-U_FORTIFY_SOURCE") + js_flags.append("-D_FORTIFY_SOURCE=2") + if mingw_clang: + # mingw-clang needs to link in ssp which is not done by default + ldflags.append("-lssp") + js_ldflags.append("-lssp") + + # fstack-protector ------------------------------------ + # Enable only if hardening is not disabled and ASAN is + # not on as ASAN will catch the crashes for us + if compiler_is_gccish and not asan: + flags.append("-fstack-protector-strong") + ldflags.append("-fstack-protector-strong") + js_flags.append("-fstack-protector-strong") + js_ldflags.append("-fstack-protector-strong") + + if ( + c_compiler.type == "clang" + and c_compiler.version >= "11.0.1" + and target.os not in ("WINNT", "OSX") + and target.cpu in ("x86", "x86_64", "ppc64", "s390x") + ): + flags.append("-fstack-clash-protection") + ldflags.append("-fstack-clash-protection") + js_flags.append("-fstack-clash-protection") + js_ldflags.append("-fstack-clash-protection") + + # ftrivial-auto-var-init ------------------------------ + # Initialize local variables with a 0xAA pattern in clang debug builds. + # Linux32 fails some xpcshell tests with -ftrivial-auto-var-init + linux32 = target.kernel == "Linux" and target.cpu == "x86" + if ( + (c_compiler.type == "clang" or c_compiler.type == "clang-cl") + and c_compiler.version >= "8" + and debug + and not linux32 + ): + if c_compiler.type == "clang-cl": + flags.append("-Xclang") + js_flags.append("-Xclang") + flags.append("-ftrivial-auto-var-init=pattern") + js_flags.append("-ftrivial-auto-var-init=pattern") + + # ASLR ------------------------------------------------ + # ASLR (dynamicbase) is enabled by default in clang-cl; but the + # mingw-clang build requires it to be explicitly enabled + if mingw_clang: + ldflags.append("-Wl,--dynamicbase") + js_ldflags.append("-Wl,--dynamicbase") + + # Control Flow Guard (CFG) ---------------------------- + if ( + c_compiler.type == "clang-cl" + and c_compiler.version >= "8" + and (target.cpu != "aarch64" or c_compiler.version >= "8.0.1") + ): + if target.cpu == "aarch64" and c_compiler.version >= "10.0.0": + # The added checks in clang 10 make arm64 builds crash. 
(Bug 1639318) + flags.append("-guard:cf,nochecks") + js_flags.append("-guard:cf,nochecks") + else: + flags.append("-guard:cf") + js_flags.append("-guard:cf") + # nolongjmp is needed because clang doesn't emit the CFG tables of + # setjmp return addresses https://bugs.llvm.org/show_bug.cgi?id=40057 + ldflags.append("-guard:cf,nolongjmp") + js_ldflags.append("-guard:cf,nolongjmp") + + # ---------------------------------------------------------- + # If ASAN _is_ on, undefine FORTIFY_SOURCE just to be safe + if asan: + flags.append("-U_FORTIFY_SOURCE") + js_flags.append("-U_FORTIFY_SOURCE") + + # fno-common ----------------------------------------- + # Do not merge variables for ASAN; can detect some subtle bugs + if asan: + # clang-cl does not recognize the flag, it must be passed down to clang + if c_compiler.type == "clang-cl": + flags.append("-Xclang") + flags.append("-fno-common") + + return namespace( + flags=flags, + ldflags=ldflags, + js_flags=js_flags, + js_ldflags=js_ldflags, + ) + + +set_config("MOZ_HARDENING_CFLAGS", security_hardening_cflags.flags) +set_config("MOZ_HARDENING_LDFLAGS", security_hardening_cflags.ldflags) +set_config("MOZ_HARDENING_CFLAGS_JS", security_hardening_cflags.js_flags) +set_config("MOZ_HARDENING_LDFLAGS_JS", security_hardening_cflags.js_ldflags) + + +# Frame pointers +# ============================================================== +@depends(c_compiler) +def frame_pointer_flags(compiler): + if compiler.type == "clang-cl": + return namespace( + enable=["-Oy-"], + disable=["-Oy"], + ) + return namespace( + enable=["-fno-omit-frame-pointer", "-funwind-tables"], + disable=["-fomit-frame-pointer", "-funwind-tables"], + ) + + +@depends( + moz_optimize.optimize, + moz_debug, + target, + "--enable-memory-sanitizer", + "--enable-address-sanitizer", + "--enable-undefined-sanitizer", +) +def frame_pointer_default(optimize, debug, target, msan, asan, ubsan): + return bool( + not optimize + or debug + or msan + or asan + or ubsan + or (target.os == "WINNT" and target.cpu in ("x86", "aarch64")) + ) + + +option( + "--enable-frame-pointers", + default=frame_pointer_default, + help="{Enable|Disable} frame pointers", +) + + +@depends("--enable-frame-pointers", frame_pointer_flags) +def frame_pointer_flags(enable, flags): + if enable: + return flags.enable + return flags.disable + + +set_config("MOZ_FRAMEPTR_FLAGS", frame_pointer_flags) + + +# nasm detection +# ============================================================== +nasm = check_prog( + "NASM", ["nasm"], allow_missing=True, paths=bootstrap_search_path("nasm") +) + + +@depends_if(nasm) +@checking("nasm version") +def nasm_version(nasm): + (retcode, stdout, _) = get_cmd_output(nasm, "-v") + if retcode: + # mac stub binary + return None + + version = stdout.splitlines()[0].split()[2] + return Version(version) + + +@depends_if(nasm_version) +def nasm_major_version(nasm_version): + return str(nasm_version.major) + + +@depends_if(nasm_version) +def nasm_minor_version(nasm_version): + return str(nasm_version.minor) + + +set_config("NASM_MAJOR_VERSION", nasm_major_version) +set_config("NASM_MINOR_VERSION", nasm_minor_version) + + +@depends(nasm, target) +def nasm_asflags(nasm, target): + if nasm: + asflags = { + ("OSX", "x86"): ["-f", "macho32"], + ("OSX", "x86_64"): ["-f", "macho64"], + ("WINNT", "x86"): ["-f", "win32"], + ("WINNT", "x86_64"): ["-f", "win64"], + }.get((target.os, target.cpu), None) + if asflags is None: + # We're assuming every x86 platform we support that's + # not Windows or Mac is ELF. 
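# [Illustrative aside (hypothetical targets, not from the actual build files):
# a Linux x86_64 target misses the table above and takes the fallback below,
# ending up with NASM_ASFLAGS = ["-f", "elf64"]; a Linux x86 target would get
# ["-f", "elf32"].]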
+ if target.cpu == "x86": + asflags = ["-f", "elf32"] + elif target.cpu == "x86_64": + asflags = ["-f", "elf64"] + return asflags + + +set_config("NASM_ASFLAGS", nasm_asflags) + + +@depends(nasm_asflags) +def have_nasm(value): + if value: + return True + + +@depends(yasm_asflags) +def have_yasm(yasm_asflags): + if yasm_asflags: + return True + + +set_config("HAVE_NASM", have_nasm) + +set_config("HAVE_YASM", have_yasm) +# Until the YASM variable is not necessary in old-configure. +add_old_configure_assignment("YASM", have_yasm) + + +# Code Coverage +# ============================================================== + +option("--enable-coverage", env="MOZ_CODE_COVERAGE", help="Enable code coverage") + + +@depends("--enable-coverage") +def code_coverage(value): + if value: + return True + + +set_config("MOZ_CODE_COVERAGE", code_coverage) +set_define("MOZ_CODE_COVERAGE", code_coverage) + + +@depends(target, c_compiler, vc_path, check_build_environment, when=code_coverage) +@imports("os") +@imports("re") +@imports(_from="__builtin__", _import="open") +def coverage_cflags(target, c_compiler, vc_path, build_env): + cflags = ["--coverage"] + + # clang 11 no longer accepts this flag (its behavior became the default) + if c_compiler.type in ("clang", "clang-cl") and c_compiler.version < "11.0.0": + cflags += [ + "-Xclang", + "-coverage-no-function-names-in-data", + ] + + if target.os == "WINNT" and c_compiler.type == "clang-cl": + # The Visual Studio directory is the parent of the Visual C++ directory. + vs_path = os.path.dirname(vc_path) + + # We need to get the real path of Visual Studio, which can be in a + # symlinked directory (for example, on automation). + vs_path = os.path.realpath(vs_path) + + cflags += [ + "-fprofile-exclude-files=^{}.*$".format(re.escape(vs_path)), + ] + + response_file_path = os.path.join(build_env.topobjdir, "code_coverage_cflags") + + with open(response_file_path, "w") as f: + f.write(" ".join(cflags)) + + return ["@{}".format(response_file_path)] + + +set_config("COVERAGE_CFLAGS", coverage_cflags) + +# ============================================================== + +option(env="RUSTFLAGS", nargs=1, help="Rust compiler flags") +set_config("RUSTFLAGS", depends("RUSTFLAGS")(lambda flags: flags)) + + +# Rust compiler flags +# ============================================================== + +option( + env="RUSTC_OPT_LEVEL", + nargs=1, + help="Rust compiler optimization level (-C opt-level=%s)", +) + +# --enable-release kicks in full optimizations. +imply_option("RUSTC_OPT_LEVEL", "2", when="--enable-release") + + +@depends("RUSTC_OPT_LEVEL", moz_optimize) +def rustc_opt_level(opt_level_option, moz_optimize): + if opt_level_option: + return opt_level_option[0] + else: + return "1" if moz_optimize.optimize else "0" + + +@depends( + rustc_opt_level, + debug_rust, + target, + "--enable-debug-symbols", + "--enable-frame-pointers", +) +def rust_compile_flags(opt_level, debug_rust, target, debug_symbols, frame_pointers): + # Cargo currently supports only two interesting profiles for building: + # development and release. Those map (roughly) to --enable-debug and + # --disable-debug in Gecko, respectively. + # + # But we'd also like to support an additional axis of control for + # optimization level. Since Cargo only supports 2 profiles, we're in + # a bit of a bind. + # + # Code here derives various compiler options given other configure options. + # The options defined here effectively override defaults specified in + # Cargo.toml files. 
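# [Illustrative aside (hypothetical configuration, not from the actual build
# files): RUSTC_OPT_LEVEL=2 together with --enable-debug-symbols and
# --enable-frame-pointers would yield roughly
# ["-C", "opt-level=2", "-C", "debuginfo=2", "-C", "force-frame-pointers=yes"],
# with "-C control-flow-guard=yes" appended only for Windows targets other
# than aarch64, as derived by the code below.]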
+ + debug_assertions = None + debug_info = None + + # opt-level=0 implies -C debug-assertions, which may not be desired + # unless Rust debugging is enabled. + if opt_level == "0" and not debug_rust: + debug_assertions = False + + if debug_symbols: + debug_info = "2" + + opts = [] + + if opt_level is not None: + opts.append("opt-level=%s" % opt_level) + if debug_assertions is not None: + opts.append("debug-assertions=%s" % ("yes" if debug_assertions else "no")) + if debug_info is not None: + opts.append("debuginfo=%s" % debug_info) + if frame_pointers: + opts.append("force-frame-pointers=yes") + # CFG for arm64 is crashy, see `def security_hardening_cflags`. + if target.kernel == "WINNT" and target.cpu != "aarch64": + opts.append("control-flow-guard=yes") + + flags = [] + for opt in opts: + flags.extend(["-C", opt]) + + return flags + + +# Rust incremental compilation +# ============================================================== + + +option("--disable-cargo-incremental", help="Disable incremental rust compilation.") + + +@depends( + rustc_opt_level, + debug_rust, + "MOZ_AUTOMATION", + code_coverage, + "--disable-cargo-incremental", + using_sccache, + "RUSTC_WRAPPER", +) +@imports("os") +def cargo_incremental( + opt_level, + debug_rust, + automation, + code_coverage, + enabled, + using_sccache, + rustc_wrapper, +): + """Return a value for the CARGO_INCREMENTAL environment variable.""" + + if not enabled: + return "0" + + # We never want to use incremental compilation in automation. sccache + # handles our automation use case much better than incremental compilation + # would. + if automation: + return "0" + + # Coverage instrumentation doesn't play well with incremental compilation + # https://github.com/rust-lang/rust/issues/50203. + if code_coverage: + return "0" + + # Incremental compilation doesn't work as well as it should, and if we're + # using sccache, it's better to use sccache than incremental compilation. + if not using_sccache and rustc_wrapper: + rustc_wrapper = os.path.basename(rustc_wrapper[0]) + if os.path.splitext(rustc_wrapper)[0].lower() == "sccache": + using_sccache = True + if using_sccache: + return "0" + + # Incremental compilation is automatically turned on for debug builds, so + # we don't need to do anything special here. + if debug_rust: + return + + # --enable-release automatically sets -O2 for Rust code, and people can + # set RUSTC_OPT_LEVEL to 2 or even 3 if they want to profile Rust code. + # Let's assume that if Rust code is using -O2 or higher, we shouldn't + # be using incremental compilation, because we'd be imposing a + # significant runtime cost. + if opt_level not in ("0", "1"): + return + + # We're clear to use incremental compilation! + return "1" + + +set_config("CARGO_INCREMENTAL", cargo_incremental) + +# Linker detection +# ============================================================== +# The policy is as follows: +# For Windows: +# - the linker is picked via the LINKER environment variable per windows.configure, +# but ought to be llvm-lld in any case. +# For macOS: +# - the linker is ld64, either from XCode on macOS, or from cctools-ports when +# cross-compiling. lld can be enabled manually, but as of writing, mach-o support +# for lld is incomplete. +# For other OSes: +# - on local developer builds: lld is used if present. Otherwise gold is used if present +# otherwise, BFD ld is used. +# - on release/official builds: whatever "ld" resolves to is used, except on Android x86/x86_64 +# where BFD ld is used. 
Usually, "ld" resolves to BFD ld, except with the Android NDK, +# where it resolves to gold. lld is not used by default on Linux and Android because +# it introduces layout changes that prevent elfhack from working. See e.g. +# https://bugzilla.mozilla.org/show_bug.cgi?id=1563654#c2. +@depends(target) +def is_linker_option_enabled(target): + if target.kernel not in ("WINNT", "SunOS"): + return True + + +option( + "--enable-gold", + env="MOZ_FORCE_GOLD", + help="Enable GNU Gold Linker when it is not already the default", + when=is_linker_option_enabled, +) + +imply_option("--enable-linker", "gold", when="--enable-gold") + + +@depends(target, developer_options) +def enable_linker_default(target, developer_options): + # x86-64 gold has bugs in how it lays out .note.* sections. See bug 1573820. + # x86-32 gold has a bug when assembly files are built. See bug 1651699. + # lld is faster, so prefer that for developer builds. + if target.os == "Android" and target.cpu in ("x86", "x86_64"): + return "lld" if developer_options else "bfd" + + +option( + "--enable-linker", + nargs=1, + help="Select the linker {bfd, gold, ld64, lld, lld-*}{|}", + default=enable_linker_default, + when=is_linker_option_enabled, +) + + +# No-op to enable depending on --enable-linker from default_elfhack in +# toolkit/moz.configure. +@depends("--enable-linker", when=is_linker_option_enabled) +def enable_linker(linker): + return linker + + +@depends( + "--enable-linker", + c_compiler, + developer_options, + "--enable-gold", + extra_toolchain_flags, + target, + when=is_linker_option_enabled, +) +@checking("for linker", lambda x: x.KIND) +@imports("os") +@imports("shutil") +def select_linker( + linker, c_compiler, developer_options, enable_gold, toolchain_flags, target +): + + if linker: + linker = linker[0] + else: + linker = None + + def is_valid_linker(linker): + if target.kernel == "Darwin": + valid_linkers = ("ld64", "lld") + else: + valid_linkers = ("bfd", "gold", "lld") + if linker in valid_linkers: + return True + if "lld" in valid_linkers and linker.startswith("lld-"): + return True + return False + + if linker and not is_valid_linker(linker): + # Check that we are trying to use a supported linker + die("Unsupported linker " + linker) + + # Check the kind of linker + version_check = ["-Wl,--version"] + cmd_base = c_compiler.wrapper + [c_compiler.compiler] + c_compiler.flags + + def try_linker(linker): + # Generate the compiler flag + if linker == "ld64": + linker_flag = ["-fuse-ld=ld"] + elif linker: + linker_flag = ["-fuse-ld=" + linker] + else: + linker_flag = [] + cmd = cmd_base + linker_flag + version_check + if toolchain_flags: + cmd += toolchain_flags + + # ld64 doesn't have anything to print out a version. It does print out + # "ld64: For information on command line options please use 'man ld'." + # but that would require doing two attempts, one with --version, that + # would fail, and another with --help. + # Instead, abuse its LD_PRINT_OPTIONS feature to detect a message + # specific to it on stderr when it fails to process --version. + env = dict(os.environ) + env["LD_PRINT_OPTIONS"] = "1" + # Some locales might not print out the strings we are looking for, so + # ensure consistent output. 
+ env["LC_ALL"] = "C" + retcode, stdout, stderr = get_cmd_output(*cmd, env=env) + if retcode == 1 and "Logging ld64 options" in stderr: + kind = "ld64" + + elif retcode != 0: + return None + + elif "GNU ld" in stdout: + # We are using the normal linker + kind = "bfd" + + elif "GNU gold" in stdout: + kind = "gold" + + elif "LLD" in stdout: + kind = "lld" + + else: + kind = "unknown" + + return namespace( + KIND=kind, + LINKER_FLAG=linker_flag, + ) + + result = try_linker(linker) + if result is None: + if linker: + die("Could not use {} as linker".format(linker)) + die("Failed to find a linker") + + if ( + linker is None + and enable_gold.origin == "default" + and developer_options + and result.KIND in ("bfd", "gold") + ): + # try and use lld if available. + tried = try_linker("lld") + if result.KIND != "gold" and (tried is None or tried.KIND != "lld"): + tried = try_linker("gold") + if tried is None or tried.KIND != "gold": + tried = None + if tried: + result = tried + + # If an explicit linker was given, error out if what we found is different. + if linker and not linker.startswith(result.KIND): + die("Could not use {} as linker".format(linker)) + + return result + + +set_config("LINKER_KIND", select_linker.KIND) + + +@depends_if(select_linker, macos_sdk) +def linker_ldflags(linker, macos_sdk): + flags = list((linker and linker.LINKER_FLAG) or []) + if macos_sdk: + if linker and linker.KIND == "ld64": + flags.append("-Wl,-syslibroot,%s" % macos_sdk) + else: + flags.append("-Wl,--sysroot=%s" % macos_sdk) + + return flags + + +add_old_configure_assignment("LINKER_LDFLAGS", linker_ldflags) + + +# There's a wrinkle with MinGW: linker configuration is not enabled, so +# `select_linker` is never invoked. Hard-code around it. +@depends(select_linker, target, c_compiler) +def gcc_use_gnu_ld(select_linker, target, c_compiler): + if select_linker is not None: + return select_linker.KIND in ("bfd", "gold", "lld") + if target.kernel == "WINNT" and c_compiler.type == "clang": + return True + return None + + +# GCC_USE_GNU_LD=1 means the linker is command line compatible with GNU ld. +set_config("GCC_USE_GNU_LD", gcc_use_gnu_ld) +add_old_configure_assignment("GCC_USE_GNU_LD", gcc_use_gnu_ld) + +# Assembler detection +# ============================================================== + +option(env="AS", nargs=1, help="Path to the assembler") + + +@depends(target, c_compiler) +def as_info(target, c_compiler): + if c_compiler.type == "clang-cl": + ml = { + "x86": "ml.exe", + "x86_64": "ml64.exe", + "aarch64": "armasm64.exe", + }.get(target.cpu) + return namespace(type="masm", names=(ml,)) + # When building with anything but clang-cl, we just use the C compiler as the assembler. + return namespace(type="gcc", names=(c_compiler.compiler,)) + + +# One would expect the assembler to be specified merely as a program. But in +# cases where the assembler is passed down into js/, it can be specified in +# the same way as CC: a program + a list of argument flags. We might as well +# permit the same behavior in general, even though it seems somewhat unusual. +# So we have to do the same sort of dance as we did above with +# `provided_compiler`. 
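An illustrative aside, not taken from the actual build files: the splitting and recombination described above can be pictured with plain lists. The values below are hypothetical; in the non-MASM case, as_with_flags (defined further down) reassembles the C compiler's wrapper, program and flags in exactly this order.

    # Hypothetical wrapper/program/flags, mirroring the recombination done by
    # as_with_flags below.
    wrapper, program, flags = ["ccache"], "clang", ["--target=x86_64-unknown-linux-gnu"]
    as_command = wrapper + [program] + flags
    print(as_command)  # ['ccache', 'clang', '--target=x86_64-unknown-linux-gnu']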
+provided_assembler = provided_program("AS") +assembler = check_prog( + "_AS", + input=provided_assembler.program, + what="the assembler", + progs=as_info.names, + paths=vc_toolchain_search_path, +) + + +@depends(as_info, assembler, provided_assembler, c_compiler) +def as_with_flags(as_info, assembler, provided_assembler, c_compiler): + if provided_assembler: + return provided_assembler.wrapper + [assembler] + provided_assembler.flags + + if as_info.type == "masm": + return assembler + + assert as_info.type == "gcc" + + # Need to add compiler wrappers and flags as appropriate. + return c_compiler.wrapper + [assembler] + c_compiler.flags + + +add_old_configure_assignment("AS", as_with_flags) +add_old_configure_assignment("ac_cv_prog_AS", as_with_flags) + + +@depends(assembler, c_compiler, extra_toolchain_flags) +@imports("subprocess") +@imports(_from="os", _import="devnull") +def gnu_as(assembler, c_compiler, toolchain_flags): + # clang uses a compatible GNU assembler. + if c_compiler.type == "clang": + return True + + if c_compiler.type == "gcc": + cmd = [assembler] + c_compiler.flags + if toolchain_flags: + cmd += toolchain_flags + cmd += ["-Wa,--version", "-c", "-o", devnull, "-x", "assembler", "-"] + # We don't actually have to provide any input on stdin, `Popen.communicate` will + # close the stdin pipe. + # clang will error if it uses its integrated assembler for this target, + # so handle failures gracefully. + if "GNU" in check_cmd_output(*cmd, stdin=subprocess.PIPE, onerror=lambda: ""): + return True + + +set_config("GNU_AS", gnu_as) +add_old_configure_assignment("GNU_AS", gnu_as) + + +@depends(as_info, target) +def as_dash_c_flag(as_info, target): + # armasm64 doesn't understand -c. + if as_info.type == "masm" and target.cpu == "aarch64": + return "" + else: + return "-c" + + +set_config("AS_DASH_C_FLAG", as_dash_c_flag) + + +@depends(as_info, target) +def as_outoption(as_info, target): + # The uses of ASOUTOPTION depend on the spacing for -o/-Fo. 
+ if as_info.type == "masm" and target.cpu != "aarch64": + return "-Fo" + + return "-o " + + +set_config("ASOUTOPTION", as_outoption) + +# clang plugin handling +# ============================================================== + +option( + "--enable-clang-plugin", + env="ENABLE_CLANG_PLUGIN", + help="Enable building with the Clang plugin (gecko specific static analyzers)", +) + +add_old_configure_assignment( + "ENABLE_CLANG_PLUGIN", depends_if("--enable-clang-plugin")(lambda _: True) +) + + +@depends(host_c_compiler, c_compiler, when="--enable-clang-plugin") +def llvm_config(host_c_compiler, c_compiler): + clang = None + for compiler in (host_c_compiler, c_compiler): + if compiler and compiler.type == "clang": + clang = compiler.compiler + break + elif compiler and compiler.type == "clang-cl": + clang = os.path.join(os.path.dirname(compiler.compiler), "clang") + break + + if not clang: + die("Cannot --enable-clang-plugin when not building with clang") + llvm_config = "llvm-config" + out = check_cmd_output(clang, "--print-prog-name=llvm-config", onerror=lambda: None) + if out: + llvm_config = out.rstrip() + return (llvm_config,) + + +llvm_config = check_prog( + "LLVM_CONFIG", + llvm_config, + what="llvm-config", + when="--enable-clang-plugin", + paths=clang_search_path, +) + +add_old_configure_assignment("LLVM_CONFIG", llvm_config) + + +option( + "--enable-clang-plugin-alpha", + env="ENABLE_CLANG_PLUGIN_ALPHA", + help="Enable static analysis with clang-plugin alpha checks.", +) + + +@depends("--enable-clang-plugin", "--enable-clang-plugin-alpha") +def check_clang_plugin_alpha(enable_clang_plugin, enable_clang_plugin_alpha): + if enable_clang_plugin_alpha: + if enable_clang_plugin: + return True + die("Cannot enable clang-plugin alpha checkers without --enable-clang-plugin.") + + +add_old_configure_assignment("ENABLE_CLANG_PLUGIN_ALPHA", check_clang_plugin_alpha) +set_define("MOZ_CLANG_PLUGIN_ALPHA", check_clang_plugin_alpha) + +option( + "--enable-mozsearch-plugin", + env="ENABLE_MOZSEARCH_PLUGIN", + help="Enable building with the mozsearch indexer plugin", +) + +add_old_configure_assignment( + "ENABLE_MOZSEARCH_PLUGIN", depends_if("--enable-mozsearch-plugin")(lambda _: True) +) + +# Libstdc++ compatibility hacks +# ============================================================== +# +option( + "--enable-stdcxx-compat", + env="MOZ_STDCXX_COMPAT", + help="Enable compatibility with older libstdc++", +) + + +@template +def libstdcxx_version(var, compiler): + @depends(compiler, when="--enable-stdcxx-compat") + @checking(var, lambda v: v and "GLIBCXX_%s" % v.dotted) + @imports(_from="mozbuild.configure.libstdcxx", _import="find_version") + @imports(_from="__builtin__", _import="Exception") + def version(compiler): + try: + result = find_version( + compiler.wrapper + [compiler.compiler] + compiler.flags + ) + except Exception: + die("Couldn't determine libstdc++ version") + if result: + return namespace( + dotted=result[0], + encoded=str(result[1]), + ) + + set_config(var, version.encoded) + return version + + +add_gcc_flag( + "-D_GLIBCXX_USE_CXX11_ABI=0", + cxx_compiler, + when=libstdcxx_version("MOZ_LIBSTDCXX_TARGET_VERSION", cxx_compiler), +) +add_gcc_flag( + "-D_GLIBCXX_USE_CXX11_ABI=0", + host_cxx_compiler, + when=libstdcxx_version("MOZ_LIBSTDCXX_HOST_VERSION", host_cxx_compiler), +) + + +# Support various fuzzing options +# ============================================================== +option("--enable-fuzzing", help="Enable fuzzing support") + + +@depends("--enable-fuzzing") +def 
enable_fuzzing(value): + if value: + return True + + +@depends( + try_compile( + body="__AFL_COMPILER;", check_msg="for AFL compiler", when="--enable-fuzzing" + ) +) +def enable_aflfuzzer(afl): + if afl: + return True + + +@depends(enable_fuzzing, enable_aflfuzzer, c_compiler, target) +def enable_libfuzzer(fuzzing, afl, c_compiler, target): + if fuzzing and not afl and c_compiler.type == "clang" and target.os != "Android": + return True + + +@depends(enable_fuzzing, enable_aflfuzzer, enable_libfuzzer) +def enable_fuzzing_interfaces(fuzzing, afl, libfuzzer): + if fuzzing and (afl or libfuzzer): + return True + + +set_config("FUZZING", enable_fuzzing) +set_define("FUZZING", enable_fuzzing) + +set_config("LIBFUZZER", enable_libfuzzer) +set_define("LIBFUZZER", enable_libfuzzer) +add_old_configure_assignment("LIBFUZZER", enable_libfuzzer) + +set_config("FUZZING_INTERFACES", enable_fuzzing_interfaces) +set_define("FUZZING_INTERFACES", enable_fuzzing_interfaces) +add_old_configure_assignment("FUZZING_INTERFACES", enable_fuzzing_interfaces) + + +@depends( + c_compiler.try_compile( + flags=["-fsanitize=fuzzer-no-link"], + when=enable_fuzzing, + check_msg="whether the C compiler supports -fsanitize=fuzzer-no-link", + ), + tsan, +) +def libfuzzer_flags(value, tsan): + if tsan: + # With ThreadSanitizer, we should not use any libFuzzer instrumentation because + # it is incompatible (e.g. there are races on global sanitizer coverage counters). + # Instead we use an empty set of flags here but still build the fuzzing targets. + # With this setup, we can still run files through these targets in TSan builds, + # e.g. those obtained from regular fuzzing. + # This code can be removed once libFuzzer has been made compatible with TSan. + # + # Also, this code needs to be kept in sync with certain gyp files, currently: + # - dom/media/webrtc/transport/third_party/nICEr/nicer.gyp + return namespace(no_link_flag_supported=False, use_flags=[]) + + if value: + no_link_flag_supported = True + # recommended for (and only supported by) clang >= 6 + use_flags = ["-fsanitize=fuzzer-no-link"] + else: + no_link_flag_supported = False + use_flags = ["-fsanitize-coverage=trace-pc-guard,trace-cmp"] + + return namespace( + no_link_flag_supported=no_link_flag_supported, + use_flags=use_flags, + ) + + +set_config("HAVE_LIBFUZZER_FLAG_FUZZER_NO_LINK", libfuzzer_flags.no_link_flag_supported) +set_config("LIBFUZZER_FLAGS", libfuzzer_flags.use_flags) +add_old_configure_assignment("LIBFUZZER_FLAGS", libfuzzer_flags.use_flags) + +# Shared library building +# ============================================================== + +# XXX: The use of makefile constructs in these variables is awful. 
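An illustrative aside, not taken from the actual build files: for the generic ELF branch of the function below (neither Windows, Darwin nor NetBSD), the make-level variables are glued together as in this sketch; the $(...) tokens are expanded later by make, so they are placeholders here, not real values.

    # Mirrors the generic ELF branch of make_shared_library below.
    cxx = ["$(CXX)", "$(COMPUTED_CXX_LDFLAGS)"]
    flags = ["$(PGO_CFLAGS)", "$(DSO_PIC_CFLAGS)", "$(DSO_LDOPTS)"]
    soname = ["-Wl,-h,$(DSO_SONAME)"]
    print(" ".join(cxx + flags + soname + ["-o", "$@"]))
    # -> $(CXX) $(COMPUTED_CXX_LDFLAGS) $(PGO_CFLAGS) $(DSO_PIC_CFLAGS) $(DSO_LDOPTS) -Wl,-h,$(DSO_SONAME) -o $@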
+@depends(target, c_compiler) +def make_shared_library(target, compiler): + if target.os == "WINNT": + if compiler.type == "gcc": + return namespace( + mkshlib=["$(CXX)", "$(DSO_LDOPTS)", "-o", "$@"], + mkcshlib=["$(CC)", "$(DSO_LDOPTS)", "-o", "$@"], + ) + elif compiler.type == "clang": + return namespace( + mkshlib=[ + "$(CXX)", + "$(DSO_LDOPTS)", + "-Wl,-pdb,$(LINK_PDBFILE)", + "-o", + "$@", + ], + mkcshlib=[ + "$(CC)", + "$(DSO_LDOPTS)", + "-Wl,-pdb,$(LINK_PDBFILE)", + "-o", + "$@", + ], + ) + else: + linker = [ + "$(LINKER)", + "-NOLOGO", + "-DLL", + "-OUT:$@", + "-PDB:$(LINK_PDBFILE)", + "$(DSO_LDOPTS)", + ] + return namespace( + mkshlib=linker, + mkcshlib=linker, + ) + + cc = ["$(CC)", "$(COMPUTED_C_LDFLAGS)"] + cxx = ["$(CXX)", "$(COMPUTED_CXX_LDFLAGS)"] + flags = ["$(PGO_CFLAGS)", "$(DSO_PIC_CFLAGS)", "$(DSO_LDOPTS)"] + output = ["-o", "$@"] + + if target.kernel == "Darwin": + soname = [] + elif target.os == "NetBSD": + soname = ["-Wl,-soname,$(DSO_SONAME)"] + else: + assert compiler.type in ("gcc", "clang") + + soname = ["-Wl,-h,$(DSO_SONAME)"] + + return namespace( + mkshlib=cxx + flags + soname + output, + mkcshlib=cc + flags + soname + output, + ) + + +set_config("MKSHLIB", make_shared_library.mkshlib) +set_config("MKCSHLIB", make_shared_library.mkcshlib) + + +@depends(c_compiler, toolchain_prefix, when=target_is_windows) +def rc_names(c_compiler, toolchain_prefix): + if c_compiler.type in ("gcc", "clang"): + return tuple("%s%s" % (p, "windres") for p in ("",) + (toolchain_prefix or ())) + return ("llvm-rc",) + + +check_prog("RC", rc_names, paths=clang_search_path, when=target_is_windows) + + +@depends(toolchain_prefix, c_compiler) +def ar_config(toolchain_prefix, c_compiler): + if c_compiler.type == "clang-cl": + return namespace( + names=("llvm-lib",), + flags=("-llvmlibthin", "-out:$@"), + ) + + names = tuple("%s%s" % (p, "ar") for p in (toolchain_prefix or ()) + ("",)) + if c_compiler.type == "clang": + # Get the llvm-ar path as per the output from clang --print-prog-name=llvm-ar + # so that we directly get the one under the clang directory, rather than one + # that might be in /usr/bin and that might point to one from a different version + # of clang. + out = check_cmd_output( + c_compiler.compiler, "--print-prog-name=llvm-ar", onerror=lambda: None + ) + llvm_ar = out.rstrip() if out else "llvm-ar" + names = (llvm_ar,) + names + + return namespace( + names=names, + flags=("crs", "$@"), + ) + + +ar = check_prog("AR", ar_config.names, paths=clang_search_path) + +add_old_configure_assignment("AR", ar) + +set_config("AR_FLAGS", ar_config.flags) + + +@depends(toolchain_prefix, c_compiler) +def nm_names(toolchain_prefix, c_compiler): + names = tuple("%s%s" % (p, "nm") for p in (toolchain_prefix or ()) + ("",)) + if c_compiler.type == "clang": + # Get the llvm-nm path as per the output from clang --print-prog-name=llvm-nm + # so that we directly get the one under the clang directory, rather than one + # that might be in /usr/bin and that might point to one from a different version + # of clang. 
+ out = check_cmd_output( + c_compiler.compiler, "--print-prog-name=llvm-nm", onerror=lambda: None + ) + llvm_nm = out.rstrip() if out else "llvm-nm" + names = (llvm_nm,) + names + + return names + + +check_prog("NM", nm_names, paths=clang_search_path, when=target_is_linux) + + +option("--enable-cpp-rtti", help="Enable C++ RTTI") + +add_old_configure_assignment("_MOZ_USE_RTTI", "1", when="--enable-cpp-rtti") diff --git a/build/moz.configure/update-programs.configure b/build/moz.configure/update-programs.configure new file mode 100644 index 0000000000..d5a75b9ac8 --- /dev/null +++ b/build/moz.configure/update-programs.configure @@ -0,0 +1,83 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# Verify MAR signatures +# ============================================================== + +option("--disable-verify-mar", help="Disable verifying MAR signatures") + +set_define( + "MOZ_VERIFY_MAR_SIGNATURE", depends_if("--enable-verify-mar")(lambda _: True) +) +set_config( + "MOZ_VERIFY_MAR_SIGNATURE", depends_if("--enable-verify-mar")(lambda _: True) +) + +# Maintenance service (Windows only) +# ============================================================== + +option( + "--enable-maintenance-service", + when=target_is_windows, + default=target_is_windows, + help="{Enable|Disable} building of maintenance service", +) + +set_define( + "MOZ_MAINTENANCE_SERVICE", + depends_if("--enable-maintenance-service", when=target_is_windows)(lambda _: True), +) +set_config( + "MOZ_MAINTENANCE_SERVICE", + depends_if("--enable-maintenance-service", when=target_is_windows)(lambda _: True), +) + +# Update agent (currently Windows only) +# This is an independent task that runs on a schedule to +# check for, download, and install updates. +# ============================================================== + +option( + "--enable-update-agent", + when=target_is_windows, + default=False, + help="{Enable|Disable} building update agent", +) + +set_define( + "MOZ_UPDATE_AGENT", + depends_if("--enable-update-agent", when=target_is_windows)(lambda _: True), +) + +set_config( + "MOZ_UPDATE_AGENT", + depends_if("--enable-update-agent", when=target_is_windows)(lambda _: True), +) + +# Enable or disable the default browser agent, which monitors the user's default +# browser setting on Windows. +# ============================================================================== + + +@depends(target) +def default_browser_agent_default(target): + return target.os == "WINNT" + + +option( + "--enable-default-browser-agent", + default=default_browser_agent_default, + help="{Enable|Disable} building the default browser agent", +) + + +@depends("--enable-default-browser-agent", when=target_is_windows) +def default_agent_flag(enabled): + if enabled: + return True + + +set_config("MOZ_DEFAULT_BROWSER_AGENT", default_agent_flag) diff --git a/build/moz.configure/util.configure b/build/moz.configure/util.configure new file mode 100644 index 0000000000..fe82698c62 --- /dev/null +++ b/build/moz.configure/util.configure @@ -0,0 +1,494 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. 
If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + + +@imports("sys") +def die(*args): + "Print an error and terminate configure." + log.error(*args) + sys.exit(1) + + +@imports(_from="mozbuild.configure", _import="ConfigureError") +def configure_error(message): + """Raise a programming error and terminate configure. + Primarily for use in moz.configure templates to sanity check + their inputs from moz.configure usage.""" + raise ConfigureError(message) + + +# A wrapper to obtain a process' output and return code. +# Returns a tuple (retcode, stdout, stderr). +@imports("os") +@imports("six") +@imports("subprocess") +@imports(_from="mozbuild.shellutil", _import="quote") +@imports(_from="mozbuild.util", _import="system_encoding") +def get_cmd_output(*args, **kwargs): + log.debug("Executing: `%s`", quote(*args)) + proc = subprocess.Popen( + args, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + # On Python 2 on Windows, close_fds prevents the process from inheriting + # stdout/stderr. Elsewhere, it simply prevents it from inheriting extra + # file descriptors, which is what we want. + close_fds=os.name != "nt", + **kwargs + ) + stdout, stderr = proc.communicate() + # Normally we would set the `encoding` and `errors` arguments in the + # constructor to subprocess.Popen, but those arguments were added in 3.6 + # and we need to support back to 3.5, so instead we need to do this + # nonsense. + stdout = six.ensure_text( + stdout, encoding=system_encoding, errors="replace" + ).replace("\r\n", "\n") + stderr = six.ensure_text( + stderr, encoding=system_encoding, errors="replace" + ).replace("\r\n", "\n") + return proc.wait(), stdout, stderr + + +# A wrapper to obtain a process' output that returns the output generated +# by running the given command if it exits normally, and streams that +# output to log.debug and calls die or the given error callback if it +# does not. +@imports(_from="mozbuild.configure.util", _import="LineIO") +@imports(_from="mozbuild.shellutil", _import="quote") +def check_cmd_output(*args, **kwargs): + onerror = kwargs.pop("onerror", None) + + with log.queue_debug(): + retcode, stdout, stderr = get_cmd_output(*args, **kwargs) + if retcode == 0: + return stdout + + log.debug("The command returned non-zero exit status %d.", retcode) + for out, desc in ((stdout, "output"), (stderr, "error output")): + if out: + log.debug("Its %s was:", desc) + with LineIO(lambda l: log.debug("| %s", l)) as o: + o.write(out) + if onerror: + return onerror() + die("Command `%s` failed with exit status %d." % (quote(*args), retcode)) + + +@imports("os") +def is_absolute_or_relative(path): + if os.altsep and os.altsep in path: + return True + return os.sep in path + + +@imports(_import="mozpack.path", _as="mozpath") +def normsep(path): + return mozpath.normsep(path) + + +@imports("ctypes") +@imports(_from="ctypes", _import="wintypes") +@imports(_from="mozbuild.configure.constants", _import="WindowsBinaryType") +def windows_binary_type(path): + """Obtain the type of a binary on Windows. + + Returns WindowsBinaryType constant. 
+ """ + GetBinaryTypeW = ctypes.windll.kernel32.GetBinaryTypeW + GetBinaryTypeW.argtypes = [wintypes.LPWSTR, ctypes.POINTER(wintypes.DWORD)] + GetBinaryTypeW.restype = wintypes.BOOL + + bin_type = wintypes.DWORD() + res = GetBinaryTypeW(path, ctypes.byref(bin_type)) + if not res: + die("could not obtain binary type of %s" % path) + + if bin_type.value == 0: + return WindowsBinaryType("win32") + elif bin_type.value == 6: + return WindowsBinaryType("win64") + # If we see another binary type, something is likely horribly wrong. + else: + die("unsupported binary type on %s: %s" % (path, bin_type)) + + +@imports("ctypes") +@imports(_from="ctypes", _import="wintypes") +def get_GetShortPathNameW(): + GetShortPathNameW = ctypes.windll.kernel32.GetShortPathNameW + GetShortPathNameW.argtypes = [wintypes.LPCWSTR, wintypes.LPWSTR, wintypes.DWORD] + GetShortPathNameW.restype = wintypes.DWORD + return GetShortPathNameW + + +@template +@imports("ctypes") +@imports("platform") +@imports(_from="mozbuild.shellutil", _import="quote") +def normalize_path(): + # Until the build system can properly handle programs that need quoting, + # transform those paths into their short version on Windows (e.g. + # c:\PROGRA~1...). + if platform.system() == "Windows": + GetShortPathNameW = get_GetShortPathNameW() + + def normalize_path(path): + path = normsep(path) + if quote(path) == path: + return path + size = 0 + while True: + out = ctypes.create_unicode_buffer(size) + needed = GetShortPathNameW(path, out, size) + if size >= needed: + if " " in out.value: + die( + "GetShortPathName returned a long path name: `%s`. " + "Use `fsutil file setshortname' " + "to create a short name " + "for any components of this path " + "that have spaces.", + out.value, + ) + return normsep(out.value) + size = needed + + else: + + def normalize_path(path): + return normsep(path) + + return normalize_path + + +normalize_path = normalize_path() + + +# Locates the given program using which, or returns the given path if it +# exists. +# The `paths` parameter may be passed to search the given paths instead of +# $PATH. +@imports("sys") +@imports(_from="os", _import="pathsep") +@imports(_from="os", _import="environ") +@imports(_from="mozfile", _import="which") +def find_program(file, paths=None): + # The following snippet comes from `which` itself, with a slight + # modification to use lowercase extensions, because it's confusing rustup + # (on top of making results not really appealing to the eye). + + # Windows has the concept of a list of extensions (PATHEXT env var). + if sys.platform.startswith("win"): + exts = [e.lower() for e in environ.get("PATHEXT", "").split(pathsep)] + # If '.exe' is not in exts then obviously this is Win9x and + # or a bogus PATHEXT, then use a reasonable default. 
+        if ".exe" not in exts:
+            exts = [".com", ".exe", ".bat"]
+    else:
+        exts = None
+
+    if is_absolute_or_relative(file):
+        path = which(os.path.basename(file), path=os.path.dirname(file), exts=exts)
+        return normalize_path(path) if path else None
+
+    if paths:
+        if not isinstance(paths, (list, tuple)):
+            die(
+                "Paths provided to find_program must be a list of strings, " "not %r",
+                paths,
+            )
+        paths = pathsep.join(paths)
+
+    path = which(file, path=paths, exts=exts)
+    return normalize_path(path) if path else None
+
+
+@imports("os")
+@imports(_from="mozbuild.configure.util", _import="LineIO")
+@imports(_from="six", _import="ensure_binary")
+@imports(_from="tempfile", _import="mkstemp")
+def try_invoke_compiler(compiler, language, source, flags=None, onerror=None):
+    flags = flags or []
+
+    if not isinstance(flags, (list, tuple)):
+        die("Flags provided to try_compile must be a list of strings, " "not %r", flags)
+
+    suffix = {
+        "C": ".c",
+        "C++": ".cpp",
+    }[language]
+
+    fd, path = mkstemp(prefix="conftest.", suffix=suffix, text=True)
+    try:
+        source = source.encode("ascii", "replace")
+
+        log.debug("Creating `%s` with content:", path)
+        with LineIO(lambda l: log.debug("| %s", l)) as out:
+            out.write(source)
+
+        os.write(fd, ensure_binary(source))
+        os.close(fd)
+        cmd = compiler + [path] + list(flags)
+        kwargs = {"onerror": onerror}
+        return check_cmd_output(*cmd, **kwargs)
+    finally:
+        os.remove(path)
+
+
+def unique_list(l):
+    # Preserve input order while dropping duplicate entries.
+    result = []
+    for i in l:
+        if i not in result:
+            result.append(i)
+    return result
+
+
+# Get values out of the Windows registry. This function can only be called on
+# Windows.
+# The `pattern` argument is a string starting with HKEY_ and giving the full
+# "path" of the registry key to get the value for, with backslash separators.
+# The string can contain wildcards ('*').
+# The result of this function is an enumerator yielding tuples for each
+# match. Each of these tuples contains the key name matching wildcards
+# followed by the value.
+#
+# The `get_32_and_64_bit` argument is a boolean; if True, it will return the
+# values from the 32-bit and 64-bit registry views. This defaults to False,
+# which will return the view depending on the bitness of Python.
+# +# Examples: +# get_registry_values(r'HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\' +# r'Windows Kits\Installed Roots\KitsRoot*') +# yields e.g.: +# ('KitsRoot81', r'C:\Program Files (x86)\Windows Kits\8.1\') +# ('KitsRoot10', r'C:\Program Files (x86)\Windows Kits\10\') +# +# get_registry_values(r'HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\' +# r'Windows Kits\Installed Roots\KitsRoot8.1') +# yields e.g.: +# (r'C:\Program Files (x86)\Windows Kits\8.1\',) +# +# get_registry_values(r'HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\' +# r'Windows Kits\Installed Roots\KitsRoot8.1', +# get_32_and_64_bit=True) +# yields e.g.: +# (r'C:\Program Files (x86)\Windows Kits\8.1\',) +# (r'C:\Program Files\Windows Kits\8.1\',) +# +# get_registry_values(r'HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\' +# r'Windows Kits\*\KitsRoot*') +# yields e.g.: +# ('Installed Roots', 'KitsRoot81', +# r'C:\Program Files (x86)\Windows Kits\8.1\') +# ('Installed Roots', 'KitsRoot10', +# r'C:\Program Files (x86)\Windows Kits\10\') +# +# get_registry_values(r'HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\' +# r'VisualStudio\VC\*\x86\*\Compiler') +# yields e.g.: +# ('19.0', 'arm', r'C:\...\amd64_arm\cl.exe') +# ('19.0', 'x64', r'C:\...\amd64\cl.exe') +# ('19.0', 'x86', r'C:\...\amd64_x86\cl.exe') +@imports(_import="winreg") +@imports(_from="__builtin__", _import="WindowsError") +@imports(_from="fnmatch", _import="fnmatch") +def get_registry_values(pattern, get_32_and_64_bit=False): + def enum_helper(func, key): + i = 0 + while True: + try: + yield func(key, i) + except WindowsError: + break + i += 1 + + def get_keys(key, pattern, access_mask): + try: + s = winreg.OpenKey(key, "\\".join(pattern[:-1]), 0, access_mask) + except WindowsError: + return + for k in enum_helper(winreg.EnumKey, s): + if fnmatch(k, pattern[-1]): + try: + yield k, winreg.OpenKey(s, k, 0, access_mask) + except WindowsError: + pass + + def get_values(key, pattern, access_mask): + try: + s = winreg.OpenKey(key, "\\".join(pattern[:-1]), 0, access_mask) + except WindowsError: + return + for k, v, t in enum_helper(winreg.EnumValue, s): + if fnmatch(k, pattern[-1]): + yield k, v + + def split_pattern(pattern): + subpattern = [] + for p in pattern: + subpattern.append(p) + if "*" in p: + yield subpattern + subpattern = [] + if subpattern: + yield subpattern + + def get_all_values(keys, pattern, access_mask): + for i, p in enumerate(pattern): + next_keys = [] + for base_key in keys: + matches = base_key[:-1] + base_key = base_key[-1] + if i == len(pattern) - 1: + want_name = "*" in p[-1] + for name, value in get_values(base_key, p, access_mask): + yield matches + ((name, value) if want_name else (value,)) + else: + for name, k in get_keys(base_key, p, access_mask): + next_keys.append(matches + (name, k)) + keys = next_keys + + pattern = pattern.split("\\") + assert pattern[0].startswith("HKEY_") + keys = [(getattr(winreg, pattern[0]),)] + pattern = list(split_pattern(pattern[1:])) + if get_32_and_64_bit: + for match in get_all_values( + keys, pattern, winreg.KEY_READ | winreg.KEY_WOW64_32KEY + ): + yield match + for match in get_all_values( + keys, pattern, winreg.KEY_READ | winreg.KEY_WOW64_64KEY + ): + yield match + else: + for match in get_all_values(keys, pattern, winreg.KEY_READ): + yield match + + +@imports(_from="mozbuild.configure.util", _import="Version", _as="_Version") +def Version(v): + "A version number that can be compared usefully." + return _Version(v) + + +# Denotes a deprecated option. 
Combines option() and @depends: +# @deprecated_option('--option') +# def option(value): +# ... +# @deprecated_option() takes the same arguments as option(), except `help`. +# The function may handle the option like a typical @depends function would, +# but it is recommended it emits a deprecation error message suggesting an +# alternative option to use if there is one. + + +@template +def deprecated_option(*args, **kwargs): + assert "help" not in kwargs + kwargs["help"] = "Deprecated" + opt = option(*args, **kwargs) + + def decorator(func): + @depends(opt.option) + def deprecated(value): + if value.origin != "default": + return func(value) + + return deprecated + + return decorator + + +# from mozbuild.util import ReadOnlyNamespace as namespace +@imports(_from="mozbuild.util", _import="ReadOnlyNamespace") +def namespace(**kwargs): + return ReadOnlyNamespace(**kwargs) + + +# Turn an object into an object that can be used as an argument to @depends. +# The given object can be a literal value, a function that takes no argument, +# or, for convenience, a @depends function. +@template +@imports(_from="inspect", _import="isfunction") +@imports(_from="mozbuild.configure", _import="SandboxDependsFunction") +def dependable(obj): + if isinstance(obj, SandboxDependsFunction): + return obj + if isfunction(obj): + return depends(when=True)(obj) + # Depend on --help to make lint happy if the dependable is used as an input + # to an option(). + return depends("--help", when=True)(lambda _: obj) + + +always = dependable(True) +never = dependable(False) + + +# Create a decorator that will only execute the body of a function +# if the passed function returns True when passed all positional +# arguments. +@template +def depends_tmpl(eval_args_fn, *args, **kwargs): + if kwargs: + assert len(kwargs) == 1 + when = kwargs["when"] + else: + when = None + + def decorator(func): + @depends(*args, when=when) + def wrapper(*args): + if eval_args_fn(args): + return func(*args) + + return wrapper + + return decorator + + +# Like @depends, but the decorated function is only called if one of the +# arguments it would be called with has a positive value (bool(value) is True) +@template +def depends_if(*args, **kwargs): + return depends_tmpl(any, *args, **kwargs) + + +# Like @depends, but the decorated function is only called if all of the +# arguments it would be called with have a positive value. 
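# As a standalone illustration of the depends_if/depends_all gating described
# above (plain Python, not configure-sandbox code; the helper names _gated and
# _describe are invented for this sketch and are not part of the build system):
def _gated(eval_args_fn, func):
    def wrapper(*args):
        # Only call through when the predicate holds over the argument tuple.
        if eval_args_fn(args):
            return func(*args)
    return wrapper


def _describe(*values):
    return "got %r" % (values,)


assert _gated(any, _describe)(0, "") is None        # nothing truthy: body skipped
assert _gated(any, _describe)(0, "x") == "got (0, 'x')"
assert _gated(all, _describe)(1, "x") == "got (1, 'x')"
assert _gated(all, _describe)(1, "") is None        # not all truthy: body skipped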
+@template +def depends_all(*args, **kwargs): + return depends_tmpl(all, *args, **kwargs) + + +# Hacks related to old-configure +# ============================== + + +@dependable +def old_configure_assignments(): + return [] + + +@template +def add_old_configure_assignment(var, value, when=None): + var = dependable(var) + value = dependable(value) + + @depends(old_configure_assignments, var, value, when=when) + @imports(_from="mozbuild.shellutil", _import="quote") + def add_assignment(assignments, var, value): + if var is None or value is None: + return + if value is True: + assignments.append((var, "1")) + elif value is False: + assignments.append((var, "")) + else: + if isinstance(value, (list, tuple)): + value = quote(*value) + assignments.append((var, str(value))) diff --git a/build/moz.configure/warnings.configure b/build/moz.configure/warnings.configure new file mode 100755 index 0000000000..d0db70f6d3 --- /dev/null +++ b/build/moz.configure/warnings.configure @@ -0,0 +1,253 @@ +# -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +option( + "--enable-warnings-as-errors", + env="MOZ_ENABLE_WARNINGS_AS_ERRORS", + default=depends("MOZ_AUTOMATION")(lambda x: bool(x)), + help="{Enable|Disable} treating warnings as errors", +) + + +@depends("--enable-warnings-as-errors") +def rust_warning_flags(warnings_as_errors): + flags = [] + + # Note that cargo passes --cap-lints warn to rustc for third-party code, so + # we don't need a very complicated setup. + if warnings_as_errors: + flags.append("-Dwarnings") + else: + flags.extend(("--cap-lints", "warn")) + + return flags + + +c_warning_flag = dependable("-Werror") + + +@depends("--enable-warnings-as-errors", c_warning_flag) +def warnings_as_errors(warnings_as_errors, c_warning_flag): + if not warnings_as_errors: + return "" + + return c_warning_flag + + +set_config("WARNINGS_AS_ERRORS", warnings_as_errors) +# We have a peculiar setup in old-configure.in where some compilation tests +# depend on enabling warnings-as-errors even if it's disabled for Firefox +# compilation. We therefore need this assignment. +add_old_configure_assignment("WARNINGS_AS_ERRORS", c_warning_flag) + + +# GCC/Clang warnings: +# https://gcc.gnu.org/onlinedocs/gcc/Warning-Options.html +# https://clang.llvm.org/docs/DiagnosticsReference.html + +# lots of useful warnings +add_gcc_warning("-Wall") + +# catch implicit truncation of enum values assigned to smaller bit fields +check_and_add_gcc_warning("-Wbitfield-enum-conversion") + +# catches bugs, e.g. "if (c); foo();", few false positives +add_gcc_warning("-Wempty-body") + +# catches return types with qualifiers like const +add_gcc_warning("-Wignored-qualifiers") + +# function declaration hides virtual function from base class. +# Don't enable for GCC, since it's more strict than clang, +# and the additional cases it covers are not valuable. 
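# The check_and_add_gcc_warning() calls throughout this file are meant to add a
# flag only when the compiler appears to accept it, and entries like the
# -Woverloaded-virtual one just below can additionally be gated on compiler
# type via when=. As a rough standalone sketch of that kind of acceptance probe
# (plain Python; this is not the real helper, which is defined elsewhere in the
# configure code, and compiler_accepts_flag is an invented name), one can
# compile an empty translation unit with -Werror so that an unknown warning
# flag becomes a hard failure:
import os
import subprocess
import tempfile


def compiler_accepts_flag(compiler, flag):
    fd, path = tempfile.mkstemp(suffix=".c")
    try:
        os.write(fd, b"int main(void) { return 0; }\n")
        os.close(fd)
        proc = subprocess.run(
            [compiler, "-Werror", flag, "-c", path, "-o", os.devnull],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        return proc.returncode == 0
    finally:
        os.remove(path)

# e.g. compiler_accepts_flag("clang", "-Wcomma") would be expected to succeed,
# while probing a flag the compiler does not know should fail.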
+add_gcc_warning( + "-Woverloaded-virtual", + cxx_compiler, + when=depends(cxx_compiler)(lambda c: c.type != "gcc"), +) + +# catches pointer arithmetic using NULL or sizeof(void) +add_gcc_warning("-Wpointer-arith") + +# catch modifying constructor parameter that shadows member variable +check_and_add_gcc_warning("-Wshadow-field-in-constructor-modified") + +# catches comparing signed/unsigned ints +add_gcc_warning("-Wsign-compare") + +# catches overflow bugs, few false positives +add_gcc_warning("-Wtype-limits") + +# catches some dead code +add_gcc_warning("-Wunreachable-code") +check_and_add_gcc_warning("-Wunreachable-code-return") + +# catches treating string literals as non-const +add_gcc_warning("-Wwrite-strings", cxx_compiler) + +# turned on by -Wall, but we use offsetof on non-POD types frequently +add_gcc_warning("-Wno-invalid-offsetof", cxx_compiler) + +# catches objects passed by value to variadic functions. +check_and_add_gcc_warning("-Wclass-varargs") + +# catches empty if/switch/for initialization statements that have no effect +check_and_add_gcc_warning("-Wempty-init-stmt", cxx_compiler) + +# catches some implicit conversion of floats to ints +check_and_add_gcc_warning("-Wfloat-overflow-conversion") +check_and_add_gcc_warning("-Wfloat-zero-conversion") + +# catches issues around loops +check_and_add_gcc_warning("-Wloop-analysis") +# But, disable range-loop-analysis because it can raise unhelpful false +# positives. +check_and_add_gcc_warning("-Wno-range-loop-analysis") + +# catches C++ version forward-compat issues +check_and_add_gcc_warning("-Wc++2a-compat", cxx_compiler) + +# catches possible misuse of the comma operator +check_and_add_gcc_warning("-Wcomma", cxx_compiler) + +# catches duplicated conditions in if-else-if chains +check_and_add_gcc_warning("-Wduplicated-cond") + +# catches unintentional switch case fallthroughs +check_and_add_gcc_warning("-Wimplicit-fallthrough", cxx_compiler) + +# catches unused variable/function declarations +check_and_add_gcc_warning("-Wunused-function", cxx_compiler) +check_and_add_gcc_warning("-Wunused-variable", cxx_compiler) + +# catches expressions used as a null pointer constant +# XXX: at the time of writing, the version of clang used on the OS X test +# machines has a bug that causes it to reject some valid files if both +# -Wnon-literal-null-conversion and -Wsometimes-uninitialized are +# specified. We work around this by instead using +# -Werror=non-literal-null-conversion, but we only do that when +# --enable-warnings-as-errors is specified so that no unexpected fatal +# warnings are produced. 
+check_and_add_gcc_warning( + "-Werror=non-literal-null-conversion", when="--enable-warnings-as-errors" +) + +# catches string literals used in boolean expressions +check_and_add_gcc_warning("-Wstring-conversion") + +# catches comparisons that are always true or false +check_and_add_gcc_warning("-Wtautological-overlap-compare") +check_and_add_gcc_warning("-Wtautological-unsigned-enum-zero-compare") +check_and_add_gcc_warning("-Wtautological-unsigned-zero-compare") +# This can be triggered by certain patterns used deliberately in portable code +check_and_add_gcc_warning("-Wno-error=tautological-type-limit-compare") + +# we inline 'new' and 'delete' in mozalloc +check_and_add_gcc_warning("-Wno-inline-new-delete", cxx_compiler) + +# Prevent the following GCC warnings from being treated as errors: +# too many false positives +check_and_add_gcc_warning("-Wno-error=maybe-uninitialized") + +# we don't want our builds held hostage when a platform-specific API +# becomes deprecated. +check_and_add_gcc_warning("-Wno-error=deprecated-declarations") + +# false positives depending on optimization +check_and_add_gcc_warning("-Wno-error=array-bounds") + +# can't get rid of those PGO warnings +check_and_add_gcc_warning("-Wno-error=coverage-mismatch") + +# -Wbackend-plugin warnings from Android PGO profile-use builds: +# error: /builds/worker/workspace/build/src/mozglue/misc/AutoProfilerLabel.cpp: +# Function control flow change detected (hash mismatch) +# _ZN7mozilla17AutoProfilerLabelD2Ev [-Werror,-Wbackend-plugin] +check_and_add_gcc_warning("-Wno-error=backend-plugin") + +# false positives depending on optimizations +check_and_add_gcc_warning("-Wno-error=free-nonheap-object") + +# Would be a pain to fix all occurrences, for very little gain +check_and_add_gcc_warning("-Wno-multistatement-macros") + +# Disable the -Werror for return-std-move because of a false positive +# on nsTAutoStringN: https://bugs.llvm.org/show_bug.cgi?id=37249 +check_and_add_gcc_warning("-Wno-error=return-std-move") + +# Disable the -Werror for -Wclass-memaccess as we have a long +# tail of issues to fix +check_and_add_gcc_warning("-Wno-error=class-memaccess") + +# -Watomic-alignment is a new warning in clang 7 that seems way too broad. +# https://bugs.llvm.org/show_bug.cgi?id=38593 +check_and_add_gcc_warning("-Wno-error=atomic-alignment") + +# New warning with gcc 9. Not useful +# https://bugzilla.mozilla.org/show_bug.cgi?id=1515356 +check_and_add_gcc_warning("-Wno-error=deprecated-copy") + +# catches format/argument mismatches with printf +c_format_warning, cxx_format_warning = check_and_add_gcc_warning( + "-Wformat", when=depends(target)(lambda t: t.kernel != "WINNT") +) + +# Add compile-time warnings for unprotected functions and format functions +# that represent possible security problems. Enable this only when -Wformat +# is enabled, otherwise it is an error +check_and_add_gcc_warning( + "-Wformat-security", when=c_format_warning & cxx_format_warning +) +check_and_add_gcc_warning( + "-Wformat-overflow=2", when=c_format_warning & cxx_format_warning +) + +# Other MinGW specific things +with only_when(depends(target)(lambda t: t.kernel == "WINNT")): + # When compiling for Windows with gcc, we encounter lots of "#pragma warning"'s + # which is an MSVC-only pragma that GCC does not recognize. 
+ check_and_add_gcc_warning("-Wno-unknown-pragmas") + + # When compiling for Windows with gcc, gcc throws false positives and true + # positives where the callsite is ifdef-ed out + check_and_add_gcc_warning("-Wno-unused-function") + + # When compiling for Windows with gcc, gcc cannot produce this warning + # correctly: it mistakes DWORD_PTR and ULONG_PTR as types you cannot + # give NULL to. (You can in fact do that.) + check_and_add_gcc_warning("-Wno-conversion-null") + + # Throughout the codebase we regularly have switch statements off of enums + # without covering every value in the enum. We don't care about these warnings. + check_and_add_gcc_warning("-Wno-switch") + + # Another code pattern we have is using start and end constants in enums of + # different types. We do this for safety, but then when comparing it throws + # an error, which we would like to ignore. This seems to only affect the MinGW + # build, but we're not sure why. + check_and_add_gcc_warning("-Wno-enum-compare") + +# We hit this all over the place with the gtest INSTANTIATE_TEST_CASE_P macro +check_and_add_gcc_warning("-Wno-gnu-zero-variadic-macro-arguments") + +# Make it an error to be missing function declarations for C code. +check_and_add_gcc_warning("-Werror=implicit-function-declaration", c_compiler) + +# New in clang 11. We can't really do anything about this warning. +check_and_add_gcc_warning("-Wno-psabi") + +# Disable broken missing-braces warning on old clang versions +check_and_add_gcc_warning( + "-Wno-missing-braces", + when=depends(c_compiler)(lambda c: c.type == "clang" and c.version < "6.0"), +) + + +# Please keep these last in this file +add_old_configure_assignment("_WARNINGS_CFLAGS", warnings_flags.cflags) +add_old_configure_assignment("_WARNINGS_CXXFLAGS", warnings_flags.cxxflags) +add_old_configure_assignment("_WARNINGS_HOST_CFLAGS", warnings_flags.host_cflags) +add_old_configure_assignment("_WARNINGS_HOST_CXXFLAGS", warnings_flags.host_cxxflags) diff --git a/build/moz.configure/windows.configure b/build/moz.configure/windows.configure new file mode 100644 index 0000000000..568046c2e5 --- /dev/null +++ b/build/moz.configure/windows.configure @@ -0,0 +1,535 @@ +# -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +option( + "--with-windows-version", + nargs=1, + default="603", + help="Windows SDK version to target. Win 8.1 (603) is currently" + "the minimum supported version.", +) + + +@depends("--with-windows-version") +@imports(_from="__builtin__", _import="ValueError") +def valid_windows_version(value): + if not value: + die("Cannot build with --without-windows-version") + try: + version = int(value[0], 16) + if version in (0x603,): + return version + except ValueError: + pass + + die("Invalid value for --with-windows-version (%s)", value[0]) + + +option(env="WINDOWSSDKDIR", nargs=1, help="Directory containing the Windows SDK") + + +@depends("WINDOWSSDKDIR", host, c_compiler) +def windows_sdk_dir(value, host, compiler): + if value: + return value + # Ideally, we'd actually check for host/target ABI being MSVC, but + # that's waiting for bug 1617793. 
+    if host.kernel != "WINNT" or compiler.type != "clang-cl":
+        return ()
+
+    return set(
+        x[1]
+        for x in get_registry_values(
+            r"HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows Kits\Installed Roots"
+            r"\KitsRoot*",
+            get_32_and_64_bit=True,
+        )
+    )
+
+
+# The Windows SDK 8.1 and 10 have different layouts. The former has
+# $SDK/include/$subdir, while the latter has $SDK/include/$version/$subdir.
+# The vcvars* scripts don't actually care about the version, they just take
+# the last alphanumerically.
+# The $SDK/lib directories always have version subdirectories, but while the
+# versions match the one in $SDK/include for SDK 10, it's "winv6.3" for SDK
+# 8.1.
+
+
+@imports("os")
+@imports("re")
+@imports(_from="__builtin__", _import="sorted")
+@imports(_from="__builtin__", _import="Exception")
+def get_sdk_dirs(sdk, subdir):
+    def get_dirs_containing(sdk, stem, subdir):
+        base = os.path.join(sdk, stem)
+        try:
+            subdirs = [
+                d for d in os.listdir(base) if os.path.isdir(os.path.join(base, d))
+            ]
+        except Exception:
+            subdirs = []
+        if not subdirs:
+            return ()
+        if subdir in subdirs:
+            return (base,)
+        # At this point, either we have an incomplete or invalid SDK directory,
+        # or we exclusively have version numbers in subdirs.
+        return tuple(
+            os.path.join(base, s)
+            for s in subdirs
+            if os.path.isdir(os.path.join(base, s, subdir))
+        )
+
+    def categorize(dirs):
+        return {os.path.basename(d): d for d in dirs}
+
+    include_dirs = categorize(get_dirs_containing(sdk, "include", subdir))
+    lib_dirs = categorize(get_dirs_containing(sdk, "lib", subdir))
+
+    if "include" in include_dirs:
+        include_dirs["winv6.3"] = include_dirs["include"]
+        del include_dirs["include"]
+
+    valid_versions = sorted(set(include_dirs) & set(lib_dirs), reverse=True)
+    if valid_versions:
+        return namespace(
+            path=sdk,
+            lib=lib_dirs[valid_versions[0]],
+            include=include_dirs[valid_versions[0]],
+        )
+
+
+@imports(_from="mozbuild.shellutil", _import="quote")
+def valid_windows_sdk_dir_result(value):
+    if value:
+        return "0x%04x in %s" % (value.version, quote(value.path))
+
+
+@depends(c_compiler, windows_sdk_dir, valid_windows_version, "WINDOWSSDKDIR")
+@checking("for Windows SDK", valid_windows_sdk_dir_result)
+@imports(_from="__builtin__", _import="sorted")
+@imports(_from="__builtin__", _import="Exception")
+@imports(_from="textwrap", _import="dedent")
+def valid_windows_sdk_dir(
+    compiler, windows_sdk_dir, target_version, windows_sdk_dir_env
+):
+    # Ideally, we'd actually check for host/target ABI being MSVC, but
+    # that's waiting for bug 1617793.
+    if compiler.type != "clang-cl":
+        return None
+    if windows_sdk_dir_env:
+        windows_sdk_dir_env = windows_sdk_dir_env[0]
+    sdks = {}
+    for d in windows_sdk_dir:
+        sdk = get_sdk_dirs(d, "um")
+        if sdk:
+            check = dedent(
+                """\
+            #include <winsdkver.h>
+            WINVER_MAXVER
+            """
+            )
+            um_dir = os.path.join(sdk.include, "um")
+            shared_dir = os.path.join(sdk.include, "shared")
+            result = try_preprocess(
+                compiler.wrapper
+                + [compiler.compiler]
+                + compiler.flags
+                + ["-X", "-I", um_dir, "-I", shared_dir],
+                "C",
+                check,
+                onerror=lambda: "",
+            )
+            if result:
+                maxver = result.splitlines()[-1]
+                try:
+                    maxver = int(maxver, 0)
+                except Exception:
+                    pass
+                else:
+                    sdks[d] = maxver, sdk
+                    continue
+        if d == windows_sdk_dir_env:
+            raise FatalCheckError(
+                "Error while checking the version of the SDK in "
+                "WINDOWSSDKDIR (%s). Please verify it contains a valid and "
+                "complete SDK installation."
% windows_sdk_dir_env + ) + + valid_sdks = sorted(sdks, key=lambda x: sdks[x][0], reverse=True) + if valid_sdks: + biggest_version, sdk = sdks[valid_sdks[0]] + if not valid_sdks or biggest_version < target_version: + if windows_sdk_dir_env: + raise FatalCheckError( + "You are targeting Windows version 0x%04x, but your SDK only " + "supports up to version 0x%04x. Install and use an updated SDK, " + "or target a lower version using --with-windows-version. " + "Alternatively, try running the Windows SDK Configuration Tool " + "and selecting a newer SDK. See " + "https://developer.mozilla.org/En/Windows_SDK_versions for " + "details on fixing this." % (target_version, biggest_version) + ) + + raise FatalCheckError( + "Cannot find a Windows SDK for version >= 0x%04x." % target_version + ) + + return namespace( + path=sdk.path, + include=sdk.include, + lib=sdk.lib, + version=biggest_version, + ) + + +@imports(_from="mozbuild.shellutil", _import="quote") +def valid_ucrt_sdk_dir_result(value): + if value: + return "%s in %s" % (value.version, quote(value.path)) + + +@depends(windows_sdk_dir, "WINDOWSSDKDIR", c_compiler) +@checking("for Universal CRT SDK", valid_ucrt_sdk_dir_result) +@imports("os") +@imports(_from="__builtin__", _import="sorted") +@imports(_import="mozpack.path", _as="mozpath") +def valid_ucrt_sdk_dir(windows_sdk_dir, windows_sdk_dir_env, compiler): + # Ideally, we'd actually check for host/target ABI being MSVC, but + # that's waiting for bug 1617793. + if compiler.type != "clang-cl": + return None + if windows_sdk_dir_env: + windows_sdk_dir_env = windows_sdk_dir_env[0] + sdks = {} + for d in windows_sdk_dir: + sdk = get_sdk_dirs(d, "ucrt") + if sdk: + version = os.path.basename(sdk.include) + # We're supposed to always find a version in the directory, because + # the 8.1 SDK, which doesn't have a version in the directory, doesn't + # contain the Universal CRT SDK. When the main SDK is 8.1, there + # is, however, supposed to be a reduced install of the SDK 10 + # with the UCRT. + if version != "include": + sdks[d] = Version(version), sdk + continue + if d == windows_sdk_dir_env: + # When WINDOWSSDKDIR is set in the environment and we can't find the + # Universal CRT SDK, chances are this is a start-shell-msvc*.bat + # setup, where INCLUDE and LIB already contain the UCRT paths. + ucrt_includes = [ + p + for p in os.environ.get("INCLUDE", "").split(";") + if os.path.basename(p).lower() == "ucrt" + ] + ucrt_libs = [ + p + for p in os.environ.get("LIB", "").split(";") + if os.path.basename(os.path.dirname(p)).lower() == "ucrt" + ] + if ucrt_includes and ucrt_libs: + # Pick the first of each, since they are the ones that the + # compiler would look first. Assume they contain the SDK files. + include = os.path.dirname(ucrt_includes[0]) + lib = os.path.dirname(os.path.dirname(ucrt_libs[0])) + path = os.path.dirname(os.path.dirname(include)) + version = os.path.basename(include) + if version != "include" and mozpath.basedir(lib, [path]): + sdks[d] = ( + Version(version), + namespace( + path=path, + include=include, + lib=lib, + ), + ) + continue + raise FatalCheckError( + "The SDK in WINDOWSSDKDIR (%s) does not contain the Universal " + "CRT." % windows_sdk_dir_env + ) + + valid_sdks = sorted(sdks, key=lambda x: sdks[x][0], reverse=True) + if not valid_sdks: + raise FatalCheckError( + "Cannot find the Universal CRT SDK. " "Please install it." 
+ ) + + version, sdk = sdks[valid_sdks[0]] + minimum_ucrt_version = Version("10.0.17134.0") + if version < minimum_ucrt_version: + raise FatalCheckError( + "Latest Universal CRT SDK version found %s" + " and minimum required is %s. This or a later" + " version can be installed using the Visual" + " Studio installer." % (version, minimum_ucrt_version) + ) + + return namespace( + path=sdk.path, + include=sdk.include, + lib=sdk.lib, + version=version, + ) + + +@depends(c_compiler, host_c_compiler, vc_toolchain_search_path) +@imports("os") +def vc_path(c_compiler, host_c_compiler, vc_toolchain_search_path): + if c_compiler.type != "clang-cl" and host_c_compiler.type != "clang-cl": + return + + # In clang-cl builds, we need the headers and libraries from an MSVC installation. + vc_program = find_program("cl.exe", paths=vc_toolchain_search_path) + if not vc_program: + die("Cannot find a Visual C++ install for e.g. ATL headers.") + + result = os.path.dirname(vc_program) + while True: + next, p = os.path.split(result) + if next == result: + die( + "Cannot determine the Visual C++ directory the compiler (%s) " + "is in" % vc_program + ) + result = next + if p.lower() == "bin": + break + return os.path.normpath(result) + + +option(env="DIA_SDK_PATH", nargs=1, help="Path to the Debug Interface Access SDK") + + +@depends(vc_path, "DIA_SDK_PATH") +@checking("for the Debug Interface Access SDK", lambda x: x or "not found") +@imports("os") +def dia_sdk_dir(vc_path, dia_sdk_path): + if dia_sdk_path: + path = os.path.normpath(dia_sdk_path[0]) + + elif vc_path: + # This would be easier if we had the installationPath that + # get_vc_paths works with, since 'DIA SDK' is relative to that. + path = os.path.normpath( + os.path.join(vc_path, "..", "..", "..", "..", "DIA SDK") + ) + else: + return + + if os.path.exists(os.path.join(path, "include", "dia2.h")): + return path + + +@depends(vc_path, valid_windows_sdk_dir, valid_ucrt_sdk_dir, dia_sdk_dir) +@imports("os") +def include_path(vc_path, windows_sdk_dir, ucrt_sdk_dir, dia_sdk_dir): + if not vc_path: + return + atlmfc_dir = os.path.join(vc_path, "atlmfc", "include") + if not os.path.isdir(atlmfc_dir): + die( + "Cannot find the ATL/MFC headers in the Visual C++ directory (%s). " + "Please install them." % vc_path + ) + + winrt_dir = os.path.join(windows_sdk_dir.include, "winrt") + if not os.path.isdir(winrt_dir): + die( + "Cannot find the WinRT headers in the Windows SDK directory (%s). " + "Please install them." % windows_sdk_dir.path + ) + + includes = [] + include_env = os.environ.get("INCLUDE") + if include_env: + includes.append(include_env) + includes.extend( + ( + os.path.join(vc_path, "include"), + atlmfc_dir, + os.path.join(windows_sdk_dir.include, "shared"), + os.path.join(windows_sdk_dir.include, "um"), + winrt_dir, + os.path.join(ucrt_sdk_dir.include, "ucrt"), + ) + ) + if dia_sdk_dir: + includes.append(os.path.join(dia_sdk_dir, "include")) + # Set in the environment for old-configure + includes = ";".join(includes) + os.environ["INCLUDE"] = includes + return includes + + +set_config("INCLUDE", include_path) + + +@template +def dia_sdk_subdir(host_or_target, subdir): + @depends(dia_sdk_dir, host_or_target, dependable(subdir)) + def dia_sdk_subdir(dia_sdk_dir, target, subdir): + if not dia_sdk_dir: + return + # For some reason the DIA SDK still uses the old-style targets + # even in a newer MSVC. 
+ old_target = { + "x86": "", + "x86_64": "amd64", + "arm": "arm", + "aarch64": "arm64", + }.get(target.cpu) + if old_target is None: + return + # As old_target can be '', and os.path.join will happily use the empty + # string, leading to a string ending with a backslash, that Make will + # interpret as a "string continues on next line" indicator, use variable + # args. + old_target = (old_target,) if old_target else () + return os.path.join(dia_sdk_dir, subdir, *old_target) + + return dia_sdk_subdir + + +set_config("WIN_DIA_SDK_BIN_DIR", dia_sdk_subdir(host, "bin")) + + +@template +def lib_path_for(host_or_target): + @depends( + host_or_target, + dependable(host_or_target is host), + vc_path, + valid_windows_sdk_dir, + valid_ucrt_sdk_dir, + dia_sdk_subdir(host_or_target, "lib"), + ) + @imports("os") + def lib_path( + target, is_host, vc_path, windows_sdk_dir, ucrt_sdk_dir, dia_sdk_lib_dir + ): + if not vc_path: + return + sdk_target = { + "x86": "x86", + "x86_64": "x64", + "arm": "arm", + "aarch64": "arm64", + }.get(target.cpu) + + # MSVC2017 switched to use the same target naming as the sdk. + atlmfc_dir = os.path.join(vc_path, "atlmfc", "lib", sdk_target) + if not os.path.isdir(atlmfc_dir): + die( + "Cannot find the ATL/MFC libraries in the Visual C++ directory " + "(%s). Please install them." % vc_path + ) + + libs = [] + lib_env = os.environ.get("LIB") + if lib_env and not is_host: + libs.extend(lib_env.split(";")) + libs.extend( + ( + os.path.join(vc_path, "lib", sdk_target), + atlmfc_dir, + os.path.join(windows_sdk_dir.lib, "um", sdk_target), + os.path.join(ucrt_sdk_dir.lib, "ucrt", sdk_target), + ) + ) + if dia_sdk_lib_dir: + libs.append(dia_sdk_lib_dir) + return libs + + return lib_path + + +@depends_if(lib_path_for(target)) +@imports("os") +def lib_path(libs): + # Set in the environment for old-configure + libs = ";".join(libs) + os.environ["LIB"] = libs + return libs + + +set_config("LIB", lib_path) + + +lib_path_for_host = lib_path_for(host) + + +@depends_if(lib_path_for_host) +@imports(_from="mozbuild.shellutil", _import="quote") +def host_linker_libpaths(libs): + return ["-LIBPATH:%s" % quote(l) for l in libs] + + +@depends_if(lib_path_for_host) +@imports(_from="mozbuild.shellutil", _import="quote") +def host_linker_libpaths_bat(libs): + # .bat files need a different style of quoting. Batch quoting is actually + # not defined, and up to applications to handle, so it's not really clear + # what should be escaped and what not, but most paths should work just + # fine without escaping. And we don't care about double-quotes possibly + # having to be escaped because they're not allowed in file names on + # Windows. + return ['"-LIBPATH:%s"' % l for l in libs] + + +set_config("HOST_LINKER_LIBPATHS", host_linker_libpaths) +set_config("HOST_LINKER_LIBPATHS_BAT", host_linker_libpaths_bat) + + +@depends(valid_windows_sdk_dir, valid_ucrt_sdk_dir, host) +@imports(_from="os", _import="environ") +def sdk_bin_path(valid_windows_sdk_dir, valid_ucrt_sdk_dir, host): + if not valid_windows_sdk_dir: + return + + vc_host = { + "x86": "x86", + "x86_64": "x64", + }.get(host.cpu) + + # From version 10.0.15063.0 onwards the bin path contains the version number. 
+ versioned_bin = ( + "bin" + if valid_ucrt_sdk_dir.version < "10.0.15063.0" + else os.path.join("bin", str(valid_ucrt_sdk_dir.version)) + ) + result = [ + environ["PATH"], + os.path.join(valid_windows_sdk_dir.path, versioned_bin, vc_host), + ] + if vc_host == "x64": + result.append(os.path.join(valid_windows_sdk_dir.path, versioned_bin, "x86")) + return result + + +option(env="LINKER", nargs=1, when=target_is_windows, help="Path to the linker") + +link = check_prog( + "LINKER", + ("lld-link",), + input="LINKER", + when=target_is_windows, + paths=clang_search_path, +) + +option(env="HOST_LINKER", nargs=1, when=host_is_windows, help="Path to the host linker") + +host_link = check_prog( + "HOST_LINKER", + ("lld-link",), + input="HOST_LINKER", + when=host_is_windows, + paths=clang_search_path, +) + +add_old_configure_assignment("LINKER", link) diff --git a/build/mozconfig.artifact b/build/mozconfig.artifact new file mode 100644 index 0000000000..b2b12d0faf --- /dev/null +++ b/build/mozconfig.artifact @@ -0,0 +1,11 @@ +# Common options for testing artifact builds in automation. + +# Enable the artifact build. +ac_add_options --enable-artifact-builds +if test -n "$MOZ_ARTIFACT_TASK_WIN32_OPT" -a -n "$MOZ_ENABLE_FULL_SYMBOLS"; then + ac_add_options --enable-artifact-build-symbols=full +else + ac_add_options --enable-artifact-build-symbols +fi + +. "$topsrcdir/build/mozconfig.no-compile" diff --git a/build/mozconfig.artifact.automation b/build/mozconfig.artifact.automation new file mode 100644 index 0000000000..fed5866b68 --- /dev/null +++ b/build/mozconfig.artifact.automation @@ -0,0 +1,6 @@ +# Common options for artifact builds to set automation steps. +# This gets included before mozconfig.automation. + +MOZ_AUTOMATION_BUILD_SYMBOLS=0 +MOZ_AUTOMATION_PACKAGE_GENERATED_SOURCES=0 +MOZ_AUTOMATION_ARTIFACT_BUILDS=1 diff --git a/build/mozconfig.automation b/build/mozconfig.automation new file mode 100644 index 0000000000..0f2ea935ae --- /dev/null +++ b/build/mozconfig.automation @@ -0,0 +1,22 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# Common mozconfig for automation builds. +# +# We export MOZ_AUTOMATION_* variables here to trigger various steps in +# automation builds. For example, if MOZ_AUTOMATION_PACKAGE is set, then the +# package step will run. This file contains the default settings, which can be +# overridden by setting them earlier in the appropriate mozconfig. + +mk_add_options "export MOZ_AUTOMATION_BUILD_SYMBOLS=${MOZ_AUTOMATION_BUILD_SYMBOLS-1}" +mk_add_options "export MOZ_AUTOMATION_PACKAGE=${MOZ_AUTOMATION_PACKAGE-1}" +mk_add_options "export MOZ_AUTOMATION_PACKAGE_GENERATED_SOURCES=${MOZ_AUTOMATION_PACKAGE_GENERATED_SOURCES-1}" +mk_add_options "export MOZ_AUTOMATION_UPLOAD=${MOZ_AUTOMATION_UPLOAD-1}" +mk_add_options "export MOZ_AUTOMATION_CHECK=${MOZ_AUTOMATION_CHECK-1}" + +# The following variables are expected to be exported by Taskcluster +# configuration files rather than mozconfig files +mk_add_options "export MOZ_AUTOMATION_PACKAGE_TESTS=${MOZ_AUTOMATION_PACKAGE_TESTS-0}" + +export MOZ_AUTOMATION_MOZCONFIG=1 diff --git a/build/mozconfig.cache b/build/mozconfig.cache new file mode 100644 index 0000000000..1bd202b17d --- /dev/null +++ b/build/mozconfig.cache @@ -0,0 +1,84 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. 
If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# Setup for build cache + +# Thunderbird builds will have this set prior to including this file +aws_prefix=${aws_prefix:-taskcluster} + +# builds where buildprops didn't have the data (eg: taskcluster) and without sccache disabled: +if test -z "$bucket" -a -z "$SCCACHE_DISABLE"; then + + # prevent rerun if az is set, or wget is not available + if test -z "$availability_zone" -a -x "$(command -v wget)"; then + if test -n "$TASKCLUSTER_WORKER_LOCATION" -a -x "$(command -v jq)"; then + cloud=$(echo $TASKCLUSTER_WORKER_LOCATION | jq .cloud | tr -d \") + case $cloud in + aws|google) + availability_zone=$(echo $TASKCLUSTER_WORKER_LOCATION | jq .availabilityZone | tr -d \") + region=$(echo $TASKCLUSTER_WORKER_LOCATION | jq .region | tr -d \") + ;; + esac + fi + if test -z "$availability_zone" -o -z "$region"; then + if test -n "${TASKCLUSTER_WORKER_GROUP}"; then + # TASKCLUSTER_WORKER_GROUP is just the region now, so + # stick an extra character on to make the already-convoluted logic + # here simpler. + availability_zone="${TASKCLUSTER_WORKER_GROUP}x" + elif [ -n "${SCCACHE_GCS_KEY_PATH}" ]; then + # gcp availability_zone is of the form - where region is e.g. us-west2, and az is us-west2-a + gcp_zone=$(wget -T 1 -t 1 -q -O - http://169.254.169.254/computeMetadata/v1beta1/instance/zone || true) + availability_zone=${gcp_zone##*/} + else + # timeout after 1 second, and don't retry (failure indicates instance is not in ec2 or network issue) + # ec2 availability_zone is of the form where region is e.g. us-west-2, and az is us-west-2a + availability_zone=$(wget -T 1 -t 1 -q -O - http://169.254.169.254/latest/meta-data/placement/availability-zone || true) + fi + fi + if test -z "$availability_zone" -o "$availability_zone" = "not-ec2"; then + availability_zone=not-ec2 + elif test -n "$cloud"; then + case $cloud in + google) + bucket=sccache-l${MOZ_SCM_LEVEL}-${region} + ;; + aws) + bucket=${aws_prefix}-level-${MOZ_SCM_LEVEL}-sccache-${region} + ;; + esac + elif [ -n "${SCCACHE_GCS_KEY_PATH}" ]; then + # gcp region is az with last two letters trimmed + if test -z "$region"; then + region=${availability_zone::${#availability_zone}-2} + fi + bucket=${aws_prefix}-level-${MOZ_SCM_LEVEL}-sccache-${region} + else + # ec2 region is az with last letter trimmed + if test -z "$region"; then + region=${availability_zone%?} + fi + bucket=${aws_prefix}-level-${MOZ_SCM_LEVEL}-sccache-${region} + fi + fi +fi + +if test -n "$bucket"; then + if test "$cloud" = "google"; then + mk_add_options "export SCCACHE_GCS_BUCKET=$bucket" + mk_add_options "export SCCACHE_GCS_RW_MODE=READ_WRITE" + mk_add_options "export SCCACHE_GCS_CREDENTIALS_URL=http://taskcluster/auth/v1/gcp/credentials/$SCCACHE_GCS_PROJECT/${bucket}@$SCCACHE_GCS_PROJECT.iam.gserviceaccount.com" + elif [ -n "${SCCACHE_GCS_KEY_PATH}" ]; then + mk_add_options "export SCCACHE_GCS_BUCKET=$bucket" + else + mk_add_options "export SCCACHE_BUCKET=$bucket" + # instruct sccache to fetch the credentials from the Auth service's awsS3Credentials endpoint, via the Taskcluster proxy. 
+ mk_add_options "export AWS_IAM_CREDENTIALS_URL=http://taskcluster/auth/v1/aws/s3/read-write/${bucket}/?format=iam-role-compat" + fi + export CCACHE="sccache" + export SCCACHE_VERBOSE_STATS=1 + # Workaround for https://github.com/mozilla/sccache/issues/459#issuecomment-618756635 + mk_add_options "export SCCACHE_MAX_FRAME_LENGTH=50000000" + mk_add_options MOZBUILD_MANAGE_SCCACHE_DAEMON=${MOZ_FETCHES_DIR}/sccache/sccache +fi diff --git a/build/mozconfig.clang-cl b/build/mozconfig.clang-cl new file mode 100644 index 0000000000..a89ce14792 --- /dev/null +++ b/build/mozconfig.clang-cl @@ -0,0 +1,25 @@ +if test -d "$MOZ_FETCHES_DIR/clang/bin"; then + CLANG_DIR=`cd "$MOZ_FETCHES_DIR/clang/bin" ; pwd` + export PATH="${CLANG_DIR}:${PATH}" + + if $(cd $MOZ_FETCHES_DIR/clang/lib/clang/* && test -d lib/windows); then + export LIB="$(cd $MOZ_FETCHES_DIR/clang/lib/clang/* && cd lib/windows && pwd)" + fi +fi + +export CC=clang-cl +export CXX=clang-cl +export ENABLE_CLANG_PLUGIN=1 + +if [ -n "$UPLOAD_PATH" ]; then + case "$(uname -s)" in + MINGW*) + DIAGNOSTICS_DIR="$(cmd.exe //e:on //c if not exist ${UPLOAD_PATH} mkdir ${UPLOAD_PATH} && cd ${UPLOAD_PATH} && pwd)" + ;; + *) + DIAGNOSTICS_DIR="${UPLOAD_PATH}" + ;; + esac + export CFLAGS="$CFLAGS -fcrash-diagnostics-dir=${DIAGNOSTICS_DIR}" + export CXXFLAGS="$CXXFLAGS -fcrash-diagnostics-dir=${DIAGNOSTICS_DIR}" +fi diff --git a/build/mozconfig.comm-support b/build/mozconfig.comm-support new file mode 100644 index 0000000000..5c7465965f --- /dev/null +++ b/build/mozconfig.comm-support @@ -0,0 +1,49 @@ +# This file exists to support comm-central from building with mozilla-central +# as a subdirectory to building as a subdirectory of mozilla-central. + +# In order to have mozconfig files that support building in either +# configuration during the transition, without duplicating the logic +# in every mozconfig file, there needs to exist a file that exists at the +# same path in mozilla-central and comm-central. + +# This file gets included under two circumstances. +# 1. comm-central is being built as a subdirectory of mozilla-central. +# 2. comm-central is being built as a parent directory of mozilla-central, +# but `mach` was invoked from the objdir and thinks that $topsrcdir is +# the mozilla-central directory. If we detect this is the case, we fix +# $topsrcdir before proceeding. +# In either case, we then invoke the identically named file that lives in +# comm-central, which sets some variables that can be used by the rest of the +# mozconfig. + + +# Note that the top-level mozconfig file is in $2. + +if [ "$(dirname "$2")" = "$topsrcdir" ]; then + # No weirdness + if [ -d "$topsrcdir/mail" ]; then + # Building with comm-central as top-level directory. + echo "ERROR: Should not include mozilla/build/mozconfig.comm when building" + echo " with comm-central as top-level directory." + exit 1 + elif [ -d "$topsrcdir/comm/mail" ]; then + # Building with mozilla-central as top-level directory. + . "$topsrcdir/comm/build/mozconfig.comm-support" + else + echo "ERROR: Unknown build directory layout." + exit 1 + fi +elif [ "$(dirname "$2")" = "$(dirname "$topsrcdir")" ]; then + if [ -d "$topsrcdir/../mail" ]; then + # Building with comm-central as top-level directory; + # but invoked with $topsrcdir as "mozilla/". + topsrcdir=$(dirname "$topsrcdir") + . "$topsrcdir/build/mozconfig.comm-support" + else + echo "ERROR: Unknown build directory layout." + exit 1 + fi +else + echo "ERROR: Unknown build directory layout." 
+ exit 1 +fi diff --git a/build/mozconfig.common b/build/mozconfig.common new file mode 100644 index 0000000000..bad00ac7ae --- /dev/null +++ b/build/mozconfig.common @@ -0,0 +1,28 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# Common mozconfig for official builds. +# +# Add options to this file that will be inherited by all in-tree mozconfigs. +# This is useful for eg try builds with nondefault options that apply to all +# architectures, though note that if you want to override options set in +# another mozconfig file, you'll need to use mozconfig.common.override instead +# of this file. + +if test -n "$USE_ARTIFACT"; then +. "$topsrcdir/build/mozconfig.artifact.automation" +fi + +mk_add_options AUTOCLOBBER=1 + +ac_add_options --enable-crashreporter + +# Enable enforcing that add-ons are signed by the trusted root +MOZ_REQUIRE_SIGNING=${MOZ_REQUIRE_SIGNING-1} + +ac_add_options --enable-js-shell + +. "$topsrcdir/build/mozconfig.automation" +. "$topsrcdir/build/mozconfig.rust" +. "$topsrcdir/build/mozconfig.cache" diff --git a/build/mozconfig.common.override b/build/mozconfig.common.override new file mode 100644 index 0000000000..b3feefa5ec --- /dev/null +++ b/build/mozconfig.common.override @@ -0,0 +1,15 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# Common mozconfig for all users +# +# Add options to this file that will be inherited by all in-tree mozconfigs. +# This file is included at the *end* of the mozconfigs, and so may be used +# to override anything done previously. +# +# The common expected usage is for try builds with nondefault options. + +if test -n "$USE_ARTIFACT"; then +. "$topsrcdir/build/mozconfig.artifact" +fi diff --git a/build/mozconfig.lld-link b/build/mozconfig.lld-link new file mode 100644 index 0000000000..89f91930e7 --- /dev/null +++ b/build/mozconfig.lld-link @@ -0,0 +1,6 @@ +if test -d "$MOZ_FETCHES_DIR/clang/bin"; then + CLANG_DIR=`cd "$MOZ_FETCHES_DIR/clang/bin" ; pwd` + export PATH="${CLANG_DIR}:${PATH}" +fi + +export LINKER=lld-link diff --git a/build/mozconfig.no-compile b/build/mozconfig.no-compile new file mode 100644 index 0000000000..792e9ca057 --- /dev/null +++ b/build/mozconfig.no-compile @@ -0,0 +1,33 @@ +ac_add_options --disable-compile-environment + +# In case mozconfig.cache was already included +unset CCACHE +unset SCCACHE_VERBOSE_STATS +# In case it wasn't +NO_CACHE=1 + +# Override any toolchain defines we've inherited from other mozconfigs. +unset CC +unset CXX +unset HOST_CC +unset HOST_CXX +unset LINKER +unset RUSTFLAGS +unset TOOLCHAIN_PREFIX +unset BINDGEN_CFLAGS +unset ENABLE_CLANG_PLUGIN +unset MACOS_SDK_DIR +unset MACOS_PRIVATE_FRAMEWORKS_DIR +unset DIA_SDK_PATH +unset VC_PATH +unset WINDOWSSDKDIR +unset MOZ_LTO + +unset MOZ_STDCXX_COMPAT +unset MOZ_NO_PIE_COMPAT + +# Don't unset this on Linux artifact builds so the artifact builds correctly +# package any Wasm sandboxed shared libraries. 
+if test `uname -s` != Linux; then + unset WASM_SANDBOXED_LIBRARIES +fi diff --git a/build/mozconfig.rust b/build/mozconfig.rust new file mode 100644 index 0000000000..b7ab4d0219 --- /dev/null +++ b/build/mozconfig.rust @@ -0,0 +1 @@ +ac_add_options --enable-rust-simd diff --git a/build/mozconfig.wasm-sandboxing b/build/mozconfig.wasm-sandboxing new file mode 100644 index 0000000000..4cd4cc48a7 --- /dev/null +++ b/build/mozconfig.wasm-sandboxing @@ -0,0 +1,9 @@ +# Tell the build system about bits to build sandboxed wasm libraries. +case "$PERFHERDER_EXTRA_OPTIONS" in +base-toolchains*) + # Clang versions < 8.0 don't support wasm. + ;; +*) + export WASM_SANDBOXED_LIBRARIES=graphite,ogg + ;; +esac diff --git a/build/mozconfig.win-common b/build/mozconfig.win-common new file mode 100644 index 0000000000..9c592d1f37 --- /dev/null +++ b/build/mozconfig.win-common @@ -0,0 +1,12 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +if [ -z "$USE_ARTIFACT" ]; then + if [ -n "$TASKCLUSTER_PGO_PROFILE_USE" ]; then + export MOZ_LTO=cross + ac_add_options --enable-profile-use=cross + ac_add_options --with-pgo-jarlog="${MOZ_FETCHES_DIR}/en-US.log" + ac_add_options --with-pgo-profile-path="${MOZ_FETCHES_DIR}/merged.profdata" + fi +fi diff --git a/build/non-unified-compat b/build/non-unified-compat new file mode 100644 index 0000000000..60a745f421 --- /dev/null +++ b/build/non-unified-compat @@ -0,0 +1,123 @@ +accessible/android/ +accessible/aom/ +accessible/atk/ +accessible/base/ +accessible/generic/ +accessible/html/ +accessible/interfaces/ +accessible/ipc/ +accessible/mac/ +accessible/other/ +accessible/tests/ +accessible/windows/ +accessible/xpcom/ +accessible/xul/ +docshell/base/ +docshell/build/ +docshell/resources/ +docshell/shistory/ +dom/abort/ +dom/animation/ +dom/audiochannel/ +dom/base/ +dom/battery/ +dom/bindings/ +dom/broadcastchannel/ +dom/browser-element/ +dom/cache/ +dom/canvas/ +dom/clients/ +dom/commandhandler/ +dom/console/ +dom/credentialmanagement/ +dom/crypto/ +dom/debugger/ +dom/docs/ +dom/encoding/ +dom/events/ +dom/fetch/ +dom/file/ +dom/filehandle/ +dom/filesystem/ +dom/flex/ +dom/gamepad/ +dom/geolocation/ +dom/grid/ +dom/html/ +dom/imptests/ +dom/indexedDB/ +dom/interfaces/ +dom/ipc/ +dom/jsurl/ +dom/l10n/ +dom/locales/ +dom/localstorage/ +dom/manifest/ +dom/mathml/ +dom/media/ +dom/messagechannel/ +dom/midi/ +dom/network/ +dom/notification/ +dom/offline/ +dom/payments/ +dom/performance/ +dom/permission/ +dom/plugins/ +dom/power/ +dom/quota/ +dom/reporting/ +dom/res/ +dom/script/ +dom/security/ +dom/serviceworkers/ +dom/simpledb/ +dom/smil/ +dom/storage/ +dom/svg/ +dom/system/ +dom/tests/ +dom/u2f/ +dom/url/ +dom/vr/ +dom/webauthn/ +dom/webbrowserpersist/ +dom/webgpu/ +dom/webidl/ +dom/webshare/ +dom/websocket/ +dom/workers/ +dom/worklet/ +dom/xhr/ +dom/xml/ +dom/xslt/ +dom/xul/ +editor/composer/ +editor/libeditor/ +editor/reftests/ +editor/spellchecker/ +editor/txmgr/ +extensions/auth/ +extensions/permissions/ +extensions/pref/ +extensions/spellcheck/ +extensions/universalchardet/ +gfx/2d/ +gfx/config/ +gfx/gl/ +gfx/ipc/ +gfx/layers/ +gfx/ots/ +gfx/qcms/ +gfx/sfntly/ +gfx/src/ +gfx/tests/ +gfx/thebes/ +gfx/vr/ +gfx/webrender_bindings/ +gfx/wgpu/ +gfx/wgpu_bindings/ +gfx/wr/ +gfx/ycbcr/ +hal/ +image/ diff --git a/build/package/mac_osx/make-diskimage b/build/package/mac_osx/make-diskimage new file mode 100755 index 
0000000000..c214ceb59c
--- /dev/null
+++ b/build/package/mac_osx/make-diskimage
@@ -0,0 +1,47 @@
+#!/bin/sh
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# Create a read-only disk image of the contents of a folder
+#
+# Usage: make-diskimage <dmg_path> <src_folder>
+#                       <volume_name>
+#                       <eula_resource>
+#                       <.dsstore_file>
+#                       <background_image>
+#
+# tip: use '-null-' for <eula_resource> if you only want to
+#      provide <.dsstore_file> and <background_image>
+
+DMG_PATH=$1
+SRC_FOLDER=$2
+VOLUME_NAME=$3
+
+# optional arguments
+EULA_RSRC=$4
+DMG_DSSTORE=$5
+DMG_BKGND_IMG=$6
+
+EXTRA_ARGS=
+
+if test -n "$EULA_RSRC" && test "$EULA_RSRC" != "-null-" ; then
+  EXTRA_ARGS="--resource $EULA_RSRC"
+fi
+
+if test -n "$DMG_DSSTORE" ; then
+  EXTRA_ARGS="$EXTRA_ARGS --copy $DMG_DSSTORE:/.DS_Store"
+fi
+
+if test -n "$DMG_BKGND_IMG" ; then
+  EXTRA_ARGS="$EXTRA_ARGS --mkdir /.background --copy $DMG_BKGND_IMG:/.background"
fi
+
+echo `dirname $0`/pkg-dmg --target "$DMG_PATH" --source "$SRC_FOLDER" \
+  --volname "$VOLUME_NAME" $EXTRA_ARGS
+
+`dirname $0`/pkg-dmg --target "$DMG_PATH" --source "$SRC_FOLDER" \
+  --volname "$VOLUME_NAME" $EXTRA_ARGS
+
+exit $?
diff --git a/build/package/mac_osx/mozilla-background.jpg b/build/package/mac_osx/mozilla-background.jpg
new file mode 100644
index 0000000000..adb4df036e
Binary files /dev/null and b/build/package/mac_osx/mozilla-background.jpg differ
diff --git a/build/package/mac_osx/mozilla.dsstore b/build/package/mac_osx/mozilla.dsstore
new file mode 100644
index 0000000000..520eb08d6f
Binary files /dev/null and b/build/package/mac_osx/mozilla.dsstore differ
diff --git a/build/package/mac_osx/unpack-diskimage b/build/package/mac_osx/unpack-diskimage
new file mode 100755
index 0000000000..3ba977805e
--- /dev/null
+++ b/build/package/mac_osx/unpack-diskimage
@@ -0,0 +1,54 @@
+#!/bin/bash
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# Unpack a disk image to a specified target folder
+#
+# Usage: unpack-diskimage <dmg_path>
+#                         <mount_point>
+#                         <target_path>
+
+DMG_PATH=$1
+MOUNTPOINT=$2
+TARGETPATH=$3
+
+# How long to wait before giving up waiting for the mount to finish (seconds)
+TIMEOUT=90
+
+# If mnt already exists, then the previous run may not have cleaned up
+# properly. We should try to umount and remove the mnt directory.
+if [ -d $MOUNTPOINT ]; then
+    echo "mnt already exists, trying to clean up"
+    hdiutil detach $MOUNTPOINT -force
+    rm -rdfv $MOUNTPOINT
+fi
+
+# Install an on-exit handler that will unmount and remove the '$MOUNTPOINT' directory
+trap "{ if [ -d $MOUNTPOINT ]; then hdiutil detach $MOUNTPOINT -force; rm -rdfv $MOUNTPOINT; fi; }" EXIT
+
+mkdir -p $MOUNTPOINT
+
+hdiutil attach -verbose -noautoopen -mountpoint $MOUNTPOINT "$DMG_PATH"
+# Wait for files to show up
+# hdiutil uses a helper process, diskimages-helper, which isn't always done with
+# its work by the time hdiutil exits. So we wait until something shows up in the
+# mnt directory. Due to the async nature of diskimages-helper, the best thing
+# we can do is to make sure the glob() that rsync is making can find files.
+i=0 +while [ "$(echo $MOUNTPOINT/*)" == "$MOUNTPOINT/*" ]; do + if [ $i -gt $TIMEOUT ]; then + echo "No files found, exiting" + exit 1 + fi + sleep 1 + i=$(expr $i + 1) +done +# Now we can copy everything out of the $MOUNTPOINT directory into the target directory +rsync -av $MOUNTPOINT/* $MOUNTPOINT/.DS_Store $MOUNTPOINT/.background $MOUNTPOINT/.VolumeIcon.icns $TARGETPATH/. +hdiutil detach $MOUNTPOINT +rm -rdf $MOUNTPOINT +# diskimage-helper prints messages to stdout asynchronously as well, sleep +# for a bit to ensure they don't disturb following commands in a script that +# might parse stdout messages +sleep 5 diff --git a/build/pgo/blueprint/LICENSE b/build/pgo/blueprint/LICENSE new file mode 100644 index 0000000000..d7474100a9 --- /dev/null +++ b/build/pgo/blueprint/LICENSE @@ -0,0 +1,314 @@ +Blueprint CSS Framework License +---------------------------------------------------------------- + +Copyright (c) 2007-2008 Olav Bjorkoy (olav at bjorkoy.com) + +The Blueprint CSS Framework is available for use in all personal or +commercial projects, under both the (modified) MIT and the GPL license. You +may choose the one that fits your project. + + +The (modified) MIT License +---------------------------------------------------------------- + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sub-license, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice, and every other copyright notice found in this +software, and all the attributions in every file, and this permission notice +shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +The GPL License +---------------------------------------------------------------- + + GNU GENERAL PUBLIC LICENSE + Version 2, June 1991 + + Copyright (C) 1989, 1991 Free Software Foundation, Inc. + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +License is intended to guarantee your freedom to share and change free +software--to make sure the software is free for all its users. This +General Public License applies to most of the Free Software +Foundation's software and to any other program whose authors commit to +using it. (Some other Free Software Foundation software is covered by +the GNU Lesser General Public License instead.) You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. 
Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +this service if you wish), that you receive source code or can get it +if you want it, that you can change the software or use pieces of it +in new free programs; and that you know you can do these things. + + To protect your rights, we need to make restrictions that forbid +anyone to deny you these rights or to ask you to surrender the rights. +These restrictions translate to certain responsibilities for you if you +distribute copies of the software, or if you modify it. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must give the recipients all the rights that +you have. You must make sure that they, too, receive or can get the +source code. And you must show them these terms so they know their +rights. + + We protect your rights with two steps: (1) copyright the software, and +(2) offer you this license which gives you legal permission to copy, +distribute and/or modify the software. + + Also, for each author's protection and ours, we want to make certain +that everyone understands that there is no warranty for this free +software. If the software is modified by someone else and passed on, we +want its recipients to know that what they have is not the original, so +that any problems introduced by others will not reflect on the original +authors' reputations. + + Finally, any free program is threatened constantly by software +patents. We wish to avoid the danger that redistributors of a free +program will individually obtain patent licenses, in effect making the +program proprietary. To prevent this, we have made it clear that any +patent must be licensed for everyone's free use or not licensed at all. + + The precise terms and conditions for copying, distribution and +modification follow. + + GNU GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License applies to any program or other work which contains +a notice placed by the copyright holder saying it may be distributed +under the terms of this General Public License. The "Program", below, +refers to any such program or work, and a "work based on the Program" +means either the Program or any derivative work under copyright law: +that is to say, a work containing the Program or a portion of it, +either verbatim or with modifications and/or translated into another +language. (Hereinafter, translation is included without limitation in +the term "modification".) Each licensee is addressed as "you". + +Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running the Program is not restricted, and the output from the Program +is covered only if its contents constitute a work based on the +Program (independent of having been made by running the Program). +Whether that is true depends on what the Program does. + + 1. You may copy and distribute verbatim copies of the Program's +source code as you receive it, in any medium, provided that you +conspicuously and appropriately publish on each copy an appropriate +copyright notice and disclaimer of warranty; keep intact all the +notices that refer to this License and to the absence of any warranty; +and give any other recipients of the Program a copy of this License +along with the Program. 
+ +You may charge a fee for the physical act of transferring a copy, and +you may at your option offer warranty protection in exchange for a fee. + + 2. You may modify your copy or copies of the Program or any portion +of it, thus forming a work based on the Program, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices + stating that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in + whole or in part contains or is derived from the Program or any + part thereof, to be licensed as a whole at no charge to all third + parties under the terms of this License. + + c) If the modified program normally reads commands interactively + when run, you must cause it, when started running for such + interactive use in the most ordinary way, to print or display an + announcement including an appropriate copyright notice and a + notice that there is no warranty (or else, saying that you provide + a warranty) and that users may redistribute the program under + these conditions, and telling the user how to view a copy of this + License. (Exception: if the Program itself is interactive but + does not normally print such an announcement, your work based on + the Program is not required to print an announcement.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Program, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Program, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Program. + +In addition, mere aggregation of another work not based on the Program +with the Program (or with a work based on the Program) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may copy and distribute the Program (or a work based on it, +under Section 2) in object code or executable form under the terms of +Sections 1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable + source code, which must be distributed under the terms of Sections + 1 and 2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three + years, to give any third party, for a charge no more than your + cost of physically performing source distribution, a complete + machine-readable copy of the corresponding source code, to be + distributed under the terms of Sections 1 and 2 above on a medium + customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer + to distribute corresponding source code. 
(This alternative is + allowed only for noncommercial distribution and only if you + received the program in object code or executable form with such + an offer, in accord with Subsection b above.) + +The source code for a work means the preferred form of the work for +making modifications to it. For an executable work, complete source +code means all the source code for all modules it contains, plus any +associated interface definition files, plus the scripts used to +control compilation and installation of the executable. However, as a +special exception, the source code distributed need not include +anything that is normally distributed (in either source or binary +form) with the major components (compiler, kernel, and so on) of the +operating system on which the executable runs, unless that component +itself accompanies the executable. + +If distribution of executable or object code is made by offering +access to copy from a designated place, then offering equivalent +access to copy the source code from the same place counts as +distribution of the source code, even though third parties are not +compelled to copy the source along with the object code. + + 4. You may not copy, modify, sublicense, or distribute the Program +except as expressly provided under this License. Any attempt +otherwise to copy, modify, sublicense or distribute the Program is +void, and will automatically terminate your rights under this License. +However, parties who have received copies, or rights, from you under +this License will not have their licenses terminated so long as such +parties remain in full compliance. + + 5. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Program or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Program (or any work based on the +Program), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Program or works based on it. + + 6. Each time you redistribute the Program (or any work based on the +Program), the recipient automatically receives a license from the +original licensor to copy, distribute or modify the Program subject to +these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties to +this License. + + 7. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Program at all. For example, if a patent +license would not permit royalty-free redistribution of the Program by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Program. 
+ +If any portion of this section is held invalid or unenforceable under +any particular circumstance, the balance of the section is intended to +apply and the section as a whole is intended to apply in other +circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system, which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 8. If the distribution and/or use of the Program is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Program under this License +may add an explicit geographical distribution limitation excluding +those countries, so that distribution is permitted only in or among +countries not thus excluded. In such case, this License incorporates +the limitation as if written in the body of this License. + + 9. The Free Software Foundation may publish revised and/or new versions +of the General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + +Each version is given a distinguishing version number. If the Program +specifies a version number of this License which applies to it and "any +later version", you have the option of following the terms and conditions +either of that version or of any later version published by the Free +Software Foundation. If the Program does not specify a version number of +this License, you may choose any version ever published by the Free Software +Foundation. + + 10. If you wish to incorporate parts of the Program into other free +programs whose distribution conditions are different, write to the author +to ask for permission. For software which is copyrighted by the Free +Software Foundation, write to the Free Software Foundation; we sometimes +make exceptions for this. Our decision will be guided by the two goals +of preserving the free status of all derivatives of our free software and +of promoting the sharing and reuse of software generally. + + NO WARRANTY + + 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY +FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN +OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES +PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED +OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS +TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE +PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, +REPAIR OR CORRECTION. + + 12. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR +REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, +INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING +OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED +TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY +YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER +PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE +POSSIBILITY OF SUCH DAMAGES. \ No newline at end of file diff --git a/build/pgo/blueprint/elements.html b/build/pgo/blueprint/elements.html new file mode 100644 index 0000000000..51d79fae44 --- /dev/null +++ b/build/pgo/blueprint/elements.html @@ -0,0 +1,250 @@ + + + + + + + + Blueprint HTML Elements Tests + + + + + + + + + + + + diff --git a/build/pgo/blueprint/fancytype-screen.css b/build/pgo/blueprint/fancytype-screen.css new file mode 100644 index 0000000000..0d3feb77f7 --- /dev/null +++ b/build/pgo/blueprint/fancytype-screen.css @@ -0,0 +1,75 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +/* -------------------------------------------------------------- + + fancy-type.css + * Lots of pretty advanced classes for manipulating text. + + See the Readme file in this folder for additional instructions. + +-------------------------------------------------------------- */ + +/* Indentation instead of line shifts for sibling paragraphs. */ + p + p { text-indent:2em; margin-top:-1.5em; } + form p + p { text-indent: 0; } /* Don't want this in forms. */ + + +/* For great looking type, use this code instead of asdf: + asdf + Best used on prepositions and ampersands. */ + +.alt { + color: #666; + font-family: "Warnock Pro", "Goudy Old Style","Palatino","Book Antiqua", Georgia, serif; + font-style: italic; + font-weight: normal; +} + + +/* For great looking quote marks in titles, replace "asdf" with: + asdf” + (That is, when the title starts with a quote mark). + (You may have to change this value depending on your font size). */ + +.dquo { margin-left: -.5em; } + + +/* Reduced size type with incremental leading + (http://www.markboulton.co.uk/journal/comments/incremental_leading/) + + This could be used for side notes. For smaller type, you don't necessarily want to + follow the 1.5x vertical rhythm -- the line-height is too much. + + Using this class, it reduces your font size and line-height so that for + every four lines of normal sized type, there is five lines of the sidenote. eg: + + New type size in em's: + 10px (wanted side note size) / 12px (existing base size) = 0.8333 (new type size in ems) + + New line-height value: + 12px x 1.5 = 18px (old line-height) + 18px x 4 = 72px + 72px / 5 = 14.4px (new line height) + 14.4px / 10px = 1.44 (new line height in em's) */ + +p.incr, .incr p { + font-size: 10px; + line-height: 1.44em; + margin-bottom: 1.5em; +} + + +/* Surround uppercase words and abbreviations with this class. 
+ Based on work by Jørgen Arnor Gårdsø Lom [http://twistedintellect.com/] */ + +.caps { + font-variant: small-caps; + letter-spacing: 1px; + text-transform: lowercase; + font-size:1.2em; + line-height:1%; + font-weight:bold; + padding:0 2px; +} diff --git a/build/pgo/blueprint/forms.html b/build/pgo/blueprint/forms.html new file mode 100644 index 0000000000..8310ba4d45 --- /dev/null +++ b/build/pgo/blueprint/forms.html @@ -0,0 +1,104 @@ + + + + + + + + Blueprint Forms Tests + + + + + + + + +
+

Forms

+
+ +
+ +
+ +
+ Simple sample form + +


+

+ +


+

+ +


+

+ +

+

+ +
+
+ +
+
+ +
+ This is a <div> with the class .error. Link. +
+
+ This is a <div> with the class .notice. Link. +
+
+ This is a <div> with the class .success. Link. +
+ +
+ Select, checkboxes, lists + +


+

+ +


+

+ +


+ Radio one
+ Radio two
+ Radio three

+ +


+ Check one
+ Check two
+ Check three

+ +
+ +
+
+ +

+ Valid HTML 4.01 Strict

+ +
+ + diff --git a/build/pgo/blueprint/grid.html b/build/pgo/blueprint/grid.html new file mode 100644 index 0000000000..e851a63133 --- /dev/null +++ b/build/pgo/blueprint/grid.html @@ -0,0 +1,210 @@ + + + + + + + + Blueprint Grid Tests + + + + + + + + +
+

Blueprint Tests: grid.css

+ + +
+

Lorem ipsum dolor sit amet, consectetur adipisicing elit.

+
+
+

Lorem ipsum dolor sit amet, consectetur adipisicing elit.

+
+
+

Lorem ipsum dolor sit amet, consectetur adipisicing elit.

+
+ + +
+

Lorem ipsum dolor sit amet, consectetur adipisicing elit.

+
+
+

Lorem ipsum dolor sit amet, consectetur adipisicing elit.

+
+
+

Lorem ipsum dolor sit amet, consectetur adipisicing elit.

+
+ + +
+

Lorem ipsum dolor sit amet, consectetur adipisicing elit.

+
+
+

Lorem ipsum dolor sit amet, consectetur adipisicing elit.

+
+
+

Lorem ipsum dolor sit amet, consectetur adipisicing elit.

+
+
+ +
+

Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.

+
+
+

Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.

+
+
+ +

1

+

2

+

3

+

4

+

5

+

3

+ +

1

+

2

+

3

+

4

+

5

+

3

+ +

1

+

2

+

3

+

4

+

5

+

6

+

7

+

8

+

9

+

10

+

11

+

12

+

13

+

14

+

15

+

16

+

17

+

18

+

19

+

20

+

21

+

22

+

23

+

24

+ + +

1

+

2

+

3

+

4

+

5

+

6

+ + +

24

+ + +

2

+

23

+
+ +
+

Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.

+
+ + +
+
+

Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod.

+
+ +
+

Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.

+
+ +
+

Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.

+
+
+ +
+
+

Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod.

+
+ +
+

Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.

+
+ +
+

Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.

+
+
+ + +
+

Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.

+
+
+ +
+
TESTING .PUSH-1 TO .PUSH-5
+ +
+

Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.

+ +
+

Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.

+ +
+

Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.

+ +
+

Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.

+ +
+

Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.

+ +
+ +
+
TESTING .PULL-1 TO .PULL-5
+ +

Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.

+
+ +

Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.

+
+ +

Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.

+
+ +

Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.

+
+ +

Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.

+
+ +
+ + + + + +
+

+ Valid HTML 4.01 Strict

+
+ +
+ + diff --git a/build/pgo/blueprint/grid.png b/build/pgo/blueprint/grid.png new file mode 100644 index 0000000000..129d4a29fb Binary files /dev/null and b/build/pgo/blueprint/grid.png differ diff --git a/build/pgo/blueprint/print.css b/build/pgo/blueprint/print.css new file mode 100644 index 0000000000..661861457a --- /dev/null +++ b/build/pgo/blueprint/print.css @@ -0,0 +1,29 @@ +/* ----------------------------------------------------------------------- + + Blueprint CSS Framework 0.7.1 + http://blueprintcss.googlecode.com + + * Copyright (c) 2007-2008. See LICENSE for more info. + * See README for instructions on how to use Blueprint. + * For credits and origins, see AUTHORS. + * This is a compressed file. See the sources in the 'src' directory. + +----------------------------------------------------------------------- */ + +/* print.css */ +body {line-height:1.5;font-family:"Helvetica Neue", Helvetica, Arial, sans-serif;color:#000;background:none;font-size:10pt;} +.container {background:none;} +hr {background:#ccc;color:#ccc;width:100%;height:2px;margin:2em 0;padding:0;border:none;} +hr.space {background:#fff;color:#fff;} +h1, h2, h3, h4, h5, h6 {font-family:"Helvetica Neue", Arial, "Lucida Grande", sans-serif;} +code {font:.9em "Courier New", Monaco, Courier, monospace;} +img {float:left;margin:1.5em 1.5em 1.5em 0;} +a img {border:none;} +p img.top {margin-top:0;} +blockquote {margin:1.5em;padding:1em;font-style:italic;font-size:.9em;} +.small {font-size:.9em;} +.large {font-size:1.1em;} +.quiet {color:#999;} +.hide {display:none;} +a:link, a:visited {background:transparent;font-weight:700;text-decoration:underline;} +a:link:after, a:visited:after {content:" (" attr(href) ") ";font-size:90%;} \ No newline at end of file diff --git a/build/pgo/blueprint/sample.html b/build/pgo/blueprint/sample.html new file mode 100644 index 0000000000..d2c4dfeb8a --- /dev/null +++ b/build/pgo/blueprint/sample.html @@ -0,0 +1,91 @@ + + + + + + + + Blueprint Sample Page + + + + + + + + + + + + +
+

A simple sample page

+
+

This sample page demonstrates a tiny fraction of what you get with Blueprint.

+
+ +
+
Here's a box
+

Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip.

+
+ +
+
And another box
+

Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat laboris nisi ut aliquip.

+
+ +
+
This box is aligned with the sidebar
+

Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip.

+
+
+
+ +
+

testLorem ipsum dolor sit amet, consectetuer adipiscing elit. Nunc congue ipsum vestibulum libero. Aenean vitae justo. Nam eget tellus. Etiam convallis, est eu lobortis mattis, lectus tellus tempus felis, a ultricies erat ipsum at metus.

+

Lorem ipsum dolor sit amet, consectetuer adipiscing elit. Morbi et risus. Aliquam nisl. Nulla facilisi. Cras accumsan vestibulum ante. Vestibulum sed tortor. Praesent SMALL CAPS tempus fringilla elit. Ut elit diam, sagittis in, nonummy in, gravida non, nunc. Ut orci. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos hymenaeos. Nam egestas, orci eu imperdiet malesuada, nisl purus fringilla odio, quis commodo est orci vitae justo. Aliquam placerat odio tincidunt nulla. Cras in libero. Aenean rutrum, magna non tristique posuere, erat odio eleifend nisl, non convallis est tortor blandit ligula. Nulla id augue.

+

Nullam mattis, odio ut tempus facilisis, metus nisl facilisis metus, auctor consectetuer felis ligula nec mauris. Vestibulum odio erat, fermentum at, commodo vitae, ultrices et, urna. Mauris vulputate, mi pulvinar sagittis condimentum, sem nulla aliquam velit, sed imperdiet mi purus eu magna. Nulla varius metus ut eros. Aenean aliquet magna eget orci. Class aptent taciti sociosqu ad litora.

+

Vivamus euismod. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Suspendisse vel nibh ut turpis dictum sagittis. Aliquam vel velit a elit auctor sollicitudin. Nam vel dui vel neque lacinia pretium. Quisque nunc erat, venenatis id, volutpat ut, scelerisque sed, diam. Mauris ante. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Donec mattis. Morbi dignissim sollicitudin libero. Nulla lorem.

+
+

Integer cursus ornare mauris. Praesent nisl arcu, imperdiet eu, ornare id, scelerisque ut, nunc. Praesent sagittis erat sed velit tempus imperdiet. Ut tristique, ante in interdum hendrerit, erat enim faucibus felis, quis rutrum mauris lorem quis sem. Vestibulum ligula nisi, mattis nec, posuere et, blandit eu, ligula. Nam suscipit placerat odio. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos hymenaeos. Pellentesque tortor libero, venenatis vitae, rhoncus eu, placerat ut, mi. Nulla nulla.

+
+

Maecenas vel metus quis magna pharetra fermentum. Integer sit amet tortor. Maecenas porttitor, pede sed gravida auctor, nulla augue aliquet elit, at pretium urna orci ut metus. Aliquam in dolor. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Sed aliquam, tellus id ornare posuere, quam nunc accumsan turpis, at convallis tellus orci et nisl. Phasellus congue neque a lorem.

+ +
+
+
This is a nested column
+

Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur.

+
+
+
This is another nested column
+

Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur.

+
+ +
+
+ +

A Simple Sidebar

+ +

Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Cras ornare mattis nunc. Mauris venenatis, pede sed aliquet vehicula, lectus tellus pulvinar neque, non cursus sem nisi vel augue.

+

Mauris a lectus. Aliquam erat volutpat. Phasellus ultrices mi a sapien. Nunc rutrum egestas lorem. Duis ac sem sagittis elit tincidunt gravida. Mauris a lectus. Aliquam erat volutpat. Phasellus ultrices mi a sapien. Nunc rutrum egestas lorem. Duis ac sem sagittis elit tincidunt gravida.

+

Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Cras ornare mattis nunc. Mauris venenatis, pede sed aliquet vehicula, lectus tellus pulvinar neque, non cursus sem nisi vel augue.

+ +
Incremental leading
+

Vestibulum ante ipsum primis in faucibus orci luctus vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Cras ornare mattis nunc. Mauris venenatis, pede sed aliquet vehicula, lectus tellus pulvinar neque, non cursus sem nisi vel augue. sed aliquet vehicula, lectus tellus.

+

Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Cras ornare mattis nunc. Mauris venenatis, pede sed aliquet vehicula, lectus tellus pulvinar neque, non cursus sem nisi vel augue. sed aliquet vehicula, lectus tellus pulvinar neque, non cursus sem nisi vel augue. ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Cras ornare mattis nunc. Mauris venenatis, pede sed aliquet vehicula, lectus tellus pulvinar neque, non cursus sem nisi vel augue. sed aliquet vehicula, lectus tellus pulvinar neque, non cursus sem nisi vel augue.

+ +
+ +
+

You may pick and choose amongst these and many more features, so be bold.

+
+ +

+ Valid HTML 4.01 Strict

+
+ + + diff --git a/build/pgo/blueprint/screen.css b/build/pgo/blueprint/screen.css new file mode 100644 index 0000000000..c631ead6a1 --- /dev/null +++ b/build/pgo/blueprint/screen.css @@ -0,0 +1,226 @@ +/* ----------------------------------------------------------------------- + + Blueprint CSS Framework 0.7.1 + http://blueprintcss.googlecode.com + + * Copyright (c) 2007-2008. See LICENSE for more info. + * See README for instructions on how to use Blueprint. + * For credits and origins, see AUTHORS. + * This is a compressed file. See the sources in the 'src' directory. + +----------------------------------------------------------------------- */ + +/* reset.css */ +html, body, div, span, object, iframe, h1, h2, h3, h4, h5, h6, p, blockquote, pre, a, abbr, acronym, address, code, del, dfn, em, img, q, dl, dt, dd, ol, ul, li, fieldset, form, label, legend, table, caption, tbody, tfoot, thead, tr, th, td {margin:0;padding:0;border:0;font-weight:inherit;font-style:inherit;font-size:100%;font-family:inherit;vertical-align:baseline;} +body {line-height:1.5;} +table {border-collapse:separate;border-spacing:0;} +caption, th, td {text-align:left;font-weight:normal;} +table, td, th {vertical-align:middle;} +blockquote:before, blockquote:after, q:before, q:after {content:"";} +blockquote, q {quotes:"" "";} +a img {border:none;} + +/* typography.css */ +body {font-size:75%;color:#222;background:#fff;font-family:"Helvetica Neue", Helvetica, Arial, sans-serif;} +h1, h2, h3, h4, h5, h6 {font-weight:normal;color:#111;} +h1 {font-size:3em;line-height:1;margin-bottom:0.5em;} +h2 {font-size:2em;margin-bottom:0.75em;} +h3 {font-size:1.5em;line-height:1;margin-bottom:1em;} +h4 {font-size:1.2em;line-height:1.25;margin-bottom:1.25em;height:1.25em;} +h5 {font-size:1em;font-weight:bold;margin-bottom:1.5em;} +h6 {font-size:1em;font-weight:bold;} +h1 img, h2 img, h3 img, h4 img, h5 img, h6 img {margin:0;} +p {margin:0 0 1.5em;} +p img {float:left;margin:1.5em 1.5em 1.5em 0;padding:0;} +p img.right {float:right;margin:1.5em 0 1.5em 1.5em;} +a:focus, a:hover {color:#000;} +a {color:#009;text-decoration:underline;} +blockquote {margin:1.5em;color:#666;font-style:italic;} +strong {font-weight:bold;} +em, dfn {font-style:italic;} +dfn {font-weight:bold;} +sup, sub {line-height:0;} +abbr, acronym {border-bottom:1px dotted #666;} +address {margin:0 0 1.5em;font-style:italic;} +del {color:#666;} +pre, code {margin:1.5em 0;white-space:pre;} +pre, code, tt {font:1em 'andale mono', 'lucida console', monospace;line-height:1.5;} +li ul, li ol {margin:0 1.5em;} +ul, ol {margin:0 1.5em 1.5em 1.5em;} +ul {list-style-type:disc;} +ol {list-style-type:decimal;} +dl {margin:0 0 1.5em 0;} +dl dt {font-weight:bold;} +dd {margin-left:1.5em;} +table {margin-bottom:1.4em;width:100%;} +th {font-weight:bold;background:#C3D9FF;} +th, td {padding:4px 10px 4px 5px;} +tr.even td {background:#E5ECF9;} +tfoot {font-style:italic;} +caption {background:#eee;} +.small {font-size:.8em;margin-bottom:1.875em;line-height:1.875em;} +.large {font-size:1.2em;line-height:2.5em;margin-bottom:1.25em;} +.hide {display:none;} +.quiet {color:#666;} +.loud {color:#000;} +.highlight {background:#ff0;} +.added {background:#060;color:#fff;} +.removed {background:#900;color:#fff;} +.first {margin-left:0;padding-left:0;} +.last {margin-right:0;padding-right:0;} +.top {margin-top:0;padding-top:0;} +.bottom {margin-bottom:0;padding-bottom:0;} + +/* grid.css */ +.container {width:950px;margin:0 auto;} +.showgrid {background:url(grid.png);} +body {margin:1.5em 0;} 
+div.span-1, div.span-2, div.span-3, div.span-4, div.span-5, div.span-6, div.span-7, div.span-8, div.span-9, div.span-10, div.span-11, div.span-12, div.span-13, div.span-14, div.span-15, div.span-16, div.span-17, div.span-18, div.span-19, div.span-20, div.span-21, div.span-22, div.span-23, div.span-24 {float:left;margin-right:10px;} +div.last {margin-right:0;} +.span-1 {width:30px;} +.span-2 {width:70px;} +.span-3 {width:110px;} +.span-4 {width:150px;} +.span-5 {width:190px;} +.span-6 {width:230px;} +.span-7 {width:270px;} +.span-8 {width:310px;} +.span-9 {width:350px;} +.span-10 {width:390px;} +.span-11 {width:430px;} +.span-12 {width:470px;} +.span-13 {width:510px;} +.span-14 {width:550px;} +.span-15 {width:590px;} +.span-16 {width:630px;} +.span-17 {width:670px;} +.span-18 {width:710px;} +.span-19 {width:750px;} +.span-20 {width:790px;} +.span-21 {width:830px;} +.span-22 {width:870px;} +.span-23 {width:910px;} +.span-24, div.span-24 {width:950px;margin:0;} +.append-1 {padding-right:40px;} +.append-2 {padding-right:80px;} +.append-3 {padding-right:120px;} +.append-4 {padding-right:160px;} +.append-5 {padding-right:200px;} +.append-6 {padding-right:240px;} +.append-7 {padding-right:280px;} +.append-8 {padding-right:320px;} +.append-9 {padding-right:360px;} +.append-10 {padding-right:400px;} +.append-11 {padding-right:440px;} +.append-12 {padding-right:480px;} +.append-13 {padding-right:520px;} +.append-14 {padding-right:560px;} +.append-15 {padding-right:600px;} +.append-16 {padding-right:640px;} +.append-17 {padding-right:680px;} +.append-18 {padding-right:720px;} +.append-19 {padding-right:760px;} +.append-20 {padding-right:800px;} +.append-21 {padding-right:840px;} +.append-22 {padding-right:880px;} +.append-23 {padding-right:920px;} +.prepend-1 {padding-left:40px;} +.prepend-2 {padding-left:80px;} +.prepend-3 {padding-left:120px;} +.prepend-4 {padding-left:160px;} +.prepend-5 {padding-left:200px;} +.prepend-6 {padding-left:240px;} +.prepend-7 {padding-left:280px;} +.prepend-8 {padding-left:320px;} +.prepend-9 {padding-left:360px;} +.prepend-10 {padding-left:400px;} +.prepend-11 {padding-left:440px;} +.prepend-12 {padding-left:480px;} +.prepend-13 {padding-left:520px;} +.prepend-14 {padding-left:560px;} +.prepend-15 {padding-left:600px;} +.prepend-16 {padding-left:640px;} +.prepend-17 {padding-left:680px;} +.prepend-18 {padding-left:720px;} +.prepend-19 {padding-left:760px;} +.prepend-20 {padding-left:800px;} +.prepend-21 {padding-left:840px;} +.prepend-22 {padding-left:880px;} +.prepend-23 {padding-left:920px;} +div.border {padding-right:4px;margin-right:5px;border-right:1px solid #eee;} +div.colborder {padding-right:24px;margin-right:25px;border-right:1px solid #eee;} +.pull-1 {margin-left:-40px;} +.pull-2 {margin-left:-80px;} +.pull-3 {margin-left:-120px;} +.pull-4 {margin-left:-160px;} +.pull-5 {margin-left:-200px;} +.pull-6 {margin-left:-240px;} +.pull-7 {margin-left:-280px;} +.pull-8 {margin-left:-320px;} +.pull-9 {margin-left:-360px;} +.pull-10 {margin-left:-400px;} +.pull-11 {margin-left:-440px;} +.pull-12 {margin-left:-480px;} +.pull-13 {margin-left:-520px;} +.pull-14 {margin-left:-560px;} +.pull-15 {margin-left:-600px;} +.pull-16 {margin-left:-640px;} +.pull-17 {margin-left:-680px;} +.pull-18 {margin-left:-720px;} +.pull-19 {margin-left:-760px;} +.pull-20 {margin-left:-800px;} +.pull-21 {margin-left:-840px;} +.pull-22 {margin-left:-880px;} +.pull-23 {margin-left:-920px;} +.pull-24 {margin-left:-960px;} +.pull-1, .pull-2, .pull-3, .pull-4, .pull-5, .pull-6, .pull-7, .pull-8, 
.pull-9, .pull-10, .pull-11, .pull-12, .pull-13, .pull-14, .pull-15, .pull-16, .pull-17, .pull-18, .pull-19, .pull-20, .pull-21, .pull-22, .pull-23, .pull-24 {float:left;position:relative;} +.push-1 {margin:0 -40px 1.5em 40px;} +.push-2 {margin:0 -80px 1.5em 80px;} +.push-3 {margin:0 -120px 1.5em 120px;} +.push-4 {margin:0 -160px 1.5em 160px;} +.push-5 {margin:0 -200px 1.5em 200px;} +.push-6 {margin:0 -240px 1.5em 240px;} +.push-7 {margin:0 -280px 1.5em 280px;} +.push-8 {margin:0 -320px 1.5em 320px;} +.push-9 {margin:0 -360px 1.5em 360px;} +.push-10 {margin:0 -400px 1.5em 400px;} +.push-11 {margin:0 -440px 1.5em 440px;} +.push-12 {margin:0 -480px 1.5em 480px;} +.push-13 {margin:0 -520px 1.5em 520px;} +.push-14 {margin:0 -560px 1.5em 560px;} +.push-15 {margin:0 -600px 1.5em 600px;} +.push-16 {margin:0 -640px 1.5em 640px;} +.push-17 {margin:0 -680px 1.5em 680px;} +.push-18 {margin:0 -720px 1.5em 720px;} +.push-19 {margin:0 -760px 1.5em 760px;} +.push-20 {margin:0 -800px 1.5em 800px;} +.push-21 {margin:0 -840px 1.5em 840px;} +.push-22 {margin:0 -880px 1.5em 880px;} +.push-23 {margin:0 -920px 1.5em 920px;} +.push-24 {margin:0 -960px 1.5em 960px;} +.push-1, .push-2, .push-3, .push-4, .push-5, .push-6, .push-7, .push-8, .push-9, .push-10, .push-11, .push-12, .push-13, .push-14, .push-15, .push-16, .push-17, .push-18, .push-19, .push-20, .push-21, .push-22, .push-23, .push-24 {float:right;position:relative;} +.box {padding:1.5em;margin-bottom:1.5em;background:#E5ECF9;} +hr {background:#ddd;color:#ddd;clear:both;float:none;width:100%;height:.1em;margin:0 0 1.45em;border:none;} +hr.space {background:#fff;color:#fff;} +.clearfix:after, .container:after {content:".";display:block;height:0;clear:both;visibility:hidden;} +.clearfix, .container {display:inline-block;} +* html .clearfix, * html .container {height:1%;} +.clearfix, .container {display:block;} +.clear {clear:both;} + +/* forms.css */ +label {font-weight:bold;} +fieldset {padding:1.4em;margin:0 0 1.5em 0;border:1px solid #ccc;} +legend {font-weight:bold;font-size:1.2em;} +input.text, input.title, textarea, select {margin:0.5em 0;border:1px solid #bbb;} +input.text:focus, input.title:focus, textarea:focus, select:focus {border:1px solid #666;} +input.text, input.title {width:300px;padding:5px;} +input.title {font-size:1.5em;} +textarea {width:390px;height:250px;padding:5px;} +.error, .notice, .success {padding:.8em;margin-bottom:1em;border:2px solid #ddd;} +.error {background:#FBE3E4;color:#8a1f11;border-color:#FBC2C4;} +.notice {background:#FFF6BF;color:#514721;border-color:#FFD324;} +.success {background:#E6EFC2;color:#264409;border-color:#C6D880;} +.error a {color:#8a1f11;} +.notice a {color:#514721;} +.success a {color:#264409;} \ No newline at end of file diff --git a/build/pgo/blueprint/test-small.jpg b/build/pgo/blueprint/test-small.jpg new file mode 100644 index 0000000000..aa599d99a9 Binary files /dev/null and b/build/pgo/blueprint/test-small.jpg differ diff --git a/build/pgo/blueprint/test.jpg b/build/pgo/blueprint/test.jpg new file mode 100644 index 0000000000..0107be2734 Binary files /dev/null and b/build/pgo/blueprint/test.jpg differ diff --git a/build/pgo/certs/README b/build/pgo/certs/README new file mode 100644 index 0000000000..7036e4a87e --- /dev/null +++ b/build/pgo/certs/README @@ -0,0 +1,5 @@ +This directory contains CA and server certificates for testing. 
+ +You can find instructions on how to add or modify certificates at: + +https://firefox-source-docs.mozilla.org/build/buildsystem/test_certificates.html diff --git a/build/pgo/certs/alternateroot.ca b/build/pgo/certs/alternateroot.ca new file mode 100644 index 0000000000..9fa2078b4c --- /dev/null +++ b/build/pgo/certs/alternateroot.ca @@ -0,0 +1,18 @@ +-----BEGIN CERTIFICATE----- +MIIC+zCCAeOgAwIBAgIUb/+pohOlRCuQgMy2GJLCUQq+HeMwDQYJKoZIhvcNAQEL +BQAwJjEkMCIGA1UEAwwbQWx0ZXJuYXRlIFRydXN0ZWQgQXV0aG9yaXR5MCIYDzIw +MTAwMTAxMDAwMDAwWhgPMjA1MDAxMDEwMDAwMDBaMCYxJDAiBgNVBAMMG0FsdGVy +bmF0ZSBUcnVzdGVkIEF1dGhvcml0eTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC +AQoCggEBAMF1xlJmCZ93CCpnkfG4dsN/XOU4sGxKzSKxy9RvplraKt1ByMJJisSj +s8H2FIf0G2mJQb2ApRw8EgJExYSkxEgzBeUTjAEGzwi+moYnYLrmoujzbyPF2YMT +ud+vN4NF2s5R1Nbc0qbLPMcG680wcOyYzOQKpZHXKVp/ccW+ZmkdKy3+yElEWQvF +o+pJ/ZOx11NAXxdzdpmVhmYlR5ftQmkIiAgRQiBpmIpD/uSM5oeB3SK2ppzSg3UT +H5MrEozihvp9JRwGKtJ+8Bbxh83VToMrNbiTD3S6kKqLx2FnJCqx/W1iFA0YxMC4 +xo/DdIRXMkrX3obmVS8dHhkdcSFo07sCAwEAAaMdMBswCwYDVR0PBAQDAgEGMAwG +A1UdEwQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAAS+qy/sIFV+oia7zsyFhe3X +j3ZHSvmqJ4mxIg5KOPVP2NvDaxD/+pysxGLf69QDRjIsePBdRJz0zZoVl9pSXIn1 +Kpk0sjzKX2bJtAomog+ZnAZUxtLzoXy/aqaheWm8cRJ8qFOJtSMDRrLISqBXCQLO +ECqXIxf3Nt3S+Riu2Pam3YymFdtmqUJvLhhekWtEEnXyh/xfAsoUgS3SQ27c4dCY +R7XGnFsaXrKXv93QeJmtfvrAZMXEuKaBGPSNHV6QH0S0Loh9Jed2Zp7GxnFtIPYe +J2Q5qtxa8KD/tgGFpAD74eMBdgQ4SxbA/YqqXIt1lLNcr7wm0cPRpP0vIY3hk8k= +-----END CERTIFICATE----- diff --git a/build/pgo/certs/alternateroot.ca.keyspec b/build/pgo/certs/alternateroot.ca.keyspec new file mode 100644 index 0000000000..cbd5f309c0 --- /dev/null +++ b/build/pgo/certs/alternateroot.ca.keyspec @@ -0,0 +1 @@ +alternate diff --git a/build/pgo/certs/alternateroot.certspec b/build/pgo/certs/alternateroot.certspec new file mode 100644 index 0000000000..d831222020 --- /dev/null +++ b/build/pgo/certs/alternateroot.certspec @@ -0,0 +1,7 @@ +issuer:Alternate Trusted Authority +subject:Alternate Trusted Authority +validity:20100101-20500101 +extension:keyUsage:keyCertSign,cRLSign +extension:basicConstraints:cA, +issuerKey:alternate +subjectKey:alternate diff --git a/build/pgo/certs/badCertDomain.certspec b/build/pgo/certs/badCertDomain.certspec new file mode 100644 index 0000000000..5d13ffae3b --- /dev/null +++ b/build/pgo/certs/badCertDomain.certspec @@ -0,0 +1,3 @@ +subject:www.badcertdomain.example.com +issuer:printableString/CN=Temporary Certificate Authority/O=Mozilla Testing/OU=Profile Guided Optimization +extension:subjectAlternativeName:www.badcertdomain.example.com diff --git a/build/pgo/certs/bug413909cert.certspec b/build/pgo/certs/bug413909cert.certspec new file mode 100644 index 0000000000..ed4100219a --- /dev/null +++ b/build/pgo/certs/bug413909cert.certspec @@ -0,0 +1,3 @@ +subject:bug413909.xn--hxajbheg2az3al.xn--jxalpdlp +issuer:printableString/CN=Temporary Certificate Authority/O=Mozilla Testing/OU=Profile Guided Optimization +extension:subjectAlternativeName:bug413909.xn--hxajbheg2az3al.xn--jxalpdlp diff --git a/build/pgo/certs/cert9.db b/build/pgo/certs/cert9.db new file mode 100644 index 0000000000..9206c49b32 Binary files /dev/null and b/build/pgo/certs/cert9.db differ diff --git a/build/pgo/certs/dynamicPinningBad.certspec b/build/pgo/certs/dynamicPinningBad.certspec new file mode 100644 index 0000000000..1d377103d2 --- /dev/null +++ b/build/pgo/certs/dynamicPinningBad.certspec @@ -0,0 +1,5 @@ +subject:bad.include-subdomains.pinning-dynamic.example.com +issuer:Alternate Trusted Authority 
+extension:subjectAlternativeName:bad.include-subdomains.pinning-dynamic.example.com +subjectKey:alternate +issuerKey:alternate diff --git a/build/pgo/certs/dynamicPinningBad.server.keyspec b/build/pgo/certs/dynamicPinningBad.server.keyspec new file mode 100644 index 0000000000..cbd5f309c0 --- /dev/null +++ b/build/pgo/certs/dynamicPinningBad.server.keyspec @@ -0,0 +1 @@ +alternate diff --git a/build/pgo/certs/dynamicPinningGood.certspec b/build/pgo/certs/dynamicPinningGood.certspec new file mode 100644 index 0000000000..2db3836919 --- /dev/null +++ b/build/pgo/certs/dynamicPinningGood.certspec @@ -0,0 +1,3 @@ +subject:dynamic-pinning.example.com +issuer:printableString/CN=Temporary Certificate Authority/O=Mozilla Testing/OU=Profile Guided Optimization +extension:subjectAlternativeName:*.include-subdomains.pinning-dynamic.example.com,*.pinning-dynamic.example.com diff --git a/build/pgo/certs/escapeattack1.certspec b/build/pgo/certs/escapeattack1.certspec new file mode 100644 index 0000000000..df34d5920c --- /dev/null +++ b/build/pgo/certs/escapeattack1.certspec @@ -0,0 +1,3 @@ +subject:www.bank1.com\00www.bad-guy.com +issuer:printableString/CN=Temporary Certificate Authority/O=Mozilla Testing/OU=Profile Guided Optimization +extension:subjectAlternativeName:www.bank1.com\00www.bad-guy.com diff --git a/build/pgo/certs/evintermediate.ca b/build/pgo/certs/evintermediate.ca new file mode 100644 index 0000000000..84a6d8e802 --- /dev/null +++ b/build/pgo/certs/evintermediate.ca @@ -0,0 +1,26 @@ +-----BEGIN CERTIFICATE----- +MIIEfDCCA2SgAwIBAgIUETbLA86peOWkUFhyKYIuZVGUEygwDQYJKoZIhvcNAQEL +BQAwgdwxCzAJBgNVBAYTAlVTMQswCQYDVQQIEwJDQTEWMBQGA1UEBxMNTW91bnRh +aW4gVmlldzEjMCEGA1UEChMaTW96aWxsYSAtIEVWIGRlYnVnIHRlc3QgQ0ExHTAb +BgNVBAsTFFNlY3VyaXR5IEVuZ2luZWVyaW5nMTYwNAYDVQQDEy1FViBUZXN0aW5n +ICh1bnRydXN0d29ydGh5KSBDQS9uYW1lPWV2LXRlc3QtY2ExLDAqBgkqhkiG9w0B +CQEWHWNoYXJsYXRhbkB0ZXN0aW5nLmV4YW1wbGUuY29tMCIYDzIwMTAwMTAxMDAw +MDAwWhgPMjA1MDAxMDEwMDAwMDBaMIHcMQswCQYDVQQGEwJVUzELMAkGA1UECBMC +Q0ExFjAUBgNVBAcTDU1vdW50YWluIFZpZXcxIzAhBgNVBAoTGk1vemlsbGEgLSBF +ViBkZWJ1ZyB0ZXN0IENBMR0wGwYDVQQLExRTZWN1cml0eSBFbmdpbmVlcmluZzE2 +MDQGA1UEAxMtRVYgVGVzdGluZyAodW50cnVzdHdvcnRoeSkgQ0EvbmFtZT1ldi10 +ZXN0LWNhMSwwKgYJKoZIhvcNAQkBFh1jaGFybGF0YW5AdGVzdGluZy5leGFtcGxl +LmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALVJiVydABCNEaH5 +n4ep49Gl21367PGI2le/ZBNojyzkciz/EJA4wXQCyToqRz29KGrtP9zTY89aKRR3 +Ab3YGNdhW/k1a9XTyDNqqowJcTaKBsPNRGG5PlFCThdEuy6q1GqrOM4ZaCGWH4dx +ShZjaT8JdhzfTWuhJerOx74nDTiPeJ9s33iuMUTtKMReeSk4Y6eiKkiYCjakDnLV +ecm5Jd/4x5M2L/1ol6fBdUxel8lnw+rdGq6KoszONIoBabgOKKLXDBqWDG8zXy2g +m5tkP1q/uknoqqmB6WDifYdIC91V3ZQX+hhQn7tVTM+BpDl+i6gSijS98nhlwYnl +c0+yKQUCAwEAAaMwMC4wCwYDVR0PBAQDAgEGMAwGA1UdEwQFMAMBAf8wEQYDVR0g +BAowCDAGBgRVHSAAMA0GCSqGSIb3DQEBCwUAA4IBAQArG5slgBRJuytlKFa4qcHW +pAOfjN9fwi57fDds1yNv6tXhESdkbVPhIgw+GanVbrVcorGdCkfB51+dPJM+cBgH +HSwEB7TQnNYvm/csA1zH4n+CnX9nBL7dwK63n6dyR9f1uvu6KSB+YJm3amKil85a +d7HeDWdh+gNhC58lEC2QzuOMivP593aS5vLJHfp8pjc21XJkO8M7SRw44OJKYq9/ +v0k6v4SznbfZzSLg3gM4aSNuCLExUtUY2myxPFwJs9QQ4xx5zJTjJTRlpxUm630Z +n4IYlseao949U+UbBNU4PZKH7dzSQzfhdFJpvK3dsPOPNnHYiXO0xAhsEvvjq8zQ +-----END CERTIFICATE----- diff --git a/build/pgo/certs/evintermediate.ca.keyspec b/build/pgo/certs/evintermediate.ca.keyspec new file mode 100644 index 0000000000..1a3d76a550 --- /dev/null +++ b/build/pgo/certs/evintermediate.ca.keyspec @@ -0,0 +1 @@ +ev diff --git a/build/pgo/certs/evintermediate.certspec b/build/pgo/certs/evintermediate.certspec new file mode 100644 index 0000000000..a04850d53f --- /dev/null +++ 
b/build/pgo/certs/evintermediate.certspec @@ -0,0 +1,7 @@ +issuer:printableString/C=US/ST=CA/L=Mountain View/O=Mozilla - EV debug test CA/OU=Security Engineering/CN=EV Testing (untrustworthy) CA/name=ev-test-ca/emailAddress=charlatan@testing.example.com +subject:printableString/C=US/ST=CA/L=Mountain View/O=Mozilla - EV debug test CA/OU=Security Engineering/CN=EV Testing (untrustworthy) CA/name=ev-test-ca/emailAddress=charlatan@testing.example.com +subjectKey:ev +validity:20100101-20500101 +extension:keyUsage:keyCertSign,cRLSign +extension:basicConstraints:cA, +extension:certificatePolicies:any diff --git a/build/pgo/certs/expired.certspec b/build/pgo/certs/expired.certspec new file mode 100644 index 0000000000..3193168130 --- /dev/null +++ b/build/pgo/certs/expired.certspec @@ -0,0 +1,4 @@ +subject:expired.example.com +issuer:printableString/CN=Temporary Certificate Authority/O=Mozilla Testing/OU=Profile Guided Optimization +extension:subjectAlternativeName:expired.example.com +validity:20100105-20100106 diff --git a/build/pgo/certs/imminently_distrusted.certspec b/build/pgo/certs/imminently_distrusted.certspec new file mode 100644 index 0000000000..e44e4e8e07 --- /dev/null +++ b/build/pgo/certs/imminently_distrusted.certspec @@ -0,0 +1,4 @@ +issuer:printableString/CN=Temporary Certificate Authority/O=Mozilla Testing/OU=Profile Guided Optimization +subject:printableString/CN=Imminently Distrusted End Entity +validity:20100101-20500101 +extension:subjectAlternativeName:imminently-distrusted.example.com diff --git a/build/pgo/certs/key4.db b/build/pgo/certs/key4.db new file mode 100644 index 0000000000..3ccd3336f2 Binary files /dev/null and b/build/pgo/certs/key4.db differ diff --git a/build/pgo/certs/mochitest.certspec b/build/pgo/certs/mochitest.certspec new file mode 100644 index 0000000000..31f926290e --- /dev/null +++ b/build/pgo/certs/mochitest.certspec @@ -0,0 +1,3 @@ +subject:Mochitest client +issuer:printableString/CN=Temporary Certificate Authority/O=Mozilla Testing/OU=Profile Guided Optimization +serialNumber:3 diff --git a/build/pgo/certs/mochitest.client b/build/pgo/certs/mochitest.client new file mode 100644 index 0000000000..9e965a414d Binary files /dev/null and b/build/pgo/certs/mochitest.client differ diff --git a/build/pgo/certs/mochitest.client.keyspec b/build/pgo/certs/mochitest.client.keyspec new file mode 100644 index 0000000000..4ad96d5159 --- /dev/null +++ b/build/pgo/certs/mochitest.client.keyspec @@ -0,0 +1 @@ +default diff --git a/build/pgo/certs/noSubjectAltName.certspec b/build/pgo/certs/noSubjectAltName.certspec new file mode 100644 index 0000000000..dcbda9ee6e --- /dev/null +++ b/build/pgo/certs/noSubjectAltName.certspec @@ -0,0 +1,2 @@ +issuer:printableString/CN=Temporary Certificate Authority/O=Mozilla Testing/OU=Profile Guided Optimization +subject:certificate without subjectAlternativeNames diff --git a/build/pgo/certs/pgoca.ca b/build/pgo/certs/pgoca.ca new file mode 100644 index 0000000000..31cf9c33a0 --- /dev/null +++ b/build/pgo/certs/pgoca.ca @@ -0,0 +1,21 @@ +-----BEGIN CERTIFICATE----- +MIIDgzCCAmugAwIBAgIUQx5pxD+JMg1qPztfSg1Ucw8xsz0wDQYJKoZIhvcNAQEL +BQAwajEoMCYGA1UEAxMfVGVtcG9yYXJ5IENlcnRpZmljYXRlIEF1dGhvcml0eTEY +MBYGA1UEChMPTW96aWxsYSBUZXN0aW5nMSQwIgYDVQQLExtQcm9maWxlIEd1aWRl +ZCBPcHRpbWl6YXRpb24wIhgPMjAxMDAxMDEwMDAwMDBaGA8yMDUwMDEwMTAwMDAw +MFowajEoMCYGA1UEAxMfVGVtcG9yYXJ5IENlcnRpZmljYXRlIEF1dGhvcml0eTEY +MBYGA1UEChMPTW96aWxsYSBUZXN0aW5nMSQwIgYDVQQLExtQcm9maWxlIEd1aWRl +ZCBPcHRpbWl6YXRpb24wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC6 
+iFGoRI4W1kH9braIBjYQPTwT2erkNUq07PVoV2wke8HHJajg2B+9sZwGm24ahvJr +4q9adWtqZHEIeqVap0WH9xzVJJwCfs1D/B5p0DggKZOrIMNJ5Nu5TMJrbA7tFYIP +8X6taRqx0wI6iypB7qdw4A8Njf1mCyuwJJKkfbmIYXmQsVeQPdI7xeC4SB+oN9OI +Q+8nFthVt2Zaqn4CkC86exCABiTMHGyXrZZhW7filhLAdTGjDJHdtMr3/K0dJdMJ +77kXDqdo4bN7LyJvaeO0ipVhHe4m1iWdq5EITjbLHCQELL8Wiy/l8Y+ZFzG4s/5J +I/pyUcQx1QOs2hgKNe2NAgMBAAGjHTAbMAsGA1UdDwQEAwIBBjAMBgNVHRMEBTAD +AQH/MA0GCSqGSIb3DQEBCwUAA4IBAQAYFnzom5ROuxDR3WFQatxHs5ekni4uUbEx +6pN8fOzcsllEfCwvmMLVCh36ffSguf/UlmR5Hq1s/S7iMiic5mnK4aaVwixzS4Z3 +ug7Dc+fG7j0VOcBTKWU983xUK/1F409ghQ5KlO38KA7hyx1kzjYjzvxLaweDXRqr +J/RZ1ACP2fKNziEOCbXzzzEx39oc17NBV+LotPFzKZ+pcxMDrtiNts4hwCw/UUw7 +Gp0tKte2CevGJbzjPHP3/6FUzHfOatZSpxEmvAcSTDp5sjdVuOStx4v6jVrwvyAz +VQzDPzaRWh3NtY5JNasrhExr5qxQlygfBngCMgZ9gESG9FvLG+sx +-----END CERTIFICATE----- diff --git a/build/pgo/certs/pgoca.ca.keyspec b/build/pgo/certs/pgoca.ca.keyspec new file mode 100644 index 0000000000..4ad96d5159 --- /dev/null +++ b/build/pgo/certs/pgoca.ca.keyspec @@ -0,0 +1 @@ +default diff --git a/build/pgo/certs/pgoca.certspec b/build/pgo/certs/pgoca.certspec new file mode 100644 index 0000000000..058e5b55a5 --- /dev/null +++ b/build/pgo/certs/pgoca.certspec @@ -0,0 +1,5 @@ +issuer:printableString/CN=Temporary Certificate Authority/O=Mozilla Testing/OU=Profile Guided Optimization +subject:printableString/CN=Temporary Certificate Authority/O=Mozilla Testing/OU=Profile Guided Optimization +validity:20100101-20500101 +extension:keyUsage:keyCertSign,cRLSign +extension:basicConstraints:cA, diff --git a/build/pgo/certs/selfsigned.certspec b/build/pgo/certs/selfsigned.certspec new file mode 100644 index 0000000000..be255b497a --- /dev/null +++ b/build/pgo/certs/selfsigned.certspec @@ -0,0 +1,3 @@ +issuer:self-signed.example.com +subject:self-signed.example.com +extension:subjectAlternativeName:self-signed.example.com diff --git a/build/pgo/certs/sha1_end_entity.certspec b/build/pgo/certs/sha1_end_entity.certspec new file mode 100644 index 0000000000..eced653a9a --- /dev/null +++ b/build/pgo/certs/sha1_end_entity.certspec @@ -0,0 +1,4 @@ +subject:sha1ee.example.com +issuer:printableString/CN=Temporary Certificate Authority/O=Mozilla Testing/OU=Profile Guided Optimization +extension:subjectAlternativeName:sha1ee.example.com +signature:sha1WithRSAEncryption diff --git a/build/pgo/certs/sha256_end_entity.certspec b/build/pgo/certs/sha256_end_entity.certspec new file mode 100644 index 0000000000..c3cb5fda2a --- /dev/null +++ b/build/pgo/certs/sha256_end_entity.certspec @@ -0,0 +1,4 @@ +subject:sha256ee.example.com +issuer:printableString/CN=Temporary Certificate Authority/O=Mozilla Testing/OU=Profile Guided Optimization +extension:subjectAlternativeName:sha256ee.example.com +signature:sha256WithRSAEncryption diff --git a/build/pgo/certs/staticPinningBad.certspec b/build/pgo/certs/staticPinningBad.certspec new file mode 100644 index 0000000000..7589ff6fc3 --- /dev/null +++ b/build/pgo/certs/staticPinningBad.certspec @@ -0,0 +1,5 @@ +subject:include-subdomains.pinning.example.com +issuer:Alternate Trusted Authority +extension:subjectAlternativeName:include-subdomains.pinning.example.com +subjectKey:alternate +issuerKey:alternate diff --git a/build/pgo/certs/staticPinningBad.server.keyspec b/build/pgo/certs/staticPinningBad.server.keyspec new file mode 100644 index 0000000000..cbd5f309c0 --- /dev/null +++ b/build/pgo/certs/staticPinningBad.server.keyspec @@ -0,0 +1 @@ +alternate diff --git a/build/pgo/certs/unknown_ca.certspec b/build/pgo/certs/unknown_ca.certspec new file mode 100644 
index 0000000000..40e1bedc70 --- /dev/null +++ b/build/pgo/certs/unknown_ca.certspec @@ -0,0 +1,5 @@ +issuer:Unknown CA +subject:Unknown CA +validity:20100101-20500101 +extension:keyUsage:keyCertSign,cRLSign +extension:basicConstraints:cA, diff --git a/build/pgo/certs/untrusted.certspec b/build/pgo/certs/untrusted.certspec new file mode 100644 index 0000000000..445d3451b0 --- /dev/null +++ b/build/pgo/certs/untrusted.certspec @@ -0,0 +1,3 @@ +subject:untrusted.example.com +issuer:Unknown CA +extension:subjectAlternativeName:untrusted.example.com diff --git a/build/pgo/certs/untrustedandexpired.certspec b/build/pgo/certs/untrustedandexpired.certspec new file mode 100644 index 0000000000..bed16c7694 --- /dev/null +++ b/build/pgo/certs/untrustedandexpired.certspec @@ -0,0 +1,4 @@ +subject:untrusted-expired.example.com +issuer:Unknown CA +extension:subjectAlternativeName:untrusted-expired.example.com +validity:20121012-20121012 diff --git a/build/pgo/favicon.ico b/build/pgo/favicon.ico new file mode 100644 index 0000000000..d44438903b Binary files /dev/null and b/build/pgo/favicon.ico differ diff --git a/build/pgo/genpgocert.py b/build/pgo/genpgocert.py new file mode 100644 index 0000000000..8b3a83c196 --- /dev/null +++ b/build/pgo/genpgocert.py @@ -0,0 +1,256 @@ +#!/usr/bin/env python +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# This script exists to generate the Certificate Authority and server +# certificates used for SSL testing in Mochitest. The already generated +# certs are located at $topsrcdir/build/pgo/certs/ . + +import mozinfo +import os +import random +import re +import shutil +import subprocess +import sys + +from mozbuild.base import MozbuildObject, BinaryNotFoundException +from mozfile import NamedTemporaryFile, TemporaryDirectory +from mozprofile.permissions import ServerLocations +from distutils.spawn import find_executable + +dbFiles = [ + re.compile("^cert[0-9]+\.db$"), + re.compile("^key[0-9]+\.db$"), + re.compile("^secmod\.db$"), +] + + +def unlinkDbFiles(path): + for root, dirs, files in os.walk(path): + for name in files: + for dbFile in dbFiles: + if dbFile.match(name) and os.path.exists(os.path.join(root, name)): + os.unlink(os.path.join(root, name)) + + +def dbFilesExist(path): + for root, dirs, files in os.walk(path): + for name in files: + for dbFile in dbFiles: + if dbFile.match(name) and os.path.exists(os.path.join(root, name)): + return True + return False + + +def runUtil(util, args, inputdata=None, outputstream=None): + env = os.environ.copy() + if mozinfo.os == "linux": + pathvar = "LD_LIBRARY_PATH" + app_path = os.path.dirname(util) + if pathvar in env: + env[pathvar] = "%s%s%s" % (app_path, os.pathsep, env[pathvar]) + else: + env[pathvar] = app_path + proc = subprocess.Popen( + [util] + args, + env=env, + stdin=subprocess.PIPE if inputdata else None, + stdout=outputstream, + universal_newlines=True, + ) + proc.communicate(inputdata) + return proc.returncode + + +def createRandomFile(randomFile): + for count in xrange(0, 2048): + randomFile.write(chr(random.randint(0, 255))) + + +def writeCertspecForServerLocations(fd): + locations = ServerLocations( + os.path.join(build.topsrcdir, "build", "pgo", "server-locations.txt") + ) + SAN = [] + for loc in [ + i for i in iter(locations) if i.scheme == "https" and "nocert" not in i.options + ]: + customCertOption = False + customCertRE = 
re.compile("^cert=(?:\w+)") + for _ in [i for i in loc.options if customCertRE.match(i)]: + customCertOption = True + break + + if "ipV4Address" in loc.options: + loc.host = "ip4:" + loc.host + + if not customCertOption: + SAN.append(loc.host) + + fd.write( + "issuer:printableString/CN=Temporary Certificate Authority/O=Mozilla Testing/OU=Profile Guided Optimization\n" # NOQA: E501 + ) + fd.write("subject:{}\n".format(SAN[0])) + fd.write("extension:subjectAlternativeName:{}\n".format(",".join(SAN))) + + +def constructCertDatabase(build, srcDir): + try: + certutil = build.get_binary_path(what="certutil") + pk12util = build.get_binary_path(what="pk12util") + except BinaryNotFoundException as e: + print("{}\n\n{}\n".format(e, e.help())) + return 1 + openssl = find_executable("openssl") + pycert = os.path.join( + build.topsrcdir, "security", "manager", "ssl", "tests", "unit", "pycert.py" + ) + pykey = os.path.join( + build.topsrcdir, "security", "manager", "ssl", "tests", "unit", "pykey.py" + ) + + with NamedTemporaryFile(mode="wt+") as pwfile, TemporaryDirectory() as pemfolder: + pwfile.write("\n") + pwfile.flush() + + if dbFilesExist(srcDir): + # Make sure all DB files from src are really deleted + unlinkDbFiles(srcDir) + + # Copy all .certspec and .keyspec files to a temporary directory + for root, dirs, files in os.walk(srcDir): + for spec in [ + i for i in files if i.endswith(".certspec") or i.endswith(".keyspec") + ]: + shutil.copyfile(os.path.join(root, spec), os.path.join(pemfolder, spec)) + + # Write a certspec for the "server-locations.txt" file to that temporary directory + pgoserver_certspec = os.path.join(pemfolder, "pgoserver.certspec") + if os.path.exists(pgoserver_certspec): + raise Exception( + "{} already exists, which isn't allowed".format(pgoserver_certspec) + ) + with open(pgoserver_certspec, "w") as fd: + writeCertspecForServerLocations(fd) + + # Generate certs for all certspecs + for root, dirs, files in os.walk(pemfolder): + for certspec in [i for i in files if i.endswith(".certspec")]: + name = certspec.split(".certspec")[0] + pem = os.path.join(pemfolder, "{}.cert.pem".format(name)) + + print("Generating public certificate {} (pem={})".format(name, pem)) + + with open(os.path.join(root, certspec), "r") as certspec_file: + certspec_data = certspec_file.read() + with open(pem, "w") as pem_file: + status = runUtil( + pycert, [], inputdata=certspec_data, outputstream=pem_file + ) + if status: + return status + + status = runUtil( + certutil, + [ + "-A", + "-n", + name, + "-t", + "P,,", + "-i", + pem, + "-d", + srcDir, + "-f", + pwfile.name, + ], + ) + if status: + return status + + for keyspec in [i for i in files if i.endswith(".keyspec")]: + parts = keyspec.split(".") + name = parts[0] + key_type = parts[1] + if key_type not in ["ca", "client", "server"]: + raise Exception( + "{}: keyspec filenames must be of the form XXX.client.keyspec " + "or XXX.ca.keyspec (key_type={})".format(keyspec, key_type) + ) + key_pem = os.path.join(pemfolder, "{}.key.pem".format(name)) + + print("Generating private key {} (pem={})".format(name, key_pem)) + + with open(os.path.join(root, keyspec), "r") as keyspec_file: + keyspec_data = keyspec_file.read() + with open(key_pem, "w") as pem_file: + status = runUtil( + pykey, [], inputdata=keyspec_data, outputstream=pem_file + ) + if status: + return status + + cert_pem = os.path.join(pemfolder, "{}.cert.pem".format(name)) + if not os.path.exists(cert_pem): + raise Exception( + "There has to be a corresponding certificate named {} for " + "the 
keyspec {}".format(cert_pem, keyspec) + ) + + p12 = os.path.join(pemfolder, "{}.key.p12".format(name)) + print( + "Converting private key {} to PKCS12 (p12={})".format(key_pem, p12) + ) + status = runUtil( + openssl, + [ + "pkcs12", + "-export", + "-inkey", + key_pem, + "-in", + cert_pem, + "-name", + name, + "-out", + p12, + "-passout", + "file:" + pwfile.name, + ], + ) + if status: + return status + + print("Importing private key {} to database".format(key_pem)) + status = runUtil( + pk12util, + ["-i", p12, "-d", srcDir, "-w", pwfile.name, "-k", pwfile.name], + ) + if status: + return status + + if key_type == "ca": + shutil.copyfile( + cert_pem, os.path.join(srcDir, "{}.ca".format(name)) + ) + elif key_type == "client": + shutil.copyfile(p12, os.path.join(srcDir, "{}.client".format(name))) + elif key_type == "server": + pass # Nothing to do for server keys + else: + raise Exception( + "State error: Unknown keyspec key_type: {}".format(key_type) + ) + + return 0 + + +build = MozbuildObject.from_environment() +certdir = os.path.join(build.topsrcdir, "build", "pgo", "certs") +certificateStatus = constructCertDatabase(build, certdir) +if certificateStatus: + print("TEST-UNEXPECTED-FAIL | SSL Server Certificate generation") +sys.exit(certificateStatus) diff --git a/build/pgo/index.html b/build/pgo/index.html new file mode 100644 index 0000000000..107e4e9bed --- /dev/null +++ b/build/pgo/index.html @@ -0,0 +1,109 @@ + diff --git a/build/pgo/js-input/3d-thingy.html b/build/pgo/js-input/3d-thingy.html new file mode 100644 index 0000000000..9e54299df4 --- /dev/null +++ b/build/pgo/js-input/3d-thingy.html @@ -0,0 +1,390 @@ + + +3d thingy + + + + +
Text to be replaced with graphics.
+ + + + + diff --git a/build/pgo/js-input/crypto-otp.html b/build/pgo/js-input/crypto-otp.html new file mode 100644 index 0000000000..b811538ba2 --- /dev/null +++ b/build/pgo/js-input/crypto-otp.html @@ -0,0 +1,1344 @@ + + + + +One-Time Pad Generator + + + + + + + + + + +

  One-Time Pad Generator

+ +

+This page, which requires that your browser support JavaScript +(see Why JavaScript below), +generates one-time pads or password lists in a variety of +forms. It is based on a high-quality pseudorandom sequence +generator, which can be seeded either from the current date +and time, or from a seed you provide. Fill in the form below +to select the format of the pad and press “Generate” to +create the pad in the text box. You can then copy and paste +the generated pad into another window to use as you wish. +Each of the labels on the request form is linked to a description +of that parameter.

+ +
+ +

+Output: +Number of keys: +Line length: +
+Format: +Key length: +Group length: + +
+Composition: +Key text: Numeric + Word-like + Alphabetic + Gibberish +
+Letters: + + + Random separators + Include signatures + +
+Seed: + From clock + User-defined: + +
+ +  + +  +
+ +

+ +
+ + + +

Details

+ +

+Each of the fields in the one-time pad request form is described +below. +

+ +

Output

+ +

Number of keys

+ +

+Enter the number of keys you'd like to generate. If you generate +more than fit in the results text box, you can use the scroll +bar to view the additional lines. +

+ +

Line length

+ +

+Lines in the output will be limited to the given length (or contain +only one key if the line length is less than required for a single +key). If the line length is greater than the width of the results +box, you can use the horizontal scroll bar to view the rest of the +line. Enter 0 to force one key per line; this is handy +when you're preparing a list of keys to be read by a computer program. +

+ +

Format

+ +

Key length

+ +

+Each key will contain this number of characters, not counting +separators between groups. +

+ +

Group length

+ +

+If a nonzero value is entered in this field, the key will be broken +into groups of the given number of characters by separators. Humans +find it easier to read and remember sequences of characters when +divided into groups of five or fewer characters. +

+ +

Composition

+ +

Key text

+ +

+This set of radio buttons lets you select the character set used in +the keys. The alternatives are listed in order of +increasing security. +

+ +
+
+
Numeric
+
Keys contain only the decimal digits “0” through “9”. + Least secure.
+ +
Word-like
+
Keys are composed of alphabetic characters which obey the + digraph statistics of English text. Such keys contain + sequences of vowels and consonants familiar to speakers + of Western languages, and are therefore usually easier to + memorise but, for a given key length, are less secure than + purely random letters.
+ +
Alphabetic
+
Keys consist of letters of the alphabet chosen at random. + Each character has an equal probability of being one of + the 26 letters.
+ +
Gibberish
+
Keys use most of the printable ASCII character set, excluding + only characters frequently used for quoting purposes. This + option provides the greatest security for a given key length, + but most people find keys like this difficult to memorise or + even transcribe from a printed pad. If a human is in the loop, + it's often better to use a longer alphabetic or word-like key. + Most secure.
+
+ +
+ +
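For illustration only, here is a minimal Python sketch of how keys could be drawn from the character classes described above. The function name and the exact "gibberish" alphabet are assumptions made for this sketch, and it uses the operating system's randomness rather than the page's seeded JavaScript generator; word-like keys, which need a digraph table, are omitted.

    # Sketch only: build a key from one of the character classes described above.
    import secrets
    import string

    COMPOSITIONS = {
        "numeric": string.digits,                  # least secure per character
        "alphabetic": string.ascii_lowercase,      # 26 equiprobable letters
        # "Gibberish": most printable ASCII, minus characters often used for quoting
        # (the exact exclusion set here is an assumption).
        "gibberish": "".join(c for c in string.printable[:94] if c not in "\"'`\\"),
    }

    def make_key(composition, length=20):
        alphabet = COMPOSITIONS[composition]
        return "".join(secrets.choice(alphabet) for _ in range(length))

    print(make_key("numeric"))
    print(make_key("alphabetic"))
    print(make_key("gibberish"))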

Letters

+ +

+The case of letters in keys generated with Word-like, Alphabetic, and +Gibberish key text will be as chosen. Most people find it easier to +read lower case letters than all capitals, but for some applications +(for example, where keys must be scanned optically by hardware that +only recognises capital letters), capitals are required. Selecting +“Mixed case” creates keys with a mix of upper- and +lower-case letters; such keys are more secure than those with uniform +letter case, but do not pass the “telephone test”: you +can't read them across a (hopefully secure) voice link without having +to indicate whether each letter is or is not a capital. +

+ +

Random separators

+ +

+When the Key length is longer than +a nonzero Group length specification, +the key is divided into sequences of the given group length +by separator characters. By default, a hyphen, “-”, is used +to separate groups. If you check this box, separators will be +chosen at random among punctuation marks generally acceptable +for applications such as passwords. If you're generating passwords +for a computer system, random separators dramatically increase +the difficulty of guessing passwords by exhaustive search. +
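As a rough illustration of the Group length and Random separators behaviour described above, the Python sketch below breaks a key into fixed-size groups joined either by a hyphen or by randomly chosen punctuation. The separator set shown is an assumption for this sketch, not the set this page actually uses.

    # Sketch only: split a key into groups, optionally with random separators.
    import secrets

    SEPARATORS = "-+,.:;!?"   # hypothetical "generally acceptable" punctuation

    def group_key(key, group_length, random_separators=False):
        if group_length <= 0:
            return key                                   # no grouping requested
        groups = [key[i:i + group_length] for i in range(0, len(key), group_length)]
        if not random_separators:
            return "-".join(groups)                      # default hyphen separator
        out = groups[0]
        for g in groups[1:]:
            out += secrets.choice(SEPARATORS) + g        # random separator per gap
        return out

    print(group_key("qzmvkatrbx", 5))                          # e.g. qzmvk-atrbx
    print(group_key("qzmvkatrbx", 5, random_separators=True))  # e.g. qzmvk.atrbx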

+ +

Include signatures

+ +

+ +When this box is checked, at the end of the list of keys, preceded by +a line beginning with ten dashes “-”, the 128 bit MD5 signature of +each key is given, one per line, with signatures expressed as 32 +hexadecimal digits. Key signatures can be used to increase security +when keys are used to control access to computer systems or databases. +Instead of storing a copy of the keys, the computer stores their +signatures. When the user enters a key, its signature is computed +with the same MD5 algorithm used to generate it initially, and the key +is accepted only if the signature matches. Since discovering +a key which will generate a given signature is believed to be +computationally prohibitive, even if the list of signatures stored on +the computer is compromised, that information will not permit an +intruder to deduce a valid key. +
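A minimal sketch of the validation scheme described above, using Python's hashlib rather than the page's embedded JavaScript MD5: only the 32-hex-digit signatures are stored, and an entered key is accepted when its recomputed signature matches. The sample keys are hypothetical, and MD5 is shown only because it is what this page describes.

    # Sketch only: store key signatures, not keys, and validate by recomputing.
    import hashlib

    def signature(key):
        return hashlib.md5(key.encode("ascii")).hexdigest()   # 32 hex digits

    stored_signatures = {signature(k) for k in ["qzmvk-atrbx", "wplen-docra"]}

    def accept(entered_key):
        return signature(entered_key) in stored_signatures

    print(accept("qzmvk-atrbx"))  # True
    print(accept("guess"))        # False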

+ +

+Signature calculation is a computationally intense process for which +JavaScript is not ideally suited; be patient while signatures are +generated, especially if your computer has modest +processing speed. +

+ +

+For signature-based validation to be secure, it is essential +the original keys be long enough to prohibit discovery of matching +signatures by exhaustive search. Suppose, for example, one used +four digit numeric keys, as used for Personal Identification +Numbers (PINs) by many credit card systems. Since only 10,000 +different keys exist, one could simply compute the signatures of +every possible key from 0000 through 9999, permitting an attacker who +came into possession of the table of signatures to recover the +keys by a simple lookup process. For maximum security, keys must +contain at least as much information as the 128 bit signatures +computed from them. This implies a minimum key length (not counting +non-random separator characters) for the various key formats as +follows: +

+ + + + + + + + +
Key Composition Minimum Characters
Numeric 39
Word-like 30
Alphabetic 28
Gibberish 20
+ +
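The table above is consistent with requiring at least 128 bits of key entropy, that is roughly ceil(128 / log2(alphabet size)) characters. The short Python check below reproduces the numeric and alphabetic minima exactly; the gibberish and word-like figures depend on assumptions about the effective alphabet size and per-letter entropy, so those values are labelled as such.

    # Sketch only: minimum characters needed to carry 128 bits of entropy.
    import math

    def min_chars(alphabet_size, bits=128):
        return math.ceil(bits / math.log2(alphabet_size))

    print(min_chars(10))   # numeric     -> 39
    print(min_chars(26))   # alphabetic  -> 28
    print(min_chars(85))   # gibberish   -> 20 (assuming roughly 85 usable characters)
    # Word-like text carries about 128/30, roughly 4.3 bits per letter under a
    # digraph model, which matches the 30-character minimum shown above.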

+It should be noted that for many practical applications there is no +need for anything approaching 128-bit security. The guidelines above +apply only in the case where maximum protection is required in the event of +undetected compromise of key signatures. In many +cases, much shorter keys are acceptable, especially when it is assumed +that a compromise of the system's password or signature database would +be only part of a much more serious subversion of all resources +on the system.

+ +

Seed

+ +

+The seed is the starting value which determines all +subsequent values in the pseudorandom sequence used to generate +the one-time pad. Given the seed, the pad can be reproduced. The +seed is a 31-bit number which can be derived from the date and +time at which the one-time pad was requested, or from a +user-defined seed value. If the user-defined seed consists +entirely of decimal digits, it is used directly as the seed, +modulo 2^31; if a string containing non-digit characters +is entered, it is used to compute a hash code which is +used to seed the generator. +
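A hedged Python sketch of the seed handling described above: a decimal seed is used directly modulo 2^31, a non-digit seed is reduced to a hash code, and the clock is used otherwise. The hash shown is a simple stand-in; the page's own JavaScript hash function will differ.

    # Sketch only: derive a 31-bit seed from the clock or a user-supplied string.
    import time

    def derive_seed(user_seed=None):
        if user_seed is None:
            return int(time.time()) % (2 ** 31)    # seed from the current time
        if user_seed.isdigit():
            return int(user_seed) % (2 ** 31)      # digits used directly, mod 2^31
        h = 0
        for ch in user_seed:                       # simple string hash (stand-in)
            h = (h * 31 + ord(ch)) % (2 ** 31)
        return h

    print(derive_seed("123456789012"))
    print(derive_seed("correct horse battery staple"))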

+ +

+When the clock is used to create the seed, the seed value is entered +in the User-defined box to allow you, by checking “User-defined”, +to produce additional pads with the same seed. +

+ +

Why JavaScript?

+ +

+At first glance, JavaScript may seem an odd choice for programming +a page such as this. The one-time pad generator program is rather +large and complicated, and downloading it to your browser takes longer +than would be required for a Java applet or to transfer a +one-time pad generated by a CGI program on the Web server. I chose +JavaScript for two reasons: security and transparency. + +

+ +

+Security. +The sole reason for the existence of one-time pads is to +provide a source of information known only to people to whom +they have been distributed in a secure manner. This means +the generation process cannot involve any link whose security +is suspect. If the pad were generated on a Web server and +transmitted to you, it would have to pass over the +Internet, where any intermediate site might make a copy +of your pad before you even received it. Even if some +mechanism such as encryption could absolutely prevent the +pad's being intercepted, you'd still have no way to be sure +the site generating the pad didn't keep a copy +in a file, conveniently tagged with your Internet address. +

+ +

+In order to have any degree of security, it is essential +that the pad be generated on your computer, without +involving any transmission or interaction with other +sites on the Internet. A Web browser with JavaScript makes +this possible, since the generation program embedded in this +page runs entirely on your own computer and does not +transmit anything over the Internet. Its output appears +only in the text box, allowing you to cut and paste it +to another application. From there on, its security is +up to you. +

+ +

+Security is never absolute. A one-time pad generated with +this page might be compromised in a variety of ways, including +the following: + +

+ +
    +
  • Your Web browser and/or JavaScript interpreter may + contain bugs or deliberate security violations + which report activity on your computer back to some + other Internet site.
  • Some other applet running on another page of your + browser, perhaps without your being aware of its + existence, is spying on other windows.
  • Some other application running on your computer + may have compromised your system's security and + be snooping on your activity.
  • Your Web browser may be keeping a “history log” + + or “cache” of data you generate. Somebody may + come along later and recover a copy of the pad + from that log.
  • The implementation of this page may contain a bug + or deliberate error which makes its output + predictable. This is why transparency, + discussed below, is essential.
  • Your computer's security may have been compromised + physically; when's the last time you checked that a + bug that transmits your keystrokes and/or screen + contents to that white van parked down the street + wasn't lurking inside your computer cabinet?
+ +

+One can whip oneself into a fine fever of paranoia worrying about +things like this. One way to rule out the most probable risks +is to download a copy of the generator page and run it +from a “file:” URL on a computer which has no network +connection whatsoever and is located in a secure location +under your control. And look very carefully at any files +created by your Web browser. You may find the most interesting +things squirreled away there…. +

+ +

+Transparency. +Any security-related tool is only as good as its design +and implementation. Transparency means that, in +essence, all the moving parts are visible so you can judge +for yourself whether the tool merits your confidence. In +the case of a program, this means that source code must +be available, and that you can verify that the program +you're running corresponds to the source code provided. + +

+ +

+The very nature of JavaScript achieves this transparency. +The program is embedded into this actual Web page; to +examine it you need only use your browser's “View Source” +facility, or save the page into a file on your computer +and read it with a text editor. JavaScript's being +an interpreted language eliminates the risk of your running +a program different from the purported source code: with +an interpreted language what you read is what you run. +

+ +

+Transparency is important even if you don't know enough about +programming or security to determine whether the program +contains any flaws. The very fact that it can be examined +by anybody allows those with the required expertise to pass +judgment, and you can form your own conclusions based on +their analysis. +

+ +

Credits

+ +

+ +The pseudorandom sequence generator is based on L'Ecuyer's +two-sequence generator as described in +Communications of the ACM, Vol. 31 (1988), page 742. +A Bays-Durham shuffle is used to guard against regularities +lurking in L'Ecuyer's algorithm; see +ACM Transactions on Mathematical Software, Vol. 2 (1976) +pages 59–64 for details.
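For readers who want a concrete picture, the sketch below implements a combined two-sequence generator in the style of L'Ecuyer (CACM 31, 1988) with a Bays-Durham shuffle layered on top, written in Python. It illustrates the technique only; the constants, state handling, and output scaling of the JavaScript actually embedded in this page may differ.

    # Sketch only: combined L'Ecuyer-style generator plus a Bays-Durham shuffle.
    class CombinedPRNG:
        M1, A1 = 2147483563, 40014
        M2, A2 = 2147483399, 40692
        TABLE_SIZE = 32

        def __init__(self, seed):
            self.s1 = max(1, seed % self.M1)   # both states must be non-zero
            self.s2 = max(1, seed % self.M2)
            # Fill the Bays-Durham shuffle table from the raw generator.
            self.table = [self._raw() for _ in range(self.TABLE_SIZE)]
            self.last = self._raw()

        def _raw(self):
            # Python ints don't overflow, so Schrage's decomposition isn't needed.
            self.s1 = (self.A1 * self.s1) % self.M1
            self.s2 = (self.A2 * self.s2) % self.M2
            return (self.s1 - self.s2) % (self.M1 - 1) + 1   # in [1, M1 - 1]

        def next(self):
            # Bays-Durham: the previous output picks which table slot to emit,
            # hiding short-range correlations in the underlying sequence.
            j = self.last % self.TABLE_SIZE
            self.last = self.table[j]
            self.table[j] = self._raw()
            return self.last

    prng = CombinedPRNG(20061126)
    print([prng.next() % 10 for _ in range(10)])   # ten pseudorandom decimal digits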

+ +

+The JavaScript implementation of the +MD5 message-digest algorithm +was developed by Henri Torgemane; please view the source code of this +page to examine the code, including the copyright notice and +conditions of use. The MD5 algorithm was developed by Ron Rivest. +

+ +

+ +


+ +

+ + + +
+ Valid XHTML 1.0 +
+ +

+by John Walker
+May 26, 1997
+ +Updated: November 2006 +
+ +

+This document is in the public domain. +

+ + + diff --git a/build/pgo/js-input/key.gif b/build/pgo/js-input/key.gif new file mode 100644 index 0000000000..050311fc6b Binary files /dev/null and b/build/pgo/js-input/key.gif differ diff --git a/build/pgo/js-input/sunspider/3d-cube.html b/build/pgo/js-input/sunspider/3d-cube.html new file mode 100644 index 0000000000..453167d44d --- /dev/null +++ b/build/pgo/js-input/sunspider/3d-cube.html @@ -0,0 +1,387 @@ + + + + +SunSpider 3d-cube + + + + +

3d-cube

+
+
+ + + + + + diff --git a/build/pgo/js-input/sunspider/3d-morph.html b/build/pgo/js-input/sunspider/3d-morph.html new file mode 100644 index 0000000000..aca991d395 --- /dev/null +++ b/build/pgo/js-input/sunspider/3d-morph.html @@ -0,0 +1,104 @@ + + + + +SunSpider 3d-morph + + + + +

3d-morph

+
+
+ + + + + + diff --git a/build/pgo/js-input/sunspider/3d-raytrace.html b/build/pgo/js-input/sunspider/3d-raytrace.html new file mode 100644 index 0000000000..2097d4238d --- /dev/null +++ b/build/pgo/js-input/sunspider/3d-raytrace.html @@ -0,0 +1,490 @@ + + + + +SunSpider 3d-raytrace + + + +

3d-raytrace

+
+
+ + + + + + diff --git a/build/pgo/js-input/sunspider/access-binary-trees.html b/build/pgo/js-input/sunspider/access-binary-trees.html new file mode 100644 index 0000000000..c2c6cf3d93 --- /dev/null +++ b/build/pgo/js-input/sunspider/access-binary-trees.html @@ -0,0 +1,100 @@ + + + + +SunSpider access-binary-trees + + + + +

access-binary-trees

+
+
+ + + + + + diff --git a/build/pgo/js-input/sunspider/access-fannkuch.html b/build/pgo/js-input/sunspider/access-fannkuch.html new file mode 100644 index 0000000000..02b306ff25 --- /dev/null +++ b/build/pgo/js-input/sunspider/access-fannkuch.html @@ -0,0 +1,116 @@ + + + + +SunSpider access-fannkuch + + + + +

access-fannkuch

+
+
+ + + + + + diff --git a/build/pgo/js-input/sunspider/access-nbody.html b/build/pgo/js-input/sunspider/access-nbody.html new file mode 100644 index 0000000000..4ef73c8552 --- /dev/null +++ b/build/pgo/js-input/sunspider/access-nbody.html @@ -0,0 +1,219 @@ + + + + +SunSpider access-nbody + + + + +

access-nbody

+
+
+ + + + + + diff --git a/build/pgo/js-input/sunspider/access-nsieve.html b/build/pgo/js-input/sunspider/access-nsieve.html new file mode 100644 index 0000000000..c3ed067f11 --- /dev/null +++ b/build/pgo/js-input/sunspider/access-nsieve.html @@ -0,0 +1,88 @@ + + + + +SunSpider access-nsieve + + + + +

access-nsieve

+
+
+ + + + + + diff --git a/build/pgo/js-input/sunspider/bitops-3bit-bits-in-byte.html b/build/pgo/js-input/sunspider/bitops-3bit-bits-in-byte.html new file mode 100644 index 0000000000..c40be94ef0 --- /dev/null +++ b/build/pgo/js-input/sunspider/bitops-3bit-bits-in-byte.html @@ -0,0 +1,82 @@ + + + + +SunSpider bitops-3bit-bits-in-byte + + + + +

bitops-3bit-bits-in-byte

+
+
+ + + + + + diff --git a/build/pgo/js-input/sunspider/bitops-bits-in-byte.html b/build/pgo/js-input/sunspider/bitops-bits-in-byte.html new file mode 100644 index 0000000000..4022c777f4 --- /dev/null +++ b/build/pgo/js-input/sunspider/bitops-bits-in-byte.html @@ -0,0 +1,72 @@ + + + + +SunSpider bitops-bits-in-byte + + + + +

bitops-bits-in-byte

+
+
+ + + + + + diff --git a/build/pgo/js-input/sunspider/bitops-bitwise-and.html b/build/pgo/js-input/sunspider/bitops-bitwise-and.html new file mode 100644 index 0000000000..cca5130400 --- /dev/null +++ b/build/pgo/js-input/sunspider/bitops-bitwise-and.html @@ -0,0 +1,78 @@ + + + + +SunSpider bitops-bitwise-and + + + + +

bitops-bitwise-and

+
+
+ + + + + + diff --git a/build/pgo/js-input/sunspider/bitops-nsieve-bits.html b/build/pgo/js-input/sunspider/bitops-nsieve-bits.html new file mode 100644 index 0000000000..1849f9da2f --- /dev/null +++ b/build/pgo/js-input/sunspider/bitops-nsieve-bits.html @@ -0,0 +1,82 @@ + + + + +SunSpider bitops-nsieve-bits + + + + +

bitops-nsieve-bits

+
+
+ + + + + + diff --git a/build/pgo/js-input/sunspider/controlflow-recursive.html b/build/pgo/js-input/sunspider/controlflow-recursive.html new file mode 100644 index 0000000000..9a9651d4b6 --- /dev/null +++ b/build/pgo/js-input/sunspider/controlflow-recursive.html @@ -0,0 +1,75 @@ + + + + +SunSpider controlflow-recursive + + + + +

controlflow-recursive

+
+
+ + + + + + diff --git a/build/pgo/js-input/sunspider/crypto-aes.html b/build/pgo/js-input/sunspider/crypto-aes.html new file mode 100644 index 0000000000..12f26b2fb7 --- /dev/null +++ b/build/pgo/js-input/sunspider/crypto-aes.html @@ -0,0 +1,472 @@ + + + + +SunSpider crypto-aes + + + + +

crypto-aes

+
+
+ + + + + + diff --git a/build/pgo/js-input/sunspider/crypto-md5.html b/build/pgo/js-input/sunspider/crypto-md5.html new file mode 100644 index 0000000000..8395107ce6 --- /dev/null +++ b/build/pgo/js-input/sunspider/crypto-md5.html @@ -0,0 +1,336 @@ + + + + +SunSpider crypto-md5 + + + + +

crypto-md5

+
+
+ + + + + + diff --git a/build/pgo/js-input/sunspider/crypto-sha1.html b/build/pgo/js-input/sunspider/crypto-sha1.html new file mode 100644 index 0000000000..01d0b56f37 --- /dev/null +++ b/build/pgo/js-input/sunspider/crypto-sha1.html @@ -0,0 +1,274 @@ + + + + +SunSpider crypto-sha1 + + + + +

crypto-sha1

+
+
+ + + + + + diff --git a/build/pgo/js-input/sunspider/date-format-tofte.html b/build/pgo/js-input/sunspider/date-format-tofte.html new file mode 100644 index 0000000000..b8e4773423 --- /dev/null +++ b/build/pgo/js-input/sunspider/date-format-tofte.html @@ -0,0 +1,349 @@ + + + + +SunSpider date-format-tofte + + + + +

date-format-tofte

+
+
+ + + + + + diff --git a/build/pgo/js-input/sunspider/date-format-xparb.html b/build/pgo/js-input/sunspider/date-format-xparb.html new file mode 100644 index 0000000000..dd35713d1d --- /dev/null +++ b/build/pgo/js-input/sunspider/date-format-xparb.html @@ -0,0 +1,467 @@ + + + + +SunSpider date-format-xparb + + + + +

date-format-xparb

+
+
+ + + + + + diff --git a/build/pgo/js-input/sunspider/math-cordic.html b/build/pgo/js-input/sunspider/math-cordic.html new file mode 100644 index 0000000000..ec28f9ddde --- /dev/null +++ b/build/pgo/js-input/sunspider/math-cordic.html @@ -0,0 +1,145 @@ + + + + +SunSpider math-cordic + + + + +

math-cordic

+
+
+ + + + + + diff --git a/build/pgo/js-input/sunspider/math-partial-sums.html b/build/pgo/js-input/sunspider/math-partial-sums.html new file mode 100644 index 0000000000..b78b962489 --- /dev/null +++ b/build/pgo/js-input/sunspider/math-partial-sums.html @@ -0,0 +1,83 @@ + + + + +SunSpider math-partial-sums + + + + +

math-partial-sums

+
+
+ + + + + + diff --git a/build/pgo/js-input/sunspider/math-spectral-norm.html b/build/pgo/js-input/sunspider/math-spectral-norm.html new file mode 100644 index 0000000000..2949f9d780 --- /dev/null +++ b/build/pgo/js-input/sunspider/math-spectral-norm.html @@ -0,0 +1,101 @@ + + + + +SunSpider math-spectral-norm + + + + +

math-spectral-norm

+
+
+ + + + + + diff --git a/build/pgo/js-input/sunspider/regexp-dna.html b/build/pgo/js-input/sunspider/regexp-dna.html new file mode 100644 index 0000000000..4a00399b80 --- /dev/null +++ b/build/pgo/js-input/sunspider/regexp-dna.html @@ -0,0 +1,1762 @@ + + + + +SunSpider regexp-dna + + + + +

regexp-dna

+
+
+ + + + + + diff --git a/build/pgo/js-input/sunspider/string-base64.html b/build/pgo/js-input/sunspider/string-base64.html new file mode 100644 index 0000000000..53280ef2bb --- /dev/null +++ b/build/pgo/js-input/sunspider/string-base64.html @@ -0,0 +1,151 @@ + + + + +SunSpider string-base64 + + + + +

string-base64

+
+
+ + + + + + diff --git a/build/pgo/js-input/sunspider/string-fasta.html b/build/pgo/js-input/sunspider/string-fasta.html new file mode 100644 index 0000000000..240e60147c --- /dev/null +++ b/build/pgo/js-input/sunspider/string-fasta.html @@ -0,0 +1,135 @@ + + + + +SunSpider string-fasta + + + + +

string-fasta

+
+
+ + + + + + diff --git a/build/pgo/js-input/sunspider/string-tagcloud.html b/build/pgo/js-input/sunspider/string-tagcloud.html new file mode 100644 index 0000000000..893a927acd --- /dev/null +++ b/build/pgo/js-input/sunspider/string-tagcloud.html @@ -0,0 +1,315 @@ + + + + +SunSpider string-tagcloud + + + + +

string-tagcloud

+
+
+ + + + + + diff --git a/build/pgo/js-input/sunspider/string-unpack-code.html b/build/pgo/js-input/sunspider/string-unpack-code.html new file mode 100644 index 0000000000..ba80c99ea1 --- /dev/null +++ b/build/pgo/js-input/sunspider/string-unpack-code.html @@ -0,0 +1,117 @@ + + + + +SunSpider string-unpack-code + + + + +

string-unpack-code

+
+
+ + + + + + diff --git a/build/pgo/js-input/sunspider/string-validate-input.html b/build/pgo/js-input/sunspider/string-validate-input.html new file mode 100644 index 0000000000..72cf920b26 --- /dev/null +++ b/build/pgo/js-input/sunspider/string-validate-input.html @@ -0,0 +1,139 @@ + + + + +SunSpider string-validate-input + + + + +

string-validate-input

+
+
+ + + + + + diff --git a/build/pgo/js-input/valid-xhtml10.png b/build/pgo/js-input/valid-xhtml10.png new file mode 100644 index 0000000000..2275ee6ea1 Binary files /dev/null and b/build/pgo/js-input/valid-xhtml10.png differ diff --git a/build/pgo/profileserver.py b/build/pgo/profileserver.py new file mode 100755 index 0000000000..7f3de106ab --- /dev/null +++ b/build/pgo/profileserver.py @@ -0,0 +1,234 @@ +#!/usr/bin/python +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import json +import os +import sys +import glob +import subprocess + +import mozcrash +from mozbuild.base import MozbuildObject, BinaryNotFoundException +from mozfile import TemporaryDirectory +from mozhttpd import MozHttpd +from mozprofile import FirefoxProfile, Preferences +from mozprofile.permissions import ServerLocations +from mozrunner import FirefoxRunner, CLI +from six import string_types + +PORT = 8888 + +PATH_MAPPINGS = { + "/webkit/PerformanceTests": "third_party/webkit/PerformanceTests", + # It is tempting to map to `testing/talos/talos/tests` instead, to avoid + # writing `tests/` in every path, but we can't do that because some files + # refer to scripts located in `../..`. + "/talos": "testing/talos/talos", +} + + +def get_crashreports(directory, name=None): + rc = 0 + upload_path = os.environ.get("UPLOAD_PATH") + if upload_path: + # For automation, log the minidumps with stackwalk and get them moved to + # the artifacts directory. + fetches_dir = os.environ.get("MOZ_FETCHES_DIR") + if not fetches_dir: + raise Exception( + "Unable to process minidump in automation because " + "$MOZ_FETCHES_DIR is not set in the environment" + ) + stackwalk_binary = os.path.join( + fetches_dir, "minidump_stackwalk", "minidump_stackwalk" + ) + if sys.platform == "win32": + stackwalk_binary += ".exe" + minidump_path = os.path.join(directory, "minidumps") + rc = mozcrash.check_for_crashes( + minidump_path, + symbols_path=fetches_dir, + stackwalk_binary=stackwalk_binary, + dump_save_path=upload_path, + test_name=name, + ) + return rc + + +if __name__ == "__main__": + cli = CLI() + debug_args, interactive = cli.debugger_arguments() + runner_args = cli.runner_args() + + build = MozbuildObject.from_environment() + + binary = runner_args.get("binary") + if not binary: + try: + binary = build.get_binary_path(where="staged-package") + except BinaryNotFoundException as e: + print("{}\n\n{}\n".format(e, e.help())) + sys.exit(1) + binary = os.path.normpath(os.path.abspath(binary)) + + path_mappings = { + k: os.path.join(build.topsrcdir, v) for k, v in PATH_MAPPINGS.items() + } + httpd = MozHttpd( + port=PORT, + docroot=os.path.join(build.topsrcdir, "build", "pgo"), + path_mappings=path_mappings, + ) + httpd.start(block=False) + + locations = ServerLocations() + locations.add_host(host="127.0.0.1", port=PORT, options="primary,privileged") + + old_profraw_files = glob.glob("*.profraw") + for f in old_profraw_files: + os.remove(f) + + with TemporaryDirectory() as profilePath: + # TODO: refactor this into mozprofile + profile_data_dir = os.path.join(build.topsrcdir, "testing", "profiles") + with open(os.path.join(profile_data_dir, "profiles.json"), "r") as fh: + base_profiles = json.load(fh)["profileserver"] + + prefpaths = [ + os.path.join(profile_data_dir, profile, "user.js") + for profile in base_profiles + ] + + prefs = {} + for path in prefpaths: + 
prefs.update(Preferences.read_prefs(path)) + + interpolation = {"server": "%s:%d" % httpd.httpd.server_address} + for k, v in prefs.items(): + if isinstance(v, string_types): + v = v.format(**interpolation) + prefs[k] = Preferences.cast(v) + + # Enforce e10s. This isn't in one of the user.js files because those + # are shared with android, which doesn't want this on. We can't + # interpolate because the formatting code only works for strings, + # and this is a bool pref. + prefs["browser.tabs.remote.autostart"] = True + + profile = FirefoxProfile( + profile=profilePath, + preferences=prefs, + addons=[ + os.path.join( + build.topsrcdir, "tools", "quitter", "quitter@mozilla.org.xpi" + ) + ], + locations=locations, + ) + + env = os.environ.copy() + env["MOZ_CRASHREPORTER_NO_REPORT"] = "1" + env["XPCOM_DEBUG_BREAK"] = "warn" + # We disable sandboxing to make writing profiling data actually work + # Bug 1553850 considers fixing this. + env["MOZ_DISABLE_CONTENT_SANDBOX"] = "1" + env["MOZ_DISABLE_RDD_SANDBOX"] = "1" + env["MOZ_DISABLE_SOCKET_PROCESS_SANDBOX"] = "1" + env["MOZ_DISABLE_GPU_SANDBOX"] = "1" + env["MOZ_DISABLE_GMP_SANDBOX"] = "1" + env["MOZ_DISABLE_NPAPI_SANDBOX"] = "1" + env["MOZ_DISABLE_VR_SANDBOX"] = "1" + + # Ensure different pids write to different files + env["LLVM_PROFILE_FILE"] = "default_%p_random_%m.profraw" + + # Write to an output file if we're running in automation + process_args = {"universal_newlines": True} + if "UPLOAD_PATH" in env: + process_args["logfile"] = os.path.join( + env["UPLOAD_PATH"], "profile-run-1.log" + ) + + # Run Firefox a first time to initialize its profile + runner = FirefoxRunner( + profile=profile, + binary=binary, + cmdargs=["data:text/html,"], + env=env, + process_args=process_args, + ) + runner.start() + ret = runner.wait() + if ret: + print("Firefox exited with code %d during profile initialization" % ret) + logfile = process_args.get("logfile") + if logfile: + print("Firefox output (%s):" % logfile) + with open(logfile) as f: + print(f.read()) + httpd.stop() + get_crashreports(profilePath, name="Profile initialization") + sys.exit(ret) + + jarlog = os.getenv("JARLOG_FILE") + if jarlog: + env["MOZ_JAR_LOG_FILE"] = os.path.abspath(jarlog) + print("jarlog: %s" % env["MOZ_JAR_LOG_FILE"]) + if os.path.exists(jarlog): + os.remove(jarlog) + + if "UPLOAD_PATH" in env: + process_args["logfile"] = os.path.join( + env["UPLOAD_PATH"], "profile-run-2.log" + ) + cmdargs = ["http://localhost:%d/index.html" % PORT] + runner = FirefoxRunner( + profile=profile, + binary=binary, + cmdargs=cmdargs, + env=env, + process_args=process_args, + ) + runner.start(debug_args=debug_args, interactive=interactive) + ret = runner.wait() + httpd.stop() + if ret: + print("Firefox exited with code %d during profiling" % ret) + logfile = process_args.get("logfile") + if logfile: + print("Firefox output (%s):" % logfile) + with open(logfile) as f: + print(f.read()) + get_crashreports(profilePath, name="Profiling run") + sys.exit(ret) + + # Try to move the crash reports to the artifacts even if Firefox appears + # to exit successfully, in case there's a crash that doesn't set the + # return code to non-zero for some reason. 
+ if get_crashreports(profilePath, name="Firefox exited successfully?") != 0: + print("Firefox exited successfully, but produced a crashreport") + sys.exit(1) + + llvm_profdata = env.get("LLVM_PROFDATA") + if llvm_profdata: + profraw_files = glob.glob("*.profraw") + if not profraw_files: + print( + "Could not find profraw files in the current directory: %s" + % os.getcwd() + ) + sys.exit(1) + merge_cmd = [ + llvm_profdata, + "merge", + "-o", + "merged.profdata", + ] + profraw_files + rc = subprocess.call(merge_cmd) + if rc != 0: + print("INFRA-ERROR: Failed to merge profile data. Corrupt profile?") + # exit with TBPL_RETRY + sys.exit(4) diff --git a/build/pgo/server-locations.txt b/build/pgo/server-locations.txt new file mode 100644 index 0000000000..a4a76107ab --- /dev/null +++ b/build/pgo/server-locations.txt @@ -0,0 +1,338 @@ +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# +# This file defines the locations at which this HTTP server may be accessed. +# It is referred to by the following page, so if this file moves, that page must +# be modified accordingly: +# +# https://developer.mozilla.org/en/docs/Mochitest#How_do_I_test_issues_which_only_show_up_when_tests_are_run_across_domains.3F +# +# Empty lines and lines which begin with "#" are ignored and may be used for +# storing comments. All other lines consist of an origin followed by whitespace +# and a comma-separated list of options (if indeed any options are needed). +# +# The format of an origin is, referring to RFC 2396, a scheme (either "http" or +# "https"), followed by "://", followed by a host, followed by ":", followed by +# a port number. The colon and port number must be present even if the port +# number is the default for the protocol. +# +# Unrecognized options are ignored. Recognized options are "primary" and +# "privileged", "nocert", "cert=some_cert_nickname", "redir=hostname" and +# "failHandshake". +# +# "primary" denotes a location which is the canonical location of +# the server; this location is the one assumed for requests which don't +# otherwise identify a particular origin (e.g. HTTP/1.0 requests). +# +# "privileged" denotes a location which should have the ability to request +# elevated privileges; the default is no privileges. +# +# "nocert" makes sense only for https:// hosts and means there is not +# any certificate automatically generated for this host. +# +# "failHandshake" causes the tls handshake to fail (by sending a client hello to +# the client). +# +# "cert=nickname" tells the pgo server to use a particular certificate +# for this host. The certificate is referenced by its nickname that must +# not contain any spaces. The certificate key files (PKCS12 modules) +# for custom certification are loaded from build/pgo/certs +# directory. When new certificate is added to this dir pgo/ssltunnel +# must be built then. This is only necessary for cases where we really do +# want specific certs. +# You can find instructions on how to add or modify certificates at: +# https://firefox-source-docs.mozilla.org/build/buildsystem/test_certificates.html +# +# "redir=hostname" tells the pgo server is only used for https:// +# hosts while processing the CONNECT tunnel request. It responds +# to the CONNECT with a 302 and redirection to the hostname instead +# of connecting to the real back end and replying with a 200. 
This +# mode exists primarily to ensure we don't allow a proxy to do that. +# + +# +# This is the primary location from which tests run. +# +http://mochi.test:8888 primary,privileged + +# +# These are a common set of prefixes scattered across one TLD with two ports and +# another TLD on a single port. +# +http://127.0.0.1:80 privileged +http://127.0.0.1:8888 privileged +http://test:80 privileged +http://mochi.test:8888 privileged +http://mochi.xorigin-test:8888 privileged +http://test1.mochi.test:8888 +http://sub1.test1.mochi.test:8888 +http://sub2.xn--lt-uia.mochi.test:8888 +http://test2.mochi.test:8888 +http://example.org:80 privileged +http://test1.example.org:80 privileged +http://test2.example.org:80 privileged +http://sub1.test1.example.org:80 privileged +http://sub1.test2.example.org:80 privileged +http://sub2.test1.example.org:80 privileged +http://sub2.test2.example.org:80 privileged +http://example.org:8000 privileged +http://test1.example.org:8000 privileged +http://test2.example.org:8000 privileged +http://sub1.test1.example.org:8000 privileged +http://sub1.test2.example.org:8000 privileged +http://sub2.test1.example.org:8000 privileged +http://sub2.test2.example.org:8000 privileged +http://example.com:80 privileged +http://www.example.com:80 privileged +http://test1.example.com:80 privileged +http://test2.example.com:80 privileged +http://sub1.test1.example.com:80 privileged +http://sub1.test2.example.com:80 privileged +http://sub2.test1.example.com:80 privileged +http://sub2.test2.example.com:80 privileged +http://noxul.example.com:80 privileged,noxul +http://example.net:80 privileged +http://supports-insecure.expired.example.com:80 privileged +# Used to test that clearing Service Workers for domain example.com, does not clear prefixexample.com +http://prefixexample.com:80 + +# The first HTTPS location is used to generate the Common Name (CN) value of the +# certificate's Issued To field. +https://example.com:443 privileged +https://test1.example.com:443 privileged +https://test2.example.com:443 privileged +https://example.org:443 privileged +https://test1.example.org:443 privileged +https://test2.example.org:443 privileged +https://sub1.test1.example.com:443 privileged +https://sub1.test2.example.com:443 privileged +https://sub2.test1.example.com:443 privileged +https://sub2.test2.example.com:443 privileged +https://nocert.example.com:443 privileged,nocert +https://self-signed.example.com:443 privileged,cert=selfsigned +https://untrusted.example.com:443 privileged,cert=untrusted +https://expired.example.com:443 privileged,cert=expired +https://requestclientcert.example.com:443 privileged,clientauth=request +https://requireclientcert.example.com:443 privileged,clientauth=require +https://requireclientcert-2.example.com:443 privileged,clientauth=require +https://mismatch.expired.example.com:443 privileged,cert=expired +https://mismatch.untrusted.example.com:443 privileged,cert=untrusted +https://untrusted-expired.example.com:443 privileged,cert=untrustedandexpired +https://mismatch.untrusted-expired.example.com:443 privileged,cert=untrustedandexpired +https://supports-insecure.expired.example.com:443 privileged,cert=expired +https://no-subject-alt-name.example.com:443 cert=noSubjectAltName + +# Used for secure contexts on ip addresses, see bug 1616675. 
Note that +# 127.0.0.1 prompts ssltunnel.cpp to do special-cases, so we use .2 +https://127.0.0.2:443 privileged,ipV4Address +https://secureonly.example.com:443 + +# Prevent safebrowsing tests from hitting the network for its-a-trap.html and +# its-an-attack.html. +http://www.itisatrap.org:80 +https://www.itisatrap.org:443 + +# +# These are subdomains of <ält.example.org>. +# +http://sub1.xn--lt-uia.example.org:8000 privileged +http://sub2.xn--lt-uia.example.org:80 privileged +http://xn--exmple-cua.test:80 privileged +http://sub1.xn--exmple-cua.test:80 privileged +http://xn--exaple-kqf.test:80 privileged +http://sub1.xn--exaple-kqf.test:80 privileged + +https://xn--hxajbheg2az3al.xn--jxalpdlp:443 privileged +https://sub1.xn--hxajbheg2az3al.xn--jxalpdlp:443 privileged + +# +# These are subdomains of <παράδειγμα.δοκιμή>, the Greek IDN for example.test. +# +http://xn--hxajbheg2az3al.xn--jxalpdlp:80 privileged +http://sub1.xn--hxajbheg2az3al.xn--jxalpdlp:80 privileged + +# Bug 413909 test host +https://bug413909.xn--hxajbheg2az3al.xn--jxalpdlp:443 privileged,cert=bug413909cert + +# +# These hosts are used in tests which exercise privilege-granting functionality; +# we could reuse some of the names above, but specific names make it easier to +# distinguish one from the other in tests (as well as what functionality is +# being tested). +# +http://sectest1.example.org:80 privileged +http://sub.sectest2.example.org:80 privileged +http://sectest2.example.org:80 +http://sub.sectest1.example.org:80 + +https://sectest1.example.org:443 privileged +https://sub.sectest2.example.org:443 privileged +https://sectest2.example.org:443 +https://sub.sectest1.example.org:443 + +# +# Used while testing the url-classifier +# +http://malware.example.com:80 +http://unwanted.example.com:80 +http://tracking.example.com:80 +http://cryptomining.example.com:80 +http://fingerprinting.example.com:80 +http://not-tracking.example.com:80 +http://tracking.example.org:80 +http://another-tracking.example.net:80 +http://social-tracking.example.org:80 +http://itisatracker.org:80 +https://itisatracker.org:443 +http://trackertest.org:80 +# +# Used while testing TLS session ticket resumption for third-party trackers (bug 1500533) +# (DO NOT USE THIS HOST IN OTHER TESTS!) 
+# +https://tlsresumptiontest.example.org:443 + +https://malware.example.com:443 +https://unwanted.example.com:443 +https://tracking.example.com:443 +https://cryptomining.example.com:443 +https://fingerprinting.example.com:443 +https://not-tracking.example.com:443 +https://tracking.example.org:443 +https://another-tracking.example.net:443 +https://social-tracking.example.org:443 + +# +# Used while testing flash blocking (Bug 1307604) +# +http://flashallow.example.com:80 +http://exception.flashallow.example.com:80 +http://flashblock.example.com:80 +http://exception.flashblock.example.com:80 +http://subdocument.example.com:80 +https://subdocument.example.com:443 +http://exception.subdocument.example.com:80 + +# +# Used while testing tracking protection (Bug 1580416) +# Not that apps.fbsbx.com is a public suffix +# +http://mochitest.apps.fbsbx.com:80 + +# +# Flash usage can fail unless this URL exists +# +http://fpdownload2.macromedia.com:80 +https://fpdownload2.macromedia.com:443 + +# Bug 1281083 +http://bug1281083.example.com:80 + +# Bug 483437, 484111 +https://www.bank1.com:443 privileged,cert=escapeattack1 + +# +# CONNECT for redirproxy results in a 302 redirect to +# test1.example.com +# +https://redirproxy.example.com:443 privileged,redir=test1.example.com + +# Host used for IndexedDB Quota testing +http://bug704464-1.example.com:80 privileged +http://bug704464-2.example.com:80 privileged +http://bug704464-3.example.com:80 privileged +http://bug702292.example.com:80 privileged + +# W3C hosts. +# See http://www.w3.org/wiki/Testing/Requirements#The_Web_test_server_must_be_available_through_different_domain_names +http://w3c-test.org:80 +http://w3c-test.org:81 +http://w3c-test.org:82 +http://w3c-test.org:83 +http://www.w3c-test.org:80 +http://www.w3c-test.org:81 +http://www.w3c-test.org:82 +http://www.w3c-test.org:83 +http://www1.w3c-test.org:80 +http://www1.w3c-test.org:81 +http://www1.w3c-test.org:82 +http://www1.w3c-test.org:83 +http://www2.w3c-test.org:80 +http://www2.w3c-test.org:81 +http://www2.w3c-test.org:82 +http://www2.w3c-test.org:83 +# http://天気の良い日.w3c-test.org +http://xn--n8j6ds53lwwkrqhv28a.w3c-test.org:80 +http://xn--n8j6ds53lwwkrqhv28a.w3c-test.org:81 +http://xn--n8j6ds53lwwkrqhv28a.w3c-test.org:82 +http://xn--n8j6ds53lwwkrqhv28a.w3c-test.org:83 +# http://élève.w3c-test.org +http://xn--lve-6lad.w3c-test.org:80 +http://xn--lve-6lad.w3c-test.org:81 +http://xn--lve-6lad.w3c-test.org:82 +http://xn--lve-6lad.w3c-test.org:83 +# HTTPS versions of the above +https://w3c-test.org:443 +https://www.w3c-test.org:443 +https://www1.w3c-test.org:443 +https://www2.w3c-test.org:443 +https://xn--n8j6ds53lwwkrqhv28a.w3c-test.org:443 +https://xn--lve-6lad.w3c-test.org:443 +http://test.w3.org:80 + +# Hosts for testing TLD-based fallback encoding +http://example.tw:80 privileged +http://example.cn:80 privileged +http://example.co.jp:80 privileged +http://example.fi:80 privileged +http://example.in:80 privileged +http://example.lk:80 privileged + +# Host for HPKP +https://include-subdomains.pinning-dynamic.example.com:443 privileged,cert=dynamicPinningGood +https://bad.include-subdomains.pinning-dynamic.example.com:443 privileged,cert=dynamicPinningBad + +# Host for static pin tests +https://badchain.include-subdomains.pinning.example.com:443 privileged,cert=staticPinningBad +https://fail-handshake.example.com:443 privileged,failHandshake + +# Host for bad cert domain fixup test +https://badcertdomain.example.com:443 privileged,cert=badCertDomain +https://www.badcertdomain.example.com:443 
privileged,cert=badCertDomain +https://127.0.0.3:433 privileged,cert=badCertDomain +https://badcertdomain.example.com:82 privileged,cert=badCertDomain +https://mismatch.badcertdomain.example.com:443 privileged,cert=badCertDomain + +# Hosts for sha1 console warning tests +https://sha1ee.example.com:443 privileged,cert=sha1_end_entity +https://sha256ee.example.com:443 privileged,cert=sha256_end_entity + +# Hosts for imminent distrust warning tests +https://imminently-distrusted.example.com:443 privileged,cert=imminently_distrusted + +# Hosts for ssl3/rc4/tls1 warning tests +https://ssl3.example.com:443 privileged,ssl3 +https://rc4.example.com:443 privileged,rc4 +https://ssl3rc4.example.com:443 privileged,ssl3,rc4 +https://tls1.example.com:443 privileged,tls1 +https://tls11.example.com:443 privileged,tls1_1 +https://tls12.example.com:443 privileged,tls1_2 +https://tls13.example.com:443 privileged,tls1,tls1_3 + +# Hosts for youtube rewrite tests +https://mochitest.youtube.com:443 + +# Host for U2F localhost tests +https://localhost:443 + +# Bug 1402530 +http://localhost:80 privileged + +# Host for testing APIs whitelisted for mozilla.org +https://www.mozilla.org:443 + +# local-IP origins for password manager tests (Bug 1582499) +http://10.0.0.0:80 privileged +http://192.168.0.0:80 privileged diff --git a/build/psutil_requirements.in b/build/psutil_requirements.in new file mode 100644 index 0000000000..ee488cf06c --- /dev/null +++ b/build/psutil_requirements.in @@ -0,0 +1,2 @@ +psutil==5.7.0 + diff --git a/build/psutil_requirements.txt b/build/psutil_requirements.txt new file mode 100644 index 0000000000..ef605f2ec1 --- /dev/null +++ b/build/psutil_requirements.txt @@ -0,0 +1,19 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --generate-hashes --output-file=psutil_requirements.txt psutil_requirements.in +# +psutil==5.7.0 \ + --hash=sha256:1413f4158eb50e110777c4f15d7c759521703bd6beb58926f1d562da40180058 \ + --hash=sha256:298af2f14b635c3c7118fd9183843f4e73e681bb6f01e12284d4d70d48a60953 \ + --hash=sha256:60b86f327c198561f101a92be1995f9ae0399736b6eced8f24af41ec64fb88d4 \ + --hash=sha256:685ec16ca14d079455892f25bd124df26ff9137664af445563c1bd36629b5e0e \ + --hash=sha256:73f35ab66c6c7a9ce82ba44b1e9b1050be2a80cd4dcc3352cc108656b115c74f \ + --hash=sha256:75e22717d4dbc7ca529ec5063000b2b294fc9a367f9c9ede1f65846c7955fd38 \ + --hash=sha256:a02f4ac50d4a23253b68233b07e7cdb567bd025b982d5cf0ee78296990c22d9e \ + --hash=sha256:d008ddc00c6906ec80040d26dc2d3e3962109e40ad07fd8a12d0284ce5e0e4f8 \ + --hash=sha256:d84029b190c8a66a946e28b4d3934d2ca1528ec94764b180f7d6ea57b0e75e26 \ + --hash=sha256:e2d0c5b07c6fe5a87fa27b7855017edb0d52ee73b71e6ee368fae268605cc3f5 \ + --hash=sha256:f344ca230dd8e8d5eee16827596f1c22ec0876127c28e800d7ae20ed44c4b310 \ + # via -r psutil_requirements.in diff --git a/build/qemu-wrap b/build/qemu-wrap new file mode 100755 index 0000000000..e33938955d --- /dev/null +++ b/build/qemu-wrap @@ -0,0 +1,24 @@ +#!/bin/bash +# this script creates a wrapper shell script for an executable. The idea is the actual executable cannot be +# executed natively (it was cross compiled), but we want to run tests natively. Running this script +# as part of the compilation process will move the non-native executable to a new location, and replace it +# with a script that will run it under qemu. 
+while [[ -n $1 ]]; do + case $1 in + --qemu) QEMU="$2"; shift 2;; + --libdir) LIBDIR="$2"; shift 2;; + --ld) LD="$2"; shift 2;; + *) exe="$1"; shift;; + esac +done +if [[ -z $LIBDIR ]]; then + echo "You need to specify a directory for the cross libraries when you configure the shell" + echo "You can do this with --with-cross-lib=" + exit 1 +fi +LD=${LD:-$LIBDIR/ld-linux.so.3} +mv $exe $exe.target +# Just hardcode the path to the executable. It'll be pretty obvious if it is doing the wrong thing. + +echo $'#!/bin/bash\n' $QEMU -E LD_LIBRARY_PATH="${LIBDIR}" "$LD" "$(readlink -f "$exe.target")" '"$@"' >"$exe" +chmod +x $exe \ No newline at end of file diff --git a/build/sanitizers/asan_blacklist_win.txt b/build/sanitizers/asan_blacklist_win.txt new file mode 100644 index 0000000000..3bc19ba437 --- /dev/null +++ b/build/sanitizers/asan_blacklist_win.txt @@ -0,0 +1,28 @@ +# This is originally copied from Chromium tools/memory/asan/blacklist_win.txt. +# The rules in this file are only applied at compile time. If you can modify the +# source in question, consider function attributes to disable instrumentation. + +# Bug 1200740 - ASan crash due to child process function interceptions +# Sandbox executes some of its code before the ASan RTL gets initialized and +# maps shadow memory. As a result, instrumented code tries to access unavailable +# shadow memory and faults. +fun:*TargetNtSetInformationThread* +fun:*TargetNtOpenThreadToken* +fun:*TargetNtOpenThreadTokenEx* +fun:*TargetNtMapViewOfSection* +fun:*AutoProtectMemory*sandbox* +fun:*EatResolverThunk*sandbox* +fun:*InterceptionAgent*sandbox* +fun:*ResolverThunk*sandbox* +fun:*Target*SandboxFactory*sandbox* +fun:*ProcessState*sandbox* +src:*pe_image.h +src:*pe_image.cc +src:*resolver_32.cc +src:*resolver_64.cc +src:*filesystem_interception.cc +src:*process_thread_interception.cc +src:*registry_interception.cc +src:*sandbox_nt_util.cc +src:*sync_interception.cc +src:*interceptors_64.cc diff --git a/build/sanitizers/ubsan_enum_blacklist.txt b/build/sanitizers/ubsan_enum_blacklist.txt new file mode 100644 index 0000000000..1be9d23d4a --- /dev/null +++ b/build/sanitizers/ubsan_enum_blacklist.txt @@ -0,0 +1,17 @@ +# All entries in this file are to suppress issues reported by enum. +# Blacklists for other reasons should go in separate blacklist files. + +[enum] + +# bug 1404151 +src:*/lul/LulDwarfExt.h +src:*/lul/LulDwarf.cpp + +# bug 1405142 +src:*/widget/gtk/* + +# bug 1619468 +src:*/security/manager/ssl/nsSiteSecurityService.cpp + +# bug 1619484 +src:*/media/webrtc/trunk/webrtc/modules/desktop_capture/desktop_and_cursor_composer.cc diff --git a/build/sanitizers/ubsan_object_size_blacklist.txt b/build/sanitizers/ubsan_object_size_blacklist.txt new file mode 100644 index 0000000000..e61b4eb1ec --- /dev/null +++ b/build/sanitizers/ubsan_object_size_blacklist.txt @@ -0,0 +1,7 @@ +# All entries in this file are to suppress issues reported by object-size. +# Blacklists for other reasons should go in separate blacklist files. + +[object-size] + +# bug 1577584 +src:*/gfx/harfbuzz/src/* diff --git a/build/sanitizers/ubsan_pointer_overflow_blacklist.txt b/build/sanitizers/ubsan_pointer_overflow_blacklist.txt new file mode 100644 index 0000000000..3e27b70173 --- /dev/null +++ b/build/sanitizers/ubsan_pointer_overflow_blacklist.txt @@ -0,0 +1,28 @@ +# All entries in this file are to suppress issues reported by pointer-overflow. +# Blacklists for other reasons should go in separate blacklist files. 
+ +[pointer-overflow] + +# cppunittest +src:*/mfbt/tests/TestArrayUtils.cpp +# gtest ImageDecodeToSurface.WebP +src:*/media/libwebp/src/dec/idec_dec.c +# gtest ImageDecoders.WebPLargeMultiChunk +src:*/media/libwebp/src/utils/bit_reader_utils.c +# gtest MediaDataDecoder.VP9 +src:*/media/ffvpx/libavcodec/avpacket.c +src:*/media/ffvpx/libavutil/imgutils.c +# gtest VP8VideoTrackEncoder.FrameEncode +src:*/media/libvpx/libvpx/vp8/encoder/encodeframe.c +# gtest ImageDecoders.AVIFSingleChunk +src:*/third_party/aom/av1/common/quant_common.c +# crashtest +src:*/parser/expat/lib/xmlparse.c +src:*/mfbt/lz4/lz4.c +src:*/media/libogg/src/ogg_framing.c +# mochitest gl2c dom/canvas/test/webgl-conf/generated/test_2_conformance2__reading__read-pixels-pack-parameters.html +src:*/dom/canvas/WebGLContextGL.cpp +# reftest J1 +src:*/js/src/builtin/TypedObject.cpp +# jittest Jit6 +src:*/js/src/jit/x86-shared/Assembler-x86-shared.h diff --git a/build/sanitizers/ubsan_signed_overflow_blacklist.txt b/build/sanitizers/ubsan_signed_overflow_blacklist.txt new file mode 100644 index 0000000000..5cf11100ab --- /dev/null +++ b/build/sanitizers/ubsan_signed_overflow_blacklist.txt @@ -0,0 +1,258 @@ +# This file contains an extensive compile-time blacklist for silencing highly +# frequent signed integer overflows in our codebase, found by the use of +# -fsanitize=signed-integer-overflow. C/C++ say signed integer overflow is +# undefined behavior, so instances of this need to be fixed. But not all code +# has been properly written to not overflow, and overflow-checking can have +# significant compile time and runtime costs, so we will sometimes disable +# signed overflow checking. +# +# The rules in this file are applied at compile time; changes to this list +# usually require a full rebuild to apply. If you can modify the source in +# question to exempt specific functions using MOZ_NO_SANITIZE_SIGNED_OVERFLOW, +# do that instead. +# +# The extensive number of entries below is for two reasons. +# +# First, compiler instrumentation for signed integer overflows has a cost, at +# compile time and at runtime. In performance-critical code proven to have no +# signed overflow, it makes sense to turn off overflow detection to avoid both +# costs. (Indeed, -fsanitize=signed-integer-overflow is unusably slow without +# this.) +# +# Second, many entries here are overly aggressive to get the build into a state +# that allows any testing to happen at all. Some of the entries here are for +# issues that are highly frequent in our test suites -- over 500 times per run. +# Aggressive entries now let us start using this mode, without having to first +# fix wide swaths of existing code. +# +# Entries should be removed 1) as issues are fixed; and 2) as blacklist entries +# can be moved out of this centralized file, into source-level blacklist +# attributes on individual functions. + +# All entries in this file are to suppress signed-integer-overflow problems. +# Blacklists for other reasons should go in separate blacklist files. +[signed-integer-overflow] + +# Overflows in the C++ std headers aren't necessarily bugs, because code inside +# a language implementation can depend on compiler-specific behavior where C/C++ +# leave the behavior undefined. +src:*bits/basic_string.h + +# Assume everything running through CheckedInt.h is ok. Signed overflows here +# should generally have been guarded by safe overflow checks, so it's likely +# safe to exempt it from overflow checking. 
(This should eventually be verified +# and functions individually tagged safe so this entry can be removed.) +src:*/CheckedInt.h + +# Exclude bignum +src:*/mfbt/double-conversion/source/bignum.cc + +# Exclude anything within gtests +src:*/gtest/* + +# Atomics can overflow, but without a full stack we can't trace these back +# to what is actually causing the overflow. Ignoring these for now, as it will +# be too much effort to determine every single source here. +src:*/mfbt/Atomics.h + +# No reason to instrument certain parts of NSS that explicitely deal with +# arithmetics and crypto. +src:*/security/nss/lib/freebl/mpi/* +src:*/security/nss/lib/freebl/ecl/* + +# nsTArray_base::ShiftData performs overflows +fun:*nsTArray_base*ShiftData* + +### Frequent 0 - 1 overflows +# +# We have several code patterns in our codebase that cause these overflows, +# but they are typically all harmless and could be filtered easily at runtime. +# However, some of them are so frequent that suppressing them at compile-time +# makes sense to increase runtime performance. +# +src:*/netwerk/base/nsSocketTransportService2.cpp +src:*/nsCharTraits.h +# Code in xpcom/base/CycleCollectedJSContext.cpp +fun:*CycleCollectedJSContext*ProcessMetastableStateQueue* +# Code in layout/painting/nsDisplayList.cpp +fun:*nsDisplayOpacity*ShouldFlattenAway* +# Code in modules/libpref/Preferences.cpp +fun:*pref_InitInitialObjects* +# Code in netwerk/base/nsIOService.cpp +fun:*nsIOService*GetCachedProtocolHandler* +# Code in layout/xul/nsXULPopupManager.cpp +fun:*nsXULPopupManager*AdjustPopupsOnWindowChange* +# Code in dom/base/nsDocument.cpp +fun:*1nsDocument@@* +# Code in gfx/layers/ipc/CompositorBridgeChild.cpp +fun:*CompositorBridgeChild*Destroy* +# Code in gfx/layers/ipc/ImageBridgeChild.cpp +fun:*ImageBridgeChild*ShutdownStep1* +# Code in dom/base/nsGlobalWindow.cpp +fun:*nsGlobalWindow*ClearControllers* +# Code in layout/style/AnimationCollection.cpp +fun:*AnimationCollection*PropertyDtor* +# Code in layout/style/nsStyleSet.cpp +fun:*nsStyleSet*AddImportantRules* +fun:*nsStyleSet*CounterStyleRuleForName* + + +### Misc overflows + +# Hot function in protobuf producing overflows +fun:*CodedInputStream*ReadTagWithCutoff* + + +# SQLite3 is full of overflows :/ +src:*/third_party/sqlite3/src/sqlite3.c + +# zlib has some overflows, we can't deal with them right now +src:*/modules/zlib/src/* + +# Our LZ4 implementation uses overflows. By listing it here we might +# miss some unintended overflows in that implementation, but we can't +# check for it right now. +src:*/mfbt/lz4.c + +# Apparently this overflows a lot, because it contains some allocators +# that keep overflowing, not sure why. Disabling by function didn't seem +# to work here for operator new. +src:*/xpcom/ds/nsArrayEnumerator.cpp + +# Memory usage reporting code in gfx/thebes/gfxASurface.cpp +# We probably don't care about the frequent overflows there. 
+fun:*SurfaceMemoryReporter*AdjustUsedMemory* + +# Frequent overflower in gfx/thebes/gfxFontEntry.cpp +fun:*WeightDistance* + +# Another frequent overflower +fun:*nsTObserverArray_base*AdjustIterators* + +# Overflows in Skia +fun:*SkPathRef*makeSpace* +fun:*SkPathRef*resetToSize* + +# Expat Parser has some overflows +fun:*nsExpatDriver*ConsumeToken* + +# Frequent overflowers in harfbuzz +fun:*hb_in_range* +fun:*OT*collect_glyphs* + +# These look like harmless layouting-related overflows +src:*/gfx/cairo/libpixman/src/pixman-region.c + +# Code in ipc/chromium/src/base/file_path.cc where a function returns -1 +# being cast to unsigned and then overflowed. +fun:*FilePath*Append* +fun:*FilePath*StripTrailingSeparatorsInternal* + +# Code in dom/base/nsJSEnvironment.cpp +fun:*FireForgetSkippable* + +# Code in gfx/thebes/gfxSkipChars.h +fun:*gfxSkipCharsIterator*AdvanceSkipped* + +# Code in gfx/thebes/gfxScriptItemizer.cpp +fun:*gfxScriptItemizer*fixup* +fun:*gfxScriptItemizer*push* + +# Code in dom/base/nsDocument.cpp +fun:*nsDocument*BlockOnload* + +# Code in layout/base/nsCSSFrameConstructor.cpp +fun:*nsCSSFrameConstructor*FrameConstructionItemList*AdjustCountsForItem* + +# Code in nsprpub/lib/ds/plarena.c doing ptrdiffs +fun:*PL_ArenaRelease* + +# This file contains a bunch of arithmetic operations on timestamps that +# apparently are allowed to overflow. +src:*/src/widget/SystemTimeConverter.h + +# Code in dom/media/flac/FlacDemuxer.cpp purposely uses overflowing arithmetics +fun:*Frame*FindNext* + +# Code in netwerk/base/nsStandardURL.cpp, +# these methods return signed but the subtraction is first performed unsigned +fun:*nsStandardURL*ReplaceSegment* + +# Code in netwerk/protocol/http/nsHttpChannel.cpp +# same as previous with the previous entry. +fun:*nsHttpChannel*ReportNetVSCacheTelemetry* + +# Code in layout/tables/nsCellMap.cpp +# again subtraction then cast to signed. +fun:*nsTableCellMap*GetColInfoAt* + +# Code in layout/generic/nsTextFrame.cpp +# again subtraction then cast to signed. +fun:*nsTextFrame*CharacterDataChanged* + +# Not sure what is going on in this file, but it doesn't look +# related to what we are looking for. +src:*/xpcom/base/CountingAllocatorBase.h + +# Code in dom/base/nsDOMNavigationTiming.cpp +# Timestamp related, probably expecting the overflow +fun:*nsDOMNavigationTiming*TimeStampToDOM* + +# Several unsigned arithmetic operations with -1 +src:*/hal/HalWakeLock.cpp + +# Code in layout/generic/nsGfxScrollFrame.cpp that produces +# somewhat frequent signed integer overflows. Probably harmless +# because it's layout code. 
+fun:*ClampAndAlignWithPixels* + +# Likely benign overflow in mozglue/misc/TimeStamp_posix.cpp +fun:*ClockResolutionNs* + +# This header has all sorts of operators that do post-operation +# overflow and underflow checking, triggering frequent reports +src:*/mozglue/misc/TimeStamp.h + +# +# Various hashing functions, both regular and cryptographic ones +# +src:*/dom/canvas/MurmurHash3.cpp +src:*/gfx/skia/skia/include/private/SkChecksum.h +src:*/intl/icu/source/common/unifiedcache.h +src:*/mfbt/SHA1.cpp +src:*/modules/zlib/src/adler32.c +src:*/netwerk/cache/nsDiskCacheDevice.cpp +src:*/netwerk/cache2/CacheHashUtils.cpp +src:*/netwerk/sctp/src/netinet/sctp_sha1.c +src:*/netwerk/srtp/src/crypto/hash/sha1.c +src:*/netwerk/sctp/src/netinet/sctp_sha1.c +src:*/nsprpub/lib/ds/plhash.c +src:*/security/manager/ssl/md4.c +src:*/security/nss/lib/dbm/src/h_func.c +src:*/security/nss/lib/freebl/sha512.c +src:*/security/nss/lib/freebl/md5.c +src:*/xpcom/ds/PLDHashTable.cpp + +# Hash/Cache function in Skia +fun:*GradientShaderCache*Build32bitCache* + +# Hashing functions in Cairo +fun:*_hash_matrix_fnv* +fun:*_hash_mix_bits* +fun:*_cairo_hash_string* +fun:*_cairo_hash_bytes* + +# intl code hashing functions +fun:*ustr_hash*CharsN* +fun:*hashEntry* + +# harfbuzz hash/digest functions +fun:*hb_set_digest_lowest_bits_t* + +# Hash function in gfx +fun:*gfxFontStyle*Hash* + +# expat uses a CHAR_HASH macro in several places that causes +# a high amount of overflows. We should try finding a better +# way to disable this rather than blacklisting the whole thing. +src:*/parser/expat/* diff --git a/build/sanitizers/ubsan_unsigned_overflow_blacklist.txt b/build/sanitizers/ubsan_unsigned_overflow_blacklist.txt new file mode 100644 index 0000000000..a6b91ddc85 --- /dev/null +++ b/build/sanitizers/ubsan_unsigned_overflow_blacklist.txt @@ -0,0 +1,265 @@ +# This file contains an extensive compile-time blacklist for silencing highly +# frequent *un*signed integer overflows in our codebase, found by the use of +# -fsanitize=unsigned-integer-overflow. Such overflows are not necessarily +# bugs -- unsigned integer overflow has well-defined semantics in C/C++. But +# overflow may still be *unexpected* and incorrectly handled, so we try to +# annotate those places where unsigned overflow is correct and desired. +# +# The rules in this file are applied at compile time; changes to this list +# usually require a full rebuild to apply. If you can modify the source in +# question to exempt specific functions using MOZ_NO_SANITIZE_UNSIGNED_OVERFLOW, +# do that instead. +# +# The extensive number of entries below is for two reasons. +# +# First, compiler instrumentation for unsigned integer overflows has a cost, at +# compile time and at runtime. In places where code expects and depends upon +# overflow behavior -- and especially in performance-critical code -- it makes +# sense to turn off overflow detection to avoid both costs. (Indeed, +# -fsanitize=signed-integer-overflow is unusably slow without this.) +# +# Second, many entries here are overly aggressive to get the build into a state +# that allows any testing to happen at all. Some of the entries here are for +# issues that are highly frequent in our test suites -- over 500 times per run. +# Aggressive entries now let us start using this mode, without having to first +# fix wide swaths of existing code. 
+#
+# Entries should be removed 1) as issues are fixed; and 2) as blacklist entries
+# can be moved out of this centralized file, into source-level blacklist
+# attributes on individual functions.
+
+# All entries in this file are to suppress unsigned-integer-overflow problems.
+# Blacklists for other reasons should go in separate blacklist files.
+[unsigned-integer-overflow]
+
+# Overflows in the C++ std headers aren't necessarily bugs, because code inside
+# a language implementation can depend on compiler-specific behavior where C/C++
+# leave the behavior undefined.
+src:*bits/basic_string.h
+
+# Assume everything running through CheckedInt.h is ok. The CheckedInt class
+# casts signed integers to unsigned first and then does a post-overflow
+# check causing lots of unsigned integer overflow messages.
+src:*/CheckedInt.h
+
+# Exclude bignum
+src:*/mfbt/double-conversion/source/bignum.cc
+
+# Exclude anything within gtests
+src:*/gtest/*
+
+# The JS engine has a lot of code doing all sorts of overflows. This code
+# is pretty well tested though and excluding it here will allow us to go
+# for other, less tested code. Ideally, we would include the JS engine here
+# at some point.
+src:*/js/src/*
+src:*/js/public/*
+src:*/js/*.h
+src:*/jsfriendapi.h
+
+# Atomics can overflow, but without a full stack we can't trace these back
+# to what is actually causing the overflow. Ignoring these for now, as it will
+# be too much effort to determine every single source here.
+src:*/mfbt/Atomics.h
+
+# No reason to instrument certain parts of NSS that explicitly deal with
+# arithmetic and crypto.
+src:*/security/nss/lib/freebl/mpi/*
+src:*/security/nss/lib/freebl/ecl/*
+
+# nsTArray_base::ShiftData performs overflows
+fun:*nsTArray_base*ShiftData*
+
+### Frequent 0 - 1 overflows
+#
+# We have several code patterns in our codebase that cause these overflows,
+# but they are typically all harmless and could be filtered easily at runtime.
+# However, some of them are so frequent that suppressing them at compile-time
+# makes sense to increase runtime performance.
+#
+src:*/netwerk/base/nsSocketTransportService2.cpp
+src:*/nsCharTraits.h
+# Code in xpcom/base/CycleCollectedJSContext.cpp
+fun:*CycleCollectedJSContext*ProcessMetastableStateQueue*
+# Code in layout/painting/nsDisplayList.cpp
+fun:*nsDisplayOpacity*ShouldFlattenAway*
+# Code in modules/libpref/Preferences.cpp
+fun:*pref_InitInitialObjects*
+# Code in netwerk/base/nsIOService.cpp
+fun:*nsIOService*GetCachedProtocolHandler*
+# Code in layout/xul/nsXULPopupManager.cpp
+fun:*nsXULPopupManager*AdjustPopupsOnWindowChange*
+# Code in dom/base/nsDocument.cpp
+fun:*1nsDocument@@*
+# Code in gfx/layers/ipc/CompositorBridgeChild.cpp
+fun:*CompositorBridgeChild*Destroy*
+# Code in gfx/layers/ipc/ImageBridgeChild.cpp
+fun:*ImageBridgeChild*ShutdownStep1*
+# Code in dom/base/nsGlobalWindow.cpp
+fun:*nsGlobalWindow*ClearControllers*
+# Code in layout/style/AnimationCollection.cpp
+fun:*AnimationCollection*PropertyDtor*
+# Code in layout/style/nsStyleSet.cpp
+fun:*nsStyleSet*AddImportantRules*
+fun:*nsStyleSet*CounterStyleRuleForName*
+
+
+### Misc overflows
+
+# Hot function in protobuf producing overflows
+fun:*CodedInputStream*ReadTagWithCutoff*
+
+
+# SQLite3 is full of overflows :/
+src:*/third_party/sqlite3/src/sqlite3.c
+
+# zlib has some overflows; we can't deal with them right now
+src:*/modules/zlib/src/*
+
+# Our LZ4 implementation uses overflows. By listing it here we might
+# miss some unintended overflows in that implementation, but we can't
+# check for it right now.
+src:*/mfbt/lz4/*
+
+# Apparently this overflows a lot, because it contains some allocators
+# that keep overflowing, not sure why. Disabling by function didn't seem
+# to work here for operator new.
+src:*/xpcom/ds/nsArrayEnumerator.cpp
+
+# Memory usage reporting code in gfx/thebes/gfxASurface.cpp
+# We probably don't care about the frequent overflows there.
+fun:*SurfaceMemoryReporter*AdjustUsedMemory*
+
+# Frequent overflower in gfx/thebes/gfxFontEntry.cpp
+fun:*WeightDistance*
+
+# Another frequent overflower
+fun:*nsTObserverArray_base*AdjustIterators*
+
+# Overflows in Skia
+fun:*SkPathRef*makeSpace*
+fun:*SkPathRef*resetToSize*
+
+# Expat Parser has some overflows
+fun:*nsExpatDriver*ConsumeToken*
+
+# Frequent overflowers in harfbuzz
+fun:*hb_in_range*
+fun:*OT*collect_glyphs*
+
+# These look like harmless layout-related overflows
+src:*/gfx/cairo/libpixman/src/pixman-region.c
+
+# Code in ipc/chromium/src/base/file_path.cc where a function returns -1
+# that is cast to unsigned and then overflows.
+fun:*FilePath*Append*
+fun:*FilePath*StripTrailingSeparatorsInternal*
+
+# Code in dom/base/nsJSEnvironment.cpp
+fun:*FireForgetSkippable*
+
+# Code in gfx/thebes/gfxSkipChars.h
+fun:*gfxSkipCharsIterator*AdvanceSkipped*
+
+# Code in gfx/thebes/gfxScriptItemizer.cpp
+fun:*gfxScriptItemizer*fixup*
+fun:*gfxScriptItemizer*push*
+
+# Code in dom/base/nsDocument.cpp
+fun:*nsDocument*BlockOnload*
+
+# Code in layout/base/nsCSSFrameConstructor.cpp
+fun:*nsCSSFrameConstructor*FrameConstructionItemList*AdjustCountsForItem*
+
+# Code in nsprpub/lib/ds/plarena.c doing ptrdiffs
+fun:*PL_ArenaRelease*
+
+# This file contains a bunch of arithmetic operations on timestamps that
+# apparently are allowed to overflow.
+src:*/src/widget/SystemTimeConverter.h
+
+# Code in dom/media/flac/FlacDemuxer.cpp purposely uses overflowing arithmetic
+fun:*Frame*FindNext*
+
+# Code in netwerk/base/nsStandardURL.cpp,
+# these methods return signed values but the subtraction is first performed unsigned
+fun:*nsStandardURL*ReplaceSegment*
+
+# Code in netwerk/protocol/http/nsHttpChannel.cpp
+# same as the previous entry.
+fun:*nsHttpChannel*ReportNetVSCacheTelemetry*
+
+# Code in layout/tables/nsCellMap.cpp
+# again subtraction then cast to signed.
+fun:*nsTableCellMap*GetColInfoAt*
+
+# Code in layout/generic/nsTextFrame.cpp
+# again subtraction then cast to signed.
+fun:*nsTextFrame*CharacterDataChanged*
+
+# Not sure what is going on in this file, but it doesn't look
+# related to what we are looking for.
+src:*/xpcom/base/CountingAllocatorBase.h
+
+# Code in dom/base/nsDOMNavigationTiming.cpp
+# Timestamp related, probably expecting the overflow
+fun:*nsDOMNavigationTiming*TimeStampToDOM*
+
+# Several unsigned arithmetic operations with -1
+src:*/hal/HalWakeLock.cpp
+
+# Code in layout/generic/nsGfxScrollFrame.cpp that produces
+# somewhat frequent unsigned integer overflows. Probably harmless
+# because it's layout code.
+fun:*ClampAndAlignWithPixels* + +# Likely benign overflow in mozglue/misc/TimeStamp_posix.cpp +fun:*ClockResolutionNs* + +# This header has all sorts of operators that do post-operation +# overflow and underflow checking, triggering frequent reports +src:*/mozglue/misc/TimeStamp.h + +# +# Various hashing functions, both regular and cryptographic ones +# +src:*/dom/canvas/MurmurHash3.cpp +src:*/gfx/skia/skia/include/private/SkChecksum.h +src:*/intl/icu/source/common/unifiedcache.h +src:*/mfbt/SHA1.cpp +src:*/modules/zlib/src/adler32.c +src:*/netwerk/cache/nsDiskCacheDevice.cpp +src:*/netwerk/cache2/CacheHashUtils.cpp +src:*/netwerk/sctp/src/netinet/sctp_sha1.c +src:*/netwerk/srtp/src/crypto/hash/sha1.c +src:*/netwerk/sctp/src/netinet/sctp_sha1.c +src:*/nsprpub/lib/ds/plhash.c +src:*/security/manager/ssl/md4.c +src:*/security/nss/lib/dbm/src/h_func.c +src:*/security/nss/lib/freebl/sha512.c +src:*/security/nss/lib/freebl/md5.c +src:*/xpcom/ds/PLDHashTable.cpp + +# Hash/Cache function in Skia +fun:*GradientShaderCache*Build32bitCache* + +# Hashing functions in Cairo +fun:*_hash_matrix_fnv* +fun:*_hash_mix_bits* +fun:*_cairo_hash_string* +fun:*_cairo_hash_bytes* + +# intl code hashing functions +fun:*ustr_hash*CharsN* +fun:*hashEntry* + +# harfbuzz hash/digest functions +fun:*hb_set_digest_lowest_bits_t* + +# Hash function in gfx +fun:*gfxFontStyle*Hash* + +# expat uses a CHAR_HASH macro in several places that causes +# a high amount of overflows. We should try finding a better +# way to disable this rather than blacklisting the whole thing. +src:*/parser/expat/* diff --git a/build/sparse-profiles/docker-image b/build/sparse-profiles/docker-image new file mode 100644 index 0000000000..9fb1329530 --- /dev/null +++ b/build/sparse-profiles/docker-image @@ -0,0 +1,26 @@ +%include build/sparse-profiles/mach + +[include] +path:taskcluster/ + +# Required for loading taskgraph.parameters. 
+path:browser/config/version_display.txt +path:browser/config/version.txt + +# Result from `grep -hr %include taskcluster/docker | grep -v " taskcluster/" | sort -u` +path:python/mozbuild/mozbuild/action/tooltool.py +path:testing/mozharness/external_tools/performance-artifact-schema.json +path:testing/mozharness/external_tools/robustcheckout.py +path:tools/lint/spell/codespell_requirements.txt +path:tools/lint/eslint/eslint-plugin-mozilla/manifest.tt +path:tools/lint/eslint/manifest.tt +path:tools/lint/python/flake8_requirements.txt +path:tools/lint/python/pylint_requirements.txt +path:tools/lint/python/black_requirements.txt +path:tools/lint/tox/tox_requirements.txt + +# Required for the webrender docker image +path:gfx/wr/ci-scripts/docker-image/setup.sh + +# for the system-symbol images +path:tools/crashreporter/system-symbols/ diff --git a/build/sparse-profiles/github-sync b/build/sparse-profiles/github-sync new file mode 100644 index 0000000000..8d6922aba6 --- /dev/null +++ b/build/sparse-profiles/github-sync @@ -0,0 +1,8 @@ +%include build/sparse-profiles/mach +%include build/sparse-profiles/taskgraph + +[include] +path:gfx/wgpu/ +path:gfx/wr/ +path:taskcluster/scripts/misc/ +path:tools/github-sync/ diff --git a/build/sparse-profiles/liblowercase b/build/sparse-profiles/liblowercase new file mode 100644 index 0000000000..5f9c2d088b --- /dev/null +++ b/build/sparse-profiles/liblowercase @@ -0,0 +1,3 @@ +[include] +path:build/liblowercase/ +path:taskcluster/scripts/misc/build-liblowercase.sh diff --git a/build/sparse-profiles/mach b/build/sparse-profiles/mach new file mode 100644 index 0000000000..28b4a8b2d0 --- /dev/null +++ b/build/sparse-profiles/mach @@ -0,0 +1,23 @@ +[include] +# Various mach commands call config.guess to resolve the default objdir name. +path:build/autoconf/config.guess +path:build/autoconf/config.sub +path:build/moz.configure/checks.configure +path:build/moz.configure/init.configure +path:build/moz.configure/util.configure +# Used for bootstrapping the mach driver. +path:build/mach_bootstrap.py +path:build/build_virtualenv_packages.txt +path:build/common_virtualenv_packages.txt +path:build/mach_virtualenv_packages.txt +path:build/glean_requirements.txt +path:build/psutil_requirements.txt +path:build/zstandard_requirements.txt +path:mach +# Various dependencies. There is room to trim fat, especially in +# third_party/python. 
+path:python/ +path:testing/mozbase/ +path:third_party/python/ +# certifi is needed for Sentry +path:testing/web-platform/tests/tools/third_party/certifi diff --git a/build/sparse-profiles/mozharness b/build/sparse-profiles/mozharness new file mode 100644 index 0000000000..c2a49d412b --- /dev/null +++ b/build/sparse-profiles/mozharness @@ -0,0 +1,4 @@ +%include build/sparse-profiles/mach + +[include] +path:testing/mozharness diff --git a/build/sparse-profiles/perftest b/build/sparse-profiles/perftest new file mode 100644 index 0000000000..a2d891c38e --- /dev/null +++ b/build/sparse-profiles/perftest @@ -0,0 +1,7 @@ +%include build/sparse-profiles/docker-image + +[include] +path:tools/lint/eslint/ +path:testing/performance +path:testing/condprofile +glob:**/perftest_*.js diff --git a/build/sparse-profiles/profile-generate b/build/sparse-profiles/profile-generate new file mode 100644 index 0000000000..be1fa53372 --- /dev/null +++ b/build/sparse-profiles/profile-generate @@ -0,0 +1,9 @@ +%include build/sparse-profiles/mach + +[include] +path:build/ +path:testing/talos/talos +path:testing/profiles/ +path:third_party/webkit/ +path:tools/quitter/ +path:taskcluster/scripts/misc diff --git a/build/sparse-profiles/push-to-try b/build/sparse-profiles/push-to-try new file mode 100644 index 0000000000..ff8fd73579 --- /dev/null +++ b/build/sparse-profiles/push-to-try @@ -0,0 +1,5 @@ +%include build/sparse-profiles/taskgraph + +[include] +path:tools/tryselect/ +path:try_task_config.json diff --git a/build/sparse-profiles/sphinx-docs b/build/sparse-profiles/sphinx-docs new file mode 100644 index 0000000000..d12c001629 --- /dev/null +++ b/build/sparse-profiles/sphinx-docs @@ -0,0 +1,39 @@ +%include build/sparse-profiles/mach + +[include] +# Code for generating docs. +glob:docs/** +glob:tools/moztreedocs/** + +# For icons +glob:browser/branding/nightly/** + +# Potential docs sources +glob:**/*.rst +glob:**/*.md +glob:**/*.js +glob:**/*.jsm + +# Potential included images. We don't glob all paths in order to avoid importing +# images unrelated to documentation (like branding or test images) +glob:**/docs/**.jpg +glob:**/docs/**.png +glob:**/docs/**.svg + +# Python API docs. +glob:**/*.py + +# moz.build files are read to discover location of docs. +glob:**/moz.build + +# Read to set the version of the docs. +path:config/milestone.txt + +# metrics.yaml and pings.yaml files (and their index) are needed to generate +# Glean autodocs +glob:**/metrics.yaml +glob:**/pings.yaml +path:toolkit/components/glean/metrics_index.py +# TODO(bug 1672716): Make it easier to use other file names +path:toolkit/components/glean/test_metrics.yaml +path:toolkit/components/glean/test_pings.yaml diff --git a/build/sparse-profiles/taskgraph b/build/sparse-profiles/taskgraph new file mode 100644 index 0000000000..8165614688 --- /dev/null +++ b/build/sparse-profiles/taskgraph @@ -0,0 +1,95 @@ +%include build/sparse-profiles/mach + +# In order to decide which docker images to build, we need all the +# files that docker images depend on as well +%include build/sparse-profiles/docker-image + +[include] +# These files are read as part of generating the taskgraph. +path:browser/locales/l10n-changesets.json +path:browser/locales/l10n-onchange-changesets.json +path:mobile/locales/l10n-changesets.json +path:mobile/locales/l10n-onchange-changesets.json +path:browser/locales/shipped-locales +path:browser/config/version_display.txt +path:browser/config/version.txt +path:browser/config/whats_new_page.yml + +# Lots of random files in here are read. 
Just pull in the whole thing. +path:build/ + +# TODO remove once bug 1402010 is resolved and test manifests aren't +# processed in Files() reading mode in moz.build files. +path:layout/tools/reftest/ +path:testing/web-platform/tests/tools/ + +# The main meat of this profile. +path:taskcluster/ + +# Various files in these directories are read by taskgraph. Just pull +# them all in. +path:testing/config/tooltool-manifests/ +path:testing/mozharness/ +path:tools/lint/ + +# for new-style try pushes +path:try_task_config.json + +# Moz.build files are read in filesystem mode +glob:**/moz.build +glob:**/*.mozbuild + +# Moz.configure files could trigger changes +glob:**/*.configure + +# Tooltool manifests also need to be opened. Assume they +# are all somewhere in "tooltool-manifests" directories. +glob:**/tooltool-manifests/** + +# For test chunking +glob:**/*.ini +glob:**/*.list +path:testing/mozbase/manifestparser +path:testing/runtimes +path:testing/web-platform/tests/ +path:testing/web-platform/mozilla/tests/ +glob:testing/web-platform/*.py + +# For scheduling android-gradle-dependencies. +path:mobile/android/config/ +glob:**/*.gradle + +# for action-task building +path:.taskcluster.yml +path:.cron.yml + +# for the wrench-deps toolchain task +path:gfx/wr/Cargo.lock +path:gfx/wr/ci-scripts/ + +# for the wgpu-deps toolchain task +path:gfx/wgpu/Cargo.lock + +# for the mar-tools toolchain task +path:mfbt/ +path:modules/libmar/ +path:other-licenses/bsdiff/ +path:other-licenses/nsis/Contrib/CityHash/cityhash/ +path:toolkit/mozapps/update/updater + +# for the minidump_stackwalk toolchain task +path:toolkit/crashreporter +path:tools/crashreporter/ +path:mfbt +path:config/external/zlib +path:build/moz.configure + +# for the browsertime toolchain task +path:tools/browsertime/mach_commands.py +path:tools/browsertime/package.json +path:tools/browsertime/package-lock.json + +# for the geckodriver toolchain task +path:testing/geckodriver +path:testing/mozbase/rust +path:testing/webdriver diff --git a/build/sparse-profiles/toolchain-build b/build/sparse-profiles/toolchain-build new file mode 100644 index 0000000000..fe542dd219 --- /dev/null +++ b/build/sparse-profiles/toolchain-build @@ -0,0 +1,9 @@ +# Profile needed to build toolchain tasks. + +# This is probably a little wider than we need it to be. But it is +# still relatively small and it keeps this profile simple. +%include build/sparse-profiles/taskgraph + +[include] +# Needed by build-clang.py. 
+path:tools/rewriting/ \ No newline at end of file diff --git a/build/sparse-profiles/tps b/build/sparse-profiles/tps new file mode 100644 index 0000000000..fe9b238f90 --- /dev/null +++ b/build/sparse-profiles/tps @@ -0,0 +1,5 @@ +%include build/sparse-profiles/mach + +[include] +path:services/sync/tps/ +path:testing/tps/ diff --git a/build/sparse-profiles/update-verify b/build/sparse-profiles/update-verify new file mode 100644 index 0000000000..83589d1f3b --- /dev/null +++ b/build/sparse-profiles/update-verify @@ -0,0 +1,4 @@ +%include build/sparse-profiles/mach + +[include] +path:tools/update-verify/ diff --git a/build/sparse-profiles/upload-generated-sources b/build/sparse-profiles/upload-generated-sources new file mode 100644 index 0000000000..3e39d32b66 --- /dev/null +++ b/build/sparse-profiles/upload-generated-sources @@ -0,0 +1,4 @@ +%include build/sparse-profiles/mach + +[include] +path:build/upload_generated_sources.py diff --git a/build/sparse-profiles/upload-symbols b/build/sparse-profiles/upload-symbols new file mode 100644 index 0000000000..1b46f04f99 --- /dev/null +++ b/build/sparse-profiles/upload-symbols @@ -0,0 +1,4 @@ +%include build/sparse-profiles/mach + +[include] +path:toolkit/crashreporter/tools/upload_symbols.py diff --git a/build/sparse-profiles/webrender b/build/sparse-profiles/webrender new file mode 100644 index 0000000000..049e0c5568 --- /dev/null +++ b/build/sparse-profiles/webrender @@ -0,0 +1,6 @@ +%include build/sparse-profiles/mach +%include build/sparse-profiles/taskgraph + +[include] +path:gfx/wr/ +path:taskcluster/scripts/misc/ diff --git a/build/sparse-profiles/wgpu b/build/sparse-profiles/wgpu new file mode 100644 index 0000000000..856905cff1 --- /dev/null +++ b/build/sparse-profiles/wgpu @@ -0,0 +1,4 @@ +%include build/sparse-profiles/mach + +[include] +path:gfx/wgpu/ diff --git a/build/submit_telemetry_data.py b/build/submit_telemetry_data.py new file mode 100644 index 0000000000..0748fbfead --- /dev/null +++ b/build/submit_telemetry_data.py @@ -0,0 +1,153 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +from __future__ import print_function + +import datetime +import json +import logging +import os +import sys + +import requests +import voluptuous +import voluptuous.humanize + +from mozbuild.telemetry import ( + schema as build_telemetry_schema, + verify_statedir, +) + +BUILD_TELEMETRY_URL = "https://incoming.telemetry.mozilla.org/{endpoint}" +SUBMIT_ENDPOINT = "submit/eng-workflow/build/1/{ping_uuid}" +STATUS_ENDPOINT = "status" + + +def delete_expired_files(directory, days=30): + """Discards files in a directory older than a specified number + of days + """ + now = datetime.datetime.now() + for filename in os.listdir(directory): + filepath = os.path.join(directory, filename) + + ctime = os.path.getctime(filepath) + then = datetime.datetime.fromtimestamp(ctime) + + if (now - then) > datetime.timedelta(days=days): + os.remove(filepath) + + return + + +def check_edge_server_status(session): + """Returns True if the Telemetry Edge Server + is ready to accept data + """ + status_url = BUILD_TELEMETRY_URL.format(endpoint=STATUS_ENDPOINT) + response = session.get(status_url) + if response.status_code != 200: + return False + return True + + +def send_telemetry_ping(session, data, ping_uuid): + """Sends a single build telemetry ping to the + edge server, returning the response object + """ + resource_url = SUBMIT_ENDPOINT.format(ping_uuid=str(ping_uuid)) + url = BUILD_TELEMETRY_URL.format(endpoint=resource_url) + response = session.post(url, json=data) + + return response + + +def submit_telemetry_data(outgoing, submitted): + """Sends information about `./mach build` invocations to + the Telemetry pipeline + """ + with requests.Session() as session: + # Confirm the server is OK + if not check_edge_server_status(session): + logging.error('Error posting to telemetry: server status is not "200 OK"') + return 1 + + for filename in os.listdir(outgoing): + path = os.path.join(outgoing, filename) + + if os.path.isdir(path) or not path.endswith(".json"): + logging.info("skipping item {}".format(path)) + continue + + ping_uuid = os.path.splitext(filename)[0] # strip ".json" to get ping UUID + + try: + with open(path, "r") as f: + data = json.load(f) + + # Verify the data matches the schema + voluptuous.humanize.validate_with_humanized_errors( + data, build_telemetry_schema + ) + + response = send_telemetry_ping(session, data, ping_uuid) + if response.status_code != 200: + msg = "response code {code} sending {uuid} to telemetry: {body}".format( + body=response.content, + code=response.status_code, + uuid=ping_uuid, + ) + logging.error(msg) + continue + + # Move from "outgoing" to "submitted" + os.rename( + os.path.join(outgoing, filename), os.path.join(submitted, filename) + ) + + logging.info("successfully posted {} to telemetry".format(ping_uuid)) + + except ValueError as ve: + # ValueError is thrown if JSON cannot be decoded + logging.exception("exception parsing JSON at %s: %s" % (path, str(ve))) + os.remove(path) + + except voluptuous.Error as e: + # Invalid is thrown if some data does not fit + # the correct Schema + logging.exception("invalid data found at %s: %s" % (path, e.message)) + os.remove(path) + + except Exception as e: + logging.error("exception posting to telemetry " "server: %s" % str(e)) + break + + delete_expired_files(submitted) + + return 0 + + +if __name__ == "__main__": + if len(sys.argv) != 2: + print("usage: python submit_telemetry_data.py ") + sys.exit(1) + + statedir = sys.argv[1] + + try: + outgoing, submitted, telemetry_log = verify_statedir(statedir) + + # Configure 
logging + logging.basicConfig( + filename=telemetry_log, + format="%(asctime)s %(message)s", + level=logging.DEBUG, + ) + + sys.exit(submit_telemetry_data(outgoing, submitted)) + + except Exception as e: + # Handle and print messages from `statedir` verification + print(e.message) + sys.exit(1) diff --git a/build/templates.mozbuild b/build/templates.mozbuild new file mode 100644 index 0000000000..916a1b1ca9 --- /dev/null +++ b/build/templates.mozbuild @@ -0,0 +1,220 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +@template +def Binary(): + '''Generic template for target binaries. Meant to be used by other + templates.''' + + # Add -llog by default, since we use it all over the place. + if CONFIG['OS_TARGET'] == 'Android': + OS_LIBS += ['log'] + + +@template +def Program(name): + '''Template for program executables.''' + PROGRAM = name + + Binary() + + +@template +def SimplePrograms(names, ext='.cpp'): + '''Template for simple program executables. + + Those have a single source with the same base name as the executable. + ''' + SIMPLE_PROGRAMS += names + SOURCES += ['%s%s' % (name, ext) for name in names] + + Binary() + + +@template +def CppUnitTests(names, ext='.cpp'): + '''Template for C++ unit tests. + + Those have a single source with the same base name as the executable. + ''' + COMPILE_FLAGS['EXTRA_INCLUDES'] = ['-I%s/dist/include' % TOPOBJDIR, + '-I%s/dist/include/testing' % TOPOBJDIR] + CPP_UNIT_TESTS += names + SOURCES += ['%s%s' % (name, ext) for name in names] + + Binary() + + +@template +def Library(name): + '''Template for libraries.''' + LIBRARY_NAME = name + +@template +def AllowCompilerWarnings(): + COMPILE_FLAGS['WARNINGS_AS_ERRORS'] = [] + +@template +def DisableCompilerWarnings(): + COMPILE_FLAGS['WARNINGS_CFLAGS'] = [] + +@template +def RustLibrary(name, features=None, output_category=None, is_gkrust=False): + '''Template for Rust libraries.''' + Library(name) + + IS_RUST_LIBRARY = True + # Some Rust build scripts compile C/C++ sources, don't error on warnings for them. + AllowCompilerWarnings() + + # And furthermore, don't even show warnings for them, so they don't regress + # the Compiler Warnings build metric + # . + DisableCompilerWarnings() + + if features: + RUST_LIBRARY_FEATURES = features + + if output_category: + RUST_LIBRARY_OUTPUT_CATEGORY = output_category + + if is_gkrust: + IS_GKRUST = True + + +@template +def SharedLibrary(name, output_category=None): + '''Template for shared libraries.''' + Library(name) + + FORCE_SHARED_LIB = True + + if output_category: + SHARED_LIBRARY_OUTPUT_CATEGORY = output_category + + Binary() + + +@template +def Framework(name, output_category=None): + '''Template for OSX Frameworks.''' + SharedLibrary(name, output_category) + + IS_FRAMEWORK = True + + +@template +def HostProgram(name): + '''Template for build tools executables.''' + HOST_PROGRAM = name + + +@template +def HostSimplePrograms(names, ext='.cpp'): + '''Template for simple build tools executables. + + Those have a single source with the same base name as the executable. 
+ ''' + HOST_SIMPLE_PROGRAMS += names + HOST_SOURCES += ['%s%s' % (name.replace('host_', ''), ext) + for name in names] + + +@template +def HostSharedLibrary(name): + '''Template for build tools libraries.''' + if name != 'clang-plugin': + error('Please make sure host shared library support is complete ' + 'before using for something else than the clang plugin') + + HOST_LIBRARY_NAME = name + + FORCE_SHARED_LIB = True + +@template +def HostLibrary(name): + '''Template for build tools libraries.''' + HOST_LIBRARY_NAME = name + +@template +def HostRustLibrary(name, features=None): + '''Template for host Rust libraries.''' + HostLibrary(name) + + IS_RUST_LIBRARY = True + # Some Rust build scripts compile C/C++ sources, don't error on warnings for them. + AllowCompilerWarnings() + + if features: + HOST_RUST_LIBRARY_FEATURES = features + +@template +def DisableStlWrapping(): + COMPILE_FLAGS['STL'] = [] + +@template +def NoVisibilityFlags(): + COMPILE_FLAGS['VISIBILITY'] = [] + +@template +def ForceInclude(*headers): + """Force includes a set of header files in C++ compilations""" + if CONFIG['CC_TYPE'] == 'clang-cl': + include_flag = '-FI' + else: + include_flag = '-include' + for header in headers: + CXXFLAGS += [include_flag, header] + +@template +def GeneratedFile(name, *names, **kwargs): + """Add one or more GENERATED_FILES with the given attributes. + + You must pass in at least one generated file (the "name" argument). Other + names can be included as positional arguments after "name".""" + script = kwargs.pop('script', None) + entry_point = kwargs.pop('entry_point', None) + inputs = kwargs.pop('inputs', []) + flags = kwargs.pop('flags', []) + force = kwargs.pop('force', False) + if kwargs: + error('Unrecognized argument(s) to GeneratedFile: %s' % + ', '.join(kwargs)) + if entry_point and not script: + error('entry_point cannot be provided if script is not provided') + if script and ':' in script: + error('script should not include a `:`. If you want to provide an ' + 'alternative entry point for your script, use the entry_point ' + 'parameter.') + + key = (name,) + names if names else name + GENERATED_FILES += [key] + generated_file = GENERATED_FILES[key] + if script and not entry_point: + generated_file.script = script + if script and entry_point: + generated_file.script = script + ':' + entry_point + generated_file.inputs = inputs + generated_file.flags = flags + generated_file.force = force + +@template +def CbindgenHeader(name, inputs): + """Add one GENERATED_FILES by running RunCbindgen.py""" + + inputs = ['!/config/cbindgen-metadata.json'] + inputs + GeneratedFile(name, script='/build/RunCbindgen.py', + entry_point='generate', inputs=inputs) + + +include('gecko_templates.mozbuild') +include('test_templates.mozbuild') + + +@template +def DefineAndWasmDefine(k, v): + DEFINES[k] = v + WASM_DEFINES[k] = v diff --git a/build/test_templates.mozbuild b/build/test_templates.mozbuild new file mode 100644 index 0000000000..0f2c5d9cfa --- /dev/null +++ b/build/test_templates.mozbuild @@ -0,0 +1,33 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
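The templates defined in build/templates.mozbuild above are meant to be called from moz.build files elsewhere in the tree. As a minimal sketch of that usage -- every file name, script path, crate directory and feature name below is a hypothetical placeholder, not something taken from the tree:

# Hypothetical moz.build fragment; names and paths are placeholders.
Program('example-tool')                 # sets PROGRAM and applies Binary()
SOURCES += ['ExampleTool.cpp']

RustLibrary('example_rust_crate', features=['simd'])   # marks IS_RUST_LIBRARY

GeneratedFile('example-generated.h',
              script='/build/example_codegen.py',      # placeholder script
              entry_point='generate',
              inputs=['example-input.idl'])

CbindgenHeader('example_ffi_generated.h', inputs=['/gfx/example_crate'])

Per the template definitions above, GeneratedFile wires the named output into GENERATED_FILES with the given script and entry point, and CbindgenHeader is simply GeneratedFile preconfigured to run /build/RunCbindgen.py over the listed crate directories plus the cbindgen metadata file.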
+ +@template +def GeneratedTestCertificate(name): + if not CONFIG['COMPILE_ENVIRONMENT']: + return + GeneratedFile(name, script='/security/manager/ssl/tests/unit/pycert.py', + inputs=['%s.certspec' % name]) + # Turn RELATIVEDIR into list entry: like + # 'security/manager/ssl/tests/unit/bad_certs' -> + # TEST_HARNESS_FILES.xpcshell.security.manager.ssl.tests.unit.bad_certs. + files = TEST_HARNESS_FILES.xpcshell + for part in RELATIVEDIR.split('/'): + files = files[part] + files += ['!%s' % name] + +@template +def GeneratedTestKey(name): + if not CONFIG['COMPILE_ENVIRONMENT']: + return + GeneratedFile(name, script='/security/manager/ssl/tests/unit/pykey.py', + inputs=['%s.keyspec' % name]) + # Turn RELATIVEDIR into list entry: like + # 'security/manager/ssl/tests/unit/bad_certs' -> + # TEST_HARNESS_FILES.xpcshell.security.manager.ssl.tests.unit.bad_certs. + files = TEST_HARNESS_FILES.xpcshell + for part in RELATIVEDIR.split('/'): + files = files[part] + files += ['!%s' % name] diff --git a/build/tests/cram/cram.ini b/build/tests/cram/cram.ini new file mode 100644 index 0000000000..feb296db44 --- /dev/null +++ b/build/tests/cram/cram.ini @@ -0,0 +1 @@ +[test_configure_help.t] diff --git a/build/tests/cram/test_configure_help.t b/build/tests/cram/test_configure_help.t new file mode 100644 index 0000000000..f606991260 --- /dev/null +++ b/build/tests/cram/test_configure_help.t @@ -0,0 +1,14 @@ +configure --help works + + $ cd $TESTDIR/../../.. + + $ touch $TMP/mozconfig + $ export MOZCONFIG=$TMP/mozconfig + $ ./configure --help 2>& 1 | head -n 7 + Adding configure options from */tmp/mozconfig (glob) + checking for vcs source checkout... hg + checking for vcs source checkout... hg + Usage: configure.py [options] + + Options: [defaults in brackets after descriptions] + --help print this message diff --git a/build/unix/aix.exp b/build/unix/aix.exp new file mode 100644 index 0000000000..20f7cb4313 --- /dev/null +++ b/build/unix/aix.exp @@ -0,0 +1,5 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
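The test templates above follow the same pattern: a test directory's moz.build names the certificate and key outputs, with the matching .certspec/.keyspec files sitting alongside it (directories such as security/manager/ssl/tests/unit/bad_certs, mentioned in the template comments, invoke these in a loop). A minimal sketch with placeholder names:

# Hypothetical moz.build fragment next to example-ca.pem.certspec and
# example-ee.key.keyspec (placeholder file names).
GeneratedTestCertificate('example-ca.pem')
GeneratedTestKey('example-ee.key')

As the templates show, each generated file is also registered under TEST_HARNESS_FILES.xpcshell at the directory's RELATIVEDIR, so the xpcshell harness picks it up without further wiring.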
+ +NSGetModule diff --git a/build/unix/build-binutils/3A24BC1E8FB409FA9F14371813FCEF89DD9E3C4F.key b/build/unix/build-binutils/3A24BC1E8FB409FA9F14371813FCEF89DD9E3C4F.key new file mode 100644 index 0000000000..a76485a9fc --- /dev/null +++ b/build/unix/build-binutils/3A24BC1E8FB409FA9F14371813FCEF89DD9E3C4F.key @@ -0,0 +1,51 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mQINBFm/2cUBEADkvRqMWfAryJ52T4J/640Av5cam9ojdFih9MjcX7QWFxIzJfTF +Yq2z+nb4omdfZosdCJL2zGcn6C0AxpHNvxR9HMDkEyFHKrjDh4xWU+pH4z9azQEq +Jh331X7UzbZldqQo16VkuVavgsTJaHcXm+nGIBTcUbl2oiTtHhmuaYxx6JTMcFjC +7vyO5mLBw78wt52HBYweJ0NjHBvvH/JxbAAULSPRUC61K0exlO49VFbFETQNG1hZ +TKEji95fPbre7PpXQ0ewQShUgttEE/J3UA4jYaF9lOcZgUzbA27xTV//KomP0D30 +yr4e4EJEJYYNKa3hofTEHDXeeNgM25tprhBUMdbVRZpf2Keuk2uDVwc+EiOVri48 +rb1NU+60sOXvoGO6Ks81+mhAGmrBrlgLhAp8K1HPHI4MG4gHnrMqX2rEGUGRPFjC +3qqVVlPm8H05PnosNqDLQ1Pf7C0pVgsCx6hKQB7Y1qBui7aoj9zeFaQgpYef+CEE +RIKEcWwrjaOJwK3pi9HFdxS0NNWYZj8HPzz/AsgTTQdsbulPlVq2SsctmOnL42CZ +OCTppGYwl53CG/EqVY+UQBzFzJBaY8TJRFFYVEy5/HH4H11rMoZwqIkk71EOGU3X +6mWlANRikR3M4GhVITRzuaV69Fed+OeXcCmP94ASLfuhBR2uynmcHpBKpwARAQAB +tDtOaWNrIENsaWZ0b24gKENoaWVmIEJpbnV0aWxzIE1haW50YWluZXIpIDxuaWNr +Y0ByZWRoYXQuY29tPokCOAQTAQIAIgUCWb/ZxQIbAwYLCQgHAwIGFQgCCQoLBBYC +AwECHgECF4AACgkQE/zvid2ePE9cOxAA3cX1bdDaTFttTqukdPXLCtD2aNwJos4v +B4LYPSgugLkYaHIQH9d1NQPhS0TlUeovnFNESLaVsoihv0YmBUCyL4jE52FRoTjE +6fUhYkFNqIWN2HYwkVrSap2UUJFquRVoVbPkbSup8P+D8eydBbdxsY6f+5E8Rtz5 +ibVnPZTib7CyqnFokJITWjzGdIP0Gn+JWVa6jtHTImWx1MtqiuVRDapUhrIoUIjf +98HQn9/N5ylEFYQTw7tzaJNWeGUoGYS8+8n/0sNbuYQUU/zwMVY9wpJcrXaas6yZ +XGpF/tua59t9LFCct+07YAUSWyaBXqBW3PKQz7QP+oE8yje91XrhOQam04eJhPIB +LO88g6/UrdKaY7evBB8bJ76Zpn1yqsYOXwAxifD0gDcRTQcB2s5MYXYmizn2GoUm +1MnCJeAfQCi/YMobR+c8xEEkRU83Tnnw3pmAbRU6OcPihEFuK/+SOMKIuV1QWmjk +bAr4g9XeXvaN+TRJ9Hl/k1k/sj+uOfyGIaFzM/fpaLmFk8vHeej4i2/C6cL4mnah +wYBDHAfHO65ZUIBAssdA6AeJ+PGsYeYhqs6zkpaA2b0wT4f9s7BPSqi0Veky8bUY +YY7WpjzDcHnj1gEeIU55EhOQ42dnEfv7WrIAXanOP8SjhgqAUkb3R88azZCpEMTH +iCE4bFxzOmi5Ag0EWb/ZxQEQALaJE/3u23rTvPLkitaTJFqKkwPVylzkwmKdvd2q +eEFk1qys2J3tACTMyYVnYTSXy5EJH2zJyhUfLnhLp8jJZF4oU5QehOaJPcMmzI/C +ZS1AmH+jnm6pukdZAowTzJyt4IKSapr+7mxcxX1YQ2XewMnFYpLkAA2dHaChLSU/ +EHJXe3+O4DgEURTFMa3SRN/J4GNMBacKXnMSSYylI5DcIOZ/v0IGa5MAXHrP1Hwm +1rBmloIcgmzexczBf+IcWgCLThyFPffv+2pfLK1XaS82OzBC7fS01pB/eDOkjQuK +y16sKZX6Rt57vud40uE5a0lpyItC2P7u7QWL4yT5pMF+oS8bm3YWgEntV380RyZp +qgJGZTZLNq2T4ZgfiaueEV4JzOnG2/QRGjOUrNQaYzKy5V127CTnRg4BYF/uLEmi +zLcI3O3U1+mEz6h48wkAojO1B6AZ8Lm+JuxOW5ouGcrkTEuIG56GcDwMWS/Pw/vN +sDyNmOCjy9eEKWJgmMmLaq59HpfTd8IOeaYyuAQHAsYt/zzKy0giMgjhCQtuc99E +4nQE9KZ44DKsnqRabK9s3zYE3PIkCFIEZcUiJXSXWWOIdJ43j+YyFHU5hqXfECM6 +rzKGBeBUGTzyWcOX6YwRM4LzQDVJwYG8cVfth+v4/ImcXR43D4WVxxBEAjKag02b ++1yfABEBAAGJAh8EGAECAAkFAlm/2cUCGwwACgkQE/zvid2ePE/dqQ/6ApUwgsZz +tps0MOdRddjPwz44pWXS5MG45irMQXELGQyxkrafc8lwHeABYstoK8dpopTcJGE3 +dZGL3JNz1YWxQ5AV4uyqBn5N8RubcA8NzR6DQP+OGPIwzMketvVC/cbbKDZqf0uT +Dy3jP65OFhSkTEIynYv1Mb4JJl3Sq+haUbfWLAV5nboSuHmiZE6Bz2+TjdoVkNwH +Bfpqxu6MlWka+P98SUcmY8iVhPy9QC1XFOGdFDFf1kYgHW27mFwds35NQhNARgft +AVz9FZXruW6tFIIfisjr3rVjD9R8VgL7l5vMr9ylOFpepnI6+wd2X1566HW7F1Zw +1DIrY2NHL7kL5635bHrJY4n7o/n7Elk/Ca/MAqzdIZxz6orfXeImsqZ6ODn4Y47P +ToS3Tr3bMNN9N6tmOPQZkJGHDBExbhAi/Jp8fpWxMmpVCUl6c85cOBCR4s8tZsvG +YOjR3CvqKrX4bb8GElrhOvAJa6DdmZXc7AyoVMaTvhpq3gJYKmC64oqt7zwIHwaC +xTbP6C6oUp9ENRV7nHnXN3BlvIgCo4QEs6HkDzkmgYlCEOKBiDyVMSkPDZdsspa+ +K4GlU2Swi/BDJMjtDxyo+K0M81LXXxOeRfEIfPtZ3ddxBKPva1uSsuz+pbN9d1JY +8Ko5T/h16susi2ReUyNJEJaSnjO5z13TQ1U= +=93P0 +-----END PGP PUBLIC KEY BLOCK----- diff --git a/build/unix/build-binutils/build-binutils.sh 
b/build/unix/build-binutils/build-binutils.sh new file mode 100755 index 0000000000..cab5f3f150 --- /dev/null +++ b/build/unix/build-binutils/build-binutils.sh @@ -0,0 +1,94 @@ +#!/bin/bash + +make_flags="-j$(nproc)" + +root_dir="$1" + +cd $root_dir/binutils-source + +patch -p1 <<'EOF' +From 4476cc67e657d6b26cd453c555a611f1ab956660 Mon Sep 17 00:00:00 2001 +From: "H.J. Lu" +Date: Thu, 30 Aug 2018 09:21:57 -0700 +Subject: [PATCH] ld: Lookup section in output with the same name + +When there are more than one input sections with the same section name, +SECNAME, linker picks the first one to define __start_SECNAME and +__stop_SECNAME symbols. When the first input section is removed by +comdat group, we need to check if there is still an output section +with section name SECNAME. + + PR ld/23591 + * ldlang.c (undef_start_stop): Lookup section in output with + the same name. +--- + ld/ldlang.c | 18 ++++++++++++++++++ + 1 file changed, 18 insertions(+) + +diff --git a/ld/ldlang.c b/ld/ldlang.c +index 8878ccd..d644b56 100644 +--- a/ld/ldlang.c ++++ b/ld/ldlang.c +@@ -6097,6 +6097,24 @@ undef_start_stop (struct bfd_link_hash_entry *h) + || strcmp (h->u.def.section->name, + h->u.def.section->output_section->name) != 0) + { ++ asection *sec = bfd_get_section_by_name (link_info.output_bfd, ++ h->u.def.section->name); ++ if (sec != NULL) ++ { ++ /* When there are more than one input sections with the same ++ section name, SECNAME, linker picks the first one to define ++ __start_SECNAME and __stop_SECNAME symbols. When the first ++ input section is removed by comdat group, we need to check ++ if there is still an output section with section name ++ SECNAME. */ ++ asection *i; ++ for (i = sec->map_head.s; i != NULL; i = i->map_head.s) ++ if (strcmp (h->u.def.section->name, i->name) == 0) ++ { ++ h->u.def.section = i; ++ return; ++ } ++ } + h->type = bfd_link_hash_undefined; + h->u.undef.abfd = NULL; + } +-- +2.17.1 +EOF + +cd .. + +TARGETS="aarch64-linux-gnu" + +# Build target-specific GNU as ; build them first so that the few documentation +# files they install are overwritten by the full binutils build. + +for target in $TARGETS; do + + mkdir binutils-$target + cd binutils-$target + + ../binutils-source/configure --prefix /tools/binutils/ --disable-gold --disable-ld --disable-binutils --disable-gprof --disable-nls --target=$target || exit 1 + make $make_flags || exit 1 + make install $make_flags DESTDIR=$root_dir || exit 1 + + cd .. +done + +# Build binutils +mkdir binutils-objdir +cd binutils-objdir + +# --enable-targets builds extra target support in ld. +# Enabling aarch64 support brings in arm support, so we don't need to specify that too. +../binutils-source/configure --prefix /tools/binutils/ --enable-gold --enable-plugins --disable-nls --enable-targets="$TARGETS" || exit 1 +make $make_flags || exit 1 +make install $make_flags DESTDIR=$root_dir || exit 1 + +cd .. 
+ +# Make a package of the built binutils +cd $root_dir/tools +tar caf $root_dir/binutils.tar.xz binutils/ diff --git a/build/unix/build-gcc/07F3DBBECC1A39605078094D980C197698C3739D.key b/build/unix/build-gcc/07F3DBBECC1A39605078094D980C197698C3739D.key new file mode 100644 index 0000000000..d78c28c2e6 --- /dev/null +++ b/build/unix/build-gcc/07F3DBBECC1A39605078094D980C197698C3739D.key @@ -0,0 +1,53 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mQGiBDpu6yARBACaqhVMzAymKhrUcY1uR1pjYxM5LSuYq6mmCPTNwlGRV5RqQL0p +uXrYlfofu8xsKiVuUKk+Dx5aJT6SDxMNkfogPGMgHK8iCaHiMrw4nTtvrJDaoxDo +k0k62fBa8pGv7N7G0FqfkpBS/x+SDNcgWGgsJugFgqetAiaHIVD4A2tRawCgt72R +OX0StnDnwQFxovV0pIy5ka8D/14GxPLs4qTGWWA6B8mycT67/isaAshq9eJKxZVq +M+0rjSRmhMO0/Ajl4PjzjJXA3PH0H8dTyYSkERjEKQ0McjVLmiTM9SYBtCdkra8Q +Fc+zTPqwjX3AayK5DocfHJ2GRhBXNb2DCdznX4A9zFCssb3FLYE/ZCDqwvrQWH6i +dobAA/0ftbhPLtpZnpgGq1InjDzsvEqHEEt97k/iiQxsRH0/52vLD6ZQaENOlDVt +WulDu3gI+TjI1YgGQq8B7VzW6wRR5JW3Gx9emjP3oTVjTz0bmyuaICyetldfu+yZ +A92SU7Wm4NiMMORB+KkMDfveEWT/XW35mMTJdjpgkQH9KgrEI7QkVmluY2VudCBM +ZWZldnJlIDx2aW5jZW50QHZpbmMxNy5uZXQ+iGIEExECACIFAksWVb0CGyMGCwkI +BwMCBhUIAgkKCwQWAgMBAh4BAheAAAoJEJgMGXaYw3OdKBwAn1gsYIqfmX7cFPVP +bRrQo44e7rZFAJ0RqZAd7PDqT0WectbqGWuaugerf4hlBBMRAgAlAhsjBgsJCAcD +AgYVCAIJCgsEFgIDAQIeAQIXgAUCS/PnRQIZAQAKCRCYDBl2mMNznXR7AJ9gDnrA +LCJfyqRjfVBP6aF4JfzxbQCfTXAAEbnlEhBECqgYF/S8ZjNJD8WIVgQTEQIAFgQL +CgQDAxUDAgMWAgECF4AFAkvz5lEACgkQmAwZdpjDc50eRwCgsQeoNoSgrDpFmfIy +gsU7a5qhqR0AoLQWp2fHpmlNhYua+A8HVxBjoyKJiFYEExECABYFAjpu6yAECwoE +AwMVAwIDFgIBAheAAAoJEJgMGXaYw3OdSgQAnRfkXJVySd9AhQYiMX0iIDqfiGRj +AJ4pLPdp4VvVBPloIt4SN2E559kNRIhZBBMRAgAZBAsKBAMDFQMCAxYCAQIXgAUC +SCGibQIZAQAKCRCYDBl2mMNznduQAJoCD5vaJOLGEO605eNKXTXRt2ygvwCfSNHR +RgaYU+5YIWf3zteNWBxC0K6IYgQTEQIAIgIbIwYLCQgHAwIGFQgCCQoLBBYCAwEC +HgECF4AFAkvz50AACgkQmAwZdpjDc534tACggJHDY3pXzW1T8vDLeysKNIVBkukA +nj6WfWlDjvVSGkZDfcJyhvBXDzsZiGIEExECACIFAksWVd8CGyMGCwkIBwMCBhUI +AgkKCwQWAgMBAh4BAheAAAoJEJgMGXaYw3Od6mYAn0JipNlCsSpyet3FelnGFWS0 +2eDzAJ9SFzy6w0IgIdJJdO0Y6/BAzq+jsIhgBBMRAgAgBQJIIaFtAhsjBgsJCAcD +AgQVAggDBBYCAwECHgECF4AACgkQmAwZdpjDc53gqACffa9gv0J/e9JEt6IFLkYY +fRmbt/YAnirKbsByzSvS0csLhOFx/uOA+qB5iGAEExECACACGyMGCwkIBwMCBBUC +CAMEFgIDAQIeAQIXgAUCSCGiaAAKCRCYDBl2mMNznfLyAKCqhRZQegYMDYoJ9Po+ +5RxOHteSlwCfVARE7QYuaEPWdRGE3hEI6l1rhRqIYAQTEQIAIAUCSCGenwIbIwYL +CQgHAwIEFQIIAwQWAgMBAh4BAheAAAoJEJgMGXaYw3OdNQYAn2/gJ1CdC6tTo1O3 +cc4GD+MG9227AJsEi9hD8xkIJqS9J/7KCpy6Cm+h9IhgBBMRAgAgBQJIIaGEAhsj +BgsJCAcDAgQVAggDBBYCAwECHgECF4AACgkQmAwZdpjDc52c8gCeNpU/yisNGveb +z10ifoz6d03XvyAAn3hNIG8aCemLdPgmHGdhATqTJcGmiGAEExECACAFAkghnsAC +GyMGCwkIBwMCBBUCCAMEFgIDAQIeAQIXgAAKCRCYDBl2mMNznbGYAJ42N2JMtPSn +kVn4qVPHUc7WOU3YCACdFgBS10cg1wzkTF40k8PKy5IKnVOIYAQTEQIAIAUCSCGh +oAIbIwYLCQgHAwIEFQIIAwQWAgMBAh4BAheAAAoJEJgMGXaYw3OdvAwAn3Lux4sL ++FNQGaFKviI+4GG+1BlIAKCGu8WiBKIsUjxC98SjMVG+4xN16rkCDQQ6butMEAgA +gUyl/BQ0OA7B/GSDdx6J/wjS/S4QDx7ZehgigOhJAA74e1rUqeFykb1sqxxkKnCy +AOSqHu2BQXqk7G7ozor5bU8eE6Rki7H6Vf734TprsQgYqPrztgcVxL2InRHcMw8I +GMZZKhWbSzKST6XaEg7Yxy7pkvNhl29bc9scWNjOCxkUt6L9wtp2UEZQf5bL41k1 +A7B1/dGOAe+DOX64x2lNYAlry3f7WV7Yq99YgcFy+V+o2wW5OBb/404x8DIm7bKT +zBiOO1QNNe8vGJAEf1lAhldPE03T9aNNXr0tHytLcDsQbHkbnsJELtY6C2AQiAKy +thMo1OVC+y0+Kr3JMFfumwADBQf8CiymrdhZGEZYsgJfpih+eaoBVgnlY6lHx1bQ +Ovfol4x7B+szlNtHjA+r3PV9uPsrxa6J5qT31iPPHgwu1utTJ8tQov9OpXvEB/2J +8DV8lYzTMpAB/GKoDUFZEGc4q+BQAvTfYYv+6WKoFjRL6iKt+Qb6WyonjG6ViPeb +IURoMP6eE7wPFCVwK8xWHvB32jdf+ni9a2XuE9bLkF8pHcC2pz0gi7vIk88FPo8E +ypKTL5MjC0/7+nYK9K45PZwmWNO0m5BooyP6ddGP0xJq8gisZuSWAFW3I+SW5DyP +nvxpOXCzSj0vCHuHvDbdsUArdNWUTpxw5k3XvAIxPLMBsFK3qIhGBBgRAgAGBQI6 
+butMAAoJEJgMGXaYw3OdiYYAn2SsLZg3Cj2Rg7ZziZ01NE5QpP5CAKCLyZeqvx28 +Lt44/DBv052TOb47tw== +=ERlK +-----END PGP PUBLIC KEY BLOCK----- diff --git a/build/unix/build-gcc/13975A70E63C361C73AE69EF6EEB81F8981C74C7.key b/build/unix/build-gcc/13975A70E63C361C73AE69EF6EEB81F8981C74C7.key new file mode 100644 index 0000000000..53591cf752 --- /dev/null +++ b/build/unix/build-gcc/13975A70E63C361C73AE69EF6EEB81F8981C74C7.key @@ -0,0 +1,82 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mQGiBDs4dV0RBACZII57dgbfnCC7RTrJ1yc0F1ofEZJJ/x4tAtSHMDNj2zTnLR25 +5AHmxN85namwJdn7ixXSZv1FMPCeTs6jDk98YuA9r5uuCNPqCNZsuQtREpN7h+wO +IeRrhvg9/F11mty/5NthXNh8P2ELnkWXSHu6DvTQyGppAtxueOL0CjRrpwCggVYu +vxui5mqNq9+lILbMi2Zm3UkD/0T/0HupthZFXbuzY/h/nyqzoPOxnSAAAx6N7SiE +2w9OQ1w3K8WOFoPH9P0cnIQ+KnMSGQV4C2WY/d8YtShnKkXRYZVvlK+aiwmvf1kU +yNyUqaA/GhW5FWN26zFQc3G5Y9TDjgBqjd6SequZztK5M5cknJGJn+otpdQtA1Dx +2KEABACSYjdRNT3OvQJ7OSz4x4C58JKz/P69WsNZxqYVo66P7PGxM7V2GykFPbG7 +agyEMWP1alvUK551IamVtXN+mD7h3uwi5Er0cFBBfV8bSLjmhSchVpyQpiMe2iAr +IFeWox7IUp3zoT35/CP4xMu5l8pza61U5+hK3G7ud5ZQzVvh8bQrUmljaGFyZCBH +dWVudGhlciAoV29yaykgPHJndWVudGhlckBzdXNlLmRlPohfBBMRAgAfBQJDJvJg +AhsDBwsJCAcDAgEDFQIDAxYCAQIeAQIXgAAKCRBu64H4mBx0x0IPAJ9OiDKdHqdX +2ETKcxD78PcKDCcg6gCfWuJ6TizPW0n5vV16NMKl74j528aIYgQTEQIAIgIbAwIe +AQIXgAUCVmLemAYLCQgHAwIGFQgCCQoLBBYCAwEACgkQbuuB+JgcdMdosgCeLZi7 +4DbKYbK6Sinww8ldLc0eRbgAnjcppbTLIHxcr6Lngb44v4fh8jm5iF8EExECAB8F +AkMm8uECGwMHCwkIBwMCAQMVAgMDFgIBAh4BAheAAAoJEG7rgfiYHHTHTcoAn0/u +FvF25feqywtGPSpL6gQ+VQZiAJ42Q8zMLMqHxd5g0e3L7mrag7EgVIhiBBMRAgAi +AhsDAh4BAheABQJWYt6YBgsJCAcDAgYVCAIJCgsEFgIDAQAKCRBu64H4mBx0x14N +AJ9lFQUMIHywsroHrCpGbAKxvcQrowCeNIbpm2Ct0SNJBKZ8BwhX/1bfrsyIXwQT +EQIAHwUCQybzCQIbAwcLCQgHAwIBAxUCAwMWAgECHgECF4AACgkQbuuB+JgcdMeg +1gCff0P5UUkRXbj/0n0ron/Xh3ji0isAnRZOtUOA2ILSNd9PNCLea9jstf6hiGIE +ExECACICGwMCHgECF4AFAlZi3pgGCwkIBwMCBhUIAgkKCwQWAgMBAAoJEG7rgfiY +HHTH1PAAnj/1LWl3pxLYweV1ZClR0i44GJQcAJoCM0+92pI3VIsSMfkYaUVmOjVz +f4hfBBMRAgAfBQJDJvKmAhsDBwsJCAcDAgEDFQIDAxYCAQIeAQIXgAAKCRBu64H4 +mBx0xyAgAJwN2SASDJN9Y2H9iMjRSCkEftC7PgCeOTjpR3vyDnM7QL8bjwEiR5l7 +l3qIYgQTEQIAIgIbAwcLCQgHAwIBAxUCAwMWAgECHgECF4AFAkm3jjkCGQEACgkQ +buuB+JgcdMcXrACfVTEyxl0EqQN+FpmssqVUXMuGIPkAnjuh0lk4rlWnFHuRPKFP +aLNcn7TbiGUEExECACUCGwMCHgECF4ACGQEFAlZi3pMGCwkIBwMCBhUIAgkKCwQW +AgMBAAoJEG7rgfiYHHTHIBIAn20wZDYF0KrfbJNzK4/VwAEAzN+wAJ9Dpbhtq4sR +oH3cbadBsD2mXXthOohXBBMRAgAXBQI7OHVdBQsHCgMEAxUDAgMWAgECF4AACgkQ +buuB+JgcdMexIACfUdyOhJRqUp4ENf5WMF7zbVVLryoAn2cNiUWC2u4za4NDyde6 ++JGW3yo4iFoEMBECABoFAkm3je4THSBBY2NvdW50IGRpc2FibGVkLgAKCRBu64H4 +mBx0xw8pAJ9f38BHfCYcFBFrzasWJ50aYiq9agCeJc39ixXix4rnOa8vzBvSqILU +3J2IXwQTEQIAHwUCPvYc2wIbAwcLCQgHAwIBAxUCAwMWAgECHgECF4AACgkQbuuB ++JgcdMcsEACfQPXptVqB3lVdH8NmJq9988UjdugAnjc51tLV7wP/omMaG6zxqOBe +bByGiFcEExECABcFAjs4dV0FCwcKAwQDFQMCAxYCAQIXgAAKCRBu64H4mBx0x7Eg +AJ9R3I6ElGpSngQ1/lYwXvNtVUuvKgCfZw2JRYLa7jNrg0PJ17r4kZbfKjiIVwQT +EQIAFwULBwoDBAMVAwIDFgIBAheABQJJt44zAAoJEG7rgfiYHHTHt1oAmwfqV/fy +BQtuo6iVwyrLTrv6SH8WAJ9+vQxODP5nLEVv0VDkPe9YDmnHIohaBBMRAgAaBQsH +CgMEAxUDAgMWAgECF4AFAkMm92MCGQEACgkQbuuB+JgcdMf9FgCffJBUSQIPBPWC +zQvDLdCCQKj1gS0AnjY8bbEU+8j9MJdoyti8VQqc063IiFoEMBECABoFAkm3jboT +HSBBY2NvdW50IGRpc2FibGVkLgAKCRBu64H4mBx0x3w4AJ9uJb1MnaB4XL2W4/ur +kpvbRPiNrgCfRnEpymRfBRjuqSZpLr6t2548MFaIWgQwEQIAGgUCSbeOjRMdIEFj +Y291bnQgZGlzYWJsZWQuAAoJEG7rgfiYHHTHsjkAn3kJ+cwIuWjR07f/1L87hC1x +MGmAAJ45JUNoUgl45+JYUVamI+Sno02roLkBCwRDJvHRAQgA+McP+S2zoZBu2xX7 +r5pmB8IroxVl7Xgw5cUbrQWacc/NfKaivO7sPFJA6QqIpTj2ZSSVMhDUSsYivycL +OOZUeabsIfnd3Lz86SU+Cl5wEsZI/1aKpDxMnE1SINZADSvYdZUCyLzo34Td725s 
+3hVIrjJ3okxHUynYqDJLYsrY+NGj6jua6U4VoACjGaLyBYhVHqy/l2SHeD/r8N8q +DfZTwJaMWnkhcqaTIw9Ifl45kvh4F/HghrVwVxZ8Mll2xhD4QH5q7MerKv8NLmif +hpLvZYCmlaTAfUy799ic4RjfvIXgbBg9v8zkujPbBMzF2N9+XMIx19DnoK4yV9zz +gx5P8wAGKYhJBBgRAgAJBQJDJvHRAhsCAAoJEG7rgfiYHHTH8bQAn3wHFhPW+umo +2VjoxvBftJ3dKzXnAJ0Q6iW7EvhZeCIUE4Wcs5AYavoaXIkBZwQYEQIACQIbAgUC +SXscKQEpwF0gBBkBAgAGBQJDJvHRAAoJEDqwCZb8JqZBuHQIAOoXgUMEyxCHz6+S +EW9c5NC+1eRAy5B52vJoIYdxL97n8nTFvm4vJsyecXKH20jLxyP2xzv3J5NO5dJA +smBTTZeHoQviiwal7klZa8VtjhLI2TJHdRyleDOQfzRyuwcXmLHALHLs9MSNDjzJ +PT2GKDh6IMdDV9LijHQXlpRDiraaThs21TpYcQ//yXoErBJQL3+V8VCYyeTtJ4hC +pPCAL1NqA9mEJDP+01kGj63cROVFx89nZ2MIZEmbmZswb+nATLUv1+t2inMFiTnr +ISm4D3seOYgO+3fhhsA6U9g9IKy+eHNl2hWtG/+oFwbE2F1gDvPCIYOWuNF/tGDU +pPyLO+gJEG7rgfiYHHTHVYMAlROqNeZ/TalCmF9ijupqU65WvW0AnRRSCD49emCs +/SWngtDxJuTG8FGFuQELBEMm8fwBCAC3KLX691TOFiizmWZTOeRNREUEZYy89I6c +HrYjYyrRkBrOHJGNvoS5JO4Zy6wlc9bNGWxQU92bJCMiqE8n1mRRIs6J4gExThWq +BZzsZlcrs/gu6HxPFCvPlg62emPkd6//KPrcAIMshvNKGLMFK15n5Nkv5ofv/xcr +/fqjisISnk4fr1GI9wJQUQdCTEXu9o92erIfzb8m1Q7FJbXNhyv7tcekdr5Q20jr +ZDgxX3H1aLq8EG8nrNlJqulWLtWIh/k9Uwa5ZvmcDVhKES1BUqdCefqkGpFQXiIt +zKu6cgs8anXeG1RRqFoOvipZQ/lUqYQtP0iK05NHQFfp7cTaHo2fAAYpiEYEGBEC +AAYFAjs4dV4ACgkQbuuB+JgcdMeqzACfeHjT2PFYdy88PHNVGw5se9PqGPYAnArp +X32fDdu/xhuqjqHrNkwyO/YoiEkEGBECAAkFAkMm8dECGwIACgkQbuuB+JgcdMfx +tACffAcWE9b66ajZWOjG8F+0nd0rNecAnRDqJbsS+Fl4IhQThZyzkBhq+hpciEkE +GBECAAkFAkMm8fwCGwwACgkQbuuB+JgcdMcy4QCdFw3ipNDVX3Z77ZHMmbYhhtUm +M8EAnA1jqzeVutwLtlzYT+Tl/HDB6dJOuQENBDs4dV4QBACXdavIYhl+L248s1mU +i9EUESu9QovNzuf79zUZpRUzFdwX8hq56BuWHjU6hXYpzPWwXHnYwsNINNXUPAOf +h83PA/sNg572HgQGkx48bUNLstDQugPrzau97LoK/DD54WYEFd2ISoJe8+5bh3dY +yc6xCovkGJJAf4aLAissU3vKPwADBQP+P0U7OJ/UYt2hIbx+wSL/9rGrSxcj421F +Q6u+auRMIbejmtk4k3DP4oFCk/jkt3Oiw7hX+Q9W4nlTgSmsQ9Gp6N9JNb6gr4GC +bSZ8iaDDsm9p2Q15d8l3BiJ263IXWOOuhV2qmtKMABqhmBKLazDTcIXHVaR0v4YJ +xzA3ohWXk4iIRgQYEQIABgUCOzh1XgAKCRBu64H4mBx0x6rMAJ94eNPY8Vh3Lzw8 +c1UbDmx70+oY9gCcCulffZ8N27/GG6qOoes2TDI79iiISQQYEQIACQUCQybx0QIb +AgAKCRBu64H4mBx0x/G0AJ98BxYT1vrpqNlY6MbwX7Sd3Ss15wCdEOoluxL4WXgi +FBOFnLOQGGr6Glw= +=XJ6e +-----END PGP PUBLIC KEY BLOCK----- diff --git a/build/unix/build-gcc/33C235A34C46AA3FFB293709A328C3A2C3C45C06.key b/build/unix/build-gcc/33C235A34C46AA3FFB293709A328C3A2C3C45C06.key new file mode 100644 index 0000000000..f183ce9e49 --- /dev/null +++ b/build/unix/build-gcc/33C235A34C46AA3FFB293709A328C3A2C3C45C06.key @@ -0,0 +1,33 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mQGiBECGYZsRBAC9VE8N8vHAG87MTC2wbtnmbSD8Wc2xYCaLofZAH+vXyio3Dnwx +jQLlj7IgwRWNAVq13uL0wn0WAsGop5Cs7nA/JD4MEBBNSdnvq1bMYitch2PTtAU+ +h6HaI9JXBDUh4AKZz2rllKgbigMHlgIugxnKTAMJIhS63lCTHWEDlnycJwCgqSX9 +hDs9eBC5coearGDhc0BDvTsD/A05YkZkQBgsYD6cjWFwNLJIcaHORKlLLZ9gRJO5 +LVcKaCEgYSWAM7dadJeqIFi9RkXdv+cWozxTgrGlY4T7/PakIBB7wWj2Zl72mW5a +NHT2vAemB8IFV1saiFXZM+qDhCHbV4yKSmNOQHY1VnSCUrgINiM0qlTz08yjUazK +fm2BBACDF3ZfUQNeHC9zwfsgCzKnqOm7FSlwOyI0f+j83B5PH2+KuzuyEqYoxGp+ +2d1zTxvbOeBBaX8T1M4n5d9ixiFMhgbTzuyit3nn6cp5j2L0IAS9pw0kaWpPMhpQ +zydNgnaBxHs1Y+cP4iM/4FWFCvfjUdR7xULdEzkgGxevu8pNEbQgSmFrdWIgSmVs +aW5layA8amFrdWJAcmVkaGF0LmNvbT6IZAQTEQIAJAIbAwYLCQgHAwIDFQIDAxYC +AQIeAQIXgAUCSe3VIgUJEs109wAKCRCjKMOiw8RcBqANAJ0VlFMTtevlkEM+ym4k +yE3YOrGZ+wCeP7lZGc2jVLHJfrOKxXsTM5YPWhqIZAQTEQIAJAIbAwYLCQgHAwID +FQIDAxYCAQIeAQIXgAUCTI3tMgUJHtOOlwAKCRCjKMOiw8RcBjySAJ9ApMXF3+gW +Ir0zpMxvWb53/oxsHgCaAl6V5JS9GJUnrPiHKdR+sMFPkd6IZAQTEQIAJAUCQIZh +mwIbAwUJCWYBgAYLCQgHAwIDFQIDAxYCAQIeAQIXgAAKCRCjKMOiw8RcBrC+AJ9d +mQcWoZHFGoinHck309KD0m2FegCeMBjr/M6Ec1myCYMUhtpl5DI7zY25Ag0EQIZh 
+ohAIALrI1X59CM30/Ufg+O9FFRRyM8GefACfItrIvp6jx+0ZMY+/ZbYnlMzI7Gz4 +xNXc+83Zsz7zE5xogNcq9LILdhB7Ta1ZRkRttM8AdfyakRQTjzCPtxSPgSao/Dcu +CL09BZdaeeqMAxLmw9DnY3xmZqQtCau8PlgIiClq2db9Wy0bpQ+DDfQV4MlX6eoI +33TG9Moy59QQUG5reQ2JNkQZRebPxJAPiAgHoF/Q+XO1pLeCccIN7SApe7yVd/4A +sS3Y9lZj2JvEvutLojsRGL0E/CAwH8cJqPAt65qbOgQzCILhcc9aYZ234g9n7Kpx +Ck1h2QMtXfsmaA7GsrXo1Ddfra8ABA0H/0sa4SCQhWQ14tOFkN15xzuaqGOxUD+O +uAsgRdKaFdIhZnj0MRmvOfBSP7hONw7fE0m9DVq9NDPqFcMeyCuBNIMpGIuN6CAK +/G0K2UgzoCxMXUEYGncFfVnOoNURV9u2lGq7ZMNJmuzt0BhxXtUYRlH3WRPqPyGv +s/OrIqvgN+Kf9+i0kQSObWz6CeYnBKzCc++MPkVhYj8KR5Y6n3zPZpnOfmO3c0rY +C+KiNoMwchlZmiOh7zgcTybv4zuOU7bppEidreIq2/o4nBNTao/5uzYdDX9FBpDT +hhU9ErdO8Vd7Vf2I1/WQdt6dHUXPLfkwI8+ODE/4R/Oz8opFC5L22kSITwQYEQIA +DwIbDAUCTI3tTQUJHtOOqwAKCRCjKMOiw8RcBrBvAKCTFx5FOuuxM2VoQka8iBGj +f1vcugCdHV/JIhOwETTqOQEbkw3y9ng2+4U= +=K9Jj +-----END PGP PUBLIC KEY BLOCK----- diff --git a/build/unix/build-gcc/343C2FF0FBEE5EC2EDBEF399F3599FF828C67298.key b/build/unix/build-gcc/343C2FF0FBEE5EC2EDBEF399F3599FF828C67298.key new file mode 100644 index 0000000000..548a560202 --- /dev/null +++ b/build/unix/build-gcc/343C2FF0FBEE5EC2EDBEF399F3599FF828C67298.key @@ -0,0 +1,35 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mQFNBFDrIWMBCgCyyYoTAD/aL6Yl90eSJ1xuFpODTcwyRZsNSUZKSmKwnqXo9LgS +2B00yVZ2nO2OrSmWPiYikTciitv04bAqFaggSstx6hlni6n3h2PL0jXpf9EI6qOO +oKwi2IVtbBnJAhWpfRcAce6WEqvnav6KjuBM3lr8/5GzDV8tm6+X/G/paTnBqTB9 +pBxrH7smB+iRjDt/6ykWkbYLd6uBKzIkAp4HqAZb/aZMvxI28PeWGjZJQYq2nVPf +LroM6Ub/sNlXpv/bmHJusFQjUL368njhZD1+aVLCUfBCCDzvZc3EYt3wBkbmuCiA +xOb9ramHgiVkNENtzXR+sbQHtKRQv/jllY1qxROM2/rWmL+HohdxL5E0VPple2bg +U/zqX0Hg2byb8FbpzPJO5PnBD+1PME3Uirsly4N7XT80OvhXlYe4t+9X0QARAQAB +tCROaWVscyBNw7ZsbGVyIDxuaXNzZUBseXNhdG9yLmxpdS5zZT6JAX4EEwECACgF +AlDrIWMCGwMFCRLMAwAGCwkIBwMCBhUIAgkKCwQWAgMBAh4BAheAAAoJEPNZn/go +xnKYqm0J/A4b6TE5qPWiWj0kriUBSmpys3qUz93gR6Ft7w2f478KJuzbSadvyn0u +PcnP26AGTOQq75RhtgCJgdYbvRocTjlMh9jOX584Hx8hi/QSrpCSYMnj6dQKbu0Y +QIFjZx8gPeYvzG8t34FCNEzZ09RQZqy/ukRyN99LkwEuP4FWq486b7dpgv7GC+SH +lZcMco6VW8FLOT7KMalH06cmdhFPrFSYAIHDu3CsYhC8knIQV99Xzno/KeSkEwkq +tYDOdz0x4HWdOwHrl2S2X6Ex1q3QRXcq84EYQwHz2WEGaPR7Vd76P5J1wiHN6rwO +4exfgsRyTvc6NDQPTFqmoCzwuPviYk6JNnHr9E5TkLT7lAnESEhMLyyIG/7Uwpgu +5C71IMaTpOpf8DEU9NU/zuxgHoMaKBZaeYKs0S26s1zwGOlQX0T9uQFNBFDrIWMB +CgDKlONI+5Bqcu69+72fmLZPizzEUsIRA2Y0w2RE7+uJ5Es9/YTp5PnWANpPT7GS +8JJnc6NJJeh6GkMkGGwq5Op7CDsjW9pQZ0vAW90XjnyniDa9W0W+m5+X/LPOzh+n +is9Zcf17P91tprLCLi+TOOb35xt396pZ+S+PwuV0dLiIYdVYV3e6LNCV0LjhEqp5 +3TRwTrLTNPQVnt0DPYTh/Kn1x6d5zOS0MK4QybKN1WJU6nYIQRXyWKkixjbs++jc +gV/juck96Ve0blvn6DfqfpG8YzbmqRCufLo683LtlBUZ0c+znrD1nouqX2Eb/Cyl +G8Q8ZUHXimCJ+g6RfH9kOmtVH/208u/nDofVL/Q0dvAXfU5MX49c7XYy7B2rTlk+ +4nuNeaHM0aU2Y14+SQy+sR6zydu7eGLdqjzV0CX/ekgrjQARAQABiQFlBBgBAgAP +BQJQ6yFjAhsMBQkSzAMAAAoJEPNZn/goxnKYGUcJ/j+L0/uzfwCR1aTBZ6FBT9Od +NyatVjmz20ahskF3BySmkT1R06K08YOGJ//LPajj0eKqU8WKgxMc7pWi5SG+yMFn +2db5HnJDGiSmSjCXW/BzsSt1786LtO0m0ehatj9kl6JrxQNXazOkRJ2ww13P6/91 +RBaV6R08BmFTrUco2P6w+djCF4NlnkOLa7fM6QtNZM+yB+EzaPjSBFjZG52BVWZk +cXEVN0cEjPuznuQOmx8Dny7lQikp49NumrbamaxZEilx2Bi9gSbovNaKBuncKi9X +boiEiNbAarGxP40Qvlk2AuXWvq+fiBnU1e1nU2oV7/7nAWH7kj/Vr/JxcBeOpsND +GkW7Yrd3mkJCrhG+jMs1V2qNb9Uhr5ZLOA40sIz2PHfDrR+gc8THm2p5OvCWEAeu +kYJ22XTUIt6XoPO0ERYD +=MH4q +-----END PGP PUBLIC KEY BLOCK----- diff --git a/build/unix/build-gcc/5ED46A6721D365587791E2AA783FCD8E58BCAFBA.key b/build/unix/build-gcc/5ED46A6721D365587791E2AA783FCD8E58BCAFBA.key new file mode 100644 index 0000000000..7cc6ba735c --- /dev/null +++ b/build/unix/build-gcc/5ED46A6721D365587791E2AA783FCD8E58BCAFBA.key @@ -0,0 +1,38 
@@ +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mQGiBDuVqKgRBAD5Mcvdc41W5lpeZvYplEuyEBXwmxnUryE2KaCG1C06sGyqgiec +VPXPbgIPKOUt4veMycVoqU4U4ZNuIeCGPfUvkGKLKvy5lK3iexC1Qvat+9ek2+yX +9zFlTo9QyT4kjn+xaZQYVctL370gUNV4eoiWDdrTjIvBfQCb+bf87eHv0QCg/7xt +wnq3uMpQHX+k2LGD2QDEjUcEALalUPPX99ZDjBN75CFUtbE43a73+jtNOLJFqGo3 +ne/lB8DqVwavrgQQxQqjg2xBVvagNpu2Cpmz3HlWoaqEb5vwxjRjhF5WRE+4s4es +9536lQ6pd5tZK4tHMOjvICkSg2BLUsc8XzBreLv3GEdpHP6EeezgAVQyWMpZkCdn +Xk8FA/9gRmro4+X0KJilw1EShYzudEAi02xQbr9hGiA84pQ4hYkdnLLeRscChwxM +VmoiEuJ51ZzIPlcSifzvlQBHIyYCl0KJeVMECXyjLddWkQM32ZZmQvG02mL2XYmF +/UG+/0vd6b2ISmtns6WrULGPNtagHhul+8j7zUfedsWuqpwbm7QmTWFyayBBZGxl +ciA8bWFkbGVyQGFsdW1uaS5jYWx0ZWNoLmVkdT6IRgQQEQIABgUCPIx/xAAKCRDZ +on0lAZZxp+ETAJ0bn8ntrka3vrFPtI6pRwOlueDEgQCfdFqvNgLv1QTYZJQZ5rUn +oM+F+aGIRgQQEQIABgUCQ5GdzQAKCRAvWOuZeViwlP1AAJ4lI6tis2lruhG8DsQ0 +xtWvb2OCfACfb5B/CYDjmRInrAgbVEla3EiO9sKIWAQQEQIAGAUCO5WoqAgLAwkI +BwIBCgIZAQUbAwAAAAAKCRB4P82OWLyvunKOAJ9kOC1uyoYYiXp2SMdcPMj5J+8J +XQCeKBP9Orx0bXK6luyWnCS5LJhevTyJARwEEAECAAYFAlDH6cIACgkQdxZ3RMno +5CguZAf/dxDbnY+rad6GJ1fYVyB9PfboyXLY/vksmupE9rbYmuLP85Rq1hdN56aZ +Qwjm7EPQi6htFANKOPkjOhutSD4X530Dj6Y7To8t85lW3351OP07EfZGilolIugU +6IMZNaUHVF1T0I68frkNTrmRx0PcOJacWB6fkBdoNtd5NLASgI+cszgLsD6THJZk +58RUDINY6fGBYFZkl2/dBbkLaj3DFr+ed6Oe99d546nfSz+zsm454W2M+Wf/yplK +O8Sd641h1eRGD/vihsOO+4gRgS+tQNzwb+eivON0PMvsGAEPEQ+aPVQ/U/UIQSYA ++cYz2jGSXhVppatEpq5U3aJLbcZKOrkCDQQ7laipEAgA9kJXtwh/CBdyorrWqULz +Bej5UxE5T7bxbrlLOCDaAadWoxTpj0BV89AHxstDqZSt90xkhkn4DIO9ZekX1KHT +UPj1WV/cdlJPPT2N286Z4VeSWc39uK50T8X8dryDxUcwYc58yWb/Ffm7/ZFexwGq +01uejaClcjrUGvC/RgBYK+X0iP1YTknbzSC0neSRBzZrM2w4DUUdD3yIsxx8Wy2O +9vPJI8BD8KVbGI2Ou1WMuF040zT9fBdXQ6MdGGzeMyEstSr/POGxKUAYEY18hKcK +ctaGxAMZyAcpesqVDNmWn6vQClCbAkbTCD1mpF1Bn5x8vYlLIhkmuquiXsNV6TIL +OwACAgf/aMWYoBCocATXsfSUAJb69OPUXWjevZiCf6n+7Id3L5X5um55L5sEBr8+ +8m5SIuHUippgNFJdu2xyulbb1MeegtTttEWymF9sM8cWfeTjXPOd7+ZQumiOXwk/ +g0qqjTrq7EYW5PlMjO2FbH/Ix9SHKVS9a0eGUUl+PBv3fkEZBJ4HhweqcSfLyKU/ +CHysN03Z36gtdu1BJlzHy8BPxWzP4vtPEi57Q1dFDY/+OrdlBnwKTpne6y0rAbi/ +wk6FxDGQ86vdapLI51kTxvkYx8+qZXqE4CG5fWbAFDQVTNZIWJNgYMX7Kgl8Fvw+ +7zCqJsv/KbuonIEb5hNViflVTWlBAIhMBBgRAgAMBQI7laipBRsMAAAAAAoJEHg/ +zY5YvK+6T88An1VSVGbeKbIL+k8HaPUsWB7qs5RhAKDdtkn0xqOr+0pE5eilEc61 +pMCmSQ== +=5shY +-----END PGP PUBLIC KEY BLOCK----- diff --git a/build/unix/build-gcc/7F74F97C103468EE5D750B583AB00996FC26A641.key b/build/unix/build-gcc/7F74F97C103468EE5D750B583AB00996FC26A641.key new file mode 100644 index 0000000000..6f23744afe --- /dev/null +++ b/build/unix/build-gcc/7F74F97C103468EE5D750B583AB00996FC26A641.key @@ -0,0 +1,54 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mQGiBDs4dV0RBACZII57dgbfnCC7RTrJ1yc0F1ofEZJJ/x4tAtSHMDNj2zTnLR25 +5AHmxN85namwJdn7ixXSZv1FMPCeTs6jDk98YuA9r5uuCNPqCNZsuQtREpN7h+wO +IeRrhvg9/F11mty/5NthXNh8P2ELnkWXSHu6DvTQyGppAtxueOL0CjRrpwCggVYu +vxui5mqNq9+lILbMi2Zm3UkD/0T/0HupthZFXbuzY/h/nyqzoPOxnSAAAx6N7SiE +2w9OQ1w3K8WOFoPH9P0cnIQ+KnMSGQV4C2WY/d8YtShnKkXRYZVvlK+aiwmvf1kU +yNyUqaA/GhW5FWN26zFQc3G5Y9TDjgBqjd6SequZztK5M5cknJGJn+otpdQtA1Dx +2KEABACSYjdRNT3OvQJ7OSz4x4C58JKz/P69WsNZxqYVo66P7PGxM7V2GykFPbG7 +agyEMWP1alvUK551IamVtXN+mD7h3uwi5Er0cFBBfV8bSLjmhSchVpyQpiMe2iAr +IFeWox7IUp3zoT35/CP4xMu5l8pza61U5+hK3G7ud5ZQzVvh8bQtUmljaGFyZCBH +dWVudGhlciA8cmljaGFyZC5ndWVudGhlckBnbWFpbC5jb20+iGUEExECACUCGwMC +HgECF4ACGQEFAlZi3pMGCwkIBwMCBhUIAgkKCwQWAgMBAAoJEG7rgfiYHHTHIBIA +n20wZDYF0KrfbJNzK4/VwAEAzN+wAJ9Dpbhtq4sRoH3cbadBsD2mXXthOohiBBMR +AgAiAhsDAh4BAheABQJWYt6YBgsJCAcDAgYVCAIJCgsEFgIDAQAKCRBu64H4mBx0 +x2iyAJ4tmLvgNsphsrpKKfDDyV0tzR5FuACeNymltMsgfFyvoueBvji/h+HyObmI 
+YgQTEQIAIgIbAwIeAQIXgAUCVmLemAYLCQgHAwIGFQgCCQoLBBYCAwEACgkQbuuB ++JgcdMdeDQCfZRUFDCB8sLK6B6wqRmwCsb3EK6MAnjSG6ZtgrdEjSQSmfAcIV/9W +367MiGIEExECACICGwMCHgECF4AFAlZi3pgGCwkIBwMCBhUIAgkKCwQWAgMBAAoJ +EG7rgfiYHHTH1PAAnj/1LWl3pxLYweV1ZClR0i44GJQcAJoCM0+92pI3VIsSMfkY +aUVmOjVzf4haBDARAgAaBQJJt43uEx0gQWNjb3VudCBkaXNhYmxlZC4ACgkQbuuB ++JgcdMcPKQCfX9/AR3wmHBQRa82rFiedGmIqvWoAniXN/YsV4seK5zmvL8wb0qiC +1NydiFoEMBECABoFAkm3jo0THSBBY2NvdW50IGRpc2FibGVkLgAKCRBu64H4mBx0 +x7I5AJ95CfnMCLlo0dO3/9S/O4QtcTBpgACeOSVDaFIJeOfiWFFWpiPkp6NNq6C5 +AQsEQybx0QEIAPjHD/kts6GQbtsV+6+aZgfCK6MVZe14MOXFG60FmnHPzXymorzu +7DxSQOkKiKU49mUklTIQ1ErGIr8nCzjmVHmm7CH53dy8/OklPgpecBLGSP9WiqQ8 +TJxNUiDWQA0r2HWVAsi86N+E3e9ubN4VSK4yd6JMR1Mp2KgyS2LK2PjRo+o7mulO +FaAAoxmi8gWIVR6sv5dkh3g/6/DfKg32U8CWjFp5IXKmkyMPSH5eOZL4eBfx4Ia1 +cFcWfDJZdsYQ+EB+auzHqyr/DS5on4aS72WAppWkwH1Mu/fYnOEY37yF4GwYPb/M +5Loz2wTMxdjfflzCMdfQ56CuMlfc84MeT/MABimJAWcEGBECAAkCGwIFAkl7HCkB +KcBdIAQZAQIABgUCQybx0QAKCRA6sAmW/CamQbh0CADqF4FDBMsQh8+vkhFvXOTQ +vtXkQMuQedryaCGHcS/e5/J0xb5uLybMnnFyh9tIy8cj9sc79yeTTuXSQLJgU02X +h6EL4osGpe5JWWvFbY4SyNkyR3UcpXgzkH80crsHF5ixwCxy7PTEjQ48yT09hig4 +eiDHQ1fS4ox0F5aUQ4q2mk4bNtU6WHEP/8l6BKwSUC9/lfFQmMnk7SeIQqTwgC9T +agPZhCQz/tNZBo+t3ETlRcfPZ2djCGRJm5mbMG/pwEy1L9frdopzBYk56yEpuA97 +HjmIDvt34YbAOlPYPSCsvnhzZdoVrRv/qBcGxNhdYA7zwiGDlrjRf7Rg1KT8izvo +CRBu64H4mBx0x1WDAJUTqjXmf02pQphfYo7qalOuVr1tAJ0UUgg+PXpgrP0lp4LQ +8SbkxvBRhbkBCwRDJvH8AQgAtyi1+vdUzhYos5lmUznkTURFBGWMvPSOnB62I2Mq +0ZAazhyRjb6EuSTuGcusJXPWzRlsUFPdmyQjIqhPJ9ZkUSLOieIBMU4VqgWc7GZX +K7P4Luh8TxQrz5YOtnpj5Hev/yj63ACDLIbzShizBSteZ+TZL+aH7/8XK/36o4rC +Ep5OH69RiPcCUFEHQkxF7vaPdnqyH82/JtUOxSW1zYcr+7XHpHa+UNtI62Q4MV9x +9Wi6vBBvJ6zZSarpVi7ViIf5PVMGuWb5nA1YShEtQVKnQnn6pBqRUF4iLcyrunIL +PGp13htUUahaDr4qWUP5VKmELT9IitOTR0BX6e3E2h6NnwAGKYhJBBgRAgAJBQJD +JvH8AhsMAAoJEG7rgfiYHHTHMuEAnRcN4qTQ1V92e+2RzJm2IYbVJjPBAJwNY6s3 +lbrcC7Zc2E/k5fxwwenSTrkBDQQ7OHVeEAQAl3WryGIZfi9uPLNZlIvRFBErvUKL +zc7n+/c1GaUVMxXcF/Iauegblh41OoV2Kcz1sFx52MLDSDTV1DwDn4fNzwP7DYOe +9h4EBpMePG1DS7LQ0LoD682rvey6Cvww+eFmBBXdiEqCXvPuW4d3WMnOsQqL5BiS +QH+GiwIrLFN7yj8AAwUD/j9FOzif1GLdoSG8fsEi//axq0sXI+NtRUOrvmrkTCG3 +o5rZOJNwz+KBQpP45LdzosO4V/kPVuJ5U4EprEPRqejfSTW+oK+Bgm0mfImgw7Jv +adkNeXfJdwYidutyF1jjroVdqprSjAAaoZgSi2sw03CFx1WkdL+GCccwN6IVl5OI +iEYEGBECAAYFAjs4dV4ACgkQbuuB+JgcdMeqzACfeHjT2PFYdy88PHNVGw5se9Pq +GPYAnArpX32fDdu/xhuqjqHrNkwyO/Yo +=TzkT +-----END PGP PUBLIC KEY BLOCK----- diff --git a/build/unix/build-gcc/AD17A21EF8AED8F1CC02DBD9F7D5C9BF765C61E3.key b/build/unix/build-gcc/AD17A21EF8AED8F1CC02DBD9F7D5C9BF765C61E3.key new file mode 100644 index 0000000000..95a04ebe6a --- /dev/null +++ b/build/unix/build-gcc/AD17A21EF8AED8F1CC02DBD9F7D5C9BF765C61E3.key @@ -0,0 +1,57 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mQMuBEvtHBoRCACUnk4CbRKM5SsykvTko30oeZqmzDF4bS/usOEcZBjtpudsZBC4 +Po7zfIQAvRyCyEsXtBHCM9KhUNgIbfToDfb9quXvH0KR5D/lcHL3eOHfFPX+Yr34 +ouHj/+2yFQNNrsmEmteOFJVM+zX1KBx2I8XQWDNbnMbEbPj/DdCvsk7+3uoQCepG +bFD07pk7iFb1ny6DXgvM4fItJbY5z7+IQSJCv9blRNy55oCkOdGm1FE4Q/SPgbT4 +quZoec2IxGlFGt9ThUDpuYPcdejyjaC5eFDozhqXwMDh17yBDS53XF6lV02Djs7L +e6QbUJv4B3rqvOGV+eLfRxFuy6X6XEOh8FgrAQCzj7dNslwWI9nTwp5GCr7IO7jz +Ynmw+keMcaOUu0Gd2wf/f/uonF/RVy+Gp+PGHnPhi20xaKZ9unf3l3KWELTpizI9 +Of4R+N9AOpVR4Bf1MgkCV4VH8cpOUQOxQQUEYOpYYYH0EeuDlBItVgvcdG40bnQA +PUwWdqbHUh1cXjD0kGQLv8B2+O31GfnjDQhnNJ5C9KdhKf2sLRkNJtMLU5XsPFMF +qoAW7I0cak2XCuHokiOdJq3bhOX4FdxRGlFPOXNOQA53nYRb0kHv4gfKBHwPJbPT +T3MFgoqO23q+om2cFqwVRTVLW4Cg+Ki5dvFkJrufE/NNaCRuSlj3G2WF5K3OOZct +O7xsDsp5wPMQu1tkuwoZcnp+EmvI8QQkPl722eWf3wf7BFjLCIqi1ivu0GVVMLOM 
+DMGRZeSkjVrLj1xw5BbWsQ8jOAGvnrqC5zpQoMQLzYyPGb6KzXX8Df1kbQEys7M/ +FoLVIhSE/Elr4e5epNW+8zpmLSW61PlDNraHYHcCxf9RY9aZrxtzEXxdCpPZ+bk3 +8sh4kvAv6XUsmweAu2RRY97u5KNyWkIEhhJJcd96cK6FNc9GeOLCiXQPJqK1ORSj +bCBX8HL1U1r8iOo7Hh+Y25flZ0vRSE/6Fsw1X+seTakelh8EWQtIr+i+oClHgmrT +su9NhhQFFvAUFNdN0K1TcADhfj5nPTImet1x9oAUsU//lOXBFWYhs9sitE879uQs +d7QeQW5kcmVhcyBFbmdlIDxhbmRyZWFzQGVuZ2UuZnI+iIAEExEIACgCGwMGCwkI +BwMCBhUIAgkKCwQWAgMBAh4BAheABQJVWjYMBQkLTk1nAAoJEPfVyb92XGHjOqEB +AJsOI48xKPLh09bAzvzSOqS7H/KR6zWIfvLvu1gDhZVrAP92LZoj7qcgnZ15tY2Y +yqHYHk87zl3vRlMLJXizEz64xIiABBMRCAAoAhsDBgsJCAcDAgYVCAIJCgsEFgID +AQIeAQIXgAUCVqUDRgUJDJkamgAKCRD31cm/dlxh42vPAPoDs4RuOS7YWYM7gKiC +3oNVTTIDKz9foDlOIXUhlWf6dwD/S1ofL5UNLLubCdK3UYNHNj+8r4ynz3YezHaR +MDCTtGmIgAQTEQgAKAIbAwYLCQgHAwIGFQgCCQoLBBYCAwECHgECF4AFAldHXPUF +CQ07dFMACgkQ99XJv3ZcYeOc7wD/eE9W2sl2zI6h1LXTA6tVharyhP8cOAtzuuw7 +auZaE3wA/jaKo0HYrSnhrg8bF2zMnf9LQQdPdW99jZNVFIMcnOrniIAEExEIACgF +AlIWO54CGwMFCQlmAYAGCwkIBwMCBhUIAgkKCwQWAgMBAh4BAheAAAoJEPfVyb92 +XGHj9VkBAJe2uRxafZnUWpkTMD2CGg2EQgIP0R4bH3lykKtNKiZ/AQChGBkQWref +Z4eGsXhO205DYKq8TXKmAxuSVYv3UahXXIiABBMRCAAoAhsDBgsJCAcDAgYVCAIJ +CgsEFgIDAQIeAQIXgAUCVVo2GgUJC05NZwAKCRD31cm/dlxh4yb4AP9PxhxI7yE/ +PiCa9hmrl5rvilMGXNBzA80re3+G8un6EgD7BQPdd9hBlC98uC6WtYtB9xFgny3M +mNPpcUM7NHDjdYKIgAQTEQgAKAIbAwYLCQgHAwIGFQgCCQoLBBYCAwECHgECF4AF +AlalAz0FCQyZGpoACgkQ99XJv3ZcYeMR7gEAlSYGcUywSjjXJ+kjz6n3wddHZFGl +q3Z4zmdVeIJctv8A/R0qGx73rFDNN1aEB36RZmjf6s3OKEtZ+sFNPEXOWwpAiIAE +ExEIACgCGwMGCwkIBwMCBhUIAgkKCwQWAgMBAh4BAheABQJXR10BBQkNO3RTAAoJ +EPfVyb92XGHjgN4BAKeBkmxrmrSPU9HUDlE7L/ecR7rUlF2Go4ibuDvOWp0BAP9X +wXSHKxDlL2lh/IeiZSqIW09GXBItfQACaeoJz4s4oYiABBMRCAAoBQJL7RwaAhsD +BQkJZgGABgsJCAcDAgYVCAIJCgsEFgIDAQIeAQIXgAAKCRD31cm/dlxh4zhsAQCf +pbJqrGh6rGBAW1L3jCHNeYt9ughb6wxtlwFclThG/QD/bccAIkDT1lem8Bhf66d5 +sYEx+d27d2rvyBNblP3Urwa5Ag0ES+0cGhAIAI7fBR4UWKVQ8t5A0hPXbOhQkxyt +ztcIRo8rpGGMq//STIa4gBZjuyomkOGss8bElWFYeco09+OqGimD4fDEHXVpD/ev +IYiLq9U2sAUHZaKQAM3vE5LBfWa6zeuQwQj0/t9+cDyNCLTEjPsFQ5AdWyXxxO2c +XetgOHbKwtyjEEsjbJNms6ysjsmXzQGkDRCarGpWrqhAE+jweykpJLoCpCI8AmTv +1/dA5AOcDfsNlTDJnKwWsIaEnvscE4YMwcbCxwHUbhlzzEs8uS7Bk1LaQKQFUcvQ +Bt1nFiHD3uTHZLX5RjL2VTRArQFWN3PefAW1T5Ws+Fs+JwBy/VeKbuBud5sAAwYH +/167fa00yFiCtloWPJ/Xv7Marh/CIpAG0GOuPIJ4IqdEl/ZZ76A0KalUbrSL+fj1 +Eq/0auiNi9CbtlKI8lebn0AkKRYZe9j6JwIHJGomn1hgFhPGMKUToE4iUXmv+ZWN +BbH4iJz87xcrmtV9mLHiVZHGMwMBv5VVSnBoGcxcHHYnC3iAP8h+yaFt4pVIxQXR +NNfbXsUFvZaW2Tgat8knupmxOZfJfdesIf+n1X36OvhsZgFw6rHTSf2mAfkiBl47 +uYbB8v8BR2nDXbtpNlg2ssPbmPIfOE0Ft7pZ5VN1YiNY60w+Sbh5wD0A4mr7OZ/t +2NP0yxDMCLYN3jY5R+P/e4OIZwQYEQgADwIbDAUCV0dd7gUJDTt1RgAKCRD31cm/ +dlxh4xPFAQCXDeJBh1YPVkD8rgFlmMIEtorkzK0tHfCap6j1cG4iFAD/SCXCufA7 +8GOBvibrC/azKvoBKLY1/stpKCrecZdRFkk= +=SDN9 +-----END PGP PUBLIC KEY BLOCK----- diff --git a/build/unix/build-gcc/DA23579A74D4AD9AF9D3F945CEFAC8EAAF17519D.key b/build/unix/build-gcc/DA23579A74D4AD9AF9D3F945CEFAC8EAAF17519D.key new file mode 100644 index 0000000000..1b658e615b --- /dev/null +++ b/build/unix/build-gcc/DA23579A74D4AD9AF9D3F945CEFAC8EAAF17519D.key @@ -0,0 +1,52 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mQINBFY4SBYBEAC11sh4AMhIhiLxj76FXsluVJIU4nZjVmexar+/5WMlVvMX+Dxk +lUbKDCBOUMtPFsAXMpcxOGwscCr3WMuI8WszTjKDs3mdQ37o/pzXMbRhY0oZV29Z +EhNLds14qhMLlQiDEm5lJ5bOsLevHJ9hR4wvwY6UR881xsiXsNU+iNMRP0cWeRjQ +84pSCLOt9i+D8rdllVob871gN/tjY4Ll13Tg7qmtFE1YEFJaLb2yik0bO7gPkig/ +ADmKMBhOtgAHU9i+gmtP+x+agk7cbXkR06Pd9VBkd9nYlFXbR+zcE15AqauEF1Y2 +V9RbW/Ewt4Fmgr+QQnJhiSMO2BUTS2Q0CC3LznB9QOdEriUmeXGJdim0OJiwYDDX +4CNRk+2CAePbrZnGv+YXgeNPHvFa0Baj73HP8Ptok+OeyWIenRPHG3Ni+O5p1n5k 
+QK0bHqIwChMtAJvzdoC77XIJhbCtStmvo2FdSA8YcG4stlz+Wk1ZtNMen83ZEscS +OXEVpxcPGlbmWmkWj8DF5zbB1dRdh4T6LLM4nZViBu7oGD76z3c/x2zc7l3pyVHx +Cw70a+r+6LvUwnvCiApCBS72uDc4zZtnkNUQHlXHkz9wEeYUtUB0wkCYWPZy7BZy +0aFfKWK4Jg7uGx/mdHRCJ35MdXWxeQ4yPUE+tF951s167ANr1+ayt87pQwARAQAB +tChBbGV4YW5kcmUgSnVsbGlhcmQgPGp1bGxpYXJkQHdpbmVocS5vcmc+iQI3BBMB +AgAhAhsDAh4BAheABQJWOEvIBQsJCAcDBRUKCQgLBRYCAwEAAAoJEM76yOqvF1Gd +UqkQAJw6ot97efCon6qMA7ctJTqhOvnPSxf430aZgaTuNBEfY3RPeWC+k11cTvKV +dny9xwC+N8U2Jfdd0iXqlwUdM4ThOKZCXGOykCHJmrYGPqWsjGKUO7EoMwJB00qi +nOJdgj7zWLb6MuuKx2eavGYVLCFG4sQ8fjX0+sxuD+Cl++UyS9+t/C3ijeXTxaZn +qSLFKUFzyngXIUhFxMLkUdh397WeTaBtUTyLT0lwOKTllxIyC/+t2e9QcfgdLE/q +wKmRjihNq6I5JOQfO8JynUoR8WzKQaCX5VL6ZPaQa8ZzUdS/h0WlMlQuD5mrcDBa +ZQjqPEIL6/oExk1a7yeQFKNKisq94rVF0Ly1o7w+n+7X4lT9T9zhiPKVXvlxHB0h +SeJm4j/qDq1DSiGVfIR2CChObyeHAZhQZMMr/Ni9XtqzHsd2qhcP1ZYvbQZ2UK/N +Lv398VY/f+kXApFMDQLj1jGA8aXbkE8ChIAiZAAzVMg2wJ2x5/7bImbICsvGSwfx +awlsHzc7CR0Pj2Kdgr7UtsDk+cBRQMEqAIGWiCOKnBD8eoNGaiCoLHI/3ce4dJ/y +pXFtJSkJa8wpK4+xdckAvtPQZgOV5gLCJqNqEF+8aIjsTwwu7dcIXG2qLHD5C5tq +viuZtOYO7UdQbIHuYY5Xy8/W7hQRfIaq1NfKf9qJx4hrCWLviEYEEBECAAYFAlY4 +S3QACgkQ9ebp7rlGHdcg6ACfXNdYTmPe1Ej0rd+eO+yuDF/kwccAoItuIMi7EXu0 +FR4Ui8cBaZI3hweFuQINBFY4SBYBEAD7ZonYuSKxToJ4plL22rv4wPPbqACcLbIG +5t3s/Gb3/twOtaCgOEFhzNv+8K87jX6iSHJYeGhu7e2eRxeGHkrqliNJoHUi9Ddu +ygHqhoNmSHNSqI36/TU5yCRArKS3wwq7cafGnncdVOLBYfj497IxGK8fANhDf7TV +vqUGIb06gkpWbrwmUWgV8pk7MHgL93T5Ph+KSgdEbOSePFwQb9piyp9vWNmZnqK2 +9TFNtTULGtQa0y8ZCNSSEh4YP/DxDraq1OJ2Gh3WHSQ4f2hfGXJMzr4cyIrOJHQ8 +mby6xHmvldsAGsZJ/CSMj27UhJJYOzNCxWOp9NBNARB/6N1Ikvv9Vs6G7lZ4Dmuk +wvAWqzlomO/ctt0XmvY7N7ddIviDCQ0Z5bGJQlOWuIBR04tt7CePNzxG91q8x7FN +P8r+BSvxtGheeFiQYsC5FINYWUelL/SU8/U9sG30YLpujvjB5mqYZJtmotSqFbwl +81/bLU170OdG9n7FWp09f9yB1KlSq3hSwKBKu2bGUy2sS6w5MqEtxBHVUjLlS9oP +GQK+wr1m70rgfK/2N3HdcSqr2e2aKxnCx5wDvqB19Zq0TX5CXobEy3ohnul3Ez7a +2HBq543rdZpS9xuF2IHK6zMn5Xv0WKrODxIOnjs1mKbQzP5/6PVOejH/AnO38pCb +hoj0/zvnKQARAQABiQIfBBgBAgAJBQJWOEgWAhsMAAoJEM76yOqvF1Gde00QAJMF +OZhnPeiDFigLsqiqPGQzqSlZ5r4rQ3t6txfBYDclTq3rMqmk75bxteZHpSgMvdHF +SgqrvcyCJP5F8IRbk+J/tUb10icnl7+vsb6PfNXXflX0cIeAC9yqB3Z6RO77NoMy +HzMlw4EcNUXdmC46s+h6y74BeWWLBwYR18XgTSuw3gYpL7P0lqM2d7H6HCQMkZD/ +on9pT3lOc5k9YeM+B+Ak0nDyJGrdj6EES/ukrmq/szJhx+2zMbKU6Ds/uIRE0zuS +VUPnCy+3KPuJk+xLWtuVD2v2G0PXBrKKcgLfQzTQeGT5R/8rTt2w3ah4dXYRG5Ad +N5fIaTfjJTZGmht3pvHuucoloqMWl6DD7a3XZjWtUBMhPboAZiCmXiBWn3c26ITu +N9j4gSpl3hbWYJXjTWocGs2YyiuMRsO6Minfz5l2/iZjp8xHJ8GajuLGQES7CwGH +uShQ0hknHZmrH0d6xOhD64czgmTI2HraujWz+u31sHM1yEJgQKAtEL2AKWGSadly +/eI2rCQDEn6mIe34I04SPr/XrTpFClmUBbZBBir7KMRhB8B9ERdJElbtb4ubGZ0D +FCYpueJgVv9agvV2ONVb/K0BIevJy9v5+FbSFIQG/spkwf/71olib93iUr9tKTaE +mOMR1xJlCiQvAQYsmqwM9FHDmGJYTQE1WbVZu4gZ +=6vF7 +-----END PGP PUBLIC KEY BLOCK----- diff --git a/build/unix/build-gcc/EAF1C276A747E9ED86210CBAC3126D3B4AE55E93.key b/build/unix/build-gcc/EAF1C276A747E9ED86210CBAC3126D3B4AE55E93.key new file mode 100644 index 0000000000..13ea6d69ec --- /dev/null +++ b/build/unix/build-gcc/EAF1C276A747E9ED86210CBAC3126D3B4AE55E93.key @@ -0,0 +1,29 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mQGiBEj3S14RBACUE+e2hRWwM6AFWaNKsLgDg6ebDNCI6z/Pk38t6JUeM+D5MAvq +fnL45bF/3CUrZRK+/qLg5iwRWehKh08VQ7GqDxMerZkPmfvirVxLwpc5ngCOGJwv +ba13xdaxfTLMkHxaQyGWiUqHIwdzFoNBgjq9XTY0GGqHwVA1Hb+xTAL8PwCg+wru +41p/aOq9cfPN1U1BjulWCSMD/2YP23pI19o9Hr26ltyJcd/xkSRiCUk84efIw5JH +7QlxoMoW/SdJQAGi2pZN9o4I/fPDB3Gna9M9rZfacKda857dwkALPK8xTsfHiZzH +40g+eYUvl9nloNidneSFcxbLO/euURcCJ6Ri6nb4QWWLVH1XF6wQxEGyn7ojYMOn +ihlkA/9KATtSt+T0zgWskqckgV1ZQg9Ysqwp3GAvezJuyUTXlB02ApVFEsRTQtLZ 
+2WPvo/gzfih/EdNLrq7UeeB4dr/nlpANn4IyBRN6EmBHaJ4MMKN77B7bB0GaBfGJ +rgNNp7W0xG0FLjaMoQzP62qqAtZHNlW1qCmkyJGUpAa6tJw9CbQlVHJpc3RhbiBH +aW5nb2xkIDxnaW5nb2xkQGFkYWNvcmUuY29tPohgBBMRAgAgBQJI90teAhsDBgsJ +CAcDAgQVAggDBBYCAwECHgECF4AACgkQwxJtO0rlXpMVcwCgoQ91OLI+m2bsu2SS +d5MsRQH3FWsAnRFG2YGk5o5zuoLzdZd6KlL9xJ+uuQINBEj3S2MQCACnDo5dHujc +u7QHRPnxNwiKhMP6eIZaEm9tavab3UxsRufMyVC8nQ8+EmCOwfBrqstfRVoQnoDI +s5UY1XAM3mBFXYqfY9wR6NISUlzK/HPyFhGE7t3lVjOkiqbWOftDt6GgRETeqYsW +XkDV/dL4+P3eSaOSP6KMZwdjgXPOciN59KIiii9NK4icxP0lJHDk5WJFwfucEyUt +Sz7uwuUFcajHZmMxxHAnWT3uJ+ZasSijduZevsHhKTTXaZRideqf+ur1/TcUaZDQ +O3wist1qc03NkL+oGu6HYPx9ZV40p/axdTaUXMcBjtAZIzvy984HF9EsFQnvbiXt +R8zg6SYXLRmbAAMGB/9JMKWsCuxUzXmU1jyJvMXdRBZ4YQYkKFYWrEXwjYlBEGx6 +01PkR//4QJVR4zFjy4zVnaUrOxtR+65Eedf+9fNZzSNeI24TGaqyVM0OYYQtp9cH +kRDu3wif1k2NW3BnrmTjVefdAWVH6zKT9lP9m6RPHCwVGyORhVQtB3+ZXOehNJwL +9NBU4MUpGKpoQCuODdgZ8iQXbo+plg0eCxcpNaYzSnq9DMAU+2qnP6d3x4DeWzlL +wvJ2K2Mw89gvCImy/JDe05EXqKowR6aiIPvw5ou9xSHmjT6rcaIBiROCe+1hh4XC +djCdb4kOskWCEXfFKcHax4N5fI9vmk3P5068BELMiEkEGBECAAkFAkj3S2MCGwwA +CgkQwxJtO0rlXpNxdwCgjr4sQRf2cyDkCSWe4AElbI74BREAoPdet3XvE6ZcZJGl +UIZySRkdpk/A +=dGh0 +-----END PGP PUBLIC KEY BLOCK----- diff --git a/build/unix/build-gcc/build-gcc.sh b/build/unix/build-gcc/build-gcc.sh new file mode 100755 index 0000000000..a7760fa098 --- /dev/null +++ b/build/unix/build-gcc/build-gcc.sh @@ -0,0 +1,118 @@ +#!/bin/bash + +set -e +set -x + +make_flags="-j$(nproc)" + +prepare_mingw() { + export prefix=/tools/mingw32 + export install_dir=$root_dir$prefix + mkdir -p $install_dir + export PATH=$PATH:$install_dir/bin/ + + cd $root_dir + + git clone -n git://git.code.sf.net/p/mingw-w64/mingw-w64 + pushd mingw-w64 + git checkout $mingw_version # Asserts the integrity of the checkout (Right?) + popd +} + +apply_patch() { + if [ $# -ge 2 ]; then + pushd $root_dir/$1 + shift + else + pushd $root_dir/gcc-source + fi + patch -p1 < $1 + popd +} + +build_binutils() { + # if binutils_configure_flags is not set at all, give it the default value + if [ -z "${binutils_configure_flags+xxx}" ]; + then + # gold is disabled because we don't use it on automation, and also we ran into + # some issues with it using this script in build-clang.py. + # + # --enable-targets builds extra target support in ld. + # Enabling aarch64 support brings in arm support, so we don't need to specify that too. + # + # It is important to have the binutils --target and the gcc --target match, + # so binutils will install binaries in a place that gcc will look for them. + binutils_configure_flags="--enable-targets=aarch64-unknown-linux-gnu --build=x86_64-unknown-linux-gnu --target=x86_64-unknown-linux-gnu --disable-gold --enable-plugins --disable-nls --with-sysroot=/" + fi + + mkdir $root_dir/binutils-objdir + pushd $root_dir/binutils-objdir + ../binutils-source/configure --prefix=${prefix-/tools/gcc}/ $binutils_configure_flags + make $make_flags + make install $make_flags DESTDIR=$root_dir + export PATH=$root_dir/${prefix-/tools/gcc}/bin:$PATH + popd +} + +build_gcc() { + # Be explicit about --build and --target so header and library install + # directories are consistent. 
+ local target="${1:-x86_64-unknown-linux-gnu}" + + mkdir $root_dir/gcc-objdir + pushd $root_dir/gcc-objdir + ../gcc-source/configure --prefix=${prefix-/tools/gcc} --build=x86_64-unknown-linux-gnu --target="${target}" --enable-languages=c,c++ --disable-nls --disable-gnu-unique-object --enable-__cxa_atexit --with-arch-32=pentiumpro --with-sysroot=/ + make $make_flags + make $make_flags install DESTDIR=$root_dir + + cd $root_dir/tools + ln -s gcc gcc/bin/cc + + tar caf $root_dir/gcc.tar.xz gcc/ + popd +} + +build_gcc_and_mingw() { + mkdir gcc-objdir + pushd gcc-objdir + ../gcc-source/configure --prefix=$install_dir --target=i686-w64-mingw32 --with-gnu-ld --with-gnu-as --disable-multilib --enable-threads=posix + make $make_flags all-gcc + make $make_flags install-gcc + popd + + mkdir mingw-w64-headers32 + pushd mingw-w64-headers32 + ../mingw-w64/mingw-w64-headers/configure --host=i686-w64-mingw32 --prefix=$install_dir/i686-w64-mingw32/ --enable-sdk=all --enable-secure-api --enable-idl + make $make_flags install + popd + + mkdir mingw-w64-crt32 + pushd mingw-w64-crt32 + ../mingw-w64/mingw-w64-crt/configure --host=i686-w64-mingw32 --prefix=$install_dir/i686-w64-mingw32/ + make + make install + popd + + mkdir mingw-w64-pthread + pushd mingw-w64-pthread + ../mingw-w64/mingw-w64-libraries/winpthreads/configure --host=i686-w64-mingw32 --prefix=$install_dir/i686-w64-mingw32/ + make + make install + popd + + pushd gcc-objdir + make + make install + popd + + mkdir widl32 + pushd widl32 + ../mingw-w64/mingw-w64-tools/widl/configure --prefix=$install_dir --target=i686-w64-mingw32 + make + make install + popd + + pushd $(dirname $install_dir) + tar caf $root_dir/mingw32.tar.xz $(basename $install_dir)/ + popd +} diff --git a/build/unix/build-hfsplus/build-hfsplus.sh b/build/unix/build-hfsplus/build-hfsplus.sh new file mode 100755 index 0000000000..389927e950 --- /dev/null +++ b/build/unix/build-hfsplus/build-hfsplus.sh @@ -0,0 +1,49 @@ +#!/bin/bash + +# hfsplus needs to be rebuilt when changing the clang version used to build it. +# Until bug 1471905 is addressed, increase the following number +# when that happens: 1 + +set -e +set -x + +hfplus_version=540.1.linux3 +dirname=diskdev_cmds-${hfplus_version} +make_flags="-j$(nproc)" + +root_dir="$1" +if [ -z "$root_dir" -o ! -d "$root_dir" ]; then + root_dir=$(mktemp -d) +fi +cd $root_dir + +if test -z $TMPDIR; then + TMPDIR=/tmp/ +fi + +# Build +cd $dirname +# We want to statically link against libcrypto. On CentOS, that requires zlib +# and libdl, because of FIPS functions pulling in more than necessary from +# libcrypto (only SHA1 functions are used), but not on Debian, thus +# --as-needed. +patch -p1 << 'EOF' +--- a/newfs_hfs.tproj/Makefile.lnx ++++ b/newfs_hfs.tproj/Makefile.lnx +@@ -6,3 +6,3 @@ + newfs_hfs: $(OFILES) +- ${CC} ${CFLAGS} ${LDFLAGS} -o newfs_hfs ${OFILES} -lcrypto ++ ${CC} ${CFLAGS} ${LDFLAGS} -o newfs_hfs ${OFILES} -Wl,-Bstatic -lcrypto -Wl,-Bdynamic,--as-needed,-lz,-ldl + +EOF +make $make_flags || exit 1 +cd .. + +mkdir hfsplus-tools +cp $dirname/newfs_hfs.tproj/newfs_hfs hfsplus-tools/newfs_hfs +## XXX fsck_hfs is unused, but is small and built from the package. 
+cp $dirname/fsck_hfs.tproj/fsck_hfs hfsplus-tools/fsck_hfs + +# Make a package of the built utils +cd $root_dir +tar caf $root_dir/hfsplus-tools.tar.xz hfsplus-tools diff --git a/build/unix/elfhack/Makefile.in b/build/unix/elfhack/Makefile.in new file mode 100644 index 0000000000..08cc3d6852 --- /dev/null +++ b/build/unix/elfhack/Makefile.in @@ -0,0 +1,44 @@ +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +include $(topsrcdir)/config/rules.mk + +test-array$(DLL_SUFFIX) test-ctors$(DLL_SUFFIX): %$(DLL_SUFFIX): %.$(OBJ_SUFFIX) elfhack + $(MKSHLIB) $(LDFLAGS) $< -nostartfiles + @echo === + @echo === If you get failures below, please file a bug describing the error + @echo === and your environment \(compiler and linker versions\), and + @echo === provide the pre-elfhacked library as an attachment. + @echo === Use --disable-elf-hack until this is fixed. + @echo === + # Fail if the library doesn't have $(DT_TYPE) .dynamic info + $(TOOLCHAIN_PREFIX)readelf -d $@ | grep '($(DT_TYPE))' + @rm -f $@.bak + $(CURDIR)/elfhack -b -f $@ + # Fail if the backup file doesn't exist + [ -f '$@.bak' ] + # Fail if the new library doesn't contain less relocations + [ $$($(TOOLCHAIN_PREFIX)objdump -R $@.bak | wc -l) -gt $$(objdump -R $@ | wc -l) ] + +test-array$(DLL_SUFFIX) test-ctors$(DLL_SUFFIX): DSO_SONAME=$@ +test-array$(DLL_SUFFIX): DT_TYPE=INIT_ARRAY +test-ctors$(DLL_SUFFIX): DT_TYPE=INIT + +.PRECIOUS: test-array$(DLL_SUFFIX) test-ctors$(DLL_SUFFIX) + +ifndef CROSS_COMPILE +ifdef COMPILE_ENVIRONMENT +libs:: test-array$(DLL_SUFFIX) test-ctors$(DLL_SUFFIX) + +dummy: dummy.$(OBJ_SUFFIX) + $(CC) -o $@ $^ $(LDFLAGS) + +libs:: dummy + # Will either crash or return exit code 1 if elfhack is broken + LD_PRELOAD=$(CURDIR)/test-array$(DLL_SUFFIX) $(CURDIR)/dummy + LD_PRELOAD=$(CURDIR)/test-ctors$(DLL_SUFFIX) $(CURDIR)/dummy + +endif +endif diff --git a/build/unix/elfhack/README b/build/unix/elfhack/README new file mode 100644 index 0000000000..8c68031e33 --- /dev/null +++ b/build/unix/elfhack/README @@ -0,0 +1,28 @@ +Elfhack is a program to optimize ELF binaries for size and cold startup +speed. + +Presently, it is quite experimental, though it works well for the target +it was created for: Firefox's libxul.so. + +Elfhack currently only does one thing: packing dynamic relocations ; +which ends up being a quite complex task, that can be summarized this +way: +- Remove RELATIVE relocations from the .rel.dyn/.rela.dyn section. +- Inject a small code able to apply relative relocations "by hand" + after the .rel.dyn/.rela.dyn section. +- Inject a section containing relocative relocations in a different + and more packed format, after the small code. +- Register the small code as DT_INIT function. Make the small code call + what was initially the DT_INIT function, if there was one. +- Remove the hole between the new section containing relative + relocations and the following sections, adjusting offsets and base + addresses accordingly. +- Adjust PT_LOAD entries to fit new offsets, and add an additional + PT_LOAD entry when that is necessary to handle the discrepancy between + offsets and base addresses, meaning the section offsets may yet again + need adjustments. +- Adjust various DT_* dynamic tags to fit the new ELF layout. +- Adjust section headers. +- Adjust ELF headers. + +See http://glandium.org/blog/?p=1177#relocations for some figures. 
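The packing scheme sketched in the README's list above amounts to replacing long runs of R_*_RELATIVE entries with a denser table that the injected DT_INIT code replays before handing control to the original init function. The following is only an illustrative sketch of that replay step: the two-field run entry (offset, count) and all names here are assumptions for exposition, not elfhack's actual on-disk encoding (which lives in the inject/ sources added later in this patch).

#include <cstddef>
#include <cstdint>

// Hypothetical packed entry: `count` consecutive pointer-sized slots,
// starting `offset` bytes past the load base, all need the same fixup.
struct PackedRelativeRun {
  uint32_t offset;
  uint32_t count;
};

// What the injected init code conceptually does: for each run, add the
// load bias to every slot (R_*_RELATIVE semantics, addend stored in place).
static void ApplyPackedRelativeRuns(uintptr_t base,
                                    const PackedRelativeRun* runs,
                                    size_t nruns) {
  for (size_t i = 0; i < nruns; i++) {
    uintptr_t* slot = reinterpret_cast<uintptr_t*>(base + runs[i].offset);
    for (uint32_t j = 0; j < runs[i].count; j++) {
      slot[j] += base;
    }
  }
}

Because one run entry can stand in for many individual relocation records, the packed table is much smaller than .rel.dyn/.rela.dyn, and applying it touches the same pages the dynamic loader would have touched anyway.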
diff --git a/build/unix/elfhack/dummy.c b/build/unix/elfhack/dummy.c new file mode 100644 index 0000000000..2cde16102e --- /dev/null +++ b/build/unix/elfhack/dummy.c @@ -0,0 +1,7 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +extern __attribute__((visibility("default"), weak)) int print_status(); + +int main() { return print_status(); } diff --git a/build/unix/elfhack/elf.cpp b/build/unix/elfhack/elf.cpp new file mode 100644 index 0000000000..679770fcba --- /dev/null +++ b/build/unix/elfhack/elf.cpp @@ -0,0 +1,934 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#undef NDEBUG +#include +#include +#include "elfxx.h" + +template +void Elf_Ehdr_Traits::swap(T& t, R& r) { + memcpy(r.e_ident, t.e_ident, sizeof(r.e_ident)); + r.e_type = endian::swap(t.e_type); + r.e_machine = endian::swap(t.e_machine); + r.e_version = endian::swap(t.e_version); + r.e_entry = endian::swap(t.e_entry); + r.e_phoff = endian::swap(t.e_phoff); + r.e_shoff = endian::swap(t.e_shoff); + r.e_flags = endian::swap(t.e_flags); + r.e_ehsize = endian::swap(t.e_ehsize); + r.e_phentsize = endian::swap(t.e_phentsize); + r.e_phnum = endian::swap(t.e_phnum); + r.e_shentsize = endian::swap(t.e_shentsize); + r.e_shnum = endian::swap(t.e_shnum); + r.e_shstrndx = endian::swap(t.e_shstrndx); +} + +template +void Elf_Phdr_Traits::swap(T& t, R& r) { + r.p_type = endian::swap(t.p_type); + r.p_offset = endian::swap(t.p_offset); + r.p_vaddr = endian::swap(t.p_vaddr); + r.p_paddr = endian::swap(t.p_paddr); + r.p_filesz = endian::swap(t.p_filesz); + r.p_memsz = endian::swap(t.p_memsz); + r.p_flags = endian::swap(t.p_flags); + r.p_align = endian::swap(t.p_align); +} + +template +void Elf_Shdr_Traits::swap(T& t, R& r) { + r.sh_name = endian::swap(t.sh_name); + r.sh_type = endian::swap(t.sh_type); + r.sh_flags = endian::swap(t.sh_flags); + r.sh_addr = endian::swap(t.sh_addr); + r.sh_offset = endian::swap(t.sh_offset); + r.sh_size = endian::swap(t.sh_size); + r.sh_link = endian::swap(t.sh_link); + r.sh_info = endian::swap(t.sh_info); + r.sh_addralign = endian::swap(t.sh_addralign); + r.sh_entsize = endian::swap(t.sh_entsize); +} + +template +void Elf_Dyn_Traits::swap(T& t, R& r) { + r.d_tag = endian::swap(t.d_tag); + r.d_un.d_val = endian::swap(t.d_un.d_val); +} + +template +void Elf_Sym_Traits::swap(T& t, R& r) { + r.st_name = endian::swap(t.st_name); + r.st_value = endian::swap(t.st_value); + r.st_size = endian::swap(t.st_size); + r.st_info = t.st_info; + r.st_other = t.st_other; + r.st_shndx = endian::swap(t.st_shndx); +} + +template +struct _Rel_info { + static inline void swap(Elf32_Word& t, Elf32_Word& r) { r = endian::swap(t); } + static inline void swap(Elf64_Xword& t, Elf64_Xword& r) { + r = endian::swap(t); + } + static inline void swap(Elf64_Xword& t, Elf32_Word& r) { + r = endian::swap(ELF32_R_INFO(ELF64_R_SYM(t), ELF64_R_TYPE(t))); + } + static inline void swap(Elf32_Word& t, Elf64_Xword& r) { + r = endian::swap(ELF64_R_INFO(ELF32_R_SYM(t), ELF32_R_TYPE(t))); + } +}; + +template +void Elf_Rel_Traits::swap(T& t, R& r) { + r.r_offset = endian::swap(t.r_offset); + _Rel_info::swap(t.r_info, r.r_info); +} + +template +void Elf_Rela_Traits::swap(T& t, R& r) { + r.r_offset = endian::swap(t.r_offset); + 
_Rel_info::swap(t.r_info, r.r_info); + r.r_addend = endian::swap(t.r_addend); +} + +static const Elf32_Shdr null32_section = {0, SHT_NULL, 0, 0, 0, + 0, SHN_UNDEF, 0, 0, 0}; + +Elf_Shdr null_section(null32_section); + +Elf_Ehdr::Elf_Ehdr(std::ifstream& file, char ei_class, char ei_data) + : serializable(file, ei_class, ei_data), + ElfSection(null_section, nullptr, nullptr) { + shdr.sh_size = Elf_Ehdr::size(ei_class); +} + +Elf::Elf(std::ifstream& file) { + if (!file.is_open()) throw std::runtime_error("Error opening file"); + + file.exceptions(std::ifstream::eofbit | std::ifstream::failbit | + std::ifstream::badbit); + // Read ELF magic number and identification information + char e_ident[EI_VERSION]; + file.seekg(0); + file.read(e_ident, sizeof(e_ident)); + file.seekg(0); + ehdr = new Elf_Ehdr(file, e_ident[EI_CLASS], e_ident[EI_DATA]); + + // ELFOSABI_LINUX is kept unsupported because I haven't looked whether + // STB_GNU_UNIQUE or STT_GNU_IFUNC would need special casing. + if ((ehdr->e_ident[EI_OSABI] != ELFOSABI_NONE) && + (ehdr->e_ident[EI_ABIVERSION] != 0)) + throw std::runtime_error("unsupported ELF ABI"); + + if (ehdr->e_version != 1) throw std::runtime_error("unsupported ELF version"); + + // Sanity checks + if (ehdr->e_shnum == 0) + throw std::runtime_error("sstripped ELF files aren't supported"); + + if (ehdr->e_ehsize != Elf_Ehdr::size(e_ident[EI_CLASS])) + throw std::runtime_error( + "unsupported ELF inconsistency: ehdr.e_ehsize != sizeof(ehdr)"); + + if (ehdr->e_shentsize != Elf_Shdr::size(e_ident[EI_CLASS])) + throw std::runtime_error( + "unsupported ELF inconsistency: ehdr.e_shentsize != sizeof(shdr)"); + + if (ehdr->e_phnum == 0) { + if (ehdr->e_phoff != 0) + throw std::runtime_error( + "unsupported ELF inconsistency: e_phnum == 0 && e_phoff != 0"); + if (ehdr->e_phentsize != 0) + throw std::runtime_error( + "unsupported ELF inconsistency: e_phnum == 0 && e_phentsize != 0"); + } else if (ehdr->e_phoff != ehdr->e_ehsize) + throw std::runtime_error( + "unsupported ELF inconsistency: ehdr->e_phoff != ehdr->e_ehsize"); + else if (ehdr->e_phentsize != Elf_Phdr::size(e_ident[EI_CLASS])) + throw std::runtime_error( + "unsupported ELF inconsistency: ehdr->e_phentsize != sizeof(phdr)"); + + // Read section headers + Elf_Shdr** shdr = new Elf_Shdr*[ehdr->e_shnum]; + file.seekg(ehdr->e_shoff); + for (int i = 0; i < ehdr->e_shnum; i++) + shdr[i] = new Elf_Shdr(file, e_ident[EI_CLASS], e_ident[EI_DATA]); + + // Sanity check in section header for index 0 + if ((shdr[0]->sh_name != 0) || (shdr[0]->sh_type != SHT_NULL) || + (shdr[0]->sh_flags != 0) || (shdr[0]->sh_addr != 0) || + (shdr[0]->sh_offset != 0) || (shdr[0]->sh_size != 0) || + (shdr[0]->sh_link != SHN_UNDEF) || (shdr[0]->sh_info != 0) || + (shdr[0]->sh_addralign != 0) || (shdr[0]->sh_entsize != 0)) + throw std::runtime_error( + "Section header for index 0 contains unsupported values"); + + if ((shdr[ehdr->e_shstrndx]->sh_link != 0) || + (shdr[ehdr->e_shstrndx]->sh_info != 0)) + throw std::runtime_error( + "unsupported ELF content: string table with sh_link != 0 || sh_info != " + "0"); + + // Store these temporarily + tmp_shdr = shdr; + tmp_file = &file; + + // Fill sections list + sections = new ElfSection*[ehdr->e_shnum]; + for (int i = 0; i < ehdr->e_shnum; i++) sections[i] = nullptr; + for (int i = 1; i < ehdr->e_shnum; i++) { + // The .dynamic section is going to have references to other sections, + // so it's better to start with that one and recursively initialize those + // other sections first, to avoid possible infinite 
recursion (bug 1606739). + if (tmp_shdr[i]->sh_type == SHT_DYNAMIC) { + getSection(i); + } + } + for (int i = 1; i < ehdr->e_shnum; i++) { + if (sections[i] != nullptr) continue; + getSection(i); + } + Elf_Shdr s; + s.sh_name = 0; + s.sh_type = SHT_NULL; + s.sh_flags = 0; + s.sh_addr = 0; + s.sh_offset = ehdr->e_shoff; + s.sh_entsize = Elf_Shdr::size(e_ident[EI_CLASS]); + s.sh_size = s.sh_entsize * ehdr->e_shnum; + s.sh_link = 0; + s.sh_info = 0; + s.sh_addralign = (e_ident[EI_CLASS] == ELFCLASS32) ? 4 : 8; + shdr_section = new ElfSection(s, nullptr, nullptr); + + // Fake section for program headers + s.sh_offset = ehdr->e_phoff; + s.sh_addr = ehdr->e_phoff; + s.sh_entsize = Elf_Phdr::size(e_ident[EI_CLASS]); + s.sh_size = s.sh_entsize * ehdr->e_phnum; + phdr_section = new ElfSection(s, nullptr, nullptr); + + phdr_section->insertAfter(ehdr, false); + + sections[1]->insertAfter(phdr_section, false); + for (int i = 2; i < ehdr->e_shnum; i++) { + // TODO: this should be done in a better way + if ((shdr_section->getPrevious() == nullptr) && + (shdr[i]->sh_offset > ehdr->e_shoff)) { + shdr_section->insertAfter(sections[i - 1], false); + sections[i]->insertAfter(shdr_section, false); + } else + sections[i]->insertAfter(sections[i - 1], false); + } + if (shdr_section->getPrevious() == nullptr) + shdr_section->insertAfter(sections[ehdr->e_shnum - 1], false); + + tmp_file = nullptr; + tmp_shdr = nullptr; + for (int i = 0; i < ehdr->e_shnum; i++) delete shdr[i]; + delete[] shdr; + + eh_shstrndx = (ElfStrtab_Section*)sections[ehdr->e_shstrndx]; + + // Skip reading program headers if there aren't any + if (ehdr->e_phnum == 0) return; + + bool adjusted_phdr_section = false; + // Read program headers + file.seekg(ehdr->e_phoff); + for (int i = 0; i < ehdr->e_phnum; i++) { + Elf_Phdr phdr(file, e_ident[EI_CLASS], e_ident[EI_DATA]); + if (phdr.p_type == PT_LOAD) { + // Default alignment for PT_LOAD on x86-64 prevents elfhack from + // doing anything useful. However, the system doesn't actually + // require such a big alignment, so in order for elfhack to work + // efficiently, reduce alignment when it's originally the default + // one. + if ((ehdr->e_machine == EM_X86_64) && (phdr.p_align == 0x200000)) + phdr.p_align = 0x1000; + } + ElfSegment* segment = new ElfSegment(&phdr); + // Some segments aren't entirely filled (if at all) by sections + // For those, we use fake sections + if ((phdr.p_type == PT_LOAD) && (phdr.p_offset == 0)) { + // Use a fake section for ehdr and phdr + ehdr->getShdr().sh_addr = phdr.p_vaddr; + if (!adjusted_phdr_section) { + phdr_section->getShdr().sh_addr += phdr.p_vaddr; + adjusted_phdr_section = true; + } + segment->addSection(ehdr); + segment->addSection(phdr_section); + } + if (phdr.p_type == PT_PHDR) { + if (!adjusted_phdr_section) { + phdr_section->getShdr().sh_addr = phdr.p_vaddr; + adjusted_phdr_section = true; + } + segment->addSection(phdr_section); + } + for (int j = 1; j < ehdr->e_shnum; j++) + if (phdr.contains(sections[j])) segment->addSection(sections[j]); + // Make sure that our view of segments corresponds to the original + // ELF file. + // GNU gold likes to start some segments before the first section + // they contain. https://sourceware.org/bugzilla/show_bug.cgi?id=19392 + unsigned int gold_adjustment = segment->getAddr() - phdr.p_vaddr; + assert(segment->getFileSize() == phdr.p_filesz - gold_adjustment); + // gold makes TLS segments end on an aligned virtual address, even + // when the underlying section ends before that, while bfd ld + // doesn't. 
It's fine if we don't keep that alignment. + unsigned int memsize = segment->getMemSize(); + if (phdr.p_type == PT_TLS && memsize != phdr.p_memsz) { + unsigned int align = segment->getAlign(); + memsize = (memsize + align - 1) & ~(align - 1); + } + assert(memsize == phdr.p_memsz - gold_adjustment); + segments.push_back(segment); + } + + new (&eh_entry) ElfLocation(ehdr->e_entry, this); +} + +Elf::~Elf() { + for (std::vector::iterator seg = segments.begin(); + seg != segments.end(); seg++) + delete *seg; + delete[] sections; + ElfSection* section = ehdr; + while (section != nullptr) { + ElfSection* next = section->getNext(); + delete section; + section = next; + } +} + +// TODO: This shouldn't fail after inserting sections +ElfSection* Elf::getSection(int index) { + if ((index < -1) || (index >= ehdr->e_shnum)) + throw std::runtime_error("Section index out of bounds"); + if (index == -1) + index = ehdr->e_shstrndx; // TODO: should be fixed to use the actual + // current number + // Special case: the section at index 0 is void + if (index == 0) return nullptr; + // Infinite recursion guard + if (sections[index] == (ElfSection*)this) return nullptr; + if (sections[index] == nullptr) { + sections[index] = (ElfSection*)this; + switch (tmp_shdr[index]->sh_type) { + case SHT_DYNAMIC: + sections[index] = + new ElfDynamic_Section(*tmp_shdr[index], tmp_file, this); + break; + case SHT_REL: + sections[index] = + new ElfRel_Section(*tmp_shdr[index], tmp_file, this); + break; + case SHT_RELA: + sections[index] = + new ElfRel_Section(*tmp_shdr[index], tmp_file, this); + break; + case SHT_DYNSYM: + case SHT_SYMTAB: + sections[index] = + new ElfSymtab_Section(*tmp_shdr[index], tmp_file, this); + break; + case SHT_STRTAB: + sections[index] = + new ElfStrtab_Section(*tmp_shdr[index], tmp_file, this); + break; + default: + sections[index] = new ElfSection(*tmp_shdr[index], tmp_file, this); + } + } + return sections[index]; +} + +ElfSection* Elf::getSectionAt(unsigned int offset) { + for (int i = 1; i < ehdr->e_shnum; i++) { + ElfSection* section = getSection(i); + if ((section != nullptr) && (section->getFlags() & SHF_ALLOC) && + !(section->getFlags() & SHF_TLS) && (offset >= section->getAddr()) && + (offset < section->getAddr() + section->getSize())) + return section; + } + return nullptr; +} + +ElfSegment* Elf::getSegmentByType(unsigned int type, ElfSegment* last) { + std::vector::iterator seg; + if (last) { + seg = std::find(segments.begin(), segments.end(), last); + ++seg; + } else + seg = segments.begin(); + for (; seg != segments.end(); seg++) + if ((*seg)->getType() == type) return *seg; + return nullptr; +} + +void Elf::removeSegment(ElfSegment* segment) { + if (!segment) return; + std::vector::iterator seg; + seg = std::find(segments.begin(), segments.end(), segment); + if (seg == segments.end()) return; + segment->clear(); + segments.erase(seg); +} + +ElfDynamic_Section* Elf::getDynSection() { + for (std::vector::iterator seg = segments.begin(); + seg != segments.end(); seg++) + if (((*seg)->getType() == PT_DYNAMIC) && + ((*seg)->getFirstSection() != nullptr) && + (*seg)->getFirstSection()->getType() == SHT_DYNAMIC) + return (ElfDynamic_Section*)(*seg)->getFirstSection(); + + return nullptr; +} + +void Elf::normalize() { + // fixup section headers sh_name; TODO: that should be done by sections + // themselves + for (ElfSection* section = ehdr; section != nullptr; + section = section->getNext()) { + if (section->getIndex() == 0) + continue; + else + ehdr->e_shnum = section->getIndex() + 1; + 
section->getShdr().sh_name = eh_shstrndx->getStrIndex(section->getName()); + } + ehdr->markDirty(); + // Check segments consistency + int i = 0; + for (std::vector::iterator seg = segments.begin(); + seg != segments.end(); seg++, i++) { + std::list::iterator it = (*seg)->begin(); + for (ElfSection* last = *(it++); it != (*seg)->end(); last = *(it++)) { + if (((*it)->getType() != SHT_NOBITS) && + ((*it)->getAddr() - last->getAddr()) != + ((*it)->getOffset() - last->getOffset())) { + throw std::runtime_error("Segments inconsistency"); + } + } + } + + ElfSegment* prevLoad = nullptr; + for (auto& it : segments) { + if (it->getType() == PT_LOAD) { + if (prevLoad) { + size_t alignedPrevEnd = (prevLoad->getAddr() + prevLoad->getMemSize() + + prevLoad->getAlign() - 1) & + ~(prevLoad->getAlign() - 1); + size_t alignedStart = it->getAddr() & ~(it->getAlign() - 1); + if (alignedPrevEnd > alignedStart) { + throw std::runtime_error("Segments overlap"); + } + } + prevLoad = it; + } + } + + // fixup ehdr before writing + if (ehdr->e_phnum != segments.size()) { + ehdr->e_phnum = segments.size(); + phdr_section->getShdr().sh_size = + segments.size() * Elf_Phdr::size(ehdr->e_ident[EI_CLASS]); + phdr_section->getNext()->markDirty(); + } + // fixup shdr before writing + if (ehdr->e_shnum != shdr_section->getSize() / shdr_section->getEntSize()) + shdr_section->getShdr().sh_size = + ehdr->e_shnum * Elf_Shdr::size(ehdr->e_ident[EI_CLASS]); + ehdr->e_shoff = shdr_section->getOffset(); + ehdr->e_entry = eh_entry.getValue(); + ehdr->e_shstrndx = eh_shstrndx->getIndex(); + + // Check sections consistency + unsigned int minOffset = 0; + for (ElfSection* section = ehdr; section != nullptr; + section = section->getNext()) { + unsigned int offset = section->getOffset(); + if (offset < minOffset) { + throw std::runtime_error("Sections overlap"); + } + if (section->getType() != SHT_NOBITS) { + minOffset = offset + section->getSize(); + } + } +} + +void Elf::write(std::ofstream& file) { + normalize(); + for (ElfSection* section = ehdr; section != nullptr; + section = section->getNext()) { + file.seekp(section->getOffset()); + if (section == phdr_section) { + for (std::vector::iterator seg = segments.begin(); + seg != segments.end(); seg++) { + Elf_Phdr phdr; + phdr.p_type = (*seg)->getType(); + phdr.p_flags = (*seg)->getFlags(); + phdr.p_offset = (*seg)->getOffset(); + phdr.p_vaddr = (*seg)->getAddr(); + phdr.p_paddr = phdr.p_vaddr + (*seg)->getVPDiff(); + phdr.p_filesz = (*seg)->getFileSize(); + phdr.p_memsz = (*seg)->getMemSize(); + phdr.p_align = (*seg)->getAlign(); + phdr.serialize(file, ehdr->e_ident[EI_CLASS], ehdr->e_ident[EI_DATA]); + } + } else if (section == shdr_section) { + null_section.serialize(file, ehdr->e_ident[EI_CLASS], + ehdr->e_ident[EI_DATA]); + for (ElfSection* sec = ehdr; sec != nullptr; sec = sec->getNext()) { + if (sec->getType() != SHT_NULL) + sec->getShdr().serialize(file, ehdr->e_ident[EI_CLASS], + ehdr->e_ident[EI_DATA]); + } + } else + section->serialize(file, ehdr->e_ident[EI_CLASS], ehdr->e_ident[EI_DATA]); + } +} + +ElfSection::ElfSection(Elf_Shdr& s, std::ifstream* file, Elf* parent) + : shdr(s), + link(shdr.sh_link == SHN_UNDEF ? 
nullptr + : parent->getSection(shdr.sh_link)), + next(nullptr), + previous(nullptr), + index(-1) { + if ((file == nullptr) || (shdr.sh_type == SHT_NULL) || + (shdr.sh_type == SHT_NOBITS)) + data = nullptr; + else { + data = static_cast(malloc(shdr.sh_size)); + if (!data) { + throw std::runtime_error("Could not malloc ElfSection data"); + } + auto pos = file->tellg(); + file->seekg(shdr.sh_offset); + file->read(data, shdr.sh_size); + file->seekg(pos); + } + if (shdr.sh_name == 0) + name = nullptr; + else { + ElfStrtab_Section* strtab = (ElfStrtab_Section*)parent->getSection(-1); + // Special case (see elfgeneric.cpp): if strtab is nullptr, the + // section being created is the strtab. + if (strtab == nullptr) + name = &data[shdr.sh_name]; + else + name = strtab->getStr(shdr.sh_name); + } + // Only SHT_REL/SHT_RELA sections use sh_info to store a section + // number. + if ((shdr.sh_type == SHT_REL) || (shdr.sh_type == SHT_RELA)) + info.section = shdr.sh_info ? parent->getSection(shdr.sh_info) : nullptr; + else + info.index = shdr.sh_info; +} + +unsigned int ElfSection::getAddr() { + if (shdr.sh_addr != (Elf32_Word)-1) return shdr.sh_addr; + + // It should be safe to adjust sh_addr for all allocated sections that + // are neither SHT_NOBITS nor SHT_PROGBITS + if ((previous != nullptr) && isRelocatable()) { + unsigned int addr = previous->getAddr(); + if (previous->getType() != SHT_NOBITS) addr += previous->getSize(); + + if (addr & (getAddrAlign() - 1)) addr = (addr | (getAddrAlign() - 1)) + 1; + + return (shdr.sh_addr = addr); + } + return shdr.sh_addr; +} + +unsigned int ElfSection::getOffset() { + if (shdr.sh_offset != (Elf32_Word)-1) return shdr.sh_offset; + + if (previous == nullptr) return (shdr.sh_offset = 0); + + unsigned int offset = previous->getOffset(); + + ElfSegment* ptload = getSegmentByType(PT_LOAD); + ElfSegment* prev_ptload = previous->getSegmentByType(PT_LOAD); + + if (ptload && (ptload == prev_ptload)) { + offset += getAddr() - previous->getAddr(); + return (shdr.sh_offset = offset); + } + + if (previous->getType() != SHT_NOBITS) offset += previous->getSize(); + + Elf32_Word align = 0x1000; + for (std::vector::iterator seg = segments.begin(); + seg != segments.end(); seg++) + align = std::max(align, (*seg)->getAlign()); + + Elf32_Word mask = align - 1; + // SHF_TLS is used for .tbss which is some kind of special case. + if (((getType() != SHT_NOBITS) || (getFlags() & SHF_TLS)) && + (getFlags() & SHF_ALLOC)) { + if ((getAddr() & mask) < (offset & mask)) + offset = (offset | mask) + (getAddr() & mask) + 1; + else + offset = (offset & ~mask) + (getAddr() & mask); + } + if ((getType() != SHT_NOBITS) && (offset & (getAddrAlign() - 1))) + offset = (offset | (getAddrAlign() - 1)) + 1; + + return (shdr.sh_offset = offset); +} + +int ElfSection::getIndex() { + if (index != -1) return index; + if (getType() == SHT_NULL) return (index = 0); + ElfSection* reference; + for (reference = previous; + (reference != nullptr) && (reference->getType() == SHT_NULL); + reference = reference->getPrevious()) + ; + if (reference == nullptr) return (index = 1); + return (index = reference->getIndex() + 1); +} + +Elf_Shdr& ElfSection::getShdr() { + getOffset(); + if (shdr.sh_link == (Elf32_Word)-1) + shdr.sh_link = getLink() ? getLink()->getIndex() : 0; + if (shdr.sh_info == (Elf32_Word)-1) + shdr.sh_info = ((getType() == SHT_REL) || (getType() == SHT_RELA)) + ? (getInfo().section ? 
getInfo().section->getIndex() : 0) + : getInfo().index; + + return shdr; +} + +ElfSegment::ElfSegment(Elf_Phdr* phdr) + : type(phdr->p_type), + v_p_diff(phdr->p_paddr - phdr->p_vaddr), + flags(phdr->p_flags), + align(phdr->p_align), + vaddr(phdr->p_vaddr), + filesz(phdr->p_filesz), + memsz(phdr->p_memsz) {} + +void ElfSegment::addSection(ElfSection* section) { + // Make sure all sections in PT_GNU_RELRO won't be moved by elfhack + assert(!((type == PT_GNU_RELRO) && (section->isRelocatable()))); + + // TODO: Check overlapping sections + std::list::iterator i; + for (i = sections.begin(); i != sections.end(); ++i) + if ((*i)->getAddr() > section->getAddr()) break; + sections.insert(i, section); + section->addToSegment(this); +} + +void ElfSegment::removeSection(ElfSection* section) { + sections.remove(section); + section->removeFromSegment(this); +} + +unsigned int ElfSegment::getFileSize() { + if (type == PT_GNU_RELRO) return filesz; + + if (sections.empty()) return 0; + // Search the last section that is not SHT_NOBITS + std::list::reverse_iterator i; + for (i = sections.rbegin(); + (i != sections.rend()) && ((*i)->getType() == SHT_NOBITS); ++i) + ; + // All sections are SHT_NOBITS + if (i == sections.rend()) return 0; + + unsigned int end = (*i)->getAddr() + (*i)->getSize(); + + return end - sections.front()->getAddr(); +} + +unsigned int ElfSegment::getMemSize() { + if (type == PT_GNU_RELRO) return memsz; + + if (sections.empty()) return 0; + + unsigned int end = sections.back()->getAddr() + sections.back()->getSize(); + + return end - sections.front()->getAddr(); +} + +unsigned int ElfSegment::getOffset() { + if ((type == PT_GNU_RELRO) && !sections.empty() && + (sections.front()->getAddr() != vaddr)) + throw std::runtime_error( + "PT_GNU_RELRO segment doesn't start on a section start"); + + return sections.empty() ? 0 : sections.front()->getOffset(); +} + +unsigned int ElfSegment::getAddr() { + if ((type == PT_GNU_RELRO) && !sections.empty() && + (sections.front()->getAddr() != vaddr)) + throw std::runtime_error( + "PT_GNU_RELRO segment doesn't start on a section start"); + + return sections.empty() ? 0 : sections.front()->getAddr(); +} + +void ElfSegment::clear() { + for (std::list::iterator i = sections.begin(); + i != sections.end(); ++i) + (*i)->removeFromSegment(this); + sections.clear(); +} + +ElfValue* ElfDynamic_Section::getValueForType(unsigned int tag) { + for (unsigned int i = 0; i < shdr.sh_size / shdr.sh_entsize; i++) + if (dyns[i].tag == tag) return dyns[i].value; + + return nullptr; +} + +ElfSection* ElfDynamic_Section::getSectionForType(unsigned int tag) { + ElfValue* value = getValueForType(tag); + return value ? value->getSection() : nullptr; +} + +bool ElfDynamic_Section::setValueForType(unsigned int tag, ElfValue* val) { + unsigned int i; + unsigned int shnum = shdr.sh_size / shdr.sh_entsize; + for (i = 0; (i < shnum) && (dyns[i].tag != DT_NULL); i++) + if (dyns[i].tag == tag) { + delete dyns[i].value; + dyns[i].value = val; + return true; + } + // If we get here, this means we didn't match for the given tag + // Most of the time, there are a few DT_NULL entries, that we can + // use to add our value, but if we are on the last entry, we can't. 
+ if (i >= shnum - 1) return false; + + dyns[i].tag = tag; + dyns[i].value = val; + return true; +} + +ElfDynamic_Section::ElfDynamic_Section(Elf_Shdr& s, std::ifstream* file, + Elf* parent) + : ElfSection(s, file, parent) { + auto pos = file->tellg(); + dyns.resize(s.sh_size / s.sh_entsize); + file->seekg(shdr.sh_offset); + // Here we assume tags refer to only one section (e.g. DT_RELSZ accounts + // for .rel.dyn size) + for (unsigned int i = 0; i < s.sh_size / s.sh_entsize; i++) { + Elf_Dyn dyn(*file, parent->getClass(), parent->getData()); + dyns[i].tag = dyn.d_tag; + switch (dyn.d_tag) { + case DT_NULL: + case DT_SYMBOLIC: + case DT_TEXTREL: + case DT_BIND_NOW: + dyns[i].value = new ElfValue(); + break; + case DT_NEEDED: + case DT_SONAME: + case DT_RPATH: + case DT_PLTREL: + case DT_RUNPATH: + case DT_FLAGS: + case DT_RELACOUNT: + case DT_RELCOUNT: + case DT_VERDEFNUM: + case DT_VERNEEDNUM: + dyns[i].value = new ElfPlainValue(dyn.d_un.d_val); + break; + case DT_PLTGOT: + case DT_HASH: + case DT_STRTAB: + case DT_SYMTAB: + case DT_RELA: + case DT_INIT: + case DT_FINI: + case DT_REL: + case DT_JMPREL: + case DT_INIT_ARRAY: + case DT_FINI_ARRAY: + case DT_GNU_HASH: + case DT_VERSYM: + case DT_VERNEED: + case DT_VERDEF: + dyns[i].value = new ElfLocation(dyn.d_un.d_ptr, parent); + break; + default: + dyns[i].value = nullptr; + } + } + // Another loop to get the section sizes + for (unsigned int i = 0; i < s.sh_size / s.sh_entsize; i++) + switch (dyns[i].tag) { + case DT_PLTRELSZ: + dyns[i].value = new ElfSize(getSectionForType(DT_JMPREL)); + break; + case DT_RELASZ: + dyns[i].value = new ElfSize(getSectionForType(DT_RELA)); + break; + case DT_STRSZ: + dyns[i].value = new ElfSize(getSectionForType(DT_STRTAB)); + break; + case DT_RELSZ: + dyns[i].value = new ElfSize(getSectionForType(DT_REL)); + break; + case DT_INIT_ARRAYSZ: + dyns[i].value = new ElfSize(getSectionForType(DT_INIT_ARRAY)); + break; + case DT_FINI_ARRAYSZ: + dyns[i].value = new ElfSize(getSectionForType(DT_FINI_ARRAY)); + break; + case DT_RELAENT: + dyns[i].value = new ElfEntSize(getSectionForType(DT_RELA)); + break; + case DT_SYMENT: + dyns[i].value = new ElfEntSize(getSectionForType(DT_SYMTAB)); + break; + case DT_RELENT: + dyns[i].value = new ElfEntSize(getSectionForType(DT_REL)); + break; + } + + file->seekg(pos); +} + +ElfDynamic_Section::~ElfDynamic_Section() { + for (unsigned int i = 0; i < shdr.sh_size / shdr.sh_entsize; i++) + delete dyns[i].value; +} + +void ElfDynamic_Section::serialize(std::ofstream& file, char ei_class, + char ei_data) { + for (unsigned int i = 0; i < shdr.sh_size / shdr.sh_entsize; i++) { + Elf_Dyn dyn; + dyn.d_tag = dyns[i].tag; + dyn.d_un.d_val = (dyns[i].value != nullptr) ? dyns[i].value->getValue() : 0; + dyn.serialize(file, ei_class, ei_data); + } +} + +ElfSymtab_Section::ElfSymtab_Section(Elf_Shdr& s, std::ifstream* file, + Elf* parent) + : ElfSection(s, file, parent) { + auto pos = file->tellg(); + syms.resize(s.sh_size / s.sh_entsize); + ElfStrtab_Section* strtab = (ElfStrtab_Section*)getLink(); + file->seekg(shdr.sh_offset); + for (unsigned int i = 0; i < shdr.sh_size / shdr.sh_entsize; i++) { + Elf_Sym sym(*file, parent->getClass(), parent->getData()); + syms[i].name = strtab->getStr(sym.st_name); + syms[i].info = sym.st_info; + syms[i].other = sym.st_other; + ElfSection* section = + (sym.st_shndx == SHN_ABS) ? 
nullptr : parent->getSection(sym.st_shndx); + new (&syms[i].value) + ElfLocation(section, sym.st_value, ElfLocation::ABSOLUTE); + syms[i].size = sym.st_size; + syms[i].defined = (sym.st_shndx != SHN_UNDEF); + } + file->seekg(pos); +} + +void ElfSymtab_Section::serialize(std::ofstream& file, char ei_class, + char ei_data) { + ElfStrtab_Section* strtab = (ElfStrtab_Section*)getLink(); + for (unsigned int i = 0; i < shdr.sh_size / shdr.sh_entsize; i++) { + Elf_Sym sym; + sym.st_name = strtab->getStrIndex(syms[i].name); + sym.st_info = syms[i].info; + sym.st_other = syms[i].other; + sym.st_value = syms[i].value.getValue(); + ElfSection* section = syms[i].value.getSection(); + if (syms[i].defined) + sym.st_shndx = section ? section->getIndex() : SHN_ABS; + else + sym.st_shndx = SHN_UNDEF; + sym.st_size = syms[i].size; + sym.serialize(file, ei_class, ei_data); + } +} + +Elf_SymValue* ElfSymtab_Section::lookup(const char* name, + unsigned int type_filter) { + for (std::vector::iterator sym = syms.begin(); + sym != syms.end(); sym++) { + if ((type_filter & (1 << ELF32_ST_TYPE(sym->info))) && + (strcmp(sym->name, name) == 0)) { + return &*sym; + } + } + return nullptr; +} + +const char* ElfStrtab_Section::getStr(unsigned int index) { + for (std::vector::iterator t = table.begin(); t != table.end(); + t++) { + if (index < t->used) return t->buf + index; + index -= t->used; + } + assert(1 == 0); + return nullptr; +} + +const char* ElfStrtab_Section::getStr(const char* string) { + if (string == nullptr) return nullptr; + + // If the given string is within the section, return it + for (std::vector::iterator t = table.begin(); t != table.end(); + t++) + if ((string >= t->buf) && (string < t->buf + t->used)) return string; + + // TODO: should scan in the section to find an existing string + + // If not, we need to allocate the string in the section + size_t len = strlen(string) + 1; + + if (table.back().size - table.back().used < len) + table.resize(table.size() + 1); + + char* alloc_str = table.back().buf + table.back().used; + memcpy(alloc_str, string, len); + table.back().used += len; + + shdr.sh_size += len; + markDirty(); + + return alloc_str; +} + +unsigned int ElfStrtab_Section::getStrIndex(const char* string) { + if (string == nullptr) return 0; + + unsigned int index = 0; + string = getStr(string); + for (std::vector::iterator t = table.begin(); t != table.end(); + t++) { + if ((string >= t->buf) && (string < t->buf + t->used)) + return index + (string - t->buf); + index += t->used; + } + + assert(1 == 0); + return 0; +} + +void ElfStrtab_Section::serialize(std::ofstream& file, char ei_class, + char ei_data) { + file.seekp(getOffset()); + for (std::vector::iterator t = table.begin(); t != table.end(); + t++) + file.write(t->buf, t->used); +} diff --git a/build/unix/elfhack/elfhack.cpp b/build/unix/elfhack/elfhack.cpp new file mode 100644 index 0000000000..ec01e54674 --- /dev/null +++ b/build/unix/elfhack/elfhack.cpp @@ -0,0 +1,1325 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#undef NDEBUG +#include +#include +#include +#include +#include "elfxx.h" +#include "mozilla/CheckedInt.h" + +#define ver "0" +#define elfhack_data ".elfhack.data.v" ver +#define elfhack_text ".elfhack.text.v" ver + +#ifndef R_ARM_V4BX +# define R_ARM_V4BX 0x28 +#endif +#ifndef R_ARM_CALL +# define R_ARM_CALL 0x1c +#endif +#ifndef R_ARM_JUMP24 +# define R_ARM_JUMP24 0x1d +#endif +#ifndef R_ARM_THM_JUMP24 +# define R_ARM_THM_JUMP24 0x1e +#endif + +char* rundir = nullptr; + +template +struct wrapped { + T value; +}; + +class Elf_Addr_Traits { + public: + typedef wrapped Type32; + typedef wrapped Type64; + + template + static inline void swap(T& t, R& r) { + r.value = endian::swap(t.value); + } +}; + +typedef serializable Elf_Addr; + +class Elf_RelHack_Traits { + public: + typedef Elf32_Rel Type32; + typedef Elf32_Rel Type64; + + template + static inline void swap(T& t, R& r) { + r.r_offset = endian::swap(t.r_offset); + r.r_info = endian::swap(t.r_info); + } +}; + +typedef serializable Elf_RelHack; + +class ElfRelHack_Section : public ElfSection { + public: + ElfRelHack_Section(Elf_Shdr& s) : ElfSection(s, nullptr, nullptr) { + name = elfhack_data; + }; + + void serialize(std::ofstream& file, char ei_class, char ei_data) { + for (std::vector::iterator i = rels.begin(); i != rels.end(); + ++i) + (*i).serialize(file, ei_class, ei_data); + } + + bool isRelocatable() { return true; } + + void push_back(Elf_RelHack& r) { + rels.push_back(r); + shdr.sh_size = rels.size() * shdr.sh_entsize; + } + + private: + std::vector rels; +}; + +class ElfRelHackCode_Section : public ElfSection { + public: + ElfRelHackCode_Section(Elf_Shdr& s, Elf& e, + ElfRelHack_Section& relhack_section, unsigned int init, + unsigned int mprotect_cb, unsigned int sysconf_cb) + : ElfSection(s, nullptr, nullptr), + parent(e), + relhack_section(relhack_section), + init(init), + init_trampoline(nullptr), + mprotect_cb(mprotect_cb), + sysconf_cb(sysconf_cb) { + std::string file(rundir); + file += "/inject/"; + switch (parent.getMachine()) { + case EM_386: + file += "x86"; + break; + case EM_X86_64: + file += "x86_64"; + break; + case EM_ARM: + file += "arm"; + break; + default: + throw std::runtime_error("unsupported architecture"); + } + file += ".o"; + std::ifstream inject(file.c_str(), std::ios::in | std::ios::binary); + elf = new Elf(inject); + if (elf->getType() != ET_REL) + throw std::runtime_error("object for injected code is not ET_REL"); + if (elf->getMachine() != parent.getMachine()) + throw std::runtime_error( + "architecture of object for injected code doesn't match"); + + ElfSymtab_Section* symtab = nullptr; + + // Find the symbol table. + for (ElfSection* section = elf->getSection(1); section != nullptr; + section = section->getNext()) { + if (section->getType() == SHT_SYMTAB) + symtab = (ElfSymtab_Section*)section; + } + if (symtab == nullptr) + throw std::runtime_error( + "Couldn't find a symbol table for the injected code"); + + relro = parent.getSegmentByType(PT_GNU_RELRO); + + // Find the init symbol + entry_point = -1; + std::string symbol = "init"; + if (!init) symbol += "_noinit"; + if (relro) symbol += "_relro"; + Elf_SymValue* sym = symtab->lookup(symbol.c_str()); + if (!sym) + throw std::runtime_error( + "Couldn't find an 'init' symbol in the injected code"); + + entry_point = sym->value.getValue(); + + // Get all relevant sections from the injected code object. 
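+    // Note: add_code_section() recurses via find_code(): every section
+    // referenced by the init code's relocations is pulled in as well, so the
+    // whole reachable part of the injected object ends up copied here.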
+ add_code_section(sym->value.getSection()); + + // If the original init function is located too far away, we're going to + // need to use a trampoline. See comment in inject.c. + // Theoretically, we should check for (init - instr) > 0xffffff, where instr + // is the virtual address of the instruction that calls the original init, + // but we don't have it at this point, so punt to just init. + if (init > 0xffffff && parent.getMachine() == EM_ARM) { + Elf_SymValue* trampoline = symtab->lookup("init_trampoline"); + if (!trampoline) { + throw std::runtime_error( + "Couldn't find an 'init_trampoline' symbol in the injected code"); + } + + init_trampoline = trampoline->value.getSection(); + add_code_section(init_trampoline); + } + + // Adjust code sections offsets according to their size + std::vector::iterator c = code.begin(); + (*c)->getShdr().sh_addr = 0; + for (ElfSection* last = *(c++); c != code.end(); ++c) { + unsigned int addr = last->getShdr().sh_addr + last->getSize(); + if (addr & ((*c)->getAddrAlign() - 1)) + addr = (addr | ((*c)->getAddrAlign() - 1)) + 1; + (*c)->getShdr().sh_addr = addr; + // We need to align this section depending on the greater + // alignment required by code sections. + if (shdr.sh_addralign < (*c)->getAddrAlign()) + shdr.sh_addralign = (*c)->getAddrAlign(); + last = *c; + } + shdr.sh_size = code.back()->getAddr() + code.back()->getSize(); + data = static_cast(malloc(shdr.sh_size)); + if (!data) { + throw std::runtime_error("Could not malloc ElfSection data"); + } + char* buf = data; + for (c = code.begin(); c != code.end(); ++c) { + memcpy(buf, (*c)->getData(), (*c)->getSize()); + buf += (*c)->getSize(); + } + name = elfhack_text; + } + + ~ElfRelHackCode_Section() { delete elf; } + + void serialize(std::ofstream& file, char ei_class, char ei_data) override { + // Readjust code offsets + for (std::vector::iterator c = code.begin(); c != code.end(); + ++c) + (*c)->getShdr().sh_addr += getAddr(); + + // Apply relocations + for (std::vector::iterator c = code.begin(); c != code.end(); + ++c) { + for (ElfSection* rel = elf->getSection(1); rel != nullptr; + rel = rel->getNext()) + if (((rel->getType() == SHT_REL) || (rel->getType() == SHT_RELA)) && + (rel->getInfo().section == *c)) { + if (rel->getType() == SHT_REL) + apply_relocations((ElfRel_Section*)rel, *c); + else + apply_relocations((ElfRel_Section*)rel, *c); + } + } + + ElfSection::serialize(file, ei_class, ei_data); + } + + bool isRelocatable() override { return false; } + + unsigned int getEntryPoint() { return entry_point; } + + void insertBefore(ElfSection* section, bool dirty = true) override { + // Adjust the address so that this section is adjacent to the one it's + // being inserted before. This avoids creating holes which subsequently + // might lead the PHDR-adjusting code to create unnecessary additional + // PT_LOADs. 
+    shdr.sh_addr =
+        (section->getAddr() - shdr.sh_size) & ~(shdr.sh_addralign - 1);
+    ElfSection::insertBefore(section, dirty);
+  }
+
+ private:
+  void add_code_section(ElfSection* section) {
+    if (section) {
+      /* Don't add section if it's already been added in the past */
+      for (auto s = code.begin(); s != code.end(); ++s) {
+        if (section == *s) return;
+      }
+      code.push_back(section);
+      find_code(section);
+    }
+  }
+
+  /* Look at the relocations associated to the given section to find other
+   * sections that it requires */
+  void find_code(ElfSection* section) {
+    for (ElfSection* s = elf->getSection(1); s != nullptr; s = s->getNext()) {
+      if (((s->getType() == SHT_REL) || (s->getType() == SHT_RELA)) &&
+          (s->getInfo().section == section)) {
+        if (s->getType() == SHT_REL)
+          scan_relocs_for_code((ElfRel_Section<Elf_Rel>*)s);
+        else
+          scan_relocs_for_code((ElfRel_Section<Elf_Rela>*)s);
+      }
+    }
+  }
+
+  template <typename Rel_Type>
+  void scan_relocs_for_code(ElfRel_Section<Rel_Type>* rel) {
+    ElfSymtab_Section* symtab = (ElfSymtab_Section*)rel->getLink();
+    for (auto r = rel->rels.begin(); r != rel->rels.end(); ++r) {
+      ElfSection* section =
+          symtab->syms[ELF32_R_SYM(r->r_info)].value.getSection();
+      add_code_section(section);
+    }
+  }
+
+  class pc32_relocation {
+   public:
+    Elf32_Addr operator()(unsigned int base_addr, Elf32_Off offset,
+                          Elf32_Word addend, unsigned int addr) {
+      return addr + addend - offset - base_addr;
+    }
+  };
+
+  class arm_plt32_relocation {
+   public:
+    Elf32_Addr operator()(unsigned int base_addr, Elf32_Off offset,
+                          Elf32_Word addend, unsigned int addr) {
+      // We don't care about sign_extend because the only case where this is
+      // going to be used only jumps forward.
+      Elf32_Addr tmp = (Elf32_Addr)(addr - offset - base_addr) >> 2;
+      tmp = (addend + tmp) & 0x00ffffff;
+      return (addend & 0xff000000) | tmp;
+    }
+  };
+
+  class arm_thm_jump24_relocation {
+   public:
+    Elf32_Addr operator()(unsigned int base_addr, Elf32_Off offset,
+                          Elf32_Word addend, unsigned int addr) {
+      /* Follows description of b.w and bl instructions as per
+         ARM Architecture Reference Manual ARM® v7-A and ARM® v7-R edition,
+         A8.6.16 We limit ourselves to Encoding T4 of b.w and Encoding T1 of
+         bl. We don't care about sign_extend because the only case where this
+         is going to be used only jumps forward. */
+      Elf32_Addr tmp = (Elf32_Addr)(addr - offset - base_addr);
+      unsigned int word0 = addend & 0xffff, word1 = addend >> 16;
+
+      /* Encoding T4 of B.W is 10x1 ; Encoding T1 of BL is 11x1. */
+      unsigned int type = (word1 & 0xd000) >> 12;
+      if (((word0 & 0xf800) != 0xf000) || ((type & 0x9) != 0x9))
+        throw std::runtime_error(
+            "R_ARM_THM_JUMP24/R_ARM_THM_CALL relocation only supported for B.W "
+            "
+
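The excerpt above carries two pieces of arithmetic worth spelling out: the PC-relative formula the pc32_relocation functor applies to the injected code, and the range limit that makes the ARM init trampoline necessary. The standalone sketch below restates both under the source's own assumptions (forward jumps only, 0xffffff as the reach threshold when the call-site address is unknown); the helper names and sample addresses are illustrative and are not part of elfhack.

#include <cstdint>
#include <iostream>

// pc32_relocation arithmetic: the classic S + A - P form, where the place
// being patched is P = base_addr + offset and S is the symbol address.
static uint32_t pc32(uint32_t base_addr, uint32_t offset, uint32_t addend,
                     uint32_t symbol_addr) {
  return symbol_addr + addend - offset - base_addr;
}

// Range check behind the trampoline decision: ideally (init - instr), but the
// calling instruction's address is unknown when the section is built, so init
// alone is tested, exactly as the constructor comment explains.
static bool needs_trampoline(uint32_t init, uint32_t instr = 0) {
  return (init - instr) > 0xffffff;  // forward jump assumed, as in elfhack
}

int main() {
  std::cout << std::hex << pc32(0x1000, 0x10, 0, 0x2000) << "\n";  // ff0
  std::cout << needs_trampoline(0x00800000) << "\n";  // 0: reachable directly
  std::cout << needs_trampoline(0x02000000) << "\n";  // 1: trampoline needed
}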

Tests for common HTML elements

PARAGRAPHS & BOXES
Lorem ipsum filler paragraphs, including one exercising sub text and sup text, followed by a group of shorter boxed paragraphs.

LISTS
An unordered list, an ordered list, nested ordered/unordered lists, and a definition list of dt/dd pairs with lorem ipsum filler.

HEADINGS
Sample headings H1 through H6 ("H1: Lorem ipsum dolor sit amet" ... "H6: ..."), repeated in three heading style variants ("Heading 1" ... "Heading 6").

MISC ELEMENTS
<strong>, <del> deleted, <dfn> dfn, <em> emphasis, <a> anchor and a + href, <abbr> and <acronym> with extended text on mouseover, an <address> block (Donald Duck, Box 555, Disneyland), and a short filler paragraph.

A standard test table with a caption, tr, td elements: Table Header One / Table Header Two, TD One / TD Two, and a TD spanning two columns (colspan 2).
A test table with a thead, tfoot, and tbody elements: Table Header One / Table Header Two, a tfoot footer, and four body rows of TD One / TD Two.

<pre> samples preserving runs of spaces and tabs; <code> samples with increasing indentation; <tt> text that should be monospaced, wrap as if one line of text even though the code has newlines, spaces, and tabs, and be the same size as <p> text.

Valid HTML 4.01 Strict