Diffstat (limited to 'mesonbuild/scripts')
-rw-r--r--  mesonbuild/scripts/__init__.py               21
-rw-r--r--  mesonbuild/scripts/clangformat.py            61
-rw-r--r--  mesonbuild/scripts/clangtidy.py              35
-rw-r--r--  mesonbuild/scripts/cleantrees.py             45
-rwxr-xr-x  mesonbuild/scripts/cmake_run_ctgt.py        103
-rw-r--r--  mesonbuild/scripts/cmd_or_ps.ps1             17
-rw-r--r--  mesonbuild/scripts/copy.py                   19
-rw-r--r--  mesonbuild/scripts/coverage.py              202
-rw-r--r--  mesonbuild/scripts/delwithsuffix.py          37
-rw-r--r--  mesonbuild/scripts/depfixer.py              505
-rw-r--r--  mesonbuild/scripts/depscan.py               208
-rw-r--r--  mesonbuild/scripts/dirchanger.py             30
-rwxr-xr-x  mesonbuild/scripts/env2mfile.py             368
-rw-r--r--  mesonbuild/scripts/externalproject.py       116
-rw-r--r--  mesonbuild/scripts/gettext.py                96
-rw-r--r--  mesonbuild/scripts/gtkdochelper.py          296
-rw-r--r--  mesonbuild/scripts/hotdochelper.py           40
-rw-r--r--  mesonbuild/scripts/itstool.py                86
-rw-r--r--  mesonbuild/scripts/meson_exe.py             124
-rw-r--r--  mesonbuild/scripts/msgfmthelper.py           39
-rw-r--r--  mesonbuild/scripts/regen_checker.py          65
-rw-r--r--  mesonbuild/scripts/run_tool.py               68
-rw-r--r--  mesonbuild/scripts/scanbuild.py              66
-rw-r--r--  mesonbuild/scripts/symbolextractor.py       333
-rw-r--r--  mesonbuild/scripts/tags.py                   54
-rw-r--r--  mesonbuild/scripts/test_loaded_modules.py    11
-rw-r--r--  mesonbuild/scripts/uninstall.py              51
-rw-r--r--  mesonbuild/scripts/vcstagger.py              45
-rw-r--r--  mesonbuild/scripts/yasm.py                   22
29 files changed, 3163 insertions(+), 0 deletions(-)
diff --git a/mesonbuild/scripts/__init__.py b/mesonbuild/scripts/__init__.py
new file mode 100644
index 0000000..7277771
--- /dev/null
+++ b/mesonbuild/scripts/__init__.py
@@ -0,0 +1,21 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from pathlib import PurePath
+
+def destdir_join(d1: str, d2: str) -> str:
+ if not d1:
+ return d2
+ # c:\destdir + c:\prefix must produce c:\destdir\prefix
+ return str(PurePath(d1, *PurePath(d2).parts[1:]))
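
For reference, a minimal sketch of how destdir_join composes a DESTDIR with an
installation prefix (hypothetical paths, assuming the mesonbuild package from
this tree is importable). The root or drive letter of the second path is
dropped so the prefix nests under the first, exactly as the comment above
describes for c:\destdir + c:\prefix:

    from mesonbuild.scripts import destdir_join

    # On a POSIX host the leading '/' of the prefix is stripped before joining.
    print(destdir_join('/tmp/staging', '/usr/local/lib'))  # /tmp/staging/usr/local/lib
    # An empty DESTDIR leaves the prefix untouched.
    print(destdir_join('', '/usr/local/lib'))              # /usr/local/lib
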
diff --git a/mesonbuild/scripts/clangformat.py b/mesonbuild/scripts/clangformat.py
new file mode 100644
index 0000000..a706b76
--- /dev/null
+++ b/mesonbuild/scripts/clangformat.py
@@ -0,0 +1,61 @@
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import argparse
+import subprocess
+from pathlib import Path
+
+from .run_tool import run_tool
+from ..environment import detect_clangformat
+from ..mesonlib import version_compare
+from ..programs import ExternalProgram
+import typing as T
+
+def run_clang_format(fname: Path, exelist: T.List[str], check: bool) -> subprocess.CompletedProcess:
+ clangformat_10 = False
+ if check:
+ cformat_ver = ExternalProgram('clang-format', exelist).get_version()
+ if version_compare(cformat_ver, '>=10'):
+ clangformat_10 = True
+ exelist = exelist + ['--dry-run', '--Werror']
+ else:
+ original = fname.read_bytes()
+ before = fname.stat().st_mtime
+ ret = subprocess.run(exelist + ['-style=file', '-i', str(fname)])
+ after = fname.stat().st_mtime
+ if before != after:
+ print('File reformatted: ', fname)
+ if check and not clangformat_10:
+ # Restore the original if only checking.
+ fname.write_bytes(original)
+ ret.returncode = 1
+ return ret
+
+def run(args: T.List[str]) -> int:
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--check', action='store_true')
+ parser.add_argument('sourcedir')
+ parser.add_argument('builddir')
+ options = parser.parse_args(args)
+
+ srcdir = Path(options.sourcedir)
+ builddir = Path(options.builddir)
+
+ exelist = detect_clangformat()
+ if not exelist:
+        print('Could not detect clang-format.')
+ return 1
+
+ return run_tool('clang-format', srcdir, builddir, run_clang_format, exelist, options.check)
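
A rough usage sketch with hypothetical paths; this is essentially what the
clang-format targets generated by Meson end up invoking:

    from mesonbuild.scripts import clangformat

    # Reformat every eligible source under the source tree in place.
    clangformat.run(['/path/to/source', '/path/to/build'])

    # Verify formatting only: clang-format >= 10 is asked for --dry-run
    # --Werror, while older versions are emulated by reformatting the file
    # and then restoring the original bytes.
    rc = clangformat.run(['--check', '/path/to/source', '/path/to/build'])
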
diff --git a/mesonbuild/scripts/clangtidy.py b/mesonbuild/scripts/clangtidy.py
new file mode 100644
index 0000000..324a26e
--- /dev/null
+++ b/mesonbuild/scripts/clangtidy.py
@@ -0,0 +1,35 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import argparse
+import subprocess
+from pathlib import Path
+
+from .run_tool import run_tool
+import typing as T
+
+def run_clang_tidy(fname: Path, builddir: Path) -> subprocess.CompletedProcess:
+ return subprocess.run(['clang-tidy', '-p', str(builddir), str(fname)])
+
+def run(args: T.List[str]) -> int:
+ parser = argparse.ArgumentParser()
+ parser.add_argument('sourcedir')
+ parser.add_argument('builddir')
+ options = parser.parse_args(args)
+
+ srcdir = Path(options.sourcedir)
+ builddir = Path(options.builddir)
+
+ return run_tool('clang-tidy', srcdir, builddir, run_clang_tidy, builddir)
diff --git a/mesonbuild/scripts/cleantrees.py b/mesonbuild/scripts/cleantrees.py
new file mode 100644
index 0000000..3512f56
--- /dev/null
+++ b/mesonbuild/scripts/cleantrees.py
@@ -0,0 +1,45 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import os
+import sys
+import shutil
+import pickle
+import typing as T
+
+def rmtrees(build_dir: str, trees: T.List[str]) -> None:
+ for t in trees:
+ # Never delete trees outside of the builddir
+ if os.path.isabs(t):
+ print(f'Cannot delete dir with absolute path {t!r}')
+ continue
+ bt = os.path.join(build_dir, t)
+ # Skip if it doesn't exist, or if it is not a directory
+ if os.path.isdir(bt):
+ shutil.rmtree(bt, ignore_errors=True)
+
+def run(args: T.List[str]) -> int:
+ if len(args) != 1:
+ print('Cleaner script for Meson. Do not run on your own please.')
+ print('cleantrees.py <data-file>')
+ return 1
+ with open(args[0], 'rb') as f:
+ data = pickle.load(f)
+ rmtrees(data.build_dir, data.trees)
+ # Never fail cleaning
+ return 0
+
+if __name__ == '__main__':
+ run(sys.argv[1:])
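
The data file read above is normally pickled by Meson's backend. As an
illustration only, any object exposing 'build_dir' and 'trees' attributes
works; the paths below are hypothetical:

    import pickle
    import types

    from mesonbuild.scripts import cleantrees

    # Stand-in for the object Meson's backend would pickle.
    data = types.SimpleNamespace(build_dir='/path/to/build',
                                 trees=['subdir/generated', 'docs/html'])
    with open('cleantrees.dat', 'wb') as f:
        pickle.dump(data, f)

    # Removes /path/to/build/subdir/generated and /path/to/build/docs/html;
    # absolute entries are refused and missing ones are skipped.
    cleantrees.run(['cleantrees.dat'])
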
diff --git a/mesonbuild/scripts/cmake_run_ctgt.py b/mesonbuild/scripts/cmake_run_ctgt.py
new file mode 100755
index 0000000..a788ba5
--- /dev/null
+++ b/mesonbuild/scripts/cmake_run_ctgt.py
@@ -0,0 +1,103 @@
+#!/usr/bin/env python3
+from __future__ import annotations
+
+import argparse
+import subprocess
+import shutil
+import sys
+from pathlib import Path
+import typing as T
+
+def run(argsv: T.List[str]) -> int:
+ commands = [[]] # type: T.List[T.List[str]]
+ SEPARATOR = ';;;'
+
+ # Generate CMD parameters
+ parser = argparse.ArgumentParser(description='Wrapper for add_custom_command')
+ parser.add_argument('-d', '--directory', type=str, metavar='D', required=True, help='Working directory to cwd to')
+ parser.add_argument('-o', '--outputs', nargs='+', metavar='O', required=True, help='Expected output files')
+ parser.add_argument('-O', '--original-outputs', nargs='*', metavar='O', default=[], help='Output files expected by CMake')
+ parser.add_argument('commands', nargs=argparse.REMAINDER, help=f'A "{SEPARATOR}" separated list of commands')
+
+ # Parse
+ args = parser.parse_args(argsv)
+ directory = Path(args.directory)
+
+ dummy_target = None
+ if len(args.outputs) == 1 and len(args.original_outputs) == 0:
+ dummy_target = Path(args.outputs[0])
+ elif len(args.outputs) != len(args.original_outputs):
+ print('Length of output list and original output list differ')
+ return 1
+
+ for i in args.commands:
+ if i == SEPARATOR:
+ commands += [[]]
+ continue
+
+        i = i.replace('"', '') # Remove leftover quotes
+ commands[-1] += [i]
+
+ # Execute
+ for i in commands:
+ # Skip empty lists
+ if not i:
+ continue
+
+ cmd = []
+ stdout = None
+ stderr = None
+ capture_file = ''
+
+ for j in i:
+ if j in {'>', '>>'}:
+ stdout = subprocess.PIPE
+ continue
+ elif j in {'&>', '&>>'}:
+ stdout = subprocess.PIPE
+ stderr = subprocess.STDOUT
+ continue
+
+ if stdout is not None or stderr is not None:
+ capture_file += j
+ else:
+ cmd += [j]
+
+ try:
+ directory.mkdir(parents=True, exist_ok=True)
+
+ res = subprocess.run(cmd, stdout=stdout, stderr=stderr, cwd=str(directory), check=True)
+ if capture_file:
+ out_file = directory / capture_file
+ out_file.write_bytes(res.stdout)
+ except subprocess.CalledProcessError:
+ return 1
+
+ if dummy_target:
+ dummy_target.touch()
+ return 0
+
+ # Copy outputs
+ zipped_outputs = zip([Path(x) for x in args.outputs], [Path(x) for x in args.original_outputs])
+ for expected, generated in zipped_outputs:
+ do_copy = False
+ if not expected.exists():
+ if not generated.exists():
+ print('Unable to find generated file. This can cause the build to fail:')
+ print(generated)
+ do_copy = False
+ else:
+ do_copy = True
+ elif generated.exists():
+ if generated.stat().st_mtime > expected.stat().st_mtime:
+ do_copy = True
+
+ if do_copy:
+ if expected.exists():
+ expected.unlink()
+ shutil.copyfile(str(generated), str(expected))
+
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(run(sys.argv[1:]))
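
A hypothetical invocation sketch (assumes python3 is on PATH): two commands
separated by ';;;', with the second one's stdout captured by the wrapper
itself because of the '>' token, and a single -o output used as a dummy stamp:

    from mesonbuild.scripts import cmake_run_ctgt

    cmake_run_ctgt.run([
        '-o', 'stamp.txt',   # one output and no -O: treated as a dummy stamp
        '-d', 'workdir',     # directory the wrapped commands run in
        'python3', '-c', "print('hello')", ';;;',
        'python3', '-c', "print('world')", '>', 'captured.txt',
    ])
    # workdir/captured.txt now contains "world"; stamp.txt has been touched.
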
diff --git a/mesonbuild/scripts/cmd_or_ps.ps1 b/mesonbuild/scripts/cmd_or_ps.ps1
new file mode 100644
index 0000000..96c32e2
--- /dev/null
+++ b/mesonbuild/scripts/cmd_or_ps.ps1
@@ -0,0 +1,17 @@
+# Copied from GStreamer project
+# Author: Seungha Yang <seungha.yang@navercorp.com>
+# Xavier Claessens <xclaesse@gmail.com>
+
+$i=1
+$ppid=$PID
+do {
+ $ppid=(Get-CimInstance Win32_Process -Filter "ProcessId=$ppid").parentprocessid
+ $pname=(Get-Process -id $ppid).Name
+ if($pname -eq "cmd" -Or $pname -eq "powershell" -Or $pname -eq "pwsh") {
+ Write-Host ("{0}.exe" -f $pname)
+ Break
+ }
+ # not found yet, find grand parent
+ # 10 times iteration seems to be sufficient
+ $i++
+} while ($i -lt 10)
diff --git a/mesonbuild/scripts/copy.py b/mesonbuild/scripts/copy.py
new file mode 100644
index 0000000..dba13a5
--- /dev/null
+++ b/mesonbuild/scripts/copy.py
@@ -0,0 +1,19 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright © 2021 Intel Corporation
+from __future__ import annotations
+
+"""Helper script to copy files at build time.
+
+This is easier than trying to detect whether to use copy, cp, or something else.
+"""
+
+import shutil
+import typing as T
+
+
+def run(args: T.List[str]) -> int:
+ try:
+ shutil.copy2(args[0], args[1])
+ except Exception:
+ return 1
+ return 0
diff --git a/mesonbuild/scripts/coverage.py b/mesonbuild/scripts/coverage.py
new file mode 100644
index 0000000..5e78639
--- /dev/null
+++ b/mesonbuild/scripts/coverage.py
@@ -0,0 +1,202 @@
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from mesonbuild import environment, mesonlib
+
+import argparse, re, sys, os, subprocess, pathlib, stat
+import typing as T
+
+def coverage(outputs: T.List[str], source_root: str, subproject_root: str, build_root: str, log_dir: str, use_llvm_cov: bool) -> int:
+ outfiles = []
+ exitcode = 0
+
+ (gcovr_exe, gcovr_version, lcov_exe, genhtml_exe, llvm_cov_exe) = environment.find_coverage_tools()
+
+ # load config files for tools if available in the source tree
+ # - lcov requires manually specifying a per-project config
+ # - gcovr picks up the per-project config, and also supports filtering files
+ # so don't exclude subprojects ourselves, if the project has a config,
+ # because they either don't want that, or should set it themselves
+ lcovrc = os.path.join(source_root, '.lcovrc')
+ if os.path.exists(lcovrc):
+ lcov_config = ['--config-file', lcovrc]
+ else:
+ lcov_config = []
+
+ gcovr_config = ['-e', re.escape(subproject_root)]
+
+ # gcovr >= 4.2 requires a different syntax for out of source builds
+ if gcovr_exe and mesonlib.version_compare(gcovr_version, '>=4.2'):
+ gcovr_base_cmd = [gcovr_exe, '-r', source_root, build_root]
+ # it also started supporting the config file
+ if os.path.exists(os.path.join(source_root, 'gcovr.cfg')):
+ gcovr_config = []
+ else:
+ gcovr_base_cmd = [gcovr_exe, '-r', build_root]
+
+ if use_llvm_cov:
+ gcov_exe_args = ['--gcov-executable', llvm_cov_exe + ' gcov']
+ else:
+ gcov_exe_args = []
+
+ if not outputs or 'xml' in outputs:
+ if gcovr_exe and mesonlib.version_compare(gcovr_version, '>=3.3'):
+ subprocess.check_call(gcovr_base_cmd + gcovr_config +
+ ['-x',
+ '-o', os.path.join(log_dir, 'coverage.xml')
+ ] + gcov_exe_args)
+ outfiles.append(('Xml', pathlib.Path(log_dir, 'coverage.xml')))
+ elif outputs:
+ print('gcovr >= 3.3 needed to generate Xml coverage report')
+ exitcode = 1
+
+ if not outputs or 'sonarqube' in outputs:
+ if gcovr_exe and mesonlib.version_compare(gcovr_version, '>=4.2'):
+ subprocess.check_call(gcovr_base_cmd + gcovr_config +
+ ['--sonarqube',
+ '-o', os.path.join(log_dir, 'sonarqube.xml'),
+ ] + gcov_exe_args)
+ outfiles.append(('Sonarqube', pathlib.Path(log_dir, 'sonarqube.xml')))
+ elif outputs:
+            print('gcovr >= 4.2 needed to generate Sonarqube xml coverage report')
+ exitcode = 1
+
+ if not outputs or 'text' in outputs:
+ if gcovr_exe and mesonlib.version_compare(gcovr_version, '>=3.3'):
+ subprocess.check_call(gcovr_base_cmd + gcovr_config +
+ ['-o', os.path.join(log_dir, 'coverage.txt')] +
+ gcov_exe_args)
+ outfiles.append(('Text', pathlib.Path(log_dir, 'coverage.txt')))
+ elif outputs:
+ print('gcovr >= 3.3 needed to generate text coverage report')
+ exitcode = 1
+
+ if not outputs or 'html' in outputs:
+ if lcov_exe and genhtml_exe:
+ htmloutdir = os.path.join(log_dir, 'coveragereport')
+ covinfo = os.path.join(log_dir, 'coverage.info')
+ initial_tracefile = covinfo + '.initial'
+ run_tracefile = covinfo + '.run'
+ raw_tracefile = covinfo + '.raw'
+ if use_llvm_cov:
+ # Create a shim to allow using llvm-cov as a gcov tool.
+ if mesonlib.is_windows():
+ llvm_cov_shim_path = os.path.join(log_dir, 'llvm-cov.bat')
+ with open(llvm_cov_shim_path, 'w', encoding='utf-8') as llvm_cov_bat:
+ llvm_cov_bat.write(f'@"{llvm_cov_exe}" gcov %*')
+ else:
+ llvm_cov_shim_path = os.path.join(log_dir, 'llvm-cov.sh')
+ with open(llvm_cov_shim_path, 'w', encoding='utf-8') as llvm_cov_sh:
+ llvm_cov_sh.write(f'#!/usr/bin/env sh\nexec "{llvm_cov_exe}" gcov $@')
+ os.chmod(llvm_cov_shim_path, os.stat(llvm_cov_shim_path).st_mode | stat.S_IEXEC)
+ gcov_tool_args = ['--gcov-tool', llvm_cov_shim_path]
+ else:
+ gcov_tool_args = []
+ subprocess.check_call([lcov_exe,
+ '--directory', build_root,
+ '--capture',
+ '--initial',
+ '--output-file',
+ initial_tracefile] +
+ lcov_config +
+ gcov_tool_args)
+ subprocess.check_call([lcov_exe,
+ '--directory', build_root,
+ '--capture',
+ '--output-file', run_tracefile,
+ '--no-checksum',
+ '--rc', 'lcov_branch_coverage=1'] +
+ lcov_config +
+ gcov_tool_args)
+ # Join initial and test results.
+ subprocess.check_call([lcov_exe,
+ '-a', initial_tracefile,
+ '-a', run_tracefile,
+ '--rc', 'lcov_branch_coverage=1',
+ '-o', raw_tracefile] + lcov_config)
+ # Remove all directories outside the source_root from the covinfo
+ subprocess.check_call([lcov_exe,
+ '--extract', raw_tracefile,
+ os.path.join(source_root, '*'),
+ '--rc', 'lcov_branch_coverage=1',
+ '--output-file', covinfo] + lcov_config)
+ # Remove all directories inside subproject dir
+ subprocess.check_call([lcov_exe,
+ '--remove', covinfo,
+ os.path.join(subproject_root, '*'),
+ '--rc', 'lcov_branch_coverage=1',
+ '--output-file', covinfo] + lcov_config)
+ subprocess.check_call([genhtml_exe,
+ '--prefix', build_root,
+ '--prefix', source_root,
+ '--output-directory', htmloutdir,
+ '--title', 'Code coverage',
+ '--legend',
+ '--show-details',
+ '--branch-coverage',
+ covinfo])
+ outfiles.append(('Html', pathlib.Path(htmloutdir, 'index.html')))
+ elif gcovr_exe and mesonlib.version_compare(gcovr_version, '>=3.3'):
+ htmloutdir = os.path.join(log_dir, 'coveragereport')
+ if not os.path.isdir(htmloutdir):
+ os.mkdir(htmloutdir)
+ subprocess.check_call(gcovr_base_cmd + gcovr_config +
+ ['--html',
+ '--html-details',
+ '--print-summary',
+ '-o', os.path.join(htmloutdir, 'index.html'),
+ ])
+ outfiles.append(('Html', pathlib.Path(htmloutdir, 'index.html')))
+ elif outputs:
+ print('lcov/genhtml or gcovr >= 3.3 needed to generate Html coverage report')
+ exitcode = 1
+
+ if not outputs and not outfiles:
+ print('Need gcovr or lcov/genhtml to generate any coverage reports')
+ exitcode = 1
+
+ if outfiles:
+ print('')
+ for (filetype, path) in outfiles:
+ print(filetype + ' coverage report can be found at', path.as_uri())
+
+ return exitcode
+
+def run(args: T.List[str]) -> int:
+ if not os.path.isfile('build.ninja'):
+ print('Coverage currently only works with the Ninja backend.')
+ return 1
+ parser = argparse.ArgumentParser(description='Generate coverage reports')
+ parser.add_argument('--text', dest='outputs', action='append_const',
+ const='text', help='generate Text report')
+ parser.add_argument('--xml', dest='outputs', action='append_const',
+ const='xml', help='generate Xml report')
+ parser.add_argument('--sonarqube', dest='outputs', action='append_const',
+ const='sonarqube', help='generate Sonarqube Xml report')
+ parser.add_argument('--html', dest='outputs', action='append_const',
+ const='html', help='generate Html report')
+ parser.add_argument('--use_llvm_cov', action='store_true',
+ help='use llvm-cov')
+ parser.add_argument('source_root')
+ parser.add_argument('subproject_root')
+ parser.add_argument('build_root')
+ parser.add_argument('log_dir')
+ options = parser.parse_args(args)
+ return coverage(options.outputs, options.source_root,
+ options.subproject_root, options.build_root,
+ options.log_dir, options.use_llvm_cov)
+
+if __name__ == '__main__':
+ sys.exit(run(sys.argv[1:]))
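
Roughly what the coverage-html target ends up running, shown with hypothetical
paths; the script must be started from the build directory, since it checks
for build.ninja in the current working directory:

    from mesonbuild.scripts import coverage

    rc = coverage.run([
        '--html',
        '/path/to/source',               # source_root
        '/path/to/source/subprojects',   # subproject_root
        '/path/to/build',                # build_root
        '/path/to/build/meson-logs',     # log_dir
    ])
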
diff --git a/mesonbuild/scripts/delwithsuffix.py b/mesonbuild/scripts/delwithsuffix.py
new file mode 100644
index 0000000..f58b19c
--- /dev/null
+++ b/mesonbuild/scripts/delwithsuffix.py
@@ -0,0 +1,37 @@
+# Copyright 2013 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import os, sys
+import typing as T
+
+def run(args: T.List[str]) -> int:
+ if len(args) != 2:
+ print('delwithsuffix.py <root of subdir to process> <suffix to delete>')
+ sys.exit(1)
+
+ topdir = args[0]
+ suffix = args[1]
+ if suffix[0] != '.':
+ suffix = '.' + suffix
+
+ for (root, _, files) in os.walk(topdir):
+ for f in files:
+ if f.endswith(suffix):
+ fullname = os.path.join(root, f)
+ os.unlink(fullname)
+ return 0
+
+if __name__ == '__main__':
+ run(sys.argv[1:])
diff --git a/mesonbuild/scripts/depfixer.py b/mesonbuild/scripts/depfixer.py
new file mode 100644
index 0000000..ae18594
--- /dev/null
+++ b/mesonbuild/scripts/depfixer.py
@@ -0,0 +1,505 @@
+# Copyright 2013-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+
+import sys
+import os
+import stat
+import struct
+import shutil
+import subprocess
+import typing as T
+
+from ..mesonlib import OrderedSet, generate_list, Popen_safe
+
+SHT_STRTAB = 3
+DT_NEEDED = 1
+DT_RPATH = 15
+DT_RUNPATH = 29
+DT_STRTAB = 5
+DT_SONAME = 14
+DT_MIPS_RLD_MAP_REL = 1879048245
+
+# Global cache for tools
+INSTALL_NAME_TOOL = False
+
+class DataSizes:
+ def __init__(self, ptrsize: int, is_le: bool) -> None:
+ if is_le:
+ p = '<'
+ else:
+ p = '>'
+ self.Half = p + 'h'
+ self.HalfSize = 2
+ self.Word = p + 'I'
+ self.WordSize = 4
+ self.Sword = p + 'i'
+ self.SwordSize = 4
+ if ptrsize == 64:
+ self.Addr = p + 'Q'
+ self.AddrSize = 8
+ self.Off = p + 'Q'
+ self.OffSize = 8
+ self.XWord = p + 'Q'
+ self.XWordSize = 8
+ self.Sxword = p + 'q'
+ self.SxwordSize = 8
+ else:
+ self.Addr = p + 'I'
+ self.AddrSize = 4
+ self.Off = p + 'I'
+ self.OffSize = 4
+
+class DynamicEntry(DataSizes):
+ def __init__(self, ifile: T.BinaryIO, ptrsize: int, is_le: bool) -> None:
+ super().__init__(ptrsize, is_le)
+ self.ptrsize = ptrsize
+ if ptrsize == 64:
+ self.d_tag = struct.unpack(self.Sxword, ifile.read(self.SxwordSize))[0]
+ self.val = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0]
+ else:
+ self.d_tag = struct.unpack(self.Sword, ifile.read(self.SwordSize))[0]
+ self.val = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+
+ def write(self, ofile: T.BinaryIO) -> None:
+ if self.ptrsize == 64:
+ ofile.write(struct.pack(self.Sxword, self.d_tag))
+ ofile.write(struct.pack(self.XWord, self.val))
+ else:
+ ofile.write(struct.pack(self.Sword, self.d_tag))
+ ofile.write(struct.pack(self.Word, self.val))
+
+class SectionHeader(DataSizes):
+ def __init__(self, ifile: T.BinaryIO, ptrsize: int, is_le: bool) -> None:
+ super().__init__(ptrsize, is_le)
+ is_64 = ptrsize == 64
+
+# Elf64_Word
+ self.sh_name = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+# Elf64_Word
+ self.sh_type = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+# Elf64_Xword
+ if is_64:
+ self.sh_flags = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0]
+ else:
+ self.sh_flags = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+# Elf64_Addr
+ self.sh_addr = struct.unpack(self.Addr, ifile.read(self.AddrSize))[0]
+# Elf64_Off
+ self.sh_offset = struct.unpack(self.Off, ifile.read(self.OffSize))[0]
+# Elf64_Xword
+ if is_64:
+ self.sh_size = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0]
+ else:
+ self.sh_size = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+# Elf64_Word
+ self.sh_link = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+# Elf64_Word
+ self.sh_info = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+# Elf64_Xword
+ if is_64:
+ self.sh_addralign = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0]
+ else:
+ self.sh_addralign = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+# Elf64_Xword
+ if is_64:
+ self.sh_entsize = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0]
+ else:
+ self.sh_entsize = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+
+class Elf(DataSizes):
+ def __init__(self, bfile: str, verbose: bool = True) -> None:
+ self.bfile = bfile
+ self.verbose = verbose
+ self.sections = [] # type: T.List[SectionHeader]
+ self.dynamic = [] # type: T.List[DynamicEntry]
+ self.open_bf(bfile)
+ try:
+ (self.ptrsize, self.is_le) = self.detect_elf_type()
+ super().__init__(self.ptrsize, self.is_le)
+ self.parse_header()
+ self.parse_sections()
+ self.parse_dynamic()
+ except (struct.error, RuntimeError):
+ self.close_bf()
+ raise
+
+ def open_bf(self, bfile: str) -> None:
+ self.bf = None
+ self.bf_perms = None
+ try:
+ self.bf = open(bfile, 'r+b')
+ except PermissionError as e:
+ self.bf_perms = stat.S_IMODE(os.lstat(bfile).st_mode)
+ os.chmod(bfile, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
+ try:
+ self.bf = open(bfile, 'r+b')
+ except Exception:
+ os.chmod(bfile, self.bf_perms)
+ self.bf_perms = None
+ raise e
+
+ def close_bf(self) -> None:
+ if self.bf is not None:
+ if self.bf_perms is not None:
+ os.fchmod(self.bf.fileno(), self.bf_perms)
+ self.bf_perms = None
+ self.bf.close()
+ self.bf = None
+
+ def __enter__(self) -> 'Elf':
+ return self
+
+ def __del__(self) -> None:
+ self.close_bf()
+
+ def __exit__(self, exc_type: T.Any, exc_value: T.Any, traceback: T.Any) -> None:
+ self.close_bf()
+
+ def detect_elf_type(self) -> T.Tuple[int, bool]:
+ data = self.bf.read(6)
+ if data[1:4] != b'ELF':
+            # This script gets called on non-ELF targets too,
+            # so just ignore them.
+ if self.verbose:
+ print(f'File {self.bfile!r} is not an ELF file.')
+ sys.exit(0)
+ if data[4] == 1:
+ ptrsize = 32
+ elif data[4] == 2:
+ ptrsize = 64
+ else:
+ sys.exit(f'File {self.bfile!r} has unknown ELF class.')
+ if data[5] == 1:
+ is_le = True
+ elif data[5] == 2:
+ is_le = False
+ else:
+ sys.exit(f'File {self.bfile!r} has unknown ELF endianness.')
+ return ptrsize, is_le
+
+ def parse_header(self) -> None:
+ self.bf.seek(0)
+ self.e_ident = struct.unpack('16s', self.bf.read(16))[0]
+ self.e_type = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+ self.e_machine = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+ self.e_version = struct.unpack(self.Word, self.bf.read(self.WordSize))[0]
+ self.e_entry = struct.unpack(self.Addr, self.bf.read(self.AddrSize))[0]
+ self.e_phoff = struct.unpack(self.Off, self.bf.read(self.OffSize))[0]
+ self.e_shoff = struct.unpack(self.Off, self.bf.read(self.OffSize))[0]
+ self.e_flags = struct.unpack(self.Word, self.bf.read(self.WordSize))[0]
+ self.e_ehsize = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+ self.e_phentsize = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+ self.e_phnum = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+ self.e_shentsize = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+ self.e_shnum = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+ self.e_shstrndx = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+
+ def parse_sections(self) -> None:
+ self.bf.seek(self.e_shoff)
+ for _ in range(self.e_shnum):
+ self.sections.append(SectionHeader(self.bf, self.ptrsize, self.is_le))
+
+ def read_str(self) -> bytes:
+ arr = []
+ x = self.bf.read(1)
+ while x != b'\0':
+ arr.append(x)
+ x = self.bf.read(1)
+ if x == b'':
+ raise RuntimeError('Tried to read past the end of the file')
+ return b''.join(arr)
+
+ def find_section(self, target_name: bytes) -> T.Optional[SectionHeader]:
+ section_names = self.sections[self.e_shstrndx]
+ for i in self.sections:
+ self.bf.seek(section_names.sh_offset + i.sh_name)
+ name = self.read_str()
+ if name == target_name:
+ return i
+ return None
+
+ def parse_dynamic(self) -> None:
+ sec = self.find_section(b'.dynamic')
+ if sec is None:
+ return
+ self.bf.seek(sec.sh_offset)
+ while True:
+ e = DynamicEntry(self.bf, self.ptrsize, self.is_le)
+ self.dynamic.append(e)
+ if e.d_tag == 0:
+ break
+
+ @generate_list
+ def get_section_names(self) -> T.Generator[str, None, None]:
+ section_names = self.sections[self.e_shstrndx]
+ for i in self.sections:
+ self.bf.seek(section_names.sh_offset + i.sh_name)
+ yield self.read_str().decode()
+
+ def get_soname(self) -> T.Optional[str]:
+ soname = None
+ strtab = None
+ for i in self.dynamic:
+ if i.d_tag == DT_SONAME:
+ soname = i
+ if i.d_tag == DT_STRTAB:
+ strtab = i
+ if soname is None or strtab is None:
+ return None
+ self.bf.seek(strtab.val + soname.val)
+ return self.read_str().decode()
+
+ def get_entry_offset(self, entrynum: int) -> T.Optional[int]:
+ sec = self.find_section(b'.dynstr')
+ for i in self.dynamic:
+ if i.d_tag == entrynum:
+ res = sec.sh_offset + i.val
+ assert isinstance(res, int)
+ return res
+ return None
+
+ def get_rpath(self) -> T.Optional[str]:
+ offset = self.get_entry_offset(DT_RPATH)
+ if offset is None:
+ return None
+ self.bf.seek(offset)
+ return self.read_str().decode()
+
+ def get_runpath(self) -> T.Optional[str]:
+ offset = self.get_entry_offset(DT_RUNPATH)
+ if offset is None:
+ return None
+ self.bf.seek(offset)
+ return self.read_str().decode()
+
+ @generate_list
+ def get_deps(self) -> T.Generator[str, None, None]:
+ sec = self.find_section(b'.dynstr')
+ for i in self.dynamic:
+ if i.d_tag == DT_NEEDED:
+ offset = sec.sh_offset + i.val
+ self.bf.seek(offset)
+ yield self.read_str().decode()
+
+ def fix_deps(self, prefix: bytes) -> None:
+ sec = self.find_section(b'.dynstr')
+ deps = []
+ for i in self.dynamic:
+ if i.d_tag == DT_NEEDED:
+ deps.append(i)
+ for i in deps:
+ offset = sec.sh_offset + i.val
+ self.bf.seek(offset)
+ name = self.read_str()
+ if name.startswith(prefix):
+ basename = name.rsplit(b'/', maxsplit=1)[-1]
+ padding = b'\0' * (len(name) - len(basename))
+ newname = basename + padding
+ assert len(newname) == len(name)
+ self.bf.seek(offset)
+ self.bf.write(newname)
+
+ def fix_rpath(self, fname: str, rpath_dirs_to_remove: T.Set[bytes], new_rpath: bytes) -> None:
+ # The path to search for can be either rpath or runpath.
+ # Fix both of them to be sure.
+ self.fix_rpathtype_entry(fname, rpath_dirs_to_remove, new_rpath, DT_RPATH)
+ self.fix_rpathtype_entry(fname, rpath_dirs_to_remove, new_rpath, DT_RUNPATH)
+
+ def fix_rpathtype_entry(self, fname: str, rpath_dirs_to_remove: T.Set[bytes], new_rpath: bytes, entrynum: int) -> None:
+ rp_off = self.get_entry_offset(entrynum)
+ if rp_off is None:
+ if self.verbose:
+ print(f'File {fname!r} does not have an rpath. It should be a fully static executable.')
+ return
+ self.bf.seek(rp_off)
+
+ old_rpath = self.read_str()
+ # Some rpath entries may come from multiple sources.
+ # Only add each one once.
+ new_rpaths = OrderedSet() # type: OrderedSet[bytes]
+ if new_rpath:
+ new_rpaths.update(new_rpath.split(b':'))
+ if old_rpath:
+ # Filter out build-only rpath entries
+ # added by get_link_dep_subdirs() or
+ # specified by user with build_rpath.
+ for rpath_dir in old_rpath.split(b':'):
+ if not (rpath_dir in rpath_dirs_to_remove or
+ rpath_dir == (b'X' * len(rpath_dir))):
+ if rpath_dir:
+ new_rpaths.add(rpath_dir)
+
+ # Prepend user-specified new entries while preserving the ones that came from pkgconfig etc.
+ new_rpath = b':'.join(new_rpaths)
+
+ if len(old_rpath) < len(new_rpath):
+ msg = "New rpath must not be longer than the old one.\n Old: {}\n New: {}".format(old_rpath.decode('utf-8'), new_rpath.decode('utf-8'))
+ sys.exit(msg)
+ # The linker does read-only string deduplication. If there is a
+ # string that shares a suffix with the rpath, they might get
+ # dedupped. This means changing the rpath string might break something
+ # completely unrelated. This has already happened once with X.org.
+ # Thus we want to keep this change as small as possible to minimize
+ # the chance of obliterating other strings. It might still happen
+ # but our behavior is identical to what chrpath does and it has
+ # been in use for ages so based on that this should be rare.
+ if not new_rpath:
+ self.remove_rpath_entry(entrynum)
+ else:
+ self.bf.seek(rp_off)
+ self.bf.write(new_rpath)
+ self.bf.write(b'\0')
+
+ def remove_rpath_entry(self, entrynum: int) -> None:
+ sec = self.find_section(b'.dynamic')
+ if sec is None:
+ return None
+ for (i, entry) in enumerate(self.dynamic):
+ if entry.d_tag == entrynum:
+ rpentry = self.dynamic[i]
+ rpentry.d_tag = 0
+ self.dynamic = self.dynamic[:i] + self.dynamic[i + 1:] + [rpentry]
+ break
+        # DT_MIPS_RLD_MAP_REL is relative to the offset of the tag. Adjust it accordingly.
+ for entry in self.dynamic[i:]:
+ if entry.d_tag == DT_MIPS_RLD_MAP_REL:
+ entry.val += 2 * (self.ptrsize // 8)
+ break
+ self.bf.seek(sec.sh_offset)
+ for entry in self.dynamic:
+ entry.write(self.bf)
+ return None
+
+def fix_elf(fname: str, rpath_dirs_to_remove: T.Set[bytes], new_rpath: T.Optional[bytes], verbose: bool = True) -> None:
+ if new_rpath is not None:
+ with Elf(fname, verbose) as e:
+ # note: e.get_rpath() and e.get_runpath() may be useful
+ e.fix_rpath(fname, rpath_dirs_to_remove, new_rpath)
+
+def get_darwin_rpaths_to_remove(fname: str) -> T.List[str]:
+ p, out, _ = Popen_safe(['otool', '-l', fname], stderr=subprocess.DEVNULL)
+ if p.returncode != 0:
+ raise subprocess.CalledProcessError(p.returncode, p.args, out)
+ result = []
+ current_cmd = 'FOOBAR'
+ for line in out.split('\n'):
+ line = line.strip()
+ if ' ' not in line:
+ continue
+ key, value = line.strip().split(' ', 1)
+ if key == 'cmd':
+ current_cmd = value
+ if key == 'path' and current_cmd == 'LC_RPATH':
+ rp = value.split('(', 1)[0].strip()
+ result.append(rp)
+ return result
+
+def fix_darwin(fname: str, new_rpath: str, final_path: str, install_name_mappings: T.Dict[str, str]) -> None:
+ try:
+ rpaths = get_darwin_rpaths_to_remove(fname)
+ except subprocess.CalledProcessError:
+ # Otool failed, which happens when invoked on a
+ # non-executable target. Just return.
+ return
+ try:
+ args = []
+ if rpaths:
+ # TODO: fix this properly, not totally clear how
+ #
+ # removing rpaths from binaries on macOS has tons of
+ # weird edge cases. For instance, if the user provided
+ # a '-Wl,-rpath' argument in LDFLAGS that happens to
+ # coincide with an rpath generated from a dependency,
+ # this would cause installation failures, as meson would
+ # generate install_name_tool calls with two identical
+ # '-delete_rpath' arguments, which install_name_tool
+ # fails on. Because meson itself ensures that it never
+ # adds duplicate rpaths, duplicate rpaths necessarily
+ # come from user variables. The idea of using OrderedSet
+ # is to remove *at most one* duplicate RPATH entry. This
+ # is not optimal, as it only respects the user's choice
+ # partially: if they provided a non-duplicate '-Wl,-rpath'
+ # argument, it gets removed, if they provided a duplicate
+ # one, it remains in the final binary. A potentially optimal
+ # solution would split all user '-Wl,-rpath' arguments from
+ # LDFLAGS, and later add them back with '-add_rpath'.
+ for rp in OrderedSet(rpaths):
+ args += ['-delete_rpath', rp]
+ subprocess.check_call(['install_name_tool', fname] + args,
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.DEVNULL)
+ args = []
+ if new_rpath:
+ args += ['-add_rpath', new_rpath]
+ # Rewrite -install_name @rpath/libfoo.dylib to /path/to/libfoo.dylib
+ if fname.endswith('dylib'):
+ args += ['-id', final_path]
+ if install_name_mappings:
+ for old, new in install_name_mappings.items():
+ args += ['-change', old, new]
+ if args:
+ subprocess.check_call(['install_name_tool', fname] + args,
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.DEVNULL)
+ except Exception as err:
+ raise SystemExit(err)
+
+def fix_jar(fname: str) -> None:
+ subprocess.check_call(['jar', 'xf', fname, 'META-INF/MANIFEST.MF'])
+ with open('META-INF/MANIFEST.MF', 'r+', encoding='utf-8') as f:
+ lines = f.readlines()
+ f.seek(0)
+ for line in lines:
+ if not line.startswith('Class-Path:'):
+ f.write(line)
+ f.truncate()
+ # jar -um doesn't allow removing existing attributes. Use -uM instead,
+ # which a) removes the existing manifest from the jar and b) disables
+ # special-casing for the manifest file, so we can re-add it as a normal
+ # archive member. This puts the manifest at the end of the jar rather
+ # than the beginning, but the spec doesn't forbid that.
+ subprocess.check_call(['jar', 'ufM', fname, 'META-INF/MANIFEST.MF'])
+
+def fix_rpath(fname: str, rpath_dirs_to_remove: T.Set[bytes], new_rpath: T.Union[str, bytes], final_path: str, install_name_mappings: T.Dict[str, str], verbose: bool = True) -> None:
+ global INSTALL_NAME_TOOL # pylint: disable=global-statement
+ # Static libraries, import libraries, debug information, headers, etc
+ # never have rpaths
+ # DLLs and EXE currently do not need runtime path fixing
+ if fname.endswith(('.a', '.lib', '.pdb', '.h', '.hpp', '.dll', '.exe')):
+ return
+ try:
+ if fname.endswith('.jar'):
+ fix_jar(fname)
+ return
+ if isinstance(new_rpath, str):
+ new_rpath = new_rpath.encode('utf8')
+ fix_elf(fname, rpath_dirs_to_remove, new_rpath, verbose)
+ return
+ except SystemExit as e:
+ if isinstance(e.code, int) and e.code == 0:
+ pass
+ else:
+ raise
+ # We don't look for this on import because it will do a useless PATH lookup
+ # on non-mac platforms. That can be expensive on some Windows machines
+ # (up to 30ms), which is significant with --only-changed. For details, see:
+ # https://github.com/mesonbuild/meson/pull/6612#discussion_r378581401
+ if INSTALL_NAME_TOOL is False:
+ INSTALL_NAME_TOOL = bool(shutil.which('install_name_tool'))
+ if INSTALL_NAME_TOOL:
+ if isinstance(new_rpath, bytes):
+ new_rpath = new_rpath.decode('utf8')
+ fix_darwin(fname, new_rpath, final_path, install_name_mappings)
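
A hypothetical install-time fixup sketch for a shared library: drop the
build-tree rpath entry and keep only '$ORIGIN/../lib'. Because the new string
is written over the old one inside .dynstr, it must not be longer than the
rpath baked in at link time:

    from mesonbuild.scripts import depfixer

    depfixer.fix_rpath(
        '/path/to/staging/usr/lib/libfoo.so',
        rpath_dirs_to_remove={b'/path/to/build/src'},
        new_rpath=b'$ORIGIN/../lib',
        final_path='/usr/lib/libfoo.so',   # only used for the Darwin -id rewrite
        install_name_mappings={},
    )
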
diff --git a/mesonbuild/scripts/depscan.py b/mesonbuild/scripts/depscan.py
new file mode 100644
index 0000000..3ae14c0
--- /dev/null
+++ b/mesonbuild/scripts/depscan.py
@@ -0,0 +1,208 @@
+# Copyright 2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import json
+import os
+import pathlib
+import pickle
+import re
+import sys
+import typing as T
+
+from ..backend.ninjabackend import ninja_quote
+from ..compilers.compilers import lang_suffixes
+
+if T.TYPE_CHECKING:
+ from ..backend.ninjabackend import TargetDependencyScannerInfo
+
+CPP_IMPORT_RE = re.compile(r'\w*import ([a-zA-Z0-9]+);')
+CPP_EXPORT_RE = re.compile(r'\w*export module ([a-zA-Z0-9]+);')
+
+FORTRAN_INCLUDE_PAT = r"^\s*include\s*['\"](\w+\.\w+)['\"]"
+FORTRAN_MODULE_PAT = r"^\s*\bmodule\b\s+(\w+)\s*(?:!+.*)*$"
+FORTRAN_SUBMOD_PAT = r"^\s*\bsubmodule\b\s*\((\w+:?\w+)\)\s*(\w+)"
+FORTRAN_USE_PAT = r"^\s*use,?\s*(?:non_intrinsic)?\s*(?:::)?\s*(\w+)"
+
+FORTRAN_MODULE_RE = re.compile(FORTRAN_MODULE_PAT, re.IGNORECASE)
+FORTRAN_SUBMOD_RE = re.compile(FORTRAN_SUBMOD_PAT, re.IGNORECASE)
+FORTRAN_USE_RE = re.compile(FORTRAN_USE_PAT, re.IGNORECASE)
+
+class DependencyScanner:
+ def __init__(self, pickle_file: str, outfile: str, sources: T.List[str]):
+ with open(pickle_file, 'rb') as pf:
+ self.target_data: TargetDependencyScannerInfo = pickle.load(pf)
+ self.outfile = outfile
+ self.sources = sources
+ self.provided_by: T.Dict[str, str] = {}
+ self.exports: T.Dict[str, str] = {}
+ self.needs: T.Dict[str, T.List[str]] = {}
+ self.sources_with_exports: T.List[str] = []
+
+ def scan_file(self, fname: str) -> None:
+ suffix = os.path.splitext(fname)[1][1:]
+ if suffix != 'C':
+ suffix = suffix.lower()
+ if suffix in lang_suffixes['fortran']:
+ self.scan_fortran_file(fname)
+ elif suffix in lang_suffixes['cpp']:
+ self.scan_cpp_file(fname)
+ else:
+ sys.exit(f'Can not scan files with suffix .{suffix}.')
+
+ def scan_fortran_file(self, fname: str) -> None:
+ fpath = pathlib.Path(fname)
+ modules_in_this_file = set()
+ for line in fpath.read_text(encoding='utf-8', errors='ignore').split('\n'):
+ import_match = FORTRAN_USE_RE.match(line)
+ export_match = FORTRAN_MODULE_RE.match(line)
+ submodule_export_match = FORTRAN_SUBMOD_RE.match(line)
+ if import_match:
+ needed = import_match.group(1).lower()
+                # In Fortran, a file may contain a 'use' statement for the very
+                # module it defines; skip those to prevent circular dependencies.
+ if needed not in modules_in_this_file:
+ if fname in self.needs:
+ self.needs[fname].append(needed)
+ else:
+ self.needs[fname] = [needed]
+ if export_match:
+ exported_module = export_match.group(1).lower()
+ assert exported_module not in modules_in_this_file
+ modules_in_this_file.add(exported_module)
+ if exported_module in self.provided_by:
+ raise RuntimeError(f'Multiple files provide module {exported_module}.')
+ self.sources_with_exports.append(fname)
+ self.provided_by[exported_module] = fname
+ self.exports[fname] = exported_module
+ if submodule_export_match:
+ # Store submodule "Foo" "Bar" as "foo:bar".
+ # A submodule declaration can be both an import and an export declaration:
+ #
+ # submodule (a1:a2) a3
+ # - requires a1@a2.smod
+ # - produces a1@a3.smod
+ parent_module_name_full = submodule_export_match.group(1).lower()
+ parent_module_name = parent_module_name_full.split(':')[0]
+ submodule_name = submodule_export_match.group(2).lower()
+ concat_name = f'{parent_module_name}:{submodule_name}'
+ self.sources_with_exports.append(fname)
+ self.provided_by[concat_name] = fname
+ self.exports[fname] = concat_name
+ # Fortran requires that the immediate parent module must be built
+ # before the current one. Thus:
+ #
+ # submodule (parent) parent <- requires parent.mod (really parent.smod, but they are created at the same time)
+ # submodule (a1:a2) a3 <- requires a1@a2.smod
+ #
+ # a3 does not depend on the a1 parent module directly, only transitively.
+ if fname in self.needs:
+ self.needs[fname].append(parent_module_name_full)
+ else:
+ self.needs[fname] = [parent_module_name_full]
+
+ def scan_cpp_file(self, fname: str) -> None:
+ fpath = pathlib.Path(fname)
+ for line in fpath.read_text(encoding='utf-8', errors='ignore').split('\n'):
+ import_match = CPP_IMPORT_RE.match(line)
+ export_match = CPP_EXPORT_RE.match(line)
+ if import_match:
+ needed = import_match.group(1)
+ if fname in self.needs:
+ self.needs[fname].append(needed)
+ else:
+ self.needs[fname] = [needed]
+ if export_match:
+ exported_module = export_match.group(1)
+ if exported_module in self.provided_by:
+ raise RuntimeError(f'Multiple files provide module {exported_module}.')
+ self.sources_with_exports.append(fname)
+ self.provided_by[exported_module] = fname
+ self.exports[fname] = exported_module
+
+ def objname_for(self, src: str) -> str:
+ objname = self.target_data.source2object[src]
+ assert isinstance(objname, str)
+ return objname
+
+ def module_name_for(self, src: str) -> str:
+ suffix = os.path.splitext(src)[1][1:].lower()
+ if suffix in lang_suffixes['fortran']:
+ exported = self.exports[src]
+ # Module foo:bar goes to a file name foo@bar.smod
+ # Module Foo goes to a file name foo.mod
+ namebase = exported.replace(':', '@')
+ if ':' in exported:
+ extension = 'smod'
+ else:
+ extension = 'mod'
+ return os.path.join(self.target_data.private_dir, f'{namebase}.{extension}')
+ elif suffix in lang_suffixes['cpp']:
+ return '{}.ifc'.format(self.exports[src])
+ else:
+ raise RuntimeError('Unreachable code.')
+
+ def scan(self) -> int:
+ for s in self.sources:
+ self.scan_file(s)
+ with open(self.outfile, 'w', encoding='utf-8') as ofile:
+ ofile.write('ninja_dyndep_version = 1\n')
+ for src in self.sources:
+ objfilename = self.objname_for(src)
+ mods_and_submods_needed = []
+ module_files_generated = []
+ module_files_needed = []
+ if src in self.sources_with_exports:
+ module_files_generated.append(self.module_name_for(src))
+ if src in self.needs:
+ for modname in self.needs[src]:
+ if modname not in self.provided_by:
+ # Nothing provides this module, we assume that it
+ # comes from a dependency library somewhere and is
+ # already built by the time this compilation starts.
+ pass
+ else:
+ mods_and_submods_needed.append(modname)
+
+ for modname in mods_and_submods_needed:
+ provider_src = self.provided_by[modname]
+ provider_modfile = self.module_name_for(provider_src)
+ # Prune self-dependencies
+ if provider_src != src:
+ module_files_needed.append(provider_modfile)
+
+ quoted_objfilename = ninja_quote(objfilename, True)
+ quoted_module_files_generated = [ninja_quote(x, True) for x in module_files_generated]
+ quoted_module_files_needed = [ninja_quote(x, True) for x in module_files_needed]
+ if quoted_module_files_generated:
+ mod_gen = '| ' + ' '.join(quoted_module_files_generated)
+ else:
+ mod_gen = ''
+ if quoted_module_files_needed:
+ mod_dep = '| ' + ' '.join(quoted_module_files_needed)
+ else:
+ mod_dep = ''
+ build_line = 'build {} {}: dyndep {}'.format(quoted_objfilename,
+ mod_gen,
+ mod_dep)
+ ofile.write(build_line + '\n')
+ return 0
+
+def run(args: T.List[str]) -> int:
+ assert len(args) == 3, 'got wrong number of arguments!'
+ pickle_file, outfile, jsonfile = args
+ with open(jsonfile, encoding='utf-8') as f:
+ sources = json.load(f)
+ scanner = DependencyScanner(pickle_file, outfile, sources)
+ return scanner.scan()
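
For a hypothetical Fortran target where a.f90 declares 'module alpha' and
b.f90 contains 'use alpha' (private directory libfoo.p), the dyndep file
written above would look roughly like this:

    ninja_dyndep_version = 1
    build a.f90.o | libfoo.p/alpha.mod: dyndep
    build b.f90.o : dyndep | libfoo.p/alpha.mod
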
diff --git a/mesonbuild/scripts/dirchanger.py b/mesonbuild/scripts/dirchanger.py
new file mode 100644
index 0000000..60c4f12
--- /dev/null
+++ b/mesonbuild/scripts/dirchanger.py
@@ -0,0 +1,30 @@
+# Copyright 2015-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+'''CD into dir given as first argument and execute
+the command given in the rest of the arguments.'''
+
+import os, subprocess, sys
+import typing as T
+
+def run(args: T.List[str]) -> int:
+ dirname = args[0]
+ command = args[1:]
+
+ os.chdir(dirname)
+ return subprocess.call(command)
+
+if __name__ == '__main__':
+ sys.exit(run(sys.argv[1:]))
diff --git a/mesonbuild/scripts/env2mfile.py b/mesonbuild/scripts/env2mfile.py
new file mode 100755
index 0000000..af7ffc6
--- /dev/null
+++ b/mesonbuild/scripts/env2mfile.py
@@ -0,0 +1,368 @@
+# Copyright 2022 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import sys, os, subprocess, shutil
+import shlex
+import typing as T
+
+from .. import envconfig
+from .. import mlog
+from ..compilers import compilers
+from ..compilers.detect import defaults as compiler_names
+
+if T.TYPE_CHECKING:
+ import argparse
+
+def has_for_build() -> bool:
+ for cenv in envconfig.ENV_VAR_COMPILER_MAP.values():
+ if os.environ.get(cenv + '_FOR_BUILD'):
+ return True
+ return False
+
+def add_arguments(parser: 'argparse.ArgumentParser') -> None:
+ parser.add_argument('--debarch', default=None,
+ help='The dpkg architecture to generate.')
+ parser.add_argument('--gccsuffix', default="",
+ help='A particular gcc version suffix if necessary.')
+ parser.add_argument('-o', required=True, dest='outfile',
+ help='The output file.')
+ parser.add_argument('--cross', default=False, action='store_true',
+ help='Generate a cross compilation file.')
+ parser.add_argument('--native', default=False, action='store_true',
+ help='Generate a native compilation file.')
+ parser.add_argument('--system', default=None,
+ help='Define system for cross compilation.')
+ parser.add_argument('--cpu', default=None,
+ help='Define cpu for cross compilation.')
+ parser.add_argument('--cpu-family', default=None,
+ help='Define cpu family for cross compilation.')
+ parser.add_argument('--endian', default='little', choices=['big', 'little'],
+ help='Define endianness for cross compilation.')
+
+class MachineInfo:
+ def __init__(self) -> None:
+ self.compilers: T.Dict[str, T.List[str]] = {}
+ self.binaries: T.Dict[str, T.List[str]] = {}
+ self.properties: T.Dict[str, T.Union[str, T.List[str]]] = {}
+ self.compile_args: T.Dict[str, T.List[str]] = {}
+ self.link_args: T.Dict[str, T.List[str]] = {}
+ self.cmake: T.Dict[str, T.Union[str, T.List[str]]] = {}
+
+ self.system: T.Optional[str] = None
+ self.cpu: T.Optional[str] = None
+ self.cpu_family: T.Optional[str] = None
+ self.endian: T.Optional[str] = None
+
+#parser = argparse.ArgumentParser(description='''Generate cross compilation definition file for the Meson build system.
+#
+#If you do not specify the --arch argument, Meson assumes that running
+#plain 'dpkg-architecture' will return correct information for the
+#host system.
+#
+#This script must be run in an environment where CPPFLAGS et al are set to the
+#same values used in the actual compilation.
+#'''
+#)
+
+def locate_path(program: str) -> T.List[str]:
+ if os.path.isabs(program):
+ return [program]
+ for d in os.get_exec_path():
+ f = os.path.join(d, program)
+ if os.access(f, os.X_OK):
+ return [f]
+ raise ValueError("%s not found on $PATH" % program)
+
+def write_args_line(ofile: T.TextIO, name: str, args: T.Union[str, T.List[str]]) -> None:
+ if len(args) == 0:
+ return
+ if isinstance(args, str):
+ ostr = name + "= '" + args + "'\n"
+ else:
+ ostr = name + ' = ['
+ ostr += ', '.join("'" + i + "'" for i in args)
+ ostr += ']\n'
+ ofile.write(ostr)
+
+def get_args_from_envvars(infos: MachineInfo) -> None:
+ cppflags = shlex.split(os.environ.get('CPPFLAGS', ''))
+ cflags = shlex.split(os.environ.get('CFLAGS', ''))
+ cxxflags = shlex.split(os.environ.get('CXXFLAGS', ''))
+ objcflags = shlex.split(os.environ.get('OBJCFLAGS', ''))
+ objcxxflags = shlex.split(os.environ.get('OBJCXXFLAGS', ''))
+ ldflags = shlex.split(os.environ.get('LDFLAGS', ''))
+
+ c_args = cppflags + cflags
+ cpp_args = cppflags + cxxflags
+ c_link_args = cflags + ldflags
+ cpp_link_args = cxxflags + ldflags
+
+ objc_args = cppflags + objcflags
+ objcpp_args = cppflags + objcxxflags
+ objc_link_args = objcflags + ldflags
+ objcpp_link_args = objcxxflags + ldflags
+
+ if c_args:
+ infos.compile_args['c'] = c_args
+ if c_link_args:
+ infos.link_args['c'] = c_link_args
+ if cpp_args:
+ infos.compile_args['cpp'] = cpp_args
+ if cpp_link_args:
+ infos.link_args['cpp'] = cpp_link_args
+ if objc_args:
+ infos.compile_args['objc'] = objc_args
+ if objc_link_args:
+ infos.link_args['objc'] = objc_link_args
+ if objcpp_args:
+ infos.compile_args['objcpp'] = objcpp_args
+ if objcpp_link_args:
+ infos.link_args['objcpp'] = objcpp_link_args
+
+cpu_family_map = {
+ 'mips64el': 'mips64',
+ 'i686': 'x86',
+}
+cpu_map = {
+ 'armhf': 'arm7hlf',
+ 'mips64el': 'mips64'
+}
+
+def deb_detect_cmake(infos: MachineInfo, data: T.Dict[str, str]) -> None:
+ system_name_map = {'linux': 'Linux', 'kfreebsd': 'kFreeBSD', 'hurd': 'GNU'}
+ system_processor_map = {'arm': 'armv7l', 'mips64el': 'mips64', 'powerpc64le': 'ppc64le'}
+
+ infos.cmake["CMAKE_C_COMPILER"] = infos.compilers['c']
+ infos.cmake["CMAKE_CXX_COMPILER"] = infos.compilers['cpp']
+ infos.cmake["CMAKE_SYSTEM_NAME"] = system_name_map[data['DEB_HOST_ARCH_OS']]
+ infos.cmake["CMAKE_SYSTEM_PROCESSOR"] = system_processor_map.get(data['DEB_HOST_GNU_CPU'],
+ data['DEB_HOST_GNU_CPU'])
+
+def deb_compiler_lookup(infos: MachineInfo, compilerstems: T.List[T.Tuple[str, str]], host_arch: str, gccsuffix: str) -> None:
+ for langname, stem in compilerstems:
+ compilername = f'{host_arch}-{stem}{gccsuffix}'
+ try:
+ p = locate_path(compilername)
+ infos.compilers[langname] = p
+ except ValueError:
+ pass
+
+def detect_cross_debianlike(options: T.Any) -> MachineInfo:
+ if options.debarch is None:
+ cmd = ['dpkg-architecture']
+ else:
+ cmd = ['dpkg-architecture', '-a' + options.debarch]
+ output = subprocess.check_output(cmd, universal_newlines=True,
+ stderr=subprocess.DEVNULL)
+ data = {}
+ for line in output.split('\n'):
+ line = line.strip()
+ if line == '':
+ continue
+ k, v = line.split('=', 1)
+ data[k] = v
+ host_arch = data['DEB_HOST_GNU_TYPE']
+ host_os = data['DEB_HOST_ARCH_OS']
+ host_cpu_family = cpu_family_map.get(data['DEB_HOST_GNU_CPU'],
+ data['DEB_HOST_GNU_CPU'])
+ host_cpu = cpu_map.get(data['DEB_HOST_ARCH'],
+ data['DEB_HOST_ARCH'])
+ host_endian = data['DEB_HOST_ARCH_ENDIAN']
+
+ compilerstems = [('c', 'gcc'),
+ ('cpp', 'g++'),
+ ('objc', 'gobjc'),
+ ('objcpp', 'gobjc++')]
+ infos = MachineInfo()
+ deb_compiler_lookup(infos, compilerstems, host_arch, options.gccsuffix)
+ if len(infos.compilers) == 0:
+ print('Warning: no compilers were detected.')
+ infos.binaries['ar'] = locate_path("%s-ar" % host_arch)
+ infos.binaries['strip'] = locate_path("%s-strip" % host_arch)
+ infos.binaries['objcopy'] = locate_path("%s-objcopy" % host_arch)
+ infos.binaries['ld'] = locate_path("%s-ld" % host_arch)
+ try:
+ infos.binaries['cmake'] = locate_path("cmake")
+ deb_detect_cmake(infos, data)
+ except ValueError:
+ pass
+ try:
+ infos.binaries['pkgconfig'] = locate_path("%s-pkg-config" % host_arch)
+ except ValueError:
+ pass # pkg-config is optional
+ try:
+ infos.binaries['cups-config'] = locate_path("cups-config")
+ except ValueError:
+ pass
+ infos.system = host_os
+ infos.cpu_family = host_cpu_family
+ infos.cpu = host_cpu
+ infos.endian = host_endian
+
+ get_args_from_envvars(infos)
+ return infos
+
+def write_machine_file(infos: MachineInfo, ofilename: str, write_system_info: bool) -> None:
+ tmpfilename = ofilename + '~'
+ with open(tmpfilename, 'w', encoding='utf-8') as ofile:
+ ofile.write('[binaries]\n')
+ ofile.write('# Compilers\n')
+ for langname in sorted(infos.compilers.keys()):
+ compiler = infos.compilers[langname]
+ write_args_line(ofile, langname, compiler)
+ ofile.write('\n')
+
+ ofile.write('# Other binaries\n')
+ for exename in sorted(infos.binaries.keys()):
+ exe = infos.binaries[exename]
+ write_args_line(ofile, exename, exe)
+ ofile.write('\n')
+
+ ofile.write('[properties]\n')
+ all_langs = list(set(infos.compile_args.keys()).union(set(infos.link_args.keys())))
+ all_langs.sort()
+ for lang in all_langs:
+ if lang in infos.compile_args:
+ write_args_line(ofile, lang + '_args', infos.compile_args[lang])
+ if lang in infos.link_args:
+ write_args_line(ofile, lang + '_link_args', infos.link_args[lang])
+ for k, v in infos.properties.items():
+ write_args_line(ofile, k, v)
+ ofile.write('\n')
+
+ if infos.cmake:
+ ofile.write('[cmake]\n\n')
+ for k, v in infos.cmake.items():
+ write_args_line(ofile, k, v)
+ ofile.write('\n')
+
+ if write_system_info:
+ ofile.write('[host_machine]\n')
+ ofile.write(f"cpu = '{infos.cpu}'\n")
+ ofile.write(f"cpu_family = '{infos.cpu_family}'\n")
+ ofile.write(f"endian = '{infos.endian}'\n")
+ ofile.write(f"system = '{infos.system}'\n")
+ os.replace(tmpfilename, ofilename)
+
+def detect_language_args_from_envvars(langname: str, envvar_suffix: str = '') -> T.Tuple[T.List[str], T.List[str]]:
+ ldflags = tuple(shlex.split(os.environ.get('LDFLAGS' + envvar_suffix, '')))
+ compile_args = shlex.split(os.environ.get(compilers.CFLAGS_MAPPING[langname] + envvar_suffix, ''))
+ if langname in compilers.LANGUAGES_USING_CPPFLAGS:
+ cppflags = tuple(shlex.split(os.environ.get('CPPFLAGS' + envvar_suffix, '')))
+ lang_compile_args = list(cppflags) + compile_args
+ else:
+ lang_compile_args = compile_args
+ lang_link_args = list(ldflags) + compile_args
+ return (lang_compile_args, lang_link_args)
+
+def detect_compilers_from_envvars(envvar_suffix: str = '') -> MachineInfo:
+ infos = MachineInfo()
+ for langname, envvarname in envconfig.ENV_VAR_COMPILER_MAP.items():
+ compilerstr = os.environ.get(envvarname + envvar_suffix)
+ if not compilerstr:
+ continue
+ compiler = shlex.split(compilerstr)
+ infos.compilers[langname] = compiler
+ lang_compile_args, lang_link_args = detect_language_args_from_envvars(langname, envvar_suffix)
+ if lang_compile_args:
+ infos.compile_args[langname] = lang_compile_args
+ if lang_link_args:
+ infos.link_args[langname] = lang_link_args
+ return infos
+
+def detect_binaries_from_envvars(infos: MachineInfo, envvar_suffix: str = '') -> None:
+ for binname, envvar_base in envconfig.ENV_VAR_TOOL_MAP.items():
+ envvar = envvar_base + envvar_suffix
+ binstr = os.environ.get(envvar)
+ if binstr:
+ infos.binaries[binname] = shlex.split(binstr)
+
+def detect_cross_system(infos: MachineInfo, options: T.Any) -> None:
+ for optname in ('system', 'cpu', 'cpu_family', 'endian'):
+ v = getattr(options, optname)
+ if not v:
+ mlog.error(f'Cross property "{optname}" missing, set it with --{optname.replace("_", "-")}.')
+ sys.exit(1)
+ setattr(infos, optname, v)
+
+def detect_cross_env(options: T.Any) -> MachineInfo:
+ if options.debarch:
+ print('Detecting cross environment via dpkg-architecture.')
+ infos = detect_cross_debianlike(options)
+ else:
+ print('Detecting cross environment via environment variables.')
+ infos = detect_compilers_from_envvars()
+ detect_cross_system(infos, options)
+ return infos
+
+def add_compiler_if_missing(infos: MachineInfo, langname: str, exe_names: T.List[str]) -> None:
+ if langname in infos.compilers:
+ return
+ for exe_name in exe_names:
+ lookup = shutil.which(exe_name)
+ if not lookup:
+ continue
+ compflags, linkflags = detect_language_args_from_envvars(langname)
+ infos.compilers[langname] = [lookup]
+ if compflags:
+ infos.compile_args[langname] = compflags
+ if linkflags:
+ infos.link_args[langname] = linkflags
+ return
+
+def detect_missing_native_compilers(infos: MachineInfo) -> None:
+ # Any per-platform special detection should go here.
+ for langname, exes in compiler_names.items():
+ if langname not in envconfig.ENV_VAR_COMPILER_MAP:
+ continue
+ add_compiler_if_missing(infos, langname, exes)
+
+def detect_missing_native_binaries(infos: MachineInfo) -> None:
+ # Any per-platform special detection should go here.
+ for toolname in sorted(envconfig.ENV_VAR_TOOL_MAP.keys()):
+ if toolname in infos.binaries:
+ continue
+ exe = shutil.which(toolname)
+ if exe:
+ infos.binaries[toolname] = [exe]
+
+def detect_native_env(options: T.Any) -> MachineInfo:
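+ # Autotools-style *_FOR_BUILD environment variables (e.g. CC_FOR_BUILD) conventionally describe the build machine when cross compiling; prefer them if any are set.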
+ use_for_build = has_for_build()
+ if use_for_build:
+ mlog.log('Using FOR_BUILD envvars for detection')
+ esuffix = '_FOR_BUILD'
+ else:
+ mlog.log('Using regular envvars for detection.')
+ esuffix = ''
+ infos = detect_compilers_from_envvars(esuffix)
+ detect_missing_native_compilers(infos)
+ detect_binaries_from_envvars(infos, esuffix)
+ detect_missing_native_binaries(infos)
+ return infos
+
+def run(options: T.Any) -> None:
+ if options.cross and options.native:
+ sys.exit('You can only specify either --cross or --native, not both.')
+ if not options.cross and not options.native:
+ sys.exit('You must specify --cross or --native.')
+ mlog.notice('This functionality is experimental and subject to change.')
+ detect_cross = options.cross
+ if detect_cross:
+ infos = detect_cross_env(options)
+ write_system_info = True
+ else:
+ infos = detect_native_env(options)
+ write_system_info = False
+ write_machine_file(infos, options.outfile, write_system_info)
diff --git a/mesonbuild/scripts/externalproject.py b/mesonbuild/scripts/externalproject.py
new file mode 100644
index 0000000..17c2251
--- /dev/null
+++ b/mesonbuild/scripts/externalproject.py
@@ -0,0 +1,116 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import os
+import argparse
+import multiprocessing
+import subprocess
+from pathlib import Path
+import typing as T
+
+from ..mesonlib import Popen_safe, split_args
+
+class ExternalProject:
+ def __init__(self, options: argparse.Namespace):
+ self.name = options.name
+ self.src_dir = options.srcdir
+ self.build_dir = options.builddir
+ self.install_dir = options.installdir
+ self.log_dir = options.logdir
+ self.verbose = options.verbose
+ self.stampfile = options.stampfile
+ self.depfile = options.depfile
+ self.make = split_args(options.make)
+
+ def write_depfile(self) -> None:
+ with open(self.depfile, 'w', encoding='utf-8') as f:
+ f.write(f'{self.stampfile}: \\\n')
+ for dirpath, dirnames, filenames in os.walk(self.src_dir):
+ dirnames[:] = [d for d in dirnames if not d.startswith('.')]
+ for fname in filenames:
+ if fname.startswith('.'):
+ continue
+ path = Path(dirpath, fname)
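+ # The depfile uses Makefile syntax, so escape spaces in file names.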
+ f.write(' {} \\\n'.format(path.as_posix().replace(' ', '\\ ')))
+
+ def write_stampfile(self) -> None:
+ with open(self.stampfile, 'w', encoding='utf-8'):
+ pass
+
+ def supports_jobs_flag(self) -> bool:
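+ # Only pass -jN to build tools known to accept it; the check looks for GNU Make or waf in the --version output.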
+ p, o, e = Popen_safe(self.make + ['--version'])
+ if p.returncode == 0 and ('GNU Make' in o or 'waf' in o):
+ return True
+ return False
+
+ def build(self) -> int:
+ make_cmd = self.make.copy()
+ if self.supports_jobs_flag():
+ make_cmd.append(f'-j{multiprocessing.cpu_count()}')
+ rc = self._run('build', make_cmd)
+ if rc != 0:
+ return rc
+
+ install_cmd = self.make.copy()
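+ # Stage the installation into install_dir using the DESTDIR convention understood by 'make install'.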
+ install_env = {}
+ install_env['DESTDIR'] = self.install_dir
+ install_cmd.append('install')
+ rc = self._run('install', install_cmd, install_env)
+ if rc != 0:
+ return rc
+
+ self.write_depfile()
+ self.write_stampfile()
+
+ return 0
+
+ def _run(self, step: str, command: T.List[str], env: T.Optional[T.Dict[str, str]] = None) -> int:
+ m = 'Running command ' + str(command) + ' in directory ' + str(self.build_dir) + '\n'
+ log_filename = Path(self.log_dir, f'{self.name}-{step}.log')
+ output = None
+ if not self.verbose:
+ output = open(log_filename, 'w', encoding='utf-8')
+ output.write(m + '\n')
+ output.flush()
+ else:
+ print(m)
+ run_env = os.environ.copy()
+ if env:
+ run_env.update(env)
+ p, o, e = Popen_safe(command, stderr=subprocess.STDOUT, stdout=output,
+ cwd=self.build_dir,
+ env=run_env)
+ if p.returncode != 0:
+ m = f'{step} step returned error code {p.returncode}.'
+ if not self.verbose:
+ m += '\nSee logs: ' + str(log_filename)
+ print(m)
+ return p.returncode
+
+def run(args: T.List[str]) -> int:
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--name')
+ parser.add_argument('--srcdir')
+ parser.add_argument('--builddir')
+ parser.add_argument('--installdir')
+ parser.add_argument('--logdir')
+ parser.add_argument('--make')
+ parser.add_argument('--verbose', action='store_true')
+ parser.add_argument('stampfile')
+ parser.add_argument('depfile')
+
+ options = parser.parse_args(args)
+ ep = ExternalProject(options)
+ return ep.build()
diff --git a/mesonbuild/scripts/gettext.py b/mesonbuild/scripts/gettext.py
new file mode 100644
index 0000000..4a6bb9c
--- /dev/null
+++ b/mesonbuild/scripts/gettext.py
@@ -0,0 +1,96 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import os
+import argparse
+import subprocess
+import typing as T
+
+parser = argparse.ArgumentParser()
+parser.add_argument('command')
+parser.add_argument('--pkgname', default='')
+parser.add_argument('--datadirs', default='')
+parser.add_argument('--langs', default='')
+parser.add_argument('--localedir', default='')
+parser.add_argument('--source-root', default='')
+parser.add_argument('--subdir', default='')
+parser.add_argument('--xgettext', default='xgettext')
+parser.add_argument('--msgmerge', default='msgmerge')
+parser.add_argument('--msginit', default='msginit')
+parser.add_argument('--extra-args', default='')
+
+def read_linguas(src_sub: str) -> T.List[str]:
+ # Syntax of this file is documented here:
+ # https://www.gnu.org/software/gettext/manual/html_node/po_002fLINGUAS.html
+ linguas = os.path.join(src_sub, 'LINGUAS')
+ try:
+ langs = []
+ with open(linguas, encoding='utf-8') as f:
+ for line in f:
+ line = line.strip()
+ if line and not line.startswith('#'):
+ langs += line.split()
+ return langs
+ except (FileNotFoundError, PermissionError):
+ print(f'Could not find file LINGUAS in {src_sub}')
+ return []
+
+def run_potgen(src_sub: str, xgettext: str, pkgname: str, datadirs: str, args: T.List[str], source_root: str) -> int:
+ listfile = os.path.join(src_sub, 'POTFILES.in')
+ if not os.path.exists(listfile):
+ listfile = os.path.join(src_sub, 'POTFILES')
+ if not os.path.exists(listfile):
+ print('Could not find file POTFILES.in or POTFILES in %s' % src_sub)
+ return 1
+
+ child_env = os.environ.copy()
+ if datadirs:
+ child_env['GETTEXTDATADIRS'] = datadirs
+
+ ofile = os.path.join(src_sub, pkgname + '.pot')
+ return subprocess.call([xgettext, '--package-name=' + pkgname, '-p', src_sub, '-f', listfile,
+ '-D', source_root, '-k_', '-o', ofile] + args,
+ env=child_env)
+
+def update_po(src_sub: str, msgmerge: str, msginit: str, pkgname: str, langs: T.List[str]) -> int:
+ potfile = os.path.join(src_sub, pkgname + '.pot')
+ for l in langs:
+ pofile = os.path.join(src_sub, l + '.po')
+ if os.path.exists(pofile):
+ subprocess.check_call([msgmerge, '-q', '-o', pofile, pofile, potfile])
+ else:
+ subprocess.check_call([msginit, '--input', potfile, '--output-file', pofile, '--locale', l, '--no-translator'])
+ return 0
+
+def run(args: T.List[str]) -> int:
+ options = parser.parse_args(args)
+ subcmd = options.command
+ langs = options.langs.split('@@') if options.langs else None
+ extra_args = options.extra_args.split('@@') if options.extra_args else []
+ subdir = options.subdir
+ src_sub = os.path.join(options.source_root, subdir)
+
+ if not langs:
+ langs = read_linguas(src_sub)
+
+ if subcmd == 'pot':
+ return run_potgen(src_sub, options.xgettext, options.pkgname, options.datadirs, extra_args, options.source_root)
+ elif subcmd == 'update_po':
+ if run_potgen(src_sub, options.xgettext, options.pkgname, options.datadirs, extra_args, options.source_root) != 0:
+ return 1
+ return update_po(src_sub, options.msgmerge, options.msginit, options.pkgname, langs)
+ else:
+ print('Unknown subcommand.')
+ return 1
diff --git a/mesonbuild/scripts/gtkdochelper.py b/mesonbuild/scripts/gtkdochelper.py
new file mode 100644
index 0000000..ded952d
--- /dev/null
+++ b/mesonbuild/scripts/gtkdochelper.py
@@ -0,0 +1,296 @@
+# Copyright 2015-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import sys, os
+import subprocess
+import shutil
+import argparse
+from ..mesonlib import MesonException, Popen_safe, is_windows, is_cygwin, split_args
+from . import destdir_join
+import typing as T
+
+parser = argparse.ArgumentParser()
+
+parser.add_argument('--sourcedir', dest='sourcedir')
+parser.add_argument('--builddir', dest='builddir')
+parser.add_argument('--subdir', dest='subdir')
+parser.add_argument('--headerdirs', dest='headerdirs')
+parser.add_argument('--mainfile', dest='mainfile')
+parser.add_argument('--modulename', dest='modulename')
+parser.add_argument('--moduleversion', dest='moduleversion')
+parser.add_argument('--htmlargs', dest='htmlargs', default='')
+parser.add_argument('--scanargs', dest='scanargs', default='')
+parser.add_argument('--scanobjsargs', dest='scanobjsargs', default='')
+parser.add_argument('--gobjects-types-file', dest='gobject_typesfile', default='')
+parser.add_argument('--fixxrefargs', dest='fixxrefargs', default='')
+parser.add_argument('--mkdbargs', dest='mkdbargs', default='')
+parser.add_argument('--ld', dest='ld', default='')
+parser.add_argument('--cc', dest='cc', default='')
+parser.add_argument('--ldflags', dest='ldflags', default='')
+parser.add_argument('--cflags', dest='cflags', default='')
+parser.add_argument('--content-files', dest='content_files', default='')
+parser.add_argument('--expand-content-files', dest='expand_content_files', default='')
+parser.add_argument('--html-assets', dest='html_assets', default='')
+parser.add_argument('--ignore-headers', dest='ignore_headers', default='')
+parser.add_argument('--namespace', dest='namespace', default='')
+parser.add_argument('--mode', dest='mode', default='')
+parser.add_argument('--installdir', dest='install_dir')
+parser.add_argument('--run', dest='run', default='')
+for tool in ['scan', 'scangobj', 'mkdb', 'mkhtml', 'fixxref']:
+ program_name = 'gtkdoc-' + tool
+ parser.add_argument('--' + program_name, dest=program_name.replace('-', '_'))
+
+def gtkdoc_run_check(cmd: T.List[str], cwd: str, library_paths: T.Optional[T.List[str]] = None) -> None:
+ if library_paths is None:
+ library_paths = []
+
+ env = dict(os.environ)
+ if is_windows() or is_cygwin():
+ if 'PATH' in env:
+ library_paths.extend(env['PATH'].split(os.pathsep))
+ env['PATH'] = os.pathsep.join(library_paths)
+ else:
+ if 'LD_LIBRARY_PATH' in env:
+ library_paths.extend(env['LD_LIBRARY_PATH'].split(os.pathsep))
+ env['LD_LIBRARY_PATH'] = os.pathsep.join(library_paths)
+
+ if is_windows():
+ cmd.insert(0, sys.executable)
+
+ # Put stderr into stdout since we want to print it out anyway.
+ # This preserves the order of messages.
+ p, out = Popen_safe(cmd, cwd=cwd, env=env, stderr=subprocess.STDOUT)[0:2]
+ if p.returncode != 0:
+ err_msg = [f"{cmd!r} failed with status {p.returncode:d}"]
+ if out:
+ err_msg.append(out)
+ raise MesonException('\n'.join(err_msg))
+ elif out:
+ # Unfortunately Windows cmd.exe consoles may use a codepage that makes
+ # print() raise a UnicodeEncodeError, so ignore such errors for now;
+ # we are only echoing console output here anyway.
+ try:
+ print(out)
+ except UnicodeEncodeError:
+ pass
+
+def build_gtkdoc(source_root: str, build_root: str, doc_subdir: str, src_subdirs: T.List[str],
+ main_file: str, module: str, module_version: str,
+ html_args: T.List[str], scan_args: T.List[str], fixxref_args: T.List[str], mkdb_args: T.List[str],
+ gobject_typesfile: str, scanobjs_args: T.List[str], run: str, ld: str, cc: str, ldflags: str, cflags: str,
+ html_assets: T.List[str], content_files: T.List[str], ignore_headers: T.List[str], namespace: str,
+ expand_content_files: T.List[str], mode: str, options: argparse.Namespace) -> None:
+ print("Building documentation for %s" % module)
+
+ src_dir_args = []
+ for src_dir in src_subdirs:
+ if not os.path.isabs(src_dir):
+ dirs = [os.path.join(source_root, src_dir),
+ os.path.join(build_root, src_dir)]
+ else:
+ dirs = [src_dir]
+ src_dir_args += ['--source-dir=' + d for d in dirs]
+
+ doc_src = os.path.join(source_root, doc_subdir)
+ abs_out = os.path.join(build_root, doc_subdir)
+ htmldir = os.path.join(abs_out, 'html')
+
+ content_files += [main_file]
+ sections = os.path.join(doc_src, module + "-sections.txt")
+ if os.path.exists(sections):
+ content_files.append(sections)
+
+ overrides = os.path.join(doc_src, module + "-overrides.txt")
+ if os.path.exists(overrides):
+ content_files.append(overrides)
+
+ # Copy files to build directory
+ for f in content_files:
+ # FIXME: Use mesonlib.File objects so we don't need to do this
+ if not os.path.isabs(f):
+ f = os.path.join(doc_src, f)
+ elif os.path.commonpath([f, build_root]) == build_root:
+ continue
+ shutil.copyfile(f, os.path.join(abs_out, os.path.basename(f)))
+
+ shutil.rmtree(htmldir, ignore_errors=True)
+ try:
+ os.mkdir(htmldir)
+ except Exception:
+ pass
+
+ for f in html_assets:
+ f_abs = os.path.join(doc_src, f)
+ shutil.copyfile(f_abs, os.path.join(htmldir, os.path.basename(f_abs)))
+
+ scan_cmd = [options.gtkdoc_scan, '--module=' + module] + src_dir_args
+ if ignore_headers:
+ scan_cmd.append('--ignore-headers=' + ' '.join(ignore_headers))
+ # Add user-specified arguments
+ scan_cmd += scan_args
+ gtkdoc_run_check(scan_cmd, abs_out)
+
+ # Use the generated types file when available, otherwise gobject_typesfile
+ # would often be a path to source dir instead of build dir.
+ if '--rebuild-types' in scan_args:
+ gobject_typesfile = os.path.join(abs_out, module + '.types')
+
+ if gobject_typesfile:
+ scanobjs_cmd = [options.gtkdoc_scangobj] + scanobjs_args
+ scanobjs_cmd += ['--types=' + gobject_typesfile,
+ '--module=' + module,
+ '--run=' + run,
+ '--cflags=' + cflags,
+ '--ldflags=' + ldflags,
+ '--cc=' + cc,
+ '--ld=' + ld,
+ '--output-dir=' + abs_out]
+
+ library_paths = []
+ for ldflag in split_args(ldflags):
+ if ldflag.startswith('-Wl,-rpath,'):
+ library_paths.append(ldflag[11:])
+
+ gtkdoc_run_check(scanobjs_cmd, build_root, library_paths)
+
+ # Make docbook files
+ if mode == 'auto':
+ # Guessing is probably a poor idea, but this keeps compatibility
+ # with previous behavior.
+ if main_file.endswith('sgml'):
+ modeflag = '--sgml-mode'
+ else:
+ modeflag = '--xml-mode'
+ elif mode == 'xml':
+ modeflag = '--xml-mode'
+ elif mode == 'sgml':
+ modeflag = '--sgml-mode'
+ else: # none
+ modeflag = None
+
+ mkdb_cmd = [options.gtkdoc_mkdb,
+ '--module=' + module,
+ '--output-format=xml',
+ '--expand-content-files=' + ' '.join(expand_content_files),
+ ] + src_dir_args
+ if namespace:
+ mkdb_cmd.append('--name-space=' + namespace)
+ if modeflag:
+ mkdb_cmd.append(modeflag)
+ if main_file:
+ # Yes, this is the flag even if the file is in xml.
+ mkdb_cmd.append('--main-sgml-file=' + main_file)
+ # Add user-specified arguments
+ mkdb_cmd += mkdb_args
+ gtkdoc_run_check(mkdb_cmd, abs_out)
+
+ # Make HTML documentation
+ mkhtml_cmd = [options.gtkdoc_mkhtml,
+ '--path=' + os.pathsep.join((doc_src, abs_out)),
+ module,
+ ] + html_args
+ if main_file:
+ mkhtml_cmd.append('../' + main_file)
+ else:
+ mkhtml_cmd.append('%s-docs.xml' % module)
+ # html gen must be run in the HTML dir
+ gtkdoc_run_check(mkhtml_cmd, htmldir)
+
+ # Fix cross-references in HTML files
+ fixref_cmd = [options.gtkdoc_fixxref,
+ '--module=' + module,
+ '--module-dir=html'] + fixxref_args
+ gtkdoc_run_check(fixref_cmd, abs_out)
+
+ if module_version:
+ shutil.move(os.path.join(htmldir, f'{module}.devhelp2'),
+ os.path.join(htmldir, f'{module}-{module_version}.devhelp2'))
+
+def install_gtkdoc(build_root: str, doc_subdir: str, install_prefix: str, datadir: str, module: str) -> None:
+ source = os.path.join(build_root, doc_subdir, 'html')
+ final_destination = os.path.join(install_prefix, datadir, module)
+ shutil.rmtree(final_destination, ignore_errors=True)
+ shutil.copytree(source, final_destination)
+
+def run(args: T.List[str]) -> int:
+ options = parser.parse_args(args)
+ if options.htmlargs:
+ htmlargs = options.htmlargs.split('@@')
+ else:
+ htmlargs = []
+ if options.scanargs:
+ scanargs = options.scanargs.split('@@')
+ else:
+ scanargs = []
+ if options.scanobjsargs:
+ scanobjsargs = options.scanobjsargs.split('@@')
+ else:
+ scanobjsargs = []
+ if options.fixxrefargs:
+ fixxrefargs = options.fixxrefargs.split('@@')
+ else:
+ fixxrefargs = []
+ if options.mkdbargs:
+ mkdbargs = options.mkdbargs.split('@@')
+ else:
+ mkdbargs = []
+ build_gtkdoc(
+ options.sourcedir,
+ options.builddir,
+ options.subdir,
+ options.headerdirs.split('@@'),
+ options.mainfile,
+ options.modulename,
+ options.moduleversion,
+ htmlargs,
+ scanargs,
+ fixxrefargs,
+ mkdbargs,
+ options.gobject_typesfile,
+ scanobjsargs,
+ options.run,
+ options.ld,
+ options.cc,
+ options.ldflags,
+ options.cflags,
+ options.html_assets.split('@@') if options.html_assets else [],
+ options.content_files.split('@@') if options.content_files else [],
+ options.ignore_headers.split('@@') if options.ignore_headers else [],
+ options.namespace,
+ options.expand_content_files.split('@@') if options.expand_content_files else [],
+ options.mode,
+ options)
+
+ if 'MESON_INSTALL_PREFIX' in os.environ:
+ destdir = os.environ.get('DESTDIR', '')
+ install_prefix = destdir_join(destdir, os.environ['MESON_INSTALL_PREFIX'])
+ if options.install_dir:
+ install_dir = options.install_dir
+ else:
+ install_dir = options.modulename
+ if options.moduleversion:
+ install_dir += '-' + options.moduleversion
+ if os.path.isabs(install_dir):
+ install_dir = destdir_join(destdir, install_dir)
+ install_gtkdoc(options.builddir,
+ options.subdir,
+ install_prefix,
+ 'share/gtk-doc/html',
+ install_dir)
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(run(sys.argv[1:]))
diff --git a/mesonbuild/scripts/hotdochelper.py b/mesonbuild/scripts/hotdochelper.py
new file mode 100644
index 0000000..2c3b85d
--- /dev/null
+++ b/mesonbuild/scripts/hotdochelper.py
@@ -0,0 +1,40 @@
+from __future__ import annotations
+
+import os
+import shutil
+import subprocess
+
+from . import destdir_join
+
+import argparse
+import typing as T
+
+parser = argparse.ArgumentParser()
+parser.add_argument('--install')
+parser.add_argument('--extra-extension-path', action="append", default=[])
+parser.add_argument('--name')
+parser.add_argument('--builddir')
+parser.add_argument('--project-version')
+
+
+def run(argv: T.List[str]) -> int:
+ options, args = parser.parse_known_args(argv)
+ subenv = os.environ.copy()
+
+ for ext_path in options.extra_extension_path:
+ subenv['PYTHONPATH'] = subenv.get('PYTHONPATH', '') + os.pathsep + ext_path
+
+ res = subprocess.call(args, cwd=options.builddir, env=subenv)
+ if res != 0:
+ return res
+
+ if options.install:
+ source_dir = os.path.join(options.builddir, options.install)
+ destdir = os.environ.get('DESTDIR', '')
+ installdir = destdir_join(destdir,
+ os.path.join(os.environ['MESON_INSTALL_PREFIX'],
+ 'share/doc/', options.name, "html"))
+
+ shutil.rmtree(installdir, ignore_errors=True)
+ shutil.copytree(source_dir, installdir)
+ return 0
diff --git a/mesonbuild/scripts/itstool.py b/mesonbuild/scripts/itstool.py
new file mode 100644
index 0000000..0bfcaf9
--- /dev/null
+++ b/mesonbuild/scripts/itstool.py
@@ -0,0 +1,86 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import os
+import argparse
+import subprocess
+import tempfile
+import shutil
+import typing as T
+
+parser = argparse.ArgumentParser()
+parser.add_argument('command')
+parser.add_argument('--build-dir', default='')
+parser.add_argument('-i', '--input', default='')
+parser.add_argument('-o', '--output', default='')
+parser.add_argument('--itstool', default='itstool')
+parser.add_argument('--its', action='append', default=[])
+parser.add_argument('mo_files', nargs='+')
+
+
+def run_join(build_dir: str, itstool: str, its_files: T.List[str], mo_files: T.List[str],
+ in_fname: str, out_fname: str) -> int:
+ if not mo_files:
+ print('No mo files specified to use for translation.')
+ return 1
+
+ with tempfile.TemporaryDirectory(prefix=os.path.basename(in_fname), dir=build_dir) as tmp_dir:
+ # copy mo files to have the right names so itstool can infer their locale
+ locale_mo_files = []
+ for mo_file in mo_files:
+ if not os.path.exists(mo_file):
+ print(f'Could not find mo file {mo_file}')
+ return 1
+ if not mo_file.endswith('.mo'):
+ print(f'File is not a mo file: {mo_file}')
+ return 1
+ # determine locale of this mo file
+ parts = mo_file.partition('LC_MESSAGES')
+ if parts[0].endswith((os.sep, '/')):
+ locale = os.path.basename(parts[0][:-1])
+ else:
+ locale = os.path.basename(parts[0])
+ tmp_mo_fname = os.path.join(tmp_dir, locale + '.mo')
+ shutil.copy(mo_file, tmp_mo_fname)
+ locale_mo_files.append(tmp_mo_fname)
+
+ cmd = [itstool]
+ if its_files:
+ for fname in its_files:
+ cmd.extend(['-i', fname])
+ cmd.extend(['-j', in_fname,
+ '-o', out_fname])
+ cmd.extend(locale_mo_files)
+
+ return subprocess.call(cmd)
+
+
+def run(args: T.List[str]) -> int:
+ options = parser.parse_args(args)
+ command = options.command
+ build_dir = os.environ.get('MESON_BUILD_ROOT', os.getcwd())
+ if options.build_dir:
+ build_dir = options.build_dir
+
+ if command == 'join':
+ return run_join(build_dir,
+ options.itstool,
+ options.its,
+ options.mo_files,
+ options.input,
+ options.output)
+ else:
+ print('Unknown subcommand.')
+ return 1
diff --git a/mesonbuild/scripts/meson_exe.py b/mesonbuild/scripts/meson_exe.py
new file mode 100644
index 0000000..33408d8
--- /dev/null
+++ b/mesonbuild/scripts/meson_exe.py
@@ -0,0 +1,124 @@
+# Copyright 2013-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import os
+import sys
+import argparse
+import pickle
+import subprocess
+import typing as T
+import locale
+
+from ..utils.core import ExecutableSerialisation
+
+def buildparser() -> argparse.ArgumentParser:
+ parser = argparse.ArgumentParser(description='Custom executable wrapper for Meson. Do not run on your own, mmm\'kay?')
+ parser.add_argument('--unpickle')
+ parser.add_argument('--capture')
+ parser.add_argument('--feed')
+ return parser
+
+def run_exe(exe: ExecutableSerialisation, extra_env: T.Optional[T.Dict[str, str]] = None) -> int:
+ if exe.exe_wrapper:
+ if not exe.exe_wrapper.found():
+ raise AssertionError('BUG: Can\'t run cross-compiled exe {!r} with not-found '
+ 'wrapper {!r}'.format(exe.cmd_args[0], exe.exe_wrapper.get_path()))
+ cmd_args = exe.exe_wrapper.get_command() + exe.cmd_args
+ else:
+ cmd_args = exe.cmd_args
+ child_env = os.environ.copy()
+ if extra_env:
+ child_env.update(extra_env)
+ if exe.env:
+ child_env = exe.env.get_env(child_env)
+ if exe.extra_paths:
+ child_env['PATH'] = (os.pathsep.join(exe.extra_paths + ['']) +
+ child_env['PATH'])
+ if exe.exe_wrapper and any('wine' in i for i in exe.exe_wrapper.get_command()):
+ from .. import mesonlib
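+ # Wine normally maps the host filesystem under the Z: drive, so expose extra_paths there and fold them into WINEPATH.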
+ child_env['WINEPATH'] = mesonlib.get_wine_shortpath(
+ exe.exe_wrapper.get_command(),
+ ['Z:' + p for p in exe.extra_paths] + child_env.get('WINEPATH', '').split(';'),
+ exe.workdir
+ )
+
+ stdin = None
+ if exe.feed:
+ stdin = open(exe.feed, 'rb')
+
+ pipe = subprocess.PIPE
+ if exe.verbose:
+ assert not exe.capture, 'Cannot capture and print to console at the same time'
+ pipe = None
+
+ p = subprocess.Popen(cmd_args, env=child_env, cwd=exe.workdir,
+ close_fds=False, stdin=stdin, stdout=pipe, stderr=pipe)
+ stdout, stderr = p.communicate()
+
+ if stdin is not None:
+ stdin.close()
+
+ if p.returncode == 0xc0000135:
+ # STATUS_DLL_NOT_FOUND on Windows indicating a common problem that is otherwise hard to diagnose
+ raise FileNotFoundError('due to missing DLLs')
+
+ if p.returncode != 0:
+ if exe.pickled:
+ print(f'while executing {cmd_args!r}')
+ if exe.verbose:
+ return p.returncode
+ encoding = locale.getpreferredencoding()
+ if not exe.capture:
+ print('--- stdout ---')
+ print(stdout.decode(encoding=encoding, errors='replace'))
+ print('--- stderr ---')
+ print(stderr.decode(encoding=encoding, errors='replace'))
+ return p.returncode
+
+ if exe.capture:
+ skip_write = False
+ try:
+ with open(exe.capture, 'rb') as cur:
+ skip_write = cur.read() == stdout
+ except OSError:
+ pass
+ if not skip_write:
+ with open(exe.capture, 'wb') as output:
+ output.write(stdout)
+
+ return 0
+
+def run(args: T.List[str]) -> int:
+ parser = buildparser()
+ options, cmd_args = parser.parse_known_args(args)
+ # argparse supports a double dash to separate options from positional arguments,
+ # but parse_known_args() leaves it in the remainder, so strip it here.
+ if cmd_args and cmd_args[0] == '--':
+ cmd_args = cmd_args[1:]
+ if not options.unpickle and not cmd_args:
+ parser.error('either --unpickle or executable and arguments are required')
+ if options.unpickle:
+ if cmd_args or options.capture or options.feed:
+ parser.error('no other arguments can be used with --unpickle')
+ with open(options.unpickle, 'rb') as f:
+ exe = pickle.load(f)
+ exe.pickled = True
+ else:
+ exe = ExecutableSerialisation(cmd_args, capture=options.capture, feed=options.feed)
+
+ return run_exe(exe)
+
+if __name__ == '__main__':
+ sys.exit(run(sys.argv[1:]))
diff --git a/mesonbuild/scripts/msgfmthelper.py b/mesonbuild/scripts/msgfmthelper.py
new file mode 100644
index 0000000..28bcc8b
--- /dev/null
+++ b/mesonbuild/scripts/msgfmthelper.py
@@ -0,0 +1,39 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import argparse
+import subprocess
+import os
+import typing as T
+
+parser = argparse.ArgumentParser()
+parser.add_argument('input')
+parser.add_argument('output')
+parser.add_argument('type')
+parser.add_argument('podir')
+parser.add_argument('--msgfmt', default='msgfmt')
+parser.add_argument('--datadirs', default='')
+parser.add_argument('args', default=[], metavar='extra msgfmt argument', nargs='*')
+
+
+def run(args: T.List[str]) -> int:
+ options = parser.parse_args(args)
+ env = None
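+ # GETTEXTDATADIRS points gettext tools at additional data directories (e.g. ITS rules).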
+ if options.datadirs:
+ env = os.environ.copy()
+ env.update({'GETTEXTDATADIRS': options.datadirs})
+ return subprocess.call([options.msgfmt, '--' + options.type, '-d', options.podir,
+ '--template', options.input, '-o', options.output] + options.args,
+ env=env)
diff --git a/mesonbuild/scripts/regen_checker.py b/mesonbuild/scripts/regen_checker.py
new file mode 100644
index 0000000..f3a6f3c
--- /dev/null
+++ b/mesonbuild/scripts/regen_checker.py
@@ -0,0 +1,65 @@
+# Copyright 2015-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import sys, os
+import pickle, subprocess
+import typing as T
+from ..coredata import CoreData
+from ..backend.backends import RegenInfo
+from ..mesonlib import OptionKey
+
+# This could also be used for Xcode.
+
+def need_regen(regeninfo: RegenInfo, regen_timestamp: float) -> bool:
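+ # Returns True when any tracked dependency file is newer than the regeninfo dump written at configure time.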
+ for i in regeninfo.depfiles:
+ curfile = os.path.join(regeninfo.build_dir, i)
+ curtime = os.stat(curfile).st_mtime
+ if curtime > regen_timestamp:
+ return True
+ # The timestamp file gets automatically deleted by MSBuild during a 'Clean' build.
+ # We must make sure to recreate it, even if we do not regenerate the solution.
+ # Otherwise, Visual Studio will always consider the REGEN project out of date.
+ print("Everything is up-to-date, regeneration of build files is not needed.")
+ from ..backend.vs2010backend import Vs2010Backend
+ Vs2010Backend.touch_regen_timestamp(regeninfo.build_dir)
+ return False
+
+def regen(regeninfo: RegenInfo, meson_command: T.List[str], backend: str) -> None:
+ cmd = meson_command + ['--internal',
+ 'regenerate',
+ regeninfo.build_dir,
+ regeninfo.source_dir,
+ '--backend=' + backend]
+ subprocess.check_call(cmd)
+
+def run(args: T.List[str]) -> int:
+ private_dir = args[0]
+ dumpfile = os.path.join(private_dir, 'regeninfo.dump')
+ coredata_file = os.path.join(private_dir, 'coredata.dat')
+ with open(dumpfile, 'rb') as f:
+ regeninfo = pickle.load(f)
+ assert isinstance(regeninfo, RegenInfo)
+ with open(coredata_file, 'rb') as f:
+ coredata = pickle.load(f)
+ assert isinstance(coredata, CoreData)
+ backend = coredata.get_option(OptionKey('backend'))
+ assert isinstance(backend, str)
+ regen_timestamp = os.stat(dumpfile).st_mtime
+ if need_regen(regeninfo, regen_timestamp):
+ regen(regeninfo, coredata.meson_command, backend)
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(run(sys.argv[1:]))
diff --git a/mesonbuild/scripts/run_tool.py b/mesonbuild/scripts/run_tool.py
new file mode 100644
index 0000000..88376dd
--- /dev/null
+++ b/mesonbuild/scripts/run_tool.py
@@ -0,0 +1,68 @@
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import itertools
+import fnmatch
+from pathlib import Path
+from concurrent.futures import ThreadPoolExecutor
+
+from ..compilers import lang_suffixes
+from ..mesonlib import Popen_safe
+import typing as T
+
+if T.TYPE_CHECKING:
+ import subprocess
+
+def parse_pattern_file(fname: Path) -> T.List[str]:
+ patterns = []
+ try:
+ with fname.open(encoding='utf-8') as f:
+ for line in f:
+ pattern = line.strip()
+ if pattern and not pattern.startswith('#'):
+ patterns.append(pattern)
+ except FileNotFoundError:
+ pass
+ return patterns
+
+def run_tool(name: str, srcdir: Path, builddir: Path, fn: T.Callable[..., subprocess.CompletedProcess], *args: T.Any) -> int:
+ patterns = parse_pattern_file(srcdir / f'.{name}-include')
+ globs: T.Union[T.List[T.List[Path]], T.List[T.Generator[Path, None, None]]]
+ if patterns:
+ globs = [srcdir.glob(p) for p in patterns]
+ else:
+ p, o, _ = Popen_safe(['git', 'ls-files'], cwd=srcdir)
+ if p.returncode == 0:
+ globs = [[Path(srcdir, f) for f in o.splitlines()]]
+ else:
+ globs = [srcdir.glob('**/*')]
+ patterns = parse_pattern_file(srcdir / f'.{name}-ignore')
+ ignore = [str(builddir / '*')]
+ ignore.extend([str(srcdir / p) for p in patterns])
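+ # Only C and C++ sources (plus plain headers) are passed to the tool.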
+ suffixes = set(lang_suffixes['c']).union(set(lang_suffixes['cpp']))
+ suffixes.add('h')
+ suffixes = {f'.{s}' for s in suffixes}
+ futures = []
+ returncode = 0
+ with ThreadPoolExecutor() as e:
+ for f in itertools.chain(*globs):
+ strf = str(f)
+ if f.is_dir() or f.suffix not in suffixes or \
+ any(fnmatch.fnmatch(strf, i) for i in ignore):
+ continue
+ futures.append(e.submit(fn, f, *args))
+ if futures:
+ returncode = max(x.result().returncode for x in futures)
+ return returncode
diff --git a/mesonbuild/scripts/scanbuild.py b/mesonbuild/scripts/scanbuild.py
new file mode 100644
index 0000000..9cfc75d
--- /dev/null
+++ b/mesonbuild/scripts/scanbuild.py
@@ -0,0 +1,66 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import subprocess
+import shutil
+import tempfile
+from ..environment import detect_ninja, detect_scanbuild
+from ..coredata import get_cmd_line_file, CmdLineFileParser
+from ..mesonlib import windows_proof_rmtree
+from pathlib import Path
+import typing as T
+from ast import literal_eval
+import os
+
+def scanbuild(exelist: T.List[str], srcdir: Path, blddir: Path, privdir: Path, logdir: Path, args: T.List[str]) -> int:
+ # In case of problems leave the temp directory around
+ # so it can be debugged.
+ scandir = tempfile.mkdtemp(dir=str(privdir))
+ meson_cmd = exelist + args
+ build_cmd = exelist + ['-o', str(logdir)] + detect_ninja() + ['-C', scandir]
+ rc = subprocess.call(meson_cmd + [str(srcdir), scandir])
+ if rc != 0:
+ return rc
+ rc = subprocess.call(build_cmd)
+ if rc == 0:
+ windows_proof_rmtree(scandir)
+ return rc
+
+def run(args: T.List[str]) -> int:
+ srcdir = Path(args[0])
+ bldpath = Path(args[1])
+ blddir = args[1]
+ meson_cmd = args[2:]
+ privdir = bldpath / 'meson-private'
+ logdir = bldpath / 'meson-logs' / 'scanbuild'
+ shutil.rmtree(str(logdir), ignore_errors=True)
+
+ # if any cross or native files are specified we should use them
+ cmd = get_cmd_line_file(blddir)
+ data = CmdLineFileParser()
+ data.read(cmd)
+
+ if 'cross_file' in data['properties']:
+ meson_cmd.extend([f'--cross-file={os.path.abspath(f)}' for f in literal_eval(data['properties']['cross_file'])])
+
+ if 'native_file' in data['properties']:
+ meson_cmd.extend([f'--native-file={os.path.abspath(f)}' for f in literal_eval(data['properties']['native_file'])])
+
+ exelist = detect_scanbuild()
+ if not exelist:
+ print('Could not find scan-build.')
+ return 1
+
+ return scanbuild(exelist, srcdir, bldpath, privdir, logdir, meson_cmd)
diff --git a/mesonbuild/scripts/symbolextractor.py b/mesonbuild/scripts/symbolextractor.py
new file mode 100644
index 0000000..08d839b
--- /dev/null
+++ b/mesonbuild/scripts/symbolextractor.py
@@ -0,0 +1,333 @@
+# Copyright 2013-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This script extracts the symbols of a given shared library
+# into a file. If the symbols have not changed, the file is not
+# touched. This information is used to skip link steps if the
+# ABI has not changed.
+
+# This file is basically a reimplementation of
+# http://cgit.freedesktop.org/libreoffice/core/commit/?id=3213cd54b76bc80a6f0516aac75a48ff3b2ad67c
+from __future__ import annotations
+
+import typing as T
+import os, sys
+from .. import mesonlib
+from .. import mlog
+from ..mesonlib import Popen_safe
+import argparse
+
+parser = argparse.ArgumentParser()
+
+parser.add_argument('--cross-host', default=None, dest='cross_host',
+ help='cross compilation host platform')
+parser.add_argument('args', nargs='+')
+
+TOOL_WARNING_FILE = None
+RELINKING_WARNING = 'Relinking will always happen on source changes.'
+
+def dummy_syms(outfilename: str) -> None:
+ """Just touch it so relinking happens always."""
+ with open(outfilename, 'w', encoding='utf-8'):
+ pass
+
+def write_if_changed(text: str, outfilename: str) -> None:
+ try:
+ with open(outfilename, encoding='utf-8') as f:
+ oldtext = f.read()
+ if text == oldtext:
+ return
+ except FileNotFoundError:
+ pass
+ with open(outfilename, 'w', encoding='utf-8') as f:
+ f.write(text)
+
+def print_tool_warning(tools: T.List[str], msg: str, stderr: T.Optional[str] = None) -> None:
+ if os.path.exists(TOOL_WARNING_FILE):
+ return
+ m = f'{tools!r} {msg}. {RELINKING_WARNING}'
+ if stderr:
+ m += '\n' + stderr
+ mlog.warning(m)
+ # Write it out so we don't warn again
+ with open(TOOL_WARNING_FILE, 'w', encoding='utf-8'):
+ pass
+
+def get_tool(name: str) -> T.List[str]:
+ evar = name.upper()
+ if evar in os.environ:
+ import shlex
+ return shlex.split(os.environ[evar])
+ return [name]
+
+def call_tool(name: str, args: T.List[str], **kwargs: T.Any) -> T.Optional[str]:
+ tool = get_tool(name)
+ try:
+ p, output, e = Popen_safe(tool + args, **kwargs)
+ except FileNotFoundError:
+ print_tool_warning(tool, 'not found')
+ return None
+ except PermissionError:
+ print_tool_warning(tool, 'not usable')
+ return None
+ if p.returncode != 0:
+ print_tool_warning(tool, 'does not work', e)
+ return None
+ return output
+
+def call_tool_nowarn(tool: T.List[str], **kwargs: T.Any) -> T.Tuple[T.Optional[str], T.Optional[str]]:
+ try:
+ p, output, e = Popen_safe(tool, **kwargs)
+ except FileNotFoundError:
+ return None, '{!r} not found\n'.format(tool[0])
+ except PermissionError:
+ return None, '{!r} not usable\n'.format(tool[0])
+ if p.returncode != 0:
+ return None, e
+ return output, None
+
+def gnu_syms(libfilename: str, outfilename: str) -> None:
+ # Get the name of the library
+ output = call_tool('readelf', ['-d', libfilename])
+ if not output:
+ dummy_syms(outfilename)
+ return
+ result = [x for x in output.split('\n') if 'SONAME' in x]
+ assert len(result) <= 1
+ # Get a list of all symbols exported
+ output = call_tool('nm', ['--dynamic', '--extern-only', '--defined-only',
+ '--format=posix', libfilename])
+ if not output:
+ dummy_syms(outfilename)
+ return
+ for line in output.split('\n'):
+ if not line:
+ continue
+ line_split = line.split()
+ entry = line_split[0:2]
+ # Store the size of symbols pointing to data objects so we relink
+ # when those change, which is needed because of copy relocations
+ # https://github.com/mesonbuild/meson/pull/7132#issuecomment-628353702
+ if line_split[1].upper() in {'B', 'G', 'D'} and len(line_split) >= 4:
+ entry += [line_split[3]]
+ result += [' '.join(entry)]
+ write_if_changed('\n'.join(result) + '\n', outfilename)
+
+def solaris_syms(libfilename: str, outfilename: str) -> None:
+ # gnu_syms() works with GNU nm & readelf, not Solaris nm & elfdump
+ origpath = os.environ['PATH']
+ try:
+ os.environ['PATH'] = '/usr/gnu/bin:' + origpath
+ gnu_syms(libfilename, outfilename)
+ finally:
+ os.environ['PATH'] = origpath
+
+def osx_syms(libfilename: str, outfilename: str) -> None:
+ # Get the name of the library
+ output = call_tool('otool', ['-l', libfilename])
+ if not output:
+ dummy_syms(outfilename)
+ return
+ arr = output.split('\n')
+ for (i, val) in enumerate(arr):
+ if 'LC_ID_DYLIB' in val:
+ match = i
+ break
+ result = [arr[match + 2], arr[match + 5]] # LibreOffice stores all 5 lines but the others seem irrelevant.
+ # Get a list of all symbols exported
+ output = call_tool('nm', ['--extern-only', '--defined-only',
+ '--format=posix', libfilename])
+ if not output:
+ dummy_syms(outfilename)
+ return
+ result += [' '.join(x.split()[0:2]) for x in output.split('\n')]
+ write_if_changed('\n'.join(result) + '\n', outfilename)
+
+def openbsd_syms(libfilename: str, outfilename: str) -> None:
+ # Get the name of the library
+ output = call_tool('readelf', ['-d', libfilename])
+ if not output:
+ dummy_syms(outfilename)
+ return
+ result = [x for x in output.split('\n') if 'SONAME' in x]
+ assert len(result) <= 1
+ # Get a list of all symbols exported
+ output = call_tool('nm', ['-D', '-P', '-g', libfilename])
+ if not output:
+ dummy_syms(outfilename)
+ return
+ # U = undefined (cope with the lack of --defined-only option)
+ result += [' '.join(x.split()[0:2]) for x in output.split('\n') if x and not x.endswith('U ')]
+ write_if_changed('\n'.join(result) + '\n', outfilename)
+
+def freebsd_syms(libfilename: str, outfilename: str) -> None:
+ # Get the name of the library
+ output = call_tool('readelf', ['-d', libfilename])
+ if not output:
+ dummy_syms(outfilename)
+ return
+ result = [x for x in output.split('\n') if 'SONAME' in x]
+ assert len(result) <= 1
+ # Get a list of all symbols exported
+ output = call_tool('nm', ['--dynamic', '--extern-only', '--defined-only',
+ '--format=posix', libfilename])
+ if not output:
+ dummy_syms(outfilename)
+ return
+
+ result += [' '.join(x.split()[0:2]) for x in output.split('\n')]
+ write_if_changed('\n'.join(result) + '\n', outfilename)
+
+def cygwin_syms(impfilename: str, outfilename: str) -> None:
+ # Get the name of the library
+ output = call_tool('dlltool', ['-I', impfilename])
+ if not output:
+ dummy_syms(outfilename)
+ return
+ result = [output]
+ # Get the list of all symbols exported
+ output = call_tool('nm', ['--extern-only', '--defined-only',
+ '--format=posix', impfilename])
+ if not output:
+ dummy_syms(outfilename)
+ return
+ for line in output.split('\n'):
+ if ' T ' not in line:
+ continue
+ result.append(line.split(maxsplit=1)[0])
+ write_if_changed('\n'.join(result) + '\n', outfilename)
+
+def _get_implib_dllname(impfilename: str) -> T.Tuple[T.List[str], str]:
+ all_stderr = ''
+ # First try lib.exe, which is provided by MSVC. Then llvm-lib.exe, by LLVM
+ # for clang-cl.
+ #
+ # We cannot call get_tool on `lib` because it will look at the `LIB` env
+ # var which is the list of library paths MSVC will search for import
+ # libraries while linking.
+ for lib in (['lib'], get_tool('llvm-lib')):
+ output, e = call_tool_nowarn(lib + ['-list', impfilename])
+ if output:
+ # The output is a list of DLLs that each symbol exported by the import
+ # library is available in. We only build import libraries that point to
+ # a single DLL, so we can pick any of these. Pick the last one for
+ # simplicity. Also skip the last line, which is empty.
+ return output.split('\n')[-2:-1], None
+ all_stderr += e
+ # Next, try dlltool.exe which is provided by MinGW
+ output, e = call_tool_nowarn(get_tool('dlltool') + ['-I', impfilename])
+ if output:
+ return [output], None
+ all_stderr += e
+ return ([], all_stderr)
+
+def _get_implib_exports(impfilename: str) -> T.Tuple[T.List[str], str]:
+ all_stderr = ''
+ # Force dumpbin.exe to use en-US so we can parse its output
+ env = os.environ.copy()
+ env['VSLANG'] = '1033'
+ output, e = call_tool_nowarn(get_tool('dumpbin') + ['-exports', impfilename], env=env)
+ if output:
+ lines = output.split('\n')
+ start = lines.index('File Type: LIBRARY')
+ end = lines.index(' Summary')
+ return lines[start:end], None
+ all_stderr += e
+ # Next, try llvm-nm.exe provided by LLVM, then nm.exe provided by MinGW
+ for nm in ('llvm-nm', 'nm'):
+ output, e = call_tool_nowarn(get_tool(nm) + ['--extern-only', '--defined-only',
+ '--format=posix', impfilename])
+ if output:
+ result = []
+ for line in output.split('\n'):
+ if ' T ' not in line or line.startswith('.text'):
+ continue
+ result.append(line.split(maxsplit=1)[0])
+ return result, None
+ all_stderr += e
+ return ([], all_stderr)
+
+def windows_syms(impfilename: str, outfilename: str) -> None:
+ # Get the name of the library
+ result, e = _get_implib_dllname(impfilename)
+ if not result:
+ print_tool_warning(['lib', 'llvm-lib', 'dlltool'], 'do not work or were not found', e)
+ dummy_syms(outfilename)
+ return
+ # Get a list of all symbols exported
+ symbols, e = _get_implib_exports(impfilename)
+ if not symbols:
+ print_tool_warning(['dumpbin', 'llvm-nm', 'nm'], 'do not work or were not found', e)
+ dummy_syms(outfilename)
+ return
+ result += symbols
+ write_if_changed('\n'.join(result) + '\n', outfilename)
+
+def gen_symbols(libfilename: str, impfilename: str, outfilename: str, cross_host: str) -> None:
+ if cross_host is not None:
+ # In case of cross builds just always relink. In theory we could
+ # determine the correct toolset, but we would need to use the correct
+ # `nm`, `readelf`, etc, from the cross info which requires refactoring.
+ dummy_syms(outfilename)
+ elif mesonlib.is_linux() or mesonlib.is_hurd():
+ gnu_syms(libfilename, outfilename)
+ elif mesonlib.is_osx():
+ osx_syms(libfilename, outfilename)
+ elif mesonlib.is_openbsd():
+ openbsd_syms(libfilename, outfilename)
+ elif mesonlib.is_freebsd():
+ freebsd_syms(libfilename, outfilename)
+ elif mesonlib.is_netbsd():
+ freebsd_syms(libfilename, outfilename)
+ elif mesonlib.is_windows():
+ if os.path.isfile(impfilename):
+ windows_syms(impfilename, outfilename)
+ else:
+ # No import library. Not sure how the DLL is being used, so just
+ # rebuild everything that links to it every time.
+ dummy_syms(outfilename)
+ elif mesonlib.is_cygwin():
+ if os.path.isfile(impfilename):
+ cygwin_syms(impfilename, outfilename)
+ else:
+ # No import library. Not sure how the DLL is being used, so just
+ # rebuild everything that links to it every time.
+ dummy_syms(outfilename)
+ elif mesonlib.is_sunos():
+ solaris_syms(libfilename, outfilename)
+ else:
+ if not os.path.exists(TOOL_WARNING_FILE):
+ mlog.warning('Symbol extraction has not been implemented for this '
+ 'platform. ' + RELINKING_WARNING)
+ # Write it out so we don't warn again
+ with open(TOOL_WARNING_FILE, 'w', encoding='utf-8'):
+ pass
+ dummy_syms(outfilename)
+
+def run(args: T.List[str]) -> int:
+ global TOOL_WARNING_FILE # pylint: disable=global-statement
+ options = parser.parse_args(args)
+ if len(options.args) != 4:
+ print('symbolextractor.py <build dir> <shared library file> <import library> <output file>')
+ sys.exit(1)
+ privdir = os.path.join(options.args[0], 'meson-private')
+ TOOL_WARNING_FILE = os.path.join(privdir, 'symbolextractor_tool_warning_printed')
+ libfile = options.args[1]
+ impfile = options.args[2] # Only used on Windows
+ outfile = options.args[3]
+ gen_symbols(libfile, impfile, outfile, options.cross_host)
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(run(sys.argv[1:]))
diff --git a/mesonbuild/scripts/tags.py b/mesonbuild/scripts/tags.py
new file mode 100644
index 0000000..c856807
--- /dev/null
+++ b/mesonbuild/scripts/tags.py
@@ -0,0 +1,54 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import os
+import subprocess
+from pathlib import Path
+import typing as T
+
+def ls_as_bytestream() -> bytes:
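+ # Prefer git's tracked-file list when run inside a git checkout; otherwise walk the tree, skipping hidden files and directories.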
+ if os.path.exists('.git'):
+ return subprocess.run(['git', 'ls-tree', '-r', '--name-only', 'HEAD'],
+ stdout=subprocess.PIPE).stdout
+
+ files = [str(p) for p in Path('.').glob('**/*')
+ if not p.is_dir() and
+ not next((x for x in p.parts if x.startswith('.')), None)]
+ return '\n'.join(files).encode()
+
+
+def cscope() -> int:
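+ # cscope -i- reads the file list from stdin; quote each name so paths with spaces survive.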
+ ls = b'\n'.join([b'"%s"' % f for f in ls_as_bytestream().split()])
+ return subprocess.run(['cscope', '-v', '-b', '-i-'], input=ls).returncode
+
+
+def ctags() -> int:
+ ls = ls_as_bytestream()
+ return subprocess.run(['ctags', '-L-'], input=ls).returncode
+
+
+def etags() -> int:
+ ls = ls_as_bytestream()
+ return subprocess.run(['etags', '-'], input=ls).returncode
+
+
+def run(args: T.List[str]) -> int:
+ tool_name = args[0]
+ srcdir_name = args[1]
+ os.chdir(srcdir_name)
+ assert tool_name in {'cscope', 'ctags', 'etags'}
+ res = globals()[tool_name]()
+ assert isinstance(res, int)
+ return res
diff --git a/mesonbuild/scripts/test_loaded_modules.py b/mesonbuild/scripts/test_loaded_modules.py
new file mode 100644
index 0000000..b3547be
--- /dev/null
+++ b/mesonbuild/scripts/test_loaded_modules.py
@@ -0,0 +1,11 @@
+import sys
+import json
+import typing as T
+from . import meson_exe
+
+# This script is used by run_unittests.py to verify we don't load too many
+# modules when executing a wrapped command.
+def run(args: T.List[str]) -> int:
+ meson_exe.run(args)
+ print(json.dumps(list(sys.modules.keys())))
+ return 0
diff --git a/mesonbuild/scripts/uninstall.py b/mesonbuild/scripts/uninstall.py
new file mode 100644
index 0000000..8548766
--- /dev/null
+++ b/mesonbuild/scripts/uninstall.py
@@ -0,0 +1,51 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import os
+import typing as T
+
+logfile = 'meson-logs/install-log.txt'
+
+def do_uninstall(log: str) -> None:
+ failures = 0
+ successes = 0
+ for line in open(log, encoding='utf-8'):
+ if line.startswith('#'):
+ continue
+ fname = line.strip()
+ try:
+ if os.path.isdir(fname) and not os.path.islink(fname):
+ os.rmdir(fname)
+ else:
+ os.unlink(fname)
+ print('Deleted:', fname)
+ successes += 1
+ except Exception as e:
+ print(f'Could not delete {fname}: {e}.')
+ failures += 1
+ print('\nUninstall finished.\n')
+ print('Deleted:', successes)
+ print('Failed:', failures)
+ print('\nRemember that files created by custom scripts have not been removed.')
+
+def run(args: T.List[str]) -> int:
+ if args:
+ print('Unexpected arguments; this script takes none.')
+ return 1
+ if not os.path.exists(logfile):
+ print('Log file does not exist, no installation has been done.')
+ return 0
+ do_uninstall(logfile)
+ return 0
diff --git a/mesonbuild/scripts/vcstagger.py b/mesonbuild/scripts/vcstagger.py
new file mode 100644
index 0000000..c484ee1
--- /dev/null
+++ b/mesonbuild/scripts/vcstagger.py
@@ -0,0 +1,45 @@
+# Copyright 2015-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import sys, os, subprocess, re
+import typing as T
+
+def config_vcs_tag(infile: str, outfile: str, fallback: str, source_dir: str, replace_string: str, regex_selector: str, cmd: T.List[str]) -> None:
+ try:
+ output = subprocess.check_output(cmd, cwd=source_dir)
+ new_string = re.search(regex_selector, output.decode()).group(1).strip()
+ except Exception:
+ new_string = fallback
+
+ with open(infile, encoding='utf-8') as f:
+ new_data = f.read().replace(replace_string, new_string)
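+ # Only rewrite the output when its contents change, to avoid triggering unnecessary rebuilds.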
+ if os.path.exists(outfile):
+ with open(outfile, encoding='utf-8') as f:
+ needs_update = f.read() != new_data
+ else:
+ needs_update = True
+ if needs_update:
+ with open(outfile, 'w', encoding='utf-8') as f:
+ f.write(new_data)
+
+
+def run(args: T.List[str]) -> int:
+ infile, outfile, fallback, source_dir, replace_string, regex_selector = args[0:6]
+ command = args[6:]
+ config_vcs_tag(infile, outfile, fallback, source_dir, replace_string, regex_selector, command)
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(run(sys.argv[1:]))
diff --git a/mesonbuild/scripts/yasm.py b/mesonbuild/scripts/yasm.py
new file mode 100644
index 0000000..730ff3e
--- /dev/null
+++ b/mesonbuild/scripts/yasm.py
@@ -0,0 +1,22 @@
+import argparse
+import subprocess
+import typing as T
+
+def run(args: T.List[str]) -> int:
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--depfile')
+ options, yasm_cmd = parser.parse_known_args(args)
+
+ # Compile
+ returncode = subprocess.call(yasm_cmd)
+ if returncode != 0:
+ return returncode
+
+ # Capture and write depfile
+ ret = subprocess.run(yasm_cmd + ['-M'], capture_output=True)
+ if ret.returncode != 0:
+ return ret.returncode
+ with open(options.depfile, 'wb') as f:
+ f.write(ret.stdout)
+
+ return 0