Diffstat (limited to 'mesonbuild/interpreter')
-rw-r--r--  mesonbuild/interpreter/__init__.py                59
-rw-r--r--  mesonbuild/interpreter/compiler.py               781
-rw-r--r--  mesonbuild/interpreter/dependencyfallbacks.py    373
-rw-r--r--  mesonbuild/interpreter/interpreter.py           3275
-rw-r--r--  mesonbuild/interpreter/interpreterobjects.py     987
-rw-r--r--  mesonbuild/interpreter/kwargs.py                 310
-rw-r--r--  mesonbuild/interpreter/mesonmain.py              456
-rw-r--r--  mesonbuild/interpreter/primitives/__init__.py     29
-rw-r--r--  mesonbuild/interpreter/primitives/array.py       108
-rw-r--r--  mesonbuild/interpreter/primitives/boolean.py      52
-rw-r--r--  mesonbuild/interpreter/primitives/dict.py         88
-rw-r--r--  mesonbuild/interpreter/primitives/integer.py      81
-rw-r--r--  mesonbuild/interpreter/primitives/range.py        38
-rw-r--r--  mesonbuild/interpreter/primitives/string.py      233
-rw-r--r--  mesonbuild/interpreter/type_checking.py          479
15 files changed, 7349 insertions, 0 deletions
diff --git a/mesonbuild/interpreter/__init__.py b/mesonbuild/interpreter/__init__.py
new file mode 100644
index 0000000..016e4dc
--- /dev/null
+++ b/mesonbuild/interpreter/__init__.py
@@ -0,0 +1,59 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2012-2021 The Meson development team
+# Copyright © 2021 Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Meson interpreter."""
+
+__all__ = [
+ 'Interpreter',
+ 'permitted_dependency_kwargs',
+
+ 'CompilerHolder',
+
+ 'ExecutableHolder',
+ 'BuildTargetHolder',
+ 'CustomTargetHolder',
+ 'CustomTargetIndexHolder',
+ 'MachineHolder',
+ 'Test',
+ 'ConfigurationDataHolder',
+ 'SubprojectHolder',
+ 'DependencyHolder',
+ 'GeneratedListHolder',
+ 'ExternalProgramHolder',
+ 'extract_required_kwarg',
+
+ 'ArrayHolder',
+ 'BooleanHolder',
+ 'DictHolder',
+ 'IntegerHolder',
+ 'StringHolder',
+]
+
+from .interpreter import Interpreter, permitted_dependency_kwargs
+from .compiler import CompilerHolder
+from .interpreterobjects import (ExecutableHolder, BuildTargetHolder, CustomTargetHolder,
+ CustomTargetIndexHolder, MachineHolder, Test,
+ ConfigurationDataHolder, SubprojectHolder, DependencyHolder,
+ GeneratedListHolder, ExternalProgramHolder,
+ extract_required_kwarg)
+
+from .primitives import (
+ ArrayHolder,
+ BooleanHolder,
+ DictHolder,
+ IntegerHolder,
+ StringHolder,
+)
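
A side note on this re-export module (not part of the patch): with the imports and `__all__` above in place, downstream code can pull the public names straight from the package rather than from the individual submodules, for example:

    from mesonbuild.interpreter import Interpreter, CompilerHolder, StringHolder
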
diff --git a/mesonbuild/interpreter/compiler.py b/mesonbuild/interpreter/compiler.py
new file mode 100644
index 0000000..95126cf
--- /dev/null
+++ b/mesonbuild/interpreter/compiler.py
@@ -0,0 +1,781 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2012-2021 The Meson development team
+# Copyright © 2021 Intel Corporation
+from __future__ import annotations
+
+import enum
+import functools
+import os
+import typing as T
+
+from .. import build
+from .. import coredata
+from .. import dependencies
+from .. import mesonlib
+from .. import mlog
+from ..compilers import SUFFIX_TO_LANG
+from ..compilers.compilers import CompileCheckMode
+from ..interpreterbase import (ObjectHolder, noPosargs, noKwargs,
+ FeatureNew, disablerIfNotFound,
+ InterpreterException)
+from ..interpreterbase.decorators import ContainerTypeInfo, typed_kwargs, KwargInfo, typed_pos_args
+from ..mesonlib import OptionKey
+from .interpreterobjects import (extract_required_kwarg, extract_search_dirs)
+from .type_checking import REQUIRED_KW, in_set_validator, NoneType
+
+if T.TYPE_CHECKING:
+ from ..interpreter import Interpreter
+ from ..compilers import Compiler, RunResult
+ from ..interpreterbase import TYPE_var, TYPE_kwargs
+ from .kwargs import ExtractRequired, ExtractSearchDirs
+
+ from typing_extensions import TypedDict, Literal
+
+ class GetSupportedArgumentKw(TypedDict):
+
+ checked: Literal['warn', 'require', 'off']
+
+ class AlignmentKw(TypedDict):
+
+ prefix: str
+ args: T.List[str]
+ dependencies: T.List[dependencies.Dependency]
+
+ class CompileKW(TypedDict):
+
+ name: str
+ no_builtin_args: bool
+ include_directories: T.List[build.IncludeDirs]
+ args: T.List[str]
+ dependencies: T.List[dependencies.Dependency]
+
+ class CommonKW(TypedDict):
+
+ prefix: str
+ no_builtin_args: bool
+ include_directories: T.List[build.IncludeDirs]
+ args: T.List[str]
+ dependencies: T.List[dependencies.Dependency]
+
+    class ComputeIntKW(CommonKW):
+
+ guess: T.Optional[int]
+ high: T.Optional[int]
+ low: T.Optional[int]
+
+ class HeaderKW(CommonKW, ExtractRequired):
+ pass
+
+ class FindLibraryKW(ExtractRequired, ExtractSearchDirs):
+
+ disabler: bool
+ has_headers: T.List[str]
+ static: bool
+
+ # This list must be all of the `HeaderKW` values with `header_`
+ # prepended to the key
+ header_args: T.List[str]
+ header_dependencies: T.List[dependencies.Dependency]
+ header_include_directories: T.List[build.IncludeDirs]
+ header_no_builtin_args: bool
+ header_prefix: str
+ header_required: T.Union[bool, coredata.UserFeatureOption]
+
+ class PreprocessKW(TypedDict):
+ output: str
+ compile_args: T.List[str]
+ include_directories: T.List[build.IncludeDirs]
+
+
+class _TestMode(enum.Enum):
+
+ """Whether we're doing a compiler or linker check."""
+
+ COMPILER = 0
+ LINKER = 1
+
+
+class TryRunResultHolder(ObjectHolder['RunResult']):
+ def __init__(self, res: 'RunResult', interpreter: 'Interpreter'):
+ super().__init__(res, interpreter)
+ self.methods.update({'returncode': self.returncode_method,
+ 'compiled': self.compiled_method,
+ 'stdout': self.stdout_method,
+ 'stderr': self.stderr_method,
+ })
+
+ @noPosargs
+ @noKwargs
+ def returncode_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> int:
+ return self.held_object.returncode
+
+ @noPosargs
+ @noKwargs
+ def compiled_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool:
+ return self.held_object.compiled
+
+ @noPosargs
+ @noKwargs
+ def stdout_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+ return self.held_object.stdout
+
+ @noPosargs
+ @noKwargs
+ def stderr_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+ return self.held_object.stderr
+
+
+_ARGS_KW: KwargInfo[T.List[str]] = KwargInfo(
+ 'args',
+ ContainerTypeInfo(list, str),
+ listify=True,
+ default=[],
+)
+_DEPENDENCIES_KW: KwargInfo[T.List['dependencies.Dependency']] = KwargInfo(
+ 'dependencies',
+ ContainerTypeInfo(list, dependencies.Dependency),
+ listify=True,
+ default=[],
+)
+_INCLUDE_DIRS_KW: KwargInfo[T.List[build.IncludeDirs]] = KwargInfo(
+ 'include_directories',
+ ContainerTypeInfo(list, build.IncludeDirs),
+ default=[],
+ listify=True,
+)
+_PREFIX_KW: KwargInfo[str] = KwargInfo(
+ 'prefix',
+ (str, ContainerTypeInfo(list, str)),
+ default='',
+ since_values={list: '1.0.0'},
+ convertor=lambda x: '\n'.join(x) if isinstance(x, list) else x)
+
+_NO_BUILTIN_ARGS_KW = KwargInfo('no_builtin_args', bool, default=False)
+_NAME_KW = KwargInfo('name', str, default='')
+
+# Many of the compiler methods take exactly this kwarg signature; sharing the
+# list simplifies the `typed_kwargs` calls.
+_COMMON_KWS: T.List[KwargInfo] = [_ARGS_KW, _DEPENDENCIES_KW, _INCLUDE_DIRS_KW, _PREFIX_KW, _NO_BUILTIN_ARGS_KW]
+
+# Kwargs shared by the compiles(), links(), run() and similar checks
+_COMPILES_KWS: T.List[KwargInfo] = [_NAME_KW, _ARGS_KW, _DEPENDENCIES_KW, _INCLUDE_DIRS_KW, _NO_BUILTIN_ARGS_KW]
+
+_HEADER_KWS: T.List[KwargInfo] = [REQUIRED_KW.evolve(since='0.50.0', default=False), *_COMMON_KWS]
+
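
An aside on the shared kwarg lists above (not part of the patch): any new check method can reuse them wholesale. The method below is purely hypothetical and only illustrates the decorator pattern used throughout this file; `example_check` is not a real Meson method.

    @typed_pos_args('compiler.example_check', str)
    @typed_kwargs('compiler.example_check', *_COMMON_KWS)
    def example_check_method(self, args: T.Tuple[str], kwargs: 'CommonKW') -> bool:
        # typed_kwargs validates, listifies and defaults 'args', 'dependencies',
        # 'include_directories', 'prefix' and 'no_builtin_args' before we run.
        extra_args = self._determine_args(kwargs['no_builtin_args'],
                                          kwargs['include_directories'],
                                          kwargs['args'])
        return bool(extra_args)
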
+class CompilerHolder(ObjectHolder['Compiler']):
+ def __init__(self, compiler: 'Compiler', interpreter: 'Interpreter'):
+ super().__init__(compiler, interpreter)
+ self.environment = self.env
+ self.methods.update({'compiles': self.compiles_method,
+ 'links': self.links_method,
+ 'get_id': self.get_id_method,
+ 'get_linker_id': self.get_linker_id_method,
+ 'compute_int': self.compute_int_method,
+ 'sizeof': self.sizeof_method,
+ 'get_define': self.get_define_method,
+ 'check_header': self.check_header_method,
+ 'has_header': self.has_header_method,
+ 'has_header_symbol': self.has_header_symbol_method,
+ 'run': self.run_method,
+ 'has_function': self.has_function_method,
+ 'has_member': self.has_member_method,
+ 'has_members': self.has_members_method,
+ 'has_type': self.has_type_method,
+ 'alignment': self.alignment_method,
+ 'version': self.version_method,
+ 'cmd_array': self.cmd_array_method,
+ 'find_library': self.find_library_method,
+ 'has_argument': self.has_argument_method,
+ 'has_function_attribute': self.has_func_attribute_method,
+ 'get_supported_function_attributes': self.get_supported_function_attributes_method,
+ 'has_multi_arguments': self.has_multi_arguments_method,
+ 'get_supported_arguments': self.get_supported_arguments_method,
+ 'first_supported_argument': self.first_supported_argument_method,
+ 'has_link_argument': self.has_link_argument_method,
+ 'has_multi_link_arguments': self.has_multi_link_arguments_method,
+ 'get_supported_link_arguments': self.get_supported_link_arguments_method,
+ 'first_supported_link_argument': self.first_supported_link_argument_method,
+ 'symbols_have_underscore_prefix': self.symbols_have_underscore_prefix_method,
+ 'get_argument_syntax': self.get_argument_syntax_method,
+ 'preprocess': self.preprocess_method,
+ })
+
+ @property
+ def compiler(self) -> 'Compiler':
+ return self.held_object
+
+    def _dep_msg(self, deps: T.List['dependencies.Dependency'], compile_only: bool, endl: T.Optional[str]) -> str:
+ msg_single = 'with dependency {}'
+ msg_many = 'with dependencies {}'
+ names = []
+ for d in deps:
+ if isinstance(d, dependencies.InternalDependency):
+ FeatureNew.single_use('compiler method "dependencies" kwarg with internal dep', '0.57.0', self.subproject,
+ location=self.current_node)
+ continue
+ if isinstance(d, dependencies.ExternalLibrary):
+ if compile_only:
+ continue
+ name = '-l' + d.name
+ else:
+ name = d.name
+ names.append(name)
+ if not names:
+ return endl
+ tpl = msg_many if len(names) > 1 else msg_single
+ if endl is None:
+ endl = ''
+ return tpl.format(', '.join(names)) + endl
+
+ @noPosargs
+ @noKwargs
+ def version_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+ return self.compiler.version
+
+ @noPosargs
+ @noKwargs
+ def cmd_array_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> T.List[str]:
+ return self.compiler.exelist
+
+ def _determine_args(self, nobuiltins: bool,
+ incdirs: T.List[build.IncludeDirs],
+ extra_args: T.List[str],
+ mode: CompileCheckMode = CompileCheckMode.LINK) -> T.List[str]:
+ args: T.List[str] = []
+ for i in incdirs:
+ for idir in i.to_string_list(self.environment.get_source_dir()):
+ args.extend(self.compiler.get_include_args(idir, False))
+ if not nobuiltins:
+ opts = self.environment.coredata.options
+ args += self.compiler.get_option_compile_args(opts)
+ if mode is CompileCheckMode.LINK:
+ args.extend(self.compiler.get_option_link_args(opts))
+ args.extend(extra_args)
+ return args
+
+    def _determine_dependencies(self, deps: T.List['dependencies.Dependency'], compile_only: bool = False, endl: T.Optional[str] = ':') -> T.Tuple[T.List['dependencies.Dependency'], str]:
+ deps = dependencies.get_leaf_external_dependencies(deps)
+ return deps, self._dep_msg(deps, compile_only, endl)
+
+ @typed_pos_args('compiler.alignment', str)
+ @typed_kwargs(
+ 'compiler.alignment',
+ _PREFIX_KW,
+ _ARGS_KW,
+ _DEPENDENCIES_KW,
+ )
+ def alignment_method(self, args: T.Tuple[str], kwargs: 'AlignmentKw') -> int:
+ typename = args[0]
+ deps, msg = self._determine_dependencies(kwargs['dependencies'], compile_only=self.compiler.is_cross)
+ result = self.compiler.alignment(typename, kwargs['prefix'], self.environment,
+ extra_args=kwargs['args'],
+ dependencies=deps)
+ mlog.log('Checking for alignment of', mlog.bold(typename, True), msg, result)
+ return result
+
+ @typed_pos_args('compiler.run', (str, mesonlib.File))
+ @typed_kwargs('compiler.run', *_COMPILES_KWS)
+ def run_method(self, args: T.Tuple['mesonlib.FileOrString'], kwargs: 'CompileKW') -> 'RunResult':
+ code = args[0]
+ if isinstance(code, mesonlib.File):
+ self.interpreter.add_build_def_file(code)
+ code = mesonlib.File.from_absolute_file(
+ code.rel_to_builddir(self.environment.source_dir))
+ testname = kwargs['name']
+ extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+ deps, msg = self._determine_dependencies(kwargs['dependencies'], compile_only=False, endl=None)
+ result = self.compiler.run(code, self.environment, extra_args=extra_args,
+ dependencies=deps)
+ if testname:
+ if not result.compiled:
+ h = mlog.red('DID NOT COMPILE')
+ elif result.returncode == 0:
+ h = mlog.green('YES')
+ else:
+ h = mlog.red(f'NO ({result.returncode})')
+ mlog.log('Checking if', mlog.bold(testname, True), msg, 'runs:', h)
+ return result
+
+ @noPosargs
+ @noKwargs
+ def get_id_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+ return self.compiler.get_id()
+
+ @noPosargs
+ @noKwargs
+ @FeatureNew('compiler.get_linker_id', '0.53.0')
+ def get_linker_id_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+ return self.compiler.get_linker_id()
+
+ @noPosargs
+ @noKwargs
+ def symbols_have_underscore_prefix_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool:
+ '''
+ Check if the compiler prefixes _ (underscore) to global C symbols
+ See: https://en.wikipedia.org/wiki/Name_mangling#C
+ '''
+ return self.compiler.symbols_have_underscore_prefix(self.environment)
+
+ @typed_pos_args('compiler.has_member', str, str)
+ @typed_kwargs('compiler.has_member', *_COMMON_KWS)
+ def has_member_method(self, args: T.Tuple[str, str], kwargs: 'CommonKW') -> bool:
+ typename, membername = args
+ extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+ deps, msg = self._determine_dependencies(kwargs['dependencies'])
+ had, cached = self.compiler.has_members(typename, [membername], kwargs['prefix'],
+ self.environment,
+ extra_args=extra_args,
+ dependencies=deps)
+ cached_msg = mlog.blue('(cached)') if cached else ''
+ if had:
+ hadtxt = mlog.green('YES')
+ else:
+ hadtxt = mlog.red('NO')
+ mlog.log('Checking whether type', mlog.bold(typename, True),
+ 'has member', mlog.bold(membername, True), msg, hadtxt, cached_msg)
+ return had
+
+ @typed_pos_args('compiler.has_members', str, varargs=str, min_varargs=1)
+ @typed_kwargs('compiler.has_members', *_COMMON_KWS)
+ def has_members_method(self, args: T.Tuple[str, T.List[str]], kwargs: 'CommonKW') -> bool:
+ typename, membernames = args
+ extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+ deps, msg = self._determine_dependencies(kwargs['dependencies'])
+ had, cached = self.compiler.has_members(typename, membernames, kwargs['prefix'],
+ self.environment,
+ extra_args=extra_args,
+ dependencies=deps)
+ cached_msg = mlog.blue('(cached)') if cached else ''
+ if had:
+ hadtxt = mlog.green('YES')
+ else:
+ hadtxt = mlog.red('NO')
+ members = mlog.bold(', '.join([f'"{m}"' for m in membernames]))
+ mlog.log('Checking whether type', mlog.bold(typename, True),
+ 'has members', members, msg, hadtxt, cached_msg)
+ return had
+
+ @typed_pos_args('compiler.has_function', str)
+ @typed_kwargs('compiler.has_function', *_COMMON_KWS)
+ def has_function_method(self, args: T.Tuple[str], kwargs: 'CommonKW') -> bool:
+ funcname = args[0]
+ extra_args = self._determine_args(kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+ deps, msg = self._determine_dependencies(kwargs['dependencies'], compile_only=False)
+ had, cached = self.compiler.has_function(funcname, kwargs['prefix'], self.environment,
+ extra_args=extra_args,
+ dependencies=deps)
+ cached_msg = mlog.blue('(cached)') if cached else ''
+ if had:
+ hadtxt = mlog.green('YES')
+ else:
+ hadtxt = mlog.red('NO')
+ mlog.log('Checking for function', mlog.bold(funcname, True), msg, hadtxt, cached_msg)
+ return had
+
+ @typed_pos_args('compiler.has_type', str)
+ @typed_kwargs('compiler.has_type', *_COMMON_KWS)
+ def has_type_method(self, args: T.Tuple[str], kwargs: 'CommonKW') -> bool:
+ typename = args[0]
+ extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+ deps, msg = self._determine_dependencies(kwargs['dependencies'])
+ had, cached = self.compiler.has_type(typename, kwargs['prefix'], self.environment,
+ extra_args=extra_args, dependencies=deps)
+ cached_msg = mlog.blue('(cached)') if cached else ''
+ if had:
+ hadtxt = mlog.green('YES')
+ else:
+ hadtxt = mlog.red('NO')
+ mlog.log('Checking for type', mlog.bold(typename, True), msg, hadtxt, cached_msg)
+ return had
+
+ @FeatureNew('compiler.compute_int', '0.40.0')
+ @typed_pos_args('compiler.compute_int', str)
+ @typed_kwargs(
+ 'compiler.compute_int',
+ KwargInfo('low', (int, NoneType)),
+ KwargInfo('high', (int, NoneType)),
+ KwargInfo('guess', (int, NoneType)),
+ *_COMMON_KWS,
+ )
+    def compute_int_method(self, args: T.Tuple[str], kwargs: 'ComputeIntKW') -> int:
+ expression = args[0]
+ extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+ deps, msg = self._determine_dependencies(kwargs['dependencies'], compile_only=self.compiler.is_cross)
+ res = self.compiler.compute_int(expression, kwargs['low'], kwargs['high'],
+ kwargs['guess'], kwargs['prefix'],
+ self.environment, extra_args=extra_args,
+ dependencies=deps)
+ mlog.log('Computing int of', mlog.bold(expression, True), msg, res)
+ return res
+
+ @typed_pos_args('compiler.sizeof', str)
+ @typed_kwargs('compiler.sizeof', *_COMMON_KWS)
+ def sizeof_method(self, args: T.Tuple[str], kwargs: 'CommonKW') -> int:
+ element = args[0]
+ extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+ deps, msg = self._determine_dependencies(kwargs['dependencies'], compile_only=self.compiler.is_cross)
+ esize = self.compiler.sizeof(element, kwargs['prefix'], self.environment,
+ extra_args=extra_args, dependencies=deps)
+ mlog.log('Checking for size of', mlog.bold(element, True), msg, esize)
+ return esize
+
+ @FeatureNew('compiler.get_define', '0.40.0')
+ @typed_pos_args('compiler.get_define', str)
+ @typed_kwargs('compiler.get_define', *_COMMON_KWS)
+ def get_define_method(self, args: T.Tuple[str], kwargs: 'CommonKW') -> str:
+ element = args[0]
+ extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+ deps, msg = self._determine_dependencies(kwargs['dependencies'])
+ value, cached = self.compiler.get_define(element, kwargs['prefix'], self.environment,
+ extra_args=extra_args,
+ dependencies=deps)
+ cached_msg = mlog.blue('(cached)') if cached else ''
+ mlog.log('Fetching value of define', mlog.bold(element, True), msg, value, cached_msg)
+ return value
+
+ @typed_pos_args('compiler.compiles', (str, mesonlib.File))
+ @typed_kwargs('compiler.compiles', *_COMPILES_KWS)
+ def compiles_method(self, args: T.Tuple['mesonlib.FileOrString'], kwargs: 'CompileKW') -> bool:
+ code = args[0]
+ if isinstance(code, mesonlib.File):
+ self.interpreter.add_build_def_file(code)
+ code = mesonlib.File.from_absolute_file(
+ code.rel_to_builddir(self.environment.source_dir))
+ testname = kwargs['name']
+ extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+ deps, msg = self._determine_dependencies(kwargs['dependencies'], endl=None)
+ result, cached = self.compiler.compiles(code, self.environment,
+ extra_args=extra_args,
+ dependencies=deps)
+ if testname:
+ if result:
+ h = mlog.green('YES')
+ else:
+ h = mlog.red('NO')
+ cached_msg = mlog.blue('(cached)') if cached else ''
+ mlog.log('Checking if', mlog.bold(testname, True), msg, 'compiles:', h, cached_msg)
+ return result
+
+ @typed_pos_args('compiler.links', (str, mesonlib.File))
+ @typed_kwargs('compiler.links', *_COMPILES_KWS)
+ def links_method(self, args: T.Tuple['mesonlib.FileOrString'], kwargs: 'CompileKW') -> bool:
+ code = args[0]
+ compiler = None
+ if isinstance(code, mesonlib.File):
+ self.interpreter.add_build_def_file(code)
+ code = mesonlib.File.from_absolute_file(
+ code.rel_to_builddir(self.environment.source_dir))
+ suffix = code.suffix
+ if suffix not in self.compiler.file_suffixes:
+ for_machine = self.compiler.for_machine
+ clist = self.interpreter.coredata.compilers[for_machine]
+ if suffix not in SUFFIX_TO_LANG:
+ # just pass it to the compiler driver
+ mlog.warning(f'Unknown suffix for test file {code}')
+ elif SUFFIX_TO_LANG[suffix] not in clist:
+ mlog.warning(f'Passed {SUFFIX_TO_LANG[suffix]} source to links method, not specified for {for_machine.get_lower_case_name()} machine.')
+ else:
+ compiler = clist[SUFFIX_TO_LANG[suffix]]
+
+ testname = kwargs['name']
+ extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+ deps, msg = self._determine_dependencies(kwargs['dependencies'], compile_only=False)
+ result, cached = self.compiler.links(code, self.environment,
+ compiler=compiler,
+ extra_args=extra_args,
+ dependencies=deps)
+ cached_msg = mlog.blue('(cached)') if cached else ''
+ if testname:
+ if result:
+ h = mlog.green('YES')
+ else:
+ h = mlog.red('NO')
+ mlog.log('Checking if', mlog.bold(testname, True), msg, 'links:', h, cached_msg)
+ return result
+
+ @FeatureNew('compiler.check_header', '0.47.0')
+ @typed_pos_args('compiler.check_header', str)
+ @typed_kwargs('compiler.check_header', *_HEADER_KWS)
+ def check_header_method(self, args: T.Tuple[str], kwargs: 'HeaderKW') -> bool:
+ hname = args[0]
+ disabled, required, feature = extract_required_kwarg(kwargs, self.subproject, default=False)
+ if disabled:
+ mlog.log('Check usable header', mlog.bold(hname, True), 'skipped: feature', mlog.bold(feature), 'disabled')
+ return False
+ extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+ deps, msg = self._determine_dependencies(kwargs['dependencies'])
+ haz, cached = self.compiler.check_header(hname, kwargs['prefix'], self.environment,
+ extra_args=extra_args,
+ dependencies=deps)
+ cached_msg = mlog.blue('(cached)') if cached else ''
+ if required and not haz:
+ raise InterpreterException(f'{self.compiler.get_display_language()} header {hname!r} not usable')
+ elif haz:
+ h = mlog.green('YES')
+ else:
+ h = mlog.red('NO')
+ mlog.log('Check usable header', mlog.bold(hname, True), msg, h, cached_msg)
+ return haz
+
+ def _has_header_impl(self, hname: str, kwargs: 'HeaderKW') -> bool:
+ disabled, required, feature = extract_required_kwarg(kwargs, self.subproject, default=False)
+ if disabled:
+ mlog.log('Has header', mlog.bold(hname, True), 'skipped: feature', mlog.bold(feature), 'disabled')
+ return False
+ extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+ deps, msg = self._determine_dependencies(kwargs['dependencies'])
+ haz, cached = self.compiler.has_header(hname, kwargs['prefix'], self.environment,
+ extra_args=extra_args, dependencies=deps)
+ cached_msg = mlog.blue('(cached)') if cached else ''
+ if required and not haz:
+ raise InterpreterException(f'{self.compiler.get_display_language()} header {hname!r} not found')
+ elif haz:
+ h = mlog.green('YES')
+ else:
+ h = mlog.red('NO')
+ mlog.log('Has header', mlog.bold(hname, True), msg, h, cached_msg)
+ return haz
+
+ @typed_pos_args('compiler.has_header', str)
+ @typed_kwargs('compiler.has_header', *_HEADER_KWS)
+ def has_header_method(self, args: T.Tuple[str], kwargs: 'HeaderKW') -> bool:
+ return self._has_header_impl(args[0], kwargs)
+
+ @typed_pos_args('compiler.has_header_symbol', str, str)
+ @typed_kwargs('compiler.has_header_symbol', *_HEADER_KWS)
+ def has_header_symbol_method(self, args: T.Tuple[str, str], kwargs: 'HeaderKW') -> bool:
+ hname, symbol = args
+ disabled, required, feature = extract_required_kwarg(kwargs, self.subproject, default=False)
+ if disabled:
+ mlog.log('Header', mlog.bold(hname, True), 'has symbol', mlog.bold(symbol, True), 'skipped: feature', mlog.bold(feature), 'disabled')
+ return False
+ extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+ deps, msg = self._determine_dependencies(kwargs['dependencies'])
+ haz, cached = self.compiler.has_header_symbol(hname, symbol, kwargs['prefix'], self.environment,
+ extra_args=extra_args,
+ dependencies=deps)
+ if required and not haz:
+ raise InterpreterException(f'{self.compiler.get_display_language()} symbol {symbol} not found in header {hname}')
+ elif haz:
+ h = mlog.green('YES')
+ else:
+ h = mlog.red('NO')
+ cached_msg = mlog.blue('(cached)') if cached else ''
+ mlog.log('Header', mlog.bold(hname, True), 'has symbol', mlog.bold(symbol, True), msg, h, cached_msg)
+ return haz
+
+ def notfound_library(self, libname: str) -> 'dependencies.ExternalLibrary':
+ lib = dependencies.ExternalLibrary(libname, None,
+ self.environment,
+ self.compiler.language,
+ silent=True)
+ return lib
+
+ @disablerIfNotFound
+ @typed_pos_args('compiler.find_library', str)
+ @typed_kwargs(
+ 'compiler.find_library',
+ KwargInfo('required', (bool, coredata.UserFeatureOption), default=True),
+ KwargInfo('has_headers', ContainerTypeInfo(list, str), listify=True, default=[], since='0.50.0'),
+ KwargInfo('static', (bool, NoneType), since='0.51.0'),
+ KwargInfo('disabler', bool, default=False, since='0.49.0'),
+ KwargInfo('dirs', ContainerTypeInfo(list, str), listify=True, default=[]),
+ *(k.evolve(name=f'header_{k.name}') for k in _HEADER_KWS)
+ )
+ def find_library_method(self, args: T.Tuple[str], kwargs: 'FindLibraryKW') -> 'dependencies.ExternalLibrary':
+ # TODO add dependencies support?
+ libname = args[0]
+
+ disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
+ if disabled:
+ mlog.log('Library', mlog.bold(libname), 'skipped: feature', mlog.bold(feature), 'disabled')
+ return self.notfound_library(libname)
+
+ # This could be done with a comprehension, but that confuses the type
+ # checker, and having it check this seems valuable
+ has_header_kwargs: 'HeaderKW' = {
+ 'required': required,
+ 'args': kwargs['header_args'],
+ 'dependencies': kwargs['header_dependencies'],
+ 'include_directories': kwargs['header_include_directories'],
+ 'prefix': kwargs['header_prefix'],
+ 'no_builtin_args': kwargs['header_no_builtin_args'],
+ }
+ for h in kwargs['has_headers']:
+ if not self._has_header_impl(h, has_header_kwargs):
+ return self.notfound_library(libname)
+
+ search_dirs = extract_search_dirs(kwargs)
+
+ prefer_static = self.environment.coredata.get_option(OptionKey('prefer_static'))
+ if kwargs['static'] is True:
+ libtype = mesonlib.LibType.STATIC
+ elif kwargs['static'] is False:
+ libtype = mesonlib.LibType.SHARED
+ elif prefer_static:
+ libtype = mesonlib.LibType.PREFER_STATIC
+ else:
+ libtype = mesonlib.LibType.PREFER_SHARED
+ linkargs = self.compiler.find_library(libname, self.environment, search_dirs, libtype)
+ if required and not linkargs:
+ if libtype == mesonlib.LibType.PREFER_SHARED:
+ libtype_s = 'shared or static'
+ else:
+ libtype_s = libtype.name.lower()
+ raise InterpreterException('{} {} library {!r} not found'
+ .format(self.compiler.get_display_language(),
+ libtype_s, libname))
+ lib = dependencies.ExternalLibrary(libname, linkargs, self.environment,
+ self.compiler.language)
+ return lib
+
+ def _has_argument_impl(self, arguments: T.Union[str, T.List[str]],
+ mode: _TestMode = _TestMode.COMPILER) -> bool:
+ """Shared implementation for methods checking compiler and linker arguments."""
+ # This simplifies the callers
+ if isinstance(arguments, str):
+ arguments = [arguments]
+ test = self.compiler.has_multi_link_arguments if mode is _TestMode.LINKER else self.compiler.has_multi_arguments
+ result, cached = test(arguments, self.environment)
+ cached_msg = mlog.blue('(cached)') if cached else ''
+ mlog.log(
+ 'Compiler for',
+ self.compiler.get_display_language(),
+ 'supports{}'.format(' link' if mode is _TestMode.LINKER else ''),
+ 'arguments {}:'.format(' '.join(arguments)),
+ mlog.green('YES') if result else mlog.red('NO'),
+ cached_msg)
+ return result
+
+ @noKwargs
+ @typed_pos_args('compiler.has_argument', str)
+ def has_argument_method(self, args: T.Tuple[str], kwargs: 'TYPE_kwargs') -> bool:
+ return self._has_argument_impl([args[0]])
+
+ @noKwargs
+ @typed_pos_args('compiler.has_multi_arguments', varargs=str)
+ @FeatureNew('compiler.has_multi_arguments', '0.37.0')
+ def has_multi_arguments_method(self, args: T.Tuple[T.List[str]], kwargs: 'TYPE_kwargs') -> bool:
+ return self._has_argument_impl(args[0])
+
+ @FeatureNew('compiler.get_supported_arguments', '0.43.0')
+ @typed_pos_args('compiler.get_supported_arguments', varargs=str)
+ @typed_kwargs(
+ 'compiler.get_supported_arguments',
+ KwargInfo('checked', str, default='off', since='0.59.0',
+ validator=in_set_validator({'warn', 'require', 'off'})),
+ )
+ def get_supported_arguments_method(self, args: T.Tuple[T.List[str]], kwargs: 'GetSupportedArgumentKw') -> T.List[str]:
+ supported_args: T.List[str] = []
+ checked = kwargs['checked']
+
+ for arg in args[0]:
+ if not self._has_argument_impl([arg]):
+ msg = f'Compiler for {self.compiler.get_display_language()} does not support "{arg}"'
+ if checked == 'warn':
+ mlog.warning(msg)
+ elif checked == 'require':
+ raise mesonlib.MesonException(msg)
+ else:
+ supported_args.append(arg)
+ return supported_args
+
+ @noKwargs
+ @typed_pos_args('compiler.first_supported_argument', varargs=str)
+ def first_supported_argument_method(self, args: T.Tuple[T.List[str]], kwargs: 'TYPE_kwargs') -> T.List[str]:
+ for arg in args[0]:
+ if self._has_argument_impl([arg]):
+ mlog.log('First supported argument:', mlog.bold(arg))
+ return [arg]
+ mlog.log('First supported argument:', mlog.red('None'))
+ return []
+
+ @FeatureNew('compiler.has_link_argument', '0.46.0')
+ @noKwargs
+ @typed_pos_args('compiler.has_link_argument', str)
+ def has_link_argument_method(self, args: T.Tuple[str], kwargs: 'TYPE_kwargs') -> bool:
+ return self._has_argument_impl([args[0]], mode=_TestMode.LINKER)
+
+ @FeatureNew('compiler.has_multi_link_argument', '0.46.0')
+ @noKwargs
+ @typed_pos_args('compiler.has_multi_link_argument', varargs=str)
+ def has_multi_link_arguments_method(self, args: T.Tuple[T.List[str]], kwargs: 'TYPE_kwargs') -> bool:
+ return self._has_argument_impl(args[0], mode=_TestMode.LINKER)
+
+ @FeatureNew('compiler.get_supported_link_arguments', '0.46.0')
+ @noKwargs
+ @typed_pos_args('compiler.get_supported_link_arguments', varargs=str)
+ def get_supported_link_arguments_method(self, args: T.Tuple[T.List[str]], kwargs: 'TYPE_kwargs') -> T.List[str]:
+ supported_args: T.List[str] = []
+ for arg in args[0]:
+ if self._has_argument_impl([arg], mode=_TestMode.LINKER):
+ supported_args.append(arg)
+ return supported_args
+
+ @FeatureNew('compiler.first_supported_link_argument_method', '0.46.0')
+ @noKwargs
+ @typed_pos_args('compiler.first_supported_link_argument', varargs=str)
+ def first_supported_link_argument_method(self, args: T.Tuple[T.List[str]], kwargs: 'TYPE_kwargs') -> T.List[str]:
+ for arg in args[0]:
+ if self._has_argument_impl([arg], mode=_TestMode.LINKER):
+ mlog.log('First supported link argument:', mlog.bold(arg))
+ return [arg]
+ mlog.log('First supported link argument:', mlog.red('None'))
+ return []
+
+ def _has_function_attribute_impl(self, attr: str) -> bool:
+ """Common helper for function attribute testing."""
+ result, cached = self.compiler.has_func_attribute(attr, self.environment)
+ cached_msg = mlog.blue('(cached)') if cached else ''
+ h = mlog.green('YES') if result else mlog.red('NO')
+ mlog.log(f'Compiler for {self.compiler.get_display_language()} supports function attribute {attr}:', h, cached_msg)
+ return result
+
+ @FeatureNew('compiler.has_function_attribute', '0.48.0')
+ @noKwargs
+ @typed_pos_args('compiler.has_function_attribute', str)
+ def has_func_attribute_method(self, args: T.Tuple[str], kwargs: 'TYPE_kwargs') -> bool:
+ return self._has_function_attribute_impl(args[0])
+
+ @FeatureNew('compiler.get_supported_function_attributes', '0.48.0')
+ @noKwargs
+ @typed_pos_args('compiler.get_supported_function_attributes', varargs=str)
+ def get_supported_function_attributes_method(self, args: T.Tuple[T.List[str]], kwargs: 'TYPE_kwargs') -> T.List[str]:
+ return [a for a in args[0] if self._has_function_attribute_impl(a)]
+
+ @FeatureNew('compiler.get_argument_syntax_method', '0.49.0')
+ @noPosargs
+ @noKwargs
+ def get_argument_syntax_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+ return self.compiler.get_argument_syntax()
+
+ @FeatureNew('compiler.preprocess', '0.64.0')
+ @typed_pos_args('compiler.preprocess', varargs=(mesonlib.File, str), min_varargs=1)
+ @typed_kwargs(
+ 'compiler.preprocess',
+ KwargInfo('output', str, default='@PLAINNAME@.i'),
+ KwargInfo('compile_args', ContainerTypeInfo(list, str), listify=True, default=[]),
+ _INCLUDE_DIRS_KW,
+ )
+ def preprocess_method(self, args: T.Tuple[T.List['mesonlib.FileOrString']], kwargs: 'PreprocessKW') -> T.List[build.CustomTargetIndex]:
+ compiler = self.compiler.get_preprocessor()
+ sources = self.interpreter.source_strings_to_files(args[0])
+ tg_kwargs = {
+ f'{self.compiler.language}_args': kwargs['compile_args'],
+ 'build_by_default': False,
+ 'include_directories': kwargs['include_directories'],
+ }
+ tg = build.CompileTarget(
+ 'preprocessor',
+ self.interpreter.subdir,
+ self.subproject,
+ self.environment,
+ sources,
+ kwargs['output'],
+ compiler,
+ tg_kwargs)
+ self.interpreter.add_target(tg.name, tg)
+        # Expose this target as a list of its outputs, so the user can pass
+        # them to other targets, list its outputs, etc.
+ private_dir = os.path.relpath(self.interpreter.backend.get_target_private_dir(tg), self.interpreter.subdir)
+ return [build.CustomTargetIndex(tg, os.path.join(private_dir, o)) for o in tg.outputs]
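
Before moving on to the next file, a standalone sketch (not part of the patch) of how the 'checked' modes of get_supported_arguments() above behave; filter_supported and is_supported are illustrative names, not Meson API.

    import typing as T

    def filter_supported(candidates: T.List[str],
                         is_supported: T.Callable[[str], bool],
                         checked: str = 'off') -> T.List[str]:
        # Same decision logic as get_supported_arguments_method(): unsupported
        # arguments are silently dropped ('off'), warned about ('warn'),
        # or fatal ('require').
        supported: T.List[str] = []
        for arg in candidates:
            if not is_supported(arg):
                msg = f'argument "{arg}" is not supported'
                if checked == 'warn':
                    print('WARNING:', msg)
                elif checked == 'require':
                    raise RuntimeError(msg)
            else:
                supported.append(arg)
        return supported

    print(filter_supported(['-Wall', '-Wbogus'], lambda a: a == '-Wall'))  # ['-Wall']
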
diff --git a/mesonbuild/interpreter/dependencyfallbacks.py b/mesonbuild/interpreter/dependencyfallbacks.py
new file mode 100644
index 0000000..54be990
--- /dev/null
+++ b/mesonbuild/interpreter/dependencyfallbacks.py
@@ -0,0 +1,373 @@
+from __future__ import annotations
+
+from .interpreterobjects import extract_required_kwarg
+from .. import mlog
+from .. import dependencies
+from .. import build
+from ..wrap import WrapMode
+from ..mesonlib import OptionKey, extract_as_list, stringlistify, version_compare_many, listify
+from ..dependencies import Dependency, DependencyException, NotFoundDependency
+from ..interpreterbase import (MesonInterpreterObject, FeatureNew,
+ InterpreterException, InvalidArguments)
+
+import typing as T
+if T.TYPE_CHECKING:
+ from .interpreter import Interpreter
+ from ..interpreterbase import TYPE_nkwargs, TYPE_nvar
+ from .interpreterobjects import SubprojectHolder
+
+
+class DependencyFallbacksHolder(MesonInterpreterObject):
+ def __init__(self, interpreter: 'Interpreter', names: T.List[str], allow_fallback: T.Optional[bool] = None,
+ default_options: T.Optional[T.List[str]] = None) -> None:
+ super().__init__(subproject=interpreter.subproject)
+ self.interpreter = interpreter
+ self.subproject = interpreter.subproject
+ self.coredata = interpreter.coredata
+ self.build = interpreter.build
+ self.environment = interpreter.environment
+ self.wrap_resolver = interpreter.environment.wrap_resolver
+ self.allow_fallback = allow_fallback
+ self.subproject_name: T.Optional[str] = None
+ self.subproject_varname: T.Optional[str] = None
+ self.subproject_kwargs = {'default_options': default_options or []}
+ self.names: T.List[str] = []
+ self.forcefallback: bool = False
+ self.nofallback: bool = False
+ for name in names:
+ if not name:
+ raise InterpreterException('dependency_fallbacks empty name \'\' is not allowed')
+ if '<' in name or '>' in name or '=' in name:
+                raise InvalidArguments('Characters <, > and = are forbidden in dependency names. To specify\n'
+                                       'version requirements use the \'version\' keyword argument instead.')
+ if name in self.names:
+ raise InterpreterException(f'dependency_fallbacks name {name!r} is duplicated')
+ self.names.append(name)
+ self._display_name = self.names[0] if self.names else '(anonymous)'
+
+ def set_fallback(self, fbinfo: T.Optional[T.Union[T.List[str], str]]) -> None:
+ # Legacy: This converts dependency()'s fallback kwargs.
+ if fbinfo is None:
+ return
+ if self.allow_fallback is not None:
+ raise InvalidArguments('"fallback" and "allow_fallback" arguments are mutually exclusive')
+ fbinfo = stringlistify(fbinfo)
+ if len(fbinfo) == 0:
+ # dependency('foo', fallback: []) is the same as dependency('foo', allow_fallback: false)
+ self.allow_fallback = False
+ return
+ if len(fbinfo) == 1:
+ FeatureNew.single_use('Fallback without variable name', '0.53.0', self.subproject)
+ subp_name, varname = fbinfo[0], None
+ elif len(fbinfo) == 2:
+ subp_name, varname = fbinfo
+ else:
+ raise InterpreterException('Fallback info must have one or two items.')
+ self._subproject_impl(subp_name, varname)
+
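
For reference (illustrative calls, not part of the patch), the legacy forms accepted by set_fallback() above map as follows, where df stands for a DependencyFallbacksHolder instance:

    df.set_fallback([])                  # same effect as allow_fallback: false
    df.set_fallback('foo')               # subproject 'foo', no variable name (FeatureNew 0.53.0)
    df.set_fallback(['foo', 'foo_dep'])  # subproject 'foo', variable 'foo_dep'
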
+ def _subproject_impl(self, subp_name: str, varname: str) -> None:
+ assert self.subproject_name is None
+ self.subproject_name = subp_name
+ self.subproject_varname = varname
+
+ def _do_dependency_cache(self, kwargs: TYPE_nkwargs, func_args: TYPE_nvar, func_kwargs: TYPE_nkwargs) -> T.Optional[Dependency]:
+ name = func_args[0]
+ cached_dep = self._get_cached_dep(name, kwargs)
+ if cached_dep:
+ self._verify_fallback_consistency(cached_dep)
+ return cached_dep
+
+ def _do_dependency(self, kwargs: TYPE_nkwargs, func_args: TYPE_nvar, func_kwargs: TYPE_nkwargs) -> T.Optional[Dependency]:
+        # Note that there is no df.dependency() method; this is called for names
+ # given as positional arguments to dependency_fallbacks(name1, ...).
+ # We use kwargs from the dependency() function, for things like version,
+ # module, etc.
+ name = func_args[0]
+ self._handle_featurenew_dependencies(name)
+ dep = dependencies.find_external_dependency(name, self.environment, kwargs)
+ if dep.found():
+ for_machine = self.interpreter.machine_from_native_kwarg(kwargs)
+ identifier = dependencies.get_dep_identifier(name, kwargs)
+ self.coredata.deps[for_machine].put(identifier, dep)
+ return dep
+ return None
+
+ def _do_existing_subproject(self, kwargs: TYPE_nkwargs, func_args: TYPE_nvar, func_kwargs: TYPE_nkwargs) -> T.Optional[Dependency]:
+ subp_name = func_args[0]
+ varname = self.subproject_varname
+ if subp_name and self._get_subproject(subp_name):
+ return self._get_subproject_dep(subp_name, varname, kwargs)
+ return None
+
+ def _do_subproject(self, kwargs: TYPE_nkwargs, func_args: TYPE_nvar, func_kwargs: TYPE_nkwargs) -> T.Optional[Dependency]:
+ if self.forcefallback:
+ mlog.log('Looking for a fallback subproject for the dependency',
+ mlog.bold(self._display_name), 'because:\nUse of fallback dependencies is forced.')
+ elif self.nofallback:
+ mlog.log('Not looking for a fallback subproject for the dependency',
+ mlog.bold(self._display_name), 'because:\nUse of fallback dependencies is disabled.')
+ return None
+ else:
+ mlog.log('Looking for a fallback subproject for the dependency',
+ mlog.bold(self._display_name))
+
+ # dependency('foo', static: true) should implicitly add
+ # default_options: ['default_library=static']
+ static = kwargs.get('static')
+ default_options = stringlistify(func_kwargs.get('default_options', []))
+ if static is not None and not any('default_library' in i for i in default_options):
+ default_library = 'static' if static else 'shared'
+ opt = f'default_library={default_library}'
+ mlog.log(f'Building fallback subproject with {opt}')
+ default_options.append(opt)
+ func_kwargs['default_options'] = default_options
+
+ # Configure the subproject
+ subp_name = self.subproject_name
+ varname = self.subproject_varname
+ func_kwargs.setdefault('version', [])
+ if 'default_options' in kwargs and isinstance(kwargs['default_options'], str):
+ func_kwargs['default_options'] = listify(kwargs['default_options'])
+ self.interpreter.do_subproject(subp_name, 'meson', func_kwargs)
+ return self._get_subproject_dep(subp_name, varname, kwargs)
+
+ def _get_subproject(self, subp_name: str) -> T.Optional[SubprojectHolder]:
+ sub = self.interpreter.subprojects.get(subp_name)
+ if sub and sub.found():
+ return sub
+ return None
+
+ def _get_subproject_dep(self, subp_name: str, varname: str, kwargs: TYPE_nkwargs) -> T.Optional[Dependency]:
+ # Verify the subproject is found
+ subproject = self._get_subproject(subp_name)
+ if not subproject:
+ mlog.log('Dependency', mlog.bold(self._display_name), 'from subproject',
+ mlog.bold(subp_name), 'found:', mlog.red('NO'),
+ mlog.blue('(subproject failed to configure)'))
+ return None
+
+ # The subproject has been configured. If for any reason the dependency
+        # cannot be found in this subproject we have to return a not-found object
+ # instead of None, because we don't want to continue the lookup on the
+ # system.
+
+        # Check if the subproject has overridden at least one of the names we got.
+ cached_dep = None
+ for name in self.names:
+ cached_dep = self._get_cached_dep(name, kwargs)
+ if cached_dep:
+ break
+
+ # If we have cached_dep we did all the checks and logging already in
+ # self._get_cached_dep().
+ if cached_dep:
+ self._verify_fallback_consistency(cached_dep)
+ return cached_dep
+
+ # Legacy: Use the variable name if provided instead of relying on the
+ # subproject to override one of our dependency names
+ if not varname:
+            # If no variable name is specified, check whether the wrap file has one.
+            # If it does, prefer it, because the subproject most likely does not
+            # call meson.override_dependency().
+ for name in self.names:
+ varname = self.wrap_resolver.get_varname(subp_name, name)
+ if varname:
+ break
+ if not varname:
+ mlog.warning(f'Subproject {subp_name!r} did not override {self._display_name!r} dependency and no variable name specified')
+ mlog.log('Dependency', mlog.bold(self._display_name), 'from subproject',
+ mlog.bold(subproject.subdir), 'found:', mlog.red('NO'))
+ return self._notfound_dependency()
+
+ var_dep = self._get_subproject_variable(subproject, varname) or self._notfound_dependency()
+ if not var_dep.found():
+ mlog.log('Dependency', mlog.bold(self._display_name), 'from subproject',
+ mlog.bold(subproject.subdir), 'found:', mlog.red('NO'))
+ return var_dep
+
+ wanted = stringlistify(kwargs.get('version', []))
+ found = var_dep.get_version()
+ if not self._check_version(wanted, found):
+ mlog.log('Dependency', mlog.bold(self._display_name), 'from subproject',
+ mlog.bold(subproject.subdir), 'found:', mlog.red('NO'),
+ 'found', mlog.normal_cyan(found), 'but need:',
+ mlog.bold(', '.join([f"'{e}'" for e in wanted])))
+ return self._notfound_dependency()
+
+ mlog.log('Dependency', mlog.bold(self._display_name), 'from subproject',
+ mlog.bold(subproject.subdir), 'found:', mlog.green('YES'),
+ mlog.normal_cyan(found) if found else None)
+ return var_dep
+
+ def _get_cached_dep(self, name: str, kwargs: TYPE_nkwargs) -> T.Optional[Dependency]:
+        # Unlike other methods, this one returns a not-found dependency instead
+        # of None when the dependency is cached as not-found or the cached
+        # version does not match. In those cases we don't want to continue with
+        # other candidates.
+ for_machine = self.interpreter.machine_from_native_kwarg(kwargs)
+ identifier = dependencies.get_dep_identifier(name, kwargs)
+ wanted_vers = stringlistify(kwargs.get('version', []))
+
+ override = self.build.dependency_overrides[for_machine].get(identifier)
+ if override:
+ info = [mlog.blue('(overridden)' if override.explicit else '(cached)')]
+ cached_dep = override.dep
+            # We don't implicitly override not-found dependencies, but the user could
+ # have explicitly called meson.override_dependency() with a not-found
+ # dep.
+ if not cached_dep.found():
+ mlog.log('Dependency', mlog.bold(self._display_name),
+ 'found:', mlog.red('NO'), *info)
+ return cached_dep
+ else:
+ info = [mlog.blue('(cached)')]
+ cached_dep = self.coredata.deps[for_machine].get(identifier)
+
+ if cached_dep:
+ found_vers = cached_dep.get_version()
+ if not self._check_version(wanted_vers, found_vers):
+ if not override:
+ # We cached this dependency on disk from a previous run,
+                    # but it could have been updated on the system in the meantime.
+ return None
+ mlog.log('Dependency', mlog.bold(name),
+ 'found:', mlog.red('NO'),
+ 'found', mlog.normal_cyan(found_vers), 'but need:',
+ mlog.bold(', '.join([f"'{e}'" for e in wanted_vers])),
+ *info)
+ return self._notfound_dependency()
+ if found_vers:
+ info = [mlog.normal_cyan(found_vers), *info]
+ mlog.log('Dependency', mlog.bold(self._display_name),
+ 'found:', mlog.green('YES'), *info)
+ return cached_dep
+ return None
+
+ def _get_subproject_variable(self, subproject: SubprojectHolder, varname: str) -> T.Optional[Dependency]:
+ try:
+ var_dep = subproject.get_variable_method([varname], {})
+ except InvalidArguments:
+ var_dep = None
+ if not isinstance(var_dep, Dependency):
+ mlog.warning(f'Variable {varname!r} in the subproject {subproject.subdir!r} is',
+ 'not found' if var_dep is None else 'not a dependency object')
+ return None
+ return var_dep
+
+ def _verify_fallback_consistency(self, cached_dep: Dependency) -> None:
+ subp_name = self.subproject_name
+ varname = self.subproject_varname
+ subproject = self._get_subproject(subp_name)
+ if subproject and varname:
+ var_dep = self._get_subproject_variable(subproject, varname)
+ if var_dep and cached_dep.found() and var_dep != cached_dep:
+ mlog.warning(f'Inconsistency: Subproject has overridden the dependency with another variable than {varname!r}')
+
+ def _handle_featurenew_dependencies(self, name: str) -> None:
+ 'Do a feature check on dependencies used by this subproject'
+ if name == 'mpi':
+ FeatureNew.single_use('MPI Dependency', '0.42.0', self.subproject)
+ elif name == 'pcap':
+ FeatureNew.single_use('Pcap Dependency', '0.42.0', self.subproject)
+ elif name == 'vulkan':
+ FeatureNew.single_use('Vulkan Dependency', '0.42.0', self.subproject)
+ elif name == 'libwmf':
+ FeatureNew.single_use('LibWMF Dependency', '0.44.0', self.subproject)
+ elif name == 'openmp':
+ FeatureNew.single_use('OpenMP Dependency', '0.46.0', self.subproject)
+
+ def _notfound_dependency(self) -> NotFoundDependency:
+ return NotFoundDependency(self.names[0] if self.names else '', self.environment)
+
+ @staticmethod
+ def _check_version(wanted: T.List[str], found: str) -> bool:
+ if not wanted:
+ return True
+ return not (found == 'undefined' or not version_compare_many(found, wanted)[0])
+
+ def _get_candidates(self) -> T.List[T.Tuple[T.Callable[[TYPE_nkwargs, TYPE_nvar, TYPE_nkwargs], T.Optional[Dependency]], TYPE_nvar, TYPE_nkwargs]]:
+ candidates = []
+ # 1. check if any of the names is cached already.
+ for name in self.names:
+ candidates.append((self._do_dependency_cache, [name], {}))
+ # 2. check if the subproject fallback has already been configured.
+ if self.subproject_name:
+ candidates.append((self._do_existing_subproject, [self.subproject_name], self.subproject_kwargs))
+ # 3. check external dependency if we are not forced to use subproject
+ if not self.forcefallback or not self.subproject_name:
+ for name in self.names:
+ candidates.append((self._do_dependency, [name], {}))
+ # 4. configure the subproject
+ if self.subproject_name:
+ candidates.append((self._do_subproject, [self.subproject_name], self.subproject_kwargs))
+ return candidates
+
+ def lookup(self, kwargs: TYPE_nkwargs, force_fallback: bool = False) -> Dependency:
+ mods = extract_as_list(kwargs, 'modules')
+ if mods:
+ self._display_name += ' (modules: {})'.format(', '.join(str(i) for i in mods))
+
+ disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
+ if disabled:
+ mlog.log('Dependency', mlog.bold(self._display_name), 'skipped: feature', mlog.bold(feature), 'disabled')
+ return self._notfound_dependency()
+
+ # Check if usage of the subproject fallback is forced
+ wrap_mode = self.coredata.get_option(OptionKey('wrap_mode'))
+ assert isinstance(wrap_mode, WrapMode), 'for mypy'
+ force_fallback_for = self.coredata.get_option(OptionKey('force_fallback_for'))
+ assert isinstance(force_fallback_for, list), 'for mypy'
+ self.nofallback = wrap_mode == WrapMode.nofallback
+ self.forcefallback = (force_fallback or
+ wrap_mode == WrapMode.forcefallback or
+ any(name in force_fallback_for for name in self.names) or
+ self.subproject_name in force_fallback_for)
+
+ # Add an implicit subproject fallback if none has been set explicitly,
+ # unless implicit fallback is not allowed.
+ # Legacy: self.allow_fallback can be None when that kwarg is not defined
+ # in dependency('name'). In that case we don't want to use implicit
+        # fallback when required is false, because the user will typically fall
+        # back manually, using cc.find_library() for example.
+ if not self.subproject_name and self.allow_fallback is not False:
+ for name in self.names:
+ subp_name, varname = self.wrap_resolver.find_dep_provider(name)
+ if subp_name:
+ self.forcefallback |= subp_name in force_fallback_for
+ if self.forcefallback or self.allow_fallback is True or required or self._get_subproject(subp_name):
+ self._subproject_impl(subp_name, varname)
+ break
+
+ candidates = self._get_candidates()
+
+ # writing just "dependency('')" is an error, because it can only fail
+ if not candidates and required:
+ raise InvalidArguments('Dependency is required but has no candidates.')
+
+        # Try all candidates; only the last one is really required.
+ last = len(candidates) - 1
+ for i, item in enumerate(candidates):
+ func, func_args, func_kwargs = item
+ func_kwargs['required'] = required and (i == last)
+ kwargs['required'] = required and (i == last)
+ dep = func(kwargs, func_args, func_kwargs)
+ if dep and dep.found():
+ # Override this dependency to have consistent results in subsequent
+ # dependency lookups.
+ for name in self.names:
+ for_machine = self.interpreter.machine_from_native_kwarg(kwargs)
+ identifier = dependencies.get_dep_identifier(name, kwargs)
+ if identifier not in self.build.dependency_overrides[for_machine]:
+ self.build.dependency_overrides[for_machine][identifier] = \
+ build.DependencyOverride(dep, self.interpreter.current_node, explicit=False)
+ return dep
+ elif required and (dep or i == last):
+ # This was the last candidate or the dependency has been cached
+ # as not-found, or cached dependency version does not match,
+ # otherwise func() would have returned None instead.
+ raise DependencyException(f'Dependency {self._display_name!r} is required but not found.')
+ elif dep:
+ # Same as above, but the dependency is not required.
+ return dep
+ return self._notfound_dependency()
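
Stepping outside the patch for a moment: the candidate order built by _get_candidates() and consumed by lookup() reduces to "cached dependency, already-configured subproject, system dependency, then configure the fallback subproject", and only the final candidate is allowed to hard-fail. A simplified, self-contained sketch (all names here are illustrative, not Meson API):

    import typing as T

    Candidate = T.Callable[[], T.Optional[str]]  # returns a found "dependency" or None

    def lookup(candidates: T.List[Candidate], required: bool) -> T.Optional[str]:
        # Only the last candidate is treated as required, mirroring
        # DependencyFallbacksHolder.lookup().
        last = len(candidates) - 1
        for i, func in enumerate(candidates):
            dep = func()
            if dep is not None:
                return dep
            if required and i == last:
                raise RuntimeError('dependency is required but not found')
        return None

    print(lookup([lambda: None, lambda: 'from-fallback-subproject'], required=True))
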
diff --git a/mesonbuild/interpreter/interpreter.py b/mesonbuild/interpreter/interpreter.py
new file mode 100644
index 0000000..a21c809
--- /dev/null
+++ b/mesonbuild/interpreter/interpreter.py
@@ -0,0 +1,3275 @@
+# Copyright 2012-2021 The Meson development team
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from .. import mparser
+from .. import environment
+from .. import coredata
+from .. import dependencies
+from .. import mlog
+from .. import build
+from .. import optinterpreter
+from .. import compilers
+from .. import envconfig
+from ..wrap import wrap, WrapMode
+from .. import mesonlib
+from ..mesonlib import (MesonBugException, HoldableObject, FileMode, MachineChoice, OptionKey,
+ listify, extract_as_list, has_path_sep, PerMachine)
+from ..programs import ExternalProgram, NonExistingExternalProgram
+from ..dependencies import Dependency
+from ..depfile import DepFile
+from ..interpreterbase import ContainerTypeInfo, InterpreterBase, KwargInfo, typed_kwargs, typed_pos_args
+from ..interpreterbase import noPosargs, noKwargs, permittedKwargs, noArgsFlattening, noSecondLevelHolderResolving, unholder_return
+from ..interpreterbase import InterpreterException, InvalidArguments, InvalidCode, SubdirDoneRequest
+from ..interpreterbase import Disabler, disablerIfNotFound
+from ..interpreterbase import FeatureNew, FeatureDeprecated, FeatureNewKwargs, FeatureDeprecatedKwargs
+from ..interpreterbase import ObjectHolder
+from ..modules import ExtensionModule, ModuleObject, MutableModuleObject, NewExtensionModule, NotFoundExtensionModule
+from ..cmake import CMakeInterpreter
+from ..backend.backends import ExecutableSerialisation
+
+from . import interpreterobjects as OBJ
+from . import compiler as compilerOBJ
+from .mesonmain import MesonMain
+from .dependencyfallbacks import DependencyFallbacksHolder
+from .interpreterobjects import (
+ SubprojectHolder,
+ Test,
+ RunProcess,
+ extract_required_kwarg,
+ extract_search_dirs,
+ NullSubprojectInterpreter,
+)
+from .type_checking import (
+ COMMAND_KW,
+ CT_BUILD_ALWAYS,
+ CT_BUILD_ALWAYS_STALE,
+ CT_BUILD_BY_DEFAULT,
+ CT_INPUT_KW,
+ CT_INSTALL_DIR_KW,
+ MULTI_OUTPUT_KW,
+ OUTPUT_KW,
+ DEFAULT_OPTIONS,
+ DEPENDENCIES_KW,
+ DEPENDS_KW,
+ DEPEND_FILES_KW,
+ DEPFILE_KW,
+ DISABLER_KW,
+ D_MODULE_VERSIONS_KW,
+ ENV_KW,
+ ENV_METHOD_KW,
+ ENV_SEPARATOR_KW,
+ INCLUDE_DIRECTORIES,
+ INSTALL_KW,
+ INSTALL_DIR_KW,
+ INSTALL_MODE_KW,
+ LINK_WITH_KW,
+ LINK_WHOLE_KW,
+ CT_INSTALL_TAG_KW,
+ INSTALL_TAG_KW,
+ LANGUAGE_KW,
+ NATIVE_KW,
+ PRESERVE_PATH_KW,
+ REQUIRED_KW,
+ SOURCES_KW,
+ VARIABLES_KW,
+ TEST_KWS,
+ NoneType,
+ in_set_validator,
+ env_convertor_with_method
+)
+from . import primitives as P_OBJ
+
+from pathlib import Path
+from enum import Enum
+import os
+import shutil
+import uuid
+import re
+import stat
+import collections
+import typing as T
+import textwrap
+import importlib
+import copy
+
+if T.TYPE_CHECKING:
+ import argparse
+
+ from typing_extensions import Literal
+
+ from . import kwargs as kwtypes
+ from ..backend.backends import Backend
+ from ..interpreterbase.baseobjects import InterpreterObject, TYPE_var, TYPE_kwargs
+ from ..programs import OverrideProgram
+
+ # Input source types passed to Targets
+ SourceInputs = T.Union[mesonlib.File, build.GeneratedList, build.BuildTarget, build.BothLibraries,
+ build.CustomTargetIndex, build.CustomTarget, build.GeneratedList,
+ build.ExtractedObjects, str]
+ # Input source types passed to the build.Target classes
+ SourceOutputs = T.Union[mesonlib.File, build.GeneratedList,
+ build.BuildTarget, build.CustomTargetIndex, build.CustomTarget,
+ build.ExtractedObjects, build.GeneratedList, build.StructuredSources]
+
+
+def _project_version_validator(value: T.Union[T.List, str, mesonlib.File, None]) -> T.Optional[str]:
+ if isinstance(value, list):
+ if len(value) != 1:
+ return 'when passed as array must have a length of 1'
+ elif not isinstance(value[0], mesonlib.File):
+ return 'when passed as array must contain a File'
+ return None
+
+
+def stringifyUserArguments(args: T.List[T.Any], quote: bool = False) -> str:
+ if isinstance(args, list):
+ return '[%s]' % ', '.join([stringifyUserArguments(x, True) for x in args])
+ elif isinstance(args, dict):
+ return '{%s}' % ', '.join(['{} : {}'.format(stringifyUserArguments(k, True), stringifyUserArguments(v, True)) for k, v in args.items()])
+ elif isinstance(args, bool):
+ return 'true' if args else 'false'
+ elif isinstance(args, int):
+ return str(args)
+ elif isinstance(args, str):
+ return f"'{args}'" if quote else args
+ raise InvalidArguments('Function accepts only strings, integers, bools, lists, dictionaries and lists thereof.')
+
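
A quick illustration (not part of the patch) of the renderings produced by stringifyUserArguments() as defined above; the expected result is shown in the trailing comments:

    stringifyUserArguments('hello')              # hello
    stringifyUserArguments('hello', quote=True)  # 'hello'
    stringifyUserArguments([1, True, 'x'])       # [1, true, 'x']
    stringifyUserArguments({'opt': ['a', 'b']})  # {'opt' : ['a', 'b']}
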
+class Summary:
+ def __init__(self, project_name: str, project_version: str):
+ self.project_name = project_name
+ self.project_version = project_version
+ self.sections = collections.defaultdict(dict)
+ self.max_key_len = 0
+
+ def add_section(self, section: str, values: T.Dict[str, T.Any], bool_yn: bool,
+ list_sep: T.Optional[str], subproject: str) -> None:
+ for k, v in values.items():
+ if k in self.sections[section]:
+                raise InterpreterException(f'Summary section {section!r} already has key {k!r}')
+ formatted_values = []
+ for i in listify(v):
+ if isinstance(i, bool) and bool_yn:
+ formatted_values.append(mlog.green('YES') if i else mlog.red('NO'))
+ elif isinstance(i, (str, int, bool)):
+ formatted_values.append(str(i))
+ elif isinstance(i, (ExternalProgram, Dependency)):
+ FeatureNew.single_use('dependency or external program in summary', '0.57.0', subproject)
+ formatted_values.append(i.summary_value())
+ elif isinstance(i, Disabler):
+ FeatureNew.single_use('disabler in summary', '0.64.0', subproject)
+ formatted_values.append(mlog.red('NO'))
+ elif isinstance(i, coredata.UserOption):
+ FeatureNew.single_use('feature option in summary', '0.58.0', subproject)
+ formatted_values.append(i.printable_value())
+ else:
+ m = 'Summary value in section {!r}, key {!r}, must be string, integer, boolean, dependency, disabler, or external program'
+ raise InterpreterException(m.format(section, k))
+ self.sections[section][k] = (formatted_values, list_sep)
+ self.max_key_len = max(self.max_key_len, len(k))
+
+ def dump(self):
+ mlog.log(self.project_name, mlog.normal_cyan(self.project_version))
+ for section, values in self.sections.items():
+ mlog.log('') # newline
+ if section:
+ mlog.log(' ', mlog.bold(section))
+ for k, v in values.items():
+ v, list_sep = v
+ padding = self.max_key_len - len(k)
+ end = ' ' if v else ''
+ mlog.log(' ' * 3, k + ' ' * padding + ':', end=end)
+ indent = self.max_key_len + 6
+ self.dump_value(v, list_sep, indent)
+ mlog.log('') # newline
+
+ def dump_value(self, arr, list_sep, indent):
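+        # Layout sketch: with no list_sep each value is printed on its own
+        # line, aligned to the summary key column; otherwise values are
+        # joined with list_sep and greedily wrapped to the terminal width,
+        # continuing on indented lines.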
+ lines_sep = '\n' + ' ' * indent
+ if list_sep is None:
+ mlog.log(*arr, sep=lines_sep)
+ return
+ max_len = shutil.get_terminal_size().columns
+ line = []
+ line_len = indent
+ lines_sep = list_sep.rstrip() + lines_sep
+ for v in arr:
+ v_len = len(v) + len(list_sep)
+ if line and line_len + v_len > max_len:
+ mlog.log(*line, sep=list_sep, end=lines_sep)
+ line_len = indent
+ line = []
+ line.append(v)
+ line_len += v_len
+ mlog.log(*line, sep=list_sep)
+
+known_library_kwargs = (
+ build.known_shlib_kwargs |
+ build.known_stlib_kwargs
+)
+
+known_build_target_kwargs = (
+ known_library_kwargs |
+ build.known_exe_kwargs |
+ build.known_jar_kwargs |
+ {'target_type'}
+)
+
+class InterpreterRuleRelaxation(Enum):
+ ''' Defines specific relaxations of the Meson rules.
+
+ This is intended to be used for automatically converted
+ projects (CMake subprojects, build system mixing) that
+ generate a Meson AST via introspection, etc.
+ '''
+
+ ALLOW_BUILD_DIR_FILE_REFFERENCES = 1
+
+permitted_dependency_kwargs = {
+ 'allow_fallback',
+ 'cmake_args',
+ 'cmake_module_path',
+ 'cmake_package_version',
+ 'components',
+ 'default_options',
+ 'fallback',
+ 'include_type',
+ 'language',
+ 'main',
+ 'method',
+ 'modules',
+ 'native',
+ 'not_found_message',
+ 'optional_modules',
+ 'private_headers',
+ 'required',
+ 'static',
+ 'version',
+}
+
+implicit_check_false_warning = """You should add the boolean check kwarg to the run_command call.
+ It currently defaults to false,
+ but it will default to true in future releases of meson.
+ See also: https://github.com/mesonbuild/meson/issues/9300"""
+class Interpreter(InterpreterBase, HoldableObject):
+
+ def __init__(
+ self,
+ _build: build.Build,
+ backend: T.Optional[Backend] = None,
+ subproject: str = '',
+ subdir: str = '',
+ subproject_dir: str = 'subprojects',
+ default_project_options: T.Optional[T.Dict[OptionKey, str]] = None,
+ mock: bool = False,
+ ast: T.Optional[mparser.CodeBlockNode] = None,
+ is_translated: bool = False,
+ relaxations: T.Optional[T.Set[InterpreterRuleRelaxation]] = None,
+ user_defined_options: T.Optional['argparse.Namespace'] = None,
+ ) -> None:
+ super().__init__(_build.environment.get_source_dir(), subdir, subproject)
+ self.active_projectname = ''
+ self.build = _build
+ self.environment = self.build.environment
+ self.coredata = self.environment.get_coredata()
+ self.backend = backend
+ self.summary: T.Dict[str, 'Summary'] = {}
+ self.modules: T.Dict[str, NewExtensionModule] = {}
+ # Subproject directory is usually the name of the subproject, but can
+ # be different for dependencies provided by wrap files.
+ self.subproject_directory_name = subdir.split(os.path.sep)[-1]
+ self.subproject_dir = subproject_dir
+ self.option_file = os.path.join(self.source_root, self.subdir, 'meson_options.txt')
+ self.relaxations = relaxations or set()
+ if not mock and ast is None:
+ self.load_root_meson_file()
+ self.sanity_check_ast()
+ elif ast is not None:
+ self.ast = ast
+ self.sanity_check_ast()
+ self.builtin.update({'meson': MesonMain(self.build, self)})
+ self.generators: T.List[build.Generator] = []
+ self.processed_buildfiles = set() # type: T.Set[str]
+ self.project_args_frozen = False
+ self.global_args_frozen = False # implies self.project_args_frozen
+ self.subprojects: T.Dict[str, SubprojectHolder] = {}
+ self.subproject_stack: T.List[str] = []
+ self.configure_file_outputs: T.Dict[str, int] = {}
+ # Passed from the outside, only used in subprojects.
+ if default_project_options:
+ self.default_project_options = default_project_options.copy()
+ else:
+ self.default_project_options = {}
+ self.project_default_options: T.Dict[OptionKey, str] = {}
+ self.build_func_dict()
+ self.build_holder_map()
+ self.user_defined_options = user_defined_options
+ self.compilers: PerMachine[T.Dict[str, 'compilers.Compiler']] = PerMachine({}, {})
+
+ # build_def_files needs to be defined before parse_project is called
+ #
+        # For non-meson subprojects, we'll be using the ast. Even if the
+        # generated meson.build file exists we don't want to add a dependency
+        # on it, since it is autogenerated from the actual build files and is
+        # only for reference.
+ self.build_def_files: mesonlib.OrderedSet[str] = mesonlib.OrderedSet()
+ build_filename = os.path.join(self.subdir, environment.build_filename)
+ if not is_translated:
+ self.build_def_files.add(build_filename)
+ if not mock:
+ self.parse_project()
+ self._redetect_machines()
+
+ def __getnewargs_ex__(self) -> T.Tuple[T.Tuple[object], T.Dict[str, object]]:
+ raise MesonBugException('This class is unpicklable')
+
+ def _redetect_machines(self) -> None:
+ # Re-initialize machine descriptions. We can do a better job now because we
+ # have the compilers needed to gain more knowledge, so wipe out old
+ # inference and start over.
+ machines = self.build.environment.machines.miss_defaulting()
+ machines.build = environment.detect_machine_info(self.coredata.compilers.build)
+ self.build.environment.machines = machines.default_missing()
+ assert self.build.environment.machines.build.cpu is not None
+ assert self.build.environment.machines.host.cpu is not None
+ assert self.build.environment.machines.target.cpu is not None
+
+ self.builtin['build_machine'] = \
+ OBJ.MachineHolder(self.build.environment.machines.build, self)
+ self.builtin['host_machine'] = \
+ OBJ.MachineHolder(self.build.environment.machines.host, self)
+ self.builtin['target_machine'] = \
+ OBJ.MachineHolder(self.build.environment.machines.target, self)
+
+ def build_func_dict(self) -> None:
+ self.funcs.update({'add_global_arguments': self.func_add_global_arguments,
+ 'add_global_link_arguments': self.func_add_global_link_arguments,
+ 'add_languages': self.func_add_languages,
+ 'add_project_arguments': self.func_add_project_arguments,
+ 'add_project_dependencies': self.func_add_project_dependencies,
+ 'add_project_link_arguments': self.func_add_project_link_arguments,
+ 'add_test_setup': self.func_add_test_setup,
+ 'alias_target': self.func_alias_target,
+ 'assert': self.func_assert,
+ 'benchmark': self.func_benchmark,
+ 'both_libraries': self.func_both_lib,
+ 'build_target': self.func_build_target,
+ 'configuration_data': self.func_configuration_data,
+ 'configure_file': self.func_configure_file,
+ 'custom_target': self.func_custom_target,
+ 'debug': self.func_debug,
+ 'declare_dependency': self.func_declare_dependency,
+ 'dependency': self.func_dependency,
+ 'disabler': self.func_disabler,
+ 'environment': self.func_environment,
+ 'error': self.func_error,
+ 'executable': self.func_executable,
+ 'files': self.func_files,
+ 'find_library': self.func_find_library,
+ 'find_program': self.func_find_program,
+ 'generator': self.func_generator,
+ 'get_option': self.func_get_option,
+ 'get_variable': self.func_get_variable,
+ 'gettext': self.func_gettext,
+ 'import': self.func_import,
+ 'include_directories': self.func_include_directories,
+ 'install_data': self.func_install_data,
+ 'install_emptydir': self.func_install_emptydir,
+ 'install_headers': self.func_install_headers,
+ 'install_man': self.func_install_man,
+ 'install_subdir': self.func_install_subdir,
+ 'install_symlink': self.func_install_symlink,
+ 'is_disabler': self.func_is_disabler,
+ 'is_variable': self.func_is_variable,
+ 'jar': self.func_jar,
+ 'join_paths': self.func_join_paths,
+ 'library': self.func_library,
+ 'message': self.func_message,
+ 'option': self.func_option,
+ 'project': self.func_project,
+ 'range': self.func_range,
+ 'run_command': self.func_run_command,
+ 'run_target': self.func_run_target,
+ 'set_variable': self.func_set_variable,
+ 'structured_sources': self.func_structured_sources,
+ 'subdir': self.func_subdir,
+ 'shared_library': self.func_shared_lib,
+ 'shared_module': self.func_shared_module,
+ 'static_library': self.func_static_lib,
+ 'subdir_done': self.func_subdir_done,
+ 'subproject': self.func_subproject,
+ 'summary': self.func_summary,
+ 'test': self.func_test,
+ 'unset_variable': self.func_unset_variable,
+ 'vcs_tag': self.func_vcs_tag,
+ 'warning': self.func_warning,
+ })
+ if 'MESON_UNIT_TEST' in os.environ:
+ self.funcs.update({'exception': self.func_exception})
+
+ def build_holder_map(self) -> None:
+ '''
+ Build a mapping of `HoldableObject` types to their corresponding
+ `ObjectHolder`s. This mapping is used in `InterpreterBase` to automatically
+ holderify all returned values from methods and functions.
+ '''
+ self.holder_map.update({
+ # Primitives
+ list: P_OBJ.ArrayHolder,
+ dict: P_OBJ.DictHolder,
+ int: P_OBJ.IntegerHolder,
+ bool: P_OBJ.BooleanHolder,
+ str: P_OBJ.StringHolder,
+ P_OBJ.MesonVersionString: P_OBJ.MesonVersionStringHolder,
+ P_OBJ.DependencyVariableString: P_OBJ.DependencyVariableStringHolder,
+ P_OBJ.OptionString: P_OBJ.OptionStringHolder,
+
+ # Meson types
+ mesonlib.File: OBJ.FileHolder,
+ build.SharedLibrary: OBJ.SharedLibraryHolder,
+ build.StaticLibrary: OBJ.StaticLibraryHolder,
+ build.BothLibraries: OBJ.BothLibrariesHolder,
+ build.SharedModule: OBJ.SharedModuleHolder,
+ build.Executable: OBJ.ExecutableHolder,
+ build.Jar: OBJ.JarHolder,
+ build.CustomTarget: OBJ.CustomTargetHolder,
+ build.CustomTargetIndex: OBJ.CustomTargetIndexHolder,
+ build.Generator: OBJ.GeneratorHolder,
+ build.GeneratedList: OBJ.GeneratedListHolder,
+ build.ExtractedObjects: OBJ.GeneratedObjectsHolder,
+ build.RunTarget: OBJ.RunTargetHolder,
+ build.AliasTarget: OBJ.AliasTargetHolder,
+ build.Headers: OBJ.HeadersHolder,
+ build.Man: OBJ.ManHolder,
+ build.EmptyDir: OBJ.EmptyDirHolder,
+ build.Data: OBJ.DataHolder,
+ build.SymlinkData: OBJ.SymlinkDataHolder,
+ build.InstallDir: OBJ.InstallDirHolder,
+ build.IncludeDirs: OBJ.IncludeDirsHolder,
+ build.EnvironmentVariables: OBJ.EnvironmentVariablesHolder,
+ build.StructuredSources: OBJ.StructuredSourcesHolder,
+ compilers.RunResult: compilerOBJ.TryRunResultHolder,
+ dependencies.ExternalLibrary: OBJ.ExternalLibraryHolder,
+ coredata.UserFeatureOption: OBJ.FeatureOptionHolder,
+ envconfig.MachineInfo: OBJ.MachineHolder,
+ build.ConfigurationData: OBJ.ConfigurationDataHolder,
+ })
+
+ '''
+ Build a mapping of `HoldableObject` base classes to their
+ corresponding `ObjectHolder`s. The difference to `self.holder_map`
+ is that the keys here define an upper bound instead of requiring an
+ exact match.
+
+ The mappings defined here are only used when there was no direct hit
+ found in `self.holder_map`.
+ '''
+ self.bound_holder_map.update({
+ dependencies.Dependency: OBJ.DependencyHolder,
+ ExternalProgram: OBJ.ExternalProgramHolder,
+ compilers.Compiler: compilerOBJ.CompilerHolder,
+ ModuleObject: OBJ.ModuleObjectHolder,
+ MutableModuleObject: OBJ.MutableModuleObjectHolder,
+ })
+
+ def append_holder_map(self, held_type: T.Type[mesonlib.HoldableObject], holder_type: T.Type[ObjectHolder]) -> None:
+ '''
+ Adds one additional mapping to the `holder_map`.
+
+ The intended use for this function is in the `initialize` method of
+ modules to register custom object holders.
+ '''
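+        # For instance, a hypothetical module could call, from its
+        # initialize():
+        #     interpreter.append_holder_map(MyHeldType, MyHeldTypeHolder)
+        # so that MyHeldType values returned from its methods are holderified
+        # automatically (names here are illustrative only).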
+ self.holder_map.update({
+ held_type: holder_type
+ })
+
+ def process_new_values(self, invalues: T.List[T.Union[TYPE_var, ExecutableSerialisation]]) -> None:
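+        # Register values returned from module methods with the build:
+        # targets are added as targets, ExecutableSerialisation objects
+        # become install scripts, Data/SymlinkData/InstallDir/Test objects
+        # are appended to the corresponding build lists, and plain values
+        # are ignored.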
+ invalues = listify(invalues)
+ for v in invalues:
+ if isinstance(v, ObjectHolder):
+ raise InterpreterException('Modules must not return ObjectHolders')
+ if isinstance(v, (build.BuildTarget, build.CustomTarget, build.RunTarget)):
+ self.add_target(v.name, v)
+ elif isinstance(v, list):
+ self.process_new_values(v)
+ elif isinstance(v, ExecutableSerialisation):
+ v.subproject = self.subproject
+ self.build.install_scripts.append(v)
+ elif isinstance(v, build.Data):
+ self.build.data.append(v)
+ elif isinstance(v, build.SymlinkData):
+ self.build.symlinks.append(v)
+ elif isinstance(v, dependencies.InternalDependency):
+ # FIXME: This is special cased and not ideal:
+ # The first source is our new VapiTarget, the rest are deps
+ self.process_new_values(v.sources[0])
+ elif isinstance(v, build.InstallDir):
+ self.build.install_dirs.append(v)
+ elif isinstance(v, Test):
+ self.build.tests.append(v)
+ elif isinstance(v, (int, str, bool, Disabler, ObjectHolder, build.GeneratedList,
+ ExternalProgram, build.ConfigurationData)):
+ pass
+ else:
+ raise InterpreterException(f'Module returned a value of unknown type {v!r}.')
+
+ def get_build_def_files(self) -> mesonlib.OrderedSet[str]:
+ return self.build_def_files
+
+ def add_build_def_file(self, f: mesonlib.FileOrString) -> None:
+ # Use relative path for files within source directory, and absolute path
+        # for system files. Skip files within the build directory. Also skip
+        # files that are not regular files (e.g. /dev/stdout). Normalize the
+        # path to avoid duplicates, this
+ if isinstance(f, mesonlib.File):
+ if f.is_built:
+ return
+ f = os.path.normpath(f.relative_name())
+ elif os.path.isfile(f) and not f.startswith('/dev'):
+ srcdir = Path(self.environment.get_source_dir())
+ builddir = Path(self.environment.get_build_dir())
+ try:
+ f_ = Path(f).resolve()
+ except OSError:
+ f_ = Path(f)
+ s = f_.stat()
+ if (hasattr(s, 'st_file_attributes') and
+ s.st_file_attributes & stat.FILE_ATTRIBUTE_REPARSE_POINT != 0 and
+ s.st_reparse_tag == stat.IO_REPARSE_TAG_APPEXECLINK):
+ # This is a Windows Store link which we can't
+ # resolve, so just do our best otherwise.
+ f_ = f_.parent.resolve() / f_.name
+ else:
+ raise
+ if builddir in f_.parents:
+ return
+ if srcdir in f_.parents:
+ f_ = f_.relative_to(srcdir)
+ f = str(f_)
+ else:
+ return
+ if f not in self.build_def_files:
+ self.build_def_files.add(f)
+
+ def get_variables(self) -> T.Dict[str, InterpreterObject]:
+ return self.variables
+
+ def check_stdlibs(self) -> None:
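+        # For every configured language, read the per-language stdlib
+        # property from the machine file (e.g. 'c_stdlib', if set) and
+        # resolve it as a forced-fallback dependency named '<lang>_stdlib',
+        # storing the result in build.stdlibs for the relevant machine.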
+ machine_choices = [MachineChoice.HOST]
+ if self.coredata.is_cross_build():
+ machine_choices.append(MachineChoice.BUILD)
+ for for_machine in machine_choices:
+ props = self.build.environment.properties[for_machine]
+ for l in self.coredata.compilers[for_machine].keys():
+ try:
+ di = mesonlib.stringlistify(props.get_stdlib(l))
+ except KeyError:
+ continue
+ if len(di) == 1:
+ FeatureNew.single_use('stdlib without variable name', '0.56.0', self.subproject, location=self.current_node)
+ kwargs = {'native': for_machine is MachineChoice.BUILD,
+ }
+ name = l + '_stdlib'
+ df = DependencyFallbacksHolder(self, [name])
+ df.set_fallback(di)
+ dep = df.lookup(kwargs, force_fallback=True)
+ self.build.stdlibs[for_machine][l] = dep
+
+ @typed_pos_args('import', str)
+ @typed_kwargs(
+ 'import',
+ REQUIRED_KW.evolve(since='0.59.0'),
+ DISABLER_KW.evolve(since='0.59.0'),
+ )
+ @disablerIfNotFound
+ def func_import(self, node: mparser.BaseNode, args: T.Tuple[str],
+ kwargs: 'kwtypes.FuncImportModule') -> T.Union[ExtensionModule, NewExtensionModule, NotFoundExtensionModule]:
+ modname = args[0]
+ disabled, required, _ = extract_required_kwarg(kwargs, self.subproject)
+ if disabled:
+ return NotFoundExtensionModule(modname)
+
+ expect_unstable = False
+ # Some tests use "unstable_" instead of "unstable-", and that happens to work because
+ # of implementation details
+ if modname.startswith(('unstable-', 'unstable_')):
+ if modname.startswith('unstable_'):
+ mlog.deprecation(f'Importing unstable modules as "{modname}" instead of "{modname.replace("_", "-", 1)}"',
+ location=node)
+ real_modname = modname[len('unstable') + 1:] # + 1 to handle the - or _
+ expect_unstable = True
+ else:
+ real_modname = modname
+
+ if real_modname in self.modules:
+ return self.modules[real_modname]
+ try:
+ module = importlib.import_module(f'mesonbuild.modules.{real_modname}')
+ except ImportError:
+ if required:
+ raise InvalidArguments(f'Module "{modname}" does not exist')
+ ext_module = NotFoundExtensionModule(real_modname)
+ else:
+ ext_module = module.initialize(self)
+ assert isinstance(ext_module, (ExtensionModule, NewExtensionModule))
+ self.build.modules.append(real_modname)
+ if ext_module.INFO.added:
+ FeatureNew.single_use(f'module {ext_module.INFO.name}', ext_module.INFO.added, self.subproject, location=node)
+ if ext_module.INFO.deprecated:
+ FeatureDeprecated.single_use(f'module {ext_module.INFO.name}', ext_module.INFO.deprecated, self.subproject, location=node)
+ if expect_unstable and not ext_module.INFO.unstable and ext_module.INFO.stabilized is None:
+ raise InvalidArguments(f'Module {ext_module.INFO.name} has never been unstable, remove "unstable-" prefix.')
+ if ext_module.INFO.stabilized is not None:
+ if expect_unstable:
+ FeatureDeprecated.single_use(
+ f'module {ext_module.INFO.name} has been stabilized',
+ ext_module.INFO.stabilized, self.subproject,
+ 'drop "unstable-" prefix from the module name',
+ location=node)
+ else:
+ FeatureNew.single_use(
+ f'module {ext_module.INFO.name} as stable module',
+ ext_module.INFO.stabilized, self.subproject,
+ f'Consider either adding "unstable-" to the module name, or updating the meson required version to ">= {ext_module.INFO.stabilized}"',
+ location=node)
+ elif ext_module.INFO.unstable:
+ if not expect_unstable:
+ if required:
+ raise InvalidArguments(f'Module "{ext_module.INFO.name}" has not been stabilized, and must be imported as unstable-{ext_module.INFO.name}')
+ ext_module = NotFoundExtensionModule(real_modname)
+ else:
+ mlog.warning(f'Module {ext_module.INFO.name} has no backwards or forwards compatibility and might not exist in future releases.', location=node, fatal=False)
+
+ self.modules[real_modname] = ext_module
+ return ext_module
+
+ @typed_pos_args('files', varargs=str)
+ @noKwargs
+ def func_files(self, node: mparser.FunctionNode, args: T.Tuple[T.List[str]], kwargs: 'TYPE_kwargs') -> T.List[mesonlib.File]:
+ return self.source_strings_to_files(args[0])
+
+ @noPosargs
+ @typed_kwargs(
+ 'declare_dependency',
+ KwargInfo('compile_args', ContainerTypeInfo(list, str), listify=True, default=[]),
+ INCLUDE_DIRECTORIES.evolve(name='d_import_dirs', since='0.62.0'),
+ D_MODULE_VERSIONS_KW.evolve(since='0.62.0'),
+ KwargInfo('link_args', ContainerTypeInfo(list, str), listify=True, default=[]),
+ DEPENDENCIES_KW,
+ INCLUDE_DIRECTORIES,
+ LINK_WITH_KW,
+ LINK_WHOLE_KW.evolve(since='0.46.0'),
+ SOURCES_KW,
+ VARIABLES_KW.evolve(since='0.54.0', since_values={list: '0.56.0'}),
+ KwargInfo('version', (str, NoneType)),
+ )
+ def func_declare_dependency(self, node, args, kwargs):
+ deps = kwargs['dependencies']
+ incs = self.extract_incdirs(kwargs)
+ libs = kwargs['link_with']
+ libs_whole = kwargs['link_whole']
+ sources = self.source_strings_to_files(kwargs['sources'])
+ compile_args = kwargs['compile_args']
+ link_args = kwargs['link_args']
+ variables = kwargs['variables']
+ version = kwargs['version']
+ if version is None:
+ version = self.project_version
+ d_module_versions = kwargs['d_module_versions']
+ d_import_dirs = self.extract_incdirs(kwargs, 'd_import_dirs')
+ srcdir = Path(self.environment.source_dir)
+ # convert variables which refer to an -uninstalled.pc style datadir
+ for k, v in variables.items():
+ try:
+ p = Path(v)
+ except ValueError:
+ continue
+ else:
+ if not self.is_subproject() and srcdir / self.subproject_dir in p.parents:
+ continue
+ if p.is_absolute() and p.is_dir() and srcdir / self.root_subdir in [p] + list(Path(os.path.abspath(p)).parents):
+ variables[k] = P_OBJ.DependencyVariableString(v)
+ for d in deps:
+ if not isinstance(d, dependencies.Dependency):
+ raise InterpreterException('Invalid dependency')
+
+ dep = dependencies.InternalDependency(version, incs, compile_args,
+ link_args, libs, libs_whole, sources, deps,
+ variables, d_module_versions, d_import_dirs)
+ return dep
+
+ @typed_pos_args('assert', bool, optargs=[str])
+ @noKwargs
+ def func_assert(self, node: mparser.FunctionNode, args: T.Tuple[bool, T.Optional[str]],
+ kwargs: 'TYPE_kwargs') -> None:
+ value, message = args
+ if message is None:
+ FeatureNew.single_use('assert function without message argument', '0.53.0', self.subproject, location=node)
+
+ if not value:
+ if message is None:
+ from ..ast import AstPrinter
+ printer = AstPrinter()
+ node.args.arguments[0].accept(printer)
+ message = printer.result
+ raise InterpreterException('Assert failed: ' + message)
+
+ def validate_arguments(self, args, argcount, arg_types):
+ if argcount is not None:
+ if argcount != len(args):
+ raise InvalidArguments(f'Expected {argcount} arguments, got {len(args)}.')
+ for actual, wanted in zip(args, arg_types):
+ if wanted is not None:
+ if not isinstance(actual, wanted):
+ raise InvalidArguments('Incorrect argument type.')
+
+ # Executables aren't actually accepted, but we allow them here to allow for
+ # better error messages when overridden
+ @typed_pos_args(
+ 'run_command',
+ (build.Executable, ExternalProgram, compilers.Compiler, mesonlib.File, str),
+ varargs=(build.Executable, ExternalProgram, compilers.Compiler, mesonlib.File, str))
+ @typed_kwargs(
+ 'run_command',
+ KwargInfo('check', (bool, NoneType), since='0.47.0'),
+ KwargInfo('capture', bool, default=True, since='0.47.0'),
+ ENV_KW.evolve(since='0.50.0'),
+ )
+ def func_run_command(self, node: mparser.BaseNode,
+ args: T.Tuple[T.Union[build.Executable, ExternalProgram, compilers.Compiler, mesonlib.File, str],
+ T.List[T.Union[build.Executable, ExternalProgram, compilers.Compiler, mesonlib.File, str]]],
+ kwargs: 'kwtypes.RunCommand') -> RunProcess:
+ return self.run_command_impl(node, args, kwargs)
+
+ def run_command_impl(self,
+ node: mparser.BaseNode,
+ args: T.Tuple[T.Union[build.Executable, ExternalProgram, compilers.Compiler, mesonlib.File, str],
+ T.List[T.Union[build.Executable, ExternalProgram, compilers.Compiler, mesonlib.File, str]]],
+ kwargs: 'kwtypes.RunCommand',
+ in_builddir: bool = False) -> RunProcess:
+ cmd, cargs = args
+ capture = kwargs['capture']
+ env = kwargs['env']
+ srcdir = self.environment.get_source_dir()
+ builddir = self.environment.get_build_dir()
+
+ check = kwargs['check']
+ if check is None:
+ mlog.warning(implicit_check_false_warning, once=True)
+ check = False
+
+ overridden_msg = ('Program {!r} was overridden with the compiled '
+ 'executable {!r} and therefore cannot be used during '
+ 'configuration')
+ expanded_args: T.List[str] = []
+ if isinstance(cmd, build.Executable):
+ for name, exe in self.build.find_overrides.items():
+ if cmd == exe:
+ progname = name
+ break
+ else:
+ raise MesonBugException('cmd was a built executable but not found in overrides table')
+ raise InterpreterException(overridden_msg.format(progname, cmd.description()))
+ if isinstance(cmd, ExternalProgram):
+ if not cmd.found():
+ raise InterpreterException(f'command {cmd.get_name()!r} not found or not executable')
+ elif isinstance(cmd, compilers.Compiler):
+ exelist = cmd.get_exelist()
+ cmd = exelist[0]
+ prog = ExternalProgram(cmd, silent=True)
+ if not prog.found():
+ raise InterpreterException(f'Program {cmd!r} not found or not executable')
+ cmd = prog
+ expanded_args = exelist[1:]
+ else:
+ if isinstance(cmd, mesonlib.File):
+ cmd = cmd.absolute_path(srcdir, builddir)
+ # Prefer scripts in the current source directory
+ search_dir = os.path.join(srcdir, self.subdir)
+ prog = ExternalProgram(cmd, silent=True, search_dir=search_dir)
+ if not prog.found():
+ raise InterpreterException(f'Program or command {cmd!r} not found or not executable')
+ cmd = prog
+ for a in cargs:
+ if isinstance(a, str):
+ expanded_args.append(a)
+ elif isinstance(a, mesonlib.File):
+ expanded_args.append(a.absolute_path(srcdir, builddir))
+ elif isinstance(a, ExternalProgram):
+ expanded_args.append(a.get_path())
+ elif isinstance(a, compilers.Compiler):
+ FeatureNew.single_use('Compiler object as a variadic argument to `run_command`', '0.61.0', self.subproject, location=node)
+ prog = ExternalProgram(a.exelist[0], silent=True)
+ if not prog.found():
+ raise InterpreterException(f'Program {cmd!r} not found or not executable')
+ expanded_args.append(prog.get_path())
+ else:
+ raise InterpreterException(overridden_msg.format(a.name, cmd.description()))
+
+ # If any file that was used as an argument to the command
+ # changes, we must re-run the configuration step.
+ self.add_build_def_file(cmd.get_path())
+ for a in expanded_args:
+ if not os.path.isabs(a):
+ a = os.path.join(builddir if in_builddir else srcdir, self.subdir, a)
+ self.add_build_def_file(a)
+
+ return RunProcess(cmd, expanded_args, env, srcdir, builddir, self.subdir,
+ self.environment.get_build_command() + ['introspect'],
+ in_builddir=in_builddir, check=check, capture=capture)
+
+ def func_gettext(self, nodes, args, kwargs):
+ raise InterpreterException('Gettext() function has been moved to module i18n. Import it and use i18n.gettext() instead')
+
+ def func_option(self, nodes, args, kwargs):
+ raise InterpreterException('Tried to call option() in build description file. All options must be in the option file.')
+
+ @typed_pos_args('subproject', str)
+ @typed_kwargs(
+ 'subproject',
+ REQUIRED_KW,
+ DEFAULT_OPTIONS.evolve(since='0.38.0'),
+ KwargInfo('version', ContainerTypeInfo(list, str), default=[], listify=True),
+ )
+ def func_subproject(self, nodes: mparser.BaseNode, args: T.Tuple[str], kwargs: kwtypes.Subproject) -> SubprojectHolder:
+ kw: kwtypes.DoSubproject = {
+ 'required': kwargs['required'],
+ 'default_options': kwargs['default_options'],
+ 'version': kwargs['version'],
+ 'options': None,
+ 'cmake_options': [],
+ }
+ return self.do_subproject(args[0], 'meson', kw)
+
+ def disabled_subproject(self, subp_name: str, disabled_feature: T.Optional[str] = None,
+ exception: T.Optional[Exception] = None) -> SubprojectHolder:
+ sub = SubprojectHolder(NullSubprojectInterpreter(), os.path.join(self.subproject_dir, subp_name),
+ disabled_feature=disabled_feature, exception=exception)
+ self.subprojects[subp_name] = sub
+ return sub
+
+ def do_subproject(self, subp_name: str, method: Literal['meson', 'cmake'], kwargs: kwtypes.DoSubproject) -> SubprojectHolder:
+ disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
+ if disabled:
+ mlog.log('Subproject', mlog.bold(subp_name), ':', 'skipped: feature', mlog.bold(feature), 'disabled')
+ return self.disabled_subproject(subp_name, disabled_feature=feature)
+
+ default_options = coredata.create_options_dict(kwargs['default_options'], subp_name)
+
+ if subp_name == '':
+ raise InterpreterException('Subproject name must not be empty.')
+ if subp_name[0] == '.':
+ raise InterpreterException('Subproject name must not start with a period.')
+ if '..' in subp_name:
+ raise InterpreterException('Subproject name must not contain a ".." path segment.')
+ if os.path.isabs(subp_name):
+ raise InterpreterException('Subproject name must not be an absolute path.')
+ if has_path_sep(subp_name):
+ mlog.warning('Subproject name has a path separator. This may cause unexpected behaviour.',
+ location=self.current_node)
+ if subp_name in self.subproject_stack:
+ fullstack = self.subproject_stack + [subp_name]
+ incpath = ' => '.join(fullstack)
+ raise InvalidCode(f'Recursive include of subprojects: {incpath}.')
+ if subp_name in self.subprojects:
+ subproject = self.subprojects[subp_name]
+ if required and not subproject.found():
+ raise InterpreterException(f'Subproject "{subproject.subdir}" required but not found.')
+ if kwargs['version']:
+ pv = self.build.subprojects[subp_name]
+ wanted = kwargs['version']
+ if pv == 'undefined' or not mesonlib.version_compare_many(pv, wanted)[0]:
+ raise InterpreterException(f'Subproject {subp_name} version is {pv} but {wanted} required.')
+ return subproject
+
+ r = self.environment.wrap_resolver
+ try:
+ subdir = r.resolve(subp_name, method)
+ except wrap.WrapException as e:
+ if not required:
+ mlog.log(e)
+ mlog.log('Subproject ', mlog.bold(subp_name), 'is buildable:', mlog.red('NO'), '(disabling)')
+ return self.disabled_subproject(subp_name, exception=e)
+ raise e
+
+ subdir_abs = os.path.join(self.environment.get_source_dir(), subdir)
+ os.makedirs(os.path.join(self.build.environment.get_build_dir(), subdir), exist_ok=True)
+ self.global_args_frozen = True
+
+ stack = ':'.join(self.subproject_stack + [subp_name])
+ m = ['\nExecuting subproject', mlog.bold(stack)]
+ if method != 'meson':
+ m += ['method', mlog.bold(method)]
+ mlog.log(*m, '\n', nested=False)
+
+ try:
+ if method == 'meson':
+ return self._do_subproject_meson(subp_name, subdir, default_options, kwargs)
+ elif method == 'cmake':
+ return self._do_subproject_cmake(subp_name, subdir, subdir_abs, default_options, kwargs)
+ else:
+ raise mesonlib.MesonBugException(f'The method {method} is invalid for the subproject {subp_name}')
+ # Invalid code is always an error
+ except InvalidCode:
+ raise
+ except Exception as e:
+ if not required:
+ with mlog.nested(subp_name):
+ # Suppress the 'ERROR:' prefix because this exception is not
+                    # fatal and VS CI treats any logs with "ERROR:" as fatal.
+ mlog.exception(e, prefix=mlog.yellow('Exception:'))
+ mlog.log('\nSubproject', mlog.bold(subdir), 'is buildable:', mlog.red('NO'), '(disabling)')
+ return self.disabled_subproject(subp_name, exception=e)
+ raise e
+
+ def _do_subproject_meson(self, subp_name: str, subdir: str,
+ default_options: T.Dict[OptionKey, str],
+ kwargs: kwtypes.DoSubproject,
+ ast: T.Optional[mparser.CodeBlockNode] = None,
+ build_def_files: T.Optional[T.List[str]] = None,
+ is_translated: bool = False,
+ relaxations: T.Optional[T.Set[InterpreterRuleRelaxation]] = None) -> SubprojectHolder:
+ with mlog.nested(subp_name):
+ new_build = self.build.copy()
+ subi = Interpreter(new_build, self.backend, subp_name, subdir, self.subproject_dir,
+ default_options, ast=ast, is_translated=is_translated,
+ relaxations=relaxations,
+ user_defined_options=self.user_defined_options)
+ # Those lists are shared by all interpreters. That means that
+ # even if the subproject fails, any modification that the subproject
+ # made to those lists will affect the parent project.
+ subi.subprojects = self.subprojects
+ subi.modules = self.modules
+ subi.holder_map = self.holder_map
+ subi.bound_holder_map = self.bound_holder_map
+ subi.summary = self.summary
+
+ subi.subproject_stack = self.subproject_stack + [subp_name]
+ current_active = self.active_projectname
+ current_warnings_counter = mlog.log_warnings_counter
+ mlog.log_warnings_counter = 0
+ subi.run()
+ subi_warnings = mlog.log_warnings_counter
+ mlog.log_warnings_counter = current_warnings_counter
+
+ mlog.log('Subproject', mlog.bold(subp_name), 'finished.')
+
+ mlog.log()
+
+ if kwargs['version']:
+ pv = subi.project_version
+ wanted = kwargs['version']
+ if pv == 'undefined' or not mesonlib.version_compare_many(pv, wanted)[0]:
+ raise InterpreterException(f'Subproject {subp_name} version is {pv} but {wanted} required.')
+ self.active_projectname = current_active
+ self.subprojects.update(subi.subprojects)
+ self.subprojects[subp_name] = SubprojectHolder(subi, subdir, warnings=subi_warnings)
+ # Duplicates are possible when subproject uses files from project root
+ if build_def_files:
+ self.build_def_files.update(build_def_files)
+            # We always need subi.build_def_files, to propagate sub-sub-project files
+ self.build_def_files.update(subi.build_def_files)
+ self.build.merge(subi.build)
+ self.build.subprojects[subp_name] = subi.project_version
+ return self.subprojects[subp_name]
+
+ def _do_subproject_cmake(self, subp_name: str, subdir: str, subdir_abs: str,
+ default_options: T.Dict[OptionKey, str],
+ kwargs: kwtypes.DoSubproject) -> SubprojectHolder:
+ with mlog.nested(subp_name):
+ new_build = self.build.copy()
+ prefix = self.coredata.options[OptionKey('prefix')].value
+
+ from ..modules.cmake import CMakeSubprojectOptions
+ options = kwargs['options'] or CMakeSubprojectOptions()
+ cmake_options = kwargs['cmake_options'] + options.cmake_options
+ cm_int = CMakeInterpreter(new_build, Path(subdir), Path(subdir_abs), Path(prefix), new_build.environment, self.backend)
+ cm_int.initialise(cmake_options)
+ cm_int.analyse()
+
+ # Generate a meson ast and execute it with the normal do_subproject_meson
+ ast = cm_int.pretend_to_be_meson(options.target_options)
+
+ mlog.log()
+ with mlog.nested('cmake-ast'):
+ mlog.log('Processing generated meson AST')
+
+ # Debug print the generated meson file
+ from ..ast import AstIndentationGenerator, AstPrinter
+ printer = AstPrinter(update_ast_line_nos=True)
+ ast.accept(AstIndentationGenerator())
+ ast.accept(printer)
+ printer.post_process()
+ meson_filename = os.path.join(self.build.environment.get_build_dir(), subdir, 'meson.build')
+ with open(meson_filename, "w", encoding='utf-8') as f:
+ f.write(printer.result)
+
+ mlog.log('Build file:', meson_filename)
+ mlog.cmd_ci_include(meson_filename)
+ mlog.log()
+
+ result = self._do_subproject_meson(
+ subp_name, subdir, default_options,
+ kwargs, ast,
+ [str(f) for f in cm_int.bs_files],
+ is_translated=True,
+ relaxations={
+ InterpreterRuleRelaxation.ALLOW_BUILD_DIR_FILE_REFFERENCES,
+ }
+ )
+ result.cm_interpreter = cm_int
+
+ mlog.log()
+ return result
+
+ def get_option_internal(self, optname: str) -> coredata.UserOption:
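+        # Resolve an option name to a UserOption. Non-project (builtin,
+        # backend, compiler) options are looked up in coredata and compiler
+        # base options, falling back to the top-level key when the
+        # per-subproject value is missing or yielding. Project options may
+        # yield to the parent project's option of the same name, but only
+        # when both options have the same type.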
+ key = OptionKey.from_string(optname).evolve(subproject=self.subproject)
+
+ if not key.is_project():
+ for opts in [self.coredata.options, compilers.base_options]:
+ v = opts.get(key)
+ if v is None or v.yielding:
+ v = opts.get(key.as_root())
+ if v is not None:
+ assert isinstance(v, coredata.UserOption), 'for mypy'
+ return v
+
+ try:
+ opt = self.coredata.options[key]
+ if opt.yielding and key.subproject and key.as_root() in self.coredata.options:
+ popt = self.coredata.options[key.as_root()]
+ if type(opt) is type(popt):
+ opt = popt
+ else:
+ # Get class name, then option type as a string
+ opt_type = opt.__class__.__name__[4:][:-6].lower()
+ popt_type = popt.__class__.__name__[4:][:-6].lower()
+ # This is not a hard error to avoid dependency hell, the workaround
+ # when this happens is to simply set the subproject's option directly.
+ mlog.warning('Option {0!r} of type {1!r} in subproject {2!r} cannot yield '
+ 'to parent option of type {3!r}, ignoring parent value. '
+ 'Use -D{2}:{0}=value to set the value for this option manually'
+ '.'.format(optname, opt_type, self.subproject, popt_type),
+ location=self.current_node)
+ return opt
+ except KeyError:
+ pass
+
+ raise InterpreterException(f'Tried to access unknown option {optname!r}.')
+
+ @typed_pos_args('get_option', str)
+ @noKwargs
+ def func_get_option(self, nodes: mparser.BaseNode, args: T.Tuple[str],
+ kwargs: 'TYPE_kwargs') -> T.Union[coredata.UserOption, 'TYPE_var']:
+ optname = args[0]
+ if ':' in optname:
+ raise InterpreterException('Having a colon in option name is forbidden, '
+ 'projects are not allowed to directly access '
+ 'options of other subprojects.')
+ opt = self.get_option_internal(optname)
+ if isinstance(opt, coredata.UserFeatureOption):
+ opt.name = optname
+ return opt
+ elif isinstance(opt, coredata.UserOption):
+ if isinstance(opt.value, str):
+ return P_OBJ.OptionString(opt.value, f'{{{optname}}}')
+ return opt.value
+ return opt
+
+ @typed_pos_args('configuration_data', optargs=[dict])
+ @noKwargs
+ def func_configuration_data(self, node: mparser.BaseNode, args: T.Tuple[T.Optional[T.Dict[str, T.Any]]],
+ kwargs: 'TYPE_kwargs') -> build.ConfigurationData:
+ initial_values = args[0]
+ if initial_values is not None:
+ FeatureNew.single_use('configuration_data dictionary', '0.49.0', self.subproject, location=node)
+ for k, v in initial_values.items():
+ if not isinstance(v, (str, int, bool)):
+ raise InvalidArguments(
+ f'"configuration_data": initial value dictionary key "{k!r}"" must be "str | int | bool", not "{v!r}"')
+ return build.ConfigurationData(initial_values)
+
+ def set_backend(self) -> None:
+ # The backend is already set when parsing subprojects
+ if self.backend is not None:
+ return
+ backend = self.coredata.get_option(OptionKey('backend'))
+ from ..backend import backends
+ self.backend = backends.get_backend_from_name(backend, self.build, self)
+
+ if self.backend is None:
+ raise InterpreterException(f'Unknown backend "{backend}".')
+ if backend != self.backend.name:
+ if self.backend.name.startswith('vs'):
+ mlog.log('Auto detected Visual Studio backend:', mlog.bold(self.backend.name))
+ self.coredata.set_option(OptionKey('backend'), self.backend.name)
+
+ # Only init backend options on first invocation otherwise it would
+ # override values previously set from command line.
+ if self.environment.first_invocation:
+ self.coredata.init_backend_options(backend)
+
+ options = {k: v for k, v in self.environment.options.items() if k.is_backend()}
+ self.coredata.set_options(options)
+
+ @typed_pos_args('project', str, varargs=str)
+ @typed_kwargs(
+ 'project',
+ DEFAULT_OPTIONS,
+ KwargInfo('meson_version', (str, NoneType)),
+ KwargInfo(
+ 'version',
+ (str, mesonlib.File, NoneType, list),
+ default='undefined',
+ validator=_project_version_validator,
+ convertor=lambda x: x[0] if isinstance(x, list) else x,
+ ),
+ KwargInfo('license', ContainerTypeInfo(list, str), default=['unknown'], listify=True),
+ KwargInfo('subproject_dir', str, default='subprojects'),
+ )
+ def func_project(self, node: mparser.FunctionNode, args: T.Tuple[str, T.List[str]], kwargs: 'kwtypes.Project') -> None:
+ proj_name, proj_langs = args
+ if ':' in proj_name:
+ raise InvalidArguments(f"Project name {proj_name!r} must not contain ':'")
+
+ # This needs to be evaluated as early as possible, as meson uses this
+ # for things like deprecation testing.
+ if kwargs['meson_version']:
+ cv = coredata.version
+ pv = kwargs['meson_version']
+ if not mesonlib.version_compare(cv, pv):
+ raise InterpreterException(f'Meson version is {cv} but project requires {pv}')
+ mesonlib.project_meson_versions[self.subproject] = kwargs['meson_version']
+
+ if os.path.exists(self.option_file):
+ oi = optinterpreter.OptionInterpreter(self.subproject)
+ oi.process(self.option_file)
+ self.coredata.update_project_options(oi.options)
+ self.add_build_def_file(self.option_file)
+
+ # Do not set default_options on reconfigure otherwise it would override
+ # values previously set from command line. That means that changing
+ # default_options in a project will trigger a reconfigure but won't
+ # have any effect.
+ self.project_default_options = coredata.create_options_dict(
+ kwargs['default_options'], self.subproject)
+
+        # If this is the first invocation we always need to initialize
+        # builtins; if this is a subproject that is new in a re-invocation we
+        # need to initialize builtins for it as well
+ if self.environment.first_invocation or (self.subproject != '' and self.subproject not in self.coredata.initialized_subprojects):
+ default_options = self.project_default_options.copy()
+ default_options.update(self.default_project_options)
+ self.coredata.init_builtins(self.subproject)
+ self.coredata.initialized_subprojects.add(self.subproject)
+ else:
+ default_options = {}
+ self.coredata.set_default_options(default_options, self.subproject, self.environment)
+
+ if not self.is_subproject():
+ self.build.project_name = proj_name
+ self.active_projectname = proj_name
+
+ version = kwargs['version']
+ if isinstance(version, mesonlib.File):
+ FeatureNew.single_use('version from file', '0.57.0', self.subproject, location=node)
+ self.add_build_def_file(version)
+ ifname = version.absolute_path(self.environment.source_dir,
+ self.environment.build_dir)
+ try:
+ ver_data = Path(ifname).read_text(encoding='utf-8').split('\n')
+ except FileNotFoundError:
+ raise InterpreterException('Version file not found.')
+ if len(ver_data) == 2 and ver_data[1] == '':
+ ver_data = ver_data[0:1]
+ if len(ver_data) != 1:
+ raise InterpreterException('Version file must contain exactly one line of text.')
+ self.project_version = ver_data[0]
+ else:
+ self.project_version = version
+
+ if self.build.project_version is None:
+ self.build.project_version = self.project_version
+ proj_license = kwargs['license']
+ self.build.dep_manifest[proj_name] = build.DepManifest(self.project_version, proj_license)
+ if self.subproject in self.build.projects:
+ raise InvalidCode('Second call to project().')
+
+ # spdirname is the subproject_dir for this project, relative to self.subdir.
+ # self.subproject_dir is the subproject_dir for the main project, relative to top source dir.
+ spdirname = kwargs['subproject_dir']
+ if not isinstance(spdirname, str):
+ raise InterpreterException('Subproject_dir must be a string')
+ if os.path.isabs(spdirname):
+ raise InterpreterException('Subproject_dir must not be an absolute path.')
+ if spdirname.startswith('.'):
+ raise InterpreterException('Subproject_dir must not begin with a period.')
+ if '..' in spdirname:
+ raise InterpreterException('Subproject_dir must not contain a ".." segment.')
+ if not self.is_subproject():
+ self.subproject_dir = spdirname
+ self.build.subproject_dir = self.subproject_dir
+
+ # Load wrap files from this (sub)project.
+ wrap_mode = self.coredata.get_option(OptionKey('wrap_mode'))
+ if not self.is_subproject() or wrap_mode != WrapMode.nopromote:
+ subdir = os.path.join(self.subdir, spdirname)
+ r = wrap.Resolver(self.environment.get_source_dir(), subdir, self.subproject, wrap_mode)
+ if self.is_subproject():
+ self.environment.wrap_resolver.merge_wraps(r)
+ else:
+ self.environment.wrap_resolver = r
+
+ self.build.projects[self.subproject] = proj_name
+ mlog.log('Project name:', mlog.bold(proj_name))
+ mlog.log('Project version:', mlog.bold(self.project_version))
+
+ if not self.is_subproject():
+            # We have to activate VS before adding languages and before calling
+            # self.set_backend(), otherwise it wouldn't be able to detect which
+            # VS backend version we need. But this must happen after setting
+            # default_options, in case the project sets the VS backend by default.
+ backend = self.coredata.get_option(OptionKey('backend'))
+ force_vsenv = self.user_defined_options.vsenv or backend.startswith('vs')
+ if mesonlib.setup_vsenv(force_vsenv):
+ self.build.need_vsenv = True
+
+ self.add_languages(proj_langs, True, MachineChoice.HOST)
+ self.add_languages(proj_langs, False, MachineChoice.BUILD)
+
+ self.set_backend()
+ if not self.is_subproject():
+ self.check_stdlibs()
+
+ @typed_kwargs('add_languages', KwargInfo('native', (bool, NoneType), since='0.54.0'), REQUIRED_KW)
+ @typed_pos_args('add_languages', varargs=str)
+ def func_add_languages(self, node: mparser.FunctionNode, args: T.Tuple[T.List[str]], kwargs: 'kwtypes.FuncAddLanguages') -> bool:
+ langs = args[0]
+ disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
+ native = kwargs['native']
+
+ if disabled:
+ for lang in sorted(langs, key=compilers.sort_clink):
+ mlog.log('Compiler for language', mlog.bold(lang), 'skipped: feature', mlog.bold(feature), 'disabled')
+ return False
+ if native is not None:
+ return self.add_languages(langs, required, self.machine_from_native_kwarg(kwargs))
+ else:
+ # absent 'native' means 'both' for backwards compatibility
+ tv = FeatureNew.get_target_version(self.subproject)
+ if FeatureNew.check_version(tv, '0.54.0'):
+ mlog.warning('add_languages is missing native:, assuming languages are wanted for both host and build.',
+ location=node)
+
+ success = self.add_languages(langs, False, MachineChoice.BUILD)
+ success &= self.add_languages(langs, required, MachineChoice.HOST)
+ return success
+
+ @noArgsFlattening
+ @noKwargs
+ def func_message(self, node: mparser.BaseNode, args, kwargs):
+ if len(args) > 1:
+ FeatureNew.single_use('message with more than one argument', '0.54.0', self.subproject, location=node)
+ args_str = [stringifyUserArguments(i) for i in args]
+ self.message_impl(args_str)
+
+ def message_impl(self, args):
+ mlog.log(mlog.bold('Message:'), *args)
+
+ @noArgsFlattening
+ @FeatureNew('summary', '0.53.0')
+ @typed_pos_args('summary', (str, dict), optargs=[object])
+ @typed_kwargs(
+ 'summary',
+ KwargInfo('section', str, default=''),
+ KwargInfo('bool_yn', bool, default=False),
+ KwargInfo('list_sep', (str, NoneType), since='0.54.0')
+ )
+ def func_summary(self, node: mparser.BaseNode, args: T.Tuple[T.Union[str, T.Dict[str, T.Any]], T.Optional[T.Any]],
+ kwargs: 'kwtypes.Summary') -> None:
+ if args[1] is None:
+ if not isinstance(args[0], dict):
+                raise InterpreterException('Summary first argument must be a dictionary.')
+ values = args[0]
+ else:
+ if not isinstance(args[0], str):
+                raise InterpreterException('Summary first argument must be a string.')
+ values = {args[0]: args[1]}
+ self.summary_impl(kwargs['section'], values, kwargs)
+
+ def summary_impl(self, section: str, values, kwargs: 'kwtypes.Summary') -> None:
+ if self.subproject not in self.summary:
+ self.summary[self.subproject] = Summary(self.active_projectname, self.project_version)
+ self.summary[self.subproject].add_section(
+ section, values, kwargs['bool_yn'], kwargs['list_sep'], self.subproject)
+
+ def _print_summary(self) -> None:
+        # Add an automatic 'Subprojects' section in the main project.
+ all_subprojects = collections.OrderedDict()
+ for name, subp in sorted(self.subprojects.items()):
+ value = subp.found()
+ if subp.disabled_feature:
+ value = [value, f'Feature {subp.disabled_feature!r} disabled']
+ elif subp.exception:
+ value = [value, str(subp.exception)]
+ elif subp.warnings > 0:
+ value = [value, f'{subp.warnings} warnings']
+ all_subprojects[name] = value
+ if all_subprojects:
+ self.summary_impl('Subprojects', all_subprojects,
+ {'bool_yn': True,
+ 'list_sep': ' ',
+ })
+ # Add automatic section with all user defined options
+ if self.user_defined_options:
+ values = collections.OrderedDict()
+ if self.user_defined_options.cross_file:
+ values['Cross files'] = self.user_defined_options.cross_file
+ if self.user_defined_options.native_file:
+ values['Native files'] = self.user_defined_options.native_file
+ sorted_options = sorted(self.user_defined_options.cmd_line_options.items())
+ values.update({str(k): v for k, v in sorted_options})
+ if values:
+ self.summary_impl('User defined options', values, {'bool_yn': False, 'list_sep': None})
+ # Print all summaries, main project last.
+ mlog.log('') # newline
+ main_summary = self.summary.pop('', None)
+ for subp_name, summary in sorted(self.summary.items()):
+ if self.subprojects[subp_name].found():
+ summary.dump()
+ if main_summary:
+ main_summary.dump()
+
+ @noArgsFlattening
+ @FeatureNew('warning', '0.44.0')
+ @noKwargs
+ def func_warning(self, node, args, kwargs):
+ if len(args) > 1:
+ FeatureNew.single_use('warning with more than one argument', '0.54.0', self.subproject, location=node)
+ args_str = [stringifyUserArguments(i) for i in args]
+ mlog.warning(*args_str, location=node)
+
+ @noArgsFlattening
+ @noKwargs
+ def func_error(self, node, args, kwargs):
+ if len(args) > 1:
+ FeatureNew.single_use('error with more than one argument', '0.58.0', self.subproject, location=node)
+ args_str = [stringifyUserArguments(i) for i in args]
+ raise InterpreterException('Problem encountered: ' + ' '.join(args_str))
+
+ @noArgsFlattening
+ @FeatureNew('debug', '0.63.0')
+ @noKwargs
+ def func_debug(self, node, args, kwargs):
+ args_str = [stringifyUserArguments(i) for i in args]
+ mlog.debug('Debug:', *args_str)
+
+ @noKwargs
+ @noPosargs
+ def func_exception(self, node, args, kwargs):
+ raise RuntimeError('unit test traceback :)')
+
+ def add_languages(self, args: T.List[str], required: bool, for_machine: MachineChoice) -> bool:
+ success = self.add_languages_for(args, required, for_machine)
+ if not self.coredata.is_cross_build():
+ self.coredata.copy_build_options_from_regular_ones()
+ self._redetect_machines()
+ return success
+
+ def should_skip_sanity_check(self, for_machine: MachineChoice) -> bool:
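+        # Whether compiler sanity checks should be skipped, as requested by
+        # the 'skip_sanity_check' property in the host machine file (must be
+        # a boolean).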
+ should = self.environment.properties.host.get('skip_sanity_check', False)
+ if not isinstance(should, bool):
+ raise InterpreterException('Option skip_sanity_check must be a boolean.')
+ if for_machine != MachineChoice.HOST and not should:
+ return False
+ if not self.environment.is_cross_build() and not should:
+ return False
+ return should
+
+ def add_languages_for(self, args: T.List[str], required: bool, for_machine: MachineChoice) -> bool:
+ args = [a.lower() for a in args]
+ langs = set(self.compilers[for_machine].keys())
+ langs.update(args)
+        # We'd really like to add Cython's default language here, but that
+        # can't be done because the Cython compiler hasn't been initialized,
+        # so we can't get the option yet. Since we can't know which compiler
+        # to add by default, and we don't want to add unnecessary compilers,
+        # we add nothing for Cython here and instead do it when the first
+        # Cython target using a particular language is defined.
+ if 'vala' in langs and 'c' not in langs:
+ FeatureNew.single_use('Adding Vala language without C', '0.59.0', self.subproject, location=self.current_node)
+ args.append('c')
+ if 'nasm' in langs:
+ FeatureNew.single_use('Adding NASM language', '0.64.0', self.subproject, location=self.current_node)
+
+ success = True
+ for lang in sorted(args, key=compilers.sort_clink):
+ if lang in self.compilers[for_machine]:
+ continue
+ machine_name = for_machine.get_lower_case_name()
+ comp = self.coredata.compilers[for_machine].get(lang)
+ if not comp:
+ try:
+ comp = compilers.detect_compiler_for(self.environment, lang, for_machine)
+ if comp is None:
+ raise InvalidArguments(f'Tried to use unknown language "{lang}".')
+ if self.should_skip_sanity_check(for_machine):
+ mlog.log_once('Cross compiler sanity tests disabled via the cross file.')
+ else:
+ comp.sanity_check(self.environment.get_scratch_dir(), self.environment)
+ except Exception:
+ if not required:
+ mlog.log('Compiler for language',
+ mlog.bold(lang), 'for the', machine_name,
+ 'machine not found.')
+ success = False
+ continue
+ else:
+ raise
+
+                # Add per-subproject compiler options. They inherit their values from the main project.
+ if self.subproject:
+ options = {}
+ for k in comp.get_options():
+ v = copy.copy(self.coredata.options[k])
+ k = k.evolve(subproject=self.subproject)
+ options[k] = v
+ self.coredata.add_compiler_options(options, lang, for_machine, self.environment)
+
+ if for_machine == MachineChoice.HOST or self.environment.is_cross_build():
+ logger_fun = mlog.log
+ else:
+ logger_fun = mlog.debug
+ logger_fun(comp.get_display_language(), 'compiler for the', machine_name, 'machine:',
+ mlog.bold(' '.join(comp.get_exelist())), comp.get_version_string())
+ if comp.linker is not None:
+ logger_fun(comp.get_display_language(), 'linker for the', machine_name, 'machine:',
+ mlog.bold(' '.join(comp.linker.get_exelist())), comp.linker.id, comp.linker.version)
+ self.build.ensure_static_linker(comp)
+ self.compilers[for_machine][lang] = comp
+
+ return success
+
+ def program_from_file_for(self, for_machine: MachineChoice, prognames: T.List[mesonlib.FileOrString]
+ ) -> T.Optional[ExternalProgram]:
+ for p in prognames:
+ if isinstance(p, mesonlib.File):
+ continue # Always points to a local (i.e. self generated) file.
+ if not isinstance(p, str):
+ raise InterpreterException('Executable name must be a string')
+ prog = ExternalProgram.from_bin_list(self.environment, for_machine, p)
+ # if the machine file specified something, it may be a regular
+ # not-found program but we still want to return that
+ if not isinstance(prog, NonExistingExternalProgram):
+ return prog
+ return None
+
+ def program_from_system(self, args: T.List[mesonlib.FileOrString], search_dirs: T.List[str],
+ extra_info: T.List[mlog.TV_Loggable]) -> T.Optional[ExternalProgram]:
+ # Search for scripts relative to current subdir.
+ # Do not cache found programs because find_program('foobar')
+ # might give different results when run from different source dirs.
+ source_dir = os.path.join(self.environment.get_source_dir(), self.subdir)
+ for exename in args:
+ if isinstance(exename, mesonlib.File):
+ if exename.is_built:
+ search_dir = os.path.join(self.environment.get_build_dir(),
+ exename.subdir)
+ else:
+ search_dir = os.path.join(self.environment.get_source_dir(),
+ exename.subdir)
+ exename = exename.fname
+ extra_search_dirs = []
+ elif isinstance(exename, str):
+ search_dir = source_dir
+ extra_search_dirs = search_dirs
+ else:
+ raise InvalidArguments(f'find_program only accepts strings and files, not {exename!r}')
+ extprog = ExternalProgram(exename, search_dir=search_dir,
+ extra_search_dirs=extra_search_dirs,
+ silent=True)
+ if extprog.found():
+ extra_info.append(f"({' '.join(extprog.get_command())})")
+ return extprog
+ return None
+
+ def program_from_overrides(self, command_names: T.List[mesonlib.FileOrString],
+ extra_info: T.List['mlog.TV_Loggable']
+ ) -> T.Optional[T.Union[ExternalProgram, OverrideProgram, build.Executable]]:
+ for name in command_names:
+ if not isinstance(name, str):
+ continue
+ if name in self.build.find_overrides:
+ exe = self.build.find_overrides[name]
+ extra_info.append(mlog.blue('(overridden)'))
+ return exe
+ return None
+
+ def store_name_lookups(self, command_names: T.List[mesonlib.FileOrString]) -> None:
+ for name in command_names:
+ if isinstance(name, str):
+ self.build.searched_programs.add(name)
+
+ def add_find_program_override(self, name: str, exe: T.Union[build.Executable, ExternalProgram, 'OverrideProgram']) -> None:
+ if name in self.build.searched_programs:
+ raise InterpreterException(f'Tried to override finding of executable "{name}" which has already been found.')
+ if name in self.build.find_overrides:
+ raise InterpreterException(f'Tried to override executable "{name}" which has already been overridden.')
+ self.build.find_overrides[name] = exe
+
+ def notfound_program(self, args: T.List[mesonlib.FileOrString]) -> ExternalProgram:
+ return NonExistingExternalProgram(' '.join(
+ [a if isinstance(a, str) else a.absolute_path(self.environment.source_dir, self.environment.build_dir)
+ for a in args]))
+
+    # TODO update modules to always pass `for_machine`. It is bad form to assume
+ # the host machine.
+ def find_program_impl(self, args: T.List[mesonlib.FileOrString],
+ for_machine: MachineChoice = MachineChoice.HOST,
+ required: bool = True, silent: bool = True,
+ wanted: T.Union[str, T.List[str]] = '',
+ search_dirs: T.Optional[T.List[str]] = None,
+ version_func: T.Optional[T.Callable[[T.Union['ExternalProgram', 'build.Executable', 'OverrideProgram']], str]] = None
+ ) -> T.Union['ExternalProgram', 'build.Executable', 'OverrideProgram']:
+ args = mesonlib.listify(args)
+
+ extra_info: T.List[mlog.TV_Loggable] = []
+ progobj = self.program_lookup(args, for_machine, required, search_dirs, extra_info)
+ if progobj is None:
+ progobj = self.notfound_program(args)
+
+ if isinstance(progobj, ExternalProgram) and not progobj.found():
+ if not silent:
+ mlog.log('Program', mlog.bold(progobj.get_name()), 'found:', mlog.red('NO'))
+ if required:
+ m = 'Program {!r} not found or not executable'
+ raise InterpreterException(m.format(progobj.get_name()))
+ return progobj
+
+ if wanted:
+ if version_func:
+ version = version_func(progobj)
+ elif isinstance(progobj, build.Executable):
+ if progobj.subproject:
+ interp = self.subprojects[progobj.subproject].held_object
+ else:
+ interp = self
+ assert isinstance(interp, Interpreter)
+ version = interp.project_version
+ else:
+ version = progobj.get_version(self)
+ is_found, not_found, _ = mesonlib.version_compare_many(version, wanted)
+ if not is_found:
+ mlog.log('Program', mlog.bold(progobj.name), 'found:', mlog.red('NO'),
+ 'found', mlog.normal_cyan(version), 'but need:',
+ mlog.bold(', '.join([f"'{e}'" for e in not_found])), *extra_info)
+ if required:
+ m = 'Invalid version of program {!r}: need {!r}, found {!r}.'
+ raise InterpreterException(m.format(progobj.name, not_found, version))
+ return self.notfound_program(args)
+ extra_info.insert(0, mlog.normal_cyan(version))
+
+ # Only store successful lookups
+ self.store_name_lookups(args)
+ if not silent:
+ mlog.log('Program', mlog.bold(progobj.name), 'found:', mlog.green('YES'), *extra_info)
+ if isinstance(progobj, build.Executable):
+ progobj.was_returned_by_find_program = True
+ return progobj
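+
+ # Illustrative only: from the build DSL side, the version check above is what
+ # backs calls such as (program name is a placeholder)
+ #   find_program('protoc', version : '>= 3.0', required : false)
+ # where a version mismatch logs "found: NO" and returns a not-found program,
+ # unless required is true, in which case an InterpreterException is raised.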
+
+ def program_lookup(self, args: T.List[mesonlib.FileOrString], for_machine: MachineChoice,
+ required: bool, search_dirs: T.List[str], extra_info: T.List[mlog.TV_Loggable]
+ ) -> T.Optional[T.Union[ExternalProgram, build.Executable, OverrideProgram]]:
+ progobj = self.program_from_overrides(args, extra_info)
+ if progobj:
+ return progobj
+
+ fallback = None
+ wrap_mode = self.coredata.get_option(OptionKey('wrap_mode'))
+ if wrap_mode != WrapMode.nofallback and self.environment.wrap_resolver:
+ fallback = self.environment.wrap_resolver.find_program_provider(args)
+ if fallback and wrap_mode == WrapMode.forcefallback:
+ return self.find_program_fallback(fallback, args, required, extra_info)
+
+ progobj = self.program_from_file_for(for_machine, args)
+ if progobj is None:
+ progobj = self.program_from_system(args, search_dirs, extra_info)
+ if progobj is None and args[0].endswith('python3'):
+ prog = ExternalProgram('python3', mesonlib.python_command, silent=True)
+ progobj = prog if prog.found() else None
+ if progobj is None and fallback and required:
+ progobj = self.find_program_fallback(fallback, args, required, extra_info)
+
+ return progobj
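+
+ # Lookup order implemented above, summarised for reference: explicit
+ # meson.override_find_program() overrides first, then a forced wrap fallback
+ # (when wrap_mode is forcefallback), then programs pinned in the machine file,
+ # then the system search (PATH / search_dirs), a python3 special case, and
+ # finally an optional subproject fallback when the program is required.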
+
+ def find_program_fallback(self, fallback: str, args: T.List[mesonlib.FileOrString],
+ required: bool, extra_info: T.List[mlog.TV_Loggable]
+ ) -> T.Optional[T.Union[ExternalProgram, build.Executable, OverrideProgram]]:
+ mlog.log('Fallback to subproject', mlog.bold(fallback), 'which provides program',
+ mlog.bold(' '.join(args)))
+ sp_kwargs: kwtypes.DoSubproject = {
+ 'required': required,
+ 'default_options': [],
+ 'version': [],
+ 'cmake_options': [],
+ 'options': None,
+ }
+ self.do_subproject(fallback, 'meson', sp_kwargs)
+ return self.program_from_overrides(args, extra_info)
+
+ @typed_pos_args('find_program', varargs=(str, mesonlib.File), min_varargs=1)
+ @typed_kwargs(
+ 'find_program',
+ DISABLER_KW.evolve(since='0.49.0'),
+ NATIVE_KW,
+ REQUIRED_KW,
+ KwargInfo('dirs', ContainerTypeInfo(list, str), default=[], listify=True, since='0.53.0'),
+ KwargInfo('version', ContainerTypeInfo(list, str), default=[], listify=True, since='0.52.0'),
+ )
+ @disablerIfNotFound
+ def func_find_program(self, node: mparser.BaseNode, args: T.Tuple[T.List[mesonlib.FileOrString]],
+ kwargs: 'kwtypes.FindProgram',
+ ) -> T.Union['build.Executable', ExternalProgram, 'OverrideProgram']:
+ disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
+ if disabled:
+ mlog.log('Program', mlog.bold(' '.join(args[0])), 'skipped: feature', mlog.bold(feature), 'disabled')
+ return self.notfound_program(args[0])
+
+ search_dirs = extract_search_dirs(kwargs)
+ return self.find_program_impl(args[0], kwargs['native'], required=required,
+ silent=False, wanted=kwargs['version'],
+ search_dirs=search_dirs)
+
+ def func_find_library(self, node, args, kwargs):
+ raise InvalidCode('find_library() is removed, use meson.get_compiler(\'name\').find_library() instead.\n'
+ 'Look here for documentation: http://mesonbuild.com/Reference-manual.html#compiler-object\n'
+ 'Look here for example: http://mesonbuild.com/howtox.html#add-math-library-lm-portably\n'
+ )
+
+ # When adding kwargs, please check if they make sense in dependencies.get_dep_identifier()
+ @FeatureNewKwargs('dependency', '0.57.0', ['cmake_package_version'])
+ @FeatureNewKwargs('dependency', '0.56.0', ['allow_fallback'])
+ @FeatureNewKwargs('dependency', '0.54.0', ['components'])
+ @FeatureNewKwargs('dependency', '0.52.0', ['include_type'])
+ @FeatureNewKwargs('dependency', '0.50.0', ['not_found_message', 'cmake_module_path', 'cmake_args'])
+ @FeatureNewKwargs('dependency', '0.49.0', ['disabler'])
+ @FeatureNewKwargs('dependency', '0.40.0', ['method'])
+ @FeatureNewKwargs('dependency', '0.38.0', ['default_options'])
+ @disablerIfNotFound
+ @permittedKwargs(permitted_dependency_kwargs)
+ @typed_pos_args('dependency', varargs=str, min_varargs=1)
+ def func_dependency(self, node: mparser.BaseNode, args: T.Tuple[T.List[str]], kwargs) -> Dependency:
+ # Filter out empty names: dependency('') is treated as an empty list of names
+ names = [n for n in args[0] if n]
+ if len(names) > 1:
+ FeatureNew('dependency with more than one name', '0.60.0').use(self.subproject)
+ allow_fallback = kwargs.get('allow_fallback')
+ if allow_fallback is not None and not isinstance(allow_fallback, bool):
+ raise InvalidArguments('"allow_fallback" argument must be boolean')
+ fallback = kwargs.get('fallback')
+ default_options = kwargs.get('default_options')
+ df = DependencyFallbacksHolder(self, names, allow_fallback, default_options)
+ df.set_fallback(fallback)
+ not_found_message = kwargs.get('not_found_message', '')
+ if not isinstance(not_found_message, str):
+ raise InvalidArguments('The not_found_message must be a string.')
+ try:
+ d = df.lookup(kwargs)
+ except Exception:
+ if not_found_message:
+ self.message_impl([not_found_message])
+ raise
+ assert isinstance(d, Dependency)
+ if not d.found() and not_found_message:
+ self.message_impl([not_found_message])
+ # Ensure the correct include type
+ if 'include_type' in kwargs:
+ wanted = kwargs['include_type']
+ if not isinstance(wanted, str):
+ raise InvalidArguments('The `include_type` kwarg must be a string')
+ actual = d.get_include_type()
+ if wanted != actual:
+ mlog.debug(f'Current include type of {args[0]} is {actual}. Converting to requested {wanted}')
+ d = d.generate_system_dependency(wanted)
+ if d.feature_since is not None:
+ version, extra_msg = d.feature_since
+ FeatureNew.single_use(f'dep {d.name!r} custom lookup', version, self.subproject, extra_msg, node)
+ for f in d.featurechecks:
+ f.use(self.subproject, node)
+ return d
+
+ @FeatureNew('disabler', '0.44.0')
+ @noKwargs
+ @noPosargs
+ def func_disabler(self, node, args, kwargs):
+ return Disabler()
+
+ @FeatureNewKwargs('executable', '0.42.0', ['implib'])
+ @FeatureNewKwargs('executable', '0.56.0', ['win_subsystem'])
+ @FeatureDeprecatedKwargs('executable', '0.56.0', ['gui_app'], extra_message="Use 'win_subsystem' instead.")
+ @permittedKwargs(build.known_exe_kwargs)
+ def func_executable(self, node, args, kwargs):
+ return self.build_target(node, args, kwargs, build.Executable)
+
+ @permittedKwargs(build.known_stlib_kwargs)
+ def func_static_lib(self, node, args, kwargs):
+ return self.build_target(node, args, kwargs, build.StaticLibrary)
+
+ @permittedKwargs(build.known_shlib_kwargs)
+ def func_shared_lib(self, node, args, kwargs):
+ holder = self.build_target(node, args, kwargs, build.SharedLibrary)
+ holder.shared_library_only = True
+ return holder
+
+ @permittedKwargs(known_library_kwargs)
+ def func_both_lib(self, node, args, kwargs):
+ return self.build_both_libraries(node, args, kwargs)
+
+ @FeatureNew('shared_module', '0.37.0')
+ @permittedKwargs(build.known_shmod_kwargs)
+ def func_shared_module(self, node, args, kwargs):
+ return self.build_target(node, args, kwargs, build.SharedModule)
+
+ @permittedKwargs(known_library_kwargs)
+ def func_library(self, node, args, kwargs):
+ return self.build_library(node, args, kwargs)
+
+ @permittedKwargs(build.known_jar_kwargs)
+ def func_jar(self, node, args, kwargs):
+ return self.build_target(node, args, kwargs, build.Jar)
+
+ @FeatureNewKwargs('build_target', '0.40.0', ['link_whole', 'override_options'])
+ @permittedKwargs(known_build_target_kwargs)
+ def func_build_target(self, node, args, kwargs):
+ if 'target_type' not in kwargs:
+ raise InterpreterException('Missing target_type keyword argument')
+ target_type = kwargs.pop('target_type')
+ if target_type == 'executable':
+ return self.build_target(node, args, kwargs, build.Executable)
+ elif target_type == 'shared_library':
+ return self.build_target(node, args, kwargs, build.SharedLibrary)
+ elif target_type == 'shared_module':
+ FeatureNew.single_use(
+ 'build_target(target_type: \'shared_module\')',
+ '0.51.0', self.subproject, location=node)
+ return self.build_target(node, args, kwargs, build.SharedModule)
+ elif target_type == 'static_library':
+ return self.build_target(node, args, kwargs, build.StaticLibrary)
+ elif target_type == 'both_libraries':
+ return self.build_both_libraries(node, args, kwargs)
+ elif target_type == 'library':
+ return self.build_library(node, args, kwargs)
+ elif target_type == 'jar':
+ return self.build_target(node, args, kwargs, build.Jar)
+ else:
+ raise InterpreterException('Unknown target_type.')
+
+ @noPosargs
+ @typed_kwargs(
+ 'vcs_tag',
+ CT_INPUT_KW.evolve(required=True),
+ MULTI_OUTPUT_KW,
+ # Cannot use the COMMAND_KW because command is allowed to be empty
+ KwargInfo(
+ 'command',
+ ContainerTypeInfo(list, (str, build.BuildTarget, build.CustomTarget, build.CustomTargetIndex, ExternalProgram, mesonlib.File)),
+ listify=True,
+ default=[],
+ ),
+ KwargInfo('fallback', (str, NoneType)),
+ KwargInfo('replace_string', str, default='@VCS_TAG@'),
+ )
+ def func_vcs_tag(self, node: mparser.BaseNode, args: T.List['TYPE_var'], kwargs: 'kwtypes.VcsTag') -> build.CustomTarget:
+ if kwargs['fallback'] is None:
+ FeatureNew.single_use('Optional fallback in vcs_tag', '0.41.0', self.subproject, location=node)
+ fallback = kwargs['fallback'] or self.project_version
+ replace_string = kwargs['replace_string']
+ regex_selector = '(.*)' # default regex selector for custom command: use complete output
+ vcs_cmd = kwargs['command']
+ source_dir = os.path.normpath(os.path.join(self.environment.get_source_dir(), self.subdir))
+ if vcs_cmd:
+ if isinstance(vcs_cmd[0], (str, mesonlib.File)):
+ if isinstance(vcs_cmd[0], mesonlib.File):
+ FeatureNew.single_use('vcs_tag with file as the first argument', '0.62.0', self.subproject, location=node)
+ maincmd = self.find_program_impl(vcs_cmd[0], required=False)
+ if maincmd.found():
+ vcs_cmd[0] = maincmd
+ else:
+ FeatureNew.single_use('vcs_tag with custom_tgt, external_program, or exe as the first argument', '0.63.0', self.subproject, location=node)
+ else:
+ vcs = mesonlib.detect_vcs(source_dir)
+ if vcs:
+ mlog.log('Found {} repository at {}'.format(vcs['name'], vcs['wc_dir']))
+ vcs_cmd = vcs['get_rev'].split()
+ regex_selector = vcs['rev_regex']
+ else:
+ vcs_cmd = [' '] # executing this cmd will fail in vcstagger.py and force use of the fallback string
+ # vcstagger.py parameters: infile, outfile, fallback, source_dir, replace_string, regex_selector, command...
+
+ self._validate_custom_target_outputs(len(kwargs['input']) > 1, kwargs['output'], "vcs_tag")
+
+ cmd = self.environment.get_build_command() + \
+ ['--internal',
+ 'vcstagger',
+ '@INPUT0@',
+ '@OUTPUT0@',
+ fallback,
+ source_dir,
+ replace_string,
+ regex_selector] + vcs_cmd
+
+ tg = build.CustomTarget(
+ kwargs['output'][0],
+ self.subdir,
+ self.subproject,
+ self.environment,
+ cmd,
+ self.source_strings_to_files(kwargs['input']),
+ kwargs['output'],
+ build_by_default=True,
+ build_always_stale=True,
+ )
+ self.add_target(tg.name, tg)
+ return tg
+
+ @FeatureNew('subdir_done', '0.46.0')
+ @noPosargs
+ @noKwargs
+ def func_subdir_done(self, node: mparser.BaseNode, args: TYPE_var, kwargs: TYPE_kwargs) -> T.NoReturn:
+ raise SubdirDoneRequest()
+
+ @staticmethod
+ def _validate_custom_target_outputs(has_multi_in: bool, outputs: T.Iterable[str], name: str) -> None:
+ """Checks for additional invalid values in a custom_target output.
+
+ This cannot be done with typed_kwargs because it requires the number of
+ inputs.
+ """
+ for out in outputs:
+ if has_multi_in and ('@PLAINNAME@' in out or '@BASENAME@' in out):
+ raise InvalidArguments(f'{name}: output cannot contain "@PLAINNAME@" or "@BASENAME@" '
+ 'when there is more than one input (we can\'t know which to use)')
+
+ @typed_pos_args('custom_target', optargs=[str])
+ @typed_kwargs(
+ 'custom_target',
+ COMMAND_KW,
+ CT_BUILD_ALWAYS,
+ CT_BUILD_ALWAYS_STALE,
+ CT_BUILD_BY_DEFAULT,
+ CT_INPUT_KW,
+ CT_INSTALL_DIR_KW,
+ CT_INSTALL_TAG_KW,
+ MULTI_OUTPUT_KW,
+ DEPENDS_KW,
+ DEPEND_FILES_KW,
+ DEPFILE_KW,
+ ENV_KW.evolve(since='0.57.0'),
+ INSTALL_KW,
+ INSTALL_MODE_KW.evolve(since='0.47.0'),
+ KwargInfo('feed', bool, default=False, since='0.59.0'),
+ KwargInfo('capture', bool, default=False),
+ KwargInfo('console', bool, default=False, since='0.48.0'),
+ )
+ def func_custom_target(self, node: mparser.FunctionNode, args: T.Tuple[str],
+ kwargs: 'kwtypes.CustomTarget') -> build.CustomTarget:
+ if kwargs['depfile'] and ('@BASENAME@' in kwargs['depfile'] or '@PLAINNAME@' in kwargs['depfile']):
+ FeatureNew.single_use('substitutions in custom_target depfile', '0.47.0', self.subproject, location=node)
+ install_mode = self._warn_kwarg_install_mode_sticky(kwargs['install_mode'])
+
+ # Don't mutate the kwargs
+
+ build_by_default = kwargs['build_by_default']
+ build_always_stale = kwargs['build_always_stale']
+ # Remap build_always to build_by_default and build_always_stale
+ if kwargs['build_always'] is not None and kwargs['build_always_stale'] is not None:
+ raise InterpreterException('CustomTarget: "build_always" and "build_always_stale" are mutually exclusive')
+
+ if build_by_default is None and kwargs['install']:
+ build_by_default = True
+
+ elif kwargs['build_always'] is not None:
+ if build_by_default is None:
+ build_by_default = kwargs['build_always']
+ build_always_stale = kwargs['build_always']
+
+ # These are nullable so that we can know whether they were explicitly
+ # set or not. If they have not been overwritten, set them to their true
+ # defaults.
+ if build_by_default is None:
+ build_by_default = False
+ if build_always_stale is None:
+ build_always_stale = False
+
+ name = args[0]
+ if name is None:
+ # name will default to first output, but we cannot do that yet because
+ # they could need substitutions (e.g. @BASENAME@) first. CustomTarget()
+ # will take care of setting a proper default but name must be an empty
+ # string in the meantime.
+ FeatureNew.single_use('custom_target() with no name argument', '0.60.0', self.subproject, location=node)
+ name = ''
+ inputs = self.source_strings_to_files(kwargs['input'], strict=False)
+ command = kwargs['command']
+ if command and isinstance(command[0], str):
+ command[0] = self.find_program_impl([command[0]])
+
+ if len(inputs) > 1 and kwargs['feed']:
+ raise InvalidArguments('custom_target: "feed" keyword argument can only be used with a single input')
+ if len(kwargs['output']) > 1 and kwargs['capture']:
+ raise InvalidArguments('custom_target: "capture" keyword argument can only be used with a single output')
+ if kwargs['capture'] and kwargs['console']:
+ raise InvalidArguments('custom_target: "capture" and "console" keyword arguments are mutually exclusive')
+ for c in command:
+ if kwargs['capture'] and isinstance(c, str) and '@OUTPUT@' in c:
+ raise InvalidArguments('custom_target: "capture" keyword argument cannot be used with "@OUTPUT@"')
+ if kwargs['feed'] and isinstance(c, str) and '@INPUT@' in c:
+ raise InvalidArguments('custom_target: "feed" keyword argument cannot be used with "@INPUT@"')
+ if kwargs['install'] and not kwargs['install_dir']:
+ raise InvalidArguments('custom_target: "install_dir" keyword argument must be set when "install" is true.')
+ if len(kwargs['install_dir']) > 1:
+ FeatureNew.single_use('multiple install_dir for custom_target', '0.40.0', self.subproject, location=node)
+ if len(kwargs['install_tag']) not in {0, 1, len(kwargs['output'])}:
+ raise InvalidArguments('custom_target: install_tag argument must have 0 or 1 elements, '
+ 'or the same number of elements as the output keyword argument. '
+ f'(there are {len(kwargs["install_tag"])} install_tags, '
+ f'and {len(kwargs["output"])} outputs)')
+
+ for t in kwargs['output']:
+ self.validate_forbidden_targets(t)
+ self._validate_custom_target_outputs(len(inputs) > 1, kwargs['output'], "custom_target")
+
+ tg = build.CustomTarget(
+ name,
+ self.subdir,
+ self.subproject,
+ self.environment,
+ command,
+ inputs,
+ kwargs['output'],
+ build_always_stale=build_always_stale,
+ build_by_default=build_by_default,
+ capture=kwargs['capture'],
+ console=kwargs['console'],
+ depend_files=kwargs['depend_files'],
+ depfile=kwargs['depfile'],
+ extra_depends=kwargs['depends'],
+ env=kwargs['env'],
+ feed=kwargs['feed'],
+ install=kwargs['install'],
+ install_dir=kwargs['install_dir'],
+ install_mode=install_mode,
+ install_tag=kwargs['install_tag'],
+ backend=self.backend)
+ self.add_target(tg.name, tg)
+ return tg
+
+ @typed_pos_args('run_target', str)
+ @typed_kwargs(
+ 'run_target',
+ COMMAND_KW,
+ DEPENDS_KW,
+ ENV_KW.evolve(since='0.57.0'),
+ )
+ def func_run_target(self, node: mparser.FunctionNode, args: T.Tuple[str],
+ kwargs: 'kwtypes.RunTarget') -> build.RunTarget:
+ all_args = kwargs['command'].copy()
+
+ for i in listify(all_args):
+ if isinstance(i, ExternalProgram) and not i.found():
+ raise InterpreterException(f'Tried to use non-existing executable {i.name!r}')
+ if isinstance(all_args[0], str):
+ all_args[0] = self.find_program_impl([all_args[0]])
+ name = args[0]
+ tg = build.RunTarget(name, all_args, kwargs['depends'], self.subdir, self.subproject, self.environment,
+ kwargs['env'])
+ self.add_target(name, tg)
+ return tg
+
+ @FeatureNew('alias_target', '0.52.0')
+ @typed_pos_args('alias_target', str, varargs=build.Target, min_varargs=1)
+ @noKwargs
+ def func_alias_target(self, node: mparser.BaseNode, args: T.Tuple[str, T.List[build.Target]],
+ kwargs: 'TYPE_kwargs') -> build.AliasTarget:
+ name, deps = args
+ tg = build.AliasTarget(name, deps, self.subdir, self.subproject, self.environment)
+ self.add_target(name, tg)
+ return tg
+
+ @typed_pos_args('generator', (build.Executable, ExternalProgram))
+ @typed_kwargs(
+ 'generator',
+ KwargInfo('arguments', ContainerTypeInfo(list, str, allow_empty=False), required=True, listify=True),
+ KwargInfo('output', ContainerTypeInfo(list, str, allow_empty=False), required=True, listify=True),
+ DEPFILE_KW,
+ DEPENDS_KW,
+ KwargInfo('capture', bool, default=False, since='0.43.0'),
+ )
+ def func_generator(self, node: mparser.FunctionNode,
+ args: T.Tuple[T.Union[build.Executable, ExternalProgram]],
+ kwargs: 'kwtypes.FuncGenerator') -> build.Generator:
+ for rule in kwargs['output']:
+ if '@BASENAME@' not in rule and '@PLAINNAME@' not in rule:
+ raise InvalidArguments('Every element of "output" must contain @BASENAME@ or @PLAINNAME@.')
+ if has_path_sep(rule):
+ raise InvalidArguments('"output" must not contain a directory separator.')
+ if len(kwargs['output']) > 1:
+ for o in kwargs['output']:
+ if '@OUTPUT@' in o:
+ raise InvalidArguments('Tried to use @OUTPUT@ in a rule with more than one output.')
+
+ gen = build.Generator(args[0], **kwargs)
+ self.generators.append(gen)
+ return gen
+
+ @typed_pos_args('benchmark', str, (build.Executable, build.Jar, ExternalProgram, mesonlib.File))
+ @typed_kwargs('benchmark', *TEST_KWS)
+ def func_benchmark(self, node: mparser.BaseNode,
+ args: T.Tuple[str, T.Union[build.Executable, build.Jar, ExternalProgram, mesonlib.File]],
+ kwargs: 'kwtypes.FuncBenchmark') -> None:
+ self.add_test(node, args, kwargs, False)
+
+ @typed_pos_args('test', str, (build.Executable, build.Jar, ExternalProgram, mesonlib.File))
+ @typed_kwargs('test', *TEST_KWS, KwargInfo('is_parallel', bool, default=True))
+ def func_test(self, node: mparser.BaseNode,
+ args: T.Tuple[str, T.Union[build.Executable, build.Jar, ExternalProgram, mesonlib.File]],
+ kwargs: 'kwtypes.FuncTest') -> None:
+ self.add_test(node, args, kwargs, True)
+
+ def unpack_env_kwarg(self, kwargs: T.Union[build.EnvironmentVariables, T.Dict[str, 'TYPE_var'], T.List['TYPE_var'], str]) -> build.EnvironmentVariables:
+ envlist = kwargs.get('env')
+ if envlist is None:
+ return build.EnvironmentVariables()
+ msg = ENV_KW.validator(envlist)
+ if msg:
+ raise InvalidArguments(f'"env": {msg}')
+ return ENV_KW.convertor(envlist)
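+
+ # The 'env' kwarg handled above accepts, roughly, any of these forms from the
+ # DSL (values shown are placeholders):
+ #   env : {'FOO': '1', 'BAR': '2'}
+ #   env : ['FOO=1', 'BAR=2']
+ #   env : environment(...)   # a build.EnvironmentVariables object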
+
+ def make_test(self, node: mparser.BaseNode,
+ args: T.Tuple[str, T.Union[build.Executable, build.Jar, ExternalProgram, mesonlib.File]],
+ kwargs: 'kwtypes.BaseTest') -> Test:
+ name = args[0]
+ if ':' in name:
+ mlog.deprecation(f'":" is not allowed in test name "{name}", it has been replaced with "_"',
+ location=node)
+ name = name.replace(':', '_')
+ exe = args[1]
+ if isinstance(exe, ExternalProgram):
+ if not exe.found():
+ raise InvalidArguments('Tried to use not-found external program as test exe')
+ elif isinstance(exe, mesonlib.File):
+ exe = self.find_program_impl([exe])
+
+ env = self.unpack_env_kwarg(kwargs)
+
+ if kwargs['timeout'] <= 0:
+ FeatureNew.single_use('test() timeout <= 0', '0.57.0', self.subproject, location=node)
+
+ prj = self.subproject if self.is_subproject() else self.build.project_name
+
+ suite: T.List[str] = []
+ for s in kwargs['suite']:
+ if s:
+ s = ':' + s
+ suite.append(prj.replace(' ', '_').replace(':', '_') + s)
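+ # e.g. a project named 'My Proj' with suite: ['fast'] yields the suite name
+ # 'My_Proj:fast'; an empty suite entry yields just 'My_Proj'.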
+
+ return Test(name,
+ prj,
+ suite,
+ exe,
+ kwargs['depends'],
+ kwargs.get('is_parallel', False),
+ kwargs['args'],
+ env,
+ kwargs['should_fail'],
+ kwargs['timeout'],
+ kwargs['workdir'],
+ kwargs['protocol'],
+ kwargs['priority'],
+ kwargs['verbose'])
+
+ def add_test(self, node: mparser.BaseNode, args: T.List, kwargs: T.Dict[str, T.Any], is_base_test: bool):
+ t = self.make_test(node, args, kwargs)
+ if is_base_test:
+ self.build.tests.append(t)
+ mlog.debug('Adding test', mlog.bold(t.name, True))
+ else:
+ self.build.benchmarks.append(t)
+ mlog.debug('Adding benchmark', mlog.bold(t.name, True))
+
+ @typed_pos_args('install_headers', varargs=(str, mesonlib.File))
+ @typed_kwargs(
+ 'install_headers',
+ PRESERVE_PATH_KW,
+ KwargInfo('subdir', (str, NoneType)),
+ INSTALL_MODE_KW.evolve(since='0.47.0'),
+ INSTALL_DIR_KW,
+ )
+ def func_install_headers(self, node: mparser.BaseNode,
+ args: T.Tuple[T.List['mesonlib.FileOrString']],
+ kwargs: 'kwtypes.FuncInstallHeaders') -> T.List[build.Headers]:
+ install_mode = self._warn_kwarg_install_mode_sticky(kwargs['install_mode'])
+ source_files = self.source_strings_to_files(args[0])
+ install_subdir = kwargs['subdir']
+ if install_subdir is not None:
+ if kwargs['install_dir'] is not None:
+ raise InterpreterException('install_headers: cannot specify both "install_dir" and "subdir". Use only "install_dir".')
+ if os.path.isabs(install_subdir):
+ mlog.deprecation('Subdir keyword must not be an absolute path. This will be a hard error in the next release.')
+ else:
+ install_subdir = ''
+
+ dirs = collections.defaultdict(list)
+ ret_headers = []
+ if kwargs['preserve_path']:
+ for file in source_files:
+ dirname = os.path.dirname(file.fname)
+ dirs[dirname].append(file)
+ else:
+ dirs[''].extend(source_files)
+
+ for childdir in dirs:
+ h = build.Headers(dirs[childdir], os.path.join(install_subdir, childdir), kwargs['install_dir'],
+ install_mode, self.subproject)
+ ret_headers.append(h)
+ self.build.headers.append(h)
+
+ return ret_headers
+
+ @typed_pos_args('install_man', varargs=(str, mesonlib.File))
+ @typed_kwargs(
+ 'install_man',
+ KwargInfo('locale', (str, NoneType), since='0.58.0'),
+ INSTALL_MODE_KW.evolve(since='0.47.0'),
+ INSTALL_DIR_KW,
+ )
+ def func_install_man(self, node: mparser.BaseNode,
+ args: T.Tuple[T.List['mesonlib.FileOrString']],
+ kwargs: 'kwtypes.FuncInstallMan') -> build.Man:
+ install_mode = self._warn_kwarg_install_mode_sticky(kwargs['install_mode'])
+ # We just need to narrow the type here: the input is limited to strings
+ # and Files, so only Files will be returned by source_strings_to_files()
+ sources = self.source_strings_to_files(args[0])
+ for s in sources:
+ try:
+ num = int(s.rsplit('.', 1)[-1])
+ except (IndexError, ValueError):
+ num = 0
+ if not 1 <= num <= 9:
+ raise InvalidArguments('Man file must have a numeric file extension between 1 and 9')
+
+ m = build.Man(sources, kwargs['install_dir'], install_mode,
+ self.subproject, kwargs['locale'])
+ self.build.man.append(m)
+
+ return m
+
+ @FeatureNew('install_emptydir', '0.60.0')
+ @typed_kwargs(
+ 'install_emptydir',
+ INSTALL_MODE_KW,
+ KwargInfo('install_tag', (str, NoneType), since='0.62.0')
+ )
+ def func_install_emptydir(self, node: mparser.BaseNode, args: T.Tuple[str], kwargs) -> build.EmptyDir:
+ d = build.EmptyDir(args[0], kwargs['install_mode'], self.subproject, kwargs['install_tag'])
+ self.build.emptydir.append(d)
+
+ return d
+
+ @FeatureNew('install_symlink', '0.61.0')
+ @typed_pos_args('install_symlink', str)
+ @typed_kwargs(
+ 'install_symlink',
+ KwargInfo('pointing_to', str, required=True),
+ KwargInfo('install_dir', str, required=True),
+ INSTALL_TAG_KW,
+ )
+ def func_install_symlink(self, node: mparser.BaseNode,
+ args: T.Tuple[str],
+ kwargs) -> build.SymlinkData:
+ name = args[0] # Validation while creating the SymlinkData object
+ target = kwargs['pointing_to']
+ l = build.SymlinkData(target, name, kwargs['install_dir'],
+ self.subproject, kwargs['install_tag'])
+ self.build.symlinks.append(l)
+ return l
+
+ @FeatureNew('structured_sources', '0.62.0')
+ @typed_pos_args('structured_sources', object, optargs=[dict])
+ @noKwargs
+ @noArgsFlattening
+ def func_structured_sources(
+ self, node: mparser.BaseNode,
+ args: T.Tuple[object, T.Optional[T.Dict[str, object]]],
+ kwargs: 'TYPE_kwargs') -> build.StructuredSources:
+ valid_types = (str, mesonlib.File, build.GeneratedList, build.CustomTarget, build.CustomTargetIndex)
+ sources: T.Dict[str, T.List[T.Union[mesonlib.File, 'build.GeneratedTypes']]] = collections.defaultdict(list)
+
+ for arg in mesonlib.listify(args[0]):
+ if not isinstance(arg, valid_types):
+ raise InvalidArguments(f'structured_sources: type "{type(arg)}" is not valid')
+ if isinstance(arg, str):
+ arg = mesonlib.File.from_source_file(self.environment.source_dir, self.subdir, arg)
+ sources[''].append(arg)
+ if args[1]:
+ if '' in args[1]:
+ raise InvalidArguments('structured_sources: keys to dictionary argument may not be an empty string.')
+ for k, v in args[1].items():
+ for arg in mesonlib.listify(v):
+ if not isinstance(arg, valid_types):
+ raise InvalidArguments(f'structured_sources: type "{type(arg)}" is not valid')
+ if isinstance(arg, str):
+ arg = mesonlib.File.from_source_file(self.environment.source_dir, self.subdir, arg)
+ sources[k].append(arg)
+ return build.StructuredSources(sources)
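+
+ # Typical DSL usage (file names are placeholders):
+ #   structured_sources(['main.rs'], {'utils' : ['utils/mod.rs']})
+ # which groups the plain list under the root ('') key and the dictionary
+ # entries under their respective subdirectory keys, as done above.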
+
+ @typed_pos_args('subdir', str)
+ @typed_kwargs(
+ 'subdir',
+ KwargInfo(
+ 'if_found',
+ ContainerTypeInfo(list, object),
+ validator=lambda a: 'Objects must have a found() method' if not all(hasattr(x, 'found') for x in a) else None,
+ since='0.44.0',
+ default=[],
+ listify=True,
+ ),
+ )
+ def func_subdir(self, node: mparser.BaseNode, args: T.Tuple[str], kwargs: 'kwtypes.Subdir') -> None:
+ mesonlib.check_direntry_issues(args)
+ if '..' in args[0]:
+ raise InvalidArguments('Subdir contains ..')
+ if self.subdir == '' and args[0] == self.subproject_dir:
+ raise InvalidArguments('Must not go into subprojects dir with subdir(), use subproject() instead.')
+ if self.subdir == '' and args[0].startswith('meson-'):
+ raise InvalidArguments('The "meson-" prefix is reserved and cannot be used for top-level subdir().')
+ if args[0] == '':
+ raise InvalidArguments("The argument given to subdir() is the empty string ''. This is prohibited.")
+ for i in kwargs['if_found']:
+ if not i.found():
+ return
+
+ prev_subdir = self.subdir
+ subdir = os.path.join(prev_subdir, args[0])
+ if os.path.isabs(subdir):
+ raise InvalidArguments('Subdir argument must be a relative path.')
+ absdir = os.path.join(self.environment.get_source_dir(), subdir)
+ symlinkless_dir = os.path.realpath(absdir)
+ build_file = os.path.join(symlinkless_dir, 'meson.build')
+ if build_file in self.processed_buildfiles:
+ raise InvalidArguments(f'Tried to enter directory "{subdir}", which has already been visited.')
+ self.processed_buildfiles.add(build_file)
+ self.subdir = subdir
+ os.makedirs(os.path.join(self.environment.build_dir, subdir), exist_ok=True)
+ buildfilename = os.path.join(self.subdir, environment.build_filename)
+ self.build_def_files.add(buildfilename)
+ absname = os.path.join(self.environment.get_source_dir(), buildfilename)
+ if not os.path.isfile(absname):
+ self.subdir = prev_subdir
+ raise InterpreterException(f"Non-existent build file '{buildfilename!s}'")
+ with open(absname, encoding='utf-8') as f:
+ code = f.read()
+ assert isinstance(code, str)
+ try:
+ codeblock = mparser.Parser(code, absname).parse()
+ except mesonlib.MesonException as me:
+ me.file = absname
+ raise me
+ try:
+ self.evaluate_codeblock(codeblock)
+ except SubdirDoneRequest:
+ pass
+ self.subdir = prev_subdir
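+
+ # Typical DSL usage (names are placeholders):
+ #   subdir('src')
+ #   subdir('tests', if_found : [gtest_dep])
+ # The second form silently skips the directory when any listed object reports
+ # found() == false, as handled by the if_found loop above.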
+
+ def _get_kwarg_install_mode(self, kwargs: T.Dict[str, T.Any]) -> T.Optional[FileMode]:
+ if kwargs.get('install_mode', None) is None:
+ return None
+ if isinstance(kwargs['install_mode'], FileMode):
+ return kwargs['install_mode']
+ install_mode: T.List[str] = []
+ mode = mesonlib.typeslistify(kwargs.get('install_mode', []), (str, int))
+ for m in mode:
+ # Arguments set to `false` are converted to None, i.e. "use the default"
+ if m is False:
+ m = None
+ install_mode.append(m)
+ if len(install_mode) > 3:
+ raise InvalidArguments('Keyword argument install_mode takes at '
+ 'most 3 arguments.')
+ if len(install_mode) > 0 and install_mode[0] is not None and \
+ not isinstance(install_mode[0], str):
+ raise InvalidArguments('Keyword argument install_mode requires the '
+ 'permissions arg to be a string or false')
+ return FileMode(*install_mode)
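+
+ # The install_mode values handled above look roughly like (illustrative):
+ #   install_mode : 'rw-r--r--'
+ #   install_mode : ['rwxr-xr-x', 'root', 'root']
+ #   install_mode : [false, 'owner', 'group']   # false leaves the permissions part at its default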
+
+ # The sticky bit on a regular file is either ignored outright on basically
+ # any modern OS, silently dropped (Solaris), or triggers an "illegal
+ # operation" error (FreeBSD). It was likely added "because it exists", but
+ # should never be used. In theory it is useful for directories, but we never
+ # apply modes to directories other than in install_emptydir.
+ def _warn_kwarg_install_mode_sticky(self, mode: FileMode) -> FileMode:
+ if mode.perms > 0 and mode.perms & stat.S_ISVTX:
+ mlog.deprecation('install_mode with the sticky bit on a file does not do anything and will '
+ 'be ignored since Meson 0.64.0', location=self.current_node)
+ perms = stat.filemode(mode.perms - stat.S_ISVTX)[1:]
+ return FileMode(perms, mode.owner, mode.group)
+ else:
+ return mode
+
+ @typed_pos_args('install_data', varargs=(str, mesonlib.File))
+ @typed_kwargs(
+ 'install_data',
+ KwargInfo('sources', ContainerTypeInfo(list, (str, mesonlib.File)), listify=True, default=[]),
+ KwargInfo('rename', ContainerTypeInfo(list, str), default=[], listify=True, since='0.46.0'),
+ INSTALL_MODE_KW.evolve(since='0.38.0'),
+ INSTALL_TAG_KW.evolve(since='0.60.0'),
+ INSTALL_DIR_KW,
+ PRESERVE_PATH_KW.evolve(since='0.64.0'),
+ )
+ def func_install_data(self, node: mparser.BaseNode,
+ args: T.Tuple[T.List['mesonlib.FileOrString']],
+ kwargs: 'kwtypes.FuncInstallData') -> build.Data:
+ sources = self.source_strings_to_files(args[0] + kwargs['sources'])
+ rename = kwargs['rename'] or None
+ if rename:
+ if len(rename) != len(sources):
+ raise InvalidArguments(
+ '"rename" and "sources" argument lists must be the same length if "rename" is given. '
+ f'Rename has {len(rename)} elements and sources has {len(sources)}.')
+
+ install_mode = self._warn_kwarg_install_mode_sticky(kwargs['install_mode'])
+ return self.install_data_impl(sources, kwargs['install_dir'], install_mode,
+ rename, kwargs['install_tag'],
+ preserve_path=kwargs['preserve_path'])
+
+ def install_data_impl(self, sources: T.List[mesonlib.File], install_dir: T.Optional[str],
+ install_mode: FileMode, rename: T.Optional[T.List[str]],
+ tag: T.Optional[str],
+ install_dir_name: T.Optional[str] = None,
+ install_data_type: T.Optional[str] = None,
+ preserve_path: bool = False) -> T.List[build.Data]:
+
+ """Just the implementation with no validation."""
+ idir = install_dir or ''
+ idir_name = install_dir_name or idir or '{datadir}'
+ if isinstance(idir_name, P_OBJ.OptionString):
+ idir_name = idir_name.optname
+ dirs = collections.defaultdict(list)
+ ret_data = []
+ if preserve_path:
+ for file in sources:
+ dirname = os.path.dirname(file.fname)
+ dirs[dirname].append(file)
+ else:
+ dirs[''].extend(sources)
+
+ for childdir, files in dirs.items():
+ d = build.Data(files, os.path.join(idir, childdir), os.path.join(idir_name, childdir),
+ install_mode, self.subproject, rename, tag, install_data_type)
+ ret_data.append(d)
+
+ self.build.data.extend(ret_data)
+ return ret_data
+
+ @typed_pos_args('install_subdir', str)
+ @typed_kwargs(
+ 'install_subdir',
+ KwargInfo('install_dir', str, required=True),
+ KwargInfo('strip_directory', bool, default=False),
+ KwargInfo('exclude_files', ContainerTypeInfo(list, str),
+ default=[], listify=True, since='0.42.0',
+ validator=lambda x: 'cannot be absolute' if any(os.path.isabs(d) for d in x) else None),
+ KwargInfo('exclude_directories', ContainerTypeInfo(list, str),
+ default=[], listify=True, since='0.42.0',
+ validator=lambda x: 'cannot be absolute' if any(os.path.isabs(d) for d in x) else None),
+ INSTALL_MODE_KW.evolve(since='0.38.0'),
+ INSTALL_TAG_KW.evolve(since='0.60.0'),
+ )
+ def func_install_subdir(self, node: mparser.BaseNode, args: T.Tuple[str],
+ kwargs: 'kwtypes.FuncInstallSubdir') -> build.InstallDir:
+ exclude = (set(kwargs['exclude_files']), set(kwargs['exclude_directories']))
+
+ srcdir = os.path.join(self.environment.source_dir, self.subdir, args[0])
+ if not os.path.isdir(srcdir) or not any(os.scandir(srcdir)):
+ FeatureNew.single_use('install_subdir with empty directory', '0.47.0', self.subproject, location=node)
+ FeatureDeprecated.single_use('install_subdir with empty directory', '0.60.0', self.subproject,
+ 'It worked by accident and is buggy. Use install_emptydir instead.', node)
+ install_mode = self._warn_kwarg_install_mode_sticky(kwargs['install_mode'])
+
+ idir_name = kwargs['install_dir']
+ if isinstance(idir_name, P_OBJ.OptionString):
+ idir_name = idir_name.optname
+
+ idir = build.InstallDir(
+ self.subdir,
+ args[0],
+ kwargs['install_dir'],
+ idir_name,
+ install_mode,
+ exclude,
+ kwargs['strip_directory'],
+ self.subproject,
+ install_tag=kwargs['install_tag'])
+ self.build.install_dirs.append(idir)
+ return idir
+
+ @noPosargs
+ @typed_kwargs(
+ 'configure_file',
+ DEPFILE_KW.evolve(since='0.52.0'),
+ INSTALL_MODE_KW.evolve(since='0.47.0'),
+ INSTALL_TAG_KW.evolve(since='0.60.0'),
+ KwargInfo('capture', bool, default=False, since='0.41.0'),
+ KwargInfo(
+ 'command',
+ (ContainerTypeInfo(list, (build.Executable, ExternalProgram, compilers.Compiler, mesonlib.File, str), allow_empty=False), NoneType),
+ listify=True,
+ ),
+ KwargInfo(
+ 'configuration',
+ (ContainerTypeInfo(dict, (str, int, bool)), build.ConfigurationData, NoneType),
+ ),
+ KwargInfo(
+ 'copy', bool, default=False, since='0.47.0',
+ deprecated='0.64.0', deprecated_message='Use fs.copyfile instead',
+ ),
+ KwargInfo('encoding', str, default='utf-8', since='0.47.0'),
+ KwargInfo('format', str, default='meson', since='0.46.0',
+ validator=in_set_validator({'meson', 'cmake', 'cmake@'})),
+ KwargInfo(
+ 'input',
+ ContainerTypeInfo(list, (mesonlib.File, str)),
+ listify=True,
+ default=[],
+ ),
+ # Cannot use shared implementation until None backwards compat is dropped
+ KwargInfo('install', (bool, NoneType), since='0.50.0'),
+ KwargInfo('install_dir', (str, bool), default='',
+ validator=lambda x: 'must be `false` if boolean' if x is True else None),
+ OUTPUT_KW,
+ KwargInfo('output_format', str, default='c', since='0.47.0',
+ validator=in_set_validator({'c', 'nasm'})),
+ )
+ def func_configure_file(self, node: mparser.BaseNode, args: T.List[TYPE_var],
+ kwargs: kwtypes.ConfigureFile):
+ actions = sorted(x for x in ['configuration', 'command', 'copy']
+ if kwargs[x] not in [None, False])
+ num_actions = len(actions)
+ if num_actions == 0:
+ raise InterpreterException('Must specify an action with one of these '
+ 'keyword arguments: \'configuration\', '
+ '\'command\', or \'copy\'.')
+ elif num_actions == 2:
+ raise InterpreterException('Must not specify both {!r} and {!r} '
+ 'keyword arguments since they are '
+ 'mutually exclusive.'.format(*actions))
+ elif num_actions == 3:
+ raise InterpreterException('Must specify only one of {!r}, {!r}, and '
+ '{!r} keyword arguments since they are '
+ 'mutually exclusive.'.format(*actions))
+
+ if kwargs['capture'] and not kwargs['command']:
+ raise InvalidArguments('configure_file: "capture" keyword requires "command" keyword.')
+
+ install_mode = self._warn_kwarg_install_mode_sticky(kwargs['install_mode'])
+
+ fmt = kwargs['format']
+ output_format = kwargs['output_format']
+ depfile = kwargs['depfile']
+
+ # Validate input
+ inputs = self.source_strings_to_files(kwargs['input'])
+ inputs_abs = []
+ for f in inputs:
+ if isinstance(f, mesonlib.File):
+ inputs_abs.append(f.absolute_path(self.environment.source_dir,
+ self.environment.build_dir))
+ self.add_build_def_file(f)
+ else:
+ raise InterpreterException('Inputs can only be strings or file objects')
+
+ # Validate output
+ output = kwargs['output']
+ if inputs_abs:
+ values = mesonlib.get_filenames_templates_dict(inputs_abs, None)
+ outputs = mesonlib.substitute_values([output], values)
+ output = outputs[0]
+ if depfile:
+ depfile = mesonlib.substitute_values([depfile], values)[0]
+ ofile_rpath = os.path.join(self.subdir, output)
+ if ofile_rpath in self.configure_file_outputs:
+ mesonbuildfile = os.path.join(self.subdir, 'meson.build')
+ current_call = f"{mesonbuildfile}:{self.current_lineno}"
+ first_call = "{}:{}".format(mesonbuildfile, self.configure_file_outputs[ofile_rpath])
+ mlog.warning('Output file', mlog.bold(ofile_rpath, True), 'for configure_file() at', current_call, 'overwrites configure_file() output at', first_call)
+ else:
+ self.configure_file_outputs[ofile_rpath] = self.current_lineno
+ (ofile_path, ofile_fname) = os.path.split(os.path.join(self.subdir, output))
+ ofile_abs = os.path.join(self.environment.build_dir, ofile_path, ofile_fname)
+
+ # Perform the appropriate action
+ if kwargs['configuration'] is not None:
+ conf = kwargs['configuration']
+ if isinstance(conf, dict):
+ FeatureNew.single_use('configure_file.configuration dictionary', '0.49.0', self.subproject, location=node)
+ for k, v in conf.items():
+ if not isinstance(v, (str, int, bool)):
+ raise InvalidArguments(
+ f'"configuration_data": initial value for dictionary key {k!r} must be "str | int | bool", not {v!r}')
+ conf = build.ConfigurationData(conf)
+ mlog.log('Configuring', mlog.bold(output), 'using configuration')
+ if len(inputs) > 1:
+ raise InterpreterException('At most one input file can be given in configuration mode')
+ if inputs:
+ os.makedirs(os.path.join(self.environment.build_dir, self.subdir), exist_ok=True)
+ file_encoding = kwargs['encoding']
+ missing_variables, confdata_useless = \
+ mesonlib.do_conf_file(inputs_abs[0], ofile_abs, conf,
+ fmt, file_encoding)
+ if missing_variables:
+ var_list = ", ".join(repr(m) for m in sorted(missing_variables))
+ mlog.warning(
+ f"The variable(s) {var_list} in the input file '{inputs[0]}' are not "
+ "present in the given configuration data.", location=node)
+ if confdata_useless:
+ ifbase = os.path.basename(inputs_abs[0])
+ tv = FeatureNew.get_target_version(self.subproject)
+ if FeatureNew.check_version(tv, '0.47.0'):
+ mlog.warning('Got an empty configuration_data() object and found no '
+ f'substitutions in the input file {ifbase!r}. If you want to '
+ 'copy a file to the build dir, use the \'copy:\' keyword '
+ 'argument added in 0.47.0', location=node)
+ else:
+ mesonlib.dump_conf_header(ofile_abs, conf, output_format)
+ conf.used = True
+ elif kwargs['command'] is not None:
+ if len(inputs) > 1:
+ FeatureNew.single_use('multiple inputs in configure_file()', '0.52.0', self.subproject, location=node)
+ # We use absolute paths for input and output here because the cwd
+ # that the command is run from is 'unspecified', so it could change.
+ # Currently it's builddir/subdir for in_builddir else srcdir/subdir.
+ values = mesonlib.get_filenames_templates_dict(inputs_abs, [ofile_abs])
+ if depfile:
+ depfile = os.path.join(self.environment.get_scratch_dir(), depfile)
+ values['@DEPFILE@'] = depfile
+ # Substitute @INPUT@, @OUTPUT@, etc here.
+ _cmd = mesonlib.substitute_values(kwargs['command'], values)
+ mlog.log('Configuring', mlog.bold(output), 'with command')
+ cmd, *args = _cmd
+ res = self.run_command_impl(node, (cmd, args),
+ {'capture': True, 'check': True, 'env': build.EnvironmentVariables()},
+ True)
+ if kwargs['capture']:
+ dst_tmp = ofile_abs + '~'
+ file_encoding = kwargs['encoding']
+ with open(dst_tmp, 'w', encoding=file_encoding) as f:
+ f.writelines(res.stdout)
+ if inputs_abs:
+ shutil.copymode(inputs_abs[0], dst_tmp)
+ mesonlib.replace_if_different(ofile_abs, dst_tmp)
+ if depfile:
+ mlog.log('Reading depfile:', mlog.bold(depfile))
+ with open(depfile, encoding='utf-8') as f:
+ df = DepFile(f.readlines())
+ deps = df.get_all_dependencies(ofile_fname)
+ for dep in deps:
+ self.add_build_def_file(dep)
+
+ elif kwargs['copy']:
+ if len(inputs_abs) != 1:
+ raise InterpreterException('Exactly one input file must be given in copy mode')
+ os.makedirs(os.path.join(self.environment.build_dir, self.subdir), exist_ok=True)
+ shutil.copy2(inputs_abs[0], ofile_abs)
+
+ # Install the file if requested. We check for the empty string for
+ # backwards compatibility; that was the behaviour before 0.45.0,
+ # so preserve it.
+ idir = kwargs['install_dir']
+ if idir is False:
+ idir = ''
+ FeatureDeprecated.single_use('configure_file install_dir: false', '0.50.0',
+ self.subproject, 'Use the `install:` kwarg instead', location=node)
+ install = kwargs['install'] if kwargs['install'] is not None else idir != ''
+ if install:
+ if not idir:
+ raise InterpreterException(
+ '"install_dir" must be specified when "install" is true in configure_file()')
+ idir_name = idir
+ if isinstance(idir_name, P_OBJ.OptionString):
+ idir_name = idir_name.optname
+ cfile = mesonlib.File.from_built_file(ofile_path, ofile_fname)
+ install_tag = kwargs['install_tag']
+ self.build.data.append(build.Data([cfile], idir, idir_name, install_mode, self.subproject,
+ install_tag=install_tag, data_type='configure'))
+ return mesonlib.File.from_built_file(self.subdir, output)
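+
+ # The three mutually exclusive modes validated at the top of this function
+ # correspond to DSL calls such as (file names and gen_prog are placeholders):
+ #   configure_file(input : 'config.h.in', output : 'config.h', configuration : conf)
+ #   configure_file(output : 'version.h', command : [gen_prog, '@OUTPUT@'])
+ #   configure_file(input : 'data.txt', output : 'data.txt', copy : true)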
+
+ def extract_incdirs(self, kwargs, key: str = 'include_directories'):
+ prospectives = extract_as_list(kwargs, key)
+ if key == 'include_directories':
+ for i in prospectives:
+ if isinstance(i, str):
+ FeatureNew.single_use('include_directories kwarg of type string', '0.50.0', self.subproject,
+ f'Use include_directories({i!r}) instead', location=self.current_node)
+ break
+
+ result = []
+ for p in prospectives:
+ if isinstance(p, build.IncludeDirs):
+ result.append(p)
+ elif isinstance(p, str):
+ result.append(self.build_incdir_object([p]))
+ else:
+ raise InterpreterException('Include directory objects can only be created from strings or include directories.')
+ return result
+
+ @typed_pos_args('include_directories', varargs=str)
+ @typed_kwargs('include_directories', KwargInfo('is_system', bool, default=False))
+ def func_include_directories(self, node: mparser.BaseNode, args: T.Tuple[T.List[str]],
+ kwargs: 'kwtypes.FuncIncludeDirectories') -> build.IncludeDirs:
+ return self.build_incdir_object(args[0], kwargs['is_system'])
+
+ def build_incdir_object(self, incdir_strings: T.List[str], is_system: bool = False) -> build.IncludeDirs:
+ if not isinstance(is_system, bool):
+ raise InvalidArguments('Is_system must be boolean.')
+ src_root = self.environment.get_source_dir()
+ build_root = self.environment.get_build_dir()
+ absbase_src = os.path.join(src_root, self.subdir)
+ absbase_build = os.path.join(build_root, self.subdir)
+
+ for a in incdir_strings:
+ if a.startswith(src_root):
+ raise InvalidArguments(textwrap.dedent('''\
+ Tried to form an absolute path to a dir in the source tree.
+ You should not do that but use relative paths instead, for
+ directories that are part of your project.
+
+ To get include path to any directory relative to the current dir do
+
+ incdir = include_directories(dirname)
+
+ After this incdir will contain both the current source dir as well as the
+ corresponding build dir. It can then be used in any subdirectory and
+ Meson will take care of all the busywork to make paths work.
+
+ Dirname can even be '.' to mark the current directory. Though you should
+ remember that the current source and build directories are always
+ put in the include directories by default so you only need to do
+ include_directories('.') if you intend to use the result in a
+ different subdirectory.
+
+ Note that this error message can also be triggered by
+ external dependencies being installed within your source
+ tree - it's not recommended to do this.
+ '''))
+ else:
+ try:
+ self.validate_within_subproject(self.subdir, a)
+ except InterpreterException:
+ mlog.warning('include_directories sandbox violation!', location=self.current_node)
+ print(textwrap.dedent(f'''\
+ The project is trying to access the directory {a!r} which belongs to a different
+ subproject. This is a problem as it hardcodes the relative paths of these two projects.
+ This makes it impossible to compile the project in any other directory layout and also
+ prevents the subproject from changing its own directory layout.
+
+ Instead of poking directly at the internals the subproject should be executed and
+ it should set a variable that the caller can then use. Something like:
+
+ # In subproject
+ some_dep = declare_dependency(include_directories: include_directories('include'))
+
+ # In subproject wrap file
+ [provide]
+ some = some_dep
+
+ # In parent project
+ some_dep = dependency('some')
+ executable(..., dependencies: [some_dep])
+
+ This warning will become a hard error in a future Meson release.
+ '''))
+ absdir_src = os.path.join(absbase_src, a)
+ absdir_build = os.path.join(absbase_build, a)
+ if not os.path.isdir(absdir_src) and not os.path.isdir(absdir_build):
+ raise InvalidArguments(f'Include dir {a} does not exist.')
+ i = build.IncludeDirs(self.subdir, incdir_strings, is_system)
+ return i
+
+ @typed_pos_args('add_test_setup', str)
+ @typed_kwargs(
+ 'add_test_setup',
+ KwargInfo('exe_wrapper', ContainerTypeInfo(list, (str, ExternalProgram)), listify=True, default=[]),
+ KwargInfo('gdb', bool, default=False),
+ KwargInfo('timeout_multiplier', int, default=1),
+ KwargInfo('exclude_suites', ContainerTypeInfo(list, str), listify=True, default=[], since='0.57.0'),
+ KwargInfo('is_default', bool, default=False, since='0.49.0'),
+ ENV_KW,
+ )
+ def func_add_test_setup(self, node: mparser.BaseNode, args: T.Tuple[str], kwargs: 'kwtypes.AddTestSetup') -> None:
+ setup_name = args[0]
+ if re.fullmatch('([_a-zA-Z][_0-9a-zA-Z]*:)?[_a-zA-Z][_0-9a-zA-Z]*', setup_name) is None:
+ raise InterpreterException('Setup name may only contain alphanumeric characters and underscores, optionally prefixed with "projectname:".')
+ if ":" not in setup_name:
+ setup_name = f'{(self.subproject if self.subproject else self.build.project_name)}:{setup_name}'
+
+ exe_wrapper: T.List[str] = []
+ for i in kwargs['exe_wrapper']:
+ if isinstance(i, str):
+ exe_wrapper.append(i)
+ else:
+ if not i.found():
+ raise InterpreterException('Tried to use non-found executable.')
+ exe_wrapper += i.get_command()
+
+ timeout_multiplier = kwargs['timeout_multiplier']
+ if timeout_multiplier <= 0:
+ FeatureNew('add_test_setup() timeout_multiplier <= 0', '0.57.0').use(self.subproject)
+
+ if kwargs['is_default']:
+ if self.build.test_setup_default_name is not None:
+ raise InterpreterException(f'{self.build.test_setup_default_name!r} is already set as default. '
+ 'is_default can be set to true only once')
+ self.build.test_setup_default_name = setup_name
+ self.build.test_setups[setup_name] = build.TestSetup(exe_wrapper, kwargs['gdb'], timeout_multiplier, kwargs['env'],
+ kwargs['exclude_suites'])
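+
+ # Typical DSL usage (wrapper command is a placeholder):
+ #   add_test_setup('valgrind',
+ #     exe_wrapper : ['valgrind', '--error-exitcode=1'],
+ #     timeout_multiplier : 3)
+ # Within a project named 'foo' this is stored as the setup 'foo:valgrind',
+ # matching the name qualification above.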
+
+ @typed_pos_args('add_global_arguments', varargs=str)
+ @typed_kwargs('add_global_arguments', NATIVE_KW, LANGUAGE_KW)
+ def func_add_global_arguments(self, node: mparser.FunctionNode, args: T.Tuple[T.List[str]], kwargs: 'kwtypes.FuncAddProjectArgs') -> None:
+ self._add_global_arguments(node, self.build.global_args[kwargs['native']], args[0], kwargs)
+
+ @typed_pos_args('add_global_link_arguments', varargs=str)
+ @typed_kwargs('add_global_link_arguments', NATIVE_KW, LANGUAGE_KW)
+ def func_add_global_link_arguments(self, node: mparser.FunctionNode, args: T.Tuple[T.List[str]], kwargs: 'kwtypes.FuncAddProjectArgs') -> None:
+ self._add_global_arguments(node, self.build.global_link_args[kwargs['native']], args[0], kwargs)
+
+ @typed_pos_args('add_project_arguments', varargs=str)
+ @typed_kwargs('add_project_arguments', NATIVE_KW, LANGUAGE_KW)
+ def func_add_project_arguments(self, node: mparser.FunctionNode, args: T.Tuple[T.List[str]], kwargs: 'kwtypes.FuncAddProjectArgs') -> None:
+ self._add_project_arguments(node, self.build.projects_args[kwargs['native']], args[0], kwargs)
+
+ @typed_pos_args('add_project_link_arguments', varargs=str)
+ @typed_kwargs('add_project_link_arguments', NATIVE_KW, LANGUAGE_KW)
+ def func_add_project_link_arguments(self, node: mparser.FunctionNode, args: T.Tuple[T.List[str]], kwargs: 'kwtypes.FuncAddProjectArgs') -> None:
+ self._add_project_arguments(node, self.build.projects_link_args[kwargs['native']], args[0], kwargs)
+
+ @FeatureNew('add_project_dependencies', '0.63.0')
+ @typed_pos_args('add_project_dependencies', varargs=dependencies.Dependency)
+ @typed_kwargs('add_project_dependencies', NATIVE_KW, LANGUAGE_KW)
+ def func_add_project_dependencies(self, node: mparser.FunctionNode, args: T.Tuple[T.List[dependencies.Dependency]], kwargs: 'kwtypes.FuncAddProjectArgs') -> None:
+ for_machine = kwargs['native']
+ for lang in kwargs['language']:
+ if lang not in self.compilers[for_machine]:
+ raise InvalidCode(f'add_project_dependencies() called before add_language() for language "{lang}"')
+
+ for d in dependencies.get_leaf_external_dependencies(args[0]):
+ compile_args = list(d.get_compile_args())
+ system_incdir = d.get_include_type() == 'system'
+ for i in d.get_include_dirs():
+ for lang in kwargs['language']:
+ comp = self.coredata.compilers[for_machine][lang]
+ for idir in i.to_string_list(self.environment.get_source_dir(), self.environment.get_build_dir()):
+ compile_args.extend(comp.get_include_args(idir, system_incdir))
+
+ self._add_project_arguments(node, self.build.projects_args[for_machine], compile_args, kwargs)
+ self._add_project_arguments(node, self.build.projects_link_args[for_machine], d.get_link_args(), kwargs)
+
+ def _warn_about_builtin_args(self, args: T.List[str]) -> None:
+ # -Wpedantic is deliberately not included, since some people want to use it but not use -Wextra
+ # see e.g.
+ # https://github.com/mesonbuild/meson/issues/3275#issuecomment-641354956
+ # https://github.com/mesonbuild/meson/issues/3742
+ warnargs = ('/W1', '/W2', '/W3', '/W4', '/Wall', '-Wall', '-Wextra')
+ optargs = ('-O0', '-O2', '-O3', '-Os', '-Oz', '/O1', '/O2', '/Os')
+ for arg in args:
+ if arg in warnargs:
+ mlog.warning(f'Consider using the built-in warning_level option instead of using "{arg}".',
+ location=self.current_node)
+ elif arg in optargs:
+ mlog.warning(f'Consider using the built-in optimization level instead of using "{arg}".',
+ location=self.current_node)
+ elif arg == '-Werror':
+ mlog.warning(f'Consider using the built-in werror option instead of using "{arg}".',
+ location=self.current_node)
+ elif arg == '-g':
+ mlog.warning(f'Consider using the built-in debug option instead of using "{arg}".',
+ location=self.current_node)
+ elif arg.startswith('-fsanitize'):
+ mlog.warning(f'Consider using the built-in option for sanitizers instead of using "{arg}".',
+ location=self.current_node)
+ elif arg.startswith('-std=') or arg.startswith('/std:'):
+ mlog.warning(f'Consider using the built-in option for language standard version instead of using "{arg}".',
+ location=self.current_node)
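+
+ # For example (flags are illustrative), a call like
+ #   add_project_arguments('-O2', '-Wall', '-std=c11', language : 'c')
+ # emits three of the warnings above, pointing at the optimization,
+ # warning_level and language-standard built-in options respectively.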
+
+ def _add_global_arguments(self, node: mparser.FunctionNode, argsdict: T.Dict[str, T.List[str]],
+ args: T.List[str], kwargs: 'kwtypes.FuncAddProjectArgs') -> None:
+ if self.is_subproject():
+ msg = f'Function \'{node.func_name}\' cannot be used in subprojects because ' \
+ 'there is no way to make that reliable.\nPlease only call ' \
+ 'this if is_subproject() returns false. Alternatively, ' \
+ 'define a variable that\ncontains your language-specific ' \
+ 'arguments and add it to the appropriate *_args kwarg ' \
+ 'in each target.'
+ raise InvalidCode(msg)
+ frozen = self.project_args_frozen or self.global_args_frozen
+ self._add_arguments(node, argsdict, frozen, args, kwargs)
+
+ def _add_project_arguments(self, node: mparser.FunctionNode, argsdict: T.Dict[str, T.Dict[str, T.List[str]]],
+ args: T.List[str], kwargs: 'kwtypes.FuncAddProjectArgs') -> None:
+ if self.subproject not in argsdict:
+ argsdict[self.subproject] = {}
+ self._add_arguments(node, argsdict[self.subproject],
+ self.project_args_frozen, args, kwargs)
+
+ def _add_arguments(self, node: mparser.FunctionNode, argsdict: T.Dict[str, T.List[str]],
+ args_frozen: bool, args: T.List[str], kwargs: 'kwtypes.FuncAddProjectArgs') -> None:
+ if args_frozen:
+ msg = f'Tried to use \'{node.func_name}\' after a build target has been declared.\n' \
+ 'This is not permitted. Please declare all arguments before your targets.'
+ raise InvalidCode(msg)
+
+ self._warn_about_builtin_args(args)
+
+ for lang in kwargs['language']:
+ argsdict[lang] = argsdict.get(lang, []) + args
+
+ @noArgsFlattening
+ @typed_pos_args('environment', optargs=[(str, list, dict)])
+ @typed_kwargs('environment', ENV_METHOD_KW, ENV_SEPARATOR_KW.evolve(since='0.62.0'))
+ def func_environment(self, node: mparser.FunctionNode, args: T.Tuple[T.Union[None, str, T.List['TYPE_var'], T.Dict[str, 'TYPE_var']]],
+ kwargs: 'TYPE_kwargs') -> build.EnvironmentVariables:
+ init = args[0]
+ if init is not None:
+ FeatureNew.single_use('environment positional arguments', '0.52.0', self.subproject, location=node)
+ msg = ENV_KW.validator(init)
+ if msg:
+ raise InvalidArguments(f'"environment": {msg}')
+ if isinstance(init, dict) and any(i for i in init.values() if isinstance(i, list)):
+ FeatureNew.single_use('List of string in dictionary value', '0.62.0', self.subproject, location=node)
+ return env_convertor_with_method(init, kwargs['method'], kwargs['separator'])
+ return build.EnvironmentVariables()
+
+ @typed_pos_args('join_paths', varargs=str, min_varargs=1)
+ @noKwargs
+ def func_join_paths(self, node: mparser.BaseNode, args: T.Tuple[T.List[str]], kwargs: 'TYPE_kwargs') -> str:
+ parts = args[0]
+ other = os.path.join('', *parts[1:]).replace('\\', '/')
+ ret = os.path.join(*parts).replace('\\', '/')
+ if isinstance(parts[0], P_OBJ.DependencyVariableString) and '..' not in other:
+ return P_OBJ.DependencyVariableString(ret)
+ elif isinstance(parts[0], P_OBJ.OptionString):
+ name = os.path.join(parts[0].optname, other)
+ return P_OBJ.OptionString(ret, name)
+ else:
+ return ret
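+
+ # Behaves like os.path.join with '/' separators, e.g. (paths are placeholders):
+ #   join_paths('share', 'meson', 'docs')   # -> 'share/meson/docs'
+ #   join_paths('/usr', '/opt/bin')         # -> '/opt/bin' (a later absolute part wins)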
+
+ def run(self) -> None:
+ super().run()
+ mlog.log('Build targets in project:', mlog.bold(str(len(self.build.targets))))
+ FeatureNew.report(self.subproject)
+ FeatureDeprecated.report(self.subproject)
+ if not self.is_subproject():
+ self.print_extra_warnings()
+ self._print_summary()
+
+ def print_extra_warnings(self) -> None:
+ # TODO cross compilation
+ for c in self.coredata.compilers.host.values():
+ if c.get_id() == 'clang':
+ self.check_clang_asan_lundef()
+ break
+
+ def check_clang_asan_lundef(self) -> None:
+ if OptionKey('b_lundef') not in self.coredata.options:
+ return
+ if OptionKey('b_sanitize') not in self.coredata.options:
+ return
+ if (self.coredata.options[OptionKey('b_lundef')].value and
+ self.coredata.options[OptionKey('b_sanitize')].value != 'none'):
+ mlog.warning('''Trying to use {} sanitizer on Clang with b_lundef.
+This will probably not work.
+Try setting b_lundef to false instead.'''.format(self.coredata.options[OptionKey('b_sanitize')].value),
+ location=self.current_node)
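+
+ # e.g. configuring with clang and `-Db_sanitize=address` while b_lundef is
+ # true triggers the warning above; setting b_lundef=false avoids it.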
+
+ # Check that the indicated file is within the same subproject
+ # as we currently are. This is to stop people doing
+ # nasty things like:
+ #
+ # f = files('../../master_src/file.c')
+ #
+ # Note that this is validated only when the file
+ # object is generated. The result can be used in a different
+ # subproject than it is defined in (due to e.g. a
+ # declare_dependency).
+ def validate_within_subproject(self, subdir, fname):
+ srcdir = Path(self.environment.source_dir)
+ builddir = Path(self.environment.build_dir)
+ if isinstance(fname, P_OBJ.DependencyVariableString):
+ def validate_installable_file(fpath: Path) -> bool:
+ installablefiles: T.Set[Path] = set()
+ for d in self.build.data:
+ for s in d.sources:
+ installablefiles.add(Path(s.absolute_path(srcdir, builddir)))
+ installabledirs = [str(Path(srcdir, s.source_subdir)) for s in self.build.install_dirs]
+ if fpath in installablefiles:
+ return True
+ for d in installabledirs:
+ if str(fpath).startswith(d):
+ return True
+ return False
+
+ norm = Path(fname)
+ # variables built from a dep.get_variable are allowed to refer to
+ # subproject files, as long as they are scheduled to be installed.
+ if validate_installable_file(norm):
+ return
+ norm = Path(os.path.abspath(Path(srcdir, subdir, fname)))
+ if os.path.isdir(norm):
+ inputtype = 'directory'
+ else:
+ inputtype = 'file'
+ if InterpreterRuleRelaxation.ALLOW_BUILD_DIR_FILE_REFFERENCES in self.relaxations and builddir in norm.parents:
+ return
+ if srcdir not in norm.parents:
+ # Grabbing files outside the source tree is ok.
+ # This is for vendor stuff like:
+ #
+ # /opt/vendorsdk/src/file_with_license_restrictions.c
+ return
+ project_root = Path(srcdir, self.root_subdir)
+ subproject_dir = project_root / self.subproject_dir
+ if norm == project_root:
+ return
+ if project_root not in norm.parents:
+ raise InterpreterException(f'Sandbox violation: Tried to grab {inputtype} {norm.name} outside current (sub)project.')
+ if subproject_dir == norm or subproject_dir in norm.parents:
+ raise InterpreterException(f'Sandbox violation: Tried to grab {inputtype} {norm.name} from a nested subproject.')
+
+ @T.overload
+ def source_strings_to_files(self, sources: T.List['mesonlib.FileOrString'], strict: bool = True) -> T.List['mesonlib.File']: ...
+
+ @T.overload
+ def source_strings_to_files(self, sources: T.List['mesonlib.FileOrString'], strict: bool = False) -> T.List['mesonlib.FileOrString']: ... # noqa: F811
+
+ @T.overload
+    def source_strings_to_files(self, sources: T.List[T.Union[mesonlib.FileOrString, build.GeneratedTypes]]) -> T.List[T.Union[mesonlib.File, build.GeneratedTypes]]: ... # noqa: F811
+
+ @T.overload
+ def source_strings_to_files(self, sources: T.List['SourceInputs'], strict: bool = True) -> T.List['SourceOutputs']: ... # noqa: F811
+
+ def source_strings_to_files(self, sources: T.List['SourceInputs'], strict: bool = True) -> T.List['SourceOutputs']: # noqa: F811
+ """Lower inputs to a list of Targets and Files, replacing any strings.
+
+ :param sources: A raw (Meson DSL) list of inputs (targets, files, and
+ strings)
+ :raises InterpreterException: if any of the inputs are of an invalid type
+ :return: A list of Targets and Files
+ """
+ mesonlib.check_direntry_issues(sources)
+ if not isinstance(sources, list):
+ sources = [sources]
+ results: T.List['SourceOutputs'] = []
+ for s in sources:
+ if isinstance(s, str):
+ if not strict and s.startswith(self.environment.get_build_dir()):
+ results.append(s)
+ mlog.warning(f'Source item {s!r} cannot be converted to File object, because it is a generated file. '
+ 'This will become a hard error in the future.', location=self.current_node)
+ else:
+ self.validate_within_subproject(self.subdir, s)
+ results.append(mesonlib.File.from_source_file(self.environment.source_dir, self.subdir, s))
+ elif isinstance(s, mesonlib.File):
+ results.append(s)
+ elif isinstance(s, (build.GeneratedList, build.BuildTarget,
+ build.CustomTargetIndex, build.CustomTarget,
+ build.ExtractedObjects, build.StructuredSources)):
+ results.append(s)
+ else:
+ raise InterpreterException(f'Source item is {s!r} instead of '
+ 'string or File-type object')
+ return results
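+
+    # Rough illustrative example of the lowering performed above, using
+    # hypothetical names: a DSL list such as
+    #   ['main.c', files('util.c')[0], some_custom_target]
+    # comes out as
+    #   [File('main.c'), File('util.c'), some_custom_target]
+    # and anything of an unsupported type raises InterpreterException.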
+
+ @staticmethod
+ def validate_forbidden_targets(name: str) -> None:
+ if name.startswith('meson-internal__'):
+ raise InvalidArguments("Target names starting with 'meson-internal__' are reserved "
+ "for Meson's internal use. Please rename.")
+ if name.startswith('meson-') and '.' not in name:
+ raise InvalidArguments("Target names starting with 'meson-' and without a file extension "
+ "are reserved for Meson's internal use. Please rename.")
+ if name in coredata.FORBIDDEN_TARGET_NAMES:
+ raise InvalidArguments(f"Target name '{name}' is reserved for Meson's "
+ "internal use. Please rename.")
+
+ def add_target(self, name: str, tobj: build.Target) -> None:
+ if name == '':
+ raise InterpreterException('Target name must not be empty.')
+ if name.strip() == '':
+ raise InterpreterException('Target name must not consist only of whitespace.')
+ if has_path_sep(name):
+ pathseg = os.path.join(self.subdir, os.path.split(name)[0])
+ if os.path.exists(os.path.join(self.source_root, pathseg)):
+ raise InvalidArguments(textwrap.dedent(f'''\
+ Target "{name}" has a path segment pointing to directory "{pathseg}". This is an error.
+ To define a target that builds in that directory you must define it
+ in the meson.build file in that directory.
+ '''))
+ self.validate_forbidden_targets(name)
+        # To permit an executable and a library to have the
+        # same name, such as "foo.exe" and "libfoo.a", targets are
+        # keyed by their unique id rather than by their name.
+ idname = tobj.get_id()
+ if idname in self.build.targets:
+ raise InvalidCode(f'Tried to create target "{name}", but a target of that name already exists.')
+
+ if isinstance(tobj, build.BuildTarget):
+ missing_languages = tobj.process_compilers()
+ self.add_languages(missing_languages, True, tobj.for_machine)
+ tobj.process_compilers_late(missing_languages)
+ self.add_stdlib_info(tobj)
+
+ self.build.targets[idname] = tobj
+ if idname not in self.coredata.target_guids:
+ self.coredata.target_guids[idname] = str(uuid.uuid4()).upper()
+
+ @FeatureNew('both_libraries', '0.46.0')
+ def build_both_libraries(self, node, args, kwargs):
+ shared_lib = self.build_target(node, args, kwargs, build.SharedLibrary)
+ static_lib = self.build_target(node, args, kwargs, build.StaticLibrary)
+
+ # Check if user forces non-PIC static library.
+ pic = True
+ key = OptionKey('b_staticpic')
+ if 'pic' in kwargs:
+ pic = kwargs['pic']
+ elif key in self.environment.coredata.options:
+ pic = self.environment.coredata.options[key].value
+
+ if self.backend.name == 'xcode':
+ # Xcode is a bit special in that you can't (at least for the moment)
+ # form a library only from object file inputs. The simple but inefficient
+ # solution is to use the sources directly. This will lead to them being
+ # built twice. This is unfortunate and slow, but at least it works.
+ # Feel free to submit patches to get this fixed if it is an
+ # issue for you.
+ reuse_object_files = False
+ else:
+ reuse_object_files = pic
+
+ if reuse_object_files:
+ # Replace sources with objects from the shared library to avoid
+ # building them twice. We post-process the static library instead of
+ # removing sources from args because sources could also come from
+ # any InternalDependency, see BuildTarget.add_deps().
+ static_lib.objects.append(build.ExtractedObjects(shared_lib, shared_lib.sources, shared_lib.generated, []))
+ static_lib.sources = []
+ static_lib.generated = []
+            # Compilers with no corresponding sources confuse the backend.
+            # Keep only the compilers used for linking.
+ static_lib.compilers = {k: v for k, v in static_lib.compilers.items() if k in compilers.clink_langs}
+
+ return build.BothLibraries(shared_lib, static_lib)
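+
+    # Rough illustrative sketch of the DSL feature implemented above, with
+    # hypothetical target and source names:
+    #
+    #   lib = both_libraries('foo', 'foo.c')
+    #   dep = declare_dependency(link_with: lib.get_static_lib())
+    #
+    # With b_staticpic=true (the default) and a non-Xcode backend, the static
+    # half reuses the shared half's object files instead of compiling twice.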
+
+ def build_library(self, node, args, kwargs):
+ default_library = self.coredata.get_option(OptionKey('default_library', subproject=self.subproject))
+ if default_library == 'shared':
+ return self.build_target(node, args, kwargs, build.SharedLibrary)
+ elif default_library == 'static':
+ return self.build_target(node, args, kwargs, build.StaticLibrary)
+ elif default_library == 'both':
+ return self.build_both_libraries(node, args, kwargs)
+ else:
+ raise InterpreterException(f'Unknown default_library value: {default_library}.')
+
+ def build_target(self, node: mparser.BaseNode, args, kwargs, targetclass):
+ @FeatureNewKwargs('build target', '0.42.0', ['rust_crate_type', 'build_rpath', 'implicit_include_directories'])
+ @FeatureNewKwargs('build target', '0.41.0', ['rust_args'])
+ @FeatureNewKwargs('build target', '0.38.0', ['build_by_default'])
+ @FeatureNewKwargs('build target', '0.48.0', ['gnu_symbol_visibility'])
+ def build_target_decorator_caller(self, node, args, kwargs):
+ return True
+
+ build_target_decorator_caller(self, node, args, kwargs)
+
+ if not args:
+ raise InterpreterException('Target does not have a name.')
+ name, *sources = args
+ for_machine = self.machine_from_native_kwarg(kwargs)
+ if 'sources' in kwargs:
+ sources += listify(kwargs['sources'])
+ sources = self.source_strings_to_files(sources)
+ objs = extract_as_list(kwargs, 'objects')
+ kwargs['dependencies'] = extract_as_list(kwargs, 'dependencies')
+ kwargs['install_mode'] = self._get_kwarg_install_mode(kwargs)
+ if 'extra_files' in kwargs:
+ ef = extract_as_list(kwargs, 'extra_files')
+ kwargs['extra_files'] = self.source_strings_to_files(ef)
+ self.check_sources_exist(os.path.join(self.source_root, self.subdir), sources)
+ if targetclass not in {build.Executable, build.SharedLibrary, build.SharedModule, build.StaticLibrary, build.Jar}:
+ mlog.debug('Unknown target type:', str(targetclass))
+ raise RuntimeError('Unreachable code')
+ self.kwarg_strings_to_includedirs(kwargs)
+
+ # Filter out kwargs from other target types. For example 'soversion'
+ # passed to library() when default_library == 'static'.
+ kwargs = {k: v for k, v in kwargs.items() if k in targetclass.known_kwargs}
+
+ srcs: T.List['SourceInputs'] = []
+ struct: T.Optional[build.StructuredSources] = build.StructuredSources()
+ for s in sources:
+ if isinstance(s, build.StructuredSources):
+ struct = struct + s
+ else:
+ srcs.append(s)
+
+ if not struct:
+ struct = None
+ else:
+ # Validate that we won't end up with two outputs with the same name.
+            # i.e., don't allow:
+ # [structured_sources('foo/bar.rs'), structured_sources('bar/bar.rs')]
+ for v in struct.sources.values():
+ outputs: T.Set[str] = set()
+ for f in v:
+ o: T.List[str]
+ if isinstance(f, str):
+ o = [os.path.basename(f)]
+ elif isinstance(f, mesonlib.File):
+ o = [f.fname]
+ else:
+ o = f.get_outputs()
+ conflicts = outputs.intersection(o)
+ if conflicts:
+ raise InvalidArguments.from_node(
+ f"Conflicting sources in structured sources: {', '.join(sorted(conflicts))}",
+ node=node)
+ outputs.update(o)
+
+ kwargs['include_directories'] = self.extract_incdirs(kwargs)
+ target = targetclass(name, self.subdir, self.subproject, for_machine, srcs, struct, objs,
+ self.environment, self.compilers[for_machine], kwargs)
+ target.project_version = self.project_version
+
+ self.add_target(name, target)
+ self.project_args_frozen = True
+ return target
+
+ def kwarg_strings_to_includedirs(self, kwargs):
+ if 'd_import_dirs' in kwargs:
+ items = mesonlib.extract_as_list(kwargs, 'd_import_dirs')
+ cleaned_items = []
+ for i in items:
+ if isinstance(i, str):
+                    # Backwards compatibility. This was permitted, so we must keep
+                    # supporting it for a few releases while people transition to
+                    # "correct" path declarations.
+ if os.path.normpath(i).startswith(self.environment.get_source_dir()):
+ mlog.warning('''Building a path to the source dir is not supported. Use a relative path instead.
+This will become a hard error in the future.''', location=self.current_node)
+ i = os.path.relpath(i, os.path.join(self.environment.get_source_dir(), self.subdir))
+ i = self.build_incdir_object([i])
+ cleaned_items.append(i)
+ kwargs['d_import_dirs'] = cleaned_items
+
+ def add_stdlib_info(self, target):
+ for l in target.compilers.keys():
+ dep = self.build.stdlibs[target.for_machine].get(l, None)
+ if dep:
+ target.add_deps(dep)
+
+ def check_sources_exist(self, subdir, sources):
+ for s in sources:
+ if not isinstance(s, str):
+ continue # This means a generated source and they always exist.
+ fname = os.path.join(subdir, s)
+ if not os.path.isfile(fname):
+ raise InterpreterException(f'Tried to add non-existing source file {s}.')
+
+ # Only permit object extraction from the same subproject
+ def validate_extraction(self, buildtarget: mesonlib.HoldableObject) -> None:
+ if self.subproject != buildtarget.subproject:
+ raise InterpreterException('Tried to extract objects from a different subproject.')
+
+ def is_subproject(self) -> bool:
+ return self.subproject != ''
+
+ @typed_pos_args('set_variable', str, object)
+ @noKwargs
+ @noArgsFlattening
+ @noSecondLevelHolderResolving
+ def func_set_variable(self, node: mparser.BaseNode, args: T.Tuple[str, object], kwargs: 'TYPE_kwargs') -> None:
+ varname, value = args
+ self.set_variable(varname, value, holderify=True)
+
+ @typed_pos_args('get_variable', (str, Disabler), optargs=[object])
+ @noKwargs
+ @noArgsFlattening
+ @unholder_return
+ def func_get_variable(self, node: mparser.BaseNode, args: T.Tuple[T.Union[str, Disabler], T.Optional[object]],
+ kwargs: 'TYPE_kwargs') -> 'TYPE_var':
+ varname, fallback = args
+ if isinstance(varname, Disabler):
+ return varname
+
+ try:
+ return self.variables[varname]
+ except KeyError:
+ if fallback is not None:
+ return self._holderify(fallback)
+ raise InterpreterException(f'Tried to get unknown variable "{varname}".')
+
+ @typed_pos_args('is_variable', str)
+ @noKwargs
+ def func_is_variable(self, node: mparser.BaseNode, args: T.Tuple[str], kwargs: 'TYPE_kwargs') -> bool:
+ return args[0] in self.variables
+
+ @FeatureNew('unset_variable', '0.60.0')
+ @typed_pos_args('unset_variable', str)
+ @noKwargs
+ def func_unset_variable(self, node: mparser.BaseNode, args: T.Tuple[str], kwargs: 'TYPE_kwargs') -> None:
+ varname = args[0]
+ try:
+ del self.variables[varname]
+ except KeyError:
+ raise InterpreterException(f'Tried to unset unknown variable "{varname}".')
+
+ @staticmethod
+ def machine_from_native_kwarg(kwargs: T.Dict[str, T.Any]) -> MachineChoice:
+ native = kwargs.get('native', False)
+ if not isinstance(native, bool):
+ raise InvalidArguments('Argument to "native" must be a boolean.')
+ return MachineChoice.BUILD if native else MachineChoice.HOST
+
+ @FeatureNew('is_disabler', '0.52.0')
+ @typed_pos_args('is_disabler', object)
+ @noKwargs
+ def func_is_disabler(self, node: mparser.BaseNode, args: T.Tuple[object], kwargs: 'TYPE_kwargs') -> bool:
+ return isinstance(args[0], Disabler)
+
+ @noKwargs
+ @FeatureNew('range', '0.58.0')
+ @typed_pos_args('range', int, optargs=[int, int])
+ def func_range(self, node, args: T.Tuple[int, T.Optional[int], T.Optional[int]], kwargs: T.Dict[str, T.Any]) -> P_OBJ.RangeHolder:
+ start, stop, step = args
+ # Just like Python's range, we allow range(stop), range(start, stop), or
+ # range(start, stop, step)
+ if stop is None:
+ stop = start
+ start = 0
+ if step is None:
+ step = 1
+ # This is more strict than Python's range()
+ if start < 0:
+ raise InterpreterException('start cannot be negative')
+ if stop < start:
+ raise InterpreterException('stop cannot be less than start')
+ if step < 1:
+ raise InterpreterException('step must be >=1')
+ return P_OBJ.RangeHolder(start, stop, step, subproject=self.subproject)
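+
+    # Rough illustrative sketch of range() in the Meson DSL (available since
+    # 0.58, as noted above):
+    #
+    #   foreach i : range(2, 10, 2)
+    #     message(i.to_string())   # prints 2, 4, 6 and 8
+    #   endforeach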
diff --git a/mesonbuild/interpreter/interpreterobjects.py b/mesonbuild/interpreter/interpreterobjects.py
new file mode 100644
index 0000000..538d134
--- /dev/null
+++ b/mesonbuild/interpreter/interpreterobjects.py
@@ -0,0 +1,987 @@
+from __future__ import annotations
+import os
+import shlex
+import subprocess
+import copy
+import textwrap
+
+from pathlib import Path, PurePath
+
+from .. import mesonlib
+from .. import coredata
+from .. import build
+from .. import mlog
+
+from ..modules import ModuleReturnValue, ModuleObject, ModuleState, ExtensionModule
+from ..backend.backends import TestProtocol
+from ..interpreterbase import (
+ ContainerTypeInfo, KwargInfo, MesonOperator,
+ MesonInterpreterObject, ObjectHolder, MutableInterpreterObject,
+ FeatureNew, FeatureDeprecated,
+ typed_pos_args, typed_kwargs, typed_operator,
+ noArgsFlattening, noPosargs, noKwargs, unholder_return,
+ flatten, resolve_second_level_holders, InterpreterException, InvalidArguments, InvalidCode)
+from ..interpreter.type_checking import NoneType, ENV_SEPARATOR_KW
+from ..dependencies import Dependency, ExternalLibrary, InternalDependency
+from ..programs import ExternalProgram
+from ..mesonlib import HoldableObject, OptionKey, listify, Popen_safe
+
+import typing as T
+
+if T.TYPE_CHECKING:
+ from . import kwargs
+ from ..cmake.interpreter import CMakeInterpreter
+ from ..envconfig import MachineInfo
+ from ..interpreterbase import FeatureCheckBase, InterpreterObject, SubProject, TYPE_var, TYPE_kwargs, TYPE_nvar, TYPE_nkwargs
+ from .interpreter import Interpreter
+
+ from typing_extensions import TypedDict
+
+ class EnvironmentSeparatorKW(TypedDict):
+
+ separator: str
+
+
+def extract_required_kwarg(kwargs: 'kwargs.ExtractRequired',
+ subproject: 'SubProject',
+ feature_check: T.Optional[FeatureCheckBase] = None,
+ default: bool = True) -> T.Tuple[bool, bool, T.Optional[str]]:
+ val = kwargs.get('required', default)
+ disabled = False
+ required = False
+ feature: T.Optional[str] = None
+ if isinstance(val, coredata.UserFeatureOption):
+ if not feature_check:
+ feature_check = FeatureNew('User option "feature"', '0.47.0')
+ feature_check.use(subproject)
+ feature = val.name
+ if val.is_disabled():
+ disabled = True
+ elif val.is_enabled():
+ required = True
+ elif isinstance(val, bool):
+ required = val
+ else:
+ raise InterpreterException('required keyword argument must be boolean or a feature option')
+
+ # Keep boolean value in kwargs to simplify other places where this kwarg is
+ # checked.
+ # TODO: this should be removed, and those callers should learn about FeatureOptions
+ kwargs['required'] = required
+
+ return disabled, required, feature
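+
+# Rough illustrative example: for a hypothetical call
+#   dependency('foo', required: get_option('with-foo'))
+# where the 'with-foo' feature option is set to 'auto', the helper above
+# returns (disabled=False, required=False, feature='with-foo'), so the caller
+# treats the lookup as optional.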
+
+def extract_search_dirs(kwargs: 'kwargs.ExtractSearchDirs') -> T.List[str]:
+ search_dirs_str = mesonlib.stringlistify(kwargs.get('dirs', []))
+ search_dirs = [Path(d).expanduser() for d in search_dirs_str]
+ for d in search_dirs:
+ if mesonlib.is_windows() and d.root.startswith('\\'):
+            # A Unix-style path starting with `/` is not absolute on Windows.
+            # Discard it without failing, for end-user ease with cross-platform directory arrays.
+ continue
+ if not d.is_absolute():
+ raise InvalidCode(f'Search directory {d} is not an absolute path.')
+ return [str(s) for s in search_dirs]
+
+class FeatureOptionHolder(ObjectHolder[coredata.UserFeatureOption]):
+ def __init__(self, option: coredata.UserFeatureOption, interpreter: 'Interpreter'):
+ super().__init__(option, interpreter)
+ if option and option.is_auto():
+ # TODO: we need to cast here because options is not a TypedDict
+ auto = T.cast('coredata.UserFeatureOption', self.env.coredata.options[OptionKey('auto_features')])
+ self.held_object = copy.copy(auto)
+ self.held_object.name = option.name
+ self.methods.update({'enabled': self.enabled_method,
+ 'disabled': self.disabled_method,
+ 'allowed': self.allowed_method,
+ 'auto': self.auto_method,
+ 'require': self.require_method,
+ 'disable_auto_if': self.disable_auto_if_method,
+ })
+
+ @property
+ def value(self) -> str:
+ return 'disabled' if not self.held_object else self.held_object.value
+
+ def as_disabled(self) -> coredata.UserFeatureOption:
+ disabled = copy.deepcopy(self.held_object)
+ disabled.value = 'disabled'
+ return disabled
+
+ @noPosargs
+ @noKwargs
+ def enabled_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+ return self.value == 'enabled'
+
+ @noPosargs
+ @noKwargs
+ def disabled_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+ return self.value == 'disabled'
+
+ @noPosargs
+ @noKwargs
+ @FeatureNew('feature_option.allowed()', '0.59.0')
+ def allowed_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+ return self.value != 'disabled'
+
+ @noPosargs
+ @noKwargs
+ def auto_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+ return self.value == 'auto'
+
+ @FeatureNew('feature_option.require()', '0.59.0')
+ @typed_pos_args('feature_option.require', bool)
+ @typed_kwargs(
+ 'feature_option.require',
+ KwargInfo('error_message', (str, NoneType))
+ )
+ def require_method(self, args: T.Tuple[bool], kwargs: 'kwargs.FeatureOptionRequire') -> coredata.UserFeatureOption:
+ if args[0]:
+ return copy.deepcopy(self.held_object)
+
+ if self.value == 'enabled':
+ err_msg = f'Feature {self.held_object.name} cannot be enabled'
+ if kwargs['error_message']:
+ err_msg += f': {kwargs["error_message"]}'
+ raise InterpreterException(err_msg)
+ return self.as_disabled()
+
+ @FeatureNew('feature_option.disable_auto_if()', '0.59.0')
+ @noKwargs
+ @typed_pos_args('feature_option.disable_auto_if', bool)
+ def disable_auto_if_method(self, args: T.Tuple[bool], kwargs: TYPE_kwargs) -> coredata.UserFeatureOption:
+ return copy.deepcopy(self.held_object) if self.value != 'auto' or not args[0] else self.as_disabled()
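+
+# Rough illustrative sketch of the feature-option methods above as seen from
+# the Meson DSL ('docs' and 'doxygen' are hypothetical names):
+#
+#   docs_opt = get_option('docs').require(doxygen.found(),
+#                                         error_message: 'doxygen is missing')
+#   docs_opt = docs_opt.disable_auto_if(meson.is_cross_build())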
+
+
+class RunProcess(MesonInterpreterObject):
+
+ def __init__(self,
+ cmd: ExternalProgram,
+ args: T.List[str],
+ env: build.EnvironmentVariables,
+ source_dir: str,
+ build_dir: str,
+ subdir: str,
+ mesonintrospect: T.List[str],
+ in_builddir: bool = False,
+ check: bool = False,
+ capture: bool = True) -> None:
+ super().__init__()
+ if not isinstance(cmd, ExternalProgram):
+ raise AssertionError('BUG: RunProcess must be passed an ExternalProgram')
+ self.capture = capture
+ self.returncode, self.stdout, self.stderr = self.run_command(cmd, args, env, source_dir, build_dir, subdir, mesonintrospect, in_builddir, check)
+ self.methods.update({'returncode': self.returncode_method,
+ 'stdout': self.stdout_method,
+ 'stderr': self.stderr_method,
+ })
+
+ def run_command(self,
+ cmd: ExternalProgram,
+ args: T.List[str],
+ env: build.EnvironmentVariables,
+ source_dir: str,
+ build_dir: str,
+ subdir: str,
+ mesonintrospect: T.List[str],
+ in_builddir: bool,
+ check: bool = False) -> T.Tuple[int, str, str]:
+ command_array = cmd.get_command() + args
+ menv = {'MESON_SOURCE_ROOT': source_dir,
+ 'MESON_BUILD_ROOT': build_dir,
+ 'MESON_SUBDIR': subdir,
+ 'MESONINTROSPECT': ' '.join([shlex.quote(x) for x in mesonintrospect]),
+ }
+ if in_builddir:
+ cwd = os.path.join(build_dir, subdir)
+ else:
+ cwd = os.path.join(source_dir, subdir)
+ child_env = os.environ.copy()
+ child_env.update(menv)
+ child_env = env.get_env(child_env)
+ stdout = subprocess.PIPE if self.capture else subprocess.DEVNULL
+ mlog.debug('Running command:', mesonlib.join_args(command_array))
+ try:
+ p, o, e = Popen_safe(command_array, stdout=stdout, env=child_env, cwd=cwd)
+ if self.capture:
+ mlog.debug('--- stdout ---')
+ mlog.debug(o)
+ else:
+ o = ''
+ mlog.debug('--- stdout disabled ---')
+ mlog.debug('--- stderr ---')
+ mlog.debug(e)
+ mlog.debug('')
+
+ if check and p.returncode != 0:
+ raise InterpreterException('Command `{}` failed with status {}.'.format(mesonlib.join_args(command_array), p.returncode))
+
+ return p.returncode, o, e
+ except FileNotFoundError:
+ raise InterpreterException('Could not execute command `%s`.' % mesonlib.join_args(command_array))
+
+ @noPosargs
+ @noKwargs
+ def returncode_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> int:
+ return self.returncode
+
+ @noPosargs
+ @noKwargs
+ def stdout_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.stdout
+
+ @noPosargs
+ @noKwargs
+ def stderr_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.stderr
+
+class EnvironmentVariablesHolder(ObjectHolder[build.EnvironmentVariables], MutableInterpreterObject):
+
+ def __init__(self, obj: build.EnvironmentVariables, interpreter: 'Interpreter'):
+ super().__init__(obj, interpreter)
+ self.methods.update({'set': self.set_method,
+ 'append': self.append_method,
+ 'prepend': self.prepend_method,
+ })
+
+ def __repr__(self) -> str:
+ repr_str = "<{0}: {1}>"
+ return repr_str.format(self.__class__.__name__, self.held_object.envvars)
+
+ def __deepcopy__(self, memo: T.Dict[str, object]) -> 'EnvironmentVariablesHolder':
+ # Avoid trying to copy the interpreter
+ return EnvironmentVariablesHolder(copy.deepcopy(self.held_object), self.interpreter)
+
+ def warn_if_has_name(self, name: str) -> None:
+        # Multiple append/prepend operations were not supported until 0.58.0.
+ if self.held_object.has_name(name):
+ m = f'Overriding previous value of environment variable {name!r} with a new one'
+ FeatureNew(m, '0.58.0').use(self.subproject, self.current_node)
+
+ @typed_pos_args('environment.set', str, varargs=str, min_varargs=1)
+ @typed_kwargs('environment.set', ENV_SEPARATOR_KW)
+ def set_method(self, args: T.Tuple[str, T.List[str]], kwargs: 'EnvironmentSeparatorKW') -> None:
+ name, values = args
+ self.held_object.set(name, values, kwargs['separator'])
+
+ @typed_pos_args('environment.append', str, varargs=str, min_varargs=1)
+ @typed_kwargs('environment.append', ENV_SEPARATOR_KW)
+ def append_method(self, args: T.Tuple[str, T.List[str]], kwargs: 'EnvironmentSeparatorKW') -> None:
+ name, values = args
+ self.warn_if_has_name(name)
+ self.held_object.append(name, values, kwargs['separator'])
+
+ @typed_pos_args('environment.prepend', str, varargs=str, min_varargs=1)
+ @typed_kwargs('environment.prepend', ENV_SEPARATOR_KW)
+ def prepend_method(self, args: T.Tuple[str, T.List[str]], kwargs: 'EnvironmentSeparatorKW') -> None:
+ name, values = args
+ self.warn_if_has_name(name)
+ self.held_object.prepend(name, values, kwargs['separator'])
+
+
+_CONF_DATA_SET_KWS: KwargInfo[T.Optional[str]] = KwargInfo('description', (str, NoneType))
+
+
+class ConfigurationDataHolder(ObjectHolder[build.ConfigurationData], MutableInterpreterObject):
+
+ def __init__(self, obj: build.ConfigurationData, interpreter: 'Interpreter'):
+ super().__init__(obj, interpreter)
+ self.methods.update({'set': self.set_method,
+ 'set10': self.set10_method,
+ 'set_quoted': self.set_quoted_method,
+ 'has': self.has_method,
+ 'get': self.get_method,
+ 'keys': self.keys_method,
+ 'get_unquoted': self.get_unquoted_method,
+ 'merge_from': self.merge_from_method,
+ })
+
+ def __deepcopy__(self, memo: T.Dict) -> 'ConfigurationDataHolder':
+ return ConfigurationDataHolder(copy.deepcopy(self.held_object), self.interpreter)
+
+ def is_used(self) -> bool:
+ return self.held_object.used
+
+ def __check_used(self) -> None:
+ if self.is_used():
+ raise InterpreterException("Can not set values on configuration object that has been used.")
+
+ @typed_pos_args('configuration_data.set', str, (str, int, bool))
+ @typed_kwargs('configuration_data.set', _CONF_DATA_SET_KWS)
+ def set_method(self, args: T.Tuple[str, T.Union[str, int, bool]], kwargs: 'kwargs.ConfigurationDataSet') -> None:
+ self.__check_used()
+ self.held_object.values[args[0]] = (args[1], kwargs['description'])
+
+ @typed_pos_args('configuration_data.set_quoted', str, str)
+ @typed_kwargs('configuration_data.set_quoted', _CONF_DATA_SET_KWS)
+ def set_quoted_method(self, args: T.Tuple[str, str], kwargs: 'kwargs.ConfigurationDataSet') -> None:
+ self.__check_used()
+ escaped_val = '\\"'.join(args[1].split('"'))
+ self.held_object.values[args[0]] = (f'"{escaped_val}"', kwargs['description'])
+
+ @typed_pos_args('configuration_data.set10', str, (int, bool))
+ @typed_kwargs('configuration_data.set10', _CONF_DATA_SET_KWS)
+ def set10_method(self, args: T.Tuple[str, T.Union[int, bool]], kwargs: 'kwargs.ConfigurationDataSet') -> None:
+ self.__check_used()
+ # bool is a subclass of int, so we need to check for bool explicitly.
+ # We already have typed_pos_args checking that this is either a bool or
+ # an int.
+ if not isinstance(args[1], bool):
+            mlog.deprecation('configuration_data.set10 with number. The `set10` '
+                             'method should only be used with booleans',
+ location=self.interpreter.current_node)
+ if args[1] < 0:
+ mlog.warning('Passing a number that is less than 0 may not have the intended result, '
+ 'as meson will treat all non-zero values as true.',
+ location=self.interpreter.current_node)
+ self.held_object.values[args[0]] = (int(args[1]), kwargs['description'])
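+
+    # Rough illustrative sketch of set10() from the Meson DSL ('dep_foo' is a
+    # hypothetical dependency object); booleans are the intended input, and
+    # numbers trigger the deprecation warning above:
+    #
+    #   cdata = configuration_data()
+    #   cdata.set10('HAVE_FOO', dep_foo.found())   # written out as 1 or 0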
+
+ @typed_pos_args('configuration_data.has', (str, int, bool))
+ @noKwargs
+ def has_method(self, args: T.Tuple[T.Union[str, int, bool]], kwargs: TYPE_kwargs) -> bool:
+ return args[0] in self.held_object.values
+
+ @FeatureNew('configuration_data.get()', '0.38.0')
+ @typed_pos_args('configuration_data.get', str, optargs=[(str, int, bool)])
+ @noKwargs
+ def get_method(self, args: T.Tuple[str, T.Optional[T.Union[str, int, bool]]],
+ kwargs: TYPE_kwargs) -> T.Union[str, int, bool]:
+ name = args[0]
+ if name in self.held_object:
+ return self.held_object.get(name)[0]
+ elif args[1] is not None:
+ return args[1]
+ raise InterpreterException(f'Entry {name} not in configuration data.')
+
+ @FeatureNew('configuration_data.get_unquoted()', '0.44.0')
+ @typed_pos_args('configuration_data.get_unquoted', str, optargs=[(str, int, bool)])
+ @noKwargs
+ def get_unquoted_method(self, args: T.Tuple[str, T.Optional[T.Union[str, int, bool]]],
+ kwargs: TYPE_kwargs) -> T.Union[str, int, bool]:
+ name = args[0]
+ if name in self.held_object:
+ val = self.held_object.get(name)[0]
+ elif args[1] is not None:
+ val = args[1]
+ else:
+ raise InterpreterException(f'Entry {name} not in configuration data.')
+ if isinstance(val, str) and val[0] == '"' and val[-1] == '"':
+ return val[1:-1]
+ return val
+
+ def get(self, name: str) -> T.Tuple[T.Union[str, int, bool], T.Optional[str]]:
+ return self.held_object.values[name]
+
+ @FeatureNew('configuration_data.keys()', '0.57.0')
+ @noPosargs
+ @noKwargs
+ def keys_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.List[str]:
+ return sorted(self.keys())
+
+ def keys(self) -> T.List[str]:
+ return list(self.held_object.values.keys())
+
+ @typed_pos_args('configuration_data.merge_from', build.ConfigurationData)
+ @noKwargs
+ def merge_from_method(self, args: T.Tuple[build.ConfigurationData], kwargs: TYPE_kwargs) -> None:
+ from_object = args[0]
+ self.held_object.values.update(from_object.values)
+
+
+_PARTIAL_DEP_KWARGS = [
+ KwargInfo('compile_args', bool, default=False),
+ KwargInfo('link_args', bool, default=False),
+ KwargInfo('links', bool, default=False),
+ KwargInfo('includes', bool, default=False),
+ KwargInfo('sources', bool, default=False),
+]
+
+class DependencyHolder(ObjectHolder[Dependency]):
+ def __init__(self, dep: Dependency, interpreter: 'Interpreter'):
+ super().__init__(dep, interpreter)
+ self.methods.update({'found': self.found_method,
+ 'type_name': self.type_name_method,
+ 'version': self.version_method,
+ 'name': self.name_method,
+ 'get_pkgconfig_variable': self.pkgconfig_method,
+ 'get_configtool_variable': self.configtool_method,
+ 'get_variable': self.variable_method,
+ 'partial_dependency': self.partial_dependency_method,
+ 'include_type': self.include_type_method,
+ 'as_system': self.as_system_method,
+ 'as_link_whole': self.as_link_whole_method,
+ })
+
+ def found(self) -> bool:
+ return self.found_method([], {})
+
+ @noPosargs
+ @noKwargs
+ def type_name_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.held_object.type_name
+
+ @noPosargs
+ @noKwargs
+ def found_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+ if self.held_object.type_name == 'internal':
+ return True
+ return self.held_object.found()
+
+ @noPosargs
+ @noKwargs
+ def version_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.held_object.get_version()
+
+ @noPosargs
+ @noKwargs
+ def name_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.held_object.get_name()
+
+ @FeatureDeprecated('dependency.get_pkgconfig_variable', '0.56.0',
+ 'use dependency.get_variable(pkgconfig : ...) instead')
+ @typed_pos_args('dependency.get_pkgconfig_variable', str)
+ @typed_kwargs(
+ 'dependency.get_pkgconfig_variable',
+ KwargInfo('default', (str, NoneType)),
+ KwargInfo(
+ 'define_variable',
+ ContainerTypeInfo(list, str, pairs=True),
+ default=[],
+ listify=True,
+ validator=lambda x: 'must be of length 2 or empty' if len(x) not in {0, 2} else None,
+ ),
+ )
+ def pkgconfig_method(self, args: T.Tuple[str], kwargs: 'kwargs.DependencyPkgConfigVar') -> str:
+ return self.held_object.get_pkgconfig_variable(args[0], **kwargs)
+
+ @FeatureNew('dependency.get_configtool_variable', '0.44.0')
+ @FeatureDeprecated('dependency.get_configtool_variable', '0.56.0',
+ 'use dependency.get_variable(configtool : ...) instead')
+ @noKwargs
+ @typed_pos_args('dependency.get_config_tool_variable', str)
+ def configtool_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> str:
+ return self.held_object.get_configtool_variable(args[0])
+
+ @FeatureNew('dependency.partial_dependency', '0.46.0')
+ @noPosargs
+ @typed_kwargs('dependency.partial_dependency', *_PARTIAL_DEP_KWARGS)
+ def partial_dependency_method(self, args: T.List[TYPE_nvar], kwargs: 'kwargs.DependencyMethodPartialDependency') -> Dependency:
+ pdep = self.held_object.get_partial_dependency(**kwargs)
+ return pdep
+
+ @FeatureNew('dependency.get_variable', '0.51.0')
+ @typed_pos_args('dependency.get_variable', optargs=[str])
+ @typed_kwargs(
+ 'dependency.get_variable',
+ KwargInfo('cmake', (str, NoneType)),
+ KwargInfo('pkgconfig', (str, NoneType)),
+ KwargInfo('configtool', (str, NoneType)),
+ KwargInfo('internal', (str, NoneType), since='0.54.0'),
+ KwargInfo('default_value', (str, NoneType)),
+ KwargInfo('pkgconfig_define', ContainerTypeInfo(list, str, pairs=True), default=[], listify=True),
+ )
+ def variable_method(self, args: T.Tuple[T.Optional[str]], kwargs: 'kwargs.DependencyGetVariable') -> str:
+ default_varname = args[0]
+ if default_varname is not None:
+ FeatureNew('Positional argument to dependency.get_variable()', '0.58.0').use(self.subproject, self.current_node)
+ return self.held_object.get_variable(
+ cmake=kwargs['cmake'] or default_varname,
+ pkgconfig=kwargs['pkgconfig'] or default_varname,
+ configtool=kwargs['configtool'] or default_varname,
+ internal=kwargs['internal'] or default_varname,
+ default_value=kwargs['default_value'],
+ pkgconfig_define=kwargs['pkgconfig_define'],
+ )
+
+ @FeatureNew('dependency.include_type', '0.52.0')
+ @noPosargs
+ @noKwargs
+ def include_type_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.held_object.get_include_type()
+
+ @FeatureNew('dependency.as_system', '0.52.0')
+ @noKwargs
+ @typed_pos_args('dependency.as_system', optargs=[str])
+ def as_system_method(self, args: T.Tuple[T.Optional[str]], kwargs: TYPE_kwargs) -> Dependency:
+ return self.held_object.generate_system_dependency(args[0] or 'system')
+
+ @FeatureNew('dependency.as_link_whole', '0.56.0')
+ @noKwargs
+ @noPosargs
+ def as_link_whole_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> Dependency:
+ if not isinstance(self.held_object, InternalDependency):
+ raise InterpreterException('as_link_whole method is only supported on declare_dependency() objects')
+ new_dep = self.held_object.generate_link_whole_dependency()
+ return new_dep
+
+class ExternalProgramHolder(ObjectHolder[ExternalProgram]):
+ def __init__(self, ep: ExternalProgram, interpreter: 'Interpreter') -> None:
+ super().__init__(ep, interpreter)
+ self.methods.update({'found': self.found_method,
+ 'path': self.path_method,
+ 'version': self.version_method,
+ 'full_path': self.full_path_method})
+
+ @noPosargs
+ @noKwargs
+ def found_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+ return self.found()
+
+ @noPosargs
+ @noKwargs
+ @FeatureDeprecated('ExternalProgram.path', '0.55.0',
+ 'use ExternalProgram.full_path() instead')
+ def path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self._full_path()
+
+ @noPosargs
+ @noKwargs
+ @FeatureNew('ExternalProgram.full_path', '0.55.0')
+ def full_path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self._full_path()
+
+ def _full_path(self) -> str:
+ if not self.found():
+ raise InterpreterException('Unable to get the path of a not-found external program')
+ path = self.held_object.get_path()
+ assert path is not None
+ return path
+
+ @noPosargs
+ @noKwargs
+ @FeatureNew('ExternalProgram.version', '0.62.0')
+ def version_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ if not self.found():
+ raise InterpreterException('Unable to get the version of a not-found external program')
+ try:
+ return self.held_object.get_version(self.interpreter)
+ except mesonlib.MesonException:
+ return 'unknown'
+
+ def found(self) -> bool:
+ return self.held_object.found()
+
+class ExternalLibraryHolder(ObjectHolder[ExternalLibrary]):
+ def __init__(self, el: ExternalLibrary, interpreter: 'Interpreter'):
+ super().__init__(el, interpreter)
+ self.methods.update({'found': self.found_method,
+ 'type_name': self.type_name_method,
+ 'partial_dependency': self.partial_dependency_method,
+ })
+
+ @noPosargs
+ @noKwargs
+ def type_name_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.held_object.type_name
+
+ @noPosargs
+ @noKwargs
+ def found_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+ return self.held_object.found()
+
+ @FeatureNew('dependency.partial_dependency', '0.46.0')
+ @noPosargs
+ @typed_kwargs('dependency.partial_dependency', *_PARTIAL_DEP_KWARGS)
+ def partial_dependency_method(self, args: T.List[TYPE_nvar], kwargs: 'kwargs.DependencyMethodPartialDependency') -> Dependency:
+ pdep = self.held_object.get_partial_dependency(**kwargs)
+ return pdep
+
+# A machine that's statically known from the cross file
+class MachineHolder(ObjectHolder['MachineInfo']):
+ def __init__(self, machine_info: 'MachineInfo', interpreter: 'Interpreter'):
+ super().__init__(machine_info, interpreter)
+ self.methods.update({'system': self.system_method,
+ 'cpu': self.cpu_method,
+ 'cpu_family': self.cpu_family_method,
+ 'endian': self.endian_method,
+ })
+
+ @noPosargs
+ @noKwargs
+ def cpu_family_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.held_object.cpu_family
+
+ @noPosargs
+ @noKwargs
+ def cpu_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.held_object.cpu
+
+ @noPosargs
+ @noKwargs
+ def system_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.held_object.system
+
+ @noPosargs
+ @noKwargs
+ def endian_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.held_object.endian
+
+class IncludeDirsHolder(ObjectHolder[build.IncludeDirs]):
+ pass
+
+class FileHolder(ObjectHolder[mesonlib.File]):
+ pass
+
+class HeadersHolder(ObjectHolder[build.Headers]):
+ pass
+
+class DataHolder(ObjectHolder[build.Data]):
+ pass
+
+class SymlinkDataHolder(ObjectHolder[build.SymlinkData]):
+ pass
+
+class InstallDirHolder(ObjectHolder[build.InstallDir]):
+ pass
+
+class ManHolder(ObjectHolder[build.Man]):
+ pass
+
+class EmptyDirHolder(ObjectHolder[build.EmptyDir]):
+ pass
+
+class GeneratedObjectsHolder(ObjectHolder[build.ExtractedObjects]):
+ pass
+
+class Test(MesonInterpreterObject):
+ def __init__(self, name: str, project: str, suite: T.List[str],
+ exe: T.Union[ExternalProgram, build.Executable, build.CustomTarget],
+ depends: T.List[T.Union[build.CustomTarget, build.BuildTarget]],
+ is_parallel: bool,
+ cmd_args: T.List[T.Union[str, mesonlib.File, build.Target]],
+ env: build.EnvironmentVariables,
+ should_fail: bool, timeout: int, workdir: T.Optional[str], protocol: str,
+ priority: int, verbose: bool):
+ super().__init__()
+ self.name = name
+ self.suite = listify(suite)
+ self.project_name = project
+ self.exe = exe
+ self.depends = depends
+ self.is_parallel = is_parallel
+ self.cmd_args = cmd_args
+ self.env = env
+ self.should_fail = should_fail
+ self.timeout = timeout
+ self.workdir = workdir
+ self.protocol = TestProtocol.from_str(protocol)
+ self.priority = priority
+ self.verbose = verbose
+
+ def get_exe(self) -> T.Union[ExternalProgram, build.Executable, build.CustomTarget]:
+ return self.exe
+
+ def get_name(self) -> str:
+ return self.name
+
+class NullSubprojectInterpreter(HoldableObject):
+ pass
+
+# TODO: This should really be an `ObjectHolder`, but the additional stuff in this
+# class prevents this. Thus, this class should be split into a pure
+# `ObjectHolder` and a class specifically for storing in `Interpreter`.
+class SubprojectHolder(MesonInterpreterObject):
+
+ def __init__(self, subinterpreter: T.Union['Interpreter', NullSubprojectInterpreter],
+ subdir: str,
+ warnings: int = 0,
+ disabled_feature: T.Optional[str] = None,
+ exception: T.Optional[Exception] = None) -> None:
+ super().__init__()
+ self.held_object = subinterpreter
+ self.warnings = warnings
+ self.disabled_feature = disabled_feature
+ self.exception = exception
+ self.subdir = PurePath(subdir).as_posix()
+ self.cm_interpreter: T.Optional[CMakeInterpreter] = None
+ self.methods.update({'get_variable': self.get_variable_method,
+ 'found': self.found_method,
+ })
+
+ @noPosargs
+ @noKwargs
+ def found_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+ return self.found()
+
+ def found(self) -> bool:
+ return not isinstance(self.held_object, NullSubprojectInterpreter)
+
+ @noKwargs
+ @noArgsFlattening
+ @unholder_return
+ def get_variable_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.Union[TYPE_var, InterpreterObject]:
+ if len(args) < 1 or len(args) > 2:
+ raise InterpreterException('Get_variable takes one or two arguments.')
+ if isinstance(self.held_object, NullSubprojectInterpreter): # == not self.found()
+ raise InterpreterException(f'Subproject "{self.subdir}" disabled can\'t get_variable on it.')
+ varname = args[0]
+ if not isinstance(varname, str):
+ raise InterpreterException('Get_variable first argument must be a string.')
+ try:
+ return self.held_object.variables[varname]
+ except KeyError:
+ pass
+
+ if len(args) == 2:
+ return self.held_object._holderify(args[1])
+
+ raise InvalidArguments(f'Requested variable "{varname}" not found.')
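+
+# Rough illustrative sketch of subproject variable lookup from the Meson DSL
+# ('mylib' and 'mylib_inc' are hypothetical names):
+#
+#   sub = subproject('mylib', required: false)
+#   inc = sub.found() ? sub.get_variable('mylib_inc') : include_directories('.')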
+
+class ModuleObjectHolder(ObjectHolder[ModuleObject]):
+ def method_call(self, method_name: str, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> TYPE_var:
+ modobj = self.held_object
+ method = modobj.methods.get(method_name)
+ if not method:
+ raise InvalidCode(f'Unknown method {method_name!r} in object.')
+ if not getattr(method, 'no-args-flattening', False):
+ args = flatten(args)
+ if not getattr(method, 'no-second-level-holder-flattening', False):
+ args, kwargs = resolve_second_level_holders(args, kwargs)
+ state = ModuleState(self.interpreter)
+ # Many modules do for example self.interpreter.find_program_impl(),
+ # so we have to ensure they use the current interpreter and not the one
+ # that first imported that module, otherwise it will use outdated
+ # overrides.
+ if isinstance(modobj, ExtensionModule):
+ modobj.interpreter = self.interpreter
+ ret = method(state, args, kwargs)
+ if isinstance(ret, ModuleReturnValue):
+ self.interpreter.process_new_values(ret.new_objects)
+ ret = ret.return_value
+ return ret
+
+class MutableModuleObjectHolder(ModuleObjectHolder, MutableInterpreterObject):
+ def __deepcopy__(self, memo: T.Dict[int, T.Any]) -> 'MutableModuleObjectHolder':
+ # Deepcopy only held object, not interpreter
+ modobj = copy.deepcopy(self.held_object, memo)
+ return MutableModuleObjectHolder(modobj, self.interpreter)
+
+
+_BuildTarget = T.TypeVar('_BuildTarget', bound=T.Union[build.BuildTarget, build.BothLibraries])
+
+class BuildTargetHolder(ObjectHolder[_BuildTarget]):
+ def __init__(self, target: _BuildTarget, interp: 'Interpreter'):
+ super().__init__(target, interp)
+ self.methods.update({'extract_objects': self.extract_objects_method,
+ 'extract_all_objects': self.extract_all_objects_method,
+ 'name': self.name_method,
+ 'get_id': self.get_id_method,
+ 'outdir': self.outdir_method,
+ 'full_path': self.full_path_method,
+ 'path': self.path_method,
+ 'found': self.found_method,
+ 'private_dir_include': self.private_dir_include_method,
+ })
+
+ def __repr__(self) -> str:
+ r = '<{} {}: {}>'
+ h = self.held_object
+ assert isinstance(h, build.BuildTarget)
+ return r.format(self.__class__.__name__, h.get_id(), h.filename)
+
+ @property
+ def _target_object(self) -> build.BuildTarget:
+ if isinstance(self.held_object, build.BothLibraries):
+ return self.held_object.get_default_object()
+ assert isinstance(self.held_object, build.BuildTarget)
+ return self.held_object
+
+ def is_cross(self) -> bool:
+ return not self._target_object.environment.machines.matches_build_machine(self._target_object.for_machine)
+
+ @noPosargs
+ @noKwargs
+ def found_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+ if not (isinstance(self.held_object, build.Executable) and self.held_object.was_returned_by_find_program):
+ FeatureNew.single_use('BuildTarget.found', '0.59.0', subproject=self.held_object.subproject)
+ return True
+
+ @noPosargs
+ @noKwargs
+ def private_dir_include_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> build.IncludeDirs:
+ return build.IncludeDirs('', [], False, [self.interpreter.backend.get_target_private_dir(self._target_object)])
+
+ @noPosargs
+ @noKwargs
+ def full_path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.interpreter.backend.get_target_filename_abs(self._target_object)
+
+ @noPosargs
+ @noKwargs
+ @FeatureDeprecated('BuildTarget.path', '0.55.0', 'Use BuildTarget.full_path instead')
+ def path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.interpreter.backend.get_target_filename_abs(self._target_object)
+
+ @noPosargs
+ @noKwargs
+ def outdir_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.interpreter.backend.get_target_dir(self._target_object)
+
+ @noKwargs
+ @typed_pos_args('extract_objects', varargs=(mesonlib.File, str, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList))
+ def extract_objects_method(self, args: T.Tuple[T.List[T.Union[mesonlib.FileOrString, 'build.GeneratedTypes']]], kwargs: TYPE_nkwargs) -> build.ExtractedObjects:
+ return self._target_object.extract_objects(args[0])
+
+ @noPosargs
+ @typed_kwargs(
+ 'extract_all_objects',
+ KwargInfo(
+ 'recursive', bool, default=False, since='0.46.0',
+ not_set_warning=textwrap.dedent('''\
+ extract_all_objects called without setting recursive
+ keyword argument. Meson currently defaults to
+ non-recursive to maintain backward compatibility but
+ the default will be changed in the future.
+ ''')
+ )
+ )
+ def extract_all_objects_method(self, args: T.List[TYPE_nvar], kwargs: 'kwargs.BuildTargeMethodExtractAllObjects') -> build.ExtractedObjects:
+ return self._target_object.extract_all_objects(kwargs['recursive'])
+
+ @noPosargs
+ @noKwargs
+ def get_id_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self._target_object.get_id()
+
+ @FeatureNew('name', '0.54.0')
+ @noPosargs
+ @noKwargs
+ def name_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self._target_object.name
+
+class ExecutableHolder(BuildTargetHolder[build.Executable]):
+ pass
+
+class StaticLibraryHolder(BuildTargetHolder[build.StaticLibrary]):
+ pass
+
+class SharedLibraryHolder(BuildTargetHolder[build.SharedLibrary]):
+ pass
+
+class BothLibrariesHolder(BuildTargetHolder[build.BothLibraries]):
+ def __init__(self, libs: build.BothLibraries, interp: 'Interpreter'):
+ # FIXME: This build target always represents the shared library, but
+ # that should be configurable.
+ super().__init__(libs, interp)
+ self.methods.update({'get_shared_lib': self.get_shared_lib_method,
+ 'get_static_lib': self.get_static_lib_method,
+ })
+
+ def __repr__(self) -> str:
+ r = '<{} {}: {}, {}: {}>'
+ h1 = self.held_object.shared
+ h2 = self.held_object.static
+ return r.format(self.__class__.__name__, h1.get_id(), h1.filename, h2.get_id(), h2.filename)
+
+ @noPosargs
+ @noKwargs
+ def get_shared_lib_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> build.SharedLibrary:
+ return self.held_object.shared
+
+ @noPosargs
+ @noKwargs
+ def get_static_lib_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> build.StaticLibrary:
+ return self.held_object.static
+
+class SharedModuleHolder(BuildTargetHolder[build.SharedModule]):
+ pass
+
+class JarHolder(BuildTargetHolder[build.Jar]):
+ pass
+
+class CustomTargetIndexHolder(ObjectHolder[build.CustomTargetIndex]):
+ def __init__(self, target: build.CustomTargetIndex, interp: 'Interpreter'):
+ super().__init__(target, interp)
+ self.methods.update({'full_path': self.full_path_method,
+ })
+
+ @FeatureNew('custom_target[i].full_path', '0.54.0')
+ @noPosargs
+ @noKwargs
+ def full_path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ assert self.interpreter.backend is not None
+ return self.interpreter.backend.get_target_filename_abs(self.held_object)
+
+class CustomTargetHolder(ObjectHolder[build.CustomTarget]):
+ def __init__(self, target: 'build.CustomTarget', interp: 'Interpreter'):
+ super().__init__(target, interp)
+ self.methods.update({'full_path': self.full_path_method,
+ 'to_list': self.to_list_method,
+ })
+
+ self.operators.update({
+ MesonOperator.INDEX: self.op_index,
+ })
+
+ def __repr__(self) -> str:
+ r = '<{} {}: {}>'
+ h = self.held_object
+ return r.format(self.__class__.__name__, h.get_id(), h.command)
+
+ @noPosargs
+ @noKwargs
+ def full_path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.interpreter.backend.get_target_filename_abs(self.held_object)
+
+ @FeatureNew('custom_target.to_list', '0.54.0')
+ @noPosargs
+ @noKwargs
+ def to_list_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.List[build.CustomTargetIndex]:
+ result = []
+ for i in self.held_object:
+ result.append(i)
+ return result
+
+ @noKwargs
+ @typed_operator(MesonOperator.INDEX, int)
+ def op_index(self, other: int) -> build.CustomTargetIndex:
+ try:
+ return self.held_object[other]
+ except IndexError:
+ raise InvalidArguments(f'Index {other} out of bounds of custom target {self.held_object.name} output of size {len(self.held_object)}.')
+
+class RunTargetHolder(ObjectHolder[build.RunTarget]):
+ pass
+
+class AliasTargetHolder(ObjectHolder[build.AliasTarget]):
+ pass
+
+class GeneratedListHolder(ObjectHolder[build.GeneratedList]):
+ pass
+
+class GeneratorHolder(ObjectHolder[build.Generator]):
+ def __init__(self, gen: build.Generator, interpreter: 'Interpreter'):
+ super().__init__(gen, interpreter)
+ self.methods.update({'process': self.process_method})
+
+ @typed_pos_args('generator.process', min_varargs=1, varargs=(str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList))
+ @typed_kwargs(
+ 'generator.process',
+ KwargInfo('preserve_path_from', (str, NoneType), since='0.45.0'),
+ KwargInfo('extra_args', ContainerTypeInfo(list, str), listify=True, default=[]),
+ )
+ def process_method(self,
+ args: T.Tuple[T.List[T.Union[str, mesonlib.File, 'build.GeneratedTypes']]],
+ kwargs: 'kwargs.GeneratorProcess') -> build.GeneratedList:
+ preserve_path_from = kwargs['preserve_path_from']
+ if preserve_path_from is not None:
+ preserve_path_from = os.path.normpath(preserve_path_from)
+ if not os.path.isabs(preserve_path_from):
+ # This is a bit of a hack. Fix properly before merging.
+ raise InvalidArguments('Preserve_path_from must be an absolute path for now. Sorry.')
+
+ if any(isinstance(a, (build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)) for a in args[0]):
+ FeatureNew.single_use(
+ 'Calling generator.process with CustomTarget or Index of CustomTarget.',
+ '0.57.0', self.interpreter.subproject)
+
+ gl = self.held_object.process_files(args[0], self.interpreter,
+ preserve_path_from, extra_args=kwargs['extra_args'])
+
+ return gl
+
+
+class StructuredSourcesHolder(ObjectHolder[build.StructuredSources]):
+
+ def __init__(self, sources: build.StructuredSources, interp: 'Interpreter'):
+ super().__init__(sources, interp)
diff --git a/mesonbuild/interpreter/kwargs.py b/mesonbuild/interpreter/kwargs.py
new file mode 100644
index 0000000..fb02374
--- /dev/null
+++ b/mesonbuild/interpreter/kwargs.py
@@ -0,0 +1,310 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright © 2021 The Meson Developers
+# Copyright © 2021 Intel Corporation
+from __future__ import annotations
+
+"""Keyword Argument type annotations."""
+
+import typing as T
+
+from typing_extensions import TypedDict, Literal, Protocol
+
+from .. import build
+from .. import coredata
+from ..compilers import Compiler
+from ..mesonlib import MachineChoice, File, FileMode, FileOrString
+from ..modules.cmake import CMakeSubprojectOptions
+from ..programs import ExternalProgram
+
+
+class FuncAddProjectArgs(TypedDict):
+
+ """Keyword Arguments for the add_*_arguments family of arguments.
+
+ including `add_global_arguments`, `add_project_arguments`, and their
+ link variants
+
+ Because of the use of a convertor function, we get the native keyword as
+ a MachineChoice instance already.
+ """
+
+ native: MachineChoice
+ language: T.List[str]
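+
+# Rough illustrative example: after typed_kwargs conversion, a DSL call like
+#   add_project_arguments('-DFOO', language: ['c', 'cpp'])
+# reaches the interpreter with kwargs roughly equal to
+#   {'native': MachineChoice.HOST, 'language': ['c', 'cpp']}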
+
+
+class BaseTest(TypedDict):
+
+ """Shared base for the Rust module."""
+
+ args: T.List[T.Union[str, File, build.Target]]
+ should_fail: bool
+ timeout: int
+ workdir: T.Optional[str]
+ depends: T.List[T.Union[build.CustomTarget, build.BuildTarget]]
+ priority: int
+ env: build.EnvironmentVariables
+ suite: T.List[str]
+
+
+class FuncBenchmark(BaseTest):
+
+ """Keyword Arguments shared between `test` and `benchmark`."""
+
+ protocol: Literal['exitcode', 'tap', 'gtest', 'rust']
+
+
+class FuncTest(FuncBenchmark):
+
+ """Keyword Arguments for `test`
+
+    `test` only adds the `is_parallel` argument over benchmark, so inheritance
+    is helpful here.
+ """
+
+ is_parallel: bool
+
+
+class ExtractRequired(TypedDict):
+
+ """Keyword Arguments consumed by the `extract_required_kwargs` function.
+
+ Any function that uses the `required` keyword argument which accepts either
+ a boolean or a feature option should inherit it's arguments from this class.
+ """
+
+ required: T.Union[bool, coredata.UserFeatureOption]
+
+
+class ExtractSearchDirs(TypedDict):
+
+ """Keyword arguments consumed by the `extract_search_dirs` function.
+
+    See the note in `ExtractRequired`.
+ """
+
+ dirs: T.List[str]
+
+
+class FuncGenerator(TypedDict):
+
+ """Keyword rguments for the generator function."""
+
+ arguments: T.List[str]
+ output: T.List[str]
+ depfile: T.Optional[str]
+ capture: bool
+ depends: T.List[T.Union[build.BuildTarget, build.CustomTarget]]
+
+
+class GeneratorProcess(TypedDict):
+
+ """Keyword Arguments for generator.process."""
+
+ preserve_path_from: T.Optional[str]
+ extra_args: T.List[str]
+
+class DependencyMethodPartialDependency(TypedDict):
+
+ """ Keyword Arguments for the dep.partial_dependency methods """
+
+ compile_args: bool
+ link_args: bool
+ links: bool
+ includes: bool
+ sources: bool
+
+class BuildTargeMethodExtractAllObjects(TypedDict):
+ recursive: bool
+
+class FuncInstallSubdir(TypedDict):
+
+ install_dir: str
+ strip_directory: bool
+ exclude_files: T.List[str]
+ exclude_directories: T.List[str]
+ install_mode: FileMode
+
+
+class FuncInstallData(TypedDict):
+
+ install_dir: str
+ sources: T.List[FileOrString]
+ rename: T.List[str]
+ install_mode: FileMode
+
+
+class FuncInstallHeaders(TypedDict):
+
+ install_dir: T.Optional[str]
+ install_mode: FileMode
+ subdir: T.Optional[str]
+
+
+class FuncInstallMan(TypedDict):
+
+ install_dir: T.Optional[str]
+ install_mode: FileMode
+ locale: T.Optional[str]
+
+
+class FuncImportModule(ExtractRequired):
+
+ disabler: bool
+
+
+class FuncIncludeDirectories(TypedDict):
+
+ is_system: bool
+
+class FuncAddLanguages(ExtractRequired):
+
+ native: T.Optional[bool]
+
+class RunTarget(TypedDict):
+
+ command: T.List[T.Union[str, build.BuildTarget, build.CustomTarget, ExternalProgram, File]]
+ depends: T.List[T.Union[build.BuildTarget, build.CustomTarget]]
+ env: build.EnvironmentVariables
+
+
+class CustomTarget(TypedDict):
+
+ build_always: bool
+ build_always_stale: T.Optional[bool]
+ build_by_default: T.Optional[bool]
+ capture: bool
+ command: T.List[T.Union[str, build.BuildTarget, build.CustomTarget,
+ build.CustomTargetIndex, ExternalProgram, File]]
+ console: bool
+ depend_files: T.List[FileOrString]
+ depends: T.List[T.Union[build.BuildTarget, build.CustomTarget]]
+ depfile: T.Optional[str]
+ env: build.EnvironmentVariables
+ feed: bool
+ input: T.List[T.Union[str, build.BuildTarget, build.CustomTarget, build.CustomTargetIndex,
+ build.ExtractedObjects, build.GeneratedList, ExternalProgram, File]]
+ install: bool
+ install_dir: T.List[T.Union[str, T.Literal[False]]]
+ install_mode: FileMode
+ install_tag: T.List[T.Optional[str]]
+ output: T.List[str]
+
+class AddTestSetup(TypedDict):
+
+ exe_wrapper: T.List[T.Union[str, ExternalProgram]]
+ gdb: bool
+ timeout_multiplier: int
+ is_default: bool
+ exclude_suites: T.List[str]
+ env: build.EnvironmentVariables
+
+
+class Project(TypedDict):
+
+ version: T.Optional[FileOrString]
+ meson_version: T.Optional[str]
+ default_options: T.List[str]
+ license: T.List[str]
+ subproject_dir: str
+
+
+class _FoundProto(Protocol):
+
+ """Protocol for subdir arguments.
+
+ This allows us to define any object that has a found(self) -> bool method.
+ """
+
+ def found(self) -> bool: ...
+
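+ # Dependency objects and the results of `find_program()` both expose such a
+ # `found()` method, so either kind of object structurally satisfies this
+ # protocol without an explicit subclass relationship.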
+
+class Subdir(TypedDict):
+
+ if_found: T.List[_FoundProto]
+
+
+class Summary(TypedDict):
+
+ section: str
+ bool_yn: bool
+ list_sep: T.Optional[str]
+
+
+class FindProgram(ExtractRequired, ExtractSearchDirs):
+
+ native: MachineChoice
+ version: T.List[str]
+
+
+class RunCommand(TypedDict):
+
+ check: bool
+ capture: T.Optional[bool]
+ env: build.EnvironmentVariables
+
+
+class FeatureOptionRequire(TypedDict):
+
+ error_message: T.Optional[str]
+
+
+class DependencyPkgConfigVar(TypedDict):
+
+ default: T.Optional[str]
+ define_variable: T.List[str]
+
+
+class DependencyGetVariable(TypedDict):
+
+ cmake: T.Optional[str]
+ pkgconfig: T.Optional[str]
+ configtool: T.Optional[str]
+ internal: T.Optional[str]
+ default_value: T.Optional[str]
+ pkgconfig_define: T.List[str]
+
+
+class ConfigurationDataSet(TypedDict):
+
+ description: T.Optional[str]
+
+class VcsTag(TypedDict):
+
+ command: T.List[T.Union[str, build.BuildTarget, build.CustomTarget,
+ build.CustomTargetIndex, ExternalProgram, File]]
+ fallback: T.Optional[str]
+ input: T.List[T.Union[str, build.BuildTarget, build.CustomTarget, build.CustomTargetIndex,
+ build.ExtractedObjects, build.GeneratedList, ExternalProgram, File]]
+ output: T.List[str]
+ replace_string: str
+
+
+class ConfigureFile(TypedDict):
+
+ output: str
+ capture: bool
+ format: T.Literal['meson', 'cmake', 'cmake@']
+ output_format: T.Literal['c', 'nasm']
+ depfile: T.Optional[str]
+ install: T.Optional[bool]
+ install_dir: T.Union[str, T.Literal[False]]
+ install_mode: FileMode
+ install_tag: T.Optional[str]
+ encoding: str
+ command: T.Optional[T.List[T.Union[build.Executable, ExternalProgram, Compiler, File, str]]]
+ input: T.List[FileOrString]
+ configuration: T.Optional[T.Union[T.Dict[str, T.Union[str, int, bool]], build.ConfigurationData]]
+
+
+class Subproject(ExtractRequired):
+
+ default_options: T.List[str]
+ version: T.List[str]
+
+
+class DoSubproject(ExtractRequired):
+
+ default_options: T.List[str]
+ version: T.List[str]
+ cmake_options: T.List[str]
+ options: T.Optional[CMakeSubprojectOptions]
diff --git a/mesonbuild/interpreter/mesonmain.py b/mesonbuild/interpreter/mesonmain.py
new file mode 100644
index 0000000..01d0029
--- /dev/null
+++ b/mesonbuild/interpreter/mesonmain.py
@@ -0,0 +1,456 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2012-2021 The Meson development team
+# Copyright © 2021 Intel Corporation
+
+import os
+import typing as T
+
+from .. import mesonlib
+from .. import dependencies
+from .. import build
+from .. import mlog
+
+from ..mesonlib import MachineChoice, OptionKey
+from ..programs import OverrideProgram, ExternalProgram
+from ..interpreter.type_checking import ENV_KW, ENV_METHOD_KW, ENV_SEPARATOR_KW, env_convertor_with_method
+from ..interpreterbase import (MesonInterpreterObject, FeatureNew, FeatureDeprecated,
+ typed_pos_args, noArgsFlattening, noPosargs, noKwargs,
+ typed_kwargs, KwargInfo, InterpreterException)
+from .primitives import MesonVersionString
+from .type_checking import NATIVE_KW, NoneType
+
+if T.TYPE_CHECKING:
+ from typing_extensions import Literal
+ from ..backend.backends import ExecutableSerialisation
+ from ..compilers import Compiler
+ from ..interpreterbase import TYPE_kwargs, TYPE_var
+ from .interpreter import Interpreter
+
+ from typing_extensions import TypedDict
+
+ class FuncOverrideDependency(TypedDict):
+
+ native: mesonlib.MachineChoice
+ static: T.Optional[bool]
+
+ class AddInstallScriptKW(TypedDict):
+
+ skip_if_destdir: bool
+ install_tag: str
+
+ class NativeKW(TypedDict):
+
+ native: mesonlib.MachineChoice
+
+ class AddDevenvKW(TypedDict):
+ method: Literal['set', 'prepend', 'append']
+ separator: str
+
+
+class MesonMain(MesonInterpreterObject):
+ def __init__(self, build: 'build.Build', interpreter: 'Interpreter'):
+ super().__init__(subproject=interpreter.subproject)
+ self.build = build
+ self.interpreter = interpreter
+ self.methods.update({'get_compiler': self.get_compiler_method,
+ 'is_cross_build': self.is_cross_build_method,
+ 'has_exe_wrapper': self.has_exe_wrapper_method,
+ 'can_run_host_binaries': self.can_run_host_binaries_method,
+ 'is_unity': self.is_unity_method,
+ 'is_subproject': self.is_subproject_method,
+ 'current_source_dir': self.current_source_dir_method,
+ 'current_build_dir': self.current_build_dir_method,
+ 'source_root': self.source_root_method,
+ 'build_root': self.build_root_method,
+ 'project_source_root': self.project_source_root_method,
+ 'project_build_root': self.project_build_root_method,
+ 'global_source_root': self.global_source_root_method,
+ 'global_build_root': self.global_build_root_method,
+ 'add_install_script': self.add_install_script_method,
+ 'add_postconf_script': self.add_postconf_script_method,
+ 'add_dist_script': self.add_dist_script_method,
+ 'install_dependency_manifest': self.install_dependency_manifest_method,
+ 'override_dependency': self.override_dependency_method,
+ 'override_find_program': self.override_find_program_method,
+ 'project_version': self.project_version_method,
+ 'project_license': self.project_license_method,
+ 'version': self.version_method,
+ 'project_name': self.project_name_method,
+ 'get_cross_property': self.get_cross_property_method,
+ 'get_external_property': self.get_external_property_method,
+ 'has_external_property': self.has_external_property_method,
+ 'backend': self.backend_method,
+ 'add_devenv': self.add_devenv_method,
+ })
+
+ def _find_source_script(
+ self, name: str, prog: T.Union[str, mesonlib.File, build.Executable, ExternalProgram],
+ args: T.List[str]) -> 'ExecutableSerialisation':
+ largs: T.List[T.Union[str, build.Executable, ExternalProgram]] = []
+
+ if isinstance(prog, (build.Executable, ExternalProgram)):
+ FeatureNew.single_use(f'Passing executable/found program object to script parameter of {name}',
+ '0.55.0', self.subproject, location=self.current_node)
+ largs.append(prog)
+ else:
+ if isinstance(prog, mesonlib.File):
+ FeatureNew.single_use(f'Passing file object to script parameter of {name}',
+ '0.57.0', self.subproject, location=self.current_node)
+ found = self.interpreter.find_program_impl([prog])
+ largs.append(found)
+
+ largs.extend(args)
+ es = self.interpreter.backend.get_executable_serialisation(largs)
+ es.subproject = self.interpreter.subproject
+ return es
+
+ def _process_script_args(
+ self, name: str, args: T.Sequence[T.Union[
+ str, mesonlib.File, build.BuildTarget, build.CustomTarget,
+ build.CustomTargetIndex,
+ ExternalProgram,
+ ]]) -> T.List[str]:
+ script_args: T.List[str] = []
+ new = False
+ for a in args:
+ if isinstance(a, str):
+ script_args.append(a)
+ elif isinstance(a, mesonlib.File):
+ new = True
+ script_args.append(a.rel_to_builddir(self.interpreter.environment.source_dir))
+ elif isinstance(a, (build.BuildTarget, build.CustomTarget, build.CustomTargetIndex)):
+ new = True
+ script_args.extend([os.path.join(a.get_subdir(), o) for o in a.get_outputs()])
+
+ # This feels really hacky, but I'm not sure how else to fix
+ # this without completely rewriting install script handling.
+ # This is complicated by the fact that the install target
+ # depends on the 'all' target.
+ if isinstance(a, build.CustomTargetIndex):
+ a.target.build_by_default = True
+ else:
+ a.build_by_default = True
+ else:
+ script_args.extend(a.command)
+ new = True
+
+ if new:
+ FeatureNew.single_use(
+ f'Calling "{name}" with File, CustomTarget, Index of CustomTarget, '
+ 'Executable, or ExternalProgram',
+ '0.55.0', self.interpreter.subproject, location=self.current_node)
+ return script_args
+
+ @typed_pos_args(
+ 'meson.add_install_script',
+ (str, mesonlib.File, build.Executable, ExternalProgram),
+ varargs=(str, mesonlib.File, build.BuildTarget, build.CustomTarget, build.CustomTargetIndex, ExternalProgram)
+ )
+ @typed_kwargs(
+ 'meson.add_install_script',
+ KwargInfo('skip_if_destdir', bool, default=False, since='0.57.0'),
+ KwargInfo('install_tag', (str, NoneType), since='0.60.0'),
+ )
+ def add_install_script_method(
+ self,
+ args: T.Tuple[T.Union[str, mesonlib.File, build.Executable, ExternalProgram],
+ T.List[T.Union[str, mesonlib.File, build.BuildTarget, build.CustomTarget, build.CustomTargetIndex, ExternalProgram]]],
+ kwargs: 'AddInstallScriptKW') -> None:
+ script_args = self._process_script_args('add_install_script', args[1])
+ script = self._find_source_script('add_install_script', args[0], script_args)
+ script.skip_if_destdir = kwargs['skip_if_destdir']
+ script.tag = kwargs['install_tag']
+ self.build.install_scripts.append(script)
+
+ @typed_pos_args(
+ 'meson.add_postconf_script',
+ (str, mesonlib.File, ExternalProgram),
+ varargs=(str, mesonlib.File, ExternalProgram)
+ )
+ @noKwargs
+ def add_postconf_script_method(
+ self,
+ args: T.Tuple[T.Union[str, mesonlib.File, ExternalProgram],
+ T.List[T.Union[str, mesonlib.File, ExternalProgram]]],
+ kwargs: 'TYPE_kwargs') -> None:
+ script_args = self._process_script_args('add_postconf_script', args[1])
+ script = self._find_source_script('add_postconf_script', args[0], script_args)
+ self.build.postconf_scripts.append(script)
+
+ @typed_pos_args(
+ 'meson.add_dist_script',
+ (str, mesonlib.File, ExternalProgram),
+ varargs=(str, mesonlib.File, ExternalProgram)
+ )
+ @noKwargs
+ def add_dist_script_method(
+ self,
+ args: T.Tuple[T.Union[str, mesonlib.File, ExternalProgram],
+ T.List[T.Union[str, mesonlib.File, ExternalProgram]]],
+ kwargs: 'TYPE_kwargs') -> None:
+ if args[1]:
+ FeatureNew.single_use('Calling "add_dist_script" with multiple arguments',
+ '0.49.0', self.interpreter.subproject, location=self.current_node)
+ if self.interpreter.subproject != '':
+ FeatureNew.single_use('Calling "add_dist_script" in a subproject',
+ '0.58.0', self.interpreter.subproject, location=self.current_node)
+ script_args = self._process_script_args('add_dist_script', args[1])
+ script = self._find_source_script('add_dist_script', args[0], script_args)
+ self.build.dist_scripts.append(script)
+
+ @noPosargs
+ @noKwargs
+ def current_source_dir_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+ src = self.interpreter.environment.source_dir
+ sub = self.interpreter.subdir
+ if sub == '':
+ return src
+ return os.path.join(src, sub)
+
+ @noPosargs
+ @noKwargs
+ def current_build_dir_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+ src = self.interpreter.environment.build_dir
+ sub = self.interpreter.subdir
+ if sub == '':
+ return src
+ return os.path.join(src, sub)
+
+ @noPosargs
+ @noKwargs
+ def backend_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+ return self.interpreter.backend.name
+
+ @noPosargs
+ @noKwargs
+ @FeatureDeprecated('meson.source_root', '0.56.0', 'use meson.project_source_root() or meson.global_source_root() instead.')
+ def source_root_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+ return self.interpreter.environment.source_dir
+
+ @noPosargs
+ @noKwargs
+ @FeatureDeprecated('meson.build_root', '0.56.0', 'use meson.project_build_root() or meson.global_build_root() instead.')
+ def build_root_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+ return self.interpreter.environment.build_dir
+
+ @noPosargs
+ @noKwargs
+ @FeatureNew('meson.project_source_root', '0.56.0')
+ def project_source_root_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+ src = self.interpreter.environment.source_dir
+ sub = self.interpreter.root_subdir
+ if sub == '':
+ return src
+ return os.path.join(src, sub)
+
+ @noPosargs
+ @noKwargs
+ @FeatureNew('meson.project_build_root', '0.56.0')
+ def project_build_root_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+ src = self.interpreter.environment.build_dir
+ sub = self.interpreter.root_subdir
+ if sub == '':
+ return src
+ return os.path.join(src, sub)
+
+ @noPosargs
+ @noKwargs
+ @FeatureNew('meson.global_source_root', '0.58.0')
+ def global_source_root_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+ return self.interpreter.environment.source_dir
+
+ @noPosargs
+ @noKwargs
+ @FeatureNew('meson.global_build_root', '0.58.0')
+ def global_build_root_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+ return self.interpreter.environment.build_dir
+
+ @noPosargs
+ @noKwargs
+ @FeatureDeprecated('meson.has_exe_wrapper', '0.55.0', 'use meson.can_run_host_binaries instead.')
+ def has_exe_wrapper_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool:
+ return self._can_run_host_binaries_impl()
+
+ @noPosargs
+ @noKwargs
+ @FeatureNew('meson.can_run_host_binaries', '0.55.0')
+ def can_run_host_binaries_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool:
+ return self._can_run_host_binaries_impl()
+
+ def _can_run_host_binaries_impl(self) -> bool:
+ return not (
+ self.build.environment.is_cross_build() and
+ self.build.environment.need_exe_wrapper() and
+ self.build.environment.exe_wrapper is None
+ )
+
+ @noPosargs
+ @noKwargs
+ def is_cross_build_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool:
+ return self.build.environment.is_cross_build()
+
+ @typed_pos_args('meson.get_compiler', str)
+ @typed_kwargs('meson.get_compiler', NATIVE_KW)
+ def get_compiler_method(self, args: T.Tuple[str], kwargs: 'NativeKW') -> 'Compiler':
+ cname = args[0]
+ for_machine = kwargs['native']
+ clist = self.interpreter.coredata.compilers[for_machine]
+ try:
+ return clist[cname]
+ except KeyError:
+ raise InterpreterException(f'Tried to access compiler for language "{cname}", not specified for {for_machine.get_lower_case_name()} machine.')
+
+ @noPosargs
+ @noKwargs
+ def is_unity_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool:
+ optval = self.interpreter.environment.coredata.get_option(OptionKey('unity'))
+ return optval == 'on' or (optval == 'subprojects' and self.interpreter.is_subproject())
+
+ @noPosargs
+ @noKwargs
+ def is_subproject_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool:
+ return self.interpreter.is_subproject()
+
+ @typed_pos_args('meson.install_dependency_manifest', str)
+ @noKwargs
+ def install_dependency_manifest_method(self, args: T.Tuple[str], kwargs: 'TYPE_kwargs') -> None:
+ self.build.dep_manifest_name = args[0]
+
+ @FeatureNew('meson.override_find_program', '0.46.0')
+ @typed_pos_args('meson.override_find_program', str, (mesonlib.File, ExternalProgram, build.Executable))
+ @noKwargs
+ def override_find_program_method(self, args: T.Tuple[str, T.Union[mesonlib.File, ExternalProgram, build.Executable]], kwargs: 'TYPE_kwargs') -> None:
+ name, exe = args
+ if isinstance(exe, mesonlib.File):
+ abspath = exe.absolute_path(self.interpreter.environment.source_dir,
+ self.interpreter.environment.build_dir)
+ if not os.path.exists(abspath):
+ raise InterpreterException(f'Tried to override {name} with a file that does not exist.')
+ exe = OverrideProgram(name, [abspath])
+ self.interpreter.add_find_program_override(name, exe)
+
+ @typed_kwargs(
+ 'meson.override_dependency',
+ NATIVE_KW,
+ KwargInfo('static', (bool, NoneType), since='0.60.0'),
+ )
+ @typed_pos_args('meson.override_dependency', str, dependencies.Dependency)
+ @FeatureNew('meson.override_dependency', '0.54.0')
+ def override_dependency_method(self, args: T.Tuple[str, dependencies.Dependency], kwargs: 'FuncOverrideDependency') -> None:
+ name, dep = args
+ if not name:
+ raise InterpreterException('First argument must be a string and cannot be empty')
+
+ optkey = OptionKey('default_library', subproject=self.interpreter.subproject)
+ default_library = self.interpreter.coredata.get_option(optkey)
+ assert isinstance(default_library, str), 'for mypy'
+ static = kwargs['static']
+ if static is None:
+ # We don't know whether dep represents a static or a shared library; it
+ # could be a mix of both. We assume it follows the value of the
+ # default_library option.
+ self._override_dependency_impl(name, dep, kwargs, static=None)
+ if default_library == 'static':
+ self._override_dependency_impl(name, dep, kwargs, static=True)
+ elif default_library == 'shared':
+ self._override_dependency_impl(name, dep, kwargs, static=False)
+ else:
+ self._override_dependency_impl(name, dep, kwargs, static=True)
+ self._override_dependency_impl(name, dep, kwargs, static=False)
+ else:
+ # dependency('foo') without specifying static kwarg should find this
+ # override regardless of the static value here. But do not raise error
+ # if it has already been overridden, which would happen when overriding
+ # static and shared separately:
+ # meson.override_dependency('foo', shared_dep, static: false)
+ # meson.override_dependency('foo', static_dep, static: true)
+ # In that case dependency('foo') would return the first override.
+ self._override_dependency_impl(name, dep, kwargs, static=None, permissive=True)
+ self._override_dependency_impl(name, dep, kwargs, static=static)
+
+ def _override_dependency_impl(self, name: str, dep: dependencies.Dependency, kwargs: 'FuncOverrideDependency',
+ static: T.Optional[bool], permissive: bool = False) -> None:
+ # We need the cast here as get_dep_identifier works on such a dict,
+ # which FuncOverrideDependency is, but mypy can't figure that out
+ nkwargs = T.cast('T.Dict[str, T.Any]', kwargs.copy())
+ if static is None:
+ del nkwargs['static']
+ else:
+ nkwargs['static'] = static
+ identifier = dependencies.get_dep_identifier(name, nkwargs)
+ for_machine = kwargs['native']
+ override = self.build.dependency_overrides[for_machine].get(identifier)
+ if override:
+ if permissive:
+ return
+ m = 'Tried to override dependency {!r} which has already been resolved or overridden at {}'
+ location = mlog.get_error_location_string(override.node.filename, override.node.lineno)
+ raise InterpreterException(m.format(name, location))
+ self.build.dependency_overrides[for_machine][identifier] = \
+ build.DependencyOverride(dep, self.interpreter.current_node)
+
+ @noPosargs
+ @noKwargs
+ def project_version_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+ return self.build.dep_manifest[self.interpreter.active_projectname].version
+
+ @FeatureNew('meson.project_license()', '0.45.0')
+ @noPosargs
+ @noKwargs
+ def project_license_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> T.List[str]:
+ return self.build.dep_manifest[self.interpreter.active_projectname].license
+
+ @noPosargs
+ @noKwargs
+ def version_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> MesonVersionString:
+ return MesonVersionString(self.interpreter.coredata.version)
+
+ @noPosargs
+ @noKwargs
+ def project_name_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+ return self.interpreter.active_projectname
+
+ def __get_external_property_impl(self, propname: str, fallback: T.Optional[object], machine: MachineChoice) -> object:
+ """Shared implementation for get_cross_property and get_external_property."""
+ try:
+ return self.interpreter.environment.properties[machine][propname]
+ except KeyError:
+ if fallback is not None:
+ return fallback
+ raise InterpreterException(f'Unknown property for {machine.get_lower_case_name()} machine: {propname}')
+
+ @noArgsFlattening
+ @FeatureDeprecated('meson.get_cross_property', '0.58.0', 'Use meson.get_external_property() instead')
+ @typed_pos_args('meson.get_cross_property', str, optargs=[object])
+ @noKwargs
+ def get_cross_property_method(self, args: T.Tuple[str, T.Optional[object]], kwargs: 'TYPE_kwargs') -> object:
+ propname, fallback = args
+ return self.__get_external_property_impl(propname, fallback, MachineChoice.HOST)
+
+ @noArgsFlattening
+ @FeatureNew('meson.get_external_property', '0.54.0')
+ @typed_pos_args('meson.get_external_property', str, optargs=[object])
+ @typed_kwargs('meson.get_external_property', NATIVE_KW)
+ def get_external_property_method(self, args: T.Tuple[str, T.Optional[object]], kwargs: 'NativeKW') -> object:
+ propname, fallback = args
+ return self.__get_external_property_impl(propname, fallback, kwargs['native'])
+
+ @FeatureNew('meson.has_external_property', '0.58.0')
+ @typed_pos_args('meson.has_external_property', str)
+ @typed_kwargs('meson.has_external_property', NATIVE_KW)
+ def has_external_property_method(self, args: T.Tuple[str], kwargs: 'NativeKW') -> bool:
+ prop_name = args[0]
+ return prop_name in self.interpreter.environment.properties[kwargs['native']]
+
+ @FeatureNew('add_devenv', '0.58.0')
+ @typed_kwargs('environment', ENV_METHOD_KW, ENV_SEPARATOR_KW.evolve(since='0.62.0'))
+ @typed_pos_args('add_devenv', (str, list, dict, build.EnvironmentVariables))
+ def add_devenv_method(self, args: T.Tuple[T.Union[str, list, dict, build.EnvironmentVariables]],
+ kwargs: 'AddDevenvKW') -> None:
+ env = args[0]
+ msg = ENV_KW.validator(env)
+ if msg:
+ raise build.InvalidArguments(f'"add_devenv": {msg}')
+ converted = env_convertor_with_method(env, kwargs['method'], kwargs['separator'])
+ assert isinstance(converted, build.EnvironmentVariables)
+ self.build.devenv.append(converted)
diff --git a/mesonbuild/interpreter/primitives/__init__.py b/mesonbuild/interpreter/primitives/__init__.py
new file mode 100644
index 0000000..aebef41
--- /dev/null
+++ b/mesonbuild/interpreter/primitives/__init__.py
@@ -0,0 +1,29 @@
+# Copyright 2021 The Meson development team
+# SPDX-license-identifier: Apache-2.0
+
+__all__ = [
+ 'ArrayHolder',
+ 'BooleanHolder',
+ 'DictHolder',
+ 'IntegerHolder',
+ 'RangeHolder',
+ 'StringHolder',
+ 'MesonVersionString',
+ 'MesonVersionStringHolder',
+ 'DependencyVariableString',
+ 'DependencyVariableStringHolder',
+ 'OptionString',
+ 'OptionStringHolder',
+]
+
+from .array import ArrayHolder
+from .boolean import BooleanHolder
+from .dict import DictHolder
+from .integer import IntegerHolder
+from .range import RangeHolder
+from .string import (
+ StringHolder,
+ MesonVersionString, MesonVersionStringHolder,
+ DependencyVariableString, DependencyVariableStringHolder,
+ OptionString, OptionStringHolder,
+)
diff --git a/mesonbuild/interpreter/primitives/array.py b/mesonbuild/interpreter/primitives/array.py
new file mode 100644
index 0000000..eeea112
--- /dev/null
+++ b/mesonbuild/interpreter/primitives/array.py
@@ -0,0 +1,108 @@
+# Copyright 2021 The Meson development team
+# SPDX-license-identifier: Apache-2.0
+from __future__ import annotations
+
+import typing as T
+
+from ...interpreterbase import (
+ ObjectHolder,
+ IterableObject,
+ MesonOperator,
+ typed_operator,
+ noKwargs,
+ noPosargs,
+ noArgsFlattening,
+ typed_pos_args,
+ FeatureNew,
+
+ TYPE_var,
+
+ InvalidArguments,
+)
+from ...mparser import PlusAssignmentNode
+
+if T.TYPE_CHECKING:
+ # Object holders need the actual interpreter
+ from ...interpreter import Interpreter
+ from ...interpreterbase import TYPE_kwargs
+
+class ArrayHolder(ObjectHolder[T.List[TYPE_var]], IterableObject):
+ def __init__(self, obj: T.List[TYPE_var], interpreter: 'Interpreter') -> None:
+ super().__init__(obj, interpreter)
+ self.methods.update({
+ 'contains': self.contains_method,
+ 'length': self.length_method,
+ 'get': self.get_method,
+ })
+
+ self.trivial_operators.update({
+ MesonOperator.EQUALS: (list, lambda x: self.held_object == x),
+ MesonOperator.NOT_EQUALS: (list, lambda x: self.held_object != x),
+ MesonOperator.IN: (object, lambda x: x in self.held_object),
+ MesonOperator.NOT_IN: (object, lambda x: x not in self.held_object),
+ })
+
+ # Use actual methods for functions that require additional checks
+ self.operators.update({
+ MesonOperator.PLUS: self.op_plus,
+ MesonOperator.INDEX: self.op_index,
+ })
+
+ def display_name(self) -> str:
+ return 'array'
+
+ def iter_tuple_size(self) -> None:
+ return None
+
+ def iter_self(self) -> T.Iterator[TYPE_var]:
+ return iter(self.held_object)
+
+ def size(self) -> int:
+ return len(self.held_object)
+
+ @noArgsFlattening
+ @noKwargs
+ @typed_pos_args('array.contains', object)
+ def contains_method(self, args: T.Tuple[object], kwargs: TYPE_kwargs) -> bool:
+ def check_contains(el: T.List[TYPE_var]) -> bool:
+ for element in el:
+ if isinstance(element, list):
+ found = check_contains(element)
+ if found:
+ return True
+ if element == args[0]:
+ return True
+ return False
+ return check_contains(self.held_object)
+
+ @noKwargs
+ @noPosargs
+ def length_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> int:
+ return len(self.held_object)
+
+ @noArgsFlattening
+ @noKwargs
+ @typed_pos_args('array.get', int, optargs=[object])
+ def get_method(self, args: T.Tuple[int, T.Optional[TYPE_var]], kwargs: TYPE_kwargs) -> TYPE_var:
+ index = args[0]
+ if index < -len(self.held_object) or index >= len(self.held_object):
+ if args[1] is None:
+ raise InvalidArguments(f'Array index {index} is out of bounds for array of size {len(self.held_object)}.')
+ return args[1]
+ return self.held_object[index]
+
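+ # Note on get_method above: negative indices count from the end, so in the DSL
+ # `arr.get(-1)` returns the last element, and the optional second argument is
+ # returned instead of raising when the index is out of range.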
+ @typed_operator(MesonOperator.PLUS, object)
+ def op_plus(self, other: TYPE_var) -> T.List[TYPE_var]:
+ if not isinstance(other, list):
+ if not isinstance(self.current_node, PlusAssignmentNode):
+ FeatureNew.single_use('list.<plus>', '0.60.0', self.subproject, 'The right hand operand was not a list.',
+ location=self.current_node)
+ other = [other]
+ return self.held_object + other
+
+ @typed_operator(MesonOperator.INDEX, int)
+ def op_index(self, other: int) -> TYPE_var:
+ try:
+ return self.held_object[other]
+ except IndexError:
+ raise InvalidArguments(f'Index {other} out of bounds of array of size {len(self.held_object)}.')
diff --git a/mesonbuild/interpreter/primitives/boolean.py b/mesonbuild/interpreter/primitives/boolean.py
new file mode 100644
index 0000000..4b49caf
--- /dev/null
+++ b/mesonbuild/interpreter/primitives/boolean.py
@@ -0,0 +1,52 @@
+# Copyright 2021 The Meson development team
+# SPDX-license-identifier: Apache-2.0
+from __future__ import annotations
+
+from ...interpreterbase import (
+ ObjectHolder,
+ MesonOperator,
+ typed_pos_args,
+ noKwargs,
+ noPosargs,
+
+ InvalidArguments
+)
+
+import typing as T
+
+if T.TYPE_CHECKING:
+ # Object holders need the actual interpreter
+ from ...interpreter import Interpreter
+ from ...interpreterbase import TYPE_var, TYPE_kwargs
+
+class BooleanHolder(ObjectHolder[bool]):
+ def __init__(self, obj: bool, interpreter: 'Interpreter') -> None:
+ super().__init__(obj, interpreter)
+ self.methods.update({
+ 'to_int': self.to_int_method,
+ 'to_string': self.to_string_method,
+ })
+
+ self.trivial_operators.update({
+ MesonOperator.BOOL: (None, lambda x: self.held_object),
+ MesonOperator.NOT: (None, lambda x: not self.held_object),
+ MesonOperator.EQUALS: (bool, lambda x: self.held_object == x),
+ MesonOperator.NOT_EQUALS: (bool, lambda x: self.held_object != x),
+ })
+
+ def display_name(self) -> str:
+ return 'bool'
+
+ @noKwargs
+ @noPosargs
+ def to_int_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> int:
+ return 1 if self.held_object else 0
+
+ @noKwargs
+ @typed_pos_args('bool.to_string', optargs=[str, str])
+ def to_string_method(self, args: T.Tuple[T.Optional[str], T.Optional[str]], kwargs: TYPE_kwargs) -> str:
+ true_str = args[0] or 'true'
+ false_str = args[1] or 'false'
+ if any(x is not None for x in args) and not all(x is not None for x in args):
+ raise InvalidArguments('bool.to_string() must have either no arguments or exactly two string arguments that signify what values to return for true and false.')
+ return true_str if self.held_object else false_str
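+ # In the DSL this means e.g. true.to_string() == 'true' and
+ # false.to_string('yes', 'no') == 'no'.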
diff --git a/mesonbuild/interpreter/primitives/dict.py b/mesonbuild/interpreter/primitives/dict.py
new file mode 100644
index 0000000..ac7c99b
--- /dev/null
+++ b/mesonbuild/interpreter/primitives/dict.py
@@ -0,0 +1,88 @@
+# Copyright 2021 The Meson development team
+# SPDX-license-identifier: Apache-2.0
+from __future__ import annotations
+
+import typing as T
+
+from ...interpreterbase import (
+ ObjectHolder,
+ IterableObject,
+ MesonOperator,
+ typed_operator,
+ noKwargs,
+ noPosargs,
+ noArgsFlattening,
+ typed_pos_args,
+
+ TYPE_var,
+
+ InvalidArguments,
+)
+
+if T.TYPE_CHECKING:
+ # Object holders need the actual interpreter
+ from ...interpreter import Interpreter
+ from ...interpreterbase import TYPE_kwargs
+
+class DictHolder(ObjectHolder[T.Dict[str, TYPE_var]], IterableObject):
+ def __init__(self, obj: T.Dict[str, TYPE_var], interpreter: 'Interpreter') -> None:
+ super().__init__(obj, interpreter)
+ self.methods.update({
+ 'has_key': self.has_key_method,
+ 'keys': self.keys_method,
+ 'get': self.get_method,
+ })
+
+ self.trivial_operators.update({
+ # Arithmetic
+ MesonOperator.PLUS: (dict, lambda x: {**self.held_object, **x}),
+
+ # Comparison
+ MesonOperator.EQUALS: (dict, lambda x: self.held_object == x),
+ MesonOperator.NOT_EQUALS: (dict, lambda x: self.held_object != x),
+ MesonOperator.IN: (str, lambda x: x in self.held_object),
+ MesonOperator.NOT_IN: (str, lambda x: x not in self.held_object),
+ })
+
+ # Use actual methods for functions that require additional checks
+ self.operators.update({
+ MesonOperator.INDEX: self.op_index,
+ })
+
+ def display_name(self) -> str:
+ return 'dict'
+
+ def iter_tuple_size(self) -> int:
+ return 2
+
+ def iter_self(self) -> T.Iterator[T.Tuple[str, TYPE_var]]:
+ return iter(self.held_object.items())
+
+ def size(self) -> int:
+ return len(self.held_object)
+
+ @noKwargs
+ @typed_pos_args('dict.has_key', str)
+ def has_key_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:
+ return args[0] in self.held_object
+
+ @noKwargs
+ @noPosargs
+ def keys_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.List[str]:
+ return sorted(self.held_object)
+
+ @noArgsFlattening
+ @noKwargs
+ @typed_pos_args('dict.get', str, optargs=[object])
+ def get_method(self, args: T.Tuple[str, T.Optional[TYPE_var]], kwargs: TYPE_kwargs) -> TYPE_var:
+ if args[0] in self.held_object:
+ return self.held_object[args[0]]
+ if args[1] is not None:
+ return args[1]
+ raise InvalidArguments(f'Key {args[0]!r} is not in the dictionary.')
+
+ @typed_operator(MesonOperator.INDEX, str)
+ def op_index(self, other: str) -> TYPE_var:
+ if other not in self.held_object:
+ raise InvalidArguments(f'Key {other} is not in the dictionary.')
+ return self.held_object[other]
diff --git a/mesonbuild/interpreter/primitives/integer.py b/mesonbuild/interpreter/primitives/integer.py
new file mode 100644
index 0000000..f433f57
--- /dev/null
+++ b/mesonbuild/interpreter/primitives/integer.py
@@ -0,0 +1,81 @@
+# Copyright 2021 The Meson development team
+# SPDX-license-identifier: Apache-2.0
+from __future__ import annotations
+
+from ...interpreterbase import (
+ ObjectHolder,
+ MesonOperator,
+ typed_operator,
+ noKwargs,
+ noPosargs,
+
+ InvalidArguments
+)
+
+import typing as T
+
+if T.TYPE_CHECKING:
+ # Object holders need the actual interpreter
+ from ...interpreter import Interpreter
+ from ...interpreterbase import TYPE_var, TYPE_kwargs
+
+class IntegerHolder(ObjectHolder[int]):
+ def __init__(self, obj: int, interpreter: 'Interpreter') -> None:
+ super().__init__(obj, interpreter)
+ self.methods.update({
+ 'is_even': self.is_even_method,
+ 'is_odd': self.is_odd_method,
+ 'to_string': self.to_string_method,
+ })
+
+ self.trivial_operators.update({
+ # Arithmetic
+ MesonOperator.UMINUS: (None, lambda x: -self.held_object),
+ MesonOperator.PLUS: (int, lambda x: self.held_object + x),
+ MesonOperator.MINUS: (int, lambda x: self.held_object - x),
+ MesonOperator.TIMES: (int, lambda x: self.held_object * x),
+
+ # Comparison
+ MesonOperator.EQUALS: (int, lambda x: self.held_object == x),
+ MesonOperator.NOT_EQUALS: (int, lambda x: self.held_object != x),
+ MesonOperator.GREATER: (int, lambda x: self.held_object > x),
+ MesonOperator.LESS: (int, lambda x: self.held_object < x),
+ MesonOperator.GREATER_EQUALS: (int, lambda x: self.held_object >= x),
+ MesonOperator.LESS_EQUALS: (int, lambda x: self.held_object <= x),
+ })
+
+ # Use actual methods for functions that require additional checks
+ self.operators.update({
+ MesonOperator.DIV: self.op_div,
+ MesonOperator.MOD: self.op_mod,
+ })
+
+ def display_name(self) -> str:
+ return 'int'
+
+ @noKwargs
+ @noPosargs
+ def is_even_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+ return self.held_object % 2 == 0
+
+ @noKwargs
+ @noPosargs
+ def is_odd_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+ return self.held_object % 2 != 0
+
+ @noKwargs
+ @noPosargs
+ def to_string_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return str(self.held_object)
+
+ @typed_operator(MesonOperator.DIV, int)
+ def op_div(self, other: int) -> int:
+ if other == 0:
+ raise InvalidArguments('Tried to divide by 0')
+ return self.held_object // other
+
+ @typed_operator(MesonOperator.MOD, int)
+ def op_mod(self, other: int) -> int:
+ if other == 0:
+ raise InvalidArguments('Tried to divide by 0')
+ return self.held_object % other
diff --git a/mesonbuild/interpreter/primitives/range.py b/mesonbuild/interpreter/primitives/range.py
new file mode 100644
index 0000000..5eb5e03
--- /dev/null
+++ b/mesonbuild/interpreter/primitives/range.py
@@ -0,0 +1,38 @@
+# Copyright 2021 The Meson development team
+# SPDX-license-identifier: Apache-2.0
+from __future__ import annotations
+
+import typing as T
+
+from ...interpreterbase import (
+ MesonInterpreterObject,
+ IterableObject,
+ MesonOperator,
+ InvalidArguments,
+)
+
+if T.TYPE_CHECKING:
+ from ...interpreterbase import SubProject
+
+class RangeHolder(MesonInterpreterObject, IterableObject):
+ def __init__(self, start: int, stop: int, step: int, *, subproject: 'SubProject') -> None:
+ super().__init__(subproject=subproject)
+ self.range = range(start, stop, step)
+ self.operators.update({
+ MesonOperator.INDEX: self.op_index,
+ })
+
+ def op_index(self, other: int) -> int:
+ try:
+ return self.range[other]
+ except IndexError:
+ raise InvalidArguments(f'Index {other} out of bounds of range.')
+
+ def iter_tuple_size(self) -> None:
+ return None
+
+ def iter_self(self) -> T.Iterator[int]:
+ return iter(self.range)
+
+ def size(self) -> int:
+ return len(self.range)
diff --git a/mesonbuild/interpreter/primitives/string.py b/mesonbuild/interpreter/primitives/string.py
new file mode 100644
index 0000000..d9f6a06
--- /dev/null
+++ b/mesonbuild/interpreter/primitives/string.py
@@ -0,0 +1,233 @@
+# Copyright 2021 The Meson development team
+# SPDX-license-identifier: Apache-2.0
+from __future__ import annotations
+
+import re
+import os
+
+import typing as T
+
+from ...mesonlib import version_compare
+from ...interpreterbase import (
+ ObjectHolder,
+ MesonOperator,
+ FeatureNew,
+ typed_operator,
+ noArgsFlattening,
+ noKwargs,
+ noPosargs,
+ typed_pos_args,
+
+ InvalidArguments,
+)
+
+
+if T.TYPE_CHECKING:
+ # Object holders need the actual interpreter
+ from ...interpreter import Interpreter
+ from ...interpreterbase import TYPE_var, TYPE_kwargs
+
+class StringHolder(ObjectHolder[str]):
+ def __init__(self, obj: str, interpreter: 'Interpreter') -> None:
+ super().__init__(obj, interpreter)
+ self.methods.update({
+ 'contains': self.contains_method,
+ 'startswith': self.startswith_method,
+ 'endswith': self.endswith_method,
+ 'format': self.format_method,
+ 'join': self.join_method,
+ 'replace': self.replace_method,
+ 'split': self.split_method,
+ 'strip': self.strip_method,
+ 'substring': self.substring_method,
+ 'to_int': self.to_int_method,
+ 'to_lower': self.to_lower_method,
+ 'to_upper': self.to_upper_method,
+ 'underscorify': self.underscorify_method,
+ 'version_compare': self.version_compare_method,
+ })
+
+ self.trivial_operators.update({
+ # Arithmetic
+ MesonOperator.PLUS: (str, lambda x: self.held_object + x),
+
+ # Comparison
+ MesonOperator.EQUALS: (str, lambda x: self.held_object == x),
+ MesonOperator.NOT_EQUALS: (str, lambda x: self.held_object != x),
+ MesonOperator.GREATER: (str, lambda x: self.held_object > x),
+ MesonOperator.LESS: (str, lambda x: self.held_object < x),
+ MesonOperator.GREATER_EQUALS: (str, lambda x: self.held_object >= x),
+ MesonOperator.LESS_EQUALS: (str, lambda x: self.held_object <= x),
+ })
+
+ # Use actual methods for functions that require additional checks
+ self.operators.update({
+ MesonOperator.DIV: self.op_div,
+ MesonOperator.INDEX: self.op_index,
+ MesonOperator.IN: self.op_in,
+ MesonOperator.NOT_IN: self.op_notin,
+ })
+
+ def display_name(self) -> str:
+ return 'str'
+
+ @noKwargs
+ @typed_pos_args('str.contains', str)
+ def contains_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:
+ return self.held_object.find(args[0]) >= 0
+
+ @noKwargs
+ @typed_pos_args('str.startswith', str)
+ def startswith_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:
+ return self.held_object.startswith(args[0])
+
+ @noKwargs
+ @typed_pos_args('str.endswith', str)
+ def endswith_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:
+ return self.held_object.endswith(args[0])
+
+ @noArgsFlattening
+ @noKwargs
+ @typed_pos_args('str.format', varargs=object)
+ def format_method(self, args: T.Tuple[T.List[object]], kwargs: TYPE_kwargs) -> str:
+ arg_strings: T.List[str] = []
+ for arg in args[0]:
+ if isinstance(arg, bool): # Python's str() of a bool is capitalized; Meson uses lowercase.
+ arg = str(arg).lower()
+ arg_strings.append(str(arg))
+
+ def arg_replace(match: T.Match[str]) -> str:
+ idx = int(match.group(1))
+ if idx >= len(arg_strings):
+ raise InvalidArguments(f'Format placeholder @{idx}@ out of range.')
+ return arg_strings[idx]
+
+ return re.sub(r'@(\d+)@', arg_replace, self.held_object)
+
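+ # format_method above implements the DSL's @N@ placeholders, e.g.
+ # 'pi is close to @0@'.format(3) -> 'pi is close to 3'; booleans are rendered
+ # in lowercase as shown above.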
+ @noKwargs
+ @typed_pos_args('str.join', varargs=str)
+ def join_method(self, args: T.Tuple[T.List[str]], kwargs: TYPE_kwargs) -> str:
+ return self.held_object.join(args[0])
+
+ @noKwargs
+ @typed_pos_args('str.replace', str, str)
+ def replace_method(self, args: T.Tuple[str, str], kwargs: TYPE_kwargs) -> str:
+ return self.held_object.replace(args[0], args[1])
+
+ @noKwargs
+ @typed_pos_args('str.split', optargs=[str])
+ def split_method(self, args: T.Tuple[T.Optional[str]], kwargs: TYPE_kwargs) -> T.List[str]:
+ return self.held_object.split(args[0])
+
+ @noKwargs
+ @typed_pos_args('str.strip', optargs=[str])
+ def strip_method(self, args: T.Tuple[T.Optional[str]], kwargs: TYPE_kwargs) -> str:
+ return self.held_object.strip(args[0])
+
+ @noKwargs
+ @typed_pos_args('str.substring', optargs=[int, int])
+ def substring_method(self, args: T.Tuple[T.Optional[int], T.Optional[int]], kwargs: TYPE_kwargs) -> str:
+ start = args[0] if args[0] is not None else 0
+ end = args[1] if args[1] is not None else len(self.held_object)
+ return self.held_object[start:end]
+
+ @noKwargs
+ @noPosargs
+ def to_int_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> int:
+ try:
+ return int(self.held_object)
+ except ValueError:
+ raise InvalidArguments(f'String {self.held_object!r} cannot be converted to int')
+
+ @noKwargs
+ @noPosargs
+ def to_lower_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.held_object.lower()
+
+ @noKwargs
+ @noPosargs
+ def to_upper_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.held_object.upper()
+
+ @noKwargs
+ @noPosargs
+ def underscorify_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return re.sub(r'[^a-zA-Z0-9]', '_', self.held_object)
+
+ @noKwargs
+ @typed_pos_args('str.version_compare', str)
+ def version_compare_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:
+ return version_compare(self.held_object, args[0])
+
+ @staticmethod
+ def _op_div(this: str, other: str) -> str:
+ return os.path.join(this, other).replace('\\', '/')
+
+ @FeatureNew('/ with string arguments', '0.49.0')
+ @typed_operator(MesonOperator.DIV, str)
+ def op_div(self, other: str) -> str:
+ return self._op_div(self.held_object, other)
+
+ @typed_operator(MesonOperator.INDEX, int)
+ def op_index(self, other: int) -> str:
+ try:
+ return self.held_object[other]
+ except IndexError:
+ raise InvalidArguments(f'Index {other} out of bounds of string of size {len(self.held_object)}.')
+
+ @FeatureNew('"in" string operator', '1.0.0')
+ @typed_operator(MesonOperator.IN, str)
+ def op_in(self, other: str) -> bool:
+ return other in self.held_object
+
+ @FeatureNew('"not in" string operator', '1.0.0')
+ @typed_operator(MesonOperator.NOT_IN, str)
+ def op_notin(self, other: str) -> bool:
+ return other not in self.held_object
+
+
+class MesonVersionString(str):
+ pass
+
+class MesonVersionStringHolder(StringHolder):
+ @noKwargs
+ @typed_pos_args('str.version_compare', str)
+ def version_compare_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:
+ self.interpreter.tmp_meson_version = args[0]
+ return version_compare(self.held_object, args[0])
+
+# These special subclasses of string exist to cover the case where a dependency
+# exports a string variable interchangeable with a system dependency. This
+# matters because a dependency can only have string-type get_variable() return
+# values. If at any time dependencies start supporting additional variable
+# types, this class could be deprecated.
+class DependencyVariableString(str):
+ pass
+
+class DependencyVariableStringHolder(StringHolder):
+ def op_div(self, other: str) -> T.Union[str, DependencyVariableString]:
+ ret = super().op_div(other)
+ if '..' in other:
+ return ret
+ return DependencyVariableString(ret)
+
+
+class OptionString(str):
+ optname: str
+
+ def __new__(cls, value: str, name: str) -> 'OptionString':
+ obj = str.__new__(cls, value)
+ obj.optname = name
+ return obj
+
+ def __getnewargs__(self) -> T.Tuple[str, str]: # type: ignore # because the entire point of this is to diverge
+ return (str(self), self.optname)
+
+
+class OptionStringHolder(StringHolder):
+ held_object: OptionString
+
+ def op_div(self, other: str) -> T.Union[str, OptionString]:
+ ret = super().op_div(other)
+ name = self._op_div(self.held_object.optname, other)
+ return OptionString(ret, name)
diff --git a/mesonbuild/interpreter/type_checking.py b/mesonbuild/interpreter/type_checking.py
new file mode 100644
index 0000000..7e1bd80
--- /dev/null
+++ b/mesonbuild/interpreter/type_checking.py
@@ -0,0 +1,479 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright © 2021 Intel Corporation
+
+"""Helpers for strict type checking."""
+
+from __future__ import annotations
+import os
+import typing as T
+
+from .. import compilers
+from ..build import (CustomTarget, BuildTarget,
+ CustomTargetIndex, ExtractedObjects, GeneratedList, IncludeDirs,
+ BothLibraries, SharedLibrary, StaticLibrary, Jar, Executable)
+from ..coredata import UserFeatureOption
+from ..dependencies import Dependency, InternalDependency
+from ..interpreterbase import FeatureNew
+from ..interpreterbase.decorators import KwargInfo, ContainerTypeInfo
+from ..mesonlib import (File, FileMode, MachineChoice, listify, has_path_sep,
+ OptionKey, EnvironmentVariables)
+from ..programs import ExternalProgram
+
+# Helper definition for type checks that are `Optional[T]`
+NoneType: T.Type[None] = type(None)
+
+if T.TYPE_CHECKING:
+ from typing_extensions import Literal
+
+ from ..interpreterbase import TYPE_var
+ from ..interpreterbase.decorators import FeatureCheckBase
+ from ..mesonlib import EnvInitValueType
+
+ _FullEnvInitValueType = T.Union[EnvironmentVariables, T.List[str], T.List[T.List[str]], EnvInitValueType, str, None]
+
+
+def in_set_validator(choices: T.Set[str]) -> T.Callable[[str], T.Optional[str]]:
+ """Check that the choice given was one of the given set."""
+
+ def inner(check: str) -> T.Optional[str]:
+ if check not in choices:
+ return f"must be one of {', '.join(sorted(choices))}, not {check}"
+ return None
+
+ return inner
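+ # Used below for keywords with a fixed vocabulary, for example
+ # in_set_validator({'set', 'prepend', 'append'}) on ENV_METHOD_KW and the
+ # test 'protocol' keyword.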
+
+
+def _language_validator(l: T.List[str]) -> T.Optional[str]:
+ """Validate language keyword argument.
+
+ Particularly for functions like `add_compiler()`, and `add_*_args()`
+ """
+ diff = {a.lower() for a in l}.difference(compilers.all_languages)
+ if diff:
+ return f'unknown languages: {", ".join(diff)}'
+ return None
+
+
+def _install_mode_validator(mode: T.List[T.Union[str, bool, int]]) -> T.Optional[str]:
+ """Validate the `install_mode` keyword argument.
+
+ This is a rather odd thing: it is either a scalar or an array of up to 3 values in the form:
+ [(str | False), (str | int | False) = False, (str | int | False) = False]
+ where the second and third components are optional and default to False.
+ """
+ if not mode:
+ return None
+ if True in mode:
+ return 'components can only be permission strings, numbers, or False'
+ if len(mode) > 3:
+ return 'may have at most 3 elements'
+
+ perms = mode[0]
+ if not isinstance(perms, (str, bool)):
+ return 'first component must be a permissions string or False'
+
+ if isinstance(perms, str):
+ if not len(perms) == 9:
+ return ('permissions string must be exactly 9 characters in the form rwxr-xr-x,'
+ f' got {len(perms)}')
+ for i in [0, 3, 6]:
+ if perms[i] not in {'-', 'r'}:
+ return f'permissions character {i+1} must be "-" or "r", not {perms[i]}'
+ for i in [1, 4, 7]:
+ if perms[i] not in {'-', 'w'}:
+ return f'permissions character {i+1} must be "-" or "w", not {perms[i]}'
+ for i in [2, 5]:
+ if perms[i] not in {'-', 'x', 's', 'S'}:
+ return f'permissions character {i+1} must be "-", "s", "S", or "x", not {perms[i]}'
+ if perms[8] not in {'-', 'x', 't', 'T'}:
+ return f'permission character 9 must be "-", "t", "T", or "x", not {perms[8]}'
+
+ if len(mode) >= 2 and not isinstance(mode[1], (int, str, bool)):
+ return 'second component can only be a string, number, or False'
+ if len(mode) >= 3 and not isinstance(mode[2], (int, str, bool)):
+ return 'third component can only be a string, number, or False'
+
+ return None
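+ # Accepted values therefore look like e.g. ['rwxr-xr-x', 'root', 'root'] or a
+ # bare 'rw-r--r--' (listified to a single element); a False component means
+ # "leave that part at its default".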
+
+
+def _install_mode_convertor(mode: T.Optional[T.List[T.Union[str, bool, int]]]) -> FileMode:
+ """Convert the DSL form of the `install_mode` keyword argument to `FileMode`
+
+ This is not required, and if not required returns None
+
+ TODO: It's not clear to me why this needs to be None and not just return an
+ empty FileMode.
+ """
+ # this has already been validated by the validator
+ return FileMode(*(m if isinstance(m, str) else None for m in mode))
+
+
+def _lower_strlist(input: T.List[str]) -> T.List[str]:
+ """Lower a list of strings.
+
+ mypy (but not pyright) gets confused about using a lambda as the convertor function
+ """
+ return [i.lower() for i in input]
+
+
+def variables_validator(contents: T.Union[str, T.List[str], T.Dict[str, str]]) -> T.Optional[str]:
+ if isinstance(contents, str):
+ contents = [contents]
+ if isinstance(contents, dict):
+ variables = contents
+ else:
+ variables = {}
+ for v in contents:
+ try:
+ key, val = v.split('=', 1)
+ except ValueError:
+ return f'variable {v!r} must have a value separated by an equals sign.'
+ variables[key.strip()] = val.strip()
+ for k, v in variables.items():
+ if not k:
+ return 'empty variable name'
+ if not v:
+ return 'empty variable value'
+ if any(c.isspace() for c in k):
+ return f'invalid whitespace in variable name {k!r}'
+ return None
+
+
+def variables_convertor(contents: T.Union[str, T.List[str], T.Dict[str, str]]) -> T.Dict[str, str]:
+ if isinstance(contents, str):
+ contents = [contents]
+ if isinstance(contents, dict):
+ return contents
+ variables = {}
+ for v in contents:
+ key, val = v.split('=', 1)
+ variables[key.strip()] = val.strip()
+ return variables
+
+
+NATIVE_KW = KwargInfo(
+ 'native', bool,
+ default=False,
+ convertor=lambda n: MachineChoice.BUILD if n else MachineChoice.HOST)
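+ # i.e. `native: true` selects the build machine and the default `false` the
+ # host machine.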
+
+LANGUAGE_KW = KwargInfo(
+ 'language', ContainerTypeInfo(list, str, allow_empty=False),
+ listify=True,
+ required=True,
+ validator=_language_validator,
+ convertor=_lower_strlist)
+
+INSTALL_MODE_KW: KwargInfo[T.List[T.Union[str, bool, int]]] = KwargInfo(
+ 'install_mode',
+ ContainerTypeInfo(list, (str, bool, int)),
+ listify=True,
+ default=[],
+ validator=_install_mode_validator,
+ convertor=_install_mode_convertor,
+)
+
+REQUIRED_KW: KwargInfo[T.Union[bool, UserFeatureOption]] = KwargInfo(
+ 'required',
+ (bool, UserFeatureOption),
+ default=True,
+ # TODO: extract_required_kwarg could be converted to a convertor
+)
+
+DISABLER_KW: KwargInfo[bool] = KwargInfo('disabler', bool, default=False)
+
+def _env_validator(value: T.Union[EnvironmentVariables, T.List['TYPE_var'], T.Dict[str, 'TYPE_var'], str, None],
+ allow_dict_list: bool = True) -> T.Optional[str]:
+ def _splitter(v: str) -> T.Optional[str]:
+ split = v.split('=', 1)
+ if len(split) == 1:
+ return f'"{v}" is not two string values separated by an "="'
+ return None
+
+ if isinstance(value, str):
+ v = _splitter(value)
+ if v is not None:
+ return v
+ elif isinstance(value, list):
+ for i in listify(value):
+ if not isinstance(i, str):
+ return f"All array elements must be a string, not {i!r}"
+ v = _splitter(i)
+ if v is not None:
+ return v
+ elif isinstance(value, dict):
+ # We don't need to split here, just do the type checking
+ for k, dv in value.items():
+ if allow_dict_list:
+ if any(i for i in listify(dv) if not isinstance(i, str)):
+ return f"Dictionary element {k} must be a string or list of strings not {dv!r}"
+ elif not isinstance(dv, str):
+ return f"Dictionary element {k} must be a string not {dv!r}"
+ # We know that otherwise we have an EnvironmentVariables object or None, and
+ # we're okay at this point
+ return None
+
+def _options_validator(value: T.Union[EnvironmentVariables, T.List['TYPE_var'], T.Dict[str, 'TYPE_var'], str, None]) -> T.Optional[str]:
+ # Reusing the env validator is a little overkill, but nicer than duplicating the code
+ return _env_validator(value, allow_dict_list=False)
+
+def split_equal_string(input: str) -> T.Tuple[str, str]:
+ """Split a string in the form `x=y`
+
+ This assumes that the string has already been validated to split properly.
+ """
+ a, b = input.split('=', 1)
+ return (a, b)
+
+# Split _env_convertor() and env_convertor_with_method() to make mypy happy.
+# It does not want extra arguments in KwargInfo convertor callable.
+def env_convertor_with_method(value: _FullEnvInitValueType,
+ init_method: Literal['set', 'prepend', 'append'] = 'set',
+ separator: str = os.pathsep) -> EnvironmentVariables:
+ if isinstance(value, str):
+ return EnvironmentVariables(dict([split_equal_string(value)]), init_method, separator)
+ elif isinstance(value, list):
+ return EnvironmentVariables(dict(split_equal_string(v) for v in listify(value)), init_method, separator)
+ elif isinstance(value, dict):
+ return EnvironmentVariables(value, init_method, separator)
+ elif value is None:
+ return EnvironmentVariables()
+ return value
+
+def _env_convertor(value: _FullEnvInitValueType) -> EnvironmentVariables:
+ return env_convertor_with_method(value)
+
+ENV_KW: KwargInfo[T.Union[EnvironmentVariables, T.List, T.Dict, str, None]] = KwargInfo(
+ 'env',
+ (EnvironmentVariables, list, dict, str, NoneType),
+ validator=_env_validator,
+ convertor=_env_convertor,
+)
+
+DEPFILE_KW: KwargInfo[T.Optional[str]] = KwargInfo(
+ 'depfile',
+ (str, type(None)),
+ validator=lambda x: 'Depfile must be a plain filename without a subdirectory' if has_path_sep(x) else None
+)
+
+# TODO: CustomTargetIndex should be supported here as well
+DEPENDS_KW: KwargInfo[T.List[T.Union[BuildTarget, CustomTarget]]] = KwargInfo(
+ 'depends',
+ ContainerTypeInfo(list, (BuildTarget, CustomTarget)),
+ listify=True,
+ default=[],
+)
+
+DEPEND_FILES_KW: KwargInfo[T.List[T.Union[str, File]]] = KwargInfo(
+ 'depend_files',
+ ContainerTypeInfo(list, (File, str)),
+ listify=True,
+ default=[],
+)
+
+COMMAND_KW: KwargInfo[T.List[T.Union[str, BuildTarget, CustomTarget, CustomTargetIndex, ExternalProgram, File]]] = KwargInfo(
+ 'command',
+ # TODO: should accept CustomTargetIndex as well?
+ ContainerTypeInfo(list, (str, BuildTarget, CustomTarget, CustomTargetIndex, ExternalProgram, File), allow_empty=False),
+ required=True,
+ listify=True,
+ default=[],
+)
+
+def _override_options_convertor(raw: T.List[str]) -> T.Dict[OptionKey, str]:
+ output: T.Dict[OptionKey, str] = {}
+ for each in raw:
+ k, v = split_equal_string(each)
+ output[OptionKey.from_string(k)] = v
+ return output
+
+
+OVERRIDE_OPTIONS_KW: KwargInfo[T.List[str]] = KwargInfo(
+ 'override_options',
+ ContainerTypeInfo(list, str),
+ listify=True,
+ default=[],
+ validator=_options_validator,
+ convertor=_override_options_convertor,
+)
+
+
+def _output_validator(outputs: T.List[str]) -> T.Optional[str]:
+ for i in outputs:
+ if i == '':
+ return 'Output must not be empty.'
+ elif i.strip() == '':
+ return 'Output must not consist only of whitespace.'
+ elif has_path_sep(i):
+ return f'Output {i!r} must not contain a path segment.'
+ elif '@INPUT' in i:
+ return f'Output {i!r} contains "@INPUT", which is invalid. Did you mean "@PLAINNAME@" or "@BASENAME@"?'
+
+ return None
+
+MULTI_OUTPUT_KW: KwargInfo[T.List[str]] = KwargInfo(
+ 'output',
+ ContainerTypeInfo(list, str, allow_empty=False),
+ listify=True,
+ required=True,
+ default=[],
+ validator=_output_validator,
+)
+
+OUTPUT_KW: KwargInfo[str] = KwargInfo(
+ 'output',
+ str,
+ required=True,
+ validator=lambda x: _output_validator([x])
+)
+
+CT_INPUT_KW: KwargInfo[T.List[T.Union[str, File, ExternalProgram, BuildTarget, CustomTarget, CustomTargetIndex, ExtractedObjects, GeneratedList]]] = KwargInfo(
+ 'input',
+ ContainerTypeInfo(list, (str, File, ExternalProgram, BuildTarget, CustomTarget, CustomTargetIndex, ExtractedObjects, GeneratedList)),
+ listify=True,
+ default=[],
+)
+
+CT_INSTALL_TAG_KW: KwargInfo[T.List[T.Union[str, bool]]] = KwargInfo(
+ 'install_tag',
+ ContainerTypeInfo(list, (str, bool)),
+ listify=True,
+ default=[],
+ since='0.60.0',
+ convertor=lambda x: [y if isinstance(y, str) else None for y in x],
+)
+
+INSTALL_TAG_KW: KwargInfo[T.Optional[str]] = KwargInfo('install_tag', (str, NoneType))
+
+INSTALL_KW = KwargInfo('install', bool, default=False)
+
+CT_INSTALL_DIR_KW: KwargInfo[T.List[T.Union[str, Literal[False]]]] = KwargInfo(
+ 'install_dir',
+ ContainerTypeInfo(list, (str, bool)),
+ listify=True,
+ default=[],
+ validator=lambda x: 'must be `false` if boolean' if True in x else None,
+)
+
+CT_BUILD_BY_DEFAULT: KwargInfo[T.Optional[bool]] = KwargInfo('build_by_default', (bool, type(None)), since='0.40.0')
+
+CT_BUILD_ALWAYS: KwargInfo[T.Optional[bool]] = KwargInfo(
+ 'build_always', (bool, NoneType),
+ deprecated='0.47.0',
+ deprecated_message='combine build_by_default and build_always_stale instead.',
+)
+
+CT_BUILD_ALWAYS_STALE: KwargInfo[T.Optional[bool]] = KwargInfo(
+ 'build_always_stale', (bool, NoneType),
+ since='0.47.0',
+)
+
+INSTALL_DIR_KW: KwargInfo[T.Optional[str]] = KwargInfo('install_dir', (str, NoneType))
+
+INCLUDE_DIRECTORIES: KwargInfo[T.List[T.Union[str, IncludeDirs]]] = KwargInfo(
+ 'include_directories',
+ ContainerTypeInfo(list, (str, IncludeDirs)),
+ listify=True,
+ default=[],
+)
+
+def include_dir_string_new(val: T.List[T.Union[str, IncludeDirs]]) -> T.Iterable[FeatureCheckBase]:
+ strs = [v for v in val if isinstance(v, str)]
+ if strs:
+ str_msg = ", ".join(f"'{s}'" for s in strs)
+ yield FeatureNew('include_directories kwarg of type string', '1.0.0',
+ f'Use include_directories({str_msg}) instead')
+
+# for cases like default_options and override_options
+DEFAULT_OPTIONS: KwargInfo[T.List[str]] = KwargInfo(
+ 'default_options',
+ ContainerTypeInfo(list, str),
+ listify=True,
+ default=[],
+ validator=_options_validator,
+)
+
+ENV_METHOD_KW = KwargInfo('method', str, default='set', since='0.62.0',
+ validator=in_set_validator({'set', 'prepend', 'append'}))
+
+ENV_SEPARATOR_KW = KwargInfo('separator', str, default=os.pathsep)
+
+DEPENDENCIES_KW: KwargInfo[T.List[Dependency]] = KwargInfo(
+ 'dependencies',
+ # InternalDependency is a subclass of Dependency, but we want to
+ # print it in error messages
+ ContainerTypeInfo(list, (Dependency, InternalDependency)),
+ listify=True,
+ default=[],
+)
+
+D_MODULE_VERSIONS_KW: KwargInfo[T.List[T.Union[str, int]]] = KwargInfo(
+ 'd_module_versions',
+ ContainerTypeInfo(list, (str, int)),
+ listify=True,
+ default=[],
+)
+
+ _link_with_error = '''can only be self-built targets; external dependencies (including libraries) must go in "dependencies".'''
+
+# Allow Dependency for the better error message? But then in other cases it will list this as one of the allowed types!
+LINK_WITH_KW: KwargInfo[T.List[T.Union[BothLibraries, SharedLibrary, StaticLibrary, CustomTarget, CustomTargetIndex, Jar, Executable]]] = KwargInfo(
+ 'link_with',
+ ContainerTypeInfo(list, (BothLibraries, SharedLibrary, StaticLibrary, CustomTarget, CustomTargetIndex, Jar, Executable, Dependency)),
+ listify=True,
+ default=[],
+ validator=lambda x: _link_with_error if any(isinstance(i, Dependency) for i in x) else None,
+)
+
+def link_whole_validator(values: T.List[T.Union[StaticLibrary, CustomTarget, CustomTargetIndex, Dependency]]) -> T.Optional[str]:
+ for l in values:
+ if isinstance(l, (CustomTarget, CustomTargetIndex)) and l.links_dynamically():
+ return f'{type(l).__name__} returning a shared library is not allowed'
+ if isinstance(l, Dependency):
+ return _link_with_error
+ return None
+
+LINK_WHOLE_KW: KwargInfo[T.List[T.Union[BothLibraries, StaticLibrary, CustomTarget, CustomTargetIndex]]] = KwargInfo(
+ 'link_whole',
+ ContainerTypeInfo(list, (BothLibraries, StaticLibrary, CustomTarget, CustomTargetIndex, Dependency)),
+ listify=True,
+ default=[],
+ validator=link_whole_validator,
+)
+
+SOURCES_KW: KwargInfo[T.List[T.Union[str, File, CustomTarget, CustomTargetIndex, GeneratedList]]] = KwargInfo(
+ 'sources',
+ ContainerTypeInfo(list, (str, File, CustomTarget, CustomTargetIndex, GeneratedList)),
+ listify=True,
+ default=[],
+)
+
+VARIABLES_KW: KwargInfo[T.Dict[str, str]] = KwargInfo(
+ 'variables',
+ # str is listified by validator/convertor, cannot use listify=True here because
+ # that would listify dict too.
+ (str, ContainerTypeInfo(list, str), ContainerTypeInfo(dict, str)), # type: ignore
+ validator=variables_validator,
+ convertor=variables_convertor,
+ default={},
+)
+
+PRESERVE_PATH_KW: KwargInfo[bool] = KwargInfo('preserve_path', bool, default=False, since='0.63.0')
+
+TEST_KWS: T.List[KwargInfo] = [
+ KwargInfo('args', ContainerTypeInfo(list, (str, File, BuildTarget, CustomTarget, CustomTargetIndex)),
+ listify=True, default=[]),
+ KwargInfo('should_fail', bool, default=False),
+ KwargInfo('timeout', int, default=30),
+ KwargInfo('workdir', (str, NoneType), default=None,
+ validator=lambda x: 'must be an absolute path' if not os.path.isabs(x) else None),
+ KwargInfo('protocol', str,
+ default='exitcode',
+ validator=in_set_validator({'exitcode', 'tap', 'gtest', 'rust'}),
+ since_values={'gtest': '0.55.0', 'rust': '0.57.0'}),
+ KwargInfo('priority', int, default=0, since='0.52.0'),
+ # TODO: env needs reworks of the way the environment variable holder itself works probably
+ ENV_KW,
+ DEPENDS_KW.evolve(since='0.46.0'),
+ KwargInfo('suite', ContainerTypeInfo(list, str), listify=True, default=['']), # yes, a list of empty string
+ KwargInfo('verbose', bool, default=False, since='0.62.0'),
+]