diff options
Diffstat (limited to 'mesonbuild/modules')
25 files changed, 8606 insertions, 0 deletions
diff --git a/mesonbuild/modules/__init__.py b/mesonbuild/modules/__init__.py new file mode 100644 index 0000000..b63a5da --- /dev/null +++ b/mesonbuild/modules/__init__.py @@ -0,0 +1,262 @@ +# Copyright 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This file contains the base representation for import('modname') + +from __future__ import annotations +import dataclasses +import typing as T + +from .. import mesonlib +from ..build import IncludeDirs +from ..interpreterbase.decorators import noKwargs, noPosargs +from ..mesonlib import relpath, HoldableObject, MachineChoice +from ..programs import ExternalProgram + +if T.TYPE_CHECKING: + from .. import build + from ..interpreter import Interpreter + from ..interpreter.interpreterobjects import MachineHolder + from ..interpreterbase import TYPE_var, TYPE_kwargs + from ..programs import OverrideProgram + from ..wrap import WrapMode + from ..build import EnvironmentVariables, Executable + from ..dependencies import Dependency + +class ModuleState: + """Object passed to all module methods. + + This is a WIP API provided to modules, it should be extended to have everything + needed so modules does not touch any other part of Meson internal APIs. + """ + + def __init__(self, interpreter: 'Interpreter') -> None: + # Keep it private, it should be accessed only through methods. 
+ self._interpreter = interpreter + + self.source_root = interpreter.environment.get_source_dir() + self.build_to_src = relpath(interpreter.environment.get_source_dir(), + interpreter.environment.get_build_dir()) + self.subproject = interpreter.subproject + self.subdir = interpreter.subdir + self.root_subdir = interpreter.root_subdir + self.current_lineno = interpreter.current_lineno + self.environment = interpreter.environment + self.project_name = interpreter.build.project_name + self.project_version = interpreter.build.dep_manifest[interpreter.active_projectname].version + # The backend object is under-used right now, but we will need it: + # https://github.com/mesonbuild/meson/issues/1419 + self.backend = interpreter.backend + self.targets = interpreter.build.targets + self.data = interpreter.build.data + self.headers = interpreter.build.get_headers() + self.man = interpreter.build.get_man() + self.global_args = interpreter.build.global_args.host + self.project_args = interpreter.build.projects_args.host.get(interpreter.subproject, {}) + self.build_machine = T.cast('MachineHolder', interpreter.builtin['build_machine']).held_object + self.host_machine = T.cast('MachineHolder', interpreter.builtin['host_machine']).held_object + self.target_machine = T.cast('MachineHolder', interpreter.builtin['target_machine']).held_object + self.current_node = interpreter.current_node + + def get_include_args(self, include_dirs: T.Iterable[T.Union[str, build.IncludeDirs]], prefix: str = '-I') -> T.List[str]: + if not include_dirs: + return [] + + srcdir = self.environment.get_source_dir() + builddir = self.environment.get_build_dir() + + dirs_str: T.List[str] = [] + for dirs in include_dirs: + if isinstance(dirs, str): + dirs_str += [f'{prefix}{dirs}'] + else: + dirs_str.extend([f'{prefix}{i}' for i in dirs.to_string_list(srcdir, builddir)]) + dirs_str.extend([f'{prefix}{i}' for i in dirs.get_extra_build_dirs()]) + + return dirs_str + + def find_program(self, prog: T.Union[str, 
T.List[str]], required: bool = True, + version_func: T.Optional[T.Callable[['ExternalProgram'], str]] = None, + wanted: T.Optional[str] = None, silent: bool = False, + for_machine: MachineChoice = MachineChoice.HOST) -> 'ExternalProgram': + return self._interpreter.find_program_impl(prog, required=required, version_func=version_func, + wanted=wanted, silent=silent, for_machine=for_machine) + + def find_tool(self, name: str, depname: str, varname: str, required: bool = True, + wanted: T.Optional[str] = None) -> T.Union['Executable', ExternalProgram, 'OverrideProgram']: + # Look in overrides in case it's built as subproject + progobj = self._interpreter.program_from_overrides([name], []) + if progobj is not None: + return progobj + + # Look in machine file + prog_list = self.environment.lookup_binary_entry(MachineChoice.HOST, name) + if prog_list is not None: + return ExternalProgram.from_entry(name, prog_list) + + # Check if pkgconfig has a variable + dep = self.dependency(depname, native=True, required=False, wanted=wanted) + if dep.found() and dep.type_name == 'pkgconfig': + value = dep.get_variable(pkgconfig=varname) + if value: + return ExternalProgram(name, [value]) + + # Normal program lookup + return self.find_program(name, required=required, wanted=wanted) + + def dependency(self, depname: str, native: bool = False, required: bool = True, + wanted: T.Optional[str] = None) -> 'Dependency': + kwargs = {'native': native, 'required': required} + if wanted: + kwargs['version'] = wanted + # FIXME: Even if we fix the function, mypy still can't figure out what's + # going on here. And we really dont want to call interpreter + # implementations of meson functions anyway. 
+ return self._interpreter.func_dependency(self.current_node, [depname], kwargs) # type: ignore + + def test(self, args: T.Tuple[str, T.Union[build.Executable, build.Jar, 'ExternalProgram', mesonlib.File]], + workdir: T.Optional[str] = None, + env: T.Union[T.List[str], T.Dict[str, str], str] = None, + depends: T.List[T.Union[build.CustomTarget, build.BuildTarget]] = None) -> None: + kwargs = {'workdir': workdir, + 'env': env, + 'depends': depends, + } + # typed_* takes a list, and gives a tuple to func_test. Violating that constraint + # makes the universe (or at least use of this function) implode + real_args = list(args) + # TODO: Use interpreter internal API, but we need to go through @typed_kwargs + self._interpreter.func_test(self.current_node, real_args, kwargs) + + def get_option(self, name: str, subproject: str = '', + machine: MachineChoice = MachineChoice.HOST, + lang: T.Optional[str] = None, + module: T.Optional[str] = None) -> T.Union[str, int, bool, 'WrapMode']: + return self.environment.coredata.get_option(mesonlib.OptionKey(name, subproject, machine, lang, module)) + + def is_user_defined_option(self, name: str, subproject: str = '', + machine: MachineChoice = MachineChoice.HOST, + lang: T.Optional[str] = None, + module: T.Optional[str] = None) -> bool: + key = mesonlib.OptionKey(name, subproject, machine, lang, module) + return key in self._interpreter.user_defined_options.cmd_line_options + + def process_include_dirs(self, dirs: T.Iterable[T.Union[str, IncludeDirs]]) -> T.Iterable[IncludeDirs]: + """Convert raw include directory arguments to only IncludeDirs + + :param dirs: An iterable of strings and IncludeDirs + :return: None + :yield: IncludeDirs objects + """ + for d in dirs: + if isinstance(d, IncludeDirs): + yield d + else: + yield self._interpreter.build_incdir_object([d]) + + +class ModuleObject(HoldableObject): + """Base class for all objects returned by modules + """ + def __init__(self) -> None: + self.methods: T.Dict[ + str, + 
T.Callable[[ModuleState, T.List['TYPE_var'], 'TYPE_kwargs'], T.Union[ModuleReturnValue, 'TYPE_var']] + ] = {} + + +class MutableModuleObject(ModuleObject): + pass + + +@dataclasses.dataclass +class ModuleInfo: + + """Metadata about a Module.""" + + name: str + added: T.Optional[str] = None + deprecated: T.Optional[str] = None + unstable: bool = False + stabilized: T.Optional[str] = None + + +class NewExtensionModule(ModuleObject): + + """Class for modern modules + + provides the found method. + """ + + INFO: ModuleInfo + + def __init__(self) -> None: + super().__init__() + self.methods.update({ + 'found': self.found_method, + }) + + @noPosargs + @noKwargs + def found_method(self, state: 'ModuleState', args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool: + return self.found() + + @staticmethod + def found() -> bool: + return True + + def get_devenv(self) -> T.Optional['EnvironmentVariables']: + return None + +# FIXME: Port all modules to stop using self.interpreter and use API on +# ModuleState instead. Modules should stop using this class and instead use +# ModuleObject base class. +class ExtensionModule(NewExtensionModule): + def __init__(self, interpreter: 'Interpreter') -> None: + super().__init__() + self.interpreter = interpreter + +class NotFoundExtensionModule(NewExtensionModule): + + """Class for modern modules + + provides the found method. 
+ """ + + def __init__(self, name: str) -> None: + super().__init__() + self.INFO = ModuleInfo(name) + + @staticmethod + def found() -> bool: + return False + + +def is_module_library(fname): + ''' + Check if the file is a library-like file generated by a module-specific + target, such as GirTarget or TypelibTarget + ''' + if hasattr(fname, 'fname'): + fname = fname.fname + suffix = fname.split('.')[-1] + return suffix in {'gir', 'typelib'} + + +class ModuleReturnValue: + def __init__(self, return_value: T.Optional['TYPE_var'], + new_objects: T.Sequence[T.Union['TYPE_var', 'build.ExecutableSerialisation']]) -> None: + self.return_value = return_value + assert isinstance(new_objects, list) + self.new_objects: T.List[T.Union['TYPE_var', 'build.ExecutableSerialisation']] = new_objects diff --git a/mesonbuild/modules/cmake.py b/mesonbuild/modules/cmake.py new file mode 100644 index 0000000..ee40b44 --- /dev/null +++ b/mesonbuild/modules/cmake.py @@ -0,0 +1,453 @@ +# Copyright 2018 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations +import re +import os, os.path, pathlib +import shutil +import typing as T + +from . import ExtensionModule, ModuleReturnValue, ModuleObject, ModuleInfo + +from .. 
import build, mesonlib, mlog, dependencies +from ..cmake import TargetOptions, cmake_defines_to_args +from ..interpreter import SubprojectHolder +from ..interpreter.type_checking import REQUIRED_KW, INSTALL_DIR_KW, NoneType, in_set_validator +from ..interpreterbase import ( + FeatureNew, + FeatureNewKwargs, + + stringArgs, + permittedKwargs, + noPosargs, + noKwargs, + + InvalidArguments, + InterpreterException, + + typed_pos_args, + typed_kwargs, + KwargInfo, + ContainerTypeInfo, +) + +if T.TYPE_CHECKING: + from typing_extensions import TypedDict + + from . import ModuleState + from ..cmake import SingleTargetOptions + from ..interpreter import kwargs + + class WriteBasicPackageVersionFile(TypedDict): + + arch_independent: bool + compatibility: str + install_dir: T.Optional[str] + name: str + version: str + + class ConfigurePackageConfigFile(TypedDict): + + configuration: T.Union[build.ConfigurationData, dict] + input: T.Union[str, mesonlib.File] + install_dir: T.Optional[str] + name: str + + class Subproject(kwargs.ExtractRequired): + + options: T.Optional[CMakeSubprojectOptions] + cmake_options: T.List[str] + + +COMPATIBILITIES = ['AnyNewerVersion', 'SameMajorVersion', 'SameMinorVersion', 'ExactVersion'] + +# Taken from https://github.com/Kitware/CMake/blob/master/Modules/CMakePackageConfigHelpers.cmake +PACKAGE_INIT_BASE = ''' +####### Expanded from \\@PACKAGE_INIT\\@ by configure_package_config_file() ####### +####### Any changes to this file will be overwritten by the next CMake run #### +####### The input file was @inputFileName@ ######## + +get_filename_component(PACKAGE_PREFIX_DIR "${CMAKE_CURRENT_LIST_DIR}/@PACKAGE_RELATIVE_PATH@" ABSOLUTE) +''' +PACKAGE_INIT_EXT = ''' +# Use original install prefix when loaded through a "/usr move" +# cross-prefix symbolic link such as /lib -> /usr/lib. 
+get_filename_component(_realCurr "${CMAKE_CURRENT_LIST_DIR}" REALPATH) +get_filename_component(_realOrig "@absInstallDir@" REALPATH) +if(_realCurr STREQUAL _realOrig) + set(PACKAGE_PREFIX_DIR "@installPrefix@") +endif() +unset(_realOrig) +unset(_realCurr) +''' +PACKAGE_INIT_SET_AND_CHECK = ''' +macro(set_and_check _var _file) + set(${_var} "${_file}") + if(NOT EXISTS "${_file}") + message(FATAL_ERROR "File or directory ${_file} referenced by variable ${_var} does not exist !") + endif() +endmacro() + +#################################################################################### +''' + +class CMakeSubproject(ModuleObject): + def __init__(self, subp: SubprojectHolder): + assert isinstance(subp, SubprojectHolder) + assert subp.cm_interpreter is not None + super().__init__() + self.subp = subp + self.cm_interpreter = subp.cm_interpreter + self.methods.update({'get_variable': self.get_variable, + 'dependency': self.dependency, + 'include_directories': self.include_directories, + 'target': self.target, + 'target_type': self.target_type, + 'target_list': self.target_list, + 'found': self.found_method, + }) + + def _args_to_info(self, args): + if len(args) != 1: + raise InterpreterException('Exactly one argument is required.') + + tgt = args[0] + res = self.cm_interpreter.target_info(tgt) + if res is None: + raise InterpreterException(f'The CMake target {tgt} does not exist\n' + + ' Use the following command in your meson.build to list all available targets:\n\n' + + ' message(\'CMake targets:\\n - \' + \'\\n - \'.join(<cmake_subproject>.target_list()))') + + # Make sure that all keys are present (if not this is a bug) + assert all(x in res for x in ['inc', 'src', 'dep', 'tgt', 'func']) + return res + + @noKwargs + @stringArgs + def get_variable(self, state, args, kwargs): + return self.subp.get_variable_method(args, kwargs) + + @FeatureNewKwargs('dependency', '0.56.0', ['include_type']) + @permittedKwargs({'include_type'}) + @stringArgs + def dependency(self, 
state, args, kwargs): + info = self._args_to_info(args) + if info['func'] == 'executable': + raise InvalidArguments(f'{args[0]} is an executable and does not support the dependency() method. Use target() instead.') + orig = self.get_variable(state, [info['dep']], {}) + assert isinstance(orig, dependencies.Dependency) + actual = orig.include_type + if 'include_type' in kwargs and kwargs['include_type'] != actual: + mlog.debug('Current include type is {}. Converting to requested {}'.format(actual, kwargs['include_type'])) + return orig.generate_system_dependency(kwargs['include_type']) + return orig + + @noKwargs + @stringArgs + def include_directories(self, state, args, kwargs): + info = self._args_to_info(args) + return self.get_variable(state, [info['inc']], kwargs) + + @noKwargs + @stringArgs + def target(self, state, args, kwargs): + info = self._args_to_info(args) + return self.get_variable(state, [info['tgt']], kwargs) + + @noKwargs + @stringArgs + def target_type(self, state, args, kwargs): + info = self._args_to_info(args) + return info['func'] + + @noPosargs + @noKwargs + def target_list(self, state, args, kwargs): + return self.cm_interpreter.target_list() + + @noPosargs + @noKwargs + @FeatureNew('CMakeSubproject.found()', '0.53.2') + def found_method(self, state, args, kwargs): + return self.subp is not None + + +class CMakeSubprojectOptions(ModuleObject): + def __init__(self) -> None: + super().__init__() + self.cmake_options = [] # type: T.List[str] + self.target_options = TargetOptions() + + self.methods.update( + { + 'add_cmake_defines': self.add_cmake_defines, + 'set_override_option': self.set_override_option, + 'set_install': self.set_install, + 'append_compile_args': self.append_compile_args, + 'append_link_args': self.append_link_args, + 'clear': self.clear, + } + ) + + def _get_opts(self, kwargs: dict) -> SingleTargetOptions: + if 'target' in kwargs: + return self.target_options[kwargs['target']] + return self.target_options.global_options + + 
@noKwargs + def add_cmake_defines(self, state, args, kwargs) -> None: + self.cmake_options += cmake_defines_to_args(args) + + @stringArgs + @permittedKwargs({'target'}) + def set_override_option(self, state, args, kwargs) -> None: + if len(args) != 2: + raise InvalidArguments('set_override_option takes exactly 2 positional arguments') + self._get_opts(kwargs).set_opt(args[0], args[1]) + + @permittedKwargs({'target'}) + def set_install(self, state, args, kwargs) -> None: + if len(args) != 1 or not isinstance(args[0], bool): + raise InvalidArguments('set_install takes exactly 1 boolean argument') + self._get_opts(kwargs).set_install(args[0]) + + @stringArgs + @permittedKwargs({'target'}) + def append_compile_args(self, state, args, kwargs) -> None: + if len(args) < 2: + raise InvalidArguments('append_compile_args takes at least 2 positional arguments') + self._get_opts(kwargs).append_args(args[0], args[1:]) + + @stringArgs + @permittedKwargs({'target'}) + def append_link_args(self, state, args, kwargs) -> None: + if not args: + raise InvalidArguments('append_link_args takes at least 1 positional argument') + self._get_opts(kwargs).append_link_args(args) + + @noPosargs + @noKwargs + def clear(self, state, args, kwargs) -> None: + self.cmake_options.clear() + self.target_options = TargetOptions() + + +class CmakeModule(ExtensionModule): + cmake_detected = False + cmake_root = None + + INFO = ModuleInfo('cmake', '0.50.0') + + def __init__(self, interpreter): + super().__init__(interpreter) + self.methods.update({ + 'write_basic_package_version_file': self.write_basic_package_version_file, + 'configure_package_config_file': self.configure_package_config_file, + 'subproject': self.subproject, + 'subproject_options': self.subproject_options, + }) + + def detect_voidp_size(self, env): + compilers = env.coredata.compilers.host + compiler = compilers.get('c', None) + if not compiler: + compiler = compilers.get('cpp', None) + + if not compiler: + raise 
mesonlib.MesonException('Requires a C or C++ compiler to compute sizeof(void *).') + + return compiler.sizeof('void *', '', env) + + def detect_cmake(self, state): + if self.cmake_detected: + return True + + cmakebin = state.find_program('cmake', silent=False) + if not cmakebin.found(): + return False + + p, stdout, stderr = mesonlib.Popen_safe(cmakebin.get_command() + ['--system-information', '-G', 'Ninja'])[0:3] + if p.returncode != 0: + mlog.log(f'error retrieving cmake information: returnCode={p.returncode} stdout={stdout} stderr={stderr}') + return False + + match = re.search('\nCMAKE_ROOT \\"([^"]+)"\n', stdout.strip()) + if not match: + mlog.log('unable to determine cmake root') + return False + + cmakePath = pathlib.PurePath(match.group(1)) + self.cmake_root = os.path.join(*cmakePath.parts) + self.cmake_detected = True + return True + + @noPosargs + @typed_kwargs( + 'cmake.write_basic_package_version_file', + KwargInfo('arch_independent', bool, default=False, since='0.62.0'), + KwargInfo('compatibility', str, default='AnyNewerVersion', validator=in_set_validator(set(COMPATIBILITIES))), + KwargInfo('name', str, required=True), + KwargInfo('version', str, required=True), + INSTALL_DIR_KW, + ) + def write_basic_package_version_file(self, state, args, kwargs: 'WriteBasicPackageVersionFile'): + arch_independent = kwargs['arch_independent'] + compatibility = kwargs['compatibility'] + name = kwargs['name'] + version = kwargs['version'] + + if not self.detect_cmake(state): + raise mesonlib.MesonException('Unable to find cmake') + + pkgroot = pkgroot_name = kwargs['install_dir'] + if pkgroot is None: + pkgroot = os.path.join(state.environment.coredata.get_option(mesonlib.OptionKey('libdir')), 'cmake', name) + pkgroot_name = os.path.join('{libdir}', 'cmake', name) + + template_file = os.path.join(self.cmake_root, 'Modules', f'BasicConfigVersion-{compatibility}.cmake.in') + if not os.path.exists(template_file): + raise mesonlib.MesonException(f'your cmake installation 
doesn\'t support the {compatibility} compatibility') + + version_file = os.path.join(state.environment.scratch_dir, f'{name}ConfigVersion.cmake') + + conf = { + 'CVF_VERSION': (version, ''), + 'CMAKE_SIZEOF_VOID_P': (str(self.detect_voidp_size(state.environment)), ''), + 'CVF_ARCH_INDEPENDENT': (arch_independent, ''), + } + mesonlib.do_conf_file(template_file, version_file, conf, 'meson') + + res = build.Data([mesonlib.File(True, state.environment.get_scratch_dir(), version_file)], pkgroot, pkgroot_name, None, state.subproject) + return ModuleReturnValue(res, [res]) + + def create_package_file(self, infile, outfile, PACKAGE_RELATIVE_PATH, extra, confdata): + package_init = PACKAGE_INIT_BASE.replace('@PACKAGE_RELATIVE_PATH@', PACKAGE_RELATIVE_PATH) + package_init = package_init.replace('@inputFileName@', os.path.basename(infile)) + package_init += extra + package_init += PACKAGE_INIT_SET_AND_CHECK + + try: + with open(infile, encoding='utf-8') as fin: + data = fin.readlines() + except Exception as e: + raise mesonlib.MesonException(f'Could not read input file {infile}: {e!s}') + + result = [] + regex = mesonlib.get_variable_regex('cmake@') + for line in data: + line = line.replace('@PACKAGE_INIT@', package_init) + line, _missing = mesonlib.do_replacement(regex, line, 'cmake@', confdata) + + result.append(line) + + outfile_tmp = outfile + "~" + with open(outfile_tmp, "w", encoding='utf-8') as fout: + fout.writelines(result) + + shutil.copymode(infile, outfile_tmp) + mesonlib.replace_if_different(outfile, outfile_tmp) + + @noPosargs + @typed_kwargs( + 'cmake.configure_package_config_file', + KwargInfo('configuration', (build.ConfigurationData, dict), required=True), + KwargInfo('input', + (str, mesonlib.File, ContainerTypeInfo(list, mesonlib.File)), required=True, + validator=lambda x: 'requires exactly one file' if isinstance(x, list) and len(x) != 1 else None, + convertor=lambda x: x[0] if isinstance(x, list) else x), + KwargInfo('name', str, required=True), + 
INSTALL_DIR_KW, + ) + def configure_package_config_file(self, state, args, kwargs: 'ConfigurePackageConfigFile'): + inputfile = kwargs['input'] + if isinstance(inputfile, str): + inputfile = mesonlib.File.from_source_file(state.environment.source_dir, state.subdir, inputfile) + + ifile_abs = inputfile.absolute_path(state.environment.source_dir, state.environment.build_dir) + + name = kwargs['name'] + + (ofile_path, ofile_fname) = os.path.split(os.path.join(state.subdir, f'{name}Config.cmake')) + ofile_abs = os.path.join(state.environment.build_dir, ofile_path, ofile_fname) + + install_dir = kwargs['install_dir'] + if install_dir is None: + install_dir = os.path.join(state.environment.coredata.get_option(mesonlib.OptionKey('libdir')), 'cmake', name) + + conf = kwargs['configuration'] + if isinstance(conf, dict): + FeatureNew.single_use('cmake.configure_package_config_file dict as configuration', '0.62.0', state.subproject, location=state.current_node) + conf = build.ConfigurationData(conf) + + prefix = state.environment.coredata.get_option(mesonlib.OptionKey('prefix')) + abs_install_dir = install_dir + if not os.path.isabs(abs_install_dir): + abs_install_dir = os.path.join(prefix, install_dir) + + PACKAGE_RELATIVE_PATH = os.path.relpath(prefix, abs_install_dir) + extra = '' + if re.match('^(/usr)?/lib(64)?/.+', abs_install_dir): + extra = PACKAGE_INIT_EXT.replace('@absInstallDir@', abs_install_dir) + extra = extra.replace('@installPrefix@', prefix) + + self.create_package_file(ifile_abs, ofile_abs, PACKAGE_RELATIVE_PATH, extra, conf) + conf.used = True + + conffile = os.path.normpath(inputfile.relative_name()) + self.interpreter.build_def_files.add(conffile) + + res = build.Data([mesonlib.File(True, ofile_path, ofile_fname)], install_dir, install_dir, None, state.subproject) + self.interpreter.build.data.append(res) + + return res + + @FeatureNew('subproject', '0.51.0') + @typed_pos_args('cmake.subproject', str) + @typed_kwargs( + 'cmake.subproject', + REQUIRED_KW, 
+ KwargInfo('options', (CMakeSubprojectOptions, NoneType), since='0.55.0'), + KwargInfo( + 'cmake_options', + ContainerTypeInfo(list, str), + default=[], + listify=True, + deprecated='0.55.0', + deprecated_message='Use options instead', + ), + ) + def subproject(self, state: ModuleState, args: T.Tuple[str], kwargs_: Subproject) -> T.Union[SubprojectHolder, CMakeSubproject]: + if kwargs_['cmake_options'] and kwargs_['options'] is not None: + raise InterpreterException('"options" cannot be used together with "cmake_options"') + dirname = args[0] + kw: kwargs.DoSubproject = { + 'required': kwargs_['required'], + 'options': kwargs_['options'], + 'cmake_options': kwargs_['cmake_options'], + 'default_options': [], + 'version': [], + } + subp = self.interpreter.do_subproject(dirname, 'cmake', kw) + if not subp.found(): + return subp + return CMakeSubproject(subp) + + @FeatureNew('subproject_options', '0.55.0') + @noKwargs + @noPosargs + def subproject_options(self, state, args, kwargs) -> CMakeSubprojectOptions: + return CMakeSubprojectOptions() + +def initialize(*args, **kwargs): + return CmakeModule(*args, **kwargs) diff --git a/mesonbuild/modules/cuda.py b/mesonbuild/modules/cuda.py new file mode 100644 index 0000000..72ca306 --- /dev/null +++ b/mesonbuild/modules/cuda.py @@ -0,0 +1,383 @@ +# Copyright 2017 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from __future__ import annotations + +import typing as T +import re + +from ..mesonlib import version_compare +from ..compilers.cuda import CudaCompiler + +from . import NewExtensionModule, ModuleInfo + +from ..interpreterbase import ( + flatten, permittedKwargs, noKwargs, + InvalidArguments +) + +if T.TYPE_CHECKING: + from . import ModuleState + from ..compilers import Compiler + +class CudaModule(NewExtensionModule): + + INFO = ModuleInfo('CUDA', '0.50.0', unstable=True) + + def __init__(self, *args, **kwargs): + super().__init__() + self.methods.update({ + "min_driver_version": self.min_driver_version, + "nvcc_arch_flags": self.nvcc_arch_flags, + "nvcc_arch_readable": self.nvcc_arch_readable, + }) + + @noKwargs + def min_driver_version(self, state: 'ModuleState', + args: T.Tuple[str], + kwargs: T.Dict[str, T.Any]) -> str: + argerror = InvalidArguments('min_driver_version must have exactly one positional argument: ' + + 'a CUDA Toolkit version string. Beware that, since CUDA 11.0, ' + + 'the CUDA Toolkit\'s components (including NVCC) are versioned ' + + 'independently from each other (and the CUDA Toolkit as a whole).') + + if len(args) != 1 or not isinstance(args[0], str): + raise argerror + + cuda_version = args[0] + driver_version_table = [ + {'cuda_version': '>=12.0.0', 'windows': '527.41', 'linux': '525.60.13'}, + {'cuda_version': '>=11.8.0', 'windows': '522.06', 'linux': '520.61.05'}, + {'cuda_version': '>=11.7.1', 'windows': '516.31', 'linux': '515.48.07'}, + {'cuda_version': '>=11.7.0', 'windows': '516.01', 'linux': '515.43.04'}, + {'cuda_version': '>=11.6.1', 'windows': '511.65', 'linux': '510.47.03'}, + {'cuda_version': '>=11.6.0', 'windows': '511.23', 'linux': '510.39.01'}, + {'cuda_version': '>=11.5.1', 'windows': '496.13', 'linux': '495.29.05'}, + {'cuda_version': '>=11.5.0', 'windows': '496.04', 'linux': '495.29.05'}, + {'cuda_version': '>=11.4.3', 'windows': '472.50', 'linux': '470.82.01'}, + {'cuda_version': '>=11.4.1', 'windows': '471.41', 
'linux': '470.57.02'}, + {'cuda_version': '>=11.4.0', 'windows': '471.11', 'linux': '470.42.01'}, + {'cuda_version': '>=11.3.0', 'windows': '465.89', 'linux': '465.19.01'}, + {'cuda_version': '>=11.2.2', 'windows': '461.33', 'linux': '460.32.03'}, + {'cuda_version': '>=11.2.1', 'windows': '461.09', 'linux': '460.32.03'}, + {'cuda_version': '>=11.2.0', 'windows': '460.82', 'linux': '460.27.03'}, + {'cuda_version': '>=11.1.1', 'windows': '456.81', 'linux': '455.32'}, + {'cuda_version': '>=11.1.0', 'windows': '456.38', 'linux': '455.23'}, + {'cuda_version': '>=11.0.3', 'windows': '451.82', 'linux': '450.51.06'}, + {'cuda_version': '>=11.0.2', 'windows': '451.48', 'linux': '450.51.05'}, + {'cuda_version': '>=11.0.1', 'windows': '451.22', 'linux': '450.36.06'}, + {'cuda_version': '>=10.2.89', 'windows': '441.22', 'linux': '440.33'}, + {'cuda_version': '>=10.1.105', 'windows': '418.96', 'linux': '418.39'}, + {'cuda_version': '>=10.0.130', 'windows': '411.31', 'linux': '410.48'}, + {'cuda_version': '>=9.2.148', 'windows': '398.26', 'linux': '396.37'}, + {'cuda_version': '>=9.2.88', 'windows': '397.44', 'linux': '396.26'}, + {'cuda_version': '>=9.1.85', 'windows': '391.29', 'linux': '390.46'}, + {'cuda_version': '>=9.0.76', 'windows': '385.54', 'linux': '384.81'}, + {'cuda_version': '>=8.0.61', 'windows': '376.51', 'linux': '375.26'}, + {'cuda_version': '>=8.0.44', 'windows': '369.30', 'linux': '367.48'}, + {'cuda_version': '>=7.5.16', 'windows': '353.66', 'linux': '352.31'}, + {'cuda_version': '>=7.0.28', 'windows': '347.62', 'linux': '346.46'}, + ] + + driver_version = 'unknown' + for d in driver_version_table: + if version_compare(cuda_version, d['cuda_version']): + driver_version = d.get(state.host_machine.system, d['linux']) + break + + return driver_version + + @permittedKwargs(['detected']) + def nvcc_arch_flags(self, state: 'ModuleState', + args: T.Tuple[T.Union[Compiler, CudaCompiler, str]], + kwargs: T.Dict[str, T.Any]) -> T.List[str]: + nvcc_arch_args = 
    @permittedKwargs(['detected'])
    def nvcc_arch_readable(self, state: 'ModuleState',
                           args: T.Tuple[T.Union[Compiler, CudaCompiler, str]],
                           kwargs: T.Dict[str, T.Any]) -> T.List[str]:
        """Return human-readable architecture names (e.g. 'sm_75') for the
        requested CUDA architectures.

        Same validation and computation as min_driver_version's sibling
        nvcc_arch_flags(), but returns element [1] (readable names) of the
        (flags, readable) pair instead of the raw -gencode flags.
        """
        nvcc_arch_args = self._validate_nvcc_arch_args(args, kwargs)
        ret = self._nvcc_arch_flags(*nvcc_arch_args)[1]
        return ret

    @staticmethod
    def _break_arch_string(s):
        # Normalize any run of whitespace/comma/semicolon separators to a
        # single ';', then split, so '3.5, 5.0;7.0' -> ['3.5', '5.0', '7.0'].
        s = re.sub('[ \t\r\n,;]+', ';', s)
        s = s.strip(';').split(';')
        return s

    @staticmethod
    def _detected_cc_from_compiler(c):
        # Only a CudaCompiler knows the compute capability detected on the
        # build machine; anything else yields the empty string.
        if isinstance(c, CudaCompiler):
            return c.detected_cc
        return ''

    @staticmethod
    def _version_from_compiler(c):
        # Accept either a CudaCompiler object or a literal version string;
        # 'unknown' signals an unusable first argument to the caller.
        if isinstance(c, CudaCompiler):
            return c.version
        if isinstance(c, str):
            return c
        return 'unknown'

    def _validate_nvcc_arch_args(self, args, kwargs):
        """Validate positional args/kwargs shared by nvcc_arch_flags() and
        nvcc_arch_readable().

        Returns a (cuda_version, arch_list, detected) tuple ready to be
        splatted into _nvcc_arch_flags().  Raises InvalidArguments if the
        first argument is not an NVCC compiler object or version string, or
        if the special architectures are mixed with concrete ones.
        """
        argerror = InvalidArguments('The first argument must be an NVCC compiler object, or its version string!')

        if len(args) < 1:
            raise argerror
        else:
            compiler = args[0]
            cuda_version = self._version_from_compiler(compiler)
            if cuda_version == 'unknown':
                raise argerror

        # Remaining positional args are architecture specs; each may itself
        # be a separator-delimited string, so break and flatten twice.
        arch_list = [] if len(args) <= 1 else flatten(args[1:])
        arch_list = [self._break_arch_string(a) for a in arch_list]
        arch_list = flatten(arch_list)
        if len(arch_list) > 1 and not set(arch_list).isdisjoint({'All', 'Common', 'Auto'}):
            raise InvalidArguments('''The special architectures 'All', 'Common' and 'Auto' must appear alone, as a positional argument!''')
        arch_list = arch_list[0] if len(arch_list) == 1 else arch_list

        # 'detected' kwarg overrides the compiler's auto-detected capability.
        detected = kwargs.get('detected', self._detected_cc_from_compiler(compiler))
        detected = flatten([detected])
        detected = [self._break_arch_string(a) for a in detected]
        detected = flatten(detected)
        if not set(detected).isdisjoint({'All', 'Common', 'Auto'}):
            raise InvalidArguments('''The special architectures 'All', 'Common' and 'Auto' must appear alone, as a positional argument!''')

        return cuda_version, arch_list, detected

    def _filter_cuda_arch_list(self, cuda_arch_list, lo=None, hi=None, saturate=None):
        """
        Filter CUDA arch list (no codenames) for >= low and < hi architecture
        bounds, and deduplicate.
        If saturate is provided, architectures >= hi are replaced with saturate.
        """

        filtered_cuda_arch_list = []
        for arch in cuda_arch_list:
            if arch:
                if lo and version_compare(arch, '<' + lo):
                    continue
                if hi and version_compare(arch, '>=' + hi):
                    if not saturate:
                        continue
                    arch = saturate
                # Deduplicate while preserving first-seen order.
                if arch not in filtered_cuda_arch_list:
                    filtered_cuda_arch_list.append(arch)
        return filtered_cuda_arch_list

    def _nvcc_arch_flags(self, cuda_version, cuda_arch_list='Auto', detected=''):
        """
        Using the CUDA Toolkit version and the target architectures, compute
        the NVCC architecture flags.

        Returns a (nvcc_flags, nvcc_archs_readable) pair.
        """

        # Replicates much of the logic of
        #     https://github.com/Kitware/CMake/blob/master/Modules/FindCUDA/select_compute_arch.cmake
        # except that a bug with cuda_arch_list="All" is worked around by
        # tracking both lower and upper limits on GPU architectures.

        # Per-toolkit tables: the known codenames, the "Common" defaults, the
        # exhaustive "All" list, and the [lo, hi) capability bounds the
        # toolkit can actually compile for.  Each version_compare branch
        # below extends/overrides these for newer toolkits; ordering of the
        # branches is load-bearing (later branches override hi/lo limits).
        cuda_known_gpu_architectures = ['Fermi', 'Kepler', 'Maxwell']  # noqa: E221
        cuda_common_gpu_architectures = ['3.0', '3.5', '5.0']          # noqa: E221
        cuda_hi_limit_gpu_architecture = None                          # noqa: E221
        cuda_lo_limit_gpu_architecture = '2.0'                         # noqa: E221
        cuda_all_gpu_architectures = ['3.0', '3.2', '3.5', '5.0']      # noqa: E221

        if version_compare(cuda_version, '<7.0'):
            cuda_hi_limit_gpu_architecture = '5.2'

        if version_compare(cuda_version, '>=7.0'):
            cuda_known_gpu_architectures += ['Kepler+Tegra', 'Kepler+Tesla', 'Maxwell+Tegra']  # noqa: E221
            cuda_common_gpu_architectures += ['5.2']                                           # noqa: E221

            if version_compare(cuda_version, '<8.0'):
                cuda_common_gpu_architectures += ['5.2+PTX']  # noqa: E221
                cuda_hi_limit_gpu_architecture = '6.0'        # noqa: E221

        if version_compare(cuda_version, '>=8.0'):
            cuda_known_gpu_architectures += ['Pascal', 'Pascal+Tegra']  # noqa: E221
            cuda_common_gpu_architectures += ['6.0', '6.1']             # noqa: E221
            cuda_all_gpu_architectures += ['6.0', '6.1', '6.2']         # noqa: E221

            if version_compare(cuda_version, '<9.0'):
                cuda_common_gpu_architectures += ['6.1+PTX']  # noqa: E221
                cuda_hi_limit_gpu_architecture = '7.0'        # noqa: E221

        if version_compare(cuda_version, '>=9.0'):
            cuda_known_gpu_architectures += ['Volta', 'Xavier']  # noqa: E221
            cuda_common_gpu_architectures += ['7.0']             # noqa: E221
            cuda_all_gpu_architectures += ['7.0', '7.2']         # noqa: E221
            # https://docs.nvidia.com/cuda/archive/9.0/cuda-toolkit-release-notes/index.html#unsupported-features
            cuda_lo_limit_gpu_architecture = '3.0'               # noqa: E221

            if version_compare(cuda_version, '<10.0'):
                cuda_common_gpu_architectures += ['7.2+PTX']  # noqa: E221
                cuda_hi_limit_gpu_architecture = '8.0'        # noqa: E221

        if version_compare(cuda_version, '>=10.0'):
            cuda_known_gpu_architectures += ['Turing']  # noqa: E221
            cuda_common_gpu_architectures += ['7.5']    # noqa: E221
            cuda_all_gpu_architectures += ['7.5']       # noqa: E221

            if version_compare(cuda_version, '<11.0'):
                cuda_common_gpu_architectures += ['7.5+PTX']  # noqa: E221
                cuda_hi_limit_gpu_architecture = '8.0'        # noqa: E221

        if version_compare(cuda_version, '>=11.0'):
            cuda_known_gpu_architectures += ['Ampere']  # noqa: E221
            cuda_common_gpu_architectures += ['8.0']    # noqa: E221
            cuda_all_gpu_architectures += ['8.0']       # noqa: E221
            # https://docs.nvidia.com/cuda/archive/11.0/cuda-toolkit-release-notes/index.html#deprecated-features
            cuda_lo_limit_gpu_architecture = '3.5'      # noqa: E221

            if version_compare(cuda_version, '<11.1'):
                cuda_common_gpu_architectures += ['8.0+PTX']  # noqa: E221
                cuda_hi_limit_gpu_architecture = '8.6'        # noqa: E221

        if version_compare(cuda_version, '>=11.1'):
            cuda_common_gpu_architectures += ['8.6']  # noqa: E221
            cuda_all_gpu_architectures += ['8.6']     # noqa: E221

            if version_compare(cuda_version, '<11.8'):
                cuda_common_gpu_architectures += ['8.6+PTX']  # noqa: E221
                cuda_hi_limit_gpu_architecture = '8.7'        # noqa: E221

        if version_compare(cuda_version, '>=11.8'):
            cuda_known_gpu_architectures += ['Orin', 'Lovelace', 'Hopper']  # noqa: E221
            cuda_common_gpu_architectures += ['8.9', '9.0', '9.0+PTX']      # noqa: E221
            cuda_all_gpu_architectures += ['8.7', '8.9', '9.0']             # noqa: E221

            if version_compare(cuda_version, '<12'):
                cuda_hi_limit_gpu_architecture = '9.1'  # noqa: E221

        if version_compare(cuda_version, '>=12.0'):
            # https://docs.nvidia.com/cuda/cuda-toolkit-release-notes/index.html#deprecated-features (Current)
            # https://docs.nvidia.com/cuda/archive/12.0/cuda-toolkit-release-notes/index.html#deprecated-features (Eventual?)
            cuda_lo_limit_gpu_architecture = '5.0'  # noqa: E221

            if version_compare(cuda_version, '<13'):
                cuda_hi_limit_gpu_architecture = '10.0'  # noqa: E221

        if not cuda_arch_list:
            cuda_arch_list = 'Auto'

        # Resolve the special keywords into a concrete list of capabilities.
        if cuda_arch_list == 'All':     # noqa: E271
            cuda_arch_list = cuda_known_gpu_architectures
        elif cuda_arch_list == 'Common':  # noqa: E271
            cuda_arch_list = cuda_common_gpu_architectures
        elif cuda_arch_list == 'Auto':  # noqa: E271
            if detected:
                if isinstance(detected, list):
                    cuda_arch_list = detected
                else:
                    cuda_arch_list = self._break_arch_string(detected)
                # Clamp detected capabilities to what this toolkit supports,
                # saturating too-new GPUs to the newest "Common" arch.
                cuda_arch_list = self._filter_cuda_arch_list(cuda_arch_list,
                                                             cuda_lo_limit_gpu_architecture,
                                                             cuda_hi_limit_gpu_architecture,
                                                             cuda_common_gpu_architectures[-1])
            else:
                cuda_arch_list = cuda_common_gpu_architectures
        elif isinstance(cuda_arch_list, str):
            cuda_arch_list = self._break_arch_string(cuda_arch_list)

        cuda_arch_list = sorted(x for x in set(cuda_arch_list) if x)

        # Translate each requested arch (numeric '7.5', '6.1(6.0)' virtual
        # override, or codename like 'Turing') into SASS (bin) and PTX lists.
        cuda_arch_bin = []
        cuda_arch_ptx = []
        for arch_name in cuda_arch_list:
            arch_bin = []
            arch_ptx = []
            add_ptx = arch_name.endswith('+PTX')
            if add_ptx:
                arch_name = arch_name[:-len('+PTX')]

            if re.fullmatch('[0-9]+\\.[0-9](\\([0-9]+\\.[0-9]\\))?', arch_name):
                arch_bin, arch_ptx = [arch_name], [arch_name]
            else:
                arch_bin, arch_ptx = {
                    'Fermi':         (['2.0', '2.1(2.0)'], []),
                    'Kepler+Tegra':  (['3.2'],             []),
                    'Kepler+Tesla':  (['3.7'],             []),
                    'Kepler':        (['3.0', '3.5'],      ['3.5']),
                    'Maxwell+Tegra': (['5.3'],             []),
                    'Maxwell':       (['5.0', '5.2'],      ['5.2']),
                    'Pascal':        (['6.0', '6.1'],      ['6.1']),
                    'Pascal+Tegra':  (['6.2'],             []),
                    'Volta':         (['7.0'],             ['7.0']),
                    'Xavier':        (['7.2'],             []),
                    'Turing':        (['7.5'],             ['7.5']),
                    'Ampere':        (['8.0'],             ['8.0']),
                    'Orin':          (['8.7'],             []),
                    'Lovelace':      (['8.9'],             ['8.9']),
                    'Hopper':        (['9.0'],             ['9.0']),
                }.get(arch_name, (None, None))

            if arch_bin is None:
                raise InvalidArguments(f'Unknown CUDA Architecture Name {arch_name}!')

            cuda_arch_bin += arch_bin

            if add_ptx:
                if not arch_ptx:
                    arch_ptx = arch_bin
                cuda_arch_ptx += arch_ptx

        cuda_arch_bin = sorted(set(cuda_arch_bin))
        cuda_arch_ptx = sorted(set(cuda_arch_ptx))

        nvcc_flags = []
        nvcc_archs_readable = []

        # SASS (cubin) targets: 'X.Y(Z.W)' means compile virtual arch Z.W to
        # real code for X.Y; plain 'X.Y' uses the same value for both.
        for arch in cuda_arch_bin:
            arch, codev = re.fullmatch(
                '([0-9]+\\.[0-9])(?:\\(([0-9]+\\.[0-9])\\))?', arch).groups()

            if version_compare(arch, '<' + cuda_lo_limit_gpu_architecture):
                continue
            if cuda_hi_limit_gpu_architecture and version_compare(arch, '>=' + cuda_hi_limit_gpu_architecture):
                continue

            if codev:
                arch = arch.replace('.', '')
                codev = codev.replace('.', '')
                nvcc_flags += ['-gencode', 'arch=compute_' + codev + ',code=sm_' + arch]
                nvcc_archs_readable += ['sm_' + arch]
            else:
                arch = arch.replace('.', '')
                nvcc_flags += ['-gencode', 'arch=compute_' + arch + ',code=sm_' + arch]
                nvcc_archs_readable += ['sm_' + arch]

        # PTX targets: embed forward-compatible intermediate code.
        for arch in cuda_arch_ptx:
            arch, codev = re.fullmatch(
                '([0-9]+\\.[0-9])(?:\\(([0-9]+\\.[0-9])\\))?', arch).groups()

            if codev:
                arch = codev

            if version_compare(arch, '<' + cuda_lo_limit_gpu_architecture):
                continue
            if cuda_hi_limit_gpu_architecture and version_compare(arch, '>=' + cuda_hi_limit_gpu_architecture):
                continue

            arch = arch.replace('.', '')
            nvcc_flags += ['-gencode', 'arch=compute_' + arch + ',code=compute_' + arch]
            nvcc_archs_readable += ['compute_' + arch]

        return nvcc_flags, nvcc_archs_readable

def initialize(*args, **kwargs):
    # Module entry point called by Meson's import('cuda').
    return CudaModule(*args, **kwargs)
class DlangModule(ExtensionModule):
    """Meson module providing D-language helpers (dub.json generation)."""

    # The located DUB binary is cached at class level so the lookup happens
    # at most once per Meson invocation.  False (not None) means "searched
    # and not found"; None means "not searched yet".
    class_dubbin = None
    init_dub = False

    INFO = ModuleInfo('dlang', '0.48.0')

    def __init__(self, interpreter):
        super().__init__(interpreter)
        self.methods.update({
            'generate_dub_file': self.generate_dub_file,
        })

    def _init_dub(self, state):
        """Locate the DUB binary and cache it on the class.

        Prefers a binary already found by DubDependency; otherwise searches
        for it via check_dub().  Raises MesonException if DUB is unavailable.
        """
        # Fix vs. original: the original duplicated the else-branch twice and
        # nested a redundant `if not self.dubbin` inside itself; it also never
        # set init_dub, so this ran on every generate_dub_file() call.
        if DlangModule.class_dubbin is None:
            # Reuse DubDependency's result if it already searched (may be None).
            DlangModule.class_dubbin = dependencies.DubDependency.class_dubbin
        if DlangModule.class_dubbin is None:
            DlangModule.class_dubbin = self.check_dub(state)
        self.dubbin = DlangModule.class_dubbin

        if not self.dubbin:
            raise MesonException('DUB not found.')

        DlangModule.init_dub = True

    def _add_dub_dependency(self, config_deps, dep):
        """Record one Meson Dependency in the dub 'dependencies' mapping.

        Only adds the entry if `dub describe <name>` succeeds; an unknown
        version is stored as the empty string (DUB's "any version").
        """
        name = dep.get_name()
        ret, _ = self._call_dubbin(['describe', name])
        if ret == 0:
            version = dep.get_version()
            config_deps[name] = version if version is not None else ''

    @typed_pos_args('dlang.generate_dub_file', str, str)
    def generate_dub_file(self, state, args, kwargs):
        """Write (or update) <args[1]>/dub.json for a package named args[0].

        Existing dub.json content is preserved and merged with the given
        kwargs; the 'dependencies' kwarg is translated from Meson Dependency
        objects to DUB package/version entries.
        """
        if not DlangModule.init_dub:
            self._init_dub(state)

        config = {
            'name': args[0]
        }

        config_path = os.path.join(args[1], 'dub.json')
        if os.path.exists(config_path):
            with open(config_path, encoding='utf-8') as ofile:
                try:
                    config = json.load(ofile)
                except ValueError:
                    mlog.warning('Failed to load the data in dub.json')

        # DUB refuses to publish packages missing these fields; warn early.
        warn_publishing = ['description', 'license']
        for arg in warn_publishing:
            if arg not in kwargs and \
               arg not in config:
                mlog.warning('Without', mlog.bold(arg), 'the DUB package can\'t be published')

        for key, value in kwargs.items():
            if key == 'dependencies':
                # Dependencies given here replace any loaded from dub.json.
                config[key] = {}
                if isinstance(value, list):
                    for dep in value:
                        if isinstance(dep, dependencies.Dependency):
                            self._add_dub_dependency(config[key], dep)
                elif isinstance(value, dependencies.Dependency):
                    self._add_dub_dependency(config[key], value)
            else:
                config[key] = value

        with open(config_path, 'w', encoding='utf-8') as ofile:
            ofile.write(json.dumps(config, indent=4, ensure_ascii=False))

    def _call_dubbin(self, args, env=None):
        """Run DUB with the given arguments; return (returncode, stdout)."""
        p, out = Popen_safe(self.dubbin.get_command() + args, env=env)[0:2]
        return p.returncode, out.strip()

    def check_dub(self, state):
        """Search for a working `dub` binary.

        Returns the ExternalProgram on success, or False (not None) to record
        that a search already happened and failed.
        """
        dubbin = state.find_program('dub', silent=True)
        if dubbin.found():
            try:
                p, out = Popen_safe(dubbin.get_command() + ['--version'])[0:2]
                if p.returncode != 0:
                    mlog.warning('Found dub {!r} but couldn\'t run it'
                                 ''.format(' '.join(dubbin.get_command())))
                    # Set to False instead of None to signify that we've already
                    # searched for it and not found it
                    dubbin = False
            except (FileNotFoundError, PermissionError):
                dubbin = False
        else:
            dubbin = False
        if dubbin:
            mlog.log('Found DUB:', mlog.bold(dubbin.get_path()),
                     '(%s)' % out.strip())
        else:
            mlog.log('Found DUB:', mlog.red('NO'))
        return dubbin

def initialize(*args, **kwargs):
    # Module entry point called by Meson's import('dlang').
    return DlangModule(*args, **kwargs)
class ExternalProject(NewExtensionModule):
    """One configure/make-style project driven from inside a Meson build.

    Construction immediately runs the project's configure step; the actual
    build/install is deferred to a CustomTarget created in _create_targets().
    """

    def __init__(self,
                 state: 'ModuleState',
                 configure_command: str,
                 configure_options: T.List[str],
                 cross_configure_options: T.List[str],
                 env: build.EnvironmentVariables,
                 verbose: bool,
                 extra_depends: T.List[T.Union['BuildTarget', 'CustomTarget']]):
        super().__init__()
        self.methods.update({'dependency': self.dependency_method,
                             })

        self.subdir = Path(state.subdir)
        self.project_version = state.project_version
        self.subproject = state.subproject
        self.env = state.environment
        self.build_machine = state.build_machine
        self.host_machine = state.host_machine
        self.configure_command = configure_command
        self.configure_options = configure_options
        self.cross_configure_options = cross_configure_options
        self.verbose = verbose
        self.user_env = env

        self.src_dir = Path(self.env.get_source_dir(), self.subdir)
        self.build_dir = Path(self.env.get_build_dir(), self.subdir, 'build')
        self.install_dir = Path(self.env.get_build_dir(), self.subdir, 'dist')
        _p = self.env.coredata.get_option(OptionKey('prefix'))
        assert isinstance(_p, str), 'for mypy'
        self.prefix = Path(_p)
        _l = self.env.coredata.get_option(OptionKey('libdir'))
        assert isinstance(_l, str), 'for mypy'
        self.libdir = Path(_l)
        _i = self.env.coredata.get_option(OptionKey('includedir'))
        assert isinstance(_i, str), 'for mypy'
        self.includedir = Path(_i)
        self.name = self.src_dir.name

        # On Windows if the prefix is "c:/foo" and DESTDIR is "c:/bar", `make`
        # will install files into "c:/bar/c:/foo" which is an invalid path.
        # Work around that issue by removing the drive from prefix.
        if self.prefix.drive:
            self.prefix = self.prefix.relative_to(self.prefix.drive)

        # self.prefix is an absolute path, so we cannot append it to another path.
        self.rel_prefix = self.prefix.relative_to(self.prefix.root)

        self._configure(state)

        self.targets = self._create_targets(extra_depends)

    def _configure(self, state: 'ModuleState') -> None:
        """Run the project's configure step (waf or autotools-style script)."""
        if self.configure_command == 'waf':
            FeatureNew('Waf external project', '0.60.0').use(self.subproject, state.current_node)
            waf = state.find_program('waf')
            configure_cmd = waf.get_command()
            configure_cmd += ['configure', '-o', str(self.build_dir)]
            workdir = self.src_dir
            self.make = waf.get_command() + ['build']
        else:
            # Assume it's the name of a script in source dir, like 'configure',
            # 'autogen.sh', etc).
            configure_path = Path(self.src_dir, self.configure_command)
            configure_prog = state.find_program(configure_path.as_posix())
            configure_cmd = configure_prog.get_command()
            workdir = self.build_dir
            self.make = state.find_program('make').get_command()

        # (placeholder name, default option to inject, substitution value)
        d = [('PREFIX', '--prefix=@PREFIX@', self.prefix.as_posix()),
             ('LIBDIR', '--libdir=@PREFIX@/@LIBDIR@', self.libdir.as_posix()),
             ('INCLUDEDIR', None, self.includedir.as_posix()),
             ]
        self._validate_configure_options(d, state)

        configure_cmd += self._format_options(self.configure_options, d)

        if self.env.is_cross_build():
            # NOTE(review): the triple's vendor field comes from the *build*
            # machine's system — matches upstream behavior; verify if changed.
            host = '{}-{}-{}'.format(self.host_machine.cpu_family,
                                     self.build_machine.system,
                                     self.host_machine.system)
            d = [('HOST', None, host)]
            configure_cmd += self._format_options(self.cross_configure_options, d)

        # Set common env variables like CFLAGS, CC, etc.
        link_exelist: T.List[str] = []
        link_args: T.List[str] = []
        self.run_env = os.environ.copy()
        for lang, compiler in self.env.coredata.compilers[MachineChoice.HOST].items():
            # Skip languages with no conventional env-var names (CC/CFLAGS...).
            if any(lang not in i for i in (ENV_VAR_PROG_MAP, CFLAGS_MAPPING)):
                continue
            cargs = self.env.coredata.get_external_args(MachineChoice.HOST, lang)
            assert isinstance(cargs, list), 'for mypy'
            self.run_env[ENV_VAR_PROG_MAP[lang]] = self._quote_and_join(compiler.get_exelist())
            self.run_env[CFLAGS_MAPPING[lang]] = self._quote_and_join(cargs)
            if not link_exelist:
                link_exelist = compiler.get_linker_exelist()
                _l = self.env.coredata.get_external_link_args(MachineChoice.HOST, lang)
                assert isinstance(_l, list), 'for mypy'
                link_args = _l
        if link_exelist:
            # FIXME: Do not pass linker because Meson uses CC as linker wrapper,
            # but autotools often expects the real linker (e.h. GNU ld).
            # self.run_env['LD'] = self._quote_and_join(link_exelist)
            pass
        self.run_env['LDFLAGS'] = self._quote_and_join(link_args)

        self.run_env = self.user_env.get_env(self.run_env)
        self.run_env = PkgConfigDependency.setup_env(self.run_env, self.env, MachineChoice.HOST,
                                                     uninstalled=True)

        self.build_dir.mkdir(parents=True, exist_ok=True)
        self._run('configure', configure_cmd, workdir)

    def _quote_and_join(self, array: T.List[str]) -> str:
        """Shell-quote each element and join into one env-var-style string."""
        return ' '.join([shlex.quote(i) for i in array])

    def _validate_configure_options(self, variables: T.List[T.Tuple[str, str, str]], state: 'ModuleState') -> None:
        # Ensure the user at least try to pass basic info to the build system,
        # like the prefix, libdir, etc.
        for key, default, val in variables:
            if default is None:
                continue
            key_format = f'@{key}@'
            for option in self.configure_options:
                if key_format in option:
                    break
            else:
                # Placeholder not referenced anywhere: inject the default option.
                FeatureNew('Default configure_option', '0.57.0').use(self.subproject, state.current_node)
                self.configure_options.append(default)

    def _format_options(self, options: T.List[str], variables: T.List[T.Tuple[str, str, str]]) -> T.List[str]:
        """Substitute @VAR@ placeholders in options; raise on unknown vars."""
        out: T.List[str] = []
        missing = set()
        regex = get_variable_regex('meson')
        confdata: T.Dict[str, T.Tuple[str, T.Optional[str]]] = {k: (v, None) for k, _, v in variables}
        for o in options:
            arg, missing_vars = do_replacement(regex, o, 'meson', confdata)
            missing.update(missing_vars)
            out.append(arg)
        if missing:
            var_list = ", ".join(repr(m) for m in sorted(missing))
            raise EnvironmentException(
                f"Variables {var_list} in configure options are missing.")
        return out

    def _run(self, step: str, command: T.List[str], workdir: Path) -> None:
        """Run one external-project step, logging to a file unless verbose.

        Raises MesonException on a non-zero exit code.
        """
        mlog.log(f'External project {self.name}:', mlog.bold(step))
        m = 'Running command ' + str(command) + ' in directory ' + str(workdir) + '\n'
        log_filename = Path(mlog.log_dir, f'{self.name}-{step}.log')
        output = None
        # Fix vs. original: the log file handle was opened but never closed;
        # close it in a finally block so the log is flushed even on failure.
        try:
            if not self.verbose:
                output = open(log_filename, 'w', encoding='utf-8')
                output.write(m + '\n')
                output.flush()
            else:
                mlog.log(m)
            p, *_ = Popen_safe(command, cwd=workdir, env=self.run_env,
                               stderr=subprocess.STDOUT,
                               stdout=output)
        finally:
            if output is not None:
                output.close()
        if p.returncode != 0:
            m = f'{step} step returned error code {p.returncode}.'
            if not self.verbose:
                m += '\nSee logs: ' + str(log_filename)
            raise MesonException(m)

    def _create_targets(self, extra_depends: T.List[T.Union['BuildTarget', 'CustomTarget']]) -> T.List['TYPE_var']:
        """Create the build CustomTarget and the InstallDir for this project."""
        cmd = self.env.get_build_command()
        cmd += ['--internal', 'externalproject',
                '--name', self.name,
                '--srcdir', self.src_dir.as_posix(),
                '--builddir', self.build_dir.as_posix(),
                '--installdir', self.install_dir.as_posix(),
                '--logdir', mlog.log_dir,
                '--make', join_args(self.make),
                ]
        if self.verbose:
            cmd.append('--verbose')

        self.target = build.CustomTarget(
            self.name,
            self.subdir.as_posix(),
            self.subproject,
            self.env,
            cmd + ['@OUTPUT@', '@DEPFILE@'],
            [],
            [f'{self.name}.stamp'],
            depfile=f'{self.name}.d',
            console=True,
            extra_depends=extra_depends,
        )

        # Install everything the project put under its DESTDIR staging area.
        idir = build.InstallDir(self.subdir.as_posix(),
                                Path('dist', self.rel_prefix).as_posix(),
                                install_dir='.',
                                install_dir_name='.',
                                install_mode=None,
                                exclude=None,
                                strip_directory=True,
                                from_source_dir=False,
                                subproject=self.subproject)

        return [self.target, idir]

    @typed_pos_args('external_project.dependency', str)
    @typed_kwargs('external_project.dependency', KwargInfo('subdir', str, default=''))
    def dependency_method(self, state: 'ModuleState', args: T.Tuple[str], kwargs: 'Dependency') -> InternalDependency:
        """Return an InternalDependency for library <args[0]> of this project,
        pointing at the staged install tree (include/lib dirs)."""
        libname = args[0]

        abs_includedir = Path(self.install_dir, self.rel_prefix, self.includedir)
        if kwargs['subdir']:
            abs_includedir = Path(abs_includedir, kwargs['subdir'])
        abs_libdir = Path(self.install_dir, self.rel_prefix, self.libdir)

        version = self.project_version
        compile_args = [f'-I{abs_includedir}']
        link_args = [f'-L{abs_libdir}', f'-l{libname}']
        sources = self.target
        dep = InternalDependency(version, [], compile_args, link_args, [],
                                 [], [sources], [], {}, [], [])
        return dep


class ExternalProjectModule(ExtensionModule):
    """Meson module wrapping configure/make-style external build systems."""

    INFO = ModuleInfo('External build system', '0.56.0', unstable=True)

    def __init__(self, interpreter: 'Interpreter'):
        super().__init__(interpreter)
        self.methods.update({'add_project': self.add_project,
                             })

    # NOTE(review): positional-args name says 'external_project_mod' while the
    # kwargs name says 'external_project' — kept as-is (user-visible strings).
    @typed_pos_args('external_project_mod.add_project', str)
    @typed_kwargs(
        'external_project.add_project',
        KwargInfo('configure_options', ContainerTypeInfo(list, str), default=[], listify=True),
        KwargInfo('cross_configure_options', ContainerTypeInfo(list, str), default=['--host=@HOST@'], listify=True),
        KwargInfo('verbose', bool, default=False),
        ENV_KW,
        DEPENDS_KW.evolve(since='0.63.0'),
    )
    def add_project(self, state: 'ModuleState', args: T.Tuple[str], kwargs: 'AddProject') -> ModuleReturnValue:
        """Register an external project; returns its targets to Meson."""
        configure_command = args[0]
        project = ExternalProject(state,
                                  configure_command,
                                  kwargs['configure_options'],
                                  kwargs['cross_configure_options'],
                                  kwargs['env'],
                                  kwargs['verbose'],
                                  kwargs['depends'])
        return ModuleReturnValue(project, project.targets)


def initialize(interp: 'Interpreter') -> ExternalProjectModule:
    # Module entry point called by Meson's import('unstable-external_project').
    return ExternalProjectModule(interp)
import ExtensionModule, ModuleReturnValue, ModuleInfo +from .. import mlog +from ..build import CustomTarget, InvalidArguments +from ..interpreter.type_checking import INSTALL_KW, INSTALL_MODE_KW, INSTALL_TAG_KW, NoneType +from ..interpreterbase import FeatureNew, KwargInfo, typed_kwargs, typed_pos_args, noKwargs +from ..mesonlib import ( + File, + MesonException, + has_path_sep, + path_is_in_root, +) + +if T.TYPE_CHECKING: + from . import ModuleState + from ..interpreter import Interpreter + from ..mesonlib import FileOrString, FileMode + + from typing_extensions import TypedDict + + class ReadKwArgs(TypedDict): + """Keyword Arguments for fs.read.""" + + encoding: str + + class CopyKw(TypedDict): + + """Kwargs for fs.copy""" + + install: bool + install_dir: T.Optional[str] + install_mode: FileMode + install_tag: T.Optional[str] + + +class FSModule(ExtensionModule): + + INFO = ModuleInfo('fs', '0.53.0') + + def __init__(self, interpreter: 'Interpreter') -> None: + super().__init__(interpreter) + self.methods.update({ + 'expanduser': self.expanduser, + 'is_absolute': self.is_absolute, + 'as_posix': self.as_posix, + 'exists': self.exists, + 'is_symlink': self.is_symlink, + 'is_file': self.is_file, + 'is_dir': self.is_dir, + 'hash': self.hash, + 'size': self.size, + 'is_samepath': self.is_samepath, + 'replace_suffix': self.replace_suffix, + 'parent': self.parent, + 'name': self.name, + 'stem': self.stem, + 'read': self.read, + 'copyfile': self.copyfile, + }) + + def _absolute_dir(self, state: 'ModuleState', arg: 'FileOrString') -> Path: + """ + make an absolute path from a relative path, WITHOUT resolving symlinks + """ + if isinstance(arg, File): + return Path(arg.absolute_path(state.source_root, self.interpreter.environment.get_build_dir())) + return Path(state.source_root) / Path(state.subdir) / Path(arg).expanduser() + + def _resolve_dir(self, state: 'ModuleState', arg: 'FileOrString') -> Path: + """ + resolves symlinks and makes absolute a directory relative to 
calling meson.build, + if not already absolute + """ + path = self._absolute_dir(state, arg) + try: + # accommodate unresolvable paths e.g. symlink loops + path = path.resolve() + except Exception: + # return the best we could do + pass + return path + + @noKwargs + @FeatureNew('fs.expanduser', '0.54.0') + @typed_pos_args('fs.expanduser', str) + def expanduser(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> str: + return str(Path(args[0]).expanduser()) + + @noKwargs + @FeatureNew('fs.is_absolute', '0.54.0') + @typed_pos_args('fs.is_absolute', (str, File)) + def is_absolute(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> bool: + if isinstance(args[0], File): + FeatureNew('fs.is_absolute_file', '0.59.0').use(state.subproject) + return PurePath(str(args[0])).is_absolute() + + @noKwargs + @FeatureNew('fs.as_posix', '0.54.0') + @typed_pos_args('fs.as_posix', str) + def as_posix(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> str: + """ + this function assumes you are passing a Windows path, even if on a Unix-like system + and so ALL '\' are turned to '/', even if you meant to escape a character + """ + return PureWindowsPath(args[0]).as_posix() + + @noKwargs + @typed_pos_args('fs.exists', str) + def exists(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> bool: + return self._resolve_dir(state, args[0]).exists() + + @noKwargs + @typed_pos_args('fs.is_symlink', (str, File)) + def is_symlink(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> bool: + if isinstance(args[0], File): + FeatureNew('fs.is_symlink_file', '0.59.0').use(state.subproject) + return self._absolute_dir(state, args[0]).is_symlink() + + @noKwargs + @typed_pos_args('fs.is_file', str) + def is_file(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> bool: + return self._resolve_dir(state, args[0]).is_file() + + 
@noKwargs + @typed_pos_args('fs.is_dir', str) + def is_dir(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> bool: + return self._resolve_dir(state, args[0]).is_dir() + + @noKwargs + @typed_pos_args('fs.hash', (str, File), str) + def hash(self, state: 'ModuleState', args: T.Tuple['FileOrString', str], kwargs: T.Dict[str, T.Any]) -> str: + if isinstance(args[0], File): + FeatureNew('fs.hash_file', '0.59.0').use(state.subproject) + file = self._resolve_dir(state, args[0]) + if not file.is_file(): + raise MesonException(f'{file} is not a file and therefore cannot be hashed') + try: + h = hashlib.new(args[1]) + except ValueError: + raise MesonException('hash algorithm {} is not available'.format(args[1])) + mlog.debug('computing {} sum of {} size {} bytes'.format(args[1], file, file.stat().st_size)) + h.update(file.read_bytes()) + return h.hexdigest() + + @noKwargs + @typed_pos_args('fs.size', (str, File)) + def size(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> int: + if isinstance(args[0], File): + FeatureNew('fs.size_file', '0.59.0').use(state.subproject) + file = self._resolve_dir(state, args[0]) + if not file.is_file(): + raise MesonException(f'{file} is not a file and therefore cannot be sized') + try: + return file.stat().st_size + except ValueError: + raise MesonException('{} size could not be determined'.format(args[0])) + + @noKwargs + @typed_pos_args('fs.is_samepath', (str, File), (str, File)) + def is_samepath(self, state: 'ModuleState', args: T.Tuple['FileOrString', 'FileOrString'], kwargs: T.Dict[str, T.Any]) -> bool: + if isinstance(args[0], File) or isinstance(args[1], File): + FeatureNew('fs.is_samepath_file', '0.59.0').use(state.subproject) + file1 = self._resolve_dir(state, args[0]) + file2 = self._resolve_dir(state, args[1]) + if not file1.exists(): + return False + if not file2.exists(): + return False + try: + return file1.samefile(file2) + except OSError: + return False + + 
    @noKwargs
    @typed_pos_args('fs.replace_suffix', (str, File), str)
    def replace_suffix(self, state: 'ModuleState', args: T.Tuple['FileOrString', str], kwargs: T.Dict[str, T.Any]) -> str:
        """Return the path with its suffix replaced by args[1].

        Purely lexical (PurePath); the filesystem is never touched.
        """
        if isinstance(args[0], File):
            FeatureNew('fs.replace_suffix_file', '0.59.0').use(state.subproject)
        original = PurePath(str(args[0]))
        new = original.with_suffix(args[1])
        return str(new)

    @noKwargs
    @typed_pos_args('fs.parent', (str, File))
    def parent(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> str:
        """Return the lexical parent directory of the given path."""
        if isinstance(args[0], File):
            FeatureNew('fs.parent_file', '0.59.0').use(state.subproject)
        original = PurePath(str(args[0]))
        new = original.parent
        return str(new)

    @noKwargs
    @typed_pos_args('fs.name', (str, File))
    def name(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> str:
        """Return the final path component (file name with suffix)."""
        if isinstance(args[0], File):
            FeatureNew('fs.name_file', '0.59.0').use(state.subproject)
        original = PurePath(str(args[0]))
        new = original.name
        return str(new)

    @noKwargs
    @typed_pos_args('fs.stem', (str, File))
    @FeatureNew('fs.stem', '0.54.0')
    def stem(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> str:
        """Return the final path component without its suffix."""
        if isinstance(args[0], File):
            FeatureNew('fs.stem_file', '0.59.0').use(state.subproject)
        original = PurePath(str(args[0]))
        new = original.stem
        return str(new)

    @FeatureNew('fs.read', '0.57.0')
    @typed_pos_args('fs.read', (str, File))
    @typed_kwargs('fs.read', KwargInfo('encoding', str, default='utf-8'))
    def read(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: 'ReadKwArgs') -> str:
        """Read a file from the source tree and return its value as a decoded
        string.

        If the encoding is not specified, the file is assumed to be utf-8
        encoded. Paths must be relative by default (to prevent accidents) and
        are forbidden to be read from the build directory (to prevent build
        loops)
        """
        path = args[0]
        encoding = kwargs['encoding']
        src_dir = self.interpreter.environment.source_dir
        sub_dir = self.interpreter.subdir
        build_dir = self.interpreter.environment.get_build_dir()

        if isinstance(path, File):
            # Built File objects live in the build tree and are rejected to
            # prevent the build-loop described in the docstring.
            if path.is_built:
                raise MesonException(
                    'fs.read_file does not accept built files() objects')
            path = os.path.join(src_dir, path.relative_name())
        else:
            # Plain strings are resolved relative to the current subdir.
            if sub_dir:
                src_dir = os.path.join(src_dir, sub_dir)
            path = os.path.join(src_dir, path)

        path = os.path.abspath(path)
        if path_is_in_root(Path(path), Path(build_dir), resolve=True):
            raise MesonException('path must not be in the build tree')
        try:
            with open(path, encoding=encoding) as f:
                data = f.read()
        except UnicodeDecodeError:
            raise MesonException(f'decoding failed for {path}')
        # Reconfigure when this file changes as it can contain data used by any
        # part of the build configuration (e.g. `project(..., version:
        # fs.read_file('VERSION')` or `configure_file(...)`
        self.interpreter.add_build_def_file(path)
        return data

    @FeatureNew('fs.copyfile', '0.64.0')
    @typed_pos_args('fs.copyfile', (File, str), optargs=[str])
    @typed_kwargs(
        'fs.copyfile',
        INSTALL_KW,
        INSTALL_MODE_KW,
        INSTALL_TAG_KW,
        KwargInfo('install_dir', (str, NoneType)),
    )
    def copyfile(self, state: ModuleState, args: T.Tuple[FileOrString, T.Optional[str]],
                 kwargs: CopyKw) -> ModuleReturnValue:
        """Copy a file into the build directory at build time.

        Implemented as a CustomTarget that invokes meson's internal 'copy'
        helper, so the copy happens during the build, not at configure time.
        """
        if kwargs['install'] and not kwargs['install_dir']:
            raise InvalidArguments('"install_dir" must be specified when "install" is true')

        src = self.interpreter.source_strings_to_files([args[0]])[0]

        # The input is allowed to have path separators, but the output may not,
        # so use the basename for the default case
        dest = args[1] if args[1] else os.path.basename(src.fname)
        if has_path_sep(dest):
            raise InvalidArguments('Destination path may not have path separators')

        ct = CustomTarget(
            dest,
            state.subdir,
            state.subproject,
            state.environment,
            state.environment.get_build_command() + ['--internal', 'copy', '@INPUT@', '@OUTPUT@'],
            [src],
            [dest],
            build_by_default=True,
            install=kwargs['install'],
            install_dir=[kwargs['install_dir']],
            install_mode=kwargs['install_mode'],
            install_tag=[kwargs['install_tag']],
            backend=state.backend,
        )

        return ModuleReturnValue(ct, [ct])


def initialize(*args: T.Any, **kwargs: T.Any) -> FSModule:
    """Module entry point called by meson's import('fs')."""
    return FSModule(*args, **kwargs)
diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py
new file mode 100644
index 0000000..38a176d
--- /dev/null
+++ b/mesonbuild/modules/gnome.py
@@ -0,0 +1,2165 @@
# Copyright 2015-2016 The Meson development team

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

# http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

'''This module provides helper functions for Gnome/GLib related
functionality such as gobject-introspection, gresources and gtk-doc'''
from __future__ import annotations

import copy
import itertools
import functools
import os
import subprocess
import textwrap
import typing as T

from . import ExtensionModule, ModuleInfo
from . import ModuleReturnValue
from .. import build
from .. import interpreter
from .. import mesonlib
from .. import mlog
from ..build import CustomTarget, CustomTargetIndex, Executable, GeneratedList, InvalidArguments
from ..dependencies import Dependency, PkgConfigDependency, InternalDependency
from ..interpreter.type_checking import DEPENDS_KW, DEPEND_FILES_KW, INSTALL_DIR_KW, INSTALL_KW, NoneType, SOURCES_KW, in_set_validator
from ..interpreterbase import noPosargs, noKwargs, FeatureNew, FeatureDeprecated
from ..interpreterbase import typed_kwargs, KwargInfo, ContainerTypeInfo
from ..interpreterbase.decorators import typed_pos_args
from ..mesonlib import (
    MachineChoice, MesonException, OrderedSet, Popen_safe, join_args,
)
from ..programs import OverrideProgram
from ..scripts.gettext import read_linguas

if T.TYPE_CHECKING:
    from typing_extensions import Literal, TypedDict

    from . import ModuleState
    from ..build import BuildTarget
    from ..compilers import Compiler
    from ..interpreter import Interpreter
    from ..interpreterbase import TYPE_var, TYPE_kwargs
    from ..mesonlib import FileOrString
    from ..programs import ExternalProgram

    # The TypedDicts below describe the keyword-argument dictionaries that
    # typed_kwargs() produces for each public method of GnomeModule. They
    # exist only for static type checking and are never instantiated.

    class PostInstall(TypedDict):
        glib_compile_schemas: bool
        gio_querymodules: T.List[str]
        gtk_update_icon_cache: bool
        update_desktop_database: bool
        update_mime_database: bool

    class CompileSchemas(TypedDict):

        build_by_default: bool
        depend_files: T.List[FileOrString]

    class Yelp(TypedDict):

        languages: T.List[str]
        media: T.List[str]
        sources: T.List[str]
        symlink_media: bool

    class CompileResources(TypedDict):

        build_by_default: bool
        c_name: T.Optional[str]
        dependencies: T.List[T.Union[mesonlib.File, build.CustomTarget, build.CustomTargetIndex]]
        export: bool
        extra_args: T.List[str]
        gresource_bundle: bool
        install: bool
        install_dir: T.Optional[str]
        install_header: bool
        source_dir: T.List[str]

    class GenerateGir(TypedDict):

        build_by_default: bool
        dependencies: T.List[Dependency]
        export_packages: T.List[str]
        extra_args: T.List[str]
        fatal_warnings: bool
        header: T.List[str]
        identifier_prefix: T.List[str]
        include_directories: T.List[T.Union[build.IncludeDirs, str]]
        includes: T.List[T.Union[str, GirTarget]]
        install: bool
        install_dir_gir: T.Optional[str]
        install_dir_typelib: T.Optional[str]
        link_with: T.List[T.Union[build.SharedLibrary, build.StaticLibrary]]
        namespace: str
        nsversion: str
        sources: T.List[T.Union[FileOrString, build.GeneratedTypes]]
        symbol_prefix: T.List[str]

    class GtkDoc(TypedDict):

        src_dir: T.List[T.Union[str, build.IncludeDirs]]
        main_sgml: str
        main_xml: str
        module_version: str
        namespace: str
        mode: Literal['xml', 'smgl', 'auto', 'none']
        html_args: T.List[str]
        scan_args: T.List[str]
        scanobjs_args: T.List[str]
        fixxref_args: T.List[str]
        mkdb_args: T.List[str]
        content_files: 
T.List[T.Union[build.GeneratedTypes, FileOrString]]
        ignore_headers: T.List[str]
        install_dir: T.List[str]
        check: bool
        install: bool
        gobject_typesfile: T.List[FileOrString]
        html_assets: T.List[FileOrString]
        expand_content_files: T.List[FileOrString]
        c_args: T.List[str]
        include_directories: T.List[T.Union[str, build.IncludeDirs]]
        dependencies: T.List[T.Union[Dependency, build.SharedLibrary, build.StaticLibrary]]

    class GdbusCodegen(TypedDict):

        sources: T.List[FileOrString]
        extra_args: T.List[str]
        interface_prefix: T.Optional[str]
        namespace: T.Optional[str]
        object_manager: bool
        build_by_default: bool
        annotations: T.List[T.List[str]]
        install_header: bool
        install_dir: T.Optional[str]
        docbook: T.Optional[str]
        autocleanup: Literal['all', 'none', 'objects', 'default']

    class GenMarshal(TypedDict):

        build_always: T.Optional[str]
        build_always_stale: T.Optional[bool]
        build_by_default: T.Optional[bool]
        depend_files: T.List[mesonlib.File]
        extra_args: T.List[str]
        install_dir: T.Optional[str]
        install_header: bool
        internal: bool
        nostdinc: bool
        prefix: T.Optional[str]
        skip_source: bool
        sources: T.List[FileOrString]
        stdinc: bool
        valist_marshallers: bool

    class GenerateVapi(TypedDict):

        sources: T.List[T.Union[str, GirTarget]]
        install_dir: T.Optional[str]
        install: bool
        vapi_dirs: T.List[str]
        metadata_dirs: T.List[str]
        gir_dirs: T.List[str]
        packages: T.List[T.Union[str, InternalDependency]]

    # Shared keys for the two mkenums variants below.
    class _MkEnumsCommon(TypedDict):

        sources: T.List[T.Union[FileOrString, build.GeneratedTypes]]
        install_header: bool
        install_dir: T.Optional[str]
        identifier_prefix: T.Optional[str]
        symbol_prefix: T.Optional[str]

    class MkEnumsSimple(_MkEnumsCommon):

        header_prefix: str
        decorator: str
        function_prefix: str
        body_prefix: str

    class MkEnums(_MkEnumsCommon):

        c_template: T.Optional[FileOrString]
        h_template: T.Optional[FileOrString]
        comments: T.Optional[str]
        eprod: T.Optional[str]
        fhead: T.Optional[str]
        fprod: T.Optional[str]
        ftail: T.Optional[str]
        vhead: T.Optional[str]
        vprod: T.Optional[str]
        vtail: T.Optional[str]
        depends: T.List[T.Union[BuildTarget, CustomTarget, CustomTargetIndex]]


# Differs from the CustomTarget version in that it straight defaults to True
_BUILD_BY_DEFAULT: KwargInfo[bool] = KwargInfo(
    'build_by_default', bool, default=True,
)

# Common 'extra_args' kwarg accepted by several methods in this module.
_EXTRA_ARGS_KW: KwargInfo[T.List[str]] = KwargInfo(
    'extra_args',
    ContainerTypeInfo(list, str),
    default=[],
    listify=True,
)

# Kwargs shared by mkenums() and mkenums_simple().
_MK_ENUMS_COMMON_KWS: T.List[KwargInfo] = [
    INSTALL_KW.evolve(name='install_header'),
    INSTALL_DIR_KW,
    KwargInfo(
        'sources',
        ContainerTypeInfo(list, (str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)),
        listify=True,
        required=True,
    ),
    KwargInfo('identifier_prefix', (str, NoneType)),
    KwargInfo('symbol_prefix', (str, NoneType)),
]

def annotations_validator(annotations: T.List[T.Union[str, T.List[str]]]) -> T.Optional[str]:
    """Validate gdbus-codegen annotations argument.

    Accepts either a single flat [ELEMENT, KEY, VALUE] triple of strings, or
    a list of such triples. Returns None when valid, otherwise an error
    message string (the convention used by KwargInfo validators).
    """

    badlist = 'must be made up of 3 strings for ELEMENT, KEY, and VALUE'

    if not annotations:
        return None
    elif all(isinstance(annot, str) for annot in annotations):
        # Flat form: exactly one triple of strings.
        if len(annotations) == 3:
            return None
        else:
            return badlist
    elif not all(isinstance(annot, list) for annot in annotations):
        # Mixed strings and lists: report the first non-list element.
        for c, annot in enumerate(annotations):
            if not isinstance(annot, list):
                return f'element {c+1} must be a list'
    else:
        # List-of-triples form: every inner list must be 3 strings.
        for c, annot in enumerate(annotations):
            if len(annot) != 3 or not all(isinstance(i, str) for i in annot):
                return f'element {c+1} {badlist}'
    return None

# Marker subclasses of CustomTarget: carry no extra behavior, but let later
# code distinguish targets created by this module via isinstance() checks.
class GResourceTarget(build.CustomTarget):
    pass

class GResourceHeaderTarget(build.CustomTarget):
    pass

class GirTarget(build.CustomTarget):
    pass

class TypelibTarget(build.CustomTarget):
    pass

class VapiTarget(build.CustomTarget):
    pass

# gresource compilation is broken due to the way
# the
resource compiler and Ninja clash about it
#
# https://github.com/ninja-build/ninja/issues/1184
# https://bugzilla.gnome.org/show_bug.cgi?id=774368
gresource_dep_needed_version = '>= 2.51.1'

class GnomeModule(ExtensionModule):
    """Meson module exposing GNOME/GLib helpers (import('gnome'))."""

    INFO = ModuleInfo('gnome')

    def __init__(self, interpreter: 'Interpreter') -> None:
        super().__init__(interpreter)
        # Lazily-resolved gobject-introspection tooling; see _get_gir_dep().
        self.gir_dep: T.Optional[Dependency] = None
        self.giscanner: T.Optional[T.Union[ExternalProgram, build.Executable, OverrideProgram]] = None
        self.gicompiler: T.Optional[T.Union[ExternalProgram, build.Executable, OverrideProgram]] = None
        # Flags/lists tracking which post-install scripts were registered,
        # so post_install() never registers the same script twice.
        self.install_glib_compile_schemas = False
        self.install_gio_querymodules: T.List[str] = []
        self.install_gtk_update_icon_cache = False
        self.install_update_desktop_database = False
        self.install_update_mime_database = False
        self.devenv: T.Optional[build.EnvironmentVariables] = None
        # Cached result of _get_native_glib_version(); None until first query.
        self.native_glib_version: T.Optional[str] = None
        self.methods.update({
            'post_install': self.post_install,
            'compile_resources': self.compile_resources,
            'generate_gir': self.generate_gir,
            'compile_schemas': self.compile_schemas,
            'yelp': self.yelp,
            'gtkdoc': self.gtkdoc,
            'gtkdoc_html_dir': self.gtkdoc_html_dir,
            'gdbus_codegen': self.gdbus_codegen,
            'mkenums': self.mkenums,
            'mkenums_simple': self.mkenums_simple,
            'genmarshal': self.genmarshal,
            'generate_vapi': self.generate_vapi,
        })

    def _get_native_glib_version(self, state: 'ModuleState') -> str:
        """Return the native (build machine) glib version, caching the result.

        Falls back to '2.54' with a warning when glib-2.0 cannot be found
        via pkg-config.
        """
        if self.native_glib_version is None:
            glib_dep = PkgConfigDependency('glib-2.0', state.environment,
                                           {'native': True, 'required': False})
            if glib_dep.found():
                self.native_glib_version = glib_dep.get_version()
            else:
                mlog.warning('Could not detect glib version, assuming 2.54. '
                             'You may get build errors if your glib is older.')
                self.native_glib_version = '2.54'
        return self.native_glib_version

    @mesonlib.run_once
    def __print_gresources_warning(self, state: 'ModuleState') -> None:
        # Warn (once per run, via run_once) when glib is too old for reliable
        # gresource dependency tracking.
        if not mesonlib.version_compare(self._get_native_glib_version(state),
                                        gresource_dep_needed_version):
            mlog.warning('GLib compiled dependencies do not work reliably with \n'
                         'the current version of GLib. See the following upstream issue:',
                         mlog.bold('https://bugzilla.gnome.org/show_bug.cgi?id=774368'))

    @staticmethod
    def _print_gdbus_warning() -> None:
        # once=True makes mlog deduplicate this warning across calls.
        mlog.warning('Code generated with gdbus_codegen() requires the root directory be added to\n'
                     '  include_directories of targets with GLib < 2.51.3:',
                     mlog.bold('https://github.com/mesonbuild/meson/issues/1387'),
                     once=True)

    @typed_kwargs(
        'gnome.post_install',
        KwargInfo('glib_compile_schemas', bool, default=False),
        KwargInfo('gio_querymodules', ContainerTypeInfo(list, str), default=[], listify=True),
        KwargInfo('gtk_update_icon_cache', bool, default=False),
        KwargInfo('update_desktop_database', bool, default=False, since='0.59.0'),
        KwargInfo('update_mime_database', bool, default=False, since='0.64.0'),
    )
    @noPosargs
    @FeatureNew('gnome.post_install', '0.57.0')
    def post_install(self, state: 'ModuleState', args: T.List['TYPE_var'], kwargs: 'PostInstall') -> ModuleReturnValue:
        """Register post-install scripts (schema compilation, icon cache,
        desktop/mime database updates). Each script is registered at most
        once per build, guarded by the self.install_* flags; all scripts are
        skipped when DESTDIR is set (skip_if_destdir)."""
        rv: T.List['build.ExecutableSerialisation'] = []
        datadir_abs = os.path.join(state.environment.get_prefix(), state.environment.get_datadir())
        if kwargs['glib_compile_schemas'] and not self.install_glib_compile_schemas:
            self.install_glib_compile_schemas = True
            prog = state.find_tool('glib-compile-schemas', 'gio-2.0', 'glib_compile_schemas')
            schemasdir = os.path.join(datadir_abs, 'glib-2.0', 'schemas')
            script = state.backend.get_executable_serialisation([prog, schemasdir])
            script.skip_if_destdir = True
            rv.append(script)
        for d in kwargs['gio_querymodules']:
            if d not in
self.install_gio_querymodules:
                self.install_gio_querymodules.append(d)
                prog = state.find_tool('gio-querymodules', 'gio-2.0', 'gio_querymodules')
                moduledir = os.path.join(state.environment.get_prefix(), d)
                script = state.backend.get_executable_serialisation([prog, moduledir])
                script.skip_if_destdir = True
                rv.append(script)
        if kwargs['gtk_update_icon_cache'] and not self.install_gtk_update_icon_cache:
            self.install_gtk_update_icon_cache = True
            # Prefer the GTK4 tool; fall back to the GTK3 name if absent.
            prog = state.find_program('gtk4-update-icon-cache', required=False)
            found = isinstance(prog, build.Executable) or prog.found()
            if not found:
                prog = state.find_program('gtk-update-icon-cache')
            icondir = os.path.join(datadir_abs, 'icons', 'hicolor')
            script = state.backend.get_executable_serialisation([prog, '-q', '-t', '-f', icondir])
            script.skip_if_destdir = True
            rv.append(script)
        if kwargs['update_desktop_database'] and not self.install_update_desktop_database:
            self.install_update_desktop_database = True
            prog = state.find_program('update-desktop-database')
            appdir = os.path.join(datadir_abs, 'applications')
            script = state.backend.get_executable_serialisation([prog, '-q', appdir])
            script.skip_if_destdir = True
            rv.append(script)
        if kwargs['update_mime_database'] and not self.install_update_mime_database:
            self.install_update_mime_database = True
            prog = state.find_program('update-mime-database')
            appdir = os.path.join(datadir_abs, 'mime')
            script = state.backend.get_executable_serialisation([prog, appdir])
            script.skip_if_destdir = True
            rv.append(script)
        return ModuleReturnValue(None, rv)

    @typed_pos_args('gnome.compile_resources', str, (str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList))
    @typed_kwargs(
        'gnome.compile_resources',
        _BUILD_BY_DEFAULT,
        _EXTRA_ARGS_KW,
        INSTALL_KW,
        INSTALL_KW.evolve(name='install_header', since='0.37.0'),
        INSTALL_DIR_KW,
        KwargInfo('c_name', (str, NoneType)),
        KwargInfo('dependencies', ContainerTypeInfo(list, (mesonlib.File, build.CustomTarget, build.CustomTargetIndex)), default=[], listify=True),
        KwargInfo('export', bool, default=False, since='0.37.0'),
        KwargInfo('gresource_bundle', bool, default=False, since='0.37.0'),
        KwargInfo('source_dir', ContainerTypeInfo(list, str), default=[], listify=True),
    )
    def compile_resources(self, state: 'ModuleState', args: T.Tuple[str, 'FileOrString'],
                          kwargs: 'CompileResources') -> 'ModuleReturnValue':
        """Compile a GResource XML into either C/C++ source + header targets,
        or a standalone .gresource bundle (gresource_bundle: true)."""
        self.__print_gresources_warning(state)
        glib_version = self._get_native_glib_version(state)

        glib_compile_resources = state.find_program('glib-compile-resources')
        cmd: T.List[T.Union[ExternalProgram, str]] = [glib_compile_resources, '@INPUT@']

        source_dirs = kwargs['source_dir']
        dependencies = kwargs['dependencies']

        target_name, input_file = args

        # Validate dependencies
        subdirs: T.List[str] = []
        depends: T.List[T.Union[build.CustomTarget, build.CustomTargetIndex]] = []
        for dep in dependencies:
            if isinstance(dep, mesonlib.File):
                subdirs.append(dep.subdir)
            else:
                depends.append(dep)
                subdirs.append(dep.get_subdir())
                if not mesonlib.version_compare(glib_version, gresource_dep_needed_version):
                    m = 'The "dependencies" argument of gnome.compile_resources() can not\n' \
                        'be used with the current version of glib-compile-resources due to\n' \
                        '<https://bugzilla.gnome.org/show_bug.cgi?id=774368>'
                    raise MesonException(m)

        if not mesonlib.version_compare(glib_version, gresource_dep_needed_version):
            # Resource xml files generated at build-time cannot be used with
            # gnome.compile_resources() because we need to scan the xml for
            # dependencies. Use configure_file() instead to generate it at
            # configure-time
            if isinstance(input_file, mesonlib.File):
                # glib-compile-resources will be run inside the source dir,
                # so we need either 'src_to_build' or the absolute path.
                # Absolute path is the easiest choice.
                if input_file.is_built:
                    ifile = os.path.join(state.environment.get_build_dir(), input_file.subdir, input_file.fname)
                else:
                    ifile = os.path.join(input_file.subdir, input_file.fname)

            elif isinstance(input_file, (build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)):
                raise MesonException('Resource xml files generated at build-time cannot be used with '
                                     'gnome.compile_resources() in the current version of glib-compile-resources '
                                     'because we need to scan the xml for dependencies due to '
                                     '<https://bugzilla.gnome.org/show_bug.cgi?id=774368>\nUse '
                                     'configure_file() instead to generate it at configure-time.')
            else:
                ifile = os.path.join(state.subdir, input_file)

            # Old-glib path: scan the XML ourselves at configure time.
            depend_files, depends, subdirs = self._get_gresource_dependencies(
                state, ifile, source_dirs, dependencies)

        # Make source dirs relative to build dir now
        source_dirs = [os.path.join(state.build_to_src, state.subdir, d) for d in source_dirs]
        # Ensure build directories of generated deps are included
        source_dirs += subdirs
        # Always include current directory, but after paths set by user
        source_dirs.append(os.path.join(state.build_to_src, state.subdir))

        for source_dir in OrderedSet(source_dirs):
            cmd += ['--sourcedir', source_dir]

        if kwargs['c_name']:
            cmd += ['--c-name', kwargs['c_name']]
        if not kwargs['export']:
            cmd += ['--internal']

        cmd += ['--generate', '--target', '@OUTPUT@']
        cmd += kwargs['extra_args']

        gresource = kwargs['gresource_bundle']
        if gresource:
            output = f'{target_name}.gresource'
            name = f'{target_name}_gresource'
        else:
            # Pick output language from whichever compiler the project has.
            if 'c' in state.environment.coredata.compilers.host:
                output = f'{target_name}.c'
                name = f'{target_name}_c'
            elif 'cpp' in state.environment.coredata.compilers.host:
                output = f'{target_name}.cpp'
                name = f'{target_name}_cpp'
            else:
                raise MesonException('Compiling GResources into code is only supported in C and C++ projects')

        if kwargs['install'] and not gresource:
            raise MesonException('The install kwarg only applies to gresource bundles, see install_header')

        install_header = kwargs['install_header']
        if install_header and gresource:
            raise MesonException('The install_header kwarg does not apply to gresource bundles')
        if install_header and not kwargs['export']:
            raise MesonException('GResource header is installed yet export is not enabled')

        depfile: T.Optional[str] = None
        target_cmd: T.List[T.Union[ExternalProgram, str]]
        if not mesonlib.version_compare(glib_version, gresource_dep_needed_version):
            # This will eventually go out of sync if dependencies are added
            target_cmd = cmd
        else:
            # New-glib path: let glib-compile-resources emit a depfile.
            depfile = f'{output}.d'
            depend_files = []
            target_cmd = copy.copy(cmd) + ['--dependency-file', '@DEPFILE@']
        target_c = GResourceTarget(
            name,
            state.subdir,
            state.subproject,
            state.environment,
            target_cmd,
            [input_file],
            [output],
            build_by_default=kwargs['build_by_default'],
            depfile=depfile,
            depend_files=depend_files,
            extra_depends=depends,
            install=kwargs['install'],
            install_dir=[kwargs['install_dir']] if kwargs['install_dir'] else [],
            install_tag=['runtime'],
        )

        if gresource:  # Only one target for .gresource files
            return ModuleReturnValue(target_c, [target_c])

        install_dir = kwargs['install_dir'] or state.environment.coredata.get_option(mesonlib.OptionKey('includedir'))
        assert isinstance(install_dir, str), 'for mypy'
        target_h = GResourceHeaderTarget(
            f'{target_name}_h',
            state.subdir,
            state.subproject,
            state.environment,
            cmd,
            [input_file],
            [f'{target_name}.h'],
            build_by_default=kwargs['build_by_default'],
            extra_depends=depends,
            install=install_header,
            install_dir=[install_dir],
            install_tag=['devel'],
        )
        rv = [target_c, target_h]
        return ModuleReturnValue(rv, rv)

    @staticmethod
    def _get_gresource_dependencies(
            state: 'ModuleState', input_file: str, source_dirs: T.List[str],
            dependencies: T.Sequence[T.Union[mesonlib.File, build.CustomTarget, 
build.CustomTargetIndex]]
    ) -> T.Tuple[T.List[mesonlib.FileOrString], T.List[T.Union[build.CustomTarget, build.CustomTargetIndex]], T.List[str]]:
        # Runs 'glib-compile-resources --generate-dependencies' to list the
        # resources referenced by input_file, then matches each listed file
        # against the user-supplied dependencies. Returns (dep_files,
        # depends, subdirs).

        cmd = ['glib-compile-resources',
               input_file,
               '--generate-dependencies']

        # Prefer generated files over source files
        cmd += ['--sourcedir', state.subdir]  # Current build dir
        for source_dir in source_dirs:
            cmd += ['--sourcedir', os.path.join(state.subdir, source_dir)]

        try:
            pc, stdout, stderr = Popen_safe(cmd, cwd=state.environment.get_source_dir())
        except (FileNotFoundError, PermissionError):
            raise MesonException('Could not execute glib-compile-resources.')
        if pc.returncode != 0:
            m = f'glib-compile-resources failed to get dependencies for {cmd[1]}:\n{stderr}'
            mlog.warning(m)
            raise subprocess.CalledProcessError(pc.returncode, cmd)

        # One resource path per stdout line; drop the trailing empty entry.
        raw_dep_files: T.List[str] = stdout.split('\n')[:-1]

        depends: T.List[T.Union[build.CustomTarget, build.CustomTargetIndex]] = []
        subdirs: T.List[str] = []
        dep_files: T.List[mesonlib.FileOrString] = []
        for resfile in raw_dep_files.copy():
            resbasename = os.path.basename(resfile)
            for dep in dependencies:
                if isinstance(dep, mesonlib.File):
                    if dep.fname != resbasename:
                        continue
                    raw_dep_files.remove(resfile)
                    dep_files.append(dep)
                    subdirs.append(dep.subdir)
                    break
                elif isinstance(dep, (build.CustomTarget, build.CustomTargetIndex)):
                    fname = None
                    outputs = {(o, os.path.basename(o)) for o in dep.get_outputs()}
                    for o, baseo in outputs:
                        if baseo == resbasename:
                            fname = o
                            break
                    if fname is not None:
                        raw_dep_files.remove(resfile)
                        depends.append(dep)
                        subdirs.append(dep.get_subdir())
                        break
            else:
                # In generate-dependencies mode, glib-compile-resources doesn't raise
                # an error for missing resources but instead prints whatever filename
                # was listed in the input file. That's good because it means we can
                # handle resource files that get generated as part of the build, as
                # follows.
                #
                # If there are multiple generated resource files with the same basename
                # then this code will get confused.
                try:
                    f = mesonlib.File.from_source_file(state.environment.get_source_dir(),
                                                       ".", resfile)
                except MesonException:
                    raise MesonException(
                        f'Resource "{resfile}" listed in "{input_file}" was not found. '
                        'If this is a generated file, pass the target that generates '
                        'it to gnome.compile_resources() using the "dependencies" '
                        'keyword argument.')
                raw_dep_files.remove(resfile)
                dep_files.append(f)
        dep_files.extend(raw_dep_files)
        return dep_files, depends, subdirs

    def _get_link_args(self, state: 'ModuleState',
                       lib: T.Union[build.SharedLibrary, build.StaticLibrary],
                       depends: T.Sequence[T.Union[build.BuildTarget, 'build.GeneratedTypes', 'FileOrString', build.StructuredSources]],
                       include_rpath: bool = False,
                       use_gir_args: bool = False
                       ) -> T.Tuple[T.List[str], T.List[T.Union[build.BuildTarget, 'build.GeneratedTypes', 'FileOrString', build.StructuredSources]]]:
        # Build -L/-l (or --extra-library) flags for linking against lib,
        # appending the library to a copy of 'depends'; never mutates the
        # caller's sequence.
        link_command: T.List[str] = []
        new_depends = list(depends)
        # Construct link args
        if isinstance(lib, build.SharedLibrary):
            libdir = os.path.join(state.environment.get_build_dir(), state.backend.get_target_dir(lib))
            link_command.append('-L' + libdir)
            if include_rpath:
                link_command.append('-Wl,-rpath,' + libdir)
            new_depends.append(lib)
            # Needed for the following binutils bug:
            # https://github.com/mesonbuild/meson/issues/1911
            # However, g-ir-scanner does not understand -Wl,-rpath
            # so we need to use -L instead
            for d in state.backend.determine_rpath_dirs(lib):
                d = os.path.join(state.environment.get_build_dir(), d)
                link_command.append('-L' + d)
                if include_rpath:
                    link_command.append('-Wl,-rpath,' + d)
        if use_gir_args and self._gir_has_option('--extra-library'):
            link_command.append('--extra-library=' + lib.name)
        else:
            link_command.append('-l' + lib.name)
        return link_command, new_depends

    def _get_dependencies_flags_raw(
            self, deps: T.Sequence[T.Union['Dependency', build.BuildTarget, build.CustomTarget, build.CustomTargetIndex]],
            state: 'ModuleState',
            depends: T.Sequence[T.Union[build.BuildTarget, 'build.GeneratedTypes', 'FileOrString', build.StructuredSources]],
            include_rpath: bool,
            use_gir_args: bool,
    ) -> T.Tuple[OrderedSet[str], OrderedSet[T.Union[str, T.Tuple[str, str]]], OrderedSet[T.Union[str, T.Tuple[str, str]]], OrderedSet[str],
                 T.List[T.Union[build.BuildTarget, 'build.GeneratedTypes', 'FileOrString', build.StructuredSources]]]:
        # Recursively collect compile flags, link flags and gir include dirs
        # from a dependency tree. Returns (cflags, internal_ldflags,
        # external_ldflags, gi_includes, depends).
        cflags: OrderedSet[str] = OrderedSet()
        # External linker flags that can't be de-duped reliably because they
        # require two args in order, such as -framework AVFoundation will be stored as a tuple.
        internal_ldflags: OrderedSet[T.Union[str, T.Tuple[str, str]]] = OrderedSet()
        external_ldflags: OrderedSet[T.Union[str, T.Tuple[str, str]]] = OrderedSet()
        gi_includes: OrderedSet[str] = OrderedSet()
        deps = mesonlib.listify(deps)
        depends = list(depends)

        for dep in deps:
            if isinstance(dep, Dependency):
                girdir = dep.get_variable(pkgconfig='girdir', internal='girdir', default_value='')
                if girdir:
                    assert isinstance(girdir, str), 'for mypy'
                    gi_includes.update([girdir])
            if isinstance(dep, InternalDependency):
                cflags.update(dep.get_compile_args())
                cflags.update(state.get_include_args(dep.include_directories))
                for lib in dep.libraries:
                    if isinstance(lib, build.SharedLibrary):
                        _ld, depends = self._get_link_args(state, lib, depends, include_rpath)
                        internal_ldflags.update(_ld)
                        # Recurse into the library's own external deps.
                        libdepflags = self._get_dependencies_flags_raw(lib.get_external_deps(), state, depends, include_rpath,
                                                                       use_gir_args)
                        cflags.update(libdepflags[0])
                        internal_ldflags.update(libdepflags[1])
                        external_ldflags.update(libdepflags[2])
                        gi_includes.update(libdepflags[3])
                        depends = libdepflags[4]
                extdepflags = self._get_dependencies_flags_raw(dep.ext_deps, state, depends, include_rpath,
                                                               use_gir_args)
                cflags.update(extdepflags[0])
                internal_ldflags.update(extdepflags[1])
                external_ldflags.update(extdepflags[2])
                gi_includes.update(extdepflags[3])
                depends = extdepflags[4]
                for source in dep.sources:
                    if isinstance(source, GirTarget):
                        gi_includes.update([os.path.join(state.environment.get_build_dir(),
                                                         source.get_subdir())])
            # This should be any dependency other than an internal one.
            elif isinstance(dep, Dependency):
                cflags.update(dep.get_compile_args())
                ldflags = iter(dep.get_link_args(raw=True))
                for flag in ldflags:
                    if (os.path.isabs(flag) and
                            # For PkgConfigDependency only:
                            getattr(dep, 'is_libtool', False)):
                        # Rewrite an absolute libtool path as -L<dir> -l<name>.
                        lib_dir = os.path.dirname(flag)
                        external_ldflags.update([f'-L{lib_dir}'])
                        if include_rpath:
                            external_ldflags.update([f'-Wl,-rpath {lib_dir}'])
                        libname = os.path.basename(flag)
                        if libname.startswith("lib"):
                            libname = libname[3:]
                        libname = libname.split(".so")[0]
                        flag = f"-l{libname}"
                    # FIXME: Hack to avoid passing some compiler options in
                    if flag.startswith("-W"):
                        continue
                    # If it's a framework arg, slurp the framework name too
                    # to preserve the order of arguments
                    if flag == '-framework':
                        external_ldflags.update([(flag, next(ldflags))])
                    else:
                        external_ldflags.update([flag])
            elif isinstance(dep, (build.StaticLibrary, build.SharedLibrary)):
                cflags.update(state.get_include_args(dep.get_include_dirs()))
                depends.append(dep)
            else:
                mlog.log(f'dependency {dep!r} not handled to build gir files')
                continue

        if use_gir_args and self._gir_has_option('--extra-library'):
            # g-ir-scanner wants --extra-library=<name> instead of -l<name>.
            def fix_ldflags(ldflags: T.Iterable[T.Union[str, T.Tuple[str, str]]]) -> OrderedSet[T.Union[str, T.Tuple[str, str]]]:
                fixed_ldflags: OrderedSet[T.Union[str, T.Tuple[str, str]]] = OrderedSet()
                for ldflag in ldflags:
                    if isinstance(ldflag, str) and ldflag.startswith("-l"):
                        ldflag = ldflag.replace('-l', '--extra-library=', 1)
                    fixed_ldflags.add(ldflag)
                return fixed_ldflags
            internal_ldflags = fix_ldflags(internal_ldflags)
            external_ldflags = 
fix_ldflags(external_ldflags) + return cflags, internal_ldflags, external_ldflags, gi_includes, depends + + def _get_dependencies_flags( + self, deps: T.Sequence[T.Union['Dependency', build.BuildTarget, build.CustomTarget, build.CustomTargetIndex]], + state: 'ModuleState', + depends: T.Sequence[T.Union[build.BuildTarget, 'build.GeneratedTypes', 'FileOrString', build.StructuredSources]], + include_rpath: bool = False, + use_gir_args: bool = False, + ) -> T.Tuple[OrderedSet[str], T.List[str], T.List[str], OrderedSet[str], + T.List[T.Union[build.BuildTarget, 'build.GeneratedTypes', 'FileOrString', build.StructuredSources]]]: + + cflags, internal_ldflags_raw, external_ldflags_raw, gi_includes, depends = self._get_dependencies_flags_raw(deps, state, depends, include_rpath, use_gir_args) + internal_ldflags: T.List[str] = [] + external_ldflags: T.List[str] = [] + + # Extract non-deduplicable argument groups out of the tuples. + for ldflag in internal_ldflags_raw: + if isinstance(ldflag, str): + internal_ldflags.append(ldflag) + else: + internal_ldflags.extend(ldflag) + for ldflag in external_ldflags_raw: + if isinstance(ldflag, str): + external_ldflags.append(ldflag) + else: + external_ldflags.extend(ldflag) + + return cflags, internal_ldflags, external_ldflags, gi_includes, depends + + def _unwrap_gir_target(self, girtarget: T.Union[build.Executable, build.StaticLibrary, build.SharedLibrary], state: 'ModuleState' + ) -> T.Union[build.Executable, build.StaticLibrary, build.SharedLibrary]: + if not isinstance(girtarget, (build.Executable, build.SharedLibrary, + build.StaticLibrary)): + raise MesonException(f'Gir target must be an executable or library but is "{girtarget}" of type {type(girtarget).__name__}') + + STATIC_BUILD_REQUIRED_VERSION = ">=1.58.1" + if isinstance(girtarget, (build.StaticLibrary)) and \ + not mesonlib.version_compare( + self._get_gir_dep(state)[0].get_version(), + STATIC_BUILD_REQUIRED_VERSION): + raise MesonException('Static libraries can only be 
introspected with GObject-Introspection ' + STATIC_BUILD_REQUIRED_VERSION) + + return girtarget + + def _devenv_prepend(self, varname: str, value: str) -> None: + if self.devenv is None: + self.devenv = build.EnvironmentVariables() + self.devenv.prepend(varname, [value]) + + def get_devenv(self) -> T.Optional[build.EnvironmentVariables]: + return self.devenv + + def _get_gir_dep(self, state: 'ModuleState') -> T.Tuple[Dependency, T.Union[build.Executable, 'ExternalProgram', 'OverrideProgram'], + T.Union[build.Executable, 'ExternalProgram', 'OverrideProgram']]: + if not self.gir_dep: + self.gir_dep = state.dependency('gobject-introspection-1.0') + self.giscanner = state.find_tool('g-ir-scanner', 'gobject-introspection-1.0', 'g_ir_scanner') + self.gicompiler = state.find_tool('g-ir-compiler', 'gobject-introspection-1.0', 'g_ir_compiler') + return self.gir_dep, self.giscanner, self.gicompiler + + @functools.lru_cache(maxsize=None) + def _gir_has_option(self, option: str) -> bool: + exe = self.giscanner + if isinstance(exe, OverrideProgram): + # Handle overridden g-ir-scanner + assert option in {'--extra-library', '--sources-top-dirs'} + return True + p, o, _ = Popen_safe(exe.get_command() + ['--help'], stderr=subprocess.STDOUT) + return p.returncode == 0 and option in o + + # May mutate depends and gir_inc_dirs + @staticmethod + def _scan_include(state: 'ModuleState', includes: T.List[T.Union[str, GirTarget]] + ) -> T.Tuple[T.List[str], T.List[str], T.List[GirTarget]]: + ret: T.List[str] = [] + gir_inc_dirs: T.List[str] = [] + depends: T.List[GirTarget] = [] + + for inc in includes: + if isinstance(inc, str): + ret += [f'--include={inc}'] + elif isinstance(inc, GirTarget): + gir_inc_dirs .append(os.path.join(state.environment.get_build_dir(), inc.get_subdir())) + ret.append(f"--include-uninstalled={os.path.join(inc.get_subdir(), inc.get_basename())}") + depends.append(inc) + + return ret, gir_inc_dirs, depends + + @staticmethod + def _scan_langs(state: 'ModuleState', 
langs: T.Iterable[str]) -> T.List[str]: + ret: T.List[str] = [] + + for lang in langs: + link_args = state.environment.coredata.get_external_link_args(MachineChoice.HOST, lang) + for link_arg in link_args: + if link_arg.startswith('-L'): + ret.append(link_arg) + + return ret + + @staticmethod + def _scan_gir_targets(state: 'ModuleState', girtargets: T.Sequence[build.BuildTarget]) -> T.List[T.Union[str, build.Executable]]: + ret: T.List[T.Union[str, build.Executable]] = [] + + for girtarget in girtargets: + if isinstance(girtarget, build.Executable): + ret += ['--program', girtarget] + else: + # Because of https://gitlab.gnome.org/GNOME/gobject-introspection/merge_requests/72 + # we can't use the full path until this is merged. + libpath = os.path.join(girtarget.get_subdir(), girtarget.get_filename()) + # Must use absolute paths here because g-ir-scanner will not + # add them to the runtime path list if they're relative. This + # means we cannot use @BUILD_ROOT@ + build_root = state.environment.get_build_dir() + if isinstance(girtarget, build.SharedLibrary): + # need to put our output directory first as we need to use the + # generated libraries instead of any possibly installed system/prefix + # ones. 
+ ret += ["-L{}/{}".format(build_root, os.path.dirname(libpath))] + libname = girtarget.get_basename() + else: + libname = os.path.join(f"{build_root}/{libpath}") + ret += ['--library', libname] + # Needed for the following binutils bug: + # https://github.com/mesonbuild/meson/issues/1911 + # However, g-ir-scanner does not understand -Wl,-rpath + # so we need to use -L instead + for d in state.backend.determine_rpath_dirs(girtarget): + d = os.path.join(state.environment.get_build_dir(), d) + ret.append('-L' + d) + + return ret + + @staticmethod + def _get_girtargets_langs_compilers(girtargets: T.Sequence[build.BuildTarget]) -> T.List[T.Tuple[str, 'Compiler']]: + ret: T.List[T.Tuple[str, 'Compiler']] = [] + for girtarget in girtargets: + for lang, compiler in girtarget.compilers.items(): + # XXX: Can you use g-i with any other language? + if lang in {'c', 'cpp', 'objc', 'objcpp', 'd'}: + ret.append((lang, compiler)) + break + + return ret + + @staticmethod + def _get_gir_targets_deps(girtargets: T.Sequence[build.BuildTarget] + ) -> T.List[T.Union[build.BuildTarget, build.CustomTarget, build.CustomTargetIndex, Dependency]]: + ret: T.List[T.Union[build.BuildTarget, build.CustomTarget, build.CustomTargetIndex, Dependency]] = [] + for girtarget in girtargets: + ret += girtarget.get_all_link_deps() + ret += girtarget.get_external_deps() + return ret + + @staticmethod + def _get_gir_targets_inc_dirs(girtargets: T.Sequence[build.BuildTarget]) -> OrderedSet[build.IncludeDirs]: + ret: OrderedSet = OrderedSet() + for girtarget in girtargets: + ret.update(girtarget.get_include_dirs()) + return ret + + @staticmethod + def _get_langs_compilers_flags(state: 'ModuleState', langs_compilers: T.List[T.Tuple[str, 'Compiler']] + ) -> T.Tuple[T.List[str], T.List[str], T.List[str]]: + cflags: T.List[str] = [] + internal_ldflags: T.List[str] = [] + external_ldflags: T.List[str] = [] + + for lang, compiler in langs_compilers: + if state.global_args.get(lang): + cflags += 
state.global_args[lang] + if state.project_args.get(lang): + cflags += state.project_args[lang] + if mesonlib.OptionKey('b_sanitize') in compiler.base_options: + sanitize = state.environment.coredata.options[mesonlib.OptionKey('b_sanitize')].value + cflags += compiler.sanitizer_compile_args(sanitize) + sanitize = sanitize.split(',') + # These must be first in ldflags + if 'address' in sanitize: + internal_ldflags += ['-lasan'] + if 'thread' in sanitize: + internal_ldflags += ['-ltsan'] + if 'undefined' in sanitize: + internal_ldflags += ['-lubsan'] + # FIXME: Linking directly to lib*san is not recommended but g-ir-scanner + # does not understand -f LDFLAGS. https://bugzilla.gnome.org/show_bug.cgi?id=783892 + # ldflags += compiler.sanitizer_link_args(sanitize) + + return cflags, internal_ldflags, external_ldflags + + @staticmethod + def _make_gir_filelist(state: 'ModuleState', srcdir: str, ns: str, + nsversion: str, girtargets: T.Sequence[build.BuildTarget], + libsources: T.Sequence[T.Union[ + str, mesonlib.File, build.GeneratedList, + build.CustomTarget, build.CustomTargetIndex]] + ) -> str: + gir_filelist_dir = state.backend.get_target_private_dir_abs(girtargets[0]) + if not os.path.isdir(gir_filelist_dir): + os.mkdir(gir_filelist_dir) + gir_filelist_filename = os.path.join(gir_filelist_dir, f'{ns}_{nsversion}_gir_filelist') + + with open(gir_filelist_filename, 'w', encoding='utf-8') as gir_filelist: + for s in libsources: + if isinstance(s, (build.CustomTarget, build.CustomTargetIndex)): + for custom_output in s.get_outputs(): + gir_filelist.write(os.path.join(state.environment.get_build_dir(), + state.backend.get_target_dir(s), + custom_output) + '\n') + elif isinstance(s, mesonlib.File): + gir_filelist.write(s.rel_to_builddir(state.build_to_src) + '\n') + elif isinstance(s, build.GeneratedList): + for gen_src in s.get_outputs(): + gir_filelist.write(os.path.join(srcdir, gen_src) + '\n') + else: + gir_filelist.write(os.path.join(srcdir, s) + '\n') + + return 
gir_filelist_filename + + @staticmethod + def _make_gir_target( + state: 'ModuleState', + girfile: str, + scan_command: T.Sequence[T.Union['FileOrString', Executable, ExternalProgram, OverrideProgram]], + generated_files: T.Sequence[T.Union[str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList]], + depends: T.Sequence[T.Union['FileOrString', build.BuildTarget, 'build.GeneratedTypes', build.StructuredSources]], + kwargs: T.Dict[str, T.Any]) -> GirTarget: + install = kwargs['install_gir'] + if install is None: + install = kwargs['install'] + + install_dir = kwargs['install_dir_gir'] + if install_dir is None: + install_dir = os.path.join(state.environment.get_datadir(), 'gir-1.0') + elif install_dir is False: + install = False + + # g-ir-scanner uses pkg-config to find libraries such as glib. They could + # be built as subproject in which case we need to trick it to use + # -uninstalled.pc files Meson generated. It also must respect pkgconfig + # settings user could have set in machine file, like PKG_CONFIG_LIBDIR, + # SYSROOT, etc. 
+ run_env = PkgConfigDependency.get_env(state.environment, MachineChoice.HOST, uninstalled=True) + + return GirTarget( + girfile, + state.subdir, + state.subproject, + state.environment, + scan_command, + generated_files, + [girfile], + build_by_default=kwargs['build_by_default'], + extra_depends=depends, + install=install, + install_dir=[install_dir], + install_tag=['devel'], + env=run_env, + ) + + @staticmethod + def _make_typelib_target(state: 'ModuleState', typelib_output: str, + typelib_cmd: T.Sequence[T.Union[str, build.Executable, ExternalProgram, build.CustomTarget]], + generated_files: T.Sequence[T.Union[str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList]], + kwargs: T.Dict[str, T.Any]) -> TypelibTarget: + install = kwargs['install_typelib'] + if install is None: + install = kwargs['install'] + + install_dir = kwargs['install_dir_typelib'] + if install_dir is None: + install_dir = os.path.join(state.environment.get_libdir(), 'girepository-1.0') + elif install_dir is False: + install = False + + return TypelibTarget( + typelib_output, + state.subdir, + state.subproject, + state.environment, + typelib_cmd, + generated_files, + [typelib_output], + install=install, + install_dir=[install_dir], + install_tag=['typelib'], + build_by_default=kwargs['build_by_default'], + ) + + @staticmethod + def _gather_typelib_includes_and_update_depends( + state: 'ModuleState', + deps: T.Sequence[T.Union[Dependency, build.BuildTarget, build.CustomTarget, build.CustomTargetIndex]], + depends: T.Sequence[T.Union[build.BuildTarget, 'build.GeneratedTypes', 'FileOrString', build.StructuredSources]] + ) -> T.Tuple[T.List[str], T.List[T.Union[build.BuildTarget, 'build.GeneratedTypes', 'FileOrString', build.StructuredSources]]]: + # Need to recursively add deps on GirTarget sources from our + # dependencies and also find the include directories needed for the + # typelib generation custom target below. 
+ typelib_includes: T.List[str] = [] + new_depends = list(depends) + for dep in deps: + # Add a dependency on each GirTarget listed in dependencies and add + # the directory where it will be generated to the typelib includes + if isinstance(dep, InternalDependency): + for source in dep.sources: + if isinstance(source, GirTarget) and source not in depends: + new_depends.append(source) + subdir = os.path.join(state.environment.get_build_dir(), + source.get_subdir()) + if subdir not in typelib_includes: + typelib_includes.append(subdir) + # Do the same, but for dependencies of dependencies. These are + # stored in the list of generated sources for each link dep (from + # girtarget.get_all_link_deps() above). + # FIXME: Store this in the original form from declare_dependency() + # so it can be used here directly. + elif isinstance(dep, build.SharedLibrary): + for g_source in dep.generated: + if isinstance(g_source, GirTarget): + subdir = os.path.join(state.environment.get_build_dir(), + g_source.get_subdir()) + if subdir not in typelib_includes: + typelib_includes.append(subdir) + if isinstance(dep, Dependency): + girdir = dep.get_variable(pkgconfig='girdir', internal='girdir', default_value='') + assert isinstance(girdir, str), 'for mypy' + if girdir and girdir not in typelib_includes: + typelib_includes.append(girdir) + return typelib_includes, new_depends + + @staticmethod + def _get_external_args_for_langs(state: 'ModuleState', langs: T.List[str]) -> T.List[str]: + ret: T.List[str] = [] + for lang in langs: + ret += mesonlib.listify(state.environment.coredata.get_external_args(MachineChoice.HOST, lang)) + return ret + + @staticmethod + def _get_scanner_cflags(cflags: T.Iterable[str]) -> T.Iterable[str]: + 'g-ir-scanner only accepts -I/-D/-U; must ignore all other flags' + for f in cflags: + # _FORTIFY_SOURCE depends on / works together with -O, on the other hand this + # just invokes the preprocessor anyway + if f.startswith(('-D', '-U', '-I')) and not 
f.startswith('-D_FORTIFY_SOURCE'): + yield f + + @staticmethod + def _get_scanner_ldflags(ldflags: T.Iterable[str]) -> T.Iterable[str]: + 'g-ir-scanner only accepts -L/-l; must ignore -F and other linker flags' + for f in ldflags: + if f.startswith(('-L', '-l', '--extra-library')): + yield f + + @typed_pos_args('gnome.generate_gir', varargs=(build.Executable, build.SharedLibrary, build.StaticLibrary), min_varargs=1) + @typed_kwargs( + 'gnome.generate_gir', + INSTALL_KW, + _BUILD_BY_DEFAULT.evolve(since='0.40.0'), + _EXTRA_ARGS_KW, + KwargInfo('dependencies', ContainerTypeInfo(list, Dependency), default=[], listify=True), + KwargInfo('export_packages', ContainerTypeInfo(list, str), default=[], listify=True), + KwargInfo('fatal_warnings', bool, default=False, since='0.55.0'), + KwargInfo('header', ContainerTypeInfo(list, str), default=[], listify=True), + KwargInfo('identifier_prefix', ContainerTypeInfo(list, str), default=[], listify=True), + KwargInfo('include_directories', ContainerTypeInfo(list, (str, build.IncludeDirs)), default=[], listify=True), + KwargInfo('includes', ContainerTypeInfo(list, (str, GirTarget)), default=[], listify=True), + KwargInfo('install_gir', (bool, NoneType), since='0.61.0'), + KwargInfo('install_dir_gir', (str, bool, NoneType), + deprecated_values={False: ('0.61.0', 'Use install_gir to disable installation')}, + validator=lambda x: 'as boolean can only be false' if x is True else None), + KwargInfo('install_typelib', (bool, NoneType), since='0.61.0'), + KwargInfo('install_dir_typelib', (str, bool, NoneType), + deprecated_values={False: ('0.61.0', 'Use install_typelib to disable installation')}, + validator=lambda x: 'as boolean can only be false' if x is True else None), + KwargInfo('link_with', ContainerTypeInfo(list, (build.SharedLibrary, build.StaticLibrary)), default=[], listify=True), + KwargInfo('namespace', str, required=True), + KwargInfo('nsversion', str, required=True), + KwargInfo('sources', ContainerTypeInfo(list, (str, 
mesonlib.File, build.GeneratedList, build.CustomTarget, build.CustomTargetIndex)), default=[], listify=True), + KwargInfo('symbol_prefix', ContainerTypeInfo(list, str), default=[], listify=True), + ) + def generate_gir(self, state: 'ModuleState', args: T.Tuple[T.List[T.Union[build.Executable, build.SharedLibrary, build.StaticLibrary]]], + kwargs: 'GenerateGir') -> ModuleReturnValue: + girtargets = [self._unwrap_gir_target(arg, state) for arg in args[0]] + if len(girtargets) > 1 and any(isinstance(el, build.Executable) for el in girtargets): + raise MesonException('generate_gir only accepts a single argument when one of the arguments is an executable') + + gir_dep, giscanner, gicompiler = self._get_gir_dep(state) + + ns = kwargs['namespace'] + nsversion = kwargs['nsversion'] + libsources = kwargs['sources'] + + girfile = f'{ns}-{nsversion}.gir' + srcdir = os.path.join(state.environment.get_source_dir(), state.subdir) + builddir = os.path.join(state.environment.get_build_dir(), state.subdir) + + depends: T.List[T.Union['FileOrString', 'build.GeneratedTypes', build.BuildTarget, build.StructuredSources]] = [] + depends.extend(gir_dep.sources) + depends.extend(girtargets) + + langs_compilers = self._get_girtargets_langs_compilers(girtargets) + cflags, internal_ldflags, external_ldflags = self._get_langs_compilers_flags(state, langs_compilers) + deps = self._get_gir_targets_deps(girtargets) + deps += kwargs['dependencies'] + deps += [gir_dep] + typelib_includes, depends = self._gather_typelib_includes_and_update_depends(state, deps, depends) + # ldflags will be misinterpreted by gir scanner (showing + # spurious dependencies) but building GStreamer fails if they + # are not used here. 
+ dep_cflags, dep_internal_ldflags, dep_external_ldflags, gi_includes, depends = \ + self._get_dependencies_flags(deps, state, depends, use_gir_args=True) + scan_cflags = [] + scan_cflags += list(self._get_scanner_cflags(cflags)) + scan_cflags += list(self._get_scanner_cflags(dep_cflags)) + scan_cflags += list(self._get_scanner_cflags(self._get_external_args_for_langs(state, [lc[0] for lc in langs_compilers]))) + scan_internal_ldflags = [] + scan_internal_ldflags += list(self._get_scanner_ldflags(internal_ldflags)) + scan_internal_ldflags += list(self._get_scanner_ldflags(dep_internal_ldflags)) + scan_external_ldflags = [] + scan_external_ldflags += list(self._get_scanner_ldflags(external_ldflags)) + scan_external_ldflags += list(self._get_scanner_ldflags(dep_external_ldflags)) + girtargets_inc_dirs = self._get_gir_targets_inc_dirs(girtargets) + inc_dirs = kwargs['include_directories'] + + gir_inc_dirs: T.List[str] = [] + + scan_command: T.List[T.Union[str, build.Executable, 'ExternalProgram', 'OverrideProgram']] = [giscanner] + scan_command += ['--quiet'] + scan_command += ['--no-libtool'] + scan_command += ['--namespace=' + ns, '--nsversion=' + nsversion] + scan_command += ['--warn-all'] + scan_command += ['--output', '@OUTPUT@'] + scan_command += [f'--c-include={h}' for h in kwargs['header']] + scan_command += kwargs['extra_args'] + scan_command += ['-I' + srcdir, '-I' + builddir] + scan_command += state.get_include_args(girtargets_inc_dirs) + scan_command += ['--filelist=' + self._make_gir_filelist(state, srcdir, ns, nsversion, girtargets, libsources)] + for l in kwargs['link_with']: + _cflags, depends = self._get_link_args(state, l, depends, use_gir_args=True) + scan_command.extend(_cflags) + _cmd, _ginc, _deps = self._scan_include(state, kwargs['includes']) + scan_command.extend(_cmd) + gir_inc_dirs.extend(_ginc) + depends.extend(_deps) + + scan_command += [f'--symbol-prefix={p}' for p in kwargs['symbol_prefix']] + scan_command += [f'--identifier-prefix={p}' 
for p in kwargs['identifier_prefix']] + scan_command += [f'--pkg-export={p}' for p in kwargs['export_packages']] + scan_command += ['--cflags-begin'] + scan_command += scan_cflags + scan_command += ['--cflags-end'] + scan_command += state.get_include_args(inc_dirs) + scan_command += state.get_include_args(itertools.chain(gi_includes, gir_inc_dirs, inc_dirs), prefix='--add-include-path=') + scan_command += list(scan_internal_ldflags) + scan_command += self._scan_gir_targets(state, girtargets) + scan_command += self._scan_langs(state, [lc[0] for lc in langs_compilers]) + scan_command += list(scan_external_ldflags) + + if self._gir_has_option('--sources-top-dirs'): + scan_command += ['--sources-top-dirs', os.path.join(state.environment.get_source_dir(), state.root_subdir)] + scan_command += ['--sources-top-dirs', os.path.join(state.environment.get_build_dir(), state.root_subdir)] + + if '--warn-error' in scan_command: + FeatureDeprecated.single_use('gnome.generate_gir argument --warn-error', '0.55.0', + state.subproject, 'Use "fatal_warnings" keyword argument', state.current_node) + if kwargs['fatal_warnings']: + scan_command.append('--warn-error') + + generated_files = [f for f in libsources if isinstance(f, (GeneratedList, CustomTarget, CustomTargetIndex))] + + scan_target = self._make_gir_target( + state, girfile, scan_command, generated_files, depends, + # We have to cast here because mypy can't figure this out + T.cast('T.Dict[str, T.Any]', kwargs)) + + typelib_output = f'{ns}-{nsversion}.typelib' + typelib_cmd = [gicompiler, scan_target, '--output', '@OUTPUT@'] + typelib_cmd += state.get_include_args(gir_inc_dirs, prefix='--includedir=') + + for incdir in typelib_includes: + typelib_cmd += ["--includedir=" + incdir] + + typelib_target = self._make_typelib_target(state, typelib_output, typelib_cmd, generated_files, T.cast('T.Dict[str, T.Any]', kwargs)) + + self._devenv_prepend('GI_TYPELIB_PATH', os.path.join(state.environment.get_build_dir(), state.subdir)) + + 
rv = [scan_target, typelib_target] + + return ModuleReturnValue(rv, rv) + + @noPosargs + @typed_kwargs('gnome.compile_schemas', _BUILD_BY_DEFAULT.evolve(since='0.40.0'), DEPEND_FILES_KW) + def compile_schemas(self, state: 'ModuleState', args: T.List['TYPE_var'], kwargs: 'CompileSchemas') -> ModuleReturnValue: + srcdir = os.path.join(state.build_to_src, state.subdir) + outdir = state.subdir + + cmd: T.List[T.Union[ExternalProgram, str]] = [state.find_program('glib-compile-schemas'), '--targetdir', outdir, srcdir] + if state.subdir == '': + targetname = 'gsettings-compile' + else: + targetname = 'gsettings-compile-' + state.subdir.replace('/', '_') + target_g = build.CustomTarget( + targetname, + state.subdir, + state.subproject, + state.environment, + cmd, + [], + ['gschemas.compiled'], + build_by_default=kwargs['build_by_default'], + depend_files=kwargs['depend_files'], + ) + self._devenv_prepend('GSETTINGS_SCHEMA_DIR', os.path.join(state.environment.get_build_dir(), state.subdir)) + return ModuleReturnValue(target_g, [target_g]) + + @typed_pos_args('gnome.yelp', str, varargs=str) + @typed_kwargs( + 'gnome.yelp', + KwargInfo( + 'languages', ContainerTypeInfo(list, str), + listify=True, default=[], + deprecated='0.43.0', + deprecated_message='Use a LINGUAS file in the source directory instead', + ), + KwargInfo('media', ContainerTypeInfo(list, str), listify=True, default=[]), + KwargInfo('sources', ContainerTypeInfo(list, str), listify=True, default=[]), + KwargInfo('symlink_media', bool, default=True), + ) + def yelp(self, state: 'ModuleState', args: T.Tuple[str, T.List[str]], kwargs: 'Yelp') -> ModuleReturnValue: + project_id = args[0] + sources = kwargs['sources'] + if args[1]: + FeatureDeprecated.single_use('gnome.yelp more than one positional argument', '0.60.0', + state.subproject, 'use the "sources" keyword argument instead.', state.current_node) + if not sources: + sources = args[1] + if not sources: + raise MesonException('Yelp requires a list of sources') 
+ elif args[1]: + mlog.warning('"gnome.yelp" ignores positional sources arguments when the "sources" keyword argument is set') + sources_files = [mesonlib.File.from_source_file(state.environment.source_dir, + os.path.join(state.subdir, 'C'), + s) for s in sources] + + langs = kwargs['languages'] + if not langs: + langs = read_linguas(os.path.join(state.environment.source_dir, state.subdir)) + + media = kwargs['media'] + symlinks = kwargs['symlink_media'] + targets: T.List[T.Union['build.Target', build.Data, build.SymlinkData]] = [] + potargets: T.List[build.RunTarget] = [] + + itstool = state.find_program('itstool') + msgmerge = state.find_program('msgmerge') + msgfmt = state.find_program('msgfmt') + + install_dir = os.path.join(state.environment.get_datadir(), 'help') + c_install_dir = os.path.join(install_dir, 'C', project_id) + c_data = build.Data(sources_files, c_install_dir, c_install_dir, + mesonlib.FileMode(), state.subproject, install_tag='doc') + targets.append(c_data) + + media_files: T.List[mesonlib.File] = [] + for m in media: + f = mesonlib.File.from_source_file(state.environment.source_dir, + os.path.join(state.subdir, 'C'), m) + media_files.append(f) + m_install_dir = os.path.join(c_install_dir, os.path.dirname(m)) + m_data = build.Data([f], m_install_dir, m_install_dir, + mesonlib.FileMode(), state.subproject, install_tag='doc') + targets.append(m_data) + + pot_file = os.path.join('@SOURCE_ROOT@', state.subdir, 'C', project_id + '.pot') + pot_sources = [os.path.join('@SOURCE_ROOT@', state.subdir, 'C', s) for s in sources] + pot_args: T.List[T.Union['ExternalProgram', str]] = [itstool, '-o', pot_file] + pot_args.extend(pot_sources) + pottarget = build.RunTarget(f'help-{project_id}-pot', pot_args, [], + os.path.join(state.subdir, 'C'), state.subproject, + state.environment) + targets.append(pottarget) + + for l in langs: + l_subdir = os.path.join(state.subdir, l) + l_install_dir = os.path.join(install_dir, l, project_id) + + for i, m in 
enumerate(media): + m_dir = os.path.dirname(m) + m_install_dir = os.path.join(l_install_dir, m_dir) + l_data: T.Union[build.Data, build.SymlinkData] + if symlinks: + link_target = os.path.join(os.path.relpath(c_install_dir, start=m_install_dir), m) + l_data = build.SymlinkData(link_target, os.path.basename(m), + m_install_dir, state.subproject, install_tag='doc') + else: + try: + m_file = mesonlib.File.from_source_file(state.environment.source_dir, l_subdir, m) + except MesonException: + m_file = media_files[i] + l_data = build.Data([m_file], m_install_dir, m_install_dir, + mesonlib.FileMode(), state.subproject, install_tag='doc') + targets.append(l_data) + + po_file = l + '.po' + po_args: T.List[T.Union['ExternalProgram', str]] = [ + msgmerge, '-q', '-o', + os.path.join('@SOURCE_ROOT@', l_subdir, po_file), + os.path.join('@SOURCE_ROOT@', l_subdir, po_file), pot_file] + potarget = build.RunTarget(f'help-{project_id}-{l}-update-po', + po_args, [pottarget], l_subdir, state.subproject, + state.environment) + targets.append(potarget) + potargets.append(potarget) + + gmo_file = project_id + '-' + l + '.gmo' + gmotarget = build.CustomTarget( + f'help-{project_id}-{l}-gmo', + l_subdir, + state.subproject, + state.environment, + [msgfmt, '@INPUT@', '-o', '@OUTPUT@'], + [po_file], + [gmo_file], + install_tag=['doc'], + ) + targets.append(gmotarget) + + mergetarget = build.CustomTarget( + f'help-{project_id}-{l}', + l_subdir, + state.subproject, + state.environment, + [itstool, '-m', os.path.join(l_subdir, gmo_file), '--lang', l, '-o', '@OUTDIR@', '@INPUT@'], + sources_files, + sources, + extra_depends=[gmotarget], + install=True, + install_dir=[l_install_dir], + install_tag=['doc'], + ) + targets.append(mergetarget) + + allpotarget = build.AliasTarget(f'help-{project_id}-update-po', potargets, + state.subdir, state.subproject, state.environment) + targets.append(allpotarget) + + return ModuleReturnValue(None, targets) + + @typed_pos_args('gnome.gtkdoc', str) + 
@typed_kwargs( + 'gnome.gtkdoc', + KwargInfo('c_args', ContainerTypeInfo(list, str), since='0.48.0', default=[], listify=True), + KwargInfo('check', bool, default=False, since='0.52.0'), + KwargInfo('content_files', ContainerTypeInfo(list, (str, mesonlib.File, build.GeneratedList, build.CustomTarget, build.CustomTargetIndex)), default=[], listify=True), + KwargInfo( + 'dependencies', + ContainerTypeInfo(list, (Dependency, build.SharedLibrary, build.StaticLibrary)), + listify=True, default=[]), + KwargInfo('expand_content_files', ContainerTypeInfo(list, (str, mesonlib.File)), default=[], listify=True), + KwargInfo('fixxref_args', ContainerTypeInfo(list, str), default=[], listify=True), + KwargInfo('gobject_typesfile', ContainerTypeInfo(list, (str, mesonlib.File)), default=[], listify=True), + KwargInfo('html_args', ContainerTypeInfo(list, str), default=[], listify=True), + KwargInfo('html_assets', ContainerTypeInfo(list, (str, mesonlib.File)), default=[], listify=True), + KwargInfo('ignore_headers', ContainerTypeInfo(list, str), default=[], listify=True), + KwargInfo( + 'include_directories', + ContainerTypeInfo(list, (str, build.IncludeDirs)), + listify=True, default=[]), + KwargInfo('install', bool, default=True), + KwargInfo('install_dir', ContainerTypeInfo(list, str), default=[], listify=True), + KwargInfo('main_sgml', (str, NoneType)), + KwargInfo('main_xml', (str, NoneType)), + KwargInfo('mkdb_args', ContainerTypeInfo(list, str), default=[], listify=True), + KwargInfo( + 'mode', str, default='auto', since='0.37.0', + validator=in_set_validator({'xml', 'sgml', 'none', 'auto'})), + KwargInfo('module_version', str, default='', since='0.48.0'), + KwargInfo('namespace', str, default='', since='0.37.0'), + KwargInfo('scan_args', ContainerTypeInfo(list, str), default=[], listify=True), + KwargInfo('scanobjs_args', ContainerTypeInfo(list, str), default=[], listify=True), + KwargInfo('src_dir', ContainerTypeInfo(list, (str, build.IncludeDirs)), listify=True, 
required=True), + ) + def gtkdoc(self, state: 'ModuleState', args: T.Tuple[str], kwargs: 'GtkDoc') -> ModuleReturnValue: + modulename = args[0] + main_file = kwargs['main_sgml'] + main_xml = kwargs['main_xml'] + if main_xml is not None: + if main_file is not None: + raise InvalidArguments('gnome.gtkdoc: main_xml and main_sgml are exclusive arguments') + main_file = main_xml + moduleversion = kwargs['module_version'] + targetname = modulename + ('-' + moduleversion if moduleversion else '') + '-doc' + command = state.environment.get_build_command() + + namespace = kwargs['namespace'] + + def abs_filenames(files: T.Iterable['FileOrString']) -> T.Iterator[str]: + for f in files: + if isinstance(f, mesonlib.File): + yield f.absolute_path(state.environment.get_source_dir(), state.environment.get_build_dir()) + else: + yield os.path.join(state.environment.get_source_dir(), state.subdir, f) + + src_dirs = kwargs['src_dir'] + header_dirs: T.List[str] = [] + for src_dir in src_dirs: + if isinstance(src_dir, build.IncludeDirs): + header_dirs.extend(src_dir.to_string_list(state.environment.get_source_dir(), + state.environment.get_build_dir())) + else: + header_dirs.append(src_dir) + + t_args: T.List[str] = [ + '--internal', 'gtkdoc', + '--sourcedir=' + state.environment.get_source_dir(), + '--builddir=' + state.environment.get_build_dir(), + '--subdir=' + state.subdir, + '--headerdirs=' + '@@'.join(header_dirs), + '--mainfile=' + main_file, + '--modulename=' + modulename, + '--moduleversion=' + moduleversion, + '--mode=' + kwargs['mode']] + for tool in ['scan', 'scangobj', 'mkdb', 'mkhtml', 'fixxref']: + program_name = 'gtkdoc-' + tool + program = state.find_program(program_name) + path = program.get_path() + t_args.append(f'--{program_name}={path}') + if namespace: + t_args.append('--namespace=' + namespace) + exe_wrapper = state.environment.get_exe_wrapper() + if exe_wrapper: + t_args.append('--run=' + ' '.join(exe_wrapper.get_command())) + 
t_args.append(f'--htmlargs={"@@".join(kwargs["html_args"])}') + t_args.append(f'--scanargs={"@@".join(kwargs["scan_args"])}') + t_args.append(f'--scanobjsargs={"@@".join(kwargs["scanobjs_args"])}') + t_args.append(f'--gobjects-types-file={"@@".join(abs_filenames(kwargs["gobject_typesfile"]))}') + t_args.append(f'--fixxrefargs={"@@".join(kwargs["fixxref_args"])}') + t_args.append(f'--mkdbargs={"@@".join(kwargs["mkdb_args"])}') + t_args.append(f'--html-assets={"@@".join(abs_filenames(kwargs["html_assets"]))}') + + depends: T.List['build.GeneratedTypes'] = [] + content_files = [] + for s in kwargs['content_files']: + if isinstance(s, (build.CustomTarget, build.CustomTargetIndex)): + depends.append(s) + for o in s.get_outputs(): + content_files.append(os.path.join(state.environment.get_build_dir(), + state.backend.get_target_dir(s), + o)) + elif isinstance(s, mesonlib.File): + content_files.append(s.absolute_path(state.environment.get_source_dir(), + state.environment.get_build_dir())) + elif isinstance(s, build.GeneratedList): + depends.append(s) + for gen_src in s.get_outputs(): + content_files.append(os.path.join(state.environment.get_source_dir(), + state.subdir, + gen_src)) + else: + content_files.append(os.path.join(state.environment.get_source_dir(), + state.subdir, + s)) + t_args += ['--content-files=' + '@@'.join(content_files)] + + t_args.append(f'--expand-content-files={"@@".join(abs_filenames(kwargs["expand_content_files"]))}') + t_args.append(f'--ignore-headers={"@@".join(kwargs["ignore_headers"])}') + t_args.append(f'--installdir={"@@".join(kwargs["install_dir"])}') + build_args, new_depends = self._get_build_args(kwargs['c_args'], kwargs['include_directories'], + kwargs['dependencies'], state, depends) + t_args.extend(build_args) + new_depends.extend(depends) + custom_target = build.CustomTarget( + targetname, + state.subdir, + state.subproject, + state.environment, + command + t_args, + [], + [f'{modulename}-decl.txt'], + build_always_stale=True, + 
            extra_depends=new_depends,
        )
        # Alias so `ninja <modulename>-doc` works alongside the custom target.
        alias_target = build.AliasTarget(targetname, [custom_target], state.subdir, state.subproject, state.environment)
        if kwargs['check']:
            # Register a unit test that runs gtkdoc-check over the built docs.
            check_cmd = state.find_program('gtkdoc-check')
            check_env = ['DOC_MODULE=' + modulename,
                         'DOC_MAIN_SGML_FILE=' + main_file]
            check_args = (targetname + '-check', check_cmd)
            check_workdir = os.path.join(state.environment.get_build_dir(), state.subdir)
            state.test(check_args, env=check_env, workdir=check_workdir, depends=[custom_target])
        res: T.List[T.Union[build.Target, build.ExecutableSerialisation]] = [custom_target, alias_target]
        if kwargs['install']:
            # Serialize the doc build command so it can be re-run at install
            # time (tagged 'doc' for `meson install --tags doc`).
            res.append(state.backend.get_executable_serialisation(command + t_args, tag='doc'))
        return ModuleReturnValue(custom_target, res)

    def _get_build_args(self, c_args: T.List[str], inc_dirs: T.List[T.Union[str, build.IncludeDirs]],
                        deps: T.List[T.Union[Dependency, build.SharedLibrary, build.StaticLibrary]],
                        state: 'ModuleState',
                        depends: T.Sequence[T.Union[build.BuildTarget, 'build.GeneratedTypes']]) -> T.Tuple[
            T.List[str], T.List[T.Union[build.BuildTarget, 'build.GeneratedTypes', 'FileOrString', build.StructuredSources]]]:
        """Collect --cc/--ld/--cflags/--ldflags arguments for gtkdoc-scangobj.

        Returns the serialized argument list plus any additional build-time
        dependencies discovered while resolving *deps*.
        """
        args: T.List[str] = []
        cflags = c_args.copy()
        deps_cflags, internal_ldflags, external_ldflags, _gi_includes, new_depends = \
            self._get_dependencies_flags(deps, state, depends, include_rpath=True)

        cflags.extend(deps_cflags)
        cflags.extend(state.get_include_args(inc_dirs))
        ldflags: T.List[str] = []
        ldflags.extend(internal_ldflags)
        ldflags.extend(external_ldflags)

        # Fold in the project-wide C args/link args for the host machine.
        cflags.extend(state.environment.coredata.get_external_args(MachineChoice.HOST, 'c'))
        ldflags.extend(state.environment.coredata.get_external_link_args(MachineChoice.HOST, 'c'))
        compiler = state.environment.coredata.compilers[MachineChoice.HOST]['c']

        compiler_flags = self._get_langs_compilers_flags(state, [('c', compiler)])
        cflags.extend(compiler_flags[0])
        ldflags.extend(compiler_flags[1])
ldflags.extend(compiler_flags[2]) + if compiler: + args += ['--cc=%s' % join_args(compiler.get_exelist())] + args += ['--ld=%s' % join_args(compiler.get_linker_exelist())] + if cflags: + args += ['--cflags=%s' % join_args(cflags)] + if ldflags: + args += ['--ldflags=%s' % join_args(ldflags)] + + return args, new_depends + + @noKwargs + @typed_pos_args('gnome.gtkdoc_html_dir', str) + def gtkdoc_html_dir(self, state: 'ModuleState', args: T.Tuple[str], kwargs: 'TYPE_kwargs') -> str: + return os.path.join('share/gtk-doc/html', args[0]) + + @typed_pos_args('gnome.gdbus_codegen', str, optargs=[(str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)]) + @typed_kwargs( + 'gnome.gdbus_codegen', + _BUILD_BY_DEFAULT.evolve(since='0.40.0'), + SOURCES_KW.evolve(since='0.46.0'), + KwargInfo('extra_args', ContainerTypeInfo(list, str), since='0.47.0', default=[], listify=True), + KwargInfo('interface_prefix', (str, NoneType)), + KwargInfo('namespace', (str, NoneType)), + KwargInfo('object_manager', bool, default=False), + KwargInfo( + 'annotations', ContainerTypeInfo(list, (list, str)), + default=[], + validator=annotations_validator, + convertor=lambda x: [x] if x and isinstance(x[0], str) else x, + ), + KwargInfo('install_header', bool, default=False, since='0.46.0'), + KwargInfo('docbook', (str, NoneType)), + KwargInfo( + 'autocleanup', str, default='default', since='0.47.0', + validator=in_set_validator({'all', 'none', 'objects'})), + INSTALL_DIR_KW.evolve(since='0.46.0') + ) + def gdbus_codegen(self, state: 'ModuleState', args: T.Tuple[str, T.Optional[T.Union['FileOrString', build.GeneratedTypes]]], + kwargs: 'GdbusCodegen') -> ModuleReturnValue: + namebase = args[0] + xml_files: T.List[T.Union['FileOrString', build.GeneratedTypes]] = [args[1]] if args[1] else [] + cmd: T.List[T.Union['ExternalProgram', str]] = [state.find_program('gdbus-codegen')] + cmd.extend(kwargs['extra_args']) + + # Autocleanup supported? 
+ glib_version = self._get_native_glib_version(state) + if not mesonlib.version_compare(glib_version, '>= 2.49.1'): + # Warn if requested, silently disable if not + if kwargs['autocleanup'] != 'default': + mlog.warning(f'Glib version ({glib_version}) is too old to support the \'autocleanup\' ' + 'kwarg, need 2.49.1 or newer') + else: + # Handle legacy glib versions that don't have autocleanup + ac = kwargs['autocleanup'] + if ac == 'default': + ac = 'all' + cmd.extend(['--c-generate-autocleanup', ac]) + + if kwargs['interface_prefix'] is not None: + cmd.extend(['--interface-prefix', kwargs['interface_prefix']]) + if kwargs['namespace'] is not None: + cmd.extend(['--c-namespace', kwargs['namespace']]) + if kwargs['object_manager']: + cmd.extend(['--c-generate-object-manager']) + xml_files.extend(kwargs['sources']) + build_by_default = kwargs['build_by_default'] + + # Annotations are a bit ugly in that they are a list of lists of strings... + for annot in kwargs['annotations']: + cmd.append('--annotate') + cmd.extend(annot) + + targets = [] + install_header = kwargs['install_header'] + install_dir = kwargs['install_dir'] or state.environment.coredata.get_option(mesonlib.OptionKey('includedir')) + assert isinstance(install_dir, str), 'for mypy' + + output = namebase + '.c' + # Added in https://gitlab.gnome.org/GNOME/glib/commit/e4d68c7b3e8b01ab1a4231bf6da21d045cb5a816 (2.55.2) + # Fixed in https://gitlab.gnome.org/GNOME/glib/commit/cd1f82d8fc741a2203582c12cc21b4dacf7e1872 (2.56.2) + if mesonlib.version_compare(glib_version, '>= 2.56.2'): + c_cmd = cmd + ['--body', '--output', '@OUTPUT@', '@INPUT@'] + else: + if kwargs['docbook'] is not None: + docbook = kwargs['docbook'] + + cmd += ['--generate-docbook', docbook] + + # https://git.gnome.org/browse/glib/commit/?id=ee09bb704fe9ccb24d92dd86696a0e6bb8f0dc1a + if mesonlib.version_compare(glib_version, '>= 2.51.3'): + cmd += ['--output-directory', '@OUTDIR@', '--generate-c-code', namebase, '@INPUT@'] + else: + 
self._print_gdbus_warning() + cmd += ['--generate-c-code', '@OUTDIR@/' + namebase, '@INPUT@'] + c_cmd = cmd + + cfile_custom_target = build.CustomTarget( + output, + state.subdir, + state.subproject, + state.environment, + c_cmd, + xml_files, + [output], + build_by_default=build_by_default, + ) + targets.append(cfile_custom_target) + + output = namebase + '.h' + if mesonlib.version_compare(glib_version, '>= 2.56.2'): + hfile_cmd = cmd + ['--header', '--output', '@OUTPUT@', '@INPUT@'] + depends = [] + else: + hfile_cmd = cmd + depends = [cfile_custom_target] + + hfile_custom_target = build.CustomTarget( + output, + state.subdir, + state.subproject, + state.environment, + hfile_cmd, + xml_files, + [output], + build_by_default=build_by_default, + extra_depends=depends, + install=install_header, + install_dir=[install_dir], + install_tag=['devel'], + ) + targets.append(hfile_custom_target) + + if kwargs['docbook'] is not None: + docbook = kwargs['docbook'] + # The docbook output is always ${docbook}-${name_of_xml_file} + output = namebase + '-docbook' + outputs = [] + for f in xml_files: + outputs.append('{}-{}'.format(docbook, os.path.basename(str(f)))) + + if mesonlib.version_compare(glib_version, '>= 2.56.2'): + docbook_cmd = cmd + ['--output-directory', '@OUTDIR@', '--generate-docbook', docbook, '@INPUT@'] + depends = [] + else: + docbook_cmd = cmd + depends = [cfile_custom_target] + + docbook_custom_target = build.CustomTarget( + output, + state.subdir, + state.subproject, + state.environment, + docbook_cmd, + xml_files, + outputs, + build_by_default=build_by_default, + extra_depends=depends, + ) + targets.append(docbook_custom_target) + + return ModuleReturnValue(targets, targets) + + @typed_pos_args('gnome.mkenums', str) + @typed_kwargs( + 'gnome.mkenums', + *_MK_ENUMS_COMMON_KWS, + DEPENDS_KW, + KwargInfo('c_template', (str, mesonlib.File, NoneType)), + KwargInfo('h_template', (str, mesonlib.File, NoneType)), + KwargInfo('comments', (str, NoneType)), + 
KwargInfo('eprod', (str, NoneType)), + KwargInfo('fhead', (str, NoneType)), + KwargInfo('fprod', (str, NoneType)), + KwargInfo('ftail', (str, NoneType)), + KwargInfo('vhead', (str, NoneType)), + KwargInfo('vprod', (str, NoneType)), + KwargInfo('vtail', (str, NoneType)), + ) + def mkenums(self, state: 'ModuleState', args: T.Tuple[str], kwargs: 'MkEnums') -> ModuleReturnValue: + basename = args[0] + + c_template = kwargs['c_template'] + if isinstance(c_template, mesonlib.File): + c_template = c_template.absolute_path(state.environment.source_dir, state.environment.build_dir) + h_template = kwargs['h_template'] + if isinstance(h_template, mesonlib.File): + h_template = h_template.absolute_path(state.environment.source_dir, state.environment.build_dir) + + cmd: T.List[str] = [] + known_kwargs = ['comments', 'eprod', 'fhead', 'fprod', 'ftail', + 'identifier_prefix', 'symbol_prefix', + 'vhead', 'vprod', 'vtail'] + for arg in known_kwargs: + # mypy can't figure this out + if kwargs[arg]: # type: ignore + cmd += ['--' + arg.replace('_', '-'), kwargs[arg]] # type: ignore + + targets: T.List[CustomTarget] = [] + + h_target: T.Optional[CustomTarget] = None + if h_template is not None: + h_output = os.path.basename(os.path.splitext(h_template)[0]) + # We always set template as the first element in the source array + # so --template consumes it. + h_cmd = cmd + ['--template', '@INPUT@'] + h_sources: T.List[T.Union[FileOrString, 'build.GeneratedTypes']] = [h_template] + h_sources.extend(kwargs['sources']) + h_target = self._make_mkenum_impl( + state, h_sources, h_output, h_cmd, install=kwargs['install_header'], + install_dir=kwargs['install_dir']) + targets.append(h_target) + + if c_template is not None: + c_output = os.path.basename(os.path.splitext(c_template)[0]) + # We always set template as the first element in the source array + # so --template consumes it. 
+ c_cmd = cmd + ['--template', '@INPUT@'] + c_sources: T.List[T.Union[FileOrString, 'build.GeneratedTypes']] = [c_template] + c_sources.extend(kwargs['sources']) + + depends = kwargs['depends'].copy() + if h_target is not None: + depends.append(h_target) + c_target = self._make_mkenum_impl( + state, c_sources, c_output, c_cmd, depends=depends) + targets.insert(0, c_target) + + if c_template is None and h_template is None: + generic_cmd = cmd + ['@INPUT@'] + target = self._make_mkenum_impl( + state, kwargs['sources'], basename, generic_cmd, + install=kwargs['install_header'], + install_dir=kwargs['install_dir']) + return ModuleReturnValue(target, [target]) + else: + return ModuleReturnValue(targets, targets) + + @FeatureNew('gnome.mkenums_simple', '0.42.0') + @typed_pos_args('gnome.mkenums_simple', str) + @typed_kwargs( + 'gnome.mkenums_simple', + *_MK_ENUMS_COMMON_KWS, + KwargInfo('header_prefix', str, default=''), + KwargInfo('function_prefix', str, default=''), + KwargInfo('body_prefix', str, default=''), + KwargInfo('decorator', str, default=''), + ) + def mkenums_simple(self, state: 'ModuleState', args: T.Tuple[str], kwargs: 'MkEnumsSimple') -> ModuleReturnValue: + hdr_filename = f'{args[0]}.h' + body_filename = f'{args[0]}.c' + + header_prefix = kwargs['header_prefix'] + decl_decorator = kwargs['decorator'] + func_prefix = kwargs['function_prefix'] + body_prefix = kwargs['body_prefix'] + + cmd: T.List[str] = [] + if kwargs['identifier_prefix']: + cmd.extend(['--identifier-prefix', kwargs['identifier_prefix']]) + if kwargs['symbol_prefix']: + cmd.extend(['--symbol-prefix', kwargs['symbol_prefix']]) + + c_cmd = cmd.copy() + # Maybe we should write our own template files into the build dir + # instead, but that seems like much more work, nice as it would be. 
+ fhead = '' + if body_prefix != '': + fhead += '%s\n' % body_prefix + fhead += '#include "%s"\n' % hdr_filename + for hdr in kwargs['sources']: + fhead += '#include "{}"\n'.format(os.path.basename(str(hdr))) + fhead += textwrap.dedent( + ''' + #define C_ENUM(v) ((gint) v) + #define C_FLAGS(v) ((guint) v) + ''') + c_cmd.extend(['--fhead', fhead]) + + c_cmd.append('--fprod') + c_cmd.append(textwrap.dedent( + ''' + /* enumerations from "@basename@" */ + ''')) + + c_cmd.append('--vhead') + c_cmd.append(textwrap.dedent( + f''' + GType + {func_prefix}@enum_name@_get_type (void) + {{ + static gsize gtype_id = 0; + static const G@Type@Value values[] = {{''')) + + c_cmd.extend(['--vprod', ' { C_@TYPE@(@VALUENAME@), "@VALUENAME@", "@valuenick@" },']) + + c_cmd.append('--vtail') + c_cmd.append(textwrap.dedent( + ''' { 0, NULL, NULL } + }; + if (g_once_init_enter (>ype_id)) { + GType new_type = g_@type@_register_static (g_intern_static_string ("@EnumName@"), values); + g_once_init_leave (>ype_id, new_type); + } + return (GType) gtype_id; + }''')) + c_cmd.append('@INPUT@') + + c_file = self._make_mkenum_impl(state, kwargs['sources'], body_filename, c_cmd) + + # .h file generation + h_cmd = cmd.copy() + + h_cmd.append('--fhead') + h_cmd.append(textwrap.dedent( + f'''#pragma once + + #include <glib-object.h> + {header_prefix} + + G_BEGIN_DECLS + ''')) + + h_cmd.append('--fprod') + h_cmd.append(textwrap.dedent( + ''' + /* enumerations from "@basename@" */ + ''')) + + h_cmd.append('--vhead') + h_cmd.append(textwrap.dedent( + f''' + {decl_decorator} + GType {func_prefix}@enum_name@_get_type (void); + #define @ENUMPREFIX@_TYPE_@ENUMSHORT@ ({func_prefix}@enum_name@_get_type())''')) + + h_cmd.append('--ftail') + h_cmd.append(textwrap.dedent( + ''' + G_END_DECLS''')) + h_cmd.append('@INPUT@') + + h_file = self._make_mkenum_impl( + state, kwargs['sources'], hdr_filename, h_cmd, + install=kwargs['install_header'], + install_dir=kwargs['install_dir']) + + return ModuleReturnValue([c_file, 
                                       h_file], [c_file, h_file])

    @staticmethod
    def _make_mkenum_impl(
            state: 'ModuleState',
            sources: T.Sequence[T.Union[str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList]],
            output: str,
            cmd: T.List[str],
            *,
            install: bool = False,
            install_dir: T.Optional[T.Sequence[T.Union[str, bool]]] = None,
            depends: T.Optional[T.Sequence[T.Union[CustomTarget, CustomTargetIndex, BuildTarget]]] = None
            ) -> build.CustomTarget:
        """Shared builder for mkenums()/mkenums_simple() output targets.

        Runs glib-mkenums (falling back to plain 'mkenums') with *cmd* over
        *sources*, capturing stdout into *output*.
        """
        real_cmd: T.List[T.Union[str, ExternalProgram]] = [state.find_program(['glib-mkenums', 'mkenums'])]
        real_cmd.extend(cmd)
        # install_dir may be empty/None; default to the configured includedir.
        _install_dir = install_dir or state.environment.coredata.get_option(mesonlib.OptionKey('includedir'))
        assert isinstance(_install_dir, str), 'for mypy'

        return build.CustomTarget(
            output,
            state.subdir,
            state.subproject,
            state.environment,
            real_cmd,
            sources,
            [output],
            # glib-mkenums writes to stdout; capture it into the output file.
            capture=True,
            install=install,
            install_dir=[_install_dir],
            install_tag=['devel'],
            extra_depends=depends,
            # https://github.com/mesonbuild/meson/issues/973
            absolute_paths=True,
        )

    @typed_pos_args('gnome.genmarshal', str)
    @typed_kwargs(
        'gnome.genmarshal',
        DEPEND_FILES_KW.evolve(since='0.61.0'),
        DEPENDS_KW.evolve(since='0.61.0'),
        INSTALL_KW.evolve(name='install_header'),
        INSTALL_DIR_KW,
        KwargInfo('extra_args', ContainerTypeInfo(list, str), listify=True, default=[]),
        KwargInfo('internal', bool, default=False),
        KwargInfo('nostdinc', bool, default=False),
        KwargInfo('prefix', (str, NoneType)),
        KwargInfo('skip_source', bool, default=False),
        KwargInfo('sources', ContainerTypeInfo(list, (str, mesonlib.File), allow_empty=False), listify=True, required=True),
        KwargInfo('stdinc', bool, default=False),
        KwargInfo('valist_marshallers', bool, default=False),
    )
    def genmarshal(self, state: 'ModuleState', args: T.Tuple[str], kwargs: 'GenMarshal') -> ModuleReturnValue:
        """Generate GObject marshaller .c/.h files with glib-genmarshal."""
        output = args[0]
        sources = kwargs['sources']

        new_genmarshal =
mesonlib.version_compare(self._get_native_glib_version(state), '>= 2.53.3') + + cmd: T.List[T.Union['ExternalProgram', str]] = [state.find_program('glib-genmarshal')] + if kwargs['prefix']: + cmd.extend(['--prefix', kwargs['prefix']]) + if kwargs['extra_args']: + if new_genmarshal: + cmd.extend(kwargs['extra_args']) + else: + mlog.warning('The current version of GLib does not support extra arguments \n' + 'for glib-genmarshal. You need at least GLib 2.53.3. See ', + mlog.bold('https://github.com/mesonbuild/meson/pull/2049')) + for k in ['internal', 'nostdinc', 'skip_source', 'stdinc', 'valist_marshallers']: + # Mypy can't figure out that this is correct + if kwargs[k]: # type: ignore + cmd.append(f'--{k.replace("_", "-")}') + + install_header = kwargs['install_header'] + capture = False + + # https://github.com/GNOME/glib/commit/0fbc98097fac4d3e647684f344e508abae109fdf + if mesonlib.version_compare(self._get_native_glib_version(state), '>= 2.51.0'): + cmd += ['--output', '@OUTPUT@'] + else: + capture = True + + header_file = output + '.h' + h_cmd = cmd + ['--header', '@INPUT@'] + if new_genmarshal: + h_cmd += ['--pragma-once'] + header = build.CustomTarget( + output + '_h', + state.subdir, + state.subproject, + state.environment, + h_cmd, + sources, + [header_file], + install=install_header, + install_dir=[kwargs['install_dir']] if kwargs['install_dir'] else [], + install_tag=['devel'], + capture=capture, + depend_files=kwargs['depend_files'], + ) + + c_cmd = cmd + ['--body', '@INPUT@'] + extra_deps: T.List[build.CustomTarget] = [] + if mesonlib.version_compare(self._get_native_glib_version(state), '>= 2.53.4'): + # Silence any warnings about missing prototypes + c_cmd += ['--include-header', header_file] + extra_deps.append(header) + body = build.CustomTarget( + output + '_c', + state.subdir, + state.subproject, + state.environment, + c_cmd, + sources, + [f'{output}.c'], + capture=capture, + depend_files=kwargs['depend_files'], + extra_depends=extra_deps, + ) + + 
        rv = [body, header]
        return ModuleReturnValue(rv, rv)

    def _extract_vapi_packages(self, state: 'ModuleState', packages: T.List[T.Union[InternalDependency, str]],
                               ) -> T.Tuple[T.List[str], T.List[VapiTarget], T.List[str], T.List[str], T.List[str]]:
        '''
        Packages are special because we need to:
        - Get a list of packages for the .deps file
        - Get a list of depends for any VapiTargets
        - Get package name from VapiTargets
        - Add include dirs for any VapiTargets

        Returns (vapigen args, VapiTarget depends, package names,
        include dirs, plain-string packages passed through).
        '''
        if not packages:
            return [], [], [], [], []
        vapi_depends: T.List[VapiTarget] = []
        vapi_packages: T.List[str] = []
        vapi_includes: T.List[str] = []
        vapi_args: T.List[str] = []
        remaining_args = []
        for arg in packages:
            if isinstance(arg, InternalDependency):
                # Dependencies produced by generate_vapi(): mine their
                # VapiTargets for dirs/package names instead of passing the
                # dependency itself to vapigen.
                targets = [t for t in arg.sources if isinstance(t, VapiTarget)]
                for target in targets:
                    srcdir = os.path.join(state.environment.get_source_dir(),
                                          target.get_subdir())
                    outdir = os.path.join(state.environment.get_build_dir(),
                                          target.get_subdir())
                    outfile = target.get_outputs()[0][:-5] # Strip .vapi
                    vapi_args.append('--vapidir=' + outdir)
                    vapi_args.append('--girdir=' + outdir)
                    vapi_args.append('--pkg=' + outfile)
                    vapi_depends.append(target)
                    vapi_packages.append(outfile)
                    vapi_includes.append(srcdir)
            else:
                assert isinstance(arg, str), 'for mypy'
                vapi_args.append(f'--pkg={arg}')
                vapi_packages.append(arg)
                remaining_args.append(arg)

        # TODO: this is supposed to take IncludeDirs, but it never worked
        return vapi_args, vapi_depends, vapi_packages, vapi_includes, remaining_args

    def _generate_deps(self, state: 'ModuleState', library: str, packages: T.List[str], install_dir: str) -> build.Data:
        """Write <library>.deps (one package name per line) into the scratch
        dir and return a Data object installing it to *install_dir*."""
        outdir = state.environment.scratch_dir
        fname = os.path.join(outdir, library + '.deps')
        with open(fname, 'w', encoding='utf-8') as ofile:
            for package in packages:
                ofile.write(package + '\n')
        return build.Data([mesonlib.File(True, outdir, fname)], install_dir, install_dir, mesonlib.FileMode(),
state.subproject) + + def _get_vapi_link_with(self, target: build.CustomTarget) -> T.List[build.LibTypes]: + link_with: T.List[build.LibTypes] = [] + for dep in target.get_target_dependencies(): + if isinstance(dep, build.SharedLibrary): + link_with.append(dep) + elif isinstance(dep, GirTarget): + link_with += self._get_vapi_link_with(dep) + return link_with + + @typed_pos_args('gnome.generate_vapi', str) + @typed_kwargs( + 'gnome.generate_vapi', + INSTALL_KW, + INSTALL_DIR_KW, + KwargInfo( + 'sources', + ContainerTypeInfo(list, (str, GirTarget), allow_empty=False), + listify=True, + required=True, + ), + KwargInfo('vapi_dirs', ContainerTypeInfo(list, str), listify=True, default=[]), + KwargInfo('metadata_dirs', ContainerTypeInfo(list, str), listify=True, default=[]), + KwargInfo('gir_dirs', ContainerTypeInfo(list, str), listify=True, default=[]), + KwargInfo('packages', ContainerTypeInfo(list, (str, InternalDependency)), listify=True, default=[]), + ) + def generate_vapi(self, state: 'ModuleState', args: T.Tuple[str], kwargs: 'GenerateVapi') -> ModuleReturnValue: + created_values: T.List[T.Union[Dependency, build.Data]] = [] + library = args[0] + build_dir = os.path.join(state.environment.get_build_dir(), state.subdir) + source_dir = os.path.join(state.environment.get_source_dir(), state.subdir) + pkg_cmd, vapi_depends, vapi_packages, vapi_includes, packages = self._extract_vapi_packages(state, kwargs['packages']) + cmd: T.List[T.Union[str, 'ExternalProgram']] + cmd = [state.find_program('vapigen'), '--quiet', f'--library={library}', f'--directory={build_dir}'] + cmd.extend([f'--vapidir={d}' for d in kwargs['vapi_dirs']]) + cmd.extend([f'--metadatadir={d}' for d in kwargs['metadata_dirs']]) + cmd.extend([f'--girdir={d}' for d in kwargs['gir_dirs']]) + cmd += pkg_cmd + cmd += ['--metadatadir=' + source_dir] + + inputs = kwargs['sources'] + + link_with: T.List[build.LibTypes] = [] + for i in inputs: + if isinstance(i, str): + cmd.append(os.path.join(source_dir, i)) 
            elif isinstance(i, GirTarget):
                # A generated .gir: link against whatever the GIR links and
                # feed the generated file in the build dir to vapigen.
                link_with += self._get_vapi_link_with(i)
                subdir = os.path.join(state.environment.get_build_dir(),
                                      i.get_subdir())
                gir_file = os.path.join(subdir, i.get_outputs()[0])
                cmd.append(gir_file)

        vapi_output = library + '.vapi'
        datadir = state.environment.coredata.get_option(mesonlib.OptionKey('datadir'))
        assert isinstance(datadir, str), 'for mypy'
        install_dir = kwargs['install_dir'] or os.path.join(datadir, 'vala', 'vapi')

        if kwargs['install']:
            # We shouldn't need this locally but we install it
            deps_target = self._generate_deps(state, library, vapi_packages, install_dir)
            created_values.append(deps_target)
        vapi_target = VapiTarget(
            vapi_output,
            state.subdir,
            state.subproject,
            state.environment,
            command=cmd,
            sources=inputs,
            outputs=[vapi_output],
            extra_depends=vapi_depends,
            install=kwargs['install'],
            install_dir=[install_dir],
            install_tag=['devel'],
        )

        # So to try our best to get this to just work we need:
        # - link with the correct library
        # - include the vapi and dependent vapi files in sources
        # - add relevant directories to include dirs
        incs = [build.IncludeDirs(state.subdir, ['.'] + vapi_includes, False)]
        sources = [vapi_target] + vapi_depends
        rv = InternalDependency(None, incs, [], [], link_with, [], sources, [], {}, [], [])
        created_values.append(rv)
        return ModuleReturnValue(rv, created_values)

def initialize(interp: 'Interpreter') -> GnomeModule:
    """Module entry point: create the GnomeModule and register holder
    mappings so the custom target subclasses behave like custom targets in
    the interpreter.

    NOTE(review): `interpreter` here must be a module-level import (not the
    `interp` argument) — not visible in this chunk; confirm the file's
    imports provide it.
    """
    mod = GnomeModule(interp)
    mod.interpreter.append_holder_map(GResourceTarget, interpreter.CustomTargetHolder)
    mod.interpreter.append_holder_map(GResourceHeaderTarget, interpreter.CustomTargetHolder)
    mod.interpreter.append_holder_map(GirTarget, interpreter.CustomTargetHolder)
    mod.interpreter.append_holder_map(TypelibTarget, interpreter.CustomTargetHolder)
    mod.interpreter.append_holder_map(VapiTarget, interpreter.CustomTargetHolder)
    return mod
diff --git a/mesonbuild/modules/hotdoc.py
b/mesonbuild/modules/hotdoc.py new file mode 100644 index 0000000..b73d812 --- /dev/null +++ b/mesonbuild/modules/hotdoc.py @@ -0,0 +1,457 @@ +# Copyright 2018 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +'''This module provides helper functions for generating documentation using hotdoc''' + +import os +import subprocess + +from mesonbuild import mesonlib +from mesonbuild import mlog, build +from mesonbuild.coredata import MesonException +from . import ModuleReturnValue, ModuleInfo +from . 
import ExtensionModule +from ..dependencies import Dependency, InternalDependency +from ..interpreterbase import ( + InvalidArguments, noPosargs, noKwargs, typed_kwargs, FeatureDeprecated, + ContainerTypeInfo, KwargInfo, typed_pos_args +) +from ..interpreter import CustomTargetHolder +from ..interpreter.type_checking import NoneType +from ..programs import ExternalProgram + + +def ensure_list(value): + if not isinstance(value, list): + return [value] + return value + + +MIN_HOTDOC_VERSION = '0.8.100' + +file_types = (str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex) + + +class HotdocTargetBuilder: + + def __init__(self, name, state, hotdoc, interpreter, kwargs): + self.hotdoc = hotdoc + self.build_by_default = kwargs.pop('build_by_default', False) + self.kwargs = kwargs + self.name = name + self.state = state + self.interpreter = interpreter + self.include_paths = mesonlib.OrderedSet() + + self.builddir = state.environment.get_build_dir() + self.sourcedir = state.environment.get_source_dir() + self.subdir = state.subdir + self.build_command = state.environment.get_build_command() + + self.cmd = ['conf', '--project-name', name, "--disable-incremental-build", + '--output', os.path.join(self.builddir, self.subdir, self.name + '-doc')] + + self._extra_extension_paths = set() + self.extra_assets = set() + self.extra_depends = [] + self._subprojects = [] + + def process_known_arg(self, option, argname=None, value_processor=None): + if not argname: + argname = option.strip("-").replace("-", "_") + + value = self.kwargs.pop(argname) + if value is not None and value_processor: + value = value_processor(value) + + self.set_arg_value(option, value) + + def set_arg_value(self, option, value): + if value is None: + return + + if isinstance(value, bool): + if value: + self.cmd.append(option) + elif isinstance(value, list): + # Do not do anything on empty lists + if value: + # https://bugs.python.org/issue9334 (from 2010 :( ) + # The syntax with nargs=+ is 
inherently ambiguous + # A workaround for this case is to simply prefix with a space + # every value starting with a dash + escaped_value = [] + for e in value: + if isinstance(e, str) and e.startswith('-'): + escaped_value += [' %s' % e] + else: + escaped_value += [e] + if option: + self.cmd.extend([option] + escaped_value) + else: + self.cmd.extend(escaped_value) + else: + # argparse gets confused if value(s) start with a dash. + # When an option expects a single value, the unambiguous way + # to specify it is with = + if isinstance(value, str): + self.cmd.extend([f'{option}={value}']) + else: + self.cmd.extend([option, value]) + + def check_extra_arg_type(self, arg, value): + if isinstance(value, list): + for v in value: + self.check_extra_arg_type(arg, v) + return + + valid_types = (str, bool, mesonlib.File, build.IncludeDirs, build.CustomTarget, build.CustomTargetIndex, build.BuildTarget) + if not isinstance(value, valid_types): + raise InvalidArguments('Argument "{}={}" should be of type: {}.'.format( + arg, value, [t.__name__ for t in valid_types])) + + def process_extra_args(self): + for arg, value in self.kwargs.items(): + option = "--" + arg.replace("_", "-") + self.check_extra_arg_type(arg, value) + self.set_arg_value(option, value) + + def get_value(self, types, argname, default=None, value_processor=None, + mandatory=False, force_list=False): + if not isinstance(types, list): + types = [types] + try: + uvalue = value = self.kwargs.pop(argname) + if value_processor: + value = value_processor(value) + + for t in types: + if isinstance(value, t): + if force_list and not isinstance(value, list): + return [value], uvalue + return value, uvalue + raise MesonException(f"{argname} field value {value} is not valid," + f" valid types are {types}") + except KeyError: + if mandatory: + raise MesonException(f"{argname} mandatory field not found") + + if default is not None: + return default, default + + return None, None + + def add_extension_paths(self, paths): + 
for path in paths: + if path in self._extra_extension_paths: + continue + + self._extra_extension_paths.add(path) + self.cmd.extend(["--extra-extension-path", path]) + + def replace_dirs_in_string(self, string): + return string.replace("@SOURCE_ROOT@", self.sourcedir).replace("@BUILD_ROOT@", self.builddir) + + def process_gi_c_source_roots(self): + if self.hotdoc.run_hotdoc(['--has-extension=gi-extension']) != 0: + return + + value = self.kwargs.pop('gi_c_source_roots') + value.extend([ + os.path.join(self.sourcedir, self.state.root_subdir), + os.path.join(self.builddir, self.state.root_subdir) + ]) + + self.cmd += ['--gi-c-source-roots'] + value + + def process_dependencies(self, deps): + cflags = set() + for dep in mesonlib.listify(ensure_list(deps)): + if isinstance(dep, InternalDependency): + inc_args = self.state.get_include_args(dep.include_directories) + cflags.update([self.replace_dirs_in_string(x) + for x in inc_args]) + cflags.update(self.process_dependencies(dep.libraries)) + cflags.update(self.process_dependencies(dep.sources)) + cflags.update(self.process_dependencies(dep.ext_deps)) + elif isinstance(dep, Dependency): + cflags.update(dep.get_compile_args()) + elif isinstance(dep, (build.StaticLibrary, build.SharedLibrary)): + self.extra_depends.append(dep) + for incd in dep.get_include_dirs(): + cflags.update(incd.get_incdirs()) + elif isinstance(dep, HotdocTarget): + # Recurse in hotdoc target dependencies + self.process_dependencies(dep.get_target_dependencies()) + self._subprojects.extend(dep.subprojects) + self.process_dependencies(dep.subprojects) + self.include_paths.add(os.path.join(self.builddir, dep.hotdoc_conf.subdir)) + self.cmd += ['--extra-assets=' + p for p in dep.extra_assets] + self.add_extension_paths(dep.extra_extension_paths) + elif isinstance(dep, (build.CustomTarget, build.BuildTarget)): + self.extra_depends.append(dep) + elif isinstance(dep, build.CustomTargetIndex): + self.extra_depends.append(dep.target) + + return 
[f.strip('-I') for f in cflags] + + def process_extra_assets(self): + self._extra_assets = self.kwargs.pop('extra_assets') + + for assets_path in self._extra_assets: + self.cmd.extend(["--extra-assets", assets_path]) + + def process_subprojects(self): + value = self.kwargs.pop('subprojects') + + self.process_dependencies(value) + self._subprojects.extend(value) + + def flatten_config_command(self): + cmd = [] + for arg in mesonlib.listify(self.cmd, flatten=True): + if isinstance(arg, mesonlib.File): + arg = arg.absolute_path(self.state.environment.get_source_dir(), + self.state.environment.get_build_dir()) + elif isinstance(arg, build.IncludeDirs): + for inc_dir in arg.get_incdirs(): + cmd.append(os.path.join(self.sourcedir, arg.get_curdir(), inc_dir)) + cmd.append(os.path.join(self.builddir, arg.get_curdir(), inc_dir)) + + continue + elif isinstance(arg, (build.BuildTarget, build.CustomTarget)): + self.extra_depends.append(arg) + arg = self.interpreter.backend.get_target_filename_abs(arg) + elif isinstance(arg, build.CustomTargetIndex): + self.extra_depends.append(arg.target) + arg = self.interpreter.backend.get_target_filename_abs(arg) + + cmd.append(arg) + + return cmd + + def generate_hotdoc_config(self): + cwd = os.path.abspath(os.curdir) + ncwd = os.path.join(self.sourcedir, self.subdir) + mlog.log('Generating Hotdoc configuration for: ', mlog.bold(self.name)) + os.chdir(ncwd) + self.hotdoc.run_hotdoc(self.flatten_config_command()) + os.chdir(cwd) + + def ensure_file(self, value): + if isinstance(value, list): + res = [] + for val in value: + res.append(self.ensure_file(val)) + return res + + if isinstance(value, str): + return mesonlib.File.from_source_file(self.sourcedir, self.subdir, value) + + return value + + def ensure_dir(self, value): + if os.path.isabs(value): + _dir = value + else: + _dir = os.path.join(self.sourcedir, self.subdir, value) + + if not os.path.isdir(_dir): + raise InvalidArguments(f'"{_dir}" is not a directory.') + + return 
os.path.relpath(_dir, os.path.join(self.builddir, self.subdir)) + + def check_forbidden_args(self): + for arg in ['conf_file']: + if arg in self.kwargs: + raise InvalidArguments(f'Argument "{arg}" is forbidden.') + + def make_targets(self): + self.check_forbidden_args() + self.process_known_arg("--index", value_processor=self.ensure_file) + self.process_known_arg("--project-version") + self.process_known_arg("--sitemap", value_processor=self.ensure_file) + self.process_known_arg("--html-extra-theme", value_processor=self.ensure_dir) + self.include_paths.update(self.ensure_dir(v) for v in self.kwargs.pop('include_paths')) + self.process_known_arg('--c-include-directories', argname="dependencies", value_processor=self.process_dependencies) + self.process_gi_c_source_roots() + self.process_extra_assets() + self.add_extension_paths(self.kwargs.pop('extra_extension_paths')) + self.process_subprojects() + self.extra_depends.extend(self.kwargs.pop('depends')) + + install = self.kwargs.pop('install') + self.process_extra_args() + + fullname = self.name + '-doc' + hotdoc_config_name = fullname + '.json' + hotdoc_config_path = os.path.join( + self.builddir, self.subdir, hotdoc_config_name) + with open(hotdoc_config_path, 'w', encoding='utf-8') as f: + f.write('{}') + + self.cmd += ['--conf-file', hotdoc_config_path] + self.include_paths.add(os.path.join(self.builddir, self.subdir)) + self.include_paths.add(os.path.join(self.sourcedir, self.subdir)) + + depfile = os.path.join(self.builddir, self.subdir, self.name + '.deps') + self.cmd += ['--deps-file-dest', depfile] + + for path in self.include_paths: + self.cmd.extend(['--include-path', path]) + + if self.state.environment.coredata.get_option(mesonlib.OptionKey('werror', subproject=self.state.subproject)): + self.cmd.append('--fatal-warnings') + self.generate_hotdoc_config() + + target_cmd = self.build_command + ["--internal", "hotdoc"] + \ + self.hotdoc.get_command() + ['run', '--conf-file', hotdoc_config_name] + \ + 
['--builddir', os.path.join(self.builddir, self.subdir)] + + target = HotdocTarget(fullname, + subdir=self.subdir, + subproject=self.state.subproject, + environment=self.state.environment, + hotdoc_conf=mesonlib.File.from_built_file( + self.subdir, hotdoc_config_name), + extra_extension_paths=self._extra_extension_paths, + extra_assets=self._extra_assets, + subprojects=self._subprojects, + command=target_cmd, + extra_depends=self.extra_depends, + outputs=[fullname], + sources=[], + depfile=os.path.basename(depfile), + build_by_default=self.build_by_default) + + install_script = None + if install: + install_script = self.state.backend.get_executable_serialisation(self.build_command + [ + "--internal", "hotdoc", + "--install", os.path.join(fullname, 'html'), + '--name', self.name, + '--builddir', os.path.join(self.builddir, self.subdir)] + + self.hotdoc.get_command() + + ['run', '--conf-file', hotdoc_config_name]) + install_script.tag = 'doc' + + return (target, install_script) + + +class HotdocTargetHolder(CustomTargetHolder): + def __init__(self, target, interp): + super().__init__(target, interp) + self.methods.update({'config_path': self.config_path_method}) + + @noPosargs + @noKwargs + def config_path_method(self, *args, **kwargs): + conf = self.held_object.hotdoc_conf.absolute_path(self.interpreter.environment.source_dir, + self.interpreter.environment.build_dir) + return conf + + +class HotdocTarget(build.CustomTarget): + def __init__(self, name, subdir, subproject, hotdoc_conf, extra_extension_paths, extra_assets, + subprojects, environment, **kwargs): + super().__init__(name, subdir, subproject, environment, **kwargs, absolute_paths=True) + self.hotdoc_conf = hotdoc_conf + self.extra_extension_paths = extra_extension_paths + self.extra_assets = extra_assets + self.subprojects = subprojects + + def __getstate__(self): + # Make sure we do not try to pickle subprojects + res = self.__dict__.copy() + res['subprojects'] = [] + + return res + + +class 
HotDocModule(ExtensionModule): + + INFO = ModuleInfo('hotdoc', '0.48.0') + + def __init__(self, interpreter): + super().__init__(interpreter) + self.hotdoc = ExternalProgram('hotdoc') + if not self.hotdoc.found(): + raise MesonException('hotdoc executable not found') + version = self.hotdoc.get_version(interpreter) + if not mesonlib.version_compare(version, f'>={MIN_HOTDOC_VERSION}'): + raise MesonException(f'hotdoc {MIN_HOTDOC_VERSION} required but not found.)') + + def run_hotdoc(cmd): + return subprocess.run(self.hotdoc.get_command() + cmd, stdout=subprocess.DEVNULL).returncode + + self.hotdoc.run_hotdoc = run_hotdoc + self.methods.update({ + 'has_extensions': self.has_extensions, + 'generate_doc': self.generate_doc, + }) + + @noKwargs + @typed_pos_args('hotdoc.has_extensions', varargs=str, min_varargs=1) + def has_extensions(self, state, args, kwargs): + return self.hotdoc.run_hotdoc([f'--has-extension={extension}' for extension in args[0]]) == 0 + + @typed_pos_args('hotdoc.generate_doc', str) + @typed_kwargs( + 'hotdoc.generate_doc', + KwargInfo('sitemap', file_types, required=True), + KwargInfo('index', file_types, required=True), + KwargInfo('project_version', str, required=True), + KwargInfo('html_extra_theme', (str, NoneType)), + KwargInfo('include_paths', ContainerTypeInfo(list, str), listify=True, default=[]), + # --c-include-directories + KwargInfo( + 'dependencies', + ContainerTypeInfo(list, (Dependency, build.StaticLibrary, build.SharedLibrary, + build.CustomTarget, build.CustomTargetIndex)), + listify=True, + default=[], + ), + KwargInfo( + 'depends', + ContainerTypeInfo(list, (build.CustomTarget, build.CustomTargetIndex)), + listify=True, + default=[], + since='0.64.1', + ), + KwargInfo('gi_c_source_roots', ContainerTypeInfo(list, str), listify=True, default=[]), + KwargInfo('extra_assets', ContainerTypeInfo(list, str), listify=True, default=[]), + KwargInfo('extra_extension_paths', ContainerTypeInfo(list, str), listify=True, default=[]), + 
KwargInfo('subprojects', ContainerTypeInfo(list, HotdocTarget), listify=True, default=[]), + KwargInfo('install', bool, default=False), + allow_unknown=True + ) + def generate_doc(self, state, args, kwargs): + project_name = args[0] + if any(isinstance(x, (build.CustomTarget, build.CustomTargetIndex)) for x in kwargs['dependencies']): + FeatureDeprecated.single_use('hotdoc.generate_doc dependencies argument with custom_target', + '0.64.1', state.subproject, 'use `depends`', state.current_node) + builder = HotdocTargetBuilder(project_name, state, self.hotdoc, self.interpreter, kwargs) + target, install_script = builder.make_targets() + targets = [target] + if install_script: + targets.append(install_script) + + return ModuleReturnValue(targets[0], targets) + + +def initialize(interpreter): + mod = HotDocModule(interpreter) + mod.interpreter.append_holder_map(HotdocTarget, HotdocTargetHolder) + return mod diff --git a/mesonbuild/modules/i18n.py b/mesonbuild/modules/i18n.py new file mode 100644 index 0000000..fcb0aa7 --- /dev/null +++ b/mesonbuild/modules/i18n.py @@ -0,0 +1,390 @@ +# Copyright 2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +from os import path +import typing as T + +from . import ExtensionModule, ModuleReturnValue, ModuleInfo +from .. import build +from .. import mesonlib +from .. 
import mlog +from ..interpreter.type_checking import CT_BUILD_BY_DEFAULT, CT_INPUT_KW, INSTALL_TAG_KW, OUTPUT_KW, INSTALL_DIR_KW, INSTALL_KW, NoneType, in_set_validator +from ..interpreterbase import FeatureNew +from ..interpreterbase.decorators import ContainerTypeInfo, KwargInfo, noPosargs, typed_kwargs, typed_pos_args +from ..scripts.gettext import read_linguas + +if T.TYPE_CHECKING: + from typing_extensions import Literal, TypedDict + + from . import ModuleState + from ..build import Target + from ..interpreter import Interpreter + from ..interpreterbase import TYPE_var + from ..programs import ExternalProgram + + class MergeFile(TypedDict): + + input: T.List[T.Union[ + str, build.BuildTarget, build.CustomTarget, build.CustomTargetIndex, + build.ExtractedObjects, build.GeneratedList, ExternalProgram, + mesonlib.File]] + output: str + build_by_default: bool + install: bool + install_dir: T.Optional[str] + install_tag: T.Optional[str] + args: T.List[str] + data_dirs: T.List[str] + po_dir: str + type: Literal['xml', 'desktop'] + + class Gettext(TypedDict): + + args: T.List[str] + data_dirs: T.List[str] + install: bool + install_dir: T.Optional[str] + languages: T.List[str] + preset: T.Optional[str] + + class ItsJoinFile(TypedDict): + + input: T.List[T.Union[ + str, build.BuildTarget, build.CustomTarget, build.CustomTargetIndex, + build.ExtractedObjects, build.GeneratedList, ExternalProgram, + mesonlib.File]] + output: str + build_by_default: bool + install: bool + install_dir: T.Optional[str] + install_tag: T.Optional[str] + its_files: T.List[str] + mo_targets: T.List[T.Union[build.BuildTarget, build.CustomTarget, build.CustomTargetIndex]] + + +_ARGS: KwargInfo[T.List[str]] = KwargInfo( + 'args', + ContainerTypeInfo(list, str), + default=[], + listify=True, +) + +_DATA_DIRS: KwargInfo[T.List[str]] = KwargInfo( + 'data_dirs', + ContainerTypeInfo(list, str), + default=[], + listify=True +) + +PRESET_ARGS = { + 'glib': [ + '--from-code=UTF-8', + '--add-comments', + + 
# https://developer.gnome.org/glib/stable/glib-I18N.html + '--keyword=_', + '--keyword=N_', + '--keyword=C_:1c,2', + '--keyword=NC_:1c,2', + '--keyword=g_dcgettext:2', + '--keyword=g_dngettext:2,3', + '--keyword=g_dpgettext2:2c,3', + + '--flag=N_:1:pass-c-format', + '--flag=C_:2:pass-c-format', + '--flag=NC_:2:pass-c-format', + '--flag=g_dngettext:2:pass-c-format', + '--flag=g_strdup_printf:1:c-format', + '--flag=g_string_printf:2:c-format', + '--flag=g_string_append_printf:2:c-format', + '--flag=g_error_new:3:c-format', + '--flag=g_set_error:4:c-format', + '--flag=g_markup_printf_escaped:1:c-format', + '--flag=g_log:3:c-format', + '--flag=g_print:1:c-format', + '--flag=g_printerr:1:c-format', + '--flag=g_printf:1:c-format', + '--flag=g_fprintf:2:c-format', + '--flag=g_sprintf:2:c-format', + '--flag=g_snprintf:3:c-format', + ] +} + + +class I18nModule(ExtensionModule): + + INFO = ModuleInfo('i18n') + + def __init__(self, interpreter: 'Interpreter'): + super().__init__(interpreter) + self.methods.update({ + 'merge_file': self.merge_file, + 'gettext': self.gettext, + 'itstool_join': self.itstool_join, + }) + self.tools: T.Dict[str, T.Optional[ExternalProgram]] = { + 'itstool': None, + 'msgfmt': None, + 'msginit': None, + 'msgmerge': None, + 'xgettext': None, + } + + @staticmethod + def _get_data_dirs(state: 'ModuleState', dirs: T.Iterable[str]) -> T.List[str]: + """Returns source directories of relative paths""" + src_dir = path.join(state.environment.get_source_dir(), state.subdir) + return [path.join(src_dir, d) for d in dirs] + + @FeatureNew('i18n.merge_file', '0.37.0') + @noPosargs + @typed_kwargs( + 'i18n.merge_file', + CT_BUILD_BY_DEFAULT, + CT_INPUT_KW, + KwargInfo('install_dir', (str, NoneType)), + INSTALL_TAG_KW, + OUTPUT_KW, + INSTALL_KW, + _ARGS.evolve(since='0.51.0'), + _DATA_DIRS.evolve(since='0.41.0'), + KwargInfo('po_dir', str, required=True), + KwargInfo('type', str, default='xml', validator=in_set_validator({'xml', 'desktop'})), + ) + def 
merge_file(self, state: 'ModuleState', args: T.List['TYPE_var'], kwargs: 'MergeFile') -> ModuleReturnValue: + if self.tools['msgfmt'] is None or not self.tools['msgfmt'].found(): + self.tools['msgfmt'] = state.find_program('msgfmt', for_machine=mesonlib.MachineChoice.BUILD) + podir = path.join(state.build_to_src, state.subdir, kwargs['po_dir']) + + ddirs = self._get_data_dirs(state, kwargs['data_dirs']) + datadirs = '--datadirs=' + ':'.join(ddirs) if ddirs else None + + command: T.List[T.Union[str, build.BuildTarget, build.CustomTarget, + build.CustomTargetIndex, 'ExternalProgram', mesonlib.File]] = [] + command.extend(state.environment.get_build_command()) + command.extend([ + '--internal', 'msgfmthelper', + '--msgfmt=' + self.tools['msgfmt'].get_path(), + ]) + if datadirs: + command.append(datadirs) + command.extend(['@INPUT@', '@OUTPUT@', kwargs['type'], podir]) + if kwargs['args']: + command.append('--') + command.extend(kwargs['args']) + + build_by_default = kwargs['build_by_default'] + if build_by_default is None: + build_by_default = kwargs['install'] + + install_tag = [kwargs['install_tag']] if kwargs['install_tag'] is not None else None + + ct = build.CustomTarget( + '', + state.subdir, + state.subproject, + state.environment, + command, + kwargs['input'], + [kwargs['output']], + build_by_default=build_by_default, + install=kwargs['install'], + install_dir=[kwargs['install_dir']] if kwargs['install_dir'] is not None else None, + install_tag=install_tag, + ) + + return ModuleReturnValue(ct, [ct]) + + @typed_pos_args('i81n.gettext', str) + @typed_kwargs( + 'i18n.gettext', + _ARGS, + _DATA_DIRS.evolve(since='0.36.0'), + INSTALL_KW.evolve(default=True), + INSTALL_DIR_KW.evolve(since='0.50.0'), + KwargInfo('languages', ContainerTypeInfo(list, str), default=[], listify=True), + KwargInfo( + 'preset', + (str, NoneType), + validator=in_set_validator(set(PRESET_ARGS)), + since='0.37.0', + ), + ) + def gettext(self, state: 'ModuleState', args: T.Tuple[str], kwargs: 
'Gettext') -> ModuleReturnValue: + for tool, strict in [('msgfmt', True), ('msginit', False), ('msgmerge', False), ('xgettext', False)]: + if self.tools[tool] is None: + self.tools[tool] = state.find_program(tool, required=False, for_machine=mesonlib.MachineChoice.BUILD) + # still not found? + if not self.tools[tool].found(): + if strict: + mlog.warning('Gettext not found, all translation (po) targets will be ignored.', + once=True, location=state.current_node) + return ModuleReturnValue(None, []) + else: + mlog.warning(f'{tool!r} not found, maintainer targets will not work', + once=True, fatal=False, location=state.current_node) + packagename = args[0] + pkg_arg = f'--pkgname={packagename}' + + languages = kwargs['languages'] + lang_arg = '--langs=' + '@@'.join(languages) if languages else None + + _datadirs = ':'.join(self._get_data_dirs(state, kwargs['data_dirs'])) + datadirs = f'--datadirs={_datadirs}' if _datadirs else None + + extra_args = kwargs['args'] + targets: T.List['Target'] = [] + gmotargets: T.List['build.CustomTarget'] = [] + + preset = kwargs['preset'] + if preset: + preset_args = PRESET_ARGS[preset] + extra_args = list(mesonlib.OrderedSet(preset_args + extra_args)) + + extra_arg = '--extra-args=' + '@@'.join(extra_args) if extra_args else None + + source_root = path.join(state.source_root, state.root_subdir) + subdir = path.relpath(state.subdir, start=state.root_subdir) if state.subdir else None + + potargs = state.environment.get_build_command() + ['--internal', 'gettext', 'pot', pkg_arg] + potargs.append(f'--source-root={source_root}') + if subdir: + potargs.append(f'--subdir={subdir}') + if datadirs: + potargs.append(datadirs) + if extra_arg: + potargs.append(extra_arg) + if self.tools['xgettext'].found(): + potargs.append('--xgettext=' + self.tools['xgettext'].get_path()) + pottarget = build.RunTarget(packagename + '-pot', potargs, [], state.subdir, state.subproject, + state.environment, default_env=False) + targets.append(pottarget) + + 
install = kwargs['install'] + install_dir = kwargs['install_dir'] or state.environment.coredata.get_option(mesonlib.OptionKey('localedir')) + assert isinstance(install_dir, str), 'for mypy' + if not languages: + languages = read_linguas(path.join(state.environment.source_dir, state.subdir)) + for l in languages: + po_file = mesonlib.File.from_source_file(state.environment.source_dir, + state.subdir, l+'.po') + gmotarget = build.CustomTarget( + f'{packagename}-{l}.mo', + path.join(state.subdir, l, 'LC_MESSAGES'), + state.subproject, + state.environment, + [self.tools['msgfmt'], '@INPUT@', '-o', '@OUTPUT@'], + [po_file], + [f'{packagename}.mo'], + install=install, + # We have multiple files all installed as packagename+'.mo' in different install subdirs. + # What we really wanted to do, probably, is have a rename: kwarg, but that's not available + # to custom_targets. Crude hack: set the build target's subdir manually. + # Bonus: the build tree has something usable as an uninstalled bindtextdomain() target dir. 
+ install_dir=[path.join(install_dir, l, 'LC_MESSAGES')], + install_tag=['i18n'], + ) + targets.append(gmotarget) + gmotargets.append(gmotarget) + + allgmotarget = build.AliasTarget(packagename + '-gmo', gmotargets, state.subdir, state.subproject, + state.environment) + targets.append(allgmotarget) + + updatepoargs = state.environment.get_build_command() + ['--internal', 'gettext', 'update_po', pkg_arg] + updatepoargs.append(f'--source-root={source_root}') + if subdir: + updatepoargs.append(f'--subdir={subdir}') + if lang_arg: + updatepoargs.append(lang_arg) + if datadirs: + updatepoargs.append(datadirs) + if extra_arg: + updatepoargs.append(extra_arg) + for tool in ['msginit', 'msgmerge']: + if self.tools[tool].found(): + updatepoargs.append(f'--{tool}=' + self.tools[tool].get_path()) + updatepotarget = build.RunTarget(packagename + '-update-po', updatepoargs, [], state.subdir, state.subproject, + state.environment, default_env=False) + targets.append(updatepotarget) + + return ModuleReturnValue([gmotargets, pottarget, updatepotarget], targets) + + @FeatureNew('i18n.itstool_join', '0.62.0') + @noPosargs + @typed_kwargs( + 'i18n.itstool_join', + CT_BUILD_BY_DEFAULT, + CT_INPUT_KW, + KwargInfo('install_dir', (str, NoneType)), + INSTALL_TAG_KW, + OUTPUT_KW, + INSTALL_KW, + _ARGS.evolve(), + KwargInfo('its_files', ContainerTypeInfo(list, str)), + KwargInfo('mo_targets', ContainerTypeInfo(list, build.CustomTarget), required=True), + ) + def itstool_join(self, state: 'ModuleState', args: T.List['TYPE_var'], kwargs: 'ItsJoinFile') -> ModuleReturnValue: + if self.tools['itstool'] is None: + self.tools['itstool'] = state.find_program('itstool', for_machine=mesonlib.MachineChoice.BUILD) + mo_targets = kwargs['mo_targets'] + its_files = kwargs.get('its_files', []) + + mo_fnames = [] + for target in mo_targets: + mo_fnames.append(path.join(target.get_subdir(), target.get_outputs()[0])) + + command: T.List[T.Union[str, build.BuildTarget, build.CustomTarget, + 
build.CustomTargetIndex, 'ExternalProgram', mesonlib.File]] = [] + command.extend(state.environment.get_build_command()) + command.extend([ + '--internal', 'itstool', 'join', + '-i', '@INPUT@', + '-o', '@OUTPUT@', + '--itstool=' + self.tools['itstool'].get_path(), + ]) + if its_files: + for fname in its_files: + if not path.isabs(fname): + fname = path.join(state.environment.source_dir, state.subdir, fname) + command.extend(['--its', fname]) + command.extend(mo_fnames) + + build_by_default = kwargs['build_by_default'] + if build_by_default is None: + build_by_default = kwargs['install'] + + install_tag = [kwargs['install_tag']] if kwargs['install_tag'] is not None else None + + ct = build.CustomTarget( + '', + state.subdir, + state.subproject, + state.environment, + command, + kwargs['input'], + [kwargs['output']], + build_by_default=build_by_default, + extra_depends=mo_targets, + install=kwargs['install'], + install_dir=[kwargs['install_dir']] if kwargs['install_dir'] is not None else None, + install_tag=install_tag, + ) + + return ModuleReturnValue(ct, [ct]) + + +def initialize(interp: 'Interpreter') -> I18nModule: + return I18nModule(interp) diff --git a/mesonbuild/modules/icestorm.py b/mesonbuild/modules/icestorm.py new file mode 100644 index 0000000..c579148 --- /dev/null +++ b/mesonbuild/modules/icestorm.py @@ -0,0 +1,131 @@ +# Copyright 2017 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations +import itertools +import typing as T + +from . import ExtensionModule, ModuleReturnValue, ModuleInfo +from .. import build +from .. import mesonlib +from ..interpreter.type_checking import CT_INPUT_KW +from ..interpreterbase.decorators import KwargInfo, typed_kwargs, typed_pos_args + +if T.TYPE_CHECKING: + from typing_extensions import TypedDict + + from . import ModuleState + from ..interpreter import Interpreter + from ..programs import ExternalProgram + + class ProjectKwargs(TypedDict): + + sources: T.List[T.Union[mesonlib.FileOrString, build.GeneratedTypes]] + constraint_file: T.Union[mesonlib.FileOrString, build.GeneratedTypes] + +class IceStormModule(ExtensionModule): + + INFO = ModuleInfo('FPGA/Icestorm', '0.45.0', unstable=True) + + def __init__(self, interpreter: Interpreter) -> None: + super().__init__(interpreter) + self.tools: T.Dict[str, ExternalProgram] = {} + self.methods.update({ + 'project': self.project, + }) + + def detect_tools(self, state: ModuleState) -> None: + self.tools['yosys'] = state.find_program('yosys') + self.tools['arachne'] = state.find_program('arachne-pnr') + self.tools['icepack'] = state.find_program('icepack') + self.tools['iceprog'] = state.find_program('iceprog') + self.tools['icetime'] = state.find_program('icetime') + + @typed_pos_args('icestorm.project', str, + varargs=(str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, + build.GeneratedList)) + @typed_kwargs( + 'icestorm.project', + CT_INPUT_KW.evolve(name='sources'), + KwargInfo( + 'constraint_file', + (str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList), + required=True, + ) + ) + def project(self, state: ModuleState, + args: T.Tuple[str, T.List[T.Union[mesonlib.FileOrString, build.GeneratedTypes]]], + kwargs: ProjectKwargs) -> ModuleReturnValue: + if not self.tools: + self.detect_tools(state) + proj_name, arg_sources = args + all_sources = self.interpreter.source_strings_to_files( + 
list(itertools.chain(arg_sources, kwargs['sources']))) + + blif_target = build.CustomTarget( + f'{proj_name}_blif', + state.subdir, + state.subproject, + state.environment, + [self.tools['yosys'], '-q', '-p', 'synth_ice40 -blif @OUTPUT@', '@INPUT@'], + all_sources, + [f'{proj_name}.blif'], + ) + + asc_target = build.CustomTarget( + f'{proj_name}_asc', + state.subdir, + state.subproject, + state.environment, + [self.tools['arachne'], '-q', '-d', '1k', '-p', '@INPUT@', '-o', '@OUTPUT@'], + [kwargs['constraint_file'], blif_target], + [f'{proj_name}.asc'], + ) + + bin_target = build.CustomTarget( + f'{proj_name}_bin', + state.subdir, + state.subproject, + state.environment, + [self.tools['icepack'], '@INPUT@', '@OUTPUT@'], + [asc_target], + [f'{proj_name}.bin'], + build_by_default=True, + ) + + upload_target = build.RunTarget( + f'{proj_name}-upload', + [self.tools['iceprog'], bin_target], + [], + state.subdir, + state.subproject, + state.environment, + ) + + time_target = build.RunTarget( + f'{proj_name}-time', + [self.tools['icetime'], bin_target], + [], + state.subdir, + state.subproject, + state.environment, + ) + + return ModuleReturnValue( + None, + [blif_target, asc_target, bin_target, upload_target, time_target]) + + +def initialize(interp: Interpreter) -> IceStormModule: + return IceStormModule(interp) diff --git a/mesonbuild/modules/java.py b/mesonbuild/modules/java.py new file mode 100644 index 0000000..6861ee0 --- /dev/null +++ b/mesonbuild/modules/java.py @@ -0,0 +1,117 @@ +# Copyright 2021 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations

import pathlib
import typing as T

from mesonbuild import mesonlib
from mesonbuild.build import CustomTarget, CustomTargetIndex, GeneratedList, Target
from mesonbuild.compilers import detect_compiler_for
from mesonbuild.interpreterbase.decorators import ContainerTypeInfo, FeatureDeprecated, FeatureNew, KwargInfo, typed_pos_args, typed_kwargs
from mesonbuild.mesonlib import version_compare, MachineChoice
from . import NewExtensionModule, ModuleReturnValue, ModuleInfo
from ..interpreter.type_checking import NoneType

if T.TYPE_CHECKING:
    from . import ModuleState
    from ..compilers import Compiler
    from ..interpreter import Interpreter

class JavaModule(NewExtensionModule):

    """Provides java.native_headers() (and its deprecated predecessor
    java.generate_native_headers()) for generating JNI header files."""

    INFO = ModuleInfo('java', '0.60.0')

    def __init__(self, interpreter: Interpreter):
        super().__init__()
        self.methods.update({
            'generate_native_headers': self.generate_native_headers,
            'native_headers': self.native_headers,
        })

    def __get_java_compiler(self, state: ModuleState) -> Compiler:
        """Return the java compiler, detecting it on first use.

        Uses the BUILD machine's compiler table (headers are generated while
        building).
        """
        if 'java' not in state.environment.coredata.compilers[MachineChoice.BUILD]:
            detect_compiler_for(state.environment, 'java', MachineChoice.BUILD)
        return state.environment.coredata.compilers[MachineChoice.BUILD]['java']

    @FeatureNew('java.generate_native_headers', '0.62.0')
    @FeatureDeprecated('java.generate_native_headers', '1.0.0')
    @typed_pos_args(
        'java.generate_native_headers',
        varargs=(str, mesonlib.File, Target, CustomTargetIndex, GeneratedList))
    @typed_kwargs(
        'java.generate_native_headers',
        KwargInfo('classes', ContainerTypeInfo(list, str), default=[], listify=True, required=True),
        KwargInfo('package', (str, NoneType), default=None))
    def generate_native_headers(self, state: ModuleState, args: T.Tuple[T.List[mesonlib.FileOrString]],
                                kwargs: T.Dict[str, T.Optional[str]]) -> ModuleReturnValue:
        # Deprecated alias kept for backwards compatibility; see native_headers().
        return self.__native_headers(state, args, kwargs)

    @FeatureNew('java.native_headers', '1.0.0')
    @typed_pos_args(
        'java.native_headers',
        varargs=(str, mesonlib.File, Target, CustomTargetIndex, GeneratedList))
    @typed_kwargs(
        'java.native_headers',
        KwargInfo('classes', ContainerTypeInfo(list, str), default=[], listify=True, required=True),
        KwargInfo('package', (str, NoneType), default=None))
    def native_headers(self, state: ModuleState, args: T.Tuple[T.List[mesonlib.FileOrString]],
                       kwargs: T.Dict[str, T.Optional[str]]) -> ModuleReturnValue:
        """Generate JNI native headers for the given classes/sources."""
        return self.__native_headers(state, args, kwargs)

    def __native_headers(self, state: ModuleState, args: T.Tuple[T.List[mesonlib.FileOrString]],
                         kwargs: T.Dict[str, T.Optional[str]]) -> ModuleReturnValue:
        """Shared implementation: build a CustomTarget that runs `javac -h`.

        Header file names are derived from 'package' and 'classes' by
        replacing dots/dashes with underscores, mirroring javac's naming.
        """
        classes = T.cast('T.List[str]', kwargs.get('classes'))
        package = kwargs.get('package')

        if package:
            sanitized_package = package.replace("-", "_").replace(".", "_")

        headers: T.List[str] = []
        for clazz in classes:
            sanitized_clazz = clazz.replace(".", "_")
            if package:
                headers.append(f'{sanitized_package}_{sanitized_clazz}.h')
            else:
                headers.append(f'{sanitized_clazz}.h')

        javac = self.__get_java_compiler(state)

        command = mesonlib.listify([
            javac.exelist,
            '-d',
            '@PRIVATE_DIR@',
            '-h',
            state.subdir,
            '@INPUT@',
        ])

        # Target name prefix: the package if given, else the first class.
        prefix = classes[0] if not package else package

        target = CustomTarget(f'{prefix}-native-headers',
                              state.subdir,
                              state.subproject,
                              state.environment,
                              command,
                              sources=args[0], outputs=headers, backend=state.backend)

        # It is only known that 1.8.0 won't pre-create the directory. 11 and 16
        # do not exhibit this behavior.
        if version_compare(javac.version, '1.8.0'):
            pathlib.Path(state.backend.get_target_private_dir_abs(target)).mkdir(parents=True, exist_ok=True)

        return ModuleReturnValue(target, [target])

def initialize(*args: T.Any, **kwargs: T.Any) -> JavaModule:
    """Module entry point invoked by Meson's import('java')."""
    return JavaModule(*args, **kwargs)
diff --git a/mesonbuild/modules/keyval.py b/mesonbuild/modules/keyval.py
new file mode 100644
index 0000000..1ba2f1c
--- /dev/null
+++ b/mesonbuild/modules/keyval.py
@@ -0,0 +1,75 @@
# Copyright 2017, 2019 The Meson development team

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

# http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations

import os
import typing as T

from . import ExtensionModule, ModuleInfo
from .. import mesonlib
from ..interpreterbase import noKwargs, typed_pos_args

if T.TYPE_CHECKING:
    from ..interpreter import Interpreter
    from . 
import ModuleState + +class KeyvalModule(ExtensionModule): + + INFO = ModuleInfo('keyval', '0.55.0', stabilized='0.56.0') + + def __init__(self, interp: 'Interpreter'): + super().__init__(interp) + self.methods.update({ + 'load': self.load, + }) + + @staticmethod + def _load_file(path_to_config: str) -> T.Dict[str, str]: + result: T.Dict[str, str] = {} + try: + with open(path_to_config, encoding='utf-8') as f: + for line in f: + if '#' in line: + comment_idx = line.index('#') + line = line[:comment_idx] + line = line.strip() + try: + name, val = line.split('=', 1) + except ValueError: + continue + result[name.strip()] = val.strip() + except OSError as e: + raise mesonlib.MesonException(f'Failed to load {path_to_config}: {e}') + + return result + + @noKwargs + @typed_pos_args('keyval.laod', (str, mesonlib.File)) + def load(self, state: 'ModuleState', args: T.Tuple['mesonlib.FileOrString'], kwargs: T.Dict[str, T.Any]) -> T.Dict[str, str]: + s = args[0] + is_built = False + if isinstance(s, mesonlib.File): + is_built = is_built or s.is_built + s = s.absolute_path(self.interpreter.environment.source_dir, self.interpreter.environment.build_dir) + else: + s = os.path.join(self.interpreter.environment.source_dir, s) + + if not is_built: + self.interpreter.build_def_files.add(s) + + return self._load_file(s) + + +def initialize(interp: 'Interpreter') -> KeyvalModule: + return KeyvalModule(interp) diff --git a/mesonbuild/modules/modtest.py b/mesonbuild/modules/modtest.py new file mode 100644 index 0000000..15f8237 --- /dev/null +++ b/mesonbuild/modules/modtest.py @@ -0,0 +1,44 @@ +# Copyright 2015 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at

# http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import annotations
import typing as T

from . import ExtensionModule, ModuleInfo
from ..interpreterbase import noKwargs, noPosargs

if T.TYPE_CHECKING:
    from . import ModuleState
    from ..interpreter.interpreter import Interpreter
    from ..interpreterbase.baseobjects import TYPE_kwargs, TYPE_var


class TestModule(ExtensionModule):

    """Minimal example module ('modtest') exercising the module machinery;
    exposes a single print_hello() method."""

    INFO = ModuleInfo('modtest')

    def __init__(self, interpreter: Interpreter) -> None:
        super().__init__(interpreter)
        self.methods.update({
            'print_hello': self.print_hello,
        })

    @noKwargs
    @noPosargs
    def print_hello(self, state: ModuleState, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> None:
        """Print a fixed greeting; accepts no arguments."""
        print('Hello from a Meson module')


def initialize(interp: Interpreter) -> TestModule:
    """Module entry point invoked by Meson's import('modtest')."""
    return TestModule(interp)
diff --git a/mesonbuild/modules/pkgconfig.py b/mesonbuild/modules/pkgconfig.py
new file mode 100644
index 0000000..af8467c
--- /dev/null
+++ b/mesonbuild/modules/pkgconfig.py
@@ -0,0 +1,742 @@
# Copyright 2015-2022 The Meson development team

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

# http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import annotations
from collections import defaultdict
from dataclasses import dataclass
from pathlib import PurePath
import os
import typing as T

from . import NewExtensionModule, ModuleInfo
from . import ModuleReturnValue
from .. import build
from .. import dependencies
from .. import mesonlib
from .. import mlog
from ..coredata import BUILTIN_DIR_OPTIONS
from ..dependencies import ThreadDependency
from ..interpreter.type_checking import D_MODULE_VERSIONS_KW, INSTALL_DIR_KW, VARIABLES_KW, NoneType
from ..interpreterbase import FeatureNew, FeatureDeprecated
from ..interpreterbase.decorators import ContainerTypeInfo, KwargInfo, typed_kwargs, typed_pos_args

if T.TYPE_CHECKING:
    from typing_extensions import TypedDict

    from . import ModuleState
    from .. import mparser
    from ..interpreter import Interpreter

    # Anything acceptable where a dependency-like value is expected.
    ANY_DEP = T.Union[dependencies.Dependency, build.BuildTargetTypes, str]
    # Anything acceptable as a Libs:/Libs.private: entry.
    LIBS = T.Union[build.LibTypes, str]

    class GenerateKw(TypedDict):

        # Keyword arguments accepted by pkgconfig.generate().
        version: T.Optional[str]
        name: T.Optional[str]
        filebase: T.Optional[str]
        description: T.Optional[str]
        url: str
        subdirs: T.List[str]
        conflicts: T.List[str]
        dataonly: bool
        libraries: T.List[ANY_DEP]
        libraries_private: T.List[ANY_DEP]
        requires: T.List[T.Union[str, build.StaticLibrary, build.SharedLibrary, dependencies.Dependency]]
        requires_private: T.List[T.Union[str, build.StaticLibrary, build.SharedLibrary, dependencies.Dependency]]
        install_dir: T.Optional[str]
        d_module_versions: T.List[T.Union[str, int]]
        extra_cflags: T.List[str]
        variables: T.Dict[str, str]
        uninstalled_variables: T.Dict[str, str]
        unescaped_variables: T.Dict[str, str]
        unescaped_uninstalled_variables: T.Dict[str, str]


# Shared definition of the 'libraries'/'libraries_private' keyword.
_PKG_LIBRARIES: KwargInfo[T.List[T.Union[str, dependencies.Dependency, build.SharedLibrary, build.StaticLibrary, build.CustomTarget, 
                                         build.CustomTargetIndex]]] = KwargInfo(
    'libraries',
    ContainerTypeInfo(list, (str, dependencies.Dependency,
                             build.SharedLibrary, build.StaticLibrary,
                             build.CustomTarget, build.CustomTargetIndex)),
    default=[],
    listify=True,
)

# Shared definition of the 'requires'/'requires_private' keyword.
_PKG_REQUIRES: KwargInfo[T.List[T.Union[str, build.SharedLibrary, build.StaticLibrary, dependencies.Dependency]]] = KwargInfo(
    'requires',
    ContainerTypeInfo(list, (str, build.SharedLibrary, build.StaticLibrary, dependencies.Dependency)),
    default=[],
    listify=True,
)


def _as_str(obj: object) -> str:
    """Narrow an option value to str (for mypy); asserts the value is a str."""
    assert isinstance(obj, str)
    return obj


@dataclass
class MetaData:

    """Bookkeeping for a previously generated .pc file, keyed by target id."""

    filebase: str
    display_name: str
    location: mparser.BaseNode
    # Set once the deprecation warning for this entry has been emitted.
    warned: bool = False


class DependenciesHelper:

    """Accumulates Libs/Libs.private/Requires/Requires.private/Cflags content
    for one generated pkg-config file."""

    def __init__(self, state: ModuleState, name: str, metadata: T.Dict[str, MetaData]) -> None:
        self.state = state
        self.name = name
        self.pub_libs: T.List[LIBS] = []
        self.pub_reqs: T.List[str] = []
        self.priv_libs: T.List[LIBS] = []
        self.priv_reqs: T.List[str] = []
        self.cflags: T.List[str] = []
        # Per-package version constraints, e.g. {'glib-2.0': {'>= 2.50'}}.
        self.version_reqs: T.DefaultDict[str, T.Set[str]] = defaultdict(set)
        self.link_whole_targets: T.List[T.Union[build.CustomTarget, build.CustomTargetIndex, build.StaticLibrary]] = []
        self.metadata = metadata

    def add_pub_libs(self, libs: T.List[ANY_DEP]) -> None:
        p_libs, reqs, cflags = self._process_libs(libs, True)
        self.pub_libs = p_libs + self.pub_libs # prepend to preserve dependencies
        self.pub_reqs += reqs
        self.cflags += cflags

    def add_priv_libs(self, libs: T.List[ANY_DEP]) -> None:
        p_libs, reqs, _ = self._process_libs(libs, False)
        self.priv_libs = p_libs + self.priv_libs
        self.priv_reqs += reqs

    def add_pub_reqs(self, reqs: T.List[T.Union[str, build.StaticLibrary, build.SharedLibrary, dependencies.Dependency]]) -> None:
        self.pub_reqs += self._process_reqs(reqs)

    def add_priv_reqs(self, reqs: T.List[T.Union[str, build.StaticLibrary, build.SharedLibrary, dependencies.Dependency]]) -> None:
self.priv_reqs += self._process_reqs(reqs) + + def _check_generated_pc_deprecation(self, obj: T.Union[build.CustomTarget, build.CustomTargetIndex, build.StaticLibrary, build.SharedLibrary]) -> None: + if obj.get_id() in self.metadata: + return + data = self.metadata[obj.get_id()] + if data.warned: + return + mlog.deprecation('Library', mlog.bold(obj.name), 'was passed to the ' + '"libraries" keyword argument of a previous call ' + 'to generate() method instead of first positional ' + 'argument.', 'Adding', mlog.bold(data.display_name), + 'to "Requires" field, but this is a deprecated ' + 'behaviour that will change in a future version ' + 'of Meson. Please report the issue if this ' + 'warning cannot be avoided in your case.', + location=data.location) + data.warned = True + + def _process_reqs(self, reqs: T.Sequence[T.Union[str, build.StaticLibrary, build.SharedLibrary, dependencies.Dependency]]) -> T.List[str]: + '''Returns string names of requirements''' + processed_reqs: T.List[str] = [] + for obj in mesonlib.listify(reqs): + if not isinstance(obj, str): + FeatureNew.single_use('pkgconfig.generate requirement from non-string object', '0.46.0', self.state.subproject) + if (isinstance(obj, (build.CustomTarget, build.CustomTargetIndex, build.SharedLibrary, build.StaticLibrary)) + and obj.get_id() in self.metadata): + self._check_generated_pc_deprecation(obj) + processed_reqs.append(self.metadata[obj.get_id()].filebase) + elif isinstance(obj, dependencies.PkgConfigDependency): + if obj.found(): + processed_reqs.append(obj.name) + self.add_version_reqs(obj.name, obj.version_reqs) + elif isinstance(obj, str): + name, version_req = self.split_version_req(obj) + processed_reqs.append(name) + self.add_version_reqs(name, [version_req] if version_req is not None else None) + elif isinstance(obj, dependencies.Dependency) and not obj.found(): + pass + elif isinstance(obj, ThreadDependency): + pass + else: + raise mesonlib.MesonException('requires argument not a string, ' + 
                                             'library with pkgconfig-generated file '
                                             f'or pkgconfig-dependency object, got {obj!r}')
        return processed_reqs

    def add_cflags(self, cflags: T.List[str]) -> None:
        self.cflags += mesonlib.stringlistify(cflags)

    def _process_libs(
            self, libs: T.List[ANY_DEP], public: bool
            ) -> T.Tuple[T.List[T.Union[str, build.SharedLibrary, build.StaticLibrary, build.CustomTarget, build.CustomTargetIndex]], T.List[str], T.List[str]]:
        """Classify `libs` into (libs, requires, cflags) for the .pc file.

        `public` controls whether transitive static-library dependencies get
        promoted to the public lists.
        """
        libs = mesonlib.listify(libs)
        processed_libs: T.List[T.Union[str, build.SharedLibrary, build.StaticLibrary, build.CustomTarget, build.CustomTargetIndex]] = []
        processed_reqs: T.List[str] = []
        processed_cflags: T.List[str] = []
        for obj in libs:
            # Libraries that already have a generated .pc file become a
            # Requires: entry instead of link flags.
            if (isinstance(obj, (build.CustomTarget, build.CustomTargetIndex, build.SharedLibrary, build.StaticLibrary))
                    and obj.get_id() in self.metadata):
                self._check_generated_pc_deprecation(obj)
                processed_reqs.append(self.metadata[obj.get_id()].filebase)
            elif isinstance(obj, dependencies.ValgrindDependency):
                # NOTE(review): valgrind is deliberately skipped here —
                # presumably its link args must not leak into .pc files; confirm.
                pass
            elif isinstance(obj, dependencies.PkgConfigDependency):
                if obj.found():
                    processed_reqs.append(obj.name)
                    self.add_version_reqs(obj.name, obj.version_reqs)
            elif isinstance(obj, dependencies.InternalDependency):
                if obj.found():
                    processed_libs += obj.get_link_args()
                    processed_cflags += obj.get_compile_args()
                    self._add_lib_dependencies(obj.libraries, obj.whole_libraries, obj.ext_deps, public, private_external_deps=True)
            elif isinstance(obj, dependencies.Dependency):
                if obj.found():
                    processed_libs += obj.get_link_args()
                    processed_cflags += obj.get_compile_args()
            elif isinstance(obj, build.SharedLibrary) and obj.shared_library_only:
                # Do not pull dependencies for shared libraries because they are
                # only required for static linking. Adding private requires has
                # the side effect of exposing their cflags, which is the
                # intended behaviour of pkg-config but force Debian to add more
                # than needed build deps.
                # See https://bugs.freedesktop.org/show_bug.cgi?id=105572
                processed_libs.append(obj)
            elif isinstance(obj, (build.SharedLibrary, build.StaticLibrary)):
                processed_libs.append(obj)
                # If there is a static library in `Libs:` all its deps must be
                # public too, otherwise the generated pc file will never be
                # usable without --static.
                self._add_lib_dependencies(obj.link_targets,
                                           obj.link_whole_targets,
                                           obj.external_deps,
                                           isinstance(obj, build.StaticLibrary) and public)
            elif isinstance(obj, (build.CustomTarget, build.CustomTargetIndex)):
                if not obj.is_linkable_target():
                    raise mesonlib.MesonException('library argument contains a not linkable custom_target.')
                FeatureNew.single_use('custom_target in pkgconfig.generate libraries', '0.58.0', self.state.subproject)
                processed_libs.append(obj)
            elif isinstance(obj, str):
                processed_libs.append(obj)
            else:
                raise mesonlib.MesonException(f'library argument of type {type(obj).__name__} not a string, library or dependency object.')

        return processed_libs, processed_reqs, processed_cflags

    def _add_lib_dependencies(
            self, link_targets: T.Sequence[build.BuildTargetTypes],
            link_whole_targets: T.Sequence[T.Union[build.StaticLibrary, build.CustomTarget, build.CustomTargetIndex]],
            external_deps: T.List[dependencies.Dependency],
            public: bool,
            private_external_deps: bool = False) -> None:
        """Recursively add a library's linked targets, link-whole targets and
        external dependencies to the public or private lists."""
        add_libs = self.add_pub_libs if public else self.add_priv_libs
        # Recursively add all linked libraries
        for t in link_targets:
            # Internal libraries (uninstalled static library) will be promoted
            # to link_whole, treat them as such here.
            if t.is_internal():
                # `is_internal` shouldn't return True for anything but a
                # StaticLibrary, or a CustomTarget that is a StaticLibrary
                assert isinstance(t, (build.StaticLibrary, build.CustomTarget, build.CustomTargetIndex)), 'for mypy'
                self._add_link_whole(t, public)
            else:
                add_libs([t])
        for t in link_whole_targets:
            self._add_link_whole(t, public)
        # And finally its external dependencies
        if private_external_deps:
            self.add_priv_libs(T.cast('T.List[ANY_DEP]', external_deps))
        else:
            add_libs(T.cast('T.List[ANY_DEP]', external_deps))

    def _add_link_whole(self, t: T.Union[build.CustomTarget, build.CustomTargetIndex, build.StaticLibrary], public: bool) -> None:
        # Don't include static libraries that we link_whole. But we still need to
        # include their dependencies: a static library we link_whole
        # could itself link to a shared library or an installed static library.
        # Keep track of link_whole_targets so we can remove them from our
        # lists in case a library is link_with and link_whole at the same time.
        # See remove_dups() below.
        self.link_whole_targets.append(t)
        if isinstance(t, build.BuildTarget):
            self._add_lib_dependencies(t.link_targets, t.link_whole_targets, t.external_deps, public)

    def add_version_reqs(self, name: str, version_reqs: T.Optional[T.List[str]]) -> None:
        """Record version constraints for package `name` (None is a no-op)."""
        if version_reqs:
            # Note that pkg-config is picky about whitespace.
            # 'foo > 1.2' is ok but 'foo>1.2' is not.
            # foo, bar' is ok, but 'foo,bar' is not.
            self.version_reqs[name].update(version_reqs)

    def split_version_req(self, s: str) -> T.Tuple[str, T.Optional[str]]:
        """Split 'foo >= 1.0' into ('foo', '>= 1.0'); (s, None) if no operator."""
        for op in ['>=', '<=', '!=', '==', '=', '>', '<']:
            pos = s.find(op)
            if pos > 0:
                return s[0:pos].strip(), s[pos:].strip()
        return s, None

    def format_vreq(self, vreq: str) -> str:
        # vreq are '>=1.0' and pkgconfig wants '>= 1.0'
        for op in ['>=', '<=', '!=', '==', '=', '>', '<']:
            if vreq.startswith(op):
                return op + ' ' + vreq[len(op):]
        return vreq

    def format_reqs(self, reqs: T.List[str]) -> str:
        """Render a Requires: value, attaching any recorded version reqs."""
        result: T.List[str] = []
        for name in reqs:
            vreqs = self.version_reqs.get(name, None)
            if vreqs:
                result += [name + ' ' + self.format_vreq(vreq) for vreq in vreqs]
            else:
                result += [name]
        return ', '.join(result)

    def remove_dups(self) -> None:
        """De-duplicate all accumulated lists, honouring priority: public
        entries suppress private ones, and Requires suppress Libs."""
        # Set of ids that have already been handled and should not be added any more
        exclude: T.Set[str] = set()

        # We can't just check if 'x' is excluded because we could have copies of
        # the same SharedLibrary object for example.
        def _ids(x: T.Union[str, build.CustomTarget, build.CustomTargetIndex, build.StaticLibrary, build.SharedLibrary]) -> T.Iterable[str]:
            if isinstance(x, str):
                yield x
            else:
                if x.get_id() in self.metadata:
                    yield self.metadata[x.get_id()].display_name
                yield x.get_id()

        # Exclude 'x' in all its forms and return if it was already excluded
        def _add_exclude(x: T.Union[str, build.CustomTarget, build.CustomTargetIndex, build.StaticLibrary, build.SharedLibrary]) -> bool:
            was_excluded = False
            for i in _ids(x):
                if i in exclude:
                    was_excluded = True
                else:
                    exclude.add(i)
            return was_excluded

        # link_whole targets are already part of other targets, exclude them all.
        for t in self.link_whole_targets:
            _add_exclude(t)

        # Mypy thinks these overlap, but since List is invariant they don't,
        # `List[str]`` is not a valid input to `List[str | BuildTarget]`.
        # pylance/pyright gets this right, but for mypy we have to ignore the
        # error
        @T.overload
        def _fn(xs: T.List[str], libs: bool = False) -> T.List[str]: ... # type: ignore

        @T.overload
        def _fn(xs: T.List[LIBS], libs: bool = False) -> T.List[LIBS]: ...

        def _fn(xs: T.Union[T.List[str], T.List[LIBS]], libs: bool = False) -> T.Union[T.List[str], T.List[LIBS]]:
            # Remove duplicates whilst preserving original order
            result = []
            for x in xs:
                # Don't de-dup unknown strings to avoid messing up arguments like:
                # ['-framework', 'CoreAudio', '-framework', 'CoreMedia']
                known_flags = ['-pthread']
                cannot_dedup = libs and isinstance(x, str) and \
                    not x.startswith(('-l', '-L')) and \
                    x not in known_flags
                if not cannot_dedup and _add_exclude(x):
                    continue
                result.append(x)
            return result

        # Handle lists in priority order: public items can be excluded from
        # private and Requires can excluded from Libs.
        self.pub_reqs = _fn(self.pub_reqs)
        self.pub_libs = _fn(self.pub_libs, True)
        self.priv_reqs = _fn(self.priv_reqs)
        self.priv_libs = _fn(self.priv_libs, True)
        # Reset exclude list just in case some values can be both cflags and libs.
        exclude = set()
        self.cflags = _fn(self.cflags)

class PkgConfigModule(NewExtensionModule):

    """pkgconfig module: generates .pc files via pkgconfig.generate()."""

    INFO = ModuleInfo('pkgconfig')

    # Track already generated pkg-config files This is stored as a class
    # variable so that multiple `import()`s share metadata
    _metadata: T.ClassVar[T.Dict[str, MetaData]] = {}

    def __init__(self) -> None:
        super().__init__()
        self.methods.update({
            'generate': self.generate,
        })

    def _get_lname(self, l: T.Union[build.SharedLibrary, build.StaticLibrary, build.CustomTarget, build.CustomTargetIndex],
                   msg: str, pcfile: str) -> str:
        """Derive the name to use in a '-l' flag for `l`, warning via `msg`
        when the flag may not actually locate the library."""
        if isinstance(l, (build.CustomTargetIndex, build.CustomTarget)):
            basename = os.path.basename(l.get_filename())
            name = os.path.splitext(basename)[0]
            if name.startswith('lib'):
                name = name[3:]
            return name
        # Nothing special
        if not l.name_prefix_set:
            return l.name
        # Sometimes people want the library to start with 'lib' everywhere,
        # which is achieved by setting name_prefix to '' and the target name to
        # 'libfoo'. In that case, try to get the pkg-config '-lfoo' arg correct.
        if l.prefix == '' and l.name.startswith('lib'):
            return l.name[3:]
        # If the library is imported via an import library which is always
        # named after the target name, '-lfoo' is correct.
        if isinstance(l, build.SharedLibrary) and l.import_filename:
            return l.name
        # In other cases, we can't guarantee that the compiler will be able to
        # find the library via '-lfoo', so tell the user that.
        mlog.warning(msg.format(l.name, 'name_prefix', l.name, pcfile))
        return l.name

    def _escape(self, value: T.Union[str, PurePath]) -> str:
        '''
        We cannot use quote_arg because it quotes with ' and " which does not
        work with pkg-config and pkgconf at all.
+ ''' + # We should always write out paths with / because pkg-config requires + # spaces to be quoted with \ and that messes up on Windows: + # https://bugs.freedesktop.org/show_bug.cgi?id=103203 + if isinstance(value, PurePath): + value = value.as_posix() + return value.replace(' ', r'\ ') + + def _make_relative(self, prefix: T.Union[PurePath, str], subdir: T.Union[PurePath, str]) -> str: + prefix = PurePath(prefix) + subdir = PurePath(subdir) + try: + libdir = subdir.relative_to(prefix) + except ValueError: + libdir = subdir + # pathlib joining makes sure absolute libdir is not appended to '${prefix}' + return ('${prefix}' / libdir).as_posix() + + def _generate_pkgconfig_file(self, state: ModuleState, deps: DependenciesHelper, + subdirs: T.List[str], name: T.Optional[str], + description: T.Optional[str], url: str, version: str, + pcfile: str, conflicts: T.List[str], + variables: T.List[T.Tuple[str, str]], + unescaped_variables: T.List[T.Tuple[str, str]], + uninstalled: bool = False, dataonly: bool = False, + pkgroot: T.Optional[str] = None) -> None: + coredata = state.environment.get_coredata() + referenced_vars = set() + optnames = [x.name for x in BUILTIN_DIR_OPTIONS.keys()] + + if not dataonly: + # includedir is always implied, although libdir may not be + # needed for header-only libraries + referenced_vars |= {'prefix', 'includedir'} + if deps.pub_libs or deps.priv_libs: + referenced_vars |= {'libdir'} + # also automatically infer variables referenced in other variables + implicit_vars_warning = False + redundant_vars_warning = False + varnames = set() + varstrings = set() + for k, v in variables + unescaped_variables: + varnames |= {k} + varstrings |= {v} + for optname in optnames: + optvar = f'${{{optname}}}' + if any(x.startswith(optvar) for x in varstrings): + if optname in varnames: + redundant_vars_warning = True + else: + # these 3 vars were always "implicit" + if dataonly or optname not in {'prefix', 'includedir', 'libdir'}: + implicit_vars_warning = 
True + referenced_vars |= {'prefix', optname} + if redundant_vars_warning: + FeatureDeprecated.single_use('pkgconfig.generate variable for builtin directories', '0.62.0', + state.subproject, 'They will be automatically included when referenced', + state.current_node) + if implicit_vars_warning: + FeatureNew.single_use('pkgconfig.generate implicit variable for builtin directories', '0.62.0', + state.subproject, location=state.current_node) + + if uninstalled: + outdir = os.path.join(state.environment.build_dir, 'meson-uninstalled') + if not os.path.exists(outdir): + os.mkdir(outdir) + prefix = PurePath(state.environment.get_build_dir()) + srcdir = PurePath(state.environment.get_source_dir()) + else: + outdir = state.environment.scratch_dir + prefix = PurePath(_as_str(coredata.get_option(mesonlib.OptionKey('prefix')))) + if pkgroot: + pkgroot_ = PurePath(pkgroot) + if not pkgroot_.is_absolute(): + pkgroot_ = prefix / pkgroot + elif prefix not in pkgroot_.parents: + raise mesonlib.MesonException('Pkgconfig prefix cannot be outside of the prefix ' + 'when pkgconfig.relocatable=true. 
' + f'Pkgconfig prefix is {pkgroot_.as_posix()}.') + prefix = PurePath('${pcfiledir}', os.path.relpath(prefix, pkgroot_)) + fname = os.path.join(outdir, pcfile) + with open(fname, 'w', encoding='utf-8') as ofile: + for optname in optnames: + if optname in referenced_vars - varnames: + if optname == 'prefix': + ofile.write('prefix={}\n'.format(self._escape(prefix))) + else: + dirpath = PurePath(_as_str(coredata.get_option(mesonlib.OptionKey(optname)))) + ofile.write('{}={}\n'.format(optname, self._escape('${prefix}' / dirpath))) + if uninstalled and not dataonly: + ofile.write('srcdir={}\n'.format(self._escape(srcdir))) + if variables or unescaped_variables: + ofile.write('\n') + for k, v in variables: + ofile.write('{}={}\n'.format(k, self._escape(v))) + for k, v in unescaped_variables: + ofile.write(f'{k}={v}\n') + ofile.write('\n') + ofile.write(f'Name: {name}\n') + if len(description) > 0: + ofile.write(f'Description: {description}\n') + if len(url) > 0: + ofile.write(f'URL: {url}\n') + ofile.write(f'Version: {version}\n') + reqs_str = deps.format_reqs(deps.pub_reqs) + if len(reqs_str) > 0: + ofile.write(f'Requires: {reqs_str}\n') + reqs_str = deps.format_reqs(deps.priv_reqs) + if len(reqs_str) > 0: + ofile.write(f'Requires.private: {reqs_str}\n') + if len(conflicts) > 0: + ofile.write('Conflicts: {}\n'.format(' '.join(conflicts))) + + def generate_libs_flags(libs: T.List[LIBS]) -> T.Iterable[str]: + msg = 'Library target {0!r} has {1!r} set. Compilers ' \ + 'may not find it from its \'-l{2}\' linker flag in the ' \ + '{3!r} pkg-config file.' 
+ Lflags = [] + for l in libs: + if isinstance(l, str): + yield l + else: + install_dir: T.Union[str, bool] + if uninstalled: + install_dir = os.path.dirname(state.backend.get_target_filename_abs(l)) + else: + _i = l.get_custom_install_dir() + install_dir = _i[0] if _i else None + if install_dir is False: + continue + if isinstance(l, build.BuildTarget) and 'cs' in l.compilers: + if isinstance(install_dir, str): + Lflag = '-r{}/{}'.format(self._escape(self._make_relative(prefix, install_dir)), l.filename) + else: # install_dir is True + Lflag = '-r${libdir}/%s' % l.filename + else: + if isinstance(install_dir, str): + Lflag = '-L{}'.format(self._escape(self._make_relative(prefix, install_dir))) + else: # install_dir is True + Lflag = '-L${libdir}' + if Lflag not in Lflags: + Lflags.append(Lflag) + yield Lflag + lname = self._get_lname(l, msg, pcfile) + # If using a custom suffix, the compiler may not be able to + # find the library + if isinstance(l, build.BuildTarget) and l.name_suffix_set: + mlog.warning(msg.format(l.name, 'name_suffix', lname, pcfile)) + if isinstance(l, (build.CustomTarget, build.CustomTargetIndex)) or 'cs' not in l.compilers: + yield f'-l{lname}' + + def get_uninstalled_include_dirs(libs: T.List[LIBS]) -> T.List[str]: + result: T.List[str] = [] + for l in libs: + if isinstance(l, (str, build.CustomTarget, build.CustomTargetIndex)): + continue + if l.get_subdir() not in result: + result.append(l.get_subdir()) + for i in l.get_include_dirs(): + curdir = i.get_curdir() + for d in i.get_incdirs(): + path = os.path.join(curdir, d) + if path not in result: + result.append(path) + return result + + def generate_uninstalled_cflags(libs: T.List[LIBS]) -> T.Iterable[str]: + for d in get_uninstalled_include_dirs(libs): + for basedir in ['${prefix}', '${srcdir}']: + path = PurePath(basedir, d) + yield '-I%s' % self._escape(path.as_posix()) + + if len(deps.pub_libs) > 0: + ofile.write('Libs: {}\n'.format(' '.join(generate_libs_flags(deps.pub_libs)))) + if 
        if len(deps.priv_libs) > 0:
            ofile.write('Libs.private: {}\n'.format(' '.join(generate_libs_flags(deps.priv_libs))))

        cflags: T.List[str] = []
        if uninstalled:
            cflags += generate_uninstalled_cflags(deps.pub_libs + deps.priv_libs)
        else:
            for d in subdirs:
                if d == '.':
                    cflags.append('-I${includedir}')
                else:
                    cflags.append(self._escape(PurePath('-I${includedir}') / d))
        cflags += [self._escape(f) for f in deps.cflags]
        if cflags and not dataonly:
            ofile.write('Cflags: {}\n'.format(' '.join(cflags)))

    @typed_pos_args('pkgconfig.generate', optargs=[(build.SharedLibrary, build.StaticLibrary)])
    @typed_kwargs(
        'pkgconfig.generate',
        D_MODULE_VERSIONS_KW.evolve(since='0.43.0'),
        INSTALL_DIR_KW,
        KwargInfo('conflicts', ContainerTypeInfo(list, str), default=[], listify=True),
        KwargInfo('dataonly', bool, default=False, since='0.54.0'),
        KwargInfo('description', (str, NoneType)),
        KwargInfo('extra_cflags', ContainerTypeInfo(list, str), default=[], listify=True, since='0.42.0'),
        KwargInfo('filebase', (str, NoneType), validator=lambda x: 'must not be an empty string' if x == '' else None),
        KwargInfo('name', (str, NoneType), validator=lambda x: 'must not be an empty string' if x == '' else None),
        KwargInfo('subdirs', ContainerTypeInfo(list, str), default=[], listify=True),
        KwargInfo('url', str, default=''),
        KwargInfo('version', (str, NoneType)),
        VARIABLES_KW.evolve(name="unescaped_uninstalled_variables", since='0.59.0'),
        VARIABLES_KW.evolve(name="unescaped_variables", since='0.59.0'),
        VARIABLES_KW.evolve(name="uninstalled_variables", since='0.54.0', since_values={dict: '0.56.0'}),
        VARIABLES_KW.evolve(since='0.41.0', since_values={dict: '0.56.0'}),
        _PKG_LIBRARIES,
        _PKG_LIBRARIES.evolve(name='libraries_private'),
        _PKG_REQUIRES,
        _PKG_REQUIRES.evolve(name='requires_private'),
    )
    def generate(self, state: ModuleState,
                 args: T.Tuple[T.Optional[T.Union[build.SharedLibrary, build.StaticLibrary]]],
                 kwargs: GenerateKw) -> ModuleReturnValue:
        """Implement pkgconfig.generate().

        Writes <filebase>.pc (scheduled for installation) and
        <filebase>-uninstalled.pc (pointing at the build tree) into the
        scratch dir, and records metadata so later generate() calls can emit
        Requires: entries for the main library.
        """
        default_version = state.project_version
        default_install_dir: T.Optional[str] = None
        default_description: T.Optional[str] = None
        default_name: T.Optional[str] = None
        mainlib: T.Optional[T.Union[build.SharedLibrary, build.StaticLibrary]] = None
        default_subdirs = ['.']
        if args[0]:
            FeatureNew.single_use('pkgconfig.generate optional positional argument', '0.46.0', state.subproject)
            mainlib = args[0]
            default_name = mainlib.name
            default_description = state.project_name + ': ' + mainlib.name
            install_dir = mainlib.get_custom_install_dir()
            if install_dir and isinstance(install_dir[0], str):
                default_install_dir = os.path.join(install_dir[0], 'pkgconfig')
        else:
            if kwargs['version'] is None:
                FeatureNew.single_use('pkgconfig.generate implicit version keyword', '0.46.0', state.subproject)
            if kwargs['name'] is None:
                raise build.InvalidArguments(
                    'pkgconfig.generate: if a library is not passed as a '
                    'positional argument, the name keyword argument is '
                    'required.')

        dataonly = kwargs['dataonly']
        if dataonly:
            # Data-only pc files carry no compile/link information, so any
            # library/cflags-related keyword is rejected outright.
            default_subdirs = []
            blocked_vars = ['libraries', 'libraries_private', 'requires_private', 'extra_cflags', 'subdirs']
            if any(kwargs[k] for k in blocked_vars):  # type: ignore
                raise mesonlib.MesonException(f'Cannot combine dataonly with any of {blocked_vars}')
            default_install_dir = os.path.join(state.environment.get_datadir(), 'pkgconfig')

        subdirs = kwargs['subdirs'] or default_subdirs
        version = kwargs['version'] if kwargs['version'] is not None else default_version
        name = kwargs['name'] if kwargs['name'] is not None else default_name
        assert isinstance(name, str), 'for mypy'
        filebase = kwargs['filebase'] if kwargs['filebase'] is not None else name
        description = kwargs['description'] if kwargs['description'] is not None else default_description
        url = kwargs['url']
        conflicts = kwargs['conflicts']

        # Prepend the main library to public libraries list. This is required
        # so dep.add_pub_libs() can handle dependency ordering correctly and put
        # extra libraries after the main library.
        libraries = kwargs['libraries'].copy()
        if mainlib:
            libraries.insert(0, mainlib)

        deps = DependenciesHelper(state, filebase, self._metadata)
        deps.add_pub_libs(libraries)
        deps.add_priv_libs(kwargs['libraries_private'])
        deps.add_pub_reqs(kwargs['requires'])
        deps.add_priv_reqs(kwargs['requires_private'])
        deps.add_cflags(kwargs['extra_cflags'])

        dversions = kwargs['d_module_versions']
        if dversions:
            compiler = state.environment.coredata.compilers.host.get('d')
            if compiler:
                deps.add_cflags(compiler.get_feature_args({'versions': dversions}, None))

        deps.remove_dups()

        def parse_variable_list(vardict: T.Dict[str, str]) -> T.List[T.Tuple[str, str]]:
            # prefix/libdir/includedir are written by Meson itself, so user
            # variables may not redefine them (unless dataonly is set).
            reserved = ['prefix', 'libdir', 'includedir']
            variables = []
            for name, value in vardict.items():
                if not dataonly and name in reserved:
                    raise mesonlib.MesonException(f'Variable "{name}" is reserved')
                variables.append((name, value))
            return variables

        variables = parse_variable_list(kwargs['variables'])
        unescaped_variables = parse_variable_list(kwargs['unescaped_variables'])

        pcfile = filebase + '.pc'
        # pkgroot is the concrete install path; pkgroot_name keeps the
        # {prefix}/{libdir} symbolic form for display/introspection.
        pkgroot = pkgroot_name = kwargs['install_dir'] or default_install_dir
        if pkgroot is None:
            if mesonlib.is_freebsd():
                pkgroot = os.path.join(_as_str(state.environment.coredata.get_option(mesonlib.OptionKey('prefix'))), 'libdata', 'pkgconfig')
                pkgroot_name = os.path.join('{prefix}', 'libdata', 'pkgconfig')
            elif mesonlib.is_haiku():
                pkgroot = os.path.join(_as_str(state.environment.coredata.get_option(mesonlib.OptionKey('prefix'))), 'develop', 'lib', 'pkgconfig')
                pkgroot_name = os.path.join('{prefix}', 'develop', 'lib', 'pkgconfig')
            else:
                pkgroot = os.path.join(_as_str(state.environment.coredata.get_option(mesonlib.OptionKey('libdir'))), 'pkgconfig')
                pkgroot_name = os.path.join('{libdir}', 'pkgconfig')
        relocatable = state.get_option('relocatable', module='pkgconfig')
        self._generate_pkgconfig_file(state, deps, subdirs, name, description, url,
                                      version, pcfile, conflicts, variables,
                                      unescaped_variables, False, dataonly,
                                      pkgroot=pkgroot if relocatable else None)
        res = build.Data([mesonlib.File(True, state.environment.get_scratch_dir(), pcfile)], pkgroot, pkgroot_name, None, state.subproject, install_tag='devel')
        variables = parse_variable_list(kwargs['uninstalled_variables'])
        unescaped_variables = parse_variable_list(kwargs['unescaped_uninstalled_variables'])

        pcfile = filebase + '-uninstalled.pc'
        self._generate_pkgconfig_file(state, deps, subdirs, name, description, url,
                                      version, pcfile, conflicts, variables,
                                      unescaped_variables, uninstalled=True, dataonly=dataonly)
        # Associate the main library with this generated pc file. If the library
        # is used in any subsequent call to the generated, it will generate a
        # 'Requires:' or 'Requires.private:'.
        # Backward compatibility: We used to set 'generated_pc' on all public
        # libraries instead of just the main one. Keep doing that but warn if
        # anyone is relying on that deprecated behaviour.
        if mainlib:
            if mainlib.get_id() not in self._metadata:
                self._metadata[mainlib.get_id()] = MetaData(
                    filebase, name, state.current_node)
            else:
                mlog.warning('Already generated a pkg-config file for', mlog.bold(mainlib.name))
        else:
            for lib in deps.pub_libs:
                if not isinstance(lib, str) and lib.get_id() not in self._metadata:
                    self._metadata[lib.get_id()] = MetaData(
                        filebase, name, state.current_node)
        return ModuleReturnValue(res, [res])


def initialize(interp: Interpreter) -> PkgConfigModule:
    """Module entry point for import('pkgconfig')."""
    return PkgConfigModule()
diff --git a/mesonbuild/modules/python.py b/mesonbuild/modules/python.py
new file mode 100644
index 0000000..16d3ac4
--- /dev/null
+++ b/mesonbuild/modules/python.py
@@ -0,0 +1,835 @@
# Copyright 2018 The Meson development team

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

# http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations

from pathlib import Path
import copy
import functools
import json
import os
import shutil
import typing as T

from . import ExtensionModule, ModuleInfo
from .. import mesonlib
from .. import mlog
from ..coredata import UserFeatureOption
from ..build import known_shmod_kwargs
from ..dependencies import DependencyMethods, PkgConfigDependency, NotFoundDependency, SystemDependency, ExtraFrameworkDependency
from ..dependencies.base import process_method_kw
from ..dependencies.detect import get_dep_identifier
from ..environment import detect_cpu_family
from ..interpreter import ExternalProgramHolder, extract_required_kwarg, permitted_dependency_kwargs
from ..interpreter import primitives as P_OBJ
from ..interpreter.type_checking import NoneType, PRESERVE_PATH_KW
from ..interpreterbase import (
    noPosargs, noKwargs, permittedKwargs, ContainerTypeInfo,
    InvalidArguments, typed_pos_args, typed_kwargs, KwargInfo,
    FeatureNew, FeatureNewKwargs, disablerIfNotFound
)
from ..mesonlib import MachineChoice
from ..programs import ExternalProgram, NonExistingExternalProgram

if T.TYPE_CHECKING:
    from typing_extensions import TypedDict

    from . import ModuleState
    from ..build import SharedModule, Data
    from ..dependencies import ExternalDependency, Dependency
    from ..dependencies.factory import DependencyGenerator
    from ..environment import Environment
    from ..interpreter import Interpreter
    from ..interpreter.kwargs import ExtractRequired
    from ..interpreterbase.interpreterbase import TYPE_var, TYPE_kwargs

    # Shape of the JSON blob produced by INTROSPECT_COMMAND (below).
    class PythonIntrospectionDict(TypedDict):

        install_paths: T.Dict[str, str]
        is_pypy: bool
        is_venv: bool
        link_libpython: bool
        sysconfig_paths: T.Dict[str, str]
        paths: T.Dict[str, str]
        platform: str
        suffix: str
        variables: T.Dict[str, str]
        version: str

    # Keyword args shared by install_sources() / get_install_dir().
    class PyInstallKw(TypedDict):

        pure: T.Optional[bool]
        subdir: str
        install_tag: T.Optional[str]

    # Keyword args of python.find_installation().
    class FindInstallationKw(ExtractRequired):

        disabler: bool
        modules: T.List[str]
        pure: T.Optional[bool]

    _Base = ExternalDependency
else:
    # At runtime the dependency mixin needs no real base class.
    _Base = object


# Keywords accepted by extension_module(): everything shared_module() takes
# plus 'subdir', minus name_prefix/name_suffix which are forced by the method.
mod_kwargs = {'subdir'}
mod_kwargs.update(known_shmod_kwargs)
mod_kwargs -= {'name_prefix', 'name_suffix'}


class _PythonDependencyBase(_Base):
    """Mixin carrying the introspection data shared by every python dependency flavour."""

    def __init__(self, python_holder: 'PythonInstallation', embed: bool):
        self.embed = embed
        self.version: str = python_holder.version
        self.platform = python_holder.platform
        self.variables = python_holder.variables
        self.paths = python_holder.paths
        # The "-embed" version of python.pc / python-config was introduced in 3.8,
        # and distutils extension linking was changed to be considered a non embed
        # usage. Before then, this dependency always uses the embed=True handling
        # because that is the only one that exists.
        #
        # On macOS and some Linux distros (Debian) distutils doesn't link extensions
        # against libpython, even on 3.7 and below. We call into distutils and
        # mirror its behavior. See https://github.com/mesonbuild/meson/issues/4117
        self.link_libpython = python_holder.link_libpython or embed
        self.info: T.Optional[T.Dict[str, str]] = None
        if mesonlib.version_compare(self.version, '>= 3.0'):
            self.major_version = 3
        else:
            self.major_version = 2


class PythonPkgConfigDependency(PkgConfigDependency, _PythonDependencyBase):
    """python resolved via pkg-config, optionally pinned to sysconfig's LIBPC dir."""

    def __init__(self, name: str, environment: 'Environment',
                 kwargs: T.Dict[str, T.Any], installation: 'PythonInstallation',
                 libpc: bool = False):
        if libpc:
            mlog.debug(f'Searching for {name!r} via pkgconfig lookup in LIBPC')
        else:
            mlog.debug(f'Searching for {name!r} via fallback pkgconfig lookup in default paths')

        PkgConfigDependency.__init__(self, name, environment, kwargs)
        _PythonDependencyBase.__init__(self, installation, kwargs.get('embed', False))

        if libpc and not self.is_found:
            mlog.debug(f'"python-{self.version}" could not be found in LIBPC, this is likely due to a relocated python installation')

        # pkg-config files are usually accurate starting with python 3.8
        if not self.link_libpython and mesonlib.version_compare(self.version, '< 3.8'):
            self.link_args = []
class PythonFrameworkDependency(ExtraFrameworkDependency, _PythonDependencyBase):
    """python resolved as a framework (macOS lookup path)."""

    def __init__(self, name: str, environment: 'Environment',
                 kwargs: T.Dict[str, T.Any], installation: 'PythonInstallation'):
        ExtraFrameworkDependency.__init__(self, name, environment, kwargs)
        _PythonDependencyBase.__init__(self, installation, kwargs.get('embed', False))


class PythonSystemDependency(SystemDependency, _PythonDependencyBase):
    """python resolved by direct system search: libpython + the Python.h header."""

    def __init__(self, name: str, environment: 'Environment',
                 kwargs: T.Dict[str, T.Any], installation: 'PythonInstallation'):
        SystemDependency.__init__(self, name, environment, kwargs)
        _PythonDependencyBase.__init__(self, installation, kwargs.get('embed', False))

        if mesonlib.is_windows():
            self._find_libpy_windows(environment)
        else:
            self._find_libpy(installation, environment)

        if not self.link_libpython:
            # match pkg-config behavior
            self.link_args = []

        # Regardless of how the library was found, the headers must compile.
        if not self.clib_compiler.has_header('Python.h', '', environment, extra_args=self.compile_args):
            self.is_found = False

    def _find_libpy(self, python_holder: 'PythonInstallation', environment: 'Environment') -> None:
        # Non-Windows lookup: ask the compiler for libpythonX.Y (or pypy-c)
        # and add the sysconfig include paths to compile_args.
        if python_holder.is_pypy:
            if self.major_version == 3:
                libname = 'pypy3-c'
            else:
                libname = 'pypy-c'
            libdir = os.path.join(self.variables.get('base'), 'bin')
            libdirs = [libdir]
        else:
            libname = f'python{self.version}'
            if 'DEBUG_EXT' in self.variables:
                libname += self.variables['DEBUG_EXT']
            if 'ABIFLAGS' in self.variables:
                libname += self.variables['ABIFLAGS']
            libdirs = []

        largs = self.clib_compiler.find_library(libname, environment, libdirs)
        if largs is not None:
            self.link_args = largs

        # Missing libpython is acceptable when this interpreter does not link
        # extensions against it (link_libpython is False).
        self.is_found = largs is not None or not self.link_libpython

        inc_paths = mesonlib.OrderedSet([
            self.variables.get('INCLUDEPY'),
            self.paths.get('include'),
            self.paths.get('platinclude')])

        self.compile_args += ['-I' + path for path in inc_paths if path]

    def _get_windows_python_arch(self) -> T.Optional[str]:
        """Return '32' or '64' for the interpreter's word size, or None if undetermined."""
        if self.platform == 'mingw':
            pycc = self.variables.get('CC')
            if pycc.startswith('x86_64'):
                return '64'
            elif pycc.startswith(('i686', 'i386')):
                return '32'
            else:
                mlog.log(f'MinGW Python built with unknown CC {pycc!r}, please file a bug')
                return None
        elif self.platform == 'win32':
            return '32'
        elif self.platform in {'win64', 'win-amd64'}:
            return '64'
        mlog.log(f'Unknown Windows Python platform {self.platform!r}')
        return None

    def _get_windows_link_args(self) -> T.Optional[T.List[str]]:
        # Resolve the full path of the library to link against on
        # Windows/MinGW; None (after a log message) if the file is missing.
        if self.platform.startswith('win'):
            vernum = self.variables.get('py_version_nodot')
            verdot = self.variables.get('py_version_short')
            imp_lower = self.variables.get('implementation_lower', 'python')
            if self.static:
                libpath = Path('libs') / f'libpython{vernum}.a'
            else:
                comp = self.get_compiler()
                if comp.id == "gcc":
                    if imp_lower == 'pypy' and verdot == '3.8':
                        # The naming changed between 3.8 and 3.9
                        libpath = Path('libpypy3-c.dll')
                    elif imp_lower == 'pypy':
                        libpath = Path(f'libpypy{verdot}-c.dll')
                    else:
                        libpath = Path(f'python{vernum}.dll')
                else:
                    libpath = Path('libs') / f'python{vernum}.lib'
            # base_prefix to allow for virtualenvs.
            lib = Path(self.variables.get('base_prefix')) / libpath
        elif self.platform == 'mingw':
            if self.static:
                libname = self.variables.get('LIBRARY')
            else:
                libname = self.variables.get('LDLIBRARY')
            lib = Path(self.variables.get('LIBDIR')) / libname
        else:
            raise mesonlib.MesonBugException(
                'On a Windows path, but the OS doesn\'t appear to be Windows or MinGW.')
        if not lib.exists():
            mlog.log('Could not find Python3 library {!r}'.format(str(lib)))
            return None
        return [str(lib)]

    def _find_libpy_windows(self, env: 'Environment') -> None:
        '''
        Find python3 libraries on Windows and also verify that the arch matches
        what we are building for.
        '''
        pyarch = self._get_windows_python_arch()
        if pyarch is None:
            self.is_found = False
            return
        arch = detect_cpu_family(env.coredata.compilers.host)
        if arch == 'x86':
            arch = '32'
        elif arch == 'x86_64':
            arch = '64'
        else:
            # We can't cross-compile Python 3 dependencies on Windows yet
            mlog.log(f'Unknown architecture {arch!r} for',
                     mlog.bold(self.name))
            self.is_found = False
            return
        # Pyarch ends in '32' or '64'
        if arch != pyarch:
            mlog.log('Need', mlog.bold(self.name), f'for {arch}-bit, but found {pyarch}-bit')
            self.is_found = False
            return
        # This can fail if the library is not found
        largs = self._get_windows_link_args()
        if largs is None:
            self.is_found = False
            return
        self.link_args = largs
        # Compile args
        inc_paths = mesonlib.OrderedSet([
            self.variables.get('INCLUDEPY'),
            self.paths.get('include'),
            self.paths.get('platinclude')])

        self.compile_args += ['-I' + path for path in inc_paths if path]

        # https://sourceforge.net/p/mingw-w64/mailman/message/30504611/
        # https://github.com/python/cpython/pull/100137
        if pyarch == '64' and mesonlib.version_compare(self.version, '<3.12'):
            self.compile_args += ['-DMS_WIN64']

        self.is_found = True


def python_factory(env: 'Environment', for_machine: 'MachineChoice',
                   kwargs: T.Dict[str, T.Any], methods: T.List[DependencyMethods],
                   installation: 'PythonInstallation') -> T.List['DependencyGenerator']:
    """Return lazy dependency candidates for *installation*, one per allowed method."""
    # We can't use the factory_methods decorator here, as we need to pass the
    # extra installation argument
    embed = kwargs.get('embed', False)
    candidates: T.List['DependencyGenerator'] = []
    pkg_version = installation.variables.get('LDVERSION') or installation.version

    if DependencyMethods.PKGCONFIG in methods:
        pkg_libdir = installation.variables.get('LIBPC')
        pkg_embed = '-embed' if embed and mesonlib.version_compare(installation.version, '>=3.8') else ''
        pkg_name = f'python-{pkg_version}{pkg_embed}'

        # If python-X.Y.pc exists in LIBPC, we will try to use it
        def wrap_in_pythons_pc_dir(name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
                                   installation: 'PythonInstallation') -> 'ExternalDependency':
            """Run a pkg-config lookup with PKG_CONFIG_LIBDIR pinned to python's LIBPC dir."""
            if not pkg_libdir:
                # there is no LIBPC, so we can't search in it
                return NotFoundDependency('python', env)

            # Temporarily replace the pkg-config search environment, restoring
            # it exactly in the finally block even if the lookup raises.
            old_pkg_libdir = os.environ.pop('PKG_CONFIG_LIBDIR', None)
            old_pkg_path = os.environ.pop('PKG_CONFIG_PATH', None)
            os.environ['PKG_CONFIG_LIBDIR'] = pkg_libdir
            try:
                return PythonPkgConfigDependency(name, env, kwargs, installation, True)
            finally:
                def set_env(name, value):
                    if value is not None:
                        os.environ[name] = value
                    elif name in os.environ:
                        del os.environ[name]
                set_env('PKG_CONFIG_LIBDIR', old_pkg_libdir)
                set_env('PKG_CONFIG_PATH', old_pkg_path)

        candidates.append(functools.partial(wrap_in_pythons_pc_dir, pkg_name, env, kwargs, installation))
        # We only need to check both, if a python install has a LIBPC. It might point to the wrong location,
        # e.g. relocated / cross compilation, but the presence of LIBPC indicates we should definitely look for something.
        if pkg_libdir is not None:
            candidates.append(functools.partial(PythonPkgConfigDependency, pkg_name, env, kwargs, installation))

    if DependencyMethods.SYSTEM in methods:
        candidates.append(functools.partial(PythonSystemDependency, 'python', env, kwargs, installation))

    if DependencyMethods.EXTRAFRAMEWORK in methods:
        nkwargs = kwargs.copy()
        if mesonlib.version_compare(pkg_version, '>= 3'):
            # There is a python in /System/Library/Frameworks, but that's python 2.x,
            # Python 3 will always be in /Library
            nkwargs['paths'] = ['/Library/Frameworks']
        candidates.append(functools.partial(PythonFrameworkDependency, 'Python', env, nkwargs, installation))

    return candidates


# Script executed by PythonExternalProgram.sanity() inside the target
# interpreter; prints a JSON blob matching PythonIntrospectionDict.
INTROSPECT_COMMAND = '''\
import os.path
import sysconfig
import json
import sys
import distutils.command.install

def get_distutils_paths(scheme=None, prefix=None):
    import distutils.dist
    distribution = distutils.dist.Distribution()
    install_cmd = distribution.get_command_obj('install')
    if prefix is not None:
        install_cmd.prefix = prefix
    if scheme:
        install_cmd.select_scheme(scheme)
    install_cmd.finalize_options()
    return {
        'data': install_cmd.install_data,
        'include': os.path.dirname(install_cmd.install_headers),
        'platlib': install_cmd.install_platlib,
        'purelib': install_cmd.install_purelib,
        'scripts': install_cmd.install_scripts,
    }

# On Debian derivatives, the Python interpreter shipped by the distribution uses
# a custom install scheme, deb_system, for the system install, and changes the
# default scheme to a custom one pointing to /usr/local and replacing
# site-packages with dist-packages.
# See https://github.com/mesonbuild/meson/issues/8739.
# XXX: We should be using sysconfig, but Debian only patches distutils.

if 'deb_system' in distutils.command.install.INSTALL_SCHEMES:
    paths = get_distutils_paths(scheme='deb_system')
    install_paths = get_distutils_paths(scheme='deb_system', prefix='')
else:
    paths = sysconfig.get_paths()
    empty_vars = {'base': '', 'platbase': '', 'installed_base': ''}
    install_paths = sysconfig.get_paths(vars=empty_vars)

def links_against_libpython():
    from distutils.core import Distribution, Extension
    cmd = Distribution().get_command_obj('build_ext')
    cmd.ensure_finalized()
    return bool(cmd.get_libraries(Extension('dummy', [])))

variables = sysconfig.get_config_vars()
variables.update({'base_prefix': getattr(sys, 'base_prefix', sys.prefix)})

if sys.version_info < (3, 0):
    suffix = variables.get('SO')
elif sys.version_info < (3, 8, 7):
    # https://bugs.python.org/issue?@action=redirect&bpo=39825
    from distutils.sysconfig import get_config_var
    suffix = get_config_var('EXT_SUFFIX')
else:
    suffix = variables.get('EXT_SUFFIX')

print(json.dumps({
  'variables': variables,
  'paths': paths,
  'sysconfig_paths': sysconfig.get_paths(),
  'install_paths': install_paths,
  'version': sysconfig.get_python_version(),
  'platform': sysconfig.get_platform(),
  'is_pypy': '__pypy__' in sys.builtin_module_names,
  'is_venv': sys.prefix != variables['base_prefix'],
  'link_libpython': links_against_libpython(),
  'suffix': suffix,
}))
'''


class PythonExternalProgram(ExternalProgram):
    """An ExternalProgram that is a python interpreter, plus its introspection data."""

    def __init__(self, name: str, command: T.Optional[T.List[str]] = None,
                 ext_prog: T.Optional[ExternalProgram] = None):
        if ext_prog is None:
            super().__init__(name, command=command, silent=True)
        else:
            # Re-wrap an already-found program without re-searching for it.
            self.name = name
            self.command = ext_prog.command
            self.path = ext_prog.path

        # We want strong key values, so we always populate this with bogus data.
        # Otherwise to make the type checkers happy we'd have to do .get() for
        # everycall, even though we know that the introspection data will be
        # complete
        self.info: 'PythonIntrospectionDict' = {
            'install_paths': {},
            'is_pypy': False,
            'is_venv': False,
            'link_libpython': False,
            'sysconfig_paths': {},
            'paths': {},
            'platform': 'sentinal',
            'variables': {},
            'version': '0.0',
        }
        self.pure: bool = True

    def _check_version(self, version: str) -> bool:
        # The binary names 'python2'/'python3' imply a major-version
        # constraint; any other name accepts whatever version it reports.
        if self.name == 'python2':
            return mesonlib.version_compare(version, '< 3.0')
        elif self.name == 'python3':
            return mesonlib.version_compare(version, '>= 3.0')
        return True

    def sanity(self, state: T.Optional['ModuleState'] = None) -> bool:
        """Introspect the interpreter; on success fill self.info/platlib/purelib and return True."""
        # Sanity check, we expect to have something that at least quacks in tune
        from tempfile import NamedTemporaryFile
        with NamedTemporaryFile(suffix='.py', delete=False, mode='w', encoding='utf-8') as tf:
            tmpfilename = tf.name
            tf.write(INTROSPECT_COMMAND)
        cmd = self.get_command() + [tmpfilename]
        p, stdout, stderr = mesonlib.Popen_safe(cmd)
        os.unlink(tmpfilename)
        try:
            info = json.loads(stdout)
        except json.JSONDecodeError:
            info = None
            mlog.debug('Could not introspect Python (%s): exit code %d' % (str(p.args), p.returncode))
            mlog.debug('Program stdout:\n')
            mlog.debug(stdout)
            mlog.debug('Program stderr:\n')
            mlog.debug(stderr)

        if info is not None and self._check_version(info['version']):
            self.info = T.cast('PythonIntrospectionDict', info)
            self.platlib = self._get_path(state, 'platlib')
            self.purelib = self._get_path(state, 'purelib')
            return True
        else:
            return False

    def _get_path(self, state: T.Optional['ModuleState'], key: str) -> str:
        """Return the install path for *key* ('platlib'/'purelib'), honouring
        the python.<key>dir and python.install_env module options."""
        # Strip the leading separator so the path can be joined to a prefix.
        rel_path = self.info['install_paths'][key][1:]
        if not state:
            # This happens only from run_project_tests.py
            return rel_path
        value = state.get_option(f'{key}dir', module='python')
        if value:
            if state.is_user_defined_option('install_env', module='python'):
                raise mesonlib.MesonException(f'python.{key}dir and python.install_env are mutually exclusive')
            return value

        install_env = state.get_option('install_env', module='python')
        if install_env == 'auto':
            install_env = 'venv' if self.info['is_venv'] else 'system'

        if install_env == 'system':
            rel_path = os.path.join(self.info['variables']['prefix'], rel_path)
        elif install_env == 'venv':
            if not self.info['is_venv']:
                raise mesonlib.MesonException('python.install_env cannot be set to "venv" unless you are in a venv!')
            # inside a venv, deb_system is *never* active hence info['paths'] may be wrong
            rel_path = self.info['sysconfig_paths'][key]

        return rel_path


_PURE_KW = KwargInfo('pure', (bool, NoneType))
_SUBDIR_KW = KwargInfo('subdir', str, default='')


class PythonInstallation(ExternalProgramHolder):
    """Interpreter-facing object wrapping a sanity-checked PythonExternalProgram."""

    def __init__(self, python: 'PythonExternalProgram', interpreter: 'Interpreter'):
        ExternalProgramHolder.__init__(self, python, interpreter)
        info = python.info
        prefix = self.interpreter.environment.coredata.get_option(mesonlib.OptionKey('prefix'))
        assert isinstance(prefix, str), 'for mypy'
        self.variables = info['variables']
        self.suffix = info['suffix']
        self.paths = info['paths']
        self.pure = python.pure
        self.platlib_install_path = os.path.join(prefix, python.platlib)
        self.purelib_install_path = os.path.join(prefix, python.purelib)
        self.version = info['version']
        self.platform = info['platform']
        self.is_pypy = info['is_pypy']
        self.link_libpython = info['link_libpython']
        self.methods.update({
            'extension_module': self.extension_module_method,
            'dependency': self.dependency_method,
            'install_sources': self.install_sources_method,
            'get_install_dir': self.get_install_dir_method,
            'language_version': self.language_version_method,
            'found': self.found_method,
            'has_path': self.has_path_method,
            'get_path': self.get_path_method,
            'has_variable': self.has_variable_method,
            'get_variable': self.get_variable_method,
            'path': self.path_method,
        })

    @permittedKwargs(mod_kwargs)
    def extension_module_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> 'SharedModule':
        """Build a python extension module: shared_module() with python's
        naming scheme and an implicit dependency on this python."""
        if 'install_dir' in kwargs:
            if 'subdir' in kwargs:
                raise InvalidArguments('"subdir" and "install_dir" are mutually exclusive')
        else:
            subdir = kwargs.pop('subdir', '')
            if not isinstance(subdir, str):
                raise InvalidArguments('"subdir" argument must be a string.')

            kwargs['install_dir'] = self._get_install_dir_impl(False, subdir)

        new_deps = mesonlib.extract_as_list(kwargs, 'dependencies')
        has_pydep = any(isinstance(dep, _PythonDependencyBase) for dep in new_deps)
        if not has_pydep:
            pydep = self._dependency_method_impl({})
            if not pydep.found():
                raise mesonlib.MesonException('Python dependency not found')
            new_deps.append(pydep)
            FeatureNew.single_use('python_installation.extension_module with implicit dependency on python',
                                  '0.63.0', self.subproject, 'use python_installation.dependency()',
                                  self.current_node)
        kwargs['dependencies'] = new_deps

        # msys2's python3 has "-cpython-36m.dll", we have to be clever
        # FIXME: explain what the specific cleverness is here
        split, suffix = self.suffix.rsplit('.', 1)
        args[0] += split

        kwargs['name_prefix'] = ''
        kwargs['name_suffix'] = suffix

        if 'gnu_symbol_visibility' not in kwargs and \
                (self.is_pypy or mesonlib.version_compare(self.version, '>=3.9')):
            kwargs['gnu_symbol_visibility'] = 'inlineshidden'

        return self.interpreter.func_shared_module(None, args, kwargs)

    def _dependency_method_impl(self, kwargs: TYPE_kwargs) -> Dependency:
        # Look up (or create and cache) the python dependency for these kwargs,
        # keyed in coredata so repeated dependency() calls reuse one object.
        for_machine = self.interpreter.machine_from_native_kwarg(kwargs)
        identifier = get_dep_identifier(self._full_path(), kwargs)

        dep = self.interpreter.coredata.deps[for_machine].get(identifier)
        if dep is not None:
            return dep

        new_kwargs = kwargs.copy()
        new_kwargs['required'] = False
        methods = process_method_kw({DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM}, kwargs)
        # it's theoretically (though not practically) possible to not bind dep, let's ensure it is.
        dep: Dependency = NotFoundDependency('python', self.interpreter.environment)
        for d in python_factory(self.interpreter.environment, for_machine, new_kwargs, methods, self):
            dep = d()
            if dep.found():
                break

        self.interpreter.coredata.deps[for_machine].put(identifier, dep)
        return dep

    @disablerIfNotFound
    @permittedKwargs(permitted_dependency_kwargs | {'embed'})
    @FeatureNewKwargs('python_installation.dependency', '0.53.0', ['embed'])
    @noPosargs
    def dependency_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> 'Dependency':
        """Implement python_installation.dependency()."""
        disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
        if disabled:
            mlog.log('Dependency', mlog.bold('python'), 'skipped: feature', mlog.bold(feature), 'disabled')
            return NotFoundDependency('python', self.interpreter.environment)
        else:
            dep = self._dependency_method_impl(kwargs)
            if required and not dep.found():
                raise mesonlib.MesonException('Python dependency not found')
            return dep

    @typed_pos_args('install_data', varargs=(str, mesonlib.File))
    @typed_kwargs(
        'python_installation.install_sources',
        _PURE_KW,
        _SUBDIR_KW,
        PRESERVE_PATH_KW,
        KwargInfo('install_tag', (str, NoneType), since='0.60.0')
    )
    def install_sources_method(self, args: T.Tuple[T.List[T.Union[str, mesonlib.File]]],
                               kwargs: 'PyInstallKw') -> 'Data':
        """Install python source files into the purelib/platlib tree."""
        tag = kwargs['install_tag'] or 'python-runtime'
        # Per-call 'pure' overrides the installation-wide default.
        pure = kwargs['pure'] if kwargs['pure'] is not None else self.pure
        install_dir = self._get_install_dir_impl(pure, kwargs['subdir'])
        return self.interpreter.install_data_impl(
            self.interpreter.source_strings_to_files(args[0]),
            install_dir,
            mesonlib.FileMode(), rename=None, tag=tag, install_data_type='python',
            install_dir_name=install_dir.optname,
            preserve_path=kwargs['preserve_path'])

    @noPosargs
    @typed_kwargs('python_installation.install_dir', _PURE_KW, _SUBDIR_KW)
    def get_install_dir_method(self, args: T.List['TYPE_var'], kwargs: 'PyInstallKw') -> str:
        """Return the directory install_sources() would use for these kwargs."""
        pure = kwargs['pure'] if kwargs['pure'] is not None else self.pure
        return self._get_install_dir_impl(pure, kwargs['subdir'])

    def _get_install_dir_impl(self, pure: bool, subdir: str) -> P_OBJ.OptionString:
        # OptionString pairs the concrete path with its symbolic
        # {py_purelib}/{py_platlib} form.
        if pure:
            base = self.purelib_install_path
            name = '{py_purelib}'
        else:
            base = self.platlib_install_path
            name = '{py_platlib}'

        return P_OBJ.OptionString(os.path.join(base, subdir), os.path.join(name, subdir))

    @noPosargs
    @noKwargs
    def language_version_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
        """Return the interpreter's version string as reported by introspection."""
        return self.version

    @typed_pos_args('python_installation.has_path', str)
    @noKwargs
    def has_path_method(self, args: T.Tuple[str], kwargs: 'TYPE_kwargs') -> bool:
        return args[0] in self.paths

    @typed_pos_args('python_installation.get_path', str, optargs=[object])
    @noKwargs
    def get_path_method(self, args: T.Tuple[str, T.Optional['TYPE_var']], kwargs: 'TYPE_kwargs') -> 'TYPE_var':
        """Return sysconfig path *args[0]*; fall back to the optional second argument."""
        path_name, fallback = args
        try:
            return self.paths[path_name]
        except KeyError:
            if fallback is not None:
                return fallback
            raise InvalidArguments(f'{path_name} is not a valid path name')

    @typed_pos_args('python_installation.has_variable', str)
    @noKwargs
    def has_variable_method(self, args: T.Tuple[str], kwargs: 'TYPE_kwargs') -> bool:
        return args[0] in self.variables

    @typed_pos_args('python_installation.get_variable', str, optargs=[object])
    @noKwargs
    def get_variable_method(self, args: T.Tuple[str, T.Optional['TYPE_var']], kwargs: 'TYPE_kwargs') -> 'TYPE_var':
        """Return sysconfig variable *args[0]*; fall back to the optional second argument."""
        var_name, fallback = args
        try:
            return self.variables[var_name]
        except KeyError:
            if fallback is not None:
                return fallback
            raise InvalidArguments(f'{var_name} is not a valid variable name')

    @noPosargs
    @noKwargs
    @FeatureNew('Python module path method', '0.50.0')
    def path_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
        return super().path_method(args, kwargs)
class PythonModule(ExtensionModule):
    """Implementation of import('python')."""

    INFO = ModuleInfo('python', '0.46.0')

    def __init__(self, interpreter: 'Interpreter') -> None:
        super().__init__(interpreter)
        # Cache of find_installation() results, keyed by the requested
        # binary name/path (None for the default lookup).
        self.installations: T.Dict[str, ExternalProgram] = {}
        self.methods.update({
            'find_installation': self.find_installation,
        })

    # https://www.python.org/dev/peps/pep-0397/
    @staticmethod
    def _get_win_pythonpath(name_or_path: str) -> T.Optional[str]:
        """Locate python2/python3 on Windows via the PEP 397 'py' launcher; None if unavailable."""
        if name_or_path not in ['python2', 'python3']:
            return None
        if not shutil.which('py'):
            # program not installed, return without an exception
            return None
        ver = {'python2': '-2', 'python3': '-3'}[name_or_path]
        cmd = ['py', ver, '-c', "import sysconfig; print(sysconfig.get_config_var('BINDIR'))"]
        _, stdout, _ = mesonlib.Popen_safe(cmd)
        directory = stdout.strip()
        if os.path.exists(directory):
            return os.path.join(directory, 'python')
        else:
            return None

    def _find_installation_impl(self, state: 'ModuleState', display_name: str, name_or_path: str, required: bool) -> ExternalProgram:
        """Locate and sanity-check an interpreter; NonExistingExternalProgram on failure."""
        if not name_or_path:
            python = PythonExternalProgram('python3', mesonlib.python_command)
        else:
            tmp_python = ExternalProgram.from_entry(display_name, name_or_path)
            python = PythonExternalProgram(display_name, ext_prog=tmp_python)

            if not python.found() and mesonlib.is_windows():
                pythonpath = self._get_win_pythonpath(name_or_path)
                if pythonpath is not None:
                    name_or_path = pythonpath
                    python = PythonExternalProgram(name_or_path)

            # Last ditch effort, python2 or python3 can be named python
            # on various platforms, let's not give up just yet, if an executable
            # named python is available and has a compatible version, let's use
            # it
            if not python.found() and name_or_path in {'python2', 'python3'}:
                python = PythonExternalProgram('python')

        if python.found():
            if python.sanity(state):
                return python
            else:
                sanitymsg = f'{python} is not a valid python or it is missing distutils'
                if required:
                    raise mesonlib.MesonException(sanitymsg)
                else:
                    mlog.warning(sanitymsg, location=state.current_node)

        return NonExistingExternalProgram()

    @disablerIfNotFound
    @typed_pos_args('python.find_installation', optargs=[str])
    @typed_kwargs(
        'python.find_installation',
        KwargInfo('required', (bool, UserFeatureOption), default=True),
        KwargInfo('disabler', bool, default=False, since='0.49.0'),
        KwargInfo('modules', ContainerTypeInfo(list, str), listify=True, default=[], since='0.51.0'),
        _PURE_KW.evolve(default=True, since='0.64.0'),
    )
    def find_installation(self, state: 'ModuleState', args: T.Tuple[T.Optional[str]],
                          kwargs: 'FindInstallationKw') -> ExternalProgram:
        """Implement python.find_installation(): resolve, cache and validate
        an interpreter, optionally verifying that the listed modules import."""
        feature_check = FeatureNew('Passing "feature" option to find_installation', '0.48.0')
        disabled, required, feature = extract_required_kwarg(kwargs, state.subproject, feature_check)

        # FIXME: this code is *full* of sharp corners. It assumes that it's
        # going to get a string value (or now a list of length 1), of `python2`
        # or `python3` which is completely nonsense. On windows the value could
        # easily be `['py', '-3']`, or `['py', '-3.7']` to get a very specific
        # version of python. On Linux we might want a python that's not in
        # $PATH, or that uses a wrapper of some kind.
        np: T.List[str] = state.environment.lookup_binary_entry(MachineChoice.HOST, 'python') or []
        fallback = args[0]
        display_name = fallback or 'python'
        if not np and fallback is not None:
            np = [fallback]
        name_or_path = np[0] if np else None

        if disabled:
            mlog.log('Program', name_or_path or 'python', 'found:', mlog.red('NO'), '(disabled by:', mlog.bold(feature), ')')
            return NonExistingExternalProgram()

        python = self.installations.get(name_or_path)
        if not python:
            python = self._find_installation_impl(state, display_name, name_or_path, required)
            self.installations[name_or_path] = python

        want_modules = kwargs['modules']
        found_modules: T.List[str] = []
        missing_modules: T.List[str] = []
        # Each requested module is checked with a separate `python -c "import m"`.
        if python.found() and want_modules:
            for mod in want_modules:
                p, *_ = mesonlib.Popen_safe(
                    python.command +
                    ['-c', f'import {mod}'])
                if p.returncode != 0:
                    missing_modules.append(mod)
                else:
                    found_modules.append(mod)

        msg: T.List['mlog.TV_Loggable'] = ['Program', python.name]
        if want_modules:
            msg.append('({})'.format(', '.join(want_modules)))
        msg.append('found:')
        if python.found() and not missing_modules:
            msg.extend([mlog.green('YES'), '({})'.format(' '.join(python.command))])
        else:
            msg.append(mlog.red('NO'))
        if found_modules:
            msg.append('modules:')
            msg.append(', '.join(found_modules))

        mlog.log(*msg)

        if not python.found():
            if required:
                raise mesonlib.MesonException('{} not found'.format(name_or_path or 'python'))
            return NonExistingExternalProgram()
        elif missing_modules:
            if required:
                raise mesonlib.MesonException('{} is missing modules: {}'.format(name_or_path or 'python', ', '.join(missing_modules)))
            return NonExistingExternalProgram()
        else:
            # Copy so that setting 'pure' here does not mutate the cached entry.
            python = copy.copy(python)
            python.pure = kwargs['pure']
            return python

        raise mesonlib.MesonBugException('Unreachable code was reached (PythonModule.find_installation).')


def initialize(interpreter: 'Interpreter') -> PythonModule:
    mod = 
PythonModule(interpreter)
    mod.interpreter.append_holder_map(PythonExternalProgram, PythonInstallation)
    return mod
diff --git a/mesonbuild/modules/python3.py b/mesonbuild/modules/python3.py
new file mode 100644
index 0000000..065e8d7
--- /dev/null
+++ b/mesonbuild/modules/python3.py
# Copyright 2016-2017 The Meson development team

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

#     http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations

import sysconfig
from .. import mesonlib

from . import ExtensionModule, ModuleInfo
from ..interpreterbase import typed_pos_args, noPosargs, noKwargs, permittedKwargs
from ..build import known_shmod_kwargs
from ..programs import ExternalProgram


class Python3Module(ExtensionModule):
    """Implements the deprecated ``import('python3')`` module.

    Superseded by the 'python' module (use find_installation there).
    """

    INFO = ModuleInfo('python3', '0.38.0', deprecated='0.48.0')

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.methods.update({
            'extension_module': self.extension_module,
            'find_python': self.find_python,
            'language_version': self.language_version,
            'sysconfig_path': self.sysconfig_path,
        })

    @permittedKwargs(known_shmod_kwargs)
    def extension_module(self, state, args, kwargs):
        # Build a Python extension module: a shared_module() with the
        # platform-appropriate prefix/suffix forced on.
        if 'name_prefix' in kwargs:
            raise mesonlib.MesonException('Name_prefix is set automatically, specifying it is forbidden.')
        if 'name_suffix' in kwargs:
            raise mesonlib.MesonException('Name_suffix is set automatically, specifying it is forbidden.')
        host_system = state.host_machine.system
        if host_system == 'darwin':
            # Default suffix is 'dylib' but Python does not use it for extensions.
            suffix = 'so'
        elif host_system == 'windows':
            # On Windows the extension is pyd for some unexplainable reason.
            suffix = 'pyd'
        else:
            # Empty list means "use the target's default suffix".
            suffix = []
        kwargs['name_prefix'] = ''
        kwargs['name_suffix'] = suffix
        return self.interpreter.func_shared_module(None, args, kwargs)

    @noPosargs
    @noKwargs
    def find_python(self, state, args, kwargs):
        # Prefer a machine-file 'python3' binary entry; fall back to the
        # interpreter running Meson itself.
        command = state.environment.lookup_binary_entry(mesonlib.MachineChoice.HOST, 'python3')
        if command is not None:
            py3 = ExternalProgram.from_entry('python3', command)
        else:
            py3 = ExternalProgram('python3', mesonlib.python_command, silent=True)
        return py3

    @noPosargs
    @noKwargs
    def language_version(self, state, args, kwargs):
        # NOTE(review): reports the version of the Python running Meson,
        # not necessarily the python3 returned by find_python().
        return sysconfig.get_python_version()

    @noKwargs
    @typed_pos_args('python3.sysconfig_path', str)
    def sysconfig_path(self, state, args, kwargs):
        path_name = args[0]
        valid_names = sysconfig.get_path_names()
        if path_name not in valid_names:
            raise mesonlib.MesonException(f'{path_name} is not a valid path name {valid_names}.')

        # Get a relative path without a prefix, e.g. lib/python3.6/site-packages
        return sysconfig.get_path(path_name, vars={'base': '', 'platbase': '', 'installed_base': ''})[1:]


def initialize(*args, **kwargs):
    return Python3Module(*args, **kwargs)
diff --git a/mesonbuild/modules/qt.py b/mesonbuild/modules/qt.py
new file mode 100644
index 0000000..73160c0
--- /dev/null
+++ b/mesonbuild/modules/qt.py
# Copyright 2015 The Meson development team
# Copyright © 2021 Intel Corporation

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

#     http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations

import os
import shutil
import typing as T
import xml.etree.ElementTree as ET

from . import ModuleReturnValue, ExtensionModule
from .. import build
from .. import coredata
from .. import mlog
from ..dependencies import find_external_dependency, Dependency, ExternalLibrary
from ..mesonlib import MesonException, File, version_compare, Popen_safe
from ..interpreter import extract_required_kwarg
from ..interpreter.type_checking import INSTALL_DIR_KW, INSTALL_KW, NoneType
from ..interpreterbase import ContainerTypeInfo, FeatureDeprecated, KwargInfo, noPosargs, FeatureNew, typed_kwargs
from ..programs import NonExistingExternalProgram

if T.TYPE_CHECKING:
    from . import ModuleState
    from ..dependencies.qt import QtPkgConfigDependency, QmakeQtDependency
    from ..interpreter import Interpreter
    from ..interpreter import kwargs
    from ..mesonlib import FileOrString
    from ..programs import ExternalProgram

    QtDependencyType = T.Union[QtPkgConfigDependency, QmakeQtDependency]

    from typing_extensions import TypedDict

    class ResourceCompilerKwArgs(TypedDict):

        """Keyword arguments for the Resource Compiler method."""

        name: T.Optional[str]
        sources: T.Sequence[T.Union[FileOrString, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList]]
        extra_args: T.List[str]
        method: str

    class UICompilerKwArgs(TypedDict):

        """Keyword arguments for the Ui Compiler method."""

        sources: T.Sequence[T.Union[FileOrString, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList]]
        extra_args: T.List[str]
        method: str

    class MocCompilerKwArgs(TypedDict):

        """Keyword arguments for the Moc Compiler method."""

        sources: T.Sequence[T.Union[FileOrString, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList]]
        headers: T.Sequence[T.Union[FileOrString, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList]]
        extra_args: T.List[str]
        method: str
        include_directories: T.List[T.Union[str, build.IncludeDirs]]
        dependencies: T.List[T.Union[Dependency, ExternalLibrary]]

    class PreprocessKwArgs(TypedDict):

        """Keyword arguments for qt.preprocess."""

        sources: T.List[FileOrString]
        moc_sources: T.List[T.Union[FileOrString, build.CustomTarget]]
        moc_headers: T.List[T.Union[FileOrString, build.CustomTarget]]
        qresources: T.List[FileOrString]
        ui_files: T.List[T.Union[FileOrString, build.CustomTarget]]
        moc_extra_arguments: T.List[str]
        rcc_extra_arguments: T.List[str]
        uic_extra_arguments: T.List[str]
        include_directories: T.List[T.Union[str, build.IncludeDirs]]
        dependencies: T.List[T.Union[Dependency, ExternalLibrary]]
        method: str

    class HasToolKwArgs(kwargs.ExtractRequired):

        method: str

    class CompileTranslationsKwArgs(TypedDict):

        """Keyword arguments for qt.compile_translations."""

        build_by_default: bool
        install: bool
        install_dir: T.Optional[str]
        method: str
        qresource: T.Optional[str]
        rcc_extra_arguments: T.List[str]
        ts_files: T.List[T.Union[str, File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList]]

class QtBaseModule(ExtensionModule):
    """Shared implementation behind the qt4/qt5/qt6 modules."""

    # Per-instance detection state; flipped by _detect_tools().
    _tools_detected = False
    _rcc_supports_depfiles = False
    _moc_supports_depfiles = False

    def __init__(self, interpreter: 'Interpreter', qt_version: int = 5):
        ExtensionModule.__init__(self, interpreter)
        self.qt_version = qt_version
        # It is important that this list does not change order as the order of
        # the returned ExternalPrograms will change as well
        self.tools: T.Dict[str, ExternalProgram] = {
            'moc': NonExistingExternalProgram('moc'),
            'uic': NonExistingExternalProgram('uic'),
            'rcc': NonExistingExternalProgram('rcc'),
            'lrelease': NonExistingExternalProgram('lrelease'),
        }
        self.methods.update({
            'has_tools': self.has_tools,
            'preprocess': self.preprocess,
            'compile_translations': self.compile_translations,
            'compile_resources': self.compile_resources,
            'compile_ui': self.compile_ui,
            'compile_moc': self.compile_moc,
        })

    def compilers_detect(self, state: 'ModuleState', qt_dep: 'QtDependencyType') -> None:
        """Detect Qt (4 or 5) moc, uic, rcc in the specified bindir or in PATH"""
        # Only accept tools whose reported version exactly matches the Qt
        # dependency's version.
        wanted = f'== {qt_dep.version}'

        def gen_bins() -> T.Generator[T.Tuple[str, str], None, None]:
            # Yield (candidate-binary, tool-name) pairs in preference order.
            for b in self.tools:
                if qt_dep.bindir:
                    yield os.path.join(qt_dep.bindir, b), b
                if qt_dep.libexecdir:
                    yield os.path.join(qt_dep.libexecdir, b), b
                # prefer the (official) <tool><version> or (unofficial) <tool>-qt<version>
                # of the tool to the plain one, as we
                # don't know what the unsuffixed one points to without calling it.
                yield f'{b}{qt_dep.qtver}', b
                yield f'{b}-qt{qt_dep.qtver}', b
                yield b, b

        for b, name in gen_bins():
            if self.tools[name].found():
                continue

            # lrelease and Qt4 tools use different version flags.
            if name == 'lrelease':
                arg = ['-version']
            elif version_compare(qt_dep.version, '>= 5'):
                arg = ['--version']
            else:
                arg = ['-v']

            # Ensure that the version of qt and each tool are the same
            def get_version(p: ExternalProgram) -> str:
                _, out, err = Popen_safe(p.get_command() + arg)
                # Qt4 tools (other than lrelease) print the version on stderr.
                if name == 'lrelease' or not qt_dep.version.startswith('4'):
                    care = out
                else:
                    care = err
                return care.rsplit(' ', maxsplit=1)[-1].replace(')', '').strip()

            p = state.find_program(b, required=False,
                                   version_func=get_version,
                                   wanted=wanted)
            if p.found():
                self.tools[name] = p

    def _detect_tools(self, state: 'ModuleState', method: str, required: bool = True) -> None:
        # Find the Qt dependency once, then locate matching tools. Runs at
        # most once per module instance (guarded by _tools_detected).
        if self._tools_detected:
            return
        self._tools_detected = True
        mlog.log(f'Detecting Qt{self.qt_version} tools')
        kwargs = {'required': required, 'modules': 'Core', 'method': method}
        # Just pick one to make mypy happy
        qt = T.cast('QtPkgConfigDependency', find_external_dependency(f'qt{self.qt_version}', state.environment, kwargs))
        if qt.found():
            # Get all tools and then make sure that they are the right version
            self.compilers_detect(state, qt)
            if version_compare(qt.version, '>=5.15.0'):
                self._moc_supports_depfiles = True
            else:
                mlog.warning('moc dependencies will not work properly until you move to Qt >= 5.15', fatal=False)
            if version_compare(qt.version, '>=5.14.0'):
                self._rcc_supports_depfiles = True
            else:
                mlog.warning('rcc dependencies will not work properly until you move to Qt >= 5.14:',
                             mlog.bold('https://bugreports.qt.io/browse/QTBUG-45460'), fatal=False)
        else:
            suffix = f'-qt{self.qt_version}'
            self.tools['moc'] = NonExistingExternalProgram(name='moc' + suffix)
            self.tools['uic'] = NonExistingExternalProgram(name='uic' + suffix)
            self.tools['rcc'] = NonExistingExternalProgram(name='rcc' + suffix)
            self.tools['lrelease'] = NonExistingExternalProgram(name='lrelease' + suffix)

    @staticmethod
    def _qrc_nodes(state: 'ModuleState', rcc_file: 'FileOrString') -> T.Tuple[str, T.List[str]]:
        # Parse a .qrc file and return (directory of the qrc, list of <file>
        # entry texts). Raises MesonException on malformed/unparsable input.
        abspath: str
        if isinstance(rcc_file, str):
            abspath = os.path.join(state.environment.source_dir, state.subdir, rcc_file)
        else:
            abspath = rcc_file.absolute_path(state.environment.source_dir, state.environment.build_dir)
        rcc_dirname = os.path.dirname(abspath)

        # FIXME: what error are we actually trying to check here? (probably parse errors?)
        try:
            tree = ET.parse(abspath)
            root = tree.getroot()
            result: T.List[str] = []
            for child in root[0]:
                if child.tag != 'file':
                    mlog.warning("malformed rcc file: ", os.path.join(state.subdir, str(rcc_file)))
                    break
                elif child.text is None:
                    raise MesonException(f'<file> element without a path in {os.path.join(state.subdir, str(rcc_file))}')
                else:
                    result.append(child.text)

            return rcc_dirname, result
        except MesonException:
            raise
        except Exception:
            raise MesonException(f'Unable to parse resource file {abspath}')

    def _parse_qrc_deps(self, state: 'ModuleState',
                        rcc_file_: T.Union['FileOrString', build.CustomTarget, build.CustomTargetIndex, build.GeneratedList]) -> T.List[File]:
        # Compute the File dependencies referenced by one qrc input (or by
        # each output of a generated qrc target).
        result: T.List[File] = []
        inputs: T.Sequence['FileOrString'] = []
        if isinstance(rcc_file_, (str, File)):
            inputs = [rcc_file_]
        else:
            inputs = rcc_file_.get_outputs()

        for rcc_file in inputs:
            rcc_dirname, nodes = self._qrc_nodes(state, rcc_file)
            for resource_path in nodes:
                # We need to guess if the pointed resource is:
                # a) in build directory -> implies a generated file
                # b) in source directory
                # c) somewhere else external dependency file to bundle
                #
                # Also from qrc documentation: relative path are always from qrc file
                # So relative path must always be computed from qrc file !
                if os.path.isabs(resource_path):
                    # a)
                    if resource_path.startswith(os.path.abspath(state.environment.build_dir)):
                        resource_relpath = os.path.relpath(resource_path, state.environment.build_dir)
                        result.append(File(is_built=True, subdir='', fname=resource_relpath))
                    # either b) or c)
                    else:
                        result.append(File(is_built=False, subdir=state.subdir, fname=resource_path))
                else:
                    path_from_rcc = os.path.normpath(os.path.join(rcc_dirname, resource_path))
                    # a)
                    if path_from_rcc.startswith(state.environment.build_dir):
                        result.append(File(is_built=True, subdir=state.subdir, fname=resource_path))
                    # b)
                    else:
                        result.append(File(is_built=False, subdir=state.subdir, fname=path_from_rcc))
        return result

    @FeatureNew('qt.has_tools', '0.54.0')
    @noPosargs
    @typed_kwargs(
        'qt.has_tools',
        KwargInfo('required', (bool, coredata.UserFeatureOption), default=False),
        KwargInfo('method', str, default='auto'),
    )
    def has_tools(self, state: 'ModuleState', args: T.Tuple, kwargs: 'HasToolKwArgs') -> bool:
        """Return True when all four Qt tools (moc, uic, rcc, lrelease) were found."""
        method = kwargs.get('method', 'auto')
        # We have to cast here because TypedDicts are invariant, even though
        # ExtractRequiredKwArgs is a subset of HasToolKwArgs, type checkers
        # will insist this is wrong
        disabled, required, feature = extract_required_kwarg(kwargs, state.subproject, default=False)
        if disabled:
            mlog.log('qt.has_tools skipped: feature', mlog.bold(feature), 'disabled')
            return False
        self._detect_tools(state, method, required=False)
        for tool in self.tools.values():
            if not tool.found():
                if required:
                    raise MesonException('Qt tools not found')
                return False
        return True

    @FeatureNew('qt.compile_resources', '0.59.0')
    @noPosargs
    @typed_kwargs(
        'qt.compile_resources',
        KwargInfo('name', (str, NoneType)),
        KwargInfo(
            'sources',
            ContainerTypeInfo(list, (File, str, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList), allow_empty=False),
            listify=True,
            required=True,
        ),
        KwargInfo('extra_args', ContainerTypeInfo(list, str), listify=True, default=[]),
        KwargInfo('method', str, default='auto')
    )
    def compile_resources(self, state: 'ModuleState', args: T.Tuple, kwargs: 'ResourceCompilerKwArgs') -> ModuleReturnValue:
        """Compile Qt resources files.

        Uses CustomTargets to generate .cpp files from .qrc files.
        """
        if any(isinstance(s, (build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)) for s in kwargs['sources']):
            FeatureNew.single_use('qt.compile_resources: custom_target or generator for "sources" keyword argument',
                                  '0.60.0', state.subproject, location=state.current_node)
        out = self._compile_resources_impl(state, kwargs)
        return ModuleReturnValue(out, [out])

    def _compile_resources_impl(self, state: 'ModuleState', kwargs: 'ResourceCompilerKwArgs') -> T.List[build.CustomTarget]:
        # Avoid the FeatureNew when dispatching from preprocess
        self._detect_tools(state, kwargs['method'])
        if not self.tools['rcc'].found():
            err_msg = ("{0} sources specified and couldn't find {1}, "
                       "please check your qt{2} installation")
            raise MesonException(err_msg.format('RCC', f'rcc-qt{self.qt_version}', self.qt_version))

        # List of generated CustomTargets
        targets: T.List[build.CustomTarget] = []

        # depfile arguments
        DEPFILE_ARGS: T.List[str] = ['--depfile', '@DEPFILE@'] if self._rcc_supports_depfiles else []

        name = kwargs['name']
        sources: T.List['FileOrString'] = []
        for s in kwargs['sources']:
            if isinstance(s, (str, File)):
                sources.append(s)
            else:
                sources.extend(s.get_outputs())
        extra_args = kwargs['extra_args']

        # If a name was set generate a single .cpp file from all of the qrc
        # files, otherwise generate one .cpp file per qrc file.
        if name:
            qrc_deps: T.List[File] = []
            for s in sources:
                qrc_deps.extend(self._parse_qrc_deps(state, s))

            res_target = build.CustomTarget(
                name,
                state.subdir,
                state.subproject,
                state.environment,
                self.tools['rcc'].get_command() + ['-name', name, '-o', '@OUTPUT@'] + extra_args + ['@INPUT@'] + DEPFILE_ARGS,
                sources,
                [f'{name}.cpp'],
                depend_files=qrc_deps,
                depfile=f'{name}.d',
            )
            targets.append(res_target)
        else:
            for rcc_file in sources:
                qrc_deps = self._parse_qrc_deps(state, rcc_file)
                if isinstance(rcc_file, str):
                    basename = os.path.basename(rcc_file)
                else:
                    basename = os.path.basename(rcc_file.fname)
                name = f'qt{self.qt_version}-{basename.replace(".", "_")}'
                res_target = build.CustomTarget(
                    name,
                    state.subdir,
                    state.subproject,
                    state.environment,
                    self.tools['rcc'].get_command() + ['-name', '@BASENAME@', '-o', '@OUTPUT@'] + extra_args + ['@INPUT@'] + DEPFILE_ARGS,
                    [rcc_file],
                    [f'{name}.cpp'],
                    depend_files=qrc_deps,
                    depfile=f'{name}.d',
                )
                targets.append(res_target)

        return targets

    @FeatureNew('qt.compile_ui', '0.59.0')
    @noPosargs
    @typed_kwargs(
        'qt.compile_ui',
        KwargInfo(
            'sources',
            ContainerTypeInfo(list, (File, str, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList), allow_empty=False),
            listify=True,
            required=True,
        ),
        KwargInfo('extra_args', ContainerTypeInfo(list, str), listify=True, default=[]),
        KwargInfo('method', str, default='auto')
    )
    def compile_ui(self, state: 'ModuleState', args: T.Tuple, kwargs: 'UICompilerKwArgs') -> ModuleReturnValue:
        """Compile UI resources into cpp headers."""
        if any(isinstance(s, (build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)) for s in kwargs['sources']):
            FeatureNew.single_use('qt.compile_ui: custom_target or generator for "sources" keyword argument',
                                  '0.60.0', state.subproject, location=state.current_node)
        out = self._compile_ui_impl(state, kwargs)
        return ModuleReturnValue(out, [out])
def _compile_ui_impl(self, state: 'ModuleState', kwargs: 'UICompilerKwArgs') -> build.GeneratedList: + # Avoid the FeatureNew when dispatching from preprocess + self._detect_tools(state, kwargs['method']) + if not self.tools['uic'].found(): + err_msg = ("{0} sources specified and couldn't find {1}, " + "please check your qt{2} installation") + raise MesonException(err_msg.format('UIC', f'uic-qt{self.qt_version}', self.qt_version)) + + # TODO: This generator isn't added to the generator list in the Interpreter + gen = build.Generator( + self.tools['uic'], + kwargs['extra_args'] + ['-o', '@OUTPUT@', '@INPUT@'], + ['ui_@BASENAME@.h'], + name=f'Qt{self.qt_version} ui') + return gen.process_files(kwargs['sources'], state) + + @FeatureNew('qt.compile_moc', '0.59.0') + @noPosargs + @typed_kwargs( + 'qt.compile_moc', + KwargInfo( + 'sources', + ContainerTypeInfo(list, (File, str, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)), + listify=True, + default=[], + ), + KwargInfo( + 'headers', + ContainerTypeInfo(list, (File, str, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)), + listify=True, + default=[] + ), + KwargInfo('extra_args', ContainerTypeInfo(list, str), listify=True, default=[]), + KwargInfo('method', str, default='auto'), + KwargInfo('include_directories', ContainerTypeInfo(list, (build.IncludeDirs, str)), listify=True, default=[]), + KwargInfo('dependencies', ContainerTypeInfo(list, (Dependency, ExternalLibrary)), listify=True, default=[]), + ) + def compile_moc(self, state: 'ModuleState', args: T.Tuple, kwargs: 'MocCompilerKwArgs') -> ModuleReturnValue: + if any(isinstance(s, (build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)) for s in kwargs['headers']): + FeatureNew.single_use('qt.compile_moc: custom_target or generator for "headers" keyword argument', + '0.60.0', state.subproject, location=state.current_node) + if any(isinstance(s, (build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)) for s 
in kwargs['sources']): + FeatureNew.single_use('qt.compile_moc: custom_target or generator for "sources" keyword argument', + '0.60.0', state.subproject, location=state.current_node) + out = self._compile_moc_impl(state, kwargs) + return ModuleReturnValue(out, [out]) + + def _compile_moc_impl(self, state: 'ModuleState', kwargs: 'MocCompilerKwArgs') -> T.List[build.GeneratedList]: + # Avoid the FeatureNew when dispatching from preprocess + self._detect_tools(state, kwargs['method']) + if not self.tools['moc'].found(): + err_msg = ("{0} sources specified and couldn't find {1}, " + "please check your qt{2} installation") + raise MesonException(err_msg.format('MOC', f'uic-qt{self.qt_version}', self.qt_version)) + + if not (kwargs['headers'] or kwargs['sources']): + raise build.InvalidArguments('At least one of the "headers" or "sources" keyword arguments must be provided and not empty') + + inc = state.get_include_args(include_dirs=kwargs['include_directories']) + compile_args: T.List[str] = [] + for dep in kwargs['dependencies']: + compile_args.extend([a for a in dep.get_all_compile_args() if a.startswith(('-I', '-D'))]) + + output: T.List[build.GeneratedList] = [] + + # depfile arguments (defaults to <output-name>.d) + DEPFILE_ARGS: T.List[str] = ['--output-dep-file'] if self._moc_supports_depfiles else [] + + arguments = kwargs['extra_args'] + DEPFILE_ARGS + inc + compile_args + ['@INPUT@', '-o', '@OUTPUT@'] + if kwargs['headers']: + moc_gen = build.Generator( + self.tools['moc'], arguments, ['moc_@BASENAME@.cpp'], + depfile='moc_@BASENAME@.cpp.d', + name=f'Qt{self.qt_version} moc header') + output.append(moc_gen.process_files(kwargs['headers'], state)) + if kwargs['sources']: + moc_gen = build.Generator( + self.tools['moc'], arguments, ['@BASENAME@.moc'], + depfile='@BASENAME.moc.d@', + name=f'Qt{self.qt_version} moc source') + output.append(moc_gen.process_files(kwargs['sources'], state)) + + return output + + # We can't use typed_pos_args here, the signature is 
ambiguous + @typed_kwargs( + 'qt.preprocess', + KwargInfo('sources', ContainerTypeInfo(list, (File, str)), listify=True, default=[], deprecated='0.59.0'), + KwargInfo('qresources', ContainerTypeInfo(list, (File, str)), listify=True, default=[]), + KwargInfo('ui_files', ContainerTypeInfo(list, (File, str, build.CustomTarget)), listify=True, default=[]), + KwargInfo('moc_sources', ContainerTypeInfo(list, (File, str, build.CustomTarget)), listify=True, default=[]), + KwargInfo('moc_headers', ContainerTypeInfo(list, (File, str, build.CustomTarget)), listify=True, default=[]), + KwargInfo('moc_extra_arguments', ContainerTypeInfo(list, str), listify=True, default=[], since='0.44.0'), + KwargInfo('rcc_extra_arguments', ContainerTypeInfo(list, str), listify=True, default=[], since='0.49.0'), + KwargInfo('uic_extra_arguments', ContainerTypeInfo(list, str), listify=True, default=[], since='0.49.0'), + KwargInfo('method', str, default='auto'), + KwargInfo('include_directories', ContainerTypeInfo(list, (build.IncludeDirs, str)), listify=True, default=[]), + KwargInfo('dependencies', ContainerTypeInfo(list, (Dependency, ExternalLibrary)), listify=True, default=[]), + ) + def preprocess(self, state: 'ModuleState', args: T.List[T.Union[str, File]], kwargs: 'PreprocessKwArgs') -> ModuleReturnValue: + _sources = args[1:] + if _sources: + FeatureDeprecated.single_use('qt.preprocess positional sources', '0.59', state.subproject, location=state.current_node) + # List is invariant, os we have to cast... + sources = T.cast('T.List[T.Union[str, File, build.GeneratedList, build.CustomTarget]]', + _sources + kwargs['sources']) + for s in sources: + if not isinstance(s, (str, File)): + raise build.InvalidArguments('Variadic arguments to qt.preprocess must be Strings or Files') + method = kwargs['method'] + + if kwargs['qresources']: + # custom output name set? 
-> one output file, multiple otherwise + rcc_kwargs: 'ResourceCompilerKwArgs' = {'name': '', 'sources': kwargs['qresources'], 'extra_args': kwargs['rcc_extra_arguments'], 'method': method} + if args: + name = args[0] + if not isinstance(name, str): + raise build.InvalidArguments('First argument to qt.preprocess must be a string') + rcc_kwargs['name'] = name + sources.extend(self._compile_resources_impl(state, rcc_kwargs)) + + if kwargs['ui_files']: + ui_kwargs: 'UICompilerKwArgs' = {'sources': kwargs['ui_files'], 'extra_args': kwargs['uic_extra_arguments'], 'method': method} + sources.append(self._compile_ui_impl(state, ui_kwargs)) + + if kwargs['moc_headers'] or kwargs['moc_sources']: + moc_kwargs: 'MocCompilerKwArgs' = { + 'extra_args': kwargs['moc_extra_arguments'], + 'sources': kwargs['moc_sources'], + 'headers': kwargs['moc_headers'], + 'include_directories': kwargs['include_directories'], + 'dependencies': kwargs['dependencies'], + 'method': method, + } + sources.extend(self._compile_moc_impl(state, moc_kwargs)) + + return ModuleReturnValue(sources, [sources]) + + @FeatureNew('qt.compile_translations', '0.44.0') + @noPosargs + @typed_kwargs( + 'qt.compile_translations', + KwargInfo('build_by_default', bool, default=False), + INSTALL_KW, + INSTALL_DIR_KW, + KwargInfo('method', str, default='auto'), + KwargInfo('qresource', (str, NoneType), since='0.56.0'), + KwargInfo('rcc_extra_arguments', ContainerTypeInfo(list, str), listify=True, default=[], since='0.56.0'), + KwargInfo('ts_files', ContainerTypeInfo(list, (str, File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)), listify=True, default=[]), + ) + def compile_translations(self, state: 'ModuleState', args: T.Tuple, kwargs: 'CompileTranslationsKwArgs') -> ModuleReturnValue: + ts_files = kwargs['ts_files'] + if any(isinstance(s, (build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)) for s in ts_files): + FeatureNew.single_use('qt.compile_translations: custom_target or generator 
for "ts_files" keyword argument', + '0.60.0', state.subproject, location=state.current_node) + if kwargs['install'] and not kwargs['install_dir']: + raise MesonException('qt.compile_translations: "install_dir" keyword argument must be set when "install" is true.') + qresource = kwargs['qresource'] + if qresource: + if ts_files: + raise MesonException('qt.compile_translations: Cannot specify both ts_files and qresource') + if os.path.dirname(qresource) != '': + raise MesonException('qt.compile_translations: qresource file name must not contain a subdirectory.') + qresource_file = File.from_built_file(state.subdir, qresource) + infile_abs = os.path.join(state.environment.source_dir, qresource_file.relative_name()) + outfile_abs = os.path.join(state.environment.build_dir, qresource_file.relative_name()) + os.makedirs(os.path.dirname(outfile_abs), exist_ok=True) + shutil.copy2(infile_abs, outfile_abs) + self.interpreter.add_build_def_file(infile_abs) + + _, nodes = self._qrc_nodes(state, qresource_file) + for c in nodes: + if c.endswith('.qm'): + ts_files.append(c.rstrip('.qm') + '.ts') + else: + raise MesonException(f'qt.compile_translations: qresource can only contain qm files, found {c}') + results = self.preprocess(state, [], {'qresources': qresource_file, 'rcc_extra_arguments': kwargs['rcc_extra_arguments']}) + self._detect_tools(state, kwargs['method']) + translations: T.List[build.CustomTarget] = [] + for ts in ts_files: + if not self.tools['lrelease'].found(): + raise MesonException('qt.compile_translations: ' + + self.tools['lrelease'].name + ' not found') + if qresource: + # In this case we know that ts_files is always a List[str], as + # it's generated above and no ts_files are passed in. 
However, + # mypy can't figure that out so we use assert to assure it that + # what we're doing is safe + assert isinstance(ts, str), 'for mypy' + outdir = os.path.dirname(os.path.normpath(os.path.join(state.subdir, ts))) + ts = os.path.basename(ts) + else: + outdir = state.subdir + cmd: T.List[T.Union[ExternalProgram, str]] = [self.tools['lrelease'], '@INPUT@', '-qm', '@OUTPUT@'] + lrelease_target = build.CustomTarget( + f'qt{self.qt_version}-compile-{ts}', + outdir, + state.subproject, + state.environment, + cmd, + [ts], + ['@BASENAME@.qm'], + install=kwargs['install'], + install_dir=[kwargs['install_dir']], + install_tag=['i18n'], + build_by_default=kwargs['build_by_default'], + ) + translations.append(lrelease_target) + if qresource: + return ModuleReturnValue(results.return_value[0], [results.new_objects, translations]) + else: + return ModuleReturnValue(translations, [translations]) diff --git a/mesonbuild/modules/qt4.py b/mesonbuild/modules/qt4.py new file mode 100644 index 0000000..b8948f7 --- /dev/null +++ b/mesonbuild/modules/qt4.py @@ -0,0 +1,28 @@ +# Copyright 2015 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .qt import QtBaseModule +from . 
import ModuleInfo + + +class Qt4Module(QtBaseModule): + + INFO = ModuleInfo('qt4') + + def __init__(self, interpreter): + QtBaseModule.__init__(self, interpreter, qt_version=4) + + +def initialize(*args, **kwargs): + return Qt4Module(*args, **kwargs) diff --git a/mesonbuild/modules/qt5.py b/mesonbuild/modules/qt5.py new file mode 100644 index 0000000..3933ea0 --- /dev/null +++ b/mesonbuild/modules/qt5.py @@ -0,0 +1,28 @@ +# Copyright 2015 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .qt import QtBaseModule +from . import ModuleInfo + + +class Qt5Module(QtBaseModule): + + INFO = ModuleInfo('qt5') + + def __init__(self, interpreter): + QtBaseModule.__init__(self, interpreter, qt_version=5) + + +def initialize(*args, **kwargs): + return Qt5Module(*args, **kwargs) diff --git a/mesonbuild/modules/qt6.py b/mesonbuild/modules/qt6.py new file mode 100644 index 0000000..66fc43f --- /dev/null +++ b/mesonbuild/modules/qt6.py @@ -0,0 +1,28 @@ +# Copyright 2020 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from .qt import QtBaseModule +from . import ModuleInfo + + +class Qt6Module(QtBaseModule): + + INFO = ModuleInfo('qt6', '0.57.0') + + def __init__(self, interpreter): + QtBaseModule.__init__(self, interpreter, qt_version=6) + + +def initialize(*args, **kwargs): + return Qt6Module(*args, **kwargs) diff --git a/mesonbuild/modules/rust.py b/mesonbuild/modules/rust.py new file mode 100644 index 0000000..42401e4 --- /dev/null +++ b/mesonbuild/modules/rust.py @@ -0,0 +1,250 @@ +# Copyright © 2020-2022 Intel Corporation + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +import os +import typing as T + +from . import ExtensionModule, ModuleReturnValue, ModuleInfo +from .. import mlog +from ..build import BothLibraries, BuildTarget, CustomTargetIndex, Executable, ExtractedObjects, GeneratedList, IncludeDirs, CustomTarget, StructuredSources +from ..dependencies import Dependency, ExternalLibrary +from ..interpreter.type_checking import DEPENDENCIES_KW, TEST_KWS, OUTPUT_KW, INCLUDE_DIRECTORIES, include_dir_string_new +from ..interpreterbase import ContainerTypeInfo, InterpreterException, KwargInfo, typed_kwargs, typed_pos_args, noPosargs +from ..mesonlib import File + +if T.TYPE_CHECKING: + from . 
import ModuleState + from ..interpreter import Interpreter + from ..interpreter import kwargs as _kwargs + from ..interpreter.interpreter import SourceInputs, SourceOutputs + from ..programs import ExternalProgram + + from typing_extensions import TypedDict + + class FuncTest(_kwargs.BaseTest): + + dependencies: T.List[T.Union[Dependency, ExternalLibrary]] + is_parallel: bool + + class FuncBindgen(TypedDict): + + args: T.List[str] + c_args: T.List[str] + include_directories: T.List[IncludeDirs] + input: T.List[SourceInputs] + output: str + dependencies: T.List[T.Union[Dependency, ExternalLibrary]] + + +class RustModule(ExtensionModule): + + """A module that holds helper functions for rust.""" + + INFO = ModuleInfo('rust', '0.57.0', stabilized='1.0.0') + + def __init__(self, interpreter: Interpreter) -> None: + super().__init__(interpreter) + self._bindgen_bin: T.Optional[ExternalProgram] = None + self.methods.update({ + 'test': self.test, + 'bindgen': self.bindgen, + }) + + @typed_pos_args('rust.test', str, BuildTarget) + @typed_kwargs( + 'rust.test', + *TEST_KWS, + DEPENDENCIES_KW, + KwargInfo('is_parallel', bool, default=False), + ) + def test(self, state: ModuleState, args: T.Tuple[str, BuildTarget], kwargs: FuncTest) -> ModuleReturnValue: + """Generate a rust test target from a given rust target. + + Rust puts it's unitests inside it's main source files, unlike most + languages that put them in external files. This means that normally + you have to define two separate targets with basically the same + arguments to get tests: + + ```meson + rust_lib_sources = [...] + rust_lib = static_library( + 'rust_lib', + rust_lib_sources, + ) + + rust_lib_test = executable( + 'rust_lib_test', + rust_lib_sources, + rust_args : ['--test'], + ) + + test( + 'rust_lib_test', + rust_lib_test, + protocol : 'rust', + ) + ``` + + This is all fine, but not very DRY. 
This method makes it much easier + to define rust tests: + + ```meson + rust = import('unstable-rust') + + rust_lib = static_library( + 'rust_lib', + [sources], + ) + + rust.test('rust_lib_test', rust_lib) + ``` + """ + name = args[0] + base_target: BuildTarget = args[1] + if not base_target.uses_rust(): + raise InterpreterException('Second positional argument to rustmod.test() must be a rust based target') + extra_args = kwargs['args'] + + # Delete any arguments we don't want passed + if '--test' in extra_args: + mlog.warning('Do not add --test to rustmod.test arguments') + extra_args.remove('--test') + if '--format' in extra_args: + mlog.warning('Do not add --format to rustmod.test arguments') + i = extra_args.index('--format') + # Also delete the argument to --format + del extra_args[i + 1] + del extra_args[i] + for i, a in enumerate(extra_args): + if isinstance(a, str) and a.startswith('--format='): + del extra_args[i] + break + + # We need to cast here, as currently these don't have protocol in them, but test itself does. 
+ tkwargs = T.cast('_kwargs.FuncTest', kwargs.copy()) + + tkwargs['args'] = extra_args + ['--test', '--format', 'pretty'] + tkwargs['protocol'] = 'rust' + + new_target_kwargs = base_target.kwargs.copy() + # Don't mutate the shallow copied list, instead replace it with a new + # one + new_target_kwargs['rust_args'] = new_target_kwargs.get('rust_args', []) + ['--test'] + new_target_kwargs['install'] = False + new_target_kwargs['dependencies'] = new_target_kwargs.get('dependencies', []) + kwargs['dependencies'] + + sources = T.cast('T.List[SourceOutputs]', base_target.sources.copy()) + sources.extend(base_target.generated) + + new_target = Executable( + name, base_target.subdir, state.subproject, base_target.for_machine, + sources, base_target.structured_sources, + base_target.objects, base_target.environment, base_target.compilers, + new_target_kwargs + ) + + test = self.interpreter.make_test( + self.interpreter.current_node, (name, new_target), tkwargs) + + return ModuleReturnValue(None, [new_target, test]) + + @noPosargs + @typed_kwargs( + 'rust.bindgen', + KwargInfo('c_args', ContainerTypeInfo(list, str), default=[], listify=True), + KwargInfo('args', ContainerTypeInfo(list, str), default=[], listify=True), + KwargInfo( + 'input', + ContainerTypeInfo(list, (File, GeneratedList, BuildTarget, BothLibraries, ExtractedObjects, CustomTargetIndex, CustomTarget, str), allow_empty=False), + default=[], + listify=True, + required=True, + ), + INCLUDE_DIRECTORIES.evolve(feature_validator=include_dir_string_new), + OUTPUT_KW, + DEPENDENCIES_KW.evolve(since='1.0.0'), + ) + def bindgen(self, state: ModuleState, args: T.List, kwargs: FuncBindgen) -> ModuleReturnValue: + """Wrapper around bindgen to simplify it's use. + + The main thing this simplifies is the use of `include_directory` + objects, instead of having to pass a plethora of `-I` arguments. 
+ """ + header, *_deps = self.interpreter.source_strings_to_files(kwargs['input']) + + # Split File and Target dependencies to add pass to CustomTarget + depends: T.List[SourceOutputs] = [] + depend_files: T.List[File] = [] + for d in _deps: + if isinstance(d, File): + depend_files.append(d) + else: + depends.append(d) + + clang_args: T.List[str] = [] + for i in state.process_include_dirs(kwargs['include_directories']): + # bindgen always uses clang, so it's safe to hardcode -I here + clang_args.extend([f'-I{x}' for x in i.to_string_list( + state.environment.get_source_dir(), state.environment.get_build_dir())]) + + for de in kwargs['dependencies']: + for i in de.get_include_dirs(): + clang_args.extend([f'-I{x}' for x in i.to_string_list( + state.environment.get_source_dir(), state.environment.get_build_dir())]) + clang_args.extend(de.get_all_compile_args()) + for s in de.get_sources(): + if isinstance(s, File): + depend_files.append(s) + elif isinstance(s, CustomTarget): + depends.append(s) + + if self._bindgen_bin is None: + self._bindgen_bin = state.find_program('bindgen') + + name: str + if isinstance(header, File): + name = header.fname + elif isinstance(header, (BuildTarget, BothLibraries, ExtractedObjects, StructuredSources)): + raise InterpreterException('bindgen source file must be a C header, not an object or build target') + else: + name = header.get_outputs()[0] + + cmd = self._bindgen_bin.get_command() + \ + [ + '@INPUT@', '--output', + os.path.join(state.environment.build_dir, '@OUTPUT@') + ] + \ + kwargs['args'] + ['--'] + kwargs['c_args'] + clang_args + \ + ['-MD', '-MQ', '@INPUT@', '-MF', '@DEPFILE@'] + + target = CustomTarget( + f'rustmod-bindgen-{name}'.replace('/', '_'), + state.subdir, + state.subproject, + state.environment, + cmd, + [header], + [kwargs['output']], + depfile='@PLAINNAME@.d', + extra_depends=depends, + depend_files=depend_files, + backend=state.backend, + ) + + return ModuleReturnValue([target], [target]) + + +def 
initialize(interp: Interpreter) -> RustModule: + return RustModule(interp) diff --git a/mesonbuild/modules/simd.py b/mesonbuild/modules/simd.py new file mode 100644 index 0000000..3ee0858 --- /dev/null +++ b/mesonbuild/modules/simd.py @@ -0,0 +1,88 @@ +# Copyright 2017 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +from .. import mesonlib, compilers, mlog +from .. import build + +from . import ExtensionModule, ModuleInfo + +class SimdModule(ExtensionModule): + + INFO = ModuleInfo('SIMD', '0.42.0', unstable=True) + + def __init__(self, interpreter): + super().__init__(interpreter) + # FIXME add Altivec and AVX512. 
+ self.isets = ('mmx', + 'sse', + 'sse2', + 'sse3', + 'ssse3', + 'sse41', + 'sse42', + 'avx', + 'avx2', + 'neon', + ) + self.methods.update({ + 'check': self.check, + }) + + def check(self, state, args, kwargs): + result = [] + if len(args) != 1: + raise mesonlib.MesonException('Check requires one argument, a name prefix for checks.') + prefix = args[0] + if not isinstance(prefix, str): + raise mesonlib.MesonException('Argument must be a string.') + if 'compiler' not in kwargs: + raise mesonlib.MesonException('Must specify compiler keyword') + if 'sources' in kwargs: + raise mesonlib.MesonException('SIMD module does not support the "sources" keyword') + basic_kwargs = {} + for key, value in kwargs.items(): + if key not in self.isets and key != 'compiler': + basic_kwargs[key] = value + compiler = kwargs['compiler'] + if not isinstance(compiler, compilers.compilers.Compiler): + raise mesonlib.MesonException('Compiler argument must be a compiler object.') + conf = build.ConfigurationData() + for iset in self.isets: + if iset not in kwargs: + continue + iset_fname = kwargs[iset] # Might also be an array or Files. static_library will validate. + args = compiler.get_instruction_set_args(iset) + if args is None: + mlog.log('Compiler supports %s:' % iset, mlog.red('NO')) + continue + if args: + if not compiler.has_multi_arguments(args, state.environment)[0]: + mlog.log('Compiler supports %s:' % iset, mlog.red('NO')) + continue + mlog.log('Compiler supports %s:' % iset, mlog.green('YES')) + conf.values['HAVE_' + iset.upper()] = ('1', 'Compiler supports %s.' 
% iset) + libname = prefix + '_' + iset + lib_kwargs = {'sources': iset_fname, + } + lib_kwargs.update(basic_kwargs) + langarg_key = compiler.get_language() + '_args' + old_lang_args = mesonlib.extract_as_list(lib_kwargs, langarg_key) + all_lang_args = old_lang_args + args + lib_kwargs[langarg_key] = all_lang_args + result.append(self.interpreter.func_static_lib(None, [libname], lib_kwargs)) + return [result, conf] + +def initialize(*args, **kwargs): + return SimdModule(*args, **kwargs) diff --git a/mesonbuild/modules/sourceset.py b/mesonbuild/modules/sourceset.py new file mode 100644 index 0000000..c35416e --- /dev/null +++ b/mesonbuild/modules/sourceset.py @@ -0,0 +1,307 @@ +# Copyright 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations +import typing as T + +from . import ExtensionModule, ModuleObject, MutableModuleObject, ModuleInfo +from .. import build +from .. import dependencies +from .. import mesonlib +from ..interpreterbase import ( + noPosargs, noKwargs, + InterpreterException, InvalidArguments, InvalidCode, FeatureNew, +) +from ..interpreterbase.decorators import ContainerTypeInfo, KwargInfo, typed_kwargs, typed_pos_args +from ..mesonlib import OrderedSet + +if T.TYPE_CHECKING: + from typing_extensions import TypedDict + + from . 
import ModuleState + from ..interpreter import Interpreter + from ..interpreterbase import TYPE_var, TYPE_kwargs + + class AddKwargs(TypedDict): + + when: T.List[T.Union[str, dependencies.Dependency]] + if_true: T.List[T.Union[mesonlib.FileOrString, build.GeneratedTypes, dependencies.Dependency]] + if_false: T.List[T.Union[mesonlib.FileOrString, build.GeneratedTypes]] + + class AddAllKw(TypedDict): + + when: T.List[T.Union[str, dependencies.Dependency]] + if_true: T.List[SourceSetImpl] + + class ApplyKw(TypedDict): + + strict: bool + + +_WHEN_KW: KwargInfo[T.List[T.Union[str, dependencies.Dependency]]] = KwargInfo( + 'when', + ContainerTypeInfo(list, (str, dependencies.Dependency)), + listify=True, + default=[], +) + + +class SourceSetRule(T.NamedTuple): + keys: T.List[str] + """Configuration keys that enable this rule if true""" + + deps: T.List[dependencies.Dependency] + """Dependencies that enable this rule if true""" + + sources: T.List[T.Union[mesonlib.FileOrString, build.GeneratedTypes]] + """Source files added when this rule's conditions are true""" + + extra_deps: T.List[dependencies.Dependency] + """Dependencies added when this rule's conditions are true, but + that do not make the condition false if they're absent.""" + + sourcesets: T.List[SourceSetImpl] + """Other sourcesets added when this rule's conditions are true""" + + if_false: T.List[T.Union[mesonlib.FileOrString, build.GeneratedTypes]] + """Source files added when this rule's conditions are false""" + + +class SourceFiles(T.NamedTuple): + sources: OrderedSet[T.Union[mesonlib.FileOrString, build.GeneratedTypes]] + deps: OrderedSet[dependencies.Dependency] + + +class SourceSet: + """Base class to avoid circular references. + + Because of error messages, this class is called SourceSet, and the actual + implementation is an Impl. 
+ """ + + +class SourceSetImpl(SourceSet, MutableModuleObject): + def __init__(self, interpreter: Interpreter): + super().__init__() + self.rules: T.List[SourceSetRule] = [] + self.subproject = interpreter.subproject + self.environment = interpreter.environment + self.subdir = interpreter.subdir + self.frozen = False + self.methods.update({ + 'add': self.add_method, + 'add_all': self.add_all_method, + 'all_sources': self.all_sources_method, + 'all_dependencies': self.all_dependencies_method, + 'apply': self.apply_method, + }) + + def check_source_files(self, args: T.Sequence[T.Union[mesonlib.FileOrString, build.GeneratedTypes, dependencies.Dependency]], + ) -> T.Tuple[T.List[T.Union[mesonlib.FileOrString, build.GeneratedTypes]], T.List[dependencies.Dependency]]: + sources: T.List[T.Union[mesonlib.FileOrString, build.GeneratedTypes]] = [] + deps: T.List[dependencies.Dependency] = [] + for x in args: + if isinstance(x, dependencies.Dependency): + deps.append(x) + else: + sources.append(x) + to_check: T.List[str] = [] + + # Get the actual output names to check + for s in sources: + if isinstance(s, str): + to_check.append(s) + elif isinstance(s, mesonlib.File): + to_check.append(s.fname) + else: + to_check.extend(s.get_outputs()) + mesonlib.check_direntry_issues(to_check) + return sources, deps + + def check_conditions(self, args: T.Sequence[T.Union[str, dependencies.Dependency]] + ) -> T.Tuple[T.List[str], T.List[dependencies.Dependency]]: + keys: T.List[str] = [] + deps: T.List[dependencies.Dependency] = [] + for x in args: + if isinstance(x, str): + keys.append(x) + else: + deps.append(x) + return keys, deps + + @typed_pos_args('sourceset.add', varargs=(str, mesonlib.File, build.GeneratedList, build.CustomTarget, build.CustomTargetIndex, dependencies.Dependency)) + @typed_kwargs( + 'sourceset.add', + _WHEN_KW, + KwargInfo( + 'if_true', + ContainerTypeInfo(list, (str, mesonlib.File, build.GeneratedList, build.CustomTarget, build.CustomTargetIndex, 
dependencies.Dependency)), + listify=True, + default=[], + ), + KwargInfo( + 'if_false', + ContainerTypeInfo(list, (str, mesonlib.File, build.GeneratedList, build.CustomTarget, build.CustomTargetIndex)), + listify=True, + default=[], + ), + ) + def add_method(self, state: ModuleState, + args: T.Tuple[T.List[T.Union[mesonlib.FileOrString, build.GeneratedTypes, dependencies.Dependency]]], + kwargs: AddKwargs) -> None: + if self.frozen: + raise InvalidCode('Tried to use \'add\' after querying the source set') + when = kwargs['when'] + if_true = kwargs['if_true'] + if_false = kwargs['if_false'] + if not any([when, if_true, if_false]): + if_true = args[0] + elif args[0]: + raise InterpreterException('add called with both positional and keyword arguments') + keys, dependencies = self.check_conditions(when) + sources, extra_deps = self.check_source_files(if_true) + if_false, _ = self.check_source_files(if_false) + self.rules.append(SourceSetRule(keys, dependencies, sources, extra_deps, [], if_false)) + + @typed_pos_args('sourceset.add_all', varargs=SourceSet) + @typed_kwargs( + 'sourceset.add_all', + _WHEN_KW, + KwargInfo( + 'if_true', + ContainerTypeInfo(list, SourceSet), + listify=True, + default=[], + ) + ) + def add_all_method(self, state: ModuleState, args: T.Tuple[T.List[SourceSetImpl]], + kwargs: AddAllKw) -> None: + if self.frozen: + raise InvalidCode('Tried to use \'add_all\' after querying the source set') + when = kwargs['when'] + if_true = kwargs['if_true'] + if not when and not if_true: + if_true = args[0] + elif args[0]: + raise InterpreterException('add_all called with both positional and keyword arguments') + keys, dependencies = self.check_conditions(when) + for s in if_true: + if not isinstance(s, SourceSetImpl): + raise InvalidCode('Arguments to \'add_all\' after the first must be source sets') + s.frozen = True + self.rules.append(SourceSetRule(keys, dependencies, [], [], if_true, [])) + + def collect(self, enabled_fn: T.Callable[[str], bool], + 
all_sources: bool, + into: T.Optional['SourceFiles'] = None) -> SourceFiles: + if not into: + into = SourceFiles(OrderedSet(), OrderedSet()) + for entry in self.rules: + if all(x.found() for x in entry.deps) and \ + all(enabled_fn(key) for key in entry.keys): + into.sources.update(entry.sources) + into.deps.update(entry.deps) + into.deps.update(entry.extra_deps) + for ss in entry.sourcesets: + ss.collect(enabled_fn, all_sources, into) + if not all_sources: + continue + into.sources.update(entry.if_false) + return into + + @noKwargs + @noPosargs + def all_sources_method(self, state: ModuleState, args: T.List[TYPE_var], kwargs: TYPE_kwargs + ) -> T.List[T.Union[mesonlib.FileOrString, build.GeneratedTypes]]: + self.frozen = True + files = self.collect(lambda x: True, True) + return list(files.sources) + + @noKwargs + @noPosargs + @FeatureNew('source_set.all_dependencies() method', '0.52.0') + def all_dependencies_method(self, state: ModuleState, args: T.List[TYPE_var], kwargs: TYPE_kwargs + ) -> T.List[dependencies.Dependency]: + self.frozen = True + files = self.collect(lambda x: True, True) + return list(files.deps) + + @typed_pos_args('sourceset.apply', (build.ConfigurationData, dict)) + @typed_kwargs('sourceset.apply', KwargInfo('strict', bool, default=True)) + def apply_method(self, state: ModuleState, args: T.Tuple[T.Union[build.ConfigurationData, T.Dict[str, TYPE_var]]], kwargs: ApplyKw) -> SourceFilesObject: + config_data = args[0] + self.frozen = True + strict = kwargs['strict'] + if isinstance(config_data, dict): + def _get_from_config_data(key: str) -> bool: + assert isinstance(config_data, dict), 'for mypy' + if strict and key not in config_data: + raise InterpreterException(f'Entry {key} not in configuration dictionary.') + return bool(config_data.get(key, False)) + else: + config_cache: T.Dict[str, bool] = {} + + def _get_from_config_data(key: str) -> bool: + assert isinstance(config_data, build.ConfigurationData), 'for mypy' + if key not in 
config_cache: + if key in config_data: + config_cache[key] = bool(config_data.get(key)[0]) + elif strict: + raise InvalidArguments(f'sourceset.apply: key "{key}" not in passed configuration, and strict set.') + else: + config_cache[key] = False + return config_cache[key] + + files = self.collect(_get_from_config_data, False) + res = SourceFilesObject(files) + return res + +class SourceFilesObject(ModuleObject): + def __init__(self, files: SourceFiles): + super().__init__() + self.files = files + self.methods.update({ + 'sources': self.sources_method, + 'dependencies': self.dependencies_method, + }) + + @noPosargs + @noKwargs + def sources_method(self, state: ModuleState, args: T.List[TYPE_var], kwargs: TYPE_kwargs + ) -> T.List[T.Union[mesonlib.FileOrString, build.GeneratedTypes]]: + return list(self.files.sources) + + @noPosargs + @noKwargs + def dependencies_method(self, state: ModuleState, args: T.List[TYPE_var], kwargs: TYPE_kwargs + ) -> T.List[dependencies.Dependency]: + return list(self.files.deps) + +class SourceSetModule(ExtensionModule): + + INFO = ModuleInfo('sourceset', '0.51.0') + + def __init__(self, interpreter: Interpreter): + super().__init__(interpreter) + self.methods.update({ + 'source_set': self.source_set, + }) + + @noKwargs + @noPosargs + def source_set(self, state: ModuleState, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> SourceSetImpl: + return SourceSetImpl(self.interpreter) + +def initialize(interp: Interpreter) -> SourceSetModule: + return SourceSetModule(interp) diff --git a/mesonbuild/modules/wayland.py b/mesonbuild/modules/wayland.py new file mode 100644 index 0000000..99f71d0 --- /dev/null +++ b/mesonbuild/modules/wayland.py @@ -0,0 +1,160 @@ +# Copyright 2022 Mark Bolhuis <mark@bolhuis.dev> + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations +import os +import typing as T + +from . import ExtensionModule, ModuleReturnValue, ModuleInfo +from ..build import CustomTarget +from ..interpreter.type_checking import NoneType, in_set_validator +from ..interpreterbase import typed_pos_args, typed_kwargs, KwargInfo +from ..mesonlib import File, MesonException + +if T.TYPE_CHECKING: + from typing_extensions import Literal, TypedDict + + from . import ModuleState + from ..build import Executable + from ..dependencies import Dependency + from ..interpreter import Interpreter + from ..programs import ExternalProgram + from ..mesonlib import FileOrString + + class ScanXML(TypedDict): + + public: bool + client: bool + server: bool + include_core_only: bool + + class FindProtocol(TypedDict): + + state: Literal['stable', 'staging', 'unstable'] + version: T.Optional[int] + +class WaylandModule(ExtensionModule): + + INFO = ModuleInfo('wayland', '0.62.0', unstable=True) + + def __init__(self, interpreter: Interpreter) -> None: + super().__init__(interpreter) + + self.protocols_dep: T.Optional[Dependency] = None + self.pkgdatadir: T.Optional[str] = None + self.scanner_bin: T.Optional[T.Union[ExternalProgram, Executable]] = None + + self.methods.update({ + 'scan_xml': self.scan_xml, + 'find_protocol': self.find_protocol, + }) + + @typed_pos_args('wayland.scan_xml', varargs=(str, File), min_varargs=1) + @typed_kwargs( + 'wayland.scan_xml', + KwargInfo('public', bool, default=False), + KwargInfo('client', bool, default=True), + KwargInfo('server', bool, default=False), + 
KwargInfo('include_core_only', bool, default=True, since='0.64.0'), + ) + def scan_xml(self, state: ModuleState, args: T.Tuple[T.List[FileOrString]], kwargs: ScanXML) -> ModuleReturnValue: + if self.scanner_bin is None: + # wayland-scanner from BUILD machine must have same version as wayland + # libraries from HOST machine. + dep = state.dependency('wayland-client') + self.scanner_bin = state.find_tool('wayland-scanner', 'wayland-scanner', 'wayland_scanner', + wanted=dep.version) + + scope = 'public' if kwargs['public'] else 'private' + # We have to cast because mypy can't deduce these are literals + sides = [i for i in T.cast("T.List[Literal['client', 'server']]", ['client', 'server']) if kwargs[i]] + if not sides: + raise MesonException('At least one of client or server keyword argument must be set to true.') + + xml_files = self.interpreter.source_strings_to_files(args[0]) + targets: T.List[CustomTarget] = [] + for xml_file in xml_files: + name = os.path.splitext(os.path.basename(xml_file.fname))[0] + + code = CustomTarget( + f'{name}-protocol', + state.subdir, + state.subproject, + state.environment, + [self.scanner_bin, f'{scope}-code', '@INPUT@', '@OUTPUT@'], + [xml_file], + [f'{name}-protocol.c'], + backend=state.backend, + ) + targets.append(code) + + for side in sides: + command = [self.scanner_bin, f'{side}-header', '@INPUT@', '@OUTPUT@'] + if kwargs['include_core_only']: + command.append('--include-core-only') + + header = CustomTarget( + f'{name}-{side}-protocol', + state.subdir, + state.subproject, + state.environment, + command, + [xml_file], + [f'{name}-{side}-protocol.h'], + backend=state.backend, + ) + targets.append(header) + + return ModuleReturnValue(targets, targets) + + @typed_pos_args('wayland.find_protocol', str) + @typed_kwargs( + 'wayland.find_protocol', + KwargInfo('state', str, default='stable', validator=in_set_validator({'stable', 'staging', 'unstable'})), + KwargInfo('version', (int, NoneType)), + ) + def find_protocol(self, state: 
ModuleState, args: T.Tuple[str], kwargs: FindProtocol) -> File: + base_name = args[0] + xml_state = kwargs['state'] + version = kwargs['version'] + + if xml_state != 'stable' and version is None: + raise MesonException(f'{xml_state} protocols require a version number.') + + if xml_state == 'stable' and version is not None: + raise MesonException('stable protocols do not require a version number.') + + if self.protocols_dep is None: + self.protocols_dep = state.dependency('wayland-protocols') + + if self.pkgdatadir is None: + self.pkgdatadir = self.protocols_dep.get_variable(pkgconfig='pkgdatadir', internal='pkgdatadir') + + if xml_state == 'stable': + xml_name = f'{base_name}.xml' + elif xml_state == 'staging': + xml_name = f'{base_name}-v{version}.xml' + else: + xml_name = f'{base_name}-unstable-v{version}.xml' + + path = os.path.join(self.pkgdatadir, xml_state, base_name, xml_name) + + if not os.path.exists(path): + raise MesonException(f'The file {path} does not exist.') + + return File.from_absolute_file(path) + + +def initialize(interpreter: Interpreter) -> WaylandModule: + return WaylandModule(interpreter) diff --git a/mesonbuild/modules/windows.py b/mesonbuild/modules/windows.py new file mode 100644 index 0000000..494cfbf --- /dev/null +++ b/mesonbuild/modules/windows.py @@ -0,0 +1,212 @@ +# Copyright 2015 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
# Implementation of the 'windows' Meson module: exposes
# windows.compile_resources(), which wraps the platform resource compiler
# (Microsoft rc.exe, binutils windres, or Wine wrc) in CustomTargets.

from __future__ import annotations

import enum
import os
import re
import typing as T


from . import ExtensionModule, ModuleInfo
from . import ModuleReturnValue
from .. import mesonlib, build
from .. import mlog
from ..interpreter.type_checking import DEPEND_FILES_KW, DEPENDS_KW, INCLUDE_DIRECTORIES
from ..interpreterbase.decorators import ContainerTypeInfo, FeatureNew, KwargInfo, typed_kwargs, typed_pos_args
from ..mesonlib import MachineChoice, MesonException
from ..programs import ExternalProgram

if T.TYPE_CHECKING:
    from . import ModuleState
    from ..compilers import Compiler
    from ..interpreter import Interpreter

    from typing_extensions import TypedDict

    class CompileResources(TypedDict):
        # Keyword arguments accepted by compile_resources(), already
        # validated/defaulted by the typed_kwargs decorator below.

        depend_files: T.List[mesonlib.FileOrString]
        depends: T.List[T.Union[build.BuildTarget, build.CustomTarget]]
        include_directories: T.List[T.Union[str, build.IncludeDirs]]
        args: T.List[str]

    class RcKwargs(TypedDict):
        # Shape of the CustomTarget keyword set for a resource compilation.
        # NOTE(review): not referenced within this chunk — presumably used
        # elsewhere in the project; confirm before removing.
        output: str
        input: T.List[T.Union[mesonlib.FileOrString, build.CustomTargetIndex]]
        depfile: T.Optional[str]
        depend_files: T.List[mesonlib.FileOrString]
        depends: T.List[T.Union[build.BuildTarget, build.CustomTarget]]
        command: T.List[T.Union[str, ExternalProgram]]

class ResourceCompilerType(enum.Enum):
    """Flavour of resource compiler detected by _find_resource_compiler()."""
    windres = 1  # GNU binutils windres
    rc = 2       # Microsoft rc.exe (MSVC-like toolchains)
    wrc = 3      # Wine resource compiler

class WindowsModule(ExtensionModule):
    """Meson extension module providing Windows-specific build helpers."""

    INFO = ModuleInfo('windows')

    def __init__(self, interpreter: 'Interpreter'):
        super().__init__(interpreter)
        # Cached (program, flavour) pair; detection spawns the candidate
        # tool, so _find_resource_compiler() memoizes its result here.
        self._rescomp: T.Optional[T.Tuple[ExternalProgram, ResourceCompilerType]] = None
        self.methods.update({
            'compile_resources': self.compile_resources,
        })

    def detect_compiler(self, compilers: T.Dict[str, 'Compiler']) -> 'Compiler':
        """Return the project's C compiler, falling back to C++.

        Raises MesonException if neither language is available.
        """
        for l in ('c', 'cpp'):
            if l in compilers:
                return compilers[l]
        raise MesonException('Resource compilation requires a C or C++ compiler.')

    def _find_resource_compiler(self, state: 'ModuleState') -> T.Tuple[ExternalProgram, ResourceCompilerType]:
        """Locate a resource compiler and determine its flavour.

        Lookup order: the machine-file / environment binary entry for
        'windres'; otherwise 'rc' for MSVC-like toolchains or 'windres'
        for everything else. The flavour is identified by probing the
        tool's version/usage output. The result is cached per module
        instance.

        Raises MesonException if no tool is found or its flavour cannot
        be determined.
        """
        # FIXME: Does not handle `native: true` executables, see
        # https://github.com/mesonbuild/meson/issues/1531
        # Take a parameter instead of the hardcoded definition below
        for_machine = MachineChoice.HOST

        if self._rescomp:
            return self._rescomp

        # Will try cross / native file and then env var
        rescomp = ExternalProgram.from_bin_list(state.environment, for_machine, 'windres')

        if not rescomp or not rescomp.found():
            comp = self.detect_compiler(state.environment.coredata.compilers[for_machine])
            # MSVC-like toolchains ship rc.exe; anything else is assumed to
            # have binutils windres available.
            if comp.id in {'msvc', 'clang-cl', 'intel-cl'}:
                rescomp = ExternalProgram('rc', silent=True)
            else:
                rescomp = ExternalProgram('windres', silent=True)

        if not rescomp.found():
            raise MesonException('Could not find Windows resource compiler')

        # Classify the tool by matching a line of its own version/usage
        # output (rc.exe has no --version; it prints a banner on /?).
        for (arg, match, rc_type) in [
                ('/?', '^.*Microsoft.*Resource Compiler.*$', ResourceCompilerType.rc),
                ('--version', '^.*GNU windres.*$', ResourceCompilerType.windres),
                ('--version', '^.*Wine Resource Compiler.*$', ResourceCompilerType.wrc),
        ]:
            p, o, e = mesonlib.Popen_safe(rescomp.get_command() + [arg])
            m = re.search(match, o, re.MULTILINE)
            if m:
                mlog.log('Windows resource compiler: %s' % m.group())
                self._rescomp = (rescomp, rc_type)
                break
        else:
            raise MesonException('Could not determine type of Windows resource compiler')

        return self._rescomp

    @typed_pos_args('windows.compile_resources', varargs=(str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex), min_varargs=1)
    @typed_kwargs(
        'windows.compile_resources',
        DEPEND_FILES_KW.evolve(since='0.47.0'),
        DEPENDS_KW.evolve(since='0.47.0'),
        INCLUDE_DIRECTORIES,
        KwargInfo('args', ContainerTypeInfo(list, str), default=[], listify=True),
    )
    def compile_resources(self, state: 'ModuleState',
                          args: T.Tuple[T.List[T.Union[str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex]]],
                          kwargs: 'CompileResources') -> ModuleReturnValue:
        """Compile Windows .rc resource scripts into linkable artifacts.

        Creates one CustomTarget per input source (and per output of any
        multi-output CustomTarget input), producing either a .res file
        (rc.exe) or a COFF object (.o; windres/wrc) for linking.
        """
        extra_args = kwargs['args'].copy()
        wrc_depend_files = kwargs['depend_files']
        wrc_depends = kwargs['depends']
        for d in wrc_depends:
            if isinstance(d, build.CustomTarget):
                # Add the dependency's output directory to the include path
                # so the .rc source can #include headers it generates.
                extra_args += state.get_include_args([
                    build.IncludeDirs('', [], False, [os.path.join('@BUILD_ROOT@', self.interpreter.backend.get_target_dir(d))])
                ])
        extra_args += state.get_include_args(kwargs['include_directories'])

        rescomp, rescomp_type = self._find_resource_compiler(state)
        if rescomp_type == ResourceCompilerType.rc:
            # RC is used to generate .res files, a special binary resource
            # format, which can be passed directly to LINK (apparently LINK uses
            # CVTRES internally to convert this to a COFF object)
            suffix = 'res'
            res_args = extra_args + ['/nologo', '/fo@OUTPUT@', '@INPUT@']
        elif rescomp_type == ResourceCompilerType.windres:
            # ld only supports object files, so windres is used to generate a
            # COFF object
            suffix = 'o'
            res_args = extra_args + ['@INPUT@', '@OUTPUT@']

            m = 'Argument {!r} has a space which may not work with windres due to ' \
                'a MinGW bug: https://sourceware.org/bugzilla/show_bug.cgi?id=4933'
            for arg in extra_args:
                if ' ' in arg:
                    mlog.warning(m.format(arg), fatal=False)
        else:
            # wrc: also emits an object file, but takes an explicit -o flag.
            suffix = 'o'
            res_args = extra_args + ['@INPUT@', '-o', '@OUTPUT@']

        res_targets: T.List[build.CustomTarget] = []

        def get_names() -> T.Iterable[T.Tuple[str, str, T.Union[str, mesonlib.File, build.CustomTargetIndex]]]:
            # Yield one (unique name, target name, source) triple per
            # eventual CustomTarget, expanding multi-output CustomTargets
            # into one triple per output.
            for src in args[0]:
                if isinstance(src, str):
                    yield os.path.join(state.subdir, src), src, src
                elif isinstance(src, mesonlib.File):
                    yield src.relative_name(), src.fname, src
                elif isinstance(src, build.CustomTargetIndex):
                    FeatureNew.single_use('windows.compile_resource CustomTargetIndex in positional arguments', '0.61.0',
                                          state.subproject, location=state.current_node)
                    # This dance avoids a case where two indexes of the same
                    # target are given as separate arguments.
                    yield (f'{src.get_id()}_{src.target.get_outputs().index(src.output)}',
                           f'windows_compile_resources_{src.get_filename()}', src)
                else:
                    if len(src.get_outputs()) > 1:
                        FeatureNew.single_use('windows.compile_resource CustomTarget with multiple outputs in positional arguments',
                                              '0.61.0', state.subproject, location=state.current_node)
                    for i, out in enumerate(src.get_outputs()):
                        # Chances are that src.get_filename() is already the name of that
                        # target, add a prefix to avoid name clash.
                        yield f'{src.get_id()}_{i}', f'windows_compile_resources_{i}_{out}', src[i]

        for name, name_formatted, src in get_names():
            # Path separators are not allowed in target names
            name = name.replace('/', '_').replace('\\', '_').replace(':', '_')
            name_formatted = name_formatted.replace('/', '_').replace('\\', '_').replace(':', '_')
            output = f'{name}_@BASENAME@.{suffix}'
            command: T.List[T.Union[str, ExternalProgram]] = []
            command.append(rescomp)
            command.extend(res_args)
            depfile: T.Optional[str] = None
            # instruct binutils windres to generate a preprocessor depfile
            if rescomp_type == ResourceCompilerType.windres:
                depfile = f'{output}.d'
                command.extend(['--preprocessor-arg=-MD',
                                '--preprocessor-arg=-MQ@OUTPUT@',
                                '--preprocessor-arg=-MF@DEPFILE@'])

            res_targets.append(build.CustomTarget(
                name_formatted,
                state.subdir,
                state.subproject,
                state.environment,
                command,
                [src],
                [output],
                depfile=depfile,
                depend_files=wrc_depend_files,
                extra_depends=wrc_depends,
            ))

        return ModuleReturnValue(res_targets, [res_targets])

def initialize(interp: 'Interpreter') -> WindowsModule:
    """Entry point invoked by Meson for import('windows')."""
    return WindowsModule(interp)