diff options
Diffstat (limited to 'mesonbuild/dependencies')
24 files changed, 7642 insertions, 0 deletions
diff --git a/mesonbuild/dependencies/__init__.py b/mesonbuild/dependencies/__init__.py new file mode 100644 index 0000000..b6fdb18 --- /dev/null +++ b/mesonbuild/dependencies/__init__.py @@ -0,0 +1,286 @@ +# Copyright 2017 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .boost import BoostDependency +from .cuda import CudaDependency +from .hdf5 import hdf5_factory +from .base import Dependency, InternalDependency, ExternalDependency, NotFoundDependency, MissingCompiler +from .base import ( + ExternalLibrary, DependencyException, DependencyMethods, + BuiltinDependency, SystemDependency, get_leaf_external_dependencies) +from .cmake import CMakeDependency +from .configtool import ConfigToolDependency +from .dub import DubDependency +from .framework import ExtraFrameworkDependency +from .pkgconfig import PkgConfigDependency +from .factory import DependencyFactory +from .detect import find_external_dependency, get_dep_identifier, packages, _packages_accept_language +from .dev import ( + ValgrindDependency, JNISystemDependency, JDKSystemDependency, gmock_factory, gtest_factory, + llvm_factory, zlib_factory) +from .coarrays import coarray_factory +from .mpi import mpi_factory +from .scalapack import scalapack_factory +from .misc import ( + BlocksDependency, OpenMPDependency, cups_factory, curses_factory, gpgme_factory, + libgcrypt_factory, libwmf_factory, netcdf_factory, pcap_factory, python3_factory, + shaderc_factory, threads_factory, 
ThreadDependency, iconv_factory, intl_factory, + dl_factory, openssl_factory, libcrypto_factory, libssl_factory, +) +from .platform import AppleFrameworks +from .qt import qt4_factory, qt5_factory, qt6_factory +from .ui import GnuStepDependency, WxDependency, gl_factory, sdl2_factory, vulkan_factory + +__all__ = [ + 'Dependency', + 'InternalDependency', + 'ExternalDependency', + 'SystemDependency', + 'BuiltinDependency', + 'NotFoundDependency', + 'ExternalLibrary', + 'DependencyException', + 'DependencyMethods', + 'MissingCompiler', + + 'CMakeDependency', + 'ConfigToolDependency', + 'DubDependency', + 'ExtraFrameworkDependency', + 'PkgConfigDependency', + + 'DependencyFactory', + + 'ThreadDependency', + + 'find_external_dependency', + 'get_dep_identifier', + 'get_leaf_external_dependencies', +] + +"""Dependency representations and discovery logic. + +Meson attempts to largely abstract away dependency discovery information, and +to encapsulate that logic itself so that the DSL doesn't have too much direct +information. There are some cases where this is impossible/undesirable, such +as the `get_variable()` method. + +Meson has four primary dependency types: + 1. pkg-config + 2. apple frameworks + 3. CMake + 4. system + +Plus a few more niche ones. + +When a user calls `dependency('foo')` Meson creates a list of candidates, and +tries those candidates in order to find one that matches the criteria +provided by the user (such as version requirements, or optional components +that are required.) + +Except to work around bugs or handle odd corner cases, pkg-config and CMake +generally just work™, though there are exceptions. Most of this package is +concerned with dependencies that don't (always) provide CMake and/or +pkg-config files. + +For these cases one needs to write a `system` dependency. 
These dependencies +descend directly from `ExternalDependency`, in their constructor they +manually set up the necessary link and compile args (and additional +dependencies as necessary). + +For example, imagine a dependency called Foo, it uses an environment variable +called `$FOO_ROOT` to point to its install root, which looks like this: +```txt +$FOOROOT +→ include/ +→ lib/ +``` +To use Foo, you need its include directory, and you need to link to +`lib/libfoo.ext`. + +You could write code that looks like: + +```python +class FooSystemDependency(ExternalDependency): + + def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]): + super().__init__(name, environment, kwargs) + root = os.environ.get('FOO_ROOT') + if root is None: + mlog.debug('$FOO_ROOT is unset.') + self.is_found = False + return + + lib = self.clib_compiler.find_library('foo', environment, [os.path.join(root, 'lib')]) + if lib is None: + mlog.debug('Could not find lib.') + self.is_found = False + return + + self.compile_args.append(f'-I{os.path.join(root, "include")}') + self.link_args.append(lib) + self.is_found = True +``` + +This code will look for `FOO_ROOT` in the environment, handle `FOO_ROOT` being +undefined gracefully, then set its `compile_args` and `link_args` gracefully. +It will also gracefully handle not finding the required lib (hopefully that +doesn't happen, but it could if, for example, the lib is only static and +shared linking is requested). + +There are a couple of things about this that still aren't ideal. For one, we +don't want to be reading random environment variables at this point. Those +should actually be added to `envconfig.Properties` and read in +`environment.Environment._set_default_properties_from_env` (see how +`BOOST_ROOT` is handled). We can also handle the `static` keyword and the +`prefer_static` built-in option. 
So now that becomes: + +```python +class FooSystemDependency(ExternalDependency): + + def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]): + super().__init__(name, environment, kwargs) + root = environment.properties[self.for_machine].foo_root + if root is None: + mlog.debug('foo_root is unset.') + self.is_found = False + return + + get_option = environment.coredata.get_option + static_opt = kwargs.get('static', get_option(Mesonlib.OptionKey('prefer_static')) + static = Mesonlib.LibType.STATIC if static_opt else Mesonlib.LibType.SHARED + lib = self.clib_compiler.find_library( + 'foo', environment, [os.path.join(root, 'lib')], libtype=static) + if lib is None: + mlog.debug('Could not find lib.') + self.is_found = False + return + + self.compile_args.append(f'-I{os.path.join(root, "include")}') + self.link_args.append(lib) + self.is_found = True +``` + +This is nicer in a couple of ways. First we can properly cross compile as we +are allowed to set `FOO_ROOT` for both the build and host machines, it also +means that users can override this in their machine files, and if that +environment variables changes during a Meson reconfigure Meson won't re-read +it, this is important for reproducibility. Finally, Meson will figure out +whether it should be finding `libfoo.so` or `libfoo.a` (or the platform +specific names). Things are looking pretty good now, so it can be added to +the `packages` dict below: + +```python +packages.update({ + 'foo': FooSystemDependency, +}) +``` + +Now, what if foo also provides pkg-config, but it's only shipped on Unices, +or only included in very recent versions of the dependency? We can use the +`DependencyFactory` class: + +```python +foo_factory = DependencyFactory( + 'foo', + [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM], + system_class=FooSystemDependency, +) +``` + +This is a helper function that will generate a default pkg-config based +dependency, and use the `FooSystemDependency` as well. 
It can also handle +custom finders for pkg-config and cmake based dependencies that need some +extra help. You would then add the `foo_factory` to packages instead of +`FooSystemDependency`: + +```python +packages.update({ + 'foo': foo_factory, +}) +``` + +If you have a dependency that is very complicated, (such as having multiple +implementations) you may need to write your own factory function. There are a +number of examples in this package. + +_Note_ before we moved to factory functions it was common to use an +`ExternalDependency` class that would instantiate different types of +dependencies and hold the one it found. There are a number of drawbacks to +this approach, and no new dependencies should do this. +""" + +# This is a dict where the keys should be strings, and the values must be one +# of: +# - An ExternalDependency subclass +# - A DependencyFactory object +# - A callable with a signature of (Environment, MachineChoice, Dict[str, Any]) -> List[Callable[[], ExternalDependency]] +packages.update({ + # From dev: + 'gtest': gtest_factory, + 'gmock': gmock_factory, + 'llvm': llvm_factory, + 'valgrind': ValgrindDependency, + 'zlib': zlib_factory, + 'jni': JNISystemDependency, + 'jdk': JDKSystemDependency, + + 'boost': BoostDependency, + 'cuda': CudaDependency, + + # per-file + 'coarray': coarray_factory, + 'hdf5': hdf5_factory, + 'mpi': mpi_factory, + 'scalapack': scalapack_factory, + + # From misc: + 'blocks': BlocksDependency, + 'curses': curses_factory, + 'netcdf': netcdf_factory, + 'openmp': OpenMPDependency, + 'python3': python3_factory, + 'threads': threads_factory, + 'pcap': pcap_factory, + 'cups': cups_factory, + 'libwmf': libwmf_factory, + 'libgcrypt': libgcrypt_factory, + 'gpgme': gpgme_factory, + 'shaderc': shaderc_factory, + 'iconv': iconv_factory, + 'intl': intl_factory, + 'dl': dl_factory, + 'openssl': openssl_factory, + 'libcrypto': libcrypto_factory, + 'libssl': libssl_factory, + + # From platform: + 'appleframeworks': AppleFrameworks, + + # 
From ui: + 'gl': gl_factory, + 'gnustep': GnuStepDependency, + 'qt4': qt4_factory, + 'qt5': qt5_factory, + 'qt6': qt6_factory, + 'sdl2': sdl2_factory, + 'wxwidgets': WxDependency, + 'vulkan': vulkan_factory, +}) +_packages_accept_language.update({ + 'hdf5', + 'mpi', + 'netcdf', + 'openmp', +}) diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py new file mode 100644 index 0000000..d826026 --- /dev/null +++ b/mesonbuild/dependencies/base.py @@ -0,0 +1,635 @@ +# Copyright 2013-2018 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This file contains the detection logic for external dependencies. +# Custom logic for several other packages are in separate files. + +from __future__ import annotations +import copy +import os +import collections +import itertools +import typing as T +from enum import Enum + +from .. 
import mlog, mesonlib +from ..compilers import clib_langs +from ..mesonlib import LibType, MachineChoice, MesonException, HoldableObject, OptionKey +from ..mesonlib import version_compare_many +#from ..interpreterbase import FeatureDeprecated, FeatureNew + +if T.TYPE_CHECKING: + from .._typing import ImmutableListProtocol + from ..build import StructuredSources + from ..compilers.compilers import Compiler + from ..environment import Environment + from ..interpreterbase import FeatureCheckBase + from ..build import ( + CustomTarget, IncludeDirs, CustomTargetIndex, LibTypes, + StaticLibrary + ) + from ..mesonlib import FileOrString + + +class DependencyException(MesonException): + '''Exceptions raised while trying to find dependencies''' + + +class MissingCompiler: + """Represent a None Compiler - when no tool chain is found. + replacing AttributeError with DependencyException""" + + def __getattr__(self, item: str) -> T.Any: + if item.startswith('__'): + raise AttributeError() + raise DependencyException('no toolchain found') + + def __bool__(self) -> bool: + return False + + +class DependencyMethods(Enum): + # Auto means to use whatever dependency checking mechanisms in whatever order meson thinks is best. + AUTO = 'auto' + PKGCONFIG = 'pkg-config' + CMAKE = 'cmake' + # The dependency is provided by the standard library and does not need to be linked + BUILTIN = 'builtin' + # Just specify the standard link arguments, assuming the operating system provides the library. + SYSTEM = 'system' + # This is only supported on OSX - search the frameworks directory by name. + EXTRAFRAMEWORK = 'extraframework' + # Detect using the sysconfig module. 
+ SYSCONFIG = 'sysconfig' + # Specify using a "program"-config style tool + CONFIG_TOOL = 'config-tool' + # For backwards compatibility + SDLCONFIG = 'sdlconfig' + CUPSCONFIG = 'cups-config' + PCAPCONFIG = 'pcap-config' + LIBWMFCONFIG = 'libwmf-config' + QMAKE = 'qmake' + # Misc + DUB = 'dub' + + +DependencyTypeName = T.NewType('DependencyTypeName', str) + + +class Dependency(HoldableObject): + + @classmethod + def _process_include_type_kw(cls, kwargs: T.Dict[str, T.Any]) -> str: + if 'include_type' not in kwargs: + return 'preserve' + if not isinstance(kwargs['include_type'], str): + raise DependencyException('The include_type kwarg must be a string type') + if kwargs['include_type'] not in ['preserve', 'system', 'non-system']: + raise DependencyException("include_type may only be one of ['preserve', 'system', 'non-system']") + return kwargs['include_type'] + + def __init__(self, type_name: DependencyTypeName, kwargs: T.Dict[str, T.Any]) -> None: + self.name = "null" + self.version: T.Optional[str] = None + self.language: T.Optional[str] = None # None means C-like + self.is_found = False + self.type_name = type_name + self.compile_args: T.List[str] = [] + self.link_args: T.List[str] = [] + # Raw -L and -l arguments without manual library searching + # If None, self.link_args will be used + self.raw_link_args: T.Optional[T.List[str]] = None + self.sources: T.List[T.Union['FileOrString', 'CustomTarget', 'StructuredSources']] = [] + self.include_type = self._process_include_type_kw(kwargs) + self.ext_deps: T.List[Dependency] = [] + self.d_features: T.DefaultDict[str, T.List[T.Any]] = collections.defaultdict(list) + self.featurechecks: T.List['FeatureCheckBase'] = [] + self.feature_since: T.Optional[T.Tuple[str, str]] = None + + def __repr__(self) -> str: + return f'<{self.__class__.__name__} {self.name}: {self.is_found}>' + + def is_built(self) -> bool: + return False + + def summary_value(self) -> T.Union[str, mlog.AnsiDecorator, mlog.AnsiText]: + if not 
self.found(): + return mlog.red('NO') + if not self.version: + return mlog.green('YES') + return mlog.AnsiText(mlog.green('YES'), ' ', mlog.cyan(self.version)) + + def get_compile_args(self) -> T.List[str]: + if self.include_type == 'system': + converted = [] + for i in self.compile_args: + if i.startswith('-I') or i.startswith('/I'): + converted += ['-isystem' + i[2:]] + else: + converted += [i] + return converted + if self.include_type == 'non-system': + converted = [] + for i in self.compile_args: + if i.startswith('-isystem'): + converted += ['-I' + i[8:]] + else: + converted += [i] + return converted + return self.compile_args + + def get_all_compile_args(self) -> T.List[str]: + """Get the compile arguments from this dependency and it's sub dependencies.""" + return list(itertools.chain(self.get_compile_args(), + *(d.get_all_compile_args() for d in self.ext_deps))) + + def get_link_args(self, language: T.Optional[str] = None, raw: bool = False) -> T.List[str]: + if raw and self.raw_link_args is not None: + return self.raw_link_args + return self.link_args + + def get_all_link_args(self) -> T.List[str]: + """Get the link arguments from this dependency and it's sub dependencies.""" + return list(itertools.chain(self.get_link_args(), + *(d.get_all_link_args() for d in self.ext_deps))) + + def found(self) -> bool: + return self.is_found + + def get_sources(self) -> T.List[T.Union['FileOrString', 'CustomTarget', 'StructuredSources']]: + """Source files that need to be added to the target. 
+ As an example, gtest-all.cc when using GTest.""" + return self.sources + + def get_name(self) -> str: + return self.name + + def get_version(self) -> str: + if self.version: + return self.version + else: + return 'unknown' + + def get_include_dirs(self) -> T.List['IncludeDirs']: + return [] + + def get_include_type(self) -> str: + return self.include_type + + def get_exe_args(self, compiler: 'Compiler') -> T.List[str]: + return [] + + def get_pkgconfig_variable(self, variable_name: str, + define_variable: 'ImmutableListProtocol[str]', + default: T.Optional[str]) -> str: + raise DependencyException(f'{self.name!r} is not a pkgconfig dependency') + + def get_configtool_variable(self, variable_name: str) -> str: + raise DependencyException(f'{self.name!r} is not a config-tool dependency') + + def get_partial_dependency(self, *, compile_args: bool = False, + link_args: bool = False, links: bool = False, + includes: bool = False, sources: bool = False) -> 'Dependency': + """Create a new dependency that contains part of the parent dependency. + + The following options can be inherited: + links -- all link_with arguments + includes -- all include_directory and -I/-isystem calls + sources -- any source, header, or generated sources + compile_args -- any compile args + link_args -- any link args + + Additionally the new dependency will have the version parameter of it's + parent (if any) and the requested values of any dependencies will be + added as well. + """ + raise RuntimeError('Unreachable code in partial_dependency called') + + def _add_sub_dependency(self, deplist: T.Iterable[T.Callable[[], 'Dependency']]) -> bool: + """Add an internal dependency from a list of possible dependencies. + + This method is intended to make it easier to add additional + dependencies to another dependency internally. + + Returns true if the dependency was successfully added, false + otherwise. 
+ """ + for d in deplist: + dep = d() + if dep.is_found: + self.ext_deps.append(dep) + return True + return False + + def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None, + configtool: T.Optional[str] = None, internal: T.Optional[str] = None, + default_value: T.Optional[str] = None, + pkgconfig_define: T.Optional[T.List[str]] = None) -> str: + if default_value is not None: + return default_value + raise DependencyException(f'No default provided for dependency {self!r}, which is not pkg-config, cmake, or config-tool based.') + + def generate_system_dependency(self, include_type: str) -> 'Dependency': + new_dep = copy.deepcopy(self) + new_dep.include_type = self._process_include_type_kw({'include_type': include_type}) + return new_dep + +class InternalDependency(Dependency): + def __init__(self, version: str, incdirs: T.List['IncludeDirs'], compile_args: T.List[str], + link_args: T.List[str], + libraries: T.List[LibTypes], + whole_libraries: T.List[T.Union[StaticLibrary, CustomTarget, CustomTargetIndex]], + sources: T.Sequence[T.Union[FileOrString, CustomTarget, StructuredSources]], + ext_deps: T.List[Dependency], variables: T.Dict[str, str], + d_module_versions: T.List[T.Union[str, int]], d_import_dirs: T.List['IncludeDirs']): + super().__init__(DependencyTypeName('internal'), {}) + self.version = version + self.is_found = True + self.include_directories = incdirs + self.compile_args = compile_args + self.link_args = link_args + self.libraries = libraries + self.whole_libraries = whole_libraries + self.sources = list(sources) + self.ext_deps = ext_deps + self.variables = variables + if d_module_versions: + self.d_features['versions'] = d_module_versions + if d_import_dirs: + self.d_features['import_dirs'] = d_import_dirs + + def __deepcopy__(self, memo: T.Dict[int, 'InternalDependency']) -> 'InternalDependency': + result = self.__class__.__new__(self.__class__) + assert isinstance(result, InternalDependency) + memo[id(self)] = 
result + for k, v in self.__dict__.items(): + if k in {'libraries', 'whole_libraries'}: + setattr(result, k, copy.copy(v)) + else: + setattr(result, k, copy.deepcopy(v, memo)) + return result + + def summary_value(self) -> mlog.AnsiDecorator: + # Omit the version. Most of the time it will be just the project + # version, which is uninteresting in the summary. + return mlog.green('YES') + + def is_built(self) -> bool: + if self.sources or self.libraries or self.whole_libraries: + return True + return any(d.is_built() for d in self.ext_deps) + + def get_pkgconfig_variable(self, variable_name: str, + define_variable: 'ImmutableListProtocol[str]', + default: T.Optional[str]) -> str: + raise DependencyException('Method "get_pkgconfig_variable()" is ' + 'invalid for an internal dependency') + + def get_configtool_variable(self, variable_name: str) -> str: + raise DependencyException('Method "get_configtool_variable()" is ' + 'invalid for an internal dependency') + + def get_partial_dependency(self, *, compile_args: bool = False, + link_args: bool = False, links: bool = False, + includes: bool = False, sources: bool = False) -> 'InternalDependency': + final_compile_args = self.compile_args.copy() if compile_args else [] + final_link_args = self.link_args.copy() if link_args else [] + final_libraries = self.libraries.copy() if links else [] + final_whole_libraries = self.whole_libraries.copy() if links else [] + final_sources = self.sources.copy() if sources else [] + final_includes = self.include_directories.copy() if includes else [] + final_deps = [d.get_partial_dependency( + compile_args=compile_args, link_args=link_args, links=links, + includes=includes, sources=sources) for d in self.ext_deps] + return InternalDependency( + self.version, final_includes, final_compile_args, + final_link_args, final_libraries, final_whole_libraries, + final_sources, final_deps, self.variables, [], []) + + def get_include_dirs(self) -> T.List['IncludeDirs']: + return 
self.include_directories + + def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None, + configtool: T.Optional[str] = None, internal: T.Optional[str] = None, + default_value: T.Optional[str] = None, + pkgconfig_define: T.Optional[T.List[str]] = None) -> str: + val = self.variables.get(internal, default_value) + if val is not None: + return val + raise DependencyException(f'Could not get an internal variable and no default provided for {self!r}') + + def generate_link_whole_dependency(self) -> Dependency: + from ..build import SharedLibrary, CustomTarget, CustomTargetIndex + new_dep = copy.deepcopy(self) + for x in new_dep.libraries: + if isinstance(x, SharedLibrary): + raise MesonException('Cannot convert a dependency to link_whole when it contains a ' + 'SharedLibrary') + elif isinstance(x, (CustomTarget, CustomTargetIndex)) and x.links_dynamically(): + raise MesonException('Cannot convert a dependency to link_whole when it contains a ' + 'CustomTarget or CustomTargetIndex which is a shared library') + + # Mypy doesn't understand that the above is a TypeGuard + new_dep.whole_libraries += T.cast('T.List[T.Union[StaticLibrary, CustomTarget, CustomTargetIndex]]', + new_dep.libraries) + new_dep.libraries = [] + return new_dep + +class HasNativeKwarg: + def __init__(self, kwargs: T.Dict[str, T.Any]): + self.for_machine = self.get_for_machine_from_kwargs(kwargs) + + def get_for_machine_from_kwargs(self, kwargs: T.Dict[str, T.Any]) -> MachineChoice: + return MachineChoice.BUILD if kwargs.get('native', False) else MachineChoice.HOST + +class ExternalDependency(Dependency, HasNativeKwarg): + def __init__(self, type_name: DependencyTypeName, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None): + Dependency.__init__(self, type_name, kwargs) + self.env = environment + self.name = type_name # default + self.is_found = False + self.language = language + version_reqs = kwargs.get('version', None) + if 
isinstance(version_reqs, str): + version_reqs = [version_reqs] + self.version_reqs: T.Optional[T.List[str]] = version_reqs + self.required = kwargs.get('required', True) + self.silent = kwargs.get('silent', False) + self.static = kwargs.get('static', self.env.coredata.get_option(OptionKey('prefer_static'))) + self.libtype = LibType.STATIC if self.static else LibType.PREFER_SHARED + if not isinstance(self.static, bool): + raise DependencyException('Static keyword must be boolean') + # Is this dependency to be run on the build platform? + HasNativeKwarg.__init__(self, kwargs) + self.clib_compiler = detect_compiler(self.name, environment, self.for_machine, self.language) + + def get_compiler(self) -> T.Union['MissingCompiler', 'Compiler']: + return self.clib_compiler + + def get_partial_dependency(self, *, compile_args: bool = False, + link_args: bool = False, links: bool = False, + includes: bool = False, sources: bool = False) -> Dependency: + new = copy.copy(self) + if not compile_args: + new.compile_args = [] + if not link_args: + new.link_args = [] + if not sources: + new.sources = [] + if not includes: + pass # TODO maybe filter compile_args? + if not sources: + new.sources = [] + + return new + + def log_details(self) -> str: + return '' + + def log_info(self) -> str: + return '' + + @staticmethod + def log_tried() -> str: + return '' + + # Check if dependency version meets the requirements + def _check_version(self) -> None: + if not self.is_found: + return + + if self.version_reqs: + # an unknown version can never satisfy any requirement + if not self.version: + self.is_found = False + found_msg: mlog.TV_LoggableList = [] + found_msg += ['Dependency', mlog.bold(self.name), 'found:'] + found_msg += [mlog.red('NO'), 'unknown version, but need:', self.version_reqs] + mlog.log(*found_msg) + + if self.required: + m = f'Unknown version, but need {self.version_reqs!r}.' 
+ raise DependencyException(m) + + else: + (self.is_found, not_found, found) = \ + version_compare_many(self.version, self.version_reqs) + if not self.is_found: + found_msg = ['Dependency', mlog.bold(self.name), 'found:'] + found_msg += [mlog.red('NO'), + 'found', mlog.normal_cyan(self.version), 'but need:', + mlog.bold(', '.join([f"'{e}'" for e in not_found]))] + if found: + found_msg += ['; matched:', + ', '.join([f"'{e}'" for e in found])] + mlog.log(*found_msg) + + if self.required: + m = 'Invalid version, need {!r} {!r} found {!r}.' + raise DependencyException(m.format(self.name, not_found, self.version)) + return + + +class NotFoundDependency(Dependency): + def __init__(self, name: str, environment: 'Environment') -> None: + super().__init__(DependencyTypeName('not-found'), {}) + self.env = environment + self.name = name + self.is_found = False + + def get_partial_dependency(self, *, compile_args: bool = False, + link_args: bool = False, links: bool = False, + includes: bool = False, sources: bool = False) -> 'NotFoundDependency': + return copy.copy(self) + + +class ExternalLibrary(ExternalDependency): + def __init__(self, name: str, link_args: T.List[str], environment: 'Environment', + language: str, silent: bool = False) -> None: + super().__init__(DependencyTypeName('library'), environment, {}, language=language) + self.name = name + self.language = language + self.is_found = False + if link_args: + self.is_found = True + self.link_args = link_args + if not silent: + if self.is_found: + mlog.log('Library', mlog.bold(name), 'found:', mlog.green('YES')) + else: + mlog.log('Library', mlog.bold(name), 'found:', mlog.red('NO')) + + def get_link_args(self, language: T.Optional[str] = None, raw: bool = False) -> T.List[str]: + ''' + External libraries detected using a compiler must only be used with + compatible code. For instance, Vala libraries (.vapi files) cannot be + used with C code, and not all Rust library types can be linked with + C-like code. 
Note that C++ libraries *can* be linked with C code with + a C++ linker (and vice-versa). + ''' + # Using a vala library in a non-vala target, or a non-vala library in a vala target + # XXX: This should be extended to other non-C linkers such as Rust + if (self.language == 'vala' and language != 'vala') or \ + (language == 'vala' and self.language != 'vala'): + return [] + return super().get_link_args(language=language, raw=raw) + + def get_partial_dependency(self, *, compile_args: bool = False, + link_args: bool = False, links: bool = False, + includes: bool = False, sources: bool = False) -> 'ExternalLibrary': + # External library only has link_args, so ignore the rest of the + # interface. + new = copy.copy(self) + if not link_args: + new.link_args = [] + return new + + +def get_leaf_external_dependencies(deps: T.List[Dependency]) -> T.List[Dependency]: + if not deps: + # Ensure that we always return a new instance + return deps.copy() + final_deps = [] + while deps: + next_deps = [] + for d in mesonlib.listify(deps): + if not isinstance(d, Dependency) or d.is_built(): + raise DependencyException('Dependencies must be external dependencies') + final_deps.append(d) + next_deps.extend(d.ext_deps) + deps = next_deps + return final_deps + + +def sort_libpaths(libpaths: T.List[str], refpaths: T.List[str]) -> T.List[str]: + """Sort <libpaths> according to <refpaths> + + It is intended to be used to sort -L flags returned by pkg-config. + Pkg-config returns flags in random order which cannot be relied on. 
+ """ + if len(refpaths) == 0: + return list(libpaths) + + def key_func(libpath: str) -> T.Tuple[int, int]: + common_lengths: T.List[int] = [] + for refpath in refpaths: + try: + common_path: str = os.path.commonpath([libpath, refpath]) + except ValueError: + common_path = '' + common_lengths.append(len(common_path)) + max_length = max(common_lengths) + max_index = common_lengths.index(max_length) + reversed_max_length = len(refpaths[max_index]) - max_length + return (max_index, reversed_max_length) + return sorted(libpaths, key=key_func) + +def strip_system_libdirs(environment: 'Environment', for_machine: MachineChoice, link_args: T.List[str]) -> T.List[str]: + """Remove -L<system path> arguments. + + leaving these in will break builds where a user has a version of a library + in the system path, and a different version not in the system path if they + want to link against the non-system path version. + """ + exclude = {f'-L{p}' for p in environment.get_compiler_system_dirs(for_machine)} + return [l for l in link_args if l not in exclude] + +def process_method_kw(possible: T.Iterable[DependencyMethods], kwargs: T.Dict[str, T.Any]) -> T.List[DependencyMethods]: + method = kwargs.get('method', 'auto') # type: T.Union[DependencyMethods, str] + if isinstance(method, DependencyMethods): + return [method] + # TODO: try/except? + if method not in [e.value for e in DependencyMethods]: + raise DependencyException(f'method {method!r} is invalid') + method = DependencyMethods(method) + + # Raise FeatureNew where appropriate + if method is DependencyMethods.CONFIG_TOOL: + # FIXME: needs to get a handle on the subproject + # FeatureNew.single_use('Configuration method "config-tool"', '0.44.0') + pass + # This sets per-tool config methods which are deprecated to to the new + # generic CONFIG_TOOL value. 
+ if method in [DependencyMethods.SDLCONFIG, DependencyMethods.CUPSCONFIG, + DependencyMethods.PCAPCONFIG, DependencyMethods.LIBWMFCONFIG]: + # FIXME: needs to get a handle on the subproject + #FeatureDeprecated.single_use(f'Configuration method {method.value}', '0.44', 'Use "config-tool" instead.') + method = DependencyMethods.CONFIG_TOOL + if method is DependencyMethods.QMAKE: + # FIXME: needs to get a handle on the subproject + # FeatureDeprecated.single_use('Configuration method "qmake"', '0.58', 'Use "config-tool" instead.') + method = DependencyMethods.CONFIG_TOOL + + # Set the detection method. If the method is set to auto, use any available method. + # If method is set to a specific string, allow only that detection method. + if method == DependencyMethods.AUTO: + methods = list(possible) + elif method in possible: + methods = [method] + else: + raise DependencyException( + 'Unsupported detection method: {}, allowed methods are {}'.format( + method.value, + mlog.format_list([x.value for x in [DependencyMethods.AUTO] + list(possible)]))) + + return methods + +def detect_compiler(name: str, env: 'Environment', for_machine: MachineChoice, + language: T.Optional[str]) -> T.Union['MissingCompiler', 'Compiler']: + """Given a language and environment find the compiler used.""" + compilers = env.coredata.compilers[for_machine] + + # Set the compiler for this dependency if a language is specified, + # else try to pick something that looks usable. 
class SystemDependency(ExternalDependency):

    """Base class for dependencies detected by probing the system directly."""

    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
                 language: T.Optional[str] = None) -> None:
        super().__init__(DependencyTypeName('system'), env, kwargs, language=language)
        self.name = name

    @staticmethod
    def log_tried() -> str:
        # Shown in the "(tried ...)" part of the dependency log line.
        return 'system'


class BuiltinDependency(ExternalDependency):

    """Base class for dependencies satisfied by compiler/libc builtins."""

    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
                 language: T.Optional[str] = None) -> None:
        super().__init__(DependencyTypeName('builtin'), env, kwargs, language=language)
        self.name = name

    @staticmethod
    def log_tried() -> str:
        # Shown in the "(tried ...)" part of the dependency log line.
        return 'builtin'
+from __future__ import annotations + +import re +import dataclasses +import functools +import typing as T +from pathlib import Path + +from .. import mlog +from .. import mesonlib + +from .base import DependencyException, SystemDependency +from .pkgconfig import PkgConfigDependency +from .misc import threads_factory + +if T.TYPE_CHECKING: + from ..environment import Environment, Properties + +# On windows 3 directory layouts are supported: +# * The default layout (versioned) installed: +# - $BOOST_ROOT/include/boost-x_x/boost/*.hpp +# - $BOOST_ROOT/lib/*.lib +# * The non-default layout (system) installed: +# - $BOOST_ROOT/include/boost/*.hpp +# - $BOOST_ROOT/lib/*.lib +# * The pre-built binaries from sf.net: +# - $BOOST_ROOT/boost/*.hpp +# - $BOOST_ROOT/lib<arch>-<compiler>/*.lib where arch=32/64 and compiler=msvc-14.1 +# +# Note that we should also try to support: +# mingw-w64 / Windows : libboost_<module>-mt.a (location = <prefix>/mingw64/lib/) +# libboost_<module>-mt.dll.a +# +# The `modules` argument accept library names. This is because every module that +# has libraries to link against also has multiple options regarding how to +# link. 
See for example: +# * http://www.boost.org/doc/libs/1_65_1/libs/test/doc/html/boost_test/usage_variants.html +# * http://www.boost.org/doc/libs/1_65_1/doc/html/stacktrace/configuration_and_build.html +# * http://www.boost.org/doc/libs/1_65_1/libs/math/doc/html/math_toolkit/main_tr1.html + +# **On Unix**, official packaged versions of boost libraries follow the following schemes: +# +# Linux / Debian: libboost_<module>.so -> libboost_<module>.so.1.66.0 +# Linux / Red Hat: libboost_<module>.so -> libboost_<module>.so.1.66.0 +# Linux / OpenSuse: libboost_<module>.so -> libboost_<module>.so.1.66.0 +# Win / Cygwin: libboost_<module>.dll.a (location = /usr/lib) +# libboost_<module>.a +# cygboost_<module>_1_64.dll (location = /usr/bin) +# Win / VS: boost_<module>-vc<ver>-mt[-gd]-<arch>-1_67.dll (location = C:/local/boost_1_67_0) +# Mac / homebrew: libboost_<module>.dylib + libboost_<module>-mt.dylib (location = /usr/local/lib) +# Mac / macports: libboost_<module>.dylib + libboost_<module>-mt.dylib (location = /opt/local/lib) +# +# Its not clear that any other abi tags (e.g. -gd) are used in official packages. +# +# On Linux systems, boost libs have multithreading support enabled, but without the -mt tag. +# +# Boost documentation recommends using complex abi tags like "-lboost_regex-gcc34-mt-d-1_36". +# (See http://www.boost.org/doc/libs/1_66_0/more/getting_started/unix-variants.html#library-naming) +# However, its not clear that any Unix distribution follows this scheme. +# Furthermore, the boost documentation for unix above uses examples from windows like +# "libboost_regex-vc71-mt-d-x86-1_34.lib", so apparently the abi tags may be more aimed at windows. +# +# We follow the following strategy for finding modules: +# A) Detect potential boost root directories (uses also BOOST_ROOT env var) +# B) Foreach candidate +# 1. Look for the boost headers (boost/version.pp) +# 2. 
@functools.total_ordering
class BoostIncludeDir():
    """A boost include directory paired with the version parsed from its
    boost/version.hpp.

    BOOST_VERSION encodes the version as major * 100000 + minor * 100 + patch.
    Ordering is by version first (newest last), then by path, so sorting a
    list of candidates with reverse=True prefers the newest boost.
    """

    def __init__(self, path: Path, version_int: int):
        self.path = path
        self.version_int = version_int
        # Use exact integer arithmetic to decode the version; the previous
        # float division (int(v / 100000)) only worked by accident.
        major, remainder = divmod(self.version_int, 100000)
        minor, patch = divmod(remainder, 100)
        self.version = f'{major}.{minor}.{patch}'
        self.version_lib = f'{major}_{minor}'

    def __repr__(self) -> str:
        return f'<BoostIncludeDir: {self.version} -- {self.path}>'

    def __lt__(self, other: object) -> bool:
        if isinstance(other, BoostIncludeDir):
            return (self.version_int, self.path) < (other.version_int, other.path)
        return NotImplemented
    def __init__(self, path: Path):
        """Parse all boost attributes (link mode, threading, toolset, arch,
        version, ABI flags) out of a boost library file name.

        Raises UnknownFileException when the file extension is not one of the
        known shared/static library suffixes.
        """
        self.path = path
        self.name = self.path.name

        # Initialize default properties
        self.static = False
        self.toolset = ''
        self.arch = ''
        self.version_lib = ''
        self.mt = True

        self.runtime_static = False
        self.runtime_debug = False
        self.python_debug = False
        self.debug = False
        self.stlport = False
        self.deprecated_iostreams = False

        # Post process the library name
        name_parts = self.name.split('.')
        self.basename = name_parts[0]
        self.suffixes = name_parts[1:]
        # Purely numeric suffixes are the shared-library version parts
        # (e.g. libboost_x.so.1.66.0); everything else is the file type.
        self.vers_raw = [x for x in self.suffixes if x.isdigit()]
        self.suffixes = [x for x in self.suffixes if not x.isdigit()]
        self.nvsuffix = '.'.join(self.suffixes)  # Used for detecting the library type
        self.nametags = self.basename.split('-')
        self.mod_name = self.nametags[0]
        if self.mod_name.startswith('lib'):
            self.mod_name = self.mod_name[3:]

        # Set library version if possible
        if len(self.vers_raw) >= 2:
            self.version_lib = '{}_{}'.format(self.vers_raw[0], self.vers_raw[1])

        # Detecting library type
        if self.nvsuffix in {'so', 'dll', 'dll.a', 'dll.lib', 'dylib'}:
            self.static = False
        elif self.nvsuffix in {'a', 'lib'}:
            self.static = True
        else:
            raise UnknownFileException(self.path)

        # boost_.lib is the dll import library
        if self.basename.startswith('boost_') and self.nvsuffix == 'lib':
            self.static = False

        # Process tags
        tags = self.nametags[1:]
        # Filter out the python version tag and fix modname
        if self.is_python_lib():
            tags = self.fix_python_name(tags)
        if not tags:
            return

        # Without any tags mt is assumed, however, an absence of mt in the name
        # with tags present indicates that the lib was built without mt support
        self.mt = False
        for i in tags:
            if i == 'mt':
                self.mt = True
            elif len(i) == 3 and i[1:] in {'32', '64'}:
                # Address-model tag, e.g. x32/x64 (see the boost library
                # naming documentation).
                self.arch = i
            elif BoostLibraryFile.reg_abi_tag.match(i):
                # ABI tag: any combination of the s/g/y/d/p/n flags.
                self.runtime_static = 's' in i
                self.runtime_debug = 'g' in i
                self.python_debug = 'y' in i
                self.debug = 'd' in i
                self.stlport = 'p' in i
                self.deprecated_iostreams = 'n' in i
            elif BoostLibraryFile.reg_ver_tag.match(i):
                self.version_lib = i
            else:
                # Anything unrecognized is assumed to be the toolset tag.
                self.toolset = i
len(i) == 3 and i[1:] in {'32', '64'}: + self.arch = i + elif BoostLibraryFile.reg_abi_tag.match(i): + self.runtime_static = 's' in i + self.runtime_debug = 'g' in i + self.python_debug = 'y' in i + self.debug = 'd' in i + self.stlport = 'p' in i + self.deprecated_iostreams = 'n' in i + elif BoostLibraryFile.reg_ver_tag.match(i): + self.version_lib = i + else: + self.toolset = i + + def __repr__(self) -> str: + return f'<LIB: {self.abitag} {self.mod_name:<32} {self.path}>' + + def __lt__(self, other: object) -> bool: + if isinstance(other, BoostLibraryFile): + return ( + self.mod_name, self.static, self.version_lib, self.arch, + not self.mt, not self.runtime_static, + not self.debug, self.runtime_debug, self.python_debug, + self.stlport, self.deprecated_iostreams, + self.name, + ) < ( + other.mod_name, other.static, other.version_lib, other.arch, + not other.mt, not other.runtime_static, + not other.debug, other.runtime_debug, other.python_debug, + other.stlport, other.deprecated_iostreams, + other.name, + ) + return NotImplemented + + def __eq__(self, other: object) -> bool: + if isinstance(other, BoostLibraryFile): + return self.name == other.name + return NotImplemented + + def __hash__(self) -> int: + return hash(self.name) + + @property + def abitag(self) -> str: + abitag = '' + abitag += 'S' if self.static else '-' + abitag += 'M' if self.mt else '-' + abitag += ' ' + abitag += 's' if self.runtime_static else '-' + abitag += 'g' if self.runtime_debug else '-' + abitag += 'y' if self.python_debug else '-' + abitag += 'd' if self.debug else '-' + abitag += 'p' if self.stlport else '-' + abitag += 'n' if self.deprecated_iostreams else '-' + abitag += ' ' + (self.arch or '???') + abitag += ' ' + (self.toolset or '?') + abitag += ' ' + (self.version_lib or 'x_xx') + return abitag + + def is_boost(self) -> bool: + return any(self.name.startswith(x) for x in ['libboost_', 'boost_']) + + def is_python_lib(self) -> bool: + return any(self.mod_name.startswith(x) for x 
in BoostLibraryFile.boost_python_libs) + + def fix_python_name(self, tags: T.List[str]) -> T.List[str]: + # Handle the boost_python naming madeness. + # See https://github.com/mesonbuild/meson/issues/4788 for some distro + # specific naming variations. + other_tags = [] # type: T.List[str] + + # Split the current modname into the base name and the version + m_cur = BoostLibraryFile.reg_python_mod_split.match(self.mod_name) + cur_name = m_cur.group(1) + cur_vers = m_cur.group(2) + + # Update the current version string if the new version string is longer + def update_vers(new_vers: str) -> None: + nonlocal cur_vers + new_vers = new_vers.replace('_', '') + new_vers = new_vers.replace('.', '') + if not new_vers.isdigit(): + return + if len(new_vers) > len(cur_vers): + cur_vers = new_vers + + for i in tags: + if i.startswith('py'): + update_vers(i[2:]) + elif i.isdigit(): + update_vers(i) + elif len(i) >= 3 and i[0].isdigit and i[2].isdigit() and i[1] == '.': + update_vers(i) + else: + other_tags += [i] + + self.mod_name = cur_name + cur_vers + return other_tags + + def mod_name_matches(self, mod_name: str) -> bool: + if self.mod_name == mod_name: + return True + if not self.is_python_lib(): + return False + + m_cur = BoostLibraryFile.reg_python_mod_split.match(self.mod_name) + m_arg = BoostLibraryFile.reg_python_mod_split.match(mod_name) + + if not m_cur or not m_arg: + return False + + if m_cur.group(1) != m_arg.group(1): + return False + + cur_vers = m_cur.group(2) + arg_vers = m_arg.group(2) + + # Always assume python 2 if nothing is specified + if not arg_vers: + arg_vers = '2' + + return cur_vers.startswith(arg_vers) + + def version_matches(self, version_lib: str) -> bool: + # If no version tag is present, assume that it fits + if not self.version_lib or not version_lib: + return True + return self.version_lib == version_lib + + def arch_matches(self, arch: str) -> bool: + # If no version tag is present, assume that it fits + if not self.arch or not arch: + return 
True + return self.arch == arch + + def vscrt_matches(self, vscrt: str) -> bool: + # If no vscrt tag present, assume that it fits ['/MD', '/MDd', '/MT', '/MTd'] + if not vscrt: + return True + if vscrt in {'/MD', '-MD'}: + return not self.runtime_static and not self.runtime_debug + elif vscrt in {'/MDd', '-MDd'}: + return not self.runtime_static and self.runtime_debug + elif vscrt in {'/MT', '-MT'}: + return (self.runtime_static or not self.static) and not self.runtime_debug + elif vscrt in {'/MTd', '-MTd'}: + return (self.runtime_static or not self.static) and self.runtime_debug + + mlog.warning(f'Boost: unknown vscrt tag {vscrt}. This may cause the compilation to fail. Please consider reporting this as a bug.', once=True) + return True + + def get_compiler_args(self) -> T.List[str]: + args = [] # type: T.List[str] + if self.mod_name in boost_libraries: + libdef = boost_libraries[self.mod_name] # type: BoostLibrary + if self.static: + args += libdef.static + else: + args += libdef.shared + if self.mt: + args += libdef.multi + else: + args += libdef.single + return args + + def get_link_args(self) -> T.List[str]: + return [self.path.as_posix()] + +class BoostDependency(SystemDependency): + def __init__(self, environment: Environment, kwargs: T.Dict[str, T.Any]) -> None: + super().__init__('boost', environment, kwargs, language='cpp') + buildtype = environment.coredata.get_option(mesonlib.OptionKey('buildtype')) + assert isinstance(buildtype, str) + self.debug = buildtype.startswith('debug') + self.multithreading = kwargs.get('threading', 'multi') == 'multi' + + self.boost_root = None # type: T.Optional[Path] + self.explicit_static = 'static' in kwargs + + # Extract and validate modules + self.modules = mesonlib.extract_as_list(kwargs, 'modules') # type: T.List[str] + for i in self.modules: + if not isinstance(i, str): + raise DependencyException('Boost module argument is not a string.') + if i.startswith('boost_'): + raise DependencyException('Boost modules must be 
passed without the boost_ prefix') + + self.modules_found = [] # type: T.List[str] + self.modules_missing = [] # type: T.List[str] + + # Do we need threads? + if 'thread' in self.modules: + if not self._add_sub_dependency(threads_factory(environment, self.for_machine, {})): + self.is_found = False + return + + # Try figuring out the architecture tag + self.arch = environment.machines[self.for_machine].cpu_family + self.arch = boost_arch_map.get(self.arch, None) + + # First, look for paths specified in a machine file + props = self.env.properties[self.for_machine] + if any(x in self.env.properties[self.for_machine] for x in + ['boost_includedir', 'boost_librarydir', 'boost_root']): + self.detect_boost_machine_file(props) + return + + # Finally, look for paths from .pc files and from searching the filesystem + self.detect_roots() + + def check_and_set_roots(self, roots: T.List[Path], use_system: bool) -> None: + roots = list(mesonlib.OrderedSet(roots)) + for j in roots: + # 1. Look for the boost headers (boost/version.hpp) + mlog.debug(f'Checking potential boost root {j.as_posix()}') + inc_dirs = self.detect_inc_dirs(j) + inc_dirs = sorted(inc_dirs, reverse=True) # Prefer the newer versions + + # Early abort when boost is not found + if not inc_dirs: + continue + + lib_dirs = self.detect_lib_dirs(j, use_system) + self.is_found = self.run_check(inc_dirs, lib_dirs) + if self.is_found: + self.boost_root = j + break + + def detect_boost_machine_file(self, props: 'Properties') -> None: + """Detect boost with values in the machine file or environment. + + The machine file values are defaulted to the environment values. 
+ """ + # XXX: if we had a TypedDict we wouldn't need this + incdir = props.get('boost_includedir') + assert incdir is None or isinstance(incdir, str) + libdir = props.get('boost_librarydir') + assert libdir is None or isinstance(libdir, str) + + if incdir and libdir: + inc_dir = Path(incdir) + lib_dir = Path(libdir) + + if not inc_dir.is_absolute() or not lib_dir.is_absolute(): + raise DependencyException('Paths given for boost_includedir and boost_librarydir in machine file must be absolute') + + mlog.debug('Trying to find boost with:') + mlog.debug(f' - boost_includedir = {inc_dir}') + mlog.debug(f' - boost_librarydir = {lib_dir}') + + return self.detect_split_root(inc_dir, lib_dir) + + elif incdir or libdir: + raise DependencyException('Both boost_includedir *and* boost_librarydir have to be set in your machine file (one is not enough)') + + rootdir = props.get('boost_root') + # It shouldn't be possible to get here without something in boost_root + assert rootdir + + raw_paths = mesonlib.stringlistify(rootdir) + paths = [Path(x) for x in raw_paths] + if paths and any(not x.is_absolute() for x in paths): + raise DependencyException('boost_root path given in machine file must be absolute') + + self.check_and_set_roots(paths, use_system=False) + + def run_check(self, inc_dirs: T.List[BoostIncludeDir], lib_dirs: T.List[Path]) -> bool: + mlog.debug(' - potential library dirs: {}'.format([x.as_posix() for x in lib_dirs])) + mlog.debug(' - potential include dirs: {}'.format([x.path.as_posix() for x in inc_dirs])) + + # 2. 
Find all boost libraries + libs = [] # type: T.List[BoostLibraryFile] + for i in lib_dirs: + libs = self.detect_libraries(i) + if libs: + mlog.debug(f' - found boost library dir: {i}') + # mlog.debug(' - raw library list:') + # for j in libs: + # mlog.debug(' - {}'.format(j)) + break + libs = sorted(set(libs)) + + modules = ['boost_' + x for x in self.modules] + for inc in inc_dirs: + mlog.debug(f' - found boost {inc.version} include dir: {inc.path}') + f_libs = self.filter_libraries(libs, inc.version_lib) + + mlog.debug(' - filtered library list:') + for j in f_libs: + mlog.debug(f' - {j}') + + # 3. Select the libraries matching the requested modules + not_found = [] # type: T.List[str] + selected_modules = [] # type: T.List[BoostLibraryFile] + for mod in modules: + found = False + for l in f_libs: + if l.mod_name_matches(mod): + selected_modules += [l] + found = True + break + if not found: + not_found += [mod] + + # log the result + mlog.debug(' - found:') + comp_args = [] # type: T.List[str] + link_args = [] # type: T.List[str] + for j in selected_modules: + c_args = j.get_compiler_args() + l_args = j.get_link_args() + mlog.debug(' - {:<24} link={} comp={}'.format(j.mod_name, str(l_args), str(c_args))) + comp_args += c_args + link_args += l_args + + comp_args = list(mesonlib.OrderedSet(comp_args)) + link_args = list(mesonlib.OrderedSet(link_args)) + + self.modules_found = [x.mod_name for x in selected_modules] + self.modules_found = [x[6:] for x in self.modules_found] + self.modules_found = sorted(set(self.modules_found)) + self.modules_missing = not_found + self.modules_missing = [x[6:] for x in self.modules_missing] + self.modules_missing = sorted(set(self.modules_missing)) + + # if we found all modules we are done + if not not_found: + self.version = inc.version + self.compile_args = ['-I' + inc.path.as_posix()] + self.compile_args += comp_args + self.compile_args += self._extra_compile_args() + self.compile_args = 
list(mesonlib.OrderedSet(self.compile_args)) + self.link_args = link_args + mlog.debug(f' - final compile args: {self.compile_args}') + mlog.debug(f' - final link args: {self.link_args}') + return True + + # in case we missed something log it and try again + mlog.debug(' - NOT found:') + for mod in not_found: + mlog.debug(f' - {mod}') + + return False + + def detect_inc_dirs(self, root: Path) -> T.List[BoostIncludeDir]: + candidates = [] # type: T.List[Path] + inc_root = root / 'include' + + candidates += [root / 'boost'] + candidates += [inc_root / 'boost'] + if inc_root.is_dir(): + for i in inc_root.iterdir(): + if not i.is_dir() or not i.name.startswith('boost-'): + continue + candidates += [i / 'boost'] + candidates = [x for x in candidates if x.is_dir()] + candidates = [x / 'version.hpp' for x in candidates] + candidates = [x for x in candidates if x.exists()] + return [self._include_dir_from_version_header(x) for x in candidates] + + def detect_lib_dirs(self, root: Path, use_system: bool) -> T.List[Path]: + # First check the system include paths. 
Only consider those within the + # given root path + + if use_system: + system_dirs_t = self.clib_compiler.get_library_dirs(self.env) + system_dirs = [Path(x) for x in system_dirs_t] + system_dirs = [x.resolve() for x in system_dirs if x.exists()] + system_dirs = [x for x in system_dirs if mesonlib.path_is_in_root(x, root)] + system_dirs = list(mesonlib.OrderedSet(system_dirs)) + + if system_dirs: + return system_dirs + + # No system include paths were found --> fall back to manually looking + # for library dirs in root + dirs = [] # type: T.List[Path] + subdirs = [] # type: T.List[Path] + for i in root.iterdir(): + if i.is_dir() and i.name.startswith('lib'): + dirs += [i] + + # Some distros put libraries not directly inside /usr/lib but in /usr/lib/x86_64-linux-gnu + for i in dirs: + for j in i.iterdir(): + if j.is_dir() and j.name.endswith('-linux-gnu'): + subdirs += [j] + + # Filter out paths that don't match the target arch to avoid finding + # the wrong libraries. See https://github.com/mesonbuild/meson/issues/7110 + if not self.arch: + return dirs + subdirs + + arch_list_32 = ['32', 'i386'] + arch_list_64 = ['64'] + + raw_list = dirs + subdirs + no_arch = [x for x in raw_list if not any(y in x.name for y in arch_list_32 + arch_list_64)] + + matching_arch = [] # type: T.List[Path] + if '32' in self.arch: + matching_arch = [x for x in raw_list if any(y in x.name for y in arch_list_32)] + elif '64' in self.arch: + matching_arch = [x for x in raw_list if any(y in x.name for y in arch_list_64)] + + return sorted(matching_arch) + sorted(no_arch) + + def filter_libraries(self, libs: T.List[BoostLibraryFile], lib_vers: str) -> T.List[BoostLibraryFile]: + # MSVC is very picky with the library tags + vscrt = '' + try: + crt_val = self.env.coredata.options[mesonlib.OptionKey('b_vscrt')].value + buildtype = self.env.coredata.options[mesonlib.OptionKey('buildtype')].value + vscrt = self.clib_compiler.get_crt_compile_args(crt_val, buildtype)[0] + except (KeyError, 
IndexError, AttributeError): + pass + + # mlog.debug(' - static: {}'.format(self.static)) + # mlog.debug(' - not explicit static: {}'.format(not self.explicit_static)) + # mlog.debug(' - mt: {}'.format(self.multithreading)) + # mlog.debug(' - version: {}'.format(lib_vers)) + # mlog.debug(' - arch: {}'.format(self.arch)) + # mlog.debug(' - vscrt: {}'.format(vscrt)) + libs = [x for x in libs if x.static == self.static or not self.explicit_static] + libs = [x for x in libs if x.mt == self.multithreading] + libs = [x for x in libs if x.version_matches(lib_vers)] + libs = [x for x in libs if x.arch_matches(self.arch)] + libs = [x for x in libs if x.vscrt_matches(vscrt)] + libs = [x for x in libs if x.nvsuffix != 'dll'] # Only link to import libraries + + # Only filter by debug when we are building in release mode. Debug + # libraries are automatically preferred through sorting otherwise. + if not self.debug: + libs = [x for x in libs if not x.debug] + + # Take the abitag from the first library and filter by it. This + # ensures that we have a set of libraries that are always compatible. + if not libs: + return [] + abitag = libs[0].abitag + libs = [x for x in libs if x.abitag == abitag] + + return libs + + def detect_libraries(self, libdir: Path) -> T.List[BoostLibraryFile]: + libs = set() # type: T.Set[BoostLibraryFile] + for i in libdir.iterdir(): + if not i.is_file(): + continue + if not any(i.name.startswith(x) for x in ['libboost_', 'boost_']): + continue + # Windows binaries from SourceForge ship with PDB files alongside + # DLLs (#8325). Ignore them. 
+ if i.name.endswith('.pdb'): + continue + + try: + libs.add(BoostLibraryFile(i.resolve())) + except UnknownFileException as e: + mlog.warning('Boost: ignoring unknown file {} under lib directory'.format(e.path.name)) + + return [x for x in libs if x.is_boost()] # Filter out no boost libraries + + def detect_split_root(self, inc_dir: Path, lib_dir: Path) -> None: + boost_inc_dir = None + for j in [inc_dir / 'version.hpp', inc_dir / 'boost' / 'version.hpp']: + if j.is_file(): + boost_inc_dir = self._include_dir_from_version_header(j) + break + if not boost_inc_dir: + self.is_found = False + return + + self.is_found = self.run_check([boost_inc_dir], [lib_dir]) + + def detect_roots(self) -> None: + roots = [] # type: T.List[Path] + + # Try getting the BOOST_ROOT from a boost.pc if it exists. This primarily + # allows BoostDependency to find boost from Conan. See #5438 + try: + boost_pc = PkgConfigDependency('boost', self.env, {'required': False}) + if boost_pc.found(): + boost_root = boost_pc.get_pkgconfig_variable('prefix', [], None) + if boost_root: + roots += [Path(boost_root)] + except DependencyException: + pass + + # Add roots from system paths + inc_paths = [Path(x) for x in self.clib_compiler.get_default_include_dirs()] + inc_paths = [x.parent for x in inc_paths if x.exists()] + inc_paths = [x.resolve() for x in inc_paths] + roots += inc_paths + + # Add system paths + if self.env.machines[self.for_machine].is_windows(): + # Where boost built from source actually installs it + c_root = Path('C:/Boost') + if c_root.is_dir(): + roots += [c_root] + + # Where boost documentation says it should be + prog_files = Path('C:/Program Files/boost') + # Where boost prebuilt binaries are + local_boost = Path('C:/local') + + candidates = [] # type: T.List[Path] + if prog_files.is_dir(): + candidates += [*prog_files.iterdir()] + if local_boost.is_dir(): + candidates += [*local_boost.iterdir()] + + roots += [x for x in candidates if x.name.lower().startswith('boost') and 
x.is_dir()] + else: + tmp = [] # type: T.List[Path] + + # Add some default system paths + tmp += [Path('/opt/local')] + tmp += [Path('/usr/local/opt/boost')] + tmp += [Path('/usr/local')] + tmp += [Path('/usr')] + + # Cleanup paths + tmp = [x for x in tmp if x.is_dir()] + tmp = [x.resolve() for x in tmp] + roots += tmp + + self.check_and_set_roots(roots, use_system=True) + + def log_details(self) -> str: + res = '' + if self.modules_found: + res += 'found: ' + ', '.join(self.modules_found) + if self.modules_missing: + if res: + res += ' | ' + res += 'missing: ' + ', '.join(self.modules_missing) + return res + + def log_info(self) -> str: + if self.boost_root: + return self.boost_root.as_posix() + return '' + + def _include_dir_from_version_header(self, hfile: Path) -> BoostIncludeDir: + # Extract the version with a regex. Using clib_compiler.get_define would + # also work, however, this is slower (since it the compiler has to be + # invoked) and overkill since the layout of the header is always the same. 
@dataclasses.dataclass(eq=False)
class BoostLibrary:
    """Per-library compile flags from tools/boost_names.py, keyed by link mode
    (shared/static) and threading mode (single/multi)."""

    name: str
    shared: T.List[str]
    static: T.List[str]
    single: T.List[str]
    multi: T.List[str]


@dataclasses.dataclass(eq=False)
class BoostModule:
    """Metadata for one boost module as generated by tools/boost_names.py."""

    name: str
    key: str
    desc: str
    libs: T.List[str]
name='boost_container', + shared=['-DBOOST_CONTAINER_DYN_LINK=1'], + static=['-DBOOST_CONTAINER_STATIC_LINK=1'], + single=[], + multi=[], + ), + 'boost_context': BoostLibrary( + name='boost_context', + shared=['-DBOOST_CONTEXT_DYN_LINK=1'], + static=[], + single=[], + multi=[], + ), + 'boost_contract': BoostLibrary( + name='boost_contract', + shared=['-DBOOST_CONTRACT_DYN_LINK'], + static=['-DBOOST_CONTRACT_STATIC_LINK'], + single=['-DBOOST_CONTRACT_DISABLE_THREADS'], + multi=[], + ), + 'boost_coroutine': BoostLibrary( + name='boost_coroutine', + shared=['-DBOOST_COROUTINES_DYN_LINK=1'], + static=[], + single=[], + multi=[], + ), + 'boost_date_time': BoostLibrary( + name='boost_date_time', + shared=['-DBOOST_DATE_TIME_DYN_LINK=1'], + static=[], + single=[], + multi=[], + ), + 'boost_exception': BoostLibrary( + name='boost_exception', + shared=[], + static=[], + single=[], + multi=[], + ), + 'boost_fiber': BoostLibrary( + name='boost_fiber', + shared=['-DBOOST_FIBERS_DYN_LINK=1'], + static=[], + single=[], + multi=[], + ), + 'boost_fiber_numa': BoostLibrary( + name='boost_fiber_numa', + shared=['-DBOOST_FIBERS_DYN_LINK=1'], + static=[], + single=[], + multi=[], + ), + 'boost_filesystem': BoostLibrary( + name='boost_filesystem', + shared=['-DBOOST_FILESYSTEM_DYN_LINK=1'], + static=['-DBOOST_FILESYSTEM_STATIC_LINK=1'], + single=[], + multi=[], + ), + 'boost_graph': BoostLibrary( + name='boost_graph', + shared=[], + static=[], + single=[], + multi=[], + ), + 'boost_iostreams': BoostLibrary( + name='boost_iostreams', + shared=['-DBOOST_IOSTREAMS_DYN_LINK=1'], + static=[], + single=[], + multi=[], + ), + 'boost_locale': BoostLibrary( + name='boost_locale', + shared=[], + static=[], + single=[], + multi=[], + ), + 'boost_log': BoostLibrary( + name='boost_log', + shared=['-DBOOST_LOG_DYN_LINK=1'], + static=[], + single=['-DBOOST_LOG_NO_THREADS'], + multi=[], + ), + 'boost_log_setup': BoostLibrary( + name='boost_log_setup', + shared=['-DBOOST_LOG_SETUP_DYN_LINK=1'], + 
static=[], + single=['-DBOOST_LOG_NO_THREADS'], + multi=[], + ), + 'boost_math_c99': BoostLibrary( + name='boost_math_c99', + shared=[], + static=[], + single=[], + multi=[], + ), + 'boost_math_c99f': BoostLibrary( + name='boost_math_c99f', + shared=[], + static=[], + single=[], + multi=[], + ), + 'boost_math_c99l': BoostLibrary( + name='boost_math_c99l', + shared=[], + static=[], + single=[], + multi=[], + ), + 'boost_math_tr1': BoostLibrary( + name='boost_math_tr1', + shared=[], + static=[], + single=[], + multi=[], + ), + 'boost_math_tr1f': BoostLibrary( + name='boost_math_tr1f', + shared=[], + static=[], + single=[], + multi=[], + ), + 'boost_math_tr1l': BoostLibrary( + name='boost_math_tr1l', + shared=[], + static=[], + single=[], + multi=[], + ), + 'boost_mpi': BoostLibrary( + name='boost_mpi', + shared=[], + static=[], + single=[], + multi=[], + ), + 'boost_nowide': BoostLibrary( + name='boost_nowide', + shared=['-DBOOST_NOWIDE_DYN_LINK=1'], + static=[], + single=[], + multi=[], + ), + 'boost_prg_exec_monitor': BoostLibrary( + name='boost_prg_exec_monitor', + shared=['-DBOOST_TEST_DYN_LINK=1'], + static=[], + single=[], + multi=[], + ), + 'boost_program_options': BoostLibrary( + name='boost_program_options', + shared=[], + static=[], + single=[], + multi=[], + ), + 'boost_random': BoostLibrary( + name='boost_random', + shared=['-DBOOST_RANDOM_DYN_LINK'], + static=[], + single=[], + multi=[], + ), + 'boost_regex': BoostLibrary( + name='boost_regex', + shared=[], + static=[], + single=[], + multi=[], + ), + 'boost_serialization': BoostLibrary( + name='boost_serialization', + shared=[], + static=[], + single=[], + multi=[], + ), + 'boost_stacktrace_addr2line': BoostLibrary( + name='boost_stacktrace_addr2line', + shared=[], + static=[], + single=[], + multi=[], + ), + 'boost_stacktrace_backtrace': BoostLibrary( + name='boost_stacktrace_backtrace', + shared=[], + static=[], + single=[], + multi=[], + ), + 'boost_stacktrace_basic': BoostLibrary( + 
name='boost_stacktrace_basic', + shared=[], + static=[], + single=[], + multi=[], + ), + 'boost_stacktrace_noop': BoostLibrary( + name='boost_stacktrace_noop', + shared=[], + static=[], + single=[], + multi=[], + ), + 'boost_stacktrace_windbg': BoostLibrary( + name='boost_stacktrace_windbg', + shared=[], + static=[], + single=[], + multi=[], + ), + 'boost_stacktrace_windbg_cached': BoostLibrary( + name='boost_stacktrace_windbg_cached', + shared=[], + static=[], + single=[], + multi=[], + ), + 'boost_system': BoostLibrary( + name='boost_system', + shared=['-DBOOST_SYSTEM_DYN_LINK=1'], + static=['-DBOOST_SYSTEM_STATIC_LINK=1'], + single=[], + multi=[], + ), + 'boost_test_exec_monitor': BoostLibrary( + name='boost_test_exec_monitor', + shared=['-DBOOST_TEST_DYN_LINK=1'], + static=[], + single=[], + multi=[], + ), + 'boost_thread': BoostLibrary( + name='boost_thread', + shared=['-DBOOST_THREAD_BUILD_DLL=1', '-DBOOST_THREAD_USE_DLL=1'], + static=['-DBOOST_THREAD_BUILD_LIB=1', '-DBOOST_THREAD_USE_LIB=1'], + single=[], + multi=[], + ), + 'boost_timer': BoostLibrary( + name='boost_timer', + shared=['-DBOOST_TIMER_DYN_LINK=1'], + static=['-DBOOST_TIMER_STATIC_LINK=1'], + single=[], + multi=[], + ), + 'boost_type_erasure': BoostLibrary( + name='boost_type_erasure', + shared=['-DBOOST_TYPE_ERASURE_DYN_LINK'], + static=[], + single=[], + multi=[], + ), + 'boost_unit_test_framework': BoostLibrary( + name='boost_unit_test_framework', + shared=['-DBOOST_TEST_DYN_LINK=1'], + static=[], + single=[], + multi=[], + ), + 'boost_wave': BoostLibrary( + name='boost_wave', + shared=[], + static=[], + single=[], + multi=[], + ), + 'boost_wserialization': BoostLibrary( + name='boost_wserialization', + shared=[], + static=[], + single=[], + multi=[], + ), +} + +# # +#### ---- END GENERATED ---- #### diff --git a/mesonbuild/dependencies/cmake.py b/mesonbuild/dependencies/cmake.py new file mode 100644 index 0000000..abd31a1 --- /dev/null +++ b/mesonbuild/dependencies/cmake.py @@ -0,0 +1,653 @@ 
+# Copyright 2013-2021 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +from .base import ExternalDependency, DependencyException, DependencyTypeName +from ..mesonlib import is_windows, MesonException, PerMachine, stringlistify, extract_as_list +from ..cmake import CMakeExecutor, CMakeTraceParser, CMakeException, CMakeToolchain, CMakeExecScope, check_cmake_args, resolve_cmake_trace_targets, cmake_is_debug +from .. import mlog +import importlib.resources +from pathlib import Path +import functools +import re +import os +import shutil +import textwrap +import typing as T + +if T.TYPE_CHECKING: + from ..cmake import CMakeTarget + from ..environment import Environment + from ..envconfig import MachineInfo + +class CMakeInfo(T.NamedTuple): + module_paths: T.List[str] + cmake_root: str + archs: T.List[str] + common_paths: T.List[str] + +class CMakeDependency(ExternalDependency): + # The class's copy of the CMake path. Avoids having to search for it + # multiple times in the same Meson invocation. 
+ class_cmakeinfo: PerMachine[T.Optional[CMakeInfo]] = PerMachine(None, None) + # Version string for the minimum CMake version + class_cmake_version = '>=3.4' + # CMake generators to try (empty for no generator) + class_cmake_generators = ['', 'Ninja', 'Unix Makefiles', 'Visual Studio 10 2010'] + class_working_generator: T.Optional[str] = None + + def _gen_exception(self, msg: str) -> DependencyException: + return DependencyException(f'Dependency {self.name} not found: {msg}') + + def _main_cmake_file(self) -> str: + return 'CMakeLists.txt' + + def _extra_cmake_opts(self) -> T.List[str]: + return [] + + def _map_module_list(self, modules: T.List[T.Tuple[str, bool]], components: T.List[T.Tuple[str, bool]]) -> T.List[T.Tuple[str, bool]]: + # Map the input module list to something else + # This function will only be executed AFTER the initial CMake + # interpreter pass has completed. Thus variables defined in the + # CMakeLists.txt can be accessed here. + # + # Both the modules and components inputs contain the original lists. + return modules + + def _map_component_list(self, modules: T.List[T.Tuple[str, bool]], components: T.List[T.Tuple[str, bool]]) -> T.List[T.Tuple[str, bool]]: + # Map the input components list to something else. This + # function will be executed BEFORE the initial CMake interpreter + # pass. Thus variables from the CMakeLists.txt can NOT be accessed. + # + # Both the modules and components inputs contain the original lists. 
+ return components + + def _original_module_name(self, module: str) -> str: + # Reverse the module mapping done by _map_module_list for + # one module + return module + + def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None, force_use_global_compilers: bool = False) -> None: + # Gather a list of all languages to support + self.language_list = [] # type: T.List[str] + if language is None or force_use_global_compilers: + compilers = None + if kwargs.get('native', False): + compilers = environment.coredata.compilers.build + else: + compilers = environment.coredata.compilers.host + + candidates = ['c', 'cpp', 'fortran', 'objc', 'objcxx'] + self.language_list += [x for x in candidates if x in compilers] + else: + self.language_list += [language] + + # Add additional languages if required + if 'fortran' in self.language_list: + self.language_list += ['c'] + + # Ensure that the list is unique + self.language_list = list(set(self.language_list)) + + super().__init__(DependencyTypeName('cmake'), environment, kwargs, language=language) + self.name = name + self.is_libtool = False + # Store a copy of the CMake path on the object itself so it is + # stored in the pickled coredata and recovered. 
+ self.cmakebin: T.Optional[CMakeExecutor] = None + self.cmakeinfo: T.Optional[CMakeInfo] = None + + # Where all CMake "build dirs" are located + self.cmake_root_dir = environment.scratch_dir + + # T.List of successfully found modules + self.found_modules: T.List[str] = [] + + # Initialize with None before the first return to avoid + # AttributeError exceptions in derived classes + self.traceparser: T.Optional[CMakeTraceParser] = None + + # TODO further evaluate always using MachineChoice.BUILD + self.cmakebin = CMakeExecutor(environment, CMakeDependency.class_cmake_version, self.for_machine, silent=self.silent) + if not self.cmakebin.found(): + self.cmakebin = None + msg = f'CMake binary for machine {self.for_machine} not found. Giving up.' + if self.required: + raise DependencyException(msg) + mlog.debug(msg) + return + + # Setup the trace parser + self.traceparser = CMakeTraceParser(self.cmakebin.version(), self._get_build_dir(), self.env) + + cm_args = stringlistify(extract_as_list(kwargs, 'cmake_args')) + cm_args = check_cmake_args(cm_args) + if CMakeDependency.class_cmakeinfo[self.for_machine] is None: + CMakeDependency.class_cmakeinfo[self.for_machine] = self._get_cmake_info(cm_args) + self.cmakeinfo = CMakeDependency.class_cmakeinfo[self.for_machine] + if self.cmakeinfo is None: + raise self._gen_exception('Unable to obtain CMake system information') + + package_version = kwargs.get('cmake_package_version', '') + if not isinstance(package_version, str): + raise DependencyException('Keyword "cmake_package_version" must be a string.') + components = [(x, True) for x in stringlistify(extract_as_list(kwargs, 'components'))] + modules = [(x, True) for x in stringlistify(extract_as_list(kwargs, 'modules'))] + modules += [(x, False) for x in stringlistify(extract_as_list(kwargs, 'optional_modules'))] + cm_path = stringlistify(extract_as_list(kwargs, 'cmake_module_path')) + cm_path = [x if os.path.isabs(x) else os.path.join(environment.get_source_dir(), x) for x in 
cm_path] + if cm_path: + cm_args.append('-DCMAKE_MODULE_PATH=' + ';'.join(cm_path)) + if not self._preliminary_find_check(name, cm_path, self.cmakebin.get_cmake_prefix_paths(), environment.machines[self.for_machine]): + mlog.debug('Preliminary CMake check failed. Aborting.') + return + self._detect_dep(name, package_version, modules, components, cm_args) + + def __repr__(self) -> str: + return f'<{self.__class__.__name__} {self.name}: {self.is_found} {self.version_reqs}>' + + def _get_cmake_info(self, cm_args: T.List[str]) -> T.Optional[CMakeInfo]: + mlog.debug("Extracting basic cmake information") + + # Try different CMake generators since specifying no generator may fail + # in cygwin for some reason + gen_list = [] + # First try the last working generator + if CMakeDependency.class_working_generator is not None: + gen_list += [CMakeDependency.class_working_generator] + gen_list += CMakeDependency.class_cmake_generators + + temp_parser = CMakeTraceParser(self.cmakebin.version(), self._get_build_dir(), self.env) + toolchain = CMakeToolchain(self.cmakebin, self.env, self.for_machine, CMakeExecScope.DEPENDENCY, self._get_build_dir()) + toolchain.write() + + for i in gen_list: + mlog.debug('Try CMake generator: {}'.format(i if len(i) > 0 else 'auto')) + + # Prepare options + cmake_opts = temp_parser.trace_args() + toolchain.get_cmake_args() + ['.'] + cmake_opts += cm_args + if len(i) > 0: + cmake_opts = ['-G', i] + cmake_opts + + # Run CMake + ret1, out1, err1 = self._call_cmake(cmake_opts, 'CMakePathInfo.txt') + + # Current generator was successful + if ret1 == 0: + CMakeDependency.class_working_generator = i + break + + mlog.debug(f'CMake failed to gather system information for generator {i} with error code {ret1}') + mlog.debug(f'OUT:\n{out1}\n\n\nERR:\n{err1}\n\n') + + # Check if any generator succeeded + if ret1 != 0: + return None + + try: + temp_parser.parse(err1) + except MesonException: + return None + + def process_paths(l: T.List[str]) -> T.Set[str]: + if 
is_windows(): + # Cannot split on ':' on Windows because its in the drive letter + tmp = [x.split(os.pathsep) for x in l] + else: + # https://github.com/mesonbuild/meson/issues/7294 + tmp = [re.split(r':|;', x) for x in l] + flattened = [x for sublist in tmp for x in sublist] + return set(flattened) + + # Extract the variables and sanity check them + root_paths_set = process_paths(temp_parser.get_cmake_var('MESON_FIND_ROOT_PATH')) + root_paths_set.update(process_paths(temp_parser.get_cmake_var('MESON_CMAKE_SYSROOT'))) + root_paths = sorted(root_paths_set) + root_paths = [x for x in root_paths if os.path.isdir(x)] + module_paths_set = process_paths(temp_parser.get_cmake_var('MESON_PATHS_LIST')) + rooted_paths: T.List[str] = [] + for j in [Path(x) for x in root_paths]: + for p in [Path(x) for x in module_paths_set]: + rooted_paths.append(str(j / p.relative_to(p.anchor))) + module_paths = sorted(module_paths_set.union(rooted_paths)) + module_paths = [x for x in module_paths if os.path.isdir(x)] + archs = temp_parser.get_cmake_var('MESON_ARCH_LIST') + + common_paths = ['lib', 'lib32', 'lib64', 'libx32', 'share'] + for i in archs: + common_paths += [os.path.join('lib', i)] + + res = CMakeInfo( + module_paths=module_paths, + cmake_root=temp_parser.get_cmake_var('MESON_CMAKE_ROOT')[0], + archs=archs, + common_paths=common_paths, + ) + + mlog.debug(f' -- Module search paths: {res.module_paths}') + mlog.debug(f' -- CMake root: {res.cmake_root}') + mlog.debug(f' -- CMake architectures: {res.archs}') + mlog.debug(f' -- CMake lib search paths: {res.common_paths}') + + return res + + @staticmethod + @functools.lru_cache(maxsize=None) + def _cached_listdir(path: str) -> T.Tuple[T.Tuple[str, str], ...]: + try: + return tuple((x, str(x).lower()) for x in os.listdir(path)) + except OSError: + return tuple() + + @staticmethod + @functools.lru_cache(maxsize=None) + def _cached_isdir(path: str) -> bool: + try: + return os.path.isdir(path) + except OSError: + return False + + def 
_preliminary_find_check(self, name: str, module_path: T.List[str], prefix_path: T.List[str], machine: 'MachineInfo') -> bool: + lname = str(name).lower() + + # Checks <path>, <path>/cmake, <path>/CMake + def find_module(path: str) -> bool: + for i in [path, os.path.join(path, 'cmake'), os.path.join(path, 'CMake')]: + if not self._cached_isdir(i): + continue + + # Check the directory case insensitive + content = self._cached_listdir(i) + candidates = ['Find{}.cmake', '{}Config.cmake', '{}-config.cmake'] + candidates = [x.format(name).lower() for x in candidates] + if any(x[1] in candidates for x in content): + return True + return False + + # Search in <path>/(lib/<arch>|lib*|share) for cmake files + def search_lib_dirs(path: str) -> bool: + for i in [os.path.join(path, x) for x in self.cmakeinfo.common_paths]: + if not self._cached_isdir(i): + continue + + # Check <path>/(lib/<arch>|lib*|share)/cmake/<name>*/ + cm_dir = os.path.join(i, 'cmake') + if self._cached_isdir(cm_dir): + content = self._cached_listdir(cm_dir) + content = tuple(x for x in content if x[1].startswith(lname)) + for k in content: + if find_module(os.path.join(cm_dir, k[0])): + return True + + # <path>/(lib/<arch>|lib*|share)/<name>*/ + # <path>/(lib/<arch>|lib*|share)/<name>*/(cmake|CMake)/ + content = self._cached_listdir(i) + content = tuple(x for x in content if x[1].startswith(lname)) + for k in content: + if find_module(os.path.join(i, k[0])): + return True + + return False + + # Check the user provided and system module paths + for i in module_path + [os.path.join(self.cmakeinfo.cmake_root, 'Modules')]: + if find_module(i): + return True + + # Check the user provided prefix paths + for i in prefix_path: + if search_lib_dirs(i): + return True + + # Check PATH + system_env = [] # type: T.List[str] + for i in os.environ.get('PATH', '').split(os.pathsep): + if i.endswith('/bin') or i.endswith('\\bin'): + i = i[:-4] + if i.endswith('/sbin') or i.endswith('\\sbin'): + i = i[:-5] + system_env += 
[i] + + # Check the system paths + for i in self.cmakeinfo.module_paths + system_env: + if find_module(i): + return True + + if search_lib_dirs(i): + return True + + content = self._cached_listdir(i) + content = tuple(x for x in content if x[1].startswith(lname)) + for k in content: + if search_lib_dirs(os.path.join(i, k[0])): + return True + + # Mac framework support + if machine.is_darwin(): + for j in [f'{lname}.framework', f'{lname}.app']: + for k in content: + if k[1] != j: + continue + if find_module(os.path.join(i, k[0], 'Resources')) or find_module(os.path.join(i, k[0], 'Version')): + return True + + # Check the environment path + env_path = os.environ.get(f'{name}_DIR') + if env_path and find_module(env_path): + return True + + # Check the Linux CMake registry + linux_reg = Path.home() / '.cmake' / 'packages' + for p in [linux_reg / name, linux_reg / lname]: + if p.exists(): + return True + + return False + + def _detect_dep(self, name: str, package_version: str, modules: T.List[T.Tuple[str, bool]], components: T.List[T.Tuple[str, bool]], args: T.List[str]) -> None: + # Detect a dependency with CMake using the '--find-package' mode + # and the trace output (stderr) + # + # When the trace output is enabled CMake prints all functions with + # parameters to stderr as they are executed. Since CMake 3.4.0 + # variables ("${VAR}") are also replaced in the trace output. 
+ mlog.debug('\nDetermining dependency {!r} with CMake executable ' + '{!r}'.format(name, self.cmakebin.executable_path())) + + # Try different CMake generators since specifying no generator may fail + # in cygwin for some reason + gen_list = [] + # First try the last working generator + if CMakeDependency.class_working_generator is not None: + gen_list += [CMakeDependency.class_working_generator] + gen_list += CMakeDependency.class_cmake_generators + + # Map the components + comp_mapped = self._map_component_list(modules, components) + toolchain = CMakeToolchain(self.cmakebin, self.env, self.for_machine, CMakeExecScope.DEPENDENCY, self._get_build_dir()) + toolchain.write() + + for i in gen_list: + mlog.debug('Try CMake generator: {}'.format(i if len(i) > 0 else 'auto')) + + # Prepare options + cmake_opts = [] + cmake_opts += [f'-DNAME={name}'] + cmake_opts += ['-DARCHS={}'.format(';'.join(self.cmakeinfo.archs))] + cmake_opts += [f'-DVERSION={package_version}'] + cmake_opts += ['-DCOMPS={}'.format(';'.join([x[0] for x in comp_mapped]))] + cmake_opts += args + cmake_opts += self.traceparser.trace_args() + cmake_opts += toolchain.get_cmake_args() + cmake_opts += self._extra_cmake_opts() + cmake_opts += ['.'] + if len(i) > 0: + cmake_opts = ['-G', i] + cmake_opts + + # Run CMake + ret1, out1, err1 = self._call_cmake(cmake_opts, self._main_cmake_file()) + + # Current generator was successful + if ret1 == 0: + CMakeDependency.class_working_generator = i + break + + mlog.debug(f'CMake failed for generator {i} and package {name} with error code {ret1}') + mlog.debug(f'OUT:\n{out1}\n\n\nERR:\n{err1}\n\n') + + # Check if any generator succeeded + if ret1 != 0: + return + + try: + self.traceparser.parse(err1) + except CMakeException as e: + e2 = self._gen_exception(str(e)) + if self.required: + raise + else: + self.compile_args = [] + self.link_args = [] + self.is_found = False + self.reason = e2 + return + + # Whether the package is found or not is always stored in 
PACKAGE_FOUND + self.is_found = self.traceparser.var_to_bool('PACKAGE_FOUND') + if not self.is_found: + return + + # Try to detect the version + vers_raw = self.traceparser.get_cmake_var('PACKAGE_VERSION') + + if len(vers_raw) > 0: + self.version = vers_raw[0] + self.version.strip('"\' ') + + # Post-process module list. Used in derived classes to modify the + # module list (append prepend a string, etc.). + modules = self._map_module_list(modules, components) + autodetected_module_list = False + + # Try guessing a CMake target if none is provided + if len(modules) == 0: + for i in self.traceparser.targets: + tg = i.lower() + lname = name.lower() + if f'{lname}::{lname}' == tg or lname == tg.replace('::', ''): + mlog.debug(f'Guessed CMake target \'{i}\'') + modules = [(i, True)] + autodetected_module_list = True + break + + # Failed to guess a target --> try the old-style method + if len(modules) == 0: + # Warn when there might be matching imported targets but no automatic match was used + partial_modules: T.List[CMakeTarget] = [] + for k, v in self.traceparser.targets.items(): + tg = k.lower() + lname = name.lower() + if tg.startswith(f'{lname}::'): + partial_modules += [v] + if partial_modules: + mlog.warning(textwrap.dedent(f'''\ + Could not find and exact match for the CMake dependency {name}. + + However, Meson found the following partial matches: + + {[x.name for x in partial_modules]} + + Using imported is recommended, since this approach is less error prone + and better supported by Meson. Consider explicitly specifying one of + these in the dependency call with: + + dependency('{name}', modules: ['{name}::<name>', ...]) + + Meson will now continue to use the old-style {name}_LIBRARIES CMake + variables to extract the dependency information since no explicit + target is currently specified. 
+ + ''')) + mlog.debug('More info for the partial match targets:') + for tgt in partial_modules: + mlog.debug(tgt) + + incDirs = [x for x in self.traceparser.get_cmake_var('PACKAGE_INCLUDE_DIRS') if x] + defs = [x for x in self.traceparser.get_cmake_var('PACKAGE_DEFINITIONS') if x] + libs_raw = [x for x in self.traceparser.get_cmake_var('PACKAGE_LIBRARIES') if x] + + # CMake has a "fun" API, where certain keywords describing + # configurations can be in the *_LIBRARIES vraiables. See: + # - https://github.com/mesonbuild/meson/issues/9197 + # - https://gitlab.freedesktop.org/libnice/libnice/-/issues/140 + # - https://cmake.org/cmake/help/latest/command/target_link_libraries.html#overview (the last point in the section) + libs: T.List[str] = [] + cfg_matches = True + is_debug = cmake_is_debug(self.env) + cm_tag_map = {'debug': is_debug, 'optimized': not is_debug, 'general': True} + for i in libs_raw: + if i.lower() in cm_tag_map: + cfg_matches = cm_tag_map[i.lower()] + continue + if cfg_matches: + libs += [i] + # According to the CMake docs, a keyword only works for the + # directly the following item and all items without a keyword + # are implizitly `general` + cfg_matches = True + + # Try to use old style variables if no module is specified + if len(libs) > 0: + self.compile_args = [f'-I{x}' for x in incDirs] + defs + self.link_args = [] + for j in libs: + rtgt = resolve_cmake_trace_targets(j, self.traceparser, self.env, clib_compiler=self.clib_compiler) + self.link_args += rtgt.libraries + self.compile_args += [f'-I{x}' for x in rtgt.include_directories] + self.compile_args += rtgt.public_compile_opts + mlog.debug(f'using old-style CMake variables for dependency {name}') + mlog.debug(f'Include Dirs: {incDirs}') + mlog.debug(f'Compiler Definitions: {defs}') + mlog.debug(f'Libraries: {libs}') + return + + # Even the old-style approach failed. 
Nothing else we can do here + self.is_found = False + raise self._gen_exception('CMake: failed to guess a CMake target for {}.\n' + 'Try to explicitly specify one or more targets with the "modules" property.\n' + 'Valid targets are:\n{}'.format(name, list(self.traceparser.targets.keys()))) + + # Set dependencies with CMake targets + # recognise arguments we should pass directly to the linker + incDirs = [] + compileOptions = [] + libraries = [] + + for i, required in modules: + if i not in self.traceparser.targets: + if not required: + mlog.warning('CMake: T.Optional module', mlog.bold(self._original_module_name(i)), 'for', mlog.bold(name), 'was not found') + continue + raise self._gen_exception('CMake: invalid module {} for {}.\n' + 'Try to explicitly specify one or more targets with the "modules" property.\n' + 'Valid targets are:\n{}'.format(self._original_module_name(i), name, list(self.traceparser.targets.keys()))) + + if not autodetected_module_list: + self.found_modules += [i] + + rtgt = resolve_cmake_trace_targets(i, self.traceparser, self.env, + clib_compiler=self.clib_compiler, + not_found_warning=lambda x: + mlog.warning('CMake: Dependency', mlog.bold(x), 'for', mlog.bold(name), 'was not found') + ) + incDirs += rtgt.include_directories + compileOptions += rtgt.public_compile_opts + libraries += rtgt.libraries + rtgt.link_flags + + # Make sure all elements in the lists are unique and sorted + incDirs = sorted(set(incDirs)) + compileOptions = sorted(set(compileOptions)) + libraries = sorted(set(libraries)) + + mlog.debug(f'Include Dirs: {incDirs}') + mlog.debug(f'Compiler Options: {compileOptions}') + mlog.debug(f'Libraries: {libraries}') + + self.compile_args = compileOptions + [f'-I{x}' for x in incDirs] + self.link_args = libraries + + def _get_build_dir(self) -> Path: + build_dir = Path(self.cmake_root_dir) / f'cmake_{self.name}' + build_dir.mkdir(parents=True, exist_ok=True) + return build_dir + + def _setup_cmake_dir(self, cmake_file: str) -> Path: 
+ # Setup the CMake build environment and return the "build" directory + build_dir = self._get_build_dir() + + # Remove old CMake cache so we can try out multiple generators + cmake_cache = build_dir / 'CMakeCache.txt' + cmake_files = build_dir / 'CMakeFiles' + if cmake_cache.exists(): + cmake_cache.unlink() + shutil.rmtree(cmake_files.as_posix(), ignore_errors=True) + + # Insert language parameters into the CMakeLists.txt and write new CMakeLists.txt + cmake_txt = importlib.resources.read_text('mesonbuild.dependencies.data', cmake_file, encoding = 'utf-8') + + # In general, some Fortran CMake find_package() also require C language enabled, + # even if nothing from C is directly used. An easy Fortran example that fails + # without C language is + # find_package(Threads) + # To make this general to + # any other language that might need this, we use a list for all + # languages and expand in the cmake Project(... LANGUAGES ...) statement. + from ..cmake import language_map + cmake_language = [language_map[x] for x in self.language_list if x in language_map] + if not cmake_language: + cmake_language += ['NONE'] + + cmake_txt = textwrap.dedent(""" + cmake_minimum_required(VERSION ${{CMAKE_VERSION}}) + project(MesonTemp LANGUAGES {}) + """).format(' '.join(cmake_language)) + cmake_txt + + cm_file = build_dir / 'CMakeLists.txt' + cm_file.write_text(cmake_txt, encoding='utf-8') + mlog.cmd_ci_include(cm_file.absolute().as_posix()) + + return build_dir + + def _call_cmake(self, + args: T.List[str], + cmake_file: str, + env: T.Optional[T.Dict[str, str]] = None) -> T.Tuple[int, T.Optional[str], T.Optional[str]]: + build_dir = self._setup_cmake_dir(cmake_file) + return self.cmakebin.call(args, build_dir, env=env) + + @staticmethod + def log_tried() -> str: + return 'cmake' + + def log_details(self) -> str: + modules = [self._original_module_name(x) for x in self.found_modules] + modules = sorted(set(modules)) + if modules: + return 'modules: ' + ', '.join(modules) + return '' 
+ + def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None, + configtool: T.Optional[str] = None, internal: T.Optional[str] = None, + default_value: T.Optional[str] = None, + pkgconfig_define: T.Optional[T.List[str]] = None) -> str: + if cmake and self.traceparser is not None: + try: + v = self.traceparser.vars[cmake] + except KeyError: + pass + else: + # CMake does NOT have a list datatype. We have no idea whether + # anything is a string or a string-separated-by-; Internally, + # we treat them as the latter and represent everything as a + # list, because it is convenient when we are mostly handling + # imported targets, which have various properties that are + # actually lists. + # + # As a result we need to convert them back to strings when grabbing + # raw variables the user requested. + return ';'.join(v) + if default_value is not None: + return default_value + raise DependencyException(f'Could not get cmake variable and no default provided for {self!r}') diff --git a/mesonbuild/dependencies/coarrays.py b/mesonbuild/dependencies/coarrays.py new file mode 100644 index 0000000..70cf4f8 --- /dev/null +++ b/mesonbuild/dependencies/coarrays.py @@ -0,0 +1,87 @@ +# Copyright 2013-2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from __future__ import annotations + +import functools +import typing as T + +from .base import DependencyMethods, detect_compiler, SystemDependency +from .cmake import CMakeDependency +from .pkgconfig import PkgConfigDependency +from .factory import factory_methods + +if T.TYPE_CHECKING: + from . factory import DependencyGenerator + from ..environment import Environment, MachineChoice + + +@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.CMAKE, DependencyMethods.SYSTEM}) +def coarray_factory(env: 'Environment', + for_machine: 'MachineChoice', + kwargs: T.Dict[str, T.Any], + methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']: + fcid = detect_compiler('coarray', env, for_machine, 'fortran').get_id() + candidates: T.List['DependencyGenerator'] = [] + + if fcid == 'gcc': + # OpenCoarrays is the most commonly used method for Fortran Coarray with GCC + if DependencyMethods.PKGCONFIG in methods: + for pkg in ['caf-openmpi', 'caf']: + candidates.append(functools.partial( + PkgConfigDependency, pkg, env, kwargs, language='fortran')) + + if DependencyMethods.CMAKE in methods: + if 'modules' not in kwargs: + kwargs['modules'] = 'OpenCoarrays::caf_mpi' + candidates.append(functools.partial( + CMakeDependency, 'OpenCoarrays', env, kwargs, language='fortran')) + + if DependencyMethods.SYSTEM in methods: + candidates.append(functools.partial(CoarrayDependency, env, kwargs)) + + return candidates + + +class CoarrayDependency(SystemDependency): + """ + Coarrays are a Fortran 2008 feature. + + Coarrays are sometimes implemented via external library (GCC+OpenCoarrays), + while other compilers just build in support (Cray, IBM, Intel, NAG). + Coarrays may be thought of as a high-level language abstraction of + low-level MPI calls. 
+ """ + def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None: + super().__init__('coarray', environment, kwargs, language='fortran') + kwargs['required'] = False + kwargs['silent'] = True + + cid = self.get_compiler().get_id() + if cid == 'gcc': + # Fallback to single image + self.compile_args = ['-fcoarray=single'] + self.version = 'single image (fallback)' + self.is_found = True + elif cid == 'intel': + # Coarrays are built into Intel compilers, no external library needed + self.is_found = True + self.link_args = ['-coarray=shared'] + self.compile_args = self.link_args + elif cid == 'intel-cl': + # Coarrays are built into Intel compilers, no external library needed + self.is_found = True + self.compile_args = ['/Qcoarray:shared'] + elif cid == 'nagfor': + # NAG doesn't require any special arguments for Coarray + self.is_found = True diff --git a/mesonbuild/dependencies/configtool.py b/mesonbuild/dependencies/configtool.py new file mode 100644 index 0000000..1f16a43 --- /dev/null +++ b/mesonbuild/dependencies/configtool.py @@ -0,0 +1,186 @@ +# Copyright 2013-2021 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +from .base import ExternalDependency, DependencyException, DependencyTypeName +from ..mesonlib import listify, Popen_safe, split_args, version_compare, version_compare_many +from ..programs import find_external_program +from .. 
import re
import typing as T

from mesonbuild import mesonlib

if T.TYPE_CHECKING:
    from ..environment import Environment

class ConfigToolDependency(ExternalDependency):

    """Class representing dependencies found using a config tool.

    Takes the following extra keys in kwargs that it uses internally:
    :tools List[str]: A list of tool names to use
    :version_arg str: The argument to pass to the tool to get its version
    :skip_version str: The argument to pass to the tool to ignore its version
        (if ``version_arg`` fails, but it may start accepting it in the future)
        Because some tools are stupid and don't accept --version
    :returncode_value int: The value of the correct returncode
        Because some tools are stupid and don't return 0
    """

    # Class-level defaults; subclasses (and kwargs) may override them.
    tools: T.Optional[T.List[str]] = None
    tool_name: T.Optional[str] = None
    version_arg = '--version'
    skip_version: T.Optional[str] = None
    # Matches a leading numeric dotted version, e.g. the '1.2.3' of '1.2.3.git-1234'.
    __strip_version = re.compile(r'^[0-9][0-9.]+')

    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None):
        super().__init__(DependencyTypeName('config-tool'), environment, kwargs, language=language)
        self.name = name
        # You may want to overwrite the class version in some cases
        self.tools = listify(kwargs.get('tools', self.tools))
        if not self.tool_name:
            self.tool_name = self.tools[0]
        if 'version_arg' in kwargs:
            self.version_arg = kwargs['version_arg']

        # Normalize the requested version constraint(s) to a list of strings.
        req_version_raw = kwargs.get('version', None)
        if req_version_raw is not None:
            req_version = mesonlib.stringlistify(req_version_raw)
        else:
            req_version = []
        tool, version = self.find_config(req_version, kwargs.get('returncode_value', 0))
        self.config = tool
        self.is_found = self.report_config(version, req_version)
        if not self.is_found:
            self.config = None
            return
        self.version = version

    def _sanitize_version(self, version: str) -> str:
        """Remove any non-numeric, non-point version suffixes."""
        m = self.__strip_version.match(version)
        if m:
            # Ensure that there isn't a trailing '.', such as an input like
            # `1.2.3.git-1234`
            return m.group(0).rstrip('.')
        return version

    def find_config(self, versions: T.List[str], returncode: int = 0) \
            -> T.Tuple[T.Optional[T.List[str]], T.Optional[str]]:
        """Helper method that searches for config tool binaries in PATH and
        returns the one that best matches the given version requirements.

        Returns a (command, version) tuple; either element may be None when
        no suitable tool (or no version string) was found.
        """
        best_match: T.Tuple[T.Optional[T.List[str]], T.Optional[str]] = (None, None)
        for potential_bin in find_external_program(
                self.env, self.for_machine, self.tool_name,
                self.tool_name, self.tools, allow_default_for_cross=False):
            if not potential_bin.found():
                continue
            tool = potential_bin.get_command()
            try:
                p, out = Popen_safe(tool + [self.version_arg])[:2]
            except (FileNotFoundError, PermissionError):
                continue
            if p.returncode != returncode:
                if self.skip_version:
                    # maybe the executable is valid even if it doesn't support --version
                    p = Popen_safe(tool + [self.skip_version])[0]
                    if p.returncode != returncode:
                        continue
                else:
                    continue

            out = self._sanitize_version(out.strip())
            # Some tools, like pcap-config don't supply a version, but also
            # don't fail with --version, in that case just assume that there is
            # only one version and return it.
            if not out:
                return (tool, None)
            if versions:
                is_found = version_compare_many(out, versions)[0]
                # This allows returning a found version without a config tool,
                # which is useful to inform the user that you found version x,
                # but y was required.
                if not is_found:
                    tool = None
            if best_match[1]:
                if version_compare(out, '> {}'.format(best_match[1])):
                    best_match = (tool, out)
            else:
                best_match = (tool, out)

        return best_match

    def report_config(self, version: T.Optional[str], req_version: T.List[str]) -> bool:
        """Helper method to print messages about the tool.

        Returns True when a usable config tool was found.
        """

        found_msg: T.List[T.Union[str, mlog.AnsiDecorator]] = [mlog.bold(self.tool_name), 'found:']

        if self.config is None:
            found_msg.append(mlog.red('NO'))
            if version is not None and req_version:
                found_msg.append(f'found {version!r} but need {req_version!r}')
            elif req_version:
                found_msg.append(f'need {req_version!r}')
        else:
            found_msg += [mlog.green('YES'), '({})'.format(' '.join(self.config)), version]

        mlog.log(*found_msg)

        return self.config is not None

    def get_config_value(self, args: T.List[str], stage: str) -> T.List[str]:
        # Run the config tool with the given args and split its stdout into
        # individual arguments; raises only when the dependency is required.
        p, out, err = Popen_safe(self.config + args)
        if p.returncode != 0:
            if self.required:
                raise DependencyException(f'Could not generate {stage} for {self.name}.\n{err}')
            return []
        return split_args(out)

    def get_configtool_variable(self, variable_name: str) -> str:
        # Queries the tool with '--<variable_name>'; on failure returns ''
        # (empty string) unless the dependency is required.
        p, out, _ = Popen_safe(self.config + [f'--{variable_name}'])
        if p.returncode != 0:
            if self.required:
                raise DependencyException(
                    'Could not get variable "{}" for dependency {}'.format(
                        variable_name, self.name))
        variable = out.strip()
        mlog.debug(f'Got config-tool variable {variable_name} : {variable}')
        return variable

    @staticmethod
    def log_tried() -> str:
        return 'config-tool'

    def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None,
                     configtool: T.Optional[str] = None, internal: T.Optional[str] = None,
                     default_value: T.Optional[str] = None,
                     pkgconfig_define: T.Optional[T.List[str]] = None) -> str:
        if configtool:
            # In the not required case '' (empty string) will be returned if the
            # variable is not found. Since '' is a valid value to return we
            # set required to True here to force an error, and use the
            # finally clause to ensure it's restored.
            restore = self.required
            self.required = True
            try:
                return self.get_configtool_variable(configtool)
            except DependencyException:
                pass
            finally:
                self.required = restore
        if default_value is not None:
            return default_value
        raise DependencyException(f'Could not get config-tool variable and no default provided for {self!r}')

# Copyright 2013-2019 The Meson development team

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

#     http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations

import glob
import re
import os
import typing as T
from pathlib import Path

from .. import mesonlib
from .. import mlog
from ..environment import detect_cpu_family
from .base import DependencyException, SystemDependency


if T.TYPE_CHECKING:
    from ..environment import Environment
    from ..compilers import Compiler

    # (path, version, is_default) triple used throughout the detection code.
    TV_ResultTuple = T.Tuple[T.Optional[str], T.Optional[str], bool]

class CudaDependency(SystemDependency):
    """System dependency for the NVIDIA CUDA Toolkit.

    Locates a toolkit installation (via environment variables, Windows
    CUDA_PATH_* variables, or /usr/local/cuda* symlinks), matches it against
    any requested version (or the nvcc version when compiling CUDA), and
    resolves the requested library modules (always including 'cudart').
    """

    supported_languages = ['cuda', 'cpp', 'c'] # see also _default_language

    def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
        compilers = environment.coredata.compilers[self.get_for_machine_from_kwargs(kwargs)]
        language = self._detect_language(compilers)
        if language not in self.supported_languages:
            raise DependencyException(f'Language \'{language}\' is not supported by the CUDA Toolkit. Supported languages are {self.supported_languages}.')

        super().__init__('cuda', environment, kwargs, language=language)
        self.lib_modules: T.Dict[str, T.List[str]] = {}
        self.requested_modules = self.get_requested(kwargs)
        # 'cudart' is always linked, whether the user asked for it or not.
        if 'cudart' not in self.requested_modules:
            self.requested_modules = ['cudart'] + self.requested_modules

        (self.cuda_path, self.version, self.is_found) = self._detect_cuda_path_and_version()
        if not self.is_found:
            return

        if not os.path.isabs(self.cuda_path):
            raise DependencyException(f'CUDA Toolkit path must be absolute, got \'{self.cuda_path}\'.')

        # nvcc already knows where to find the CUDA Toolkit, but if we're compiling
        # a mixed C/C++/CUDA project, we still need to make the include dir searchable
        if self.language != 'cuda' or len(compilers) > 1:
            self.incdir = os.path.join(self.cuda_path, 'include')
            self.compile_args += [f'-I{self.incdir}']

        if self.language != 'cuda':
            arch_libdir = self._detect_arch_libdir()
            self.libdir = os.path.join(self.cuda_path, arch_libdir)
            mlog.debug('CUDA library directory is', mlog.bold(self.libdir))
        else:
            self.libdir = None

        self.is_found = self._find_requested_libraries()

    @classmethod
    def _detect_language(cls, compilers: T.Dict[str, 'Compiler']) -> str:
        # Prefer supported languages in declaration order; otherwise fall
        # back to whatever language the project enabled first.
        for lang in cls.supported_languages:
            if lang in compilers:
                return lang
        return list(compilers.keys())[0]

    def _detect_cuda_path_and_version(self) -> TV_ResultTuple:
        """Return (toolkit_path, version, found) for the best-matching toolkit."""
        self.env_var = self._default_path_env_var()
        mlog.debug('Default path env var:', mlog.bold(self.env_var))

        version_reqs = self.version_reqs
        if self.language == 'cuda':
            nvcc_version = self._strip_patch_version(self.get_compiler().version)
            mlog.debug('nvcc version:', mlog.bold(nvcc_version))
            if version_reqs:
                # make sure nvcc version satisfies specified version requirements
                (found_some, not_found, found) = mesonlib.version_compare_many(nvcc_version, version_reqs)
                if not_found:
                    msg = f'The current nvcc version {nvcc_version} does not satisfy the specified CUDA Toolkit version requirements {version_reqs}.'
                    return self._report_dependency_error(msg, (None, None, False))

            # use nvcc version to find a matching CUDA Toolkit
            version_reqs = [f'={nvcc_version}']
        else:
            nvcc_version = None

        paths = [(path, self._cuda_toolkit_version(path), default) for (path, default) in self._cuda_paths()]
        if version_reqs:
            return self._find_matching_toolkit(paths, version_reqs, nvcc_version)

        defaults = [(path, version) for (path, version, default) in paths if default]
        if defaults:
            return (defaults[0][0], defaults[0][1], True)

        platform_msg = 'set the CUDA_PATH environment variable' if self._is_windows() \
            else 'set the CUDA_PATH environment variable/create the \'/usr/local/cuda\' symbolic link'
        msg = f'Please specify the desired CUDA Toolkit version (e.g. dependency(\'cuda\', version : \'>=10.1\')) or {platform_msg} to point to the location of your desired version.'
        return self._report_dependency_error(msg, (None, None, False))

    def _find_matching_toolkit(self, paths: T.List[TV_ResultTuple], version_reqs: T.List[str], nvcc_version: T.Optional[str]) -> TV_ResultTuple:
        # keep the default paths order intact, sort the rest in the descending order
        # according to the toolkit version
        part_func: T.Callable[[TV_ResultTuple], bool] = lambda t: not t[2]
        defaults_it, rest_it = mesonlib.partition(part_func, paths)
        defaults = list(defaults_it)
        paths = defaults + sorted(rest_it, key=lambda t: mesonlib.Version(t[1]), reverse=True)
        mlog.debug(f'Search paths: {paths}')

        if nvcc_version and defaults:
            default_src = f"the {self.env_var} environment variable" if self.env_var else "the \'/usr/local/cuda\' symbolic link"
            nvcc_warning = 'The default CUDA Toolkit as designated by {} ({}) doesn\'t match the current nvcc version {} and will be ignored.'.format(default_src, os.path.realpath(defaults[0][0]), nvcc_version)
        else:
            nvcc_warning = None

        for (path, version, default) in paths:
            (found_some, not_found, found) = mesonlib.version_compare_many(version, version_reqs)
            if not not_found:
                if not default and nvcc_warning:
                    mlog.warning(nvcc_warning)
                return (path, version, True)

        if nvcc_warning:
            mlog.warning(nvcc_warning)
        return (None, None, False)

    def _default_path_env_var(self) -> T.Optional[str]:
        # On Windows only CUDA_PATH is honoured; elsewhere CUDA_HOME and
        # CUDA_ROOT are accepted as well (first one set wins).
        env_vars = ['CUDA_PATH'] if self._is_windows() else ['CUDA_PATH', 'CUDA_HOME', 'CUDA_ROOT']
        env_vars = [var for var in env_vars if var in os.environ]
        user_defaults = {os.environ[var] for var in env_vars}
        if len(user_defaults) > 1:
            mlog.warning('Environment variables {} point to conflicting toolkit locations ({}). Toolkit selection might produce unexpected results.'.format(', '.join(env_vars), ', '.join(user_defaults)))
        return env_vars[0] if env_vars else None

    def _cuda_paths(self) -> T.List[T.Tuple[str, bool]]:
        # Candidate (path, is_default) pairs: the env-var path (if any) first,
        # then platform-specific search locations.
        return ([(os.environ[self.env_var], True)] if self.env_var else []) \
            + (self._cuda_paths_win() if self._is_windows() else self._cuda_paths_nix())

    def _cuda_paths_win(self) -> T.List[T.Tuple[str, bool]]:
        env_vars = os.environ.keys()
        return [(os.environ[var], False) for var in env_vars if var.startswith('CUDA_PATH_')]

    def _cuda_paths_nix(self) -> T.List[T.Tuple[str, bool]]:
        # include /usr/local/cuda default only if no env_var was found
        pattern = '/usr/local/cuda-*' if self.env_var else '/usr/local/cuda*'
        return [(path, os.path.basename(path) == 'cuda') for path in glob.iglob(pattern)]

    toolkit_version_regex = re.compile(r'^CUDA Version\s+(.*)$')
    path_version_win_regex = re.compile(r'^v(.*)$')
    path_version_nix_regex = re.compile(r'^cuda-(.*)$')
    cudart_version_regex = re.compile(r'#define\s+CUDART_VERSION\s+([0-9]+)')

    def _cuda_toolkit_version(self, path: str) -> str:
        """Best-effort toolkit version: version.txt, then cuda_runtime_api.h,
        then the directory name itself; '0.0' when nothing matches."""
        version = self._read_toolkit_version_txt(path)
        if version:
            return version
        version = self._read_cuda_runtime_api_version(path)
        if version:
            return version

        mlog.debug('Falling back to extracting version from path')
        path_version_regex = self.path_version_win_regex if self._is_windows() else self.path_version_nix_regex
        try:
            m = path_version_regex.match(os.path.basename(path))
            if m:
                return m.group(1)
            else:
                mlog.warning(f'Could not detect CUDA Toolkit version for {path}')
        except Exception as e:
            mlog.warning(f'Could not detect CUDA Toolkit version for {path}: {e!s}')

        return '0.0'

    def _read_cuda_runtime_api_version(self, path_str: str) -> T.Optional[str]:
        # CUDART_VERSION is encoded as major*1000 + minor*10 (e.g. 10010 -> 10.1).
        path = Path(path_str)
        for i in path.rglob('cuda_runtime_api.h'):
            raw = i.read_text(encoding='utf-8')
            m = self.cudart_version_regex.search(raw)
            if not m:
                continue
            try:
                vers_int = int(m.group(1))
            except ValueError:
                continue
            # use // for floor instead of / which produces a float
            major = vers_int // 1000 # type: int
            minor = (vers_int - major * 1000) // 10 # type: int
            return f'{major}.{minor}'
        return None

    def _read_toolkit_version_txt(self, path: str) -> T.Optional[str]:
        # Read 'version.txt' at the root of the CUDA Toolkit directory to determine the toolkit version
        version_file_path = os.path.join(path, 'version.txt')
        try:
            with open(version_file_path, encoding='utf-8') as version_file:
                version_str = version_file.readline() # e.g. 'CUDA Version 10.1.168'
                m = self.toolkit_version_regex.match(version_str)
                if m:
                    return self._strip_patch_version(m.group(1))
        except Exception as e:
            mlog.debug(f'Could not read CUDA Toolkit\'s version file {version_file_path}: {e!s}')

        return None

    @classmethod
    def _strip_patch_version(cls, version: str) -> str:
        # '10.1.168' -> '10.1'
        return '.'.join(version.split('.')[:2])

    def _detect_arch_libdir(self) -> str:
        """Return the toolkit-relative library directory for the host CPU/OS."""
        arch = detect_cpu_family(self.env.coredata.compilers.host)
        machine = self.env.machines[self.for_machine]
        msg = '{} architecture is not supported in {} version of the CUDA Toolkit.'
        if machine.is_windows():
            libdirs = {'x86': 'Win32', 'x86_64': 'x64'}
            if arch not in libdirs:
                raise DependencyException(msg.format(arch, 'Windows'))
            return os.path.join('lib', libdirs[arch])
        elif machine.is_linux():
            libdirs = {'x86_64': 'lib64', 'ppc64': 'lib', 'aarch64': 'lib64', 'loongarch64': 'lib64'}
            if arch not in libdirs:
                raise DependencyException(msg.format(arch, 'Linux'))
            return libdirs[arch]
        elif machine.is_darwin():
            libdirs = {'x86_64': 'lib64'}
            if arch not in libdirs:
                raise DependencyException(msg.format(arch, 'macOS'))
            return libdirs[arch]
        else:
            raise DependencyException('CUDA Toolkit: unsupported platform.')

    def _find_requested_libraries(self) -> bool:
        """Resolve link args for every requested module; True when all found."""
        all_found = True

        for module in self.requested_modules:
            args = self.clib_compiler.find_library(module, self.env, [self.libdir] if self.libdir else [])
            if args is None:
                self._report_dependency_error(f'Couldn\'t find requested CUDA module \'{module}\'')
                all_found = False
            else:
                mlog.debug(f'Link args for CUDA module \'{module}\' are {args}')
                self.lib_modules[module] = args

        return all_found

    def _is_windows(self) -> bool:
        return self.env.machines[self.for_machine].is_windows()

    @T.overload
    def _report_dependency_error(self, msg: str) -> None: ...

    @T.overload
    def _report_dependency_error(self, msg: str, ret_val: TV_ResultTuple) -> TV_ResultTuple: ... # noqa: F811

    def _report_dependency_error(self, msg: str, ret_val: T.Optional[TV_ResultTuple] = None) -> T.Optional[TV_ResultTuple]: # noqa: F811
        # Raises when the dependency is required, otherwise logs and returns
        # the caller-provided fallback value.
        if self.required:
            raise DependencyException(msg)

        mlog.debug(msg)
        return ret_val

    def log_details(self) -> str:
        module_str = ', '.join(self.requested_modules)
        return 'modules: ' + module_str

    def log_info(self) -> str:
        return self.cuda_path if self.cuda_path else ''

    def get_requested(self, kwargs: T.Dict[str, T.Any]) -> T.List[str]:
        # Validate the 'modules' kwarg: must be a (list of) string(s).
        candidates = mesonlib.extract_as_list(kwargs, 'modules')
        for c in candidates:
            if not isinstance(c, str):
                raise DependencyException('CUDA module argument is not a string.')
        return candidates

    def get_link_args(self, language: T.Optional[str] = None, raw: bool = False) -> T.List[str]:
        args: T.List[str] = []
        if self.libdir:
            args += self.clib_compiler.get_linker_search_args(self.libdir)
        for lib in self.requested_modules:
            args += self.lib_modules[lib]
        return args
# ---- mesonbuild/dependencies/data/CMakeLists.txt (continued) ----
# fail noisily if attempt to use this file without setting:
# cmake_minimum_required(VERSION ${CMAKE_VERSION})
# project(... LANGUAGES ...)

cmake_policy(SET CMP0000 NEW)

set(PACKAGE_FOUND FALSE)
set(_packageName "${NAME}")
string(TOUPPER "${_packageName}" PACKAGE_NAME)

# Retry find_package() with each architecture in ARCHS until it succeeds.
while(TRUE)
  if ("${VERSION}" STREQUAL "")
    find_package("${NAME}" QUIET COMPONENTS ${COMPS})
  else()
    find_package("${NAME}" "${VERSION}" QUIET COMPONENTS ${COMPS})
  endif()

  # ARCHS has to be set via the CMD interface
  if(${_packageName}_FOUND  OR  ${PACKAGE_NAME}_FOUND OR "${ARCHS}" STREQUAL "")
    break()
  endif()

  list(GET       ARCHS 0 CMAKE_LIBRARY_ARCHITECTURE)
  list(REMOVE_AT ARCHS 0)
endwhile()

if(${_packageName}_FOUND  OR  ${PACKAGE_NAME}_FOUND)
  set(PACKAGE_FOUND TRUE)

  # Check the following variables:
  # FOO_VERSION
  # Foo_VERSION
  # FOO_VERSION_STRING
  # Foo_VERSION_STRING
  if(NOT DEFINED PACKAGE_VERSION)
    if(DEFINED ${_packageName}_VERSION)
      set(PACKAGE_VERSION "${${_packageName}_VERSION}")
    elseif(DEFINED ${PACKAGE_NAME}_VERSION)
      set(PACKAGE_VERSION "${${PACKAGE_NAME}_VERSION}")
    elseif(DEFINED ${_packageName}_VERSION_STRING)
      set(PACKAGE_VERSION "${${_packageName}_VERSION_STRING}")
    elseif(DEFINED ${PACKAGE_NAME}_VERSION_STRING)
      set(PACKAGE_VERSION "${${PACKAGE_NAME}_VERSION_STRING}")
    endif()
  endif()

  # Check the following variables:
  # FOO_LIBRARIES
  # Foo_LIBRARIES
  # FOO_LIBS
  # Foo_LIBS
  set(libs)
  if(DEFINED ${_packageName}_LIBRARIES)
    set(libs ${_packageName}_LIBRARIES)
  elseif(DEFINED ${PACKAGE_NAME}_LIBRARIES)
    set(libs ${PACKAGE_NAME}_LIBRARIES)
  elseif(DEFINED ${_packageName}_LIBS)
    set(libs ${_packageName}_LIBS)
  elseif(DEFINED ${PACKAGE_NAME}_LIBS)
    set(libs ${PACKAGE_NAME}_LIBS)
  endif()

  # Check the following variables:
  # FOO_INCLUDE_DIRS
  # Foo_INCLUDE_DIRS
  # FOO_INCLUDES
  # Foo_INCLUDES
  # FOO_INCLUDE_DIR
  # Foo_INCLUDE_DIR
  set(includes)
  if(DEFINED ${_packageName}_INCLUDE_DIRS)
    set(includes ${_packageName}_INCLUDE_DIRS)
  elseif(DEFINED ${PACKAGE_NAME}_INCLUDE_DIRS)
    set(includes ${PACKAGE_NAME}_INCLUDE_DIRS)
  elseif(DEFINED ${_packageName}_INCLUDES)
    set(includes ${_packageName}_INCLUDES)
  elseif(DEFINED ${PACKAGE_NAME}_INCLUDES)
    set(includes ${PACKAGE_NAME}_INCLUDES)
  elseif(DEFINED ${_packageName}_INCLUDE_DIR)
    set(includes ${_packageName}_INCLUDE_DIR)
  elseif(DEFINED ${PACKAGE_NAME}_INCLUDE_DIR)
    set(includes ${PACKAGE_NAME}_INCLUDE_DIR)
  endif()

  # Check the following variables:
  # FOO_DEFINITIONS
  # Foo_DEFINITIONS
  set(definitions)
  if(DEFINED ${_packageName}_DEFINITIONS)
    set(definitions ${_packageName}_DEFINITIONS)
  elseif(DEFINED ${PACKAGE_NAME}_DEFINITIONS)
    set(definitions ${PACKAGE_NAME}_DEFINITIONS)
  endif()

  # Double expansion: 'libs'/'includes'/'definitions' hold the *name* of the
  # variable that contains the actual values.
  set(PACKAGE_INCLUDE_DIRS "${${includes}}")
  set(PACKAGE_DEFINITIONS  "${${definitions}}")
  set(PACKAGE_LIBRARIES    "${${libs}}")
endif()

# ---- mesonbuild/dependencies/data/CMakeListsLLVM.txt ----

set(PACKAGE_FOUND FALSE)

while(TRUE)
  find_package(LLVM REQUIRED CONFIG QUIET)

  # ARCHS has to be set via the CMD interface
  if(LLVM_FOUND OR "${ARCHS}" STREQUAL "")
    break()
  endif()

  list(GET       ARCHS 0 CMAKE_LIBRARY_ARCHITECTURE)
  list(REMOVE_AT ARCHS 0)
endwhile()

if(LLVM_FOUND)
  set(PACKAGE_FOUND TRUE)

  foreach(mod IN LISTS LLVM_MESON_MODULES)
    # Reset variables
    set(out_mods)
    set(real_mods)

    # Generate a lower and upper case version
    string(TOLOWER "${mod}" mod_L)
    string(TOUPPER "${mod}" mod_U)

    # Get the mapped components
    llvm_map_components_to_libnames(out_mods ${mod} ${mod_L} ${mod_U})
    list(SORT              out_mods)
    list(REMOVE_DUPLICATES out_mods)

    # Make sure that the modules exist
    foreach(i IN LISTS out_mods)
      if(TARGET ${i})
        list(APPEND real_mods ${i})
      endif()
    endforeach()

    # Set the output variables
    set(MESON_LLVM_TARGETS_${mod} ${real_mods})
    foreach(i IN LISTS real_mods)
      set(MESON_TARGET_TO_LLVM_${i} ${mod})
    endforeach()
  endforeach()

  # Check the following variables:
  # LLVM_PACKAGE_VERSION
  # LLVM_VERSION
  # LLVM_VERSION_STRING
  if(NOT DEFINED PACKAGE_VERSION)
    if(DEFINED LLVM_PACKAGE_VERSION)
      set(PACKAGE_VERSION "${LLVM_PACKAGE_VERSION}")
    elseif(DEFINED LLVM_VERSION)
      set(PACKAGE_VERSION "${LLVM_VERSION}")
    elseif(DEFINED LLVM_VERSION_STRING)
      set(PACKAGE_VERSION "${LLVM_VERSION_STRING}")
    endif()
  endif()

  # Check the following variables:
  # LLVM_LIBRARIES
  # LLVM_LIBS
  set(libs)
  if(DEFINED LLVM_LIBRARIES)
    set(libs LLVM_LIBRARIES)
  elseif(DEFINED LLVM_LIBS)
    set(libs LLVM_LIBS)
  endif()

  # Check the following variables:
  # LLVM_INCLUDE_DIRS
  # LLVM_INCLUDES
  # LLVM_INCLUDE_DIR
  set(includes)
  if(DEFINED LLVM_INCLUDE_DIRS)
    set(includes LLVM_INCLUDE_DIRS)
  elseif(DEFINED LLVM_INCLUDES)
    set(includes LLVM_INCLUDES)
  elseif(DEFINED LLVM_INCLUDE_DIR)
    set(includes LLVM_INCLUDE_DIR)
  endif()

  # Check the following variables:
  # LLVM_DEFINITIONS
  set(definitions)
  if(DEFINED LLVM_DEFINITIONS)
    set(definitions LLVM_DEFINITIONS)
  endif()

  set(PACKAGE_INCLUDE_DIRS "${${includes}}")
  set(PACKAGE_DEFINITIONS  "${${definitions}}")
  set(PACKAGE_LIBRARIES    "${${libs}}")
endif()

# ---- mesonbuild/dependencies/data/CMakePathInfo.txt ----
cmake_minimum_required(VERSION ${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}.${CMAKE_PATCH_VERSION})

# Collect every prefix/framework/appbundle search path CMake knows about.
set(TMP_PATHS_LIST)
list(APPEND TMP_PATHS_LIST ${CMAKE_PREFIX_PATH})
list(APPEND TMP_PATHS_LIST ${CMAKE_FRAMEWORK_PATH})
list(APPEND TMP_PATHS_LIST ${CMAKE_APPBUNDLE_PATH})
list(APPEND TMP_PATHS_LIST $ENV{CMAKE_PREFIX_PATH})
list(APPEND TMP_PATHS_LIST $ENV{CMAKE_FRAMEWORK_PATH})
list(APPEND TMP_PATHS_LIST $ENV{CMAKE_APPBUNDLE_PATH})
list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_PREFIX_PATH})
list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_FRAMEWORK_PATH})
list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_APPBUNDLE_PATH})

set(LIB_ARCH_LIST)
if(CMAKE_LIBRARY_ARCHITECTURE_REGEX)
  file(GLOB implicit_dirs RELATIVE /lib /lib/*-linux-gnu* )
  foreach(dir ${implicit_dirs})
    if("${dir}" MATCHES "${CMAKE_LIBRARY_ARCHITECTURE_REGEX}")
      list(APPEND LIB_ARCH_LIST "${dir}")
    endif()
  endforeach()
endif()

# "Export" these variables:
set(MESON_ARCH_LIST ${LIB_ARCH_LIST})
set(MESON_PATHS_LIST ${TMP_PATHS_LIST})
set(MESON_CMAKE_ROOT ${CMAKE_ROOT})
set(MESON_CMAKE_SYSROOT ${CMAKE_SYSROOT})
set(MESON_FIND_ROOT_PATH ${CMAKE_FIND_ROOT_PATH})

message(STATUS ${TMP_PATHS_LIST})

# Copyright 2013-2021 The Meson development team

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

#     http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations

from .base import ExternalDependency, DependencyException, DependencyMethods, NotFoundDependency
from .cmake import CMakeDependency
from .dub import DubDependency
from .framework import ExtraFrameworkDependency
from .pkgconfig import PkgConfigDependency

from ..mesonlib import listify, MachineChoice, PerMachine
from .. import mlog
import functools
import typing as T

if T.TYPE_CHECKING:
    from ..environment import Environment
    from .factory import DependencyFactory, WrappedFactoryFunc, DependencyGenerator

    TV_DepIDEntry = T.Union[str, bool, int, T.Tuple[str, ...]]
    TV_DepID = T.Tuple[T.Tuple[str, TV_DepIDEntry], ...]

# These must be defined in this file to avoid cyclical references.
packages: T.Dict[
    str,
    T.Union[T.Type[ExternalDependency], 'DependencyFactory', 'WrappedFactoryFunc']
] = {}
_packages_accept_language: T.Set[str] = set()

def get_dep_identifier(name: str, kwargs: T.Dict[str, T.Any]) -> 'TV_DepID':
    """Build a hashable cache key for a dependency() lookup from its name and
    the caching-relevant subset of its keyword arguments."""
    identifier: 'TV_DepID' = (('name', name), )
    from ..interpreter import permitted_dependency_kwargs
    assert len(permitted_dependency_kwargs) == 19, \
        'Extra kwargs have been added to dependency(), please review if it makes sense to handle it here'
    for key, value in kwargs.items():
        # 'version' is irrelevant for caching; the caller must check version matches
        # 'native' is handled above with `for_machine`
        # 'required' is irrelevant for caching; the caller handles it separately
        # 'fallback' and 'allow_fallback' is not part of the cache because,
        #     once a dependency has been found through a fallback, it should
        #     be used for the rest of the Meson run.
        # 'default_options' is only used in fallback case
        # 'not_found_message' has no impact on the dependency lookup
        # 'include_type' is handled after the dependency lookup
        if key in {'version', 'native', 'required', 'fallback', 'allow_fallback', 'default_options',
                   'not_found_message', 'include_type'}:
            continue
        # All keyword arguments are strings, ints, or lists (or lists of lists)
        if isinstance(value, list):
            for i in value:
                assert isinstance(i, str)
            value = tuple(frozenset(listify(value)))
        else:
            assert isinstance(value, (str, bool, int))
        identifier = (*identifier, (key, value),)
    return identifier

# Maps lower-cased package names to the casing used in user-facing logs.
display_name_map = {
    'boost': 'Boost',
    'cuda': 'CUDA',
    'dub': 'DUB',
    'gmock': 'GMock',
    'gtest': 'GTest',
    'hdf5': 'HDF5',
    'llvm': 'LLVM',
    'mpi': 'MPI',
    'netcdf': 'NetCDF',
    'openmp': 'OpenMP',
    'wxwidgets': 'WxWidgets',
}

def find_external_dependency(name: str, env: 'Environment', kwargs: T.Dict[str, object]) -> T.Union['ExternalDependency', NotFoundDependency]:
    """Try each candidate detection method for *name* in order and return the
    first dependency that is found.

    Raises DependencyException when the dependency is required and no method
    succeeds; otherwise returns a NotFoundDependency.
    """
    assert name
    required = kwargs.get('required', True)
    if not isinstance(required, bool):
        raise DependencyException('Keyword "required" must be a boolean.')
    if not isinstance(kwargs.get('method', ''), str):
        raise DependencyException('Keyword "method" must be a string.')
    lname = name.lower()
    if lname not in _packages_accept_language and 'language' in kwargs:
        raise DependencyException(f'{name} dependency does not accept "language" keyword argument')
    if not isinstance(kwargs.get('version', ''), (str, list)):
        raise DependencyException('Keyword "Version" must be string or list.')

    # display the dependency name with correct casing
    display_name = display_name_map.get(lname, lname)

    for_machine = MachineChoice.BUILD if kwargs.get('native', False) else MachineChoice.HOST

    type_text = PerMachine('Build-time', 'Run-time')[for_machine] + ' dependency'

    # build a list of dependency methods to try
    candidates = _build_external_dependency_list(name, env, for_machine, kwargs)

    pkg_exc: T.List[DependencyException] = []
    pkgdep: T.List[ExternalDependency] = []
    details = ''

    for c in candidates:
        # try this dependency method
        try:
            d = c()
            d._check_version()
            pkgdep.append(d)
        except DependencyException as e:
            assert isinstance(c, functools.partial), 'for mypy'
            bettermsg = f'Dependency lookup for {name} with method {c.func.log_tried()!r} failed: {e}'
            mlog.debug(bettermsg)
            e.args = (bettermsg,)
            pkg_exc.append(e)
        else:
            pkg_exc.append(None)
            details = d.log_details()
            if details:
                details = '(' + details + ') '
            if 'language' in kwargs:
                details += 'for ' + d.language + ' '

            # if the dependency was found
            if d.found():

                info: mlog.TV_LoggableList = []
                if d.version:
                    info.append(mlog.normal_cyan(d.version))

                log_info = d.log_info()
                if log_info:
                    info.append('(' + log_info + ')')

                mlog.log(type_text, mlog.bold(display_name), details + 'found:', mlog.green('YES'), *info)

                return d

    # otherwise, the dependency could not be found
    tried_methods = [d.log_tried() for d in pkgdep if d.log_tried()]
    if tried_methods:
        tried = mlog.format_list(tried_methods)
    else:
        tried = ''

    mlog.log(type_text, mlog.bold(display_name), details + 'found:', mlog.red('NO'),
             f'(tried {tried})' if tried else '')

    if required:
        # if an exception occurred with the first detection method, re-raise it
        # (on the grounds that it came from the preferred dependency detection
        # method)
        if pkg_exc and pkg_exc[0]:
            raise pkg_exc[0]

        # we have a list of failed ExternalDependency objects, so we can report
        # the methods we tried to find the dependency
        raise DependencyException(f'Dependency "{name}" not found' +
                                  (f', tried {tried}' if tried else ''))

    return NotFoundDependency(name, env)


def _build_external_dependency_list(name: str, env: 'Environment', for_machine: MachineChoice,
                                    kwargs: T.Dict[str, T.Any]) -> T.List['DependencyGenerator']:
    """Assemble the ordered list of lazily-constructed dependency candidates
    honouring a package-specific factory and the 'method' kwarg."""
    # First check if the method is valid
    if 'method' in kwargs and kwargs['method'] not in [e.value for e in DependencyMethods]:
        raise DependencyException('method {!r} is invalid'.format(kwargs['method']))

    # Is there a specific dependency detector for this dependency?
    lname = name.lower()
    if lname in packages:
        # Create the list of dependency object constructors using a factory
        # class method, if one exists, otherwise the list just consists of the
        # constructor
        if isinstance(packages[lname], type):
            entry1 = T.cast('T.Type[ExternalDependency]', packages[lname])  # mypy doesn't understand isinstance(..., type)
            if issubclass(entry1, ExternalDependency):
                func: T.Callable[[], 'ExternalDependency'] = functools.partial(entry1, env, kwargs)
                dep = [func]
        else:
            entry2 = T.cast('T.Union[DependencyFactory, WrappedFactoryFunc]', packages[lname])
            dep = entry2(env, for_machine, kwargs)
        return dep

    candidates: T.List['DependencyGenerator'] = []

    # If it's explicitly requested, use the dub detection method (only)
    if 'dub' == kwargs.get('method', ''):
        candidates.append(functools.partial(DubDependency, name, env, kwargs))
        return candidates

    # If it's explicitly requested, use the pkgconfig detection method (only)
    if 'pkg-config' == kwargs.get('method', ''):
        candidates.append(functools.partial(PkgConfigDependency, name, env, kwargs))
        return candidates

    # If it's explicitly requested, use the CMake detection method (only)
    if 'cmake' == kwargs.get('method', ''):
        candidates.append(functools.partial(CMakeDependency, name, env, kwargs))
        return candidates

    # If it's explicitly requested, use the Extraframework detection method (only)
    if 'extraframework' == kwargs.get('method', ''):
        # On OSX, also try framework dependency detector
        if env.machines[for_machine].is_darwin():
            candidates.append(functools.partial(ExtraFrameworkDependency, name, env, kwargs))
        return candidates

    # Otherwise, just use the pkgconfig and cmake dependency detector
    if 'auto' == kwargs.get('method', 'auto'):
        candidates.append(functools.partial(PkgConfigDependency, name, env, kwargs))

        # On OSX, also try framework dependency detector
        if env.machines[for_machine].is_darwin():
            candidates.append(functools.partial(ExtraFrameworkDependency, name, env, kwargs))

        # Only use CMake as a last resort, since it might not work 100% (see #6113)
        candidates.append(functools.partial(CMakeDependency, name, env, kwargs))

    return candidates

# Copyright 2013-2019 The Meson development team

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

#     http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This file contains the detection logic for external dependencies useful for
# development purposes, such as testing, debugging, etc..

from __future__ import annotations

import glob
import os
import re
import pathlib
import shutil
import subprocess
import typing as T

from mesonbuild.interpreterbase.decorators import FeatureDeprecated

from .. import mesonlib, mlog
def get_shared_library_suffix(environment: 'Environment', for_machine: 'MachineChoice') -> str:
    """Return the shared-library filename suffix for the target machine.

    This is only guaranteed to work for languages that compile to machine
    code, not for languages like C# that use a bytecode and always end in
    .dll regardless of platform.
    """
    machine = environment.machines[for_machine]
    if machine.is_darwin():
        return '.dylib'
    # Everything that is neither Windows nor macOS uses the ELF convention.
    return '.dll' if machine.is_windows() else '.so'
class GTestDependencySystem(SystemDependency):
    """System detection for GTest.

    Tries a prebuilt libgtest first; failing that, falls back to the
    distro-provided sources under /usr/src, which the consumer then builds
    itself (self.sources/self.compile_args carry what is needed).
    """

    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
        super().__init__(name, environment, kwargs, language='cpp')
        # main=True means the consumer also wants GTest's main() entry point.
        self.main = kwargs.get('main', False)
        # Locations where distros ship gtest as source rather than a library.
        self.src_dirs = ['/usr/src/gtest/src', '/usr/src/googletest/googletest/src']
        # GTest needs a threads dependency; if that fails there is no point
        # probing further.
        if not self._add_sub_dependency(threads_factory(environment, self.for_machine, {})):
            self.is_found = False
            return
        self.detect()

    def detect(self) -> None:
        """Probe for a prebuilt library, then for a source checkout."""
        gtest_detect = self.clib_compiler.find_library("gtest", self.env, [])
        gtest_main_detect = self.clib_compiler.find_library("gtest_main", self.env, [])
        if gtest_detect and (not self.main or gtest_main_detect):
            # Prebuilt library found (and gtest_main too, if main() was asked for).
            self.is_found = True
            self.compile_args = []
            self.link_args = gtest_detect
            if self.main:
                self.link_args += gtest_main_detect
            self.sources = []
            self.prebuilt = True
        elif self.detect_srcdir():
            # Source fallback: consumer compiles gtest-all.cc (and gtest_main.cc).
            self.is_found = True
            self.compile_args = ['-I' + d for d in self.src_include_dirs]
            self.link_args = []
            if self.main:
                self.sources = [self.all_src, self.main_src]
            else:
                self.sources = [self.all_src]
            self.prebuilt = False
        else:
            self.is_found = False

    def detect_srcdir(self) -> bool:
        """Find a source directory from self.src_dirs; set source/include paths."""
        for s in self.src_dirs:
            if os.path.exists(s):
                self.src_dir = s
                self.all_src = mesonlib.File.from_absolute_file(
                    os.path.join(self.src_dir, 'gtest-all.cc'))
                self.main_src = mesonlib.File.from_absolute_file(
                    os.path.join(self.src_dir, 'gtest_main.cc'))
                # Both the source root and its include/ sibling are needed.
                self.src_include_dirs = [os.path.normpath(os.path.join(self.src_dir, '..')),
                                         os.path.normpath(os.path.join(self.src_dir, '../include')),
                                         ]
                return True
        return False

    def log_info(self) -> str:
        # Shown in meson's configure summary.
        if self.prebuilt:
            return 'prebuilt'
        else:
            return 'building self'


class GTestDependencyPC(PkgConfigDependency):
    """pkg-config detection for GTest; uses the gtest_main package when a
    main() entry point is requested."""

    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
        assert name == 'gtest'
        if kwargs.get('main'):
            name = 'gtest_main'
        super().__init__(name, environment, kwargs)
class GMockDependencySystem(SystemDependency):
    """System detection for GMock.

    GMock always pulls in GTest (mirroring the WrapDB layout); like GTest it
    may be found prebuilt or as distro-shipped sources.
    """

    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
        super().__init__(name, environment, kwargs, language='cpp')
        self.main = kwargs.get('main', False)
        if not self._add_sub_dependency(threads_factory(environment, self.for_machine, {})):
            self.is_found = False
            return

        # If we are getting main() from GMock, we definitely
        # want to avoid linking in main() from GTest
        gtest_kwargs = kwargs.copy()
        if self.main:
            gtest_kwargs['main'] = False

        # GMock without GTest is pretty much useless
        # this also mimics the structure given in WrapDB,
        # where GMock always pulls in GTest
        found = self._add_sub_dependency(gtest_factory(environment, self.for_machine, gtest_kwargs))
        if not found:
            self.is_found = False
            return

        # GMock may be a library or just source.
        # Work with both.
        gmock_detect = self.clib_compiler.find_library("gmock", self.env, [])
        gmock_main_detect = self.clib_compiler.find_library("gmock_main", self.env, [])
        if gmock_detect and (not self.main or gmock_main_detect):
            self.is_found = True
            self.link_args += gmock_detect
            if self.main:
                self.link_args += gmock_main_detect
            self.prebuilt = True
            return

        # Source fallback: look in the usual distro source locations.
        for d in ['/usr/src/googletest/googlemock/src', '/usr/src/gmock/src', '/usr/src/gmock']:
            if os.path.exists(d):
                self.is_found = True
                # Yes, we need both because there are multiple
                # versions of gmock that do different things.
                d2 = os.path.normpath(os.path.join(d, '..'))
                self.compile_args += ['-I' + d, '-I' + d2, '-I' + os.path.join(d2, 'include')]
                all_src = mesonlib.File.from_absolute_file(os.path.join(d, 'gmock-all.cc'))
                main_src = mesonlib.File.from_absolute_file(os.path.join(d, 'gmock_main.cc'))
                if self.main:
                    self.sources += [all_src, main_src]
                else:
                    self.sources += [all_src]
                self.prebuilt = False
                return

        self.is_found = False

    def log_info(self) -> str:
        # Shown in meson's configure summary.
        if self.prebuilt:
            return 'prebuilt'
        else:
            return 'building self'


class GMockDependencyPC(PkgConfigDependency):
    """pkg-config detection for GMock; uses the gmock_main package when a
    main() entry point is requested."""

    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
        assert name == 'gmock'
        if kwargs.get('main'):
            name = 'gmock_main'
        super().__init__(name, environment, kwargs)
+ """ + tool_name = 'llvm-config' + __cpp_blacklist = {'-DNDEBUG'} + + def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]): + self.tools = get_llvm_tool_names('llvm-config') + + # Fedora starting with Fedora 30 adds a suffix of the number + # of bits in the isa that llvm targets, for example, on x86_64 + # and aarch64 the name will be llvm-config-64, on x86 and arm + # it will be llvm-config-32. + if environment.machines[self.get_for_machine_from_kwargs(kwargs)].is_64_bit: + self.tools.append('llvm-config-64') + else: + self.tools.append('llvm-config-32') + + # It's necessary for LLVM <= 3.8 to use the C++ linker. For 3.9 and 4.0 + # the C linker works fine if only using the C API. + super().__init__(name, environment, kwargs, language='cpp') + self.provided_modules: T.List[str] = [] + self.required_modules: mesonlib.OrderedSet[str] = mesonlib.OrderedSet() + self.module_details: T.List[str] = [] + if not self.is_found: + return + + self.provided_modules = self.get_config_value(['--components'], 'modules') + modules = stringlistify(extract_as_list(kwargs, 'modules')) + self.check_components(modules) + opt_modules = stringlistify(extract_as_list(kwargs, 'optional_modules')) + self.check_components(opt_modules, required=False) + + cargs = mesonlib.OrderedSet(self.get_config_value(['--cppflags'], 'compile_args')) + self.compile_args = list(cargs.difference(self.__cpp_blacklist)) + + if version_compare(self.version, '>= 3.9'): + self._set_new_link_args(environment) + else: + self._set_old_link_args() + self.link_args = strip_system_libdirs(environment, self.for_machine, self.link_args) + self.link_args = self.__fix_bogus_link_args(self.link_args) + if not self._add_sub_dependency(threads_factory(environment, self.for_machine, {})): + self.is_found = False + return + + def __fix_bogus_link_args(self, args: T.List[str]) -> T.List[str]: + """This function attempts to fix bogus link arguments that llvm-config + generates. 
+ + Currently it works around the following: + - FreeBSD: when statically linking -l/usr/lib/libexecinfo.so will + be generated, strip the -l in cases like this. + - Windows: We may get -LIBPATH:... which is later interpreted as + "-L IBPATH:...", if we're using an msvc like compilers convert + that to "/LIBPATH", otherwise to "-L ..." + """ + + new_args = [] + for arg in args: + if arg.startswith('-l') and arg.endswith('.so'): + new_args.append(arg.lstrip('-l')) + elif arg.startswith('-LIBPATH:'): + cpp = self.env.coredata.compilers[self.for_machine]['cpp'] + new_args.extend(cpp.get_linker_search_args(arg.lstrip('-LIBPATH:'))) + else: + new_args.append(arg) + return new_args + + def __check_libfiles(self, shared: bool) -> None: + """Use llvm-config's --libfiles to check if libraries exist.""" + mode = '--link-shared' if shared else '--link-static' + + # Set self.required to true to force an exception in get_config_value + # if the returncode != 0 + restore = self.required + self.required = True + + try: + # It doesn't matter what the stage is, the caller needs to catch + # the exception anyway. + self.link_args = self.get_config_value(['--libfiles', mode], '') + finally: + self.required = restore + + def _set_new_link_args(self, environment: 'Environment') -> None: + """How to set linker args for LLVM versions >= 3.9""" + try: + mode = self.get_config_value(['--shared-mode'], 'link_args')[0] + except IndexError: + mlog.debug('llvm-config --shared-mode returned an error') + self.is_found = False + return + + if not self.static and mode == 'static': + # If llvm is configured with LLVM_BUILD_LLVM_DYLIB but not with + # LLVM_LINK_LLVM_DYLIB and not LLVM_BUILD_SHARED_LIBS (which + # upstream doesn't recommend using), then llvm-config will lie to + # you about how to do shared-linking. It wants to link to a a bunch + # of individual shared libs (which don't exist because llvm wasn't + # built with LLVM_BUILD_SHARED_LIBS. 
+ # + # Therefore, we'll try to get the libfiles, if the return code is 0 + # or we get an empty list, then we'll try to build a working + # configuration by hand. + try: + self.__check_libfiles(True) + except DependencyException: + lib_ext = get_shared_library_suffix(environment, self.for_machine) + libdir = self.get_config_value(['--libdir'], 'link_args')[0] + # Sort for reproducibility + matches = sorted(glob.iglob(os.path.join(libdir, f'libLLVM*{lib_ext}'))) + if not matches: + if self.required: + raise + self.is_found = False + return + + self.link_args = self.get_config_value(['--ldflags'], 'link_args') + libname = os.path.basename(matches[0]).rstrip(lib_ext).lstrip('lib') + self.link_args.append(f'-l{libname}') + return + elif self.static and mode == 'shared': + # If, however LLVM_BUILD_SHARED_LIBS is true # (*cough* gentoo *cough*) + # then this is correct. Building with LLVM_BUILD_SHARED_LIBS has a side + # effect, it stops the generation of static archives. Therefore we need + # to check for that and error out on static if this is the case + try: + self.__check_libfiles(False) + except DependencyException: + if self.required: + raise + self.is_found = False + return + + link_args = ['--link-static', '--system-libs'] if self.static else ['--link-shared'] + self.link_args = self.get_config_value( + ['--libs', '--ldflags'] + link_args + list(self.required_modules), + 'link_args') + + def _set_old_link_args(self) -> None: + """Setting linker args for older versions of llvm. + + Old versions of LLVM bring an extra level of insanity with them. + llvm-config will provide the correct arguments for static linking, but + not for shared-linnking, we have to figure those out ourselves, because + of course we do. 
+ """ + if self.static: + self.link_args = self.get_config_value( + ['--libs', '--ldflags', '--system-libs'] + list(self.required_modules), + 'link_args') + else: + # llvm-config will provide arguments for static linking, so we get + # to figure out for ourselves what to link with. We'll do that by + # checking in the directory provided by --libdir for a library + # called libLLVM-<ver>.(so|dylib|dll) + libdir = self.get_config_value(['--libdir'], 'link_args')[0] + + expected_name = f'libLLVM-{self.version}' + re_name = re.compile(fr'{expected_name}.(so|dll|dylib)$') + + for file_ in os.listdir(libdir): + if re_name.match(file_): + self.link_args = [f'-L{libdir}', + '-l{}'.format(os.path.splitext(file_.lstrip('lib'))[0])] + break + else: + raise DependencyException( + 'Could not find a dynamically linkable library for LLVM.') + + def check_components(self, modules: T.List[str], required: bool = True) -> None: + """Check for llvm components (modules in meson terms). + + The required option is whether the module is required, not whether LLVM + is required. 
+ """ + for mod in sorted(set(modules)): + status = '' + + if mod not in self.provided_modules: + if required: + self.is_found = False + if self.required: + raise DependencyException( + f'Could not find required LLVM Component: {mod}') + status = '(missing)' + else: + status = '(missing but optional)' + else: + self.required_modules.add(mod) + + self.module_details.append(mod + status) + + def log_details(self) -> str: + if self.module_details: + return 'modules: ' + ', '.join(self.module_details) + return '' + +class LLVMDependencyCMake(CMakeDependency): + def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]) -> None: + self.llvm_modules = stringlistify(extract_as_list(kwargs, 'modules')) + self.llvm_opt_modules = stringlistify(extract_as_list(kwargs, 'optional_modules')) + + compilers = None + if kwargs.get('native', False): + compilers = env.coredata.compilers.build + else: + compilers = env.coredata.compilers.host + if not compilers or not all(x in compilers for x in ('c', 'cpp')): + # Initialize basic variables + ExternalDependency.__init__(self, DependencyTypeName('cmake'), env, kwargs) + + # Initialize CMake specific variables + self.found_modules: T.List[str] = [] + self.name = name + + # Warn and return + mlog.warning('The LLVM dependency was not found via CMake since both a C and C++ compiler are required.') + return + + super().__init__(name, env, kwargs, language='cpp', force_use_global_compilers=True) + + # Cmake will always create a statically linked binary, so don't use + # cmake if dynamic is required + if not self.static: + self.is_found = False + mlog.warning('Ignoring LLVM CMake dependency because dynamic was requested') + return + + if self.traceparser is None: + return + + # Extract extra include directories and definitions + inc_dirs = self.traceparser.get_cmake_var('PACKAGE_INCLUDE_DIRS') + defs = self.traceparser.get_cmake_var('PACKAGE_DEFINITIONS') + # LLVM explicitly uses space-separated variables rather than 
class LLVMDependencyCMake(CMakeDependency):
    """CMake-based LLVM detection, driven by a custom CMakeListsLLVM.txt.

    Only usable for static linking: CMake's LLVM package always produces a
    statically linked result.
    """

    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
        self.llvm_modules = stringlistify(extract_as_list(kwargs, 'modules'))
        self.llvm_opt_modules = stringlistify(extract_as_list(kwargs, 'optional_modules'))

        # CMake detection needs both a C and a C++ compiler for the machine
        # we are resolving for.
        compilers = None
        if kwargs.get('native', False):
            compilers = env.coredata.compilers.build
        else:
            compilers = env.coredata.compilers.host
        if not compilers or not all(x in compilers for x in ('c', 'cpp')):
            # Initialize basic variables
            ExternalDependency.__init__(self, DependencyTypeName('cmake'), env, kwargs)

            # Initialize CMake specific variables
            self.found_modules: T.List[str] = []
            self.name = name

            # Warn and return
            mlog.warning('The LLVM dependency was not found via CMake since both a C and C++ compiler are required.')
            return

        super().__init__(name, env, kwargs, language='cpp', force_use_global_compilers=True)

        # Cmake will always create a statically linked binary, so don't use
        # cmake if dynamic is required
        if not self.static:
            self.is_found = False
            mlog.warning('Ignoring LLVM CMake dependency because dynamic was requested')
            return

        if self.traceparser is None:
            return

        # Extract extra include directories and definitions
        inc_dirs = self.traceparser.get_cmake_var('PACKAGE_INCLUDE_DIRS')
        defs = self.traceparser.get_cmake_var('PACKAGE_DEFINITIONS')
        # LLVM explicitly uses space-separated variables rather than semicolon lists
        if len(defs) == 1:
            defs = defs[0].split(' ')
        # De-duplicate while preserving order.
        temp = ['-I' + x for x in inc_dirs] + defs
        self.compile_args += [x for x in temp if x not in self.compile_args]
        if not self._add_sub_dependency(threads_factory(env, self.for_machine, {})):
            self.is_found = False
            return

    def _main_cmake_file(self) -> str:
        # Use a custom CMakeLists.txt for LLVM
        return 'CMakeListsLLVM.txt'

    def _extra_cmake_opts(self) -> T.List[str]:
        # Pass the requested modules through to CMakeListsLLVM.txt.
        return ['-DLLVM_MESON_MODULES={}'.format(';'.join(self.llvm_modules + self.llvm_opt_modules))]

    def _map_module_list(self, modules: T.List[T.Tuple[str, bool]], components: T.List[T.Tuple[str, bool]]) -> T.List[T.Tuple[str, bool]]:
        # Map each (module, required) pair to the CMake targets recorded by
        # the trace parser; a missing required module is fatal.
        res = []
        for mod, required in modules:
            cm_targets = self.traceparser.get_cmake_var(f'MESON_LLVM_TARGETS_{mod}')
            if not cm_targets:
                if required:
                    raise self._gen_exception(f'LLVM module {mod} was not found')
                else:
                    mlog.warning('Optional LLVM module', mlog.bold(mod), 'was not found')
                continue
            for i in cm_targets:
                res += [(i, required)]
        return res

    def _original_module_name(self, module: str) -> str:
        # Reverse lookup from CMake target back to the LLVM module name.
        orig_name = self.traceparser.get_cmake_var(f'MESON_TARGET_TO_LLVM_{module}')
        if orig_name:
            return orig_name[0]
        return module
class ValgrindDependency(PkgConfigDependency):
    '''
    Consumers of Valgrind usually only need the compile args and do not want to
    link to its (static) libraries.
    '''
    def __init__(self, env: 'Environment', kwargs: T.Dict[str, T.Any]):
        super().__init__('valgrind', env, kwargs)

    def get_link_args(self, language: T.Optional[str] = None, raw: bool = False) -> T.List[str]:
        # Deliberately suppress link arguments: only valgrind's headers are used.
        return []


class ZlibSystemDependency(SystemDependency):
    """System detection for zlib, probing the compiler directly instead of
    pkg-config/CMake."""

    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
        super().__init__(name, environment, kwargs)
        from ..compilers.c import AppleClangCCompiler
        from ..compilers.cpp import AppleClangCPPCompiler

        m = self.env.machines[self.for_machine]

        # I'm not sure this is entirely correct. What if we're cross compiling
        # from something to macOS?
        if ((m.is_darwin() and isinstance(self.clib_compiler, (AppleClangCCompiler, AppleClangCPPCompiler))) or
                m.is_freebsd() or m.is_dragonflybsd() or m.is_android()):
            # No need to set includes,
            # on macos xcode/clang will do that for us.
            # on freebsd zlib.h is in /usr/include

            self.is_found = True
            self.link_args = ['-lz']
        else:
            # Windows zlib builds use different library names depending on
            # toolchain convention.
            if self.clib_compiler.get_argument_syntax() == 'msvc':
                libs = ['zlib1', 'zlib']
            else:
                libs = ['z']
            for lib in libs:
                l = self.clib_compiler.find_library(lib, environment, [])
                h = self.clib_compiler.has_header('zlib.h', '', environment, dependencies=[self])
                if l and h[0]:
                    self.is_found = True
                    self.link_args = l
                    break
            else:
                # Nothing found: is_found stays False and version detection
                # below is skipped.
                return

        # Extract the version from the header macro; strip the surrounding quotes.
        v, _ = self.clib_compiler.get_define('ZLIB_VERSION', '#include <zlib.h>', self.env, [], [self])
        self.version = v.strip('"')
class JNISystemDependency(SystemDependency):
    """System dependency for the JDK's JNI headers and (optionally) the jvm
    and awt libraries, located via JAVA_HOME or the javac on PATH."""

    def __init__(self, environment: 'Environment', kwargs: JNISystemDependencyKW):
        super().__init__('jni', environment, T.cast('T.Dict[str, T.Any]', kwargs))

        self.feature_since = ('0.62.0', '')

        m = self.env.machines[self.for_machine]

        # Make sure a Java compiler is available; its version stands in for
        # the JDK version.
        if 'java' not in environment.coredata.compilers[self.for_machine]:
            detect_compiler(self.name, environment, self.for_machine, 'java')
        self.javac = environment.coredata.compilers[self.for_machine]['java']
        self.version = self.javac.version

        modules: T.List[str] = mesonlib.listify(kwargs.get('modules', []))
        for module in modules:
            if module not in {'jvm', 'awt'}:
                log = mlog.error if self.required else mlog.debug
                log(f'Unknown JNI module ({module})')
                self.is_found = False
                return

        if 'version' in kwargs and not version_compare(self.version, kwargs['version']):
            mlog.error(f'Incorrect JDK version found ({self.version}), wanted {kwargs["version"]}')
            self.is_found = False
            return

        # Prefer an explicitly configured java_home; otherwise derive it from
        # the javac binary's location (two levels above the resolved path).
        self.java_home = environment.properties[self.for_machine].get_java_home()
        if not self.java_home:
            self.java_home = pathlib.Path(shutil.which(self.javac.exelist[0])).resolve().parents[1]
            if m.is_darwin():
                # The Apple JavaVM framework shim is not a usable JDK; ask
                # /usr/libexec/java_home for the real one.
                problem_java_prefix = pathlib.Path('/System/Library/Frameworks/JavaVM.framework/Versions')
                if problem_java_prefix in self.java_home.parents:
                    res = subprocess.run(['/usr/libexec/java_home', '--failfast', '--arch', m.cpu_family],
                                         stdout=subprocess.PIPE)
                    if res.returncode != 0:
                        log = mlog.error if self.required else mlog.debug
                        log('JAVA_HOME could not be discovered on the system. Please set it explicitly.')
                        self.is_found = False
                        return
                    self.java_home = pathlib.Path(res.stdout.decode().strip())

        platform_include_dir = self.__machine_info_to_platform_include_dir(m)
        if platform_include_dir is None:
            mlog.error("Could not find a JDK platform include directory for your OS, please open an issue or provide a pull request.")
            self.is_found = False
            return

        # JNI needs both include/ and its platform-specific subdirectory.
        java_home_include = self.java_home / 'include'
        self.compile_args.append(f'-I{java_home_include}')
        self.compile_args.append(f'-I{java_home_include / platform_include_dir}')

        if modules:
            if m.is_windows():
                java_home_lib = self.java_home / 'lib'
                java_home_lib_server = java_home_lib
            else:
                # JDK <= 1.8 keeps libraries under jre/lib/<arch>.
                if version_compare(self.version, '<= 1.8.0'):
                    java_home_lib = self.java_home / 'jre' / 'lib' / self.__cpu_translate(m.cpu_family)
                else:
                    java_home_lib = self.java_home / 'lib'

                java_home_lib_server = java_home_lib / 'server'

            if 'jvm' in modules:
                jvm = self.clib_compiler.find_library('jvm', environment, extra_dirs=[str(java_home_lib_server)])
                if jvm is None:
                    mlog.debug('jvm library not found.')
                    self.is_found = False
                else:
                    self.link_args.extend(jvm)
            if 'awt' in modules:
                jawt = self.clib_compiler.find_library('jawt', environment, extra_dirs=[str(java_home_lib)])
                if jawt is None:
                    mlog.debug('jawt library not found.')
                    self.is_found = False
                else:
                    self.link_args.extend(jawt)

        # NOTE(review): this unconditionally sets is_found even when a jvm/awt
        # lookup above set it to False without returning — confirm whether the
        # module-lookup failures are meant to make the dependency not-found.
        self.is_found = True

    @staticmethod
    def __cpu_translate(cpu: str) -> str:
        '''
        The JDK and Meson have a disagreement here, so translate it over. In the event more
        translation needs to be done, add to following dict.
        '''
        java_cpus = {
            'x86_64': 'amd64',
        }

        return java_cpus.get(cpu, cpu)

    @staticmethod
    def __machine_info_to_platform_include_dir(m: 'MachineInfo') -> T.Optional[str]:
        '''Translates the machine information to the platform-dependent include directory

        When inspecting a JDK release tarball or $JAVA_HOME, inside the `include/` directory is a
        platform-dependent directory that must be on the target's include path in addition to the
        parent `include/` directory.
        '''
        if m.is_linux():
            return 'linux'
        elif m.is_windows():
            return 'win32'
        elif m.is_darwin():
            return 'darwin'
        elif m.is_sunos():
            return 'solaris'
        elif m.is_freebsd():
            return 'freebsd'
        elif m.is_netbsd():
            return 'netbsd'
        elif m.is_openbsd():
            return 'openbsd'
        elif m.is_dragonflybsd():
            return 'dragonfly'

        return None


class JDKSystemDependency(JNISystemDependency):
    """Deprecated alias for the JNI system dependency."""

    def __init__(self, environment: 'Environment', kwargs: JNISystemDependencyKW):
        super().__init__(environment, kwargs)

        self.feature_since = ('0.59.0', '')
        self.featurechecks.append(FeatureDeprecated(
            'jdk system dependency',
            '0.62.0',
            'Use the jni system dependency instead'
        ))


# Factories wiring each dependency name to its detection classes, in the
# order the methods are tried.
llvm_factory = DependencyFactory(
    'LLVM',
    [DependencyMethods.CMAKE, DependencyMethods.CONFIG_TOOL],
    cmake_class=LLVMDependencyCMake,
    configtool_class=LLVMDependencyConfigTool,
)

gtest_factory = DependencyFactory(
    'gtest',
    [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM],
    pkgconfig_class=GTestDependencyPC,
    system_class=GTestDependencySystem,
)

gmock_factory = DependencyFactory(
    'gmock',
    [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM],
    pkgconfig_class=GMockDependencyPC,
    system_class=GMockDependencySystem,
)

zlib_factory = DependencyFactory(
    'zlib',
    [DependencyMethods.PKGCONFIG, DependencyMethods.CMAKE, DependencyMethods.SYSTEM],
    cmake_name='ZLIB',
    system_class=ZlibSystemDependency,
)
b/mesonbuild/dependencies/dub.py new file mode 100644 index 0000000..ac2b667 --- /dev/null +++ b/mesonbuild/dependencies/dub.py @@ -0,0 +1,404 @@ +# Copyright 2013-2021 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +from .base import ExternalDependency, DependencyException, DependencyTypeName +from .pkgconfig import PkgConfigDependency +from ..mesonlib import (Popen_safe, OptionKey, join_args) +from ..programs import ExternalProgram +from .. 
class DubDependency(ExternalDependency):
    """D dependency resolved through DUB's `describe` output.

    Finds prebuilt static libraries in DUB's build cache that match the
    requested compiler, architecture, configuration and (with a warning)
    build type. Dynamic DUB libraries are not supported.
    """
    # Cached result of the one-time `dub` executable lookup.
    class_dubbin = None

    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
        super().__init__(DependencyTypeName('dub'), environment, kwargs, language='d')
        self.name = name
        from ..compilers.d import DCompiler, d_feature_args

        _temp_comp = super().get_compiler()
        assert isinstance(_temp_comp, DCompiler)
        self.compiler = _temp_comp

        if 'required' in kwargs:
            self.required = kwargs.get('required')

        # Look up the dub binary only once per meson run.
        if DubDependency.class_dubbin is None:
            self.dubbin = self._check_dub()
            DubDependency.class_dubbin = self.dubbin
        else:
            self.dubbin = DubDependency.class_dubbin

        if not self.dubbin:
            if self.required:
                raise DependencyException('DUB not found.')
            self.is_found = False
            return

        assert isinstance(self.dubbin, ExternalProgram)
        mlog.debug('Determining dependency {!r} with DUB executable '
                   '{!r}'.format(name, self.dubbin.get_path()))

        # if an explicit version spec was stated, use this when querying Dub
        main_pack_spec = name
        if 'version' in kwargs:
            version_spec = kwargs['version']
            if isinstance(version_spec, list):
                version_spec = " ".join(version_spec)
            main_pack_spec = f'{name}@{version_spec}'

        # we need to know the target architecture
        dub_arch = self.compiler.arch

        # we need to know the build type as well
        dub_buildtype = str(environment.coredata.get_option(OptionKey('buildtype')))
        # MESON types: choices=['plain', 'debug', 'debugoptimized', 'release', 'minsize', 'custom'])),
        # DUB types: debug (default), plain, release, release-debug, release-nobounds, unittest, profile, profile-gc,
        # docs, ddox, cov, unittest-cov, syntax and custom
        if dub_buildtype == 'debugoptimized':
            dub_buildtype = 'release-debug'
        elif dub_buildtype == 'minsize':
            dub_buildtype = 'release'

        # Ask dub for the package
        describe_cmd = [
            'describe', main_pack_spec, '--arch=' + dub_arch,
            '--build=' + dub_buildtype, '--compiler=' + self.compiler.get_exelist()[-1]
        ]
        ret, res, err = self._call_dubbin(describe_cmd)

        if ret != 0:
            mlog.debug('DUB describe failed: ' + err)
            if 'locally' in err:
                fetch_cmd = ['dub', 'fetch', main_pack_spec]
                mlog.error(mlog.bold(main_pack_spec), 'is not present locally. You may try the following command:')
                mlog.log(mlog.bold(join_args(fetch_cmd)))
            self.is_found = False
            return

        # A command that might be useful in case of missing DUB package
        def dub_build_deep_command() -> str:
            cmd = [
                'dub', 'run', 'dub-build-deep', '--yes', '--', main_pack_spec,
                '--arch=' + dub_arch, '--compiler=' + self.compiler.get_exelist()[-1],
                '--build=' + dub_buildtype
            ]
            return join_args(cmd)

        # Normalize meson compiler ids to DUB's naming.
        dub_comp_id = self.compiler.get_id().replace('llvm', 'ldc').replace('gcc', 'gdc')
        description = json.loads(res)

        self.compile_args = []
        self.link_args = self.raw_link_args = []

        show_buildtype_warning = False

        def find_package_target(pkg: T.Dict[str, str]) -> bool:
            nonlocal show_buildtype_warning
            # try to find a static library in a DUB folder corresponding to
            # version, configuration, compiler, arch and build-type
            # if can find, add to link_args.
            # link_args order is meaningful, so this function MUST be called in the right order
            pack_id = f'{pkg["name"]}@{pkg["version"]}'
            (tgt_file, compatibilities) = self._find_compatible_package_target(description, pkg, dub_comp_id)
            if tgt_file is None:
                if not compatibilities:
                    mlog.error(mlog.bold(pack_id), 'not found')
                elif 'compiler' not in compatibilities:
                    mlog.error(mlog.bold(pack_id), 'found but not compiled with ', mlog.bold(dub_comp_id))
                elif dub_comp_id != 'gdc' and 'compiler_version' not in compatibilities:
                    mlog.error(mlog.bold(pack_id), 'found but not compiled with', mlog.bold(f'{dub_comp_id}-{self.compiler.version}'))
                elif 'arch' not in compatibilities:
                    mlog.error(mlog.bold(pack_id), 'found but not compiled for', mlog.bold(dub_arch))
                elif 'platform' not in compatibilities:
                    # BUGFIX: was description['platform'].join('.') — str.join
                    # called on the list, which raises AttributeError.
                    mlog.error(mlog.bold(pack_id), 'found but not compiled for', mlog.bold('.'.join(description['platform'])))
                elif 'configuration' not in compatibilities:
                    mlog.error(mlog.bold(pack_id), 'found but not compiled for the', mlog.bold(pkg['configuration']), 'configuration')
                else:
                    mlog.error(mlog.bold(pack_id), 'not found')

                mlog.log('You may try the following command to install the necessary DUB libraries:')
                mlog.log(mlog.bold(dub_build_deep_command()))

                return False

            if 'build_type' not in compatibilities:
                mlog.warning(mlog.bold(pack_id), 'found but not compiled as', mlog.bold(dub_buildtype))
                show_buildtype_warning = True

            self.link_args.append(tgt_file)
            return True

        # Main algorithm:
        # 1. Ensure that the target is a compatible library type (not dynamic)
        # 2. Find a compatible built library for the main dependency
        # 3. Do the same for each sub-dependency.
        #    link_args MUST be in the same order than the "linkDependencies" of the main target
        # 4. Add other build settings (imports, versions etc.)

        # 1
        self.is_found = False
        packages = {}
        for pkg in description['packages']:
            packages[pkg['name']] = pkg

            if not pkg['active']:
                continue

            if pkg['targetType'] == 'dynamicLibrary':
                mlog.error('DUB dynamic library dependencies are not supported.')
                self.is_found = False
                return

            # check that the main dependency is indeed a library
            if pkg['name'] == name:
                self.is_found = True

                if pkg['targetType'] not in ['library', 'sourceLibrary', 'staticLibrary']:
                    mlog.error(mlog.bold(name), "found but it isn't a library")
                    self.is_found = False
                    return

                self.version = pkg['version']
                self.pkg = pkg

        # collect all targets
        targets = {}
        for tgt in description['targets']:
            targets[tgt['rootPackage']] = tgt

        if name not in targets:
            self.is_found = False
            if self.pkg['targetType'] == 'sourceLibrary':
                # source libraries have no associated targets,
                # but some build settings like import folders must be found from the package object.
                # Current algo only get these from "buildSettings" in the target object.
                # Let's save this for a future PR.
                # (See openssl DUB package for example of sourceLibrary)
                mlog.error('DUB targets of type', mlog.bold('sourceLibrary'), 'are not supported.')
            else:
                mlog.error('Could not find target description for', mlog.bold(main_pack_spec))

        if not self.is_found:
            mlog.error(f'Could not find {name} in DUB description')
            return

        # Current impl only supports static libraries
        self.static = True

        # 2
        if not find_package_target(self.pkg):
            self.is_found = False
            return

        # 3
        for link_dep in targets[name]['linkDependencies']:
            pkg = packages[link_dep]
            if not find_package_target(pkg):
                self.is_found = False
                return

        if show_buildtype_warning:
            mlog.log('If it is not suitable, try the following command and reconfigure Meson with', mlog.bold('--clearcache'))
            mlog.log(mlog.bold(dub_build_deep_command()))

        # 4
        bs = targets[name]['buildSettings']

        for flag in bs['dflags']:
            self.compile_args.append(flag)

        for path in bs['importPaths']:
            self.compile_args.append('-I' + path)

        for path in bs['stringImportPaths']:
            if 'import_dir' not in d_feature_args[self.compiler.id]:
                break
            flag = d_feature_args[self.compiler.id]['import_dir']
            self.compile_args.append(f'{flag}={path}')

        for ver in bs['versions']:
            if 'version' not in d_feature_args[self.compiler.id]:
                break
            flag = d_feature_args[self.compiler.id]['version']
            self.compile_args.append(f'{flag}={ver}')

        if bs['mainSourceFile']:
            self.compile_args.append(bs['mainSourceFile'])

        # pass static libraries
        # linkerFiles are added during step 3
        # for file in bs['linkerFiles']:
        #     self.link_args.append(file)

        for file in bs['sourceFiles']:
            # sourceFiles may contain static libraries
            if file.endswith('.lib') or file.endswith('.a'):
                self.link_args.append(file)

        for flag in bs['lflags']:
            self.link_args.append(flag)

        is_windows = self.env.machines.host.is_windows()
        if is_windows:
            winlibs = ['kernel32', 'user32', 'gdi32', 'winspool', 'shell32', 'ole32',
                       'oleaut32', 'uuid', 'comdlg32', 'advapi32', 'ws2_32']

        for lib in bs['libs']:
            if os.name != 'nt':
                # trying to add system libraries by pkg-config
                pkgdep = PkgConfigDependency(lib, environment, {'required': 'true', 'silent': 'true'})
                if pkgdep.is_found:
                    for arg in pkgdep.get_compile_args():
                        self.compile_args.append(arg)
                    for arg in pkgdep.get_link_args():
                        self.link_args.append(arg)
                    for arg in pkgdep.get_link_args(raw=True):
                        self.raw_link_args.append(arg)
                    continue

            if is_windows and lib in winlibs:
                self.link_args.append(lib + '.lib')
                continue

            # fallback
            self.link_args.append('-l'+lib)

    # This function finds the target of the provided JSON package, built for the right
    # compiler, architecture, configuration...
    # It returns (target|None, {compatibilities})
    # If None is returned for target, compatibilities will list what other targets were found without full compatibility
    def _find_compatible_package_target(self, jdesc: T.Dict[str, str], jpack: T.Dict[str, str], dub_comp_id: str) -> T.Tuple[str, T.Set[str]]:
        dub_build_path = os.path.join(jpack['path'], '.dub', 'build')

        if not os.path.exists(dub_build_path):
            return (None, None)

        # try to find a dir like library-debug-linux.posix-x86_64-ldc_2081-EF934983A3319F8F8FF2F0E107A363BA

        # fields are:
        #  - configuration
        #  - build type
        #  - platform
        #  - architecture
        #  - compiler id (dmd, ldc, gdc)
        #  - compiler version or frontend id or frontend version?

        conf = jpack['configuration']
        build_type = jdesc['buildType']
        platforms = jdesc['platform']
        archs = jdesc['architecture']

        # Get D frontend version implemented in the compiler, or the compiler version itself
        # gdc doesn't support this
        comp_versions = []

        if dub_comp_id != 'gdc':
            comp_versions.append(self.compiler.version)

            ret, res = self._call_compbin(['--version'])[0:2]
            if ret != 0:
                # BUGFIX: message previously contained a dead '{!r}'
                # placeholder that was printed literally.
                mlog.error('Failed to run', mlog.bold(dub_comp_id))
                return (None, None)
            # BUGFIX: escape the dots so they match literally. Ex.: v2.081.2
            d_ver_reg = re.search(r'v[0-9]\.[0-9][0-9][0-9]\.[0-9]', res)

            if d_ver_reg is not None:
                frontend_version = d_ver_reg.group()
                frontend_id = frontend_version.rsplit('.', 1)[0].replace('v', '').replace('.', '')  # Fix structure. Ex.: 2081
                comp_versions.extend([frontend_version, frontend_id])

        compatibilities: T.Set[str] = set()

        # build_type is not in check_list because different build types might be compatible.
        # We do show a WARNING that the build type is not the same.
        # It might be critical in release builds, and acceptable otherwise
        check_list = ('configuration', 'platform', 'arch', 'compiler', 'compiler_version')

        for entry in os.listdir(dub_build_path):

            target = os.path.join(dub_build_path, entry, jpack['targetFileName'])
            if not os.path.exists(target):
                # unless Dub and Meson are racing, the target file should be present
                # when the directory is present
                mlog.debug("WARNING: Could not find a Dub target: " + target)
                continue

            # we build a new set for each entry, because if this target is returned
            # we want to return only the compatibilities associated to this target
            # otherwise we could miss the WARNING about build_type
            comps = set()

            if conf in entry:
                comps.add('configuration')

            if build_type in entry:
                comps.add('build_type')

            if all(platform in entry for platform in platforms):
                comps.add('platform')

            if all(arch in entry for arch in archs):
                comps.add('arch')

            if dub_comp_id in entry:
                comps.add('compiler')

            if dub_comp_id == 'gdc' or any(cv in entry for cv in comp_versions):
                comps.add('compiler_version')

            if all(key in comps for key in check_list):
                return (target, comps)
            else:
                compatibilities = set.union(compatibilities, comps)

        return (None, compatibilities)

    def _call_dubbin(self, args: T.List[str], env: T.Optional[T.Dict[str, str]] = None) -> T.Tuple[int, str, str]:
        """Run dub with the given arguments; return (returncode, stdout, stderr)."""
        assert isinstance(self.dubbin, ExternalProgram)
        p, out, err = Popen_safe(self.dubbin.get_command() + args, env=env)
        return p.returncode, out.strip(), err.strip()

    def _call_compbin(self, args: T.List[str], env: T.Optional[T.Dict[str, str]] = None) -> T.Tuple[int, str, str]:
        """Run the D compiler with the given arguments; return (returncode, stdout, stderr)."""
        p, out, err = Popen_safe(self.compiler.get_exelist() + args, env=env)
        return p.returncode, out.strip(), err.strip()

    def _check_dub(self) -> T.Union[bool, ExternalProgram]:
        """Locate the dub executable and verify it runs; False when unusable."""
        dubbin: T.Union[bool, ExternalProgram] = ExternalProgram('dub', silent=True)
        assert isinstance(dubbin, ExternalProgram)
        if dubbin.found():
            try:
                p, out = Popen_safe(dubbin.get_command() + ['--version'])[0:2]
                if p.returncode != 0:
                    mlog.warning('Found dub {!r} but couldn\'t run it'
                                 ''.format(' '.join(dubbin.get_command())))
                    # Set to False instead of None to signify that we've already
                    # searched for it and not found it
                    dubbin = False
            except (FileNotFoundError, PermissionError):
                dubbin = False
        else:
            dubbin = False
        if isinstance(dubbin, ExternalProgram):
            mlog.log('Found DUB:', mlog.bold(dubbin.get_path()),
                     '(%s)' % out.strip())
        else:
            mlog.log('Found DUB:', mlog.red('NO'))
        return dubbin
class DependencyFactory:

    """Factory to get dependencies from multiple sources.

    This class provides an initializer that takes a set of names and classes
    for various kinds of dependencies. When the initialized object is called
    it returns a list of callables that return Dependency objects to try in
    order.

    :name: The name of the dependency. This will be passed as the name
        parameter of each dependency unless it is overridden on a per
        type basis.
    :methods: An ordered list of DependencyMethods. This is the order
        dependencies will be returned in unless they are removed by the
        _process_method function
    :*_name: This will overwrite the name passed to the corresponding class.
        For example, if the name is 'zlib', but cmake calls the dependency
        'Z', then using `cmake_name='Z'` will pass the name as 'Z' to cmake.
    :*_class: A *type* or callable that creates a class, and has the
        signature of an ExternalDependency
    :system_class: If you pass DependencyMethods.SYSTEM in methods, you must
        set this argument.
    """

    def __init__(self, name: str, methods: T.List[DependencyMethods], *,
                 extra_kwargs: T.Optional[T.Dict[str, T.Any]] = None,
                 pkgconfig_name: T.Optional[str] = None,
                 pkgconfig_class: 'T.Type[PkgConfigDependency]' = PkgConfigDependency,
                 cmake_name: T.Optional[str] = None,
                 cmake_class: 'T.Union[T.Type[CMakeDependency], CmakeDependencyFunc]' = CMakeDependency,
                 configtool_class: 'T.Optional[T.Type[ConfigToolDependency]]' = None,
                 framework_name: T.Optional[str] = None,
                 framework_class: 'T.Type[ExtraFrameworkDependency]' = ExtraFrameworkDependency,
                 builtin_class: 'T.Type[BuiltinDependency]' = BuiltinDependency,
                 system_class: 'T.Type[SystemDependency]' = SystemDependency):

        # A config-tool lookup is tool-specific, so a bespoke class is mandatory.
        if DependencyMethods.CONFIG_TOOL in methods and not configtool_class:
            raise DependencyException('A configtool must have a custom class')

        self.extra_kwargs = extra_kwargs or {}
        self.methods = methods
        # Bind the method-specific name (falling back to the generic one) to
        # each constructor up front, so __call__ only has to add env/kwargs.
        self.classes: T.Dict[
            DependencyMethods,
            T.Callable[['Environment', T.Dict[str, T.Any]], ExternalDependency]
        ] = {
            DependencyMethods.EXTRAFRAMEWORK: functools.partial(framework_class, framework_name or name),
            DependencyMethods.PKGCONFIG: functools.partial(pkgconfig_class, pkgconfig_name or name),
            DependencyMethods.CMAKE: functools.partial(cmake_class, cmake_name or name),
            DependencyMethods.SYSTEM: functools.partial(system_class, name),
            DependencyMethods.BUILTIN: functools.partial(builtin_class, name),
            DependencyMethods.CONFIG_TOOL:
                functools.partial(configtool_class, name) if configtool_class is not None else None,
        }

    @staticmethod
    def _process_method(method: DependencyMethods, env: 'Environment', for_machine: MachineChoice) -> bool:
        """Report whether a method is valid or not.

        If the method is valid, return true, otherwise return false. This is
        used in a list comprehension to filter methods that are not possible.

        By default this only removes EXTRAFRAMEWORK dependencies for non-mac
        platforms.
        """
        if method is not DependencyMethods.EXTRAFRAMEWORK:
            return True
        # Extra frameworks are only valid for macOS and other apple products
        return env.machines[for_machine].is_darwin()

    def __call__(self, env: 'Environment', for_machine: MachineChoice,
                 kwargs: T.Dict[str, T.Any]) -> T.List['DependencyGenerator']:
        """Return a list of Dependencies with the arguments already attached."""
        # User-supplied kwargs take precedence over the factory's defaults.
        nwargs = {**self.extra_kwargs, **kwargs}
        return [functools.partial(self.classes[m], env, nwargs)
                for m in process_method_kw(self.methods, kwargs)
                if self._process_method(m, env, for_machine)]
+ + This helps to make factory functions self documenting + >>> @factory_methods([DependencyMethods.PKGCONFIG, DependencyMethods.CMAKE]) + >>> def factory(env: Environment, for_machine: MachineChoice, kwargs: T.Dict[str, T.Any], methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']: + >>> pass + """ + + def inner(func: 'FactoryFunc') -> 'WrappedFactoryFunc': + + @functools.wraps(func) + def wrapped(env: 'Environment', for_machine: MachineChoice, kwargs: T.Dict[str, T.Any]) -> T.List['DependencyGenerator']: + return func(env, for_machine, kwargs, process_method_kw(methods, kwargs)) + + return wrapped + + return inner diff --git a/mesonbuild/dependencies/framework.py b/mesonbuild/dependencies/framework.py new file mode 100644 index 0000000..b02b3ce --- /dev/null +++ b/mesonbuild/dependencies/framework.py @@ -0,0 +1,121 @@ +# Copyright 2013-2021 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +from .base import DependencyTypeName, ExternalDependency, DependencyException +from ..mesonlib import MesonException, Version, stringlistify +from .. 
class ExtraFrameworkDependency(ExternalDependency):
    """A macOS framework found in a non-default ("extra") search path."""

    # Cached across instances: the toolchain's default framework dirs.
    system_framework_paths: T.Optional[T.List[str]] = None

    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None) -> None:
        paths = stringlistify(kwargs.get('paths', []))
        super().__init__(DependencyTypeName('extraframeworks'), env, kwargs, language=language)
        self.name = name
        # Full path to framework directory
        self.framework_path: T.Optional[str] = None
        if not self.clib_compiler:
            raise DependencyException('No C-like compilers are available')
        if self.system_framework_paths is None:
            try:
                self.system_framework_paths = self.clib_compiler.find_framework_paths(self.env)
            except MesonException as e:
                if 'non-clang' in str(e):
                    # Apple frameworks can only be found (and used) with the
                    # system compiler. It is not available so bail immediately.
                    self.is_found = False
                    return
                raise
        self.detect(name, paths)

    def detect(self, name: str, paths: T.List[str]) -> None:
        search_dirs = paths or self.system_framework_paths
        for directory in search_dirs:
            mlog.debug(f'Looking for framework {name} in {directory}')
            # We need to know the exact framework path because it's used by the
            # Qt5 dependency class, and for setting the include path. We also
            # want to avoid searching in an invalid framework path which wastes
            # time and can cause a false positive.
            fpath = self._get_framework_path(directory, name)
            if fpath is None:
                continue
            # We want to prefer the specified paths (in order) over the system
            # paths since these are "extra" frameworks.
            # For example, Python2's framework is in /System/Library/Frameworks and
            # Python3's framework is in /Library/Frameworks, but both are called
            # Python.framework. We need to know for sure that the framework was
            # found in the path we expect.
            allow_system = directory in self.system_framework_paths
            args = self.clib_compiler.find_framework(name, self.env, [directory], allow_system)
            if args is None:
                continue
            self.link_args = args
            self.framework_path = fpath.as_posix()
            self.compile_args = ['-F' + self.framework_path]
            # We need to also add -I includes to the framework because all
            # cross-platform projects such as OpenGL, Python, Qt, GStreamer,
            # etc do not use "framework includes":
            # https://developer.apple.com/library/archive/documentation/MacOSX/Conceptual/BPFrameworks/Tasks/IncludingFrameworks.html
            incdir = self._get_framework_include_path(fpath)
            if incdir:
                self.compile_args += ['-I' + incdir]
            self.is_found = True
            return

    def _get_framework_path(self, path: str, name: str) -> T.Optional[Path]:
        # Case-insensitive match of '<name>.framework' inside `path`.
        wanted = name.lower()
        for candidate in Path(path).glob('*.framework/'):
            if candidate.name.rsplit('.', 1)[0].lower() == wanted:
                return candidate
        return None

    def _get_framework_latest_version(self, path: Path) -> str:
        # macOS filesystems are usually case-insensitive, so match the
        # 'Current' symlink in any casing and skip it.
        versions = [Version(entry.name)
                    for entry in path.glob('Versions/*')
                    if entry.name.lower() != 'current']
        if not versions:
            # most system frameworks do not have a 'Versions' directory
            return 'Headers'
        return 'Versions/{}/Headers'.format(max(versions)._s)

    def _get_framework_include_path(self, path: Path) -> T.Optional[str]:
        # According to the spec, 'Headers' must always be a symlink to the
        # Headers directory inside the currently-selected version of the
        # framework, but sometimes frameworks are broken. Look in 'Versions'
        # for the currently-selected version or pick the latest one.
        for rel in ('Headers', 'Versions/Current/Headers',
                    self._get_framework_latest_version(path)):
            candidate = path / rel
            if candidate.is_dir():
                return candidate.as_posix()
        return None

    def log_info(self) -> str:
        return self.framework_path or ''

    @staticmethod
    def log_tried() -> str:
        return 'framework'
class HDF5PkgConfigDependency(PkgConfigDependency):

    """Handle brokenness in the HDF5 pkg-config files."""

    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None) -> None:
        language = language or 'c'
        if language not in {'c', 'cpp', 'fortran'}:
            raise DependencyException(f'Language {language} is not supported with HDF5.')

        super().__init__(name, environment, kwargs, language)
        if not self.is_found:
            return

        # some broken pkgconfig don't actually list the full path to the needed includes
        stem = 'static' if self.static else 'shared'
        newinc: T.List[str] = []
        for arg in self.compile_args:
            if not arg.startswith('-I'):
                continue
            subdir = Path(arg[2:]) / stem
            if subdir.is_dir():
                newinc.append('-I' + str(subdir))
        self.compile_args += newinc

        # some pkg-config hdf5.pc (e.g. Ubuntu) don't include the commonly-used
        # HL HDF5 libraries, so let's add them if they exist.
        # additionally, some pkgconfig HDF5 HL files are malformed so let's be
        # sure to find HL anyway
        link_args: T.List[str] = []
        for larg in self.get_link_args():
            lpath = Path(larg)
            if not lpath.is_file():
                link_args.append(larg)
                continue
            suffixes = []
            if language == 'cpp':
                suffixes += ['_hl_cpp', '_cpp']
            elif language == 'fortran':
                suffixes += ['_hl_fortran', 'hl_fortran', '_fortran']
            suffixes += ['_hl']  # C HL library, always needed

            base, _, ext = lpath.name.partition('.')
            suffix = '.' + ext  # in case of .dll.a
            for hl in suffixes:
                candidate = lpath.parent / (base + hl + suffix)
                if candidate.is_file():
                    link_args.append(str(candidate))
            # HDF5 C libs are required by other HDF5 languages
            link_args.append(larg)

        self.link_args = link_args
+ args = self.get_config_value(['-show', '-c'], 'args')[1:] + args += self.get_config_value(['-show', '-noshlib' if self.static else '-shlib'], 'args')[1:] + for arg in args: + if arg.startswith(('-I', '-f', '-D')) or arg == '-pthread': + self.compile_args.append(arg) + elif arg.startswith(('-L', '-l', '-Wl')): + self.link_args.append(arg) + elif Path(arg).is_file(): + self.link_args.append(arg) + + # If the language is not C we need to add C as a subdependency + if language != 'c': + nkwargs = kwargs.copy() + nkwargs['language'] = 'c' + # I'm being too clever for mypy and pylint + self.is_found = self._add_sub_dependency(hdf5_factory(environment, for_machine, nkwargs)) # pylint: disable=no-value-for-parameter + + def _sanitize_version(self, ver: str) -> str: + v = re.search(r'\s*HDF5 Version: (\d+\.\d+\.\d+)', ver) + return v.group(1) + + +@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL}) +def hdf5_factory(env: 'Environment', for_machine: 'MachineChoice', + kwargs: T.Dict[str, T.Any], methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']: + language = kwargs.get('language') + candidates: T.List['DependencyGenerator'] = [] + + if DependencyMethods.PKGCONFIG in methods: + # Use an ordered set so that these remain the first tried pkg-config files + pkgconfig_files = OrderedSet(['hdf5', 'hdf5-serial']) + PCEXE = PkgConfigDependency._detect_pkgbin(False, env, for_machine) + pcenv = PkgConfigDependency.setup_env(os.environ, env, for_machine) + if PCEXE: + assert isinstance(PCEXE, ExternalProgram) + # some distros put hdf5-1.2.3.pc with version number in .pc filename. 
+ ret, stdout, _ = Popen_safe(PCEXE.get_command() + ['--list-all'], stderr=subprocess.DEVNULL, env=pcenv) + if ret.returncode == 0: + for pkg in stdout.split('\n'): + if pkg.startswith('hdf5'): + pkgconfig_files.add(pkg.split(' ', 1)[0]) + + for pkg in pkgconfig_files: + candidates.append(functools.partial(HDF5PkgConfigDependency, pkg, env, kwargs, language)) + + if DependencyMethods.CONFIG_TOOL in methods: + candidates.append(functools.partial(HDF5ConfigToolDependency, 'hdf5', env, kwargs, language)) + + return candidates diff --git a/mesonbuild/dependencies/misc.py b/mesonbuild/dependencies/misc.py new file mode 100644 index 0000000..2913d84 --- /dev/null +++ b/mesonbuild/dependencies/misc.py @@ -0,0 +1,724 @@ +# Copyright 2013-2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This file contains the detection logic for miscellaneous external dependencies. +from __future__ import annotations + +from pathlib import Path +import functools +import re +import sysconfig +import typing as T + +from .. import mesonlib +from .. 
import mlog +from ..environment import detect_cpu_family +from .base import DependencyException, DependencyMethods +from .base import BuiltinDependency, SystemDependency +from .cmake import CMakeDependency +from .configtool import ConfigToolDependency +from .factory import DependencyFactory, factory_methods +from .pkgconfig import PkgConfigDependency + +if T.TYPE_CHECKING: + from ..environment import Environment, MachineChoice + from .factory import DependencyGenerator + + +@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.CMAKE}) +def netcdf_factory(env: 'Environment', + for_machine: 'MachineChoice', + kwargs: T.Dict[str, T.Any], + methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']: + language = kwargs.get('language', 'c') + if language not in ('c', 'cpp', 'fortran'): + raise DependencyException(f'Language {language} is not supported with NetCDF.') + + candidates: T.List['DependencyGenerator'] = [] + + if DependencyMethods.PKGCONFIG in methods: + if language == 'fortran': + pkg = 'netcdf-fortran' + else: + pkg = 'netcdf' + + candidates.append(functools.partial(PkgConfigDependency, pkg, env, kwargs, language=language)) + + if DependencyMethods.CMAKE in methods: + candidates.append(functools.partial(CMakeDependency, 'NetCDF', env, kwargs, language=language)) + + return candidates + + +class DlBuiltinDependency(BuiltinDependency): + def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]): + super().__init__(name, env, kwargs) + self.feature_since = ('0.62.0', "consider checking for `dlopen` with and without `find_library('dl')`") + + if self.clib_compiler.has_function('dlopen', '#include <dlfcn.h>', env)[0]: + self.is_found = True + + +class DlSystemDependency(SystemDependency): + def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]): + super().__init__(name, env, kwargs) + self.feature_since = ('0.62.0', "consider checking for `dlopen` with and without `find_library('dl')`") + + h = 
class OpenMPDependency(SystemDependency):
    # Map date of specification release (which is the macro value) to a version.
    VERSIONS = {
        '201811': '5.0',
        '201611': '5.0-revision1', # This is supported by ICC 19.x
        '201511': '4.5',
        '201307': '4.0',
        '201107': '3.1',
        '200805': '3.0',
        '200505': '2.5',
        '200203': '2.0',
        '199810': '1.0',
    }

    def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
        language = kwargs.get('language')
        super().__init__('openmp', environment, kwargs, language=language)
        self.is_found = False

        # nagfor, and PGI through at least 19.4, define no _OPENMP macro even
        # though both support OpenMP 3.1, so short-circuit the macro probe.
        if self.clib_compiler.get_id() in {'nagfor', 'pgi'}:
            self.version = '3.1'
            self.is_found = True
            self.compile_args = self.link_args = self.clib_compiler.openmp_flags()
            return

        try:
            macro_date = self.clib_compiler.get_define(
                '_OPENMP', '', self.env, self.clib_compiler.openmp_flags(), [self], disable_cache=True)[0]
        except mesonlib.EnvironmentException as e:
            mlog.debug('OpenMP support not available in the compiler')
            mlog.debug(e)
            macro_date = None

        if macro_date:
            try:
                self.version = self.VERSIONS[macro_date]
            except KeyError:
                mlog.debug(f'Could not find an OpenMP version matching {macro_date}')
                if macro_date == '_OPENMP':
                    mlog.debug('This can be caused by flags such as gcc\'s `-fdirectives-only`, which affect preprocessor behavior.')
                return
            # Flang has omp_lib.h
            for header in ('omp.h', 'omp_lib.h'):
                if self.clib_compiler.has_header(header, '', self.env, dependencies=[self], disable_cache=True)[0]:
                    self.is_found = True
                    self.compile_args = self.clib_compiler.openmp_flags()
                    self.link_args = self.clib_compiler.openmp_link_flags()
                    break
            if not self.is_found:
                mlog.log(mlog.yellow('WARNING:'), 'OpenMP found but omp.h missing.')
+ if not self.clib_compiler: + self.compile_args = [] + self.link_args = [] + else: + self.compile_args = self.clib_compiler.thread_flags(environment) + self.link_args = self.clib_compiler.thread_link_flags(environment) + + +class BlocksDependency(SystemDependency): + def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None: + super().__init__('blocks', environment, kwargs) + self.name = 'blocks' + self.is_found = False + + if self.env.machines[self.for_machine].is_darwin(): + self.compile_args = [] + self.link_args = [] + else: + self.compile_args = ['-fblocks'] + self.link_args = ['-lBlocksRuntime'] + + if not self.clib_compiler.has_header('Block.h', '', environment, disable_cache=True) or \ + not self.clib_compiler.find_library('BlocksRuntime', environment, []): + mlog.log(mlog.red('ERROR:'), 'BlocksRuntime not found.') + return + + source = ''' + int main(int argc, char **argv) + { + int (^callback)(void) = ^ int (void) { return 0; }; + return callback(); + }''' + + with self.clib_compiler.compile(source, extra_args=self.compile_args + self.link_args) as p: + if p.returncode != 0: + mlog.log(mlog.red('ERROR:'), 'Compiler does not support blocks extension.') + return + + self.is_found = True + + +class Python3DependencySystem(SystemDependency): + def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None: + super().__init__(name, environment, kwargs) + + if not environment.machines.matches_build_machine(self.for_machine): + return + if not environment.machines[self.for_machine].is_windows(): + return + + self.name = 'python3' + # We can only be sure that it is Python 3 at this point + self.version = '3' + self._find_libpy3_windows(environment) + + @staticmethod + def get_windows_python_arch() -> T.Optional[str]: + pyplat = sysconfig.get_platform() + if pyplat == 'mingw': + pycc = sysconfig.get_config_var('CC') + if pycc.startswith('x86_64'): + return '64' + elif pycc.startswith(('i686', 'i386')): + 
return '32' + else: + mlog.log(f'MinGW Python built with unknown CC {pycc!r}, please file a bug') + return None + elif pyplat == 'win32': + return '32' + elif pyplat in {'win64', 'win-amd64'}: + return '64' + mlog.log(f'Unknown Windows Python platform {pyplat!r}') + return None + + def get_windows_link_args(self) -> T.Optional[T.List[str]]: + pyplat = sysconfig.get_platform() + if pyplat.startswith('win'): + vernum = sysconfig.get_config_var('py_version_nodot') + if self.static: + libpath = Path('libs') / f'libpython{vernum}.a' + else: + comp = self.get_compiler() + if comp.id == "gcc": + libpath = Path(f'python{vernum}.dll') + else: + libpath = Path('libs') / f'python{vernum}.lib' + lib = Path(sysconfig.get_config_var('base')) / libpath + elif pyplat == 'mingw': + if self.static: + libname = sysconfig.get_config_var('LIBRARY') + else: + libname = sysconfig.get_config_var('LDLIBRARY') + lib = Path(sysconfig.get_config_var('LIBDIR')) / libname + if not lib.exists(): + mlog.log('Could not find Python3 library {!r}'.format(str(lib))) + return None + return [str(lib)] + + def _find_libpy3_windows(self, env: 'Environment') -> None: + ''' + Find python3 libraries on Windows and also verify that the arch matches + what we are building for. 
+ ''' + pyarch = self.get_windows_python_arch() + if pyarch is None: + self.is_found = False + return + arch = detect_cpu_family(env.coredata.compilers.host) + if arch == 'x86': + arch = '32' + elif arch == 'x86_64': + arch = '64' + else: + # We can't cross-compile Python 3 dependencies on Windows yet + mlog.log(f'Unknown architecture {arch!r} for', + mlog.bold(self.name)) + self.is_found = False + return + # Pyarch ends in '32' or '64' + if arch != pyarch: + mlog.log('Need', mlog.bold(self.name), 'for {}-bit, but ' + 'found {}-bit'.format(arch, pyarch)) + self.is_found = False + return + # This can fail if the library is not found + largs = self.get_windows_link_args() + if largs is None: + self.is_found = False + return + self.link_args = largs + # Compile args + inc = sysconfig.get_path('include') + platinc = sysconfig.get_path('platinclude') + self.compile_args = ['-I' + inc] + if inc != platinc: + self.compile_args.append('-I' + platinc) + self.version = sysconfig.get_config_var('py_version') + self.is_found = True + + @staticmethod + def log_tried() -> str: + return 'sysconfig' + +class PcapDependencyConfigTool(ConfigToolDependency): + + tools = ['pcap-config'] + tool_name = 'pcap-config' + + # version 1.10.2 added error checking for invalid arguments + # version 1.10.3 will hopefully add actual support for --version + skip_version = '--help' + + def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]): + super().__init__(name, environment, kwargs) + if not self.is_found: + return + self.compile_args = self.get_config_value(['--cflags'], 'compile_args') + self.link_args = self.get_config_value(['--libs'], 'link_args') + if self.version is None: + # older pcap-config versions don't support this + self.version = self.get_pcap_lib_version() + + def get_pcap_lib_version(self) -> T.Optional[str]: + # Since we seem to need to run a program to discover the pcap version, + # we can't do that when cross-compiling + # FIXME: this should be 
class CupsDependencyConfigTool(ConfigToolDependency):

    """CUPS print-system dependency, detected via cups-config."""

    tools = ['cups-config']
    tool_name = 'cups-config'

    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
        super().__init__(name, environment, kwargs)
        if not self.is_found:
            return
        self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
        # --ldflags carries the -L search paths, --libs the -l entries.
        self.link_args = self.get_config_value(['--ldflags', '--libs'], 'link_args')


class LibWmfDependencyConfigTool(ConfigToolDependency):

    """libwmf dependency, detected via libwmf-config."""

    tools = ['libwmf-config']
    tool_name = 'libwmf-config'

    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
        super().__init__(name, environment, kwargs)
        if not self.is_found:
            return
        self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
        self.link_args = self.get_config_value(['--libs'], 'link_args')


class LibGCryptDependencyConfigTool(ConfigToolDependency):

    """libgcrypt dependency, detected via libgcrypt-config."""

    tools = ['libgcrypt-config']
    tool_name = 'libgcrypt-config'

    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
        super().__init__(name, environment, kwargs)
        if not self.is_found:
            return
        self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
        self.link_args = self.get_config_value(['--libs'], 'link_args')
        # libgcrypt-config has no pkg-config-style --modversion; ask it directly.
        self.version = self.get_config_value(['--version'], 'version')[0]


class GpgmeDependencyConfigTool(ConfigToolDependency):

    """GPGME dependency, detected via gpgme-config."""

    tools = ['gpgme-config']
    # Fix: was 'gpg-config', which did not match the probed tool and made
    # log messages name the wrong binary.
    tool_name = 'gpgme-config'

    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
        super().__init__(name, environment, kwargs)
        if not self.is_found:
            return
        self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
        self.link_args = self.get_config_value(['--libs'], 'link_args')
        self.version = self.get_config_value(['--version'], 'version')[0]


class ShadercDependency(SystemDependency):

    """shaderc dependency found by direct compiler library search.

    Tries the shared library first unless a static link was requested, and
    warns when a static build had to fall back to the non-static flavour.
    """

    def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
        super().__init__('shaderc', environment, kwargs)

        static_lib = 'shaderc_combined'
        shared_lib = 'shaderc_shared'

        # Preference order depends on the 'static' request.
        libs = [shared_lib, static_lib]
        if self.static:
            libs.reverse()

        cc = self.get_compiler()

        for lib in libs:
            self.link_args = cc.find_library(lib, environment, [])
            if self.link_args is not None:
                self.is_found = True

                if self.static and lib != static_lib:
                    mlog.warning(f'Static library {static_lib!r} not found for dependency '
                                 f'{self.name!r}, may not be statically linked')

                break


class CursesConfigToolDependency(ConfigToolDependency):

    """Use the curses config tools."""

    tool = 'curses-config'
    # ncurses5.4-config is for macOS Catalina
    tools = ['ncursesw6-config', 'ncursesw5-config', 'ncurses6-config', 'ncurses5-config', 'ncurses5.4-config']

    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None):
        super().__init__(name, env, kwargs, language)
        if not self.is_found:
            return
        self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
        self.link_args = self.get_config_value(['--libs'], 'link_args')


class CursesSystemDependency(SystemDependency):

    """Curses dependency the hard way.

    This replaces hand rolled find_library() and has_header() calls. We
    provide this for portability reasons, there are a large number of curses
    implementations, and the differences between them can be very annoying.
    """

    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
        super().__init__(name, env, kwargs)

        # (library name, candidate headers) pairs, tried in preference order.
        candidates = [
            ('pdcurses', ['pdcurses/curses.h']),
            ('ncursesw', ['ncursesw/ncurses.h', 'ncurses.h']),
            ('ncurses', ['ncurses/ncurses.h', 'ncurses/curses.h', 'ncurses.h']),
            ('curses', ['curses.h']),
        ]

        # Not sure how else to elegantly break out of both loops
        for lib, headers in candidates:
            l = self.clib_compiler.find_library(lib, env, [])
            if l:
                for header in headers:
                    h = self.clib_compiler.has_header(header, '', env)
                    if h[0]:
                        self.is_found = True
                        self.link_args = l
                        # Not sure how to find version for non-ncurses curses
                        # implementations. The one in illumos/OpenIndiana
                        # doesn't seem to have a version defined in the header.
                        if lib.startswith('ncurses'):
                            v, _ = self.clib_compiler.get_define('NCURSES_VERSION', f'#include <{header}>', env, [], [self])
                            self.version = v.strip('"')
                        if lib.startswith('pdcurses'):
                            v_major, _ = self.clib_compiler.get_define('PDC_VER_MAJOR', f'#include <{header}>', env, [], [self])
                            v_minor, _ = self.clib_compiler.get_define('PDC_VER_MINOR', f'#include <{header}>', env, [], [self])
                            self.version = f'{v_major}.{v_minor}'

                        # Check the version if possible, emit a warning if we can't
                        req = kwargs.get('version')
                        if req:
                            if self.version:
                                self.is_found = mesonlib.version_compare(self.version, req)
                            else:
                                mlog.warning('Cannot determine version of curses to compare against.')

                        if self.is_found:
                            mlog.debug('Curses library:', l)
                            mlog.debug('Curses header:', header)
                            break
                if self.is_found:
                    break


class IconvBuiltinDependency(BuiltinDependency):

    """iconv provided directly by the C library (no separate libiconv)."""

    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
        super().__init__(name, env, kwargs)
        self.feature_since = ('0.60.0', "consider checking for `iconv_open` with and without `find_library('iconv')`")
        code = '''#include <iconv.h>\n\nint main() {\n    iconv_open("","");\n}'''  # [ignore encoding] this is C, not python, Mr. Lint

        # If this links with no extra library, iconv lives in libc.
        if self.clib_compiler.links(code, env)[0]:
            self.is_found = True
class IconvSystemDependency(SystemDependency):

    """iconv provided by a separate library (e.g. GNU libiconv)."""

    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
        super().__init__(name, env, kwargs)
        self.feature_since = ('0.60.0', "consider checking for `iconv_open` with and without find_library('iconv')")

        h = self.clib_compiler.has_header('iconv.h', '', env)
        self.link_args = self.clib_compiler.find_library('iconv', env, [], self.libtype)

        # Both the header and the library must be present.
        if h[0] and self.link_args:
            self.is_found = True


class IntlBuiltinDependency(BuiltinDependency):

    """gettext provided directly by the C library."""

    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
        super().__init__(name, env, kwargs)
        self.feature_since = ('0.59.0', "consider checking for `ngettext` with and without `find_library('intl')`")
        code = '''#include <libintl.h>\n\nint main() {\n    gettext("Hello world");\n}'''

        # If this links with no extra library, gettext lives in libc.
        if self.clib_compiler.links(code, env)[0]:
            self.is_found = True


class IntlSystemDependency(SystemDependency):

    """gettext provided by a separate libintl."""

    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
        super().__init__(name, env, kwargs)
        self.feature_since = ('0.59.0', "consider checking for `ngettext` with and without `find_library('intl')`")

        h = self.clib_compiler.has_header('libintl.h', '', env)
        self.link_args = self.clib_compiler.find_library('intl', env, [], self.libtype)

        if h[0] and self.link_args:
            self.is_found = True

        # A static libintl also needs a static iconv.
        if self.static:
            if not self._add_sub_dependency(iconv_factory(env, self.for_machine, {'static': True})):
                self.is_found = False
                return


class OpensslSystemDependency(SystemDependency):

    """Detect OpenSSL (or its libssl/libcrypto components) from system paths.

    Handles OpenSSL >= 3 via OPENSSL_VERSION_STR and older releases by
    decoding the packed hex OPENSSL_VERSION_NUMBER macro.
    """

    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
        super().__init__(name, env, kwargs)

        dependency_kwargs = {
            'method': 'system',
            'static': self.static,
        }
        if not self.clib_compiler.has_header('openssl/ssl.h', '', env)[0]:
            return

        # openssl >= 3 only
        self.version = self.clib_compiler.get_define('OPENSSL_VERSION_STR', '#include <openssl/opensslv.h>', env, [], [self])[0]
        # openssl < 3 only
        if not self.version:
            version_hex = self.clib_compiler.get_define('OPENSSL_VERSION_NUMBER', '#include <openssl/opensslv.h>', env, [], [self])[0]
            if not version_hex:
                return
            # Old headers spell the constant as e.g. 0x1010107fL; drop the suffix.
            version_hex = version_hex.rstrip('L')
            # Layout is 0xMNNFFPPS (major, minor, fix, patch, status); the extra
            # 4-bit shift skips the low status nibble.
            version_ints = [((int(version_hex, 16) >> (4 + i)) & 0xFF) for i in (24, 16, 8, 0)]
            # since this is openssl, the format is 1.2.3a in four parts
            self.version = '.'.join(str(i) for i in version_ints[:3])
            # Fix: a patch field of 0 means "no letter" (e.g. plain 1.1.1);
            # previously this produced chr(ord('a') - 1) == '`'.
            if version_ints[3]:
                self.version += chr(ord('a') + version_ints[3] - 1)

        if name == 'openssl':
            # The umbrella 'openssl' dependency is just libssl + libcrypto.
            if self._add_sub_dependency(libssl_factory(env, self.for_machine, dependency_kwargs)) and \
                    self._add_sub_dependency(libcrypto_factory(env, self.for_machine, dependency_kwargs)):
                self.is_found = True
            return
        else:
            # Strip the literal 'lib' prefix. Fix: str.lstrip('lib') strips any
            # run of the characters 'l', 'i', 'b', which is fragile.
            libname = name[3:] if name.startswith('lib') else name
            self.link_args = self.clib_compiler.find_library(libname, env, [], self.libtype)
            if not self.link_args:
                return

        if not self.static:
            self.is_found = True
        else:
            if name == 'libssl':
                if self._add_sub_dependency(libcrypto_factory(env, self.for_machine, dependency_kwargs)):
                    self.is_found = True
            elif name == 'libcrypto':
                use_threads = self.clib_compiler.has_header_symbol('openssl/opensslconf.h', 'OPENSSL_THREADS', '', env, dependencies=[self])[0]
                if not use_threads or self._add_sub_dependency(threads_factory(env, self.for_machine, {})):
                    self.is_found = True
                # only relevant on platforms where it is distributed with the libc, in which case it always succeeds
                sublib = self.clib_compiler.find_library('dl', env, [], self.libtype)
                if sublib:
                    self.link_args.extend(sublib)


@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.SYSTEM})
def curses_factory(env: 'Environment',
                   for_machine: 'MachineChoice',
                   kwargs: T.Dict[str, T.Any],
                   methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']:
    """Build the candidate list for a curses dependency lookup."""
    candidates: T.List['DependencyGenerator'] = []

    if DependencyMethods.PKGCONFIG in methods:
        pkgconfig_files = ['pdcurses', 'ncursesw', 'ncurses', 'curses']
        for pkg in pkgconfig_files:
            candidates.append(functools.partial(PkgConfigDependency, pkg, env, kwargs))

    # There are path handling problems with these methods on msys, and they
    # don't apply to windows otherwise (cygwin is handled separately from
    # windows)
    if not env.machines[for_machine].is_windows():
        if DependencyMethods.CONFIG_TOOL in methods:
            candidates.append(functools.partial(CursesConfigToolDependency, 'curses', env, kwargs))

        if DependencyMethods.SYSTEM in methods:
            candidates.append(functools.partial(CursesSystemDependency, 'curses', env, kwargs))

    return candidates


@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM})
def shaderc_factory(env: 'Environment',
                    for_machine: 'MachineChoice',
                    kwargs: T.Dict[str, T.Any],
                    methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']:
    """Custom DependencyFactory for ShaderC.

    ShaderC's quirk of producing three different libraries from the same
    build is easier to represent as a separate function than by twisting
    DependencyFactory even more.
    """
    candidates: T.List['DependencyGenerator'] = []

    if DependencyMethods.PKGCONFIG in methods:
        # ShaderC packages their shared and static libs together
        # and provides different pkg-config files for each one. We
        # smooth over this difference by handling the static
        # keyword before handing off to the pkg-config handler.
        shared_libs = ['shaderc']
        static_libs = ['shaderc_combined', 'shaderc_static']

        if kwargs.get('static', env.coredata.get_option(mesonlib.OptionKey('prefer_static'))):
            c = [functools.partial(PkgConfigDependency, name, env, kwargs)
                 for name in static_libs + shared_libs]
        else:
            c = [functools.partial(PkgConfigDependency, name, env, kwargs)
                 for name in shared_libs + static_libs]
        candidates.extend(c)

    if DependencyMethods.SYSTEM in methods:
        candidates.append(functools.partial(ShadercDependency, env, kwargs))

    return candidates


cups_factory = DependencyFactory(
    'cups',
    [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.EXTRAFRAMEWORK, DependencyMethods.CMAKE],
    configtool_class=CupsDependencyConfigTool,
    cmake_name='Cups',
)

dl_factory = DependencyFactory(
    'dl',
    [DependencyMethods.BUILTIN, DependencyMethods.SYSTEM],
    builtin_class=DlBuiltinDependency,
    system_class=DlSystemDependency,
)

gpgme_factory = DependencyFactory(
    'gpgme',
    [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL],
    configtool_class=GpgmeDependencyConfigTool,
)

libgcrypt_factory = DependencyFactory(
    'libgcrypt',
    [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL],
    configtool_class=LibGCryptDependencyConfigTool,
)

libwmf_factory = DependencyFactory(
    'libwmf',
    [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL],
    configtool_class=LibWmfDependencyConfigTool,
)

pcap_factory = DependencyFactory(
    'pcap',
    [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL],
    configtool_class=PcapDependencyConfigTool,
    pkgconfig_name='libpcap',
)

python3_factory = DependencyFactory(
    'python3',
    [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM, DependencyMethods.EXTRAFRAMEWORK],
    system_class=Python3DependencySystem,
    # There is no version number in the macOS framework name.
    framework_name='Python',
    # There is a python in /System/Library/Frameworks, but that's python 2.x,
    # Python 3 will always be in /Library
    extra_kwargs={'paths': ['/Library/Frameworks']},
)
python 2.x, + # Python 3 will always be in /Library + extra_kwargs={'paths': ['/Library/Frameworks']}, +) + +threads_factory = DependencyFactory( + 'threads', + [DependencyMethods.SYSTEM, DependencyMethods.CMAKE], + cmake_name='Threads', + system_class=ThreadDependency, +) + +iconv_factory = DependencyFactory( + 'iconv', + [DependencyMethods.BUILTIN, DependencyMethods.SYSTEM], + builtin_class=IconvBuiltinDependency, + system_class=IconvSystemDependency, +) + +intl_factory = DependencyFactory( + 'intl', + [DependencyMethods.BUILTIN, DependencyMethods.SYSTEM], + builtin_class=IntlBuiltinDependency, + system_class=IntlSystemDependency, +) + +openssl_factory = DependencyFactory( + 'openssl', + [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM, DependencyMethods.CMAKE], + system_class=OpensslSystemDependency, + cmake_class=lambda name, env, kwargs: CMakeDependency('OpenSSL', env, dict(kwargs, modules=['OpenSSL::Crypto', 'OpenSSL::SSL'])), +) + +libcrypto_factory = DependencyFactory( + 'libcrypto', + [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM, DependencyMethods.CMAKE], + system_class=OpensslSystemDependency, + cmake_class=lambda name, env, kwargs: CMakeDependency('OpenSSL', env, dict(kwargs, modules=['OpenSSL::Crypto'])), +) + +libssl_factory = DependencyFactory( + 'libssl', + [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM, DependencyMethods.CMAKE], + system_class=OpensslSystemDependency, + cmake_class=lambda name, env, kwargs: CMakeDependency('OpenSSL', env, dict(kwargs, modules=['OpenSSL::SSL'])), +) diff --git a/mesonbuild/dependencies/mpi.py b/mesonbuild/dependencies/mpi.py new file mode 100644 index 0000000..8f83ce4 --- /dev/null +++ b/mesonbuild/dependencies/mpi.py @@ -0,0 +1,237 @@ +# Copyright 2013-2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +import functools +import typing as T +import os +import re + +from ..environment import detect_cpu_family +from .base import DependencyMethods, detect_compiler, SystemDependency +from .configtool import ConfigToolDependency +from .factory import factory_methods +from .pkgconfig import PkgConfigDependency + +if T.TYPE_CHECKING: + from .factory import DependencyGenerator + from ..environment import Environment, MachineChoice + + +@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.SYSTEM}) +def mpi_factory(env: 'Environment', + for_machine: 'MachineChoice', + kwargs: T.Dict[str, T.Any], + methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']: + language = kwargs.get('language', 'c') + if language not in {'c', 'cpp', 'fortran'}: + # OpenMPI doesn't work without any other languages + return [] + + candidates: T.List['DependencyGenerator'] = [] + compiler = detect_compiler('mpi', env, for_machine, language) + if not compiler: + return [] + compiler_is_intel = compiler.get_id() in {'intel', 'intel-cl'} + + # Only OpenMPI has pkg-config, and it doesn't work with the intel compilers + if DependencyMethods.PKGCONFIG in methods and not compiler_is_intel: + pkg_name = None + if language == 'c': + pkg_name = 'ompi-c' + elif language == 'cpp': + pkg_name = 'ompi-cxx' + elif language == 'fortran': + pkg_name = 'ompi-fort' + candidates.append(functools.partial( + PkgConfigDependency, pkg_name, env, kwargs, language=language)) + + if DependencyMethods.CONFIG_TOOL in methods: + 
class _MPIConfigToolDependency(ConfigToolDependency):

    """Shared flag-filtering logic for MPI compiler-wrapper config tools.

    The wrappers echo their full underlying compiler command line; only a
    subset of those flags is meaningful as dependency compile/link args.
    """

    def _filter_compile_args(self, args: T.List[str]) -> T.List[str]:
        """
        MPI wrappers return a bunch of garbage args.
        Drop -O2 and everything that is not needed.
        """
        result = []
        # Flags whose value may come as a separate following token.
        multi_args: T.Tuple[str, ...] = ('-I', )
        if self.language == 'fortran':
            fc = self.env.coredata.compilers[self.for_machine]['fortran']
            multi_args += fc.get_module_incdir_args()

        include_next = False
        for f in args:
            if f.startswith(('-D', '-f') + multi_args) or f == '-pthread' \
                    or (f.startswith('-W') and f != '-Wall' and not f.startswith('-Werror')):
                result.append(f)
                if f in multi_args:
                    # Path is a separate argument.
                    include_next = True
            elif include_next:
                include_next = False
                result.append(f)
        return result

    def _filter_link_args(self, args: T.List[str]) -> T.List[str]:
        """
        MPI wrappers return a bunch of garbage args.
        Drop -O2 and everything that is not needed.
        """
        result = []
        include_next = False
        for f in args:
            if self._is_link_arg(f):
                result.append(f)
                if f in {'-L', '-Xlinker'}:
                    # Value follows as a separate token.
                    include_next = True
            elif include_next:
                include_next = False
                result.append(f)
        return result

    def _is_link_arg(self, f: str) -> bool:
        # intel-cl wraps MSVC, so linker flags use the /link style.
        if self.clib_compiler.id == 'intel-cl':
            return f == '/link' or f.startswith('/LIBPATH') or f.endswith('.lib')   # always .lib whether static or dynamic
        else:
            return (f.startswith(('-L', '-l', '-Xlinker')) or
                    f == '-pthread' or
                    (f.startswith('-W') and f != '-Wall' and not f.startswith('-Werror')))


class IntelMPIConfigToolDependency(_MPIConfigToolDependency):

    """Wrapper around Intel's mpiicc and friends."""

    version_arg = '-v'  # --version is not the same as -v

    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
                 language: T.Optional[str] = None):
        super().__init__(name, env, kwargs, language=language)
        if not self.is_found:
            return

        # -show prints the underlying compile line; the same output carries
        # both compile and link flags.
        args = self.get_config_value(['-show'], 'link and compile args')
        self.compile_args = self._filter_compile_args(args)
        self.link_args = self._filter_link_args(args)

    def _sanitize_version(self, out: str) -> str:
        """Turn Intel's 'YYYY Update N' banner into 'YYYY.N'."""
        v = re.search(r'(\d{4}) Update (\d)', out)
        if v:
            return '{}.{}'.format(v.group(1), v.group(2))
        return out


class OpenMPIConfigToolDependency(_MPIConfigToolDependency):

    """Wrapper around OpenMPI mpicc and friends."""

    version_arg = '--showme:version'

    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
                 language: T.Optional[str] = None):
        super().__init__(name, env, kwargs, language=language)
        if not self.is_found:
            return

        c_args = self.get_config_value(['--showme:compile'], 'compile_args')
        self.compile_args = self._filter_compile_args(c_args)

        l_args = self.get_config_value(['--showme:link'], 'link_args')
        self.link_args = self._filter_link_args(l_args)

    def _sanitize_version(self, out: str) -> str:
        """Extract the dotted version from the --showme:version banner."""
        # Fix: the dots were unescaped (r'\d+.\d+.\d+') and so matched any
        # character between the digit groups.
        v = re.search(r'\d+\.\d+\.\d+', out)
        if v:
            return v.group(0)
        return out


class MSMPIDependency(SystemDependency):

    """The Microsoft MPI."""

    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
                 language: T.Optional[str] = None):
        super().__init__(name, env, kwargs, language=language)
        # MSMPI ships C and Fortran interfaces only (no C++ bindings); the
        # code below links msmpifec for Fortran.
        if language not in {'c', 'fortran', None}:
            self.is_found = False
            return
        # MSMPI is only for windows, obviously
        if not self.env.machines[self.for_machine].is_windows():
            return

        # The MSMPI SDK advertises itself purely via environment variables.
        incdir = os.environ.get('MSMPI_INC')
        arch = detect_cpu_family(self.env.coredata.compilers.host)
        libdir = None
        post = None
        if arch == 'x86':
            libdir = os.environ.get('MSMPI_LIB32')
            post = 'x86'
        elif arch == 'x86_64':
            libdir = os.environ.get('MSMPI_LIB64')
            post = 'x64'

        if libdir is None or incdir is None:
            self.is_found = False
            return

        self.is_found = True
        # NOTE(review): absolute paths are passed via -l/-I here; presumably a
        # later stage resolves them — confirm against the compiler classes.
        self.link_args = ['-l' + os.path.join(libdir, 'msmpi')]
        self.compile_args = ['-I' + incdir, '-I' + os.path.join(incdir, post)]
        if self.language == 'fortran':
            self.link_args.append('-l' + os.path.join(libdir, 'msmpifec'))
+1,505 @@ +# Copyright 2013-2021 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +from pathlib import Path + +from .base import ExternalDependency, DependencyException, sort_libpaths, DependencyTypeName +from ..mesonlib import OptionKey, OrderedSet, PerMachine, Popen_safe +from ..programs import find_external_program, ExternalProgram +from .. import mlog +from pathlib import PurePath +import re +import os +import shlex +import typing as T + +if T.TYPE_CHECKING: + from ..environment import Environment + from ..mesonlib import MachineChoice + from .._typing import ImmutableListProtocol + from ..build import EnvironmentVariables + +class PkgConfigDependency(ExternalDependency): + # The class's copy of the pkg-config path. Avoids having to search for it + # multiple times in the same Meson invocation. 
+ class_pkgbin: PerMachine[T.Union[None, bool, ExternalProgram]] = PerMachine(None, None) + # We cache all pkg-config subprocess invocations to avoid redundant calls + pkgbin_cache: T.Dict[ + T.Tuple[ExternalProgram, T.Tuple[str, ...], T.FrozenSet[T.Tuple[str, str]]], + T.Tuple[int, str, str] + ] = {} + + def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None) -> None: + super().__init__(DependencyTypeName('pkgconfig'), environment, kwargs, language=language) + self.name = name + self.is_libtool = False + # Store a copy of the pkg-config path on the object itself so it is + # stored in the pickled coredata and recovered. + self.pkgbin = self._detect_pkgbin(self.silent, self.env, self.for_machine) + if self.pkgbin is False: + self.pkgbin = None + msg = f'Pkg-config binary for machine {self.for_machine} not found. Giving up.' + if self.required: + raise DependencyException(msg) + else: + mlog.debug(msg) + return + + assert isinstance(self.pkgbin, ExternalProgram) + mlog.debug('Determining dependency {!r} with pkg-config executable ' + '{!r}'.format(name, self.pkgbin.get_path())) + ret, self.version, _ = self._call_pkgbin(['--modversion', name]) + if ret != 0: + return + + self.is_found = True + + try: + # Fetch cargs to be used while using this dependency + self._set_cargs() + # Fetch the libraries and library paths needed for using this + self._set_libs() + except DependencyException as e: + mlog.debug(f"pkg-config error with '{name}': {e}") + if self.required: + raise + else: + self.compile_args = [] + self.link_args = [] + self.is_found = False + self.reason = e + + def __repr__(self) -> str: + s = '<{0} {1}: {2} {3}>' + return s.format(self.__class__.__name__, self.name, self.is_found, + self.version_reqs) + + @classmethod + def _detect_pkgbin(cls, silent: bool, env: Environment, + for_machine: MachineChoice) -> T.Union[None, bool, ExternalProgram]: + # Only search for pkg-config for each machine the 
first time and store + # the result in the class definition + if cls.class_pkgbin[for_machine] is False: + mlog.debug(f'Pkg-config binary for {for_machine} is cached as not found.') + elif cls.class_pkgbin[for_machine] is not None: + mlog.debug(f'Pkg-config binary for {for_machine} is cached.') + else: + assert cls.class_pkgbin[for_machine] is None, 'for mypy' + mlog.debug(f'Pkg-config binary for {for_machine} is not cached.') + for potential_pkgbin in find_external_program( + env, for_machine, 'pkgconfig', 'Pkg-config', + env.default_pkgconfig, allow_default_for_cross=False): + version_if_ok = cls.check_pkgconfig(env, potential_pkgbin) + if not version_if_ok: + continue + if not silent: + mlog.log('Found pkg-config:', mlog.bold(potential_pkgbin.get_path()), + f'({version_if_ok})') + cls.class_pkgbin[for_machine] = potential_pkgbin + break + else: + if not silent: + mlog.log('Found Pkg-config:', mlog.red('NO')) + # Set to False instead of None to signify that we've already + # searched for it and not found it + cls.class_pkgbin[for_machine] = False + + return cls.class_pkgbin[for_machine] + + def _call_pkgbin_real(self, args: T.List[str], env: T.Dict[str, str]) -> T.Tuple[int, str, str]: + assert isinstance(self.pkgbin, ExternalProgram) + cmd = self.pkgbin.get_command() + args + p, out, err = Popen_safe(cmd, env=env) + rc, out, err = p.returncode, out.strip(), err.strip() + call = ' '.join(cmd) + mlog.debug(f"Called `{call}` -> {rc}") + if out: + mlog.debug(f'stdout:\n{out}\n-----------') + if err: + mlog.debug(f'stderr:\n{err}\n-----------') + return rc, out, err + + @staticmethod + def get_env(environment: 'Environment', for_machine: MachineChoice, + uninstalled: bool = False) -> 'EnvironmentVariables': + from ..build import EnvironmentVariables + env = EnvironmentVariables() + key = OptionKey('pkg_config_path', machine=for_machine) + extra_paths: T.List[str] = environment.coredata.options[key].value[:] + if uninstalled: + uninstalled_path = 
Path(environment.get_build_dir(), 'meson-uninstalled').as_posix() + if uninstalled_path not in extra_paths: + extra_paths.append(uninstalled_path) + env.set('PKG_CONFIG_PATH', extra_paths) + sysroot = environment.properties[for_machine].get_sys_root() + if sysroot: + env.set('PKG_CONFIG_SYSROOT_DIR', [sysroot]) + pkg_config_libdir_prop = environment.properties[for_machine].get_pkg_config_libdir() + if pkg_config_libdir_prop: + env.set('PKG_CONFIG_LIBDIR', pkg_config_libdir_prop) + return env + + @staticmethod + def setup_env(env: T.MutableMapping[str, str], environment: 'Environment', for_machine: MachineChoice, + uninstalled: bool = False) -> T.Dict[str, str]: + envvars = PkgConfigDependency.get_env(environment, for_machine, uninstalled) + env = envvars.get_env(env) + # Dump all PKG_CONFIG environment variables + for key, value in env.items(): + if key.startswith('PKG_'): + mlog.debug(f'env[{key}]: {value}') + return env + + def _call_pkgbin(self, args: T.List[str], env: T.Optional[T.MutableMapping[str, str]] = None) -> T.Tuple[int, str, str]: + assert isinstance(self.pkgbin, ExternalProgram) + env = env or os.environ + env = PkgConfigDependency.setup_env(env, self.env, self.for_machine) + + fenv = frozenset(env.items()) + targs = tuple(args) + cache = PkgConfigDependency.pkgbin_cache + if (self.pkgbin, targs, fenv) not in cache: + cache[(self.pkgbin, targs, fenv)] = self._call_pkgbin_real(args, env) + return cache[(self.pkgbin, targs, fenv)] + + def _convert_mingw_paths(self, args: T.List[str]) -> T.List[str]: + ''' + Both MSVC and native Python on Windows cannot handle MinGW-esque /c/foo + paths so convert them to C:/foo. We cannot resolve other paths starting + with / like /home/foo so leave them as-is so that the user gets an + error/warning from the compiler/linker. + ''' + if not self.env.machines.build.is_windows(): + return args + converted = [] + for arg in args: + pargs: T.Tuple[str, ...] 
= tuple() + # Library search path + if arg.startswith('-L/'): + pargs = PurePath(arg[2:]).parts + tmpl = '-L{}:/{}' + elif arg.startswith('-I/'): + pargs = PurePath(arg[2:]).parts + tmpl = '-I{}:/{}' + # Full path to library or .la file + elif arg.startswith('/'): + pargs = PurePath(arg).parts + tmpl = '{}:/{}' + elif arg.startswith(('-L', '-I')) or (len(arg) > 2 and arg[1] == ':'): + # clean out improper '\\ ' as comes from some Windows pkg-config files + arg = arg.replace('\\ ', ' ') + if len(pargs) > 1 and len(pargs[1]) == 1: + arg = tmpl.format(pargs[1], '/'.join(pargs[2:])) + converted.append(arg) + return converted + + def _split_args(self, cmd: str) -> T.List[str]: + # pkg-config paths follow Unix conventions, even on Windows; split the + # output using shlex.split rather than mesonlib.split_args + return shlex.split(cmd) + + def _set_cargs(self) -> None: + env = None + if self.language == 'fortran': + # gfortran doesn't appear to look in system paths for INCLUDE files, + # so don't allow pkg-config to suppress -I flags for system paths + env = os.environ.copy() + env['PKG_CONFIG_ALLOW_SYSTEM_CFLAGS'] = '1' + ret, out, err = self._call_pkgbin(['--cflags', self.name], env=env) + if ret != 0: + raise DependencyException(f'Could not generate cargs for {self.name}:\n{err}\n') + self.compile_args = self._convert_mingw_paths(self._split_args(out)) + + def _search_libs(self, out: str, out_raw: str) -> T.Tuple[T.List[str], T.List[str]]: + ''' + @out: PKG_CONFIG_ALLOW_SYSTEM_LIBS=1 pkg-config --libs + @out_raw: pkg-config --libs + + We always look for the file ourselves instead of depending on the + compiler to find it with -lfoo or foo.lib (if possible) because: + 1. We want to be able to select static or shared + 2. We need the full path of the library to calculate RPATH values + 3. De-dup of libraries is easier when we have absolute paths + + Libraries that are provided by the toolchain or are not found by + find_library() will be added with -L -l pairs. 
+ ''' + # Library paths should be safe to de-dup + # + # First, figure out what library paths to use. Originally, we were + # doing this as part of the loop, but due to differences in the order + # of -L values between pkg-config and pkgconf, we need to do that as + # a separate step. See: + # https://github.com/mesonbuild/meson/issues/3951 + # https://github.com/mesonbuild/meson/issues/4023 + # + # Separate system and prefix paths, and ensure that prefix paths are + # always searched first. + prefix_libpaths: OrderedSet[str] = OrderedSet() + # We also store this raw_link_args on the object later + raw_link_args = self._convert_mingw_paths(self._split_args(out_raw)) + for arg in raw_link_args: + if arg.startswith('-L') and not arg.startswith(('-L-l', '-L-L')): + path = arg[2:] + if not os.path.isabs(path): + # Resolve the path as a compiler in the build directory would + path = os.path.join(self.env.get_build_dir(), path) + prefix_libpaths.add(path) + # Library paths are not always ordered in a meaningful way + # + # Instead of relying on pkg-config or pkgconf to provide -L flags in a + # specific order, we reorder library paths ourselves, according to th + # order specified in PKG_CONFIG_PATH. See: + # https://github.com/mesonbuild/meson/issues/4271 + # + # Only prefix_libpaths are reordered here because there should not be + # too many system_libpaths to cause library version issues. 
+ pkg_config_path: T.List[str] = self.env.coredata.options[OptionKey('pkg_config_path', machine=self.for_machine)].value + pkg_config_path = self._convert_mingw_paths(pkg_config_path) + prefix_libpaths = OrderedSet(sort_libpaths(list(prefix_libpaths), pkg_config_path)) + system_libpaths: OrderedSet[str] = OrderedSet() + full_args = self._convert_mingw_paths(self._split_args(out)) + for arg in full_args: + if arg.startswith(('-L-l', '-L-L')): + # These are D language arguments, not library paths + continue + if arg.startswith('-L') and arg[2:] not in prefix_libpaths: + system_libpaths.add(arg[2:]) + # Use this re-ordered path list for library resolution + libpaths = list(prefix_libpaths) + list(system_libpaths) + # Track -lfoo libraries to avoid duplicate work + libs_found: OrderedSet[str] = OrderedSet() + # Track not-found libraries to know whether to add library paths + libs_notfound = [] + # Generate link arguments for this library + link_args = [] + for lib in full_args: + if lib.startswith(('-L-l', '-L-L')): + # These are D language arguments, add them as-is + pass + elif lib.startswith('-L'): + # We already handled library paths above + continue + elif lib.startswith('-l:'): + # see: https://stackoverflow.com/questions/48532868/gcc-library-option-with-a-colon-llibevent-a + # also : See the documentation of -lnamespec | --library=namespec in the linker manual + # https://sourceware.org/binutils/docs-2.18/ld/Options.html + + # Don't resolve the same -l:libfoo.a argument again + if lib in libs_found: + continue + libfilename = lib[3:] + foundname = None + for libdir in libpaths: + target = os.path.join(libdir, libfilename) + if os.path.exists(target): + foundname = target + break + if foundname is None: + if lib in libs_notfound: + continue + else: + mlog.warning('Library {!r} not found for dependency {!r}, may ' + 'not be successfully linked'.format(libfilename, self.name)) + libs_notfound.append(lib) + else: + lib = foundname + elif lib.startswith('-l'): + # 
Don't resolve the same -lfoo argument again + if lib in libs_found: + continue + if self.clib_compiler: + args = self.clib_compiler.find_library(lib[2:], self.env, + libpaths, self.libtype) + # If the project only uses a non-clib language such as D, Rust, + # C#, Python, etc, all we can do is limp along by adding the + # arguments as-is and then adding the libpaths at the end. + else: + args = None + if args is not None: + libs_found.add(lib) + # Replace -l arg with full path to library if available + # else, library is either to be ignored, or is provided by + # the compiler, can't be resolved, and should be used as-is + if args: + if not args[0].startswith('-l'): + lib = args[0] + else: + continue + else: + # Library wasn't found, maybe we're looking in the wrong + # places or the library will be provided with LDFLAGS or + # LIBRARY_PATH from the environment (on macOS), and many + # other edge cases that we can't account for. + # + # Add all -L paths and use it as -lfoo + if lib in libs_notfound: + continue + if self.static: + mlog.warning('Static library {!r} not found for dependency {!r}, may ' + 'not be statically linked'.format(lib[2:], self.name)) + libs_notfound.append(lib) + elif lib.endswith(".la"): + shared_libname = self.extract_libtool_shlib(lib) + shared_lib = os.path.join(os.path.dirname(lib), shared_libname) + if not os.path.exists(shared_lib): + shared_lib = os.path.join(os.path.dirname(lib), ".libs", shared_libname) + + if not os.path.exists(shared_lib): + raise DependencyException(f'Got a libtools specific "{lib}" dependencies' + 'but we could not compute the actual shared' + 'library path') + self.is_libtool = True + lib = shared_lib + if lib in link_args: + continue + link_args.append(lib) + # Add all -Lbar args if we have -lfoo args in link_args + if libs_notfound: + # Order of -L flags doesn't matter with ld, but it might with other + # linkers such as MSVC, so prepend them. 
+ link_args = ['-L' + lp for lp in prefix_libpaths] + link_args + return link_args, raw_link_args + + def _set_libs(self) -> None: + env = None + libcmd = ['--libs'] + + if self.static: + libcmd.append('--static') + + libcmd.append(self.name) + + # Force pkg-config to output -L fields even if they are system + # paths so we can do manual searching with cc.find_library() later. + env = os.environ.copy() + env['PKG_CONFIG_ALLOW_SYSTEM_LIBS'] = '1' + ret, out, err = self._call_pkgbin(libcmd, env=env) + if ret != 0: + raise DependencyException(f'Could not generate libs for {self.name}:\n{err}\n') + # Also get the 'raw' output without -Lfoo system paths for adding -L + # args with -lfoo when a library can't be found, and also in + # gnome.generate_gir + gnome.gtkdoc which need -L -l arguments. + ret, out_raw, err_raw = self._call_pkgbin(libcmd) + if ret != 0: + raise DependencyException(f'Could not generate libs for {self.name}:\n\n{out_raw}') + self.link_args, self.raw_link_args = self._search_libs(out, out_raw) + + def get_pkgconfig_variable(self, variable_name: str, + define_variable: 'ImmutableListProtocol[str]', + default: T.Optional[str]) -> str: + options = ['--variable=' + variable_name, self.name] + + if define_variable: + options = ['--define-variable=' + '='.join(define_variable)] + options + + ret, out, err = self._call_pkgbin(options) + variable = '' + if ret != 0: + if self.required: + raise DependencyException(f'dependency {self.name} not found:\n{err}\n') + else: + variable = out.strip() + + # pkg-config doesn't distinguish between empty and non-existent variables + # use the variable list to check for variable existence + if not variable: + ret, out, _ = self._call_pkgbin(['--print-variables', self.name]) + if not re.search(r'^' + variable_name + r'$', out, re.MULTILINE): + if default is not None: + variable = default + else: + mlog.warning(f"pkgconfig variable '{variable_name}' not defined for dependency {self.name}.") + + mlog.debug(f'Got pkgconfig 
variable {variable_name} : {variable}') + return variable + + @staticmethod + def check_pkgconfig(env: Environment, pkgbin: ExternalProgram) -> T.Optional[str]: + if not pkgbin.found(): + mlog.log(f'Did not find pkg-config by name {pkgbin.name!r}') + return None + command_as_string = ' '.join(pkgbin.get_command()) + try: + helptext = Popen_safe(pkgbin.get_command() + ['--help'])[1] + if 'Pure-Perl' in helptext: + mlog.log(f'found pkg-config {command_as_string!r} but it is Strawberry Perl and thus broken. Ignoring...') + return None + p, out = Popen_safe(pkgbin.get_command() + ['--version'])[0:2] + if p.returncode != 0: + mlog.warning(f'Found pkg-config {command_as_string!r} but it failed when run') + return None + except FileNotFoundError: + mlog.warning(f'We thought we found pkg-config {command_as_string!r} but now it\'s not there. How odd!') + return None + except PermissionError: + msg = f'Found pkg-config {command_as_string!r} but didn\'t have permissions to run it.' + if not env.machines.build.is_windows(): + msg += '\n\nOn Unix-like systems this is often caused by scripts that are not executable.' 
+ mlog.warning(msg) + return None + return out.strip() + + def extract_field(self, la_file: str, fieldname: str) -> T.Optional[str]: + with open(la_file, encoding='utf-8') as f: + for line in f: + arr = line.strip().split('=') + if arr[0] == fieldname: + return arr[1][1:-1] + return None + + def extract_dlname_field(self, la_file: str) -> T.Optional[str]: + return self.extract_field(la_file, 'dlname') + + def extract_libdir_field(self, la_file: str) -> T.Optional[str]: + return self.extract_field(la_file, 'libdir') + + def extract_libtool_shlib(self, la_file: str) -> T.Optional[str]: + ''' + Returns the path to the shared library + corresponding to this .la file + ''' + dlname = self.extract_dlname_field(la_file) + if dlname is None: + return None + + # Darwin uses absolute paths where possible; since the libtool files never + # contain absolute paths, use the libdir field + if self.env.machines[self.for_machine].is_darwin(): + dlbasename = os.path.basename(dlname) + libdir = self.extract_libdir_field(la_file) + if libdir is None: + return dlbasename + return os.path.join(libdir, dlbasename) + # From the comments in extract_libtool(), older libtools had + # a path rather than the raw dlname + return os.path.basename(dlname) + + @staticmethod + def log_tried() -> str: + return 'pkgconfig' + + def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None, + configtool: T.Optional[str] = None, internal: T.Optional[str] = None, + default_value: T.Optional[str] = None, + pkgconfig_define: T.Optional[T.List[str]] = None) -> str: + if pkgconfig: + try: + return self.get_pkgconfig_variable(pkgconfig, pkgconfig_define or [], default_value) + except DependencyException: + pass + if default_value is not None: + return default_value + raise DependencyException(f'Could not get pkg-config variable and no default provided for {self!r}') diff --git a/mesonbuild/dependencies/platform.py b/mesonbuild/dependencies/platform.py new file mode 100644 index 
0000000..6d32555 --- /dev/null +++ b/mesonbuild/dependencies/platform.py @@ -0,0 +1,60 @@ +# Copyright 2013-2017 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This file contains the detection logic for external dependencies that are +# platform-specific (generally speaking). +from __future__ import annotations + +from .base import DependencyTypeName, ExternalDependency, DependencyException +from ..mesonlib import MesonException +import typing as T + +if T.TYPE_CHECKING: + from ..environment import Environment + +class AppleFrameworks(ExternalDependency): + def __init__(self, env: 'Environment', kwargs: T.Dict[str, T.Any]) -> None: + super().__init__(DependencyTypeName('appleframeworks'), env, kwargs) + modules = kwargs.get('modules', []) + if isinstance(modules, str): + modules = [modules] + if not modules: + raise DependencyException("AppleFrameworks dependency requires at least one module.") + self.frameworks = modules + if not self.clib_compiler: + raise DependencyException('No C-like compilers are available, cannot find the framework') + self.is_found = True + for f in self.frameworks: + try: + args = self.clib_compiler.find_framework(f, env, []) + except MesonException as e: + if 'non-clang' in str(e): + self.is_found = False + self.link_args = [] + self.compile_args = [] + return + raise + + if args is not None: + # No compile args are needed for system frameworks + self.link_args += args + else: + self.is_found = False + + def 
log_info(self) -> str: + return ', '.join(self.frameworks) + + @staticmethod + def log_tried() -> str: + return 'framework' diff --git a/mesonbuild/dependencies/qt.py b/mesonbuild/dependencies/qt.py new file mode 100644 index 0000000..6dd712d --- /dev/null +++ b/mesonbuild/dependencies/qt.py @@ -0,0 +1,486 @@ +# Copyright 2013-2017 The Meson development team +# Copyright © 2021 Intel Corporation +# SPDX-license-identifier: Apache-2.0 + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +"""Dependency finders for the Qt framework.""" + +import abc +import re +import os +import typing as T + +from .base import DependencyException, DependencyMethods +from .configtool import ConfigToolDependency +from .framework import ExtraFrameworkDependency +from .pkgconfig import PkgConfigDependency +from .factory import DependencyFactory +from .. import mlog +from .. 
import mesonlib + +if T.TYPE_CHECKING: + from ..compilers import Compiler + from ..envconfig import MachineInfo + from ..environment import Environment + from ..dependencies import MissingCompiler + + +def _qt_get_private_includes(mod_inc_dir: str, module: str, mod_version: str) -> T.List[str]: + # usually Qt5 puts private headers in /QT_INSTALL_HEADERS/module/VERSION/module/private + # except for at least QtWebkit and Enginio where the module version doesn't match Qt version + # as an example with Qt 5.10.1 on linux you would get: + # /usr/include/qt5/QtCore/5.10.1/QtCore/private/ + # /usr/include/qt5/QtWidgets/5.10.1/QtWidgets/private/ + # /usr/include/qt5/QtWebKit/5.212.0/QtWebKit/private/ + + # on Qt4 when available private folder is directly in module folder + # like /usr/include/QtCore/private/ + if int(mod_version.split('.')[0]) < 5: + return [] + + private_dir = os.path.join(mod_inc_dir, mod_version) + # fallback, let's try to find a directory with the latest version + if not os.path.exists(private_dir): + dirs = [filename for filename in os.listdir(mod_inc_dir) + if os.path.isdir(os.path.join(mod_inc_dir, filename))] + + for dirname in sorted(dirs, reverse=True): + if len(dirname.split('.')) == 3: + private_dir = dirname + break + return [private_dir, os.path.join(private_dir, 'Qt' + module)] + + +def get_qmake_host_bins(qvars: T.Dict[str, str]) -> str: + # Prefer QT_HOST_BINS (qt5, correct for cross and native compiling) + # but fall back to QT_INSTALL_BINS (qt4) + if 'QT_HOST_BINS' in qvars: + return qvars['QT_HOST_BINS'] + return qvars['QT_INSTALL_BINS'] + + +def get_qmake_host_libexecs(qvars: T.Dict[str, str]) -> T.Optional[str]: + if 'QT_HOST_LIBEXECS' in qvars: + return qvars['QT_HOST_LIBEXECS'] + return qvars.get('QT_INSTALL_LIBEXECS') + + +def _get_modules_lib_suffix(version: str, info: 'MachineInfo', is_debug: bool) -> str: + """Get the module suffix based on platform and debug type.""" + suffix = '' + if info.is_windows(): + if is_debug: + suffix 
+= 'd' + if version.startswith('4'): + suffix += '4' + if info.is_darwin(): + if is_debug: + suffix += '_debug' + if mesonlib.version_compare(version, '>= 5.14.0'): + if info.is_android(): + if info.cpu_family == 'x86': + suffix += '_x86' + elif info.cpu_family == 'x86_64': + suffix += '_x86_64' + elif info.cpu_family == 'arm': + suffix += '_armeabi-v7a' + elif info.cpu_family == 'aarch64': + suffix += '_arm64-v8a' + else: + mlog.warning(f'Android target arch "{info.cpu_family}"" for Qt5 is unknown, ' + 'module detection may not work') + return suffix + + +class QtExtraFrameworkDependency(ExtraFrameworkDependency): + def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None): + super().__init__(name, env, kwargs, language=language) + self.mod_name = name[2:] + + def get_compile_args(self, with_private_headers: bool = False, qt_version: str = "0") -> T.List[str]: + if self.found(): + mod_inc_dir = os.path.join(self.framework_path, 'Headers') + args = ['-I' + mod_inc_dir] + if with_private_headers: + args += ['-I' + dirname for dirname in _qt_get_private_includes(mod_inc_dir, self.mod_name, qt_version)] + return args + return [] + + +class _QtBase: + + """Mixin class for shared components between PkgConfig and Qmake.""" + + link_args: T.List[str] + clib_compiler: T.Union['MissingCompiler', 'Compiler'] + env: 'Environment' + libexecdir: T.Optional[str] = None + + def __init__(self, name: str, kwargs: T.Dict[str, T.Any]): + self.name = name + self.qtname = name.capitalize() + self.qtver = name[-1] + if self.qtver == "4": + self.qtpkgname = 'Qt' + else: + self.qtpkgname = self.qtname + + self.private_headers = T.cast('bool', kwargs.get('private_headers', False)) + + self.requested_modules = mesonlib.stringlistify(mesonlib.extract_as_list(kwargs, 'modules')) + if not self.requested_modules: + raise DependencyException('No ' + self.qtname + ' modules specified.') + + self.qtmain = T.cast('bool', kwargs.get('main', 
False)) + if not isinstance(self.qtmain, bool): + raise DependencyException('"main" argument must be a boolean') + + def _link_with_qt_winmain(self, is_debug: bool, libdir: T.Union[str, T.List[str]]) -> bool: + libdir = mesonlib.listify(libdir) # TODO: shouldn't be necessary + base_name = self.get_qt_winmain_base_name(is_debug) + qt_winmain = self.clib_compiler.find_library(base_name, self.env, libdir) + if qt_winmain: + self.link_args.append(qt_winmain[0]) + return True + return False + + def get_qt_winmain_base_name(self, is_debug: bool) -> str: + return 'qtmaind' if is_debug else 'qtmain' + + def get_exe_args(self, compiler: 'Compiler') -> T.List[str]: + # Originally this was -fPIE but nowadays the default + # for upstream and distros seems to be -reduce-relocations + # which requires -fPIC. This may cause a performance + # penalty when using self-built Qt or on platforms + # where -fPIC is not required. If this is an issue + # for you, patches are welcome. + return compiler.get_pic_args() + + def log_details(self) -> str: + return f'modules: {", ".join(sorted(self.requested_modules))}' + + +class QtPkgConfigDependency(_QtBase, PkgConfigDependency, metaclass=abc.ABCMeta): + + """Specialization of the PkgConfigDependency for Qt.""" + + def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]): + _QtBase.__init__(self, name, kwargs) + + # Always use QtCore as the "main" dependency, since it has the extra + # pkg-config variables that a user would expect to get. 
If "Core" is + # not a requested module, delete the compile and link arguments to + # avoid linking with something they didn't ask for + PkgConfigDependency.__init__(self, self.qtpkgname + 'Core', env, kwargs) + if 'Core' not in self.requested_modules: + self.compile_args = [] + self.link_args = [] + + for m in self.requested_modules: + mod = PkgConfigDependency(self.qtpkgname + m, self.env, kwargs, language=self.language) + if not mod.found(): + self.is_found = False + return + if self.private_headers: + qt_inc_dir = mod.get_pkgconfig_variable('includedir', [], None) + mod_private_dir = os.path.join(qt_inc_dir, 'Qt' + m) + if not os.path.isdir(mod_private_dir): + # At least some versions of homebrew don't seem to set this + # up correctly. /usr/local/opt/qt/include/Qt + m_name is a + # symlink to /usr/local/opt/qt/include, but the pkg-config + # file points to /usr/local/Cellar/qt/x.y.z/Headers/, and + # the Qt + m_name there is not a symlink, it's a file + mod_private_dir = qt_inc_dir + mod_private_inc = _qt_get_private_includes(mod_private_dir, m, mod.version) + for directory in mod_private_inc: + mod.compile_args.append('-I' + directory) + self._add_sub_dependency([lambda: mod]) + + if self.env.machines[self.for_machine].is_windows() and self.qtmain: + # Check if we link with debug binaries + debug_lib_name = self.qtpkgname + 'Core' + _get_modules_lib_suffix(self.version, self.env.machines[self.for_machine], True) + is_debug = False + for arg in self.get_link_args(): + if arg == f'-l{debug_lib_name}' or arg.endswith(f'{debug_lib_name}.lib') or arg.endswith(f'{debug_lib_name}.a'): + is_debug = True + break + libdir = self.get_pkgconfig_variable('libdir', [], None) + if not self._link_with_qt_winmain(is_debug, libdir): + self.is_found = False + return + + self.bindir = self.get_pkgconfig_host_bins(self) + if not self.bindir: + # If exec_prefix is not defined, the pkg-config file is broken + prefix = self.get_pkgconfig_variable('exec_prefix', [], None) + if 
prefix: + self.bindir = os.path.join(prefix, 'bin') + + self.libexecdir = self.get_pkgconfig_host_libexecs(self) + + @staticmethod + @abc.abstractmethod + def get_pkgconfig_host_bins(core: PkgConfigDependency) -> T.Optional[str]: + pass + + @staticmethod + @abc.abstractmethod + def get_pkgconfig_host_libexecs(core: PkgConfigDependency) -> T.Optional[str]: + pass + + @abc.abstractmethod + def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]: + pass + + def log_info(self) -> str: + return 'pkg-config' + + +class QmakeQtDependency(_QtBase, ConfigToolDependency, metaclass=abc.ABCMeta): + + """Find Qt using Qmake as a config-tool.""" + + tool_name = 'qmake' + version_arg = '-v' + + def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]): + _QtBase.__init__(self, name, kwargs) + self.tools = [f'qmake{self.qtver}', f'qmake-{self.name}', 'qmake'] + + # Add additional constraints that the Qt version is met, but preserve + # any version requrements the user has set as well. For example, if Qt5 + # is requested, add "">= 5, < 6", but if the user has ">= 5.6", don't + # lose that. 
+ kwargs = kwargs.copy() + _vers = mesonlib.listify(kwargs.get('version', [])) + _vers.extend([f'>= {self.qtver}', f'< {int(self.qtver) + 1}']) + kwargs['version'] = _vers + + ConfigToolDependency.__init__(self, name, env, kwargs) + if not self.found(): + return + + # Query library path, header path, and binary path + stdo = self.get_config_value(['-query'], 'args') + qvars: T.Dict[str, str] = {} + for line in stdo: + line = line.strip() + if line == '': + continue + k, v = line.split(':', 1) + qvars[k] = v + # Qt on macOS uses a framework, but Qt for iOS/tvOS does not + xspec = qvars.get('QMAKE_XSPEC', '') + if self.env.machines.host.is_darwin() and not any(s in xspec for s in ['ios', 'tvos']): + mlog.debug("Building for macOS, looking for framework") + self._framework_detect(qvars, self.requested_modules, kwargs) + # Sometimes Qt is built not as a framework (for instance, when using conan pkg manager) + # skip and fall back to normal procedure then + if self.is_found: + return + else: + mlog.debug("Building for macOS, couldn't find framework, falling back to library search") + incdir = qvars['QT_INSTALL_HEADERS'] + self.compile_args.append('-I' + incdir) + libdir = qvars['QT_INSTALL_LIBS'] + # Used by qt.compilers_detect() + self.bindir = get_qmake_host_bins(qvars) + self.libexecdir = get_qmake_host_libexecs(qvars) + + # Use the buildtype by default, but look at the b_vscrt option if the + # compiler supports it. 
+ is_debug = self.env.coredata.get_option(mesonlib.OptionKey('buildtype')) == 'debug' + if mesonlib.OptionKey('b_vscrt') in self.env.coredata.options: + if self.env.coredata.options[mesonlib.OptionKey('b_vscrt')].value in {'mdd', 'mtd'}: + is_debug = True + modules_lib_suffix = _get_modules_lib_suffix(self.version, self.env.machines[self.for_machine], is_debug) + + for module in self.requested_modules: + mincdir = os.path.join(incdir, 'Qt' + module) + self.compile_args.append('-I' + mincdir) + + if module == 'QuickTest': + define_base = 'QMLTEST' + elif module == 'Test': + define_base = 'TESTLIB' + else: + define_base = module.upper() + self.compile_args.append(f'-DQT_{define_base}_LIB') + + if self.private_headers: + priv_inc = self.get_private_includes(mincdir, module) + for directory in priv_inc: + self.compile_args.append('-I' + directory) + libfiles = self.clib_compiler.find_library( + self.qtpkgname + module + modules_lib_suffix, self.env, + mesonlib.listify(libdir)) # TODO: shouldn't be necissary + if libfiles: + libfile = libfiles[0] + else: + mlog.log("Could not find:", module, + self.qtpkgname + module + modules_lib_suffix, + 'in', libdir) + self.is_found = False + break + self.link_args.append(libfile) + + if self.env.machines[self.for_machine].is_windows() and self.qtmain: + if not self._link_with_qt_winmain(is_debug, libdir): + self.is_found = False + + def _sanitize_version(self, version: str) -> str: + m = re.search(rf'({self.qtver}(\.\d+)+)', version) + if m: + return m.group(0).rstrip('.') + return version + + @abc.abstractmethod + def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]: + pass + + def _framework_detect(self, qvars: T.Dict[str, str], modules: T.List[str], kwargs: T.Dict[str, T.Any]) -> None: + libdir = qvars['QT_INSTALL_LIBS'] + + # ExtraFrameworkDependency doesn't support any methods + fw_kwargs = kwargs.copy() + fw_kwargs.pop('method', None) + fw_kwargs['paths'] = [libdir] + + for m in modules: + fname = 'Qt' 
+ m + mlog.debug('Looking for qt framework ' + fname) + fwdep = QtExtraFrameworkDependency(fname, self.env, fw_kwargs, language=self.language) + if fwdep.found(): + self.compile_args.append('-F' + libdir) + self.compile_args += fwdep.get_compile_args(with_private_headers=self.private_headers, + qt_version=self.version) + self.link_args += fwdep.get_link_args() + else: + self.is_found = False + break + else: + self.is_found = True + # Used by self.compilers_detect() + self.bindir = get_qmake_host_bins(qvars) + self.libexecdir = get_qmake_host_libexecs(qvars) + + def log_info(self) -> str: + return 'qmake' + + +class Qt6WinMainMixin: + + def get_qt_winmain_base_name(self, is_debug: bool) -> str: + return 'Qt6EntryPointd' if is_debug else 'Qt6EntryPoint' + + +class Qt4ConfigToolDependency(QmakeQtDependency): + + def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]: + return [] + + +class Qt5ConfigToolDependency(QmakeQtDependency): + + def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]: + return _qt_get_private_includes(mod_inc_dir, module, self.version) + + +class Qt6ConfigToolDependency(Qt6WinMainMixin, QmakeQtDependency): + + def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]: + return _qt_get_private_includes(mod_inc_dir, module, self.version) + + +class Qt4PkgConfigDependency(QtPkgConfigDependency): + + @staticmethod + def get_pkgconfig_host_bins(core: PkgConfigDependency) -> T.Optional[str]: + # Only return one bins dir, because the tools are generally all in one + # directory for Qt4, in Qt5, they must all be in one directory. Return + # the first one found among the bin variables, in case one tool is not + # configured to be built. 
+ applications = ['moc', 'uic', 'rcc', 'lupdate', 'lrelease'] + for application in applications: + try: + return os.path.dirname(core.get_pkgconfig_variable(f'{application}_location', [], None)) + except mesonlib.MesonException: + pass + return None + + def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]: + return [] + + @staticmethod + def get_pkgconfig_host_libexecs(core: PkgConfigDependency) -> str: + return None + + +class Qt5PkgConfigDependency(QtPkgConfigDependency): + + @staticmethod + def get_pkgconfig_host_bins(core: PkgConfigDependency) -> str: + return core.get_pkgconfig_variable('host_bins', [], None) + + @staticmethod + def get_pkgconfig_host_libexecs(core: PkgConfigDependency) -> str: + return None + + def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]: + return _qt_get_private_includes(mod_inc_dir, module, self.version) + + +class Qt6PkgConfigDependency(Qt6WinMainMixin, QtPkgConfigDependency): + + def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]): + super().__init__(name, env, kwargs) + if not self.libexecdir: + mlog.debug(f'detected Qt6 {self.version} pkg-config dependency does not ' + 'have proper tools support, ignoring') + self.is_found = False + + @staticmethod + def get_pkgconfig_host_bins(core: PkgConfigDependency) -> str: + return core.get_pkgconfig_variable('bindir', [], None) + + @staticmethod + def get_pkgconfig_host_libexecs(core: PkgConfigDependency) -> str: + # Qt6 pkg-config for Qt defines libexecdir from 6.3+ + return core.get_pkgconfig_variable('libexecdir', [], None) + + def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]: + return _qt_get_private_includes(mod_inc_dir, module, self.version) + + +qt4_factory = DependencyFactory( + 'qt4', + [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL], + pkgconfig_class=Qt4PkgConfigDependency, + configtool_class=Qt4ConfigToolDependency, +) + +qt5_factory = DependencyFactory( + 'qt5', 
+ [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL], + pkgconfig_class=Qt5PkgConfigDependency, + configtool_class=Qt5ConfigToolDependency, +) + +qt6_factory = DependencyFactory( + 'qt6', + [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL], + pkgconfig_class=Qt6PkgConfigDependency, + configtool_class=Qt6ConfigToolDependency, +) diff --git a/mesonbuild/dependencies/scalapack.py b/mesonbuild/dependencies/scalapack.py new file mode 100644 index 0000000..be8ee70 --- /dev/null +++ b/mesonbuild/dependencies/scalapack.py @@ -0,0 +1,156 @@ +# Copyright 2013-2020 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
from __future__ import annotations

from pathlib import Path
import functools
import os
import typing as T

from ..mesonlib import OptionKey
from .base import DependencyMethods
from .base import DependencyException
from .cmake import CMakeDependency
from .pkgconfig import PkgConfigDependency
from .factory import factory_methods

if T.TYPE_CHECKING:
    from ..environment import Environment, MachineChoice
    from .factory import DependencyGenerator


@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.CMAKE})
def scalapack_factory(env: 'Environment', for_machine: 'MachineChoice',
                      kwargs: T.Dict[str, T.Any],
                      methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']:
    """Build the ordered candidate list for a scalapack dependency lookup.

    Order matters: Intel MKL's pkg-config entry (which needs the fixups in
    MKLPkgConfigDependency below) is tried first, then the generic
    scalapack pkg-config names, and finally a CMake find-module lookup.
    """
    candidates: T.List['DependencyGenerator'] = []

    if DependencyMethods.PKGCONFIG in methods:
        # An explicit 'static' kwarg wins over the global prefer_static option.
        static_opt = kwargs.get('static', env.coredata.get_option(OptionKey('prefer_static')))
        mkl = 'mkl-static-lp64-iomp' if static_opt else 'mkl-dynamic-lp64-iomp'
        candidates.append(functools.partial(
            MKLPkgConfigDependency, mkl, env, kwargs))

        for pkg in ['scalapack-openmpi', 'scalapack']:
            candidates.append(functools.partial(
                PkgConfigDependency, pkg, env, kwargs))

    if DependencyMethods.CMAKE in methods:
        candidates.append(functools.partial(
            CMakeDependency, 'Scalapack', env, kwargs))

    return candidates


class MKLPkgConfigDependency(PkgConfigDependency):

    """PkgConfigDependency for Intel MKL.

    MKL's pkg-config is pretty much borked in every way. We need to apply a
    bunch of fixups to make it work correctly.
    """

    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
                 language: T.Optional[str] = None):
        # MKLROOT is how Intel's installers expose the SDK location; without
        # it the version and link-arg fixups below cannot run.
        _m = os.environ.get('MKLROOT')
        self.__mklroot = Path(_m).resolve() if _m else None

        # We need to call down into the normal super() method even if we don't
        # find mklroot, otherwise we won't have all of the instance variables
        # initialized that meson expects.
        super().__init__(name, env, kwargs, language=language)

        # Doesn't work with gcc on windows, but does on Linux
        if (not self.__mklroot or (env.machines[self.for_machine].is_windows()
                                   and self.clib_compiler.id == 'gcc')):
            self.is_found = False

        # This can happen either because we're using GCC, we couldn't find the
        # mklroot, or the pkg-config couldn't find it.
        if not self.is_found:
            return

        assert self.version != '', 'This should not happen if we didn\'t return above'

        if self.version == 'unknown':
            # At least by 2020 the version is in the pkg-config, just not with
            # the correct name
            v = self.get_variable(pkgconfig='Version', default_value='')

            if not v and self.__mklroot:
                # Fall back to parsing the version out of the install path,
                # e.g. .../compilers_and_libraries_2020.1.217/...
                try:
                    v = (
                        self.__mklroot.as_posix()
                        .split('compilers_and_libraries_')[1]
                        .split('/', 1)[0]
                    )
                except IndexError:
                    pass

            if v:
                assert isinstance(v, str)
                self.version = v

    def _set_libs(self) -> None:
        """Fix up the inherited pkg-config link args for MKL's quirks:
        swap the Intel interface library for the GFortran one under gcc,
        and insert the scalapack/blacs libraries MKL's .pc file omits.
        """
        super()._set_libs()

        if self.env.machines[self.for_machine].is_windows():
            suffix = '.lib'
        elif self.static:
            suffix = '.a'
        else:
            suffix = ''
        libdir = self.__mklroot / 'lib/intel64'

        if self.clib_compiler.id == 'gcc':
            for i, a in enumerate(self.link_args):
                # only replace in filename, not in directory names
                dirname, basename = os.path.split(a)
                if 'mkl_intel_lp64' in basename:
                    basename = basename.replace('intel', 'gf')
                    # NOTE(review): for an absolute arg this produces a doubled
                    # leading '/' ('//usr/...'); harmless on POSIX but looks
                    # unintended -- confirm whether the '/' prefix is needed.
                    self.link_args[i] = '/' + os.path.join(dirname, basename)
        # MKL pkg-config omits scalapack
        # be sure "-L" and "-Wl" are first if present
        i = 0
        for j, a in enumerate(self.link_args):
            if a.startswith(('-L', '-Wl')):
                i = j + 1
            elif j > 3:
                # only scan the first few args for linker-search flags
                break
        if self.env.machines[self.for_machine].is_windows() or self.static:
            self.link_args.insert(
                i, str(libdir / ('mkl_scalapack_lp64' + suffix))
            )
            self.link_args.insert(
                i + 1, str(libdir / ('mkl_blacs_intelmpi_lp64' + suffix))
            )
        else:
            self.link_args.insert(i, '-lmkl_scalapack_lp64')
            self.link_args.insert(i + 1, '-lmkl_blacs_intelmpi_lp64')

    def _set_cargs(self) -> None:
        """Query pkg-config for cflags, forcing prefix to MKLROOT."""
        env = None
        if self.language == 'fortran':
            # gfortran doesn't appear to look in system paths for INCLUDE files,
            # so don't allow pkg-config to suppress -I flags for system paths
            env = os.environ.copy()
            env['PKG_CONFIG_ALLOW_SYSTEM_CFLAGS'] = '1'
        # NOTE(review): this dereferences self.__mklroot unconditionally; if
        # the base-class __init__ calls _set_cargs before the __mklroot check
        # above and MKLROOT is unset, this would raise AttributeError on
        # None.as_posix() -- confirm call order in PkgConfigDependency.
        ret, out, err = self._call_pkgbin([
            '--cflags', self.name,
            '--define-variable=prefix=' + self.__mklroot.as_posix()],
            env=env)
        if ret != 0:
            raise DependencyException('Could not generate cargs for %s:\n%s\n' %
                                      (self.name, err))
        self.compile_args = self._convert_mingw_paths(self._split_args(out))
diff --git a/mesonbuild/dependencies/ui.py b/mesonbuild/dependencies/ui.py
new file mode 100644
index 0000000..2c341af
--- /dev/null
+++ b/mesonbuild/dependencies/ui.py
@@ -0,0 +1,255 @@
# Copyright 2013-2017 The Meson development team

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

# http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This file contains the detection logic for external dependencies that
# are UI-related.
from __future__ import annotations

import os
import subprocess
import typing as T

from .. import mlog
from .. import mesonlib
from ..mesonlib import (
    Popen_safe, extract_as_list, version_compare_many
)
from ..environment import detect_cpu_family

from .base import DependencyException, DependencyMethods, DependencyTypeName, SystemDependency
from .configtool import ConfigToolDependency
from .factory import DependencyFactory

if T.TYPE_CHECKING:
    from ..environment import Environment


class GLDependencySystem(SystemDependency):
    """System-level OpenGL detection; only Darwin and Windows are handled
    here (other platforms fall through with is_found left unset)."""

    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
        super().__init__(name, environment, kwargs)

        if self.env.machines[self.for_machine].is_darwin():
            self.is_found = True
            # FIXME: Use AppleFrameworks dependency
            self.link_args = ['-framework', 'OpenGL']
            # FIXME: Detect version using self.clib_compiler
            return
        if self.env.machines[self.for_machine].is_windows():
            self.is_found = True
            # FIXME: Use self.clib_compiler.find_library()
            self.link_args = ['-lopengl32']
            # FIXME: Detect version using self.clib_compiler
            return

class GnuStepDependency(ConfigToolDependency):
    """GNUstep detection via gnustep-config (Objective-C)."""

    tools = ['gnustep-config']
    tool_name = 'gnustep-config'

    def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
        super().__init__('gnustep', environment, kwargs, language='objc')
        if not self.is_found:
            return
        self.modules = kwargs.get('modules', [])
        self.compile_args = self.filter_args(
            self.get_config_value(['--objc-flags'], 'compile_args'))
        # 'gui' module pulls in the GUI libs; anything else gets base libs only.
        self.link_args = self.weird_filter(self.get_config_value(
            ['--gui-libs' if 'gui' in self.modules else '--base-libs'],
            'link_args'))

    def find_config(self, versions: T.Optional[T.List[str]] = None, returncode: int = 0) -> T.Tuple[T.Optional[T.List[str]], T.Optional[str]]:
        # gnustep-config has no --version flag, so probe it with --help and
        # derive the version from the GNUstep makefiles (detect_version below).
        tool = [self.tools[0]]
        try:
            p, out = Popen_safe(tool + ['--help'])[:2]
        except (FileNotFoundError, PermissionError):
            return (None, None)
        if p.returncode != returncode:
            return (None, None)
        self.config = tool
        found_version = self.detect_version()
        if versions and not version_compare_many(found_version, versions)[0]:
            return (None, found_version)

        return (tool, found_version)

    @staticmethod
    def weird_filter(elems: T.List[str]) -> T.List[str]:
        """When building packages, the output of the enclosing Make is
        sometimes mixed among the subprocess output. I have no idea why. As a
        hack filter out everything that is not a flag.
        """
        return [e for e in elems if e.startswith('-')]

    @staticmethod
    def filter_args(args: T.List[str]) -> T.List[str]:
        """gnustep-config returns a bunch of garbage args such as -O2 and so
        on. Drop everything that is not needed.
        """
        result = []
        for f in args:
            # keep defines, codegen flags, include paths, -pthread, and all
            # warning flags except the blanket -Wall
            if f.startswith('-D') \
                    or f.startswith('-f') \
                    or f.startswith('-I') \
                    or f == '-pthread' \
                    or (f.startswith('-W') and not f == '-Wall'):
                result.append(f)
        return result

    def detect_version(self) -> str:
        """Extract GNUSTEP_BASE_VERSION by driving GNUmake over base.make."""
        gmake = self.get_config_value(['--variable=GNUMAKE'], 'variable')[0]
        makefile_dir = self.get_config_value(['--variable=GNUSTEP_MAKEFILES'], 'variable')[0]
        # This Makefile has the GNUStep version set
        base_make = os.path.join(makefile_dir, 'Additional', 'base.make')
        # Print the Makefile variable passed as the argument. For instance, if
        # you run the make target `print-SOME_VARIABLE`, this will print the
        # value of the variable `SOME_VARIABLE`.
        printver = "print-%:\n\t@echo '$($*)'"
        env = os.environ.copy()
        # See base.make to understand why this is set
        env['FOUNDATION_LIB'] = 'gnu'
        p, o, e = Popen_safe([gmake, '-f', '-', '-f', base_make,
                              'print-GNUSTEP_BASE_VERSION'],
                             env=env, write=printver, stdin=subprocess.PIPE)
        version = o.strip()
        if not version:
            mlog.debug("Couldn't detect GNUStep version, falling back to '1'")
            # Fallback to setting some 1.x version
            version = '1'
        return version


class SDL2DependencyConfigTool(ConfigToolDependency):
    """SDL2 detection via the sdl2-config script."""

    tools = ['sdl2-config']
    tool_name = 'sdl2-config'

    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
        super().__init__(name, environment, kwargs)
        if not self.is_found:
            return
        self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
        self.link_args = self.get_config_value(['--libs'], 'link_args')


class WxDependency(ConfigToolDependency):
    """wxWidgets detection via wx-config (C++)."""

    tools = ['wx-config-3.0', 'wx-config-3.1', 'wx-config', 'wx-config-gtk3']
    tool_name = 'wx-config'

    def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
        super().__init__('WxWidgets', environment, kwargs, language='cpp')
        if not self.is_found:
            return
        self.requested_modules = self.get_requested(kwargs)

        extra_args = []
        if self.static:
            extra_args.append('--static=yes')

            # Check to make sure static is going to work
            err = Popen_safe(self.config + extra_args)[2]
            if 'No config found to match' in err:
                mlog.debug('WxWidgets is missing static libraries.')
                self.is_found = False
                return

        # wx-config seems to have a cflags as well but since it requires C++,
        # this should be good, at least for now.
        self.compile_args = self.get_config_value(['--cxxflags'] + extra_args + self.requested_modules, 'compile_args')
        self.link_args = self.get_config_value(['--libs'] + extra_args + self.requested_modules, 'link_args')

    @staticmethod
    def get_requested(kwargs: T.Dict[str, T.Any]) -> T.List[str]:
        """Validate and return the 'modules' kwarg as a list of strings."""
        if 'modules' not in kwargs:
            return []
        candidates = extract_as_list(kwargs, 'modules')
        for c in candidates:
            if not isinstance(c, str):
                raise DependencyException('wxwidgets module argument is not a string')
        return candidates


class VulkanDependencySystem(SystemDependency):
    """Vulkan detection: honour VULKAN_SDK when set, otherwise probe the
    compiler for libvulkan and the vulkan/vulkan.h header."""

    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None) -> None:
        super().__init__(name, environment, kwargs, language=language)

        try:
            self.vulkan_sdk = os.environ['VULKAN_SDK']
            if not os.path.isabs(self.vulkan_sdk):
                raise DependencyException('VULKAN_SDK must be an absolute path.')
        except KeyError:
            self.vulkan_sdk = None

        if self.vulkan_sdk:
            # TODO: this config might not work on some platforms, fix bugs as reported
            # we should at least detect other 64-bit platforms (e.g. armv8)
            lib_name = 'vulkan'
            lib_dir = 'lib'
            inc_dir = 'include'
            if mesonlib.is_windows():
                lib_name = 'vulkan-1'
                lib_dir = 'Lib32'
                inc_dir = 'Include'
                # 64-bit Windows SDKs keep the import library under Lib
                if detect_cpu_family(self.env.coredata.compilers.host) == 'x86_64':
                    lib_dir = 'Lib'

            # make sure header and lib are valid
            inc_path = os.path.join(self.vulkan_sdk, inc_dir)
            header = os.path.join(inc_path, 'vulkan', 'vulkan.h')
            lib_path = os.path.join(self.vulkan_sdk, lib_dir)
            find_lib = self.clib_compiler.find_library(lib_name, environment, [lib_path])

            if not find_lib:
                raise DependencyException('VULKAN_SDK point to invalid directory (no lib)')

            if not os.path.isfile(header):
                raise DependencyException('VULKAN_SDK point to invalid directory (no include)')

            # XXX: this is very odd, and may deserve being removed
            self.type_name = DependencyTypeName('vulkan_sdk')
            self.is_found = True
            self.compile_args.append('-I' + inc_path)
            self.link_args.append('-L' + lib_path)
            self.link_args.append('-l' + lib_name)

            # TODO: find a way to retrieve the version from the sdk?
            # Usually it is a part of the path to it (but does not have to be)
            return
        else:
            # simply try to guess it, usually works on linux
            libs = self.clib_compiler.find_library('vulkan', environment, [])
            if libs is not None and self.clib_compiler.has_header('vulkan/vulkan.h', '', environment, disable_cache=True)[0]:
                self.is_found = True
                for lib in libs:
                    self.link_args.append(lib)
                return

# Factory instances registered for 'gl', 'sdl2' and 'vulkan' lookups.
gl_factory = DependencyFactory(
    'gl',
    [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM],
    system_class=GLDependencySystem,
)

sdl2_factory = DependencyFactory(
    'sdl2',
    [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.EXTRAFRAMEWORK],
    configtool_class=SDL2DependencyConfigTool,
)

vulkan_factory = DependencyFactory(
    'vulkan',
    [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM],
    system_class=VulkanDependencySystem,
)