Split the Factory and dependency classes out of the base.py script to improve maintainability. (branch: pull/8846/head)
parent
201dc64226
commit
95b70bcb97
24 changed files with 2059 additions and 1859 deletions
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,655 @@ |
||||
# Copyright 2013-2021 The Meson development team |
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
|
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
|
||||
from .base import ExternalDependency, DependencyException, DependencyMethods |
||||
from ..mesonlib import is_windows, MesonException, OptionKey, PerMachine, stringlistify, extract_as_list |
||||
from ..mesondata import mesondata |
||||
from ..cmake import CMakeExecutor, CMakeTraceParser, CMakeException, CMakeToolchain, CMakeExecScope, check_cmake_args |
||||
from .. import mlog |
||||
from pathlib import Path |
||||
import functools |
||||
import re |
||||
import os |
||||
import shutil |
||||
import textwrap |
||||
import typing as T |
||||
|
||||
if T.TYPE_CHECKING: |
||||
from ..environment import Environment |
||||
from ..mesonlib import MachineInfo |
||||
|
||||
class CMakeDependency(ExternalDependency):
    """External dependency found by driving CMake's package-lookup machinery."""

    # The class's copy of the CMake path. Avoids having to search for it
    # multiple times in the same Meson invocation.
    class_cmakeinfo = PerMachine(None, None)
    # Version string for the minimum CMake version
    class_cmake_version = '>=3.4'
    # CMake generators to try (empty for no generator)
    class_cmake_generators = ['', 'Ninja', 'Unix Makefiles', 'Visual Studio 10 2010']
    # Remembers the last generator that worked so later lookups try it first.
    class_working_generator = None
||||
|
||||
def _gen_exception(self, msg): |
||||
return DependencyException(f'Dependency {self.name} not found: {msg}') |
||||
|
||||
def _main_cmake_file(self) -> str: |
||||
return 'CMakeLists.txt' |
||||
|
||||
def _extra_cmake_opts(self) -> T.List[str]: |
||||
return [] |
||||
|
||||
def _map_module_list(self, modules: T.List[T.Tuple[str, bool]], components: T.List[T.Tuple[str, bool]]) -> T.List[T.Tuple[str, bool]]: |
||||
# Map the input module list to something else |
||||
# This function will only be executed AFTER the initial CMake |
||||
# interpreter pass has completed. Thus variables defined in the |
||||
# CMakeLists.txt can be accessed here. |
||||
# |
||||
# Both the modules and components inputs contain the original lists. |
||||
return modules |
||||
|
||||
def _map_component_list(self, modules: T.List[T.Tuple[str, bool]], components: T.List[T.Tuple[str, bool]]) -> T.List[T.Tuple[str, bool]]: |
||||
# Map the input components list to something else. This |
||||
# function will be executed BEFORE the initial CMake interpreter |
||||
# pass. Thus variables from the CMakeLists.txt can NOT be accessed. |
||||
# |
||||
# Both the modules and components inputs contain the original lists. |
||||
return components |
||||
|
||||
def _original_module_name(self, module: str) -> str: |
||||
# Reverse the module mapping done by _map_module_list for |
||||
# one module |
||||
return module |
||||
|
||||
    def __init__(self, name: str, environment: 'Environment', kwargs, language: T.Optional[str] = None):
        """Locate dependency *name* with CMake.

        May return early (leaving the dependency unfound) when the CMake
        binary is missing and the dependency is not required, or when the
        preliminary file-system check already rules the package out.
        """
        # Gather a list of all languages to support
        self.language_list = []  # type: T.List[str]
        if language is None:
            compilers = None
            if kwargs.get('native', False):
                compilers = environment.coredata.compilers.build
            else:
                compilers = environment.coredata.compilers.host

            candidates = ['c', 'cpp', 'fortran', 'objc', 'objcxx']
            self.language_list += [x for x in candidates if x in compilers]
        else:
            self.language_list += [language]

        # Add additional languages if required
        if 'fortran' in self.language_list:
            self.language_list += ['c']

        # Ensure that the list is unique
        self.language_list = list(set(self.language_list))

        super().__init__('cmake', environment, kwargs, language=language)
        self.name = name
        self.is_libtool = False
        # Store a copy of the CMake path on the object itself so it is
        # stored in the pickled coredata and recovered.
        self.cmakebin = None
        self.cmakeinfo = None

        # Where all CMake "build dirs" are located
        self.cmake_root_dir = environment.scratch_dir

        # T.List of successfully found modules
        self.found_modules = []

        # Initialize with None before the first return to avoid
        # AttributeError exceptions in derived classes
        self.traceparser = None  # type: CMakeTraceParser

        # TODO further evaluate always using MachineChoice.BUILD
        self.cmakebin = CMakeExecutor(environment, CMakeDependency.class_cmake_version, self.for_machine, silent=self.silent)
        if not self.cmakebin.found():
            self.cmakebin = None
            msg = f'CMake binary for machine {self.for_machine} not found. Giving up.'
            if self.required:
                raise DependencyException(msg)
            mlog.debug(msg)
            return

        # Setup the trace parser
        self.traceparser = CMakeTraceParser(self.cmakebin.version(), self._get_build_dir())

        cm_args = stringlistify(extract_as_list(kwargs, 'cmake_args'))
        cm_args = check_cmake_args(cm_args)
        # System info is probed once per machine and cached on the class.
        if CMakeDependency.class_cmakeinfo[self.for_machine] is None:
            CMakeDependency.class_cmakeinfo[self.for_machine] = self._get_cmake_info(cm_args)
        self.cmakeinfo = CMakeDependency.class_cmakeinfo[self.for_machine]
        if self.cmakeinfo is None:
            raise self._gen_exception('Unable to obtain CMake system information')

        package_version = kwargs.get('cmake_package_version', '')
        if not isinstance(package_version, str):
            raise DependencyException('Keyword "cmake_package_version" must be a string.')
        # Required components/modules are tagged True, optional modules False.
        components = [(x, True) for x in stringlistify(extract_as_list(kwargs, 'components'))]
        modules = [(x, True) for x in stringlistify(extract_as_list(kwargs, 'modules'))]
        modules += [(x, False) for x in stringlistify(extract_as_list(kwargs, 'optional_modules'))]
        cm_path = stringlistify(extract_as_list(kwargs, 'cmake_module_path'))
        # Relative module paths are resolved against the project source dir.
        cm_path = [x if os.path.isabs(x) else os.path.join(environment.get_source_dir(), x) for x in cm_path]
        if cm_path:
            cm_args.append('-DCMAKE_MODULE_PATH=' + ';'.join(cm_path))
        # Cheap filesystem pre-check before paying for a full CMake run.
        if not self._preliminary_find_check(name, cm_path, self.cmakebin.get_cmake_prefix_paths(), environment.machines[self.for_machine]):
            mlog.debug('Preliminary CMake check failed. Aborting.')
            return
        self._detect_dep(name, package_version, modules, components, cm_args)
||||
|
||||
def __repr__(self): |
||||
s = '<{0} {1}: {2} {3}>' |
||||
return s.format(self.__class__.__name__, self.name, self.is_found, |
||||
self.version_reqs) |
||||
|
||||
    def _get_cmake_info(self, cm_args):
        """Run a minimal CMake project to extract system information.

        Returns a dict with keys 'module_paths', 'cmake_root', 'archs' and
        'common_paths', or None when every generator failed or the trace
        output could not be parsed.
        """
        mlog.debug("Extracting basic cmake information")
        res = {}

        # Try different CMake generators since specifying no generator may fail
        # in cygwin for some reason
        gen_list = []
        # First try the last working generator
        if CMakeDependency.class_working_generator is not None:
            gen_list += [CMakeDependency.class_working_generator]
        gen_list += CMakeDependency.class_cmake_generators

        temp_parser = CMakeTraceParser(self.cmakebin.version(), self._get_build_dir())
        toolchain = CMakeToolchain(self.cmakebin, self.env, self.for_machine, CMakeExecScope.DEPENDENCY, self._get_build_dir())
        toolchain.write()

        for i in gen_list:
            mlog.debug('Try CMake generator: {}'.format(i if len(i) > 0 else 'auto'))

            # Prepare options
            cmake_opts = temp_parser.trace_args() + toolchain.get_cmake_args() + ['.']
            cmake_opts += cm_args
            if len(i) > 0:
                cmake_opts = ['-G', i] + cmake_opts

            # Run CMake
            ret1, out1, err1 = self._call_cmake(cmake_opts, 'CMakePathInfo.txt')

            # Current generator was successful
            if ret1 == 0:
                # Cache it so subsequent lookups try this generator first.
                CMakeDependency.class_working_generator = i
                break

            mlog.debug(f'CMake failed to gather system information for generator {i} with error code {ret1}')
            mlog.debug(f'OUT:\n{out1}\n\n\nERR:\n{err1}\n\n')

        # Check if any generator succeeded
        if ret1 != 0:
            return None

        try:
            # The interesting data is in the trace output on stderr.
            temp_parser.parse(err1)
        except MesonException:
            return None

        def process_paths(l: T.List[str]) -> T.Set[str]:
            # Split path lists that CMake hands back as delimited strings.
            if is_windows():
                # Cannot split on ':' on Windows because its in the drive letter
                l = [x.split(os.pathsep) for x in l]
            else:
                # https://github.com/mesonbuild/meson/issues/7294
                l = [re.split(r':|;', x) for x in l]
            l = [x for sublist in l for x in sublist]
            return set(l)

        # Extract the variables and sanity check them
        root_paths = process_paths(temp_parser.get_cmake_var('MESON_FIND_ROOT_PATH'))
        root_paths.update(process_paths(temp_parser.get_cmake_var('MESON_CMAKE_SYSROOT')))
        root_paths = sorted(root_paths)
        root_paths = list(filter(lambda x: os.path.isdir(x), root_paths))
        module_paths = process_paths(temp_parser.get_cmake_var('MESON_PATHS_LIST'))
        rooted_paths = []
        for j in [Path(x) for x in root_paths]:
            for i in [Path(x) for x in module_paths]:
                # Re-root every module path under each find-root/sysroot prefix.
                rooted_paths.append(str(j / i.relative_to(i.anchor)))
        module_paths = sorted(module_paths.union(rooted_paths))
        module_paths = list(filter(lambda x: os.path.isdir(x), module_paths))
        archs = temp_parser.get_cmake_var('MESON_ARCH_LIST')

        common_paths = ['lib', 'lib32', 'lib64', 'libx32', 'share']
        for i in archs:
            common_paths += [os.path.join('lib', i)]

        res = {
            'module_paths': module_paths,
            'cmake_root': temp_parser.get_cmake_var('MESON_CMAKE_ROOT')[0],
            'archs': archs,
            'common_paths': common_paths
        }

        mlog.debug(' -- Module search paths: {}'.format(res['module_paths']))
        mlog.debug(' -- CMake root: {}'.format(res['cmake_root']))
        mlog.debug(' -- CMake architectures: {}'.format(res['archs']))
        mlog.debug(' -- CMake lib search paths: {}'.format(res['common_paths']))

        return res
||||
|
||||
@staticmethod |
||||
@functools.lru_cache(maxsize=None) |
||||
def _cached_listdir(path: str) -> T.Tuple[T.Tuple[str, str]]: |
||||
try: |
||||
return tuple((x, str(x).lower()) for x in os.listdir(path)) |
||||
except OSError: |
||||
return () |
||||
|
||||
@staticmethod |
||||
@functools.lru_cache(maxsize=None) |
||||
def _cached_isdir(path: str) -> bool: |
||||
try: |
||||
return os.path.isdir(path) |
||||
except OSError: |
||||
return False |
||||
|
||||
def _preliminary_find_check(self, name: str, module_path: T.List[str], prefix_path: T.List[str], machine: 'MachineInfo') -> bool: |
||||
lname = str(name).lower() |
||||
|
||||
# Checks <path>, <path>/cmake, <path>/CMake |
||||
def find_module(path: str) -> bool: |
||||
for i in [path, os.path.join(path, 'cmake'), os.path.join(path, 'CMake')]: |
||||
if not self._cached_isdir(i): |
||||
continue |
||||
|
||||
# Check the directory case insensitive |
||||
content = self._cached_listdir(i) |
||||
candidates = ['Find{}.cmake', '{}Config.cmake', '{}-config.cmake'] |
||||
candidates = [x.format(name).lower() for x in candidates] |
||||
if any([x[1] in candidates for x in content]): |
||||
return True |
||||
return False |
||||
|
||||
# Search in <path>/(lib/<arch>|lib*|share) for cmake files |
||||
def search_lib_dirs(path: str) -> bool: |
||||
for i in [os.path.join(path, x) for x in self.cmakeinfo['common_paths']]: |
||||
if not self._cached_isdir(i): |
||||
continue |
||||
|
||||
# Check <path>/(lib/<arch>|lib*|share)/cmake/<name>*/ |
||||
cm_dir = os.path.join(i, 'cmake') |
||||
if self._cached_isdir(cm_dir): |
||||
content = self._cached_listdir(cm_dir) |
||||
content = list(filter(lambda x: x[1].startswith(lname), content)) |
||||
for k in content: |
||||
if find_module(os.path.join(cm_dir, k[0])): |
||||
return True |
||||
|
||||
# <path>/(lib/<arch>|lib*|share)/<name>*/ |
||||
# <path>/(lib/<arch>|lib*|share)/<name>*/(cmake|CMake)/ |
||||
content = self._cached_listdir(i) |
||||
content = list(filter(lambda x: x[1].startswith(lname), content)) |
||||
for k in content: |
||||
if find_module(os.path.join(i, k[0])): |
||||
return True |
||||
|
||||
return False |
||||
|
||||
# Check the user provided and system module paths |
||||
for i in module_path + [os.path.join(self.cmakeinfo['cmake_root'], 'Modules')]: |
||||
if find_module(i): |
||||
return True |
||||
|
||||
# Check the user provided prefix paths |
||||
for i in prefix_path: |
||||
if search_lib_dirs(i): |
||||
return True |
||||
|
||||
# Check PATH |
||||
system_env = [] # type: T.List[str] |
||||
for i in os.environ.get('PATH', '').split(os.pathsep): |
||||
if i.endswith('/bin') or i.endswith('\\bin'): |
||||
i = i[:-4] |
||||
if i.endswith('/sbin') or i.endswith('\\sbin'): |
||||
i = i[:-5] |
||||
system_env += [i] |
||||
|
||||
# Check the system paths |
||||
for i in self.cmakeinfo['module_paths'] + system_env: |
||||
if find_module(i): |
||||
return True |
||||
|
||||
if search_lib_dirs(i): |
||||
return True |
||||
|
||||
content = self._cached_listdir(i) |
||||
content = list(filter(lambda x: x[1].startswith(lname), content)) |
||||
for k in content: |
||||
if search_lib_dirs(os.path.join(i, k[0])): |
||||
return True |
||||
|
||||
# Mac framework support |
||||
if machine.is_darwin(): |
||||
for j in ['{}.framework', '{}.app']: |
||||
j = j.format(lname) |
||||
if j in content: |
||||
if find_module(os.path.join(i, j[0], 'Resources')) or find_module(os.path.join(i, j[0], 'Version')): |
||||
return True |
||||
|
||||
# Check the environment path |
||||
env_path = os.environ.get(f'{name}_DIR') |
||||
if env_path and find_module(env_path): |
||||
return True |
||||
|
||||
return False |
||||
|
||||
def _detect_dep(self, name: str, package_version: str, modules: T.List[T.Tuple[str, bool]], components: T.List[T.Tuple[str, bool]], args: T.List[str]): |
||||
# Detect a dependency with CMake using the '--find-package' mode |
||||
# and the trace output (stderr) |
||||
# |
||||
# When the trace output is enabled CMake prints all functions with |
||||
# parameters to stderr as they are executed. Since CMake 3.4.0 |
||||
# variables ("${VAR}") are also replaced in the trace output. |
||||
mlog.debug('\nDetermining dependency {!r} with CMake executable ' |
||||
'{!r}'.format(name, self.cmakebin.executable_path())) |
||||
|
||||
# Try different CMake generators since specifying no generator may fail |
||||
# in cygwin for some reason |
||||
gen_list = [] |
||||
# First try the last working generator |
||||
if CMakeDependency.class_working_generator is not None: |
||||
gen_list += [CMakeDependency.class_working_generator] |
||||
gen_list += CMakeDependency.class_cmake_generators |
||||
|
||||
# Map the components |
||||
comp_mapped = self._map_component_list(modules, components) |
||||
toolchain = CMakeToolchain(self.cmakebin, self.env, self.for_machine, CMakeExecScope.DEPENDENCY, self._get_build_dir()) |
||||
toolchain.write() |
||||
|
||||
for i in gen_list: |
||||
mlog.debug('Try CMake generator: {}'.format(i if len(i) > 0 else 'auto')) |
||||
|
||||
# Prepare options |
||||
cmake_opts = [] |
||||
cmake_opts += [f'-DNAME={name}'] |
||||
cmake_opts += ['-DARCHS={}'.format(';'.join(self.cmakeinfo['archs']))] |
||||
cmake_opts += [f'-DVERSION={package_version}'] |
||||
cmake_opts += ['-DCOMPS={}'.format(';'.join([x[0] for x in comp_mapped]))] |
||||
cmake_opts += args |
||||
cmake_opts += self.traceparser.trace_args() |
||||
cmake_opts += toolchain.get_cmake_args() |
||||
cmake_opts += self._extra_cmake_opts() |
||||
cmake_opts += ['.'] |
||||
if len(i) > 0: |
||||
cmake_opts = ['-G', i] + cmake_opts |
||||
|
||||
# Run CMake |
||||
ret1, out1, err1 = self._call_cmake(cmake_opts, self._main_cmake_file()) |
||||
|
||||
# Current generator was successful |
||||
if ret1 == 0: |
||||
CMakeDependency.class_working_generator = i |
||||
break |
||||
|
||||
mlog.debug(f'CMake failed for generator {i} and package {name} with error code {ret1}') |
||||
mlog.debug(f'OUT:\n{out1}\n\n\nERR:\n{err1}\n\n') |
||||
|
||||
# Check if any generator succeeded |
||||
if ret1 != 0: |
||||
return |
||||
|
||||
try: |
||||
self.traceparser.parse(err1) |
||||
except CMakeException as e: |
||||
e = self._gen_exception(str(e)) |
||||
if self.required: |
||||
raise |
||||
else: |
||||
self.compile_args = [] |
||||
self.link_args = [] |
||||
self.is_found = False |
||||
self.reason = e |
||||
return |
||||
|
||||
# Whether the package is found or not is always stored in PACKAGE_FOUND |
||||
self.is_found = self.traceparser.var_to_bool('PACKAGE_FOUND') |
||||
if not self.is_found: |
||||
return |
||||
|
||||
# Try to detect the version |
||||
vers_raw = self.traceparser.get_cmake_var('PACKAGE_VERSION') |
||||
|
||||
if len(vers_raw) > 0: |
||||
self.version = vers_raw[0] |
||||
self.version.strip('"\' ') |
||||
|
||||
# Post-process module list. Used in derived classes to modify the |
||||
# module list (append prepend a string, etc.). |
||||
modules = self._map_module_list(modules, components) |
||||
autodetected_module_list = False |
||||
|
||||
# Try guessing a CMake target if none is provided |
||||
if len(modules) == 0: |
||||
for i in self.traceparser.targets: |
||||
tg = i.lower() |
||||
lname = name.lower() |
||||
if f'{lname}::{lname}' == tg or lname == tg.replace('::', ''): |
||||
mlog.debug(f'Guessed CMake target \'{i}\'') |
||||
modules = [(i, True)] |
||||
autodetected_module_list = True |
||||
break |
||||
|
||||
# Failed to guess a target --> try the old-style method |
||||
if len(modules) == 0: |
||||
incDirs = [x for x in self.traceparser.get_cmake_var('PACKAGE_INCLUDE_DIRS') if x] |
||||
defs = [x for x in self.traceparser.get_cmake_var('PACKAGE_DEFINITIONS') if x] |
||||
libs = [x for x in self.traceparser.get_cmake_var('PACKAGE_LIBRARIES') if x] |
||||
|
||||
# Try to use old style variables if no module is specified |
||||
if len(libs) > 0: |
||||
self.compile_args = list(map(lambda x: f'-I{x}', incDirs)) + defs |
||||
self.link_args = libs |
||||
mlog.debug(f'using old-style CMake variables for dependency {name}') |
||||
mlog.debug(f'Include Dirs: {incDirs}') |
||||
mlog.debug(f'Compiler Definitions: {defs}') |
||||
mlog.debug(f'Libraries: {libs}') |
||||
return |
||||
|
||||
# Even the old-style approach failed. Nothing else we can do here |
||||
self.is_found = False |
||||
raise self._gen_exception('CMake: failed to guess a CMake target for {}.\n' |
||||
'Try to explicitly specify one or more targets with the "modules" property.\n' |
||||
'Valid targets are:\n{}'.format(name, list(self.traceparser.targets.keys()))) |
||||
|
||||
# Set dependencies with CMake targets |
||||
# recognise arguments we should pass directly to the linker |
||||
reg_is_lib = re.compile(r'^(-l[a-zA-Z0-9_]+|-pthread|-delayload:[a-zA-Z0-9_\.]+|[a-zA-Z0-9_]+\.lib)$') |
||||
reg_is_maybe_bare_lib = re.compile(r'^[a-zA-Z0-9_]+$') |
||||
processed_targets = [] |
||||
incDirs = [] |
||||
compileDefinitions = [] |
||||
compileOptions = [] |
||||
libraries = [] |
||||
for i, required in modules: |
||||
if i not in self.traceparser.targets: |
||||
if not required: |
||||
mlog.warning('CMake: T.Optional module', mlog.bold(self._original_module_name(i)), 'for', mlog.bold(name), 'was not found') |
||||
continue |
||||
raise self._gen_exception('CMake: invalid module {} for {}.\n' |
||||
'Try to explicitly specify one or more targets with the "modules" property.\n' |
||||
'Valid targets are:\n{}'.format(self._original_module_name(i), name, list(self.traceparser.targets.keys()))) |
||||
|
||||
targets = [i] |
||||
if not autodetected_module_list: |
||||
self.found_modules += [i] |
||||
|
||||
while len(targets) > 0: |
||||
curr = targets.pop(0) |
||||
|
||||
# Skip already processed targets |
||||
if curr in processed_targets: |
||||
continue |
||||
|
||||
tgt = self.traceparser.targets[curr] |
||||
cfgs = [] |
||||
cfg = '' |
||||
otherDeps = [] |
||||
mlog.debug(tgt) |
||||
|
||||
if 'INTERFACE_INCLUDE_DIRECTORIES' in tgt.properties: |
||||
incDirs += [x for x in tgt.properties['INTERFACE_INCLUDE_DIRECTORIES'] if x] |
||||
|
||||
if 'INTERFACE_COMPILE_DEFINITIONS' in tgt.properties: |
||||
compileDefinitions += ['-D' + re.sub('^-D', '', x) for x in tgt.properties['INTERFACE_COMPILE_DEFINITIONS'] if x] |
||||
|
||||
if 'INTERFACE_COMPILE_OPTIONS' in tgt.properties: |
||||
compileOptions += [x for x in tgt.properties['INTERFACE_COMPILE_OPTIONS'] if x] |
||||
|
||||
if 'IMPORTED_CONFIGURATIONS' in tgt.properties: |
||||
cfgs = [x for x in tgt.properties['IMPORTED_CONFIGURATIONS'] if x] |
||||
cfg = cfgs[0] |
||||
|
||||
if OptionKey('b_vscrt') in self.env.coredata.options: |
||||
is_debug = self.env.coredata.get_option(OptionKey('buildtype')) == 'debug' |
||||
if self.env.coredata.options[OptionKey('b_vscrt')].value in {'mdd', 'mtd'}: |
||||
is_debug = True |
||||
else: |
||||
is_debug = self.env.coredata.get_option(OptionKey('debug')) |
||||
if is_debug: |
||||
if 'DEBUG' in cfgs: |
||||
cfg = 'DEBUG' |
||||
elif 'RELEASE' in cfgs: |
||||
cfg = 'RELEASE' |
||||
else: |
||||
if 'RELEASE' in cfgs: |
||||
cfg = 'RELEASE' |
||||
|
||||
if f'IMPORTED_IMPLIB_{cfg}' in tgt.properties: |
||||
libraries += [x for x in tgt.properties[f'IMPORTED_IMPLIB_{cfg}'] if x] |
||||
elif 'IMPORTED_IMPLIB' in tgt.properties: |
||||
libraries += [x for x in tgt.properties['IMPORTED_IMPLIB'] if x] |
||||
elif f'IMPORTED_LOCATION_{cfg}' in tgt.properties: |
||||
libraries += [x for x in tgt.properties[f'IMPORTED_LOCATION_{cfg}'] if x] |
||||
elif 'IMPORTED_LOCATION' in tgt.properties: |
||||
libraries += [x for x in tgt.properties['IMPORTED_LOCATION'] if x] |
||||
|
||||
if 'INTERFACE_LINK_LIBRARIES' in tgt.properties: |
||||
otherDeps += [x for x in tgt.properties['INTERFACE_LINK_LIBRARIES'] if x] |
||||
|
||||
if f'IMPORTED_LINK_DEPENDENT_LIBRARIES_{cfg}' in tgt.properties: |
||||
otherDeps += [x for x in tgt.properties[f'IMPORTED_LINK_DEPENDENT_LIBRARIES_{cfg}'] if x] |
||||
elif 'IMPORTED_LINK_DEPENDENT_LIBRARIES' in tgt.properties: |
||||
otherDeps += [x for x in tgt.properties['IMPORTED_LINK_DEPENDENT_LIBRARIES'] if x] |
||||
|
||||
for j in otherDeps: |
||||
if j in self.traceparser.targets: |
||||
targets += [j] |
||||
elif reg_is_lib.match(j): |
||||
libraries += [j] |
||||
elif os.path.isabs(j) and os.path.exists(j): |
||||
libraries += [j] |
||||
elif self.env.machines.build.is_windows() and reg_is_maybe_bare_lib.match(j): |
||||
# On Windows, CMake library dependencies can be passed as bare library names, |
||||
# e.g. 'version' should translate into 'version.lib'. CMake brute-forces a |
||||
# combination of prefix/suffix combinations to find the right library, however |
||||
# as we do not have a compiler environment available to us, we cannot do the |
||||
# same, but must assume any bare argument passed which is not also a CMake |
||||
# target must be a system library we should try to link against |
||||
libraries += [f"{j}.lib"] |
||||
else: |
||||
mlog.warning('CMake: Dependency', mlog.bold(j), 'for', mlog.bold(name), 'target', mlog.bold(self._original_module_name(curr)), 'was not found') |
||||
|
||||
processed_targets += [curr] |
||||
|
||||
# Make sure all elements in the lists are unique and sorted |
||||
incDirs = sorted(set(incDirs)) |
||||
compileDefinitions = sorted(set(compileDefinitions)) |
||||
compileOptions = sorted(set(compileOptions)) |
||||
libraries = sorted(set(libraries)) |
||||
|
||||
mlog.debug(f'Include Dirs: {incDirs}') |
||||
mlog.debug(f'Compiler Definitions: {compileDefinitions}') |
||||
mlog.debug(f'Compiler Options: {compileOptions}') |
||||
mlog.debug(f'Libraries: {libraries}') |
||||
|
||||
self.compile_args = compileOptions + compileDefinitions + [f'-I{x}' for x in incDirs] |
||||
self.link_args = libraries |
||||
|
||||
def _get_build_dir(self) -> Path: |
||||
build_dir = Path(self.cmake_root_dir) / f'cmake_{self.name}' |
||||
build_dir.mkdir(parents=True, exist_ok=True) |
||||
return build_dir |
||||
|
||||
def _setup_cmake_dir(self, cmake_file: str) -> Path: |
||||
# Setup the CMake build environment and return the "build" directory |
||||
build_dir = self._get_build_dir() |
||||
|
||||
# Remove old CMake cache so we can try out multiple generators |
||||
cmake_cache = build_dir / 'CMakeCache.txt' |
||||
cmake_files = build_dir / 'CMakeFiles' |
||||
if cmake_cache.exists(): |
||||
cmake_cache.unlink() |
||||
shutil.rmtree(cmake_files.as_posix(), ignore_errors=True) |
||||
|
||||
# Insert language parameters into the CMakeLists.txt and write new CMakeLists.txt |
||||
cmake_txt = mesondata['dependencies/data/' + cmake_file].data |
||||
|
||||
# In general, some Fortran CMake find_package() also require C language enabled, |
||||
# even if nothing from C is directly used. An easy Fortran example that fails |
||||
# without C language is |
||||
# find_package(Threads) |
||||
# To make this general to |
||||
# any other language that might need this, we use a list for all |
||||
# languages and expand in the cmake Project(... LANGUAGES ...) statement. |
||||
from ..cmake import language_map |
||||
cmake_language = [language_map[x] for x in self.language_list if x in language_map] |
||||
if not cmake_language: |
||||
cmake_language += ['NONE'] |
||||
|
||||
cmake_txt = textwrap.dedent(""" |
||||
cmake_minimum_required(VERSION ${{CMAKE_VERSION}}) |
||||
project(MesonTemp LANGUAGES {}) |
||||
""").format(' '.join(cmake_language)) + cmake_txt |
||||
|
||||
cm_file = build_dir / 'CMakeLists.txt' |
||||
cm_file.write_text(cmake_txt) |
||||
mlog.cmd_ci_include(cm_file.absolute().as_posix()) |
||||
|
||||
return build_dir |
||||
|
||||
def _call_cmake(self, args, cmake_file: str, env=None): |
||||
build_dir = self._setup_cmake_dir(cmake_file) |
||||
return self.cmakebin.call(args, build_dir, env=env) |
||||
|
||||
@staticmethod |
||||
def get_methods(): |
||||
return [DependencyMethods.CMAKE] |
||||
|
||||
def log_tried(self): |
||||
return self.type_name |
||||
|
||||
def log_details(self) -> str: |
||||
modules = [self._original_module_name(x) for x in self.found_modules] |
||||
modules = sorted(set(modules)) |
||||
if modules: |
||||
return 'modules: ' + ', '.join(modules) |
||||
return '' |
||||
|
||||
def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None, |
||||
configtool: T.Optional[str] = None, internal: T.Optional[str] = None, |
||||
default_value: T.Optional[str] = None, |
||||
pkgconfig_define: T.Optional[T.List[str]] = None) -> T.Union[str, T.List[str]]: |
||||
if cmake and self.traceparser is not None: |
||||
try: |
||||
v = self.traceparser.vars[cmake] |
||||
except KeyError: |
||||
pass |
||||
else: |
||||
if len(v) == 1: |
||||
return v[0] |
||||
elif v: |
||||
return v |
||||
if default_value is not None: |
||||
return default_value |
||||
raise DependencyException(f'Could not get cmake variable and no default provided for {self!r}') |
@ -0,0 +1,173 @@ |
||||
# Copyright 2013-2021 The Meson development team |
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
|
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
|
||||
from .base import ExternalDependency, DependencyException, DependencyMethods |
||||
from ..mesonlib import listify, Popen_safe, split_args, version_compare, version_compare_many |
||||
from ..programs import find_external_program |
||||
from .. import mlog |
||||
import re |
||||
import typing as T |
||||
|
||||
class ConfigToolDependency(ExternalDependency):

    """Class representing dependencies found using a config tool.

    Takes the following extra keys in kwargs that it uses internally:
    :tools List[str]: A list of tool names to use
    :version_arg str: The argument to pass to the tool to get its version
    :returncode_value int: The value of the correct returncode
        Because some tools are stupid and don't return 0
    """

    # Candidate tool names; subclasses normally override this.
    tools = None
    # Display name of the tool; defaults to tools[0] in __init__.
    tool_name = None
    # Argument used to query the tool's version.
    version_arg = '--version'
    # Matches the leading numeric (dotted) portion of a version string.
    __strip_version = re.compile(r'^[0-9][0-9.]+')
||||
|
||||
    def __init__(self, name, environment, kwargs, language: T.Optional[str] = None):
        """Search PATH for one of the configured tools and record the result.

        Leaves ``self.config`` as None (and ``self.is_found`` False) when no
        tool satisfying the version requirement was found.
        """
        super().__init__('config-tool', environment, kwargs, language=language)
        self.name = name
        # You may want to overwrite the class version in some cases
        self.tools = listify(kwargs.get('tools', self.tools))
        if not self.tool_name:
            self.tool_name = self.tools[0]
        if 'version_arg' in kwargs:
            self.version_arg = kwargs['version_arg']

        req_version = kwargs.get('version', None)
        tool, version = self.find_config(req_version, kwargs.get('returncode_value', 0))
        self.config = tool
        self.is_found = self.report_config(version, req_version)
        if not self.is_found:
            self.config = None
            return
        self.version = version
||||
|
||||
def _sanitize_version(self, version): |
||||
"""Remove any non-numeric, non-point version suffixes.""" |
||||
m = self.__strip_version.match(version) |
||||
if m: |
||||
# Ensure that there isn't a trailing '.', such as an input like |
||||
# `1.2.3.git-1234` |
||||
return m.group(0).rstrip('.') |
||||
return version |
||||
|
||||
    def find_config(self, versions: T.Optional[T.List[str]] = None, returncode: int = 0) \
            -> T.Tuple[T.Optional[str], T.Optional[str]]:
        """Helper method that searches for config tool binaries in PATH and
        returns the one that best matches the given version requirements.

        Returns a (command, version) pair; either element may be None.
        """
        if not isinstance(versions, list) and versions is not None:
            versions = listify(versions)
        best_match = (None, None)  # type: T.Tuple[T.Optional[str], T.Optional[str]]
        for potential_bin in find_external_program(
                self.env, self.for_machine, self.tool_name,
                self.tool_name, self.tools, allow_default_for_cross=False):
            if not potential_bin.found():
                continue
            tool = potential_bin.get_command()
            try:
                p, out = Popen_safe(tool + [self.version_arg])[:2]
            except (FileNotFoundError, PermissionError):
                continue
            if p.returncode != returncode:
                continue

            out = self._sanitize_version(out.strip())
            # Some tools, like pcap-config don't supply a version, but also
            # don't fail with --version, in that case just assume that there is
            # only one version and return it.
            if not out:
                return (tool, None)
            if versions:
                is_found = version_compare_many(out, versions)[0]
                # This allows returning a found version without a config tool,
                # which is useful to inform the user that you found version x,
                # but y was required.
                if not is_found:
                    tool = None
            # Keep only the highest version seen so far.
            if best_match[1]:
                if version_compare(out, '> {}'.format(best_match[1])):
                    best_match = (tool, out)
            else:
                best_match = (tool, out)

        return best_match
||||
|
||||
def report_config(self, version, req_version):
    """Log whether the config tool was found and at which version.

    :return: True when the tool is usable (self.config is set).
    """
    message = [mlog.bold(self.tool_name), 'found:']

    if self.config is not None:
        # Found: show the command line used and the detected version.
        message += [mlog.green('YES'), '({})'.format(' '.join(self.config)), version]
    else:
        message.append(mlog.red('NO'))
        # Explain the version mismatch when we know what was required.
        if version is not None and req_version is not None:
            message.append(f'found {version!r} but need {req_version!r}')
        elif req_version:
            message.append(f'need {req_version!r}')

    mlog.log(*message)

    return self.config is not None
||||
|
||||
def get_config_value(self, args: T.List[str], stage: str) -> T.List[str]:
    """Run the config tool with *args* and split its output into arguments.

    :param args: extra command-line arguments for the tool.
    :param stage: human-readable label (e.g. 'compile_args') used in the
        error message.
    :raises DependencyException: when the tool fails and the dependency
        is required; otherwise an empty list is returned.
    """
    proc, out, err = Popen_safe(self.config + args)
    if proc.returncode == 0:
        return split_args(out)
    if self.required:
        raise DependencyException(
            'Could not generate {} for {}.\n{}'.format(
                stage, self.name, err))
    return []
||||
|
||||
@staticmethod
def get_methods():
    # Detection methods this dependency class supports.
    return [DependencyMethods.AUTO, DependencyMethods.CONFIG_TOOL]
||||
|
||||
def get_configtool_variable(self, variable_name):
    """Query the config tool for ``--<variable_name>`` and return its value.

    :raises DependencyException: when the tool fails and the dependency is
        required; otherwise the (possibly empty) stripped output is
        returned regardless of the exit code.
    """
    proc, out, _ = Popen_safe(self.config + [f'--{variable_name}'])
    if proc.returncode != 0 and self.required:
        raise DependencyException(
            'Could not get variable "{}" for dependency {}'.format(
                variable_name, self.name))
    value = out.strip()
    mlog.debug(f'Got config-tool variable {variable_name} : {value}')
    return value
||||
|
||||
def log_tried(self):
    # Name reported in the "(tried ...)" part of dependency log messages.
    return self.type_name
||||
|
||||
def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None,
                 configtool: T.Optional[str] = None, internal: T.Optional[str] = None,
                 default_value: T.Optional[str] = None,
                 pkgconfig_define: T.Optional[T.List[str]] = None) -> T.Union[str, T.List[str]]:
    """Look up a variable for this dependency via the config tool.

    Only the *configtool* keyword is honored here; when the variable cannot
    be read, *default_value* is returned if given, otherwise a
    DependencyException is raised.
    """
    if configtool:
        # In the not-required case '' (empty string) is returned when the
        # variable is not found.  Since '' is a valid value to return, we
        # temporarily force required to True so a missing variable raises,
        # and restore the previous value in the finally clause.
        saved_required = self.required
        self.required = True
        try:
            return self.get_configtool_variable(configtool)
        except DependencyException:
            pass
        finally:
            self.required = saved_required
    if default_value is not None:
        return default_value
    raise DependencyException(f'Could not get config-tool variable and no default provided for {self!r}')
@ -0,0 +1,209 @@ |
||||
# Copyright 2013-2021 The Meson development team |
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
|
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
|
||||
from .base import Dependency, DependencyException, DependencyMethods, NotFoundDependency |
||||
from .cmake import CMakeDependency |
||||
from .dub import DubDependency |
||||
from .framework import ExtraFrameworkDependency |
||||
from .pkgconfig import PkgConfigDependency |
||||
|
||||
from ..mesonlib import listify, MachineChoice, PerMachine |
||||
from .. import mlog |
||||
import functools |
||||
import typing as T |
||||
|
||||
if T.TYPE_CHECKING: |
||||
from ..environment import Environment |
||||
from .factory import DependencyType |
||||
|
||||
# These must be defined in this file to avoid cyclical references.
# Maps a lower-cased dependency name to either a Dependency subclass or a
# factory callable; consumed by _build_external_dependency_list().
packages = {}
# Lower-cased names of dependencies that accept the 'language' keyword.
_packages_accept_language = set()
||||
|
||||
def get_dep_identifier(name, kwargs) -> T.Tuple:
    """Build the hashable cache key identifying a dependency() lookup."""
    identifier = (name, )
    from ..interpreter import permitted_dependency_kwargs
    assert len(permitted_dependency_kwargs) == 19, \
        'Extra kwargs have been added to dependency(), please review if it makes sense to handle it here'
    # Keywords that must not affect the cache key:
    #   'version'            - the caller must check that the version matches
    #   'native'             - handled by the caller via `for_machine`
    #   'required'           - handled separately by the caller
    #   'fallback'/'allow_fallback' - once a dependency has been found through
    #       a fallback, it should be used for the rest of the Meson run
    #   'default_options'    - only used in the fallback case
    #   'not_found_message'  - no impact on the dependency lookup
    #   'include_type'       - handled after the dependency lookup
    ignored_keys = ('version', 'native', 'required', 'fallback', 'allow_fallback',
                    'default_options', 'not_found_message', 'include_type')
    for key, value in kwargs.items():
        if key in ignored_keys:
            continue
        # All keyword arguments are strings, ints, or lists (or lists of
        # lists); freeze lists so the identifier stays hashable.
        if isinstance(value, list):
            value = frozenset(listify(value))
        identifier += (key, value)
    return identifier
||||
|
||||
# Canonical display casing for dependency names whose conventional spelling
# is not a simple capitalization; keys are the lower-cased names users pass
# to dependency().
display_name_map = {
    'boost': 'Boost',
    'cuda': 'CUDA',
    'dub': 'DUB',
    'gmock': 'GMock',
    'gtest': 'GTest',
    'hdf5': 'HDF5',
    'llvm': 'LLVM',
    'mpi': 'MPI',
    'netcdf': 'NetCDF',
    'openmp': 'OpenMP',
    'wxwidgets': 'WxWidgets',
}
||||
|
||||
def find_external_dependency(name, env, kwargs):
    """Search for an external dependency, trying each applicable method in order.

    :param name: name of the dependency as given to dependency().
    :param env: the Environment to search in.
    :param kwargs: the keyword arguments passed to dependency().
    :return: the first Dependency that was found; when nothing is found,
        raises DependencyException if the dependency is required, otherwise
        returns a NotFoundDependency.
    :raises DependencyException: on invalid keyword arguments, or when a
        required dependency cannot be found.
    """
    assert name
    required = kwargs.get('required', True)
    if not isinstance(required, bool):
        raise DependencyException('Keyword "required" must be a boolean.')
    if not isinstance(kwargs.get('method', ''), str):
        raise DependencyException('Keyword "method" must be a string.')
    lname = name.lower()
    if lname not in _packages_accept_language and 'language' in kwargs:
        raise DependencyException(f'{name} dependency does not accept "language" keyword argument')
    # Fixed error message: the keyword is 'version', not 'Version'.
    if not isinstance(kwargs.get('version', ''), (str, list)):
        raise DependencyException('Keyword "version" must be string or list.')

    # display the dependency name with correct casing
    display_name = display_name_map.get(lname, lname)

    for_machine = MachineChoice.BUILD if kwargs.get('native', False) else MachineChoice.HOST

    type_text = PerMachine('Build-time', 'Run-time')[for_machine] + ' dependency'

    # build a list of dependency methods to try
    candidates = _build_external_dependency_list(name, env, for_machine, kwargs)

    pkg_exc = []   # one entry per candidate: the exception raised, or None
    pkgdep = []    # candidates that were constructed without raising
    details = ''

    for c in candidates:
        # try this dependency method
        try:
            d = c()
            d._check_version()
            pkgdep.append(d)
        except DependencyException as e:
            pkg_exc.append(e)
            mlog.debug(str(e))
        else:
            pkg_exc.append(None)
            details = d.log_details()
            if details:
                details = '(' + details + ') '
            if 'language' in kwargs:
                details += 'for ' + d.language + ' '

            # if the dependency was found
            if d.found():

                info = []
                if d.version:
                    info.append(mlog.normal_cyan(d.version))

                log_info = d.log_info()
                if log_info:
                    info.append('(' + log_info + ')')

                mlog.log(type_text, mlog.bold(display_name), details + 'found:', mlog.green('YES'), *info)

                return d

    # otherwise, the dependency could not be found
    tried_methods = [d.log_tried() for d in pkgdep if d.log_tried()]
    tried = mlog.format_list(tried_methods) if tried_methods else ''

    mlog.log(type_text, mlog.bold(display_name), details + 'found:', mlog.red('NO'),
             f'(tried {tried})' if tried else '')

    if required:
        # if an exception occurred with the first detection method, re-raise it
        # (on the grounds that it came from the preferred dependency detection
        # method)
        if pkg_exc and pkg_exc[0]:
            raise pkg_exc[0]

        # we have a list of failed ExternalDependency objects, so we can report
        # the methods we tried to find the dependency
        raise DependencyException('Dependency "{}" not found{}'.format(
            name, ', tried {}'.format(tried) if tried else ''))

    return NotFoundDependency(env)
||||
|
||||
|
||||
def _build_external_dependency_list(name: str, env: 'Environment', for_machine: MachineChoice,
                                    kwargs: T.Dict[str, T.Any]) -> T.List['DependencyType']:
    """Assemble the ordered list of dependency constructors to try for *name*."""
    # First check if the method is valid
    if 'method' in kwargs and kwargs['method'] not in [e.value for e in DependencyMethods]:
        raise DependencyException('method {!r} is invalid'.format(kwargs['method']))

    # Is there a specific dependency detector for this dependency?
    lname = name.lower()
    if lname in packages:
        # Create the list of dependency object constructors using a factory
        # class method, if one exists, otherwise the list just consists of
        # the constructor.
        entry = packages[lname]
        if isinstance(entry, type) and issubclass(entry, Dependency):
            return [functools.partial(entry, env, kwargs)]
        return entry(env, for_machine, kwargs)

    # When a method is explicitly requested, use that detector alone.
    explicit = kwargs.get('method', '')

    if explicit == 'dub':
        return [functools.partial(DubDependency, name, env, kwargs)]

    if explicit == 'pkg-config':
        return [functools.partial(PkgConfigDependency, name, env, kwargs)]

    if explicit == 'cmake':
        return [functools.partial(CMakeDependency, name, env, kwargs)]

    candidates = []

    if explicit == 'extraframework':
        # Frameworks only exist on OSX (and other Apple platforms).
        if env.machines[for_machine].is_darwin():
            candidates.append(functools.partial(ExtraFrameworkDependency, name, env, kwargs))
        return candidates

    # Otherwise, just use the pkgconfig and cmake dependency detector
    if kwargs.get('method', 'auto') == 'auto':
        candidates.append(functools.partial(PkgConfigDependency, name, env, kwargs))

        # On OSX, also try framework dependency detector
        if env.machines[for_machine].is_darwin():
            candidates.append(functools.partial(ExtraFrameworkDependency, name, env, kwargs))

        # Only use CMake as a last resort, since it might not work 100% (see #6113)
        candidates.append(functools.partial(CMakeDependency, name, env, kwargs))

    return candidates
@ -0,0 +1,227 @@ |
||||
# Copyright 2013-2021 The Meson development team |
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
|
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
|
||||
from .base import ExternalDependency, DependencyException, DependencyMethods |
||||
from .pkgconfig import PkgConfigDependency |
||||
from ..mesonlib import Popen_safe |
||||
from ..programs import ExternalProgram |
||||
from .. import mlog |
||||
import re |
||||
import os |
||||
import copy |
||||
import json |
||||
import platform |
||||
|
||||
class DubDependency(ExternalDependency):
    """A D-language dependency resolved through the DUB package manager.

    Queries ``dub describe`` for the package, then collects compile and link
    arguments from the build settings it reports.
    """

    # Cached DUB binary shared by all instances (False once searched for and
    # not found), so the search happens at most once per Meson invocation.
    class_dubbin = None

    def __init__(self, name, environment, kwargs):
        super().__init__('dub', environment, kwargs, language='d')
        self.name = name
        self.compiler = super().get_compiler()
        self.module_path = None

        if 'required' in kwargs:
            self.required = kwargs.get('required')

        if DubDependency.class_dubbin is None:
            self.dubbin = self._check_dub()
            DubDependency.class_dubbin = self.dubbin
        else:
            self.dubbin = DubDependency.class_dubbin

        if not self.dubbin:
            if self.required:
                raise DependencyException('DUB not found.')
            self.is_found = False
            return

        mlog.debug('Determining dependency {!r} with DUB executable '
                   '{!r}'.format(name, self.dubbin.get_path()))

        # we need to know the target architecture
        arch = self.compiler.arch

        # Ask dub for the package
        ret, res = self._call_dubbin(['describe', name, '--arch=' + arch])

        if ret != 0:
            self.is_found = False
            return

        # Map the Meson compiler id onto the names DUB uses in build paths.
        comp = self.compiler.get_id().replace('llvm', 'ldc').replace('gcc', 'gdc')
        packages = []
        description = json.loads(res)
        for package in description['packages']:
            packages.append(package['name'])
            if package['name'] == name:
                self.is_found = True

                not_lib = True
                if 'targetType' in package:
                    if package['targetType'] in ['library', 'sourceLibrary', 'staticLibrary', 'dynamicLibrary']:
                        not_lib = False

                if not_lib:
                    mlog.error(mlog.bold(name), "found but it isn't a library")
                    self.is_found = False
                    return

                self.module_path = self._find_right_lib_path(package['path'], comp, description, True, package['targetFileName'])
                if not os.path.exists(self.module_path):
                    # check if the dependency was built for other archs
                    archs = [['x86_64'], ['x86'], ['x86', 'x86_mscoff']]
                    for a in archs:
                        description_a = copy.deepcopy(description)
                        description_a['architecture'] = a
                        arch_module_path = self._find_right_lib_path(package['path'], comp, description_a, True, package['targetFileName'])
                        if arch_module_path:
                            mlog.error(mlog.bold(name), "found but it wasn't compiled for", mlog.bold(arch))
                            self.is_found = False
                            return

                    mlog.error(mlog.bold(name), "found but it wasn't compiled with", mlog.bold(comp))
                    self.is_found = False
                    return

                self.version = package['version']
                self.pkg = package

        # NOTE(review): when the package never matched above, self.pkg was
        # never assigned and the next line raises AttributeError — confirm
        # whether 'describe' succeeding guarantees the package is listed.
        if self.pkg['targetFileName'].endswith('.a'):
            self.static = True

        self.compile_args = []
        for flag in self.pkg['dflags']:
            # FIX: dflags are compiler flags.  They were previously appended
            # to self.link_args, which is reassigned to a fresh list just
            # below, so they were silently discarded.
            self.compile_args.append(flag)
        for path in self.pkg['importPaths']:
            self.compile_args.append('-I' + os.path.join(self.pkg['path'], path))

        self.link_args = self.raw_link_args = []
        for flag in self.pkg['lflags']:
            self.link_args.append(flag)

        self.link_args.append(os.path.join(self.module_path, self.pkg['targetFileName']))

        # Handle dependencies
        libs = []

        def add_lib_args(field_name, target):
            # Collect libraries from the given buildSettings field and, on
            # non-Windows platforms, expand them through pkg-config.
            if field_name in target['buildSettings']:
                for lib in target['buildSettings'][field_name]:
                    if lib not in libs:
                        libs.append(lib)
                        if os.name != 'nt':
                            pkgdep = PkgConfigDependency(lib, environment, {'required': 'true', 'silent': 'true'})
                            for arg in pkgdep.get_compile_args():
                                self.compile_args.append(arg)
                            for arg in pkgdep.get_link_args():
                                self.link_args.append(arg)
                            for arg in pkgdep.get_link_args(raw=True):
                                self.raw_link_args.append(arg)

        for target in description['targets']:
            if target['rootPackage'] in packages:
                add_lib_args('libs', target)
                add_lib_args(f'libs-{platform.machine()}', target)
                for file in target['buildSettings']['linkerFiles']:
                    lib_path = self._find_right_lib_path(file, comp, description)
                    if lib_path:
                        self.link_args.append(lib_path)
                    else:
                        self.is_found = False

    def get_compiler(self):
        """Return the D compiler this dependency was resolved against."""
        return self.compiler

    def _find_right_lib_path(self, default_path, comp, description, folder_only=False, file_name=''):
        """Locate the built library inside DUB's '.dub/build' cache.

        Returns the matching directory (folder_only=True) or file path, or
        '' when no build matching the compiler/arch/buildType was found.
        """
        module_path = lib_file_name = ''
        if folder_only:
            module_path = default_path
            lib_file_name = file_name
        else:
            module_path = os.path.dirname(default_path)
            lib_file_name = os.path.basename(default_path)
        module_build_path = os.path.join(module_path, '.dub', 'build')

        # If default_path is a path to lib file and
        # directory of lib don't have subdir '.dub/build'
        if not os.path.isdir(module_build_path) and os.path.isfile(default_path):
            if folder_only:
                return module_path
            else:
                return default_path

        # Get D version implemented in the compiler
        # gdc doesn't support this
        ret, res = self._call_dubbin(['--version'])

        if ret != 0:
            # FIX: the message was passed as a literal '{!r}' placeholder plus
            # a separate argument, so it was never formatted.
            mlog.error('Failed to run', mlog.bold(comp))
            return

        d_ver = re.search('v[0-9].[0-9][0-9][0-9].[0-9]', res)  # Ex.: v2.081.2
        if d_ver is not None:
            d_ver = d_ver.group().rsplit('.', 1)[0].replace('v', '').replace('.', '')  # Fix structure. Ex.: 2081
        else:
            d_ver = ''  # gdc

        if not os.path.isdir(module_build_path):
            return ''

        # Ex.: library-debug-linux.posix-x86_64-ldc_2081-EF934983A3319F8F8FF2F0E107A363BA
        build_name = '-{}-{}-{}-{}_{}'.format(description['buildType'], '.'.join(description['platform']), '.'.join(description['architecture']), comp, d_ver)
        for entry in os.listdir(module_build_path):
            if build_name in entry:
                for file in os.listdir(os.path.join(module_build_path, entry)):
                    if file == lib_file_name:
                        if folder_only:
                            return os.path.join(module_build_path, entry)
                        else:
                            return os.path.join(module_build_path, entry, lib_file_name)

        return ''

    def _call_dubbin(self, args, env=None):
        """Run dub with *args*; return (returncode, stripped stdout)."""
        p, out = Popen_safe(self.dubbin.get_command() + args, env=env)[0:2]
        return p.returncode, out.strip()

    def _call_copmbin(self, args, env=None):
        # NOTE(review): name is misspelled ('copmbin'); kept for backward
        # compatibility with any external callers.
        p, out = Popen_safe(self.compiler.get_exelist() + args, env=env)[0:2]
        return p.returncode, out.strip()

    def _check_dub(self):
        """Search PATH for a working dub binary.

        Returns the ExternalProgram on success, or False so the negative
        result is cached (distinct from None, which means 'not searched').
        """
        dubbin = ExternalProgram('dub', silent=True)
        if dubbin.found():
            try:
                p, out = Popen_safe(dubbin.get_command() + ['--version'])[0:2]
                if p.returncode != 0:
                    mlog.warning('Found dub {!r} but couldn\'t run it'
                                 ''.format(' '.join(dubbin.get_command())))
                    # Set to False instead of None to signify that we've already
                    # searched for it and not found it
                    dubbin = False
            except (FileNotFoundError, PermissionError):
                dubbin = False
        else:
            dubbin = False
        if dubbin:
            mlog.log('Found DUB:', mlog.bold(dubbin.get_path()),
                     '(%s)' % out.strip())
        else:
            mlog.log('Found DUB:', mlog.red('NO'))
        return dubbin

    @staticmethod
    def get_methods():
        # Detection methods this dependency class supports.
        return [DependencyMethods.DUB]
@ -0,0 +1,125 @@ |
||||
# Copyright 2013-2021 The Meson development team |
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
|
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
|
||||
from .base import Dependency, ExternalDependency |
||||
from .base import DependencyException, DependencyMethods |
||||
from .base import process_method_kw |
||||
from .cmake import CMakeDependency |
||||
from .framework import ExtraFrameworkDependency |
||||
from .pkgconfig import PkgConfigDependency |
||||
from ..mesonlib import MachineChoice |
||||
import functools |
||||
import typing as T |
||||
|
||||
if T.TYPE_CHECKING: |
||||
from ..environment import Environment |
||||
from .base import DependencyType |
||||
from .configtool import ConfigToolDependency |
||||
FactoryType = T.TypeVar('FactoryType', bound=T.Callable[..., T.List[T.Callable[[], 'Dependency']]]) |
||||
|
||||
class DependencyFactory:

    """Factory to get dependencies from multiple sources.

    This class provides an initializer that takes a set of names and classes
    for various kinds of dependencies. When the initialized object is called
    it returns a list of callables return Dependency objects to try in order.

    :name: The name of the dependency. This will be passed as the name
        parameter of the each dependency unless it is overridden on a per
        type basis.
    :methods: An ordered list of DependencyMethods. This is the order
        dependencies will be returned in unless they are removed by the
        _process_method function
    :*_name: This will overwrite the name passed to the corresponding class.
        For example, if the name is 'zlib', but cmake calls the dependency
        'Z', then using `cmake_name='Z'` will pass the name as 'Z' to cmake.
    :*_class: A *type* or callable that creates a class, and has the
        signature of an ExternalDependency
    :system_class: If you pass DependencyMethods.SYSTEM in methods, you must
        set this argument.
    """

    def __init__(self, name: str, methods: T.List[DependencyMethods], *,
                 extra_kwargs: T.Optional[T.Dict[str, T.Any]] = None,
                 pkgconfig_name: T.Optional[str] = None,
                 pkgconfig_class: 'T.Type[PkgConfigDependency]' = PkgConfigDependency,
                 cmake_name: T.Optional[str] = None,
                 cmake_class: 'T.Type[CMakeDependency]' = CMakeDependency,
                 configtool_class: 'T.Optional[T.Type[ConfigToolDependency]]' = None,
                 framework_name: T.Optional[str] = None,
                 framework_class: 'T.Type[ExtraFrameworkDependency]' = ExtraFrameworkDependency,
                 system_class: 'T.Type[ExternalDependency]' = ExternalDependency):

        if DependencyMethods.CONFIG_TOOL in methods and not configtool_class:
            raise DependencyException('A configtool must have a custom class')

        self.extra_kwargs = extra_kwargs or {}
        self.methods = methods
        # Bind the method-specific name (or the generic one) to each class
        # right away; __call__ then only has to attach env and kwargs.
        self.classes = {}
        self.classes[DependencyMethods.EXTRAFRAMEWORK] = functools.partial(framework_class, framework_name or name)
        self.classes[DependencyMethods.PKGCONFIG] = functools.partial(pkgconfig_class, pkgconfig_name or name)
        self.classes[DependencyMethods.CMAKE] = functools.partial(cmake_class, cmake_name or name)
        self.classes[DependencyMethods.SYSTEM] = functools.partial(system_class, name)
        if configtool_class is not None:
            self.classes[DependencyMethods.CONFIG_TOOL] = functools.partial(configtool_class, name)
        else:
            self.classes[DependencyMethods.CONFIG_TOOL] = None

    @staticmethod
    def _process_method(method: DependencyMethods, env: 'Environment', for_machine: MachineChoice) -> bool:
        """Report whether a method is valid or not.

        If the method is valid, return true, otherwise return false. This is
        used in a list comprehension to filter methods that are not possible.

        By default this only remove EXTRAFRAMEWORK dependencies for non-mac platforms.
        """
        if method is DependencyMethods.EXTRAFRAMEWORK:
            # Extra frameworks are only valid for macOS and other Apple products.
            return env.machines[for_machine].is_darwin()
        return True

    def __call__(self, env: 'Environment', for_machine: MachineChoice,
                 kwargs: T.Dict[str, T.Any]) -> T.List['DependencyType']:
        """Return a list of Dependencies with the arguments already attached."""
        merged_kwargs = self.extra_kwargs.copy()
        merged_kwargs.update(kwargs)

        return [functools.partial(self.classes[m], env, merged_kwargs)
                for m in process_method_kw(self.methods, kwargs)
                if self._process_method(m, env, for_machine)]
||||
|
||||
|
||||
def factory_methods(methods: T.Set[DependencyMethods]) -> T.Callable[['FactoryType'], 'FactoryType']:
    """Decorator for handling methods for dependency factory functions.

    This helps to make factory functions self documenting
    >>> @factory_methods([DependencyMethods.PKGCONFIG, DependencyMethods.CMAKE])
    >>> def factory(env: Environment, for_machine: MachineChoice, kwargs: T.Dict[str, T.Any], methods: T.List[DependencyMethods]) -> T.List[T.Callable[[], 'Dependency']]:
    >>>     pass
    """

    def decorator(factory_func: 'FactoryType') -> 'FactoryType':

        @functools.wraps(factory_func)
        def wrapper(env: 'Environment', for_machine: MachineChoice, kwargs: T.Dict[str, T.Any]) -> T.List[T.Callable[[], 'Dependency']]:
            # Filter the declared methods against the user's kwargs before
            # handing them to the wrapped factory.
            return factory_func(env, for_machine, kwargs, process_method_kw(methods, kwargs))

        return T.cast('FactoryType', wrapper)

    return decorator
@ -0,0 +1,120 @@ |
||||
# Copyright 2013-2021 The Meson development team |
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
|
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
|
||||
from .base import ExternalDependency, DependencyException, DependencyMethods |
||||
from ..mesonlib import MesonException, Version |
||||
from .. import mlog |
||||
from pathlib import Path |
||||
import typing as T |
||||
|
||||
class ExtraFrameworkDependency(ExternalDependency):
    """A macOS framework found by searching framework directories directly."""

    # Compiler-reported system framework search paths.
    # NOTE(review): assigned below through `self.system_framework_paths`,
    # which creates an *instance* attribute — the class-level value stays
    # None, so this is recomputed per instance; confirm if class-wide
    # caching was intended.
    system_framework_paths = None

    def __init__(self, name, env, kwargs, language: T.Optional[str] = None):
        # 'paths' must be read before super().__init__ consumes kwargs.
        paths = kwargs.get('paths', [])
        super().__init__('extraframeworks', env, kwargs, language=language)
        self.name = name
        # Full path to framework directory
        self.framework_path = None
        if not self.clib_compiler:
            raise DependencyException('No C-like compilers are available')
        if self.system_framework_paths is None:
            try:
                self.system_framework_paths = self.clib_compiler.find_framework_paths(self.env)
            except MesonException as e:
                if 'non-clang' in str(e):
                    # Apple frameworks can only be found (and used) with the
                    # system compiler. It is not available so bail immediately.
                    self.is_found = False
                    return
                raise
        self.detect(name, paths)

    def detect(self, name, paths):
        """Search *paths* (or the system paths) for a framework named *name*.

        On success sets is_found, framework_path, compile_args and link_args.
        """
        if not paths:
            paths = self.system_framework_paths
        for p in paths:
            mlog.debug(f'Looking for framework {name} in {p}')
            # We need to know the exact framework path because it's used by the
            # Qt5 dependency class, and for setting the include path. We also
            # want to avoid searching in an invalid framework path which wastes
            # time and can cause a false positive.
            framework_path = self._get_framework_path(p, name)
            if framework_path is None:
                continue
            # We want to prefer the specified paths (in order) over the system
            # paths since these are "extra" frameworks.
            # For example, Python2's framework is in /System/Library/Frameworks and
            # Python3's framework is in /Library/Frameworks, but both are called
            # Python.framework. We need to know for sure that the framework was
            # found in the path we expect.
            allow_system = p in self.system_framework_paths
            args = self.clib_compiler.find_framework(name, self.env, [p], allow_system)
            if args is None:
                continue
            self.link_args = args
            self.framework_path = framework_path.as_posix()
            self.compile_args = ['-F' + self.framework_path]
            # We need to also add -I includes to the framework because all
            # cross-platform projects such as OpenGL, Python, Qt, GStreamer,
            # etc do not use "framework includes":
            # https://developer.apple.com/library/archive/documentation/MacOSX/Conceptual/BPFrameworks/Tasks/IncludingFrameworks.html
            incdir = self._get_framework_include_path(framework_path)
            if incdir:
                self.compile_args += ['-I' + incdir]
            self.is_found = True
            return

    def _get_framework_path(self, path, name):
        """Return the Path of *name*.framework inside *path*, or None.

        The comparison is case-insensitive on the framework's base name.
        """
        p = Path(path)
        lname = name.lower()
        for d in p.glob('*.framework/'):
            if lname == d.name.rsplit('.', 1)[0].lower():
                return d
        return None

    def _get_framework_latest_version(self, path):
        """Return the 'Versions/<latest>/Headers' subpath for *path*."""
        versions = []
        for each in path.glob('Versions/*'):
            # macOS filesystems are usually case-insensitive
            if each.name.lower() == 'current':
                continue
            versions.append(Version(each.name))
        if len(versions) == 0:
            # most system frameworks do not have a 'Versions' directory
            return 'Headers'
        return 'Versions/{}/Headers'.format(sorted(versions)[-1]._s)

    def _get_framework_include_path(self, path):
        """Return the headers directory inside the framework, or None.

        Tries the conventional locations first, then the newest version.
        """
        # According to the spec, 'Headers' must always be a symlink to the
        # Headers directory inside the currently-selected version of the
        # framework, but sometimes frameworks are broken. Look in 'Versions'
        # for the currently-selected version or pick the latest one.
        trials = ('Headers', 'Versions/Current/Headers',
                  self._get_framework_latest_version(path))
        for each in trials:
            trial = path / each
            if trial.is_dir():
                return trial.as_posix()
        return None

    @staticmethod
    def get_methods():
        # Detection methods this dependency class supports.
        return [DependencyMethods.EXTRAFRAMEWORK]

    def log_info(self):
        # Extra information shown next to "found: YES" in the log.
        return self.framework_path

    def log_tried(self):
        # Name reported in the "(tried ...)" part of dependency log messages.
        return 'framework'
@ -0,0 +1,472 @@ |
||||
# Copyright 2013-2021 The Meson development team |
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
|
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
|
||||
from .base import ExternalDependency, DependencyException, DependencyMethods, sort_libpaths |
||||
from ..mesonlib import LibType, MachineChoice, OptionKey, OrderedSet, PerMachine, Popen_safe |
||||
from ..programs import find_external_program |
||||
from .. import mlog |
||||
from pathlib import PurePath |
||||
import re |
||||
import os |
||||
import shlex |
||||
import typing as T |
||||
|
||||
if T.TYPE_CHECKING: |
||||
from ..environment import Environment |
||||
|
||||
class PkgConfigDependency(ExternalDependency): |
||||
# The class's copy of the pkg-config path. Avoids having to search for it |
||||
# multiple times in the same Meson invocation. |
||||
class_pkgbin = PerMachine(None, None) |
||||
# We cache all pkg-config subprocess invocations to avoid redundant calls |
||||
pkgbin_cache = {} |
||||
|
||||
def __init__(self, name, environment: 'Environment', kwargs, language: T.Optional[str] = None):
    """Resolve dependency *name* by querying the pkg-config binary.

    Locates pkg-config for the target machine (caching the lookup on the
    class so later instances skip the search), queries --modversion to
    decide is_found, then fills compile_args/link_args.  On failure with
    required=True this raises DependencyException; otherwise the instance
    is simply left with is_found False.
    """
    super().__init__('pkgconfig', environment, kwargs, language=language)
    self.name = name
    # Set to True later if the resolved library turns out to be a
    # libtool (.la) archive.
    self.is_libtool = False
    # Store a copy of the pkg-config path on the object itself so it is
    # stored in the pickled coredata and recovered.
    self.pkgbin = None

    # Only search for pkg-config for each machine the first time and store
    # the result in the class definition
    # (class_pkgbin holds None = never searched, False = searched and
    # missing, or the found program object).
    if PkgConfigDependency.class_pkgbin[self.for_machine] is False:
        mlog.debug('Pkg-config binary for %s is cached as not found.' % self.for_machine)
    elif PkgConfigDependency.class_pkgbin[self.for_machine] is not None:
        mlog.debug('Pkg-config binary for %s is cached.' % self.for_machine)
    else:
        assert PkgConfigDependency.class_pkgbin[self.for_machine] is None
        mlog.debug('Pkg-config binary for %s is not cached.' % self.for_machine)
        for potential_pkgbin in find_external_program(
                self.env, self.for_machine, 'pkgconfig', 'Pkg-config',
                environment.default_pkgconfig, allow_default_for_cross=False):
            # check_pkgconfig() returns the version string on success,
            # None when the candidate is unusable.
            version_if_ok = self.check_pkgconfig(potential_pkgbin)
            if not version_if_ok:
                continue
            if not self.silent:
                mlog.log('Found pkg-config:', mlog.bold(potential_pkgbin.get_path()),
                         '(%s)' % version_if_ok)
            PkgConfigDependency.class_pkgbin[self.for_machine] = potential_pkgbin
            break
        else:
            # for/else: no candidate was accepted.
            if not self.silent:
                mlog.log('Found Pkg-config:', mlog.red('NO'))
            # Set to False instead of None to signify that we've already
            # searched for it and not found it
            PkgConfigDependency.class_pkgbin[self.for_machine] = False

    self.pkgbin = PkgConfigDependency.class_pkgbin[self.for_machine]
    if self.pkgbin is False:
        self.pkgbin = None
        msg = 'Pkg-config binary for machine %s not found. Giving up.' % self.for_machine
        if self.required:
            raise DependencyException(msg)
        else:
            mlog.debug(msg)
        return

    mlog.debug('Determining dependency {!r} with pkg-config executable '
               '{!r}'.format(name, self.pkgbin.get_path()))
    # --modversion doubles as the existence check: nonzero exit means the
    # module is unknown to pkg-config, and is_found stays False.
    ret, self.version, _ = self._call_pkgbin(['--modversion', name])
    if ret != 0:
        return

    self.is_found = True

    try:
        # Fetch cargs to be used while using this dependency
        self._set_cargs()
        # Fetch the libraries and library paths needed for using this
        self._set_libs()
    except DependencyException as e:
        mlog.debug(f"pkg-config error with '{name}': {e}")
        if self.required:
            raise
        else:
            # Not required: downgrade to "not found" and remember why.
            self.compile_args = []
            self.link_args = []
            self.is_found = False
            self.reason = e
||||
def __repr__(self): |
||||
s = '<{0} {1}: {2} {3}>' |
||||
return s.format(self.__class__.__name__, self.name, self.is_found, |
||||
self.version_reqs) |
||||
|
||||
def _call_pkgbin_real(self, args, env):
    """Actually invoke pkg-config; return (returncode, stdout, stderr)."""
    command = self.pkgbin.get_command() + args
    proc, stdout, stderr = Popen_safe(command, env=env)
    stdout = stdout.strip()
    stderr = stderr.strip()
    joined = ' '.join(command)
    mlog.debug(f"Called `{joined}` -> {proc.returncode}\n{stdout}")
    return proc.returncode, stdout, stderr
||||
|
||||
@staticmethod
def setup_env(env: T.MutableMapping[str, str], environment: 'Environment', for_machine: MachineChoice,
              extra_path: T.Optional[str] = None) -> None:
    """Populate *env* in place with the PKG_CONFIG_* variables Meson needs.

    PKG_CONFIG_PATH comes from the per-machine 'pkg_config_path' option
    (plus *extra_path* if given and not already present);
    PKG_CONFIG_SYSROOT_DIR and PKG_CONFIG_LIBDIR are set only when the
    corresponding machine properties are configured.
    """
    search_paths: T.List[str] = list(environment.coredata.options[OptionKey('pkg_config_path', machine=for_machine)].value)
    if extra_path and extra_path not in search_paths:
        search_paths.append(extra_path)
    sysroot = environment.properties[for_machine].get_sys_root()
    if sysroot:
        env['PKG_CONFIG_SYSROOT_DIR'] = sysroot
    env['PKG_CONFIG_PATH'] = ':'.join(search_paths)

    libdir_prop = environment.properties[for_machine].get_pkg_config_libdir()
    if libdir_prop:
        env['PKG_CONFIG_LIBDIR'] = ':'.join(libdir_prop)
    # Dump all PKG_CONFIG environment variables to the debug log.
    for key, value in env.items():
        if key.startswith('PKG_'):
            mlog.debug(f'env[{key}]: {value}')
||||
|
||||
def _call_pkgbin(self, args, env=None):
    """Run pkg-config with *args*, memoizing by (binary, args, environment)."""
    # Always work on a copy of the environment, since setup_env() below
    # injects PKG_CONFIG_* variables into it.
    env = os.environ.copy() if env is None else env.copy()

    PkgConfigDependency.setup_env(env, self.env, self.for_machine)

    key = (self.pkgbin, tuple(args), frozenset(env.items()))
    cache = PkgConfigDependency.pkgbin_cache
    if key not in cache:
        cache[key] = self._call_pkgbin_real(args, env)
    return cache[key]
||||
|
||||
def _convert_mingw_paths(self, args: T.List[str]) -> T.List[str]: |
||||
''' |
||||
Both MSVC and native Python on Windows cannot handle MinGW-esque /c/foo |
||||
paths so convert them to C:/foo. We cannot resolve other paths starting |
||||
with / like /home/foo so leave them as-is so that the user gets an |
||||
error/warning from the compiler/linker. |
||||
''' |
||||
if not self.env.machines.build.is_windows(): |
||||
return args |
||||
converted = [] |
||||
for arg in args: |
||||
pargs = [] |
||||
# Library search path |
||||
if arg.startswith('-L/'): |
||||
pargs = PurePath(arg[2:]).parts |
||||
tmpl = '-L{}:/{}' |
||||
elif arg.startswith('-I/'): |
||||
pargs = PurePath(arg[2:]).parts |
||||
tmpl = '-I{}:/{}' |
||||
# Full path to library or .la file |
||||
elif arg.startswith('/'): |
||||
pargs = PurePath(arg).parts |
||||
tmpl = '{}:/{}' |
||||
elif arg.startswith(('-L', '-I')) or (len(arg) > 2 and arg[1] == ':'): |
||||
# clean out improper '\\ ' as comes from some Windows pkg-config files |
||||
arg = arg.replace('\\ ', ' ') |
||||
if len(pargs) > 1 and len(pargs[1]) == 1: |
||||
arg = tmpl.format(pargs[1], '/'.join(pargs[2:])) |
||||
converted.append(arg) |
||||
return converted |
||||
|
||||
def _split_args(self, cmd): |
||||
# pkg-config paths follow Unix conventions, even on Windows; split the |
||||
# output using shlex.split rather than mesonlib.split_args |
||||
return shlex.split(cmd) |
||||
|
||||
def _set_cargs(self):
    """Query `pkg-config --cflags` and store the result in self.compile_args."""
    env = None
    if self.language == 'fortran':
        # gfortran doesn't appear to look in system paths for INCLUDE files,
        # so don't allow pkg-config to suppress -I flags for system paths
        env = os.environ.copy()
        env['PKG_CONFIG_ALLOW_SYSTEM_CFLAGS'] = '1'
    returncode, output, error = self._call_pkgbin(['--cflags', self.name], env=env)
    if returncode != 0:
        raise DependencyException('Could not generate cargs for %s:\n%s\n' %
                                  (self.name, error))
    self.compile_args = self._convert_mingw_paths(self._split_args(output))
||||
|
||||
def _search_libs(self, out, out_raw):
    '''
    @out: PKG_CONFIG_ALLOW_SYSTEM_LIBS=1 pkg-config --libs
    @out_raw: pkg-config --libs

    We always look for the file ourselves instead of depending on the
    compiler to find it with -lfoo or foo.lib (if possible) because:
    1. We want to be able to select static or shared
    2. We need the full path of the library to calculate RPATH values
    3. De-dup of libraries is easier when we have absolute paths

    Libraries that are provided by the toolchain or are not found by
    find_library() will be added with -L -l pairs.

    Returns a tuple (link_args, raw_link_args).
    '''
    # Library paths should be safe to de-dup
    #
    # First, figure out what library paths to use. Originally, we were
    # doing this as part of the loop, but due to differences in the order
    # of -L values between pkg-config and pkgconf, we need to do that as
    # a separate step. See:
    # https://github.com/mesonbuild/meson/issues/3951
    # https://github.com/mesonbuild/meson/issues/4023
    #
    # Separate system and prefix paths, and ensure that prefix paths are
    # always searched first.
    prefix_libpaths = OrderedSet()
    # We also store this raw_link_args on the object later
    raw_link_args = self._convert_mingw_paths(self._split_args(out_raw))
    for arg in raw_link_args:
        if arg.startswith('-L') and not arg.startswith(('-L-l', '-L-L')):
            path = arg[2:]
            if not os.path.isabs(path):
                # Resolve the path as a compiler in the build directory would
                path = os.path.join(self.env.get_build_dir(), path)
            prefix_libpaths.add(path)
    # Library paths are not always ordered in a meaningful way
    #
    # Instead of relying on pkg-config or pkgconf to provide -L flags in a
    # specific order, we reorder library paths ourselves, according to the
    # order specified in PKG_CONFIG_PATH. See:
    # https://github.com/mesonbuild/meson/issues/4271
    #
    # Only prefix_libpaths are reordered here because there should not be
    # too many system_libpaths to cause library version issues.
    pkg_config_path: T.List[str] = self.env.coredata.options[OptionKey('pkg_config_path', machine=self.for_machine)].value
    pkg_config_path = self._convert_mingw_paths(pkg_config_path)
    prefix_libpaths = sort_libpaths(prefix_libpaths, pkg_config_path)
    system_libpaths = OrderedSet()
    full_args = self._convert_mingw_paths(self._split_args(out))
    for arg in full_args:
        if arg.startswith(('-L-l', '-L-L')):
            # These are D language arguments, not library paths
            continue
        if arg.startswith('-L') and arg[2:] not in prefix_libpaths:
            system_libpaths.add(arg[2:])
    # Use this re-ordered path list for library resolution
    libpaths = list(prefix_libpaths) + list(system_libpaths)
    # Track -lfoo libraries to avoid duplicate work
    libs_found = OrderedSet()
    # Track not-found libraries to know whether to add library paths
    libs_notfound = []
    libtype = LibType.STATIC if self.static else LibType.PREFER_SHARED
    # Generate link arguments for this library
    link_args = []
    for lib in full_args:
        if lib.startswith(('-L-l', '-L-L')):
            # These are D language arguments, add them as-is
            pass
        elif lib.startswith('-L'):
            # We already handled library paths above
            continue
        elif lib.startswith('-l'):
            # Don't resolve the same -lfoo argument again
            if lib in libs_found:
                continue
            if self.clib_compiler:
                args = self.clib_compiler.find_library(lib[2:], self.env,
                                                       libpaths, libtype)
            # If the project only uses a non-clib language such as D, Rust,
            # C#, Python, etc, all we can do is limp along by adding the
            # arguments as-is and then adding the libpaths at the end.
            else:
                args = None
            if args is not None:
                libs_found.add(lib)
                # Replace -l arg with full path to library if available
                # else, library is either to be ignored, or is provided by
                # the compiler, can't be resolved, and should be used as-is
                if args:
                    if not args[0].startswith('-l'):
                        lib = args[0]
                else:
                    continue
            else:
                # Library wasn't found, maybe we're looking in the wrong
                # places or the library will be provided with LDFLAGS or
                # LIBRARY_PATH from the environment (on macOS), and many
                # other edge cases that we can't account for.
                #
                # Add all -L paths and use it as -lfoo
                if lib in libs_notfound:
                    continue
                if self.static:
                    mlog.warning('Static library {!r} not found for dependency {!r}, may '
                                 'not be statically linked'.format(lib[2:], self.name))
                libs_notfound.append(lib)
        elif lib.endswith(".la"):
            shared_libname = self.extract_libtool_shlib(lib)
            shared_lib = os.path.join(os.path.dirname(lib), shared_libname)
            if not os.path.exists(shared_lib):
                shared_lib = os.path.join(os.path.dirname(lib), ".libs", shared_libname)

            if not os.path.exists(shared_lib):
                # BUGFIX: the concatenated message parts were missing the
                # joining spaces ("dependencies'but ... sharedlibrary").
                raise DependencyException('Got a libtools specific "%s" dependencies'
                                          ' but we could not compute the actual shared'
                                          ' library path' % lib)
            self.is_libtool = True
            lib = shared_lib
            if lib in link_args:
                continue
        link_args.append(lib)
    # Add all -Lbar args if we have -lfoo args in link_args
    if libs_notfound:
        # Order of -L flags doesn't matter with ld, but it might with other
        # linkers such as MSVC, so prepend them.
        link_args = ['-L' + lp for lp in prefix_libpaths] + link_args
    return link_args, raw_link_args
||||
|
||||
def _set_libs(self):
    """Query pkg-config for link arguments.

    Runs `pkg-config --libs` twice -- once with
    PKG_CONFIG_ALLOW_SYSTEM_LIBS=1 so system -L paths are visible for
    manual library searching, once without for the raw arguments -- and
    stores the results via _search_libs() in self.link_args and
    self.raw_link_args.  Raises DependencyException when pkg-config fails.
    """
    libcmd = ['--libs']

    if self.static:
        libcmd.append('--static')

    libcmd.append(self.name)

    # Force pkg-config to output -L fields even if they are system
    # paths so we can do manual searching with cc.find_library() later.
    env = os.environ.copy()
    env['PKG_CONFIG_ALLOW_SYSTEM_LIBS'] = '1'
    ret, out, err = self._call_pkgbin(libcmd, env=env)
    if ret != 0:
        raise DependencyException('Could not generate libs for %s:\n%s\n' %
                                  (self.name, err))
    # Also get the 'raw' output without -Lfoo system paths for adding -L
    # args with -lfoo when a library can't be found, and also in
    # gnome.generate_gir + gnome.gtkdoc which need -L -l arguments.
    ret, out_raw, err_raw = self._call_pkgbin(libcmd)
    if ret != 0:
        # BUGFIX: report stderr (err_raw) rather than stdout (out_raw),
        # and use the same message format as the first failure path.
        raise DependencyException('Could not generate libs for %s:\n%s\n' %
                                  (self.name, err_raw))
    self.link_args, self.raw_link_args = self._search_libs(out, out_raw)
||||
|
||||
def get_pkgconfig_variable(self, variable_name: str, kwargs: T.Dict[str, T.Any]) -> str:
    """Return the value of a pkg-config variable for this module.

    kwargs may contain:
      - 'define_variable': a [NAME, VALUE] pair passed to pkg-config's
        --define-variable before the lookup.
      - 'default': fallback value used when the variable does not exist.

    Returns '' when the variable is empty, or is missing and no default
    was given (a warning is logged in that case).  Raises
    DependencyException on pkg-config failure when self.required is True,
    or for a malformed 'define_variable'.
    """
    options = ['--variable=' + variable_name, self.name]

    if 'define_variable' in kwargs:
        definition = kwargs.get('define_variable', [])
        if not isinstance(definition, list):
            raise DependencyException('define_variable takes a list')

        if len(definition) != 2 or not all(isinstance(i, str) for i in definition):
            raise DependencyException('define_variable must be made up of 2 strings for VARIABLENAME and VARIABLEVALUE')

        options = ['--define-variable=' + '='.join(definition)] + options

    ret, out, err = self._call_pkgbin(options)
    variable = ''
    if ret != 0:
        # Non-required lookups swallow the failure and fall through with
        # variable == '' so the existence probe below still runs.
        if self.required:
            raise DependencyException('dependency %s not found:\n%s\n' %
                                      (self.name, err))
    else:
        variable = out.strip()

    # pkg-config doesn't distinguish between empty and non-existent variables
    # use the variable list to check for variable existence
    if not variable:
        ret, out, _ = self._call_pkgbin(['--print-variables', self.name])
        # NOTE(review): variable_name is interpolated into the regex
        # unescaped; presumably variable names never contain regex
        # metacharacters -- confirm.
        if not re.search(r'^' + variable_name + r'$', out, re.MULTILINE):
            if 'default' in kwargs:
                variable = kwargs['default']
            else:
                mlog.warning(f"pkgconfig variable '{variable_name}' not defined for dependency {self.name}.")

    mlog.debug(f'Got pkgconfig variable {variable_name} : {variable}')
    return variable
||||
|
||||
@staticmethod
def get_methods():
    """Return the dependency-detection methods this class implements."""
    return [DependencyMethods.PKGCONFIG]
||||
|
||||
def check_pkgconfig(self, pkgbin):
    """Probe the candidate pkg-config binary *pkgbin*.

    Returns its --version output on success, or None (after logging the
    reason) when the binary is missing, fails to run, or is not
    executable.
    """
    if not pkgbin.found():
        mlog.log(f'Did not find pkg-config by name {pkgbin.name!r}')
        return None
    try:
        proc, version_output = Popen_safe(pkgbin.get_command() + ['--version'])[0:2]
        if proc.returncode != 0:
            mlog.warning('Found pkg-config {!r} but it failed when run'
                         ''.format(' '.join(pkgbin.get_command())))
            return None
    except FileNotFoundError:
        mlog.warning('We thought we found pkg-config {!r} but now it\'s not there. How odd!'
                     ''.format(' '.join(pkgbin.get_command())))
        return None
    except PermissionError:
        msg = 'Found pkg-config {!r} but didn\'t have permissions to run it.'.format(' '.join(pkgbin.get_command()))
        if not self.env.machines.build.is_windows():
            msg += '\n\nOn Unix-like systems this is often caused by scripts that are not executable.'
        mlog.warning(msg)
        return None
    return version_output.strip()
||||
|
||||
def extract_field(self, la_file, fieldname): |
||||
with open(la_file) as f: |
||||
for line in f: |
||||
arr = line.strip().split('=') |
||||
if arr[0] == fieldname: |
||||
return arr[1][1:-1] |
||||
return None |
||||
|
||||
def extract_dlname_field(self, la_file): |
||||
return self.extract_field(la_file, 'dlname') |
||||
|
||||
def extract_libdir_field(self, la_file): |
||||
return self.extract_field(la_file, 'libdir') |
||||
|
||||
def extract_libtool_shlib(self, la_file): |
||||
''' |
||||
Returns the path to the shared library |
||||
corresponding to this .la file |
||||
''' |
||||
dlname = self.extract_dlname_field(la_file) |
||||
if dlname is None: |
||||
return None |
||||
|
||||
# Darwin uses absolute paths where possible; since the libtool files never |
||||
# contain absolute paths, use the libdir field |
||||
if self.env.machines[self.for_machine].is_darwin(): |
||||
dlbasename = os.path.basename(dlname) |
||||
libdir = self.extract_libdir_field(la_file) |
||||
if libdir is None: |
||||
return dlbasename |
||||
return os.path.join(libdir, dlbasename) |
||||
# From the comments in extract_libtool(), older libtools had |
||||
# a path rather than the raw dlname |
||||
return os.path.basename(dlname) |
||||
|
||||
def log_tried(self): |
||||
return self.type_name |
||||
|
||||
def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None, |
||||
configtool: T.Optional[str] = None, internal: T.Optional[str] = None, |
||||
default_value: T.Optional[str] = None, |
||||
pkgconfig_define: T.Optional[T.List[str]] = None) -> T.Union[str, T.List[str]]: |
||||
if pkgconfig: |
||||
kwargs = {} |
||||
if default_value is not None: |
||||
kwargs['default'] = default_value |
||||
if pkgconfig_define is not None: |
||||
kwargs['define_variable'] = pkgconfig_define |
||||
try: |
||||
return self.get_pkgconfig_variable(pkgconfig, kwargs) |
||||
except DependencyException: |
||||
pass |
||||
if default_value is not None: |
||||
return default_value |
||||
raise DependencyException(f'Could not get pkg-config variable and no default provided for {self!r}') |
Loading…
Reference in new issue