# Copyright 2012-2020 The Meson development team

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

#     http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import itertools
import os, platform, re, sys, shutil
import typing as T
import collections

from . import coredata
from . import mesonlib
from .mesonlib import (
    MesonException, EnvironmentException, MachineChoice, Popen_safe, PerMachine,
    PerMachineDefaultable, PerThreeMachineDefaultable, split_args, quote_arg, OptionKey,
    search_version, MesonBugException
)
from . import mlog
from .programs import (
    ExternalProgram, EmptyExternalProgram
)

from .envconfig import (
    BinaryTable, MachineInfo, Properties, known_cpu_families, CMakeVariables,
)
from . import compilers
from .compilers import (
    Compiler,
    is_assembly,
    is_header,
    is_library,
    is_llvm_ir,
    is_object,
    is_source,
)

from functools import lru_cache
from mesonbuild import envconfig

if T.TYPE_CHECKING:
    from configparser import ConfigParser

    from .dependencies import ExternalProgram
    from .wrap.wrap import Resolver

build_filename = 'meson.build'

CompilersDict = T.Dict[str, Compiler]

if T.TYPE_CHECKING:
    import argparse


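# Illustrative sketch of how the helper below resolves variables (the variable
# names used here are hypothetical examples, not taken from a real run):
#
#   Native build (is_cross=False): both the build and the host machine read
#   the plain variable, e.g. _get_env_var(MachineChoice.HOST, False, 'CFLAGS')
#   looks only at $CFLAGS.
#
#   Cross build (is_cross=True): the build machine reads only the prefixed
#   variable, e.g. _get_env_var(MachineChoice.BUILD, True, 'CFLAGS') looks at
#   $CFLAGS_FOR_BUILD, while the host machine still reads plain $CFLAGS.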
def _get_env_var(for_machine: MachineChoice, is_cross: bool, var_name: str) -> T.Optional[str]:
    """
    Returns the value of the environment variable that applies to this
    machine, or None if it is not set.
    """
    candidates = PerMachine(
        # The prefixed build version takes priority, but if we are native
        # compiling we fall back on the unprefixed host version. This
        # allows native builds to never need to worry about the 'BUILD_*'
        # ones.
        ([var_name + '_FOR_BUILD'] if is_cross else [var_name]),
        # Always just the unprefixed host versions
        [var_name]
    )[for_machine]
    for var in candidates:
        value = os.environ.get(var)
        if value is not None:
            break
    else:
        formatted = ', '.join([f'{var!r}' for var in candidates])
        mlog.debug(f'None of {formatted} are defined in the environment, not changing global flags.')
        return None
    mlog.debug(f'Using {var!r} from environment with value: {value!r}')
    return value


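# A note on the return shape of detect_gcovr() below: on success it returns
# the tuple ('gcovr', <True if the found version is >= new_rootdir_version>),
# on failure (None, None). The boolean lets callers adapt to the root-dir
# behaviour change in gcovr 4.2 (this purpose is inferred from the parameter
# name, not stated elsewhere in this file).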
def detect_gcovr(min_version='3.3', new_rootdir_version='4.2', log=False):
    gcovr_exe = 'gcovr'
    try:
        p, found = Popen_safe([gcovr_exe, '--version'])[0:2]
    except (FileNotFoundError, PermissionError):
        # Doesn't exist in PATH or isn't executable
        return None, None
    found = search_version(found)
    if p.returncode == 0 and mesonlib.version_compare(found, '>=' + min_version):
        if log:
            mlog.log('Found gcovr-{} at {}'.format(found, quote_arg(shutil.which(gcovr_exe))))
        return gcovr_exe, mesonlib.version_compare(found, '>=' + new_rootdir_version)
    return None, None

def detect_llvm_cov():
    tools = get_llvm_tool_names('llvm-cov')
    for tool in tools:
        if mesonlib.exe_exists([tool, '--version']):
            return tool
    return None

def find_coverage_tools() -> T.Tuple[T.Optional[str], T.Optional[str], T.Optional[str], T.Optional[str], T.Optional[str]]:
    gcovr_exe, gcovr_new_rootdir = detect_gcovr()

    llvm_cov_exe = detect_llvm_cov()

    lcov_exe = 'lcov'
    genhtml_exe = 'genhtml'

    if not mesonlib.exe_exists([lcov_exe, '--version']):
        lcov_exe = None
    if not mesonlib.exe_exists([genhtml_exe, '--version']):
        genhtml_exe = None

    return gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe, llvm_cov_exe

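# Hypothetical usage of the ninja detection below: setting the NINJA
# environment variable short-circuits the search, e.g.
#
#   NINJA=/opt/samurai/bin/samu meson setup builddir
#
# makes detect_ninja() probe only that binary; otherwise 'ninja',
# 'ninja-build' and 'samu' are tried in order and the first one that is at
# least the requested version wins. (The `meson setup` invocation above is an
# illustrative assumption; only the environment handling comes from the code.)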
def detect_ninja(version: str = '1.8.2', log: bool = False) -> T.Optional[T.List[str]]:
    r = detect_ninja_command_and_version(version, log)
    return r[0] if r else None

def detect_ninja_command_and_version(version: str = '1.8.2', log: bool = False) -> T.Optional[T.Tuple[T.List[str], str]]:
    env_ninja = os.environ.get('NINJA', None)
    for n in [env_ninja] if env_ninja else ['ninja', 'ninja-build', 'samu']:
        prog = ExternalProgram(n, silent=True)
        if not prog.found():
            continue
        try:
            p, found = Popen_safe(prog.command + ['--version'])[0:2]
        except (FileNotFoundError, PermissionError):
            # Doesn't exist in PATH or isn't executable
            continue
        found = found.strip()
        # Perhaps we should add a way for the caller to know the failure mode
        # (not found or too old)
        if p.returncode == 0 and mesonlib.version_compare(found, '>=' + version):
            if log:
                name = os.path.basename(n)
                if name.endswith('-' + found):
                    name = name[0:-1 - len(found)]
                if name == 'ninja-build':
                    name = 'ninja'
                if name == 'samu':
                    name = 'samurai'
                mlog.log('Found {}-{} at {}'.format(name, found,
                         ' '.join([quote_arg(x) for x in prog.command])))
            return (prog.command, found)

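# Illustrative expansion of the helper below (first and last few entries only):
#
#   get_llvm_tool_names('llvm-cov')
#   # -> ['llvm-cov', 'llvm-cov-12', 'llvm-cov12', 'llvm-cov-11', 'llvm-cov11',
#   #     ..., 'llvm-cov-13', 'llvm-cov-devel']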
def get_llvm_tool_names(tool: str) -> T.List[str]:
    # Ordered list of possible suffixes of LLVM executables to try. Start with
    # base, then try newest back to oldest (3.5 is arbitrary), and finally the
    # devel version. Please note that the development snapshot in Debian does
    # not have a distinct name. Do not move it to the beginning of the list
    # unless it becomes a stable release.
    suffixes = [
        '', # base (no suffix)
        '-12', '12',
        '-11', '11',
        '-10', '10',
        '-9', '90',
        '-8', '80',
        '-7', '70',
        '-6.0', '60',
        '-5.0', '50',
        '-4.0', '40',
        '-3.9', '39',
        '-3.8', '38',
        '-3.7', '37',
        '-3.6', '36',
        '-3.5', '35',
        '-13',    # Debian development snapshot
        '-devel', # FreeBSD development snapshot
    ]
    names = []
    for suffix in suffixes:
        names.append(tool + suffix)
    return names

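# Hypothetical override for the scan-build lookup below: exporting SCANBUILD
# (e.g. SCANBUILD=/usr/local/bin/scan-build-12, a made-up path) makes this
# function check that binary instead of probing the suffixed candidate names.
# Note that only the first word of SCANBUILD ends up in the returned list.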
def detect_scanbuild() -> T.List[str]:
    """ Look for scan-build binary on build platform

    First, if a SCANBUILD env variable has been provided, give it precedence
    on all platforms.

    For most platforms, scan-build is found if the PATH contains a binary
    named "scan-build". However, some distributions' package managers (e.g.
    FreeBSD's) don't install it under that name. For those, loop through a
    list of candidates to see if one is available.

    Return: a single-element list of the found scan-build binary ready to be
    passed to Popen()
    """
    exelist = []
    if 'SCANBUILD' in os.environ:
        exelist = split_args(os.environ['SCANBUILD'])

    else:
        tools = get_llvm_tool_names('scan-build')
        for tool in tools:
            if shutil.which(tool) is not None:
                exelist = [shutil.which(tool)]
                break

    if exelist:
        tool = exelist[0]
        if os.path.isfile(tool) and os.access(tool, os.X_OK):
            return [tool]
    return []

def detect_clangformat() -> T.List[str]:
    """ Look for clang-format binary on build platform

    Do the same thing as detect_scanbuild to find clang-format except it
    currently does not check the environment variable.

    Return: a single-element list of the found clang-format binary ready to be
    passed to Popen()
    """
    tools = get_llvm_tool_names('clang-format')
    for tool in tools:
        path = shutil.which(tool)
        if path is not None:
            return [path]
    return []

def detect_native_windows_arch() -> str:
    """
    The architecture of Windows itself: x86, amd64 or arm64
    """
    # These env variables are always available. See:
    # https://msdn.microsoft.com/en-us/library/aa384274(VS.85).aspx
    # https://blogs.msdn.microsoft.com/david.wang/2006/03/27/howto-detect-process-bitness/
    arch = os.environ.get('PROCESSOR_ARCHITEW6432', '').lower()
    if not arch:
        try:
            # If this doesn't exist, something is messing with the environment
            arch = os.environ['PROCESSOR_ARCHITECTURE'].lower()
        except KeyError:
            raise EnvironmentException('Unable to detect native OS architecture')
    return arch

def detect_windows_arch(compilers: CompilersDict) -> str:
    """
    Detecting the 'native' architecture of Windows is not a trivial task. We
    cannot trust that the architecture that Python is built for is the 'native'
    one because you can run 32-bit apps on 64-bit Windows using WOW64 and
    people sometimes install 32-bit Python on 64-bit Windows.

    We also can't rely on the architecture of the OS itself, since it's
    perfectly normal to compile and run 32-bit applications on Windows as if
    they were native applications. It's a terrible experience to require the
    user to supply a cross-info file to compile 32-bit applications on 64-bit
    Windows. Thankfully, the only way to compile things with Visual Studio on
    Windows is by entering the 'msvc toolchain' environment, which can be
    easily detected.

    In the end, the sanest method is as follows:
    1. Check environment variables that are set by Windows and WOW64 to find out
       if this is x86 (possibly in WOW64), if so use that as our 'native'
       architecture.
    2. If the compiler toolchain target architecture is x86, use that as our
       'native' architecture.
    3. Otherwise, use the actual Windows architecture

    """
    os_arch = detect_native_windows_arch()
    if os_arch == 'x86':
        return os_arch
    # If we're on 64-bit Windows, 32-bit apps can be compiled without
    # cross-compilation. So if we're doing that, just set the native arch as
    # 32-bit and pretend like we're running under WOW64. Else, return the
    # actual Windows architecture that we deduced above.
    for compiler in compilers.values():
        if compiler.id == 'msvc' and (compiler.target == 'x86' or compiler.target == '80x86'):
            return 'x86'
        if compiler.id == 'clang-cl' and compiler.target == 'x86':
            return 'x86'
        if compiler.id == 'gcc' and compiler.has_builtin_define('__i386__'):
            return 'x86'
    return os_arch

def any_compiler_has_define(compilers: CompilersDict, define: str) -> bool:
    for c in compilers.values():
        try:
            if c.has_builtin_define(define):
                return True
        except mesonlib.MesonException:
            # Ignore compilers that do not support has_builtin_define.
            pass
    return False

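# A few illustrative canonicalizations performed by detect_cpu_family() below
# (inputs are hypothetical values of platform.machine()/platform.processor()):
#
#   'i686'       -> 'x86'
#   'arm64'      -> 'aarch64'
#   'aarch64_be' -> 'aarch64'
#   'amd64'      -> 'x86_64'  (then 'x86' if a compiler defines __i386__)
#   'ppc64le'    -> 'ppc64'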
def detect_cpu_family(compilers: CompilersDict) -> str:
    """
    Python is inconsistent in its platform module.
    It returns different values for the same cpu.
    For x86 it might return 'x86', 'i686' or somesuch.
    Do some canonicalization.
    """
    if mesonlib.is_windows():
        trial = detect_windows_arch(compilers)
    elif mesonlib.is_freebsd() or mesonlib.is_netbsd() or mesonlib.is_openbsd() or mesonlib.is_qnx() or mesonlib.is_aix():
        trial = platform.processor().lower()
    else:
        trial = platform.machine().lower()
    if trial.startswith('i') and trial.endswith('86'):
        trial = 'x86'
    elif trial == 'bepc':
        trial = 'x86'
    elif trial == 'arm64':
        trial = 'aarch64'
    elif trial.startswith('aarch64'):
        # This can be `aarch64_be`
        trial = 'aarch64'
    elif trial.startswith('arm') or trial.startswith('earm'):
        trial = 'arm'
    elif trial.startswith(('powerpc64', 'ppc64')):
        trial = 'ppc64'
    elif trial.startswith(('powerpc', 'ppc')) or trial in {'macppc', 'power macintosh'}:
        trial = 'ppc'
    elif trial in ('amd64', 'x64', 'i86pc'):
        trial = 'x86_64'
    elif trial in {'sun4u', 'sun4v'}:
        trial = 'sparc64'
    elif trial.startswith('mips'):
        if '64' not in trial:
            trial = 'mips'
        else:
            trial = 'mips64'
    elif trial in {'ip30', 'ip35'}:
        trial = 'mips64'

    # On Linux (and maybe others) there can be any mixture of 32/64 bit code in
    # the kernel, Python, system, 32-bit chroot on 64-bit host, etc. The only
    # reliable way to know is to check the compiler defines.
    if trial == 'x86_64':
        if any_compiler_has_define(compilers, '__i386__'):
            trial = 'x86'
    elif trial == 'aarch64':
        if any_compiler_has_define(compilers, '__arm__'):
            trial = 'arm'
    # Add more quirks here as bugs are reported. Keep in sync with detect_cpu()
    # below.
    elif trial == 'parisc64':
        # ATM there is no 64 bit userland for PA-RISC. Thus always
        # report it as 32 bit for simplicity.
        trial = 'parisc'
    elif trial == 'ppc':
        # AIX always returns powerpc, check here for 64-bit
        if any_compiler_has_define(compilers, '__64BIT__'):
            trial = 'ppc64'

    if trial not in known_cpu_families:
        mlog.warning(f'Unknown CPU family {trial!r}, please report this at '
                     'https://github.com/mesonbuild/meson/issues/new with the '
                     'output of `uname -a` and `cat /proc/cpuinfo`')

    return trial

def detect_cpu(compilers: CompilersDict) -> str:
    if mesonlib.is_windows():
        trial = detect_windows_arch(compilers)
    elif mesonlib.is_freebsd() or mesonlib.is_netbsd() or mesonlib.is_openbsd() or mesonlib.is_aix():
        trial = platform.processor().lower()
    else:
        trial = platform.machine().lower()

    if trial in ('amd64', 'x64', 'i86pc'):
        trial = 'x86_64'
    if trial == 'x86_64':
        # Same check as above for cpu_family
        if any_compiler_has_define(compilers, '__i386__'):
            trial = 'i686' # All 64 bit cpus have at least this level of x86 support.
    elif trial.startswith('aarch64'):
        # Same check as above for cpu_family
        if any_compiler_has_define(compilers, '__arm__'):
            trial = 'arm'
        else:
            # for aarch64_be
            trial = 'aarch64'
    elif trial.startswith('earm'):
        trial = 'arm'
    elif trial == 'e2k':
        # Make more precise CPU detection for Elbrus platform.
        trial = platform.processor().lower()
    elif trial.startswith('mips'):
        if '64' not in trial:
            trial = 'mips'
        else:
            trial = 'mips64'
    elif trial == 'ppc':
        # AIX always returns powerpc, check here for 64-bit
        if any_compiler_has_define(compilers, '__64BIT__'):
            trial = 'ppc64'

    # Add more quirks here as bugs are reported. Keep in sync with
    # detect_cpu_family() above.
    return trial

def detect_system() -> str:
    if sys.platform == 'cygwin':
        return 'cygwin'
    return platform.system().lower()

def detect_msys2_arch() -> T.Optional[str]:
    return os.environ.get('MSYSTEM_CARCH', None)

def detect_machine_info(compilers: T.Optional[CompilersDict] = None) -> MachineInfo:
    """Detect the machine we're running on

    If compilers are not provided, we cannot know as much. None out those
    fields to avoid accidentally depending on partial knowledge. The
    underlying `detect_*` functions can be called directly to explicitly use
    the partial information.
    """
    return MachineInfo(
        detect_system(),
        detect_cpu_family(compilers) if compilers is not None else None,
        detect_cpu(compilers) if compilers is not None else None,
        sys.byteorder)

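# Illustrative behaviour of machine_info_can_run() below: a MachineInfo whose
# cpu_family is 'x86' is considered runnable on an 'x86_64' build machine with
# the same operating system, which is why a 32-bit host usually needs no
# exe_wrapper on a 64-bit builder (see need_exe_wrapper() further down).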
# TODO make this compare two `MachineInfo`s purely. How important is the
# `detect_cpu_family({})` distinction? It is the one impediment to that.
def machine_info_can_run(machine_info: MachineInfo) -> bool:
    """Whether we can run binaries for this machine on the current machine.

    Can almost always run 32-bit binaries on 64-bit natively if the host
    and build systems are the same. We don't pass any compilers to
    detect_cpu_family() here because we always want to know the OS
    architecture, not what the compiler environment tells us.
    """
    if machine_info.system != detect_system():
        return False
    true_build_cpu_family = detect_cpu_family({})
    return \
        (machine_info.cpu_family == true_build_cpu_family) or \
        ((true_build_cpu_family == 'x86_64') and (machine_info.cpu_family == 'x86')) or \
        ((true_build_cpu_family == 'aarch64') and (machine_info.cpu_family == 'arm'))

class Environment:
    private_dir = 'meson-private'
    log_dir = 'meson-logs'
    info_dir = 'meson-info'

    def __init__(self, source_dir: T.Optional[str], build_dir: T.Optional[str], options: 'argparse.Namespace') -> None:
        self.source_dir = source_dir
        self.build_dir = build_dir
        # Do not try to create build directories when build_dir is None.
        # This reduced mode is used by the --buildoptions introspector.
        if build_dir is not None:
            self.scratch_dir = os.path.join(build_dir, Environment.private_dir)
            self.log_dir = os.path.join(build_dir, Environment.log_dir)
            self.info_dir = os.path.join(build_dir, Environment.info_dir)
            os.makedirs(self.scratch_dir, exist_ok=True)
            os.makedirs(self.log_dir, exist_ok=True)
            os.makedirs(self.info_dir, exist_ok=True)
            try:
                self.coredata = coredata.load(self.get_build_dir()) # type: coredata.CoreData
                self.first_invocation = False
            except FileNotFoundError:
                self.create_new_coredata(options)
            except coredata.MesonVersionMismatchException as e:
                # This is routine, but tell the user the update happened
                mlog.log('Regenerating configuration from scratch:', str(e))
                coredata.read_cmd_line_file(self.build_dir, options)
                self.create_new_coredata(options)
            except MesonException as e:
                # If we stored previous command line options, we can recover from
                # a broken/outdated coredata.
                if os.path.isfile(coredata.get_cmd_line_file(self.build_dir)):
                    mlog.warning('Regenerating configuration from scratch.')
                    mlog.log('Reason:', mlog.red(str(e)))
                    coredata.read_cmd_line_file(self.build_dir, options)
                    self.create_new_coredata(options)
                else:
                    raise e
        else:
            # Just create a fresh coredata in this case
            self.scratch_dir = ''
            self.create_new_coredata(options)

        ## locally bind some unfrozen configuration

        # Stores machine infos. This is the only *three*-machine variable,
        # because we track a target machine info for the user (Meson itself
        # never cares about the target machine).
        machines: PerThreeMachineDefaultable[MachineInfo] = PerThreeMachineDefaultable()

        # Similar to coredata.compilers, but lower level in that there is no
        # meta data, only names/paths.
        binaries = PerMachineDefaultable() # type: PerMachineDefaultable[BinaryTable]

        # Misc other properties about each machine.
        properties = PerMachineDefaultable() # type: PerMachineDefaultable[Properties]

        # CMake toolchain variables
        cmakevars = PerMachineDefaultable() # type: PerMachineDefaultable[CMakeVariables]

        ## Setup build machine defaults

        # Will be fully initialized later, once the compilers are known.
        machines.build = detect_machine_info()

        # Just uses hard-coded defaults and environment variables. Might be
        # overwritten by a native file.
        binaries.build = BinaryTable()
        properties.build = Properties()

        # Options with the key parsed into an OptionKey type.
        #
        # Note that order matters because of 'buildtype': if it comes after
        # the 'optimization' and 'debug' keys, it overrides them.
        self.options: T.MutableMapping[OptionKey, T.Union[str, T.List[str]]] = collections.OrderedDict()

        ## Read in native file(s) to override build machine configuration

        if self.coredata.config_files is not None:
            config = coredata.parse_machine_files(self.coredata.config_files)
            binaries.build = BinaryTable(config.get('binaries', {}))
            properties.build = Properties(config.get('properties', {}))
            cmakevars.build = CMakeVariables(config.get('cmake', {}))
            self._load_machine_file_options(
                config, properties.build,
                MachineChoice.BUILD if self.coredata.cross_files else MachineChoice.HOST)

        ## Read in cross file(s) to override host machine configuration

        if self.coredata.cross_files:
            config = coredata.parse_machine_files(self.coredata.cross_files)
            properties.host = Properties(config.get('properties', {}))
            binaries.host = BinaryTable(config.get('binaries', {}))
            cmakevars.host = CMakeVariables(config.get('cmake', {}))
            if 'host_machine' in config:
                machines.host = MachineInfo.from_literal(config['host_machine'])
            if 'target_machine' in config:
                machines.target = MachineInfo.from_literal(config['target_machine'])
            # Keep only per machine options from the native file. The cross
            # file takes precedence over all other options.
            for key, value in list(self.options.items()):
                if self.coredata.is_per_machine_option(key):
                    self.options[key.as_build()] = value
            self._load_machine_file_options(config, properties.host, MachineChoice.HOST)


        ## "Freeze" the now-initialized configuration and store it on the instance.

        self.machines = machines.default_missing()
        self.binaries = binaries.default_missing()
        self.properties = properties.default_missing()
        self.cmakevars = cmakevars.default_missing()

        # Command line options override those from cross/native files
        self.options.update(options.cmd_line_options)

        # Take default value from env if not set in cross/native files or command line.
        self._set_default_options_from_env()
        self._set_default_binaries_from_env()
        self._set_default_properties_from_env()

        # Warn if the user is using two different ways of setting build-type
        # options that override each other
        bt = OptionKey('buildtype')
        db = OptionKey('debug')
        op = OptionKey('optimization')
        if bt in self.options and (db in self.options or op in self.options):
            mlog.warning('Recommend using either -Dbuildtype or -Doptimization + -Ddebug. '
                         'Using both is redundant since they override each other. '
                         'See: https://mesonbuild.com/Builtin-options.html#build-type-options')

        exe_wrapper = self.lookup_binary_entry(MachineChoice.HOST, 'exe_wrapper')
        if exe_wrapper is not None:
            self.exe_wrapper = ExternalProgram.from_bin_list(self, MachineChoice.HOST, 'exe_wrapper')
        else:
            self.exe_wrapper = None

        self.default_cmake = ['cmake']
        self.default_pkgconfig = ['pkg-config']
        self.wrap_resolver: T.Optional['Resolver'] = None

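    # A hedged example of the machine-file input that the method below parses.
    # The section names follow the parsing logic here; the keys and values are
    # made up for illustration:
    #
    #   [built-in options]
    #   c_args = ['-O2', '-g']
    #   pkg_config_path = '/opt/sysroot/usr/lib/pkgconfig'
    #
    #   [project options]
    #   some_feature = 'enabled'
    #
    #   [mysubproject:built-in options]
    #   default_library = 'static'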
    def _load_machine_file_options(self, config: 'ConfigParser', properties: Properties, machine: MachineChoice) -> None:
        """Read the contents of a Machine file and put it in the options store."""

        # Look for any options in the deprecated paths section, warn about
        # those, then assign them. They will be overwritten by the ones in the
        # "built-in options" section if they're in both sections.
        paths = config.get('paths')
        if paths:
            mlog.deprecation('The [paths] section is deprecated, use the [built-in options] section instead.')
            for k, v in paths.items():
                self.options[OptionKey.from_string(k).evolve(machine=machine)] = v

        # Next look for compiler options in the "properties" section, this is
        # also deprecated, and these will also be overwritten by the "built-in
        # options" section. We need to remove these from this section, as well.
        deprecated_properties: T.Set[str] = set()
        for lang in compilers.all_languages:
            deprecated_properties.add(lang + '_args')
            deprecated_properties.add(lang + '_link_args')
        for k, v in properties.properties.copy().items():
            if k in deprecated_properties:
                mlog.deprecation(f'{k} in the [properties] section of the machine file is deprecated, use the [built-in options] section.')
                self.options[OptionKey.from_string(k).evolve(machine=machine)] = v
                del properties.properties[k]

        for section, values in config.items():
            if ':' in section:
                subproject, section = section.split(':')
            else:
                subproject = ''
            if section == 'built-in options':
                for k, v in values.items():
                    key = OptionKey.from_string(k)
                    # If we're in the cross file, and there is a `build.foo` warn about that. Later we'll remove it.
                    if machine is MachineChoice.HOST and key.machine is not machine:
                        mlog.deprecation('Setting build machine options in cross files, please use a native file instead, this will be removed in meson 0.60', once=True)
                    if key.subproject:
                        raise MesonException('Do not set subproject options in [built-in options] section, use [subproject:built-in options] instead.')
                    self.options[key.evolve(subproject=subproject, machine=machine)] = v
            elif section == 'project options' and machine is MachineChoice.HOST:
                # Project options are only for the host machine, we don't want
                # to read these from the native file
                for k, v in values.items():
                    # Project options are always for the host machine
                    key = OptionKey.from_string(k)
                    if key.subproject:
                        raise MesonException('Do not set subproject options in [built-in options] section, use [subproject:built-in options] instead.')
                    self.options[key.evolve(subproject=subproject)] = v

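    # Rough mapping applied by the method below (derived from the code; the
    # concrete compiler variables come from CFLAGS_MAPPING plus the fixed list):
    #
    #   CFLAGS   -> c_args    (stored as env_args until the compiler exists)
    #   CXXFLAGS -> cpp_args  (same)
    #   LDFLAGS  -> <lang>_link_args for every language that uses LDFLAGS
    #   CPPFLAGS -> env_args for every language that uses CPPFLAGS
    #   PKG_CONFIG_PATH -> pkg_config_path, CMAKE_PREFIX_PATH -> cmake_prefix_path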
    def _set_default_options_from_env(self) -> None:
        opts: T.List[T.Tuple[str, str]] = (
            [(v, f'{k}_args') for k, v in compilers.compilers.CFLAGS_MAPPING.items()] +
            [
                ('PKG_CONFIG_PATH', 'pkg_config_path'),
                ('CMAKE_PREFIX_PATH', 'cmake_prefix_path'),
                ('LDFLAGS', 'ldflags'),
                ('CPPFLAGS', 'cppflags'),
            ]
        )

        env_opts: T.DefaultDict[OptionKey, T.List[str]] = collections.defaultdict(list)

        for (evar, keyname), for_machine in itertools.product(opts, MachineChoice):
            p_env = _get_env_var(for_machine, self.is_cross_build(), evar)
            if p_env is not None:
                # these may contain duplicates, which must be removed, else
                # a duplicates-in-array-option warning arises.
                if keyname == 'cmake_prefix_path':
                    if self.machines[for_machine].is_windows():
                        # Cannot split on ':' on Windows because it's part of the drive letter
                        _p_env = p_env.split(os.pathsep)
                    else:
                        # https://github.com/mesonbuild/meson/issues/7294
                        _p_env = re.split(r':|;', p_env)
                    p_list = list(mesonlib.OrderedSet(_p_env))
                elif keyname == 'pkg_config_path':
                    p_list = list(mesonlib.OrderedSet(p_env.split(':')))
                else:
                    p_list = split_args(p_env)
                p_list = [e for e in p_list if e] # filter out any empty elements

                # Take env vars only on first invocation; if the env changes when
                # reconfiguring, it gets ignored.
                # FIXME: We should remember if we took the value from env to warn
                # if it changes on future invocations.
                if self.first_invocation:
                    if keyname == 'ldflags':
                        key = OptionKey('link_args', machine=for_machine, lang='c') # needs a language to initialize properly
                        for lang in compilers.compilers.LANGUAGES_USING_LDFLAGS:
                            key = key.evolve(lang=lang)
                            env_opts[key].extend(p_list)
                    elif keyname == 'cppflags':
                        key = OptionKey('env_args', machine=for_machine, lang='c')
                        for lang in compilers.compilers.LANGUAGES_USING_CPPFLAGS:
                            key = key.evolve(lang=lang)
                            env_opts[key].extend(p_list)
                    else:
                        key = OptionKey.from_string(keyname).evolve(machine=for_machine)
                        if evar in compilers.compilers.CFLAGS_MAPPING.values():
                            # If this is an environment variable, we have to
                            # store it separately until the compiler is
                            # instantiated, as we don't know whether the
                            # compiler will want to use these arguments at link
                            # time and compile time (instead of just at compile
                            # time) until we're instantiating that `Compiler`
                            # object. This is required so that passing
                            # `-Dc_args=` on the command line and `$CFLAGS`
                            # have subtly different behavior. `$CFLAGS` will be
                            # added to the linker command line if the compiler
                            # acts as a linker driver, `-Dc_args` will not.
                            #
                            # We still use the original key as the base here, as
                            # we want to inherit the machine and the compiler
                            # language.
                            key = key.evolve('env_args')
                        env_opts[key].extend(p_list)

        # Only store options that are not already in self.options,
        # otherwise we'd override the machine files
        for k, v in env_opts.items():
            if k not in self.options:
                self.options[k] = v

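    # Hypothetical example for the method below: with PKG_CONFIG set in the
    # environment (or PKG_CONFIG_FOR_BUILD in a cross build, per _get_env_var
    # above), that value becomes the default pkg-config binary unless a
    # machine file already named one.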
    def _set_default_binaries_from_env(self) -> None:
        """Set default binaries from the environment.

        For example, pkg-config can be set via PKG_CONFIG, or in the machine
        file. We want to set the default to the env variable.
        """
        opts = itertools.chain(envconfig.DEPRECATED_ENV_PROG_MAP.items(),
                               envconfig.ENV_VAR_PROG_MAP.items())

        for (name, evar), for_machine in itertools.product(opts, MachineChoice):
            p_env = _get_env_var(for_machine, self.is_cross_build(), evar)
            if p_env is not None:
                self.binaries[for_machine].binaries.setdefault(name, mesonlib.split_args(p_env))

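    # Illustrative defaults picked up by the method below: BOOST_ROOT (or
    # BOOSTROOT) is split on os.pathsep into a list for the 'boost_root'
    # property, while BOOST_INCLUDEDIR, BOOST_LIBRARYDIR and JAVA_HOME are
    # stored as single strings.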
    def _set_default_properties_from_env(self) -> None:
        """Properties which can also be set from the environment."""
        # name, evar, split
        opts: T.List[T.Tuple[str, T.List[str], bool]] = [
            ('boost_includedir', ['BOOST_INCLUDEDIR'], False),
            ('boost_librarydir', ['BOOST_LIBRARYDIR'], False),
            ('boost_root', ['BOOST_ROOT', 'BOOSTROOT'], True),
            ('java_home', ['JAVA_HOME'], False),
        ]

        for (name, evars, split), for_machine in itertools.product(opts, MachineChoice):
            for evar in evars:
                p_env = _get_env_var(for_machine, self.is_cross_build(), evar)
                if p_env is not None:
                    if split:
                        self.properties[for_machine].properties.setdefault(name, p_env.split(os.pathsep))
                    else:
                        self.properties[for_machine].properties.setdefault(name, p_env)
                    break

    def create_new_coredata(self, options: 'argparse.Namespace') -> None:
        # WARNING: Don't use any values from coredata in __init__. It gets
        # re-initialized with project options by the interpreter during
        # build file parsing.
        # meson_command is used by the regenchecker script, which runs meson
        self.coredata = coredata.CoreData(options, self.scratch_dir, mesonlib.get_meson_command())
        self.first_invocation = True

    def is_cross_build(self, when_building_for: MachineChoice = MachineChoice.HOST) -> bool:
        return self.coredata.is_cross_build(when_building_for)

    def dump_coredata(self) -> str:
        return coredata.save(self.coredata, self.get_build_dir())

    def get_log_dir(self) -> str:
        return self.log_dir

    def get_coredata(self) -> coredata.CoreData:
        return self.coredata

    @staticmethod
    def get_build_command(unbuffered: bool = False) -> T.List[str]:
        cmd = mesonlib.get_meson_command()
        if cmd is None:
            raise MesonBugException('No command?')
        cmd = cmd.copy()
        if unbuffered and 'python' in os.path.basename(cmd[0]):
            cmd.insert(1, '-u')
        return cmd

    def is_header(self, fname: 'mesonlib.FileOrString') -> bool:
        return is_header(fname)

    def is_source(self, fname: 'mesonlib.FileOrString') -> bool:
        return is_source(fname)

    def is_assembly(self, fname: 'mesonlib.FileOrString') -> bool:
        return is_assembly(fname)

    def is_llvm_ir(self, fname: 'mesonlib.FileOrString') -> bool:
        return is_llvm_ir(fname)

    def is_object(self, fname: 'mesonlib.FileOrString') -> bool:
        return is_object(fname)

    @lru_cache(maxsize=None)
    def is_library(self, fname):
        return is_library(fname)

    def lookup_binary_entry(self, for_machine: MachineChoice, name: str) -> T.Optional[T.List[str]]:
        return self.binaries[for_machine].lookup_entry(name)

    def get_scratch_dir(self) -> str:
        return self.scratch_dir

    def get_source_dir(self) -> str:
        return self.source_dir

    def get_build_dir(self) -> str:
        return self.build_dir

    def get_import_lib_dir(self) -> str:
        "Install dir for the import library (library used for linking)"
        return self.get_libdir()

    def get_shared_module_dir(self) -> str:
        "Install dir for shared modules that are loaded at runtime"
        return self.get_libdir()

    def get_shared_lib_dir(self) -> str:
        "Install dir for the shared library"
        m = self.machines.host
        # Windows has no RPATH or similar, so DLLs must be next to EXEs.
        if m.is_windows() or m.is_cygwin():
            return self.get_bindir()
        return self.get_libdir()

    def get_static_lib_dir(self) -> str:
        "Install dir for the static library"
        return self.get_libdir()

    def get_prefix(self) -> str:
        return self.coredata.get_option(OptionKey('prefix'))

    def get_libdir(self) -> str:
        return self.coredata.get_option(OptionKey('libdir'))

    def get_libexecdir(self) -> str:
        return self.coredata.get_option(OptionKey('libexecdir'))

    def get_bindir(self) -> str:
        return self.coredata.get_option(OptionKey('bindir'))

    def get_includedir(self) -> str:
        return self.coredata.get_option(OptionKey('includedir'))

    def get_mandir(self) -> str:
        return self.coredata.get_option(OptionKey('mandir'))

    def get_datadir(self) -> str:
        return self.coredata.get_option(OptionKey('datadir'))

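    # Background for the parsing in the method below, based on typical
    # `-print-search-dirs` output rather than anything stated in this file
    # (treat it as an assumption): GCC prints three lines ('install:',
    # 'programs:', 'libraries:') while Clang prints two ('programs:',
    # 'libraries:'), which is why the libraries line sits at index 2 for GCC
    # and index 1 for Clang.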
    def get_compiler_system_dirs(self, for_machine: MachineChoice):
        for comp in self.coredata.compilers[for_machine].values():
            if isinstance(comp, compilers.ClangCompiler):
                index = 1
                break
            elif isinstance(comp, compilers.GnuCompiler):
                index = 2
                break
        else:
            # This option is only supported by gcc and clang. If we don't get a
            # GCC or Clang compiler return an empty list.
            return []

        p, out, _ = Popen_safe(comp.get_exelist() + ['-print-search-dirs'])
        if p.returncode != 0:
            raise mesonlib.MesonException('Could not calculate system search dirs')
        out = out.split('\n')[index].lstrip('libraries: =').split(':')
        return [os.path.normpath(p) for p in out]

    def need_exe_wrapper(self, for_machine: MachineChoice = MachineChoice.HOST):
        value = self.properties[for_machine].get('needs_exe_wrapper', None)
        if value is not None:
            return value
        return not machine_info_can_run(self.machines[for_machine])

    def get_exe_wrapper(self) -> ExternalProgram:
        if not self.need_exe_wrapper():
            return EmptyExternalProgram()
        return self.exe_wrapper