The Meson Build System http://mesonbuild.com/
# SPDX-License-Identifier: Apache-2.0
# Copyright 2013-2024 The Meson development team
# Copyright © 2023-2024 Intel Corporation
from __future__ import annotations
import copy
from . import mlog, options
import pickle, os, uuid
import sys
from itertools import chain
from pathlib import PurePath
from collections import OrderedDict, abc
from dataclasses import dataclass
from .mesonlib import (
    MesonBugException,
    MesonException, MachineChoice, PerMachine,
    PerMachineDefaultable,
    stringlistify,
    pickle_load
)
from .options import OptionKey
from .machinefile import CmdLineFileParser
import ast
import argparse
import enum
import shlex
import typing as T
if T.TYPE_CHECKING:
    from typing_extensions import Protocol
    from typing import Any

    from . import dependencies
    from .compilers.compilers import Compiler, CompileResult, RunResult, CompileCheckMode
    from .dependencies.detect import TV_DepID
    from .environment import Environment
    from .mesonlib import FileOrString
    from .cmake.traceparser import CMakeCacheEntry
    from .interpreterbase import SubProject
    from .options import UserOption

    class SharedCMDOptions(Protocol):

        """Representation of command line options from Meson setup, configure,
        and dist.

        :param projectoptions: The raw list of command line options given
        :param cmd_line_options: command line options parsed into an OptionKey:
            str mapping
        """

        cmd_line_options: T.Dict[OptionKey, str]
        projectoptions: T.List[str]
        cross_file: T.List[str]
        native_file: T.List[str]

    OptionDictType = T.Union[T.Dict[str, 'options.UserOption[T.Any]'], 'OptionsView']
    MutableKeyedOptionDictType = T.Dict['OptionKey', 'options.UserOption[T.Any]']
    KeyedOptionDictType = T.Union['options.OptionStore', 'OptionsView']
    CompilerCheckCacheKey = T.Tuple[T.Tuple[str, ...], str, FileOrString, T.Tuple[str, ...], CompileCheckMode]
    # code, args
    RunCheckCacheKey = T.Tuple[str, T.Tuple[str, ...]]

    # typeshed
    StrOrBytesPath = T.Union[str, bytes, os.PathLike[str], os.PathLike[bytes]]

# Check major_versions_differ() if changing versioning scheme.
#
# Pip requires that RCs are named like this: '0.1.0.rc1'
# But the corresponding Git tag needs to be '0.1.0rc1'
version = '1.5.99'
# The next stable version when we are in dev. This is used to allow projects to
# require meson version >=1.2.0 when using 1.1.99. FeatureNew won't warn when
# using a feature introduced in 1.2.0 when using Meson 1.1.99.
stable_version = version
if stable_version.endswith('.99'):
    stable_version_array = stable_version.split('.')
    stable_version_array[-1] = '0'
    stable_version_array[-2] = str(int(stable_version_array[-2]) + 1)
    stable_version = '.'.join(stable_version_array)
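
# For instance, with the development version string above this computes the
# next stable release (an illustrative trace of the code, not extra logic):
#
#   '1.5.99' -> ['1', '5', '99'] -> ['1', '6', '0'] -> '1.6.0'
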
def get_genvs_default_buildtype_list() -> list[str]:
    # just debug, debugoptimized, and release for now
    # but this should probably be configurable through some extra option, alongside --genvslite.
    return options.buildtypelist[1:-2]

class MesonVersionMismatchException(MesonException):
    '''Build directory generated with Meson version is incompatible with current version'''
    def __init__(self, old_version: str, current_version: str, extra_msg: str = '') -> None:
        super().__init__(f'Build directory has been generated with Meson version {old_version}, '
                         f'which is incompatible with the current version {current_version}.'
                         + extra_msg)
        self.old_version = old_version
        self.current_version = current_version

class DependencyCacheType(enum.Enum):
    OTHER = 0
    PKG_CONFIG = 1
    CMAKE = 2

    @classmethod
    def from_type(cls, dep: 'dependencies.Dependency') -> 'DependencyCacheType':
        # As more types gain search overrides they'll need to be added here
        if dep.type_name == 'pkgconfig':
            return cls.PKG_CONFIG
        if dep.type_name == 'cmake':
            return cls.CMAKE
        return cls.OTHER

class DependencySubCache:
    def __init__(self, type_: DependencyCacheType):
        self.types = [type_]
        self.__cache: T.Dict[T.Tuple[str, ...], 'dependencies.Dependency'] = {}

    def __getitem__(self, key: T.Tuple[str, ...]) -> 'dependencies.Dependency':
        return self.__cache[key]

    def __setitem__(self, key: T.Tuple[str, ...], value: 'dependencies.Dependency') -> None:
        self.__cache[key] = value

    def __contains__(self, key: T.Tuple[str, ...]) -> bool:
        return key in self.__cache

    def values(self) -> T.Iterable['dependencies.Dependency']:
        return self.__cache.values()

class DependencyCache:
"""Class that stores a cache of dependencies.
This class is meant to encapsulate the fact that we need multiple keys to
successfully lookup by providing a simple get/put interface.
"""
def __init__(self, builtins: 'KeyedOptionDictType', for_machine: MachineChoice):
self.__cache: T.MutableMapping[TV_DepID, DependencySubCache] = OrderedDict()
self.__builtins = builtins
self.__pkg_conf_key = OptionKey('pkg_config_path', machine=for_machine)
self.__cmake_key = OptionKey('cmake_prefix_path', machine=for_machine)
def __calculate_subkey(self, type_: DependencyCacheType) -> T.Tuple[str, ...]:
data: T.Dict[DependencyCacheType, T.List[str]] = {
DependencyCacheType.PKG_CONFIG: stringlistify(self.__builtins.get_value(self.__pkg_conf_key)),
DependencyCacheType.CMAKE: stringlistify(self.__builtins.get_value(self.__cmake_key)),
DependencyCacheType.OTHER: [],
}
assert type_ in data, 'Someone forgot to update subkey calculations for a new type'
return tuple(data[type_])
def __iter__(self) -> T.Iterator['TV_DepID']:
return self.keys()
def put(self, key: 'TV_DepID', dep: 'dependencies.Dependency') -> None:
t = DependencyCacheType.from_type(dep)
if key not in self.__cache:
self.__cache[key] = DependencySubCache(t)
subkey = self.__calculate_subkey(t)
self.__cache[key][subkey] = dep
def get(self, key: 'TV_DepID') -> T.Optional['dependencies.Dependency']:
"""Get a value from the cache.
If there is no cache entry then None will be returned.
"""
try:
val = self.__cache[key]
except KeyError:
return None
for t in val.types:
subkey = self.__calculate_subkey(t)
try:
return val[subkey]
except KeyError:
pass
return None
def values(self) -> T.Iterator['dependencies.Dependency']:
for c in self.__cache.values():
yield from c.values()
def keys(self) -> T.Iterator['TV_DepID']:
return iter(self.__cache.keys())
def items(self) -> T.Iterator[T.Tuple['TV_DepID', T.List['dependencies.Dependency']]]:
for k, v in self.__cache.items():
vs: T.List[dependencies.Dependency] = []
for t in v.types:
subkey = self.__calculate_subkey(t)
if subkey in v:
vs.append(v[subkey])
yield k, vs
def clear(self) -> None:
self.__cache.clear()
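
# A rough usage sketch (names are illustrative, not part of this module):
#
#   cache = DependencyCache(coredata.optstore, MachineChoice.HOST)
#   cache.put(dep_id, dep)   # stored under a subkey derived from the current
#                            # pkg_config_path / cmake_prefix_path values
#   cache.get(dep_id)        # returns the dependency, or None if those
#                            # options have changed since put()
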
class CMakeStateCache:
"""Class that stores internal CMake compiler states.
This cache is used to reduce the startup overhead of CMake by caching
all internal CMake compiler variables.
"""
def __init__(self) -> None:
self.__cache: T.Dict[str, T.Dict[str, T.List[str]]] = {}
self.cmake_cache: T.Dict[str, 'CMakeCacheEntry'] = {}
def __iter__(self) -> T.Iterator[T.Tuple[str, T.Dict[str, T.List[str]]]]:
return iter(self.__cache.items())
def items(self) -> T.Iterator[T.Tuple[str, T.Dict[str, T.List[str]]]]:
return iter(self.__cache.items())
def update(self, language: str, variables: T.Dict[str, T.List[str]]):
if language not in self.__cache:
self.__cache[language] = {}
self.__cache[language].update(variables)
@property
def languages(self) -> T.Set[str]:
return set(self.__cache.keys())
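
# A small illustration of how the cache is filled (values are illustrative):
#
#   state = CMakeStateCache()
#   state.update('c', {'CMAKE_C_COMPILER': ['/usr/bin/cc']})
#   state.languages            # -> {'c'}
#   dict(state.items())['c']   # -> {'CMAKE_C_COMPILER': ['/usr/bin/cc']}
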
# Can't bind this near the class method it seems, sadly.
_V = T.TypeVar('_V')
# This class contains all data that must persist over multiple
# invocations of Meson. It is roughly the same thing as
# cmakecache.
class CoreData:
    def __init__(self, cmd_options: SharedCMDOptions, scratch_dir: str, meson_command: T.List[str]):
        self.lang_guids = {
            'default': '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942',
            'c': '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942',
            'cpp': '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942',
            'test': '3AC096D0-A1C2-E12C-1390-A8335801FDAB',
            'directory': '2150E333-8FDC-42A3-9474-1A3956D46DE8',
        }
        self.test_guid = str(uuid.uuid4()).upper()
        self.regen_guid = str(uuid.uuid4()).upper()
        self.install_guid = str(uuid.uuid4()).upper()
        self.meson_command = meson_command
        self.target_guids = {}
        self.version = version
        self.optstore = options.OptionStore()
        self.cross_files = self.__load_config_files(cmd_options, scratch_dir, 'cross')
        self.compilers: PerMachine[T.Dict[str, Compiler]] = PerMachine(OrderedDict(), OrderedDict())

        # Stores the (name, hash) of the options file. The name will be either
        # "meson_options.txt" or "meson.options".
        # This is used by mconf to reload the option file if it's changed.
        self.options_files: T.Dict[SubProject, T.Optional[T.Tuple[str, str]]] = {}

        # Set of subprojects that have already been initialized once; this is
        # required to be stored and reloaded with the coredata, as we don't
        # want to overwrite options for such subprojects.
        self.initialized_subprojects: T.Set[str] = set()

        # For host == build configurations these caches should be the same.
        self.deps: PerMachine[DependencyCache] = PerMachineDefaultable.default(
            self.is_cross_build(),
            DependencyCache(self.optstore, MachineChoice.BUILD),
            DependencyCache(self.optstore, MachineChoice.HOST))

        self.compiler_check_cache: T.Dict['CompilerCheckCacheKey', 'CompileResult'] = OrderedDict()
        self.run_check_cache: T.Dict['RunCheckCacheKey', 'RunResult'] = OrderedDict()

        # CMake cache
        self.cmake_cache: PerMachine[CMakeStateCache] = PerMachine(CMakeStateCache(), CMakeStateCache())

        # Only to print a warning if it changes between Meson invocations.
        self.config_files = self.__load_config_files(cmd_options, scratch_dir, 'native')
        self.builtin_options_libdir_cross_fixup()
        self.init_builtins('')

    @staticmethod
    def __load_config_files(cmd_options: SharedCMDOptions, scratch_dir: str, ftype: str) -> T.List[str]:
        # Need to try and make the passed filenames absolute because when the
        # files are parsed later we'll have chdir()d.
        if ftype == 'cross':
            filenames = cmd_options.cross_file
        else:
            filenames = cmd_options.native_file

        if not filenames:
            return []

        found_invalid: T.List[str] = []
        missing: T.List[str] = []
        real: T.List[str] = []
        for i, f in enumerate(filenames):
            f = os.path.expanduser(os.path.expandvars(f))
            if os.path.exists(f):
                if os.path.isfile(f):
                    real.append(os.path.abspath(f))
                    continue
                elif os.path.isdir(f):
                    found_invalid.append(os.path.abspath(f))
                else:
                    # in this case we've been passed some kind of pipe, copy
                    # the contents of that file into the meson private (scratch)
                    # directory so that it can be re-read when wiping/reconfiguring
                    copy = os.path.join(scratch_dir, f'{uuid.uuid4()}.{ftype}.ini')
                    with open(f, encoding='utf-8') as rf:
                        with open(copy, 'w', encoding='utf-8') as wf:
                            wf.write(rf.read())
                    real.append(copy)

                    # Also replace the command line argument, as the pipe
                    # probably won't exist on reconfigure
                    filenames[i] = copy
                continue
            if sys.platform != 'win32':
                paths = [
                    os.environ.get('XDG_DATA_HOME', os.path.expanduser('~/.local/share')),
                ] + os.environ.get('XDG_DATA_DIRS', '/usr/local/share:/usr/share').split(':')
                for path in paths:
                    path_to_try = os.path.join(path, 'meson', ftype, f)
                    if os.path.isfile(path_to_try):
                        real.append(path_to_try)
                        break
                else:
                    missing.append(f)
            else:
                missing.append(f)

        if missing:
            if found_invalid:
                mlog.log('Found invalid candidates for', ftype, 'file:', *found_invalid)
            mlog.log('Could not find any valid candidate for', ftype, 'files:', *missing)
            raise MesonException(f'Cannot find specified {ftype} file: {f}')
        return real

    def builtin_options_libdir_cross_fixup(self) -> None:
        # By default set libdir to "lib" when cross compiling since
        # getting the "system default" is always wrong on multiarch
        # platforms as it gets a value like lib/x86_64-linux-gnu.
        if self.cross_files:
            options.BUILTIN_OPTIONS[OptionKey('libdir')].default = 'lib'

    def sanitize_prefix(self, prefix: str) -> str:
        prefix = os.path.expanduser(prefix)
        if not os.path.isabs(prefix):
            raise MesonException(f'prefix value {prefix!r} must be an absolute path')
        if prefix.endswith('/') or prefix.endswith('\\'):
            # On Windows we need to preserve the trailing slash if the
            # string is of type 'C:\' because 'C:' is not an absolute path.
            if len(prefix) == 3 and prefix[1] == ':':
                pass
            # If prefix is a single character, preserve it since it is
            # the root directory.
            elif len(prefix) == 1:
                pass
            else:
                prefix = prefix[:-1]
        return prefix
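
    # Behaviour sketch (illustrative values only):
    #
    #   sanitize_prefix('/usr/local/')  -> '/usr/local'    (trailing slash dropped)
    #   sanitize_prefix('C:\\')         -> 'C:\\'          (kept on Windows; 'C:' alone is not absolute)
    #   sanitize_prefix('opt/meson')    -> MesonException  (not an absolute path)
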
    def sanitize_dir_option_value(self, prefix: str, option: OptionKey, value: T.Any) -> T.Any:
        '''
        If the option is an installation directory option, its value is an
        absolute path, and that path resides within prefix, return the value
        as a path relative to the prefix. Otherwise, return it as is.

        This way everyone can do e.g. get_option('libdir') and usually get
        the library directory relative to prefix, even though it really
        should not be relied upon.
        '''
        try:
            value = PurePath(value)
        except TypeError:
            return value
        if option.name.endswith('dir') and value.is_absolute() and \
           option not in options.BUILTIN_DIR_NOPREFIX_OPTIONS:
            try:
                # Try to relativize the path.
                value = value.relative_to(prefix)
            except ValueError:
                # Path is not relative, let's keep it as is.
                pass
            if '..' in value.parts:
                raise MesonException(
                    f'The value of the \'{option}\' option is \'{value}\' but '
                    'directory options are not allowed to contain \'..\'.\n'
                    f'If you need a path outside of the {prefix!r} prefix, '
                    'please use an absolute path.'
                )
        # .as_posix() keeps the posix-like file separators Meson uses.
        return value.as_posix()
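
    # Illustrative behaviour, assuming prefix='/usr':
    #
    #   sanitize_dir_option_value('/usr', OptionKey('libdir'), '/usr/lib')  -> 'lib'
    #   sanitize_dir_option_value('/usr', OptionKey('libdir'), '/opt/lib')  -> '/opt/lib'
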
    def init_builtins(self, subproject: str) -> None:
        # Create builtin options with default values
        for key, opt in options.BUILTIN_OPTIONS.items():
            self.add_builtin_option(self.optstore, key.evolve(subproject=subproject), opt)
        for for_machine in iter(MachineChoice):
            for key, opt in options.BUILTIN_OPTIONS_PER_MACHINE.items():
                self.add_builtin_option(self.optstore, key.evolve(subproject=subproject, machine=for_machine), opt)

    @staticmethod
    def add_builtin_option(opts_map: 'MutableKeyedOptionDictType', key: OptionKey,
                           opt: 'options.BuiltinOption') -> None:
        if key.subproject:
            if opt.yielding:
                # This option is global and not per-subproject
                return
            value = opts_map.get_value(key.as_root())
        else:
            value = None
        if key.has_module_prefix():
            modulename = key.get_module_prefix()
            opts_map.add_module_option(modulename, key, opt.init_option(key, value, options.default_prefix()))
        else:
            opts_map.add_system_option(key, opt.init_option(key, value, options.default_prefix()))

    def init_backend_options(self, backend_name: str) -> None:
        if backend_name == 'ninja':
            self.optstore.add_system_option('backend_max_links', options.UserIntegerOption(
                'backend_max_links',
                'Maximum number of linker processes to run or 0 for no '
                'limit',
                (0, None, 0)))
        elif backend_name.startswith('vs'):
            self.optstore.add_system_option('backend_startup_project', options.UserStringOption(
                'backend_startup_project',
                'Default project to execute in Visual Studio',
                ''))

    def get_option(self, key: OptionKey) -> T.Union[T.List[str], str, int, bool]:
        try:
            v = self.optstore.get_value(key)
            return v
        except KeyError:
            pass

        try:
            v = self.optstore.get_value_object(key.as_root())
            if v.yielding:
                return v.value
        except KeyError:
            pass

        raise MesonException(f'Tried to get unknown builtin option {str(key)}')

    def set_option(self, key: OptionKey, value, first_invocation: bool = False) -> bool:
        dirty = False
        if self.optstore.is_builtin_option(key):
            if key.name == 'prefix':
                value = self.sanitize_prefix(value)
            else:
                prefix = self.optstore.get_value('prefix')
                value = self.sanitize_dir_option_value(prefix, key, value)

        try:
            opt = self.optstore.get_value_object(key)
        except KeyError:
            raise MesonException(f'Tried to set unknown builtin option {str(key)}')

        if opt.deprecated is True:
            mlog.deprecation(f'Option {key.name!r} is deprecated')
        elif isinstance(opt.deprecated, list):
            for v in opt.listify(value):
                if v in opt.deprecated:
                    mlog.deprecation(f'Option {key.name!r} value {v!r} is deprecated')
        elif isinstance(opt.deprecated, dict):
            def replace(v):
                newvalue = opt.deprecated.get(v)
                if newvalue is not None:
                    mlog.deprecation(f'Option {key.name!r} value {v!r} is replaced by {newvalue!r}')
                    return newvalue
                return v
            newvalue = [replace(v) for v in opt.listify(value)]
            value = ','.join(newvalue)
        elif isinstance(opt.deprecated, str):
            # Option is deprecated and replaced by another. Note that a project
            # option could be replaced by a built-in or module option, which is
            # why we use OptionKey.from_string(newname) instead of
            # key.evolve(newname). We set the value on both the old and new names,
            # assuming they accept the same value. That could for example be
            # achieved by adding the values from the old option as deprecated on
            # the new option, for example when a boolean option is replaced by a
            # feature option with a different name.
            newname = opt.deprecated
            newkey = OptionKey.from_string(newname).evolve(subproject=key.subproject)
            mlog.deprecation(f'Option {key.name!r} is replaced by {newname!r}')
            dirty |= self.set_option(newkey, value, first_invocation)

        changed = opt.set_value(value)
        if changed and opt.readonly and not first_invocation:
            raise MesonException(f'Tried to modify read-only option {str(key)!r}')
        dirty |= changed

        if key.name == 'buildtype':
            dirty |= self._set_others_from_buildtype(value)

        return dirty

    def clear_cache(self) -> None:
        self.deps.host.clear()
        self.deps.build.clear()
        self.compiler_check_cache.clear()
        self.run_check_cache.clear()

    def get_nondefault_buildtype_args(self) -> T.List[T.Union[T.Tuple[str, str, str], T.Tuple[str, bool, bool]]]:
        result: T.List[T.Union[T.Tuple[str, str, str], T.Tuple[str, bool, bool]]] = []
        value = self.optstore.get_value('buildtype')
        if value == 'plain':
            opt = 'plain'
            debug = False
        elif value == 'debug':
            opt = '0'
            debug = True
        elif value == 'debugoptimized':
            opt = '2'
            debug = True
        elif value == 'release':
            opt = '3'
            debug = False
        elif value == 'minsize':
            opt = 's'
            debug = True
        else:
            assert value == 'custom'
            return []
        actual_opt = self.optstore.get_value('optimization')
        actual_debug = self.optstore.get_value('debug')
        if actual_opt != opt:
            result.append(('optimization', actual_opt, opt))
        if actual_debug != debug:
            result.append(('debug', actual_debug, debug))
        return result

    def _set_others_from_buildtype(self, value: str) -> bool:
        dirty = False

        if value == 'plain':
            opt = 'plain'
            debug = False
        elif value == 'debug':
            opt = '0'
            debug = True
        elif value == 'debugoptimized':
            opt = '2'
            debug = True
        elif value == 'release':
            opt = '3'
            debug = False
        elif value == 'minsize':
            opt = 's'
            debug = True
        else:
            assert value == 'custom'
            return False

        dirty |= self.optstore.set_value('optimization', opt)
        dirty |= self.optstore.set_value('debug', debug)

        return dirty
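
    # The buildtype -> (optimization, debug) mapping applied above, as a quick
    # reference (derived from the branches, not additional behaviour):
    #
    #   plain          -> ('plain', False)
    #   debug          -> ('0', True)
    #   debugoptimized -> ('2', True)
    #   release        -> ('3', False)
    #   minsize        -> ('s', True)
    #   custom         -> both options left untouched
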
    def is_per_machine_option(self, optname: OptionKey) -> bool:
        if optname.as_host() in options.BUILTIN_OPTIONS_PER_MACHINE:
            return True
        return self.optstore.is_compiler_option(optname)

    def get_external_args(self, for_machine: MachineChoice, lang: str) -> T.List[str]:
        # mypy cannot analyze type of OptionKey
        key = OptionKey(f'{lang}_args', machine=for_machine)
        return T.cast('T.List[str]', self.optstore.get_value(key))

    def get_external_link_args(self, for_machine: MachineChoice, lang: str) -> T.List[str]:
        # mypy cannot analyze type of OptionKey
        key = OptionKey(f'{lang}_link_args', machine=for_machine)
        return T.cast('T.List[str]', self.optstore.get_value(key))

    def update_project_options(self, project_options: 'MutableKeyedOptionDictType', subproject: SubProject) -> None:
        for key, value in project_options.items():
            if key not in self.optstore:
                self.optstore.add_project_option(key, value)
                continue
            if key.subproject != subproject:
                raise MesonBugException(f'Tried to set an option for subproject {key.subproject} from {subproject}!')

            oldval = self.optstore.get_value_object(key)
            if type(oldval) is not type(value):
                self.optstore.set_value(key, value.value)
            elif oldval.choices != value.choices:
                # If the choices have changed, use the new option object, but
                # attempt to keep the old value. If the old value is no longer
                # valid, keep the new default and warn.
                self.optstore.set_value_object(key, value)
                try:
                    value.set_value(oldval.value)
                except MesonException:
                    mlog.warning(f'Old value(s) of {key} are no longer valid, resetting to default ({value.value}).',
                                 fatal=False)

        # Find any extraneous keys for this project and remove them
        for key in self.optstore.keys() - project_options.keys():
            if self.optstore.is_project_option(key) and key.subproject == subproject:
                self.optstore.remove(key)

    def is_cross_build(self, when_building_for: MachineChoice = MachineChoice.HOST) -> bool:
        if when_building_for == MachineChoice.BUILD:
            return False
        return len(self.cross_files) > 0

    def copy_build_options_from_regular_ones(self) -> bool:
        dirty = False
        assert not self.is_cross_build()
        for k in options.BUILTIN_OPTIONS_PER_MACHINE:
            o = self.optstore.get_value_object(k)
            dirty |= self.optstore.set_value(k.as_build(), o.value)
        for bk, bv in self.optstore.items():
            if bk.machine is MachineChoice.BUILD:
                hk = bk.as_host()
                try:
                    hv = self.optstore.get_value_object(hk)
                    dirty |= bv.set_value(hv.value)
                except KeyError:
                    continue

        return dirty

    def set_options(self, opts_to_set: T.Dict[OptionKey, T.Any], subproject: str = '', first_invocation: bool = False) -> bool:
        dirty = False
        if not self.is_cross_build():
            opts_to_set = {k: v for k, v in opts_to_set.items() if k.machine is not MachineChoice.BUILD}
        # Set prefix first because it's needed to sanitize other options
        pfk = OptionKey('prefix')
        if pfk in opts_to_set:
            prefix = self.sanitize_prefix(opts_to_set[pfk])
            dirty |= self.optstore.set_value('prefix', prefix)
            for key in options.BUILTIN_DIR_NOPREFIX_OPTIONS:
                if key not in opts_to_set:
                    dirty |= self.optstore.set_value(key, options.BUILTIN_OPTIONS[key].prefixed_default(key, prefix))

        unknown_options: T.List[OptionKey] = []
        for k, v in opts_to_set.items():
            if k == pfk:
                continue
            elif k in self.optstore:
                dirty |= self.set_option(k, v, first_invocation)
            elif k.machine != MachineChoice.BUILD and not self.optstore.is_compiler_option(k):
                unknown_options.append(k)
        if unknown_options:
            unknown_options_str = ', '.join(sorted(str(s) for s in unknown_options))
            sub = f'In subproject {subproject}: ' if subproject else ''
            raise MesonException(f'{sub}Unknown options: "{unknown_options_str}"')

        if not self.is_cross_build():
            dirty |= self.copy_build_options_from_regular_ones()

        return dirty

    def set_default_options(self, default_options: T.MutableMapping[OptionKey, str], subproject: str, env: 'Environment') -> None:
        from .compilers import base_options

        # Main project can set default options on subprojects, but subprojects
        # can only set default options on themselves.
        # Preserve order: if env.options has 'buildtype' it must come after
        # 'optimization' if it is in default_options.
        options: T.MutableMapping[OptionKey, T.Any] = OrderedDict()
        for k, v in default_options.items():
            if not subproject or k.subproject == subproject:
                options[k] = v
        options.update(env.options)
        env.options = options

        # Create a subset of options, keeping only project and builtin
        # options for this subproject.
        # Language and backend specific options will be set later when adding
        # languages and setting the backend (builtin options must be set first
        # to know which backend we'll use).
        options = OrderedDict()
        for k, v in env.options.items():
            # If this is a subproject, don't use other subproject options
            if k.subproject and k.subproject != subproject:
                continue
            # If the option is a builtin and is yielding then it's not allowed per subproject.
            #
            # Always test this using the HOST machine, as many builtin options
            # are not valid for the BUILD machine, but the yielding value does
            # not differ between them even when they are valid for both.
            if subproject and self.optstore.is_builtin_option(k) and self.optstore.get_value_object(k.evolve(subproject='', machine=MachineChoice.HOST)).yielding:
                continue
            # Skip base, compiler, and backend options, they are handled when
            # adding languages and setting backend.
            if self.optstore.is_compiler_option(k) or self.optstore.is_backend_option(k):
                continue
            if self.optstore.is_base_option(k) and k.as_root() in base_options:
                # set_options will report unknown base options
                continue
            options[k] = v

        self.set_options(options, subproject=subproject, first_invocation=env.first_invocation)

    def add_compiler_options(self, c_options: MutableKeyedOptionDictType, lang: str, for_machine: MachineChoice,
                             env: Environment, subproject: str) -> None:
        for k, o in c_options.items():
            value = env.options.get(k)
            if value is not None:
                o.set_value(value)
                if not subproject:
                    self.optstore.set_value_object(k, o)  # override compiler option on reconfigure
            self.optstore.setdefault(k, o)

            if subproject:
                sk = k.evolve(subproject=subproject)
                value = env.options.get(sk) or value
                if value is not None:
                    o.set_value(value)
                    self.optstore.set_value_object(sk, o)  # override compiler option on reconfigure
                self.optstore.setdefault(sk, o)

    def add_lang_args(self, lang: str, comp: T.Type['Compiler'],
                      for_machine: MachineChoice, env: 'Environment') -> None:
        """Add global language arguments that are needed before compiler/linker detection."""
        from .compilers import compilers
        # These options are all new at this point, because the compiler is
        # responsible for adding its own options, thus calling
        # `self.optstore.update()` is perfectly safe.
        for gopt_key, gopt_valobj in compilers.get_global_options(lang, comp, for_machine, env).items():
            self.optstore.add_compiler_option(lang, gopt_key, gopt_valobj)

    def process_compiler_options(self, lang: str, comp: Compiler, env: Environment, subproject: str) -> None:
        from . import compilers

        self.add_compiler_options(comp.get_options(), lang, comp.for_machine, env, subproject)

        enabled_opts: T.List[OptionKey] = []
        for key in comp.base_options:
            if subproject:
                skey = key.evolve(subproject=subproject)
            else:
                skey = key
            if skey not in self.optstore:
                self.optstore.add_system_option(skey, copy.deepcopy(compilers.base_options[key]))
                if skey in env.options:
                    self.optstore.set_value(skey, env.options[skey])
                    enabled_opts.append(skey)
                elif subproject and key in env.options:
                    self.optstore.set_value(skey, env.options[key])
                    enabled_opts.append(skey)
                if subproject and key not in self.optstore:
                    self.optstore.add_system_option(key, copy.deepcopy(self.optstore.get_value_object(skey)))
            elif skey in env.options:
                self.optstore.set_value(skey, env.options[skey])
            elif subproject and key in env.options:
                self.optstore.set_value(skey, env.options[key])

        self.emit_base_options_warnings(enabled_opts)

    def emit_base_options_warnings(self, enabled_opts: T.List[OptionKey]) -> None:
        if OptionKey('b_bitcode') in enabled_opts:
            mlog.warning('Base option \'b_bitcode\' is enabled, which is incompatible with many linker options. Incompatible options such as \'b_asneeded\' have been disabled.', fatal=False)
            mlog.warning('Please see https://mesonbuild.com/Builtin-options.html#Notes_about_Apple_Bitcode_support for more details.', fatal=False)

def get_cmd_line_file(build_dir: str) -> str:
    return os.path.join(build_dir, 'meson-private', 'cmd_line.txt')

def read_cmd_line_file(build_dir: str, options: SharedCMDOptions) -> None:
    filename = get_cmd_line_file(build_dir)
    if not os.path.isfile(filename):
        return

    config = CmdLineFileParser()
    config.read(filename)

    # Do a copy because config is not really a dict. options.cmd_line_options
    # overrides values from the file.
    d = {OptionKey.from_string(k): v for k, v in config['options'].items()}
    d.update(options.cmd_line_options)
    options.cmd_line_options = d

    properties = config['properties']
    if not options.cross_file:
        options.cross_file = ast.literal_eval(properties.get('cross_file', '[]'))
    if not options.native_file:
        # This will be a string in the form: "['first', 'second', ...]", use
        # literal_eval to get it into the list of strings.
        options.native_file = ast.literal_eval(properties.get('native_file', '[]'))

def write_cmd_line_file(build_dir: str, options: SharedCMDOptions) -> None:
    filename = get_cmd_line_file(build_dir)
    config = CmdLineFileParser()

    properties: OrderedDict[str, str] = OrderedDict()
    if options.cross_file:
        properties['cross_file'] = options.cross_file
    if options.native_file:
        properties['native_file'] = options.native_file

    config['options'] = {str(k): str(v) for k, v in options.cmd_line_options.items()}
    config['properties'] = properties
    with open(filename, 'w', encoding='utf-8') as f:
        config.write(f)

def update_cmd_line_file(build_dir: str, options: SharedCMDOptions) -> None:
    filename = get_cmd_line_file(build_dir)
    config = CmdLineFileParser()
    config.read(filename)
    config['options'].update({str(k): str(v) for k, v in options.cmd_line_options.items()})
    with open(filename, 'w', encoding='utf-8') as f:
        config.write(f)

def format_cmd_line_options(options: SharedCMDOptions) -> str:
    cmdline = ['-D{}={}'.format(str(k), v) for k, v in options.cmd_line_options.items()]
    if options.cross_file:
        cmdline += [f'--cross-file={f}' for f in options.cross_file]
    if options.native_file:
        cmdline += [f'--native-file={f}' for f in options.native_file]
    return ' '.join([shlex.quote(x) for x in cmdline])
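
# For example (values purely illustrative), a setup invoked with
# '-Dbuildtype=release --cross-file arm.ini' would be formatted back roughly as:
#
#   '-Dbuildtype=release --cross-file=arm.ini'
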
def major_versions_differ(v1: str, v2: str) -> bool:
    v1_major, v1_minor = v1.rsplit('.', 1)
    v2_major, v2_minor = v2.rsplit('.', 1)
    # Major versions differ, or one is a development version but not the other.
    return v1_major != v2_major or ('99' in {v1_minor, v2_minor} and v1_minor != v2_minor)
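
# Illustrative results:
#
#   major_versions_differ('1.5.0', '1.5.2')   -> False  (same 1.5 series)
#   major_versions_differ('1.5.0', '1.6.0')   -> True
#   major_versions_differ('1.5.99', '1.5.0')  -> True   (development vs. stable)
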
def load(build_dir: str, suggest_reconfigure: bool = True) -> CoreData:
    filename = os.path.join(build_dir, 'meson-private', 'coredata.dat')
    return pickle_load(filename, 'Coredata', CoreData, suggest_reconfigure)

def save(obj: CoreData, build_dir: str) -> str:
    filename = os.path.join(build_dir, 'meson-private', 'coredata.dat')
    prev_filename = filename + '.prev'
    tempfilename = filename + '~'
    if major_versions_differ(obj.version, version):
        raise MesonException('Fatal version mismatch corruption.')
    if os.path.exists(filename):
        import shutil
        shutil.copyfile(filename, prev_filename)
    with open(tempfilename, 'wb') as f:
        pickle.dump(obj, f)
        f.flush()
        os.fsync(f.fileno())
    os.replace(tempfilename, filename)
    return filename

def register_builtin_arguments(parser: argparse.ArgumentParser) -> None:
    for n, b in options.BUILTIN_OPTIONS.items():
        b.add_to_argparse(str(n), parser, '')
    for n, b in options.BUILTIN_OPTIONS_PER_MACHINE.items():
        b.add_to_argparse(str(n), parser, ' (just for host machine)')
        b.add_to_argparse(str(n.as_build()), parser, ' (just for build machine)')
    parser.add_argument('-D', action='append', dest='projectoptions', default=[], metavar="option",
                        help='Set the value of an option, can be used several times to set multiple options.')

def create_options_dict(options: T.List[str], subproject: str = '') -> T.Dict[OptionKey, str]:
    result: T.OrderedDict[OptionKey, str] = OrderedDict()
    for o in options:
        try:
            (key, value) = o.split('=', 1)
        except ValueError:
            raise MesonException(f'Option {o!r} must have a value separated by equals sign.')
        k = OptionKey.from_string(key)
        if subproject:
            k = k.evolve(subproject=subproject)
        result[k] = value
    return result
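
# For instance (illustrative values), create_options_dict(['prefix=/usr', 'buildtype=release'])
# returns an OrderedDict mapping OptionKey('prefix') -> '/usr' and
# OptionKey('buildtype') -> 'release'; an entry without '=' raises MesonException.
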
def parse_cmd_line_options(args: SharedCMDOptions) -> None:
    args.cmd_line_options = create_options_dict(args.projectoptions)

    # Merge builtin options set with --option into the dict.
    for key in chain(
            options.BUILTIN_OPTIONS.keys(),
            (k.as_build() for k in options.BUILTIN_OPTIONS_PER_MACHINE.keys()),
            options.BUILTIN_OPTIONS_PER_MACHINE.keys(),
    ):
        name = str(key)
        value = getattr(args, name, None)
        if value is not None:
            if key in args.cmd_line_options:
                cmdline_name = options.BuiltinOption.argparse_name_to_arg(name)
                raise MesonException(
                    f'Got argument {name} as both -D{name} and {cmdline_name}. Pick one.')
            args.cmd_line_options[key] = value
            delattr(args, name)

@dataclass
class OptionsView(abc.Mapping):
    '''A view on an options dictionary for a given subproject and with overrides.
    '''

    # TODO: the typing here could be made more explicit using a TypedDict from
    # python 3.8 or typing_extensions
    original_options: T.Union[KeyedOptionDictType, 'dict[OptionKey, UserOption[Any]]']
    subproject: T.Optional[str] = None
    overrides: T.Optional[T.Mapping[OptionKey, T.Union[str, int, bool, T.List[str]]]] = None

    def __getitem__(self, key: OptionKey) -> options.UserOption:
        # FIXME: This is fundamentally the same algorithm as interpreter.get_option_internal().
        # We should try to share the code somehow.
        key = key.evolve(subproject=self.subproject)

        if not isinstance(self.original_options, options.OptionStore):
            # This is only used by CUDA currently.
            # This entire class gets removed when the option refactor
            # is finished.
            if '_' in key.name or key.lang is not None:
                is_project_option = False
            else:
                sys.exit(f'FAIL {key}.')
        else:
            is_project_option = self.original_options.is_project_option(key)

        if not is_project_option:
            opt = self.original_options.get(key)
            if opt is None or opt.yielding:
                key2 = key.as_root()
                # This hack goes away once we start using OptionStore
                # to hold overrides.
                if isinstance(self.original_options, options.OptionStore):
                    if key2 not in self.original_options:
                        raise KeyError(f'{key} {key2}')
                    opt = self.original_options.get_value_object(key2)
                else:
                    opt = self.original_options[key2]
        else:
            opt = self.original_options[key]
            if opt.yielding:
                opt = self.original_options.get(key.as_root(), opt)

        if self.overrides:
            override_value = self.overrides.get(key.as_root())
            if override_value is not None:
                opt = copy.copy(opt)
                opt.set_value(override_value)

        return opt

    def get_value(self, key: T.Union[str, OptionKey]):
        if isinstance(key, str):
            key = OptionKey(key)
        return self[key].value

    def set_value(self, key: T.Union[str, OptionKey], value: T.Union[str, int, bool, T.List[str]]):
        if isinstance(key, str):
            key = OptionKey(key)
        self.overrides[key] = value

    def __iter__(self) -> T.Iterator[OptionKey]:
        return iter(self.original_options)

    def __len__(self) -> int:
        return len(self.original_options)
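
# A rough usage sketch (values are illustrative): wrap an option store for one
# subproject and apply overrides without mutating the underlying options.
#
#   view = OptionsView(coredata.optstore, subproject='sub',
#                      overrides={OptionKey('unity'): 'on'})
#   view.get_value('unity')   # -> 'on', taken from a copied option object
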
FORBIDDEN_TARGET_NAMES = frozenset({
    'clean',
    'clean-ctlist',
    'clean-gcno',
    'clean-gcda',
    'coverage',
    'coverage-text',
    'coverage-xml',
    'coverage-html',
    'phony',
    'PHONY',
    'all',
    'test',
    'benchmark',
    'install',
    'uninstall',
    'build.ninja',
    'scan-build',
    'reconfigure',
    'dist',
    'distcheck',
})