# SPDX-License-Identifier: Apache-2.0
# Copyright 2012-2021 The Meson development team
# Copyright © 2023-2024 Intel Corporation
from __future__ import annotations
import hashlib
from .. import mparser
from .. import environment
from .. import coredata
from .. import dependencies
from .. import mlog
from .. import options
from .. import build
from .. import optinterpreter
from .. import compilers
from .. import envconfig
from ..wrap import wrap, WrapMode
from .. import mesonlib
from ..mesonlib import (EnvironmentVariables, ExecutableSerialisation, MesonBugException, MesonException, HoldableObject,
FileMode, MachineChoice, listify,
extract_as_list, has_path_sep, path_is_in_root, PerMachine)
from ..options import OptionKey
from ..programs import ExternalProgram, NonExistingExternalProgram
from ..dependencies import Dependency
from ..depfile import DepFile
from ..interpreterbase import ContainerTypeInfo, InterpreterBase, KwargInfo, typed_kwargs, typed_pos_args
from ..interpreterbase import noPosargs, noKwargs, permittedKwargs, noArgsFlattening, noSecondLevelHolderResolving, unholder_return
from ..interpreterbase import InterpreterException, InvalidArguments, InvalidCode, SubdirDoneRequest
from ..interpreterbase import Disabler, disablerIfNotFound
from ..interpreterbase import FeatureNew, FeatureDeprecated, FeatureBroken, FeatureNewKwargs
from ..interpreterbase import ObjectHolder, ContextManagerObject
from ..interpreterbase import stringifyUserArguments, resolve_second_level_holders
from ..modules import ExtensionModule, ModuleObject, MutableModuleObject, NewExtensionModule, NotFoundExtensionModule
from ..optinterpreter import optname_regex
from . import interpreterobjects as OBJ
from . import compiler as compilerOBJ
from .mesonmain import MesonMain
from .dependencyfallbacks import DependencyFallbacksHolder
from .interpreterobjects import (
SubprojectHolder,
Test,
RunProcess,
extract_required_kwarg,
extract_search_dirs,
NullSubprojectInterpreter,
)
from .type_checking import (
BUILD_TARGET_KWS,
COMMAND_KW,
CT_BUILD_ALWAYS,
CT_BUILD_ALWAYS_STALE,
CT_BUILD_BY_DEFAULT,
CT_INPUT_KW,
CT_INSTALL_DIR_KW,
EXECUTABLE_KWS,
JAR_KWS,
LIBRARY_KWS,
MULTI_OUTPUT_KW,
OUTPUT_KW,
DEFAULT_OPTIONS,
DEPENDENCIES_KW,
DEPENDS_KW,
DEPEND_FILES_KW,
DEPFILE_KW,
DISABLER_KW,
D_MODULE_VERSIONS_KW,
ENV_KW,
ENV_METHOD_KW,
ENV_SEPARATOR_KW,
INCLUDE_DIRECTORIES,
INSTALL_KW,
INSTALL_DIR_KW,
INSTALL_MODE_KW,
INSTALL_FOLLOW_SYMLINKS,
LINK_WITH_KW,
LINK_WHOLE_KW,
CT_INSTALL_TAG_KW,
INSTALL_TAG_KW,
LANGUAGE_KW,
NATIVE_KW,
PRESERVE_PATH_KW,
REQUIRED_KW,
SHARED_LIB_KWS,
SHARED_MOD_KWS,
DEPENDENCY_SOURCES_KW,
SOURCES_VARARGS,
STATIC_LIB_KWS,
VARIABLES_KW,
TEST_KWS,
NoneType,
in_set_validator,
env_convertor_with_method
)
from . import primitives as P_OBJ
from pathlib import Path
from enum import Enum
import os
import shutil
import uuid
import re
import stat
import collections
import typing as T
import textwrap
import importlib
import copy
if T.TYPE_CHECKING:
from . import kwargs as kwtypes
from ..backend.backends import Backend
from ..interpreterbase.baseobjects import InterpreterObject, TYPE_var, TYPE_kwargs
from ..programs import OverrideProgram
from .type_checking import SourcesVarargsType
# Input source types passed to Targets
SourceInputs = T.Union[mesonlib.File, build.GeneratedList, build.BuildTarget, build.BothLibraries,
build.CustomTargetIndex, build.CustomTarget, build.GeneratedList,
build.ExtractedObjects, str]
# Input source types passed to the build.Target classes
SourceOutputs = T.Union[mesonlib.File, build.GeneratedList,
build.BuildTarget, build.CustomTargetIndex, build.CustomTarget,
build.ExtractedObjects, build.GeneratedList, build.StructuredSources]
BuildTargetSource = T.Union[mesonlib.FileOrString, build.GeneratedTypes, build.StructuredSources]
ProgramVersionFunc = T.Callable[[T.Union[ExternalProgram, build.Executable, OverrideProgram]], str]
def _project_version_validator(value: T.Union[T.List, str, mesonlib.File, None]) -> T.Optional[str]:
if isinstance(value, list):
if len(value) != 1:
return 'when passed as array must have a length of 1'
elif not isinstance(value[0], mesonlib.File):
return 'when passed as array must contain a File'
return None
class Summary:
def __init__(self, project_name: str, project_version: str):
self.project_name = project_name
self.project_version = project_version
self.sections = collections.defaultdict(dict)
self.max_key_len = 0
def add_section(self, section: str, values: T.Dict[str, T.Any], bool_yn: bool,
list_sep: T.Optional[str], subproject: str) -> None:
for k, v in values.items():
if k in self.sections[section]:
raise InterpreterException(f'Summary section {section!r} already has key {k!r}')
formatted_values = []
for i in listify(v):
if isinstance(i, bool):
if bool_yn:
formatted_values.append(mlog.green('YES') if i else mlog.red('NO'))
else:
formatted_values.append('true' if i else 'false')
elif isinstance(i, (str, int)):
formatted_values.append(str(i))
elif isinstance(i, (ExternalProgram, Dependency)):
FeatureNew.single_use('dependency or external program in summary', '0.57.0', subproject)
formatted_values.append(i.summary_value())
elif isinstance(i, Disabler):
FeatureNew.single_use('disabler in summary', '0.64.0', subproject)
formatted_values.append(mlog.red('NO'))
elif isinstance(i, options.UserOption):
FeatureNew.single_use('feature option in summary', '0.58.0', subproject)
formatted_values.append(i.printable_value())
else:
m = 'Summary value in section {!r}, key {!r}, must be string, integer, boolean, dependency, disabler, or external program'
raise InterpreterException(m.format(section, k))
self.sections[section][k] = (formatted_values, list_sep)
self.max_key_len = max(self.max_key_len, len(k))
def dump(self):
mlog.log(self.project_name, mlog.normal_cyan(self.project_version))
for section, values in self.sections.items():
mlog.log('') # newline
if section:
mlog.log(' ', mlog.bold(section))
for k, v in values.items():
v, list_sep = v
padding = self.max_key_len - len(k)
end = ' ' if v else ''
mlog.log(' ' * 3, k + ' ' * padding + ':', end=end)
indent = self.max_key_len + 6
self.dump_value(v, list_sep, indent)
mlog.log('') # newline
def dump_value(self, arr, list_sep, indent):
lines_sep = '\n' + ' ' * indent
if list_sep is None:
mlog.log(*arr, sep=lines_sep, display_timestamp=False)
return
max_len = shutil.get_terminal_size().columns
line = []
line_len = indent
lines_sep = list_sep.rstrip() + lines_sep
for v in arr:
v_len = len(v) + len(list_sep)
if line and line_len + v_len > max_len:
mlog.log(*line, sep=list_sep, end=lines_sep)
line_len = indent
line = []
line.append(v)
line_len += v_len
mlog.log(*line, sep=list_sep, display_timestamp=False)
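# An approximate sketch (illustrative project name and values, not real output)
# of what the Summary class renders at the end of configuration:
#
#     myproject 1.0
#
#       Features
#         docs      : YES
#         backends  : ninja vs2022
#
#       User defined options
#         buildtype : debug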
known_library_kwargs = (
build.known_shlib_kwargs |
build.known_stlib_kwargs |
{f'{l}_shared_args' for l in compilers.all_languages - {'java'}} |
{f'{l}_static_args' for l in compilers.all_languages - {'java'}}
)
known_build_target_kwargs = (
known_library_kwargs |
build.known_exe_kwargs |
build.known_jar_kwargs |
{'target_type'}
)
class InterpreterRuleRelaxation(Enum):
''' Defines specific relaxations of the Meson rules.
This is intended to be used for automatically converted
projects (CMake subprojects, build system mixing) that
generate a Meson AST via introspection, etc.
'''
ALLOW_BUILD_DIR_FILE_REFERENCES = 1
permitted_dependency_kwargs = {
'allow_fallback',
'cmake_args',
'cmake_module_path',
'cmake_package_version',
'components',
'default_options',
'fallback',
'include_type',
'language',
'main',
'method',
'modules',
'native',
'not_found_message',
'optional_modules',
'private_headers',
'required',
'static',
'version',
}
implicit_check_false_warning = """You should add the boolean check kwarg to the run_command call.
It currently defaults to false,
but it will default to true in meson 2.0.
See also: https://github.com/mesonbuild/meson/issues/9300"""
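# The recommended meson.build call, passing the check kwarg explicitly so the
# planned default change does not alter behaviour (the script path is a
# hypothetical example):
#
#     result = run_command('scripts/gen-version.sh', check: true)
#     version = result.stdout().strip()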
class Interpreter(InterpreterBase, HoldableObject):
def __init__(
self,
_build: build.Build,
backend: T.Optional[Backend] = None,
subproject: str = '',
subdir: str = '',
subproject_dir: str = 'subprojects',
default_project_options: T.Optional[T.Dict[OptionKey, str]] = None,
ast: T.Optional[mparser.CodeBlockNode] = None,
is_translated: bool = False,
relaxations: T.Optional[T.Set[InterpreterRuleRelaxation]] = None,
user_defined_options: T.Optional[coredata.SharedCMDOptions] = None,
) -> None:
super().__init__(_build.environment.get_source_dir(), subdir, subproject)
self.active_projectname = ''
self.build = _build
self.environment = self.build.environment
self.coredata = self.environment.get_coredata()
self.backend = backend
self.summary: T.Dict[str, 'Summary'] = {}
self.modules: T.Dict[str, NewExtensionModule] = {}
# Subproject directory is usually the name of the subproject, but can
# be different for dependencies provided by wrap files.
self.subproject_directory_name = subdir.split(os.path.sep)[-1]
self.subproject_dir = subproject_dir
self.relaxations = relaxations or set()
if ast is None:
self.load_root_meson_file()
else:
self.ast = ast
self.sanity_check_ast()
self.builtin.update({'meson': MesonMain(self.build, self)})
self.generators: T.List[build.Generator] = []
self.processed_buildfiles: T.Set[str] = set()
self.project_args_frozen = False
self.global_args_frozen = False # implies self.project_args_frozen
self.subprojects: T.Dict[str, SubprojectHolder] = {}
self.subproject_stack: T.List[str] = []
self.configure_file_outputs: T.Dict[str, int] = {}
# Passed from the outside, only used in subprojects.
if default_project_options:
self.default_project_options = default_project_options.copy()
else:
self.default_project_options = {}
self.project_default_options: T.Dict[OptionKey, str] = {}
self.build_func_dict()
self.build_holder_map()
self.user_defined_options = user_defined_options
self.compilers: PerMachine[T.Dict[str, 'compilers.Compiler']] = PerMachine({}, {})
# build_def_files needs to be defined before parse_project is called
#
# For non-meson subprojects, we'll be using the ast. Even if the build file
# does exist, we don't want to add a dependency on it; it's autogenerated
# from the actual build files and is just for reference.
self.build_def_files: mesonlib.OrderedSet[str] = mesonlib.OrderedSet()
build_filename = os.path.join(self.subdir, environment.build_filename)
if not is_translated:
self.build_def_files.add(build_filename)
self.parse_project()
self._redetect_machines()
def __getnewargs_ex__(self) -> T.Tuple[T.Tuple[object], T.Dict[str, object]]:
raise MesonBugException('This class is unpicklable')
def _redetect_machines(self) -> None:
# Re-initialize machine descriptions. We can do a better job now because we
# have the compilers needed to gain more knowledge, so wipe out old
# inference and start over.
machines = self.build.environment.machines.miss_defaulting()
machines.build = environment.detect_machine_info(self.coredata.compilers.build)
self.build.environment.machines = machines.default_missing()
assert self.build.environment.machines.build.cpu is not None
assert self.build.environment.machines.host.cpu is not None
assert self.build.environment.machines.target.cpu is not None
self.builtin['build_machine'] = \
OBJ.MachineHolder(self.build.environment.machines.build, self)
self.builtin['host_machine'] = \
OBJ.MachineHolder(self.build.environment.machines.host, self)
self.builtin['target_machine'] = \
OBJ.MachineHolder(self.build.environment.machines.target, self)
def build_func_dict(self) -> None:
self.funcs.update({'add_global_arguments': self.func_add_global_arguments,
'add_global_link_arguments': self.func_add_global_link_arguments,
'add_languages': self.func_add_languages,
'add_project_arguments': self.func_add_project_arguments,
'add_project_dependencies': self.func_add_project_dependencies,
'add_project_link_arguments': self.func_add_project_link_arguments,
'add_test_setup': self.func_add_test_setup,
'alias_target': self.func_alias_target,
'assert': self.func_assert,
'benchmark': self.func_benchmark,
'both_libraries': self.func_both_lib,
'build_target': self.func_build_target,
'configuration_data': self.func_configuration_data,
'configure_file': self.func_configure_file,
'custom_target': self.func_custom_target,
'debug': self.func_debug,
'declare_dependency': self.func_declare_dependency,
'dependency': self.func_dependency,
'disabler': self.func_disabler,
'environment': self.func_environment,
'error': self.func_error,
'executable': self.func_executable,
'files': self.func_files,
'find_program': self.func_find_program,
'generator': self.func_generator,
'get_option': self.func_get_option,
'get_variable': self.func_get_variable,
'import': self.func_import,
'include_directories': self.func_include_directories,
'install_data': self.func_install_data,
'install_emptydir': self.func_install_emptydir,
'install_headers': self.func_install_headers,
'install_man': self.func_install_man,
'install_subdir': self.func_install_subdir,
'install_symlink': self.func_install_symlink,
'is_disabler': self.func_is_disabler,
'is_variable': self.func_is_variable,
'jar': self.func_jar,
'join_paths': self.func_join_paths,
'library': self.func_library,
'message': self.func_message,
'option': self.func_option,
'project': self.func_project,
'range': self.func_range,
'run_command': self.func_run_command,
'run_target': self.func_run_target,
'set_variable': self.func_set_variable,
'structured_sources': self.func_structured_sources,
'subdir': self.func_subdir,
'shared_library': self.func_shared_lib,
'shared_module': self.func_shared_module,
'static_library': self.func_static_lib,
'subdir_done': self.func_subdir_done,
'subproject': self.func_subproject,
'summary': self.func_summary,
'test': self.func_test,
'unset_variable': self.func_unset_variable,
'vcs_tag': self.func_vcs_tag,
'warning': self.func_warning,
})
if 'MESON_UNIT_TEST' in os.environ:
self.funcs.update({'exception': self.func_exception})
if 'MESON_RUNNING_IN_PROJECT_TESTS' in os.environ:
self.funcs.update({'expect_error': self.func_expect_error})
def build_holder_map(self) -> None:
'''
Build a mapping of `HoldableObject` types to their corresponding
`ObjectHolder`s. This mapping is used in `InterpreterBase` to automatically
holderify all returned values from methods and functions.
'''
self.holder_map.update({
# Primitives
list: P_OBJ.ArrayHolder,
dict: P_OBJ.DictHolder,
int: P_OBJ.IntegerHolder,
bool: P_OBJ.BooleanHolder,
str: P_OBJ.StringHolder,
P_OBJ.MesonVersionString: P_OBJ.MesonVersionStringHolder,
P_OBJ.DependencyVariableString: P_OBJ.DependencyVariableStringHolder,
P_OBJ.OptionString: P_OBJ.OptionStringHolder,
# Meson types
mesonlib.File: OBJ.FileHolder,
build.SharedLibrary: OBJ.SharedLibraryHolder,
build.StaticLibrary: OBJ.StaticLibraryHolder,
build.BothLibraries: OBJ.BothLibrariesHolder,
build.SharedModule: OBJ.SharedModuleHolder,
build.Executable: OBJ.ExecutableHolder,
build.Jar: OBJ.JarHolder,
build.CustomTarget: OBJ.CustomTargetHolder,
build.CustomTargetIndex: OBJ.CustomTargetIndexHolder,
build.Generator: OBJ.GeneratorHolder,
build.GeneratedList: OBJ.GeneratedListHolder,
build.ExtractedObjects: OBJ.GeneratedObjectsHolder,
build.RunTarget: OBJ.RunTargetHolder,
build.AliasTarget: OBJ.AliasTargetHolder,
build.Headers: OBJ.HeadersHolder,
build.Man: OBJ.ManHolder,
build.EmptyDir: OBJ.EmptyDirHolder,
build.Data: OBJ.DataHolder,
build.SymlinkData: OBJ.SymlinkDataHolder,
build.InstallDir: OBJ.InstallDirHolder,
build.IncludeDirs: OBJ.IncludeDirsHolder,
mesonlib.EnvironmentVariables: OBJ.EnvironmentVariablesHolder,
build.StructuredSources: OBJ.StructuredSourcesHolder,
compilers.RunResult: compilerOBJ.TryRunResultHolder,
dependencies.ExternalLibrary: OBJ.ExternalLibraryHolder,
options.UserFeatureOption: OBJ.FeatureOptionHolder,
envconfig.MachineInfo: OBJ.MachineHolder,
build.ConfigurationData: OBJ.ConfigurationDataHolder,
})
'''
Build a mapping of `HoldableObject` base classes to their
corresponding `ObjectHolder`s. The difference to `self.holder_map`
is that the keys here define an upper bound instead of requiring an
exact match.
The mappings defined here are only used when there was no direct hit
found in `self.holder_map`.
'''
self.bound_holder_map.update({
dependencies.Dependency: OBJ.DependencyHolder,
ExternalProgram: OBJ.ExternalProgramHolder,
compilers.Compiler: compilerOBJ.CompilerHolder,
ModuleObject: OBJ.ModuleObjectHolder,
MutableModuleObject: OBJ.MutableModuleObjectHolder,
})
def append_holder_map(self, held_type: T.Type[mesonlib.HoldableObject], holder_type: T.Type[ObjectHolder]) -> None:
'''
Adds one additional mapping to the `holder_map`.
The intended use for this function is in the `initialize` method of
modules to register custom object holders.
'''
self.holder_map.update({
held_type: holder_type
})
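# Sketch of how a module's initialize() is expected to call this hook; the
# module, held type, and holder names below are placeholders, not real Meson
# classes:
#
#     def initialize(interp: 'Interpreter') -> MyModule:
#         mod = MyModule(interp)
#         interp.append_holder_map(MyHoldable, MyHoldableHolder)
#         return mod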
def process_new_values(self, invalues: T.List[T.Union[TYPE_var, ExecutableSerialisation]]) -> None:
invalues = listify(invalues)
for v in invalues:
if isinstance(v, ObjectHolder):
raise InterpreterException('Modules must not return ObjectHolders')
if isinstance(v, (build.BuildTarget, build.CustomTarget, build.RunTarget)):
self.add_target(v.name, v)
elif isinstance(v, list):
self.process_new_values(v)
elif isinstance(v, ExecutableSerialisation):
v.subproject = self.subproject
self.build.install_scripts.append(v)
elif isinstance(v, build.Data):
self.build.data.append(v)
elif isinstance(v, build.SymlinkData):
self.build.symlinks.append(v)
elif isinstance(v, dependencies.InternalDependency):
# FIXME: This is special cased and not ideal:
# The first source is our new VapiTarget, the rest are deps
self.process_new_values(v.sources[0])
elif isinstance(v, build.InstallDir):
self.build.install_dirs.append(v)
elif isinstance(v, Test):
self.build.tests.append(v)
elif isinstance(v, (int, str, bool, Disabler, ObjectHolder, build.GeneratedList,
ExternalProgram, build.ConfigurationData)):
pass
else:
raise InterpreterException(f'Module returned a value of unknown type {v!r}.')
def handle_meson_version(self, pv: str, location: mparser.BaseNode) -> None:
if not mesonlib.version_compare(coredata.stable_version, pv):
raise InterpreterException.from_node(f'Meson version is {coredata.version} but project requires {pv}', node=location)
mesonlib.project_meson_versions[self.subproject] = pv
def handle_meson_version_from_ast(self) -> None:
if not self.ast.lines:
return
project = self.ast.lines[0]
# first line is always project()
if not isinstance(project, mparser.FunctionNode):
return
for kw, val in project.args.kwargs.items():
assert isinstance(kw, mparser.IdNode), 'for mypy'
if kw.value == 'meson_version':
# mypy does not understand "and isinstance"
if isinstance(val, mparser.StringNode):
self.handle_meson_version(val.value, val)
def get_build_def_files(self) -> mesonlib.OrderedSet[str]:
return self.build_def_files
def add_build_def_file(self, f: mesonlib.FileOrString) -> None:
# Use a relative path for files within the source directory, and an absolute
# path for system files. Skip files within the build directory. Also skip
# files that are not regular files (e.g. /dev/stdout). Normalize the path to
# avoid duplicates; this is especially important to convert '/' to '\' on
# Windows.
if isinstance(f, mesonlib.File):
if f.is_built:
return
f = os.path.normpath(f.relative_name())
elif os.path.isfile(f) and not f.startswith('/dev/'):
srcdir = Path(self.environment.get_source_dir())
builddir = Path(self.environment.get_build_dir())
try:
f_ = Path(f).resolve()
except OSError:
f_ = Path(f)
s = f_.stat()
if (hasattr(s, 'st_file_attributes') and
s.st_file_attributes & stat.FILE_ATTRIBUTE_REPARSE_POINT != 0 and
s.st_reparse_tag == stat.IO_REPARSE_TAG_APPEXECLINK):
# This is a Windows Store link which we can't
# resolve, so just do our best otherwise.
f_ = f_.parent.resolve() / f_.name
else:
raise
if builddir in f_.parents:
return
if srcdir in f_.parents:
f_ = f_.relative_to(srcdir)
f = str(f_)
else:
return
if f not in self.build_def_files:
self.build_def_files.add(f)
def get_variables(self) -> T.Dict[str, InterpreterObject]:
return self.variables
def check_stdlibs(self) -> None:
machine_choices = [MachineChoice.HOST]
if self.coredata.is_cross_build():
machine_choices.append(MachineChoice.BUILD)
for for_machine in machine_choices:
props = self.build.environment.properties[for_machine]
for l in self.coredata.compilers[for_machine].keys():
try:
di = mesonlib.stringlistify(props.get_stdlib(l))
except KeyError:
continue
if len(di) == 1:
FeatureNew.single_use('stdlib without variable name', '0.56.0', self.subproject, location=self.current_node)
kwargs = {'native': for_machine is MachineChoice.BUILD,
}
name = l + '_stdlib'
df = DependencyFallbacksHolder(self, [name])
df.set_fallback(di)
dep = df.lookup(kwargs, force_fallback=True)
self.build.stdlibs[for_machine][l] = dep
@typed_pos_args('import', str)
@typed_kwargs(
'import',
REQUIRED_KW.evolve(since='0.59.0'),
DISABLER_KW.evolve(since='0.59.0'),
)
@disablerIfNotFound
def func_import(self, node: mparser.BaseNode, args: T.Tuple[str],
kwargs: 'kwtypes.FuncImportModule') -> T.Union[ExtensionModule, NewExtensionModule, NotFoundExtensionModule]:
modname = args[0]
disabled, required, _ = extract_required_kwarg(kwargs, self.subproject)
if disabled:
return NotFoundExtensionModule(modname)
# Always report that implementation-detail modules don't exist
if modname.startswith('_'):
raise InvalidArguments(f'Module "{modname}" does not exist')
expect_unstable = False
# Some tests use "unstable_" instead of "unstable-", and that happens to work because
# of implementation details
if modname.startswith(('unstable-', 'unstable_')):
if modname.startswith('unstable_'):
mlog.deprecation(f'Importing unstable modules as "{modname}" instead of "{modname.replace("_", "-", 1)}"',
location=node)
real_modname = modname[len('unstable') + 1:] # + 1 to handle the - or _
expect_unstable = True
else:
real_modname = modname
if real_modname in self.modules:
return self.modules[real_modname]
try:
module = importlib.import_module(f'mesonbuild.modules.{real_modname}')
except ImportError:
if required:
raise InvalidArguments(f'Module "{modname}" does not exist')
ext_module = NotFoundExtensionModule(real_modname)
else:
ext_module = module.initialize(self)
assert isinstance(ext_module, (ExtensionModule, NewExtensionModule)), 'for mypy'
self.build.modules.append(real_modname)
if ext_module.INFO.added:
FeatureNew.single_use(f'module {ext_module.INFO.name}', ext_module.INFO.added, self.subproject, location=node)
if ext_module.INFO.deprecated:
FeatureDeprecated.single_use(f'module {ext_module.INFO.name}', ext_module.INFO.deprecated, self.subproject, location=node)
if expect_unstable and not ext_module.INFO.unstable and ext_module.INFO.stabilized is None:
raise InvalidArguments(f'Module {ext_module.INFO.name} has never been unstable, remove "unstable-" prefix.')
if ext_module.INFO.stabilized is not None:
if expect_unstable:
FeatureDeprecated.single_use(
f'module {ext_module.INFO.name} has been stabilized',
ext_module.INFO.stabilized, self.subproject,
'drop "unstable-" prefix from the module name',
location=node)
else:
FeatureNew.single_use(
f'module {ext_module.INFO.name} as stable module',
ext_module.INFO.stabilized, self.subproject,
f'Consider either adding "unstable-" to the module name, or updating the meson required version to ">= {ext_module.INFO.stabilized}"',
location=node)
elif ext_module.INFO.unstable:
if not expect_unstable:
if required:
raise InvalidArguments(f'Module "{ext_module.INFO.name}" has not been stabilized, and must be imported as unstable-{ext_module.INFO.name}')
ext_module = NotFoundExtensionModule(real_modname)
else:
mlog.warning(f'Module {ext_module.INFO.name} has no backwards or forwards compatibility and might not exist in future releases.', location=node, fatal=False)
self.modules[real_modname] = ext_module
return ext_module
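# Typical meson.build usage of this entry point (the module names are only
# examples):
#
#     fs = import('fs')
#     maybe_mod = import('unstable-foo', required: false)
#
# The 'unstable-' prefix is accepted only while a module is actually unstable;
# once it is stabilized the prefix triggers the deprecation notice above.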
@typed_pos_args('files', varargs=str)
@noKwargs
def func_files(self, node: mparser.FunctionNode, args: T.Tuple[T.List[str]], kwargs: 'TYPE_kwargs') -> T.List[mesonlib.File]:
return self.source_strings_to_files(args[0])
@noPosargs
@typed_kwargs(
'declare_dependency',
KwargInfo('compile_args', ContainerTypeInfo(list, str), listify=True, default=[]),
INCLUDE_DIRECTORIES.evolve(name='d_import_dirs', since='0.62.0'),
D_MODULE_VERSIONS_KW.evolve(since='0.62.0'),
KwargInfo('link_args', ContainerTypeInfo(list, str), listify=True, default=[]),
DEPENDENCIES_KW,
INCLUDE_DIRECTORIES,
LINK_WITH_KW,
LINK_WHOLE_KW.evolve(since='0.46.0'),
DEPENDENCY_SOURCES_KW,
KwargInfo('extra_files', ContainerTypeInfo(list, (mesonlib.File, str)), listify=True, default=[], since='1.2.0'),
VARIABLES_KW.evolve(since='0.54.0', since_values={list: '0.56.0'}),
KwargInfo('version', (str, NoneType)),
KwargInfo('objects', ContainerTypeInfo(list, build.ExtractedObjects), listify=True, default=[], since='1.1.0'),
)
@noSecondLevelHolderResolving
def func_declare_dependency(self, node: mparser.BaseNode, args: T.List[TYPE_var],
kwargs: kwtypes.FuncDeclareDependency) -> dependencies.Dependency:
deps = kwargs['dependencies']
incs = self.extract_incdirs(kwargs)
libs = kwargs['link_with']
libs_whole = kwargs['link_whole']
objects = kwargs['objects']
sources = self.source_strings_to_files(kwargs['sources'])
extra_files = self.source_strings_to_files(kwargs['extra_files'])
compile_args = kwargs['compile_args']
link_args = kwargs['link_args']
variables = kwargs['variables']
version = kwargs['version']
if version is None:
version = self.project_version
d_module_versions = kwargs['d_module_versions']
d_import_dirs = self.extract_incdirs(kwargs, 'd_import_dirs')
srcdir = Path(self.environment.source_dir)
# convert variables which refer to an -uninstalled.pc style datadir
for k, v in variables.items():
if not v:
FeatureNew.single_use('empty variable value in declare_dependency', '1.4.0', self.subproject, location=node)
try:
p = Path(v)
except ValueError:
continue
else:
if not self.is_subproject() and srcdir / self.subproject_dir in p.parents:
continue
if p.is_absolute() and p.is_dir() and srcdir / self.root_subdir in [p] + list(Path(os.path.abspath(p)).parents):
variables[k] = P_OBJ.DependencyVariableString(v)
dep = dependencies.InternalDependency(version, incs, compile_args,
link_args, libs, libs_whole, sources, extra_files,
deps, variables, d_module_versions, d_import_dirs,
objects)
return dep
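# A minimal meson.build sketch of the call handled above (target and directory
# names are hypothetical):
#
#     foo_inc = include_directories('include')
#     foo_lib = static_library('foo', 'foo.c')
#     foo_dep = declare_dependency(include_directories: foo_inc,
#                                  link_with: foo_lib,
#                                  version: '1.2.0')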
@typed_pos_args('assert', bool, optargs=[str])
@noKwargs
def func_assert(self, node: mparser.FunctionNode, args: T.Tuple[bool, T.Optional[str]],
kwargs: 'TYPE_kwargs') -> None:
value, message = args
if message is None:
FeatureNew.single_use('assert function without message argument', '0.53.0', self.subproject, location=node)
if not value:
if message is None:
from ..ast import AstPrinter
printer = AstPrinter()
node.args.arguments[0].accept(printer)
message = printer.result
raise InterpreterException('Assert failed: ' + message)
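# Example meson.build usage; omitting the message argument is only allowed
# since 0.53.0, as checked above:
#
#     assert(host_machine.system() != 'windows', 'Windows is not supported')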
def validate_arguments(self, args, argcount, arg_types):
if argcount is not None:
if argcount != len(args):
raise InvalidArguments(f'Expected {argcount} arguments, got {len(args)}.')
for actual, wanted in zip(args, arg_types):
if wanted is not None:
if not isinstance(actual, wanted):
raise InvalidArguments('Incorrect argument type.')
# Executables aren't actually accepted, but we allow them here so that we can
# give a better error message when a program has been overridden with a
# compiled executable.
@typed_pos_args(
'run_command',
(build.Executable, ExternalProgram, compilers.Compiler, mesonlib.File, str),
varargs=(build.Executable, ExternalProgram, compilers.Compiler, mesonlib.File, str))
@typed_kwargs(
'run_command',
KwargInfo('check', (bool, NoneType), since='0.47.0'),
KwargInfo('capture', bool, default=True, since='0.47.0'),
ENV_KW.evolve(since='0.50.0'),
)
def func_run_command(self, node: mparser.BaseNode,
args: T.Tuple[T.Union[build.Executable, ExternalProgram, compilers.Compiler, mesonlib.File, str],
T.List[T.Union[build.Executable, ExternalProgram, compilers.Compiler, mesonlib.File, str]]],
kwargs: 'kwtypes.RunCommand') -> RunProcess:
return self.run_command_impl(args, kwargs)
def run_command_impl(self,
args: T.Tuple[T.Union[build.Executable, ExternalProgram, compilers.Compiler, mesonlib.File, str],
T.List[T.Union[build.Executable, ExternalProgram, compilers.Compiler, mesonlib.File, str]]],
kwargs: 'kwtypes.RunCommand',
in_builddir: bool = False) -> RunProcess:
cmd, cargs = args
capture = kwargs['capture']
env = kwargs['env']
srcdir = self.environment.get_source_dir()
builddir = self.environment.get_build_dir()
check = kwargs['check']
if check is None:
mlog.warning(implicit_check_false_warning, once=True)
check = False
overridden_msg = ('Program {!r} was overridden with the compiled '
'executable {!r} and therefore cannot be used during '
'configuration')
expanded_args: T.List[str] = []
if isinstance(cmd, build.Executable):
for name, exe in self.build.find_overrides.items():
if cmd == exe:
progname = name
break
else:
raise InterpreterException(f'Program {cmd.description()!r} is a compiled executable and therefore cannot be used during configuration')
raise InterpreterException(overridden_msg.format(progname, cmd.description()))
if isinstance(cmd, ExternalProgram):
if not cmd.found():
raise InterpreterException(f'command {cmd.get_name()!r} not found or not executable')
elif isinstance(cmd, compilers.Compiler):
exelist = cmd.get_exelist()
cmd = exelist[0]
prog = ExternalProgram(cmd, silent=True)
if not prog.found():
raise InterpreterException(f'Program {cmd!r} not found or not executable')
cmd = prog
expanded_args = exelist[1:]
else:
if isinstance(cmd, mesonlib.File):
cmd = cmd.absolute_path(srcdir, builddir)
# Prefer scripts in the current source directory
search_dir = os.path.join(srcdir, self.subdir)
prog = ExternalProgram(cmd, silent=True, search_dirs=[search_dir])
if not prog.found():
raise InterpreterException(f'Program or command {cmd!r} not found or not executable')
cmd = prog
for a in cargs:
if isinstance(a, str):
expanded_args.append(a)
elif isinstance(a, mesonlib.File):
expanded_args.append(a.absolute_path(srcdir, builddir))
elif isinstance(a, ExternalProgram):
expanded_args.append(a.get_path())
elif isinstance(a, compilers.Compiler):
FeatureNew.single_use('Compiler object as a variadic argument to `run_command`', '0.61.0', self.subproject, location=self.current_node)
prog = ExternalProgram(a.exelist[0], silent=True)
if not prog.found():
raise InterpreterException(f'Program {cmd!r} not found or not executable')
expanded_args.append(prog.get_path())
else:
raise InterpreterException(overridden_msg.format(a.name, cmd.description()))
# If any file that was used as an argument to the command
# changes, we must re-run the configuration step.
self.add_build_def_file(cmd.get_path())
for a in expanded_args:
if not os.path.isabs(a):
a = os.path.join(builddir if in_builddir else srcdir, self.subdir, a)
self.add_build_def_file(a)
return RunProcess(cmd, expanded_args, env, srcdir, builddir, self.subdir,
self.environment.get_build_command() + ['introspect'],
in_builddir=in_builddir, check=check, capture=capture)
def func_option(self, nodes, args, kwargs):
raise InterpreterException('Tried to call option() in build description file. All options must be in the option file.')
@typed_pos_args('subproject', str)
@typed_kwargs(
'subproject',
REQUIRED_KW,
DEFAULT_OPTIONS.evolve(since='0.38.0'),
KwargInfo('version', ContainerTypeInfo(list, str), default=[], listify=True),
)
def func_subproject(self, nodes: mparser.BaseNode, args: T.Tuple[str], kwargs: kwtypes.Subproject) -> SubprojectHolder:
kw: kwtypes.DoSubproject = {
'required': kwargs['required'],
'default_options': kwargs['default_options'],
'version': kwargs['version'],
'options': None,
'cmake_options': [],
}
return self.do_subproject(args[0], kw)
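# Typical meson.build usage of subproject() (the names and version constraint
# are examples):
#
#     zlib_proj = subproject('zlib', required: false, version: '>= 1.2.8')
#     if zlib_proj.found()
#         zlib_dep = zlib_proj.get_variable('zlib_dep')
#     endif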
def disabled_subproject(self, subp_name: str, disabled_feature: T.Optional[str] = None,
exception: T.Optional[Exception] = None) -> SubprojectHolder:
sub = SubprojectHolder(NullSubprojectInterpreter(), os.path.join(self.subproject_dir, subp_name),
disabled_feature=disabled_feature, exception=exception)
self.subprojects[subp_name] = sub
return sub
def do_subproject(self, subp_name: str, kwargs: kwtypes.DoSubproject, force_method: T.Optional[wrap.Method] = None) -> SubprojectHolder:
disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
if disabled:
assert feature, 'for mypy'
mlog.log('Subproject', mlog.bold(subp_name), ':', 'skipped: feature', mlog.bold(feature), 'disabled')
return self.disabled_subproject(subp_name, disabled_feature=feature)
default_options = {k.evolve(subproject=subp_name): v for k, v in kwargs['default_options'].items()}
if subp_name == '':
raise InterpreterException('Subproject name must not be empty.')
if subp_name[0] == '.':
raise InterpreterException('Subproject name must not start with a period.')
if '..' in subp_name:
raise InterpreterException('Subproject name must not contain a ".." path segment.')
if os.path.isabs(subp_name):
raise InterpreterException('Subproject name must not be an absolute path.')
if has_path_sep(subp_name):
mlog.warning('Subproject name has a path separator. This may cause unexpected behaviour.',
location=self.current_node)
if subp_name in self.subproject_stack:
fullstack = self.subproject_stack + [subp_name]
incpath = ' => '.join(fullstack)
raise InvalidCode(f'Recursive include of subprojects: {incpath}.')
if subp_name in self.subprojects:
subproject = self.subprojects[subp_name]
if required and not subproject.found():
raise InterpreterException(f'Subproject "{subproject.subdir}" required but not found.')
if kwargs['version']:
pv = self.build.subprojects[subp_name]
wanted = kwargs['version']
if pv == 'undefined' or not mesonlib.version_compare_many(pv, wanted)[0]:
raise InterpreterException(f'Subproject {subp_name} version is {pv} but {wanted} required.')
return subproject
r = self.environment.wrap_resolver
try:
subdir, method = r.resolve(subp_name, force_method)
except wrap.WrapException as e:
if not required:
mlog.log(e)
mlog.log('Subproject ', mlog.bold(subp_name), 'is buildable:', mlog.red('NO'), '(disabling)')
return self.disabled_subproject(subp_name, exception=e)
raise e
os.makedirs(os.path.join(self.build.environment.get_build_dir(), subdir), exist_ok=True)
self.global_args_frozen = True
stack = ':'.join(self.subproject_stack + [subp_name])
m = ['\nExecuting subproject', mlog.bold(stack)]
if method != 'meson':
m += ['method', mlog.bold(method)]
mlog.log(*m, '\n', nested=False)
methods_map: T.Dict[wrap.Method, T.Callable[[str, str, T.Dict[OptionKey, str], kwtypes.DoSubproject], SubprojectHolder]] = {
'meson': self._do_subproject_meson,
'cmake': self._do_subproject_cmake,
'cargo': self._do_subproject_cargo,
}
try:
return methods_map[method](subp_name, subdir, default_options, kwargs)
# Invalid code is always an error
except InvalidCode:
raise
except Exception as e:
if not required:
with mlog.nested(subp_name):
# Suppress the 'ERROR:' prefix because this exception is not
# fatal and VS CI treats any logs containing "ERROR:" as fatal.
mlog.exception(e, prefix=mlog.yellow('Exception:'))
mlog.log('\nSubproject', mlog.bold(subdir), 'is buildable:', mlog.red('NO'), '(disabling)')
return self.disabled_subproject(subp_name, exception=e)
raise e
def _do_subproject_meson(self, subp_name: str, subdir: str,
default_options: T.Dict[OptionKey, str],
kwargs: kwtypes.DoSubproject,
ast: T.Optional[mparser.CodeBlockNode] = None,
build_def_files: T.Optional[T.List[str]] = None,
relaxations: T.Optional[T.Set[InterpreterRuleRelaxation]] = None) -> SubprojectHolder:
with mlog.nested(subp_name):
if ast:
# Debug print the generated meson file
from ..ast import AstIndentationGenerator, AstPrinter
printer = AstPrinter(update_ast_line_nos=True)
ast.accept(AstIndentationGenerator())
ast.accept(printer)
printer.post_process()
meson_filename = os.path.join(self.build.environment.get_build_dir(), subdir, 'meson.build')
with open(meson_filename, "w", encoding='utf-8') as f:
f.write(printer.result)
mlog.log('Generated Meson AST:', meson_filename)
mlog.cmd_ci_include(meson_filename)
new_build = self.build.copy()
subi = Interpreter(new_build, self.backend, subp_name, subdir, self.subproject_dir,
default_options, ast=ast, is_translated=(ast is not None),
relaxations=relaxations,
user_defined_options=self.user_defined_options)
# Those lists are shared by all interpreters. That means that
# even if the subproject fails, any modification that the subproject
# made to those lists will affect the parent project.
subi.subprojects = self.subprojects
subi.modules = self.modules
subi.holder_map = self.holder_map
subi.bound_holder_map = self.bound_holder_map
subi.summary = self.summary
subi.subproject_stack = self.subproject_stack + [subp_name]
current_active = self.active_projectname
with mlog.nested_warnings():
subi.run()
subi_warnings = mlog.get_warning_count()
mlog.log('Subproject', mlog.bold(subp_name), 'finished.')
mlog.log()
if kwargs['version']:
pv = subi.project_version
wanted = kwargs['version']
if pv == 'undefined' or not mesonlib.version_compare_many(pv, wanted)[0]:
raise InterpreterException(f'Subproject {subp_name} version is {pv} but {wanted} required.')
self.active_projectname = current_active
self.subprojects.update(subi.subprojects)
self.subprojects[subp_name] = SubprojectHolder(subi, subdir, warnings=subi_warnings,
callstack=self.subproject_stack)
# Duplicates are possible when subproject uses files from project root
if build_def_files:
self.build_def_files.update(build_def_files)
# We always need the subi.build_def_files, to propagate sub-sub-projects
self.build_def_files.update(subi.build_def_files)
self.build.merge(subi.build)
self.build.subprojects[subp_name] = subi.project_version
return self.subprojects[subp_name]
def _do_subproject_cmake(self, subp_name: str, subdir: str,
default_options: T.Dict[OptionKey, str],
kwargs: kwtypes.DoSubproject) -> SubprojectHolder:
from ..cmake import CMakeInterpreter
with mlog.nested(subp_name):
prefix = self.coredata.optstore.get_value('prefix')
from ..modules.cmake import CMakeSubprojectOptions
options = kwargs.get('options') or CMakeSubprojectOptions()
cmake_options = kwargs.get('cmake_options', []) + options.cmake_options
cm_int = CMakeInterpreter(Path(subdir), Path(prefix), self.build.environment, self.backend)
cm_int.initialise(cmake_options)
cm_int.analyse()
# Generate a meson ast and execute it with the normal do_subproject_meson
ast = cm_int.pretend_to_be_meson(options.target_options)
result = self._do_subproject_meson(
subp_name, subdir, default_options,
kwargs, ast,
[str(f) for f in cm_int.bs_files],
relaxations={
InterpreterRuleRelaxation.ALLOW_BUILD_DIR_FILE_REFERENCES,
}
)
result.cm_interpreter = cm_int
return result
def _do_subproject_cargo(self, subp_name: str, subdir: str,
default_options: T.Dict[OptionKey, str],
kwargs: kwtypes.DoSubproject) -> SubprojectHolder:
from .. import cargo
FeatureNew.single_use('Cargo subproject', '1.3.0', self.subproject, location=self.current_node)
mlog.warning('Cargo subproject is an experimental feature and has no backwards compatibility guarantees.',
once=True, location=self.current_node)
if self.environment.cargo is None:
self.environment.cargo = cargo.Interpreter(self.environment)
with mlog.nested(subp_name):
ast = self.environment.cargo.interpret(subdir)
return self._do_subproject_meson(
subp_name, subdir, default_options, kwargs, ast,
# FIXME: Are there other files used by cargo interpreter?
[os.path.join(subdir, 'Cargo.toml')])
def get_option_internal(self, optname: str) -> options.UserOption:
key = OptionKey.from_string(optname).evolve(subproject=self.subproject)
if not self.environment.coredata.optstore.is_project_option(key):
for opts in [self.coredata.optstore, compilers.base_options]:
v = opts.get(key)
if v is None or v.yielding:
v = opts.get(key.as_root())
if v is not None:
assert isinstance(v, options.UserOption), 'for mypy'
return v
try:
opt = self.coredata.optstore.get_value_object(key)
if opt.yielding and key.subproject and key.as_root() in self.coredata.optstore:
popt = self.coredata.optstore.get_value_object(key.as_root())
if type(opt) is type(popt):
opt = popt
else:
# Get class name, then option type as a string
opt_type = opt.__class__.__name__[4:][:-6].lower()
popt_type = popt.__class__.__name__[4:][:-6].lower()
# This is not a hard error to avoid dependency hell, the workaround
# when this happens is to simply set the subproject's option directly.
mlog.warning('Option {0!r} of type {1!r} in subproject {2!r} cannot yield '
'to parent option of type {3!r}, ignoring parent value. '
'Use -D{2}:{0}=value to set the value for this option manually'
'.'.format(optname, opt_type, self.subproject, popt_type),
location=self.current_node)
return opt
except KeyError:
pass
raise InterpreterException(f'Tried to access unknown option {optname!r}.')
@typed_pos_args('get_option', str)
@noKwargs
def func_get_option(self, nodes: mparser.BaseNode, args: T.Tuple[str],
kwargs: 'TYPE_kwargs') -> T.Union[options.UserOption, 'TYPE_var']:
optname = args[0]
if ':' in optname:
raise InterpreterException('Having a colon in option name is forbidden, '
'projects are not allowed to directly access '
'options of other subprojects.')
if optname_regex.search(optname.split('.', maxsplit=1)[-1]) is not None:
raise InterpreterException(f'Invalid option name {optname!r}')
opt = self.get_option_internal(optname)
if isinstance(opt, options.UserFeatureOption):
opt.name = optname
return opt
elif isinstance(opt, options.UserOption):
if isinstance(opt.value, str):
return P_OBJ.OptionString(opt.value, f'{{{optname}}}')
return opt.value
return opt
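# meson.build usage and the command-line override referenced in the warning
# above (option and subproject names are examples):
#
#     if get_option('buildtype') == 'debug'
#         add_project_arguments('-DDEBUG', language: 'c')
#     endif
#
# A subproject option that cannot yield can still be set directly with
# `meson configure -Dmysubproject:myoption=value`.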
@typed_pos_args('configuration_data', optargs=[dict])
@noKwargs
def func_configuration_data(self, node: mparser.BaseNode, args: T.Tuple[T.Optional[T.Dict[str, T.Any]]],
kwargs: 'TYPE_kwargs') -> build.ConfigurationData:
initial_values = args[0]
if initial_values is not None:
FeatureNew.single_use('configuration_data dictionary', '0.49.0', self.subproject, location=node)
for k, v in initial_values.items():
if not isinstance(v, (str, int, bool)):
raise InvalidArguments(
f'"configuration_data": initial value dictionary key "{k!r}"" must be "str | int | bool", not "{v!r}"')
return build.ConfigurationData(initial_values)
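# meson.build sketch of the dictionary form validated above (keys and values
# are illustrative):
#
#     cdata = configuration_data({'HAVE_FOO': 1, 'WITH_BAR': true, 'NAME': 'baz'})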
def set_backend(self) -> None:
# The backend is already set when parsing subprojects
if self.backend is not None:
return
from ..backend import backends
if OptionKey('genvslite') in self.user_defined_options.cmd_line_options.keys():
# Use of the '--genvslite vsxxxx' option ultimately overrides any '--backend xxx'
# option the user may specify.
backend_name = self.coredata.get_option(OptionKey('genvslite'))
self.backend = backends.get_genvslite_backend(backend_name, self.build, self)
else:
backend_name = self.coredata.get_option(OptionKey('backend'))
self.backend = backends.get_backend_from_name(backend_name, self.build, self)
if self.backend is None:
raise InterpreterException(f'Unknown backend "{backend_name}".')
if backend_name != self.backend.name:
if self.backend.name.startswith('vs'):
mlog.log('Auto detected Visual Studio backend:', mlog.bold(self.backend.name))
if not self.environment.first_invocation:
raise MesonBugException(f'Backend changed from {backend_name} to {self.backend.name}')
self.coredata.set_option(OptionKey('backend'), self.backend.name, first_invocation=True)
# Only init backend options on first invocation otherwise it would
# override values previously set from command line.
if self.environment.first_invocation:
self.coredata.init_backend_options(backend_name)
options = {k: v for k, v in self.environment.options.items() if self.environment.coredata.optstore.is_backend_option(k)}
self.coredata.set_options(options)
@typed_pos_args('project', str, varargs=str)
@typed_kwargs(
'project',
DEFAULT_OPTIONS,
KwargInfo('meson_version', (str, NoneType)),
KwargInfo(
'version',
(str, mesonlib.File, NoneType, list),
default='undefined',
validator=_project_version_validator,
convertor=lambda x: x[0] if isinstance(x, list) else x,
),
KwargInfo('license', (ContainerTypeInfo(list, str), NoneType), default=None, listify=True),
KwargInfo('license_files', ContainerTypeInfo(list, str), default=[], listify=True, since='1.1.0'),
KwargInfo('subproject_dir', str, default='subprojects'),
)
def func_project(self, node: mparser.FunctionNode, args: T.Tuple[str, T.List[str]], kwargs: 'kwtypes.Project') -> None:
proj_name, proj_langs = args
if ':' in proj_name:
raise InvalidArguments(f"Project name {proj_name!r} must not contain ':'")
# This needs to be evaluated as early as possible, as meson uses this
# for things like deprecation testing.
if kwargs['meson_version']:
self.handle_meson_version(kwargs['meson_version'], node)
else:
mesonlib.project_meson_versions[self.subproject] = mesonlib.NoProjectVersion()
# Load "meson.options" before "meson_options.txt", and produce a warning if
# it is being used with an old version. I have added check that if both
# exist the warning isn't raised
option_file = os.path.join(self.source_root, self.subdir, 'meson.options')
old_option_file = os.path.join(self.source_root, self.subdir, 'meson_options.txt')
if os.path.exists(option_file):
if os.path.exists(old_option_file):
if os.path.samefile(option_file, old_option_file):
mlog.debug("Not warning about meson.options with version minimum < 1.1 because meson_options.txt also exists")
else:
raise MesonException("meson.options and meson_options.txt both exist, but are not the same file.")
else:
FeatureNew.single_use('meson.options file', '1.1', self.subproject, 'Use meson_options.txt instead')
else:
option_file = old_option_file
if os.path.exists(option_file):
with open(option_file, 'rb') as f:
# We want a fast hash, not a cryptographically secure one; this is just to
# see whether the option file has changed
self.coredata.options_files[self.subproject] = (option_file, hashlib.sha1(f.read()).hexdigest())
oi = optinterpreter.OptionInterpreter(self.environment.coredata.optstore, self.subproject)
oi.process(option_file)
self.coredata.update_project_options(oi.options, self.subproject)
self.add_build_def_file(option_file)
else:
self.coredata.options_files[self.subproject] = None
if self.subproject:
self.project_default_options = {k.evolve(subproject=self.subproject): v
for k, v in kwargs['default_options'].items()}
else:
self.project_default_options = kwargs['default_options']
# Do not set default_options on reconfigure otherwise it would override
# values previously set from command line. That means that changing
# default_options in a project will trigger a reconfigure but won't
# have any effect.
#
# If this is the first invocation we always need to initialize
# builtins, if this is a subproject that is new in a re-invocation we
# need to initialize builtins for that
if self.environment.first_invocation or (self.subproject != '' and self.subproject not in self.coredata.initialized_subprojects):
default_options = self.project_default_options.copy()
default_options.update(self.default_project_options)
self.coredata.init_builtins(self.subproject)
self.coredata.initialized_subprojects.add(self.subproject)
else:
default_options = {}
self.coredata.set_default_options(default_options, self.subproject, self.environment)
if not self.is_subproject():
self.build.project_name = proj_name
self.active_projectname = proj_name
version = kwargs['version']
assert version is not None, 'for mypy'
if isinstance(version, mesonlib.File):
FeatureNew.single_use('version from file', '0.57.0', self.subproject, location=node)
self.add_build_def_file(version)
ifname = version.absolute_path(self.environment.source_dir,
self.environment.build_dir)
try:
ver_data = Path(ifname).read_text(encoding='utf-8').split('\n')
except FileNotFoundError:
raise InterpreterException('Version file not found.')
if len(ver_data) == 2 and ver_data[1] == '':
ver_data = ver_data[0:1]
if len(ver_data) != 1:
raise InterpreterException('Version file must contain exactly one line of text.')
self.project_version = ver_data[0]
else:
self.project_version = version
if self.build.project_version is None:
self.build.project_version = self.project_version
if kwargs['license'] is None:
proj_license = ['unknown']
if kwargs['license_files']:
raise InvalidArguments('Project `license` name must be specified when `license_files` is set')
else:
proj_license = kwargs['license']
proj_license_files = []
for i in self.source_strings_to_files(kwargs['license_files']):
ifname = i.absolute_path(self.environment.source_dir,
self.environment.build_dir)
proj_license_files.append((ifname, i))
self.build.dep_manifest[proj_name] = build.DepManifest(self.project_version, proj_license,
proj_license_files, self.subproject)
if self.subproject in self.build.projects:
raise InvalidCode('Second call to project().')
# spdirname is the subproject_dir for this project, relative to self.subdir.
# self.subproject_dir is the subproject_dir for the main project, relative to top source dir.
spdirname = kwargs['subproject_dir']
if not isinstance(spdirname, str):
raise InterpreterException('Subproject_dir must be a string')
if os.path.isabs(spdirname):
raise InterpreterException('Subproject_dir must not be an absolute path.')
if spdirname.startswith('.'):
raise InterpreterException('Subproject_dir must not begin with a period.')
if '..' in spdirname:
raise InterpreterException('Subproject_dir must not contain a ".." segment.')
if not self.is_subproject():
self.subproject_dir = spdirname
self.build.subproject_dir = self.subproject_dir
# Load wrap files from this (sub)project.
subprojects_dir = os.path.join(self.subdir, spdirname)
if not self.is_subproject():
wrap_mode = WrapMode.from_string(self.coredata.get_option(OptionKey('wrap_mode')))
self.environment.wrap_resolver = wrap.Resolver(self.environment.get_source_dir(), subprojects_dir, self.subproject, wrap_mode)
else:
assert self.environment.wrap_resolver is not None, 'for mypy'
self.environment.wrap_resolver.load_and_merge(subprojects_dir, self.subproject)
self.build.projects[self.subproject] = proj_name
mlog.log('Project name:', mlog.bold(proj_name))
mlog.log('Project version:', mlog.bold(self.project_version))
if not self.is_subproject():
# We have to activate VS before adding languages and before calling
# self.set_backend() otherwise it wouldn't be able to detect which
# vs backend version we need. But after setting default_options in case
# the project sets vs backend by default.
backend = self.coredata.get_option(OptionKey('backend'))
assert backend is None or isinstance(backend, str), 'for mypy'
vsenv = self.coredata.get_option(OptionKey('vsenv'))
assert isinstance(vsenv, bool), 'for mypy'
force_vsenv = vsenv or backend.startswith('vs')
mesonlib.setup_vsenv(force_vsenv)
self.add_languages(proj_langs, True, MachineChoice.HOST)
self.add_languages(proj_langs, False, MachineChoice.BUILD)
self.set_backend()
if not self.is_subproject():
self.check_stdlibs()
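# A representative project() declaration exercising the kwargs processed above
# (all names and values are illustrative):
#
#     project('demo', 'c', 'cpp',
#             version: files('VERSION'),
#             meson_version: '>= 1.1',
#             license: 'MIT',
#             license_files: 'LICENSE',
#             default_options: ['warning_level=3'])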
@typed_kwargs('add_languages', KwargInfo('native', (bool, NoneType), since='0.54.0'), REQUIRED_KW)
@typed_pos_args('add_languages', varargs=str)
def func_add_languages(self, node: mparser.FunctionNode, args: T.Tuple[T.List[str]], kwargs: 'kwtypes.FuncAddLanguages') -> bool:
langs = args[0]
disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
native = kwargs['native']
if disabled:
assert feature, 'for mypy'
for lang in sorted(langs, key=compilers.sort_clink):
mlog.log('Compiler for language', mlog.bold(lang), 'skipped: feature', mlog.bold(feature), 'disabled')
return False
if native is not None:
return self.add_languages(langs, required, self.machine_from_native_kwarg(kwargs))
else:
# absent 'native' means 'both' for backwards compatibility
tv = FeatureNew.get_target_version(self.subproject)
if FeatureNew.check_version(tv, '0.54.0'):
mlog.warning('add_languages is missing native:, assuming languages are wanted for both host and build.',
location=node)
success = self.add_languages(langs, False, MachineChoice.BUILD)
success &= self.add_languages(langs, required, MachineChoice.HOST)
return success
def _stringify_user_arguments(self, args: T.List[TYPE_var], func_name: str) -> T.List[str]:
try:
return [stringifyUserArguments(i, self.subproject) for i in args]
except InvalidArguments as e:
raise InvalidArguments(f'{func_name}(): {str(e)}')
@noArgsFlattening
@noKwargs
def func_message(self, node: mparser.BaseNode, args, kwargs):
if len(args) > 1:
FeatureNew.single_use('message with more than one argument', '0.54.0', self.subproject, location=node)
args_str = self._stringify_user_arguments(args, 'message')
self.message_impl(args_str)
def message_impl(self, args):
mlog.log(mlog.bold('Message:'), *args)
@noArgsFlattening
@FeatureNew('summary', '0.53.0')
@typed_pos_args('summary', (str, dict), optargs=[object])
@typed_kwargs(
'summary',
KwargInfo('section', str, default=''),
KwargInfo('bool_yn', bool, default=False),
KwargInfo('list_sep', (str, NoneType), since='0.54.0')
)
def func_summary(self, node: mparser.BaseNode, args: T.Tuple[T.Union[str, T.Dict[str, T.Any]], T.Optional[T.Any]],
kwargs: 'kwtypes.Summary') -> None:
if args[1] is None:
if not isinstance(args[0], dict):
raise InterpreterException('Summary first argument must be dictionary.')
values = args[0]
else:
if not isinstance(args[0], str):
raise InterpreterException('Summary first argument must be string.')
values = {args[0]: args[1]}
self.summary_impl(kwargs['section'], values, kwargs)
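# meson.build sketch of both accepted forms, a single key/value pair or a
# dictionary (values are illustrative):
#
#     summary('Prefix', get_option('prefix'), section: 'Directories')
#     summary({'Docs': get_option('docs'), 'Tests': true},
#             section: 'Features', bool_yn: true)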
def summary_impl(self, section: str, values, kwargs: 'kwtypes.Summary') -> None:
if self.subproject not in self.summary:
self.summary[self.subproject] = Summary(self.active_projectname, self.project_version)
self.summary[self.subproject].add_section(
section, values, kwargs['bool_yn'], kwargs['list_sep'], self.subproject)
def _print_summary(self) -> None:
# Add automatic 'Subprojects' section in main project.
all_subprojects = collections.OrderedDict()
for name, subp in sorted(self.subprojects.items()):
value = [subp.found()]
if subp.disabled_feature:
value += [f'Feature {subp.disabled_feature!r} disabled']
elif subp.exception:
value += [str(subp.exception)]
elif subp.warnings > 0:
value += [f'{subp.warnings} warnings']
if subp.callstack:
stack = ' => '.join(subp.callstack)
value += [f'(from {stack})']
all_subprojects[name] = value
if all_subprojects:
self.summary_impl('Subprojects', all_subprojects,
{'bool_yn': True,
'list_sep': ' ',
})
# Add automatic section with all user defined options
if self.user_defined_options:
values = collections.OrderedDict()
if self.user_defined_options.cross_file:
values['Cross files'] = self.user_defined_options.cross_file
if self.user_defined_options.native_file:
values['Native files'] = self.user_defined_options.native_file
sorted_options = sorted(self.user_defined_options.cmd_line_options.items())
values.update({str(k): v for k, v in sorted_options})
if values:
self.summary_impl('User defined options', values, {'bool_yn': False, 'list_sep': None})
# Print all summaries, main project last.
mlog.log('') # newline
main_summary = self.summary.pop('', None)
for subp_name, summary in sorted(self.summary.items()):
if self.subprojects[subp_name].found():
summary.dump()
if main_summary:
main_summary.dump()
@noArgsFlattening
@FeatureNew('warning', '0.44.0')
@noKwargs
def func_warning(self, node, args, kwargs):
if len(args) > 1:
FeatureNew.single_use('warning with more than one argument', '0.54.0', self.subproject, location=node)
args_str = self._stringify_user_arguments(args, 'warning')
mlog.warning(*args_str, location=node)
@noArgsFlattening
@noKwargs
def func_error(self, node, args, kwargs):
if len(args) > 1:
FeatureNew.single_use('error with more than one argument', '0.58.0', self.subproject, location=node)
args_str = self._stringify_user_arguments(args, 'error')
raise InterpreterException('Problem encountered: ' + ' '.join(args_str))
@noArgsFlattening
@FeatureNew('debug', '0.63.0')
@noKwargs
def func_debug(self, node, args, kwargs):
args_str = self._stringify_user_arguments(args, 'debug')
mlog.debug('Debug:', *args_str)
@noKwargs
@noPosargs
def func_exception(self, node, args, kwargs):
raise RuntimeError('unit test traceback :)')
@typed_pos_args('expect_error', str)
@typed_kwargs(
'expect_error',
KwargInfo('how', str, default='literal', validator=in_set_validator({'literal', 're'})),
)
def func_expect_error(self, node: mparser.BaseNode, args: T.Tuple[str], kwargs: TYPE_kwargs) -> ContextManagerObject:
class ExpectErrorObject(ContextManagerObject):
def __init__(self, msg: str, how: str, subproject: str) -> None:
super().__init__(subproject)
self.msg = msg
self.how = how
def __exit__(self, exc_type, exc_val, exc_tb):
if exc_val is None:
raise InterpreterException('Expecting an error but code block succeeded')
if isinstance(exc_val, mesonlib.MesonException):
msg = str(exc_val)
if (self.how == 'literal' and self.msg != msg) or \
(self.how == 're' and not re.match(self.msg, msg)):
raise InterpreterException(f'Expecting error {self.msg!r} but got {msg!r}')
return True
return ExpectErrorObject(args[0], kwargs['how'], self.subproject)
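
    # Compiler bookkeeping helpers. add_languages() delegates to
    # add_languages_for(), which detects a compiler per requested language
    # (skipping ones already detected), logs it and registers it with the
    # build. The user-facing add_languages() call in a meson.build file,
    # e.g. add_languages('cpp', required: false), ultimately reaches these
    # helpers (illustrative call only).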
def add_languages(self, args: T.List[str], required: bool, for_machine: MachineChoice) -> bool:
success = self.add_languages_for(args, required, for_machine)
if not self.coredata.is_cross_build():
self.coredata.copy_build_options_from_regular_ones()
self._redetect_machines()
return success
def should_skip_sanity_check(self, for_machine: MachineChoice) -> bool:
should = self.environment.properties.host.get('skip_sanity_check', False)
if not isinstance(should, bool):
raise InterpreterException('Option skip_sanity_check must be a boolean.')
if for_machine != MachineChoice.HOST and not should:
return False
if not self.environment.is_cross_build() and not should:
return False
return should
def add_languages_for(self, args: T.List[str], required: bool, for_machine: MachineChoice) -> bool:
args = [a.lower() for a in args]
langs = set(self.compilers[for_machine].keys())
langs.update(args)
        # We'd really like to add cython's default language here, but we
        # can't: the cython compiler has not been initialized yet, so its
        # options are not available. Because we cannot know which compiler to
        # add by default and do not want to add unnecessary ones, we add
        # nothing for cython here and instead do it when the first cython
        # target using a particular language is defined.
if 'vala' in langs and 'c' not in langs:
FeatureNew.single_use('Adding Vala language without C', '0.59.0', self.subproject, location=self.current_node)
args.append('c')
if 'nasm' in langs:
FeatureNew.single_use('Adding NASM language', '0.64.0', self.subproject, location=self.current_node)
success = True
for lang in sorted(args, key=compilers.sort_clink):
if lang in self.compilers[for_machine]:
continue
machine_name = for_machine.get_lower_case_name()
comp = self.coredata.compilers[for_machine].get(lang)
if not comp:
try:
skip_sanity_check = self.should_skip_sanity_check(for_machine)
if skip_sanity_check:
mlog.log('Cross compiler sanity tests disabled via the cross file.', once=True)
comp = compilers.detect_compiler_for(self.environment, lang, for_machine, skip_sanity_check, self.subproject)
if comp is None:
raise InvalidArguments(f'Tried to use unknown language "{lang}".')
except mesonlib.MesonException:
if not required:
mlog.log('Compiler for language',
mlog.bold(lang), 'for the', machine_name,
'machine not found.')
success = False
continue
else:
raise
if lang == 'cuda' and hasattr(self.backend, 'allow_thin_archives'):
# see NinjaBackend.__init__() why we need to disable thin archives for cuda
mlog.debug('added cuda as language, disabling thin archives for {}, since nvcc/nvlink cannot handle thin archives natively'.format(for_machine))
self.backend.allow_thin_archives[for_machine] = False
else:
# update new values from commandline, if it applies
self.coredata.process_compiler_options(lang, comp, self.environment, self.subproject)
# Add per-subproject compiler options. They inherit value from main project.
if self.subproject:
options = {}
for k in comp.get_options():
v = copy.copy(self.coredata.optstore.get_value_object(k))
k = k.evolve(subproject=self.subproject)
options[k] = v
self.coredata.add_compiler_options(options, lang, for_machine, self.environment, self.subproject)
if for_machine == MachineChoice.HOST or self.environment.is_cross_build():
logger_fun = mlog.log
else:
logger_fun = mlog.debug
logger_fun(comp.get_display_language(), 'compiler for the', machine_name, 'machine:',
mlog.bold(' '.join(comp.get_exelist())), comp.get_version_string())
if comp.linker is not None:
logger_fun(comp.get_display_language(), 'linker for the', machine_name, 'machine:',
mlog.bold(' '.join(comp.linker.get_exelist())), comp.linker.id, comp.linker.version)
self.build.ensure_static_linker(comp)
self.compilers[for_machine][lang] = comp
return success
def program_from_file_for(self, for_machine: MachineChoice, prognames: T.List[mesonlib.FileOrString]
) -> T.Optional[ExternalProgram]:
for p in prognames:
if isinstance(p, mesonlib.File):
continue # Always points to a local (i.e. self generated) file.
if not isinstance(p, str):
raise InterpreterException('Executable name must be a string')
prog = ExternalProgram.from_bin_list(self.environment, for_machine, p)
# if the machine file specified something, it may be a regular
# not-found program but we still want to return that
if not isinstance(prog, NonExistingExternalProgram):
return prog
return None
def program_from_system(self, args: T.List[mesonlib.FileOrString], search_dirs: T.Optional[T.List[str]],
extra_info: T.List[mlog.TV_Loggable]) -> T.Optional[ExternalProgram]:
# Search for scripts relative to current subdir.
# Do not cache found programs because find_program('foobar')
# might give different results when run from different source dirs.
source_dir = os.path.join(self.environment.get_source_dir(), self.subdir)
for exename in args:
if isinstance(exename, mesonlib.File):
if exename.is_built:
search_dir = os.path.join(self.environment.get_build_dir(),
exename.subdir)
else:
search_dir = os.path.join(self.environment.get_source_dir(),
exename.subdir)
exename = exename.fname
search_dirs = [search_dir]
elif isinstance(exename, str):
if search_dirs:
search_dirs = [source_dir] + search_dirs
else:
search_dirs = [source_dir]
else:
raise InvalidArguments(f'find_program only accepts strings and files, not {exename!r}')
extprog = ExternalProgram(exename, search_dirs=search_dirs, silent=True)
if extprog.found():
extra_info.append(f"({' '.join(extprog.get_command())})")
return extprog
return None
def program_from_overrides(self, command_names: T.List[mesonlib.FileOrString],
extra_info: T.List['mlog.TV_Loggable']
) -> T.Optional[T.Union[ExternalProgram, OverrideProgram, build.Executable]]:
for name in command_names:
if not isinstance(name, str):
continue
if name in self.build.find_overrides:
exe = self.build.find_overrides[name]
extra_info.append(mlog.blue('(overridden)'))
return exe
return None
def store_name_lookups(self, command_names: T.List[mesonlib.FileOrString]) -> None:
for name in command_names:
if isinstance(name, str):
self.build.searched_programs.add(name)
def add_find_program_override(self, name: str, exe: T.Union[build.Executable, ExternalProgram, 'OverrideProgram']) -> None:
if name in self.build.searched_programs:
raise InterpreterException(f'Tried to override finding of executable "{name}" which has already been found.')
if name in self.build.find_overrides:
raise InterpreterException(f'Tried to override executable "{name}" which has already been overridden.')
self.build.find_overrides[name] = exe
if name == 'pkg-config' and isinstance(exe, ExternalProgram):
from ..dependencies.pkgconfig import PkgConfigInterface
PkgConfigInterface.set_program_override(exe, MachineChoice.HOST)
def notfound_program(self, args: T.List[mesonlib.FileOrString]) -> ExternalProgram:
return NonExistingExternalProgram(' '.join(
[a if isinstance(a, str) else a.absolute_path(self.environment.source_dir, self.environment.build_dir)
for a in args]))
    # TODO: update modules to always pass `for_machine`. It is bad form to
    # assume the host machine.
def find_program_impl(self, args: T.List[mesonlib.FileOrString],
for_machine: MachineChoice = MachineChoice.HOST,
default_options: T.Optional[T.Dict[OptionKey, T.Union[str, int, bool, T.List[str]]]] = None,
required: bool = True, silent: bool = True,
wanted: T.Union[str, T.List[str]] = '',
search_dirs: T.Optional[T.List[str]] = None,
version_arg: T.Optional[str] = '',
version_func: T.Optional[ProgramVersionFunc] = None
) -> T.Union['ExternalProgram', 'build.Executable', 'OverrideProgram']:
args = mesonlib.listify(args)
extra_info: T.List[mlog.TV_Loggable] = []
progobj = self.program_lookup(args, for_machine, default_options, required, search_dirs, wanted, version_arg, version_func, extra_info)
if progobj is None or not self.check_program_version(progobj, wanted, version_func, extra_info):
progobj = self.notfound_program(args)
if isinstance(progobj, ExternalProgram) and not progobj.found():
if not silent:
mlog.log('Program', mlog.bold(progobj.get_name()), 'found:', mlog.red('NO'), *extra_info)
if required:
m = 'Program {!r} not found or not executable'
raise InterpreterException(m.format(progobj.get_name()))
return progobj
# Only store successful lookups
self.store_name_lookups(args)
if not silent:
mlog.log('Program', mlog.bold(progobj.name), 'found:', mlog.green('YES'), *extra_info)
if isinstance(progobj, build.Executable):
progobj.was_returned_by_find_program = True
return progobj
def program_lookup(self, args: T.List[mesonlib.FileOrString], for_machine: MachineChoice,
default_options: T.Optional[T.Dict[OptionKey, T.Union[str, int, bool, T.List[str]]]],
required: bool,
search_dirs: T.Optional[T.List[str]],
wanted: T.Union[str, T.List[str]],
version_arg: T.Optional[str],
version_func: T.Optional[ProgramVersionFunc],
extra_info: T.List[mlog.TV_Loggable]
) -> T.Optional[T.Union[ExternalProgram, build.Executable, OverrideProgram]]:
progobj = self.program_from_overrides(args, extra_info)
if progobj:
return progobj
if args[0] == 'meson':
# Override find_program('meson') to return what we were invoked with
return ExternalProgram('meson', self.environment.get_build_command(), silent=True)
fallback = None
wrap_mode = WrapMode.from_string(self.coredata.get_option(OptionKey('wrap_mode')))
if wrap_mode != WrapMode.nofallback and self.environment.wrap_resolver:
fallback = self.environment.wrap_resolver.find_program_provider(args)
if fallback and wrap_mode == WrapMode.forcefallback:
return self.find_program_fallback(fallback, args, default_options, required, extra_info)
progobj = self.program_from_file_for(for_machine, args)
if progobj is None:
progobj = self.program_from_system(args, search_dirs, extra_info)
if progobj is None and args[0].endswith('python3'):
prog = ExternalProgram('python3', mesonlib.python_command, silent=True)
progobj = prog if prog.found() else None
if isinstance(progobj, ExternalProgram) and version_arg:
progobj.version_arg = version_arg
if progobj and not self.check_program_version(progobj, wanted, version_func, extra_info):
progobj = None
if progobj is None and fallback and required:
progobj = self.notfound_program(args)
mlog.log('Program', mlog.bold(progobj.get_name()), 'found:', mlog.red('NO'), *extra_info)
extra_info.clear()
progobj = self.find_program_fallback(fallback, args, default_options, required, extra_info)
return progobj
def check_program_version(self, progobj: T.Union[ExternalProgram, build.Executable, OverrideProgram],
wanted: T.Union[str, T.List[str]],
version_func: T.Optional[ProgramVersionFunc],
extra_info: T.List[mlog.TV_Loggable]) -> bool:
if wanted:
if version_func:
version = version_func(progobj)
elif isinstance(progobj, build.Executable):
if progobj.subproject:
interp = self.subprojects[progobj.subproject].held_object
else:
interp = self
assert isinstance(interp, Interpreter), 'for mypy'
version = interp.project_version
else:
version = progobj.get_version(self)
is_found, not_found, _ = mesonlib.version_compare_many(version, wanted)
if not is_found:
extra_info[:0] = ['found', mlog.normal_cyan(version), 'but need:',
mlog.bold(', '.join([f"'{e}'" for e in not_found]))]
return False
extra_info.insert(0, mlog.normal_cyan(version))
return True
def find_program_fallback(self, fallback: str, args: T.List[mesonlib.FileOrString],
default_options: T.Dict[OptionKey, T.Union[str, int, bool, T.List[str]]],
required: bool, extra_info: T.List[mlog.TV_Loggable]
) -> T.Optional[T.Union[ExternalProgram, build.Executable, OverrideProgram]]:
mlog.log('Fallback to subproject', mlog.bold(fallback), 'which provides program',
mlog.bold(' '.join(args)))
sp_kwargs: kwtypes.DoSubproject = {
'required': required,
'default_options': default_options or {},
'version': [],
'cmake_options': [],
'options': None,
}
self.do_subproject(fallback, sp_kwargs)
return self.program_from_overrides(args, extra_info)
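
    # Implementation of find_program(). Lookup order: programs overridden via
    # meson.override_find_program(), machine-file entries, the system PATH
    # plus any extra search dirs, then (if allowed) a wrap/subproject
    # fallback. Illustrative meson.build usage (names are placeholders):
    #   sed = find_program('sed', version: '>=4.0', required: false)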
@typed_pos_args('find_program', varargs=(str, mesonlib.File), min_varargs=1)
@typed_kwargs(
'find_program',
DISABLER_KW.evolve(since='0.49.0'),
NATIVE_KW,
REQUIRED_KW,
KwargInfo('dirs', ContainerTypeInfo(list, str), default=[], listify=True, since='0.53.0'),
KwargInfo('version', ContainerTypeInfo(list, str), default=[], listify=True, since='0.52.0'),
KwargInfo('version_argument', str, default='', since='1.5.0'),
DEFAULT_OPTIONS.evolve(since='1.3.0')
)
@disablerIfNotFound
def func_find_program(self, node: mparser.BaseNode, args: T.Tuple[T.List[mesonlib.FileOrString]],
kwargs: 'kwtypes.FindProgram',
) -> T.Union['build.Executable', ExternalProgram, 'OverrideProgram']:
disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
if disabled:
assert feature, 'for mypy'
mlog.log('Program', mlog.bold(' '.join(args[0])), 'skipped: feature', mlog.bold(feature), 'disabled')
return self.notfound_program(args[0])
search_dirs = extract_search_dirs(kwargs)
default_options = kwargs['default_options']
return self.find_program_impl(args[0], kwargs['native'], default_options=default_options, required=required,
silent=False, wanted=kwargs['version'], version_arg=kwargs['version_argument'],
search_dirs=search_dirs)
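
    # Implementation of dependency(). Most of the heavy lifting, including
    # subproject fallbacks, is done by DependencyFallbacksHolder.
    # Illustrative meson.build usage (names are placeholders):
    #   zdep = dependency('zlib', version: '>=1.2.8', required: false)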
# When adding kwargs, please check if they make sense in dependencies.get_dep_identifier()
@FeatureNewKwargs('dependency', '0.57.0', ['cmake_package_version'])
@FeatureNewKwargs('dependency', '0.56.0', ['allow_fallback'])
@FeatureNewKwargs('dependency', '0.54.0', ['components'])
@FeatureNewKwargs('dependency', '0.52.0', ['include_type'])
@FeatureNewKwargs('dependency', '0.50.0', ['not_found_message', 'cmake_module_path', 'cmake_args'])
@FeatureNewKwargs('dependency', '0.49.0', ['disabler'])
@FeatureNewKwargs('dependency', '0.40.0', ['method'])
@disablerIfNotFound
@permittedKwargs(permitted_dependency_kwargs)
@typed_pos_args('dependency', varargs=str, min_varargs=1)
@typed_kwargs('dependency', DEFAULT_OPTIONS.evolve(since='0.38.0'), allow_unknown=True)
def func_dependency(self, node: mparser.BaseNode, args: T.Tuple[T.List[str]], kwargs) -> Dependency:
# Replace '' by empty list of names
names = [n for n in args[0] if n]
if len(names) > 1:
FeatureNew('dependency with more than one name', '0.60.0').use(self.subproject)
allow_fallback = kwargs.get('allow_fallback')
if allow_fallback is not None and not isinstance(allow_fallback, bool):
raise InvalidArguments('"allow_fallback" argument must be boolean')
fallback = kwargs.get('fallback')
default_options = kwargs.get('default_options')
df = DependencyFallbacksHolder(self, names, allow_fallback, default_options)
df.set_fallback(fallback)
not_found_message = kwargs.get('not_found_message', '')
if not isinstance(not_found_message, str):
raise InvalidArguments('The not_found_message must be a string.')
try:
d = df.lookup(kwargs)
except Exception:
if not_found_message:
self.message_impl([not_found_message])
raise
assert isinstance(d, Dependency), 'for mypy'
if not d.found() and not_found_message:
self.message_impl([not_found_message])
# Ensure the correct include type
if 'include_type' in kwargs:
wanted = kwargs['include_type']
if not isinstance(wanted, str):
raise InvalidArguments('The `include_type` kwarg must be a string')
actual = d.get_include_type()
if wanted != actual:
mlog.debug(f'Current include type of {args[0]} is {actual}. Converting to requested {wanted}')
d = d.generate_system_dependency(wanted)
if d.feature_since is not None:
version, extra_msg = d.feature_since
FeatureNew.single_use(f'dep {d.name!r} custom lookup', version, self.subproject, extra_msg, node)
for f in d.featurechecks:
f.use(self.subproject, node)
return d
@FeatureNew('disabler', '0.44.0')
@noKwargs
@noPosargs
def func_disabler(self, node, args, kwargs):
return Disabler()
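
    # The build-target family: executable(), static_library(),
    # shared_library(), both_libraries(), shared_module(), library(), jar()
    # and the generic build_target() all funnel into the shared
    # build_target()/build_library() helpers. Illustrative meson.build usage
    # (names are placeholders):
    #   exe = executable('demo', 'main.c', dependencies: [zdep])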
@permittedKwargs(build.known_exe_kwargs)
@typed_pos_args('executable', str, varargs=SOURCES_VARARGS)
@typed_kwargs('executable', *EXECUTABLE_KWS, allow_unknown=True)
def func_executable(self, node: mparser.BaseNode,
args: T.Tuple[str, SourcesVarargsType],
kwargs: kwtypes.Executable) -> build.Executable:
return self.build_target(node, args, kwargs, build.Executable)
@permittedKwargs(build.known_stlib_kwargs)
@typed_pos_args('static_library', str, varargs=SOURCES_VARARGS)
@typed_kwargs('static_library', *STATIC_LIB_KWS, allow_unknown=True)
def func_static_lib(self, node: mparser.BaseNode,
args: T.Tuple[str, SourcesVarargsType],
kwargs: kwtypes.StaticLibrary) -> build.StaticLibrary:
return self.build_target(node, args, kwargs, build.StaticLibrary)
@permittedKwargs(build.known_shlib_kwargs)
@typed_pos_args('shared_library', str, varargs=SOURCES_VARARGS)
@typed_kwargs('shared_library', *SHARED_LIB_KWS, allow_unknown=True)
def func_shared_lib(self, node: mparser.BaseNode,
args: T.Tuple[str, SourcesVarargsType],
kwargs: kwtypes.SharedLibrary) -> build.SharedLibrary:
holder = self.build_target(node, args, kwargs, build.SharedLibrary)
holder.shared_library_only = True
return holder
@permittedKwargs(known_library_kwargs)
@typed_pos_args('both_libraries', str, varargs=SOURCES_VARARGS)
@typed_kwargs('both_libraries', *LIBRARY_KWS, allow_unknown=True)
@noSecondLevelHolderResolving
def func_both_lib(self, node: mparser.BaseNode,
args: T.Tuple[str, SourcesVarargsType],
kwargs: kwtypes.Library) -> build.BothLibraries:
return self.build_both_libraries(node, args, kwargs)
@FeatureNew('shared_module', '0.37.0')
@permittedKwargs(build.known_shmod_kwargs)
@typed_pos_args('shared_module', str, varargs=SOURCES_VARARGS)
@typed_kwargs('shared_module', *SHARED_MOD_KWS, allow_unknown=True)
def func_shared_module(self, node: mparser.BaseNode,
args: T.Tuple[str, SourcesVarargsType],
kwargs: kwtypes.SharedModule) -> build.SharedModule:
return self.build_target(node, args, kwargs, build.SharedModule)
@permittedKwargs(known_library_kwargs)
@typed_pos_args('library', str, varargs=SOURCES_VARARGS)
@typed_kwargs('library', *LIBRARY_KWS, allow_unknown=True)
@noSecondLevelHolderResolving
def func_library(self, node: mparser.BaseNode,
args: T.Tuple[str, SourcesVarargsType],
                     kwargs: kwtypes.Library) -> T.Union[build.SharedLibrary, build.StaticLibrary, build.BothLibraries]:
return self.build_library(node, args, kwargs)
@permittedKwargs(build.known_jar_kwargs)
@typed_pos_args('jar', str, varargs=(str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList, build.ExtractedObjects, build.BuildTarget))
@typed_kwargs('jar', *JAR_KWS, allow_unknown=True)
def func_jar(self, node: mparser.BaseNode,
args: T.Tuple[str, T.List[T.Union[str, mesonlib.File, build.GeneratedTypes]]],
kwargs: kwtypes.Jar) -> build.Jar:
return self.build_target(node, args, kwargs, build.Jar)
@FeatureNewKwargs('build_target', '0.40.0', ['link_whole', 'override_options'])
@permittedKwargs(known_build_target_kwargs)
@typed_pos_args('build_target', str, varargs=SOURCES_VARARGS)
@typed_kwargs('build_target', *BUILD_TARGET_KWS, allow_unknown=True)
@noSecondLevelHolderResolving
def func_build_target(self, node: mparser.BaseNode,
args: T.Tuple[str, SourcesVarargsType],
kwargs: kwtypes.BuildTarget
) -> T.Union[build.Executable, build.StaticLibrary, build.SharedLibrary,
build.SharedModule, build.BothLibraries, build.Jar]:
target_type = kwargs['target_type']
if target_type not in {'both_libraries', 'library'}:
args, kwargs = resolve_second_level_holders(args, kwargs)
if target_type == 'executable':
return self.build_target(node, args, kwargs, build.Executable)
elif target_type == 'shared_library':
return self.build_target(node, args, kwargs, build.SharedLibrary)
elif target_type == 'shared_module':
return self.build_target(node, args, kwargs, build.SharedModule)
elif target_type == 'static_library':
return self.build_target(node, args, kwargs, build.StaticLibrary)
elif target_type == 'both_libraries':
return self.build_both_libraries(node, args, kwargs)
elif target_type == 'library':
return self.build_library(node, args, kwargs)
return self.build_target(node, args, kwargs, build.Jar)
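
    # vcs_tag() generates a file with the current revision substituted in.
    # If no command is given, the VCS is auto-detected from the source dir
    # and the fallback string is used when that fails. Illustrative
    # meson.build usage (file names are placeholders):
    #   version_c = vcs_tag(input: 'version.c.in', output: 'version.c')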
@noPosargs
@typed_kwargs(
'vcs_tag',
CT_INPUT_KW.evolve(required=True),
MULTI_OUTPUT_KW,
# Cannot use the COMMAND_KW because command is allowed to be empty
KwargInfo(
'command',
ContainerTypeInfo(list, (str, build.BuildTarget, build.CustomTarget, build.CustomTargetIndex, ExternalProgram, mesonlib.File)),
listify=True,
default=[],
),
KwargInfo('fallback', (str, NoneType)),
KwargInfo('replace_string', str, default='@VCS_TAG@'),
)
def func_vcs_tag(self, node: mparser.BaseNode, args: T.List['TYPE_var'], kwargs: 'kwtypes.VcsTag') -> build.CustomTarget:
if kwargs['fallback'] is None:
FeatureNew.single_use('Optional fallback in vcs_tag', '0.41.0', self.subproject, location=node)
fallback = kwargs['fallback'] or self.project_version
replace_string = kwargs['replace_string']
regex_selector = '(.*)' # default regex selector for custom command: use complete output
vcs_cmd = kwargs['command']
source_dir = os.path.normpath(os.path.join(self.environment.get_source_dir(), self.subdir))
if vcs_cmd:
if isinstance(vcs_cmd[0], (str, mesonlib.File)):
if isinstance(vcs_cmd[0], mesonlib.File):
FeatureNew.single_use('vcs_tag with file as the first argument', '0.62.0', self.subproject, location=node)
maincmd = self.find_program_impl(vcs_cmd[0], required=False)
if maincmd.found():
vcs_cmd[0] = maincmd
else:
FeatureNew.single_use('vcs_tag with custom_tgt, external_program, or exe as the first argument', '0.63.0', self.subproject, location=node)
else:
vcs = mesonlib.detect_vcs(source_dir)
if vcs:
mlog.log('Found {} repository at {}'.format(vcs['name'], vcs['wc_dir']))
vcs_cmd = vcs['get_rev'].split()
regex_selector = vcs['rev_regex']
else:
                vcs_cmd = [' '] # executing this cmd will fail in vcstagger.py and force it to use the fallback string
# vcstagger.py parameters: infile, outfile, fallback, source_dir, replace_string, regex_selector, command...
self._validate_custom_target_outputs(len(kwargs['input']) > 1, kwargs['output'], "vcs_tag")
cmd = self.environment.get_build_command() + \
['--internal',
'vcstagger',
'@INPUT0@',
'@OUTPUT0@',
fallback,
source_dir,
replace_string,
regex_selector] + vcs_cmd
tg = build.CustomTarget(
kwargs['output'][0],
self.subdir,
self.subproject,
self.environment,
cmd,
self.source_strings_to_files(kwargs['input']),
kwargs['output'],
build_by_default=True,
build_always_stale=True,
)
self.add_target(tg.name, tg)
return tg
@FeatureNew('subdir_done', '0.46.0')
@noPosargs
@noKwargs
def func_subdir_done(self, node: mparser.BaseNode, args: TYPE_var, kwargs: TYPE_kwargs) -> T.NoReturn:
raise SubdirDoneRequest()
def _validate_custom_target_outputs(self, has_multi_in: bool, outputs: T.Iterable[str], name: str) -> None:
"""Checks for additional invalid values in a custom_target output.
This cannot be done with typed_kwargs because it requires the number of
inputs.
"""
inregex: T.List[str] = ['@PLAINNAME[0-9]+@', '@BASENAME[0-9]+@']
from ..utils.universal import iter_regexin_iter
for out in outputs:
match = iter_regexin_iter(inregex, [out])
if has_multi_in and ('@PLAINNAME@' in out or '@BASENAME@' in out):
raise InvalidArguments(f'{name}: output cannot contain "@PLAINNAME@" or "@BASENAME@" '
'when there is more than one input (we can\'t know which to use)')
elif match:
FeatureNew.single_use(
f'{match} in output', '1.5.0',
self.subproject)
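
    # custom_target() is the catch-all "run this command to produce these
    # outputs" target. Illustrative meson.build usage (names are
    # placeholders):
    #   gen = custom_target('gen-header',
    #     input: 'data.txt',
    #     output: 'data.h',
    #     command: [gen_prog, '@INPUT@', '@OUTPUT@'])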
@typed_pos_args('custom_target', optargs=[str])
@typed_kwargs(
'custom_target',
COMMAND_KW,
CT_BUILD_ALWAYS,
CT_BUILD_ALWAYS_STALE,
CT_BUILD_BY_DEFAULT,
CT_INPUT_KW,
CT_INSTALL_DIR_KW,
CT_INSTALL_TAG_KW,
MULTI_OUTPUT_KW,
DEPENDS_KW,
DEPEND_FILES_KW,
DEPFILE_KW,
ENV_KW.evolve(since='0.57.0'),
INSTALL_KW,
INSTALL_MODE_KW.evolve(since='0.47.0'),
KwargInfo('feed', bool, default=False, since='0.59.0'),
KwargInfo('capture', bool, default=False),
KwargInfo('console', bool, default=False, since='0.48.0'),
)
def func_custom_target(self, node: mparser.FunctionNode, args: T.Tuple[str],
kwargs: 'kwtypes.CustomTarget') -> build.CustomTarget:
if kwargs['depfile'] and ('@BASENAME@' in kwargs['depfile'] or '@PLAINNAME@' in kwargs['depfile']):
FeatureNew.single_use('substitutions in custom_target depfile', '0.47.0', self.subproject, location=node)
install_mode = self._warn_kwarg_install_mode_sticky(kwargs['install_mode'])
# Don't mutate the kwargs
build_by_default = kwargs['build_by_default']
build_always_stale = kwargs['build_always_stale']
# Remap build_always to build_by_default and build_always_stale
if kwargs['build_always'] is not None and kwargs['build_always_stale'] is not None:
raise InterpreterException('CustomTarget: "build_always" and "build_always_stale" are mutually exclusive')
if build_by_default is None and kwargs['install']:
build_by_default = True
elif kwargs['build_always'] is not None:
if build_by_default is None:
build_by_default = kwargs['build_always']
            build_always_stale = kwargs['build_always']
# These are nullable so that we can know whether they're explicitly
# set or not. If they haven't been overwritten, set them to their true
# default
if build_by_default is None:
build_by_default = False
if build_always_stale is None:
build_always_stale = False
name = args[0]
if name is None:
# name will default to first output, but we cannot do that yet because
# they could need substitutions (e.g. @BASENAME@) first. CustomTarget()
# will take care of setting a proper default but name must be an empty
# string in the meantime.
FeatureNew.single_use('custom_target() with no name argument', '0.60.0', self.subproject, location=node)
name = ''
inputs = self.source_strings_to_files(kwargs['input'], strict=False)
command = kwargs['command']
if command and isinstance(command[0], str):
command[0] = self.find_program_impl([command[0]])
if len(inputs) > 1 and kwargs['feed']:
raise InvalidArguments('custom_target: "feed" keyword argument can only be used with a single input')
if len(kwargs['output']) > 1 and kwargs['capture']:
raise InvalidArguments('custom_target: "capture" keyword argument can only be used with a single output')
if kwargs['capture'] and kwargs['console']:
raise InvalidArguments('custom_target: "capture" and "console" keyword arguments are mutually exclusive')
for c in command:
if kwargs['capture'] and isinstance(c, str) and '@OUTPUT@' in c:
raise InvalidArguments('custom_target: "capture" keyword argument cannot be used with "@OUTPUT@"')
if kwargs['feed'] and isinstance(c, str) and '@INPUT@' in c:
raise InvalidArguments('custom_target: "feed" keyword argument cannot be used with "@INPUT@"')
if kwargs['install'] and not kwargs['install_dir']:
raise InvalidArguments('custom_target: "install_dir" keyword argument must be set when "install" is true.')
if len(kwargs['install_dir']) > 1:
FeatureNew.single_use('multiple install_dir for custom_target', '0.40.0', self.subproject, location=node)
if len(kwargs['install_tag']) not in {0, 1, len(kwargs['output'])}:
            raise InvalidArguments('custom_target: install_tag argument must have 0 or 1 elements, '
                                   'or the same number of elements as the output keyword argument. '
                                   f'(there are {len(kwargs["install_tag"])} install_tags '
                                   f'and {len(kwargs["output"])} outputs)')
for t in kwargs['output']:
self.validate_forbidden_targets(t)
self._validate_custom_target_outputs(len(inputs) > 1, kwargs['output'], "custom_target")
tg = build.CustomTarget(
name,
self.subdir,
self.subproject,
self.environment,
command,
inputs,
kwargs['output'],
build_always_stale=build_always_stale,
build_by_default=build_by_default,
capture=kwargs['capture'],
console=kwargs['console'],
depend_files=kwargs['depend_files'],
depfile=kwargs['depfile'],
extra_depends=kwargs['depends'],
env=kwargs['env'],
feed=kwargs['feed'],
install=kwargs['install'],
install_dir=kwargs['install_dir'],
install_mode=install_mode,
install_tag=kwargs['install_tag'],
backend=self.backend)
self.add_target(tg.name, tg)
return tg
@typed_pos_args('run_target', str)
@typed_kwargs(
'run_target',
COMMAND_KW,
DEPENDS_KW,
ENV_KW.evolve(since='0.57.0'),
)
def func_run_target(self, node: mparser.FunctionNode, args: T.Tuple[str],
kwargs: 'kwtypes.RunTarget') -> build.RunTarget:
all_args = kwargs['command'].copy()
for i in listify(all_args):
if isinstance(i, ExternalProgram) and not i.found():
raise InterpreterException(f'Tried to use non-existing executable {i.name!r}')
if isinstance(all_args[0], str):
all_args[0] = self.find_program_impl([all_args[0]])
name = args[0]
tg = build.RunTarget(name, all_args, kwargs['depends'], self.subdir, self.subproject, self.environment,
kwargs['env'])
self.add_target(name, tg)
return tg
@FeatureNew('alias_target', '0.52.0')
@typed_pos_args('alias_target', str, varargs=(build.Target, build.BothLibraries), min_varargs=1)
@noKwargs
@noSecondLevelHolderResolving
def func_alias_target(self, node: mparser.BaseNode, args: T.Tuple[str, T.List[T.Union[build.Target, build.BothLibraries]]],
kwargs: TYPE_kwargs) -> build.AliasTarget:
name, deps = args
if any(isinstance(d, build.RunTarget) for d in deps):
FeatureNew.single_use('alias_target that depends on run_targets', '0.60.0', self.subproject)
tg = build.AliasTarget(name, deps, self.subdir, self.subproject, self.environment)
self.add_target(name, tg)
return tg
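
    # generator() wraps a program plus argument/output templates so it can be
    # applied to many inputs via gen.process(). Illustrative meson.build
    # usage (names are placeholders):
    #   gen = generator(prog, output: '@BASENAME@.c',
    #                   arguments: ['@INPUT@', '-o', '@OUTPUT@'])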
@typed_pos_args('generator', (build.Executable, ExternalProgram))
@typed_kwargs(
'generator',
KwargInfo('arguments', ContainerTypeInfo(list, str, allow_empty=False), required=True, listify=True),
KwargInfo('output', ContainerTypeInfo(list, str, allow_empty=False), required=True, listify=True),
DEPFILE_KW,
DEPENDS_KW,
KwargInfo('capture', bool, default=False, since='0.43.0'),
)
def func_generator(self, node: mparser.FunctionNode,
args: T.Tuple[T.Union[build.Executable, ExternalProgram]],
kwargs: 'kwtypes.FuncGenerator') -> build.Generator:
for rule in kwargs['output']:
if '@BASENAME@' not in rule and '@PLAINNAME@' not in rule:
raise InvalidArguments('Every element of "output" must contain @BASENAME@ or @PLAINNAME@.')
if has_path_sep(rule):
raise InvalidArguments('"output" must not contain a directory separator.')
if len(kwargs['output']) > 1:
for o in kwargs['output']:
if '@OUTPUT@' in o:
raise InvalidArguments('Tried to use @OUTPUT@ in a rule with more than one output.')
gen = build.Generator(args[0], **kwargs)
self.generators.append(gen)
return gen
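
    # test() and benchmark() share make_test()/add_test(); tests additionally
    # accept an is_parallel kwarg and are appended to build.tests rather than
    # build.benchmarks. Illustrative meson.build usage (names are
    # placeholders):
    #   test('unit', exe, args: ['--fast'], timeout: 30)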
@typed_pos_args('benchmark', str, (build.Executable, build.Jar, ExternalProgram, mesonlib.File, build.CustomTarget, build.CustomTargetIndex))
@typed_kwargs('benchmark', *TEST_KWS)
def func_benchmark(self, node: mparser.BaseNode,
args: T.Tuple[str, T.Union[build.Executable, build.Jar, ExternalProgram, mesonlib.File]],
kwargs: 'kwtypes.FuncBenchmark') -> None:
self.add_test(node, args, kwargs, False)
@typed_pos_args('test', str, (build.Executable, build.Jar, ExternalProgram, mesonlib.File, build.CustomTarget, build.CustomTargetIndex))
@typed_kwargs('test', *TEST_KWS, KwargInfo('is_parallel', bool, default=True))
def func_test(self, node: mparser.BaseNode,
args: T.Tuple[str, T.Union[build.Executable, build.Jar, ExternalProgram, mesonlib.File, build.CustomTarget, build.CustomTargetIndex]],
kwargs: 'kwtypes.FuncTest') -> None:
self.add_test(node, args, kwargs, True)
def unpack_env_kwarg(self, kwargs: T.Union[EnvironmentVariables, T.Dict[str, 'TYPE_var'], T.List['TYPE_var'], str]) -> EnvironmentVariables:
envlist = kwargs.get('env')
if envlist is None:
return EnvironmentVariables()
msg = ENV_KW.validator(envlist)
if msg:
raise InvalidArguments(f'"env": {msg}')
return ENV_KW.convertor(envlist)
def make_test(self, node: mparser.BaseNode,
args: T.Tuple[str, T.Union[build.Executable, build.Jar, ExternalProgram, mesonlib.File, build.CustomTarget, build.CustomTargetIndex]],
kwargs: 'kwtypes.BaseTest') -> Test:
name = args[0]
if ':' in name:
mlog.deprecation(f'":" is not allowed in test name "{name}", it has been replaced with "_"',
location=node)
name = name.replace(':', '_')
exe = args[1]
if isinstance(exe, ExternalProgram):
if not exe.found():
raise InvalidArguments('Tried to use not-found external program as test exe')
elif isinstance(exe, mesonlib.File):
exe = self.find_program_impl([exe])
elif isinstance(exe, build.CustomTarget):
kwargs.setdefault('depends', []).append(exe)
elif isinstance(exe, build.CustomTargetIndex):
kwargs.setdefault('depends', []).append(exe.target)
env = self.unpack_env_kwarg(kwargs)
if kwargs['timeout'] <= 0:
FeatureNew.single_use('test() timeout <= 0', '0.57.0', self.subproject, location=node)
prj = self.subproject if self.is_subproject() else self.build.project_name
suite: T.List[str] = []
for s in kwargs['suite']:
if s:
s = ':' + s
suite.append(prj.replace(' ', '_').replace(':', '_') + s)
return Test(name,
prj,
suite,
exe,
kwargs['depends'],
kwargs.get('is_parallel', False),
kwargs['args'],
env,
kwargs['should_fail'],
kwargs['timeout'],
kwargs['workdir'],
kwargs['protocol'],
kwargs['priority'],
kwargs['verbose'])
def add_test(self, node: mparser.BaseNode,
args: T.Tuple[str, T.Union[build.Executable, build.Jar, ExternalProgram, mesonlib.File, build.CustomTarget, build.CustomTargetIndex]],
kwargs: T.Dict[str, T.Any], is_base_test: bool):
if isinstance(args[1], (build.CustomTarget, build.CustomTargetIndex)):
FeatureNew.single_use('test with CustomTarget as command', '1.4.0', self.subproject)
if any(isinstance(i, ExternalProgram) for i in kwargs['args']):
FeatureNew.single_use('test with external_program in args', '1.6.0', self.subproject)
t = self.make_test(node, args, kwargs)
if is_base_test:
self.build.tests.append(t)
mlog.debug('Adding test', mlog.bold(t.name, True))
else:
self.build.benchmarks.append(t)
mlog.debug('Adding benchmark', mlog.bold(t.name, True))
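
    # install_headers() copies headers into includedir (or install_dir),
    # optionally preserving the source tree layout. Illustrative meson.build
    # usage (file names are placeholders):
    #   install_headers('mylib.h', subdir: 'mylib')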
@typed_pos_args('install_headers', varargs=(str, mesonlib.File))
@typed_kwargs(
'install_headers',
PRESERVE_PATH_KW,
KwargInfo('subdir', (str, NoneType)),
INSTALL_MODE_KW.evolve(since='0.47.0'),
INSTALL_DIR_KW,
INSTALL_FOLLOW_SYMLINKS,
)
def func_install_headers(self, node: mparser.BaseNode,
args: T.Tuple[T.List['mesonlib.FileOrString']],
kwargs: 'kwtypes.FuncInstallHeaders') -> build.Headers:
install_mode = self._warn_kwarg_install_mode_sticky(kwargs['install_mode'])
source_files = self.source_strings_to_files(args[0])
install_subdir = kwargs['subdir']
if install_subdir is not None:
if kwargs['install_dir'] is not None:
raise InterpreterException('install_headers: cannot specify both "install_dir" and "subdir". Use only "install_dir".')
if os.path.isabs(install_subdir):
mlog.deprecation('Subdir keyword must not be an absolute path. This will be a hard error in meson 2.0.')
else:
install_subdir = ''
dirs = collections.defaultdict(list)
ret_headers = []
if kwargs['preserve_path']:
for file in source_files:
dirname = os.path.dirname(file.fname)
dirs[dirname].append(file)
else:
dirs[''].extend(source_files)
for childdir in dirs:
h = build.Headers(dirs[childdir], os.path.join(install_subdir, childdir), kwargs['install_dir'],
install_mode, self.subproject,
follow_symlinks=kwargs['follow_symlinks'])
ret_headers.append(h)
self.build.headers.append(h)
return ret_headers
@typed_pos_args('install_man', varargs=(str, mesonlib.File))
@typed_kwargs(
'install_man',
KwargInfo('locale', (str, NoneType), since='0.58.0'),
INSTALL_MODE_KW.evolve(since='0.47.0'),
INSTALL_DIR_KW,
)
def func_install_man(self, node: mparser.BaseNode,
args: T.Tuple[T.List['mesonlib.FileOrString']],
kwargs: 'kwtypes.FuncInstallMan') -> build.Man:
install_mode = self._warn_kwarg_install_mode_sticky(kwargs['install_mode'])
        # We only need to narrow the type here: the inputs are limited to
        # strings and files, so only Files will be returned.
sources = self.source_strings_to_files(args[0])
for s in sources:
try:
num = int(s.rsplit('.', 1)[-1])
except (IndexError, ValueError):
num = 0
if not 1 <= num <= 9:
raise InvalidArguments('Man file must have a file extension of a number between 1 and 9')
m = build.Man(sources, kwargs['install_dir'], install_mode,
self.subproject, kwargs['locale'])
self.build.man.append(m)
return m
@FeatureNew('install_emptydir', '0.60.0')
@typed_kwargs(
'install_emptydir',
INSTALL_MODE_KW,
KwargInfo('install_tag', (str, NoneType), since='0.62.0')
)
def func_install_emptydir(self, node: mparser.BaseNode, args: T.Tuple[str], kwargs) -> None:
d = build.EmptyDir(args[0], kwargs['install_mode'], self.subproject, kwargs['install_tag'])
self.build.emptydir.append(d)
return d
@FeatureNew('install_symlink', '0.61.0')
@typed_pos_args('symlink_name', str)
@typed_kwargs(
'install_symlink',
KwargInfo('pointing_to', str, required=True),
KwargInfo('install_dir', str, required=True),
INSTALL_TAG_KW,
)
def func_install_symlink(self, node: mparser.BaseNode,
args: T.Tuple[T.List[str]],
kwargs) -> build.SymlinkData:
name = args[0] # Validation while creating the SymlinkData object
target = kwargs['pointing_to']
l = build.SymlinkData(target, name, kwargs['install_dir'],
self.subproject, kwargs['install_tag'])
self.build.symlinks.append(l)
return l
@FeatureNew('structured_sources', '0.62.0')
@typed_pos_args('structured_sources', object, optargs=[dict])
@noKwargs
@noArgsFlattening
def func_structured_sources(
self, node: mparser.BaseNode,
args: T.Tuple[object, T.Optional[T.Dict[str, object]]],
kwargs: 'TYPE_kwargs') -> build.StructuredSources:
        valid_types = (str, mesonlib.File, build.GeneratedList, build.CustomTarget, build.CustomTargetIndex)
sources: T.Dict[str, T.List[T.Union[mesonlib.File, 'build.GeneratedTypes']]] = collections.defaultdict(list)
for arg in mesonlib.listify(args[0]):
if not isinstance(arg, valid_types):
raise InvalidArguments(f'structured_sources: type "{type(arg)}" is not valid')
if isinstance(arg, str):
arg = mesonlib.File.from_source_file(self.environment.source_dir, self.subdir, arg)
sources[''].append(arg)
if args[1]:
if '' in args[1]:
raise InvalidArguments('structured_sources: keys to dictionary argument may not be an empty string.')
for k, v in args[1].items():
for arg in mesonlib.listify(v):
if not isinstance(arg, valid_types):
raise InvalidArguments(f'structured_sources: type "{type(arg)}" is not valid')
if isinstance(arg, str):
arg = mesonlib.File.from_source_file(self.environment.source_dir, self.subdir, arg)
sources[k].append(arg)
return build.StructuredSources(sources)
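
    # subdir() recurses into another meson.build. The guards below keep it
    # out of the subprojects dir, reserved 'meson-' top-level dirs and
    # already visited directories. Illustrative meson.build usage:
    #   subdir('src')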
@typed_pos_args('subdir', str)
@typed_kwargs(
'subdir',
KwargInfo(
'if_found',
ContainerTypeInfo(list, object),
validator=lambda a: 'Objects must have a found() method' if not all(hasattr(x, 'found') for x in a) else None,
since='0.44.0',
default=[],
listify=True,
),
)
def func_subdir(self, node: mparser.BaseNode, args: T.Tuple[str], kwargs: 'kwtypes.Subdir') -> None:
mesonlib.check_direntry_issues(args)
if '..' in args[0]:
raise InvalidArguments('Subdir contains ..')
if self.subdir == '' and args[0] == self.subproject_dir:
raise InvalidArguments('Must not go into subprojects dir with subdir(), use subproject() instead.')
if self.subdir == '' and args[0].startswith('meson-'):
raise InvalidArguments('The "meson-" prefix is reserved and cannot be used for top-level subdir().')
if args[0] == '':
raise InvalidArguments("The argument given to subdir() is the empty string ''. This is prohibited.")
for i in kwargs['if_found']:
if not i.found():
return
prev_subdir = self.subdir
subdir = os.path.join(prev_subdir, args[0])
if os.path.isabs(subdir):
raise InvalidArguments('Subdir argument must be a relative path.')
absdir = os.path.join(self.environment.get_source_dir(), subdir)
symlinkless_dir = os.path.realpath(absdir)
build_file = os.path.join(symlinkless_dir, 'meson.build')
if build_file in self.processed_buildfiles:
raise InvalidArguments(f'Tried to enter directory "{subdir}", which has already been visited.')
self.processed_buildfiles.add(build_file)
self.subdir = subdir
os.makedirs(os.path.join(self.environment.build_dir, subdir), exist_ok=True)
buildfilename = os.path.join(self.subdir, environment.build_filename)
self.build_def_files.add(buildfilename)
absname = os.path.join(self.environment.get_source_dir(), buildfilename)
if not os.path.isfile(absname):
self.subdir = prev_subdir
raise InterpreterException(f"Nonexistent build file '{buildfilename!s}'")
code = self.read_buildfile(absname, buildfilename)
try:
codeblock = mparser.Parser(code, absname).parse()
except mesonlib.MesonException as me:
me.file = absname
raise me
try:
self.evaluate_codeblock(codeblock)
except SubdirDoneRequest:
pass
self.subdir = prev_subdir
# This is either ignored on basically any OS nowadays, or silently gets
# ignored (Solaris) or triggers an "illegal operation" error (FreeBSD).
# It was likely added "because it exists", but should never be used. In
# theory it is useful for directories, but we never apply modes to
# directories other than in install_emptydir.
def _warn_kwarg_install_mode_sticky(self, mode: FileMode) -> FileMode:
if mode.perms > 0 and mode.perms & stat.S_ISVTX:
mlog.deprecation('install_mode with the sticky bit on a file does not do anything and will '
'be ignored since Meson 0.64.0', location=self.current_node)
perms = stat.filemode(mode.perms - stat.S_ISVTX)[1:]
return FileMode(perms, mode.owner, mode.group)
else:
return mode
@typed_pos_args('install_data', varargs=(str, mesonlib.File))
@typed_kwargs(
'install_data',
KwargInfo('sources', ContainerTypeInfo(list, (str, mesonlib.File)), listify=True, default=[]),
KwargInfo('rename', ContainerTypeInfo(list, str), default=[], listify=True, since='0.46.0'),
INSTALL_MODE_KW.evolve(since='0.38.0'),
INSTALL_TAG_KW.evolve(since='0.60.0'),
INSTALL_DIR_KW,
PRESERVE_PATH_KW.evolve(since='0.64.0'),
INSTALL_FOLLOW_SYMLINKS,
)
def func_install_data(self, node: mparser.BaseNode,
args: T.Tuple[T.List['mesonlib.FileOrString']],
kwargs: 'kwtypes.FuncInstallData') -> build.Data:
sources = self.source_strings_to_files(args[0] + kwargs['sources'])
rename = kwargs['rename'] or None
if rename:
if len(rename) != len(sources):
raise InvalidArguments(
'"rename" and "sources" argument lists must be the same length if "rename" is given. '
f'Rename has {len(rename)} elements and sources has {len(sources)}.')
install_dir = kwargs['install_dir']
if not install_dir:
subdir = self.active_projectname
install_dir = P_OBJ.OptionString(os.path.join(self.environment.get_datadir(), subdir), os.path.join('{datadir}', subdir))
if self.is_subproject():
FeatureNew.single_use('install_data() without install_dir inside of a subproject', '1.3.0', self.subproject,
'This was broken and would install to the project name of the parent project instead',
node)
if kwargs['preserve_path']:
FeatureNew.single_use('install_data() with preserve_path and without install_dir', '1.3.0', self.subproject,
'This was broken and would not add the project name to the install path',
node)
install_mode = self._warn_kwarg_install_mode_sticky(kwargs['install_mode'])
return self.install_data_impl(sources, install_dir, install_mode, rename, kwargs['install_tag'],
preserve_path=kwargs['preserve_path'],
follow_symlinks=kwargs['follow_symlinks'])
def install_data_impl(self, sources: T.List[mesonlib.File], install_dir: str,
install_mode: FileMode, rename: T.Optional[str],
tag: T.Optional[str],
install_data_type: T.Optional[str] = None,
preserve_path: bool = False,
follow_symlinks: T.Optional[bool] = None) -> build.Data:
install_dir_name = install_dir.optname if isinstance(install_dir, P_OBJ.OptionString) else install_dir
dirs = collections.defaultdict(list)
if preserve_path:
for file in sources:
dirname = os.path.dirname(file.fname)
dirs[dirname].append(file)
else:
dirs[''].extend(sources)
ret_data = []
for childdir, files in dirs.items():
d = build.Data(files, os.path.join(install_dir, childdir), os.path.join(install_dir_name, childdir),
install_mode, self.subproject, rename, tag, install_data_type,
follow_symlinks)
ret_data.append(d)
self.build.data.extend(ret_data)
return ret_data
@typed_pos_args('install_subdir', str)
@typed_kwargs(
'install_subdir',
KwargInfo('install_dir', str, required=True),
KwargInfo('strip_directory', bool, default=False),
KwargInfo('exclude_files', ContainerTypeInfo(list, str),
default=[], listify=True, since='0.42.0',
validator=lambda x: 'cannot be absolute' if any(os.path.isabs(d) for d in x) else None),
KwargInfo('exclude_directories', ContainerTypeInfo(list, str),
default=[], listify=True, since='0.42.0',
validator=lambda x: 'cannot be absolute' if any(os.path.isabs(d) for d in x) else None),
INSTALL_MODE_KW.evolve(since='0.38.0'),
INSTALL_TAG_KW.evolve(since='0.60.0'),
INSTALL_FOLLOW_SYMLINKS,
)
def func_install_subdir(self, node: mparser.BaseNode, args: T.Tuple[str],
kwargs: 'kwtypes.FuncInstallSubdir') -> build.InstallDir:
exclude = (set(kwargs['exclude_files']), set(kwargs['exclude_directories']))
srcdir = os.path.join(self.environment.source_dir, self.subdir, args[0])
if not os.path.isdir(srcdir) or not any(os.listdir(srcdir)):
FeatureNew.single_use('install_subdir with empty directory', '0.47.0', self.subproject, location=node)
FeatureDeprecated.single_use('install_subdir with empty directory', '0.60.0', self.subproject,
'It worked by accident and is buggy. Use install_emptydir instead.', node)
install_mode = self._warn_kwarg_install_mode_sticky(kwargs['install_mode'])
idir_name = kwargs['install_dir']
if isinstance(idir_name, P_OBJ.OptionString):
idir_name = idir_name.optname
idir = build.InstallDir(
self.subdir,
args[0],
kwargs['install_dir'],
idir_name,
install_mode,
exclude,
kwargs['strip_directory'],
self.subproject,
install_tag=kwargs['install_tag'],
follow_symlinks=kwargs['follow_symlinks'])
self.build.install_dirs.append(idir)
return idir
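
    # configure_file() has three mutually exclusive modes: substitute values
    # from a configuration_data() object, run an external command, or plain
    # copy. Illustrative meson.build usage (names are placeholders):
    #   cfg = configure_file(input: 'config.h.in', output: 'config.h',
    #                        configuration: conf)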
@noPosargs
@typed_kwargs(
'configure_file',
DEPFILE_KW.evolve(since='0.52.0'),
        INSTALL_MODE_KW.evolve(since='0.47.0'),
INSTALL_TAG_KW.evolve(since='0.60.0'),
KwargInfo('capture', bool, default=False, since='0.41.0'),
KwargInfo(
'command',
(ContainerTypeInfo(list, (build.Executable, ExternalProgram, compilers.Compiler, mesonlib.File, str), allow_empty=False), NoneType),
listify=True,
),
KwargInfo(
'configuration',
(ContainerTypeInfo(dict, (str, int, bool)), build.ConfigurationData, NoneType),
),
KwargInfo(
'copy', bool, default=False, since='0.47.0',
),
KwargInfo('encoding', str, default='utf-8', since='0.47.0'),
KwargInfo('format', str, default='meson', since='0.46.0',
validator=in_set_validator({'meson', 'cmake', 'cmake@'})),
KwargInfo(
'input',
ContainerTypeInfo(list, (mesonlib.File, str)),
listify=True,
default=[],
),
# Cannot use shared implementation until None backwards compat is dropped
KwargInfo('install', (bool, NoneType), since='0.50.0'),
KwargInfo('install_dir', (str, bool), default='',
validator=lambda x: 'must be `false` if boolean' if x is True else None),
OUTPUT_KW,
KwargInfo('output_format', str, default='c', since='0.47.0', since_values={'json': '1.3.0'},
validator=in_set_validator({'c', 'json', 'nasm'})),
KwargInfo('macro_name', (str, NoneType), default=None, since='1.3.0'),
)
def func_configure_file(self, node: mparser.BaseNode, args: T.List[TYPE_var],
kwargs: kwtypes.ConfigureFile):
actions = sorted(x for x in ['configuration', 'command', 'copy']
if kwargs[x] not in [None, False])
num_actions = len(actions)
if num_actions == 0:
raise InterpreterException('Must specify an action with one of these '
'keyword arguments: \'configuration\', '
'\'command\', or \'copy\'.')
elif num_actions == 2:
raise InterpreterException('Must not specify both {!r} and {!r} '
'keyword arguments since they are '
'mutually exclusive.'.format(*actions))
elif num_actions == 3:
raise InterpreterException('Must specify one of {!r}, {!r}, and '
'{!r} keyword arguments since they are '
'mutually exclusive.'.format(*actions))
if kwargs['capture'] and not kwargs['command']:
raise InvalidArguments('configure_file: "capture" keyword requires "command" keyword.')
install_mode = self._warn_kwarg_install_mode_sticky(kwargs['install_mode'])
fmt = kwargs['format']
output_format = kwargs['output_format']
depfile = kwargs['depfile']
# Validate input
inputs = self.source_strings_to_files(kwargs['input'])
inputs_abs = []
for f in inputs:
if isinstance(f, mesonlib.File):
inputs_abs.append(f.absolute_path(self.environment.source_dir,
self.environment.build_dir))
self.add_build_def_file(f)
else:
raise InterpreterException('Inputs can only be strings or file objects')
# Validate output
output = kwargs['output']
if inputs_abs:
values = mesonlib.get_filenames_templates_dict(inputs_abs, None)
outputs = mesonlib.substitute_values([output], values)
output = outputs[0]
if depfile:
depfile = mesonlib.substitute_values([depfile], values)[0]
ofile_rpath = os.path.join(self.subdir, output)
if ofile_rpath in self.configure_file_outputs:
mesonbuildfile = os.path.join(self.subdir, 'meson.build')
current_call = f"{mesonbuildfile}:{self.current_lineno}"
first_call = "{}:{}".format(mesonbuildfile, self.configure_file_outputs[ofile_rpath])
mlog.warning('Output file', mlog.bold(ofile_rpath, True), 'for configure_file() at', current_call, 'overwrites configure_file() output at', first_call)
else:
self.configure_file_outputs[ofile_rpath] = self.current_lineno
(ofile_path, ofile_fname) = os.path.split(os.path.join(self.subdir, output))
ofile_abs = os.path.join(self.environment.build_dir, ofile_path, ofile_fname)
# Perform the appropriate action
if kwargs['configuration'] is not None:
conf = kwargs['configuration']
if isinstance(conf, dict):
FeatureNew.single_use('configure_file.configuration dictionary', '0.49.0', self.subproject, location=node)
for k, v in conf.items():
if not isinstance(v, (str, int, bool)):
                        raise InvalidArguments(
                            f'"configuration_data": initial value dictionary key {k!r} must be "str | int | bool", not {v!r}')
conf = build.ConfigurationData(conf)
mlog.log('Configuring', mlog.bold(output), 'using configuration')
if len(inputs) > 1:
                raise InterpreterException('At most one input file can be given in configuration mode')
if inputs:
os.makedirs(os.path.join(self.environment.build_dir, self.subdir), exist_ok=True)
file_encoding = kwargs['encoding']
missing_variables, confdata_useless = \
mesonlib.do_conf_file(inputs_abs[0], ofile_abs, conf,
fmt, file_encoding, self.subproject)
if missing_variables:
var_list = ", ".join(repr(m) for m in sorted(missing_variables))
mlog.warning(
f"The variable(s) {var_list} in the input file '{inputs[0]}' are not "
"present in the given configuration data.", location=node)
if confdata_useless:
ifbase = os.path.basename(inputs_abs[0])
tv = FeatureNew.get_target_version(self.subproject)
if FeatureNew.check_version(tv, '0.47.0'):
mlog.warning('Got an empty configuration_data() object and found no '
f'substitutions in the input file {ifbase!r}. If you want to '
'copy a file to the build dir, use the \'copy:\' keyword '
'argument added in 0.47.0', location=node)
else:
macro_name = kwargs['macro_name']
mesonlib.dump_conf_header(ofile_abs, conf, output_format, macro_name)
conf.used = True
elif kwargs['command'] is not None:
if len(inputs) > 1:
FeatureNew.single_use('multiple inputs in configure_file()', '0.52.0', self.subproject, location=node)
# We use absolute paths for input and output here because the cwd
# that the command is run from is 'unspecified', so it could change.
# Currently it's builddir/subdir for in_builddir else srcdir/subdir.
values = mesonlib.get_filenames_templates_dict(inputs_abs, [ofile_abs])
if depfile:
depfile = os.path.join(self.environment.get_scratch_dir(), depfile)
values['@DEPFILE@'] = depfile
# Substitute @INPUT@, @OUTPUT@, etc here.
_cmd = mesonlib.substitute_values(kwargs['command'], values)
mlog.log('Configuring', mlog.bold(output), 'with command')
cmd, *args = _cmd
res = self.run_command_impl((cmd, args),
{'capture': True, 'check': True, 'env': EnvironmentVariables()},
True)
if kwargs['capture']:
dst_tmp = ofile_abs + '~'
file_encoding = kwargs['encoding']
with open(dst_tmp, 'w', encoding=file_encoding) as f:
f.writelines(res.stdout)
if inputs_abs:
shutil.copymode(inputs_abs[0], dst_tmp)
mesonlib.replace_if_different(ofile_abs, dst_tmp)
if depfile:
mlog.log('Reading depfile:', mlog.bold(depfile))
with open(depfile, encoding='utf-8') as f:
df = DepFile(f.readlines())
deps = df.get_all_dependencies(ofile_fname)
for dep in deps:
self.add_build_def_file(dep)
elif kwargs['copy']:
if len(inputs_abs) != 1:
raise InterpreterException('Exactly one input file must be given in copy mode')
os.makedirs(os.path.join(self.environment.build_dir, self.subdir), exist_ok=True)
shutil.copy2(inputs_abs[0], ofile_abs)
        # Install the file if requested. We check for the empty string for
        # backwards compatibility; that was the behaviour before 0.45.0,
        # so preserve it.
idir = kwargs['install_dir']
if idir is False:
idir = ''
FeatureDeprecated.single_use('configure_file install_dir: false', '0.50.0',
self.subproject, 'Use the `install:` kwarg instead', location=node)
install = kwargs['install'] if kwargs['install'] is not None else idir != ''
if install:
if not idir:
raise InterpreterException(
'"install_dir" must be specified when "install" in a configure_file is true')
idir_name = idir
if isinstance(idir_name, P_OBJ.OptionString):
idir_name = idir_name.optname
cfile = mesonlib.File.from_built_file(ofile_path, ofile_fname)
install_tag = kwargs['install_tag']
self.build.data.append(build.Data([cfile], idir, idir_name, install_mode, self.subproject,
install_tag=install_tag, data_type='configure'))
return mesonlib.File.from_built_file(self.subdir, output)
def extract_incdirs(self, kwargs, key: str = 'include_directories') -> T.List[build.IncludeDirs]:
prospectives = extract_as_list(kwargs, key)
if key == 'include_directories':
for i in prospectives:
if isinstance(i, str):
FeatureNew.single_use('include_directories kwarg of type string', '0.50.0', self.subproject,
f'Use include_directories({i!r}) instead', location=self.current_node)
break
result: T.List[build.IncludeDirs] = []
for p in prospectives:
if isinstance(p, build.IncludeDirs):
result.append(p)
elif isinstance(p, str):
if key == 'd_import_dirs' and os.path.normpath(p).startswith(self.environment.get_source_dir()):
FeatureDeprecated.single_use('Building absolute path to source dir is not supported',
'0.45', self.subproject,
'Use a relative path instead.',
location=self.current_node)
p = os.path.relpath(p, os.path.join(self.environment.get_source_dir(), self.subdir))
result.append(self.build_incdir_object([p]))
else:
raise InterpreterException('Include directory objects can only be created from strings or include directories.')
return result
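
    # include_directories() turns project-relative dirs into IncludeDirs
    # objects covering both the source and the build dir. Illustrative
    # meson.build usage:
    #   inc = include_directories('include')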
@typed_pos_args('include_directories', varargs=str)
@typed_kwargs('include_directories', KwargInfo('is_system', bool, default=False))
def func_include_directories(self, node: mparser.BaseNode, args: T.Tuple[T.List[str]],
kwargs: 'kwtypes.FuncIncludeDirectories') -> build.IncludeDirs:
return self.build_incdir_object(args[0], kwargs['is_system'])
def build_incdir_object(self, incdir_strings: T.List[str], is_system: bool = False) -> build.IncludeDirs:
if not isinstance(is_system, bool):
raise InvalidArguments('Is_system must be boolean.')
src_root = self.environment.get_source_dir()
build_root = self.environment.get_build_dir()
absbase_src = os.path.join(src_root, self.subdir)
absbase_build = os.path.join(build_root, self.subdir)
for a in incdir_strings:
if path_is_in_root(Path(a), Path(src_root)):
raise InvalidArguments(textwrap.dedent('''\
Tried to form an absolute path to a dir in the source tree.
You should not do that but use relative paths instead, for
directories that are part of your project.
To get include path to any directory relative to the current dir do
incdir = include_directories(dirname)
After this incdir will contain both the current source dir as well as the
corresponding build dir. It can then be used in any subdirectory and
Meson will take care of all the busywork to make paths work.
Dirname can even be '.' to mark the current directory. Though you should
remember that the current source and build directories are always
put in the include directories by default so you only need to do
include_directories('.') if you intend to use the result in a
different subdirectory.
Note that this error message can also be triggered by
external dependencies being installed within your source
tree - it's not recommended to do this.
'''))
else:
try:
self.validate_within_subproject(self.subdir, a)
except InterpreterException:
mlog.warning('include_directories sandbox violation!', location=self.current_node)
print(textwrap.dedent(f'''\
The project is trying to access the directory {a!r} which belongs to a different
subproject. This is a problem as it hardcodes the relative paths of these two projects.
This makes it impossible to compile the project in any other directory layout and also
prevents the subproject from changing its own directory layout.
Instead of poking directly at the internals the subproject should be executed and
it should set a variable that the caller can then use. Something like:
# In subproject
some_dep = declare_dependency(include_directories: include_directories('include'))
# In subproject wrap file
[provide]
some = some_dep
# In parent project
some_dep = dependency('some')
executable(..., dependencies: [some_dep])
This warning will become a hard error in a future Meson release.
'''))
absdir_src = os.path.join(absbase_src, a)
absdir_build = os.path.join(absbase_build, a)
if not os.path.isdir(absdir_src) and not os.path.isdir(absdir_build):
raise InvalidArguments(f'Include dir {a} does not exist.')
i = build.IncludeDirs(self.subdir, incdir_strings, is_system)
return i
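# Illustrative usage sketch (hypothetical meson.build, not part of this module):
# the IncludeDirs object built here is normally consumed like this, with the
# directory and target names being placeholders.
#
#     inc = include_directories('include', is_system: false)
#     executable('app', 'src/main.c', include_directories: inc)
#
# The returned object carries both the source-dir and the corresponding
# build-dir variant of each path, which is why only relative paths are accepted.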
@typed_pos_args('add_test_setup', str)
@typed_kwargs(
'add_test_setup',
KwargInfo('exe_wrapper', ContainerTypeInfo(list, (str, ExternalProgram)), listify=True, default=[]),
KwargInfo('gdb', bool, default=False),
KwargInfo('timeout_multiplier', int, default=1),
KwargInfo('exclude_suites', ContainerTypeInfo(list, str), listify=True, default=[], since='0.57.0'),
KwargInfo('is_default', bool, default=False, since='0.49.0'),
ENV_KW,
)
def func_add_test_setup(self, node: mparser.BaseNode, args: T.Tuple[str], kwargs: 'kwtypes.AddTestSetup') -> None:
setup_name = args[0]
if re.fullmatch('([_a-zA-Z][_0-9a-zA-Z]*:)?[_a-zA-Z][_0-9a-zA-Z]*', setup_name) is None:
raise InterpreterException('Setup name may only contain alphanumeric characters and underscores, optionally prefixed with a project name and a colon.')
if ":" not in setup_name:
setup_name = f'{(self.subproject if self.subproject else self.build.project_name)}:{setup_name}'
exe_wrapper: T.List[str] = []
for i in kwargs['exe_wrapper']:
if isinstance(i, str):
exe_wrapper.append(i)
else:
if not i.found():
raise InterpreterException('Tried to use non-found executable.')
exe_wrapper += i.get_command()
timeout_multiplier = kwargs['timeout_multiplier']
if timeout_multiplier <= 0:
FeatureNew('add_test_setup() timeout_multiplier <= 0', '0.57.0').use(self.subproject)
if kwargs['is_default']:
if self.build.test_setup_default_name is not None:
raise InterpreterException(f'{self.build.test_setup_default_name!r} is already set as default. '
'is_default can be set to true only once')
self.build.test_setup_default_name = setup_name
self.build.test_setups[setup_name] = build.TestSetup(exe_wrapper, kwargs['gdb'], timeout_multiplier, kwargs['env'],
kwargs['exclude_suites'])
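# Illustrative usage sketch (hypothetical meson.build): an unqualified setup
# name is prefixed with the (sub)project name as done above, and the setup is
# then selected with `meson test --setup=<name>`.
#
#     add_test_setup('valgrind',
#       exe_wrapper: ['valgrind', '--error-exitcode=1'],
#       timeout_multiplier: 3,
#       exclude_suites: ['slow'])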
@typed_pos_args('add_global_arguments', varargs=str)
@typed_kwargs('add_global_arguments', NATIVE_KW, LANGUAGE_KW)
def func_add_global_arguments(self, node: mparser.FunctionNode, args: T.Tuple[T.List[str]], kwargs: 'kwtypes.FuncAddProjectArgs') -> None:
self._add_global_arguments(node, self.build.global_args[kwargs['native']], args[0], kwargs)
@typed_pos_args('add_global_link_arguments', varargs=str)
@typed_kwargs('add_global_link_arguments', NATIVE_KW, LANGUAGE_KW)
def func_add_global_link_arguments(self, node: mparser.FunctionNode, args: T.Tuple[T.List[str]], kwargs: 'kwtypes.FuncAddProjectArgs') -> None:
self._add_global_arguments(node, self.build.global_link_args[kwargs['native']], args[0], kwargs)
@typed_pos_args('add_project_arguments', varargs=str)
@typed_kwargs('add_project_arguments', NATIVE_KW, LANGUAGE_KW)
def func_add_project_arguments(self, node: mparser.FunctionNode, args: T.Tuple[T.List[str]], kwargs: 'kwtypes.FuncAddProjectArgs') -> None:
self._add_project_arguments(node, self.build.projects_args[kwargs['native']], args[0], kwargs)
@typed_pos_args('add_project_link_arguments', varargs=str)
@typed_kwargs('add_project_link_arguments', NATIVE_KW, LANGUAGE_KW)
def func_add_project_link_arguments(self, node: mparser.FunctionNode, args: T.Tuple[T.List[str]], kwargs: 'kwtypes.FuncAddProjectArgs') -> None:
self._add_project_arguments(node, self.build.projects_link_args[kwargs['native']], args[0], kwargs)
@FeatureNew('add_project_dependencies', '0.63.0')
@typed_pos_args('add_project_dependencies', varargs=dependencies.Dependency)
@typed_kwargs('add_project_dependencies', NATIVE_KW, LANGUAGE_KW)
def func_add_project_dependencies(self, node: mparser.FunctionNode, args: T.Tuple[T.List[dependencies.Dependency]], kwargs: 'kwtypes.FuncAddProjectArgs') -> None:
for_machine = kwargs['native']
for lang in kwargs['language']:
if lang not in self.compilers[for_machine]:
raise InvalidCode(f'add_project_dependencies() called before add_language() for language "{lang}"')
for d in dependencies.get_leaf_external_dependencies(args[0]):
compile_args = list(d.get_compile_args())
system_incdir = d.get_include_type() == 'system'
for i in d.get_include_dirs():
for lang in kwargs['language']:
comp = self.coredata.compilers[for_machine][lang]
for idir in i.to_string_list(self.environment.get_source_dir(), self.environment.get_build_dir()):
compile_args.extend(comp.get_include_args(idir, system_incdir))
self._add_project_arguments(node, self.build.projects_args[for_machine], compile_args, kwargs)
self._add_project_arguments(node, self.build.projects_link_args[for_machine], d.get_link_args(), kwargs)
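# Illustrative usage sketch (hypothetical meson.build): the dependency's
# compile and link flags are folded into the per-language project arguments,
# so the language must already have been added via project() or add_languages().
#
#     zdep = dependency('zlib')
#     add_project_dependencies(zdep, language: 'c')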
def _warn_about_builtin_args(self, args: T.List[str]) -> None:
# -Wpedantic is deliberately not included, since some people want to use it but not use -Wextra
# see e.g.
# https://github.com/mesonbuild/meson/issues/3275#issuecomment-641354956
# https://github.com/mesonbuild/meson/issues/3742
warnargs = ('/W1', '/W2', '/W3', '/W4', '/Wall', '-Wall', '-Wextra')
optargs = ('-O0', '-O2', '-O3', '-Os', '-Oz', '/O1', '/O2', '/Os')
for arg in args:
if arg in warnargs:
mlog.warning(f'Consider using the built-in warning_level option instead of using "{arg}".',
location=self.current_node)
elif arg in optargs:
mlog.warning(f'Consider using the built-in optimization level instead of using "{arg}".',
location=self.current_node)
elif arg == '-Werror':
mlog.warning(f'Consider using the built-in werror option instead of using "{arg}".',
location=self.current_node)
elif arg == '-g':
mlog.warning(f'Consider using the built-in debug option instead of using "{arg}".',
location=self.current_node)
# Don't catch things like `-fsanitize-recover`
elif arg in {'-fsanitize', '/fsanitize'} or arg.startswith(('-fsanitize=', '/fsanitize=')):
mlog.warning(f'Consider using the built-in option for sanitizers instead of using "{arg}".',
location=self.current_node)
elif arg.startswith('-std=') or arg.startswith('/std:'):
mlog.warning(f'Consider using the built-in option for language standard version instead of using "{arg}".',
location=self.current_node)
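# Illustrative sketch of what the warnings above suggest (hypothetical flags and
# values, not exact equivalences): instead of hard-coding compiler flags, e.g.
#
#     add_project_arguments('-Wall', '-O2', '-g', language: 'c')
#
# the corresponding built-in options can be set at configure time:
#
#     meson setup builddir -Dwarning_level=2 -Doptimization=2 -Ddebug=true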
def _add_global_arguments(self, node: mparser.FunctionNode, argsdict: T.Dict[str, T.List[str]],
args: T.List[str], kwargs: 'kwtypes.FuncAddProjectArgs') -> None:
if self.is_subproject():
msg = f'Function \'{node.func_name.value}\' cannot be used in subprojects because ' \
'there is no way to make that reliable.\nPlease only call ' \
'this if is_subproject() returns false. Alternatively, ' \
'define a variable that\ncontains your language-specific ' \
'arguments and add it to the appropriate *_args kwarg ' \
'in each target.'
raise InvalidCode(msg)
frozen = self.project_args_frozen or self.global_args_frozen
self._add_arguments(node, argsdict, frozen, args, kwargs)
def _add_project_arguments(self, node: mparser.FunctionNode, argsdict: T.Dict[str, T.Dict[str, T.List[str]]],
args: T.List[str], kwargs: 'kwtypes.FuncAddProjectArgs') -> None:
if self.subproject not in argsdict:
argsdict[self.subproject] = {}
self._add_arguments(node, argsdict[self.subproject],
self.project_args_frozen, args, kwargs)
def _add_arguments(self, node: mparser.FunctionNode, argsdict: T.Dict[str, T.List[str]],
args_frozen: bool, args: T.List[str], kwargs: 'kwtypes.FuncAddProjectArgs') -> None:
if args_frozen:
msg = f'Tried to use \'{node.func_name.value}\' after a build target has been declared.\n' \
'This is not permitted. Please declare all arguments before your targets.'
raise InvalidCode(msg)
self._warn_about_builtin_args(args)
for lang in kwargs['language']:
argsdict[lang] = argsdict.get(lang, []) + args
@noArgsFlattening
@typed_pos_args('environment', optargs=[(str, list, dict)])
@typed_kwargs('environment', ENV_METHOD_KW, ENV_SEPARATOR_KW.evolve(since='0.62.0'))
def func_environment(self, node: mparser.FunctionNode, args: T.Tuple[T.Union[None, str, T.List['TYPE_var'], T.Dict[str, 'TYPE_var']]],
kwargs: 'TYPE_kwargs') -> EnvironmentVariables:
init = args[0]
if init is not None:
FeatureNew.single_use('environment positional arguments', '0.52.0', self.subproject, location=node)
msg = ENV_KW.validator(init)
if msg:
raise InvalidArguments(f'"environment": {msg}')
if isinstance(init, dict) and any(i for i in init.values() if isinstance(i, list)):
FeatureNew.single_use('List of string in dictionary value', '0.62.0', self.subproject, location=node)
return env_convertor_with_method(init, kwargs['method'], kwargs['separator'])
return EnvironmentVariables()
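# Illustrative usage sketch (hypothetical meson.build): the optional positional
# initializer plus the method/separator kwargs handled above allow e.g.
#
#     env = environment({'PATH': ['/opt/tools/bin', '/usr/bin']},
#                       method: 'prepend', separator: ':')
#     test('integration', exe, env: env)
#
# where `exe` is some previously defined executable() target.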
@typed_pos_args('join_paths', varargs=str, min_varargs=1)
@noKwargs
def func_join_paths(self, node: mparser.BaseNode, args: T.Tuple[T.List[str]], kwargs: 'TYPE_kwargs') -> str:
parts = args[0]
other = os.path.join('', *parts[1:]).replace('\\', '/')
ret = os.path.join(*parts).replace('\\', '/')
if isinstance(parts[0], P_OBJ.DependencyVariableString) and '..' not in other:
return P_OBJ.DependencyVariableString(ret)
elif isinstance(parts[0], P_OBJ.OptionString):
name = os.path.join(parts[0].optname, other)
return P_OBJ.OptionString(ret, name)
else:
return ret
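# Illustrative behaviour sketch (hypothetical values): join_paths() normalizes
# the result to forward slashes, and the special string subclasses handled
# above keep their metadata when the result stays inside the original prefix.
#
#     join_paths('share', 'myproject')     # -> 'share/myproject'
#     join_paths('/usr', 'local', 'bin')   # -> '/usr/local/bin'
#
# The `/` string operator is an equivalent spelling for joining two components,
# e.g. `'share' / 'myproject'`.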
def run(self) -> None:
super().run()
mlog.log('Build targets in project:', mlog.bold(str(len(self.build.targets))))
FeatureNew.report(self.subproject)
FeatureDeprecated.report(self.subproject)
FeatureBroken.report(self.subproject)
if not self.is_subproject():
self.print_extra_warnings()
self._print_summary()
def print_extra_warnings(self) -> None:
# TODO cross compilation
for c in self.coredata.compilers.host.values():
if c.get_id() == 'clang':
self.check_clang_asan_lundef()
break
def check_clang_asan_lundef(self) -> None:
if OptionKey('b_lundef') not in self.coredata.optstore:
return
if OptionKey('b_sanitize') not in self.coredata.optstore:
return
if (self.coredata.optstore.get_value('b_lundef') and
self.coredata.optstore.get_value('b_sanitize') != 'none'):
value = self.coredata.optstore.get_value('b_sanitize')
mlog.warning(textwrap.dedent(f'''\
Trying to use {value} sanitizer on Clang with b_lundef.
This will probably not work.
Try setting b_lundef to false instead.'''),
location=self.current_node) # noqa: E128
# Check that the indicated file is within the same subproject
# as we currently are. This is to stop people doing
# nasty things like:
#
# f = files('../../master_src/file.c')
#
# Note that this is validated only when the file
# object is generated. The result can be used in a different
# subproject than it is defined in (due to e.g. a
# declare_dependency).
def validate_within_subproject(self, subdir, fname):
srcdir = Path(self.environment.source_dir)
builddir = Path(self.environment.build_dir)
if isinstance(fname, P_OBJ.DependencyVariableString):
def validate_installable_file(fpath: Path) -> bool:
installablefiles: T.Set[Path] = set()
for d in self.build.data:
for s in d.sources:
installablefiles.add(Path(s.absolute_path(srcdir, builddir)))
installabledirs = [str(Path(srcdir, s.source_subdir)) for s in self.build.install_dirs]
if fpath in installablefiles:
return True
for d in installabledirs:
if str(fpath).startswith(d):
return True
return False
norm = Path(fname)
# variables built from a dep.get_variable are allowed to refer to
# subproject files, as long as they are scheduled to be installed.
if validate_installable_file(norm):
return
norm = Path(os.path.abspath(Path(srcdir, subdir, fname)))
if os.path.isdir(norm):
inputtype = 'directory'
else:
inputtype = 'file'
if InterpreterRuleRelaxation.ALLOW_BUILD_DIR_FILE_REFERENCES in self.relaxations and builddir in norm.parents:
return
if srcdir not in norm.parents:
# Grabbing files outside the source tree is ok.
# This is for vendor stuff like:
#
# /opt/vendorsdk/src/file_with_license_restrictions.c
return
project_root = Path(srcdir, self.root_subdir)
subproject_dir = project_root / self.subproject_dir
if norm == project_root:
return
if project_root not in norm.parents:
raise InterpreterException(f'Sandbox violation: Tried to grab {inputtype} {norm.name} outside current (sub)project.')
if subproject_dir == norm or subproject_dir in norm.parents:
raise InterpreterException(f'Sandbox violation: Tried to grab {inputtype} {norm.name} from a nested subproject.')
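# Illustrative sketch of the sandbox rule enforced above (hypothetical paths):
# reaching into another (sub)project with a relative path is rejected,
#
#     srcs = files('../../other_subproject/src/util.c')   # Sandbox violation
#
# while paths entirely outside the source tree (e.g. a vendor SDK under /opt)
# are allowed, and dependency-provided paths are allowed when the referenced
# files are scheduled for installation.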
@T.overload
def source_strings_to_files(self, sources: T.List['mesonlib.FileOrString'], strict: bool = True) -> T.List['mesonlib.File']: ...
@T.overload
def source_strings_to_files(self, sources: T.List['mesonlib.FileOrString'], strict: bool = False) -> T.List['mesonlib.FileOrString']: ... # noqa: F811
@T.overload
def source_strings_to_files(self, sources: T.List[T.Union[mesonlib.FileOrString, build.GeneratedTypes]]) -> T.List[T.Union[mesonlib.File, build.GeneratedTypes]]: ... # noqa: F811
@T.overload
def source_strings_to_files(self, sources: T.List['SourceInputs'], strict: bool = True) -> T.List['SourceOutputs']: ... # noqa: F811
@T.overload
def source_strings_to_files(self, sources: T.List[SourcesVarargsType], strict: bool = True) -> T.List['SourceOutputs']: ... # noqa: F811
def source_strings_to_files(self, sources: T.List['SourceInputs'], strict: bool = True) -> T.List['SourceOutputs']: # noqa: F811
"""Lower inputs to a list of Targets and Files, replacing any strings.
:param sources: A raw (Meson DSL) list of inputs (targets, files, and
strings)
:raises InterpreterException: if any of the inputs are of an invalid type
:return: A list of Targets and Files
"""
mesonlib.check_direntry_issues(sources)
if not isinstance(sources, list):
sources = [sources]
results: T.List['SourceOutputs'] = []
for s in sources:
if isinstance(s, str):
if not strict and s.startswith(self.environment.get_build_dir()):
results.append(s)
mlog.warning(f'Source item {s!r} cannot be converted to a File object because it is a generated file. '
'This will become a hard error in meson 2.0.', location=self.current_node)
else:
self.validate_within_subproject(self.subdir, s)
results.append(mesonlib.File.from_source_file(self.environment.source_dir, self.subdir, s))
elif isinstance(s, mesonlib.File):
results.append(s)
elif isinstance(s, (build.GeneratedList, build.BuildTarget,
build.CustomTargetIndex, build.CustomTarget,
build.ExtractedObjects, build.StructuredSources)):
results.append(s)
else:
raise InterpreterException(f'Source item is {s!r} instead of '
'a string or File-type object')
return results
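# Illustrative sketch of the inputs lowered above (hypothetical meson.build):
# plain strings, files() objects and generated sources may be mixed freely;
# strings are resolved relative to the current subdir.
#
#     srcs = files('util.c') + ['main.c']
#     executable('app', srcs)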
@staticmethod
def validate_forbidden_targets(name: str) -> None:
if name.startswith('meson-internal__'):
raise InvalidArguments("Target names starting with 'meson-internal__' are reserved "
"for Meson's internal use. Please rename.")
if name.startswith('meson-') and '.' not in name:
raise InvalidArguments("Target names starting with 'meson-' and without a file extension "
"are reserved for Meson's internal use. Please rename.")
if name in coredata.FORBIDDEN_TARGET_NAMES:
raise InvalidArguments(f"Target name '{name}' is reserved for Meson's "
"internal use. Please rename.")
def add_target(self, name: str, tobj: build.Target) -> None:
if self.backend.name == 'none':
raise InterpreterException('Install-only backend cannot generate target rules, try using `--backend=ninja`.')
if name == '':
raise InterpreterException('Target name must not be empty.')
if name.strip() == '':
raise InterpreterException('Target name must not consist only of whitespace.')
if has_path_sep(name):
pathseg = os.path.join(self.subdir, os.path.split(name)[0])
if os.path.exists(os.path.join(self.source_root, pathseg)):
raise InvalidArguments(textwrap.dedent(f'''\
Target "{name}" has a path segment pointing to directory "{pathseg}". This is an error.
To define a target that builds in that directory you must define it
in the meson.build file in that directory.
'''))
self.validate_forbidden_targets(name)
# To permit an executable and a shared library to have the
# same name, such as "foo.exe" and "libfoo.a".
idname = tobj.get_id()
subdir = tobj.get_subdir()
namedir = (name, subdir)
if idname in self.build.targets:
raise InvalidCode(f'Tried to create target "{name}", but a target of that name already exists.')
if isinstance(tobj, build.Executable) and namedir in self.build.targetnames:
FeatureNew.single_use(f'multiple executables with the same name, "{tobj.name}", but different suffixes in the same directory',
'1.3.0', self.subproject, location=self.current_node)
if isinstance(tobj, build.BuildTarget):
self.add_languages(tobj.missing_languages, True, tobj.for_machine)
tobj.process_compilers_late()
self.add_stdlib_info(tobj)
self.build.targets[idname] = tobj
# Only need to add executables to this set
if isinstance(tobj, build.Executable):
self.build.targetnames.update([namedir])
if idname not in self.coredata.target_guids:
self.coredata.target_guids[idname] = str(uuid.uuid4()).upper()
@FeatureNew('both_libraries', '0.46.0')
def build_both_libraries(self, node: mparser.BaseNode, args: T.Tuple[str, SourcesVarargsType], kwargs: kwtypes.Library) -> build.BothLibraries:
shared_lib = self.build_target(node, args, kwargs, build.SharedLibrary)
static_lib = self.build_target(node, args, kwargs, build.StaticLibrary)
preferred_library = self.coredata.get_option(OptionKey('default_both_libraries'))
if preferred_library == 'auto':
preferred_library = self.coredata.get_option(OptionKey('default_library'))
if preferred_library == 'both':
preferred_library = 'shared'
if self.backend.name == 'xcode':
# Xcode is a bit special in that you can't (at least for the moment)
# form a library only from object file inputs. The simple but inefficient
# solution is to use the sources directly. This will lead to them being
# built twice. This is unfortunate and slow, but at least it works.
# Feel free to submit patches to get this fixed if it is an
# issue for you.
reuse_object_files = False
elif shared_lib.uses_rust():
# FIXME: rustc supports generating both libraries in a single invocation,
# but for now compile twice.
reuse_object_files = False
elif any(k.endswith(('static_args', 'shared_args')) and v for k, v in kwargs.items()):
# Ensure not just that the keyword arguments exist, but that they are non-empty.
reuse_object_files = False
else:
reuse_object_files = static_lib.pic
if reuse_object_files:
# Replace sources with objects from the shared library to avoid
# building them twice. We post-process the static library instead of
# removing sources from args because sources could also come from
# any InternalDependency, see BuildTarget.add_deps().
static_lib.objects.append(build.ExtractedObjects(shared_lib, shared_lib.sources, shared_lib.generated, []))
static_lib.sources = []
static_lib.generated = []
# A compiler with no corresponding sources confuses the backend,
# so keep only the compilers used for linking.
static_lib.compilers = {k: v for k, v in static_lib.compilers.items() if k in compilers.clink_langs}
return build.BothLibraries(shared_lib, static_lib, preferred_library)
def build_library(self, node: mparser.BaseNode, args: T.Tuple[str, SourcesVarargsType], kwargs: kwtypes.Library):
default_library = self.coredata.get_option(OptionKey('default_library', subproject=self.subproject))
assert isinstance(default_library, str), 'for mypy'
if default_library == 'shared':
args, kwargs = resolve_second_level_holders(args, kwargs)
return self.build_target(node, args, T.cast('kwtypes.StaticLibrary', kwargs), build.SharedLibrary)
elif default_library == 'static':
args, kwargs = resolve_second_level_holders(args, kwargs)
return self.build_target(node, args, T.cast('kwtypes.SharedLibrary', kwargs), build.StaticLibrary)
elif default_library == 'both':
return self.build_both_libraries(node, args, kwargs)
else:
raise InterpreterException(f'Unknown default_library value: {default_library}.')
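# Illustrative sketch (hypothetical meson.build): library() defers the choice
# of target type to the default_library option resolved above, so
#
#     library('foo', 'foo.c')
#
# produces a shared library, a static library, or both, depending on
# `meson setup -Ddefault_library=shared|static|both`.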
def __convert_file_args(self, raw: T.List[mesonlib.FileOrString]) -> T.Tuple[T.List[mesonlib.File], T.List[str]]:
"""Convert raw target arguments from File | str to File.
This removes files from the command line and replaces them with string
values, but adds the files to depends list
:param raw: the raw arguments
:return: A tuple of file dependencies and raw arguments
"""
depend_files: T.List[mesonlib.File] = []
args: T.List[str] = []
build_to_source = mesonlib.relpath(self.environment.get_source_dir(),
self.environment.get_build_dir())
for a in raw:
if isinstance(a, mesonlib.File):
depend_files.append(a)
args.append(a.rel_to_builddir(build_to_source))
else:
args.append(a)
return depend_files, args
def __process_language_args(self, kwargs: T.Dict[str, T.List[mesonlib.FileOrString]]) -> None:
"""Convert split language args into a combined dictionary.
The Meson DSL takes arguments in the form `<lang>_args : args`, but in the
build layer we store these in a single dictionary as `{<lang>: args}`.
This function extracts the arguments from the DSL format and prepares
them for the IR.
"""
d = kwargs.setdefault('depend_files', [])
new_args: T.DefaultDict[str, T.List[str]] = collections.defaultdict(list)
for l in compilers.all_languages:
deps, args = self.__convert_file_args(kwargs[f'{l}_args'])
new_args[l] = args
d.extend(deps)
kwargs['language_args'] = new_args
@T.overload
def build_target(self, node: mparser.BaseNode, args: T.Tuple[str, SourcesVarargsType],
kwargs: kwtypes.Executable, targetclass: T.Type[build.Executable]) -> build.Executable: ...
@T.overload
def build_target(self, node: mparser.BaseNode, args: T.Tuple[str, SourcesVarargsType],
kwargs: kwtypes.StaticLibrary, targetclass: T.Type[build.StaticLibrary]) -> build.StaticLibrary: ...
@T.overload
def build_target(self, node: mparser.BaseNode, args: T.Tuple[str, SourcesVarargsType],
kwargs: kwtypes.SharedLibrary, targetclass: T.Type[build.SharedLibrary]) -> build.SharedLibrary: ...
@T.overload
def build_target(self, node: mparser.BaseNode, args: T.Tuple[str, SourcesVarargsType],
kwargs: kwtypes.SharedModule, targetclass: T.Type[build.SharedModule]) -> build.SharedModule: ...
@T.overload
def build_target(self, node: mparser.BaseNode, args: T.Tuple[str, SourcesVarargsType],
kwargs: kwtypes.Jar, targetclass: T.Type[build.Jar]) -> build.Jar: ...
def build_target(self, node: mparser.BaseNode, args: T.Tuple[str, SourcesVarargsType],
kwargs: T.Union[kwtypes.Executable, kwtypes.StaticLibrary, kwtypes.SharedLibrary, kwtypes.SharedModule, kwtypes.Jar],
targetclass: T.Type[T.Union[build.Executable, build.StaticLibrary, build.SharedModule, build.SharedLibrary, build.Jar]]
) -> T.Union[build.Executable, build.StaticLibrary, build.SharedModule, build.SharedLibrary, build.Jar]:
name, sources = args
for_machine = kwargs['native']
if kwargs.get('rust_crate_type') == 'proc-macro':
# Silently force to native because that's the only sensible value
# and rust_crate_type is deprecated anyway.
for_machine = MachineChoice.BUILD
# Avoid mutating, since there could be other references to sources
sources = sources + kwargs['sources']
if any(isinstance(s, build.BuildTarget) for s in sources):
FeatureBroken.single_use('passing references to built targets as a source file', '1.1.0', self.subproject,
'Consider using `link_with` or `link_whole` if you meant to link, or dropping them as otherwise they are ignored.',
node)
if any(isinstance(s, build.ExtractedObjects) for s in sources):
FeatureBroken.single_use('passing object files as sources', '1.1.0', self.subproject,
'Pass these to the `objects` keyword instead, they are ignored when passed as sources.',
node)
# Go ahead and drop these here, since they're only allowed through for
# backwards compatibility anyway
sources = [s for s in sources
if not isinstance(s, (build.BuildTarget, build.ExtractedObjects))]
# due to lack of type checking, these are "allowed" for legacy reasons
if not isinstance(kwargs['install'], bool):
FeatureBroken.single_use('install kwarg with non-boolean value', '1.3.0', self.subproject,
'This was never intended to work, and is essentially the same as using `install: true` regardless of value.',
node)
sources = self.source_strings_to_files(sources)
objs = kwargs['objects']
kwargs['dependencies'] = extract_as_list(kwargs, 'dependencies')
kwargs['extra_files'] = self.source_strings_to_files(kwargs['extra_files'])
self.check_sources_exist(os.path.join(self.source_root, self.subdir), sources)
if targetclass not in {build.Executable, build.SharedLibrary, build.SharedModule, build.StaticLibrary, build.Jar}:
mlog.debug('Unknown target type:', str(targetclass))
raise RuntimeError('Unreachable code')
self.__process_language_args(kwargs)
if targetclass is build.StaticLibrary:
for lang in compilers.all_languages - {'java'}:
deps, args = self.__convert_file_args(kwargs.get(f'{lang}_static_args', []))
kwargs['language_args'][lang].extend(args)
kwargs['depend_files'].extend(deps)
elif targetclass is build.SharedLibrary:
for lang in compilers.all_languages - {'java'}:
deps, args = self.__convert_file_args(kwargs.get(f'{lang}_shared_args', []))
kwargs['language_args'][lang].extend(args)
kwargs['depend_files'].extend(deps)
if targetclass is not build.Jar:
kwargs['d_import_dirs'] = self.extract_incdirs(kwargs, 'd_import_dirs')
# Filter out kwargs from other target types. For example 'soversion'
# passed to library() when default_library == 'static'.
kwargs = {k: v for k, v in kwargs.items() if k in targetclass.known_kwargs | {'language_args'}}
srcs: T.List['SourceInputs'] = []
struct: T.Optional[build.StructuredSources] = build.StructuredSources()
for s in sources:
if isinstance(s, build.StructuredSources):
struct = struct + s
else:
srcs.append(s)
if not struct:
struct = None
else:
# Validate that we won't end up with two outputs with the same name.
# i.e., don't allow:
# [structured_sources('foo/bar.rs'), structured_sources('bar/bar.rs')]
for v in struct.sources.values():
outputs: T.Set[str] = set()
for f in v:
o: T.List[str]
if isinstance(f, str):
o = [os.path.basename(f)]
elif isinstance(f, mesonlib.File):
o = [f.fname]
else:
o = f.get_outputs()
conflicts = outputs.intersection(o)
if conflicts:
raise InvalidArguments.from_node(
f"Conflicting sources in structured sources: {', '.join(sorted(conflicts))}",
node=node)
outputs.update(o)
kwargs['include_directories'] = self.extract_incdirs(kwargs)
if targetclass is build.Executable:
kwargs = T.cast('kwtypes.Executable', kwargs)
if kwargs['gui_app'] is not None:
if kwargs['win_subsystem'] is not None:
raise InvalidArguments.from_node(
'Executable got both "gui_app", and "win_subsystem" arguments, which are mutually exclusive',
node=node)
if kwargs['gui_app']:
kwargs['win_subsystem'] = 'windows'
if kwargs['win_subsystem'] is None:
kwargs['win_subsystem'] = 'console'
if kwargs['implib']:
if kwargs['export_dynamic'] is False:
FeatureDeprecated.single_use('implib overrides explicit export_dynamic off', '1.3.0', self.subproject,
'Do not set this if you want export_dynamic disabled when implib is enabled',
location=node)
kwargs['export_dynamic'] = True
elif kwargs['export_dynamic']:
if kwargs['implib'] is False:
raise InvalidArguments('"implib" keyword" must not be false if "export_dynamic" is set and not false.')
kwargs['implib'] = True
if kwargs['export_dynamic'] is None:
kwargs['export_dynamic'] = False
if kwargs['implib'] is None:
kwargs['implib'] = False
target = targetclass(name, self.subdir, self.subproject, for_machine, srcs, struct, objs,
self.environment, self.compilers[for_machine], kwargs)
self.add_target(name, target)
self.project_args_frozen = True
return target
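# Illustrative sketch of the Executable-specific handling above (hypothetical
# meson.build): `win_subsystem` supersedes the older boolean `gui_app`, and the
# two must not be combined.
#
#     executable('app', 'main.c', win_subsystem: 'windows')   # GUI application
#     executable('tool', 'tool.c')                            # defaults to 'console'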
def add_stdlib_info(self, target):
for l in target.compilers.keys():
dep = self.build.stdlibs[target.for_machine].get(l, None)
if dep:
target.add_deps(dep)
def check_sources_exist(self, subdir, sources):
for s in sources:
if not isinstance(s, str):
continue # This means a generated source and they always exist.
fname = os.path.join(subdir, s)
if not os.path.isfile(fname):
raise InterpreterException(f'Tried to add non-existing source file {s}.')
# Only permit object extraction from the same subproject
def validate_extraction(self, buildtarget: mesonlib.HoldableObject) -> None:
if self.subproject != buildtarget.subproject:
raise InterpreterException('Tried to extract objects from a different subproject.')
def is_subproject(self) -> bool:
return self.subproject != ''
@typed_pos_args('set_variable', str, object)
@noKwargs
@noArgsFlattening
@noSecondLevelHolderResolving
def func_set_variable(self, node: mparser.BaseNode, args: T.Tuple[str, object], kwargs: 'TYPE_kwargs') -> None:
varname, value = args
self.set_variable(varname, value, holderify=True)
@typed_pos_args('get_variable', (str, Disabler), optargs=[object])
@noKwargs
@noArgsFlattening
@unholder_return
def func_get_variable(self, node: mparser.BaseNode, args: T.Tuple[T.Union[str, Disabler], T.Optional[object]],
kwargs: 'TYPE_kwargs') -> 'TYPE_var':
varname, fallback = args
if isinstance(varname, Disabler):
return varname
try:
return self.variables[varname]
except KeyError:
if fallback is not None:
return self._holderify(fallback)
raise InterpreterException(f'Tried to get unknown variable "{varname}".')
@typed_pos_args('is_variable', str)
@noKwargs
def func_is_variable(self, node: mparser.BaseNode, args: T.Tuple[str], kwargs: 'TYPE_kwargs') -> bool:
return args[0] in self.variables
@FeatureNew('unset_variable', '0.60.0')
@typed_pos_args('unset_variable', str)
@noKwargs
def func_unset_variable(self, node: mparser.BaseNode, args: T.Tuple[str], kwargs: 'TYPE_kwargs') -> None:
varname = args[0]
try:
del self.variables[varname]
except KeyError:
raise InterpreterException(f'Tried to unset unknown variable "{varname}".')
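# Illustrative sketch of the variable helpers above (hypothetical meson.build):
#
#     srcs = files('main.c')
#     set_variable('extra_srcs', files('extra.c'))
#     if is_variable('extra_srcs')
#       srcs += get_variable('extra_srcs', [])
#     endif
#     unset_variable('extra_srcs')
#
# get_variable() returns the optional second argument as a fallback instead of
# raising when the variable is not defined.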
@staticmethod
def machine_from_native_kwarg(kwargs: T.Dict[str, T.Any]) -> MachineChoice:
native = kwargs.get('native', False)
if not isinstance(native, bool):
raise InvalidArguments('Argument to "native" must be a boolean.')
return MachineChoice.BUILD if native else MachineChoice.HOST
@FeatureNew('is_disabler', '0.52.0')
@typed_pos_args('is_disabler', object)
@noKwargs
def func_is_disabler(self, node: mparser.BaseNode, args: T.Tuple[object], kwargs: 'TYPE_kwargs') -> bool:
return isinstance(args[0], Disabler)
@noKwargs
@FeatureNew('range', '0.58.0')
@typed_pos_args('range', int, optargs=[int, int])
def func_range(self, node, args: T.Tuple[int, T.Optional[int], T.Optional[int]], kwargs: T.Dict[str, T.Any]) -> P_OBJ.RangeHolder:
start, stop, step = args
# Just like Python's range, we allow range(stop), range(start, stop), or
# range(start, stop, step)
if stop is None:
stop = start
start = 0
if step is None:
step = 1
# This is more strict than Python's range()
if start < 0:
raise InterpreterException('start cannot be negative')
if stop < start:
raise InterpreterException('stop cannot be less than start')
if step < 1:
raise InterpreterException('step must be >=1')
return P_OBJ.RangeHolder(start, stop, step, subproject=self.subproject)
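# Illustrative sketch (hypothetical meson.build): the returned range object is
# iterated with foreach, mirroring Python's range() with the extra restrictions
# enforced above (non-negative start, stop >= start, step >= 1).
#
#     foreach i : range(0, 10, 2)
#       message(i.to_string())
#     endforeach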