# Copyright 2012-2021 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations

from .. import mparser
from .. import environment
from .. import coredata
from .. import dependencies
from .. import mlog
from .. import build
from .. import optinterpreter
from .. import compilers
from .. import envconfig
from ..wrap import wrap, WrapMode
from .. import mesonlib
from ..mesonlib import (MesonBugException, HoldableObject, FileMode, MachineChoice, OptionKey,
                        listify, extract_as_list, has_path_sep, PerMachine)
from ..programs import ExternalProgram, NonExistingExternalProgram
from ..dependencies import Dependency
from ..depfile import DepFile
from ..interpreterbase import ContainerTypeInfo, InterpreterBase, KwargInfo, typed_kwargs, typed_pos_args
from ..interpreterbase import noPosargs, noKwargs, permittedKwargs, noArgsFlattening, noSecondLevelHolderResolving, unholder_return
from ..interpreterbase import InterpreterException, InvalidArguments, InvalidCode, SubdirDoneRequest
from ..interpreterbase import Disabler, disablerIfNotFound
from ..interpreterbase import FeatureNew, FeatureDeprecated, FeatureNewKwargs, FeatureDeprecatedKwargs
from ..interpreterbase import ObjectHolder
from ..modules import ExtensionModule, ModuleObject, MutableModuleObject, NewExtensionModule, NotFoundExtensionModule
from ..cmake import CMakeInterpreter
from ..backend.backends import ExecutableSerialisation

from . import interpreterobjects as OBJ
from . import compiler as compilerOBJ
from .mesonmain import MesonMain
from .dependencyfallbacks import DependencyFallbacksHolder
from .interpreterobjects import (
    SubprojectHolder,
    Test,
    RunProcess,
    extract_required_kwarg,
    extract_search_dirs,
    NullSubprojectInterpreter,
)
from .type_checking import (
    COMMAND_KW,
    CT_BUILD_ALWAYS,
    CT_BUILD_ALWAYS_STALE,
    CT_BUILD_BY_DEFAULT,
    CT_INPUT_KW,
    CT_INSTALL_DIR_KW,
    MULTI_OUTPUT_KW,
    OUTPUT_KW,
    DEFAULT_OPTIONS,
    DEPENDENCIES_KW,
    DEPENDS_KW,
    DEPEND_FILES_KW,
    DEPFILE_KW,
    DISABLER_KW,
    D_MODULE_VERSIONS_KW,
    ENV_KW,
    ENV_METHOD_KW,
    ENV_SEPARATOR_KW,
    INCLUDE_DIRECTORIES,
    INSTALL_KW,
    INSTALL_DIR_KW,
    INSTALL_MODE_KW,
    LINK_WITH_KW,
    LINK_WHOLE_KW,
    CT_INSTALL_TAG_KW,
    INSTALL_TAG_KW,
    LANGUAGE_KW,
    NATIVE_KW,
    PRESERVE_PATH_KW,
    REQUIRED_KW,
    SOURCES_KW,
    VARIABLES_KW,
    NoneType,
    in_set_validator,
    env_convertor_with_method
)
from . import primitives as P_OBJ
from pathlib import Path
from enum import Enum
import os
import shutil
import uuid
import re
import stat
import collections
import typing as T
import textwrap
import importlib
import copy

if T.TYPE_CHECKING:
    import argparse

    from typing_extensions import Literal

    from . import kwargs as kwtypes
    from ..backend.backends import Backend
    from ..interpreterbase.baseobjects import InterpreterObject, TYPE_var, TYPE_kwargs
    from ..programs import OverrideProgram

    # Input source types passed to Targets
    SourceInputs = T.Union[mesonlib.File, build.GeneratedList, build.BuildTarget, build.BothLibraries,
                           build.CustomTargetIndex, build.CustomTarget, build.GeneratedList,
                           build.ExtractedObjects, str]
    # Input source types passed to the build.Target classes
    SourceOutputs = T.Union[mesonlib.File, build.GeneratedList,
                            build.BuildTarget, build.CustomTargetIndex, build.CustomTarget,
                            build.ExtractedObjects, build.GeneratedList, build.StructuredSources]


def _project_version_validator(value: T.Union[T.List, str, mesonlib.File, None]) -> T.Optional[str]:
    if isinstance(value, list):
        if len(value) != 1:
            return 'when passed as array must have a length of 1'
        elif not isinstance(value[0], mesonlib.File):
            return 'when passed as array must contain a File'
    return None


def stringifyUserArguments(args: T.List[T.Any], quote: bool = False) -> str:
    if isinstance(args, list):
        return '[%s]' % ', '.join([stringifyUserArguments(x, True) for x in args])
    elif isinstance(args, dict):
        return '{%s}' % ', '.join(['{} : {}'.format(stringifyUserArguments(k, True), stringifyUserArguments(v, True)) for k, v in args.items()])
    elif isinstance(args, bool):
        return 'true' if args else 'false'
    elif isinstance(args, int):
        return str(args)
    elif isinstance(args, str):
        return f"'{args}'" if quote else args
    raise InvalidArguments('Function accepts only strings, integers, bools, lists, dictionaries and lists thereof.')
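
# For illustration (not part of the original source): a mixed value such as
# ['a', 1, True] is rendered as "['a', 1, true]", i.e. Meson-syntax literals
# with strings quoted only when they are nested inside a container.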


class Summary:
    def __init__(self, project_name: str, project_version: str):
        self.project_name = project_name
        self.project_version = project_version
        self.sections = collections.defaultdict(dict)
        self.max_key_len = 0

    def add_section(self, section: str, values: T.Dict[str, T.Any], bool_yn: bool,
                    list_sep: T.Optional[str], subproject: str) -> None:
        for k, v in values.items():
            if k in self.sections[section]:
                raise InterpreterException(f'Summary section {section!r} already has key {k!r}')
            formatted_values = []
            for i in listify(v):
                if isinstance(i, bool) and bool_yn:
                    formatted_values.append(mlog.green('YES') if i else mlog.red('NO'))
                elif isinstance(i, (str, int, bool)):
                    formatted_values.append(str(i))
                elif isinstance(i, (ExternalProgram, Dependency)):
                    FeatureNew.single_use('dependency or external program in summary', '0.57.0', subproject)
                    formatted_values.append(i.summary_value())
                elif isinstance(i, Disabler):
                    FeatureNew.single_use('disabler in summary', '0.64.0', subproject)
                    formatted_values.append(mlog.red('NO'))
                elif isinstance(i, coredata.UserOption):
                    FeatureNew.single_use('feature option in summary', '0.58.0', subproject)
                    formatted_values.append(i.printable_value())
                else:
                    m = 'Summary value in section {!r}, key {!r}, must be string, integer, boolean, dependency, disabler, or external program'
                    raise InterpreterException(m.format(section, k))
            self.sections[section][k] = (formatted_values, list_sep)
            self.max_key_len = max(self.max_key_len, len(k))

    def dump(self):
        mlog.log(self.project_name, mlog.normal_cyan(self.project_version))
        for section, values in self.sections.items():
            mlog.log('')  # newline
            if section:
                mlog.log(' ', mlog.bold(section))
            for k, v in values.items():
                v, list_sep = v
                padding = self.max_key_len - len(k)
                end = ' ' if v else '\n'
                mlog.log(' ' * 3, k + ' ' * padding + ':', end=end)
                indent = self.max_key_len + 6
                self.dump_value(v, list_sep, indent)
        mlog.log('')  # newline

    def dump_value(self, arr, list_sep, indent):
        lines_sep = '\n' + ' ' * indent
        if list_sep is None:
            mlog.log(*arr, sep=lines_sep)
            return
        max_len = shutil.get_terminal_size().columns
        line = []
        line_len = indent
        lines_sep = list_sep.rstrip() + lines_sep
        for v in arr:
            v_len = len(v) + len(list_sep)
            if line and line_len + v_len > max_len:
                mlog.log(*line, sep=list_sep, end=lines_sep)
                line_len = indent
                line = []
            line.append(v)
            line_len += v_len
        mlog.log(*line, sep=list_sep)


known_library_kwargs = (
    build.known_shlib_kwargs |
    build.known_stlib_kwargs
)

known_build_target_kwargs = (
    known_library_kwargs |
    build.known_exe_kwargs |
    build.known_jar_kwargs |
    {'target_type'}
)

TEST_KWARGS: T.List[KwargInfo] = [
    KwargInfo('args', ContainerTypeInfo(list, (str, mesonlib.File, build.BuildTarget, build.CustomTarget, build.CustomTargetIndex)),
              listify=True, default=[]),
    KwargInfo('should_fail', bool, default=False),
    KwargInfo('timeout', int, default=30),
    KwargInfo('workdir', (str, NoneType), default=None,
              validator=lambda x: 'must be an absolute path' if not os.path.isabs(x) else None),
    KwargInfo('protocol', str,
              default='exitcode',
              validator=in_set_validator({'exitcode', 'tap', 'gtest', 'rust'}),
              since_values={'gtest': '0.55.0', 'rust': '0.57.0'}),
    KwargInfo('priority', int, default=0, since='0.52.0'),
    # TODO: env needs reworks of the way the environment variable holder itself works probably
    ENV_KW,
    DEPENDS_KW.evolve(since='0.46.0'),
    KwargInfo('suite', ContainerTypeInfo(list, str), listify=True, default=['']),  # yes, a list of empty string
    KwargInfo('verbose', bool, default=False, since='0.62.0'),
]
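
# Note (added for clarity, not in the original source): each KwargInfo above is a
# declarative schema entry consumed by the @typed_kwargs decorator, which validates
# argument types, applies defaults, and emits FeatureNew warnings for 'since' /
# 'since_values' before the decorated function body runs.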


class InterpreterRuleRelaxation(Enum):
    '''Defines specific relaxations of the Meson rules.

    This is intended to be used for automatically converted
    projects (CMake subprojects, build system mixing) that
    generate a Meson AST via introspection, etc.
    '''

    ALLOW_BUILD_DIR_FILE_REFFERENCES = 1


permitted_dependency_kwargs = {
    'allow_fallback',
    'cmake_args',
    'cmake_module_path',
    'cmake_package_version',
    'components',
    'default_options',
    'fallback',
    'include_type',
    'language',
    'main',
    'method',
    'modules',
    'native',
    'not_found_message',
    'optional_modules',
    'private_headers',
    'required',
    'static',
    'version',
}
implicit_check_false_warning = """ You should add the boolean check kwarg to the run_command call.
It currently defaults to false ,
but it will default to true in future releases of meson .
See also : https : / / github . com / mesonbuild / meson / issues / 9300 """
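
# A minimal meson.build sketch (not from the original source) of the usage the warning
# above recommends: pass the check keyword explicitly so behaviour does not change when
# the default flips, e.g.
#
#     ret = run_command('some-script.py', check: true)
#
# 'some-script.py' is a hypothetical command used only for illustration.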


class Interpreter(InterpreterBase, HoldableObject):

    def __init__(
            self,
            _build: build.Build,
            backend: T.Optional[Backend] = None,
            subproject: str = '',
            subdir: str = '',
            subproject_dir: str = 'subprojects',
            default_project_options: T.Optional[T.Dict[OptionKey, str]] = None,
            mock: bool = False,
            ast: T.Optional[mparser.CodeBlockNode] = None,
            is_translated: bool = False,
            relaxations: T.Optional[T.Set[InterpreterRuleRelaxation]] = None,
            user_defined_options: T.Optional['argparse.Namespace'] = None,
    ) -> None:
        super().__init__(_build.environment.get_source_dir(), subdir, subproject)
        self.active_projectname = ''
        self.build = _build
        self.environment = self.build.environment
        self.coredata = self.environment.get_coredata()
        self.backend = backend
        self.summary: T.Dict[str, 'Summary'] = {}
        self.modules: T.Dict[str, NewExtensionModule] = {}
        # Subproject directory is usually the name of the subproject, but can
        # be different for dependencies provided by wrap files.
        self.subproject_directory_name = subdir.split(os.path.sep)[-1]
        self.subproject_dir = subproject_dir
        self.option_file = os.path.join(self.source_root, self.subdir, 'meson_options.txt')
        self.relaxations = relaxations or set()
        if not mock and ast is None:
            self.load_root_meson_file()
            self.sanity_check_ast()
        elif ast is not None:
            self.ast = ast
            self.sanity_check_ast()
        self.builtin.update({'meson': MesonMain(self.build, self)})
        self.generators: T.List[build.Generator] = []
        self.processed_buildfiles = set()  # type: T.Set[str]
        self.project_args_frozen = False
        self.global_args_frozen = False  # implies self.project_args_frozen
        self.subprojects: T.Dict[str, SubprojectHolder] = {}
        self.subproject_stack: T.List[str] = []
        self.configure_file_outputs: T.Dict[str, int] = {}
        # Passed from the outside, only used in subprojects.
        if default_project_options:
            self.default_project_options = default_project_options.copy()
        else:
            self.default_project_options = {}
        self.project_default_options: T.Dict[OptionKey, str] = {}
        self.build_func_dict()
        self.build_holder_map()
        self.user_defined_options = user_defined_options
        self.compilers: PerMachine[T.Dict[str, 'compilers.Compiler']] = PerMachine({}, {})

        # build_def_files needs to be defined before parse_project is called
        #
        # For non-meson subprojects, we'll be using the ast. Even if it does
        # exist we don't want to add a dependency on it, it's autogenerated
        # from the actual build files, and is just for reference.
        self.build_def_files: mesonlib.OrderedSet[str] = mesonlib.OrderedSet()
        build_filename = os.path.join(self.subdir, environment.build_filename)
        if not is_translated:
            self.build_def_files.add(build_filename)
        if not mock:
            self.parse_project()
        self._redetect_machines()

    def __getnewargs_ex__(self) -> T.Tuple[T.Tuple[object], T.Dict[str, object]]:
        raise MesonBugException('This class is unpicklable')

    def _redetect_machines(self) -> None:
        # Re-initialize machine descriptions. We can do a better job now because we
        # have the compilers needed to gain more knowledge, so wipe out old
        # inference and start over.
        machines = self.build.environment.machines.miss_defaulting()
        machines.build = environment.detect_machine_info(self.coredata.compilers.build)
        self.build.environment.machines = machines.default_missing()
        assert self.build.environment.machines.build.cpu is not None
        assert self.build.environment.machines.host.cpu is not None
        assert self.build.environment.machines.target.cpu is not None

        self.builtin['build_machine'] = \
            OBJ.MachineHolder(self.build.environment.machines.build, self)
        self.builtin['host_machine'] = \
            OBJ.MachineHolder(self.build.environment.machines.host, self)
        self.builtin['target_machine'] = \
            OBJ.MachineHolder(self.build.environment.machines.target, self)

    def build_func_dict(self) -> None:
        self.funcs.update({'add_global_arguments': self.func_add_global_arguments,
                           'add_global_link_arguments': self.func_add_global_link_arguments,
                           'add_languages': self.func_add_languages,
                           'add_project_arguments': self.func_add_project_arguments,
                           'add_project_dependencies': self.func_add_project_dependencies,
                           'add_project_link_arguments': self.func_add_project_link_arguments,
                           'add_test_setup': self.func_add_test_setup,
                           'alias_target': self.func_alias_target,
                           'assert': self.func_assert,
                           'benchmark': self.func_benchmark,
                           'both_libraries': self.func_both_lib,
                           'build_target': self.func_build_target,
                           'configuration_data': self.func_configuration_data,
                           'configure_file': self.func_configure_file,
                           'custom_target': self.func_custom_target,
                           'debug': self.func_debug,
                           'declare_dependency': self.func_declare_dependency,
                           'dependency': self.func_dependency,
                           'disabler': self.func_disabler,
                           'environment': self.func_environment,
                           'error': self.func_error,
                           'executable': self.func_executable,
                           'files': self.func_files,
                           'find_library': self.func_find_library,
                           'find_program': self.func_find_program,
                           'generator': self.func_generator,
                           'get_option': self.func_get_option,
                           'get_variable': self.func_get_variable,
                           'gettext': self.func_gettext,
                           'import': self.func_import,
                           'include_directories': self.func_include_directories,
                           'install_data': self.func_install_data,
                           'install_emptydir': self.func_install_emptydir,
                           'install_headers': self.func_install_headers,
                           'install_man': self.func_install_man,
                           'install_subdir': self.func_install_subdir,
                           'install_symlink': self.func_install_symlink,
                           'is_disabler': self.func_is_disabler,
                           'is_variable': self.func_is_variable,
                           'jar': self.func_jar,
                           'join_paths': self.func_join_paths,
                           'library': self.func_library,
                           'message': self.func_message,
                           'option': self.func_option,
                           'project': self.func_project,
                           'range': self.func_range,
                           'run_command': self.func_run_command,
                           'run_target': self.func_run_target,
                           'set_variable': self.func_set_variable,
                           'structured_sources': self.func_structured_sources,
                           'subdir': self.func_subdir,
                           'shared_library': self.func_shared_lib,
                           'shared_module': self.func_shared_module,
                           'static_library': self.func_static_lib,
                           'subdir_done': self.func_subdir_done,
                           'subproject': self.func_subproject,
                           'summary': self.func_summary,
                           'test': self.func_test,
                           'unset_variable': self.func_unset_variable,
                           'vcs_tag': self.func_vcs_tag,
                           'warning': self.func_warning,
                           })
        if 'MESON_UNIT_TEST' in os.environ:
            self.funcs.update({'exception': self.func_exception})

    def build_holder_map(self) -> None:
        '''
            Build a mapping of `HoldableObject` types to their corresponding
            `ObjectHolder`s. This mapping is used in `InterpreterBase` to
            automatically holderify all returned values from methods and functions.
        '''
        self.holder_map.update({
            # Primitives
            list: P_OBJ.ArrayHolder,
            dict: P_OBJ.DictHolder,
            int: P_OBJ.IntegerHolder,
            bool: P_OBJ.BooleanHolder,
            str: P_OBJ.StringHolder,
            P_OBJ.MesonVersionString: P_OBJ.MesonVersionStringHolder,
            P_OBJ.DependencyVariableString: P_OBJ.DependencyVariableStringHolder,
            P_OBJ.OptionString: P_OBJ.OptionStringHolder,

            # Meson types
            mesonlib.File: OBJ.FileHolder,
            build.SharedLibrary: OBJ.SharedLibraryHolder,
            build.StaticLibrary: OBJ.StaticLibraryHolder,
            build.BothLibraries: OBJ.BothLibrariesHolder,
            build.SharedModule: OBJ.SharedModuleHolder,
            build.Executable: OBJ.ExecutableHolder,
            build.Jar: OBJ.JarHolder,
            build.CustomTarget: OBJ.CustomTargetHolder,
            build.CustomTargetIndex: OBJ.CustomTargetIndexHolder,
            build.Generator: OBJ.GeneratorHolder,
            build.GeneratedList: OBJ.GeneratedListHolder,
            build.ExtractedObjects: OBJ.GeneratedObjectsHolder,
            build.RunTarget: OBJ.RunTargetHolder,
            build.AliasTarget: OBJ.AliasTargetHolder,
            build.Headers: OBJ.HeadersHolder,
            build.Man: OBJ.ManHolder,
            build.EmptyDir: OBJ.EmptyDirHolder,
            build.Data: OBJ.DataHolder,
            build.SymlinkData: OBJ.SymlinkDataHolder,
            build.InstallDir: OBJ.InstallDirHolder,
            build.IncludeDirs: OBJ.IncludeDirsHolder,
            build.EnvironmentVariables: OBJ.EnvironmentVariablesHolder,
            build.StructuredSources: OBJ.StructuredSourcesHolder,
            compilers.RunResult: compilerOBJ.TryRunResultHolder,
            dependencies.ExternalLibrary: OBJ.ExternalLibraryHolder,
            coredata.UserFeatureOption: OBJ.FeatureOptionHolder,
            envconfig.MachineInfo: OBJ.MachineHolder,
            build.ConfigurationData: OBJ.ConfigurationDataHolder,
        })

        '''
            Build a mapping of `HoldableObject` base classes to their
            corresponding `ObjectHolder`s. The difference to `self.holder_map`
            is that the keys here define an upper bound instead of requiring an
            exact match.

            The mappings defined here are only used when there was no direct hit
            found in `self.holder_map`.
        '''
        self.bound_holder_map.update({
            dependencies.Dependency: OBJ.DependencyHolder,
            ExternalProgram: OBJ.ExternalProgramHolder,
            compilers.Compiler: compilerOBJ.CompilerHolder,
            ModuleObject: OBJ.ModuleObjectHolder,
            MutableModuleObject: OBJ.MutableModuleObjectHolder,
        })

    def append_holder_map(self, held_type: T.Type[mesonlib.HoldableObject], holder_type: T.Type[ObjectHolder]) -> None:
        '''
            Adds one additional mapping to the `holder_map`.

            The intended use for this function is in the `initialize` method of
            modules to register custom object holders.
        '''
        self.holder_map.update({
            held_type: holder_type
        })
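
    # Hypothetical usage sketch (not part of the original source): a module's
    # initialize() could register a custom holder roughly like
    #
    #     def initialize(interp):
    #         interp.append_holder_map(MyHeldType, MyHeldTypeHolder)
    #         return MyModule()
    #
    # where MyHeldType, MyHeldTypeHolder and MyModule are made-up names for illustration.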

    def process_new_values(self, invalues: T.List[T.Union[TYPE_var, ExecutableSerialisation]]) -> None:
        invalues = listify(invalues)
        for v in invalues:
            if isinstance(v, ObjectHolder):
                raise InterpreterException('Modules must not return ObjectHolders')
            if isinstance(v, (build.BuildTarget, build.CustomTarget, build.RunTarget)):
                self.add_target(v.name, v)
            elif isinstance(v, list):
                self.process_new_values(v)
            elif isinstance(v, ExecutableSerialisation):
                v.subproject = self.subproject
                self.build.install_scripts.append(v)
            elif isinstance(v, build.Data):
                self.build.data.append(v)
            elif isinstance(v, build.SymlinkData):
                self.build.symlinks.append(v)
            elif isinstance(v, dependencies.InternalDependency):
                # FIXME: This is special cased and not ideal:
                # The first source is our new VapiTarget, the rest are deps
                self.process_new_values(v.sources[0])
            elif isinstance(v, build.InstallDir):
                self.build.install_dirs.append(v)
            elif isinstance(v, Test):
                self.build.tests.append(v)
            elif isinstance(v, (int, str, bool, Disabler, ObjectHolder, build.GeneratedList,
                                ExternalProgram, build.ConfigurationData)):
                pass
            else:
                raise InterpreterException(f'Module returned a value of unknown type {v!r}.')

    def get_build_def_files(self) -> mesonlib.OrderedSet[str]:
        return self.build_def_files

    def add_build_def_file(self, f: mesonlib.FileOrString) -> None:
        # Use a relative path for files within the source directory, and an
        # absolute path for system files. Skip files within the build directory.
        # Also skip files that are not regular files (e.g. /dev/stdout).
        # Normalize the path to avoid duplicates; this is especially important
        # to convert '/' to '\' on Windows.
        if isinstance(f, mesonlib.File):
            if f.is_built:
                return
            f = os.path.normpath(f.relative_name())
        elif os.path.isfile(f) and not f.startswith('/dev'):
            srcdir = Path(self.environment.get_source_dir())
            builddir = Path(self.environment.get_build_dir())
            try:
                f_ = Path(f).resolve()
            except OSError:
                f_ = Path(f)
                s = f_.stat()
                if (hasattr(s, 'st_file_attributes') and
                        s.st_file_attributes & stat.FILE_ATTRIBUTE_REPARSE_POINT != 0 and
                        s.st_reparse_tag == stat.IO_REPARSE_TAG_APPEXECLINK):
                    # This is a Windows Store link which we can't
                    # resolve, so just do our best otherwise.
                    f_ = f_.parent.resolve() / f_.name
                else:
                    raise
            if builddir in f_.parents:
                return
            if srcdir in f_.parents:
                f_ = f_.relative_to(srcdir)
            f = str(f_)
        else:
            return
        if f not in self.build_def_files:
            self.build_def_files.add(f)

    def get_variables(self) -> T.Dict[str, InterpreterObject]:
        return self.variables

    def check_stdlibs(self) -> None:
        machine_choices = [MachineChoice.HOST]
        if self.coredata.is_cross_build():
            machine_choices.append(MachineChoice.BUILD)
        for for_machine in machine_choices:
            props = self.build.environment.properties[for_machine]
            for l in self.coredata.compilers[for_machine].keys():
                try:
                    di = mesonlib.stringlistify(props.get_stdlib(l))
                except KeyError:
                    continue
                if len(di) == 1:
                    FeatureNew.single_use('stdlib without variable name', '0.56.0', self.subproject, location=self.current_node)
                kwargs = {'native': for_machine is MachineChoice.BUILD,
                          }
                name = l + '_stdlib'
                df = DependencyFallbacksHolder(self, [name])
                df.set_fallback(di)
                dep = df.lookup(kwargs, force_fallback=True)
                self.build.stdlibs[for_machine][l] = dep

    @typed_pos_args('import', str)
    @typed_kwargs(
        'import',
        REQUIRED_KW.evolve(since='0.59.0'),
        DISABLER_KW.evolve(since='0.59.0'),
    )
    @disablerIfNotFound
    def func_import(self, node: mparser.BaseNode, args: T.Tuple[str],
                    kwargs: 'kwtypes.FuncImportModule') -> T.Union[ExtensionModule, NewExtensionModule, NotFoundExtensionModule]:
        modname = args[0]
        disabled, required, _ = extract_required_kwarg(kwargs, self.subproject)
        if disabled:
            return NotFoundExtensionModule(modname)

        expect_unstable = False
        # Some tests use "unstable_" instead of "unstable-", and that happens to work because
        # of implementation details
        if modname.startswith(('unstable-', 'unstable_')):
            if modname.startswith('unstable_'):
                mlog.deprecation(f'Importing unstable modules as "{modname}" instead of "{modname.replace("_", "-", 1)}"',
                                 location=node)
            real_modname = modname[len('unstable') + 1:]  # + 1 to handle the - or _
            expect_unstable = True
        else:
            real_modname = modname

        if real_modname in self.modules:
            return self.modules[real_modname]
        try:
            module = importlib.import_module(f'mesonbuild.modules.{real_modname}')
        except ImportError:
            if required:
                raise InvalidArguments(f'Module "{modname}" does not exist')
            ext_module = NotFoundExtensionModule(real_modname)
        else:
            ext_module = module.initialize(self)
            assert isinstance(ext_module, (ExtensionModule, NewExtensionModule))
            self.build.modules.append(real_modname)
        if ext_module.INFO.added:
            FeatureNew.single_use(f'module {ext_module.INFO.name}', ext_module.INFO.added, self.subproject, location=node)
        if ext_module.INFO.deprecated:
            FeatureDeprecated.single_use(f'module {ext_module.INFO.name}', ext_module.INFO.deprecated, self.subproject, location=node)
        if expect_unstable and not ext_module.INFO.unstable and ext_module.INFO.stabilized is None:
            raise InvalidArguments(f'Module {ext_module.INFO.name} has never been unstable, remove "unstable-" prefix.')
        if ext_module.INFO.stabilized is not None:
            if expect_unstable:
                FeatureDeprecated.single_use(
                    f'module {ext_module.INFO.name} has been stabilized',
                    ext_module.INFO.stabilized, self.subproject,
                    'drop "unstable-" prefix from the module name',
                    location=node)
            else:
                FeatureNew.single_use(
                    f'module {ext_module.INFO.name} as stable module',
                    ext_module.INFO.stabilized, self.subproject,
                    f'Consider either adding "unstable-" to the module name, or updating the meson required version to ">= {ext_module.INFO.stabilized}"',
                    location=node)
        elif ext_module.INFO.unstable:
            if not expect_unstable:
                if required:
                    raise InvalidArguments(f'Module "{ext_module.INFO.name}" has not been stabilized, and must be imported as unstable-{ext_module.INFO.name}')
                ext_module = NotFoundExtensionModule(real_modname)
            else:
                mlog.warning(f'Module {ext_module.INFO.name} has no backwards or forwards compatibility and might not exist in future releases.', location=node, fatal=False)

        self.modules[real_modname] = ext_module
        return ext_module

    @typed_pos_args('files', varargs=str)
    @noKwargs
    def func_files(self, node: mparser.FunctionNode, args: T.Tuple[T.List[str]], kwargs: 'TYPE_kwargs') -> T.List[mesonlib.File]:
        return self.source_strings_to_files(args[0])

    @noPosargs
    @typed_kwargs(
        'declare_dependency',
        KwargInfo('compile_args', ContainerTypeInfo(list, str), listify=True, default=[]),
        INCLUDE_DIRECTORIES.evolve(name='d_import_dirs', since='0.62.0'),
        D_MODULE_VERSIONS_KW.evolve(since='0.62.0'),
        KwargInfo('link_args', ContainerTypeInfo(list, str), listify=True, default=[]),
        DEPENDENCIES_KW,
        INCLUDE_DIRECTORIES,
        LINK_WITH_KW,
        LINK_WHOLE_KW.evolve(since='0.46.0'),
        SOURCES_KW,
        VARIABLES_KW.evolve(since='0.54.0', since_values={list: '0.56.0'}),
        KwargInfo('version', (str, NoneType)),
    )
    def func_declare_dependency(self, node, args, kwargs):
        deps = kwargs['dependencies']
        incs = self.extract_incdirs(kwargs)
        libs = kwargs['link_with']
        libs_whole = kwargs['link_whole']
        sources = self.source_strings_to_files(kwargs['sources'])
        compile_args = kwargs['compile_args']
        link_args = kwargs['link_args']
        variables = kwargs['variables']
        version = kwargs['version']
        if version is None:
            version = self.project_version
        d_module_versions = kwargs['d_module_versions']
        d_import_dirs = self.extract_incdirs(kwargs, 'd_import_dirs')
        srcdir = Path(self.environment.source_dir)
        # convert variables which refer to an -uninstalled.pc style datadir
        for k, v in variables.items():
            try:
                p = Path(v)
            except ValueError:
                continue
            else:
                if not self.is_subproject() and srcdir / self.subproject_dir in p.parents:
                    continue
                if p.is_absolute() and p.is_dir() and srcdir / self.root_subdir in [p] + list(Path(os.path.abspath(p)).parents):
                    variables[k] = P_OBJ.DependencyVariableString(v)

        for d in deps:
            if not isinstance(d, dependencies.Dependency):
                raise InterpreterException('Invalid dependency')

        dep = dependencies.InternalDependency(version, incs, compile_args,
                                              link_args, libs, libs_whole, sources, deps,
                                              variables, d_module_versions, d_import_dirs)
        return dep

    @typed_pos_args('assert', bool, optargs=[str])
    @noKwargs
    def func_assert(self, node: mparser.FunctionNode, args: T.Tuple[bool, T.Optional[str]],
                    kwargs: 'TYPE_kwargs') -> None:
        value, message = args
        if message is None:
            FeatureNew.single_use('assert function without message argument', '0.53.0', self.subproject, location=node)

        if not value:
            if message is None:
                from ..ast import AstPrinter
                printer = AstPrinter()
                node.args.arguments[0].accept(printer)
                message = printer.result
            raise InterpreterException('Assert failed: ' + message)

    def validate_arguments(self, args, argcount, arg_types):
        if argcount is not None:
            if argcount != len(args):
                raise InvalidArguments(f'Expected {argcount} arguments, got {len(args)}.')
        for actual, wanted in zip(args, arg_types):
            if wanted is not None:
                if not isinstance(actual, wanted):
                    raise InvalidArguments('Incorrect argument type.')

    # Executables aren't actually accepted, but we allow them here to allow for
    # better error messages when overridden
    @typed_pos_args(
        'run_command',
        (build.Executable, ExternalProgram, compilers.Compiler, mesonlib.File, str),
        varargs=(build.Executable, ExternalProgram, compilers.Compiler, mesonlib.File, str))
    @typed_kwargs(
        'run_command',
        KwargInfo('check', (bool, NoneType), since='0.47.0'),
        KwargInfo('capture', bool, default=True, since='0.47.0'),
        ENV_KW.evolve(since='0.50.0'),
    )
    def func_run_command(self, node: mparser.BaseNode,
                         args: T.Tuple[T.Union[build.Executable, ExternalProgram, compilers.Compiler, mesonlib.File, str],
                                       T.List[T.Union[build.Executable, ExternalProgram, compilers.Compiler, mesonlib.File, str]]],
                         kwargs: 'kwtypes.RunCommand') -> RunProcess:
        return self.run_command_impl(node, args, kwargs)

    def run_command_impl(self,
                         node: mparser.BaseNode,
                         args: T.Tuple[T.Union[build.Executable, ExternalProgram, compilers.Compiler, mesonlib.File, str],
                                       T.List[T.Union[build.Executable, ExternalProgram, compilers.Compiler, mesonlib.File, str]]],
                         kwargs: 'kwtypes.RunCommand',
                         in_builddir: bool = False) -> RunProcess:
        cmd, cargs = args
        capture = kwargs['capture']
        env = kwargs['env']
        srcdir = self.environment.get_source_dir()
        builddir = self.environment.get_build_dir()

        check = kwargs['check']
        if check is None:
            mlog.warning(implicit_check_false_warning, once=True)
            check = False

        overridden_msg = ('Program {!r} was overridden with the compiled '
                          'executable {!r} and therefore cannot be used during '
                          'configuration')
        expanded_args: T.List[str] = []
        if isinstance(cmd, build.Executable):
            for name, exe in self.build.find_overrides.items():
                if cmd == exe:
                    progname = name
                    break
            else:
                raise MesonBugException('cmd was a built executable but not found in overrides table')
            raise InterpreterException(overridden_msg.format(progname, cmd.description()))
        if isinstance(cmd, ExternalProgram):
            if not cmd.found():
                raise InterpreterException(f'command {cmd.get_name()!r} not found or not executable')
        elif isinstance(cmd, compilers.Compiler):
            exelist = cmd.get_exelist()
            cmd = exelist[0]
            prog = ExternalProgram(cmd, silent=True)
            if not prog.found():
                raise InterpreterException(f'Program {cmd!r} not found or not executable')
            cmd = prog
            expanded_args = exelist[1:]
        else:
            if isinstance(cmd, mesonlib.File):
                cmd = cmd.absolute_path(srcdir, builddir)
            # Prefer scripts in the current source directory
            search_dir = os.path.join(srcdir, self.subdir)
            prog = ExternalProgram(cmd, silent=True, search_dir=search_dir)
            if not prog.found():
                raise InterpreterException(f'Program or command {cmd!r} not found or not executable')
            cmd = prog
        for a in cargs:
            if isinstance(a, str):
                expanded_args.append(a)
            elif isinstance(a, mesonlib.File):
                expanded_args.append(a.absolute_path(srcdir, builddir))
            elif isinstance(a, ExternalProgram):
                expanded_args.append(a.get_path())
            elif isinstance(a, compilers.Compiler):
                FeatureNew.single_use('Compiler object as a variadic argument to `run_command`', '0.61.0', self.subproject, location=node)
                prog = ExternalProgram(a.exelist[0], silent=True)
                if not prog.found():
                    raise InterpreterException(f'Program {cmd!r} not found or not executable')
                expanded_args.append(prog.get_path())
            else:
                raise InterpreterException(overridden_msg.format(a.name, cmd.description()))

        # If any file that was used as an argument to the command
        # changes, we must re-run the configuration step.
        self.add_build_def_file(cmd.get_path())
        for a in expanded_args:
            if not os.path.isabs(a):
                a = os.path.join(builddir if in_builddir else srcdir, self.subdir, a)
            self.add_build_def_file(a)

        return RunProcess(cmd, expanded_args, env, srcdir, builddir, self.subdir,
                          self.environment.get_build_command() + ['introspect'],
                          in_builddir=in_builddir, check=check, capture=capture)

    def func_gettext(self, nodes, args, kwargs):
        raise InterpreterException('Gettext() function has been moved to module i18n. Import it and use i18n.gettext() instead')

    def func_option(self, nodes, args, kwargs):
        raise InterpreterException('Tried to call option() in build description file. All options must be in the option file.')

    @typed_pos_args('subproject', str)
    @typed_kwargs(
        'subproject',
        REQUIRED_KW,
        DEFAULT_OPTIONS.evolve(since='0.38.0'),
        KwargInfo('version', ContainerTypeInfo(list, str), default=[], listify=True),
    )
    def func_subproject(self, nodes: mparser.BaseNode, args: T.Tuple[str], kwargs: kwtypes.Subproject) -> SubprojectHolder:
        kw: kwtypes.DoSubproject = {
            'required': kwargs['required'],
            'default_options': kwargs['default_options'],
            'version': kwargs['version'],
            'options': None,
            'cmake_options': [],
        }
        return self.do_subproject(args[0], 'meson', kw)

    def disabled_subproject(self, subp_name: str, disabled_feature: T.Optional[str] = None,
                            exception: T.Optional[Exception] = None) -> SubprojectHolder:
        sub = SubprojectHolder(NullSubprojectInterpreter(), os.path.join(self.subproject_dir, subp_name),
                               disabled_feature=disabled_feature, exception=exception)
        self.subprojects[subp_name] = sub
        return sub

    def do_subproject(self, subp_name: str, method: Literal['meson', 'cmake'], kwargs: kwtypes.DoSubproject) -> SubprojectHolder:
        disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
        if disabled:
            mlog.log('Subproject', mlog.bold(subp_name), ':', 'skipped: feature', mlog.bold(feature), 'disabled')
            return self.disabled_subproject(subp_name, disabled_feature=feature)

        default_options = coredata.create_options_dict(kwargs['default_options'], subp_name)

        if subp_name == '':
            raise InterpreterException('Subproject name must not be empty.')
        if subp_name[0] == '.':
            raise InterpreterException('Subproject name must not start with a period.')
        if '..' in subp_name:
            raise InterpreterException('Subproject name must not contain a ".." path segment.')
        if os.path.isabs(subp_name):
            raise InterpreterException('Subproject name must not be an absolute path.')
        if has_path_sep(subp_name):
            mlog.warning('Subproject name has a path separator. This may cause unexpected behaviour.',
                         location=self.current_node)
        if subp_name in self.subproject_stack:
            fullstack = self.subproject_stack + [subp_name]
            incpath = ' => '.join(fullstack)
            raise InvalidCode(f'Recursive include of subprojects: {incpath}.')
        if subp_name in self.subprojects:
            subproject = self.subprojects[subp_name]
            if required and not subproject.found():
                raise InterpreterException(f'Subproject "{subproject.subdir}" required but not found.')
            if kwargs['version']:
                pv = self.build.subprojects[subp_name]
                wanted = kwargs['version']
                if pv == 'undefined' or not mesonlib.version_compare_many(pv, wanted)[0]:
                    raise InterpreterException(f'Subproject {subp_name} version is {pv} but {wanted} required.')
            return subproject

        r = self.environment.wrap_resolver
        try:
            subdir = r.resolve(subp_name, method)
        except wrap.WrapException as e:
            if not required:
                mlog.log(e)
                mlog.log('Subproject', mlog.bold(subp_name), 'is buildable:', mlog.red('NO'), '(disabling)')
                return self.disabled_subproject(subp_name, exception=e)
            raise e

        subdir_abs = os.path.join(self.environment.get_source_dir(), subdir)
        os.makedirs(os.path.join(self.build.environment.get_build_dir(), subdir), exist_ok=True)
        self.global_args_frozen = True

        stack = ':'.join(self.subproject_stack + [subp_name])
        m = ['\nExecuting subproject', mlog.bold(stack)]
        if method != 'meson':
            m += ['method', mlog.bold(method)]
        mlog.log(*m, '\n', nested=False)

        try:
            if method == 'meson':
                return self._do_subproject_meson(subp_name, subdir, default_options, kwargs)
            elif method == 'cmake':
                return self._do_subproject_cmake(subp_name, subdir, subdir_abs, default_options, kwargs)
            else:
                raise mesonlib.MesonBugException(f'The method {method} is invalid for the subproject {subp_name}')
        # Invalid code is always an error
        except InvalidCode:
            raise
        except Exception as e:
            if not required:
                with mlog.nested(subp_name):
                    # Suppress the 'ERROR:' prefix because this exception is not
                    # fatal and VS CI treat any logs with "ERROR:" as fatal.
                    mlog.exception(e, prefix=mlog.yellow('Exception:'))
                mlog.log('\nSubproject', mlog.bold(subdir), 'is buildable:', mlog.red('NO'), '(disabling)')
                return self.disabled_subproject(subp_name, exception=e)
            raise e

    def _do_subproject_meson(self, subp_name: str, subdir: str,
                             default_options: T.Dict[OptionKey, str],
                             kwargs: kwtypes.DoSubproject,
                             ast: T.Optional[mparser.CodeBlockNode] = None,
                             build_def_files: T.Optional[T.List[str]] = None,
                             is_translated: bool = False,
                             relaxations: T.Optional[T.Set[InterpreterRuleRelaxation]] = None) -> SubprojectHolder:
        with mlog.nested(subp_name):
            new_build = self.build.copy()
            subi = Interpreter(new_build, self.backend, subp_name, subdir, self.subproject_dir,
                               default_options, ast=ast, is_translated=is_translated,
                               relaxations=relaxations,
                               user_defined_options=self.user_defined_options)
            # Those lists are shared by all interpreters. That means that
            # even if the subproject fails, any modification that the subproject
            # made to those lists will affect the parent project.
            subi.subprojects = self.subprojects
            subi.modules = self.modules
            subi.holder_map = self.holder_map
            subi.bound_holder_map = self.bound_holder_map
            subi.summary = self.summary

            subi.subproject_stack = self.subproject_stack + [subp_name]
            current_active = self.active_projectname
            current_warnings_counter = mlog.log_warnings_counter
            mlog.log_warnings_counter = 0
            subi.run()
            subi_warnings = mlog.log_warnings_counter
            mlog.log_warnings_counter = current_warnings_counter

            mlog.log('Subproject', mlog.bold(subp_name), 'finished.')

        mlog.log()

        if kwargs['version']:
            pv = subi.project_version
            wanted = kwargs['version']
            if pv == 'undefined' or not mesonlib.version_compare_many(pv, wanted)[0]:
                raise InterpreterException(f'Subproject {subp_name} version is {pv} but {wanted} required.')
        self.active_projectname = current_active
        self.subprojects.update(subi.subprojects)
        self.subprojects[subp_name] = SubprojectHolder(subi, subdir, warnings=subi_warnings)
        # Duplicates are possible when subproject uses files from project root
        if build_def_files:
            self.build_def_files.update(build_def_files)
        # We always need the subi.build_def_files, to propagate sub-sub-projects
        self.build_def_files.update(subi.build_def_files)
        self.build.merge(subi.build)
        self.build.subprojects[subp_name] = subi.project_version
        return self.subprojects[subp_name]

    def _do_subproject_cmake(self, subp_name: str, subdir: str, subdir_abs: str,
                             default_options: T.Dict[OptionKey, str],
                             kwargs: kwtypes.DoSubproject) -> SubprojectHolder:
        with mlog.nested(subp_name):
            new_build = self.build.copy()
            prefix = self.coredata.options[OptionKey('prefix')].value

            from ..modules.cmake import CMakeSubprojectOptions
            options = kwargs['options'] or CMakeSubprojectOptions()
            cmake_options = kwargs['cmake_options'] + options.cmake_options
            cm_int = CMakeInterpreter(new_build, Path(subdir), Path(subdir_abs), Path(prefix), new_build.environment, self.backend)
            cm_int.initialise(cmake_options)
            cm_int.analyse()

            # Generate a meson ast and execute it with the normal do_subproject_meson
            ast = cm_int.pretend_to_be_meson(options.target_options)

            mlog.log()
            with mlog.nested('cmake-ast'):
                mlog.log('Processing generated meson AST')

                # Debug print the generated meson file
                from ..ast import AstIndentationGenerator, AstPrinter
                printer = AstPrinter(update_ast_line_nos=True)
                ast.accept(AstIndentationGenerator())
                ast.accept(printer)
                printer.post_process()
                meson_filename = os.path.join(self.build.environment.get_build_dir(), subdir, 'meson.build')
                with open(meson_filename, "w", encoding='utf-8') as f:
                    f.write(printer.result)

                mlog.log('Build file:', meson_filename)
                mlog.cmd_ci_include(meson_filename)
                mlog.log()

            result = self._do_subproject_meson(
                subp_name, subdir, default_options,
                kwargs, ast,
                [str(f) for f in cm_int.bs_files],
                is_translated=True,
                relaxations={
                    InterpreterRuleRelaxation.ALLOW_BUILD_DIR_FILE_REFFERENCES,
                }
            )
            result.cm_interpreter = cm_int

        mlog.log()
        return result

    def get_option_internal(self, optname: str) -> coredata.UserOption:
        key = OptionKey.from_string(optname).evolve(subproject=self.subproject)

        if not key.is_project():
            for opts in [self.coredata.options, compilers.base_options]:
                v = opts.get(key)
                if v is None or v.yielding:
                    v = opts.get(key.as_root())
                if v is not None:
                    assert isinstance(v, coredata.UserOption), 'for mypy'
                    return v
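
        # Project options may 'yield' to an identically named option of the parent
        # project: if the option types match, the parent's value is returned;
        # otherwise the subproject's own value is kept and a warning is printed below.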
        try:
            opt = self.coredata.options[key]
            if opt.yielding and key.subproject and key.as_root() in self.coredata.options:
                popt = self.coredata.options[key.as_root()]
                if type(opt) is type(popt):
                    opt = popt
                else:
                    # Get class name, then option type as a string
                    opt_type = opt.__class__.__name__[4:][:-6].lower()
                    popt_type = popt.__class__.__name__[4:][:-6].lower()
                    # This is not a hard error to avoid dependency hell, the workaround
                    # when this happens is to simply set the subproject's option directly.
                    mlog.warning('Option {0!r} of type {1!r} in subproject {2!r} cannot yield '
                                 'to parent option of type {3!r}, ignoring parent value. '
                                 'Use -D{2}:{0}=value to set the value for this option manually'
                                 '.'.format(optname, opt_type, self.subproject, popt_type),
                                 location=self.current_node)
            return opt
        except KeyError:
            pass

        raise InterpreterException(f'Tried to access unknown option {optname!r}.')

    @typed_pos_args('get_option', str)
    @noKwargs
    def func_get_option(self, nodes: mparser.BaseNode, args: T.Tuple[str],
                        kwargs: 'TYPE_kwargs') -> T.Union[coredata.UserOption, 'TYPE_var']:
        optname = args[0]
        if ':' in optname:
            raise InterpreterException('Having a colon in option name is forbidden, '
                                       'projects are not allowed to directly access '
                                       'options of other subprojects.')

        opt = self.get_option_internal(optname)
        if isinstance(opt, coredata.UserFeatureOption):
            opt.name = optname
            return opt
        elif isinstance(opt, coredata.UserOption):
            if isinstance(opt.value, str):
                return P_OBJ.OptionString(opt.value, f'{{{optname}}}')
            return opt.value
        return opt

    @typed_pos_args('configuration_data', optargs=[dict])
    @noKwargs
    def func_configuration_data(self, node: mparser.BaseNode, args: T.Tuple[T.Optional[T.Dict[str, T.Any]]],
                                kwargs: 'TYPE_kwargs') -> build.ConfigurationData:
        initial_values = args[0]
        if initial_values is not None:
            FeatureNew.single_use('configuration_data dictionary', '0.49.0', self.subproject, location=node)
            for k, v in initial_values.items():
                if not isinstance(v, (str, int, bool)):
                    raise InvalidArguments(
                        f'"configuration_data": initial value dictionary key "{k!r}" must be "str | int | bool", not "{v!r}"')
        return build.ConfigurationData(initial_values)

    def set_backend(self) -> None:
        # The backend is already set when parsing subprojects
        if self.backend is not None:
            return
        backend = self.coredata.get_option(OptionKey('backend'))
        from ..backend import backends
        self.backend = backends.get_backend_from_name(backend, self.build, self)

        if self.backend is None:
            raise InterpreterException(f'Unknown backend "{backend}".')
        if backend != self.backend.name:
            if self.backend.name.startswith('vs'):
                mlog.log('Auto detected Visual Studio backend:', mlog.bold(self.backend.name))
            self.coredata.set_option(OptionKey('backend'), self.backend.name)

        # Only init backend options on first invocation otherwise it would
        # override values previously set from command line.
        if self.environment.first_invocation:
            self.coredata.init_backend_options(backend)

        options = {k: v for k, v in self.environment.options.items() if k.is_backend()}
        self.coredata.set_options(options)

    @typed_pos_args('project', str, varargs=str)
    @typed_kwargs(
        'project',
        DEFAULT_OPTIONS,
        KwargInfo('meson_version', (str, NoneType)),
        KwargInfo(
            'version',
            (str, mesonlib.File, NoneType, list),
            default='undefined',
            validator=_project_version_validator,
            convertor=lambda x: x[0] if isinstance(x, list) else x,
        ),
        KwargInfo('license', ContainerTypeInfo(list, str), default=['unknown'], listify=True),
        KwargInfo('subproject_dir', str, default='subprojects'),
    )
    def func_project(self, node: mparser.FunctionNode, args: T.Tuple[str, T.List[str]], kwargs: 'kwtypes.Project') -> None:
        proj_name, proj_langs = args
        if ':' in proj_name:
            raise InvalidArguments(f"Project name {proj_name!r} must not contain ':'")

        # This needs to be evaluated as early as possible, as meson uses this
        # for things like deprecation testing.
        if kwargs['meson_version']:
            cv = coredata.version
            pv = kwargs['meson_version']
            if not mesonlib.version_compare(cv, pv):
                raise InterpreterException(f'Meson version is {cv} but project requires {pv}')
            mesonlib.project_meson_versions[self.subproject] = kwargs['meson_version']

        if os.path.exists(self.option_file):
            oi = optinterpreter.OptionInterpreter(self.subproject)
            oi.process(self.option_file)
            self.coredata.update_project_options(oi.options)
            self.add_build_def_file(self.option_file)

        # Do not set default_options on reconfigure otherwise it would override
        # values previously set from command line. That means that changing
        # default_options in a project will trigger a reconfigure but won't
        # have any effect.
        self.project_default_options = coredata.create_options_dict(
            kwargs['default_options'], self.subproject)

        # If this is the first invocation we always need to initialize
        # builtins, if this is a subproject that is new in a re-invocation we
        # need to initialize builtins for that
        if self.environment.first_invocation or (self.subproject != '' and self.subproject not in self.coredata.initialized_subprojects):
            default_options = self.project_default_options.copy()
            default_options.update(self.default_project_options)
            self.coredata.init_builtins(self.subproject)
            self.coredata.initialized_subprojects.add(self.subproject)
        else:
            default_options = {}
        self.coredata.set_default_options(default_options, self.subproject, self.environment)

        if not self.is_subproject():
            self.build.project_name = proj_name
        self.active_projectname = proj_name

        version = kwargs['version']
        if isinstance(version, mesonlib.File):
            FeatureNew.single_use('version from file', '0.57.0', self.subproject, location=node)
            self.add_build_def_file(version)
            ifname = version.absolute_path(self.environment.source_dir,
                                           self.environment.build_dir)
            try:
                ver_data = Path(ifname).read_text(encoding='utf-8').split('\n')
            except FileNotFoundError:
                raise InterpreterException('Version file not found.')
            if len(ver_data) == 2 and ver_data[1] == '':
                ver_data = ver_data[0:1]
            if len(ver_data) != 1:
                raise InterpreterException('Version file must contain exactly one line of text.')
            self.project_version = ver_data[0]
        else:
            self.project_version = version

        if self.build.project_version is None:
            self.build.project_version = self.project_version
        proj_license = kwargs['license']
        self.build.dep_manifest[proj_name] = build.DepManifest(self.project_version, proj_license)

        if self.subproject in self.build.projects:
            raise InvalidCode('Second call to project().')

        # spdirname is the subproject_dir for this project, relative to self.subdir.
        # self.subproject_dir is the subproject_dir for the main project, relative to top source dir.
        spdirname = kwargs['subproject_dir']
        if not isinstance(spdirname, str):
            raise InterpreterException('Subproject_dir must be a string')
        if os.path.isabs(spdirname):
            raise InterpreterException('Subproject_dir must not be an absolute path.')
        if spdirname.startswith('.'):
            raise InterpreterException('Subproject_dir must not begin with a period.')
        if '..' in spdirname:
            raise InterpreterException('Subproject_dir must not contain a ".." segment.')
        if not self.is_subproject():
            self.subproject_dir = spdirname
        self.build.subproject_dir = self.subproject_dir

        # Load wrap files from this (sub)project.
        wrap_mode = self.coredata.get_option(OptionKey('wrap_mode'))
        if not self.is_subproject() or wrap_mode != WrapMode.nopromote:
            subdir = os.path.join(self.subdir, spdirname)
            r = wrap.Resolver(self.environment.get_source_dir(), subdir, self.subproject, wrap_mode)
            if self.is_subproject():
                self.environment.wrap_resolver.merge_wraps(r)
            else:
                self.environment.wrap_resolver = r

        self.build.projects[self.subproject] = proj_name
        mlog.log('Project name:', mlog.bold(proj_name))
        mlog.log('Project version:', mlog.bold(self.project_version))

        if not self.is_subproject():
            # We have to activate VS before adding languages and before calling
            # self.set_backend() otherwise it wouldn't be able to detect which
            # vs backend version we need. But after setting default_options in case
            # the project sets vs backend by default.
            backend = self.coredata.get_option(OptionKey('backend'))
            force_vsenv = self.user_defined_options.vsenv or backend.startswith('vs')
            if mesonlib.setup_vsenv(force_vsenv):
                self.build.need_vsenv = True

        self.add_languages(proj_langs, True, MachineChoice.HOST)
        self.add_languages(proj_langs, False, MachineChoice.BUILD)

        self.set_backend()
        if not self.is_subproject():
            self.check_stdlibs()

    @typed_kwargs('add_languages', KwargInfo('native', (bool, NoneType), since='0.54.0'), REQUIRED_KW)
    @typed_pos_args('add_languages', varargs=str)
    def func_add_languages(self, node: mparser.FunctionNode, args: T.Tuple[T.List[str]], kwargs: 'kwtypes.FuncAddLanguages') -> bool:
        langs = args[0]
        disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
        native = kwargs['native']

        if disabled:
            for lang in sorted(langs, key=compilers.sort_clink):
                mlog.log('Compiler for language', mlog.bold(lang), 'skipped: feature', mlog.bold(feature), 'disabled')
            return False
        if native is not None:
            return self.add_languages(langs, required, self.machine_from_native_kwarg(kwargs))
        else:
            # absent 'native' means 'both' for backwards compatibility
            tv = FeatureNew.get_target_version(self.subproject)
            if FeatureNew.check_version(tv, '0.54.0'):
                mlog.warning('add_languages is missing native:, assuming languages are wanted for both host and build.',
                             location=node)

            success = self.add_languages(langs, False, MachineChoice.BUILD)
            success &= self.add_languages(langs, required, MachineChoice.HOST)
            return success

    @noArgsFlattening
    @noKwargs
    def func_message(self, node: mparser.BaseNode, args, kwargs):
        if len(args) > 1:
            FeatureNew.single_use('message with more than one argument', '0.54.0', self.subproject, location=node)
        args_str = [stringifyUserArguments(i) for i in args]
        self.message_impl(args_str)

    def message_impl(self, args):
        mlog.log(mlog.bold('Message:'), *args)

    @noArgsFlattening
    @FeatureNew('summary', '0.53.0')
    @typed_pos_args('summary', (str, dict), optargs=[object])
    @typed_kwargs(
        'summary',
        KwargInfo('section', str, default=''),
        KwargInfo('bool_yn', bool, default=False),
        KwargInfo('list_sep', (str, NoneType), since='0.54.0')
    )
    def func_summary(self, node: mparser.BaseNode, args: T.Tuple[T.Union[str, T.Dict[str, T.Any]], T.Optional[T.Any]],
                     kwargs: 'kwtypes.Summary') -> None:
        if args[1] is None:
            if not isinstance(args[0], dict):
                raise InterpreterException('Summary first argument must be dictionary.')
            values = args[0]
        else:
            if not isinstance(args[0], str):
                raise InterpreterException('Summary first argument must be string.')
            values = {args[0]: args[1]}
        self.summary_impl(kwargs['section'], values, kwargs)

    def summary_impl(self, section: str, values, kwargs: 'kwtypes.Summary') -> None:
        if self.subproject not in self.summary:
            self.summary[self.subproject] = Summary(self.active_projectname, self.project_version)
        self.summary[self.subproject].add_section(
            section, values, kwargs['bool_yn'], kwargs['list_sep'], self.subproject)
def _print_summary ( self ) - > None :
# Add automatic 'Subprojects' section in main project.
all_subprojects = collections . OrderedDict ( )
for name , subp in sorted ( self . subprojects . items ( ) ) :
value = subp . found ( )
if subp . disabled_feature :
value = [ value , f ' Feature { subp . disabled_feature !r} disabled ' ]
elif subp . exception :
value = [ value , str ( subp . exception ) ]
elif subp . warnings > 0 :
value = [ value , f ' { subp . warnings } warnings ' ]
all_subprojects [ name ] = value
if all_subprojects :
self . summary_impl ( ' Subprojects ' , all_subprojects ,
{ ' bool_yn ' : True ,
' list_sep ' : ' ' ,
} )
# Add automatic section with all user defined options
if self . user_defined_options :
values = collections . OrderedDict ( )
if self . user_defined_options . cross_file :
values [ ' Cross files ' ] = self . user_defined_options . cross_file
if self . user_defined_options . native_file :
values [ ' Native files ' ] = self . user_defined_options . native_file
sorted_options = sorted ( self . user_defined_options . cmd_line_options . items ( ) )
values . update ( { str ( k ) : v for k , v in sorted_options } )
if values :
self . summary_impl ( ' User defined options ' , values , { ' bool_yn ' : False , ' list_sep ' : None } )
# Print all summaries, main project last.
mlog . log ( ' ' ) # newline
main_summary = self . summary . pop ( ' ' , None )
for subp_name , summary in sorted ( self . summary . items ( ) ) :
if self . subprojects [ subp_name ] . found ( ) :
summary . dump ( )
if main_summary :
main_summary . dump ( )
@noArgsFlattening
@FeatureNew ( ' warning ' , ' 0.44.0 ' )
@noKwargs
def func_warning ( self , node , args , kwargs ) :
if len ( args ) > 1 :
FeatureNew . single_use ( ' warning with more than one argument ' , ' 0.54.0 ' , self . subproject , location = node )
args_str = [ stringifyUserArguments ( i ) for i in args ]
mlog . warning ( * args_str , location = node )
@noArgsFlattening
@noKwargs
def func_error ( self , node , args , kwargs ) :
if len ( args ) > 1 :
FeatureNew . single_use ( ' error with more than one argument ' , ' 0.58.0 ' , self . subproject , location = node )
args_str = [ stringifyUserArguments ( i ) for i in args ]
raise InterpreterException ( ' Problem encountered: ' + ' ' . join ( args_str ) )
@noArgsFlattening
@FeatureNew ( ' debug ' , ' 0.63.0 ' )
@noKwargs
def func_debug ( self , node , args , kwargs ) :
args_str = [ stringifyUserArguments ( i ) for i in args ]
mlog . debug ( ' Debug: ' , * args_str )
@noKwargs
@noPosargs
def func_exception ( self , node , args , kwargs ) :
raise Exception ( )
def add_languages ( self , args : T . Sequence [ str ] , required : bool , for_machine : MachineChoice ) - > bool :
success = self . add_languages_for ( args , required , for_machine )
if not self . coredata . is_cross_build ( ) :
self . coredata . copy_build_options_from_regular_ones ( )
self . _redetect_machines ( )
return success
def should_skip_sanity_check ( self , for_machine : MachineChoice ) - > bool :
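# The 'skip_sanity_check' property comes from the [properties] section of the
# machine (cross/native) file and must be a boolean if present.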
should = self . environment . properties . host . get ( ' skip_sanity_check ' , False )
if not isinstance ( should , bool ) :
raise InterpreterException ( ' Option skip_sanity_check must be a boolean. ' )
if for_machine != MachineChoice . HOST and not should :
return False
if not self . environment . is_cross_build ( ) and not should :
return False
return should
def add_languages_for ( self , args : T . List [ str ] , required : bool , for_machine : MachineChoice ) - > bool :
args = [ a . lower ( ) for a in args ]
langs = set ( self . compilers [ for_machine ] . keys ( ) )
langs . update ( args )
# We'd really like to add cython's default language here, but that can't
# be done yet because the cython compiler hasn't been initialized, so we
# can't query its option. Since we can't know which compiler to add by
# default and don't want to add unnecessary compilers, we add nothing for
# cython here and instead do it when the first cython target using a
# particular language is defined.
if ' vala ' in langs and ' c ' not in langs :
FeatureNew . single_use ( ' Adding Vala language without C ' , ' 0.59.0 ' , self . subproject , location = self . current_node )
args . append ( ' c ' )
if ' nasm ' in langs :
FeatureNew . single_use ( ' Adding NASM language ' , ' 0.64.0 ' , self . subproject , location = self . current_node )
success = True
for lang in sorted ( args , key = compilers . sort_clink ) :
if lang in self . compilers [ for_machine ] :
continue
machine_name = for_machine . get_lower_case_name ( )
comp = self . coredata . compilers [ for_machine ] . get ( lang )
if not comp :
try :
comp = compilers . detect_compiler_for ( self . environment , lang , for_machine )
if comp is None :
raise InvalidArguments ( f ' Tried to use unknown language " { lang } " . ' )
if self . should_skip_sanity_check ( for_machine ) :
mlog . log_once ( ' Cross compiler sanity tests disabled via the cross file. ' )
else :
comp . sanity_check ( self . environment . get_scratch_dir ( ) , self . environment )
except Exception :
if not required :
mlog . log ( ' Compiler for language ' ,
mlog . bold ( lang ) , ' for the ' , machine_name ,
' machine not found. ' )
success = False
continue
else :
raise
# Add per-subproject compiler options. They inherit their values from the main project.
if self . subproject :
options = { }
for k in comp . get_options ( ) :
v = copy . copy ( self . coredata . options [ k ] )
k = k . evolve ( subproject = self . subproject )
options [ k ] = v
self . coredata . add_compiler_options ( options , lang , for_machine , self . environment )
if for_machine == MachineChoice . HOST or self . environment . is_cross_build ( ) :
logger_fun = mlog . log
else :
logger_fun = mlog . debug
logger_fun ( comp . get_display_language ( ) , ' compiler for the ' , machine_name , ' machine: ' ,
mlog . bold ( ' ' . join ( comp . get_exelist ( ) ) ) , comp . get_version_string ( ) )
if comp . linker is not None :
logger_fun ( comp . get_display_language ( ) , ' linker for the ' , machine_name , ' machine: ' ,
mlog . bold ( ' ' . join ( comp . linker . get_exelist ( ) ) ) , comp . linker . id , comp . linker . version )
self . build . ensure_static_linker ( comp )
self . compilers [ for_machine ] [ lang ] = comp
return success
def program_from_file_for ( self , for_machine : MachineChoice , prognames : T . List [ mesonlib . FileOrString ]
) - > T . Optional [ ExternalProgram ] :
for p in prognames :
if isinstance ( p , mesonlib . File ) :
continue # Always points to a local (i.e. self-generated) file.
if not isinstance ( p , str ) :
raise InterpreterException ( ' Executable name must be a string ' )
prog = ExternalProgram . from_bin_list ( self . environment , for_machine , p )
if prog . found ( ) :
return prog
return None
def program_from_system ( self , args : T . List [ mesonlib . FileOrString ] , search_dirs : T . List [ str ] ,
extra_info : T . List [ mlog . TV_Loggable ] ) - > T . Optional [ ExternalProgram ] :
# Search for scripts relative to current subdir.
# Do not cache found programs because find_program('foobar')
# might give different results when run from different source dirs.
source_dir = os . path . join ( self . environment . get_source_dir ( ) , self . subdir )
for exename in args :
if isinstance ( exename , mesonlib . File ) :
if exename . is_built :
search_dir = os . path . join ( self . environment . get_build_dir ( ) ,
exename . subdir )
else :
search_dir = os . path . join ( self . environment . get_source_dir ( ) ,
exename . subdir )
exename = exename . fname
extra_search_dirs = [ ]
elif isinstance ( exename , str ) :
search_dir = source_dir
extra_search_dirs = search_dirs
else :
raise InvalidArguments ( f ' find_program only accepts strings and files, not { exename !r} ' )
extprog = ExternalProgram ( exename , search_dir = search_dir ,
extra_search_dirs = extra_search_dirs ,
silent = True )
if extprog . found ( ) :
extra_info . append ( f " ( { ' ' . join ( extprog . get_command ( ) ) } ) " )
return extprog
return None
def program_from_overrides ( self , command_names : T . List [ mesonlib . FileOrString ] ,
extra_info : T . List [ ' mlog.TV_Loggable ' ]
) - > T . Optional [ T . Union [ ExternalProgram , OverrideProgram , build . Executable ] ] :
for name in command_names :
if not isinstance ( name , str ) :
continue
if name in self . build . find_overrides :
exe = self . build . find_overrides [ name ]
extra_info . append ( mlog . blue ( ' (overridden) ' ) )
return exe
return None
def store_name_lookups ( self , command_names : T . List [ mesonlib . FileOrString ] ) - > None :
for name in command_names :
if isinstance ( name , str ) :
self . build . searched_programs . add ( name )
def add_find_program_override ( self , name : str , exe : T . Union [ build . Executable , ExternalProgram , ' OverrideProgram ' ] ) - > None :
if name in self . build . searched_programs :
raise InterpreterException ( f ' Tried to override finding of executable " { name } " which has already been found. ' )
if name in self . build . find_overrides :
raise InterpreterException ( f ' Tried to override executable " { name } " which has already been overridden. ' )
self . build . find_overrides [ name ] = exe
def notfound_program ( self , args : T . List [ mesonlib . FileOrString ] ) - > ExternalProgram :
return NonExistingExternalProgram ( ' ' . join (
[ a if isinstance ( a , str ) else a . absolute_path ( self . environment . source_dir , self . environment . build_dir )
for a in args ] ) )
# TODO update modules to always pass `for_machine`. It is bad form to assume
# the host machine.
def find_program_impl ( self , args : T . List [ mesonlib . FileOrString ] ,
for_machine : MachineChoice = MachineChoice . HOST ,
required : bool = True , silent : bool = True ,
wanted : T . Union [ str , T . List [ str ] ] = ' ' ,
search_dirs : T . Optional [ T . List [ str ] ] = None ,
version_func : T . Optional [ T . Callable [ [ T . Union [ ' ExternalProgram ' , ' build.Executable ' , ' OverrideProgram ' ] ] , str ] ] = None
) - > T . Union [ ' ExternalProgram ' , ' build.Executable ' , ' OverrideProgram ' ] :
args = mesonlib . listify ( args )
extra_info : T . List [ mlog . TV_Loggable ] = [ ]
progobj = self . program_lookup ( args , for_machine , required , search_dirs , extra_info )
if progobj is None :
progobj = self . notfound_program ( args )
if isinstance ( progobj , ExternalProgram ) and not progobj . found ( ) :
if not silent :
mlog . log ( ' Program ' , mlog . bold ( progobj . get_name ( ) ) , ' found: ' , mlog . red ( ' NO ' ) )
if required :
m = ' Program {!r} not found or not executable '
raise InterpreterException ( m . format ( progobj . get_name ( ) ) )
return progobj
if wanted :
if version_func :
version = version_func ( progobj )
elif isinstance ( progobj , build . Executable ) :
if progobj . subproject :
interp = self . subprojects [ progobj . subproject ] . held_object
else :
interp = self
assert isinstance ( interp , Interpreter )
version = interp . project_version
else :
version = progobj . get_version ( self )
is_found , not_found , _ = mesonlib . version_compare_many ( version , wanted )
if not is_found :
mlog . log ( ' Program ' , mlog . bold ( progobj . name ) , ' found: ' , mlog . red ( ' NO ' ) ,
' found ' , mlog . normal_cyan ( version ) , ' but need: ' ,
mlog . bold ( ' , ' . join ( [ f " ' { e } ' " for e in not_found ] ) ) , * extra_info )
if required :
m = ' Invalid version of program, need {!r} {!r} found {!r} . '
raise InterpreterException ( m . format ( progobj . name , not_found , version ) )
return self . notfound_program ( args )
extra_info . insert ( 0 , mlog . normal_cyan ( version ) )
# Only store successful lookups
self . store_name_lookups ( args )
if not silent :
mlog . log ( ' Program ' , mlog . bold ( progobj . name ) , ' found: ' , mlog . green ( ' YES ' ) , * extra_info )
if isinstance ( progobj , build . Executable ) :
progobj . was_returned_by_find_program = True
return progobj
def program_lookup ( self , args : T . List [ mesonlib . FileOrString ] , for_machine : MachineChoice ,
required : bool , search_dirs : T . List [ str ] , extra_info : T . List [ mlog . TV_Loggable ]
) - > T . Optional [ T . Union [ ExternalProgram , build . Executable , OverrideProgram ] ] :
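# Resolution order, as implemented below: find_program overrides registered
# via meson.override_find_program(), then the wrap provider when wrap_mode is
# 'forcefallback', then the machine file [binaries] section, then the system
# search (dirs/PATH, with a 'python3' special case), and finally the wrap
# provider as a last resort for required lookups.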
progobj = self . program_from_overrides ( args , extra_info )
if progobj :
return progobj
fallback = None
wrap_mode = self . coredata . get_option ( OptionKey ( ' wrap_mode ' ) )
if wrap_mode != WrapMode . nofallback and self . environment . wrap_resolver :
fallback = self . environment . wrap_resolver . find_program_provider ( args )
if fallback and wrap_mode == WrapMode . forcefallback :
return self . find_program_fallback ( fallback , args , required , extra_info )
progobj = self . program_from_file_for ( for_machine , args )
if progobj is None :
progobj = self . program_from_system ( args , search_dirs , extra_info )
if progobj is None and args [ 0 ] . endswith ( ' python3 ' ) :
prog = ExternalProgram ( ' python3 ' , mesonlib . python_command , silent = True )
progobj = prog if prog . found ( ) else None
if progobj is None and fallback and required :
progobj = self . find_program_fallback ( fallback , args , required , extra_info )
return progobj
def find_program_fallback ( self , fallback : str , args : T . List [ mesonlib . FileOrString ] ,
required : bool , extra_info : T . List [ mlog . TV_Loggable ]
) - > T . Optional [ T . Union [ ExternalProgram , build . Executable , OverrideProgram ] ] :
mlog . log ( ' Fallback to subproject ' , mlog . bold ( fallback ) , ' which provides program ' ,
mlog . bold ( ' ' . join ( args ) ) )
sp_kwargs : kwtypes . DoSubproject = {
' required ' : required ,
' default_options ' : [ ] ,
' version ' : [ ] ,
' cmake_options ' : [ ] ,
' options ' : None ,
}
self . do_subproject ( fallback , ' meson ' , sp_kwargs )
return self . program_from_overrides ( args , extra_info )
@typed_pos_args ( ' find_program ' , varargs = ( str , mesonlib . File ) , min_varargs = 1 )
@typed_kwargs (
' find_program ' ,
DISABLER_KW . evolve ( since = ' 0.49.0 ' ) ,
NATIVE_KW ,
REQUIRED_KW ,
KwargInfo ( ' dirs ' , ContainerTypeInfo ( list , str ) , default = [ ] , listify = True , since = ' 0.53.0 ' ) ,
KwargInfo ( ' version ' , ContainerTypeInfo ( list , str ) , default = [ ] , listify = True , since = ' 0.52.0 ' ) ,
)
@disablerIfNotFound
def func_find_program ( self , node : mparser . BaseNode , args : T . Tuple [ T . List [ mesonlib . FileOrString ] ] ,
kwargs : ' kwtypes.FindProgram ' ,
) - > T . Union [ ' build.Executable ' , ExternalProgram , ' OverrideProgram ' ] :
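# Illustrative meson.build call (assumed example):
#     prog = find_program('sed', 'gsed', version: '>=4.0', required: false,
#                         dirs: ['/opt/tools/bin'])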
disabled , required , feature = extract_required_kwarg ( kwargs , self . subproject )
if disabled :
mlog . log ( ' Program ' , mlog . bold ( ' ' . join ( args [ 0 ] ) ) , ' skipped: feature ' , mlog . bold ( feature ) , ' disabled ' )
return self . notfound_program ( args [ 0 ] )
search_dirs = extract_search_dirs ( kwargs )
return self . find_program_impl ( args [ 0 ] , kwargs [ ' native ' ] , required = required ,
silent = False , wanted = kwargs [ ' version ' ] ,
search_dirs = search_dirs )
def func_find_library ( self , node , args , kwargs ) :
raise InvalidCode ( ' find_library() is removed, use meson.get_compiler( \' name \' ).find_library() instead. \n '
' Look here for documentation: http://mesonbuild.com/Reference-manual.html#compiler-object \n '
' Look here for example: http://mesonbuild.com/howtox.html#add-math-library-lm-portably \n '
)
# When adding kwargs, please check if they make sense in dependencies.get_dep_identifier()
@FeatureNewKwargs ( ' dependency ' , ' 0.57.0 ' , [ ' cmake_package_version ' ] )
@FeatureNewKwargs ( ' dependency ' , ' 0.56.0 ' , [ ' allow_fallback ' ] )
@FeatureNewKwargs ( ' dependency ' , ' 0.54.0 ' , [ ' components ' ] )
@FeatureNewKwargs ( ' dependency ' , ' 0.52.0 ' , [ ' include_type ' ] )
@FeatureNewKwargs ( ' dependency ' , ' 0.50.0 ' , [ ' not_found_message ' , ' cmake_module_path ' , ' cmake_args ' ] )
@FeatureNewKwargs ( ' dependency ' , ' 0.49.0 ' , [ ' disabler ' ] )
@FeatureNewKwargs ( ' dependency ' , ' 0.40.0 ' , [ ' method ' ] )
@FeatureNewKwargs ( ' dependency ' , ' 0.38.0 ' , [ ' default_options ' ] )
@disablerIfNotFound
@permittedKwargs ( permitted_dependency_kwargs )
@typed_pos_args ( ' dependency ' , varargs = str , min_varargs = 1 )
def func_dependency ( self , node : mparser . BaseNode , args : T . Tuple [ T . List [ str ] ] , kwargs ) - > Dependency :
# Drop empty strings so that dependency('') results in an empty list of names.
names = [ n for n in args [ 0 ] if n ]
if len ( names ) > 1 :
FeatureNew ( ' dependency with more than one name ' , ' 0.60.0 ' ) . use ( self . subproject )
allow_fallback = kwargs . get ( ' allow_fallback ' )
if allow_fallback is not None and not isinstance ( allow_fallback , bool ) :
raise InvalidArguments ( ' " allow_fallback " argument must be boolean ' )
fallback = kwargs . get ( ' fallback ' )
default_options = kwargs . get ( ' default_options ' )
df = DependencyFallbacksHolder ( self , names , allow_fallback , default_options )
df . set_fallback ( fallback )
not_found_message = kwargs . get ( ' not_found_message ' , ' ' )
if not isinstance ( not_found_message , str ) :
raise InvalidArguments ( ' The not_found_message must be a string. ' )
try :
d = df . lookup ( kwargs )
except Exception :
if not_found_message :
self . message_impl ( [ not_found_message ] )
raise
assert isinstance ( d , Dependency )
if not d . found ( ) and not_found_message :
self . message_impl ( [ not_found_message ] )
# Ensure the correct include type
if ' include_type ' in kwargs :
wanted = kwargs [ ' include_type ' ]
if not isinstance ( wanted , str ) :
raise InvalidArguments ( ' The `include_type` kwarg must be a string ' )
actual = d . get_include_type ( )
if wanted != actual :
mlog . debug ( f ' Current include type of { args [ 0 ] } is { actual } . Converting to requested { wanted } ' )
d = d . generate_system_dependency ( wanted )
if d . feature_since is not None :
version , extra_msg = d . feature_since
FeatureNew . single_use ( f ' dep { d . name !r} custom lookup ' , version , self . subproject , extra_msg , node )
for f in d . featurechecks :
f . use ( self . subproject , node )
return d
@FeatureNew ( ' disabler ' , ' 0.44.0 ' )
@noKwargs
@noPosargs
def func_disabler ( self , node , args , kwargs ) :
return Disabler ( )
@FeatureNewKwargs ( ' executable ' , ' 0.42.0 ' , [ ' implib ' ] )
@FeatureNewKwargs ( ' executable ' , ' 0.56.0 ' , [ ' win_subsystem ' ] )
@FeatureDeprecatedKwargs ( ' executable ' , ' 0.56.0 ' , [ ' gui_app ' ] , extra_message = " Use ' win_subsystem ' instead. " )
@permittedKwargs ( build . known_exe_kwargs )
def func_executable ( self , node , args , kwargs ) :
return self . build_target ( node , args , kwargs , build . Executable )
@permittedKwargs ( build . known_stlib_kwargs )
def func_static_lib ( self , node , args , kwargs ) :
return self . build_target ( node , args , kwargs , build . StaticLibrary )
@permittedKwargs ( build . known_shlib_kwargs )
def func_shared_lib ( self , node , args , kwargs ) :
holder = self . build_target ( node , args , kwargs , build . SharedLibrary )
holder . shared_library_only = True
return holder
@permittedKwargs ( known_library_kwargs )
def func_both_lib ( self , node , args , kwargs ) :
return self . build_both_libraries ( node , args , kwargs )
@FeatureNew ( ' shared_module ' , ' 0.37.0 ' )
@permittedKwargs ( build . known_shmod_kwargs )
def func_shared_module ( self , node , args , kwargs ) :
return self . build_target ( node , args , kwargs , build . SharedModule )
@permittedKwargs ( known_library_kwargs )
def func_library ( self , node , args , kwargs ) :
return self . build_library ( node , args , kwargs )
@permittedKwargs ( build . known_jar_kwargs )
def func_jar ( self , node , args , kwargs ) :
return self . build_target ( node , args , kwargs , build . Jar )
@FeatureNewKwargs ( ' build_target ' , ' 0.40.0 ' , [ ' link_whole ' , ' override_options ' ] )
@permittedKwargs ( known_build_target_kwargs )
def func_build_target ( self , node , args , kwargs ) :
if ' target_type ' not in kwargs :
raise InterpreterException ( ' Missing target_type keyword argument ' )
target_type = kwargs . pop ( ' target_type ' )
if target_type == ' executable ' :
return self . build_target ( node , args , kwargs , build . Executable )
elif target_type == ' shared_library ' :
return self . build_target ( node , args , kwargs , build . SharedLibrary )
elif target_type == ' shared_module ' :
FeatureNew . single_use (
' build_target(target_type: \' shared_module \' ) ' ,
' 0.51.0 ' , self . subproject , location = node )
return self . build_target ( node , args , kwargs , build . SharedModule )
elif target_type == ' static_library ' :
return self . build_target ( node , args , kwargs , build . StaticLibrary )
elif target_type == ' both_libraries ' :
return self . build_both_libraries ( node , args , kwargs )
elif target_type == ' library ' :
return self . build_library ( node , args , kwargs )
elif target_type == ' jar ' :
return self . build_target ( node , args , kwargs , build . Jar )
else :
raise InterpreterException ( ' Unknown target_type. ' )
@noPosargs
@typed_kwargs (
' vcs_tag ' ,
CT_INPUT_KW . evolve ( required = True ) ,
MULTI_OUTPUT_KW ,
# Cannot use the COMMAND_KW because command is allowed to be empty
KwargInfo (
' command ' ,
ContainerTypeInfo ( list , ( str , build . BuildTarget , build . CustomTarget , build . CustomTargetIndex , ExternalProgram , mesonlib . File ) ) ,
listify = True ,
default = [ ] ,
) ,
KwargInfo ( ' fallback ' , ( str , NoneType ) ) ,
KwargInfo ( ' replace_string ' , str , default = ' @VCS_TAG@ ' ) ,
)
def func_vcs_tag ( self , node : mparser . BaseNode , args : T . List [ ' TYPE_var ' ] , kwargs : ' kwtypes.VcsTag ' ) - > build . CustomTarget :
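# Illustrative meson.build call (assumed example):
#     vcs_tag(input: 'version.c.in', output: 'version.c',
#             fallback: meson.project_version())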
if kwargs [ ' fallback ' ] is None :
FeatureNew . single_use ( ' Optional fallback in vcs_tag ' , ' 0.41.0 ' , self . subproject , location = node )
fallback = kwargs [ ' fallback ' ] or self . project_version
replace_string = kwargs [ ' replace_string ' ]
regex_selector = ' (.*) ' # default regex selector for custom command: use complete output
vcs_cmd = kwargs [ ' command ' ]
source_dir = os . path . normpath ( os . path . join ( self . environment . get_source_dir ( ) , self . subdir ) )
if vcs_cmd :
if isinstance ( vcs_cmd [ 0 ] , ( str , mesonlib . File ) ) :
if isinstance ( vcs_cmd [ 0 ] , mesonlib . File ) :
FeatureNew . single_use ( ' vcs_tag with file as the first argument ' , ' 0.62.0 ' , self . subproject , location = node )
maincmd = self . find_program_impl ( vcs_cmd [ 0 ] , required = False )
if maincmd . found ( ) :
vcs_cmd [ 0 ] = maincmd
else :
FeatureNew . single_use ( ' vcs_tag with custom_tgt, external_program, or exe as the first argument ' , ' 0.63.0 ' , self . subproject , location = node )
else :
vcs = mesonlib . detect_vcs ( source_dir )
if vcs :
mlog . log ( ' Found {} repository at {} ' . format ( vcs [ ' name ' ] , vcs [ ' wc_dir ' ] ) )
vcs_cmd = vcs [ ' get_rev ' ] . split ( )
regex_selector = vcs [ ' rev_regex ' ]
else :
vcs_cmd = [ ' ' ] # executing this cmd will fail in vcstagger.py and force it to use the fallback string
# vcstagger.py parameters: infile, outfile, fallback, source_dir, replace_string, regex_selector, command...
self . _validate_custom_target_outputs ( len ( kwargs [ ' input ' ] ) > 1 , kwargs [ ' output ' ] , " vcs_tag " )
cmd = self . environment . get_build_command ( ) + \
[ ' --internal ' ,
' vcstagger ' ,
' @INPUT0@ ' ,
' @OUTPUT0@ ' ,
fallback ,
source_dir ,
replace_string ,
regex_selector ] + vcs_cmd
tg = build . CustomTarget (
kwargs [ ' output ' ] [ 0 ] ,
self . subdir ,
self . subproject ,
self . environment ,
cmd ,
self . source_strings_to_files ( kwargs [ ' input ' ] ) ,
kwargs [ ' output ' ] ,
build_by_default = True ,
build_always_stale = True ,
)
self . add_target ( tg . name , tg )
return tg
@FeatureNew ( ' subdir_done ' , ' 0.46.0 ' )
@noPosargs
@noKwargs
def func_subdir_done ( self , node : mparser . BaseNode , args : TYPE_var , kwargs : TYPE_kwargs ) - > T . NoReturn :
raise SubdirDoneRequest ( )
@staticmethod
def _validate_custom_target_outputs ( has_multi_in : bool , outputs : T . Iterable [ str ] , name : str ) - > None :
""" Checks for additional invalid values in a custom_target output.
This cannot be done with typed_kwargs because it requires the number of
inputs .
"""
for out in outputs :
if has_multi_in and ( ' @PLAINNAME@ ' in out or ' @BASENAME@ ' in out ) :
raise InvalidArguments ( f ' { name } : output cannot contain " @PLAINNAME@ " or " @BASENAME@ " '
' when there is more than one input (we can \' t know which to use) ' )
@typed_pos_args ( ' custom_target ' , optargs = [ str ] )
@typed_kwargs (
' custom_target ' ,
COMMAND_KW ,
CT_BUILD_ALWAYS ,
CT_BUILD_ALWAYS_STALE ,
CT_BUILD_BY_DEFAULT ,
CT_INPUT_KW ,
CT_INSTALL_DIR_KW ,
CT_INSTALL_TAG_KW ,
MULTI_OUTPUT_KW ,
DEPENDS_KW ,
DEPEND_FILES_KW ,
DEPFILE_KW ,
ENV_KW . evolve ( since = ' 0.57.0 ' ) ,
INSTALL_KW ,
INSTALL_MODE_KW . evolve ( since = ' 0.47.0 ' ) ,
KwargInfo ( ' feed ' , bool , default = False , since = ' 0.59.0 ' ) ,
KwargInfo ( ' capture ' , bool , default = False ) ,
KwargInfo ( ' console ' , bool , default = False , since = ' 0.48.0 ' ) ,
)
def func_custom_target ( self , node : mparser . FunctionNode , args : T . Tuple [ str ] ,
kwargs : ' kwtypes.CustomTarget ' ) - > build . CustomTarget :
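# Illustrative meson.build call (assumed example; generator_prog is a
# hypothetical find_program() result):
#     custom_target('gen-src', input: 'data.idl', output: 'data.c',
#                   command: [generator_prog, '@INPUT@', '@OUTPUT@'],
#                   install: true, install_dir: 'share/mylib')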
if kwargs [ ' depfile ' ] and ( ' @BASENAME@ ' in kwargs [ ' depfile ' ] or ' @PLAINNAME@ ' in kwargs [ ' depfile ' ] ) :
FeatureNew . single_use ( ' substitutions in custom_target depfile ' , ' 0.47.0 ' , self . subproject , location = node )
install_mode = self . _warn_kwarg_install_mode_sticky ( kwargs [ ' install_mode ' ] )
# Don't mutate the kwargs
build_by_default = kwargs [ ' build_by_default ' ]
build_always_stale = kwargs [ ' build_always_stale ' ]
# Remap build_always to build_by_default and build_always_stale
if kwargs [ ' build_always ' ] is not None and kwargs [ ' build_always_stale ' ] is not None :
raise InterpreterException ( ' CustomTarget: " build_always " and " build_always_stale " are mutually exclusive ' )
if build_by_default is None and kwargs [ ' install ' ] :
build_by_default = True
elif kwargs [ ' build_always ' ] is not None :
if build_by_default is None :
build_by_default = kwargs [ ' build_always ' ]
build_always_stale = kwargs [ ' build_always ' ]
# These are nullable so that we can know whether they're explicitly
# set or not. If they haven't been overwritten, set them to their true
# default.
if build_by_default is None :
build_by_default = False
if build_always_stale is None :
build_always_stale = False
name = args [ 0 ]
if name is None :
# name will default to first output, but we cannot do that yet because
# they could need substitutions (e.g. @BASENAME@) first. CustomTarget()
# will take care of setting a proper default but name must be an empty
# string in the meantime.
FeatureNew . single_use ( ' custom_target() with no name argument ' , ' 0.60.0 ' , self . subproject , location = node )
name = ' '
inputs = self . source_strings_to_files ( kwargs [ ' input ' ] , strict = False )
command = kwargs [ ' command ' ]
if command and isinstance ( command [ 0 ] , str ) :
command [ 0 ] = self . find_program_impl ( [ command [ 0 ] ] )
if len ( inputs ) > 1 and kwargs [ ' feed ' ] :
raise InvalidArguments ( ' custom_target: " feed " keyword argument can only be used with a single input ' )
if len ( kwargs [ ' output ' ] ) > 1 and kwargs [ ' capture ' ] :
raise InvalidArguments ( ' custom_target: " capture " keyword argument can only be used with a single output ' )
if kwargs [ ' capture ' ] and kwargs [ ' console ' ] :
raise InvalidArguments ( ' custom_target: " capture " and " console " keyword arguments are mutually exclusive ' )
for c in command :
if kwargs [ ' capture ' ] and isinstance ( c , str ) and ' @OUTPUT@ ' in c :
raise InvalidArguments ( ' custom_target: " capture " keyword argument cannot be used with " @OUTPUT@ " ' )
if kwargs [ ' feed ' ] and isinstance ( c , str ) and ' @INPUT@ ' in c :
raise InvalidArguments ( ' custom_target: " feed " keyword argument cannot be used with " @INPUT@ " ' )
if kwargs [ ' install ' ] and not kwargs [ ' install_dir ' ] :
raise InvalidArguments ( ' custom_target: " install_dir " keyword argument must be set when " install " is true. ' )
if len ( kwargs [ ' install_dir ' ] ) > 1 :
FeatureNew . single_use ( ' multiple install_dir for custom_target ' , ' 0.40.0 ' , self . subproject , location = node )
if len ( kwargs [ ' install_tag ' ] ) not in { 0 , 1 , len ( kwargs [ ' output ' ] ) } :
raise InvalidArguments ( ' custom_target: install_tag argument must have 0 or 1 elements, '
' or the same number of elements as the output keyword argument. '
f ' (there are { len ( kwargs [ " install_tag " ] ) } install_tags, '
f ' and { len ( kwargs [ " output " ] ) } outputs) ' )
for t in kwargs [ ' output ' ] :
self . validate_forbidden_targets ( t )
self . _validate_custom_target_outputs ( len ( inputs ) > 1 , kwargs [ ' output ' ] , " custom_target " )
tg = build . CustomTarget (
name ,
self . subdir ,
self . subproject ,
self . environment ,
command ,
inputs ,
kwargs [ ' output ' ] ,
build_always_stale = build_always_stale ,
build_by_default = build_by_default ,
capture = kwargs [ ' capture ' ] ,
console = kwargs [ ' console ' ] ,
depend_files = kwargs [ ' depend_files ' ] ,
depfile = kwargs [ ' depfile ' ] ,
extra_depends = kwargs [ ' depends ' ] ,
env = kwargs [ ' env ' ] ,
feed = kwargs [ ' feed ' ] ,
install = kwargs [ ' install ' ] ,
install_dir = kwargs [ ' install_dir ' ] ,
install_mode = install_mode ,
install_tag = kwargs [ ' install_tag ' ] ,
backend = self . backend )
self . add_target ( tg . name , tg )
return tg
@typed_pos_args ( ' run_target ' , str )
@typed_kwargs (
' run_target ' ,
COMMAND_KW ,
DEPENDS_KW ,
ENV_KW . evolve ( since = ' 0.57.0 ' ) ,
)
def func_run_target ( self , node : mparser . FunctionNode , args : T . Tuple [ str ] ,
kwargs : ' kwtypes.RunTarget ' ) - > build . RunTarget :
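# Illustrative meson.build call (assumed example; linter_prog is a
# hypothetical find_program() result):
#     run_target('lint', command: [linter_prog, meson.project_source_root()])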
all_args = kwargs [ ' command ' ] . copy ( )
for i in listify ( all_args ) :
if isinstance ( i , ExternalProgram ) and not i . found ( ) :
raise InterpreterException ( f ' Tried to use non-existing executable { i . name !r} ' )
if isinstance ( all_args [ 0 ] , str ) :
all_args [ 0 ] = self . find_program_impl ( [ all_args [ 0 ] ] )
name = args [ 0 ]
tg = build . RunTarget ( name , all_args , kwargs [ ' depends ' ] , self . subdir , self . subproject , self . environment ,
kwargs [ ' env ' ] )
self . add_target ( name , tg )
full_name = ( self . subproject , name )
assert full_name not in self . build . run_target_names
self . build . run_target_names . add ( full_name )
return tg
@FeatureNew ( ' alias_target ' , ' 0.52.0 ' )
@typed_pos_args ( ' alias_target ' , str , varargs = build . Target , min_varargs = 1 )
@noKwargs
def func_alias_target ( self , node : mparser . BaseNode , args : T . Tuple [ str , T . List [ build . Target ] ] ,
kwargs : ' TYPE_kwargs ' ) - > build . AliasTarget :
name , deps = args
tg = build . AliasTarget ( name , deps , self . subdir , self . subproject , self . environment )
self . add_target ( name , tg )
return tg
@typed_pos_args ( ' generator ' , ( build . Executable , ExternalProgram ) )
@typed_kwargs (
' generator ' ,
KwargInfo ( ' arguments ' , ContainerTypeInfo ( list , str , allow_empty = False ) , required = True , listify = True ) ,
KwargInfo ( ' output ' , ContainerTypeInfo ( list , str , allow_empty = False ) , required = True , listify = True ) ,
DEPFILE_KW ,
DEPENDS_KW ,
KwargInfo ( ' capture ' , bool , default = False , since = ' 0.43.0 ' ) ,
)
def func_generator ( self , node : mparser . FunctionNode ,
args : T . Tuple [ T . Union [ build . Executable , ExternalProgram ] ] ,
kwargs : ' kwtypes.FuncGenerator ' ) - > build . Generator :
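# Illustrative meson.build call (assumed example); every output must contain
# @BASENAME@ or @PLAINNAME@, as validated below:
#     gen = generator(flex_prog, output: '@BASENAME@.c',
#                     arguments: ['-o', '@OUTPUT@', '@INPUT@'])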
for rule in kwargs [ ' output ' ] :
if ' @BASENAME@ ' not in rule and ' @PLAINNAME@ ' not in rule :
raise InvalidArguments ( ' Every element of " output " must contain @BASENAME@ or @PLAINNAME@. ' )
if has_path_sep ( rule ) :
raise InvalidArguments ( ' " output " must not contain a directory separator. ' )
if len ( kwargs [ ' output ' ] ) > 1 :
for o in kwargs [ ' output ' ] :
if ' @OUTPUT@ ' in o :
raise InvalidArguments ( ' Tried to use @OUTPUT@ in a rule with more than one output. ' )
gen = build . Generator ( args [ 0 ] , * * kwargs )
self . generators . append ( gen )
return gen
@typed_pos_args ( ' benchmark ' , str , ( build . Executable , build . Jar , ExternalProgram , mesonlib . File ) )
@typed_kwargs ( ' benchmark ' , * TEST_KWARGS )
def func_benchmark ( self , node : mparser . BaseNode ,
args : T . Tuple [ str , T . Union [ build . Executable , build . Jar , ExternalProgram , mesonlib . File ] ] ,
kwargs : ' kwtypes.FuncBenchmark ' ) - > None :
self . add_test ( node , args , kwargs , False )
@typed_pos_args ( ' test ' , str , ( build . Executable , build . Jar , ExternalProgram , mesonlib . File ) )
@typed_kwargs ( ' test ' , * TEST_KWARGS , KwargInfo ( ' is_parallel ' , bool , default = True ) )
def func_test ( self , node : mparser . BaseNode ,
args : T . Tuple [ str , T . Union [ build . Executable , build . Jar , ExternalProgram , mesonlib . File ] ] ,
kwargs : ' kwtypes.FuncTest ' ) - > None :
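# Illustrative meson.build call (assumed example):
#     test('unit', exe, args: ['--tap'], suite: 'fast', timeout: 30,
#          is_parallel: false)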
self . add_test ( node , args , kwargs , True )
def unpack_env_kwarg ( self , kwargs : T . Union [ build . EnvironmentVariables , T . Dict [ str , ' TYPE_var ' ] , T . List [ ' TYPE_var ' ] , str ] ) - > build . EnvironmentVariables :
envlist = kwargs . get ( ' env ' )
if envlist is None :
return build . EnvironmentVariables ( )
msg = ENV_KW . validator ( envlist )
if msg :
raise InvalidArguments ( f ' " env " : { msg } ' )
return ENV_KW . convertor ( envlist )
def make_test ( self , node : mparser . BaseNode ,
args : T . Tuple [ str , T . Union [ build . Executable , build . Jar , ExternalProgram , mesonlib . File ] ] ,
kwargs : ' kwtypes.BaseTest ' ) - > Test :
name = args [ 0 ]
if ' : ' in name :
mlog . deprecation ( f ' " : " is not allowed in test name " { name } " , it has been replaced with " _ " ' ,
location = node )
name = name . replace ( ' : ' , ' _ ' )
exe = args [ 1 ]
# Reject not-found programs up front so that 'meson test' fails here with a
# clear error instead of crashing later in mtest.py (see issue #10091). This
# mirrors the checks done for custom_target and generator commands.
if isinstance ( exe , ExternalProgram ) :
if not exe . found ( ) :
raise InvalidArguments ( ' Tried to use not-found external program as test exe ' )
elif isinstance ( exe , mesonlib . File ) :
exe = self . find_program_impl ( [ exe ] )
env = self . unpack_env_kwarg ( kwargs )
if kwargs [ ' timeout ' ] < = 0 :
FeatureNew . single_use ( ' test() timeout <= 0 ' , ' 0.57.0 ' , self . subproject , location = node )
prj = self . subproject if self . is_subproject ( ) else self . build . project_name
suite : T . List [ str ] = [ ]
for s in kwargs [ ' suite ' ] :
if s :
s = ' : ' + s
suite . append ( prj . replace ( ' ' , ' _ ' ) . replace ( ' : ' , ' _ ' ) + s )
return Test ( name ,
prj ,
suite ,
exe ,
kwargs [ ' depends ' ] ,
kwargs . get ( ' is_parallel ' , False ) ,
kwargs [ ' args ' ] ,
env ,
kwargs [ ' should_fail ' ] ,
kwargs [ ' timeout ' ] ,
kwargs [ ' workdir ' ] ,
kwargs [ ' protocol ' ] ,
kwargs [ ' priority ' ] ,
kwargs [ ' verbose ' ] )
def add_test ( self , node : mparser . BaseNode , args : T . List , kwargs : T . Dict [ str , T . Any ] , is_base_test : bool ) :
t = self . make_test ( node , args , kwargs )
if is_base_test :
self . build . tests . append ( t )
mlog . debug ( ' Adding test ' , mlog . bold ( t . name , True ) )
else :
self . build . benchmarks . append ( t )
mlog . debug ( ' Adding benchmark ' , mlog . bold ( t . name , True ) )
@typed_pos_args ( ' install_headers ' , varargs = ( str , mesonlib . File ) )
@typed_kwargs (
' install_headers ' ,
PRESERVE_PATH_KW ,
KwargInfo ( ' subdir ' , ( str , NoneType ) ) ,
INSTALL_MODE_KW . evolve ( since = ' 0.47.0 ' ) ,
INSTALL_DIR_KW ,
)
def func_install_headers ( self , node : mparser . BaseNode ,
args : T . Tuple [ T . List [ ' mesonlib.FileOrString ' ] ] ,
kwargs : ' kwtypes.FuncInstallHeaders ' ) - > build . Headers :
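# Illustrative meson.build call (assumed example):
#     install_headers('mylib.h', 'mylib-private.h', subdir: 'mylib')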
install_mode = self . _warn_kwarg_install_mode_sticky ( kwargs [ ' install_mode ' ] )
source_files = self . source_strings_to_files ( args [ 0 ] )
install_subdir = kwargs [ ' subdir ' ]
if install_subdir is not None :
if kwargs [ ' install_dir ' ] is not None :
raise InterpreterException ( ' install_headers: cannot specify both " install_dir " and " subdir " . Use only " install_dir " . ' )
if os . path . isabs ( install_subdir ) :
mlog . deprecation ( ' Subdir keyword must not be an absolute path. This will be a hard error in the next release. ' )
else :
install_subdir = ' '
dirs = collections . defaultdict ( list )
ret_headers = [ ]
if kwargs [ ' preserve_path ' ] :
for file in source_files :
dirname = os . path . dirname ( file . fname )
dirs [ dirname ] . append ( file )
else :
dirs [ ' ' ] . extend ( source_files )
for childdir in dirs :
h = build . Headers ( dirs [ childdir ] , os . path . join ( install_subdir , childdir ) , kwargs [ ' install_dir ' ] ,
install_mode , self . subproject )
ret_headers . append ( h )
self . build . headers . append ( h )
return ret_headers
@typed_pos_args ( ' install_man ' , varargs = ( str , mesonlib . File ) )
@typed_kwargs (
' install_man ' ,
KwargInfo ( ' locale ' , ( str , NoneType ) , since = ' 0.58.0 ' ) ,
INSTALL_MODE_KW . evolve ( since = ' 0.47.0 ' ) ,
INSTALL_DIR_KW ,
)
def func_install_man ( self , node : mparser . BaseNode ,
args : T . Tuple [ T . List [ ' mesonlib.FileOrString ' ] ] ,
kwargs : ' kwtypes.FuncInstallMan ' ) - > build . Man :
install_mode = self . _warn_kwarg_install_mode_sticky ( kwargs [ ' install_mode ' ] )
# We just need to narrow the type here: the input is limited to strings and
# files, so source_strings_to_files() will only return Files.
sources = self . source_strings_to_files ( args [ 0 ] )
for s in sources :
try :
num = int ( s . rsplit ( ' . ' , 1 ) [ - 1 ] )
except ( IndexError , ValueError ) :
num = 0
if not 1 < = num < = 9 :
raise InvalidArguments ( ' Man file must have a file extension of a number between 1 and 9 ' )
m = build . Man ( sources , kwargs [ ' install_dir ' ] , install_mode ,
self . subproject , kwargs [ ' locale ' ] )
self . build . man . append ( m )
return m
@FeatureNew ( ' install_emptydir ' , ' 0.60.0 ' )
@typed_kwargs (
' install_emptydir ' ,
INSTALL_MODE_KW ,
KwargInfo ( ' install_tag ' , ( str , NoneType ) , since = ' 0.62.0 ' )
)
def func_install_emptydir ( self , node : mparser . BaseNode , args : T . Tuple [ str ] , kwargs ) - > None :
d = build . EmptyDir ( args [ 0 ] , kwargs [ ' install_mode ' ] , self . subproject , kwargs [ ' install_tag ' ] )
self . build . emptydir . append ( d )
return d
@FeatureNew ( ' install_symlink ' , ' 0.61.0 ' )
@typed_pos_args ( ' symlink_name ' , str )
@typed_kwargs (
' install_symlink ' ,
KwargInfo ( ' pointing_to ' , str , required = True ) ,
KwargInfo ( ' install_dir ' , str , required = True ) ,
INSTALL_TAG_KW ,
)
def func_install_symlink ( self , node : mparser . BaseNode ,
args : T . Tuple [ T . List [ str ] ] ,
kwargs ) - > build . SymlinkData :
name = args [ 0 ] # Validation while creating the SymlinkData object
target = kwargs [ ' pointing_to ' ]
l = build . SymlinkData ( target , name , kwargs [ ' install_dir ' ] ,
self . subproject , kwargs [ ' install_tag ' ] )
self . build . symlinks . append ( l )
return l
@FeatureNew ( ' structured_sources ' , ' 0.62.0 ' )
@typed_pos_args ( ' structured_sources ' , object , optargs = [ dict ] )
@noKwargs
@noArgsFlattening
def func_structured_sources (
self , node : mparser . BaseNode ,
args : T . Tuple [ object , T . Optional [ T . Dict [ str , object ] ] ] ,
kwargs : ' TYPE_kwargs ' ) - > build . StructuredSources :
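# Illustrative meson.build call (assumed example): the first argument holds the
# root-level sources, the optional dict maps subdirectory-like keys to theirs:
#     structured_sources(['main.rs'], {'utils': ['utils/mod.rs']})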
valid_types = ( str , mesonlib . File , build . GeneratedList , build . CustomTarget , build . CustomTargetIndex )
sources : T . Dict [ str , T . List [ T . Union [ mesonlib . File , ' build.GeneratedTypes ' ] ] ] = collections . defaultdict ( list )
for arg in mesonlib . listify ( args [ 0 ] ) :
if not isinstance ( arg , valid_types ) :
raise InvalidArguments ( f ' structured_sources: type " { type ( arg ) } " is not valid ' )
if isinstance ( arg , str ) :
arg = mesonlib . File . from_source_file ( self . environment . source_dir , self . subdir , arg )
sources [ ' ' ] . append ( arg )
if args [ 1 ] :
if ' ' in args [ 1 ] :
raise InvalidArguments ( ' structured_sources: keys to dictionary argument may not be an empty string. ' )
for k , v in args [ 1 ] . items ( ) :
for arg in mesonlib . listify ( v ) :
if not isinstance ( arg , valid_types ) :
raise InvalidArguments ( f ' structured_sources: type " { type ( arg ) } " is not valid ' )
if isinstance ( arg , str ) :
arg = mesonlib . File . from_source_file ( self . environment . source_dir , self . subdir , arg )
sources [ k ] . append ( arg )
return build . StructuredSources ( sources )
@typed_pos_args ( ' subdir ' , str )
@typed_kwargs (
' subdir ' ,
KwargInfo (
' if_found ' ,
ContainerTypeInfo ( list , object ) ,
validator = lambda a : ' Objects must have a found() method ' if not all ( hasattr ( x , ' found ' ) for x in a ) else None ,
since = ' 0.44.0 ' ,
default = [ ] ,
listify = True ,
) ,
)
def func_subdir ( self , node : mparser . BaseNode , args : T . Tuple [ str ] , kwargs : ' kwtypes.Subdir ' ) - > None :
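# Illustrative meson.build call (assumed example); entering the directory is
# skipped if any 'if_found' object reports not-found:
#     subdir('tests', if_found: gtest_dep)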
mesonlib . check_direntry_issues ( args )
if ' .. ' in args [ 0 ] :
raise InvalidArguments ( ' Subdir contains .. ' )
if self . subdir == ' ' and args [ 0 ] == self . subproject_dir :
raise InvalidArguments ( ' Must not go into subprojects dir with subdir(), use subproject() instead. ' )
if self . subdir == ' ' and args [ 0 ] . startswith ( ' meson- ' ) :
raise InvalidArguments ( ' The " meson- " prefix is reserved and cannot be used for top-level subdir(). ' )
if args [ 0 ] == ' ' :
raise InvalidArguments ( " The argument given to subdir() is the empty string ' ' . This is prohibited. " )
for i in kwargs [ ' if_found ' ] :
if not i . found ( ) :
return
prev_subdir = self . subdir
subdir = os . path . join ( prev_subdir , args [ 0 ] )
if os . path . isabs ( subdir ) :
raise InvalidArguments ( ' Subdir argument must be a relative path. ' )
absdir = os . path . join ( self . environment . get_source_dir ( ) , subdir )
symlinkless_dir = os . path . realpath ( absdir )
build_file = os . path . join ( symlinkless_dir , ' meson.build ' )
if build_file in self . processed_buildfiles :
raise InvalidArguments ( f ' Tried to enter directory " { subdir } " , which has already been visited. ' )
self . processed_buildfiles . add ( build_file )
self . subdir = subdir
os . makedirs ( os . path . join ( self . environment . build_dir , subdir ) , exist_ok = True )
buildfilename = os . path . join ( self . subdir , environment . build_filename )
self . build_def_files . add ( buildfilename )
absname = os . path . join ( self . environment . get_source_dir ( ) , buildfilename )
if not os . path . isfile ( absname ) :
self . subdir = prev_subdir
raise InterpreterException ( f " Non-existent build file ' { buildfilename !s} ' " )
with open ( absname , encoding = ' utf-8 ' ) as f :
code = f . read ( )
assert isinstance ( code , str )
try :
codeblock = mparser . Parser ( code , absname ) . parse ( )
except mesonlib . MesonException as me :
me . file = absname
raise me
try :
self . evaluate_codeblock ( codeblock )
except SubdirDoneRequest :
pass
self . subdir = prev_subdir
def _get_kwarg_install_mode ( self , kwargs : T . Dict [ str , T . Any ] ) - > T . Optional [ FileMode ] :
if kwargs . get ( ' install_mode ' , None ) is None :
return None
if isinstance ( kwargs [ ' install_mode ' ] , FileMode ) :
return kwargs [ ' install_mode ' ]
install_mode : T . List [ str ] = [ ]
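# install_mode accepts up to three elements: a permissions string (or false to
# keep the default), an owner and a group, e.g. in meson.build (assumed
# example): install_mode: ['rw-r--r--', 'root', 'root']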
mode = mesonlib . typeslistify ( kwargs . get ( ' install_mode ' , [ ] ) , ( str , int ) )
for m in mode :
# We skip any arguments that are set to `false`
if m is False :
m = None
install_mode . append ( m )
if len ( install_mode ) > 3 :
raise InvalidArguments ( ' Keyword argument install_mode takes at '
' most 3 arguments. ' )
if len ( install_mode ) > 0 and install_mode [ 0 ] is not None and \
not isinstance ( install_mode [ 0 ] , str ) :
raise InvalidArguments ( ' Keyword argument install_mode requires the '
' permissions arg to be a string or false ' )
return FileMode ( * install_mode )
# The sticky bit on a regular file is either ignored outright on basically any
# OS nowadays, silently ignored (Solaris), or triggers an "illegal operation"
# error (FreeBSD). It was likely added "because it exists", but should never
# be used. In theory it is useful for directories, but we never apply modes to
# directories other than in install_emptydir.
def _warn_kwarg_install_mode_sticky ( self , mode : FileMode ) - > FileMode :
if mode . perms > 0 and mode . perms & stat . S_ISVTX :
mlog . deprecation ( ' install_mode with the sticky bit on a file does not do anything and will '
' be ignored since Meson 0.64.0 ' , location = self . current_node )
perms = stat . filemode ( mode . perms - stat . S_ISVTX ) [ 1 : ]
return FileMode ( perms , mode . owner , mode . group )
else :
return mode
@typed_pos_args ( ' install_data ' , varargs = ( str , mesonlib . File ) )
@typed_kwargs (
' install_data ' ,
KwargInfo ( ' sources ' , ContainerTypeInfo ( list , ( str , mesonlib . File ) ) , listify = True , default = [ ] ) ,
KwargInfo ( ' rename ' , ContainerTypeInfo ( list , str ) , default = [ ] , listify = True , since = ' 0.46.0 ' ) ,
INSTALL_MODE_KW . evolve ( since = ' 0.38.0 ' ) ,
INSTALL_TAG_KW . evolve ( since = ' 0.60.0 ' ) ,
INSTALL_DIR_KW ,
PRESERVE_PATH_KW . evolve ( since = ' 0.64.0 ' ) ,
)
def func_install_data ( self , node : mparser . BaseNode ,
args : T . Tuple [ T . List [ ' mesonlib.FileOrString ' ] ] ,
kwargs : ' kwtypes.FuncInstallData ' ) - > build . Data :
sources = self . source_strings_to_files ( args [ 0 ] + kwargs [ ' sources ' ] )
rename = kwargs [ ' rename ' ] or None
if rename :
if len ( rename ) != len ( sources ) :
raise InvalidArguments (
' " rename " and " sources " argument lists must be the same length if " rename " is given. '
f ' Rename has { len ( rename ) } elements and sources has { len ( sources ) } . ' )
install_mode = self . _warn_kwarg_install_mode_sticky ( kwargs [ ' install_mode ' ] )
return self . install_data_impl ( sources , kwargs [ ' install_dir ' ] , install_mode ,
rename , kwargs [ ' install_tag ' ] ,
preserve_path = kwargs [ ' preserve_path ' ] )
def install_data_impl ( self , sources : T . List [ mesonlib . File ] , install_dir : T . Optional [ str ] ,
install_mode : FileMode , rename : T . Optional [ str ] ,
tag : T . Optional [ str ] ,
install_dir_name : T . Optional [ str ] = None ,
install_data_type : T . Optional [ str ] = None ,
preserve_path : bool = False ) - > build . Data :
""" Just the implementation with no validation. """
idir = install_dir or ' '
idir_name = install_dir_name or idir or ' {datadir} '
if isinstance ( idir_name , P_OBJ . OptionString ) :
idir_name = idir_name . optname
dirs = collections . defaultdict ( list )
ret_data = [ ]
if preserve_path :
for file in sources :
dirname = os . path . dirname ( file . fname )
dirs [ dirname ] . append ( file )
else :
dirs [ ' ' ] . extend ( sources )
for childdir , files in dirs . items ( ) :
d = build . Data ( files , os . path . join ( idir , childdir ) , os . path . join ( idir_name , childdir ) ,
install_mode , self . subproject , rename , tag , install_data_type )
ret_data . append ( d )
self . build . data . extend ( ret_data )
return ret_data
@typed_pos_args ( ' install_subdir ' , str )
@typed_kwargs (
' install_subdir ' ,
KwargInfo ( ' install_dir ' , str , required = True ) ,
KwargInfo ( ' strip_directory ' , bool , default = False ) ,
KwargInfo ( ' exclude_files ' , ContainerTypeInfo ( list , str ) ,
default = [ ] , listify = True , since = ' 0.42.0 ' ,
validator = lambda x : ' cannot be absolute ' if any ( os . path . isabs ( d ) for d in x ) else None ) ,
KwargInfo ( ' exclude_directories ' , ContainerTypeInfo ( list , str ) ,
default = [ ] , listify = True , since = ' 0.42.0 ' ,
validator = lambda x : ' cannot be absolute ' if any ( os . path . isabs ( d ) for d in x ) else None ) ,
INSTALL_MODE_KW . evolve ( since = ' 0.38.0 ' ) ,
INSTALL_TAG_KW . evolve ( since = ' 0.60.0 ' ) ,
)
def func_install_subdir ( self , node : mparser . BaseNode , args : T . Tuple [ str ] ,
kwargs : ' kwtypes.FuncInstallSubdir ' ) - > build . InstallDir :
exclude = ( set ( kwargs [ ' exclude_files ' ] ) , set ( kwargs [ ' exclude_directories ' ] ) )
srcdir = os . path . join ( self . environment . source_dir , self . subdir , args [ 0 ] )
if not os . path . isdir ( srcdir ) or not any ( os . scandir ( srcdir ) ) :
FeatureNew . single_use ( ' install_subdir with empty directory ' , ' 0.47.0 ' , self . subproject , location = node )
FeatureDeprecated . single_use ( ' install_subdir with empty directory ' , ' 0.60.0 ' , self . subproject ,
' It worked by accident and is buggy. Use install_emptydir instead. ' , node )
install_mode = self . _warn_kwarg_install_mode_sticky ( kwargs [ ' install_mode ' ] )
idir_name = kwargs [ ' install_dir ' ]
if isinstance ( idir_name , P_OBJ . OptionString ) :
idir_name = idir_name . optname
idir = build . InstallDir (
self . subdir ,
args [ 0 ] ,
kwargs [ ' install_dir ' ] ,
idir_name ,
install_mode ,
exclude ,
kwargs [ ' strip_directory ' ] ,
self . subproject ,
install_tag = kwargs [ ' install_tag ' ] )
self . build . install_dirs . append ( idir )
return idir
@noPosargs
@typed_kwargs (
' configure_file ' ,
DEPFILE_KW . evolve ( since = ' 0.52.0 ' ) ,
INSTALL_MODE_KW . evolve ( since = ' 0.47.0 ' ) ,
INSTALL_TAG_KW . evolve ( since = ' 0.60.0 ' ) ,
KwargInfo ( ' capture ' , bool , default = False , since = ' 0.41.0 ' ) ,
KwargInfo (
' command ' ,
( ContainerTypeInfo ( list , ( build . Executable , ExternalProgram , compilers . Compiler , mesonlib . File , str ) , allow_empty = False ) , NoneType ) ,
listify = True ,
) ,
KwargInfo (
' configuration ' ,
( ContainerTypeInfo ( dict , ( str , int , bool ) ) , build . ConfigurationData , NoneType ) ,
) ,
KwargInfo (
' copy ' , bool , default = False , since = ' 0.47.0 ' ,
deprecated = ' 0.64.0 ' , deprecated_message = ' Use fs.copyfile instead ' ,
) ,
KwargInfo ( ' encoding ' , str , default = ' utf-8 ' , since = ' 0.47.0 ' ) ,
KwargInfo ( ' format ' , str , default = ' meson ' , since = ' 0.46.0 ' ,
validator = in_set_validator ( { ' meson ' , ' cmake ' , ' cmake@ ' } ) ) ,
KwargInfo (
' input ' ,
ContainerTypeInfo ( list , ( mesonlib . File , str ) ) ,
listify = True ,
default = [ ] ,
) ,
# Cannot use shared implementation until None backwards compat is dropped
KwargInfo ( ' install ' , ( bool , NoneType ) , since = ' 0.50.0 ' ) ,
KwargInfo ( ' install_dir ' , ( str , bool ) , default = ' ' ,
validator = lambda x : ' must be `false` if boolean ' if x is True else None ) ,
OUTPUT_KW ,
KwargInfo ( ' output_format ' , str , default = ' c ' , since = ' 0.47.0 ' ,
validator = in_set_validator ( { ' c ' , ' nasm ' } ) ) ,
)
def func_configure_file ( self , node : mparser . BaseNode , args : T . List [ TYPE_var ] ,
kwargs : kwtypes . ConfigureFile ) :
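# Illustrative meson.build call (assumed example); exactly one of
# 'configuration', 'command' or 'copy' must be given:
#     configure_file(input: 'config.h.in', output: 'config.h',
#                    configuration: conf_data)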
actions = sorted ( x for x in [ ' configuration ' , ' command ' , ' copy ' ]
if kwargs [ x ] not in [ None , False ] )
num_actions = len ( actions )
if num_actions == 0 :
raise InterpreterException ( ' Must specify an action with one of these '
' keyword arguments: \' configuration \' , '
' \' command \' , or \' copy \' . ' )
elif num_actions == 2 :
raise InterpreterException ( ' Must not specify both {!r} and {!r} '
' keyword arguments since they are '
' mutually exclusive. ' . format ( * actions ) )
elif num_actions == 3 :
raise InterpreterException ( ' Must specify one of {!r} , {!r} , and '
' {!r} keyword arguments since they are '
' mutually exclusive. ' . format ( * actions ) )
if kwargs [ ' capture ' ] and not kwargs [ ' command ' ] :
raise InvalidArguments ( ' configure_file: " capture " keyword requires " command " keyword. ' )
install_mode = self . _warn_kwarg_install_mode_sticky ( kwargs [ ' install_mode ' ] )
fmt = kwargs [ ' format ' ]
output_format = kwargs [ ' output_format ' ]
depfile = kwargs [ ' depfile ' ]
# Validate input
inputs = self . source_strings_to_files ( kwargs [ ' input ' ] )
inputs_abs = [ ]
for f in inputs :
if isinstance ( f , mesonlib . File ) :
inputs_abs . append ( f . absolute_path ( self . environment . source_dir ,
self . environment . build_dir ) )
self . add_build_def_file ( f )
else :
raise InterpreterException ( ' Inputs can only be strings or file objects ' )
# Validate output
output = kwargs [ ' output ' ]
if inputs_abs :
values = mesonlib . get_filenames_templates_dict ( inputs_abs , None )
outputs = mesonlib . substitute_values ( [ output ] , values )
output = outputs [ 0 ]
if depfile :
depfile = mesonlib . substitute_values ( [ depfile ] , values ) [ 0 ]
ofile_rpath = os . path . join ( self . subdir , output )
if ofile_rpath in self . configure_file_outputs :
mesonbuildfile = os . path . join ( self . subdir , ' meson.build ' )
current_call = f " { mesonbuildfile } : { self . current_lineno } "
first_call = " {} : {} " . format ( mesonbuildfile , self . configure_file_outputs [ ofile_rpath ] )
mlog . warning ( ' Output file ' , mlog . bold ( ofile_rpath , True ) , ' for configure_file() at ' , current_call , ' overwrites configure_file() output at ' , first_call )
else :
self . configure_file_outputs [ ofile_rpath ] = self . current_lineno
( ofile_path , ofile_fname ) = os . path . split ( os . path . join ( self . subdir , output ) )
ofile_abs = os . path . join ( self . environment . build_dir , ofile_path , ofile_fname )
# Perform the appropriate action
if kwargs [ ' configuration ' ] is not None :
conf = kwargs [ ' configuration ' ]
if isinstance ( conf , dict ) :
FeatureNew . single_use ( ' configure_file.configuration dictionary ' , ' 0.49.0 ' , self . subproject , location = node )
for k , v in conf . items ( ) :
if not isinstance ( v , ( str , int , bool ) ) :
raise InvalidArguments (
f ' " configuration_data " : initial value dictionary key " { k !r} " " must be " str | int | bool " , not " { v !r} " ' )
conf = build . ConfigurationData ( conf )
mlog . log ( ' Configuring ' , mlog . bold ( output ) , ' using configuration ' )
if len ( inputs ) > 1 :
raise InterpreterException ( ' At most one input file can be given in configuration mode ' )
if inputs :
os . makedirs ( os . path . join ( self . environment . build_dir , self . subdir ) , exist_ok = True )
file_encoding = kwargs [ ' encoding ' ]
missing_variables , confdata_useless = \
mesonlib . do_conf_file ( inputs_abs [ 0 ] , ofile_abs , conf ,
fmt , file_encoding )
if missing_variables :
var_list = " , " . join ( repr ( m ) for m in sorted ( missing_variables ) )
mlog . warning (
f " The variable(s) { var_list } in the input file ' { inputs [ 0 ] } ' are not "
" present in the given configuration data. " , location = node )
if confdata_useless :
ifbase = os . path . basename ( inputs_abs [ 0 ] )
tv = FeatureNew . get_target_version ( self . subproject )
if FeatureNew . check_version ( tv , ' 0.47.0 ' ) :
mlog . warning ( ' Got an empty configuration_data() object and found no '
f ' substitutions in the input file { ifbase !r} . If you want to '
' copy a file to the build dir, use the \' copy: \' keyword '
' argument added in 0.47.0 ' , location = node )
else :
mesonlib . dump_conf_header ( ofile_abs , conf , output_format )
conf . used = True
elif kwargs [ ' command ' ] is not None :
if len ( inputs ) > 1 :
FeatureNew . single_use ( ' multiple inputs in configure_file() ' , ' 0.52.0 ' , self . subproject , location = node )
# We use absolute paths for input and output here because the cwd
# that the command is run from is 'unspecified', so it could change.
# Currently it's builddir/subdir for in_builddir else srcdir/subdir.
values = mesonlib . get_filenames_templates_dict ( inputs_abs , [ ofile_abs ] )
if depfile :
depfile = os . path . join ( self . environment . get_scratch_dir ( ) , depfile )
values [ ' @DEPFILE@ ' ] = depfile
# Substitute @INPUT@, @OUTPUT@, etc here.
_cmd = mesonlib . substitute_values ( kwargs [ ' command ' ] , values )
mlog . log ( ' Configuring ' , mlog . bold ( output ) , ' with command ' )
cmd , * args = _cmd
res = self . run_command_impl ( node , ( cmd , args ) ,
{ ' capture ' : True , ' check ' : True , ' env ' : build . EnvironmentVariables ( ) } ,
True )
if kwargs [ ' capture ' ] :
dst_tmp = ofile_abs + ' ~ '
file_encoding = kwargs [ ' encoding ' ]
with open ( dst_tmp , ' w ' , encoding = file_encoding ) as f :
f . writelines ( res . stdout )
if inputs_abs :
shutil . copymode ( inputs_abs [ 0 ] , dst_tmp )
mesonlib . replace_if_different ( ofile_abs , dst_tmp )
if depfile :
mlog . log ( ' Reading depfile: ' , mlog . bold ( depfile ) )
with open ( depfile , encoding = ' utf-8 ' ) as f :
df = DepFile ( f . readlines ( ) )
deps = df . get_all_dependencies ( ofile_fname )
for dep in deps :
self . add_build_def_file ( dep )
elif kwargs [ ' copy ' ] :
if len ( inputs_abs ) != 1 :
raise InterpreterException ( ' Exactly one input file must be given in copy mode ' )
os . makedirs ( os . path . join ( self . environment . build_dir , self . subdir ) , exist_ok = True )
shutil . copy2 ( inputs_abs [ 0 ] , ofile_abs )
# Install the file if requested. We check for the empty string
# for backwards compatibility: that was the behaviour before
# 0.45.0, so preserve it.
idir = kwargs [ ' install_dir ' ]
if idir is False :
idir = ' '
FeatureDeprecated . single_use ( ' configure_file install_dir: false ' , ' 0.50.0 ' ,
self . subproject , ' Use the `install:` kwarg instead ' , location = node )
install = kwargs [ ' install ' ] if kwargs [ ' install ' ] is not None else idir != ' '
if install :
if not idir :
raise InterpreterException (
' " install_dir " must be specified when " install " in a configure_file is true ' )
idir_name = idir
if isinstance ( idir_name , P_OBJ . OptionString ) :
idir_name = idir_name . optname
cfile = mesonlib . File . from_built_file ( ofile_path , ofile_fname )
install_tag = kwargs [ ' install_tag ' ]
self . build . data . append ( build . Data ( [ cfile ] , idir , idir_name , install_mode , self . subproject ,
install_tag = install_tag , data_type = ' configure ' ) )
return mesonlib . File . from_built_file ( self . subdir , output )
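# Illustrative sketch of the three mutually exclusive configure_file() modes
# dispatched above, as they might appear in a meson.build file. The file names
# and the converter_prog variable are hypothetical.
#
#   conf = configuration_data()
#   conf.set('VERSION', '1.0')
#   configure_file(input: 'config.h.in', output: 'config.h', configuration: conf)
#
#   configure_file(input: 'data.txt', output: 'data.out', capture: true,
#                  command: [converter_prog, '@INPUT@'])
#
#   configure_file(input: 'default.conf', output: 'default.conf', copy: true)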
def extract_incdirs ( self , kwargs , key : str = ' include_directories ' ) :
prospectives = extract_as_list ( kwargs , key )
if key == ' include_directories ' :
for i in prospectives :
if isinstance ( i , str ) :
FeatureNew . single_use ( ' include_directories kwarg of type string ' , ' 0.50.0 ' , self . subproject ,
f ' Use include_directories( { i !r} ) instead ' , location = self . current_node )
break
result = [ ]
for p in prospectives :
if isinstance ( p , build . IncludeDirs ) :
result . append ( p )
elif isinstance ( p , str ) :
result . append ( self . build_incdir_object ( [ p ] ) )
else :
raise InterpreterException ( ' Include directory objects can only be created from strings or include directories. ' )
return result
@typed_pos_args ( ' include_directories ' , varargs = str )
@typed_kwargs ( ' include_directories ' , KwargInfo ( ' is_system ' , bool , default = False ) )
def func_include_directories ( self , node : mparser . BaseNode , args : T . Tuple [ T . List [ str ] ] ,
kwargs : ' kwtypes.FuncIncludeDirectories ' ) - > build . IncludeDirs :
return self . build_incdir_object ( args [ 0 ] , kwargs [ ' is_system ' ] )
def build_incdir_object ( self , incdir_strings : T . List [ str ] , is_system : bool = False ) - > build . IncludeDirs :
if not isinstance ( is_system , bool ) :
raise InvalidArguments ( ' Is_system must be boolean. ' )
src_root = self . environment . get_source_dir ( )
build_root = self . environment . get_build_dir ( )
absbase_src = os . path . join ( src_root , self . subdir )
absbase_build = os . path . join ( build_root , self . subdir )
for a in incdir_strings :
if a . startswith ( src_root ) :
raise InvalidArguments ( textwrap . dedent ( ''' \
Tried to form an absolute path to a dir in the source tree .
You should not do that but use relative paths instead , for
directories that are part of your project .
To get include path to any directory relative to the current dir do
incdir = include_directories ( dirname )
After this incdir will contain both the current source dir as well as the
corresponding build dir . It can then be used in any subdirectory and
Meson will take care of all the busywork to make paths work .
Dirname can even be ' . ' to mark the current directory . Though you should
remember that the current source and build directories are always
put in the include directories by default so you only need to do
include_directories ( ' . ' ) if you intend to use the result in a
different subdirectory .
Note that this error message can also be triggered by
external dependencies being installed within your source
tree - it ' s not recommended to do this.
''' ))
else :
try :
self . validate_within_subproject ( self . subdir , a )
except InterpreterException :
mlog . warning ( ' include_directories sandbox violation! ' , location = self . current_node )
print ( textwrap . dedent ( f ''' \
The project is trying to access the directory { a ! r } which belongs to a different
subproject . This is a problem as it hardcodes the relative paths of these two projects .
This makes it impossible to compile the project in any other directory layout and also
prevents the subproject from changing its own directory layout .
Instead of poking directly at the internals the subproject should be executed and
it should set a variable that the caller can then use . Something like :
# In subproject
some_dep = declare_dependency ( include_directories : include_directories ( ' include ' ) )
# In subproject wrap file
[ provide ]
some = some_dep
# In parent project
some_dep = dependency ( ' some ' )
executable ( . . . , dependencies : [ some_dep ] )
This warning will become a hard error in a future Meson release .
''' ))
absdir_src = os . path . join ( absbase_src , a )
absdir_build = os . path . join ( absbase_build , a )
if not os . path . isdir ( absdir_src ) and not os . path . isdir ( absdir_build ) :
raise InvalidArguments ( f ' Include dir { a } does not exist. ' )
i = build . IncludeDirs ( self . subdir , incdir_strings , is_system )
return i
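# Illustrative sketch of the relative-path usage that the error message above
# asks for; the 'include' directory and target names are hypothetical:
#
#   incdir = include_directories('include')
#   executable('prog', 'main.c', include_directories: incdir)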
@typed_pos_args ( ' add_test_setup ' , str )
@typed_kwargs (
' add_test_setup ' ,
KwargInfo ( ' exe_wrapper ' , ContainerTypeInfo ( list , ( str , ExternalProgram ) ) , listify = True , default = [ ] ) ,
KwargInfo ( ' gdb ' , bool , default = False ) ,
KwargInfo ( ' timeout_multiplier ' , int , default = 1 ) ,
KwargInfo ( ' exclude_suites ' , ContainerTypeInfo ( list , str ) , listify = True , default = [ ] , since = ' 0.57.0 ' ) ,
KwargInfo ( ' is_default ' , bool , default = False , since = ' 0.49.0 ' ) ,
ENV_KW ,
)
def func_add_test_setup ( self , node : mparser . BaseNode , args : T . Tuple [ str ] , kwargs : ' kwtypes.AddTestSetup ' ) - > None :
setup_name = args [ 0 ]
if re . fullmatch ( ' ([_a-zA-Z][_0-9a-zA-Z]*:)?[_a-zA-Z][_0-9a-zA-Z]* ' , setup_name ) is None :
raise InterpreterException ( ' Setup name may only contain alphanumeric characters and underscores, optionally prefixed with a subproject name and a colon. ' )
if " : " not in setup_name :
setup_name = f ' { ( self . subproject if self . subproject else self . build . project_name ) } : { setup_name } '
exe_wrapper : T . List [ str ] = [ ]
for i in kwargs [ ' exe_wrapper ' ] :
if isinstance ( i , str ) :
exe_wrapper . append ( i )
else :
if not i . found ( ) :
raise InterpreterException ( ' Tried to use non-found executable. ' )
exe_wrapper += i . get_command ( )
timeout_multiplier = kwargs [ ' timeout_multiplier ' ]
if timeout_multiplier <= 0 :
FeatureNew ( ' add_test_setup() timeout_multiplier <= 0 ' , ' 0.57.0 ' ) . use ( self . subproject )
if kwargs [ ' is_default ' ] :
if self . build . test_setup_default_name is not None :
raise InterpreterException ( f ' { self . build . test_setup_default_name !r} is already set as default. '
' is_default can be set to true only once ' )
self . build . test_setup_default_name = setup_name
self . build . test_setups [ setup_name ] = build . TestSetup ( exe_wrapper , kwargs [ ' gdb ' ] , timeout_multiplier , kwargs [ ' env ' ] ,
kwargs [ ' exclude_suites ' ] )
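# Illustrative sketch of add_test_setup() as it might appear in meson.build;
# the setup name and wrapper command are hypothetical. Unqualified names get
# namespaced as '<project or subproject>:<name>' by the code above.
#
#   add_test_setup('valgrind',
#     exe_wrapper: ['valgrind', '--error-exitcode=1'],
#     timeout_multiplier: 3,
#     is_default: false)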
@typed_pos_args ( ' add_global_arguments ' , varargs = str )
@typed_kwargs ( ' add_global_arguments ' , NATIVE_KW , LANGUAGE_KW )
def func_add_global_arguments ( self , node : mparser . FunctionNode , args : T . Tuple [ T . List [ str ] ] , kwargs : ' kwtypes.FuncAddProjectArgs ' ) - > None :
self . _add_global_arguments ( node , self . build . global_args [ kwargs [ ' native ' ] ] , args [ 0 ] , kwargs )
@typed_pos_args ( ' add_global_link_arguments ' , varargs = str )
@typed_kwargs ( ' add_global_link_arguments ' , NATIVE_KW , LANGUAGE_KW )
def func_add_global_link_arguments ( self , node : mparser . FunctionNode , args : T . Tuple [ T . List [ str ] ] , kwargs : ' kwtypes.FuncAddProjectArgs ' ) - > None :
self . _add_global_arguments ( node , self . build . global_link_args [ kwargs [ ' native ' ] ] , args [ 0 ] , kwargs )
@typed_pos_args ( ' add_project_arguments ' , varargs = str )
@typed_kwargs ( ' add_project_arguments ' , NATIVE_KW , LANGUAGE_KW )
def func_add_project_arguments ( self , node : mparser . FunctionNode , args : T . Tuple [ T . List [ str ] ] , kwargs : ' kwtypes.FuncAddProjectArgs ' ) - > None :
self . _add_project_arguments ( node , self . build . projects_args [ kwargs [ ' native ' ] ] , args [ 0 ] , kwargs )
@typed_pos_args ( ' add_project_link_arguments ' , varargs = str )
@typed_kwargs ( ' add_project_link_arguments ' , NATIVE_KW , LANGUAGE_KW )
def func_add_project_link_arguments ( self , node : mparser . FunctionNode , args : T . Tuple [ T . List [ str ] ] , kwargs : ' kwtypes.FuncAddProjectArgs ' ) - > None :
self . _add_project_arguments ( node , self . build . projects_link_args [ kwargs [ ' native ' ] ] , args [ 0 ] , kwargs )
@FeatureNew ( ' add_project_dependencies ' , ' 0.63.0 ' )
@typed_pos_args ( ' add_project_dependencies ' , varargs = dependencies . Dependency )
@typed_kwargs ( ' add_project_dependencies ' , NATIVE_KW , LANGUAGE_KW )
def func_add_project_dependencies ( self , node : mparser . FunctionNode , args : T . Tuple [ T . List [ dependencies . Dependency ] ] , kwargs : ' kwtypes.FuncAddProjectArgs ' ) - > None :
for_machine = kwargs [ ' native ' ]
for lang in kwargs [ ' language ' ] :
if lang not in self . compilers [ for_machine ] :
raise InvalidCode ( f ' add_project_dependencies() called before add_language() for language " { lang } " ' )
for d in dependencies . get_leaf_external_dependencies ( args [ 0 ] ) :
compile_args = list ( d . get_compile_args ( ) )
system_incdir = d . get_include_type ( ) == ' system '
for i in d . get_include_dirs ( ) :
for lang in kwargs [ ' language ' ] :
comp = self . coredata . compilers [ for_machine ] [ lang ]
for idir in i . to_string_list ( self . environment . get_source_dir ( ) , self . environment . get_build_dir ( ) ) :
compile_args . extend ( comp . get_include_args ( idir , system_incdir ) )
self . _add_project_arguments ( node , self . build . projects_args [ for_machine ] , compile_args , kwargs )
self . _add_project_arguments ( node , self . build . projects_link_args [ for_machine ] , d . get_link_args ( ) , kwargs )
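# Illustrative sketch of add_project_dependencies(): it folds a dependency's
# compile and link flags into the per-project argument tables above so later
# targets do not have to repeat it. The dependency name is hypothetical.
#
#   zdep = dependency('zlib')
#   add_project_dependencies(zdep, language: 'c')
#   executable('prog', 'main.c')   # picks up zlib's include and link flags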
def _warn_about_builtin_args ( self , args : T . List [ str ] ) - > None :
# -Wpedantic is deliberately not included, since some people want to use it but not use -Wextra
# see e.g.
# https://github.com/mesonbuild/meson/issues/3275#issuecomment-641354956
# https://github.com/mesonbuild/meson/issues/3742
warnargs = ( ' /W1 ' , ' /W2 ' , ' /W3 ' , ' /W4 ' , ' /Wall ' , ' -Wall ' , ' -Wextra ' )
optargs = ( ' -O0 ' , ' -O2 ' , ' -O3 ' , ' -Os ' , ' -Oz ' , ' /O1 ' , ' /O2 ' , ' /Os ' )
for arg in args :
if arg in warnargs :
mlog . warning ( f ' Consider using the built-in warning_level option instead of using " { arg } " . ' ,
location = self . current_node )
elif arg in optargs :
mlog . warning ( f ' Consider using the built-in optimization level instead of using " { arg } " . ' ,
location = self . current_node )
elif arg == ' -Werror ' :
mlog . warning ( f ' Consider using the built-in werror option instead of using " { arg } " . ' ,
location = self . current_node )
elif arg == ' -g ' :
mlog . warning ( f ' Consider using the built-in debug option instead of using " { arg } " . ' ,
location = self . current_node )
elif arg . startswith ( ' -fsanitize ' ) :
mlog . warning ( f ' Consider using the built-in option for sanitizers instead of using " { arg } " . ' ,
location = self . current_node )
elif arg . startswith ( ' -std= ' ) or arg . startswith ( ' /std: ' ) :
mlog . warning ( f ' Consider using the built-in option for language standard version instead of using " { arg } " . ' ,
location = self . current_node )
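# Illustrative mapping behind the warnings above (raw compiler flag to the
# built-in option that should be used instead); the values shown are examples
# only:
#
#   -Wall / /W3     ->  -Dwarning_level=1|2|3
#   -O2 / /O2       ->  -Doptimization=2
#   -Werror         ->  -Dwerror=true
#   -g              ->  -Ddebug=true
#   -fsanitize=...  ->  -Db_sanitize=address
#   -std=c11        ->  -Dc_std=c11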
def _add_global_arguments ( self , node : mparser . FunctionNode , argsdict : T . Dict [ str , T . List [ str ] ] ,
args : T . List [ str ] , kwargs : ' kwtypes.FuncAddProjectArgs ' ) - > None :
if self . is_subproject ( ) :
msg = f ' Function \' { node . func_name } \' cannot be used in subprojects because ' \
' there is no way to make that reliable. \n Please only call ' \
' this if is_subproject() returns false. Alternatively, ' \
' define a variable that \n contains your language-specific ' \
' arguments and add it to the appropriate *_args kwarg ' \
' in each target. '
raise InvalidCode ( msg )
frozen = self . project_args_frozen or self . global_args_frozen
self . _add_arguments ( node , argsdict , frozen , args , kwargs )
def _add_project_arguments ( self , node : mparser . FunctionNode , argsdict : T . Dict [ str , T . Dict [ str , T . List [ str ] ] ] ,
args : T . List [ str ] , kwargs : ' kwtypes.FuncAddProjectArgs ' ) - > None :
if self . subproject not in argsdict :
argsdict [ self . subproject ] = { }
self . _add_arguments ( node , argsdict [ self . subproject ] ,
self . project_args_frozen , args , kwargs )
def _add_arguments ( self , node : mparser . FunctionNode , argsdict : T . Dict [ str , T . List [ str ] ] ,
args_frozen : bool , args : T . List [ str ] , kwargs : ' kwtypes.FuncAddProjectArgs ' ) - > None :
if args_frozen :
msg = f ' Tried to use \' { node . func_name } \' after a build target has been declared. \n ' \
' This is not permitted. Please declare all arguments before your targets. '
raise InvalidCode ( msg )
self . _warn_about_builtin_args ( args )
for lang in kwargs [ ' language ' ] :
argsdict [ lang ] = argsdict . get ( lang , [ ] ) + args
@noArgsFlattening
@typed_pos_args ( ' environment ' , optargs = [ ( str , list , dict ) ] )
@typed_kwargs ( ' environment ' , ENV_METHOD_KW , ENV_SEPARATOR_KW . evolve ( since = ' 0.62.0 ' ) )
def func_environment ( self , node : mparser . FunctionNode , args : T . Tuple [ T . Union [ None , str , T . List [ ' TYPE_var ' ] , T . Dict [ str , ' TYPE_var ' ] ] ] ,
kwargs : ' TYPE_kwargs ' ) - > build . EnvironmentVariables :
init = args [ 0 ]
if init is not None :
FeatureNew . single_use ( ' environment positional arguments ' , ' 0.52.0 ' , self . subproject , location = node )
msg = ENV_KW . validator ( init )
if msg :
raise InvalidArguments ( f ' " environment " : { msg } ' )
if isinstance ( init , dict ) and any ( i for i in init . values ( ) if isinstance ( i , list ) ) :
FeatureNew . single_use ( ' List of string in dictionary value ' , ' 0.62.0 ' , self . subproject , location = node )
return env_convertor_with_method ( init , kwargs [ ' method ' ] , kwargs [ ' separator ' ] )
return build . EnvironmentVariables ( )
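# Illustrative sketch of environment() usage handled above; the variable names
# and values are hypothetical:
#
#   env = environment({'FOO': 'bar'})                                    # positional init (0.52.0+)
#   env = environment({'PATHS': ['a', 'b']}, method: 'prepend', separator: ':')   # 0.62.0+
#   env.set('BAZ', 'qux')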
@typed_pos_args ( ' join_paths ' , varargs = str , min_varargs = 1 )
@noKwargs
def func_join_paths ( self , node : mparser . BaseNode , args : T . Tuple [ T . List [ str ] ] , kwargs : ' TYPE_kwargs ' ) - > str :
parts = args [ 0 ]
other = os . path . join ( ' ' , * parts [ 1 : ] ) . replace ( ' \\ ' , ' / ' )
ret = os . path . join ( * parts ) . replace ( ' \\ ' , ' / ' )
if isinstance ( parts [ 0 ] , P_OBJ . DependencyVariableString ) and ' .. ' not in other :
return P_OBJ . DependencyVariableString ( ret )
elif isinstance ( parts [ 0 ] , P_OBJ . OptionString ) :
name = os . path . join ( parts [ 0 ] . optname , other )
return P_OBJ . OptionString ( ret , name )
else :
return ret
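# Illustrative sketch of join_paths(): backslashes are normalised to forward
# slashes by the code above, and option- or dependency-derived strings keep
# their special type when no '..' component escapes them:
#
#   join_paths('foo', 'bar', 'baz')            # -> 'foo/bar/baz'
#   join_paths(get_option('prefix'), 'share')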
def run ( self ) - > None :
super ( ) . run ( )
mlog . log ( ' Build targets in project: ' , mlog . bold ( str ( len ( self . build . targets ) ) ) )
FeatureNew . report ( self . subproject )
FeatureDeprecated . report ( self . subproject )
if not self . is_subproject ( ) :
self . print_extra_warnings ( )
self . _print_summary ( )
def print_extra_warnings ( self ) - > None :
# TODO cross compilation
for c in self . coredata . compilers . host . values ( ) :
if c . get_id ( ) == ' clang ' :
self . check_clang_asan_lundef ( )
break
def check_clang_asan_lundef ( self ) - > None :
if OptionKey ( ' b_lundef ' ) not in self . coredata . options :
return
if OptionKey ( ' b_sanitize ' ) not in self . coredata . options :
return
if ( self . coredata . options [ OptionKey ( ' b_lundef ' ) ] . value and
self . coredata . options [ OptionKey ( ' b_sanitize ' ) ] . value != ' none ' ) :
mlog . warning ( ''' Trying to use {} sanitizer on Clang with b_lundef.
This will probably not work .
Try setting b_lundef to false instead . ''' .format(self.coredata.options[OptionKey( ' b_sanitize ' )].value),
location = self . current_node )
# Check that the indicated file is within the same subproject
# as we currently are. This is to stop people doing
# nasty things like:
#
# f = files('../../master_src/file.c')
#
# Note that this is validated only when the file
# object is generated. The result can be used in a different
# subproject than it is defined in (due to e.g. a
# declare_dependency).
def validate_within_subproject ( self , subdir , fname ) :
srcdir = Path ( self . environment . source_dir )
builddir = Path ( self . environment . build_dir )
if isinstance ( fname , P_OBJ . DependencyVariableString ) :
def validate_installable_file ( fpath : Path ) - > bool :
installablefiles : T . Set [ Path ] = set ( )
for d in self . build . data :
for s in d . sources :
installablefiles . add ( Path ( s . absolute_path ( srcdir , builddir ) ) )
installabledirs = [ str ( Path ( srcdir , s . source_subdir ) ) for s in self . build . install_dirs ]
if fpath in installablefiles :
return True
for d in installabledirs :
if str ( fpath ) . startswith ( d ) :
return True
return False
norm = Path ( fname )
# variables built from a dep.get_variable are allowed to refer to
# subproject files, as long as they are scheduled to be installed.
if validate_installable_file ( norm ) :
return
norm = Path ( os . path . abspath ( Path ( srcdir , subdir , fname ) ) )
if os . path . isdir ( norm ) :
inputtype = ' directory '
else :
inputtype = ' file '
if InterpreterRuleRelaxation . ALLOW_BUILD_DIR_FILE_REFFERENCES in self . relaxations and builddir in norm . parents :
return
if srcdir not in norm . parents :
# Grabbing files outside the source tree is ok.
# This is for vendor stuff like:
#
# /opt/vendorsdk/src/file_with_license_restrictions.c
return
project_root = Path ( srcdir , self . root_subdir )
subproject_dir = project_root / self . subproject_dir
if norm == project_root :
return
if project_root not in norm . parents :
raise InterpreterException ( f ' Sandbox violation: Tried to grab { inputtype } { norm . name } outside current (sub)project. ' )
if subproject_dir == norm or subproject_dir in norm . parents :
raise InterpreterException ( f ' Sandbox violation: Tried to grab { inputtype } { norm . name } from a nested subproject. ' )
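# Illustrative sketch of the sandbox rule enforced above; the paths are
# hypothetical. Reaching into another (sub)project's tree raises the
# 'Sandbox violation' error:
#
#   files('../../other_project/src/file.c')   # rejected
#   files('src/file.c')                       # allowed, stays in this project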
@T . overload
def source_strings_to_files ( self , sources : T . List [ ' mesonlib.FileOrString ' ] , strict : bool = True ) - > T . List [ ' mesonlib.File ' ] : . . .
@T . overload
def source_strings_to_files ( self , sources : T . List [ ' mesonlib.FileOrString ' ] , strict : bool = False ) - > T . List [ ' mesonlib.FileOrString ' ] : . . . # noqa: F811
@T . overload
def source_strings_to_files ( self , sources : T . List [ T . Union [ mesonlib . FileOrString , build . GeneratedTypes ] ] ) - > T . List [ T . Union [ mesonlib . File , build . GeneratedTypes ] ] : . . . # noqa: F811
@T . overload
def source_strings_to_files ( self , sources : T . List [ ' SourceInputs ' ] , strict : bool = True ) - > T . List [ ' SourceOutputs ' ] : . . . # noqa: F811
def source_strings_to_files ( self , sources : T . List [ ' SourceInputs ' ] , strict : bool = True ) - > T . List [ ' SourceOutputs ' ] : # noqa: F811
""" Lower inputs to a list of Targets and Files, replacing any strings.
: param sources : A raw ( Meson DSL ) list of inputs ( targets , files , and
strings )
: raises InterpreterException : if any of the inputs are of an invalid type
: return : A list of Targets and Files
"""
mesonlib . check_direntry_issues ( sources )
if not isinstance ( sources , list ) :
sources = [ sources ]
results : T . List [ ' SourceOutputs ' ] = [ ]
for s in sources :
if isinstance ( s , str ) :
if not strict and s . startswith ( self . environment . get_build_dir ( ) ) :
results . append ( s )
mlog . warning ( f ' Source item { s !r} cannot be converted to File object, because it is a generated file. '
' This will become a hard error in the future. ' , location = self . current_node )
else :
self . validate_within_subproject ( self . subdir , s )
results . append ( mesonlib . File . from_source_file ( self . environment . source_dir , self . subdir , s ) )
elif isinstance ( s , mesonlib . File ) :
results . append ( s )
elif isinstance ( s , ( build . GeneratedList , build . BuildTarget ,
build . CustomTargetIndex , build . CustomTarget ,
build . ExtractedObjects , build . StructuredSources ) ) :
results . append ( s )
else :
raise InterpreterException ( f ' Source item is { s !r} instead of '
' string or File-type object ' )
return results
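# Illustrative sketch of the lowering performed above; 'main.c' is a
# hypothetical source name:
#
#   'main.c'                        -> mesonlib.File (validated against the sandbox)
#   mesonlib.File instances         -> passed through unchanged
#   generated/custom target inputs  -> passed through unchanged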
@staticmethod
def validate_forbidden_targets ( name : str ) - > None :
if name . startswith ( ' meson-internal__ ' ) :
raise InvalidArguments ( " Target names starting with ' meson-internal__ ' are reserved "
" for Meson ' s internal use. Please rename. " )
if name . startswith ( ' meson- ' ) and ' . ' not in name :
raise InvalidArguments ( " Target names starting with ' meson- ' and without a file extension "
" are reserved for Meson ' s internal use. Please rename. " )
if name in coredata . FORBIDDEN_TARGET_NAMES :
raise InvalidArguments ( f " Target name ' { name } ' is reserved for Meson ' s "
" internal use. Please rename. " )
def add_target ( self , name : str , tobj : build . Target ) - > None :
if name == ' ' :
raise InterpreterException ( ' Target name must not be empty. ' )
if name . strip ( ) == ' ' :
raise InterpreterException ( ' Target name must not consist only of whitespace. ' )
if has_path_sep ( name ) :
pathseg = os . path . join ( self . subdir , os . path . split ( name ) [ 0 ] )
if os . path . exists ( os . path . join ( self . source_root , pathseg ) ) :
raise InvalidArguments ( textwrap . dedent ( f ''' \
Target " {name} " has a path segment pointing to directory " {pathseg} " . This is an error .
To define a target that builds in that directory you must define it
in the meson . build file in that directory .
''' ))
self . validate_forbidden_targets ( name )
# To permit an executable and a shared library to have the
# same name, such as "foo.exe" and "libfoo.a".
idname = tobj . get_id ( )
if idname in self . build . targets :
raise InvalidCode ( f ' Tried to create target " { name } " , but a target of that name already exists. ' )
if isinstance ( tobj , build . BuildTarget ) :
missing_languages = tobj . process_compilers ( )
self . add_languages ( missing_languages , True , tobj . for_machine )
tobj . process_compilers_late ( missing_languages )
self . add_stdlib_info ( tobj )
self . build . targets [ idname ] = tobj
if idname not in self . coredata . target_guids :
self . coredata . target_guids [ idname ] = str ( uuid . uuid4 ( ) ) . upper ( )
@FeatureNew ( ' both_libraries ' , ' 0.46.0 ' )
def build_both_libraries ( self , node , args , kwargs ) :
shared_lib = self . build_target ( node , args , kwargs , build . SharedLibrary )
static_lib = self . build_target ( node , args , kwargs , build . StaticLibrary )
# Check if user forces non-PIC static library.
pic = True
key = OptionKey ( ' b_staticpic ' )
if ' pic ' in kwargs :
pic = kwargs [ ' pic ' ]
elif key in self . environment . coredata . options :
pic = self . environment . coredata . options [ key ] . value
if self . backend . name == ' xcode ' :
# Xcode is a bit special in that you can't (at least for the moment)
# form a library only from object file inputs. The simple but inefficient
# solution is to use the sources directly. This will lead to them being
# built twice. This is unfortunate and slow, but at least it works.
# Feel free to submit patches to get this fixed if it is an
# issue for you.
reuse_object_files = False
else :
reuse_object_files = pic
if reuse_object_files :
# Replace sources with objects from the shared library to avoid
# building them twice. We post-process the static library instead of
# removing sources from args because sources could also come from
# any InternalDependency, see BuildTarget.add_deps().
static_lib . objects . append ( build . ExtractedObjects ( shared_lib , shared_lib . sources , shared_lib . generated , [ ] ) )
static_lib . sources = [ ]
static_lib . generated = [ ]
# Compilers with no corresponding sources confuse the backend.
# Keep only compilers used for linking
static_lib . compilers = { k : v for k , v in static_lib . compilers . items ( ) if k in compilers . clink_langs }
return build . BothLibraries ( shared_lib , static_lib )
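# Illustrative sketch of both_libraries() and the object reuse decided above;
# the target name is hypothetical:
#
#   both = both_libraries('foo', 'foo.c')
#   # With b_staticpic=true (the default) and a non-Xcode backend, the static
#   # half reuses the shared half's object files instead of recompiling them.
#   static = both.get_static_lib()
#   shared = both.get_shared_lib()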
def build_library ( self , node , args , kwargs ) :
default_library = self . coredata . get_option ( OptionKey ( ' default_library ' , subproject = self . subproject ) )
if default_library == ' shared ' :
return self . build_target ( node , args , kwargs , build . SharedLibrary )
elif default_library == ' static ' :
return self . build_target ( node , args , kwargs , build . StaticLibrary )
elif default_library == ' both ' :
return self . build_both_libraries ( node , args , kwargs )
else :
raise InterpreterException ( f ' Unknown default_library value: { default_library } . ' )
def build_target ( self , node : mparser . BaseNode , args , kwargs , targetclass ) :
@FeatureNewKwargs ( ' build target ' , ' 0.42.0 ' , [ ' rust_crate_type ' , ' build_rpath ' , ' implicit_include_directories ' ] )
@FeatureNewKwargs ( ' build target ' , ' 0.41.0 ' , [ ' rust_args ' ] )
@FeatureNewKwargs ( ' build target ' , ' 0.38.0 ' , [ ' build_by_default ' ] )
@FeatureNewKwargs ( ' build target ' , ' 0.48.0 ' , [ ' gnu_symbol_visibility ' ] )
def build_target_decorator_caller ( self , node , args , kwargs ) :
return True
build_target_decorator_caller ( self , node , args , kwargs )
if not args :
raise InterpreterException ( ' Target does not have a name. ' )
name , * sources = args
for_machine = self . machine_from_native_kwarg ( kwargs )
if ' sources ' in kwargs :
sources + = listify ( kwargs [ ' sources ' ] )
sources = self . source_strings_to_files ( sources )
objs = extract_as_list ( kwargs , ' objects ' )
kwargs [ ' dependencies ' ] = extract_as_list ( kwargs , ' dependencies ' )
kwargs [ ' install_mode ' ] = self . _get_kwarg_install_mode ( kwargs )
if ' extra_files ' in kwargs :
ef = extract_as_list ( kwargs , ' extra_files ' )
kwargs [ ' extra_files ' ] = self . source_strings_to_files ( ef )
self . check_sources_exist ( os . path . join ( self . source_root , self . subdir ) , sources )
if targetclass not in { build . Executable , build . SharedLibrary , build . SharedModule , build . StaticLibrary , build . Jar } :
mlog . debug ( ' Unknown target type: ' , str ( targetclass ) )
raise RuntimeError ( ' Unreachable code ' )
self . kwarg_strings_to_includedirs ( kwargs )
# Filter out kwargs from other target types. For example 'soversion'
# passed to library() when default_library == 'static'.
kwargs = { k : v for k , v in kwargs . items ( ) if k in targetclass . known_kwargs }
srcs : T . List [ ' SourceInputs ' ] = [ ]
struct : T . Optional [ build . StructuredSources ] = build . StructuredSources ( )
for s in sources :
if isinstance ( s , build . StructuredSources ) :
struct = struct + s
else :
srcs . append ( s )
if not struct :
struct = None
else :
# Validate that we won't end up with two outputs with the same name.
# i.e, don't allow:
# [structured_sources('foo/bar.rs'), structured_sources('bar/bar.rs')]
for v in struct . sources . values ( ) :
outputs : T . Set [ str ] = set ( )
for f in v :
o : T . List [ str ]
if isinstance ( f , str ) :
o = [ os . path . basename ( f ) ]
elif isinstance ( f , mesonlib . File ) :
o = [ f . fname ]
else :
o = f . get_outputs ( )
conflicts = outputs . intersection ( o )
if conflicts :
raise InvalidArguments . from_node (
f " Conflicting sources in structured sources: { ' , ' . join ( sorted ( conflicts ) ) } " ,
node = node )
outputs . update ( o )
kwargs [ ' include_directories ' ] = self . extract_incdirs ( kwargs )
target = targetclass ( name , self . subdir , self . subproject , for_machine , srcs , struct , objs ,
self . environment , self . compilers [ for_machine ] , kwargs )
target . project_version = self . project_version
self . add_target ( name , target )
self . project_args_frozen = True
return target
def kwarg_strings_to_includedirs ( self , kwargs ) :
if ' d_import_dirs ' in kwargs :
items = mesonlib . extract_as_list ( kwargs , ' d_import_dirs ' )
cleaned_items = [ ]
for i in items :
if isinstance ( i , str ) :
# BW compatibility. This was permitted so we must support it
# for a few releases so people can transition to "correct"
# path declarations.
if os . path . normpath ( i ) . startswith ( self . environment . get_source_dir ( ) ) :
mlog . warning ( ''' Building a path to the source dir is not supported. Use a relative path instead.
This will become a hard error in the future . ''' , location=self.current_node)
i = os . path . relpath ( i , os . path . join ( self . environment . get_source_dir ( ) , self . subdir ) )
i = self . build_incdir_object ( [ i ] )
cleaned_items . append ( i )
kwargs [ ' d_import_dirs ' ] = cleaned_items
def add_stdlib_info ( self , target ) :
for l in target . compilers . keys ( ) :
dep = self . build . stdlibs [ target . for_machine ] . get ( l , None )
if dep :
target . add_deps ( dep )
def check_sources_exist ( self , subdir , sources ) :
for s in sources :
if not isinstance ( s , str ) :
continue # This means a generated source and they always exist.
fname = os . path . join ( subdir , s )
if not os . path . isfile ( fname ) :
raise InterpreterException ( f ' Tried to add non-existing source file { s } . ' )
# Only permit object extraction from the same subproject
def validate_extraction ( self , buildtarget : mesonlib . HoldableObject ) - > None :
if self . subproject != buildtarget . subproject :
raise InterpreterException ( ' Tried to extract objects from a different subproject. ' )
def is_subproject ( self ) - > bool :
return self . subproject != ' '
@typed_pos_args ( ' set_variable ' , str , object )
@noKwargs
@noArgsFlattening
@noSecondLevelHolderResolving
def func_set_variable ( self , node : mparser . BaseNode , args : T . Tuple [ str , object ] , kwargs : ' TYPE_kwargs ' ) - > None :
varname , value = args
self . set_variable ( varname , value , holderify = True )
@typed_pos_args ( ' get_variable ' , ( str , Disabler ) , optargs = [ object ] )
@noKwargs
@noArgsFlattening
@unholder_return
def func_get_variable ( self , node : mparser . BaseNode , args : T . Tuple [ T . Union [ str , Disabler ] , T . Optional [ object ] ] ,
kwargs : ' TYPE_kwargs ' ) - > ' TYPE_var ' :
varname , fallback = args
if isinstance ( varname , Disabler ) :
return varname
try :
return self . variables [ varname ]
except KeyError :
if fallback is not None :
return self . _holderify ( fallback )
raise InterpreterException ( f ' Tried to get unknown variable " { varname } " . ' )
@typed_pos_args ( ' is_variable ' , str )
@noKwargs
def func_is_variable ( self , node : mparser . BaseNode , args : T . Tuple [ str ] , kwargs : ' TYPE_kwargs ' ) - > bool :
return args [ 0 ] in self . variables
@FeatureNew ( ' unset_variable ' , ' 0.60.0 ' )
@typed_pos_args ( ' unset_variable ' , str )
@noKwargs
def func_unset_variable ( self , node : mparser . BaseNode , args : T . Tuple [ str ] , kwargs : ' TYPE_kwargs ' ) - > None :
varname = args [ 0 ]
try :
del self . variables [ varname ]
except KeyError :
raise InterpreterException ( f ' Tried to unset unknown variable " { varname } " . ' )
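# Illustrative sketch of the variable functions above; the names and values
# are hypothetical:
#
#   set_variable('answer', 42)
#   get_variable('answer')             # -> 42
#   get_variable('missing', 'dflt')    # -> 'dflt' fallback instead of an error
#   is_variable('answer')              # -> true
#   unset_variable('answer')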
@staticmethod
def machine_from_native_kwarg ( kwargs : T . Dict [ str , T . Any ] ) - > MachineChoice :
native = kwargs . get ( ' native ' , False )
if not isinstance ( native , bool ) :
raise InvalidArguments ( ' Argument to " native " must be a boolean. ' )
return MachineChoice . BUILD if native else MachineChoice . HOST
@FeatureNew ( ' is_disabler ' , ' 0.52.0 ' )
@typed_pos_args ( ' is_disabler ' , object )
@noKwargs
def func_is_disabler ( self , node : mparser . BaseNode , args : T . Tuple [ object ] , kwargs : ' TYPE_kwargs ' ) - > bool :
return isinstance ( args [ 0 ] , Disabler )
@noKwargs
@FeatureNew ( ' range ' , ' 0.58.0 ' )
@typed_pos_args ( ' range ' , int , optargs = [ int , int ] )
def func_range ( self , node , args : T . Tuple [ int , T . Optional [ int ] , T . Optional [ int ] ] , kwargs : T . Dict [ str , T . Any ] ) - > P_OBJ . RangeHolder :
start , stop , step = args
# Just like Python's range, we allow range(stop), range(start, stop), or
# range(start, stop, step)
if stop is None :
stop = start
start = 0
if step is None :
step = 1
# This is more strict than Python's range()
if start < 0 :
raise InterpreterException ( ' start cannot be negative ' )
if stop < start :
raise InterpreterException ( ' stop cannot be less than start ' )
if step < 1 :
raise InterpreterException ( ' step must be >=1 ' )
return P_OBJ . RangeHolder ( start , stop , step , subproject = self . subproject )
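# Illustrative sketch of range() in meson.build (0.58.0+), mirroring the
# Python-style defaults applied above but restricted to non-negative,
# ascending ranges:
#
#   foreach i : range(5)            # 0, 1, 2, 3, 4
#   foreach i : range(2, 8)         # 2 .. 7
#   foreach i : range(0, 10, 2)     # 0, 2, 4, 6, 8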