# SPDX-License-Identifier: Apache-2.0
# Copyright 2013-2024 The Meson development team
# Copyright © 2023-2024 Intel Corporation
from __future__ import annotations
import copy
from . import mlog, mparser, options
import pickle, os, uuid
import sys
from itertools import chain
from pathlib import PurePath
from collections import OrderedDict, abc
from dataclasses import dataclass
from .mesonlib import (
    MesonBugException,
    MesonException, EnvironmentException, MachineChoice, PerMachine,
    PerMachineDefaultable,
    OptionKey, OptionType, stringlistify,
    pickle_load
)
import ast
import argparse
import configparser
import enum
import shlex
import typing as T
if T.TYPE_CHECKING:
    from typing_extensions import Protocol

    from . import dependencies
    from .compilers.compilers import Compiler, CompileResult, RunResult, CompileCheckMode
    from .dependencies.detect import TV_DepID
    from .environment import Environment
    from .mesonlib import FileOrString
    from .cmake.traceparser import CMakeCacheEntry
    from .interpreterbase import SubProject

    class SharedCMDOptions(Protocol):

        """Representation of command line options from Meson setup, configure,
        and dist.

        :param projectoptions: The raw list of command line options given
        :param cmd_line_options: command line options parsed into an OptionKey:
            str mapping
        """

        cmd_line_options: T.Dict[OptionKey, str]
        projectoptions: T.List[str]
        cross_file: T.List[str]
        native_file: T.List[str]

    OptionDictType = T.Union[T.Dict[str, 'options.UserOption[T.Any]'], 'OptionsView']
    MutableKeyedOptionDictType = T.Dict['OptionKey', 'options.UserOption[T.Any]']
    KeyedOptionDictType = T.Union[MutableKeyedOptionDictType, 'OptionsView']
    CompilerCheckCacheKey = T.Tuple[T.Tuple[str, ...], str, FileOrString, T.Tuple[str, ...], CompileCheckMode]
    # code, args
    RunCheckCacheKey = T.Tuple[str, T.Tuple[str, ...]]

    # typeshed
    StrOrBytesPath = T.Union[str, bytes, os.PathLike[str], os.PathLike[bytes]]

# Check major_versions_differ() if changing versioning scheme.
#
# Pip requires that RCs are named like this: '0.1.0.rc1'
# But the corresponding Git tag needs to be '0.1.0rc1'
version = '1.4.99'

# The next stable version when we are in dev. This is used to allow projects to
# require meson version >=1.2.0 when using 1.1.99. FeatureNew won't warn when
# using a feature introduced in 1.2.0 when using Meson 1.1.99.
stable_version = version
if stable_version.endswith('.99'):
    stable_version_array = stable_version.split('.')
    stable_version_array[-1] = '0'
    stable_version_array[-2] = str(int(stable_version_array[-2]) + 1)
    stable_version = '.'.join(stable_version_array)
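
# For illustration (comment only, not executed): under the scheme above, a
# development version such as '1.4.99' maps to the next stable version '1.5.0':
# the trailing '99' is replaced with '0' and the minor component is bumped.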

def get_genvs_default_buildtype_list() -> list[str]:
    # just debug, debugoptimized, and release for now
    # but this should probably be configurable through some extra option, alongside --genvslite.
    return options.buildtypelist[1:-2]

class MesonVersionMismatchException(MesonException):
    '''Build directory generated with Meson version is incompatible with current version'''

    def __init__(self, old_version: str, current_version: str, extra_msg: str = '') -> None:
        super().__init__(f'Build directory has been generated with Meson version {old_version}, '
                         f'which is incompatible with the current version {current_version}.'
                         + extra_msg)
        self.old_version = old_version
        self.current_version = current_version

class DependencyCacheType(enum.Enum):

    OTHER = 0
    PKG_CONFIG = 1
    CMAKE = 2

    @classmethod
    def from_type(cls, dep: 'dependencies.Dependency') -> 'DependencyCacheType':
        # As more types gain search overrides they'll need to be added here
        if dep.type_name == 'pkgconfig':
            return cls.PKG_CONFIG
        if dep.type_name == 'cmake':
            return cls.CMAKE
        return cls.OTHER

class DependencySubCache:

    def __init__(self, type_: DependencyCacheType):
        self.types = [type_]
        self.__cache: T.Dict[T.Tuple[str, ...], 'dependencies.Dependency'] = {}

    def __getitem__(self, key: T.Tuple[str, ...]) -> 'dependencies.Dependency':
        return self.__cache[key]

    def __setitem__(self, key: T.Tuple[str, ...], value: 'dependencies.Dependency') -> None:
        self.__cache[key] = value

    def __contains__(self, key: T.Tuple[str, ...]) -> bool:
        return key in self.__cache

    def values(self) -> T.Iterable['dependencies.Dependency']:
        return self.__cache.values()

class DependencyCache:

    """Class that stores a cache of dependencies.

    This class is meant to encapsulate the fact that we need multiple keys to
    successfully look up a dependency, while providing a simple get/put
    interface. A rough usage sketch follows the class definition.
    """

    def __init__(self, builtins: 'KeyedOptionDictType', for_machine: MachineChoice):
        self.__cache: T.MutableMapping[TV_DepID, DependencySubCache] = OrderedDict()
        self.__builtins = builtins
        self.__pkg_conf_key = OptionKey('pkg_config_path', machine=for_machine)
        self.__cmake_key = OptionKey('cmake_prefix_path', machine=for_machine)

    def __calculate_subkey(self, type_: DependencyCacheType) -> T.Tuple[str, ...]:
        data: T.Dict[DependencyCacheType, T.List[str]] = {
            DependencyCacheType.PKG_CONFIG: stringlistify(self.__builtins[self.__pkg_conf_key].value),
            DependencyCacheType.CMAKE: stringlistify(self.__builtins[self.__cmake_key].value),
            DependencyCacheType.OTHER: [],
        }
        assert type_ in data, 'Someone forgot to update subkey calculations for a new type'
        return tuple(data[type_])

    def __iter__(self) -> T.Iterator['TV_DepID']:
        return self.keys()

    def put(self, key: 'TV_DepID', dep: 'dependencies.Dependency') -> None:
        t = DependencyCacheType.from_type(dep)
        if key not in self.__cache:
            self.__cache[key] = DependencySubCache(t)
        subkey = self.__calculate_subkey(t)
        self.__cache[key][subkey] = dep

    def get(self, key: 'TV_DepID') -> T.Optional['dependencies.Dependency']:
        """Get a value from the cache.

        If there is no cache entry then None will be returned.
        """
        try:
            val = self.__cache[key]
        except KeyError:
            return None

        for t in val.types:
            subkey = self.__calculate_subkey(t)
            try:
                return val[subkey]
            except KeyError:
                pass
        return None

    def values(self) -> T.Iterator['dependencies.Dependency']:
        for c in self.__cache.values():
            yield from c.values()

    def keys(self) -> T.Iterator['TV_DepID']:
        return iter(self.__cache.keys())

    def items(self) -> T.Iterator[T.Tuple['TV_DepID', T.List['dependencies.Dependency']]]:
        for k, v in self.__cache.items():
            vs: T.List[dependencies.Dependency] = []
            for t in v.types:
                subkey = self.__calculate_subkey(t)
                if subkey in v:
                    vs.append(v[subkey])
            yield k, vs

    def clear(self) -> None:
        self.__cache.clear()
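
# Rough usage sketch for DependencyCache (illustrative comment only; 'dep_id'
# and 'dep' are assumed placeholder values, not part of this module). Entries
# are keyed first by the dependency identity and then by a subkey derived from
# the relevant search-path option, so changing e.g. 'pkg_config_path' makes an
# old entry miss on lookup instead of returning a stale dependency:
#
#   cache = DependencyCache(coredata.options, MachineChoice.HOST)
#   cache.put(dep_id, dep)    # dep_id is a TV_DepID tuple
#   dep = cache.get(dep_id)   # None if no entry matches the current subkey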

class CMakeStateCache:
    """Class that stores internal CMake compiler states.

    This cache is used to reduce the startup overhead of CMake by caching
    all internal CMake compiler variables.
    """

    def __init__(self) -> None:
        self.__cache: T.Dict[str, T.Dict[str, T.List[str]]] = {}
        self.cmake_cache: T.Dict[str, 'CMakeCacheEntry'] = {}

    def __iter__(self) -> T.Iterator[T.Tuple[str, T.Dict[str, T.List[str]]]]:
        return iter(self.__cache.items())

    def items(self) -> T.Iterator[T.Tuple[str, T.Dict[str, T.List[str]]]]:
        return iter(self.__cache.items())

    def update(self, language: str, variables: T.Dict[str, T.List[str]]):
        if language not in self.__cache:
            self.__cache[language] = {}
        self.__cache[language].update(variables)

    @property
    def languages(self) -> T.Set[str]:
        return set(self.__cache.keys())

# Can't bind this near the class method it seems, sadly.
_V = T.TypeVar('_V')

# This class contains all data that must persist over multiple
# invocations of Meson. It is roughly the same thing as
# cmakecache.
class CoreData:

    def __init__(self, options: SharedCMDOptions, scratch_dir: str, meson_command: T.List[str]):
        self.lang_guids = {
            'default': '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942',
            'c': '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942',
            'cpp': '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942',
            'test': '3AC096D0-A1C2-E12C-1390-A8335801FDAB',
            'directory': '2150E333-8FDC-42A3-9474-1A3956D46DE8',
        }
        self.test_guid = str(uuid.uuid4()).upper()
        self.regen_guid = str(uuid.uuid4()).upper()
        self.install_guid = str(uuid.uuid4()).upper()
        self.meson_command = meson_command
        self.target_guids = {}
        self.version = version
        self.options: 'MutableKeyedOptionDictType' = {}
        self.cross_files = self.__load_config_files(options, scratch_dir, 'cross')
        self.compilers: PerMachine[T.Dict[str, Compiler]] = PerMachine(OrderedDict(), OrderedDict())

        # Stores the (name, hash) of the options file. The name will be either
        # "meson_options.txt" or "meson.options".
        # This is used by mconf to reload the option file if it's changed.
        self.options_files: T.Dict[SubProject, T.Optional[T.Tuple[str, str]]] = {}

        # Set of subprojects that have already been initialized once, this is
        # required to be stored and reloaded with the coredata, as we don't
        # want to overwrite options for such subprojects.
        self.initialized_subprojects: T.Set[str] = set()

        # For host == build configurations these caches should be the same.
        self.deps: PerMachine[DependencyCache] = PerMachineDefaultable.default(
            self.is_cross_build(),
            DependencyCache(self.options, MachineChoice.BUILD),
            DependencyCache(self.options, MachineChoice.HOST))

        self.compiler_check_cache: T.Dict['CompilerCheckCacheKey', 'CompileResult'] = OrderedDict()
        self.run_check_cache: T.Dict['RunCheckCacheKey', 'RunResult'] = OrderedDict()

        # CMake cache
        self.cmake_cache: PerMachine[CMakeStateCache] = PerMachine(CMakeStateCache(), CMakeStateCache())

        # Only to print a warning if it changes between Meson invocations.
        self.config_files = self.__load_config_files(options, scratch_dir, 'native')
        self.builtin_options_libdir_cross_fixup()
        self.init_builtins('')

    @staticmethod
    def __load_config_files(options: SharedCMDOptions, scratch_dir: str, ftype: str) -> T.List[str]:
        # Need to try and make the passed filenames absolute because when the
        # files are parsed later we'll have chdir()d.
        if ftype == 'cross':
            filenames = options.cross_file
        else:
            filenames = options.native_file

        if not filenames:
            return []

        found_invalid: T.List[str] = []
        missing: T.List[str] = []
        real: T.List[str] = []
        for i, f in enumerate(filenames):
            f = os.path.expanduser(os.path.expandvars(f))
            if os.path.exists(f):
                if os.path.isfile(f):
                    real.append(os.path.abspath(f))
                    continue
                elif os.path.isdir(f):
                    found_invalid.append(os.path.abspath(f))
                else:
                    # in this case we've been passed some kind of pipe, copy
                    # the contents of that file into the meson private (scratch)
                    # directory so that it can be re-read when wiping/reconfiguring
                    copy = os.path.join(scratch_dir, f'{uuid.uuid4()}.{ftype}.ini')
                    with open(f, encoding='utf-8') as rf:
                        with open(copy, 'w', encoding='utf-8') as wf:
                            wf.write(rf.read())
                    real.append(copy)

                    # Also replace the command line argument, as the pipe
                    # probably won't exist on reconfigure
                    filenames[i] = copy
                continue
            if sys.platform != 'win32':
                paths = [
                    os.environ.get('XDG_DATA_HOME', os.path.expanduser('~/.local/share')),
                ] + os.environ.get('XDG_DATA_DIRS', '/usr/local/share:/usr/share').split(':')
                for path in paths:
                    path_to_try = os.path.join(path, 'meson', ftype, f)
                    if os.path.isfile(path_to_try):
                        real.append(path_to_try)
                        break
                else:
                    missing.append(f)
            else:
                missing.append(f)

        if missing:
            if found_invalid:
                mlog.log('Found invalid candidates for', ftype, 'file:', *found_invalid)
            mlog.log('Could not find any valid candidate for', ftype, 'files:', *missing)
            raise MesonException(f'Cannot find specified {ftype} file: {f}')
        return real

    def builtin_options_libdir_cross_fixup(self) -> None:
        # By default set libdir to "lib" when cross compiling since
        # getting the "system default" is always wrong on multiarch
        # platforms as it gets a value like lib/x86_64-linux-gnu.
        if self.cross_files:
            options.BUILTIN_OPTIONS[OptionKey('libdir')].default = 'lib'

    def sanitize_prefix(self, prefix: str) -> str:
        prefix = os.path.expanduser(prefix)
        if not os.path.isabs(prefix):
            raise MesonException(f'prefix value {prefix!r} must be an absolute path')
        if prefix.endswith('/') or prefix.endswith('\\'):
            # On Windows we need to preserve the trailing slash if the
            # string is of type 'C:\' because 'C:' is not an absolute path.
            if len(prefix) == 3 and prefix[1] == ':':
                pass
            # If prefix is a single character, preserve it since it is
            # the root directory.
            elif len(prefix) == 1:
                pass
            else:
                prefix = prefix[:-1]
        return prefix

    def sanitize_dir_option_value(self, prefix: str, option: OptionKey, value: T.Any) -> T.Any:
        '''
        If the option is an installation directory option, the value is an
        absolute path and resides within prefix, return the value
        as a path relative to the prefix. Otherwise, return it as is.

        This way everyone can do f.ex. get_option('libdir') and usually get
        the library directory relative to prefix, even though it really
        should not be relied upon.
        '''
        try:
            value = PurePath(value)
        except TypeError:
            return value
        if option.name.endswith('dir') and value.is_absolute() and \
           option not in options.BUILTIN_DIR_NOPREFIX_OPTIONS:
            try:
                # Try to relativize the path.
                value = value.relative_to(prefix)
            except ValueError:
                # Path is not relative, let's keep it as is.
                pass
            if '..' in value.parts:
                raise MesonException(
                    f'The value of the \'{option}\' option is \'{value}\' but '
                    'directory options are not allowed to contain \'..\'.\n'
                    f'If you need a path outside of the {prefix!r} prefix, '
                    'please use an absolute path.'
                )
        # .as_posix() keeps the posix-like file separators Meson uses.
        return value.as_posix()
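
    # Example of the relativization above (illustrative only): with
    # prefix='/usr/local', a 'libdir' value of '/usr/local/lib' is stored as
    # 'lib', while '/opt/lib' stays absolute because it is outside the prefix.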

    def init_builtins(self, subproject: str) -> None:
        # Create builtin options with default values
        for key, opt in options.BUILTIN_OPTIONS.items():
            self.add_builtin_option(self.options, key.evolve(subproject=subproject), opt)
        for for_machine in iter(MachineChoice):
            for key, opt in options.BUILTIN_OPTIONS_PER_MACHINE.items():
                self.add_builtin_option(self.options, key.evolve(subproject=subproject, machine=for_machine), opt)

    @staticmethod
    def add_builtin_option(opts_map: 'MutableKeyedOptionDictType', key: OptionKey,
                           opt: 'options.BuiltinOption') -> None:
        if key.subproject:
            if opt.yielding:
                # This option is global and not per-subproject
                return
            value = opts_map[key.as_root()].value
        else:
            value = None
        opts_map[key] = opt.init_option(key, value, options.default_prefix())

    def init_backend_options(self, backend_name: str) -> None:
        if backend_name == 'ninja':
            self.options[OptionKey('backend_max_links')] = options.UserIntegerOption(
                'backend_max_links',
                'Maximum number of linker processes to run or 0 for no '
                'limit',
                (0, None, 0))
        elif backend_name.startswith('vs'):
            self.options[OptionKey('backend_startup_project')] = options.UserStringOption(
                'backend_startup_project',
                'Default project to execute in Visual Studio',
                '')

    def get_option(self, key: OptionKey) -> T.Union[T.List[str], str, int, bool]:
        try:
            v = self.options[key].value
            return v
        except KeyError:
            pass

        try:
            v = self.options[key.as_root()]
            if v.yielding:
                return v.value
        except KeyError:
            pass

        raise MesonException(f'Tried to get unknown builtin option {str(key)}')

    def set_option(self, key: OptionKey, value, first_invocation: bool = False) -> bool:
        dirty = False
        if key.is_builtin():
            if key.name == 'prefix':
                value = self.sanitize_prefix(value)
            else:
                prefix = self.options[OptionKey('prefix')].value
                value = self.sanitize_dir_option_value(prefix, key, value)

        try:
            opt = self.options[key]
        except KeyError:
            raise MesonException(f'Tried to set unknown builtin option {str(key)}')

        if opt.deprecated is True:
            mlog.deprecation(f'Option {key.name!r} is deprecated')
        elif isinstance(opt.deprecated, list):
            for v in opt.listify(value):
                if v in opt.deprecated:
                    mlog.deprecation(f'Option {key.name!r} value {v!r} is deprecated')
        elif isinstance(opt.deprecated, dict):
            def replace(v):
                newvalue = opt.deprecated.get(v)
                if newvalue is not None:
                    mlog.deprecation(f'Option {key.name!r} value {v!r} is replaced by {newvalue!r}')
                    return newvalue
                return v
            newvalue = [replace(v) for v in opt.listify(value)]
            value = ','.join(newvalue)
        elif isinstance(opt.deprecated, str):
            # Option is deprecated and replaced by another. Note that a project
            # option could be replaced by a built-in or module option, which is
            # why we use OptionKey.from_string(newname) instead of
            # key.evolve(newname). We set the value on both the old and new names,
            # assuming they accept the same value. That can be achieved, for
            # example, by adding the values of the old option as deprecated on
            # the new option, as when a boolean option is replaced by a feature
            # option with a different name.
            newname = opt.deprecated
            newkey = OptionKey.from_string(newname).evolve(subproject=key.subproject)
            mlog.deprecation(f'Option {key.name!r} is replaced by {newname!r}')
            dirty |= self.set_option(newkey, value, first_invocation)

        changed = opt.set_value(value)
        if changed and opt.readonly and not first_invocation:
            raise MesonException(f'Tried to modify read-only option {str(key)!r}')
        dirty |= changed

        if key.name == 'buildtype':
            dirty |= self._set_others_from_buildtype(value)

        return dirty

    def clear_cache(self) -> None:
        self.deps.host.clear()
        self.deps.build.clear()
        self.compiler_check_cache.clear()
        self.run_check_cache.clear()

    def get_nondefault_buildtype_args(self) -> T.List[T.Union[T.Tuple[str, str, str], T.Tuple[str, bool, bool]]]:
        result: T.List[T.Union[T.Tuple[str, str, str], T.Tuple[str, bool, bool]]] = []
        value = self.options[OptionKey('buildtype')].value
        if value == 'plain':
            opt = 'plain'
            debug = False
        elif value == 'debug':
            opt = '0'
            debug = True
        elif value == 'debugoptimized':
            opt = '2'
            debug = True
        elif value == 'release':
            opt = '3'
            debug = False
        elif value == 'minsize':
            opt = 's'
            debug = True
        else:
            assert value == 'custom'
            return []
        actual_opt = self.options[OptionKey('optimization')].value
        actual_debug = self.options[OptionKey('debug')].value
        if actual_opt != opt:
            result.append(('optimization', actual_opt, opt))
        if actual_debug != debug:
            result.append(('debug', actual_debug, debug))
        return result

    def _set_others_from_buildtype(self, value: str) -> bool:
        dirty = False

        if value == 'plain':
            opt = 'plain'
            debug = False
        elif value == 'debug':
            opt = '0'
            debug = True
        elif value == 'debugoptimized':
            opt = '2'
            debug = True
        elif value == 'release':
            opt = '3'
            debug = False
        elif value == 'minsize':
            opt = 's'
            debug = True
        else:
            assert value == 'custom'
            return False

        dirty |= self.options[OptionKey('optimization')].set_value(opt)
        dirty |= self.options[OptionKey('debug')].set_value(debug)

        return dirty

    @staticmethod
    def is_per_machine_option(optname: OptionKey) -> bool:
        if optname.as_host() in options.BUILTIN_OPTIONS_PER_MACHINE:
            return True
        return optname.lang is not None

    def get_external_args(self, for_machine: MachineChoice, lang: str) -> T.List[str]:
        # mypy cannot analyze type of OptionKey
        return T.cast('T.List[str]', self.options[OptionKey('args', machine=for_machine, lang=lang)].value)

    def get_external_link_args(self, for_machine: MachineChoice, lang: str) -> T.List[str]:
        # mypy cannot analyze type of OptionKey
        return T.cast('T.List[str]', self.options[OptionKey('link_args', machine=for_machine, lang=lang)].value)

    def update_project_options(self, options: 'MutableKeyedOptionDictType', subproject: SubProject) -> None:
        for key, value in options.items():
            if not key.is_project():
                continue
            if key not in self.options:
                self.options[key] = value
                continue
            if key.subproject != subproject:
                raise MesonBugException(f'Tried to set an option for subproject {key.subproject} from {subproject}!')

            oldval = self.options[key]
            if type(oldval) is not type(value):
                self.options[key] = value
            elif oldval.choices != value.choices:
                # If the choices have changed, use the new value, but attempt
                # to keep the old options. If they are not valid keep the new
                # defaults but warn.
                self.options[key] = value
                try:
                    value.set_value(oldval.value)
                except MesonException:
                    mlog.warning(f'Old value(s) of {key} are no longer valid, resetting to default ({value.value}).',
                                 fatal=False)

        # Find any extraneous keys for this project and remove them
        for key in self.options.keys() - options.keys():
            if key.is_project() and key.subproject == subproject:
                del self.options[key]

    def is_cross_build(self, when_building_for: MachineChoice = MachineChoice.HOST) -> bool:
        if when_building_for == MachineChoice.BUILD:
            return False
        return len(self.cross_files) > 0

    def copy_build_options_from_regular_ones(self) -> bool:
        dirty = False
        assert not self.is_cross_build()
        for k in options.BUILTIN_OPTIONS_PER_MACHINE:
            o = self.options[k]
            dirty |= self.options[k.as_build()].set_value(o.value)
        for bk, bv in self.options.items():
            if bk.machine is MachineChoice.BUILD:
                hk = bk.as_host()
                try:
                    hv = self.options[hk]
                    dirty |= bv.set_value(hv.value)
                except KeyError:
                    continue

        return dirty

    def set_options(self, opts_to_set: T.Dict[OptionKey, T.Any], subproject: str = '', first_invocation: bool = False) -> bool:
        dirty = False
        if not self.is_cross_build():
            opts_to_set = {k: v for k, v in opts_to_set.items() if k.machine is not MachineChoice.BUILD}
        # Set prefix first because it's needed to sanitize other options
        pfk = OptionKey('prefix')
        if pfk in opts_to_set:
            prefix = self.sanitize_prefix(opts_to_set[pfk])
            dirty |= self.options[OptionKey('prefix')].set_value(prefix)
            for key in options.BUILTIN_DIR_NOPREFIX_OPTIONS:
                if key not in opts_to_set:
                    dirty |= self.options[key].set_value(options.BUILTIN_OPTIONS[key].prefixed_default(key, prefix))

        unknown_options: T.List[OptionKey] = []
        for k, v in opts_to_set.items():
            if k == pfk:
                continue
            elif k in self.options:
                dirty |= self.set_option(k, v, first_invocation)
            elif k.machine != MachineChoice.BUILD and k.type != OptionType.COMPILER:
                unknown_options.append(k)
        if unknown_options:
            unknown_options_str = ', '.join(sorted(str(s) for s in unknown_options))
            sub = f'In subproject {subproject}: ' if subproject else ''
            raise MesonException(f'{sub}Unknown options: "{unknown_options_str}"')

        if not self.is_cross_build():
            dirty |= self.copy_build_options_from_regular_ones()

        return dirty

    def set_default_options(self, default_options: T.MutableMapping[OptionKey, str], subproject: str, env: 'Environment') -> None:
        from .compilers import base_options

        # Main project can set default options on subprojects, but subprojects
        # can only set default options on themselves.
        # Preserve order: if env.options has 'buildtype' it must come after
        # 'optimization' if it is in default_options.
        options: T.MutableMapping[OptionKey, T.Any] = OrderedDict()
        for k, v in default_options.items():
            if not subproject or k.subproject == subproject:
                options[k] = v
        options.update(env.options)
        env.options = options

        # Create a subset of options, keeping only project and builtin
        # options for this subproject.
        # Language and backend specific options will be set later when adding
        # languages and setting the backend (builtin options must be set first
        # to know which backend we'll use).
        options = OrderedDict()
        for k, v in env.options.items():
            # If this is a subproject, don't use other subproject options
            if k.subproject and k.subproject != subproject:
                continue
            # If the option is a builtin and is yielding then it's not allowed per subproject.
            #
            # Always test this using the HOST machine, as many builtin options
            # are not valid for the BUILD machine, but the yielding value does
            # not differ between them even when they are valid for both.
            if subproject and k.is_builtin() and self.options[k.evolve(subproject='', machine=MachineChoice.HOST)].yielding:
                continue
            # Skip base, compiler, and backend options, they are handled when
            # adding languages and setting backend.
            if k.type in {OptionType.COMPILER, OptionType.BACKEND}:
                continue
            if k.type == OptionType.BASE and k.as_root() in base_options:
                # set_options will report unknown base options
                continue
            options[k] = v

        self.set_options(options, subproject=subproject, first_invocation=env.first_invocation)

    def add_compiler_options(self, options: MutableKeyedOptionDictType, lang: str, for_machine: MachineChoice,
                             env: Environment, subproject: str) -> None:
        for k, o in options.items():
            value = env.options.get(k)
            if value is not None:
                o.set_value(value)
                if not subproject:
                    self.options[k] = o  # override compiler option on reconfigure
            self.options.setdefault(k, o)

            if subproject:
                sk = k.evolve(subproject=subproject)
                value = env.options.get(sk) or value
                if value is not None:
                    o.set_value(value)
                    self.options[sk] = o  # override compiler option on reconfigure
                self.options.setdefault(sk, o)

    def add_lang_args(self, lang: str, comp: T.Type['Compiler'],
                      for_machine: MachineChoice, env: 'Environment') -> None:
        """Add global language arguments that are needed before compiler/linker detection."""
        from .compilers import compilers
        # These options are all new at this point, because the compiler is
        # responsible for adding its own options, thus calling
        # `self.options.update()` is perfectly safe.
        self.options.update(compilers.get_global_options(lang, comp, for_machine, env))

    def process_compiler_options(self, lang: str, comp: Compiler, env: Environment, subproject: str) -> None:
        from . import compilers

        self.add_compiler_options(comp.get_options(), lang, comp.for_machine, env, subproject)

        enabled_opts: T.List[OptionKey] = []
        for key in comp.base_options:
            if subproject:
                skey = key.evolve(subproject=subproject)
            else:
                skey = key
            if skey not in self.options:
                self.options[skey] = copy.deepcopy(compilers.base_options[key])
                if skey in env.options:
                    self.options[skey].set_value(env.options[skey])
                    enabled_opts.append(skey)
                elif subproject and key in env.options:
                    self.options[skey].set_value(env.options[key])
                    enabled_opts.append(skey)
                if subproject and key not in self.options:
                    self.options[key] = copy.deepcopy(self.options[skey])
            elif skey in env.options:
                self.options[skey].set_value(env.options[skey])
            elif subproject and key in env.options:
                self.options[skey].set_value(env.options[key])
        self.emit_base_options_warnings(enabled_opts)

    def emit_base_options_warnings(self, enabled_opts: T.List[OptionKey]) -> None:
        if OptionKey('b_bitcode') in enabled_opts:
            mlog.warning('Base option \'b_bitcode\' is enabled, which is incompatible with many linker options. Incompatible options such as \'b_asneeded\' have been disabled.', fatal=False)
            mlog.warning('Please see https://mesonbuild.com/Builtin-options.html#Notes_about_Apple_Bitcode_support for more details.', fatal=False)

class CmdLineFileParser(configparser.ConfigParser):
    def __init__(self) -> None:
        # We don't want ':' as key delimiter, otherwise it would break when
        # storing subproject options like "subproject:option=value"
        # (see the example after this class).
        super().__init__(delimiters=['='], interpolation=None)

    def read(self, filenames: T.Union['StrOrBytesPath', T.Iterable['StrOrBytesPath']], encoding: T.Optional[str] = 'utf-8') -> T.List[str]:
        return super().read(filenames, encoding)

    def optionxform(self, optionstr: str) -> str:
        # Don't call str.lower() on keys
        return optionstr
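
# Example of why '=' is the only delimiter (illustrative only): with
# configparser's default delimiters a line such as
#   subproject:option=value
# would be split at the ':', turning 'subproject' into the key; with '=' alone
# it round-trips as the key 'subproject:option' and the value 'value'.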

class MachineFileParser():
    def __init__(self, filenames: T.List[str], sourcedir: str) -> None:
        self.parser = CmdLineFileParser()
        self.constants: T.Dict[str, T.Union[str, bool, int, T.List[str]]] = {'True': True, 'False': False}
        self.sections: T.Dict[str, T.Dict[str, T.Union[str, bool, int, T.List[str]]]] = {}

        for fname in filenames:
            try:
                with open(fname, encoding='utf-8') as f:
                    content = f.read()
            except UnicodeDecodeError as e:
                raise EnvironmentException(f'Malformed machine file {fname!r} failed to parse as unicode: {e}')

            content = content.replace('@GLOBAL_SOURCE_ROOT@', sourcedir)
            content = content.replace('@DIRNAME@', os.path.dirname(fname))
            try:
                self.parser.read_string(content, fname)
            except configparser.Error as e:
                raise EnvironmentException(f'Malformed machine file: {e}')

        # Parse [constants] first so they can be used in other sections
        if self.parser.has_section('constants'):
            self.constants.update(self._parse_section('constants'))

        for s in self.parser.sections():
            if s == 'constants':
                continue
            self.sections[s] = self._parse_section(s)

    def _parse_section(self, s: str) -> T.Dict[str, T.Union[str, bool, int, T.List[str]]]:
        self.scope = self.constants.copy()
        section: T.Dict[str, T.Union[str, bool, int, T.List[str]]] = {}
        for entry, value in self.parser.items(s):
            if ' ' in entry or '\t' in entry or "'" in entry or '"' in entry:
                raise EnvironmentException(f'Malformed variable name {entry!r} in machine file.')
            # Windows paths...
            value = value.replace('\\', '\\\\')
            try:
                ast = mparser.Parser(value, 'machinefile').parse()
                if not ast.lines:
                    raise EnvironmentException('value cannot be empty')
                res = self._evaluate_statement(ast.lines[0])
            except MesonException as e:
                raise EnvironmentException(f'Malformed value in machine file variable {entry!r}: {str(e)}.')
            except KeyError as e:
                raise EnvironmentException(f'Undefined constant {e.args[0]!r} in machine file variable {entry!r}.')
            section[entry] = res
            self.scope[entry] = res
        return section

    def _evaluate_statement(self, node: mparser.BaseNode) -> T.Union[str, bool, int, T.List[str]]:
        if isinstance(node, (mparser.StringNode)):
            return node.value
        elif isinstance(node, mparser.BooleanNode):
            return node.value
        elif isinstance(node, mparser.NumberNode):
            return node.value
        elif isinstance(node, mparser.ParenthesizedNode):
            return self._evaluate_statement(node.inner)
        elif isinstance(node, mparser.ArrayNode):
            # TODO: This is where recursive types would come in handy
            return [self._evaluate_statement(arg) for arg in node.args.arguments]
        elif isinstance(node, mparser.IdNode):
            return self.scope[node.value]
        elif isinstance(node, mparser.ArithmeticNode):
            l = self._evaluate_statement(node.left)
            r = self._evaluate_statement(node.right)
            if node.operation == 'add':
                if (isinstance(l, str) and isinstance(r, str)) or \
                   (isinstance(l, list) and isinstance(r, list)):
                    return l + r
            elif node.operation == 'div':
                if isinstance(l, str) and isinstance(r, str):
                    return os.path.join(l, r)
        raise EnvironmentException('Unsupported node type')

def parse_machine_files(filenames: T.List[str], sourcedir: str):
    parser = MachineFileParser(filenames, sourcedir)
    return parser.sections
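
# Minimal sketch of a machine file accepted by MachineFileParser (hypothetical
# contents, shown only as an illustration):
#
#   [constants]
#   toolchain = '/opt/cross'
#   triple = 'aarch64-linux-gnu'
#
#   [binaries]
#   # 'div' joins path components, 'add' concatenates strings
#   c = toolchain / 'bin' / (triple + '-gcc')
#
# parse_machine_files(['cross.ini'], source_dir) would then return the parsed
# sections as plain dicts, with the [constants] values substituted in.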

def get_cmd_line_file(build_dir: str) -> str:
    return os.path.join(build_dir, 'meson-private', 'cmd_line.txt')

def read_cmd_line_file(build_dir: str, options: SharedCMDOptions) -> None:
    filename = get_cmd_line_file(build_dir)
    if not os.path.isfile(filename):
        return

    config = CmdLineFileParser()
    config.read(filename)

    # Do a copy because config is not really a dict. options.cmd_line_options
    # overrides values from the file.
    d = {OptionKey.from_string(k): v for k, v in config['options'].items()}
    d.update(options.cmd_line_options)
    options.cmd_line_options = d

    properties = config['properties']
    if not options.cross_file:
        options.cross_file = ast.literal_eval(properties.get('cross_file', '[]'))
    if not options.native_file:
        # This will be a string in the form: "['first', 'second', ...]", use
        # literal_eval to get it into the list of strings.
        options.native_file = ast.literal_eval(properties.get('native_file', '[]'))

def write_cmd_line_file(build_dir: str, options: SharedCMDOptions) -> None:
    filename = get_cmd_line_file(build_dir)
    config = CmdLineFileParser()

    properties: OrderedDict[str, str] = OrderedDict()
    if options.cross_file:
        properties['cross_file'] = options.cross_file
    if options.native_file:
        properties['native_file'] = options.native_file

    config['options'] = {str(k): str(v) for k, v in options.cmd_line_options.items()}
    config['properties'] = properties
    with open(filename, 'w', encoding='utf-8') as f:
        config.write(f)

def update_cmd_line_file(build_dir: str, options: SharedCMDOptions) -> None:
    filename = get_cmd_line_file(build_dir)
    config = CmdLineFileParser()
    config.read(filename)
    config['options'].update({str(k): str(v) for k, v in options.cmd_line_options.items()})
    with open(filename, 'w', encoding='utf-8') as f:
        config.write(f)

def format_cmd_line_options(options: SharedCMDOptions) -> str:
    cmdline = ['-D{}={}'.format(str(k), v) for k, v in options.cmd_line_options.items()]
    if options.cross_file:
        cmdline += [f'--cross-file={f}' for f in options.cross_file]
    if options.native_file:
        cmdline += [f'--native-file={f}' for f in options.native_file]
    return ' '.join([shlex.quote(x) for x in cmdline])

def major_versions_differ(v1: str, v2: str) -> bool:
    v1_major, v1_minor = v1.rsplit('.', 1)
    v2_major, v2_minor = v2.rsplit('.', 1)
    # Major versions differ, or one is a development version but not the other.
    return v1_major != v2_major or ('99' in {v1_minor, v2_minor} and v1_minor != v2_minor)
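
# Illustrative results: major_versions_differ('1.3.2', '1.3.0') is False,
# major_versions_differ('1.3.2', '1.4.0') is True, and
# major_versions_differ('1.3.99', '1.3.2') is True because only one of the
# two is a development ('.99') version.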

def load(build_dir: str, suggest_reconfigure: bool = True) -> CoreData:
    filename = os.path.join(build_dir, 'meson-private', 'coredata.dat')
    return pickle_load(filename, 'Coredata', CoreData, suggest_reconfigure)

def save(obj: CoreData, build_dir: str) -> str:
    filename = os.path.join(build_dir, 'meson-private', 'coredata.dat')
    prev_filename = filename + '.prev'
    tempfilename = filename + '~'
    if major_versions_differ(obj.version, version):
        raise MesonException('Fatal version mismatch corruption.')
    if os.path.exists(filename):
        import shutil
        shutil.copyfile(filename, prev_filename)
    with open(tempfilename, 'wb') as f:
        pickle.dump(obj, f)
        f.flush()
        os.fsync(f.fileno())
    os.replace(tempfilename, filename)
    return filename

def register_builtin_arguments(parser: argparse.ArgumentParser) -> None:
    for n, b in options.BUILTIN_OPTIONS.items():
        b.add_to_argparse(str(n), parser, '')
    for n, b in options.BUILTIN_OPTIONS_PER_MACHINE.items():
        b.add_to_argparse(str(n), parser, ' (just for host machine)')
        b.add_to_argparse(str(n.as_build()), parser, ' (just for build machine)')
    parser.add_argument('-D', action='append', dest='projectoptions', default=[], metavar="option",
                        help='Set the value of an option, can be used several times to set multiple options.')

def create_options_dict(options: T.List[str], subproject: str = '') -> T.Dict[OptionKey, str]:
    result: T.OrderedDict[OptionKey, str] = OrderedDict()
    for o in options:
        try:
            (key, value) = o.split('=', 1)
        except ValueError:
            raise MesonException(f'Option {o!r} must have a value separated by equals sign.')
        k = OptionKey.from_string(key)
        if subproject:
            k = k.evolve(subproject=subproject)
        result[k] = value
    return result

def parse_cmd_line_options(args: SharedCMDOptions) -> None:
    args.cmd_line_options = create_options_dict(args.projectoptions)

    # Merge builtin options set with --option into the dict.
    for key in chain(
            options.BUILTIN_OPTIONS.keys(),
            (k.as_build() for k in options.BUILTIN_OPTIONS_PER_MACHINE.keys()),
            options.BUILTIN_OPTIONS_PER_MACHINE.keys(),
    ):
        name = str(key)
        value = getattr(args, name, None)
        if value is not None:
            if key in args.cmd_line_options:
                cmdline_name = options.BuiltinOption.argparse_name_to_arg(name)
                raise MesonException(
                    f'Got argument {name} as both -D{name} and {cmdline_name}. Pick one.')
            args.cmd_line_options[key] = value
            delattr(args, name)

@dataclass
class OptionsView(abc.Mapping):
    '''A view on an options dictionary for a given subproject and with overrides.
    (A rough usage sketch follows the class definition.)
    '''

    # TODO: the typing here could be made more explicit using a TypeDict from
    # python 3.8 or typing_extensions
    original_options: KeyedOptionDictType
    subproject: T.Optional[str] = None
    overrides: T.Optional[T.Mapping[OptionKey, T.Union[str, int, bool, T.List[str]]]] = None

    def __getitem__(self, key: OptionKey) -> options.UserOption:
        # FIXME: This is fundamentally the same algorithm as interpreter.get_option_internal().
        # We should try to share the code somehow.
        key = key.evolve(subproject=self.subproject)

        if not key.is_project():
            opt = self.original_options.get(key)
            if opt is None or opt.yielding:
                opt = self.original_options[key.as_root()]
        else:
            opt = self.original_options[key]
            if opt.yielding:
                opt = self.original_options.get(key.as_root(), opt)

        if self.overrides:
            override_value = self.overrides.get(key.as_root())
            if override_value is not None:
                opt = copy.copy(opt)
                opt.set_value(override_value)
        return opt

    def __iter__(self) -> T.Iterator[OptionKey]:
        return iter(self.original_options)

    def __len__(self) -> int:
        return len(self.original_options)
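
# Rough usage sketch for OptionsView (illustrative only; the option name and
# override value are assumed examples, not taken from this module). The view
# resolves a key within a subproject and applies per-target overrides on a
# copy of the underlying option:
#
#   view = OptionsView(coredata.options, subproject='sub',
#                      overrides={OptionKey.from_string('cpp_std'): 'c++17'})
#   view[OptionKey.from_string('cpp_std')].value   # -> 'c++17'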

FORBIDDEN_TARGET_NAMES = frozenset({
    'clean',
    'clean-ctlist',
    'clean-gcno',
    'clean-gcda',
    'coverage',
    'coverage-text',
    'coverage-xml',
    'coverage-html',
    'phony',
    'PHONY',
    'all',
    'test',
    'benchmark',
    'install',
    'uninstall',
    'build.ninja',
    'scan-build',
    'reconfigure',
    'dist',
    'distcheck',
})