|
|
|
# Copyright 2012-2020 The Meson development team
|
|
|
|
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
|
|
|
"""A library of random helper functionality."""
|
|
|
|
from pathlib import Path
|
|
|
|
import argparse
|
|
|
|
import enum
|
|
|
|
import sys
|
|
|
|
import stat
|
|
|
|
import time
|
|
|
|
import abc
|
|
|
|
import platform
import subprocess
import operator
import os
import shlex
import shutil
import re
|
|
|
|
import collections
|
|
|
|
from functools import lru_cache, wraps, total_ordering
|
|
|
|
from itertools import tee, filterfalse
|
|
|
|
from tempfile import TemporaryDirectory
|
|
|
|
import typing as T
|
|
|
|
import uuid
|
|
|
|
import textwrap
|
|
|
|
import copy
|
|
|
|
|
|
|
|
from mesonbuild import mlog
|
|
|
|
|
|
|
|
if T.TYPE_CHECKING:
|
|
|
|
from .._typing import ImmutableListProtocol
|
|
|
|
from ..build import ConfigurationData
|
|
|
|
from ..coredata import KeyedOptionDictType, UserOption
|
|
|
|
from ..compilers.compilers import Compiler
|
|
|
|
|
|
|
|
FileOrString = T.Union['File', str]
|
|
|
|
|
|
|
|
_T = T.TypeVar('_T')
|
|
|
|
_U = T.TypeVar('_U')
|
|
|
|
|
|
|
|
__all__ = [
|
|
|
|
'GIT',
|
|
|
|
'python_command',
|
|
|
|
'project_meson_versions',
|
|
|
|
'HoldableObject',
|
|
|
|
'SecondLevelHolder',
|
|
|
|
'File',
|
|
|
|
'FileMode',
|
|
|
|
'GitException',
|
|
|
|
'LibType',
|
|
|
|
'MachineChoice',
|
|
|
|
'MesonException',
|
|
|
|
'MesonBugException',
|
|
|
|
'EnvironmentException',
|
|
|
|
'FileOrString',
|
|
|
|
|
|
|
|
'OptionKey',
|
|
|
|
'dump_conf_header',
|
|
|
|
'OptionOverrideProxy',
|
|
|
|
'OptionType',
|
|
|
|
'OrderedSet',
|
|
|
|
'PerMachine',
|
|
|
|
'PerMachineDefaultable',
|
|
|
|
'PerThreeMachine',
|
|
|
|
'PerThreeMachineDefaultable',
|
|
|
|
'ProgressBar',
|
|
|
|
'RealPathAction',
|
|
|
|
'TemporaryDirectoryWinProof',
|
|
|
|
'Version',
|
|
|
|
'check_direntry_issues',
|
|
|
|
'classify_unity_sources',
|
|
|
|
'current_vs_supports_modules',
|
|
|
|
'darwin_get_object_archs',
|
|
|
|
'default_libdir',
|
|
|
|
'default_libexecdir',
|
|
|
|
'default_prefix',
|
|
|
|
'detect_subprojects',
|
|
|
|
'detect_vcs',
|
|
|
|
'do_conf_file',
|
|
|
|
'do_conf_str',
|
|
|
|
'do_replacement',
|
|
|
|
'exe_exists',
|
|
|
|
'expand_arguments',
|
|
|
|
'extract_as_list',
|
|
|
|
'get_compiler_for_source',
|
|
|
|
'get_filenames_templates_dict',
|
|
|
|
'get_library_dirs',
|
|
|
|
'get_variable_regex',
|
|
|
|
'get_wine_shortpath',
|
|
|
|
'git',
|
|
|
|
'has_path_sep',
|
|
|
|
'is_aix',
|
|
|
|
'is_android',
|
|
|
|
'is_ascii_string',
|
|
|
|
'is_cygwin',
|
|
|
|
'is_debianlike',
|
|
|
|
'is_dragonflybsd',
|
|
|
|
'is_freebsd',
|
|
|
|
'is_haiku',
|
|
|
|
'is_hurd',
|
|
|
|
'is_irix',
|
|
|
|
'is_linux',
|
|
|
|
'is_netbsd',
|
|
|
|
'is_openbsd',
|
|
|
|
'is_osx',
|
|
|
|
'is_qnx',
|
|
|
|
'is_sunos',
|
|
|
|
'is_windows',
|
|
|
|
'is_wsl',
|
|
|
|
'iter_regexin_iter',
|
|
|
|
'join_args',
|
|
|
|
'listify',
|
|
|
|
'partition',
|
|
|
|
'path_is_in_root',
|
|
|
|
'Popen_safe',
|
|
|
|
'quiet_git',
|
|
|
|
'quote_arg',
|
|
|
|
'relative_to_if_possible',
|
|
|
|
'relpath',
|
|
|
|
'replace_if_different',
|
|
|
|
'run_once',
|
|
|
|
'get_meson_command',
|
|
|
|
'set_meson_command',
|
|
|
|
'split_args',
|
|
|
|
'stringlistify',
|
|
|
|
'substitute_values',
|
|
|
|
'substring_is_in_list',
|
|
|
|
'typeslistify',
|
|
|
|
'verbose_git',
|
|
|
|
'version_compare',
|
|
|
|
'version_compare_condition_with_min',
|
|
|
|
'version_compare_many',
|
|
|
|
'search_version',
|
|
|
|
'windows_proof_rm',
|
|
|
|
'windows_proof_rmtree',
|
|
|
|
]
|
|
|
|
|
|
|
|
|
|
|
|
# TODO: this is such a hack, this really should be either in coredata or in the
|
|
|
|
# interpreter
|
|
|
|
# {subproject: project_meson_version}
|
|
|
|
project_meson_versions = collections.defaultdict(str) # type: T.DefaultDict[str, str]
|
|
|
|
|
|
|
|
|
|
|
|
from glob import glob
|
|
|
|
|
|
|
|
if os.path.basename(sys.executable) == 'meson.exe':
|
|
|
|
# On Windows, using the MSI-installed executable.
|
|
|
|
python_command = [sys.executable, 'runpython']
|
|
|
|
else:
|
|
|
|
python_command = [sys.executable]
|
|
|
|
_meson_command = None
|
|
|
|
|
|
|
|
class MesonException(Exception):
|
|
|
|
'''Exceptions thrown by Meson'''
|
|
|
|
|
|
|
|
def __init__(self, *args: object, file: T.Optional[str] = None,
|
|
|
|
lineno: T.Optional[int] = None, colno: T.Optional[int] = None):
|
|
|
|
super().__init__(*args)
|
|
|
|
self.file = file
|
|
|
|
self.lineno = lineno
|
|
|
|
self.colno = colno
|
|
|
|
|
|
|
|
|
|
|
|
class MesonBugException(MesonException):
|
|
|
|
'''Exceptions thrown when there is a clear Meson bug that should be reported'''
|
|
|
|
|
|
|
|
def __init__(self, msg: str, file: T.Optional[str] = None,
|
|
|
|
lineno: T.Optional[int] = None, colno: T.Optional[int] = None):
|
|
|
|
super().__init__(msg + '\n\n This is a Meson bug and should be reported!',
|
|
|
|
file=file, lineno=lineno, colno=colno)
|
|
|
|
|
|
|
|
class EnvironmentException(MesonException):
|
|
|
|
'''Exceptions thrown while processing and creating the build environment'''
|
|
|
|
|
|
|
|
class GitException(MesonException):
|
|
|
|
def __init__(self, msg: str, output: T.Optional[str] = None):
|
|
|
|
super().__init__(msg)
|
|
|
|
self.output = output.strip() if output else ''
|
|
|
|
|
|
|
|
GIT = shutil.which('git')
|
|
|
|
def git(cmd: T.List[str], workingdir: str, check: bool = False, **kwargs: T.Any) -> T.Tuple[subprocess.Popen, str, str]:
|
|
|
|
cmd = [GIT] + cmd
|
|
|
|
p, o, e = Popen_safe(cmd, cwd=workingdir, **kwargs)
|
|
|
|
if check and p.returncode != 0:
|
|
|
|
raise GitException('Git command failed: ' + str(cmd), e)
|
|
|
|
return p, o, e
|
|
|
|
|
|
|
|
def quiet_git(cmd: T.List[str], workingdir: str, check: bool = False) -> T.Tuple[bool, str]:
|
|
|
|
if not GIT:
|
|
|
|
m = 'Git program not found.'
|
|
|
|
if check:
|
|
|
|
raise GitException(m)
|
|
|
|
return False, m
|
|
|
|
p, o, e = git(cmd, workingdir, check)
|
|
|
|
if p.returncode != 0:
|
|
|
|
return False, e
|
|
|
|
return True, o
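# Rough usage sketch (hedged, 'srcdir' is a hypothetical variable):
#   ok, out = quiet_git(['rev-parse', 'HEAD'], srcdir)
# returns (True, <stdout>) on success and (False, <stderr or error message>)
# otherwise.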
|
|
|
|
|
|
|
|
def verbose_git(cmd: T.List[str], workingdir: str, check: bool = False) -> bool:
|
|
|
|
if not GIT:
|
|
|
|
m = 'Git program not found.'
|
|
|
|
if check:
|
|
|
|
raise GitException(m)
|
|
|
|
return False
|
|
|
|
p, _, _ = git(cmd, workingdir, check, stdout=None, stderr=None)
|
|
|
|
return p.returncode == 0
|
|
|
|
|
|
|
|
def set_meson_command(mainfile: str) -> None:
|
|
|
|
global python_command
|
|
|
|
global _meson_command
|
|
|
|
# On UNIX-like systems `meson` is a Python script
|
|
|
|
# On Windows `meson` and `meson.exe` are wrapper exes
|
|
|
|
if not mainfile.endswith('.py'):
|
|
|
|
_meson_command = [mainfile]
|
|
|
|
elif os.path.isabs(mainfile) and mainfile.endswith('mesonmain.py'):
|
|
|
|
# Can't actually run meson with an absolute path to mesonmain.py, it must be run as -m mesonbuild.mesonmain
|
|
|
|
_meson_command = python_command + ['-m', 'mesonbuild.mesonmain']
|
|
|
|
else:
|
|
|
|
# Either run uninstalled, or full path to meson-script.py
|
|
|
|
_meson_command = python_command + [mainfile]
|
|
|
|
# We print this value for unit tests.
|
|
|
|
if 'MESON_COMMAND_TESTS' in os.environ:
|
|
|
|
mlog.log(f'meson_command is {_meson_command!r}')
|
|
|
|
|
|
|
|
|
|
|
|
def get_meson_command() -> T.Optional[T.List[str]]:
|
|
|
|
return _meson_command
|
|
|
|
|
|
|
|
|
|
|
|
def is_ascii_string(astring: T.Union[str, bytes]) -> bool:
|
|
|
|
try:
|
|
|
|
if isinstance(astring, str):
|
|
|
|
astring.encode('ascii')
|
|
|
|
elif isinstance(astring, bytes):
|
|
|
|
astring.decode('ascii')
|
|
|
|
    except (UnicodeDecodeError, UnicodeEncodeError):  # encode() raises the latter for non-ASCII str
|
|
|
|
return False
|
|
|
|
return True
|
|
|
|
|
|
|
|
|
|
|
|
def check_direntry_issues(direntry_array: T.Union[T.List[T.Union[str, bytes]], str, bytes]) -> None:
|
|
|
|
import locale
|
|
|
|
# Warn if the locale is not UTF-8. This can cause various unfixable issues
|
|
|
|
# such as os.stat not being able to decode filenames with unicode in them.
|
|
|
|
# There is no way to reset both the preferred encoding and the filesystem
|
|
|
|
# encoding, so we can just warn about it.
|
|
|
|
e = locale.getpreferredencoding()
|
|
|
|
if e.upper() != 'UTF-8' and not is_windows():
|
|
|
|
if not isinstance(direntry_array, list):
|
|
|
|
direntry_array = [direntry_array]
|
|
|
|
for de in direntry_array:
|
|
|
|
if is_ascii_string(de):
|
|
|
|
continue
|
|
|
|
mlog.warning(textwrap.dedent(f'''
|
|
|
|
You are using {e!r} which is not a Unicode-compatible
|
|
|
|
locale but you are trying to access a file system entry called {de!r} which is
|
|
|
|
not pure ASCII. This may cause problems.
|
|
|
|
'''), file=sys.stderr)
|
|
|
|
|
|
|
|
class HoldableObject(metaclass=abc.ABCMeta):
|
|
|
|
''' Dummy base class for all objects that can be
|
|
|
|
held by an interpreter.baseobjects.ObjectHolder '''
|
|
|
|
|
|
|
|
class SecondLevelHolder(HoldableObject, metaclass=abc.ABCMeta):
|
|
|
|
''' A second level object holder. The primary purpose
|
|
|
|
of such objects is to hold multiple objects with one
|
|
|
|
default option. '''
|
|
|
|
|
|
|
|
@abc.abstractmethod
|
|
|
|
def get_default_object(self) -> HoldableObject: ...
|
|
|
|
|
|
|
|
class FileMode:
|
|
|
|
# The first triad is for owner permissions, the second for group permissions,
|
|
|
|
# and the third for others (everyone else).
|
|
|
|
# For the 1st character:
|
|
|
|
# 'r' means can read
|
|
|
|
# '-' means not allowed
|
|
|
|
# For the 2nd character:
|
|
|
|
# 'w' means can write
|
|
|
|
# '-' means not allowed
|
|
|
|
# For the 3rd character:
|
|
|
|
# 'x' means can execute
|
|
|
|
# 's' means can execute and setuid/setgid is set (owner/group triads only)
|
|
|
|
# 'S' means cannot execute and setuid/setgid is set (owner/group triads only)
|
|
|
|
# 't' means can execute and sticky bit is set ("others" triads only)
|
|
|
|
# 'T' means cannot execute and sticky bit is set ("others" triads only)
|
|
|
|
# '-' means none of these are allowed
|
|
|
|
#
|
|
|
|
# The meanings of 'rwx' perms is not obvious for directories; see:
|
|
|
|
# https://www.hackinglinuxexposed.com/articles/20030424.html
|
|
|
|
#
|
|
|
|
# For information on this notation such as setuid/setgid/sticky bits, see:
|
|
|
|
# https://en.wikipedia.org/wiki/File_system_permissions#Symbolic_notation
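    #
    # As a rough illustration of the mapping implemented below (hedged, for
    # reference only): 'rwxr-xr-x' should correspond to 0o755 and 'rw-r--r--'
    # to 0o644 when fed through perms_s_to_bits().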
|
|
|
|
symbolic_perms_regex = re.compile('[r-][w-][xsS-]' # Owner perms
|
|
|
|
'[r-][w-][xsS-]' # Group perms
|
|
|
|
'[r-][w-][xtT-]') # Others perms
|
|
|
|
|
|
|
|
def __init__(self, perms: T.Optional[str] = None, owner: T.Union[str, int, None] = None,
|
|
|
|
group: T.Union[str, int, None] = None):
|
|
|
|
self.perms_s = perms
|
|
|
|
self.perms = self.perms_s_to_bits(perms)
|
|
|
|
self.owner = owner
|
|
|
|
self.group = group
|
|
|
|
|
|
|
|
def __repr__(self) -> str:
|
|
|
|
ret = '<FileMode: {!r} owner={} group={}>'
|
|
|
|
return ret.format(self.perms_s, self.owner, self.group)
|
|
|
|
|
|
|
|
@classmethod
|
|
|
|
def perms_s_to_bits(cls, perms_s: T.Optional[str]) -> int:
|
|
|
|
'''
|
|
|
|
Does the opposite of stat.filemode(): converts strings of the form
'rwxr-xr-x' into an st_mode integer that can be passed to os.chmod().
|
|
|
|
'''
|
|
|
|
if perms_s is None:
|
|
|
|
# No perms specified, we will not touch the permissions
|
|
|
|
return -1
|
|
|
|
eg = 'rwxr-xr-x'
|
|
|
|
if not isinstance(perms_s, str):
|
|
|
|
raise MesonException(f'Install perms must be a string. For example, {eg!r}')
|
|
|
|
if len(perms_s) != 9 or not cls.symbolic_perms_regex.match(perms_s):
|
|
|
|
raise MesonException(f'File perms {perms_s!r} must be exactly 9 chars. For example, {eg!r}')
|
|
|
|
perms = 0
|
|
|
|
# Owner perms
|
|
|
|
if perms_s[0] == 'r':
|
|
|
|
perms |= stat.S_IRUSR
|
|
|
|
if perms_s[1] == 'w':
|
|
|
|
perms |= stat.S_IWUSR
|
|
|
|
if perms_s[2] == 'x':
|
|
|
|
perms |= stat.S_IXUSR
|
|
|
|
elif perms_s[2] == 'S':
|
|
|
|
perms |= stat.S_ISUID
|
|
|
|
elif perms_s[2] == 's':
|
|
|
|
perms |= stat.S_IXUSR
|
|
|
|
perms |= stat.S_ISUID
|
|
|
|
# Group perms
|
|
|
|
if perms_s[3] == 'r':
|
|
|
|
perms |= stat.S_IRGRP
|
|
|
|
if perms_s[4] == 'w':
|
|
|
|
perms |= stat.S_IWGRP
|
|
|
|
if perms_s[5] == 'x':
|
|
|
|
perms |= stat.S_IXGRP
|
|
|
|
elif perms_s[5] == 'S':
|
|
|
|
perms |= stat.S_ISGID
|
|
|
|
elif perms_s[5] == 's':
|
|
|
|
perms |= stat.S_IXGRP
|
|
|
|
perms |= stat.S_ISGID
|
|
|
|
# Others perms
|
|
|
|
if perms_s[6] == 'r':
|
|
|
|
perms |= stat.S_IROTH
|
|
|
|
if perms_s[7] == 'w':
|
|
|
|
perms |= stat.S_IWOTH
|
|
|
|
if perms_s[8] == 'x':
|
|
|
|
perms |= stat.S_IXOTH
|
|
|
|
elif perms_s[8] == 'T':
|
|
|
|
perms |= stat.S_ISVTX
|
|
|
|
elif perms_s[8] == 't':
|
|
|
|
perms |= stat.S_IXOTH
|
|
|
|
perms |= stat.S_ISVTX
|
|
|
|
return perms
|
|
|
|
|
|
|
|
dot_C_dot_H_warning = """You are using .C or .H files in your project. This is deprecated.
|
|
|
|
Currently, Meson treats these as C++ code, but they
used to be treated as C code.
|
|
|
|
Note that the situation is a bit more complex if you are using the
|
|
|
|
Visual Studio compiler, as it treats .C files as C code, unless you add
|
|
|
|
the /TP compiler flag, but this is unreliable.
|
|
|
|
See https://github.com/mesonbuild/meson/pull/8747 for the discussions."""
|
|
|
|
class File(HoldableObject):
|
|
|
|
def __init__(self, is_built: bool, subdir: str, fname: str):
|
|
|
|
if fname.endswith(".C") or fname.endswith(".H"):
|
|
|
|
mlog.warning(dot_C_dot_H_warning, once=True)
|
|
|
|
self.is_built = is_built
|
|
|
|
self.subdir = subdir
|
|
|
|
self.fname = fname
|
|
|
|
self.hash = hash((is_built, subdir, fname))
|
|
|
|
|
|
|
|
def __str__(self) -> str:
|
|
|
|
return self.relative_name()
|
|
|
|
|
|
|
|
def __repr__(self) -> str:
|
|
|
|
ret = '<File: {0}'
|
|
|
|
if not self.is_built:
|
|
|
|
ret += ' (not built)'
|
|
|
|
ret += '>'
|
|
|
|
return ret.format(self.relative_name())
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
@lru_cache(maxsize=None)
|
|
|
|
def from_source_file(source_root: str, subdir: str, fname: str) -> 'File':
|
|
|
|
if not os.path.isfile(os.path.join(source_root, subdir, fname)):
|
|
|
|
raise MesonException(f'File {fname} does not exist.')
|
|
|
|
return File(False, subdir, fname)
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def from_built_file(subdir: str, fname: str) -> 'File':
|
|
|
|
return File(True, subdir, fname)
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def from_absolute_file(fname: str) -> 'File':
|
|
|
|
return File(False, '', fname)
|
|
|
|
|
|
|
|
@lru_cache(maxsize=None)
|
|
|
|
def rel_to_builddir(self, build_to_src: str) -> str:
|
|
|
|
if self.is_built:
|
|
|
|
return self.relative_name()
|
|
|
|
else:
|
|
|
|
return os.path.join(build_to_src, self.subdir, self.fname)
|
|
|
|
|
|
|
|
@lru_cache(maxsize=None)
|
|
|
|
def absolute_path(self, srcdir: str, builddir: str) -> str:
|
|
|
|
absdir = srcdir
|
|
|
|
if self.is_built:
|
|
|
|
absdir = builddir
|
|
|
|
return os.path.join(absdir, self.relative_name())
|
|
|
|
|
|
|
|
@property
|
|
|
|
def suffix(self) -> str:
|
|
|
|
return os.path.splitext(self.fname)[1][1:].lower()
|
|
|
|
|
|
|
|
def endswith(self, ending: str) -> bool:
|
|
|
|
return self.fname.endswith(ending)
|
|
|
|
|
|
|
|
def split(self, s: str, maxsplit: int = -1) -> T.List[str]:
|
|
|
|
return self.fname.split(s, maxsplit=maxsplit)
|
|
|
|
|
|
|
|
def rsplit(self, s: str, maxsplit: int = -1) -> T.List[str]:
|
|
|
|
return self.fname.rsplit(s, maxsplit=maxsplit)
|
|
|
|
|
|
|
|
def __eq__(self, other: object) -> bool:
|
|
|
|
if not isinstance(other, File):
|
|
|
|
return NotImplemented
|
|
|
|
if self.hash != other.hash:
|
|
|
|
return False
|
|
|
|
return (self.fname, self.subdir, self.is_built) == (other.fname, other.subdir, other.is_built)
|
|
|
|
|
|
|
|
def __hash__(self) -> int:
|
|
|
|
return self.hash
|
|
|
|
|
|
|
|
@lru_cache(maxsize=None)
|
|
|
|
def relative_name(self) -> str:
|
|
|
|
return os.path.join(self.subdir, self.fname)
|
|
|
|
|
|
|
|
|
|
|
|
def get_compiler_for_source(compilers: T.Iterable['Compiler'], src: 'FileOrString') -> 'Compiler':
|
|
|
|
"""Given a set of compilers and a source, find the compiler for that source type."""
|
|
|
|
for comp in compilers:
|
|
|
|
if comp.can_compile(src):
|
|
|
|
return comp
|
|
|
|
raise MesonException(f'No specified compiler can handle file {src!s}')
|
|
|
|
|
|
|
|
|
|
|
|
def classify_unity_sources(compilers: T.Iterable['Compiler'], sources: T.Sequence['FileOrString']) -> T.Dict['Compiler', T.List['FileOrString']]:
|
|
|
|
compsrclist: T.Dict['Compiler', T.List['FileOrString']] = {}
|
|
|
|
for src in sources:
|
|
|
|
comp = get_compiler_for_source(compilers, src)
|
|
|
|
if comp not in compsrclist:
|
|
|
|
compsrclist[comp] = [src]
|
|
|
|
else:
|
|
|
|
compsrclist[comp].append(src)
|
|
|
|
return compsrclist
|
|
|
|
|
|
|
|
|
|
|
|
class MachineChoice(enum.IntEnum):
|
|
|
|
|
|
|
|
"""Enum class representing one of the two abstract machine names used in
|
|
|
|
most places: the build and host machines.
|
|
|
|
"""
|
|
|
|
|
|
|
|
BUILD = 0
|
|
|
|
HOST = 1
|
|
|
|
|
|
|
|
def get_lower_case_name(self) -> str:
|
|
|
|
return PerMachine('build', 'host')[self]
|
|
|
|
|
|
|
|
def get_prefix(self) -> str:
|
|
|
|
return PerMachine('build.', '')[self]
|
|
|
|
|
|
|
|
|
|
|
|
class PerMachine(T.Generic[_T]):
|
|
|
|
def __init__(self, build: _T, host: _T) -> None:
|
|
|
|
self.build = build
|
|
|
|
self.host = host
|
|
|
|
|
|
|
|
def __getitem__(self, machine: MachineChoice) -> _T:
|
|
|
|
return {
|
|
|
|
MachineChoice.BUILD: self.build,
|
|
|
|
MachineChoice.HOST: self.host,
|
|
|
|
}[machine]
|
|
|
|
|
|
|
|
def __setitem__(self, machine: MachineChoice, val: _T) -> None:
|
|
|
|
setattr(self, machine.get_lower_case_name(), val)
|
|
|
|
|
|
|
|
def miss_defaulting(self) -> "PerMachineDefaultable[T.Optional[_T]]":
|
|
|
|
"""Unset definition duplicated from their previous to None
|
|
|
|
|
|
|
|
This is the inverse of ''default_missing''. By removing defaulted
|
|
|
|
machines, we can elaborate the original and then redefault them and thus
|
|
|
|
avoid repeating the elaboration explicitly.
|
|
|
|
"""
|
|
|
|
unfreeze = PerMachineDefaultable() # type: PerMachineDefaultable[T.Optional[_T]]
|
|
|
|
unfreeze.build = self.build
|
|
|
|
unfreeze.host = self.host
|
|
|
|
if unfreeze.host == unfreeze.build:
|
|
|
|
unfreeze.host = None
|
|
|
|
return unfreeze
|
|
|
|
|
|
|
|
def __repr__(self) -> str:
|
|
|
|
return f'PerMachine({self.build!r}, {self.host!r})'
|
|
|
|
|
|
|
|
|
|
|
|
class PerThreeMachine(PerMachine[_T]):
|
|
|
|
"""Like `PerMachine` but includes `target` too.
|
|
|
|
|
|
|
|
It turns out we only need to track the target machine for one thing. There's no
need to compute the `target` field, so we don't bother overriding the
|
|
|
|
`__getitem__`/`__setitem__` methods.
|
|
|
|
"""
|
|
|
|
def __init__(self, build: _T, host: _T, target: _T) -> None:
|
|
|
|
super().__init__(build, host)
|
|
|
|
self.target = target
|
|
|
|
|
|
|
|
def miss_defaulting(self) -> "PerThreeMachineDefaultable[T.Optional[_T]]":
|
|
|
|
"""Unset definition duplicated from their previous to None
|
|
|
|
|
|
|
|
This is the inverse of ''default_missing''. By removing defaulted
|
|
|
|
machines, we can elaborate the original and then redefault them and thus
|
|
|
|
avoid repeating the elaboration explicitly.
|
|
|
|
"""
|
|
|
|
unfreeze = PerThreeMachineDefaultable() # type: PerThreeMachineDefaultable[T.Optional[_T]]
|
|
|
|
unfreeze.build = self.build
|
|
|
|
unfreeze.host = self.host
|
|
|
|
unfreeze.target = self.target
|
|
|
|
if unfreeze.target == unfreeze.host:
|
|
|
|
unfreeze.target = None
|
|
|
|
if unfreeze.host == unfreeze.build:
|
|
|
|
unfreeze.host = None
|
|
|
|
return unfreeze
|
|
|
|
|
|
|
|
def matches_build_machine(self, machine: MachineChoice) -> bool:
|
|
|
|
return self.build == self[machine]
|
|
|
|
|
|
|
|
def __repr__(self) -> str:
|
|
|
|
return f'PerThreeMachine({self.build!r}, {self.host!r}, {self.target!r})'
|
|
|
|
|
|
|
|
|
|
|
|
class PerMachineDefaultable(PerMachine[T.Optional[_T]]):
|
|
|
|
"""Extends `PerMachine` with the ability to default from `None`s.
|
|
|
|
"""
|
|
|
|
def __init__(self, build: T.Optional[_T] = None, host: T.Optional[_T] = None) -> None:
|
|
|
|
super().__init__(build, host)
|
|
|
|
|
|
|
|
def default_missing(self) -> "PerMachine[_T]":
|
|
|
|
"""Default host to build
|
|
|
|
|
|
|
|
This allows just specifying nothing in the native case, and just host in the
|
|
|
|
cross non-compiler case.
|
|
|
|
"""
|
|
|
|
freeze = PerMachine(self.build, self.host)
|
|
|
|
if freeze.host is None:
|
|
|
|
freeze.host = freeze.build
|
|
|
|
return freeze
|
|
|
|
|
|
|
|
def __repr__(self) -> str:
|
|
|
|
return f'PerMachineDefaultable({self.build!r}, {self.host!r})'
|
|
|
|
|
|
|
|
@classmethod
|
|
|
|
def default(cls, is_cross: bool, build: _T, host: _T) -> PerMachine[_T]:
|
|
|
|
"""Easy way to get a defaulted value
|
|
|
|
|
|
|
|
This allows simplifying the case where you can control whether host and
|
|
|
|
build are separate or not with a boolean. If the is_cross value is set
|
|
|
|
to true then the optional host value will be used, otherwise the host
|
|
|
|
will be set to the build value.
|
|
|
|
"""
|
|
|
|
m = cls(build)
|
|
|
|
if is_cross:
|
|
|
|
m.host = host
|
|
|
|
return m.default_missing()
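# Rough usage sketch (hypothetical compiler names, illustration only):
#   PerMachineDefaultable.default(is_cross=False, build='gcc', host='clang')
# yields PerMachine('gcc', 'gcc'), while the same call with is_cross=True
# yields PerMachine('gcc', 'clang').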
|
|
|
|
|
|
|
|
|
|
|
|
class PerThreeMachineDefaultable(PerMachineDefaultable, PerThreeMachine[T.Optional[_T]]):
|
|
|
|
"""Extends `PerThreeMachine` with the ability to default from `None`s.
|
|
|
|
"""
|
|
|
|
def __init__(self) -> None:
|
|
|
|
PerThreeMachine.__init__(self, None, None, None)
|
|
|
|
|
|
|
|
def default_missing(self) -> "PerThreeMachine[T.Optional[_T]]":
|
|
|
|
"""Default host to build and target to host.
|
|
|
|
|
|
|
|
This allows just specifying nothing in the native case, just host in the
|
|
|
|
cross non-compiler case, and just target in the native-built
|
|
|
|
cross-compiler case.
|
|
|
|
"""
|
|
|
|
freeze = PerThreeMachine(self.build, self.host, self.target)
|
|
|
|
if freeze.host is None:
|
|
|
|
freeze.host = freeze.build
|
|
|
|
if freeze.target is None:
|
|
|
|
freeze.target = freeze.host
|
|
|
|
return freeze
|
|
|
|
|
|
|
|
def __repr__(self) -> str:
|
|
|
|
return f'PerThreeMachineDefaultable({self.build!r}, {self.host!r}, {self.target!r})'
|
|
|
|
|
|
|
|
|
|
|
|
def is_sunos() -> bool:
|
|
|
|
return platform.system().lower() == 'sunos'
|
|
|
|
|
|
|
|
|
|
|
|
def is_osx() -> bool:
|
|
|
|
return platform.system().lower() == 'darwin'
|
|
|
|
|
|
|
|
|
|
|
|
def is_linux() -> bool:
|
|
|
|
return platform.system().lower() == 'linux'
|
|
|
|
|
|
|
|
|
|
|
|
def is_android() -> bool:
|
|
|
|
return platform.system().lower() == 'android'
|
|
|
|
|
|
|
|
|
|
|
|
def is_haiku() -> bool:
|
|
|
|
return platform.system().lower() == 'haiku'
|
|
|
|
|
|
|
|
|
|
|
|
def is_openbsd() -> bool:
|
|
|
|
return platform.system().lower() == 'openbsd'
|
|
|
|
|
|
|
|
|
|
|
|
def is_windows() -> bool:
|
|
|
|
platname = platform.system().lower()
|
|
|
|
return platname == 'windows'
|
|
|
|
|
|
|
|
def is_wsl() -> bool:
|
|
|
|
return is_linux() and 'microsoft' in platform.release().lower()
|
|
|
|
|
|
|
|
def is_cygwin() -> bool:
|
|
|
|
return sys.platform == 'cygwin'
|
|
|
|
|
|
|
|
|
|
|
|
def is_debianlike() -> bool:
|
|
|
|
return os.path.isfile('/etc/debian_version')
|
|
|
|
|
|
|
|
|
|
|
|
def is_dragonflybsd() -> bool:
|
|
|
|
return platform.system().lower() == 'dragonfly'
|
|
|
|
|
|
|
|
|
|
|
|
def is_netbsd() -> bool:
|
|
|
|
return platform.system().lower() == 'netbsd'
|
|
|
|
|
|
|
|
|
|
|
|
def is_freebsd() -> bool:
|
|
|
|
return platform.system().lower() == 'freebsd'
|
|
|
|
|
|
|
|
def is_irix() -> bool:
|
|
|
|
return platform.system().lower().startswith('irix')
|
|
|
|
|
|
|
|
def is_hurd() -> bool:
|
|
|
|
return platform.system().lower() == 'gnu'
|
|
|
|
|
|
|
|
def is_qnx() -> bool:
|
|
|
|
return platform.system().lower() == 'qnx'
|
|
|
|
|
|
|
|
def is_aix() -> bool:
|
|
|
|
return platform.system().lower() == 'aix'
|
|
|
|
|
|
|
|
def exe_exists(arglist: T.List[str]) -> bool:
|
|
|
|
try:
|
|
|
|
if subprocess.run(arglist, timeout=10).returncode == 0:
|
|
|
|
return True
|
|
|
|
except (FileNotFoundError, subprocess.TimeoutExpired):
|
|
|
|
pass
|
|
|
|
return False
|
|
|
|
|
|
|
|
|
|
|
|
@lru_cache(maxsize=None)
|
|
|
|
def darwin_get_object_archs(objpath: str) -> 'ImmutableListProtocol[str]':
|
|
|
|
'''
|
|
|
|
For a specific object (executable, static library, dylib, etc), run `lipo`
|
|
|
|
to fetch the list of archs supported by it. Supports both thin objects and
|
|
|
|
'fat' objects.
|
|
|
|
'''
|
|
|
|
_, stdo, stderr = Popen_safe(['lipo', '-info', objpath])
|
|
|
|
if not stdo:
|
|
|
|
mlog.debug(f'lipo {objpath}: {stderr}')
|
|
|
|
return None
|
|
|
|
stdo = stdo.rsplit(': ', 1)[1]
|
|
|
|
# Convert from lipo-style archs to meson-style CPUs
|
|
|
|
stdo = stdo.replace('i386', 'x86')
|
|
|
|
stdo = stdo.replace('arm64', 'aarch64')
|
|
|
|
# Add generic name for armv7 and armv7s
|
|
|
|
if 'armv7' in stdo:
|
|
|
|
stdo += ' arm'
|
|
|
|
return stdo.split()
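# e.g. (illustration only) a lipo line ending in ': x86_64 arm64' should come
# back as ['x86_64', 'aarch64'] after the renaming above.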
|
|
|
|
|
|
|
|
|
|
|
|
def detect_vcs(source_dir: T.Union[str, Path]) -> T.Optional[T.Dict[str, str]]:
|
|
|
|
vcs_systems = [
|
|
|
|
dict(name = 'git', cmd = 'git', repo_dir = '.git', get_rev = 'git describe --dirty=+', rev_regex = '(.*)', dep = '.git/logs/HEAD'),
|
|
|
|
dict(name = 'mercurial', cmd = 'hg', repo_dir = '.hg', get_rev = 'hg id -i', rev_regex = '(.*)', dep = '.hg/dirstate'),
|
|
|
|
dict(name = 'subversion', cmd = 'svn', repo_dir = '.svn', get_rev = 'svn info', rev_regex = 'Revision: (.*)', dep = '.svn/wc.db'),
|
|
|
|
dict(name = 'bazaar', cmd = 'bzr', repo_dir = '.bzr', get_rev = 'bzr revno', rev_regex = '(.*)', dep = '.bzr'),
|
|
|
|
]
|
|
|
|
if isinstance(source_dir, str):
|
|
|
|
source_dir = Path(source_dir)
|
|
|
|
|
|
|
|
parent_paths_and_self = collections.deque(source_dir.parents)
|
|
|
|
# Prepend the source directory to the front so we can check it;
|
|
|
|
# source_dir.parents doesn't include source_dir
|
|
|
|
parent_paths_and_self.appendleft(source_dir)
|
|
|
|
for curdir in parent_paths_and_self:
|
|
|
|
for vcs in vcs_systems:
|
|
|
|
if Path.is_dir(curdir.joinpath(vcs['repo_dir'])) and shutil.which(vcs['cmd']):
|
|
|
|
vcs['wc_dir'] = str(curdir)
|
|
|
|
return vcs
|
|
|
|
return None
|
|
|
|
|
|
|
|
def current_vs_supports_modules() -> bool:
|
|
|
|
vsver = os.environ.get('VSCMD_VER', '')
|
|
|
|
nums = vsver.split('.', 2)
|
|
|
|
major = int(nums[0] if nums[0] else 0)  # VSCMD_VER may be unset or empty
|
|
|
|
if major >= 17:
|
|
|
|
return True
|
|
|
|
if major == 16 and int(nums[1]) >= 10:
|
|
|
|
return True
|
|
|
|
return vsver.startswith('16.9.0') and '-pre.' in vsver
|
|
|
|
|
|
|
|
# a helper class which implements the same version ordering as RPM
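# A few hedged examples of the resulting ordering (illustration only):
#   Version('1.10') > Version('1.9')          numeric parts compare as integers
#   Version('1.2.0-rc1') > Version('1.2.0')   extra trailing parts sort higher
#   Version('1.a') < Version('1.1')           alphabetic parts sort before numeric ones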
|
|
|
|
class Version:
|
|
|
|
def __init__(self, s: str) -> None:
|
|
|
|
self._s = s
|
|
|
|
|
|
|
|
# split into numeric, alphabetic and non-alphanumeric sequences
|
|
|
|
sequences1 = re.finditer(r'(\d+|[a-zA-Z]+|[^a-zA-Z\d]+)', s)
|
|
|
|
|
|
|
|
# non-alphanumeric separators are discarded
|
|
|
|
sequences2 = [m for m in sequences1 if not re.match(r'[^a-zA-Z\d]+', m.group(1))]
|
|
|
|
|
|
|
|
# numeric sequences are converted from strings to ints
|
|
|
|
sequences3 = [int(m.group(1)) if m.group(1).isdigit() else m.group(1) for m in sequences2]
|
|
|
|
|
|
|
|
self._v = sequences3
|
|
|
|
|
|
|
|
def __str__(self) -> str:
|
|
|
|
return '{} (V={})'.format(self._s, str(self._v))
|
|
|
|
|
|
|
|
def __repr__(self) -> str:
|
|
|
|
return f'<Version: {self._s}>'
|
|
|
|
|
|
|
|
def __lt__(self, other: object) -> bool:
|
|
|
|
if isinstance(other, Version):
|
|
|
|
return self.__cmp(other, operator.lt)
|
|
|
|
return NotImplemented
|
|
|
|
|
|
|
|
def __gt__(self, other: object) -> bool:
|
|
|
|
if isinstance(other, Version):
|
|
|
|
return self.__cmp(other, operator.gt)
|
|
|
|
return NotImplemented
|
|
|
|
|
|
|
|
def __le__(self, other: object) -> bool:
|
|
|
|
if isinstance(other, Version):
|
|
|
|
return self.__cmp(other, operator.le)
|
|
|
|
return NotImplemented
|
|
|
|
|
|
|
|
def __ge__(self, other: object) -> bool:
|
|
|
|
if isinstance(other, Version):
|
|
|
|
return self.__cmp(other, operator.ge)
|
|
|
|
return NotImplemented
|
|
|
|
|
|
|
|
def __eq__(self, other: object) -> bool:
|
|
|
|
if isinstance(other, Version):
|
|
|
|
return self._v == other._v
|
|
|
|
return NotImplemented
|
|
|
|
|
|
|
|
def __ne__(self, other: object) -> bool:
|
|
|
|
if isinstance(other, Version):
|
|
|
|
return self._v != other._v
|
|
|
|
return NotImplemented
|
|
|
|
|
|
|
|
def __cmp(self, other: 'Version', comparator: T.Callable[[T.Any, T.Any], bool]) -> bool:
|
|
|
|
# compare each sequence in order
|
|
|
|
for ours, theirs in zip(self._v, other._v):
|
|
|
|
# sort a non-digit sequence before a digit sequence
|
|
|
|
ours_is_int = isinstance(ours, int)
|
|
|
|
theirs_is_int = isinstance(theirs, int)
|
|
|
|
if ours_is_int != theirs_is_int:
|
|
|
|
return comparator(ours_is_int, theirs_is_int)
|
|
|
|
|
|
|
|
if ours != theirs:
|
|
|
|
return comparator(ours, theirs)
|
|
|
|
|
|
|
|
# if equal length, all components have matched, so equal
|
|
|
|
# otherwise, the version with a suffix remaining is greater
|
|
|
|
return comparator(len(self._v), len(other._v))
|
|
|
|
|
|
|
|
|
|
|
|
def _version_extract_cmpop(vstr2: str) -> T.Tuple[T.Callable[[T.Any, T.Any], bool], str]:
|
|
|
|
if vstr2.startswith('>='):
|
|
|
|
cmpop = operator.ge
|
|
|
|
vstr2 = vstr2[2:]
|
|
|
|
elif vstr2.startswith('<='):
|
|
|
|
cmpop = operator.le
|
|
|
|
vstr2 = vstr2[2:]
|
|
|
|
elif vstr2.startswith('!='):
|
|
|
|
cmpop = operator.ne
|
|
|
|
vstr2 = vstr2[2:]
|
|
|
|
elif vstr2.startswith('=='):
|
|
|
|
cmpop = operator.eq
|
|
|
|
vstr2 = vstr2[2:]
|
|
|
|
elif vstr2.startswith('='):
|
|
|
|
cmpop = operator.eq
|
|
|
|
vstr2 = vstr2[1:]
|
|
|
|
elif vstr2.startswith('>'):
|
|
|
|
cmpop = operator.gt
|
|
|
|
vstr2 = vstr2[1:]
|
|
|
|
elif vstr2.startswith('<'):
|
|
|
|
cmpop = operator.lt
|
|
|
|
vstr2 = vstr2[1:]
|
|
|
|
else:
|
|
|
|
cmpop = operator.eq
|
|
|
|
|
|
|
|
return (cmpop, vstr2)
|
|
|
|
|
|
|
|
|
|
|
|
def version_compare(vstr1: str, vstr2: str) -> bool:
|
|
|
|
(cmpop, vstr2) = _version_extract_cmpop(vstr2)
|
|
|
|
return cmpop(Version(vstr1), Version(vstr2))
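# For example (hedged illustration), version_compare('1.2.3', '>=1.2') should
# be True and version_compare('1.2.3', '<1.2.3') should be False.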
|
|
|
|
|
|
|
|
|
|
|
|
def version_compare_many(vstr1: str, conditions: T.Union[str, T.Iterable[str]]) -> T.Tuple[bool, T.List[str], T.List[str]]:
|
|
|
|
if isinstance(conditions, str):
|
|
|
|
conditions = [conditions]
|
|
|
|
found = []
|
|
|
|
not_found = []
|
|
|
|
for req in conditions:
|
|
|
|
if not version_compare(vstr1, req):
|
|
|
|
not_found.append(req)
|
|
|
|
else:
|
|
|
|
found.append(req)
|
|
|
|
return not_found == [], not_found, found
|
|
|
|
|
|
|
|
|
|
|
|
# determine if the minimum version satisfying the condition |condition| exceeds
|
|
|
|
# the minimum version for a feature |minimum|
|
|
|
|
def version_compare_condition_with_min(condition: str, minimum: str) -> bool:
|
|
|
|
if condition.startswith('>='):
|
|
|
|
cmpop = operator.le
|
|
|
|
condition = condition[2:]
|
|
|
|
elif condition.startswith('<='):
|
|
|
|
return False
|
|
|
|
elif condition.startswith('!='):
|
|
|
|
return False
|
|
|
|
elif condition.startswith('=='):
|
|
|
|
cmpop = operator.le
|
|
|
|
condition = condition[2:]
|
|
|
|
elif condition.startswith('='):
|
|
|
|
cmpop = operator.le
|
|
|
|
condition = condition[1:]
|
|
|
|
elif condition.startswith('>'):
|
|
|
|
cmpop = operator.lt
|
|
|
|
condition = condition[1:]
|
|
|
|
elif condition.startswith('<'):
|
|
|
|
return False
|
|
|
|
else:
|
|
|
|
cmpop = operator.le
|
|
|
|
|
|
|
|
# Declaring a project(meson_version: '>=0.46') and then using features in
|
|
|
|
# 0.46.0 is valid, because (knowing the meson versioning scheme) '0.46.0' is
|
|
|
|
# the lowest version which satisfies the constraint '>=0.46'.
|
|
|
|
#
|
|
|
|
# But this will fail here, because the minimum version required by the
|
|
|
|
# version constraint ('0.46') is strictly less (in our version comparison)
|
|
|
|
# than the minimum version needed for the feature ('0.46.0').
|
|
|
|
#
|
|
|
|
# Map versions in the constraint of the form '0.46' to '0.46.0', to embed
|
|
|
|
# this knowledge of the meson versioning scheme.
|
|
|
|
condition = condition.strip()
|
|
|
|
if re.match(r'^\d+\.\d+$', condition):
|
|
|
|
condition += '.0'
|
|
|
|
|
|
|
|
return T.cast(bool, cmpop(Version(minimum), Version(condition)))
|
|
|
|
|
|
|
|
def search_version(text: str) -> str:
|
|
|
|
# Usually of the type 4.1.4 but compiler output may contain
|
|
|
|
# stuff like this:
|
|
|
|
# (Sourcery CodeBench Lite 2014.05-29) 4.8.3 20140320 (prerelease)
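    # (for that example, the first pattern below is expected to pick out '4.8.3')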
|
|
|
|
# Limiting major version number to two digits seems to work
|
|
|
|
# thus far. When we get to GCC 100, this will break, but
|
|
|
|
# if we are still relevant when that happens, it can be
|
|
|
|
# considered an achievement in itself.
|
|
|
|
#
|
|
|
|
# This regex is reaching magic levels. If it ever needs
|
|
|
|
# to be updated, do not complexify but convert to something
|
|
|
|
# saner instead.
|
|
|
|
# We'll demystify it a bit with a verbose definition.
|
|
|
|
version_regex = re.compile(r"""
|
|
|
|
(?<! # Zero-width negative lookbehind assertion
|
|
|
|
(
|
|
|
|
\d # One digit
|
|
|
|
| \. # Or one period
|
|
|
|
) # One occurrence
|
|
|
|
)
|
|
|
|
# Following pattern must not follow a digit or period
|
|
|
|
(
|
|
|
|
\d{1,2} # One or two digits
|
|
|
|
(
|
|
|
|
\.\d+ # Period and one or more digits
|
|
|
|
)+ # One or more occurrences
|
|
|
|
(
|
|
|
|
-[a-zA-Z0-9]+ # Hyphen and one or more alphanumeric
|
|
|
|
)? # Zero or one occurrence
|
|
|
|
) # One occurrence
|
|
|
|
""", re.VERBOSE)
|
|
|
|
match = version_regex.search(text)
|
|
|
|
if match:
|
|
|
|
return match.group(0)
|
|
|
|
|
|
|
|
# try a simpler regex that matches versions like "blah 2020.01.100 foo" or "blah 2020.01 foo"
|
|
|
|
version_regex = re.compile(r"(\d{1,4}\.\d{1,4}\.?\d{0,4})")
|
|
|
|
match = version_regex.search(text)
|
|
|
|
if match:
|
|
|
|
return match.group(0)
|
|
|
|
|
|
|
|
return 'unknown version'
|
|
|
|
|
|
|
|
|
|
|
|
def default_libdir() -> str:
|
|
|
|
if is_debianlike():
|
|
|
|
try:
|
|
|
|
pc = subprocess.Popen(['dpkg-architecture', '-qDEB_HOST_MULTIARCH'],
|
|
|
|
stdout=subprocess.PIPE,
|
|
|
|
stderr=subprocess.DEVNULL)
|
|
|
|
(stdo, _) = pc.communicate()
|
|
|
|
if pc.returncode == 0:
|
|
|
|
archpath = stdo.decode().strip()
|
|
|
|
return 'lib/' + archpath
|
|
|
|
except Exception:
|
|
|
|
pass
|
|
|
|
if is_freebsd() or is_irix():
|
|
|
|
return 'lib'
|
|
|
|
if os.path.isdir('/usr/lib64') and not os.path.islink('/usr/lib64'):
|
|
|
|
return 'lib64'
|
|
|
|
return 'lib'
|
|
|
|
|
|
|
|
|
|
|
|
def default_libexecdir() -> str:
|
|
|
|
# There is no way to auto-detect this, so it must be set at build time
|
|
|
|
return 'libexec'
|
|
|
|
|
|
|
|
|
|
|
|
def default_prefix() -> str:
|
|
|
|
return 'c:/' if is_windows() else '/usr/local'
|
|
|
|
|
|
|
|
|
|
|
|
def get_library_dirs() -> T.List[str]:
|
|
|
|
if is_windows():
|
|
|
|
return ['C:/mingw/lib'] # TODO: get programmatically
|
|
|
|
if is_osx():
|
|
|
|
return ['/usr/lib'] # TODO: get programmatically
|
|
|
|
# The following is probably Debian/Ubuntu specific.
|
|
|
|
# /usr/local/lib is first because it contains stuff
|
|
|
|
# installed by the sysadmin and is probably more up-to-date
|
|
|
|
# than /usr/lib. If you feel that this search order is
|
|
|
|
# problematic, please raise the issue on the mailing list.
|
|
|
|
unixdirs = ['/usr/local/lib', '/usr/lib', '/lib']
|
|
|
|
|
|
|
|
if is_freebsd():
|
|
|
|
return unixdirs
|
|
|
|
# FIXME: this needs to be further genericized for aarch64 etc.
|
|
|
|
machine = platform.machine()
|
|
|
|
if machine in ('i386', 'i486', 'i586', 'i686'):
|
|
|
|
plat = 'i386'
|
|
|
|
elif machine.startswith('arm'):
|
|
|
|
plat = 'arm'
|
|
|
|
else:
|
|
|
|
plat = ''
|
|
|
|
|
|
|
|
# Solaris puts 32-bit libraries in the main /lib & /usr/lib directories
|
|
|
|
# and 64-bit libraries in platform specific subdirectories.
|
|
|
|
if is_sunos():
|
|
|
|
if machine == 'i86pc':
|
|
|
|
plat = 'amd64'
|
|
|
|
elif machine.startswith('sun4'):
|
|
|
|
plat = 'sparcv9'
|
|
|
|
|
|
|
|
usr_platdir = Path('/usr/lib/') / plat
|
|
|
|
if usr_platdir.is_dir():
|
|
|
|
unixdirs += [str(x) for x in (usr_platdir).iterdir() if x.is_dir()]
|
|
|
|
if os.path.exists('/usr/lib64'):
|
|
|
|
unixdirs.append('/usr/lib64')
|
|
|
|
|
|
|
|
lib_platdir = Path('/lib/') / plat
|
|
|
|
if lib_platdir.is_dir():
|
|
|
|
unixdirs += [str(x) for x in (lib_platdir).iterdir() if x.is_dir()]
|
|
|
|
if os.path.exists('/lib64'):
|
|
|
|
unixdirs.append('/lib64')
|
|
|
|
|
|
|
|
return unixdirs
|
|
|
|
|
|
|
|
|
|
|
|
def has_path_sep(name: str, sep: str = '/\\') -> bool:
|
|
|
|
'Checks if any of the specified @sep path separators are in @name'
|
|
|
|
for each in sep:
|
|
|
|
if each in name:
|
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
|
|
|
|
|
|
|
if is_windows():
|
|
|
|
# shlex.split is not suitable for splitting command lines on Windows (https://bugs.python.org/issue1724822);
|
|
|
|
# shlex.quote is similarly problematic. Below are "proper" implementations of these functions according to
|
|
|
|
# https://docs.microsoft.com/en-us/cpp/c-language/parsing-c-command-line-arguments and
|
|
|
|
# https://blogs.msdn.microsoft.com/twistylittlepassagesallalike/2011/04/23/everyone-quotes-command-line-arguments-the-wrong-way/
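    #
    # As a rough illustration of those rules: quote_arg('a b') should give
    # "a b" (including the quotes), and an embedded double quote gets a
    # backslash, so quote_arg('say "hi"') should give "say \"hi\"".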
|
|
|
|
|
|
|
|
_whitespace = ' \t\n\r'
|
|
|
|
_find_unsafe_char = re.compile(fr'[{_whitespace}"]').search
|
|
|
|
|
|
|
|
def quote_arg(arg: str) -> str:
|
|
|
|
if arg and not _find_unsafe_char(arg):
|
|
|
|
return arg
|
|
|
|
|
|
|
|
result = '"'
|
|
|
|
num_backslashes = 0
|
|
|
|
for c in arg:
|
|
|
|
if c == '\\':
|
|
|
|
num_backslashes += 1
|
|
|
|
else:
|
|
|
|
if c == '"':
|
|
|
|
# Escape all backslashes and the following double quotation mark
|
|
|
|
num_backslashes = num_backslashes * 2 + 1
|
|
|
|
|
|
|
|
result += num_backslashes * '\\' + c
|
|
|
|
num_backslashes = 0
|
|
|
|
|
|
|
|
# Escape all backslashes, but let the terminating double quotation
|
|
|
|
# mark we add below be interpreted as a metacharacter
|
|
|
|
result += (num_backslashes * 2) * '\\' + '"'
|
|
|
|
return result
|
|
|
|
|
|
|
|
def split_args(cmd: str) -> T.List[str]:
|
|
|
|
result = []
|
|
|
|
arg = ''
|
|
|
|
num_backslashes = 0
|
|
|
|
num_quotes = 0
|
|
|
|
in_quotes = False
|
|
|
|
for c in cmd:
|
|
|
|
if c == '\\':
|
|
|
|
num_backslashes += 1
|
|
|
|
else:
|
|
|
|
if c == '"' and not num_backslashes % 2:
|
|
|
|
# unescaped quote, eat it
|
|
|
|
arg += (num_backslashes // 2) * '\\'
|
|
|
|
num_quotes += 1
|
|
|
|
in_quotes = not in_quotes
|
|
|
|
elif c in _whitespace and not in_quotes:
|
|
|
|
if arg or num_quotes:
|
|
|
|
# reached the end of the argument
|
|
|
|
result.append(arg)
|
|
|
|
arg = ''
|
|
|
|
num_quotes = 0
|
|
|
|
else:
|
|
|
|
if c == '"':
|
|
|
|
# escaped quote
|
|
|
|
num_backslashes = (num_backslashes - 1) // 2
|
|
|
|
|
|
|
|
arg += num_backslashes * '\\' + c
|
|
|
|
|
|
|
|
num_backslashes = 0
|
|
|
|
|
|
|
|
if arg or num_quotes:
|
|
|
|
result.append(arg)
|
|
|
|
|
|
|
|
return result
|
|
|
|
else:
|
|
|
|
def quote_arg(arg: str) -> str:
|
|
|
|
return shlex.quote(arg)
|
|
|
|
|
|
|
|
def split_args(cmd: str) -> T.List[str]:
|
|
|
|
return shlex.split(cmd)
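# On either platform, split_args('a "b c" d') is expected to yield
# ['a', 'b c', 'd'] (hedged illustration only).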
|
|
|
|
|
|
|
|
|
|
|
|
def join_args(args: T.Iterable[str]) -> str:
|
|
|
|
return ' '.join([quote_arg(x) for x in args])
|
|
|
|
|
|
|
|
|
|
|
|
def do_replacement(regex: T.Pattern[str], line: str, variable_format: str,
|
|
|
|
confdata: T.Union[T.Dict[str, T.Tuple[str, T.Optional[str]]], 'ConfigurationData']) -> T.Tuple[str, T.Set[str]]:
|
|
|
|
missing_variables = set() # type: T.Set[str]
|
|
|
|
if variable_format == 'cmake':
|
|
|
|
start_tag = '${'
|
|
|
|
backslash_tag = '\\${'
|
|
|
|
else:
|
|
|
|
assert variable_format in ['meson', 'cmake@']
|
|
|
|
start_tag = '@'
|
|
|
|
backslash_tag = '\\@'
|
|
|
|
|
|
|
|
def variable_replace(match: T.Match[str]) -> str:
|
|
|
|
# Pairs of escape characters before '@' or '\@'
|
|
|
|
if match.group(0).endswith('\\'):
|
|
|
|
num_escapes = match.end(0) - match.start(0)
|
|
|
|
return '\\' * (num_escapes // 2)
|
|
|
|
# Single escape character and '@'
|
|
|
|
elif match.group(0) == backslash_tag:
|
|
|
|
return start_tag
|
|
|
|
# Template variable to be replaced
|
|
|
|
else:
|
|
|
|
varname = match.group(1)
|
|
|
|
var_str = ''
|
|
|
|
if varname in confdata:
|
|
|
|
var, _ = confdata.get(varname)
|
|
|
|
if isinstance(var, str):
|
|
|
|
var_str = var
|
|
|
|
elif isinstance(var, int):
|
|
|
|
var_str = str(var)
|
|
|
|
else:
|
|
|
|
msg = f'Tried to replace variable {varname!r} value with ' \
|
|
|
|
f'something other than a string or int: {var!r}'
|
|
|
|
raise MesonException(msg)
|
|
|
|
else:
|
|
|
|
missing_variables.add(varname)
|
|
|
|
return var_str
|
|
|
|
return re.sub(regex, variable_replace, line), missing_variables
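# Hedged example of the substitution above, using a plain dict in place of a
# ConfigurationData object:
#   do_replacement(get_variable_regex('meson'), 'Hello @NAME@', 'meson',
#                  {'NAME': ('world', None)})
# is expected to return ('Hello world', set()).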
|
|
|
|
|
|
|
|
def do_define(regex: T.Pattern[str], line: str, confdata: 'ConfigurationData', variable_format: str) -> str:
|
|
|
|
def get_cmake_define(line: str, confdata: 'ConfigurationData') -> str:
|
|
|
|
arr = line.split()
|
|
|
|
define_value = []
|
|
|
|
for token in arr[2:]:
|
|
|
|
try:
|
|
|
|
(v, desc) = confdata.get(token)
|
|
|
|
define_value += [str(v)]
|
|
|
|
except KeyError:
|
|
|
|
define_value += [token]
|
|
|
|
return ' '.join(define_value)
|
|
|
|
|
|
|
|
arr = line.split()
|
|
|
|
if variable_format == 'meson' and len(arr) != 2:
|
|
|
|
raise MesonException('#mesondefine does not contain exactly two tokens: %s' % line.strip())
|
|
|
|
|
|
|
|
varname = arr[1]
|
|
|
|
try:
|
|
|
|
(v, desc) = confdata.get(varname)
|
|
|
|
except KeyError:
|
|
|
|
return '/* #undef %s */\n' % varname
|
|
|
|
if isinstance(v, bool):
|
|
|
|
if v:
|
|
|
|
return '#define %s\n' % varname
|
|
|
|
else:
|
|
|
|
return '#undef %s\n' % varname
|
|
|
|
elif isinstance(v, int):
|
|
|
|
return '#define %s %d\n' % (varname, v)
|
|
|
|
elif isinstance(v, str):
|
|
|
|
if variable_format == 'meson':
|
|
|
|
result = v
|
|
|
|
else:
|
|
|
|
result = get_cmake_define(line, confdata)
|
|
|
|
result = f'#define {varname} {result}\n'
|
|
|
|
(result, missing_variable) = do_replacement(regex, result, variable_format, confdata)
|
|
|
|
return result
|
|
|
|
else:
|
|
|
|
raise MesonException('#mesondefine argument "%s" is of unknown type.' % varname)
|
|
|
|
|
|
|
|
def get_variable_regex(variable_format: str = 'meson') -> T.Pattern[str]:
|
|
|
|
# Only allow (a-z, A-Z, 0-9, _, -) as valid characters for a define
|
|
|
|
# Also allow escaping '@' with '\@'
|
|
|
|
if variable_format in ['meson', 'cmake@']:
|
|
|
|
regex = re.compile(r'(?:\\\\)+(?=\\?@)|\\@|@([-a-zA-Z0-9_]+)@')
|
|
|
|
elif variable_format == 'cmake':
|
|
|
|
regex = re.compile(r'(?:\\\\)+(?=\\?\$)|\\\${|\${([-a-zA-Z0-9_]+)}')
|
|
|
|
else:
|
|
|
|
raise MesonException(f'Format "{variable_format}" not handled')
|
|
|
|
return regex
|
|
|
|
|
|
|
|
def do_conf_str(src: str, data: list, confdata: 'ConfigurationData', variable_format: str,
|
|
|
|
encoding: str = 'utf-8') -> T.Tuple[T.List[str], T.Set[str], bool]:
|
|
|
|
def line_is_valid(line: str, variable_format: str) -> bool:
|
|
|
|
if variable_format == 'meson':
|
|
|
|
if '#cmakedefine' in line:
|
|
|
|
return False
|
|
|
|
else: # cmake format
|
|
|
|
if '#mesondefine' in line:
|
|
|
|
return False
|
|
|
|
return True
|
|
|
|
|
|
|
|
regex = get_variable_regex(variable_format)
|
|
|
|
|
|
|
|
search_token = '#mesondefine'
|
|
|
|
if variable_format != 'meson':
|
|
|
|
search_token = '#cmakedefine'
|
|
|
|
|
|
|
|
result = []
|
|
|
|
missing_variables = set()
|
|
|
|
# Detect when the configuration data is empty and no tokens were found
|
|
|
|
# during substitution so we can warn the user to use the `copy:` kwarg.
|
|
|
|
confdata_useless = not confdata.keys()
|
|
|
|
for line in data:
|
|
|
|
if line.startswith(search_token):
|
|
|
|
confdata_useless = False
|
|
|
|
line = do_define(regex, line, confdata, variable_format)
|
|
|
|
else:
|
|
|
|
if not line_is_valid(line, variable_format):
|
|
|
|
raise MesonException(f'Format error in {src}: saw "{line.strip()}" when format set to "{variable_format}"')
|
|
|
|
line, missing = do_replacement(regex, line, variable_format, confdata)
|
|
|
|
missing_variables.update(missing)
|
|
|
|
if missing:
|
|
|
|
confdata_useless = False
|
|
|
|
result.append(line)
|
|
|
|
|
|
|
|
return result, missing_variables, confdata_useless
|
|
|
|
|
|
|
|
def do_conf_file(src: str, dst: str, confdata: 'ConfigurationData', variable_format: str,
|
|
|
|
encoding: str = 'utf-8') -> T.Tuple[T.Set[str], bool]:
|
|
|
|
try:
|
|
|
|
with open(src, encoding=encoding, newline='') as f:
|
|
|
|
data = f.readlines()
|
|
|
|
except Exception as e:
|
|
|
|
raise MesonException(f'Could not read input file {src}: {e!s}')
|
|
|
|
|
|
|
|
(result, missing_variables, confdata_useless) = do_conf_str(src, data, confdata, variable_format, encoding)
|
|
|
|
dst_tmp = dst + '~'
|
|
|
|
try:
|
|
|
|
with open(dst_tmp, 'w', encoding=encoding, newline='') as f:
|
|
|
|
f.writelines(result)
|
|
|
|
except Exception as e:
|
|
|
|
raise MesonException(f'Could not write output file {dst}: {e!s}')
|
|
|
|
shutil.copymode(src, dst_tmp)
|
|
|
|
replace_if_different(dst, dst_tmp)
|
|
|
|
return missing_variables, confdata_useless
|
|
|
|
|
|
|
|
CONF_C_PRELUDE = '''/*
|
|
|
|
* Autogenerated by the Meson build system.
|
|
|
|
* Do not edit, your changes will be lost.
|
|
|
|
*/
|
|
|
|
|
|
|
|
#pragma once
|
|
|
|
|
|
|
|
'''
|
|
|
|
|
|
|
|
CONF_NASM_PRELUDE = '''; Autogenerated by the Meson build system.
|
|
|
|
; Do not edit, your changes will be lost.
|
|
|
|
|
|
|
|
'''
|
|
|
|
|
|
|
|
def dump_conf_header(ofilename: str, cdata: 'ConfigurationData', output_format: str) -> None:
|
|
|
|
if output_format == 'c':
|
|
|
|
prelude = CONF_C_PRELUDE
|
|
|
|
prefix = '#'
|
|
|
|
elif output_format == 'nasm':
|
|
|
|
prelude = CONF_NASM_PRELUDE
|
|
|
|
prefix = '%'
|
|
|
|
else:
|
|
|
|
raise MesonBugException(f'Undefined output_format: "{output_format}"')
|
|
|
|
|
|
|
|
ofilename_tmp = ofilename + '~'
|
|
|
|
with open(ofilename_tmp, 'w', encoding='utf-8') as ofile:
|
|
|
|
ofile.write(prelude)
|
|
|
|
for k in sorted(cdata.keys()):
|
|
|
|
(v, desc) = cdata.get(k)
|
|
|
|
if desc:
|
|
|
|
if output_format == 'c':
|
|
|
|
ofile.write('/* %s */\n' % desc)
|
|
|
|
elif output_format == 'nasm':
|
|
|
|
for line in desc.split('\n'):
|
|
|
|
ofile.write('; %s\n' % line)
|
|
|
|
if isinstance(v, bool):
|
|
|
|
if v:
|
|
|
|
ofile.write(f'{prefix}define {k}\n\n')
|
|
|
|
else:
|
|
|
|
ofile.write(f'{prefix}undef {k}\n\n')
|
|
|
|
elif isinstance(v, (int, str)):
|
|
|
|
ofile.write(f'{prefix}define {k} {v}\n\n')
|
|
|
|
else:
|
|
|
|
raise MesonException('Unknown data type in configuration file entry: ' + k)
|
|
|
|
replace_if_different(ofilename, ofilename_tmp)
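# For instance (per the branches above), a boolean entry FOO=true comes out as
# '#define FOO' in the C output and '%define FOO' in the NASM output.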
|
|
|
|
|
|
|
|
|
|
|
|
def replace_if_different(dst: str, dst_tmp: str) -> None:
|
|
|
|
# If contents are identical, don't touch the file to prevent
|
|
|
|
# unnecessary rebuilds.
|
|
|
|
different = True
|
|
|
|
try:
|
|
|
|
with open(dst, 'rb') as f1, open(dst_tmp, 'rb') as f2:
|
|
|
|
if f1.read() == f2.read():
|
|
|
|
different = False
|
|
|
|
except FileNotFoundError:
|
|
|
|
pass
|
|
|
|
if different:
|
|
|
|
os.replace(dst_tmp, dst)
|
|
|
|
else:
|
|
|
|
os.unlink(dst_tmp)
|
|
|
|
|
|
|
|
|
|
|
|
def listify(item: T.Any, flatten: bool = True) -> T.List[T.Any]:
|
|
|
|
'''
|
|
|
|
Returns a list with all args embedded in a list if they are not a list.
|
|
|
|
This function preserves order.
|
|
|
|
@flatten: Convert lists of lists to a flat list
|
|
|
|
'''
|
|
|
|
if not isinstance(item, list):
|
|
|
|
return [item]
|
|
|
|
result = [] # type: T.List[T.Any]
|
|
|
|
for i in item:
|
|
|
|
if flatten and isinstance(i, list):
|
|
|
|
result += listify(i, flatten=True)
|
|
|
|
else:
|
|
|
|
result.append(i)
|
|
|
|
return result
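# e.g. (illustration only) listify([1, [2, [3]]]) should flatten to [1, 2, 3],
# while listify('foo') should give ['foo'].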
|
|
|
|
|
|
|
|
|
|
|
|
def extract_as_list(dict_object: T.Dict[_T, _U], key: _T, pop: bool = False) -> T.List[_U]:
|
|
|
|
'''
|
|
|
|
Extracts the value of the given key from dict_object and listifies it.
|
|
|
|
'''
|
|
|
|
fetch = dict_object.get
|
|
|
|
if pop:
|
|
|
|
fetch = dict_object.pop
|
|
|
|
# A missing key yields an empty list; the value is always returned listified.
|
|
|
|
return listify(fetch(key, []), flatten=True)
|
|
|
|
|
|
|
|
|
|
|
|
def typeslistify(item: 'T.Union[_T, T.Sequence[_T]]',
|
|
|
|
types: 'T.Union[T.Type[_T], T.Tuple[T.Type[_T]]]') -> T.List[_T]:
|
|
|
|
'''
|
|
|
|
Ensure that type(@item) is one of @types or a
|
|
|
|
list of items all of which are of type @types
|
|
|
|
'''
|
|
|
|
if isinstance(item, types):
|
|
|
|
item = T.cast(T.List[_T], [item])
|
|
|
|
if not isinstance(item, list):
|
|
|
|
raise MesonException('Item must be a list or one of {!r}, not {!r}'.format(types, type(item)))
|
|
|
|
for i in item:
|
|
|
|
if i is not None and not isinstance(i, types):
|
|
|
|
raise MesonException('List item must be one of {!r}, not {!r}'.format(types, type(i)))
|
|
|
|
return item
|
|
|
|
|
|
|
|
|
|
|
|
def stringlistify(item: T.Union[T.Any, T.Sequence[T.Any]]) -> T.List[str]:
|
|
|
|
    return typeslistify(item, str)


def expand_arguments(args: T.Iterable[str]) -> T.Optional[T.List[str]]:
    expanded_args = []  # type: T.List[str]
    for arg in args:
        if not arg.startswith('@'):
            expanded_args.append(arg)
            continue

        args_file = arg[1:]
        try:
            with open(args_file, encoding='utf-8') as f:
                extended_args = f.read().split()
            expanded_args += extended_args
        except Exception as e:
            mlog.error('Expanding command line arguments:', args_file, 'not found')
            mlog.exception(e)
            return None
    return expanded_args


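# Illustrative usage (an editor's sketch, not from the original module). Assuming
# a hypothetical response file 'extra_args.txt' whose contents are
# "-Dfoo=bar -Dbaz=true", any argument prefixed with '@' is expanded in place:
#
#     expand_arguments(['--reconfigure', '@extra_args.txt'])
#     # -> ['--reconfigure', '-Dfoo=bar', '-Dbaz=true']
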
def partition(pred: T.Callable[[_T], object], iterable: T.Iterable[_T]) -> T.Tuple[T.Iterator[_T], T.Iterator[_T]]:
    """Use a predicate to partition entries into false entries and true
    entries.

    >>> x, y = partition(is_odd, range(10))
    >>> (list(x), list(y))
    ([0, 2, 4, 6, 8], [1, 3, 5, 7, 9])
    """
    t1, t2 = tee(iterable)
    return filterfalse(pred, t1), filter(pred, t2)


def Popen_safe(args: T.List[str], write: T.Optional[str] = None,
               stdout: T.Union[T.TextIO, T.BinaryIO, int] = subprocess.PIPE,
               stderr: T.Union[T.TextIO, T.BinaryIO, int] = subprocess.PIPE,
               **kwargs: T.Any) -> T.Tuple['subprocess.Popen[str]', str, str]:
    import locale
    encoding = locale.getpreferredencoding()
    # Redirect stdin to DEVNULL otherwise the command run by us here might mess
    # up the console and ANSI colors will stop working on Windows.
    if 'stdin' not in kwargs:
        kwargs['stdin'] = subprocess.DEVNULL
    if not sys.stdout.encoding or encoding.upper() != 'UTF-8':
        p, o, e = Popen_safe_legacy(args, write=write, stdout=stdout, stderr=stderr, **kwargs)
    else:
        p = subprocess.Popen(args, universal_newlines=True, close_fds=False,
                             stdout=stdout, stderr=stderr, **kwargs)
        o, e = p.communicate(write)
    # Sometimes the command that we run will call another command which will be
    # run without the above stdin workaround, so set the console mode again just
    # in case.
    mlog.setup_console()
    return p, o, e


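# Illustrative usage (an editor's sketch, not from the original module). Assuming
# a 'git' executable is available on PATH, stdout/stderr are captured and decoded
# to str while stdin is redirected to DEVNULL:
#
#     p, out, err = Popen_safe(['git', '--version'])
#     # p.returncode == 0 and out starts with 'git version'
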
def Popen_safe_legacy(args: T.List[str], write: T.Optional[str] = None,
                      stdout: T.Union[T.TextIO, T.BinaryIO, int] = subprocess.PIPE,
                      stderr: T.Union[T.TextIO, T.BinaryIO, int] = subprocess.PIPE,
                      **kwargs: T.Any) -> T.Tuple['subprocess.Popen[str]', str, str]:
    p = subprocess.Popen(args, universal_newlines=False, close_fds=False,
                         stdout=stdout, stderr=stderr, **kwargs)
    input_ = None  # type: T.Optional[bytes]
    if write is not None:
        input_ = write.encode('utf-8')
    o, e = p.communicate(input_)
    if o is not None:
        if sys.stdout.encoding:
            o = o.decode(encoding=sys.stdout.encoding, errors='replace').replace('\r\n', '\n')
        else:
            o = o.decode(errors='replace').replace('\r\n', '\n')
    if e is not None:
        if sys.stderr.encoding:
            e = e.decode(encoding=sys.stderr.encoding, errors='replace').replace('\r\n', '\n')
        else:
            e = e.decode(errors='replace').replace('\r\n', '\n')
    return p, o, e


def iter_regexin_iter(regexiter: T.Iterable[str], initer: T.Iterable[str]) -> T.Optional[str]:
    '''
    Takes each regular expression in @regexiter and tries to search for it in
    every item in @initer. If there is a match, returns that match.
    Else returns None.
    '''
    for regex in regexiter:
        for ii in initer:
            if not isinstance(ii, str):
                continue
            match = re.search(regex, ii)
            if match:
                return match.group()
    return None


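# Illustrative usage (an editor's sketch, not from the original module): searching
# a command list for any input-style template returns the first matching token.
#
#     iter_regexin_iter(['@INPUT([0-9]+)?@'], ['-c', '@INPUT0@'])
#     # -> '@INPUT0@'
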
def _substitute_values_check_errors(command: T.List[str], values: T.Dict[str, T.Union[str, T.List[str]]]) -> None:
    # Error checking
    inregex = ['@INPUT([0-9]+)?@', '@PLAINNAME@', '@BASENAME@']  # type: T.List[str]
    outregex = ['@OUTPUT([0-9]+)?@', '@OUTDIR@']  # type: T.List[str]
    if '@INPUT@' not in values:
        # Error out if any input-derived templates are present in the command
        match = iter_regexin_iter(inregex, command)
        if match:
            raise MesonException(f'Command cannot have {match!r}, since no input files were specified')
    else:
        if len(values['@INPUT@']) > 1:
            # Error out if @PLAINNAME@ or @BASENAME@ is present in the command
            match = iter_regexin_iter(inregex[1:], command)
            if match:
                raise MesonException(f'Command cannot have {match!r} when there is '
                                     'more than one input file')
        # Error out if an invalid @INPUTnn@ template was specified
        for each in command:
            if not isinstance(each, str):
                continue
            match2 = re.search(inregex[0], each)
            if match2 and match2.group() not in values:
                m = 'Command cannot have {!r} since there are only {!r} inputs'
                raise MesonException(m.format(match2.group(), len(values['@INPUT@'])))
    if '@OUTPUT@' not in values:
        # Error out if any output-derived templates are present in the command
        match = iter_regexin_iter(outregex, command)
        if match:
            raise MesonException(f'Command cannot have {match!r} since there are no outputs')
    else:
        # Error out if an invalid @OUTPUTnn@ template was specified
        for each in command:
            if not isinstance(each, str):
                continue
            match2 = re.search(outregex[0], each)
            if match2 and match2.group() not in values:
                m = 'Command cannot have {!r} since there are only {!r} outputs'
                raise MesonException(m.format(match2.group(), len(values['@OUTPUT@'])))


def substitute_values(command: T.List[str], values: T.Dict[str, T.Union[str, T.List[str]]]) -> T.List[str]:
    '''
    Substitute the template strings in the @values dict into the list of
    strings @command and return a new list. For a full list of the templates,
    see get_filenames_templates_dict()

    If multiple inputs/outputs are given in the @values dictionary, we
    substitute @INPUT@ and @OUTPUT@ only if they are the entire string, not
    just a part of it, and in that case we substitute *all* of them.

    The typing of this function is difficult, as only @OUTPUT@ and @INPUT@ can
    be lists, everything else is a string. However, TypedDict cannot represent
    this, as you can have optional keys, but not extra keys. We end up just
    having to use asserts to convince type checkers that this is okay.

    https://github.com/python/mypy/issues/4617
    '''

    def replace(m: T.Match[str]) -> str:
        v = values[m.group(0)]
        assert isinstance(v, str), 'for mypy'
        return v

    # Error checking
    _substitute_values_check_errors(command, values)

    # Substitution
    outcmd = []  # type: T.List[str]
    rx_keys = [re.escape(key) for key in values if key not in ('@INPUT@', '@OUTPUT@')]
    value_rx = re.compile('|'.join(rx_keys)) if rx_keys else None
    for vv in command:
        more: T.Optional[str] = None
        if not isinstance(vv, str):
            outcmd.append(vv)
        elif '@INPUT@' in vv:
            inputs = values['@INPUT@']
            if vv == '@INPUT@':
                outcmd += inputs
            elif len(inputs) == 1:
                outcmd.append(vv.replace('@INPUT@', inputs[0]))
            else:
                raise MesonException("Command has '@INPUT@' as part of a "
                                     "string and more than one input file")
        elif '@OUTPUT@' in vv:
            outputs = values['@OUTPUT@']
            if vv == '@OUTPUT@':
                outcmd += outputs
            elif len(outputs) == 1:
                outcmd.append(vv.replace('@OUTPUT@', outputs[0]))
            else:
                raise MesonException("Command has '@OUTPUT@' as part of a "
                                     "string and more than one output file")

        # Append values that are exactly a template string.
        # This is faster than a string replace.
        elif vv in values:
            o = values[vv]
            assert isinstance(o, str), 'for mypy'
            more = o
        # Substitute everything else with replacement
        elif value_rx:
            more = value_rx.sub(replace, vv)
        else:
            more = vv

        if more is not None:
            outcmd.append(more)

    return outcmd


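# Illustrative usage (an editor's sketch, not from the original module), built on
# the companion get_filenames_templates_dict() defined below, with hypothetical
# file names:
#
#     values = get_filenames_templates_dict(['foo.c.in'], ['foo.c'])
#     substitute_values(['@INPUT@', '-o', '@OUTPUT0@'], values)
#     # -> ['foo.c.in', '-o', 'foo.c']
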
def get_filenames_templates_dict(inputs: T.List[str], outputs: T.List[str]) -> T.Dict[str, T.Union[str, T.List[str]]]:
    '''
    Create a dictionary with template strings as keys and values as values for
    the following templates:

    @INPUT@ - the full path to one or more input files, from @inputs
    @OUTPUT@ - the full path to one or more output files, from @outputs
    @OUTDIR@ - the full path to the directory containing the output files

    If there is only one input file, the following keys are also created:

    @PLAINNAME@ - the filename of the input file
    @BASENAME@ - the filename of the input file with the extension removed

    If there is more than one input file, the following keys are also created:

    @INPUT0@, @INPUT1@, ... one for each input file

    If there is more than one output file, the following keys are also created:

    @OUTPUT0@, @OUTPUT1@, ... one for each output file
    '''
    values = {}  # type: T.Dict[str, T.Union[str, T.List[str]]]
    # Gather values derived from the input
    if inputs:
        # We want to substitute all the inputs.
        values['@INPUT@'] = inputs
        for (ii, vv) in enumerate(inputs):
            # Write out @INPUT0@, @INPUT1@, ...
            values[f'@INPUT{ii}@'] = vv
        if len(inputs) == 1:
            # Just one value, substitute @PLAINNAME@ and @BASENAME@
            values['@PLAINNAME@'] = plain = os.path.basename(inputs[0])
            values['@BASENAME@'] = os.path.splitext(plain)[0]
    if outputs:
        # Gather values derived from the outputs, similar to above.
        values['@OUTPUT@'] = outputs
        for (ii, vv) in enumerate(outputs):
            values[f'@OUTPUT{ii}@'] = vv
        # Outdir should be the same for all outputs
        values['@OUTDIR@'] = os.path.dirname(outputs[0])
        # Many external programs fail on empty arguments.
        if values['@OUTDIR@'] == '':
            values['@OUTDIR@'] = '.'
    return values


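# Illustrative output (an editor's sketch, not from the original module), for the
# hypothetical paths 'src/foo.c.in' and 'build/foo.c':
#
#     get_filenames_templates_dict(['src/foo.c.in'], ['build/foo.c'])
#     # -> {'@INPUT@': ['src/foo.c.in'], '@INPUT0@': 'src/foo.c.in',
#     #     '@PLAINNAME@': 'foo.c.in', '@BASENAME@': 'foo.c',
#     #     '@OUTPUT@': ['build/foo.c'], '@OUTPUT0@': 'build/foo.c',
#     #     '@OUTDIR@': 'build'}
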
def _make_tree_writable(topdir: str) -> None:
    # Ensure all files and directories under topdir are writable
    # (and readable) by owner.
    for d, _, files in os.walk(topdir):
        os.chmod(d, os.stat(d).st_mode | stat.S_IWRITE | stat.S_IREAD)
        for fname in files:
            fpath = os.path.join(d, fname)
            if os.path.isfile(fpath):
                os.chmod(fpath, os.stat(fpath).st_mode | stat.S_IWRITE | stat.S_IREAD)


def windows_proof_rmtree(f: str) -> None:
    # On Windows, if anyone is holding a file open you can't delete it. For
    # example, an anti-virus scanner might be scanning files you are trying
    # to delete. The only way to fix this is to try again and again.
    delays = [0.1, 0.1, 0.2, 0.2, 0.2, 0.5, 0.5, 1, 1, 1, 1, 2]
    writable = False
    for d in delays:
        try:
            # Start by making the tree writable.
            if not writable:
                _make_tree_writable(f)
                writable = True
        except PermissionError:
            time.sleep(d)
            continue
        try:
            shutil.rmtree(f)
            return
        except FileNotFoundError:
            return
        except OSError:
            time.sleep(d)
    # Try one last time and throw if it fails.
    shutil.rmtree(f)


def windows_proof_rm(fpath: str) -> None:
    """Like windows_proof_rmtree, but for a single file."""
    if os.path.isfile(fpath):
        os.chmod(fpath, os.stat(fpath).st_mode | stat.S_IWRITE | stat.S_IREAD)
    delays = [0.1, 0.1, 0.2, 0.2, 0.2, 0.5, 0.5, 1, 1, 1, 1, 2]
    for d in delays:
        try:
            os.unlink(fpath)
            return
        except FileNotFoundError:
            return
        except OSError:
            time.sleep(d)
    os.unlink(fpath)


class TemporaryDirectoryWinProof(TemporaryDirectory):
    """
    Like TemporaryDirectory, but cleans things up using
    windows_proof_rmtree()
    """

    def __exit__(self, exc: T.Any, value: T.Any, tb: T.Any) -> None:
        try:
            super().__exit__(exc, value, tb)
        except OSError:
            windows_proof_rmtree(self.name)

    def cleanup(self) -> None:
        try:
            super().cleanup()
        except OSError:
            windows_proof_rmtree(self.name)


def detect_subprojects(spdir_name: str, current_dir: str = '',
                       result: T.Optional[T.Dict[str, T.List[str]]] = None) -> T.Optional[T.Dict[str, T.List[str]]]:
    if result is None:
        result = {}
    spdir = os.path.join(current_dir, spdir_name)
    if not os.path.exists(spdir):
        return result
    for trial in glob(os.path.join(spdir, '*')):
        basename = os.path.basename(trial)
        if trial == 'packagecache':
            continue
        append_this = True
        if os.path.isdir(trial):
            detect_subprojects(spdir_name, trial, result)
        elif trial.endswith('.wrap') and os.path.isfile(trial):
            basename = os.path.splitext(basename)[0]
        else:
            append_this = False
        if append_this:
            if basename in result:
                result[basename].append(trial)
            else:
                result[basename] = [trial]
    return result


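# Illustrative behaviour (an editor's sketch, not from the original module), for
# a hypothetical tree containing 'subprojects/zlib.wrap' and a checked-out
# directory 'subprojects/sqlite':
#
#     detect_subprojects('subprojects')
#     # -> {'zlib': ['subprojects/zlib.wrap'], 'sqlite': ['subprojects/sqlite']}
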
def substring_is_in_list(substr: str, strlist: T.List[str]) -> bool:
    for s in strlist:
        if substr in s:
            return True
    return False


class OrderedSet(T.MutableSet[_T]):
    """A set that preserves the order in which items are added, by first
    insertion.
    """
    def __init__(self, iterable: T.Optional[T.Iterable[_T]] = None):
        # typing.OrderedDict is new in 3.7.2, so we can't use that, but we can
        # use MutableMapping, which is fine in this case.
        self.__container = collections.OrderedDict()  # type: T.MutableMapping[_T, None]
        if iterable:
            self.update(iterable)

    def __contains__(self, value: object) -> bool:
        return value in self.__container

    def __iter__(self) -> T.Iterator[_T]:
        return iter(self.__container.keys())

    def __len__(self) -> int:
        return len(self.__container)

    def __repr__(self) -> str:
        # Don't print 'OrderedSet("")' for an empty set.
        if self.__container:
            return 'OrderedSet("{}")'.format(
                '", "'.join(repr(e) for e in self.__container.keys()))
        return 'OrderedSet()'

    def __reversed__(self) -> T.Iterator[_T]:
        # Mypy is complaining that sets can't be reversed, which is true for
        # unordered sets, but this is an ordered set, so reversing makes sense.
        return reversed(self.__container.keys())  # type: ignore

    def add(self, value: _T) -> None:
        self.__container[value] = None

    def discard(self, value: _T) -> None:
        if value in self.__container:
            del self.__container[value]

    def move_to_end(self, value: _T, last: bool = True) -> None:
        # Mypy does not know about move_to_end, because it is not part of MutableMapping
        self.__container.move_to_end(value, last)  # type: ignore

    def pop(self, last: bool = True) -> _T:
        # Mypy does not know about the last argument, because it is not part of MutableMapping
        item, _ = self.__container.popitem(last)  # type: ignore
        return item

    def update(self, iterable: T.Iterable[_T]) -> None:
        for item in iterable:
            self.__container[item] = None

    def difference(self, set_: T.Union[T.Set[_T], 'OrderedSet[_T]']) -> 'OrderedSet[_T]':
        return type(self)(e for e in self if e not in set_)


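# Illustrative usage (an editor's sketch, not from the original module):
# duplicates are dropped and first-insertion order is preserved.
#
#     s = OrderedSet(['b', 'a', 'b', 'c'])
#     list(s)                    # -> ['b', 'a', 'c']
#     list(s.difference({'a'}))  # -> ['b', 'c']
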
def relpath(path: str, start: str) -> str:
    # On Windows a relative path can't be evaluated for paths on two different
    # drives (e.g. c:\foo and f:\bar). The only thing left to do is to use the
    # original absolute path.
    try:
        return os.path.relpath(path, start)
    except (TypeError, ValueError):
        return path


def path_is_in_root(path: Path, root: Path, resolve: bool = False) -> bool:
    # Check whether a path is within the root directory root
    try:
        if resolve:
            path.resolve().relative_to(root.resolve())
        else:
            path.relative_to(root)
    except ValueError:
        return False
    return True


def relative_to_if_possible(path: Path, root: Path, resolve: bool = False) -> Path:
    try:
        if resolve:
            return path.resolve().relative_to(root.resolve())
        else:
            return path.relative_to(root)
    except ValueError:
        return path


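# Illustrative usage of the two path helpers above (an editor's sketch, not from
# the original module; results shown for a POSIX filesystem):
#
#     path_is_in_root(Path('/usr/local/bin'), Path('/usr/local'))           # -> True
#     relative_to_if_possible(Path('/usr/local/bin'), Path('/usr/local'))   # -> PosixPath('bin')
#     relative_to_if_possible(Path('/opt/tool'), Path('/usr/local'))        # -> PosixPath('/opt/tool')
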
class LibType(enum.IntEnum):

    """Enumeration for library types."""

    SHARED = 0
    STATIC = 1
    PREFER_SHARED = 2
    PREFER_STATIC = 3


class ProgressBarFallback: # lgtm [py/iter-returns-non-self]
    '''
    Fallback progress bar implementation when tqdm is not found

    Since this class is not an actual iterator, but only provides a minimal
    fallback, it is safe to ignore the 'Iterator does not return self from
    __iter__ method' warning.
    '''
    def __init__(self, iterable: T.Optional[T.Iterable[str]] = None, total: T.Optional[int] = None,
                 bar_type: T.Optional[str] = None, desc: T.Optional[str] = None):
        if iterable is not None:
            self.iterable = iter(iterable)
            return
        self.total = total
        self.done = 0
        self.printed_dots = 0
        if self.total and bar_type == 'download':
            print('Download size:', self.total)
        if desc:
            print(f'{desc}: ', end='')

    # Pretend to be an iterator when called as one and don't print any
    # progress
    def __iter__(self) -> T.Iterator[str]:
        return self.iterable

    def __next__(self) -> str:
        return next(self.iterable)

    def print_dot(self) -> None:
        print('.', end='')
        sys.stdout.flush()
        self.printed_dots += 1

    def update(self, progress: int) -> None:
        self.done += progress
        if not self.total:
            # Just print one dot per call if we don't have a total length
            self.print_dot()
            return
        ratio = int(self.done / self.total * 10)
        while self.printed_dots < ratio:
            self.print_dot()

    def close(self) -> None:
        print('')


try:
    from tqdm import tqdm
except ImportError:
    # ideally we would use a typing.Protocol here, but it's part of typing_extensions until 3.8
    ProgressBar = ProgressBarFallback  # type: T.Union[T.Type[ProgressBarFallback], T.Type[ProgressBarTqdm]]
else:
    class ProgressBarTqdm(tqdm):
        def __init__(self, *args: T.Any, bar_type: T.Optional[str] = None, **kwargs: T.Any) -> None:
            if bar_type == 'download':
                kwargs.update({'unit': 'bytes', 'leave': True})
            else:
                kwargs.update({'leave': False})
            kwargs['ncols'] = 100
            super().__init__(*args, **kwargs)

    ProgressBar = ProgressBarTqdm


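# Illustrative usage (an editor's sketch, not from the original module): both the
# tqdm-backed class and the fallback accept the same calls.
#
#     bar = ProgressBar(total=100, bar_type='download', desc='Downloading')
#     bar.update(50)
#     bar.update(50)
#     bar.close()
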
class RealPathAction(argparse.Action):
    def __init__(self, option_strings: T.List[str], dest: str, default: str = '.', **kwargs: T.Any):
        default = os.path.abspath(os.path.realpath(default))
        super().__init__(option_strings, dest, nargs=None, default=default, **kwargs)

    def __call__(self, parser: argparse.ArgumentParser, namespace: argparse.Namespace,
                 values: T.Union[str, T.Sequence[T.Any], None],
                 option_string: T.Optional[str] = None) -> None:
        assert isinstance(values, str)
        setattr(namespace, self.dest, os.path.abspath(os.path.realpath(values)))


def get_wine_shortpath(winecmd: T.List[str], wine_paths: T.Sequence[str]) -> str:
    """Get a short version of @wine_paths to avoid reaching the WINEPATH
    character limit.
    """

    wine_paths = list(OrderedSet(wine_paths))

    getShortPathScript = '%s.bat' % str(uuid.uuid4()).lower()[:5]
    with open(getShortPathScript, mode='w', encoding='utf-8') as f:
        f.write("@ECHO OFF\nfor %%x in (%*) do (\n echo|set /p=;%~sx\n)\n")
        f.flush()
    try:
        with open(os.devnull, 'w', encoding='utf-8') as stderr:
            wine_path = subprocess.check_output(
                winecmd +
                ['cmd', '/C', getShortPathScript] + wine_paths,
                stderr=stderr).decode('utf-8')
    except subprocess.CalledProcessError as e:
        print("Could not get short paths: %s" % e)
        wine_path = ';'.join(wine_paths)
    finally:
        os.remove(getShortPathScript)
    if len(wine_path) > 2048:
        raise MesonException(
            'WINEPATH size {} > 2048'
            ' this will cause random failure.'.format(
                len(wine_path)))

    return wine_path.strip(';')


def run_once(func: T.Callable[..., _T]) -> T.Callable[..., _T]:
    ret = []  # type: T.List[_T]

    @wraps(func)
    def wrapper(*args: T.Any, **kwargs: T.Any) -> _T:
        if ret:
            return ret[0]

        val = func(*args, **kwargs)
        ret.append(val)
        return val

    return wrapper


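# Illustrative usage (an editor's sketch, not from the original module): the
# first call runs the wrapped function, later calls return the cached result.
#
#     @run_once
#     def detect_something() -> str:
#         print('probing...')
#         return 'result'
#
#     detect_something()  # prints 'probing...' and returns 'result'
#     detect_something()  # returns 'result' without running the body again
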
class OptionOverrideProxy(collections.abc.MutableMapping):

    '''Mimic an option list but transparently override selected option
    values.
    '''

    # TODO: the typing here could be made more explicit using a TypedDict from
    # python 3.8 or typing_extensions

    def __init__(self, overrides: T.Dict['OptionKey', T.Any], *options: 'KeyedOptionDictType'):
        self.overrides = overrides.copy()
        self.options: T.Dict['OptionKey', UserOption] = {}
        for o in options:
            self.options.update(o)

    def __getitem__(self, key: 'OptionKey') -> T.Union['UserOption']:
        if key in self.options:
            opt = self.options[key]
            if key in self.overrides:
                opt = copy.copy(opt)
                opt.set_value(self.overrides[key])
            return opt
        raise KeyError('Option not found', key)

    def __setitem__(self, key: 'OptionKey', value: T.Union['UserOption']) -> None:
        self.overrides[key] = value.value

    def __delitem__(self, key: 'OptionKey') -> None:
        del self.overrides[key]

    def __iter__(self) -> T.Iterator['OptionKey']:
        return iter(self.options)

    def __len__(self) -> int:
        return len(self.options)

    def copy(self) -> 'OptionOverrideProxy':
        return OptionOverrideProxy(self.overrides.copy(), self.options.copy())


class OptionType(enum.IntEnum):

    """Enum used to specify what kind of argument a thing is."""

    BUILTIN = 0
    BACKEND = 1
    BASE = 2
    COMPILER = 3
    PROJECT = 4


# This is copied from coredata. There is no way to share this, because this
# is used in the OptionKey constructor, and the coredata lists are
# OptionKeys...
_BUILTIN_NAMES = {
    'prefix',
    'bindir',
    'datadir',
    'includedir',
    'infodir',
    'libdir',
    'libexecdir',
    'localedir',
    'localstatedir',
    'mandir',
    'sbindir',
    'sharedstatedir',
    'sysconfdir',
    'auto_features',
    'backend',
    'buildtype',
    'debug',
    'default_library',
    'errorlogs',
    'install_umask',
    'layout',
    'optimization',
    'stdsplit',
    'strip',
    'unity',
    'unity_size',
    'warning_level',
    'werror',
    'wrap_mode',
    'force_fallback_for',
    'pkg_config_path',
    'cmake_prefix_path',
}


def _classify_argument(key: 'OptionKey') -> OptionType:
    """Classify arguments into groups so we know which dict to assign them to."""

    if key.name.startswith('b_'):
        return OptionType.BASE
    elif key.lang is not None:
        return OptionType.COMPILER
    elif key.name in _BUILTIN_NAMES or key.module:
        return OptionType.BUILTIN
    elif key.name.startswith('backend_'):
        assert key.machine is MachineChoice.HOST, str(key)
        return OptionType.BACKEND
    else:
        assert key.machine is MachineChoice.HOST, str(key)
        return OptionType.PROJECT


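# Illustrative classification (an editor's sketch, not from the original module);
# the OptionKey constructor below calls _classify_argument() to fill in .type:
#
#     OptionKey('b_lto').type               # -> OptionType.BASE
#     OptionKey('std', lang='cpp').type     # -> OptionType.COMPILER
#     OptionKey('prefix').type              # -> OptionType.BUILTIN
#     OptionKey('backend_max_links').type   # -> OptionType.BACKEND
#     OptionKey('my_project_option').type   # -> OptionType.PROJECT
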
@total_ordering
class OptionKey:

    """Represents an option key in the various option dictionaries.

    This provides a flexible, powerful way to map option names from their
    external form (things like subproject:build.option) to something that is
    internally easier to reason about and produce.
    """

    __slots__ = ['name', 'subproject', 'machine', 'lang', '_hash', 'type', 'module']

    name: str
    subproject: str
    machine: MachineChoice
    lang: T.Optional[str]
    _hash: int
    type: OptionType
    module: T.Optional[str]

    def __init__(self, name: str, subproject: str = '',
                 machine: MachineChoice = MachineChoice.HOST,
                 lang: T.Optional[str] = None,
                 module: T.Optional[str] = None,
                 _type: T.Optional[OptionType] = None):
        # the _type option to the constructor is kinda private. We want to be
        # able to save the state and avoid the lookup function when
        # pickling/unpickling, but we need to be able to calculate it when
        # constructing a new OptionKey
        object.__setattr__(self, 'name', name)
        object.__setattr__(self, 'subproject', subproject)
        object.__setattr__(self, 'machine', machine)
        object.__setattr__(self, 'lang', lang)
        object.__setattr__(self, 'module', module)
        object.__setattr__(self, '_hash', hash((name, subproject, machine, lang, module)))
        if _type is None:
            _type = _classify_argument(self)
        object.__setattr__(self, 'type', _type)

    def __setattr__(self, key: str, value: T.Any) -> None:
        raise AttributeError('OptionKey instances do not support mutation.')

    def __getstate__(self) -> T.Dict[str, T.Any]:
        return {
            'name': self.name,
            'subproject': self.subproject,
            'machine': self.machine,
            'lang': self.lang,
            '_type': self.type,
            'module': self.module,
        }

    def __setstate__(self, state: T.Dict[str, T.Any]) -> None:
        """De-serialize the state of a pickle.

        This is very clever. __init__ is not a constructor, it's an
        initializer, therefore it's safe to call more than once. We create a
        state in the custom __getstate__ method, which is valid to pass
        splatted to the initializer.
        """
        # Mypy doesn't like this, because it's so clever.
        self.__init__(**state)  # type: ignore

    def __hash__(self) -> int:
        return self._hash

    def _to_tuple(self) -> T.Tuple[str, OptionType, str, str, MachineChoice, str]:
        return (self.subproject, self.type, self.lang or '', self.module or '', self.machine, self.name)

    def __eq__(self, other: object) -> bool:
        if isinstance(other, OptionKey):
            return self._to_tuple() == other._to_tuple()
        return NotImplemented

    def __lt__(self, other: object) -> bool:
        if isinstance(other, OptionKey):
            return self._to_tuple() < other._to_tuple()
        return NotImplemented

    def __str__(self) -> str:
        out = self.name
        if self.lang:
            out = f'{self.lang}_{out}'
        if self.machine is MachineChoice.BUILD:
            out = f'build.{out}'
        if self.module:
            out = f'{self.module}.{out}'
        if self.subproject:
            out = f'{self.subproject}:{out}'
        return out

    def __repr__(self) -> str:
        return f'OptionKey({repr(self.name)}, {repr(self.subproject)}, {repr(self.machine)}, {repr(self.lang)})'

    @classmethod
    def from_string(cls, raw: str) -> 'OptionKey':
        """Parse the raw command line format into a three part tuple.

        This takes strings like `mysubproject:build.myoption` and creates an
        OptionKey out of them.
        """
        try:
            subproject, raw2 = raw.split(':')
        except ValueError:
            subproject, raw2 = '', raw

        module = None
        for_machine = MachineChoice.HOST
        try:
            prefix, raw3 = raw2.split('.')
            if prefix == 'build':
                for_machine = MachineChoice.BUILD
            else:
                module = prefix
        except ValueError:
            raw3 = raw2

        from ..compilers import all_languages
        if any(raw3.startswith(f'{l}_') for l in all_languages):
            lang, opt = raw3.split('_', 1)
        else:
            lang, opt = None, raw3
        assert ':' not in opt
        assert '.' not in opt

        return cls(opt, subproject, for_machine, lang, module)

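    # Illustrative parse (an editor's sketch, not from the original module),
    # assuming 'cpp' is one of all_languages:
    #
    #     key = OptionKey.from_string('mysub:build.cpp_std')
    #     # key.subproject == 'mysub', key.machine is MachineChoice.BUILD,
    #     # key.lang == 'cpp', key.name == 'std'
    #     str(key)  # -> 'mysub:build.cpp_std'
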
    def evolve(self, name: T.Optional[str] = None, subproject: T.Optional[str] = None,
               machine: T.Optional[MachineChoice] = None, lang: T.Optional[str] = '',
               module: T.Optional[str] = '') -> 'OptionKey':
        """Create a new copy of this key, but with altered members.

        For example:
        >>> a = OptionKey('foo', '', MachineChoice.HOST)
        >>> b = OptionKey('foo', 'bar', MachineChoice.HOST)
        >>> b == a.evolve(subproject='bar')
        True
        """
        # We have to be a little clever with lang here, because lang is valid
        # as None, for non-compiler options
        return OptionKey(
            name if name is not None else self.name,
            subproject if subproject is not None else self.subproject,
            machine if machine is not None else self.machine,
            lang if lang != '' else self.lang,
            module if module != '' else self.module
        )

    def as_root(self) -> 'OptionKey':
        """Convenience method for key.evolve(subproject='')."""
        return self.evolve(subproject='')

    def as_build(self) -> 'OptionKey':
        """Convenience method for key.evolve(machine=MachineChoice.BUILD)."""
        return self.evolve(machine=MachineChoice.BUILD)

    def as_host(self) -> 'OptionKey':
        """Convenience method for key.evolve(machine=MachineChoice.HOST)."""
        return self.evolve(machine=MachineChoice.HOST)

    def is_backend(self) -> bool:
        """Convenience method to check if this is a backend option."""
        return self.type is OptionType.BACKEND

    def is_builtin(self) -> bool:
        """Convenience method to check if this is a builtin option."""
        return self.type is OptionType.BUILTIN

    def is_compiler(self) -> bool:
        """Convenience method to check if this is a compiler option."""
        return self.type is OptionType.COMPILER

    def is_project(self) -> bool:
        """Convenience method to check if this is a project option."""
        return self.type is OptionType.PROJECT

    def is_base(self) -> bool:
        """Convenience method to check if this is a base option."""
        return self.type is OptionType.BASE