Merge pull request #7657 from mensinda/moreTyping

typing: Strict type annotations
pull/7716/head
Dylan Baker committed via GitHub
commit 4c2d0eb9bc
  1. .github/workflows/lint_mypy.yml (2)
  2. .mypy.ini (26)
  3. mesonbuild/arglist.py (8)
  4. mesonbuild/ast/interpreter.py (22)
  5. mesonbuild/ast/introspection.py (11)
  6. mesonbuild/ast/postprocess.py (6)
  7. mesonbuild/ast/visitor.py (2)
  8. mesonbuild/backend/backends.py (18)
  9. mesonbuild/backend/ninjabackend.py (2)
  10. mesonbuild/backend/vs2010backend.py (4)
  11. mesonbuild/build.py (59)
  12. mesonbuild/compilers/compilers.py (2)
  13. mesonbuild/compilers/mixins/intel.py (4)
  14. mesonbuild/coredata.py (84)
  15. mesonbuild/dependencies/base.py (22)
  16. mesonbuild/dependencies/boost.py (22)
  17. mesonbuild/dependencies/hdf5.py (10)
  18. mesonbuild/dependencies/mpi.py (5)
  19. mesonbuild/envconfig.py (12)
  20. mesonbuild/environment.py (32)
  21. mesonbuild/interpreter.py (42)
  22. mesonbuild/interpreterbase.py (160)
  23. mesonbuild/mcompile.py (14)
  24. mesonbuild/mconf.py (8)
  25. mesonbuild/mesonlib.py (97)
  26. mesonbuild/minit.py (12)
  27. mesonbuild/mintro.py (36)
  28. mesonbuild/mlog.py (2)
  29. mesonbuild/modules/__init__.py (12)
  30. mesonbuild/modules/fs.py (8)
  31. mesonbuild/mparser.py (39)
  32. mesonbuild/msetup.py (28)
  33. mesonbuild/mtest.py (65)
  34. mesonbuild/optinterpreter.py (11)
  35. mesonbuild/scripts/__init__.py (3)
  36. mesonbuild/scripts/clangformat.py (5)
  37. mesonbuild/scripts/clangtidy.py (8)
  38. mesonbuild/scripts/cleantrees.py (5)
  39. mesonbuild/scripts/cmake_run_ctgt.py (44)
  40. mesonbuild/scripts/commandrunner.py (15)
  41. mesonbuild/scripts/coverage.py (5)
  42. mesonbuild/scripts/delwithsuffix.py (3)
  43. mesonbuild/scripts/depfixer.py (80)
  44. mesonbuild/scripts/dirchanger.py (3)
  45. mesonbuild/scripts/gettext.py (14)
  46. mesonbuild/scripts/gtkdochelper.py (19)
  47. mesonbuild/scripts/hotdochelper.py (6)
  48. mesonbuild/scripts/meson_exe.py (9)
  49. mesonbuild/scripts/msgfmthelper.py (3)
  50. mesonbuild/scripts/regen_checker.py (20)
  51. mesonbuild/scripts/scanbuild.py (23)
  52. mesonbuild/scripts/symbolextractor.py (34)
  53. mesonbuild/scripts/tags.py (16)
  54. mesonbuild/scripts/uninstall.py (5)
  55. mesonbuild/scripts/vcstagger.py (6)
  56. mesonbuild/scripts/yelphelper.py (14)
  57. mesonbuild/templates/cpptemplates.py (4)
  58. mesonbuild/templates/cstemplates.py (4)
  59. mesonbuild/templates/ctemplates.py (4)
  60. mesonbuild/templates/cudatemplates.py (4)
  61. mesonbuild/templates/dlangtemplates.py (4)
  62. mesonbuild/templates/fortrantemplates.py (4)
  63. mesonbuild/templates/javatemplates.py (4)
  64. mesonbuild/templates/mesontemplates.py (4)
  65. mesonbuild/templates/objcpptemplates.py (4)
  66. mesonbuild/templates/objctemplates.py (4)
  67. mesonbuild/templates/rusttemplates.py (4)
  68. mesonbuild/templates/samplefactory.py (4)
  69. mesonbuild/templates/sampleimpl.py (4)
  70. mesonbuild/wrap/__init__.py (2)
  71. mesonbuild/wrap/wrap.py (30)
  72. mesonbuild/wrap/wraptool.py (42)
  73. run_mypy.py (65)
  74. run_project_tests.py (2)
  75. run_tests.py (4)
  76. tools/boost_names.py (6)
  77. tools/build_website.py (4)
  78. tools/cmake2meson.py (19)
  79. tools/dircondenser.py (4)
  80. tools/regenerate_docs.py (16)

@ -31,4 +31,4 @@ jobs:
with:
python-version: '3.x'
- run: python -m pip install mypy
- run: mypy --follow-imports=skip mesonbuild/interpreterbase.py mesonbuild/mtest.py mesonbuild/minit.py mesonbuild/mintro.py mesonbuild/mparser.py mesonbuild/msetup.py mesonbuild/ast mesonbuild/wrap tools/ mesonbuild/modules/fs.py mesonbuild/dependencies/boost.py mesonbuild/dependencies/mpi.py mesonbuild/dependencies/hdf5.py mesonbuild/compilers/mixins/intel.py mesonbuild/mlog.py mesonbuild/mcompile.py mesonbuild/mesonlib.py mesonbuild/arglist.py
- run: python run_mypy.py

@ -1,5 +1,23 @@
[mypy]
strict_optional = False
show_error_context = False
show_column_numbers = True
ignore_missing_imports = True
strict_optional = False
show_error_context = False
show_column_numbers = True
ignore_missing_imports = True
follow_imports = silent
warn_redundant_casts = True
warn_unused_ignores = True
warn_return_any = True
# warn_unreachable = True
disallow_untyped_calls = True
disallow_untyped_defs = True
disallow_incomplete_defs = True
disallow_untyped_decorators = True
no_implicit_optional = True
strict_equality = True
check_untyped_defs = True
# disallow_any_expr = True
# disallow_any_decorated = True
# disallow_any_explicit = True
# disallow_any_generics = True
# disallow_subclassing_any = True
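
The .mypy.ini hunk above turns on most of mypy's strictness flags for the modules checked by run_mypy.py. The sketch below is not part of the commit; it only illustrates, with a made-up function, the kind of code that disallow_untyped_defs and no_implicit_optional reject and the annotated form they accept.

    import typing as T

    # Under disallow_untyped_defs this definition would be rejected,
    # because nothing is annotated:
    #
    #     def scale(values, factor=None):
    #         return [v * (factor or 1) for v in values]

    # The annotated form passes; the default of None must be spelled out
    # as T.Optional[...] because no_implicit_optional = True.
    def scale(values: T.List[int], factor: T.Optional[int] = None) -> T.List[int]:
        multiplier = factor if factor is not None else 1
        return [v * multiplier for v in values]

    print(scale([1, 2, 3], 2))  # [2, 4, 6]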

@ -164,7 +164,7 @@ class CompilerArgs(collections.abc.MutableSequence):
def __getitem__(self, index: slice) -> T.MutableSequence[str]: # noqa: F811
pass
def __getitem__(self, index): # noqa: F811
def __getitem__(self, index: T.Union[int, slice]) -> T.Union[str, T.MutableSequence[str]]: # noqa: F811
self.flush_pre_post()
return self._container[index]
@ -176,9 +176,9 @@ class CompilerArgs(collections.abc.MutableSequence):
def __setitem__(self, index: slice, value: T.Iterable[str]) -> None: # noqa: F811
pass
def __setitem__(self, index, value) -> None: # noqa: F811
def __setitem__(self, index: T.Union[int, slice], value: T.Union[str, T.Iterable[str]]) -> None: # noqa: F811
self.flush_pre_post()
self._container[index] = value
self._container[index] = value # type: ignore # TODO: fix 'Invalid index type' and 'Incompatible types in assignment' errors
def __delitem__(self, index: T.Union[int, slice]) -> None:
self.flush_pre_post()
@ -314,7 +314,7 @@ class CompilerArgs(collections.abc.MutableSequence):
new += self
return new
def __eq__(self, other: T.Any) -> T.Union[bool]:
def __eq__(self, other: object) -> T.Union[bool]:
self.flush_pre_post()
# Only allow equality checks against other CompilerArgs and lists instances
if isinstance(other, CompilerArgs):
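
The arglist.py hunks keep the T.overload stubs for __getitem__ and __setitem__ and give the real implementations Union signatures. A minimal sketch of that overload pattern on a simplified container (not Meson's CompilerArgs), assuming a plain list-backed class:

    import typing as T

    class StrList:
        def __init__(self, items: T.Iterable[str]) -> None:
            self._container = list(items)

        @T.overload
        def __getitem__(self, index: int) -> str: ...
        @T.overload
        def __getitem__(self, index: slice) -> T.MutableSequence[str]: ...

        def __getitem__(self, index: T.Union[int, slice]) -> T.Union[str, T.MutableSequence[str]]:
            # The overloads above only describe the types; this single
            # implementation handles both index kinds at runtime.
            return self._container[index]

    s = StrList(['-O2', '-g'])
    print(s[0])    # '-O2'
    print(s[0:2])  # ['-O2', '-g']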

@ -38,7 +38,6 @@ from ..mparser import (
NotNode,
OrNode,
PlusAssignmentNode,
StringNode,
TernaryNode,
UMinusNode,
)
@ -193,7 +192,11 @@ class AstInterpreter(interpreterbase.InterpreterBase):
self.evaluate_statement(node.falseblock)
def evaluate_dictstatement(self, node: mparser.DictNode) -> TYPE_nkwargs:
(arguments, kwargs) = self.reduce_arguments(node.args, resolve_key_nodes=False)
def resolve_key(node: mparser.BaseNode) -> str:
if isinstance(node, mparser.StringNode):
return node.value
return '__AST_UNKNOWN__'
arguments, kwargs = self.reduce_arguments(node.args, key_resolver=resolve_key)
assert (not arguments)
self.argument_depth += 1
for key, value in kwargs.items():
@ -216,15 +219,16 @@ class AstInterpreter(interpreterbase.InterpreterBase):
def unknown_function_called(self, func_name: str) -> None:
pass
def reduce_arguments(self, args: ArgumentNode, resolve_key_nodes: bool = True) -> T.Tuple[T.List[TYPE_nvar], TYPE_nkwargs]:
def reduce_arguments(
self,
args: mparser.ArgumentNode,
key_resolver: T.Callable[[mparser.BaseNode], str] = interpreterbase.default_resolve_key,
duplicate_key_error: T.Optional[str] = None,
) -> T.Tuple[T.List[TYPE_nvar], TYPE_nkwargs]:
if isinstance(args, ArgumentNode):
kwargs = {} # type: T.Dict[T.Union[str, BaseNode], TYPE_nvar]
kwargs = {} # type: T.Dict[str, TYPE_nvar]
for key, val in args.kwargs.items():
if resolve_key_nodes and isinstance(key, (StringNode, IdNode)):
assert isinstance(key.value, str)
kwargs[key.value] = val
else:
kwargs[key] = val
kwargs[key_resolver(key)] = val
if args.incorrect_order():
raise InvalidArguments('All keyword arguments must be after positional arguments.')
return self.flatten_args(args.arguments), kwargs
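
The ast/interpreter.py hunk replaces the resolve_key_nodes flag with a key_resolver callable that defaults to interpreterbase.default_resolve_key. A self-contained sketch of that callable-parameter shape, using stand-in node classes rather than Meson's mparser types:

    import typing as T

    class Node:
        pass

    class IdNode(Node):
        def __init__(self, value: str) -> None:
            self.value = value

    def default_resolve_key(key: Node) -> str:
        # Mirrors the strict default: only identifier nodes are valid keys.
        if not isinstance(key, IdNode):
            raise ValueError('Invalid kwargs format.')
        return key.value

    def reduce_kwargs(
        raw: T.Dict[Node, int],
        key_resolver: T.Callable[[Node], str] = default_resolve_key,
    ) -> T.Dict[str, int]:
        # Callers can swap in a laxer resolver (as the AST interpreter does
        # with its '__AST_UNKNOWN__' fallback) without changing this function.
        return {key_resolver(k): v for k, v in raw.items()}

    print(reduce_kwargs({IdNode('opt'): 1}))  # {'opt': 1}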

@ -21,20 +21,25 @@ from .. import compilers, environment, mesonlib, optinterpreter
from .. import coredata as cdata
from ..mesonlib import MachineChoice
from ..interpreterbase import InvalidArguments, TYPE_nvar
from ..build import Executable, Jar, SharedLibrary, SharedModule, StaticLibrary
from ..build import BuildTarget, Executable, Jar, SharedLibrary, SharedModule, StaticLibrary
from ..mparser import BaseNode, ArithmeticNode, ArrayNode, ElementaryNode, IdNode, FunctionNode, StringNode
import typing as T
import os
import argparse
build_target_functions = ['executable', 'jar', 'library', 'shared_library', 'shared_module', 'static_library', 'both_libraries']
class IntrospectionHelper:
class IntrospectionHelper(argparse.Namespace):
# mimic an argparse namespace
def __init__(self, cross_file: str):
super().__init__()
self.cross_file = cross_file # type: str
self.native_file = None # type: str
self.cmd_line_options = {} # type: T.Dict[str, str]
def __eq__(self, other: object) -> bool:
return NotImplemented
class IntrospectionInterpreter(AstInterpreter):
# Interpreter to detect the options without a build directory
# Most of the code is stolen from interpreter.Interpreter
@ -182,7 +187,7 @@ class IntrospectionInterpreter(AstInterpreter):
'node': node
}]
def build_target(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs_raw: T.Dict[str, TYPE_nvar], targetclass) -> T.Optional[T.Dict[str, T.Any]]:
def build_target(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs_raw: T.Dict[str, TYPE_nvar], targetclass: T.Type[BuildTarget]) -> T.Optional[T.Dict[str, T.Any]]:
args = self.flatten_args(args)
if not args or not isinstance(args[0], str):
return None

@ -20,7 +20,7 @@ from .. import mparser
import typing as T
class AstIndentationGenerator(AstVisitor):
def __init__(self):
def __init__(self) -> None:
self.level = 0
def visit_default_func(self, node: mparser.BaseNode) -> None:
@ -76,7 +76,7 @@ class AstIndentationGenerator(AstVisitor):
self.level -= 1
class AstIDGenerator(AstVisitor):
def __init__(self):
def __init__(self) -> None:
self.counter = {} # type: T.Dict[str, int]
def visit_default_func(self, node: mparser.BaseNode) -> None:
@ -87,7 +87,7 @@ class AstIDGenerator(AstVisitor):
self.counter[name] += 1
class AstConditionLevel(AstVisitor):
def __init__(self):
def __init__(self) -> None:
self.condition_level = 0
def visit_default_func(self, node: mparser.BaseNode) -> None:

@ -18,7 +18,7 @@
from .. import mparser
class AstVisitor:
def __init__(self):
def __init__(self) -> None:
pass
def visit_default_func(self, node: mparser.BaseNode) -> None:

@ -36,7 +36,7 @@ from ..mesonlib import (
)
if T.TYPE_CHECKING:
from ..interpreter import Interpreter
from ..interpreter import Interpreter, Test
class TestProtocol(enum.Enum):
@ -104,7 +104,7 @@ class TargetInstallData:
class ExecutableSerialisation:
def __init__(self, cmd_args, env=None, exe_wrapper=None,
workdir=None, extra_paths=None, capture=None):
workdir=None, extra_paths=None, capture=None) -> None:
self.cmd_args = cmd_args
self.env = env or {}
if exe_wrapper is not None:
@ -182,11 +182,15 @@ class Backend:
self.interpreter = interpreter
self.environment = build.environment
self.processed_targets = {}
self.name = '<UNKNOWN>'
self.build_dir = self.environment.get_build_dir()
self.source_dir = self.environment.get_source_dir()
self.build_to_src = mesonlib.relpath(self.environment.get_source_dir(),
self.environment.get_build_dir())
def generate(self) -> None:
raise RuntimeError('generate is not implemented in {}'.format(type(self).__name__))
def get_target_filename(self, t, *, warn_multi_output: bool = True):
if isinstance(t, build.CustomTarget):
if warn_multi_output and len(t.get_outputs()) != 1:
@ -794,7 +798,7 @@ class Backend:
def write_test_file(self, datafile):
self.write_test_serialisation(self.build.get_tests(), datafile)
def create_test_serialisation(self, tests):
def create_test_serialisation(self, tests: T.List['Test']) -> T.List[TestSerialisation]:
arr = []
for t in sorted(tests, key=lambda tst: -1 * tst.priority):
exe = t.get_exe()
@ -864,7 +868,7 @@ class Backend:
arr.append(ts)
return arr
def write_test_serialisation(self, tests, datafile):
def write_test_serialisation(self, tests: T.List['Test'], datafile: str):
pickle.dump(self.create_test_serialisation(tests), datafile)
def construct_target_rel_path(self, a, workdir):
@ -1128,7 +1132,7 @@ class Backend:
cmd = [i.replace('\\', '/') for i in cmd]
return inputs, outputs, cmd
def run_postconf_scripts(self):
def run_postconf_scripts(self) -> None:
env = {'MESON_SOURCE_ROOT': self.environment.get_source_dir(),
'MESON_BUILD_ROOT': self.environment.get_build_dir(),
'MESONINTROSPECT': ' '.join([shlex.quote(x) for x in self.environment.get_build_command() + ['introspect']]),
@ -1140,7 +1144,7 @@ class Backend:
cmd = s['exe'] + s['args']
subprocess.check_call(cmd, env=child_env)
def create_install_data(self):
def create_install_data(self) -> InstallData:
strip_bin = self.environment.lookup_binary_entry(MachineChoice.HOST, 'strip')
if strip_bin is None:
if self.environment.is_cross_build():
@ -1338,7 +1342,7 @@ class Backend:
d.install_subdirs.append([src_dir, dst_dir, sd.install_mode,
sd.exclude])
def get_introspection_data(self, target_id, target):
def get_introspection_data(self, target_id: str, target: build.Target) -> T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]:
'''
Returns a list of source dicts with the following format for a given target:
[

@ -2997,7 +2997,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
elem = NinjaBuildElement(self.all_outputs, deps, 'phony', '')
self.add_build(elem)
def get_introspection_data(self, target_id, target):
def get_introspection_data(self, target_id: str, target: build.Target) -> T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]:
if target_id not in self.introspection_data or len(self.introspection_data[target_id]) == 0:
return super().get_introspection_data(target_id, target)

@ -192,11 +192,11 @@ class Vs2010Backend(backends.Backend):
Vs2010Backend.touch_regen_timestamp(self.environment.get_build_dir())
@staticmethod
def get_regen_stampfile(build_dir):
def get_regen_stampfile(build_dir: str) -> None:
return os.path.join(os.path.join(build_dir, Environment.private_dir), 'regen.stamp')
@staticmethod
def touch_regen_timestamp(build_dir):
def touch_regen_timestamp(build_dir: str) -> None:
with open(Vs2010Backend.get_regen_stampfile(build_dir), 'w'):
pass

@ -37,6 +37,9 @@ from .compilers import (
from .linkers import StaticLinker
from .interpreterbase import FeatureNew
if T.TYPE_CHECKING:
from .interpreter import Test
pch_kwargs = set(['c_pch', 'cpp_pch'])
lang_arg_kwargs = set([
@ -128,14 +131,14 @@ class Build:
self.project_version = None
self.environment = environment
self.projects = {}
self.targets = OrderedDict()
self.targets = OrderedDict() # type: T.Dict[str, 'Target']
self.run_target_names = set() # type: T.Set[T.Tuple[str, str]]
self.global_args = PerMachine({}, {}) # type: PerMachine[T.Dict[str, T.List[str]]]
self.projects_args = PerMachine({}, {}) # type: PerMachine[T.Dict[str, T.List[str]]]
self.global_link_args = PerMachine({}, {}) # type: PerMachine[T.Dict[str, T.List[str]]]
self.projects_link_args = PerMachine({}, {}) # type: PerMachine[T.Dict[str, T.List[str]]]
self.tests = []
self.benchmarks = []
self.tests = [] # type: T.List['Test']
self.benchmarks = [] # type: T.List['Test']
self.headers = []
self.man = []
self.data = []
@ -178,13 +181,13 @@ class Build:
def get_subproject_dir(self):
return self.subproject_dir
def get_targets(self):
def get_targets(self) -> T.Dict[str, 'Target']:
return self.targets
def get_tests(self):
def get_tests(self) -> T.List['Test']:
return self.tests
def get_benchmarks(self):
def get_benchmarks(self) -> T.List['Test']:
return self.benchmarks
def get_headers(self):
@ -369,22 +372,22 @@ a hard error in the future.'''.format(name))
if not hasattr(self, 'typename'):
raise RuntimeError('Target type is not set for target class "{}". This is a bug'.format(type(self).__name__))
def __lt__(self, other: T.Any) -> T.Union[bool, type(NotImplemented)]:
def __lt__(self, other: object) -> bool:
if not hasattr(other, 'get_id') and not callable(other.get_id):
return NotImplemented
return self.get_id() < other.get_id()
def __le__(self, other: T.Any) -> T.Union[bool, type(NotImplemented)]:
def __le__(self, other: object) -> bool:
if not hasattr(other, 'get_id') and not callable(other.get_id):
return NotImplemented
return self.get_id() <= other.get_id()
def __gt__(self, other: T.Any) -> T.Union[bool, type(NotImplemented)]:
def __gt__(self, other: object) -> bool:
if not hasattr(other, 'get_id') and not callable(other.get_id):
return NotImplemented
return self.get_id() > other.get_id()
def __ge__(self, other: T.Any) -> T.Union[bool, type(NotImplemented)]:
def __ge__(self, other: object) -> bool:
if not hasattr(other, 'get_id') and not callable(other.get_id):
return NotImplemented
return self.get_id() >= other.get_id()
@ -403,13 +406,13 @@ a hard error in the future.'''.format(name))
outdirs[0] = default_install_dir
return outdirs, custom_install_dir
def get_basename(self):
def get_basename(self) -> str:
return self.name
def get_subdir(self):
def get_subdir(self) -> str:
return self.subdir
def get_typename(self):
def get_typename(self) -> str:
return self.typename
@staticmethod
@ -423,7 +426,7 @@ a hard error in the future.'''.format(name))
return h.hexdigest()[:7]
@staticmethod
def construct_id_from_path(subdir, name, type_suffix):
def construct_id_from_path(subdir: str, name: str, type_suffix: str) -> str:
"""Construct target ID from subdir, name and type suffix.
This helper function is made public mostly for tests."""
@ -441,7 +444,7 @@ a hard error in the future.'''.format(name))
return subdir_part + '@@' + my_id
return my_id
def get_id(self):
def get_id(self) -> str:
return self.construct_id_from_path(
self.subdir, self.name, self.type_suffix())
@ -480,6 +483,12 @@ a hard error in the future.'''.format(name))
def is_linkable_target(self) -> bool:
return False
def get_outputs(self) -> T.List[str]:
return []
def should_install(self) -> bool:
return False
class BuildTarget(Target):
known_kwargs = known_build_target_kwargs
@ -1006,7 +1015,7 @@ This will become a hard error in a future Meson release.''')
def get_filename(self):
return self.filename
def get_outputs(self):
def get_outputs(self) -> T.List[str]:
return self.outputs
def get_extra_args(self, language):
@ -1036,7 +1045,7 @@ This will become a hard error in a future Meson release.''')
def get_generated_sources(self):
return self.generated
def should_install(self):
def should_install(self) -> bool:
return self.need_install
def has_pch(self):
@ -1474,7 +1483,7 @@ class GeneratedList:
def get_inputs(self):
return self.infilelist
def get_outputs(self):
def get_outputs(self) -> T.List[str]:
return self.outfilelist
def get_outputs_for(self, filename):
@ -2192,7 +2201,7 @@ class CustomTarget(Target):
def get_dependencies(self):
return self.dependencies
def should_install(self):
def should_install(self) -> bool:
return self.install
def get_custom_install_dir(self):
@ -2201,7 +2210,7 @@ class CustomTarget(Target):
def get_custom_install_mode(self):
return self.install_mode
def get_outputs(self):
def get_outputs(self) -> T.List[str]:
return self.outputs
def get_filename(self):
@ -2289,13 +2298,13 @@ class RunTarget(Target):
def get_sources(self):
return []
def should_install(self):
def should_install(self) -> bool:
return False
def get_filename(self):
def get_filename(self) -> str:
return self.name
def get_outputs(self):
def get_outputs(self) -> T.List[str]:
if isinstance(self.name, str):
return [self.name]
elif isinstance(self.name, list):
@ -2367,7 +2376,7 @@ class CustomTargetIndex:
return '<CustomTargetIndex: {!r}[{}]>'.format(
self.target, self.target.get_outputs().index(self.output))
def get_outputs(self):
def get_outputs(self) -> T.List[str]:
return [self.output]
def get_subdir(self):
@ -2509,6 +2518,6 @@ def load(build_dir: str) -> Build:
raise MesonException(load_fail_msg)
return obj
def save(obj, filename):
def save(obj: Build, filename: str) -> None:
with open(filename, 'wb') as f:
pickle.dump(obj, f)
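
Several build.py hunks annotate attributes that start out empty with type comments and quoted forward references such as T.List['Test']. A minimal sketch of that style with toy classes (not Meson's), which mypy checks the same way as inline annotations:

    import typing as T

    class Test:
        def __init__(self, name: str) -> None:
            self.name = name

    class Build:
        def __init__(self) -> None:
            # Type comments let mypy check attributes that begin empty;
            # the quoted 'Test' is a forward reference resolved lazily.
            self.targets = {}  # type: T.Dict[str, 'Test']
            self.tests = []    # type: T.List['Test']

        def get_tests(self) -> T.List['Test']:
            return self.tests

    b = Build()
    b.tests.append(Test('unit'))
    print(len(b.get_tests()))  # 1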

@ -578,7 +578,7 @@ class Compiler(metaclass=abc.ABCMeta):
raise EnvironmentException('Language %s does not support function checks.' % self.get_display_language())
@classmethod
def unix_args_to_native(cls, args):
def unix_args_to_native(cls, args: T.List[str]) -> T.List[str]:
"Always returns a copy that can be independently mutated"
return args[:]

@ -69,7 +69,7 @@ class IntelGnuLikeCompiler(GnuLikeCompiler):
's': ['-Os'],
}
def __init__(self):
def __init__(self) -> None:
super().__init__()
# As of 19.0.0 ICC doesn't have sanitizer, color, or lto support.
#
@ -152,7 +152,7 @@ class IntelVisualStudioLikeCompiler(VisualStudioLikeCompiler):
super().__init__(target)
self.id = 'intel-cl'
def compile(self, code, *, extra_args: T.Optional[T.List[str]] = None, **kwargs) -> T.Iterator['subprocess.Popen']:
def compile(self, code: str, *, extra_args: T.Optional[T.List[str]] = None, **kwargs) -> T.Iterator['subprocess.Popen']:
# This covers a case that .get('foo', []) doesn't, that extra_args is
if kwargs.get('mode', 'compile') != 'link':
extra_args = extra_args.copy() if extra_args is not None else []

@ -47,7 +47,7 @@ _T = T.TypeVar('_T')
class MesonVersionMismatchException(MesonException):
'''Build directory generated with Meson version incompatible with current version'''
def __init__(self, old_version, current_version):
def __init__(self, old_version: str, current_version: str) -> None:
super().__init__('Build directory has been generated with Meson version {}, '
'which is incompatible with current version {}.'
.format(old_version, current_version))
@ -56,7 +56,7 @@ class MesonVersionMismatchException(MesonException):
class UserOption(T.Generic[_T]):
def __init__(self, description, choices, yielding):
def __init__(self, description: str, choices: T.Optional[T.Union[str, T.List[_T]]], yielding: T.Optional[bool]):
super().__init__()
self.choices = choices
self.description = description
@ -66,7 +66,8 @@ class UserOption(T.Generic[_T]):
raise MesonException('Value of "yielding" must be a boolean.')
self.yielding = yielding
def printable_value(self):
def printable_value(self) -> T.Union[str, int, bool, T.List[T.Union[str, int, bool]]]:
assert isinstance(self.value, (str, int, bool, list))
return self.value
# Check that the input is a valid value and return the
@ -75,30 +76,32 @@ class UserOption(T.Generic[_T]):
def validate_value(self, value: T.Any) -> _T:
raise RuntimeError('Derived option class did not override validate_value.')
def set_value(self, newvalue):
def set_value(self, newvalue: T.Any) -> None:
self.value = self.validate_value(newvalue)
class UserStringOption(UserOption[str]):
def __init__(self, description, value, choices=None, yielding=None):
super().__init__(description, choices, yielding)
def __init__(self, description: str, value: T.Any, yielding: T.Optional[bool] = None):
super().__init__(description, None, yielding)
self.set_value(value)
def validate_value(self, value):
def validate_value(self, value: T.Any) -> str:
if not isinstance(value, str):
raise MesonException('Value "%s" for string option is not a string.' % str(value))
return value
class UserBooleanOption(UserOption[bool]):
def __init__(self, description, value, yielding=None):
def __init__(self, description: str, value, yielding: T.Optional[bool] = None) -> None:
super().__init__(description, [True, False], yielding)
self.set_value(value)
def __bool__(self) -> bool:
return self.value
def validate_value(self, value) -> bool:
def validate_value(self, value: T.Any) -> bool:
if isinstance(value, bool):
return value
if not isinstance(value, str):
raise MesonException('Value {} cannot be converted to a boolean'.format(value))
if value.lower() == 'true':
return True
if value.lower() == 'false':
@ -106,7 +109,7 @@ class UserBooleanOption(UserOption[bool]):
raise MesonException('Value %s is not boolean (true or false).' % value)
class UserIntegerOption(UserOption[int]):
def __init__(self, description, value, yielding=None):
def __init__(self, description: str, value: T.Any, yielding: T.Optional[bool] = None):
min_value, max_value, default_value = value
self.min_value = min_value
self.max_value = max_value
@ -119,7 +122,7 @@ class UserIntegerOption(UserOption[int]):
super().__init__(description, choices, yielding)
self.set_value(default_value)
def validate_value(self, value) -> int:
def validate_value(self, value: T.Any) -> int:
if isinstance(value, str):
value = self.toint(value)
if not isinstance(value, int):
@ -130,35 +133,35 @@ class UserIntegerOption(UserOption[int]):
raise MesonException('New value %d is more than maximum value %d.' % (value, self.max_value))
return value
def toint(self, valuestring) -> int:
def toint(self, valuestring: str) -> int:
try:
return int(valuestring)
except ValueError:
raise MesonException('Value string "%s" is not convertible to an integer.' % valuestring)
class UserUmaskOption(UserIntegerOption, UserOption[T.Union[str, int]]):
def __init__(self, description, value, yielding=None):
def __init__(self, description: str, value: T.Any, yielding: T.Optional[bool] = None):
super().__init__(description, (0, 0o777, value), yielding)
self.choices = ['preserve', '0000-0777']
def printable_value(self):
def printable_value(self) -> str:
if self.value == 'preserve':
return self.value
return format(self.value, '04o')
def validate_value(self, value):
def validate_value(self, value: T.Any) -> T.Union[str, int]:
if value is None or value == 'preserve':
return 'preserve'
return super().validate_value(value)
def toint(self, valuestring):
def toint(self, valuestring: T.Union[str, int]) -> int:
try:
return int(valuestring, 8)
except ValueError as e:
raise MesonException('Invalid mode: {}'.format(e))
class UserComboOption(UserOption[str]):
def __init__(self, description, choices: T.List[str], value, yielding=None):
def __init__(self, description: str, choices: T.List[str], value: T.Any, yielding: T.Optional[bool] = None):
super().__init__(description, choices, yielding)
if not isinstance(self.choices, list):
raise MesonException('Combo choices must be an array.')
@ -167,7 +170,7 @@ class UserComboOption(UserOption[str]):
raise MesonException('Combo choice elements must be strings.')
self.set_value(value)
def validate_value(self, value):
def validate_value(self, value: T.Any) -> str:
if value not in self.choices:
if isinstance(value, bool):
_type = 'boolean'
@ -182,13 +185,13 @@ class UserComboOption(UserOption[str]):
return value
class UserArrayOption(UserOption[T.List[str]]):
def __init__(self, description, value, split_args=False, user_input=False, allow_dups=False, **kwargs):
def __init__(self, description: str, value: T.Union[str, T.List[str]], split_args: bool = False, user_input: bool = False, allow_dups: bool = False, **kwargs: T.Any) -> None:
super().__init__(description, kwargs.get('choices', []), yielding=kwargs.get('yielding', None))
self.split_args = split_args
self.allow_dups = allow_dups
self.value = self.validate_value(value, user_input=user_input)
def validate_value(self, value, user_input: bool = True) -> T.List[str]:
def validate_value(self, value: T.Union[str, T.List[str]], user_input: bool = True) -> T.List[str]:
# User input is for options defined on the command line (via -D
# options). Users can put their input in as a comma separated
# string, but for defining options in meson_options.txt the format
@ -232,16 +235,16 @@ class UserArrayOption(UserOption[T.List[str]]):
class UserFeatureOption(UserComboOption):
static_choices = ['enabled', 'disabled', 'auto']
def __init__(self, description, value, yielding=None):
def __init__(self, description: str, value: T.Any, yielding: T.Optional[bool] = None):
super().__init__(description, self.static_choices, value, yielding)
def is_enabled(self):
def is_enabled(self) -> bool:
return self.value == 'enabled'
def is_disabled(self):
def is_disabled(self) -> bool:
return self.value == 'disabled'
def is_auto(self):
def is_auto(self) -> bool:
return self.value == 'auto'
if T.TYPE_CHECKING:
@ -362,7 +365,7 @@ _V = T.TypeVar('_V')
class CoreData:
def __init__(self, options: argparse.Namespace, scratch_dir: str):
def __init__(self, options: argparse.Namespace, scratch_dir: str, meson_command: T.List[str]):
self.lang_guids = {
'default': '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942',
'c': '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942',
@ -373,6 +376,7 @@ class CoreData:
self.test_guid = str(uuid.uuid4()).upper()
self.regen_guid = str(uuid.uuid4()).upper()
self.install_guid = str(uuid.uuid4()).upper()
self.meson_command = meson_command
self.target_guids = {}
self.version = version
self.builtins = {} # type: OptionDictType
@ -534,7 +538,7 @@ class CoreData:
value = None
opts_map[optname] = opt.init_option(key, value, default_prefix())
def init_backend_options(self, backend_name):
def init_backend_options(self, backend_name: str) -> None:
if backend_name == 'ninja':
self.backend_options['backend_max_links'] = \
UserIntegerOption(
@ -547,7 +551,7 @@ class CoreData:
'Default project to execute in Visual Studio',
'')
def get_builtin_option(self, optname, subproject=''):
def get_builtin_option(self, optname: str, subproject: str = '') -> T.Union[str, int, bool]:
raw_optname = optname
if subproject:
optname = subproject + ':' + optname
@ -683,7 +687,7 @@ class CoreData:
def get_external_link_args(self, for_machine: MachineChoice, lang):
return self.compiler_options[for_machine][lang]['link_args'].value
def merge_user_options(self, options):
def merge_user_options(self, options: T.Dict[str, T.Union[str, bool, int]]) -> None:
for (name, value) in options.items():
if name not in self.user_options:
self.user_options[name] = value
@ -715,7 +719,7 @@ class CoreData:
if k in build_opts:
build_opts[k].set_value(o.value)
def set_options(self, options, *, subproject='', warn_unknown=True):
def set_options(self, options: T.Dict[str, T.Any], subproject: str = '', warn_unknown: bool = True) -> None:
if not self.is_cross_build():
options = self.strip_build_option_names(options)
# Set prefix first because it's needed to sanitize other options
@ -912,10 +916,10 @@ def parse_machine_files(filenames):
parser = MachineFileParser(filenames)
return parser.sections
def get_cmd_line_file(build_dir):
def get_cmd_line_file(build_dir: str) -> str:
return os.path.join(build_dir, 'meson-private', 'cmd_line.txt')
def read_cmd_line_file(build_dir, options):
def read_cmd_line_file(build_dir: str, options: argparse.Namespace) -> None:
filename = get_cmd_line_file(build_dir)
if not os.path.isfile(filename):
return
@ -937,10 +941,10 @@ def read_cmd_line_file(build_dir, options):
# literal_eval to get it into the list of strings.
options.native_file = ast.literal_eval(properties.get('native_file', '[]'))
def cmd_line_options_to_string(options):
def cmd_line_options_to_string(options: argparse.Namespace) -> T.Dict[str, str]:
return {k: str(v) for k, v in options.cmd_line_options.items()}
def write_cmd_line_file(build_dir, options):
def write_cmd_line_file(build_dir: str, options: argparse.Namespace) -> None:
filename = get_cmd_line_file(build_dir)
config = CmdLineFileParser()
@ -955,7 +959,7 @@ def write_cmd_line_file(build_dir, options):
with open(filename, 'w') as f:
config.write(f)
def update_cmd_line_file(build_dir, options):
def update_cmd_line_file(build_dir: str, options: argparse.Namespace):
filename = get_cmd_line_file(build_dir)
config = CmdLineFileParser()
config.read(filename)
@ -963,7 +967,7 @@ def update_cmd_line_file(build_dir, options):
with open(filename, 'w') as f:
config.write(f)
def get_cmd_line_options(build_dir, options):
def get_cmd_line_options(build_dir: str, options: argparse.Namespace) -> str:
copy = argparse.Namespace(**vars(options))
read_cmd_line_file(build_dir, copy)
cmdline = ['-D{}={}'.format(k, v) for k, v in copy.cmd_line_options.items()]
@ -976,7 +980,7 @@ def get_cmd_line_options(build_dir, options):
def major_versions_differ(v1, v2):
return v1.split('.')[0:2] != v2.split('.')[0:2]
def load(build_dir):
def load(build_dir: str) -> CoreData:
filename = os.path.join(build_dir, 'meson-private', 'coredata.dat')
load_fail_msg = 'Coredata file {!r} is corrupted. Try with a fresh build tree.'.format(filename)
try:
@ -995,7 +999,7 @@ def load(build_dir):
raise MesonVersionMismatchException(obj.version, version)
return obj
def save(obj, build_dir):
def save(obj: CoreData, build_dir: str) -> str:
filename = os.path.join(build_dir, 'meson-private', 'coredata.dat')
prev_filename = filename + '.prev'
tempfilename = filename + '~'
@ -1012,7 +1016,7 @@ def save(obj, build_dir):
return filename
def register_builtin_arguments(parser):
def register_builtin_arguments(parser: argparse.ArgumentParser) -> None:
for n, b in BUILTIN_OPTIONS.items():
b.add_to_argparse(n, parser, '', '')
for n, b in BUILTIN_OPTIONS_PER_MACHINE.items():
@ -1021,7 +1025,7 @@ def register_builtin_arguments(parser):
parser.add_argument('-D', action='append', dest='projectoptions', default=[], metavar="option",
help='Set the value of an option, can be used several times to set multiple options.')
def create_options_dict(options):
def create_options_dict(options: T.List[str]) -> T.Dict[str, str]:
result = OrderedDict()
for o in options:
try:
@ -1031,7 +1035,7 @@ def create_options_dict(options):
result[key] = value
return result
def parse_cmd_line_options(args):
def parse_cmd_line_options(args: argparse.Namespace) -> None:
args.cmd_line_options = create_options_dict(args.projectoptions)
# Merge builtin options set with --option into the dict.
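
The coredata.py hunks type the option classes against the generic UserOption[_T] base, with validate_value narrowing T.Any input down to _T. A reduced sketch of that shape, using invented class names rather than the real hierarchy:

    import typing as T

    _T = T.TypeVar('_T')

    class Option(T.Generic[_T]):
        def __init__(self, description: str, value: T.Any) -> None:
            self.description = description
            self.value = self.validate_value(value)

        def validate_value(self, value: T.Any) -> _T:
            raise NotImplementedError('subclasses narrow T.Any down to _T')

        def set_value(self, newvalue: T.Any) -> None:
            # Every setter goes through the subclass's validator.
            self.value = self.validate_value(newvalue)

    class IntOption(Option[int]):
        def validate_value(self, value: T.Any) -> int:
            if isinstance(value, str):
                value = int(value)
            if not isinstance(value, int):
                raise ValueError('not an integer: {!r}'.format(value))
            return value

    opt = IntOption('parallel jobs', '4')
    opt.set_value(8)
    print(opt.value + 1)  # 9; mypy sees opt.value as int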

@ -114,11 +114,11 @@ class Dependency:
def __init__(self, type_name, kwargs):
self.name = "null"
self.version = None
self.version = None # type: T.Optional[str]
self.language = None # None means C-like
self.is_found = False
self.type_name = type_name
self.compile_args = []
self.compile_args = [] # type: T.List[str]
self.link_args = []
# Raw -L and -l arguments without manual library searching
# If None, self.link_args will be used
@ -132,7 +132,7 @@ class Dependency:
s = '<{0} {1}: {2}>'
return s.format(self.__class__.__name__, self.name, self.is_found)
def get_compile_args(self):
def get_compile_args(self) -> T.List[str]:
if self.include_type == 'system':
converted = []
for i in self.compile_args:
@ -156,7 +156,7 @@ class Dependency:
return self.raw_link_args
return self.link_args
def found(self):
def found(self) -> bool:
return self.is_found
def get_sources(self):
@ -171,7 +171,7 @@ class Dependency:
def get_name(self):
return self.name
def get_version(self):
def get_version(self) -> str:
if self.version:
return self.version
else:
@ -183,7 +183,7 @@ class Dependency:
def get_exe_args(self, compiler):
return []
def get_pkgconfig_variable(self, variable_name, kwargs):
def get_pkgconfig_variable(self, variable_name: str, kwargs: T.Dict[str, T.Any]) -> str:
raise DependencyException('{!r} is not a pkgconfig dependency'.format(self.name))
def get_configtool_variable(self, variable_name):
@ -261,7 +261,7 @@ class InternalDependency(Dependency):
setattr(result, k, copy.deepcopy(v, memo))
return result
def get_pkgconfig_variable(self, variable_name, kwargs):
def get_pkgconfig_variable(self, variable_name: str, kwargs: T.Dict[str, T.Any]) -> str:
raise DependencyException('Method "get_pkgconfig_variable()" is '
'invalid for an internal dependency')
@ -504,7 +504,7 @@ class ConfigToolDependency(ExternalDependency):
return self.config is not None
def get_config_value(self, args, stage):
def get_config_value(self, args: T.List[str], stage: str) -> T.List[str]:
p, out, err = Popen_safe(self.config + args)
if p.returncode != 0:
if self.required:
@ -877,7 +877,7 @@ class PkgConfigDependency(ExternalDependency):
(self.name, out_raw))
self.link_args, self.raw_link_args = self._search_libs(out, out_raw)
def get_pkgconfig_variable(self, variable_name, kwargs):
def get_pkgconfig_variable(self, variable_name: str, kwargs: T.Dict[str, T.Any]) -> str:
options = ['--variable=' + variable_name, self.name]
if 'define_variable' in kwargs:
@ -2037,7 +2037,7 @@ class ExternalProgram:
def found(self) -> bool:
return self.command[0] is not None
def get_command(self):
def get_command(self) -> T.List[str]:
return self.command[:]
def get_path(self):
@ -2550,7 +2550,7 @@ def factory_methods(methods: T.Set[DependencyMethods]) -> T.Callable[['FactoryTy
This helps to make factory functions self documenting
>>> @factory_methods([DependencyMethods.PKGCONFIG, DependencyMethods.CMAKE])
>>> def factory(env: Environment, for_machine: MachineChoice, kwargs: T.Dict[str, T.Any], methods: T.Set[DependencyMethods]) -> T.List[DependencyType]:
>>> def factory(env: Environment, for_machine: MachineChoice, kwargs: T.Dict[str, T.Any], methods: T.List[DependencyMethods]) -> T.List[T.Callable[[], 'Dependency']]:
>>> pass
"""

@ -95,7 +95,7 @@ class BoostIncludeDir():
def __repr__(self) -> str:
return '<BoostIncludeDir: {} -- {}>'.format(self.version, self.path)
def __lt__(self, other: T.Any) -> bool:
def __lt__(self, other: object) -> bool:
if isinstance(other, BoostIncludeDir):
return (self.version_int, self.path) < (other.version_int, other.path)
return NotImplemented
@ -187,7 +187,7 @@ class BoostLibraryFile():
def __repr__(self) -> str:
return '<LIB: {} {:<32} {}>'.format(self.abitag, self.mod_name, self.path)
def __lt__(self, other: T.Any) -> bool:
def __lt__(self, other: object) -> bool:
if isinstance(other, BoostLibraryFile):
return (
self.mod_name, self.static, self.version_lib, self.arch,
@ -204,7 +204,7 @@ class BoostLibraryFile():
)
return NotImplemented
def __eq__(self, other: T.Any) -> bool:
def __eq__(self, other: object) -> bool:
if isinstance(other, BoostLibraryFile):
return self.name == other.name
return NotImplemented
@ -339,12 +339,14 @@ class BoostLibraryFile():
return [self.path.as_posix()]
class BoostDependency(ExternalDependency):
def __init__(self, environment: Environment, kwargs):
def __init__(self, environment: Environment, kwargs: T.Dict[str, T.Any]) -> None:
super().__init__('boost', environment, kwargs, language='cpp')
self.debug = environment.coredata.get_builtin_option('buildtype').startswith('debug')
buildtype = environment.coredata.get_builtin_option('buildtype')
assert isinstance(buildtype, str)
self.debug = buildtype.startswith('debug')
self.multithreading = kwargs.get('threading', 'multi') == 'multi'
self.boost_root = None
self.boost_root = None # type: T.Optional[Path]
self.explicit_static = 'static' in kwargs
# Extract and validate modules
@ -385,7 +387,7 @@ class BoostDependency(ExternalDependency):
# Finally, look for paths from .pc files and from searching the filesystem
self.detect_roots()
def check_and_set_roots(self, roots) -> None:
def check_and_set_roots(self, roots: T.List[Path]) -> None:
roots = list(mesonlib.OrderedSet(roots))
for j in roots:
# 1. Look for the boost headers (boost/version.hpp)
@ -403,7 +405,7 @@ class BoostDependency(ExternalDependency):
self.boost_root = j
break
def detect_boost_machine_file(self, props) -> None:
def detect_boost_machine_file(self, props: T.Dict[str, str]) -> None:
incdir = props.get('boost_includedir')
libdir = props.get('boost_librarydir')
@ -434,7 +436,7 @@ class BoostDependency(ExternalDependency):
self.check_and_set_roots(paths)
def detect_boost_env(self):
def detect_boost_env(self) -> None:
boost_includedir = get_env_var(self.for_machine, self.env.is_cross_build, 'BOOST_INCLUDEDIR')
boost_librarydir = get_env_var(self.for_machine, self.env.is_cross_build, 'BOOST_LIBRARYDIR')
@ -658,7 +660,7 @@ class BoostDependency(ExternalDependency):
libs += [BoostLibraryFile(i)]
return [x for x in libs if x.is_boost()] # Filter out no boost libraries
def detect_split_root(self, inc_dir, lib_dir) -> None:
def detect_split_root(self, inc_dir: Path, lib_dir: Path) -> None:
boost_inc_dir = None
for j in [inc_dir / 'version.hpp', inc_dir / 'boost' / 'version.hpp']:
if j.is_file():

@ -22,10 +22,14 @@ from .. import mlog
from ..mesonlib import split_args, listify
from .base import (DependencyException, DependencyMethods, ExternalDependency, ExternalProgram,
PkgConfigDependency)
import typing as T
if T.TYPE_CHECKING:
from ..environment import Environment
class HDF5Dependency(ExternalDependency):
def __init__(self, environment, kwargs):
def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
language = kwargs.get('language', 'c')
super().__init__('hdf5', environment, kwargs, language=language)
kwargs['required'] = False
@ -109,7 +113,7 @@ class HDF5Dependency(ExternalDependency):
cmd = prog.get_command() + [shlib_arg, '-show']
p = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, timeout=15)
if p.returncode != 0:
mlog.debug('Command', mlog.bold(cmd), 'failed to run:')
mlog.debug('Command', mlog.bold(str(cmd)), 'failed to run:')
mlog.debug(mlog.bold('Standard output\n'), p.stdout)
mlog.debug(mlog.bold('Standard error\n'), p.stderr)
return
@ -127,5 +131,5 @@ class HDF5Dependency(ExternalDependency):
return
@staticmethod
def get_methods():
def get_methods() -> T.List[DependencyMethods]:
return [DependencyMethods.AUTO, DependencyMethods.PKGCONFIG]
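
Like several other files in this commit, hdf5.py gains imports that exist only for annotations, guarded by if T.TYPE_CHECKING and referenced as string literals. A minimal sketch of that pattern; the module and method names below are hypothetical, not Meson's:

    import typing as T

    if T.TYPE_CHECKING:
        # Imported for type checking only; never executed at runtime, so it
        # cannot create an import cycle or a hard runtime dependency.
        from myproject.environment import Environment  # hypothetical module

    def build_dir(env: 'Environment') -> str:
        # The quoted annotation keeps this valid at runtime even though
        # Environment is not a runtime name in this module.
        return env.get_build_dir()  # hypothetical method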

@ -24,6 +24,7 @@ from ..environment import detect_cpu_family
if T.TYPE_CHECKING:
from .base import Dependency
from ..compilers import Compiler
from ..compilers.compiler import CompilerType
from ..environment import Environment, MachineChoice
@ -36,7 +37,9 @@ def mpi_factory(env: 'Environment', for_machine: 'MachineChoice',
return []
candidates = [] # type: T.List[T.Callable[[], Dependency]]
compiler = detect_compiler('mpi', env, for_machine, language)
compiler = detect_compiler('mpi', env, for_machine, language) # type: T.Optional['CompilerType']
if compiler is None:
return []
compiler_is_intel = compiler.get_id() in {'intel', 'intel-cl'}
# Only OpenMPI has pkg-config, and it doesn't work with the intel compilers

@ -111,7 +111,7 @@ def get_env_var_pair(for_machine: MachineChoice,
def get_env_var(for_machine: MachineChoice,
is_cross: bool,
var_name: str) -> T.Tuple[T.Optional[str], T.Optional[str]]:
var_name: str) -> T.Optional[str]:
ret = get_env_var_pair(for_machine, is_cross, var_name)
if ret is None:
return None
@ -147,7 +147,7 @@ class Properties:
return p
return mesonlib.listify(p)
def __eq__(self, other: T.Any) -> 'T.Union[bool, NotImplemented]':
def __eq__(self, other: object) -> bool:
if isinstance(other, type(self)):
return self.properties == other.properties
return NotImplemented
@ -172,8 +172,8 @@ class MachineInfo:
self.endian = endian
self.is_64_bit = cpu_family in CPU_FAMILES_64_BIT # type: bool
def __eq__(self, other: T.Any) -> 'T.Union[bool, NotImplemented]':
if self.__class__ is not other.__class__:
def __eq__(self, other: object) -> bool:
if not isinstance(other, MachineInfo):
return NotImplemented
return \
self.system == other.system and \
@ -181,8 +181,8 @@ class MachineInfo:
self.cpu == other.cpu and \
self.endian == other.endian
def __ne__(self, other: T.Any) -> 'T.Union[bool, NotImplemented]':
if self.__class__ is not other.__class__:
def __ne__(self, other: object) -> bool:
if not isinstance(other, MachineInfo):
return NotImplemented
return not self.__eq__(other)
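
The envconfig.py hunks (like the boost.py comparison methods earlier) change the parameter type to object and narrow it with isinstance before touching its attributes, returning NotImplemented otherwise. A standalone sketch of that pattern with a toy class, not Meson's MachineInfo:

    class HostInfo:
        def __init__(self, system: str, cpu: str) -> None:
            self.system = system
            self.cpu = cpu

        def __eq__(self, other: object) -> bool:
            # 'object' is the parameter type mypy expects for __eq__; the
            # isinstance check narrows it so the attribute access is typed.
            if not isinstance(other, HostInfo):
                return NotImplemented  # Python then tries the reflected operation
            return self.system == other.system and self.cpu == other.cpu

    print(HostInfo('linux', 'x86_64') == HostInfo('linux', 'x86_64'))  # True
    print(HostInfo('linux', 'x86_64') == 'linux')                      # False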

@ -132,6 +132,9 @@ build_filename = 'meson.build'
CompilersDict = T.Dict[str, Compiler]
if T.TYPE_CHECKING:
import argparse
def detect_gcovr(min_version='3.3', new_rootdir_version='4.2', log=False):
gcovr_exe = 'gcovr'
try:
@ -153,7 +156,7 @@ def detect_llvm_cov():
return tool
return None
def find_coverage_tools():
def find_coverage_tools() -> T.Tuple[T.Optional[str], T.Optional[str], T.Optional[str], T.Optional[str], T.Optional[str]]:
gcovr_exe, gcovr_new_rootdir = detect_gcovr()
llvm_cov_exe = detect_llvm_cov()
@ -522,7 +525,7 @@ class Environment:
log_dir = 'meson-logs'
info_dir = 'meson-info'
def __init__(self, source_dir, build_dir, options):
def __init__(self, source_dir: T.Optional[str], build_dir: T.Optional[str], options: 'argparse.Namespace') -> None:
self.source_dir = source_dir
self.build_dir = build_dir
# Do not try to create build directories when build_dir is none.
@ -535,7 +538,7 @@ class Environment:
os.makedirs(self.log_dir, exist_ok=True)
os.makedirs(self.info_dir, exist_ok=True)
try:
self.coredata = coredata.load(self.get_build_dir())
self.coredata = coredata.load(self.get_build_dir()) # type: coredata.CoreData
self.first_invocation = False
except FileNotFoundError:
self.create_new_coredata(options)
@ -807,29 +810,28 @@ class Environment:
self.default_pkgconfig = ['pkg-config']
self.wrap_resolver = None
def create_new_coredata(self, options):
def create_new_coredata(self, options: 'argparse.Namespace') -> None:
# WARNING: Don't use any values from coredata in __init__. It gets
# re-initialized with project options by the interpreter during
# build file parsing.
self.coredata = coredata.CoreData(options, self.scratch_dir)
# Used by the regenchecker script, which runs meson
self.coredata.meson_command = mesonlib.meson_command
# meson_command is used by the regenchecker script, which runs meson
self.coredata = coredata.CoreData(options, self.scratch_dir, mesonlib.meson_command)
self.first_invocation = True
def is_cross_build(self, when_building_for: MachineChoice = MachineChoice.HOST) -> bool:
return self.coredata.is_cross_build(when_building_for)
def dump_coredata(self):
def dump_coredata(self) -> str:
return coredata.save(self.coredata, self.get_build_dir())
def get_script_dir(self):
def get_script_dir(self) -> str:
import mesonbuild.scripts
return os.path.dirname(mesonbuild.scripts.__file__)
def get_log_dir(self):
def get_log_dir(self) -> str:
return self.log_dir
def get_coredata(self):
def get_coredata(self) -> coredata.CoreData:
return self.coredata
def get_build_command(self, unbuffered=False):
@ -1035,7 +1037,7 @@ class Environment:
:extra_args: Any additional arguments required (such as a source file)
"""
self.coredata.add_lang_args(comp_class.language, comp_class, for_machine, self)
extra_args = T.cast(T.List[str], extra_args or [])
extra_args = extra_args or []
extra_args += self.coredata.compiler_options[for_machine][comp_class.language]['args'].value
if isinstance(comp_class.LINKER_PREFIX, str):
@ -1535,7 +1537,7 @@ class Environment:
self._handle_exceptions(popen_exceptions, compilers)
def get_scratch_dir(self):
def get_scratch_dir(self) -> str:
return self.scratch_dir
def detect_objc_compiler(self, for_machine: MachineInfo) -> 'Compiler':
@ -1974,10 +1976,10 @@ class Environment:
self._handle_exceptions(popen_exceptions, linkers, 'linker')
raise EnvironmentException('Unknown static linker "{}"'.format(' '.join(linkers)))
def get_source_dir(self):
def get_source_dir(self) -> str:
return self.source_dir
def get_build_dir(self):
def get_build_dir(self) -> str:
return self.build_dir
def get_import_lib_dir(self) -> str:

@ -31,9 +31,10 @@ from .interpreterbase import InterpreterException, InvalidArguments, InvalidCode
from .interpreterbase import InterpreterObject, MutableInterpreterObject, Disabler, disablerIfNotFound
from .interpreterbase import FeatureNew, FeatureDeprecated, FeatureNewKwargs, FeatureDeprecatedKwargs
from .interpreterbase import ObjectHolder, MesonVersionString
from .modules import ModuleReturnValue
from .interpreterbase import TYPE_var, TYPE_nkwargs
from .modules import ModuleReturnValue, ExtensionModule
from .cmake import CMakeInterpreter
from .backend.backends import TestProtocol
from .backend.backends import TestProtocol, Backend
from pathlib import Path, PurePath
import os
@ -673,22 +674,22 @@ class MachineHolder(InterpreterObject, ObjectHolder):
@noPosargs
@permittedKwargs({})
def cpu_family_method(self, args, kwargs):
def cpu_family_method(self, args: T.List[TYPE_var], kwargs: TYPE_nkwargs) -> str:
return self.held_object.cpu_family
@noPosargs
@permittedKwargs({})
def cpu_method(self, args, kwargs):
def cpu_method(self, args: T.List[TYPE_var], kwargs: TYPE_nkwargs) -> str:
return self.held_object.cpu
@noPosargs
@permittedKwargs({})
def system_method(self, args, kwargs):
def system_method(self, args: T.List[TYPE_var], kwargs: TYPE_nkwargs) -> str:
return self.held_object.system
@noPosargs
@permittedKwargs({})
def endian_method(self, args, kwargs):
def endian_method(self, args: T.List[TYPE_var], kwargs: TYPE_nkwargs) -> str:
return self.held_object.endian
class IncludeDirsHolder(InterpreterObject, ObjectHolder):
@ -2333,8 +2334,18 @@ permitted_kwargs = {'add_global_arguments': {'language', 'native'},
class Interpreter(InterpreterBase):
def __init__(self, build, backend=None, subproject='', subdir='', subproject_dir='subprojects',
modules = None, default_project_options=None, mock=False, ast=None):
def __init__(
self,
build: build.Build,
backend: T.Optional[Backend] = None,
subproject: str = '',
subdir: str = '',
subproject_dir: str = 'subprojects',
modules: T.Optional[T.Dict[str, ExtensionModule]] = None,
default_project_options: T.Optional[T.Dict[str, str]] = None,
mock: bool = False,
ast: T.Optional[mparser.CodeBlockNode] = None,
) -> None:
super().__init__(build.environment.get_source_dir(), subdir, subproject)
self.an_unpicklable_object = mesonlib.an_unpicklable_object
self.build = build
@ -2396,7 +2407,8 @@ class Interpreter(InterpreterBase):
self.builtin['target_machine'] = \
MachineHolder(self.build.environment.machines.target)
def get_non_matching_default_options(self):
# TODO: Why is this in interpreter.py and not CoreData or Environment?
def get_non_matching_default_options(self) -> T.Iterator[T.Tuple[str, str, coredata.UserOption]]:
env = self.environment
for def_opt_name, def_opt_value in self.project_default_options.items():
for opts in env.coredata.get_all_options():
@ -2530,7 +2542,7 @@ class Interpreter(InterpreterBase):
self.process_new_values(invalues)
return self.holderify(return_object.return_value)
def get_build_def_files(self):
def get_build_def_files(self) -> T.List[str]:
return self.build_def_files
def add_build_def_file(self, f):
@ -2599,7 +2611,9 @@ class Interpreter(InterpreterBase):
module = importlib.import_module('mesonbuild.modules.' + modname)
except ImportError:
raise InvalidArguments('Module "%s" does not exist' % (modname, ))
self.modules[modname] = module.initialize(self)
ext_module = module.initialize(self)
assert isinstance(ext_module, ExtensionModule)
self.modules[modname] = ext_module
@stringArgs
@noKwargs
@ -4598,7 +4612,7 @@ different subdirectory.
def func_join_paths(self, node, args, kwargs):
return self.join_path_strings(args)
def run(self):
def run(self) -> None:
super().run()
mlog.log('Build targets in project:', mlog.bold(str(len(self.build.targets))))
FeatureNew.report(self.subproject)
@ -4608,14 +4622,14 @@ different subdirectory.
if self.subproject == '':
self._print_summary()
def print_extra_warnings(self):
def print_extra_warnings(self) -> None:
# TODO cross compilation
for c in self.coredata.compilers.host.values():
if c.get_id() == 'clang':
self.check_clang_asan_lundef()
break
def check_clang_asan_lundef(self):
def check_clang_asan_lundef(self) -> None:
if 'b_lundef' not in self.coredata.base_options:
return
if 'b_sanitize' not in self.coredata.base_options:

@ -24,14 +24,31 @@ import collections.abc
from functools import wraps
import typing as T
TV_fw_var = T.Union[str, int, float, bool, list, dict, 'InterpreterObject', 'ObjectHolder']
TV_fw_args = T.List[T.Union[mparser.BaseNode, TV_fw_var]]
TV_fw_kwargs = T.Dict[str, T.Union[mparser.BaseNode, TV_fw_var]]
TV_func = T.TypeVar('TV_func', bound=T.Callable[..., T.Any])
TYPE_elementary = T.Union[str, int, float, bool]
TYPE_var = T.Union[TYPE_elementary, T.List[T.Any], T.Dict[str, T.Any], 'InterpreterObject', 'ObjectHolder']
TYPE_nvar = T.Union[TYPE_var, mparser.BaseNode]
TYPE_nkwargs = T.Dict[str, TYPE_nvar]
TYPE_key_resolver = T.Callable[[mparser.BaseNode], str]
class InterpreterObject:
def __init__(self):
self.methods = {} # type: T.Dict[str, T.Callable]
def __init__(self) -> None:
self.methods = {} # type: T.Dict[str, T.Callable[[T.List[TYPE_nvar], TYPE_nkwargs], TYPE_var]]
# Current node set during a method call. This can be used as location
# when printing a warning message during a method call.
self.current_node = None # type: mparser.BaseNode
def method_call(self, method_name: str, args: T.List[T.Union[mparser.BaseNode, str, int, float, bool, list, dict, 'InterpreterObject', 'ObjectHolder']], kwargs: T.Dict[str, T.Union[mparser.BaseNode, str, int, float, bool, list, dict, 'InterpreterObject', 'ObjectHolder']]):
def method_call(
self,
method_name: str,
args: TV_fw_args,
kwargs: TV_fw_kwargs
) -> TYPE_var:
if method_name in self.methods:
method = self.methods[method_name]
if not getattr(method, 'no-args-flattening', False):
@ -42,18 +59,13 @@ class InterpreterObject:
TV_InterpreterObject = T.TypeVar('TV_InterpreterObject')
class ObjectHolder(T.Generic[TV_InterpreterObject]):
def __init__(self, obj: InterpreterObject, subproject: T.Optional[str] = None):
def __init__(self, obj: InterpreterObject, subproject: T.Optional[str] = None) -> None:
self.held_object = obj # type: InterpreterObject
self.subproject = subproject # type: str
def __repr__(self):
def __repr__(self) -> str:
return '<Holder: {!r}>'.format(self.held_object)
TYPE_elementary = T.Union[str, int, float, bool]
TYPE_var = T.Union[TYPE_elementary, list, dict, InterpreterObject, ObjectHolder]
TYPE_nvar = T.Union[TYPE_var, mparser.BaseNode]
TYPE_nkwargs = T.Dict[T.Union[mparser.BaseNode, str], TYPE_nvar]
class MesonVersionString(str):
pass
@ -67,11 +79,11 @@ def check_stringlist(a: T.Any, msg: str = 'Arguments must be strings.') -> None:
mlog.debug('Element not a string:', str(a))
raise InvalidArguments(msg)
def _get_callee_args(wrapped_args, want_subproject: bool = False):
def _get_callee_args(wrapped_args: T.Sequence[T.Any], want_subproject: bool = False) -> T.Tuple[T.Any, mparser.BaseNode, TV_fw_args, TV_fw_kwargs, T.Optional[str]]:
s = wrapped_args[0]
n = len(wrapped_args)
# Raise an error if the codepaths are not there
subproject = None
subproject = None # type: T.Optional[str]
if want_subproject and n == 2:
if hasattr(s, 'subproject'):
# Interpreter base types have 2 args: self, node
@ -145,18 +157,18 @@ def flatten(args: T.Union[TYPE_nvar, T.List[TYPE_nvar]]) -> T.List[TYPE_nvar]:
result.append(a)
return result
def noPosargs(f):
def noPosargs(f: TV_func) -> TV_func:
@wraps(f)
def wrapped(*wrapped_args, **wrapped_kwargs):
def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
args = _get_callee_args(wrapped_args)[2]
if args:
raise InvalidArguments('Function does not take positional arguments.')
return f(*wrapped_args, **wrapped_kwargs)
return wrapped
return T.cast(TV_func, wrapped)
def builtinMethodNoKwargs(f):
def builtinMethodNoKwargs(f: TV_func) -> TV_func:
@wraps(f)
def wrapped(*wrapped_args, **wrapped_kwargs):
def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
node = wrapped_args[0].current_node
method_name = wrapped_args[2]
kwargs = wrapped_args[4]
@ -165,56 +177,56 @@ def builtinMethodNoKwargs(f):
'This will become a hard error in the future',
location=node)
return f(*wrapped_args, **wrapped_kwargs)
return wrapped
return T.cast(TV_func, wrapped)
def noKwargs(f):
def noKwargs(f: TV_func) -> TV_func:
@wraps(f)
def wrapped(*wrapped_args, **wrapped_kwargs):
def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
kwargs = _get_callee_args(wrapped_args)[3]
if kwargs:
raise InvalidArguments('Function does not take keyword arguments.')
return f(*wrapped_args, **wrapped_kwargs)
return wrapped
return T.cast(TV_func, wrapped)
def stringArgs(f):
def stringArgs(f: TV_func) -> TV_func:
@wraps(f)
def wrapped(*wrapped_args, **wrapped_kwargs):
def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
args = _get_callee_args(wrapped_args)[2]
assert(isinstance(args, list))
check_stringlist(args)
return f(*wrapped_args, **wrapped_kwargs)
return wrapped
return T.cast(TV_func, wrapped)
def noArgsFlattening(f):
def noArgsFlattening(f: TV_func) -> TV_func:
setattr(f, 'no-args-flattening', True) # noqa: B010
return f
def disablerIfNotFound(f):
def disablerIfNotFound(f: TV_func) -> TV_func:
@wraps(f)
def wrapped(*wrapped_args, **wrapped_kwargs):
def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
kwargs = _get_callee_args(wrapped_args)[3]
disabler = kwargs.pop('disabler', False)
ret = f(*wrapped_args, **wrapped_kwargs)
if disabler and not ret.held_object.found():
return Disabler()
return ret
return wrapped
return T.cast(TV_func, wrapped)
class permittedKwargs:
def __init__(self, permitted: T.Set[str]):
self.permitted = permitted # type: T.Set[str]
def __call__(self, f):
def __call__(self, f: TV_func) -> TV_func:
@wraps(f)
def wrapped(*wrapped_args, **wrapped_kwargs):
def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
s, node, args, kwargs, _ = _get_callee_args(wrapped_args)
for k in kwargs:
if k not in self.permitted:
mlog.warning('''Passed invalid keyword argument "{}".'''.format(k), location=node)
mlog.warning('This will become a hard error in the future.')
return f(*wrapped_args, **wrapped_kwargs)
return wrapped
return T.cast(TV_func, wrapped)
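All of the decorator changes in this hunk and the ones below follow the same recipe: TV_func is a TypeVar bound to a callable, so decorating does not erase the wrapped function's signature, and the inner wrapper is cast back to that TypeVar because mypy only sees it as Callable[..., Any]. A minimal standalone sketch of the recipe (the decorator name here is illustrative):

import typing as T
from functools import wraps

TV_func = T.TypeVar('TV_func', bound=T.Callable[..., T.Any])

def passthrough(f: TV_func) -> TV_func:
    @wraps(f)
    def wrapped(*args: T.Any, **kwargs: T.Any) -> T.Any:
        # run whatever check the real decorator performs, then delegate
        return f(*args, **kwargs)
    # The cast restores the precise signature for callers of the decorated
    # function; without it the result would be typed Callable[..., Any].
    return T.cast(TV_func, wrapped)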
class FeatureCheckBase(metaclass=abc.ABCMeta):
"Base class for feature version checks"
@ -279,15 +291,15 @@ class FeatureCheckBase(metaclass=abc.ABCMeta):
def get_warning_str_prefix(tv: str) -> str:
raise InterpreterException('get_warning_str_prefix not implemented')
def __call__(self, f):
def __call__(self, f: TV_func) -> TV_func:
@wraps(f)
def wrapped(*wrapped_args, **wrapped_kwargs):
def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
subproject = _get_callee_args(wrapped_args, want_subproject=True)[4]
if subproject is None:
raise AssertionError('{!r}'.format(wrapped_args))
self.use(subproject)
return f(*wrapped_args, **wrapped_kwargs)
return wrapped
return T.cast(TV_func, wrapped)
@classmethod
def single_use(cls, feature_name: str, version: str, subproject: str,
@ -366,9 +378,9 @@ class FeatureCheckKwargsBase(metaclass=abc.ABCMeta):
self.kwargs = kwargs
self.extra_message = extra_message
def __call__(self, f):
def __call__(self, f: TV_func) -> TV_func:
@wraps(f)
def wrapped(*wrapped_args, **wrapped_kwargs):
def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
kwargs, subproject = _get_callee_args(wrapped_args, want_subproject=True)[3:5]
if subproject is None:
raise AssertionError('{!r}'.format(wrapped_args))
@ -379,7 +391,7 @@ class FeatureCheckKwargsBase(metaclass=abc.ABCMeta):
self.feature_check_class.single_use(
name, self.feature_version, subproject, self.extra_message)
return f(*wrapped_args, **wrapped_kwargs)
return wrapped
return T.cast(TV_func, wrapped)
class FeatureNewKwargs(FeatureCheckKwargsBase):
feature_check_class = FeatureNew
@ -407,21 +419,21 @@ class BreakRequest(BaseException):
pass
class MutableInterpreterObject(InterpreterObject):
def __init__(self):
def __init__(self) -> None:
super().__init__()
class Disabler(InterpreterObject):
def __init__(self):
def __init__(self) -> None:
super().__init__()
self.methods.update({'found': self.found_method})
def found_method(self, args, kwargs):
def found_method(self, args: T.Sequence[T.Any], kwargs: T.Dict[str, T.Any]) -> bool:
return False
def is_disabler(i) -> bool:
def is_disabler(i: T.Any) -> bool:
return isinstance(i, Disabler)
def is_arg_disabled(arg) -> bool:
def is_arg_disabled(arg: T.Any) -> bool:
if is_disabler(arg):
return True
if isinstance(arg, list):
@ -430,7 +442,7 @@ def is_arg_disabled(arg) -> bool:
return True
return False
def is_disabled(args, kwargs) -> bool:
def is_disabled(args: T.Sequence[T.Any], kwargs: T.Dict[str, T.Any]) -> bool:
for i in args:
if is_arg_disabled(i):
return True
@ -439,6 +451,11 @@ def is_disabled(args, kwargs) -> bool:
return True
return False
def default_resolve_key(key: mparser.BaseNode) -> str:
if not isinstance(key, mparser.IdNode):
raise InterpreterException('Invalid kwargs format.')
return key.value
class InterpreterBase:
elementary_types = (int, float, str, bool, list)
@ -585,23 +602,17 @@ class InterpreterBase:
return arguments
@FeatureNew('dict', '0.47.0')
def evaluate_dictstatement(self, cur: mparser.DictNode) -> T.Dict[str, T.Any]:
(arguments, kwargs) = self.reduce_arguments(cur.args, resolve_key_nodes=False)
assert (not arguments)
result = {} # type: T.Dict[str, T.Any]
self.argument_depth += 1
for key, value in kwargs.items():
def evaluate_dictstatement(self, cur: mparser.DictNode) -> TYPE_nkwargs:
def resolve_key(key: mparser.BaseNode) -> str:
if not isinstance(key, mparser.StringNode):
FeatureNew.single_use('Dictionary entry using non literal key', '0.53.0', self.subproject)
assert isinstance(key, mparser.BaseNode) # All keys must be nodes due to resolve_key_nodes=False
str_key = self.evaluate_statement(key)
if not isinstance(str_key, str):
raise InvalidArguments('Key must be a string')
if str_key in result:
raise InvalidArguments('Duplicate dictionary key: {}'.format(str_key))
result[str_key] = value
self.argument_depth -= 1
return result
return str_key
arguments, kwargs = self.reduce_arguments(cur.args, key_resolver=resolve_key, duplicate_key_error='Duplicate dictionary key: {}')
assert not arguments
return kwargs
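The rewrite above replaces the old resolve_key_nodes flag with a key_resolver callback plus an optional duplicate-key message, so dict literals and ordinary kwargs share one code path. A simplified sketch of that shape, using plain objects instead of mparser nodes (all names here are illustrative):

import typing as T

def reduce_kwargs(raw: T.Dict[object, object],
                  key_resolver: T.Callable[[object], str],
                  duplicate_key_error: T.Optional[str] = None) -> T.Dict[str, object]:
    reduced = {}  # type: T.Dict[str, object]
    for key, val in raw.items():
        # The caller decides how a key becomes a string (IdNode value,
        # evaluated StringNode, ...); duplicates are rejected centrally.
        name = key_resolver(key)
        if duplicate_key_error and name in reduced:
            raise ValueError(duplicate_key_error.format(name))
        reduced[name] = val
    return reduced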
def evaluate_notstatement(self, cur: mparser.NotNode) -> T.Union[bool, Disabler]:
v = self.evaluate_statement(cur.value)
@ -722,7 +733,7 @@ The result of this is undefined and will become a hard error in a future Meson r
raise InterpreterException('Second argument to "or" is not a boolean.')
return r
def evaluate_uminusstatement(self, cur) -> T.Union[int, Disabler]:
def evaluate_uminusstatement(self, cur: mparser.UMinusNode) -> T.Union[int, Disabler]:
v = self.evaluate_statement(cur.value)
if isinstance(v, Disabler):
return v
@ -868,7 +879,8 @@ The result of this is undefined and will become a hard error in a future Meson r
if not isinstance(index, str):
raise InterpreterException('Key is not a string')
try:
return iobject[index]
# The cast is required because we don't have recursive types...
return T.cast(TYPE_var, iobject[index])
except KeyError:
raise InterpreterException('Key %s is not in dict' % index)
else:
@ -893,7 +905,7 @@ The result of this is undefined and will become a hard error in a future Meson r
func_args = posargs # type: T.Any
if not getattr(func, 'no-args-flattening', False):
func_args = flatten(posargs)
return func(node, func_args, self.kwargs_string_keys(kwargs))
return func(node, func_args, kwargs)
else:
self.unknown_function_called(func_name)
return None
@ -935,7 +947,7 @@ The result of this is undefined and will become a hard error in a future Meson r
raise InvalidArguments('Invalid operation "extract_objects" on variable "{}"'.format(object_name))
self.validate_extraction(obj.held_object)
obj.current_node = node
return obj.method_call(method_name, args, self.kwargs_string_keys(kwargs))
return obj.method_call(method_name, args, kwargs)
@builtinMethodNoKwargs
def bool_method_call(self, obj: bool, method_name: str, posargs: T.List[TYPE_nvar], kwargs: T.Dict[str, T.Any]) -> T.Union[str, int]:
@ -1068,7 +1080,7 @@ The result of this is undefined and will become a hard error in a future Meson r
arg = str(arg).lower()
arg_strings.append(str(arg))
def arg_replace(match):
def arg_replace(match: T.Match[str]) -> str:
idx = int(match.group(1))
if idx >= len(arg_strings):
raise InterpreterException('Format placeholder @{}@ out of range.'.format(idx))
@ -1080,7 +1092,7 @@ The result of this is undefined and will become a hard error in a future Meson r
raise InvalidCode('Unknown function "%s".' % func_name)
@builtinMethodNoKwargs
def array_method_call(self, obj: list, method_name: str, posargs: T.List[TYPE_nvar], kwargs: T.Dict[str, T.Any]) -> TYPE_var:
def array_method_call(self, obj: T.List[TYPE_var], method_name: str, posargs: T.List[TYPE_nvar], kwargs: T.Dict[str, T.Any]) -> TYPE_var:
if method_name == 'contains':
def check_contains(el: list) -> bool:
if len(posargs) != 1:
@ -1121,7 +1133,7 @@ The result of this is undefined and will become a hard error in a future Meson r
raise InterpreterException(m.format(method_name))
@builtinMethodNoKwargs
def dict_method_call(self, obj: dict, method_name: str, posargs: T.List[TYPE_nvar], kwargs: T.Dict[str, T.Any]) -> TYPE_var:
def dict_method_call(self, obj: T.Dict[str, TYPE_var], method_name: str, posargs: T.List[TYPE_nvar], kwargs: T.Dict[str, T.Any]) -> TYPE_var:
if method_name in ('has_key', 'get'):
if method_name == 'has_key':
if len(posargs) != 1:
@ -1157,7 +1169,12 @@ The result of this is undefined and will become a hard error in a future Meson r
raise InterpreterException('Dictionaries do not have a method called "%s".' % method_name)
def reduce_arguments(self, args: mparser.ArgumentNode, resolve_key_nodes: bool = True) -> T.Tuple[T.List[TYPE_nvar], TYPE_nkwargs]:
def reduce_arguments(
self,
args: mparser.ArgumentNode,
key_resolver: T.Callable[[mparser.BaseNode], str] = default_resolve_key,
duplicate_key_error: T.Optional[str] = None,
) -> T.Tuple[T.List[TYPE_nvar], TYPE_nkwargs]:
assert(isinstance(args, mparser.ArgumentNode))
if args.incorrect_order():
raise InvalidArguments('All keyword arguments must be after positional arguments.')
@ -1165,13 +1182,12 @@ The result of this is undefined and will become a hard error in a future Meson r
reduced_pos = [self.evaluate_statement(arg) for arg in args.arguments] # type: T.List[TYPE_nvar]
reduced_kw = {} # type: TYPE_nkwargs
for key, val in args.kwargs.items():
reduced_key = key # type: T.Union[str, mparser.BaseNode]
reduced_key = key_resolver(key)
reduced_val = val # type: TYPE_nvar
if resolve_key_nodes and isinstance(key, mparser.IdNode):
assert isinstance(key.value, str)
reduced_key = key.value
if isinstance(reduced_val, mparser.BaseNode):
reduced_val = self.evaluate_statement(reduced_val)
if duplicate_key_error and reduced_key in reduced_kw:
raise InvalidArguments(duplicate_key_error.format(reduced_key))
reduced_kw[reduced_key] = reduced_val
self.argument_depth -= 1
final_kw = self.expand_default_kwargs(reduced_kw)
@ -1191,14 +1207,6 @@ The result of this is undefined and will become a hard error in a future Meson r
kwargs[k] = v
return kwargs
def kwargs_string_keys(self, kwargs: TYPE_nkwargs) -> T.Dict[str, TYPE_nvar]:
kw = {} # type: T.Dict[str, TYPE_nvar]
for key, val in kwargs.items():
if not isinstance(key, str):
raise InterpreterException('Key of kwargs is not a string')
kw[key] = val
return kw
def assignment(self, node: mparser.AssignmentNode) -> None:
assert(isinstance(node, mparser.AssignmentNode))
if self.argument_depth != 0:
@ -1229,7 +1237,7 @@ To specify a keyword argument, use : instead of =.''')
raise InvalidCode('Tried to overwrite internal variable "%s"' % varname)
self.variables[varname] = variable
def get_variable(self, varname) -> TYPE_var:
def get_variable(self, varname: str) -> TYPE_var:
if varname in self.builtin:
return self.builtin[varname]
if varname in self.variables:

@ -45,7 +45,9 @@ def get_backend_from_coredata(builddir: Path) -> str:
"""
Gets `backend` option value from coredata
"""
return coredata.load(str(builddir)).get_builtin_option('backend')
backend = coredata.load(str(builddir)).get_builtin_option('backend')
assert isinstance(backend, str)
return backend
def parse_introspect_data(builddir: Path) -> T.Dict[str, T.List[dict]]:
"""
@ -97,12 +99,12 @@ class ParsedTargetName:
}
return type in allowed_types
def get_target_from_intro_data(target: ParsedTargetName, builddir: Path, introspect_data: dict) -> dict:
def get_target_from_intro_data(target: ParsedTargetName, builddir: Path, introspect_data: T.Dict[str, T.Any]) -> T.Dict[str, T.Any]:
if target.name not in introspect_data:
raise MesonException('Can\'t invoke target `{}`: target not found'.format(target.full_name))
intro_targets = introspect_data[target.name]
found_targets = []
found_targets = [] # type: T.List[T.Dict[str, T.Any]]
resolved_bdir = builddir.resolve()
@ -137,7 +139,7 @@ def get_parsed_args_ninja(options: 'argparse.Namespace', builddir: Path) -> T.Li
runner = detect_ninja()
if runner is None:
raise MesonException('Cannot find ninja.')
mlog.log('Found runner:', runner)
mlog.log('Found runner:', str(runner))
cmd = runner + ['-C', builddir.as_posix()]
@ -169,9 +171,9 @@ def generate_target_name_vs(target: ParsedTargetName, builddir: Path, introspect
# Normalize project name
# Source: https://docs.microsoft.com/en-us/visualstudio/msbuild/how-to-build-specific-targets-in-solutions-by-using-msbuild-exe
target_name = re.sub('[\%\$\@\;\.\(\)\']', '_', intro_target['id'])
target_name = re.sub('[\%\$\@\;\.\(\)\']', '_', intro_target['id']) # type: str
rel_path = Path(intro_target['filename'][0]).relative_to(builddir.resolve()).parent
if rel_path != '.':
if rel_path != Path('.'):
target_name = str(rel_path / target_name)
return target_name

@ -15,15 +15,19 @@
import os
from . import coredata, environment, mesonlib, build, mintro, mlog
from .ast import AstIDGenerator
import typing as T
def add_arguments(parser):
if T.TYPE_CHECKING:
import argparse
def add_arguments(parser: 'argparse.ArgumentParser') -> None:
coredata.register_builtin_arguments(parser)
parser.add_argument('builddir', nargs='?', default='.')
parser.add_argument('--clearcache', action='store_true', default=False,
help='Clear cached state (e.g. found dependencies)')
def make_lower_case(val):
def make_lower_case(val: T.Any) -> T.Union[str, T.List[T.Any]]: # T.Any because of recursion...
if isinstance(val, bool):
return str(val).lower()
elif isinstance(val, list):

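The `if T.TYPE_CHECKING:` guard plus the quoted annotation seen above is the typing-only-import idiom used in this hunk and in several later files: argparse is never imported at runtime, yet mypy still checks the parameter. A minimal sketch:

import typing as T

if T.TYPE_CHECKING:
    import argparse  # only evaluated by the type checker

def add_arguments(parser: 'argparse.ArgumentParser') -> None:
    # The string annotation avoids a runtime NameError while keeping the
    # precise type for mypy.
    parser.add_argument('builddir', nargs='?', default='.')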
@ -19,7 +19,7 @@ import stat
import time
import platform, subprocess, operator, os, shlex, shutil, re
import collections
from enum import Enum
from enum import IntEnum
from functools import lru_cache, wraps
from itertools import tee, filterfalse
import typing as T
@ -289,7 +289,7 @@ class File:
def split(self, s: str) -> T.List[str]:
return self.fname.split(s)
def __eq__(self, other) -> bool:
def __eq__(self, other: object) -> bool:
if not isinstance(other, File):
return NotImplemented
if self.hash != other.hash:
@ -323,32 +323,7 @@ def classify_unity_sources(compilers: T.Iterable['CompilerType'], sources: T.Ite
return compsrclist
class OrderedEnum(Enum):
"""
An Enum which additionally offers homogeneous ordered comparison.
"""
def __ge__(self, other):
if self.__class__ is other.__class__:
return self.value >= other.value
return NotImplemented
def __gt__(self, other):
if self.__class__ is other.__class__:
return self.value > other.value
return NotImplemented
def __le__(self, other):
if self.__class__ is other.__class__:
return self.value <= other.value
return NotImplemented
def __lt__(self, other):
if self.__class__ is other.__class__:
return self.value < other.value
return NotImplemented
class MachineChoice(OrderedEnum):
class MachineChoice(IntEnum):
"""Enum class representing one of the two abstract machine names used in
most places: the build, and host, machines.
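Switching MachineChoice (and LibType further down) from the hand-rolled OrderedEnum to IntEnum keeps the ordered comparisons while deleting the four comparison dunders, since IntEnum members already compare as ints. A small sketch (member names and values are assumed for illustration):

from enum import IntEnum

class Machine(IntEnum):
    BUILD = 0
    HOST = 1

# Ordered comparison works out of the box, no __lt__/__ge__ boilerplate.
assert Machine.BUILD < Machine.HOST
assert sorted([Machine.HOST, Machine.BUILD])[0] is Machine.BUILD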
@ -365,7 +340,7 @@ class MachineChoice(OrderedEnum):
class PerMachine(T.Generic[_T]):
def __init__(self, build: _T, host: _T):
def __init__(self, build: _T, host: _T) -> None:
self.build = build
self.host = host
@ -403,7 +378,7 @@ class PerThreeMachine(PerMachine[_T]):
need to compute the `target` field so we don't bother overriding the
`__getitem__`/`__setitem__` methods.
"""
def __init__(self, build: _T, host: _T, target: _T):
def __init__(self, build: _T, host: _T, target: _T) -> None:
super().__init__(build, host)
self.target = target
@ -434,7 +409,7 @@ class PerThreeMachine(PerMachine[_T]):
class PerMachineDefaultable(PerMachine[T.Optional[_T]]):
"""Extends `PerMachine` with the ability to default from `None`s.
"""
def __init__(self):
def __init__(self) -> None:
super().__init__(None, None)
def default_missing(self) -> "PerMachine[T.Optional[_T]]":
@ -455,7 +430,7 @@ class PerMachineDefaultable(PerMachine[T.Optional[_T]]):
class PerThreeMachineDefaultable(PerMachineDefaultable, PerThreeMachine[T.Optional[_T]]):
"""Extends `PerThreeMachine` with the ability to default from `None`s.
"""
def __init__(self):
def __init__(self) -> None:
PerThreeMachine.__init__(self, None, None, None)
def default_missing(self) -> "PerThreeMachine[T.Optional[_T]]":
@ -589,7 +564,7 @@ def detect_vcs(source_dir: T.Union[str, Path]) -> T.Optional[T.Dict[str, str]]:
# a helper class which implements the same version ordering as RPM
class Version:
def __init__(self, s: str):
def __init__(self, s: str) -> None:
self._s = s
# split into numeric, alphabetic and non-alphanumeric sequences
@ -603,38 +578,38 @@ class Version:
self._v = sequences3
def __str__(self):
def __str__(self) -> str:
return '%s (V=%s)' % (self._s, str(self._v))
def __repr__(self):
def __repr__(self) -> str:
return '<Version: {}>'.format(self._s)
def __lt__(self, other):
def __lt__(self, other: object) -> bool:
if isinstance(other, Version):
return self.__cmp(other, operator.lt)
return NotImplemented
def __gt__(self, other):
def __gt__(self, other: object) -> bool:
if isinstance(other, Version):
return self.__cmp(other, operator.gt)
return NotImplemented
def __le__(self, other):
def __le__(self, other: object) -> bool:
if isinstance(other, Version):
return self.__cmp(other, operator.le)
return NotImplemented
def __ge__(self, other):
def __ge__(self, other: object) -> bool:
if isinstance(other, Version):
return self.__cmp(other, operator.ge)
return NotImplemented
def __eq__(self, other):
def __eq__(self, other: object) -> bool:
if isinstance(other, Version):
return self._v == other._v
return NotImplemented
def __ne__(self, other):
def __ne__(self, other: object) -> bool:
if isinstance(other, Version):
return self._v != other._v
return NotImplemented
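The Version comparisons above show the convention for typing rich-comparison dunders: the parameter is object, foreign types fall through to NotImplemented, and the return annotation stays bool. The same shape in isolation (a toy class, not from the diff):

class Count:
    def __init__(self, n: int) -> None:
        self.n = n

    def __eq__(self, other: object) -> bool:
        if isinstance(other, Count):
            return self.n == other.n
        # Returning NotImplemented lets Python try the reflected operation;
        # the Version class above uses the same convention.
        return NotImplemented

    def __lt__(self, other: object) -> bool:
        if isinstance(other, Count):
            return self.n < other.n
        return NotImplemented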
@ -740,7 +715,7 @@ def version_compare_condition_with_min(condition: str, minimum: str) -> bool:
if re.match(r'^\d+.\d+$', condition):
condition += '.0'
return cmpop(Version(minimum), Version(condition))
return T.cast(bool, cmpop(Version(minimum), Version(condition)))
def default_libdir() -> str:
@ -924,20 +899,20 @@ def do_replacement(regex: T.Pattern[str], line: str, variable_format: str,
# Template variable to be replaced
else:
varname = match.group(1)
var_str = ''
if varname in confdata:
(var, desc) = confdata.get(varname)
if isinstance(var, str):
pass
var_str = var
elif isinstance(var, int):
var = str(var)
var_str = str(var)
else:
msg = 'Tried to replace variable {!r} value with ' \
'something other than a string or int: {!r}'
raise MesonException(msg.format(varname, var))
else:
missing_variables.add(varname)
var = ''
return var
return var_str
return re.sub(regex, variable_replace, line), missing_variables
def do_define(regex: T.Pattern[str], line: str, confdata: 'ConfigurationData', variable_format: str) -> str:
@ -947,7 +922,7 @@ def do_define(regex: T.Pattern[str], line: str, confdata: 'ConfigurationData', v
for token in arr[2:]:
try:
(v, desc) = confdata.get(token)
define_value += [v]
define_value += [str(v)]
except KeyError:
define_value += [token]
return ' '.join(define_value)
@ -981,14 +956,14 @@ def do_define(regex: T.Pattern[str], line: str, confdata: 'ConfigurationData', v
def do_conf_str (data: list, confdata: 'ConfigurationData', variable_format: str,
encoding: str = 'utf-8') -> T.Tuple[T.List[str],T.Set[str], bool]:
def line_is_valid(line : str, variable_format: str):
if variable_format == 'meson':
if '#cmakedefine' in line:
return False
else: #cmake format
if '#mesondefine' in line:
return False
return True
def line_is_valid(line : str, variable_format: str) -> bool:
if variable_format == 'meson':
if '#cmakedefine' in line:
return False
else: #cmake format
if '#mesondefine' in line:
return False
return True
# Only allow (a-z, A-Z, 0-9, _, -) as valid characters for a define
# Also allow escaping '@' with '\@'
@ -1115,7 +1090,7 @@ def unholder(item: T.List[_T]) -> T.List[_T]: ...
@T.overload
def unholder(item: T.List[T.Union[_T, 'ObjectHolder[_T]']]) -> T.List[_T]: ...
def unholder(item):
def unholder(item): # type: ignore # TODO fix overload (somehow)
"""Get the held item of an object holder or list of object holders."""
if isinstance(item, list):
return [i.held_object if hasattr(i, 'held_object') else i for i in item]
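unholder keeps its @T.overload stubs for callers but exempts the implementation itself from checking, a common fallback when the implementation cannot yet be expressed to mypy's satisfaction. A standalone sketch of the same trick (function name and signatures are illustrative):

import typing as T

_T = T.TypeVar('_T')

@T.overload
def first(items: T.List[_T]) -> _T: ...
@T.overload
def first(items: T.Tuple[_T, ...]) -> _T: ...

def first(items):  # type: ignore
    # Callers see only the overloads above; the untyped implementation is
    # exempted from checking until a precise signature can be written.
    return items[0]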
@ -1531,10 +1506,10 @@ class OrderedSet(T.MutableSet[_T]):
class BuildDirLock:
def __init__(self, builddir: str):
def __init__(self, builddir: str) -> None:
self.lockfilename = os.path.join(builddir, 'meson-private/meson.lock')
def __enter__(self):
def __enter__(self) -> None:
self.lockfile = open(self.lockfilename, 'w')
try:
if have_fcntl:
@ -1545,7 +1520,7 @@ class BuildDirLock:
self.lockfile.close()
raise MesonException('Some other Meson process is already using this build directory. Exiting.')
def __exit__(self, *args):
def __exit__(self, *args: T.Any) -> None:
if have_fcntl:
fcntl.flock(self.lockfile, fcntl.LOCK_UN)
elif have_msvcrt:
@ -1572,7 +1547,7 @@ def path_is_in_root(path: Path, root: Path, resolve: bool = False) -> bool:
return False
return True
class LibType(Enum):
class LibType(IntEnum):
"""Enumeration for library types."""
@ -1636,7 +1611,7 @@ except ImportError:
ProgressBar = ProgressBarFallback # type: T.Union[T.Type[ProgressBarFallback], T.Type[ProgressBarTqdm]]
else:
class ProgressBarTqdm(tqdm):
def __init__(self, *args, bar_type: T.Optional[str] = None, **kwargs):
def __init__(self, *args: T.Any, bar_type: T.Optional[str] = None, **kwargs: T.Any) -> None:
if bar_type == 'download':
kwargs.update({'unit': 'bytes', 'leave': True})
else:

@ -25,6 +25,10 @@ from glob import glob
from mesonbuild import mesonlib
from mesonbuild.environment import detect_ninja
from mesonbuild.templates.samplefactory import sameple_generator
import typing as T
if T.TYPE_CHECKING:
import argparse
'''
we currently have only one meson template.
@ -49,7 +53,7 @@ meson compile -C builddir
'''
def create_sample(options) -> None:
def create_sample(options: 'argparse.Namespace') -> None:
'''
Based on what arguments are passed we check for a match in language
then check the project type and create a new Meson sample project.
@ -63,7 +67,7 @@ def create_sample(options) -> None:
raise RuntimeError('Unreachable code')
print(INFO_MESSAGE)
def autodetect_options(options, sample: bool = False) -> None:
def autodetect_options(options: 'argparse.Namespace', sample: bool = False) -> None:
'''
Here we autodetect options for args not passed in so we don't have to
think about it.
@ -129,7 +133,7 @@ def autodetect_options(options, sample: bool = False) -> None:
raise SystemExit("Can't autodetect language, please specify it with -l.")
print("Detected language: " + options.language)
def add_arguments(parser):
def add_arguments(parser: 'argparse.ArgumentParser') -> None:
'''
Here we add args that the user can pass when making a new
Meson project.
@ -146,7 +150,7 @@ def add_arguments(parser):
parser.add_argument('--type', default=DEFAULT_PROJECT, choices=('executable', 'library'), help="project type. default: {} based project".format(DEFAULT_PROJECT))
parser.add_argument('--version', default=DEFAULT_VERSION, help="project version. default: {}".format(DEFAULT_VERSION))
def run(options) -> int:
def run(options: 'argparse.Namespace') -> int:
'''
Here we generate the new Meson sample project.
'''

@ -31,6 +31,7 @@ from .interpreter import Interpreter
from pathlib import PurePath
import typing as T
import os
import argparse
def get_meson_info_file(info_dir: str) -> str:
return os.path.join(info_dir, 'meson-info.json')
@ -76,7 +77,7 @@ def get_meson_introspection_types(coredata: T.Optional[cdata.CoreData] = None,
('tests', IntroCommand('List all unit tests', func=lambda: list_tests(testdata))),
])
def add_arguments(parser):
def add_arguments(parser: argparse.ArgumentParser) -> None:
intro_types = get_meson_introspection_types()
for key, val in intro_types.items():
flag = '--' + key.replace('_', '-')
@ -97,7 +98,7 @@ def dump_ast(intr: IntrospectionInterpreter) -> T.Dict[str, T.Any]:
intr.ast.accept(printer)
return printer.result
def list_installed(installdata):
def list_installed(installdata: backends.InstallData) -> T.Dict[str, str]:
res = {}
if installdata is not None:
for t in installdata.targets:
@ -157,7 +158,7 @@ def list_targets_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[st
return tlist
def list_targets(builddata: build.Build, installdata, backend: backends.Backend) -> T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]:
def list_targets(builddata: build.Build, installdata: backends.InstallData, backend: backends.Backend) -> T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]:
tlist = [] # type: T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]
build_dir = builddata.environment.get_build_dir()
src_dir = builddata.environment.get_source_dir()
@ -254,11 +255,9 @@ def list_buildoptions(coredata: cdata.CoreData, subprojects: T.Optional[T.List[s
'compiler',
machine='host',
)
tmp_dict = dict(coredata.flatten_lang_iterator(coredata.compiler_options.build.items())) # type: T.Dict[str, cdata.UserOption]
add_keys(
{
'build.' + k: o for k, o in
coredata.flatten_lang_iterator(coredata.compiler_options.build.items())
},
{'build.' + k: o for k, o in tmp_dict.items()},
'compiler',
machine='build',
)
@ -267,7 +266,7 @@ def list_buildoptions(coredata: cdata.CoreData, subprojects: T.Optional[T.List[s
add_keys(test_options, 'test')
return optlist
def find_buildsystem_files_list(src_dir) -> T.List[str]:
def find_buildsystem_files_list(src_dir: str) -> T.List[str]:
# I feel dirty about this. But only slightly.
filelist = [] # type: T.List[str]
for root, _, files in os.walk(src_dir):
@ -278,7 +277,7 @@ def find_buildsystem_files_list(src_dir) -> T.List[str]:
def list_buildsystem_files(builddata: build.Build, interpreter: Interpreter) -> T.List[str]:
src_dir = builddata.environment.get_source_dir()
filelist = interpreter.get_build_def_files()
filelist = interpreter.get_build_def_files() # type: T.List[str]
filelist = [PurePath(src_dir, x).as_posix() for x in filelist]
return filelist
@ -305,10 +304,10 @@ def list_deps(coredata: cdata.CoreData) -> T.List[T.Dict[str, T.Union[str, T.Lis
'link_args': d.get_link_args()}]
return result
def get_test_list(testdata) -> T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]:
def get_test_list(testdata: T.List[backends.TestSerialisation]) -> T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]:
result = [] # type: T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]
for t in testdata:
to = {}
to = {} # type: T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]
if isinstance(t.fname, str):
fname = [t.fname]
else:
@ -329,21 +328,21 @@ def get_test_list(testdata) -> T.List[T.Dict[str, T.Union[str, int, T.List[str],
result.append(to)
return result
def list_tests(testdata) -> T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]:
def list_tests(testdata: T.List[backends.TestSerialisation]) -> T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]:
return get_test_list(testdata)
def list_benchmarks(benchdata) -> T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]:
def list_benchmarks(benchdata: T.List[backends.TestSerialisation]) -> T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]:
return get_test_list(benchdata)
def list_projinfo(builddata: build.Build) -> T.Dict[str, T.Union[str, T.List[T.Dict[str, str]]]]:
result = {'version': builddata.project_version,
'descriptive_name': builddata.project_name,
'subproject_dir': builddata.subproject_dir}
'subproject_dir': builddata.subproject_dir} # type: T.Dict[str, T.Union[str, T.List[T.Dict[str, str]]]]
subprojects = []
for k, v in builddata.subprojects.items():
c = {'name': k,
'version': v,
'descriptive_name': builddata.projects.get(k)}
'descriptive_name': builddata.projects.get(k)} # type: T.Dict[str, str]
subprojects.append(c)
result['subprojects'] = subprojects
return result
@ -362,7 +361,7 @@ def list_projinfo_from_source(intr: IntrospectionInterpreter) -> T.Dict[str, T.U
intr.project_data['subproject_dir'] = intr.subproject_dir
return intr.project_data
def print_results(options, results: T.Sequence[T.Tuple[str, T.Union[dict, T.List[T.Any]]]], indent: int) -> int:
def print_results(options: argparse.Namespace, results: T.Sequence[T.Tuple[str, T.Union[dict, T.List[T.Any]]]], indent: int) -> int:
if not results and not options.force_dict:
print('No command specified')
return 1
@ -376,7 +375,7 @@ def print_results(options, results: T.Sequence[T.Tuple[str, T.Union[dict, T.List
print(json.dumps(out, indent=indent))
return 0
def run(options) -> int:
def run(options: argparse.Namespace) -> int:
datadir = 'meson-private'
infodir = 'meson-info'
if options.builddir is not None:
@ -391,6 +390,7 @@ def run(options) -> int:
# Make sure that log entries in other parts of meson don't interfere with the JSON output
mlog.disable()
backend = backends.get_backend_from_name(options.backend)
assert backend is not None
intr = IntrospectionInterpreter(sourcedir, '', backend.name, visitors = [AstIDGenerator(), AstIndentationGenerator(), AstConditionLevel()])
intr.analyze()
# Re-enable logging just in case
@ -461,7 +461,7 @@ def generate_introspection_file(builddata: build.Build, backend: backends.Backen
write_intro_info(intro_info, builddata.environment.info_dir)
def update_build_options(coredata: cdata.CoreData, info_dir) -> None:
def update_build_options(coredata: cdata.CoreData, info_dir: str) -> None:
intro_info = [
('buildoptions', list_buildoptions(coredata))
]

@ -56,7 +56,7 @@ def colorize_console() -> bool:
sys.stdout.colorize_console = _colorize_console # type: ignore[attr-defined]
return _colorize_console
def setup_console():
def setup_console() -> None:
# on Windows, a subprocess might call SetConsoleMode() on the console
# connected to stdout and turn off ANSI escape processing. Call this after
# running a subprocess to ensure we turn it on again.

@ -19,14 +19,18 @@ import os
from .. import build
from ..mesonlib import unholder
import typing as T
if T.TYPE_CHECKING:
from ..interpreter import Interpreter
from ..interpreterbase import TYPE_var
class ExtensionModule:
def __init__(self, interpreter):
def __init__(self, interpreter: 'Interpreter') -> None:
self.interpreter = interpreter
self.snippets = set() # List of methods that operate only on the interpreter.
self.snippets = set() # type: T.Set[str] # List of methods that operate only on the interpreter.
def is_snippet(self, funcname):
def is_snippet(self, funcname: str) -> bool:
return funcname in self.snippets
@ -69,7 +73,7 @@ def is_module_library(fname):
class ModuleReturnValue:
def __init__(self, return_value, new_objects):
def __init__(self, return_value: 'TYPE_var', new_objects: T.List['TYPE_var']) -> None:
self.return_value = return_value
assert(isinstance(new_objects, list))
self.new_objects = new_objects

@ -24,11 +24,11 @@ from ..interpreterbase import FeatureNew
from ..interpreterbase import stringArgs, noKwargs
if T.TYPE_CHECKING:
from ..interpreter import ModuleState
from ..interpreter import Interpreter, ModuleState
class FSModule(ExtensionModule):
def __init__(self, interpreter):
def __init__(self, interpreter: 'Interpreter') -> None:
super().__init__(interpreter)
self.snippets.add('generate_dub_file')
@ -36,7 +36,7 @@ class FSModule(ExtensionModule):
"""
make an absolute path from a relative path, WITHOUT resolving symlinks
"""
return Path(state.source_root) / state.subdir / Path(arg).expanduser()
return Path(state.source_root) / Path(state.subdir) / Path(arg).expanduser()
def _resolve_dir(self, state: 'ModuleState', arg: str) -> Path:
"""
@ -193,5 +193,5 @@ class FSModule(ExtensionModule):
new = original.stem
return ModuleReturnValue(str(new), [])
def initialize(*args, **kwargs) -> FSModule:
def initialize(*args: T.Any, **kwargs: T.Any) -> FSModule:
return FSModule(*args, **kwargs)

@ -35,25 +35,34 @@ ESCAPE_SEQUENCE_SINGLE_RE = re.compile(r'''
)''', re.UNICODE | re.VERBOSE)
class MesonUnicodeDecodeError(MesonException):
def __init__(self, match):
super().__init__("%s" % match)
def __init__(self, match: str) -> None:
super().__init__(match)
self.match = match
def decode_match(match):
def decode_match(match: T.Match[str]) -> str:
try:
return codecs.decode(match.group(0), 'unicode_escape')
return codecs.decode(match.group(0).encode(), 'unicode_escape')
except UnicodeDecodeError:
raise MesonUnicodeDecodeError(match.group(0))
class ParseException(MesonException):
def __init__(self, text, line, lineno, colno):
def __init__(self, text: str, line: str, lineno: int, colno: int) -> None:
# Format as error message, followed by the line with the error, followed by a caret to show the error column.
super().__init__("%s\n%s\n%s" % (text, line, '%s^' % (' ' * colno)))
super().__init__("{}\n{}\n{}".format(text, line, '{}^'.format(' ' * colno)))
self.lineno = lineno
self.colno = colno
class BlockParseException(MesonException):
def __init__(self, text, line, lineno, colno, start_line, start_lineno, start_colno):
def __init__(
self,
text: str,
line: str,
lineno: int,
colno: int,
start_line: str,
start_lineno: int,
start_colno: int,
) -> None:
# This can be formatted in two ways - one if the block start and end are on the same line, and a different way if they are on different lines.
if lineno == start_lineno:
@ -88,10 +97,12 @@ class Token(T.Generic[TV_TokenTypes]):
self.bytespan = bytespan # type: T.Tuple[int, int]
self.value = value # type: TV_TokenTypes
def __eq__(self, other) -> bool:
def __eq__(self, other: object) -> bool:
if isinstance(other, str):
return self.tid == other
return self.tid == other.tid
elif isinstance(other, Token):
return self.tid == other.tid
return NotImplemented
class Lexer:
def __init__(self, code: str):
@ -261,7 +272,7 @@ class IdNode(ElementaryNode[str]):
super().__init__(token)
assert isinstance(self.value, str)
def __str__(self):
def __str__(self) -> str:
return "Id node: '%s' (%d, %d)." % (self.value, self.lineno, self.colno)
class NumberNode(ElementaryNode[int]):
@ -274,7 +285,7 @@ class StringNode(ElementaryNode[str]):
super().__init__(token)
assert isinstance(self.value, str)
def __str__(self):
def __str__(self) -> str:
return "String node: '%s' (%d, %d)." % (self.value, self.lineno, self.colno)
class ContinueNode(ElementaryNode):
@ -703,7 +714,7 @@ class Parser:
s = self.statement()
return a
def method_call(self, source_object) -> MethodNode:
def method_call(self, source_object: BaseNode) -> MethodNode:
methodname = self.e9()
if not(isinstance(methodname, IdNode)):
raise ParseException('Method name must be plain id',
@ -717,7 +728,7 @@ class Parser:
return self.method_call(method)
return method
def index_call(self, source_object) -> IndexNode:
def index_call(self, source_object: BaseNode) -> IndexNode:
index_statement = self.statement()
self.expect('rbracket')
return IndexNode(source_object, index_statement)
@ -750,7 +761,7 @@ class Parser:
clause.elseblock = self.elseblock()
return clause
def elseifblock(self, clause) -> None:
def elseifblock(self, clause: IfClauseNode) -> None:
while self.accept('elif'):
s = self.statement()
self.expect('eol')

@ -31,7 +31,7 @@ from . import mintro
from .mconf import make_lower_case
from .mesonlib import MesonException
def add_arguments(parser):
def add_arguments(parser: argparse.ArgumentParser) -> None:
coredata.register_builtin_arguments(parser)
parser.add_argument('--native-file',
default=[],
@ -59,7 +59,7 @@ def add_arguments(parser):
parser.add_argument('sourcedir', nargs='?', default=None)
class MesonApp:
def __init__(self, options):
def __init__(self, options: argparse.Namespace) -> None:
(self.source_dir, self.build_dir) = self.validate_dirs(options.builddir,
options.sourcedir,
options.reconfigure,
@ -150,7 +150,7 @@ class MesonApp:
raise SystemExit('Directory does not contain a valid build tree:\n{}'.format(build_dir))
return src_dir, build_dir
def generate(self):
def generate(self) -> None:
env = environment.Environment(self.source_dir, self.build_dir, self.options)
mlog.initialize(env.get_log_dir(), self.options.fatal_warnings)
if self.options.profile:
@ -158,7 +158,7 @@ class MesonApp:
with mesonlib.BuildDirLock(self.build_dir):
self._generate(env)
def _generate(self, env):
def _generate(self, env: environment.Environment) -> None:
mlog.debug('Build started at', datetime.datetime.now().isoformat())
mlog.debug('Main binary:', sys.executable)
mlog.debug('Build Options:', coredata.get_cmd_line_options(self.build_dir, self.options))
@ -178,12 +178,18 @@ class MesonApp:
logger_fun = mlog.log
else:
logger_fun = mlog.debug
logger_fun('Build machine cpu family:', mlog.bold(intr.builtin['build_machine'].cpu_family_method([], {})))
logger_fun('Build machine cpu:', mlog.bold(intr.builtin['build_machine'].cpu_method([], {})))
mlog.log('Host machine cpu family:', mlog.bold(intr.builtin['host_machine'].cpu_family_method([], {})))
mlog.log('Host machine cpu:', mlog.bold(intr.builtin['host_machine'].cpu_method([], {})))
logger_fun('Target machine cpu family:', mlog.bold(intr.builtin['target_machine'].cpu_family_method([], {})))
logger_fun('Target machine cpu:', mlog.bold(intr.builtin['target_machine'].cpu_method([], {})))
build_machine = intr.builtin['build_machine']
host_machine = intr.builtin['host_machine']
target_machine = intr.builtin['target_machine']
assert isinstance(build_machine, interpreter.MachineHolder)
assert isinstance(host_machine, interpreter.MachineHolder)
assert isinstance(target_machine, interpreter.MachineHolder)
logger_fun('Build machine cpu family:', mlog.bold(build_machine.cpu_family_method([], {})))
logger_fun('Build machine cpu:', mlog.bold(build_machine.cpu_method([], {})))
mlog.log('Host machine cpu family:', mlog.bold(host_machine.cpu_family_method([], {})))
mlog.log('Host machine cpu:', mlog.bold(host_machine.cpu_method([], {})))
logger_fun('Target machine cpu family:', mlog.bold(target_machine.cpu_family_method([], {})))
logger_fun('Target machine cpu:', mlog.bold(target_machine.cpu_method([], {})))
try:
if self.options.profile:
fname = os.path.join(self.build_dir, 'meson-private', 'profile-interpreter.log')
@ -239,7 +245,7 @@ class MesonApp:
os.unlink(cdf)
raise
def run(options) -> int:
def run(options: argparse.Namespace) -> int:
coredata.parse_cmd_line_options(options)
app = MesonApp(options)
app.generate()

@ -43,10 +43,7 @@ from . import environment
from . import mlog
from .dependencies import ExternalProgram
from .mesonlib import MesonException, get_wine_shortpath, split_args, join_args
from .backend.backends import TestProtocol
if T.TYPE_CHECKING:
from .backend.backends import TestSerialisation
from .backend.backends import TestProtocol, TestSerialisation
# GNU autotools interprets a return code of 77 from tests it executes to
# mean that the test should be skipped.
@ -445,7 +442,7 @@ class JunitBuilder:
class TestRun:
@classmethod
def make_gtest(cls, test: 'TestSerialisation', test_env: T.Dict[str, str],
def make_gtest(cls, test: TestSerialisation, test_env: T.Dict[str, str],
returncode: int, starttime: float, duration: float,
stdo: T.Optional[str], stde: T.Optional[str],
cmd: T.Optional[T.List[str]]) -> 'TestRun':
@ -459,10 +456,10 @@ class TestRun:
junit=tree)
@classmethod
def make_exitcode(cls, test: 'TestSerialisation', test_env: T.Dict[str, str],
def make_exitcode(cls, test: TestSerialisation, test_env: T.Dict[str, str],
returncode: int, starttime: float, duration: float,
stdo: T.Optional[str], stde: T.Optional[str],
cmd: T.Optional[T.List[str]], **kwargs) -> 'TestRun':
cmd: T.Optional[T.List[str]], **kwargs: T.Any) -> 'TestRun':
if returncode == GNU_SKIP_RETURNCODE:
res = TestResult.SKIP
elif returncode == GNU_ERROR_RETURNCODE:
@ -474,7 +471,7 @@ class TestRun:
return cls(test, test_env, res, [], returncode, starttime, duration, stdo, stde, cmd, **kwargs)
@classmethod
def make_tap(cls, test: 'TestSerialisation', test_env: T.Dict[str, str],
def make_tap(cls, test: TestSerialisation, test_env: T.Dict[str, str],
returncode: int, starttime: float, duration: float,
stdo: str, stde: str,
cmd: T.Optional[T.List[str]]) -> 'TestRun':
@ -511,7 +508,7 @@ class TestRun:
return cls(test, test_env, res, results, returncode, starttime, duration, stdo, stde, cmd)
def __init__(self, test: 'TestSerialisation', test_env: T.Dict[str, str],
def __init__(self, test: TestSerialisation, test_env: T.Dict[str, str],
res: TestResult, results: T.List[TestResult], returncode:
int, starttime: float, duration: float,
stdo: T.Optional[str], stde: T.Optional[str],
@ -577,26 +574,32 @@ def write_json_log(jsonlogfile: T.TextIO, test_name: str, result: TestRun) -> No
def run_with_mono(fname: str) -> bool:
return fname.endswith('.exe') and not (is_windows() or is_cygwin())
def load_benchmarks(build_dir: str) -> T.List['TestSerialisation']:
def load_benchmarks(build_dir: str) -> T.List[TestSerialisation]:
datafile = Path(build_dir) / 'meson-private' / 'meson_benchmark_setup.dat'
if not datafile.is_file():
raise TestException('Directory {!r} does not seem to be a Meson build directory.'.format(build_dir))
with datafile.open('rb') as f:
obj = T.cast(T.List['TestSerialisation'], pickle.load(f))
obj = pickle.load(f)
assert isinstance(obj, list)
for i in obj:
assert isinstance(i, TestSerialisation)
return obj
def load_tests(build_dir: str) -> T.List['TestSerialisation']:
def load_tests(build_dir: str) -> T.List[TestSerialisation]:
datafile = Path(build_dir) / 'meson-private' / 'meson_test_setup.dat'
if not datafile.is_file():
raise TestException('Directory {!r} does not seem to be a Meson build directory.'.format(build_dir))
with datafile.open('rb') as f:
obj = T.cast(T.List['TestSerialisation'], pickle.load(f))
obj = pickle.load(f)
assert isinstance(obj, list)
for i in obj:
assert isinstance(i, TestSerialisation)
return obj
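load_tests and load_benchmarks now narrow the pickled object with runtime isinstance checks instead of a blind T.cast, which is also why TestSerialisation moves out of the T.TYPE_CHECKING block above: the asserts need it at runtime. The same pattern on a toy payload:

import pickle
import typing as T

def load_ints(path: str) -> T.List[int]:
    with open(path, 'rb') as f:
        obj = pickle.load(f)
    # Asserting the runtime type both satisfies mypy and turns a corrupt or
    # mismatched file into a loud failure instead of a silent cast.
    assert isinstance(obj, list)
    for i in obj:
        assert isinstance(i, int)
    return obj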
class SingleTestRunner:
def __init__(self, test: 'TestSerialisation', test_env: T.Dict[str, str],
def __init__(self, test: TestSerialisation, test_env: T.Dict[str, str],
env: T.Dict[str, str], options: argparse.Namespace):
self.test = test
self.test_env = test_env
@ -680,7 +683,7 @@ class SingleTestRunner:
# We don't want setsid() in gdb because gdb needs the
# terminal in order to handle ^C and not show tcsetpgrp()
# errors that would prevent us from using the terminal.
os.setsid() # type: ignore
os.setsid()
extra_cmd = [] # type: T.List[str]
if self.test.protocol is TestProtocol.GTEST:
@ -727,10 +730,10 @@ class SingleTestRunner:
subprocess.run(['taskkill', '/F', '/T', '/PID', str(p.pid)])
else:
def _send_signal_to_process_group(pgid : int, signum : int):
def _send_signal_to_process_group(pgid : int, signum : int) -> None:
""" sends a signal to a process group """
try:
os.killpg(pgid, signum) # type: ignore
os.killpg(pgid, signum)
except ProcessLookupError:
# Sometimes (e.g. with Wine) this happens.
# There's nothing we can do (maybe the process
@ -820,10 +823,10 @@ class TestHarness:
def __del__(self) -> None:
self.close_logfiles()
def __enter__(self):
def __enter__(self) -> 'TestHarness':
return self
def __exit__(self, exc_type, exc_value, traceback) -> None:
def __exit__(self, exc_type: T.Any, exc_value: T.Any, traceback: T.Any) -> None:
self.close_logfiles()
def close_logfiles(self) -> None:
@ -833,7 +836,7 @@ class TestHarness:
lfile.close()
setattr(self, f, None)
def merge_suite_options(self, options: argparse.Namespace, test: 'TestSerialisation') -> T.Dict[str, str]:
def merge_suite_options(self, options: argparse.Namespace, test: TestSerialisation) -> T.Dict[str, str]:
if ':' in options.setup:
if options.setup not in self.build_data.test_setups:
sys.exit("Unknown test setup '{}'.".format(options.setup))
@ -857,7 +860,7 @@ class TestHarness:
options.wrapper = current.exe_wrapper
return current.env.get_env(os.environ.copy())
def get_test_runner(self, test: 'TestSerialisation') -> SingleTestRunner:
def get_test_runner(self, test: TestSerialisation) -> SingleTestRunner:
options = deepcopy(self.options)
if not options.setup:
options.setup = self.build_data.test_setup_default_name
@ -889,7 +892,7 @@ class TestHarness:
sys.exit('Unknown test result encountered: {}'.format(result.res))
def print_stats(self, test_count: int, name_max_len: int,
tests: T.List['TestSerialisation'],
tests: T.List[TestSerialisation],
name: str, result: TestRun, i: int) -> None:
ok_statuses = (TestResult.OK, TestResult.EXPECTEDFAIL)
bad_statuses = (TestResult.FAIL, TestResult.TIMEOUT,
@ -983,14 +986,14 @@ class TestHarness:
@staticmethod
def split_suite_string(suite: str) -> T.Tuple[str, str]:
if ':' in suite:
# mypy can't figure out that str.split(n, 1) will return a list of
# length 2, so we have to help it.
return T.cast(T.Tuple[str, str], tuple(suite.split(':', 1)))
split = suite.split(':', 1)
assert len(split) == 2
return split[0], split[1]
else:
return suite, ""
@staticmethod
def test_in_suites(test: 'TestSerialisation', suites: T.List[str]) -> bool:
def test_in_suites(test: TestSerialisation, suites: T.List[str]) -> bool:
for suite in suites:
(prj_match, st_match) = TestHarness.split_suite_string(suite)
for prjst in test.suite:
@ -1021,12 +1024,12 @@ class TestHarness:
return True
return False
def test_suitable(self, test: 'TestSerialisation') -> bool:
def test_suitable(self, test: TestSerialisation) -> bool:
return ((not self.options.include_suites or
TestHarness.test_in_suites(test, self.options.include_suites)) and not
TestHarness.test_in_suites(test, self.options.exclude_suites))
def get_tests(self) -> T.List['TestSerialisation']:
def get_tests(self) -> T.List[TestSerialisation]:
if not self.tests:
print('No tests defined.')
return []
@ -1089,7 +1092,7 @@ class TestHarness:
wrap += options.wrapper
return wrap
def get_pretty_suite(self, test: 'TestSerialisation') -> str:
def get_pretty_suite(self, test: TestSerialisation) -> str:
if len(self.suites) > 1 and test.suite:
rv = TestHarness.split_suite_string(test.suite[0])[0]
s = "+".join(TestHarness.split_suite_string(s)[1] for s in test.suite)
@ -1099,7 +1102,7 @@ class TestHarness:
else:
return test.name
def run_tests(self, tests: T.List['TestSerialisation']) -> None:
def run_tests(self, tests: T.List[TestSerialisation]) -> None:
executor = None
futures = [] # type: T.List[T.Tuple[conc.Future[TestRun], int, int, T.List[TestSerialisation], str, int]]
test_count = len(tests)
@ -1141,7 +1144,7 @@ class TestHarness:
finally:
os.chdir(startdir)
def drain_futures(self, futures: T.List[T.Tuple['conc.Future[TestRun]', int, int, T.List['TestSerialisation'], str, int]]) -> None:
def drain_futures(self, futures: T.List[T.Tuple['conc.Future[TestRun]', int, int, T.List[TestSerialisation], str, int]]) -> None:
for x in futures:
(result, test_count, name_max_len, tests, name, i) = x
if self.options.repeat > 1 and self.fail_count:

@ -62,7 +62,6 @@ optname_regex = re.compile('[^a-zA-Z0-9_-]')
def StringParser(description, kwargs):
return coredata.UserStringOption(description,
kwargs.get('value', ''),
kwargs.get('choices', []),
kwargs.get('yield', coredata.default_yielding))
@permitted_kwargs({'value', 'yield'})
@ -134,11 +133,11 @@ option_types = {'string': StringParser,
} # type: T.Dict[str, T.Callable[[str, T.Dict], coredata.UserOption]]
class OptionInterpreter:
def __init__(self, subproject):
def __init__(self, subproject: str) -> None:
self.options = {}
self.subproject = subproject
def process(self, option_file):
def process(self, option_file: str) -> None:
try:
with open(option_file, 'r', encoding='utf8') as f:
ast = mparser.Parser(f.read(), option_file).parse()
@ -159,7 +158,7 @@ class OptionInterpreter:
e.file = option_file
raise e
def reduce_single(self, arg):
def reduce_single(self, arg: T.Union[str, mparser.BaseNode]) -> T.Union[str, int, bool]:
if isinstance(arg, str):
return arg
elif isinstance(arg, (mparser.StringNode, mparser.BooleanNode,
@ -189,7 +188,7 @@ class OptionInterpreter:
else:
raise OptionException('Arguments may only be string, int, bool, or array of those.')
def reduce_arguments(self, args):
def reduce_arguments(self, args: mparser.ArgumentNode) -> T.Tuple[T.List[T.Union[str, int, bool]], T.Dict[str, T.Union[str, int, bool]]]:
assert(isinstance(args, mparser.ArgumentNode))
if args.incorrect_order():
raise OptionException('All keyword arguments must be after positional arguments.')
@ -202,7 +201,7 @@ class OptionInterpreter:
reduced_kw[key.value] = self.reduce_single(a)
return reduced_pos, reduced_kw
def evaluate_statement(self, node):
def evaluate_statement(self, node: mparser.BaseNode) -> None:
if not isinstance(node, mparser.FunctionNode):
raise OptionException('Option file may only contain option definitions')
func_name = node.func_name

@ -12,7 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
def destdir_join(d1, d2):
# TODO: consider switching to pathlib for this
def destdir_join(d1: str, d2: str) -> str:
# c:\destdir + c:\prefix must produce c:\destdir\prefix
if len(d1) > 1 and d1[1] == ':' \
and len(d2) > 1 and d2[1] == ':':

@ -18,8 +18,9 @@ from concurrent.futures import ThreadPoolExecutor
from ..environment import detect_clangformat
from ..compilers import lang_suffixes
import typing as T
def clangformat(exelist, srcdir_name, builddir_name):
def clangformat(exelist: T.List[str], srcdir_name: str, builddir_name: str) -> int:
srcdir = pathlib.Path(srcdir_name)
suffixes = set(lang_suffixes['c']).union(set(lang_suffixes['cpp']))
suffixes.add('h')
@ -33,7 +34,7 @@ def clangformat(exelist, srcdir_name, builddir_name):
[x.result() for x in futures]
return 0
def run(args):
def run(args: T.List[str]) -> int:
srcdir_name = args[0]
builddir_name = args[1]

@ -16,10 +16,11 @@ import pathlib
import subprocess
import shutil
from concurrent.futures import ThreadPoolExecutor
import typing as T
from ..compilers import lang_suffixes
def manual_clangformat(srcdir_name, builddir_name):
def manual_clangformat(srcdir_name: str, builddir_name: str) -> int:
srcdir = pathlib.Path(srcdir_name)
suffixes = set(lang_suffixes['c']).union(set(lang_suffixes['cpp']))
suffixes.add('h')
@ -34,7 +35,7 @@ def manual_clangformat(srcdir_name, builddir_name):
[max(returncode, x.result().returncode) for x in futures]
return returncode
def clangformat(srcdir_name, builddir_name):
def clangformat(srcdir_name: str, builddir_name: str) -> int:
run_clang_tidy = None
for rct in ('run-clang-tidy', 'run-clang-tidy.py'):
if shutil.which(rct):
@ -45,8 +46,9 @@ def clangformat(srcdir_name, builddir_name):
else:
print('Could not find run-clang-tidy, running checks manually.')
manual_clangformat(srcdir_name, builddir_name)
return 0
def run(args):
def run(args: T.List[str]) -> int:
srcdir_name = args[0]
builddir_name = args[1]
return clangformat(srcdir_name, builddir_name)

@ -16,8 +16,9 @@ import os
import sys
import shutil
import pickle
import typing as T
def rmtrees(build_dir, trees):
def rmtrees(build_dir: str, trees: T.List[str]) -> None:
for t in trees:
# Never delete trees outside of the builddir
if os.path.isabs(t):
@ -28,7 +29,7 @@ def rmtrees(build_dir, trees):
if os.path.isdir(bt):
shutil.rmtree(bt, ignore_errors=True)
def run(args):
def run(args: T.List[str]) -> int:
if len(args) != 1:
print('Cleaner script for Meson. Do not run on your own please.')
print('cleantrees.py <data-file>')

@ -3,12 +3,12 @@
import argparse
import subprocess
import shutil
import os
import sys
from pathlib import Path
import typing as T
def run(argsv):
commands = [[]]
def run(argsv: T.List[str]) -> int:
commands = [[]] # type: T.List[T.List[str]]
SEPARATOR = ';;;'
# Generate CMD parameters
@ -20,13 +20,14 @@ def run(argsv):
# Parse
args = parser.parse_args(argsv)
directory = Path(args.directory)
dummy_target = None
if len(args.outputs) == 1 and len(args.original_outputs) == 0:
dummy_target = args.outputs[0]
dummy_target = Path(args.outputs[0])
elif len(args.outputs) != len(args.original_outputs):
print('Length of output list and original output list differ')
sys.exit(1)
return 1
for i in args.commands:
if i == SEPARATOR:
@ -62,39 +63,40 @@ def run(argsv):
cmd += [j]
try:
os.makedirs(args.directory, exist_ok=True)
directory.mkdir(parents=True, exist_ok=True)
res = subprocess.run(cmd, stdout=stdout, stderr=stderr, cwd=args.directory, check=True)
res = subprocess.run(cmd, stdout=stdout, stderr=stderr, cwd=str(directory), check=True)
if capture_file:
out_file = Path(args.directory) / capture_file
out_file = directory / capture_file
out_file.write_bytes(res.stdout)
except subprocess.CalledProcessError:
sys.exit(1)
return 1
if dummy_target:
with open(dummy_target, 'a'):
os.utime(dummy_target, None)
sys.exit(0)
dummy_target.touch()
return 0
# Copy outputs
zipped_outputs = zip(args.outputs, args.original_outputs)
zipped_outputs = zip([Path(x) for x in args.outputs], [Path(x) for x in args.original_outputs])
for expected, generated in zipped_outputs:
do_copy = False
if not os.path.exists(expected):
if not os.path.exists(generated):
if not expected.exists():
if not generated.exists():
print('Unable to find generated file. This can cause the build to fail:')
print(generated)
do_copy = False
else:
do_copy = True
elif os.path.exists(generated):
if os.path.getmtime(generated) > os.path.getmtime(expected):
elif generated.exists():
if generated.stat().st_mtime > expected.stat().st_mtime:
do_copy = True
if do_copy:
if os.path.exists(expected):
os.remove(expected)
shutil.copyfile(generated, expected)
if expected.exists():
expected.unlink()
shutil.copyfile(str(generated), str(expected))
return 0
if __name__ == '__main__':
sys.run(sys.argv[1:])
sys.exit(run(sys.argv[1:]))
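The cmake_run_ctgt.py rewrite above swaps os.path calls for pathlib and replaces sys.exit() with return codes. The pathlib equivalences it relies on, shown side by side (the paths here are made up):

from pathlib import Path

directory = Path('builddir')
directory.mkdir(parents=True, exist_ok=True)   # os.makedirs(d, exist_ok=True)
stamp = directory / 'stamp.txt'
stamp.touch()                                  # open(f, 'a') + os.utime(f, None)
if stamp.exists():                             # os.path.exists(f)
    mtime = stamp.stat().st_mtime              # os.path.getmtime(f)
    stamp.unlink()                             # os.remove(f)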

@ -17,8 +17,9 @@ what to run, sets up the environment and executes the command."""
import sys, os, subprocess, shutil, shlex
import re
import typing as T
def run_command(source_dir, build_dir, subdir, meson_command, command, arguments):
def run_command(source_dir: str, build_dir: str, subdir: str, meson_command: T.List[str], command: str, arguments: T.List[str]) -> subprocess.Popen:
env = {'MESON_SOURCE_ROOT': source_dir,
'MESON_BUILD_ROOT': build_dir,
'MESON_SUBDIR': subdir,
@ -50,24 +51,24 @@ def run_command(source_dir, build_dir, subdir, meson_command, command, arguments
print('Could not execute command "{}": {}'.format(command, err))
sys.exit(1)
def is_python_command(cmdname):
def is_python_command(cmdname: str) -> bool:
end_py_regex = r'python(3|3\.\d+)?(\.exe)?$'
return re.search(end_py_regex, cmdname) is not None
def run(args):
def run(args: T.List[str]) -> int:
if len(args) < 4:
print('commandrunner.py <source dir> <build dir> <subdir> <command> [arguments]')
return 1
src_dir = args[0]
build_dir = args[1]
subdir = args[2]
meson_command = args[3]
if is_python_command(meson_command):
meson_command = [meson_command, args[4]]
meson_bin = args[3]
if is_python_command(meson_bin):
meson_command = [meson_bin, args[4]]
command = args[5]
arguments = args[6:]
else:
meson_command = [meson_command]
meson_command = [meson_bin]
command = args[4]
arguments = args[5:]
pc = run_command(src_dir, build_dir, subdir, meson_command, command, arguments)

@ -15,8 +15,9 @@
from mesonbuild import environment, mesonlib
import argparse, sys, os, subprocess, pathlib, stat
import typing as T
def coverage(outputs, source_root, subproject_root, build_root, log_dir, use_llvm_cov):
def coverage(outputs: T.List[str], source_root: str, subproject_root: str, build_root: str, log_dir: str, use_llvm_cov: bool) -> int:
outfiles = []
exitcode = 0
@ -146,7 +147,7 @@ def coverage(outputs, source_root, subproject_root, build_root, log_dir, use_llv
return exitcode
def run(args):
def run(args: T.List[str]) -> int:
if not os.path.isfile('build.ninja'):
print('Coverage currently only works with the Ninja backend.')
return 1

@ -13,8 +13,9 @@
# limitations under the License.
import os, sys
import typing as T
def run(args):
def run(args: T.List[str]) -> int:
if len(args) != 2:
print('delwithsuffix.py <root of subdir to process> <suffix to delete>')
sys.exit(1)

@ -15,6 +15,7 @@
import sys, struct
import shutil, subprocess
import typing as T
from ..mesonlib import OrderedSet
@ -30,7 +31,7 @@ DT_MIPS_RLD_MAP_REL = 1879048245
INSTALL_NAME_TOOL = False
class DataSizes:
def __init__(self, ptrsize, is_le):
def __init__(self, ptrsize: int, is_le: bool) -> None:
if is_le:
p = '<'
else:
@ -57,7 +58,7 @@ class DataSizes:
self.OffSize = 4
class DynamicEntry(DataSizes):
def __init__(self, ifile, ptrsize, is_le):
def __init__(self, ifile: T.BinaryIO, ptrsize: int, is_le: bool) -> None:
super().__init__(ptrsize, is_le)
self.ptrsize = ptrsize
if ptrsize == 64:
@ -67,7 +68,7 @@ class DynamicEntry(DataSizes):
self.d_tag = struct.unpack(self.Sword, ifile.read(self.SwordSize))[0]
self.val = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
def write(self, ofile):
def write(self, ofile: T.BinaryIO) -> None:
if self.ptrsize == 64:
ofile.write(struct.pack(self.Sxword, self.d_tag))
ofile.write(struct.pack(self.XWord, self.val))
@ -76,7 +77,7 @@ class DynamicEntry(DataSizes):
ofile.write(struct.pack(self.Word, self.val))
class SectionHeader(DataSizes):
def __init__(self, ifile, ptrsize, is_le):
def __init__(self, ifile: T.BinaryIO, ptrsize: int, is_le: bool) -> None:
super().__init__(ptrsize, is_le)
if ptrsize == 64:
is_64 = True
@ -116,10 +117,12 @@ class SectionHeader(DataSizes):
self.sh_entsize = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
class Elf(DataSizes):
def __init__(self, bfile, verbose=True):
def __init__(self, bfile: str, verbose: bool = True) -> None:
self.bfile = bfile
self.verbose = verbose
self.bf = open(bfile, 'r+b')
self.sections = [] # type: T.List[SectionHeader]
self.dynamic = [] # type: T.List[DynamicEntry]
try:
(self.ptrsize, self.is_le) = self.detect_elf_type()
super().__init__(self.ptrsize, self.is_le)
@ -130,18 +133,18 @@ class Elf(DataSizes):
self.bf.close()
raise
def __enter__(self):
def __enter__(self) -> 'Elf':
return self
def __del__(self):
def __del__(self) -> None:
if self.bf:
self.bf.close()
def __exit__(self, exc_type, exc_value, traceback):
def __exit__(self, exc_type: T.Any, exc_value: T.Any, traceback: T.Any) -> None:
self.bf.close()
self.bf = None
def detect_elf_type(self):
def detect_elf_type(self) -> T.Tuple[int, bool]:
data = self.bf.read(6)
if data[1:4] != b'ELF':
# This script gets called to non-elf targets too
@ -163,7 +166,7 @@ class Elf(DataSizes):
sys.exit('File "%s" has unknown ELF endianness.' % self.bfile)
return ptrsize, is_le
def parse_header(self):
def parse_header(self) -> None:
self.bf.seek(0)
self.e_ident = struct.unpack('16s', self.bf.read(16))[0]
self.e_type = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
@ -180,13 +183,12 @@ class Elf(DataSizes):
self.e_shnum = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
self.e_shstrndx = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
def parse_sections(self):
def parse_sections(self) -> None:
self.bf.seek(self.e_shoff)
self.sections = []
for _ in range(self.e_shnum):
self.sections.append(SectionHeader(self.bf, self.ptrsize, self.is_le))
def read_str(self):
def read_str(self) -> bytes:
arr = []
x = self.bf.read(1)
while x != b'\0':
@ -196,17 +198,17 @@ class Elf(DataSizes):
raise RuntimeError('Tried to read past the end of the file')
return b''.join(arr)
def find_section(self, target_name):
def find_section(self, target_name: bytes) -> T.Optional[SectionHeader]:
section_names = self.sections[self.e_shstrndx]
for i in self.sections:
self.bf.seek(section_names.sh_offset + i.sh_name)
name = self.read_str()
if name == target_name:
return i
return None
def parse_dynamic(self):
def parse_dynamic(self) -> None:
sec = self.find_section(b'.dynamic')
self.dynamic = []
if sec is None:
return
self.bf.seek(sec.sh_offset)
@ -216,14 +218,14 @@ class Elf(DataSizes):
if e.d_tag == 0:
break
def print_section_names(self):
def print_section_names(self) -> None:
section_names = self.sections[self.e_shstrndx]
for i in self.sections:
self.bf.seek(section_names.sh_offset + i.sh_name)
name = self.read_str()
print(name.decode())
def print_soname(self):
def print_soname(self) -> None:
soname = None
strtab = None
for i in self.dynamic:
@ -237,14 +239,16 @@ class Elf(DataSizes):
self.bf.seek(strtab.val + soname.val)
print(self.read_str())
def get_entry_offset(self, entrynum):
def get_entry_offset(self, entrynum: int) -> T.Optional[int]:
sec = self.find_section(b'.dynstr')
for i in self.dynamic:
if i.d_tag == entrynum:
return sec.sh_offset + i.val
res = sec.sh_offset + i.val
assert isinstance(res, int)
return res
return None
def print_rpath(self):
def print_rpath(self) -> None:
offset = self.get_entry_offset(DT_RPATH)
if offset is None:
print("This file does not have an rpath.")
@ -252,7 +256,7 @@ class Elf(DataSizes):
self.bf.seek(offset)
print(self.read_str())
def print_runpath(self):
def print_runpath(self) -> None:
offset = self.get_entry_offset(DT_RUNPATH)
if offset is None:
print("This file does not have a runpath.")
@ -260,7 +264,7 @@ class Elf(DataSizes):
self.bf.seek(offset)
print(self.read_str())
def print_deps(self):
def print_deps(self) -> None:
sec = self.find_section(b'.dynstr')
deps = []
for i in self.dynamic:
@ -272,7 +276,7 @@ class Elf(DataSizes):
name = self.read_str()
print(name)
def fix_deps(self, prefix):
def fix_deps(self, prefix: bytes) -> None:
sec = self.find_section(b'.dynstr')
deps = []
for i in self.dynamic:
@ -290,15 +294,13 @@ class Elf(DataSizes):
self.bf.seek(offset)
self.bf.write(newname)
def fix_rpath(self, rpath_dirs_to_remove, new_rpath):
def fix_rpath(self, rpath_dirs_to_remove: T.List[bytes], new_rpath: bytes) -> None:
# The path to search for can be either rpath or runpath.
# Fix both of them to be sure.
self.fix_rpathtype_entry(rpath_dirs_to_remove, new_rpath, DT_RPATH)
self.fix_rpathtype_entry(rpath_dirs_to_remove, new_rpath, DT_RUNPATH)
def fix_rpathtype_entry(self, rpath_dirs_to_remove, new_rpath, entrynum):
if isinstance(new_rpath, str):
new_rpath = new_rpath.encode('utf8')
def fix_rpathtype_entry(self, rpath_dirs_to_remove: T.List[bytes], new_rpath: bytes, entrynum: int) -> None:
rp_off = self.get_entry_offset(entrynum)
if rp_off is None:
if self.verbose:
@ -309,7 +311,7 @@ class Elf(DataSizes):
old_rpath = self.read_str()
# Some rpath entries may come from multiple sources.
# Only add each one once.
new_rpaths = OrderedSet()
new_rpaths = OrderedSet() # type: OrderedSet[bytes]
if new_rpath:
new_rpaths.add(new_rpath)
if old_rpath:
@ -326,7 +328,7 @@ class Elf(DataSizes):
new_rpath = b':'.join(new_rpaths)
if len(old_rpath) < len(new_rpath):
msg = "New rpath must not be longer than the old one.\n Old: {}\n New: {}".format(old_rpath, new_rpath)
msg = "New rpath must not be longer than the old one.\n Old: {}\n New: {}".format(old_rpath.decode('utf-8'), new_rpath.decode('utf-8'))
sys.exit(msg)
# The linker does read-only string deduplication. If there is a
# string that shares a suffix with the rpath, they might get
@ -343,7 +345,7 @@ class Elf(DataSizes):
self.bf.write(new_rpath)
self.bf.write(b'\0')
def remove_rpath_entry(self, entrynum):
def remove_rpath_entry(self, entrynum: int) -> None:
sec = self.find_section(b'.dynamic')
if sec is None:
return None
@ -363,7 +365,7 @@ class Elf(DataSizes):
entry.write(self.bf)
return None
def fix_elf(fname, rpath_dirs_to_remove, new_rpath, verbose=True):
def fix_elf(fname: str, rpath_dirs_to_remove: T.List[bytes], new_rpath: T.Optional[bytes], verbose: bool = True) -> None:
with Elf(fname, verbose) as e:
if new_rpath is None:
e.print_rpath()
@ -371,7 +373,7 @@ def fix_elf(fname, rpath_dirs_to_remove, new_rpath, verbose=True):
else:
e.fix_rpath(rpath_dirs_to_remove, new_rpath)
def get_darwin_rpaths_to_remove(fname):
def get_darwin_rpaths_to_remove(fname: str) -> T.List[str]:
out = subprocess.check_output(['otool', '-l', fname],
universal_newlines=True,
stderr=subprocess.DEVNULL)
@ -389,7 +391,7 @@ def get_darwin_rpaths_to_remove(fname):
result.append(rp)
return result
def fix_darwin(fname, new_rpath, final_path, install_name_mappings):
def fix_darwin(fname: str, new_rpath: str, final_path: str, install_name_mappings: T.Dict[str, str]) -> None:
try:
rpaths = get_darwin_rpaths_to_remove(fname)
except subprocess.CalledProcessError:
@ -439,7 +441,7 @@ def fix_darwin(fname, new_rpath, final_path, install_name_mappings):
except Exception as err:
raise SystemExit(err)
def fix_jar(fname):
def fix_jar(fname: str) -> None:
subprocess.check_call(['jar', 'xfv', fname, 'META-INF/MANIFEST.MF'])
with open('META-INF/MANIFEST.MF', 'r+') as f:
lines = f.readlines()
@ -450,7 +452,7 @@ def fix_jar(fname):
f.truncate()
subprocess.check_call(['jar', 'ufm', fname, 'META-INF/MANIFEST.MF'])
def fix_rpath(fname, rpath_dirs_to_remove, new_rpath, final_path, install_name_mappings, verbose=True):
def fix_rpath(fname: str, rpath_dirs_to_remove: T.List[bytes], new_rpath: T.Union[str, bytes], final_path: str, install_name_mappings: T.Dict[str, str], verbose: bool = True) -> None:
global INSTALL_NAME_TOOL
# Static libraries, import libraries, debug information, headers, etc
# never have rpaths
@ -461,6 +463,8 @@ def fix_rpath(fname, rpath_dirs_to_remove, new_rpath, final_path, install_name_m
if fname.endswith('.jar'):
fix_jar(fname)
return
if isinstance(new_rpath, str):
new_rpath = new_rpath.encode('utf8')
fix_elf(fname, rpath_dirs_to_remove, new_rpath, verbose)
return
except SystemExit as e:
@ -473,6 +477,8 @@ def fix_rpath(fname, rpath_dirs_to_remove, new_rpath, final_path, install_name_m
# (upto 30ms), which is significant with --only-changed. For details, see:
# https://github.com/mesonbuild/meson/pull/6612#discussion_r378581401
if INSTALL_NAME_TOOL is False:
INSTALL_NAME_TOOL = shutil.which('install_name_tool')
INSTALL_NAME_TOOL = bool(shutil.which('install_name_tool'))
if INSTALL_NAME_TOOL:
if isinstance(new_rpath, bytes):
new_rpath = new_rpath.decode('utf8')
fix_darwin(fname, new_rpath, final_path, install_name_mappings)
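depfixer.py uses two patterns worth noting: container attributes are annotated with comment-style hints (self.sections, self.dynamic, the OrderedSet), which predate the Python 3.6 variable-annotation syntax, and the str to bytes conversion is hoisted into fix_rpath() so the lower-level ELF methods can take plain bytes. A rough standalone sketch of the comment-style annotation, which mypy reads the same way as an inline annotation (illustrative, not from the diff):

import typing as T

class Example:
    def __init__(self) -> None:
        # Comment-style annotations keep the runtime code unchanged
        # while telling mypy the element type of each container.
        self.sections = []  # type: T.List[str]
        self.cache = {}     # type: T.Dict[str, int]

    def add(self, name: str) -> None:
        self.sections.append(name)
        self.cache[name] = len(self.sections)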

@ -16,8 +16,9 @@
the command given in the rest of the arguments.'''
import os, subprocess, sys
import typing as T
def run(args):
def run(args: T.List[str]) -> int:
dirname = args[0]
command = args[1:]

@ -17,6 +17,7 @@ import shutil
import argparse
import subprocess
from . import destdir_join
import typing as T
parser = argparse.ArgumentParser()
parser.add_argument('command')
@ -27,7 +28,7 @@ parser.add_argument('--localedir', default='')
parser.add_argument('--subdir', default='')
parser.add_argument('--extra-args', default='')
def read_linguas(src_sub):
def read_linguas(src_sub: str) -> T.List[str]:
# Syntax of this file is documented here:
# https://www.gnu.org/software/gettext/manual/html_node/po_002fLINGUAS.html
linguas = os.path.join(src_sub, 'LINGUAS')
@ -43,7 +44,7 @@ def read_linguas(src_sub):
print('Could not find file LINGUAS in {}'.format(src_sub))
return []
def run_potgen(src_sub, pkgname, datadirs, args):
def run_potgen(src_sub: str, pkgname: str, datadirs: str, args: T.List[str]) -> int:
listfile = os.path.join(src_sub, 'POTFILES.in')
if not os.path.exists(listfile):
listfile = os.path.join(src_sub, 'POTFILES')
@ -60,13 +61,13 @@ def run_potgen(src_sub, pkgname, datadirs, args):
'-D', os.environ['MESON_SOURCE_ROOT'], '-k_', '-o', ofile] + args,
env=child_env)
def gen_gmo(src_sub, bld_sub, langs):
def gen_gmo(src_sub: str, bld_sub: str, langs: T.List[str]) -> int:
for l in langs:
subprocess.check_call(['msgfmt', os.path.join(src_sub, l + '.po'),
'-o', os.path.join(bld_sub, l + '.gmo')])
return 0
def update_po(src_sub, pkgname, langs):
def update_po(src_sub: str, pkgname: str, langs: T.List[str]) -> int:
potfile = os.path.join(src_sub, pkgname + '.pot')
for l in langs:
pofile = os.path.join(src_sub, l + '.po')
@ -76,7 +77,7 @@ def update_po(src_sub, pkgname, langs):
subprocess.check_call(['msginit', '--input', potfile, '--output-file', pofile, '--locale', l, '--no-translator'])
return 0
def do_install(src_sub, bld_sub, dest, pkgname, langs):
def do_install(src_sub: str, bld_sub: str, dest: str, pkgname: str, langs: T.List[str]) -> int:
for l in langs:
srcfile = os.path.join(bld_sub, l + '.gmo')
outfile = os.path.join(dest, l, 'LC_MESSAGES',
@ -88,7 +89,7 @@ def do_install(src_sub, bld_sub, dest, pkgname, langs):
print('Installing %s to %s' % (srcfile, outfile))
return 0
def run(args):
def run(args: T.List[str]) -> int:
options = parser.parse_args(args)
subcmd = options.command
langs = options.langs.split('@@') if options.langs else None
@ -120,3 +121,4 @@ def run(args):
else:
print('Unknown subcommand.')
return 1
return 0
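The return 0 added at the end of run() is not cosmetic: once the function is annotated -> int, a path that falls off the end would implicitly return None, which mypy reports. A tiny hypothetical illustration of the same shape (not from the diff):

def run_subcommand(subcmd: str) -> int:
    if subcmd == 'pot':
        return 0
    if subcmd == 'gen_gmo':
        return 0
    print('Unknown subcommand.')
    # Without an explicit return on this final path, mypy flags a
    # missing return statement for a function declared to return int.
    return 1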

@ -18,6 +18,7 @@ import shutil
import argparse
from ..mesonlib import MesonException, Popen_safe, is_windows, is_cygwin, split_args
from . import destdir_join
import typing as T
parser = argparse.ArgumentParser()
@ -50,7 +51,7 @@ for tool in ['scan', 'scangobj', 'mkdb', 'mkhtml', 'fixxref']:
program_name = 'gtkdoc-' + tool
parser.add_argument('--' + program_name, dest=program_name.replace('-', '_'))
def gtkdoc_run_check(cmd, cwd, library_paths=None):
def gtkdoc_run_check(cmd: T.List[str], cwd: str, library_paths: T.Optional[T.List[str]] = None) -> None:
if library_paths is None:
library_paths = []
@ -85,12 +86,12 @@ def gtkdoc_run_check(cmd, cwd, library_paths=None):
except UnicodeEncodeError:
pass
def build_gtkdoc(source_root, build_root, doc_subdir, src_subdirs,
main_file, module, module_version,
html_args, scan_args, fixxref_args, mkdb_args,
gobject_typesfile, scanobjs_args, run, ld, cc, ldflags, cflags,
html_assets, content_files, ignore_headers, namespace,
expand_content_files, mode, options):
def build_gtkdoc(source_root: str, build_root: str, doc_subdir: str, src_subdirs: T.List[str],
main_file: str, module: str, module_version: str,
html_args: T.List[str], scan_args: T.List[str], fixxref_args: T.List[str], mkdb_args: T.List[str],
gobject_typesfile: str, scanobjs_args: T.List[str], run: str, ld: str, cc: str, ldflags: str, cflags: str,
html_assets: T.List[str], content_files: T.List[str], ignore_headers: T.List[str], namespace: str,
expand_content_files: T.List[str], mode: str, options: argparse.Namespace) -> None:
print("Building documentation for %s" % module)
src_dir_args = []
@ -217,13 +218,13 @@ def build_gtkdoc(source_root, build_root, doc_subdir, src_subdirs,
shutil.move(os.path.join(htmldir, '{}.devhelp2'.format(module)),
os.path.join(htmldir, '{}-{}.devhelp2'.format(module, module_version)))
def install_gtkdoc(build_root, doc_subdir, install_prefix, datadir, module):
def install_gtkdoc(build_root: str, doc_subdir: str, install_prefix: str, datadir: str, module: str) -> None:
source = os.path.join(build_root, doc_subdir, 'html')
final_destination = os.path.join(install_prefix, datadir, module)
shutil.rmtree(final_destination, ignore_errors=True)
shutil.copytree(source, final_destination)
def run(args):
def run(args: T.List[str]) -> int:
options = parser.parse_args(args)
if options.htmlargs:
htmlargs = options.htmlargs.split('@@')

@ -5,6 +5,7 @@ import subprocess
from . import destdir_join
import argparse
import typing as T
parser = argparse.ArgumentParser()
parser.add_argument('--install')
@ -14,7 +15,7 @@ parser.add_argument('--builddir')
parser.add_argument('--project-version')
def run(argv):
def run(argv: T.List[str]) -> int:
options, args = parser.parse_known_args(argv)
subenv = os.environ.copy()
@ -23,7 +24,7 @@ def run(argv):
res = subprocess.call(args, cwd=options.builddir, env=subenv)
if res != 0:
exit(res)
return res
if options.install:
source_dir = os.path.join(options.builddir, options.install)
@ -34,3 +35,4 @@ def run(argv):
shutil.rmtree(installdir, ignore_errors=True)
shutil.copytree(source_dir, installdir)
return 0

@ -17,19 +17,20 @@ import sys
import argparse
import pickle
import subprocess
import typing as T
from .. import mesonlib
from ..backend.backends import ExecutableSerialisation
options = None
def buildparser():
def buildparser() -> argparse.ArgumentParser:
parser = argparse.ArgumentParser(description='Custom executable wrapper for Meson. Do not run on your own, mmm\'kay?')
parser.add_argument('--unpickle')
parser.add_argument('--capture')
return parser
def run_exe(exe):
def run_exe(exe: ExecutableSerialisation) -> int:
if exe.exe_runner:
if not exe.exe_runner.found():
raise AssertionError('BUG: Can\'t run cross-compiled exe {!r} with not-found '
@ -56,7 +57,7 @@ def run_exe(exe):
if p.returncode == 0xc0000135:
# STATUS_DLL_NOT_FOUND on Windows indicating a common problem that is otherwise hard to diagnose
raise FileNotFoundError('Missing DLLs on calling {!r}'.format(exe.name))
raise FileNotFoundError('Missing DLLs on calling {!r}'.format(cmd_args))
if exe.capture and p.returncode == 0:
skip_write = False
@ -74,7 +75,7 @@ def run_exe(exe):
sys.stderr.buffer.write(stderr)
return p.returncode
def run(args):
def run(args: T.List[str]) -> int:
global options
parser = buildparser()
options, cmd_args = parser.parse_known_args(args)

@ -15,6 +15,7 @@
import argparse
import subprocess
import os
import typing as T
parser = argparse.ArgumentParser()
parser.add_argument('input')
@ -25,7 +26,7 @@ parser.add_argument('--datadirs', default='')
parser.add_argument('args', default=[], metavar='extra msgfmt argument', nargs='*')
def run(args):
def run(args: T.List[str]) -> int:
options = parser.parse_args(args)
env = None
if options.datadirs:

@ -14,10 +14,13 @@
import sys, os
import pickle, subprocess
import typing as T
from ..coredata import CoreData
from ..backend.vs2010backend import RegenInfo
# This could also be used for XCode.
def need_regen(regeninfo, regen_timestamp):
def need_regen(regeninfo: RegenInfo, regen_timestamp: float) -> bool:
for i in regeninfo.depfiles:
curfile = os.path.join(regeninfo.build_dir, i)
curtime = os.stat(curfile).st_mtime
@ -31,7 +34,7 @@ def need_regen(regeninfo, regen_timestamp):
Vs2010Backend.touch_regen_timestamp(regeninfo.build_dir)
return False
def regen(regeninfo, meson_command, backend):
def regen(regeninfo: RegenInfo, meson_command: T.List[str], backend: str) -> None:
cmd = meson_command + ['--internal',
'regenerate',
regeninfo.build_dir,
@ -39,19 +42,22 @@ def regen(regeninfo, meson_command, backend):
'--backend=' + backend]
subprocess.check_call(cmd)
def run(args):
def run(args: T.List[str]) -> int:
private_dir = args[0]
dumpfile = os.path.join(private_dir, 'regeninfo.dump')
coredata = os.path.join(private_dir, 'coredata.dat')
coredata_file = os.path.join(private_dir, 'coredata.dat')
with open(dumpfile, 'rb') as f:
regeninfo = pickle.load(f)
with open(coredata, 'rb') as f:
assert isinstance(regeninfo, RegenInfo)
with open(coredata_file, 'rb') as f:
coredata = pickle.load(f)
assert isinstance(coredata, CoreData)
backend = coredata.get_builtin_option('backend')
assert isinstance(backend, str)
regen_timestamp = os.stat(dumpfile).st_mtime
if need_regen(regeninfo, regen_timestamp):
regen(regeninfo, coredata.meson_command, backend)
sys.exit(0)
return 0
if __name__ == '__main__':
run(sys.argv[1:])
sys.exit(run(sys.argv[1:]))
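pickle.load() and get_builtin_option() are both typed as returning Any, so the new assert isinstance(...) lines give mypy (and a future reader) a concrete type before the values are used, and fail loudly if the on-disk format ever changes. A minimal sketch of that narrowing pattern, assuming a pickled string payload (illustrative):

import pickle

def load_backend_name(path: str) -> str:
    with open(path, 'rb') as f:
        data = pickle.load(f)  # pickle.load() is typed as returning Any
    # The assert both documents the expectation and narrows Any -> str for mypy.
    assert isinstance(data, str)
    return data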

@ -12,30 +12,31 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import shutil
import tempfile
from ..environment import detect_ninja, detect_scanbuild
from pathlib import Path
import typing as T
def scanbuild(exelist, srcdir, blddir, privdir, logdir, args):
with tempfile.TemporaryDirectory(dir=privdir) as scandir:
def scanbuild(exelist: T.List[str], srcdir: Path, blddir: Path, privdir: Path, logdir: Path, args: T.List[str]) -> int:
with tempfile.TemporaryDirectory(dir=str(privdir)) as scandir:
meson_cmd = exelist + args
build_cmd = exelist + ['-o', logdir] + detect_ninja() + ['-C', scandir]
rc = subprocess.call(meson_cmd + [srcdir, scandir])
build_cmd = exelist + ['-o', str(logdir)] + detect_ninja() + ['-C', scandir]
rc = subprocess.call(meson_cmd + [str(srcdir), scandir])
if rc != 0:
return rc
return subprocess.call(build_cmd)
def run(args):
srcdir = args[0]
blddir = args[1]
def run(args: T.List[str]) -> int:
srcdir = Path(args[0])
blddir = Path(args[1])
meson_cmd = args[2:]
privdir = os.path.join(blddir, 'meson-private')
logdir = os.path.join(blddir, 'meson-logs/scanbuild')
shutil.rmtree(logdir, ignore_errors=True)
privdir = blddir / 'meson-private'
logdir = blddir / 'meson-logs' / 'scanbuild'
shutil.rmtree(str(logdir), ignore_errors=True)
exelist = detect_scanbuild()
if not exelist:
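scanbuild.py moves to pathlib.Path internally and converts back with str(...) exactly where the value leaves the function (tempfile, rmtree, the subprocess argv), which keeps the annotations simple. A rough sketch of that boundary style, with invented paths (not from the diff):

import shutil
import subprocess
from pathlib import Path
import typing as T

def run_in_logdir(blddir: Path, cmd: T.List[str]) -> int:
    logdir = blddir / 'meson-logs' / 'scanbuild'
    shutil.rmtree(str(logdir), ignore_errors=True)  # convert at the boundary
    logdir.mkdir(parents=True, exist_ok=True)
    return subprocess.call(cmd + [str(logdir)])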

@ -36,12 +36,12 @@ parser.add_argument('args', nargs='+')
TOOL_WARNING_FILE = None
RELINKING_WARNING = 'Relinking will always happen on source changes.'
def dummy_syms(outfilename: str):
def dummy_syms(outfilename: str) -> None:
"""Just touch it so relinking happens always."""
with open(outfilename, 'w'):
pass
def write_if_changed(text: str, outfilename: str):
def write_if_changed(text: str, outfilename: str) -> None:
try:
with open(outfilename, 'r') as f:
oldtext = f.read()
@ -52,13 +52,11 @@ def write_if_changed(text: str, outfilename: str):
with open(outfilename, 'w') as f:
f.write(text)
def print_tool_warning(tool: list, msg: str, stderr: str = None):
def print_tool_warning(tools: T.List[str], msg: str, stderr: T.Optional[str] = None) -> None:
global TOOL_WARNING_FILE
if os.path.exists(TOOL_WARNING_FILE):
return
if len(tool) == 1:
tool = tool[0]
m = '{!r} {}. {}'.format(tool, msg, RELINKING_WARNING)
m = '{!r} {}. {}'.format(tools, msg, RELINKING_WARNING)
if stderr:
m += '\n' + stderr
mlog.warning(m)
@ -73,7 +71,7 @@ def get_tool(name: str) -> T.List[str]:
return shlex.split(os.environ[evar])
return [name]
def call_tool(name: str, args: T.List[str], **kwargs) -> str:
def call_tool(name: str, args: T.List[str], **kwargs: T.Any) -> str:
tool = get_tool(name)
try:
p, output, e = Popen_safe(tool + args, **kwargs)
@ -88,7 +86,7 @@ def call_tool(name: str, args: T.List[str], **kwargs) -> str:
return None
return output
def call_tool_nowarn(tool: T.List[str], **kwargs) -> T.Tuple[str, str]:
def call_tool_nowarn(tool: T.List[str], **kwargs: T.Any) -> T.Tuple[str, str]:
try:
p, output, e = Popen_safe(tool, **kwargs)
except FileNotFoundError:
@ -99,7 +97,7 @@ def call_tool_nowarn(tool: T.List[str], **kwargs) -> T.Tuple[str, str]:
return None, e
return output, None
def gnu_syms(libfilename: str, outfilename: str):
def gnu_syms(libfilename: str, outfilename: str) -> None:
# Get the name of the library
output = call_tool('readelf', ['-d', libfilename])
if not output:
@ -126,7 +124,7 @@ def gnu_syms(libfilename: str, outfilename: str):
result += [' '.join(entry)]
write_if_changed('\n'.join(result) + '\n', outfilename)
def solaris_syms(libfilename: str, outfilename: str):
def solaris_syms(libfilename: str, outfilename: str) -> None:
# gnu_syms() works with GNU nm & readelf, not Solaris nm & elfdump
origpath = os.environ['PATH']
try:
@ -135,7 +133,7 @@ def solaris_syms(libfilename: str, outfilename: str):
finally:
os.environ['PATH'] = origpath
def osx_syms(libfilename: str, outfilename: str):
def osx_syms(libfilename: str, outfilename: str) -> None:
# Get the name of the library
output = call_tool('otool', ['-l', libfilename])
if not output:
@ -156,7 +154,7 @@ def osx_syms(libfilename: str, outfilename: str):
result += [' '.join(x.split()[0:2]) for x in output.split('\n')]
write_if_changed('\n'.join(result) + '\n', outfilename)
def openbsd_syms(libfilename: str, outfilename: str):
def openbsd_syms(libfilename: str, outfilename: str) -> None:
# Get the name of the library
output = call_tool('readelf', ['-d', libfilename])
if not output:
@ -173,7 +171,7 @@ def openbsd_syms(libfilename: str, outfilename: str):
result += [' '.join(x.split()[0:2]) for x in output.split('\n') if x and not x.endswith('U ')]
write_if_changed('\n'.join(result) + '\n', outfilename)
def cygwin_syms(impfilename: str, outfilename: str):
def cygwin_syms(impfilename: str, outfilename: str) -> None:
# Get the name of the library
output = call_tool('dlltool', ['-I', impfilename])
if not output:
@ -242,23 +240,23 @@ def _get_implib_exports(impfilename: str) -> T.Tuple[T.List[str], str]:
all_stderr += e
return ([], all_stderr)
def windows_syms(impfilename: str, outfilename: str):
def windows_syms(impfilename: str, outfilename: str) -> None:
# Get the name of the library
result, e = _get_implib_dllname(impfilename)
if not result:
print_tool_warning('lib, llvm-lib, dlltool', 'do not work or were not found', e)
print_tool_warning(['lib', 'llvm-lib', 'dlltool'], 'do not work or were not found', e)
dummy_syms(outfilename)
return
# Get a list of all symbols exported
symbols, e = _get_implib_exports(impfilename)
if not symbols:
print_tool_warning('dumpbin, llvm-nm, nm', 'do not work or were not found', e)
print_tool_warning(['dumpbin', 'llvm-nm', 'nm'], 'do not work or were not found', e)
dummy_syms(outfilename)
return
result += symbols
write_if_changed('\n'.join(result) + '\n', outfilename)
def gen_symbols(libfilename: str, impfilename: str, outfilename: str, cross_host: str):
def gen_symbols(libfilename: str, impfilename: str, outfilename: str, cross_host: str) -> None:
if cross_host is not None:
# In case of cross builds just always relink. In theory we could
# determine the correct toolset, but we would need to use the correct
@ -295,7 +293,7 @@ def gen_symbols(libfilename: str, impfilename: str, outfilename: str, cross_host
pass
dummy_syms(outfilename)
def run(args):
def run(args: T.List[str]) -> int:
global TOOL_WARNING_FILE
options = parser.parse_args(args)
if len(options.args) != 4:
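symbolextractor.py shows two recurring strict-mode fixes: parameters that default to None become T.Optional[...], and catch-all keyword arguments become **kwargs: T.Any (the warning helper also now takes a real list of tool names rather than a comma-separated string). A compact sketch of the annotations, with illustrative names:

import typing as T

def warn(tools: T.List[str], msg: str, stderr: T.Optional[str] = None) -> None:
    # T.Optional[...] makes the "may be None" default explicit to mypy.
    text = '{!r} {}'.format(tools, msg)
    if stderr:
        text += '\n' + stderr
    print(text)

def call_tool(name: str, args: T.List[str], **kwargs: T.Any) -> T.Optional[str]:
    # Annotating **kwargs as T.Any keeps pass-through keywords usable
    # under strict checking without enumerating them.
    if not args:
        return None
    return name + ' ' + ' '.join(args)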

@ -15,9 +15,9 @@
import os
import subprocess
from pathlib import Path
import typing as T
def ls_as_bytestream():
def ls_as_bytestream() -> bytes:
if os.path.exists('.git'):
return subprocess.run(['git', 'ls-tree', '-r', '--name-only', 'HEAD'],
stdout=subprocess.PIPE).stdout
@ -28,24 +28,26 @@ def ls_as_bytestream():
return '\n'.join(files).encode()
def cscope():
def cscope() -> int:
ls = b'\n'.join([b'"%s"' % f for f in ls_as_bytestream().split()])
return subprocess.run(['cscope', '-v', '-b', '-i-'], input=ls).returncode
def ctags():
def ctags() -> int:
ls = ls_as_bytestream()
return subprocess.run(['ctags', '-L-'], input=ls).returncode
def etags():
def etags() -> int:
ls = ls_as_bytestream()
return subprocess.run(['etags', '-'], input=ls).returncode
def run(args):
def run(args: T.List[str]) -> int:
tool_name = args[0]
srcdir_name = args[1]
os.chdir(srcdir_name)
assert tool_name in ['cscope', 'ctags', 'etags']
return globals()[tool_name]()
res = globals()[tool_name]()
assert isinstance(res, int)
return res

@ -13,10 +13,11 @@
# limitations under the License.
import os
import typing as T
logfile = 'meson-logs/install-log.txt'
def do_uninstall(log):
def do_uninstall(log: str) -> None:
failures = 0
successes = 0
for line in open(log):
@ -38,7 +39,7 @@ def do_uninstall(log):
print('Failed:', failures)
print('\nRemember that files created by custom scripts have not been removed.')
def run(args):
def run(args: T.List[str]) -> int:
if args:
print('Weird error.')
return 1

@ -13,9 +13,9 @@
# limitations under the License.
import sys, os, subprocess, re
import typing as T
def config_vcs_tag(infile, outfile, fallback, source_dir, replace_string, regex_selector, cmd):
def config_vcs_tag(infile: str, outfile: str, fallback: str, source_dir: str, replace_string: str, regex_selector: str, cmd: T.List[str]) -> None:
try:
output = subprocess.check_output(cmd, cwd=source_dir)
new_string = re.search(regex_selector, output.decode()).group(1).strip()
@ -34,7 +34,7 @@ def config_vcs_tag(infile, outfile, fallback, source_dir, replace_string, regex_
f.write(new_data)
def run(args):
def run(args: T.List[str]) -> int:
infile, outfile, fallback, source_dir, replace_string, regex_selector = args[0:6]
command = args[6:]
config_vcs_tag(infile, outfile, fallback, source_dir, replace_string, regex_selector, command)

@ -20,6 +20,7 @@ from .. import mlog
from ..mesonlib import has_path_sep
from . import destdir_join
from .gettext import read_linguas
import typing as T
parser = argparse.ArgumentParser()
parser.add_argument('command')
@ -31,19 +32,19 @@ parser.add_argument('--media', dest='media', default='')
parser.add_argument('--langs', dest='langs', default='')
parser.add_argument('--symlinks', type=bool, dest='symlinks', default=False)
def build_pot(srcdir, project_id, sources):
def build_pot(srcdir: str, project_id: str, sources: T.List[str]) -> None:
# Must be relative paths
sources = [os.path.join('C', source) for source in sources]
outfile = os.path.join(srcdir, project_id + '.pot')
subprocess.call(['itstool', '-o', outfile] + sources)
def update_po(srcdir, project_id, langs):
def update_po(srcdir: str, project_id: str, langs: T.List[str]) -> None:
potfile = os.path.join(srcdir, project_id + '.pot')
for lang in langs:
pofile = os.path.join(srcdir, lang, lang + '.po')
subprocess.call(['msgmerge', '-q', '-o', pofile, pofile, potfile])
def build_translations(srcdir, blddir, langs):
def build_translations(srcdir: str, blddir: str, langs: T.List[str]) -> None:
for lang in langs:
outdir = os.path.join(blddir, lang)
os.makedirs(outdir, exist_ok=True)
@ -52,14 +53,14 @@ def build_translations(srcdir, blddir, langs):
'-o', os.path.join(outdir, lang + '.gmo')
])
def merge_translations(blddir, sources, langs):
def merge_translations(blddir: str, sources: T.List[str], langs: T.List[str]) -> None:
for lang in langs:
subprocess.call([
'itstool', '-m', os.path.join(blddir, lang, lang + '.gmo'),
'-o', os.path.join(blddir, lang)
] + sources)
def install_help(srcdir, blddir, sources, media, langs, install_dir, destdir, project_id, symlinks):
def install_help(srcdir: str, blddir: str, sources: T.List[str], media: T.List[str], langs: T.List[str], install_dir: str, destdir: str, project_id: str, symlinks: bool) -> None:
c_install_dir = os.path.join(install_dir, 'C', project_id)
for lang in langs + ['C']:
indir = destdir_join(destdir, os.path.join(install_dir, lang, project_id))
@ -101,7 +102,7 @@ def install_help(srcdir, blddir, sources, media, langs, install_dir, destdir, pr
shutil.copyfile(infile, outfile)
shutil.copystat(infile, outfile)
def run(args):
def run(args: T.List[str]) -> int:
options = parser.parse_args(args)
langs = options.langs.split('@@') if options.langs else []
media = options.media.split('@@') if options.media else []
@ -129,3 +130,4 @@ def run(args):
merge_translations(build_subdir, abs_sources, langs)
install_help(src_subdir, build_subdir, sources, media, langs, install_dir,
destdir, options.project_id, options.symlinks)
return 0

@ -146,7 +146,7 @@ class CppProject(SampleImpl):
self.name = options.name
self.version = options.version
def create_executable(self):
def create_executable(self) -> None:
lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
source_name = lowercase_token + '.cpp'
open(source_name, 'w').write(hello_cpp_template.format(project_name=self.name))
@ -155,7 +155,7 @@ class CppProject(SampleImpl):
source_name=source_name,
version=self.version))
def create_library(self):
def create_library(self) -> None:
lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
uppercase_token = lowercase_token.upper()
class_name = uppercase_token[0] + lowercase_token[1:]

@ -95,7 +95,7 @@ class CSharpProject(SampleImpl):
self.name = options.name
self.version = options.version
def create_executable(self):
def create_executable(self) -> None:
lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
uppercase_token = lowercase_token.upper()
class_name = uppercase_token[0] + lowercase_token[1:]
@ -107,7 +107,7 @@ class CSharpProject(SampleImpl):
source_name=source_name,
version=self.version))
def create_library(self):
def create_library(self) -> None:
lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
uppercase_token = lowercase_token.upper()
class_name = uppercase_token[0] + lowercase_token[1:]

@ -129,7 +129,7 @@ class CProject(SampleImpl):
self.name = options.name
self.version = options.version
def create_executable(self):
def create_executable(self) -> None:
lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
source_name = lowercase_token + '.c'
open(source_name, 'w').write(hello_c_template.format(project_name=self.name))
@ -138,7 +138,7 @@ class CProject(SampleImpl):
source_name=source_name,
version=self.version))
def create_library(self):
def create_library(self) -> None:
lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
uppercase_token = lowercase_token.upper()
function_name = lowercase_token[0:3] + '_func'

@ -146,7 +146,7 @@ class CudaProject(SampleImpl):
self.name = options.name
self.version = options.version
def create_executable(self):
def create_executable(self) -> None:
lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
source_name = lowercase_token + '.cu'
open(source_name, 'w').write(hello_cuda_template.format(project_name=self.name))
@ -155,7 +155,7 @@ class CudaProject(SampleImpl):
source_name=source_name,
version=self.version))
def create_library(self):
def create_library(self) -> None:
lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
uppercase_token = lowercase_token.upper()
class_name = uppercase_token[0] + lowercase_token[1:]

@ -107,7 +107,7 @@ class DlangProject(SampleImpl):
self.name = options.name
self.version = options.version
def create_executable(self):
def create_executable(self) -> None:
lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
source_name = lowercase_token + '.d'
open(source_name, 'w').write(hello_d_template.format(project_name=self.name))
@ -116,7 +116,7 @@ class DlangProject(SampleImpl):
source_name=source_name,
version=self.version))
def create_library(self):
def create_library(self) -> None:
lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
uppercase_token = lowercase_token.upper()
function_name = lowercase_token[0:3] + '_func'

@ -106,7 +106,7 @@ class FortranProject(SampleImpl):
self.name = options.name
self.version = options.version
def create_executable(self):
def create_executable(self) -> None:
lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
source_name = lowercase_token + '.f90'
open(source_name, 'w').write(hello_fortran_template.format(project_name=self.name))
@ -115,7 +115,7 @@ class FortranProject(SampleImpl):
source_name=source_name,
version=self.version))
def create_library(self):
def create_library(self) -> None:
lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
uppercase_token = lowercase_token.upper()
function_name = lowercase_token[0:3] + '_func'

@ -99,7 +99,7 @@ class JavaProject(SampleImpl):
self.name = options.name
self.version = options.version
def create_executable(self):
def create_executable(self) -> None:
lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
uppercase_token = lowercase_token.upper()
class_name = uppercase_token[0] + lowercase_token[1:]
@ -111,7 +111,7 @@ class JavaProject(SampleImpl):
source_name=source_name,
version=self.version))
def create_library(self):
def create_library(self) -> None:
lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
uppercase_token = lowercase_token.upper()
class_name = uppercase_token[0] + lowercase_token[1:]

@ -12,6 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
meson_executable_template = '''project('{project_name}', '{language}',
version : '{version}',
default_options : [{default_options}])
@ -33,7 +35,7 @@ jar('{executable}',
'''
def create_meson_build(options):
def create_meson_build(options: argparse.Namespace) -> None:
if options.type != 'executable':
raise SystemExit('\nGenerating a meson.build file from existing sources is\n'
'supported only for project type "executable".\n'

@ -129,7 +129,7 @@ class ObjCppProject(SampleImpl):
self.name = options.name
self.version = options.version
def create_executable(self):
def create_executable(self) -> None:
lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
source_name = lowercase_token + '.mm'
open(source_name, 'w').write(hello_objcpp_template.format(project_name=self.name))
@ -138,7 +138,7 @@ class ObjCppProject(SampleImpl):
source_name=source_name,
version=self.version))
def create_library(self):
def create_library(self) -> None:
lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
uppercase_token = lowercase_token.upper()
function_name = lowercase_token[0:3] + '_func'

@ -129,7 +129,7 @@ class ObjCProject(SampleImpl):
self.name = options.name
self.version = options.version
def create_executable(self):
def create_executable(self) -> None:
lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
source_name = lowercase_token + '.m'
open(source_name, 'w').write(hello_objc_template.format(project_name=self.name))
@ -138,7 +138,7 @@ class ObjCProject(SampleImpl):
source_name=source_name,
version=self.version))
def create_library(self):
def create_library(self) -> None:
lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
uppercase_token = lowercase_token.upper()
function_name = lowercase_token[0:3] + '_func'

@ -77,7 +77,7 @@ class RustProject(SampleImpl):
self.name = options.name
self.version = options.version
def create_executable(self):
def create_executable(self) -> None:
lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
source_name = lowercase_token + '.rs'
open(source_name, 'w').write(hello_rust_template.format(project_name=self.name))
@ -86,7 +86,7 @@ class RustProject(SampleImpl):
source_name=source_name,
version=self.version))
def create_library(self):
def create_library(self) -> None:
lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
uppercase_token = lowercase_token.upper()
function_name = lowercase_token[0:3] + '_func'

@ -21,9 +21,11 @@ from mesonbuild.templates.objctemplates import ObjCProject
from mesonbuild.templates.cpptemplates import CppProject
from mesonbuild.templates.cstemplates import CSharpProject
from mesonbuild.templates.ctemplates import CProject
from mesonbuild.templates.sampleimpl import SampleImpl
import argparse
def sameple_generator(options):
def sameple_generator(options: argparse.Namespace) -> SampleImpl:
return {
'c': CProject,
'cpp': CppProject,

@ -14,8 +14,8 @@
class SampleImpl:
def create_executable(self):
def create_executable(self) -> None:
raise NotImplementedError('Sample implementation for "executable" not implemented!')
def create_library(self):
def create_library(self) -> None:
raise NotImplementedError('Sample implementation for "library" not implemented!')

@ -52,6 +52,6 @@ class WrapMode(Enum):
return self.name
@staticmethod
def from_string(mode_name: str):
def from_string(mode_name: str) -> 'WrapMode':
g = string_to_value[mode_name]
return WrapMode(g)
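from_string has to name its own class before that class has finished being defined, so the return type is written as the string 'WrapMode', a forward reference that type checkers resolve afterwards while the interpreter ignores it. A self-contained sketch of the same idiom on a made-up enum:

from enum import Enum

class Color(Enum):
    red = 1
    blue = 2

    @staticmethod
    def from_string(name: str) -> 'Color':
        # The quoted annotation is a forward reference: the class
        # object does not exist yet while its body is executing.
        return Color[name]

print(Color.from_string('red'))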

@ -83,7 +83,7 @@ def open_wrapdburl(urlstring: str) -> 'http.client.HTTPResponse':
url = whitelist_wrapdb(urlstring)
if has_ssl:
try:
return urllib.request.urlopen(urllib.parse.urlunparse(url), timeout=REQ_TIMEOUT)
return T.cast('http.client.HTTPResponse', urllib.request.urlopen(urllib.parse.urlunparse(url), timeout=REQ_TIMEOUT))
except urllib.error.URLError as excp:
raise WrapException('WrapDB connection failed to {} with error {}'.format(urlstring, excp))
@ -93,7 +93,7 @@ def open_wrapdburl(urlstring: str) -> 'http.client.HTTPResponse':
mlog.warning('SSL module not available in {}: WrapDB traffic not authenticated.'.format(sys.executable))
SSL_WARNING_PRINTED = True
try:
return urllib.request.urlopen(urllib.parse.urlunparse(nossl_url), timeout=REQ_TIMEOUT)
return T.cast('http.client.HTTPResponse', urllib.request.urlopen(urllib.parse.urlunparse(nossl_url), timeout=REQ_TIMEOUT))
except urllib.error.URLError as excp:
raise WrapException('WrapDB connection failed to {} with error {}'.format(urlstring, excp))
@ -107,7 +107,7 @@ class WrapNotFoundException(WrapException):
class PackageDefinition:
def __init__(self, fname: str):
self.filename = fname
self.type = None
self.type = None # type: T.Optional[str]
self.values = {} # type: T.Dict[str, str]
self.provided_deps = {} # type: T.Dict[str, T.Optional[str]]
self.provided_programs = [] # type: T.List[str]
@ -122,7 +122,7 @@ class PackageDefinition:
if os.path.dirname(self.directory):
raise WrapException('Directory key must be a name and not a path')
def parse_wrap(self, fname: str):
def parse_wrap(self, fname: str) -> None:
try:
self.config = configparser.ConfigParser(interpolation=None)
self.config.read(fname)
@ -131,7 +131,7 @@ class PackageDefinition:
self.parse_wrap_section()
self.parse_provide_section()
def parse_wrap_section(self):
def parse_wrap_section(self) -> None:
if len(self.config.sections()) < 1:
raise WrapException('Missing sections in {}'.format(self.basename))
self.wrap_section = self.config.sections()[0]
@ -141,19 +141,19 @@ class PackageDefinition:
self.type = self.wrap_section[5:]
self.values = dict(self.config[self.wrap_section])
def parse_provide_section(self):
def parse_provide_section(self) -> None:
if self.config.has_section('provide'):
for k, v in self.config['provide'].items():
if k == 'dependency_names':
# A comma separated list of dependency names that does not
# need a variable name
names = {n.strip(): None for n in v.split(',')}
self.provided_deps.update(names)
names_dict = {n.strip(): None for n in v.split(',')}
self.provided_deps.update(names_dict)
continue
if k == 'program_names':
# A comma separated list of program names
names = [n.strip() for n in v.split(',')]
self.provided_programs += names
names_list = [n.strip() for n in v.split(',')]
self.provided_programs += names_list
continue
if not v:
m = ('Empty dependency variable name for {!r} in {}. '
@ -177,7 +177,7 @@ def get_directory(subdir_root: str, packagename: str) -> str:
return packagename
class Resolver:
def __init__(self, subdir_root: str, wrap_mode=WrapMode.default):
def __init__(self, subdir_root: str, wrap_mode: WrapMode = WrapMode.default) -> None:
self.wrap_mode = wrap_mode
self.subdir_root = subdir_root
self.cachedir = os.path.join(self.subdir_root, 'packagecache')
@ -187,7 +187,7 @@ class Resolver:
self.provided_programs = {} # type: T.Dict[str, PackageDefinition]
self.load_wraps()
def load_wraps(self):
def load_wraps(self) -> None:
if not os.path.isdir(self.subdir_root):
return
root, dirs, files = next(os.walk(self.subdir_root))
@ -221,7 +221,7 @@ class Resolver:
raise WrapException(m.format(k, wrap.basename, prev_wrap.basename))
self.provided_programs[k] = wrap
def find_dep_provider(self, packagename: str):
def find_dep_provider(self, packagename: str) -> T.Optional[T.Union[str, T.List[str]]]:
# Return value is in the same format as fallback kwarg:
# ['subproject_name', 'variable_name'], or 'subproject_name'.
wrap = self.provided_deps.get(packagename)
@ -232,7 +232,7 @@ class Resolver:
return wrap.name
return None
def find_program_provider(self, names: T.List[str]):
def find_program_provider(self, names: T.List[str]) -> T.Optional[str]:
for name in names:
wrap = self.provided_programs.get(name)
if wrap:
@ -464,7 +464,7 @@ class Resolver:
if dhash != expected:
raise WrapException('Incorrect hash for {}:\n {} expected\n {} actual.'.format(what, expected, dhash))
def download(self, what: str, ofname: str, fallback=False) -> None:
def download(self, what: str, ofname: str, fallback: bool = False) -> None:
self.check_can_download()
srcurl = self.wrap.get(what + ('_fallback_url' if fallback else '_url'))
mlog.log('Downloading', mlog.bold(self.packagename), what, 'from', mlog.bold(srcurl))
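Two patterns from wrap.py stand out: urllib.request.urlopen() comes back loosely typed, so the result is pinned with T.cast('http.client.HTTPResponse', ...), and a variable that used to hold first a dict and then a list (names) is split into names_dict and names_list so each keeps one type. A minimal sketch of the cast idiom (the function and timeout are illustrative only):

import typing as T
import urllib.request

if T.TYPE_CHECKING:
    import http.client

def fetch(url: str) -> 'http.client.HTTPResponse':
    # T.cast() only informs the type checker; at runtime it returns its
    # second argument unchanged, so behaviour is identical.
    return T.cast('http.client.HTTPResponse',
                  urllib.request.urlopen(url, timeout=30))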

@ -16,6 +16,7 @@ import json
import sys, os
import configparser
import shutil
import typing as T
from glob import glob
@ -23,7 +24,10 @@ from .wrap import API_ROOT, open_wrapdburl
from .. import mesonlib
def add_arguments(parser):
if T.TYPE_CHECKING:
import argparse
def add_arguments(parser: 'argparse.ArgumentParser') -> None:
subparsers = parser.add_subparsers(title='Commands', dest='command')
subparsers.required = True
@ -53,26 +57,28 @@ def add_arguments(parser):
p.add_argument('project_path')
p.set_defaults(wrap_func=promote)
def get_result(urlstring):
def get_result(urlstring: str) -> T.Dict[str, T.Any]:
u = open_wrapdburl(urlstring)
data = u.read().decode('utf-8')
jd = json.loads(data)
if jd['output'] != 'ok':
print('Got bad output from server.', file=sys.stderr)
raise SystemExit(data)
assert isinstance(jd, dict)
return jd
def get_projectlist():
def get_projectlist() -> T.List[str]:
jd = get_result(API_ROOT + 'projects')
projects = jd['projects']
assert isinstance(projects, list)
return projects
def list_projects(options):
def list_projects(options: 'argparse.Namespace') -> None:
projects = get_projectlist()
for p in projects:
print(p)
def search(options):
def search(options: 'argparse.Namespace') -> None:
name = options.name
jd = get_result(API_ROOT + 'query/byname/' + name)
for p in jd['projects']:
@ -84,7 +90,7 @@ def get_latest_version(name: str) -> tuple:
revision = jd['revision']
return branch, revision
def install(options):
def install(options: 'argparse.Namespace') -> None:
name = options.name
if not os.path.isdir('subprojects'):
raise SystemExit('Subprojects dir not found. Run this script in your source root directory.')
@ -100,25 +106,25 @@ def install(options):
f.write(data)
print('Installed', name, 'branch', branch, 'revision', revision)
def parse_patch_url(patch_url):
def parse_patch_url(patch_url: str) -> T.Tuple[str, int]:
arr = patch_url.split('/')
return arr[-3], int(arr[-2])
def get_current_version(wrapfile):
def get_current_version(wrapfile: str) -> T.Tuple[str, int, str, str, str]:
cp = configparser.ConfigParser(interpolation=None)
cp.read(wrapfile)
cp = cp['wrap-file']
patch_url = cp['patch_url']
wrap_data = cp['wrap-file']
patch_url = wrap_data['patch_url']
branch, revision = parse_patch_url(patch_url)
return branch, revision, cp['directory'], cp['source_filename'], cp['patch_filename']
return branch, revision, wrap_data['directory'], wrap_data['source_filename'], wrap_data['patch_filename']
def update_wrap_file(wrapfile, name, new_branch, new_revision):
def update_wrap_file(wrapfile: str, name: str, new_branch: str, new_revision: str) -> None:
u = open_wrapdburl(API_ROOT + 'projects/{}/{}/{}/get_wrap'.format(name, new_branch, new_revision))
data = u.read()
with open(wrapfile, 'wb') as f:
f.write(data)
def update(options):
def update(options: 'argparse.Namespace') -> None:
name = options.name
if not os.path.isdir('subprojects'):
raise SystemExit('Subprojects dir not found. Run this command in your source root directory.')
@ -142,7 +148,7 @@ def update(options):
pass
print('Updated', name, 'to branch', new_branch, 'revision', new_revision)
def info(options):
def info(options: 'argparse.Namespace') -> None:
name = options.name
jd = get_result(API_ROOT + 'projects/' + name)
versions = jd['versions']
@ -152,7 +158,7 @@ def info(options):
for v in versions:
print(' ', v['branch'], v['revision'])
def do_promotion(from_path, spdir_name):
def do_promotion(from_path: str, spdir_name: str) -> None:
if os.path.isfile(from_path):
assert(from_path.endswith('.wrap'))
shutil.copy(from_path, spdir_name)
@ -163,7 +169,7 @@ def do_promotion(from_path, spdir_name):
raise SystemExit('Output dir {} already exists. Will not overwrite.'.format(outputdir))
shutil.copytree(from_path, outputdir, ignore=shutil.ignore_patterns('subprojects'))
def promote(options):
def promote(options: 'argparse.Namespace') -> None:
argument = options.project_path
spdir_name = 'subprojects'
sprojs = mesonlib.detect_subprojects(spdir_name)
@ -186,7 +192,7 @@ def promote(options):
raise SystemExit(1)
do_promotion(matches[0], spdir_name)
def status(options):
def status(options: 'argparse.Namespace') -> None:
print('Subproject status')
for w in glob('subprojects/*.wrap'):
name = os.path.basename(w)[:-5]
@ -205,6 +211,6 @@ def status(options):
else:
print('', name, 'not up to date. Have {} {}, but {} {} is available.'.format(current_branch, current_revision, latest_branch, latest_revision))
def run(options):
def run(options: 'argparse.Namespace') -> int:
options.wrap_func(options)
return 0
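wraptool.py only needs argparse for its annotations, so the import sits under if T.TYPE_CHECKING: and every argparse.Namespace annotation is quoted; nothing changes at runtime. A short sketch of that guard (the handler is hypothetical):

import typing as T

if T.TYPE_CHECKING:
    # Only evaluated by type checkers; never imported at runtime.
    import argparse

def handle(options: 'argparse.Namespace') -> int:
    # The quoted annotation means argparse is not needed when this runs.
    print(options)
    return 0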

@ -0,0 +1,65 @@
#!/usr/bin/env python3
import sys
import subprocess
import argparse
from pathlib import Path
import typing as T
modules = [
# fully typed submodules
'mesonbuild/ast',
'mesonbuild/scripts',
'mesonbuild/wrap',
# specific files
'mesonbuild/arglist.py',
# 'mesonbuild/compilers/mixins/intel.py',
# 'mesonbuild/coredata.py',
'mesonbuild/dependencies/boost.py',
'mesonbuild/dependencies/hdf5.py',
'mesonbuild/dependencies/mpi.py',
'mesonbuild/envconfig.py',
'mesonbuild/interpreterbase.py',
'mesonbuild/mcompile.py',
'mesonbuild/mesonlib.py',
'mesonbuild/minit.py',
'mesonbuild/mintro.py',
'mesonbuild/mlog.py',
'mesonbuild/modules/fs.py',
'mesonbuild/mparser.py',
'mesonbuild/msetup.py',
'mesonbuild/mtest.py',
'run_mypy.py',
'tools'
]
def check_mypy() -> None:
try:
import mypy
except ImportError:
print('Failed import mypy')
sys.exit(1)
def main() -> int:
check_mypy()
root = Path(__file__).absolute().parent
args = [] # type: T.List[str]
parser = argparse.ArgumentParser(description='Process some integers.')
parser.add_argument('-p', '--pretty', action='store_true', help='pretty print mypy errors')
opts = parser.parse_args()
if opts.pretty:
args.append('--pretty')
p = subprocess.run(
[sys.executable, '-m', 'mypy'] + args + modules,
cwd=root,
)
return p.returncode
if __name__ == '__main__':
sys.exit(main())
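The new run_mypy.py is the gate for this work: it keeps an explicit allow-list of modules that are expected to pass (the commented-out entries presumably are not clean yet) and simply invokes python -m mypy on that list from the repository root, so it can be run as, for example, python3 run_mypy.py --pretty.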

@ -216,7 +216,7 @@ class TestDef:
return '{} ({})'.format(self.path.as_posix(), self.name)
return self.path.as_posix()
def __lt__(self, other: T.Any) -> bool:
def __lt__(self, other: object) -> bool:
if isinstance(other, TestDef):
# None is not sortable, so replace it with an empty string
s_id = int(self.path.name.split(' ')[0])
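The comparison dunders here (and in boost.py below) swap T.Any for object as the parameter type and rely on isinstance() to narrow it before comparing. A minimal sketch of that shape on an invented class:

class Version:
    def __init__(self, n: int) -> None:
        self.n = n

    def __lt__(self, other: object) -> bool:
        # 'object' instead of T.Any forces an isinstance() check before use.
        if isinstance(other, Version):
            return self.n < other.n
        return NotImplemented  # let Python try the reflected comparison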

@ -42,7 +42,7 @@ NINJA_CMD = None
# test that we run.
if 'CI' in os.environ:
NINJA_1_9_OR_NEWER = True
NINJA_CMD = 'ninja'
NINJA_CMD = ['ninja']
else:
# Look for 1.9 to see if https://github.com/ninja-build/ninja/issues/1219
# is fixed
@ -221,7 +221,7 @@ def get_backend_commands(backend, debug=False):
test_cmd = cmd + ['-target', 'RUN_TESTS']
elif backend is Backend.ninja:
global NINJA_CMD
cmd = [NINJA_CMD, '-w', 'dupbuild=err', '-d', 'explain']
cmd = NINJA_CMD + ['-w', 'dupbuild=err', '-d', 'explain']
if debug:
cmd += ['-v']
clean_cmd = cmd + ['clean']

@ -48,12 +48,12 @@ class BoostLibrary():
self.single = sorted(set(single))
self.multi = sorted(set(multi))
def __lt__(self, other: T.Any) -> T.Union[bool, 'NotImplemented']:
def __lt__(self, other: object) -> T.Union[bool, 'NotImplemented']:
if isinstance(other, BoostLibrary):
return self.name < other.name
return NotImplemented
def __eq__(self, other: T.Any) -> T.Union[bool, 'NotImplemented']:
def __eq__(self, other: object) -> T.Union[bool, 'NotImplemented']:
if isinstance(other, BoostLibrary):
return self.name == other.name
elif isinstance(other, str):
@ -71,7 +71,7 @@ class BoostModule():
self.desc = desc
self.libs = libs
def __lt__(self, other: T.Any) -> T.Union[bool, 'NotImplemented']:
def __lt__(self, other: object) -> T.Union[bool, 'NotImplemented']:
if isinstance(other, BoostModule):
return self.key < other.key
return NotImplemented

@ -6,14 +6,14 @@ assert(os.getcwd() == '/home/jpakkane')
from glob import glob
def purge(fname):
def purge(fname: str) -> None:
if not os.path.exists(fname):
return
if os.path.isdir(fname):
shutil.rmtree(fname)
os.unlink(fname)
def update():
def update() -> None:
webdir = 'mesonweb'
repodir = 'mesonwebbuild'
docdir = os.path.join(repodir, 'docs')

@ -34,7 +34,7 @@ class Statement:
self.args = args
class Lexer:
def __init__(self):
def __init__(self) -> None:
self.token_specification = [
# Need to be sorted longest to shortest.
('ignore', re.compile(r'[ \t]')),
@ -87,11 +87,11 @@ class Lexer:
raise ValueError('Lexer got confused line %d column %d' % (lineno, col))
class Parser:
def __init__(self, code: str):
def __init__(self, code: str) -> None:
self.stream = Lexer().lex(code)
self.getsym()
def getsym(self):
def getsym(self) -> None:
try:
self.current = next(self.stream)
except StopIteration:
@ -118,8 +118,8 @@ class Parser:
self.expect('rparen')
return Statement(cur.value, args)
def arguments(self) -> list:
args = []
def arguments(self) -> T.List[T.Union[Token, T.Any]]:
args = [] # type: T.List[T.Union[Token, T.Any]]
if self.accept('lparen'):
args.append(self.arguments())
self.expect('rparen')
@ -139,7 +139,7 @@ class Parser:
while not self.accept('eof'):
yield(self.statement())
def token_or_group(arg):
def token_or_group(arg: T.Union[Token, T.List[Token]]) -> str:
if isinstance(arg, Token):
return ' ' + arg.value
elif isinstance(arg, list):
@ -148,6 +148,7 @@ def token_or_group(arg):
line += ' ' + token_or_group(a)
line += ' )'
return line
raise RuntimeError('Conversion error in token_or_group')
class Converter:
ignored_funcs = {'cmake_minimum_required': True,
@ -183,7 +184,7 @@ class Converter:
return res[0]
return ''
def write_entry(self, outfile: T.TextIO, t: Statement):
def write_entry(self, outfile: T.TextIO, t: Statement) -> None:
if t.name in Converter.ignored_funcs:
return
preincrement = 0
@ -274,7 +275,7 @@ class Converter:
outfile.write('\n')
self.indent_level += postincrement
def convert(self, subdir: Path = None):
def convert(self, subdir: Path = None) -> None:
if not subdir:
subdir = self.cmake_root
cfile = Path(subdir).expanduser() / 'CMakeLists.txt'
@ -297,7 +298,7 @@ class Converter:
if subdir == self.cmake_root and len(self.options) > 0:
self.write_options()
def write_options(self):
def write_options(self) -> None:
filename = self.cmake_root / 'meson_options.txt'
with filename.open('w') as optfile:
for o in self.options:

@ -53,7 +53,7 @@ def get_entries() -> T.List[T.Tuple[int, str]]:
entries.sort()
return entries
def replace_source(sourcefile: str, replacements: T.List[T.Tuple[str, str]]):
def replace_source(sourcefile: str, replacements: T.List[T.Tuple[str, str]]) -> None:
with open(sourcefile, 'r') as f:
contents = f.read()
for old_name, new_name in replacements:
@ -61,7 +61,7 @@ def replace_source(sourcefile: str, replacements: T.List[T.Tuple[str, str]]):
with open(sourcefile, 'w') as f:
f.write(contents)
def condense(dirname: str):
def condense(dirname: str) -> None:
curdir = os.getcwd()
os.chdir(dirname)
entries = get_entries()

@ -31,21 +31,21 @@ from pathlib import Path
PathLike = T.Union[Path,str]
def _get_meson_output(root_dir: Path, args: T.List):
def _get_meson_output(root_dir: Path, args: T.List) -> str:
env = os.environ.copy()
env['COLUMNS'] = '80'
return subprocess.run([str(sys.executable), str(root_dir/'meson.py')] + args, check=True, capture_output=True, text=True, env=env).stdout.strip()
def get_commands_data(root_dir: Path):
def get_commands_data(root_dir: Path) -> T.Dict[str, T.Any]:
usage_start_pattern = re.compile(r'^usage: ', re.MULTILINE)
positional_start_pattern = re.compile(r'^positional arguments:[\t ]*[\r\n]+', re.MULTILINE)
options_start_pattern = re.compile(r'^optional arguments:[\t ]*[\r\n]+', re.MULTILINE)
commands_start_pattern = re.compile(r'^[A-Za-z ]*[Cc]ommands:[\t ]*[\r\n]+', re.MULTILINE)
def get_next_start(iterators, end):
def get_next_start(iterators: T.Sequence[T.Any], end: T.Optional[int]) -> int:
return next((i.start() for i in iterators if i), end)
def normalize_text(text):
def normalize_text(text: str) -> str:
# clean up formatting
out = text
out = re.sub(r'\r\n', r'\r', out, flags=re.MULTILINE) # replace newlines with a linux EOL
@ -53,7 +53,7 @@ def get_commands_data(root_dir: Path):
out = re.sub(r'(?:^\n+|\n+$)', '', out) # remove trailing empty lines
return out
def parse_cmd(cmd):
def parse_cmd(cmd: str) -> T.Dict[str, str]:
cmd_len = len(cmd)
usage = usage_start_pattern.search(cmd)
positionals = positional_start_pattern.search(cmd)
@ -72,7 +72,7 @@ def get_commands_data(root_dir: Path):
'arguments': normalize_text(cmd[arguments_start:cmd_len]),
}
def clean_dir_arguments(text):
def clean_dir_arguments(text: str) -> str:
# Remove platform specific defaults
args = [
'prefix',
@ -127,7 +127,7 @@ def regenerate_docs(output_dir: PathLike,
dummy_output_file: T.Optional[PathLike]) -> None:
if not output_dir:
raise ValueError(f'Output directory value is not set')
output_dir = Path(output_dir).resolve()
output_dir.mkdir(parents=True, exist_ok=True)
@ -143,7 +143,7 @@ if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Generate meson docs')
parser.add_argument('--output-dir', required=True)
parser.add_argument('--dummy-output-file', type=str)
args = parser.parse_args()
regenerate_docs(output_dir=args.output_dir,
