treewide: automatic rewriting of all comment-style type annotations

Performed using https://github.com/ilevkivskyi/com2ann

This has no actual effect on the codebase, as type checkers (still)
support both, and a negligible effect on runtime performance, since
`from __future__ import annotations` ameliorates that. Technically, the
bytecode would be bigger for non-function-local annotations, but we have
many of those either way.
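
For illustration, a minimal sketch of the two spellings involved (the
assignment mirrors the arglist.py hunk below; the import line is added here
only to make the snippet self-contained):

```
import typing as T

# Before: comment-style (pre-PEP 526) annotation
prepend_prefixes = ()  # type: T.Tuple[str, ...]

# After: variable annotation, as produced by com2ann
prepend_prefixes: T.Tuple[str, ...] = ()
```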

So if it doesn't really matter, why do a large-scale refactor? Simple:
because people keep wanting to, but it keeps getting nickel-and-dimed. If
we're going to do this we might as well do it consistently in one shot,
using tooling that guarantees repeatability and correctness.

Repeat with:

```
com2ann mesonbuild/
```
Eli Schwartz 1 year ago
parent de1cc0b02b
commit 90ce084144
 28  mesonbuild/arglist.py
 16  mesonbuild/ast/interpreter.py
 20  mesonbuild/ast/introspection.py
  2  mesonbuild/ast/postprocess.py
 12  mesonbuild/ast/printer.py
  4  mesonbuild/build.py
 42  mesonbuild/cmake/common.py
 10  mesonbuild/cmake/executor.py
  4  mesonbuild/cmake/fileapi.py
  4  mesonbuild/cmake/generator.py
  6  mesonbuild/cmake/toolchain.py
 36  mesonbuild/cmake/traceparser.py
  4  mesonbuild/compilers/cs.py
  2  mesonbuild/compilers/detect.py
 18  mesonbuild/compilers/mixins/arm.py
  4  mesonbuild/compilers/mixins/pgi.py
 14  mesonbuild/compilers/mixins/xc16.py
  4  mesonbuild/compilers/rust.py
  4  mesonbuild/compilers/swift.py
  2  mesonbuild/dependencies/base.py
 38  mesonbuild/dependencies/boost.py
  4  mesonbuild/dependencies/cmake.py
  4  mesonbuild/dependencies/hdf5.py
  2  mesonbuild/dependencies/mpi.py
  2  mesonbuild/envconfig.py
  2  mesonbuild/interpreter/interpreter.py
  4  mesonbuild/interpreterbase/interpreterbase.py
  4  mesonbuild/linkers/detect.py
  8  mesonbuild/mcompile.py
 26  mesonbuild/mintro.py
  8  mesonbuild/mlog.py
  2  mesonbuild/modules/cmake.py
  4  mesonbuild/mparser.py
 56  mesonbuild/mtest.py
  2  mesonbuild/scripts/cmake_run_ctgt.py
  6  mesonbuild/scripts/depfixer.py
  2  mesonbuild/utils/universal.py
 18  mesonbuild/wrap/wrap.py

38 files changed

--- a/mesonbuild/arglist.py
+++ b/mesonbuild/arglist.py
@@ -82,23 +82,23 @@ class CompilerArgs(T.MutableSequence[str]):
     '''
     # Arg prefixes that override by prepending instead of appending
-    prepend_prefixes = () # type: T.Tuple[str, ...]
+    prepend_prefixes: T.Tuple[str, ...] = ()
     # Arg prefixes and args that must be de-duped by returning 2
-    dedup2_prefixes = () # type: T.Tuple[str, ...]
+    dedup2_prefixes: T.Tuple[str, ...] = ()
-    dedup2_suffixes = () # type: T.Tuple[str, ...]
+    dedup2_suffixes: T.Tuple[str, ...] = ()
-    dedup2_args = () # type: T.Tuple[str, ...]
+    dedup2_args: T.Tuple[str, ...] = ()
     # Arg prefixes and args that must be de-duped by returning 1
     #
     # NOTE: not thorough. A list of potential corner cases can be found in
     # https://github.com/mesonbuild/meson/pull/4593#pullrequestreview-182016038
-    dedup1_prefixes = () # type: T.Tuple[str, ...]
+    dedup1_prefixes: T.Tuple[str, ...] = ()
     dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a')
     # Match a .so of the form path/to/libfoo.so.0.1.0
     # Only UNIX shared libraries require this. Others have a fixed extension.
     dedup1_regex = re.compile(r'([\/\\]|\A)lib.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$')
-    dedup1_args = () # type: T.Tuple[str, ...]
+    dedup1_args: T.Tuple[str, ...] = ()
     # In generate_link() we add external libs without de-dup, but we must
     # *always* de-dup these because they're special arguments to the linker
     # TODO: these should probably move too
@@ -107,19 +107,19 @@ class CompilerArgs(T.MutableSequence[str]):
     def __init__(self, compiler: T.Union['Compiler', 'StaticLinker'],
                  iterable: T.Optional[T.Iterable[str]] = None):
         self.compiler = compiler
-        self._container = list(iterable) if iterable is not None else [] # type: T.List[str]
+        self._container: T.List[str] = list(iterable) if iterable is not None else []
-        self.pre = collections.deque() # type: T.Deque[str]
+        self.pre: T.Deque[str] = collections.deque()
-        self.post = collections.deque() # type: T.Deque[str]
+        self.post: T.Deque[str] = collections.deque()
     # Flush the saved pre and post list into the _container list
     #
     # This correctly deduplicates the entries after _can_dedup definition
     # Note: This function is designed to work without delete operations, as deletions are worsening the performance a lot.
     def flush_pre_post(self) -> None:
-        new = [] # type: T.List[str]
+        new: T.List[str] = []
-        pre_flush_set = set() # type: T.Set[str]
+        pre_flush_set: T.Set[str] = set()
-        post_flush = collections.deque() # type: T.Deque[str]
+        post_flush: T.Deque[str] = collections.deque()
-        post_flush_set = set() # type: T.Set[str]
+        post_flush_set: T.Set[str] = set()
         #The two lists are here walked from the front to the back, in order to not need removals for deduplication
         for a in self.pre:
@@ -285,7 +285,7 @@ class CompilerArgs(T.MutableSequence[str]):
         Add two CompilerArgs while taking into account overriding of arguments
        and while preserving the order of arguments as much as possible
         '''
-        tmp_pre = collections.deque() # type: T.Deque[str]
+        tmp_pre: T.Deque[str] = collections.deque()
         if not isinstance(args, collections.abc.Iterable):
             raise TypeError(f'can only concatenate Iterable[str] (not "{args}") to CompilerArgs')
         for arg in args:

--- a/mesonbuild/ast/interpreter.py
+++ b/mesonbuild/ast/interpreter.py
@@ -97,10 +97,10 @@ class AstInterpreter(InterpreterBase):
     def __init__(self, source_root: str, subdir: str, subproject: str, visitors: T.Optional[T.List[AstVisitor]] = None):
         super().__init__(source_root, subdir, subproject)
         self.visitors = visitors if visitors is not None else []
-        self.processed_buildfiles = set() # type: T.Set[str]
+        self.processed_buildfiles: T.Set[str] = set()
-        self.assignments = {} # type: T.Dict[str, BaseNode]
+        self.assignments: T.Dict[str, BaseNode] = {}
-        self.assign_vals = {} # type: T.Dict[str, T.Any]
+        self.assign_vals: T.Dict[str, T.Any] = {}
-        self.reverse_assignment = {} # type: T.Dict[str, BaseNode]
+        self.reverse_assignment: T.Dict[str, BaseNode] = {}
         self.funcs.update({'project': self.func_do_nothing,
                            'test': self.func_do_nothing,
                            'benchmark': self.func_do_nothing,
@@ -274,7 +274,7 @@ class AstInterpreter(InterpreterBase):
                          duplicate_key_error: T.Optional[str] = None,
                          ) -> T.Tuple[T.List[TYPE_nvar], TYPE_nkwargs]:
         if isinstance(args, ArgumentNode):
-            kwargs = {} # type: T.Dict[str, TYPE_nvar]
+            kwargs: T.Dict[str, TYPE_nvar] = {}
             for key, val in args.kwargs.items():
                 kwargs[key_resolver(key)] = val
             if args.incorrect_order():
@@ -383,7 +383,7 @@ class AstInterpreter(InterpreterBase):
         elif isinstance(node, MethodNode):
             src = quick_resolve(node.source_object)
             margs = self.flatten_args(node.args.arguments, include_unknown_args, id_loop_detect)
-            mkwargs = {} # type: T.Dict[str, TYPE_nvar]
+            mkwargs: T.Dict[str, TYPE_nvar] = {}
             try:
                 if isinstance(src, str):
                     result = StringHolder(src, T.cast('Interpreter', self)).method_call(node.name, margs, mkwargs)
@@ -402,7 +402,7 @@ class AstInterpreter(InterpreterBase):
             if isinstance(result, BaseNode):
                 result = self.resolve_node(result, include_unknown_args, id_loop_detect)
             elif isinstance(result, list):
-                new_res = [] # type: T.List[TYPE_nvar]
+                new_res: T.List[TYPE_nvar] = []
                 for i in result:
                     if isinstance(i, BaseNode):
                         resolved = self.resolve_node(i, include_unknown_args, id_loop_detect)
@@ -421,7 +421,7 @@ class AstInterpreter(InterpreterBase):
         else:
             args = [args_raw]
-        flattened_args = [] # type: T.List[TYPE_nvar]
+        flattened_args: T.List[TYPE_nvar] = []
         # Resolve the contents of args
         for i in args:

--- a/mesonbuild/ast/introspection.py
+++ b/mesonbuild/ast/introspection.py
@@ -47,8 +47,8 @@ class IntrospectionHelper(argparse.Namespace):
     def __init__(self, cross_file: str):
         super().__init__()
         self.cross_file = cross_file
-        self.native_file = None # type: str
+        self.native_file: str = None
-        self.cmd_line_options = {} # type: T.Dict[str, str]
+        self.cmd_line_options: T.Dict[str, str] = {}
     def __eq__(self, other: object) -> bool:
         return NotImplemented
@@ -78,10 +78,10 @@ class IntrospectionInterpreter(AstInterpreter):
         self.coredata = self.environment.get_coredata()
         self.backend = backend
         self.default_options = {OptionKey('backend'): self.backend}
-        self.project_data = {} # type: T.Dict[str, T.Any]
+        self.project_data: T.Dict[str, T.Any] = {}
-        self.targets = [] # type: T.List[T.Dict[str, T.Any]]
+        self.targets: T.List[T.Dict[str, T.Any]] = []
-        self.dependencies = [] # type: T.List[T.Dict[str, T.Any]]
+        self.dependencies: T.List[T.Dict[str, T.Any]] = []
-        self.project_node = None # type: BaseNode
+        self.project_node: BaseNode = None
         self.funcs.update({
             'add_languages': self.func_add_languages,
@@ -170,7 +170,7 @@ class IntrospectionInterpreter(AstInterpreter):
         self._add_languages(args, required, for_machine)
     def _add_languages(self, raw_langs: T.List[TYPE_nvar], required: bool, for_machine: MachineChoice) -> None:
-        langs = [] # type: T.List[str]
+        langs: T.List[str] = []
         for l in self.flatten_args(raw_langs):
             if isinstance(l, str):
                 langs.append(l)
@@ -238,7 +238,7 @@ class IntrospectionInterpreter(AstInterpreter):
         kwargs = self.flatten_kwargs(kwargs_raw, True)
         def traverse_nodes(inqueue: T.List[BaseNode]) -> T.List[BaseNode]:
-            res = [] # type: T.List[BaseNode]
+            res: T.List[BaseNode] = []
             while inqueue:
                 curr = inqueue.pop(0)
                 arg_node = None
@@ -277,8 +277,8 @@ class IntrospectionInterpreter(AstInterpreter):
         kwargs_reduced = {k: v.value if isinstance(v, ElementaryNode) else v for k, v in kwargs_reduced.items()}
         kwargs_reduced = {k: v for k, v in kwargs_reduced.items() if not isinstance(v, BaseNode)}
         for_machine = MachineChoice.HOST
-        objects = [] # type: T.List[T.Any]
+        objects: T.List[T.Any] = []
-        empty_sources = [] # type: T.List[T.Any]
+        empty_sources: T.List[T.Any] = []
         # Passing the unresolved sources list causes errors
         kwargs_reduced['_allow_no_sources'] = True
         target = targetclass(name, self.subdir, self.subproject, for_machine, empty_sources, [], objects,

--- a/mesonbuild/ast/postprocess.py
+++ b/mesonbuild/ast/postprocess.py
@@ -80,7 +80,7 @@ class AstIndentationGenerator(AstVisitor):
 class AstIDGenerator(AstVisitor):
     def __init__(self) -> None:
-        self.counter = {} # type: T.Dict[str, int]
+        self.counter: T.Dict[str, int] = {}
     def visit_default_func(self, node: mparser.BaseNode) -> None:
         name = type(node).__name__

--- a/mesonbuild/ast/printer.py
+++ b/mesonbuild/ast/printer.py
@@ -240,12 +240,12 @@ class AstPrinter(AstVisitor):
 class AstJSONPrinter(AstVisitor):
     def __init__(self) -> None:
-        self.result = {} # type: T.Dict[str, T.Any]
+        self.result: T.Dict[str, T.Any] = {}
         self.current = self.result
     def _accept(self, key: str, node: mparser.BaseNode) -> None:
         old = self.current
-        data = {} # type: T.Dict[str, T.Any]
+        data: T.Dict[str, T.Any] = {}
         self.current = data
         node.accept(self)
         self.current = old
@@ -253,7 +253,7 @@ class AstJSONPrinter(AstVisitor):
     def _accept_list(self, key: str, nodes: T.Sequence[mparser.BaseNode]) -> None:
         old = self.current
-        datalist = [] # type: T.List[T.Dict[str, T.Any]]
+        datalist: T.List[T.Dict[str, T.Any]] = []
         for i in nodes:
             self.current = {}
             i.accept(self)
@@ -388,10 +388,10 @@ class AstJSONPrinter(AstVisitor):
     def visit_ArgumentNode(self, node: mparser.ArgumentNode) -> None:
         self._accept_list('positional', node.arguments)
-        kwargs_list = [] # type: T.List[T.Dict[str, T.Dict[str, T.Any]]]
+        kwargs_list: T.List[T.Dict[str, T.Dict[str, T.Any]]] = []
         for key, val in node.kwargs.items():
-            key_res = {} # type: T.Dict[str, T.Any]
+            key_res: T.Dict[str, T.Any] = {}
-            val_res = {} # type: T.Dict[str, T.Any]
+            val_res: T.Dict[str, T.Any] = {}
             self._raw_accept(key, key_res)
             self._raw_accept(val, val_res)
             kwargs_list += [{'key': key_res, 'val': val_res}]

--- a/mesonbuild/build.py
+++ b/mesonbuild/build.py
@@ -723,7 +723,7 @@ class BuildTarget(Target):
                  kwargs):
         super().__init__(name, subdir, subproject, True, for_machine, environment, install=kwargs.get('install', False))
         self.all_compilers = compilers
-        self.compilers = OrderedDict() # type: OrderedDict[str, Compiler]
+        self.compilers: OrderedDict[str, Compiler] = OrderedDict()
         self.objects: T.List[ObjectTypes] = []
         self.structured_sources = structured_sources
         self.external_deps: T.List[dependencies.Dependency] = []
@@ -1539,7 +1539,7 @@ class BuildTarget(Target):
         See: https://github.com/mesonbuild/meson/issues/1653
         '''
-        langs = [] # type: T.List[str]
+        langs: T.List[str] = []
         # Check if any of the external libraries were written in this language
         for dep in self.external_deps:

--- a/mesonbuild/cmake/common.py
+++ b/mesonbuild/cmake/common.py
@@ -123,7 +123,7 @@ def cmake_get_generator_args(env: 'Environment') -> T.List[str]:
     return ['-G', backend_generator_map[backend_name]]
 def cmake_defines_to_args(raw: T.List[T.Dict[str, TYPE_var]], permissive: bool = False) -> T.List[str]:
-    res = [] # type: T.List[str]
+    res: T.List[str] = []
     for i in raw:
         for key, val in i.items():
@@ -144,7 +144,7 @@ def cmake_defines_to_args(raw: T.List[T.Dict[str, TYPE_var]], permissive: bool =
 # TODO: this function will become obsolete once the `cmake_args` kwarg is dropped
 def check_cmake_args(args: T.List[str]) -> T.List[str]:
-    res = [] # type: T.List[str]
+    res: T.List[str] = []
     dis = ['-D' + x for x in blacklist_cmake_defs]
     assert dis # Ensure that dis is not empty.
     for i in args:
@@ -166,14 +166,14 @@ class CMakeInclude:
 class CMakeFileGroup:
     def __init__(self, data: T.Dict[str, T.Any]) -> None:
-        self.defines = data.get('defines', '') # type: str
+        self.defines: str = data.get('defines', '')
         self.flags = _flags_to_list(data.get('compileFlags', ''))
-        self.is_generated = data.get('isGenerated', False) # type: bool
+        self.is_generated: bool = data.get('isGenerated', False)
-        self.language = data.get('language', 'C') # type: str
+        self.language: str = data.get('language', 'C')
         self.sources = [Path(x) for x in data.get('sources', [])]
         # Fix the include directories
-        self.includes = [] # type: T.List[CMakeInclude]
+        self.includes: T.List[CMakeInclude] = []
         for i in data.get('includePath', []):
             if isinstance(i, dict) and 'path' in i:
                 isSystem = i.get('isSystem', False)
@@ -199,18 +199,18 @@ class CMakeTarget:
         self.artifacts = [Path(x) for x in data.get('artifacts', [])]
         self.src_dir = Path(data.get('sourceDirectory', ''))
         self.build_dir = Path(data.get('buildDirectory', ''))
-        self.name = data.get('name', '') # type: str
+        self.name: str = data.get('name', '')
-        self.full_name = data.get('fullName', '') # type: str
+        self.full_name: str = data.get('fullName', '')
-        self.install = data.get('hasInstallRule', False) # type: bool
+        self.install: bool = data.get('hasInstallRule', False)
         self.install_paths = [Path(x) for x in set(data.get('installPaths', []))]
-        self.link_lang = data.get('linkerLanguage', '') # type: str
+        self.link_lang: str = data.get('linkerLanguage', '')
         self.link_libraries = _flags_to_list(data.get('linkLibraries', ''))
         self.link_flags = _flags_to_list(data.get('linkFlags', ''))
         self.link_lang_flags = _flags_to_list(data.get('linkLanguageFlags', ''))
         # self.link_path = Path(data.get('linkPath', ''))
-        self.type = data.get('type', 'EXECUTABLE') # type: str
+        self.type: str = data.get('type', 'EXECUTABLE')
         # self.is_generator_provided = data.get('isGeneratorProvided', False) # type: bool
-        self.files = [] # type: T.List[CMakeFileGroup]
+        self.files: T.List[CMakeFileGroup] = []
         for i in data.get('fileGroups', []):
             self.files += [CMakeFileGroup(i)]
@@ -239,8 +239,8 @@ class CMakeProject:
     def __init__(self, data: T.Dict[str, T.Any]) -> None:
         self.src_dir = Path(data.get('sourceDirectory', ''))
         self.build_dir = Path(data.get('buildDirectory', ''))
-        self.name = data.get('name', '') # type: str
+        self.name: str = data.get('name', '')
-        self.targets = [] # type: T.List[CMakeTarget]
+        self.targets: T.List[CMakeTarget] = []
         for i in data.get('targets', []):
             self.targets += [CMakeTarget(i)]
@@ -256,8 +256,8 @@ class CMakeProject:
 class CMakeConfiguration:
     def __init__(self, data: T.Dict[str, T.Any]) -> None:
-        self.name = data.get('name', '') # type: str
+        self.name: str = data.get('name', '')
-        self.projects = [] # type: T.List[CMakeProject]
+        self.projects: T.List[CMakeProject] = []
         for i in data.get('projects', []):
             self.projects += [CMakeProject(i)]
@@ -270,9 +270,9 @@ class CMakeConfiguration:
 class SingleTargetOptions:
     def __init__(self) -> None:
-        self.opts = {} # type: T.Dict[str, str]
+        self.opts: T.Dict[str, str] = {}
-        self.lang_args = {} # type: T.Dict[str, T.List[str]]
+        self.lang_args: T.Dict[str, T.List[str]] = {}
-        self.link_args = [] # type: T.List[str]
+        self.link_args: T.List[str] = []
         self.install = 'preserve'
     def set_opt(self, opt: str, val: str) -> None:
@@ -290,7 +290,7 @@ class SingleTargetOptions:
         self.install = 'true' if install else 'false'
     def get_override_options(self, initial: T.List[str]) -> T.List[str]:
-        res = [] # type: T.List[str]
+        res: T.List[str] = []
         for i in initial:
             opt = i[:i.find('=')]
             if opt not in self.opts:
@@ -312,7 +312,7 @@ class SingleTargetOptions:
 class TargetOptions:
     def __init__(self) -> None:
         self.global_options = SingleTargetOptions()
-        self.target_options = {} # type: T.Dict[str, SingleTargetOptions]
+        self.target_options: T.Dict[str, SingleTargetOptions] = {}
     def __getitem__(self, tgt: str) -> SingleTargetOptions:
         if tgt not in self.target_options:

--- a/mesonbuild/cmake/executor.py
+++ b/mesonbuild/cmake/executor.py
@@ -39,9 +39,9 @@ if T.TYPE_CHECKING:
 class CMakeExecutor:
     # The class's copy of the CMake path. Avoids having to search for it
     # multiple times in the same Meson invocation.
-    class_cmakebin = PerMachine(None, None) # type: PerMachine[T.Optional[ExternalProgram]]
+    class_cmakebin: PerMachine[T.Optional[ExternalProgram]] = PerMachine(None, None)
-    class_cmakevers = PerMachine(None, None) # type: PerMachine[T.Optional[str]]
+    class_cmakevers: PerMachine[T.Optional[str]] = PerMachine(None, None)
-    class_cmake_cache = {} # type: T.Dict[T.Any, TYPE_result]
+    class_cmake_cache: T.Dict[T.Any, TYPE_result] = {}
     def __init__(self, environment: 'Environment', version: str, for_machine: MachineChoice, silent: bool = False):
         self.min_version = version
@@ -50,8 +50,8 @@ class CMakeExecutor:
         self.cmakebin, self.cmakevers = self.find_cmake_binary(self.environment, silent=silent)
         self.always_capture_stderr = True
         self.print_cmout = False
-        self.prefix_paths = [] # type: T.List[str]
+        self.prefix_paths: T.List[str] = []
-        self.extra_cmake_args = [] # type: T.List[str]
+        self.extra_cmake_args: T.List[str] = []
         if self.cmakebin is None:
             return

--- a/mesonbuild/cmake/fileapi.py
+++ b/mesonbuild/cmake/fileapi.py
@@ -28,8 +28,8 @@ class CMakeFileAPI:
         self.api_base_dir = self.build_dir / '.cmake' / 'api' / 'v1'
         self.request_dir = self.api_base_dir / 'query' / 'client-meson'
         self.reply_dir = self.api_base_dir / 'reply'
-        self.cmake_sources = [] # type: T.List[CMakeBuildFile]
+        self.cmake_sources: T.List[CMakeBuildFile] = []
-        self.cmake_configurations = [] # type: T.List[CMakeConfiguration]
+        self.cmake_configurations: T.List[CMakeConfiguration] = []
         self.kind_resolver_map = {
             'codemodel': self._parse_codemodel,
             'cmakeFiles': self._parse_cmakeFiles,

--- a/mesonbuild/cmake/generator.py
+++ b/mesonbuild/cmake/generator.py
@@ -98,7 +98,7 @@ def parse_generator_expressions(
             return ';'.join([x for x in tgt.properties['IMPORTED_LOCATION'] if x])
         return ''
-    supported = {
+    supported: T.Dict[str, T.Callable[[str], str]] = {
         # Boolean functions
         'BOOL': lambda x: '0' if x.upper() in {'', '0', 'FALSE', 'OFF', 'N', 'NO', 'IGNORE', 'NOTFOUND'} or x.endswith('-NOTFOUND') else '1',
         'AND': lambda x: '1' if all(y == '1' for y in x.split(',')) else '0',
@@ -140,7 +140,7 @@ def parse_generator_expressions(
         'TARGET_NAME_IF_EXISTS': lambda x: x if x in trace.targets else '',
         'TARGET_PROPERTY': target_property,
         'TARGET_FILE': target_file,
-    } # type: T.Dict[str, T.Callable[[str], str]]
+    }
     # Recursively evaluate generator expressions
     def eval_generator_expressions() -> str:

--- a/mesonbuild/cmake/toolchain.py
+++ b/mesonbuild/cmake/toolchain.py
@@ -144,7 +144,7 @@ class CMakeToolchain:
         return res
     def get_defaults(self) -> T.Dict[str, T.List[str]]:
-        defaults = {} # type: T.Dict[str, T.List[str]]
+        defaults: T.Dict[str, T.List[str]] = {}
         # Do nothing if the user does not want automatic defaults
         if not self.properties.get_cmake_defaults():
@@ -153,13 +153,13 @@ class CMakeToolchain:
         # Best effort to map the meson system name to CMAKE_SYSTEM_NAME, which
         # is not trivial since CMake lacks a list of all supported
         # CMAKE_SYSTEM_NAME values.
-        SYSTEM_MAP = {
+        SYSTEM_MAP: T.Dict[str, str] = {
             'android': 'Android',
             'linux': 'Linux',
             'windows': 'Windows',
             'freebsd': 'FreeBSD',
             'darwin': 'Darwin',
-        } # type: T.Dict[str, str]
+        }
         # Only set these in a cross build. Otherwise CMake will trip up in native
         # builds and thing they are cross (which causes TRY_RUN() to break)

--- a/mesonbuild/cmake/traceparser.py
+++ b/mesonbuild/cmake/traceparser.py
@@ -67,9 +67,9 @@ class CMakeTarget:
         self.properties = properties
         self.imported = imported
         self.tline = tline
-        self.depends = [] # type: T.List[str]
+        self.depends: T.List[str] = []
-        self.current_bin_dir = None # type: T.Optional[Path]
+        self.current_bin_dir: T.Optional[Path] = None
-        self.current_src_dir = None # type: T.Optional[Path]
+        self.current_src_dir: T.Optional[Path] = None
     def __repr__(self) -> str:
         s = 'CMake TARGET:\n -- name: {}\n -- type: {}\n -- imported: {}\n -- properties: {{\n{} }}\n -- tline: {}'
@@ -89,10 +89,10 @@ class CMakeTarget:
 class CMakeGeneratorTarget(CMakeTarget):
     def __init__(self, name: str) -> None:
         super().__init__(name, 'CUSTOM', {})
-        self.outputs = [] # type: T.List[Path]
+        self.outputs: T.List[Path] = []
-        self._outputs_str = [] # type: T.List[str]
+        self._outputs_str: T.List[str] = []
-        self.command = [] # type: T.List[T.List[str]]
+        self.command: T.List[T.List[str]] = []
-        self.working_dir = None # type: T.Optional[Path]
+        self.working_dir: T.Optional[Path] = None
 class CMakeTraceParser:
     def __init__(self, cmake_version: str, build_dir: Path, env: 'Environment', permissive: bool = True) -> None:
@@ -101,10 +101,10 @@ class CMakeTraceParser:
         self.targets: T.Dict[str, CMakeTarget] = {}
         self.cache: T.Dict[str, CMakeCacheEntry] = {}
-        self.explicit_headers = set() # type: T.Set[Path]
+        self.explicit_headers: T.Set[Path] = set()
         # T.List of targes that were added with add_custom_command to generate files
-        self.custom_targets = [] # type: T.List[CMakeGeneratorTarget]
+        self.custom_targets: T.List[CMakeGeneratorTarget] = []
         self.env = env
         self.permissive = permissive
@@ -118,11 +118,11 @@ class CMakeTraceParser:
         # State for delayed command execution. Delayed command execution is realised
         # with a custom CMake file that overrides some functions and adds some
         # introspection information to the trace.
-        self.delayed_commands = [] # type: T.List[str]
+        self.delayed_commands: T.List[str] = []
-        self.stored_commands = [] # type: T.List[CMakeTraceLine]
+        self.stored_commands: T.List[CMakeTraceLine] = []
         # All supported functions
-        self.functions = {
+        self.functions: T.Dict[str, T.Callable[[CMakeTraceLine], None]] = {
             'set': self._cmake_set,
             'unset': self._cmake_unset,
             'add_executable': self._cmake_add_executable,
@@ -145,7 +145,7 @@ class CMakeTraceParser:
             'meson_ps_execute_delayed_calls': self._meson_ps_execute_delayed_calls,
             'meson_ps_reload_vars': self._meson_ps_reload_vars,
             'meson_ps_disabled_function': self._meson_ps_disabled_function,
-        } # type: T.Dict[str, T.Callable[[CMakeTraceLine], None]]
+        }
         if version_compare(self.cmake_version, '<3.17.0'):
             mlog.deprecation(textwrap.dedent(f'''\
@@ -591,10 +591,10 @@ class CMakeTraceParser:
         # With the JSON output format, introduced in CMake 3.17, spaces are
         # handled properly and we don't have to do either options
-        arglist = [] # type: T.List[T.Tuple[str, T.List[str]]]
+        arglist: T.List[T.Tuple[str, T.List[str]]] = []
         if self.trace_format == 'human':
             name = args.pop(0)
-            values = [] # type: T.List[str]
+            values: T.List[str] = []
             prop_regex = re.compile(r'^[A-Z_]+$')
             for a in args:
                 if prop_regex.match(a):
@@ -768,7 +768,7 @@ class CMakeTraceParser:
     def _flatten_args(self, args: T.List[str]) -> T.List[str]:
         # Split lists in arguments
-        res = [] # type: T.List[str]
+        res: T.List[str] = []
         for i in args:
             res += i.split(';')
         return res
@@ -783,8 +783,8 @@ class CMakeTraceParser:
         reg_start = re.compile(r'^([A-Za-z]:)?/(.*/)*[^./]+$')
         reg_end = re.compile(r'^.*\.[a-zA-Z]+$')
-        fixed_list = [] # type: T.List[str]
+        fixed_list: T.List[str] = []
-        curr_str = None # type: T.Optional[str]
+        curr_str: T.Optional[str] = None
         path_found = False
         for i in broken_list:

--- a/mesonbuild/compilers/cs.py
+++ b/mesonbuild/compilers/cs.py
@@ -28,7 +28,7 @@ if T.TYPE_CHECKING:
     from ..environment import Environment
     from ..mesonlib import MachineChoice
-cs_optimization_args = {
+cs_optimization_args: T.Dict[str, T.List[str]] = {
     'plain': [],
     '0': [],
     'g': [],
@@ -36,7 +36,7 @@ cs_optimization_args = {
     '2': ['-optimize+'],
     '3': ['-optimize+'],
     's': ['-optimize+'],
-} # type: T.Dict[str, T.List[str]]
+}
 class CsCompiler(BasicLinkerIsCompilerMixin, Compiler):

--- a/mesonbuild/compilers/detect.py
+++ b/mesonbuild/compilers/detect.py
@@ -969,7 +969,7 @@ def detect_vala_compiler(env: 'Environment', for_machine: MachineChoice) -> Comp
 def detect_rust_compiler(env: 'Environment', for_machine: MachineChoice) -> RustCompiler:
     from . import rust
     from ..linkers import linkers
-    popen_exceptions = {} # type: T.Dict[str, Exception]
+    popen_exceptions: T.Dict[str, Exception] = {}
     compilers, _, exe_wrap = _get_compilers(env, 'rust', for_machine)
     is_cross = env.is_cross_build(for_machine)
     info = env.machines[for_machine]

--- a/mesonbuild/compilers/mixins/arm.py
+++ b/mesonbuild/compilers/mixins/arm.py
@@ -34,16 +34,16 @@ else:
     # do). This gives up DRYer type checking, with no runtime impact
     Compiler = object
-arm_buildtype_args = {
+arm_buildtype_args: T.Dict[str, T.List[str]] = {
     'plain': [],
     'debug': [],
     'debugoptimized': [],
     'release': [],
     'minsize': [],
     'custom': [],
-} # type: T.Dict[str, T.List[str]]
+}
-arm_optimization_args = {
+arm_optimization_args: T.Dict[str, T.List[str]] = {
     'plain': [],
     '0': ['-O0'],
     'g': ['-g'],
@@ -51,18 +51,18 @@ arm_optimization_args = {
     '2': [], # Compiler defaults to -O2
     '3': ['-O3', '-Otime'],
     's': ['-O3'], # Compiler defaults to -Ospace
-} # type: T.Dict[str, T.List[str]]
+}
-armclang_buildtype_args = {
+armclang_buildtype_args: T.Dict[str, T.List[str]] = {
     'plain': [],
     'debug': [],
     'debugoptimized': [],
     'release': [],
     'minsize': [],
     'custom': [],
-} # type: T.Dict[str, T.List[str]]
+}
-armclang_optimization_args = {
+armclang_optimization_args: T.Dict[str, T.List[str]] = {
     'plain': [],
     '0': [], # Compiler defaults to -O0
     'g': ['-g'],
@@ -70,7 +70,7 @@ armclang_optimization_args = {
     '2': ['-O2'],
     '3': ['-O3'],
     's': ['-Oz']
-} # type: T.Dict[str, T.List[str]]
+}
 class ArmCompiler(Compiler):
@@ -82,7 +82,7 @@ class ArmCompiler(Compiler):
     def __init__(self) -> None:
         if not self.is_cross:
             raise mesonlib.EnvironmentException('armcc supports only cross-compilation.')
-        default_warn_args = [] # type: T.List[str]
+        default_warn_args: T.List[str] = []
         self.warn_args = {'0': [],
                           '1': default_warn_args,
                           '2': default_warn_args + [],

--- a/mesonbuild/compilers/mixins/pgi.py
+++ b/mesonbuild/compilers/mixins/pgi.py
@@ -32,14 +32,14 @@ else:
     # do). This gives up DRYer type checking, with no runtime impact
     Compiler = object
-pgi_buildtype_args = {
+pgi_buildtype_args: T.Dict[str, T.List[str]] = {
     'plain': [],
     'debug': [],
     'debugoptimized': [],
     'release': [],
     'minsize': [],
     'custom': [],
-} # type: T.Dict[str, T.List[str]]
+}
 class PGICompiler(Compiler):

--- a/mesonbuild/compilers/mixins/xc16.py
+++ b/mesonbuild/compilers/mixins/xc16.py
@@ -31,16 +31,16 @@ else:
     # do). This gives up DRYer type checking, with no runtime impact
     Compiler = object
-xc16_buildtype_args = {
+xc16_buildtype_args: T.Dict[str, T.List[str]] = {
     'plain': [],
     'debug': [],
     'debugoptimized': [],
     'release': [],
     'minsize': [],
     'custom': [],
-} # type: T.Dict[str, T.List[str]]
+}
-xc16_optimization_args = {
+xc16_optimization_args: T.Dict[str, T.List[str]] = {
     'plain': [],
     '0': ['-O0'],
     'g': ['-O0'],
@@ -48,12 +48,12 @@ xc16_optimization_args = {
     '2': ['-O2'],
     '3': ['-O3'],
     's': ['-Os']
-} # type: T.Dict[str, T.List[str]]
+}
-xc16_debug_args = {
+xc16_debug_args: T.Dict[bool, T.List[str]] = {
     False: [],
     True: []
-} # type: T.Dict[bool, T.List[str]]
+}
 class Xc16Compiler(Compiler):
@@ -66,7 +66,7 @@ class Xc16Compiler(Compiler):
         # Assembly
         self.can_compile_suffixes.add('s')
         self.can_compile_suffixes.add('sx')
-        default_warn_args = [] # type: T.List[str]
+        default_warn_args: T.List[str] = []
         self.warn_args = {'0': [],
                           '1': default_warn_args,
                           '2': default_warn_args + [],

--- a/mesonbuild/compilers/rust.py
+++ b/mesonbuild/compilers/rust.py
@@ -32,7 +32,7 @@ if T.TYPE_CHECKING:
     from ..dependencies import Dependency
-rust_optimization_args = {
+rust_optimization_args: T.Dict[str, T.List[str]] = {
     'plain': [],
     '0': [],
     'g': ['-C', 'opt-level=0'],
@@ -40,7 +40,7 @@ rust_optimization_args = {
     '2': ['-C', 'opt-level=2'],
     '3': ['-C', 'opt-level=3'],
     's': ['-C', 'opt-level=s'],
-} # type: T.Dict[str, T.List[str]]
+}
 class RustCompiler(Compiler):

--- a/mesonbuild/compilers/swift.py
+++ b/mesonbuild/compilers/swift.py
@@ -26,7 +26,7 @@ if T.TYPE_CHECKING:
     from ..linkers.linkers import DynamicLinker
     from ..mesonlib import MachineChoice
-swift_optimization_args = {
+swift_optimization_args: T.Dict[str, T.List[str]] = {
     'plain': [],
     '0': [],
     'g': [],
@@ -34,7 +34,7 @@ swift_optimization_args = {
     '2': ['-O'],
     '3': ['-O'],
     's': ['-O'],
-} # type: T.Dict[str, T.List[str]]
+}
 class SwiftCompiler(Compiler):

--- a/mesonbuild/dependencies/base.py
+++ b/mesonbuild/dependencies/base.py
@@ -566,7 +566,7 @@ def strip_system_includedirs(environment: 'Environment', for_machine: MachineCho
     return [i for i in include_args if i not in exclude]
 def process_method_kw(possible: T.Iterable[DependencyMethods], kwargs: T.Dict[str, T.Any]) -> T.List[DependencyMethods]:
-    method = kwargs.get('method', 'auto') # type: T.Union[DependencyMethods, str]
+    method: T.Union[DependencyMethods, str] = kwargs.get('method', 'auto')
     if isinstance(method, DependencyMethods):
         return [method]
     # TODO: try/except?

--- a/mesonbuild/dependencies/boost.py
+++ b/mesonbuild/dependencies/boost.py
@@ -248,7 +248,7 @@ class BoostLibraryFile():
         # Handle the boost_python naming madness.
         # See https://github.com/mesonbuild/meson/issues/4788 for some distro
         # specific naming variations.
-        other_tags = [] # type: T.List[str]
+        other_tags: T.List[str] = []
         # Split the current modname into the base name and the version
         m_cur = BoostLibraryFile.reg_python_mod_split.match(self.mod_name)
@@ -331,7 +331,7 @@ class BoostLibraryFile():
         return True
     def get_compiler_args(self) -> T.List[str]:
-        args = [] # type: T.List[str]
+        args: T.List[str] = []
         if self.mod_name in boost_libraries:
             libdef = boost_libraries[self.mod_name]
             if self.static:
@@ -355,19 +355,19 @@ class BoostDependency(SystemDependency):
         self.debug = buildtype.startswith('debug')
         self.multithreading = kwargs.get('threading', 'multi') == 'multi'
-        self.boost_root = None # type: T.Optional[Path]
+        self.boost_root: T.Optional[Path] = None
         self.explicit_static = 'static' in kwargs
         # Extract and validate modules
-        self.modules = mesonlib.extract_as_list(kwargs, 'modules') # type: T.List[str]
+        self.modules: T.List[str] = mesonlib.extract_as_list(kwargs, 'modules')
         for i in self.modules:
             if not isinstance(i, str):
                 raise DependencyException('Boost module argument is not a string.')
             if i.startswith('boost_'):
                 raise DependencyException('Boost modules must be passed without the boost_ prefix')
-        self.modules_found = [] # type: T.List[str]
+        self.modules_found: T.List[str] = []
-        self.modules_missing = [] # type: T.List[str]
+        self.modules_missing: T.List[str] = []
         # Do we need threads?
         if 'thread' in self.modules:
@@ -450,7 +450,7 @@ class BoostDependency(SystemDependency):
            mlog.debug(' - potential include dirs: {}'.format([x.path.as_posix() for x in inc_dirs]))
            # 2. Find all boost libraries
-           libs = [] # type: T.List[BoostLibraryFile]
+           libs: T.List[BoostLibraryFile] = []
            for i in lib_dirs:
                libs = self.detect_libraries(i)
                if libs:
@@ -471,8 +471,8 @@ class BoostDependency(SystemDependency):
                mlog.debug(f' - {j}')
            # 3. Select the libraries matching the requested modules
-           not_found = [] # type: T.List[str]
+           not_found: T.List[str] = []
-           selected_modules = [] # type: T.List[BoostLibraryFile]
+           selected_modules: T.List[BoostLibraryFile] = []
            for mod in modules:
                found = False
                for l in f_libs:
@@ -485,8 +485,8 @@ class BoostDependency(SystemDependency):
            # log the result
            mlog.debug(' - found:')
-           comp_args = [] # type: T.List[str]
+           comp_args: T.List[str] = []
-           link_args = [] # type: T.List[str]
+           link_args: T.List[str] = []
            for j in selected_modules:
                c_args = j.get_compiler_args()
                l_args = j.get_link_args()
@@ -524,7 +524,7 @@ class BoostDependency(SystemDependency):
         return False
     def detect_inc_dirs(self, root: Path) -> T.List[BoostIncludeDir]:
-        candidates = [] # type: T.List[Path]
+        candidates: T.List[Path] = []
         inc_root = root / 'include'
         candidates += [root / 'boost']
@@ -555,8 +555,8 @@ class BoostDependency(SystemDependency):
         # No system include paths were found --> fall back to manually looking
         # for library dirs in root
-        dirs = [] # type: T.List[Path]
+        dirs: T.List[Path] = []
-        subdirs = [] # type: T.List[Path]
+        subdirs: T.List[Path] = []
         for i in root.iterdir():
             if i.is_dir() and i.name.startswith('lib'):
                 dirs += [i]
@@ -578,7 +578,7 @@ class BoostDependency(SystemDependency):
         raw_list = dirs + subdirs
         no_arch = [x for x in raw_list if not any(y in x.name for y in arch_list_32 + arch_list_64)]
-        matching_arch = [] # type: T.List[Path]
+        matching_arch: T.List[Path] = []
         if '32' in self.arch:
             matching_arch = [x for x in raw_list if any(y in x.name for y in arch_list_32)]
         elif '64' in self.arch:
@@ -624,7 +624,7 @@ class BoostDependency(SystemDependency):
         return libs
     def detect_libraries(self, libdir: Path) -> T.List[BoostLibraryFile]:
-        libs = set() # type: T.Set[BoostLibraryFile]
+        libs: T.Set[BoostLibraryFile] = set()
         for i in libdir.iterdir():
             if not i.is_file():
                 continue
@@ -655,7 +655,7 @@ class BoostDependency(SystemDependency):
             self.is_found = self.run_check([boost_inc_dir], [lib_dir])
     def detect_roots(self) -> None:
-        roots = [] # type: T.List[Path]
+        roots: T.List[Path] = []
         # Try getting the BOOST_ROOT from a boost.pc if it exists. This primarily
         # allows BoostDependency to find boost from Conan. See #5438
@@ -686,7 +686,7 @@ class BoostDependency(SystemDependency):
             # Where boost prebuilt binaries are
             local_boost = Path('C:/local')
-            candidates = [] # type: T.List[Path]
+            candidates: T.List[Path] = []
             if prog_files.is_dir():
                 candidates += [*prog_files.iterdir()]
             if local_boost.is_dir():
@@ -694,7 +694,7 @@ class BoostDependency(SystemDependency):
             roots += [x for x in candidates if x.name.lower().startswith('boost') and x.is_dir()]
         else:
-            tmp = [] # type: T.List[Path]
+            tmp: T.List[Path] = []
             # Add some default system paths
             tmp += [Path('/opt/local')]

--- a/mesonbuild/dependencies/cmake.py
+++ b/mesonbuild/dependencies/cmake.py
@@ -80,7 +80,7 @@ class CMakeDependency(ExternalDependency):
     def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None, force_use_global_compilers: bool = False) -> None:
         # Gather a list of all languages to support
-        self.language_list = [] # type: T.List[str]
+        self.language_list: T.List[str] = []
         if language is None or force_use_global_compilers:
             compilers = None
             if kwargs.get('native', False):
@@ -312,7 +312,7 @@ class CMakeDependency(ExternalDependency):
                 return True
         # Check PATH
-        system_env = [] # type: T.List[str]
+        system_env: T.List[str] = []
         for i in os.environ.get('PATH', '').split(os.pathsep):
             if i.endswith('/bin') or i.endswith('\\bin'):
                 i = i[:-4]

--- a/mesonbuild/dependencies/hdf5.py
+++ b/mesonbuild/dependencies/hdf5.py
@@ -48,7 +48,7 @@ class HDF5PkgConfigDependency(PkgConfigDependency):
             return
         # some broken pkgconfig don't actually list the full path to the needed includes
-        newinc = [] # type: T.List[str]
+        newinc: T.List[str] = []
         for arg in self.compile_args:
             if arg.startswith('-I'):
                 stem = 'static' if self.static else 'shared'
@@ -56,7 +56,7 @@ class HDF5PkgConfigDependency(PkgConfigDependency):
                     newinc.append('-I' + str(Path(arg[2:]) / stem))
         self.compile_args += newinc
-        link_args = [] # type: T.List[str]
+        link_args: T.List[str] = []
         for larg in self.get_link_args():
             lpath = Path(larg)
             # some pkg-config hdf5.pc (e.g. Ubuntu) don't include the commonly-used HL HDF5 libraries,

--- a/mesonbuild/dependencies/mpi.py
+++ b/mesonbuild/dependencies/mpi.py
@@ -74,7 +74,7 @@ def mpi_factory(env: 'Environment',
             elif language == 'fortran':
                 tool_names = [os.environ.get('I_MPI_F90'), 'mpiifort']
-            cls = IntelMPIConfigToolDependency # type: T.Type[ConfigToolDependency]
+            cls: T.Type[ConfigToolDependency] = IntelMPIConfigToolDependency
         else: # OpenMPI, which doesn't work with intel
             #
             # We try the environment variables for the tools first, but then

--- a/mesonbuild/envconfig.py
+++ b/mesonbuild/envconfig.py
@@ -460,7 +460,7 @@ class BinaryTable:
 class CMakeVariables:
     def __init__(self, variables: T.Optional[T.Dict[str, T.Any]] = None) -> None:
         variables = variables or {}
-        self.variables = {} # type: T.Dict[str, T.List[str]]
+        self.variables: T.Dict[str, T.List[str]] = {}
         for key, value in variables.items():
             value = mesonlib.listify(value)

--- a/mesonbuild/interpreter/interpreter.py
+++ b/mesonbuild/interpreter/interpreter.py
@@ -295,7 +295,7 @@ class Interpreter(InterpreterBase, HoldableObject):
         self.sanity_check_ast()
         self.builtin.update({'meson': MesonMain(self.build, self)})
         self.generators: T.List[build.Generator] = []
-        self.processed_buildfiles = set() # type: T.Set[str]
+        self.processed_buildfiles: T.Set[str] = set()
         self.project_args_frozen = False
         self.global_args_frozen = False # implies self.project_args_frozen
         self.subprojects: T.Dict[str, SubprojectHolder] = {}

--- a/mesonbuild/interpreterbase/interpreterbase.py
+++ b/mesonbuild/interpreterbase/interpreterbase.py
@@ -93,12 +93,12 @@ class InterpreterBase:
         self.current_lineno = -1
         # Current node set during a function call. This can be used as location
         # when printing a warning message during a method call.
-        self.current_node = None # type: mparser.BaseNode
+        self.current_node: mparser.BaseNode = None
         # This is set to `version_string` when this statement is evaluated:
         # meson.version().compare_version(version_string)
         # If it was part of a if-clause, it is used to temporally override the
         # current meson version target within that if-block.
-        self.tmp_meson_version = None # type: T.Optional[str]
+        self.tmp_meson_version: T.Optional[str] = None
     def handle_meson_version_from_ast(self, strict: bool = True) -> None:
         # do nothing in an AST interpreter

--- a/mesonbuild/linkers/detect.py
+++ b/mesonbuild/linkers/detect.py
@@ -61,7 +61,7 @@ def guess_win_linker(env: 'Environment', compiler: T.List[str], comp_class: T.Ty
     check_args += env.coredata.get_external_link_args(for_machine, comp_class.language)
-    override = [] # type: T.List[str]
+    override: T.List[str] = []
     value = env.lookup_binary_entry(for_machine, comp_class.language + '_ld')
     if value is not None:
         override = comp_class.use_linker_args(value[0], comp_version)
@@ -138,7 +138,7 @@ def guess_nix_linker(env: 'Environment', compiler: T.List[str], comp_class: T.Ty
     else:
         check_args = comp_class.LINKER_PREFIX + ['--version'] + extra_args
-    override = [] # type: T.List[str]
+    override: T.List[str] = []
     value = env.lookup_binary_entry(for_machine, comp_class.language + '_ld')
     if value is not None:
         override = comp_class.use_linker_args(value[0], comp_version)

--- a/mesonbuild/mcompile.py
+++ b/mesonbuild/mcompile.py
@@ -54,7 +54,7 @@ def parse_introspect_data(builddir: Path) -> T.Dict[str, T.List[dict]]:
     with path_to_intro.open(encoding='utf-8') as f:
         schema = json.load(f)
-    parsed_data = defaultdict(list) # type: T.Dict[str, T.List[dict]]
+    parsed_data: T.Dict[str, T.List[dict]] = defaultdict(list)
     for target in schema:
         parsed_data[target['name']] += [target]
     return parsed_data
@@ -100,7 +100,7 @@ def get_target_from_intro_data(target: ParsedTargetName, builddir: Path, introsp
         raise MesonException(f'Can\'t invoke target `{target.full_name}`: target not found')
     intro_targets = introspect_data[target.name]
-    found_targets = [] # type: T.List[T.Dict[str, T.Any]]
+    found_targets: T.List[T.Dict[str, T.Any]] = []
     resolved_bdir = builddir.resolve()
@@ -337,8 +337,8 @@ def run(options: 'argparse.Namespace') -> int:
     if setup_vsenv(need_vsenv):
         mlog.log(mlog.green('INFO:'), 'automatically activated MSVC compiler environment')
-    cmd = [] # type: T.List[str]
+    cmd: T.List[str] = []
-    env = None # type: T.Optional[T.Dict[str, str]]
+    env: T.Optional[T.Dict[str, str]] = None
     backend = cdata.get_option(mesonlib.OptionKey('backend'))
     assert isinstance(backend, str)

@ -132,7 +132,7 @@ def list_installed(installdata: backends.InstallData) -> T.Dict[str, str]:
return res
def list_install_plan(installdata: backends.InstallData) -> T.Dict[str, T.Dict[str, T.Dict[str, T.Optional[str]]]]:
-plan = {
+plan: T.Dict[str, T.Dict[str, T.Dict[str, T.Optional[str]]]] = {
'targets': {
os.path.join(installdata.build_dir, target.fname): {
'destination': target.out_name,
@ -141,7 +141,7 @@ def list_install_plan(installdata: backends.InstallData) -> T.Dict[str, T.Dict[s
}
for target in installdata.targets
},
-} # type: T.Dict[str, T.Dict[str, T.Dict[str, T.Optional[str]]]]
+}
for key, data_list in {
'data': installdata.data,
'man': installdata.man,
@ -178,13 +178,13 @@ def get_target_dir(coredata: cdata.CoreData, subdir: str) -> str:
return subdir
def list_targets_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]:
-tlist = [] # type: T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]
+tlist: T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]] = []
root_dir = Path(intr.source_root)
def nodes_to_paths(node_list: T.List[BaseNode]) -> T.List[Path]:
-res = [] # type: T.List[Path]
+res: T.List[Path] = []
for n in node_list:
-args = [] # type: T.List[BaseNode]
+args: T.List[BaseNode] = []
if isinstance(n, FunctionNode):
args = list(n.args.arguments)
if n.func_name in BUILD_TARGET_FUNCTIONS:
@ -231,7 +231,7 @@ def list_targets_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[st
return tlist
def list_targets(builddata: build.Build, installdata: backends.InstallData, backend: backends.Backend) -> T.List[T.Any]:
-tlist = [] # type: T.List[T.Any]
+tlist: T.List[T.Any] = []
build_dir = builddata.environment.get_build_dir()
src_dir = builddata.environment.get_source_dir()
@ -290,7 +290,7 @@ def list_buildoptions_from_source(intr: IntrospectionInterpreter) -> T.List[T.Di
return list_buildoptions(intr.coredata, subprojects)
def list_buildoptions(coredata: cdata.CoreData, subprojects: T.Optional[T.List[str]] = None) -> T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]]:
-optlist = [] # type: T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]]
+optlist: T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]] = []
subprojects = subprojects or []
dir_option_names = set(cdata.BUILTIN_DIR_OPTIONS)
@ -379,7 +379,7 @@ def list_compilers(coredata: cdata.CoreData) -> T.Dict[str, T.Dict[str, T.Dict[s
return compilers
def list_deps_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[str, T.Union[str, bool]]]:
-result = [] # type: T.List[T.Dict[str, T.Union[str, bool]]]
+result: T.List[T.Dict[str, T.Union[str, bool]]] = []
for i in intr.dependencies:
keys = [
'name',
@ -436,9 +436,9 @@ def list_deps(coredata: cdata.CoreData, backend: backends.Backend) -> T.List[T.D
return list(result.values())
def get_test_list(testdata: T.List[backends.TestSerialisation]) -> T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]:
-result = [] # type: T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]
+result: T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]] = []
for t in testdata:
-to = {} # type: T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]
+to: T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]] = {}
if isinstance(t.fname, str):
fname = [t.fname]
else:
@ -541,7 +541,7 @@ def run(options: argparse.Namespace) -> int:
if options.builddir is not None:
datadir = os.path.join(options.builddir, datadir)
indent = 4 if options.indent else None
-results = [] # type: T.List[T.Tuple[str, T.Union[dict, T.List[T.Any]]]]
+results: T.List[T.Tuple[str, T.Union[dict, T.List[T.Any]]]] = []
sourcedir = '.' if options.builddir == 'meson.build' else options.builddir[:-11]
intro_types = get_meson_introspection_types(sourcedir=sourcedir)
@ -594,7 +594,7 @@ def run(options: argparse.Namespace) -> int:
return print_results(options, results, indent)
-updated_introspection_files = [] # type: T.List[str]
+updated_introspection_files: T.List[str] = []
def write_intro_info(intro_info: T.Sequence[T.Tuple[str, T.Union[dict, T.List[T.Any]]]], info_dir: str) -> None:
for kind, data in intro_info:
@ -609,7 +609,7 @@ def write_intro_info(intro_info: T.Sequence[T.Tuple[str, T.Union[dict, T.List[T.
def generate_introspection_file(builddata: build.Build, backend: backends.Backend) -> None:
coredata = builddata.environment.get_coredata()
intro_types = get_meson_introspection_types(coredata=coredata, builddata=builddata, backend=backend)
-intro_info = [] # type: T.List[T.Tuple[str, T.Union[dict, T.List[T.Any]]]]
+intro_info: T.List[T.Tuple[str, T.Union[dict, T.List[T.Any]]]] = []
for key, val in intro_types.items():
if not val.func:
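The `list_install_plan` hunk above is the least obvious case of the rewrite: when the `# type:` comment trailed the closing brace of a multi-line literal, the annotation moves up to the assignment target itself. A small sketch of the same shape, with hypothetical keys standing in for the real install data:

```python
import typing as T

# Before the rewrite, the comment sat on the closing brace of the literal:
#
#     plan = {
#         'targets': {},
#     }  # type: T.Dict[str, T.Dict[str, str]]
#
# After the rewrite, the annotation is attached to the assigned name:
plan: T.Dict[str, T.Dict[str, str]] = {
    'targets': {},
}
```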

@ -60,7 +60,7 @@ def _windows_ansi() -> bool:
return bool(kernel.SetConsoleMode(stdout, mode.value | 0x4) or os.environ.get('ANSICON'))
def colorize_console() -> bool:
-_colorize_console = getattr(sys.stdout, 'colorize_console', None) # type: bool
+_colorize_console: bool = getattr(sys.stdout, 'colorize_console', None)
if _colorize_console is not None:
return _colorize_console
@ -201,7 +201,7 @@ class _Logger:
self.log_fatal_warnings = fatal_warnings
def process_markup(self, args: T.Sequence[TV_Loggable], keep: bool, display_timestamp: bool = True) -> T.List[str]:
-arr = [] # type: T.List[str]
+arr: T.List[str] = []
if self.log_timestamp_start is not None and display_timestamp:
arr = ['[{:.3f}]'.format(time.monotonic() - self.log_timestamp_start)]
for arg in args:
@ -312,7 +312,7 @@ class _Logger:
# The typing requirements here are non-obvious. Lists are invariant,
# therefore T.List[A] and T.List[T.Union[A, B]] are not able to be joined
if severity is _Severity.NOTICE:
-label = [bold('NOTICE:')] # type: TV_LoggableList
+label: TV_LoggableList = [bold('NOTICE:')]
elif severity is _Severity.WARNING:
label = [yellow('WARNING:')]
elif severity is _Severity.ERROR:
@ -373,7 +373,7 @@ class _Logger:
if prefix is None:
prefix = red('ERROR:')
self.log()
-args = [] # type: T.List[T.Union[AnsiDecorator, str]]
+args: T.List[T.Union[AnsiDecorator, str]] = []
if all(getattr(e, a, None) is not None for a in ['file', 'lineno', 'colno']):
# Mypy doesn't follow hasattr, and it's pretty easy to visually inspect
# that this is correct, so we'll just ignore it.

@ -188,7 +188,7 @@ class CMakeSubproject(ModuleObject):
class CMakeSubprojectOptions(ModuleObject):
def __init__(self) -> None:
super().__init__()
-self.cmake_options = [] # type: T.List[str]
+self.cmake_options: T.List[str] = []
self.target_options = TargetOptions()
self.methods.update(

@ -163,7 +163,7 @@ class Lexer:
col = 0
while loc < len(self.code):
matched = False
-value = None # type: T.Union[str, bool, int]
+value: T.Union[str, bool, int] = None
for (tid, reg) in self.token_specification:
mo = reg.match(self.code, loc)
if mo:
@ -610,7 +610,7 @@ class Parser:
def __init__(self, code: str, filename: str):
self.lexer = Lexer(code)
self.stream = self.lexer.lex(filename)
-self.current = Token('eof', '', 0, 0, 0, (0, 0), None) # type: Token
+self.current: Token = Token('eof', '', 0, 0, 0, (0, 0), None)
self.getsym()
self.in_ternary = False

@ -521,9 +521,9 @@ class ConsoleLogger(TestLogger):
RTRI = "\u25B6 "
def __init__(self) -> None:
-self.running_tests = OrderedSet() # type: OrderedSet['TestRun']
-self.progress_test = None # type: T.Optional['TestRun']
-self.progress_task = None # type: T.Optional[asyncio.Future]
+self.running_tests: OrderedSet['TestRun'] = OrderedSet()
+self.progress_test: T.Optional['TestRun'] = None
+self.progress_task: T.Optional[asyncio.Future] = None
self.max_left_width = 0
self.stop = False
# TODO: before 3.10 this cannot be created immediately, because
@ -806,7 +806,7 @@ class JunitBuilder(TestLogger):
self.filename = filename
self.root = et.Element(
'testsuites', tests='0', errors='0', failures='0')
-self.suites = {} # type: T.Dict[str, et.Element]
+self.suites: T.Dict[str, et.Element] = {}
def log(self, harness: 'TestHarness', test: 'TestRun') -> None:
"""Log a single test case."""
@ -924,24 +924,24 @@ class TestRun:
name: str, timeout: T.Optional[int], is_parallel: bool, verbose: bool):
self.res = TestResult.PENDING
self.test = test
-self._num = None # type: T.Optional[int]
+self._num: T.Optional[int] = None
self.name = name
self.timeout = timeout
-self.results = [] # type: T.List[TAPParser.Test]
-self.returncode = None # type: T.Optional[int]
-self.starttime = None # type: T.Optional[float]
-self.duration = None # type: T.Optional[float]
+self.results: T.List[TAPParser.Test] = []
+self.returncode: T.Optional[int] = None
+self.starttime: T.Optional[float] = None
+self.duration: T.Optional[float] = None
self.stdo = ''
self.stde = ''
self.additional_error = ''
-self.cmd = None # type: T.Optional[T.List[str]]
+self.cmd: T.Optional[T.List[str]] = None
self.env = test_env
self.should_fail = test.should_fail
self.project = test.project_name
-self.junit = None # type: T.Optional[et.ElementTree]
+self.junit: T.Optional[et.ElementTree] = None
self.is_parallel = is_parallel
self.verbose = verbose
-self.warnings = [] # type: T.List[str]
+self.warnings: T.List[str] = []
def start(self, cmd: T.List[str]) -> None:
self.res = TestResult.RUNNING
@ -1088,7 +1088,7 @@ class TestRunTAP(TestRun):
async def parse(self, harness: 'TestHarness', lines: T.AsyncIterator[str]) -> None:
res = None
-warnings = [] # type: T.List[TAPParser.UnknownLine]
+warnings: T.List[TAPParser.UnknownLine] = []
version = 12
async for i in TAPParser().parse_async(lines):
@ -1288,8 +1288,8 @@ class TestSubprocess:
self.stdo_task: T.Optional[asyncio.Task[None]] = None
self.stde_task: T.Optional[asyncio.Task[None]] = None
self.postwait_fn = postwait_fn
-self.all_futures = [] # type: T.List[asyncio.Future]
-self.queue = None # type: T.Optional[asyncio.Queue[T.Optional[str]]]
+self.all_futures: T.List[asyncio.Future] = []
+self.queue: T.Optional[asyncio.Queue[T.Optional[str]]] = None
def stdout_lines(self) -> T.AsyncIterator[str]:
self.queue = asyncio.Queue()
@ -1535,7 +1535,7 @@ class SingleTestRunner:
if not self.options.split and not self.runobj.needs_parsing \
else asyncio.subprocess.PIPE
-extra_cmd = [] # type: T.List[str]
+extra_cmd: T.List[str] = []
if self.test.protocol is TestProtocol.GTEST:
gtestname = self.test.name
if self.test.workdir:
@ -1570,7 +1570,7 @@ class SingleTestRunner:
class TestHarness:
def __init__(self, options: argparse.Namespace):
self.options = options
-self.collected_failures = [] # type: T.List[TestRun]
+self.collected_failures: T.List[TestRun] = []
self.fail_count = 0
self.expectedfail_count = 0
self.unexpectedpass_count = 0
@ -1580,13 +1580,13 @@ class TestHarness:
self.test_count = 0
self.name_max_len = 0
self.is_run = False
-self.loggers = [] # type: T.List[TestLogger]
+self.loggers: T.List[TestLogger] = []
self.console_logger = ConsoleLogger()
self.loggers.append(self.console_logger)
self.need_console = False
-self.ninja = None # type: T.List[str]
-self.logfile_base = None # type: T.Optional[str]
+self.ninja: T.List[str] = None
+self.logfile_base: T.Optional[str] = None
if self.options.logbase and not self.options.gdb:
namebase = None
self.logfile_base = os.path.join(self.options.wd, 'meson-logs', self.options.logbase)
@ -1809,7 +1809,7 @@ class TestHarness:
startdir = os.getcwd()
try:
os.chdir(self.options.wd)
-runners = [] # type: T.List[SingleTestRunner]
+runners: T.List[SingleTestRunner] = []
for i in range(self.options.repeat):
runners.extend(self.get_test_runner(test) for test in tests)
if i == 0:
@ -1964,7 +1964,7 @@ class TestHarness:
@staticmethod
def get_wrapper(options: argparse.Namespace) -> T.List[str]:
-wrap = [] # type: T.List[str]
+wrap: T.List[str] = []
if options.gdb:
wrap = [options.gdb_path, '--quiet']
if options.repeat > 1:
@ -2007,10 +2007,10 @@ class TestHarness:
async def _run_tests(self, runners: T.List[SingleTestRunner]) -> None:
semaphore = asyncio.Semaphore(self.options.num_processes)
-futures = deque() # type: T.Deque[asyncio.Future]
-running_tests = {} # type: T.Dict[asyncio.Future, str]
+futures: T.Deque[asyncio.Future] = deque()
+running_tests: T.Dict[asyncio.Future, str] = {}
interrupted = False
-ctrlc_times = deque(maxlen=MAX_CTRLC) # type: T.Deque[float]
+ctrlc_times: T.Deque[float] = deque(maxlen=MAX_CTRLC)
loop = asyncio.get_running_loop()
async def run_test(test: SingleTestRunner) -> None:
@ -2117,9 +2117,9 @@ def rebuild_deps(ninja: T.List[str], wd: str, tests: T.List[TestSerialisation])
assert len(ninja) > 0
-depends = set() # type: T.Set[str]
-targets = set() # type: T.Set[str]
-intro_targets = {} # type: T.Dict[str, T.List[str]]
+depends: T.Set[str] = set()
+targets: T.Set[str] = set()
+intro_targets: T.Dict[str, T.List[str]] = {}
for target in load_info_file(get_infodir(wd), kind='targets'):
intro_targets[target['id']] = [
convert_path_to_target(f)

@ -9,7 +9,7 @@ from pathlib import Path
import typing as T
def run(argsv: T.List[str]) -> int:
-commands = [[]] # type: T.List[T.List[str]]
+commands: T.List[T.List[str]] = [[]]
SEPARATOR = ';;;'
# Generate CMD parameters

@ -123,8 +123,8 @@ class Elf(DataSizes):
def __init__(self, bfile: str, verbose: bool = True) -> None:
self.bfile = bfile
self.verbose = verbose
-self.sections = [] # type: T.List[SectionHeader]
-self.dynamic = [] # type: T.List[DynamicEntry]
+self.sections: T.List[SectionHeader] = []
+self.dynamic: T.List[DynamicEntry] = []
self.open_bf(bfile)
try:
(self.ptrsize, self.is_le) = self.detect_elf_type()
@ -329,7 +329,7 @@ class Elf(DataSizes):
old_rpath = self.read_str()
# Some rpath entries may come from multiple sources.
# Only add each one once.
-new_rpaths = OrderedSet() # type: OrderedSet[bytes]
+new_rpaths: OrderedSet[bytes] = OrderedSet()
if new_rpath:
new_rpaths.update(new_rpath.split(b':'))
if old_rpath:

@ -1526,7 +1526,7 @@ def Popen_safe_legacy(args: T.List[str], write: T.Optional[str] = None,
**kwargs: T.Any) -> T.Tuple['subprocess.Popen[str]', str, str]:
p = subprocess.Popen(args, universal_newlines=False, close_fds=False,
stdin=stdin, stdout=stdout, stderr=stderr, **kwargs)
-input_ = None # type: T.Optional[bytes]
+input_: T.Optional[bytes] = None
if write is not None:
input_ = write.encode('utf-8')
o, e = p.communicate(input_)

@ -145,11 +145,11 @@ class PackageDefinition:
def __init__(self, fname: str, subproject: str = ''):
self.filename = fname
self.subproject = SubProject(subproject)
-self.type = None # type: T.Optional[str]
-self.values = {} # type: T.Dict[str, str]
-self.provided_deps = {} # type: T.Dict[str, T.Optional[str]]
-self.provided_programs = [] # type: T.List[str]
-self.diff_files = [] # type: T.List[Path]
+self.type: T.Optional[str] = None
+self.values: T.Dict[str, str] = {}
+self.provided_deps: T.Dict[str, T.Optional[str]] = {}
+self.provided_programs: T.List[str] = []
+self.diff_files: T.List[Path] = []
self.basename = os.path.basename(fname)
self.has_wrap = self.basename.endswith('.wrap')
self.name = self.basename[:-5] if self.has_wrap else self.basename
@ -290,10 +290,10 @@ class Resolver:
def __post_init__(self) -> None:
self.subdir_root = os.path.join(self.source_dir, self.subdir)
self.cachedir = os.path.join(self.subdir_root, 'packagecache')
-self.wraps = {} # type: T.Dict[str, PackageDefinition]
+self.wraps: T.Dict[str, PackageDefinition] = {}
self.netrc: T.Optional[netrc] = None
-self.provided_deps = {} # type: T.Dict[str, PackageDefinition]
-self.provided_programs = {} # type: T.Dict[str, PackageDefinition]
+self.provided_deps: T.Dict[str, PackageDefinition] = {}
+self.provided_programs: T.Dict[str, PackageDefinition] = {}
self.wrapdb: T.Dict[str, T.Any] = {}
self.wrapdb_provided_deps: T.Dict[str, str] = {}
self.wrapdb_provided_programs: T.Dict[str, str] = {}
@ -555,7 +555,7 @@ class Resolver:
revno = self.wrap.get('revision')
checkout_cmd = ['-c', 'advice.detachedHead=false', 'checkout', revno, '--']
is_shallow = False
-depth_option = [] # type: T.List[str]
+depth_option: T.List[str] = []
if self.wrap.values.get('depth', '') != '':
is_shallow = True
depth_option = ['--depth', self.wrap.values.get('depth')]
