fix various spelling issues

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>
pull/11673/head
Josh Soref 2 years ago committed by Eli Schwartz
parent e238b81ba0
commit cf9fd56bc9
  1. ci/run.ps1 (4)
  2. cross/iphone.txt (2)
  3. cross/tvos.txt (2)
  4. docs/markdown/Adding-new-projects-to-wrapdb.md (2)
  5. docs/markdown/Contributing.md (4)
  6. docs/markdown/Fs-module.md (4)
  7. docs/markdown/IDE-integration.md (2)
  8. docs/markdown/MesonCI.md (2)
  9. docs/markdown/Qt6-module.md (2)
  10. docs/markdown/Reference-tables.md (4)
  11. docs/markdown/Release-notes-for-0.40.0.md (2)
  12. docs/markdown/Release-notes-for-0.45.0.md (2)
  13. docs/markdown/Release-notes-for-0.46.0.md (2)
  14. docs/markdown/Release-notes-for-0.48.0.md (4)
  15. docs/markdown/Release-notes-for-0.50.0.md (4)
  16. docs/markdown/Release-notes-for-0.52.0.md (4)
  17. docs/markdown/Release-notes-for-0.54.0.md (2)
  18. docs/markdown/Release-notes-for-0.55.0.md (4)
  19. docs/markdown/Release-notes-for-0.59.0.md (4)
  20. docs/markdown/Release-notes-for-0.60.0.md (4)
  21. docs/markdown/Release-notes-for-0.62.0.md (4)
  22. docs/markdown/Release-notes-for-0.63.0.md (2)
  23. docs/markdown/Release-notes-for-1.0.0.md (2)
  24. docs/markdown/Release-procedure.md (2)
  25. docs/markdown/Rewriter.md (2)
  26. docs/markdown/Rust-module.md (2)
  27. docs/markdown/Simple-comparison.md (2)
  28. docs/markdown/Subprojects.md (2)
  29. docs/markdown/Syntax.md (2)
  30. docs/markdown/Users.md (2)
  31. docs/markdown/Windows-module.md (2)
  32. docs/markdown/Yaml-RefMan.md (6)
  33. docs/markdown/_include_qt_base.md (2)
  34. docs/markdown/i18n-module.md (2)
  35. docs/refman/generatormd.py (2)
  36. docs/refman/loaderbase.py (4)
  37. docs/yaml/elementary/str.yml (2)
  38. docs/yaml/functions/_build_target_base.yaml (4)
  39. docs/yaml/functions/install_headers.yaml (2)
  40. docs/yaml/functions/project.yaml (2)
  41. docs/yaml/objects/build_tgt.yaml (2)
  42. docs/yaml/objects/compiler.yaml (4)
  43. docs/yaml/objects/module.yaml (2)
  44. manual tests/12 wrap mirror/meson.build (2)
  45. manual tests/3 git wrap/meson.build (2)
  46. manual tests/4 standalone binaries/readme.txt (2)
  47. manual tests/6 hg wrap/meson.build (2)
  48. mesonbuild/arglist.py (4)
  49. mesonbuild/ast/interpreter.py (18)
  50. mesonbuild/ast/introspection.py (4)
  51. mesonbuild/backend/ninjabackend.py (26)
  52. mesonbuild/backend/vs2010backend.py (2)
  53. mesonbuild/build.py (10)
  54. mesonbuild/cmake/common.py (2)
  55. mesonbuild/cmake/traceparser.py (8)
  56. mesonbuild/compilers/compilers.py (6)
  57. mesonbuild/compilers/detect.py (8)
  58. mesonbuild/compilers/mixins/clike.py (4)
  59. mesonbuild/compilers/mixins/visualstudio.py (2)
  60. mesonbuild/coredata.py (14)
  61. mesonbuild/dependencies/boost.py (4)
  62. mesonbuild/dependencies/cmake.py (4)
  63. mesonbuild/dependencies/data/CMakeListsLLVM.txt (2)
  64. mesonbuild/dependencies/misc.py (2)
  65. mesonbuild/dependencies/pkgconfig.py (2)
  66. mesonbuild/dependencies/qt.py (4)
  67. mesonbuild/envconfig.py (4)
  68. mesonbuild/environment.py (2)
  69. mesonbuild/interpreter/compiler.py (6)
  70. mesonbuild/interpreter/interpreter.py (18)
  71. mesonbuild/interpreter/kwargs.py (2)
  72. mesonbuild/interpreter/mesonmain.py (2)
  73. mesonbuild/interpreter/type_checking.py (6)
  74. mesonbuild/interpreterbase/decorators.py (4)
  75. mesonbuild/interpreterbase/interpreterbase.py (12)
  76. mesonbuild/linkers/detect.py (2)
  77. mesonbuild/linkers/linkers.py (4)
  78. mesonbuild/minstall.py (4)
  79. mesonbuild/modules/gnome.py (2)
  80. mesonbuild/modules/keyval.py (2)
  81. mesonbuild/modules/windows.py (2)
  82. mesonbuild/mtest.py (2)
  83. mesonbuild/programs.py (2)
  84. mesonbuild/rewriter.py (14)
  85. mesonbuild/scripts/cmake_run_ctgt.py (2)
  86. mesonbuild/scripts/depfixer.py (2)
  87. mesonbuild/utils/universal.py (6)
  88. mesonbuild/utils/vsenv.py (2)
  89. run_project_tests.py (2)
  90. test cases/cmake/9 disabled subproject/meson.build (2)
  91. test cases/common/14 configure file/meson.build (2)
  92. test cases/common/158 disabler/meson.build (2)
  93. test cases/common/182 find override/meson.build (2)
  94. test cases/common/189 check header/meson.build (2)
  95. test cases/common/196 subproject with features/meson.build (2)
  96. test cases/common/20 global arg/prog.c (2)
  97. test cases/common/220 fs module/meson.build (2)
  98. test cases/common/222 native prop/meson.build (8)
  99. test cases/common/227 very long command line/codegen.py (0)
  100. test cases/common/227 very long command line/main.c (0)
Some files were not shown because too many files have changed in this diff.

@ -3,7 +3,7 @@ if ($LastExitCode -ne 0) {
exit 0
}
# remove Chocolately, MinGW, Strawberry Perl from path, so we don't find gcc/gfortran and try to use it
# remove Chocolatey, MinGW, Strawberry Perl from path, so we don't find gcc/gfortran and try to use it
# remove PostgreSQL from path so we don't pickup a broken zlib from it
$env:Path = ($env:Path.Split(';') | Where-Object { $_ -notmatch 'mingw|Strawberry|Chocolatey|PostgreSQL' }) -join ';'
@ -76,7 +76,7 @@ foreach ($prog in $progs) {
echo ""
echo "Ninja / MSBuld version:"
echo "Ninja / MSBuild version:"
if ($env:backend -eq 'ninja') {
ninja --version
} else {

@ -1,6 +1,6 @@
# This is a cross compilation file from OSX Yosemite to iPhone
# Apple keeps changing the location and names of files so
# these might not work for you. Use the googels and xcrun.
# these might not work for you. Use the googles and xcrun.
[binaries]
c = ['clang', '-arch', 'arm64', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk']

@ -1,6 +1,6 @@
# This is a cross compilation file from OSX Yosemite to Apple tvOS
# Apple keeps changing the location and names of files so
# these might not work for you. Use the googels and xcrun.
# these might not work for you. Use the googles and xcrun.
[binaries]
c = ['clang', '-arch', 'arm64', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk']

@ -170,6 +170,6 @@ The first command is to ensure the wrap is correctly fetched from the
latest packagefiles. The second command configures meson and selects a
set of subprojects to enable.
The Github project contains automatic CI on pushing to run the project
The GitHub project contains automatic CI on pushing to run the project
and check the metadata for obvious mistakes. This can be checked from
your fork before submitting a PR.

@ -14,7 +14,7 @@ Thank you for your interest in participating to the development.
## Submitting patches
All changes must be submitted as [pull requests to
Github](https://github.com/mesonbuild/meson/pulls). This causes them
GitHub](https://github.com/mesonbuild/meson/pulls). This causes them
to be run through the CI system. All submissions must pass a full CI
test run before they are even considered for submission.
@ -110,7 +110,7 @@ Meson's merge strategy should fulfill the following guidelines:
These goals are slightly contradictory so the correct thing to do
often requires some judgement on part of the person doing the
merge. Github provides three different merge options, The rules of
merge. GitHub provides three different merge options, The rules of
thumb for choosing between them goes like this:
- single commit pull requests should always be rebased

@ -90,7 +90,7 @@ Examples:
x = 'foo.txt'
y = 'sub/../foo.txt'
z = 'bar.txt' # a symlink pointing to foo.txt
j = 'notafile.txt' # non-existent file
j = 'notafile.txt' # nonexistent file
fs.is_samepath(x, y) # true
fs.is_samepath(x, z) # true
@ -99,7 +99,7 @@ fs.is_samepath(x, j) # false
p = 'foo/bar'
q = 'foo/bar/baz/..'
r = 'buz' # a symlink pointing to foo/bar
s = 'notapath' # non-existent directory
s = 'notapath' # nonexistent directory
fs.is_samepath(p, q) # true
fs.is_samepath(p, r) # true

@ -260,7 +260,7 @@ The list of all _found_ dependencies can be acquired from
`intro-dependencies.json`. Here, the name, version, compiler and
linker arguments for a dependency are listed.
### Scanning for dependecie with `--scan-dependencies`
### Scanning for dependencies with `--scan-dependencies`
It is also possible to get most dependencies used without a build
directory. This can be done by running `meson introspect

@ -32,7 +32,7 @@ The Dockerfile is generated from the `image.json` file and basically
only adds a few common files and runs the `install.sh` script which
should contain all distribution specific setup steps. The `common.sh`
can be sourced via `source /ci/common.sh` to access some shared
functionalety.
functionality.
To generate the image run `build.py -t build <image>`. A generated
image can be tested with `build.py -t test <image>`.

@ -80,7 +80,7 @@ This method takes the following keyword arguments:
- `ui_files`: (string | File | CustomTarget)[]: Passed the `uic` compiler
- `moc_sources`: (string | File | CustomTarget)[]: Passed the `moc` compiler.
These are converted into .moc files meant to be `#include`ed
- `moc_headers`: (string | File | CustomTarget)[]: Passied the `moc` compiler.
- `moc_headers`: (string | File | CustomTarget)[]: Passed the `moc` compiler.
These will be converted into .cpp files
- `include_directories` (IncludeDirectories | string)[], the directories to add
to header search path for `moc`

@ -144,7 +144,7 @@ These are provided by the `.system()` method call.
| cygwin | The Cygwin environment for Windows |
| darwin | Either OSX or iOS |
| dragonfly | DragonFly BSD |
| emscripten | Emscripten's Javascript environment |
| emscripten | Emscripten's JavaScript environment |
| freebsd | FreeBSD and its derivatives |
| gnu | GNU Hurd |
| haiku | |
@ -329,7 +329,7 @@ machine](#Environment-variables-per-machine) section for details.
| C# | CSC | CSC | The linker is the compiler |
| nasm | NASM | | Uses the C linker |
*The old environment variales are still supported, but are deprecated
*The old environment variables are still supported, but are deprecated
and will be removed in a future version of Meson.*
## Environment variables per machine

@ -119,7 +119,7 @@ qt5_dep = dependency('qt5', modules : 'core', method : 'qmake')
## Link whole contents of static libraries
The default behavior of static libraries is to discard all symbols
that are not not directly referenced. This may lead to exported
that are not directly referenced. This may lead to exported
symbols being lost. Most compilers support "whole archive" linking
that includes all symbols and code of a given static library. This is
exposed with the `link_whole` keyword.

@ -97,7 +97,7 @@ int_255 = 0xFF
The value `if-release` can be given for the `b_ndebug` project option.
This will make the `NDEBUG` pre-compiler macro to be defined for
release type builds as if the `b_ndebug` project option had had the
release type builds as if the `b_ndebug` project option had the
value `true` defined for it.
## `install_data()` defaults to `{datadir}/{projectname}`

@ -269,7 +269,7 @@ helper = static_library(
final = shared_library(
'final',
['final.c'],
dependencyes : dep,
dependencies : dep,
)
```

@ -12,7 +12,7 @@ use, such as *debug* and *minsize*. There is also a *plain* type that
adds nothing by default but instead makes it the user's responsibility
to add everything by hand. This works but is a bit tedious.
In this release we have added new new options to manually toggle e.g.
In this release we have added new options to manually toggle e.g.
optimization levels and debug info so those can be changed
independently of other options. For example by default the debug
buildtype has no optimization enabled at all. If you wish to use GCC's
@ -79,7 +79,7 @@ which has special properties such as not buffering stdout and
serializing all targets in this pool.
The primary use-case for this is to be able to run external commands
that take a long time to exeute. Without setting this, the user does
that take a long time to execute. Without setting this, the user does
not receive any feedback about what the program is doing.
## `dependency(version:)` now applies to all dependency types

@ -227,7 +227,7 @@ Furthermore, the `filename` and `install_filename` keys in the targets
introspection are now lists of strings with identical length.
The `--target-files` option is now deprecated, since the same information
can be acquired from the `--tragets` introspection API.
can be acquired from the `--targets` introspection API.
## Meson file rewriter
@ -317,7 +317,7 @@ A complete introspection dump is also stored in the `meson-info`
directory. This dump will be (re)generated each time meson updates the
configuration of the build directory.
Additionlly the format of `meson introspect target` was changed:
Additionally the format of `meson introspect target` was changed:
- New: the `sources` key. It stores the source files of a target and their compiler parameters.
- New: the `defined_in` key. It stores the Meson file where a target is defined

@ -94,7 +94,7 @@ linker internal re-architecture this has become possible
## Compiler and dynamic linker representation split
0.52.0 includes a massive refactor of the representaitons of compilers to
0.52.0 includes a massive refactor of the representations of compilers to
tease apart the representations of compilers and dynamic linkers (ld). This
fixes a number of compiler/linker combinations. In particular this fixes
use GCC and vanilla clang on macOS.
@ -160,7 +160,7 @@ lib2 = static_library(other_sources, link_whole : lib1, install : true)
```
- `link_with:` of a static library with an uninstalled static library. In the
example below, lib2 now implicitly promote `link_with:` to `link_whole:` because
the installed lib2 would oterhwise be unusable.
the installed lib2 would otherwise be unusable.
```meson
lib1 = static_library(sources, install : false)
lib2 = static_library(sources, link_with : lib1, install : true)

@ -286,7 +286,7 @@ This old behavior is inconsistent with the way Autotools works, which
undermines the purpose of distro-integration that is the only reason
environment variables are supported at all in Meson. The new behavior
is not quite the same, but doesn't conflict: Meson doesn't always
repond to an environment when Autoconf would, but when it does it
respond to an environment when Autoconf would, but when it does it
interprets it as Autotools would.
## Added 'pkg_config_libdir' property

@ -93,7 +93,7 @@ Meson now supports passing configuration options to CMake and
overriding certain build details extracted from the CMake subproject.
The new CMake configuration options object is very similar to the
[[@cfg_data]] object object returned
[[@cfg_data]] object returned
by [[configuration_data]]. It
is generated by the `subproject_options` function
@ -175,7 +175,7 @@ changed), but is now deprecated.
## String concatenation in meson_options.txt
It is now possible to use string concatenation (with the `+`
opperator) in the `meson_options.txt` file. This allows splitting long
operator) in the `meson_options.txt` file. This allows splitting long
option descriptions.
```meson

@ -196,7 +196,7 @@ executable(
## New `build target` methods
The [[@build_tgt]] object now supports
the following two functions, to ensure feature compatebility with
the following two functions, to ensure feature compatibility with
[[@external_program]] objects:
- `found()`: Always returns `true`. This function is meant
@ -205,7 +205,7 @@ the following two functions, to ensure feature compatebility with
use-cases where an executable is used instead of an external program.
- `path()`: **(deprecated)** does the exact same as `full_path()`.
**NOTE:** This function is solely kept for compatebility
**NOTE:** This function is solely kept for compatibility
with `external program` objects. It will be
removed once the, also deprecated, corresponding `path()` function in the
`external program` object is removed.

@ -286,7 +286,7 @@ be flattened.
## The qt modules now accept generated outputs as inputs for qt.compile_*
This means you can uset `custom_target`, custom_target indices
This means you can use `custom_target`, custom_target indices
(`custom_target[0]`, for example), or the output of `generator.process` as
inputs to the various `qt.compile_*` methods.
@ -357,7 +357,7 @@ are found, and silently continue if Visual Studio activation fails.
`meson setup --vsenv` command line argument can now be used to force Visual Studio
activation even when other compilers are found. It also make Meson abort with an
error message when activation fails. This is especially useful for Github Action
error message when activation fails. This is especially useful for GitHub Actions
because their Windows images have gcc in their PATH by default.
`--vsenv` is set by default when using `vs` backend.

@ -19,9 +19,9 @@ directory, that file is loaded by gdb automatically.
## Print modified environment variables with `meson devenv --dump`
With `--dump` option, all envorinment variables that have been modified are
With `--dump` option, all environment variables that have been modified are
printed instead of starting an interactive shell. It can be used by shell
scripts that wish to setup their environment themself.
scripts that wish to setup their environment themselves.
## New `method` and `separator` kwargs on `environment()` and `meson.add_devenv()`

@ -112,7 +112,7 @@ and the resulting directory tree will look like
## JAR Resources
The ability to add resources to a JAR has been added. Use the `java_resources`
keyword argument. It takes a `sturctured_src` object.
keyword argument. It takes a `structured_src` object.
```meson
jar(

@ -54,7 +54,7 @@ Meson function name styling.
The `bindgen` method of the `rust` module now accepts a dependencies argument.
Any include paths in these dependencies will be passed to the underlying call to
`clang`, and the call to `bindgen` will correctly depend on any generatd sources.
`clang`, and the call to `bindgen` will correctly depend on any generated sources.
## String arguments to the rust.bindgen include_directories argument

@ -24,7 +24,7 @@ Before a major release is made a stable branch will be made, and
will be made, and all bugs effecting the RC will be assigned to this
milestone. Patches fixing bugs in the milestone will be picked to the
stable branch, and normal development will continue on the master
branch. Every week after after this a new release candidate will be
branch. Every week after this a new release candidate will be
made until all bugs are resolved in that milestone. When all of the
bugs are fixed the 0.X.0 release will be made.

@ -26,7 +26,7 @@ mode", on the other hand, is meant to be used by external programs
The rewriter itself is considered stable, however the user interface
and the "script mode" API might change in the future. These changes
may also break backwards comaptibility to older releases.
may also break backwards compatibility to older releases.
We are also open to suggestions for API improvements.

@ -68,7 +68,7 @@ generated = rust.bindgen(
)
```
If the header depeneds on generated headers, those headers must be passed to
If the header depends on generated headers, those headers must be passed to
`bindgen` as well to ensure proper dependency ordering, static headers do not
need to be passed, as a proper depfile is generated:

@ -29,7 +29,7 @@ how much time the build system takes to check the states of all source
files because if any of them could potentially cause a rebuild.
Since CMake has two different backends, Make and Ninja, we ran the
tests on both of them. All tests were run on a 2011 era Macbook Pro
tests on both of them. All tests were run on a 2011 era MacBook Pro
running Ubuntu 13/04. The tests were run multiple times and we always
took the fastest time.

@ -219,7 +219,7 @@ the following command-line options:
* **--wrap-mode=nodownload**
Meson will not use the network to download any subprojects or
fetch any wrap information. Only pre-existing sources will be used.
fetch any wrap information. Only preexisting sources will be used.
This is useful (mostly for distros) when you want to only use the
sources provided by a software release, and want to manually handle
or provide missing dependencies.

@ -766,7 +766,7 @@ additive_expression: multiplicative_expression | (additive_expression additive_o
additive_operator: "+" | "-"
argument_list: positional_arguments ["," keyword_arguments] | keyword_arguments
array_literal: "[" [expression_list] "]"
assignment_statement: expression asssignment_operator expression
assignment_statement: expression assignment_operator expression
assignment_operator: "=" | "+="
binary_literal: "0b" BINARY_NUMBER
BINARY_NUMBER: /[01]+/

@ -107,7 +107,7 @@ format files
- [Marker](https://github.com/fabiocolacio/Marker), a GTK-3 markdown editor
- [Mesa](https://mesa3d.org/), an open source graphics driver project
- [Miniz](https://github.com/richgel999/miniz), a zlib replacement library
- [MiracleCast](https://github.com/albfan/miraclecast), connect external monitors to your system via Wifi-Display specification aka Miracast
- [MiracleCast](https://github.com/albfan/miraclecast), connect external monitors to your system via WiFi-Display specification aka Miracast
- [mpv](https://github.com/mpv-player/mpv), a free, open source, and cross-platform media player
- [mrsh](https://github.com/emersion/mrsh), a minimal POSIX shell
- [Nautilus](https://gitlab.gnome.org/GNOME/nautilus), the GNOME file manager

@ -20,7 +20,7 @@ Compiles Windows `rc` files specified in the positional arguments.
Returns a list of `CustomTarget` objects that you put in the list of sources for
the target you want to have the resources in.
*Since 0.61.0* CustomTargetIndexs and CustomTargets with more than out output
*Since 0.61.0* CustomTargetIndexes and CustomTargets with more than out output
*may be used as positional arguments.
This method has the following keyword arguments:

@ -34,7 +34,7 @@ To link to functions, the function name should be put into the tag:
`[[<func name>]]`.
Methods (for all kinds of objects, including modules) can be linked to like
this: `[[<object name>.<method name>]]`.
To link to objects themself, the `[[@<object name>]]` syntax can be used.
To link to objects themselves, the `[[@<object name>]]` syntax can be used.
These tags do **not** need to be put in inline code! A hotdoc extension handles
the formatting here. If tags need to be placed (for instance, to include reference
@ -69,7 +69,7 @@ module has its own directory. The module itself **must** be in a file called
`module.yaml`. All objects returned by the module are then located next to this
file.
The name of the YAML files themself are ignored (with the exception of
The name of the YAML files themselves are ignored (with the exception of
`module.yaml`) and carry no specific meaning. However, it is recommended to name
the YAML files after the `name` entry of the object.
@ -81,7 +81,7 @@ is to make inheriting functions and arguments easier.
# YAML schema
The YAML files themself are structured as follows:
The YAML files themselves are structured as follows:
## Functions

@ -71,7 +71,7 @@ This method takes the following keyword arguments:
- `qresources` (string | File)[]: Passed to the RCC compiler
- `ui_files`: (string | File | CustomTarget)[]: Passed the `uic` compiler
- `moc_sources`: (string | File | CustomTarget)[]: Passed the `moc` compiler. These are converted into .moc files meant to be `#include`ed
- `moc_headers`: (string | File | CustomTarget)[]: Passied the `moc` compiler. These will be converted into .cpp files
- `moc_headers`: (string | File | CustomTarget)[]: Passed the `moc` compiler. These will be converted into .cpp files
- `include_directories` (IncludeDirectories | string)[], the directories to add to header search path for `moc`
- `moc_extra_arguments` string[]: any additional arguments to `moc`. Since v0.44.0.
- `uic_extra_arguments` string[]: any additional arguments to `uic`. Since v0.49.0.

@ -27,7 +27,7 @@ argument which is the name of the gettext module.
* `preset`: (*Added 0.37.0*) name of a preset list of arguments,
current option is `'glib'`, see
[source](https://github.com/mesonbuild/meson/blob/master/mesonbuild/modules/i18n.py)
for for their value
for their value
* `install`: (*Added 0.43.0*) if false, do not install the built translations.
* `install_dir`: (*Added 0.50.0*) override default install location, default is `localedir`

@ -105,7 +105,7 @@ class GeneratorMD(GeneratorBase):
def _link_to_object(self, obj: T.Union[Function, Object], in_code_block: bool = False) -> str:
'''
Generate a palaceholder tag for the the function/method/object documentation.
Generate a palaceholder tag for the function/method/object documentation.
This tag is then replaced in the custom hotdoc plugin.
'''
prefix = '#' if in_code_block else ''

@ -108,7 +108,7 @@ class _Resolver:
for obj in func.returns.resolved:
obj.data_type.returned_by += [func]
# Handle kwargs inehritance
# Handle kwargs inheritance
for base_name in func.kwargs_inherit:
base_name = base_name.strip()
assert base_name in self.func_map, f'Unknown base function `{base_name}` for {func.name}'
@ -123,7 +123,7 @@ class _Resolver:
missing = {k: v for k, v in base.kwargs.items() if k in base_keys - curr_keys}
func.kwargs.update(missing)
# Handloe other args inheritance
# Handle other args inheritance
_T = T.TypeVar('_T', bound=T.Union[ArgBase, T.List[PosArg]])
def resolve_inherit(name: str, curr: _T, resolver: T.Callable[[Function], _T]) -> _T:
if name and not curr:

@ -39,7 +39,7 @@ methods:
# str.replace(old, new)
- name: replace
description: Search all occurrences of `old` and and replace it with `new`
description: Search all occurrences of `old` and replace it with `new`
returns: str
since: 0.58.0
example: |

@ -89,7 +89,7 @@ kwargs:
default: false
description: |
When set to true flags this target as a GUI application
on platforms where this makes a differerence, **deprecated** since
on platforms where this makes a difference, **deprecated** since
0.56.0, use `win_subsystem` instead.
link_args:
@ -299,6 +299,6 @@ kwargs:
If it is a [[shared_library]] it defaults to "lib", and may be "lib",
"dylib", "cdylib", or "proc-macro". If "lib" then Rustc will pick a
default, "cdylib" means a C ABI library, "dylib" means a Rust ABI, and
"proc-macro" is a special rust proceedural macro crate.
"proc-macro" is a special rust procedural macro crate.
"proc-macro" is new in 0.62.0.

@ -67,6 +67,6 @@ kwargs:
since: 0.63.0
default: false
description: |
Disable stripping child-direcories from header files when installing.
Disable stripping child-directories from header files when installing.
This is equivalent to GNU Automake's `nobase` option.

@ -82,7 +82,7 @@ kwargs:
For backwards compatibility reasons you can also pass an array of
licenses here. This is not recommended, as it is ambiguous: `license :
['Apache-2.0', 'GPL-2.0-only']` instead use an SPDX espression: `license
['Apache-2.0', 'GPL-2.0-only']` instead use an SPDX expression: `license
: 'Apache-2.0 OR GPL-2.0-only'`, which makes it clear that the license
mean OR, not AND.

@ -53,7 +53,7 @@ methods:
deprecated: 0.59.0
description: |
Does the exact same as [[build_tgt.full_path]]. **NOTE**: This
function is solely kept for compatebility with [[@external_program]] objects.
function is solely kept for compatibility with [[@external_program]] objects.
It will be removed once the, also deprecated, corresponding `path()`
function in the [[@external_program]] object is removed.

@ -485,7 +485,7 @@ methods:
default: "'off'"
description: |
Supported values:
- `'off'`: Quietely ignore unsupported arguments
- `'off'`: Quietly ignore unsupported arguments
- `'warn'`: Print a warning for unsupported arguments
- `'require'`: Abort if at least one argument is not supported
@ -544,7 +544,7 @@ methods:
# default: "'off'"
# description: |
# Supported values:
# - `'off'`: Quietely ignore unsupported arguments
# - `'off'`: Quietly ignore unsupported arguments
# - `'warn'`: Print a warning for unsupported arguments
# - `'require'`: Abort if at least one argument is not supported

@ -4,7 +4,7 @@ description: |
Base type for all modules.
Modules provide their own specific implementation methods, but all modules
proivide the following methods:
provide the following methods:
methods:
- name: found

@ -1,4 +1,4 @@
project('downloader')
# this test will timeout, showing that a subdomain isn't caught as masquarading url
# this test will timeout, showing that a subdomain isn't caught as masquerading url
subproject('zlib')

@ -1,4 +1,4 @@
project('git outcheckker', 'c')
project('git outchecker', 'c')
sp = subproject('samplesubproject')

@ -1,5 +1,5 @@
This directory shows how you can build redistributable binaries. On
OSX this menans building an app bundle and a .dmg installer. On Linux
OSX this means building an app bundle and a .dmg installer. On Linux
it means building an archive that bundles its dependencies. On Windows
it means building an .exe installer.

@ -1,4 +1,4 @@
project('Mercurial outcheckker', 'c')
project('Mercurial outchecker', 'c')
sp = subproject('samplesubproject')

@ -198,13 +198,13 @@ class CompilerArgs(T.MutableSequence[str]):
"""Returns whether the argument can be safely de-duped.
In addition to these, we handle library arguments specially.
With GNU ld, we surround library arguments with -Wl,--start/end-gr -> Dedupoup
With GNU ld, we surround library arguments with -Wl,--start/end-group
to recursively search for symbols in the libraries. This is not needed
with other linkers.
"""
# A standalone argument must never be deduplicated because it is
# defined by what comes _after_ it. Thus dedupping this:
# defined by what comes _after_ it. Thus deduping this:
# -D FOO -D BAR
# would yield either
# -D FOO BAR

@ -352,7 +352,7 @@ class AstInterpreter(InterpreterBase):
return None # Loop detected
id_loop_detect += [node.ast_id]
# Try to evealuate the value of the node
# Try to evaluate the value of the node
if isinstance(node, IdNode):
result = quick_resolve(node)
@ -421,7 +421,7 @@ class AstInterpreter(InterpreterBase):
else:
args = [args_raw]
flattend_args = [] # type: T.List[TYPE_nvar]
flattened_args = [] # type: T.List[TYPE_nvar]
# Resolve the contents of args
for i in args:
@ -430,18 +430,18 @@ class AstInterpreter(InterpreterBase):
if resolved is not None:
if not isinstance(resolved, list):
resolved = [resolved]
flattend_args += resolved
flattened_args += resolved
elif isinstance(i, (str, bool, int, float)) or include_unknown_args:
flattend_args += [i]
return flattend_args
flattened_args += [i]
return flattened_args
def flatten_kwargs(self, kwargs: T.Dict[str, TYPE_nvar], include_unknown_args: bool = False) -> T.Dict[str, TYPE_nvar]:
flattend_kwargs = {}
flattened_kwargs = {}
for key, val in kwargs.items():
if isinstance(val, BaseNode):
resolved = self.resolve_node(val, include_unknown_args)
if resolved is not None:
flattend_kwargs[key] = resolved
flattened_kwargs[key] = resolved
elif isinstance(val, (str, bool, int, float)) or include_unknown_args:
flattend_kwargs[key] = val
return flattend_kwargs
flattened_kwargs[key] = val
return flattened_kwargs

@ -263,9 +263,9 @@ class IntrospectionInterpreter(AstInterpreter):
# Pop the first element if the function is a build target function
if isinstance(curr, FunctionNode) and curr.func_name in BUILD_TARGET_FUNCTIONS:
arg_nodes.pop(0)
elemetary_nodes = [x for x in arg_nodes if isinstance(x, (str, StringNode))]
elementary_nodes = [x for x in arg_nodes if isinstance(x, (str, StringNode))]
inqueue += [x for x in arg_nodes if isinstance(x, (FunctionNode, ArrayNode, IdNode, ArithmeticNode))]
if elemetary_nodes:
if elementary_nodes:
res += [curr]
return res

@ -545,7 +545,7 @@ class NinjaBackend(backends.Backend):
# We want to match 'Note: including file: ' in the line
# 'Note: including file: d:\MyDir\include\stdio.h', however
# different locales have different messages with a different
# number of colons. Match up to the the drive name 'd:\'.
# number of colons. Match up to the drive name 'd:\'.
# When used in cross compilation, the path separator is a
# forward slash rather than a backslash so handle both; i.e.
# the path is /MyDir/include/stdio.h.
@ -750,7 +750,7 @@ class NinjaBackend(backends.Backend):
'''
Adds the source file introspection information for a language of a target
Internal introspection storage formart:
Internal introspection storage format:
self.introspection_data = {
'<target ID>': {
<id tuple>: {
@ -830,7 +830,7 @@ class NinjaBackend(backends.Backend):
self.generate_swift_target(target)
return
# Pre-existing target C/C++ sources to be built; dict of full path to
# Preexisting target C/C++ sources to be built; dict of full path to
# source relative to build root and the original File object.
target_sources: T.MutableMapping[str, File]
@ -839,7 +839,7 @@ class NinjaBackend(backends.Backend):
generated_sources: T.MutableMapping[str, File]
# List of sources that have been transpiled from a DSL (like Vala) into
# a language that is haneled below, such as C or C++
# a language that is handled below, such as C or C++
transpiled_sources: T.List[str]
if 'vala' in target.compilers:
@ -879,7 +879,7 @@ class NinjaBackend(backends.Backend):
mlog.log(mlog.red('FIXME'), msg)
# Get a list of all generated headers that will be needed while building
# this target's sources (generated sources and pre-existing sources).
# this target's sources (generated sources and preexisting sources).
# This will be set as dependencies of all the target's sources. At the
# same time, also deal with generated sources that need to be compiled.
generated_source_files = []
@ -964,7 +964,7 @@ class NinjaBackend(backends.Backend):
o, s = self.generate_single_compile(target, src, 'vala', [], header_deps)
obj_list.append(o)
# Generate compile targets for all the pre-existing sources for this target
# Generate compile targets for all the preexisting sources for this target
for src in target_sources.values():
if not self.environment.is_header(src):
if self.environment.is_llvm_ir(src):
@ -1035,8 +1035,8 @@ class NinjaBackend(backends.Backend):
rule_name = 'depscan'
scan_sources = self.select_sources_to_scan(compiled_sources)
# Dump the sources as a json list. This avoids potential probllems where
# the number of sources passed to depscan exceedes the limit imposed by
# Dump the sources as a json list. This avoids potential problems where
# the number of sources passed to depscan exceeds the limit imposed by
# the OS.
with open(json_abs, 'w', encoding='utf-8') as f:
json.dump(scan_sources, f)
@ -1294,7 +1294,7 @@ class NinjaBackend(backends.Backend):
if build.rulename in self.ruledict:
build.rule = self.ruledict[build.rulename]
else:
mlog.warning(f"build statement for {build.outfilenames} references non-existent rule {build.rulename}")
mlog.warning(f"build statement for {build.outfilenames} references nonexistent rule {build.rulename}")
def write_rules(self, outfile):
for b in self.build_elements:
@ -1505,7 +1505,7 @@ class NinjaBackend(backends.Backend):
T.Tuple[T.MutableMapping[str, File], T.MutableMapping]]:
"""
Splits the target's sources into .vala, .gs, .vapi, and other sources.
Handles both pre-existing and generated sources.
Handles both preexisting and generated sources.
Returns a tuple (vala, vapi, others) each of which is a dictionary with
the keys being the path to the file (relative to the build directory)
@ -1515,7 +1515,7 @@ class NinjaBackend(backends.Backend):
vapi: T.MutableMapping[str, File] = OrderedDict()
others: T.MutableMapping[str, File] = OrderedDict()
othersgen: T.MutableMapping[str, File] = OrderedDict()
# Split pre-existing sources
# Split preexisting sources
for s in t.get_sources():
# BuildTarget sources are always mesonlib.File files which are
# either in the source root, or generated with configure_file and
@ -1928,7 +1928,7 @@ class NinjaBackend(backends.Backend):
# before that it would treat linking two static libraries as
# whole-archive linking. However, to make this work we have to disable
# bundling, which can't be done until 1.63.0… So for 1.61–1.62 we just
# have to hope that the default cases of +whole-archive are sufficent.
# have to hope that the default cases of +whole-archive are sufficient.
# See: https://github.com/rust-lang/rust/issues/99429
if mesonlib.version_compare(rustc.version, '>= 1.63.0'):
whole_archive = ':+whole-archive,-bundle'
@ -2624,7 +2624,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
# has pdb file called foo.pdb. So will a static library
# foo.lib, which clobbers both foo.pdb _and_ the dll file's
# export library called foo.lib (by default, currently we name
# them libfoo.a to avoidt this issue). You can give the files
# them libfoo.a to avoid this issue). You can give the files
# unique names such as foo_exe.pdb but VC also generates a
# bunch of other files which take their names from the target
# basename (i.e. "foo") and stomp on each other.

@ -1436,7 +1436,7 @@ class Vs2010Backend(backends.Backend):
else:
inc_dirs = file_inc_dirs
self.add_include_dirs(lang, inc_cl, inc_dirs)
# XXX: Do we need to set the object file name name here too?
# XXX: Do we need to set the object file name here too?
previous_objects = []
if self.has_objects(objects, additional_objects, gen_objs):

@ -740,7 +740,7 @@ class BuildTarget(Target):
self.rpath_dirs_to_remove: T.Set[bytes] = set()
self.process_sourcelist(sources)
# Objects can be:
# 1. Pre-existing objects provided by the user with the `objects:` kwarg
# 1. Preexisting objects provided by the user with the `objects:` kwarg
# 2. Compiled objects created by and extracted from another target
self.process_objectlist(objects)
self.process_kwargs(kwargs)
@ -811,8 +811,8 @@ class BuildTarget(Target):
"""Split sources into generated and static sources.
Sources can be:
1. Pre-existing source files in the source tree (static)
2. Pre-existing sources generated by configure_file in the build tree.
1. Preexisting source files in the source tree (static)
2. Preexisting sources generated by configure_file in the build tree.
(static as they are only regenerated if meson itself is regenerated)
3. Sources files generated by another target or a Generator (generated)
"""
@ -884,7 +884,7 @@ class BuildTarget(Target):
missing_languages: T.List[str] = []
if not any([self.sources, self.generated, self.objects, self.structured_sources]):
return missing_languages
# Pre-existing sources
# Preexisting sources
sources: T.List['FileOrString'] = list(self.sources)
generated = self.generated.copy()
@ -1654,7 +1654,7 @@ You probably should put it in link_with instead.''')
'\n '
f'If shared_module() was used for {link_target.name} because it has references to undefined symbols,'
'\n '
'use shared_libary() with `override_options: [\'b_lundef=false\']` instead.')
'use shared_library() with `override_options: [\'b_lundef=false\']` instead.')
link_target.force_soname = True
class Generator(HoldableObject):

@ -146,7 +146,7 @@ def cmake_defines_to_args(raw: T.Any, permissive: bool = False) -> T.List[str]:
return res
# TODO: this functuin will become obsolete once the `cmake_args` kwarg is dropped
# TODO: this function will become obsolete once the `cmake_args` kwarg is dropped
def check_cmake_args(args: T.List[str]) -> T.List[str]:
res = [] # type: T.List[str]
dis = ['-D' + x for x in blacklist_cmake_defs]

@ -288,7 +288,7 @@ class CMakeTraceParser:
raise CMakeException(f'CMake: {function}() {error}\n{tline}')
def _cmake_set(self, tline: CMakeTraceLine) -> None:
"""Handler for the CMake set() function in all variaties.
"""Handler for the CMake set() function in all varieties.
comes in three flavors:
set(<var> <value> [PARENT_SCOPE])
@ -509,7 +509,7 @@ class CMakeTraceParser:
targets += curr.split(';')
if not args:
return self._gen_exception('set_property', 'faild to parse argument list', tline)
return self._gen_exception('set_property', 'failed to parse argument list', tline)
if len(args) == 1:
# Tries to set property to nothing so nothing has to be done
@ -575,7 +575,7 @@ class CMakeTraceParser:
targets.append(curr)
# Now we need to try to reconsitute the original quoted format of the
# Now we need to try to reconstitute the original quoted format of the
# arguments, as a property value could have spaces in it. Unlike
# set_property() this is not context free. There are two approaches I
# can think of, both have drawbacks:
@ -586,7 +586,7 @@ class CMakeTraceParser:
#
# Neither of these is awesome for obvious reasons. I'm going to try
# option 1 first and fall back to 2, as 1 requires less code and less
# synchroniztion for cmake changes.
# synchronization for cmake changes.
#
# With the JSON output format, introduced in CMake 3.17, spaces are
# handled properly and we don't have to do either options

@ -1228,7 +1228,7 @@ class Compiler(HoldableObject, metaclass=abc.ABCMeta):
mode: CompileCheckMode = CompileCheckMode.COMPILE) -> CompilerArgs:
"""Arguments to pass the build_wrapper helper.
This generally needs to be set on a per-language baises. It provides
This generally needs to be set on a per-language basis. It provides
a hook for languages to handle dependencies and extra args. The base
implementation handles the most common cases, namely adding the
check_arguments, unwrapping dependencies, and appending extra args.
@ -1266,7 +1266,7 @@ class Compiler(HoldableObject, metaclass=abc.ABCMeta):
mode: str = 'compile', want_output: bool = False,
disable_cache: bool = False,
temp_dir: str = None) -> T.Iterator[T.Optional[CompileResult]]:
"""Helper for getting a cacched value when possible.
"""Helper for getting a cached value when possible.
This method isn't meant to be called externally, it's mean to be
wrapped by other methods like compiles() and links().
@ -1361,7 +1361,7 @@ def get_global_options(lang: str,
# If the compiler acts as a linker driver, and we're using the
# environment variable flags for both the compiler and linker
# arguments, then put the compiler flags in the linker flags as well.
# This is how autotools works, and the env vars freature is for
# This is how autotools works, and the env vars feature is for
# autotools compatibility.
largs.extend_value(comp_options)

@ -382,7 +382,7 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
if 'Arm C/C++/Fortran Compiler' in out:
arm_ver_match = re.search(r'version (\d+)\.(\d+)\.?(\d+)? \(build number (\d+)\)', out)
assert arm_ver_match is not None, 'for mypy' # because mypy *should* be complaning that this could be None
assert arm_ver_match is not None, 'for mypy' # because mypy *should* be complaining that this could be None
version = '.'.join([x for x in arm_ver_match.groups() if x is not None])
if lang == 'c':
cls = c.ArmLtdClangCCompiler
@ -667,7 +667,7 @@ def detect_fortran_compiler(env: 'Environment', for_machine: MachineChoice) -> C
if 'Arm C/C++/Fortran Compiler' in out:
cls = fortran.ArmLtdFlangFortranCompiler
arm_ver_match = re.search(r'version (\d+)\.(\d+)\.?(\d+)? \(build number (\d+)\)', out)
assert arm_ver_match is not None, 'for mypy' # because mypy *should* be complaning that this could be None
assert arm_ver_match is not None, 'for mypy' # because mypy *should* be complaining that this could be None
version = '.'.join([x for x in arm_ver_match.groups() if x is not None])
linker = guess_nix_linker(env, compiler, cls, version, for_machine)
return cls(
@ -1073,7 +1073,7 @@ def detect_d_compiler(env: 'Environment', for_machine: MachineChoice) -> Compile
if 'LLVM D compiler' in out:
cls = d.LLVMDCompiler
# LDC seems to require a file
# We cannot use NamedTemproraryFile on windows, its documented
# We cannot use NamedTemporaryFile on windows, its documented
# to not work for our uses. So, just use mkstemp and only have
# one path for simplicity.
o, f = tempfile.mkstemp('.d')
@ -1111,7 +1111,7 @@ def detect_d_compiler(env: 'Environment', for_machine: MachineChoice) -> Compile
elif 'The D Language Foundation' in out or 'Digital Mars' in out:
cls = d.DmdDCompiler
# DMD seems to require a file
# We cannot use NamedTemproraryFile on windows, its documented
# We cannot use NamedTemporaryFile on windows, its documented
# to not work for our uses. So, just use mkstemp and only have
# one path for simplicity.
o, f = tempfile.mkstemp('.d')

@ -16,7 +16,7 @@ from __future__ import annotations
"""Mixin classes to be shared between C and C++ compilers.
Without this we'll end up with awful diamond inherintance problems. The goal
Without this we'll end up with awful diamond inheritance problems. The goal
of this is to have mixin's, which are classes that are designed *not* to be
standalone, they only work through inheritance.
"""
@ -432,7 +432,7 @@ class CLikeCompiler(Compiler):
extra_args: T.Union[None, arglist.CompilerArgs, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]],
dependencies: T.Optional[T.List['Dependency']],
mode: CompileCheckMode = CompileCheckMode.COMPILE) -> arglist.CompilerArgs:
# TODO: the caller should handle the listfing of these arguments
# TODO: the caller should handle the listing of these arguments
if extra_args is None:
extra_args = []
else:

@ -423,7 +423,7 @@ class MSVCCompiler(VisualStudioLikeCompiler):
def __init__(self, target: str):
super().__init__(target)
# Visual Studio 2013 and erlier don't support the /utf-8 argument.
# Visual Studio 2013 and earlier don't support the /utf-8 argument.
# We want to remove it. We also want to make an explicit copy so we
# don't mutate class constant state
if mesonlib.version_compare(self.version, '<19.00') and '/utf-8' in self.always_args:

@ -472,7 +472,7 @@ class CoreData:
# want to overwrite options for such subprojects.
self.initialized_subprojects: T.Set[str] = set()
# For host == build configuraitons these caches should be the same.
# For host == build configurations these caches should be the same.
self.deps: PerMachine[DependencyCache] = PerMachineDefaultable.default(
self.is_cross_build(),
DependencyCache(self.options, MachineChoice.BUILD),
@ -586,7 +586,7 @@ class CoreData:
except TypeError:
return value
if option.name.endswith('dir') and value.is_absolute() and \
option not in BULITIN_DIR_NOPREFIX_OPTIONS:
option not in BUILTIN_DIR_NOPREFIX_OPTIONS:
try:
# Try to relativize the path.
value = value.relative_to(prefix)
@ -707,7 +707,7 @@ class CoreData:
elif key.name in {'wrap_mode', 'force_fallback_for'}:
# We could have the system dependency cached for a dependency that
# is now forced to use subproject fallback. We probably could have
# more fine grained cache invalidation, but better be safe.
# more fine-grained cache invalidation, but better be safe.
self.clear_deps_cache()
dirty = True
@ -838,7 +838,7 @@ class CoreData:
if pfk in options:
prefix = self.sanitize_prefix(options[pfk])
dirty |= self.options[OptionKey('prefix')].set_value(prefix)
for key in BULITIN_DIR_NOPREFIX_OPTIONS:
for key in BUILTIN_DIR_NOPREFIX_OPTIONS:
if key not in options:
dirty |= self.options[key].set_value(BUILTIN_OPTIONS[key].prefixed_default(key, prefix))
@ -862,7 +862,7 @@ class CoreData:
def set_default_options(self, default_options: T.MutableMapping[OptionKey, str], subproject: str, env: 'Environment') -> None:
# Main project can set default options on subprojects, but subprojects
# can only set default options on themself.
# can only set default options on themselves.
# Preserve order: if env.options has 'buildtype' it must come after
# 'optimization' if it is in default_options.
options: T.MutableMapping[OptionKey, T.Any] = OrderedDict()
@ -1194,7 +1194,7 @@ class BuiltinOption(T.Generic[_T, _U]):
if self.opt_type in [UserComboOption, UserIntegerOption]:
return self.default
try:
return BULITIN_DIR_NOPREFIX_OPTIONS[name][prefix]
return BUILTIN_DIR_NOPREFIX_OPTIONS[name][prefix]
except KeyError:
pass
return self.default
@ -1283,7 +1283,7 @@ BUILTIN_OPTIONS_PER_MACHINE: 'MutableKeyedOptionDictType' = OrderedDict([
# Special prefix-dependent defaults for installation directories that reside in
# a path outside of the prefix in FHS and common usage.
BULITIN_DIR_NOPREFIX_OPTIONS: T.Dict[OptionKey, T.Dict[str, str]] = {
BUILTIN_DIR_NOPREFIX_OPTIONS: T.Dict[OptionKey, T.Dict[str, str]] = {
OptionKey('sysconfdir'): {'/usr': '/etc'},
OptionKey('localstatedir'): {'/usr': '/var', '/usr/local': '/var/local'},
OptionKey('sharedstatedir'): {'/usr': '/var/lib', '/usr/local': '/var/local/lib'},

@ -80,7 +80,7 @@ if T.TYPE_CHECKING:
# 2. Find all boost libraries
# 2.1 Add all libraries in lib*
# 2.2 Filter out non boost libraries
# 2.3 Filter the renaining libraries based on the meson requirements (static/shared, etc.)
# 2.3 Filter the remaining libraries based on the meson requirements (static/shared, etc.)
# 2.4 Ensure that all libraries have the same boost tag (and are thus compatible)
# 3. Select the libraries matching the requested modules
@ -243,7 +243,7 @@ class BoostLibraryFile():
return any(self.mod_name.startswith(x) for x in BoostLibraryFile.boost_python_libs)
def fix_python_name(self, tags: T.List[str]) -> T.List[str]:
# Handle the boost_python naming madeness.
# Handle the boost_python naming madness.
# See https://github.com/mesonbuild/meson/issues/4788 for some distro
# specific naming variations.
other_tags = [] # type: T.List[str]

@ -489,7 +489,7 @@ class CMakeDependency(ExternalDependency):
libs_raw = [x for x in self.traceparser.get_cmake_var('PACKAGE_LIBRARIES') if x]
# CMake has a "fun" API, where certain keywords describing
# configurations can be in the *_LIBRARIES vraiables. See:
# configurations can be in the *_LIBRARIES variables. See:
# - https://github.com/mesonbuild/meson/issues/9197
# - https://gitlab.freedesktop.org/libnice/libnice/-/issues/140
# - https://cmake.org/cmake/help/latest/command/target_link_libraries.html#overview (the last point in the section)
@ -505,7 +505,7 @@ class CMakeDependency(ExternalDependency):
libs += [i]
# According to the CMake docs, a keyword only works for the
# directly the following item and all items without a keyword
# are implizitly `general`
# are implicitly `general`
cfg_matches = True
# Try to use old style variables if no module is specified

@ -42,7 +42,7 @@ function(meson_llvm_cmake_dynamic_available mod out)
return()
endif()
# Complex heurisic to filter all pseudo-components and skip invalid names
# Complex heuristic to filter all pseudo-components and skip invalid names
# LLVM_DYLIB_COMPONENTS will be 'all', because in other case we returned
# in previous check. 'all' is also handled there.
set(llvm_pseudo_components "native" "backend" "engine" "all-targets")

@ -332,7 +332,7 @@ class CursesSystemDependency(SystemDependency):
('curses', ['curses.h']),
]
# Not sure how else to elegently break out of both loops
# Not sure how else to elegantly break out of both loops
for lib, headers in candidates:
l = self.clib_compiler.find_library(lib, env, [])
if l:

@ -415,7 +415,7 @@ class PkgConfigDependency(ExternalDependency):
else:
variable = out.strip()
# pkg-config doesn't distinguish between empty and non-existent variables
# pkg-config doesn't distinguish between empty and nonexistent variables
# use the variable list to check for variable existence
if not variable:
ret, out, _ = self._call_pkgbin(['--print-variables', self.name])

@ -260,7 +260,7 @@ class QmakeQtDependency(_QtBase, ConfigToolDependency, metaclass=abc.ABCMeta):
self.tools = [f'qmake{self.qtver}', f'qmake-{self.name}', 'qmake']
# Add additional constraints that the Qt version is met, but preserve
# any version requrements the user has set as well. For example, if Qt5
# any version requirements the user has set as well. For example, if Qt5
# is requested, add "">= 5, < 6", but if the user has ">= 5.6", don't
# lose that.
kwargs = kwargs.copy()
@ -325,7 +325,7 @@ class QmakeQtDependency(_QtBase, ConfigToolDependency, metaclass=abc.ABCMeta):
self.compile_args.append('-I' + directory)
libfiles = self.clib_compiler.find_library(
self.qtpkgname + module + modules_lib_suffix, self.env,
mesonlib.listify(libdir)) # TODO: shouldn't be necissary
mesonlib.listify(libdir)) # TODO: shouldn't be necessary
if libfiles:
libfile = libfiles[0]
else:

@ -28,7 +28,7 @@ from pathlib import Path
# and cross file currently), and also assists with the reading environment
# variables.
#
# At this time there isn't an ironclad difference between this an other sources
# At this time there isn't an ironclad difference between this and other sources
# of state like `coredata`. But one rough guide is much what is in `coredata` is
# the *output* of the configuration process: the final decisions after tests.
# This, on the other hand has *inputs*. The config files are parsed, but
@ -167,7 +167,7 @@ class Properties:
return language + '_stdlib' in self.properties
# Some of get_stdlib, get_root, get_sys_root are wider than is actually
# true, but without heterogenious dict annotations it's not practical to
# true, but without heterogeneous dict annotations it's not practical to
# narrow them
def get_stdlib(self, language: str) -> T.Union[str, T.List[str]]:
stdlib = self.properties[language + '_stdlib']

@ -674,7 +674,7 @@ class Environment:
# time) until we're instantiating that `Compiler`
# object. This is required so that passing
# `-Dc_args=` on the command line and `$CFLAGS`
# have subtely different behavior. `$CFLAGS` will be
# have subtly different behavior. `$CFLAGS` will be
# added to the linker command line if the compiler
# acts as a linker driver, `-Dc_args` will not.
#

@ -1,4 +1,4 @@
# SPDX-Licnese-Identifier: Apache-2.0
# SPDX-License-Identifier: Apache-2.0
# Copyright 2012-2021 The Meson development team
# Copyright © 2021 Intel Corporation
from __future__ import annotations
@ -60,7 +60,7 @@ if T.TYPE_CHECKING:
args: T.List[str]
dependencies: T.List[dependencies.Dependency]
class CompupteIntKW(CommonKW):
class ComputeIntKW(CommonKW):
guess: T.Optional[int]
high: T.Optional[int]
@ -405,7 +405,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
KwargInfo('guess', (int, NoneType)),
*_COMMON_KWS,
)
def compute_int_method(self, args: T.Tuple[str], kwargs: 'CompupteIntKW') -> int:
def compute_int_method(self, args: T.Tuple[str], kwargs: 'ComputeIntKW') -> int:
expression = args[0]
extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
deps, msg = self._determine_dependencies(kwargs['dependencies'], compile_only=self.compiler.is_cross)

@ -235,7 +235,7 @@ class InterpreterRuleRelaxation(Enum):
generate a Meson AST via introspection, etc.
'''
ALLOW_BUILD_DIR_FILE_REFFERENCES = 1
ALLOW_BUILD_DIR_FILE_REFERENCES = 1
permitted_dependency_kwargs = {
'allow_fallback',
@ -1001,7 +1001,7 @@ class Interpreter(InterpreterBase, HoldableObject):
# Duplicates are possible when subproject uses files from project root
if build_def_files:
self.build_def_files.update(build_def_files)
# We always need the subi.build_def_files, to propgate sub-sub-projects
# We always need the subi.build_def_files, to propagate sub-sub-projects
self.build_def_files.update(subi.build_def_files)
self.build.merge(subi.build)
self.build.subprojects[subp_name] = subi.project_version
@ -1048,7 +1048,7 @@ class Interpreter(InterpreterBase, HoldableObject):
[str(f) for f in cm_int.bs_files],
is_translated=True,
relaxations={
InterpreterRuleRelaxation.ALLOW_BUILD_DIR_FILE_REFFERENCES,
InterpreterRuleRelaxation.ALLOW_BUILD_DIR_FILE_REFERENCES,
}
)
result.cm_interpreter = cm_int
@ -1365,7 +1365,7 @@ class Interpreter(InterpreterBase, HoldableObject):
section, values, kwargs['bool_yn'], kwargs['list_sep'], self.subproject)
def _print_summary(self) -> None:
# Add automatic 'Supbrojects' section in main project.
# Add automatic 'Subprojects' section in main project.
all_subprojects = collections.OrderedDict()
for name, subp in sorted(self.subprojects.items()):
value = subp.found()
@ -1997,7 +1997,7 @@ class Interpreter(InterpreterBase, HoldableObject):
build_by_default = kwargs['build_always']
build_always_stale = kwargs['build_by_default']
# These are are nullaable so that we can know whether they're explicitly
# These are nullable so that we can know whether they're explicitly
# set or not. If they haven't been overwritten, set them to their true
# default
if build_by_default is None:
@ -2019,9 +2019,9 @@ class Interpreter(InterpreterBase, HoldableObject):
command[0] = self.find_program_impl([command[0]])
if len(inputs) > 1 and kwargs['feed']:
raise InvalidArguments('custom_target: "feed" keyword argument can only be used used with a single input')
raise InvalidArguments('custom_target: "feed" keyword argument can only be used with a single input')
if len(kwargs['output']) > 1 and kwargs['capture']:
raise InvalidArguments('custom_target: "capture" keyword argument can only be used used with a single output')
raise InvalidArguments('custom_target: "capture" keyword argument can only be used with a single output')
if kwargs['capture'] and kwargs['console']:
raise InvalidArguments('custom_target: "capture" and "console" keyword arguments are mutually exclusive')
for c in command:
@ -2370,7 +2370,7 @@ class Interpreter(InterpreterBase, HoldableObject):
absname = os.path.join(self.environment.get_source_dir(), buildfilename)
if not os.path.isfile(absname):
self.subdir = prev_subdir
raise InterpreterException(f"Non-existent build file '{buildfilename!s}'")
raise InterpreterException(f"Nonexistent build file '{buildfilename!s}'")
with open(absname, encoding='utf-8') as f:
code = f.read()
assert isinstance(code, str)
@ -3034,7 +3034,7 @@ class Interpreter(InterpreterBase, HoldableObject):
inputtype = 'directory'
else:
inputtype = 'file'
if InterpreterRuleRelaxation.ALLOW_BUILD_DIR_FILE_REFFERENCES in self.relaxations and builddir in norm.parents:
if InterpreterRuleRelaxation.ALLOW_BUILD_DIR_FILE_REFERENCES in self.relaxations and builddir in norm.parents:
return
if srcdir not in norm.parents:
# Grabbing files outside the source tree is ok.
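
The check above reduces to a parent-directory containment test on normalized paths. A tiny self-contained sketch (helper name invented):

from pathlib import Path

def is_inside(path: Path, root: Path) -> bool:
    path, root = path.resolve(), root.resolve()
    return root == path or root in path.parents

assert is_inside(Path('/srv/src/sub/file.c'), Path('/srv/src'))
assert not is_inside(Path('/srv/build/out.h'), Path('/srv/src'))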

@ -57,7 +57,7 @@ class FuncTest(FuncBenchmark):
"""Keyword Arguments for `test`
`test` only adds the `is_prallel` argument over benchmark, so inherintance
`test` only adds the `is_parallel` argument over benchmark, so inheritance
is helpful here.
"""

@ -377,7 +377,7 @@ class MesonMain(MesonInterpreterObject):
def _override_dependency_impl(self, name: str, dep: dependencies.Dependency, kwargs: 'FuncOverrideDependency',
static: T.Optional[bool], permissive: bool = False) -> None:
# We need the cast here as get_dep_identifier works on such a dict,
# which FuncOverrideDependency is, but mypy can't fgure that out
# which FuncOverrideDependency is, but mypy can't figure that out
nkwargs = T.cast('T.Dict[str, T.Any]', kwargs.copy())
if static is None:
del nkwargs['static']

@ -87,9 +87,9 @@ def _install_mode_validator(mode: T.List[T.Union[str, bool, int]]) -> T.Optional
return f'permission character 9 must be "-", "t", "T", or "x", not {perms[8]}'
if len(mode) >= 2 and not isinstance(mode[1], (int, str, bool)):
return 'second componenent can only be a string, number, or False'
return 'second component can only be a string, number, or False'
if len(mode) >= 3 and not isinstance(mode[2], (int, str, bool)):
return 'third componenent can only be a string, number, or False'
return 'third component can only be a string, number, or False'
return None
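
A reduced sketch of the value shape the checks above accept: a (permissions, owner, group) triple whose second and third entries may be a name, a numeric id, or False (names below are invented):

import typing as T

ModeTriple = T.Sequence[T.Union[str, int, bool]]

def check_owner_group(mode: ModeTriple) -> T.Optional[str]:
    for idx, label in ((1, 'second'), (2, 'third')):
        if len(mode) > idx and not isinstance(mode[idx], (str, int, bool)):
            return f'{label} component can only be a string, number, or False'
    return None

assert check_owner_group(['rwxr-sr-x', 'root', 0]) is None
assert check_owner_group(['rw-r--r--', 3.14]) is not None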
@ -211,7 +211,7 @@ def _env_validator(value: T.Union[EnvironmentVariables, T.List['TYPE_var'], T.Di
return None
def _options_validator(value: T.Union[EnvironmentVariables, T.List['TYPE_var'], T.Dict[str, 'TYPE_var'], str, None]) -> T.Optional[str]:
# Reusing the env validator is a littl overkill, but nicer than duplicating the code
# Reusing the env validator is a little overkill, but nicer than duplicating the code
return _env_validator(value, allow_dict_list=False)
def split_equal_string(input: str) -> T.Tuple[str, str]:

@ -470,7 +470,7 @@ def typed_kwargs(name: str, *types: KwargInfo, allow_unknown: bool = False) -> T
information. For non-required values it sets the value to a default, which
means the value will always be provided.
If type tyhpe is a :class:ContainerTypeInfo, then the default value will be
If type is a :class:ContainerTypeInfo, then the default value will be
passed as an argument to the container initializer, making a shallow copy
:param name: the name of the function, including the object it's attached to
@ -583,7 +583,7 @@ def typed_kwargs(name: str, *types: KwargInfo, allow_unknown: bool = False) -> T
else:
# set the value to the default, this ensuring all kwargs are present
# This both simplifies the typing checking and the usage
assert check_value_type(types_tuple, info.default), f'In funcion {name} default value of {info.name} is not a valid type, got {type(info.default)} expected {types_description(types_tuple)}'
assert check_value_type(types_tuple, info.default), f'In function {name} default value of {info.name} is not a valid type, got {type(info.default)} expected {types_description(types_tuple)}'
# Create a shallow copy of the container. This allows mutable
# types to be used safely as default values
kwargs[info.name] = copy.copy(info.default)
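
A minimal illustration of why the default container is shallow-copied above: without the copy, every call would end up sharing one mutable default (the helper and names are invented):

import copy
import typing as T

DEFAULT_ARGS: T.List[str] = []

def fill_default(kwargs: T.Dict[str, T.Any]) -> None:
    kwargs.setdefault('args', copy.copy(DEFAULT_ARGS))

a: T.Dict[str, T.Any] = {}
b: T.Dict[str, T.Any] = {}
fill_default(a)
fill_default(b)
a['args'].append('-Wall')
assert b['args'] == []  # holds only because each call copied the default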

@ -525,14 +525,14 @@ class InterpreterBase:
return None
def method_call(self, node: mparser.MethodNode) -> T.Optional[InterpreterObject]:
invokable = node.source_object
invocable = node.source_object
obj: T.Optional[InterpreterObject]
if isinstance(invokable, mparser.IdNode):
object_display_name = f'variable "{invokable.value}"'
obj = self.get_variable(invokable.value)
if isinstance(invocable, mparser.IdNode):
object_display_name = f'variable "{invocable.value}"'
obj = self.get_variable(invocable.value)
else:
object_display_name = invokable.__class__.__name__
obj = self.evaluate_statement(invokable)
object_display_name = invocable.__class__.__name__
obj = self.evaluate_statement(invocable)
method_name = node.name
(h_args, h_kwargs) = self.reduce_arguments(node.args)
(args, kwargs) = self._unholder_args(h_args, h_kwargs)

@ -96,7 +96,7 @@ def guess_win_linker(env: 'Environment', compiler: T.List[str], comp_class: T.Ty
if value is not None and invoked_directly:
compiler = value
# We've already hanedled the non-direct case above
# We've already handled the non-direct case above
p, o, e = Popen_safe(compiler + check_args)
if 'LLD' in o.split('\n', maxsplit=1)[0]:

@ -928,7 +928,7 @@ class WASMDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, Dyna
class CcrxDynamicLinker(DynamicLinker):
"""Linker for Renesis CCrx compiler."""
"""Linker for Renesas CCrx compiler."""
id = 'rlink'
@ -1216,7 +1216,7 @@ NvidiaHPC_StaticLinker = PGIStaticLinker
class VisualStudioLikeLinkerMixin:
"""Mixin class for for dynamic linkers that act like Microsoft's link.exe."""
"""Mixin class for dynamic linkers that act like Microsoft's link.exe."""
if T.TYPE_CHECKING:
for_machine = MachineChoice.HOST

@ -207,10 +207,10 @@ def set_mode(path: str, mode: T.Optional['FileMode'], default_umask: T.Union[str
except PermissionError as e:
print(f'{path!r}: Unable to set owner {mode.owner!r} and group {mode.group!r}: {e.strerror}, ignoring...')
except LookupError:
print(f'{path!r}: Non-existent owner {mode.owner!r} or group {mode.group!r}: ignoring...')
print(f'{path!r}: Nonexistent owner {mode.owner!r} or group {mode.group!r}: ignoring...')
except OSError as e:
if e.errno == errno.EINVAL:
print(f'{path!r}: Non-existent numeric owner {mode.owner!r} or group {mode.group!r}: ignoring...')
print(f'{path!r}: Nonexistent numeric owner {mode.owner!r} or group {mode.group!r}: ignoring...')
else:
raise
# Must set permissions *after* setting owner/group otherwise the
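
A simplified, POSIX-only sketch of the ordering described above; on many systems changing ownership can clear setuid/setgid bits, so the mode is applied afterwards (helper name invented):

import os
import shutil

def set_owner_then_mode(path: str, owner: str, group: str, mode: int) -> None:
    shutil.chown(path, user=owner, group=group)  # may drop suid/sgid bits
    os.chmod(path, mode)                         # so the mode is applied last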

@ -2149,7 +2149,7 @@ class GnomeModule(ExtensionModule):
)
# So to try our best to get this to just work we need:
# - link with with the correct library
# - link with the correct library
# - include the vapi and dependent vapi files in sources
# - add relevant directories to include dirs
incs = [build.IncludeDirs(state.subdir, ['.'] + vapi_includes, False)]

@ -55,7 +55,7 @@ class KeyvalModule(ExtensionModule):
return result
@noKwargs
@typed_pos_args('keyval.laod', (str, mesonlib.File))
@typed_pos_args('keyval.load', (str, mesonlib.File))
def load(self, state: 'ModuleState', args: T.Tuple['mesonlib.FileOrString'], kwargs: T.Dict[str, T.Any]) -> T.Dict[str, str]:
s = args[0]
is_built = False

@ -164,7 +164,7 @@ class WindowsModule(ExtensionModule):
elif isinstance(src, build.CustomTargetIndex):
FeatureNew.single_use('windows.compile_resource CustomTargetIndex in positional arguments', '0.61.0',
state.subproject, location=state.current_node)
# This dance avoids a case where two indexs of the same
# This dance avoids a case where two indexes of the same
# target are given as separate arguments.
yield (f'{src.get_id()}_{src.target.get_outputs().index(src.output)}',
f'windows_compile_resources_{src.get_filename()}', src)

@ -185,7 +185,7 @@ def returncode_to_status(retcode: int) -> str:
# functions here because the status returned by subprocess is munged. It
# returns a negative value if the process was killed by a signal rather than
# the raw status returned by `wait()`. Also, If a shell sits between Meson
# the the actual unit test that shell is likely to convert a termination due
# the actual unit test that shell is likely to convert a termination due
# to a signal into an exit status of 128 plus the signal number.
if retcode < 0:
signum = -retcode
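
A condensed standalone sketch of the convention spelled out above: a negative return code means death by signal, while a shell in between usually reports 128 plus the signal number (function name invented):

import signal

def returncode_to_text(retcode: int) -> str:
    if retcode < 0:
        try:
            name = signal.Signals(-retcode).name
        except ValueError:
            name = str(-retcode)
        return f'killed by signal {name}'
    if retcode > 128:
        return f'exited with status {retcode} (possibly signal {retcode - 128} behind a shell)'
    return f'exited with status {retcode}'

# returncode_to_text(-11) -> 'killed by signal SIGSEGV' on POSIX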

@ -354,7 +354,7 @@ class OverrideProgram(ExternalProgram):
def find_external_program(env: 'Environment', for_machine: MachineChoice, name: str,
display_name: str, default_names: T.List[str],
allow_default_for_cross: bool = True) -> T.Generator['ExternalProgram', None, None]:
"""Find an external program, chcking the cross file plus any default options."""
"""Find an external program, checking the cross file plus any default options."""
# Lookup in cross or machine file.
potential_cmd = env.lookup_binary_entry(for_machine, name)
if potential_cmd is not None:
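
A compact sketch of the lookup order the docstring above describes: prefer the cross/machine-file entry, then fall back to the default names found on PATH (names and return type simplified; the real generator yields program objects):

import shutil
import typing as T

def candidate_programs(machine_file_entry: T.Optional[str],
                       default_names: T.List[str]) -> T.Iterator[str]:
    if machine_file_entry is not None:
        yield machine_file_entry
    for name in default_names:
        path = shutil.which(name)
        if path is not None:
            yield path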

@ -569,27 +569,27 @@ class Rewriter:
if key not in arg_node.kwargs:
arg_node.kwargs[key] = None
modifyer = kwargs_def[key](arg_node.kwargs[key])
if not modifyer.can_modify():
modifier = kwargs_def[key](arg_node.kwargs[key])
if not modifier.can_modify():
mlog.log(' -- Skipping', mlog.bold(key), 'because it is to complex to modify')
# Apply the operation
val_str = str(val)
if cmd['operation'] == 'set':
mlog.log(' -- Setting', mlog.bold(key), 'to', mlog.yellow(val_str))
modifyer.set_value(val)
modifier.set_value(val)
elif cmd['operation'] == 'add':
mlog.log(' -- Adding', mlog.yellow(val_str), 'to', mlog.bold(key))
modifyer.add_value(val)
modifier.add_value(val)
elif cmd['operation'] == 'remove':
mlog.log(' -- Removing', mlog.yellow(val_str), 'from', mlog.bold(key))
modifyer.remove_value(val)
modifier.remove_value(val)
elif cmd['operation'] == 'remove_regex':
mlog.log(' -- Removing all values matching', mlog.yellow(val_str), 'from', mlog.bold(key))
modifyer.remove_regex(val)
modifier.remove_regex(val)
# Write back the result
arg_node.kwargs[key] = modifyer.get_node()
arg_node.kwargs[key] = modifier.get_node()
num_changed += 1
# Convert the keys back to IdNode's

@ -35,7 +35,7 @@ def run(argsv: T.List[str]) -> int:
commands += [[]]
continue
i = i.replace('"', '') # Remove lefover quotes
i = i.replace('"', '') # Remove leftover quotes
commands[-1] += [i]
# Execute

@ -350,7 +350,7 @@ class Elf(DataSizes):
sys.exit(msg)
# The linker does read-only string deduplication. If there is a
# string that shares a suffix with the rpath, they might get
# dedupped. This means changing the rpath string might break something
# deduped. This means changing the rpath string might break something
# completely unrelated. This has already happened once with X.org.
# Thus we want to keep this change as small as possible to minimize
# the chance of obliterating other strings. It might still happen

@ -2295,7 +2295,7 @@ class OptionKey:
def evolve(self, name: T.Optional[str] = None, subproject: T.Optional[str] = None,
machine: T.Optional[MachineChoice] = None, lang: T.Optional[str] = '',
module: T.Optional[str] = '') -> 'OptionKey':
"""Create a new copy of this key, but with alterted members.
"""Create a new copy of this key, but with altered members.
For example:
>>> a = OptionKey('foo', '', MachineChoice.Host)
@ -2318,11 +2318,11 @@ class OptionKey:
return self.evolve(subproject='')
def as_build(self) -> 'OptionKey':
"""Convenience method for key.evolve(machine=MachinceChoice.BUILD)."""
"""Convenience method for key.evolve(machine=MachineChoice.BUILD)."""
return self.evolve(machine=MachineChoice.BUILD)
def as_host(self) -> 'OptionKey':
"""Convenience method for key.evolve(machine=MachinceChoice.HOST)."""
"""Convenience method for key.evolve(machine=MachineChoice.HOST)."""
return self.evolve(machine=MachineChoice.HOST)
def is_backend(self) -> bool:
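
A stripped-down sketch of the evolve()/as_build()/as_host() pattern documented above: produce an altered copy while leaving the original key untouched (this stand-in class is invented, not Meson's OptionKey):

import dataclasses
import enum

class MachineChoice(enum.Enum):
    BUILD = 0
    HOST = 1

@dataclasses.dataclass(frozen=True)
class Key:
    name: str
    machine: MachineChoice = MachineChoice.HOST

    def as_build(self) -> 'Key':
        return dataclasses.replace(self, machine=MachineChoice.BUILD)

k = Key('b_lto')
assert k.as_build().machine is MachineChoice.BUILD and k.machine is MachineChoice.HOST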

@ -70,7 +70,7 @@ def _setup_vsenv(force: bool) -> bool:
)
bat_info = json.loads(bat_json)
if not bat_info:
# VS installer instelled but not VS itself maybe?
# VS installer installed but not VS itself maybe?
raise MesonException('Could not parse vswhere.exe output')
bat_root = pathlib.Path(bat_info[0]['installationPath'])
if windows_detect_native_arch() == 'arm64':

@ -153,7 +153,7 @@ class InstalledFile:
if self.language in {'c', 'cpp'}:
has_pdb = canonical_compiler == 'msvc'
elif self.language == 'd':
# dmd's optlink does not genearte pdb iles
# dmd's optlink does not generate pdb files
has_pdb = env.coredata.compilers.host['d'].linker.id in {'link', 'lld-link'}
# Abort if the platform does not match

@ -2,5 +2,5 @@ project('cmakeSubTest', ['c', 'cpp'])
cm = import('cmake')
sub_pro = cm.subproject('nothinig', required: false)
sub_pro = cm.subproject('nothing', required: false)
assert(not sub_pro.found(), 'subproject found() reports wrong value')

@ -161,7 +161,7 @@ cfile = configure_file(input : 'config.h.in',
install_dir : false,
configuration : conf)
# test intsall_dir with install: false
# test install_dir with install: false
cfile = configure_file(input : 'config.h.in',
output : 'do_not_get_installed_in_install_dir.h',
install : false,

@ -97,7 +97,7 @@ assert(if_is_not_disabled, 'Disabler in is_variable should not skip blocks')
get_d = get_variable('d6')
assert(is_disabler(get_d), 'get_variable should yield a disabler')
get_fallback_d = get_variable('nonexistant', disabler())
get_fallback_d = get_variable('nonexistent', disabler())
assert(is_disabler(get_fallback_d), 'get_variable fallback should yield a disabler')
var_true = true

@ -18,7 +18,7 @@ assert(tool.found())
assert(tool.full_path() != '')
assert(tool.full_path() == tool.path())
# six_meson_exe is an overritten project executable
# six_meson_exe is an overridden project executable
six_prog = find_program('six_meson_exe')
assert(six_prog.found())
assert(six_prog.full_path() != '')

@ -44,5 +44,5 @@ foreach comp : [meson.get_compiler('c'), meson.get_compiler('cpp')]
# This header exists in the source and the builddir, but we still must not
# find it since we are looking in the system directories.
assert(not comp.check_header(non_existent_header, prefix : fallback),
'Found non-existent header.')
'Found nonexistent header.')
endforeach

@ -10,7 +10,7 @@ disabled_subproj = subproject('disabled_sub', required: get_option('disabled-sub
assert(disabled_subproj.found() == false, 'Disabled subproject should be NOT found')
disabled_dep = dependency('', fallback: ['disabled_sub', 'libSub'], required: false)
assert(disabled_dep.found() == false, 'Subprojetc was disabled, it should never be built.')
assert(disabled_dep.found() == false, 'Subproject was disabled, it should never be built.')
nothing = executable('nothing', 'nothing.c', dependencies: [disabled_dep])
subproj_with_missing_dep = subproject('auto_sub_with_missing_dep', required: get_option('auto-sub-with-missing-dep'))

@ -11,7 +11,7 @@
#endif
#if !defined(GLOBAL_HOST) && !defined(GLOBAL_BUILD)
#error "Neither global_host nor glogal_build is set."
#error "Neither global_host nor global_build is set."
#endif
#if defined(GLOBAL_HOST) && defined(GLOBAL_BUILD)

@ -120,7 +120,7 @@ assert(not fs.is_samepath(f1, 'subdir/subdirfile.txt'), 'is_samepath known bad c
assert(not fs.is_samepath('not-a-path', f2), 'is_samepath should not error if path(s) do not exist')
f = files('meson.build', 'subdir/../meson.build')
assert(fs.is_samepath(f[0], f[1]), 'is_samepath not detercting same files')
assert(fs.is_samepath(f[0], f[1]), 'is_samepath not detecting same files')
if not is_windows and build_machine.system() != 'cygwin'
assert(fs.is_samepath(symlink, 'meson.build'), 'symlink is_samepath fail')

@ -8,16 +8,16 @@ x = meson.get_external_property('astring', native: true)
assert(x=='mystring', 'did not get native property with native:true and non-cross build.')
x = meson.get_external_property('astring', 'fallback', native: false)
assert(x==ref, 'did not get get native property with native:false and non-cross build.')
assert(x==ref, 'did not get native property with native:false and non-cross build.')
x = meson.get_external_property('notexist', 'fallback')
x = meson.get_external_property('nonexistent', 'fallback')
assert(x=='fallback', 'fallback did not work')
x = meson.get_external_property('notexist', 'fallback', native: true)
x = meson.get_external_property('nonexistent', 'fallback', native: true)
assert(x=='fallback', 'fallback native:true did not work')
x = meson.get_external_property('notexist', 'fallback', native: false)
x = meson.get_external_property('nonexistent', 'fallback', native: false)
assert(x=='fallback', 'fallback native:false did not work')
