Merge remote-tracking branch 'upstream/master' into lang-enum

lang-enum
John Ericson 5 years ago
commit eaf6343c06
Changed files (lines changed per file):
  1. .github/workflows/images.yml (1)
  2. .github/workflows/lint_mypy.yml (5)
  3. .github/workflows/os_comp.yml (5)
  4. .github/workflows/unusedargs_missingreturn.yml (6)
  5. .gitignore (1)
  6. .mypy.ini (0)
  7. .travis.yml (3)
  8. MANIFEST.in (6)
  9. azure-pipelines.yml (29)
  10. ci/ciimage/arch/install.sh (4)
  11. ci/ciimage/bionic/image.json (8)
  12. ci/ciimage/bionic/install.sh (59)
  13. ci/ciimage/build.py (5)
  14. ci/ciimage/eoan/install.sh (4)
  15. ci/ciimage/fedora/install.sh (4)
  16. ci/ciimage/opensuse/install.sh (8)
  17. ci/run.ps1 (3)
  18. ci/travis_install.sh (4)
  19. ci/travis_script.sh (8)
  20. cross/armcc.txt (2)
  21. cross/armclang-linux.txt (5)
  22. cross/armclang.txt (2)
  23. cross/c2000.txt (6)
  24. cross/ccrx.txt (2)
  25. cross/iphone.txt (6)
  26. cross/tvos.txt (7)
  27. cross/ubuntu-armhf.txt (6)
  28. cross/wasm.txt (3)
  29. cross/xc16.txt (2)
  30. data/macros.meson (25)
  31. data/schema.xsd (96)
  32. data/syntax-highlighting/vim/syntax/meson.vim (3)
  33. data/test.schema.json (25)
  34. docs/markdown/Adding-new-projects-to-wrapdb.md (53)
  35. docs/markdown/Build-options.md (3)
  36. docs/markdown/Builtin-options.md (105)
  37. docs/markdown/CMake-module.md (77)
  38. docs/markdown/Configuring-a-build-directory.md (4)
  39. docs/markdown/Continuous-Integration.md (18)
  40. docs/markdown/Contributing.md (41)
  41. docs/markdown/Creating-OSX-packages.md (2)
  42. docs/markdown/Creating-releases.md (67)
  43. docs/markdown/Cross-compilation.md (9)
  44. docs/markdown/Dependencies.md (13)
  45. docs/markdown/Design-rationale.md (4)
  46. docs/markdown/FAQ.md (4)
  47. docs/markdown/Feature-autodetection.md (4)
  48. docs/markdown/Gnome-module.md (10)
  49. docs/markdown/IDE-integration.md (96)
  50. docs/markdown/IndepthTutorial.md (6)
  51. docs/markdown/Installing.md (20)
  52. docs/markdown/Keyval-module.md (27)
  53. docs/markdown/Localisation.md (4)
  54. docs/markdown/Machine-files.md (192)
  55. docs/markdown/Meson-sample.md (2)
  56. docs/markdown/MesonCI.md (53)
  57. docs/markdown/Precompiled-headers.md (2)
  58. docs/markdown/Project-templates.md (27)
  59. docs/markdown/Qt5-module.md (2)
  60. docs/markdown/Quick-guide.md (16)
  61. docs/markdown/Reference-manual.md (988)
  62. docs/markdown/Reference-tables.md (135)
  63. docs/markdown/Release-notes-for-0.54.0.md (7)
  64. docs/markdown/Release-notes-for-0.55.0.md (307)
  65. docs/markdown/Run-targets.md (2)
  66. docs/markdown/Running-Meson.md (45)
  67. docs/markdown/Style-guide.md (6)
  68. docs/markdown/Subprojects.md (38)
  69. docs/markdown/Syntax.md (190)
  70. docs/markdown/Tutorial.md (6)
  71. docs/markdown/Unit-tests.md (153)
  72. docs/markdown/Users.md (5)
  73. docs/markdown/Using-multiple-build-directories.md (4)
  74. docs/markdown/Vala.md (2)
  75. docs/markdown/Vs-External.md (6)
  76. docs/markdown/Wrap-dependency-system-manual.md (125)
  77. docs/markdown/Wrap-maintainer-tools.md (17)
  78. docs/markdown/Wrap-review-guidelines.md (42)
  79. docs/markdown/howtox.md (46)
  80. docs/markdown/legal.md (2)
  81. docs/markdown/snippets/dist_not_tests.md (5)
  82. docs/markdown/snippets/keyval.md (7)
  83. docs/markdown/snippets/per_subproject.md (4)
  84. docs/markdown/snippets/project_options_in_machine_files.md (52)
  85. docs/markdown/snippets/wrap_fallback.md (4)
  86. docs/markdown_dynamic/Commands.md (296)
  87. docs/meson.build (32)
  88. docs/sitemap.txt (6)
  89. docs/theme/extra/templates/navbar_links.html (2)
  90. man/meson.1 (2)
  91. mesonbuild/arglist.py (331)
  92. mesonbuild/ast/__init__.py (3)
  93. mesonbuild/ast/interpreter.py (5)
  94. mesonbuild/ast/introspection.py (2)
  95. mesonbuild/ast/printer.py (160)
  96. mesonbuild/ast/visitor.py (3)
  97. mesonbuild/backend/backends.py (253)
  98. mesonbuild/backend/ninjabackend.py (476)
  99. mesonbuild/backend/vs2010backend.py (79)
  100. mesonbuild/build.py (57)
  Some files were not shown because too many files have changed in this diff.

@ -31,6 +31,7 @@ jobs:
- { name: CUDA (on Arch), id: cuda }
- { name: Fedora, id: fedora }
- { name: OpenSUSE, id: opensuse }
- { name: Ubuntu Bionic, id: bionic }
- { name: Ubuntu Eoan, id: eoan }
steps:
- uses: actions/checkout@v2

@ -19,7 +19,8 @@ jobs:
- uses: actions/setup-python@v1
with:
python-version: '3.x'
- run: python -m pip install pylint
# pylint version constraint can be removed when https://github.com/PyCQA/pylint/issues/3524 is resolved
- run: python -m pip install pylint==2.4.4
- run: pylint mesonbuild
mypy:
@ -30,4 +31,4 @@ jobs:
with:
python-version: '3.x'
- run: python -m pip install mypy
- run: mypy --follow-imports=skip mesonbuild/interpreterbase.py mesonbuild/mtest.py mesonbuild/minit.py mesonbuild/mintro.py mesonbuild/mparser.py mesonbuild/msetup.py mesonbuild/ast mesonbuild/wrap tools/ mesonbuild/modules/fs.py mesonbuild/dependencies/boost.py mesonbuild/dependencies/mpi.py mesonbuild/dependencies/hdf5.py mesonbuild/compilers/mixins/intel.py mesonbuild/mlog.py mesonbuild/mcompile.py mesonbuild/mesonlib.py
- run: mypy --follow-imports=skip mesonbuild/interpreterbase.py mesonbuild/mtest.py mesonbuild/minit.py mesonbuild/mintro.py mesonbuild/mparser.py mesonbuild/msetup.py mesonbuild/ast mesonbuild/wrap tools/ mesonbuild/modules/fs.py mesonbuild/dependencies/boost.py mesonbuild/dependencies/mpi.py mesonbuild/dependencies/hdf5.py mesonbuild/compilers/mixins/intel.py mesonbuild/mlog.py mesonbuild/mcompile.py mesonbuild/mesonlib.py mesonbuild/arglist.py

@ -11,7 +11,7 @@ jobs:
name: Ubuntu 16.04
runs-on: ubuntu-16.04
steps:
- uses: actions/checkout@v1
- uses: actions/checkout@v2
- name: Install Dependencies
run: |
sudo apt update -yq
@ -39,9 +39,10 @@ jobs:
- { name: CUDA (on Arch), id: cuda }
- { name: Fedora, id: fedora }
- { name: OpenSUSE, id: opensuse }
- { name: Ubuntu Bionic, id: bionic }
container: mesonbuild/${{ matrix.cfg.id }}:latest
steps:
- uses: actions/checkout@v1
- uses: actions/checkout@v2
- name: Run tests
# All environment variables are stored inside the docker image in /ci/env_vars.sh
# They are defined in the `env` section in each image.json. CI_ARGS should be set

@ -55,7 +55,11 @@ jobs:
- uses: actions/setup-python@v1
with:
python-version: '3.x'
- run: pip install ninja pefile
# ninja==1.10 pypi release didn't ship with windows binaries, which causes
# pip to try to build it which fails on Windows. Pin the previous version
# for now. We can update once that's fixed.
# https://pypi.org/project/ninja/1.10.0/#files
- run: pip install ninja==1.9.0.post1 pefile
- run: python run_project_tests.py --only platform-windows
env:
CI: "1"

.gitignore

@ -30,3 +30,4 @@ packagecache
/docs/hotdoc-private*
*.pyc
/*venv*

@ -31,9 +31,10 @@ matrix:
compiler: gcc
include:
# Test cross builds separately, they do not use the global compiler
# Also hijack one cross build to test long commandline handling codepath (and avoid overloading Travis)
- os: linux
compiler: gcc
env: RUN_TESTS_ARGS="--cross ubuntu-armhf.txt --cross linux-mingw-w64-64bit.txt"
env: RUN_TESTS_ARGS="--cross ubuntu-armhf.txt --cross linux-mingw-w64-64bit.txt" MESON_RSP_THRESHOLD=0
- os: linux
compiler: gcc
env: RUN_TESTS_ARGS="--cross ubuntu-armhf.txt --cross linux-mingw-w64-64bit.txt" MESON_ARGS="--unity=on"
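For local reproduction, a minimal sketch of exercising this code path outside CI, assuming (as the comment above indicates) that `MESON_RSP_THRESHOLD=0` forces the long-command-line (response file) handling for every command:
```sh
# Hypothetical local equivalent of the CI job above: force response files
# and run the cross-build test suite.
export MESON_RSP_THRESHOLD=0
./run_tests.py --cross ubuntu-armhf.txt --cross linux-mingw-w64-64bit.txt
```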

@ -4,10 +4,9 @@ graft cross
graft data
graft graphics
graft man
graft syntax-highlighting
graft tools
include authors.txt
include contributing.txt
include contributing.md
include COPYING
include README.md
include run_cross_test.py
@ -15,7 +14,6 @@ include run_tests.py
include run_unittests.py
include run_meson_command_tests.py
include run_project_tests.py
include mesonrewriter.py
include ghwt.py
include __main__.py
include meson.py

@ -13,6 +13,7 @@ variables:
jobs:
- job: vs2017
timeoutInMinutes: 120
pool:
vmImage: VS2017-Win2016
@ -22,6 +23,7 @@ jobs:
arch: x86
compiler: msvc2017
backend: ninja
MESON_RSP_THRESHOLD: 0
vc2017x64vs:
arch: x64
compiler: msvc2017
@ -40,6 +42,7 @@ jobs:
- template: ci/azure-steps.yml
- job: vs2019
timeoutInMinutes: 120
pool:
vmImage: windows-2019
@ -63,6 +66,7 @@ jobs:
- template: ci/azure-steps.yml
- job: cygwin
timeoutInMinutes: 120
pool:
vmImage: VS2017-Win2016
strategy:
@ -82,6 +86,7 @@ jobs:
gcc-objc,^
git,^
gobject-introspection,^
gtk-doc,^
libarchive13,^
libboost-devel,^
libglib2.0-devel,^
@ -89,9 +94,15 @@ jobs:
libjsoncpp19,^
librhash0,^
libuv1,^
libxml2,^
libxml2-devel,^
libxslt,^
libxslt-devel,^
ninja,^
python2-devel,^
python3-devel,^
python3-libxml2,^
python3-libxslt,^
python36-pip,^
vala,^
wget,^
@ -100,8 +111,8 @@ jobs:
displayName: Install Dependencies
- script: |
set PATH=%CYGWIN_ROOT%\bin;%SYSTEMROOT%\system32
env.exe -- python3 -m pip --disable-pip-version-check install pefile pytest-xdist jsonschema
displayName: pip install pefile pytest-xdist jsonschema
env.exe -- python3 -m pip --disable-pip-version-check install gcovr pefile pytest-xdist jsonschema
displayName: pip install gcovr pefile pytest-xdist jsonschema
- script: |
set BOOST_ROOT=
set PATH=%CYGWIN_ROOT%\bin;%SYSTEMROOT%\system32
@ -138,6 +149,7 @@ jobs:
gccx64ninja:
MSYSTEM: MINGW64
MSYS2_ARCH: x86_64
MESON_RSP_THRESHOLD: 0
compiler: gcc
clangx64ninja:
MSYSTEM: MINGW64
@ -151,7 +163,13 @@ jobs:
displayName: Install MSYS2
- script: |
set PATH=%MSYS2_ROOT%\usr\bin;%SystemRoot%\system32;%SystemRoot%;%SystemRoot%\System32\Wbem
%MSYS2_ROOT%\usr\bin\pacman --noconfirm -Syyuu
# Remove this line when https://github.com/msys2/MSYS2-packages/pull/2022 is merged
%MSYS2_ROOT%\usr\bin\pacman --noconfirm -Sy dash
echo Updating msys2
%MSYS2_ROOT%\usr\bin\pacman --noconfirm -Syuu || echo system update failed, ignoring
echo Killing all msys2 processes
taskkill /F /FI "MODULES eq msys-2.0.dll"
echo Updating msys2 (again)
%MSYS2_ROOT%\usr\bin\pacman --noconfirm -Syuu
displayName: Update MSYS2
- script: |
@ -162,14 +180,17 @@ jobs:
git ^
mercurial ^
mingw-w64-$(MSYS2_ARCH)-cmake ^
mingw-w64-$(MSYS2_ARCH)-lcov ^
mingw-w64-$(MSYS2_ARCH)-libxml2 ^
mingw-w64-$(MSYS2_ARCH)-ninja ^
mingw-w64-$(MSYS2_ARCH)-pkg-config ^
mingw-w64-$(MSYS2_ARCH)-python2 ^
mingw-w64-$(MSYS2_ARCH)-python3 ^
mingw-w64-$(MSYS2_ARCH)-python3-lxml ^
mingw-w64-$(MSYS2_ARCH)-python3-setuptools ^
mingw-w64-$(MSYS2_ARCH)-python3-pip ^
%TOOLCHAIN%
%MSYS2_ROOT%\usr\bin\bash -lc "python3 -m pip --disable-pip-version-check install pefile jsonschema"
%MSYS2_ROOT%\usr\bin\bash -lc "python3 -m pip --disable-pip-version-check install gcovr jsonschema pefile"
displayName: Install Dependencies
- script: |
set BOOST_ROOT=

@ -12,12 +12,12 @@ pkgs=(
itstool gtk3 java-environment=8 gtk-doc llvm clang sdl2 graphviz
doxygen vulkan-validation-layers openssh mercurial gtk-sharp-2 qt5-tools
libwmf valgrind cmake netcdf-fortran openmpi nasm gnustep-base gettext
python-jsonschema
python-jsonschema python-lxml
# cuda
)
aur_pkgs=(scalapack)
pip_pkgs=(hotdoc)
pip_pkgs=(hotdoc gcovr)
cleanup_pkgs=(go)
AUR_USER=docker

@ -0,0 +1,8 @@
{
"base_image": "ubuntu:bionic",
"env": {
"CI": "1",
"SKIP_SCIENTIFIC": "1",
"DC": "gdc"
}
}

@ -0,0 +1,59 @@
#!/bin/bash
set -e
export DEBIAN_FRONTEND=noninteractive
export LANG='C.UTF-8'
export DC=gdc
pkgs=(
python3-pytest-xdist
python3-pip libxml2-dev libxslt1-dev libyaml-dev libjson-glib-dev
wget unzip cmake doxygen
clang
pkg-config-arm-linux-gnueabihf
qt4-linguist-tools qt5-default qtbase5-private-dev
python-dev
libomp-dev
llvm lcov
ldc
libclang-dev
libgcrypt20-dev
libgpgme-dev
libhdf5-dev openssh-server
libboost-python-dev libboost-regex-dev
libblocksruntime-dev
libperl-dev libscalapack-mpi-dev libncurses-dev
)
boost_pkgs=(atomic chrono date-time filesystem log regex serialization system test thread)
sed -i '/^#\sdeb-src /s/^#//' "/etc/apt/sources.list"
apt-get -y update
apt-get -y upgrade
apt-get -y install eatmydata
# Base stuff
eatmydata apt-get -y build-dep meson
# Add boost packages
for i in "${boost_pkgs[@]}"; do
for j in "1.62.0" "1.65.1"; do
pkgs+=("libboost-${i}${j}")
done
done
# packages
eatmydata apt-get -y install "${pkgs[@]}"
eatmydata python3 -m pip install codecov gcovr jsonschema
# Install ninja 1.10
wget https://github.com/ninja-build/ninja/releases/download/v1.10.0/ninja-linux.zip
unzip ninja-linux.zip -d /ci
# cleanup
apt-get -y remove ninja-build
apt-get -y clean
apt-get -y autoclean
rm ninja-linux.zip

@ -71,6 +71,9 @@ class Builder(BuilderBase):
for key, val in self.image_def.env.items():
out_data += f'export {key}="{val}"\n'
# Also add /ci to PATH
out_data += 'export PATH="/ci:$PATH"\n'
out_file.write_text(out_data)
# make it executable
@ -157,7 +160,7 @@ class ImageTester(BuilderBase):
test_cmd = [
self.docker, 'run', '--rm', '-t', 'meson_test_image',
'/usr/bin/bash', '-c', 'source /ci/env_vars.sh; cd meson; ./run_tests.py $CI_ARGS'
'/bin/bash', '-c', 'source /ci/env_vars.sh; cd meson; ./run_tests.py $CI_ARGS'
]
if subprocess.run(test_cmd).returncode != 0:
raise RuntimeError('Running tests failed')

@ -11,12 +11,14 @@ export DC=gdc
pkgs=(
python3-pytest-xdist
python3-pip libxml2-dev libxslt1-dev libyaml-dev libjson-glib-dev
python3-lxml
wget unzip
qt5-default clang
pkg-config-arm-linux-gnueabihf
qt4-linguist-tools
python-dev
libomp-dev
llvm lcov
dub ldc
mingw-w64 mingw-w64-tools nim
libclang-dev
@ -41,7 +43,7 @@ eatmydata apt-get -y build-dep meson
eatmydata apt-get -y install "${pkgs[@]}"
eatmydata apt-get -y install --no-install-recommends wine-stable # Wine is special
eatmydata python3 -m pip install hotdoc codecov jsonschema
eatmydata python3 -m pip install hotdoc codecov gcovr jsonschema
# dub stuff
dub_fetch urld

@ -13,7 +13,7 @@ pkgs=(
doxygen vulkan-devel vulkan-validation-layers-devel openssh mercurial gtk-sharp2-devel libpcap-devel gpgme-devel
qt5-qtbase-devel qt5-qttools-devel qt5-linguist qt5-qtbase-private-devel
libwmf-devel valgrind cmake openmpi-devel nasm gnustep-base-devel gettext-devel ncurses-devel
libxml2-devel libxslt-devel libyaml-devel glib2-devel json-glib-devel
libxml2-devel libxslt-devel libyaml-devel glib2-devel json-glib-devel python3-lxml
)
# Sys update
@ -21,7 +21,7 @@ dnf -y upgrade
# Install deps
dnf -y install "${pkgs[@]}"
python3 -m pip install hotdoc gobject PyGObject
python3 -m pip install hotdoc gcovr gobject PyGObject
# Cleanup
dnf -y clean all

@ -5,9 +5,9 @@ set -e
source /ci/common.sh
pkgs=(
python3-setuptools python3-wheel python3-pip python3-pytest-xdist python3
python3-setuptools python3-wheel python3-pip python3-pytest-xdist python3 python3-lxml
ninja make git autoconf automake patch python3-Cython python3-jsonschema
elfutils gcc gcc-c++ gcc-fortran gcc-objc gcc-obj-c++ vala rust bison flex curl
elfutils gcc gcc-c++ gcc-fortran gcc-objc gcc-obj-c++ vala rust bison flex curl lcov
mono-core gtkmm3-devel gtest gmock protobuf-devel wxGTK3-3_2-devel gobject-introspection-devel
itstool gtk3-devel java-15-openjdk-devel gtk-doc llvm-devel clang-devel libSDL2-devel graphviz-devel zlib-devel zlib-devel-static
#hdf5-devel netcdf-devel libscalapack2-openmpi3-devel libscalapack2-gnu-openmpi3-hpc-devel openmpi3-devel
@ -17,7 +17,7 @@ pkgs=(
libxml2-devel libxslt-devel libyaml-devel glib2-devel json-glib-devel
boost-devel libboost_date_time-devel libboost_filesystem-devel libboost_locale-devel libboost_system-devel
libboost_test-devel libboost_log-devel libboost_regex-devel
libboost_python-devel libboost_python-py3-1_71_0-devel libboost_regex-devel
libboost_python3-devel libboost_regex-devel
)
# Sys update
@ -26,7 +26,7 @@ zypper --non-interactive update
# Install deps
zypper install -y "${pkgs[@]}"
python3 -m pip install hotdoc gobject PyGObject
python3 -m pip install hotdoc gcovr gobject PyGObject
echo 'export PKG_CONFIG_PATH="/usr/lib64/mpi/gcc/openmpi3/lib64/pkgconfig:$PKG_CONFIG_PATH"' >> /ci/env_vars.sh

@ -4,7 +4,8 @@ if ($LastExitCode -ne 0) {
}
# remove Chocolately, MinGW, Strawberry Perl from path, so we don't find gcc/gfortran and try to use it
$env:Path = ($env:Path.Split(';') | Where-Object { $_ -notmatch 'mingw|Strawberry|Chocolatey' }) -join ';'
# remove PostgreSQL from path so we don't pickup a broken zlib from it
$env:Path = ($env:Path.Split(';') | Where-Object { $_ -notmatch 'mingw|Strawberry|Chocolatey|PostgreSQL' }) -join ';'
# Rust puts its shared stdlib in a secret place, but it is needed to run tests.
$env:Path += ";$HOME/.rustup/toolchains/stable-x86_64-pc-windows-msvc/bin"

@ -7,9 +7,11 @@ msg() { echo -e "\x1b[1;32mINFO: \x1b[37m$*\x1b[0m"; }
if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then
msg "Running OSX setup"
brew update
# Run one macOS build with pkg-config available (pulled in by qt), and the
# other (unity=on) without pkg-config
brew install qt ldc llvm ninja
if [[ "$MESON_ARGS" =~ .*unity=on.* ]]; then
which pkg-config || brew install pkg-config
which pkg-config && rm -f $(which pkg-config)
fi
python3 -m pip install jsonschema
elif [[ "$TRAVIS_OS_NAME" == "linux" ]]; then

@ -23,6 +23,10 @@ export CXX=$CXX
export OBJC=$CC
export OBJCXX=$CXX
export PATH=/root/tools:$PATH
if test "$MESON_RSP_THRESHOLD" != ""
then
export MESON_RSP_THRESHOLD=$MESON_RSP_THRESHOLD
fi
source /ci/env_vars.sh
cd /root
@ -55,5 +59,9 @@ elif [[ "$TRAVIS_OS_NAME" == "osx" ]]; then
export OBJC=$CC
export OBJCXX=$CXX
export PATH=$HOME/tools:/usr/local/opt/qt/bin:$PATH:$(brew --prefix llvm)/bin
if test "$MESON_RSP_THRESHOLD" != ""
then
export MESON_RSP_THRESHOLD=$MESON_RSP_THRESHOLD
fi
./run_tests.py $RUN_TESTS_ARGS --backend=ninja -- $MESON_ARGS
fi

@ -7,7 +7,7 @@ cpp = 'armcc'
ar = 'armar'
strip = 'armar'
[properties]
[built-in options]
# The '--cpu' option with the appropriate target type should be mentioned
# to cross compile c/c++ code with armcc,.
c_args = ['--cpu=Cortex-M0plus']

@ -12,7 +12,7 @@
# Armcc is only available in toolchain version 5.
# Armclang is only available in toolchain version 6.
# Start shell with /opt/arm/developmentstudio-2019.0/bin/suite_exec zsh
# Now the compilers will work.
# Now the compilers will work.
[binaries]
# we could set exe_wrapper = qemu-arm-static but to test the case
@ -24,8 +24,7 @@ ar = '/opt/arm/developmentstudio-2019.0/sw/ARMCompiler6.12/bin/armar'
#strip = '/usr/arm-linux-gnueabihf/bin/strip'
#pkgconfig = '/usr/bin/arm-linux-gnueabihf-pkg-config'
[properties]
[built-in options]
c_args = ['--target=aarch64-arm-none-eabi']
[host_machine]

@ -7,7 +7,7 @@ cpp = 'armclang'
ar = 'armar'
strip = 'armar'
[properties]
[built-in options]
# The '--target', '-mcpu' options with the appropriate values should be mentioned
# to cross compile c/c++ code with armclang.
c_args = ['--target=arm-arm-none-eabi', '-mcpu=cortex-m0plus']

@ -12,8 +12,7 @@ cpu_family = 'c2000'
cpu = 'c28x'
endian = 'little'
[properties]
needs_exe_wrapper = true
[built-in options]
c_args = [
'-v28',
'-ml',
@ -24,3 +23,6 @@ c_link_args = [
'\f28004x_flash.cmd']
cpp_args = []
cpp_link_args = []
[properties]
needs_exe_wrapper = true

@ -7,7 +7,7 @@ cpp = 'ccrx'
ar = 'rlink'
strip = 'rlink'
[properties]
[built-in options]
# The '--cpu' option with the appropriate target type should be mentioned
# to cross compile c/c++ code with ccrx,.
c_args = ['-cpu=rx600']

@ -8,14 +8,14 @@ cpp = 'clang++'
ar = 'ar'
strip = 'strip'
[properties]
root = '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer'
[built-in options]
c_args = ['-arch', 'armv7', '-miphoneos-version-min=8.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.4.sdk']
cpp_args = ['-arch', 'armv7', '-miphoneos-version-min=8.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.4.sdk']
c_link_args = ['-arch', 'armv7', '-miphoneos-version-min=8.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.4.sdk']
cpp_link_args = ['-arch', 'armv7', '-miphoneos-version-min=8.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.4.sdk']
[properties]
root = '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer'
has_function_printf = true
has_function_hfkerhisadf = false

@ -8,14 +8,15 @@ cpp = 'clang++'
ar = 'ar'
strip = 'strip'
[properties]
root = '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer'
[built-in options]
c_args = ['-arch', 'arm64', '-mtvos-version-min=12.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk']
cpp_args = ['-arch', 'arm64', '-mtvos-version-min=12.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk']
c_link_args = ['-arch', 'arm64', '-mtvos-version-min=12.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk']
cpp_link_args = ['-arch', 'arm64', '-mtvos-version-min=12.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk']
[properties]
root = '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer'
has_function_printf = true
has_function_hfkerhisadf = false

@ -9,12 +9,14 @@ strip = '/usr/arm-linux-gnueabihf/bin/strip'
pkgconfig = '/usr/bin/arm-linux-gnueabihf-pkg-config'
ld = '/usr/bin/arm-linux/gnueabihf-ld'
[properties]
root = '/usr/arm-linux-gnueabihf'
[built-in options]
# Used in unit test '140 get define'
c_args = ['-DMESON_TEST_ISSUE_1665=1']
cpp_args = '-DMESON_TEST_ISSUE_1665=1'
[properties]
root = '/usr/arm-linux-gnueabihf'
has_function_printf = true
has_function_hfkerhisadf = false

@ -3,8 +3,7 @@ c = '/home/jpakkane/emsdk/fastcomp/emscripten/emcc'
cpp = '/home/jpakkane/emsdk/fastcomp/emscripten/em++'
ar = '/home/jpakkane/emsdk/fastcomp/emscripten/emar'
[properties]
[built-in options]
c_args = ['-s', 'WASM=1', '-s', 'EXPORT_ALL=1']
c_link_args = ['-s','EXPORT_ALL=1']
cpp_args = ['-s', 'WASM=1', '-s', 'EXPORT_ALL=1']

@ -14,6 +14,8 @@ endian = 'little'
[properties]
needs_exe_wrapper = true
[built-in options]
c_args = [
'-c',
'-mcpu=33EP64MC203',

@ -2,12 +2,6 @@
%__meson_wrap_mode nodownload
%__meson_auto_features enabled
%_smp_mesonflags %([ -z "$MESON_BUILD_NCPUS" ] \\\
&& MESON_BUILD_NCPUS="`/usr/bin/getconf _NPROCESSORS_ONLN`"; \\\
ncpus_max=%{?_smp_ncpus_max}; \\\
if [ -n "$ncpus_max" ] && [ "$ncpus_max" -gt 0 ] && [ "$MESON_BUILD_NCPUS" -gt "$ncpus_max" ]; then MESON_BUILD_NCPUS="$ncpus_max"; fi; \\\
if [ "$MESON_BUILD_NCPUS" -gt 1 ]; then echo "--num-processes $MESON_BUILD_NCPUS"; fi)
%meson \
%set_build_flags \
%{shrink:%{__meson} \
@ -28,17 +22,24 @@
--wrap-mode=%{__meson_wrap_mode} \
--auto-features=%{__meson_auto_features} \
%{_vpath_srcdir} %{_vpath_builddir} \
%{nil}}
%{nil}}
%meson_build \
%ninja_build -C %{_vpath_builddir}
%{shrink:%{__meson} compile \
-C %{_vpath_builddir} \
-j %{_smp_build_ncpus} \
--verbose \
%{nil}}
%meson_install \
%ninja_install -C %{_vpath_builddir}
%{shrink:DESTDIR=%{buildroot} %{__meson} install \
-C %{_vpath_builddir} \
--no-rebuild \
%{nil}}
%meson_test \
%{shrink: %{__meson} test \
%{shrink:%{__meson} test \
-C %{_vpath_builddir} \
%{?_smp_mesonflags} \
--num-processes %{_smp_build_ncpus} \
--print-errorlogs \
%{nil}}
%{nil}}

@ -0,0 +1,96 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!-- from https://svn.jenkins-ci.org/trunk/hudson/dtkit/dtkit-format/dtkit-junit-model/src/main/resources/com/thalesgroup/dtkit/junit/model/xsd/junit-4.xsd -->
<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema">
<xs:element name="failure">
<xs:complexType mixed="true">
<xs:attribute name="type" type="xs:string" use="optional"/>
<xs:attribute name="message" type="xs:string" use="optional"/>
</xs:complexType>
</xs:element>
<xs:element name="error">
<xs:complexType mixed="true">
<xs:attribute name="type" type="xs:string" use="optional"/>
<xs:attribute name="message" type="xs:string" use="optional"/>
</xs:complexType>
</xs:element>
<xs:element name="properties">
<xs:complexType>
<xs:sequence>
<xs:element ref="property" maxOccurs="unbounded"/>
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:element name="property">
<xs:complexType>
<xs:attribute name="name" type="xs:string" use="required"/>
<xs:attribute name="value" type="xs:string" use="required"/>
</xs:complexType>
</xs:element>
<xs:element name="skipped">
<xs:complexType mixed="true">
<xs:attribute name="message" type="xs:string" use="optional"/>
</xs:complexType>
</xs:element>
<xs:element name="system-err" type="xs:string"/>
<xs:element name="system-out" type="xs:string"/>
<xs:element name="testcase">
<xs:complexType>
<xs:sequence>
<xs:element ref="skipped" minOccurs="0" maxOccurs="1"/>
<xs:element ref="error" minOccurs="0" maxOccurs="unbounded"/>
<xs:element ref="failure" minOccurs="0" maxOccurs="unbounded"/>
<xs:element ref="system-out" minOccurs="0" maxOccurs="unbounded"/>
<xs:element ref="system-err" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
<xs:attribute name="name" type="xs:string" use="required"/>
<xs:attribute name="assertions" type="xs:string" use="optional"/>
<xs:attribute name="time" type="xs:string" use="optional"/>
<xs:attribute name="classname" type="xs:string" use="optional"/>
<xs:attribute name="status" type="xs:string" use="optional"/>
</xs:complexType>
</xs:element>
<xs:element name="testsuite">
<xs:complexType>
<xs:sequence>
<xs:element ref="properties" minOccurs="0" maxOccurs="1"/>
<xs:element ref="testcase" minOccurs="0" maxOccurs="unbounded"/>
<xs:element ref="system-out" minOccurs="0" maxOccurs="1"/>
<xs:element ref="system-err" minOccurs="0" maxOccurs="1"/>
</xs:sequence>
<xs:attribute name="name" type="xs:string" use="required"/>
<xs:attribute name="tests" type="xs:string" use="required"/>
<xs:attribute name="failures" type="xs:string" use="optional"/>
<xs:attribute name="errors" type="xs:string" use="optional"/>
<xs:attribute name="time" type="xs:string" use="optional"/>
<xs:attribute name="disabled" type="xs:string" use="optional"/>
<xs:attribute name="skipped" type="xs:string" use="optional"/>
<xs:attribute name="timestamp" type="xs:string" use="optional"/>
<xs:attribute name="hostname" type="xs:string" use="optional"/>
<xs:attribute name="id" type="xs:string" use="optional"/>
<xs:attribute name="package" type="xs:string" use="optional"/>
</xs:complexType>
</xs:element>
<xs:element name="testsuites">
<xs:complexType>
<xs:sequence>
<xs:element ref="testsuite" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
<xs:attribute name="name" type="xs:string" use="optional"/>
<xs:attribute name="time" type="xs:string" use="optional"/>
<xs:attribute name="tests" type="xs:string" use="optional"/>
<xs:attribute name="failures" type="xs:string" use="optional"/>
<xs:attribute name="disabled" type="xs:string" use="optional"/>
<xs:attribute name="errors" type="xs:string" use="optional"/>
</xs:complexType>
</xs:element>
</xs:schema>

@ -32,8 +32,9 @@ set cpo&vim
" http://mesonbuild.com/Syntax.html
syn keyword mesonConditional elif else if endif
syn keyword mesonRepeat foreach endforeach
syn keyword mesonRepeat foreach endforeach
syn keyword mesonOperator and not or in
syn keyword mesonStatement continue break
syn match mesonComment "#.*$" contains=mesonTodo,@Spell
syn keyword mesonTodo FIXME NOTE NOTES TODO XXX contained

@ -1,5 +1,6 @@
{
"type": "object",
"additionalProperties": false,
"properties": {
"env": {
"type": "object",
@ -100,6 +101,30 @@
"prefix"
]
}
},
"tools": {
"type": "object"
},
"stdout": {
"type": "array",
"items": {
"type": "object",
"properties": {
"line": {
"type": "string"
},
"match": {
"type": "string",
"enum": [
"literal",
"re"
]
}
},
"required": [
"line"
]
}
}
}
}

@ -6,14 +6,17 @@
Each wrap repository has a master branch with only one initial commit and *no* wrap files.
And that is the only commit ever made on that branch.
For every release of a project a new branch is created. The new branch is named after the
the upstream release number (e.g. `1.0.0`). This branch holds a wrap file for
For every release of a project a new branch is created. The new branch is named after the
the upstream release number (e.g. `1.0.0`). This branch holds a wrap file for
this particular release.
There are two types of wraps on WrapDB - regular wraps and wraps with Meson build
definition patches. A wrap file in a repository on WrapDB must have a name `upstream.wrap`.
Wraps with Meson build definition patches work in much the same way as Debian: we take the unaltered upstream source package and add a new build system to it as a patch. These build systems are stored as Git repositories on GitHub. They only contain build definition files. You may also think of them as an overlay to upstream source.
Wraps with Meson build definition patches work in much the same way as Debian:
we take the unaltered upstream source package and add a new build system to it as a patch.
These build systems are stored as Git repositories on GitHub. They only contain build definition files.
You may also think of them as an overlay to upstream source.
Whenever a new commit is pushed into GitHub's project branch, a new wrap is generated
with an incremented version number. All the old releases remain unaltered.
@ -21,13 +24,15 @@ New commits are always done via GitHub merge requests and must be reviewed by
someone other than the submitter.
Note that your Git repo with wrap must not contain the subdirectory of the source
release. That gets added automatically by the service. You also must not commit
release. That gets added automatically by the service. You also must not commit
any source code from the original tarball into the wrap repository.
## Choosing the repository name
Wrapped subprojects are used much like external dependencies. Thus
they should have the same name as the upstream projects.
they should have the same name as the upstream projects.
NOTE: Repo names must fully match this regexp: `[a-z0-9._]+`.
If the project provides a pkg-config file, then the repository name should be
the same as the pkg-config name. Usually this is the name of the
@ -36,16 +41,19 @@ however. As an example the libogg project's chosen pkg-config name is
`ogg` instead of `libogg`, which is the reason why the repository is
named plain `ogg`.
If there is no a pkg-config file, the name the project uses/promotes should be used,
If there is no a pkg-config file, the name the project uses/promotes should be used,
lowercase only (Catch2 -> catch2).
If the project name is too generic or ambiguous (e.g. `benchmark`),
consider using `organization-project` naming format (e.g. `google-benchmark`).
## How to contribute a new wrap
If the project already uses Meson build system, then only a wrap file - `upstream.wrap`
should be provided. In other case a Meson build definition patch - a set of `meson.build`
should be provided. In other case a Meson build definition patch - a set of `meson.build`
files - should be also provided.
### Request a new repository or branch
### Request a new repository
Create an issue on the [wrapdb bug tracker](https://github.com/mesonbuild/wrapdb/issues)
using *Title* and *Description* below as a template.
@ -61,6 +69,9 @@ version: <version_you_have_a_wrap_for>
Wait until the new repository or branch is created. A link to the new repository or branch
will be posted in a comment to this issue.
NOTE: Requesting a branch is not necessary. WrapDB maintainer can create the branch and
modify the PR accordingly if the project repository exists.
### Add a new wrap
First you need to fork the repository to your own page.
@ -80,28 +91,28 @@ git commit -a -m 'Add wrap files for libfoo-1.0.0'
git push origin 1.0.0
```
Now you should create a pull request on GitHub. Remember to create it against the
correct branch rather than master (`1.0.0` branch in this example). GitHub should do
Now you should create a pull request on GitHub. Remember to create it against the
correct branch rather than master (`1.0.0` branch in this example). GitHub should do
this automatically.
If the branch doesn't exist file a pull request against master.
WrapDB maintainers can fix it before merging.
## What is done by WrapDB maintainers
[mesonwrap tools](Wrap-maintainer-tools.md) must be used for the tasks below.
### Adding new project to the Wrap provider service
Each project gets its own repo. It is initialized like this:
```
git init
git add readme.txt
git add LICENSE.build
git commit -a -m 'Create project foobar'
git remote add origin <repo url>
git push -u origin master
mesonwrap new_repo --homepage=$HOMEPAGE --directory=$NEW_LOCAL_PROJECT_DIR $PROJECT_NAME
```
Note that this is the *only* commit that will ever be made to master branch. All other commits are done to branches.
The command creates a new repository and uploads it to Github.
Repo names must fully match this regexp: `[a-z0-9._]+`.
`--version` flag may be used to create a branch immediately.
### Adding a new branch to an existing project
@ -129,12 +140,6 @@ to functionality. All such changes must be submitted to upstream. You
may also host your own Git repo with the changes if you wish. The Wrap
system has native support for Git subprojects.
## Creator script
The WrapDB repository has a
[helper script](https://github.com/mesonbuild/mesonwrap/blob/master/mesonwrap.py)
to generate new repositories, verify them and update them.
## Reviewing wraps
See [Wrap review guidelines](Wrap-review-guidelines.md).

@ -20,6 +20,9 @@ option('integer_opt', type : 'integer', min : 0, max : 5, value : 3) # Since 0.4
option('free_array_opt', type : 'array', value : ['one', 'two']) # Since 0.44.0
option('array_opt', type : 'array', choices : ['one', 'two', 'three'], value : ['one', 'two'])
option('some_feature', type : 'feature', value : 'enabled') # Since 0.47.0
option('long_desc', type : 'string', value : 'optval',
description : 'An option with a very long description ' +
'that does something in a specific context') # Since 0.55.0
```
For built-in options, see [Built-in options][builtin_opts].

@ -17,7 +17,7 @@ by setting them inside `default_options` of `project()` in your `meson.build`.
For legacy reasons `--warnlevel` is the cli argument for the `warning_level` option.
They can also be edited after setup using `meson configure`.
They can also be edited after setup using `meson configure -Doption=value`.
Installation options are all relative to the prefix, except:
@ -55,37 +55,31 @@ particularly the paths section may be necessary.
### Core options
Options that are labeled "per machine" in the table are set per machine.
Prefixing the option with `build.` just affects the build machine configuration,
while unprefixed just affects the host machine configuration, respectively.
Using the option as-is with no prefix affects all machines. For example:
- `build.pkg_config_path` controls the paths pkg-config will search for just
`native: true` dependencies (build machine).
- `pkg_config_path` controls the paths pkg-config will search for just
`native: false` dependencies (host machine).
| Option | Default value | Description | Is per machine |
| ------ | ------------- | ----------- | -------------- |
| auto_features {enabled, disabled, auto} | auto | Override value of all 'auto' features | no |
| backend {ninja, vs,<br>vs2010, vs2015, vs2017, vs2019, xcode} | ninja | Backend to use | no |
| buildtype {plain, debug,<br>debugoptimized, release, minsize, custom} | debug | Build type to use | no |
| debug | true | Debug | no |
| default_library {shared, static, both} | shared | Default library type | no |
| errorlogs | true | Whether to print the logs from failing tests. | no |
| install_umask {preserve, 0000-0777} | 022 | Default umask to apply on permissions of installed files | no |
| layout {mirror,flat} | mirror | Build directory layout | no |
| optimization {0, g, 1, 2, 3, s} | 0 | Optimization level | no |
| pkg_config_path {OS separated path} | '' | Additional paths for pkg-config to search before builtin paths | yes |
| cmake_prefix_path | [] | Additional prefixes for cmake to search before builtin paths | yes |
| stdsplit | true | Split stdout and stderr in test logs | no |
| strip | false | Strip targets on install | no |
| unity {on, off, subprojects} | off | Unity build | no |
| unity_size {>=2} | 4 | Unity file block size | no |
| warning_level {0, 1, 2, 3} | 1 | Set the warning level. From 0 = none to 3 = highest | no |
| werror | false | Treat warnings as errors | no |
| wrap_mode {default, nofallback,<br>nodownload, forcefallback} | default | Wrap mode to use | no |
Options that are labeled "per machine" in the table are set per machine. See
the [specifying options per machine](#Specifying-options-per-machine) section
for details.
| Option | Default value | Description | Is per machine | Is per subproject |
| ------ | ------------- | ----------- | -------------- | ----------------- |
| auto_features {enabled, disabled, auto} | auto | Override value of all 'auto' features | no | no |
| backend {ninja, vs,<br>vs2010, vs2015, vs2017, vs2019, xcode} | ninja | Backend to use | no | no |
| buildtype {plain, debug,<br>debugoptimized, release, minsize, custom} | debug | Build type to use | no | no |
| debug | true | Debug | no | no |
| default_library {shared, static, both} | shared | Default library type | no | yes |
| errorlogs | true | Whether to print the logs from failing tests. | no | no |
| install_umask {preserve, 0000-0777} | 022 | Default umask to apply on permissions of installed files | no | no |
| layout {mirror,flat} | mirror | Build directory layout | no | no |
| optimization {0, g, 1, 2, 3, s} | 0 | Optimization level | no | no |
| pkg_config_path {OS separated path} | '' | Additional paths for pkg-config to search before builtin paths | yes | no |
| cmake_prefix_path | [] | Additional prefixes for cmake to search before builtin paths | yes | no |
| stdsplit | true | Split stdout and stderr in test logs | no | no |
| strip | false | Strip targets on install | no | no |
| unity {on, off, subprojects} | off | Unity build | no | no |
| unity_size {>=2} | 4 | Unity file block size | no | no |
| warning_level {0, 1, 2, 3} | 1 | Set the warning level. From 0 = none to 3 = highest | no | yes |
| werror | false | Treat warnings as errors | no | yes |
| wrap_mode {default, nofallback,<br>nodownload, forcefallback} | default | Wrap mode to use | no | no |
| force_fallback_for | [] | Force fallback for those dependencies | no | no |
<a name="build-type-options"></a>
For setting optimization levels and toggling debug, you can either set the
@ -186,9 +180,9 @@ The default values of `c_winlibs` and `cpp_winlibs` are in compiler-specific
argument forms, but the libraries are: kernel32, user32, gdi32, winspool,
shell32, ole32, oleaut32, uuid, comdlg32, advapi32.
c_args, cpp_args, c_link_args, and cpp_link_args only affect native builds,
when cross compiling they will not be applied to binaries or libraries
targeting the host system, only those being run on the build system.
All these `<lang>_*` options are specified per machine. See below in the
[specifying options per machine](#Specifying-options-per-machine) section on
how to do this in cross builds.
When using MSVC, `cpp_eh=none` will result in no exception flags being passed,
while the `cpp_eh=[value]` will result in `/EH[value]`.
@ -199,3 +193,44 @@ gcc-style compilers, nothing is passed (allowing exceptions to work), while
Since *0.54.0* The `<lang>_thread_count` option can be used to control the
value passed to `-s PTHREAD_POOL_SIZE` when using emcc. No other c/c++
compiler supports this option.
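As a hedged illustration only: the option name below follows the `<lang>_thread_count` pattern described above (here assumed to be `cpp_thread_count` for C++), and the cross file name is a placeholder:
```sh
# Sketch: configure an emscripten cross build and set the pthread pool size.
meson setup builddir --cross-file wasm.txt -Dcpp_thread_count=8
```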
## Specifying options per machine
Since *0.51.0*, some options are specified per machine rather than globally for
all machine configurations. Prefixing the option with `build.` just affects the
build machine configuration, while unprefixed just affects the host machine
configuration, respectively. For example:
- `build.pkg_config_path` controls the paths pkg-config will search for just
`native: true` dependencies (build machine).
- `pkg_config_path` controls the paths pkg-config will search for just
`native: false` dependencies (host machine).
This is useful for cross builds. In native builds, build = host, and the
unprefixed option alone will suffice.
Prior to *0.51.0*, these options only affected native builds when specified on
the command line, as there was no `build.` prefix. Similarly named fields in
the `[properties]` section of the cross file would affect cross compilers, but
the code paths were fairly different, allowing differences in behavior to crop up.
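For illustration, a sketch of setting the same option for both machines in a cross build, based on the `build.` prefix rule above (the paths and cross file name are placeholders):
```sh
# pkg_config_path applies to host-machine (native: false) dependencies,
# build.pkg_config_path to build-machine (native: true) dependencies.
meson setup builddir --cross-file cross_file.txt \
    -Dpkg_config_path=/sysroot/usr/lib/pkgconfig \
    -Dbuild.pkg_config_path=/usr/lib/pkgconfig
```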
## Specifying options per subproject
Since *0.54.0* `default_library` and `werror` built-in options can be defined
per subproject. This is useful, for example, when building shared libraries in
the main project but statically linking a subproject, or when the main project
must build with no warnings but some subprojects cannot.
Most of the time this would be used either by the parent project by setting
subproject's default_options (e.g. `subproject('foo', default_options: 'default_library=static')`),
or by the user using the command line `-Dfoo:default_library=static`.
The value is overridden in this order:
- Value from parent project
- Value from subproject's default_options if set
- Value from subproject() default_options if set
- Value from command line if set
Since 0.56.0 `warning_level` can also be defined per subproject.
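A minimal sketch of the command-line form mentioned above, assuming a subproject named `foo`:
```sh
# Build the main project as shared with fatal warnings, but link the 'foo'
# subproject statically and keep its warnings non-fatal.
meson setup builddir -Ddefault_library=shared -Dwerror=true \
    -Dfoo:default_library=static -Dfoo:werror=false
```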

@ -48,8 +48,6 @@ The `subproject` method is almost identical to the normal meson
`subproject` function. The only difference is that a CMake project
instead of a meson project is configured.
Also, project specific CMake options can be added with the `cmake_options` key.
The returned `sub_proj` supports the same options as a "normal" subproject.
Meson automatically detects CMake build targets, which can be accessed with
the methods listed [below](#subproject-object).
@ -87,6 +85,49 @@ It should be noted that not all projects are guaranteed to work. The
safest approach would still be to create a `meson.build` for the
subprojects in question.
### Configuration options
*New in meson 0.55.0*
Meson also supports passing configuration options to CMake and overriding
certain build details extracted from the CMake subproject.
```meson
cmake = import('cmake')
opt_var = cmake.subproject_options()
# Call CMake with `-DSOME_OTHER_VAR=ON`
opt_var.add_cmake_defines({'SOME_OTHER_VAR': true})
# Globally override the C++ standard to c++11
opt_var.set_override_option('cpp_std', 'c++11')
# Override the previous global C++ standard
# with c++14 only for the CMake target someLib
opt_var.set_override_option('cpp_std', 'c++14', target: 'someLib')
sub_pro = cmake.subproject('someLibProject', options: opt_var)
# Further changes to opt_var have no effect
```
See [the CMake options object](#cmake-options-object) for a complete reference
of all supported functions.
The CMake configuration options object is very similar to the
[configuration data object](Reference-manual.md#configuration-data-object)
returned by [`configuration_data`](Reference-manual.md#configuration_data). It
is generated by the `subproject_options` function.
All configuration options have to be set *before* the subproject is configured
and must be passed to the `subproject` method via the `options` key. Altering
the configuration object won't have any effect on previous `cmake.subproject`
calls.
In earlier meson versions CMake command-line parameters could be set with the
`cmake_options` kwarg. However, this feature is deprecated since 0.55.0 and only
kept for compatibility. It will not work together with the `options` kwarg.
### `subproject` object
This object is returned by the `subproject` function described above
@ -103,7 +144,37 @@ and supports the following methods:
the subproject. Usually `dependency()` or `target()` should be
preferred to extract build targets.
- `found` returns true if the subproject is available, otherwise false
*new in in 0.53.2*
*new in meson 0.53.2*
### `cmake options` object
This object is returned by the `subproject_options()` function and consumed by
the `options` kwarg of the `subproject` function. The following methods are
supported:
- `add_cmake_defines({'opt1': val1, ...})` add additional CMake commandline defines
- `set_override_option(opt, val)` set specific [build options](Build-options.md)
for targets. This will effectively add `opt=val` to the `override_options`
array of the [build target](Reference-manual.md#executable)
- `set_install(bool)` override whether targets should be installed or not
- `append_compile_args(lang, arg1, ...)` append compile flags for a specific
language to the targets
- `append_link_args(arg1, ...)` append linker args to the targets
- `clear()` reset all data in the `cmake options` object
The methods `set_override_option`, `set_install`, `append_compile_args` and
`append_link_args` support the optional `target` kwarg. If specified, the set
options affect the specific target. The effect of the option is global for the
subproject otherwise.
If, for instance, `opt_var.set_install(false)` is called, no target will be
installed regardless of what is set by CMake. However, it is still possible to
install specific targets (here `foo`) by setting the `target` kwarg:
`opt_var.set_install(true, target: 'foo')`
Options that are not set won't affect the generated subproject. So if, for
instance, `set_install` was not called, the values extracted from CMake will
be used.
## CMake configuration files

@ -109,11 +109,11 @@ you would issue the following command.
meson configure -Dprefix=/tmp/testroot
Then you would run your build command (usually `ninja`), which would
Then you would run your build command (usually `meson compile`), which would
cause Meson to detect that the build setup has changed and do all the
work required to bring your build tree up to date.
Since 0.50.0, it is also possible to get a list of all build options
by invoking `meson configure` with the project source directory or
by invoking [`meson configure`](Commands.md#configure) with the project source directory or
the path to the root `meson.build`. In this case, meson will print the
default values of all options similar to the example output from above.
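For example, a sketch of the two invocations described above (directory names are placeholders):
```sh
# Change an option in an existing build directory.
meson configure builddir -Dprefix=/tmp/testroot
# List all options and their default values (0.50.0 and later).
meson configure path/to/source
```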

@ -36,8 +36,8 @@ script:
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then echo FROM YOUR/REPO:eoan > Dockerfile; fi
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then echo ADD . /root >> Dockerfile; fi
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then docker build -t withgit .; fi
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then docker run withgit /bin/sh -c "cd /root && TRAVIS=true CC=$CC CXX=$CXX meson builddir && ninja -C builddir test"; fi
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then SDKROOT=$(xcodebuild -version -sdk macosx Path) meson builddir && ninja -C builddir test; fi
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then docker run withgit /bin/sh -c "cd /root && TRAVIS=true CC=$CC CXX=$CXX meson builddir && meson test -C builddir"; fi
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then SDKROOT=$(xcodebuild -version -sdk macosx Path) meson builddir && meson test -C builddir; fi
```
## CircleCi for Linux (with Docker)
@ -69,7 +69,7 @@ jobs:
steps:
- checkout
- run: meson setup builddir --backend ninja
- run: ninja -C builddir
- run: meson compile -C builddir
- run: meson test -C builddir
meson_debian_build:
@ -77,7 +77,7 @@ jobs:
steps:
- checkout
- run: meson setup builddir --backend ninja
- run: ninja -C builddir
- run: meson compile -C builddir
- run: meson test -C builddir
meson_fedora_build:
@ -85,7 +85,7 @@ jobs:
steps:
- checkout
- run: meson setup builddir --backend ninja
- run: ninja -C builddir
- run: meson compile -C builddir
- run: meson test -C builddir
workflows:
@ -138,10 +138,10 @@ install:
build_script:
- cmd: echo Building on %arch% with %compiler%
- cmd: meson --backend=ninja builddir
- cmd: ninja -C builddir
- cmd: meson compile -C builddir
test_script:
- cmd: ninja -C builddir test
- cmd: meson test -C builddir
```
### Qt
@ -187,8 +187,8 @@ install:
script:
- meson builddir
- ninja -C builddir
- ninja -C builddir test
- meson compile -C builddir
- meson test -C builddir
```
## GitHub Actions

@ -174,7 +174,7 @@ contents of an additional file into the CI log on failure.
Projects needed by unit tests are in the `test cases/unit`
subdirectory. They are not run as part of `./run_project_tests.py`.
#### Configuring project tests
### Configuring project tests
The (optional) `test.json` file, in the root of a test case, is used
for configuring the test. All of the following root entries in the `test.json`
@ -209,17 +209,20 @@ Exanple `test.json`:
{ "opt1": "qwert", "opt2": "false" },
{ "opt1": "bad" }
]
},
"tools": {
"cmake": ">=3.11"
}
}
```
##### env
#### env
The `env` key contains a dictionary which specifies additional
environment variables to be set during the configure step of the test. `@ROOT@`
is replaced with the absolute path of the source directory.
##### installed
#### installed
The `installed` dict contains a list of dicts, describing which files are expected
to be installed. Each dict contains the following keys:
@ -277,7 +280,7 @@ the platform matches. The following values for `platform` are currently supporte
| `cygwin` | Matches when the platform is cygwin |
| `!cygwin` | Not `cygwin` |
##### matrix
#### matrix
The `matrix` section can be used to define a test matrix to run project tests
with different meson options.
@ -318,12 +321,40 @@ The above example will produce the following matrix entries:
- `opt1=qwert`
- `opt1=qwert opt2=true`
##### do_not_set_opts
#### do_not_set_opts
Currently supported values are:
- `prefix`
- `libdir`
#### tools
This section specifies a dict of tool requirements in a simple key-value format.
If a tool is specified, it has to be present in the environment, and the version
requirement must be fulfilled. Otherwise, the entire test is skipped (including
every element in the test matrix).
#### stdout
The `stdout` key contains a list of dicts, describing the expected stdout.
Each dict contains the following keys:
- `line`
- `match` (optional)
Each item in the list is matched, in order, against the remaining actual stdout
lines, after any previous matches. If the actual stdout is exhausted before
every item in the list is matched, the expected output has not been seen, and
the test has failed.
The `match` element of the dict determines how the `line` element is matched:
| Type | Description |
| -------- | ----------------------- |
| `literal` | Literal match (default) |
| `re` | regex match |
### Skipping integration tests
Meson uses several continuous integration testing systems that have slightly

@ -39,7 +39,7 @@ $ meson --prefix=/tmp/myapp.app \
<other flags you might need>
```
Now when we do `ninja install` the bundle is properly staged. If you
Now when we do `meson install` the bundle is properly staged. If you
have any resource files or data, you need to install them into
`Contents/Resources` either by custom install commands or specifying
more install paths to the Meson command.

@ -5,27 +5,56 @@ short-description: Creating releases
# Creating releases
In addition to development, almost all projects provide periodical
source releases. These are standalone packages (usually either in tar
or zip format) of the source code. They do not contain any revision
control metadata, only the source code.
source releases. These are standalone packages (usually either in
tar or zip format) of the source code. They do not contain any
revision control metadata, only the source code. Meson provides
a simple way of generating these, with the `meson dist` command.
Meson provides a simple way of generating these. It consists of a
single command:
single command *(available since 0.52.0)*:
ninja dist
```sh
meson dist
```
or alternatively (on older meson versions with `ninja` backend):
```sh
ninja dist
```
This creates a file called `projectname-version.tar.xz` in the build
tree subdirectory `meson-dist`. This archive contains the full
contents of the latest commit in revision control including all the
submodules (recursively). All revision control metadata is removed.
Meson then takes
this archive and tests that it works by doing a full compile + test +
install cycle. If all these pass, Meson will then create a SHA-256
checksum file next to the archive.
**Note**: Meson behaviour is different from Autotools. The Autotools
"dist" target packages up the current source tree. Meson packages
the latest revision control commit. The reason for this is that it
prevents developers from doing accidental releases where the
distributed archive does not match any commit in revision control
(especially the one tagged for the release).
tree subdirectory `meson-dist`. This archive contains the full contents
of the latest commit in revision control including all the submodules
(recursively). All revision control metadata is removed. Meson then
takes this archive and tests that it works by doing a full
`compile` + `test` + `install` cycle. If all these pass, Meson will
then create a `SHA-256` checksum file next to the archive.
## Autotools dist VS Meson dist
Meson behaviour is different from Autotools. The Autotools "dist"
target packages up the current source tree. Meson packages the latest
revision control commit. The reason for this is that it prevents developers
from doing accidental releases where the distributed archive does not match
any commit in revision control (especially the one tagged for the release).
## Include subprojects in your release
The `meson dist` command has a `--include-subprojects` command line option.
When enabled, the source tree of all subprojects used by the current build
will also be included in the final tarball. This is useful to distribute a
self-contained tarball that can be built offline (i.e. `--wrap-mode=nodownload`).
## Skip build and test with `--no-tests`
The `meson dist` command has a `--no-tests` option to skip the build and
test steps for generated packages. It can be used to avoid wasting time,
for example when run in CI that already does its own testing.
So with `--no-tests` you can tell Meson "Do not build and test generated
packages.".
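Putting these options together, a short example invocation (the build directory name is a placeholder):
```sh
# Create a self-contained tarball that includes subproject sources and
# skip the compile + test + install verification step (e.g. in CI).
meson dist -C builddir --include-subprojects --no-tests
```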

@ -222,7 +222,7 @@ Once you have the cross file, starting a build is simple
$ meson srcdir builddir --cross-file cross_file.txt
```
Once configuration is done, compilation is started by invoking Ninja
Once configuration is done, compilation is started by invoking `meson compile`
in the usual way.
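For completeness, a minimal sketch of the configure-and-build sequence this describes:
```sh
meson setup builddir --cross-file cross_file.txt   # configure the cross build
meson compile -C builddir                          # compile in the usual way
```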
## Introspection and system checks
@ -231,13 +231,10 @@ The main *meson* object provides two functions to determine cross
compilation status.
```meson
meson.is_cross_build() # returns true when cross compiling
meson.has_exe_wrapper() # returns true if an exe wrapper has been defined
meson.is_cross_build() # returns true when cross compiling
meson.can_run_host_binaries() # returns true if the host binaries can be run, either with a wrapper or natively
```
Note that the latter gives undefined return value when doing a native
build.
You can run system checks on both the system compiler or the cross
compiler. You just have to specify which one to use.

@ -76,7 +76,7 @@ and config-tool based variables.
```meson
foo_dep = dependency('foo')
var = foo.get_variable(cmake : 'CMAKE_VAR', pkgconfig : 'pkg-config-var', configtool : 'get-var', default_value : 'default')
var = foo_dep.get_variable(cmake : 'CMAKE_VAR', pkgconfig : 'pkg-config-var', configtool : 'get-var', default_value : 'default')
```
It accepts the keywords 'cmake', 'pkgconfig', 'pkgconfig_define',
@ -242,6 +242,9 @@ libgcrypt_dep = dependency('libgcrypt', version: '>= 1.8')
gpgme_dep = dependency('gpgme', version: '>= 1.0')
```
*Since 0.55.0* Meson won't search $PATH any more for a config tool binary when
cross compiling if the config tool did not have an entry in the cross file.
## AppleFrameworks
Use the `modules` keyword to list frameworks required, e.g.
@ -285,8 +288,12 @@ You can call `dependency` multiple times with different modules and
use those to link against your targets.
If your boost headers or libraries are in non-standard locations you
can set the BOOST_ROOT, BOOST_INCLUDEDIR, and/or BOOST_LIBRARYDIR
environment variables.
can set the `BOOST_ROOT`, or the `BOOST_INCLUDEDIR` and `BOOST_LIBRARYDIR`
environment variables. *(added in 0.56.0)* You can also set these
parameters as `boost_root`, `boost_includedir`, and `boost_librarydir` in your
native or cross machine file. Note that machine file variables are
preferred to environment variables, and that specifying any of these
disables system-wide search for boost.
You can set the argument `threading` to `single` to use boost
libraries that have been compiled for single-threaded use instead.
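A hedged sketch (the module name is illustrative):
```meson
boost_dep = dependency('boost', modules : ['filesystem'], threading : 'single')
```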

@ -223,11 +223,11 @@ add_test('test library', exe)
```
First we build a shared library named foobar. It is marked
installable, so running `ninja install` installs it to the library
installable, so running `meson install` installs it to the library
directory (the system knows which one so the user does not have to
care). Then we build a test executable which is linked against the
library. It will not be installed, but instead it is added to the list
of unit tests, which can be run with the command `ninja test`.
of unit tests, which can be run with the command `meson test`.
Above we mentioned precompiled headers as a feature not supported by
other build systems. Here's how you would use them.
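A minimal sketch, assuming a precompiled header at `pch/myexe_pch.h` and an illustrative source file name:
```meson
executable('myexe', 'main.c', c_pch : 'pch/myexe_pch.h')
```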

@ -51,7 +51,7 @@ $ /path/to/meson.py <options>
After this you don't have to care about invoking Meson any more. It
remembers where it was originally invoked from and calls itself
appropriately. As a user the only thing you need to do is to `cd` into
your build directory and invoke `ninja`.
your build directory and invoke `meson compile`.
## Why can't I specify target files with a wildcard?
@ -432,7 +432,7 @@ sources in the build target:
libfoo_gen_headers = custom_target('gen-headers', ..., output: 'foo-gen.h')
libfoo_sources = files('foo-utils.c', 'foo-lib.c')
# Add generated headers to the list of sources for the build target
libfoo = library('foo', sources: libfoo_sources + libfoo_gen_headers)
libfoo = library('foo', sources: [libfoo_sources + libfoo_gen_headers])
```
Now let's say you have a new target that links to `libfoo`:
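One possible continuation (a sketch; the target and file names are hypothetical) is to list the generated headers as sources of the new target as well, so they are created before it compiles:
```meson
footool = executable('footool', 'tool.c', libfoo_gen_headers,
  link_with : libfoo)
```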

@ -28,12 +28,12 @@ the binaries `gcovr`, `lcov` and `genhtml`. If version 3.3 or higher
of the first is found, targets called *coverage-text*, *coverage-xml*
and *coverage-html* are generated. Alternatively, if the latter two
are found, only the target *coverage-html* is generated. Coverage
reports can then be produced simply by calling e.g. `ninja
reports can then be produced simply by calling e.g. `meson compile
coverage-xml`. As a convenience, a high-level *coverage* target is
also generated which will produce all 3 coverage report types, if
possible.
Note that generating any of the coverage reports described above
requires the tests (i.e. `ninja test`) to finish running so the
requires the tests (i.e. `meson test`) to finish running so the
information about the functions that are called in the tests can be
gathered for the report.

@ -88,7 +88,6 @@ There are several keyword arguments. Many of these map directly to the
e.g. `Gtk`
* `includes`: list of gir names to be included, can also be a GirTarget
* `header`: *(Added 0.43.0)* name of main c header to include for the library, e.g. `glib.h`
* `dependencies`: deps to use during introspection scanning
* `include_directories`: extra include paths to look for gir files
* `install`: if true, install the generated files
* `install_dir_gir`: (*Added 0.35.0*) which directory to install the
@ -98,6 +97,7 @@ There are several keyword arguments. Many of these map directly to the
* `link_with`: list of libraries to link with
* `symbol_prefix`: the symbol prefix for the gir object, e.g. `gtk`,
(*Since 0.43.0*) an ordered list of multiple prefixes is allowed
* `fatal_warnings`: *Since 0.55.0* turn scanner warnings into fatal errors.
Returns an array of two elements which are: `[gir_target,
typelib_target]`
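A hedged usage sketch (the library, sources and namespace names are illustrative):
```meson
gnome = import('gnome')
foo_gir = gnome.generate_gir(libfoo,
  sources : libfoo_sources,
  namespace : 'Foo',
  nsversion : '1.0',
  install : true,
  fatal_warnings : true  # since 0.55.0
)
```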
@ -223,7 +223,7 @@ directory. Note that this is not for installing schemas and is only
useful when running the application locally for example during tests.
* `build_by_default`: causes, when set to true, to have this target be
built by default, that is, when invoking plain `ninja`, the default
built by default, that is, when invoking plain `meson compile`, the default
value is true for all built target types
* `depend_files`: files ([`string`](Reference-manual.md#string-object),
[`files()`](Reference-manual.md#files), or
@ -246,7 +246,7 @@ one XML file.
* `annotations`: *(Added 0.43.0)* list of lists of 3 strings for the annotation for `'ELEMENT', 'KEY', 'VALUE'`
* `docbook`: *(Added 0.43.0)* prefix to generate `'PREFIX'-NAME.xml` docbooks
* `build_by_default`: causes, when set to true, to have this target be
built by default, that is, when invoking plain `ninja`, the default
built by default, that is, when invoking plain `meson compile`, the default
value is true for all built target types
* `install_dir`: (*Added 0.46.0*) location to install the header or
bundle depending on previous options
@ -344,8 +344,8 @@ of the module.
Note that this has the downside of rebuilding the doc for each build, which is
often very slow. It usually should be enabled only in CI.
This creates a `$module-doc` target that can be ran to build docs and
normally these are only built on install.
This also creates a `$module-doc` target that can be run to build documentation.
Normally the documentation is only built on install.
*Since 0.52.0* Returns a target object that can be passed as dependency to other
targets using generated doc files (e.g. in `content_files` of another doc).

@ -25,20 +25,21 @@ With this command meson will configure the project and also generate
introspection information that is stored in `intro-*.json` files in the
`meson-info` directory. The introspection dump will be automatically updated
when meson is (re)configured, or the build options change. Thus, an IDE can
watch for changes in this directory to know when something changed.
watch for changes in this directory to know when something changed. Note that
`meson-info.json` is guaranteed to be the last file written.
The `meson-info` directory should contain the following files:
| File | Description |
| ---- | ----------- |
| `intro-benchmarks.json` | Lists all benchmarks |
| `intro-buildoptions.json` | Contains a full list of meson configuration options for the project |
| `intro-buildsystem_files.json` | Full list of all meson build files |
| `intro-dependencies.json` | Lists all dependencies used in the project |
| `intro-installed.json` | Contains mapping of files to their installed location |
| `intro-projectinfo.json` | Stores basic information about the project (name, version, etc.) |
| `intro-targets.json` | Full list of all build targets |
| `intro-tests.json` | Lists all tests with instructions how to run them |
| File | Description |
| ------------------------------ | ------------------------------------------------------------------- |
| `intro-benchmarks.json` | Lists all benchmarks |
| `intro-buildoptions.json` | Contains a full list of meson configuration options for the project |
| `intro-buildsystem_files.json` | Full list of all meson build files |
| `intro-dependencies.json` | Lists all dependencies used in the project |
| `intro-installed.json` | Contains mapping of files to their installed location |
| `intro-projectinfo.json` | Stores basic information about the project (name, version, etc.) |
| `intro-targets.json` | Full list of all build targets |
| `intro-tests.json` | Lists all tests with instructions how to run them |
The content of the JSON files is further specified in the remainder of this document.
@ -99,15 +100,15 @@ for actual compilation.
The following table shows all valid types for a target.
| value of `type` | Description |
| --------------- | ----------- |
| `executable` | This target will generate an executable file |
| `static library` | Target for a static library |
| `shared library` | Target for a shared library |
| value of `type` | Description |
| ---------------- | --------------------------------------------------------------------------------------------- |
| `executable` | This target will generate an executable file |
| `static library` | Target for a static library |
| `shared library` | Target for a shared library |
| `shared module` | A shared library that is meant to be used with dlopen rather than linking into something else |
| `custom` | A custom target |
| `run` | A Meson run target |
| `jar` | A Java JAR target |
| `custom` | A custom target |
| `run` | A Meson run target |
| `jar` | A Java JAR target |
### Using `--targets` without a build directory
@ -227,8 +228,8 @@ in the `meson.build`.
## Tests
Compilation and unit tests are done as usual by running the `ninja` and
`ninja test` commands. A JSON formatted result log can be found in
Compilation and unit tests are done as usual by running the `meson compile` and
`meson test` commands. A JSON formatted result log can be found in
`workspace/project/builddir/meson-logs/testlog.json`.
When these tests fail, the user probably wants to run the failing test in a
@ -275,11 +276,62 @@ command line. Use `meson introspect -h` to see all available options.
This API can also work without a build directory for the `--projectinfo` command.
# AST of a `meson.build`
Since meson *0.55.0* it is possible to dump the AST of a `meson.build` as a JSON
object. The interface for this is `meson introspect --ast /path/to/meson.build`.
Each node of the AST has at least the following entries:
| Key | Description |
| ------------ | ------------------------------------------------------- |
| `node` | Type of the node (see following table) |
| `lineno` | Line number of the node in the file |
| `colno` | Column number of the node in the file |
| `end_lineno` | Marks the end of the node (may be the same as `lineno`) |
| `end_colno` | Marks the end of the node (may be the same as `colno`) |
Possible values for `node` with additional keys:
| Node type | Additional keys |
| -------------------- | ------------------------------------------------ |
| `BooleanNode` | `value`: bool |
| `IdNode` | `value`: str |
| `NumberNode` | `value`: int |
| `StringNode` | `value`: str |
| `ContinueNode` | |
| `BreakNode` | |
| `ArgumentNode` | `positional`: node list; `kwargs`: accept_kwargs |
| `ArrayNode` | `args`: node |
| `DictNode` | `args`: node |
| `EmptyNode` | |
| `OrNode` | `left`: node; `right`: node |
| `AndNode` | `left`: node; `right`: node |
| `ComparisonNode` | `left`: node; `right`: node; `ctype`: str |
| `ArithmeticNode` | `left`: node; `right`: node; `op`: str |
| `NotNode` | `right`: node |
| `CodeBlockNode` | `lines`: node list |
| `IndexNode` | `object`: node; `index`: node |
| `MethodNode` | `object`: node; `args`: node; `name`: str |
| `FunctionNode` | `args`: node; `name`: str |
| `AssignmentNode` | `value`: node; `var_name`: str |
| `PlusAssignmentNode` | `value`: node; `var_name`: str |
| `ForeachClauseNode` | `items`: node; `block`: node; `varnames`: list |
| `IfClauseNode` | `ifs`: node list; `else`: node |
| `IfNode` | `condition`: node; `block`: node |
| `UMinusNode` | `right`: node |
| `TernaryNode` | `condition`: node; `true`: node; `false`: node |
We do not guarantee the stability of this format since it is heavily linked to
the internal Meson AST. However, breaking changes (removal of a node type or the
removal of a key) are unlikely and will be announced in the release notes.
# Existing integrations
- [Gnome Builder](https://wiki.gnome.org/Apps/Builder)
- [KDevelop](https://www.kdevelop.org)
- [Eclipse CDT](https://www.eclipse.org/cdt/) (experimental)
- [Meson Cmake Wrapper](https://github.com/prozum/meson-cmake-wrapper) (for cmake IDEs)
- [Meson Cmake Wrapper](https://github.com/prozum/meson-cmake-wrapper) (for cmake IDEs) (currently unmaintained !!)
- [Meson-UI](https://github.com/michaelbadcrumble/meson-ui) (Meson build GUI)
- [Meson Syntax Highlighter](https://plugins.jetbrains.com/plugin/13269-meson-syntax-highlighter) plugin for JetBrains IDEs.

@ -79,12 +79,12 @@ With these four files we are done. To configure, build and run the test suite, w
```console
$ meson builddir && cd builddir
$ ninja
$ ninja test
$ meson compile
$ meson test
```
To then install the project you only need one command.
```console
$ ninja install
$ meson install
```

@ -4,6 +4,18 @@ short-description: Installing targets
# Installing
Invoked via the [following command](Commands.md#install) *(available since 0.47.0)*:
```sh
meson install
```
or alternatively (on older meson versions with `ninja` backend):
```sh
ninja install
```
By default Meson will not install anything. Build targets can be
installed by tagging them as installable in the definition.
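For example:
```meson
executable('prog', 'prog.c', install : true)
```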
@ -97,15 +109,13 @@ packages. This is done with the `DESTDIR` environment variable and it
is used just like with other build systems:
```console
$ DESTDIR=/path/to/staging/area ninja install
$ DESTDIR=/path/to/staging/area meson install
```
## Custom install behaviour
The default install target (executed via, e.g., `ninja install`) does
installing with reasonable default options. More control over the
install behaviour can be achieved with the `meson install` command,
that has been available since 0.47.0.
Installation behaviour can be further customized using
additional arguments.
For example, if you wish to install the current setup without
rebuilding the code (which the default install target always does) and

@ -1,15 +1,15 @@
---
short-description: Unstable kconfig module
short-description: Keyval module
authors:
- name: Mark Schulte, Paolo Bonzini
years: [2017, 2019]
has-copyright: false
...
# Unstable kconfig module
# keyval module
This module parses Kconfig output files to allow use of kconfig
configurations in meson projects.
This module parses files consisting of a series of `key=value` lines. One use
of this module is to load kconfig configurations in meson projects.
**Note**: this does not provide kconfig frontend tooling to generate a
configuration. You still need something such as kconfig frontends (see
@ -23,20 +23,23 @@ chosen the configuration options), output a ".config" file.
The module may be imported as follows:
``` meson
kconfig = import('unstable-kconfig')
keyval = import('keyval')
```
The following functions will then be available as methods on the object
with the name `kconfig`. You can, of course, replace the name
`kconfig` with anything else.
with the name `keyval`. You can, of course, replace the name
`keyval` with anything else.
### kconfig.load()
### keyval.load()
This function loads a kconfig output file and returns a dictionary object.
This function loads a file consisting of a series of `key=value` lines
and returns a dictionary object.
`kconfig.load()` makes no attempt at parsing the values in the
file. Therefore, true boolean values will be represented as the string "y"
and integer values will have to be converted with `.to_int()`.
`keyval.load()` makes no attempt at parsing the values in the file.
In particular boolean and integer values will be represented as strings,
and strings will keep any quoting that is present in the input file. It
can be useful to create a [`configuration_data()`](#configuration_data)
object from the dictionary and use methods such as `get_unquoted()`.
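A short sketch (the file name and key are illustrative):
```meson
keyval = import('keyval')
config = keyval.load('.config')
conf_data = configuration_data(config)
# values are plain strings; quoting from the input file can be stripped with get_unquoted()
version = conf_data.get_unquoted('CONFIG_VERSION', '0')
```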
Kconfig frontends usually have ".config" as the default name for the
configuration file. However, placing the configuration file in the source

@ -48,7 +48,7 @@ Then we need to generate the main pot file. The potfile can have any name but is
Run the following command from your build folder to generate the pot file. It is recommended to inspect it manually afterwards and fill in e.g. proper copyright and contact information.
```console
$ ninja intltest-pot
$ meson compile intltest-pot
```
### generate .po files
@ -56,5 +56,5 @@ $ ninja intltest-pot
For each language listed in the array above we need a corresponding `.po` file. Those can be generated by running the following command from your build folder.
```console
$ ninja intltest-update-po
$ meson compile intltest-update-po
```

@ -5,24 +5,132 @@ documentation on the common values used by both, for the specific values of
one or the other see the [cross compilation](Cross-compilation.md) and [native
environments](Native-environments.md).
## Data Types
There are four basic data types in a machine file:
- strings
- arrays
- booleans
- integers
A string is specified in single quotes:
```ini
[section]
option1 = 'false'
option2 = '2'
```
An array is enclosed in square brackets, and must consist of strings or booleans.
```ini
[section]
option = ['value']
```
A boolean must be either `true` or `false`, and unquoted.
```ini
option = false
```
An integer must be an unquoted numeric constant:
```ini
option = 42
```
## Sections
The following sections are allowed:
- constants
- binaries
- paths
- properties
- project options
- built-in options
### constants
*Since 0.56.0*
String and list concatenation is supported using the `+` operator, joining paths
is supported using the `/` operator.
Entries defined in the `[constants]` section can be used in any other section
(they are always parsed first); entries in any other section can be used only
within that same section, and only after they have been defined.
```ini
[constants]
toolchain = '/toolchain'
common_flags = ['--sysroot=' + toolchain / 'sysroot']
[properties]
c_args = common_flags + ['-DSOMETHING']
cpp_args = c_args + ['-DSOMETHING_ELSE']
[binaries]
c = toolchain / 'gcc'
```
This can be useful with cross file composition as well. A generic cross file
could be composed with a platform specific file where constants are defined:
```ini
# aarch64.ini
[constants]
arch = 'aarch64-linux-gnu'
```
```ini
# cross.ini
[binaries]
c = arch + '-gcc'
cpp = arch + '-g++'
strip = arch + '-strip'
pkgconfig = arch + '-pkg-config'
...
```
This can be used as `meson setup --cross-file aarch64.ini --cross-file cross.ini builddir`.
Note that file composition happens before the parsing of values. The example
below results in `b` being `'HelloWorld'`:
```ini
# file1.ini:
[constants]
a = 'Foo'
b = a + 'World'
```
```ini
#file2.ini:
[constants]
a = 'Hello'
```
The example below results in an error when file1.ini is included before file2.ini
because `b` would be defined before `a`:
```ini
# file1.ini:
[constants]
b = a + 'World'
```
```ini
#file2.ini:
[constants]
a = 'Hello'
```
### Binaries
The binaries section contains a list of binaries. These can be used
internally by meson, or by the `find_program` function:
internally by meson, or by the `find_program` function.
These values must be either strings or an array of strings.
Compilers and linkers are defined here using `<lang>` and `<lang>_ld`.
`<lang>_ld` is special because it is compiler specific. For compilers like
gcc and clang which are used to invoke the linker this is a value to pass to
their "choose the linker" argument (-fuse-ld= in this case). For compilers
like MSVC and Clang-Cl, this is the path to a linker for meson to invoke,
such as `link.exe` or `lld-link.exe`. Support for ls is *new in 0.53.0*
such as `link.exe` or `lld-link.exe`. Support for `ld` is *new in 0.53.0*
*changed in 0.53.1* the `ld` variable was replaced by `<lang>_ld`, because it
regressed a large number of projects. In 0.53.0 the `ld` variable was used
@ -40,8 +148,8 @@ llvm-config = '/usr/lib/llvm8/bin/llvm-config'
Cross example:
```ini
c = '/usr/bin/i586-mingw32msvc-gcc'
cpp = '/usr/bin/i586-mingw32msvc-g++'
c = ['ccache', '/usr/bin/i586-mingw32msvc-gcc']
cpp = ['ccache', '/usr/bin/i586-mingw32msvc-g++']
c_ld = 'gold'
cpp_ld = 'gold'
ar = '/usr/i586-mingw32msvc/bin/ar'
@ -64,8 +172,10 @@ An incomplete list of internally used programs that can be overridden here is:
### Paths and Directories
*Deprecated in 0.56.0* use the built-in section instead.
As of 0.50.0 paths and directories such as libdir can be defined in the native
file in a paths section
and cross files in a paths section. These should be strings.
```ini
[paths]
@ -84,21 +194,79 @@ command line will override any options in the native file. For example, passing
In addition to special data that may be specified in cross files, this
section may contain random key value pairs accessed using the
`meson.get_external_property()`
`meson.get_external_property()`, or `meson.get_cross_property()`.
*Changed in 0.56.0* putting `<lang>_args` and `<lang>_link_args` in the
properties section has been deprecated, and should be put in the built-in
options section.
### Project specific options
*New in 0.56.0*
Path options are not allowed, those must be set in the `[paths]` section.
Project specific options can be set in a cross or native file using the
`[project options]` section of that file (if doing a cross build, the options
from the native file will be ignored).
For setting options in subprojects use the `[<subproject>:project options]`
section instead.
```ini
[project options]
build-tests = true
[zlib:project options]
build-tests = false
```
### Meson built-in options
Meson built-in options can be set the same way:
```ini
[built-in options]
c_std = 'c99'
```
You can set some meson built-in options on a per-subproject basis, such as
`default_library` and `werror`. The order of precedence is:
1) Command line
2) Machine file
3) Build system definitions
```ini
[zlib:built-in options]
default_library = 'static'
werror = false
```
Options set on a per-subproject basis will inherit the
option from the parent if the parent has a setting but the subproject
doesn't, even when there is a default set in the Meson language.
```ini
[built-in options]
default_library = 'static'
```
will make subprojects use `default_library` as static.
## Properties
*New for native files in 0.54.0*
Some options can be set on a per-machine basis (in other words, the value for
the build machine can be different from the value for the host machine in a cross compile).
In these cases the values from both a cross file and a native file are used.
The properties section can contain any variable you like, and is accessed via
`meson.get_external_property`, or `meson.get_cross_property`.
An incomplete list of options is:
- pkg_config_path
- cmake_prefix_path
## Loading multiple machine files
Native files allow layering (cross files can be layered since meson 0.52.0).
More than one native file can be loaded, with values from a previous file being
More than one file can be loaded, with values from a previous file being
overridden by the next. The intention of this is not overriding, but to allow
composing native files. This composition is done by passing the command line
composing files. This composition is done by passing the command line
argument multiple times:
```console

@ -50,7 +50,7 @@ exe = executable('myexe', src)
test('simple test', exe)
```
Here we create a unit test called *simple test*, and which uses the built executable. When the tests are run with the `ninja test` command, the built executable is run. If it returns zero, the test passes. A non-zero return value indicates an error, which Meson will then report to the user.
Here we create a unit test called *simple test*, which uses the built executable. When the tests are run with the `meson test` command, the built executable is run. If it returns zero, the test passes. A non-zero return value indicates an error, which Meson will then report to the user.
A note to Visual Studio users
-----

@ -0,0 +1,53 @@
# Meson CI setup
This document is aimed at Meson contributors and documents
the CI setup used for testing Meson itself. The Meson
project uses multiple CI platforms for covering a wide
range of target systems.
## Travis CI
The travis configuration file is the `.travis.yml` in the
project root. This platform tests cross compilation and
unity builds on a [linux docker image](#docker-images) and
on OSX.
## GitHub actions
The configuration files for GitHub actions are located in
`.github/workflows`. Here, all [images](#docker-images)
are tested with the full `run_tests.py` run. Additionally,
some other, smaller, tests are run.
## Docker images
The Linux docker images are automatically built and
uploaded by GitHub actions. An image rebuild is triggered
when any of the image definition files are changed (in
`ci/ciimage`) in the master branch. Additionally, the
images are also updated weekly.
Each docker image has one corresponding directory in
`ci/ciimage` with an `image.json` and an `install.sh`.
### Image generation
There are no manual Dockerfiles. Instead the Dockerfile is
automatically generated by the `build.py` script. This is
done to ensure that all images have the same layout and can
all be built and tested automatically.
The Dockerfile is generated from the `image.json` file and
basically only adds a few common files and runs the
`install.sh` script which should contain all distribution
specific setup steps. The `common.sh` can be sourced via
`source /ci/common.sh` to access some shared functionality.
To generate the image run `build.py -t build <image>`. A
generated image can be tested with `build.py -t test <image>`.
### Common image setup
Each docker image has a `/ci` directory with an
`env_vars.sh` script. This script has to be sourced before
running the meson test suite.

@ -51,7 +51,7 @@ Using precompiled headers with GCC and derivatives
--
Once you have a file to precompile, you can enable the use of pch for
a give target with a *pch* keyword argument. As an example, let's assume
a given target with a *pch* keyword argument. As an example, let's assume
you want to build a small C binary with precompiled headers.
Let's say the source files of the binary use the system headers `stdio.h`
and `string.h`. Then you create a header file `pch/myexe_pch.h` with this

@ -16,15 +16,34 @@ $ meson init --language=c --name=myproject --version=0.1
```
This would create the build definitions for a helloworld type
project. The result can be compiled as usual. For example compiling it
with Ninja could be done like this:
project. The result can be compiled as usual. For example it
could be done like this:
```
$ meson builddir
$ ninja -C builddir
$ meson setup builddir
$ meson compile -C builddir
```
The generator has many different projects and settings. They can all
be listed by invoking the command `meson init --help`.
This feature is available since Meson version 0.45.0.
# Generate a build script for an existing project
With `meson init` you can generate a build script for an existing
project with existing project files by running the command in the
root directory of your project. Meson currently supports this
feature for `executable` and `jar` projects.
# Build after generation of template
It is possible to have Meson generate a build directory from the
`meson init` command without running `meson setup`. This is done
by passing the `-b` or `--build` switch.
```console
$ mkdir project_name
$ cd project_name
$ meson init --language=c --name=myproject --version=0.1 --build
```

@ -21,7 +21,7 @@ This method generates the necessary targets to build translation files with lrel
- `ts_files`, the list of input translation files produced by Qt's lupdate tool.
- `install` when true, this target is installed during the install step (optional).
- `install_dir` directory to install to (optional).
- `build_by_default` when set to true, to have this target be built by default, that is, when invoking plain ninja; the default value is false (optional).
- `build_by_default` when set to true, to have this target be built by default, that is, when invoking `meson compile`; the default value is false (optional).
## has_tools

@ -93,8 +93,8 @@ are working on. The steps to take are very simple.
```console
$ cd /path/to/source/root
$ meson builddir && cd builddir
$ ninja
$ ninja test
$ meson compile
$ meson test
```
The only thing to note is that you need to create a separate build
@ -104,14 +104,14 @@ directory. This allows you to have multiple build trees with different
configurations at the same time. This way generated files are not
added into revision control by accident.
To recompile after code changes, just type `ninja`. The build command
To recompile after code changes, just type `meson compile`. The build command
is always the same. You can do arbitrary changes to source code and
build system files and Meson will detect those and will do the right
thing. If you want to build optimized binaries, just use the argument
`--buildtype=debugoptimized` when running Meson. It is recommended
that you keep one build directory for unoptimized builds and one for
optimized ones. To compile any given configuration, just go into the
corresponding build directory and run `ninja`.
corresponding build directory and run `meson compile`.
Meson will automatically add compiler flags to enable debug
information and compiler warnings (i.e. `-g` and `-Wall`). This means
@ -128,9 +128,9 @@ build and install Meson projects are the following.
```console
$ cd /path/to/source/root
$ meson --prefix /usr --buildtype=plain builddir -Dc_args=... -Dcpp_args=... -Dc_link_args=... -Dcpp_link_args=...
$ ninja -v -C builddir
$ ninja -C builddir test
$ DESTDIR=/path/to/staging/root ninja -C builddir install
$ meson compile -C builddir
$ meson test -C builddir
$ DESTDIR=/path/to/staging/root meson install -C builddir
```
The command line switch `--buildtype=plain` tells Meson not to add its
@ -139,7 +139,7 @@ on used flags.
This is very similar to other build systems. The only difference is
that the `DESTDIR` variable is passed as an environment variable
rather than as an argument to `ninja install`.
rather than as an argument to `meson install`.
As distro builds always happen from scratch, you might consider
enabling [unity builds](Unity-builds.md) on your packages because they

File diff suppressed because it is too large

@ -81,14 +81,17 @@ set in the cross file.
| alpha | DEC Alpha processor |
| arc | 32 bit ARC processor |
| arm | 32 bit ARM processor |
| e2k | MCST Elbrus processor |
| avr | Atmel AVR processor |
| c2000 | 32 bit C2000 processor |
| dspic | 16 bit Microchip dsPIC |
| e2k | MCST Elbrus processor |
| ia64 | Itanium processor |
| m68k | Motorola 68000 processor |
| microblaze | MicroBlaze processor |
| mips | 32 bit MIPS processor |
| mips64 | 64 bit MIPS processor |
| parisc | HP PA-RISC processor |
| pic24 | 16 bit Microchip PIC24 |
| ppc | 32 bit PPC processors |
| ppc64 | 64 bit PPC processors |
| riscv32 | 32 bit RISC-V Open ISA |
@ -97,12 +100,11 @@ set in the cross file.
| rx | Renesas RX 32 bit MCU |
| s390 | IBM zSystem s390 |
| s390x | IBM zSystem s390x |
| sh4 | SuperH SH-4 |
| sparc | 32 bit SPARC |
| sparc64 | SPARC v9 processor |
| wasm32 | 32 bit Webassembly |
| wasm64 | 64 bit Webassembly |
| pic24 | 16 bit Microchip PIC24 |
| dspic | 16 bit Microchip dsPIC |
| x86 | 32 bit x86 processor |
| x86_64 | 64 bit x86 processor |
@ -120,6 +122,7 @@ These are provided by the `.system()` method call.
| Value | Comment |
| ----- | ------- |
| android | By convention only, subject to change |
| cygwin | The Cygwin environment for Windows |
| darwin | Either OSX or iOS |
| dragonfly | DragonFly BSD |
@ -153,6 +156,10 @@ These are the parameter names for passing language specific arguments to your bu
| Rust | rust_args | rust_link_args |
| Vala | vala_args | vala_link_args |
All these `<lang>_*` options are specified per machine. See [specifying
options per machine](Builtin-options.md#Specifying-options-per-machine) for
how to do this in cross builds.
## Compiler and linker flag environment variables
These environment variables will be used to modify the compiler and
@ -175,6 +182,10 @@ instead.
| RUSTFLAGS | Flags for the Rust compiler |
| LDFLAGS | The linker flags, used for all languages |
N.B. these settings are specified per machine, and so the environment variables
actually come in pairs. See the [environment variables per
machine](#Environment-variables-per-machine) section for details.
## Function Attributes
These are the parameter names that are supported using
@ -187,49 +198,50 @@ These values are supported using the GCC style `__attribute__` annotations,
which are supported by GCC, Clang, and other compilers.
| Name |
|----------------------|
| alias |
| aligned |
| alloc_size |
| always_inline |
| artificial |
| cold |
| const |
| constructor |
| constructor_priority |
| deprecated |
| destructor |
| error |
| externally_visible |
| fallthrough |
| flatten |
| format |
| format_arg |
| gnu_inline |
| hot |
| ifunc |
| malloc |
| noclone |
| noinline |
| nonnull |
| noreturn |
| nothrow |
| optimize |
| packed |
| pure |
| returns_nonnull |
| unused |
| used |
| visibility* |
| visibility:default† |
| visibility:hidden† |
| visibility:internal† |
| visibility:protected†|
| warning |
| warn_unused_result |
| weak |
| weakreaf |
| Name |
|--------------------------|
| alias |
| aligned |
| alloc_size |
| always_inline |
| artificial |
| cold |
| const |
| constructor |
| constructor_priority |
| deprecated |
| destructor |
| error |
| externally_visible |
| fallthrough |
| flatten |
| format |
| format_arg |
| force_align_arg_pointer³ |
| gnu_inline |
| hot |
| ifunc |
| malloc |
| noclone |
| noinline |
| nonnull |
| noreturn |
| nothrow |
| optimize |
| packed |
| pure |
| returns_nonnull |
| unused |
| used |
| visibility* |
| visibility:default† |
| visibility:hidden† |
| visibility:internal† |
| visibility:protected† |
| warning |
| warn_unused_result |
| weak |
| weakref                  |
\* *Changed in 0.52.0* the "visibility" target no longer includes
"protected", which is not present in Apple's clang.
@ -237,6 +249,8 @@ which are supported by GCC, Clang, and other compilers.
† *New in 0.52.0* These split visibility attributes are preferred to the plain
"visibility" as they provide narrower checks.
³ *New in 0.55.0*
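These attribute names are typically checked via the compiler object's `has_function_attribute()` method; a short sketch (the macro name is illustrative):
```meson
cc = meson.get_compiler('c')
if cc.has_function_attribute('fallthrough')
  add_project_arguments('-DHAVE_ATTRIBUTE_FALLTHROUGH', language : 'c')
endif
```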
### MSVC __declspec
These values are supported using the MSVC style `__declspec` annotation,
@ -265,6 +279,10 @@ These are the values that can be passed to `dependency` function's
## Compiler and Linker selection variables
N.B. these settings are specified per machine, and so the environment variables
actually come in pairs. See the [environment variables per
machine](#Environment-variables-per-machine) section for details.
| Language | Compiler | Linker | Note |
|---------------|----------|-----------|---------------------------------------------|
| C | CC | CC_LD | |
@ -278,5 +296,28 @@ These are the values that can be passed to `dependency` function's
| C# | CSC | CSC | The linker is the compiler |
*The old environment variables are still supported, but are deprecated and will
be removed in a future version of meson.
be removed in a future version of meson.*
## Environment variables per machine
Since *0.54.0*, following Autotools and other legacy build systems, environment
variables that affect machine-specific settings come in pairs: for every bare
environment variable `FOO`, there is a suffixed `FOO_FOR_BUILD`, where `FOO`
just affects the host machine configuration, while `FOO_FOR_BUILD` just affects
the build machine configuration. For example:
- `PKG_CONFIG_PATH_FOR_BUILD` controls the paths pkg-config will search for
just `native: true` dependencies (build machine).
- `PKG_CONFIG_PATH` controls the paths pkg-config will search for just
`native: false` dependencies (host machine).
This mirrors the `build.` prefix used for (built-in) meson options, which has
the same meaning.
This is useful for cross builds. In native builds, build = host, and the
unsuffixed environment variables alone suffice.
Prior to *0.54.0*, there were no `_FOR_BUILD`-suffixed variables, and most
environment variables only affected native machine configurations, though this
wasn't consistent (e.g. `PKG_CONFIG_PATH` still affected cross builds).

@ -14,7 +14,7 @@ If it set to 0 then the PTHREAD_POOL_SIZE option will not be passed.
## Introduce dataonly for the pkgconfig module
This allows users to disable writing out the inbuilt variables to
the pkg-config file as they might actualy not be required.
the pkg-config file as they might actually not be required.
One reason to have this is for architecture-independent pkg-config
files in projects which also have architecture-dependent outputs.
@ -359,3 +359,8 @@ target that has eight source files, Meson will generate two unity
files each of which includes four source files. The old behaviour can
be replicated by setting `unity_size` to a large value, such as 10000.
## Verbose mode for `meson compile`
The new option `--verbose` has been added to `meson compile`; it enables
more verbose compilation logs. Note that for the VS backend it means that logs will
be less verbose by default (without the `--verbose` option).

@ -0,0 +1,307 @@
---
title: Release 0.55.0
short-description: Release notes for 0.55.0
...
# New features
## rpath removal now more careful
On Linux-like systems, meson adds rpath entries to allow running apps
in the build tree, and then removes those build-time-only
rpath entries when installing. Rpath entries may also come
in via LDFLAGS and via .pc files. Meson used to remove those
latter rpath entries by accident, but is now more careful.
## Added ability to specify targets in `meson compile`
It's now possible to specify targets in `meson compile`, which will result in building only the requested targets.
Usage: `meson compile [TARGET [TARGET...]]`
`TARGET` has the following syntax: `[PATH/]NAME[:TYPE]`.
`NAME`: name of the target from `meson.build` (e.g. `foo` from `executable('foo', ...)`).
`PATH`: path to the target relative to the root `meson.build` file. Note: relative path for a target specified in the root `meson.build` is `./`.
`TYPE`: type of the target (e.g. `shared_library`, `executable`, etc.)
`PATH` and/or `TYPE` can be omitted if the resulting `TARGET` can be used to uniquely identify the target in `meson.build`.
For example targets from the following code:
```meson
shared_library('foo', ...)
static_library('foo', ...)
executable('bar', ...)
```
can be invoked with `meson compile foo:shared_library foo:static_library bar`.
## Test protocol for gtest
Due to the popularity of GTest (Google Test) among C and C++ developers, Meson
now supports a special protocol for it. With this protocol Meson injects
arguments into GTest binaries so that they output JUnit results, reads that JUnit, and adds
the output to the JUnit report it generates.
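A hedged sketch of how a test might opt into the protocol (file names are illustrative):
```meson
gtest_dep = dependency('gtest', main : true)
foo_test = executable('foo_test', 'foo_test.cpp', dependencies : gtest_dep)
test('foo', foo_test, protocol : 'gtest')
```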
## meson.add_*_script methods accept new types
All three (`add_install_script`, `add_dist_script`, and
`add_postconf_script`) now accept ExternalPrograms (as returned by
`find_program`), Files, and the output of `configure_file`. The dist and
postconf methods cannot accept other types because of when they are run.
While dist could, in theory, take other dependencies, it would require more
extensive changes, particularly to the backend.
```meson
meson.add_install_script(find_program('foo'), files('bar'))
meson.add_dist_script(find_program('foo'), files('bar'))
meson.add_postconf_script(find_program('foo'), files('bar'))
```
The install script variant is also able to accept custom_targets,
custom_target indexes, and build targets (executables, libraries), and can
use built executables as the script to run
```meson
installer = executable('installer', ...)
meson.add_install_script(installer, ...)
meson.add_install_script('foo.py', installer)
```
## Machine file constants
Native and cross files now support string and list concatenation using the `+`
operator, and joining paths using the `/` operator.
Entries defined in the `[constants]` section can be used in any other section.
An entry defined in any other section can be used only within that same section and only
after it has been defined.
```ini
[constants]
toolchain = '/toolchain'
common_flags = ['--sysroot=' + toolchain + '/sysroot']
[properties]
c_args = common_flags + ['-DSOMETHING']
cpp_args = c_args + ['-DSOMETHING_ELSE']
[binaries]
c = toolchain + '/gcc'
```
## Configure CMake subprojects with meson.subproject_options
Meson now supports passing configuration options to CMake and overriding
certain build details extracted from the CMake subproject.
The new CMake configuration options object is very similar to the
[configuration data object](Reference-manual.md#configuration-data-object)
returned by [`configuration_data`](Reference-manual.md#configuration_data). It
is generated by the `subproject_options` function.
All configuration options have to be set *before* the subproject is configured
and must be passed to the `subproject` method via the `options` key. Altering
the configuration object won't have any effect on previous `cmake.subproject`
calls.
**Note:** The `cmake_options` kwarg for the `subproject` function is now
deprecated since it is replaced by the new `options` system.
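A hedged sketch (subproject and option names are illustrative):
```meson
cmake = import('cmake')
opts = cmake.subproject_options()
opts.add_cmake_defines({'BUILD_TESTING': false})
sub = cmake.subproject('somelib', options : opts)
somelib_dep = sub.dependency('somelib')
```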
## find_program: Fixes when the program has been overridden by executable
When a program has been overridden by an executable, the returned object of
find_program() had some issues:
```meson
# In a subproject:
exe = executable('foo', ...)
meson.override_find_program('foo', exe)
# In main project:
# The version check was crashing meson.
prog = find_program('foo', version : '>=1.0')
# This was crashing meson.
message(prog.path())
# New method to be consistent with built objects.
message(prog.full_path())
```
## Response files enabled on Linux, reined in on Windows
Meson used to always use response files on Windows,
but never on Linux.
It now strikes a happier balance, using them on both platforms,
but only when needed to avoid command line length limits.
## `unstable-kconfig` module renamed to `unstable-keyval`
The `unstable-kconfig` module is now renamed to `unstable-keyval`.
We expect this module to become stable once it has some usage experience,
specifically in the next or the following release.
## Fatal warnings in `gnome.generate_gir()`
`gnome.generate_gir()` now has `fatal_warnings` keyword argument to abort when
a warning is produced. This is useful for example in CI environment where it's
important to catch potential issues.
## b_ndebug support for D language compilers
D Language compilers will now set -release/--release/-frelease (depending on
the compiler) when the b_ndebug flag is set.
## Meson test now produces JUnit xml from results
Meson will now generate a JUnit compatible XML file from test results. It
will be in the `meson-logs` directory and is called `testlog.junit.xml`.
## Config tool based dependencies no longer search PATH for cross compiling
Before 0.55.0, config tool based dependencies (llvm-config, cups-config, etc.)
would search the system $PATH if they weren't defined in the cross file. This has
been a source of bugs and has been deprecated. It is now removed; config tool
binaries must be specified in the cross file or the dependency will not
be found.
## Rename has_exe_wrapper -> can_run_host_binaries
The old name was confusing as it didn't really match the behavior of the
function. The old name remains as an alias (the behavior hasn't changed), but
is now deprecated.
## String concatenation in meson_options.txt
It is now possible to use string concatenation (with the `+` operator) in the
meson_options.txt file. This allows splitting long option descriptions.
```meson
option(
'testoption',
type : 'string',
value : 'optval',
description : 'An option with a very long description' +
'that does something in a specific context'
)
```
## Wrap fallback URL
Wrap files can now define `source_fallback_url` and `patch_fallback_url` to be
used in case the main server is temporarily down.
## Clang coverage support
llvm-cov is now used to generate coverage information when clang is used as
the compiler.
## Local wrap source and patch files
It is now possible to use the `patch_filename` and `source_filename` value in a
`.wrap` file without `*_url` to specify a local source / patch file. All local
files must be located in the `subprojects/packagefiles` directory. The `*_hash`
entries are optional with this setup.
## Local wrap patch directory
Wrap files can now specify `patch_directory` instead of `patch_filename` in case
the overlay files are local. Every file in that directory, and in its subdirectories,
will be copied to the subproject directory. This can be used for example to add
`meson.build` files to a project that does not use the Meson build system upstream.
The patch directory must be placed in the `subprojects/packagefiles` directory.
## Patch on all wrap types
`patch_*` keys are not limited to `wrap-file` any more; they can be specified for
all wrap types.
## link_language argument added to all targets
Previously the `link_language` argument was only supposed to be allowed in
executables, because the linker used needs to be the linker for the language
that implements the main function. Unfortunately it didn't work in that case,
and, even worse, if it had been implemented properly it would have worked for
*all* targets. In 0.55.0 this restriction has been removed, and the bug fixed.
It is now valid for `executable` and all derivatives of `library`.
## meson dist --no-tests
`meson dist` has a new option `--no-tests` to skip building and testing the generated
packages. It can be used to avoid wasting time, for example when run in CI that
already does its own testing.
## Force fallback for specific subprojects
A newly-added `--force-fallback-for` command line option can now be used to
force fallback for specific subprojects.
Example:
```
meson build --force-fallback-for=foo,bar
```
## Implicit dependency fallback
`dependency('foo')` now automatically falls back if the dependency is not found on
the system but a subproject wrap file or directory exists with the same name.
That means that simply adding `subprojects/foo.wrap` is enough to add fallback
to any `dependency('foo')` call. It does, however, require that the subproject calls
`meson.override_dependency('foo', foo_dep)` to specify which dependency object
should be used for `foo`.
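A minimal sketch (file and target names are illustrative):
```meson
# subprojects/foo/meson.build
foo_lib = library('foo', 'foo.c')
foo_dep = declare_dependency(link_with : foo_lib,
  include_directories : include_directories('.'))
meson.override_dependency('foo', foo_dep)

# main project: falls back to the subproject when 'foo' is not found on the system
foo = dependency('foo')
```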
## Wrap file `provide` section
Wrap files can define the dependencies they provide in the `[provide]` section.
When `foo.wrap` provides the dependency `foo-1.0`, any call to `dependency('foo-1.0')`
will automatically fall back to that subproject even if no `fallback` keyword
argument is given. See [Wrap documentation](Wrap-dependency-system-manual.md#provide_section).
## `find_program()` fallback
When a program cannot be found on the system but a wrap file has its name in the
`[provide]` section, that subproject will be used as fallback.
## Test scripts are given the exe wrapper if needed
Meson will now set `MESON_EXE_WRAPPER` to the properly wrapped and joined
representation. For Unix-like OSes this means Python's shlex.join; on Windows
an implementation that attempts to properly quote Windows arguments is used.
This allows wrapper scripts to run test binaries, instead of just skipping them.
For example, if the wrapper is `['emulator', '--script']`, it will be passed
as `MESON_EXE_WRAPPER="emulator --script"`.
## Added ability to specify backend arguments in `meson compile`
It's now possible to specify backend specific arguments in `meson compile`.
Usage: `meson compile [--vs-args=args] [--ninja-args=args]`
```
--ninja-args NINJA_ARGS Arguments to pass to `ninja` (applied only on `ninja` backend).
--vs-args VS_ARGS Arguments to pass to `msbuild` (applied only on `vs` backend).
```
These arguments use the following syntax:
If you only pass a single string, then it is considered to have all values separated by commas. Thus invoking the following command:
```
$ meson compile --ninja-args=-n,-d,explain
```
would add `-n`, `-d` and `explain` arguments to ninja invocation.
If you need to have commas or spaces in your string values, then you need to pass the value with proper shell quoting like this:
```
$ meson compile "--ninja-args=['a,b', 'c d']"
```
## Introspection API changes
dumping the AST (--ast): **new in 0.55.0**
- prints the AST of a meson.build as JSON

@ -29,7 +29,7 @@ run_target('inspector',
Run targets are not run by default. To run it run the following command.
```console
$ ninja inspector
$ meson compile inspector
```
All additional entries in `run_target`'s `command` array are passed unchanged to the inspector script, so you can do things like this:

@ -9,13 +9,12 @@ from the source tree with the command `/path/to/source/meson.py`. Meson may
also be installed in which case the command is simply `meson`. In this manual
we only use the latter format for simplicity.
Additionally, the invocation can pass options to meson. The list of options is
documented [here](Builtin-options.md).
At the time of writing only a command line version of Meson is available. This
means that Meson must be invoked using the terminal. If you wish to use the
MSVC compiler, you need to run Meson under "Visual Studio command prompt".
All available meson commands are listed on the [commands reference page](Commands.md).
## Configuring the build directory
Let us assume that we have a source tree that has a Meson build system. This
@ -41,6 +40,9 @@ build backend in the build directory. By default Meson generates a *debug
build*, which turns on basic warnings and debug information and disables
compiler optimizations.
Additionally, the invocation can pass options to meson. The list of options is
documented [here](Builtin-options.md).
You can specify a different type of build with the `--buildtype` command line
argument. It can have one of the following values.
@ -83,7 +85,7 @@ during configuration time. As an example, here is how you would use Meson to
generate a Visual studio solution.
```sh
meson setup <build dir> --backend=vs2010
meson setup <build dir> --backend=vs
```
You can then open the generated solution with Visual Studio and compile it in
@ -105,9 +107,18 @@ linker arguments needed.
## Building from the source
If you are not using an IDE, Meson uses the [Ninja build
system](https://ninja-build.org/) to actually build the code. To start the
build, simply type the following command.
To start the build, simply type the following command.
```sh
meson compile -C builddir
```
See [`meson compile` description](Commands.md#compile) for more info.
### Building directly with ninja
By default Meson uses the [Ninja build system](https://ninja-build.org/) to
actually build the code. To start the build, simply type the following command.
```sh
ninja -C builddir
@ -133,20 +144,29 @@ Meson provides native support for running tests. The command to do that is
simple.
```sh
ninja -C builddir test
meson test -C builddir
```
See [`meson test` description](Commands.md#test) for more info.
Meson does not force the use of any particular testing framework. You are free
to use GTest, Boost Test, Check or even custom executables.
Note: it can be also invoked directly with ninja with the following command:
```sh
ninja -C builddir test
```
## Installing
Installing the built software is just as simple.
```sh
ninja -C builddir install
meson install -C builddir
```
See [`meson install` description](Commands.md#install) for more info.
Note that Meson will only install build targets explicitly tagged as
installable, as detailed in the [installing targets
documentation](Installing.md).
@ -157,7 +177,12 @@ Meson also supports the `DESTDIR` variable used in e.g. building packages. It
is used like this:
```sh
DESTDIR=/path/to/staging ninja -C builddir install
DESTDIR=/path/to/staging meson install -C builddir
```
Note: it can be also invoked directly with ninja with the following command:
```sh
ninja -C builddir install
```
## Command line help

@ -11,6 +11,12 @@ Meson build files.
Always spaces.
## Naming Variable
The most consistent naming convention is snake case. Let's say you would
like to refer to your executable: something like `my_exe` would work, or
just `exe`.
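For example:
```meson
my_exe = executable('myprog', 'main.c')
```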
## Naming options
There are two ways of naming project options. As an example for

@ -212,6 +212,9 @@ the following command-line options:
calls, and those are meant to be used for sources that cannot be
provided by the system, such as copylibs.
This option may be overridden by `--force-fallback-for` for specific
dependencies.
* **--wrap-mode=forcefallback**
Meson will not look at the system for any dependencies which have
@ -220,6 +223,41 @@ the following command-line options:
want to specifically build against the library sources provided by
your subprojects.
* **--force-fallback-for=list,of,dependencies**
Meson will not look at the system for any dependencies listed there,
provided a fallback was supplied when the dependency was declared.
This option takes precedence over `--wrap-mode=nofallback`, and when
used in combination with `--wrap-mode=nodownload` will only work
if the dependency has already been downloaded.
This is useful when your project has many fallback dependencies,
but you only want to build against the library sources for a few
of them.
**Warning**: This could lead to mixing system and subproject versions of the
same library in the same process. Take this case as an example:
- Libraries `glib-2.0` and `gstreamer-1.0` are installed on your system.
- `gstreamer-1.0` depends on `glib-2.0`, pkg-config file `gstreamer-1.0.pc`
has `Requires: glib-2.0`.
- In your application build definition you do:
```meson
executable('app', ...,
dependencies: [
dependency('glib-2.0', fallback: 'glib'),
dependency('gstreamer-1.0', fallback: 'gstreamer')],
)
```
- You configure with `--force-fallback-for=glib`.
This results in linking to two different versions of the library `glib-2.0`
because `dependency('glib-2.0', fallback: 'glib')` will return the
subproject dependency, but `dependency('gstreamer-1.0', fallback: 'gstreamer')`
will not fall back and will return the system dependency, including the `glib-2.0`
library. To avoid that situation, every dependency that itself depends on
`glib-2.0` must also be forced to fall back, in this case with
`--force-fallback-for=glib,gstreamer`.
## Download subprojects
*Since 0.49.0*

@ -16,12 +16,12 @@ statements* and *includes*.
Usually one Meson statement takes just one line. There is no way to
have multiple statements on one line as in e.g. *C*. Function and
method calls' argument lists can be split over multiple lines. Meson
will autodetect this case and do the right thing. In other cases you
can get multi-line statements by ending the line with a `\`. Apart
from line ending whitespace has no syntactic meaning.
will autodetect this case and do the right thing.
Variables
--
In other cases, *(added 0.50)* you can get multi-line statements by ending the
line with a `\`. Apart from line endings, whitespace has no syntactic meaning.
## Variables
Variables in Meson work just like in other high level programming
languages. A variable can contain a value of any type, such as an
@ -46,8 +46,7 @@ var2 += [4]
# var1 is still [1, 2, 3]
```
Numbers
--
## Numbers
Meson supports only integer numbers. They are declared simply by
writing them out. Basic arithmetic operations are supported.
@ -85,8 +84,7 @@ int_var = 42
string_var = int_var.to_string()
```
Booleans
--
## Booleans
A boolean is either `true` or `false`.
@ -94,8 +92,7 @@ A boolean is either `true` or `false`.
truth = true
```
Strings
--
## Strings
Strings in Meson are declared with single quotes. To enter a literal
single quote do it like this:
@ -126,7 +123,7 @@ As in python and C, up to three octal digits are accepted in `\ooo`.
Unrecognized escape sequences are left in the string unchanged, i.e., the
backslash is left in the string.
#### String concatenation
### String concatenation
Strings can be concatenated to form a new string using the `+` symbol.
@ -136,7 +133,25 @@ str2 = 'xyz'
combined = str1 + '_' + str2 # combined is now abc_xyz
```
#### Strings running over multiple lines
### String path building
*(Added 0.49)*
You can concatenate any two strings using `/` as an operator to build paths.
This will always use `/` as the path separator on all platforms.
```meson
joined = '/usr/share' / 'projectname' # => /usr/share/projectname
joined = '/usr/local' / '/etc/name' # => /etc/name
joined = 'C:\\foo\\bar' / 'builddir' # => C:/foo/bar/builddir
joined = 'C:\\foo\\bar' / 'D:\\builddir' # => D:/builddir
```
Note that this is equivalent to using [`join_paths()`](Reference-manual.md#join_paths),
which was obsoleted by this operator.
### Strings running over multiple lines
Strings running over multiple lines can be declared with three single
quotes, like this:
@ -152,7 +167,7 @@ These are raw strings that do not support the escape sequences listed
above. These strings can also be combined with the string formatting
functionality described below.
#### String formatting
### String formatting
Strings can be built using the string formatting functionality.
@ -165,12 +180,12 @@ res = template.format('text', 1, true)
As can be seen, the formatting works by replacing placeholders of type
`@number@` with the corresponding argument.
#### String methods
### String methods
Strings also support a number of other methods that return transformed
copies.
**.strip()**
#### .strip()
```meson
# Similar to the Python str.strip(). Removes leading/ending spaces and newlines
@ -179,7 +194,7 @@ stripped_define = define.strip()
# 'stripped_define' now has the value '-Dsomedefine'
```
**.to_upper()**, **.to_lower()**
#### .to_upper(), .to_lower()
```meson
target = 'x86_FreeBSD'
upper = target.to_upper() # upper now has the value 'X86_FREEBSD'
lower = target.to_lower() # lower now has the value 'x86_freebsd'
```
**.to_int()**
#### .to_int()
```meson
version = '1'
@ -195,7 +210,7 @@ version = '1'
ver_int = version.to_int()
```
**.contains()**, **.startswith()**, **.endswith()**
#### .contains(), .startswith(), .endswith()
```meson
target = 'x86_FreeBSD'
@ -205,7 +220,27 @@ is_x86 = target.startswith('x86') # boolean value 'true'
is_bsd = target.to_lower().endswith('bsd') # boolean value 'true'
```
**.split()**, **.join()**
#### .substring()
Since 0.56.0, you can extract a substring from a string.
```meson
# Similar to the Python str[start:end] syntax
target = 'x86_FreeBSD'
platform = target.substring(0, 3) # prefix string value 'x86'
system = target.substring(4) # suffix string value 'FreeBSD'
```
The method also accepts negative values for `start` and `end`; a negative index
is interpreted relative to the end of the string, i.e. `-n` means `len(string) - n`.
```meson
string = 'foobar'
string.substring(-5, -3) # => 'oo'
string.substring(1, -1) # => 'ooba'
```
#### .split(), .join()
```meson
# Similar to the Python str.split()
@ -246,7 +281,7 @@ api_version = '@0@.@1@'.format(version_array[0], version_array[1])
# api_version now (again) has the value '0.2'
```
**.underscorify()**
#### .underscorify()
```meson
name = 'Meson Docs.txt#Reference-manual'
@ -256,7 +291,7 @@ underscored = name.underscorify()
# underscored now has the value 'Meson_Docs_txt_Reference_manual'
```
**.version_compare()**
#### .version_compare()
```meson
version = '1.2.3'
@ -266,8 +301,15 @@ is_new = version.version_compare('>=2.0')
# Supports the following operators: '>', '<', '>=', '<=', '!=', '==', '='
```
Arrays
--
Note that Meson's version comparison conventions can be surprising; for example:
```meson
'3.6'.version_compare('>=3.6.0') == false
```
It is best to be unambiguous and specify the full revision level to compare.
## Arrays
Arrays are delimited by brackets. An array can contain an arbitrary number of objects of any type.
@ -302,6 +344,7 @@ assign it to `my_array` instead of modifying the original since all
objects in Meson are immutable.
Since 0.49.0, you can check if an array contains an element like this:
```meson
my_array = [1, 2]
if 1 in my_array
@ -312,7 +355,7 @@ if 1 not in my_array
endif
```
#### Array methods
### Array methods
The following methods are defined for all arrays:
@ -320,8 +363,7 @@ The following methods are defined for all arrays:
- `contains`, returns `true` if the array contains the object given as argument, `false` otherwise
- `get`, returns the object at the given index, negative indices count from the back of the array, indexing out of bounds is a fatal error. Provided for backwards-compatibility, it is identical to array indexing.
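For illustration, a short sketch of these methods in use:
```meson
my_array = ['a', 'b', 'c']
n = my_array.length()          # 3
has_b = my_array.contains('b') # true
last = my_array.get(-1)        # 'c', counting from the back
```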
Dictionaries
--
## Dictionaries
Dictionaries are delimited by curly braces. A dictionary can contain an
arbitrary number of key value pairs. Keys are required to be strings, values can
@ -346,6 +388,7 @@ Visit the [Reference Manual](Reference-manual.md#dictionary-object) to read
about the methods exposed by dictionaries.
Since 0.49.0, you can check if a dictionary contains a key like this:
```meson
my_dict = {'foo': 42, 'bar': 43}
if 'foo' in my_dict
@ -361,14 +404,14 @@ endif
*Since 0.53.0* Keys can be any expression evaluating to a string value, not limited
to string literals any more.
```meson
d = {'a' + 'b' : 42}
k = 'cd'
d += {k : 43}
```
Function calls
--
## Function calls
Meson provides a set of usable functions. The most common use case is
creating build objects.
@ -413,8 +456,7 @@ executable('progname', 'prog.c',
Attempting to do this causes Meson to immediately exit with an error.
Method calls
--
## Method calls
Objects can have methods, which are called with the dot operator. The
exact methods provided depend on the object.
@ -424,8 +466,7 @@ myobj = some_function()
myobj.do_something('now')
```
If statements
--
## If statements
If statements work just like in other languages.
@ -446,8 +487,7 @@ if opt != 'foo'
endif
```
Logical operations
--
## Logical operations
Meson has the standard range of logical operations which can be used in
`if` statements.
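As a minimal sketch (assuming `is_debug` and `is_windows` are booleans defined elsewhere):
```meson
if is_debug and not is_windows
  message('debug build on a non-Windows platform')
endif
```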
@ -537,8 +577,7 @@ endforeach
# result is ['a', 'b']
```
Comments
--
## Comments
A comment starts with the `#` character and extends until the end of the line.
@ -547,8 +586,7 @@ some_function() # This is a comment
some_other_function()
```
Ternary operator
--
## Ternary operator
The ternary operator works just like in other languages.
@ -560,8 +598,7 @@ The only exception is that nested ternary operators are forbidden to
improve legibility. If your branching needs are more complex than this
you need to write an `if/else` construct.
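For reference, a small sketch of the allowed form, with the forbidden nested form shown as a comment (`cond`, `cond1` and `cond2` are assumed booleans):
```meson
x = cond ? 'debug' : 'release'
# Forbidden: nested ternary operators
# y = cond1 ? 'a' : (cond2 ? 'b' : 'c')
```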
Includes
--
## Includes
Most source trees have multiple subdirectories to process. These can
be handled by Meson's `subdir` command. It changes to the given
@ -576,8 +613,7 @@ test_data_dir = 'data'
subdir('tests')
```
User-defined functions and methods
--
## User-defined functions and methods
Meson does not currently support user-defined functions or
methods. The addition of user-defined functions would make Meson
@ -588,3 +624,71 @@ FAQ](FAQ.md#why-is-meson-not-just-a-python-module-so-i-could-code-my-build-setup
because of this limitation you find yourself copying and pasting code
a lot you may be able to use a [`foreach` loop
instead](#foreach-statements).
## Stability Promises
Meson is very actively developed and continuously improved. There is a
possibility that future enhancements to the Meson build system will require
changes to the syntax. Such changes might be the addition of new reserved
keywords, changing the meaning of existing keywords or additions around the
basic building blocks like statements and fundamental types. It is planned
to stabilize the syntax with the 1.0 release.
## Grammar
This is the full Meson grammar, as it is used to parse Meson build definition files:
```
additive_expression: multiplicative_expression | (additive_expression additive_operator multiplicative_expression)
additive_operator: "+" | "-"
argument_list: positional_arguments ["," keyword_arguments] | keyword_arguments
array_literal: "[" [expression_list] "]"
assignment_expression: conditional_expression | (logical_or_expression assignment_operator assignment_expression)
assignment_operator: "=" | "*=" | "/=" | "%=" | "+=" | "-="
boolean_literal: "true" | "false"
build_definition: (NEWLINE | statement)*
condition: expression
conditional_expression: logical_or_expression | (logical_or_expression "?" expression ":" assignment_expression)
decimal_literal: DECIMAL_NUMBER
DECIMAL_NUMBER: /[1-9][0-9]*/
dictionary_literal: "{" [key_value_list] "}"
equality_expression: relational_expression | (equality_expression equality_operator relational_expression)
equality_operator: "==" | "!="
expression: assignment_expression
expression_list: expression ("," expression)*
expression_statement: expression
function_expression: id_expression "(" [argument_list] ")"
hex_literal: "0x" HEX_NUMBER
HEX_NUMBER: /[a-fA-F0-9]+/
id_expression: IDENTIFIER
IDENTIFIER: /[a-zA-Z_][a-zA-Z_0-9]*/
identifier_list: id_expression ("," id_expression)*
integer_literal: decimal_literal | octal_literal | hex_literal
iteration_statement: "foreach" identifier_list ":" id_expression NEWLINE (statement | jump_statement)* "endforeach"
jump_statement: ("break" | "continue") NEWLINE
key_value_item: expression ":" expression
key_value_list: key_value_item ("," key_value_item)*
keyword_item: id_expression ":" expression
keyword_arguments: keyword_item ("," keyword_item)*
literal: integer_literal | string_literal | boolean_literal | array_literal | dictionary_literal
logical_and_expression: equality_expression | (logical_and_expression "and" equality_expression)
logical_or_expression: logical_and_expression | (logical_or_expression "or" logical_and_expression)
method_expression: postfix_expression "." function_expression
multiplicative_expression: unary_expression | (multiplicative_expression multiplicative_operator unary_expression)
multiplicative_operator: "*" | "/" | "%"
octal_literal: "0o" OCTAL_NUMBER
OCTAL_NUMBER: /[0-7]+/
positional_arguments: expression ("," expression)*
postfix_expression: primary_expression | subscript_expression | function_expression | method_expression
primary_expression: literal | ("(" expression ")") | id_expression
relational_expression: additive_expression | (relational_expression relational_operator additive_expression)
relational_operator: ">" | "<" | ">=" | "<=" | "in" | ("not" "in")
selection_statement: "if" condition NEWLINE (statement)* ("elif" condition NEWLINE (statement)*)* ["else" (statement)*] "endif"
statement: (expression_statement | selection_statement | iteration_statement) NEWLINE
string_literal: ("'" STRING_SIMPLE_VALUE "'") | ("'''" STRING_MULTILINE_VALUE "'''")
STRING_MULTILINE_VALUE: \.*?(''')\
STRING_SIMPLE_VALUE: \.*?(?<!\\)(\\\\)*?'\
subscript_expression: postfix_expression "[" expression "]"
unary_expression: postfix_expression | (unary_operator unary_expression)
unary_operator: "not" | "+" | "-"
```

@ -74,7 +74,7 @@ Now we are ready to build our code.
```
$ cd builddir
$ ninja
$ meson compile
```
Once that is done we can run the resulting binary.
@ -124,12 +124,12 @@ or the like. Instead we just type the exact same command as if we were
rebuilding our code without any build system changes.
```
$ ninja
$ meson compile
```
Once you have set up your build directory the first time, you don't
ever need to run the `meson` command again. You always just run
`ninja`. Meson will automatically detect when you have done changes to
`meson compile`. Meson will automatically detect when you have done changes to
build definitions and will take care of everything so users don't have
to care. In this case the following output is produced.

@ -4,20 +4,24 @@ short-description: Meson's own unit-test system
# Unit tests
Meson comes with a fully functional unit test system. To use it simply build an executable and then use it in a test.
Meson comes with a fully functional unit test system. To use it simply build
an executable and then use it in a test.
```meson
e = executable('prog', 'testprog.c')
test('name of test', e)
```
You can add as many tests as you want. They are run with the command `ninja test`.
You can add as many tests as you want. They are run with the command `meson
test`.
Meson captures the output of all tests and writes it in the log file `meson-logs/testlog.txt`.
Meson captures the output of all tests and writes it in the log file
`meson-logs/testlog.txt`.
## Test parameters
Some tests require the use of command line arguments or environment variables. These are simple to define.
Some tests require the use of command line arguments or environment
variables. These are simple to define.
```meson
test('command line test', exe, args : ['first', 'second'])
@ -29,48 +33,59 @@ Note how you need to specify multiple values as an array.
### MALLOC_PERTURB_
By default, environment variable
[`MALLOC_PERTURB_`](http://man7.org/linux/man-pages/man3/mallopt.3.html)
is set to a random value between 1..255. This can help find memory
leaks on configurations using glibc, including with non-GCC compilers.
This feature can be disabled as discussed in [test()](Reference-manual.md#test).
[`MALLOC_PERTURB_`](http://man7.org/linux/man-pages/man3/mallopt.3.html) is
set to a random value between 1..255. This can help find memory leaks on
configurations using glibc, including with non-GCC compilers. This feature
can be disabled as discussed in [test()](Reference-manual.md#test).
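One way to opt out for an individual test is to pin the variable in the test's environment yourself; a sketch, assuming an explicitly set value takes precedence over the random one (see [test()](Reference-manual.md#test) for the authoritative mechanism):
```meson
# Assumption: an explicit MALLOC_PERTURB_ value disables the random perturbation.
test('allocator sensitive test', exe, env : ['MALLOC_PERTURB_=0'])
```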
## Coverage
If you enable coverage measurements by giving Meson the command line flag
`-Db_coverage=true`, you can generate coverage reports after running the tests
(running the tests is required to gather the list of functions that get
called). Meson will autodetect what coverage generator tools you have installed
and will generate the corresponding targets. These targets are `coverage-xml`
and `coverage-text` which are both provided by [Gcovr](http://gcovr.com)
(version 3.3 or higher) and `coverage-html`, which requires
[Lcov](https://ltp.sourceforge.io/coverage/lcov.php) and
[GenHTML](https://linux.die.net/man/1/genhtml) or
[Gcovr](http://gcovr.com). As a convenience, a high-level `coverage` target is
also generated which will produce all 3 coverage report types, if possible.
The output of these commands is written to the log directory `meson-logs` in your build directory.
`-Db_coverage=true`, you can generate coverage reports after running the
tests (running the tests is required to gather the list of functions that get
called). Meson will autodetect what coverage generator tools you have
installed and will generate the corresponding targets. These targets are
`coverage-xml` and `coverage-text` which are both provided by
[Gcovr](http://gcovr.com) (version 3.3 or higher) and `coverage-html`, which
requires [Lcov](https://ltp.sourceforge.io/coverage/lcov.php) and
[GenHTML](https://linux.die.net/man/1/genhtml) or [Gcovr](http://gcovr.com).
As a convenience, a high-level `coverage` target is also generated which will
produce all 3 coverage report types, if possible.
The output of these commands is written to the log directory `meson-logs` in
your build directory.
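Putting it together, a typical session might look like this (a sketch; which coverage targets exist depends on the tools installed):
```console
$ meson setup builddir -Db_coverage=true
$ meson test -C builddir
$ meson compile -C builddir coverage-html
```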
## Parallelism
To reduce test times, Meson will by default run multiple unit tests in parallel. It is common to have some tests which can not be run in parallel because they require unique hold on some resource such as a file or a D-Bus name. You have to specify these tests with a keyword argument.
To reduce test times, Meson will by default run multiple unit tests in
parallel. It is common to have some tests which can not be run in parallel
because they require unique hold on some resource such as a file or a D-Bus
name. You have to specify these tests with a keyword argument.
```meson
test('unique test', t, is_parallel : false)
```
Meson will then make sure that no other unit test is running at the same time. Non-parallel tests take longer to run so it is recommended that you write your unit tests to be parallel executable whenever possible.
Meson will then make sure that no other unit test is running at the same
time. Non-parallel tests take longer to run so it is recommended that you
write your unit tests to be parallel executable whenever possible.
By default Meson uses as many concurrent processes as there are cores on the test machine. You can override this with the environment variable `MESON_TESTTHREADS` like this.
By default Meson uses as many concurrent processes as there are cores on the
test machine. You can override this with the environment variable
`MESON_TESTTHREADS` like this.
```console
$ MESON_TESTTHREADS=5 ninja test
$ MESON_TESTTHREADS=5 meson test
```
## Priorities
*(added in version 0.52.0)*
Tests can be assigned a priority that determines when a test is *started*. Tests with higher priority are started first, tests with lower priority started later. The default priority is 0, meson makes no guarantee on the ordering of tests with identical priority.
Tests can be assigned a priority that determines when a test is *started*.
Tests with higher priority are started first, tests with lower priority
started later. The default priority is 0, meson makes no guarantee on the
ordering of tests with identical priority.
```meson
test('started second', t, priority : 0)
@ -78,23 +93,36 @@ test('started third', t, priority : -50)
test('started first', t, priority : 1000)
```
Note that the test priority only affects the starting order of tests and subsequent tests are affected by how long it takes previous tests to complete. It is thus possible that a higher-priority test is still running when lower-priority tests with a shorter runtime have completed.
Note that the test priority only affects the starting order of tests and
subsequent tests are affected by how long it takes previous tests to
complete. It is thus possible that a higher-priority test is still running
when lower-priority tests with a shorter runtime have completed.
## Skipped tests and hard errors
Sometimes a test can only determine at runtime that it can not be run.
For the default `exitcode` testing protocol, the GNU standard approach in this case is to exit the program with error code 77. Meson will detect this and report these tests as skipped rather than failed. This behavior was added in version 0.37.0.
For the default `exitcode` testing protocol, the GNU standard approach in
this case is to exit the program with error code 77. Meson will detect this
and report these tests as skipped rather than failed. This behavior was added
in version 0.37.0.
For TAP-based tests, skipped tests should print a single line starting with `1..0 # SKIP`.
For TAP-based tests, skipped tests should print a single line starting with
`1..0 # SKIP`.
In addition, sometimes a test fails set up so that it should fail even if it is marked as an expected failure. The GNU standard approach in this case is to exit the program with error code 99. Again, Meson will detect this and report these tests as `ERROR`, ignoring the setting of `should_fail`. This behavior was added in version 0.50.0.
In addition, sometimes a test fails during set-up in a way that should be
reported as a hard error, even if the test is marked as an expected failure.
The GNU standard approach in this case is to exit the program with error code
99. Again, Meson will detect this and report these tests as `ERROR`, ignoring
the setting of `should_fail`. This behavior was added in version 0.50.0.
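As a sketch, a C test program might signal these conditions like this (the `have_network` probe is a placeholder for whatever runtime check the test needs):
```c
/* Placeholder for a real runtime check. */
static int have_network(void) { return 0; }

int main(void) {
    if (!have_network())
        return 77;  /* GNU convention: reported as SKIP */
    /* Exiting with 99 instead would be reported as a hard ERROR,
       even for tests marked with should_fail. */
    return 0;       /* normal pass */
}
```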
## Testing tool
The goal of the meson test tool is to provide a simple way to run tests in a variety of different ways. The tool is designed to be run in the build directory.
The goal of the meson test tool is to provide a simple way to run tests in a
variety of different ways. The tool is designed to be run in the build
directory.
The simplest thing to do is just to run all tests, which is equivalent to running `ninja test`.
The simplest thing to do is just to run all tests.
```console
$ meson test
@ -107,7 +135,7 @@ For clarity, consider the meson.build containing:
```meson
test('A', ..., suite: 'foo')
test('B', ..., suite: 'foo')
test('B', ..., suite: ['foo', 'bar'])
test('C', ..., suite: 'bar')
test('D', ..., suite: 'baz')
@ -125,7 +153,8 @@ Tests belonging to a suite `suite` can be run as follows
$ meson test --suite (sub)project_name:suite
```
Since version *0.46*, `(sub)project_name` can be omitted if it is the top-level project.
Since version *0.46*, `(sub)project_name` can be omitted if it is the
top-level project.
Multiple suites are specified like:
@ -145,7 +174,8 @@ Sometimes you need to run the tests multiple times, which is done like this:
$ meson test --repeat=10
```
Invoking tests via a helper executable such as Valgrind can be done with the `--wrap` argument
Invoking tests via a helper executable such as Valgrind can be done with the
`--wrap` argument
```console
$ meson test --wrap=valgrind testname
@ -163,17 +193,25 @@ Meson also supports running the tests under GDB. Just doing this:
$ meson test --gdb testname
```
Meson will launch `gdb` all set up to run the test. Just type `run` in the GDB command prompt to start the program.
Meson will launch `gdb` all set up to run the test. Just type `run` in the
GDB command prompt to start the program.
The second use case is a test that segfaults only rarely. In this case you can invoke the following command:
The second use case is a test that segfaults only rarely. In this case you
can invoke the following command:
```console
$ meson test --gdb --repeat=10000 testname
```
This runs the test up to 10 000 times under GDB automatically. If the program crashes, GDB will halt and the user can debug the application. Note that testing timeouts are disabled in this case so `meson test` will not kill `gdb` while the developer is still debugging it. The downside is that if the test binary freezes, the test runner will wait forever.
This runs the test up to 10 000 times under GDB automatically. If the program
crashes, GDB will halt and the user can debug the application. Note that
testing timeouts are disabled in this case so `meson test` will not kill
`gdb` while the developer is still debugging it. The downside is that if the
test binary freezes, the test runner will wait forever.
Sometimes, the GDB binary is not in the PATH variable or the user wants to use a GDB replacement. Therefore, the invoked GDB program can be specified *(added 0.52.0)*:
Sometimes, the GDB binary is not in the PATH variable or the user wants to
use a GDB replacement. Therefore, the invoked GDB program can be specified
*(added 0.52.0)*:
```console
$ meson test --gdb --gdb-path /path/to/gdb testname
@ -183,12 +221,41 @@ $ meson test --gdb --gdb-path /path/to/gdb testname
$ meson test --print-errorlogs
```
Meson will report the output produced by the failing tests along with other useful information as the environmental variables. This is useful, for example, when you run the tests on Travis-CI, Jenkins and the like.
Meson will report the output produced by the failing tests along with other
useful information, such as the environment variables. This is useful, for
example, when you run the tests on Travis-CI, Jenkins and the like.
For further information see the command line help of Meson by running `meson test -h`.
For further information see the command line help of Meson by running `meson
test -h`.
## Legacy notes
If `meson test` does not work for you, you likely have a old version of Meson.
In that case you should call `mesontest` instead. If `mesontest` doesn't work
either you have a very old version prior to 0.37.0 and should upgrade.
If `meson test` does not work for you, you likely have an old version of
Meson. In that case you should call `mesontest` instead. If `mesontest`
doesn't work either, you have a very old version prior to 0.37.0 and should
upgrade.
## Test outputs
Meson will write several different files with detailed results of running
tests. These will be written into `$builddir/meson-logs/`.
### testlog.json
This is not a proper JSON file, but a file containing one valid JSON object
per line. The file is designed so that each line is streamed out as each test
is run, so it can be read as a stream while the test harness is running.
### testlog.junit.xml
This is a valid JUnit XML description of all tests run. It is not streamed
out, and is written only once all tests complete running.
When tests use the `tap` protocol, each test will be recorded as a testsuite
container, with each case named by the number of the result.
When tests use the `gtest` protocol, meson will inject arguments into the test
to generate its own JUnit XML, which meson will include as part of this XML
file.
*New in 0.55.0*

@ -56,6 +56,7 @@ topic](https://github.com/topics/meson).
- [GtkDApp](https://gitlab.com/csoriano/GtkDApp), an application template for developing Flatpak apps with Gtk+ and D
- [GVfs](https://git.gnome.org/browse/gvfs/), a userspace virtual filesystem designed to work with the I/O abstraction of GIO
- [Hardcode-Tray](https://github.com/bil-elmoussaoui/Hardcode-Tray), fixes hardcoded tray icons in Linux
- [HarfBuzz](https://github.com/harfbuzz/harfbuzz), a text shaping engine
- [HelenOS](http://helenos.org), a portable microkernel-based multiserver operating system
- [HexChat](https://github.com/hexchat/hexchat), a cross-platform IRC client in C
- [IGT](https://gitlab.freedesktop.org/drm/igt-gpu-tools), Linux kernel graphics driver test suite
@ -70,8 +71,10 @@ topic](https://github.com/topics/meson).
- [Knot Resolver](https://gitlab.labs.nic.cz/knot/knot-resolver), Full caching DNS resolver implementation
- [Ksh](https://github.com/att/ast), a Korn Shell
- [Lc0](https://github.com/LeelaChessZero/lc0), LeelaChessZero is a UCI-compliant chess engine designed to play chess via neural network
- [Le](https://github.com/kirushyk/le), machine learning framework
- [libcamera](https://git.linuxtv.org/libcamera.git/), a library to handle complex cameras on Linux, ChromeOS and Android
- [Libdrm](https://gitlab.freedesktop.org/mesa/drm), a library for abstracting DRM kernel interfaces
- [libeconf](https://github.com/openSUSE/libeconf), Enhanced config file parsing library, which merges config files placed in several locations into one
- [Libepoxy](https://github.com/anholt/libepoxy/), a library for handling OpenGL function pointer management
- [libfuse](https://github.com/libfuse/libfuse), the reference implementation of the Linux FUSE (Filesystem in Userspace) interface
- [Libgit2-glib](https://git.gnome.org/browse/libgit2-glib), a GLib wrapper for libgit2
@ -115,6 +118,7 @@ format files
- [RxDock](https://gitlab.com/rxdock/rxdock), a protein-ligand docking software designed for high throughput virtual screening (fork of rDock)
- [scrcpy](https://github.com/Genymobile/scrcpy), a cross platform application that provides display and control of Android devices connected on USB or over TCP/IP
- [Sequeler](https://github.com/Alecaddd/sequeler), a friendly SQL client for Linux, built with Vala and Gtk
- [Siril](https://gitlab.com/free-astro/siril), an image processing software for amateur astronomy
- [SSHFS](https://github.com/libfuse/sshfs), allows you to mount a remote filesystem using SFTP
- [sway](https://github.com/swaywm/sway), i3-compatible Wayland compositor
- [Sysprof](https://git.gnome.org/browse/sysprof), a profiling tool
@ -124,6 +128,7 @@ format files
- [Terminology](https://github.com/billiob/terminology), a terminal emulator based on the Enlightenment Foundation Libraries
- [Tilix](https://github.com/gnunn1/tilix), a tiling terminal emulator for Linux using GTK+ 3
- [Tizonia](https://github.com/tizonia/tizonia-openmax-il), a command-line cloud music player for Linux with support for Spotify, Google Play Music, YouTube, SoundCloud, TuneIn, Plex servers and Chromecast devices
- [Vala Language Server](https://github.com/benwaffle/vala-language-server), code intelligence engine for the Vala and Genie programming languages
- [Valum](https://github.com/valum-framework/valum), a micro web framework written in Vala
- [Venom](https://github.com/naxuroqa/Venom), a modern Tox client for the GNU/Linux desktop
- [VMAF](https://github.com/Netflix/vmaf) (by Netflix), a perceptual video quality assessment based on multi-method fusion

@ -32,9 +32,9 @@ You can add cross builds, too. As an example, let's set up a Linux -> Windows cr
mkdir buildwine
meson --cross-file=mingw-cross.txt buildwine
The cross compilation file sets up Wine so that not only can you compile your application, you can also run the unit test suite just by issuing the command `ninja test`.
The cross compilation file sets up Wine so that not only can you compile your application, you can also run the unit test suite just by issuing the command `meson test`.
To compile any of these build types, just cd into the corresponding build directory and run `ninja` or instruct your IDE to do the same. Note that once you have set up your build directory once, you can just run Ninja and Meson will ensure that the resulting build is fully up to date according to the source. Even if you have not touched one of the directories in weeks and have done major changes to your build configuration, Meson will detect this and bring the build directory up to date (or print an error if it can't do that). This allows you to do most of your work in the default directory and use the others every now and then without having to babysit your build directories.
To compile any of these build types, just cd into the corresponding build directory and run `meson compile` or instruct your IDE to do the same. Note that once you have set up your build directory, you can just run the build command and Meson will ensure that the resulting build is fully up to date according to the source. Even if you have not touched one of the directories in weeks and have done major changes to your build configuration, Meson will detect this and bring the build directory up to date (or print an error if it can't do that). This allows you to do most of your work in the default directory and use the others every now and then without having to babysit your build directories.
## Specialized uses

@ -237,7 +237,7 @@ dependencies = [
dependency('glib-2.0'),
dependency('gobject-2.0'),
meson.get_compiler('c').find_library('foo'),
meson.get_compiler('vala').find_library('foo', dir: vapi_dir),
meson.get_compiler('vala').find_library('foo', dirs: vapi_dir),
]
sources = files('app.vala')

@ -23,9 +23,9 @@ as follows:
| entry | value |
| ----- | ----- |
|build | `ninja -C $(Configuration)` |
|clean | `ninja -C $(Configuration) clean` |
|rebuild| `ninja -C $(Configuration) clean all|
|build | `meson compile -C $(Configuration)` |
|clean | `meson compile -C $(Configuration) --clean` |
|rebuild| `meson compile -C $(Configuration) --clean && meson compile -C $(Configuration)` |
|Output | `$(Configuration)\name_of_your_executable.exe` |

@ -28,16 +28,16 @@ itself in a way that makes it easy to use (usually this means as a
static library).
To use this kind of a project as a dependency you could just copy and
extract it inside your project's `subprojects` directory.
extract it inside your project's `subprojects` directory.
However there is a simpler way. You can specify a Wrap file that tells Meson
how to download it for you. If you then use this subproject in your build,
how to download it for you. If you then use this subproject in your build,
Meson will automatically download and extract it during build. This makes
subproject embedding extremely easy.
All wrap files must be named `<project_name>.wrap` and be placed in the `subprojects` directory.
Currently Meson has four kinds of wraps:
Currently Meson has four kinds of wraps:
- wrap-file
- wrap-git
- wrap-hg
@ -70,19 +70,31 @@ revision = head
## Accepted configuration properties for wraps
- `directory` - name of the subproject root directory, defaults to the name of the wrap.
Since *0.55.0* these can be used in all wrap types; they were previously restricted to `wrap-file`:
- `patch_url` - download url to retrieve an optional overlay archive
- `patch_fallback_url` - fallback URL to be used when download from `patch_url` fails *Since: 0.55.0*
- `patch_filename` - filename of the downloaded overlay archive
- `patch_hash` - sha256 checksum of the downloaded overlay archive
- `patch_directory` - *Since 0.55.0* Overlay directory, alternative to `patch_filename` in the case
files are local instead of a downloaded archive. The directory must be placed in
`subprojects/packagefiles`.
### Specific to wrap-file
- `source_url` - download url to retrieve the wrap-file source archive
- `source_fallback_url` - fallback URL to be used when download from `source_url` fails *Since: 0.55.0*
- `source_filename` - filename of the downloaded source archive
- `source_hash` - sha256 checksum of the downloaded source archive
- `patch_url` - download url to retrieve an optional overlay archive
- `patch_fallback_url` - fallback URL to be used when download from `patch_url` fails *Since: 0.55.0*
- `patch_filename` - filename of the downloaded overlay archive
- `patch_hash` - sha256 checksum of the downloaded overlay archive
- `lead_directory_missing` - for `wrap-file` create the leading
directory name. Needed when the source file does not have a leading
directory.
Since *0.55.0* it is possible to use only the `source_filename` and
`patch_filename` values in a .wrap file (without `source_url` and `patch_url`) to
specify a local archive in the `subprojects/packagefiles` directory. The `*_hash`
entries are optional when using this method. This method should be preferred over
the old `packagecache` approach described below.
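A minimal sketch of such a wrap file, assuming the archives have been placed in `subprojects/packagefiles/` (the file names are illustrative):
```ini
[wrap-file]
directory = foo-1.0
source_filename = foo-1.0.tar.gz
patch_filename = foo-1.0-meson.tar.gz
```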
Since *0.49.0* if `source_filename` or `patch_filename` is found in the
project's `subprojects/packagecache` directory, it will be used instead
of downloading the file, even if `--wrap-mode` option is set to
@ -94,7 +106,7 @@ of downloading the file, even if `--wrap-mode` option is set to
valid value (such as a git tag) for the VCS's `checkout` command, or
(for git) `head` to track upstream's default branch. Required.
## Specific to wrap-git
### Specific to wrap-git
- `depth` - shallowly clone the repository to X number of commits. Note
that git always allow shallowly cloning branches, but in order to
clone commit ids shallowly, the server must support
@ -124,19 +136,106 @@ thousands of lines of code. Once you have a working build definition,
just zip up the Meson build files (and others you have changed) and
put them somewhere where you can download them.
Meson build patches are only supported for wrap-file mode. When using
wrap-git, the repository must contain all Meson build definitions.
Prior to *0.55.0* Meson build patches were only supported for wrap-file mode.
When using wrap-git, the repository must contain all Meson build definitions.
Since *0.55.0* Meson build patches are supported for any wrap modes, including
wrap-git.
## `provide` section
*Since 0.55.0*
A wrap file can define the dependencies it provides in the `[provide]` section.
```ini
[provide]
dependency_names = foo-1.0
```
When a wrap file provides the dependency `foo-1.0`, as above, any call to
`dependency('foo-1.0')` will automatically fall back to that subproject even if
no `fallback` keyword argument is given. A wrap file named `foo.wrap` implicitly
provides the dependency name `foo` even when the `[provide]` section is missing.
Optional dependencies, like `dependency('foo-1.0', required: get_option('foo_opt'))`
where `foo_opt` is a feature option set to `auto`, will not fall back to the
subproject defined in the wrap file, for two reasons:
- It allows looking up the dependency in other ways first, for example using
`cc.find_library('foo')`, and only falling back if that fails:
```meson
# this won't use fallback defined in foo.wrap
foo_dep = dependency('foo-1.0', required: false)
if not foo_dep.found()
foo_dep = cc.find_library('foo', has_headers: 'foo.h', required: false)
if not foo_dep.found()
# This will use the fallback
foo_dep = dependency('foo-1.0')
# or
foo_dep = dependency('foo-1.0', required: false, fallback: 'foo')
endif
endif
```
- Sometimes a not-found dependency is preferable to a fallback when the feature is
not explicitly requested by the user. In that case
`dependency('foo-1.0', required: get_option('foo_opt'))` will only fall back
when the user sets `foo_opt` to `enabled` instead of `auto`.
If it is desired to fall back for an optional dependency, the `fallback` keyword
argument must be passed explicitly. For example
`dependency('foo-1.0', required: get_option('foo_opt'), fallback: 'foo')` will
use the fallback even when `foo_opt` is set to `auto`.
This mechanism assumes the subproject calls `meson.override_dependency('foo-1.0', foo_dep)`
so Meson knows which dependency object should be used as fallback. Since that
method was introduced in version *0.54.0*, as a transitional aid for projects
that do not yet make use of it the variable name can be provided in the wrap file
with entries in the format `foo-1.0 = foo_dep`.
For example when using a recent enough version of glib that uses
`meson.override_dependency()` to override `glib-2.0`, `gobject-2.0` and `gio-2.0`,
a wrap file would look like:
```ini
[wrap-git]
url=https://gitlab.gnome.org/GNOME/glib.git
revision=glib-2-62
[provide]
dependency_names = glib-2.0, gobject-2.0, gio-2.0
```
With older versions of glib, the dependency variable names need to be specified:
```ini
[wrap-git]
url=https://gitlab.gnome.org/GNOME/glib.git
revision=glib-2-62
[provide]
glib-2.0=glib_dep
gobject-2.0=gobject_dep
gio-2.0=gio_dep
```
Programs can also be provided by wrap files, with the `program_names` key:
```ini
[provide]
program_names = myprog, otherprog
```
With such a wrap file, `find_program('myprog')` will automatically fall back to using
the subproject, assuming it uses `meson.override_find_program('myprog')`.
## Using wrapped projects
Wraps provide a convenient way of obtaining a project into your subproject directory.
Wraps provide a convenient way of obtaining a project into your subproject directory.
Then you use it as a regular subproject (see [subprojects](Subprojects.md)).
## Getting wraps
Usually you don't want to write your wraps by hand.
Usually you don't want to write your wraps by hand.
There is an online repository called [WrapDB](https://wrapdb.mesonbuild.com) that provides
There is an online repository called [WrapDB](https://wrapdb.mesonbuild.com) that provides
many dependencies ready to use. You can read more about WrapDB [here](Using-the-WrapDB.md).
There is also a Meson subcommand to get and manage wraps (see [using wraptool](Using-wraptool.md)).

@ -0,0 +1,17 @@
# Wrap maintainer tools
The [mesonwrap repository](https://github.com/mesonbuild/mesonwrap) provides tools
to maintain the WrapDB. Read-only features can be used by anyone without Meson admin rights.
## Personal access token
Some tools require access to the Github API.
A [personal access token](https://github.com/settings/tokens) may be required
if the free GitHub API quota is exhausted. The `public_repo` scope is required
for write operations.
```
$ cat ~/.config/mesonwrap.ini
[mesonwrap]
github_token = <github token>
```

@ -7,18 +7,30 @@ package is rejected. What should be done will be determined on a
case-by-case basis. Similarly meeting all these requirements does not
guarantee that the package will get accepted. Use common sense.
## Checklist
Reviewer: copy-paste this to MR discussion box and tick all boxes that apply.
- [ ] project() has version string
- [ ] project() has license string
- [ ] if new project, master has tagged commit as only commit
- [ ] if new branch, it is branched from master
- [ ] contains a readme.txt
- [ ] contains an upstream.wrap file
- [ ] download link points to authoritative upstream location
- [ ] wrap repository contains only build system files
- [ ] merge request is pointed to correct target branch (not master)
- [ ] wrap works
- [ ] repo does not have useless top level directory (i.e. libfoobar-1.0.0)
The review process is partially automated by the [mesonwrap](Wrap-maintainer-tools.md)
`review` tool.
```
mesonwrap review zlib --pull-request=1 [--approve]
```
Since not every check can be automated, please pay attention to the following during the review:
- Download link points to an authoritative upstream location.
- Version branch is created from master.
- Except for the existing code, `LICENSE.build` is mandatory.
- `project()` has a version and it matches the source version.
- `project()` has a license.
- Complex `configure_file()` inputs are documented.
If the file is a copy of a project file, make sure it is clear what was changed.
- Unit tests are enabled if the project provides them.
- There are no guidelines if `install()` is a good or a bad thing in wraps.
- If the project can't be tested on the host platform, consider using the `--cross-file` flag.
See [the issue](https://github.com/mesonbuild/mesonwrap/issues/125).
Encourage wrap readability. Use your own judgement.
## Approval
If the code looks good use the `--approve` flag to merge it.
The tool automatically creates a release.

@ -25,7 +25,7 @@ for the host platform in cross builds can only be specified with a cross file.
There is a table of all environment variables supported [Here](Reference-tables.md#compiler-and-linker-selection-variables)
## Set dynamic linker
## Set linker
*New in 0.53.0*
@ -148,15 +148,14 @@ $ meson <other flags> -Db_coverage=true
Then issue the following commands.
```console
$ ninja
$ ninja test
$ ninja coverage-html (or coverage-xml)
$ meson compile
$ meson test
$ meson compile coverage-html (or coverage-xml)
```
The coverage report can be found in the meson-logs subdirectory.
Note: Currently, Meson does not support generating coverage reports
with Clang.
*New in 0.55.0* llvm-cov support for use with clang
## Add some optimization to debug builds
@ -191,14 +190,14 @@ test failures.
Install scan-build and configure your project. Then do this:
```console
$ ninja scan-build
$ meson compile scan-build
```
You can use the `SCANBUILD` environment variable to choose the
scan-build executable.
```console
$ SCANBUILD=<your exe> ninja scan-build
$ SCANBUILD=<your exe> meson compile scan-build
```
@ -209,8 +208,8 @@ operation. First we set up the project with profile measurements
enabled and compile it.
```console
$ meson <Meson options, such as --buildtype=debugoptimized> -Db_pgo=generate
$ ninja -C builddir
$ meson setup <Meson options, such as --buildtype=debugoptimized> -Db_pgo=generate
$ meson compile -C builddir
```
Then we need to run the program with some representative input. This
@ -221,7 +220,7 @@ information and rebuild.
```console
$ meson configure -Db_pgo=use
$ ninja
$ meson compile
```
After these steps the resulting binary is fully optimized.
@ -260,3 +259,28 @@ The `cmake_module_path` property is only needed for custom CMake scripts. System
wide CMake scripts are found automatically.
More information can be found [here](Dependencies.md#cmake)
## Get a default not-found dependency?
```meson
null_dep = dependency('', required : false)
```
This can be used in cases where you want a default value, but might override it
later.
```meson
# Not needed on Windows!
my_dep = dependency('', required : false)
if host_machine.system() in ['freebsd', 'netbsd', 'openbsd', 'dragonfly']
my_dep = dependency('some dep', required : false)
elif host_machine.system() == 'linux'
my_dep = dependency('some other dep', required : false)
endif
executable(
'myexe',
my_sources,
dependencies : [my_dep]
)
```

@ -12,7 +12,7 @@ specific permission. It is not licensed under the same terms as the
rest of the project.
If you are a third party and want to use the Meson logo, you must
first must obtain written permission from Jussi Pakkanen.
first obtain written permission from Jussi Pakkanen.
## Website licensing

@ -1,5 +0,0 @@
## meson dist --no-tests
`meson dist` has a new option `--no-tests` to skip build and tests of generated
packages. It can be used to not waste time for example when done in CI that
already does its own testing.

@ -0,0 +1,7 @@
## `unstable-keyval` is now stable `keyval`
The `unstable-keyval` module has been renamed to `keyval` and now comes with
stability guarantees.
Meson will print a warning when you load an `unstable-` module that has been
stabilised (so `unstable-keyval` is still accepted for example).

@ -0,0 +1,4 @@
## Per subproject `warning_level` option
`warning_level` can now be defined per subproject, in the same way as
`default_library` and `werror`.

@ -0,0 +1,52 @@
## Project and built-in options can be set in native or cross files
A new set of sections has been added to the cross and native files, `[project
options]` and `[<subproject_name>:project options]`, where `subproject_name`
is the name of a subproject. Any options that are allowed in the project can
be set from this section. They have the lowest precedence, and will be
overridden by command line arguments.
```meson
option('foo', type : 'string', value : 'foo')
```
```ini
[project options]
foo = 'other val'
```
```console
meson build --native-file my.ini
```
Will result in the option `foo` having the value `other val`.
```console
meson build --native-file my.ini -Dfoo='different val'
```
Will result in the option `foo` having the value `different val`.
Subproject options are assigned like this:
```ini
[zlib:project options]
foo = 'some val'
```
Additionally meson level options can be set in the same way, using the
`[built-in options]` section.
```ini
[built-in options]
c_std = 'c99'
```
These options can also be set on a per-subproject basis, although only
`default_library` and `werror` can currently be set:
```ini
[zlib:built-in options]
default_library = 'static'
```

@ -1,4 +0,0 @@
## Wrap fallback URL
Wrap files can now define `source_fallback_url` and `patch_fallback_url` to be
used in case the main server is temporaly down.

@ -0,0 +1,296 @@
# Command-line commands
There are two different ways of invoking Meson. First, you can run it directly
from the source tree with the command `/path/to/source/meson.py`. Meson may
also be installed, in which case the command is simply `meson`. In this manual
we only use the latter format for simplicity.
Meson is invoked using the following syntax:
`meson [COMMAND] [COMMAND_OPTIONS]`
This section describes all available commands and some of their optional arguments.
The most common workflow is to run [`setup`](#setup), followed by [`compile`](#compile), and then [`install`](#install).
For the full list of all available options for a specific command use the following syntax:
`meson COMMAND --help`
### configure
```
{{ cmd_help['configure']['usage'] }}
```
Changes options of a configured meson project.
```
{{ cmd_help['configure']['arguments'] }}
```
Most arguments are the same as in [`setup`](#setup).
Note: reconfiguring the project will not reset options to their default values (even if they were changed in `meson.build`).
#### Examples:
List all available options:
```
meson configure builddir
```
Change value of a single option:
```
meson configure builddir -Doption=new_value
```
### compile
*(since 0.54.0)*
```
{{ cmd_help['compile']['usage'] }}
```
Builds a default or a specified target of a configured meson project.
```
{{ cmd_help['compile']['arguments'] }}
```
The `--verbose` argument is available since 0.55.0.
#### Targets
*(since 0.55.0)*
`TARGET` has the following syntax `[PATH/]NAME[:TYPE]`, where:
- `NAME`: name of the target from `meson.build` (e.g. `foo` from `executable('foo', ...)`).
- `PATH`: path to the target relative to the root `meson.build` file. Note: relative path for a target specified in the root `meson.build` is `./`.
- `TYPE`: type of the target. Can be one of the following: 'executable', 'static_library', 'shared_library', 'shared_module', 'custom', 'run', 'jar'.
`PATH` and/or `TYPE` can be omitted if the resulting `TARGET` can be used to uniquely identify the target in `meson.build`.
#### Backend specific arguments
*(since 0.55.0)*
`BACKEND-args` use the following syntax:
If you only pass a single string, then it is considered to have all values separated by commas. Thus invoking the following command:
```
$ meson compile --ninja-args=-n,-d,explain
```
would add `-n`, `-d` and `explain` arguments to ninja invocation.
If you need to have commas or spaces in your string values, then you need to pass the value with proper shell quoting like this:
```
$ meson compile "--ninja-args=['a,b', 'c d']"
```
#### Examples:
Build the project:
```
meson compile -C builddir
```
Execute a dry run on ninja backend with additional debug info:
```
meson compile --ninja-args=-n,-d,explain
```
Build three targets: two targets that have the same `foo` name, but different type, and a `bar` target:
```
meson compile foo:shared_library foo:static_library bar
```
Produce a coverage html report (if available):
```
meson compile coverage-html
```
### dist
*(since 0.52.0)*
```
{{ cmd_help['dist']['usage'] }}
```
Generates a release archive from the current source tree.
```
{{ cmd_help['dist']['arguments'] }}
```
See [notes about creating releases](Creating-releases.md) for more info.
#### Examples:
Create a release archive:
```
meson dist -C builddir
```
### init
*(since 0.45.0)*
```
{{ cmd_help['init']['usage'] }}
```
Creates a basic set of build files based on a template.
```
{{ cmd_help['init']['arguments'] }}
```
#### Examples:
Create a project in `sourcedir`:
```
meson init -C sourcedir
```
### introspect
```
{{ cmd_help['introspect']['usage'] }}
```
Displays information about a configured meson project.
```
{{ cmd_help['introspect']['arguments'] }}
```
#### Examples:
Display basic information about a configured project in `builddir`:
```
meson introspect builddir
```
### install
*(since 0.47.0)*
```
{{ cmd_help['install']['usage'] }}
```
Installs the project to the prefix specified in [`setup`](#setup).
```
{{ cmd_help['install']['arguments'] }}
```
See [the installation documentation](Installing.md) for more info.
#### Examples:
Install project to `prefix`:
```
meson install -C builddir
```
Install project to `$DESTDIR/prefix`:
```
DESTDIR=/path/to/staging/area meson install -C builddir
```
### rewrite
*(since 0.50.0)*
```
{{ cmd_help['rewrite']['usage'] }}
```
Modifies the meson project.
```
{{ cmd_help['rewrite']['arguments'] }}
```
See [the meson file rewriter documentation](Rewriter.md) for more info.
### setup
```
{{ cmd_help['setup']['usage'] }}
```
Configures a build directory for the meson project.
This is the default meson command (invoked if there was no COMMAND supplied).
```
{{ cmd_help['setup']['arguments'] }}
```
See [meson introduction page](Running-Meson.md#configuring-the-build-directory) for more info.
#### Examples:
Configures `builddir` with default values:
```
meson setup builddir
```
### subprojects
*(since 0.49.0)*
```
{{ cmd_help['subprojects']['usage'] }}
```
Manages subprojects of the meson project.
```
{{ cmd_help['subprojects']['arguments'] }}
```
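#### Examples:
Download all subprojects that have wrap files (one common use; run `meson subprojects --help` for the full list of subcommands):
```
meson subprojects download
```
Update previously obtained subprojects to their current wrap revisions:
```
meson subprojects update
```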
### test
```
{{ cmd_help['test']['usage'] }}
```
Runs tests for the configured meson project.
```
{{ cmd_help['test']['arguments'] }}
```
See [the unit test documentation](Unit-tests.md) for more info.
#### Examples:
Run tests for the project:
```
meson test -C builddir
```
Run only `specific_test_1` and `specific_test_2`:
```
meson test -C builddir specific_test_1 specific_test_2
```
### wrap
```
{{ cmd_help['wrap']['usage'] }}
```
A utility to manage WrapDB dependencies.
```
{{ cmd_help['wrap']['arguments'] }}
```
See [the WrapDB tool documentation](Using-wraptool.md) for more info.
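#### Examples:
Install the `zlib` wrap from WrapDB into the `subprojects` directory (a typical use; see [using wraptool](Using-wraptool.md) for the full command set):
```
meson wrap install zlib
```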

@ -1,16 +1,40 @@
project('Meson documentation', version: '1.0')
cur_bdir = meson.current_build_dir()
# Copy all files to build dir, since HotDoc uses relative paths
run_command(
files('../tools/copy_files.py'),
'-C', meson.current_source_dir(),
'--output-dir', cur_bdir,
'markdown', 'theme', 'sitemap.txt',
check: true)
# Only the script knows which files are being generated
docs_gen = custom_target(
'gen_docs',
input: files('markdown/index.md'),
output: 'gen_docs.dummy',
command: [
files('../tools/regenerate_docs.py'),
'--output-dir', join_paths(cur_bdir, 'markdown'),
'--dummy-output-file', '@OUTPUT@',
],
build_by_default: true,
install: false)
hotdoc = import('hotdoc')
documentation = hotdoc.generate_doc(meson.project_name(),
project_version: meson.project_version(),
sitemap: 'sitemap.txt',
sitemap: join_paths(cur_bdir, 'sitemap.txt'),
build_by_default: true,
index: 'markdown/index.md',
depends: docs_gen,
index: join_paths(cur_bdir, 'markdown/index.md'),
install: false,
extra_assets: ['images/'],
include_paths: ['markdown'],
include_paths: [join_paths(cur_bdir, 'markdown')],
default_license: 'CC-BY-SAv4.0',
html_extra_theme: join_paths('theme', 'extra'),
html_extra_theme: join_paths(cur_bdir, 'theme', 'extra'),
git_upload_repository: 'git@github.com:jpakkane/jpakkane.github.io.git',
edit_on_github_repository: 'https://github.com/mesonbuild/meson/',
syntax_highlighting_activate: true,

@ -5,6 +5,7 @@ index.md
Manual.md
Overview.md
Running-Meson.md
Commands.md
Builtin-options.md
Using-with-Visual-Studio.md
Meson-sample.md
@ -48,7 +49,7 @@ index.md
SourceSet-module.md
Windows-module.md
Cuda-module.md
Kconfig-module.md
Keyval-module.md
Java.md
Vala.md
D.md
@ -72,11 +73,13 @@ index.md
Adding-new-projects-to-wrapdb.md
Using-the-WrapDB.md
Using-wraptool.md
Wrap-maintainer-tools.md
Wrap-best-practices-and-tips.md
Wrap-review-guidelines.md
Shipping-prebuilt-binaries-as-wraps.md
fallback-wraptool.md
Release-notes.md
Release-notes-for-0.55.0.md
Release-notes-for-0.54.0.md
Release-notes-for-0.53.0.md
Release-notes-for-0.52.0.md
@ -116,5 +119,6 @@ index.md
Using-multiple-build-directories.md
Vs-External.md
Contributing.md
MesonCI.md
legal.md
Videos.md

@ -14,7 +14,7 @@
("Hotdoc-module.html","Hotdoc"), \
("i18n-module.html","i18n"), \
("Icestorm-module.html","Icestorm"), \
("Kconfig-module.html","kconfig"), \
("Keyval-module.html","Keyval"), \
("Pkgconfig-module.html","Pkgconfig"), \
("Python-module.html","Python"), \
("Python-3-module.html","Python 3"), \

@ -1,4 +1,4 @@
.TH MESON "1" "March 2020" "meson 0.54.0" "User Commands"
.TH MESON "1" "July 2020" "meson 0.55.0" "User Commands"
.SH NAME
meson - a high productivity build system
.SH DESCRIPTION

@ -0,0 +1,331 @@
# Copyright 2012-2020 The Meson development team
# Copyright © 2020 Intel Corporation
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from functools import lru_cache
import collections
import enum
import os
import re
import typing as T
from . import mesonlib
if T.TYPE_CHECKING:
from .linkers import StaticLinker
from .compilers import Compiler
UNIXY_COMPILER_INTERNAL_LIBS = ['m', 'c', 'pthread', 'dl', 'rt'] # type: T.List[str]
# execinfo is a compiler lib on FreeBSD and NetBSD
if mesonlib.is_freebsd() or mesonlib.is_netbsd():
UNIXY_COMPILER_INTERNAL_LIBS.append('execinfo')
class Dedup(enum.Enum):
"""What kind of deduplication can be done to compiler args.
OVERRIDEN - Whether an argument can be 'overridden' by a later argument.
For example, -DFOO defines FOO and -UFOO undefines FOO. In this case,
we can safely remove the previous occurrence and add a new one. The
same is true for include paths and library paths with -I and -L.
UNIQUE - Arguments that once specified cannot be undone, such as `-c` or
`-pipe`. New instances of these can be completely skipped.
NO_DEDUP - Whether it matters where or how many times on the command-line
a particular argument is present. This can matter for symbol
resolution in static or shared libraries, so we cannot de-dup or
reorder them.
"""
NO_DEDUP = 0
UNIQUE = 1
OVERRIDEN = 2
class CompilerArgs(collections.abc.MutableSequence):
'''
List-like class that manages a list of compiler arguments. Should be used
while constructing compiler arguments from various sources. Can be
operated with ordinary lists, so this does not need to be used
everywhere.
All arguments must be inserted and stored in GCC-style (-lfoo, -Idir, etc)
and can converted to the native type of each compiler by using the
.to_native() method to which you must pass an instance of the compiler or
the compiler class.
New arguments added to this class (either with .append(), .extend(), or +=)
are added in a way that ensures that they override previous arguments.
For example:
>>> a = ['-Lfoo', '-lbar']
>>> a += ['-Lpho', '-lbaz']
>>> print(a)
['-Lpho', '-Lfoo', '-lbar', '-lbaz']
Arguments will also be de-duped if they can be de-duped safely.
Note that because of all this, this class is not commutative and does not
preserve the order of arguments if it is safe to not. For example:
>>> ['-Ifoo', '-Ibar'] + ['-Ifez', '-Ibaz', '-Werror']
['-Ifez', '-Ibaz', '-Ifoo', '-Ibar', '-Werror']
>>> ['-Ifez', '-Ibaz', '-Werror'] + ['-Ifoo', '-Ibar']
['-Ifoo', '-Ibar', '-Ifez', '-Ibaz', '-Werror']
'''
# Arg prefixes that override by prepending instead of appending
prepend_prefixes = () # type: T.Tuple[str, ...]
# Arg prefixes and args that must be de-duped by returning 2
dedup2_prefixes = () # type: T.Tuple[str, ...]
dedup2_suffixes = () # type: T.Tuple[str, ...]
dedup2_args = () # type: T.Tuple[str, ...]
# Arg prefixes and args that must be de-duped by returning 1
#
# NOTE: not thorough. A list of potential corner cases can be found in
# https://github.com/mesonbuild/meson/pull/4593#pullrequestreview-182016038
dedup1_prefixes = () # type: T.Tuple[str, ...]
dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a') # type: T.Tuple[str, ...]
# Match a .so of the form path/to/libfoo.so.0.1.0
# Only UNIX shared libraries require this. Others have a fixed extension.
dedup1_regex = re.compile(r'([\/\\]|\A)lib.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$')
dedup1_args = () # type: T.Tuple[str, ...]
# In generate_link() we add external libs without de-dup, but we must
# *always* de-dup these because they're special arguments to the linker
# TODO: these should probably move too
always_dedup_args = tuple('-l' + lib for lib in UNIXY_COMPILER_INTERNAL_LIBS) # type: T.Tuple[str, ...]
def __init__(self, compiler: T.Union['Compiler', 'StaticLinker'],
iterable: T.Optional[T.Iterable[str]] = None):
self.compiler = compiler
self._container = list(iterable) if iterable is not None else [] # type: T.List[str]
self.pre = collections.deque() # type: T.Deque[str]
self.post = collections.deque() # type: T.Deque[str]
# Flush the saved pre and post list into the _container list
#
# This correctly deduplicates the entries after _can_dedup definition
# Note: This function is designed to work without delete operations, as deletions are worsening the performance a lot.
def flush_pre_post(self) -> None:
pre_flush = collections.deque() # type: T.Deque[str]
pre_flush_set = set() # type: T.Set[str]
post_flush = collections.deque() # type: T.Deque[str]
post_flush_set = set() # type: T.Set[str]
# The two lists are walked from front to back here, so that deduplication does not require any removals
for a in self.pre:
dedup = self._can_dedup(a)
if a not in pre_flush_set:
pre_flush.append(a)
if dedup is Dedup.OVERRIDEN:
pre_flush_set.add(a)
for a in reversed(self.post):
dedup = self._can_dedup(a)
if a not in post_flush_set:
post_flush.appendleft(a)
if dedup is Dedup.OVERRIDEN:
post_flush_set.add(a)
# pre and post will overwrite every element that is already in the container;
# only copy over args that are in _container but in neither the pre nor the post flush set
for a in self._container:
if a not in post_flush_set and a not in pre_flush_set:
pre_flush.append(a)
self._container = list(pre_flush) + list(post_flush)
self.pre.clear()
self.post.clear()
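# --- Editor's illustrative sketch, not part of the diff ---
# The core trick of flush_pre_post() in isolation: walking a deque from the
# front and remembering what has already been emitted keeps the first
# occurrence and drops later duplicates without any delete operations. The
# real method also walks self.post in reverse and only records args that
# _can_dedup() reports as Dedup.OVERRIDEN; the helper name below is
# hypothetical.
import collections

def _dedup_keep_first_example(args):
    seen = set()
    out = collections.deque()
    for a in args:
        if a not in seen:
            out.append(a)
            seen.add(a)   # assume every arg here is overridable
    return list(out)

assert _dedup_keep_first_example(['-Lpho', '-Lfoo', '-Lpho']) == ['-Lpho', '-Lfoo']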
def __iter__(self) -> T.Iterator[str]:
self.flush_pre_post()
return iter(self._container)
@T.overload # noqa: F811
def __getitem__(self, index: int) -> str: # noqa: F811
pass
@T.overload # noqa: F811
def __getitem__(self, index: slice) -> T.MutableSequence[str]: # noqa: F811
pass
def __getitem__(self, index): # noqa: F811
self.flush_pre_post()
return self._container[index]
@T.overload # noqa: F811
def __setitem__(self, index: int, value: str) -> None: # noqa: F811
pass
@T.overload # noqa: F811
def __setitem__(self, index: slice, value: T.Iterable[str]) -> None: # noqa: F811
pass
def __setitem__(self, index, value) -> None: # noqa: F811
self.flush_pre_post()
self._container[index] = value
def __delitem__(self, index: T.Union[int, slice]) -> None:
self.flush_pre_post()
del self._container[index]
def __len__(self) -> int:
return len(self._container) + len(self.pre) + len(self.post)
def insert(self, index: int, value: str) -> None:
self.flush_pre_post()
self._container.insert(index, value)
def copy(self) -> 'CompilerArgs':
self.flush_pre_post()
return type(self)(self.compiler, self._container.copy())
@classmethod
@lru_cache(maxsize=None)
def _can_dedup(cls, arg: str) -> Dedup:
"""Returns whether the argument can be safely de-duped.
In addition to these, we handle library arguments specially.
With GNU ld, we surround library arguments with -Wl,--start/end-group
to recursively search for symbols in the libraries. This is not needed
with other linkers.
"""
# A standalone argument must never be deduplicated because it is
# defined by what comes _after_ it. Thus dedupping this:
# -D FOO -D BAR
# would yield either
# -D FOO BAR
# or
# FOO -D BAR
# both of which are invalid.
if arg in cls.dedup2_prefixes:
return Dedup.NO_DEDUP
if arg in cls.dedup2_args or \
arg.startswith(cls.dedup2_prefixes) or \
arg.endswith(cls.dedup2_suffixes):
return Dedup.OVERRIDEN
if arg in cls.dedup1_args or \
arg.startswith(cls.dedup1_prefixes) or \
arg.endswith(cls.dedup1_suffixes) or \
re.search(cls.dedup1_regex, arg):
return Dedup.UNIQUE
return Dedup.NO_DEDUP
@classmethod
@lru_cache(maxsize=None)
def _should_prepend(cls, arg: str) -> bool:
return arg.startswith(cls.prepend_prefixes)
def to_native(self, copy: bool = False) -> T.List[str]:
# Check if we need to add --start/end-group for circular dependencies
# between static libraries, and for recursively searching for symbols
# needed by static libraries that are provided by object files or
# shared libraries.
self.flush_pre_post()
if copy:
new = self.copy()
else:
new = self
return self.compiler.unix_args_to_native(new._container)
def append_direct(self, arg: str) -> None:
'''
Append the specified argument without any reordering or de-dup except
for absolute paths to libraries, etc, which can always be de-duped
safely.
'''
self.flush_pre_post()
if os.path.isabs(arg):
self.append(arg)
else:
self._container.append(arg)
def extend_direct(self, iterable: T.Iterable[str]) -> None:
'''
Extend using the elements in the specified iterable without any
reordering or de-dup, except for absolute paths, where the order of
include search directories is not relevant.
'''
self.flush_pre_post()
for elem in iterable:
self.append_direct(elem)
def extend_preserving_lflags(self, iterable: T.Iterable[str]) -> None:
normal_flags = []
lflags = []
for i in iterable:
if i not in self.always_dedup_args and (i.startswith('-l') or i.startswith('-L')):
lflags.append(i)
else:
normal_flags.append(i)
self.extend(normal_flags)
self.extend_direct(lflags)
def __add__(self, args: T.Iterable[str]) -> 'CompilerArgs':
self.flush_pre_post()
new = self.copy()
new += args
return new
def __iadd__(self, args: T.Iterable[str]) -> 'CompilerArgs':
'''
Add two CompilerArgs while taking into account overriding of arguments
and while preserving the order of arguments as much as possible
'''
tmp_pre = collections.deque() # type: T.Deque[str]
if not isinstance(args, collections.abc.Iterable):
raise TypeError('can only concatenate Iterable[str] (not "{}") to CompilerArgs'.format(args))
for arg in args:
# If the argument can be de-duped, do it either by removing the
# previous occurrence of it and adding a new one, or not adding the
# new occurrence.
dedup = self._can_dedup(arg)
if dedup is Dedup.UNIQUE:
# Argument already exists and adding a new instance is useless
if arg in self._container or arg in self.pre or arg in self.post:
continue
if self._should_prepend(arg):
tmp_pre.appendleft(arg)
else:
self.post.append(arg)
self.pre.extendleft(tmp_pre)
# pre and post are going to be merged later, before an iter call
return self
def __radd__(self, args: T.Iterable[str]) -> 'CompilerArgs':
self.flush_pre_post()
new = type(self)(self.compiler, args)
new += self
return new
def __eq__(self, other: T.Any) -> T.Union[bool]:
self.flush_pre_post()
# Only allow equality checks against other CompilerArgs and list instances
if isinstance(other, CompilerArgs):
return self.compiler == other.compiler and self._container == other._container
elif isinstance(other, list):
return self._container == other
return NotImplemented
def append(self, arg: str) -> None:
self.__iadd__([arg])
def extend(self, args: T.Iterable[str]) -> None:
self.__iadd__(args)
def __repr__(self) -> str:
self.flush_pre_post()
return 'CompilerArgs({!r}, {!r})'.format(self.compiler, self._container)

@ -20,6 +20,7 @@ __all__ = [
'AstInterpreter',
'AstIDGenerator',
'AstIndentationGenerator',
'AstJSONPrinter',
'AstVisitor',
'AstPrinter',
'IntrospectionInterpreter',
@ -30,4 +31,4 @@ from .interpreter import AstInterpreter
from .introspection import IntrospectionInterpreter, build_target_functions
from .visitor import AstVisitor
from .postprocess import AstConditionLevel, AstIDGenerator, AstIndentationGenerator
from .printer import AstPrinter
from .printer import AstPrinter, AstJSONPrinter

@ -297,6 +297,11 @@ class AstInterpreter(interpreterbase.InterpreterBase):
elif isinstance(node, ElementaryNode):
result = node.value
elif isinstance(node, NotNode):
result = self.resolve_node(node.value, include_unknown_args, id_loop_detect)
if isinstance(result, bool):
result = not result
elif isinstance(node, ArrayNode):
result = [x for x in node.args.arguments]

@ -120,7 +120,7 @@ class IntrospectionInterpreter(AstInterpreter):
self.do_subproject(i)
self.coredata.init_backend_options(self.backend)
options = {k: v for k, v in self.environment.cmd_line_options.items() if k.startswith('backend_')}
options = {k: v for k, v in self.environment.meson_options.host[''].items() if k.startswith('backend_')}
self.coredata.set_options(options)
self.func_add_languages(None, proj_langs, None)

@ -18,6 +18,7 @@
from .. import mparser
from . import AstVisitor
import re
import typing as T
arithmic_map = {
'add': '+',
@ -155,7 +156,7 @@ class AstPrinter(AstVisitor):
self.append_padded(prefix + 'if', node)
prefix = 'el'
i.accept(self)
if node.elseblock:
if not isinstance(node.elseblock, mparser.EmptyNode):
self.append('else', node)
node.elseblock.accept(self)
self.append('endif', node)
@ -199,3 +200,160 @@ class AstPrinter(AstVisitor):
self.result = re.sub(r', \n$', '\n', self.result)
else:
self.result = re.sub(r', $', '', self.result)
class AstJSONPrinter(AstVisitor):
def __init__(self) -> None:
self.result = {} # type: T.Dict[str, T.Any]
self.current = self.result
def _accept(self, key: str, node: mparser.BaseNode) -> None:
old = self.current
data = {} # type: T.Dict[str, T.Any]
self.current = data
node.accept(self)
self.current = old
self.current[key] = data
def _accept_list(self, key: str, nodes: T.Sequence[mparser.BaseNode]) -> None:
old = self.current
datalist = [] # type: T.List[T.Dict[str, T.Any]]
for i in nodes:
self.current = {}
i.accept(self)
datalist += [self.current]
self.current = old
self.current[key] = datalist
def _raw_accept(self, node: mparser.BaseNode, data: T.Dict[str, T.Any]) -> None:
old = self.current
self.current = data
node.accept(self)
self.current = old
def setbase(self, node: mparser.BaseNode) -> None:
self.current['node'] = type(node).__name__
self.current['lineno'] = node.lineno
self.current['colno'] = node.colno
self.current['end_lineno'] = node.end_lineno
self.current['end_colno'] = node.end_colno
def visit_default_func(self, node: mparser.BaseNode) -> None:
self.setbase(node)
def gen_ElementaryNode(self, node: mparser.ElementaryNode) -> None:
self.current['value'] = node.value
self.setbase(node)
def visit_BooleanNode(self, node: mparser.BooleanNode) -> None:
self.gen_ElementaryNode(node)
def visit_IdNode(self, node: mparser.IdNode) -> None:
self.gen_ElementaryNode(node)
def visit_NumberNode(self, node: mparser.NumberNode) -> None:
self.gen_ElementaryNode(node)
def visit_StringNode(self, node: mparser.StringNode) -> None:
self.gen_ElementaryNode(node)
def visit_ArrayNode(self, node: mparser.ArrayNode) -> None:
self._accept('args', node.args)
self.setbase(node)
def visit_DictNode(self, node: mparser.DictNode) -> None:
self._accept('args', node.args)
self.setbase(node)
def visit_OrNode(self, node: mparser.OrNode) -> None:
self._accept('left', node.left)
self._accept('right', node.right)
self.setbase(node)
def visit_AndNode(self, node: mparser.AndNode) -> None:
self._accept('left', node.left)
self._accept('right', node.right)
self.setbase(node)
def visit_ComparisonNode(self, node: mparser.ComparisonNode) -> None:
self._accept('left', node.left)
self._accept('right', node.right)
self.current['ctype'] = node.ctype
self.setbase(node)
def visit_ArithmeticNode(self, node: mparser.ArithmeticNode) -> None:
self._accept('left', node.left)
self._accept('right', node.right)
self.current['op'] = arithmic_map[node.operation]
self.setbase(node)
def visit_NotNode(self, node: mparser.NotNode) -> None:
self._accept('right', node.value)
self.setbase(node)
def visit_CodeBlockNode(self, node: mparser.CodeBlockNode) -> None:
self._accept_list('lines', node.lines)
self.setbase(node)
def visit_IndexNode(self, node: mparser.IndexNode) -> None:
self._accept('object', node.iobject)
self._accept('index', node.index)
self.setbase(node)
def visit_MethodNode(self, node: mparser.MethodNode) -> None:
self._accept('object', node.source_object)
self._accept('args', node.args)
self.current['name'] = node.name
self.setbase(node)
def visit_FunctionNode(self, node: mparser.FunctionNode) -> None:
self._accept('args', node.args)
self.current['name'] = node.func_name
self.setbase(node)
def visit_AssignmentNode(self, node: mparser.AssignmentNode) -> None:
self._accept('value', node.value)
self.current['var_name'] = node.var_name
self.setbase(node)
def visit_PlusAssignmentNode(self, node: mparser.PlusAssignmentNode) -> None:
self._accept('value', node.value)
self.current['var_name'] = node.var_name
self.setbase(node)
def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode) -> None:
self._accept('items', node.items)
self._accept('block', node.block)
self.current['varnames'] = node.varnames
self.setbase(node)
def visit_IfClauseNode(self, node: mparser.IfClauseNode) -> None:
self._accept_list('ifs', node.ifs)
self._accept('else', node.elseblock)
self.setbase(node)
def visit_UMinusNode(self, node: mparser.UMinusNode) -> None:
self._accept('right', node.value)
self.setbase(node)
def visit_IfNode(self, node: mparser.IfNode) -> None:
self._accept('condition', node.condition)
self._accept('block', node.block)
self.setbase(node)
def visit_TernaryNode(self, node: mparser.TernaryNode) -> None:
self._accept('condition', node.condition)
self._accept('true', node.trueblock)
self._accept('false', node.falseblock)
self.setbase(node)
def visit_ArgumentNode(self, node: mparser.ArgumentNode) -> None:
self._accept_list('positional', node.arguments)
kwargs_list = [] # type: T.List[T.Dict[str, T.Dict[str, T.Any]]]
for key, val in node.kwargs.items():
key_res = {} # type: T.Dict[str, T.Any]
val_res = {} # type: T.Dict[str, T.Any]
self._raw_accept(key, key_res)
self._raw_accept(val, val_res)
kwargs_list += [{'key': key_res, 'val': val_res}]
self.current['kwargs'] = kwargs_list
self.setbase(node)

@ -113,8 +113,7 @@ class AstVisitor:
self.visit_default_func(node)
for i in node.ifs:
i.accept(self)
if node.elseblock:
node.elseblock.accept(self)
node.elseblock.accept(self)
def visit_UMinusNode(self, node: mparser.UMinusNode) -> None:
self.visit_default_func(node)

@ -12,24 +12,54 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import os, pickle, re
from collections import OrderedDict
from functools import lru_cache
from pathlib import Path
import enum
import json
import os
import pickle
import re
import shlex
import subprocess
import textwrap
import typing as T
from .. import build
from .. import dependencies
from .. import mesonlib
from .. import mlog
import json
import subprocess
from ..mesonlib import (
File, Language, MachineChoice, MesonException, OrderedSet,
OptionOverrideProxy, classify_unity_sources, unholder,
)
from ..compilers import CompilerArgs, VisualStudioLikeCompiler
from ..interpreter import Interpreter
from collections import OrderedDict
import shlex
from functools import lru_cache
import typing as T
if T.TYPE_CHECKING:
from ..interpreter import Interpreter
class TestProtocol(enum.Enum):
EXITCODE = 0
TAP = 1
GTEST = 2
@classmethod
def from_str(cls, string: str) -> 'TestProtocol':
if string == 'exitcode':
return cls.EXITCODE
elif string == 'tap':
return cls.TAP
elif string == 'gtest':
return cls.GTEST
raise MesonException('unknown test format {}'.format(string))
def __str__(self) -> str:
if self is self.EXITCODE:
return 'exitcode'
elif self is self.GTEST:
return 'gtest'
return 'tap'
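# --- Editor's note, not part of the diff ---
# TestProtocol round-trips through its string form, which is how the value is
# read from and written back to the test metadata:
assert TestProtocol.from_str('exitcode') is TestProtocol.EXITCODE
assert str(TestProtocol.from_str('tap')) == 'tap'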
class CleanTrees:
@ -60,12 +90,13 @@ class InstallData:
self.mesonintrospect = mesonintrospect
class TargetInstallData:
def __init__(self, fname, outdir, aliases, strip, install_name_mappings, install_rpath, install_mode, optional=False):
def __init__(self, fname, outdir, aliases, strip, install_name_mappings, rpath_dirs_to_remove, install_rpath, install_mode, optional=False):
self.fname = fname
self.outdir = outdir
self.aliases = aliases
self.strip = strip
self.install_name_mappings = install_name_mappings
self.rpath_dirs_to_remove = rpath_dirs_to_remove
self.install_rpath = install_rpath
self.install_mode = install_mode
self.optional = optional
@ -84,11 +115,12 @@ class ExecutableSerialisation:
class TestSerialisation:
def __init__(self, name: str, project: str, suite: str, fname: T.List[str],
is_cross_built: bool, exe_wrapper: T.Optional[build.Executable],
is_cross_built: bool, exe_wrapper: T.Optional[dependencies.ExternalProgram],
needs_exe_wrapper: bool, is_parallel: bool, cmd_args: T.List[str],
env: build.EnvironmentVariables, should_fail: bool,
timeout: T.Optional[int], workdir: T.Optional[str],
extra_paths: T.List[str], protocol: str, priority: int):
extra_paths: T.List[str], protocol: TestProtocol, priority: int,
cmd_is_built: bool):
self.name = name
self.project_name = project
self.suite = suite
@ -107,8 +139,10 @@ class TestSerialisation:
self.protocol = protocol
self.priority = priority
self.needs_exe_wrapper = needs_exe_wrapper
self.cmd_is_built = cmd_is_built
def get_backend_from_name(backend: str, build: T.Optional[build.Build] = None, interpreter: T.Optional[Interpreter] = None) -> T.Optional['Backend']:
def get_backend_from_name(backend: str, build: T.Optional[build.Build] = None, interpreter: T.Optional['Interpreter'] = None) -> T.Optional['Backend']:
if backend == 'ninja':
from . import ninjabackend
return ninjabackend.NinjaBackend(build, interpreter)
@ -135,7 +169,7 @@ def get_backend_from_name(backend: str, build: T.Optional[build.Build] = None, i
# This class contains the basic functionality that is needed by all backends.
# Feel free to move stuff in and out of it as you see fit.
class Backend:
def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional[Interpreter]):
def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional['Interpreter']):
# Make it possible to construct a dummy backend
# This is used for introspection without a build directory
if build is None:
@ -150,9 +184,9 @@ class Backend:
self.build_to_src = mesonlib.relpath(self.environment.get_source_dir(),
self.environment.get_build_dir())
def get_target_filename(self, t):
def get_target_filename(self, t, *, warn_multi_output: bool = True):
if isinstance(t, build.CustomTarget):
if len(t.get_outputs()) != 1:
if warn_multi_output and len(t.get_outputs()) != 1:
mlog.warning('custom_target {!r} has more than one output! '
'Using the first one.'.format(t.name))
filename = t.get_outputs()[0]
@ -197,7 +231,7 @@ class Backend:
return os.path.join(self.get_target_dir(target), target.get_filename())
elif isinstance(target, (build.CustomTarget, build.CustomTargetIndex)):
if not target.is_linkable_target():
raise MesonException('Tried to link against custom target "%s", which is not linkable.' % target.name)
raise MesonException('Tried to link against custom target "{}", which is not linkable.'.format(target.name))
return os.path.join(self.get_target_dir(target), target.get_filename())
elif isinstance(target, build.Executable):
if target.import_filename:
@ -228,7 +262,7 @@ class Backend:
return self.build_to_src
def get_target_private_dir(self, target):
return os.path.join(self.get_target_dir(target), target.get_id())
return os.path.join(self.get_target_filename(target, warn_multi_output=False) + '.p')
def get_target_private_dir_abs(self, target):
return os.path.join(self.environment.get_build_dir(), self.get_target_private_dir(target))
@ -283,7 +317,7 @@ class Backend:
ofile = init_language_file(comp.get_default_suffix(), unity_file_number)
unity_file_number += 1
files_in_current = 0
ofile.write('#include<%s>\n' % src)
ofile.write('#include<{}>\n'.format(src))
files_in_current += 1
if ofile:
ofile.close()
@ -413,6 +447,46 @@ class Backend:
return True
return False
def get_external_rpath_dirs(self, target):
dirs = set()
args = []
# FIXME: is there a better way?
for lang in ['c', 'cpp']:
try:
args.extend(self.environment.coredata.get_external_link_args(target.for_machine, lang))
except Exception:
pass
# Match rpath formats:
# -Wl,-rpath=
# -Wl,-rpath,
rpath_regex = re.compile(r'-Wl,-rpath[=,]([^,]+)')
# Match solaris style compat runpath formats:
# -Wl,-R
# -Wl,-R,
runpath_regex = re.compile(r'-Wl,-R[,]?([^,]+)')
# Match symbols formats:
# -Wl,--just-symbols=
# -Wl,--just-symbols,
symbols_regex = re.compile(r'-Wl,--just-symbols[=,]([^,]+)')
for arg in args:
rpath_match = rpath_regex.match(arg)
if rpath_match:
for dir in rpath_match.group(1).split(':'):
dirs.add(dir)
runpath_match = runpath_regex.match(arg)
if runpath_match:
for dir in runpath_match.group(1).split(':'):
# A -R argument is an rpath only if the path is a directory (a file names a symbols file)
if Path(dir).is_dir():
dirs.add(dir)
symbols_match = symbols_regex.match(arg)
if symbols_match:
for dir in symbols_match.group(1).split(':'):
# Prevent usage of --just-symbols to specify rpath
if Path(dir).is_dir():
raise MesonException('Invalid arg for --just-symbols, {} is a directory.'.format(dir))
return dirs
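# --- Editor's illustrative sketch, not part of the diff ---
# What the rpath regex above accepts; colon-separated values are split into
# individual directories by the loop above. The regex is re-compiled here only
# so the snippet stands alone.
import re
_rpath_regex_example = re.compile(r'-Wl,-rpath[=,]([^,]+)')
assert _rpath_regex_example.match('-Wl,-rpath=/opt/lib').group(1) == '/opt/lib'
assert _rpath_regex_example.match('-Wl,-rpath,/opt/lib:/usr/local/lib').group(1).split(':') == ['/opt/lib', '/usr/local/lib']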
def rpaths_for_bundled_shared_libraries(self, target, exclude_system=True):
paths = []
for dep in target.external_deps:
@ -427,6 +501,9 @@ class Backend:
if exclude_system and self._libdir_is_system(libdir, target.compilers, self.environment):
# No point in adding system paths.
continue
# Don't remove rpaths specified in LDFLAGS.
if libdir in self.get_external_rpath_dirs(target):
continue
# Windows doesn't support rpaths, but we use this function to
# emulate rpaths by setting PATH, so also accept DLLs here
if os.path.splitext(libpath)[1] not in ['.dll', '.lib', '.so', '.dylib']:
@ -446,8 +523,15 @@ class Backend:
result = OrderedSet()
result.add('meson-out')
result.update(self.rpaths_for_bundled_shared_libraries(target))
target.rpath_dirs_to_remove.update([d.encode('utf8') for d in result])
return tuple(result)
@staticmethod
def canonicalize_filename(fname):
for ch in ('/', '\\', ':'):
fname = fname.replace(ch, '_')
return fname
def object_filename_from_source(self, target, source):
assert isinstance(source, mesonlib.File)
build_dir = self.environment.get_build_dir()
@ -478,7 +562,7 @@ class Backend:
source = os.path.relpath(os.path.join(build_dir, rel_src),
os.path.join(self.environment.get_source_dir(), target.get_subdir()))
machine = self.environment.machines[target.for_machine]
return source.replace('/', '_').replace('\\', '_') + '.' + machine.get_object_suffix()
return self.canonicalize_filename(source) + '.' + machine.get_object_suffix()
def determine_ext_objs(self, extobj, proj_dir_to_build_root):
result = []
@ -538,14 +622,14 @@ class Backend:
def create_msvc_pch_implementation(self, target, lang, pch_header):
# We have to include the language in the file name, otherwise
# pch.c and pch.cpp will both end up as pch.obj in VS backends.
impl_name = 'meson_pch-%s.%s' % (lang, lang)
impl_name = 'meson_pch-{}.{}'.format(lang, lang)
pch_rel_to_build = os.path.join(self.get_target_private_dir(target), impl_name)
# Make sure to prepend the build dir, since the working directory is
# not defined. Otherwise, we might create the file in the wrong path.
pch_file = os.path.join(self.build_dir, pch_rel_to_build)
os.makedirs(os.path.dirname(pch_file), exist_ok=True)
content = '#include "%s"' % os.path.basename(pch_header)
content = '#include "{}"'.format(os.path.basename(pch_header))
pch_file_tmp = pch_file + '.tmp'
with open(pch_file_tmp, 'w') as f:
f.write(content)
@ -554,36 +638,20 @@ class Backend:
@staticmethod
def escape_extra_args(compiler, args):
# No extra escaping/quoting needed when not running on Windows
if not mesonlib.is_windows():
return args
# all backslashes in defines are doubly-escaped
extra_args = []
# Compiler-specific escaping is needed for -D args but not for any others
if isinstance(compiler, VisualStudioLikeCompiler):
# MSVC needs escaping when a -D argument ends in \ or \"
for arg in args:
if arg.startswith('-D') or arg.startswith('/D'):
# Without extra escaping for these two, the next character
# gets eaten
if arg.endswith('\\'):
arg += '\\'
elif arg.endswith('\\"'):
arg = arg[:-2] + '\\\\"'
extra_args.append(arg)
else:
# MinGW GCC needs all backslashes in defines to be doubly-escaped
# FIXME: Not sure about Cygwin or Clang
for arg in args:
if arg.startswith('-D') or arg.startswith('/D'):
arg = arg.replace('\\', '\\\\')
extra_args.append(arg)
for arg in args:
if arg.startswith('-D') or arg.startswith('/D'):
arg = arg.replace('\\', '\\\\')
extra_args.append(arg)
return extra_args
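# --- Editor's note, not part of the diff ---
# With the simplified loop above, every backslash in a -D or /D define is
# doubled, independent of platform or compiler, e.g.:
_define_example = r'-DBASEDIR=c:\data'
assert _define_example.replace('\\', '\\\\') == r'-DBASEDIR=c:\\data'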
def generate_basic_compiler_args(self, target, compiler, no_warn_args=False):
# Create an empty commands list, and start adding arguments from
# various sources in the order in which they must override each other
# starting from hard-coded defaults followed by build options and so on.
commands = CompilerArgs(compiler)
commands = compiler.compiler_args()
copt_proxy = self.get_compiler_options_for_target(target)[compiler.language]
# First, the trivial ones that are impossible to override.
@ -665,7 +733,7 @@ class Backend:
args = []
for d in deps:
if not (d.is_linkable_target()):
raise RuntimeError('Tried to link with a non-library target "%s".' % d.get_basename())
raise RuntimeError('Tried to link with a non-library target "{}".'.format(d.get_basename()))
arg = self.get_target_filename_for_linking(d)
if not arg:
continue
@ -706,6 +774,7 @@ class Backend:
for deppath in self.rpaths_for_bundled_shared_libraries(target, exclude_system=False):
result.add(os.path.normpath(os.path.join(self.environment.get_build_dir(), deppath)))
for bdep in extra_bdeps:
prospectives.add(bdep)
prospectives.update(bdep.get_transitive_link_deps())
# Internal deps
for ld in prospectives:
@ -738,7 +807,16 @@ class Backend:
# E.g. an external verifier or simulator program run on a generated executable.
# Can always be run without a wrapper.
test_for_machine = MachineChoice.BUILD
is_cross = not self.environment.machines.matches_build_machine(test_for_machine)
# We allow passing compiled executables to tests, which may be cross-built.
# These must also be considered when deciding whether the test is cross or not.
for a in t.cmd_args:
if isinstance(a, build.BuildTarget):
if a.for_machine is MachineChoice.HOST:
test_for_machine = MachineChoice.HOST
break
is_cross = self.environment.is_cross_build(test_for_machine)
if is_cross and self.environment.need_exe_wrapper():
exe_wrapper = self.environment.get_exe_wrapper()
else:
@ -751,6 +829,7 @@ class Backend:
extra_paths = self.determine_windows_extra_paths(exe, extra_bdeps)
else:
extra_paths = []
cmd_args = []
for a in unholder(t.cmd_args):
if isinstance(a, build.BuildTarget):
@ -760,6 +839,11 @@ class Backend:
cmd_args.append(a)
elif isinstance(a, str):
cmd_args.append(a)
elif isinstance(a, build.Executable):
p = self.construct_target_rel_path(a, t.workdir)
if p == a.get_filename():
p = './' + p
cmd_args.append(p)
elif isinstance(a, build.Target):
cmd_args.append(self.construct_target_rel_path(a, t.workdir))
else:
@ -768,7 +852,8 @@ class Backend:
exe_wrapper, self.environment.need_exe_wrapper(),
t.is_parallel, cmd_args, t.env,
t.should_fail, t.timeout, t.workdir,
extra_paths, t.protocol, t.priority)
extra_paths, t.protocol, t.priority,
isinstance(exe, build.Executable))
arr.append(ts)
return arr
@ -854,7 +939,7 @@ class Backend:
m = regex.search(arg)
while m is not None:
index = int(m.group(1))
src = '@OUTPUT%d@' % index
src = '@OUTPUT{}@'.format(index)
arg = arg.replace(src, os.path.join(private_dir, output_list[index]))
m = regex.search(arg)
newargs.append(arg)
@ -981,35 +1066,36 @@ class Backend:
elif not isinstance(i, str):
err_msg = 'Argument {0} is of unknown type {1}'
raise RuntimeError(err_msg.format(str(i), str(type(i))))
elif '@SOURCE_ROOT@' in i:
i = i.replace('@SOURCE_ROOT@', source_root)
elif '@BUILD_ROOT@' in i:
i = i.replace('@BUILD_ROOT@', build_root)
elif '@DEPFILE@' in i:
if target.depfile is None:
msg = 'Custom target {!r} has @DEPFILE@ but no depfile ' \
'keyword argument.'.format(target.name)
raise MesonException(msg)
dfilename = os.path.join(outdir, target.depfile)
i = i.replace('@DEPFILE@', dfilename)
elif '@PRIVATE_DIR@' in i:
if target.absolute_paths:
pdir = self.get_target_private_dir_abs(target)
else:
pdir = self.get_target_private_dir(target)
i = i.replace('@PRIVATE_DIR@', pdir)
elif '@PRIVATE_OUTDIR_' in i:
match = re.search(r'@PRIVATE_OUTDIR_(ABS_)?([^/\s*]*)@', i)
if not match:
msg = 'Custom target {!r} has an invalid argument {!r}' \
''.format(target.name, i)
raise MesonException(msg)
source = match.group(0)
if match.group(1) is None and not target.absolute_paths:
lead_dir = ''
else:
lead_dir = self.environment.get_build_dir()
i = i.replace(source, os.path.join(lead_dir, outdir))
else:
if '@SOURCE_ROOT@' in i:
i = i.replace('@SOURCE_ROOT@', source_root)
if '@BUILD_ROOT@' in i:
i = i.replace('@BUILD_ROOT@', build_root)
if '@DEPFILE@' in i:
if target.depfile is None:
msg = 'Custom target {!r} has @DEPFILE@ but no depfile ' \
'keyword argument.'.format(target.name)
raise MesonException(msg)
dfilename = os.path.join(outdir, target.depfile)
i = i.replace('@DEPFILE@', dfilename)
if '@PRIVATE_DIR@' in i:
if target.absolute_paths:
pdir = self.get_target_private_dir_abs(target)
else:
pdir = self.get_target_private_dir(target)
i = i.replace('@PRIVATE_DIR@', pdir)
if '@PRIVATE_OUTDIR_' in i:
match = re.search(r'@PRIVATE_OUTDIR_(ABS_)?([^/\s*]*)@', i)
if not match:
msg = 'Custom target {!r} has an invalid argument {!r}' \
''.format(target.name, i)
raise MesonException(msg)
source = match.group(0)
if match.group(1) is None and not target.absolute_paths:
lead_dir = ''
else:
lead_dir = self.environment.get_build_dir()
i = i.replace(source, os.path.join(lead_dir, outdir))
cmd.append(i)
# Substitute the rest of the template strings
values = mesonlib.get_filenames_templates_dict(inputs, outputs)
@ -1110,6 +1196,7 @@ class Backend:
mappings = t.get_link_deps_mapping(d.prefix, self.environment)
i = TargetInstallData(self.get_target_filename(t), outdirs[0],
t.get_aliases(), should_strip, mappings,
t.rpath_dirs_to_remove,
t.install_rpath, install_mode)
d.targets.append(i)
@ -1127,14 +1214,14 @@ class Backend:
implib_install_dir = self.environment.get_import_lib_dir()
# Install the import library; may not exist for shared modules
i = TargetInstallData(self.get_target_filename_for_linking(t),
implib_install_dir, {}, False, {}, '', install_mode,
implib_install_dir, {}, False, {}, set(), '', install_mode,
optional=isinstance(t, build.SharedModule))
d.targets.append(i)
if not should_strip and t.get_debug_filename():
debug_file = os.path.join(self.get_target_dir(t), t.get_debug_filename())
i = TargetInstallData(debug_file, outdirs[0],
{}, False, {}, '',
{}, False, {}, set(), '',
install_mode, optional=True)
d.targets.append(i)
# Install secondary outputs. Only used for Vala right now.
@ -1144,7 +1231,7 @@ class Backend:
if outdir is False:
continue
f = os.path.join(self.get_target_dir(t), output)
i = TargetInstallData(f, outdir, {}, False, {}, None, install_mode)
i = TargetInstallData(f, outdir, {}, False, {}, set(), None, install_mode)
d.targets.append(i)
elif isinstance(t, build.CustomTarget):
# If only one install_dir is specified, assume that all
@ -1157,7 +1244,7 @@ class Backend:
if num_outdirs == 1 and num_out > 1:
for output in t.get_outputs():
f = os.path.join(self.get_target_dir(t), output)
i = TargetInstallData(f, outdirs[0], {}, False, {}, None, install_mode,
i = TargetInstallData(f, outdirs[0], {}, False, {}, set(), None, install_mode,
optional=not t.build_by_default)
d.targets.append(i)
else:
@ -1166,7 +1253,7 @@ class Backend:
if outdir is False:
continue
f = os.path.join(self.get_target_dir(t), output)
i = TargetInstallData(f, outdir, {}, False, {}, None, install_mode,
i = TargetInstallData(f, outdir, {}, False, {}, set(), None, install_mode,
optional=not t.build_by_default)
d.targets.append(i)

@ -15,8 +15,10 @@ import typing as T
import os
import re
import pickle
import shlex
import subprocess
from collections import OrderedDict
from enum import Enum, unique
import itertools
from pathlib import PurePath, Path
from functools import lru_cache
@ -28,9 +30,15 @@ from .. import build
from .. import mlog
from .. import dependencies
from .. import compilers
from ..compilers import (Compiler, CompilerArgs, CCompiler, FortranCompiler,
PGICCompiler, VisualStudioLikeCompiler)
from ..linkers import ArLinker
from ..arglist import CompilerArgs
from ..compilers import (
Compiler, CCompiler,
DmdDCompiler,
FortranCompiler, PGICCompiler,
VisualStudioCsCompiler,
VisualStudioLikeCompiler,
)
from ..linkers import ArLinker, VisualStudioLinker
from ..mesonlib import (
File, LibType, Language, MachineChoice, MesonException, OrderedSet, PerMachine,
ProgressBar, quote_arg, unholder,
@ -45,18 +53,67 @@ FORTRAN_MODULE_PAT = r"^\s*\bmodule\b\s+(\w+)\s*(?:!+.*)*$"
FORTRAN_SUBMOD_PAT = r"^\s*\bsubmodule\b\s*\((\w+:?\w+)\)\s*(\w+)"
FORTRAN_USE_PAT = r"^\s*use,?\s*(?:non_intrinsic)?\s*(?:::)?\s*(\w+)"
def cmd_quote(s):
# see: https://docs.microsoft.com/en-us/windows/desktop/api/shellapi/nf-shellapi-commandlinetoargvw#remarks
# backslash escape any existing double quotes
# any existing backslashes preceding a quote are doubled
s = re.sub(r'(\\*)"', lambda m: '\\' * (len(m.group(1)) * 2 + 1) + '"', s)
# any terminal backslashes likewise need doubling
s = re.sub(r'(\\*)$', lambda m: '\\' * (len(m.group(1)) * 2), s)
# and double quote
s = '"{}"'.format(s)
return s
def gcc_rsp_quote(s):
# see: the function buildargv() in libiberty
#
# this differs from sh-quoting in that a backslash *always* escapes the
# following character, even inside single quotes.
s = s.replace('\\', '\\\\')
return shlex.quote(s)
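# --- Editor's illustrative sketch, not part of the diff ---
# The two response-file quoting styles applied to typical inputs: cmd_quote()
# follows the CommandLineToArgvW rules (double quotes, backslash-escaped
# quotes), while gcc_rsp_quote() follows libiberty's buildargv() rules
# (single quotes, backslashes always escaped).
assert cmd_quote('say "hi"') == '"say \\"hi\\""'
assert gcc_rsp_quote('c:\\dir') == "'c:\\\\dir'"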
# How ninja executes command lines differs between Unix and Windows
# (see https://ninja-build.org/manual.html#ref_rule_command)
if mesonlib.is_windows():
# FIXME: can't use quote_arg on Windows just yet; there are a number of existing workarounds
# throughout the codebase that cumulatively make the current code work (see, e.g. Backend.escape_extra_args
# and NinjaBuildElement.write below) and need to be properly untangled before attempting this
quote_func = lambda s: '"{}"'.format(s)
execute_wrapper = ['cmd', '/c']
quote_func = cmd_quote
execute_wrapper = ['cmd', '/c'] # unused
rmfile_prefix = ['del', '/f', '/s', '/q', '{}', '&&']
else:
quote_func = quote_arg
execute_wrapper = []
rmfile_prefix = ['rm', '-f', '{}', '&&']
def get_rsp_threshold():
'''Return a conservative estimate of the commandline size in bytes
above which a response file should be used. May be overridden for
debugging by setting environment variable MESON_RSP_THRESHOLD.'''
if mesonlib.is_windows():
# Usually 32k, but some projects might use cmd.exe,
# and that has a limit of 8k.
limit = 8192
else:
# On Linux, ninja always passes the commandline as a single
# big string to /bin/sh, and the kernel limits the size of a
# single argument; see MAX_ARG_STRLEN
limit = 131072
# Be conservative
limit = limit / 2
return int(os.environ.get('MESON_RSP_THRESHOLD', limit))
# a conservative estimate of the command-line length limit
rsp_threshold = get_rsp_threshold()
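# --- Editor's note, not part of the diff ---
# MESON_RSP_THRESHOLD wins over the platform default; a hypothetical
# debugging override that forces (almost) every rsp-able rule onto a
# response file would look like this. Note that the module-level
# rsp_threshold above is captured once at import time, so the variable must
# be set before this module is loaded.
import os
os.environ['MESON_RSP_THRESHOLD'] = '4096'   # hypothetical debugging override
assert get_rsp_threshold() == 4096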
# ninja variables whose value should remain unquoted. The value of these ninja
# variables (or variables we use them in) is interpreted directly by ninja
# (e.g. the value of the depfile variable is a pathname that ninja will read
# from, etc.), so it must not be shell quoted.
raw_names = {'DEPFILE_UNQUOTED', 'DESC', 'pool', 'description', 'targetdep'}
def ninja_quote(text, is_build_line=False):
if is_build_line:
qcs = ('$', ' ', ':')
@ -67,12 +124,31 @@ def ninja_quote(text, is_build_line=False):
if '\n' in text:
errmsg = '''Ninja does not support newlines in rules. The content was:
%s
{}
Please report this error with a test case to the Meson bug tracker.''' % text
Please report this error with a test case to the Meson bug tracker.'''.format(text)
raise MesonException(errmsg)
return text
@unique
class Quoting(Enum):
both = 0
notShell = 1
notNinja = 2
none = 3
class NinjaCommandArg:
def __init__(self, s, quoting = Quoting.both):
self.s = s
self.quoting = quoting
def __str__(self):
return self.s
@staticmethod
def list(l, q):
return [NinjaCommandArg(i, q) for i in l]
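# --- Editor's illustrative sketch, not part of the diff ---
# Marking linker output arguments so that neither ninja- nor shell-quoting is
# applied to them when the rule is written out:
_out_args_example = NinjaCommandArg.list(['-o', '$out'], Quoting.none)
assert [str(a) for a in _out_args_example] == ['-o', '$out']
assert all(a.quoting is Quoting.none for a in _out_args_example)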
class NinjaComment:
def __init__(self, comment):
self.comment = comment
@ -86,49 +162,127 @@ class NinjaComment:
class NinjaRule:
def __init__(self, rule, command, args, description,
rspable = False, deps = None, depfile = None, extra = None):
rspable = False, deps = None, depfile = None, extra = None,
rspfile_quote_style = 'gcc'):
def strToCommandArg(c):
if isinstance(c, NinjaCommandArg):
return c
# deal with common cases here, so we don't have to explicitly
# annotate the required quoting everywhere
if c == '&&':
# shell constructs shouldn't be shell quoted
return NinjaCommandArg(c, Quoting.notShell)
if c.startswith('$'):
var = re.search(r'\$\{?(\w*)\}?', c).group(1)
if var not in raw_names:
# ninja variables shouldn't be ninja quoted, and their value
# is already shell quoted
return NinjaCommandArg(c, Quoting.none)
else:
# shell quote the use of ninja variables whose value must
# not be shell quoted (as it is also used by ninja)
return NinjaCommandArg(c, Quoting.notNinja)
return NinjaCommandArg(c)
self.name = rule
self.command = command # includes args which never go into a rspfile
self.args = args # args which will go into a rspfile, if used
self.command = list(map(strToCommandArg, command)) # includes args which never go into a rspfile
self.args = list(map(strToCommandArg, args)) # args which will go into a rspfile, if used
self.description = description
self.deps = deps # depstyle 'gcc' or 'msvc'
self.depfile = depfile
self.extra = extra
self.rspable = rspable # if a rspfile can be used
self.refcount = 0
self.rsprefcount = 0
self.rspfile_quote_style = rspfile_quote_style # rspfile quoting style is 'gcc' or 'cl'
def write(self, outfile):
if not self.refcount:
return
if self.depfile == '$DEPFILE':
self.depfile += '_UNQUOTED'
@staticmethod
def _quoter(x, qf = quote_func):
if isinstance(x, NinjaCommandArg):
if x.quoting == Quoting.none:
return x.s
elif x.quoting == Quoting.notNinja:
return qf(x.s)
elif x.quoting == Quoting.notShell:
return ninja_quote(x.s)
# fallthrough
return ninja_quote(qf(str(x)))
outfile.write('rule %s\n' % self.name)
if self.rspable:
outfile.write(' command = %s @$out.rsp\n' % ' '.join(self.command))
outfile.write(' rspfile = $out.rsp\n')
outfile.write(' rspfile_content = %s\n' % ' '.join(self.args))
def write(self, outfile):
if self.rspfile_quote_style == 'cl':
rspfile_quote_func = cmd_quote
else:
outfile.write(' command = %s\n' % ' '.join(self.command + self.args))
if self.deps:
outfile.write(' deps = %s\n' % self.deps)
if self.depfile:
outfile.write(' depfile = %s\n' % self.depfile)
outfile.write(' description = %s\n' % self.description)
if self.extra:
for l in self.extra.split('\n'):
outfile.write(' ')
outfile.write(l)
outfile.write('\n')
outfile.write('\n')
rspfile_quote_func = gcc_rsp_quote
def rule_iter():
if self.refcount:
yield ''
if self.rsprefcount:
yield '_RSP'
for rsp in rule_iter():
outfile.write('rule {}{}\n'.format(self.name, rsp))
if rsp == '_RSP':
outfile.write(' command = {} @$out.rsp\n'.format(' '.join([self._quoter(x) for x in self.command])))
outfile.write(' rspfile = $out.rsp\n')
outfile.write(' rspfile_content = {}\n'.format(' '.join([self._quoter(x, rspfile_quote_func) for x in self.args])))
else:
outfile.write(' command = {}\n'.format(' '.join([self._quoter(x) for x in (self.command + self.args)])))
if self.deps:
outfile.write(' deps = {}\n'.format(self.deps))
if self.depfile:
outfile.write(' depfile = {}\n'.format(self.depfile))
outfile.write(' description = {}\n'.format(self.description))
if self.extra:
for l in self.extra.split('\n'):
outfile.write(' ')
outfile.write(l)
outfile.write('\n')
outfile.write('\n')
def length_estimate(self, infiles, outfiles, elems):
# determine variables
# this order of actions only approximates ninja's scoping rules, as
# documented at: https://ninja-build.org/manual.html#ref_scope
ninja_vars = {}
for e in elems:
(name, value) = e
ninja_vars[name] = value
ninja_vars['deps'] = self.deps
ninja_vars['depfile'] = self.depfile
ninja_vars['in'] = infiles
ninja_vars['out'] = outfiles
# expand variables in command
command = ' '.join([self._quoter(x) for x in self.command + self.args])
expanded_command = ''
for m in re.finditer(r'(\${\w*})|(\$\w*)|([^$]*)', command):
chunk = m.group()
if chunk.startswith('$'):
chunk = chunk[1:]
chunk = re.sub(r'{(.*)}', r'\1', chunk)
chunk = ninja_vars.get(chunk, []) # undefined ninja variables are empty
chunk = ' '.join(chunk)
expanded_command += chunk
# determine command length
return len(expanded_command)
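# --- Editor's illustrative sketch, not part of the diff ---
# The $var expansion used by length_estimate() above, reduced to a toy command
# and variable map (the real method pulls the values from the build element
# and the rule itself):
import re
_vars_example = {'ARGS': ['-O2', '-Wall'], 'in': ['a.c'], 'out': ['a.o']}
_command_example = 'cc $ARGS -c $in -o $out'
_expanded_example = ''
for _m in re.finditer(r'(\${\w*})|(\$\w*)|([^$]*)', _command_example):
    _chunk = _m.group()
    if _chunk.startswith('$'):
        _chunk = re.sub(r'{(.*)}', r'\1', _chunk[1:])
        _chunk = ' '.join(_vars_example.get(_chunk, []))
    _expanded_example += _chunk
assert _expanded_example == 'cc -O2 -Wall -c a.c -o a.o'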
class NinjaBuildElement:
def __init__(self, all_outputs, outfilenames, rule, infilenames, implicit_outs=None):
def __init__(self, all_outputs, outfilenames, rulename, infilenames, implicit_outs=None):
self.implicit_outfilenames = implicit_outs or []
if isinstance(outfilenames, str):
self.outfilenames = [outfilenames]
else:
self.outfilenames = outfilenames
assert(isinstance(rule, str))
self.rule = rule
assert(isinstance(rulename, str))
self.rulename = rulename
if isinstance(infilenames, str):
self.infilenames = [infilenames]
else:
@ -151,10 +305,39 @@ class NinjaBuildElement:
self.orderdeps.add(dep)
def add_item(self, name, elems):
# Always convert from GCC-style argument naming to the naming used by the
# current compiler. Also filter system include paths, deduplicate, etc.
if isinstance(elems, CompilerArgs):
elems = elems.to_native()
if isinstance(elems, str):
elems = [elems]
self.elems.append((name, elems))
if name == 'DEPFILE':
self.elems.append((name + '_UNQUOTED', elems))
def _should_use_rspfile(self):
# 'phony' is a rule built-in to ninja
if self.rulename == 'phony':
return False
if not self.rule.rspable:
return False
infilenames = ' '.join([ninja_quote(i, True) for i in self.infilenames])
outfilenames = ' '.join([ninja_quote(i, True) for i in self.outfilenames])
return self.rule.length_estimate(infilenames,
outfilenames,
self.elems) >= rsp_threshold
def count_rule_references(self):
if self.rulename != 'phony':
if self._should_use_rspfile():
self.rule.rsprefcount += 1
else:
self.rule.refcount += 1
def write(self, outfile):
self.check_outputs()
ins = ' '.join([ninja_quote(i, True) for i in self.infilenames])
@ -162,7 +345,13 @@ class NinjaBuildElement:
implicit_outs = ' '.join([ninja_quote(i, True) for i in self.implicit_outfilenames])
if implicit_outs:
implicit_outs = ' | ' + implicit_outs
line = 'build {}{}: {} {}'.format(outs, implicit_outs, self.rule, ins)
use_rspfile = self._should_use_rspfile()
if use_rspfile:
rulename = self.rulename + '_RSP'
mlog.debug("Command line for building %s is long, using a response file" % self.outfilenames)
else:
rulename = self.rulename
line = 'build {}{}: {} {}'.format(outs, implicit_outs, rulename, ins)
if len(self.deps) > 0:
line += ' | ' + ' '.join([ninja_quote(x, True) for x in self.deps])
if len(self.orderdeps) > 0:
@ -176,25 +365,24 @@ class NinjaBuildElement:
line = line.replace('\\', '/')
outfile.write(line)
# ninja variables whose value should remain unquoted. The value of these
# ninja variables (or variables we use them in) is interpreted directly
# by ninja (e.g. the value of the depfile variable is a pathname that
# ninja will read from, etc.), so it must not be shell quoted.
raw_names = {'DEPFILE', 'DESC', 'pool', 'description', 'targetdep'}
if use_rspfile:
if self.rule.rspfile_quote_style == 'cl':
qf = cmd_quote
else:
qf = gcc_rsp_quote
else:
qf = quote_func
for e in self.elems:
(name, elems) = e
should_quote = name not in raw_names
line = ' %s = ' % name
line = ' {} = '.format(name)
newelems = []
for i in elems:
if not should_quote or i == '&&': # Hackety hack hack
quoter = ninja_quote
else:
quoter = lambda x: ninja_quote(quote_func(x))
i = i.replace('\\', '\\\\')
if quote_func('') == '""':
i = i.replace('"', '\\"')
quoter = lambda x: ninja_quote(qf(x))
newelems.append(quoter(i))
line += ' '.join(newelems)
line += '\n'
@ -204,7 +392,7 @@ class NinjaBuildElement:
def check_outputs(self):
for n in self.outfilenames:
if n in self.all_outputs:
raise MesonException('Multiple producers for Ninja target "%s". Please rename your targets.' % n)
raise MesonException('Multiple producers for Ninja target "{}". Please rename your targets.'.format(n))
self.all_outputs[n] = True
class NinjaBackend(backends.Backend):
@ -271,7 +459,7 @@ int dummy;
# different locales have different messages with a different
# number of colons. Match up to the drive name 'd:\'.
# When used in cross compilation, the path separator is a
# backslash rather than a forward slash so handle both.
# forward slash rather than a backslash so handle both.
matchre = re.compile(rb"^(.*\s)([a-zA-Z]:\\|\/).*stdio.h$")
def detect_prefix(out):
@ -299,8 +487,7 @@ int dummy;
outfilename = os.path.join(self.environment.get_build_dir(), self.ninja_filename)
tempfilename = outfilename + '~'
with open(tempfilename, 'w', encoding='utf-8') as outfile:
outfile.write('# This is the build file for project "%s"\n' %
self.build.get_project())
outfile.write('# This is the build file for project "{}"\n'.format(self.build.get_project()))
outfile.write('# It is autogenerated by the Meson build system.\n')
outfile.write('# Do not edit by hand.\n\n')
outfile.write('ninja_required_version = 1.7.1\n\n')
@ -308,9 +495,9 @@ int dummy;
num_pools = self.environment.coredata.backend_options['backend_max_links'].value
if num_pools > 0:
outfile.write('''pool link_pool
depth = %d
depth = {}
''' % num_pools)
'''.format(num_pools))
with self.detect_vs_dep_prefix(tempfilename) as outfile:
self.generate_rules()
@ -347,10 +534,14 @@ int dummy;
# http://clang.llvm.org/docs/JSONCompilationDatabase.html
def generate_compdb(self):
rules = []
# TODO: Rather than an explicit list here, rules could be marked in the
# rule store as being wanted in compdb
for for_machine in MachineChoice:
for lang in self.environment.coredata.compilers[for_machine]:
rules += [self.get_compiler_rule_name(lang, for_machine)]
rules += [self.get_pch_rule_name(lang, for_machine)]
rules += [ "%s%s" % (rule, ext) for rule in [self.get_compiler_rule_name(lang, for_machine)]
for ext in ['', '_RSP']]
rules += [ "%s%s" % (rule, ext) for rule in [self.get_pch_rule_name(lang, for_machine)]
for ext in ['', '_RSP']]
compdb_options = ['-x'] if mesonlib.version_compare(self.ninja_version, '>=1.9') else []
ninja_compdb = [self.ninja_command, '-t', 'compdb'] + compdb_options + rules
builddir = self.environment.get_build_dir()
@ -571,7 +762,7 @@ int dummy;
generated_source_files.append(raw_src)
elif self.environment.is_object(rel_src):
obj_list.append(rel_src)
elif self.environment.is_library(rel_src):
elif self.environment.is_library(rel_src) or modules.is_module_library(rel_src):
pass
else:
# Assume anything not specifically a source file is a header. This is because
@ -586,7 +777,7 @@ int dummy;
o = self.generate_llvm_ir_compile(target, src)
else:
o = self.generate_single_compile(target, src, True,
header_deps=header_deps)
order_deps=header_deps)
obj_list.append(o)
use_pch = self.environment.coredata.base_options.get('b_pch', False)
@ -765,7 +956,7 @@ int dummy;
target_name = 'meson-{}'.format(self.build_run_target_name(target))
elem = NinjaBuildElement(self.all_outputs, target_name, 'CUSTOM_COMMAND', [])
elem.add_item('COMMAND', cmd)
elem.add_item('description', 'Running external command %s' % target.name)
elem.add_item('description', 'Running external command {}'.format(target.name))
elem.add_item('pool', 'console')
# Alias that runs the target defined above with the name the user specified
self.create_target_alias(target_name)
@ -778,6 +969,15 @@ int dummy;
self.processed_targets[target.get_id()] = True
def generate_coverage_command(self, elem, outputs):
targets = self.build.get_targets().values()
use_llvm_cov = False
for target in targets:
if not hasattr(target, 'compilers'):
continue
for compiler in target.compilers.values():
if compiler.get_id() == 'clang' and not compiler.info.is_darwin():
use_llvm_cov = True
break
elem.add_item('COMMAND', self.environment.get_build_command() +
['--internal', 'coverage'] +
outputs +
@ -785,7 +985,8 @@ int dummy;
os.path.join(self.environment.get_source_dir(),
self.build.get_subproject_dir()),
self.environment.get_build_dir(),
self.environment.get_log_dir()])
self.environment.get_log_dir()] +
(['--use_llvm_cov'] if use_llvm_cov else []))
def generate_coverage_rules(self):
e = NinjaBuildElement(self.all_outputs, 'meson-coverage', 'CUSTOM_COMMAND', 'PHONY')
@ -874,13 +1075,15 @@ int dummy;
deps='gcc', depfile='$DEPFILE',
extra='restat = 1'))
c = [ninja_quote(quote_func(x)) for x in self.environment.get_build_command()] + \
c = self.environment.get_build_command() + \
['--internal',
'regenerate',
ninja_quote(quote_func(self.environment.get_source_dir())),
ninja_quote(quote_func(self.environment.get_build_dir()))]
self.environment.get_source_dir(),
self.environment.get_build_dir(),
'--backend',
'ninja']
self.add_rule(NinjaRule('REGENERATE_BUILD',
c + ['--backend', 'ninja'], [],
c, [],
'Regenerating build files.',
extra='generator = 1'))
@ -897,11 +1100,15 @@ int dummy;
def add_build(self, build):
self.build_elements.append(build)
# increment rule refcount
if build.rule != 'phony':
self.ruledict[build.rule].refcount += 1
if build.rulename != 'phony':
# reference rule
build.rule = self.ruledict[build.rulename]
def write_rules(self, outfile):
for b in self.build_elements:
if isinstance(b, NinjaBuildElement):
b.count_rule_references()
for r in self.rules:
r.write(outfile)
@ -980,12 +1187,12 @@ int dummy;
ofilename = os.path.join(self.get_target_private_dir(target), ofilebase)
elem = NinjaBuildElement(self.all_outputs, ofilename, "CUSTOM_COMMAND", rel_sourcefile)
elem.add_item('COMMAND', ['resgen', rel_sourcefile, ofilename])
elem.add_item('DESC', 'Compiling resource %s' % rel_sourcefile)
elem.add_item('DESC', 'Compiling resource {}'.format(rel_sourcefile))
self.add_build(elem)
deps.append(ofilename)
a = '-resource:' + ofilename
else:
raise InvalidArguments('Unknown resource file %s.' % r)
raise InvalidArguments('Unknown resource file {}.'.format(r))
args.append(a)
return args, deps
@ -997,7 +1204,7 @@ int dummy;
compiler = target.compilers[Language.CS]
rel_srcs = [os.path.normpath(s.rel_to_builddir(self.build_to_src)) for s in src_list]
deps = []
commands = CompilerArgs(compiler, target.extra_args.get(Language.CS, []))
commands = compiler.compiler_args(target.extra_args.get(Language.CS, []))
commands += compiler.get_buildtype_args(buildtype)
commands += compiler.get_optimization_args(self.get_option_for_target('optimization', target))
commands += compiler.get_debug_args(self.get_option_for_target('debug', target))
@ -1278,7 +1485,7 @@ int dummy;
main_rust_file = None
for i in target.get_sources():
if not rustc.can_compile(i):
raise InvalidArguments('Rust target %s contains a non-rust source file.' % target.get_basename())
raise InvalidArguments('Rust target {} contains a non-rust source file.'.format(target.get_basename()))
if main_rust_file is None:
main_rust_file = i.rel_to_builddir(self.build_to_src)
if main_rust_file is None:
@ -1349,7 +1556,8 @@ int dummy;
self.get_target_dir(target))
else:
target_slashname_workaround_dir = self.get_target_dir(target)
rpath_args = rustc.build_rpath_args(self.environment,
(rpath_args, target.rpath_dirs_to_remove) = \
rustc.build_rpath_args(self.environment,
self.environment.get_build_dir(),
target_slashname_workaround_dir,
self.determine_rpath_dirs(target),
@ -1376,12 +1584,12 @@ int dummy;
return PerMachine('_FOR_BUILD', '')[for_machine]
@classmethod
def get_compiler_rule_name(cls, lang: Language, for_machine: MachineChoice) -> str:
return '%s_COMPILER%s' % (lang.get_lower_case_name(), cls.get_rule_suffix(for_machine))
def get_compiler_rule_name(cls, lang: str, for_machine: MachineChoice) -> str:
return '{}_COMPILER{}'.format(lang.get_lower_case_name(), cls.get_rule_suffix(for_machine))
@classmethod
def get_pch_rule_name(cls, lang: Language, for_machine: MachineChoice) -> str:
return '%s_PCH%s' % (lang.get_lower_case_name(), cls.get_rule_suffix(for_machine))
def get_pch_rule_name(cls, lang: str, for_machine: MachineChoice) -> str:
return '{}_PCH{}'.format(lang.get_lower_case_name(), cls.get_rule_suffix(for_machine))
@classmethod
def compiler_to_rule_name(cls, compiler: Compiler) -> str:
@ -1453,7 +1661,7 @@ int dummy;
abs_headers.append(absh)
header_imports += swiftc.get_header_import_args(absh)
else:
raise InvalidArguments('Swift target %s contains a non-swift source file.' % target.get_basename())
raise InvalidArguments('Swift target {} contains a non-swift source file.'.format(target.get_basename()))
os.makedirs(self.get_target_private_dir_abs(target), exist_ok=True)
compile_args = swiftc.get_compile_only_args()
compile_args += swiftc.get_optimization_args(self.get_option_for_target('optimization', target))
@ -1540,7 +1748,7 @@ int dummy;
static_linker = self.build.static_linker[for_machine]
if static_linker is None:
return
rule = 'STATIC_LINKER%s' % self.get_rule_suffix(for_machine)
rule = 'STATIC_LINKER{}'.format(self.get_rule_suffix(for_machine))
cmdlist = []
args = ['$in']
# FIXME: Must normalize file names with pathlib.Path before writing
@ -1554,7 +1762,7 @@ int dummy;
cmdlist = execute_wrapper + [c.format('$out') for c in rmfile_prefix]
cmdlist += static_linker.get_exelist()
cmdlist += ['$LINK_ARGS']
cmdlist += static_linker.get_output_args('$out')
cmdlist += NinjaCommandArg.list(static_linker.get_output_args('$out'), Quoting.none)
description = 'Linking static target $out'
if num_pools > 0:
pool = 'pool = link_pool'
@ -1562,6 +1770,7 @@ int dummy;
pool = None
self.add_rule(NinjaRule(rule, cmdlist, args, description,
rspable=static_linker.can_linker_accept_rsp(),
rspfile_quote_style='cl' if isinstance(static_linker, VisualStudioLinker) else 'gcc',
extra=pool))
def generate_dynamic_link_rules(self):
@ -1574,9 +1783,9 @@ int dummy;
or langname == Language.RUST \
or langname == Language.CS:
continue
rule = '%s_LINKER%s' % (langname.get_lower_case_name(), self.get_rule_suffix(for_machine))
rule = '{}_LINKER{}'.format(langname.get_lower_case_name(), self.get_rule_suffix(for_machine))
command = compiler.get_linker_exelist()
args = ['$ARGS'] + compiler.get_linker_output_args('$out') + ['$in', '$LINK_ARGS']
args = ['$ARGS'] + NinjaCommandArg.list(compiler.get_linker_output_args('$out'), Quoting.none) + ['$in', '$LINK_ARGS']
description = 'Linking target $out'
if num_pools > 0:
pool = 'pool = link_pool'
@ -1584,12 +1793,14 @@ int dummy;
pool = None
self.add_rule(NinjaRule(rule, command, args, description,
rspable=compiler.can_linker_accept_rsp(),
rspfile_quote_style='cl' if (compiler.get_argument_syntax() == 'msvc' or
isinstance(compiler, DmdDCompiler)) else 'gcc',
extra=pool))
args = [ninja_quote(quote_func(x)) for x in self.environment.get_build_command()] + \
args = self.environment.get_build_command() + \
['--internal',
'symbolextractor',
ninja_quote(quote_func(self.environment.get_build_dir())),
self.environment.get_build_dir(),
'$in',
'$IMPLIB',
'$out']
@ -1601,31 +1812,28 @@ int dummy;
def generate_java_compile_rule(self, compiler):
rule = self.compiler_to_rule_name(compiler)
invoc = [ninja_quote(i) for i in compiler.get_exelist()]
command = invoc + ['$ARGS', '$in']
command = compiler.get_exelist() + ['$ARGS', '$in']
description = 'Compiling Java object $in'
self.add_rule(NinjaRule(rule, command, [], description))
def generate_cs_compile_rule(self, compiler):
rule = self.compiler_to_rule_name(compiler)
invoc = [ninja_quote(i) for i in compiler.get_exelist()]
command = invoc
command = compiler.get_exelist()
args = ['$ARGS', '$in']
description = 'Compiling C Sharp target $out'
self.add_rule(NinjaRule(rule, command, args, description,
rspable=mesonlib.is_windows()))
rspable=mesonlib.is_windows(),
rspfile_quote_style='cl' if isinstance(compiler, VisualStudioCsCompiler) else 'gcc'))
def generate_vala_compile_rules(self, compiler):
rule = self.compiler_to_rule_name(compiler)
invoc = [ninja_quote(i) for i in compiler.get_exelist()]
command = invoc + ['$ARGS', '$in']
command = compiler.get_exelist() + ['$ARGS', '$in']
description = 'Compiling Vala source $in'
self.add_rule(NinjaRule(rule, command, [], description, extra='restat = 1'))
def generate_rust_compile_rules(self, compiler):
rule = self.compiler_to_rule_name(compiler)
invoc = [ninja_quote(i) for i in compiler.get_exelist()]
command = invoc + ['$ARGS', '$in']
command = compiler.get_exelist() + ['$ARGS', '$in']
description = 'Compiling Rust source $in'
depfile = '$targetdep'
depstyle = 'gcc'
@ -1634,18 +1842,18 @@ int dummy;
def generate_swift_compile_rules(self, compiler):
rule = self.compiler_to_rule_name(compiler)
full_exe = [ninja_quote(x) for x in self.environment.get_build_command()] + [
full_exe = self.environment.get_build_command() + [
'--internal',
'dirchanger',
'$RUNDIR',
]
invoc = full_exe + [ninja_quote(i) for i in compiler.get_exelist()]
invoc = full_exe + compiler.get_exelist()
command = invoc + ['$ARGS', '$in']
description = 'Compiling Swift source $in'
self.add_rule(NinjaRule(rule, command, [], description))
def generate_fortran_dep_hack(self, crstr):
rule = 'FORTRAN_DEP_HACK%s' % (crstr)
rule = 'FORTRAN_DEP_HACK{}'.format(crstr)
if mesonlib.is_windows():
cmd = ['cmd', '/C']
else:
@ -1659,8 +1867,8 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
if self.created_llvm_ir_rule[compiler.for_machine]:
return
rule = self.get_compiler_rule_name('llvm_ir', compiler.for_machine)
command = [ninja_quote(i) for i in compiler.get_exelist()]
args = ['$ARGS'] + compiler.get_output_args('$out') + compiler.get_compile_only_args() + ['$in']
command = compiler.get_exelist()
args = ['$ARGS'] + NinjaCommandArg.list(compiler.get_output_args('$out'), Quoting.none) + compiler.get_compile_only_args() + ['$in']
description = 'Compiling LLVM IR object $in'
self.add_rule(NinjaRule(rule, command, args, description,
rspable=compiler.can_linker_accept_rsp()))
@@ -1689,16 +1897,10 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
if langname == Language.FORTRAN:
self.generate_fortran_dep_hack(crstr)
rule = self.get_compiler_rule_name(langname, compiler.for_machine)
depargs = compiler.get_dependency_gen_args('$out', '$DEPFILE')
quoted_depargs = []
for d in depargs:
if d != '$out' and d != '$in':
d = quote_func(d)
quoted_depargs.append(d)
command = [ninja_quote(i) for i in compiler.get_exelist()]
args = ['$ARGS'] + quoted_depargs + compiler.get_output_args('$out') + compiler.get_compile_only_args() + ['$in']
description = 'Compiling %s object $out' % compiler.get_display_language()
depargs = NinjaCommandArg.list(compiler.get_dependency_gen_args('$out', '$DEPFILE'), Quoting.none)
command = compiler.get_exelist()
args = ['$ARGS'] + depargs + NinjaCommandArg.list(compiler.get_output_args('$out'), Quoting.none) + compiler.get_compile_only_args() + ['$in']
description = 'Compiling {} object $out'.format(compiler.get_display_language())
if isinstance(compiler, VisualStudioLikeCompiler):
deps = 'msvc'
depfile = None
@@ -1707,6 +1909,8 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
depfile = '$DEPFILE'
self.add_rule(NinjaRule(rule, command, args, description,
rspable=compiler.can_linker_accept_rsp(),
rspfile_quote_style='cl' if (compiler.get_argument_syntax() == 'msvc' or
isinstance(compiler, DmdDCompiler)) else 'gcc',
deps=deps, depfile=depfile))
def generate_pch_rule_for(self, langname, compiler):
@@ -1715,16 +1919,11 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
rule = self.compiler_to_pch_rule_name(compiler)
depargs = compiler.get_dependency_gen_args('$out', '$DEPFILE')
quoted_depargs = []
for d in depargs:
if d != '$out' and d != '$in':
d = quote_func(d)
quoted_depargs.append(d)
if isinstance(compiler, VisualStudioLikeCompiler):
output = []
else:
output = compiler.get_output_args('$out')
command = compiler.get_exelist() + ['$ARGS'] + quoted_depargs + output + compiler.get_compile_only_args() + ['$in']
output = NinjaCommandArg.list(compiler.get_output_args('$out'), Quoting.none)
command = compiler.get_exelist() + ['$ARGS'] + depargs + output + compiler.get_compile_only_args() + ['$in']
description = 'Precompiling header $in'
if isinstance(compiler, VisualStudioLikeCompiler):
deps = 'msvc'
@@ -1859,9 +2058,8 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
modname = modmatch.group(1).lower()
if modname in module_files:
raise InvalidArguments(
'Namespace collision: module %s defined in '
'two files %s and %s.' %
(modname, module_files[modname], s))
'Namespace collision: module {} defined in '
'two files {} and {}.'.format(modname, module_files[modname], s))
module_files[modname] = s
else:
submodmatch = submodre.match(line)
@@ -1872,9 +2070,8 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
if submodname in submodule_files:
raise InvalidArguments(
'Namespace collision: submodule %s defined in '
'two files %s and %s.' %
(submodname, submodule_files[submodname], s))
'Namespace collision: submodule {} defined in '
'two files {} and {}.'.format(submodname, submodule_files[submodname], s))
submodule_files[submodname] = s
self.fortran_deps[target.get_basename()] = {**module_files, **submodule_files}
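The namespace-collision errors above come from a regex scan over the target's Fortran sources. A simplified sketch of that scan (the real regexes also handle submodules and skip `module procedure` lines):
import re

modre = re.compile(r'\s*\bmodule\s+(\w+)', re.IGNORECASE)

def scan_fortran_modules(sources):
    module_files = {}
    for path in sources:
        with open(path, encoding='ascii', errors='ignore') as f:
            for line in f:
                m = modre.match(line)
                if not m:
                    continue
                name = m.group(1).lower()
                if name in module_files:
                    raise ValueError(
                        'Namespace collision: module {} defined in '
                        'two files {} and {}.'.format(name, module_files[name], path))
                module_files[name] = path
    return module_files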
@@ -1960,11 +2157,11 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
return linker.get_link_debugfile_args(outname)
def generate_llvm_ir_compile(self, target, src):
base_proxy = self.get_base_options_for_target(target)
compiler = get_compiler_for_source(target.compilers.values(), src)
commands = CompilerArgs(compiler)
commands = compiler.compiler_args()
# Compiler args for compiling this target
commands += compilers.get_base_compile_args(self.environment.coredata.base_options,
compiler)
commands += compilers.get_base_compile_args(base_proxy, compiler)
if isinstance(src, File):
if src.is_built:
src_filename = os.path.join(src.subdir, src.fname)
@@ -1974,7 +2171,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
src_filename = os.path.basename(src)
else:
src_filename = src
obj_basename = src_filename.replace('/', '_').replace('\\', '_')
obj_basename = self.canonicalize_filename(src_filename)
rel_obj = os.path.join(self.get_target_private_dir(target), obj_basename)
rel_obj += '.' + self.environment.machines[target.for_machine].get_object_suffix()
commands += self.get_compile_debugfile_args(compiler, target, rel_obj)
@@ -1987,9 +2184,6 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
# Write the Ninja build command
compiler_name = self.get_compiler_rule_name('llvm_ir', compiler.for_machine)
element = NinjaBuildElement(self.all_outputs, rel_obj, compiler_name, rel_src)
# Convert from GCC-style link argument naming to the naming used by the
# current compiler.
commands = commands.to_native()
element.add_item('ARGS', commands)
self.add_build(element)
return rel_obj
@@ -2005,6 +2199,10 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
curdir = '.'
return compiler.get_include_args(curdir, False)
@lru_cache(maxsize=None)
def get_normpath_target(self, source) -> str:
return os.path.normpath(source)
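get_normpath_target() is simply a cached wrapper around os.path.normpath; memoizing it pays off because the same generated-source directories get normalized repeatedly while collecting include args. The same idea as a standalone sketch:
import os
from functools import lru_cache

@lru_cache(maxsize=None)
def normpath_cached(path: str) -> str:
    # Identical inputs hit the cache instead of re-running normpath.
    return os.path.normpath(path)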
def get_custom_target_dir_include_args(self, target, compiler):
custom_target_include_dirs = []
for i in target.get_generated_sources():
@@ -2013,7 +2211,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
# own target build dir.
if not isinstance(i, (build.CustomTarget, build.CustomTargetIndex)):
continue
idir = os.path.normpath(self.get_target_dir(i))
idir = self.get_normpath_target(self.get_target_dir(i))
if not idir:
idir = '.'
if idir not in custom_target_include_dirs:
@@ -2049,7 +2247,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
base_proxy = self.get_base_options_for_target(target)
# Create an empty commands list, and start adding arguments from
# various sources in the order in which they must override each other
commands = CompilerArgs(compiler)
commands = compiler.compiler_args()
# Start with symbol visibility.
commands += compiler.gnu_symbol_visibility_args(target.gnu_symbol_visibility)
# Add compiler args for compiling this target derived from 'base' build
@@ -2129,7 +2327,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
compiler = get_compiler_for_source(target.compilers.values(), src)
commands = self._generate_single_compile(target, compiler, is_generated)
commands = CompilerArgs(commands.compiler, commands)
commands = commands.compiler.compiler_args(commands)
# Create introspection information
if is_generated is False:
@@ -2206,9 +2404,6 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
d = os.path.join(self.get_target_private_dir(target), d)
element.add_orderdep(d)
element.add_dep(pch_dep)
# Convert from GCC-style link argument naming to the naming used by the
# current compiler.
commands = commands.to_native()
for i in self.get_fortran_orderdeps(target, compiler):
element.add_orderdep(i)
element.add_item('DEPFILE', dep_file)
@@ -2481,7 +2676,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
#
# Once all the linker options have been passed, we will start passing
# libraries and library paths from internal and external sources.
commands = CompilerArgs(linker)
commands = linker.compiler_args()
# First, the trivial ones that are impossible to override.
#
# Add linker args for linking this target derived from 'base' build
@@ -2583,20 +2778,19 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
self.get_target_dir(target))
else:
target_slashname_workaround_dir = self.get_target_dir(target)
commands += linker.build_rpath_args(self.environment,
(rpath_args, target.rpath_dirs_to_remove) = \
linker.build_rpath_args(self.environment,
self.environment.get_build_dir(),
target_slashname_workaround_dir,
self.determine_rpath_dirs(target),
target.build_rpath,
target.install_rpath)
commands += rpath_args
# Add libraries generated by custom targets
custom_target_libraries = self.get_custom_target_provided_libraries(target)
commands += extra_args
commands += custom_target_libraries
commands += stdlib_args # Standard library arguments go last, because they never depend on anything.
# Convert from GCC-style link argument naming to the naming used by the
# current compiler.
commands = commands.to_native()
dep_targets.extend([self.get_dependency_filename(t) for t in dependencies])
dep_targets.extend([self.get_dependency_filename(t)
for t in target.link_depends])
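This hunk assumes build_rpath_args() now returns a pair: the linker flags plus the build-tree rpath directories that must be stripped again at install time. A toy sketch of that contract, with made-up paths:
def build_rpath_args(build_dir, rpath_dirs):
    # Emit one -rpath flag and remember which entries point into the build tree.
    args = ['-Wl,-rpath,' + ':'.join(rpath_dirs)] if rpath_dirs else []
    dirs_to_remove = {d for d in rpath_dirs if d.startswith(build_dir)}
    return args, dirs_to_remove

rpath_args, rpath_dirs_to_remove = build_rpath_args(
    '/tmp/build', ['/tmp/build/subdir', '/opt/lib'])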
@@ -2647,18 +2841,14 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
def generate_gcov_clean(self):
gcno_elem = NinjaBuildElement(self.all_outputs, 'meson-clean-gcno', 'CUSTOM_COMMAND', 'PHONY')
script_root = self.environment.get_script_dir()
clean_script = os.path.join(script_root, 'delwithsuffix.py')
gcno_elem.add_item('COMMAND', mesonlib.python_command + [clean_script, '.', 'gcno'])
gcno_elem.add_item('COMMAND', mesonlib.meson_command + ['--internal', 'delwithsuffix', '.', 'gcno'])
gcno_elem.add_item('description', 'Deleting gcno files')
self.add_build(gcno_elem)
# Alias that runs the target defined above
self.create_target_alias('meson-clean-gcno')
gcda_elem = NinjaBuildElement(self.all_outputs, 'meson-clean-gcda', 'CUSTOM_COMMAND', 'PHONY')
script_root = self.environment.get_script_dir()
clean_script = os.path.join(script_root, 'delwithsuffix.py')
gcda_elem.add_item('COMMAND', mesonlib.python_command + [clean_script, '.', 'gcda'])
gcda_elem.add_item('COMMAND', mesonlib.meson_command + ['--internal', 'delwithsuffix', '.', 'gcda'])
gcda_elem.add_item('description', 'Deleting gcda files')
self.add_build(gcda_elem)
# Alias that runs the target defined above
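Both coverage-clean targets now route through `meson --internal delwithsuffix`. What such a helper boils down to, sketched independently of Meson's real script:
import os

def del_with_suffix(topdir: str, suffix: str) -> None:
    if not suffix.startswith('.'):
        suffix = '.' + suffix
    for root, _dirs, files in os.walk(topdir):
        for name in files:
            if name.endswith(suffix):
                # e.g. stale .gcno / .gcda coverage artifacts
                os.unlink(os.path.join(root, name))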

@@ -26,7 +26,6 @@ from .. import build
from .. import dependencies
from .. import mlog
from .. import compilers
from ..compilers import CompilerArgs
from ..interpreter import Interpreter
from ..mesonlib import (
MesonException, File, python_command, replace_if_different
@@ -98,6 +97,9 @@ class Vs2010Backend(backends.Backend):
self.subdirs = {}
self.handled_target_deps = {}
def get_target_private_dir(self, target):
return os.path.join(self.get_target_dir(target), target.get_id())
def generate_custom_generator_commands(self, target, parent_node):
generator_output_files = []
custom_target_include_dirs = []
@@ -591,10 +593,8 @@ class Vs2010Backend(backends.Backend):
raise MesonException('Could not guess language from source file %s.' % src)
def add_pch(self, pch_sources, lang, inc_cl):
if len(pch_sources) <= 1:
# We only need per file precompiled headers if we have more than 1 language.
return
self.use_pch(pch_sources, lang, inc_cl)
if lang in pch_sources:
self.use_pch(pch_sources, lang, inc_cl)
def create_pch(self, pch_sources, lang, inc_cl):
pch = ET.SubElement(inc_cl, 'PrecompiledHeader')
@@ -602,6 +602,8 @@ class Vs2010Backend(backends.Backend):
self.add_pch_files(pch_sources, lang, inc_cl)
def use_pch(self, pch_sources, lang, inc_cl):
pch = ET.SubElement(inc_cl, 'PrecompiledHeader')
pch.text = 'Use'
header = self.add_pch_files(pch_sources, lang, inc_cl)
pch_include = ET.SubElement(inc_cl, 'ForcedIncludeFiles')
pch_include.text = header + ';%(ForcedIncludeFiles)'
@@ -821,12 +823,12 @@ class Vs2010Backend(backends.Backend):
clconf = ET.SubElement(compiles, 'ClCompile')
# CRT type; debug or release
if vscrt_type.value == 'from_buildtype':
if self.buildtype == 'debug' or self.buildtype == 'debugoptimized':
if self.buildtype == 'debug':
ET.SubElement(type_config, 'UseDebugLibraries').text = 'true'
ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDebugDLL'
else:
ET.SubElement(type_config, 'UseDebugLibraries').text = 'false'
ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreaded'
ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDLL'
elif vscrt_type.value == 'mdd':
ET.SubElement(type_config, 'UseDebugLibraries').text = 'true'
ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDebugDLL'
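With b_vscrt=from_buildtype, only a plain debug build now selects the debug CRT; debugoptimized and release builds get the DLL release runtime. The mapping, pulled out as a tiny illustrative helper:
def vscrt_from_buildtype(buildtype: str) -> str:
    # 'debug' -> /MDd, everything else (debugoptimized, release, ...) -> /MD
    if buildtype == 'debug':
        return 'MultiThreadedDebugDLL'
    return 'MultiThreadedDLL'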
@@ -855,6 +857,18 @@ class Vs2010Backend(backends.Backend):
ET.SubElement(clconf, 'BasicRuntimeChecks').text = 'UninitializedLocalUsageCheck'
elif '/RTCs' in buildtype_args:
ET.SubElement(clconf, 'BasicRuntimeChecks').text = 'StackFrameRuntimeCheck'
# Exception handling has to be set in the xml in addition to the "AdditionalOptions" because otherwise
cl will give warning D9025: overriding '/EHs' with cpp_eh value
if 'cpp' in target.compilers:
eh = self.environment.coredata.compiler_options[target.for_machine]['cpp']['eh']
if eh.value == 'a':
ET.SubElement(clconf, 'ExceptionHandling').text = 'Async'
elif eh.value == 's':
ET.SubElement(clconf, 'ExceptionHandling').text = 'SyncCThrow'
elif eh.value == 'none':
ET.SubElement(clconf, 'ExceptionHandling').text = 'false'
else: # 'sc' or 'default'
ET.SubElement(clconf, 'ExceptionHandling').text = 'Sync'
# End configuration
ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props')
generated_files, custom_target_output_files, generated_files_include_dirs = self.generate_custom_generator_commands(target, root)
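The new ExceptionHandling element mirrors the cpp_eh compiler option so the MSBuild setting and the flag in AdditionalOptions agree. The value mapping, as an illustrative standalone function:
def msbuild_exception_handling(cpp_eh: str) -> str:
    return {
        'a': 'Async',          # /EHa
        's': 'SyncCThrow',     # /EHs
        'none': 'false',       # exceptions disabled
    }.get(cpp_eh, 'Sync')      # 'sc' or 'default' -> /EHsc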
@@ -884,9 +898,9 @@ class Vs2010Backend(backends.Backend):
#
# file_args is also later split out into defines and include_dirs in
# case someone passed those in there
file_args = dict((lang, CompilerArgs(comp)) for lang, comp in target.compilers.items())
file_defines = dict((lang, []) for lang in target.compilers)
file_inc_dirs = dict((lang, []) for lang in target.compilers)
file_args = {l: c.compiler_args() for l, c in target.compilers.items()}
file_defines = {l: [] for l in target.compilers}
file_inc_dirs = {l: [] for l in target.compilers}
# The order in which these compile args are added must match
# generate_single_compile() and generate_basic_compiler_args()
for l, comp in target.compilers.items():
@@ -989,23 +1003,23 @@ class Vs2010Backend(backends.Backend):
# Cflags required by external deps might have UNIX-specific flags,
# so filter them out if needed
if isinstance(d, dependencies.OpenMPDependency):
d_compile_args = compiler.openmp_flags()
ET.SubElement(clconf, 'OpenMPSupport').text = 'true'
else:
d_compile_args = compiler.unix_args_to_native(d.get_compile_args())
for arg in d_compile_args:
if arg.startswith(('-D', '/D')):
define = arg[2:]
# De-dup
if define in target_defines:
target_defines.remove(define)
target_defines.append(define)
elif arg.startswith(('-I', '/I')):
inc_dir = arg[2:]
# De-dup
if inc_dir not in target_inc_dirs:
target_inc_dirs.append(inc_dir)
else:
target_args.append(arg)
for arg in d_compile_args:
if arg.startswith(('-D', '/D')):
define = arg[2:]
# De-dup
if define in target_defines:
target_defines.remove(define)
target_defines.append(define)
elif arg.startswith(('-I', '/I')):
inc_dir = arg[2:]
# De-dup
if inc_dir not in target_inc_dirs:
target_inc_dirs.append(inc_dir)
else:
target_args.append(arg)
languages += gen_langs
if len(target_args) > 0:
@@ -1046,12 +1060,10 @@ class Vs2010Backend(backends.Backend):
# Note: SuppressStartupBanner is /NOLOGO and is 'true' by default
pch_sources = {}
if self.environment.coredata.base_options.get('b_pch', False):
pch_node = ET.SubElement(clconf, 'PrecompiledHeader')
for lang in [Language.C, Language.CPP]:
pch = target.get_pch(lang)
if not pch:
continue
pch_node.text = 'Use'
if compiler.id == 'msvc':
if len(pch) == 1:
# Auto generate PCH.
@@ -1065,17 +1077,13 @@ class Vs2010Backend(backends.Backend):
# I don't know whether it's relevant but let's handle other compilers
# used with a vs backend
pch_sources[lang] = [pch[0], None, lang, None]
if len(pch_sources) == 1:
# If there is only 1 language with precompiled headers, we can use it for the entire project, which
# is cleaner than specifying it for each source file.
self.use_pch(pch_sources, list(pch_sources)[0], clconf)
resourcecompile = ET.SubElement(compiles, 'ResourceCompile')
ET.SubElement(resourcecompile, 'PreprocessorDefinitions')
# Linker options
link = ET.SubElement(compiles, 'Link')
extra_link_args = CompilerArgs(compiler)
extra_link_args = compiler.compiler_args()
# FIXME: Can these buildtype linker args be added as tags in the
# vcxproj file (similar to buildtype compiler args) instead of in
# AdditionalOptions?
@@ -1103,14 +1111,14 @@ class Vs2010Backend(backends.Backend):
# Extend without reordering or de-dup to preserve `-L -l` sets
# https://github.com/mesonbuild/meson/issues/1718
if isinstance(dep, dependencies.OpenMPDependency):
extra_link_args.extend_direct(compiler.openmp_flags())
ET.SubElement(clconf, 'OpenMPSupport').text = 'true'
else:
extra_link_args.extend_direct(dep.get_link_args())
for d in target.get_dependencies():
if isinstance(d, build.StaticLibrary):
for dep in d.get_external_deps():
if isinstance(dep, dependencies.OpenMPDependency):
extra_link_args.extend_direct(compiler.openmp_flags())
ET.SubElement(clconf, 'OpenMPSupport').text = 'true'
else:
extra_link_args.extend_direct(dep.get_link_args())
# Add link args for c_* or cpp_* build options. Currently this only
@@ -1198,7 +1206,8 @@ class Vs2010Backend(backends.Backend):
# /nologo
ET.SubElement(link, 'SuppressStartupBanner').text = 'true'
# /release
ET.SubElement(link, 'SetChecksum').text = 'true'
if not self.environment.coredata.get_builtin_option('debug'):
ET.SubElement(link, 'SetChecksum').text = 'true'
meson_file_group = ET.SubElement(root, 'ItemGroup')
ET.SubElement(meson_file_group, 'None', Include=os.path.join(proj_to_src_dir, build_filename))

@@ -12,12 +12,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import copy, os, re
from collections import OrderedDict, defaultdict
import itertools, pathlib
from functools import lru_cache
import copy
import hashlib
import itertools, pathlib
import os
import pickle
from functools import lru_cache
import re
import typing as T
from . import environment
@@ -82,6 +84,7 @@ buildtarget_kwargs = set([
'override_options',
'sources',
'gnu_symbol_visibility',
'link_language',
])
known_build_target_kwargs = (
@@ -92,7 +95,7 @@ known_build_target_kwargs = (
rust_kwargs |
cs_kwargs)
known_exe_kwargs = known_build_target_kwargs | {'implib', 'export_dynamic', 'link_language', 'pie'}
known_exe_kwargs = known_build_target_kwargs | {'implib', 'export_dynamic', 'pie'}
known_shlib_kwargs = known_build_target_kwargs | {'version', 'soversion', 'vs_module_defs', 'darwin_versions'}
known_shmod_kwargs = known_build_target_kwargs | {'vs_module_defs'}
known_stlib_kwargs = known_build_target_kwargs | {'pic'}
@@ -495,6 +498,7 @@ class BuildTarget(Target):
self.link_targets = []
self.link_whole_targets = []
self.link_depends = []
self.added_deps = set()
self.name_prefix_set = False
self.name_suffix_set = False
self.filename = 'no_name'
@@ -509,6 +513,8 @@ class BuildTarget(Target):
self.d_features = {}
self.pic = False
self.pie = False
# Track build_rpath entries so we can remove them at install time
self.rpath_dirs_to_remove = set()
# Sources can be:
# 1. Pre-existing source files in the source tree
# 2. Pre-existing sources generated by configure_file in the build tree
@@ -532,6 +538,9 @@ class BuildTarget(Target):
repr_str = "<{0} {1}: {2}>"
return repr_str.format(self.__class__.__name__, self.get_id(), self.filename)
def __str__(self):
return "{}".format(self.name)
def validate_install(self, environment):
if self.for_machine is MachineChoice.BUILD and self.need_install:
if environment.is_cross_build():
@@ -729,7 +738,7 @@ class BuildTarget(Target):
File.from_source_file(environment.source_dir, self.subdir, s))
elif hasattr(s, 'get_outputs'):
self.link_depends.extend(
[File.from_built_file(s.subdir, p) for p in s.get_outputs()])
[File.from_built_file(s.get_subdir(), p) for p in s.get_outputs()])
else:
raise InvalidArguments(
'Link_depends arguments must be strings, Files, '
@@ -772,7 +781,7 @@ class BuildTarget(Target):
if isinstance(src, str):
src = File(False, self.subdir, src)
elif isinstance(src, File):
FeatureNew('File argument for extract_objects', '0.50.0').use(self.subproject)
FeatureNew.single_use('File argument for extract_objects', '0.50.0', self.subproject)
else:
raise MesonException('Object extraction arguments must be strings or Files.')
# FIXME: It could be a generated source
@@ -813,7 +822,8 @@ class BuildTarget(Target):
def get_link_dep_subdirs(self):
result = OrderedSet()
for i in self.link_targets:
result.add(i.get_subdir())
if not isinstance(i, StaticLibrary):
result.add(i.get_subdir())
result.update(i.get_link_dep_subdirs())
return result
@@ -1012,23 +1022,16 @@ This will become a hard error in a future Meson release.''')
def get_extra_args(self, language):
return self.extra_args.get(language, [])
def get_dependencies(self, exclude=None, for_pkgconfig=False):
def get_dependencies(self, exclude=None):
transitive_deps = []
if exclude is None:
exclude = []
for t in itertools.chain(self.link_targets, self.link_whole_targets):
if t in transitive_deps or t in exclude:
continue
# When generating `Libs:` and `Libs.private:` lists in pkg-config
# files we don't want to include static libraries that we link_whole
# or are uninstalled (they're implicitly promoted to link_whole).
# But we still need to include their transitive dependencies,
# a static library we link_whole would itself link to a shared
# library or an installed static library.
if not for_pkgconfig or (not t.is_internal() and t not in self.link_whole_targets):
transitive_deps.append(t)
transitive_deps.append(t)
if isinstance(t, StaticLibrary):
transitive_deps += t.get_dependencies(transitive_deps + exclude, for_pkgconfig)
transitive_deps += t.get_dependencies(transitive_deps + exclude)
return transitive_deps
def get_source_subdir(self):
@@ -1061,6 +1064,8 @@ This will become a hard error in a future Meson release.''')
def add_deps(self, deps):
deps = listify(deps)
for dep in unholder(deps):
if dep in self.added_deps:
continue
if isinstance(dep, dependencies.InternalDependency):
# Those parts that are internal.
self.process_sourcelist(dep.sources)
@@ -1099,6 +1104,7 @@ You probably should put it in link_with instead.''')
'either an external dependency (returned by find_library() or '
'dependency()) or an internal dependency (returned by '
'declare_dependency()).'.format(type(dep).__name__))
self.added_deps.add(dep)
def get_external_deps(self):
return self.external_deps
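add_deps() now short-circuits on dependencies it has already processed, which the new added_deps set makes cheap. The de-duplication pattern in isolation (processing details elided):
class DepTracker:
    def __init__(self):
        self.added_deps = set()

    def add_deps(self, deps):
        for dep in deps:
            if dep in self.added_deps:
                continue  # already merged this dependency's flags/sources
            # ... merge sources, include dirs and link args here ...
            self.added_deps.add(dep)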
@@ -1115,7 +1121,7 @@ You probably should put it in link_with instead.''')
if not isinstance(t, (Target, CustomTargetIndex)):
raise InvalidArguments('{!r} is not a target.'.format(t))
if not t.is_linkable_target():
raise InvalidArguments('Link target {!r} is not linkable.'.format(t))
raise InvalidArguments("Link target '{!s}' is not linkable.".format(t))
if isinstance(self, SharedLibrary) and isinstance(t, StaticLibrary) and not t.pic:
msg = "Can't link non-PIC static library {!r} into shared library {!r}. ".format(t.name, self.name)
msg += "Use the 'pic' option to static_library to build with PIC."
@@ -1228,11 +1234,7 @@ You probably should put it in link_with instead.''')
See: https://github.com/mesonbuild/meson/issues/1653
'''
langs = []
# User specified link_language of target (for multi-language targets)
if self.link_language:
return [self.link_language]
langs = [] # type: T.List[str]
# Check if any of the external libraries were written in this language
for dep in self.external_deps:
@@ -1264,6 +1266,12 @@ You probably should put it in link_with instead.''')
# Populate list of all compilers, not just those being used to compile
# sources in this target
all_compilers = self.environment.coredata.compilers[self.for_machine]
# If the user set the link_language, just return that.
if self.link_language:
comp = all_compilers[self.link_language]
return comp, comp.language_stdlib_only_link_flags()
# Languages used by dependencies
dep_langs = self.get_langs_used_by_deps()
# Pick a compiler based on the language priority-order
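The clink linker choice now honors an explicit link_language before falling back to the priority-ordered search. A compressed sketch of that selection logic (priority list abbreviated, names illustrative):
def choose_linker(compilers, used_langs, link_language=None):
    # An explicit user choice wins outright.
    if link_language:
        return compilers[link_language]
    # Otherwise walk a (shortened) priority order and take the first
    # language actually used by the target or its dependencies.
    for lang in ('cpp', 'c', 'fortran'):
        if lang in used_langs and lang in compilers:
            return compilers[lang]
    raise RuntimeError('no suitable linker found')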
@@ -2159,7 +2167,7 @@ class CustomTarget(Target):
'when installing a target')
if isinstance(kwargs['install_dir'], list):
FeatureNew('multiple install_dir for custom_target', '0.40.0').use(self.subproject)
FeatureNew.single_use('multiple install_dir for custom_target', '0.40.0', self.subproject)
# If an item in this list is False, the output corresponding to
# the list index of that item will not be installed
self.install_dir = typeslistify(kwargs['install_dir'], (str, bool))
@@ -2171,7 +2179,6 @@ class CustomTarget(Target):
if 'build_always' in kwargs and 'build_always_stale' in kwargs:
raise InvalidArguments('build_always and build_always_stale are mutually exclusive. Combine build_by_default and build_always_stale.')
elif 'build_always' in kwargs:
mlog.deprecation('build_always is deprecated. Combine build_by_default and build_always_stale instead.')
if 'build_by_default' not in kwargs:
self.build_by_default = kwargs['build_always']
self.build_always_stale = kwargs['build_always']
