diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index 455a9d418c2b..6a78a36c62a2 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -17,17 +17,16 @@ jobs:
steps:
- name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Initialize CodeQL
- uses: github/codeql-action/init@v2
+ uses: github/codeql-action/init@v3
with:
# bypass cache: https://github.com/github/codeql-action/issues/1445
- tools: latest
+ tools: linked
config-file: .github/codeql/codeql-config.yml
languages: python
# we have none
- setup-python-dependencies: false
- name: Perform CodeQL Analysis
- uses: github/codeql-action/analyze@v2
+ uses: github/codeql-action/analyze@v3
diff --git a/.github/workflows/cygwin.yml b/.github/workflows/cygwin.yml
index c6f3dc416687..d819f802f1fe 100644
--- a/.github/workflows/cygwin.yml
+++ b/.github/workflows/cygwin.yml
@@ -13,12 +13,14 @@ on:
paths:
- "mesonbuild/**"
- "test cases/**"
+ - "unittests/**"
- ".github/workflows/cygwin.yml"
- "run*tests.py"
pull_request:
paths:
- "mesonbuild/**"
- "test cases/**"
+ - "unittests/**"
- ".github/workflows/cygwin.yml"
- "run*tests.py"
@@ -39,7 +41,7 @@ jobs:
MESON_CI_JOBNAME: cygwin-${{ matrix.NAME }}
steps:
- - uses: actions/cache/restore@v3
+ - uses: actions/cache/restore@v4
id: restore-cache
with:
# should use 'pip3 cache dir' to discover this path
@@ -49,7 +51,7 @@ jobs:
- run: git config --global core.autocrlf input
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- uses: cygwin/cygwin-install-action@master
with:
@@ -67,6 +69,7 @@ jobs:
libgtk3-devel
libxml2-devel
libxslt-devel
+ make
ninja
python2-devel
python3-devel
@@ -83,7 +86,7 @@ jobs:
python3 -m pip --disable-pip-version-check install gcovr fastjsonschema pefile pytest pytest-subtests pytest-xdist coverage
shell: C:\cygwin\bin\bash.exe --noprofile --norc -o igncr -eo pipefail '{0}'
- - uses: actions/cache/save@v3
+ - uses: actions/cache/save@v4
with:
# should use 'pip3 cache dir' to discover this path
path: C:\cygwin\home\runneradmin\.cache\pip
@@ -99,7 +102,7 @@ jobs:
SKIP_STATIC_BOOST: 1
shell: C:\cygwin\bin\bash.exe --noprofile --norc -o igncr -eo pipefail '{0}'
- - uses: actions/upload-artifact@v3
+ - uses: actions/upload-artifact@v4
with:
name: ${{ matrix.NAME }}
path: meson-test-run.*
diff --git a/.github/workflows/file_format.yml b/.github/workflows/file_format.yml
index 429fc91107ad..d61d634c0315 100644
--- a/.github/workflows/file_format.yml
+++ b/.github/workflows/file_format.yml
@@ -13,8 +13,8 @@ jobs:
format:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
- - uses: actions/setup-python@v4
+ - uses: actions/checkout@v4
+ - uses: actions/setup-python@v5
with:
python-version: '3.x'
- run: python3 ./run_format_tests.py
diff --git a/.github/workflows/images.yml b/.github/workflows/images.yml
index dd91898efc24..d20f7e5ced02 100644
--- a/.github/workflows/images.yml
+++ b/.github/workflows/images.yml
@@ -45,6 +45,7 @@ jobs:
- { name: Ubuntu Bionic, id: bionic }
- { name: Ubuntu Rolling, id: ubuntu-rolling }
steps:
+ # Need v3 because of bionic
- uses: actions/checkout@v3
# Login to dockerhub
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index e8cd37341762..5588034723b8 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -9,10 +9,16 @@ on:
paths:
- "**.py"
- ".github/workflows/lint.yml"
+ - ".pylintrc"
+ - ".flake8"
+ - ".mypy.ini"
pull_request:
paths:
- "**.py"
- ".github/workflows/lint.yml"
+ - ".pylintrc"
+ - ".flake8"
+ - ".mypy.ini"
permissions:
contents: read
@@ -22,8 +28,8 @@ jobs:
pylint:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
- - uses: actions/setup-python@v4
+ - uses: actions/checkout@v4
+ - uses: actions/setup-python@v5
with:
python-version: '3.x'
- run: python -m pip install pylint
@@ -32,8 +38,8 @@ jobs:
flake8:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
- - uses: actions/setup-python@v4
+ - uses: actions/checkout@v4
+ - uses: actions/setup-python@v5
with:
python-version: '3.x'
- run: python -m pip install flake8
@@ -42,8 +48,8 @@ jobs:
mypy:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
- - uses: actions/setup-python@v4
+ - uses: actions/checkout@v4
+ - uses: actions/setup-python@v5
with:
python-version: '3.x'
# Pin mypy to version 1.8, so we retain the ability to lint for Python 3.7
diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml
index 3b726315537a..88acbef90206 100644
--- a/.github/workflows/macos.yml
+++ b/.github/workflows/macos.yml
@@ -31,8 +31,8 @@ jobs:
unittests-appleclang:
runs-on: macos-latest
steps:
- - uses: actions/checkout@v3
- - uses: actions/setup-python@v4
+ - uses: actions/checkout@v4
+ - uses: actions/setup-python@v5
with:
python-version: '3.x'
- run: |
@@ -81,7 +81,7 @@ jobs:
HOMEBREW_NO_AUTO_UPDATE: 1
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
# Avoid picking up an older version of LLVM that does not work.
- run: brew update
# github actions overwrites brew's python. Force it to reassert itself, by running in a separate step.
@@ -132,8 +132,8 @@ jobs:
env:
HOMEBREW_NO_AUTO_UPDATE: 1
steps:
- - uses: actions/checkout@v3
- - uses: actions/setup-python@v4
+ - uses: actions/checkout@v4
+ - uses: actions/setup-python@v5
with:
python-version: '3.x'
- run: python -m pip install -e .
@@ -145,13 +145,13 @@ jobs:
- run: ln -sfn /usr/local/Cellar/qt@4/4.8.7_6.reinstall /usr/local/Cellar/qt@4/4.8.7_6
- run: meson setup "test cases/frameworks/4 qt" build -Drequired=qt4
- run: meson compile -C build
- - uses: actions/upload-artifact@v3
+ - uses: actions/upload-artifact@v4
if: failure()
with:
name: Qt4_Mac_build
path: build/meson-logs/meson-log.txt
- run: meson test -C build -v
- - uses: actions/upload-artifact@v3
+ - uses: actions/upload-artifact@v4
if: failure()
with:
name: Qt4_Mac_test
diff --git a/.github/workflows/msys2.yml b/.github/workflows/msys2.yml
index 278954c393d6..3b518fee7607 100644
--- a/.github/workflows/msys2.yml
+++ b/.github/workflows/msys2.yml
@@ -61,7 +61,7 @@ jobs:
shell: msys2 {0}
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- uses: msys2/setup-msys2@v2
with:
@@ -127,7 +127,7 @@ jobs:
MSYSTEM= python3 ./tools/run_with_cov.py run_tests.py --backend=ninja
- - uses: actions/upload-artifact@v3
+ - uses: actions/upload-artifact@v4
with:
name: ${{ matrix.NAME }}
path: meson-test-run.*
diff --git a/.github/workflows/nonnative.yml b/.github/workflows/nonnative.yml
index 1ad9e35bb0e0..2712d1032935 100644
--- a/.github/workflows/nonnative.yml
+++ b/.github/workflows/nonnative.yml
@@ -37,7 +37,7 @@ jobs:
apt-get -y purge clang gcc gdc
apt-get -y autoremove
python3 -m pip install coverage
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Run tests
run: bash -c 'source /ci/env_vars.sh; cd $GITHUB_WORKSPACE; ./tools/run_with_cov.py ./run_tests.py $CI_ARGS --cross ubuntu-armhf.json --cross-only'
diff --git a/.github/workflows/os_comp.yml b/.github/workflows/os_comp.yml
index 4fa4a87ed15d..56859ec3db01 100644
--- a/.github/workflows/os_comp.yml
+++ b/.github/workflows/os_comp.yml
@@ -49,17 +49,36 @@ jobs:
- { name: Gentoo, id: gentoo }
- { name: OpenSUSE, id: opensuse }
- { name: Ubuntu Bionic, id: bionic }
- container: mesonbuild/${{ matrix.cfg.id }}:latest
+ container:
+ image: mesonbuild/${{ matrix.cfg.id }}:latest
+ volumes:
+ - ${{ matrix.cfg.id == 'bionic' && '/node20217:/node20217:rw,rshared' || ' ' }}
+ - ${{ matrix.cfg.id == 'bionic' && '/node20217:/__e/node20:ro,rshared' || ' ' }}
env:
MESON_CI_JOBNAME: linux-${{ matrix.cfg.id }}-gcc
steps:
- - uses: actions/checkout@v3
+ - name: install nodejs20glibc2.17
+ if: ${{ matrix.cfg.id == 'bionic' }}
+ run: |
+ apt install curl -y
+ curl -LO https://unofficial-builds.nodejs.org/download/release/v20.9.0/node-v20.9.0-linux-x64-glibc-217.tar.xz
+ tar -xf node-v20.9.0-linux-x64-glibc-217.tar.xz --strip-components 1 -C /node20217
+ ldd /__e/node20/bin/node
+ - uses: actions/checkout@v4
+
- name: Run tests
# All environment variables are stored inside the docker image in /ci/env_vars.sh
# They are defined in the `env` section in each image.json. CI_ARGS should be set
# via the `args` array ub the image.json
- run: bash -c 'source /ci/env_vars.sh; cd $GITHUB_WORKSPACE; ./tools/run_with_cov.py ./run_tests.py $CI_ARGS'
+ shell: bash
+ run: |
+ # dmd is installed under /root on OpenSUSE
+ [[ ${{ matrix.cfg.id }} == opensuse ]] && export HOME=/root
+
+ source /ci/env_vars.sh
+ cd $GITHUB_WORKSPACE
+ ./tools/run_with_cov.py ./run_tests.py $CI_ARGS
- name: Aggregate coverage reports
run: ./ci/combine_cov.sh
@@ -80,7 +99,7 @@ jobs:
MESON_CI_JOBNAME_UPDATE: linux-arch-gcc-pypy
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Run tests
run: |
source /ci/env_vars.sh
@@ -134,7 +153,7 @@ jobs:
steps:
- name: Checkout code
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Run tests
shell: bash
diff --git a/.github/workflows/stable_builddir.yml b/.github/workflows/stable_builddir.yml
index 5da321d18bce..ae2951801cef 100644
--- a/.github/workflows/stable_builddir.yml
+++ b/.github/workflows/stable_builddir.yml
@@ -14,7 +14,7 @@ jobs:
env:
TESTDIR: "manual tests/13 builddir upgrade"
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: install ninja
run: sudo apt-get -y install build-essential ninja-build
- name: Fetch tags and unshallow
diff --git a/.github/workflows/unusedargs_missingreturn.yml b/.github/workflows/unusedargs_missingreturn.yml
index d823c310e7c2..72f39b511f45 100644
--- a/.github/workflows/unusedargs_missingreturn.yml
+++ b/.github/workflows/unusedargs_missingreturn.yml
@@ -44,8 +44,8 @@ jobs:
linux:
runs-on: ubuntu-20.04
steps:
- - uses: actions/checkout@v3
- - uses: actions/setup-python@v4
+ - uses: actions/checkout@v4
+ - uses: actions/setup-python@v5
with:
python-version: '3.x'
- name: Install Compilers
@@ -71,8 +71,8 @@ jobs:
windows:
runs-on: windows-latest
steps:
- - uses: actions/checkout@v3
- - uses: actions/setup-python@v4
+ - uses: actions/checkout@v4
+ - uses: actions/setup-python@v5
with:
python-version: '3.x'
diff --git a/.github/workflows/website.yml b/.github/workflows/website.yml
index 2c76d87846b8..fdb7d1400919 100644
--- a/.github/workflows/website.yml
+++ b/.github/workflows/website.yml
@@ -32,9 +32,9 @@ jobs:
HAS_SSH_KEY: ${{ secrets.WEBSITE_PRIV_KEY != '' }}
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- - uses: actions/cache/restore@v3
+ - uses: actions/cache/restore@v4
id: restore-cache
with:
# should use 'pip3 cache dir' to discover this path
@@ -47,7 +47,7 @@ jobs:
sudo apt-get -y install python3-pip ninja-build libjson-glib-dev
pip install hotdoc chevron strictyaml
- - uses: actions/cache/save@v3
+ - uses: actions/cache/save@v4
with:
# should use 'pip3 cache dir' to discover this path
path: ~/.cache/pip
diff --git a/.pylintrc b/.pylintrc
index 64316fe6e70e..4a93d4923a25 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -66,6 +66,7 @@ disable=
too-many-lines,
too-many-locals,
too-many-nested-blocks,
+ too-many-positional-arguments,
too-many-public-methods,
too-many-return-statements,
too-many-statements,
diff --git a/ci/ciimage/arch/install.sh b/ci/ciimage/arch/install.sh
index 76eb8cd37aeb..8f5245149688 100755
--- a/ci/ciimage/arch/install.sh
+++ b/ci/ciimage/arch/install.sh
@@ -11,7 +11,7 @@ pkgs=(
ninja make git sudo fakeroot autoconf automake patch
libelf gcc gcc-fortran gcc-objc vala rust bison flex cython go dlang-dmd
mono boost qt5-base gtkmm3 gtest gmock protobuf gobject-introspection
- itstool gtk3 java-environment=8 gtk-doc llvm clang sdl2 graphviz
+ itstool glib2-devel gtk3 java-environment=8 gtk-doc llvm clang sdl2 graphviz
doxygen vulkan-validation-layers openssh mercurial gtk-sharp-2 qt5-tools
libwmf cmake netcdf-fortran openmpi nasm gnustep-base gettext
python-lxml hotdoc rust-bindgen qt6-base qt6-tools wayland wayland-protocols
@@ -41,6 +41,9 @@ useradd -m $AUR_USER
echo "${AUR_USER}:" | chpasswd -e
echo "$AUR_USER ALL = NOPASSWD: ALL" >> /etc/sudoers
+# fix installing packages from source, attempting to install debug info
+sed -i '/OPTIONS/{s|debug|!debug|}' /etc/makepkg.conf
+
# Install yay
su $AUR_USER -c 'cd; git clone https://aur.archlinux.org/yay.git'
su $AUR_USER -c 'cd; cd yay; makepkg'
diff --git a/ci/ciimage/gentoo/install.sh b/ci/ciimage/gentoo/install.sh
index 8f7aa33f5d17..caf21a1fad69 100755
--- a/ci/ciimage/gentoo/install.sh
+++ b/ci/ciimage/gentoo/install.sh
@@ -40,8 +40,7 @@ pkgs_stable=(
sci-libs/hdf5
dev-qt/linguist-tools
sys-devel/llvm
- # qt6 unstable
- #dev-qt/qttools
+ dev-qt/qttools
# misc
app-admin/sudo
@@ -158,3 +157,11 @@ install_python_packages
python3 -m pip install "${base_python_pkgs[@]}"
echo "source /etc/profile" >> /ci/env_vars.sh
+
+# Cleanup to avoid including large contents in the docker image.
+# We don't need cache files that are side artifacts of installing packages.
+# We also don't need the gentoo tree -- the official docker image doesn't
+# either, and expects you to use emerge-webrsync once you need it.
+rm -rf /var/cache/binpkgs
+rm -rf /var/cache/distfiles
+rm -rf /var/db/repos/gentoo
diff --git a/ci/ciimage/opensuse/install.sh b/ci/ciimage/opensuse/install.sh
index b0097172a8a9..fdfedcb1bf2e 100755
--- a/ci/ciimage/opensuse/install.sh
+++ b/ci/ciimage/opensuse/install.sh
@@ -5,7 +5,7 @@ set -e
source /ci/common.sh
pkgs=(
- python3-pip python3 python3-devel
+ python3-pip python3 python3-devel python3-setuptools
ninja make git autoconf automake patch libjpeg-devel
elfutils gcc gcc-c++ gcc-fortran gcc-objc gcc-obj-c++ vala rust bison flex curl lcov
mono-core gtkmm3-devel gtest gmock protobuf-devel wxGTK3-3_2-devel gobject-introspection-devel
@@ -35,11 +35,14 @@ echo 'export PKG_CONFIG_PATH="/usr/lib64/mpi/gcc/openmpi3/lib64/pkgconfig:$PKG_C
curl -fsS https://dlang.org/install.sh | bash -s dmd | tee dmd_out.txt
cat dmd_out.txt | grep source | sed 's/^[^`]*`//g' | sed 's/`.*//g' >> /ci/env_vars.sh
chmod +x /ci/env_vars.sh
+# Lower ulimit before running dub, otherwise there's a very high chance it will OOM.
+# See: https://github.com/dlang/phobos/pull/9048 and https://github.com/dlang/phobos/pull/8990
+echo 'ulimit -n -S 10000' >> /ci/env_vars.sh
source /ci/env_vars.sh
dub_fetch urld
-dub build urld --compiler=dmd
+dub build --deep urld --arch=x86_64 --compiler=dmd --build=debug
dub_fetch dubtestproject
dub build dubtestproject:test1 --compiler=dmd
dub build dubtestproject:test2 --compiler=dmd
diff --git a/ci/ciimage/ubuntu-rolling/install.sh b/ci/ciimage/ubuntu-rolling/install.sh
index e1747034fe36..2066944e4cf5 100755
--- a/ci/ciimage/ubuntu-rolling/install.sh
+++ b/ci/ciimage/ubuntu-rolling/install.sh
@@ -27,6 +27,7 @@ pkgs=(
bindgen
itstool
openjdk-11-jre
+ jq
)
sed -i '/^Types: deb/s/deb/deb deb-src/' /etc/apt/sources.list.d/ubuntu.sources
@@ -43,9 +44,13 @@ eatmydata apt-get -y install --no-install-recommends wine-stable # Wine is spec
install_python_packages hotdoc
+# Lower ulimit before running dub, otherwise there's a very high chance it will OOM.
+# See: https://github.com/dlang/phobos/pull/9048 and https://github.com/dlang/phobos/pull/8990
+echo 'ulimit -n -S 10000' >> /ci/env_vars.sh
+ulimit -n -S 10000
# dub stuff
dub_fetch urld
-dub build urld --compiler=gdc
+dub build --deep urld --arch=x86_64 --compiler=gdc --build=debug
dub_fetch dubtestproject
dub build dubtestproject:test1 --compiler=ldc2
dub build dubtestproject:test2 --compiler=ldc2
@@ -58,6 +63,36 @@ source "$HOME/.cargo/env"
rustup target add x86_64-pc-windows-gnu
rustup target add arm-unknown-linux-gnueabihf
+# Zig
+# Use the GitHub API to get the latest release information
+LATEST_RELEASE=$(wget -qO- "https://api.github.com/repos/ziglang/zig/releases/latest")
+ZIGVER=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
+ZIG_BASE="zig-linux-x86_64-$ZIGVER"
+wget "https://ziglang.org/download/$ZIGVER/$ZIG_BASE.tar.xz"
+tar xf "$ZIG_BASE.tar.xz"
+rm -rf "$ZIG_BASE.tar.xz"
+cd "$ZIG_BASE"
+
+# As mentioned in the Zig readme, the binary and files under lib can be copied
+# https://github.com/ziglang/zig?tab=readme-ov-file#installation
+mv zig /usr/bin
+mv lib /usr/lib/zig
+
+# Copy the LICENSE
+mkdir -p /usr/share/doc/zig
+cp LICENSE /usr/share/doc/zig
+
+# Remove what's left of the directory
+cd ..
+rm -rf "$ZIG_BASE"
+
+# Hack for https://github.com/linux-test-project/lcov/issues/245
+# https://github.com/linux-test-project/lcov/commit/bf135caf5f626e02191c42bd2773e08a0bb9b7e5
+# XXX: Drop this once Ubuntu has lcov-2.1*
+git clone https://github.com/linux-test-project/lcov
+cd lcov
+make install
+
# cleanup
apt-get -y clean
apt-get -y autoclean
diff --git a/ci/run.ps1 b/ci/run.ps1
index 596253fe2d62..d3fda2d8b70e 100644
--- a/ci/run.ps1
+++ b/ci/run.ps1
@@ -50,7 +50,7 @@ function DownloadFile([String] $Source, [String] $Destination) {
if (($env:backend -eq 'ninja') -and ($env:arch -ne 'arm64')) { $dmd = $true } else { $dmd = $false }
-DownloadFile -Source https://github.com/mesonbuild/cidata/releases/download/ci3/ci_data.zip -Destination $env:AGENT_WORKFOLDER\ci_data.zip
+DownloadFile -Source https://github.com/mesonbuild/cidata/releases/download/ci5/ci_data.zip -Destination $env:AGENT_WORKFOLDER\ci_data.zip
echo "Extracting ci_data.zip"
Expand-Archive $env:AGENT_WORKFOLDER\ci_data.zip -DestinationPath $env:AGENT_WORKFOLDER\ci_data
& "$env:AGENT_WORKFOLDER\ci_data\install.ps1" -Arch $env:arch -Compiler $env:compiler -Boost $true -DMD $dmd
diff --git a/data/shell-completions/bash/meson b/data/shell-completions/bash/meson
index 88dc15ec3225..0814342dbe2a 100644
--- a/data/shell-completions/bash/meson
+++ b/data/shell-completions/bash/meson
@@ -30,7 +30,7 @@ _subprojects() {
local COMPREPLY=()
_filedir
# _filedir for whatever reason can't reason about symlinks, so -d will them.
- # Filter out wrap files with this expresion.
+ # Filter out wrap files with this expression.
IFS=$'\n' echo "${COMPREPLY[*]}" | grep -vE '\.wrap$' | xargs
popd &>/dev/null
}
diff --git a/data/shell-completions/zsh/_meson b/data/shell-completions/zsh/_meson
index 7d6d89b7ef73..8178060b4eda 100644
--- a/data/shell-completions/zsh/_meson
+++ b/data/shell-completions/zsh/_meson
@@ -29,9 +29,9 @@
local curcontext="$curcontext" state line
local -i ret
-local __meson_backends="(ninja xcode ${(j. .)${:-vs{,2010,2015,2017}}})"
+local __meson_backends="(ninja "${(j. .)${:-vs{,2010,2015,2017,2019,2022}}}" xcode none)"
local __meson_build_types="(plain debug debugoptimized minsize release)"
-local __meson_wrap_modes="(WrapMode.{default,nofallback,nodownload,forcefallback})"
+local __meson_wrap_modes="(default nofallback nodownload forcefallback nopromote)"
local __meson_dist_formats=("xztar" "bztar" "gztar" "zip")
local __meson_cd='-C[change into this directory before running]:target dir:_directories'
local -a __meson_common=(
@@ -81,6 +81,11 @@ local -a meson_commands=(
'wrap:manage source dependencies'
'subprojects:manage subprojects'
'compile:Build the project'
+'rewrite:Modify the project definition'
+'devenv:Run commands in developer environment'
+'env2mfile:Convert current environment to a cross or native file'
+'format:Format meson source file'
+'help:Print help of a subcommand'
)
(( $+functions[__meson_is_build_dir] )) || __meson_is_build_dir() {
@@ -370,7 +375,7 @@ _arguments \
)
if (( CURRENT == 2 )); then
- _describe -t commands "Meson subproject subcommands" commands
+ _describe -t commands "Meson subprojects subcommands" commands
else
local curcontext="$curcontext"
cmd="${${commands[(r)$words[2]:*]%%:*}}"
@@ -381,7 +386,7 @@ _arguments \
_meson-subprojects-$cmd
fi
else
- _message "unknown meson subproject command: $words[2]"
+ _message "unknown meson subprojects command: $words[2]"
fi
fi
@@ -392,7 +397,7 @@ _arguments \
local -a specs=(
"$__meson_cd"
'--clean[Clean the build directory]'
- '(-j --jobs)'{'-j','--jobs'}'=[the number fo work jobs to run (if supported)]:_guard "[0-9]#" "number of jobs"'
+ '(-j --jobs)'{'-j','--jobs'}'=[the number of work jobs to run (if supported)]:_guard "[0-9]#" "number of jobs"'
'(-l --load-average)'{'-l','--load-average'}'=[the system load average to try to maintain (if supported)]:_guard "[0-9]#" "load average"'
'(-v --verbose)'{'-v','--verbose'}'[Show more output]'
'--ninja-args=[Arguments to pass to ninja (only when using ninja)]'
@@ -403,6 +408,98 @@ _arguments \
"${(@)specs}"
}
+# TODO: implement rewrite sub-commands properly
+(( $+functions[_meson-rewrite-target] )) || _meson-rewrite-target() {
+_arguments \
+  '(: -)'{'--help','-h'}'[show a help message and quit]'
+}
+
+(( $+functions[_meson-rewrite-kwargs] )) || _meson-rewrite-kwargs() {
+_arguments \
+  '(: -)'{'--help','-h'}'[show a help message and quit]'
+}
+
+(( $+functions[_meson-rewrite-default-options] )) || _meson-rewrite-default-options() {
+_arguments \
+  '(: -)'{'--help','-h'}'[show a help message and quit]'
+}
+
+(( $+functions[_meson-rewrite-command] )) || _meson-rewrite-command() {
+_arguments \
+  '(: -)'{'--help','-h'}'[show a help message and quit]'
+}
+
+(( $+functions[_meson-rewrite] )) || _meson-rewrite() {
+ local -a commands=(
+ 'target:Modify a target'
+ 'kwargs:Modify keyword arguments'
+ 'default-options:Modify the project default options'
+ 'command:Execute a JSON array of commands'
+ )
+
+ if (( CURRENT == 2 )); then
+ _describe -t commands "Meson rewrite subcommands" commands
+ else
+ local curcontext="$curcontext"
+ cmd="${${commands[(r)$words[2]:*]%%:*}}"
+ if [[ $cmd == status ]]; then
+ _message "no options"
+ else
+ _meson-rewrite-$cmd
+ fi
+ fi
+
+}
+
+(( $+functions[_meson-devenv] )) || _meson-devenv() {
+ local curcontext="$curcontext"
+ local -a specs=(
+ "$__meson_cd"
+ '--clean=[Clean the build directory]'
+ '(-w workdir)'{'-w','--workdir'}'=[Directory to cd into before running (default: builddir, Since 1.0.0)]:target dir:_directories'
+ '--dump=[Only print required environment (Since 0.62.0) Takes an optional file path (Since 1.1.0)]:dump path:_files'
+ '--dump-format=[Format used with --dump (Since 1.1.0)]:format:(sh export vscode)'
+ )
+_arguments \
+ '(: -)'{'--help','-h'}'[show a help message and quit]' \
+ "${(@)specs}"
+}
+
+(( $+functions[_meson-env2mfile] )) || _meson-env2mfile() {
+ local curcontext="$curcontext"
+ local -a specs=(
+ '--debarch=[The dpkg architecture to generate.]'
+ '--gccsuffix=[A particular gcc version suffix if necessary.]'
+ '-o=[The output file.]:file:_files'
+ '--cross=[Generate a cross compilation file.]'
+ '--native=[Generate a native compilation file.]'
+ '--system=[Define system for cross compilation.]'
+ '--subsystem=[Define subsystem for cross compilation.]'
+ '--kernel=[Define kernel for cross compilation.]'
+ '--cpu=[Define cpu for cross compilation.]'
+ '--cpu-family=[Define cpu family for cross compilation.]'
+ '--endian=[Define endianness for cross compilation.]:endianness:(big little)'
+ )
+_arguments \
+ '(: -)'{'--help','-h'}'[show a help message and quit]' \
+ "${(@)specs}"
+}
+
+(( $+functions[_meson-format] )) || _meson-format() {
+ local curcontext="$curcontext"
+ local -a specs=(
+    '(-q --check-only)'{'-q','--check-only'}'[exit with 1 if files would be modified by meson format]'
+    '(-i --inplace)'{'-i','--inplace'}'[format files in-place]'
+    '(-r --recursive)'{'-r','--recursive'}'[recurse subdirs (requires --check-only or --inplace option)]'
+    '(-c --configuration)'{'-c','--configuration'}'=[read configuration from meson.format]:config file:_files'
+    '(-e --editor-config)'{'-e','--editor-config'}'[try to read configuration from .editorconfig]'
+    '(-o --output)'{'-o','--output'}'=[output file (implies having exactly one input)]:output file:_files'
+ )
+_arguments \
+ '(: -)'{'--help','-h'}'[show a help message and quit]' \
+ "${(@)specs}"
+}
+
if [[ $service != meson ]]; then
_call_function ret _$service
return ret
diff --git a/data/test.schema.json b/data/test.schema.json
index 705413363771..e87e7d03a9e5 100644
--- a/data/test.schema.json
+++ b/data/test.schema.json
@@ -178,6 +178,12 @@
"items": {
"type": "string"
}
+ },
+ "cleanup": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
}
}
}
diff --git a/docs/markdown/Build-options.md b/docs/markdown/Build-options.md
index 56b2693b8401..d615db6891b0 100644
--- a/docs/markdown/Build-options.md
+++ b/docs/markdown/Build-options.md
@@ -155,7 +155,7 @@ option('o5', type: 'boolean', deprecated: {'enabled': 'true', 'disabled': 'false
```
Since *0.63.0* the `deprecated` keyword argument can take the name of a new option
-that replace this option. In that case, setting a value on the deprecated option
+that replaces this option. In that case, setting a value on the deprecated option
will set the value on both the old and new names, assuming they accept the same
values.
diff --git a/docs/markdown/Builtin-options.md b/docs/markdown/Builtin-options.md
index 6adc4218bda1..f16a46ffebea 100644
--- a/docs/markdown/Builtin-options.md
+++ b/docs/markdown/Builtin-options.md
@@ -79,6 +79,7 @@ machine](#specifying-options-per-machine) section for details.
| genvslite {vs2022} | vs2022 | Setup multi-builtype ninja build directories and Visual Studio solution | no | no |
| buildtype {plain, debug,
debugoptimized, release, minsize, custom} | debug | Build type to use | no | no |
| debug | true | Enable debug symbols and other information | no | no |
+| default_both_libraries {shared, static, auto} | shared | Default library type for both_libraries | no | no |
| default_library {shared, static, both} | shared | Default library type | no | yes |
| errorlogs | true | Whether to print the logs from failing tests. | no | no |
| install_umask {preserve, 0000-0777} | 022 | Default umask to apply on permissions of installed files | no | no |
@@ -177,6 +178,19 @@ fails.
`vsenv` is `true` by default when using the `vs` backend.
+
+#### Details for `default_both_libraries`
+
+Since `1.6.0`, you can select the default type of library selected when using
+a `both_libraries` object. This can be either 'shared' (default value, compatible
+with previous meson versions), 'static', or 'auto'. With auto, the value from
+`default_library` option is used, unless it is 'both', in which case 'shared'
+is used instead.
+
+When `default_both_libraries` is 'auto', passing a [[@both_libs]] dependency
+in [[both_libraries]] will link the static dependency with the static lib,
+and the shared dependency with the shared lib.
+
## Base options
These are set in the same way as universal options, either by
diff --git a/docs/markdown/Commands.md b/docs/markdown/Commands.md
index 542f1b269d40..8e34800a44b3 100644
--- a/docs/markdown/Commands.md
+++ b/docs/markdown/Commands.md
@@ -225,6 +225,34 @@ DESTDIR=/path/to/staging/area meson install -C builddir
Since *0.60.0* `DESTDIR` and `--destdir` can be a path relative to build
directory. An absolute path will be set into environment when executing scripts.
+### reprotest
+
+*(since 1.6.0)*
+
+{{ reprotest_usage.inc }}
+
+Simple reproducible build tester that compiles the project twice and
+checks whether the end results are identical.
+
+This command must be run in the source root of the project you want to
+test.
+
+{{ reprotest_arguments.inc }}
+
+#### Examples
+
+ meson reprotest
+
+Builds the current project with its default settings.
+
+ meson reprotest --intermediaries -- --buildtype=debugoptimized
+
+Builds the target and also checks that all intermediate files like
+object files are also identical. All command line arguments after the
+`--` are passed directly to the underlying `meson` invocation. Only
+use option arguments, i.e. those that start with a dash; Meson sets
+directory arguments automatically.
+
### rewrite
*(since 0.50.0)*
diff --git a/docs/markdown/Configuration.md b/docs/markdown/Configuration.md
index 48f071e6c9e4..fd19e0a699bd 100644
--- a/docs/markdown/Configuration.md
+++ b/docs/markdown/Configuration.md
@@ -87,7 +87,7 @@ endif
## Configuring without an input file
If the input file is not defined then Meson will generate a header
-file all the entries in the configuration data object. The
+file with all the entries in the configuration data object. The
replacements are the same as when generating `#mesondefine` entries:
```meson
@@ -116,7 +116,7 @@ Will produce:
## Dealing with file encodings
The default Meson file encoding to configure files is utf-8. If you
-need to configure a file that is not utf-8 encoded the encoding
+need to configure a file that is not utf-8 encoded the `encoding`
keyword will allow you to specify which file encoding to use. It is
however strongly advised to convert your non utf-8 file to utf-8
whenever possible. Supported file encodings are those of python3, see
diff --git a/docs/markdown/Contributing.md b/docs/markdown/Contributing.md
index 731abc87d89a..8f796ab65530 100644
--- a/docs/markdown/Contributing.md
+++ b/docs/markdown/Contributing.md
@@ -150,8 +150,8 @@ Subsets of project tests can be selected with
time when only a certain part of Meson is being tested.
For example, a useful and easy contribution to Meson is making
sure the full set of compilers is supported. One could for example test
-various Fortran compilers by setting `FC=ifort` or `FC=flang` or similar
-with `./run_project_test.py --only fortran`.
+various Fortran compilers by setting `FC=ifort`, `FC=flang` or
+`FC=flang-new` or similar with `./run_project_test.py --only fortran`.
Some families of tests require a particular backend to run.
For example, all the CUDA project tests run and pass on Windows via
`./run_project_tests.py --only cuda --backend ninja`
diff --git a/docs/markdown/Cross-compilation.md b/docs/markdown/Cross-compilation.md
index ccbcfd363ef5..0cfef71b9cd0 100644
--- a/docs/markdown/Cross-compilation.md
+++ b/docs/markdown/Cross-compilation.md
@@ -14,6 +14,7 @@ targeting 64-bit Windows could be:
c = 'x86_64-w64-mingw32-gcc'
cpp = 'x86_64-w64-mingw32-g++'
ar = 'x86_64-w64-mingw32-ar'
+windres = 'x86_64-w64-mingw32-windres'
strip = 'x86_64-w64-mingw32-strip'
exe_wrapper = 'wine64'
diff --git a/docs/markdown/Cython.md b/docs/markdown/Cython.md
index 304275043dcc..1491dc4fc4e5 100644
--- a/docs/markdown/Cython.md
+++ b/docs/markdown/Cython.md
@@ -32,6 +32,18 @@ py.extension_module(
)
```
+You can pass arguments accepted by the `cython` CLI script with the
+`cython_args` argument:
+
+```meson
+py.extension_module(
+  'foo-bounds',
+ 'foo.pyx',
+ dependencies : dep_py,
+ cython_args : ['-Xboundscheck=False'],
+)
+```
+
## C++ intermediate support
*(New in 0.60.0)*
diff --git a/docs/markdown/Dependencies.md b/docs/markdown/Dependencies.md
index 6f975c456d9a..70cf60ea94f8 100644
--- a/docs/markdown/Dependencies.md
+++ b/docs/markdown/Dependencies.md
@@ -80,8 +80,8 @@ var = foo_dep.get_variable(cmake : 'CMAKE_VAR', pkgconfig : 'pkg-config-var', co
```
It accepts the keywords 'cmake', 'pkgconfig', 'pkgconfig_define',
-'configtool', 'internal', and 'default_value'. 'pkgconfig_define'
-works just like the 'define_variable' argument to
+'configtool', 'internal', 'system', and 'default_value'.
+'pkgconfig_define' works just like the 'define_variable' argument to
`get_pkgconfig_variable`. When this method is invoked the keyword
corresponding to the underlying type of the dependency will be used to
look for a variable. If that variable cannot be found or if the caller
@@ -924,7 +924,7 @@ your own risk.
## SDL2
-SDL2 can be located using `pkg-confg`, the `sdl2-config` config tool,
+SDL2 can be located using `pkg-config`, the `sdl2-config` config tool,
as an OSX framework, or `cmake`.
`method` may be `auto`, `config-tool`, `extraframework`,
@@ -1007,6 +1007,34 @@ version.
*New in 0.54.0* the `system` method.
+## DIA SDK
+
+*(added 1.6.0)*
+
+Microsoft's Debug Interface Access SDK (DIA SDK) is available only on Windows,
+when using msvc, clang-cl or clang compiler from Microsoft Visual Studio.
+
+The DIA SDK runtime is not statically linked to target. The default usage
+method requires the runtime DLL (msdiaXXX.dll) to be manually registered in the
+OS with the `regsvr32.exe` command, so it can be loaded using the `CoCreateInstance`
+Windows function.
+
+Alternatively, you can use meson to copy the DIA runtime DLL to your build
+directory, and load it dynamically using `NoRegCoCreate` function provided by
+the DIA SDK. To facilitate this, you can read DLL path from dependency's
+variable 'dll' and use fs module to copy it. Example:
+
+```meson
+dia = dependency('diasdk', required: true)
+fs = import('fs')
+fs.copyfile(dia.get_variable('dll'))
+
+conf = configuration_data()
+conf.set('msdia_dll_name', fs.name(dia.get_variable('dll')))
+```
+
+Only the major version is available (e.g. version is `14` for msdia140.dll).
+
1: They may appear to be case-insensitive, if the
underlying file system happens to be case-insensitive.
diff --git a/docs/markdown/Include-directories.md b/docs/markdown/Include-directories.md
index 6dfed5e48ae2..f9850ac91451 100644
--- a/docs/markdown/Include-directories.md
+++ b/docs/markdown/Include-directories.md
@@ -27,4 +27,6 @@ proper compiler flags to make it all work.
Another thing to note is that `include_directories` adds both the
source directory and corresponding build directory to include path, so
-you don't have to care.
+you don't have to care. If it turns out you don't want it after all, this can
+be disabled with the `implicit_include_directories` argument to the [build
+function](Reference-manual_functions.md) you use.
diff --git a/docs/markdown/Python-module.md b/docs/markdown/Python-module.md
index c02eed91d596..f3ee9ffc5a98 100644
--- a/docs/markdown/Python-module.md
+++ b/docs/markdown/Python-module.md
@@ -121,6 +121,8 @@ Additionally, the following diverge from [[shared_module]]'s default behavior:
- `gnu_symbol_visibility`: if unset, it will default to `'hidden'` on versions
of Python that support this (the python headers define `PyMODINIT_FUNC` has
default visibility).
+
+Note that Cython support uses `extension_module`, see [the reference for Cython](Cython.md).
*since 0.63.0* `extension_module` automatically adds a dependency to the library
if one is not explicitly provided. To support older versions, the user may need to
diff --git a/docs/markdown/Qt6-module.md b/docs/markdown/Qt6-module.md
index 0a453dd41443..7b6f94715882 100644
--- a/docs/markdown/Qt6-module.md
+++ b/docs/markdown/Qt6-module.md
@@ -20,7 +20,7 @@ Compiles Qt's resources collection files (.qrc) into c++ files for compilation.
It takes no positional arguments, and the following keyword arguments:
- `name` (string | empty): if provided a single .cpp file will be generated,
and the output of all qrc files will be combined in this file, otherwise
- each qrc file be written to it's own cpp file.
+ each qrc file be written to its own cpp file.
- `sources` (File | string | custom_target | custom_target index | generator_output)[]:
A list of sources to be transpiled. Required, must have at least one source
*New in 0.60.0*: support for custom_target, custom_target_index, and generator_output.
@@ -111,7 +111,7 @@ This method takes the following keyword arguments:
directory. For instance, when a file called `subdir/one.input` is processed
it generates a file `{target private directory}/subdir/one.out` when `true`,
and `{target private directory}/one.out` when `false` (default).
-
+
It returns an array of targets and sources to pass to a compilation target.
## compile_translations
@@ -156,6 +156,9 @@ This method takes the following keyword arguments:
`true` or an enabled [`feature`](Build-options.md#features) and some tools are
missing Meson will abort.
- `method` string: The method to use to detect Qt, see [[dependency]]
+- `tools`: string[]: *Since 1.6.0*. List of tools to check. Testable tools
+ are `moc`, `uic`, `rcc` and `lrelease`. By default `tools` is set to `['moc',
+ 'uic', 'rcc', 'lrelease']`
## Dependencies
diff --git a/docs/markdown/Reference-tables.md b/docs/markdown/Reference-tables.md
index 2357ff459af4..a5d0d5cdcc18 100644
--- a/docs/markdown/Reference-tables.md
+++ b/docs/markdown/Reference-tables.md
@@ -15,7 +15,7 @@ These are return values of the `get_id` (Compiler family) and
| clang-cl | The Clang compiler (MSVC compatible driver) | msvc |
| dmd | D lang reference compiler | |
| emscripten| Emscripten WASM compiler | |
-| flang | Flang Fortran compiler | |
+| flang | Classic Flang Fortran compiler | |
| g95 | The G95 Fortran compiler | |
| gcc | The GNU Compiler Collection | gcc |
| intel | Intel compiler (Linux and Mac) | gcc |
@@ -24,6 +24,7 @@ These are return values of the `get_id` (Compiler family) and
| intel-llvm-cl | Intel oneAPI LLVM-based compiler (Windows) | msvc |
| lcc | Elbrus C/C++/Fortran Compiler | |
| llvm | LLVM-based compiler (Swift, D) | |
+| llvm-flang| Flang Fortran compiler (LLVM-based) | |
| mono | Xamarin C# compiler | |
| mwccarm | Metrowerks C/C++ compiler for Embedded ARM | |
| mwcceppc | Metrowerks C/C++ compiler for Embedded PowerPC | |
@@ -61,6 +62,7 @@ These are return values of the `get_linker_id` method in a compiler object.
| ld.mold | The fast MOLD linker |
| ld.solaris | Solaris and illumos |
| ld.wasm | emscripten's wasm-ld linker |
+| ld.zigcc | The Zig linker (C/C++ frontend; GNU-like) |
| ld64 | Apple ld64 |
| ld64.lld | The LLVM linker, with the ld64 interface |
| link | MSVC linker |
@@ -154,7 +156,7 @@ These are provided by the `.system()` method call.
| Value | Comment |
| ----- | ------- |
| android | By convention only, subject to change |
-| cygwin | The Cygwin environment for Windows |
+| cygwin | Cygwin or MSYS2 environment on Windows |
| darwin | Either OSX or iOS |
| dragonfly | DragonFly BSD |
| emscripten | Emscripten's JavaScript environment |
@@ -164,7 +166,7 @@ These are provided by the `.system()` method call.
| linux | |
| netbsd | |
| openbsd | |
-| windows | Any version of Windows |
+| windows | Native Windows (not Cygwin or MSYS2) |
| sunos | illumos and Solaris |
Any string not listed above is not guaranteed to remain stable in
@@ -180,6 +182,7 @@ Native names as returned by the `.kernel()` method.
| freebsd | |
| openbsd | |
| netbsd | |
+| gnu | GNU Hurd |
| nt | |
| xnu | Kernel of various Apple OSes |
| illumos | Kernel derived from OpenSolaris by community efforts |
diff --git a/docs/markdown/Release-notes-for-0.51.0.md b/docs/markdown/Release-notes-for-0.51.0.md
index 635fbbdda773..d4eaa5e526b9 100644
--- a/docs/markdown/Release-notes-for-0.51.0.md
+++ b/docs/markdown/Release-notes-for-0.51.0.md
@@ -143,7 +143,7 @@ then invoke Meson as `meson setup builddir/ -Dcmake_prefix_path=/tmp/dep`
You can tag a test as needing to fail like this:
```meson
-test('shoulfail', exe, should_fail: true)
+test('shouldfail', exe, should_fail: true)
```
If the test passes the problem is reported in the error logs but due
diff --git a/docs/markdown/Release-notes-for-0.63.0.md b/docs/markdown/Release-notes-for-0.63.0.md
index 3b47ff4eb634..ed0239e5cb20 100644
--- a/docs/markdown/Release-notes-for-0.63.0.md
+++ b/docs/markdown/Release-notes-for-0.63.0.md
@@ -47,7 +47,7 @@ ptr_size = meson.get_compiler('d').sizeof('void*')
## Deprecate an option and replace it with a new one
The `deprecated` keyword argument can now take the name of a new option
-that replace this option. In that case, setting a value on the deprecated option
+that replaces this option. In that case, setting a value on the deprecated option
will set the value on both the old and new names, assuming they accept the same
values.
diff --git a/docs/markdown/Release-notes-for-1.0.0.md b/docs/markdown/Release-notes-for-1.0.0.md
index 42c05ddab3f9..2399373b1975 100644
--- a/docs/markdown/Release-notes-for-1.0.0.md
+++ b/docs/markdown/Release-notes-for-1.0.0.md
@@ -59,7 +59,7 @@ Any include paths in these dependencies will be passed to the underlying call to
## String arguments to the rust.bindgen include_directories argument
Most other cases of include_directories accept strings as well as
-`IncludeDirectory` objects, so lets do that here too for consistency.
+`IncludeDirectory` objects, so let's do that here too for consistency.
## The Rust module is stable
diff --git a/docs/markdown/Release-notes-for-1.2.0.md b/docs/markdown/Release-notes-for-1.2.0.md
index 23312164e3f7..6e321b6d42d4 100644
--- a/docs/markdown/Release-notes-for-1.2.0.md
+++ b/docs/markdown/Release-notes-for-1.2.0.md
@@ -49,7 +49,7 @@ directory, instead of using Visual Studio's native engine.
## More data in introspection files
- Used compilers are listed in `intro-compilers.json`
-- Informations about `host`, `build` and `target` machines
+- Information about `host`, `build` and `target` machines
are lister in `intro-machines.json`
- `intro-dependencies.json` now includes internal dependencies,
and relations between dependencies.
diff --git a/docs/markdown/Release-notes-for-1.3.0.md b/docs/markdown/Release-notes-for-1.3.0.md
index cf6ad46ae064..0c7660f230ca 100644
--- a/docs/markdown/Release-notes-for-1.3.0.md
+++ b/docs/markdown/Release-notes-for-1.3.0.md
@@ -39,7 +39,7 @@ about its value.
## [[configure_file]] now has a `macro_name` parameter.
-This new paramater, `macro_name` allows C macro-style include guards to be added
+This new parameter, `macro_name` allows C macro-style include guards to be added
to [[configure_file]]'s output when a template file is not given. This change
simplifies the creation of configure files that define macros with dynamic names
and want the C-style include guards.
@@ -89,8 +89,8 @@ you to set the environment in which the generator will process inputs.
In previous versions of meson, a `meson.build` file like this:
```
-exectuable('foo', 'main.c')
-exectuable('foo', 'main.c', name_suffix: 'bar')
+executable('foo', 'main.c')
+executable('foo', 'main.c', name_suffix: 'bar')
```
would result in a configure error because meson internally used
diff --git a/docs/markdown/Release-notes-for-1.5.0.md b/docs/markdown/Release-notes-for-1.5.0.md
index 7dfea9af2b13..794efe47c9c1 100644
--- a/docs/markdown/Release-notes-for-1.5.0.md
+++ b/docs/markdown/Release-notes-for-1.5.0.md
@@ -19,7 +19,7 @@ Cargo dependencies names are now in the format `--rs`:
* `x.y.z` -> 'x'
* `0.x.y` -> '0.x'
* `0.0.x` -> '0'
- It allows to make different dependencies for uncompatible versions of the same
+ It allows to make different dependencies for incompatible versions of the same
crate.
- `-rs` suffix is added to distinguish from regular system dependencies, for
example `gstreamer-1.0` is a system pkg-config dependency and `gstreamer-0.22-rs`
diff --git a/docs/markdown/Release-notes-for-1.6.0.md b/docs/markdown/Release-notes-for-1.6.0.md
new file mode 100644
index 000000000000..9e5cea6d17d5
--- /dev/null
+++ b/docs/markdown/Release-notes-for-1.6.0.md
@@ -0,0 +1,175 @@
+---
+title: Release 1.6.0
+short-description: Release notes for 1.6.0
+...
+
+# New features
+
+Meson 1.6.0 was released on 20 October 2024
+## Support for OpenXL compiler in AIX.
+
+The OpenXL compiler is now supported from Meson 1.6.0 onwards.
+So currently, on the AIX operating system, we support the GCC and OpenXL compilers for the Meson build system.
+
+Both the compilers will archive shared libraries and generate a shared object
+for a shared module while using Meson in AIX.
+
+## `alias_target` of `both_libraries`
+
+Previously, when passing a [[@both_libs]] object to [[alias_target]], the alias
+would only point to the shared library. It now points to both the static and the
+shared library.
+
+## Default to printing deprecations when no minimum version is specified.
+
+For a long time, the [[project]] function has supported specifying the minimum
+`meson_version:` needed by a project. When this is used, deprecated features
+from before that version produce warnings, as do features which aren't
+available in all supported versions.
+
+When no minimum version was specified, meson didn't warn you even about
+deprecated functionality that might go away in an upcoming semver major release
+of meson.
+
+Now, meson will treat an unspecified minimum version following semver:
+
+- For new features introduced in the current meson semver major cycle
+ (currently: all features added since 1.0) a warning is printed. Features that
+ have been available since the initial 1.0 release are assumed to be widely
+ available.
+
+- For features that have been deprecated by any version of meson, a warning is
+ printed. Since no minimum version was specified, it is assumed that the
+ project wishes to follow the latest and greatest functionality.
+
+These warnings will overlap for functionality that was both deprecated and
+replaced with an alternative in the current release cycle. The combination
+means that projects without a minimum version specified are assumed to want
+broad compatibility with the current release cycle (1.x).
+
+Projects that specify a minimum `meson_version:` will continue to only receive
+actionable warnings based on their current minimum version.
+
+## Cargo subprojects are experimental
+
+Cargo subprojects were intended to be experimental with no stability guarantees.
+That notice was unfortunately missing from the documentation. Meson will now start
+warning about usage of experimental features, and future releases might make
+breaking changes.
+
+This is aligned with our general policy regarding [mixing build systems](Mixing-build-systems.md).
+
+## Dependencies from CMake subprojects now use only PUBLIC link flags
+
+Any [[@dep]] obtained from a CMake subproject (or `.wrap` with `method = cmake`)
+now only includes link flags marked in CMake as `PUBLIC` or `INTERFACE`.
+Flags marked as `PRIVATE` are now only applied when building the subproject
+library and not when using it as a dependency. This better matches how CMake
+handles link flags and fixes link errors when using some CMake projects as
+subprojects.
+
+## New built-in option for default both_libraries
+
+`both_libraries` targets used to be considered as a shared library by default.
+There is now the `default_both_libraries` option to change this default.
+
+When `default_both_libraries` is 'auto', [[both_libraries]] with dependencies
+that are [[@both_libs]] themselves will link with the same kind of library.
+For example, if `libA` is a [[@both_libs]] and `libB` is a [[@both_libs]]
+linked with `libA` (or with an internal dependency on `libA`),
+the static lib of `libB` will link with the static lib of `libA`, and the
+shared lib of `libB` will link with the shared lib of `libA`.
+
+## New `as_static` and `as_shared` methods on internal dependencies
+
+[[@dep]] object returned by [[declare_dependency]] now has `.as_static()` and
+`.as_shared()` methods, to convert to a dependency that prefers the `static`
+or the `shared` version of the linked [[@both_libs]] target.
+
+When the same dependency is used without those methods, the
+`default_both_libraries` option determines which version is used.
+
+## Support for DIA SDK
+
+Added support for the Windows Debug Interface Access SDK (DIA SDK) dependency. It allows reading MSVC debugging information (.PDB format). This dependency can only be used on Windows, with the msvc, clang or clang-cl compiler.
+
+## Support for LLVM-based flang compiler
+
+Added basic handling for the [flang](https://flang.llvm.org/docs/) compiler
+that's now part of LLVM. It is the successor of another compiler named
+[flang](https://github.com/flang-compiler/flang) by largely the same
+group of developers, who now refer to the latter as "classic flang".
+
+Meson already supports classic flang, and the LLVM-based flang now
+uses the compiler-id `'llvm-flang'`.
+
+## nvc and nvc++ now support setting std
+
+The following standards are available for nvc: c89, c90, c99, c11,
+c17, c18, gnu90, gnu89, gnu99, gnu11, gnu17, gnu18. For nvc++:
+c++98, c++03, c++11, c++14, c++17, c++20, c++23, gnu++98, gnu++03,
+gnu++11, gnu++14, gnu++17, gnu++20
+
+## Tools can be selected when calling `has_tools()` on the Qt modules
+
+When checking for the presence of Qt tools, you can now explicitly ask Meson
+which tools you need. This is particularly useful when you do not need
+`lrelease` because you are not shipping any translations. For example:
+
+```meson
+qt6_mod = import('qt6')
+qt6_mod.has_tools(required: true, tools: ['moc', 'uic', 'rcc'])
+```
+
+Valid tools are `moc`, `uic`, `rcc` and `lrelease`.
+
+## Simple tool to test build reproducibility
+
+Meson now ships with a command for testing whether your project can be
+[built reproducibly](https://reproducible-builds.org/). It can be used
+by running a command like the following in the source root of your
+project:
+
+ meson reprotest --intermediaries -- --buildtype=debugoptimized
+
+All command line options after the `--` are passed to the build
+invocations directly.
+
+This tool is not meant to be exhaustive, but instead easy and
+convenient to run. It will detect some but definitely not all
+reproducibility issues.
+
+## Support for variable in system dependencies
+
+System Dependency method `get_variable()` now supports `system` variable.
+
+## test() and benchmark() functions accept new types
+
+`test` and `benchmark` now accept ExternalPrograms (as returned by
+`find_program`) in the `args` list. This can be useful where the test
+executable is a wrapper which invokes another program given as an
+argument.
+
+```meson
+test('some_test', find_program('sudo'), args : [ find_program('sh'), 'script.sh' ])
+```
+
+## Zig 0.11 can be used as a C/C++ compiler frontend
+
+Zig offers
+[a C/C++ frontend](https://andrewkelley.me/post/zig-cc-powerful-drop-in-replacement-gcc-clang.html) as a drop-in replacement for Clang. It worked fine with Meson up to Zig 0.10. Since 0.11, Zig's
+dynamic linker reports itself as `zig ld`, which wasn't known to Meson. Meson now correctly handles
+Zig's linker.
+
+You can use Zig's frontend via a [machine file](Machine-files.md):
+
+```ini
+[binaries]
+c = ['zig', 'cc']
+cpp = ['zig', 'c++']
+ar = ['zig', 'ar']
+ranlib = ['zig', 'ranlib']
+lib = ['zig', 'lib']
+dlltool = ['zig', 'dlltool']
+```
+
diff --git a/docs/markdown/Rust.md b/docs/markdown/Rust.md
index 151aac080464..d30fe68373cc 100644
--- a/docs/markdown/Rust.md
+++ b/docs/markdown/Rust.md
@@ -5,6 +5,26 @@ short-description: Working with Rust in Meson
# Using Rust with Meson
+## Avoid using `extern crate`
+
+Meson can't track dependency information for crates linked by rustc as
+a result of `extern crate` statements in Rust source code. If your
+crate dependencies are properly expressed in Meson, there should be no
+need for `extern crate` statements in your Rust code, as long as you use the
+Rust 2018 edition or later. This means adding `rust_std=2018` (or later) to the
+`project(default_options)` argument.
+
+An example of the problems with `extern crate` is that if you delete a
+crate from a Meson build file, other crates that depend on that crate
+using `extern crate` might continue linking with the leftover rlib of
+the deleted crate rather than failing to build, until the build
+directory is cleaned.
+
+This limitation could be resolved in future with rustc improvements,
+for example if the [`-Z
+binary-dep-depinfo`](https://github.com/rust-lang/rust/issues/63012)
+feature is stabilized.
+
## Mixing Rust and non-Rust sources
Meson currently does not support creating a single target with Rust and non Rust
diff --git a/docs/markdown/Subprojects.md b/docs/markdown/Subprojects.md
index 78239b9fc4d4..0375b5698160 100644
--- a/docs/markdown/Subprojects.md
+++ b/docs/markdown/Subprojects.md
@@ -300,7 +300,7 @@ types. Multiple types can be set as comma separated list e.g. `--types
git,file`.
*Since 0.56.0* If the subcommand fails on any subproject an error code
-is returned at the end instead of retuning success.
+is returned at the end instead of returning success.
### Download subprojects
diff --git a/docs/markdown/Tutorial.md b/docs/markdown/Tutorial.md
index 4a2725503bea..7aff164a266a 100644
--- a/docs/markdown/Tutorial.md
+++ b/docs/markdown/Tutorial.md
@@ -143,7 +143,11 @@ int main(int argc, char **argv)
GtkApplication *app;
int status;
+#if GLIB_CHECK_VERSION(2, 74, 0)
app = gtk_application_new(NULL, G_APPLICATION_DEFAULT_FLAGS);
+#else
+ app = gtk_application_new(NULL, G_APPLICATION_FLAGS_NONE);
+#endif
g_signal_connect(app, "activate", G_CALLBACK(activate), NULL);
status = g_application_run(G_APPLICATION(app), argc, argv);
g_object_unref(app);
diff --git a/docs/markdown/Unit-tests.md b/docs/markdown/Unit-tests.md
index b5d3a1b81831..898366095b05 100644
--- a/docs/markdown/Unit-tests.md
+++ b/docs/markdown/Unit-tests.md
@@ -89,6 +89,10 @@ variable `MESON_TESTTHREADS` like this.
$ MESON_TESTTHREADS=5 meson test
```
+Setting `MESON_TESTTHREADS` to 0 enables the default behavior (core
+count), whereas setting an invalid value results in setting the job
+count to 1.
+
## Priorities
*(added in version 0.52.0)*
diff --git a/docs/markdown/Users.md b/docs/markdown/Users.md
index 70060ab05247..0b1de42f4daa 100644
--- a/docs/markdown/Users.md
+++ b/docs/markdown/Users.md
@@ -62,6 +62,7 @@ topic](https://github.com/topics/meson).
- [GNOME Software](https://gitlab.gnome.org/GNOME/gnome-software), an app store for GNOME
- [GNOME Twitch](https://github.com/vinszent/gnome-twitch), an app for viewing Twitch streams on GNOME desktop
- [GNOME Usage](https://gitlab.gnome.org/GNOME/gnome-usage), a GNOME application for visualizing system resources
+ - [GNOME Web](https://gitlab.gnome.org/GNOME/epiphany), a browser for a simple, clean, beautiful view of the web
- [GNU FriBidi](https://github.com/fribidi/fribidi), the open source implementation of the Unicode Bidirectional Algorithm
- [Graphene](https://ebassi.github.io/graphene/), a thin type library for graphics
- [Grilo](https://git.gnome.org/browse/grilo) and [Grilo plugins](https://git.gnome.org/browse/grilo-plugins), the Grilo multimedia framework
diff --git a/docs/markdown/Using-with-Visual-Studio.md b/docs/markdown/Using-with-Visual-Studio.md
index 2680e8baf989..c6a0806a5bd1 100644
--- a/docs/markdown/Using-with-Visual-Studio.md
+++ b/docs/markdown/Using-with-Visual-Studio.md
@@ -5,9 +5,17 @@ short-description: How to use Meson in Visual Studio
# Using with Visual Studio
In order to generate Visual Studio projects, Meson needs to know the
-settings of your installed version of Visual Studio. The only way to
-get this information is to run Meson under the Visual Studio Command
-Prompt.
+settings of your installed version of Visual Studio.
+
+
+The traditional way to get this information is to run Meson (or any build
+system) under the Visual Studio Command Prompt.
+
+If no Visual Studio Command Prompt was detected, and no mingw compilers are
+detected either, meson will attempt to find "a" Visual Studio installation for
+you automatically, by asking Microsoft's "vswhere" program. If you want to
+ignore mingw compilers, pass the `--vsenv` option on the meson command line.
+If you need to guarantee a specific Visual Studio version, set it up manually.
You can always find the Visual Studio Command Prompt by searching from
the Start Menu. However, the name is different for each Visual Studio
diff --git a/docs/markdown/Wrap-dependency-system-manual.md b/docs/markdown/Wrap-dependency-system-manual.md
index 3983d28771e9..d84e4aa186d2 100644
--- a/docs/markdown/Wrap-dependency-system-manual.md
+++ b/docs/markdown/Wrap-dependency-system-manual.md
@@ -297,6 +297,9 @@ fallback to use the subproject, assuming it uses
### CMake wraps
+**Note**: This is experimental and has no backwards or forwards compatibility guarantees.
+See [Meson's rules on mixing build systems](Mixing-build-systems.md).
+
Since the CMake module does not know the public name of the provided
dependencies, a CMake `.wrap` file cannot use the `dependency_names = foo`
syntax. Instead, the `dep_name = _dep` syntax should be used, where
@@ -316,6 +319,9 @@ foo-bar-1.0 = foo_bar_dep
```
### Cargo wraps
+**Note**: This is experimental and has no backwards or forwards compatibility guarantees.
+See [Meson's rules on mixing build systems](Mixing-build-systems.md).
+
Cargo subprojects automatically override the `--rs` dependency
name:
- `package_name` is defined in `[package] name = ...` section of the `Cargo.toml`.
@@ -323,7 +329,7 @@ name:
* `x.y.z` -> 'x'
* `0.x.y` -> '0.x'
* `0.0.x` -> '0'
- It allows to make different dependencies for uncompatible versions of the same
+ It allows to make different dependencies for incompatible versions of the same
crate.
- `-rs` suffix is added to distinguish from regular system dependencies, for
example `gstreamer-1.0` is a system pkg-config dependency and `gstreamer-0.22-rs`
@@ -359,7 +365,7 @@ the main project depends on `foo-1-rs` and `bar-1-rs`, and they both depend on
configure `common-rs` with a set of features. Later, when `bar-1-rs` does a lookup
for `common-1-rs` it has already been configured and the set of features cannot be
changed. If `bar-1-rs` wants extra features from `common-1-rs`, Meson will error out.
-It is currently the responsability of the main project to resolve those
+It is currently the responsibility of the main project to resolve those
issues by enabling extra features on each subproject:
```meson
project(...,
@@ -379,7 +385,7 @@ Some naming conventions need to be respected:
Since *1.5.0* Cargo wraps can also be provided with `Cargo.lock` file at the root
of (sub)project source tree. Meson will automatically load that file and convert
-it into a serie of wraps definitions.
+it into a series of wraps definitions.
## Using wrapped projects
diff --git a/docs/refman/generatormd.py b/docs/refman/generatormd.py
index 2c80ab02308e..854712d53f88 100644
--- a/docs/refman/generatormd.py
+++ b/docs/refman/generatormd.py
@@ -94,7 +94,7 @@ def _gen_object_file_id(self, obj: Object) -> str:
def _link_to_object(self, obj: T.Union[Function, Object], in_code_block: bool = False) -> str:
'''
- Generate a palaceholder tag for the function/method/object documentation.
+ Generate a placeholder tag for the function/method/object documentation.
This tag is then replaced in the custom hotdoc plugin.
'''
prefix = '#' if in_code_block else ''
diff --git a/docs/sitemap.txt b/docs/sitemap.txt
index 218d1a634230..2e80840578f6 100644
--- a/docs/sitemap.txt
+++ b/docs/sitemap.txt
@@ -89,6 +89,7 @@ index.md
Wrap-best-practices-and-tips.md
Shipping-prebuilt-binaries-as-wraps.md
Release-notes.md
+ Release-notes-for-1.6.0.md
Release-notes-for-1.5.0.md
Release-notes-for-1.4.0.md
Release-notes-for-1.3.0.md
diff --git a/docs/yaml/builtins/build_machine.yaml b/docs/yaml/builtins/build_machine.yaml
index 7b7fcd208ec5..b79525fd5053 100644
--- a/docs/yaml/builtins/build_machine.yaml
+++ b/docs/yaml/builtins/build_machine.yaml
@@ -22,7 +22,7 @@ methods:
- name: cpu
returns: str
- description: Returns a more specific CPU name, such as `i686`, `amd64`, etc.
+ description: Returns a more specific CPU name, such as `i686`, `armv8l`, etc.
- name: system
returns: str
@@ -35,3 +35,24 @@ methods:
- name: endian
returns: str
description: returns `'big'` on big-endian systems and `'little'` on little-endian systems.
+
+- name: kernel
+ returns: str
+ since: 1.2.0
+ description: |
+ Returns the name of the Operating System Kernel, such as `linux`, `xnu`, and
+ `nt`. [See here for more complete
+ list](Reference-tables.md#kernel-names-since-120). These are more specific
+ than `system`. If this cannot be detected (for the build machine), or is not
+ set in the cross file (for the host machine when cross compiling), calling
+ this method is fatal.
+
+- name: subsystem
+ returns: str
+ since: 1.2.0
+ description: |
+ Returns the variant of the operating system, such as `ios` and `tvos`. [See
+ here for more complete list](Reference-tables.md#kernel-names-since-120).
+ These are more specific than `system`. If this cannot be detected (for the
+ build machine), or is not set in the cross file (for the host machine when
+ cross compiling), calling this method is fatal.
diff --git a/docs/yaml/functions/_build_target_base.yaml b/docs/yaml/functions/_build_target_base.yaml
index 1db49a5315a6..1721b29cfe5a 100644
--- a/docs/yaml/functions/_build_target_base.yaml
+++ b/docs/yaml/functions/_build_target_base.yaml
@@ -256,8 +256,11 @@ kwargs:
do not support GNU visibility arguments.
d_import_dirs:
- type: list[str]
- description: List of directories to look in for string imports used in the D programming language.
+ type: list[inc | str]
+ since: 0.62.0
+ description: |
+ the directories to add to the string search path (i.e. `-J` switch for DMD).
+ Must be [[@inc]] objects or plain strings.
d_unittest:
type: bool
diff --git a/docs/yaml/functions/alias_target.yaml b/docs/yaml/functions/alias_target.yaml
index bc14f84da2fe..9833569018f7 100644
--- a/docs/yaml/functions/alias_target.yaml
+++ b/docs/yaml/functions/alias_target.yaml
@@ -9,6 +9,9 @@ description: |
are built. Dependencies can be any build target. Since 0.60.0, this includes
[[@run_tgt]].
+ *Since 1.6.0* passing a [[@both_libs]] object builds both shared and
+ static libraries.
+
posargs:
target_name:
type: str
diff --git a/docs/yaml/functions/benchmark.yaml b/docs/yaml/functions/benchmark.yaml
index 0323b26e41b9..7a555a42d6a0 100644
--- a/docs/yaml/functions/benchmark.yaml
+++ b/docs/yaml/functions/benchmark.yaml
@@ -28,7 +28,7 @@ posargs:
kwargs:
args:
- type: list[str | file | tgt]
+ type: list[str | file | tgt | external_program]
description: Arguments to pass to the executable
env:
diff --git a/docs/yaml/functions/build_target.yaml b/docs/yaml/functions/build_target.yaml
index 74d45f0070de..a56fe75feae0 100644
--- a/docs/yaml/functions/build_target.yaml
+++ b/docs/yaml/functions/build_target.yaml
@@ -32,7 +32,7 @@ description: |
The returned object also has methods that are documented in [[@build_tgt]].
- *"jar" is deprecated because it is fundementally a different thing than the
+ *"jar" is deprecated because it is fundamentally a different thing than the
other build_target types.
posargs_inherit: _build_target_base
diff --git a/docs/yaml/functions/dependency.yaml b/docs/yaml/functions/dependency.yaml
index 74981b2d9f89..a19deab8fa22 100644
--- a/docs/yaml/functions/dependency.yaml
+++ b/docs/yaml/functions/dependency.yaml
@@ -169,7 +169,6 @@ kwargs:
static:
type: bool
- default: false
description: |
Tells the dependency provider to try to get static
libraries instead of dynamic ones (note that this is not supported
@@ -178,6 +177,9 @@ kwargs:
*Since 0.60.0* it also sets `default_library` option accordingly on the fallback
subproject if it was not set explicitly in `default_options` keyword argument.
+ *Since 0.63.0* when the `prefer_static` option is set to `true` the default
+ value is `true` otherwise the default value is `false`.
+
version:
type: list[str] | str
since: 0.37.0
diff --git a/docs/yaml/functions/install_data.yaml b/docs/yaml/functions/install_data.yaml
index ff4f3363d6ba..9ed09a75ccc4 100644
--- a/docs/yaml/functions/install_data.yaml
+++ b/docs/yaml/functions/install_data.yaml
@@ -13,7 +13,7 @@ varargs:
warnings:
- the `install_mode` kwarg ignored integer values between 0.59.0 -- 1.1.0.
- an omitted `install_dir` kwarg did not work correctly inside of a subproject until 1.3.0.
- - an omitted `install_dir` kwarg did not work correctly when combined with the `preserve_path` kwarg untill 1.3.0.
+ - an omitted `install_dir` kwarg did not work correctly when combined with the `preserve_path` kwarg until 1.3.0.
kwargs:
install_dir:
diff --git a/docs/yaml/functions/subdir_done.yaml b/docs/yaml/functions/subdir_done.yaml
index 1e2a3493ad83..bf8dc51411f3 100644
--- a/docs/yaml/functions/subdir_done.yaml
+++ b/docs/yaml/functions/subdir_done.yaml
@@ -5,11 +5,11 @@ description: |
Stops further interpretation of the Meson script file from the point
of the invocation. All steps executed up to this point are valid and
will be executed by Meson. This means that all targets defined before
- the call of [[subdir_done]] will be build.
+ the call of [[subdir_done]] will be built.
- If the current script was called by `subdir` the execution returns to
+ If the current script was called by `subdir`, the execution returns to
the calling directory and continues as if the script had reached the
- end. If the current script is the top level script Meson configures
+ end. If the current script is the top level script, Meson configures
the project as defined up to this point.
example: |
@@ -20,5 +20,5 @@ example: |
executable('exe2', 'exe2.cpp')
```
- The executable `exe1` will be build, while the executable `exe2` is not
- build.
+ The executable `exe1` will be built, while the executable `exe2` is not
+ built.
diff --git a/docs/yaml/objects/compiler.yaml b/docs/yaml/objects/compiler.yaml
index cd988a6834cd..43831d2c1d81 100644
--- a/docs/yaml/objects/compiler.yaml
+++ b/docs/yaml/objects/compiler.yaml
@@ -612,7 +612,7 @@ methods:
# kwargs:
# checked:
# type: str
- # sinec: 0.59.0
+ # since: 0.59.0
# default: "'off'"
# description: |
# Supported values:
diff --git a/docs/yaml/objects/dep.yaml b/docs/yaml/objects/dep.yaml
index 76543d2c1ab9..ffd19f7976b2 100644
--- a/docs/yaml/objects/dep.yaml
+++ b/docs/yaml/objects/dep.yaml
@@ -191,7 +191,7 @@ methods:
since: 0.58.0
description: |
This argument is used as a default value
- for `cmake`, `pkgconfig`, `configtool` and `internal` keyword
+ for `cmake`, `pkgconfig`, `configtool`, `internal` and `system` keyword
arguments. It is useful in the common case where `pkgconfig` and `internal`
use the same variable name, in which case it's easier to write `dep.get_variable('foo')`
instead of `dep.get_variable(pkgconfig: 'foo', internal: 'foo')`.
@@ -214,6 +214,11 @@ methods:
since: 0.54.0
description: The internal variable name
+ system:
+ type: str
+ since: 1.6.0
+ description: The system variable name
+
default_value:
type: str
description: The default value to return when the variable does not exist
@@ -221,3 +226,28 @@ methods:
pkgconfig_define:
type: list[str]
description: See [[dep.get_pkgconfig_variable]]
+
+ - name: as_static
+ returns: dep
+ since: 1.6.0
+ description: |
+ Only for dependencies created with [[declare_dependency]],
+ returns a copy of the dependency object that prefers the `static` version
+ of [[both_libraries]].
+ kwargs:
+ recursive:
+ type: bool
+ description: If true, this is recursively applied to dependencies
+
+ - name: as_shared
+ returns: dep
+ since: 1.6.0
+ description: |
+ Only for dependencies created with [[declare_dependency]],
+ returns a copy of the dependency object that prefers the `shared` version
+ of [[both_libraries]].
+ kwargs:
+ recursive:
+ type: bool
+ description: If true, this is recursively applied to dependencies
+
\ No newline at end of file
diff --git a/docs/yaml/objects/feature.yaml b/docs/yaml/objects/feature.yaml
index 3e0ae69df1e0..fad6cd5e7bda 100644
--- a/docs/yaml/objects/feature.yaml
+++ b/docs/yaml/objects/feature.yaml
@@ -89,7 +89,7 @@ methods:
```
if get_option('directx').require(host_machine.system() == 'windows',
- error_message: 'DirectX only available on Windows').allowed() then
+ error_message: 'DirectX only available on Windows').allowed()
src += ['directx.c']
config.set10('HAVE_DIRECTX', true)
endif
diff --git a/man/meson.1 b/man/meson.1
index 5c929bc1adf9..9ecbb99d0439 100644
--- a/man/meson.1
+++ b/man/meson.1
@@ -1,4 +1,4 @@
-.TH MESON "1" "September 2024" "meson 1.5.2" "User Commands"
+.TH MESON "1" "December 2024" "meson 1.6.1" "User Commands"
.SH NAME
meson - a high productivity build system
.SH DESCRIPTION
diff --git a/mesonbuild/ast/interpreter.py b/mesonbuild/ast/interpreter.py
index 15d279350eaa..5edd9b3d972c 100644
--- a/mesonbuild/ast/interpreter.py
+++ b/mesonbuild/ast/interpreter.py
@@ -43,7 +43,6 @@
NotNode,
PlusAssignmentNode,
TernaryNode,
- TestCaseClauseNode,
)
if T.TYPE_CHECKING:
@@ -57,6 +56,7 @@
IfClauseNode,
IndexNode,
OrNode,
+ TestCaseClauseNode,
UMinusNode,
)
diff --git a/mesonbuild/ast/introspection.py b/mesonbuild/ast/introspection.py
index 11975109d480..b2eb1f8cd633 100644
--- a/mesonbuild/ast/introspection.py
+++ b/mesonbuild/ast/introspection.py
@@ -15,7 +15,8 @@
from ..build import Executable, Jar, SharedLibrary, SharedModule, StaticLibrary
from ..compilers import detect_compiler_for
from ..interpreterbase import InvalidArguments, SubProject
-from ..mesonlib import MachineChoice, OptionKey
+from ..mesonlib import MachineChoice
+from ..options import OptionKey
from ..mparser import BaseNode, ArithmeticNode, ArrayNode, ElementaryNode, IdNode, FunctionNode, StringNode
from .interpreter import AstInterpreter
@@ -92,20 +93,36 @@ def func_project(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[st
if len(args) < 1:
raise InvalidArguments('Not enough arguments to project(). Needs at least the project name.')
+ def _str_list(node: T.Any) -> T.Optional[T.List[str]]:
+ if isinstance(node, ArrayNode):
+ r = []
+ for v in node.args.arguments:
+ if not isinstance(v, StringNode):
+ return None
+ r.append(v.value)
+ return r
+ if isinstance(node, StringNode):
+ return [node.value]
+ return None
+
proj_name = args[0]
proj_vers = kwargs.get('version', 'undefined')
- proj_langs = self.flatten_args(args[1:])
if isinstance(proj_vers, ElementaryNode):
proj_vers = proj_vers.value
if not isinstance(proj_vers, str):
proj_vers = 'undefined'
- self.project_data = {'descriptive_name': proj_name, 'version': proj_vers}
+ proj_langs = self.flatten_args(args[1:])
+ # Match the value returned by ``meson.project_license()`` when
+ # no ``license`` argument is specified in the ``project()`` call.
+ proj_license = _str_list(kwargs.get('license', None)) or ['unknown']
+ proj_license_files = _str_list(kwargs.get('license_files', None)) or []
+ self.project_data = {'descriptive_name': proj_name, 'version': proj_vers, 'license': proj_license, 'license_files': proj_license_files}
optfile = os.path.join(self.source_root, self.subdir, 'meson.options')
if not os.path.exists(optfile):
optfile = os.path.join(self.source_root, self.subdir, 'meson_options.txt')
if os.path.exists(optfile):
- oi = optinterpreter.OptionInterpreter(self.subproject)
+ oi = optinterpreter.OptionInterpreter(self.coredata.optstore, self.subproject)
oi.process(optfile)
assert isinstance(proj_name, str), 'for mypy'
self.coredata.update_project_options(oi.options, T.cast('SubProject', proj_name))
@@ -130,7 +147,7 @@ def func_project(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[st
self.do_subproject(SubProject(i))
self.coredata.init_backend_options(self.backend)
- options = {k: v for k, v in self.environment.options.items() if k.is_backend()}
+ options = {k: v for k, v in self.environment.options.items() if self.environment.coredata.optstore.is_backend_option(k)}
self.coredata.set_options(options)
self._add_languages(proj_langs, True, MachineChoice.HOST)
diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py
index 4c7faa5aa380..079b62dbdeb9 100644
--- a/mesonbuild/backend/backends.py
+++ b/mesonbuild/backend/backends.py
@@ -27,8 +27,10 @@
from ..compilers import LANGUAGES_USING_LDFLAGS, detect
from ..mesonlib import (
File, MachineChoice, MesonException, OrderedSet,
- ExecutableSerialisation, classify_unity_sources, OptionKey
+ ExecutableSerialisation, classify_unity_sources,
)
+from ..options import OptionKey
+
if T.TYPE_CHECKING:
from .._typing import ImmutableListProtocol
@@ -595,7 +597,7 @@ def as_meson_exe_cmdline(self, exe: T.Union[str, mesonlib.File, build.BuildTarge
feed: T.Optional[str] = None,
force_serialize: bool = False,
env: T.Optional[mesonlib.EnvironmentVariables] = None,
- verbose: bool = False) -> T.Tuple[T.Sequence[T.Union[str, File, build.Target, programs.ExternalProgram]], str]:
+ verbose: bool = False) -> T.Tuple[T.List[str], str]:
'''
Serialize an executable for running with a generator or a custom target
'''
@@ -1133,7 +1135,7 @@ def search_dll_path(link_arg: str) -> T.Optional[str]:
if p.is_file():
p = p.parent
- # Heuristic: replace *last* occurence of '/lib'
+ # Heuristic: replace *last* occurrence of '/lib'
binpath = Path('/bin'.join(p.as_posix().rsplit('/lib', maxsplit=1)))
for _ in binpath.glob('*.dll'):
return str(binpath)
@@ -1259,6 +1261,8 @@ def create_test_serialisation(self, tests: T.List['Test']) -> T.List[TestSeriali
cmd_args.append(a)
elif isinstance(a, (build.Target, build.CustomTargetIndex)):
cmd_args.extend(self.construct_target_rel_paths(a, t.workdir))
+ elif isinstance(a, programs.ExternalProgram):
+ cmd_args.extend(a.get_command())
else:
raise MesonException('Bad object in test command.')
@@ -1678,7 +1682,7 @@ def guess_install_tag(self, fname: str, outdir: T.Optional[str] = None) -> T.Opt
bindir = Path(prefix, self.environment.get_bindir())
libdir = Path(prefix, self.environment.get_libdir())
incdir = Path(prefix, self.environment.get_includedir())
- _ldir = self.environment.coredata.get_option(mesonlib.OptionKey('localedir'))
+ _ldir = self.environment.coredata.get_option(OptionKey('localedir'))
assert isinstance(_ldir, str), 'for mypy'
localedir = Path(prefix, _ldir)
dest_path = Path(prefix, outdir, Path(fname).name) if outdir else Path(prefix, fname)
diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py
index 9c6932884e4a..b78ad076d24b 100644
--- a/mesonbuild/backend/ninjabackend.py
+++ b/mesonbuild/backend/ninjabackend.py
@@ -31,7 +31,8 @@
File, LibType, MachineChoice, MesonBugException, MesonException, OrderedSet, PerMachine,
ProgressBar, quote_arg
)
-from ..mesonlib import get_compiler_for_source, has_path_sep, OptionKey
+from ..mesonlib import get_compiler_for_source, has_path_sep
+from ..options import OptionKey
from .backends import CleanTrees
from ..build import GeneratedList, InvalidArguments
@@ -44,6 +45,8 @@
from ..linkers.linkers import DynamicLinker, StaticLinker
from ..compilers.cs import CsCompiler
from ..compilers.fortran import FortranCompiler
+ from ..mesonlib import FileOrString
+ from .backends import TargetIntrospectionData
CommandArgOrStr = T.List[T.Union['NinjaCommandArg', str]]
RUST_EDITIONS = Literal['2015', '2018', '2021']
@@ -98,7 +101,7 @@ def get_rsp_threshold() -> int:
# and that has a limit of 8k.
limit = 8192
else:
- # Unix-like OSes usualy have very large command line limits, (On Linux,
+ # Unix-like OSes usually have very large command line limits, (On Linux,
# for example, this is limited by the kernel's MAX_ARG_STRLEN). However,
# some programs place much lower limits, notably Wine which enforces a
# 32k limit like Windows. Therefore, we limit the command line to 32k.
@@ -305,6 +308,9 @@ def length_estimate(self, infiles: str, outfiles: str,
return estimate
class NinjaBuildElement:
+
+ rule: NinjaRule
+
def __init__(self, all_outputs: T.Set[str], outfilenames, rulename, infilenames, implicit_outs=None):
self.implicit_outfilenames = implicit_outs or []
if isinstance(outfilenames, str):
@@ -329,13 +335,13 @@ def add_dep(self, dep: T.Union[str, T.List[str]]) -> None:
else:
self.deps.add(dep)
- def add_orderdep(self, dep):
+ def add_orderdep(self, dep) -> None:
if isinstance(dep, list):
self.orderdeps.update(dep)
else:
self.orderdeps.add(dep)
- def add_item(self, name: str, elems: T.Union[str, T.List[str, CompilerArgs]]) -> None:
+ def add_item(self, name: str, elems: T.Union[str, T.List[str], CompilerArgs]) -> None:
# Always convert from GCC-style argument naming to the naming used by the
# current compiler. Also filter system include paths, deduplicate, etc.
if isinstance(elems, CompilerArgs):
@@ -347,7 +353,7 @@ def add_item(self, name: str, elems: T.Union[str, T.List[str, CompilerArgs]]) ->
if name == 'DEPFILE':
self.elems.append((name + '_UNQUOTED', elems))
- def _should_use_rspfile(self):
+ def _should_use_rspfile(self) -> bool:
# 'phony' is a rule built-in to ninja
if self.rulename == 'phony':
return False
@@ -362,14 +368,14 @@ def _should_use_rspfile(self):
outfilenames,
self.elems) >= rsp_threshold
- def count_rule_references(self):
+ def count_rule_references(self) -> None:
if self.rulename != 'phony':
if self._should_use_rspfile():
self.rule.rsprefcount += 1
else:
self.rule.refcount += 1
- def write(self, outfile):
+ def write(self, outfile: T.TextIO) -> None:
if self.output_errors:
raise MesonException(self.output_errors)
ins = ' '.join([ninja_quote(i, True) for i in self.infilenames])
@@ -428,7 +434,7 @@ def write(self, outfile):
outfile.write(line)
outfile.write('\n')
- def check_outputs(self):
+ def check_outputs(self) -> None:
for n in self.outfilenames:
if n in self.all_outputs:
self.output_errors = f'Multiple producers for Ninja target "{n}". Please rename your targets.'
@@ -490,13 +496,14 @@ def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional[Inter
super().__init__(build, interpreter)
self.name = 'ninja'
self.ninja_filename = 'build.ninja'
- self.fortran_deps = {}
+ self.fortran_deps: T.Dict[str, T.Dict[str, File]] = {}
self.all_outputs: T.Set[str] = set()
self.introspection_data = {}
self.created_llvm_ir_rule = PerMachine(False, False)
self.rust_crates: T.Dict[str, RustCrate] = {}
- self.implicit_meson_outs = []
+ self.implicit_meson_outs: T.List[str] = []
self._uses_dyndeps = False
+ self._generated_header_cache: T.Dict[str, T.List[FileOrString]] = {}
# nvcc chokes on thin archives:
# nvlink fatal : Could not open input file 'libfoo.a.p'
# nvlink fatal : elfLink internal error
@@ -685,7 +692,9 @@ def generate(self, capture: bool = False, vslite_ctx: T.Optional[T.Dict] = None)
mlog.cmd_ci_include(outfilename) # For CI debugging
# Refresh Ninja's caches. https://github.com/ninja-build/ninja/pull/1685
# Cannot use when running with dyndeps: https://github.com/ninja-build/ninja/issues/1952
- if mesonlib.version_compare(self.ninja_version, '>=1.10.0') and os.path.exists(os.path.join(self.environment.build_dir, '.ninja_log')) and not self._uses_dyndeps:
+ if ((mesonlib.version_compare(self.ninja_version, '>= 1.12.0') or
+ (mesonlib.version_compare(self.ninja_version, '>=1.10.0') and not self._uses_dyndeps))
+ and os.path.exists(os.path.join(self.environment.build_dir, '.ninja_log'))):
subprocess.call(self.ninja_command + ['-t', 'restat'], cwd=self.environment.build_dir)
subprocess.call(self.ninja_command + ['-t', 'cleandead'], cwd=self.environment.build_dir)
self.generate_compdb()
@@ -710,7 +719,7 @@ def generate_rust_project_json(self) -> None:
f, indent=4)
# http://clang.llvm.org/docs/JSONCompilationDatabase.html
- def generate_compdb(self):
+ def generate_compdb(self) -> None:
rules = []
# TODO: Rather than an explicit list here, rules could be marked in the
# rule store as being wanted in compdb
@@ -732,10 +741,11 @@ def generate_compdb(self):
# Get all generated headers. Any source file might need them so
# we need to add an order dependency to them.
- def get_generated_headers(self, target):
- if hasattr(target, 'cached_generated_headers'):
- return target.cached_generated_headers
- header_deps = []
+ def get_generated_headers(self, target: build.BuildTarget) -> T.List[FileOrString]:
+ tid = target.get_id()
+ if tid in self._generated_header_cache:
+ return self._generated_header_cache[tid]
+ header_deps: T.List[FileOrString] = []
# XXX: Why don't we add deps to CustomTarget headers here?
for genlist in target.get_generated_sources():
if isinstance(genlist, (build.CustomTarget, build.CustomTargetIndex)):
@@ -752,7 +762,7 @@ def get_generated_headers(self, target):
header_deps += self.get_generated_headers(dep)
if isinstance(target, build.CompileTarget):
header_deps.extend(target.get_generated_headers())
- target.cached_generated_headers = header_deps
+ self._generated_header_cache[tid] = header_deps
return header_deps
def get_target_generated_sources(self, target: build.BuildTarget) -> T.MutableMapping[str, File]:
@@ -795,7 +805,7 @@ def get_target_source_can_unity(self, target, source):
return True
def create_target_source_introspection(self, target: build.Target, comp: compilers.Compiler, parameters, sources, generated_sources,
- unity_sources: T.Optional[T.List[mesonlib.FileOrString]] = None):
+ unity_sources: T.Optional[T.List[FileOrString]] = None) -> None:
'''
Adds the source file introspection information for a language of a target
@@ -834,7 +844,7 @@ def create_target_source_introspection(self, target: build.Target, comp: compile
}
tgt[id_hash] = src_block
- def compute_path(file: mesonlib.FileOrString) -> str:
+ def compute_path(file: FileOrString) -> str:
""" Make source files absolute """
if isinstance(file, File):
return file.absolute_path(self.source_dir, self.build_dir)
@@ -845,14 +855,13 @@ def compute_path(file: mesonlib.FileOrString) -> str:
if unity_sources:
src_block['unity_sources'].extend(compute_path(x) for x in unity_sources)
- def create_target_linker_introspection(self, target: build.Target, linker: T.Union[Compiler, StaticLinker], parameters):
+ def create_target_linker_introspection(self, target: build.Target, linker: T.Union[Compiler, StaticLinker], parameters: CompilerArgs) -> None:
tid = target.get_id()
tgt = self.introspection_data[tid]
lnk_hash = tuple(parameters)
lnk_block = tgt.get(lnk_hash, None)
if lnk_block is None:
- if isinstance(parameters, CompilerArgs):
- parameters = parameters.to_native(copy=True)
+ paramlist = parameters.to_native(copy=True)
if isinstance(linker, Compiler):
linkers = linker.get_linker_exelist()
@@ -861,11 +870,11 @@ def create_target_linker_introspection(self, target: build.Target, linker: T.Uni
lnk_block = {
'linker': linkers,
- 'parameters': parameters,
+ 'parameters': paramlist,
}
tgt[lnk_hash] = lnk_block
- def generate_target(self, target):
+ def generate_target(self, target) -> None:
if isinstance(target, build.BuildTarget):
os.makedirs(self.get_target_private_dir_abs(target), exist_ok=True)
if isinstance(target, build.CustomTarget):
@@ -1092,7 +1101,7 @@ def should_use_dyndeps_for_target(self, target: 'build.BuildTarget') -> bool:
cpp = target.compilers['cpp']
if cpp.get_id() != 'msvc':
return False
- cppversion = target.get_option(OptionKey('std', machine=target.for_machine, lang='cpp'))
+ cppversion = target.get_option(OptionKey('cpp_std', machine=target.for_machine))
if cppversion not in ('latest', 'c++latest', 'vc++latest'):
return False
if not mesonlib.current_vs_supports_modules():
@@ -1105,7 +1114,7 @@ def generate_dependency_scan_target(self, target: build.BuildTarget,
compiled_sources: T.List[str],
source2object: T.Dict[str, str],
generated_source_files: T.List[mesonlib.File],
- object_deps: T.List['mesonlib.FileOrString']) -> None:
+ object_deps: T.List[FileOrString]) -> None:
if not self.should_use_dyndeps_for_target(target):
return
self._uses_dyndeps = True
@@ -1149,12 +1158,12 @@ def select_sources_to_scan(self, compiled_sources: T.List[str]
elif ext.lower() in compilers.lang_suffixes['fortran']:
yield source, 'fortran'
- def process_target_dependencies(self, target):
+ def process_target_dependencies(self, target) -> None:
for t in target.get_dependencies():
if t.get_id() not in self.processed_targets:
self.generate_target(t)
- def custom_target_generator_inputs(self, target):
+ def custom_target_generator_inputs(self, target) -> None:
for s in target.sources:
if isinstance(s, build.GeneratedList):
self.generate_genlist_for_target(s, target)
@@ -1170,7 +1179,7 @@ def unwrap_dep_list(self, target):
deps.append(os.path.join(self.get_target_dir(i), output))
return deps
- def generate_custom_target(self, target: build.CustomTarget):
+ def generate_custom_target(self, target: build.CustomTarget) -> None:
self.custom_target_generator_inputs(target)
(srcs, ofilenames, cmd) = self.eval_custom_target_command(target)
deps = self.unwrap_dep_list(target)
@@ -1212,14 +1221,14 @@ def generate_custom_target(self, target: build.CustomTarget):
self.add_build(elem)
self.processed_targets.add(target.get_id())
- def build_run_target_name(self, target):
+ def build_run_target_name(self, target) -> str:
if target.subproject != '':
subproject_prefix = f'{target.subproject}@@'
else:
subproject_prefix = ''
return f'{subproject_prefix}{target.name}'
- def generate_run_target(self, target: build.RunTarget):
+ def generate_run_target(self, target: build.RunTarget) -> None:
target_name = self.build_run_target_name(target)
if not target.command:
# This is an alias target, it has no command, it just depends on
@@ -1242,7 +1251,8 @@ def generate_run_target(self, target: build.RunTarget):
self.add_build(elem)
self.processed_targets.add(target.get_id())
- def generate_coverage_command(self, elem, outputs: T.List[str], gcovr_exe: T.Optional[str], llvm_cov_exe: T.Optional[str]):
+ def generate_coverage_command(self, elem: NinjaBuildElement, outputs: T.List[str],
+ gcovr_exe: T.Optional[str], llvm_cov_exe: T.Optional[str]) -> None:
targets = self.build.get_targets().values()
use_llvm_cov = False
exe_args = []
@@ -1269,14 +1279,14 @@ def generate_coverage_command(self, elem, outputs: T.List[str], gcovr_exe: T.Opt
exe_args +
(['--use-llvm-cov'] if use_llvm_cov else []))
- def generate_coverage_rules(self, gcovr_exe: T.Optional[str], gcovr_version: T.Optional[str], llvm_cov_exe: T.Optional[str]):
+ def generate_coverage_rules(self, gcovr_exe: T.Optional[str], gcovr_version: T.Optional[str], llvm_cov_exe: T.Optional[str]) -> None:
e = self.create_phony_target('coverage', 'CUSTOM_COMMAND', 'PHONY')
self.generate_coverage_command(e, [], gcovr_exe, llvm_cov_exe)
e.add_item('description', 'Generates coverage reports')
self.add_build(e)
self.generate_coverage_legacy_rules(gcovr_exe, gcovr_version, llvm_cov_exe)
- def generate_coverage_legacy_rules(self, gcovr_exe: T.Optional[str], gcovr_version: T.Optional[str], llvm_cov_exe: T.Optional[str]):
+ def generate_coverage_legacy_rules(self, gcovr_exe: T.Optional[str], gcovr_version: T.Optional[str], llvm_cov_exe: T.Optional[str]) -> None:
e = self.create_phony_target('coverage-html', 'CUSTOM_COMMAND', 'PHONY')
self.generate_coverage_command(e, ['--html'], gcovr_exe, llvm_cov_exe)
e.add_item('description', 'Generates HTML coverage report')
@@ -1299,7 +1309,7 @@ def generate_coverage_legacy_rules(self, gcovr_exe: T.Optional[str], gcovr_versi
e.add_item('description', 'Generates Sonarqube XML coverage report')
self.add_build(e)
- def generate_install(self):
+ def generate_install(self) -> None:
self.create_install_data_files()
elem = self.create_phony_target('install', 'CUSTOM_COMMAND', 'PHONY')
elem.add_dep('all')
@@ -1308,7 +1318,7 @@ def generate_install(self):
elem.add_item('pool', 'console')
self.add_build(elem)
- def generate_tests(self):
+ def generate_tests(self) -> None:
self.serialize_tests()
cmd = self.environment.get_build_command(True) + ['test', '--no-rebuild']
if not self.environment.coredata.get_option(OptionKey('stdsplit')):
@@ -1331,7 +1341,7 @@ def generate_tests(self):
elem.add_item('pool', 'console')
self.add_build(elem)
- def generate_rules(self):
+ def generate_rules(self) -> None:
self.rules = []
self.ruledict = {}
@@ -1406,7 +1416,7 @@ def generate_phony(self) -> None:
elem = NinjaBuildElement(self.all_outputs, 'PHONY', 'phony', '')
self.add_build(elem)
- def generate_jar_target(self, target: build.Jar):
+ def generate_jar_target(self, target: build.Jar) -> None:
fname = target.get_filename()
outname_rel = os.path.join(self.get_target_dir(target), fname)
src_list = target.get_sources()
@@ -1460,7 +1470,7 @@ def generate_jar_target(self, target: build.Jar):
# Create introspection information
self.create_target_source_introspection(target, compiler, compile_args, src_list, gen_src_list)
- def generate_cs_resource_tasks(self, target):
+ def generate_cs_resource_tasks(self, target) -> T.Tuple[T.List[str], T.List[str]]:
args = []
deps = []
for r in target.resources:
@@ -1481,7 +1491,7 @@ def generate_cs_resource_tasks(self, target):
args.append(a)
return args, deps
- def generate_cs_target(self, target: build.BuildTarget):
+ def generate_cs_target(self, target: build.BuildTarget) -> None:
fname = target.get_filename()
outname_rel = os.path.join(self.get_target_dir(target), fname)
src_list = target.get_sources()
@@ -1526,7 +1536,7 @@ def generate_cs_target(self, target: build.BuildTarget):
self.generate_generator_list_rules(target)
self.create_target_source_introspection(target, compiler, commands, rel_srcs, generated_rel_srcs)
- def determine_java_compile_args(self, target, compiler):
+ def determine_java_compile_args(self, target, compiler) -> T.List[str]:
args = []
args = self.generate_basic_compiler_args(target, compiler)
args += target.get_java_args()
@@ -1566,13 +1576,13 @@ def generate_java_compile(self, srcs, target, compiler, args):
self.add_build(element)
return plain_class_paths
- def generate_java_link(self):
+ def generate_java_link(self) -> None:
rule = 'java_LINKER'
command = ['jar', '$ARGS']
description = 'Creating JAR $out'
self.add_rule(NinjaRule(rule, command, [], description))
- def determine_dep_vapis(self, target):
+ def determine_dep_vapis(self, target) -> T.List[str]:
"""
Peek into the sources of BuildTargets we're linking with, and if any of
them was built with Vala, assume that it also generated a .vapi file of
@@ -1784,7 +1794,7 @@ def generate_cython_transpile(self, target: build.BuildTarget) -> \
args += self.build.get_project_args(cython, target.subproject, target.for_machine)
args += target.get_extra_args('cython')
- ext = target.get_option(OptionKey('language', machine=target.for_machine, lang='cython'))
+ ext = target.get_option(OptionKey('cython_language', machine=target.for_machine))
pyx_sources = [] # Keep track of sources we're adding to build
@@ -1835,7 +1845,7 @@ def generate_cython_transpile(self, target: build.BuildTarget) -> \
return static_sources, generated_sources, cython_sources
- def _generate_copy_target(self, src: 'mesonlib.FileOrString', output: Path) -> None:
+ def _generate_copy_target(self, src: FileOrString, output: Path) -> None:
"""Create a target to copy a source file from one location to another."""
if isinstance(src, File):
instr = src.absolute_path(self.environment.source_dir, self.environment.build_dir)
@@ -2114,7 +2124,7 @@ def _link_library(libname: str, static: bool, bundle: bool = False):
# ... but then add rustc's sysroot to account for rustup
# installations
for rpath_arg in rpath_args:
- args += ['-C', 'link-arg=' + rpath_arg + ':' + os.path.join(rustc.get_sysroot(), 'lib')]
+ args += ['-C', 'link-arg=' + rpath_arg + ':' + rustc.get_target_libdir()]
proc_macro_dylib_path = None
if getattr(target, 'rust_crate_type', '') == 'proc-macro':
@@ -2188,7 +2198,7 @@ def split_swift_generated_sources(self, target):
others.append(i)
return srcs, others
- def generate_swift_target(self, target):
+ def generate_swift_target(self, target) -> None:
module_name = self.target_swift_modulename(target)
swiftc = target.compilers['swift']
abssrc = []
@@ -2293,7 +2303,7 @@ def _rsp_options(self, tool: T.Union['Compiler', 'StaticLinker', 'DynamicLinker'
options['rspfile_quote_style'] = tool.rsp_file_syntax()
return options
- def generate_static_link_rules(self):
+ def generate_static_link_rules(self) -> None:
num_pools = self.environment.coredata.optstore.get_value('backend_max_links')
if 'java' in self.environment.coredata.compilers.host:
self.generate_java_link()
@@ -2341,7 +2351,7 @@ def generate_static_link_rules(self):
options = self._rsp_options(static_linker)
self.add_rule(NinjaRule(rule, cmdlist, args, description, **options, extra=pool))
- def generate_dynamic_link_rules(self):
+ def generate_dynamic_link_rules(self) -> None:
num_pools = self.environment.coredata.optstore.get_value('backend_max_links')
for for_machine in MachineChoice:
complist = self.environment.coredata.compilers[for_machine]
@@ -2359,7 +2369,7 @@ def generate_dynamic_link_rules(self):
options = self._rsp_options(compiler)
self.add_rule(NinjaRule(rule, command, args, description, **options, extra=pool))
- if self.environment.machines[for_machine].is_aix():
+ if self.environment.machines[for_machine].is_aix() and complist:
rule = 'AIX_LINKER{}'.format(self.get_rule_suffix(for_machine))
description = 'Archiving AIX shared library'
cmdlist = compiler.get_command_to_archive_shlib()
@@ -2380,7 +2390,7 @@ def generate_dynamic_link_rules(self):
synstat = 'restat = 1'
self.add_rule(NinjaRule(symrule, symcmd, [], syndesc, extra=synstat))
- def generate_java_compile_rule(self, compiler):
+ def generate_java_compile_rule(self, compiler) -> None:
rule = self.compiler_to_rule_name(compiler)
command = compiler.get_exelist() + ['$ARGS', '$in']
description = 'Compiling Java sources for $FOR_JAR'
@@ -2395,7 +2405,7 @@ def generate_cs_compile_rule(self, compiler: 'CsCompiler') -> None:
rspable=mesonlib.is_windows(),
rspfile_quote_style=compiler.rsp_file_syntax()))
- def generate_vala_compile_rules(self, compiler):
+ def generate_vala_compile_rules(self, compiler) -> None:
rule = self.compiler_to_rule_name(compiler)
command = compiler.get_exelist() + ['$ARGS', '$in']
description = 'Compiling Vala source $in'
@@ -2416,7 +2426,7 @@ def generate_cython_compile_rules(self, compiler: 'Compiler') -> None:
depfile=depfile,
extra='restat = 1'))
- def generate_rust_compile_rules(self, compiler):
+ def generate_rust_compile_rules(self, compiler) -> None:
rule = self.compiler_to_rule_name(compiler)
command = compiler.get_exelist() + ['$ARGS', '$in']
description = 'Compiling Rust source $in'
@@ -2425,7 +2435,7 @@ def generate_rust_compile_rules(self, compiler):
self.add_rule(NinjaRule(rule, command, [], description, deps=depstyle,
depfile=depfile))
- def generate_swift_compile_rules(self, compiler):
+ def generate_swift_compile_rules(self, compiler) -> None:
rule = self.compiler_to_rule_name(compiler)
full_exe = self.environment.get_build_command() + [
'--internal',
@@ -2456,7 +2466,7 @@ def generate_fortran_dep_hack(self, crstr: str) -> None:
https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
self.add_rule(NinjaRule(rule, cmd, [], 'Dep hack', extra='restat = 1'))
- def generate_llvm_ir_compile_rule(self, compiler):
+ def generate_llvm_ir_compile_rule(self, compiler) -> None:
if self.created_llvm_ir_rule[compiler.for_machine]:
return
rule = self.get_compiler_rule_name('llvm_ir', compiler.for_machine)
@@ -2469,7 +2479,7 @@ def generate_llvm_ir_compile_rule(self, compiler):
self.add_rule(NinjaRule(rule, command, args, description, **options))
self.created_llvm_ir_rule[compiler.for_machine] = True
- def generate_compile_rule_for(self, langname, compiler):
+ def generate_compile_rule_for(self, langname: str, compiler: Compiler) -> None:
if langname == 'java':
self.generate_java_compile_rule(compiler)
return
@@ -2515,7 +2525,7 @@ def generate_compile_rule_for(self, langname, compiler):
self.add_rule(NinjaRule(rule, command, args, description, **options,
deps=deps, depfile=depfile))
- def generate_pch_rule_for(self, langname, compiler):
+ def generate_pch_rule_for(self, langname: str, compiler: Compiler) -> None:
if langname not in {'c', 'cpp'}:
return
rule = self.compiler_to_pch_rule_name(compiler)
@@ -2541,7 +2551,7 @@ def generate_pch_rule_for(self, langname, compiler):
self.add_rule(NinjaRule(rule, command, [], description, deps=deps,
depfile=depfile))
- def generate_scanner_rules(self):
+ def generate_scanner_rules(self) -> None:
rulename = 'depscan'
if rulename in self.ruledict:
# Scanning command is the same for native and cross compilation.
@@ -2553,7 +2563,7 @@ def generate_scanner_rules(self):
rule = NinjaRule(rulename, command, args, description)
self.add_rule(rule)
- def generate_compile_rules(self):
+ def generate_compile_rules(self) -> None:
for for_machine in MachineChoice:
clist = self.environment.coredata.compilers[for_machine]
for langname, compiler in clist.items():
@@ -2564,7 +2574,7 @@ def generate_compile_rules(self):
for mode in compiler.get_modes():
self.generate_compile_rule_for(langname, mode)
- def generate_generator_list_rules(self, target):
+ def generate_generator_list_rules(self, target) -> None:
# CustomTargets have already written their rules and
# CustomTargetIndexes don't actually get generated, so write rules for
# GeneratedLists here
@@ -2595,17 +2605,18 @@ def generate_genlist_for_target(self, genlist: build.GeneratedList, target: buil
subdir = genlist.subdir
exe = generator.get_exe()
infilelist = genlist.get_inputs()
- outfilelist = genlist.get_outputs()
extra_dependencies = self.get_target_depend_files(genlist)
- for i, curfile in enumerate(infilelist):
- if len(generator.outputs) == 1:
- sole_output = os.path.join(self.get_target_private_dir(target), outfilelist[i])
- else:
- sole_output = f'{curfile}'
+ for curfile in infilelist:
infilename = curfile.rel_to_builddir(self.build_to_src, self.get_target_private_dir(target))
base_args = generator.get_arglist(infilename)
outfiles = genlist.get_outputs_for(curfile)
- outfiles = [os.path.join(self.get_target_private_dir(target), of) for of in outfiles]
+ outfilespriv = [os.path.join(self.get_target_private_dir(target), of) for of in outfiles]
+
+ if len(generator.outputs) == 1:
+ sole_output = outfilespriv[0]
+ else:
+ sole_output = f'{curfile}'
+
if generator.depfile is None:
rulename = 'CUSTOM_COMMAND'
args = base_args
@@ -2616,19 +2627,16 @@ def generate_genlist_for_target(self, genlist: build.GeneratedList, target: buil
args = [x.replace('@DEPFILE@', depfile) for x in base_args]
args = [x.replace("@INPUT@", infilename).replace('@OUTPUT@', sole_output)
for x in args]
- args = self.replace_outputs(args, self.get_target_private_dir(target), outfilelist)
- # We have consumed output files, so drop them from the list of remaining outputs.
- if len(generator.outputs) > 1:
- outfilelist = outfilelist[len(generator.outputs):]
+ args = self.replace_outputs(args, self.get_target_private_dir(target), outfiles)
args = self.replace_paths(target, args, override_subdir=subdir)
cmdlist, reason = self.as_meson_exe_cmdline(exe,
self.replace_extra_args(args, genlist),
- capture=outfiles[0] if generator.capture else None,
+ capture=outfilespriv[0] if generator.capture else None,
env=genlist.env)
abs_pdir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target))
os.makedirs(abs_pdir, exist_ok=True)
- elem = NinjaBuildElement(self.all_outputs, outfiles, rulename, infilename)
+ elem = NinjaBuildElement(self.all_outputs, outfilespriv, rulename, infilename)
elem.add_dep([self.get_target_filename(x) for x in generator.depends])
if generator.depfile is not None:
elem.add_item('DEPFILE', depfile)
@@ -2649,7 +2657,7 @@ def generate_genlist_for_target(self, genlist: build.GeneratedList, target: buil
elem.add_item('COMMAND', cmdlist)
self.add_build(elem)
- def scan_fortran_module_outputs(self, target):
+ def scan_fortran_module_outputs(self, target) -> None:
"""
Find all module and submodule made available in a Fortran code file.
"""
@@ -2791,7 +2799,7 @@ def get_link_debugfile_args(self, linker: T.Union[Compiler, StaticLinker], targe
return linker.get_link_debugfile_args(filename)
return []
- def generate_llvm_ir_compile(self, target, src: mesonlib.FileOrString):
+ def generate_llvm_ir_compile(self, target, src: FileOrString):
base_proxy = target.get_options()
compiler = get_compiler_for_source(target.compilers.values(), src)
commands = compiler.compiler_args()
@@ -2951,9 +2959,9 @@ def generate_common_compile_args_per_src_type(self, target: build.BuildTarget) -
def generate_single_compile(self, target: build.BuildTarget, src,
is_generated: bool = False, header_deps=None,
- order_deps: T.Optional[T.List['mesonlib.FileOrString']] = None,
+ order_deps: T.Optional[T.List[FileOrString]] = None,
extra_args: T.Optional[T.List[str]] = None,
- unity_sources: T.Optional[T.List[mesonlib.FileOrString]] = None,
+ unity_sources: T.Optional[T.List[FileOrString]] = None,
) -> T.Tuple[str, str]:
"""
Compiles C/C++, ObjC/ObjC++, Fortran, and D sources
@@ -3095,7 +3103,7 @@ def quote_make_target(targetName: str) -> str:
assert isinstance(rel_src, str)
return (rel_obj, rel_src.replace('\\', '/'))
- def add_dependency_scanner_entries_to_element(self, target: build.BuildTarget, compiler, element, src):
+ def add_dependency_scanner_entries_to_element(self, target: build.BuildTarget, compiler, element, src) -> None:
if not self.should_use_dyndeps_for_target(target):
return
if isinstance(target, build.CompileTarget):
@@ -3120,7 +3128,7 @@ def add_header_deps(self, target, ninja_element, header_deps):
d = os.path.join(self.get_target_private_dir(target), d)
ninja_element.add_dep(d)
- def has_dir_part(self, fname: mesonlib.FileOrString) -> bool:
+ def has_dir_part(self, fname: FileOrString) -> bool:
# FIXME FIXME: The usage of this is a terrible and unreliable hack
if isinstance(fname, File):
return fname.subdir != ''
@@ -3128,7 +3136,7 @@ def has_dir_part(self, fname: mesonlib.FileOrString) -> bool:
# Fortran is a bit weird (again). When you link against a library, just compiling a source file
# requires the mod files that are output when single files are built. To do this right we would need to
- # scan all inputs and write out explicit deps for each file. That is stoo slow and too much effort so
+ # scan all inputs and write out explicit deps for each file. That is too slow and too much effort so
# instead just have an ordered dependency on the library. This ensures all required mod files are created.
# The real deps are then detected via dep file generation from the compiler. This breaks on compilers that
# produce incorrect dep files but such is life.
@@ -3227,7 +3235,7 @@ def get_target_shsym_filename(self, target):
targetdir = self.get_target_private_dir(target)
return os.path.join(targetdir, target.get_filename() + '.symbols')
- def generate_shsym(self, target):
+ def generate_shsym(self, target) -> None:
target_file = self.get_target_filename(target)
symname = self.get_target_shsym_filename(target)
elem = NinjaBuildElement(self.all_outputs, symname, 'SHSYM', target_file)
@@ -3237,7 +3245,7 @@ def generate_shsym(self, target):
elem.add_item('CROSS', '--cross-host=' + self.environment.machines[target.for_machine].system)
self.add_build(elem)
- def get_import_filename(self, target):
+ def get_import_filename(self, target) -> str:
return os.path.join(self.get_target_dir(target), target.import_filename)
def get_target_type_link_args(self, target, linker):
@@ -3292,7 +3300,7 @@ def get_target_type_link_args_post_dependencies(self, target, linker):
commands += linker.get_win_subsystem_args(target.win_subsystem)
return commands
- def get_link_whole_args(self, linker, target):
+ def get_link_whole_args(self, linker: DynamicLinker, target):
use_custom = False
if linker.id == 'msvc':
# Expand our object lists manually if we are on pre-Visual Studio 2015 Update 2
@@ -3562,7 +3570,7 @@ def get_dependency_filename(self, t):
self.environment.get_build_dir())
return self.get_target_filename(t)
- def generate_shlib_aliases(self, target, outdir):
+ def generate_shlib_aliases(self, target, outdir) -> None:
for alias, to, tag in target.get_aliases():
aliasfile = os.path.join(outdir, alias)
abs_aliasfile = os.path.join(self.environment.get_build_dir(), outdir, alias)
@@ -3602,10 +3610,10 @@ def generate_gcov_clean(self) -> None:
gcda_elem.add_item('description', 'Deleting gcda files')
self.add_build(gcda_elem)
- def get_user_option_args(self):
+ def get_user_option_args(self) -> T.List[str]:
cmds = []
for k, v in self.environment.coredata.optstore.items():
- if k.is_project():
+ if self.environment.coredata.optstore.is_project_option(k):
cmds.append('-D' + str(k) + '=' + (v.value if isinstance(v.value, str) else str(v.value).lower()))
# The order of these arguments must be the same between runs of Meson
# to ensure reproducible output. The order we pass them shouldn't
@@ -3658,10 +3666,11 @@ def generate_clangformat(self) -> None:
self.generate_clangtool('format', 'check')
def generate_clangtidy(self) -> None:
- import shutil
- if not shutil.which('clang-tidy'):
+ if not environment.detect_clangtidy():
return
self.generate_clangtool('tidy')
+ if not environment.detect_clangapply():
+ return
self.generate_clangtool('tidy', 'fix')
def generate_tags(self, tool: str, target_name: str) -> None:
@@ -3707,7 +3716,7 @@ def generate_ending(self) -> None:
#Add archive file if shared library in AIX for build all.
if isinstance(t, build.SharedLibrary) and t.aix_so_archive:
if self.environment.machines[t.for_machine].is_aix():
- linker, stdlib_args = self.determine_linker_and_stdlib_args(t)
+ linker, stdlib_args = t.get_clink_dynamic_linker_and_stdlibs()
t.get_outputs()[0] = linker.get_archive_name(t.get_outputs()[0])
targetlist.append(os.path.join(self.get_target_dir(t), t.get_outputs()[0]))
@@ -3758,7 +3767,7 @@ def generate_ending(self) -> None:
elem = NinjaBuildElement(self.all_outputs, deps, 'phony', '')
self.add_build(elem)
- def get_introspection_data(self, target_id: str, target: build.Target) -> T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]:
+ def get_introspection_data(self, target_id: str, target: build.Target) -> T.List[TargetIntrospectionData]:
data = self.introspection_data.get(target_id)
if not data:
return super().get_introspection_data(target_id, target)
@@ -3766,7 +3775,7 @@ def get_introspection_data(self, target_id: str, target: build.Target) -> T.List
return list(data.values())
-def _scan_fortran_file_deps(src: Path, srcdir: Path, dirname: Path, tdeps, compiler) -> T.List[str]:
+def _scan_fortran_file_deps(src: Path, srcdir: Path, dirname: Path, tdeps: T.Dict[str, File], compiler: FortranCompiler) -> T.List[str]:
"""
scan a Fortran file for dependencies. Needs to be distinct from target
to allow for recursion induced by `include` statements.er
diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py
index 496e8ffed404..08a19c659e44 100644
--- a/mesonbuild/backend/vs2010backend.py
+++ b/mesonbuild/backend/vs2010backend.py
@@ -19,8 +19,9 @@
from .. import compilers
from .. import mesonlib
from ..mesonlib import (
- File, MesonBugException, MesonException, replace_if_different, OptionKey, version_compare, MachineChoice
+ File, MesonBugException, MesonException, replace_if_different, version_compare, MachineChoice
)
+from ..options import OptionKey
from ..environment import Environment, build_filename
from .. import coredata
@@ -878,7 +879,7 @@ def add_project_nmake_defs_incs_and_opts(self, parent_node, src: str, defs_paths
ET.SubElement(parent_node, 'PreprocessorDefinitions', Condition=condition).text = defs
ET.SubElement(parent_node, 'AdditionalIncludeDirectories', Condition=condition).text = paths
ET.SubElement(parent_node, 'AdditionalOptions', Condition=condition).text = opts
- else: # Can't find bespoke nmake defs/dirs/opts fields for this extention, so just reference the project's fields
+ else: # Can't find bespoke nmake defs/dirs/opts fields for this extension, so just reference the project's fields
ET.SubElement(parent_node, 'PreprocessorDefinitions').text = '$(NMakePreprocessorDefinitions)'
ET.SubElement(parent_node, 'AdditionalIncludeDirectories').text = '$(NMakeIncludeSearchPath)'
ET.SubElement(parent_node, 'AdditionalOptions').text = '$(AdditionalOptions)'
@@ -1010,8 +1011,8 @@ def get_args_defines_and_inc_dirs(self, target, compiler, generated_files_includ
file_args[l] += args
# Compile args added from the env or cross file: CFLAGS/CXXFLAGS, etc. We want these
# to override all the defaults, but not the per-target compile args.
- for l in file_args.keys():
- file_args[l] += target.get_option(OptionKey('args', machine=target.for_machine, lang=l))
+ for lang in file_args.keys():
+ file_args[lang] += target.get_option(OptionKey(f'{lang}_args', machine=target.for_machine))
for args in file_args.values():
# This is where Visual Studio will insert target_args, target_defines,
# etc, which are added later from external deps (see below).
@@ -1339,7 +1340,7 @@ def add_non_makefile_vcxproj_elements(
# Exception handling has to be set in the xml in addition to the "AdditionalOptions" because otherwise
# cl will give warning D9025: overriding '/Ehs' with cpp_eh value
if 'cpp' in target.compilers:
- eh = target.get_option(OptionKey('eh', machine=target.for_machine, lang='cpp'))
+ eh = target.get_option(OptionKey('cpp_eh', machine=target.for_machine))
if eh == 'a':
ET.SubElement(clconf, 'ExceptionHandling').text = 'Async'
elif eh == 's':
@@ -1541,7 +1542,7 @@ def add_non_makefile_vcxproj_elements(
# the solution's configurations. Similarly, 'ItemGroup' also doesn't support 'Condition'. So, without knowing
# a better (simple) alternative, for now, we'll repoint these generated sources (which will be incorrectly
# pointing to non-existent files under our '[builddir]_vs' directory) to the appropriate location under one of
- # our buildtype build directores (e.g. '[builddir]_debug').
+ # our buildtype build directories (e.g. '[builddir]_debug').
# This will at least allow the user to open the files of generated sources listed in the solution explorer,
# once a build/compile has generated these sources.
#
diff --git a/mesonbuild/backend/xcodebackend.py b/mesonbuild/backend/xcodebackend.py
index 69a544b240a0..31fd272b3f0b 100644
--- a/mesonbuild/backend/xcodebackend.py
+++ b/mesonbuild/backend/xcodebackend.py
@@ -11,7 +11,8 @@
from .. import mesonlib
from .. import mlog
from ..arglist import CompilerArgs
-from ..mesonlib import MesonBugException, MesonException, OptionKey
+from ..mesonlib import MesonBugException, MesonException
+from ..options import OptionKey
if T.TYPE_CHECKING:
from ..build import BuildTarget
@@ -138,7 +139,11 @@ def write(self, ofile: T.TextIO, indent_level: int) -> None:
class PbxDictItem:
def __init__(self, key: str, value: T.Union[PbxArray, PbxDict, str, int], comment: str = ''):
self.key = key
- self.value = value
+ if isinstance(value, str):
+ self.value = self.quote_value(value)
+ else:
+ self.value = value
+
if comment:
if '/*' in comment:
self.comment = comment
@@ -147,6 +152,17 @@ def __init__(self, key: str, value: T.Union[PbxArray, PbxDict, str, int], commen
else:
self.comment = comment
+ def quote_value(self, value: str) -> str:
+ quoted = f'"{value}"'
+
+ if not value:
+ return quoted
+
+ if set(' +@$<>/').isdisjoint(value) or value[0] == '"':
+ return value
+
+ return quoted
+
class PbxDict:
def __init__(self) -> None:
# This class is a bit weird, because we want to write PBX dicts in
@@ -407,7 +423,7 @@ def generate_build_phase_map(self) -> None:
# generate id for our own target-name
t.buildphasemap = {}
t.buildphasemap[tname] = self.gen_id()
- # each target can have it's own Frameworks/Sources/..., generate id's for those
+ # each target can have its own Frameworks/Sources/..., generate id's for those
t.buildphasemap['Frameworks'] = self.gen_id()
t.buildphasemap['Resources'] = self.gen_id()
t.buildphasemap['Sources'] = self.gen_id()
@@ -510,7 +526,7 @@ def generate_native_frameworks_map(self) -> None:
self.native_frameworks_fileref = {}
for t in self.build_targets.values():
for dep in t.get_external_deps():
- if dep.name == 'appleframeworks':
+ if dep.name == 'appleframeworks' and dep.found():
for f in dep.frameworks:
self.native_frameworks[f] = self.gen_id()
self.native_frameworks_fileref[f] = self.gen_id()
@@ -611,7 +627,8 @@ def generate_pbx_aggregate_target(self, objects_dict: PbxDict) -> None:
elif isinstance(t, build.BuildTarget):
target_dependencies.append(self.pbx_dep_map[t.get_id()])
aggregated_targets = []
- aggregated_targets.append((self.all_id, 'ALL_BUILD',
+ aggregated_targets.append((self.all_id,
+ 'ALL_BUILD',
self.all_buildconf_id,
[],
[self.regen_dependency_id] + target_dependencies + custom_target_dependencies))
@@ -667,14 +684,14 @@ def generate_pbx_aggregate_target(self, objects_dict: PbxDict) -> None:
agt_dict.add_item('dependencies', dep_arr)
for td in dependencies:
dep_arr.add_item(td, 'PBXTargetDependency')
- agt_dict.add_item('name', f'"{name}"')
- agt_dict.add_item('productName', f'"{name}"')
+ agt_dict.add_item('name', name)
+ agt_dict.add_item('productName', name)
objects_dict.add_item(t[0], agt_dict, name)
def generate_pbx_build_file(self, objects_dict: PbxDict) -> None:
for tname, t in self.build_targets.items():
for dep in t.get_external_deps():
- if dep.name == 'appleframeworks':
+ if dep.name == 'appleframeworks' and dep.found():
for f in dep.frameworks:
fw_dict = PbxDict()
fwkey = self.native_frameworks[f]
@@ -769,7 +786,7 @@ def generate_pbx_build_style(self, objects_dict: PbxDict) -> None:
settings_dict = PbxDict()
styledict.add_item('buildSettings', settings_dict)
settings_dict.add_item('COPY_PHASE_STRIP', 'NO')
- styledict.add_item('name', f'"{name}"')
+ styledict.add_item('name', name)
def to_shell_script(self, args: CompilerArgs) -> str:
quoted_cmd = []
@@ -788,13 +805,13 @@ def generate_pbx_build_rule(self, objects_dict: PbxDict) -> None:
buildrule.add_item('compilerSpec', 'com.apple.compilers.proxy.script')
if compiler.get_id() != 'yasm':
# Yasm doesn't generate escaped build rules
- buildrule.add_item('dependencyFile', '"$(DERIVED_FILE_DIR)/$(INPUT_FILE_BASE).d"')
+ buildrule.add_item('dependencyFile', '$(DERIVED_FILE_DIR)/$(INPUT_FILE_BASE).d')
buildrule.add_item('fileType', NEEDS_CUSTOM_RULES[language])
inputfiles = PbxArray()
buildrule.add_item('inputFiles', inputfiles)
buildrule.add_item('isEditable', '0')
outputfiles = PbxArray()
- outputfiles.add_item('"$(DERIVED_FILE_DIR)/$(INPUT_FILE_BASE).o"')
+ outputfiles.add_item('$(DERIVED_FILE_DIR)/$(INPUT_FILE_BASE).o')
buildrule.add_item('outputFiles', outputfiles)
# Do NOT use this parameter. Xcode will accept it from the UI,
# but the parser will break down inconsistently upon next
@@ -826,12 +843,12 @@ def generate_pbx_container_item_proxy(self, objects_dict: PbxDict) -> None:
proxy_dict.add_item('containerPortal', self.project_uid, 'Project object')
proxy_dict.add_item('proxyType', '1')
proxy_dict.add_item('remoteGlobalIDString', self.native_targets[t])
- proxy_dict.add_item('remoteInfo', '"' + t + '"')
+ proxy_dict.add_item('remoteInfo', t)
def generate_pbx_file_reference(self, objects_dict: PbxDict) -> None:
for tname, t in self.build_targets.items():
for dep in t.get_external_deps():
- if dep.name == 'appleframeworks':
+ if dep.name == 'appleframeworks' and dep.found():
for f in dep.frameworks:
fw_dict = PbxDict()
framework_fileref = self.native_frameworks_fileref[f]
@@ -859,17 +876,17 @@ def generate_pbx_file_reference(self, objects_dict: PbxDict) -> None:
path = s
objects_dict.add_item(idval, src_dict, fullpath)
src_dict.add_item('isa', 'PBXFileReference')
- src_dict.add_item('explicitFileType', '"' + xcodetype + '"')
+ src_dict.add_item('explicitFileType', xcodetype)
src_dict.add_item('fileEncoding', '4')
if in_build_dir:
- src_dict.add_item('name', '"' + name + '"')
+ src_dict.add_item('name', name)
# This makes no sense. This should say path instead of name
# but then the path gets added twice.
- src_dict.add_item('path', '"' + name + '"')
+ src_dict.add_item('path', name)
src_dict.add_item('sourceTree', 'BUILD_ROOT')
else:
- src_dict.add_item('name', '"' + name + '"')
- src_dict.add_item('path', '"' + path + '"')
+ src_dict.add_item('name', name)
+ src_dict.add_item('path', path)
src_dict.add_item('sourceTree', 'SOURCE_ROOT')
generator_id = 0
@@ -886,10 +903,10 @@ def generate_pbx_file_reference(self, objects_dict: PbxDict) -> None:
xcodetype = self.get_xcodetype(o)
rel_name = mesonlib.relpath(o, self.environment.get_source_dir())
odict.add_item('isa', 'PBXFileReference')
- odict.add_item('explicitFileType', '"' + xcodetype + '"')
+ odict.add_item('explicitFileType', xcodetype)
odict.add_item('fileEncoding', '4')
- odict.add_item('name', f'"{name}"')
- odict.add_item('path', f'"{rel_name}"')
+ odict.add_item('name', name)
+ odict.add_item('path', rel_name)
odict.add_item('sourceTree', 'SOURCE_ROOT')
generator_id += 1
@@ -910,10 +927,10 @@ def generate_pbx_file_reference(self, objects_dict: PbxDict) -> None:
name = os.path.basename(o)
objects_dict.add_item(idval, o_dict, fullpath)
o_dict.add_item('isa', 'PBXFileReference')
- o_dict.add_item('explicitFileType', '"' + self.get_xcodetype(o) + '"')
+ o_dict.add_item('explicitFileType', self.get_xcodetype(o))
o_dict.add_item('fileEncoding', '4')
- o_dict.add_item('name', f'"{name}"')
- o_dict.add_item('path', f'"{rel_name}"')
+ o_dict.add_item('name', name)
+ o_dict.add_item('path', rel_name)
o_dict.add_item('sourceTree', 'SOURCE_ROOT')
for e in t.extra_files:
@@ -929,9 +946,9 @@ def generate_pbx_file_reference(self, objects_dict: PbxDict) -> None:
path = e
objects_dict.add_item(idval, e_dict, fullpath)
e_dict.add_item('isa', 'PBXFileReference')
- e_dict.add_item('explicitFileType', '"' + xcodetype + '"')
- e_dict.add_item('name', '"' + name + '"')
- e_dict.add_item('path', '"' + path + '"')
+ e_dict.add_item('explicitFileType', xcodetype)
+ e_dict.add_item('name', name)
+ e_dict.add_item('path', path)
e_dict.add_item('sourceTree', 'SOURCE_ROOT')
for tname, idval in self.target_filemap.items():
target_dict = PbxDict()
@@ -949,11 +966,8 @@ def generate_pbx_file_reference(self, objects_dict: PbxDict) -> None:
typestr = self.get_xcodetype(fname)
path = '"%s"' % t.get_filename()
target_dict.add_item('isa', 'PBXFileReference')
- target_dict.add_item('explicitFileType', '"' + typestr + '"')
- if ' ' in path and path[0] != '"':
- target_dict.add_item('path', f'"{path}"')
- else:
- target_dict.add_item('path', path)
+ target_dict.add_item('explicitFileType', typestr)
+ target_dict.add_item('path', path)
target_dict.add_item('refType', reftype)
target_dict.add_item('sourceTree', 'BUILT_PRODUCTS_DIR')
@@ -971,9 +985,9 @@ def generate_pbx_file_reference(self, objects_dict: PbxDict) -> None:
custom_dict = PbxDict()
typestr = self.get_xcodetype(s)
custom_dict.add_item('isa', 'PBXFileReference')
- custom_dict.add_item('explicitFileType', '"' + typestr + '"')
- custom_dict.add_item('name', f'"{s}"')
- custom_dict.add_item('path', f'"{s}"')
+ custom_dict.add_item('explicitFileType', typestr)
+ custom_dict.add_item('name', s)
+ custom_dict.add_item('path', s)
custom_dict.add_item('refType', 0)
custom_dict.add_item('sourceTree', 'SOURCE_ROOT')
objects_dict.add_item(self.fileref_ids[(tname, s)], custom_dict)
@@ -981,9 +995,9 @@ def generate_pbx_file_reference(self, objects_dict: PbxDict) -> None:
custom_dict = PbxDict()
typestr = self.get_xcodetype(o)
custom_dict.add_item('isa', 'PBXFileReference')
- custom_dict.add_item('explicitFileType', '"' + typestr + '"')
+ custom_dict.add_item('explicitFileType', typestr)
custom_dict.add_item('name', o)
- custom_dict.add_item('path', f'"{os.path.join(self.src_to_build, o)}"')
+ custom_dict.add_item('path', os.path.join(self.src_to_build, o))
custom_dict.add_item('refType', 0)
custom_dict.add_item('sourceTree', 'SOURCE_ROOT')
objects_dict.add_item(self.custom_target_output_fileref[o], custom_dict)
@@ -993,9 +1007,9 @@ def generate_pbx_file_reference(self, objects_dict: PbxDict) -> None:
buildfile_dict = PbxDict()
typestr = self.get_xcodetype(buildfile)
buildfile_dict.add_item('isa', 'PBXFileReference')
- buildfile_dict.add_item('explicitFileType', '"' + typestr + '"')
- buildfile_dict.add_item('name', f'"{basename}"')
- buildfile_dict.add_item('path', f'"{buildfile}"')
+ buildfile_dict.add_item('explicitFileType', typestr)
+ buildfile_dict.add_item('name', basename)
+ buildfile_dict.add_item('path', buildfile)
buildfile_dict.add_item('refType', 0)
buildfile_dict.add_item('sourceTree', 'SOURCE_ROOT')
objects_dict.add_item(self.fileref_ids[buildfile], buildfile_dict)
@@ -1009,7 +1023,7 @@ def generate_pbx_frameworks_buildphase(self, objects_dict: PbxDict) -> None:
file_list = PbxArray()
bt_dict.add_item('files', file_list)
for dep in t.get_external_deps():
- if dep.name == 'appleframeworks':
+ if dep.name == 'appleframeworks' and dep.found():
for f in dep.frameworks:
file_list.add_item(self.native_frameworks[f], f'{f}.framework in Frameworks')
bt_dict.add_item('runOnlyForDeploymentPostprocessing', 0)
@@ -1036,7 +1050,7 @@ def generate_pbx_group(self, objects_dict: PbxDict) -> None:
main_children.add_item(resources_id, 'Resources')
main_children.add_item(products_id, 'Products')
main_children.add_item(frameworks_id, 'Frameworks')
- main_dict.add_item('sourceTree', '""')
+ main_dict.add_item('sourceTree', '')
self.add_projecttree(objects_dict, projecttree_id)
@@ -1046,7 +1060,7 @@ def generate_pbx_group(self, objects_dict: PbxDict) -> None:
resource_children = PbxArray()
resource_dict.add_item('children', resource_children)
resource_dict.add_item('name', 'Resources')
- resource_dict.add_item('sourceTree', '""')
+ resource_dict.add_item('sourceTree', '')
frameworks_dict = PbxDict()
objects_dict.add_item(frameworks_id, frameworks_dict, 'Frameworks')
@@ -1057,12 +1071,12 @@ def generate_pbx_group(self, objects_dict: PbxDict) -> None:
for t in self.build_targets.values():
for dep in t.get_external_deps():
- if dep.name == 'appleframeworks':
+ if dep.name == 'appleframeworks' and dep.found():
for f in dep.frameworks:
frameworks_children.add_item(self.native_frameworks_fileref[f], f)
frameworks_dict.add_item('name', 'Frameworks')
- frameworks_dict.add_item('sourceTree', '""')
+ frameworks_dict.add_item('sourceTree', '')
for tname, t in self.custom_targets.items():
target_dict = PbxDict()
@@ -1072,10 +1086,10 @@ def generate_pbx_group(self, objects_dict: PbxDict) -> None:
target_dict.add_item('children', target_children)
target_children.add_item(target_src_map[tname], 'Source files')
if t.subproject:
- target_dict.add_item('name', f'"{t.subproject} • {t.name}"')
+ target_dict.add_item('name', f'{t.subproject} • {t.name}')
else:
- target_dict.add_item('name', f'"{t.name}"')
- target_dict.add_item('sourceTree', '""')
+ target_dict.add_item('name', t.name)
+ target_dict.add_item('sourceTree', '')
source_files_dict = PbxDict()
objects_dict.add_item(target_src_map[tname], source_files_dict, 'Source files')
source_files_dict.add_item('isa', 'PBXGroup')
@@ -1089,8 +1103,8 @@ def generate_pbx_group(self, objects_dict: PbxDict) -> None:
else:
continue
source_file_children.add_item(self.fileref_ids[(tname, s)], s)
- source_files_dict.add_item('name', '"Source files"')
- source_files_dict.add_item('sourceTree', '""')
+ source_files_dict.add_item('name', 'Source files')
+ source_files_dict.add_item('sourceTree', '')
# And finally products
product_dict = PbxDict()
@@ -1101,7 +1115,7 @@ def generate_pbx_group(self, objects_dict: PbxDict) -> None:
for t in self.build_targets:
product_children.add_item(self.target_filemap[t], t)
product_dict.add_item('name', 'Products')
- product_dict.add_item('sourceTree', '""')
+ product_dict.add_item('sourceTree', '')
def write_group_target_entry(self, objects_dict, t):
tid = t.get_id()
@@ -1111,8 +1125,8 @@ def write_group_target_entry(self, objects_dict, t):
target_dict.add_item('isa', 'PBXGroup')
target_children = PbxArray()
target_dict.add_item('children', target_children)
- target_dict.add_item('name', f'"{t} · target"')
- target_dict.add_item('sourceTree', '""')
+ target_dict.add_item('name', f'{t} · target')
+ target_dict.add_item('sourceTree', '')
source_files_dict = PbxDict()
for s in t.sources:
if isinstance(s, mesonlib.File):
@@ -1139,8 +1153,8 @@ def write_group_target_entry(self, objects_dict, t):
else:
continue
target_children.add_item(self.fileref_ids[(tid, e)], e)
- source_files_dict.add_item('name', '"Source files"')
- source_files_dict.add_item('sourceTree', '""')
+ source_files_dict.add_item('name', 'Source files')
+ source_files_dict.add_item('sourceTree', '')
return group_id
def add_projecttree(self, objects_dict, projecttree_id) -> None:
@@ -1149,8 +1163,8 @@ def add_projecttree(self, objects_dict, projecttree_id) -> None:
root_dict.add_item('isa', 'PBXGroup')
target_children = PbxArray()
root_dict.add_item('children', target_children)
- root_dict.add_item('name', '"Project root"')
- root_dict.add_item('sourceTree', '""')
+ root_dict.add_item('name', 'Project root')
+ root_dict.add_item('sourceTree', '')
project_tree = self.generate_project_tree()
self.write_tree(objects_dict, project_tree, target_children, '')
@@ -1164,8 +1178,8 @@ def write_tree(self, objects_dict, tree_node, children_array, current_subdir) ->
children_array.add_item(subdir_id)
subdir_dict.add_item('isa', 'PBXGroup')
subdir_dict.add_item('children', subdir_children)
- subdir_dict.add_item('name', f'"{subdir_name}"')
- subdir_dict.add_item('sourceTree', '""')
+ subdir_dict.add_item('name', subdir_name)
+ subdir_dict.add_item('sourceTree', '')
self.write_tree(objects_dict, subdir_node, subdir_children, os.path.join(current_subdir, subdir_name))
for target in tree_node.targets:
group_id = self.write_group_target_entry(objects_dict, target)
@@ -1247,8 +1261,8 @@ def generate_pbx_native_target(self, objects_dict: PbxDict) -> None:
generator_id += 1
- ntarget_dict.add_item('name', f'"{tname}"')
- ntarget_dict.add_item('productName', f'"{tname}"')
+ ntarget_dict.add_item('name', tname)
+ ntarget_dict.add_item('productName', tname)
ntarget_dict.add_item('productReference', self.target_filemap[tname], tname)
if isinstance(t, build.Executable):
typestr = 'com.apple.product-type.tool'
@@ -1274,11 +1288,11 @@ def generate_pbx_project(self, objects_dict: PbxDict) -> None:
project_dict.add_item('buildStyles', style_arr)
for name, idval in self.buildstylemap.items():
style_arr.add_item(idval, name)
- project_dict.add_item('compatibilityVersion', f'"{self.xcodeversion}"')
+ project_dict.add_item('compatibilityVersion', self.xcodeversion)
project_dict.add_item('hasScannedForEncodings', 0)
project_dict.add_item('mainGroup', self.maingroup_id)
- project_dict.add_item('projectDirPath', '"' + self.environment.get_source_dir() + '"')
- project_dict.add_item('projectRoot', '""')
+ project_dict.add_item('projectDirPath', self.environment.get_source_dir())
+ project_dict.add_item('projectRoot', '')
targets_arr = PbxArray()
project_dict.add_item('targets', targets_arr)
targets_arr.add_item(self.all_id, 'ALL_BUILD')
@@ -1307,7 +1321,7 @@ def generate_test_shell_build_phase(self, objects_dict: PbxDict) -> None:
shell_dict.add_item('shellPath', '/bin/sh')
cmd = mesonlib.get_meson_command() + ['test', '--no-rebuild', '-C', self.environment.get_build_dir()]
cmdstr = ' '.join(["'%s'" % i for i in cmd])
- shell_dict.add_item('shellScript', f'"{cmdstr}"')
+ shell_dict.add_item('shellScript', cmdstr)
shell_dict.add_item('showEnvVarsInLog', 0)
def generate_regen_shell_build_phase(self, objects_dict: PbxDict) -> None:
@@ -1322,7 +1336,7 @@ def generate_regen_shell_build_phase(self, objects_dict: PbxDict) -> None:
shell_dict.add_item('shellPath', '/bin/sh')
cmd = mesonlib.get_meson_command() + ['--internal', 'regencheck', os.path.join(self.environment.get_build_dir(), 'meson-private')]
cmdstr = ' '.join(["'%s'" % i for i in cmd])
- shell_dict.add_item('shellScript', f'"{cmdstr}"')
+ shell_dict.add_item('shellScript', cmdstr)
shell_dict.add_item('showEnvVarsInLog', 0)
def generate_custom_target_shell_build_phases(self, objects_dict: PbxDict) -> None:
@@ -1346,7 +1360,7 @@ def generate_custom_target_shell_build_phases(self, objects_dict: PbxDict) -> No
custom_dict.add_item('name', '"Generate {}."'.format(ofilenames[0]))
custom_dict.add_item('outputPaths', outarray)
for o in ofilenames:
- outarray.add_item(f'"{os.path.join(self.environment.get_build_dir(), o)}"')
+ outarray.add_item(os.path.join(self.environment.get_build_dir(), o))
custom_dict.add_item('runOnlyForDeploymentPostprocessing', 0)
custom_dict.add_item('shellPath', '/bin/sh')
workdir = self.environment.get_build_dir()
@@ -1354,7 +1368,7 @@ def generate_custom_target_shell_build_phases(self, objects_dict: PbxDict) -> No
for c in fixed_cmd:
quoted_cmd.append(c.replace('"', chr(92) + '"'))
cmdstr = ' '.join([f"\\'{x}\\'" for x in quoted_cmd])
- custom_dict.add_item('shellScript', f'"cd \'{workdir}\'; {cmdstr}"')
+ custom_dict.add_item('shellScript', f'cd \'{workdir}\'; {cmdstr}')
custom_dict.add_item('showEnvVarsInLog', 0)
def generate_generator_target_shell_build_phases(self, objects_dict: PbxDict) -> None:
@@ -1380,21 +1394,21 @@ def generate_single_generator_phase(self, tname, t, genlist, generator_id, objec
workdir = self.environment.get_build_dir()
target_private_dir = self.relpath(self.get_target_private_dir(t), self.get_target_dir(t))
gen_dict = PbxDict()
- objects_dict.add_item(self.shell_targets[(tname, generator_id)], gen_dict, f'"Generator {generator_id}/{tname}"')
+ objects_dict.add_item(self.shell_targets[(tname, generator_id)], gen_dict, f'Generator {generator_id}/{tname}')
infilelist = genlist.get_inputs()
outfilelist = genlist.get_outputs()
gen_dict.add_item('isa', 'PBXShellScriptBuildPhase')
gen_dict.add_item('buildActionMask', 2147483647)
gen_dict.add_item('files', PbxArray())
gen_dict.add_item('inputPaths', PbxArray())
- gen_dict.add_item('name', f'"Generator {generator_id}/{tname}"')
+ gen_dict.add_item('name', f'Generator {generator_id}/{tname}')
commands = [["cd", workdir]] # Array of arrays, each one a single command, will get concatenated below.
k = (tname, generator_id)
ofile_abs = self.generator_outputs[k]
outarray = PbxArray()
gen_dict.add_item('outputPaths', outarray)
for of in ofile_abs:
- outarray.add_item(f'"{of}"')
+ outarray.add_item(of)
for i in infilelist:
# This might be needed to be added to inputPaths. It's not done yet as it is
# unclear whether it is necessary, what actually happens when it is defined
@@ -1429,7 +1443,7 @@ def generate_single_generator_phase(self, tname, t, genlist, generator_id, objec
else:
q.append(c)
quoted_cmds.append(' '.join(q))
- cmdstr = '"' + ' && '.join(quoted_cmds) + '"'
+ cmdstr = ' && '.join(quoted_cmds)
gen_dict.add_item('shellScript', cmdstr)
gen_dict.add_item('showEnvVarsInLog', 0)
@@ -1454,7 +1468,9 @@ def generate_pbx_sources_build_phase(self, objects_dict: PbxDict) -> None:
file_arr.add_item(self.custom_target_output_buildfile[o],
os.path.join(self.environment.get_build_dir(), o))
elif isinstance(gt, build.CustomTargetIndex):
+ output_dir = self.get_custom_target_output_dir(gt)
for o in gt.get_outputs():
+ o = os.path.join(output_dir, o)
file_arr.add_item(self.custom_target_output_buildfile[o],
os.path.join(self.environment.get_build_dir(), o))
elif isinstance(gt, build.GeneratedList):
@@ -1499,14 +1515,14 @@ def generate_xc_build_configuration(self, objects_dict: PbxDict) -> None:
bt_dict.add_item('isa', 'XCBuildConfiguration')
settings_dict = PbxDict()
bt_dict.add_item('buildSettings', settings_dict)
- settings_dict.add_item('ARCHS', f'"{self.arch}"')
- settings_dict.add_item('BUILD_DIR', f'"{self.environment.get_build_dir()}"')
- settings_dict.add_item('BUILD_ROOT', '"$(BUILD_DIR)"')
+ settings_dict.add_item('ARCHS', self.arch)
+ settings_dict.add_item('BUILD_DIR', self.environment.get_build_dir())
+ settings_dict.add_item('BUILD_ROOT', '$(BUILD_DIR)')
settings_dict.add_item('ONLY_ACTIVE_ARCH', 'YES')
settings_dict.add_item('SWIFT_VERSION', '5.0')
- settings_dict.add_item('SDKROOT', '"macosx"')
- settings_dict.add_item('OBJROOT', '"$(BUILD_DIR)/build"')
- bt_dict.add_item('name', f'"{buildtype}"')
+ settings_dict.add_item('SDKROOT', 'macosx')
+ settings_dict.add_item('OBJROOT', '$(BUILD_DIR)/build')
+ bt_dict.add_item('name', buildtype)
# Then the all target.
for buildtype in self.buildtypes:
@@ -1519,7 +1535,7 @@ def generate_xc_build_configuration(self, objects_dict: PbxDict) -> None:
warn_array.add_item('"$(inherited)"')
settings_dict.add_item('WARNING_CFLAGS', warn_array)
- bt_dict.add_item('name', f'"{buildtype}"')
+ bt_dict.add_item('name', buildtype)
# Then the test target.
for buildtype in self.buildtypes:
@@ -1531,7 +1547,7 @@ def generate_xc_build_configuration(self, objects_dict: PbxDict) -> None:
warn_array = PbxArray()
settings_dict.add_item('WARNING_CFLAGS', warn_array)
warn_array.add_item('"$(inherited)"')
- bt_dict.add_item('name', f'"{buildtype}"')
+ bt_dict.add_item('name', buildtype)
# Now finally targets.
for target_name, target in self.build_targets.items():
@@ -1543,10 +1559,10 @@ def generate_xc_build_configuration(self, objects_dict: PbxDict) -> None:
bt_dict.add_item('isa', 'XCBuildConfiguration')
settings_dict = PbxDict()
bt_dict.add_item('buildSettings', settings_dict)
- settings_dict.add_item('ARCHS', f'"{self.arch}"')
+ settings_dict.add_item('ARCHS', self.arch)
settings_dict.add_item('ONLY_ACTIVE_ARCH', 'YES')
- settings_dict.add_item('SDKROOT', '"macosx"')
- bt_dict.add_item('name', f'"{buildtype}"')
+ settings_dict.add_item('SDKROOT', 'macosx')
+ bt_dict.add_item('name', buildtype)
def determine_internal_dep_link_args(self, target, buildtype):
links_dylib = False
@@ -1713,11 +1729,11 @@ def generate_single_build_target(self, objects_dict, target_name, target) -> Non
bt_dict.add_item('buildSettings', settings_dict)
settings_dict.add_item('COMBINE_HIDPI_IMAGES', 'YES')
if isinstance(target, build.SharedModule):
- settings_dict.add_item('DYLIB_CURRENT_VERSION', '""')
- settings_dict.add_item('DYLIB_COMPATIBILITY_VERSION', '""')
+ settings_dict.add_item('DYLIB_CURRENT_VERSION', '')
+ settings_dict.add_item('DYLIB_COMPATIBILITY_VERSION', '')
else:
if dylib_version is not None:
- settings_dict.add_item('DYLIB_CURRENT_VERSION', f'"{dylib_version}"')
+ settings_dict.add_item('DYLIB_CURRENT_VERSION', str(dylib_version))
if target.prefix:
settings_dict.add_item('EXECUTABLE_PREFIX', target.prefix)
if target.suffix:
@@ -1740,8 +1756,8 @@ def generate_single_build_target(self, objects_dict, target_name, target) -> Non
mlog.warning(f'Unsupported Xcode configuration: More than 1 precompiled header found "{pchs!s}". Target "{target.name}" might not compile correctly.')
relative_pch_path = os.path.join(target.get_subdir(), pchs[0]) # Path relative to target so it can be used with "$(PROJECT_DIR)"
settings_dict.add_item('GCC_PRECOMPILE_PREFIX_HEADER', 'YES')
- settings_dict.add_item('GCC_PREFIX_HEADER', f'"$(PROJECT_DIR)/{relative_pch_path}"')
- settings_dict.add_item('GCC_PREPROCESSOR_DEFINITIONS', '""')
+ settings_dict.add_item('GCC_PREFIX_HEADER', f'$(PROJECT_DIR)/{relative_pch_path}')
+ settings_dict.add_item('GCC_PREPROCESSOR_DEFINITIONS', '')
settings_dict.add_item('GCC_SYMBOLS_PRIVATE_EXTERN', 'NO')
header_arr = PbxArray()
unquoted_headers = []
@@ -1754,27 +1770,24 @@ def generate_single_build_target(self, objects_dict, target_name, target) -> Non
i = os.path.normpath(i)
unquoted_headers.append(i)
for i in unquoted_headers:
- header_arr.add_item(f'"\\"{i}\\""')
+ header_arr.add_item(f'"{i}"')
settings_dict.add_item('HEADER_SEARCH_PATHS', header_arr)
- settings_dict.add_item('INSTALL_PATH', f'"{install_path}"')
- settings_dict.add_item('LIBRARY_SEARCH_PATHS', '""')
+ settings_dict.add_item('INSTALL_PATH', install_path)
+ settings_dict.add_item('LIBRARY_SEARCH_PATHS', '')
if isinstance(target, build.SharedModule):
settings_dict.add_item('LIBRARY_STYLE', 'BUNDLE')
settings_dict.add_item('MACH_O_TYPE', 'mh_bundle')
elif isinstance(target, build.SharedLibrary):
settings_dict.add_item('LIBRARY_STYLE', 'DYNAMIC')
self.add_otherargs(settings_dict, langargs)
- settings_dict.add_item('OTHER_LDFLAGS', f'"{ldstr}"')
- settings_dict.add_item('OTHER_REZFLAGS', '""')
- if ' ' in product_name:
- settings_dict.add_item('PRODUCT_NAME', f'"{product_name}"')
- else:
- settings_dict.add_item('PRODUCT_NAME', product_name)
- settings_dict.add_item('SECTORDER_FLAGS', '""')
+ settings_dict.add_item('OTHER_LDFLAGS', ldstr)
+ settings_dict.add_item('OTHER_REZFLAGS', '')
+ settings_dict.add_item('PRODUCT_NAME', product_name)
+ settings_dict.add_item('SECTORDER_FLAGS', '')
if is_swift and bridging_header:
- settings_dict.add_item('SWIFT_OBJC_BRIDGING_HEADER', f'"{bridging_header}"')
- settings_dict.add_item('BUILD_DIR', f'"{symroot}"')
- settings_dict.add_item('OBJROOT', f'"{symroot}/build"')
+ settings_dict.add_item('SWIFT_OBJC_BRIDGING_HEADER', bridging_header)
+ settings_dict.add_item('BUILD_DIR', symroot)
+ settings_dict.add_item('OBJROOT', f'{symroot}/build')
sysheader_arr = PbxArray()
# XCode will change every -I flag that points inside these directories
# to an -isystem. Thus set nothing in it since we control our own
@@ -1799,7 +1812,7 @@ def add_otherargs(self, settings_dict, langargs):
if ' ' in a or "'" in a:
a = r'\"' + a + r'\"'
quoted_args.append(a)
- settings_dict.add_item(f'OTHER_{langname}FLAGS', '"' + ' '.join(quoted_args) + '"')
+ settings_dict.add_item(f'OTHER_{langname}FLAGS', ' '.join(quoted_args))
def generate_xc_configurationList(self, objects_dict: PbxDict) -> None:
# FIXME: sort items
diff --git a/mesonbuild/build.py b/mesonbuild/build.py
index 02d2bb94df83..f5be34daa05d 100644
--- a/mesonbuild/build.py
+++ b/mesonbuild/build.py
@@ -23,9 +23,11 @@
File, MesonException, MachineChoice, PerMachine, OrderedSet, listify,
extract_as_list, typeslistify, stringlistify, classify_unity_sources,
get_filenames_templates_dict, substitute_values, has_path_sep,
- OptionKey, PerMachineDefaultable,
+ PerMachineDefaultable,
MesonBugException, EnvironmentVariables, pickle_load,
)
+from .options import OptionKey
+
from .compilers import (
is_header, is_object, is_source, clink_langs, sort_clink, all_languages,
is_known_suffix, detect_static_linker
@@ -135,6 +137,7 @@ def get_target_macos_dylib_install_name(ld) -> str:
name.append('.dylib')
return ''.join(name)
+
class InvalidArguments(MesonException):
pass
@@ -236,7 +239,7 @@ class Build:
def __init__(self, environment: environment.Environment):
self.version = coredata.version
self.project_name = 'name of master project'
- self.project_version = None
+ self.project_version: T.Optional[str] = None
self.environment = environment
self.projects = {}
self.targets: 'T.OrderedDict[str, T.Union[CustomTarget, BuildTarget]]' = OrderedDict()
@@ -650,10 +653,20 @@ def process_kwargs_base(self, kwargs: T.Dict[str, T.Any]) -> None:
self.set_option_overrides(self.parse_overrides(kwargs))
+ def is_compiler_option_hack(self, key):
+ # FIXME this method must be deleted when OptionsView goes away.
+ # At that point the build target only stores the original string.
+ # The decision on how to use those pieces of data is done elsewhere.
+ from .compilers import all_languages
+ if '_' not in key.name:
+ return False
+ prefix = key.name.split('_')[0]
+ return prefix in all_languages
+
def set_option_overrides(self, option_overrides: T.Dict[OptionKey, str]) -> None:
self.options.overrides = {}
for k, v in option_overrides.items():
- if k.lang:
+ if self.is_compiler_option_hack(k):
self.options.overrides[k.evolve(machine=self.for_machine)] = v
else:
self.options.overrides[k] = v
@@ -745,6 +758,8 @@ def __init__(
self.pch: T.Dict[str, T.List[str]] = {}
self.extra_args: T.DefaultDict[str, T.List[str]] = kwargs.get('language_args', defaultdict(list))
self.sources: T.List[File] = []
+ # If the same source is defined multiple times, use it only once.
+ self.seen_sources: T.Set[File] = set()
self.generated: T.List['GeneratedTypes'] = []
self.extra_files: T.List[File] = []
self.d_features: DFeatures = {
@@ -755,6 +770,7 @@ def __init__(
}
self.pic = False
self.pie = False
+ self.both_lib: T.Optional[T.Union[StaticLibrary, SharedLibrary]] = None
# Track build_rpath entries so we can remove them at install time
self.rpath_dirs_to_remove: T.Set[bytes] = set()
self.process_sourcelist(sources)
@@ -770,8 +786,8 @@ def __init__(
# we have to call process_compilers() first and we need to process libraries
# from link_with and link_whole first.
# See https://github.com/mesonbuild/meson/pull/11957#issuecomment-1629243208.
- link_targets = extract_as_list(kwargs, 'link_with') + self.link_targets
- link_whole_targets = extract_as_list(kwargs, 'link_whole') + self.link_whole_targets
+ link_targets = self.extract_targets_as_list(kwargs, 'link_with')
+ link_whole_targets = self.extract_targets_as_list(kwargs, 'link_whole')
self.link_targets.clear()
self.link_whole_targets.clear()
self.link(link_targets)
@@ -867,12 +883,11 @@ def process_sourcelist(self, sources: T.List['SourceOutputs']) -> None:
(static as they are only regenerated if meson itself is regenerated)
3. Sources files generated by another target or a Generator (generated)
"""
- added_sources: T.Set[File] = set() # If the same source is defined multiple times, use it only once.
for s in sources:
if isinstance(s, File):
- if s not in added_sources:
+ if s not in self.seen_sources:
self.sources.append(s)
- added_sources.add(s)
+ self.seen_sources.add(s)
elif isinstance(s, (CustomTarget, CustomTargetIndex, GeneratedList)):
self.generated.append(s)
@@ -1000,7 +1015,7 @@ def process_compilers(self) -> T.List[str]:
if 'vala' in self.compilers and 'c' not in self.compilers:
self.compilers['c'] = self.all_compilers['c']
if 'cython' in self.compilers:
- key = OptionKey('language', machine=self.for_machine, lang='cython')
+ key = OptionKey('cython_language', machine=self.for_machine)
value = self.get_option(key)
try:
@@ -1709,7 +1724,7 @@ def check_module_linking(self):
else:
mlog.deprecation(f'target {self.name} links against shared module {link_target.name}, which is incorrect.'
'\n '
- f'This will be an error in the future, so please use shared_library() for {link_target.name} instead.'
+ f'This will be an error in meson 2.0, so please use shared_library() for {link_target.name} instead.'
'\n '
f'If shared_module() was used for {link_target.name} because it has references to undefined symbols,'
'\n '
@@ -1738,6 +1753,28 @@ def process_vs_module_defs_kw(self, kwargs: T.Dict[str, T.Any]) -> None:
'a file object, a Custom Target, or a Custom Target Index')
self.process_link_depends(path)
+ def extract_targets_as_list(self, kwargs: T.Dict[str, T.Union[LibTypes, T.Sequence[LibTypes]]], key: T.Literal['link_with', 'link_whole']) -> T.List[LibTypes]:
+ bl_type = self.environment.coredata.get_option(OptionKey('default_both_libraries'))
+ if bl_type == 'auto':
+ if isinstance(self, StaticLibrary):
+ bl_type = 'static'
+ elif isinstance(self, SharedLibrary):
+ bl_type = 'shared'
+
+ self_libs: T.List[LibTypes] = self.link_targets if key == 'link_with' else self.link_whole_targets
+
+ lib_list = []
+ for lib in listify(kwargs.get(key, [])) + self_libs:
+ if isinstance(lib, (Target, BothLibraries)):
+ lib_list.append(lib.get(bl_type))
+ else:
+ lib_list.append(lib)
+ return lib_list
+
+ def get(self, lib_type: T.Literal['static', 'shared', 'auto']) -> LibTypes:
+ """Base case used by BothLibraries"""
+ return self
+
class FileInTargetPrivateDir:
"""Represents a file with the path '/path/to/build/target_private_dir/fname'.
target_private_dir is the return value of get_target_private_dir which is e.g. 'subdir/target.p'.
@@ -2121,6 +2158,11 @@ def post_init(self) -> None:
if self.rust_crate_type == 'staticlib':
# FIXME: In the case of no-std we should not add those libraries,
# but we have no way to know currently.
+
+ # XXX:
+ # In the case of no-std, we are likely in a bare metal case
+ # and thus, machine_info kernel should be set to 'none'.
+ # In that case, native_static_libs list is empty.
rustc = self.compilers['rust']
d = dependencies.InternalDependency('undefined', [], [],
rustc.native_static_libs,
@@ -2182,6 +2224,14 @@ def is_linkable_target(self):
def is_internal(self) -> bool:
return not self.install
+ def set_shared(self, shared_library: SharedLibrary) -> None:
+ self.both_lib = shared_library
+
+ def get(self, lib_type: T.Literal['static', 'shared', 'auto']) -> LibTypes:
+ if lib_type == 'shared':
+ return self.both_lib or self
+ return self
+
class SharedLibrary(BuildTarget):
known_kwargs = known_shlib_kwargs
@@ -2448,6 +2498,14 @@ def type_suffix(self):
def is_linkable_target(self):
return True
+ def set_static(self, static_library: StaticLibrary) -> None:
+ self.both_lib = static_library
+
+ def get(self, lib_type: T.Literal['static', 'shared', 'auto']) -> LibTypes:
+ if lib_type == 'static':
+ return self.both_lib or self
+ return self
+
# A shared library that is meant to be used with dlopen rather than linking
# into something else.
class SharedModule(SharedLibrary):
@@ -2484,8 +2542,8 @@ def get_default_install_dir(self) -> T.Union[T.Tuple[str, str], T.Tuple[None, No
return self.environment.get_shared_module_dir(), '{moduledir_shared}'
class BothLibraries(SecondLevelHolder):
- def __init__(self, shared: SharedLibrary, static: StaticLibrary) -> None:
- self._preferred_library = 'shared'
+ def __init__(self, shared: SharedLibrary, static: StaticLibrary, preferred_library: T.Literal['shared', 'static']) -> None:
+ self._preferred_library = preferred_library
self.shared = shared
self.static = static
self.subproject = self.shared.subproject
@@ -2493,13 +2551,23 @@ def __init__(self, shared: SharedLibrary, static: StaticLibrary) -> None:
def __repr__(self) -> str:
return f'<BothLibraries: static={repr(self.static)}; shared={repr(self.shared)}>'
- def get_default_object(self) -> BuildTarget:
+ def get(self, lib_type: T.Literal['static', 'shared', 'auto']) -> LibTypes:
+ if lib_type == 'static':
+ return self.static
+ if lib_type == 'shared':
+ return self.shared
+ return self.get_default_object()
+
+ def get_default_object(self) -> T.Union[StaticLibrary, SharedLibrary]:
if self._preferred_library == 'shared':
return self.shared
elif self._preferred_library == 'static':
return self.static
raise MesonBugException(f'self._preferred_library == "{self._preferred_library}" is neither "shared" nor "static".')
+ def get_id(self) -> str:
+ return self.get_default_object().get_id()
+
class CommandBase:
depend_files: T.List[File]
@@ -2557,6 +2625,10 @@ def get_internal_static_libraries(self) -> OrderedSet[BuildTargetTypes]:
def get_internal_static_libraries_recurse(self, result: OrderedSet[BuildTargetTypes]) -> None:
pass
+ def get(self, lib_type: T.Literal['static', 'shared', 'auto']) -> LibTypes:
+ """Base case used by BothLibraries"""
+ return self
+
class CustomTarget(Target, CustomTargetBase, CommandBase):
typename = 'custom'
@@ -2878,7 +2950,7 @@ class AliasTarget(RunTarget):
typename = 'alias'
- def __init__(self, name: str, dependencies: T.Sequence['Target'],
+ def __init__(self, name: str, dependencies: T.Sequence[Target],
subdir: str, subproject: str, environment: environment.Environment):
super().__init__(name, [], dependencies, subdir, subproject, environment)
diff --git a/mesonbuild/cargo/builder.py b/mesonbuild/cargo/builder.py
index 99659da11195..112c7c580d49 100644
--- a/mesonbuild/cargo/builder.py
+++ b/mesonbuild/cargo/builder.py
@@ -133,7 +133,7 @@ def equal(self, lhs: mparser.BaseNode, rhs: mparser.BaseNode) -> mparser.Compari
:param lhs: The left hand side of the equal
:param rhs: the right hand side of the equal
- :return: A compraison node
+ :return: A comparison node
"""
return mparser.ComparisonNode('==', lhs, self._symbol('=='), rhs)
@@ -142,7 +142,7 @@ def not_equal(self, lhs: mparser.BaseNode, rhs: mparser.BaseNode) -> mparser.Com
:param lhs: The left hand side of the "!="
:param rhs: the right hand side of the "!="
- :return: A compraison node
+ :return: A comparison node
"""
return mparser.ComparisonNode('!=', lhs, self._symbol('!='), rhs)
@@ -151,7 +151,7 @@ def in_(self, lhs: mparser.BaseNode, rhs: mparser.BaseNode) -> mparser.Compariso
:param lhs: The left hand side of the "in"
:param rhs: the right hand side of the "in"
- :return: A compraison node
+ :return: A comparison node
"""
return mparser.ComparisonNode('in', lhs, self._symbol('in'), rhs)
@@ -160,7 +160,7 @@ def not_in(self, lhs: mparser.BaseNode, rhs: mparser.BaseNode) -> mparser.Compar
:param lhs: The left hand side of the "not in"
:param rhs: the right hand side of the "not in"
- :return: A compraison node
+ :return: A comparison node
"""
return mparser.ComparisonNode('notin', lhs, self._symbol('not in'), rhs)
diff --git a/mesonbuild/cargo/interpreter.py b/mesonbuild/cargo/interpreter.py
index 029e5a1b8006..a95ee5be39be 100644
--- a/mesonbuild/cargo/interpreter.py
+++ b/mesonbuild/cargo/interpreter.py
@@ -23,7 +23,8 @@
from . import builder
from . import version
-from ..mesonlib import MesonException, Popen_safe, OptionKey
+from ..mesonlib import MesonException, Popen_safe
+from ..options import OptionKey
from .. import coredata, options, mlog
from ..wrap.wrap import PackageDefinition
@@ -105,7 +106,7 @@ def _fixup_raw_mappings(d: T.Union[manifest.BuildTarget, manifest.LibTarget, man
This does the following:
* replaces any `-` with `_`, cargo likes the former, but python dicts make
keys with `-` in them awkward to work with
- * Convert Dependndency versions from the cargo format to something meson
+ * Convert Dependency versions from the cargo format to something meson
understands
:param d: The mapping to fix
@@ -146,6 +147,7 @@ class Package:
publish: bool = True
metadata: T.Dict[str, T.Dict[str, str]] = dataclasses.field(default_factory=dict)
default_run: T.Optional[str] = None
+ autolib: bool = True
autobins: bool = True
autoexamples: bool = True
autotests: bool = True
@@ -731,7 +733,7 @@ def interpret(subp_name: str, subdir: str, env: Environment) -> T.Tuple[mparser.
ast += _create_meson_subdir(cargo, build)
# Libs are always auto-discovered and there's no other way to handle them,
- # which is unfortunate for reproducability
+ # which is unfortunate for reproducibility
if os.path.exists(os.path.join(env.source_dir, cargo.subdir, cargo.path, cargo.lib.path)):
for crate_type in cargo.lib.crate_type:
ast.extend(_create_lib(cargo, build, crate_type))
diff --git a/mesonbuild/cargo/manifest.py b/mesonbuild/cargo/manifest.py
index 50c048991333..95b0d4bb8cb2 100644
--- a/mesonbuild/cargo/manifest.py
+++ b/mesonbuild/cargo/manifest.py
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: Apache-2.0
-# Copyright © 2022-2023 Intel Corporation
+# Copyright © 2022-2024 Intel Corporation
"""Type definitions for cargo manifest files."""
@@ -33,6 +33,7 @@
'publish': bool,
'metadata': T.Dict[str, T.Dict[str, str]],
'default-run': str,
+ 'autolib': bool,
'autobins': bool,
'autoexamples': bool,
'autotests': bool,
@@ -65,6 +66,7 @@ class FixedPackage(TypedDict, total=False):
publish: bool
metadata: T.Dict[str, T.Dict[str, str]]
default_run: str
+ autolib: bool
autobins: bool
autoexamples: bool
autotests: bool
diff --git a/mesonbuild/cmake/common.py b/mesonbuild/cmake/common.py
index ad4ec6b1a002..d9ff559971f7 100644
--- a/mesonbuild/cmake/common.py
+++ b/mesonbuild/cmake/common.py
@@ -3,7 +3,8 @@
from __future__ import annotations
-from ..mesonlib import MesonException, OptionKey
+from ..mesonlib import MesonException
+from ..options import OptionKey
from .. import mlog
from pathlib import Path
import typing as T
diff --git a/mesonbuild/cmake/executor.py b/mesonbuild/cmake/executor.py
index 392063d58590..cbe75f36c688 100644
--- a/mesonbuild/cmake/executor.py
+++ b/mesonbuild/cmake/executor.py
@@ -10,7 +10,8 @@
import os
from .. import mlog
-from ..mesonlib import PerMachine, Popen_safe, version_compare, is_windows, OptionKey
+from ..mesonlib import PerMachine, Popen_safe, version_compare, is_windows
+from ..options import OptionKey
from ..programs import find_external_program, NonExistingExternalProgram
if T.TYPE_CHECKING:
diff --git a/mesonbuild/cmake/generator.py b/mesonbuild/cmake/generator.py
index b78860564c34..a617f8adb9b6 100644
--- a/mesonbuild/cmake/generator.py
+++ b/mesonbuild/cmake/generator.py
@@ -20,7 +20,7 @@ def parse_generator_expressions(
'''Parse CMake generator expressions
Most generator expressions are simply ignored for
- simplicety, however some are required for some common
+ simplicity, however some are required for some common
use cases.
'''
diff --git a/mesonbuild/cmake/interpreter.py b/mesonbuild/cmake/interpreter.py
index f67591f68b98..fafee86abd4f 100644
--- a/mesonbuild/cmake/interpreter.py
+++ b/mesonbuild/cmake/interpreter.py
@@ -8,6 +8,7 @@
from functools import lru_cache
from os import environ
from pathlib import Path
+import itertools
import re
import typing as T
@@ -18,7 +19,8 @@
from .traceparser import CMakeTraceParser
from .tracetargets import resolve_cmake_trace_targets
from .. import mlog, mesonlib
-from ..mesonlib import MachineChoice, OrderedSet, path_is_in_root, relative_to_if_possible, OptionKey
+from ..mesonlib import MachineChoice, OrderedSet, path_is_in_root, relative_to_if_possible
+from ..options import OptionKey
from ..mesondata import DataFile
from ..compilers.compilers import assembler_suffixes, lang_suffixes, header_suffixes, obj_suffixes, lib_suffixes, is_header
from ..programs import ExternalProgram
@@ -134,10 +136,7 @@ def __init__(self, build_dir: Path):
self.build_dir = build_dir
def add(self, tgt: T.Union['ConverterTarget', 'ConverterCustomTarget']) -> None:
- def assign_keys(keys: T.List[str]) -> None:
- for i in [x for x in keys if x]:
- self.tgt_map[i] = tgt
- keys = [self._target_key(tgt.cmake_name)]
+ keys: T.List[T.Optional[str]] = [self._target_key(tgt.cmake_name)]
if isinstance(tgt, ConverterTarget):
keys += [tgt.full_name]
keys += [self._rel_artifact_key(x) for x in tgt.artifacts]
@@ -145,9 +144,11 @@ def assign_keys(keys: T.List[str]) -> None:
if isinstance(tgt, ConverterCustomTarget):
keys += [self._rel_generated_file_key(x) for x in tgt.original_outputs]
keys += [self._base_generated_file_key(x) for x in tgt.original_outputs]
- assign_keys(keys)
+ for k in keys:
+ if k is not None:
+ self.tgt_map[k] = tgt
- def _return_first_valid_key(self, keys: T.List[str]) -> T.Optional[T.Union['ConverterTarget', 'ConverterCustomTarget']]:
+ def _return_first_valid_key(self, keys: T.List[T.Optional[str]]) -> T.Optional[T.Union['ConverterTarget', 'ConverterCustomTarget']]:
for i in keys:
if i and i in self.tgt_map:
return self.tgt_map[i]
@@ -165,7 +166,7 @@ def executable(self, name: str) -> T.Optional['ConverterTarget']:
return tgt
def artifact(self, name: str) -> T.Optional[T.Union['ConverterTarget', 'ConverterCustomTarget']]:
- keys = []
+ keys: T.List[T.Optional[str]] = []
candidates = [name, OutputTargetMap.rm_so_version.sub('', name)]
for i in lib_suffixes:
if not name.endswith('.' + i):
@@ -222,6 +223,7 @@ def __init__(self, target: CMakeTarget, env: 'Environment', for_machine: Machine
self.install_dir: T.Optional[Path] = None
self.link_libraries = target.link_libraries
self.link_flags = target.link_flags + target.link_lang_flags
+ self.public_link_flags: T.List[str] = []
self.depends_raw: T.List[str] = []
self.depends: T.List[T.Union[ConverterTarget, ConverterCustomTarget]] = []
@@ -346,6 +348,7 @@ def postprocess(self, output_target_map: OutputTargetMap, root_src_dir: Path, su
rtgt = resolve_cmake_trace_targets(self.cmake_name, trace, self.env)
self.includes += [Path(x) for x in rtgt.include_directories]
self.link_flags += rtgt.link_flags
+ self.public_link_flags += rtgt.public_link_flags
self.public_compile_opts += rtgt.public_compile_opts
self.link_libraries += rtgt.libraries
@@ -414,11 +417,14 @@ def rel_path(x: Path, is_header: bool, is_generated: bool) -> T.Optional[Path]:
return x.relative_to(root_src_dir)
return x
+ def non_optional(inputs: T.Iterable[T.Optional[Path]]) -> T.List[Path]:
+ return [p for p in inputs if p is not None]
+
build_dir_rel = self.build_dir.relative_to(Path(self.env.get_build_dir()) / subdir)
- self.generated_raw = [rel_path(x, False, True) for x in self.generated_raw]
- self.includes = list(OrderedSet([rel_path(x, True, False) for x in OrderedSet(self.includes)] + [build_dir_rel]))
- self.sys_includes = list(OrderedSet([rel_path(x, True, False) for x in OrderedSet(self.sys_includes)]))
- self.sources = [rel_path(x, False, False) for x in self.sources]
+ self.generated_raw = non_optional(rel_path(x, False, True) for x in self.generated_raw)
+ self.includes = non_optional(itertools.chain((rel_path(x, True, False) for x in OrderedSet(self.includes)), [build_dir_rel]))
+ self.sys_includes = non_optional(rel_path(x, True, False) for x in OrderedSet(self.sys_includes))
+ self.sources = non_optional(rel_path(x, False, False) for x in self.sources)
# Resolve custom targets
for gen_file in self.generated_raw:
@@ -428,14 +434,9 @@ def rel_path(x: Path, is_header: bool, is_generated: bool) -> T.Optional[Path]:
ref = ctgt.get_ref(gen_file)
assert isinstance(ref, CustomTargetReference) and ref.valid()
self.generated_ctgt += [ref]
- elif gen_file is not None:
+ else:
self.generated += [gen_file]
- # Remove delete entries
- self.includes = [x for x in self.includes if x is not None]
- self.sys_includes = [x for x in self.sys_includes if x is not None]
- self.sources = [x for x in self.sources if x is not None]
-
# Make sure '.' is always in the include directories
if Path('.') not in self.includes:
self.includes += [Path('.')]
@@ -486,7 +487,7 @@ def process_object_libs(self, obj_target_list: T.List['ConverterTarget'], linker
source_files = [x.name for x in i.sources + i.generated]
for j in stem:
# On some platforms (specifically looking at you Windows with vs20xy backend) CMake does
- # not produce object files with the format `foo.cpp.obj`, instead it skipps the language
+ # not produce object files with the format `foo.cpp.obj`, instead it skips the language
# suffix and just produces object files like `foo.obj`. Thus we have to do our best to
# undo this step and guess the correct language suffix of the object file. This is done
# by trying all language suffixes meson knows and checking if one of them fits.
@@ -532,7 +533,7 @@ def _all_source_suffixes(self) -> 'ImmutableListProtocol[str]':
@lru_cache(maxsize=None)
def _all_lang_stds(self, lang: str) -> 'ImmutableListProtocol[str]':
try:
- res = self.env.coredata.optstore.get_value_object(OptionKey('std', machine=MachineChoice.BUILD, lang=lang)).choices
+ res = self.env.coredata.optstore.get_value_object(OptionKey(f'{lang}_std', machine=MachineChoice.BUILD)).choices
except KeyError:
return []
@@ -1059,7 +1060,9 @@ def extract_tgt(tgt: T.Union[ConverterTarget, ConverterCustomTarget, CustomTarge
def detect_cycle(tgt: T.Union[ConverterTarget, ConverterCustomTarget]) -> None:
if tgt.name in processing:
- raise CMakeException('Cycle in CMake inputs/dependencies detected')
+ processing.append(tgt.name)
+ stack = ' -> '.join(processing)
+ raise CMakeException(f'Cycle in CMake inputs/dependencies detected: {stack}')
processing.append(tgt.name)
def resolve_ctgt_ref(ref: CustomTargetReference) -> T.Union[IdNode, IndexNode]:
@@ -1166,12 +1169,18 @@ def process_target(tgt: ConverterTarget) -> None:
# declare_dependency kwargs
dep_kwargs: TYPE_mixed_kwargs = {
- 'link_args': tgt.link_flags + tgt.link_libraries,
'link_with': id_node(tgt_var),
'compile_args': tgt.public_compile_opts,
'include_directories': id_node(inc_var),
}
+ # Static libraries need all link options and transient dependencies, but other
+ # libraries should only use the link flags from INTERFACE_LINK_OPTIONS.
+ if tgt_func == 'static_library':
+ dep_kwargs['link_args'] = tgt.link_flags + tgt.link_libraries
+ else:
+ dep_kwargs['link_args'] = tgt.public_link_flags
+
if dependencies:
generated += dependencies
diff --git a/mesonbuild/cmake/toolchain.py b/mesonbuild/cmake/toolchain.py
index 89d5d84449e0..9eb961c52bc7 100644
--- a/mesonbuild/cmake/toolchain.py
+++ b/mesonbuild/cmake/toolchain.py
@@ -198,6 +198,8 @@ def is_cmdline_option(compiler: 'Compiler', arg: str) -> bool:
if compiler.get_argument_syntax() == 'msvc':
return arg.startswith('/')
else:
+ if compiler.exelist[0] == 'zig' and arg in {'ar', 'cc', 'c++', 'dlltool', 'lib', 'ranlib', 'objcopy', 'rc'}:
+ return True
return arg.startswith('-')
def update_cmake_compiler_state(self) -> None:
@@ -210,7 +212,7 @@ def update_cmake_compiler_state(self) -> None:
languages = list(self.compilers.keys())
lang_ids = [language_map.get(x, x.upper()) for x in languages]
cmake_content = dedent(f'''
- cmake_minimum_required(VERSION 3.7)
+ cmake_minimum_required(VERSION 3.10)
project(CompInfo {' '.join(lang_ids)})
''')
@@ -230,10 +232,15 @@ def update_cmake_compiler_state(self) -> None:
cmake_args += trace.trace_args()
cmake_args += cmake_get_generator_args(self.env)
cmake_args += [f'-DCMAKE_TOOLCHAIN_FILE={temp_toolchain_file.as_posix()}', '.']
- rc, _, raw_trace = self.cmakebin.call(cmake_args, build_dir=build_dir, disable_cache=True)
+ rc, raw_stdout, raw_trace = self.cmakebin.call(cmake_args, build_dir=build_dir, disable_cache=True)
if rc != 0:
mlog.warning('CMake Toolchain: Failed to determine CMake compilers state')
+ mlog.debug(f' -- return code: {rc}')
+ for line in raw_stdout.split('\n'):
+ mlog.debug(f' -- stdout: {line.rstrip()}')
+ for line in raw_trace.split('\n'):
+ mlog.debug(f' -- stderr: {line.rstrip()}')
return
# Parse output
diff --git a/mesonbuild/cmake/traceparser.py b/mesonbuild/cmake/traceparser.py
index 69e4131823fa..38a2d31a595a 100644
--- a/mesonbuild/cmake/traceparser.py
+++ b/mesonbuild/cmake/traceparser.py
@@ -165,7 +165,7 @@ def requires_stderr(self) -> bool:
def parse(self, trace: T.Optional[str] = None) -> None:
# First load the trace (if required)
if not self.requires_stderr():
- if not self.trace_file_path.exists and not self.trace_file_path.is_file():
+ if not self.trace_file_path.is_file():
raise CMakeException(f'CMake: Trace file "{self.trace_file_path!s}" not found')
trace = self.trace_file_path.read_text(errors='ignore', encoding='utf-8')
if not trace:
@@ -637,7 +637,7 @@ def _cmake_target_link_options(self, tline: CMakeTraceLine) -> None:
def _cmake_target_link_libraries(self, tline: CMakeTraceLine) -> None:
# DOC: https://cmake.org/cmake/help/latest/command/target_link_libraries.html
- self._parse_common_target_options('target_link_options', 'LINK_LIBRARIES', 'INTERFACE_LINK_LIBRARIES', tline)
+ self._parse_common_target_options('target_link_libraries', 'LINK_LIBRARIES', 'INTERFACE_LINK_LIBRARIES', tline)
def _cmake_message(self, tline: CMakeTraceLine) -> None:
# DOC: https://cmake.org/cmake/help/latest/command/message.html
diff --git a/mesonbuild/cmake/tracetargets.py b/mesonbuild/cmake/tracetargets.py
index 5a9d35284e29..2cc0c1722c3a 100644
--- a/mesonbuild/cmake/tracetargets.py
+++ b/mesonbuild/cmake/tracetargets.py
@@ -42,6 +42,7 @@ class ResolvedTarget:
def __init__(self) -> None:
self.include_directories: T.List[str] = []
self.link_flags: T.List[str] = []
+ self.public_link_flags: T.List[str] = []
self.public_compile_opts: T.List[str] = []
self.libraries: T.List[str] = []
@@ -111,7 +112,8 @@ def resolve_cmake_trace_targets(target_name: str,
res.include_directories += [x for x in tgt.properties['INTERFACE_INCLUDE_DIRECTORIES'] if x]
if 'INTERFACE_LINK_OPTIONS' in tgt.properties:
- res.link_flags += [x for x in tgt.properties['INTERFACE_LINK_OPTIONS'] if x]
+ res.public_link_flags += [x for x in tgt.properties['INTERFACE_LINK_OPTIONS'] if x]
+ res.link_flags += res.public_link_flags
if 'INTERFACE_COMPILE_DEFINITIONS' in tgt.properties:
res.public_compile_opts += ['-D' + re.sub('^-D', '', x) for x in tgt.properties['INTERFACE_COMPILE_DEFINITIONS'] if x]
diff --git a/mesonbuild/compilers/asm.py b/mesonbuild/compilers/asm.py
index e25f18d6746d..8cd5e28dc47f 100644
--- a/mesonbuild/compilers/asm.py
+++ b/mesonbuild/compilers/asm.py
@@ -3,7 +3,8 @@
import os
import typing as T
-from ..mesonlib import EnvironmentException, OptionKey, get_meson_command
+from ..mesonlib import EnvironmentException, get_meson_command
+from ..options import OptionKey
from .compilers import Compiler
from .mixins.metrowerks import MetrowerksCompiler, mwasmarm_instruction_set_args, mwasmeppc_instruction_set_args
@@ -157,7 +158,8 @@ class MasmCompiler(Compiler):
def get_compile_only_args(self) -> T.List[str]:
return ['/c']
- def get_argument_syntax(self) -> str:
+ @staticmethod
+ def get_argument_syntax() -> str:
return 'msvc'
def needs_static_linker(self) -> bool:
diff --git a/mesonbuild/compilers/c.py b/mesonbuild/compilers/c.py
index cbc1bea95d6e..f67281f04a51 100644
--- a/mesonbuild/compilers/c.py
+++ b/mesonbuild/compilers/c.py
@@ -95,7 +95,7 @@ def has_header_symbol(self, hname: str, symbol: str, prefix: str,
def get_options(self) -> 'MutableKeyedOptionDictType':
opts = super().get_options()
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
opts.update({
key: options.UserStdOption('C', _ALL_STDS),
})
@@ -128,7 +128,7 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
stds += ['c2x']
if version_compare(self.version, self._C23_VERSION):
stds += ['c23']
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std_opt = opts[key]
assert isinstance(std_opt, options.UserStdOption), 'for mypy'
std_opt.set_versions(stds, gnu=True)
@@ -157,7 +157,7 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
self.update_options(
opts,
self.create_option(options.UserArrayOption,
- self.form_langopt_key('winlibs'),
+ self.form_compileropt_key('winlibs'),
'Standard Win libraries to link against',
gnu_winlibs),
)
@@ -165,7 +165,7 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
args = []
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std = options.get_value(key)
if std != 'none':
args.append('-std=' + std)
@@ -174,7 +174,7 @@ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]
def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
if self.info.is_windows() or self.info.is_cygwin():
# without a typedict mypy can't understand this.
- key = self.form_langopt_key('winlibs')
+ key = self.form_compileropt_key('winlibs')
libs = options.get_value(key).copy()
assert isinstance(libs, list)
for l in libs:
@@ -213,7 +213,7 @@ class EmscriptenCCompiler(EmscriptenMixin, ClangCCompiler):
_C17_VERSION = '>=1.38.35'
_C18_VERSION = '>=1.38.35'
_C2X_VERSION = '>=1.38.35' # 1.38.35 used Clang 9.0.0
- _C23_VERSION = '>=3.0.0' # 3.0.0 used Clang 18.0.0
+ _C23_VERSION = '>=3.1.45' # 3.1.45 used Clang 18.0.0
def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
info: 'MachineInfo',
@@ -250,7 +250,7 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
def get_options(self) -> 'MutableKeyedOptionDictType':
opts = CCompiler.get_options(self)
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std_opt = opts[key]
assert isinstance(std_opt, options.UserStdOption), 'for mypy'
std_opt.set_versions(['c90', 'c99', 'c11'], gnu=True)
@@ -258,7 +258,7 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
args = []
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std = options.get_value(key)
if std != 'none':
args.append('-std=' + std)
@@ -302,7 +302,7 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
stds += ['c2x']
if version_compare(self.version, self._C23_VERSION):
stds += ['c23']
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std_opt = opts[key]
assert isinstance(std_opt, options.UserStdOption), 'for mypy'
std_opt.set_versions(stds, gnu=True)
@@ -310,7 +310,7 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
self.update_options(
opts,
self.create_option(options.UserArrayOption,
- key.evolve('winlibs'),
+ key.evolve('c_winlibs'),
'Standard Win libraries to link against',
gnu_winlibs),
)
@@ -318,7 +318,7 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
args = []
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std = options.get_value(key)
if std != 'none':
args.append('-std=' + std)
@@ -327,7 +327,7 @@ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]
def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
if self.info.is_windows() or self.info.is_cygwin():
# without a typeddict mypy can't figure this out
- key = self.form_langopt_key('winlibs')
+ key = self.form_compileropt_key('winlibs')
libs: T.List[str] = options.get_value(key).copy()
assert isinstance(libs, list)
for l in libs:
@@ -361,6 +361,14 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
info, linker=linker, full_version=full_version)
PGICompiler.__init__(self)
+ def get_options(self) -> 'MutableKeyedOptionDictType':
+ opts = CCompiler.get_options(self)
+ cppstd_choices = ['c89', 'c90', 'c99', 'c11', 'c17', 'c18']
+ std_opt = opts[self.form_compileropt_key('std')]
+ assert isinstance(std_opt, options.UserStdOption), 'for mypy'
+ std_opt.set_versions(cppstd_choices, gnu=True)
+ return opts
+
class ElbrusCCompiler(ElbrusCompiler, CCompiler):
def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
@@ -384,7 +392,7 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
stds += ['c90', 'c1x', 'gnu90', 'gnu1x', 'iso9899:2011']
if version_compare(self.version, '>=1.26.00'):
stds += ['c17', 'c18', 'iso9899:2017', 'iso9899:2018', 'gnu17', 'gnu18']
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std_opt = opts[key]
assert isinstance(std_opt, options.UserStdOption), 'for mypy'
std_opt.set_versions(stds)
@@ -424,7 +432,7 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
stds = ['c89', 'c99']
if version_compare(self.version, '>=16.0.0'):
stds += ['c11']
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std_opt = opts[key]
assert isinstance(std_opt, options.UserStdOption), 'for mypy'
std_opt.set_versions(stds, gnu=True)
@@ -432,7 +440,7 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
args = []
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std = options.get_value(key)
if std != 'none':
args.append('-std=' + std)
@@ -453,7 +461,7 @@ def get_options(self) -> MutableKeyedOptionDictType:
super().get_options(),
self.create_option(
options.UserArrayOption,
- self.form_langopt_key('winlibs'),
+ self.form_compileropt_key('winlibs'),
'Windows libs to link against.',
msvc_winlibs,
),
@@ -461,7 +469,7 @@ def get_options(self) -> MutableKeyedOptionDictType:
def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
# need a TypeDict to make this work
- key = self.form_langopt_key('winlibs')
+ key = self.form_compileropt_key('winlibs')
libs = options.get_value(key).copy()
assert isinstance(libs, list)
for l in libs:
@@ -490,7 +498,7 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
stds += ['c11']
if version_compare(self.version, self._C17_VERSION):
stds += ['c17', 'c18']
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std_opt = opts[key]
assert isinstance(std_opt, options.UserStdOption), 'for mypy'
std_opt.set_versions(stds, gnu=True, gnu_deprecated=True)
@@ -498,7 +506,7 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
args = []
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std = options.get_value(key)
# As of MVSC 16.8, /std:c11 and /std:c17 are the only valid C standard options.
if std == 'c11':
@@ -519,7 +527,7 @@ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoic
ClangClCompiler.__init__(self, target)
def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std = options.get_value(key)
if std != "none":
return [f'/clang:-std={std}']
@@ -541,7 +549,7 @@ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoic
def get_options(self) -> 'MutableKeyedOptionDictType':
opts = super().get_options()
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
# To shut up mypy.
if isinstance(opts, dict):
raise RuntimeError('This is a transitory issue that should not happen. Please report with full backtrace.')
@@ -552,7 +560,7 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
args = []
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std = options.get_value(key)
if std == 'c89':
mlog.log("ICL doesn't explicitly implement c89, setting the standard to 'none', which is close.", once=True)
@@ -578,7 +586,7 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
def get_options(self) -> 'MutableKeyedOptionDictType':
opts = CCompiler.get_options(self)
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std_opt = opts[key]
assert isinstance(std_opt, options.UserStdOption), 'for mypy'
std_opt.set_versions(['c89', 'c99', 'c11'])
@@ -586,7 +594,7 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
args = []
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std = options.get_value(key)
if std != 'none':
args.append('--' + std)
@@ -608,7 +616,7 @@ def get_always_args(self) -> T.List[str]:
def get_options(self) -> 'MutableKeyedOptionDictType':
opts = CCompiler.get_options(self)
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std_opt = opts[key]
assert isinstance(std_opt, options.UserStdOption), 'for mypy'
std_opt.set_versions(['c89', 'c99'])
@@ -619,7 +627,7 @@ def get_no_stdinc_args(self) -> T.List[str]:
def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
args = []
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std = options.get_value(key)
if std == 'c89':
args.append('-lang=c')
@@ -656,7 +664,7 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
def get_options(self) -> 'MutableKeyedOptionDictType':
opts = CCompiler.get_options(self)
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std_opt = opts[key]
assert isinstance(std_opt, options.UserStdOption), 'for mypy'
std_opt.set_versions(['c89', 'c99'], gnu=True)
@@ -667,7 +675,7 @@ def get_no_stdinc_args(self) -> T.List[str]:
def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
args = []
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std = options.get_value(key)
if std != 'none':
args.append('-ansi')
@@ -702,7 +710,7 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
def get_options(self) -> 'MutableKeyedOptionDictType':
opts = CCompiler.get_options(self)
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std_opt = opts[key]
assert isinstance(std_opt, options.UserStdOption), 'for mypy'
std_opt.set_versions(['c89', 'c99'])
@@ -740,7 +748,7 @@ def get_always_args(self) -> T.List[str]:
def get_options(self) -> 'MutableKeyedOptionDictType':
opts = CCompiler.get_options(self)
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std_opt = opts[key]
assert isinstance(std_opt, options.UserStdOption), 'for mypy'
std_opt.set_versions(['c89', 'c99', 'c11'])
@@ -751,7 +759,7 @@ def get_no_stdinc_args(self) -> T.List[str]:
def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
args = []
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std = options.get_value(key)
if std != 'none':
args.append('--' + std)
@@ -781,13 +789,13 @@ def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[st
def get_options(self) -> 'MutableKeyedOptionDictType':
opts = CCompiler.get_options(self)
c_stds = ['c99']
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
opts[key].choices = ['none'] + c_stds
return opts
def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
args = []
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std = options.get_value(key)
if std != 'none':
args.append('-lang')
@@ -811,13 +819,13 @@ def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[st
def get_options(self) -> 'MutableKeyedOptionDictType':
opts = CCompiler.get_options(self)
c_stds = ['c99']
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
opts[key].choices = ['none'] + c_stds
return opts
def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
args = []
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std = options.get_value(key)
if std != 'none':
args.append('-lang ' + std)
diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py
index 08a596c7078c..603a3eb484de 100644
--- a/mesonbuild/compilers/compilers.py
+++ b/mesonbuild/compilers/compilers.py
@@ -9,23 +9,24 @@
import enum
import itertools
import typing as T
-from dataclasses import dataclass
+from dataclasses import dataclass, field
from functools import lru_cache
-from .. import coredata
from .. import mlog
from .. import mesonlib
from .. import options
from ..mesonlib import (
HoldableObject,
EnvironmentException, MesonException,
- Popen_safe_logged, LibType, TemporaryDirectoryWinProof, OptionKey,
+ Popen_safe_logged, LibType, TemporaryDirectoryWinProof,
)
+from ..options import OptionKey
+
from ..arglist import CompilerArgs
if T.TYPE_CHECKING:
- from typing import Any
+ from .. import coredata
from ..build import BuildTarget, DFeatures
from ..coredata import MutableKeyedOptionDictType, KeyedOptionDictType
from ..envconfig import MachineInfo
@@ -50,9 +51,9 @@
# Mapping of language to suffixes of files that should always be in that language
# This means we can't include .h headers here since they could be C, C++, ObjC, etc.
# First suffix is the language's default.
-lang_suffixes = {
+lang_suffixes: T.Mapping[str, T.Tuple[str, ...]] = {
'c': ('c',),
- 'cpp': ('cpp', 'cppm', 'cc', 'cxx', 'c++', 'hh', 'hpp', 'ipp', 'hxx', 'ino', 'ixx', 'C', 'H'),
+ 'cpp': ('cpp', 'cppm', 'cc', 'cp', 'cxx', 'c++', 'hh', 'hp', 'hpp', 'ipp', 'hxx', 'h++', 'ino', 'ixx', 'CPP', 'C', 'HPP', 'H'),
'cuda': ('cu',),
# f90, f95, f03, f08 are for free-form fortran ('f90' recommended)
# f, for, ftn, fpp are for fixed-form fortran ('f' or 'for' recommended)
@@ -405,34 +406,27 @@ def get_base_link_args(options: 'KeyedOptionDictType', linker: 'Compiler',
class CrossNoRunException(MesonException):
pass
+@dataclass
class RunResult(HoldableObject):
- def __init__(self, compiled: bool, returncode: int = 999,
- stdout: str = 'UNDEFINED', stderr: str = 'UNDEFINED',
- cached: bool = False):
- self.compiled = compiled
- self.returncode = returncode
- self.stdout = stdout
- self.stderr = stderr
- self.cached = cached
+ compiled: bool
+ returncode: int = 999
+ stdout: str = 'UNDEFINED'
+ stderr: str = 'UNDEFINED'
+ cached: bool = False
+@dataclass
class CompileResult(HoldableObject):
"""The result of Compiler.compiles (and friends)."""
- def __init__(self, stdo: T.Optional[str] = None, stde: T.Optional[str] = None,
- command: T.Optional[T.List[str]] = None,
- returncode: int = 999,
- input_name: T.Optional[str] = None,
- output_name: T.Optional[str] = None,
- cached: bool = False):
- self.stdout = stdo
- self.stderr = stde
- self.input_name = input_name
- self.output_name = output_name
- self.command = command or []
- self.cached = cached
- self.returncode = returncode
+ stdout: str
+ stderr: str
+ command: T.List[str]
+ returncode: int
+ input_name: str
+ output_name: T.Optional[str] = field(default=None, init=False)
+ cached: bool = field(default=False, init=False)
class Compiler(HoldableObject, metaclass=abc.ABCMeta):
@@ -457,11 +451,8 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str,
full_version: T.Optional[str] = None, is_cross: bool = False):
self.exelist = ccache + exelist
self.exelist_no_ccache = exelist
- # In case it's been overridden by a child class already
- if not hasattr(self, 'file_suffixes'):
- self.file_suffixes = lang_suffixes[self.language]
- if not hasattr(self, 'can_compile_suffixes'):
- self.can_compile_suffixes: T.Set[str] = set(self.file_suffixes)
+ self.file_suffixes = lang_suffixes[self.language]
+ self.can_compile_suffixes = set(self.file_suffixes)
self.default_suffix = self.file_suffixes[0]
self.version = version
self.full_version = full_version
@@ -811,6 +802,8 @@ def compile(self, code: 'mesonlib.FileOrString',
'testfile.' + self.default_suffix)
with open(srcname, 'w', encoding='utf-8') as ofile:
ofile.write(code)
+ if not code.endswith('\n'):
+ ofile.write('\n')
# ccache would result in a cache miss
no_ccache = True
code_debug = f'Code:\n{code}'
@@ -969,7 +962,8 @@ def get_pie_args(self) -> T.List[str]:
def get_pie_link_args(self) -> T.List[str]:
return self.linker.get_pie_args()
- def get_argument_syntax(self) -> str:
+ @staticmethod
+ def get_argument_syntax() -> str:
"""Returns the argument family type.
Compilers fall into families if they try to emulate the command line
@@ -1352,18 +1346,18 @@ def get_preprocessor(self) -> Compiler:
"""
raise EnvironmentException(f'{self.get_id()} does not support preprocessor')
- def form_langopt_key(self, basename: str) -> OptionKey:
- return OptionKey(basename, machine=self.for_machine, lang=self.language)
+ def form_compileropt_key(self, basename: str) -> OptionKey:
+ return OptionKey(f'{self.language}_{basename}', machine=self.for_machine)
def get_global_options(lang: str,
comp: T.Type[Compiler],
for_machine: MachineChoice,
- env: 'Environment') -> 'dict[OptionKey, options.UserOption[Any]]':
+ env: 'Environment') -> dict[OptionKey, options.UserOption[T.Any]]:
"""Retrieve options that apply to all compilers for a given language."""
description = f'Extra arguments passed to the {lang}'
- argkey = OptionKey('args', lang=lang, machine=for_machine)
- largkey = argkey.evolve('link_args')
- envkey = argkey.evolve('env_args')
+ argkey = OptionKey(f'{lang}_args', machine=for_machine)
+ largkey = argkey.evolve(f'{lang}_link_args')
+ envkey = argkey.evolve(f'{lang}_env_args')
comp_key = argkey if argkey in env.options else envkey
@@ -1388,6 +1382,6 @@ def get_global_options(lang: str,
# autotools compatibility.
largs.extend_value(comp_options)
- opts: 'dict[OptionKey, options.UserOption[Any]]' = {argkey: cargs, largkey: largs}
+ opts: dict[OptionKey, options.UserOption[T.Any]] = {argkey: cargs, largkey: largs}
return opts
diff --git a/mesonbuild/compilers/cpp.py b/mesonbuild/compilers/cpp.py
index 044cd2b378a4..930e7b7e5bb9 100644
--- a/mesonbuild/compilers/cpp.py
+++ b/mesonbuild/compilers/cpp.py
@@ -173,7 +173,7 @@ def _find_best_cpp_std(self, cpp_std: str) -> str:
def get_options(self) -> 'MutableKeyedOptionDictType':
opts = super().get_options()
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
opts.update({
key: options.UserStdOption('C++', _ALL_STDS),
})
@@ -243,16 +243,16 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
self.update_options(
opts,
self.create_option(options.UserComboOption,
- self.form_langopt_key('eh'),
+ self.form_compileropt_key('eh'),
'C++ exception handling type.',
['none', 'default', 'a', 's', 'sc'],
'default'),
self.create_option(options.UserBooleanOption,
- self.form_langopt_key('rtti'),
+ self.form_compileropt_key('rtti'),
'Enable RTTI',
True),
self.create_option(options.UserBooleanOption,
- self.form_langopt_key('debugstl'),
+ self.form_compileropt_key('debugstl'),
'STL debug mode',
False),
)
@@ -263,14 +263,14 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
cppstd_choices.append('c++23')
if version_compare(self.version, self._CPP26_VERSION):
cppstd_choices.append('c++26')
- std_opt = opts[self.form_langopt_key('std')]
+ std_opt = opts[self.form_compileropt_key('std')]
assert isinstance(std_opt, options.UserStdOption), 'for mypy'
std_opt.set_versions(cppstd_choices, gnu=True)
if self.info.is_windows() or self.info.is_cygwin():
self.update_options(
opts,
self.create_option(options.UserArrayOption,
- self.form_langopt_key('winlibs'),
+ self.form_compileropt_key('winlibs'),
'Standard Win libraries to link against',
gnu_winlibs),
)
@@ -278,15 +278,15 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
args: T.List[str] = []
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std = options.get_value(key)
if std != 'none':
args.append(self._find_best_cpp_std(std))
- key = self.form_langopt_key('eh')
+ key = self.form_compileropt_key('eh')
non_msvc_eh_options(options.get_value(key), args)
- key = self.form_langopt_key('debugstl')
+ key = self.form_compileropt_key('debugstl')
if options.get_value(key):
args.append('-D_GLIBCXX_DEBUG=1')
@@ -296,7 +296,7 @@ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]
if version_compare(self.version, '>=18'):
args.append('-D_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_DEBUG')
- key = self.form_langopt_key('rtti')
+ key = self.form_compileropt_key('rtti')
if not options.get_value(key):
args.append('-fno-rtti')
@@ -305,7 +305,7 @@ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]
def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
if self.info.is_windows() or self.info.is_cygwin():
# without a typedict mypy can't understand this.
- key = self.form_langopt_key('winlibs')
+ key = self.form_compileropt_key('winlibs')
libs = options.get_value(key).copy()
assert isinstance(libs, list)
for l in libs:
@@ -350,6 +350,15 @@ class EmscriptenCPPCompiler(EmscriptenMixin, ClangCPPCompiler):
id = 'emscripten'
+ # Emscripten uses different version numbers than Clang; `emcc -v` will show
+ # the Clang version number used as well (but `emcc --version` does not).
+ # See https://github.com/pyodide/pyodide/discussions/4762 for more on
+ # emcc <--> clang versions. Note, although earlier versions claim to be the
+ # Clang versions 12.0.0 and 17.0.0 required for these C++ standards, they
+ # only accept the flags in the later versions below.
+ _CPP23_VERSION = '>=2.0.10'
+ _CPP26_VERSION = '>=3.1.39'
+
def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
info: 'MachineInfo',
linker: T.Optional['DynamicLinker'] = None,
@@ -365,7 +374,7 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
args: T.List[str] = []
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std = options.get_value(key)
if std != 'none':
args.append(self._find_best_cpp_std(std))
@@ -393,7 +402,7 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
def get_options(self) -> 'MutableKeyedOptionDictType':
opts = CPPCompiler.get_options(self)
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
self.update_options(
opts,
self.create_option(options.UserComboOption,
@@ -409,12 +418,12 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
args: T.List[str] = []
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std = options.get_value(key)
if std != 'none':
args.append('-std=' + std)
- key = self.form_langopt_key('eh')
+ key = self.form_compileropt_key('eh')
non_msvc_eh_options(options.get_value(key), args)
return args
@@ -442,21 +451,21 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
self.supported_warn_args(gnu_cpp_warning_args))}
def get_options(self) -> 'MutableKeyedOptionDictType':
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
opts = CPPCompiler.get_options(self)
self.update_options(
opts,
self.create_option(options.UserComboOption,
- self.form_langopt_key('eh'),
+ self.form_compileropt_key('eh'),
'C++ exception handling type.',
['none', 'default', 'a', 's', 'sc'],
'default'),
self.create_option(options.UserBooleanOption,
- self.form_langopt_key('rtti'),
+ self.form_compileropt_key('rtti'),
'Enable RTTI',
True),
self.create_option(options.UserBooleanOption,
- self.form_langopt_key('debugstl'),
+ self.form_compileropt_key('debugstl'),
'STL debug mode',
False),
)
@@ -475,7 +484,7 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
self.update_options(
opts,
self.create_option(options.UserArrayOption,
- key.evolve('winlibs'),
+ key.evolve('cpp_winlibs'),
'Standard Win libraries to link against',
gnu_winlibs),
)
@@ -483,24 +492,28 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
args: T.List[str] = []
- key = self.form_langopt_key('std')
- std = options.get_value(key)
+ stdkey = self.form_compileropt_key('std')
+ ehkey = self.form_compileropt_key('eh')
+ rttikey = self.form_compileropt_key('rtti')
+ debugstlkey = self.form_compileropt_key('debugstl')
+
+ std = options.get_value(stdkey)
if std != 'none':
args.append(self._find_best_cpp_std(std))
- non_msvc_eh_options(options.get_value(key.evolve('eh')), args)
+ non_msvc_eh_options(options.get_value(ehkey), args)
- if not options.get_value(key.evolve('rtti')):
+ if not options.get_value(rttikey):
args.append('-fno-rtti')
- if options.get_value(key.evolve('debugstl')):
+ if options.get_value(debugstlkey):
args.append('-D_GLIBCXX_DEBUG=1')
return args
def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
if self.info.is_windows() or self.info.is_cygwin():
# without a typedict mypy can't understand this.
- key = self.form_langopt_key('winlibs')
+ key = self.form_compileropt_key('winlibs')
libs = options.get_value(key).copy()
assert isinstance(libs, list)
for l in libs:
@@ -553,6 +566,17 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
info, linker=linker, full_version=full_version)
PGICompiler.__init__(self)
+ def get_options(self) -> 'MutableKeyedOptionDictType':
+ opts = CPPCompiler.get_options(self)
+ cppstd_choices = [
+ 'c++98', 'c++03', 'c++11', 'c++14', 'c++17', 'c++20', 'c++23',
+ 'gnu++98', 'gnu++03', 'gnu++11', 'gnu++14', 'gnu++17', 'gnu++20'
+ ]
+ std_opt = opts[self.form_compileropt_key('std')]
+ assert isinstance(std_opt, options.UserStdOption), 'for mypy'
+ std_opt.set_versions(cppstd_choices)
+ return opts
+
class ElbrusCPPCompiler(ElbrusCompiler, CPPCompiler):
def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
@@ -583,16 +607,16 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
if version_compare(self.version, '>=1.26.00'):
cpp_stds += ['c++20']
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
self.update_options(
opts,
self.create_option(options.UserComboOption,
- self.form_langopt_key('eh'),
+ self.form_compileropt_key('eh'),
'C++ exception handling type.',
['none', 'default', 'a', 's', 'sc'],
'default'),
self.create_option(options.UserBooleanOption,
- self.form_langopt_key('debugstl'),
+ self.form_compileropt_key('debugstl'),
'STL debug mode',
False),
)
@@ -616,15 +640,15 @@ def has_function(self, funcname: str, prefix: str, env: 'Environment', *,
# Elbrus C++ compiler does not support RTTI, so don't check for it.
def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
args: T.List[str] = []
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std = options.get_value(key)
if std != 'none':
args.append(self._find_best_cpp_std(std))
- key = self.form_langopt_key('eh')
+ key = self.form_compileropt_key('eh')
non_msvc_eh_options(options.get_value(key), args)
- key = self.form_langopt_key('debugstl')
+ key = self.form_compileropt_key('debugstl')
if options.get_value(key):
args.append('-D_GLIBCXX_DEBUG=1')
return args
@@ -664,20 +688,20 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
c_stds += ['c++2a']
g_stds += ['gnu++2a']
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
self.update_options(
opts,
self.create_option(options.UserComboOption,
- self.form_langopt_key('eh'),
+ self.form_compileropt_key('eh'),
'C++ exception handling type.',
['none', 'default', 'a', 's', 'sc'],
'default'),
self.create_option(options.UserBooleanOption,
- self.form_langopt_key('rtti'),
+ self.form_compileropt_key('rtti'),
'Enable RTTI',
True),
self.create_option(options.UserBooleanOption,
- self.form_langopt_key('debugstl'),
+ self.form_compileropt_key('debugstl'),
'STL debug mode',
False),
)
@@ -688,7 +712,7 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
args: T.List[str] = []
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std = options.get_value(key)
if std != 'none':
remap_cpp03 = {
@@ -733,24 +757,24 @@ class VisualStudioLikeCPPCompilerMixin(CompilerMixinBase):
def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
# need a typeddict for this
- key = self.form_langopt_key('winlibs')
+ key = self.form_compileropt_key('winlibs')
return T.cast('T.List[str]', options.get_value(key)[:])
def _get_options_impl(self, opts: 'MutableKeyedOptionDictType', cpp_stds: T.List[str]) -> 'MutableKeyedOptionDictType':
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
self.update_options(
opts,
self.create_option(options.UserComboOption,
- self.form_langopt_key('eh'),
+ self.form_compileropt_key('eh'),
'C++ exception handling type.',
['none', 'default', 'a', 's', 'sc'],
'default'),
self.create_option(options.UserBooleanOption,
- self.form_langopt_key('rtti'),
+ self.form_compileropt_key('rtti'),
'Enable RTTI',
True),
self.create_option(options.UserArrayOption,
- self.form_langopt_key('winlibs'),
+ self.form_compileropt_key('winlibs'),
'Windows libs to link against.',
msvc_winlibs),
)
@@ -761,9 +785,9 @@ def _get_options_impl(self, opts: 'MutableKeyedOptionDictType', cpp_stds: T.List
def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
args: T.List[str] = []
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
- eh = options.get_value(self.form_langopt_key('eh'))
+ eh = options.get_value(self.form_compileropt_key('eh'))
if eh == 'default':
args.append('/EHsc')
elif eh == 'none':
@@ -771,7 +795,7 @@ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]
else:
args.append('/EH' + eh)
- if not options.get_value(self.form_langopt_key('rtti')):
+ if not options.get_value(self.form_compileropt_key('rtti')):
args.append('/GR-')
permissive, ver = self.VC_VERSION_MAP[options.get_value(key)]
@@ -801,7 +825,7 @@ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]
# which means setting the C++ standard version to C++14, in compilers that support it
# (i.e., after VS2015U3)
# if one is using anything before that point, one cannot set the standard.
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
if options.get_value(key) in {'vc++11', 'c++11'}:
mlog.warning(self.id, 'does not support C++11;',
'attempting best effort; setting the standard to C++14',
@@ -848,7 +872,7 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
return self._get_options_impl(super().get_options(), cpp_stds)
def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
if options.get_value(key) != 'none' and version_compare(self.version, '<19.00.24210'):
mlog.warning('This version of MSVC does not support cpp_std arguments', fatal=False)
options = copy.copy(options)
@@ -917,14 +941,14 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
def get_options(self) -> 'MutableKeyedOptionDictType':
opts = CPPCompiler.get_options(self)
- std_opt = self.form_langopt_key('std')
+ std_opt = opts[self.form_compileropt_key('std')]
assert isinstance(std_opt, options.UserStdOption), 'for mypy'
std_opt.set_versions(['c++03', 'c++11'])
return opts
def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
args: T.List[str] = []
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std = options.get_value(key)
if std == 'c++11':
args.append('--cpp11')
@@ -978,7 +1002,7 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
def get_options(self) -> 'MutableKeyedOptionDictType':
opts = CPPCompiler.get_options(self)
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std_opt = opts[key]
assert isinstance(std_opt, options.UserStdOption), 'for mypy'
std_opt.set_versions(['c++03'])
@@ -986,7 +1010,7 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
args: T.List[str] = []
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std = options.get_value(key)
if std != 'none':
args.append('--' + std)
@@ -1021,13 +1045,13 @@ def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[st
def get_options(self) -> 'MutableKeyedOptionDictType':
opts = CPPCompiler.get_options(self)
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
opts[key].choices = ['none']
return opts
def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
args: T.List[str] = []
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std = options.get_value(key)
if std != 'none':
args.append('-lang')
@@ -1050,13 +1074,13 @@ def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[st
def get_options(self) -> 'MutableKeyedOptionDictType':
opts = CPPCompiler.get_options(self)
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
opts[key].choices = ['none']
return opts
def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
args: T.List[str] = []
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std = options.get_value(key)
if std != 'none':
args.append('-lang ' + std)
diff --git a/mesonbuild/compilers/cuda.py b/mesonbuild/compilers/cuda.py
index a6049a8439cd..38a938f24aff 100644
--- a/mesonbuild/compilers/cuda.py
+++ b/mesonbuild/compilers/cuda.py
@@ -13,8 +13,9 @@
from .. import mlog
from ..mesonlib import (
EnvironmentException, Popen_safe,
- is_windows, LibType, version_compare, OptionKey
+ is_windows, LibType, version_compare
)
+from ..options import OptionKey
from .compilers import Compiler
if T.TYPE_CHECKING:
@@ -44,7 +45,7 @@
}
-class _Phase(enum.Enum):
+class Phase(enum.Enum):
COMPILER = 'compiler'
LINKER = 'linker'
@@ -308,14 +309,14 @@ def get_xcompiler_val(flag: str, flagit: T.Iterator[str]) -> str:
raise ValueError("-Xcompiler flag merging failed, unknown argument form!")
return xflags
- def _to_host_flags(self, flags: T.List[str], phase: _Phase = _Phase.COMPILER) -> T.List[str]:
+ @classmethod
+ def to_host_flags_base(cls, flags: T.List[str], phase: Phase = Phase.COMPILER, default_include_dirs: T.Optional[T.List[str]] = None) -> T.List[str]:
"""
Translate generic "GCC-speak" plus particular "NVCC-speak" flags to NVCC flags.
NVCC's "short" flags have broad similarities to the GCC standard, but have
gratuitous, irritating differences.
"""
-
xflags = []
flagit = iter(flags)
@@ -370,7 +371,7 @@ def _to_host_flags(self, flags: T.List[str], phase: _Phase = _Phase.COMPILER) ->
# an exception for -D (where this would be value-changing) and -U (because
# it isn't possible to define a macro with a comma in the name).
- if flag in self._FLAG_PASSTHRU_NOARGS:
+ if flag in cls._FLAG_PASSTHRU_NOARGS:
xflags.append(flag)
continue
@@ -403,16 +404,16 @@ def _to_host_flags(self, flags: T.List[str], phase: _Phase = _Phase.COMPILER) ->
else: # -Isomething
val = flag[2:]
flag = flag[:2] # -I
- elif flag in self._FLAG_LONG2SHORT_WITHARGS or \
- flag in self._FLAG_SHORT2LONG_WITHARGS:
+ elif flag in cls._FLAG_LONG2SHORT_WITHARGS or \
+ flag in cls._FLAG_SHORT2LONG_WITHARGS:
# This is either -o or a multi-letter flag, and it is receiving its
# value isolated.
try:
val = next(flagit) # -o something
except StopIteration:
pass
- elif flag.split('=', 1)[0] in self._FLAG_LONG2SHORT_WITHARGS or \
- flag.split('=', 1)[0] in self._FLAG_SHORT2LONG_WITHARGS:
+ elif flag.split('=', 1)[0] in cls._FLAG_LONG2SHORT_WITHARGS or \
+ flag.split('=', 1)[0] in cls._FLAG_SHORT2LONG_WITHARGS:
# This is either -o or a multi-letter flag, and it is receiving its
# value after an = sign.
flag, val = flag.split('=', 1) # -o=something
@@ -441,14 +442,14 @@ def _to_host_flags(self, flags: T.List[str], phase: _Phase = _Phase.COMPILER) ->
xflags.append('-prec-div=true')
xflags.append('-Xcompiler='+flag)
else:
- xflags.append('-Xcompiler='+self._shield_nvcc_list_arg(flag))
+ xflags.append('-Xcompiler='+cls._shield_nvcc_list_arg(flag))
# The above should securely handle GCC's -Wl, -Wa, -Wp, arguments.
continue
assert val is not None # Should only trip if there is a missing argument.
# Take care of the various NVCC-supported flags that need special handling.
- flag = self._FLAG_LONG2SHORT_WITHARGS.get(flag, flag)
+ flag = cls._FLAG_LONG2SHORT_WITHARGS.get(flag, flag)
if flag in {'-include', '-isystem', '-I', '-L', '-l'}:
# These flags are known to GCC, but list-valued in NVCC. They potentially
@@ -460,14 +461,14 @@ def _to_host_flags(self, flags: T.List[str], phase: _Phase = _Phase.COMPILER) ->
# -U with comma arguments is impossible in GCC-speak (and thus unambiguous
#in NVCC-speak, albeit unportable).
if len(flag) == 2:
- xflags.append(flag+self._shield_nvcc_list_arg(val))
- elif flag == '-isystem' and val in self.host_compiler.get_default_include_dirs():
+ xflags.append(flag+cls._shield_nvcc_list_arg(val))
+ elif flag == '-isystem' and default_include_dirs is not None and val in default_include_dirs:
# like GnuLikeCompiler, we have to filter out include directories specified
# with -isystem that overlap with the host compiler's search path
pass
else:
xflags.append(flag)
- xflags.append(self._shield_nvcc_list_arg(val))
+ xflags.append(cls._shield_nvcc_list_arg(val))
elif flag == '-O':
# Handle optimization levels GCC knows about that NVCC does not.
if val == 'fast':
@@ -488,13 +489,16 @@ def _to_host_flags(self, flags: T.List[str], phase: _Phase = _Phase.COMPILER) ->
xflags.append(flag)
xflags.append(val)
- return self._merge_flags(xflags)
+ return cls._merge_flags(xflags)
+
+ def _to_host_flags(self, flags: T.List[str], phase: Phase = Phase.COMPILER) -> T.List[str]:
+ return self.to_host_flags_base(flags, phase, self.host_compiler.get_default_include_dirs())
def needs_static_linker(self) -> bool:
return False
def thread_link_flags(self, environment: 'Environment') -> T.List[str]:
- return self._to_host_flags(self.host_compiler.thread_link_flags(environment), _Phase.LINKER)
+ return self._to_host_flags(self.host_compiler.thread_link_flags(environment), Phase.LINKER)
def sanity_check(self, work_dir: str, env: 'Environment') -> None:
mlog.debug('Sanity testing ' + self.get_display_language() + ' compiler:', ' '.join(self.exelist))
@@ -645,12 +649,12 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
return self.update_options(
super().get_options(),
self.create_option(options.UserComboOption,
- self.form_langopt_key('std'),
+ self.form_compileropt_key('std'),
'C++ language standard to use with CUDA',
cpp_stds,
'none'),
self.create_option(options.UserStringOption,
- self.form_langopt_key('ccbindir'),
+ self.form_compileropt_key('ccbindir'),
'CUDA non-default toolchain directory to use (-ccbin)',
''),
)
@@ -663,7 +667,7 @@ def _to_host_compiler_options(self, master_options: 'KeyedOptionDictType') -> 'K
# We must strip the -std option from the host compiler option set, as NVCC has
# its own -std flag that may not agree with the host compiler's.
host_options = {key: master_options.get(key, opt) for key, opt in self.host_compiler.get_options().items()}
- std_key = OptionKey('std', machine=self.for_machine, lang=self.host_compiler.language)
+ std_key = OptionKey(f'{self.host_compiler.language}_std', machine=self.for_machine)
overrides = {std_key: 'none'}
# To shut up mypy.
return coredata.OptionsView(host_options, overrides=overrides)
@@ -674,7 +678,7 @@ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]
# the combination of CUDA version and MSVC version; the --std= is thus ignored
# and attempting to use it will result in a warning: https://stackoverflow.com/a/51272091/741027
if not is_windows():
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std = options.get_value(key)
if std != 'none':
args.append('--std=' + std)
@@ -683,13 +687,13 @@ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]
def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
args = self.get_ccbin_args(options)
- return args + self._to_host_flags(self.host_compiler.get_option_link_args(self._to_host_compiler_options(options)), _Phase.LINKER)
+ return args + self._to_host_flags(self.host_compiler.get_option_link_args(self._to_host_compiler_options(options)), Phase.LINKER)
def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
suffix: str, soversion: str,
darwin_versions: T.Tuple[str, str]) -> T.List[str]:
return self._to_host_flags(self.host_compiler.get_soname_args(
- env, prefix, shlib_name, suffix, soversion, darwin_versions), _Phase.LINKER)
+ env, prefix, shlib_name, suffix, soversion, darwin_versions), Phase.LINKER)
def get_compile_only_args(self) -> T.List[str]:
return ['-c']
@@ -728,20 +732,20 @@ def get_compile_debugfile_args(self, rel_obj: str, pch: bool = False) -> T.List[
return self._to_host_flags(self.host_compiler.get_compile_debugfile_args(rel_obj, pch))
def get_link_debugfile_args(self, targetfile: str) -> T.List[str]:
- return self._to_host_flags(self.host_compiler.get_link_debugfile_args(targetfile), _Phase.LINKER)
+ return self._to_host_flags(self.host_compiler.get_link_debugfile_args(targetfile), Phase.LINKER)
def get_depfile_suffix(self) -> str:
return 'd'
def get_optimization_link_args(self, optimization_level: str) -> T.List[str]:
- return self._to_host_flags(self.host_compiler.get_optimization_link_args(optimization_level), _Phase.LINKER)
+ return self._to_host_flags(self.host_compiler.get_optimization_link_args(optimization_level), Phase.LINKER)
def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
rpath_paths: T.Tuple[str, ...], build_rpath: str,
install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
(rpath_args, rpath_dirs_to_remove) = self.host_compiler.build_rpath_args(
env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath)
- return (self._to_host_flags(rpath_args, _Phase.LINKER), rpath_dirs_to_remove)
+ return (self._to_host_flags(rpath_args, Phase.LINKER), rpath_dirs_to_remove)
def linker_to_compiler_args(self, args: T.List[str]) -> T.List[str]:
return args
@@ -766,7 +770,7 @@ def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
return []
def get_std_exe_link_args(self) -> T.List[str]:
- return self._to_host_flags(self.host_compiler.get_std_exe_link_args(), _Phase.LINKER)
+ return self._to_host_flags(self.host_compiler.get_std_exe_link_args(), Phase.LINKER)
def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str],
libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True) -> T.Optional[T.List[str]]:
@@ -782,19 +786,19 @@ def get_crt_link_args(self, crt_val: str, buildtype: str) -> T.List[str]:
host_crt_compile_args = self.host_compiler.get_crt_compile_args(crt_val, buildtype)
if any(arg in {'/MDd', '/MD', '/MTd'} for arg in host_crt_compile_args):
host_link_arg_overrides += ['/NODEFAULTLIB:LIBCMT.lib']
- return self._to_host_flags(host_link_arg_overrides + self.host_compiler.get_crt_link_args(crt_val, buildtype), _Phase.LINKER)
+ return self._to_host_flags(host_link_arg_overrides + self.host_compiler.get_crt_link_args(crt_val, buildtype), Phase.LINKER)
def get_target_link_args(self, target: 'BuildTarget') -> T.List[str]:
- return self._to_host_flags(super().get_target_link_args(target), _Phase.LINKER)
+ return self._to_host_flags(super().get_target_link_args(target), Phase.LINKER)
def get_dependency_compile_args(self, dep: 'Dependency') -> T.List[str]:
return self._to_host_flags(super().get_dependency_compile_args(dep))
def get_dependency_link_args(self, dep: 'Dependency') -> T.List[str]:
- return self._to_host_flags(super().get_dependency_link_args(dep), _Phase.LINKER)
+ return self._to_host_flags(super().get_dependency_link_args(dep), Phase.LINKER)
def get_ccbin_args(self, ccoptions: 'KeyedOptionDictType') -> T.List[str]:
- key = self.form_langopt_key('ccbindir')
+ key = self.form_compileropt_key('ccbindir')
ccbindir = ccoptions.get_value(key)
if isinstance(ccbindir, str) and ccbindir != '':
return [self._shield_nvcc_list_arg('-ccbin='+ccbindir, False)]
diff --git a/mesonbuild/compilers/cython.py b/mesonbuild/compilers/cython.py
index 7c1128692e40..5cc0200458fa 100644
--- a/mesonbuild/compilers/cython.py
+++ b/mesonbuild/compilers/cython.py
@@ -70,12 +70,12 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
return self.update_options(
super().get_options(),
self.create_option(options.UserComboOption,
- self.form_langopt_key('version'),
+ self.form_compileropt_key('version'),
'Python version to target',
['2', '3'],
'3'),
self.create_option(options.UserComboOption,
- self.form_langopt_key('language'),
+ self.form_compileropt_key('language'),
'Output C or C++ files',
['c', 'cpp'],
'c'),
@@ -83,10 +83,10 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
args: T.List[str] = []
- key = self.form_langopt_key('version')
+ key = self.form_compileropt_key('version')
version = options.get_value(key)
args.append(f'-{version}')
- key = self.form_langopt_key('language')
+ key = self.form_compileropt_key('language')
lang = options.get_value(key)
if lang == 'cpp':
args.append('--cplus')
diff --git a/mesonbuild/compilers/d.py b/mesonbuild/compilers/d.py
index c478c040ba9c..8ee6ebf651c5 100644
--- a/mesonbuild/compilers/d.py
+++ b/mesonbuild/compilers/d.py
@@ -12,10 +12,10 @@
from ..arglist import CompilerArgs
from ..linkers import RSPFileSyntax
from ..mesonlib import (
- EnvironmentException, version_compare, OptionKey, is_windows
+ EnvironmentException, version_compare, is_windows
)
+from ..options import OptionKey
-from . import compilers
from .compilers import (
clike_debug_args,
Compiler,
@@ -25,6 +25,7 @@
from .mixins.gnu import gnu_common_warning_args
if T.TYPE_CHECKING:
+ from . import compilers
from ..build import DFeatures
from ..dependencies import Dependency
from ..envconfig import MachineInfo
diff --git a/mesonbuild/compilers/detect.py b/mesonbuild/compilers/detect.py
index c8b67f479471..7542fb6283a2 100644
--- a/mesonbuild/compilers/detect.py
+++ b/mesonbuild/compilers/detect.py
@@ -39,15 +39,16 @@
if is_windows():
# Intel C and C++ compiler is icl on Windows, but icc and icpc elsewhere.
# Search for icl before cl, since Intel "helpfully" provides a
- # cl.exe that returns *exactly the same thing* that microsofts
+ # cl.exe that returns *exactly the same thing* that Microsoft's
# cl.exe does, and if icl is present, it's almost certainly what
# you want.
defaults['c'] = ['icl', 'cl', 'cc', 'gcc', 'clang', 'clang-cl', 'pgcc']
# There is currently no pgc++ for Windows, only for Mac and Linux.
defaults['cpp'] = ['icl', 'cl', 'c++', 'g++', 'clang++', 'clang-cl']
- defaults['fortran'] = ['ifort', 'gfortran', 'flang', 'pgfortran', 'g95']
+ # the binary flang-new will be renamed to flang in the foreseeable future
+ defaults['fortran'] = ['ifort', 'gfortran', 'flang-new', 'flang', 'pgfortran', 'g95']
defaults['objc'] = ['clang', 'clang-cl', 'gcc']
- defaults['objcpp'] = ['clang-cl', 'clang-cl', 'g++']
+ defaults['objcpp'] = ['clang++', 'clang-cl', 'g++']
defaults['cs'] = ['csc', 'mcs']
else:
if platform.machine().lower() == 'e2k':
@@ -60,7 +61,8 @@
defaults['cpp'] = ['c++', 'g++', 'clang++', 'nvc++', 'pgc++', 'icpc', 'icpx']
defaults['objc'] = ['clang', 'gcc']
defaults['objcpp'] = ['clang++', 'g++']
- defaults['fortran'] = ['gfortran', 'flang', 'nvfortran', 'pgfortran', 'ifort', 'ifx', 'g95']
+ # the binary flang-new will be renamed to flang in the foreseeable future
+ defaults['fortran'] = ['gfortran', 'flang-new', 'flang', 'nvfortran', 'pgfortran', 'ifort', 'ifx', 'g95']
defaults['cs'] = ['mcs', 'csc']
defaults['d'] = ['ldc2', 'ldc', 'gdc', 'dmd']
defaults['java'] = ['javac']
@@ -179,7 +181,7 @@ def detect_static_linker(env: 'Environment', compiler: Compiler) -> StaticLinker
else:
trials = default_linkers
elif compiler.id == 'intel-cl' and compiler.language == 'c': # why not cpp? Is this a bug?
- # Intel has its own linker that acts like microsoft's lib
+ # Intel has its own linker that acts like Microsoft's lib
trials = [['xilib']]
elif is_windows() and compiler.id == 'pgi': # this handles cpp / nvidia HPC, in addition to just c/fortran
trials = [['ar']] # For PGI on Windows, "ar" is just a wrapper calling link/lib.
@@ -278,7 +280,7 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
compiler_name = os.path.basename(compiler[0])
if any(os.path.basename(x) in {'cl', 'cl.exe', 'clang-cl', 'clang-cl.exe'} for x in compiler):
- # Watcom C provides it's own cl.exe clone that mimics an older
+ # Watcom C provides its own cl.exe clone that mimics an older
# version of Microsoft's compiler. Since Watcom's cl.exe is
# just a wrapper, we skip using it if we detect its presence
# so as not to confuse Meson when configuring for MSVC.
@@ -583,7 +585,7 @@ def sanitize(p: T.Optional[str]) -> T.Optional[str]:
lnk = linkers.MetrowerksLinkerEmbeddedPowerPC
mwcc_ver_match = re.search(r'Version (\d+)\.(\d+)\.?(\d+)? build (\d+)', out)
- assert mwcc_ver_match is not None, 'for mypy' # because mypy *should* be complaning that this could be None
+ assert mwcc_ver_match is not None, 'for mypy' # because mypy *should* be complaining that this could be None
compiler_version = '.'.join(x for x in mwcc_ver_match.groups() if x is not None)
env.coredata.add_lang_args(cls.language, cls, for_machine, env)
@@ -593,7 +595,7 @@ def sanitize(p: T.Optional[str]) -> T.Optional[str]:
_, o_ld, _ = Popen_safe(ld + ['--version'])
mwld_ver_match = re.search(r'Version (\d+)\.(\d+)\.?(\d+)? build (\d+)', o_ld)
- assert mwld_ver_match is not None, 'for mypy' # because mypy *should* be complaning that this could be None
+ assert mwld_ver_match is not None, 'for mypy' # because mypy *should* be complaining that this could be None
linker_version = '.'.join(x for x in mwld_ver_match.groups() if x is not None)
linker = lnk(ld, for_machine, version=linker_version)
@@ -614,7 +616,8 @@ def detect_cpp_compiler(env: 'Environment', for_machine: MachineChoice) -> Compi
return _detect_c_or_cpp_compiler(env, 'cpp', for_machine)
def detect_cuda_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler:
- from .cuda import CudaCompiler
+ from .cuda import CudaCompiler, Phase
+ from ..options import OptionKey
from ..linkers.linkers import CudaLinker
popen_exceptions = {}
is_cross = env.is_cross_build(for_machine)
@@ -646,6 +649,12 @@ def detect_cuda_compiler(env: 'Environment', for_machine: MachineChoice) -> Comp
cpp_compiler = detect_cpp_compiler(env, for_machine)
cls = CudaCompiler
env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ key = OptionKey('cuda_link_args', machine=for_machine)
+ if key in env.options:
+ # To fix LDFLAGS issue
+ val = env.options[key]
+ assert isinstance(val, list)
+ env.coredata.set_options({key: cls.to_host_flags_base(val, Phase.LINKER)})
linker = CudaLinker(compiler, for_machine, CudaCompiler.LINKER_PREFIX, [], version=CudaLinker.parse_version())
return cls(ccache, compiler, version, for_machine, is_cross, host_compiler=cpp_compiler, info=info, linker=linker)
raise EnvironmentException(f'Could not find suitable CUDA compiler: "{"; ".join([" ".join(c) for c in compilers])}"')
@@ -659,6 +668,13 @@ def detect_fortran_compiler(env: 'Environment', for_machine: MachineChoice) -> C
info = env.machines[for_machine]
cls: T.Type[FortranCompiler]
for compiler in compilers:
+ # capture help text for possible fallback
+ try:
+ _, help_out, _ = Popen_safe_logged(compiler + ['--help'], msg='Detecting compiler via')
+ except OSError as e:
+ popen_exceptions[join_args(compiler + ['--help'])] = e
+ help_out = ''
+
for arg in ['--version', '-V']:
try:
p, out, err = Popen_safe_logged(compiler + [arg], msg='Detecting compiler via')
@@ -776,8 +792,7 @@ def detect_fortran_compiler(env: 'Environment', for_machine: MachineChoice) -> C
compiler, version, for_machine, is_cross, info,
full_version=full_version, linker=linker)
- if 'flang' in out or 'clang' in out:
- cls = fortran.FlangFortranCompiler
+ def _get_linker_try_windows(cls: T.Type['Compiler']) -> T.Optional['DynamicLinker']:
linker = None
if 'windows' in out or env.machines[for_machine].is_windows():
# If we're in a MINGW context this actually will use a gnu
@@ -793,6 +808,18 @@ def detect_fortran_compiler(env: 'Environment', for_machine: MachineChoice) -> C
if linker is None:
linker = guess_nix_linker(env, compiler, cls,
version, for_machine)
+ return linker
+
+ if 'flang-new' in out or 'flang LLVM compiler' in help_out:
+ cls = fortran.LlvmFlangFortranCompiler
+ linker = _get_linker_try_windows(cls)
+ return cls(
+ compiler, version, for_machine, is_cross, info,
+ full_version=full_version, linker=linker)
+
+ if 'flang' in out or 'clang' in out:
+ cls = fortran.ClassicFlangFortranCompiler
+ linker = _get_linker_try_windows(cls)
return cls(
compiler, version, for_machine, is_cross, info,
full_version=full_version, linker=linker)
@@ -999,7 +1026,7 @@ def detect_rust_compiler(env: 'Environment', for_machine: MachineChoice) -> Rust
version = search_version(out)
cls: T.Type[RustCompiler] = rust.RustCompiler
- # Clippy is a wrapper around rustc, but it doesn't have rustc in it's
+ # Clippy is a wrapper around rustc, but it doesn't have rustc in its
# output. We can otherwise treat it as rustc.
if 'clippy' in out:
# clippy returns its own version and not the rustc version by
@@ -1228,7 +1255,7 @@ def detect_swift_compiler(env: 'Environment', for_machine: MachineChoice) -> Com
cls = SwiftCompiler
linker = guess_nix_linker(env,
exelist, cls, version, for_machine,
- extra_args=[f.name])
+ extra_args=[f.name, '-o', '/dev/null'])
return cls(
exelist, version, for_machine, is_cross, info, linker=linker)
@@ -1409,13 +1436,12 @@ def _try_obtain_compiler_defines(args: T.List[str]) -> str:
# based on the driver.
lang = clang_lang_map[lang]
- # The compiler may not infer the target language based on the driver name
- # so first, try with '-cpp -x lang', then fallback without given it's less
- # portable. We try with '-cpp' as GCC needs it for Fortran at least, and
- # it seems to do no harm.
- output = _try_obtain_compiler_defines(['-cpp', '-x', lang] + baseline_test_args)
+ # The compiler may not infer the target language based on the driver name.
+ # Try first with '-x lang' to support systemwide language level overrides,
+ # then fall back to without it since it's a more recent option.
+ output = _try_obtain_compiler_defines(['-x', lang] + baseline_test_args)
except (EnvironmentException, KeyError):
- mlog.debug(f'pre-processor extraction using -cpp -x {lang} failed, falling back w/o lang')
+ mlog.debug(f'pre-processor extraction using -x {lang} failed, falling back w/o lang')
output = _try_obtain_compiler_defines(baseline_test_args)
defines: T.Dict[str, str] = {}
diff --git a/mesonbuild/compilers/fortran.py b/mesonbuild/compilers/fortran.py
index 3e332381d53b..5012fba074a0 100644
--- a/mesonbuild/compilers/fortran.py
+++ b/mesonbuild/compilers/fortran.py
@@ -115,7 +115,7 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
return self.update_options(
super().get_options(),
self.create_option(options.UserComboOption,
- self.form_langopt_key('std'),
+ self.form_compileropt_key('std'),
'Fortran language standard to use',
['none'],
'none'),
@@ -147,13 +147,13 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
fortran_stds += ['f2008']
if version_compare(self.version, '>=8.0.0'):
fortran_stds += ['f2018']
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
opts[key].choices = ['none'] + fortran_stds
return opts
def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
args: T.List[str] = []
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std = options.get_value(key)
if std != 'none':
args.append('-std=' + std)
@@ -205,7 +205,7 @@ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoic
def get_options(self) -> 'MutableKeyedOptionDictType':
opts = FortranCompiler.get_options(self)
fortran_stds = ['f95', 'f2003', 'f2008', 'gnu', 'legacy', 'f2008ts']
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
opts[key].choices = ['none'] + fortran_stds
return opts
@@ -262,7 +262,6 @@ def openmp_flags(self, env: Environment) -> T.List[str]:
class IntelFortranCompiler(IntelGnuLikeCompiler, FortranCompiler):
- file_suffixes = ('f90', 'f', 'for', 'ftn', 'fpp', )
id = 'intel'
def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
@@ -275,6 +274,7 @@ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoic
# FIXME: Add support for OS X and Windows in detect_fortran_compiler so
# we are sent the type of compiler
IntelGnuLikeCompiler.__init__(self)
+ self.file_suffixes = ('f90', 'f', 'for', 'ftn', 'fpp', )
default_warn_args = ['-warn', 'general', '-warn', 'truncated_source']
self.warn_args = {'0': [],
'1': default_warn_args,
@@ -284,13 +284,13 @@ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoic
def get_options(self) -> 'MutableKeyedOptionDictType':
opts = FortranCompiler.get_options(self)
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
opts[key].choices = ['none', 'legacy', 'f95', 'f2003', 'f2008', 'f2018']
return opts
def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
args: T.List[str] = []
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std = options.get_value(key)
stds = {'legacy': 'none', 'f95': 'f95', 'f2003': 'f03', 'f2008': 'f08', 'f2018': 'f18'}
if std != 'none':
@@ -318,7 +318,6 @@ class IntelLLVMFortranCompiler(IntelFortranCompiler):
class IntelClFortranCompiler(IntelVisualStudioLikeCompiler, FortranCompiler):
- file_suffixes = ('f90', 'f', 'for', 'ftn', 'fpp', )
always_args = ['/nologo']
def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
@@ -329,6 +328,7 @@ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoic
is_cross, info, linker=linker,
full_version=full_version)
IntelVisualStudioLikeCompiler.__init__(self, target)
+ self.file_suffixes = ('f90', 'f', 'for', 'ftn', 'fpp', )
default_warn_args = ['/warn:general', '/warn:truncated_source']
self.warn_args = {'0': [],
@@ -339,13 +339,13 @@ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoic
def get_options(self) -> 'MutableKeyedOptionDictType':
opts = FortranCompiler.get_options(self)
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
opts[key].choices = ['none', 'legacy', 'f95', 'f2003', 'f2008', 'f2018']
return opts
def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
args: T.List[str] = []
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std = options.get_value(key)
stds = {'legacy': 'none', 'f95': 'f95', 'f2003': 'f03', 'f2008': 'f08', 'f2018': 'f18'}
if std != 'none':
@@ -430,7 +430,7 @@ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoic
'everything': default_warn_args + ['-Mdclchk']}
-class FlangFortranCompiler(ClangCompiler, FortranCompiler):
+class ClassicFlangFortranCompiler(ClangCompiler, FortranCompiler):
id = 'flang'
@@ -460,10 +460,62 @@ def language_stdlib_only_link_flags(self, env: 'Environment') -> T.List[str]:
search_dirs.append(f'-L{d}')
return search_dirs + ['-lflang', '-lpgmath']
-class ArmLtdFlangFortranCompiler(FlangFortranCompiler):
+
+class ArmLtdFlangFortranCompiler(ClassicFlangFortranCompiler):
id = 'armltdflang'
+
+class LlvmFlangFortranCompiler(ClangCompiler, FortranCompiler):
+
+ id = 'llvm-flang'
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ FortranCompiler.__init__(self, exelist, version, for_machine,
+ is_cross, info, linker=linker,
+ full_version=full_version)
+ ClangCompiler.__init__(self, {})
+ default_warn_args = ['-Wall']
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
+ '2': default_warn_args,
+ '3': default_warn_args,
+ 'everything': default_warn_args}
+
+ def get_colorout_args(self, colortype: str) -> T.List[str]:
+ # not yet supported, see https://github.com/llvm/llvm-project/issues/89888
+ return []
+
+ def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
+ # not yet supported, see https://github.com/llvm/llvm-project/issues/89888
+ return []
+
+ def get_module_outdir_args(self, path: str) -> T.List[str]:
+ # different syntax from classic flang (which supported `-module`), see
+ # https://github.com/llvm/llvm-project/issues/66969
+ return ['-module-dir', path]
+
+ def gnu_symbol_visibility_args(self, vistype: str) -> T.List[str]:
+ # flang doesn't support symbol visibility flag yet, see
+ # https://github.com/llvm/llvm-project/issues/92459
+ return []
+
+ def language_stdlib_only_link_flags(self, env: 'Environment') -> T.List[str]:
+ # matching setup from ClassicFlangFortranCompiler
+ search_dirs: T.List[str] = []
+ for d in self.get_compiler_dirs(env, 'libraries'):
+ search_dirs.append(f'-L{d}')
+ # does not automatically link to Fortran_main anymore after
+ # https://github.com/llvm/llvm-project/commit/9d6837d595719904720e5ff68ec1f1a2665bdc2f
+ # note that this changed again in flang 19 with
+ # https://github.com/llvm/llvm-project/commit/8d5386669ed63548daf1bee415596582d6d78d7d;
+ # it seems flang 18 doesn't work if something accidentally includes a program unit, see
+ # https://github.com/llvm/llvm-project/issues/92496
+ return search_dirs + ['-lFortranRuntime', '-lFortranDecimal']
+
+
class Open64FortranCompiler(FortranCompiler):
id = 'open64'
diff --git a/mesonbuild/compilers/mixins/apple.py b/mesonbuild/compilers/mixins/apple.py
index 98c4bfa1a18b..fc93d38a5673 100644
--- a/mesonbuild/compilers/mixins/apple.py
+++ b/mesonbuild/compilers/mixins/apple.py
@@ -55,3 +55,7 @@ def openmp_link_flags(self, env: Environment) -> T.List[str]:
if not link:
raise MesonException("Couldn't find libomp")
return self.__BASE_OMP_FLAGS + link
+
+ def get_prelink_args(self, prelink_name: str, obj_list: T.List[str]) -> T.List[str]:
+ # The objects are prelinked through the compiler, which injects -lSystem
+ return ['-nostdlib', '-r', '-o', prelink_name] + obj_list
diff --git a/mesonbuild/compilers/mixins/arm.py b/mesonbuild/compilers/mixins/arm.py
index 942156fab1a9..a70ec4ff95fb 100644
--- a/mesonbuild/compilers/mixins/arm.py
+++ b/mesonbuild/compilers/mixins/arm.py
@@ -10,7 +10,7 @@
from ... import mesonlib
from ...linkers.linkers import ArmClangDynamicLinker
-from ...mesonlib import OptionKey
+from ...options import OptionKey
from ..compilers import clike_debug_args
from .clang import clang_color_args
diff --git a/mesonbuild/compilers/mixins/ccrx.py b/mesonbuild/compilers/mixins/ccrx.py
index 63270726bc4c..d1badaa1c7eb 100644
--- a/mesonbuild/compilers/mixins/ccrx.py
+++ b/mesonbuild/compilers/mixins/ccrx.py
@@ -40,7 +40,6 @@ class CcrxCompiler(Compiler):
if T.TYPE_CHECKING:
is_cross = True
- can_compile_suffixes: T.Set[str] = set()
id = 'ccrx'
diff --git a/mesonbuild/compilers/mixins/clang.py b/mesonbuild/compilers/mixins/clang.py
index d99dc3abf9f9..a0d3d5ffb069 100644
--- a/mesonbuild/compilers/mixins/clang.py
+++ b/mesonbuild/compilers/mixins/clang.py
@@ -12,7 +12,7 @@
from ... import mesonlib
from ...linkers.linkers import AppleDynamicLinker, ClangClDynamicLinker, LLVMDynamicLinker, GnuGoldDynamicLinker, \
MoldDynamicLinker, MSVCDynamicLinker
-from ...mesonlib import OptionKey
+from ...options import OptionKey
from ..compilers import CompileCheckMode
from .gnu import GnuLikeCompiler
diff --git a/mesonbuild/compilers/mixins/clike.py b/mesonbuild/compilers/mixins/clike.py
index 174104b079ee..f0515a9bdcb4 100644
--- a/mesonbuild/compilers/mixins/clike.py
+++ b/mesonbuild/compilers/mixins/clike.py
@@ -1269,8 +1269,10 @@ def _has_multi_arguments(self, args: T.List[str], env: 'Environment', code: str)
for arg in args:
# some compilers, e.g. GCC, don't warn for unsupported warning-disable
# flags, so when we are testing a flag like "-Wno-forgotten-towel", also
- # check the equivalent enable flag too "-Wforgotten-towel"
- if arg.startswith('-Wno-'):
+ # check the equivalent enable flag too "-Wforgotten-towel".
+ # Make an exception for -Wno-attributes=x as -Wattributes=x is invalid
+ # for GCC at least.
+ if arg.startswith('-Wno-') and not arg.startswith('-Wno-attributes='):
new_args.append('-W' + arg[5:])
if arg.startswith('-Wl,'):
mlog.warning(f'{arg} looks like a linker argument, '
diff --git a/mesonbuild/compilers/mixins/elbrus.py b/mesonbuild/compilers/mixins/elbrus.py
index 71cf722c8192..66f419cf02d8 100644
--- a/mesonbuild/compilers/mixins/elbrus.py
+++ b/mesonbuild/compilers/mixins/elbrus.py
@@ -13,7 +13,8 @@
from .gnu import GnuLikeCompiler
from .gnu import gnu_optimization_args
-from ...mesonlib import Popen_safe, OptionKey
+from ...mesonlib import Popen_safe
+from ...options import OptionKey
if T.TYPE_CHECKING:
from ...environment import Environment
@@ -84,7 +85,7 @@ def get_pch_suffix(self) -> str:
def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
args: T.List[str] = []
- std = options.get_value(OptionKey('std', lang=self.language, machine=self.for_machine))
+ std = options.get_value(OptionKey(f'{self.language}_std', machine=self.for_machine))
if std != 'none':
args.append('-std=' + std)
return args
diff --git a/mesonbuild/compilers/mixins/emscripten.py b/mesonbuild/compilers/mixins/emscripten.py
index 6b7f087ba6e1..64315ae96797 100644
--- a/mesonbuild/compilers/mixins/emscripten.py
+++ b/mesonbuild/compilers/mixins/emscripten.py
@@ -8,14 +8,14 @@
import os.path
import typing as T
-from ... import coredata
from ... import options
from ... import mesonlib
-from ...mesonlib import OptionKey
+from ...options import OptionKey
from ...mesonlib import LibType
from mesonbuild.compilers.compilers import CompileCheckMode
if T.TYPE_CHECKING:
+ from ... import coredata
from ...environment import Environment
from ...compilers.compilers import Compiler
from ...dependencies import Dependency
@@ -51,7 +51,7 @@ def _get_compile_output(self, dirname: str, mode: CompileCheckMode) -> str:
def thread_link_flags(self, env: 'Environment') -> T.List[str]:
args = ['-pthread']
- count: int = env.coredata.optstore.get_value(OptionKey('thread_count', lang=self.language, machine=self.for_machine))
+ count: int = env.coredata.optstore.get_value(OptionKey(f'{self.language}_thread_count', machine=self.for_machine))
if count:
args.append(f'-sPTHREAD_POOL_SIZE={count}')
return args
@@ -61,7 +61,7 @@ def get_options(self) -> coredata.MutableKeyedOptionDictType:
super().get_options(),
self.create_option(
options.UserIntegerOption,
- OptionKey('thread_count', machine=self.for_machine, lang=self.language),
+ OptionKey(f'{self.language}_thread_count', machine=self.for_machine),
'Number of threads to use in web assembly, set to 0 to disable',
(0, None, 4), # Default was picked at random
),
diff --git a/mesonbuild/compilers/mixins/gnu.py b/mesonbuild/compilers/mixins/gnu.py
index 4a9eb8848489..62f55543a0a7 100644
--- a/mesonbuild/compilers/mixins/gnu.py
+++ b/mesonbuild/compilers/mixins/gnu.py
@@ -16,7 +16,7 @@
from ... import mesonlib
from ... import mlog
-from ...mesonlib import OptionKey
+from ...options import OptionKey
from mesonbuild.compilers.compilers import CompileCheckMode
if T.TYPE_CHECKING:
@@ -420,7 +420,8 @@ def gen_vs_module_defs_args(self, defsfile: str) -> T.List[str]:
# For other targets, discard the .def file.
return []
- def get_argument_syntax(self) -> str:
+ @staticmethod
+ def get_argument_syntax() -> str:
return 'gcc'
def get_profile_generate_args(self) -> T.List[str]:
diff --git a/mesonbuild/compilers/mixins/intel.py b/mesonbuild/compilers/mixins/intel.py
index 902cc748145f..32cbdf010539 100644
--- a/mesonbuild/compilers/mixins/intel.py
+++ b/mesonbuild/compilers/mixins/intel.py
@@ -18,6 +18,7 @@
from ..compilers import CompileCheckMode
from .gnu import GnuLikeCompiler
from .visualstudio import VisualStudioLikeCompiler
+from ...options import OptionKey
if T.TYPE_CHECKING:
from ...environment import Environment
@@ -66,7 +67,7 @@ def __init__(self) -> None:
# It does have IPO, which serves much the same purpose as LOT, but
# there is an unfortunate rule for using IPO (you can't control the
# name of the output file) which break assumptions meson makes
- self.base_options = {mesonlib.OptionKey(o) for o in [
+ self.base_options = {OptionKey(o) for o in [
'b_pch', 'b_lundef', 'b_asneeded', 'b_pgo', 'b_coverage',
'b_ndebug', 'b_staticpic', 'b_pie']}
self.lang_header = 'none'
diff --git a/mesonbuild/compilers/mixins/metrowerks.py b/mesonbuild/compilers/mixins/metrowerks.py
index 4be27c5dac32..60982212e241 100644
--- a/mesonbuild/compilers/mixins/metrowerks.py
+++ b/mesonbuild/compilers/mixins/metrowerks.py
@@ -8,7 +8,8 @@
import os
import typing as T
-from ...mesonlib import EnvironmentException, OptionKey
+from ...mesonlib import EnvironmentException
+from ...options import OptionKey
if T.TYPE_CHECKING:
from ...envconfig import MachineInfo
@@ -178,13 +179,12 @@ def __init__(self) -> None:
self.base_options = {
OptionKey(o) for o in ['b_pch', 'b_ndebug']}
- default_warn_args: T.List[str] = []
self.warn_args: T.Dict[str, T.List[str]] = {
- '0': ['-w', 'off'],
- '1': default_warn_args,
- '2': default_warn_args + ['-w', 'most'],
- '3': default_warn_args + ['-w', 'all'],
- 'everything': default_warn_args + ['-w', 'full']}
+ '0': ['-warnings', 'off'],
+ '1': [],
+ '2': ['-warnings', 'on,nocmdline'],
+ '3': ['-warnings', 'on,all'],
+ 'everything': ['-warnings', 'on,full']}
def depfile_for_object(self, objfile: str) -> T.Optional[str]:
# Earlier versions of these compilers do not support specifying
@@ -274,6 +274,6 @@ def _unix_args_to_native(cls, args: T.List[str], info: MachineInfo) -> T.List[st
def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:
for idx, i in enumerate(parameter_list):
if i[:2] == '-I':
- parameter_list[idx] = i[:9] + os.path.normpath(os.path.join(build_dir, i[9:]))
+ parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
return parameter_list
diff --git a/mesonbuild/compilers/mixins/pgi.py b/mesonbuild/compilers/mixins/pgi.py
index 71ad81f38a01..50335c895cc5 100644
--- a/mesonbuild/compilers/mixins/pgi.py
+++ b/mesonbuild/compilers/mixins/pgi.py
@@ -10,7 +10,7 @@
from pathlib import Path
from ..compilers import clike_debug_args, clike_optimization_args
-from ...mesonlib import OptionKey
+from ...options import OptionKey
if T.TYPE_CHECKING:
from ...environment import Environment
diff --git a/mesonbuild/compilers/mixins/visualstudio.py b/mesonbuild/compilers/mixins/visualstudio.py
index bdf293bd010f..b4677f4172ba 100644
--- a/mesonbuild/compilers/mixins/visualstudio.py
+++ b/mesonbuild/compilers/mixins/visualstudio.py
@@ -15,6 +15,8 @@
from ... import mesonlib
from ... import mlog
from mesonbuild.compilers.compilers import CompileCheckMode
+from ...options import OptionKey
+from mesonbuild.linkers.linkers import ClangClDynamicLinker
if T.TYPE_CHECKING:
from ...environment import Environment
@@ -110,7 +112,7 @@ class VisualStudioLikeCompiler(Compiler, metaclass=abc.ABCMeta):
INVOKES_LINKER = False
def __init__(self, target: str):
- self.base_options = {mesonlib.OptionKey(o) for o in ['b_pch', 'b_ndebug', 'b_vscrt']} # FIXME add lto, pgo and the like
+ self.base_options = {OptionKey(o) for o in ['b_pch', 'b_ndebug', 'b_vscrt']} # FIXME add lto, pgo and the like
self.target = target
self.is_64 = ('x64' in target) or ('x86_64' in target)
# do some canonicalization of target machine
@@ -125,7 +127,7 @@ def __init__(self, target: str):
else:
self.machine = target
if mesonlib.version_compare(self.version, '>=19.28.29910'): # VS 16.9.0 includes cl 19.28.29910
- self.base_options.add(mesonlib.OptionKey('b_sanitize'))
+ self.base_options.add(OptionKey('b_sanitize'))
assert self.linker is not None
self.linker.machine = self.machine
@@ -361,7 +363,8 @@ def has_func_attribute(self, name: str, env: 'Environment') -> T.Tuple[bool, boo
# false without compiling anything
return name in {'dllimport', 'dllexport'}, False
- def get_argument_syntax(self) -> str:
+ @staticmethod
+ def get_argument_syntax() -> str:
return 'msvc'
def symbols_have_underscore_prefix(self, env: 'Environment') -> bool:
@@ -441,6 +444,10 @@ class ClangClCompiler(VisualStudioLikeCompiler):
def __init__(self, target: str):
super().__init__(target)
+ self.base_options.update(
+ {OptionKey('b_lto_threads'), OptionKey('b_lto'), OptionKey('b_lto_mode'), OptionKey('b_thinlto_cache'),
+ OptionKey('b_thinlto_cache_dir')})
+
# Assembly
self.can_compile_suffixes.add('s')
self.can_compile_suffixes.add('sx')
@@ -492,3 +499,27 @@ def openmp_link_flags(self, env: Environment) -> T.List[str]:
if libs is None:
raise mesonlib.MesonBugException('Could not find libomp')
return super().openmp_link_flags(env) + libs
+
+ def get_lto_compile_args(self, *, threads: int = 0, mode: str = 'default') -> T.List[str]:
+ args: T.List[str] = []
+ if mode == 'thin':
+ # LTO data generated by clang-cl is only usable by lld-link
+ if not isinstance(self.linker, ClangClDynamicLinker):
+ raise mesonlib.MesonException(f"LLVM's ThinLTO only works with lld-link, not {self.linker.id}")
+ args.append(f'-flto={mode}')
+ else:
+ assert mode == 'default', 'someone forgot to wire something up'
+ args.extend(super().get_lto_compile_args(threads=threads))
+ return args
+
+ def get_lto_link_args(self, *, threads: int = 0, mode: str = 'default',
+ thinlto_cache_dir: T.Optional[str] = None) -> T.List[str]:
+ args = []
+ if mode == 'thin' and thinlto_cache_dir is not None:
+ args.extend(self.linker.get_thinlto_cache_args(thinlto_cache_dir))
+ # lld-link /threads:N has the same behaviour as -flto-jobs=N in lld
+ if threads > 0:
+ # clang-cl was released after clang already had LTO support, so it
+ # is safe to assume that all versions of clang-cl support LTO
+ args.append(f'/threads:{threads}')
+ return args
diff --git a/mesonbuild/compilers/objc.py b/mesonbuild/compilers/objc.py
index c63f288e314a..97550c2ea251 100644
--- a/mesonbuild/compilers/objc.py
+++ b/mesonbuild/compilers/objc.py
@@ -5,9 +5,8 @@
import typing as T
-from .. import coredata
from .. import options
-from ..mesonlib import OptionKey
+from ..options import OptionKey
from .compilers import Compiler
from .mixins.clike import CLikeCompiler
@@ -15,6 +14,7 @@
from .mixins.clang import ClangCompiler
if T.TYPE_CHECKING:
+ from .. import coredata
from ..envconfig import MachineInfo
from ..environment import Environment
from ..linkers.linkers import DynamicLinker
@@ -82,7 +82,7 @@ def get_options(self) -> 'coredata.MutableKeyedOptionDictType':
return self.update_options(
super().get_options(),
self.create_option(options.UserComboOption,
- OptionKey('std', machine=self.for_machine, lang='c'),
+ OptionKey('c_std', machine=self.for_machine),
'C language standard to use',
['none', 'c89', 'c99', 'c11', 'c17', 'gnu89', 'gnu99', 'gnu11', 'gnu17'],
'none'),
@@ -90,7 +90,7 @@ def get_options(self) -> 'coredata.MutableKeyedOptionDictType':
def get_option_compile_args(self, options: 'coredata.KeyedOptionDictType') -> T.List[str]:
args = []
- std = options.get_value(OptionKey('std', machine=self.for_machine, lang='c'))
+ std = options.get_value(OptionKey('c_std', machine=self.for_machine))
if std != 'none':
args.append('-std=' + std)
return args
diff --git a/mesonbuild/compilers/objcpp.py b/mesonbuild/compilers/objcpp.py
index e24406c32cc5..973d7bb0cfb8 100644
--- a/mesonbuild/compilers/objcpp.py
+++ b/mesonbuild/compilers/objcpp.py
@@ -5,9 +5,8 @@
import typing as T
-from .. import coredata
from .. import options
-from ..mesonlib import OptionKey
+from ..options import OptionKey
from .mixins.clike import CLikeCompiler
from .compilers import Compiler
@@ -15,6 +14,7 @@
from .mixins.clang import ClangCompiler
if T.TYPE_CHECKING:
+ from .. import coredata
from ..envconfig import MachineInfo
from ..environment import Environment
from ..linkers.linkers import DynamicLinker
@@ -82,7 +82,7 @@ def get_options(self) -> coredata.MutableKeyedOptionDictType:
return self.update_options(
super().get_options(),
self.create_option(options.UserComboOption,
- OptionKey('std', machine=self.for_machine, lang='cpp'),
+ OptionKey('cpp_std', machine=self.for_machine),
'C++ language standard to use',
['none', 'c++98', 'c++11', 'c++14', 'c++17', 'c++20', 'c++2b',
'gnu++98', 'gnu++11', 'gnu++14', 'gnu++17', 'gnu++20',
@@ -92,7 +92,7 @@ def get_options(self) -> coredata.MutableKeyedOptionDictType:
def get_option_compile_args(self, options: 'coredata.KeyedOptionDictType') -> T.List[str]:
args = []
- std = options.get_value(OptionKey('std', machine=self.for_machine, lang='cpp'))
+ std = options.get_value(OptionKey('cpp_std', machine=self.for_machine))
if std != 'none':
args.append('-std=' + std)
return args
diff --git a/mesonbuild/compilers/rust.py b/mesonbuild/compilers/rust.py
index 0ac07a8be763..02ac593842ad 100644
--- a/mesonbuild/compilers/rust.py
+++ b/mesonbuild/compilers/rust.py
@@ -10,7 +10,8 @@
import typing as T
from .. import options
-from ..mesonlib import EnvironmentException, MesonException, Popen_safe_logged, OptionKey
+from ..mesonlib import EnvironmentException, MesonException, Popen_safe_logged
+from ..options import OptionKey
from .compilers import Compiler, clike_debug_args
if T.TYPE_CHECKING:
@@ -75,13 +76,33 @@ def needs_static_linker(self) -> bool:
def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
source_name = os.path.join(work_dir, 'sanity.rs')
output_name = os.path.join(work_dir, 'rusttest')
- with open(source_name, 'w', encoding='utf-8') as ofile:
- ofile.write(textwrap.dedent(
- '''fn main() {
- }
- '''))
+ cmdlist = self.exelist.copy()
- cmdlist = self.exelist + ['-o', output_name, source_name]
+ with open(source_name, 'w', encoding='utf-8') as ofile:
+ # If machine kernel is not `none`, try to compile a dummy program.
+ # If 'none', this is likely a `no-std` (i.e. bare metal) project.
+ if self.info.kernel != 'none':
+ ofile.write(textwrap.dedent(
+ '''fn main() {
+ }
+ '''))
+ else:
+ # If rustc linker is gcc, add `-nostartfiles`
+ if 'ld.' in self.linker.id:
+ cmdlist.extend(['-C', 'link-arg=-nostartfiles'])
+ ofile.write(textwrap.dedent(
+ '''#![no_std]
+ #![no_main]
+ #[no_mangle]
+ pub fn _start() {
+ }
+ #[panic_handler]
+ fn panic(_info: &core::panic::PanicInfo) -> ! {
+ loop {}
+ }
+ '''))
+
+ cmdlist.extend(['-o', output_name, source_name])
pc, stdo, stde = Popen_safe_logged(cmdlist, cwd=work_dir)
if pc.returncode != 0:
raise EnvironmentException(f'Rust compiler {self.name_string()} cannot compile programs.')
@@ -106,6 +127,10 @@ def _native_static_libs(self, work_dir: str, source_name: str) -> None:
raise EnvironmentException('Rust compiler cannot compile staticlib.')
match = re.search('native-static-libs: (.*)$', stde, re.MULTILINE)
if not match:
+ if self.info.kernel == 'none':
+ # no match and kernel == none (i.e. bare metal) is a valid use case.
+ # return and leave the native_static_libs list empty
+ return
raise EnvironmentException('Failed to find native-static-libs in Rust compiler output.')
# Exclude some well known libraries that we don't need because they
# are always part of C/C++ linkers. Rustc probably should not print
@@ -117,11 +142,18 @@ def _native_static_libs(self, work_dir: str, source_name: str) -> None:
def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
return ['--dep-info', outfile]
+ @functools.lru_cache(maxsize=None)
def get_sysroot(self) -> str:
cmd = self.get_exelist(ccache=False) + ['--print', 'sysroot']
p, stdo, stde = Popen_safe_logged(cmd)
return stdo.split('\n', maxsplit=1)[0]
+ @functools.lru_cache(maxsize=None)
+ def get_target_libdir(self) -> str:
+ cmd = self.get_exelist(ccache=False) + ['--print', 'target-libdir']
+ p, stdo, stde = Popen_safe_logged(cmd)
+ return stdo.split('\n', maxsplit=1)[0]
+
@functools.lru_cache(maxsize=None)
def get_crt_static(self) -> bool:
cmd = self.get_exelist(ccache=False) + ['--print', 'cfg']
@@ -159,7 +191,7 @@ def use_linker_args(cls, linker: str, version: str) -> T.List[str]:
def get_options(self) -> MutableKeyedOptionDictType:
return dict((self.create_option(options.UserComboOption,
- self.form_langopt_key('std'),
+ self.form_compileropt_key('std'),
'Rust edition to use',
['none', '2015', '2018', '2021'],
'none'),))
@@ -172,7 +204,7 @@ def get_dependency_compile_args(self, dep: 'Dependency') -> T.List[str]:
def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
args = []
- key = self.form_langopt_key('std')
+ key = self.form_compileropt_key('std')
std = options.get_value(key)
if std != 'none':
args.append('--edition=' + std)
diff --git a/mesonbuild/compilers/vala.py b/mesonbuild/compilers/vala.py
index 2e35db109560..a1d57b38cb8e 100644
--- a/mesonbuild/compilers/vala.py
+++ b/mesonbuild/compilers/vala.py
@@ -8,11 +8,12 @@
from .. import mlog
from .. import mesonlib
-from ..mesonlib import EnvironmentException, version_compare, LibType, OptionKey
+from ..mesonlib import EnvironmentException, version_compare, LibType
+from ..options import OptionKey
from .compilers import CompileCheckMode, Compiler
-from ..arglist import CompilerArgs
if T.TYPE_CHECKING:
+ from ..arglist import CompilerArgs
from ..coredata import KeyedOptionDictType
from ..envconfig import MachineInfo
from ..environment import Environment
diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py
index 8c27c3ae1fc9..84c352199bfd 100644
--- a/mesonbuild/coredata.py
+++ b/mesonbuild/coredata.py
@@ -18,19 +18,21 @@
MesonBugException,
MesonException, MachineChoice, PerMachine,
PerMachineDefaultable,
- OptionKey, OptionType, stringlistify,
+ stringlistify,
pickle_load
)
+from .options import OptionKey
+
from .machinefile import CmdLineFileParser
import ast
-import argparse
import enum
import shlex
import typing as T
if T.TYPE_CHECKING:
+ import argparse
from typing_extensions import Protocol
from typing import Any
@@ -72,7 +74,7 @@ class SharedCMDOptions(Protocol):
#
# Pip requires that RCs are named like this: '0.1.0.rc1'
# But the corresponding Git tag needs to be '0.1.0rc1'
-version = '1.5.2'
+version = '1.6.1'
# The next stable version when we are in dev. This is used to allow projects to
# require meson version >=1.2.0 when using 1.1.99. FeatureNew won't warn when
@@ -420,7 +422,11 @@ def add_builtin_option(opts_map: 'MutableKeyedOptionDictType', key: OptionKey,
value = opts_map.get_value(key.as_root())
else:
value = None
- opts_map.add_system_option(key, opt.init_option(key, value, options.default_prefix()))
+ if key.has_module_prefix():
+ modulename = key.get_module_prefix()
+ opts_map.add_module_option(modulename, key, opt.init_option(key, value, options.default_prefix()))
+ else:
+ opts_map.add_system_option(key, opt.init_option(key, value, options.default_prefix()))
def init_backend_options(self, backend_name: str) -> None:
if backend_name == 'ninja':
@@ -453,7 +459,7 @@ def get_option(self, key: OptionKey) -> T.Union[T.List[str], str, int, bool]:
def set_option(self, key: OptionKey, value, first_invocation: bool = False) -> bool:
dirty = False
- if key.is_builtin():
+ if self.optstore.is_builtin_option(key):
if key.name == 'prefix':
value = self.sanitize_prefix(value)
else:
@@ -566,26 +572,23 @@ def _set_others_from_buildtype(self, value: str) -> bool:
return dirty
- @staticmethod
- def is_per_machine_option(optname: OptionKey) -> bool:
+ def is_per_machine_option(self, optname: OptionKey) -> bool:
if optname.as_host() in options.BUILTIN_OPTIONS_PER_MACHINE:
return True
- return optname.lang is not None
+ return self.optstore.is_compiler_option(optname)
def get_external_args(self, for_machine: MachineChoice, lang: str) -> T.List[str]:
# mypy cannot analyze type of OptionKey
- key = OptionKey('args', machine=for_machine, lang=lang)
+ key = OptionKey(f'{lang}_args', machine=for_machine)
return T.cast('T.List[str]', self.optstore.get_value(key))
def get_external_link_args(self, for_machine: MachineChoice, lang: str) -> T.List[str]:
# mypy cannot analyze type of OptionKey
- key = OptionKey('link_args', machine=for_machine, lang=lang)
+ key = OptionKey(f'{lang}_link_args', machine=for_machine)
return T.cast('T.List[str]', self.optstore.get_value(key))
def update_project_options(self, project_options: 'MutableKeyedOptionDictType', subproject: SubProject) -> None:
for key, value in project_options.items():
- if not key.is_project():
- continue
if key not in self.optstore:
self.optstore.add_project_option(key, value)
continue
@@ -608,7 +611,7 @@ def update_project_options(self, project_options: 'MutableKeyedOptionDictType',
# Find any extranious keys for this project and remove them
for key in self.optstore.keys() - project_options.keys():
- if key.is_project() and key.subproject == subproject:
+ if self.optstore.is_project_option(key) and key.subproject == subproject:
self.optstore.remove(key)
def is_cross_build(self, when_building_for: MachineChoice = MachineChoice.HOST) -> bool:
@@ -652,12 +655,23 @@ def set_options(self, opts_to_set: T.Dict[OptionKey, T.Any], subproject: str = '
continue
elif k in self.optstore:
dirty |= self.set_option(k, v, first_invocation)
- elif k.machine != MachineChoice.BUILD and k.type != OptionType.COMPILER:
+ elif k.machine != MachineChoice.BUILD and not self.optstore.is_compiler_option(k):
unknown_options.append(k)
if unknown_options:
- unknown_options_str = ', '.join(sorted(str(s) for s in unknown_options))
- sub = f'In subproject {subproject}: ' if subproject else ''
- raise MesonException(f'{sub}Unknown options: "{unknown_options_str}"')
+ if subproject:
+ # The subproject may have top-level options that should be used
+ # when it is not a subproject. Ignore those for now. With the option
+ # refactor, they will get per-subproject values.
+ really_unknown = []
+ for uo in unknown_options:
+ topkey = uo.evolve(subproject='')
+ if topkey not in self.optstore:
+ really_unknown.append(uo)
+ unknown_options = really_unknown
+ if unknown_options:
+ unknown_options_str = ', '.join(sorted(str(s) for s in unknown_options))
+ sub = f'In subproject {subproject}: ' if subproject else ''
+ raise MesonException(f'{sub}Unknown options: "{unknown_options_str}"')
if not self.is_cross_build():
dirty |= self.copy_build_options_from_regular_ones()
@@ -694,13 +708,13 @@ def set_default_options(self, default_options: T.MutableMapping[OptionKey, str],
# Always test this using the HOST machine, as many builtin options
# are not valid for the BUILD machine, but the yielding value does
# not differ between them even when they are valid for both.
- if subproject and k.is_builtin() and self.optstore.get_value_object(k.evolve(subproject='', machine=MachineChoice.HOST)).yielding:
+ if subproject and self.optstore.is_builtin_option(k) and self.optstore.get_value_object(k.evolve(subproject='', machine=MachineChoice.HOST)).yielding:
continue
# Skip base, compiler, and backend options, they are handled when
# adding languages and setting backend.
- if k.type in {OptionType.COMPILER, OptionType.BACKEND}:
+ if self.optstore.is_compiler_option(k) or self.optstore.is_backend_option(k):
continue
- if k.type == OptionType.BASE and k.as_root() in base_options:
+ if self.optstore.is_base_option(k) and k.as_root() in base_options:
# set_options will report unknown base options
continue
options[k] = v
@@ -732,7 +746,8 @@ def add_lang_args(self, lang: str, comp: T.Type['Compiler'],
# These options are all new at this point, because the compiler is
# responsible for adding its own options, thus calling
# `self.optstore.update()`` is perfectly safe.
- self.optstore.update(compilers.get_global_options(lang, comp, for_machine, env))
+ for gopt_key, gopt_valobj in compilers.get_global_options(lang, comp, for_machine, env).items():
+ self.optstore.add_compiler_option(lang, gopt_key, gopt_valobj)
def process_compiler_options(self, lang: str, comp: Compiler, env: Environment, subproject: str) -> None:
from . import compilers
@@ -906,7 +921,17 @@ def __getitem__(self, key: OptionKey) -> options.UserOption:
# FIXME: This is fundamentally the same algorithm than interpreter.get_option_internal().
# We should try to share the code somehow.
key = key.evolve(subproject=self.subproject)
- if not key.is_project():
+ if not isinstance(self.original_options, options.OptionStore):
+ # This is only used by CUDA currently.
+ # This entire class gets removed when the option
+ # refactor is finished.
+ if '_' in key.name or key.lang is not None:
+ is_project_option = False
+ else:
+ sys.exit(f'FAIL {key}.')
+ else:
+ is_project_option = self.original_options.is_project_option(key)
+ if not is_project_option:
opt = self.original_options.get(key)
if opt is None or opt.yielding:
key2 = key.as_root()
@@ -914,7 +939,7 @@ def __getitem__(self, key: OptionKey) -> options.UserOption:
# to hold overrides.
if isinstance(self.original_options, options.OptionStore):
if key2 not in self.original_options:
- raise KeyError
+ raise KeyError(f'{key} {key2}')
opt = self.original_options.get_value_object(key2)
else:
opt = self.original_options[key2]
diff --git a/mesonbuild/dependencies/__init__.py b/mesonbuild/dependencies/__init__.py
index ec90e373067c..4552987898c0 100644
--- a/mesonbuild/dependencies/__init__.py
+++ b/mesonbuild/dependencies/__init__.py
@@ -197,6 +197,7 @@ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.
'zlib': 'dev',
'jni': 'dev',
'jdk': 'dev',
+ 'diasdk': 'dev',
'boost': 'boost',
'cuda': 'cuda',
diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py
index 9b218c6432b8..ed6138a7ee0b 100644
--- a/mesonbuild/dependencies/base.py
+++ b/mesonbuild/dependencies/base.py
@@ -1,5 +1,6 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2013-2018 The Meson development team
+# Copyright © 2024 Intel Corporation
# This file contains the detection logic for external dependencies.
# Custom logic for several other packages are in separate files.
@@ -10,12 +11,13 @@
import collections
import itertools
import typing as T
+import uuid
from enum import Enum
from .. import mlog, mesonlib
from ..compilers import clib_langs
-from ..mesonlib import LibType, MachineChoice, MesonException, HoldableObject, OptionKey
-from ..mesonlib import version_compare_many
+from ..mesonlib import LibType, MachineChoice, MesonException, HoldableObject, version_compare_many
+from ..options import OptionKey
#from ..interpreterbase import FeatureDeprecated, FeatureNew
if T.TYPE_CHECKING:
@@ -106,6 +108,9 @@ def _process_include_type_kw(cls, kwargs: T.Dict[str, T.Any]) -> str:
return kwargs['include_type']
def __init__(self, type_name: DependencyTypeName, kwargs: T.Dict[str, T.Any]) -> None:
+ # This allows two Dependencies to be compared even after being copied.
+ # The purpose is to allow the name to be changed, but still have a proper comparison
+ self._id = uuid.uuid4().int
self.name = f'dep{id(self)}'
self.version: T.Optional[str] = None
self.language: T.Optional[str] = None # None means C-like
@@ -124,6 +129,14 @@ def __init__(self, type_name: DependencyTypeName, kwargs: T.Dict[str, T.Any]) ->
self.featurechecks: T.List['FeatureCheckBase'] = []
self.feature_since: T.Optional[T.Tuple[str, str]] = None
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, Dependency):
+ return NotImplemented
+ return self._id == other._id
+
+ def __hash__(self) -> int:
+ return self._id
+
def __repr__(self) -> str:
return f'<{self.__class__.__name__} {self.name}: {self.is_found}>'
@@ -157,7 +170,7 @@ def get_compile_args(self) -> T.List[str]:
return self.compile_args
def get_all_compile_args(self) -> T.List[str]:
- """Get the compile arguments from this dependency and it's sub dependencies."""
+ """Get the compile arguments from this dependency and its sub dependencies."""
return list(itertools.chain(self.get_compile_args(),
*(d.get_all_compile_args() for d in self.ext_deps)))
@@ -167,7 +180,7 @@ def get_link_args(self, language: T.Optional[str] = None, raw: bool = False) ->
return self.link_args
def get_all_link_args(self) -> T.List[str]:
- """Get the link arguments from this dependency and it's sub dependencies."""
+ """Get the link arguments from this dependency and its sub dependencies."""
return list(itertools.chain(self.get_link_args(),
*(d.get_all_link_args() for d in self.ext_deps)))
@@ -213,7 +226,7 @@ def get_partial_dependency(self, *, compile_args: bool = False,
compile_args -- any compile args
link_args -- any link args
- Additionally the new dependency will have the version parameter of it's
+ Additionally the new dependency will have the version parameter of its
parent (if any) and the requested values of any dependencies will be
added as well.
"""
@@ -237,7 +250,7 @@ def _add_sub_dependency(self, deplist: T.Iterable[T.Callable[[], 'Dependency']])
def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None,
configtool: T.Optional[str] = None, internal: T.Optional[str] = None,
- default_value: T.Optional[str] = None,
+ system: T.Optional[str] = None, default_value: T.Optional[str] = None,
pkgconfig_define: PkgConfigDefineType = None) -> str:
if default_value is not None:
return default_value
@@ -248,6 +261,14 @@ def generate_system_dependency(self, include_type: str) -> 'Dependency':
new_dep.include_type = self._process_include_type_kw({'include_type': include_type})
return new_dep
+ def get_as_static(self, recursive: bool) -> Dependency:
+ """Used as base case for internal_dependency"""
+ return self
+
+ def get_as_shared(self, recursive: bool) -> Dependency:
+ """Used as base case for internal_dependency"""
+ return self
+
class InternalDependency(Dependency):
def __init__(self, version: str, incdirs: T.List['IncludeDirs'], compile_args: T.List[str],
link_args: T.List[str],
@@ -321,7 +342,7 @@ def get_include_dirs(self) -> T.List['IncludeDirs']:
def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None,
configtool: T.Optional[str] = None, internal: T.Optional[str] = None,
- default_value: T.Optional[str] = None,
+ system: T.Optional[str] = None, default_value: T.Optional[str] = None,
pkgconfig_define: PkgConfigDefineType = None) -> str:
val = self.variables.get(internal, default_value)
if val is not None:
@@ -345,6 +366,20 @@ def generate_link_whole_dependency(self) -> Dependency:
new_dep.libraries = []
return new_dep
+ def get_as_static(self, recursive: bool) -> InternalDependency:
+ new_dep = copy.copy(self)
+ new_dep.libraries = [lib.get('static') for lib in self.libraries]
+ if recursive:
+ new_dep.ext_deps = [dep.get_as_static(True) for dep in self.ext_deps]
+ return new_dep
+
+ def get_as_shared(self, recursive: bool) -> InternalDependency:
+ new_dep = copy.copy(self)
+ new_dep.libraries = [lib.get('shared') for lib in self.libraries]
+ if recursive:
+ new_dep.ext_deps = [dep.get_as_shared(True) for dep in self.ext_deps]
+ return new_dep
+
class HasNativeKwarg:
def __init__(self, kwargs: T.Dict[str, T.Any]):
self.for_machine = self.get_for_machine_from_kwargs(kwargs)
@@ -380,6 +415,7 @@ def get_partial_dependency(self, *, compile_args: bool = False,
link_args: bool = False, links: bool = False,
includes: bool = False, sources: bool = False) -> Dependency:
new = copy.copy(self)
+ new._id = uuid.uuid4().int
if not compile_args:
new.compile_args = []
if not link_args:
@@ -450,7 +486,9 @@ def __init__(self, name: str, environment: 'Environment') -> None:
def get_partial_dependency(self, *, compile_args: bool = False,
link_args: bool = False, links: bool = False,
includes: bool = False, sources: bool = False) -> 'NotFoundDependency':
- return copy.copy(self)
+ new = copy.copy(self)
+ new._id = uuid.uuid4().int
+ return new
class ExternalLibrary(ExternalDependency):
@@ -490,6 +528,7 @@ def get_partial_dependency(self, *, compile_args: bool = False,
# External library only has link_args, so ignore the rest of the
# interface.
new = copy.copy(self)
+ new._id = uuid.uuid4().int
if not link_args:
new.link_args = []
return new
diff --git a/mesonbuild/dependencies/blas_lapack.py b/mesonbuild/dependencies/blas_lapack.py
index 3bd4ae8425bd..28e673223e18 100644
--- a/mesonbuild/dependencies/blas_lapack.py
+++ b/mesonbuild/dependencies/blas_lapack.py
@@ -23,7 +23,8 @@
from .. import mlog
from .. import mesonlib
-from ..mesonlib import MachineChoice, OptionKey
+from ..mesonlib import MachineChoice
+from ..options import OptionKey
from .base import DependencyMethods, SystemDependency
from .cmake import CMakeDependency
diff --git a/mesonbuild/dependencies/boost.py b/mesonbuild/dependencies/boost.py
index 7a461637c4a9..870c0b16b2c3 100644
--- a/mesonbuild/dependencies/boost.py
+++ b/mesonbuild/dependencies/boost.py
@@ -11,6 +11,7 @@
from .. import mlog
from .. import mesonlib
+from ..options import OptionKey
from .base import DependencyException, SystemDependency
from .detect import packages
@@ -55,7 +56,7 @@
# Mac / homebrew: libboost_.dylib + libboost_-mt.dylib (location = /usr/local/lib)
# Mac / macports: libboost_.dylib + libboost_-mt.dylib (location = /opt/local/lib)
#
-# Its not clear that any other abi tags (e.g. -gd) are used in official packages.
+# It's not clear that any other abi tags (e.g. -gd) are used in official packages.
#
# On Linux systems, boost libs have multithreading support enabled, but without the -mt tag.
#
@@ -260,7 +261,7 @@ def update_vers(new_vers: str) -> None:
update_vers(i[2:])
elif i.isdigit():
update_vers(i)
- elif len(i) >= 3 and i[0].isdigit and i[2].isdigit() and i[1] == '.':
+ elif len(i) >= 3 and i[0].isdigit() and i[2].isdigit() and i[1] == '.':
update_vers(i)
else:
other_tags += [i]
@@ -340,7 +341,7 @@ def get_link_args(self) -> T.List[str]:
class BoostDependency(SystemDependency):
def __init__(self, environment: Environment, kwargs: T.Dict[str, T.Any]) -> None:
super().__init__('boost', environment, kwargs, language='cpp')
- buildtype = environment.coredata.get_option(mesonlib.OptionKey('buildtype'))
+ buildtype = environment.coredata.get_option(OptionKey('buildtype'))
assert isinstance(buildtype, str)
self.debug = buildtype.startswith('debug')
self.multithreading = kwargs.get('threading', 'multi') == 'multi'
diff --git a/mesonbuild/dependencies/cmake.py b/mesonbuild/dependencies/cmake.py
index 66d331925574..4a722157ff56 100644
--- a/mesonbuild/dependencies/cmake.py
+++ b/mesonbuild/dependencies/cmake.py
@@ -525,7 +525,7 @@ def _detect_dep(self, name: str, package_version: str, modules: T.List[T.Tuple[s
for i, required in modules:
if i not in self.traceparser.targets:
if not required:
- mlog.warning('CMake: T.Optional module', mlog.bold(self._original_module_name(i)), 'for', mlog.bold(name), 'was not found')
+ mlog.warning('CMake: Optional module', mlog.bold(self._original_module_name(i)), 'for', mlog.bold(name), 'was not found')
continue
raise self._gen_exception('CMake: invalid module {} for {}.\n'
'Try to explicitly specify one or more targets with the "modules" property.\n'
@@ -617,7 +617,7 @@ def log_details(self) -> str:
def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None,
configtool: T.Optional[str] = None, internal: T.Optional[str] = None,
- default_value: T.Optional[str] = None,
+ system: T.Optional[str] = None, default_value: T.Optional[str] = None,
pkgconfig_define: PkgConfigDefineType = None) -> str:
if cmake and self.traceparser is not None:
try:
diff --git a/mesonbuild/dependencies/configtool.py b/mesonbuild/dependencies/configtool.py
index 679c69f5d9be..476f7ad420aa 100644
--- a/mesonbuild/dependencies/configtool.py
+++ b/mesonbuild/dependencies/configtool.py
@@ -22,7 +22,7 @@ class ConfigToolDependency(ExternalDependency):
Takes the following extra keys in kwargs that it uses internally:
:tools List[str]: A list of tool names to use
- :version_arg str: The argument to pass to the tool to get it's version
+ :version_arg str: The argument to pass to the tool to get its version
:skip_version str: The argument to pass to the tool to ignore its version
(if ``version_arg`` fails, but it may start accepting it in the future)
Because some tools are stupid and don't accept --version
@@ -37,7 +37,7 @@ class ConfigToolDependency(ExternalDependency):
allow_default_for_cross = False
__strip_version = re.compile(r'^[0-9][0-9.]+')
- def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None):
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None, exclude_paths: T.Optional[T.List[str]] = None):
super().__init__(DependencyTypeName('config-tool'), environment, kwargs, language=language)
self.name = name
# You may want to overwrite the class version in some cases
@@ -52,7 +52,7 @@ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.
req_version = mesonlib.stringlistify(req_version_raw)
else:
req_version = []
- tool, version = self.find_config(req_version, kwargs.get('returncode_value', 0))
+ tool, version = self.find_config(req_version, kwargs.get('returncode_value', 0), exclude_paths=exclude_paths)
self.config = tool
self.is_found = self.report_config(version, req_version)
if not self.is_found:
@@ -69,49 +69,59 @@ def _sanitize_version(self, version: str) -> str:
return m.group(0).rstrip('.')
return version
- def find_config(self, versions: T.List[str], returncode: int = 0) \
+ def _check_and_get_version(self, tool: T.List[str], returncode: int) -> T.Tuple[bool, T.Union[str, None]]:
+ """Check whether a command is valid and get its version"""
+ p, out = Popen_safe(tool + [self.version_arg])[:2]
+ valid = True
+ if p.returncode != returncode:
+ if self.skip_version:
+ # maybe the executable is valid even if it doesn't support --version
+ p = Popen_safe(tool + [self.skip_version])[0]
+ if p.returncode != returncode:
+ valid = False
+ else:
+ valid = False
+ version = self._sanitize_version(out.strip())
+ return valid, version
+
+ def find_config(self, versions: T.List[str], returncode: int = 0, exclude_paths: T.Optional[T.List[str]] = None) \
-> T.Tuple[T.Optional[T.List[str]], T.Optional[str]]:
"""Helper method that searches for config tool binaries in PATH and
returns the one that best matches the given version requirements.
"""
+ exclude_paths = [] if exclude_paths is None else exclude_paths
best_match: T.Tuple[T.Optional[T.List[str]], T.Optional[str]] = (None, None)
for potential_bin in find_external_program(
self.env, self.for_machine, self.tool_name,
- self.tool_name, self.tools, allow_default_for_cross=self.allow_default_for_cross):
+ self.tool_name, self.tools, exclude_paths=exclude_paths,
+ allow_default_for_cross=self.allow_default_for_cross):
if not potential_bin.found():
continue
tool = potential_bin.get_command()
try:
- p, out = Popen_safe(tool + [self.version_arg])[:2]
+ valid, version = self._check_and_get_version(tool, returncode)
except (FileNotFoundError, PermissionError):
continue
- if p.returncode != returncode:
- if self.skip_version:
- # maybe the executable is valid even if it doesn't support --version
- p = Popen_safe(tool + [self.skip_version])[0]
- if p.returncode != returncode:
- continue
- else:
- continue
-
- out = self._sanitize_version(out.strip())
+ if not valid:
+ continue
+
# Some tools, like pcap-config don't supply a version, but also
# don't fail with --version, in that case just assume that there is
# only one version and return it.
- if not out:
+ if not version:
return (tool, None)
if versions:
- is_found = version_compare_many(out, versions)[0]
+ is_found = version_compare_many(version, versions)[0]
# This allows returning a found version without a config tool,
# which is useful to inform the user that you found version x,
# but y was required.
if not is_found:
tool = None
if best_match[1]:
- if version_compare(out, '> {}'.format(best_match[1])):
- best_match = (tool, out)
+ if version_compare(version, '> {}'.format(best_match[1])):
+ best_match = (tool, version)
else:
- best_match = (tool, out)
+ best_match = (tool, version)
return best_match
@@ -150,7 +160,7 @@ def log_tried() -> str:
def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None,
configtool: T.Optional[str] = None, internal: T.Optional[str] = None,
- default_value: T.Optional[str] = None,
+ system: T.Optional[str] = None, default_value: T.Optional[str] = None,
pkgconfig_define: PkgConfigDefineType = None) -> str:
if configtool:
p, out, _ = Popen_safe(self.config + self.get_variable_args(configtool))
diff --git a/mesonbuild/dependencies/data/CMakePathInfo.txt b/mesonbuild/dependencies/data/CMakePathInfo.txt
index 662ec58363e2..c3d8c59a145b 100644
--- a/mesonbuild/dependencies/data/CMakePathInfo.txt
+++ b/mesonbuild/dependencies/data/CMakePathInfo.txt
@@ -13,7 +13,7 @@ list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_APPBUNDLE_PATH})
set(LIB_ARCH_LIST)
if(CMAKE_LIBRARY_ARCHITECTURE_REGEX)
- file(GLOB implicit_dirs RELATIVE /lib /lib/*-linux-gnu* )
+ file(GLOB implicit_dirs RELATIVE /lib /lib/*-gnu* )
foreach(dir ${implicit_dirs})
if("${dir}" MATCHES "${CMAKE_LIBRARY_ARCHITECTURE_REGEX}")
list(APPEND LIB_ARCH_LIST "${dir}")
diff --git a/mesonbuild/dependencies/dev.py b/mesonbuild/dependencies/dev.py
index de85516feb64..94f51ff69b12 100644
--- a/mesonbuild/dependencies/dev.py
+++ b/mesonbuild/dependencies/dev.py
@@ -28,8 +28,10 @@
if T.TYPE_CHECKING:
from ..envconfig import MachineInfo
from ..environment import Environment
+ from ..compilers import Compiler
from ..mesonlib import MachineChoice
from typing_extensions import TypedDict
+ from ..interpreter.type_checking import PkgConfigDefineType
class JNISystemDependencyKW(TypedDict):
modules: T.List[str]
@@ -700,6 +702,106 @@ def __init__(self, environment: 'Environment', kwargs: JNISystemDependencyKW):
packages['jdk'] = JDKSystemDependency
+class DiaSDKSystemDependency(SystemDependency):
+
+ def _try_path(self, diadir: str, cpu: str) -> bool:
+ if not os.path.isdir(diadir):
+ return False
+
+ include = os.path.join(diadir, 'include')
+ if not os.path.isdir(include):
+ mlog.error('DIA SDK is missing include directory:', include)
+ return False
+
+ lib = os.path.join(diadir, 'lib', cpu, 'diaguids.lib')
+ if not os.path.exists(lib):
+ mlog.error('DIA SDK is missing library:', lib)
+ return False
+
+ bindir = os.path.join(diadir, 'bin', cpu)
+ if not os.path.exists(bindir):
+ mlog.error(f'Directory {bindir} not found')
+ return False
+
+ found = glob.glob(os.path.join(bindir, 'msdia*.dll'))
+ if not found:
+ mlog.error("Can't find msdia*.dll in " + bindir)
+ return False
+ if len(found) > 1:
+ mlog.error('Multiple msdia*.dll files found in ' + bindir)
+ return False
+ self.dll = found[0]
+
+ # Parse only major version from DLL name (eg '8' from 'msdia80.dll', '14' from 'msdia140.dll', etc.).
+ # Minor version is not reflected in the DLL name, instead '0' is always used.
+ # Aside from major version in DLL name, the SDK version is not visible to user anywhere.
+ # The only place where the full version is stored, seems to be the Version field in msdia*.dll resources.
+ dllname = os.path.basename(self.dll)
+ versionstr = dllname[len('msdia'):-len('.dll')]
+ if versionstr[-1] == '0':
+ self.version = versionstr[:-1]
+ else:
+ mlog.error(f"Unexpected DIA SDK version string in '{dllname}'")
+ self.version = 'unknown'
+
+ self.compile_args.append('-I' + include)
+ self.link_args.append(lib)
+ self.is_found = True
+ return True
+
+ # Check if compiler has a built-in macro defined
+ @staticmethod
+ def _has_define(compiler: 'Compiler', dname: str, env: 'Environment') -> bool:
+ defval, _ = compiler.get_define(dname, '', env, [], [])
+ return defval is not None
+
+ def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+ super().__init__('diasdk', environment, kwargs)
+ self.is_found = False
+
+ compilers = environment.coredata.compilers.host
+ if 'cpp' in compilers:
+ compiler = compilers['cpp']
+ elif 'c' in compilers:
+ compiler = compilers['c']
+ else:
+ raise DependencyException('DIA SDK is only supported in C and C++ projects')
+
+ is_msvc_clang = compiler.id == 'clang' and self._has_define(compiler, '_MSC_VER', environment)
+ if compiler.id not in {'msvc', 'clang-cl'} and not is_msvc_clang:
+ raise DependencyException('DIA SDK is only supported with Microsoft Visual Studio compilers')
+
+ cpu_translate = {'arm': 'arm', 'aarch64': 'arm64', 'x86': '.', 'x86_64': 'amd64'}
+ cpu_family = environment.machines.host.cpu_family
+ cpu = cpu_translate.get(cpu_family)
+ if cpu is None:
+ raise DependencyException(f'DIA SDK is not supported for "{cpu_family}" architecture')
+
+ vsdir = os.environ.get('VSInstallDir')
+ if vsdir is None:
+ raise DependencyException("Environment variable VSInstallDir required for DIA SDK is not set")
+
+ diadir = os.path.join(vsdir, 'DIA SDK')
+ if self._try_path(diadir, cpu):
+ mlog.debug('DIA SDK was found at default path: ', diadir)
+ self.is_found = True
+ return
+ mlog.debug('DIA SDK was not found at default path: ', diadir)
+
+ return
+
+ def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None,
+ configtool: T.Optional[str] = None, internal: T.Optional[str] = None,
+ system: T.Optional[str] = None, default_value: T.Optional[str] = None,
+ pkgconfig_define: PkgConfigDefineType = None) -> str:
+ if system == 'dll' and self.is_found:
+ return self.dll
+ if default_value is not None:
+ return default_value
+ raise DependencyException(f'Could not get system variable and no default was set for {self!r}')
+
+packages['diasdk'] = DiaSDKSystemDependency
+
packages['llvm'] = llvm_factory = DependencyFactory(
'LLVM',
[DependencyMethods.CMAKE, DependencyMethods.CONFIG_TOOL],
diff --git a/mesonbuild/dependencies/dub.py b/mesonbuild/dependencies/dub.py
index e4f09d4d36f0..1c904ab2a5af 100644
--- a/mesonbuild/dependencies/dub.py
+++ b/mesonbuild/dependencies/dub.py
@@ -5,7 +5,8 @@
from .base import ExternalDependency, DependencyException, DependencyTypeName
from .pkgconfig import PkgConfigDependency
-from ..mesonlib import (Popen_safe, OptionKey, join_args, version_compare)
+from ..mesonlib import (Popen_safe, join_args, version_compare)
+from ..options import OptionKey
from ..programs import ExternalProgram
from .. import mlog
import re
@@ -14,13 +15,59 @@
import typing as T
if T.TYPE_CHECKING:
+ from typing_extensions import TypedDict
+
from ..environment import Environment
+ # Definition of what `dub describe` returns (only the fields used by Meson)
+ class DubDescription(TypedDict):
+ platform: T.List[str]
+ architecture: T.List[str]
+ buildType: str
+ packages: T.List[DubPackDesc]
+ targets: T.List[DubTargetDesc]
+
+ class DubPackDesc(TypedDict):
+ name: str
+ version: str
+ active: bool
+ configuration: str
+ path: str
+ targetType: str
+ targetFileName: str
+
+ class DubTargetDesc(TypedDict):
+ rootPackage: str
+ linkDependencies: T.List[str]
+ buildSettings: DubBuildSettings
+ cacheArtifactPath: str
+
+ class DubBuildSettings(TypedDict):
+ importPaths: T.List[str]
+ stringImportPaths: T.List[str]
+ versions: T.List[str]
+ mainSourceFile: str
+ sourceFiles: T.List[str]
+ dflags: T.List[str]
+ libs: T.List[str]
+ lflags: T.List[str]
+
+ class FindTargetEntry(TypedDict):
+ search: str
+ artifactPath: str
class DubDependency(ExternalDependency):
# dub program and version
class_dubbin: T.Optional[T.Tuple[ExternalProgram, str]] = None
class_dubbin_searched = False
+ class_cache_dir = ''
+
+ # Map Meson Compiler ID's to Dub Compiler ID's
+ _ID_MAP: T.Mapping[str, str] = {
+ 'dmd': 'dmd',
+ 'gcc': 'gdc',
+ 'llvm': 'ldc',
+ }
def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
super().__init__(DependencyTypeName('dub'), environment, kwargs, language='d')
@@ -47,11 +94,20 @@ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.
assert isinstance(self.dubbin, ExternalProgram)
- # Check if Dub version is compatible with Meson
- if version_compare(dubver, '>1.31.1'):
+ # Check if Dub's compatibility with Meson
+ self._search_in_cache = version_compare(dubver, '<=1.31.1')
+ self._use_cache_describe = version_compare(dubver, '>=1.35.0')
+ self._dub_has_build_deep = version_compare(dubver, '>=1.35.0')
+
+ if not self._search_in_cache and not self._use_cache_describe:
if self.required:
raise DependencyException(
- f"DUB version {dubver} is not compatible with Meson (can't locate artifacts in Dub cache)")
+ f'DUB version {dubver} is not compatible with Meson'
+ " (can't locate artifacts in DUB's cache). Upgrade to Dub >= 1.35.")
+ else:
+ mlog.warning(f'DUB dependency {name} not found because Dub {dubver} '
+ "is not compatible with Meson. (Can't locate artifacts in DUB's cache)."
+ ' Upgrade to Dub >= 1.35')
self.is_found = False
return
@@ -79,6 +135,20 @@ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.
elif dub_buildtype == 'minsize':
dub_buildtype = 'release'
+ # A command that might be useful in case of missing DUB package
+ def dub_build_deep_command() -> str:
+ if self._dub_has_build_deep:
+ cmd = ['dub', 'build', '--deep']
+ else:
+ cmd = ['dub', 'run', '--yes', 'dub-build-deep', '--']
+
+ return join_args(cmd + [
+ main_pack_spec,
+ '--arch=' + dub_arch,
+ '--compiler=' + self.compiler.get_exelist()[-1],
+ '--build=' + dub_buildtype
+ ])
+
# Ask dub for the package
describe_cmd = [
'describe', main_pack_spec, '--arch=' + dub_arch,
@@ -89,37 +159,31 @@ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.
if ret != 0:
mlog.debug('DUB describe failed: ' + err)
if 'locally' in err:
- fetch_cmd = ['dub', 'fetch', main_pack_spec]
mlog.error(mlog.bold(main_pack_spec), 'is not present locally. You may try the following command:')
- mlog.log(mlog.bold(join_args(fetch_cmd)))
+ mlog.log(mlog.bold(dub_build_deep_command()))
self.is_found = False
return
- # A command that might be useful in case of missing DUB package
- def dub_build_deep_command() -> str:
- cmd = [
- 'dub', 'run', 'dub-build-deep', '--yes', '--', main_pack_spec,
- '--arch=' + dub_arch, '--compiler=' + self.compiler.get_exelist()[-1],
- '--build=' + dub_buildtype
- ]
- return join_args(cmd)
-
- dub_comp_id = self.compiler.get_id().replace('llvm', 'ldc').replace('gcc', 'gdc')
- description = json.loads(res)
+ dub_comp_id = self._ID_MAP[self.compiler.get_id()]
+ description: DubDescription = json.loads(res)
self.compile_args = []
self.link_args = self.raw_link_args = []
show_buildtype_warning = False
- def find_package_target(pkg: T.Dict[str, str]) -> bool:
+ # collect all targets
+ targets = {t['rootPackage']: t for t in description['targets']}
+
+ def find_package_target(pkg: DubPackDesc) -> bool:
nonlocal show_buildtype_warning
# try to find a static library in a DUB folder corresponding to
# version, configuration, compiler, arch and build-type
# if can find, add to link_args.
# link_args order is meaningful, so this function MUST be called in the right order
pack_id = f'{pkg["name"]}@{pkg["version"]}'
- (tgt_file, compatibilities) = self._find_compatible_package_target(description, pkg, dub_comp_id)
+ tgt_desc = targets[pkg['name']]
+ (tgt_file, compatibilities) = self._find_target_in_cache(description, pkg, tgt_desc, dub_comp_id)
if tgt_file is None:
if not compatibilities:
mlog.error(mlog.bold(pack_id), 'not found')
@@ -132,7 +196,7 @@ def find_package_target(pkg: T.Dict[str, str]) -> bool:
mlog.error(mlog.bold(pack_id), 'found but not compiled for', mlog.bold(dub_arch))
elif 'platform' not in compatibilities:
mlog.error(mlog.bold(pack_id), 'found but not compiled for',
- mlog.bold(description['platform'].join('.')))
+ mlog.bold('.'.join(description['platform'])))
elif 'configuration' not in compatibilities:
mlog.error(mlog.bold(pack_id), 'found but not compiled for the',
mlog.bold(pkg['configuration']), 'configuration')
@@ -160,7 +224,7 @@ def find_package_target(pkg: T.Dict[str, str]) -> bool:
# 1
self.is_found = False
- packages = {}
+ packages: T.Dict[str, DubPackDesc] = {}
for pkg in description['packages']:
packages[pkg['name']] = pkg
@@ -184,11 +248,6 @@ def find_package_target(pkg: T.Dict[str, str]) -> bool:
self.version = pkg['version']
self.pkg = pkg
- # collect all targets
- targets = {}
- for tgt in description['targets']:
- targets[tgt['rootPackage']] = tgt
-
if name not in targets:
self.is_found = False
if self.pkg['targetType'] == 'sourceLibrary':
@@ -290,13 +349,22 @@ def find_package_target(pkg: T.Dict[str, str]) -> bool:
# compiler, architecture, configuration...
# It returns (target|None, {compatibilities})
# If None is returned for target, compatibilities will list what other targets were found without full compatibility
- def _find_compatible_package_target(self, jdesc: T.Dict[str, str], jpack: T.Dict[str, str], dub_comp_id: str) -> T.Tuple[str, T.Set[str]]:
- dub_build_path = os.path.join(jpack['path'], '.dub', 'build')
+ def _find_target_in_cache(self, desc: DubDescription, pkg_desc: DubPackDesc,
+ tgt_desc: DubTargetDesc, dub_comp_id: str
+ ) -> T.Tuple[T.Optional[str], T.Set[str]]:
+ mlog.debug('Searching in DUB cache for compatible', pkg_desc['targetFileName'])
+
+ # recent DUB versions include a direct path to a compatible cached artifact
+ if self._use_cache_describe:
+ tgt_file = tgt_desc['cacheArtifactPath']
+ if os.path.exists(tgt_file):
+ return (tgt_file, {'configuration', 'platform', 'arch', 'compiler', 'compiler_version', 'build_type'})
+ else:
+ return (None, set())
- if not os.path.exists(dub_build_path):
- return (None, None)
+ assert self._search_in_cache
- # try to find a dir like library-debug-linux.posix-x86_64-ldc_2081-EF934983A3319F8F8FF2F0E107A363BA
+ # try to find a string like library-debug-linux.posix-x86_64-ldc_2081-EF934983A3319F8F8FF2F0E107A363BA
# fields are:
# - configuration
@@ -306,40 +374,16 @@ def _find_compatible_package_target(self, jdesc: T.Dict[str, str], jpack: T.Dict
# - compiler id (dmd, ldc, gdc)
# - compiler version or frontend id or frontend version?
- conf = jpack['configuration']
- build_type = jdesc['buildType']
- platforms = jdesc['platform']
- archs = jdesc['architecture']
-
- # Get D frontend version implemented in the compiler, or the compiler version itself
- # gdc doesn't support this
- comp_versions = []
-
- if dub_comp_id != 'gdc':
- comp_versions.append(self.compiler.version)
-
- ret, res = self._call_compbin(['--version'])[0:2]
- if ret != 0:
- mlog.error('Failed to run {!r}', mlog.bold(dub_comp_id))
- return (None, None)
- d_ver_reg = re.search('v[0-9].[0-9][0-9][0-9].[0-9]', res) # Ex.: v2.081.2
-
- if d_ver_reg is not None:
- frontend_version = d_ver_reg.group()
- frontend_id = frontend_version.rsplit('.', 1)[0].replace(
- 'v', '').replace('.', '') # Fix structure. Ex.: 2081
- comp_versions.extend([frontend_version, frontend_id])
-
- compatibilities: T.Set[str] = set()
+ comp_versions = self._get_comp_versions_to_find(dub_comp_id)
# build_type is not in check_list because different build types might be compatible.
# We do show a WARNING that the build type is not the same.
# It might be critical in release builds, and acceptable otherwise
- check_list = ('configuration', 'platform', 'arch', 'compiler', 'compiler_version')
-
- for entry in os.listdir(dub_build_path):
+ check_list = {'configuration', 'platform', 'arch', 'compiler', 'compiler_version'}
+ compatibilities: T.Set[str] = set()
- target = os.path.join(dub_build_path, entry, jpack['targetFileName'])
+ for entry in self._cache_entries(pkg_desc):
+ target = entry['artifactPath']
if not os.path.exists(target):
# unless Dub and Meson are racing, the target file should be present
# when the directory is present
@@ -349,33 +393,80 @@ def _find_compatible_package_target(self, jdesc: T.Dict[str, str], jpack: T.Dict
# we build a new set for each entry, because if this target is returned
# we want to return only the compatibilities associated to this target
# otherwise we could miss the WARNING about build_type
- comps = set()
+ comps: T.Set[str] = set()
+
+ search = entry['search']
+
+ mlog.debug('searching compatibility in ' + search)
+ mlog.debug('compiler_versions', comp_versions)
- if conf in entry:
+ if pkg_desc['configuration'] in search:
comps.add('configuration')
- if build_type in entry:
+ if desc['buildType'] in search:
comps.add('build_type')
- if all(platform in entry for platform in platforms):
+ if all(platform in search for platform in desc['platform']):
comps.add('platform')
- if all(arch in entry for arch in archs):
+ if all(arch in search for arch in desc['architecture']):
comps.add('arch')
- if dub_comp_id in entry:
+ if dub_comp_id in search:
comps.add('compiler')
- if dub_comp_id == 'gdc' or any(cv in entry for cv in comp_versions):
+ if not comp_versions or any(cv in search for cv in comp_versions):
comps.add('compiler_version')
- if all(key in comps for key in check_list):
+ if check_list.issubset(comps):
+ mlog.debug('Found', target)
return (target, comps)
else:
compatibilities = set.union(compatibilities, comps)
return (None, compatibilities)
+ def _cache_entries(self, pkg_desc: DubPackDesc) -> T.List[FindTargetEntry]:
+ # the "old" cache is the `.dub` directory in every package of ~/.dub/packages
+ dub_build_path = os.path.join(pkg_desc['path'], '.dub', 'build')
+
+ if not os.path.exists(dub_build_path):
+ mlog.warning('No such cache folder:', dub_build_path)
+ return []
+
+ mlog.debug('Checking in DUB cache folder', dub_build_path)
+
+ return [
+ {
+ 'search': dir_entry,
+ 'artifactPath': os.path.join(dub_build_path, dir_entry, pkg_desc['targetFileName'])
+ }
+ for dir_entry in os.listdir(dub_build_path)
+ ]
+
+ def _get_comp_versions_to_find(self, dub_comp_id: str) -> T.List[str]:
+ # Get D frontend version implemented in the compiler, or the compiler version itself
+ # gdc doesn't support this
+
+ if dub_comp_id == 'gdc':
+ return []
+
+ comp_versions = [self.compiler.version]
+
+ ret, res = self._call_compbin(['--version'])[0:2]
+ if ret != 0:
+ mlog.error('Failed to run', mlog.bold(' '.join(self.dubbin.get_command() + ['--version'])))
+ return []
+ d_ver_reg = re.search('v[0-9].[0-9][0-9][0-9].[0-9]', res) # Ex.: v2.081.2
+
+ if d_ver_reg is not None:
+ frontend_version = d_ver_reg.group()
+ frontend_id = frontend_version.rsplit('.', 1)[0].replace(
+ 'v', '').replace('.', '') # Fix structure. Ex.: 2081
+ comp_versions.extend([frontend_version, frontend_id])
+
+ return comp_versions
+
def _call_dubbin(self, args: T.List[str], env: T.Optional[T.Dict[str, str]] = None) -> T.Tuple[int, str, str]:
assert isinstance(self.dubbin, ExternalProgram)
p, out, err = Popen_safe(self.dubbin.get_command() + args, env=env)
diff --git a/mesonbuild/dependencies/framework.py b/mesonbuild/dependencies/framework.py
index 3c880c7430af..1fbd628235ba 100644
--- a/mesonbuild/dependencies/framework.py
+++ b/mesonbuild/dependencies/framework.py
@@ -47,6 +47,7 @@ def detect(self, name: str, paths: T.List[str]) -> None:
framework_path = self._get_framework_path(p, name)
if framework_path is None:
continue
+ framework_name = framework_path.stem
# We want to prefer the specified paths (in order) over the system
# paths since these are "extra" frameworks.
# For example, Python2's framework is in /System/Library/Frameworks and
@@ -54,11 +55,15 @@ def detect(self, name: str, paths: T.List[str]) -> None:
# Python.framework. We need to know for sure that the framework was
# found in the path we expect.
allow_system = p in self.system_framework_paths
- args = self.clib_compiler.find_framework(name, self.env, [p], allow_system)
+ args = self.clib_compiler.find_framework(framework_name, self.env, [p], allow_system)
if args is None:
continue
self.link_args = args
self.framework_path = framework_path.as_posix()
+ # The search is done case-insensitively, so the found name may differ
+ # from the one that was requested. Setting the name ensures the correct
+ # one is used when linking on case-sensitive filesystems.
+ self.name = framework_name
self.compile_args = ['-F' + self.framework_path]
# We need to also add -I includes to the framework because all
# cross-platform projects such as OpenGL, Python, Qt, GStreamer,
@@ -74,7 +79,7 @@ def _get_framework_path(self, path: str, name: str) -> T.Optional[Path]:
p = Path(path)
lname = name.lower()
for d in p.glob('*.framework/'):
- if lname == d.name.rsplit('.', 1)[0].lower():
+ if lname == d.stem.lower():
return d
return None
diff --git a/mesonbuild/dependencies/misc.py b/mesonbuild/dependencies/misc.py
index 4011c60fb189..4815e1c18cd9 100644
--- a/mesonbuild/dependencies/misc.py
+++ b/mesonbuild/dependencies/misc.py
@@ -17,6 +17,7 @@
from .detect import packages
from .factory import DependencyFactory, factory_methods
from .pkgconfig import PkgConfigDependency
+from ..options import OptionKey
if T.TYPE_CHECKING:
from ..environment import Environment
@@ -311,7 +312,14 @@ class CursesConfigToolDependency(ConfigToolDependency):
tools = ['ncursesw6-config', 'ncursesw5-config', 'ncurses6-config', 'ncurses5-config', 'ncurses5.4-config']
def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None):
- super().__init__(name, env, kwargs, language)
+ exclude_paths = None
+ # macOS mistakenly ships /usr/bin/ncurses5.4-config and a man page for
+ # it, but none of the headers or libraries. Ignore /usr/bin because it
+ # can only contain this broken configtool script.
+ # Homebrew is /usr/local or /opt/homebrew.
+ if env.machines.build and env.machines.build.system == 'darwin':
+ exclude_paths = ['/usr/bin']
+ super().__init__(name, env, kwargs, language, exclude_paths=exclude_paths)
if not self.is_found:
return
self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
@@ -541,7 +549,7 @@ def shaderc_factory(env: 'Environment',
shared_libs = ['shaderc']
static_libs = ['shaderc_combined', 'shaderc_static']
- if kwargs.get('static', env.coredata.get_option(mesonlib.OptionKey('prefer_static'))):
+ if kwargs.get('static', env.coredata.get_option(OptionKey('prefer_static'))):
c = [functools.partial(PkgConfigDependency, name, env, kwargs)
for name in static_libs + shared_libs]
else:
diff --git a/mesonbuild/dependencies/mpi.py b/mesonbuild/dependencies/mpi.py
index f9c911c29a2a..2d9e992f6f5a 100644
--- a/mesonbuild/dependencies/mpi.py
+++ b/mesonbuild/dependencies/mpi.py
@@ -9,7 +9,8 @@
import re
from ..environment import detect_cpu_family
-from .base import DependencyMethods, detect_compiler, SystemDependency
+from ..mesonlib import Popen_safe
+from .base import DependencyException, DependencyMethods, detect_compiler, SystemDependency
from .configtool import ConfigToolDependency
from .detect import packages
from .factory import factory_methods
@@ -37,65 +38,91 @@ def mpi_factory(env: 'Environment',
return []
compiler_is_intel = compiler.get_id() in {'intel', 'intel-cl'}
- # Only OpenMPI has pkg-config, and it doesn't work with the intel compilers
- if DependencyMethods.PKGCONFIG in methods and not compiler_is_intel:
- pkg_name = None
+ if DependencyMethods.CONFIG_TOOL in methods:
+ nwargs = kwargs.copy()
+
+ # We try the environment variables for the tools first, but then
+ # fall back to the hardcoded names
+
if language == 'c':
- pkg_name = 'ompi-c'
+ env_vars = ['MPICC']
elif language == 'cpp':
- pkg_name = 'ompi-cxx'
+ env_vars = ['MPICXX']
elif language == 'fortran':
- pkg_name = 'ompi-fort'
- candidates.append(functools.partial(
- PkgConfigDependency, pkg_name, env, kwargs, language=language))
+ env_vars = ['MPIFC', 'MPIF90', 'MPIF77']
- if DependencyMethods.CONFIG_TOOL in methods:
- nwargs = kwargs.copy()
+ tool_names = [os.environ.get(env_name) for env_name in env_vars]
+ tool_names = [t for t in tool_names if t] # remove empty environment variables
if compiler_is_intel:
if env.machines[for_machine].is_windows():
- nwargs['version_arg'] = '-v'
nwargs['returncode_value'] = 3
if language == 'c':
- tool_names = [os.environ.get('I_MPI_CC'), 'mpiicc']
- elif language == 'cpp':
- tool_names = [os.environ.get('I_MPI_CXX'), 'mpiicpc']
- elif language == 'fortran':
- tool_names = [os.environ.get('I_MPI_F90'), 'mpiifort']
-
- cls: T.Type[ConfigToolDependency] = IntelMPIConfigToolDependency
- else: # OpenMPI, which doesn't work with intel
- #
- # We try the environment variables for the tools first, but then
- # fall back to the hardcoded names
- if language == 'c':
- tool_names = [os.environ.get('MPICC'), 'mpicc']
+ tool_names.append('mpiicc')
elif language == 'cpp':
- tool_names = [os.environ.get('MPICXX'), 'mpic++', 'mpicxx', 'mpiCC']
+ tool_names.append('mpiicpc')
elif language == 'fortran':
- tool_names = [os.environ.get(e) for e in ['MPIFC', 'MPIF90', 'MPIF77']]
- tool_names.extend(['mpifort', 'mpif90', 'mpif77'])
+ tool_names.append('mpiifort')
- cls = OpenMPIConfigToolDependency
-
- tool_names = [t for t in tool_names if t] # remove empty environment variables
- assert tool_names
+ # even with intel compilers, mpicc has to be considered
+ if language == 'c':
+ tool_names.append('mpicc')
+ elif language == 'cpp':
+ tool_names.extend(['mpic++', 'mpicxx', 'mpiCC'])
+ elif language == 'fortran':
+ tool_names.extend(['mpifort', 'mpif90', 'mpif77'])
nwargs['tools'] = tool_names
candidates.append(functools.partial(
- cls, tool_names[0], env, nwargs, language=language))
+ MPIConfigToolDependency, tool_names[0], env, nwargs, language=language))
- if DependencyMethods.SYSTEM in methods:
+ if DependencyMethods.SYSTEM in methods and env.machines[for_machine].is_windows():
candidates.append(functools.partial(
MSMPIDependency, 'msmpi', env, kwargs, language=language))
+ # Only OpenMPI has pkg-config, and it doesn't work with the intel compilers
+ # for MPI, environment variables and commands like mpicc should have priority
+ if DependencyMethods.PKGCONFIG in methods and not compiler_is_intel:
+ pkg_name = None
+ if language == 'c':
+ pkg_name = 'ompi-c'
+ elif language == 'cpp':
+ pkg_name = 'ompi-cxx'
+ elif language == 'fortran':
+ pkg_name = 'ompi-fort'
+ candidates.append(functools.partial(
+ PkgConfigDependency, pkg_name, env, kwargs, language=language))
+
return candidates
packages['mpi'] = mpi_factory
-class _MPIConfigToolDependency(ConfigToolDependency):
+class MPIConfigToolDependency(ConfigToolDependency):
+ """Wrapper around mpicc, Intel's mpiicc and friends."""
+
+ def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
+ language: T.Optional[str] = None):
+ super().__init__(name, env, kwargs, language=language)
+ if not self.is_found:
+ return
+
+ # --showme for OpenMPI, -compile_info/-link_info for MPICH and IntelMPI
+ for comp, link in [('--showme:compile', '--showme:link'), ('-compile_info', '-link_info'), ('-show', None)]:
+ try:
+ c_args = self.get_config_value([comp], 'compile_args')
+ l_args = self.get_config_value([link], 'link_args') if link is not None else c_args
+ except DependencyException:
+ continue
+ else:
+ break
+ else:
+ self.is_found = False
+ return
+
+ self.compile_args = self._filter_compile_args(c_args)
+ self.link_args = self._filter_link_args(l_args)
def _filter_compile_args(self, args: T.List[str]) -> T.List[str]:
"""
@@ -146,53 +173,39 @@ def _is_link_arg(self, f: str) -> bool:
f == '-pthread' or
(f.startswith('-W') and f != '-Wall' and not f.startswith('-Werror')))
-
-class IntelMPIConfigToolDependency(_MPIConfigToolDependency):
-
- """Wrapper around Intel's mpiicc and friends."""
-
- version_arg = '-v' # --version is not the same as -v
-
- def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
- language: T.Optional[str] = None):
- super().__init__(name, env, kwargs, language=language)
- if not self.is_found:
- return
-
- args = self.get_config_value(['-show'], 'link and compile args')
- self.compile_args = self._filter_compile_args(args)
- self.link_args = self._filter_link_args(args)
-
- def _sanitize_version(self, out: str) -> str:
- v = re.search(r'(\d{4}) Update (\d)', out)
+ def _check_and_get_version(self, tool: T.List[str], returncode: int) -> T.Tuple[bool, T.Union[str, None]]:
+ p, out = Popen_safe(tool + ['--showme:version'])[:2]
+ valid = p.returncode == returncode
+ if valid:
+ # OpenMPI
+ v = re.search(r'\d+\.\d+\.\d+', out)
+ if v:
+ version = v.group(0)
+ else:
+ version = None
+ return valid, version
+
+ # --version is not the same as -v
+ p, out = Popen_safe(tool + ['-v'])[:2]
+ valid = p.returncode == returncode
+ first_line = out.split('\n', maxsplit=1)[0]
+
+ # cases like "mpicc for MPICH version 4.2.2"
+ v = re.search(r'\d+\.\d+\.\d+', first_line)
if v:
- return '{}.{}'.format(v.group(1), v.group(2))
- return out
-
-
-class OpenMPIConfigToolDependency(_MPIConfigToolDependency):
-
- """Wrapper around OpenMPI mpicc and friends."""
-
- version_arg = '--showme:version'
-
- def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
- language: T.Optional[str] = None):
- super().__init__(name, env, kwargs, language=language)
- if not self.is_found:
- return
+ return valid, v.group(0)
- c_args = self.get_config_value(['--showme:compile'], 'compile_args')
- self.compile_args = self._filter_compile_args(c_args)
-
- l_args = self.get_config_value(['--showme:link'], 'link_args')
- self.link_args = self._filter_link_args(l_args)
+ # cases like "mpigcc for Intel(R) MPI library 2021.13"
+ v = re.search(r'\d+\.\d+', first_line)
+ if v:
+ return valid, v.group(0)
- def _sanitize_version(self, out: str) -> str:
- v = re.search(r'\d+.\d+.\d+', out)
+ # cases like "mpiifort for the Intel(R) MPI Library 2019 Update 9 for Linux*"
+ v = re.search(r'(\d{4}) Update (\d)', first_line)
if v:
- return v.group(0)
- return out
+ return valid, f'{v.group(1)}.{v.group(2)}'
+
+ return valid, None
class MSMPIDependency(SystemDependency):
diff --git a/mesonbuild/dependencies/pkgconfig.py b/mesonbuild/dependencies/pkgconfig.py
index a87f413ad9df..bc24f760fd0d 100644
--- a/mesonbuild/dependencies/pkgconfig.py
+++ b/mesonbuild/dependencies/pkgconfig.py
@@ -6,7 +6,9 @@
from pathlib import Path
from .base import ExternalDependency, DependencyException, sort_libpaths, DependencyTypeName
-from ..mesonlib import EnvironmentVariables, OptionKey, OrderedSet, PerMachine, Popen_safe, Popen_safe_logged, MachineChoice, join_args
+from ..mesonlib import (EnvironmentVariables, OrderedSet, PerMachine, Popen_safe, Popen_safe_logged, MachineChoice,
+ join_args, MesonException)
+from ..options import OptionKey
from ..programs import find_external_program, ExternalProgram
from .. import mlog
from pathlib import PurePath
@@ -29,6 +31,14 @@ class PkgConfigInterface:
class_impl: PerMachine[T.Union[Literal[False], T.Optional[PkgConfigInterface]]] = PerMachine(False, False)
class_cli_impl: PerMachine[T.Union[Literal[False], T.Optional[PkgConfigCLI]]] = PerMachine(False, False)
+ pkg_bin_per_machine: PerMachine[T.Optional[ExternalProgram]] = PerMachine(None, None)
+
+ @staticmethod
+ def set_program_override(pkg_bin: ExternalProgram, for_machine: MachineChoice) -> None:
+ if PkgConfigInterface.class_impl[for_machine]:
+ raise MesonException(f'Tried to override pkg-config for machine {for_machine} but it was already initialized.\n'
+ 'pkg-config must be overridden before it\'s used.')
+ PkgConfigInterface.pkg_bin_per_machine[for_machine] = pkg_bin
@staticmethod
def instance(env: Environment, for_machine: MachineChoice, silent: bool) -> T.Optional[PkgConfigInterface]:
@@ -36,7 +46,7 @@ def instance(env: Environment, for_machine: MachineChoice, silent: bool) -> T.Op
for_machine = for_machine if env.is_cross_build() else MachineChoice.HOST
impl = PkgConfigInterface.class_impl[for_machine]
if impl is False:
- impl = PkgConfigCLI(env, for_machine, silent)
+ impl = PkgConfigCLI(env, for_machine, silent, PkgConfigInterface.pkg_bin_per_machine[for_machine])
if not impl.found():
impl = None
if not impl and not silent:
@@ -56,7 +66,7 @@ def _cli(env: Environment, for_machine: MachineChoice, silent: bool = False) ->
if impl and not isinstance(impl, PkgConfigCLI):
impl = PkgConfigInterface.class_cli_impl[for_machine]
if impl is False:
- impl = PkgConfigCLI(env, for_machine, silent)
+ impl = PkgConfigCLI(env, for_machine, silent, PkgConfigInterface.pkg_bin_per_machine[for_machine])
if not impl.found():
impl = None
PkgConfigInterface.class_cli_impl[for_machine] = impl
@@ -112,9 +122,10 @@ def list_all(self) -> ImmutableListProtocol[str]:
class PkgConfigCLI(PkgConfigInterface):
'''pkg-config CLI implementation'''
- def __init__(self, env: Environment, for_machine: MachineChoice, silent: bool) -> None:
+ def __init__(self, env: Environment, for_machine: MachineChoice, silent: bool,
+ pkgbin: T.Optional[ExternalProgram] = None) -> None:
super().__init__(env, for_machine)
- self._detect_pkgbin()
+ self._detect_pkgbin(pkgbin)
if self.pkgbin and not silent:
mlog.log('Found pkg-config:', mlog.green('YES'), mlog.bold(f'({self.pkgbin.get_path()})'), mlog.blue(self.pkgbin_version))
@@ -199,14 +210,21 @@ def _split_args(cmd: str) -> T.List[str]:
# output using shlex.split rather than mesonlib.split_args
return shlex.split(cmd)
- def _detect_pkgbin(self) -> None:
- for potential_pkgbin in find_external_program(
- self.env, self.for_machine, 'pkg-config', 'Pkg-config',
- self.env.default_pkgconfig, allow_default_for_cross=False):
+ def _detect_pkgbin(self, pkgbin: T.Optional[ExternalProgram] = None) -> None:
+ def validate(potential_pkgbin: ExternalProgram) -> bool:
version_if_ok = self._check_pkgconfig(potential_pkgbin)
if version_if_ok:
self.pkgbin = potential_pkgbin
self.pkgbin_version = version_if_ok
+ return True
+ return False
+
+ if pkgbin and validate(pkgbin):
+ return
+
+ for potential_pkgbin in find_external_program(self.env, self.for_machine, "pkg-config", "Pkg-config",
+ self.env.default_pkgconfig, allow_default_for_cross=False):
+ if validate(potential_pkgbin):
return
self.pkgbin = None
@@ -273,7 +291,8 @@ def _call_pkgbin(self, args: T.List[str], env: T.Optional[EnvironOrDict] = None)
class PkgConfigDependency(ExternalDependency):
- def __init__(self, name: str, environment: Environment, kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None) -> None:
+ def __init__(self, name: str, environment: Environment, kwargs: T.Dict[str, T.Any],
+ language: T.Optional[str] = None) -> None:
super().__init__(DependencyTypeName('pkgconfig'), environment, kwargs, language=language)
self.name = name
self.is_libtool = False
@@ -556,7 +575,7 @@ def log_tried() -> str:
def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None,
configtool: T.Optional[str] = None, internal: T.Optional[str] = None,
- default_value: T.Optional[str] = None,
+ system: T.Optional[str] = None, default_value: T.Optional[str] = None,
pkgconfig_define: PkgConfigDefineType = None) -> str:
if pkgconfig:
try:
diff --git a/mesonbuild/dependencies/python.py b/mesonbuild/dependencies/python.py
index 46d12f309855..34945cb82b6d 100644
--- a/mesonbuild/dependencies/python.py
+++ b/mesonbuild/dependencies/python.py
@@ -17,6 +17,7 @@
from .pkgconfig import PkgConfigDependency
from ..environment import detect_cpu_family
from ..programs import ExternalProgram
+from ..options import OptionKey
if T.TYPE_CHECKING:
from typing_extensions import TypedDict
@@ -234,7 +235,10 @@ def get_windows_link_args(self, limited_api: bool) -> T.Optional[T.List[str]]:
elif imp_lower == 'pypy':
libpath = Path(f'libpypy{verdot}-c.dll')
else:
- libpath = Path(f'python{vernum}.dll')
+ if self.is_freethreaded:
+ libpath = Path(f'python{vernum}t.dll')
+ else:
+ libpath = Path(f'python{vernum}.dll')
else:
if self.is_freethreaded:
libpath = Path('libs') / f'python{vernum}t.lib'
@@ -246,13 +250,13 @@ def get_windows_link_args(self, limited_api: bool) -> T.Optional[T.List[str]]:
# Python itself (except with pybind11, which has an ugly
# hack to work around this) - so emit a warning to explain
# the cause of the expected link error.
- buildtype = self.env.coredata.get_option(mesonlib.OptionKey('buildtype'))
+ buildtype = self.env.coredata.get_option(OptionKey('buildtype'))
assert isinstance(buildtype, str)
- debug = self.env.coredata.get_option(mesonlib.OptionKey('debug'))
+ debug = self.env.coredata.get_option(OptionKey('debug'))
# `debugoptimized` buildtype may not set debug=True currently, see gh-11645
is_debug_build = debug or buildtype == 'debug'
vscrt_debug = False
- if mesonlib.OptionKey('b_vscrt') in self.env.coredata.optstore:
+ if OptionKey('b_vscrt') in self.env.coredata.optstore:
vscrt = self.env.coredata.optstore.get_value('b_vscrt')
if vscrt in {'mdd', 'mtd', 'from_buildtype', 'static_from_buildtype'}:
vscrt_debug = True
diff --git a/mesonbuild/dependencies/qt.py b/mesonbuild/dependencies/qt.py
index 86e32140e924..1b60deb8afd2 100644
--- a/mesonbuild/dependencies/qt.py
+++ b/mesonbuild/dependencies/qt.py
@@ -19,6 +19,7 @@
from .factory import DependencyFactory
from .. import mlog
from .. import mesonlib
+from ..options import OptionKey
if T.TYPE_CHECKING:
from ..compilers import Compiler
@@ -296,8 +297,8 @@ def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
# Use the buildtype by default, but look at the b_vscrt option if the
# compiler supports it.
- is_debug = self.env.coredata.get_option(mesonlib.OptionKey('buildtype')) == 'debug'
- if mesonlib.OptionKey('b_vscrt') in self.env.coredata.optstore:
+ is_debug = self.env.coredata.get_option(OptionKey('buildtype')) == 'debug'
+ if OptionKey('b_vscrt') in self.env.coredata.optstore:
if self.env.coredata.optstore.get_value('b_vscrt') in {'mdd', 'mtd'}:
is_debug = True
modules_lib_suffix = _get_modules_lib_suffix(self.version, self.env.machines[self.for_machine], is_debug)
diff --git a/mesonbuild/dependencies/scalapack.py b/mesonbuild/dependencies/scalapack.py
index bfc83b138f42..e50338710dd3 100644
--- a/mesonbuild/dependencies/scalapack.py
+++ b/mesonbuild/dependencies/scalapack.py
@@ -8,7 +8,7 @@
import os
import typing as T
-from ..mesonlib import OptionKey
+from ..options import OptionKey
from .base import DependencyMethods
from .cmake import CMakeDependency
from .detect import packages
diff --git a/mesonbuild/dependencies/ui.py b/mesonbuild/dependencies/ui.py
index cc17377a649b..7adac5e75723 100644
--- a/mesonbuild/dependencies/ui.py
+++ b/mesonbuild/dependencies/ui.py
@@ -68,7 +68,7 @@ def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> No
['--gui-libs' if 'gui' in self.modules else '--base-libs'],
'link_args'))
- def find_config(self, versions: T.Optional[T.List[str]] = None, returncode: int = 0) -> T.Tuple[T.Optional[T.List[str]], T.Optional[str]]:
+ def find_config(self, versions: T.Optional[T.List[str]] = None, returncode: int = 0, exclude_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.Optional[T.List[str]], T.Optional[str]]:
tool = [self.tools[0]]
try:
p, out = Popen_safe(tool + ['--help'])[:2]
@@ -189,7 +189,7 @@ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.
super().__init__(name, environment, kwargs, language=language)
try:
- self.vulkan_sdk = os.environ['VULKAN_SDK']
+ self.vulkan_sdk = os.environ.get('VULKAN_SDK') or os.environ['VK_SDK_PATH']
if not os.path.isabs(self.vulkan_sdk):
raise DependencyException('VULKAN_SDK must be an absolute path.')
except KeyError:
diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py
index 484ef45d478d..e3fa14fabbe5 100644
--- a/mesonbuild/environment.py
+++ b/mesonbuild/environment.py
@@ -17,9 +17,10 @@
from .mesonlib import (
MesonException, MachineChoice, Popen_safe, PerMachine,
- PerMachineDefaultable, PerThreeMachineDefaultable, split_args, quote_arg, OptionKey,
+ PerMachineDefaultable, PerThreeMachineDefaultable, split_args, quote_arg,
search_version, MesonBugException
)
+from .options import OptionKey
from . import mlog
from .programs import ExternalProgram
@@ -109,13 +110,13 @@ def detect_llvm_cov(suffix: T.Optional[str] = None):
tool = 'llvm-cov'
else:
tool = f'llvm-cov-{suffix}'
- if mesonlib.exe_exists([tool, '--version']):
+ if shutil.which(tool) is not None:
return tool
else:
# Otherwise guess in the dark
tools = get_llvm_tool_names('llvm-cov')
for tool in tools:
- if mesonlib.exe_exists([tool, '--version']):
+ if shutil.which(tool):
return tool
return None
@@ -139,7 +140,7 @@ def compute_llvm_suffix(coredata: coredata.CoreData):
def detect_lcov_genhtml(lcov_exe: str = 'lcov', genhtml_exe: str = 'genhtml'):
lcov_exe, lcov_version = detect_lcov(lcov_exe)
- if not mesonlib.exe_exists([genhtml_exe, '--version']):
+ if shutil.which(genhtml_exe) is None:
genhtml_exe = None
return lcov_exe, lcov_version, genhtml_exe
@@ -156,7 +157,7 @@ def find_coverage_tools(coredata: coredata.CoreData) -> T.Tuple[T.Optional[str],
return gcovr_exe, gcovr_version, lcov_exe, lcov_version, genhtml_exe, llvm_cov_exe
-def detect_ninja(version: str = '1.8.2', log: bool = False) -> T.List[str]:
+def detect_ninja(version: str = '1.8.2', log: bool = False) -> T.Optional[T.List[str]]:
r = detect_ninja_command_and_version(version, log)
return r[0] if r else None
@@ -274,6 +275,32 @@ def detect_clangformat() -> T.List[str]:
return [path]
return []
+def detect_clangtidy() -> T.List[str]:
+ """ Look for clang-tidy binary on build platform
+
+ Return: a single-element list of the found clang-tidy binary ready to be
+ passed to Popen()
+ """
+ tools = get_llvm_tool_names('clang-tidy')
+ for tool in tools:
+ path = shutil.which(tool)
+ if path is not None:
+ return [path]
+ return []
+
+def detect_clangapply() -> T.List[str]:
+ """ Look for clang-apply-replacements binary on build platform
+
+ Return: a single-element list of the found clang-apply-replacements binary
+ ready to be passed to Popen()
+ """
+ tools = get_llvm_tool_names('clang-apply-replacements')
+ for tool in tools:
+ path = shutil.which(tool)
+ if path is not None:
+ return [path]
+ return []
+
def detect_windows_arch(compilers: CompilersDict) -> str:
"""
Detecting the 'native' architecture of Windows is not a trivial task. We
@@ -308,7 +335,7 @@ def detect_windows_arch(compilers: CompilersDict) -> str:
for compiler in compilers.values():
if compiler.id == 'msvc' and (compiler.target in {'x86', '80x86'}):
return 'x86'
- if compiler.id == 'clang-cl' and compiler.target == 'x86':
+ if compiler.id == 'clang-cl' and (compiler.target in {'x86', 'i686'}):
return 'x86'
if compiler.id == 'gcc' and compiler.has_builtin_define('__i386__'):
return 'x86'
@@ -328,7 +355,7 @@ def detect_cpu_family(compilers: CompilersDict) -> str:
"""
Python is inconsistent in its platform module.
It returns different values for the same cpu.
- For x86 it might return 'x86', 'i686' or somesuch.
+ For x86 it might return 'x86', 'i686' or some such.
Do some canonicalization.
"""
if mesonlib.is_windows():
@@ -449,6 +476,7 @@ def detect_cpu(compilers: CompilersDict) -> str:
'darwin': 'xnu',
'dragonfly': 'dragonfly',
'haiku': 'haiku',
+ 'gnu': 'gnu',
}
def detect_kernel(system: str) -> T.Optional[str]:
@@ -465,7 +493,7 @@ def detect_kernel(system: str) -> T.Optional[str]:
raise MesonException('Failed to run "/usr/bin/uname -o"')
out = out.lower().strip()
if out not in {'illumos', 'solaris'}:
- mlog.warning(f'Got an unexpected value for kernel on a SunOS derived platform, expcted either "illumos" or "solaris", but got "{out}".'
+ mlog.warning(f'Got an unexpected value for kernel on a SunOS derived platform, expected either "illumos" or "solaris", but got "{out}".'
"Please open a Meson issue with the OS you're running and the value detected for your kernel.")
return None
return out
@@ -695,7 +723,7 @@ def _load_machine_file_options(self, config: 'ConfigParser', properties: Propert
key = OptionKey.from_string(k)
# If we're in the cross file, and there is a `build.foo` warn about that. Later we'll remove it.
if machine is MachineChoice.HOST and key.machine is not machine:
- mlog.deprecation('Setting build machine options in cross files, please use a native file instead, this will be removed in meson 0.60', once=True)
+ mlog.deprecation('Setting build machine options in cross files, please use a native file instead, this will be removed in meson 2.0', once=True)
if key.subproject:
raise MesonException('Do not set subproject options in [built-in options] section, use [subproject:built-in options] instead.')
self.options[key.evolve(subproject=subproject, machine=machine)] = v
@@ -747,14 +775,12 @@ def _set_default_options_from_env(self) -> None:
# if it changes on future invocations.
if self.first_invocation:
if keyname == 'ldflags':
- key = OptionKey('link_args', machine=for_machine, lang='c') # needs a language to initialize properly
for lang in compilers.compilers.LANGUAGES_USING_LDFLAGS:
- key = key.evolve(lang=lang)
+ key = OptionKey(name=f'{lang}_link_args', machine=for_machine)
env_opts[key].extend(p_list)
elif keyname == 'cppflags':
- key = OptionKey('env_args', machine=for_machine, lang='c')
for lang in compilers.compilers.LANGUAGES_USING_CPPFLAGS:
- key = key.evolve(lang=lang)
+ key = OptionKey(f'{lang}_env_args', machine=for_machine)
env_opts[key].extend(p_list)
else:
key = OptionKey.from_string(keyname).evolve(machine=for_machine)
@@ -774,7 +800,8 @@ def _set_default_options_from_env(self) -> None:
# We still use the original key as the base here, as
# we want to inherit the machine and the compiler
# language
- key = key.evolve('env_args')
+ lang = key.name.split('_', 1)[0]
+ key = key.evolve(f'{lang}_env_args')
env_opts[key].extend(p_list)
# Only store options that are not already in self.options,
@@ -961,6 +988,8 @@ def need_exe_wrapper(self, for_machine: MachineChoice = MachineChoice.HOST):
value = self.properties[for_machine].get('needs_exe_wrapper', None)
if value is not None:
return value
+ if not self.is_cross_build():
+ return False
return not machine_info_can_run(self.machines[for_machine])
def get_exe_wrapper(self) -> T.Optional[ExternalProgram]:
diff --git a/mesonbuild/interpreter/compiler.py b/mesonbuild/interpreter/compiler.py
index 1bdb321e47e8..90514446bb12 100644
--- a/mesonbuild/interpreter/compiler.py
+++ b/mesonbuild/interpreter/compiler.py
@@ -22,7 +22,7 @@
FeatureNew, FeatureNewKwargs, disablerIfNotFound,
InterpreterException)
from ..interpreterbase.decorators import ContainerTypeInfo, typed_kwargs, KwargInfo, typed_pos_args
-from ..mesonlib import OptionKey
+from ..options import OptionKey
from .interpreterobjects import (extract_required_kwarg, extract_search_dirs)
from .type_checking import REQUIRED_KW, in_set_validator, NoneType
diff --git a/mesonbuild/interpreter/dependencyfallbacks.py b/mesonbuild/interpreter/dependencyfallbacks.py
index d5e0740e0974..fd8a025ea220 100644
--- a/mesonbuild/interpreter/dependencyfallbacks.py
+++ b/mesonbuild/interpreter/dependencyfallbacks.py
@@ -5,7 +5,8 @@
from .. import dependencies
from .. import build
from ..wrap import WrapMode
-from ..mesonlib import OptionKey, extract_as_list, stringlistify, version_compare_many, listify
+from ..mesonlib import extract_as_list, stringlistify, version_compare_many, listify
+from ..options import OptionKey
from ..dependencies import Dependency, DependencyException, NotFoundDependency
from ..interpreterbase import (MesonInterpreterObject, FeatureNew,
InterpreterException, InvalidArguments)
diff --git a/mesonbuild/interpreter/interpreter.py b/mesonbuild/interpreter/interpreter.py
index 0711bbf75283..3fa5c25d06c9 100644
--- a/mesonbuild/interpreter/interpreter.py
+++ b/mesonbuild/interpreter/interpreter.py
@@ -19,8 +19,9 @@
from ..wrap import wrap, WrapMode
from .. import mesonlib
from ..mesonlib import (EnvironmentVariables, ExecutableSerialisation, MesonBugException, MesonException, HoldableObject,
- FileMode, MachineChoice, OptionKey, listify,
+ FileMode, MachineChoice, listify,
extract_as_list, has_path_sep, path_is_in_root, PerMachine)
+from ..options import OptionKey
from ..programs import ExternalProgram, NonExistingExternalProgram
from ..dependencies import Dependency
from ..depfile import DepFile
@@ -255,7 +256,7 @@ class InterpreterRuleRelaxation(Enum):
implicit_check_false_warning = """You should add the boolean check kwarg to the run_command call.
It currently defaults to false,
- but it will default to true in future releases of meson.
+ but it will default to true in meson 2.0.
See also: https://github.com/mesonbuild/meson/issues/9300"""
class Interpreter(InterpreterBase, HoldableObject):
@@ -635,7 +636,7 @@ def func_import(self, node: mparser.BaseNode, args: T.Tuple[str],
ext_module = NotFoundExtensionModule(real_modname)
else:
ext_module = module.initialize(self)
- assert isinstance(ext_module, (ExtensionModule, NewExtensionModule))
+ assert isinstance(ext_module, (ExtensionModule, NewExtensionModule)), 'for mypy'
self.build.modules.append(real_modname)
if ext_module.INFO.added:
FeatureNew.single_use(f'module {ext_module.INFO.name}', ext_module.INFO.added, self.subproject, location=node)
@@ -814,7 +815,7 @@ def run_command_impl(self,
cmd = cmd.absolute_path(srcdir, builddir)
# Prefer scripts in the current source directory
search_dir = os.path.join(srcdir, self.subdir)
- prog = ExternalProgram(cmd, silent=True, search_dir=search_dir)
+ prog = ExternalProgram(cmd, silent=True, search_dirs=[search_dir])
if not prog.found():
raise InterpreterException(f'Program or command {cmd!r} not found or not executable')
cmd = prog
@@ -876,6 +877,7 @@ def disabled_subproject(self, subp_name: str, disabled_feature: T.Optional[str]
def do_subproject(self, subp_name: str, kwargs: kwtypes.DoSubproject, force_method: T.Optional[wrap.Method] = None) -> SubprojectHolder:
disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
if disabled:
+ assert feature, 'for mypy'
mlog.log('Subproject', mlog.bold(subp_name), ':', 'skipped: feature', mlog.bold(feature), 'disabled')
return self.disabled_subproject(subp_name, disabled_feature=feature)
@@ -1040,6 +1042,8 @@ def _do_subproject_cargo(self, subp_name: str, subdir: str,
kwargs: kwtypes.DoSubproject) -> SubprojectHolder:
from .. import cargo
FeatureNew.single_use('Cargo subproject', '1.3.0', self.subproject, location=self.current_node)
+ mlog.warning('Cargo subproject is an experimental feature and has no backwards compatibility guarantees.',
+ once=True, location=self.current_node)
with mlog.nested(subp_name):
ast, options = cargo.interpret(subp_name, subdir, self.environment)
self.coredata.update_project_options(options, subp_name)
@@ -1051,7 +1055,7 @@ def _do_subproject_cargo(self, subp_name: str, subdir: str,
def get_option_internal(self, optname: str) -> options.UserOption:
key = OptionKey.from_string(optname).evolve(subproject=self.subproject)
- if not key.is_project():
+ if not self.environment.coredata.optstore.is_project_option(key):
for opts in [self.coredata.optstore, compilers.base_options]:
v = opts.get(key)
if v is None or v.yielding:
@@ -1148,7 +1152,7 @@ def set_backend(self) -> None:
if self.environment.first_invocation:
self.coredata.init_backend_options(backend_name)
- options = {k: v for k, v in self.environment.options.items() if k.is_backend()}
+ options = {k: v for k, v in self.environment.options.items() if self.environment.coredata.optstore.is_backend_option(k)}
self.coredata.set_options(options)
@typed_pos_args('project', str, varargs=str)
@@ -1176,6 +1180,8 @@ def func_project(self, node: mparser.FunctionNode, args: T.Tuple[str, T.List[str
# for things like deprecation testing.
if kwargs['meson_version']:
self.handle_meson_version(kwargs['meson_version'], node)
+ else:
+ mesonlib.project_meson_versions[self.subproject] = mesonlib.NoProjectVersion()
# Load "meson.options" before "meson_options.txt", and produce a warning if
# it is being used with an old version. I have added check that if both
@@ -1198,7 +1204,7 @@ def func_project(self, node: mparser.FunctionNode, args: T.Tuple[str, T.List[str
# We want fast not cryptographically secure, this is just to
# see if the option file has changed
self.coredata.options_files[self.subproject] = (option_file, hashlib.sha1(f.read()).hexdigest())
- oi = optinterpreter.OptionInterpreter(self.subproject)
+ oi = optinterpreter.OptionInterpreter(self.environment.coredata.optstore, self.subproject)
oi.process(option_file)
self.coredata.update_project_options(oi.options, self.subproject)
self.add_build_def_file(option_file)
@@ -1233,6 +1239,7 @@ def func_project(self, node: mparser.FunctionNode, args: T.Tuple[str, T.List[str
self.active_projectname = proj_name
version = kwargs['version']
+ assert version is not None, 'for mypy'
if isinstance(version, mesonlib.File):
FeatureNew.single_use('version from file', '0.57.0', self.subproject, location=node)
self.add_build_def_file(version)
@@ -1290,6 +1297,7 @@ def func_project(self, node: mparser.FunctionNode, args: T.Tuple[str, T.List[str
subdir = os.path.join(self.subdir, spdirname)
r = wrap.Resolver(self.environment.get_source_dir(), subdir, self.subproject, wrap_mode)
if self.is_subproject():
+ assert self.environment.wrap_resolver is not None, 'for mypy'
self.environment.wrap_resolver.merge_wraps(r)
else:
self.environment.wrap_resolver = r
@@ -1304,7 +1312,9 @@ def func_project(self, node: mparser.FunctionNode, args: T.Tuple[str, T.List[str
# vs backend version we need. But after setting default_options in case
# the project sets vs backend by default.
backend = self.coredata.get_option(OptionKey('backend'))
+ assert backend is None or isinstance(backend, str), 'for mypy'
vsenv = self.coredata.get_option(OptionKey('vsenv'))
+ assert isinstance(vsenv, bool), 'for mypy'
force_vsenv = vsenv or backend.startswith('vs')
mesonlib.setup_vsenv(force_vsenv)
@@ -1323,6 +1333,7 @@ def func_add_languages(self, node: mparser.FunctionNode, args: T.Tuple[T.List[st
native = kwargs['native']
if disabled:
+ assert feature, 'for mypy'
for lang in sorted(langs, key=compilers.sort_clink):
mlog.log('Compiler for language', mlog.bold(lang), 'skipped: feature', mlog.bold(feature), 'disabled')
return False
@@ -1576,7 +1587,7 @@ def program_from_file_for(self, for_machine: MachineChoice, prognames: T.List[me
return prog
return None
- def program_from_system(self, args: T.List[mesonlib.FileOrString], search_dirs: T.List[str],
+ def program_from_system(self, args: T.List[mesonlib.FileOrString], search_dirs: T.Optional[T.List[str]],
extra_info: T.List[mlog.TV_Loggable]) -> T.Optional[ExternalProgram]:
# Search for scripts relative to current subdir.
# Do not cache found programs because find_program('foobar')
@@ -1591,15 +1602,15 @@ def program_from_system(self, args: T.List[mesonlib.FileOrString], search_dirs:
search_dir = os.path.join(self.environment.get_source_dir(),
exename.subdir)
exename = exename.fname
- extra_search_dirs = []
+ search_dirs = [search_dir]
elif isinstance(exename, str):
- search_dir = source_dir
- extra_search_dirs = search_dirs
+ if search_dirs:
+ search_dirs = [source_dir] + search_dirs
+ else:
+ search_dirs = [source_dir]
else:
raise InvalidArguments(f'find_program only accepts strings and files, not {exename!r}')
- extprog = ExternalProgram(exename, search_dir=search_dir,
- extra_search_dirs=extra_search_dirs,
- silent=True)
+ extprog = ExternalProgram(exename, search_dirs=search_dirs, silent=True)
if extprog.found():
extra_info.append(f"({' '.join(extprog.get_command())})")
return extprog
@@ -1628,6 +1639,9 @@ def add_find_program_override(self, name: str, exe: T.Union[build.Executable, Ex
if name in self.build.find_overrides:
raise InterpreterException(f'Tried to override executable "{name}" which has already been overridden.')
self.build.find_overrides[name] = exe
+ if name == 'pkg-config' and isinstance(exe, ExternalProgram):
+ from ..dependencies.pkgconfig import PkgConfigInterface
+ PkgConfigInterface.set_program_override(exe, MachineChoice.HOST)
def notfound_program(self, args: T.List[mesonlib.FileOrString]) -> ExternalProgram:
return NonExistingExternalProgram(' '.join(
@@ -1671,7 +1685,7 @@ def find_program_impl(self, args: T.List[mesonlib.FileOrString],
def program_lookup(self, args: T.List[mesonlib.FileOrString], for_machine: MachineChoice,
default_options: T.Optional[T.Dict[OptionKey, T.Union[str, int, bool, T.List[str]]]],
required: bool,
- search_dirs: T.List[str],
+ search_dirs: T.Optional[T.List[str]],
wanted: T.Union[str, T.List[str]],
version_arg: T.Optional[str],
version_func: T.Optional[ProgramVersionFunc],
@@ -1724,7 +1738,7 @@ def check_program_version(self, progobj: T.Union[ExternalProgram, build.Executab
interp = self.subprojects[progobj.subproject].held_object
else:
interp = self
- assert isinstance(interp, Interpreter)
+ assert isinstance(interp, Interpreter), 'for mypy'
version = interp.project_version
else:
version = progobj.get_version(self)
@@ -1769,6 +1783,7 @@ def func_find_program(self, node: mparser.BaseNode, args: T.Tuple[T.List[mesonli
) -> T.Union['build.Executable', ExternalProgram, 'OverrideProgram']:
disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
if disabled:
+ assert feature, 'for mypy'
mlog.log('Program', mlog.bold(' '.join(args[0])), 'skipped: feature', mlog.bold(feature), 'disabled')
return self.notfound_program(args[0])
@@ -1811,7 +1826,7 @@ def func_dependency(self, node: mparser.BaseNode, args: T.Tuple[T.List[str]], kw
if not_found_message:
self.message_impl([not_found_message])
raise
- assert isinstance(d, Dependency)
+ assert isinstance(d, Dependency), 'for mypy'
if not d.found() and not_found_message:
self.message_impl([not_found_message])
# Ensure the correct include type
@@ -1865,6 +1880,7 @@ def func_shared_lib(self, node: mparser.BaseNode,
@permittedKwargs(known_library_kwargs)
@typed_pos_args('both_libraries', str, varargs=SOURCES_VARARGS)
@typed_kwargs('both_libraries', *LIBRARY_KWS, allow_unknown=True)
+ @noSecondLevelHolderResolving
def func_both_lib(self, node: mparser.BaseNode,
args: T.Tuple[str, SourcesVarargsType],
kwargs: kwtypes.Library) -> build.BothLibraries:
@@ -1882,6 +1898,7 @@ def func_shared_module(self, node: mparser.BaseNode,
@permittedKwargs(known_library_kwargs)
@typed_pos_args('library', str, varargs=SOURCES_VARARGS)
@typed_kwargs('library', *LIBRARY_KWS, allow_unknown=True)
+ @noSecondLevelHolderResolving
def func_library(self, node: mparser.BaseNode,
args: T.Tuple[str, SourcesVarargsType],
kwargs: kwtypes.Library) -> build.Executable:
@@ -1905,6 +1922,7 @@ def func_build_target(self, node: mparser.BaseNode,
) -> T.Union[build.Executable, build.StaticLibrary, build.SharedLibrary,
build.SharedModule, build.BothLibraries, build.Jar]:
target_type = kwargs['target_type']
+
if target_type == 'executable':
return self.build_target(node, args, kwargs, build.Executable)
elif target_type == 'shared_library':
@@ -2148,14 +2166,21 @@ def func_run_target(self, node: mparser.FunctionNode, args: T.Tuple[str],
return tg
@FeatureNew('alias_target', '0.52.0')
- @typed_pos_args('alias_target', str, varargs=build.Target, min_varargs=1)
+ @typed_pos_args('alias_target', str, varargs=(build.Target, build.BothLibraries), min_varargs=1)
@noKwargs
- def func_alias_target(self, node: mparser.BaseNode, args: T.Tuple[str, T.List[build.Target]],
- kwargs: 'TYPE_kwargs') -> build.AliasTarget:
+ def func_alias_target(self, node: mparser.BaseNode, args: T.Tuple[str, T.List[T.Union[build.Target, build.BothLibraries]]],
+ kwargs: TYPE_kwargs) -> build.AliasTarget:
name, deps = args
if any(isinstance(d, build.RunTarget) for d in deps):
FeatureNew.single_use('alias_target that depends on run_targets', '0.60.0', self.subproject)
- tg = build.AliasTarget(name, deps, self.subdir, self.subproject, self.environment)
+ real_deps: T.List[build.Target] = []
+ for d in deps:
+ if isinstance(d, build.BothLibraries):
+ real_deps.append(d.shared)
+ real_deps.append(d.static)
+ else:
+ real_deps.append(d)
+ tg = build.AliasTarget(name, real_deps, self.subdir, self.subproject, self.environment)
self.add_target(name, tg)
return tg
@@ -2260,6 +2285,8 @@ def add_test(self, node: mparser.BaseNode,
kwargs: T.Dict[str, T.Any], is_base_test: bool):
if isinstance(args[1], (build.CustomTarget, build.CustomTargetIndex)):
FeatureNew.single_use('test with CustomTarget as command', '1.4.0', self.subproject)
+ if any(isinstance(i, ExternalProgram) for i in kwargs['args']):
+ FeatureNew.single_use('test with external_program in args', '1.6.0', self.subproject)
t = self.make_test(node, args, kwargs)
if is_base_test:
@@ -2288,7 +2315,7 @@ def func_install_headers(self, node: mparser.BaseNode,
if kwargs['install_dir'] is not None:
raise InterpreterException('install_headers: cannot specify both "install_dir" and "subdir". Use only "install_dir".')
if os.path.isabs(install_subdir):
- mlog.deprecation('Subdir keyword must not be an absolute path. This will be a hard error in the next release.')
+ mlog.deprecation('Subdir keyword must not be an absolute path. This will be a hard error in meson 2.0.')
else:
install_subdir = ''
@@ -2781,6 +2808,12 @@ def extract_incdirs(self, kwargs, key: str = 'include_directories') -> T.List[bu
if isinstance(p, build.IncludeDirs):
result.append(p)
elif isinstance(p, str):
+ if key == 'd_import_dirs' and os.path.normpath(p).startswith(self.environment.get_source_dir()):
+ FeatureDeprecated.single_use('Building absolute path to source dir is not supported',
+ '0.45', self.subproject,
+ 'Use a relative path instead.',
+ location=self.current_node)
+ p = os.path.relpath(p, os.path.join(self.environment.get_source_dir(), self.subdir))
result.append(self.build_incdir_object([p]))
else:
raise InterpreterException('Include directory objects can only be created from strings or include directories.')
@@ -3145,7 +3178,7 @@ def source_strings_to_files(self, sources: T.List['SourceInputs'], strict: bool
if not strict and s.startswith(self.environment.get_build_dir()):
results.append(s)
mlog.warning(f'Source item {s!r} cannot be converted to File object, because it is a generated file. '
- 'This will become a hard error in the future.', location=self.current_node)
+ 'This will become a hard error in meson 2.0.', location=self.current_node)
else:
self.validate_within_subproject(self.subdir, s)
results.append(mesonlib.File.from_source_file(self.environment.source_dir, self.subdir, s))
@@ -3217,6 +3250,11 @@ def add_target(self, name: str, tobj: build.Target) -> None:
def build_both_libraries(self, node: mparser.BaseNode, args: T.Tuple[str, SourcesVarargsType], kwargs: kwtypes.Library) -> build.BothLibraries:
shared_lib = self.build_target(node, args, kwargs, build.SharedLibrary)
static_lib = self.build_target(node, args, kwargs, build.StaticLibrary)
+ preferred_library = self.coredata.get_option(OptionKey('default_both_libraries'))
+ if preferred_library == 'auto':
+ preferred_library = self.coredata.get_option(OptionKey('default_library'))
+ if preferred_library == 'both':
+ preferred_library = 'shared'
if self.backend.name == 'xcode':
# Xcode is a bit special in that you can't (at least for the moment)
@@ -3248,7 +3286,11 @@ def build_both_libraries(self, node: mparser.BaseNode, args: T.Tuple[str, Source
# Keep only compilers used for linking
static_lib.compilers = {k: v for k, v in static_lib.compilers.items() if k in compilers.clink_langs}
- return build.BothLibraries(shared_lib, static_lib)
+ # Cross reference them to implement as_shared() and as_static() methods.
+ shared_lib.set_static(static_lib)
+ static_lib.set_shared(shared_lib)
+
+ return build.BothLibraries(shared_lib, static_lib, preferred_library)
def build_library(self, node: mparser.BaseNode, args: T.Tuple[str, SourcesVarargsType], kwargs: kwtypes.Library):
default_library = self.coredata.get_option(OptionKey('default_library', subproject=self.subproject))
@@ -3373,7 +3415,7 @@ def build_target(self, node: mparser.BaseNode, args: T.Tuple[str, SourcesVarargs
kwargs['language_args'][lang].extend(args)
kwargs['depend_files'].extend(deps)
if targetclass is not build.Jar:
- self.kwarg_strings_to_includedirs(kwargs)
+ kwargs['d_import_dirs'] = self.extract_incdirs(kwargs, 'd_import_dirs')
# Filter out kwargs from other target types. For example 'soversion'
# passed to library() when default_library == 'static'.
@@ -3426,7 +3468,7 @@ def build_target(self, node: mparser.BaseNode, args: T.Tuple[str, SourcesVarargs
if kwargs['implib']:
if kwargs['export_dynamic'] is False:
- FeatureDeprecated.single_use('implib overrides explict export_dynamic off', '1.3.0', self.subproject,
+ FeatureDeprecated.single_use('implib overrides explicit export_dynamic off', '1.3.0', self.subproject,
'Do not set ths if want export_dynamic disabled if implib is enabled',
location=node)
kwargs['export_dynamic'] = True
@@ -3446,23 +3488,6 @@ def build_target(self, node: mparser.BaseNode, args: T.Tuple[str, SourcesVarargs
self.project_args_frozen = True
return target
- def kwarg_strings_to_includedirs(self, kwargs: kwtypes._BuildTarget) -> None:
- if kwargs['d_import_dirs']:
- items = kwargs['d_import_dirs']
- cleaned_items: T.List[build.IncludeDirs] = []
- for i in items:
- if isinstance(i, str):
- # BW compatibility. This was permitted so we must support it
- # for a few releases so people can transition to "correct"
- # path declarations.
- if os.path.normpath(i).startswith(self.environment.get_source_dir()):
- mlog.warning('''Building a path to the source dir is not supported. Use a relative path instead.
-This will become a hard error in the future.''', location=self.current_node)
- i = os.path.relpath(i, os.path.join(self.environment.get_source_dir(), self.subdir))
- i = self.build_incdir_object([i])
- cleaned_items.append(i)
- kwargs['d_import_dirs'] = cleaned_items
-
def add_stdlib_info(self, target):
for l in target.compilers.keys():
dep = self.build.stdlibs[target.for_machine].get(l, None)
diff --git a/mesonbuild/interpreter/interpreterobjects.py b/mesonbuild/interpreter/interpreterobjects.py
index 79e205d31d0f..f4a2b4107ed3 100644
--- a/mesonbuild/interpreter/interpreterobjects.py
+++ b/mesonbuild/interpreter/interpreterobjects.py
@@ -41,6 +41,10 @@ class EnvironmentSeparatorKW(TypedDict):
separator: str
+ class InternalDependencyAsKW(TypedDict):
+
+ recursive: bool
+
_ERROR_MSG_KW: KwargInfo[T.Optional[str]] = KwargInfo('error_message', (str, NoneType))
@@ -462,6 +466,8 @@ def __init__(self, dep: Dependency, interpreter: 'Interpreter'):
'include_type': self.include_type_method,
'as_system': self.as_system_method,
'as_link_whole': self.as_link_whole_method,
+ 'as_static': self.as_static_method,
+ 'as_shared': self.as_shared_method,
})
def found(self) -> bool:
@@ -539,6 +545,7 @@ def partial_dependency_method(self, args: T.List[TYPE_nvar], kwargs: 'kwargs.Dep
KwargInfo('pkgconfig', (str, NoneType)),
KwargInfo('configtool', (str, NoneType)),
KwargInfo('internal', (str, NoneType), since='0.54.0'),
+ KwargInfo('system', (str, NoneType), since='1.6.0'),
KwargInfo('default_value', (str, NoneType)),
PKGCONFIG_DEFINE_KW,
)
@@ -555,6 +562,7 @@ def variable_method(self, args: T.Tuple[T.Optional[str]], kwargs: 'kwargs.Depend
pkgconfig=kwargs['pkgconfig'] or default_varname,
configtool=kwargs['configtool'] or default_varname,
internal=kwargs['internal'] or default_varname,
+ system=kwargs['system'] or default_varname,
default_value=kwargs['default_value'],
pkgconfig_define=kwargs['pkgconfig_define'],
)
@@ -580,6 +588,28 @@ def as_link_whole_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> D
new_dep = self.held_object.generate_link_whole_dependency()
return new_dep
+ @FeatureNew('dependency.as_static', '1.6.0')
+ @noPosargs
+ @typed_kwargs(
+ 'dependency.as_static',
+ KwargInfo('recursive', bool, default=False),
+ )
+ def as_static_method(self, args: T.List[TYPE_var], kwargs: InternalDependencyAsKW) -> Dependency:
+ if not isinstance(self.held_object, InternalDependency):
+ raise InterpreterException('as_static method is only supported on declare_dependency() objects')
+ return self.held_object.get_as_static(kwargs['recursive'])
+
+ @FeatureNew('dependency.as_shared', '1.6.0')
+ @noPosargs
+ @typed_kwargs(
+ 'dependency.as_shared',
+ KwargInfo('recursive', bool, default=False),
+ )
+ def as_shared_method(self, args: T.List[TYPE_var], kwargs: InternalDependencyAsKW) -> Dependency:
+ if not isinstance(self.held_object, InternalDependency):
+ raise InterpreterException('as_shared method is only supported on declare_dependency() objects')
+ return self.held_object.get_as_shared(kwargs['recursive'])
+
_EXTPROG = T.TypeVar('_EXTPROG', bound=ExternalProgram)
class _ExternalProgramHolder(ObjectHolder[_EXTPROG]):
@@ -753,7 +783,7 @@ def __init__(self, name: str, project: str, suite: T.List[str],
exe: T.Union[ExternalProgram, build.Executable, build.CustomTarget, build.CustomTargetIndex],
depends: T.List[T.Union[build.CustomTarget, build.BuildTarget]],
is_parallel: bool,
- cmd_args: T.List[T.Union[str, mesonlib.File, build.Target]],
+ cmd_args: T.List[T.Union[str, mesonlib.File, build.Target, ExternalProgram]],
env: mesonlib.EnvironmentVariables,
should_fail: bool, timeout: int, workdir: T.Optional[str], protocol: str,
priority: int, verbose: bool):
@@ -813,26 +843,23 @@ def found_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
def found(self) -> bool:
return not isinstance(self.held_object, NullSubprojectInterpreter)
- @noKwargs
- @noArgsFlattening
@unholder_return
- def get_variable_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.Union[TYPE_var, InterpreterObject]:
- if len(args) < 1 or len(args) > 2:
- raise InterpreterException('Get_variable takes one or two arguments.')
+ def get_variable(self, args: T.Tuple[str, T.Optional[str]], kwargs: TYPE_kwargs) -> T.Union[TYPE_var, InterpreterObject]:
if isinstance(self.held_object, NullSubprojectInterpreter): # == not self.found()
raise InterpreterException(f'Subproject "{self.subdir}" disabled can\'t get_variable on it.')
- varname = args[0]
- if not isinstance(varname, str):
- raise InterpreterException('Get_variable first argument must be a string.')
+ varname, fallback = args
try:
return self.held_object.variables[varname]
except KeyError:
- pass
-
- if len(args) == 2:
- return self.held_object._holderify(args[1])
+ if fallback is not None:
+ return self.held_object._holderify(fallback)
+ raise InvalidArguments(f'Requested variable "{varname}" not found.')
- raise InvalidArguments(f'Requested variable "{varname}" not found.')
+ @noKwargs
+ @typed_pos_args('subproject.get_variable', str, optargs=[object])
+ @noArgsFlattening
+ def get_variable_method(self, args: T.Tuple[str, T.Optional[str]], kwargs: TYPE_kwargs) -> T.Union[TYPE_var, InterpreterObject]:
+ return self.get_variable(args, kwargs)
class ModuleObjectHolder(ObjectHolder[ModuleObject]):
def method_call(self, method_name: str, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> TYPE_var:
@@ -942,7 +969,7 @@ def extract_objects_method(self, args: T.Tuple[T.List[T.Union[mesonlib.FileOrStr
extract_all_objects called without setting recursive
keyword argument. Meson currently defaults to
non-recursive to maintain backward compatibility but
- the default will be changed in the future.
+ the default will be changed in meson 2.0.
''')
)
)
@@ -974,8 +1001,6 @@ class SharedLibraryHolder(BuildTargetHolder[build.SharedLibrary]):
class BothLibrariesHolder(BuildTargetHolder[build.BothLibraries]):
def __init__(self, libs: build.BothLibraries, interp: 'Interpreter'):
- # FIXME: This build target always represents the shared library, but
- # that should be configurable.
super().__init__(libs, interp)
self.methods.update({'get_shared_lib': self.get_shared_lib_method,
'get_static_lib': self.get_static_lib_method,
@@ -990,12 +1015,16 @@ def __repr__(self) -> str:
@noPosargs
@noKwargs
def get_shared_lib_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> build.SharedLibrary:
- return self.held_object.shared
+ lib = copy.copy(self.held_object.shared)
+ lib.both_lib = None
+ return lib
@noPosargs
@noKwargs
def get_static_lib_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> build.StaticLibrary:
- return self.held_object.static
+ lib = copy.copy(self.held_object.static)
+ lib.both_lib = None
+ return lib
class SharedModuleHolder(BuildTargetHolder[build.SharedModule]):
pass
diff --git a/mesonbuild/interpreter/kwargs.py b/mesonbuild/interpreter/kwargs.py
index 85779bc00b08..ae4866a88ad8 100644
--- a/mesonbuild/interpreter/kwargs.py
+++ b/mesonbuild/interpreter/kwargs.py
@@ -13,7 +13,8 @@
from .. import options
from ..compilers import Compiler
from ..dependencies.base import Dependency
-from ..mesonlib import EnvironmentVariables, MachineChoice, File, FileMode, FileOrString, OptionKey
+from ..mesonlib import EnvironmentVariables, MachineChoice, File, FileMode, FileOrString
+from ..options import OptionKey
from ..modules.cmake import CMakeSubprojectOptions
from ..programs import ExternalProgram
from .type_checking import PkgConfigDefineType, SourcesVarargsType
@@ -37,7 +38,7 @@ class BaseTest(TypedDict):
"""Shared base for the Rust module."""
- args: T.List[T.Union[str, File, build.Target]]
+ args: T.List[T.Union[str, File, build.Target, ExternalProgram]]
should_fail: bool
timeout: int
workdir: T.Optional[str]
@@ -70,7 +71,7 @@ class ExtractRequired(TypedDict):
"""Keyword Arguments consumed by the `extract_required_kwargs` function.
Any function that uses the `required` keyword argument which accepts either
- a boolean or a feature option should inherit it's arguments from this class.
+ a boolean or a feature option should inherit its arguments from this class.
"""
required: T.Union[bool, options.UserFeatureOption]
@@ -210,6 +211,7 @@ class Project(TypedDict):
meson_version: T.Optional[str]
default_options: T.Dict[OptionKey, T.Union[str, int, bool, T.List[str]]]
license: T.List[str]
+ license_files: T.List[str]
subproject_dir: str
@@ -266,6 +268,7 @@ class DependencyGetVariable(TypedDict):
pkgconfig: T.Optional[str]
configtool: T.Optional[str]
internal: T.Optional[str]
+ system: T.Optional[str]
default_value: T.Optional[str]
pkgconfig_define: PkgConfigDefineType
diff --git a/mesonbuild/interpreter/mesonmain.py b/mesonbuild/interpreter/mesonmain.py
index 4d1f427da210..c82f933450c1 100644
--- a/mesonbuild/interpreter/mesonmain.py
+++ b/mesonbuild/interpreter/mesonmain.py
@@ -1,8 +1,9 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2012-2021 The Meson development team
-# Copyright © 2021 Intel Corporation
+# Copyright © 2021-2024 Intel Corporation
from __future__ import annotations
+import copy
import os
import typing as T
@@ -11,7 +12,8 @@
from .. import build
from .. import mlog, coredata
-from ..mesonlib import MachineChoice, OptionKey
+from ..mesonlib import MachineChoice
+from ..options import OptionKey
from ..programs import OverrideProgram, ExternalProgram
from ..interpreter.type_checking import ENV_KW, ENV_METHOD_KW, ENV_SEPARATOR_KW, env_convertor_with_method
from ..interpreterbase import (MesonInterpreterObject, FeatureNew, FeatureDeprecated,
@@ -332,7 +334,7 @@ def override_find_program_method(self, args: T.Tuple[str, T.Union[mesonlib.File,
self.interpreter.environment.build_dir)
if not os.path.exists(abspath):
raise InterpreterException(f'Tried to override {name} with a file that does not exist.')
- exe = OverrideProgram(name, [abspath])
+ exe = OverrideProgram(name, self.interpreter.project_version, command=[abspath])
self.interpreter.add_find_program_override(name, exe)
@typed_kwargs(
@@ -347,6 +349,16 @@ def override_dependency_method(self, args: T.Tuple[str, dependencies.Dependency]
if not name:
raise InterpreterException('First argument must be a string and cannot be empty')
+ # Make a copy since we're going to mutate.
+ #
+ # dep = declare_dependency()
+ # meson.override_dependency('foo', dep)
+ # meson.override_dependency('foo-1.0', dep)
+ # dep = dependency('foo')
+ # dep.name() # == 'foo-1.0'
+ dep = copy.copy(dep)
+ dep.name = name
+
optkey = OptionKey('default_library', subproject=self.interpreter.subproject)
default_library = self.interpreter.coredata.get_option(optkey)
assert isinstance(default_library, str), 'for mypy'
diff --git a/mesonbuild/interpreter/type_checking.py b/mesonbuild/interpreter/type_checking.py
index 2856136361f8..ed34be950065 100644
--- a/mesonbuild/interpreter/type_checking.py
+++ b/mesonbuild/interpreter/type_checking.py
@@ -15,7 +15,8 @@
from ..dependencies import Dependency, InternalDependency
from ..interpreterbase.decorators import KwargInfo, ContainerTypeInfo
from ..mesonlib import (File, FileMode, MachineChoice, listify, has_path_sep,
- OptionKey, EnvironmentVariables)
+ EnvironmentVariables)
+from ..options import OptionKey
from ..programs import ExternalProgram
# Helper definition for type checks that are `Optional[T]`
@@ -484,7 +485,7 @@ def link_whole_validator(values: T.List[T.Union[StaticLibrary, CustomTarget, Cus
PRESERVE_PATH_KW: KwargInfo[bool] = KwargInfo('preserve_path', bool, default=False, since='0.63.0')
TEST_KWS: T.List[KwargInfo] = [
- KwargInfo('args', ContainerTypeInfo(list, (str, File, BuildTarget, CustomTarget, CustomTargetIndex)),
+ KwargInfo('args', ContainerTypeInfo(list, (str, File, BuildTarget, CustomTarget, CustomTargetIndex, ExternalProgram)),
listify=True, default=[]),
KwargInfo('should_fail', bool, default=False),
KwargInfo('timeout', int, default=30),
diff --git a/mesonbuild/interpreterbase/__init__.py b/mesonbuild/interpreterbase/__init__.py
index 934375a0ee93..aa38e949063b 100644
--- a/mesonbuild/interpreterbase/__init__.py
+++ b/mesonbuild/interpreterbase/__init__.py
@@ -28,7 +28,6 @@
'noPosargs',
'noKwargs',
- 'stringArgs',
'noArgsFlattening',
'noSecondLevelHolderResolving',
'unholder_return',
@@ -87,7 +86,6 @@
from .decorators import (
noPosargs,
noKwargs,
- stringArgs,
noArgsFlattening,
noSecondLevelHolderResolving,
unholder_return,
diff --git a/mesonbuild/interpreterbase/baseobjects.py b/mesonbuild/interpreterbase/baseobjects.py
index 9a119a98a75d..a5ccccedc060 100644
--- a/mesonbuild/interpreterbase/baseobjects.py
+++ b/mesonbuild/interpreterbase/baseobjects.py
@@ -15,7 +15,7 @@
from contextlib import AbstractContextManager
if T.TYPE_CHECKING:
- from typing_extensions import Protocol
+ from typing_extensions import Protocol, TypeAlias
# Object holders need the actual interpreter
from ..interpreter import Interpreter
@@ -28,8 +28,8 @@ def __call__(self, other: __T) -> 'TYPE_var': ...
TV_func = T.TypeVar('TV_func', bound=T.Callable[..., T.Any])
-TYPE_elementary = T.Union[str, int, bool, T.List[T.Any], T.Dict[str, T.Any]]
-TYPE_var = T.Union[TYPE_elementary, HoldableObject, 'MesonInterpreterObject']
+TYPE_elementary: TypeAlias = T.Union[str, int, bool, T.Sequence['TYPE_elementary'], T.Dict[str, 'TYPE_elementary']]
+TYPE_var: TypeAlias = T.Union[TYPE_elementary, HoldableObject, 'MesonInterpreterObject', T.Sequence['TYPE_var'], T.Dict[str, 'TYPE_var']]
TYPE_nvar = T.Union[TYPE_var, mparser.BaseNode]
TYPE_kwargs = T.Dict[str, TYPE_var]
TYPE_nkwargs = T.Dict[str, TYPE_nvar]
@@ -122,7 +122,7 @@ class MutableInterpreterObject:
''' Dummy class to mark the object type as mutable '''
HoldableTypes = (HoldableObject, int, bool, str, list, dict)
-TYPE_HoldableTypes = T.Union[TYPE_elementary, HoldableObject]
+TYPE_HoldableTypes = T.Union[TYPE_var, HoldableObject]
InterpreterObjectTypeVar = T.TypeVar('InterpreterObjectTypeVar', bound=TYPE_HoldableTypes)
class ObjectHolder(InterpreterObject, T.Generic[InterpreterObjectTypeVar]):
diff --git a/mesonbuild/interpreterbase/decorators.py b/mesonbuild/interpreterbase/decorators.py
index 6524aa92dd4f..06cac526db22 100644
--- a/mesonbuild/interpreterbase/decorators.py
+++ b/mesonbuild/interpreterbase/decorators.py
@@ -3,7 +3,7 @@
from __future__ import annotations
-from .. import mesonlib, mlog
+from .. import coredata, mesonlib, mlog
from .disabler import Disabler
from .exceptions import InterpreterException, InvalidArguments
from ._unholder import _unholder
@@ -62,19 +62,6 @@ def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
return f(*wrapped_args, **wrapped_kwargs)
return T.cast('TV_func', wrapped)
-def stringArgs(f: TV_func) -> TV_func:
- @wraps(f)
- def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
- args = get_callee_args(wrapped_args)[1]
- if not isinstance(args, list):
- mlog.debug('Not a list:', str(args))
- raise InvalidArguments('Argument not a list.')
- if not all(isinstance(s, str) for s in args):
- mlog.debug('Element not a string:', str(args))
- raise InvalidArguments('Arguments must be strings.')
- return f(*wrapped_args, **wrapped_kwargs)
- return T.cast('TV_func', wrapped)
-
def noArgsFlattening(f: TV_func) -> TV_func:
setattr(f, 'no-args-flattening', True) # noqa: B010
return f
@@ -585,7 +572,7 @@ def __init__(self, feature_name: str, feature_version: str, extra_message: str =
self.extra_message = extra_message
@staticmethod
- def get_target_version(subproject: str) -> str:
+ def get_target_version(subproject: str) -> T.Union[str, mesonlib.NoProjectVersion]:
# Don't do any checks if project() has not been parsed yet
if subproject not in mesonlib.project_meson_versions:
return ''
@@ -593,7 +580,7 @@ def get_target_version(subproject: str) -> str:
@staticmethod
@abc.abstractmethod
- def check_version(target_version: str, feature_version: str) -> bool:
+ def check_version(target_version: T.Union[str, mesonlib.NoProjectVersion], feature_version: str) -> bool:
pass
def use(self, subproject: 'SubProject', location: T.Optional['mparser.BaseNode'] = None) -> None:
@@ -642,15 +629,15 @@ def report(cls, subproject: str) -> None:
if '\n' in warning_str:
mlog.warning(warning_str)
- def log_usage_warning(self, tv: str, location: T.Optional['mparser.BaseNode']) -> None:
+ def log_usage_warning(self, tv: T.Union[str, mesonlib.NoProjectVersion], location: T.Optional['mparser.BaseNode']) -> None:
raise InterpreterException('log_usage_warning not implemented')
@staticmethod
- def get_warning_str_prefix(tv: str) -> str:
+ def get_warning_str_prefix(tv: T.Union[str, mesonlib.NoProjectVersion]) -> str:
raise InterpreterException('get_warning_str_prefix not implemented')
@staticmethod
- def get_notice_str_prefix(tv: str) -> str:
+ def get_notice_str_prefix(tv: T.Union[str, mesonlib.NoProjectVersion]) -> str:
raise InterpreterException('get_notice_str_prefix not implemented')
def __call__(self, f: TV_func) -> TV_func:
@@ -679,20 +666,32 @@ class FeatureNew(FeatureCheckBase):
feature_registry = {}
@staticmethod
- def check_version(target_version: str, feature_version: str) -> bool:
- return mesonlib.version_compare_condition_with_min(target_version, feature_version)
+ def check_version(target_version: T.Union[str, mesonlib.NoProjectVersion], feature_version: str) -> bool:
+ if isinstance(target_version, str):
+ return mesonlib.version_compare_condition_with_min(target_version, feature_version)
+ else:
+ # Warn for anything newer than the current semver base slot.
+ major = coredata.version.split('.', maxsplit=1)[0]
+ return mesonlib.version_compare(feature_version, f'<{major}.0')
@staticmethod
- def get_warning_str_prefix(tv: str) -> str:
- return f'Project specifies a minimum meson_version \'{tv}\' but uses features which were added in newer versions:'
+ def get_warning_str_prefix(tv: T.Union[str, mesonlib.NoProjectVersion]) -> str:
+ if isinstance(tv, str):
+ return f'Project specifies a minimum meson_version \'{tv}\' but uses features which were added in newer versions:'
+ else:
+ return 'Project specifies no minimum version but uses features which were added in versions:'
@staticmethod
- def get_notice_str_prefix(tv: str) -> str:
+ def get_notice_str_prefix(tv: T.Union[str, mesonlib.NoProjectVersion]) -> str:
return ''
- def log_usage_warning(self, tv: str, location: T.Optional['mparser.BaseNode']) -> None:
+ def log_usage_warning(self, tv: T.Union[str, mesonlib.NoProjectVersion], location: T.Optional['mparser.BaseNode']) -> None:
+ if isinstance(tv, str):
+ prefix = f'Project targets {tv!r}'
+ else:
+ prefix = 'Project does not target a minimum version'
args = [
- 'Project targets', f"'{tv}'",
+ prefix,
'but uses feature introduced in',
f"'{self.feature_version}':",
f'{self.feature_name}.',
@@ -711,21 +710,29 @@ class FeatureDeprecated(FeatureCheckBase):
emit_notice = True
@staticmethod
- def check_version(target_version: str, feature_version: str) -> bool:
- # For deprecation checks we need to return the inverse of FeatureNew checks
- return not mesonlib.version_compare_condition_with_min(target_version, feature_version)
+ def check_version(target_version: T.Union[str, mesonlib.NoProjectVersion], feature_version: str) -> bool:
+ if isinstance(target_version, str):
+ # For deprecation checks we need to return the inverse of FeatureNew checks
+ return not mesonlib.version_compare_condition_with_min(target_version, feature_version)
+ else:
+ # Always warn for functionality deprecated in the current semver slot (i.e. the current version).
+ return False
@staticmethod
- def get_warning_str_prefix(tv: str) -> str:
+ def get_warning_str_prefix(tv: T.Union[str, mesonlib.NoProjectVersion]) -> str:
return 'Deprecated features used:'
@staticmethod
- def get_notice_str_prefix(tv: str) -> str:
+ def get_notice_str_prefix(tv: T.Union[str, mesonlib.NoProjectVersion]) -> str:
return 'Future-deprecated features used:'
- def log_usage_warning(self, tv: str, location: T.Optional['mparser.BaseNode']) -> None:
+ def log_usage_warning(self, tv: T.Union[str, mesonlib.NoProjectVersion], location: T.Optional['mparser.BaseNode']) -> None:
+ if isinstance(tv, str):
+ prefix = f'Project targets {tv!r}'
+ else:
+ prefix = 'Project does not target a minimum version'
args = [
- 'Project targets', f"'{tv}'",
+ prefix,
'but uses feature deprecated since',
f"'{self.feature_version}':",
f'{self.feature_name}.',
@@ -745,19 +752,19 @@ class FeatureBroken(FeatureCheckBase):
unconditional = True
@staticmethod
- def check_version(target_version: str, feature_version: str) -> bool:
+ def check_version(target_version: T.Union[str, mesonlib.NoProjectVersion], feature_version: str) -> bool:
# always warn for broken stuff
return False
@staticmethod
- def get_warning_str_prefix(tv: str) -> str:
+ def get_warning_str_prefix(tv: T.Union[str, mesonlib.NoProjectVersion]) -> str:
return 'Broken features used:'
@staticmethod
- def get_notice_str_prefix(tv: str) -> str:
+ def get_notice_str_prefix(tv: T.Union[str, mesonlib.NoProjectVersion]) -> str:
return ''
- def log_usage_warning(self, tv: str, location: T.Optional['mparser.BaseNode']) -> None:
+ def log_usage_warning(self, tv: T.Union[str, mesonlib.NoProjectVersion], location: T.Optional['mparser.BaseNode']) -> None:
args = [
'Project uses feature that was always broken,',
'and is now deprecated since',
diff --git a/mesonbuild/linkers/detect.py b/mesonbuild/linkers/detect.py
index 1bbe7b2e1520..493430a87cee 100644
--- a/mesonbuild/linkers/detect.py
+++ b/mesonbuild/linkers/detect.py
@@ -3,6 +3,7 @@
from __future__ import annotations
+from .base import RSPFileSyntax
from .. import mlog
from ..mesonlib import (
EnvironmentException,
@@ -40,6 +41,11 @@ def guess_win_linker(env: 'Environment', compiler: T.List[str], comp_class: T.Ty
from . import linkers
env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ if invoked_directly or comp_class.get_argument_syntax() == 'msvc':
+ rsp_syntax = RSPFileSyntax.MSVC
+ else:
+ rsp_syntax = RSPFileSyntax.GCC
+
# Explicitly pass logo here so that we can get the version of link.exe
if not use_linker_prefix or comp_class.LINKER_PREFIX is None:
check_args = ['/logo', '--version']
@@ -71,7 +77,8 @@ def guess_win_linker(env: 'Environment', compiler: T.List[str], comp_class: T.Ty
elif not invoked_directly:
return linkers.ClangClDynamicLinker(
for_machine, override, exelist=compiler, prefix=comp_class.LINKER_PREFIX,
- version=search_version(o), direct=False, machine=None)
+ version=search_version(o), direct=False, machine=None,
+ rsp_syntax=rsp_syntax)
if value is not None and invoked_directly:
compiler = value
@@ -82,7 +89,8 @@ def guess_win_linker(env: 'Environment', compiler: T.List[str], comp_class: T.Ty
return linkers.ClangClDynamicLinker(
for_machine, [],
prefix=comp_class.LINKER_PREFIX if use_linker_prefix else [],
- exelist=compiler, version=search_version(o), direct=invoked_directly)
+ exelist=compiler, version=search_version(o), direct=invoked_directly,
+ rsp_syntax=rsp_syntax)
elif 'OPTLINK' in o:
# Optlink's stdout *may* begin with a \r character.
return linkers.OptlinkDynamicLinker(compiler, for_machine, version=search_version(o))
@@ -97,7 +105,8 @@ def guess_win_linker(env: 'Environment', compiler: T.List[str], comp_class: T.Ty
return linkers.MSVCDynamicLinker(
for_machine, [], machine=target, exelist=compiler,
prefix=comp_class.LINKER_PREFIX if use_linker_prefix else [],
- version=search_version(out), direct=invoked_directly)
+ version=search_version(out), direct=invoked_directly,
+ rsp_syntax=rsp_syntax)
elif 'GNU coreutils' in o:
import shutil
fullpath = shutil.which(compiler[0])
diff --git a/mesonbuild/linkers/linkers.py b/mesonbuild/linkers/linkers.py
index 4eec82edd177..8ef88322a06c 100644
--- a/mesonbuild/linkers/linkers.py
+++ b/mesonbuild/linkers/linkers.py
@@ -606,6 +606,9 @@ def _apply_prefix(self, arg: T.Union[str, T.List[str]]) -> T.List[str]: ...
"boot_application": "16",
}
+ def get_accepts_rsp(self) -> bool:
+ return True
+
def get_pie_args(self) -> T.List[str]:
return ['-pie']
@@ -705,7 +708,9 @@ def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
args.extend(self._apply_prefix('-rpath,' + paths))
# TODO: should this actually be "for solaris/sunos"?
- if mesonlib.is_sunos():
+ # NOTE: Remove the zigcc check once zig support "-rpath-link"
+ # See https://github.com/ziglang/zig/issues/18713
+ if mesonlib.is_sunos() or self.id == 'ld.zigcc':
return (args, rpath_dirs_to_remove)
# Rpaths to use while linking must be absolute. These are not
@@ -851,9 +856,6 @@ class GnuDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, Dynam
"""Representation of GNU ld.bfd and ld.gold."""
- def get_accepts_rsp(self) -> bool:
- return True
-
class GnuGoldDynamicLinker(GnuDynamicLinker):
@@ -938,6 +940,13 @@ def get_win_subsystem_args(self, value: str) -> T.List[str]:
raise mesonlib.MesonBugException(f'win_subsystem: {value} not handled in lld linker. This should not be possible.')
+class ZigCCDynamicLinker(LLVMDynamicLinker):
+ id = 'ld.zigcc'
+
+ def get_thinlto_cache_args(self, path: str) -> T.List[str]:
+ return []
+
+
class WASMDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, DynamicLinker):
"""Emscripten's wasm-ld."""
@@ -1276,11 +1285,13 @@ def _apply_prefix(self, arg: T.Union[str, T.List[str]]) -> T.List[str]: ...
def __init__(self, exelist: T.List[str], for_machine: mesonlib.MachineChoice,
prefix_arg: T.Union[str, T.List[str]], always_args: T.List[str], *,
- version: str = 'unknown version', direct: bool = True, machine: str = 'x86'):
+ version: str = 'unknown version', direct: bool = True, machine: str = 'x86',
+ rsp_syntax: RSPFileSyntax = RSPFileSyntax.MSVC):
# There's no way I can find to make mypy understand what's going on here
super().__init__(exelist, for_machine, prefix_arg, always_args, version=version)
self.machine = machine
self.direct = direct
+ self.rsp_syntax = rsp_syntax
def invoked_by_compiler(self) -> bool:
return not self.direct
@@ -1324,7 +1335,10 @@ def import_library_args(self, implibname: str) -> T.List[str]:
return self._apply_prefix(['/IMPLIB:' + implibname])
def rsp_file_syntax(self) -> RSPFileSyntax:
- return RSPFileSyntax.MSVC
+ return self.rsp_syntax
+
+ def get_pie_args(self) -> T.List[str]:
+ return []
def get_pie_args(self) -> T.List[str]:
return []
@@ -1340,9 +1354,10 @@ def __init__(self, for_machine: mesonlib.MachineChoice, always_args: T.List[str]
exelist: T.Optional[T.List[str]] = None,
prefix: T.Union[str, T.List[str]] = '',
machine: str = 'x86', version: str = 'unknown version',
- direct: bool = True):
+ direct: bool = True, rsp_syntax: RSPFileSyntax = RSPFileSyntax.MSVC):
super().__init__(exelist or ['link.exe'], for_machine,
- prefix, always_args, machine=machine, version=version, direct=direct)
+ prefix, always_args, machine=machine, version=version, direct=direct,
+ rsp_syntax=rsp_syntax)
def get_always_args(self) -> T.List[str]:
return self._apply_prefix(['/release']) + super().get_always_args()
@@ -1364,9 +1379,10 @@ def __init__(self, for_machine: mesonlib.MachineChoice, always_args: T.List[str]
exelist: T.Optional[T.List[str]] = None,
prefix: T.Union[str, T.List[str]] = '',
machine: str = 'x86', version: str = 'unknown version',
- direct: bool = True):
+ direct: bool = True, rsp_syntax: RSPFileSyntax = RSPFileSyntax.MSVC):
super().__init__(exelist or ['lld-link.exe'], for_machine,
- prefix, always_args, machine=machine, version=version, direct=direct)
+ prefix, always_args, machine=machine, version=version, direct=direct,
+ rsp_syntax=rsp_syntax)
def get_output_args(self, outputname: str) -> T.List[str]:
# If we're being driven indirectly by clang just skip /MACHINE
@@ -1624,9 +1640,6 @@ def get_allow_undefined_args(self) -> T.List[str]:
def get_accepts_rsp(self) -> bool:
return True
- def get_lib_prefix(self) -> str:
- return ""
-
def get_linker_always_args(self) -> T.List[str]:
return []
@@ -1639,8 +1652,9 @@ def get_search_args(self, dirname: str) -> T.List[str]:
def invoked_by_compiler(self) -> bool:
return False
- def rsp_file_syntax(self) -> RSPFileSyntax:
- return RSPFileSyntax.GCC
+ def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+ suffix: str, soversion: str, darwin_versions: T.Tuple[str, str]) -> T.List[str]:
+ raise MesonException(f'{self.id} does not support shared libraries.')
class MetrowerksLinkerARM(MetrowerksLinker):
diff --git a/mesonbuild/machinefile.py b/mesonbuild/machinefile.py
index afeb4d05637c..a3aeae522713 100644
--- a/mesonbuild/machinefile.py
+++ b/mesonbuild/machinefile.py
@@ -1,6 +1,8 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2013-2024 Contributors to the The Meson project
+# Copyright © 2024 Intel Corporation
+from __future__ import annotations
import typing as T
import configparser
import os
@@ -10,10 +12,11 @@
from .mesonlib import MesonException
if T.TYPE_CHECKING:
- from .compilers import Compiler
+ from typing_extensions import TypeAlias
+
from .coredata import StrOrBytesPath
- CompilersDict = T.Dict[str, Compiler]
+ SectionT: TypeAlias = T.Union[str, int, bool, T.List[str], T.List['SectionT']]
class CmdLineFileParser(configparser.ConfigParser):
@@ -33,8 +36,8 @@ def optionxform(self, optionstr: str) -> str:
class MachineFileParser():
def __init__(self, filenames: T.List[str], sourcedir: str) -> None:
self.parser = CmdLineFileParser()
- self.constants: T.Dict[str, T.Union[str, bool, int, T.List[str]]] = {'True': True, 'False': False}
- self.sections: T.Dict[str, T.Dict[str, T.Union[str, bool, int, T.List[str]]]] = {}
+ self.constants: T.Dict[str, SectionT] = {'True': True, 'False': False}
+ self.sections: T.Dict[str, T.Dict[str, SectionT]] = {}
for fname in filenames:
try:
@@ -59,9 +62,9 @@ def __init__(self, filenames: T.List[str], sourcedir: str) -> None:
continue
self.sections[s] = self._parse_section(s)
- def _parse_section(self, s: str) -> T.Dict[str, T.Union[str, bool, int, T.List[str]]]:
+ def _parse_section(self, s: str) -> T.Dict[str, SectionT]:
self.scope = self.constants.copy()
- section: T.Dict[str, T.Union[str, bool, int, T.List[str]]] = {}
+ section: T.Dict[str, SectionT] = {}
for entry, value in self.parser.items(s):
if ' ' in entry or '\t' in entry or "'" in entry or '"' in entry:
raise MesonException(f'Malformed variable name {entry!r} in machine file.')
@@ -80,7 +83,7 @@ def _parse_section(self, s: str) -> T.Dict[str, T.Union[str, bool, int, T.List[s
self.scope[entry] = res
return section
- def _evaluate_statement(self, node: mparser.BaseNode) -> T.Union[str, bool, int, T.List[str]]:
+ def _evaluate_statement(self, node: mparser.BaseNode) -> SectionT:
if isinstance(node, (mparser.StringNode)):
return node.value
elif isinstance(node, mparser.BooleanNode):
@@ -90,7 +93,6 @@ def _evaluate_statement(self, node: mparser.BaseNode) -> T.Union[str, bool, int,
elif isinstance(node, mparser.ParenthesizedNode):
return self._evaluate_statement(node.inner)
elif isinstance(node, mparser.ArrayNode):
- # TODO: This is where recursive types would come in handy
return [self._evaluate_statement(arg) for arg in node.args.arguments]
elif isinstance(node, mparser.IdNode):
return self.scope[node.value]
@@ -98,20 +100,21 @@ def _evaluate_statement(self, node: mparser.BaseNode) -> T.Union[str, bool, int,
l = self._evaluate_statement(node.left)
r = self._evaluate_statement(node.right)
if node.operation == 'add':
- if (isinstance(l, str) and isinstance(r, str)) or \
- (isinstance(l, list) and isinstance(r, list)):
+ if isinstance(l, str) and isinstance(r, str):
+ return l + r
+ if isinstance(l, list) and isinstance(r, list):
return l + r
elif node.operation == 'div':
if isinstance(l, str) and isinstance(r, str):
return os.path.join(l, r)
raise MesonException('Unsupported node type')
-def parse_machine_files(filenames: T.List[str], sourcedir: str):
+def parse_machine_files(filenames: T.List[str], sourcedir: str) -> T.Dict[str, T.Dict[str, SectionT]]:
parser = MachineFileParser(filenames, sourcedir)
return parser.sections
class MachineFileStore:
- def __init__(self, native_files, cross_files, source_dir):
- self.native = MachineFileParser(native_files if native_files is not None else [], source_dir).sections
- self.cross = MachineFileParser(cross_files if cross_files is not None else [], source_dir).sections
+ def __init__(self, native_files: T.Optional[T.List[str]], cross_files: T.Optional[T.List[str]], source_dir: str):
+ self.native = parse_machine_files(native_files if native_files is not None else [], source_dir)
+ self.cross = parse_machine_files(cross_files if cross_files is not None else [], source_dir)
diff --git a/mesonbuild/mcompile.py b/mesonbuild/mcompile.py
index b07b60a9e984..2f5708c86521 100644
--- a/mesonbuild/mcompile.py
+++ b/mesonbuild/mcompile.py
@@ -16,6 +16,7 @@
from . import mlog
from . import mesonlib
+from .options import OptionKey
from .mesonlib import MesonException, RealPathAction, join_args, listify_array_value, setup_vsenv
from mesonbuild.environment import detect_ninja
from mesonbuild import build
@@ -354,14 +355,14 @@ def run(options: 'argparse.Namespace') -> int:
b = build.load(options.wd)
cdata = b.environment.coredata
- need_vsenv = T.cast('bool', cdata.get_option(mesonlib.OptionKey('vsenv')))
+ need_vsenv = T.cast('bool', cdata.get_option(OptionKey('vsenv')))
if setup_vsenv(need_vsenv):
mlog.log(mlog.green('INFO:'), 'automatically activated MSVC compiler environment')
cmd: T.List[str] = []
env: T.Optional[T.Dict[str, str]] = None
- backend = cdata.get_option(mesonlib.OptionKey('backend'))
+ backend = cdata.get_option(OptionKey('backend'))
assert isinstance(backend, str)
mlog.log(mlog.green('INFO:'), 'autodetecting backend as', backend)
if backend == 'ninja':
diff --git a/mesonbuild/mconf.py b/mesonbuild/mconf.py
index da96ac41ff3b..3a6343ba1233 100644
--- a/mesonbuild/mconf.py
+++ b/mesonbuild/mconf.py
@@ -20,7 +20,8 @@
from . import mintro
from . import mlog
from .ast import AstIDGenerator, IntrospectionInterpreter
-from .mesonlib import MachineChoice, OptionKey
+from .mesonlib import MachineChoice
+from .options import OptionKey
from .optinterpreter import OptionInterpreter
if T.TYPE_CHECKING:
@@ -92,7 +93,7 @@ def __init__(self, build_dir: str):
with open(opfile, 'rb') as f:
ophash = hashlib.sha1(f.read()).hexdigest()
if ophash != conf_options[1]:
- oi = OptionInterpreter(sub)
+ oi = OptionInterpreter(self.coredata.optstore, sub)
oi.process(opfile)
self.coredata.update_project_options(oi.options, sub)
self.coredata.options_files[sub] = (opfile, ophash)
@@ -101,7 +102,7 @@ def __init__(self, build_dir: str):
if not os.path.exists(opfile):
opfile = os.path.join(self.source_dir, 'meson_options.txt')
if os.path.exists(opfile):
- oi = OptionInterpreter(sub)
+ oi = OptionInterpreter(self.coredata.optstore, sub)
oi.process(opfile)
self.coredata.update_project_options(oi.options, sub)
with open(opfile, 'rb') as f:
@@ -140,7 +141,7 @@ def print_aligned(self) -> None:
This prints the generated output in an aligned, pretty form. it aims
for a total width of 160 characters, but will use whatever the tty
- reports it's value to be. Though this is much wider than the standard
+ reports its value to be. Though this is much wider than the standard
80 characters of terminals, and even than the newer 120, compressing
it to those lengths makes the output hard to read.
@@ -271,28 +272,29 @@ def print_default_values_warning() -> None:
dir_options[k] = v
elif k in test_option_names:
test_options[k] = v
- elif k.module:
+ elif k.has_module_prefix():
# Ignore module options if we did not use that module during
# configuration.
- if self.build and k.module not in self.build.modules:
+ modname = k.get_module_prefix()
+ if self.build and modname not in self.build.modules:
continue
- module_options[k.module][k] = v
- elif k.is_builtin():
+ module_options[modname][k] = v
+ elif self.coredata.optstore.is_builtin_option(k):
core_options[k] = v
host_core_options = self.split_options_per_subproject({k: v for k, v in core_options.items() if k.machine is MachineChoice.HOST})
build_core_options = self.split_options_per_subproject({k: v for k, v in core_options.items() if k.machine is MachineChoice.BUILD})
- host_compiler_options = self.split_options_per_subproject({k: v for k, v in self.coredata.optstore.items() if k.is_compiler() and k.machine is MachineChoice.HOST})
- build_compiler_options = self.split_options_per_subproject({k: v for k, v in self.coredata.optstore.items() if k.is_compiler() and k.machine is MachineChoice.BUILD})
- project_options = self.split_options_per_subproject({k: v for k, v in self.coredata.optstore.items() if k.is_project()})
+ host_compiler_options = self.split_options_per_subproject({k: v for k, v in self.coredata.optstore.items() if self.coredata.optstore.is_compiler_option(k) and k.machine is MachineChoice.HOST})
+ build_compiler_options = self.split_options_per_subproject({k: v for k, v in self.coredata.optstore.items() if self.coredata.optstore.is_compiler_option(k) and k.machine is MachineChoice.BUILD})
+ project_options = self.split_options_per_subproject({k: v for k, v in self.coredata.optstore.items() if self.coredata.optstore.is_project_option(k)})
show_build_options = self.default_values_only or self.build.environment.is_cross_build()
self.add_section('Main project options')
self.print_options('Core options', host_core_options[''])
if show_build_options:
self.print_options('', build_core_options[''])
- self.print_options('Backend options', {k: v for k, v in self.coredata.optstore.items() if k.is_backend()})
- self.print_options('Base options', {k: v for k, v in self.coredata.optstore.items() if k.is_base()})
+ self.print_options('Backend options', {k: v for k, v in self.coredata.optstore.items() if self.coredata.optstore.is_backend_option(k)})
+ self.print_options('Base options', {k: v for k, v in self.coredata.optstore.items() if self.coredata.optstore.is_base_option(k)})
self.print_options('Compiler options', host_compiler_options.get('', {}))
if show_build_options:
self.print_options('', build_compiler_options.get('', {}))
diff --git a/mesonbuild/mdevenv.py b/mesonbuild/mdevenv.py
index cc69fb6e084b..8c6ce2031d45 100644
--- a/mesonbuild/mdevenv.py
+++ b/mesonbuild/mdevenv.py
@@ -9,8 +9,9 @@
from pathlib import Path
from . import build, minstall
-from .mesonlib import (EnvironmentVariables, MesonException, is_windows, setup_vsenv, OptionKey,
+from .mesonlib import (EnvironmentVariables, MesonException, join_args, is_windows, setup_vsenv,
get_wine_shortpath, MachineChoice, relpath)
+from .options import OptionKey
from . import mlog
@@ -225,10 +226,9 @@ def run(options: argparse.Namespace) -> int:
args[0] = abs_path or args[0]
try:
- return subprocess.call(args, close_fds=False,
- env=devenv,
- cwd=workdir)
- except subprocess.CalledProcessError as e:
- return e.returncode
+ os.chdir(workdir)
+ os.execvpe(args[0], args, env=devenv)
except FileNotFoundError:
raise MesonException(f'Command not found: {args[0]}')
+ except OSError as e:
+ raise MesonException(f'Command `{join_args(args)}` failed to execute: {e}')
diff --git a/mesonbuild/mdist.py b/mesonbuild/mdist.py
index d569f6015739..17329009b40a 100644
--- a/mesonbuild/mdist.py
+++ b/mesonbuild/mdist.py
@@ -1,6 +1,6 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2017 The Meson development team
-# Copyright © 2023 Intel Corporation
+# Copyright © 2023-2024 Intel Corporation
from __future__ import annotations
@@ -23,7 +23,8 @@
from pathlib import Path
from mesonbuild.environment import Environment, detect_ninja
from mesonbuild.mesonlib import (GIT, MesonException, RealPathAction, get_meson_command, quiet_git,
- windows_proof_rmtree, setup_vsenv, OptionKey)
+ windows_proof_rmtree, setup_vsenv)
+from .options import OptionKey
from mesonbuild.msetup import add_arguments as msetup_argparse
from mesonbuild.wrap import wrap
from mesonbuild import mlog, build, coredata
@@ -320,7 +321,7 @@ def run_dist_steps(meson_command: T.List[str], unpacked_src_dir: str, builddir:
return 1
return 0
-def check_dist(packagename: str, meson_command: ImmutableListProtocol[str], extra_meson_args: T.List[str], bld_root: str, privdir: str) -> int:
+def check_dist(packagename: str, _meson_command: ImmutableListProtocol[str], extra_meson_args: T.List[str], bld_root: str, privdir: str) -> int:
print(f'Testing distribution package {packagename}')
unpackdir = os.path.join(privdir, 'dist-unpack')
builddir = os.path.join(privdir, 'dist-build')
@@ -334,6 +335,7 @@ def check_dist(packagename: str, meson_command: ImmutableListProtocol[str], extr
unpacked_files = glob(os.path.join(unpackdir, '*'))
assert len(unpacked_files) == 1
unpacked_src_dir = unpacked_files[0]
+ meson_command = _meson_command.copy()
meson_command += ['setup']
meson_command += create_cmdline_args(bld_root)
meson_command += extra_meson_args
diff --git a/mesonbuild/mesonmain.py b/mesonbuild/mesonmain.py
index faa0f426d82a..2c1ca97a386f 100644
--- a/mesonbuild/mesonmain.py
+++ b/mesonbuild/mesonmain.py
@@ -65,7 +65,7 @@ class CommandLineParser:
def __init__(self) -> None:
# only import these once we do full argparse processing
from . import mconf, mdist, minit, minstall, mintro, msetup, mtest, rewriter, msubprojects, munstable_coredata, mcompile, mdevenv, mformat
- from .scripts import env2mfile
+ from .scripts import env2mfile, reprotest
from .wrap import wraptool
import shutil
@@ -103,6 +103,8 @@ def __init__(self) -> None:
help_msg='Run commands in developer environment')
self.add_command('env2mfile', env2mfile.add_arguments, env2mfile.run,
help_msg='Convert current environment to a cross or native file')
+ self.add_command('reprotest', reprotest.add_arguments, reprotest.run,
+ help_msg='Test if project builds reproducibly')
self.add_command('format', mformat.add_arguments, mformat.run, aliases=['fmt'],
help_msg='Format meson source file')
# Add new commands above this line to list them in help command
diff --git a/mesonbuild/mformat.py b/mesonbuild/mformat.py
index 7f3dbf01400e..119c89351ec4 100644
--- a/mesonbuild/mformat.py
+++ b/mesonbuild/mformat.py
@@ -3,7 +3,6 @@
from __future__ import annotations
-import argparse
import re
import typing as T
from configparser import ConfigParser, MissingSectionHeaderError, ParsingError
@@ -19,6 +18,7 @@
from .environment import build_filename
if T.TYPE_CHECKING:
+ import argparse
from typing_extensions import Literal
diff --git a/mesonbuild/minit.py b/mesonbuild/minit.py
index cb5de231e419..70f184d02ebd 100644
--- a/mesonbuild/minit.py
+++ b/mesonbuild/minit.py
@@ -20,6 +20,7 @@
from mesonbuild.environment import detect_ninja
from mesonbuild.templates.mesontemplates import create_meson_build
from mesonbuild.templates.samplefactory import sample_generator
+from mesonbuild.options import OptionKey
if T.TYPE_CHECKING:
import argparse
@@ -192,7 +193,7 @@ def run(options: Arguments) -> int:
raise SystemExit
b = build.load(options.builddir)
- need_vsenv = T.cast('bool', b.environment.coredata.get_option(mesonlib.OptionKey('vsenv')))
+ need_vsenv = T.cast('bool', b.environment.coredata.get_option(OptionKey('vsenv')))
vsenv_active = mesonlib.setup_vsenv(need_vsenv)
if vsenv_active:
mlog.log(mlog.green('INFO:'), 'automatically activated MSVC compiler environment')
diff --git a/mesonbuild/minstall.py b/mesonbuild/minstall.py
index 36284f0affb5..9921295fda8e 100644
--- a/mesonbuild/minstall.py
+++ b/mesonbuild/minstall.py
@@ -18,7 +18,8 @@
from . import build, environment
from .backend.backends import InstallData
from .mesonlib import (MesonException, Popen_safe, RealPathAction, is_windows,
- is_aix, setup_vsenv, pickle_load, is_osx, OptionKey)
+ is_aix, setup_vsenv, pickle_load, is_osx)
+from .options import OptionKey
from .scripts import depfixer, destdir_join
from .scripts.meson_exe import run_exe
try:
@@ -151,7 +152,7 @@ def set_chown(path: str, user: T.Union[str, int, None] = None,
if sys.version_info >= (3, 13):
# pylint: disable=unexpected-keyword-arg
- # cannot handle sys.version_info, https://github.com/pylint-dev/pylint/issues/9138
+ # cannot handle sys.version_info, https://github.com/pylint-dev/pylint/issues/9622
shutil.chown(path, user, group, dir_fd=dir_fd, follow_symlinks=follow_symlinks)
else:
real_os_chown = os.chown
diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py
index dea67d82e1cd..810a2b674b40 100644
--- a/mesonbuild/mintro.py
+++ b/mesonbuild/mintro.py
@@ -25,7 +25,7 @@
from .dependencies import Dependency
from . import environment
from .interpreterbase import ObjectHolder
-from .mesonlib import OptionKey
+from .options import OptionKey
from .mparser import FunctionNode, ArrayNode, ArgumentNode, StringNode
if T.TYPE_CHECKING:
@@ -131,6 +131,7 @@ def list_install_plan(installdata: backends.InstallData) -> T.Dict[str, T.Dict[s
'destination': target.out_name,
'tag': target.tag or None,
'subproject': target.subproject or None,
+ 'install_rpath': target.install_rpath or None
}
for target in installdata.targets
},
@@ -298,7 +299,7 @@ def list_buildoptions(coredata: cdata.CoreData, subprojects: T.Optional[T.List[s
dir_options[k] = v
elif k in test_option_names:
test_options[k] = v
- elif k.is_builtin():
+ elif coredata.optstore.is_builtin_option(k):
core_options[k] = v
if not v.yielding:
for s in subprojects:
@@ -328,14 +329,14 @@ def add_keys(opts: 'T.Union[dict[OptionKey, UserOption[Any]], cdata.KeyedOptionD
optlist.append(optdict)
add_keys(core_options, 'core')
- add_keys({k: v for k, v in coredata.optstore.items() if k.is_backend()}, 'backend')
- add_keys({k: v for k, v in coredata.optstore.items() if k.is_base()}, 'base')
+ add_keys({k: v for k, v in coredata.optstore.items() if coredata.optstore.is_backend_option(k)}, 'backend')
+ add_keys({k: v for k, v in coredata.optstore.items() if coredata.optstore.is_base_option(k)}, 'base')
add_keys(
- {k: v for k, v in sorted(coredata.optstore.items(), key=lambda i: i[0].machine) if k.is_compiler()},
+ {k: v for k, v in sorted(coredata.optstore.items(), key=lambda i: i[0].machine) if coredata.optstore.is_compiler_option(k)},
'compiler',
)
add_keys(dir_options, 'directory')
- add_keys({k: v for k, v in coredata.optstore.items() if k.is_project()}, 'user')
+ add_keys({k: v for k, v in coredata.optstore.items() if coredata.optstore.is_project_option(k)}, 'user')
add_keys(test_options, 'test')
return optlist
@@ -469,10 +470,12 @@ def list_machines(builddata: build.Build) -> T.Dict[str, T.Dict[str, T.Union[str
machines[m]['object_suffix'] = machine.get_object_suffix()
return machines
-def list_projinfo(builddata: build.Build) -> T.Dict[str, T.Union[str, T.List[T.Dict[str, str]]]]:
- result: T.Dict[str, T.Union[str, T.List[T.Dict[str, str]]]] = {
+def list_projinfo(builddata: build.Build) -> T.Dict[str, T.Union[str, T.List[str], T.List[T.Dict[str, str]]]]:
+ result: T.Dict[str, T.Union[str, T.List[str], T.List[T.Dict[str, str]]]] = {
'version': builddata.project_version,
'descriptive_name': builddata.project_name,
+ 'license': builddata.dep_manifest[builddata.project_name].license,
+ 'license_files': [f[1].fname for f in builddata.dep_manifest[builddata.project_name].license_files],
'subproject_dir': builddata.subproject_dir,
}
subprojects = []
diff --git a/mesonbuild/mlog.py b/mesonbuild/mlog.py
index bc8faeba7d06..b43ac8a692e3 100644
--- a/mesonbuild/mlog.py
+++ b/mesonbuild/mlog.py
@@ -49,32 +49,6 @@ def _windows_ansi() -> bool:
# original behavior
return bool(kernel.SetConsoleMode(stdout, mode.value | 0x4) or os.environ.get('ANSICON'))
-def colorize_console() -> bool:
- _colorize_console: bool = getattr(sys.stdout, 'colorize_console', None)
- if _colorize_console is not None:
- return _colorize_console
-
- try:
- if is_windows():
- _colorize_console = os.isatty(sys.stdout.fileno()) and _windows_ansi()
- else:
- _colorize_console = os.isatty(sys.stdout.fileno()) and os.environ.get('TERM', 'dumb') != 'dumb'
- except Exception:
- _colorize_console = False
-
- sys.stdout.colorize_console = _colorize_console # type: ignore[attr-defined]
- return _colorize_console
-
-def setup_console() -> None:
- # on Windows, a subprocess might call SetConsoleMode() on the console
- # connected to stdout and turn off ANSI escape processing. Call this after
- # running a subprocess to ensure we turn it on again.
- if is_windows():
- try:
- delattr(sys.stdout, 'colorize_console')
- except AttributeError:
- pass
-
_in_ci = 'CI' in os.environ
_ci_is_github = 'GITHUB_ACTIONS' in os.environ
@@ -91,6 +65,7 @@ class _Logger:
log_dir: T.Optional[str] = None
log_depth: T.List[str] = field(default_factory=list)
+ log_to_stderr: bool = False
log_file: T.Optional[T.TextIO] = None
log_timestamp_start: T.Optional[float] = None
log_fatal_warnings = False
@@ -139,7 +114,7 @@ def shutdown(self) -> T.Optional[str]:
return None
def start_pager(self) -> None:
- if not colorize_console():
+ if not self.colorize_console():
return
pager_cmd = []
if 'PAGER' in os.environ:
@@ -223,12 +198,17 @@ def force_print(self, *args: str, nested: bool, sep: T.Optional[str] = None,
raw = '\n'.join(lines)
# _Something_ is going to get printed.
+ if self.log_pager:
+ output = self.log_pager.stdin
+ elif self.log_to_stderr:
+ output = sys.stderr
+ else:
+ output = sys.stdout
try:
- output = self.log_pager.stdin if self.log_pager else None
print(raw, end='', file=output)
except UnicodeEncodeError:
cleaned = raw.encode('ascii', 'replace').decode('ascii')
- print(cleaned, end='')
+ print(cleaned, end='', file=output)
def debug(self, *args: TV_Loggable, sep: T.Optional[str] = None,
end: T.Optional[str] = None, display_timestamp: bool = True) -> None:
@@ -244,7 +224,7 @@ def _log(self, *args: TV_Loggable, is_error: bool = False,
if self.log_file is not None:
print(*arr, file=self.log_file, sep=sep, end=end)
self.log_file.flush()
- if colorize_console():
+ if self.colorize_console():
arr = process_markup(args, True, display_timestamp)
if not self.log_errors_only or is_error:
force_print(*arr, nested=nested, sep=sep, end=end)
@@ -263,34 +243,27 @@ def log(self, *args: TV_Loggable, is_error: bool = False,
sep: T.Optional[str] = None,
end: T.Optional[str] = None,
display_timestamp: bool = True) -> None:
- if once:
- self._log_once(*args, is_error=is_error, nested=nested, sep=sep, end=end, display_timestamp=display_timestamp)
- else:
+ if self._should_log(*args, once=once):
self._log(*args, is_error=is_error, nested=nested, sep=sep, end=end, display_timestamp=display_timestamp)
def log_timestamp(self, *args: TV_Loggable) -> None:
if self.log_timestamp_start:
self.log(*args)
- def _log_once(self, *args: TV_Loggable, is_error: bool = False,
- nested: bool = True, sep: T.Optional[str] = None,
- end: T.Optional[str] = None, display_timestamp: bool = True) -> None:
- """Log variant that only prints a given message one time per meson invocation.
-
- This considers ansi decorated values by the values they wrap without
- regard for the AnsiDecorator itself.
- """
+ def _should_log(self, *args: TV_Loggable, once: bool) -> bool:
def to_str(x: TV_Loggable) -> str:
if isinstance(x, str):
return x
if isinstance(x, AnsiDecorator):
return x.text
return str(x)
+ if not once:
+ return True
t = tuple(to_str(a) for a in args)
if t in self.logged_once:
- return
+ return False
self.logged_once.add(t)
- self._log(*args, is_error=is_error, nested=nested, sep=sep, end=end, display_timestamp=display_timestamp)
+ return True
def _log_error(self, severity: _Severity, *rargs: TV_Loggable,
once: bool = False, fatal: bool = True,
@@ -313,6 +286,9 @@ def _log_error(self, severity: _Severity, *rargs: TV_Loggable,
# rargs is a tuple, not a list
args = label + list(rargs)
+ if not self._should_log(*args, once=once):
+ return
+
if location is not None:
location_file = relpath(location.filename, os.getcwd())
location_str = get_error_location_string(location_file, location.lineno)
@@ -321,7 +297,7 @@ def _log_error(self, severity: _Severity, *rargs: TV_Loggable,
location_list = T.cast('TV_LoggableList', [location_str])
args = location_list + args
- log(*args, once=once, nested=nested, sep=sep, end=end, is_error=is_error)
+ self._log(*args, nested=nested, sep=sep, end=end, is_error=is_error)
self.log_warnings_counter += 1
@@ -403,8 +379,38 @@ def nested_warnings(self) -> T.Iterator[None]:
def get_warning_count(self) -> int:
return self.log_warnings_counter
+ def redirect(self, to_stderr: bool) -> None:
+ self.log_to_stderr = to_stderr
+
+ def colorize_console(self) -> bool:
+ output = sys.stderr if self.log_to_stderr else sys.stdout
+ _colorize_console: bool = getattr(output, 'colorize_console', None)
+ if _colorize_console is not None:
+ return _colorize_console
+ try:
+ if is_windows():
+ _colorize_console = os.isatty(output.fileno()) and _windows_ansi()
+ else:
+ _colorize_console = os.isatty(output.fileno()) and os.environ.get('TERM', 'dumb') != 'dumb'
+ except Exception:
+ _colorize_console = False
+ output.colorize_console = _colorize_console # type: ignore[attr-defined]
+ return _colorize_console
+
+ def setup_console(self) -> None:
+ # on Windows, a subprocess might call SetConsoleMode() on the console
+ # connected to stdout and turn off ANSI escape processing. Call this after
+ # running a subprocess to ensure we turn it on again.
+ output = sys.stderr if self.log_to_stderr else sys.stdout
+ if is_windows():
+ try:
+ delattr(output, 'colorize_console')
+ except AttributeError:
+ pass
+
_logger = _Logger()
cmd_ci_include = _logger.cmd_ci_include
+colorize_console = _logger.colorize_console
debug = _logger.debug
deprecation = _logger.deprecation
error = _logger.error
@@ -421,9 +427,11 @@ def get_warning_count(self) -> int:
no_logging = _logger.no_logging
notice = _logger.notice
process_markup = _logger.process_markup
+redirect = _logger.redirect
set_quiet = _logger.set_quiet
set_timestamp_start = _logger.set_timestamp_start
set_verbose = _logger.set_verbose
+setup_console = _logger.setup_console
shutdown = _logger.shutdown
start_pager = _logger.start_pager
stop_pager = _logger.stop_pager
diff --git a/mesonbuild/modules/__init__.py b/mesonbuild/modules/__init__.py
index 046c530a8404..86dc8762e65a 100644
--- a/mesonbuild/modules/__init__.py
+++ b/mesonbuild/modules/__init__.py
@@ -8,6 +8,7 @@
import typing as T
from .. import build, mesonlib
+from ..options import OptionKey
from ..build import IncludeDirs
from ..interpreterbase.decorators import noKwargs, noPosargs
from ..mesonlib import relpath, HoldableObject, MachineChoice
@@ -112,7 +113,7 @@ def dependency(self, depname: str, native: bool = False, required: bool = True,
if wanted:
kwargs['version'] = wanted
# FIXME: Even if we fix the function, mypy still can't figure out what's
- # going on here. And we really dont want to call interpreter
+ # going on here. And we really don't want to call interpreter
# implementations of meson functions anyway.
return self._interpreter.func_dependency(self.current_node, [depname], kwargs) # type: ignore
@@ -131,16 +132,13 @@ def test(self, args: T.Tuple[str, T.Union[build.Executable, build.Jar, 'External
self._interpreter.func_test(self.current_node, real_args, kwargs)
def get_option(self, name: str, subproject: str = '',
- machine: MachineChoice = MachineChoice.HOST,
- lang: T.Optional[str] = None,
- module: T.Optional[str] = None) -> T.Union[T.List[str], str, int, bool]:
- return self.environment.coredata.get_option(mesonlib.OptionKey(name, subproject, machine, lang, module))
+ machine: MachineChoice = MachineChoice.HOST) -> T.Union[T.List[str], str, int, bool]:
+ return self.environment.coredata.get_option(OptionKey(name, subproject, machine))
def is_user_defined_option(self, name: str, subproject: str = '',
machine: MachineChoice = MachineChoice.HOST,
- lang: T.Optional[str] = None,
- module: T.Optional[str] = None) -> bool:
- key = mesonlib.OptionKey(name, subproject, machine, lang, module)
+ lang: T.Optional[str] = None) -> bool:
+ key = OptionKey(name, subproject, machine)
return key in self._interpreter.user_defined_options.cmd_line_options
def process_include_dirs(self, dirs: T.Iterable[T.Union[str, IncludeDirs]]) -> T.Iterable[IncludeDirs]:
diff --git a/mesonbuild/modules/_qt.py b/mesonbuild/modules/_qt.py
index ebb8a3994097..9f10c58266a5 100644
--- a/mesonbuild/modules/_qt.py
+++ b/mesonbuild/modules/_qt.py
@@ -27,6 +27,7 @@
from ..interpreter import kwargs
from ..mesonlib import FileOrString
from ..programs import ExternalProgram
+ from typing_extensions import Literal
QtDependencyType = T.Union[QtPkgConfigDependency, QmakeQtDependency]
@@ -80,6 +81,7 @@ class PreprocessKwArgs(TypedDict):
class HasToolKwArgs(kwargs.ExtractRequired):
method: str
+ tools: T.List[Literal['moc', 'uic', 'rcc', 'lrelease']]
class CompileTranslationsKwArgs(TypedDict):
@@ -91,10 +93,21 @@ class CompileTranslationsKwArgs(TypedDict):
rcc_extra_arguments: T.List[str]
ts_files: T.List[T.Union[str, File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList]]
+def _list_in_set_validator(choices: T.Set[str]) -> T.Callable[[T.List[str]], T.Optional[str]]:
+ """Check that the choice given was one of the given set."""
+ def inner(checklist: T.List[str]) -> T.Optional[str]:
+ invalid = set(checklist).difference(choices)
+ if invalid:
+ return f"invalid selections {', '.join(sorted(invalid))}, valid elements are {', '.join(sorted(choices))}."
+ return None
+
+ return inner
+
class QtBaseModule(ExtensionModule):
_tools_detected = False
_rcc_supports_depfiles = False
_moc_supports_depfiles = False
+ _set_of_qt_tools = {'moc', 'uic', 'rcc', 'lrelease'}
def __init__(self, interpreter: 'Interpreter', qt_version: int = 5):
ExtensionModule.__init__(self, interpreter)
@@ -102,10 +115,7 @@ def __init__(self, interpreter: 'Interpreter', qt_version: int = 5):
# It is important that this list does not change order as the order of
# the returned ExternalPrograms will change as well
self.tools: T.Dict[str, T.Union[ExternalProgram, build.Executable]] = {
- 'moc': NonExistingExternalProgram('moc'),
- 'uic': NonExistingExternalProgram('uic'),
- 'rcc': NonExistingExternalProgram('rcc'),
- 'lrelease': NonExistingExternalProgram('lrelease'),
+ tool: NonExistingExternalProgram(tool) for tool in self._set_of_qt_tools
}
self.methods.update({
'has_tools': self.has_tools,
@@ -258,6 +268,10 @@ def _parse_qrc_deps(self, state: 'ModuleState',
'qt.has_tools',
KwargInfo('required', (bool, options.UserFeatureOption), default=False),
KwargInfo('method', str, default='auto'),
+ KwargInfo('tools', ContainerTypeInfo(list, str), listify=True,
+ default=['moc', 'uic', 'rcc', 'lrelease'],
+ validator=_list_in_set_validator(_set_of_qt_tools),
+ since='1.6.0'),
)
def has_tools(self, state: 'ModuleState', args: T.Tuple, kwargs: 'HasToolKwArgs') -> bool:
method = kwargs.get('method', 'auto')
@@ -269,8 +283,9 @@ def has_tools(self, state: 'ModuleState', args: T.Tuple, kwargs: 'HasToolKwArgs'
mlog.log('qt.has_tools skipped: feature', mlog.bold(feature), 'disabled')
return False
self._detect_tools(state, method, required=False)
- for tool in self.tools.values():
- if not tool.found():
+ for tool in kwargs['tools']:
+ assert tool in self._set_of_qt_tools, f'tools must be in {self._set_of_qt_tools}'
+ if not self.tools[tool].found():
if required:
raise MesonException('Qt tools not found')
return False
diff --git a/mesonbuild/modules/cmake.py b/mesonbuild/modules/cmake.py
index b9161021d917..2168aaa516c0 100644
--- a/mesonbuild/modules/cmake.py
+++ b/mesonbuild/modules/cmake.py
@@ -10,15 +10,13 @@
from . import ExtensionModule, ModuleReturnValue, ModuleObject, ModuleInfo
from .. import build, mesonlib, mlog, dependencies
+from ..options import OptionKey
from ..cmake import TargetOptions, cmake_defines_to_args
from ..interpreter import SubprojectHolder
from ..interpreter.type_checking import REQUIRED_KW, INSTALL_DIR_KW, NoneType, in_set_validator
from ..interpreterbase import (
FeatureNew,
- FeatureNewKwargs,
- stringArgs,
- permittedKwargs,
noPosargs,
noKwargs,
@@ -35,10 +33,10 @@
from typing_extensions import TypedDict
from . import ModuleState
- from ..cmake import SingleTargetOptions
+ from ..cmake.common import SingleTargetOptions
from ..environment import Environment
from ..interpreter import Interpreter, kwargs
- from ..interpreterbase import TYPE_kwargs, TYPE_var
+ from ..interpreterbase import TYPE_kwargs, TYPE_var, InterpreterObject
class WriteBasicPackageVersionFile(TypedDict):
@@ -60,6 +58,12 @@ class Subproject(kwargs.ExtractRequired):
options: T.Optional[CMakeSubprojectOptions]
cmake_options: T.List[str]
+ class TargetKW(TypedDict):
+
+ target: T.Optional[str]
+
+
+_TARGET_KW = KwargInfo('target', (str, NoneType))
COMPATIBILITIES = ['AnyNewerVersion', 'SameMajorVersion', 'SameMinorVersion', 'ExactVersion']
@@ -89,7 +93,6 @@ class Subproject(kwargs.ExtractRequired):
message(FATAL_ERROR "File or directory ${_file} referenced by variable ${_var} does not exist !")
endif()
endmacro()
-
####################################################################################
'''
@@ -109,11 +112,7 @@ def __init__(self, subp: SubprojectHolder):
'found': self.found_method,
})
- def _args_to_info(self, args: T.List[str]) -> T.Dict[str, str]:
- if len(args) != 1:
- raise InterpreterException('Exactly one argument is required.')
-
- tgt = args[0]
+ def _args_to_info(self, tgt: str) -> T.Dict[str, str]:
res = self.cm_interpreter.target_info(tgt)
if res is None:
raise InterpreterException(f'The CMake target {tgt} does not exist\n' +
@@ -125,43 +124,50 @@ def _args_to_info(self, args: T.List[str]) -> T.Dict[str, str]:
return res
@noKwargs
- @stringArgs
- def get_variable(self, state: ModuleState, args: T.List[str], kwargs: TYPE_kwargs) -> TYPE_var:
- return self.subp.get_variable_method(args, kwargs)
-
- @FeatureNewKwargs('dependency', '0.56.0', ['include_type'])
- @permittedKwargs({'include_type'})
- @stringArgs
- def dependency(self, state: ModuleState, args: T.List[str], kwargs: T.Dict[str, str]) -> dependencies.Dependency:
- info = self._args_to_info(args)
+ @typed_pos_args('cmake.subproject.get_variable', str, optargs=[str])
+ def get_variable(self, state: ModuleState, args: T.Tuple[str, T.Optional[str]], kwargs: TYPE_kwargs) -> T.Union[TYPE_var, InterpreterObject]:
+ return self.subp.get_variable(args, kwargs)
+
+ @typed_pos_args('cmake.subproject.dependency', str)
+ @typed_kwargs(
+ 'cmake.subproject.dependency',
+ KwargInfo(
+ 'include_type',
+ str,
+ default='preserve',
+ since='0.56.0',
+ validator=in_set_validator({'preserve', 'system', 'non-system'})
+ ),
+ )
+ def dependency(self, state: ModuleState, args: T.Tuple[str], kwargs: T.Dict[str, str]) -> dependencies.Dependency:
+ info = self._args_to_info(args[0])
if info['func'] == 'executable':
raise InvalidArguments(f'{args[0]} is an executable and does not support the dependency() method. Use target() instead.')
if info['dep'] is None:
raise InvalidArguments(f'{args[0]} does not support the dependency() method. Use target() instead.')
orig = self.get_variable(state, [info['dep']], {})
assert isinstance(orig, dependencies.Dependency)
- actual = orig.include_type
- if 'include_type' in kwargs and kwargs['include_type'] != actual:
- mlog.debug('Current include type is {}. Converting to requested {}'.format(actual, kwargs['include_type']))
+ if kwargs['include_type'] != 'preserve' and kwargs['include_type'] != orig.include_type:
+ mlog.debug('Current include type is {}. Converting to requested {}'.format(orig.include_type, kwargs['include_type']))
return orig.generate_system_dependency(kwargs['include_type'])
return orig
@noKwargs
- @stringArgs
- def include_directories(self, state: ModuleState, args: T.List[str], kwargs: TYPE_kwargs) -> build.IncludeDirs:
- info = self._args_to_info(args)
+ @typed_pos_args('cmake.subproject.include_directories', str)
+ def include_directories(self, state: ModuleState, args: T.Tuple[str], kwargs: TYPE_kwargs) -> build.IncludeDirs:
+ info = self._args_to_info(args[0])
return self.get_variable(state, [info['inc']], kwargs)
@noKwargs
- @stringArgs
- def target(self, state: ModuleState, args: T.List[str], kwargs: TYPE_kwargs) -> build.Target:
- info = self._args_to_info(args)
+ @typed_pos_args('cmake.subproject.target', str)
+ def target(self, state: ModuleState, args: T.Tuple[str], kwargs: TYPE_kwargs) -> build.Target:
+ info = self._args_to_info(args[0])
return self.get_variable(state, [info['tgt']], kwargs)
@noKwargs
- @stringArgs
- def target_type(self, state: ModuleState, args: T.List[str], kwargs: TYPE_kwargs) -> str:
- info = self._args_to_info(args)
+ @typed_pos_args('cmake.subproject.target_type', str)
+ def target_type(self, state: ModuleState, args: T.Tuple[str], kwargs: TYPE_kwargs) -> str:
+ info = self._args_to_info(args[0])
return info['func']
@noPosargs
@@ -193,8 +199,8 @@ def __init__(self) -> None:
}
)
- def _get_opts(self, kwargs: dict) -> SingleTargetOptions:
- if 'target' in kwargs:
+ def _get_opts(self, kwargs: TargetKW) -> SingleTargetOptions:
+ if kwargs['target'] is not None:
return self.target_options[kwargs['target']]
return self.target_options.global_options
@@ -204,23 +210,23 @@ def add_cmake_defines(self, state: ModuleState, args: T.Tuple[T.List[T.Dict[str,
self.cmake_options += cmake_defines_to_args(args[0])
@typed_pos_args('subproject_options.set_override_option', str, str)
- @permittedKwargs({'target'})
- def set_override_option(self, state: ModuleState, args: T.Tuple[str, str], kwargs: TYPE_kwargs) -> None:
+ @typed_kwargs('subproject_options.set_override_option', _TARGET_KW)
+ def set_override_option(self, state: ModuleState, args: T.Tuple[str, str], kwargs: TargetKW) -> None:
self._get_opts(kwargs).set_opt(args[0], args[1])
@typed_pos_args('subproject_options.set_install', bool)
- @permittedKwargs({'target'})
- def set_install(self, state: ModuleState, args: T.Tuple[bool], kwargs: TYPE_kwargs) -> None:
+ @typed_kwargs('subproject_options.set_install', _TARGET_KW)
+ def set_install(self, state: ModuleState, args: T.Tuple[bool], kwargs: TargetKW) -> None:
self._get_opts(kwargs).set_install(args[0])
@typed_pos_args('subproject_options.append_compile_args', str, varargs=str, min_varargs=1)
- @permittedKwargs({'target'})
- def append_compile_args(self, state: ModuleState, args: T.Tuple[str, T.List[str]], kwargs: TYPE_kwargs) -> None:
+ @typed_kwargs('subproject_options.append_compile_args', _TARGET_KW)
+ def append_compile_args(self, state: ModuleState, args: T.Tuple[str, T.List[str]], kwargs: TargetKW) -> None:
self._get_opts(kwargs).append_args(args[0], args[1])
@typed_pos_args('subproject_options.append_link_args', varargs=str, min_varargs=1)
- @permittedKwargs({'target'})
- def append_link_args(self, state: ModuleState, args: T.Tuple[T.List[str]], kwargs: TYPE_kwargs) -> None:
+ @typed_kwargs('subproject_options.append_link_args', _TARGET_KW)
+ def append_link_args(self, state: ModuleState, args: T.Tuple[T.List[str]], kwargs: TargetKW) -> None:
self._get_opts(kwargs).append_link_args(args[0])
@noPosargs
@@ -299,7 +305,7 @@ def write_basic_package_version_file(self, state: ModuleState, args: TYPE_var, k
pkgroot = pkgroot_name = kwargs['install_dir']
if pkgroot is None:
- pkgroot = os.path.join(state.environment.coredata.get_option(mesonlib.OptionKey('libdir')), 'cmake', name)
+ pkgroot = os.path.join(state.environment.coredata.get_option(OptionKey('libdir')), 'cmake', name)
pkgroot_name = os.path.join('{libdir}', 'cmake', name)
template_file = os.path.join(self.cmake_root, 'Modules', f'BasicConfigVersion-{compatibility}.cmake.in')
@@ -370,14 +376,14 @@ def configure_package_config_file(self, state: ModuleState, args: TYPE_var, kwar
install_dir = kwargs['install_dir']
if install_dir is None:
- install_dir = os.path.join(state.environment.coredata.get_option(mesonlib.OptionKey('libdir')), 'cmake', name)
+ install_dir = os.path.join(state.environment.coredata.get_option(OptionKey('libdir')), 'cmake', name)
conf = kwargs['configuration']
if isinstance(conf, dict):
FeatureNew.single_use('cmake.configure_package_config_file dict as configuration', '0.62.0', state.subproject, location=state.current_node)
conf = build.ConfigurationData(conf)
- prefix = state.environment.coredata.get_option(mesonlib.OptionKey('prefix'))
+ prefix = state.environment.coredata.get_option(OptionKey('prefix'))
abs_install_dir = install_dir
if not os.path.isabs(abs_install_dir):
abs_install_dir = os.path.join(prefix, install_dir)
diff --git a/mesonbuild/modules/dlang.py b/mesonbuild/modules/dlang.py
index 34fea9081f85..966b0703d417 100644
--- a/mesonbuild/modules/dlang.py
+++ b/mesonbuild/modules/dlang.py
@@ -28,8 +28,8 @@ def __init__(self, interpreter):
})
def _init_dub(self, state):
- if DlangModule.class_dubbin is None:
- self.dubbin = DubDependency.class_dubbin
+ if DlangModule.class_dubbin is None and DubDependency.class_dubbin is not None:
+ self.dubbin = DubDependency.class_dubbin[0]
DlangModule.class_dubbin = self.dubbin
else:
self.dubbin = DlangModule.class_dubbin
diff --git a/mesonbuild/modules/external_project.py b/mesonbuild/modules/external_project.py
index 5fdb0214c539..fb82a384d919 100644
--- a/mesonbuild/modules/external_project.py
+++ b/mesonbuild/modules/external_project.py
@@ -19,7 +19,8 @@
from ..interpreter.type_checking import ENV_KW, DEPENDS_KW
from ..interpreterbase.decorators import ContainerTypeInfo, KwargInfo, typed_kwargs, typed_pos_args
from ..mesonlib import (EnvironmentException, MesonException, Popen_safe, MachineChoice,
- get_variable_regex, do_replacement, join_args, OptionKey)
+ get_variable_regex, do_replacement, join_args)
+from ..options import OptionKey
if T.TYPE_CHECKING:
from typing_extensions import TypedDict
diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py
index 9d872353ae02..e0c1214d0851 100644
--- a/mesonbuild/modules/gnome.py
+++ b/mesonbuild/modules/gnome.py
@@ -31,6 +31,7 @@
from ..mesonlib import (
MachineChoice, MesonException, OrderedSet, Popen_safe, join_args, quote_arg
)
+from ..options import OptionKey
from ..programs import OverrideProgram
from ..scripts.gettext import read_linguas
@@ -519,7 +520,7 @@ def compile_resources(self, state: 'ModuleState', args: T.Tuple[str, 'FileOrStri
if gresource: # Only one target for .gresource files
return ModuleReturnValue(target_c, [target_c])
- install_dir = kwargs['install_dir'] or state.environment.coredata.get_option(mesonlib.OptionKey('includedir'))
+ install_dir = kwargs['install_dir'] or state.environment.coredata.get_option(OptionKey('includedir'))
assert isinstance(install_dir, str), 'for mypy'
target_h = GResourceHeaderTarget(
f'{target_name}_h',
@@ -908,7 +909,7 @@ def _get_langs_compilers_flags(state: 'ModuleState', langs_compilers: T.List[T.T
cflags += state.global_args[lang]
if state.project_args.get(lang):
cflags += state.project_args[lang]
- if mesonlib.OptionKey('b_sanitize') in compiler.base_options:
+ if OptionKey('b_sanitize') in compiler.base_options:
sanitize = state.environment.coredata.optstore.get_value('b_sanitize')
cflags += compiler.sanitizer_compile_args(sanitize)
sanitize = sanitize.split(',')
@@ -1334,15 +1335,18 @@ def yelp(self, state: 'ModuleState', args: T.Tuple[str, T.List[str]], kwargs: 'Y
for i, m in enumerate(media):
m_dir = os.path.dirname(m)
m_install_dir = os.path.join(l_install_dir, m_dir)
+ try:
+ m_file: T.Optional[mesonlib.File] = mesonlib.File.from_source_file(state.environment.source_dir, l_subdir, m)
+ except MesonException:
+ m_file = None
+
l_data: T.Union[build.Data, build.SymlinkData]
- if symlinks:
+ if symlinks and not m_file:
link_target = os.path.join(os.path.relpath(c_install_dir, start=m_install_dir), m)
l_data = build.SymlinkData(link_target, os.path.basename(m),
m_install_dir, state.subproject, install_tag='doc')
else:
- try:
- m_file = mesonlib.File.from_source_file(state.environment.source_dir, l_subdir, m)
- except MesonException:
+ if not m_file:
m_file = media_files[i]
l_data = build.Data([m_file], m_install_dir, m_install_dir,
mesonlib.FileMode(), state.subproject, install_tag='doc')
@@ -1645,7 +1649,7 @@ def gdbus_codegen(self, state: 'ModuleState', args: T.Tuple[str, T.Optional[T.Un
targets = []
install_header = kwargs['install_header']
- install_dir = kwargs['install_dir'] or state.environment.coredata.get_option(mesonlib.OptionKey('includedir'))
+ install_dir = kwargs['install_dir'] or state.environment.coredata.get_option(OptionKey('includedir'))
assert isinstance(install_dir, str), 'for mypy'
output = namebase + '.c'
@@ -1957,7 +1961,7 @@ def _make_mkenum_impl(
) -> build.CustomTarget:
real_cmd: T.List[T.Union[str, 'ToolType']] = [self._find_tool(state, 'glib-mkenums')]
real_cmd.extend(cmd)
- _install_dir = install_dir or state.environment.coredata.get_option(mesonlib.OptionKey('includedir'))
+ _install_dir = install_dir or state.environment.coredata.get_option(OptionKey('includedir'))
assert isinstance(_install_dir, str), 'for mypy'
return CustomTarget(
@@ -2169,7 +2173,7 @@ def generate_vapi(self, state: 'ModuleState', args: T.Tuple[str], kwargs: 'Gener
cmd.append(gir_file)
vapi_output = library + '.vapi'
- datadir = state.environment.coredata.get_option(mesonlib.OptionKey('datadir'))
+ datadir = state.environment.coredata.get_option(OptionKey('datadir'))
assert isinstance(datadir, str), 'for mypy'
install_dir = kwargs['install_dir'] or os.path.join(datadir, 'vala', 'vapi')
diff --git a/mesonbuild/modules/hotdoc.py b/mesonbuild/modules/hotdoc.py
index fc8832efcba8..50b5fe6f2fed 100644
--- a/mesonbuild/modules/hotdoc.py
+++ b/mesonbuild/modules/hotdoc.py
@@ -20,6 +20,7 @@
from ..interpreter.type_checking import NoneType
from ..mesonlib import File, MesonException
from ..programs import ExternalProgram
+from ..options import OptionKey
if T.TYPE_CHECKING:
from typing_extensions import TypedDict
@@ -330,7 +331,7 @@ def make_targets(self) -> T.Tuple[HotdocTarget, mesonlib.ExecutableSerialisation
for path in self.include_paths:
self.cmd.extend(['--include-path', path])
- if self.state.environment.coredata.get_option(mesonlib.OptionKey('werror', subproject=self.state.subproject)):
+ if self.state.environment.coredata.get_option(OptionKey('werror', subproject=self.state.subproject)):
self.cmd.append('--fatal-warnings')
self.generate_hotdoc_config()
diff --git a/mesonbuild/modules/i18n.py b/mesonbuild/modules/i18n.py
index 27c9dc00e9b8..551e0b36fab6 100644
--- a/mesonbuild/modules/i18n.py
+++ b/mesonbuild/modules/i18n.py
@@ -10,6 +10,7 @@
from . import ExtensionModule, ModuleReturnValue, ModuleInfo
from .. import build
from .. import mesonlib
+from ..options import OptionKey
from .. import mlog
from ..interpreter.type_checking import CT_BUILD_BY_DEFAULT, CT_INPUT_KW, INSTALL_TAG_KW, OUTPUT_KW, INSTALL_DIR_KW, INSTALL_KW, NoneType, in_set_validator
from ..interpreterbase import FeatureNew, InvalidArguments
@@ -277,7 +278,7 @@ def gettext(self, state: 'ModuleState', args: T.Tuple[str], kwargs: 'Gettext') -
targets.append(pottarget)
install = kwargs['install']
- install_dir = kwargs['install_dir'] or state.environment.coredata.get_option(mesonlib.OptionKey('localedir'))
+ install_dir = kwargs['install_dir'] or state.environment.coredata.get_option(OptionKey('localedir'))
assert isinstance(install_dir, str), 'for mypy'
if not languages:
languages = read_linguas(path.join(state.environment.source_dir, state.subdir))
diff --git a/mesonbuild/modules/pkgconfig.py b/mesonbuild/modules/pkgconfig.py
index 1a730707986f..1bdf82931a94 100644
--- a/mesonbuild/modules/pkgconfig.py
+++ b/mesonbuild/modules/pkgconfig.py
@@ -13,6 +13,7 @@
from .. import build
from .. import dependencies
from .. import mesonlib
+from ..options import OptionKey
from .. import mlog
from ..options import BUILTIN_DIR_OPTIONS
from ..dependencies.pkgconfig import PkgConfigDependency, PkgConfigInterface
@@ -126,7 +127,7 @@ def _check_generated_pc_deprecation(self, obj: T.Union[build.CustomTarget, build
'to generate() method instead of first positional '
'argument.', 'Adding', mlog.bold(data.display_name),
'to "Requires" field, but this is a deprecated '
- 'behaviour that will change in a future version '
+ 'behaviour that will change in version 2.0 '
'of Meson. Please report the issue if this '
'warning cannot be avoided in your case.',
location=data.location)
@@ -194,6 +195,13 @@ def _process_libs(
if obj.found():
if obj.objects:
raise mesonlib.MesonException('.pc file cannot refer to individual object files.')
+
+ # Ensure BothLibraries are resolved:
+ if self.pub_libs and isinstance(self.pub_libs[0], build.StaticLibrary):
+ obj = obj.get_as_static(recursive=True)
+ else:
+ obj = obj.get_as_shared(recursive=True)
+
processed_libs += obj.get_link_args()
processed_cflags += obj.get_compile_args()
self._add_lib_dependencies(obj.libraries, obj.whole_libraries, obj.ext_deps, public, private_external_deps=True)
@@ -482,7 +490,7 @@ def _generate_pkgconfig_file(self, state: ModuleState, deps: DependenciesHelper,
srcdir = PurePath(state.environment.get_source_dir())
else:
outdir = state.environment.scratch_dir
- prefix = PurePath(_as_str(coredata.get_option(mesonlib.OptionKey('prefix'))))
+ prefix = PurePath(_as_str(coredata.get_option(OptionKey('prefix'))))
if pkgroot:
pkgroot_ = PurePath(pkgroot)
if not pkgroot_.is_absolute():
@@ -499,7 +507,7 @@ def _generate_pkgconfig_file(self, state: ModuleState, deps: DependenciesHelper,
if optname == 'prefix':
ofile.write('prefix={}\n'.format(self._escape(prefix)))
else:
- dirpath = PurePath(_as_str(coredata.get_option(mesonlib.OptionKey(optname))))
+ dirpath = PurePath(_as_str(coredata.get_option(OptionKey(optname))))
ofile.write('{}={}\n'.format(optname, self._escape('${prefix}' / dirpath)))
if uninstalled and not dataonly:
ofile.write('srcdir={}\n'.format(self._escape(srcdir)))
@@ -694,15 +702,15 @@ def parse_variable_list(vardict: T.Dict[str, str]) -> T.List[T.Tuple[str, str]]:
pkgroot = pkgroot_name = kwargs['install_dir'] or default_install_dir
if pkgroot is None:
if mesonlib.is_freebsd():
- pkgroot = os.path.join(_as_str(state.environment.coredata.get_option(mesonlib.OptionKey('prefix'))), 'libdata', 'pkgconfig')
+ pkgroot = os.path.join(_as_str(state.environment.coredata.get_option(OptionKey('prefix'))), 'libdata', 'pkgconfig')
pkgroot_name = os.path.join('{prefix}', 'libdata', 'pkgconfig')
elif mesonlib.is_haiku():
- pkgroot = os.path.join(_as_str(state.environment.coredata.get_option(mesonlib.OptionKey('prefix'))), 'develop', 'lib', 'pkgconfig')
+ pkgroot = os.path.join(_as_str(state.environment.coredata.get_option(OptionKey('prefix'))), 'develop', 'lib', 'pkgconfig')
pkgroot_name = os.path.join('{prefix}', 'develop', 'lib', 'pkgconfig')
else:
- pkgroot = os.path.join(_as_str(state.environment.coredata.get_option(mesonlib.OptionKey('libdir'))), 'pkgconfig')
+ pkgroot = os.path.join(_as_str(state.environment.coredata.get_option(OptionKey('libdir'))), 'pkgconfig')
pkgroot_name = os.path.join('{libdir}', 'pkgconfig')
- relocatable = state.get_option('relocatable', module='pkgconfig')
+ relocatable = state.get_option('pkgconfig.relocatable')
self._generate_pkgconfig_file(state, deps, subdirs, name, description, url,
version, pcfile, conflicts, variables,
unescaped_variables, False, dataonly,
diff --git a/mesonbuild/modules/python.py b/mesonbuild/modules/python.py
index 30fc50bc2318..f828f0ebf272 100644
--- a/mesonbuild/modules/python.py
+++ b/mesonbuild/modules/python.py
@@ -22,7 +22,8 @@
InvalidArguments, typed_pos_args, typed_kwargs, KwargInfo,
FeatureNew, FeatureNewKwargs, disablerIfNotFound
)
-from ..mesonlib import MachineChoice, OptionKey
+from ..mesonlib import MachineChoice
+from ..options import OptionKey
from ..programs import ExternalProgram, NonExistingExternalProgram
if T.TYPE_CHECKING:
@@ -82,13 +83,13 @@ def _get_path(self, state: T.Optional['ModuleState'], key: str) -> str:
if not state:
# This happens only from run_project_tests.py
return rel_path
- value = T.cast('str', state.get_option(f'{key}dir', module='python'))
+ value = T.cast('str', state.get_option(f'python.{key}dir'))
if value:
- if state.is_user_defined_option('install_env', module='python'):
+ if state.is_user_defined_option('python.install_env'):
raise mesonlib.MesonException(f'python.{key}dir and python.install_env are mutually exclusive')
return value
- install_env = state.get_option('install_env', module='python')
+ install_env = state.get_option('python.install_env')
if install_env == 'auto':
install_env = 'venv' if self.info['is_venv'] else 'system'
@@ -112,7 +113,7 @@ class PythonInstallation(_ExternalProgramHolder['PythonExternalProgram']):
def __init__(self, python: 'PythonExternalProgram', interpreter: 'Interpreter'):
_ExternalProgramHolder.__init__(self, python, interpreter)
info = python.info
- prefix = self.interpreter.environment.coredata.get_option(mesonlib.OptionKey('prefix'))
+ prefix = self.interpreter.environment.coredata.get_option(OptionKey('prefix'))
assert isinstance(prefix, str), 'for mypy'
self.variables = info['variables']
self.suffix = info['suffix']
@@ -168,7 +169,7 @@ def extension_module_method(self, args: T.Tuple[str, T.List[BuildTargetSource]],
self.current_node)
limited_api_version = kwargs.pop('limited_api')
- allow_limited_api = self.interpreter.environment.coredata.get_option(OptionKey('allow_limited_api', module='python'))
+ allow_limited_api = self.interpreter.environment.coredata.get_option(OptionKey('python.allow_limited_api'))
if limited_api_version != '' and allow_limited_api:
target_suffix = self.limited_api_suffix
@@ -373,7 +374,7 @@ def __init__(self, interpreter: 'Interpreter') -> None:
def _get_install_scripts(self) -> T.List[mesonlib.ExecutableSerialisation]:
backend = self.interpreter.backend
ret = []
- optlevel = self.interpreter.environment.coredata.get_option(mesonlib.OptionKey('bytecompile', module='python'))
+ optlevel = self.interpreter.environment.coredata.get_option(OptionKey('python.bytecompile'))
if optlevel == -1:
return ret
if not any(PythonExternalProgram.run_bytecompile.values()):
diff --git a/mesonbuild/modules/python3.py b/mesonbuild/modules/python3.py
index 45ad850aa129..2e6779ab2b5f 100644
--- a/mesonbuild/modules/python3.py
+++ b/mesonbuild/modules/python3.py
@@ -7,7 +7,7 @@
import typing as T
from .. import mesonlib
-from . import ExtensionModule, ModuleInfo, ModuleState
+from . import ExtensionModule, ModuleInfo
from ..build import (
BuildTarget, CustomTarget, CustomTargetIndex, ExtractedObjects,
GeneratedList, SharedModule, StructuredSources, known_shmod_kwargs
@@ -17,6 +17,7 @@
from ..programs import ExternalProgram
if T.TYPE_CHECKING:
+ from . import ModuleState
from ..interpreter.interpreter import BuildTargetSource
from ..interpreter.kwargs import SharedModule as SharedModuleKW
diff --git a/mesonbuild/modules/rust.py b/mesonbuild/modules/rust.py
index a8e22541c164..1368c4c1970f 100644
--- a/mesonbuild/modules/rust.py
+++ b/mesonbuild/modules/rust.py
@@ -86,7 +86,7 @@ def __init__(self, interpreter: Interpreter) -> None:
def test(self, state: ModuleState, args: T.Tuple[str, BuildTarget], kwargs: FuncTest) -> ModuleReturnValue:
"""Generate a rust test target from a given rust target.
- Rust puts it's unitests inside it's main source files, unlike most
+ Rust puts its unitests inside its main source files, unlike most
languages that put them in external files. This means that normally
you have to define two separate targets with basically the same
arguments to get tests:
@@ -207,7 +207,7 @@ def test(self, state: ModuleState, args: T.Tuple[str, BuildTarget], kwargs: Func
DEPENDENCIES_KW.evolve(since='1.0.0'),
)
def bindgen(self, state: ModuleState, args: T.List, kwargs: FuncBindgen) -> ModuleReturnValue:
- """Wrapper around bindgen to simplify it's use.
+ """Wrapper around bindgen to simplify its use.
The main thing this simplifies is the use of `include_directory`
objects, instead of having to pass a plethora of `-I` arguments.
@@ -269,7 +269,7 @@ def bindgen(self, state: ModuleState, args: T.List, kwargs: FuncBindgen) -> Modu
raise InterpreterException(f'Unknown file type extension for: {name}')
# We only want include directories and defines, other things may not be valid
- cargs = state.get_option('args', state.subproject, lang=language)
+ cargs = state.get_option(f'{language}_args', state.subproject)
assert isinstance(cargs, list), 'for mypy'
for a in itertools.chain(state.global_args.get(language, []), state.project_args.get(language, []), cargs):
if a.startswith(('-I', '/I', '-D', '/D', '-U', '/U')):
@@ -280,7 +280,7 @@ def bindgen(self, state: ModuleState, args: T.List, kwargs: FuncBindgen) -> Modu
# Add the C++ standard to the clang arguments. Attempt to translate VS
# extension versions into the nearest standard version
- std = state.get_option('std', lang=language)
+ std = state.get_option(f'{language}_std')
assert isinstance(std, str), 'for mypy'
if std.startswith('vc++'):
if std.endswith('latest'):
diff --git a/mesonbuild/mparser.py b/mesonbuild/mparser.py
index 4e359b3b9f20..4f43455468c6 100644
--- a/mesonbuild/mparser.py
+++ b/mesonbuild/mparser.py
@@ -363,7 +363,7 @@ def append(self, statement: BaseNode) -> None:
def set_kwarg(self, name: IdNode, value: BaseNode) -> None:
if any((isinstance(x, IdNode) and name.value == x.value) for x in self.kwargs):
mlog.warning(f'Keyword argument "{name.value}" defined multiple times.', location=self)
- mlog.warning('This will be an error in future Meson releases.')
+ mlog.warning('This will be an error in Meson 2.0.')
self.kwargs[name] = value
def set_kwarg_no_check(self, name: BaseNode, value: BaseNode) -> None:
diff --git a/mesonbuild/msetup.py b/mesonbuild/msetup.py
index 47b40af331c3..e634c05ab5aa 100644
--- a/mesonbuild/msetup.py
+++ b/mesonbuild/msetup.py
@@ -11,6 +11,7 @@
from . import build, coredata, environment, interpreter, mesonlib, mintro, mlog
from .mesonlib import MesonException
+from .options import OptionKey
if T.TYPE_CHECKING:
from typing_extensions import Protocol
@@ -164,7 +165,7 @@ def validate_dirs(self) -> T.Tuple[str, str]:
if not self.options.reconfigure and not self.options.wipe:
print('Directory already configured.\n\n'
'Just run your build command (e.g. ninja) and Meson will regenerate as necessary.\n'
- 'Run "meson setup --reconfigure to force Meson to regenerate.\n\n'
+ 'Run "meson setup --reconfigure" to force Meson to regenerate.\n\n'
'If build failures persist, run "meson setup --wipe" to rebuild from scratch\n'
'using the same options as passed when configuring the build.')
if self.options.cmd_line_options:
@@ -320,10 +321,10 @@ def run_genvslite_setup(options: CMDOptions) -> None:
# invoke the appropriate 'meson compile ...' build commands upon the normal visual studio build/rebuild/clean actions, instead of using
# the native VS/msbuild system.
builddir_prefix = options.builddir
- genvsliteval = options.cmd_line_options.pop(mesonlib.OptionKey('genvslite'))
+ genvsliteval = options.cmd_line_options.pop(OptionKey('genvslite'))
# The command line may specify a '--backend' option, which doesn't make sense in conjunction with
# '--genvslite', where we always want to use a ninja back end -
- k_backend = mesonlib.OptionKey('backend')
+ k_backend = OptionKey('backend')
if k_backend in options.cmd_line_options.keys():
if options.cmd_line_options[k_backend] != 'ninja':
raise MesonException('Explicitly specifying a backend option with \'genvslite\' is not necessary '
@@ -336,12 +337,12 @@ def run_genvslite_setup(options: CMDOptions) -> None:
for buildtypestr in buildtypes_list:
options.builddir = f'{builddir_prefix}_{buildtypestr}' # E.g. builddir_release
- options.cmd_line_options[mesonlib.OptionKey('buildtype')] = buildtypestr
+ options.cmd_line_options[OptionKey('buildtype')] = buildtypestr
app = MesonApp(options)
vslite_ctx[buildtypestr] = app.generate(capture=True)
#Now for generating the 'lite' solution and project files, which will use these builds we've just set up, above.
options.builddir = f'{builddir_prefix}_vs'
- options.cmd_line_options[mesonlib.OptionKey('genvslite')] = genvsliteval
+ options.cmd_line_options[OptionKey('genvslite')] = genvsliteval
app = MesonApp(options)
app.generate(capture=False, vslite_ctx=vslite_ctx)
@@ -353,11 +354,11 @@ def run(options: T.Union[CMDOptions, T.List[str]]) -> int:
coredata.parse_cmd_line_options(options)
# Msetup doesn't actually use this option, but we pass msetup options to
- # mconf, and it does. We won't actally hit the path that uses it, but don't
+ # mconf, and it does. We won't actually hit the path that uses it, but don't
# lie
options.pager = False
- if mesonlib.OptionKey('genvslite') in options.cmd_line_options.keys():
+ if OptionKey('genvslite') in options.cmd_line_options.keys():
run_genvslite_setup(options)
else:
app = MesonApp(options)
diff --git a/mesonbuild/mtest.py b/mesonbuild/mtest.py
index c0ddb30bacf7..c417bc0b38b7 100644
--- a/mesonbuild/mtest.py
+++ b/mesonbuild/mtest.py
@@ -35,8 +35,9 @@
from . import mlog
from .coredata import MesonVersionMismatchException, major_versions_differ
from .coredata import version as coredata_version
-from .mesonlib import (MesonException, OptionKey, OrderedSet, RealPathAction,
+from .mesonlib import (MesonException, OrderedSet, RealPathAction,
get_wine_shortpath, join_args, split_args, setup_vsenv)
+from .options import OptionKey
from .mintro import get_infodir, load_info_file
from .programs import ExternalProgram
from .backend.backends import TestProtocol, TestSerialisation
@@ -98,13 +99,17 @@ def uniwidth(s: str) -> int:
def determine_worker_count() -> int:
varname = 'MESON_TESTTHREADS'
+ num_workers = 0
if varname in os.environ:
try:
num_workers = int(os.environ[varname])
+ if num_workers < 0:
+ raise ValueError
except ValueError:
print(f'Invalid value in {varname}, using 1 thread.')
num_workers = 1
- else:
+
+ if num_workers == 0:
try:
# Fails in some weird environments such as Debian
# reproducible build.
@@ -1193,7 +1198,7 @@ async def read_decode(reader: asyncio.StreamReader,
except asyncio.LimitOverrunError as e:
line_bytes = await reader.readexactly(e.consumed)
if line_bytes:
- line = decode(line_bytes)
+ line = decode(line_bytes).replace('\r\n', '\n')
stdo_lines.append(line)
if console_mode is ConsoleUser.STDOUT:
print(line, end='', flush=True)
diff --git a/mesonbuild/munstable_coredata.py b/mesonbuild/munstable_coredata.py
index df045a104636..409b514b608e 100644
--- a/mesonbuild/munstable_coredata.py
+++ b/mesonbuild/munstable_coredata.py
@@ -5,7 +5,8 @@
from . import coredata as cdata
-from .mesonlib import MachineChoice, OptionKey
+from .mesonlib import MachineChoice
+from .options import OptionKey
import os.path
import pprint
diff --git a/mesonbuild/optinterpreter.py b/mesonbuild/optinterpreter.py
index ffa46cda650e..4688ee4c4f49 100644
--- a/mesonbuild/optinterpreter.py
+++ b/mesonbuild/optinterpreter.py
@@ -6,18 +6,20 @@
import re
import typing as T
-from . import coredata
from . import options
from . import mesonlib
+from .options import OptionKey
from . import mparser
from . import mlog
from .interpreterbase import FeatureNew, FeatureDeprecated, typed_pos_args, typed_kwargs, ContainerTypeInfo, KwargInfo
from .interpreter.type_checking import NoneType, in_set_validator
if T.TYPE_CHECKING:
+ from . import coredata
from .interpreterbase import TYPE_var, TYPE_kwargs
from .interpreterbase import SubProject
from typing_extensions import TypedDict, Literal
+ from .options import OptionStore
_DEPRECATED_ARGS = T.Union[bool, str, T.Dict[str, str], T.List[str]]
@@ -64,7 +66,7 @@ class OptionException(mesonlib.MesonException):
class OptionInterpreter:
- def __init__(self, subproject: 'SubProject') -> None:
+ def __init__(self, optionstore: 'OptionStore', subproject: 'SubProject') -> None:
self.options: 'coredata.MutableKeyedOptionDictType' = {}
self.subproject = subproject
self.option_types: T.Dict[str, T.Callable[..., options.UserOption]] = {
@@ -75,6 +77,7 @@ def __init__(self, subproject: 'SubProject') -> None:
'array': self.string_array_parser,
'feature': self.feature_parser,
}
+ self.optionstore = optionstore
def process(self, option_file: str) -> None:
try:
@@ -188,8 +191,8 @@ def func_option(self, args: T.Tuple[str], kwargs: 'FuncOptionArgs') -> None:
opt_name = args[0]
if optname_regex.search(opt_name) is not None:
raise OptionException('Option names can only contain letters, numbers or dashes.')
- key = mesonlib.OptionKey.from_string(opt_name).evolve(subproject=self.subproject)
- if not key.is_project():
+ key = OptionKey.from_string(opt_name).evolve(subproject=self.subproject)
+ if self.optionstore.is_reserved_name(key):
raise OptionException('Option name %s is reserved.' % opt_name)
opt_type = kwargs['type']
diff --git a/mesonbuild/options.py b/mesonbuild/options.py
index d83a312886d5..1566f940c98c 100644
--- a/mesonbuild/options.py
+++ b/mesonbuild/options.py
@@ -1,13 +1,16 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2013-2024 Contributors to the The Meson project
+# Copyright © 2019-2024 Intel Corporation
+from __future__ import annotations
from collections import OrderedDict
from itertools import chain
+from functools import total_ordering
import argparse
+import typing as T
from .mesonlib import (
HoldableObject,
- OptionKey,
default_prefix,
default_datadir,
default_includedir,
@@ -20,12 +23,19 @@
default_sysconfdir,
MesonException,
listify_array_value,
+ MachineChoice,
)
-
from . import mlog
-import typing as T
-from typing import ItemsView
+if T.TYPE_CHECKING:
+ from typing_extensions import TypedDict
+
+ class ArgparseKWs(TypedDict, total=False):
+
+ action: str
+ dest: str
+ default: str
+ choices: T.List
DEFAULT_YIELDING = False
@@ -37,6 +47,203 @@
buildtypelist = ['plain', 'debug', 'debugoptimized', 'release', 'minsize', 'custom']
+# This is copied from coredata. There is no way to share this, because this
+# is used in the OptionKey constructor, and the coredata lists are
+# OptionKeys...
+_BUILTIN_NAMES = {
+ 'prefix',
+ 'bindir',
+ 'datadir',
+ 'includedir',
+ 'infodir',
+ 'libdir',
+ 'licensedir',
+ 'libexecdir',
+ 'localedir',
+ 'localstatedir',
+ 'mandir',
+ 'sbindir',
+ 'sharedstatedir',
+ 'sysconfdir',
+ 'auto_features',
+ 'backend',
+ 'buildtype',
+ 'debug',
+ 'default_library',
+ 'default_both_libraries',
+ 'errorlogs',
+ 'genvslite',
+ 'install_umask',
+ 'layout',
+ 'optimization',
+ 'prefer_static',
+ 'stdsplit',
+ 'strip',
+ 'unity',
+ 'unity_size',
+ 'warning_level',
+ 'werror',
+ 'wrap_mode',
+ 'force_fallback_for',
+ 'pkg_config_path',
+ 'cmake_prefix_path',
+ 'vsenv',
+}
+
+@total_ordering
+class OptionKey:
+
+ """Represents an option key in the various option dictionaries.
+
+ This provides a flexible, powerful way to map option names from their
+ external form (things like subproject:build.option) to something that
+ internally easier to reason about and produce.
+ """
+
+ __slots__ = ['name', 'subproject', 'machine', '_hash']
+
+ name: str
+ subproject: str
+ machine: MachineChoice
+ _hash: int
+
+ def __init__(self, name: str, subproject: str = '',
+ machine: MachineChoice = MachineChoice.HOST):
+ # the _type option to the constructor is kinda private. We want to be
+ # able to save the state and avoid the lookup function when
+ # pickling/unpickling, but we need to be able to calculate it when
+ # constructing a new OptionKey
+ object.__setattr__(self, 'name', name)
+ object.__setattr__(self, 'subproject', subproject)
+ object.__setattr__(self, 'machine', machine)
+ object.__setattr__(self, '_hash', hash((name, subproject, machine)))
+
+ def __setattr__(self, key: str, value: T.Any) -> None:
+ raise AttributeError('OptionKey instances do not support mutation.')
+
+ def __getstate__(self) -> T.Dict[str, T.Any]:
+ return {
+ 'name': self.name,
+ 'subproject': self.subproject,
+ 'machine': self.machine,
+ }
+
+ def __setstate__(self, state: T.Dict[str, T.Any]) -> None:
+ """De-serialize the state of a pickle.
+
+ This is very clever. __init__ is not a constructor, it's an
+ initializer, therefore it's safe to call more than once. We create a
+ state in the custom __getstate__ method, which is valid to pass
+ splatted to the initializer.
+ """
+ # Mypy doesn't like this, because it's so clever.
+ self.__init__(**state) # type: ignore
+
+ def __hash__(self) -> int:
+ return self._hash
+
+ def _to_tuple(self) -> T.Tuple[str, str, str, MachineChoice, str]:
+ return (self.subproject, self.machine, self.name)
+
+ def __eq__(self, other: object) -> bool:
+ if isinstance(other, OptionKey):
+ return self._to_tuple() == other._to_tuple()
+ return NotImplemented
+
+ def __lt__(self, other: object) -> bool:
+ if isinstance(other, OptionKey):
+ return self._to_tuple() < other._to_tuple()
+ return NotImplemented
+
+ def __str__(self) -> str:
+ out = self.name
+ if self.machine is MachineChoice.BUILD:
+ out = f'build.{out}'
+ if self.subproject:
+ out = f'{self.subproject}:{out}'
+ return out
+
+ def __repr__(self) -> str:
+ return f'OptionKey({self.name!r}, {self.subproject!r}, {self.machine!r})'
+
+ @classmethod
+ def from_string(cls, raw: str) -> 'OptionKey':
+ """Parse the raw command line format into a three part tuple.
+
+ This takes strings like `mysubproject:build.myoption` and Creates an
+ OptionKey out of them.
+ """
+ try:
+ subproject, raw2 = raw.split(':')
+ except ValueError:
+ subproject, raw2 = '', raw
+
+ for_machine = MachineChoice.HOST
+ try:
+ prefix, raw3 = raw2.split('.')
+ if prefix == 'build':
+ for_machine = MachineChoice.BUILD
+ else:
+ raw3 = raw2
+ except ValueError:
+ raw3 = raw2
+
+ opt = raw3
+ assert ':' not in opt
+ assert opt.count('.') < 2
+
+ return cls(opt, subproject, for_machine)
+
+ def evolve(self, name: T.Optional[str] = None, subproject: T.Optional[str] = None,
+ machine: T.Optional[MachineChoice] = None) -> 'OptionKey':
+ """Create a new copy of this key, but with altered members.
+
+ For example:
+ >>> a = OptionKey('foo', '', MachineChoice.Host)
+ >>> b = OptionKey('foo', 'bar', MachineChoice.Host)
+ >>> b == a.evolve(subproject='bar')
+ True
+ """
+ # We have to be a little clever with lang here, because lang is valid
+ # as None, for non-compiler options
+ return OptionKey(
+ name if name is not None else self.name,
+ subproject if subproject is not None else self.subproject,
+ machine if machine is not None else self.machine,
+ )
+
+ def as_root(self) -> 'OptionKey':
+ """Convenience method for key.evolve(subproject='')."""
+ return self.evolve(subproject='')
+
+ def as_build(self) -> 'OptionKey':
+ """Convenience method for key.evolve(machine=MachineChoice.BUILD)."""
+ return self.evolve(machine=MachineChoice.BUILD)
+
+ def as_host(self) -> 'OptionKey':
+ """Convenience method for key.evolve(machine=MachineChoice.HOST)."""
+ return self.evolve(machine=MachineChoice.HOST)
+
+ def is_project_hack_for_optionsview(self) -> bool:
+ """This method will be removed once we can delete OptionsView."""
+ import sys
+ sys.exit('FATAL internal error. This should not make it into an actual release. File a bug.')
+
+ def has_module_prefix(self) -> bool:
+ return '.' in self.name
+
+ def get_module_prefix(self) -> T.Optional[str]:
+ if self.has_module_prefix():
+ return self.name.split('.', 1)[0]
+ return None
+
+ def without_module_prefix(self) -> 'OptionKey':
+ if self.has_module_prefix():
+ newname = self.name.split('.', 1)[1]
+ return self.evolve(newname)
+ return self
+
+
class UserOption(T.Generic[_T], HoldableObject):
def __init__(self, name: str, description: str, choices: T.Optional[T.Union[str, T.List[_T]]],
yielding: bool,
@@ -212,7 +419,7 @@ def validate_value(self, value: T.Union[str, T.List[str]]) -> T.List[str]:
if not self.allow_dups and len(set(newvalue)) != len(newvalue):
msg = 'Duplicated values in array option is deprecated. ' \
- 'This will become a hard error in the future.'
+ 'This will become a hard error in meson 2.0.'
mlog.deprecation(msg)
for i in newvalue:
if not isinstance(i, str):
@@ -303,7 +510,7 @@ def validate_value(self, value: T.Union[str, T.List[str]]) -> str:
mlog.deprecation(
f'None of the values {candidates} are supported by the {self.lang} compiler.\n' +
f'However, the deprecated {std} std currently falls back to {newstd}.\n' +
- 'This will be an error in the future.\n' +
+ 'This will be an error in meson 2.0.\n' +
'If the project supports both GNU and MSVC compilers, a value such as\n' +
'"c_std=gnu11,c11" specifies that GNU is preferred but it can safely fallback to plain c11.')
return newstd
@@ -370,7 +577,7 @@ def prefixed_default(self, name: 'OptionKey', prefix: str = '') -> T.Any:
return self.default
def add_to_argparse(self, name: str, parser: argparse.ArgumentParser, help_suffix: str) -> None:
- kwargs = OrderedDict()
+ kwargs: ArgparseKWs = {}
c = self._argparse_choices()
b = self._argparse_action()
@@ -425,6 +632,7 @@ def add_to_argparse(self, name: str, parser: argparse.ArgumentParser, help_suffi
(OptionKey('debug'), BuiltinOption(UserBooleanOption, 'Enable debug symbols and other information', True)),
(OptionKey('default_library'), BuiltinOption(UserComboOption, 'Default library type', 'shared', choices=['shared', 'static', 'both'],
yielding=False)),
+ (OptionKey('default_both_libraries'), BuiltinOption(UserComboOption, 'Default library type for both_libraries', 'shared', choices=['shared', 'static', 'auto'])),
(OptionKey('errorlogs'), BuiltinOption(UserBooleanOption, "Whether to print the logs from failing tests", True)),
(OptionKey('install_umask'), BuiltinOption(UserUmaskOption, 'Default umask to apply on permissions of installed files', '022')),
(OptionKey('layout'), BuiltinOption(UserComboOption, 'Build directory layout', 'mirror', choices=['mirror', 'flat'])),
@@ -441,19 +649,19 @@ def add_to_argparse(self, name: str, parser: argparse.ArgumentParser, help_suffi
(OptionKey('vsenv'), BuiltinOption(UserBooleanOption, 'Activate Visual Studio environment', False, readonly=True)),
# Pkgconfig module
- (OptionKey('relocatable', module='pkgconfig'),
+ (OptionKey('pkgconfig.relocatable'),
BuiltinOption(UserBooleanOption, 'Generate pkgconfig files as relocatable', False)),
# Python module
- (OptionKey('bytecompile', module='python'),
+ (OptionKey('python.bytecompile'),
BuiltinOption(UserIntegerOption, 'Whether to compile bytecode', (-1, 2, 0))),
- (OptionKey('install_env', module='python'),
+ (OptionKey('python.install_env'),
BuiltinOption(UserComboOption, 'Which python environment to install to', 'prefix', choices=['auto', 'prefix', 'system', 'venv'])),
- (OptionKey('platlibdir', module='python'),
+ (OptionKey('python.platlibdir'),
BuiltinOption(UserStringOption, 'Directory for site-specific, platform-specific files.', '')),
- (OptionKey('purelibdir', module='python'),
+ (OptionKey('python.purelibdir'),
BuiltinOption(UserStringOption, 'Directory for site-specific, non-platform-specific files.', '')),
- (OptionKey('allow_limited_api', module='python'),
+ (OptionKey('python.allow_limited_api'),
BuiltinOption(UserBooleanOption, 'Whether to allow use of the Python Limited API', True)),
])
@@ -470,15 +678,19 @@ def add_to_argparse(self, name: str, parser: argparse.ArgumentParser, help_suffi
OptionKey('sysconfdir'): {'/usr': '/etc'},
OptionKey('localstatedir'): {'/usr': '/var', '/usr/local': '/var/local'},
OptionKey('sharedstatedir'): {'/usr': '/var/lib', '/usr/local': '/var/local/lib'},
- OptionKey('platlibdir', module='python'): {},
- OptionKey('purelibdir', module='python'): {},
+ OptionKey('python.platlibdir'): {},
+ OptionKey('python.purelibdir'): {},
}
class OptionStore:
- def __init__(self):
+ def __init__(self) -> None:
self.d: T.Dict['OptionKey', 'UserOption[T.Any]'] = {}
+ self.project_options: T.Set[OptionKey] = set()
+ self.module_options: T.Set[OptionKey] = set()
+ from .compilers import all_languages
+ self.all_languages = set(all_languages)
- def __len__(self):
+ def __len__(self) -> int:
return len(self.d)
def ensure_key(self, key: T.Union[OptionKey, str]) -> OptionKey:
@@ -492,47 +704,116 @@ def get_value_object(self, key: T.Union[OptionKey, str]) -> 'UserOption[T.Any]':
def get_value(self, key: T.Union[OptionKey, str]) -> 'T.Any':
return self.get_value_object(key).value
- def add_system_option(self, key: T.Union[OptionKey, str], valobj: 'UserOption[T.Any]'):
+ def add_system_option(self, key: T.Union[OptionKey, str], valobj: 'UserOption[T.Any]') -> None:
+ key = self.ensure_key(key)
+ if '.' in key.name:
+ raise MesonException(f'Internal error: non-module option has a period in its name {key.name}.')
+ self.add_system_option_internal(key, valobj)
+
+ def add_system_option_internal(self, key: T.Union[OptionKey, str], valobj: 'UserOption[T.Any]') -> None:
key = self.ensure_key(key)
+ assert isinstance(valobj, UserOption)
self.d[key] = valobj
- def add_project_option(self, key: T.Union[OptionKey, str], valobj: 'UserOption[T.Any]'):
+ def add_compiler_option(self, language: str, key: T.Union[OptionKey, str], valobj: 'UserOption[T.Any]') -> None:
+ key = self.ensure_key(key)
+ if not key.name.startswith(language + '_'):
+ raise MesonException(f'Internal error: all compiler option names must start with language prefix. ({key.name} vs {language}_)')
+ self.add_system_option(key, valobj)
+
+ def add_project_option(self, key: T.Union[OptionKey, str], valobj: 'UserOption[T.Any]') -> None:
key = self.ensure_key(key)
self.d[key] = valobj
+ self.project_options.add(key)
+
+ def add_module_option(self, modulename: str, key: T.Union[OptionKey, str], valobj: 'UserOption[T.Any]') -> None:
+ key = self.ensure_key(key)
+ if key.name.startswith('build.'):
+ raise MesonException('FATAL internal error: somebody goofed option handling.')
+ if not key.name.startswith(modulename + '.'):
+ raise MesonException('Internal error: module option name {key.name} does not start with module prefix {modulename}.')
+ self.add_system_option_internal(key, valobj)
+ self.module_options.add(key)
def set_value(self, key: T.Union[OptionKey, str], new_value: 'T.Any') -> bool:
key = self.ensure_key(key)
return self.d[key].set_value(new_value)
# FIXME, this should be removed.or renamed to "change_type_of_existing_object" or something like that
- def set_value_object(self, key: T.Union[OptionKey, str], new_object: 'UserOption[T.Any]') -> bool:
+ def set_value_object(self, key: T.Union[OptionKey, str], new_object: 'UserOption[T.Any]') -> None:
key = self.ensure_key(key)
self.d[key] = new_object
- def remove(self, key):
+ def remove(self, key: OptionKey) -> None:
del self.d[key]
- def __contains__(self, key):
+ def __contains__(self, key: OptionKey) -> bool:
key = self.ensure_key(key)
return key in self.d
- def __repr__(self):
+ def __repr__(self) -> str:
return repr(self.d)
- def keys(self):
+ def keys(self) -> T.KeysView[OptionKey]:
return self.d.keys()
- def values(self):
+ def values(self) -> T.ValuesView[UserOption[T.Any]]:
return self.d.values()
- def items(self) -> ItemsView['OptionKey', 'UserOption[T.Any]']:
+ def items(self) -> T.ItemsView['OptionKey', 'UserOption[T.Any]']:
return self.d.items()
- def update(self, *args, **kwargs):
- return self.d.update(*args, **kwargs)
+ # FIXME: this method must be deleted and users moved to use "add_xxx_option"s instead.
+ def update(self, **kwargs: UserOption[T.Any]) -> None:
+ self.d.update(**kwargs)
- def setdefault(self, k, o):
+ def setdefault(self, k: OptionKey, o: UserOption[T.Any]) -> UserOption[T.Any]:
return self.d.setdefault(k, o)
- def get(self, *args, **kwargs) -> UserOption:
- return self.d.get(*args, **kwargs)
+ def get(self, o: OptionKey, default: T.Optional[UserOption[T.Any]] = None) -> T.Optional[UserOption[T.Any]]:
+ return self.d.get(o, default)
+
+ def is_project_option(self, key: OptionKey) -> bool:
+ """Convenience method to check if this is a project option."""
+ return key in self.project_options
+
+ def is_reserved_name(self, key: OptionKey) -> bool:
+ if key.name in _BUILTIN_NAMES:
+ return True
+ if '_' not in key.name:
+ return False
+ prefix = key.name.split('_')[0]
+ # Pylint seems to think that it is faster to build a set object
+ # and all related work just to test whether a string has one of two
+ # values. It is not, thank you very much.
+ if prefix in ('b', 'backend'): # pylint: disable=R6201
+ return True
+ if prefix in self.all_languages:
+ return True
+ return False
+
+ def is_builtin_option(self, key: OptionKey) -> bool:
+ """Convenience method to check if this is a builtin option."""
+ return key.name in _BUILTIN_NAMES or self.is_module_option(key)
+
+ def is_base_option(self, key: OptionKey) -> bool:
+ """Convenience method to check if this is a base option."""
+ return key.name.startswith('b_')
+
+ def is_backend_option(self, key: OptionKey) -> bool:
+ """Convenience method to check if this is a backend option."""
+ return key.name.startswith('backend_')
+
+ def is_compiler_option(self, key: OptionKey) -> bool:
+ """Convenience method to check if this is a compiler option."""
+
+ # FIXME, duplicate of is_reserved_name above. Should maybe store a cache instead.
+ if '_' not in key.name:
+ return False
+ prefix = key.name.split('_')[0]
+ if prefix in self.all_languages:
+ return True
+ return False
+
+ def is_module_option(self, key: OptionKey) -> bool:
+ return key in self.module_options
diff --git a/mesonbuild/programs.py b/mesonbuild/programs.py
index fbe241d99607..9ad38e126b60 100644
--- a/mesonbuild/programs.py
+++ b/mesonbuild/programs.py
@@ -25,14 +25,20 @@
class ExternalProgram(mesonlib.HoldableObject):
- """A program that is found on the system."""
+ """A program that is found on the system.
+ :param name: The name of the program
+ :param command: Optionally, an argument list constituting the command. Used when
+ you already know the command and do not want to search.
+ :param silent: Whether to print messages when initializing
+ :param search_dirs: A list of directories to search in first, followed by PATH
+ :param exclude_paths: A list of directories to exclude when searching in PATH"""
windows_exts = ('exe', 'msc', 'com', 'bat', 'cmd')
for_machine = MachineChoice.BUILD
def __init__(self, name: str, command: T.Optional[T.List[str]] = None,
- silent: bool = False, search_dir: T.Optional[str] = None,
- extra_search_dirs: T.Optional[T.List[str]] = None):
+ silent: bool = False, search_dirs: T.Optional[T.List[T.Optional[str]]] = None,
+ exclude_paths: T.Optional[T.List[str]] = None):
self.name = name
self.path: T.Optional[str] = None
self.cached_version: T.Optional[str] = None
@@ -51,13 +57,10 @@ def __init__(self, name: str, command: T.Optional[T.List[str]] = None,
else:
self.command = [cmd] + args
else:
- all_search_dirs = [search_dir]
- if extra_search_dirs:
- all_search_dirs += extra_search_dirs
- for d in all_search_dirs:
- self.command = self._search(name, d)
- if self.found():
- break
+ if search_dirs is None:
+ # For compat with old behaviour
+ search_dirs = [None]
+ self.command = self._search(name, search_dirs, exclude_paths)
if self.found():
# Set path to be the last item that is actually a file (in order to
@@ -119,7 +122,7 @@ def get_version(self, interpreter: T.Optional['Interpreter'] = None) -> str:
@classmethod
def from_bin_list(cls, env: 'Environment', for_machine: MachineChoice, name: str) -> 'ExternalProgram':
# There is a static `for_machine` for this class because the binary
- # always runs on the build platform. (It's host platform is our build
+ # always runs on the build platform. (Its host platform is our build
# platform.) But some external programs have a target platform, so this
# is what we are specifying here.
command = env.lookup_binary_entry(for_machine, name)
@@ -242,7 +245,7 @@ def _search_dir(self, name: str, search_dir: T.Optional[str]) -> T.Optional[list
return [trial_ext]
return None
- def _search_windows_special_cases(self, name: str, command: str) -> T.List[T.Optional[str]]:
+ def _search_windows_special_cases(self, name: str, command: T.Optional[str], exclude_paths: T.Optional[T.List[str]]) -> T.List[T.Optional[str]]:
'''
Lots of weird Windows quirks:
1. PATH search for @name returns files with extensions from PATHEXT,
@@ -278,31 +281,37 @@ def _search_windows_special_cases(self, name: str, command: str) -> T.List[T.Opt
# On Windows, interpreted scripts must have an extension otherwise they
# cannot be found by a standard PATH search. So we do a custom search
# where we manually search for a script with a shebang in PATH.
- search_dirs = self._windows_sanitize_path(os.environ.get('PATH', '')).split(';')
+ search_dirs = OrderedSet(self._windows_sanitize_path(os.environ.get('PATH', '')).split(';'))
+ if exclude_paths:
+ search_dirs.difference_update(exclude_paths)
for search_dir in search_dirs:
commands = self._search_dir(name, search_dir)
if commands:
return commands
return [None]
- def _search(self, name: str, search_dir: T.Optional[str]) -> T.List[T.Optional[str]]:
+ def _search(self, name: str, search_dirs: T.List[T.Optional[str]], exclude_paths: T.Optional[T.List[str]]) -> T.List[T.Optional[str]]:
'''
- Search in the specified dir for the specified executable by name
+ Search in the specified dirs for the specified executable by name
and if not found search in PATH
'''
- commands = self._search_dir(name, search_dir)
- if commands:
- return commands
+ for search_dir in search_dirs:
+ commands = self._search_dir(name, search_dir)
+ if commands:
+ return commands
# If there is a directory component, do not look in PATH
if os.path.dirname(name) and not os.path.isabs(name):
return [None]
# Do a standard search in PATH
- path = os.environ.get('PATH', None)
+ path = os.environ.get('PATH', os.defpath)
if mesonlib.is_windows() and path:
path = self._windows_sanitize_path(path)
+ if exclude_paths:
+ paths = OrderedSet(path.split(os.pathsep)).difference(exclude_paths)
+ path = os.pathsep.join(paths)
command = shutil.which(name, path=path)
if mesonlib.is_windows():
- return self._search_windows_special_cases(name, command)
+ return self._search_windows_special_cases(name, command, exclude_paths)
# On UNIX-like platforms, shutil.which() is enough to find
# all executables whether in PATH or with an absolute path
return [command]
@@ -340,10 +349,17 @@ class OverrideProgram(ExternalProgram):
"""A script overriding a program."""
+ def __init__(self, name: str, version: str, command: T.Optional[T.List[str]] = None,
+ silent: bool = False, search_dirs: T.Optional[T.List[T.Optional[str]]] = None,
+ exclude_paths: T.Optional[T.List[str]] = None):
+ self.cached_version = version
+ super().__init__(name, command=command, silent=silent,
+ search_dirs=search_dirs, exclude_paths=exclude_paths)
def find_external_program(env: 'Environment', for_machine: MachineChoice, name: str,
display_name: str, default_names: T.List[str],
- allow_default_for_cross: bool = True) -> T.Generator['ExternalProgram', None, None]:
+ allow_default_for_cross: bool = True,
+ exclude_paths: T.Optional[T.List[str]] = None) -> T.Generator['ExternalProgram', None, None]:
"""Find an external program, checking the cross file plus any default options."""
potential_names = OrderedSet(default_names)
potential_names.add(name)
@@ -361,8 +377,8 @@ def find_external_program(env: 'Environment', for_machine: MachineChoice, name:
# Fallback on hard-coded defaults, if a default binary is allowed for use
# with cross targets, or if this is not a cross target
if allow_default_for_cross or not (for_machine is MachineChoice.HOST and env.is_cross_build(for_machine)):
- for potential_path in default_names:
- mlog.debug(f'Trying a default {display_name} fallback at', potential_path)
- yield ExternalProgram(potential_path, silent=True)
+ for potential_name in default_names:
+ mlog.debug(f'Trying a default {display_name} fallback at', potential_name)
+ yield ExternalProgram(potential_name, silent=True, exclude_paths=exclude_paths)
else:
mlog.debug('Default target is not allowed for cross use')
diff --git a/mesonbuild/rewriter.py b/mesonbuild/rewriter.py
index 78517bf05f8b..919bd3847b13 100644
--- a/mesonbuild/rewriter.py
+++ b/mesonbuild/rewriter.py
@@ -385,7 +385,7 @@ def add_info(self, cmd_type: str, cmd_id: str, data: dict):
def print_info(self):
if self.info_dump is None:
return
- sys.stderr.write(json.dumps(self.info_dump, indent=2))
+ sys.stdout.write(json.dumps(self.info_dump, indent=2))
def on_error(self):
if self.skip_errors:
@@ -1044,6 +1044,7 @@ def generate_cmd(options) -> T.List[dict]:
}
def run(options):
+ mlog.redirect(True)
if not options.verbose:
mlog.set_quiet()
diff --git a/mesonbuild/scripts/clangtidy.py b/mesonbuild/scripts/clangtidy.py
index 1e0c4a5a396a..a922f8514062 100644
--- a/mesonbuild/scripts/clangtidy.py
+++ b/mesonbuild/scripts/clangtidy.py
@@ -6,15 +6,22 @@
import argparse
import subprocess
from pathlib import Path
+import tempfile
+import os
+import shutil
+import sys
from .run_tool import run_tool
+from ..environment import detect_clangtidy, detect_clangapply
import typing as T
-def run_clang_tidy(fname: Path, builddir: Path) -> subprocess.CompletedProcess:
- return subprocess.run(['clang-tidy', '-quiet', '-p', str(builddir), str(fname)])
-
-def run_clang_tidy_fix(fname: Path, builddir: Path) -> subprocess.CompletedProcess:
- return subprocess.run(['run-clang-tidy', '-fix', '-format', '-quiet', '-p', str(builddir), str(fname)])
+def run_clang_tidy(fname: Path, tidyexe: list, builddir: Path, fixesdir: T.Optional[Path]) -> subprocess.CompletedProcess:
+ args = []
+ if fixesdir is not None:
+ handle, name = tempfile.mkstemp(prefix=fname.name + '.', suffix='.yaml', dir=fixesdir)
+ os.close(handle)
+ args.extend(['-export-fixes', name])
+ return subprocess.run(tidyexe + args + ['-quiet', '-p', str(builddir), str(fname)])
def run(args: T.List[str]) -> int:
parser = argparse.ArgumentParser()
@@ -26,5 +33,34 @@ def run(args: T.List[str]) -> int:
srcdir = Path(options.sourcedir)
builddir = Path(options.builddir)
- run_func = run_clang_tidy_fix if options.fix else run_clang_tidy
- return run_tool('clang-tidy', srcdir, builddir, run_func, builddir)
+ tidyexe = detect_clangtidy()
+ if not tidyexe:
+ print(f'Could not execute clang-tidy "{" ".join(tidyexe)}"')
+ return 1
+
+ fixesdir: T.Optional[Path] = None
+ if options.fix:
+ applyexe = detect_clangapply()
+ if not applyexe:
+ print(f'Could not execute clang-apply-replacements "{" ".join(applyexe)}"')
+ return 1
+
+ fixesdir = builddir / 'meson-private' / 'clang-tidy-fix'
+ if fixesdir.is_dir():
+ shutil.rmtree(fixesdir)
+ elif fixesdir.exists():
+ fixesdir.unlink()
+ fixesdir.mkdir(parents=True)
+
+ tidyret = run_tool('clang-tidy', srcdir, builddir, run_clang_tidy, tidyexe, builddir, fixesdir)
+ if fixesdir is not None:
+ print('Applying fix-its...')
+ applyret = subprocess.run(applyexe + ['-format', '-style=file', '-ignore-insert-conflict', fixesdir]).returncode
+
+ if tidyret != 0:
+ print('Errors encountered while running clang-tidy', file=sys.stderr)
+ return tidyret
+ if fixesdir is not None and applyret != 0:
+ print('Errors encountered while running clang-apply-replacements', file=sys.stderr)
+ return applyret
+ return 0
diff --git a/mesonbuild/scripts/coverage.py b/mesonbuild/scripts/coverage.py
index 17a4a10ae55f..a4dfebfb9d9b 100644
--- a/mesonbuild/scripts/coverage.py
+++ b/mesonbuild/scripts/coverage.py
@@ -5,7 +5,7 @@
from mesonbuild import environment, mesonlib
-import argparse, re, sys, os, subprocess, pathlib, stat
+import argparse, re, sys, os, subprocess, pathlib, stat, shutil
import typing as T
def coverage(outputs: T.List[str], source_root: str, subproject_root: str, build_root: str, log_dir: str, use_llvm_cov: bool,
@@ -17,7 +17,7 @@ def coverage(outputs: T.List[str], source_root: str, subproject_root: str, build
gcovr_exe = None
else:
gcovr_exe, gcovr_version = environment.detect_gcovr(gcovr_exe)
- if llvm_cov_exe == '' or not mesonlib.exe_exists([llvm_cov_exe, '--version']):
+ if llvm_cov_exe == '' or shutil.which(llvm_cov_exe) is None:
llvm_cov_exe = None
lcov_exe, lcov_version, genhtml_exe = environment.detect_lcov_genhtml()
@@ -159,9 +159,14 @@ def coverage(outputs: T.List[str], source_root: str, subproject_root: str, build
htmloutdir = os.path.join(log_dir, 'coveragereport')
if not os.path.isdir(htmloutdir):
os.mkdir(htmloutdir)
+ # Use `--html-details` if gcovr version < 6.0, otherwise
+ # use `--html-nested`.
+ html_arg = '--html-details'
+ if mesonlib.version_compare(gcovr_version, '>=6.0'):
+ html_arg = '--html-nested'
subprocess.check_call(gcovr_base_cmd + gcovr_config +
['--html',
- '--html-nested',
+ html_arg,
'--print-summary',
'-o', os.path.join(htmloutdir, 'index.html'),
] + gcov_exe_args)
diff --git a/mesonbuild/scripts/env2mfile.py b/mesonbuild/scripts/env2mfile.py
index bc0101e26d5b..16051a871941 100755
--- a/mesonbuild/scripts/env2mfile.py
+++ b/mesonbuild/scripts/env2mfile.py
@@ -3,6 +3,7 @@
from __future__ import annotations
+from dataclasses import dataclass, field
import sys, os, subprocess, shutil
import shlex
import typing as T
@@ -15,12 +16,6 @@
if T.TYPE_CHECKING:
import argparse
-def has_for_build() -> bool:
- for cenv in envconfig.ENV_VAR_COMPILER_MAP.values():
- if os.environ.get(cenv + '_FOR_BUILD'):
- return True
- return False
-
# Note: when adding arguments, please also add them to the completion
# scripts in $MESONSRC/data/shell-completions/
def add_arguments(parser: 'argparse.ArgumentParser') -> None:
@@ -34,6 +29,8 @@ def add_arguments(parser: 'argparse.ArgumentParser') -> None:
help='Generate a cross compilation file.')
parser.add_argument('--native', default=False, action='store_true',
help='Generate a native compilation file.')
+ parser.add_argument('--use-for-build', default=False, action='store_true',
+ help='Use _FOR_BUILD envvars.')
parser.add_argument('--system', default=None,
help='Define system for cross compilation.')
parser.add_argument('--subsystem', default=None,
@@ -47,21 +44,21 @@ def add_arguments(parser: 'argparse.ArgumentParser') -> None:
parser.add_argument('--endian', default='little', choices=['big', 'little'],
help='Define endianness for cross compilation.')
+@dataclass
class MachineInfo:
- def __init__(self) -> None:
- self.compilers: T.Dict[str, T.List[str]] = {}
- self.binaries: T.Dict[str, T.List[str]] = {}
- self.properties: T.Dict[str, T.Union[str, T.List[str]]] = {}
- self.compile_args: T.Dict[str, T.List[str]] = {}
- self.link_args: T.Dict[str, T.List[str]] = {}
- self.cmake: T.Dict[str, T.Union[str, T.List[str]]] = {}
-
- self.system: T.Optional[str] = None
- self.subsystem: T.Optional[str] = None
- self.kernel: T.Optional[str] = None
- self.cpu: T.Optional[str] = None
- self.cpu_family: T.Optional[str] = None
- self.endian: T.Optional[str] = None
+ compilers: T.Dict[str, T.List[str]] = field(default_factory=dict)
+ binaries: T.Dict[str, T.List[str]] = field(default_factory=dict)
+ properties: T.Dict[str, T.Union[str, T.List[str]]] = field(default_factory=dict)
+ compile_args: T.Dict[str, T.List[str]] = field(default_factory=dict)
+ link_args: T.Dict[str, T.List[str]] = field(default_factory=dict)
+ cmake: T.Dict[str, T.Union[str, T.List[str]]] = field(default_factory=dict)
+
+ system: T.Optional[str] = None
+ subsystem: T.Optional[str] = None
+ kernel: T.Optional[str] = None
+ cpu: T.Optional[str] = None
+ cpu_family: T.Optional[str] = None
+ endian: T.Optional[str] = None
#parser = argparse.ArgumentParser(description='''Generate cross compilation definition file for the Meson build system.
#
@@ -129,18 +126,42 @@ def get_args_from_envvars(infos: MachineInfo) -> None:
if objcpp_link_args:
infos.link_args['objcpp'] = objcpp_link_args
+# map from DEB_HOST_GNU_CPU to Meson machine.cpu_family()
deb_cpu_family_map = {
'mips64el': 'mips64',
'i686': 'x86',
'powerpc64le': 'ppc64',
}
-deb_cpu_map = {
+# map from DEB_HOST_ARCH to Meson machine.cpu()
+deb_arch_cpu_map = {
'armhf': 'arm7hlf',
+}
+
+# map from DEB_HOST_GNU_CPU to Meson machine.cpu()
+deb_cpu_map = {
'mips64el': 'mips64',
'powerpc64le': 'ppc64',
}
+# map from DEB_HOST_ARCH_OS to Meson machine.system()
+deb_os_map = {
+ 'hurd': 'gnu',
+}
+
+# map from DEB_HOST_ARCH_OS to Meson machine.kernel()
+deb_kernel_map = {
+ 'kfreebsd': 'freebsd',
+ 'hurd': 'gnu',
+}
+
+def replace_special_cases(special_cases: T.Mapping[str, str], name: str) -> str:
+ '''
+ If name is a key in special_cases, replace it with the value, or otherwise
+ pass it through unchanged.
+ '''
+ return special_cases.get(name, name)
+
def deb_detect_cmake(infos: MachineInfo, data: T.Dict[str, str]) -> None:
system_name_map = {'linux': 'Linux', 'kfreebsd': 'kFreeBSD', 'hurd': 'GNU'}
system_processor_map = {'arm': 'armv7l', 'mips64el': 'mips64', 'powerpc64le': 'ppc64le'}
@@ -151,8 +172,7 @@ def deb_detect_cmake(infos: MachineInfo, data: T.Dict[str, str]) -> None:
except KeyError:
pass
infos.cmake["CMAKE_SYSTEM_NAME"] = system_name_map[data['DEB_HOST_ARCH_OS']]
- infos.cmake["CMAKE_SYSTEM_PROCESSOR"] = system_processor_map.get(data['DEB_HOST_GNU_CPU'],
- data['DEB_HOST_GNU_CPU'])
+ infos.cmake["CMAKE_SYSTEM_PROCESSOR"] = replace_special_cases(system_processor_map, data['DEB_HOST_GNU_CPU'])
def deb_compiler_lookup(infos: MachineInfo, compilerstems: T.List[T.Tuple[str, str]], host_arch: str, gccsuffix: str) -> None:
for langname, stem in compilerstems:
@@ -170,6 +190,9 @@ def detect_cross_debianlike(options: T.Any) -> MachineInfo:
cmd = ['dpkg-architecture', '-a' + options.debarch]
output = subprocess.check_output(cmd, universal_newlines=True,
stderr=subprocess.DEVNULL)
+ return dpkg_architecture_to_machine_info(output, options)
+
+def dpkg_architecture_to_machine_info(output: str, options: T.Any) -> MachineInfo:
data = {}
for line in output.split('\n'):
line = line.strip()
@@ -178,13 +201,12 @@ def detect_cross_debianlike(options: T.Any) -> MachineInfo:
k, v = line.split('=', 1)
data[k] = v
host_arch = data['DEB_HOST_GNU_TYPE']
- host_os = data['DEB_HOST_ARCH_OS']
+ host_os = replace_special_cases(deb_os_map, data['DEB_HOST_ARCH_OS'])
host_subsystem = host_os
- host_kernel = 'linux'
- host_cpu_family = deb_cpu_family_map.get(data['DEB_HOST_GNU_CPU'],
- data['DEB_HOST_GNU_CPU'])
- host_cpu = deb_cpu_map.get(data['DEB_HOST_ARCH'],
- data['DEB_HOST_ARCH'])
+ host_kernel = replace_special_cases(deb_kernel_map, data['DEB_HOST_ARCH_OS'])
+ host_cpu_family = replace_special_cases(deb_cpu_family_map, data['DEB_HOST_GNU_CPU'])
+ host_cpu = deb_arch_cpu_map.get(data['DEB_HOST_ARCH'],
+ replace_special_cases(deb_cpu_map, data['DEB_HOST_GNU_CPU']))
host_endian = data['DEB_HOST_ARCH_ENDIAN']
compilerstems = [('c', 'gcc'),
@@ -204,10 +226,33 @@ def detect_cross_debianlike(options: T.Any) -> MachineInfo:
deb_detect_cmake(infos, data)
except ValueError:
pass
- try:
- infos.binaries['pkg-config'] = locate_path("%s-pkg-config" % host_arch)
- except ValueError:
- pass # pkg-config is optional
+ for tool in [
+ 'g-ir-annotation-tool',
+ 'g-ir-compiler',
+ 'g-ir-doc-tool',
+ 'g-ir-generate',
+ 'g-ir-inspect',
+ 'g-ir-scanner',
+ 'pkg-config',
+ ]:
+ try:
+ infos.binaries[tool] = locate_path("%s-%s" % (host_arch, tool))
+ except ValueError:
+ pass # optional
+ for tool, exe in [
+ ('exe_wrapper', 'cross-exe-wrapper'),
+ ]:
+ try:
+ infos.binaries[tool] = locate_path("%s-%s" % (host_arch, exe))
+ except ValueError:
+ pass
+ for tool, exe in [
+ ('vala', 'valac'),
+ ]:
+ try:
+ infos.compilers[tool] = locate_path("%s-%s" % (host_arch, exe))
+ except ValueError:
+ pass
try:
infos.binaries['cups-config'] = locate_path("cups-config")
except ValueError:
@@ -330,7 +375,7 @@ def detect_cross_system(infos: MachineInfo, options: T.Any) -> None:
def detect_cross_env(options: T.Any) -> MachineInfo:
if options.debarch:
- print('Detecting cross environment via dpkg-reconfigure.')
+ print('Detecting cross environment via dpkg-architecture.')
infos = detect_cross_debianlike(options)
else:
print('Detecting cross environment via environment variables.')
@@ -372,12 +417,10 @@ def detect_missing_native_binaries(infos: MachineInfo) -> None:
infos.binaries[toolname] = [exe]
def detect_native_env(options: T.Any) -> MachineInfo:
- use_for_build = has_for_build()
- if use_for_build:
- mlog.log('Using FOR_BUILD envvars for detection')
+ if options.use_for_build:
+ mlog.log('Using _FOR_BUILD envvars for detection (native file for use during cross compilation)')
esuffix = '_FOR_BUILD'
else:
- mlog.log('Using regular envvars for detection.')
esuffix = ''
infos = detect_compilers_from_envvars(esuffix)
detect_missing_native_compilers(infos)
@@ -394,6 +437,8 @@ def run(options: T.Any) -> None:
mlog.notice('This functionality is experimental and subject to change.')
detect_cross = options.cross
if detect_cross:
+ if options.use_for_build:
+ sys.exit('--use-for-build only makes sense for --native, not --cross')
infos = detect_cross_env(options)
write_system_info = True
else:
diff --git a/mesonbuild/scripts/regen_checker.py b/mesonbuild/scripts/regen_checker.py
index e638f50f6e65..fc69ed7d50a1 100644
--- a/mesonbuild/scripts/regen_checker.py
+++ b/mesonbuild/scripts/regen_checker.py
@@ -8,7 +8,7 @@
import typing as T
from ..coredata import CoreData
from ..backend.backends import RegenInfo
-from ..mesonlib import OptionKey
+from ..options import OptionKey
# This could also be used for XCode.
diff --git a/mesonbuild/scripts/reprotest.py b/mesonbuild/scripts/reprotest.py
new file mode 100755
index 000000000000..fc9315c8f21f
--- /dev/null
+++ b/mesonbuild/scripts/reprotest.py
@@ -0,0 +1,123 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2024 The Meson development team
+
+from __future__ import annotations
+
+import sys, os, subprocess, shutil
+import pathlib
+import typing as T
+
+if T.TYPE_CHECKING:
+ import argparse
+
+from ..mesonlib import get_meson_command
+
+# Note: when adding arguments, please also add them to the completion
+# scripts in $MESONSRC/data/shell-completions/
+def add_arguments(parser: 'argparse.ArgumentParser') -> None:
+ parser.add_argument('--intermediaries',
+ default=False,
+ action='store_true',
+ help='Check intermediate files.')
+ parser.add_argument('mesonargs', nargs='*',
+ help='Arguments to pass to "meson setup".')
+
+IGNORE_PATTERNS = ('.ninja_log',
+ '.ninja_deps',
+ 'meson-private',
+ 'meson-logs',
+ 'meson-info',
+ )
+
+INTERMEDIATE_EXTENSIONS = ('.gch',
+ '.pch',
+ '.o',
+ '.obj',
+ '.class',
+ )
+
+class ReproTester:
+ def __init__(self, options: T.Any):
+ self.args = options.mesonargs
+ self.meson = get_meson_command()[:]
+ self.builddir = pathlib.Path('buildrepro')
+ self.storagedir = pathlib.Path('buildrepro.1st')
+ self.issues: T.List[str] = []
+ self.check_intermediaries = options.intermediaries
+
+ def run(self) -> int:
+ if not os.path.isfile('meson.build'):
+ sys.exit('This command needs to be run at your project source root.')
+ self.disable_ccache()
+ self.cleanup()
+ self.build()
+ self.check_output()
+ self.print_results()
+ if not self.issues:
+ self.cleanup()
+ return len(self.issues)
+
+ def disable_ccache(self) -> None:
+ os.environ['CCACHE_DISABLE'] = '1'
+
+ def cleanup(self) -> None:
+ if self.builddir.exists():
+ shutil.rmtree(self.builddir)
+ if self.storagedir.exists():
+ shutil.rmtree(self.storagedir)
+
+ def build(self) -> None:
+ setup_command: T.Sequence[str] = self.meson + ['setup', str(self.builddir)] + self.args
+ build_command: T.Sequence[str] = self.meson + ['compile', '-C', str(self.builddir)]
+ subprocess.check_call(setup_command)
+ subprocess.check_call(build_command)
+ self.builddir.rename(self.storagedir)
+ subprocess.check_call(setup_command)
+ subprocess.check_call(build_command)
+
+ def ignore_file(self, fstr: str) -> bool:
+ for p in IGNORE_PATTERNS:
+ if p in fstr:
+ return True
+ if not self.check_intermediaries:
+ if fstr.endswith(INTERMEDIATE_EXTENSIONS):
+ return True
+ return False
+
+ def check_contents(self, fromdir: str, todir: str, check_contents: bool) -> None:
+ import filecmp
+ frompath = fromdir + '/'
+ topath = todir + '/'
+ for fromfile in pathlib.Path(fromdir).glob('**/*'):
+ if not fromfile.is_file():
+ continue
+ fstr = fromfile.as_posix()
+ if self.ignore_file(fstr):
+ continue
+ assert fstr.startswith(frompath)
+ tofile = pathlib.Path(fstr.replace(frompath, topath, 1))
+ if not tofile.exists():
+ self.issues.append(f'Missing file: {tofile}')
+ elif check_contents:
+ if not filecmp.cmp(fromfile, tofile, shallow=False):
+ self.issues.append(f'File contents differ: {fromfile}')
+
+ def print_results(self) -> None:
+ if self.issues:
+ print('Build differences detected')
+ for i in self.issues:
+ print(i)
+ else:
+ print('No differences detected.')
+
+ def check_output(self) -> None:
+ self.check_contents('buildrepro', 'buildrepro.1st', True)
+ self.check_contents('buildrepro.1st', 'buildrepro', False)
+
+def run(options: T.Any) -> None:
+ rt = ReproTester(options)
+ try:
+ sys.exit(rt.run())
+ except Exception as e:
+ print(e)
+ sys.exit(1)
diff --git a/mesonbuild/scripts/run_tool.py b/mesonbuild/scripts/run_tool.py
index a1641e90adb5..a84de15b12df 100644
--- a/mesonbuild/scripts/run_tool.py
+++ b/mesonbuild/scripts/run_tool.py
@@ -5,8 +5,8 @@
import itertools
import fnmatch
+import concurrent.futures
from pathlib import Path
-from concurrent.futures import ThreadPoolExecutor
from ..compilers import lang_suffixes
from ..mesonlib import quiet_git
@@ -46,13 +46,27 @@ def run_tool(name: str, srcdir: Path, builddir: Path, fn: T.Callable[..., subpro
suffixes = {f'.{s}' for s in suffixes}
futures = []
returncode = 0
- with ThreadPoolExecutor() as e:
+ e = concurrent.futures.ThreadPoolExecutor()
+ try:
for f in itertools.chain(*globs):
strf = str(f)
if f.is_dir() or f.suffix not in suffixes or \
any(fnmatch.fnmatch(strf, i) for i in ignore):
continue
futures.append(e.submit(fn, f, *args))
- if futures:
- returncode = max(x.result().returncode for x in futures)
+ concurrent.futures.wait(
+ futures,
+ return_when=concurrent.futures.FIRST_EXCEPTION
+ )
+ finally:
+ # We try to prevent new subprocesses from being started by canceling
+ # the futures, but this is not water-tight: some may have started
+ # between the wait being interrupted or exited and the futures being
+ # canceled. (A fundamental fix would probably require the ability to
+ # terminate such subprocesses upon cancellation of the future.)
+ for x in futures: # Python >=3.9: e.shutdown(cancel_futures=True)
+ x.cancel()
+ e.shutdown()
+ if futures:
+ returncode = max(x.result().returncode for x in futures)
return returncode
diff --git a/mesonbuild/templates/rusttemplates.py b/mesonbuild/templates/rusttemplates.py
index 5bb7e4c19006..1dbf5b614115 100644
--- a/mesonbuild/templates/rusttemplates.py
+++ b/mesonbuild/templates/rusttemplates.py
@@ -20,25 +20,28 @@
pub fn {function_name}() -> i32 {{
return internal_function();
}}
-'''
-lib_rust_test_template = '''extern crate {crate_file};
+#[cfg(test)]
+mod tests {{
+ use super::*;
-fn main() {{
- println!("printing: {{}}", {crate_file}::{function_name}());
+ #[test]
+ fn test_function() {{
+ assert_eq!({function_name}(), 0);
+ }}
}}
'''
lib_rust_meson_template = '''project('{project_name}', 'rust',
- version : '{version}',
- default_options : ['warning_level=3'])
+ version : '{version}', meson_version: '>=1.3.0',
+ default_options : ['rust_std=2021', 'warning_level=3'])
+
+rust = import('rust')
shlib = static_library('{lib_name}', '{source_file}', install : true)
-test_exe = executable('{test_exe_name}', '{test_source_file}',
- link_with : shlib)
-test('{test_name}', test_exe)
+rust.test('{test_name}', shlib)
# Make this library usable as a Meson subproject.
{ltoken}_dep = declare_dependency(
@@ -54,8 +57,8 @@
'''
hello_rust_meson_template = '''project('{project_name}', 'rust',
- version : '{version}',
- default_options : ['warning_level=3'])
+ version : '{version}', meson_version: '>=1.3.0',
+ default_options : ['rust_std=2021', 'warning_level=3'])
exe = executable('{exe_name}', '{source_name}',
install : true)
@@ -70,7 +73,7 @@ class RustProject(FileImpl):
exe_template = hello_rust_template
exe_meson_template = hello_rust_meson_template
lib_template = lib_rust_template
- lib_test_template = lib_rust_test_template
+ lib_test_template = None
lib_meson_template = lib_rust_meson_template
def lib_kwargs(self) -> T.Dict[str, str]:
diff --git a/mesonbuild/templates/sampleimpl.py b/mesonbuild/templates/sampleimpl.py
index 570a370b8e3f..c222a1bf9aa7 100644
--- a/mesonbuild/templates/sampleimpl.py
+++ b/mesonbuild/templates/sampleimpl.py
@@ -41,7 +41,7 @@ def lib_template(self) -> str:
pass
@abc.abstractproperty
- def lib_test_template(self) -> str:
+ def lib_test_template(self) -> T.Optional[str]:
pass
@abc.abstractproperty
@@ -85,8 +85,9 @@ def create_library(self) -> None:
}
with open(lib_name, 'w', encoding='utf-8') as f:
f.write(self.lib_template.format(**kwargs))
- with open(test_name, 'w', encoding='utf-8') as f:
- f.write(self.lib_test_template.format(**kwargs))
+ if self.lib_test_template:
+ with open(test_name, 'w', encoding='utf-8') as f:
+ f.write(self.lib_test_template.format(**kwargs))
with open('meson.build', 'w', encoding='utf-8') as f:
f.write(self.lib_meson_template.format(**kwargs))
@@ -132,8 +133,9 @@ def create_library(self) -> None:
kwargs = self.lib_kwargs()
with open(lib_name, 'w', encoding='utf-8') as f:
f.write(self.lib_template.format(**kwargs))
- with open(test_name, 'w', encoding='utf-8') as f:
- f.write(self.lib_test_template.format(**kwargs))
+ if self.lib_test_template:
+ with open(test_name, 'w', encoding='utf-8') as f:
+ f.write(self.lib_test_template.format(**kwargs))
with open('meson.build', 'w', encoding='utf-8') as f:
f.write(self.lib_meson_template.format(**kwargs))
diff --git a/mesonbuild/utils/universal.py b/mesonbuild/utils/universal.py
index 6aee268ee21b..88d8e1f891c7 100644
--- a/mesonbuild/utils/universal.py
+++ b/mesonbuild/utils/universal.py
@@ -15,7 +15,7 @@
import abc
import platform, subprocess, operator, os, shlex, shutil, re
import collections
-from functools import lru_cache, wraps, total_ordering
+from functools import lru_cache, wraps
from itertools import tee
from tempfile import TemporaryDirectory, NamedTemporaryFile
import typing as T
@@ -57,6 +57,7 @@ class _VerPickleLoadable(Protocol):
__all__ = [
'GIT',
'python_command',
+ 'NoProjectVersion',
'project_meson_versions',
'SecondLevelHolder',
'File',
@@ -67,9 +68,7 @@ class _VerPickleLoadable(Protocol):
'EnvironmentException',
'FileOrString',
'GitException',
- 'OptionKey',
'dump_conf_header',
- 'OptionType',
'OrderedSet',
'PerMachine',
'PerMachineDefaultable',
@@ -98,7 +97,6 @@ class _VerPickleLoadable(Protocol):
'do_conf_file',
'do_conf_str',
'do_replacement',
- 'exe_exists',
'expand_arguments',
'extract_as_list',
'first',
@@ -160,10 +158,13 @@ class _VerPickleLoadable(Protocol):
]
+class NoProjectVersion:
+ pass
+
# TODO: this is such a hack, this really should be either in coredata or in the
# interpreter
# {subproject: project_meson_version}
-project_meson_versions: T.DefaultDict[str, str] = collections.defaultdict(str)
+project_meson_versions: T.Dict[str, T.Union[str, NoProjectVersion]] = {}
from glob import glob
@@ -683,15 +684,6 @@ def is_qnx() -> bool:
def is_aix() -> bool:
return platform.system().lower() == 'aix'
-def exe_exists(arglist: T.List[str]) -> bool:
- try:
- if subprocess.run(arglist, timeout=10).returncode == 0:
- return True
- except (FileNotFoundError, subprocess.TimeoutExpired):
- pass
- return False
-
-
@lru_cache(maxsize=None)
def darwin_get_object_archs(objpath: str) -> 'ImmutableListProtocol[str]':
'''
@@ -1788,7 +1780,7 @@ def get_filenames_templates_dict(inputs: T.List[str], outputs: T.List[str]) -> T
return values
-def _make_tree_writable(topdir: str) -> None:
+def _make_tree_writable(topdir: T.Union[str, Path]) -> None:
# Ensure all files and directories under topdir are writable
# (and readable) by owner.
for d, _, files in os.walk(topdir):
@@ -1799,7 +1791,7 @@ def _make_tree_writable(topdir: str) -> None:
os.chmod(fpath, os.stat(fpath).st_mode | stat.S_IWRITE | stat.S_IREAD)
-def windows_proof_rmtree(f: str) -> None:
+def windows_proof_rmtree(f: T.Union[str, Path]) -> None:
# On Windows if anyone is holding a file open you can't
# delete it. As an example an anti virus scanner might
# be scanning files you are trying to delete. The only
@@ -1826,7 +1818,7 @@ def windows_proof_rmtree(f: str) -> None:
shutil.rmtree(f)
-def windows_proof_rm(fpath: str) -> None:
+def windows_proof_rm(fpath: T.Union[str, Path]) -> None:
"""Like windows_proof_rmtree, but for a single file."""
if os.path.isfile(fpath):
os.chmod(fpath, os.stat(fpath).st_mode | stat.S_IWRITE | stat.S_IREAD)
@@ -1988,7 +1980,58 @@ class LibType(enum.IntEnum):
class ProgressBarFallback: # lgtm [py/iter-returns-non-self]
'''
- Fallback progress bar implementation when tqdm is not found
- Fallback progress bar implementation when tqdm is not found
+class OptionType(enum.IntEnum):
+
+ """Enum used to specify what kind of argument a thing is."""
+
+ BUILTIN = 0
+ BACKEND = 1
+ BASE = 2
+ COMPILER = 3
+ PROJECT = 4
+
+# This is copied from coredata. There is no way to share this, because this
+# is used in the OptionKey constructor, and the coredata lists are
+# OptionKeys...
+_BUILTIN_NAMES = {
+ 'prefix',
+ 'bindir',
+ 'datadir',
+ 'includedir',
+ 'infodir',
+ 'libdir',
+ 'licensedir',
+ 'libexecdir',
+ 'localedir',
+ 'localstatedir',
+ 'mandir',
+ 'sbindir',
+ 'sharedstatedir',
+ 'sysconfdir',
+ 'auto_features',
+ 'backend',
+ 'buildtype',
+ 'debug',
+ 'default_library',
+ 'errorlogs',
+ 'genvslite',
+ 'install_umask',
+ 'layout',
+ 'optimization',
+ 'prefer_static',
+ 'stdsplit',
+ 'strip',
+ 'unity',
+ 'unity_size',
+ 'warning_level',
+ 'werror',
+ 'wrap_mode',
+ 'force_fallback_for',
+ 'pkg_config_path',
+ 'cmake_prefix_path',
+ 'vsenv',
+}
+
Since this class is not an actual iterator, but only provides a minimal
fallback, it is safe to ignore the 'Iterator does not return self from
@@ -2164,257 +2207,6 @@ def wrapper(*args: T.Any, **kwargs: T.Any) -> T.List[_T]:
return wrapper
-class OptionType(enum.IntEnum):
-
- """Enum used to specify what kind of argument a thing is."""
-
- BUILTIN = 0
- BACKEND = 1
- BASE = 2
- COMPILER = 3
- PROJECT = 4
-
-# This is copied from coredata. There is no way to share this, because this
-# is used in the OptionKey constructor, and the coredata lists are
-# OptionKeys...
-_BUILTIN_NAMES = {
- 'prefix',
- 'bindir',
- 'datadir',
- 'includedir',
- 'infodir',
- 'libdir',
- 'licensedir',
- 'libexecdir',
- 'localedir',
- 'localstatedir',
- 'mandir',
- 'sbindir',
- 'sharedstatedir',
- 'sysconfdir',
- 'auto_features',
- 'backend',
- 'buildtype',
- 'debug',
- 'default_library',
- 'errorlogs',
- 'genvslite',
- 'install_umask',
- 'layout',
- 'optimization',
- 'prefer_static',
- 'stdsplit',
- 'strip',
- 'unity',
- 'unity_size',
- 'warning_level',
- 'werror',
- 'wrap_mode',
- 'force_fallback_for',
- 'pkg_config_path',
- 'cmake_prefix_path',
- 'vsenv',
-}
-
-
-def _classify_argument(key: 'OptionKey') -> OptionType:
- """Classify arguments into groups so we know which dict to assign them to."""
-
- if key.name.startswith('b_'):
- return OptionType.BASE
- elif key.lang is not None:
- return OptionType.COMPILER
- elif key.name in _BUILTIN_NAMES or key.module:
- return OptionType.BUILTIN
- elif key.name.startswith('backend_'):
- assert key.machine is MachineChoice.HOST, str(key)
- return OptionType.BACKEND
- else:
- assert key.machine is MachineChoice.HOST, str(key)
- return OptionType.PROJECT
-
-
-@total_ordering
-class OptionKey:
-
- """Represents an option key in the various option dictionaries.
-
- This provides a flexible, powerful way to map option names from their
- external form (things like subproject:build.option) to something that
- internally easier to reason about and produce.
- """
-
- __slots__ = ['name', 'subproject', 'machine', 'lang', '_hash', 'type', 'module']
-
- name: str
- subproject: str
- machine: MachineChoice
- lang: T.Optional[str]
- _hash: int
- type: OptionType
- module: T.Optional[str]
-
- def __init__(self, name: str, subproject: str = '',
- machine: MachineChoice = MachineChoice.HOST,
- lang: T.Optional[str] = None,
- module: T.Optional[str] = None,
- _type: T.Optional[OptionType] = None):
- # the _type option to the constructor is kinda private. We want to be
- # able tos ave the state and avoid the lookup function when
- # pickling/unpickling, but we need to be able to calculate it when
- # constructing a new OptionKey
- object.__setattr__(self, 'name', name)
- object.__setattr__(self, 'subproject', subproject)
- object.__setattr__(self, 'machine', machine)
- object.__setattr__(self, 'lang', lang)
- object.__setattr__(self, 'module', module)
- object.__setattr__(self, '_hash', hash((name, subproject, machine, lang, module)))
- if _type is None:
- _type = _classify_argument(self)
- object.__setattr__(self, 'type', _type)
-
- def __setattr__(self, key: str, value: T.Any) -> None:
- raise AttributeError('OptionKey instances do not support mutation.')
-
- def __getstate__(self) -> T.Dict[str, T.Any]:
- return {
- 'name': self.name,
- 'subproject': self.subproject,
- 'machine': self.machine,
- 'lang': self.lang,
- '_type': self.type,
- 'module': self.module,
- }
-
- def __setstate__(self, state: T.Dict[str, T.Any]) -> None:
- """De-serialize the state of a pickle.
-
- This is very clever. __init__ is not a constructor, it's an
- initializer, therefore it's safe to call more than once. We create a
- state in the custom __getstate__ method, which is valid to pass
- splatted to the initializer.
- """
- # Mypy doesn't like this, because it's so clever.
- self.__init__(**state) # type: ignore
-
- def __hash__(self) -> int:
- return self._hash
-
- def _to_tuple(self) -> T.Tuple[str, OptionType, str, str, MachineChoice, str]:
- return (self.subproject, self.type, self.lang or '', self.module or '', self.machine, self.name)
-
- def __eq__(self, other: object) -> bool:
- if isinstance(other, OptionKey):
- return self._to_tuple() == other._to_tuple()
- return NotImplemented
-
- def __lt__(self, other: object) -> bool:
- if isinstance(other, OptionKey):
- return self._to_tuple() < other._to_tuple()
- return NotImplemented
-
- def __str__(self) -> str:
- out = self.name
- if self.lang:
- out = f'{self.lang}_{out}'
- if self.machine is MachineChoice.BUILD:
- out = f'build.{out}'
- if self.module:
- out = f'{self.module}.{out}'
- if self.subproject:
- out = f'{self.subproject}:{out}'
- return out
-
- def __repr__(self) -> str:
- return f'OptionKey({self.name!r}, {self.subproject!r}, {self.machine!r}, {self.lang!r}, {self.module!r}, {self.type!r})'
-
- @classmethod
- def from_string(cls, raw: str) -> 'OptionKey':
- """Parse the raw command line format into a three part tuple.
-
- This takes strings like `mysubproject:build.myoption` and Creates an
- OptionKey out of them.
- """
- try:
- subproject, raw2 = raw.split(':')
- except ValueError:
- subproject, raw2 = '', raw
-
- module = None
- for_machine = MachineChoice.HOST
- try:
- prefix, raw3 = raw2.split('.')
- if prefix == 'build':
- for_machine = MachineChoice.BUILD
- else:
- module = prefix
- except ValueError:
- raw3 = raw2
-
- from ..compilers import all_languages
- if any(raw3.startswith(f'{l}_') for l in all_languages):
- lang, opt = raw3.split('_', 1)
- else:
- lang, opt = None, raw3
- assert ':' not in opt
- assert '.' not in opt
-
- return cls(opt, subproject, for_machine, lang, module)
-
- def evolve(self, name: T.Optional[str] = None, subproject: T.Optional[str] = None,
- machine: T.Optional[MachineChoice] = None, lang: T.Optional[str] = '',
- module: T.Optional[str] = '') -> 'OptionKey':
- """Create a new copy of this key, but with altered members.
-
- For example:
- >>> a = OptionKey('foo', '', MachineChoice.Host)
- >>> b = OptionKey('foo', 'bar', MachineChoice.Host)
- >>> b == a.evolve(subproject='bar')
- True
- """
- # We have to be a little clever with lang here, because lang is valid
- # as None, for non-compiler options
- return OptionKey(
- name if name is not None else self.name,
- subproject if subproject is not None else self.subproject,
- machine if machine is not None else self.machine,
- lang if lang != '' else self.lang,
- module if module != '' else self.module
- )
-
- def as_root(self) -> 'OptionKey':
- """Convenience method for key.evolve(subproject='')."""
- return self.evolve(subproject='')
-
- def as_build(self) -> 'OptionKey':
- """Convenience method for key.evolve(machine=MachineChoice.BUILD)."""
- return self.evolve(machine=MachineChoice.BUILD)
-
- def as_host(self) -> 'OptionKey':
- """Convenience method for key.evolve(machine=MachineChoice.HOST)."""
- return self.evolve(machine=MachineChoice.HOST)
-
- def is_backend(self) -> bool:
- """Convenience method to check if this is a backend option."""
- return self.type is OptionType.BACKEND
-
- def is_builtin(self) -> bool:
- """Convenience method to check if this is a builtin option."""
- return self.type is OptionType.BUILTIN
-
- def is_compiler(self) -> bool:
- """Convenience method to check if this is a builtin option."""
- return self.type is OptionType.COMPILER
-
- def is_project(self) -> bool:
- """Convenience method to check if this is a project option."""
- return self.type is OptionType.PROJECT
-
- def is_base(self) -> bool:
- """Convenience method to check if this is a base option."""
- return self.type is OptionType.BASE
-
-
def pickle_load(filename: str, object_name: str, object_type: T.Type[_PL], suggest_reconfigure: bool = True) -> _PL:
load_fail_msg = f'{object_name} file {filename!r} is corrupted.'
extra_msg = ' Consider reconfiguring the directory with "meson setup --reconfigure".' if suggest_reconfigure else ''
diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py
index 3fe40ed9f322..197a4478a40e 100644
--- a/mesonbuild/wrap/wrap.py
+++ b/mesonbuild/wrap/wrap.py
@@ -687,7 +687,11 @@ def get_data(self, urlstring: str) -> T.Tuple[str, str]:
elif WHITELIST_SUBDOMAIN in urlstring:
raise WrapException(f'{urlstring} may be a WrapDB-impersonating URL')
else:
- headers = {'User-Agent': f'mesonbuild/{coredata.version}'}
+ headers = {
+ 'User-Agent': f'mesonbuild/{coredata.version}',
+ 'Accept-Language': '*',
+ 'Accept-Encoding': '*',
+ }
creds = self.get_netrc_credentials(url.netloc)
if creds is not None and '@' not in url.netloc:
diff --git a/packaging/builddist.sh b/packaging/builddist.sh
new file mode 100755
index 000000000000..edcf3ec09b94
--- /dev/null
+++ b/packaging/builddist.sh
@@ -0,0 +1,19 @@
+#!/usr/bin/zsh
+
+# This script must be run from the source root.
+
+set -e
+
+GENDIR=distgendir
+
+rm -rf dist
+rm -rf $GENDIR
+mkdir dist
+mkdir $GENDIR
+cp -r .git $GENDIR
+cd $GENDIR
+git reset --hard
+python3 setup.py sdist bdist_wheel
+cp dist/* ../dist
+cd ..
+rm -rf $GENDIR
diff --git a/packaging/createmsi.py b/packaging/createmsi.py
index 77667fe68a36..bb2fade72e70 100755
--- a/packaging/createmsi.py
+++ b/packaging/createmsi.py
@@ -20,6 +20,8 @@
# Elementtree does not support CDATA. So hack it.
WINVER_CHECK = 'Installed OR (VersionNT64 > 602)>'
+NUGET_INDEX = 'https://api.nuget.org/v3/index.json'
+WIXEXT_TOOL = 'WixToolset.UI.wixext'
def gen_guid():
'''
@@ -302,28 +304,50 @@ def build_package(self):
])
+def is_nuget_source_active():
+ '''
+ Check if nuget source is active
+ '''
+ result = subprocess.run(['dotnet', 'nuget', 'list', 'source', '--format', 'Short'], stdout=subprocess.PIPE)
+ return f'E {NUGET_INDEX}' in result.stdout.decode('utf-8')
+
+def is_wixext_installed():
+ '''
+ Check if wix extension is installed
+ '''
+ result = subprocess.run(['wix', 'extension', 'list'], stdout=subprocess.PIPE)
+ return WIXEXT_TOOL in result.stdout.decode('utf-8')
+
def install_wix():
- subprocess.check_call(['dotnet',
- 'nuget',
- 'add',
- 'source',
- 'https://api.nuget.org/v3/index.json'])
+ # Check if nuget source is active before trying to add it
+ # dotnet nuget add source returns non-zero if the source already exists
+ if not is_nuget_source_active():
+ subprocess.check_call(['dotnet',
+ 'nuget',
+ 'add',
+ 'source',
+ NUGET_INDEX])
+
subprocess.check_call(['dotnet',
'tool',
'install',
'--global',
'wix'])
- subprocess.check_call(['wix',
- 'extension',
- 'add',
- 'WixToolset.UI.wixext',
- ])
if __name__ == '__main__':
if not os.path.exists('meson.py'):
sys.exit(print('Run me in the top level source dir.'))
if not shutil.which('wix'):
install_wix()
+
+ # Install wixext if not installed
+ if not is_wixext_installed():
+ subprocess.check_call(['wix',
+ 'extension',
+ 'add',
+ WIXEXT_TOOL,
+ ])
+
subprocess.check_call(['pip', 'install', '--upgrade', 'pyinstaller'])
p = PackageGenerator()
diff --git a/run_format_tests.py b/run_format_tests.py
index 719b76b5ac21..30c975882d9b 100755
--- a/run_format_tests.py
+++ b/run_format_tests.py
@@ -65,9 +65,12 @@ def check_format() -> None:
check_file(root / file)
def check_symlinks():
+ # Test data must NOT contain symlinks. setup.py
+ # butchers them. If you need symlinks, they need
+ # to be created on the fly.
for f in Path('test cases').glob('**/*'):
if f.is_symlink():
- if 'boost symlinks' in str(f):
+ if 'boost symlinks/boost/lib' in str(f):
continue
raise SystemExit(f'Test data dir contains symlink: {f}.')
diff --git a/run_meson_command_tests.py b/run_meson_command_tests.py
index d405a5b8790e..f9faca9af6fe 100755
--- a/run_meson_command_tests.py
+++ b/run_meson_command_tests.py
@@ -206,13 +206,13 @@ def test_meson_runpython(self):
with open(script_file, 'w') as f:
f.write('#!/usr/bin/env python3\n\n')
f.write(f'{test_command}\n')
+ self.addCleanup(os.remove, script_file)
for cmd in [['-c', test_command, 'fake argument'], [script_file, 'fake argument']]:
pyout = self._run(python_command + cmd)
mesonout = self._run(python_command + [meson_command, 'runpython'] + cmd, env=env)
self.assertEqual(pyout, mesonout)
-
if __name__ == '__main__':
print('Meson build system', meson_version, 'Command Tests')
raise SystemExit(unittest.main(buffer=True))
diff --git a/run_mypy.py b/run_mypy.py
index d9e3109d0393..f59ec89c071e 100755
--- a/run_mypy.py
+++ b/run_mypy.py
@@ -1,4 +1,6 @@
#!/usr/bin/env python3
+# SPDX-License-Identifier: Apache-2.0
+# Copyright © 2024 Intel Corporation
from pathlib import Path
import argparse
@@ -38,11 +40,13 @@
'mesonbuild/interpreter/mesonmain.py',
'mesonbuild/interpreter/interpreterobjects.py',
'mesonbuild/interpreter/type_checking.py',
+ 'mesonbuild/machinefile.py',
'mesonbuild/mcompile.py',
'mesonbuild/mdevenv.py',
'mesonbuild/utils/core.py',
'mesonbuild/utils/platform.py',
'mesonbuild/utils/universal.py',
+ 'mesonbuild/utils/vsenv.py',
'mesonbuild/mconf.py',
'mesonbuild/mdist.py',
'mesonbuild/mformat.py',
@@ -85,6 +89,7 @@
'tools',
'docs/genrefman.py',
'docs/refman',
+ 'unittests/helpers.py',
]
if os.name == 'posix':
diff --git a/run_project_tests.py b/run_project_tests.py
index c11410486e23..ab34c27f21d2 100755
--- a/run_project_tests.py
+++ b/run_project_tests.py
@@ -278,6 +278,7 @@ def __init__(self, path: Path, name: T.Optional[str], args: T.List[str], skip: b
self.stdout: T.List[T.Dict[str, str]] = []
self.skip_category = skip_category
self.skip_expected = False
+ self.cleanup: T.List[str] = []
# Always print a stack trace for Meson exceptions
self.env['MESON_FORCE_BACKTRACE'] = '1'
@@ -550,9 +551,14 @@ def validate_output(test: TestDef, stdo: str, stde: str) -> str:
def clear_internal_caches() -> None:
import mesonbuild.interpreterbase
from mesonbuild.dependencies.cmake import CMakeDependency
+ from mesonbuild.dependencies.pkgconfig import PkgConfigInterface
from mesonbuild.mesonlib import PerMachine
mesonbuild.interpreterbase.FeatureNew.feature_registry = {}
CMakeDependency.class_cmakeinfo = PerMachine(None, None)
+ PkgConfigInterface.class_impl = PerMachine(False, False)
+ PkgConfigInterface.class_cli_impl = PerMachine(False, False)
+ PkgConfigInterface.pkg_bin_per_machine = PerMachine(None, None)
+
def run_test_inprocess(testdir: str) -> T.Tuple[int, str, str, str]:
old_stdout = sys.stdout
@@ -839,6 +845,8 @@ def load_test_json(t: TestDef, stdout_mandatory: bool, skip_category: bool = Fal
(t.skip, t.skip_expected) = _skip_keys(test_def)
+ cleanup = test_def.get('cleanup', [])
+
# Skip tests if the tool requirements are not met
if 'tools' in test_def:
assert isinstance(test_def['tools'], dict)
@@ -854,6 +862,7 @@ def load_test_json(t: TestDef, stdout_mandatory: bool, skip_category: bool = Fal
t.installed_files = installed
t.do_not_set_opts = do_not_set_opts
t.stdout = stdout
+ t.cleanup = cleanup
return [t]
new_opt_list: T.List[T.List[T.Tuple[str, str, bool, bool]]]
@@ -923,6 +932,8 @@ def load_test_json(t: TestDef, stdout_mandatory: bool, skip_category: bool = Fal
test.do_not_set_opts = do_not_set_opts
test.stdout = stdout
test.skip_expected = skip_expected or t.skip_expected
+ test.cleanup = cleanup
+
all_tests.append(test)
return all_tests
@@ -1075,6 +1086,7 @@ def detect_tests_to_run(only: T.Dict[str, T.List[str]], use_tmp: bool) -> T.List
"""
skip_fortran = not(shutil.which('gfortran') or
+ shutil.which('flang-new') or
shutil.which('flang') or
shutil.which('pgfortran') or
shutil.which('nagfor') or
@@ -1388,6 +1400,13 @@ def tqdm_print(*args: mlog.TV_Loggable, sep: str = ' ') -> None:
else:
f.update_log(TestStatus.OK)
passing_tests += 1
+ for cleanup_path in t.cleanup:
+ assert not os.path.isabs(cleanup_path)
+ abspath = t.path / cleanup_path
+ if abspath.is_file():
+ mesonlib.windows_proof_rm(abspath)
+ else:
+ mesonlib.windows_proof_rmtree(abspath)
conf_time += result.conftime
build_time += result.buildtime
test_time += result.testtime
@@ -1495,7 +1514,7 @@ class ToolInfo(T.NamedTuple):
regex: T.Pattern
match_group: int
-def print_tool_versions() -> None:
+def detect_tools(report: bool = True) -> None:
tools: T.List[ToolInfo] = [
ToolInfo(
'ninja',
@@ -1535,6 +1554,11 @@ def get_version(t: ToolInfo) -> str:
return f'{exe} (unknown)'
+ if not report:
+ for tool in tools:
+ get_version(tool)
+ return
+
print()
print('tools')
print()
@@ -1544,12 +1568,23 @@ def get_version(t: ToolInfo) -> str:
print('{0:<{2}}: {1}'.format(tool.tool, get_version(tool), max_width))
print()
-tmpdir = list(Path('.').glob('test cases/**/*install functions and follow symlinks'))
-assert(len(tmpdir) == 1)
-symlink_test_dir = tmpdir[0]
-symlink_file1 = symlink_test_dir / 'foo/link1'
-symlink_file2 = symlink_test_dir / 'foo/link2.h'
-del tmpdir
+symlink_test_dir1 = None
+symlink_test_dir2 = None
+symlink_file1 = None
+symlink_file2 = None
+symlink_file3 = None
+
+def scan_test_data_symlinks() -> None:
+ global symlink_test_dir1, symlink_test_dir2, symlink_file1, symlink_file2, symlink_file3
+ tmpdir1 = list(Path('.').glob('test cases/**/*install functions and follow symlinks'))
+ tmpdir2 = list(Path('.').glob('test cases/frameworks/*boost symlinks'))
+ assert len(tmpdir1) == 1
+ assert len(tmpdir2) == 1
+ symlink_test_dir1 = tmpdir1[0]
+ symlink_test_dir2 = tmpdir2[0] / 'boost/include'
+ symlink_file1 = symlink_test_dir1 / 'foo/link1'
+ symlink_file2 = symlink_test_dir1 / 'foo/link2.h'
+ symlink_file3 = symlink_test_dir2 / 'boost'
def clear_transitive_files() -> None:
a = Path('test cases/common')
@@ -1559,11 +1594,19 @@ def clear_transitive_files() -> None:
else:
mesonlib.windows_proof_rm(str(d))
try:
- symlink_file1.unlink()
+ if symlink_file1 is not None:
+ symlink_file1.unlink()
except FileNotFoundError:
pass
try:
- symlink_file2.unlink()
+ if symlink_file2 is not None:
+ symlink_file2.unlink()
+ except FileNotFoundError:
+ pass
+ try:
+ if symlink_file3 is not None:
+ symlink_file3.unlink()
+ symlink_test_dir2.rmdir()
except FileNotFoundError:
pass
@@ -1571,6 +1614,8 @@ def setup_symlinks() -> None:
try:
symlink_file1.symlink_to('file1')
symlink_file2.symlink_to('file1')
+ symlink_test_dir2.mkdir(parents=True, exist_ok=True)
+ symlink_file3.symlink_to('../Cellar/boost/0.3.0/include/boost')
except OSError:
print('symlinks are not supported on this system')
@@ -1579,7 +1624,6 @@ def setup_symlinks() -> None:
raise SystemExit('Running under CI but $MESON_CI_JOBNAME is not set (set to "thirdparty" if you are running outside of the github org)')
setup_vsenv()
-
try:
# This fails in some CI environments for unknown reasons.
num_workers = multiprocessing.cpu_count()
@@ -1618,8 +1662,11 @@ def setup_symlinks() -> None:
if options.native_file:
options.extra_args += ['--native-file', options.native_file]
+ if not mesonlib.is_windows():
+ scan_test_data_symlinks()
clear_transitive_files()
- setup_symlinks()
+ if not mesonlib.is_windows():
+ setup_symlinks()
mesonlib.set_meson_command(get_meson_script())
print('Meson build system', meson_version, 'Project Tests')
@@ -1628,7 +1675,7 @@ def setup_symlinks() -> None:
print('VSCMD version', os.environ['VSCMD_VER'])
setup_commands(options.backend)
detect_system_compiler(options)
- print_tool_versions()
+ detect_tools()
script_dir = os.path.split(__file__)[0]
if script_dir != '':
os.chdir(script_dir)
diff --git a/run_single_test.py b/run_single_test.py
index 8db9b402d90a..23b175641b50 100755
--- a/run_single_test.py
+++ b/run_single_test.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: Apache-2.0
-# Copyright © 2021-2023 Intel Corporation
+# Copyright © 2021-2024 Intel Corporation
"""Script for running a single project test.
@@ -13,9 +13,11 @@
import typing as T
from mesonbuild import mlog
+from mesonbuild.mesonlib import is_windows
from run_tests import handle_meson_skip_test
from run_project_tests import TestDef, load_test_json, run_test, BuildStep
-from run_project_tests import setup_commands, detect_system_compiler, print_tool_versions
+from run_project_tests import setup_commands, detect_system_compiler, detect_tools
+from run_project_tests import scan_test_data_symlinks, setup_symlinks, clear_transitive_files
if T.TYPE_CHECKING:
from run_project_tests import CompilerArgumentType
@@ -44,10 +46,13 @@ def main() -> None:
parser.add_argument('--quick', action='store_true', help='Skip some compiler and tool checking')
args = T.cast('ArgumentType', parser.parse_args())
+ if not is_windows():
+ scan_test_data_symlinks()
+ setup_symlinks()
setup_commands(args.backend)
if not args.quick:
detect_system_compiler(args)
- print_tool_versions()
+ detect_tools(not args.quick)
test = TestDef(args.case, args.case.stem, [])
tests = load_test_json(test, False)
@@ -95,6 +100,7 @@ def should_fail(path: pathlib.Path) -> str:
mlog.log(cmd_res)
mlog.log(result.stde)
+ clear_transitive_files()
exit(1 if failed else 0)
if __name__ == "__main__":
diff --git a/run_tests.py b/run_tests.py
index 8ab53a1a5c80..4e22028b830c 100755
--- a/run_tests.py
+++ b/run_tests.py
@@ -35,7 +35,8 @@
from mesonbuild.environment import Environment, detect_ninja, detect_machine_info
from mesonbuild.coredata import version as meson_version
from mesonbuild.options import backendlist
-from mesonbuild.mesonlib import OptionKey, setup_vsenv
+from mesonbuild.mesonlib import setup_vsenv
+from mesonbuild.options import OptionKey
if T.TYPE_CHECKING:
from mesonbuild.coredata import SharedCMDOptions
@@ -147,11 +148,12 @@ def get_fake_options(prefix: str = '') -> SharedCMDOptions:
opts.cmd_line_options = {}
return opts
-def get_fake_env(sdir='', bdir=None, prefix='', opts=None):
+def get_fake_env(sdir: str = '', bdir: T.Optional[str] = None, prefix: str = '',
+ opts: T.Optional[SharedCMDOptions] = None) -> Environment:
if opts is None:
opts = get_fake_options(prefix)
env = Environment(sdir, bdir, opts)
- env.coredata.optstore.set_value_object(OptionKey('args', lang='c'), FakeCompilerOptions())
+ env.coredata.optstore.set_value_object(OptionKey('c_args'), FakeCompilerOptions())
env.machines.host.cpu_family = 'x86_64' # Used on macOS inside find_library
# Invalidate cache when using a different Environment object.
clear_meson_configure_class_caches()
@@ -283,7 +285,7 @@ def get_backend_commands(backend: Backend, debug: bool = False) -> \
raise AssertionError(f'Unknown backend: {backend!r}')
return cmd, clean_cmd, test_cmd, install_cmd, uninstall_cmd
-def run_mtest_inprocess(commandlist: T.List[str]) -> T.Tuple[int, str, str]:
+def run_mtest_inprocess(commandlist: T.List[str]) -> T.Tuple[int, str]:
out = StringIO()
with mock.patch.object(sys, 'stdout', out), mock.patch.object(sys, 'stderr', out):
returncode = mtest.run_with_args(commandlist)
diff --git a/setup.cfg b/setup.cfg
index 2f2962eedcba..51b8aeeba3bd 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -31,6 +31,7 @@ classifiers =
Programming Language :: Python :: 3.10
Programming Language :: Python :: 3.11
Programming Language :: Python :: 3.12
+ Programming Language :: Python :: 3.13
Topic :: Software Development :: Build Tools
long_description = Meson is a cross-platform build system designed to be both as fast and as user friendly as possible. It supports many languages and compilers, including GCC, Clang, PGI, Intel, and Visual Studio. Its build definitions are written in a simple non-Turing complete DSL.
diff --git a/test cases/cmake/1 basic/subprojects/cmMod/CMakeLists.txt b/test cases/cmake/1 basic/subprojects/cmMod/CMakeLists.txt
index 2197667a3412..5a2589a973ce 100644
--- a/test cases/cmake/1 basic/subprojects/cmMod/CMakeLists.txt
+++ b/test cases/cmake/1 basic/subprojects/cmMod/CMakeLists.txt
@@ -1,4 +1,4 @@
-cmake_minimum_required(VERSION 3.5)
+cmake_minimum_required(VERSION ${CMAKE_VERSION})
project(cmMod)
set (CMAKE_CXX_STANDARD 14)
@@ -12,7 +12,7 @@ target_compile_definitions(cmModLib++ PRIVATE MESON_MAGIC_FLAG=21)
target_compile_definitions(cmModLib++ INTERFACE MESON_MAGIC_FLAG=42)
# Test PCH support
-if(${CMAKE_VERSION} VERSION_GREATER_EQUAL "3.16.0")
+if(CMAKE_VERSION VERSION_GREATER_EQUAL "3.16.0")
target_precompile_headers(cmModLib++ PRIVATE "cpp_pch.hpp")
endif()
diff --git a/test cases/cmake/10 header only/subprojects/cmMod/CMakeLists.txt b/test cases/cmake/10 header only/subprojects/cmMod/CMakeLists.txt
index e01b6e260517..ba6f70111328 100644
--- a/test cases/cmake/10 header only/subprojects/cmMod/CMakeLists.txt
+++ b/test cases/cmake/10 header only/subprojects/cmMod/CMakeLists.txt
@@ -1,4 +1,4 @@
-cmake_minimum_required(VERSION 3.5)
+cmake_minimum_required(VERSION ${CMAKE_VERSION})
project(cmMod)
set (CMAKE_CXX_STANDARD 14)
diff --git a/test cases/cmake/11 cmake_module_path/meson.build b/test cases/cmake/11 cmake_module_path/meson.build
index e201936964ba..2e835fba1d3b 100644
--- a/test cases/cmake/11 cmake_module_path/meson.build
+++ b/test cases/cmake/11 cmake_module_path/meson.build
@@ -3,10 +3,6 @@
project('user CMake find_package module using cmake_module_path', ['c', 'cpp'],
meson_version: '>= 0.55.0')
-if not find_program('cmake', required: false).found()
- error('MESON_SKIP_TEST cmake binary not available.')
-endif
-
# NOTE: can't request Python3 via dependency('Python3', method: 'cmake')
# Meson intercepts and wants "method: auto"
diff --git a/test cases/cmake/11 cmake_module_path/subprojects/cmMod/CMakeLists.txt b/test cases/cmake/11 cmake_module_path/subprojects/cmMod/CMakeLists.txt
index 88ba9bc57ad0..b77d7a3afae2 100644
--- a/test cases/cmake/11 cmake_module_path/subprojects/cmMod/CMakeLists.txt
+++ b/test cases/cmake/11 cmake_module_path/subprojects/cmMod/CMakeLists.txt
@@ -1,4 +1,4 @@
-cmake_minimum_required(VERSION 3.5)
+cmake_minimum_required(VERSION ${CMAKE_VERSION})
project(cmMod)
diff --git a/test cases/cmake/12 generator expressions/subprojects/cmMod/CMakeLists.txt b/test cases/cmake/12 generator expressions/subprojects/cmMod/CMakeLists.txt
index 27b37218a356..c7cd2ffc06b1 100644
--- a/test cases/cmake/12 generator expressions/subprojects/cmMod/CMakeLists.txt
+++ b/test cases/cmake/12 generator expressions/subprojects/cmMod/CMakeLists.txt
@@ -1,4 +1,4 @@
-cmake_minimum_required(VERSION 3.5)
+cmake_minimum_required(VERSION ${CMAKE_VERSION})
project(cmMod)
set (CMAKE_CXX_STANDARD 14)
diff --git a/test cases/cmake/13 system includes/subprojects/cmMod/CMakeLists.txt b/test cases/cmake/13 system includes/subprojects/cmMod/CMakeLists.txt
index a6b0ba40c7d0..2557a2ae9a9e 100644
--- a/test cases/cmake/13 system includes/subprojects/cmMod/CMakeLists.txt
+++ b/test cases/cmake/13 system includes/subprojects/cmMod/CMakeLists.txt
@@ -1,4 +1,4 @@
-cmake_minimum_required(VERSION 3.5)
+cmake_minimum_required(VERSION ${CMAKE_VERSION})
project(cmMod)
set (CMAKE_CXX_STANDARD 14)
diff --git a/test cases/cmake/16 threads/subprojects/cmMod/CMakeLists.txt b/test cases/cmake/16 threads/subprojects/cmMod/CMakeLists.txt
index 4d61b0c37ad7..04e7bdda14d4 100644
--- a/test cases/cmake/16 threads/subprojects/cmMod/CMakeLists.txt
+++ b/test cases/cmake/16 threads/subprojects/cmMod/CMakeLists.txt
@@ -1,4 +1,4 @@
-cmake_minimum_required(VERSION 3.5)
+cmake_minimum_required(VERSION ${CMAKE_VERSION})
project(cmMod C CXX)
set (CMAKE_CXX_STANDARD 14)
diff --git a/test cases/cmake/17 include path order/subprojects/cmMod/CMakeLists.txt b/test cases/cmake/17 include path order/subprojects/cmMod/CMakeLists.txt
index 9a252df18918..800ada9fd466 100644
--- a/test cases/cmake/17 include path order/subprojects/cmMod/CMakeLists.txt
+++ b/test cases/cmake/17 include path order/subprojects/cmMod/CMakeLists.txt
@@ -1,4 +1,4 @@
-cmake_minimum_required(VERSION 3.5)
+cmake_minimum_required(VERSION ${CMAKE_VERSION})
project(cmMod)
set (CMAKE_CXX_STANDARD 14)
diff --git a/test cases/cmake/18 skip include files/subprojects/cmMod/CMakeLists.txt b/test cases/cmake/18 skip include files/subprojects/cmMod/CMakeLists.txt
index 4db01b32da6a..50f87524638c 100644
--- a/test cases/cmake/18 skip include files/subprojects/cmMod/CMakeLists.txt
+++ b/test cases/cmake/18 skip include files/subprojects/cmMod/CMakeLists.txt
@@ -1,4 +1,4 @@
-cmake_minimum_required(VERSION 3.5)
+cmake_minimum_required(VERSION ${CMAKE_VERSION})
project(cmMod)
set (CMAKE_CXX_STANDARD 14)
diff --git a/test cases/cmake/2 advanced/main3.cpp b/test cases/cmake/2 advanced/main3.cpp
new file mode 100644
index 000000000000..a82c9bd1d266
--- /dev/null
+++ b/test cases/cmake/2 advanced/main3.cpp
@@ -0,0 +1,6 @@
+extern void slib();
+
+int main() {
+ slib();
+ return 0;
+}
diff --git a/test cases/cmake/2 advanced/meson.build b/test cases/cmake/2 advanced/meson.build
index b301bfe42ab5..39da0c6adfc9 100644
--- a/test cases/cmake/2 advanced/meson.build
+++ b/test cases/cmake/2 advanced/meson.build
@@ -15,6 +15,8 @@ sub_sta = sub_pro.dependency('cmModLibStatic')
# Build some files
exe1 = executable('main1', ['main.cpp'], dependencies: [sub_dep])
exe2 = executable('main2', ['main.cpp'], dependencies: [sub_sta])
+slib = shared_library('slib', ['slib.cpp'], dependencies: [sub_dep])
+exe3 = executable('main3', ['main3.cpp'], link_with: slib)
test('test1', exe1)
test('test2', exe2)
diff --git a/test cases/cmake/2 advanced/slib.cpp b/test cases/cmake/2 advanced/slib.cpp
new file mode 100644
index 000000000000..a3395e5e15dc
--- /dev/null
+++ b/test cases/cmake/2 advanced/slib.cpp
@@ -0,0 +1,14 @@
+#include <iostream>
+#include <cmMod.hpp>
+#include "config.h"
+
+#if CONFIG_OPT != 42
+#error "Invalid value of CONFIG_OPT"
+#endif
+
+using namespace std;
+
+void slib(void) {
+ cmModClass obj("Hello from lib");
+ cout << obj.getStr() << endl;
+}
diff --git a/test cases/cmake/2 advanced/subprojects/cmMod/CMakeLists.txt b/test cases/cmake/2 advanced/subprojects/cmMod/CMakeLists.txt
index 07501174990c..7dd11e74942f 100644
--- a/test cases/cmake/2 advanced/subprojects/cmMod/CMakeLists.txt
+++ b/test cases/cmake/2 advanced/subprojects/cmMod/CMakeLists.txt
@@ -1,4 +1,4 @@
-cmake_minimum_required(VERSION 3.5)
+cmake_minimum_required(VERSION ${CMAKE_VERSION})
project(cmMod)
set(CMAKE_CXX_STANDARD 14)
@@ -17,6 +17,13 @@ generate_export_header(cmModLib)
set_target_properties(cmModLib PROPERTIES VERSION 1.0.1)
+include(CheckLinkerFlag)
+check_linker_flag(CXX "-Wl,--version-script=${CMAKE_CURRENT_SOURCE_DIR}/vers.map" HAS_VER_SCRIPT)
+if(HAS_VER_SCRIPT)
+ target_link_options(cmModLib PRIVATE
+ "-Wl,--version-script=${CMAKE_CURRENT_SOURCE_DIR}/vers.map")
+endif()
+
add_executable(testEXE main.cpp "${CMAKE_CURRENT_BINARY_DIR}/config.h")
target_link_libraries(cmModLib ZLIB::ZLIB)
diff --git a/test cases/cmake/2 advanced/subprojects/cmMod/vers.map b/test cases/cmake/2 advanced/subprojects/cmMod/vers.map
new file mode 100644
index 000000000000..316d05254c11
--- /dev/null
+++ b/test cases/cmake/2 advanced/subprojects/cmMod/vers.map
@@ -0,0 +1,7 @@
+{
+ global:
+ extern "C++" {
+ cmModClass::*;
+ };
+ local: *;
+};
diff --git a/test cases/cmake/21 shared module/subprojects/cmMod/CMakeLists.txt b/test cases/cmake/21 shared module/subprojects/cmMod/CMakeLists.txt
index d2fcfe3ffde3..4770bc7b3d91 100644
--- a/test cases/cmake/21 shared module/subprojects/cmMod/CMakeLists.txt
+++ b/test cases/cmake/21 shared module/subprojects/cmMod/CMakeLists.txt
@@ -1,4 +1,4 @@
-cmake_minimum_required(VERSION 3.5)
+cmake_minimum_required(VERSION ${CMAKE_VERSION})
project(cmModule)
diff --git a/test cases/cmake/22 cmake module/meson.build b/test cases/cmake/22 cmake module/meson.build
index 68f9993a69ec..581804f66ce5 100644
--- a/test cases/cmake/22 cmake module/meson.build
+++ b/test cases/cmake/22 cmake module/meson.build
@@ -4,11 +4,6 @@ if build_machine.system() == 'cygwin'
error('MESON_SKIP_TEST CMake is broken on Cygwin.')
endif
-cmake_bin = find_program('cmake', required: false)
-if not cmake_bin.found()
- error('MESON_SKIP_TEST CMake not installed.')
-endif
-
cc = meson.get_compiler('c')
if cc.get_id() == 'clang-cl' and meson.backend() == 'ninja' and build_machine.system() == 'windows'
error('MESON_SKIP_TEST CMake installation nor operational for vs2017 clangclx64ninja')
diff --git a/test cases/cmake/23 cmake toolchain/subprojects/cmMod/CMakeLists.txt b/test cases/cmake/23 cmake toolchain/subprojects/cmMod/CMakeLists.txt
index a00affaebe26..9fc121b8b888 100644
--- a/test cases/cmake/23 cmake toolchain/subprojects/cmMod/CMakeLists.txt
+++ b/test cases/cmake/23 cmake toolchain/subprojects/cmMod/CMakeLists.txt
@@ -1,4 +1,4 @@
-cmake_minimum_required(VERSION 3.5)
+cmake_minimum_required(VERSION ${CMAKE_VERSION})
project(cmMod NONE)
diff --git a/test cases/cmake/23 cmake toolchain/subprojects/cmModFortran/CMakeLists.txt b/test cases/cmake/23 cmake toolchain/subprojects/cmModFortran/CMakeLists.txt
index ecf1737fc223..cf84e13efe88 100644
--- a/test cases/cmake/23 cmake toolchain/subprojects/cmModFortran/CMakeLists.txt
+++ b/test cases/cmake/23 cmake toolchain/subprojects/cmModFortran/CMakeLists.txt
@@ -1,4 +1,4 @@
-cmake_minimum_required(VERSION 3.5)
+cmake_minimum_required(VERSION 3.10)
project(cmMod NONE)
diff --git a/test cases/cmake/24 mixing languages/subprojects/cmTest/CMakeLists.txt b/test cases/cmake/24 mixing languages/subprojects/cmTest/CMakeLists.txt
index a1886115bf5f..a25882bb340c 100644
--- a/test cases/cmake/24 mixing languages/subprojects/cmTest/CMakeLists.txt
+++ b/test cases/cmake/24 mixing languages/subprojects/cmTest/CMakeLists.txt
@@ -1,4 +1,4 @@
-cmake_minimum_required(VERSION 3.5)
+cmake_minimum_required(VERSION ${CMAKE_VERSION})
project(cmTest LANGUAGES C OBJC)
diff --git a/test cases/cmake/25 assembler/subprojects/cmTest/CMakeLists.txt b/test cases/cmake/25 assembler/subprojects/cmTest/CMakeLists.txt
index bb8834dc22bc..f8f014747a10 100644
--- a/test cases/cmake/25 assembler/subprojects/cmTest/CMakeLists.txt
+++ b/test cases/cmake/25 assembler/subprojects/cmTest/CMakeLists.txt
@@ -1,4 +1,4 @@
-cmake_minimum_required(VERSION 3.5)
+cmake_minimum_required(VERSION ${CMAKE_VERSION})
project(cmTest)
diff --git a/test cases/cmake/27 dependency fallback/subprojects/cmMod/CMakeLists.txt b/test cases/cmake/27 dependency fallback/subprojects/cmMod/CMakeLists.txt
index f920576e27ea..2f4e31c9fbb2 100644
--- a/test cases/cmake/27 dependency fallback/subprojects/cmMod/CMakeLists.txt
+++ b/test cases/cmake/27 dependency fallback/subprojects/cmMod/CMakeLists.txt
@@ -1,4 +1,4 @@
-cmake_minimum_required(VERSION 3.5)
+cmake_minimum_required(VERSION ${CMAKE_VERSION})
project(cmMod VERSION 1.2.3)
set(CMAKE_CXX_STANDARD 14)
@@ -12,7 +12,7 @@ target_compile_definitions(cmModLib++ PRIVATE MESON_MAGIC_FLAG=21)
target_compile_definitions(cmModLib++ INTERFACE MESON_MAGIC_FLAG=42)
# Test PCH support
-if(${CMAKE_VERSION} VERSION_GREATER_EQUAL "3.16.0")
+if(CMAKE_VERSION VERSION_GREATER_EQUAL "3.16.0")
target_precompile_headers(cmModLib++ PRIVATE "cpp_pch.hpp")
endif()
diff --git a/test cases/cmake/27 dependency fallback/subprojects/cmake_subp/CMakeLists.txt b/test cases/cmake/27 dependency fallback/subprojects/cmake_subp/CMakeLists.txt
index 6443fcadd69a..51451a222bd9 100644
--- a/test cases/cmake/27 dependency fallback/subprojects/cmake_subp/CMakeLists.txt
+++ b/test cases/cmake/27 dependency fallback/subprojects/cmake_subp/CMakeLists.txt
@@ -1,2 +1,2 @@
-cmake_minimum_required(VERSION 3.5)
+cmake_minimum_required(VERSION ${CMAKE_VERSION})
project(cmModDummy)
diff --git a/test cases/cmake/27 dependency fallback/subprojects/force_cmake/CMakeLists.txt b/test cases/cmake/27 dependency fallback/subprojects/force_cmake/CMakeLists.txt
index 497beb95ee18..442fc4eb6c40 100644
--- a/test cases/cmake/27 dependency fallback/subprojects/force_cmake/CMakeLists.txt
+++ b/test cases/cmake/27 dependency fallback/subprojects/force_cmake/CMakeLists.txt
@@ -1,2 +1,2 @@
-cmake_minimum_required(VERSION 3.5)
+cmake_minimum_required(VERSION ${CMAKE_VERSION})
project(cmModBoth)
diff --git a/test cases/cmake/3 advanced no dep/subprojects/cmMod/CMakeLists.txt b/test cases/cmake/3 advanced no dep/subprojects/cmMod/CMakeLists.txt
index d738d45c9767..d31a6a294de7 100644
--- a/test cases/cmake/3 advanced no dep/subprojects/cmMod/CMakeLists.txt
+++ b/test cases/cmake/3 advanced no dep/subprojects/cmMod/CMakeLists.txt
@@ -1,4 +1,4 @@
-cmake_minimum_required(VERSION 3.5)
+cmake_minimum_required(VERSION ${CMAKE_VERSION})
project(cmMod)
set(CMAKE_CXX_STANDARD 14)
diff --git a/test cases/cmake/8 custom command/subprojects/cmMod/CMakeLists.txt b/test cases/cmake/8 custom command/subprojects/cmMod/CMakeLists.txt
index a5da5e7d21ef..65c725b25ff8 100644
--- a/test cases/cmake/8 custom command/subprojects/cmMod/CMakeLists.txt
+++ b/test cases/cmake/8 custom command/subprojects/cmMod/CMakeLists.txt
@@ -1,4 +1,4 @@
-cmake_minimum_required(VERSION 3.5)
+cmake_minimum_required(VERSION ${CMAKE_VERSION})
project(cmMod)
set (CMAKE_CXX_STANDARD 14)
diff --git a/test cases/common/104 has arg/meson.build b/test cases/common/104 has arg/meson.build
index ba0731111d21..c85ec9f25ab8 100644
--- a/test cases/common/104 has arg/meson.build
+++ b/test cases/common/104 has arg/meson.build
@@ -52,6 +52,12 @@ if cc.get_id() == 'gcc'
assert(not cc.has_multi_arguments(['-Wno-pragmas', '-Wno-lol-meson-test-flags']), 'should error out even if some -Wno args are valid')
endif
+if cpp.get_id() == 'gcc' and cpp.version().version_compare('>=12.1.0')
+ # Handle special -Wno-attributes=foo where -Wattributes=foo is invalid
+ # i.e. our usual -Wno-foo -Wfoo hack doesn't work for -Wattributes=foo.
+ assert(cpp.has_argument('-Wno-attributes=meson::i_do_not_exist'))
+endif
+
if cc.get_id() == 'clang' and cc.version().version_compare('<=4.0.0')
# 4.0.0 does not support -fpeel-loops. Newer versions may.
# Please adjust above version number as new versions of clang are released.
diff --git a/test cases/common/153 wrap file should not failed/test.json b/test cases/common/153 wrap file should not failed/test.json
new file mode 100644
index 000000000000..7763d6ebc502
--- /dev/null
+++ b/test cases/common/153 wrap file should not failed/test.json
@@ -0,0 +1,3 @@
+{
+ "cleanup": ["subprojects/foo-1.0-patchfile"]
+}
diff --git a/test cases/common/178 bothlibraries/meson.build b/test cases/common/178 bothlibraries/meson.build
index 62f2061f8d67..654d94e66f80 100644
--- a/test cases/common/178 bothlibraries/meson.build
+++ b/test cases/common/178 bothlibraries/meson.build
@@ -2,6 +2,7 @@ project('both libraries linking test', 'c', 'cpp')
both_libs = both_libraries('mylib', 'libfile.c')
dep = declare_dependency(link_with: both_libs)
+alias_target('alias', both_libs)
exe_shared = executable('prog-shared', 'main.c', link_with : both_libs.get_shared_lib())
exe_static = executable('prog-static', 'main.c',
c_args : ['-DSTATIC_COMPILATION'],
diff --git a/test cases/common/183 partial dependency/declare_dependency/meson.build b/test cases/common/183 partial dependency/declare_dependency/meson.build
index 3783f669467a..8c62d10820cc 100644
--- a/test cases/common/183 partial dependency/declare_dependency/meson.build
+++ b/test cases/common/183 partial dependency/declare_dependency/meson.build
@@ -1,4 +1,4 @@
-# Copyright © 2018 Intel Corporation
+# Copyright © 2018-2024 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -29,4 +29,12 @@ dec_exe = executable(
dependencies : sub_dep,
)
+# Ensure that two partial dependencies of the same dependency are applied, as
+# they may provide different values.
+dec2_exe = executable(
+ 'declare_dep2',
+ files('main.c', 'other.c'),
+ dependencies : [sub_dep.partial_dependency(), sub_dep],
+)
+
test('Declare Dependency', dec_exe)
diff --git a/test cases/common/183 partial dependency/external_dependency/header_only.c b/test cases/common/183 partial dependency/external_dependency/header_only.c
new file mode 100644
index 000000000000..ecdd58becbd7
--- /dev/null
+++ b/test cases/common/183 partial dependency/external_dependency/header_only.c
@@ -0,0 +1,8 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright © 2024 Intel Corporation
+ */
+
+#include <zlib.h>
+
+int main(void) { return 0; }
diff --git a/test cases/common/183 partial dependency/external_dependency/link.c b/test cases/common/183 partial dependency/external_dependency/link.c
new file mode 100644
index 000000000000..dae5561e14eb
--- /dev/null
+++ b/test cases/common/183 partial dependency/external_dependency/link.c
@@ -0,0 +1,12 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright © 2024 Intel Corporation
+ */
+
+#include <zlib.h>
+#include <string.h>
+
+int main(void) {
+ const char * zver = zlibVersion();
+ return strcmp(zver, ZLIB_VERSION);
+}
diff --git a/test cases/common/183 partial dependency/external_dependency/meson.build b/test cases/common/183 partial dependency/external_dependency/meson.build
new file mode 100644
index 000000000000..4194bb76e379
--- /dev/null
+++ b/test cases/common/183 partial dependency/external_dependency/meson.build
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright © 2024 Intel Corporation
+
+# TODO: don't use compile whenever we get includes and compile args separated
+dep_zlib_sub = dep_zlib.partial_dependency(compile_args : true, includes : true)
+
+executable(
+ 'zlib header only test',
+ 'header_only.c',
+ dependencies : dep_zlib_sub,
+)
+
+executable(
+ 'zlib link test',
+ 'link.c',
+ dependencies : [dep_zlib_sub, dep_zlib],
+)
diff --git a/test cases/common/183 partial dependency/meson.build b/test cases/common/183 partial dependency/meson.build
index e908487f184a..c492cc3d42f6 100644
--- a/test cases/common/183 partial dependency/meson.build
+++ b/test cases/common/183 partial dependency/meson.build
@@ -1,4 +1,4 @@
-# Copyright © 2018 Intel Corporation
+# Copyright © 2018-2024 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,3 +15,6 @@
project('partial dependency', ['c', 'cpp'])
subdir('declare_dependency')
+
+dep_zlib = dependency('zlib', required : false)
+subdir('external_dependency', if_found : dep_zlib)
diff --git a/test cases/common/230 external project/test.json b/test cases/common/230 external project/test.json
index 4888e8752327..4df7d4ac57ff 100644
--- a/test cases/common/230 external project/test.json
+++ b/test cases/common/230 external project/test.json
@@ -1,6 +1,7 @@
{
"installed": [
- { "type": "shared_lib", "file": "usr/lib/foo" },
+ { "type": "shared_lib", "file": "usr/lib/foo", "platform": "!cygwin" },
+ { "type": "file", "file": "usr/lib/libfoo.dll", "platform": "cygwin" },
{ "type": "file", "file": "usr/include/libfoo.h" },
{ "type": "file", "file": "usr/lib/pkgconfig/somelib.pc" }
]
diff --git a/test cases/common/258 subsubproject inplace/test.json b/test cases/common/258 subsubproject inplace/test.json
new file mode 100644
index 000000000000..bf083966c031
--- /dev/null
+++ b/test cases/common/258 subsubproject inplace/test.json
@@ -0,0 +1,3 @@
+{
+ "cleanup": ["subprojects/subsub.wrap"]
+}
diff --git a/test cases/common/268 install functions and follow symlinks/meson.build b/test cases/common/268 install functions and follow symlinks/meson.build
index 327c02131fb3..9158ea8ee3fe 100644
--- a/test cases/common/268 install functions and follow symlinks/meson.build
+++ b/test cases/common/268 install functions and follow symlinks/meson.build
@@ -1,5 +1,9 @@
project('install_data following symlinks')
+if build_machine.system() == 'windows'
+ error('MESON_SKIP_TEST symlinks (typically) do not work on Windows.')
+endif
+
install_data(
'foo/link1',
install_dir: get_option('datadir') / 'followed',
diff --git a/test cases/common/273 both libraries/meson.build b/test cases/common/273 both libraries/meson.build
new file mode 100644
index 000000000000..00da1c8e6cf5
--- /dev/null
+++ b/test cases/common/273 both libraries/meson.build
@@ -0,0 +1,113 @@
+project(
+ 'test both libraries',
+ 'c',
+ meson_version: '>= 1.6.0',
+)
+
+expected = 0
+
+
+with_bl = both_libraries(
+ 'with_bl',
+ files('src/both_libraries.c'),
+ c_shared_args: ['-DEXPORT'],
+)
+
+with_bl_dep = declare_dependency(
+ link_with: with_bl,
+)
+
+
+if get_option('use_dep')
+ lib_deps = [with_bl_dep]
+ lib_links = []
+else
+ lib_deps = []
+ lib_links = [with_bl]
+endif
+
+
+with_library = library(
+ 'with_library',
+ files('src/library.c'),
+ c_shared_args: ['-DEXPORT'],
+ link_with: lib_links,
+ dependencies: lib_deps,
+)
+
+with_library_dep = declare_dependency(
+ link_with: with_library,
+)
+
+
+if get_option('default_library') == 'shared'
+ expected += 1
+ if get_option('default_both_libraries') in ['shared', 'auto']
+ expected += 1
+ endif
+elif get_option('default_library') == 'both'
+ if get_option('default_both_libraries') in ['shared', 'auto']
+ expected += 2
+ endif
+else
+ if get_option('default_both_libraries') == 'shared'
+ expected += 1
+ endif
+endif
+
+
+if get_option('use_dep')
+ main_deps = [with_library_dep]
+ main_links = []
+else
+ main_deps = []
+ main_links = [with_library]
+endif
+
+main = executable(
+ 'main',
+ files('src/main.c'),
+ c_args: [f'-DEXPECTED=@expected@'],
+ link_with: main_links,
+ dependencies: main_deps,
+)
+test('test both libs', main)
+
+
+if get_option('default_library') == 'both' and get_option('default_both_libraries') == 'auto'
+ # With those options, even if the both_libraries defaults to 'shared',
+ # 'static' version is used when linking to the static part of another both_libraries.
+
+ if get_option('use_dep')
+ main_static_deps = [with_library_dep.as_static(recursive: true)]
+ main_static_links = []
+ else
+ main_static_deps = []
+ main_static_links = [with_library.get_static_lib()]
+ endif
+ main_static = executable(
+ 'main_static',
+ files('src/main.c'),
+ c_args: [f'-DEXPECTED=0'],
+ link_with: main_static_links,
+ dependencies: main_static_deps,
+ )
+ test('test static', main_static)
+
+
+ if get_option('use_dep')
+ main_shared_deps = [with_library_dep.as_shared(recursive: true)]
+ main_shared_links = []
+ else
+ main_shared_deps = []
+ main_shared_links = [with_library.get_shared_lib()]
+ endif
+ main_shared = executable(
+ 'main_shared',
+ files('src/main.c'),
+ c_args: [f'-DEXPECTED=2'],
+ link_with: main_shared_links,
+ dependencies: main_shared_deps,
+ )
+ test('test shared', main_shared)
+endif
diff --git a/test cases/common/273 both libraries/meson.options b/test cases/common/273 both libraries/meson.options
new file mode 100644
index 000000000000..2e3c357ae0b2
--- /dev/null
+++ b/test cases/common/273 both libraries/meson.options
@@ -0,0 +1 @@
+option('use_dep', type: 'boolean', value: false)
diff --git a/test cases/common/273 both libraries/src/api.h b/test cases/common/273 both libraries/src/api.h
new file mode 100644
index 000000000000..a20ded3642d4
--- /dev/null
+++ b/test cases/common/273 both libraries/src/api.h
@@ -0,0 +1,15 @@
+#pragma once
+
+#if defined EXPORT
+ #if defined _WIN32 || defined __CYGWIN__
+ #define API __declspec(dllexport)
+ #else
+ #if defined __GNUC__
+ #define API __attribute__((visibility("default")))
+ #else
+ #define API
+ #endif
+ #endif
+#else
+ #define API
+#endif
diff --git a/test cases/common/273 both libraries/src/both_libraries.c b/test cases/common/273 both libraries/src/both_libraries.c
new file mode 100644
index 000000000000..ab1bd1fd9965
--- /dev/null
+++ b/test cases/common/273 both libraries/src/both_libraries.c
@@ -0,0 +1,10 @@
+#include "both_libraries.h"
+
+int both_libraries_function(void)
+{
+#if defined EXPORT
+ return 1;
+#else
+ return 0;
+#endif
+}
diff --git a/test cases/common/273 both libraries/src/both_libraries.h b/test cases/common/273 both libraries/src/both_libraries.h
new file mode 100644
index 000000000000..39c4c8430154
--- /dev/null
+++ b/test cases/common/273 both libraries/src/both_libraries.h
@@ -0,0 +1,5 @@
+#pragma once
+
+#include "api.h"
+
+int API both_libraries_function(void);
diff --git a/test cases/common/273 both libraries/src/library.c b/test cases/common/273 both libraries/src/library.c
new file mode 100644
index 000000000000..bdd965f7fb96
--- /dev/null
+++ b/test cases/common/273 both libraries/src/library.c
@@ -0,0 +1,12 @@
+#include "library.h"
+#include "both_libraries.h"
+
+int library_function(void)
+{
+ int sum = both_libraries_function();
+#if defined EXPORT
+ return sum + 1;
+#else
+ return sum;
+#endif
+}
diff --git a/test cases/common/273 both libraries/src/library.h b/test cases/common/273 both libraries/src/library.h
new file mode 100644
index 000000000000..7f57af4f136d
--- /dev/null
+++ b/test cases/common/273 both libraries/src/library.h
@@ -0,0 +1,5 @@
+#pragma once
+
+#include "api.h"
+
+int API library_function(void);
diff --git a/test cases/common/273 both libraries/src/main.c b/test cases/common/273 both libraries/src/main.c
new file mode 100644
index 000000000000..1b367896d189
--- /dev/null
+++ b/test cases/common/273 both libraries/src/main.c
@@ -0,0 +1,8 @@
+#include "library.h"
+
+
+int main(void)
+{
+ int sum = library_function();
+ return sum == EXPECTED ? 0 : 1;
+}
diff --git a/test cases/common/273 both libraries/test.json b/test cases/common/273 both libraries/test.json
new file mode 100644
index 000000000000..2aba26e48a3a
--- /dev/null
+++ b/test cases/common/273 both libraries/test.json
@@ -0,0 +1,20 @@
+{
+ "matrix": {
+ "options": {
+ "default_library": [
+ { "val": "shared" },
+ { "val": "static" },
+ { "val": "both" }
+ ],
+ "default_both_libraries": [
+ { "val": "shared" },
+ { "val": "static" },
+ { "val": "auto" }
+ ],
+ "use_dep": [
+ { "val": false },
+ { "val": true }
+ ]
+ }
+ }
+}
diff --git a/test cases/common/273 customtarget exe for test/generate.py b/test cases/common/274 customtarget exe for test/generate.py
similarity index 100%
rename from test cases/common/273 customtarget exe for test/generate.py
rename to test cases/common/274 customtarget exe for test/generate.py
diff --git a/test cases/common/273 customtarget exe for test/meson.build b/test cases/common/274 customtarget exe for test/meson.build
similarity index 100%
rename from test cases/common/273 customtarget exe for test/meson.build
rename to test cases/common/274 customtarget exe for test/meson.build
diff --git a/test cases/common/274 environment/meson.build b/test cases/common/275 environment/meson.build
similarity index 100%
rename from test cases/common/274 environment/meson.build
rename to test cases/common/275 environment/meson.build
diff --git a/test cases/common/274 environment/testenv.py b/test cases/common/275 environment/testenv.py
similarity index 100%
rename from test cases/common/274 environment/testenv.py
rename to test cases/common/275 environment/testenv.py
diff --git a/test cases/common/275 required keyword in compiles functions/invalid.c b/test cases/common/276 required keyword in compiles functions/invalid.c
similarity index 100%
rename from test cases/common/275 required keyword in compiles functions/invalid.c
rename to test cases/common/276 required keyword in compiles functions/invalid.c
diff --git a/test cases/common/275 required keyword in compiles functions/meson.build b/test cases/common/276 required keyword in compiles functions/meson.build
similarity index 100%
rename from test cases/common/275 required keyword in compiles functions/meson.build
rename to test cases/common/276 required keyword in compiles functions/meson.build
diff --git a/test cases/common/275 required keyword in compiles functions/meson_options.txt b/test cases/common/276 required keyword in compiles functions/meson_options.txt
similarity index 100%
rename from test cases/common/275 required keyword in compiles functions/meson_options.txt
rename to test cases/common/276 required keyword in compiles functions/meson_options.txt
diff --git a/test cases/common/275 required keyword in compiles functions/valid.c b/test cases/common/276 required keyword in compiles functions/valid.c
similarity index 100%
rename from test cases/common/275 required keyword in compiles functions/valid.c
rename to test cases/common/276 required keyword in compiles functions/valid.c
diff --git a/test cases/common/276 generator custom_tgt subdir/include/meson.build b/test cases/common/277 generator custom_tgt subdir/include/meson.build
similarity index 100%
rename from test cases/common/276 generator custom_tgt subdir/include/meson.build
rename to test cases/common/277 generator custom_tgt subdir/include/meson.build
diff --git a/test cases/common/276 generator custom_tgt subdir/meson.build b/test cases/common/277 generator custom_tgt subdir/meson.build
similarity index 100%
rename from test cases/common/276 generator custom_tgt subdir/meson.build
rename to test cases/common/277 generator custom_tgt subdir/meson.build
diff --git a/test cases/common/277 custom target private dir/meson.build b/test cases/common/278 custom target private dir/meson.build
similarity index 100%
rename from test cases/common/277 custom target private dir/meson.build
rename to test cases/common/278 custom target private dir/meson.build
diff --git a/test cases/common/279 pkgconfig override/meson.build b/test cases/common/279 pkgconfig override/meson.build
new file mode 100644
index 000000000000..90298288bda1
--- /dev/null
+++ b/test cases/common/279 pkgconfig override/meson.build
@@ -0,0 +1,8 @@
+project('override pkg-config', 'c')
+
+subproject('pkg-config')
+
+pkgconfig = find_program('pkg-config')
+
+# This dependency can only be found if pkg-config is overridden with our custom pkg-config.py
+gobj = dependency('test-package-0.0', version : '= 0.0.0')
diff --git a/test cases/common/279 pkgconfig override/subprojects/pkg-config.wrap b/test cases/common/279 pkgconfig override/subprojects/pkg-config.wrap
new file mode 100644
index 000000000000..ef7a52ed2037
--- /dev/null
+++ b/test cases/common/279 pkgconfig override/subprojects/pkg-config.wrap
@@ -0,0 +1,5 @@
+[wrap-file]
+directory = pkg-config
+
+[provide]
+program_names = pkg-config
diff --git a/test cases/common/279 pkgconfig override/subprojects/pkg-config/bin/pkg-config.py b/test cases/common/279 pkgconfig override/subprojects/pkg-config/bin/pkg-config.py
new file mode 100755
index 000000000000..7dc28c849415
--- /dev/null
+++ b/test cases/common/279 pkgconfig override/subprojects/pkg-config/bin/pkg-config.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python3
+
+import sys
+
+if len(sys.argv) > 1:
+ if sys.argv[1] == "--modversion":
+ if sys.argv[2] == "test-package-0.0":
+ print("0.0.0")
+ else:
+ exit(-1)
+ elif sys.argv[1] == "--version":
+ print("0.0.0")
+ exit(0)
diff --git a/test cases/common/279 pkgconfig override/subprojects/pkg-config/meson.build b/test cases/common/279 pkgconfig override/subprojects/pkg-config/meson.build
new file mode 100644
index 000000000000..af526f9345a9
--- /dev/null
+++ b/test cases/common/279 pkgconfig override/subprojects/pkg-config/meson.build
@@ -0,0 +1,4 @@
+project('pkg-config')
+
+pkgconfig = find_program(meson.project_source_root() / 'bin' / 'pkg-config.py')
+meson.override_find_program('pkg-config', pkgconfig)
diff --git a/test cases/common/280 pkgconfig-gen/meson.build b/test cases/common/280 pkgconfig-gen/meson.build
new file mode 100644
index 000000000000..3f158882f54b
--- /dev/null
+++ b/test cases/common/280 pkgconfig-gen/meson.build
@@ -0,0 +1,19 @@
+project('pkgconfig-get', 'c')
+
+pkgg = import('pkgconfig')
+
+subdir('simple2')
+
+lib = library('simple', 'simple.c', dependencies: lib_dep)
+libver = '1.0'
+h = install_headers('simple.h')
+
+
+pkgg.generate(
+ lib,
+ version : libver,
+ name : 'libsimple',
+ filebase : 'simple',
+ description : 'A simple demo library.',
+ libraries: [lib_dep],
+)
\ No newline at end of file
diff --git a/test cases/common/280 pkgconfig-gen/simple.c b/test cases/common/280 pkgconfig-gen/simple.c
new file mode 100644
index 000000000000..da1d909f7d26
--- /dev/null
+++ b/test cases/common/280 pkgconfig-gen/simple.c
@@ -0,0 +1,6 @@
+#include"simple.h"
+#include <simple2.h>
+
+int simple_function(void) {
+ return simple_simple_function();
+}
diff --git a/test cases/common/280 pkgconfig-gen/simple.h b/test cases/common/280 pkgconfig-gen/simple.h
new file mode 100644
index 000000000000..6896bfd17f7e
--- /dev/null
+++ b/test cases/common/280 pkgconfig-gen/simple.h
@@ -0,0 +1,6 @@
+#ifndef SIMPLE_H_
+#define SIMPLE_H_
+
+int simple_function(void);
+
+#endif
diff --git a/test cases/common/280 pkgconfig-gen/simple2/exports.def b/test cases/common/280 pkgconfig-gen/simple2/exports.def
new file mode 100644
index 000000000000..42c911b93c51
--- /dev/null
+++ b/test cases/common/280 pkgconfig-gen/simple2/exports.def
@@ -0,0 +1,2 @@
+EXPORTS
+ simple_simple_function @1
diff --git a/test cases/common/280 pkgconfig-gen/simple2/meson.build b/test cases/common/280 pkgconfig-gen/simple2/meson.build
new file mode 100644
index 000000000000..c8f13c05ee9d
--- /dev/null
+++ b/test cases/common/280 pkgconfig-gen/simple2/meson.build
@@ -0,0 +1,2 @@
+lib2 = library('simple2', 'simple2.c', vs_module_defs: 'exports.def')
+lib_dep = declare_dependency(link_with: lib2, include_directories: include_directories('.'))
diff --git a/test cases/common/280 pkgconfig-gen/simple2/simple2.c b/test cases/common/280 pkgconfig-gen/simple2/simple2.c
new file mode 100644
index 000000000000..215b2aef8eed
--- /dev/null
+++ b/test cases/common/280 pkgconfig-gen/simple2/simple2.c
@@ -0,0 +1,5 @@
+#include"simple2.h"
+
+int simple_simple_function(void) {
+ return 42;
+}
diff --git a/test cases/common/280 pkgconfig-gen/simple2/simple2.h b/test cases/common/280 pkgconfig-gen/simple2/simple2.h
new file mode 100644
index 000000000000..472e135f2b61
--- /dev/null
+++ b/test cases/common/280 pkgconfig-gen/simple2/simple2.h
@@ -0,0 +1,6 @@
+#ifndef SIMPLE2_H_
+#define SIMPLE2_H_
+
+int simple_simple_function(void);
+
+#endif
diff --git a/test cases/common/280 pkgconfig-gen/test.json b/test cases/common/280 pkgconfig-gen/test.json
new file mode 100644
index 000000000000..99b18086c0f5
--- /dev/null
+++ b/test cases/common/280 pkgconfig-gen/test.json
@@ -0,0 +1,15 @@
+{
+ "installed": [
+ { "type": "file", "file": "usr/include/simple.h"},
+ { "type": "file", "file": "usr/lib/pkgconfig/simple.pc"}
+ ],
+ "matrix": {
+ "options": {
+ "default_library": [
+ { "val": "shared" },
+ { "val": "static" },
+ { "val": "both" }
+ ]
+ }
+ }
+}
diff --git a/test cases/common/36 has function/meson.build b/test cases/common/36 has function/meson.build
index bb3e869760cb..d8b539880d8c 100644
--- a/test cases/common/36 has function/meson.build
+++ b/test cases/common/36 has function/meson.build
@@ -98,12 +98,11 @@ foreach cc : compilers
# For some functions one needs to define _GNU_SOURCE before including the
# right headers to get them picked up. Make sure we can detect these functions
# as well without any prefix
- if cc.has_header_symbol('sys/socket.h', 'recvmmsg',
+ if cc.has_header_symbol('sys/stat.h', 'statx',
prefix : '#define _GNU_SOURCE',
args : unit_test_args)
- # We assume that if recvmmsg exists sendmmsg does too
- assert (cc.has_function('sendmmsg', args : unit_test_args),
- 'Failed to detect function "sendmmsg" (should always exist).')
+ assert (cc.has_function('statx', args : unit_test_args),
+ 'Failed to detect function "statx" (should always exist).')
endif
# We should be able to find GCC and Clang __builtin functions
diff --git a/test cases/common/41 test args/meson.build b/test cases/common/41 test args/meson.build
index 4894f3e163a8..ddd369e7fc5f 100644
--- a/test cases/common/41 test args/meson.build
+++ b/test cases/common/41 test args/meson.build
@@ -38,10 +38,14 @@ test('custom target arg', tester, args : testfilect, env : env_array)
env = environment()
env.append('PATH', 'something')
-bash = find_program('bash')
+pathtester = find_program('pathtester.py')
custompathtgt = custom_target('testpathappend',
output : 'nothing.txt',
build_always : true,
- command : [bash, '-c', 'env'],
+ command : [pathtester],
env : env)
+
+# https://github.com/mesonbuild/meson/issues/3552
+wrap = find_program('wrap.py')
+test('external program arg', wrap, args : [testerpy, testfile])
diff --git a/test cases/common/41 test args/pathtester.py b/test cases/common/41 test args/pathtester.py
new file mode 100755
index 000000000000..96152845d664
--- /dev/null
+++ b/test cases/common/41 test args/pathtester.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python3
+
+import sys, subprocess
+
+if sys.platform == 'win32':
+ cmd = ['xcopy', '/?']
+else:
+ cmd = ['env']
+
+rc = subprocess.run(cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
+sys.exit(rc.returncode)
diff --git a/test cases/common/41 test args/wrap.py b/test cases/common/41 test args/wrap.py
new file mode 100755
index 000000000000..87508e0083d3
--- /dev/null
+++ b/test cases/common/41 test args/wrap.py
@@ -0,0 +1,6 @@
+#!/usr/bin/env python3
+
+import subprocess
+import sys
+
+subprocess.run(sys.argv[1:])
diff --git a/test cases/common/52 object generator/dir/meson.build b/test cases/common/52 object generator/dir/meson.build
new file mode 100644
index 000000000000..e661d8665499
--- /dev/null
+++ b/test cases/common/52 object generator/dir/meson.build
@@ -0,0 +1,6 @@
+#check with a single @OUTPUT0@ in a subdirectory and multiple inputs
+gen4 = generator(python,
+ output : ['@BASENAME@.o'],
+ arguments : [comp, cc, '@INPUT@', '@OUTPUT0@'])
+
+generated4 = gen4.process(files('source5.c', 'source6.c'))
diff --git a/test cases/common/52 object generator/dir/source5.c b/test cases/common/52 object generator/dir/source5.c
new file mode 100644
index 000000000000..c512fc310279
--- /dev/null
+++ b/test cases/common/52 object generator/dir/source5.c
@@ -0,0 +1,3 @@
+int func5_in_obj(void) {
+ return 0;
+}
diff --git a/test cases/common/52 object generator/dir/source6.c b/test cases/common/52 object generator/dir/source6.c
new file mode 100644
index 000000000000..adcf2cd454fd
--- /dev/null
+++ b/test cases/common/52 object generator/dir/source6.c
@@ -0,0 +1,3 @@
+int func6_in_obj(void) {
+ return 0;
+}
diff --git a/test cases/common/52 object generator/meson.build b/test cases/common/52 object generator/meson.build
index 49590d6d60f9..656a9088586f 100644
--- a/test cases/common/52 object generator/meson.build
+++ b/test cases/common/52 object generator/meson.build
@@ -37,6 +37,8 @@ gen3 = generator(python,
generated3 = gen3.process(['source4.c'])
-e = executable('prog', 'prog.c', generated, generated2, generated3)
+subdir('dir')
+
+e = executable('prog', 'prog.c', generated, generated2, generated3, generated4)
test('objgen', e)
\ No newline at end of file
diff --git a/test cases/common/52 object generator/prog.c b/test cases/common/52 object generator/prog.c
index 80056dc031a9..ce54b073939a 100644
--- a/test cases/common/52 object generator/prog.c
+++ b/test cases/common/52 object generator/prog.c
@@ -2,7 +2,10 @@ int func1_in_obj(void);
int func2_in_obj(void);
int func3_in_obj(void);
int func4_in_obj(void);
+int func5_in_obj(void);
+int func6_in_obj(void);
+
int main(void) {
- return func1_in_obj() + func2_in_obj() + func3_in_obj() + func4_in_obj();
+ return func1_in_obj() + func2_in_obj() + func3_in_obj() + func4_in_obj() + func5_in_obj() + func6_in_obj();
}
diff --git a/test cases/common/98 subproject subdir/meson.build b/test cases/common/98 subproject subdir/meson.build
index ef053d86c41c..d2bafedf5119 100644
--- a/test cases/common/98 subproject subdir/meson.build
+++ b/test cases/common/98 subproject subdir/meson.build
@@ -1,3 +1,7 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2016-2023 The Meson Developers
+# Copyright © 2024 Intel Corporation
+
project('proj', 'c')
subproject('sub')
libSub = dependency('sub', fallback: ['sub', 'libSub'])
@@ -6,7 +10,19 @@ exe = executable('prog', 'prog.c', dependencies: libSub)
test('subproject subdir', exe)
# Verify the subproject has placed dependency override.
-dependency('sub-1.0')
+d = dependency('sub-1.0')
+
+# verify that the name is the overridden name
+assert(d.name() == 'sub-1.0', 'name was not properly set, should have been "sub-1.0", but was @0@'.format(d.name()))
+
+# Verify that when a dependency object is used for two overrides, the correct
+# name is used
+meson.override_dependency('new-dep', d)
+d2 = dependency('new-dep')
+assert(d2.name() == 'new-dep', 'name was not properly set, should have been "new-dep", but was @0@'.format(d2.name()))
+
+# And that the old dependency wasn't changed
+assert(d.name() == 'sub-1.0', 'original dependency was mutated.')
# Verify we can now take 'sub' dependency without fallback, but only version 1.0.
dependency('sub')
diff --git a/test cases/d/11 dub/meson.build b/test cases/d/11 dub/meson.build
index 3bb3d56c0d91..91955710e709 100644
--- a/test cases/d/11 dub/meson.build
+++ b/test cases/d/11 dub/meson.build
@@ -1,12 +1,15 @@
project('dub-example', 'd')
-error('MESON_SKIP_TEST: Dub support is broken at the moment (#11773)')
-
dub_exe = find_program('dub', required : false)
if not dub_exe.found()
error('MESON_SKIP_TEST: Dub not found')
endif
+dub_ver = dub_exe.version()
+if dub_ver.version_compare('>1.31.1') and dub_ver.version_compare('<1.35.0')
+ error('MESON_SKIP_TEST: Incompatible Dub version ' + dub_ver)
+endif
+
urld_dep = dependency('urld', method: 'dub')
test_exe = executable('test-urld', 'test.d', dependencies: urld_dep)
diff --git a/test cases/d/14 dub with deps/meson.build b/test cases/d/14 dub with deps/meson.build
index c8e472bffc3d..2e3bce87d37d 100644
--- a/test cases/d/14 dub with deps/meson.build
+++ b/test cases/d/14 dub with deps/meson.build
@@ -1,12 +1,15 @@
project('dub-with-deps-example', ['d'])
-error('MESON_SKIP_TEST: Dub support is broken at the moment (#11773)')
-
dub_exe = find_program('dub', required : false)
if not dub_exe.found()
error('MESON_SKIP_TEST: Dub not found')
endif
+dub_ver = dub_exe.version()
+if dub_ver.version_compare('>1.31.1') and dub_ver.version_compare('<1.35.0')
+ error('MESON_SKIP_TEST: Incompatible Dub version ' + dub_ver)
+endif
+
if meson.get_compiler('d').get_id() == 'gcc'
error('MESON_SKIP_TEST: can\'t build dependencies with GDC')
elif meson.get_compiler('d').get_id() == 'llvm'
diff --git a/test cases/d/9 features/meson.build b/test cases/d/9 features/meson.build
index 50059f169c10..065ef3a6ddef 100644
--- a/test cases/d/9 features/meson.build
+++ b/test cases/d/9 features/meson.build
@@ -1,4 +1,4 @@
-project('D Features', 'd', default_options : ['debug=false'])
+project('D Features', 'd', meson_version: '>=1.6', default_options : ['debug=false'])
dc = meson.get_compiler('d')
diff --git a/test cases/failing build/10 nasm werror/meson.build b/test cases/failing build/10 nasm werror/meson.build
new file mode 100644
index 000000000000..d0c056a5debe
--- /dev/null
+++ b/test cases/failing build/10 nasm werror/meson.build
@@ -0,0 +1,8 @@
+project('test', default_options: ['werror=true'])
+if meson.backend().startswith('vs')
+ error('MESON_SKIP_TEST: VS backend does not recognise NASM yet')
+endif
+if not add_languages('nasm', required: false)
+ error('MESON_SKIP_TEST: nasm not found')
+endif
+executable('prog', 'test.asm')
diff --git a/test cases/failing build/10 nasm werror/test.asm b/test cases/failing build/10 nasm werror/test.asm
new file mode 100644
index 000000000000..2f7a7d6c3a4d
--- /dev/null
+++ b/test cases/failing build/10 nasm werror/test.asm
@@ -0,0 +1,8 @@
+SECTION .text
+global main
+main:
+ mov ebx,0
+ mov eax,1
+ int 0x80
+
+%warning oops
diff --git a/test cases/failing build/11 objc werror/meson.build b/test cases/failing build/11 objc werror/meson.build
new file mode 100644
index 000000000000..5e83d3850bd4
--- /dev/null
+++ b/test cases/failing build/11 objc werror/meson.build
@@ -0,0 +1,8 @@
+project('test', default_options: ['werror=true'])
+if not add_languages('objc', required: false)
+ error('MESON_SKIP_TEST: Objective C not found')
+endif
+if get_option('backend').startswith('vs')
+ error('MESON_SKIP_TEST: objc is not supported by vs backend')
+endif
+executable('prog', 'test.m')
diff --git a/test cases/failing build/11 objc werror/test.m b/test cases/failing build/11 objc werror/test.m
new file mode 100644
index 000000000000..b03db466ad0c
--- /dev/null
+++ b/test cases/failing build/11 objc werror/test.m
@@ -0,0 +1,5 @@
+#import <stdio.h>
+
+int main(void) {
+ return 1 / 0;
+}
diff --git a/test cases/failing build/12 objcpp werror/meson.build b/test cases/failing build/12 objcpp werror/meson.build
new file mode 100644
index 000000000000..ee0d7cdba850
--- /dev/null
+++ b/test cases/failing build/12 objcpp werror/meson.build
@@ -0,0 +1,8 @@
+project('test', default_options: ['werror=true'])
+if not add_languages('objcpp', required: false)
+ error('MESON_SKIP_TEST: Objective C++ not found')
+endif
+if get_option('backend').startswith('vs')
+ error('MESON_SKIP_TEST: objcpp is not supported by vs backend')
+endif
+executable('prog', 'test.mm')
diff --git a/test cases/failing build/12 objcpp werror/test.mm b/test cases/failing build/12 objcpp werror/test.mm
new file mode 100644
index 000000000000..732cffa3ff17
--- /dev/null
+++ b/test cases/failing build/12 objcpp werror/test.mm
@@ -0,0 +1,8 @@
+#import <stdio.h>
+
+class MyClass {
+};
+
+int main(void) {
+ return 1 / 0;
+}
diff --git a/test cases/failing build/13 rust werror/meson.build b/test cases/failing build/13 rust werror/meson.build
new file mode 100644
index 000000000000..fd11de141f95
--- /dev/null
+++ b/test cases/failing build/13 rust werror/meson.build
@@ -0,0 +1,8 @@
+project('test', default_options: ['werror=true'])
+if meson.backend() != 'ninja'
+ error('MESON_SKIP_TEST: Rust requires Ninja backend')
+endif
+if not add_languages('rust', required: false)
+ error('MESON_SKIP_TEST: Rust not found')
+endif
+executable('prog', 'test.rs')
diff --git a/test cases/failing build/13 rust werror/test.rs b/test cases/failing build/13 rust werror/test.rs
new file mode 100644
index 000000000000..c0e42233623c
--- /dev/null
+++ b/test cases/failing build/13 rust werror/test.rs
@@ -0,0 +1,4 @@
+#[warn(unconditional_panic)]
+fn main() {
+ println!("Hello, world {}!", 1 / 0);
+}
diff --git a/test cases/failing build/14 swift werror/meson.build b/test cases/failing build/14 swift werror/meson.build
new file mode 100644
index 000000000000..1d827517efed
--- /dev/null
+++ b/test cases/failing build/14 swift werror/meson.build
@@ -0,0 +1,5 @@
+project('test', default_options: ['werror=true'])
+if not add_languages('swift', required: false)
+ error('MESON_SKIP_TEST: Swift not found')
+endif
+executable('prog', 'test.swift')
diff --git a/test cases/failing build/14 swift werror/test.swift b/test cases/failing build/14 swift werror/test.swift
new file mode 100644
index 000000000000..b569811d80a2
--- /dev/null
+++ b/test cases/failing build/14 swift werror/test.swift
@@ -0,0 +1,2 @@
+#warning("oops")
+print("Hello, World!")
diff --git a/test cases/failing build/3 cmake subproject isolation/meson.build b/test cases/failing build/3 cmake subproject isolation/meson.build
index e60633595529..2351ec54be4b 100644
--- a/test cases/failing build/3 cmake subproject isolation/meson.build
+++ b/test cases/failing build/3 cmake subproject isolation/meson.build
@@ -1,9 +1,5 @@
project('subproject isolation', ['c', 'cpp'])
-if not find_program('cmake', required: false).found()
- error('MESON_SKIP_TEST CMake is not installed')
-endif
-
incdir = meson.source_root() / 'incDir'
cm = import('cmake')
diff --git a/test cases/failing build/3 cmake subproject isolation/subprojects/cmMod/CMakeLists.txt b/test cases/failing build/3 cmake subproject isolation/subprojects/cmMod/CMakeLists.txt
index 852dd09a8dab..a4564d3f1b25 100644
--- a/test cases/failing build/3 cmake subproject isolation/subprojects/cmMod/CMakeLists.txt
+++ b/test cases/failing build/3 cmake subproject isolation/subprojects/cmMod/CMakeLists.txt
@@ -1,4 +1,4 @@
-cmake_minimum_required(VERSION 3.5)
+cmake_minimum_required(VERSION ${CMAKE_VERSION})
project(cmMod)
set (CMAKE_CXX_STANDARD 14)
diff --git a/test cases/failing build/5 c werror/meson.build b/test cases/failing build/5 c werror/meson.build
new file mode 100644
index 000000000000..ff6026afaa21
--- /dev/null
+++ b/test cases/failing build/5 c werror/meson.build
@@ -0,0 +1,2 @@
+project('test', 'c', default_options: ['werror=true'])
+executable('prog', 'test.c')
diff --git a/test cases/failing build/5 c werror/test.c b/test cases/failing build/5 c werror/test.c
new file mode 100644
index 000000000000..f0b7f0baa3d4
--- /dev/null
+++ b/test cases/failing build/5 c werror/test.c
@@ -0,0 +1,3 @@
+int main(int argc, char **argv) {
+ return 1 / 0;
+}
diff --git a/test cases/failing build/6 cpp werror/meson.build b/test cases/failing build/6 cpp werror/meson.build
new file mode 100644
index 000000000000..0d1c4c5ba38b
--- /dev/null
+++ b/test cases/failing build/6 cpp werror/meson.build
@@ -0,0 +1,2 @@
+project('test', 'cpp', default_options: ['werror=true'])
+executable('prog', 'test.cpp')
diff --git a/test cases/failing build/6 cpp werror/test.cpp b/test cases/failing build/6 cpp werror/test.cpp
new file mode 100644
index 000000000000..f0b7f0baa3d4
--- /dev/null
+++ b/test cases/failing build/6 cpp werror/test.cpp
@@ -0,0 +1,3 @@
+int main(int argc, char **argv) {
+ return 1 / 0;
+}
diff --git a/test cases/failing build/7 csharp werror/meson.build b/test cases/failing build/7 csharp werror/meson.build
new file mode 100644
index 000000000000..8e73c6ab9fc0
--- /dev/null
+++ b/test cases/failing build/7 csharp werror/meson.build
@@ -0,0 +1,8 @@
+project('test', default_options: ['werror=true'])
+if meson.backend() != 'ninja'
+ error('MESON_SKIP_TEST: C# requires Ninja backend')
+endif
+if not add_languages('cs', required: false)
+ error('MESON_SKIP_TEST: C# not found')
+endif
+executable('prog', 'test.cs')
diff --git a/test cases/failing build/7 csharp werror/test.cs b/test cases/failing build/7 csharp werror/test.cs
new file mode 100644
index 000000000000..262f4d180258
--- /dev/null
+++ b/test cases/failing build/7 csharp werror/test.cs
@@ -0,0 +1,8 @@
+namespace HelloWorld {
+ class Hello {
+ static void Main(string[] args) {
+ int w = 0; // unused
+ System.Console.WriteLine("Hello World!");
+ }
+ }
+}
diff --git a/test cases/failing build/8 fortran werror/meson.build b/test cases/failing build/8 fortran werror/meson.build
new file mode 100644
index 000000000000..9cccbecb2bd6
--- /dev/null
+++ b/test cases/failing build/8 fortran werror/meson.build
@@ -0,0 +1,5 @@
+project('test', default_options: ['warning_level=3', 'werror=true'])
+if not add_languages('fortran', required: false)
+ error('MESON_SKIP_TEST: Fortran not found')
+endif
+executable('prog', 'test.f90')
diff --git a/test cases/failing build/8 fortran werror/test.f90 b/test cases/failing build/8 fortran werror/test.f90
new file mode 100644
index 000000000000..78edb8d782f4
--- /dev/null
+++ b/test cases/failing build/8 fortran werror/test.f90
@@ -0,0 +1,4 @@
+program main
+integer :: i
+print *, ii
+end program
diff --git a/test cases/failing build/9 java werror/Test.java b/test cases/failing build/9 java werror/Test.java
new file mode 100644
index 000000000000..3bfb2d0f0875
--- /dev/null
+++ b/test cases/failing build/9 java werror/Test.java
@@ -0,0 +1,5 @@
+public class Test {
+ public static void main(String args[]) {
+ System.out.println(1 / 0);
+ }
+}
diff --git a/test cases/failing build/9 java werror/meson.build b/test cases/failing build/9 java werror/meson.build
new file mode 100644
index 000000000000..392325988e5d
--- /dev/null
+++ b/test cases/failing build/9 java werror/meson.build
@@ -0,0 +1,8 @@
+project('test', default_options: ['werror=true'])
+if meson.backend() != 'ninja'
+ error('MESON_SKIP_TEST: Java requires Ninja backend')
+endif
+if not add_languages('java', required: false)
+ error('MESON_SKIP_TEST: Java not found')
+endif
+jar('prog', 'Test.java')
diff --git a/test cases/failing/109 cmake executable dependency/meson.build b/test cases/failing/109 cmake executable dependency/meson.build
index 48d8fcbb637c..0fc0f9b6a6e8 100644
--- a/test cases/failing/109 cmake executable dependency/meson.build
+++ b/test cases/failing/109 cmake executable dependency/meson.build
@@ -1,9 +1,5 @@
project('cmake-executable-dependency', 'c')
-if not find_program('cmake', required: false).found()
- error('MESON_SKIP_TEST CMake is not installed')
-endif
-
cmake = import('cmake')
cmlib = cmake.subproject('cmlib')
maind = cmlib.dependency('main')
diff --git a/test cases/failing/109 cmake executable dependency/subprojects/cmlib/CMakeLists.txt b/test cases/failing/109 cmake executable dependency/subprojects/cmlib/CMakeLists.txt
index 006787986f0f..e927eae8f944 100644
--- a/test cases/failing/109 cmake executable dependency/subprojects/cmlib/CMakeLists.txt
+++ b/test cases/failing/109 cmake executable dependency/subprojects/cmlib/CMakeLists.txt
@@ -1,4 +1,4 @@
-cmake_minimum_required(VERSION 3.5)
+cmake_minimum_required(VERSION ${CMAKE_VERSION})
project(cmlib)
diff --git a/test cases/failing/109 cmake executable dependency/test.json b/test cases/failing/109 cmake executable dependency/test.json
index f7c477555694..92a6ee1c5916 100644
--- a/test cases/failing/109 cmake executable dependency/test.json
+++ b/test cases/failing/109 cmake executable dependency/test.json
@@ -1,7 +1,7 @@
{
"stdout": [
{
- "line": "test cases/failing/109 cmake executable dependency/meson.build:9:14: ERROR: main is an executable and does not support the dependency() method. Use target() instead."
+ "line": "test cases/failing/109 cmake executable dependency/meson.build:5:14: ERROR: main is an executable and does not support the dependency() method. Use target() instead."
}
],
"tools": {
diff --git a/test cases/failing/112 run_target in test/test.json b/test cases/failing/112 run_target in test/test.json
index 100db94821d3..515897899d34 100644
--- a/test cases/failing/112 run_target in test/test.json
+++ b/test cases/failing/112 run_target in test/test.json
@@ -1,7 +1,7 @@
{
"stdout": [
{
- "line": "test cases/failing/112 run_target in test/meson.build:7:0: ERROR: test keyword argument 'args' was of type array[RunTarget] but should have been array[str | File | BuildTarget | CustomTarget | CustomTargetIndex]"
+ "line": "test cases/failing/112 run_target in test/meson.build:7:0: ERROR: test keyword argument 'args' was of type array[RunTarget] but should have been array[str | File | BuildTarget | CustomTarget | CustomTargetIndex | ExternalProgram]"
}
]
}
diff --git a/test cases/failing/119 cmake subproject error/meson.build b/test cases/failing/119 cmake subproject error/meson.build
index a308239fc7cf..9304af7e874c 100644
--- a/test cases/failing/119 cmake subproject error/meson.build
+++ b/test cases/failing/119 cmake subproject error/meson.build
@@ -1,8 +1,4 @@
project('cmake-executable-dependency')
-if not find_program('cmake', required: false).found()
- error('MESON_SKIP_TEST CMake is not installed')
-endif
-
cmake = import('cmake')
cmlib = cmake.subproject('cmlib')
diff --git a/test cases/failing/119 cmake subproject error/subprojects/cmlib/CMakeLists.txt b/test cases/failing/119 cmake subproject error/subprojects/cmlib/CMakeLists.txt
index edbe39535c1f..a845525b8df0 100644
--- a/test cases/failing/119 cmake subproject error/subprojects/cmlib/CMakeLists.txt
+++ b/test cases/failing/119 cmake subproject error/subprojects/cmlib/CMakeLists.txt
@@ -1,4 +1,4 @@
-cmake_minimum_required(VERSION 3.5)
+cmake_minimum_required(VERSION ${CMAKE_VERSION})
project(cmlib)
diff --git a/test cases/failing/119 cmake subproject error/test.json b/test cases/failing/119 cmake subproject error/test.json
index 625e45175fc1..d8271a22a065 100644
--- a/test cases/failing/119 cmake subproject error/test.json
+++ b/test cases/failing/119 cmake subproject error/test.json
@@ -1,7 +1,7 @@
{
"stdout": [
{
- "line": "test cases/failing/119 cmake subproject error/meson.build:8:14: ERROR: Failed to configure the CMake subproject: Fancy error message"
+ "line": "test cases/failing/119 cmake subproject error/meson.build:4:14: ERROR: Failed to configure the CMake subproject: Fancy error message"
}
],
"tools": {
diff --git a/test cases/format/5 transform/genexpected.cmd b/test cases/format/5 transform/genexpected.cmd
index de3699ddebc7..c273a99c5b84 100644
--- a/test cases/format/5 transform/genexpected.cmd
+++ b/test cases/format/5 transform/genexpected.cmd
@@ -1,6 +1,6 @@
@echo off
REM This script generates the expected files
-REM Please double-check the contents of those files before commiting them!!!
+REM Please double-check the contents of those files before committing them!!!
python ../../../meson.py format -o default.expected.meson source.meson
python ../../../meson.py format -c muon.ini -o muon.expected.meson source.meson
diff --git a/test cases/frameworks/17 mpi/meson.build b/test cases/frameworks/17 mpi/meson.build
index d1d89915579f..a682c281d8ea 100644
--- a/test cases/frameworks/17 mpi/meson.build
+++ b/test cases/frameworks/17 mpi/meson.build
@@ -32,6 +32,16 @@ test('MPI C++', execpp, timeout: 20)
if add_languages('fortran', required : false)
+ if method in ['auto', 'pkg-config']
+ # https://bugs.debian.org/1078026
+ fs = import('fs')
+ if fs.is_dir('/ci') and fs.exists('/usr/lib/x86_64-linux-gnu/pkgconfig/ompi-fort.pc')
+ if fs.hash('/usr/lib/x86_64-linux-gnu/pkgconfig/ompi-fort.pc', 'md5') == '0892a93630e3d3359c43c58d5a82efc0'
+ error('MESON_SKIP_TEST: openmpi pkgconfig file is broken on Debian/Ubuntu')
+ endif
+ endif
+ endif
+
fc = meson.get_compiler('fortran')
mpif = dependency('mpi', language : 'fortran', required: false, method : method)
if not fc.links('use mpi; end', dependencies: mpif, name: 'Fortran MPI')
diff --git a/test cases/frameworks/17 mpi/test.json b/test cases/frameworks/17 mpi/test.json
index cbd1686121c5..3a46657ef112 100644
--- a/test cases/frameworks/17 mpi/test.json
+++ b/test cases/frameworks/17 mpi/test.json
@@ -2,8 +2,10 @@
"matrix": {
"options": {
"method": [
- { "val": "auto" },
- { "val": "pkg-config" },
+ { "val": "auto",
+ "expect_skip_on_jobname": ["ubuntu"] },
+ { "val": "pkg-config",
+ "expect_skip_on_jobname": ["ubuntu"] },
{ "val": "config-tool",
"expect_skip_on_jobname": ["fedora"] },
{
diff --git a/test cases/frameworks/24 libgcrypt/test.json b/test cases/frameworks/24 libgcrypt/test.json
index 9c282daa86e7..4860d9e00e65 100644
--- a/test cases/frameworks/24 libgcrypt/test.json
+++ b/test cases/frameworks/24 libgcrypt/test.json
@@ -1,3 +1,3 @@
{
- "expect_skip_on_jobname": ["arch", "azure", "cygwin", "msys2"]
+ "expect_skip_on_jobname": ["arch", "azure", "cygwin", "msys2", "ubuntu-rolling"]
}
diff --git a/test cases/frameworks/25 hdf5/meson.build b/test cases/frameworks/25 hdf5/meson.build
index b9f5784b4fd6..38e001202bfc 100644
--- a/test cases/frameworks/25 hdf5/meson.build
+++ b/test cases/frameworks/25 hdf5/meson.build
@@ -1,4 +1,8 @@
-project('hdf5_framework', 'c')
+project(
+ 'hdf5_framework',
+ 'c',
+ default_options : ['cpp_std=c++11'],
+)
# NOTE: all HDF5 languages must have HDF5 C library working.
diff --git a/test cases/frameworks/25 hdf5/test.json b/test cases/frameworks/25 hdf5/test.json
index 590d4bc20283..2448f57425d7 100644
--- a/test cases/frameworks/25 hdf5/test.json
+++ b/test cases/frameworks/25 hdf5/test.json
@@ -3,7 +3,7 @@
"options": {
"method": [
{ "val": "pkg-config", "expect_skip_on_jobname": ["linux-gentoo-gcc"] },
- { "val": "config-tool", "expect_skip_on_jobname": ["macos"] }
+ { "val": "config-tool" }
]
}
},
diff --git a/test cases/frameworks/35 boost symlinks/boost/include/boost b/test cases/frameworks/35 boost symlinks/boost/include/boost
deleted file mode 120000
index 8acd7e291d4c..000000000000
--- a/test cases/frameworks/35 boost symlinks/boost/include/boost
+++ /dev/null
@@ -1 +0,0 @@
-../Cellar/boost/0.3.0/include/boost
\ No newline at end of file
diff --git a/test cases/frameworks/35 boost symlinks/meson.build b/test cases/frameworks/35 boost symlinks/meson.build
index b49a143ef40d..b3767f1bdebe 100644
--- a/test cases/frameworks/35 boost symlinks/meson.build
+++ b/test cases/frameworks/35 boost symlinks/meson.build
@@ -1,5 +1,11 @@
project('boosttestsymlinks', 'cpp')
+bm = build_machine.system()
+
+if bm == 'windows' or bm == 'cygwin'
+ error('MESON_SKIP_TEST: Windows and symlinks do not mix.')
+endif
+
dep = dependency('boost', modules : ['regex', 'python'], required: false)
assert(dep.found(), 'expected to find a fake version of boost')
diff --git a/test cases/frameworks/35 boost symlinks/test.json b/test cases/frameworks/35 boost symlinks/test.json
new file mode 100644
index 000000000000..23af7d93189d
--- /dev/null
+++ b/test cases/frameworks/35 boost symlinks/test.json
@@ -0,0 +1,3 @@
+{
+ "expect_skip_on_jobname": ["azure", "cygwin", "msys2"]
+}
diff --git a/test cases/frameworks/38 gir both_libraries/bar.c b/test cases/frameworks/38 gir both_libraries/bar.c
new file mode 100644
index 000000000000..4cb41f798294
--- /dev/null
+++ b/test cases/frameworks/38 gir both_libraries/bar.c
@@ -0,0 +1,7 @@
+#include "bar.h"
+#include "foo.h"
+
+int bar_func(void)
+{
+ return foo_func() + 42;
+}
diff --git a/test cases/frameworks/38 gir both_libraries/bar.h b/test cases/frameworks/38 gir both_libraries/bar.h
new file mode 100644
index 000000000000..d22827b837f7
--- /dev/null
+++ b/test cases/frameworks/38 gir both_libraries/bar.h
@@ -0,0 +1 @@
+int bar_func(void);
diff --git a/test cases/frameworks/38 gir both_libraries/foo.c b/test cases/frameworks/38 gir both_libraries/foo.c
new file mode 100644
index 000000000000..b88aa91dabb4
--- /dev/null
+++ b/test cases/frameworks/38 gir both_libraries/foo.c
@@ -0,0 +1,6 @@
+#include "foo.h"
+
+int foo_func(void)
+{
+ return 42;
+}
diff --git a/test cases/frameworks/38 gir both_libraries/foo.h b/test cases/frameworks/38 gir both_libraries/foo.h
new file mode 100644
index 000000000000..2a0867249307
--- /dev/null
+++ b/test cases/frameworks/38 gir both_libraries/foo.h
@@ -0,0 +1 @@
+int foo_func(void);
diff --git a/test cases/frameworks/38 gir both_libraries/meson.build b/test cases/frameworks/38 gir both_libraries/meson.build
new file mode 100644
index 000000000000..cb9cdd31f3ed
--- /dev/null
+++ b/test cases/frameworks/38 gir both_libraries/meson.build
@@ -0,0 +1,42 @@
+project('gir both libraries', 'c')
+
+gir = dependency('gobject-introspection-1.0', required: false)
+if not gir.found()
+ error('MESON_SKIP_TEST gobject-introspection not found.')
+endif
+
+if host_machine.system() == 'cygwin'
+ # FIXME: g-ir-scanner seems broken on cygwin:
+ # ERROR: can't resolve libraries to shared libraries: foo++
+ error('MESON_SKIP_TEST g-ir-scanner is broken on cygwin.')
+endif
+
+gnome = import('gnome')
+
+# Regression test simulating how GStreamer generate its GIRs.
+# Generated gobject-introspection binaries for every GStreamer libraries must
+# first call gst_init() defined in the main libgstreamer, which means they need
+# to link on that lib.
+# A regression caused by https://github.com/mesonbuild/meson/pull/12632 made
+# Meson not link the binary generated for bar with libfoo in the case it uses
+# both_libraries().
+
+libfoo = both_libraries('foo', 'foo.c')
+foo_gir = gnome.generate_gir(libfoo,
+ namespace: 'foo',
+ nsversion: '1.0',
+ sources: ['foo.c', 'foo.h'],
+)
+foo_dep = declare_dependency(
+ link_with: libfoo,
+ sources: foo_gir,
+)
+
+libbar = both_libraries('bar', 'bar.c', dependencies: foo_dep)
+gnome.generate_gir(libbar,
+ namespace: 'bar',
+ nsversion: '1.0',
+ sources: ['bar.c', 'bar.h'],
+ extra_args: '--add-init-section=extern void foo_func(void);foo_func();',
+ dependencies: foo_dep,
+)
diff --git a/test cases/frameworks/38 gir both_libraries/test.json b/test cases/frameworks/38 gir both_libraries/test.json
new file mode 100644
index 000000000000..82ac42a293b3
--- /dev/null
+++ b/test cases/frameworks/38 gir both_libraries/test.json
@@ -0,0 +1,3 @@
+{
+ "expect_skip_on_jobname": ["azure", "macos", "msys2", "cygwin"]
+}
diff --git a/test cases/frameworks/4 qt/meson.build b/test cases/frameworks/4 qt/meson.build
index 54cd7cb9bac3..58ec4d16f786 100644
--- a/test cases/frameworks/4 qt/meson.build
+++ b/test cases/frameworks/4 qt/meson.build
@@ -48,7 +48,13 @@ foreach qt : ['qt4', 'qt5', 'qt6']
qtdep = dependency(qt, modules : qt_modules, main : true, private_headers: true, required : required, method : get_option('method'))
if qtdep.found()
qtmodule = import(qt)
- assert(qtmodule.has_tools(), 'You may be missing a devel package. (qttools5-dev-tools on Debian based systems)')
+ if get_option('expect_lrelease')
+ assert(qtmodule.has_tools(), 'You may be missing a devel package. (qttools5-dev-tools on Debian based systems)')
+ else
+ assert(not qtmodule.has_tools(), 'Unexpectedly found lrelease')
+ assert(not qtmodule.has_tools(tools: ['lrelease']), 'Unexpectedly found lrelease')
+ assert(qtmodule.has_tools(tools: ['moc', 'uic', 'rcc']), 'You may be missing a devel package. (qttools5-dev-tools on Debian based systems)')
+ endif
# Test that fetching a variable works and yields a non-empty value
assert(qtdep.get_variable('prefix', configtool: 'QT_INSTALL_PREFIX') != '')
@@ -91,23 +97,25 @@ foreach qt : ['qt4', 'qt5', 'qt6']
# qt4-rcc and qt5-rcc take different arguments, for example qt4: ['-compress', '3']; qt5: '--compress=3'
qtmodule.preprocess(qt + 'testrccarg', qresources : files(['stuff.qrc', 'stuff2.qrc']), rcc_extra_arguments : '--compress=3', method : get_option('method'))
- translations_cpp = qtmodule.compile_translations(qresource: qt+'_lang.qrc')
- # unity builds suck and definitely cannot handle two qrc embeds in one compilation unit
- unityproof_translations = static_library(qt+'unityproof_translations', translations_cpp, dependencies: qtdep)
+ if get_option('expect_lrelease')
+ translations_cpp = qtmodule.compile_translations(qresource: qt+'_lang.qrc')
+ # unity builds suck and definitely cannot handle two qrc embeds in one compilation unit
+ unityproof_translations = static_library(qt+'unityproof_translations', translations_cpp, dependencies: qtdep)
- extra_cpp_args += '-DQT="@0@"'.format(qt)
- qexe = executable(qt + 'app',
- sources : ['main.cpp', 'mainWindow.cpp', # Sources that don't need preprocessing.
- prep, prep_rcc],
- dependencies : qtdep,
- link_with: unityproof_translations,
- cpp_args: extra_cpp_args,
- gui_app : true)
+ extra_cpp_args += '-DQT="@0@"'.format(qt)
+ qexe = executable(qt + 'app',
+ sources : ['main.cpp', 'mainWindow.cpp', # Sources that don't need preprocessing.
+ prep, prep_rcc],
+ dependencies : qtdep,
+ link_with: unityproof_translations,
+ cpp_args: extra_cpp_args,
+ gui_app : true)
- # We need a console test application because some test environments
- # do not have an X server.
+ # We need a console test application because some test environments
+ # do not have an X server.
- translations = qtmodule.compile_translations(ts_files : qt+'core_fr.ts', build_by_default : true)
+ translations = qtmodule.compile_translations(ts_files : qt+'core_fr.ts', build_by_default : true)
+ endif
qtcore = dependency(qt, modules : 'Core', method : get_option('method'))
diff --git a/test cases/frameworks/4 qt/meson_options.txt b/test cases/frameworks/4 qt/meson_options.txt
index 223f4fb0b05f..9a7513434012 100644
--- a/test cases/frameworks/4 qt/meson_options.txt
+++ b/test cases/frameworks/4 qt/meson_options.txt
@@ -1,2 +1,3 @@
option('method', type : 'string', value : 'auto', description : 'The method to use to find Qt')
option('required', type : 'string', value : 'qt5', description : 'The version of Qt which is required to be present')
+option('expect_lrelease', type: 'boolean', value: true)
diff --git a/test cases/linuxlike/13 cmake dependency/meson.build b/test cases/linuxlike/13 cmake dependency/meson.build
index f612e1d52d6d..812dcf9c5c19 100644
--- a/test cases/linuxlike/13 cmake dependency/meson.build
+++ b/test cases/linuxlike/13 cmake dependency/meson.build
@@ -2,10 +2,7 @@
# due to use of setup_env.json
project('external CMake dependency', ['c', 'cpp'])
-cmake = find_program('cmake', required: false)
-if not cmake.found()
- error('MESON_SKIP_TEST cmake binary not available.')
-endif
+cmake = find_program('cmake')
# Zlib is probably on all dev machines.
diff --git a/test cases/linuxlike/13 cmake dependency/test.json b/test cases/linuxlike/13 cmake dependency/test.json
index 484ce202cfbc..208b59193835 100644
--- a/test cases/linuxlike/13 cmake dependency/test.json
+++ b/test cases/linuxlike/13 cmake dependency/test.json
@@ -10,5 +10,8 @@
{
"line": " ['CMMesonTESTf1::evil_non_standard_target']"
}
- ]
+ ],
+ "tools": {
+ "cmake": ">=3.11"
+ }
}
diff --git a/test cases/osx/11 case sensitive apfs/meson.build b/test cases/osx/11 case sensitive apfs/meson.build
new file mode 100644
index 000000000000..dd566b185f28
--- /dev/null
+++ b/test cases/osx/11 case sensitive apfs/meson.build
@@ -0,0 +1,5 @@
+project('case-sensitive APFS with extra frameworks test', 'c')
+
+dep = dependency('FoUnDaTiOn')
+
+exe = executable('prog', 'prog.c', install : true, dependencies: dep)
diff --git a/test cases/osx/11 case sensitive apfs/prog.c b/test cases/osx/11 case sensitive apfs/prog.c
new file mode 100644
index 000000000000..9b6bdc2ec2f0
--- /dev/null
+++ b/test cases/osx/11 case sensitive apfs/prog.c
@@ -0,0 +1,3 @@
+int main(void) {
+ return 0;
+}
diff --git a/test cases/osx/11 case sensitive apfs/test.json b/test cases/osx/11 case sensitive apfs/test.json
new file mode 100644
index 000000000000..a883714eaa27
--- /dev/null
+++ b/test cases/osx/11 case sensitive apfs/test.json
@@ -0,0 +1,5 @@
+{
+ "installed": [
+ {"type": "file", "file": "usr/bin/prog"}
+ ]
+}
diff --git a/test cases/osx/9 framework recasting/test.json b/test cases/osx/9 framework recasting/test.json
new file mode 100644
index 000000000000..77003c4dc3f6
--- /dev/null
+++ b/test cases/osx/9 framework recasting/test.json
@@ -0,0 +1,5 @@
+{
+ "tools": {
+ "cmake": ">=3.11"
+ }
+}
diff --git a/test cases/rust/25 cargo lock/test.json b/test cases/rust/25 cargo lock/test.json
new file mode 100644
index 000000000000..6e2a1ca1d0a2
--- /dev/null
+++ b/test cases/rust/25 cargo lock/test.json
@@ -0,0 +1,3 @@
+{
+ "cleanup": ["subprojects/bar-0.1"]
+}
diff --git a/test cases/rust/5 polyglot static/meson.build b/test cases/rust/5 polyglot static/meson.build
index 54f383cd3872..180f86ecb95c 100644
--- a/test cases/rust/5 polyglot static/meson.build
+++ b/test cases/rust/5 polyglot static/meson.build
@@ -19,7 +19,7 @@ e = executable('prog', 'prog.c',
test('polyglottest', e)
# Create a version that has overflow-checks on, then run a test to ensure that
-# the overflow-checks is larger than the other version by some ammount
+# the overflow-checks is larger than the other version by some amount
r2 = static_library('stuff2', 'stuff.rs', rust_crate_type : 'staticlib', rust_args : ['-C', 'overflow-checks=on'])
l2 = static_library('clib2', 'clib.c')
e2 = executable('prog2', 'prog.c', link_with : [r2, l2])
diff --git a/test cases/rust/9 unit tests/test3.rs b/test cases/rust/9 unit tests/test3.rs
index 6d538a059ff1..72349a63cd66 100644
--- a/test cases/rust/9 unit tests/test3.rs
+++ b/test cases/rust/9 unit tests/test3.rs
@@ -8,7 +8,7 @@ mod tests {
use super::*;
- // This is an intentinally broken test that should be turned off by extra rust arguments
+ // This is an intentionally broken test that should be turned off by extra rust arguments
#[cfg(not(broken = "false"))]
#[test]
fn test_broken() {
diff --git a/test cases/unit/1 soname/CMakeLists.txt b/test cases/unit/1 soname/CMakeLists.txt
index c4f2e3eb93d8..47571b1c1e7b 100644
--- a/test cases/unit/1 soname/CMakeLists.txt
+++ b/test cases/unit/1 soname/CMakeLists.txt
@@ -7,7 +7,7 @@
# soname to 1.2.3 but Autotools sets it to 1.
project(vertest C)
-cmake_minimum_required(VERSION 3.5)
+cmake_minimum_required(VERSION ${CMAKE_VERSION})
add_library(nover SHARED versioned.c)
diff --git a/test cases/unit/101 relative find program/foo.py b/test cases/unit/100 relative find program/foo.py
similarity index 100%
rename from test cases/unit/101 relative find program/foo.py
rename to test cases/unit/100 relative find program/foo.py
diff --git a/test cases/unit/101 relative find program/meson.build b/test cases/unit/100 relative find program/meson.build
similarity index 100%
rename from test cases/unit/101 relative find program/meson.build
rename to test cases/unit/100 relative find program/meson.build
diff --git a/test cases/unit/101 relative find program/subdir/meson.build b/test cases/unit/100 relative find program/subdir/meson.build
similarity index 100%
rename from test cases/unit/101 relative find program/subdir/meson.build
rename to test cases/unit/100 relative find program/subdir/meson.build
diff --git a/test cases/unit/102 rlib linkage/lib2.rs b/test cases/unit/101 rlib linkage/lib2.rs
similarity index 100%
rename from test cases/unit/102 rlib linkage/lib2.rs
rename to test cases/unit/101 rlib linkage/lib2.rs
diff --git a/test cases/unit/102 rlib linkage/main.rs b/test cases/unit/101 rlib linkage/main.rs
similarity index 100%
rename from test cases/unit/102 rlib linkage/main.rs
rename to test cases/unit/101 rlib linkage/main.rs
diff --git a/test cases/unit/102 rlib linkage/meson.build b/test cases/unit/101 rlib linkage/meson.build
similarity index 100%
rename from test cases/unit/102 rlib linkage/meson.build
rename to test cases/unit/101 rlib linkage/meson.build
diff --git a/test cases/unit/103 python without pkgconfig/meson.build b/test cases/unit/102 python without pkgconfig/meson.build
similarity index 100%
rename from test cases/unit/103 python without pkgconfig/meson.build
rename to test cases/unit/102 python without pkgconfig/meson.build
diff --git a/test cases/unit/104 strip/lib.c b/test cases/unit/103 strip/lib.c
similarity index 100%
rename from test cases/unit/104 strip/lib.c
rename to test cases/unit/103 strip/lib.c
diff --git a/test cases/unit/104 strip/meson.build b/test cases/unit/103 strip/meson.build
similarity index 100%
rename from test cases/unit/104 strip/meson.build
rename to test cases/unit/103 strip/meson.build
diff --git a/test cases/unit/105 debug function/meson.build b/test cases/unit/104 debug function/meson.build
similarity index 100%
rename from test cases/unit/105 debug function/meson.build
rename to test cases/unit/104 debug function/meson.build
diff --git a/test cases/unit/106 pkgconfig relocatable with absolute path/meson.build b/test cases/unit/105 pkgconfig relocatable with absolute path/meson.build
similarity index 100%
rename from test cases/unit/106 pkgconfig relocatable with absolute path/meson.build
rename to test cases/unit/105 pkgconfig relocatable with absolute path/meson.build
diff --git a/test cases/unit/121 executable suffix/meson.build b/test cases/unit/121 executable suffix/meson.build
index 8f952260cbf9..7dff9d69e1e1 100644
--- a/test cases/unit/121 executable suffix/meson.build
+++ b/test cases/unit/121 executable suffix/meson.build
@@ -1,3 +1,3 @@
-project('exectuable suffix', 'c')
+project('executable suffix', 'c')
foo = executable('foo', 'main.c')
foo_bin = executable('foo', 'main.c', name_suffix: 'bin')
diff --git a/test cases/unit/123 pkgsubproj/meson.build b/test cases/unit/123 pkgsubproj/meson.build
new file mode 100644
index 000000000000..b4cf89fa0b6d
--- /dev/null
+++ b/test cases/unit/123 pkgsubproj/meson.build
@@ -0,0 +1,3 @@
+project('pkg_opt_test')
+
+subproject('sub')
diff --git a/test cases/unit/123 pkgsubproj/subprojects/sub/meson.build b/test cases/unit/123 pkgsubproj/subprojects/sub/meson.build
new file mode 100644
index 000000000000..99622b681cdd
--- /dev/null
+++ b/test cases/unit/123 pkgsubproj/subprojects/sub/meson.build
@@ -0,0 +1 @@
+project('subproject', default_options: 'pkgconfig.relocatable=true')
diff --git a/test cases/unit/35 dist script/subprojects/sub/dist-script.py b/test cases/unit/35 dist script/subprojects/sub/dist-script.py
index 5f1b4a12443a..5166a26f54d0 100644
--- a/test cases/unit/35 dist script/subprojects/sub/dist-script.py
+++ b/test cases/unit/35 dist script/subprojects/sub/dist-script.py
@@ -12,7 +12,7 @@
mesonrewrite = shlex.split(os.environ['MESONREWRITE'])
rewrite_cmd = ['kwargs', 'set', 'project', '/', 'version', 'release']
-subprocess.run([*mesonrewrite, '-s', source_root, *rewrite_cmd], check=True)
+subprocess.run([*mesonrewrite, '-s', str(source_root.absolute()), *rewrite_cmd], check=True)
modfile = source_root / 'prog.c'
with modfile.open('w') as f:
diff --git a/test cases/unit/92 install skip subprojects/foo.c b/test cases/unit/91 install skip subprojects/foo.c
similarity index 100%
rename from test cases/unit/92 install skip subprojects/foo.c
rename to test cases/unit/91 install skip subprojects/foo.c
diff --git a/test cases/unit/92 install skip subprojects/foo.dat b/test cases/unit/91 install skip subprojects/foo.dat
similarity index 100%
rename from test cases/unit/92 install skip subprojects/foo.dat
rename to test cases/unit/91 install skip subprojects/foo.dat
diff --git a/test cases/unit/92 install skip subprojects/foo.h b/test cases/unit/91 install skip subprojects/foo.h
similarity index 100%
rename from test cases/unit/92 install skip subprojects/foo.h
rename to test cases/unit/91 install skip subprojects/foo.h
diff --git a/test cases/unit/92 install skip subprojects/foo/foofile b/test cases/unit/91 install skip subprojects/foo/foofile
similarity index 100%
rename from test cases/unit/92 install skip subprojects/foo/foofile
rename to test cases/unit/91 install skip subprojects/foo/foofile
diff --git a/test cases/unit/92 install skip subprojects/meson.build b/test cases/unit/91 install skip subprojects/meson.build
similarity index 100%
rename from test cases/unit/92 install skip subprojects/meson.build
rename to test cases/unit/91 install skip subprojects/meson.build
diff --git a/test cases/unit/92 install skip subprojects/subprojects/bar/bar.c b/test cases/unit/91 install skip subprojects/subprojects/bar/bar.c
similarity index 100%
rename from test cases/unit/92 install skip subprojects/subprojects/bar/bar.c
rename to test cases/unit/91 install skip subprojects/subprojects/bar/bar.c
diff --git a/test cases/unit/92 install skip subprojects/subprojects/bar/bar.dat b/test cases/unit/91 install skip subprojects/subprojects/bar/bar.dat
similarity index 100%
rename from test cases/unit/92 install skip subprojects/subprojects/bar/bar.dat
rename to test cases/unit/91 install skip subprojects/subprojects/bar/bar.dat
diff --git a/test cases/unit/92 install skip subprojects/subprojects/bar/bar.h b/test cases/unit/91 install skip subprojects/subprojects/bar/bar.h
similarity index 100%
rename from test cases/unit/92 install skip subprojects/subprojects/bar/bar.h
rename to test cases/unit/91 install skip subprojects/subprojects/bar/bar.h
diff --git a/test cases/unit/92 install skip subprojects/subprojects/bar/bar/barfile b/test cases/unit/91 install skip subprojects/subprojects/bar/bar/barfile
similarity index 100%
rename from test cases/unit/92 install skip subprojects/subprojects/bar/bar/barfile
rename to test cases/unit/91 install skip subprojects/subprojects/bar/bar/barfile
diff --git a/test cases/unit/92 install skip subprojects/subprojects/bar/meson.build b/test cases/unit/91 install skip subprojects/subprojects/bar/meson.build
similarity index 100%
rename from test cases/unit/92 install skip subprojects/subprojects/bar/meson.build
rename to test cases/unit/91 install skip subprojects/subprojects/bar/meson.build
diff --git a/test cases/unit/93 new subproject in configured project/meson.build b/test cases/unit/92 new subproject in configured project/meson.build
similarity index 100%
rename from test cases/unit/93 new subproject in configured project/meson.build
rename to test cases/unit/92 new subproject in configured project/meson.build
diff --git a/test cases/unit/93 new subproject in configured project/meson_options.txt b/test cases/unit/92 new subproject in configured project/meson_options.txt
similarity index 100%
rename from test cases/unit/93 new subproject in configured project/meson_options.txt
rename to test cases/unit/92 new subproject in configured project/meson_options.txt
diff --git a/test cases/unit/93 new subproject in configured project/subprojects/sub/foo.c b/test cases/unit/92 new subproject in configured project/subprojects/sub/foo.c
similarity index 100%
rename from test cases/unit/93 new subproject in configured project/subprojects/sub/foo.c
rename to test cases/unit/92 new subproject in configured project/subprojects/sub/foo.c
diff --git a/test cases/unit/93 new subproject in configured project/subprojects/sub/meson.build b/test cases/unit/92 new subproject in configured project/subprojects/sub/meson.build
similarity index 100%
rename from test cases/unit/93 new subproject in configured project/subprojects/sub/meson.build
rename to test cases/unit/92 new subproject in configured project/subprojects/sub/meson.build
diff --git a/test cases/unit/94 clangformat/.clang-format b/test cases/unit/93 clangformat/.clang-format
similarity index 100%
rename from test cases/unit/94 clangformat/.clang-format
rename to test cases/unit/93 clangformat/.clang-format
diff --git a/test cases/unit/94 clangformat/.clang-format-ignore b/test cases/unit/93 clangformat/.clang-format-ignore
similarity index 100%
rename from test cases/unit/94 clangformat/.clang-format-ignore
rename to test cases/unit/93 clangformat/.clang-format-ignore
diff --git a/test cases/unit/94 clangformat/.clang-format-include b/test cases/unit/93 clangformat/.clang-format-include
similarity index 100%
rename from test cases/unit/94 clangformat/.clang-format-include
rename to test cases/unit/93 clangformat/.clang-format-include
diff --git a/test cases/unit/94 clangformat/meson.build b/test cases/unit/93 clangformat/meson.build
similarity index 100%
rename from test cases/unit/94 clangformat/meson.build
rename to test cases/unit/93 clangformat/meson.build
diff --git a/test cases/unit/94 clangformat/not-included/badformat.cpp b/test cases/unit/93 clangformat/not-included/badformat.cpp
similarity index 100%
rename from test cases/unit/94 clangformat/not-included/badformat.cpp
rename to test cases/unit/93 clangformat/not-included/badformat.cpp
diff --git a/test cases/unit/94 clangformat/src/badformat.c b/test cases/unit/93 clangformat/src/badformat.c
similarity index 100%
rename from test cases/unit/94 clangformat/src/badformat.c
rename to test cases/unit/93 clangformat/src/badformat.c
diff --git a/test cases/unit/94 clangformat/src/badformat.cpp b/test cases/unit/93 clangformat/src/badformat.cpp
similarity index 100%
rename from test cases/unit/94 clangformat/src/badformat.cpp
rename to test cases/unit/93 clangformat/src/badformat.cpp
diff --git a/test cases/unit/95 custominc/easytogrepfor/genh.py b/test cases/unit/94 custominc/easytogrepfor/genh.py
similarity index 100%
rename from test cases/unit/95 custominc/easytogrepfor/genh.py
rename to test cases/unit/94 custominc/easytogrepfor/genh.py
diff --git a/test cases/unit/95 custominc/easytogrepfor/meson.build b/test cases/unit/94 custominc/easytogrepfor/meson.build
similarity index 100%
rename from test cases/unit/95 custominc/easytogrepfor/meson.build
rename to test cases/unit/94 custominc/easytogrepfor/meson.build
diff --git a/test cases/unit/95 custominc/helper.c b/test cases/unit/94 custominc/helper.c
similarity index 100%
rename from test cases/unit/95 custominc/helper.c
rename to test cases/unit/94 custominc/helper.c
diff --git a/test cases/unit/95 custominc/meson.build b/test cases/unit/94 custominc/meson.build
similarity index 100%
rename from test cases/unit/95 custominc/meson.build
rename to test cases/unit/94 custominc/meson.build
diff --git a/test cases/unit/95 custominc/prog.c b/test cases/unit/94 custominc/prog.c
similarity index 100%
rename from test cases/unit/95 custominc/prog.c
rename to test cases/unit/94 custominc/prog.c
diff --git a/test cases/unit/95 custominc/prog2.c b/test cases/unit/94 custominc/prog2.c
similarity index 100%
rename from test cases/unit/95 custominc/prog2.c
rename to test cases/unit/94 custominc/prog2.c
diff --git a/test cases/unit/96 implicit force fallback/meson.build b/test cases/unit/95 implicit force fallback/meson.build
similarity index 100%
rename from test cases/unit/96 implicit force fallback/meson.build
rename to test cases/unit/95 implicit force fallback/meson.build
diff --git a/test cases/unit/96 implicit force fallback/subprojects/something/meson.build b/test cases/unit/95 implicit force fallback/subprojects/something/meson.build
similarity index 100%
rename from test cases/unit/96 implicit force fallback/subprojects/something/meson.build
rename to test cases/unit/95 implicit force fallback/subprojects/something/meson.build
diff --git a/test cases/unit/97 compiler.links file arg/meson.build b/test cases/unit/96 compiler.links file arg/meson.build
similarity index 100%
rename from test cases/unit/97 compiler.links file arg/meson.build
rename to test cases/unit/96 compiler.links file arg/meson.build
diff --git a/test cases/unit/97 compiler.links file arg/test.c b/test cases/unit/96 compiler.links file arg/test.c
similarity index 100%
rename from test cases/unit/97 compiler.links file arg/test.c
rename to test cases/unit/96 compiler.links file arg/test.c
diff --git a/test cases/unit/98 link full name/.gitignore b/test cases/unit/97 link full name/.gitignore
similarity index 100%
rename from test cases/unit/98 link full name/.gitignore
rename to test cases/unit/97 link full name/.gitignore
diff --git a/test cases/unit/98 link full name/libtestprovider/meson.build b/test cases/unit/97 link full name/libtestprovider/meson.build
similarity index 100%
rename from test cases/unit/98 link full name/libtestprovider/meson.build
rename to test cases/unit/97 link full name/libtestprovider/meson.build
diff --git a/test cases/unit/98 link full name/libtestprovider/provider.c b/test cases/unit/97 link full name/libtestprovider/provider.c
similarity index 100%
rename from test cases/unit/98 link full name/libtestprovider/provider.c
rename to test cases/unit/97 link full name/libtestprovider/provider.c
diff --git a/test cases/unit/98 link full name/proguser/meson.build b/test cases/unit/97 link full name/proguser/meson.build
similarity index 100%
rename from test cases/unit/98 link full name/proguser/meson.build
rename to test cases/unit/97 link full name/proguser/meson.build
diff --git a/test cases/unit/98 link full name/proguser/receiver.c b/test cases/unit/97 link full name/proguser/receiver.c
similarity index 100%
rename from test cases/unit/98 link full name/proguser/receiver.c
rename to test cases/unit/97 link full name/proguser/receiver.c
diff --git a/test cases/unit/99 install all targets/bar-custom.txt b/test cases/unit/98 install all targets/bar-custom.txt
similarity index 100%
rename from test cases/unit/99 install all targets/bar-custom.txt
rename to test cases/unit/98 install all targets/bar-custom.txt
diff --git a/test cases/unit/99 install all targets/bar-devel.h b/test cases/unit/98 install all targets/bar-devel.h
similarity index 100%
rename from test cases/unit/99 install all targets/bar-devel.h
rename to test cases/unit/98 install all targets/bar-devel.h
diff --git a/test cases/unit/99 install all targets/bar-notag.txt b/test cases/unit/98 install all targets/bar-notag.txt
similarity index 100%
rename from test cases/unit/99 install all targets/bar-notag.txt
rename to test cases/unit/98 install all targets/bar-notag.txt
diff --git a/test cases/unit/99 install all targets/custom_files/data.txt b/test cases/unit/98 install all targets/custom_files/data.txt
similarity index 100%
rename from test cases/unit/99 install all targets/custom_files/data.txt
rename to test cases/unit/98 install all targets/custom_files/data.txt
diff --git a/test cases/unit/99 install all targets/excludes/excluded.txt b/test cases/unit/98 install all targets/excludes/excluded.txt
similarity index 100%
rename from test cases/unit/99 install all targets/excludes/excluded.txt
rename to test cases/unit/98 install all targets/excludes/excluded.txt
diff --git a/test cases/unit/99 install all targets/excludes/excluded/placeholder.txt b/test cases/unit/98 install all targets/excludes/excluded/placeholder.txt
similarity index 100%
rename from test cases/unit/99 install all targets/excludes/excluded/placeholder.txt
rename to test cases/unit/98 install all targets/excludes/excluded/placeholder.txt
diff --git a/test cases/unit/99 install all targets/excludes/installed.txt b/test cases/unit/98 install all targets/excludes/installed.txt
similarity index 100%
rename from test cases/unit/99 install all targets/excludes/installed.txt
rename to test cases/unit/98 install all targets/excludes/installed.txt
diff --git a/test cases/unit/99 install all targets/foo.in b/test cases/unit/98 install all targets/foo.in
similarity index 100%
rename from test cases/unit/99 install all targets/foo.in
rename to test cases/unit/98 install all targets/foo.in
diff --git a/test cases/unit/99 install all targets/foo1-devel.h b/test cases/unit/98 install all targets/foo1-devel.h
similarity index 100%
rename from test cases/unit/99 install all targets/foo1-devel.h
rename to test cases/unit/98 install all targets/foo1-devel.h
diff --git a/test cases/unit/99 install all targets/lib.c b/test cases/unit/98 install all targets/lib.c
similarity index 100%
rename from test cases/unit/99 install all targets/lib.c
rename to test cases/unit/98 install all targets/lib.c
diff --git a/test cases/unit/99 install all targets/main.c b/test cases/unit/98 install all targets/main.c
similarity index 100%
rename from test cases/unit/99 install all targets/main.c
rename to test cases/unit/98 install all targets/main.c
diff --git a/test cases/unit/99 install all targets/meson.build b/test cases/unit/98 install all targets/meson.build
similarity index 100%
rename from test cases/unit/99 install all targets/meson.build
rename to test cases/unit/98 install all targets/meson.build
diff --git a/test cases/unit/99 install all targets/script.py b/test cases/unit/98 install all targets/script.py
similarity index 100%
rename from test cases/unit/99 install all targets/script.py
rename to test cases/unit/98 install all targets/script.py
diff --git a/test cases/unit/99 install all targets/subdir/bar2-devel.h b/test cases/unit/98 install all targets/subdir/bar2-devel.h
similarity index 100%
rename from test cases/unit/99 install all targets/subdir/bar2-devel.h
rename to test cases/unit/98 install all targets/subdir/bar2-devel.h
diff --git a/test cases/unit/99 install all targets/subdir/foo2.in b/test cases/unit/98 install all targets/subdir/foo2.in
similarity index 100%
rename from test cases/unit/99 install all targets/subdir/foo2.in
rename to test cases/unit/98 install all targets/subdir/foo2.in
diff --git a/test cases/unit/99 install all targets/subdir/foo3-devel.h b/test cases/unit/98 install all targets/subdir/foo3-devel.h
similarity index 100%
rename from test cases/unit/99 install all targets/subdir/foo3-devel.h
rename to test cases/unit/98 install all targets/subdir/foo3-devel.h
diff --git a/test cases/unit/99 install all targets/subdir/lib.c b/test cases/unit/98 install all targets/subdir/lib.c
similarity index 100%
rename from test cases/unit/99 install all targets/subdir/lib.c
rename to test cases/unit/98 install all targets/subdir/lib.c
diff --git a/test cases/unit/99 install all targets/subdir/main.c b/test cases/unit/98 install all targets/subdir/main.c
similarity index 100%
rename from test cases/unit/99 install all targets/subdir/main.c
rename to test cases/unit/98 install all targets/subdir/main.c
diff --git a/test cases/unit/99 install all targets/subdir/meson.build b/test cases/unit/98 install all targets/subdir/meson.build
similarity index 100%
rename from test cases/unit/99 install all targets/subdir/meson.build
rename to test cases/unit/98 install all targets/subdir/meson.build
diff --git a/test cases/unit/99 install all targets/subdir/script.py b/test cases/unit/98 install all targets/subdir/script.py
similarity index 100%
rename from test cases/unit/99 install all targets/subdir/script.py
rename to test cases/unit/98 install all targets/subdir/script.py
diff --git a/test cases/unit/99 install all targets/subprojects/subproject/aaa.txt b/test cases/unit/98 install all targets/subprojects/subproject/aaa.txt
similarity index 100%
rename from test cases/unit/99 install all targets/subprojects/subproject/aaa.txt
rename to test cases/unit/98 install all targets/subprojects/subproject/aaa.txt
diff --git a/test cases/unit/99 install all targets/subprojects/subproject/bbb.txt b/test cases/unit/98 install all targets/subprojects/subproject/bbb.txt
similarity index 100%
rename from test cases/unit/99 install all targets/subprojects/subproject/bbb.txt
rename to test cases/unit/98 install all targets/subprojects/subproject/bbb.txt
diff --git a/test cases/unit/99 install all targets/subprojects/subproject/meson.build b/test cases/unit/98 install all targets/subprojects/subproject/meson.build
similarity index 100%
rename from test cases/unit/99 install all targets/subprojects/subproject/meson.build
rename to test cases/unit/98 install all targets/subprojects/subproject/meson.build
diff --git a/test cases/unit/100 custom target name/file.txt.in b/test cases/unit/99 custom target name/file.txt.in
similarity index 100%
rename from test cases/unit/100 custom target name/file.txt.in
rename to test cases/unit/99 custom target name/file.txt.in
diff --git a/test cases/unit/100 custom target name/meson.build b/test cases/unit/99 custom target name/meson.build
similarity index 100%
rename from test cases/unit/100 custom target name/meson.build
rename to test cases/unit/99 custom target name/meson.build
diff --git a/test cases/unit/100 custom target name/subdir/meson.build b/test cases/unit/99 custom target name/subdir/meson.build
similarity index 100%
rename from test cases/unit/100 custom target name/subdir/meson.build
rename to test cases/unit/99 custom target name/subdir/meson.build
diff --git a/test cases/wasm/1 basic/meson.build b/test cases/wasm/1 basic/meson.build
index d27599271b1c..bbf37b636091 100644
--- a/test cases/wasm/1 basic/meson.build
+++ b/test cases/wasm/1 basic/meson.build
@@ -1,7 +1,7 @@
project('emcctest', 'c', 'cpp',
default_options: [
'c_std=c17',
- 'cpp_std=c++17',
+ 'cpp_std=c++26',
]
)
diff --git a/test cases/windows/23 diasdk/dia_registered.cpp b/test cases/windows/23 diasdk/dia_registered.cpp
new file mode 100644
index 000000000000..cec4fb2f678f
--- /dev/null
+++ b/test cases/windows/23 diasdk/dia_registered.cpp
@@ -0,0 +1,30 @@
+// Loads DIA SDK from system registry using CoCreateInstance().
+// The corresponding msdiaXXX.dll must be registered in system registry
+// (eg. run `regsvr32.exe msdia140.dll` as administrator)
+
+#include <iostream>
+#include <stdexcept>
+#include <windows.h>
+#include <dia2.h>
+
+int main()
+{
+ try {
+
+ HRESULT hr = CoInitialize(NULL);
+ if (FAILED(hr))
+ throw std::runtime_error("Failed to initialize COM library");
+
+ IDiaDataSource* datasrc;
+ hr = CoCreateInstance( CLSID_DiaSource, NULL, CLSCTX_INPROC_SERVER, __uuidof(IDiaDataSource), (void **)&datasrc);
+ if (FAILED(hr))
+ throw std::runtime_error("Can't create IDiaDataSource. You must register msdia*.dll with regsvr32.exe.");
+
+ std::cout << "DIA was successfully loaded\n";
+ return 0;
+
+ } catch (std::exception& err) {
+ std::cerr << err.what() << std::endl;
+ return 1;
+ }
+}
diff --git a/test cases/windows/23 diasdk/meson.build b/test cases/windows/23 diasdk/meson.build
new file mode 100644
index 000000000000..bb8477822a66
--- /dev/null
+++ b/test cases/windows/23 diasdk/meson.build
@@ -0,0 +1,13 @@
+project('diatest', 'cpp')
+
+if host_machine.system() != 'windows'
+ error('MESON_SKIP_TEST: unsupported platform')
+endif
+cpp = meson.get_compiler('cpp', native: false)
+is_msvc_clang = cpp.get_id() == 'clang' and cpp.get_define('_MSC_VER') != ''
+if not ['msvc', 'clang-cl'].contains(cpp.get_id()) and not is_msvc_clang
+ error('MESON_SKIP_TEST: unsupported compiler')
+endif
+
+dia = dependency('diasdk', required: true)
+executable('dia_registered', ['dia_registered.cpp'], dependencies:[dia])
diff --git a/test cases/windows/24 diasdk copy dll/config.h.in b/test cases/windows/24 diasdk copy dll/config.h.in
new file mode 100644
index 000000000000..80d59c1a4b09
--- /dev/null
+++ b/test cases/windows/24 diasdk copy dll/config.h.in
@@ -0,0 +1,3 @@
+#pragma once
+
+#define MSDIA_DLL_NAME L"@msdia_dll_name@"
diff --git a/test cases/windows/24 diasdk copy dll/dia_from_dll.cpp b/test cases/windows/24 diasdk copy dll/dia_from_dll.cpp
new file mode 100644
index 000000000000..5474717490d3
--- /dev/null
+++ b/test cases/windows/24 diasdk copy dll/dia_from_dll.cpp
@@ -0,0 +1,32 @@
+// Loads msdiaXXX.dll from current directory using NoRegCoCreate.
+// File name is set in config.h symbol MSDIA_DLL_NAME.
+
+#include <iostream>
+#include <stdexcept>
+#include <windows.h>
+#include <dia2.h>
+#include <diacreate.h>
+
+#include "config.h"
+
+int main()
+{
+ try {
+
+ HRESULT hr = CoInitialize(NULL);
+ if (FAILED(hr))
+ throw std::runtime_error("Failed to initialize COM library");
+
+ IDiaDataSource* datasrc;
+ hr = NoRegCoCreate(MSDIA_DLL_NAME, CLSID_DiaSource, __uuidof(IDiaDataSource), (void**)&datasrc);
+ if (FAILED(hr))
+ throw std::runtime_error("Can't open DIA DLL");
+
+ std::cout << "DIA was successfully loaded\n";
+ return 0;
+
+ } catch (std::exception& err) {
+ std::cerr << err.what() << std::endl;
+ return 1;
+ }
+}
diff --git a/test cases/windows/24 diasdk copy dll/meson.build b/test cases/windows/24 diasdk copy dll/meson.build
new file mode 100644
index 000000000000..1a078b0d98de
--- /dev/null
+++ b/test cases/windows/24 diasdk copy dll/meson.build
@@ -0,0 +1,21 @@
+project('diatest', 'cpp')
+
+if host_machine.system() != 'windows'
+ error('MESON_SKIP_TEST: unsupported platform')
+endif
+cpp = meson.get_compiler('cpp', native: false)
+is_msvc_clang = cpp.get_id() == 'clang' and cpp.get_define('_MSC_VER') != ''
+if not ['msvc', 'clang-cl'].contains(cpp.get_id()) and not is_msvc_clang
+ error('MESON_SKIP_TEST: unsupported compiler')
+endif
+
+dia = dependency('diasdk', required: true)
+dia_dll_name = dia.get_variable('dll')
+fs = import('fs')
+fs.copyfile( dia_dll_name )
+
+conf = configuration_data()
+conf.set('msdia_dll_name', fs.name(dia_dll_name))
+configure_file(input: 'config.h.in', output: 'config.h', configuration: conf)
+
+executable('dia_from_dll', ['dia_from_dll.cpp'], dependencies: [dia])
diff --git a/unittests/allplatformstests.py b/unittests/allplatformstests.py
index 138465d2f9e4..aedf9feca134 100644
--- a/unittests/allplatformstests.py
+++ b/unittests/allplatformstests.py
@@ -31,9 +31,10 @@
BuildDirLock, MachineChoice, is_windows, is_osx, is_cygwin, is_dragonflybsd,
is_sunos, windows_proof_rmtree, python_command, version_compare, split_args, quote_arg,
relpath, is_linux, git, search_version, do_conf_file, do_conf_str, default_prefix,
- MesonException, EnvironmentException, OptionKey,
+ MesonException, EnvironmentException,
windows_proof_rm
)
+from mesonbuild.options import OptionKey
from mesonbuild.programs import ExternalProgram
from mesonbuild.compilers.mixins.clang import ClangCompiler
@@ -626,7 +627,7 @@ def test_forcefallback(self):
self.run_tests()
def test_implicit_forcefallback(self):
- testdir = os.path.join(self.unit_test_dir, '96 implicit force fallback')
+ testdir = os.path.join(self.unit_test_dir, '95 implicit force fallback')
with self.assertRaises(subprocess.CalledProcessError):
self.init(testdir)
self.init(testdir, extra_args=['--wrap-mode=forcefallback'])
@@ -1054,6 +1055,7 @@ def test_internal_include_order(self):
# target internal dependency wrong include_directories: source dir
self.assertPathBasenameEqual(incs[8], 'sub2')
+ @mock.patch.dict(os.environ)
def test_compiler_detection(self):
'''
Test that automatic compiler detection and setting from the environment
@@ -2061,7 +2063,7 @@ def check_pcfile(name, *, relocatable, levels=2):
check_pcfile('libvartest2.pc', relocatable=False)
self.wipe()
- testdir_abs = os.path.join(self.unit_test_dir, '106 pkgconfig relocatable with absolute path')
+ testdir_abs = os.path.join(self.unit_test_dir, '105 pkgconfig relocatable with absolute path')
self.init(testdir_abs)
check_pcfile('libsimple.pc', relocatable=True, levels=3)
@@ -2466,13 +2468,13 @@ def test_check_module_linking(self):
tdir = os.path.join(self.unit_test_dir, '30 shared_mod linking')
out = self.init(tdir)
msg = ('''DEPRECATION: target prog links against shared module mymod, which is incorrect.
- This will be an error in the future, so please use shared_library() for mymod instead.
+ This will be an error in meson 2.0, so please use shared_library() for mymod instead.
If shared_module() was used for mymod because it has references to undefined symbols,
use shared_library() with `override_options: ['b_lundef=false']` instead.''')
self.assertIn(msg, out)
def test_mixed_language_linker_check(self):
- testdir = os.path.join(self.unit_test_dir, '97 compiler.links file arg')
+ testdir = os.path.join(self.unit_test_dir, '96 compiler.links file arg')
self.init(testdir)
cmds = self.get_meson_log_compiler_checks()
self.assertEqual(len(cmds), 5)
@@ -2728,11 +2730,11 @@ def test_command_line(self):
# c_args value should be parsed with split_args
self.init(testdir, extra_args=['-Dc_args=-Dfoo -Dbar "-Dthird=one two"', '--fatal-meson-warnings'])
obj = mesonbuild.coredata.load(self.builddir)
- self.assertEqual(obj.optstore.get_value(OptionKey('args', lang='c')), ['-Dfoo', '-Dbar', '-Dthird=one two'])
+ self.assertEqual(obj.optstore.get_value(OptionKey('c_args')), ['-Dfoo', '-Dbar', '-Dthird=one two'])
self.setconf('-Dc_args="foo bar" one two')
obj = mesonbuild.coredata.load(self.builddir)
- self.assertEqual(obj.optstore.get_value(OptionKey('args', lang='c')), ['foo bar', 'one', 'two'])
+ self.assertEqual(obj.optstore.get_value(OptionKey('c_args')), ['foo bar', 'one', 'two'])
self.wipe()
self.init(testdir, extra_args=['-Dset_percent_opt=myoption%', '--fatal-meson-warnings'])
@@ -2751,7 +2753,7 @@ def test_command_line(self):
self.assertEqual(obj.optstore.get_value('bindir'), 'bar')
self.assertEqual(obj.optstore.get_value('buildtype'), 'release')
self.assertEqual(obj.optstore.get_value('b_sanitize'), 'thread')
- self.assertEqual(obj.optstore.get_value(OptionKey('args', lang='c')), ['-Dbar'])
+ self.assertEqual(obj.optstore.get_value(OptionKey('c_args')), ['-Dbar'])
self.setconf(['--bindir=bar', '--bindir=foo',
'-Dbuildtype=release', '-Dbuildtype=plain',
'-Db_sanitize=thread', '-Db_sanitize=address',
@@ -2760,7 +2762,7 @@ def test_command_line(self):
self.assertEqual(obj.optstore.get_value('bindir'), 'foo')
self.assertEqual(obj.optstore.get_value('buildtype'), 'plain')
self.assertEqual(obj.optstore.get_value('b_sanitize'), 'address')
- self.assertEqual(obj.optstore.get_value(OptionKey('args', lang='c')), ['-Dfoo'])
+ self.assertEqual(obj.optstore.get_value(OptionKey('c_args')), ['-Dfoo'])
self.wipe()
except KeyError:
# Ignore KeyError, it happens on CI for compilers that does not
@@ -3021,6 +3023,8 @@ def test_introspect_projectinfo_subprojects(self):
expected = {
'descriptive_name': 'proj',
'version': 'undefined',
+ 'license': ['unknown'],
+ 'license_files': [],
'subproject_dir': 'subprojects',
'subprojects': [
{
@@ -3415,7 +3419,14 @@ def assertKeyTypes(key_type_list, obj, strict: bool = True):
self.assertListEqual(dependencies_to_find, [])
# Check projectinfo
- self.assertDictEqual(res['projectinfo'], {'version': '1.2.3', 'descriptive_name': 'introspection', 'subproject_dir': 'subprojects', 'subprojects': []})
+ self.assertDictEqual(res['projectinfo'], {
+ 'version': '1.2.3',
+ 'license': ['unknown'],
+ 'license_files': [],
+ 'descriptive_name': 'introspection',
+ 'subproject_dir': 'subprojects',
+ 'subprojects': []
+ })
# Check targets
targets_to_find = {
@@ -3761,9 +3772,9 @@ def test_summary(self):
User defined options
backend : ''' + self.backend_name + '''
+ enabled_opt : enabled
libdir : lib
prefix : /usr
- enabled_opt : enabled
python : ''' + sys.executable + '''
''')
expected_lines = expected.split('\n')[1:]
@@ -4288,7 +4299,7 @@ def test_build_b_options(self) -> None:
self.init(srcdir, extra_args=['-Dbuild.b_lto=true'])
def test_install_skip_subprojects(self):
- testdir = os.path.join(self.unit_test_dir, '92 install skip subprojects')
+ testdir = os.path.join(self.unit_test_dir, '91 install skip subprojects')
self.init(testdir)
self.build()
@@ -4335,7 +4346,7 @@ def check_installed_files(extra_args, expected):
check_installed_files(['--skip-subprojects', 'another'], all_expected)
def test_adding_subproject_to_configure_project(self) -> None:
- srcdir = os.path.join(self.unit_test_dir, '93 new subproject in configured project')
+ srcdir = os.path.join(self.unit_test_dir, '92 new subproject in configured project')
self.init(srcdir)
self.build()
self.setconf('-Duse-sub=true')
@@ -4385,7 +4396,7 @@ def test_clang_format_check(self):
if not shutil.which('clang-format'):
raise SkipTest('clang-format not found')
- testdir = os.path.join(self.unit_test_dir, '94 clangformat')
+ testdir = os.path.join(self.unit_test_dir, '93 clangformat')
newdir = os.path.join(self.builddir, 'testdir')
shutil.copytree(testdir, newdir)
self.new_builddir()
@@ -4410,7 +4421,7 @@ def test_clang_format_check(self):
self.build('clang-format-check')
def test_custom_target_implicit_include(self):
- testdir = os.path.join(self.unit_test_dir, '95 custominc')
+ testdir = os.path.join(self.unit_test_dir, '94 custominc')
self.init(testdir)
self.build()
compdb = self.get_compdb()
@@ -4450,7 +4461,7 @@ def test_env_flags_to_linker(self) -> None:
self.assertEqual(sorted(link_args), sorted(['-flto']))
def test_install_tag(self) -> None:
- testdir = os.path.join(self.unit_test_dir, '99 install all targets')
+ testdir = os.path.join(self.unit_test_dir, '98 install all targets')
self.init(testdir)
self.build()
@@ -4621,7 +4632,7 @@ def test_install_script_dry_run(self):
def test_introspect_install_plan(self):
- testdir = os.path.join(self.unit_test_dir, '99 install all targets')
+ testdir = os.path.join(self.unit_test_dir, '98 install all targets')
introfile = os.path.join(self.builddir, 'meson-info', 'intro-install_plan.json')
self.init(testdir)
self.assertPathExists(introfile)
@@ -4647,101 +4658,121 @@ def output_name(name, type_):
'targets': {
f'{self.builddir}/out1-notag.txt': {
'destination': '{datadir}/out1-notag.txt',
+ 'install_rpath': None,
'tag': None,
'subproject': None,
},
f'{self.builddir}/out2-notag.txt': {
'destination': '{datadir}/out2-notag.txt',
+ 'install_rpath': None,
'tag': None,
'subproject': None,
},
f'{self.builddir}/libstatic.a': {
'destination': '{libdir_static}/libstatic.a',
+ 'install_rpath': None,
'tag': 'devel',
'subproject': None,
},
f'{self.builddir}/' + exe_name('app'): {
'destination': '{bindir}/' + exe_name('app'),
+ 'install_rpath': None,
'tag': 'runtime',
'subproject': None,
},
f'{self.builddir}/' + exe_name('app-otherdir'): {
'destination': '{prefix}/otherbin/' + exe_name('app-otherdir'),
+ 'install_rpath': None,
'tag': 'runtime',
'subproject': None,
},
f'{self.builddir}/subdir/' + exe_name('app2'): {
'destination': '{bindir}/' + exe_name('app2'),
+ 'install_rpath': None,
'tag': 'runtime',
'subproject': None,
},
f'{self.builddir}/' + shared_lib_name('shared'): {
'destination': '{libdir_shared}/' + shared_lib_name('shared'),
+ 'install_rpath': None,
'tag': 'runtime',
'subproject': None,
},
f'{self.builddir}/' + shared_lib_name('both'): {
'destination': '{libdir_shared}/' + shared_lib_name('both'),
+ 'install_rpath': None,
'tag': 'runtime',
'subproject': None,
},
f'{self.builddir}/' + static_lib_name('both'): {
'destination': '{libdir_static}/' + static_lib_name('both'),
+ 'install_rpath': None,
'tag': 'devel',
'subproject': None,
},
f'{self.builddir}/' + shared_lib_name('bothcustom'): {
'destination': '{libdir_shared}/' + shared_lib_name('bothcustom'),
+ 'install_rpath': None,
'tag': 'custom',
'subproject': None,
},
f'{self.builddir}/' + static_lib_name('bothcustom'): {
'destination': '{libdir_static}/' + static_lib_name('bothcustom'),
+ 'install_rpath': None,
'tag': 'custom',
'subproject': None,
},
f'{self.builddir}/subdir/' + shared_lib_name('both2'): {
'destination': '{libdir_shared}/' + shared_lib_name('both2'),
+ 'install_rpath': None,
'tag': 'runtime',
'subproject': None,
},
f'{self.builddir}/subdir/' + static_lib_name('both2'): {
'destination': '{libdir_static}/' + static_lib_name('both2'),
+ 'install_rpath': None,
'tag': 'devel',
'subproject': None,
},
f'{self.builddir}/out1-custom.txt': {
'destination': '{datadir}/out1-custom.txt',
+ 'install_rpath': None,
'tag': 'custom',
'subproject': None,
},
f'{self.builddir}/out2-custom.txt': {
'destination': '{datadir}/out2-custom.txt',
+ 'install_rpath': None,
'tag': 'custom',
'subproject': None,
},
f'{self.builddir}/out3-custom.txt': {
'destination': '{datadir}/out3-custom.txt',
+ 'install_rpath': None,
'tag': 'custom',
'subproject': None,
},
f'{self.builddir}/subdir/out1.txt': {
'destination': '{datadir}/out1.txt',
+ 'install_rpath': None,
'tag': None,
'subproject': None,
},
f'{self.builddir}/subdir/out2.txt': {
'destination': '{datadir}/out2.txt',
+ 'install_rpath': None,
'tag': None,
'subproject': None,
},
f'{self.builddir}/out-devel.h': {
'destination': '{includedir}/out-devel.h',
+ 'install_rpath': None,
'tag': 'devel',
'subproject': None,
},
f'{self.builddir}/out3-notag.txt': {
'destination': '{datadir}/out3-notag.txt',
+ 'install_rpath': None,
'tag': None,
'subproject': None,
},
@@ -4873,7 +4904,7 @@ def test_rust_rlib_linkage(self) -> None:
}}
''')
- testdir = os.path.join(self.unit_test_dir, '102 rlib linkage')
+ testdir = os.path.join(self.unit_test_dir, '101 rlib linkage')
gen_file = os.path.join(testdir, 'lib.rs')
with open(gen_file, 'w', encoding='utf-8') as f:
f.write(template.format(0))
@@ -4921,7 +4952,7 @@ def test_bindgen_drops_invalid(self) -> None:
return
def test_custom_target_name(self):
- testdir = os.path.join(self.unit_test_dir, '100 custom target name')
+ testdir = os.path.join(self.unit_test_dir, '99 custom target name')
self.init(testdir)
out = self.build()
if self.backend is Backend.ninja:
@@ -5013,3 +5044,12 @@ def test_c_cpp_stds(self):
# The first supported std should be selected
self.setconf('-Dcpp_std=c++11,gnu++11,vc++11')
self.assertEqual(self.getconf('cpp_std'), 'c++11')
+
+ def test_rsp_support(self):
+ env = get_fake_env()
+ cc = detect_c_compiler(env, MachineChoice.HOST)
+ has_rsp = cc.linker.id in {
+ 'ld.bfd', 'ld.gold', 'ld.lld', 'ld.mold', 'ld.qcld', 'ld.wasm',
+ 'link', 'lld-link', 'mwldarm', 'mwldeppc', 'optlink', 'xilink',
+ }
+ self.assertEqual(cc.linker.get_accepts_rsp(), has_rsp)
diff --git a/unittests/baseplatformtests.py b/unittests/baseplatformtests.py
index 6e6a01d40fd3..3770321925fa 100644
--- a/unittests/baseplatformtests.py
+++ b/unittests/baseplatformtests.py
@@ -42,56 +42,58 @@
# e.g. for assertXXX helpers.
__unittest = True
+@mock.patch.dict(os.environ)
class BasePlatformTests(TestCase):
prefix = '/usr'
libdir = 'lib'
- def setUp(self):
- super().setUp()
- self.maxDiff = None
+ @classmethod
+ def setUpClass(cls) -> None:
+ super().setUpClass()
+ cls.maxDiff = None
src_root = str(PurePath(__file__).parents[1])
- self.src_root = src_root
+ cls.src_root = src_root
# Get the backend
- self.backend_name = os.environ['MESON_UNIT_TEST_BACKEND']
- backend_type = 'vs' if self.backend_name.startswith('vs') else self.backend_name
- self.backend = getattr(Backend, backend_type)
- self.meson_args = ['--backend=' + self.backend_name]
- self.meson_native_files = []
- self.meson_cross_files = []
- self.meson_command = python_command + [get_meson_script()]
- self.setup_command = self.meson_command + ['setup'] + self.meson_args
- self.mconf_command = self.meson_command + ['configure']
- self.mintro_command = self.meson_command + ['introspect']
- self.wrap_command = self.meson_command + ['wrap']
- self.rewrite_command = self.meson_command + ['rewrite']
+ cls.backend_name = os.environ.get('MESON_UNIT_TEST_BACKEND', 'ninja')
+ backend_type = 'vs' if cls.backend_name.startswith('vs') else cls.backend_name
+ cls.backend = getattr(Backend, backend_type)
+ cls.meson_args = ['--backend=' + cls.backend_name]
+ cls.meson_command = python_command + [get_meson_script()]
+ cls.setup_command = cls.meson_command + ['setup'] + cls.meson_args
+ cls.mconf_command = cls.meson_command + ['configure']
+ cls.mintro_command = cls.meson_command + ['introspect']
+ cls.wrap_command = cls.meson_command + ['wrap']
+ cls.rewrite_command = cls.meson_command + ['rewrite']
# Backend-specific build commands
- self.build_command, self.clean_command, self.test_command, self.install_command, \
- self.uninstall_command = get_backend_commands(self.backend)
+ cls.build_command, cls.clean_command, cls.test_command, cls.install_command, \
+ cls.uninstall_command = get_backend_commands(cls.backend)
# Test directories
- self.common_test_dir = os.path.join(src_root, 'test cases/common')
- self.python_test_dir = os.path.join(src_root, 'test cases/python')
- self.rust_test_dir = os.path.join(src_root, 'test cases/rust')
- self.vala_test_dir = os.path.join(src_root, 'test cases/vala')
- self.framework_test_dir = os.path.join(src_root, 'test cases/frameworks')
- self.unit_test_dir = os.path.join(src_root, 'test cases/unit')
- self.rewrite_test_dir = os.path.join(src_root, 'test cases/rewrite')
- self.linuxlike_test_dir = os.path.join(src_root, 'test cases/linuxlike')
- self.objc_test_dir = os.path.join(src_root, 'test cases/objc')
- self.objcpp_test_dir = os.path.join(src_root, 'test cases/objcpp')
- self.darwin_test_dir = os.path.join(src_root, 'test cases/darwin')
+ cls.common_test_dir = os.path.join(src_root, 'test cases/common')
+ cls.python_test_dir = os.path.join(src_root, 'test cases/python')
+ cls.rust_test_dir = os.path.join(src_root, 'test cases/rust')
+ cls.vala_test_dir = os.path.join(src_root, 'test cases/vala')
+ cls.framework_test_dir = os.path.join(src_root, 'test cases/frameworks')
+ cls.unit_test_dir = os.path.join(src_root, 'test cases/unit')
+ cls.rewrite_test_dir = os.path.join(src_root, 'test cases/rewrite')
+ cls.linuxlike_test_dir = os.path.join(src_root, 'test cases/linuxlike')
+ cls.objc_test_dir = os.path.join(src_root, 'test cases/objc')
+ cls.objcpp_test_dir = os.path.join(src_root, 'test cases/objcpp')
+ cls.darwin_test_dir = os.path.join(src_root, 'test cases/darwin')
# Misc stuff
- self.orig_env = os.environ.copy()
- if self.backend is Backend.ninja:
- self.no_rebuild_stdout = ['ninja: no work to do.', 'samu: nothing to do']
+ if cls.backend is Backend.ninja:
+ cls.no_rebuild_stdout = ['ninja: no work to do.', 'samu: nothing to do']
else:
# VS doesn't have a stable output when no changes are done
# XCode backend is untested with unit tests, help welcome!
- self.no_rebuild_stdout = [f'UNKNOWN BACKEND {self.backend.name!r}']
+ cls.no_rebuild_stdout = [f'UNKNOWN BACKEND {cls.backend.name!r}']
os.environ['COLUMNS'] = '80'
os.environ['PYTHONIOENCODING'] = 'utf8'
- self.builddirs = []
+ def setUp(self):
+ super().setUp()
+ self.meson_native_files = []
+ self.meson_cross_files = []
self.new_builddir()
def change_builddir(self, newdir):
@@ -101,7 +103,10 @@ def change_builddir(self, newdir):
self.installdir = os.path.join(self.builddir, 'install')
self.distdir = os.path.join(self.builddir, 'meson-dist')
self.mtest_command = self.meson_command + ['test', '-C', self.builddir]
- self.builddirs.append(self.builddir)
+ if os.path.islink(newdir):
+ self.addCleanup(os.unlink, self.builddir)
+ else:
+ self.addCleanup(windows_proof_rmtree, self.builddir)
def new_builddir(self):
# Keep builddirs inside the source tree so that virus scanners
@@ -141,16 +146,6 @@ def _print_meson_log(self) -> None:
if log:
print(log)
- def tearDown(self):
- for path in self.builddirs:
- try:
- windows_proof_rmtree(path)
- except FileNotFoundError:
- pass
- os.environ.clear()
- os.environ.update(self.orig_env)
- super().tearDown()
-
def _run(self, command, *, workdir=None, override_envvars: T.Optional[T.Mapping[str, str]] = None, stderr=True):
'''
Run a command while printing the stdout and stderr to stdout,
@@ -364,14 +359,14 @@ def introspect(self, args):
if isinstance(args, str):
args = [args]
out = subprocess.check_output(self.mintro_command + args + [self.builddir],
- universal_newlines=True)
+ encoding='utf-8', universal_newlines=True)
return json.loads(out)
def introspect_directory(self, directory, args):
if isinstance(args, str):
args = [args]
out = subprocess.check_output(self.mintro_command + args + [directory],
- universal_newlines=True)
+ encoding='utf-8', universal_newlines=True)
try:
obj = json.loads(out)
except Exception as e:
@@ -501,13 +496,13 @@ def copy_srcdir(self, srcdir: str) -> str:
ensures that the copied tree is deleted after running.
- :param srcdir: The locaiton of the source tree to copy
+ :param srcdir: The location of the source tree to copy
:return: The location of the copy
"""
dest = tempfile.mkdtemp()
self.addCleanup(windows_proof_rmtree, dest)
- # shutil.copytree expects the destinatin directory to not exist, Once
+ # shutil.copytree expects the destination directory to not exist, Once
# python 3.8 is required the `dirs_exist_ok` parameter negates the need
# for this
dest = os.path.join(dest, 'subdir')
diff --git a/unittests/datatests.py b/unittests/datatests.py
index b7b2d322a5ed..cb6542db8f71 100644
--- a/unittests/datatests.py
+++ b/unittests/datatests.py
@@ -18,9 +18,8 @@
import mesonbuild.modules.gnome
from mesonbuild.interpreter import Interpreter
from mesonbuild.ast import AstInterpreter
-from mesonbuild.mesonlib import (
- MachineChoice, OptionKey
-)
+from mesonbuild.mesonlib import MachineChoice
+from mesonbuild.options import OptionKey
from mesonbuild.compilers import (
detect_c_compiler, detect_cpp_compiler
)
@@ -140,8 +139,8 @@ def test_builtin_options_documented(self):
found_entries |= options
self.assertEqual(found_entries, {
- *(str(k.evolve(module=None)) for k in mesonbuild.options.BUILTIN_OPTIONS),
- *(str(k.evolve(module=None)) for k in mesonbuild.options.BUILTIN_OPTIONS_PER_MACHINE),
+ *(str(k.without_module_prefix()) for k in mesonbuild.options.BUILTIN_OPTIONS),
+ *(str(k.without_module_prefix()) for k in mesonbuild.options.BUILTIN_OPTIONS_PER_MACHINE),
})
# Check that `buildtype` table inside `Core options` matches how
diff --git a/unittests/failuretests.py b/unittests/failuretests.py
index baa59204766f..8a802120b6f3 100644
--- a/unittests/failuretests.py
+++ b/unittests/failuretests.py
@@ -34,10 +34,10 @@ def no_pkgconfig():
old_which = shutil.which
old_search = ExternalProgram._search
- def new_search(self, name, search_dir):
+ def new_search(self, name, search_dirs, exclude_paths):
if name == 'pkg-config':
return [None]
- return old_search(self, name, search_dir)
+ return old_search(self, name, search_dirs, exclude_paths)
def new_which(cmd, *kwargs):
if cmd == 'pkg-config':
diff --git a/unittests/helpers.py b/unittests/helpers.py
index 7483f51b75df..5cf8845b21b5 100644
--- a/unittests/helpers.py
+++ b/unittests/helpers.py
@@ -1,3 +1,7 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright © 2024 Intel Corporation
+
+from __future__ import annotations
import subprocess
import os
import shutil
@@ -8,28 +12,35 @@
import zipfile
from pathlib import Path
from contextlib import contextmanager
+from unittest import mock
from mesonbuild.compilers import detect_c_compiler, compiler_from_language
from mesonbuild.mesonlib import (
- MachineChoice, is_osx, is_cygwin, EnvironmentException, OptionKey, MachineChoice,
+ MachineChoice, is_osx, is_cygwin, EnvironmentException, MachineChoice,
OrderedSet
)
+from mesonbuild.options import OptionKey
from run_tests import get_fake_env
+if T.TYPE_CHECKING:
+ from typing_extensions import ParamSpec
+
+ P = ParamSpec('P')
+ R = T.TypeVar('R')
+
-def is_ci():
- if os.environ.get('MESON_CI_JOBNAME') not in {None, 'thirdparty'}:
- return True
- return False
+def is_ci() -> bool:
+ return os.environ.get('MESON_CI_JOBNAME', 'thirdparty') != 'thirdparty'
-def skip_if_not_base_option(feature):
+
+def skip_if_not_base_option(feature: str) -> T.Callable[[T.Callable[P, R]], T.Callable[P, R]]:
"""Skip tests if The compiler does not support a given base option.
for example, ICC doesn't currently support b_sanitize.
"""
- def actual(f):
+ def actual(f: T.Callable[P, R]) -> T.Callable[P, R]:
@functools.wraps(f)
- def wrapped(*args, **kwargs):
+ def wrapped(*args: P.args, **kwargs: P.kwargs) -> R:
env = get_fake_env()
cc = detect_c_compiler(env, MachineChoice.HOST)
key = OptionKey(feature)
@@ -40,7 +51,8 @@ def wrapped(*args, **kwargs):
return wrapped
return actual
-def skipIfNoPkgconfig(f):
+
+def skipIfNoPkgconfig(f: T.Callable[P, R]) -> T.Callable[P, R]:
'''
Skip this test if no pkg-config is found, unless we're on CI.
This allows users to run our test suite without having
@@ -50,19 +62,20 @@ def skipIfNoPkgconfig(f):
Note: Yes, we provide pkg-config even while running Windows CI
'''
@functools.wraps(f)
- def wrapped(*args, **kwargs):
+ def wrapped(*args: P.args, **kwargs: P.kwargs) -> R:
if not is_ci() and shutil.which('pkg-config') is None:
raise unittest.SkipTest('pkg-config not found')
return f(*args, **kwargs)
return wrapped
-def skipIfNoPkgconfigDep(depname):
+
+def skipIfNoPkgconfigDep(depname: str) -> T.Callable[[T.Callable[P, R]], T.Callable[P, R]]:
'''
Skip this test if the given pkg-config dep is not found, unless we're on CI.
'''
- def wrapper(func):
+ def wrapper(func: T.Callable[P, R]) -> T.Callable[P, R]:
@functools.wraps(func)
- def wrapped(*args, **kwargs):
+ def wrapped(*args: P.args, **kwargs: P.kwargs) -> R:
if not is_ci() and shutil.which('pkg-config') is None:
raise unittest.SkipTest('pkg-config not found')
if not is_ci() and subprocess.call(['pkg-config', '--exists', depname]) != 0:
@@ -71,7 +84,8 @@ def wrapped(*args, **kwargs):
return wrapped
return wrapper
-def skip_if_no_cmake(f):
+
+def skip_if_no_cmake(f: T.Callable[P, R]) -> T.Callable[P, R]:
'''
Skip this test if no cmake is found, unless we're on CI.
This allows users to run our test suite without having
@@ -79,16 +93,17 @@ def skip_if_no_cmake(f):
silently skip the test because of misconfiguration.
'''
@functools.wraps(f)
- def wrapped(*args, **kwargs):
+ def wrapped(*args: P.args, **kwargs: P.kwargs) -> R:
if not is_ci() and shutil.which('cmake') is None:
raise unittest.SkipTest('cmake not found')
return f(*args, **kwargs)
return wrapped
-def skip_if_not_language(lang: str):
- def wrapper(func):
+
+def skip_if_not_language(lang: str) -> T.Callable[[T.Callable[P, R]], T.Callable[P, R]]:
+ def wrapper(func: T.Callable[P, R]) -> T.Callable[P, R]:
@functools.wraps(func)
- def wrapped(*args, **kwargs):
+ def wrapped(*args: P.args, **kwargs: P.kwargs) -> R:
try:
compiler_from_language(get_fake_env(), lang, MachineChoice.HOST)
except EnvironmentException:
@@ -97,46 +112,43 @@ def wrapped(*args, **kwargs):
return wrapped
return wrapper
-def skip_if_env_set(key):
+
+def skip_if_env_set(key: str) -> T.Callable[[T.Callable[P, R]], T.Callable[P, R]]:
'''
Skip a test if a particular env is set, except when running under CI
'''
- def wrapper(func):
+ def wrapper(func: T.Callable[P, R]) -> T.Callable[P, R]:
@functools.wraps(func)
- def wrapped(*args, **kwargs):
- old = None
- if key in os.environ:
- if not is_ci():
- raise unittest.SkipTest(f'Env var {key!r} set, skipping')
- old = os.environ.pop(key)
- try:
+ def wrapped(*args: P.args, **kwargs: P.kwargs) -> R:
+ if key in os.environ and not is_ci():
+ raise unittest.SkipTest(f'Env var {key!r} set, skipping')
+ with mock.patch.dict(os.environ):
+ os.environ.pop(key, None)
return func(*args, **kwargs)
- finally:
- if old is not None:
- os.environ[key] = old
return wrapped
return wrapper
-def skipIfNoExecutable(exename):
+
+def skipIfNoExecutable(exename: str) -> T.Callable[[T.Callable[P, R]], T.Callable[P, R]]:
'''
Skip this test if the given executable is not found.
'''
- def wrapper(func):
+ def wrapper(func: T.Callable[P, R]) -> T.Callable[P, R]:
@functools.wraps(func)
- def wrapped(*args, **kwargs):
+ def wrapped(*args: P.args, **kwargs: P.kwargs) -> R:
if shutil.which(exename) is None:
raise unittest.SkipTest(exename + ' not found')
return func(*args, **kwargs)
return wrapped
return wrapper
-def is_tarball():
- if not os.path.isdir('docs'):
- return True
- return False
+
+def is_tarball() -> bool:
+ return not os.path.isdir('docs')
+
@contextmanager
-def chdir(path: str):
+def chdir(path: str) -> T.Iterator[None]:
curdir = os.getcwd()
os.chdir(path)
try:
@@ -144,13 +156,14 @@ def chdir(path: str):
finally:
os.chdir(curdir)
+
def get_dynamic_section_entry(fname: str, entry: str) -> T.Optional[str]:
if is_cygwin() or is_osx():
raise unittest.SkipTest('Test only applicable to ELF platforms')
try:
raw_out = subprocess.check_output(['readelf', '-d', fname],
- universal_newlines=True)
+ encoding='utf-8', universal_newlines=True)
except FileNotFoundError:
# FIXME: Try using depfixer.py:Elf() as a fallback
raise unittest.SkipTest('readelf not found')
@@ -161,9 +174,11 @@ def get_dynamic_section_entry(fname: str, entry: str) -> T.Optional[str]:
return str(m.group(1))
return None # The file did not contain the specified entry.
+
def get_soname(fname: str) -> T.Optional[str]:
return get_dynamic_section_entry(fname, 'soname')
+
def get_rpath(fname: str) -> T.Optional[str]:
raw = get_dynamic_section_entry(fname, r'(?:rpath|runpath)')
# Get both '' and None here
@@ -177,11 +192,12 @@ def get_rpath(fname: str) -> T.Optional[str]:
return None
return final
+
def get_classpath(fname: str) -> T.Optional[str]:
with zipfile.ZipFile(fname) as zip:
with zip.open('META-INF/MANIFEST.MF') as member:
contents = member.read().decode().strip()
- lines = []
+ lines: T.List[str] = []
for line in contents.splitlines():
if line.startswith(' '):
# continuation line
@@ -193,6 +209,7 @@ def get_classpath(fname: str) -> T.Optional[str]:
}
return manifest.get('class-path')
+
def get_path_without_cmd(cmd: str, path: str) -> str:
pathsep = os.pathsep
paths = OrderedSet([Path(p).resolve() for p in path.split(pathsep)])
@@ -205,10 +222,11 @@ def get_path_without_cmd(cmd: str, path: str) -> str:
path = pathsep.join([str(p) for p in paths])
return path
-def xfail_if_jobname(name: str):
+
+def xfail_if_jobname(name: str) -> T.Callable[[T.Callable[P, R]], T.Callable[P, R]]:
if os.environ.get('MESON_CI_JOBNAME') == name:
return unittest.expectedFailure
- def wrapper(func):
+ def wrapper(func: T.Callable[P, R]) -> T.Callable[P, R]:
return func
return wrapper
diff --git a/unittests/internaltests.py b/unittests/internaltests.py
index fe9f0d4f5cfa..69f52a413627 100644
--- a/unittests/internaltests.py
+++ b/unittests/internaltests.py
@@ -4,6 +4,7 @@
from configparser import ConfigParser
from pathlib import Path
from unittest import mock
+import argparse
import contextlib
import io
import json
@@ -13,6 +14,7 @@
import stat
import subprocess
import tempfile
+import textwrap
import typing as T
import unittest
@@ -23,6 +25,7 @@
import mesonbuild.envconfig
import mesonbuild.environment
import mesonbuild.modules.gnome
+import mesonbuild.scripts.env2mfile
from mesonbuild import coredata
from mesonbuild.compilers.c import ClangCCompiler, GnuCCompiler
from mesonbuild.compilers.cpp import VisualStudioCPPCompiler
@@ -32,13 +35,14 @@
from mesonbuild.interpreterbase import typed_pos_args, InvalidArguments, typed_kwargs, ContainerTypeInfo, KwargInfo
from mesonbuild.mesonlib import (
LibType, MachineChoice, PerMachine, Version, is_windows, is_osx,
- is_cygwin, is_openbsd, search_version, MesonException, OptionKey,
- OptionType
+ is_cygwin, is_openbsd, search_version, MesonException,
)
+from mesonbuild.options import OptionKey
from mesonbuild.interpreter.type_checking import in_set_validator, NoneType
from mesonbuild.dependencies.pkgconfig import PkgConfigDependency, PkgConfigInterface, PkgConfigCLI
from mesonbuild.programs import ExternalProgram
import mesonbuild.modules.pkgconfig
+from mesonbuild import utils
from run_tests import (
@@ -625,7 +629,7 @@ def create_static_lib(name):
env = get_fake_env()
compiler = detect_c_compiler(env, MachineChoice.HOST)
env.coredata.compilers.host = {'c': compiler}
- env.coredata.optstore.set_value_object(OptionKey('link_args', lang='c'), FakeCompilerOptions())
+ env.coredata.optstore.set_value_object(OptionKey('c_link_args'), FakeCompilerOptions())
p1 = Path(tmpdir) / '1'
p2 = Path(tmpdir) / '2'
p1.mkdir()
@@ -1347,8 +1351,8 @@ def _(obj, node, args: T.Tuple, kwargs: T.Dict[str, T.List[str]]) -> None:
def test_typed_kwarg_since(self) -> None:
@typed_kwargs(
'testfunc',
- KwargInfo('input', str, since='1.0', since_message='Its awesome, use it',
- deprecated='2.0', deprecated_message='Its terrible, dont use it')
+ KwargInfo('input', str, since='1.0', since_message='It\'s awesome, use it',
+ deprecated='2.0', deprecated_message='It\'s terrible, don\'t use it')
)
def _(obj, node, args: T.Tuple, kwargs: T.Dict[str, str]) -> None:
self.assertIsInstance(kwargs['input'], str)
@@ -1359,8 +1363,8 @@ def _(obj, node, args: T.Tuple, kwargs: T.Dict[str, str]) -> None:
mock.patch('mesonbuild.mesonlib.project_meson_versions', {'': '0.1'}):
# With Meson 0.1 it should trigger the "introduced" warning but not the "deprecated" warning
_(None, mock.Mock(subproject=''), [], {'input': 'foo'})
- self.assertRegex(out.getvalue(), r'WARNING:.*introduced.*input arg in testfunc. Its awesome, use it')
- self.assertNotRegex(out.getvalue(), r'WARNING:.*deprecated.*input arg in testfunc. Its terrible, dont use it')
+ self.assertRegex(out.getvalue(), r'WARNING:.*introduced.*input arg in testfunc. It\'s awesome, use it')
+ self.assertNotRegex(out.getvalue(), r'WARNING:.*deprecated.*input arg in testfunc. It\'s terrible, don\'t use it')
with self.subTest('no warnings should be triggered'), \
mock.patch('sys.stdout', io.StringIO()) as out, \
@@ -1374,8 +1378,8 @@ def _(obj, node, args: T.Tuple, kwargs: T.Dict[str, str]) -> None:
mock.patch('mesonbuild.mesonlib.project_meson_versions', {'': '2.0'}):
# With Meson 2.0 it should trigger the "deprecated" warning but not the "introduced" warning
_(None, mock.Mock(subproject=''), [], {'input': 'foo'})
- self.assertRegex(out.getvalue(), r'WARNING:.*deprecated.*input arg in testfunc. Its terrible, dont use it')
- self.assertNotRegex(out.getvalue(), r'WARNING:.*introduced.*input arg in testfunc. Its awesome, use it')
+ self.assertRegex(out.getvalue(), r'WARNING:.*deprecated.*input arg in testfunc. It\'s terrible, don\'t use it')
+ self.assertNotRegex(out.getvalue(), r'WARNING:.*introduced.*input arg in testfunc. It\'s awesome, use it')
def test_typed_kwarg_validator(self) -> None:
@typed_kwargs(
@@ -1407,7 +1411,7 @@ def test_typed_kwarg_since_values(self) -> None:
@typed_kwargs(
'testfunc',
KwargInfo('input', ContainerTypeInfo(list, str), listify=True, default=[], deprecated_values={'foo': '0.9'}, since_values={'bar': '1.1'}),
- KwargInfo('output', ContainerTypeInfo(dict, str), default={}, deprecated_values={'foo': '0.9', 'foo2': ('0.9', 'dont use it')}, since_values={'bar': '1.1', 'bar2': ('1.1', 'use this')}),
+ KwargInfo('output', ContainerTypeInfo(dict, str), default={}, deprecated_values={'foo': '0.9', 'foo2': ('0.9', 'don\'t use it')}, since_values={'bar': '1.1', 'bar2': ('1.1', 'use this')}),
KwargInfo('install_dir', (bool, str, NoneType), deprecated_values={False: '0.9'}),
KwargInfo(
'mode',
@@ -1442,7 +1446,7 @@ def _(obj, node, args: T.Tuple, kwargs: T.Dict[str, str]) -> None:
with self.subTest('deprecated dict string value with msg'), mock.patch('sys.stdout', io.StringIO()) as out:
_(None, mock.Mock(subproject=''), [], {'output': {'foo2': 'a'}})
- self.assertRegex(out.getvalue(), r"""WARNING:.Project targets '1.0'.*deprecated since '0.9': "testfunc" keyword argument "output" value "foo2" in dict keys. dont use it.*""")
+ self.assertRegex(out.getvalue(), r"""WARNING:.Project targets '1.0'.*deprecated since '0.9': "testfunc" keyword argument "output" value "foo2" in dict keys. don't use it.*""")
with self.subTest('new dict string value'), mock.patch('sys.stdout', io.StringIO()) as out:
_(None, mock.Mock(subproject=''), [], {'output': {'bar': 'b'}})
@@ -1699,18 +1703,404 @@ def test_major_versions_differ(self) -> None:
def test_option_key_from_string(self) -> None:
cases = [
- ('c_args', OptionKey('args', lang='c', _type=OptionType.COMPILER)),
- ('build.cpp_args', OptionKey('args', machine=MachineChoice.BUILD, lang='cpp', _type=OptionType.COMPILER)),
- ('prefix', OptionKey('prefix', _type=OptionType.BUILTIN)),
- ('made_up', OptionKey('made_up', _type=OptionType.PROJECT)),
+ ('c_args', OptionKey('c_args')),
+ ('build.cpp_args', OptionKey('cpp_args', machine=MachineChoice.BUILD)),
+ ('prefix', OptionKey('prefix')),
+ ('made_up', OptionKey('made_up')),
# TODO: the from_String method should be splitting the prefix off of
# these, as we have the type already, but it doesn't. For now have a
# test so that we don't change the behavior un-intentionally
- ('b_lto', OptionKey('b_lto', _type=OptionType.BASE)),
- ('backend_startup_project', OptionKey('backend_startup_project', _type=OptionType.BACKEND)),
+ ('b_lto', OptionKey('b_lto')),
+ ('backend_startup_project', OptionKey('backend_startup_project')),
]
for raw, expected in cases:
with self.subTest(raw):
self.assertEqual(OptionKey.from_string(raw), expected)
+
+ def test_env2mfile_deb(self) -> None:
+ MachineInfo = mesonbuild.scripts.env2mfile.MachineInfo
+ to_machine_info = mesonbuild.scripts.env2mfile.dpkg_architecture_to_machine_info
+
+ # For testing purposes, behave as though all cross-programs
+ # exist in /usr/bin
+ def locate_path(program: str) -> T.List[str]:
+ if os.path.isabs(program):
+ return [program]
+ return ['/usr/bin/' + program]
+
+ def expected_compilers(
+ gnu_tuple: str,
+ gcc_suffix: str = '',
+ ) -> T.Dict[str, T.List[str]]:
+ return {
+ 'c': [f'/usr/bin/{gnu_tuple}-gcc{gcc_suffix}'],
+ 'cpp': [f'/usr/bin/{gnu_tuple}-g++{gcc_suffix}'],
+ 'objc': [f'/usr/bin/{gnu_tuple}-gobjc{gcc_suffix}'],
+ 'objcpp': [f'/usr/bin/{gnu_tuple}-gobjc++{gcc_suffix}'],
+ 'vala': [f'/usr/bin/{gnu_tuple}-valac'],
+ }
+
+ def expected_binaries(gnu_tuple: str) -> T.Dict[str, T.List[str]]:
+ return {
+ 'ar': [f'/usr/bin/{gnu_tuple}-ar'],
+ 'strip': [f'/usr/bin/{gnu_tuple}-strip'],
+ 'objcopy': [f'/usr/bin/{gnu_tuple}-objcopy'],
+ 'ld': [f'/usr/bin/{gnu_tuple}-ld'],
+ 'cmake': ['/usr/bin/cmake'],
+ 'pkg-config': [f'/usr/bin/{gnu_tuple}-pkg-config'],
+ 'cups-config': ['/usr/bin/cups-config'],
+ 'exe_wrapper': [f'/usr/bin/{gnu_tuple}-cross-exe-wrapper'],
+ 'g-ir-annotation-tool': [f'/usr/bin/{gnu_tuple}-g-ir-annotation-tool'],
+ 'g-ir-compiler': [f'/usr/bin/{gnu_tuple}-g-ir-compiler'],
+ 'g-ir-doc-tool': [f'/usr/bin/{gnu_tuple}-g-ir-doc-tool'],
+ 'g-ir-generate': [f'/usr/bin/{gnu_tuple}-g-ir-generate'],
+ 'g-ir-inspect': [f'/usr/bin/{gnu_tuple}-g-ir-inspect'],
+ 'g-ir-scanner': [f'/usr/bin/{gnu_tuple}-g-ir-scanner'],
+ }
+
+ for title, dpkg_arch, gccsuffix, env, expected in [
+ (
+ # s390x is an example of the common case where the
+ # Meson CPU name, the GNU CPU name, the dpkg architecture
+ # name and uname -m all agree.
+ # (alpha, m68k, ppc64, riscv64, sh4, sparc64 are similar)
+ 's390x-linux-gnu',
+ # Output of `dpkg-architecture -a...`, filtered to
+ # only the DEB_HOST_ parts because that's all we use
+ textwrap.dedent(
+ '''
+ DEB_HOST_ARCH=s390x
+ DEB_HOST_ARCH_ABI=base
+ DEB_HOST_ARCH_BITS=64
+ DEB_HOST_ARCH_CPU=s390x
+ DEB_HOST_ARCH_ENDIAN=big
+ DEB_HOST_ARCH_LIBC=gnu
+ DEB_HOST_ARCH_OS=linux
+ DEB_HOST_GNU_CPU=s390x
+ DEB_HOST_GNU_SYSTEM=linux-gnu
+ DEB_HOST_GNU_TYPE=s390x-linux-gnu
+ DEB_HOST_MULTIARCH=s390x-linux-gnu
+ '''
+ ),
+ '',
+ {'PATH': '/usr/bin'},
+ MachineInfo(
+ compilers=expected_compilers('s390x-linux-gnu'),
+ binaries=expected_binaries('s390x-linux-gnu'),
+ properties={},
+ compile_args={},
+ link_args={},
+ cmake={
+ 'CMAKE_C_COMPILER': ['/usr/bin/s390x-linux-gnu-gcc'],
+ 'CMAKE_CXX_COMPILER': ['/usr/bin/s390x-linux-gnu-g++'],
+ 'CMAKE_SYSTEM_NAME': 'Linux',
+ 'CMAKE_SYSTEM_PROCESSOR': 's390x',
+ },
+ system='linux',
+ subsystem='linux',
+ kernel='linux',
+ cpu='s390x',
+ cpu_family='s390x',
+ endian='big',
+ ),
+ ),
+ # Debian amd64 vs. GNU, Meson, etc. x86_64.
+ # arm64/aarch64, hppa/parisc, i386/i686/x86, loong64/loongarch64,
+ # powerpc/ppc are similar.
+ (
+ 'x86_64-linux-gnu',
+ textwrap.dedent(
+ '''
+ DEB_HOST_ARCH=amd64
+ DEB_HOST_ARCH_ABI=base
+ DEB_HOST_ARCH_BITS=64
+ DEB_HOST_ARCH_CPU=amd64
+ DEB_HOST_ARCH_ENDIAN=little
+ DEB_HOST_ARCH_LIBC=gnu
+ DEB_HOST_ARCH_OS=linux
+ DEB_HOST_GNU_CPU=x86_64
+ DEB_HOST_GNU_SYSTEM=linux-gnu
+ DEB_HOST_GNU_TYPE=x86_64-linux-gnu
+ DEB_HOST_MULTIARCH=x86_64-linux-gnu
+ '''
+ ),
+ '',
+ {'PATH': '/usr/bin'},
+ MachineInfo(
+ compilers=expected_compilers('x86_64-linux-gnu'),
+ binaries=expected_binaries('x86_64-linux-gnu'),
+ properties={},
+ compile_args={},
+ link_args={},
+ cmake={
+ 'CMAKE_C_COMPILER': ['/usr/bin/x86_64-linux-gnu-gcc'],
+ 'CMAKE_CXX_COMPILER': ['/usr/bin/x86_64-linux-gnu-g++'],
+ 'CMAKE_SYSTEM_NAME': 'Linux',
+ 'CMAKE_SYSTEM_PROCESSOR': 'x86_64',
+ },
+ system='linux',
+ subsystem='linux',
+ kernel='linux',
+ cpu='x86_64',
+ cpu_family='x86_64',
+ endian='little',
+ ),
+ ),
+ (
+ 'arm-linux-gnueabihf with non-default gcc and environment',
+ textwrap.dedent(
+ '''
+ DEB_HOST_ARCH=armhf
+ DEB_HOST_ARCH_ABI=eabihf
+ DEB_HOST_ARCH_BITS=32
+ DEB_HOST_ARCH_CPU=arm
+ DEB_HOST_ARCH_ENDIAN=little
+ DEB_HOST_ARCH_LIBC=gnu
+ DEB_HOST_ARCH_OS=linux
+ DEB_HOST_GNU_CPU=arm
+ DEB_HOST_GNU_SYSTEM=linux-gnueabihf
+ DEB_HOST_GNU_TYPE=arm-linux-gnueabihf
+ DEB_HOST_MULTIARCH=arm-linux-gnueabihf
+ '''
+ ),
+ '-12',
+ {
+ 'PATH': '/usr/bin',
+ 'CPPFLAGS': '-DNDEBUG',
+ 'CFLAGS': '-std=c99',
+ 'CXXFLAGS': '-std=c++11',
+ 'OBJCFLAGS': '-fobjc-exceptions',
+ 'OBJCXXFLAGS': '-fobjc-nilcheck',
+ 'LDFLAGS': '-Wl,-O1',
+ },
+ MachineInfo(
+ compilers=expected_compilers('arm-linux-gnueabihf', '-12'),
+ binaries=expected_binaries('arm-linux-gnueabihf'),
+ properties={},
+ compile_args={
+ 'c': ['-DNDEBUG', '-std=c99'],
+ 'cpp': ['-DNDEBUG', '-std=c++11'],
+ 'objc': ['-DNDEBUG', '-fobjc-exceptions'],
+ 'objcpp': ['-DNDEBUG', '-fobjc-nilcheck'],
+ },
+ link_args={
+ 'c': ['-std=c99', '-Wl,-O1'],
+ 'cpp': ['-std=c++11', '-Wl,-O1'],
+ 'objc': ['-fobjc-exceptions', '-Wl,-O1'],
+ 'objcpp': ['-fobjc-nilcheck', '-Wl,-O1'],
+ },
+ cmake={
+ 'CMAKE_C_COMPILER': ['/usr/bin/arm-linux-gnueabihf-gcc-12'],
+ 'CMAKE_CXX_COMPILER': ['/usr/bin/arm-linux-gnueabihf-g++-12'],
+ 'CMAKE_SYSTEM_NAME': 'Linux',
+ 'CMAKE_SYSTEM_PROCESSOR': 'armv7l',
+ },
+ system='linux',
+ subsystem='linux',
+ kernel='linux',
+ # In a native build this would often be armv8l
+ # (the version of the running CPU) but the architecture
+ # baseline in Debian is officially ARMv7
+ cpu='arm7hlf',
+ cpu_family='arm',
+ endian='little',
+ ),
+ ),
+ (
+ 'special cases for i386 (i686, x86) and Hurd',
+ textwrap.dedent(
+ '''
+ DEB_HOST_ARCH=hurd-i386
+ DEB_HOST_ARCH_ABI=base
+ DEB_HOST_ARCH_BITS=32
+ DEB_HOST_ARCH_CPU=i386
+ DEB_HOST_ARCH_ENDIAN=little
+ DEB_HOST_ARCH_LIBC=gnu
+ DEB_HOST_ARCH_OS=hurd
+ DEB_HOST_GNU_CPU=i686
+ DEB_HOST_GNU_SYSTEM=gnu
+ DEB_HOST_GNU_TYPE=i686-gnu
+ DEB_HOST_MULTIARCH=i386-gnu
+ '''
+ ),
+ '',
+ {'PATH': '/usr/bin'},
+ MachineInfo(
+ compilers=expected_compilers('i686-gnu'),
+ binaries=expected_binaries('i686-gnu'),
+ properties={},
+ compile_args={},
+ link_args={},
+ cmake={
+ 'CMAKE_C_COMPILER': ['/usr/bin/i686-gnu-gcc'],
+ 'CMAKE_CXX_COMPILER': ['/usr/bin/i686-gnu-g++'],
+ 'CMAKE_SYSTEM_NAME': 'GNU',
+ 'CMAKE_SYSTEM_PROCESSOR': 'i686',
+ },
+ system='gnu',
+ subsystem='gnu',
+ kernel='gnu',
+ cpu='i686',
+ cpu_family='x86',
+ endian='little',
+ ),
+ ),
+ (
+ 'special cases for amd64 (x86_64) and kFreeBSD',
+ textwrap.dedent(
+ '''
+ DEB_HOST_ARCH=kfreebsd-amd64
+ DEB_HOST_ARCH_ABI=base
+ DEB_HOST_ARCH_BITS=64
+                        DEB_HOST_ARCH_CPU=amd64
+ DEB_HOST_ARCH_ENDIAN=little
+ DEB_HOST_ARCH_LIBC=gnu
+ DEB_HOST_ARCH_OS=kfreebsd
+ DEB_HOST_GNU_CPU=x86_64
+ DEB_HOST_GNU_SYSTEM=kfreebsd-gnu
+ DEB_HOST_GNU_TYPE=x86_64-kfreebsd-gnu
+ DEB_HOST_MULTIARCH=x86_64-kfreebsd-gnu
+ '''
+ ),
+ '',
+ {'PATH': '/usr/bin'},
+ MachineInfo(
+ compilers=expected_compilers('x86_64-kfreebsd-gnu'),
+ binaries=expected_binaries('x86_64-kfreebsd-gnu'),
+ properties={},
+ compile_args={},
+ link_args={},
+ cmake={
+ 'CMAKE_C_COMPILER': ['/usr/bin/x86_64-kfreebsd-gnu-gcc'],
+ 'CMAKE_CXX_COMPILER': ['/usr/bin/x86_64-kfreebsd-gnu-g++'],
+ 'CMAKE_SYSTEM_NAME': 'kFreeBSD',
+ 'CMAKE_SYSTEM_PROCESSOR': 'x86_64',
+ },
+ system='kfreebsd',
+ subsystem='kfreebsd',
+ kernel='freebsd',
+ cpu='x86_64',
+ cpu_family='x86_64',
+ endian='little',
+ ),
+ ),
+ (
+ 'special case for mips64el',
+ textwrap.dedent(
+ '''
+ DEB_HOST_ARCH=mips64el
+ DEB_HOST_ARCH_ABI=abi64
+ DEB_HOST_ARCH_BITS=64
+ DEB_HOST_ARCH_CPU=mips64el
+ DEB_HOST_ARCH_ENDIAN=little
+ DEB_HOST_ARCH_LIBC=gnu
+ DEB_HOST_ARCH_OS=linux
+ DEB_HOST_GNU_CPU=mips64el
+ DEB_HOST_GNU_SYSTEM=linux-gnuabi64
+ DEB_HOST_GNU_TYPE=mips64el-linux-gnuabi64
+ DEB_HOST_MULTIARCH=mips64el-linux-gnuabi64
+ '''
+ ),
+ '',
+ {'PATH': '/usr/bin'},
+ MachineInfo(
+ compilers=expected_compilers('mips64el-linux-gnuabi64'),
+ binaries=expected_binaries('mips64el-linux-gnuabi64'),
+ properties={},
+ compile_args={},
+ link_args={},
+ cmake={
+ 'CMAKE_C_COMPILER': ['/usr/bin/mips64el-linux-gnuabi64-gcc'],
+ 'CMAKE_CXX_COMPILER': ['/usr/bin/mips64el-linux-gnuabi64-g++'],
+ 'CMAKE_SYSTEM_NAME': 'Linux',
+ 'CMAKE_SYSTEM_PROCESSOR': 'mips64',
+ },
+ system='linux',
+ subsystem='linux',
+ kernel='linux',
+ cpu='mips64',
+ cpu_family='mips64',
+ endian='little',
+ ),
+ ),
+ (
+ 'special case for ppc64el',
+ textwrap.dedent(
+ '''
+ DEB_HOST_ARCH=ppc64el
+ DEB_HOST_ARCH_ABI=base
+ DEB_HOST_ARCH_BITS=64
+ DEB_HOST_ARCH_CPU=ppc64el
+ DEB_HOST_ARCH_ENDIAN=little
+ DEB_HOST_ARCH_LIBC=gnu
+ DEB_HOST_ARCH_OS=linux
+ DEB_HOST_GNU_CPU=powerpc64le
+ DEB_HOST_GNU_SYSTEM=linux-gnu
+ DEB_HOST_GNU_TYPE=powerpc64le-linux-gnu
+ DEB_HOST_MULTIARCH=powerpc64le-linux-gnu
+ '''
+ ),
+ '',
+ {'PATH': '/usr/bin'},
+ MachineInfo(
+ compilers=expected_compilers('powerpc64le-linux-gnu'),
+ binaries=expected_binaries('powerpc64le-linux-gnu'),
+ properties={},
+ compile_args={},
+ link_args={},
+ cmake={
+ 'CMAKE_C_COMPILER': ['/usr/bin/powerpc64le-linux-gnu-gcc'],
+ 'CMAKE_CXX_COMPILER': ['/usr/bin/powerpc64le-linux-gnu-g++'],
+ 'CMAKE_SYSTEM_NAME': 'Linux',
+ 'CMAKE_SYSTEM_PROCESSOR': 'ppc64le',
+ },
+ system='linux',
+ subsystem='linux',
+ kernel='linux',
+ # TODO: Currently ppc64, but native builds have ppc64le
+ # https://github.com/mesonbuild/meson/issues/13741
+ cpu='TODO',
+ cpu_family='ppc64',
+ endian='little',
+ ),
+ ),
+ ]:
+ with self.subTest(title), \
+ unittest.mock.patch.dict('os.environ', env, clear=True), \
+ unittest.mock.patch('mesonbuild.scripts.env2mfile.locate_path') as mock_locate_path:
+ mock_locate_path.side_effect = locate_path
+ options = argparse.Namespace()
+ options.gccsuffix = gccsuffix
+ actual = to_machine_info(dpkg_arch, options)
+
+ if expected.system == 'TODO':
+ print(f'TODO: {title}: system() -> {actual.system}')
+ else:
+ self.assertEqual(actual.system, expected.system)
+
+ if expected.subsystem == 'TODO':
+ print(f'TODO: {title}: subsystem() -> {actual.subsystem}')
+ else:
+ self.assertEqual(actual.subsystem, expected.subsystem)
+
+ if expected.kernel == 'TODO':
+ print(f'TODO: {title}: kernel() -> {actual.kernel}')
+ else:
+ self.assertEqual(actual.kernel, expected.kernel)
+
+ if expected.cpu == 'TODO':
+ print(f'TODO: {title}: cpu() -> {actual.cpu}')
+ else:
+ self.assertEqual(actual.cpu, expected.cpu)
+
+ self.assertEqual(actual.cpu_family, expected.cpu_family)
+ self.assertEqual(actual.endian, expected.endian)
+
+ self.assertEqual(actual.compilers, expected.compilers)
+ self.assertEqual(actual.binaries, expected.binaries)
+ self.assertEqual(actual.properties, expected.properties)
+ self.assertEqual(actual.compile_args, expected.compile_args)
+ self.assertEqual(actual.link_args, expected.link_args)
+ self.assertEqual(actual.cmake, expected.cmake)
diff --git a/unittests/linuxliketests.py b/unittests/linuxliketests.py
index ea86784892f3..55e83750af34 100644
--- a/unittests/linuxliketests.py
+++ b/unittests/linuxliketests.py
@@ -25,8 +25,9 @@
from mesonbuild.mesonlib import (
MachineChoice, is_windows, is_osx, is_cygwin, is_openbsd, is_haiku,
is_sunos, windows_proof_rmtree, version_compare, is_linux,
- OptionKey, EnvironmentException
+ EnvironmentException
)
+from mesonbuild.options import OptionKey
from mesonbuild.compilers import (
detect_c_compiler, detect_cpp_compiler, compiler_from_language,
)
@@ -35,6 +36,7 @@
from mesonbuild.compilers.objc import AppleClangObjCCompiler
from mesonbuild.compilers.objcpp import AppleClangObjCPPCompiler
from mesonbuild.dependencies.pkgconfig import PkgConfigDependency, PkgConfigCLI, PkgConfigInterface
+from mesonbuild.programs import NonExistingExternalProgram
import mesonbuild.modules.pkgconfig
PKG_CONFIG = os.environ.get('PKG_CONFIG', 'pkg-config')
@@ -280,7 +282,6 @@ def test_symlink_builddir(self) -> None:
symdir = f'{self.builddir}-symlink'
os.symlink(self.builddir, symdir)
- self.addCleanup(os.unlink, symdir)
self.change_builddir(symdir)
self.init(testdir)
@@ -317,6 +318,19 @@ def test_generate_gir_with_address_sanitizer(self):
self.init(testdir, extra_args=['-Db_sanitize=address', '-Db_lundef=false'])
self.build()
+ def test_qt5dependency_no_lrelease(self):
+ '''
+ Test that qt5 detection with qmake works. This can't be an ordinary
+ test case because it involves setting the environment.
+ '''
+ testdir = os.path.join(self.framework_test_dir, '4 qt')
+ def _no_lrelease(self, prog, *args, **kwargs):
+ if 'lrelease' in prog:
+ return NonExistingExternalProgram(prog)
+ return self._interpreter.find_program_impl(prog, *args, **kwargs)
+ with mock.patch.object(mesonbuild.modules.ModuleState, 'find_program', _no_lrelease):
+ self.init(testdir, inprocess=True, extra_args=['-Dmethod=qmake', '-Dexpect_lrelease=false'])
+
def test_qt5dependency_qmake_detection(self):
'''
Test that qt5 detection with qmake works. This can't be an ordinary
@@ -485,7 +499,7 @@ def _test_stds_impl(self, testdir: str, compiler: 'Compiler') -> None:
# Check that all the listed -std=xxx options for this compiler work just fine when used
# https://en.wikipedia.org/wiki/Xcode#Latest_versions
# https://www.gnu.org/software/gcc/projects/cxx-status.html
- key = OptionKey('std', lang=compiler.language)
+ key = OptionKey(f'{compiler.language}_std')
for v in compiler.get_options()[key].choices:
# we do it like this to handle gnu++17,c++17 and gnu17,c17 cleanly
# thus, C++ first
@@ -1319,7 +1333,7 @@ def test_link_arg_fullname(self):
see: https://github.com/mesonbuild/meson/issues/9000
https://stackoverflow.com/questions/48532868/gcc-library-option-with-a-colon-llibevent-a
'''
- testdir = os.path.join(self.unit_test_dir, '98 link full name','libtestprovider')
+ testdir = os.path.join(self.unit_test_dir, '97 link full name','libtestprovider')
oldprefix = self.prefix
# install into installdir without using DESTDIR
installdir = self.installdir
@@ -1332,7 +1346,7 @@ def test_link_arg_fullname(self):
self.new_builddir()
env = {'LIBRARY_PATH': os.path.join(installdir, self.libdir),
'PKG_CONFIG_PATH': _prepend_pkg_config_path(os.path.join(installdir, self.libdir, 'pkgconfig'))}
- testdir = os.path.join(self.unit_test_dir, '98 link full name','proguser')
+ testdir = os.path.join(self.unit_test_dir, '97 link full name','proguser')
self.init(testdir,override_envvars=env)
# test for link with full path
@@ -1684,9 +1698,7 @@ def test_prelinking(self):
# Prelinking currently only works on recently new GNU toolchains.
# Skip everything else. When support for other toolchains is added,
# remove limitations as necessary.
- if is_osx():
- raise SkipTest('Prelinking not supported on Darwin.')
- if 'clang' in os.environ.get('CC', 'dummy'):
+ if 'clang' in os.environ.get('CC', 'dummy') and not is_osx():
raise SkipTest('Prelinking not supported with Clang.')
testdir = os.path.join(self.unit_test_dir, '86 prelinking')
env = get_fake_env(testdir, self.builddir, self.prefix)
@@ -1702,10 +1714,9 @@ def test_prelinking(self):
p = subprocess.run([ar, 't', outlib],
stdout=subprocess.PIPE,
stderr=subprocess.DEVNULL,
- text=True, timeout=1)
+ encoding='utf-8', text=True, timeout=1)
obj_files = p.stdout.strip().split('\n')
- self.assertEqual(len(obj_files), 1)
- self.assertTrue(obj_files[0].endswith('-prelink.o'))
+ self.assertTrue(any(o.endswith('-prelink.o') for o in obj_files))
def do_one_test_with_nativefile(self, testdir, args):
testdir = os.path.join(self.common_test_dir, testdir)
@@ -1741,7 +1752,7 @@ def test_cmake_multilib(self):
@skipUnless(is_linux() or is_osx(), 'Test only applicable to Linux and macOS')
def test_install_strip(self):
- testdir = os.path.join(self.unit_test_dir, '104 strip')
+ testdir = os.path.join(self.unit_test_dir, '103 strip')
self.init(testdir)
self.build()
@@ -1816,3 +1827,7 @@ def test_complex_link_cases(self):
self.assertIn('build t9-e1: c_LINKER t9-e1.p/main.c.o | libt9-s1.a libt9-s2.a libt9-s3.a\n', content)
self.assertIn('build t12-e1: c_LINKER t12-e1.p/main.c.o | libt12-s1.a libt12-s2.a libt12-s3.a\n', content)
self.assertIn('build t13-e1: c_LINKER t13-e1.p/main.c.o | libt12-s1.a libt13-s3.a\n', content)
+
+ def test_top_options_in_sp(self):
+ testdir = os.path.join(self.unit_test_dir, '123 pkgsubproj')
+ self.init(testdir)
diff --git a/unittests/machinefiletests.py b/unittests/machinefiletests.py
index 5ff862cdcfb1..ba9cb11530dd 100644
--- a/unittests/machinefiletests.py
+++ b/unittests/machinefiletests.py
@@ -50,7 +50,7 @@ def is_real_gnu_compiler(path):
'''
if not path:
return False
- out = subprocess.check_output([path, '--version'], universal_newlines=True, stderr=subprocess.STDOUT)
+ out = subprocess.check_output([path, '--version'], encoding='utf-8', universal_newlines=True, stderr=subprocess.STDOUT)
return 'Free Software Foundation' in out
cross_dir = Path(__file__).parent.parent / 'cross'
@@ -330,7 +330,7 @@ def cb(comp):
elif comp.id == 'gcc':
if shutil.which('ifort'):
# There is an ICC for windows (windows build, linux host),
- # but we don't support that ATM so lets not worry about it.
+ # but we don't support that ATM so let's not worry about it.
if is_windows():
return 'ifort', 'intel-cl'
return 'ifort', 'intel'
@@ -634,7 +634,7 @@ def test_bindgen_clang_arguments(self) -> None:
testcase = os.path.join(self.rust_test_dir, '12 bindgen')
config = self.helper_create_native_file({
- 'properties': {'bindgen_clang_arguments': 'sentinal'}
+ 'properties': {'bindgen_clang_arguments': 'sentinel'}
})
self.init(testcase, extra_args=['--native-file', config])
@@ -642,10 +642,10 @@ def test_bindgen_clang_arguments(self) -> None:
for t in targets:
if t['id'].startswith('rustmod-bindgen'):
args: T.List[str] = t['target_sources'][0]['compiler']
- self.assertIn('sentinal', args, msg="Did not find machine file value")
+ self.assertIn('sentinel', args, msg="Did not find machine file value")
cargs_start = args.index('--')
- sent_arg = args.index('sentinal')
- self.assertLess(cargs_start, sent_arg, msg='sentinal argument does not come after "--"')
+ sent_arg = args.index('sentinel')
+ self.assertLess(cargs_start, sent_arg, msg='sentinel argument does not come after "--"')
break
else:
self.fail('Did not find a bindgen target')
diff --git a/unittests/platformagnostictests.py b/unittests/platformagnostictests.py
index 775d0dc2bc1e..228c04d4b68e 100644
--- a/unittests/platformagnostictests.py
+++ b/unittests/platformagnostictests.py
@@ -18,6 +18,7 @@
from mesonbuild.mesonlib import EnvironmentVariables, ExecutableSerialisation, MesonException, is_linux, python_command, windows_proof_rmtree
from mesonbuild.mformat import Formatter, match_path
from mesonbuild.optinterpreter import OptionInterpreter, OptionException
+from mesonbuild.options import OptionStore
from run_tests import Backend
@skipIf(is_ci() and not is_linux(), "Run only on fast platforms")
@@ -31,11 +32,12 @@ def test_relative_find_program(self):
Tests that find_program() with a relative path does not find the program
in current workdir.
'''
- testdir = os.path.join(self.unit_test_dir, '101 relative find program')
+ testdir = os.path.join(self.unit_test_dir, '100 relative find program')
self.init(testdir, workdir=testdir)
def test_invalid_option_names(self):
- interp = OptionInterpreter('')
+ store = OptionStore()
+ interp = OptionInterpreter(store, '')
def write_file(code: str):
with tempfile.NamedTemporaryFile('w', dir=self.builddir, encoding='utf-8', delete=False) as f:
@@ -68,7 +70,8 @@ def write_file(code: str):
def test_option_validation(self):
"""Test cases that are not catch by the optinterpreter itself."""
- interp = OptionInterpreter('')
+ store = OptionStore()
+ interp = OptionInterpreter(store, '')
def write_file(code: str):
with tempfile.NamedTemporaryFile('w', dir=self.builddir, encoding='utf-8', delete=False) as f:
@@ -88,11 +91,11 @@ def write_file(code: str):
interp.process, fname)
def test_python_dependency_without_pkgconfig(self):
- testdir = os.path.join(self.unit_test_dir, '103 python without pkgconfig')
+ testdir = os.path.join(self.unit_test_dir, '102 python without pkgconfig')
self.init(testdir, override_envvars={'PKG_CONFIG': 'notfound'})
def test_debug_function_outputs_to_meson_log(self):
- testdir = os.path.join(self.unit_test_dir, '105 debug function')
+ testdir = os.path.join(self.unit_test_dir, '104 debug function')
log_msg = 'This is an example debug output, should only end up in debug log'
output = self.init(testdir)
diff --git a/unittests/rewritetests.py b/unittests/rewritetests.py
index af5e204dcb47..7fad513f5271 100644
--- a/unittests/rewritetests.py
+++ b/unittests/rewritetests.py
@@ -28,18 +28,18 @@ def rewrite_raw(self, directory, args):
if isinstance(args, str):
args = [args]
command = self.rewrite_command + ['--verbose', '--skip', '--sourcedir', directory] + args
- p = subprocess.run(command, capture_output=True, text=True, timeout=60)
+ p = subprocess.run(command, capture_output=True, encoding='utf-8', text=True, timeout=60)
print('STDOUT:')
print(p.stdout)
print('STDERR:')
print(p.stderr)
if p.returncode != 0:
- if 'MESON_SKIP_TEST' in p.stdout:
+ if 'MESON_SKIP_TEST' in p.stderr:
raise unittest.SkipTest('Project requested skipping.')
- raise subprocess.CalledProcessError(p.returncode, command, output=p.stdout)
- if not p.stderr:
+ raise subprocess.CalledProcessError(p.returncode, command, output=p.stderr)
+ if not p.stdout:
return {}
- return json.loads(p.stderr)
+ return json.loads(p.stdout)
def rewrite(self, directory, args):
if isinstance(args, str):
diff --git a/unittests/windowstests.py b/unittests/windowstests.py
index 48e41f517953..79114a04708f 100644
--- a/unittests/windowstests.py
+++ b/unittests/windowstests.py
@@ -17,8 +17,9 @@
import mesonbuild.modules.gnome
from mesonbuild.mesonlib import (
MachineChoice, is_windows, is_cygwin, python_command, version_compare,
- EnvironmentException, OptionKey
+ EnvironmentException
)
+from mesonbuild.options import OptionKey
from mesonbuild.compilers import (
detect_c_compiler, detect_d_compiler, compiler_from_language,
)