--- /dev/null
+queries:
+ - uses: ./.github/codeql/lgtm.qls
+
+paths-ignore:
+ - 'test cases'
--- /dev/null
+# for some reason this doesn't work by default any way I can see
+
+- import: codeql-suites/python-lgtm.qls
+ from: codeql/python-queries
--- /dev/null
+name: "CodeQL"
+
+on:
+ push:
+ branches: [ "master" ]
+ pull_request:
+ branches: [ "master" ]
+
+jobs:
+ analyze:
+ # lgtm.com does not run in forks, for good reason
+ if: github.repository == 'mesonbuild/meson'
+ name: Analyze
+ runs-on: ubuntu-latest
+ permissions:
+ security-events: write
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v3
+
+ - name: Initialize CodeQL
+ uses: github/codeql-action/init@v2
+ with:
+ config-file: .github/codeql/codeql-config.yml
+ languages: python
+ # we have none
+ setup-python-dependencies: false
+
+ - name: Perform CodeQL Analysis
+ uses: github/codeql-action/analyze@v2
- ".github/workflows/cygwin.yml"
- "run*tests.py"
+permissions:
+ contents: read
+
jobs:
test:
runs-on: windows-latest
- run: git config --global core.autocrlf input
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
- uses: cygwin/cygwin-install-action@master
with:
SKIP_STATIC_BOOST: 1
shell: C:\cygwin\bin\bash.exe --noprofile --norc -o igncr -eo pipefail '{0}'
- - uses: actions/upload-artifact@v2
+ - uses: actions/upload-artifact@v3
with:
name: ${{ matrix.NAME }}
path: meson-test-run.*
format:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v2
- - uses: actions/setup-python@v2
+ - uses: actions/checkout@v3
+ - uses: actions/setup-python@v4
with:
python-version: '3.x'
- run: python3 ./run_format_tests.py
schedule:
- cron: '0 0 * * 0'
+permissions:
+ contents: read
+
jobs:
build:
# do not run the weekly scheduled job in a fork
- { name: Ubuntu Bionic, id: bionic }
- { name: Ubuntu Rolling, id: ubuntu-rolling }
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
# Login to dockerhub
- name: Docker login
pylint:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v2
- - uses: actions/setup-python@v2
+ - uses: actions/checkout@v3
+ - uses: actions/setup-python@v4
with:
python-version: '3.x'
- run: python -m pip install pylint
flake8:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v2
- - uses: actions/setup-python@v2
+ - uses: actions/checkout@v3
+ - uses: actions/setup-python@v4
with:
python-version: '3.x'
- run: python -m pip install flake8
mypy:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v2
- - uses: actions/setup-python@v2
+ - uses: actions/checkout@v3
+ - uses: actions/setup-python@v4
with:
python-version: '3.x'
- run: python -m pip install mypy types-PyYAML
unittests-appleclang:
runs-on: macos-latest
steps:
- - uses: actions/checkout@v2
- - uses: actions/setup-python@v2
+ - uses: actions/checkout@v3
+ - uses: actions/setup-python@v4
with:
python-version: '3.x'
- run: |
MESON_CI_JOBNAME: ${{ matrix.NAME }}
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
# Avoid picking up an older version of LLVM that does not work.
- run: brew update
# github actions overwrites brew's python. Force it to reassert itself, by running in a separate step.
Qt4macos:
runs-on: macos-latest
steps:
- - uses: actions/checkout@v2
- - uses: actions/setup-python@v2
+ - uses: actions/checkout@v3
+ - uses: actions/setup-python@v4
with:
python-version: '3.x'
- run: python -m pip install -e .
- run: ln -sfn /usr/local/Cellar/qt@4/4.8.7_6.reinstall /usr/local/Cellar/qt@4/4.8.7_6
- run: meson setup "test cases/frameworks/4 qt" build -Drequired=qt4
- run: meson compile -C build
- - uses: actions/upload-artifact@v1
+ - uses: actions/upload-artifact@v3
if: failure()
with:
name: Qt4_Mac_build
path: build/meson-logs/meson-log.txt
- run: meson test -C build -v
- - uses: actions/upload-artifact@v1
+ - uses: actions/upload-artifact@v3
if: failure()
with:
name: Qt4_Mac_test
shell: msys2 {0}
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
- uses: msys2/setup-msys2@v2
with:
MSYSTEM= python3 ./tools/run_with_cov.py run_tests.py --backend=ninja
- - uses: actions/upload-artifact@v2
+ - uses: actions/upload-artifact@v3
with:
name: ${{ matrix.NAME }}
path: meson-test-run.*
apt-get -y purge clang gcc gdc
apt-get -y autoremove
python3 -m pip install coverage codecov
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
- name: Run tests
run: bash -c 'source /ci/env_vars.sh; cd $GITHUB_WORKSPACE; ./tools/run_with_cov.py ./run_tests.py $CI_ARGS --cross ubuntu-armhf.json --cross-only'
- name: Upload coverage report
MESON_CI_JOBNAME: linux-${{ matrix.cfg.id }}-gcc
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
- name: Run tests
# All environment variables are stored inside the docker image in /ci/env_vars.sh
# They are defined in the `env` section in each image.json. CI_ARGS should be set
steps:
- name: Checkout code
- uses: actions/checkout@v2
+ uses: actions/checkout@v3
- name: Run tests
shell: bash
linux:
runs-on: ubuntu-20.04
steps:
- - uses: actions/checkout@v2
- - uses: actions/setup-python@v2
+ - uses: actions/checkout@v3
+ - uses: actions/setup-python@v4
with:
python-version: '3.x'
- name: Install Compilers
windows:
runs-on: windows-latest
steps:
- - uses: actions/checkout@v2
- - uses: actions/setup-python@v2
+ - uses: actions/checkout@v3
+ - uses: actions/setup-python@v4
with:
python-version: '3.x'
types:
- published
+permissions:
+ contents: write # for release creation (svenstaro/upload-release-action)
+
# This job is copy/pasted into wrapdb CI, please update it there when doing any
# change here.
jobs:
HAS_SSH_KEY: ${{ secrets.WEBSITE_PRIV_KEY != '' }}
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
- name: Install package
run: |
sudo apt-get -y install python3-pip ninja-build libjson-glib-dev
+++ /dev/null
-extraction:
- python:
- python_setup:
- version: 3
\ No newline at end of file
[MASTER]
jobs=0
+load-plugins=
+ pylint.extensions.bad_builtin,
+ pylint.extensions.set_membership,
[REPORTS]
score=no
duplicate-value,
exec-used,
fixme,
- implicit-str-concat,
import-error,
import-outside-toplevel,
inconsistent-mro,
redefined-argument-from-local,
redefined-builtin,
redefined-outer-name,
- simplifiable-if-statement,
subprocess-run-check,
super-init-not-called,
too-few-public-methods,
unsubscriptable-object,
unused-argument,
unused-variable,
- use-implicit-booleaness-not-comparison,
- used-before-assignment,
- useless-return,
useless-super-delegation,
wrong-import-order,
wrong-import-position,
[![PyPI](https://img.shields.io/pypi/v/meson.svg)](https://pypi.python.org/pypi/meson)
[![Build Status](https://dev.azure.com/jussi0947/jussi/_apis/build/status/mesonbuild.meson)](https://dev.azure.com/jussi0947/jussi/_build/latest?definitionId=1)
[![Codecov](https://codecov.io/gh/mesonbuild/meson/coverage.svg?branch=master)](https://codecov.io/gh/mesonbuild/meson/branch/master)
-[![Code Quality: Python](https://img.shields.io/lgtm/grade/python/g/mesonbuild/meson.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/mesonbuild/meson/context:python)
-[![Total Alerts](https://img.shields.io/lgtm/alerts/g/mesonbuild/meson.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/mesonbuild/meson/alerts)
#### Dependencies
%meson \
%set_build_flags \
- %{shrink:%{__meson} \
+ %{shrink:%{__meson} setup \
--buildtype=plain \
--prefix=%{_prefix} \
--libdir=%{_libdir} \
import subprocess
import re
import shutil
+import datetime
from pathlib import Path
RELNOTE_TEMPLATE = '''---
m = re.match(r'[\s]*Release-notes-for-([0-9]+)\.([0-9]+)\.([0-9]+)\.md', line)
if m:
from_version = f'{m[1]}.{m[2]}.{m[3]}'
- to_version = f'{m[1]}.{int(m[2]) + 1}.{m[3]}'
+ if from_version == '0.64.0':
+ to_version = '1.0.0'
+ else:
+ to_version = f'{m[1]}.{int(m[2]) + 1}.{m[3]}'
new_line = line.replace(from_version, to_version)
relnotes = new_line.strip()
s_f.write(new_line)
output.parent.mkdir(exist_ok=True, parents=True)
with output.open('w', encoding='utf-8') as ofile:
ofile.write(RELNOTE_TEMPLATE.format(title, to_version, title_suffix))
+ if not output_dir:
+ date = datetime.date.today()
+ date_str = date.strftime("%d %B %Y")
+ ofile.write(f'Meson {to_version} was released on {date_str}\n')
for snippetfile in sorted(Path(source_dir, 'markdown/snippets').glob('*.md')):
snippet = snippetfile.read_text(encoding='utf-8')
ofile.write(snippet)
A `feature` option has three states: `enabled`, `disabled` or `auto`.
It is intended to be passed as value for the `required` keyword
argument of most functions. Currently supported in
-[[dependency]],
+[[add_languages]],
[[compiler.find_library]],
-[[find_program]] and
-[[add_languages]] functions.
+[[compiler.has_header]],
+[[dependency]],
+[[find_program]],
+[[import]] and
+[[subproject]]
+functions.
- `enabled` is the same as passing `required : true`.
- `auto` is the same as passing `required : false`.
| strip | false | Strip targets on install | no | no |
| unity {on, off, subprojects} | off | Unity build | no | no |
| unity_size {>=2} | 4 | Unity file block size | no | no |
-| warning_level {0, 1, 2, 3} | 1 | Set the warning level. From 0 = none to 3 = highest | no | yes |
+| warning_level {0, 1, 2, 3, everything} | 1 | Set the warning level. From 0 = none to everything = highest | no | yes |
| werror | false | Treat warnings as errors | no | yes |
| wrap_mode {default, nofallback,<br>nodownload, forcefallback, nopromote} | default | Wrap mode to use | no | no |
| force_fallback_for | [] | Force fallback for those dependencies | no | no |
compilers might not support all of them. For example Visual Studio
only supports the address sanitizer.
-* < 0 means disable, == 0 means automatic selection, > 0 sets a specific number to use
+\* < 0 means disable, == 0 means automatic selection, > 0 sets a specific number to use
LLVM supports `thin` lto, for more discussion see [LLVM's documentation](https://clang.llvm.org/docs/ThinLTO.html)
Since *0.63.0* all compiler options can be set per subproject, see
[here](#specifying-options-per-subproject) for details on how the default value
-is inherited from main project. This is useful for example when the main project
-requires C++11 but a subproject requires C++14. The `cpp_std` value from
-subproject's `default_options` is now respected.
+is inherited from the main project. This is useful, for example, when the main
+project requires C++11, but a subproject requires C++14. The `cpp_std` value
+from the subproject's `default_options` is now respected.
## Specifying options per machine
Since *0.51.0*, some options are specified per machine rather than
globally for all machine configurations. Prefixing the option with
-`build.` just affects the build machine configuration, while
-unprefixed just affects the host machine configuration, respectively.
+`build.` only affects the build machine configuration, while leaving it
+unprefixed only affects the host machine configuration.
For example:
- `build.pkg_config_path` controls the paths pkg-config will search
- for just `native: true` dependencies (build machine).
+ for `native: true` (build machine) dependencies.
- `pkg_config_path` controls the paths pkg-config will search for
- just `native: false` dependencies (host machine).
+ `native: false` (host machine) dependencies.
-This is useful for cross builds. In the native builds, build = host,
-and the unprefixed option alone will suffice.
+This is useful for cross builds. In native builds, the build and host
+machines are the same, and the unprefixed option alone will suffice.
-Prior to *0.51.0*, these options just effected native builds when
-specified on the command line, as there was no `build.` prefix.
+Prior to *0.51.0*, these options only affected native builds when
+specified on the command line as there was no `build.` prefix.
Similarly named fields in the `[properties]` section of the cross file
-would effect cross compilers, but the code paths were fairly different
+would affect cross compilers, but the code paths were fairly different,
allowing differences in behavior to crop out.
## Specifying options per subproject
Since *0.54.0* `default_library` and `werror` built-in options can be
-defined per subproject. This is useful for example when building
-shared libraries in the main project, but static link a subproject, or
-when the main project must build with no warnings but some subprojects
+defined per subproject. This is useful, for example, when building
+shared libraries in the main project and statically linking a subproject,
+or when the main project must build with no warnings but some subprojects
cannot.
-Most of the time this would be used either by the parent project by
+Most of the time, this would be used either in the parent project by
setting subproject's default_options (e.g. `subproject('foo',
-default_options: 'default_library=static')`), or by the user using the
-command line `-Dfoo:default_library=static`.
+default_options: 'default_library=static')`), or by the user through the
+command line: `-Dfoo:default_library=static`.
The value is overridden in this order:
- Value from parent project
- Value from subproject() default_options if set
- Value from command line if set
-Since 0.56.0 `warning_level` can also be defined per subproject.
+Since *0.56.0* `warning_level` can also be defined per subproject.
## Module options
-Some Meson modules have built-in options. They can be set by prefixing the option
-name with the module name: `-D<module>.<option>=<value>` (e.g. `-Dpython.platlibdir=/foo`).
+Some Meson modules have built-in options. They can be set by prefixing the
+option with the module's name:
+`-D<module>.<option>=<value>` (e.g. `-Dpython.platlibdir=/foo`).
### Pkgconfig module
| relocatable | false | true, false | Generate the pkgconfig files as relocatable (Since 0.63.0) |
*Since 0.63.0* The `pkgconfig.relocatable` option is used by the
-pkgconfig module, namely [`pkg.generate()`](Pkgconfig-module.md) and affect how the
-`prefix` in the generated pkgconfig file is set (not to be confused
-with the [install prefix](#directories)). When it is `true` the `prefix` will be
-relative to the `install_dir`. This allows the pkgconfig file to be
-moved around and still work, as long as the relative path is not
-broken. In general this allows for the whole installed package to be
-placed anywhere on the system and still work as a dependency. When it
-is set to `false` the `prefix` will be the same as the install prefix.
+pkgconfig module — namely [`pkg.generate()`](Pkgconfig-module.md) — and
+affects how the `prefix` (not to be confused with the
+[install prefix](#directories)) in the generated pkgconfig file is set.
+When it is `true`, the `prefix` will be relative to the `install_dir` — this
+allows the pkgconfig file to be moved around and still work, as long
+as the relative path is not broken. In general, this allows for the whole
+installed package to be placed anywhere on the system and still work as a
+dependency. When it is set to `false`, the `prefix` will be the same as
+the install prefix.
An error will be raised if `pkgconfig.relocatable` is `true` and the
`install_dir` for a generated pkgconfig file points outside the
-install prefix. For example if the install prefix is `/usr` and the
+install prefix. For example: if the install prefix is `/usr` and the
`install_dir` for a pkgconfig file is `/var/lib/pkgconfig`.
### Python module
| platlibdir | | Directory path | Directory for site-specific, platform-specific files (Since 0.60.0) |
| purelibdir | | Directory path | Directory for site-specific, non-platform-specific files (Since 0.60.0) |
-*Since 0.60.0* `python.platlibdir` and `python.purelibdir` options are used by
-python module methods `python.install_sources()` and `python.get_install_dir()`.
-By default Meson tries to detect the correct installation path, but make them
-relative to the installation `prefix`, which will often result in installed python
-modules to not be found by the interpreter unless `prefix` is `/usr` on Linux,
-or for example `C:\Python39` on Windows. These options can be absolute paths
-outside of `prefix`.
+*Since 0.60.0* The `python.platlibdir` and `python.purelibdir` options are used
+by the python module methods `python.install_sources()` and
+`python.get_install_dir()`; Meson tries to detect the correct installation paths
+and make them relative to the installation `prefix` by default which will often
+result in the interpreter not finding the installed python modules unless
+`prefix` is `/usr` on Linux, or, for instance, `C:\Python39` on Windows. These
+options can be absolute paths outside of `prefix`.
*Since 0.62.0* The `python.install_env` option is used to detect the correct
installation path. Setting to `system` will avoid making the paths relative to
The file `.clang-format-ignore` contains a list of patterns matching the files
that will be excluded. Files matching the include list (see above) that match
-one of the ignore pattern will not be reformatted. Unlike include patters, ignore
+one of the ignore patterns will not be reformatted. Unlike include patterns, ignore
patterns does not support `**` and a single `*` match any characters including
path separators. Empty lines and lines starting with `#` are ignored.
schemas is compiled. This is automatically set when using `gnome.compile_schemas()`.
Note that this requires GLib >= 2.64 when `gnome.compile_schemas()` is used in
more than one directory.
+- `QEMU_LD_PREFIX` *Since 1.0.0* is set to the `sys_root` value from cross file
+ when cross compiling and that property is defined.
Since *Since 0.62.0* if bash-completion scripts are being installed and the
shell is bash, they will be automatically sourced.
prefer_static false [true, false] Whether to try static linking before shared linking
strip false [true, false] Strip targets on install
unity off [on, off, subprojects] Unity build
- warning_level 1 [0, 1, 2, 3] Compiler warning level to use
+ warning_level 1 [0, 1, 2, 3, everything] Compiler warning level to use
werror false [true, false] Treat warnings as errors
Backend options:
## Documentation
The `docs` directory contains the full documentation that will be used
-to generate [the Meson web site](http://mesonbuild.com). Line length
+to generate [the Meson web site](https://mesonbuild.com). Line length
in most cases should not exceed 70 characters (lines containing links
or examples are usually exempt). Every change in functionality must
change the documentation pages. In most cases this means updating the
*New in 0.56.0* The `config-tool` and `system` methods.
-To define some of the the preprocessor symbols mentioned in the
+To define some of the preprocessor symbols mentioned in the
[curses autoconf documentation](http://git.savannah.gnu.org/gitweb/?p=autoconf-archive.git;a=blob_plain;f=m4/ax_with_curses.m4):
```meson
--
The code for this experiment can be found at [the Meson
-repository](https://sourceforge.net/p/meson/code/). It should be noted
-that it is not a build system. It is only a proposal for one. It does
-not work reliably yet. You probably should not use it as the build
-system of your project.
+repository](https://github.com/mesonbuild/meson). It should be noted
+that (at the time of writing) it is not a build system. It is only
+a proposal for one. It does not work reliably yet. You probably
+should not use it as the build system of your project.
All that said I hope that this experiment will eventually turn into a
full blown build system. For that I need your help. Comments and
meson_version : '>=0.56.0',
)
-mod = import('unstable_external_project')
+mod = import('unstable-external_project')
p = mod.add_project('configure',
configure_options : ['--prefix=@PREFIX@',
* [[install_headers]],
* `pkgconfig.generate()`,
* `gnome.generate_gir()` - `.gir` file,
+ * `gnome.generate_vapi()` - `.vapi` file (*Since 0.64.0*),
* Files installed into `libdir` and with `.a` or `.pc` extension,
- * File installed into `includedir`.
+ * File installed into `includedir`,
+ * Generated header files installed with `gnome.compile_resources()`,
+ `gnome.genmarshal()`, `gnome.mkenums()`, `gnome.mkenums_simple()`
+ and `gnome.gdbus_codegen()` (*Since 0.64.0*).
- `runtime`:
* [[executable]],
* [[shared_library]],
* [[shared_module]],
* [[jar]],
- * Files installed into `bindir`.
+ * `gnome.compile_resources()` - `.gresource` file (*Since 0.64.0*),
+ * Files installed into `bindir`,
* Files installed into `libdir` and with `.so` or `.dll` extension.
- `python-runtime`:
* `python.install_sources()`.
### `generate_native_header()`
*(deprecated in 0.62.0, use `generate_native_headers()`)*
+*(removed in 1.0.0)*
This function will generate a header file for use in Java native module
development by reading the supplied Java file for `native` method declarations.
### `generate_native_headers()`
*(added in 0.62.0)*
+*(deprecated in 1.0.0, use `native_headers()`)*
+
+This function will generate native header files for use in Java native module
+development by reading the supplied Java files for `native` method declarations.
+
+Keyword arguments:
+
+- `classes`: The list of class names relative to the `package`, if it exists,
+which contain `native` method declarations. Use `.` separated class names.
+
+- `package`: The [package](https://en.wikipedia.org/wiki/Java_package) of the
+file. If left empty, Meson will assume that there is no package.
+
+Example:
+
+```java
+// Outer.java
+
+package com.mesonbuild;
+
+public class Outer {
+ private static native void outer();
+
+ public static class Inner {
+ private static native void inner();
+ }
+}
+```
+
+With the above file, an invocation would look like the following:
+
+```meson
+java = import('java')
+
+native_headers = java.generate_native_headers(
+ 'Outer.java',
+ package: 'com.mesonbuild',
+ classes: ['Outer', 'Outer.Inner']
+)
+```
+
+### `native_headers()`
+
+*(added in 1.0.0)*
This function will generate native header files for use in Java native module
development by reading the supplied Java files for `native` method declarations.
- Once a pkg-config file is generated for a library using `pkg.generate(mylib)`,
any subsequent call to `pkg.generate()` where mylib appears, will generate a
`Requires:` or `Requires.private` instead of a `Libs:` or `Libs.private:`.
+
+### Relocatable pkg-config files
+
+By default, the files generated by `pkg.generate` contain a hardcoded prefix path.
+In order to make them relocatable, a `pkgconfig.relocatable` builtin option is provided.
+See [Pkgconfig module options](Builtin-options.md#pkgconfig-module).
*Since 0.49.0*
- `modules`: a list of module names that this python installation must have.
*Since 0.51.0*
+- `pure`: On some platforms, architecture independent files are
+ expected to be placed in a separate directory. However, if the
+ python sources should be installed alongside an extension module
+ built with this module, this keyword argument can be used to
+ override the default behavior of `.install_sources()`.
+ *since 0.64.0*
**Returns**: a [python installation][`python_installation` object]
expected to be placed in a separate directory. However, if the
python sources should be installed alongside an extension module
built with this module, this keyword argument can be used to
- override that behaviour. Defaults to `true`
+ override that behaviour. Defaults to the value specified in
+ `find_installation()`, or else `true`
- `subdir`: See documentation for the argument of the same name to
[][`extension_module()`]
*New in Meson 0.57.0*
-The Qt5 module provides tools to automatically deal with the various
+The Qt6 module provides tools to automatically deal with the various
tools and steps required for Qt.
-{{ _include_qt_base.md }}
+## compile_resources
+
+*New in 0.59.0*
+
+Compiles Qt's resources collection files (.qrc) into c++ files for compilation.
+
+It takes no positional arguments, and the following keyword arguments:
+ - `name` (string | empty): if provided, a single .cpp file will be generated,
+ and the output of all qrc files will be combined in this file, otherwise
+ each qrc file will be written to its own cpp file.
+ - `sources` (File | string | custom_target | custom_target index | generator_output)[]:
+ A list of sources to be transpiled. Required, must have at least one source<br/>
+ *New in 0.60.0*: support for custom_target, custom_target_index, and generator_output.
+ - `extra_args` string[]: Extra arguments to pass directly to `qt-rcc`
+ - `method` string: The method to use to detect qt, see [[dependency]]
+
+## compile_ui
+
+*New in 0.59.0*
+
+Compiles Qt's ui files (.ui) into header files.
+
+It takes no positional arguments, and the following keyword arguments:
+ - `sources` (File | string | custom_target | custom_target index | generator_output)[]:
+ A list of sources to be transpiled. Required, must have at least one source<br/>
+ *New in 0.60.0*: support for custom_target, custom_target_index, and generator_output.
+ - `extra_args` string[]: Extra arguments to pass directly to `qt-uic`
+ - `method` string: The method to use to detect qt, see [[dependency]]
+
+## compile_moc
+
+*New in 0.59.0*
+
+Compiles Qt's moc files (.moc) into header and/or source files. At least one of
+the keyword arguments `headers` and `sources` must be provided.
+
+It takes no positional arguments, and the following keyword arguments:
+ - `sources` (File | string | custom_target | custom_target index | generator_output)[]:
+ A list of sources to be transpiled into .moc files for manual inclusion.<br/>
+ *New in 0.60.0*: support for custom_target, custom_target_index, and generator_output.
+ - `headers` (File | string | custom_target | custom_target index | generator_output)[]:
+ A list of headers to be transpiled into .cpp files<br/>
+ *New in 0.60.0*: support for custom_target, custom_target_index, and generator_output.
+ - `extra_args` string[]: Extra arguments to pass directly to `qt-moc`
+ - `method` string: The method to use to detect qt, see [[dependency]]
+ - `dependencies`: dependency objects whose include directories are used by moc.
+ - `include_directories` (string | IncludeDirectory)[]: A list of `include_directory()`
+ objects used when transpiling the .moc files
+
+## preprocess
+
+Consider using `compile_resources`, `compile_ui`, and `compile_moc` instead.
+
+Takes sources for moc, uic, and rcc, and converts them into c++ files for
+compilation.
+
+Has the following signature: `qt.preprocess(name: str | None, *sources: str)`
+
+If the `name` parameter is passed then all of the rcc files will be written to
+a single output file
+
+*Deprecated in 0.59.0*: Files given in the variadic `sources` arguments as well
+as the `sources` keyword argument, were passed unmodified through the preprocessor
+programs. Don't do this - just add the output of `preprocess()` to another sources
+list:
+```meson
+sources = files('a.cpp', 'main.cpp', 'bar.c')
+sources += qt.preprocess(qresources : ['resources'])
+```
+
+This method takes the following keyword arguments:
+ - `qresources` (string | File)[]: Passed to the RCC compiler
+ - `ui_files`: (string | File | CustomTarget)[]: Passed to the `uic` compiler
+ - `moc_sources`: (string | File | CustomTarget)[]: Passed to the `moc` compiler.
+ These are converted into .moc files meant to be `#include`ed
+ - `moc_headers`: (string | File | CustomTarget)[]: Passed to the `moc` compiler.
+ These will be converted into .cpp files
+ - `include_directories` (IncludeDirectories | string)[], the directories to add
+ to header search path for `moc`
+ - `moc_extra_arguments` string[]: any additional arguments to `moc`.
+ - `uic_extra_arguments` string[]: any additional arguments to `uic`.
+ - `rcc_extra_arguments` string[]: any additional arguments to `rcc`.
+ - `dependencies` Dependency[]: dependency objects needed by moc.
+ - *Deprecated in 0.59.0.*: `sources`: a list of extra sources, which are added
+ to the output unchanged.
+
+It returns an array of targets and sources to pass to a compilation target.
+
+## compile_translations
+
+This method generates the necessary targets to build translation files with
+lrelease. It takes no positional arguments, and the following keyword arguments:
+
+ - `ts_files` (File | string | custom_target | custom_target index | generator_output)[]:
+ the list of input translation files produced by Qt's lupdate tool.<br/>
+ *New in 0.60.0*: support for custom_target, custom_target_index, and generator_output.
+ - `install` bool: when true, this target is installed during the install step (optional).
+ - `install_dir` string: directory to install to (optional).
+ - `build_by_default` bool: when set to true, to have this target be built by
+ default, that is, when invoking `meson compile`; the default value is false
+ (optional).
+ - `qresource` string: rcc source file to extract ts_files from; cannot be used
+ with ts_files kwarg.
+ - `rcc_extra_arguments` string[]: any additional arguments to `rcc` (optional),
+ when used with `qresource`.
+
+Returns either: a list of custom targets for the compiled
+translations, or, if using a `qresource` file, a single custom target
+containing the processed source file, which should be passed to a main
+build target.
+
+## has_tools
+
+This method returns `true` if all tools used by this module are found,
+`false` otherwise.
+
+It should be used to compile optional Qt code:
+```meson
+qt6 = import('qt6')
+if qt6.has_tools(required: get_option('qt_feature'))
+ moc_files = qt6.preprocess(...)
+ ...
+endif
+```
+
+This method takes the following keyword arguments:
+- `required` bool | FeatureOption: by default, `required` is set to `false`. If `required` is set to
+ `true` or an enabled [`feature`](Build-options.md#features) and some tools are
+ missing Meson will abort.
+- `method` string: The method to use to detect qt, see [[dependency]]
+
+## Dependencies
+
+See [Qt dependencies](Dependencies.md#qt4-qt5)
+
+The 'modules' argument is used to include Qt modules in the project.
+See the Qt documentation for the [list of
+modules](https://doc.qt.io/qt-6/qtmodules.html).
+
+The 'private_headers' argument allows usage of Qt's modules private
+headers.
+
+## Example
+A simple example would look like this:
+
+```meson
+qt6 = import('qt6')
+qt6_dep = dependency('qt6', modules: ['Core', 'Gui'])
+inc = include_directories('includes')
+moc_files = qt6.compile_moc(headers : 'myclass.h',
+ extra_arguments: ['-DMAKES_MY_MOC_HEADER_COMPILE'],
+ include_directories: inc,
+ dependencies: qt6_dep)
+translations = qt6.compile_translations(ts_files : 'myTranslation_fr.ts', build_by_default : true)
+executable('myprog', 'main.cpp', 'myclass.cpp', moc_files,
+ include_directories: inc,
+ dependencies : qt6_dep)
+```
+
+Sometimes, translations are embedded inside the binary using qresource
+files. In this case the ts files do not need to be explicitly listed,
+but will be inferred from the built qm files listed in the qresource
+file. For example:
+
+```meson
+qt6 = import('qt6')
+qt6_dep = dependency('qt6', modules: ['Core', 'Gui'])
+lang_cpp = qt6.compile_translations(qresource: 'lang.qrc')
+executable('myprog', 'main.cpp', lang_cpp,
+ dependencies: qt6_dep)
+```
+
## Splitting of Compiler.get_function_attribute('visibility')
On macOS there is no `protected` visibility, which results in the
-visbility check always failing. 0.52.0 introduces two changes to
+visibility check always failing. 0.52.0 introduces two changes to
improve this situation:
1. the "visibility" check no longer includes "protected"
## External projects
-A new experimental module `unstable_external_project` has been added
+A new experimental module `unstable-external_project` has been added
to build code using other build systems than Meson. Currently only
supporting projects with a configure script that generates Makefiles.
meson_version : '>=0.56.0',
)
-mod = import('unstable_external_project')
+mod = import('unstable-external_project')
p = mod.add_project('configure',
configure_options : ['--prefix=@PREFIX@',
only specify compiler/linker arguments or other dependencies that satisfy
the same requirements.
-## `unstable_external_project` improvements
+## `unstable-external_project` improvements
- Default arguments are added to `add_project()` in case some tags are not found
in `configure_options`: `'--prefix=@PREFIX@'`, `'--libdir=@PREFIX@/@LIBDIR@'`,
The file `.clang-format-ignore` contains a list of patterns matching the files
that will be excluded. Files matching the include list (see above) that match
-one of the ignore pattern will not be reformatted. Unlike include patters, ignore
+one of the ignore patterns will not be reformatted. Unlike include patterns, ignore
patterns does not support `**` and a single `*` match any characters including
path separators. Empty lines and lines starting with `#` are ignored.
## Automatic fallback using WrapDB
A new command has been added: `meson wrap update-db`. It downloads the list of
-wraps available in [WrapDB](wrapdb.mesonbuild.com) and stores it locally in
+wraps available in [WrapDB](https://wrapdb.mesonbuild.com) and stores it locally in
`subprojects/wrapdb.json`. When that file exists and a dependency is not found
on the system but is available in WrapDB, Meson will automatically download it.
--- /dev/null
+---
+title: Release 1.0.0
+short-description: Release notes for 1.0.0
+...
+
+# New features
+
+Meson 1.0.0 was released on 23 December 2022.
+
+## Compiler check functions `prefix` kwargs accepts arrays
+
+The `prefix` kwarg that most compiler check functions support
+now accepts an array in addition to a string. The elements of the
+array will be concatenated separated by a newline.
+
+This makes it more readable to write checks that need multiple headers
+to be included:
+
+```meson
+cc.check_header('GL/wglew.h', prefix : ['#include <windows.h>', '#include <GL/glew.h>'])
+```
+
+instead of
+
+```meson
+cc.check_header('GL/wglew.h', prefix : '#include <windows.h>\n#include <GL/glew.h>')
+```
+
+## Flags removed from cpp/objcpp warning level 1
+
+`-Wnon-virtual-dtor` is no longer implied by `meson setup -Dwarning_level=1`.
+
+## Developer environment improvements
+
+When cross compiling, the developer environment now sets all environment
+variables for the HOST machine. It now also sets `QEMU_LD_PREFIX` to the
+`sys_root` value from the cross file if the property is defined. That means that cross
+compiled executables can often be run transparently on the build machine, for
+example when cross compiling for aarch64 linux from x86_64 linux.
+
+A new argument `--workdir` has been added, by default it is set to build
+directory. For example, `meson devenv -C builddir --workdir .` can be used to
+remain in the current dir (often source dir) instead.
+
+`--dump` now prints shell commands like `FOO="/prepend/path:$FOO:/append/path"`,
+using the literal `$FOO` instead of the current value of `FOO` from the environment.
+This makes it easier to evaluate those expressions in a different environment.
+
+## Deprecate `java.generate_native_headers`, rename to `java.native_headers`
+
+The functions operate in the exact same way. The new name matches Meson's
+function name styling better.
+
+## rust.bindgen accepts a dependency argument
+
+The `bindgen` method of the `rust` module now accepts a dependencies argument.
+Any include paths in these dependencies will be passed to the underlying call to
+`clang`, and the call to `bindgen` will correctly depend on any generated sources.
+
+## String arguments to the rust.bindgen include_directories argument
+
+Most other cases of include_directories accept strings as well as
+`IncludeDirectory` objects, so let's do that here too for consistency.
+
+## The Rust module is stable
+
+Mesa is using the rust module in production, so it's time to mark it as stable.
+
+## `in` operator for strings
+
+`in` and `not in` operators now work on strings, in addition to arrays and
+dictionaries.
+
+```
+fs = import('fs')
+if 'something' in fs.read('somefile')
+ # True
+endif
+```
+
+## `warning_level=everything` option
+
+The new `everything` value for the built-in `warning_level` enables roughly all applicable compiler warnings.
+For clang and MSVC, this simply enables `-Weverything` or `/Wall`, respectively.
+For GCC, meson enables warnings approximately equivalent to `-Weverything` from clang.
+
authors:
- name: Dylan Baker
email: dylan@pnwbakers.com
- years: [2020, 2021]
+ years: [2020, 2021, 2022]
...
-# Unstable Rust module
+# Rust module
*(new in 0.57.0)*
-
-**Note** Unstable modules make no backwards compatible API guarantees.
+*(Stable since 1.0.0)*
The rust module provides helper to integrate rust code into Meson. The
goal is to make using rust in Meson more pleasant, while still
Additional, test only dependencies may be passed via the dependencies
argument.
-### bindgen(*, input: string | BuildTarget | [](string | BuildTarget), output: string, include_directories: []include_directories, c_args: []string, args: []string)
+### bindgen(*, input: string | BuildTarget | [](string | BuildTarget), output: string, include_directories: [](include_directories | string), c_args: []string, args: []string, dependencies: []Dependency)
This function wraps bindgen to simplify creating rust bindings around C
libraries. This has two advantages over hand-rolling ones own with a
- input — A list of Files, Strings, or CustomTargets. The first element is
the header bindgen will parse, additional elements are dependencies.
- output — the name of the output rust file
-- include_directories — A list of `include_directories` objects, these are
- passed to clang as `-I` arguments
+- include_directories — A list of `include_directories` or `string` objects,
+ these are passed to clang as `-I` arguments *(string since 1.0.0)*
- c_args — A list of string arguments to pass to clang untouched
- args — A list of string arguments to pass to `bindgen` untouched.
+- dependencies — A list of `Dependency` objects to pass to the underlying clang call (*since 1.0.0*)
```meson
rust = import('unstable-rust')
topic](https://github.com/topics/meson).
- [2048.cpp](https://github.com/plibither8/2048.cpp), a fully featured terminal version of the game "2048" written in C++
+ - [aawordsearch](https://github.com/theimpossibleastronaut/aawordsearch), generate wordsearch puzzles using random words in different languages
- [Adwaita Manager](https://github.com/AdwCustomizerTeam/AdwCustomizer), change the look of Adwaita, with ease
- [Aravis](https://github.com/AravisProject/aravis), a glib/gobject based library for video acquisition using Genicam cameras
- [Akira](https://github.com/akiraux/Akira), a native Linux app for UI and UX design built in Vala and Gtk
- [HelenOS](http://helenos.org), a portable microkernel-based multiserver operating system
- [HexChat](https://github.com/hexchat/hexchat), a cross-platform IRC client in C
- [IGT](https://gitlab.freedesktop.org/drm/igt-gpu-tools), Linux kernel graphics driver test suite
+ - [i3](https://i3wm.org), improved tiling window manager
- [inih](https://github.com/benhoyt/inih) (INI Not Invented Here), a small and simple .INI file parser written in C
- [Irssi](https://github.com/irssi/irssi), a terminal chat client in C
- [iSH](https://github.com/tbodt/ish), Linux shell for iOS
- [Planner](https://github.com/alainm23/planner), task manager with Todoist support designed for GNU/Linux
- [Playerctl](https://github.com/acrisci/playerctl), mpris command-line controller and library for spotify, vlc, audacious, bmp, cmus, and others
- [Polari](https://gitlab.gnome.org/GNOME/polari), an IRC client
+ - [PostgreSQL](https://www.postgresql.org/), an advanced open source relational database
- [qboot](https://github.com/bonzini/qboot), a minimal x86 firmware for booting Linux kernels
- [radare2](https://github.com/radare/radare2), unix-like reverse engineering framework and commandline tools (not the default)
- [rmw](https://remove-to-waste.info), safe-remove utility for the command line
Type your project's name In the `Project name` entry box. In this
example we're going to use `testproj`. Next select the `Location`
-entry and browse to the root of your projet sources. Make sure that
+entry and browse to the root of your project sources. Make sure that
the checkbox `Place solution and project in the same directory` is
checked. Click `Create`.
### compile_resources
```
+ windows = import('windows')
windows.compile_resources(...(string | File | CustomTarget | CustomTargetIndex),
args: []string,
depend_files: [](string | File),
class Type(T.TypedDict):
obj: str # References an object from `root.objects`
- holds: T.Sequence[object] # Mypy does not support recusive dicts, but this should be T.List[Type]...
+ holds: T.Sequence[object] # Mypy does not support recursive dicts, but this should be T.List[Type]...
class Argument(BaseObject):
'''
Wrap-best-practices-and-tips.md
Shipping-prebuilt-binaries-as-wraps.md
Release-notes.md
+ Release-notes-for-1.0.0.md
Release-notes-for-0.64.0.md
Release-notes-for-0.63.0.md
Release-notes-for-0.62.0.md
(but *not* before that). On Windows, this argument has no effect.
objects:
- type: list[extracted_obj]
+ type: list[extracted_obj | file | str]
description: |
- List of prebuilt object files (usually for third party
- products you don't have source to) that should be linked in this
- target, **never** use this for object files that you build yourself.
+ List of object files that should be linked in this target.
+ These can include third party products you don't have source to,
+ or object files produced by other build targets.
name_prefix:
type: str | list[void]
since: 0.60.0
min_varargs: 1
description: |
- The names of the dependency too look up. The dependencies are looked up in
+ The names of the dependency to look up. The dependencies are looked up in
the order they are provided here. The first found dependency will then be
used. The fallback subproject will be used only if none of the names are
found on the system. Once one of the name has been found, all other names
type: bool | feature
default: true
description: |
- when set to false, Meson will proceed with the build
- even if the dependency is not found. *(since 0.47.0)* The value of a
- [`feature`](Build-options.md#features) option can also be passed.
+ When set to `false`, Meson will proceed with the build
+ even if the dependency is not found.
+
+ When set to a [`feature`](Build-options.md#features) option, the feature
+ will control if it is searched and whether to fail if not found.
+
+ *(since 0.47.0)* The value of a `feature` option can also be passed.
static:
type: bool
kwargs:
required:
type: bool | feature
+ default: true
since: 0.59.0
- description: Whether the mdule is required and Meson should abort if not found.
+ description: |
+ When set to `false`, Meson will proceed with the build even if the module
+ is not found.
+
+ When set to a [`feature`](Build-options.md#features) option, the feature
+ will control if it is searched and whether to fail if not found.
disabler:
type: bool
since: 0.48.0
default: true
description: |
- If `true`, Meson will abort if the subproject could not be setup. You can set
- this to `false` and then use the `.found()` method on the [[@subproject]] object.
- You may also pass the value of a
- [`feature`](Build-options.md#features) option, same as [[dependency]].
+ Works just the same as in [[dependency]].
description: You have found a bug if you can see this!
kwargs:
prefix:
- type: str
+ type: str | list[str]
description: |
Used to add `#include`s and other things that are required
- for the symbol to be declared. System definitions should be
- passed via compiler args (eg: `_GNU_SOURCE` is often required for
- some symbols to be exposed on Linux, and it should be passed via
- `args` keyword argument).
+ for the symbol to be declared. Since 1.0.0 an array is accepted
+ too. When an array is passed, the items are concatenated together
+ separated by a newline.
+ System definitions should be passed via compiler args
+ (eg: `_GNU_SOURCE` is often required for some symbols to be exposed
+ on Linux, and it should be passed via `args` keyword argument).
- name: _no_builtin_args
returns: void
type: bool | feature
default: false
since: 0.50.0
- description: If set to `true`, Meson will halt if the header check fails.
+ description: |
+ When set to `true`, Meson will halt if the header check fails.
+
+ When set to a [`feature`](Build-options.md#features) option, the feature
+ will control if it is searched and whether to fail if not found.
# Star of the actual functions
be found. Otherwise, Meson will continue and the found method of the
returned object will return `false`.
- *(since 0.47.0)* The value of a [`feature`](Build-options.md#features)
- option can also be passed here.
+ When set to a [`feature`](Build-options.md#features) option, the feature
+ will control if it is searched and whether to fail if not found.
+
+ *(since 0.47.0)* The value of a `feature` option can also be passed here.
has_headers:
type: list[str]
normal compilation. That includes for example args added with
`add_project_arguments()`, or on the command line with `-Dc_args=-DFOO`.
varargs_inherit: _build_target_base
+ kwargs_inherit:
+ - compiler._include_directories
kwargs:
output:
type: str
type: list[str]
description: |
Extra flags to pass to the preprocessor
-
Returns a string pointing to the script or executable.
- **NOTE:** You should not need to use this method. Passing the object itself
- should work in all cases.
+ **NOTE:** You should not usually need to use this method. Passing the
+ object itself should work in most contexts where a program can appear,
+ and allows Meson to setup inter-target dependencies correctly (for
+ example in cases where a program might be overridden by a [[build_tgt]]).
+ Only use this if you specifically need a string, such as when embedding
+ a program path into a header file.
For example:
description: |
Returns a string pointing to the script or executable.
- **NOTE:** You should not need to use this method. Passing the object itself
- should work in all cases.
+ **NOTE:** You should not usually need to use this method. Passing the
+ object itself should work in most contexts where a program can appear,
+ and allows Meson to setup inter-target dependencies correctly (for
+ example in cases where a program might be overridden by a [[build_tgt]]).
+ Only use this if you specifically need a string, such as when embedding
+ a program path into a header file.
For example:
-.TH MESON "1" "November 2022" "meson 0.64.1" "User Commands"
+.TH MESON "1" "December 2022" "meson 1.0.0" "User Commands"
.SH NAME
meson - a high productivity build system
.SH DESCRIPTION
import re
import typing as T
-from . import mesonlib
-
if T.TYPE_CHECKING:
from .linkers import StaticLinker
from .compilers import Compiler
-UNIXY_COMPILER_INTERNAL_LIBS = ['m', 'c', 'pthread', 'dl', 'rt'] # type: T.List[str]
-# execinfo is a compiler lib on FreeBSD and NetBSD
-if mesonlib.is_freebsd() or mesonlib.is_netbsd():
- UNIXY_COMPILER_INTERNAL_LIBS.append('execinfo')
+# execinfo is a compiler lib on BSD
+UNIXY_COMPILER_INTERNAL_LIBS = ['m', 'c', 'pthread', 'dl', 'rt', 'execinfo'] # type: T.List[str]
class Dedup(enum.Enum):
extraf_nodes = traverse_nodes(extra_queue)
# Make sure nothing can crash when creating the build class
- kwargs_reduced = {k: v for k, v in kwargs.items() if k in targetclass.known_kwargs and k in ['install', 'build_by_default', 'build_always']}
+ kwargs_reduced = {k: v for k, v in kwargs.items() if k in targetclass.known_kwargs and k in {'install', 'build_by_default', 'build_always'}}
kwargs_reduced = {k: v.value if isinstance(v, ElementaryNode) else v for k, v in kwargs_reduced.items()}
kwargs_reduced = {k: v for k, v in kwargs_reduced.items() if not isinstance(v, BaseNode)}
for_machine = MachineChoice.HOST
from functools import lru_cache
from itertools import chain
from pathlib import Path
+import copy
import enum
import json
import os
break
is_cross = self.environment.is_cross_build(test_for_machine)
- if is_cross and self.environment.need_exe_wrapper():
- exe_wrapper = self.environment.get_exe_wrapper()
- else:
- exe_wrapper = None
+ exe_wrapper = self.environment.get_exe_wrapper()
machine = self.environment.machines[exe.for_machine]
if machine.is_windows() or machine.is_cygwin():
extra_bdeps: T.List[T.Union[build.BuildTarget, build.CustomTarget]] = []
cmd_args.extend(self.construct_target_rel_paths(a, t.workdir))
else:
raise MesonException('Bad object in test command.')
+
+ t_env = copy.deepcopy(t.env)
+ if not machine.is_windows() and not machine.is_cygwin() and not machine.is_darwin():
+ ld_lib_path: T.Set[str] = set()
+ for d in depends:
+ if isinstance(d, build.BuildTarget):
+ for l in d.get_all_link_deps():
+ if isinstance(l, build.SharedLibrary):
+ ld_lib_path.add(os.path.join(self.environment.get_build_dir(), l.get_subdir()))
+ if ld_lib_path:
+ t_env.prepend('LD_LIBRARY_PATH', list(ld_lib_path), ':')
+
ts = TestSerialisation(t.get_name(), t.project_name, t.suite, cmd, is_cross,
exe_wrapper, self.environment.need_exe_wrapper(),
- t.is_parallel, cmd_args, t.env,
+ t.is_parallel, cmd_args, t_env,
t.should_fail, t.timeout, t.workdir,
extra_paths, t.protocol, t.priority,
isinstance(exe, build.Target),
raise MesonException(m.format(t.name, num_out, t.get_outputs(), num_outdirs))
assert len(t.install_tag) == num_out
install_mode = t.get_custom_install_mode()
- # because mypy get's confused type narrowing in lists
+ # because mypy gets confused by type narrowing in lists
first_outdir = outdirs[0]
first_outdir_name = install_dir_names[0]
env = build.EnvironmentVariables()
extra_paths = set()
library_paths = set()
+ build_machine = self.environment.machines[MachineChoice.BUILD]
host_machine = self.environment.machines[MachineChoice.HOST]
- need_exe_wrapper = self.environment.need_exe_wrapper()
- need_wine = need_exe_wrapper and host_machine.is_windows()
+ need_wine = not build_machine.is_windows() and host_machine.is_windows()
for t in self.build.get_targets().values():
- cross_built = not self.environment.machines.matches_build_machine(t.for_machine)
- can_run = not cross_built or not need_exe_wrapper or need_wine
in_default_dir = t.should_install() and not t.get_install_dir()[2]
- if not can_run or not in_default_dir:
+ if t.for_machine != MachineChoice.HOST or not in_default_dir:
continue
tdir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(t))
if isinstance(t, build.Executable):
# LD_LIBRARY_PATH. This allows running system applications using
# that library.
library_paths.add(tdir)
+ if need_wine:
+ # Executable paths should be in both PATH and WINEPATH.
+ # - Having them in PATH makes bash completion find it,
+ # and make running "foo.exe" find it when wine-binfmt is installed.
+ # - Having them in WINEPATH makes "wine foo.exe" find it.
+ library_paths.update(extra_paths)
if library_paths:
- if host_machine.is_windows() or host_machine.is_cygwin():
+ if need_wine:
+ env.prepend('WINEPATH', list(library_paths), separator=';')
+ elif host_machine.is_windows() or host_machine.is_cygwin():
extra_paths.update(library_paths)
elif host_machine.is_darwin():
env.prepend('DYLD_LIBRARY_PATH', list(library_paths))
else:
env.prepend('LD_LIBRARY_PATH', list(library_paths))
if extra_paths:
- if need_wine:
- env.prepend('WINEPATH', list(extra_paths), separator=';')
- else:
- env.prepend('PATH', list(extra_paths))
+ env.prepend('PATH', list(extra_paths))
return env
def compiler_to_generator(self, target: build.BuildTarget,
return NinjaCommandArg(c)
self.name = rule
- self.command = list(map(strToCommandArg, command)) # includes args which never go into a rspfile
- self.args = list(map(strToCommandArg, args)) # args which will go into a rspfile, if used
+ self.command = [strToCommandArg(c) for c in command] # includes args which never go into a rspfile
+ self.args = [strToCommandArg(a) for a in args] # args which will go into a rspfile, if used
self.description = description
self.deps = deps # depstyle 'gcc' or 'msvc'
self.depfile = depfile
self.orderdeps = OrderedSet()
self.elems = []
self.all_outputs = all_outputs
+ self.output_errors = ''
def add_dep(self, dep):
if isinstance(dep, list):
self.rule.refcount += 1
def write(self, outfile):
- self.check_outputs()
+ if self.output_errors:
+ raise MesonException(self.output_errors)
ins = ' '.join([ninja_quote(i, True) for i in self.infilenames])
outs = ' '.join([ninja_quote(i, True) for i in self.outfilenames])
implicit_outs = ' '.join([ninja_quote(i, True) for i in self.implicit_outfilenames])
def check_outputs(self):
for n in self.outfilenames:
if n in self.all_outputs:
- raise MesonException(f'Multiple producers for Ninja target "{n}". Please rename your targets.')
+ self.output_errors = f'Multiple producers for Ninja target "{n}". Please rename your targets.'
self.all_outputs[n] = True
@dataclass
self.ruledict[rule.name] = rule
def add_build(self, build):
+ build.check_outputs()
self.build_elements.append(build)
if build.rulename != 'phony':
def generate_scanbuild(self):
if not environment.detect_scanbuild():
return
- if ('', 'scan-build') in self.build.run_target_names:
+ if 'scan-build' in self.all_outputs:
return
cmd = self.environment.get_build_command() + \
['--internal', 'scanbuild', self.environment.source_dir, self.environment.build_dir] + \
return
if target_name in self.all_outputs:
return
- if ('', target_name) in self.build.run_target_names:
- return
cmd = self.environment.get_build_command() + \
['--internal', 'clang' + name, self.environment.source_dir, self.environment.build_dir] + \
extra_args
import shutil
if not shutil.which(tool):
return
- if ('', target_name) in self.build.run_target_names:
- return
if target_name in self.all_outputs:
return
cmd = self.environment.get_build_command() + \
submodmatch = submodre.match(line)
if submodmatch is not None:
parents = submodmatch.group(1).lower().split(':')
- assert len(parents) in (1, 2), (
+ assert len(parents) in {1, 2}, (
'submodule ancestry must be specified as'
f' ancestor:parent but Meson found {parents}')
def _get_cl_compiler(self, target):
for lang, c in target.compilers.items():
- if lang in ('c', 'cpp'):
+ if lang in {'c', 'cpp'}:
return c
# No source files, only objects, but we still need a compiler, so
# return a found compiler
if len(target.objects) > 0:
for lang, c in self.environment.coredata.compilers[target.for_machine].items():
- if lang in ('c', 'cpp'):
+ if lang in {'c', 'cpp'}:
return c
raise MesonException('Could not find a C or C++ compiler. MSVC can only build C/C++ projects.')
self.custom_aggregate_targets = {}
self.build_all_tdep_id = self.gen_id()
# FIXME: filter out targets that are not built by default.
- target_dependencies = list(map(lambda t: self.pbx_dep_map[t], self.build_targets))
+ target_dependencies = [self.pbx_dep_map[t] for t in self.build_targets]
custom_target_dependencies = [self.pbx_custom_dep_map[t] for t in self.custom_targets]
aggregated_targets = []
aggregated_targets.append((self.all_id, 'ALL_BUILD',
known_stlib_kwargs = known_build_target_kwargs | {'pic', 'prelink'}
known_jar_kwargs = known_exe_kwargs | {'main_class', 'java_resources'}
-def _process_install_tag(install_tag: T.Optional[T.Sequence[T.Optional[str]]],
+def _process_install_tag(install_tag: T.Optional[T.List[T.Optional[str]]],
num_outputs: int) -> T.List[T.Optional[str]]:
_install_tag: T.List[T.Optional[str]]
if not install_tag:
_install_tag = [None] * num_outputs
elif len(install_tag) == 1:
- _install_tag = list(install_tag) * num_outputs
+ _install_tag = install_tag * num_outputs
else:
- _install_tag = list(install_tag)
+ _install_tag = install_tag
return _install_tag
self.environment = environment
self.projects = {}
self.targets: 'T.OrderedDict[str, T.Union[CustomTarget, BuildTarget]]' = OrderedDict()
- self.run_target_names: T.Set[T.Tuple[str, str]] = set()
self.global_args: PerMachine[T.Dict[str, T.List[str]]] = PerMachine({}, {})
self.global_link_args: PerMachine[T.Dict[str, T.List[str]]] = PerMachine({}, {})
self.projects_args: PerMachine[T.Dict[str, T.Dict[str, T.List[str]]]] = PerMachine({}, {})
"""Convert IncludeDirs object to a list of strings.
:param sourcedir: The absolute source directory
- :param builddir: The absolute build directory, option, buid dir will not
+ :param builddir: The absolute build directory, optional; the build dir will not
be added if this is unset
:returns: A list of strings (without compiler argument)
"""
self.suffix = 'abs'
elif ('c' in self.compilers and self.compilers['c'].get_id().startswith('xc16')):
self.suffix = 'elf'
- elif ('c' in self.compilers and self.compilers['c'].get_id() in ('ti', 'c2000') or
- 'cpp' in self.compilers and self.compilers['cpp'].get_id() in ('ti', 'c2000')):
+ elif ('c' in self.compilers and self.compilers['c'].get_id() in {'ti', 'c2000'} or
+ 'cpp' in self.compilers and self.compilers['cpp'].get_id() in {'ti', 'c2000'}):
self.suffix = 'out'
else:
self.suffix = machine.get_exe_suffix()
raise InvalidArguments('Shared library darwin_versions: must be X.Y.Z where '
'X, Y, Z are numbers, and Y and Z are optional')
parts = v.split('.')
- if len(parts) in (1, 2, 3) and int(parts[0]) > 65535:
+ if len(parts) in {1, 2, 3} and int(parts[0]) > 65535:
raise InvalidArguments('Shared library darwin_versions: must be X.Y.Z '
'where X is [0, 65535] and Y, Z are optional')
- if len(parts) in (2, 3) and int(parts[1]) > 255:
+ if len(parts) in {2, 3} and int(parts[1]) > 255:
raise InvalidArguments('Shared library darwin_versions: must be X.Y.Z '
'where Y is [0, 255] and Y, Z are optional')
if len(parts) == 3 and int(parts[2]) > 255:
super().process_kwargs(kwargs)
if not self.environment.machines[self.for_machine].is_android():
- supports_versioning = True
- else:
- supports_versioning = False
-
- if supports_versioning:
# Shared library version
if 'version' in kwargs:
self.ltversion = kwargs['version']
env: T.Optional[EnvironmentVariables] = None,
feed: bool = False,
install: bool = False,
- install_dir: T.Optional[T.Sequence[T.Union[str, Literal[False]]]] = None,
+ install_dir: T.Optional[T.List[T.Union[str, Literal[False]]]] = None,
install_mode: T.Optional[FileMode] = None,
- install_tag: T.Optional[T.Sequence[T.Optional[str]]] = None,
+ install_tag: T.Optional[T.List[T.Optional[str]]] = None,
absolute_paths: bool = False,
backend: T.Optional['Backend'] = None,
):
def is_internal(self) -> bool:
'''
- Returns True iif this is a not installed static library.
+ Returns True if this is a static library that is not installed.
'''
if len(self.outputs) != 1:
return False
def is_internal(self) -> bool:
'''
- Returns True iif this is a not installed static library
+ Returns True if this is a static library that is not installed
'''
suf = os.path.splitext(self.output)[-1]
return suf in {'.a', '.lib'} and not self.should_install()
escape = False
elif i == '\\':
escape = True
- elif i in ['"', "'"]:
+ elif i in {'"', "'"}:
in_string = not in_string
- elif i in [' ', '\n']:
+ elif i in {' ', '\n'}:
if in_string:
curr += i
else:
else:
curr += i
res += [curr]
- res = list(filter(lambda x: len(x) > 0, res))
+ res = [r for r in res if len(r) > 0]
return res
def cmake_get_generator_args(env: 'Environment') -> T.List[str]:
supported = {
# Boolean functions
- 'BOOL': lambda x: '0' if x.upper() in ['0', 'FALSE', 'OFF', 'N', 'NO', 'IGNORE', 'NOTFOUND'] or x.endswith('-NOTFOUND') else '1',
+ 'BOOL': lambda x: '0' if x.upper() in {'0', 'FALSE', 'OFF', 'N', 'NO', 'IGNORE', 'NOTFOUND'} or x.endswith('-NOTFOUND') else '1',
'AND': lambda x: '1' if all(y == '1' for y in x.split(',')) else '0',
'OR': lambda x: '1' if any(y == '1' for y in x.split(',')) else '0',
'NOT': lambda x: '0' if x == '1' else '1',
rm_so_version = re.compile(r'(\.[0-9]+)+$')
def __init__(self, build_dir: Path):
- self.tgt_map = {} # type: T.Dict[str, T.Union['ConverterTarget', 'ConverterCustomTarget']]
+ self.tgt_map: T.Dict[str, T.Union['ConverterTarget', 'ConverterCustomTarget']] = {}
self.build_dir = build_dir
def add(self, tgt: T.Union['ConverterTarget', 'ConverterCustomTarget']) -> None:
self.full_name = target.full_name
self.type = target.type
self.install = target.install
- self.install_dir = None # type: T.Optional[Path]
+ self.install_dir: T.Optional[Path] = None
self.link_libraries = target.link_libraries
self.link_flags = target.link_flags + target.link_lang_flags
- self.depends_raw = [] # type: T.List[str]
- self.depends = [] # type: T.List[T.Union[ConverterTarget, ConverterCustomTarget]]
+ self.depends_raw: T.List[str] = []
+ self.depends: T.List[T.Union[ConverterTarget, ConverterCustomTarget]] = []
if target.install_paths:
self.install_dir = target.install_paths[0]
- self.languages = set() # type: T.Set[str]
- self.sources = [] # type: T.List[Path]
- self.generated = [] # type: T.List[Path]
- self.generated_ctgt = [] # type: T.List[CustomTargetReference]
- self.includes = [] # type: T.List[Path]
- self.sys_includes = [] # type: T.List[Path]
- self.link_with = [] # type: T.List[T.Union[ConverterTarget, ConverterCustomTarget]]
- self.object_libs = [] # type: T.List[ConverterTarget]
- self.compile_opts = {} # type: T.Dict[str, T.List[str]]
- self.public_compile_opts = [] # type: T.List[str]
+ self.languages: T.Set[str] = set()
+ self.sources: T.List[Path] = []
+ self.generated: T.List[Path] = []
+ self.generated_ctgt: T.List[CustomTargetReference] = []
+ self.includes: T.List[Path] = []
+ self.sys_includes: T.List[Path] = []
+ self.link_with: T.List[T.Union[ConverterTarget, ConverterCustomTarget]] = []
+ self.object_libs: T.List[ConverterTarget] = []
+ self.compile_opts: T.Dict[str, T.List[str]] = {}
+ self.public_compile_opts: T.List[str] = []
self.pie = False
# Project default override options (c_std, cpp_std, etc.)
- self.override_options = [] # type: T.List[str]
+ self.override_options: T.List[str] = []
# Convert the target name to a valid meson target name
self.name = _sanitize_cmake_name(self.name)
- self.generated_raw = [] # type: T.List[Path]
+ self.generated_raw: T.List[Path] = []
for i in target.files:
- languages = set() # type: T.Set[str]
- src_suffixes = set() # type: T.Set[str]
+ languages: T.Set[str] = set()
+ src_suffixes: T.Set[str] = set()
# Insert suffixes
for j in i.sources:
)
continue
self.override_options += [f'{i}_std={std}']
- elif j in ['-fPIC', '-fpic', '-fPIE', '-fpie']:
+ elif j in {'-fPIC', '-fpic', '-fPIE', '-fpie'}:
self.pie = True
elif isinstance(ctgt, ConverterCustomTarget):
# Sometimes projects pass generated source files as compiler
# suffix and just produces object files like `foo.obj`. Thus we have to do our best to
# undo this step and guess the correct language suffix of the object file. This is done
# by trying all language suffixes meson knows and checking if one of them fits.
- candidates = [j] # type: T.List[str]
+ candidates = [j]
if not any(j.endswith('.' + x) for x in exts):
mlog.warning('Object files do not contain source file extensions, thus falling back to guessing them.', once=True)
candidates += [f'{j}.{x}' for x in exts]
@lru_cache(maxsize=None)
def _all_source_suffixes(self) -> 'ImmutableListProtocol[str]':
- suffixes = [] # type: T.List[str]
+ suffixes: T.List[str] = []
for exts in lang_suffixes.values():
suffixes.extend(exts)
return suffixes
class CustomTargetReference:
def __init__(self, ctgt: 'ConverterCustomTarget', index: int) -> None:
- self.ctgt = ctgt # type: ConverterCustomTarget
- self.index = index # type: int
+ self.ctgt = ctgt
+ self.index = index
def __repr__(self) -> str:
if self.valid():
return self.ctgt.outputs[self.index]
class ConverterCustomTarget:
- tgt_counter = 0 # type: int
- out_counter = 0 # type: int
+ tgt_counter = 0
+ out_counter = 0
def __init__(self, target: CMakeGeneratorTarget, env: 'Environment', for_machine: MachineChoice) -> None:
assert target.current_bin_dir is not None
self.cmake_name = str(self.name)
self.original_outputs = list(target.outputs)
self.outputs = [x.name for x in self.original_outputs]
- self.conflict_map = {} # type: T.Dict[str, str]
- self.command = [] # type: T.List[T.List[T.Union[str, ConverterTarget]]]
+ self.conflict_map: T.Dict[str, str] = {}
+ self.command: T.List[T.List[T.Union[str, ConverterTarget]]] = []
self.working_dir = target.working_dir
self.depends_raw = target.depends
- self.inputs = [] # type: T.List[T.Union[str, CustomTargetReference]]
- self.depends = [] # type: T.List[T.Union[ConverterTarget, ConverterCustomTarget]]
- self.current_bin_dir = target.current_bin_dir # type: Path
- self.current_src_dir = target.current_src_dir # type: Path
+ self.inputs: T.List[T.Union[str, CustomTargetReference]] = []
+ self.depends: T.List[T.Union[ConverterTarget, ConverterCustomTarget]] = []
+ self.current_bin_dir = target.current_bin_dir
+ self.current_src_dir = target.current_src_dir
self.env = env
self.for_machine = for_machine
self._raw_target = target
# Ensure that there is no duplicate output in the project so
# that meson can handle cases where the same filename is
# generated in multiple directories
- temp_outputs = [] # type: T.List[str]
+ temp_outputs: T.List[str] = []
for i in self.outputs:
if i in all_outputs:
old = str(i)
self.outputs = temp_outputs
# Check if the command is a build target
- commands = [] # type: T.List[T.List[T.Union[str, ConverterTarget]]]
+ commands: T.List[T.List[T.Union[str, ConverterTarget]]] = []
for curr_cmd in self._raw_target.command:
assert isinstance(curr_cmd, list)
assert curr_cmd[0] != '', "An empty string is not a valid executable"
- cmd = [] # type: T.List[T.Union[str, ConverterTarget]]
+ cmd: T.List[T.Union[str, ConverterTarget]] = []
for j in curr_cmd:
if not j:
self.env = env
self.for_machine = MachineChoice.HOST # TODO make parameter
self.backend_name = backend.name
- self.linkers = set() # type: T.Set[str]
+ self.linkers: T.Set[str] = set()
self.fileapi = CMakeFileAPI(self.build_dir)
# Raw CMake results
- self.bs_files = [] # type: T.List[Path]
- self.codemodel_configs = None # type: T.Optional[T.List[CMakeConfiguration]]
- self.cmake_stderr = None # type: T.Optional[str]
+ self.bs_files: T.List[Path] = []
+ self.codemodel_configs: T.Optional[T.List[CMakeConfiguration]] = None
+ self.cmake_stderr: T.Optional[str] = None
# Analysed data
self.project_name = ''
- self.languages = [] # type: T.List[str]
- self.targets = [] # type: T.List[ConverterTarget]
- self.custom_targets = [] # type: T.List[ConverterCustomTarget]
+ self.languages: T.List[str] = []
+ self.targets: T.List[ConverterTarget] = []
+ self.custom_targets: T.List[ConverterCustomTarget] = []
self.trace: CMakeTraceParser
self.output_target_map = OutputTargetMap(self.build_dir)
# Generated meson data
- self.generated_targets = {} # type: T.Dict[str, T.Dict[str, T.Optional[str]]]
- self.internal_name_map = {} # type: T.Dict[str, str]
+ self.generated_targets: T.Dict[str, T.Dict[str, T.Optional[str]]] = {}
+ self.internal_name_map: T.Dict[str, str] = {}
# Do some special handling for object libraries for certain configurations
self._object_lib_workaround = False
self.trace.parse(self.cmake_stderr)
# Find all targets
- added_target_names = [] # type: T.List[str]
+ added_target_names: T.List[str] = []
for i_0 in self.codemodel_configs:
for j_0 in i_0.projects:
if not self.project_name:
# First pass: Basic target cleanup
object_libs = []
- custom_target_outputs = [] # type: T.List[str]
+ custom_target_outputs: T.List[str] = []
for ctgt in self.custom_targets:
ctgt.postprocess(self.output_target_map, self.src_dir, custom_target_outputs, self.trace)
for tgt in self.targets:
# Add the run script for custom commands
# Add the targets
- processing = [] # type: T.List[str]
- processed = {} # type: T.Dict[str, T.Dict[str, T.Optional[str]]]
- name_map = {} # type: T.Dict[str, str]
+ processing: T.List[str] = []
+ processed: T.Dict[str, T.Dict[str, T.Optional[str]]] = {}
+ name_map: T.Dict[str, str] = {}
def extract_tgt(tgt: T.Union[ConverterTarget, ConverterCustomTarget, CustomTargetReference]) -> IdNode:
tgt_name = None
detect_cycle(tgt)
# First handle inter target dependencies
- link_with = [] # type: T.List[IdNode]
- objec_libs = [] # type: T.List[IdNode]
- sources = [] # type: T.List[Path]
- generated = [] # type: T.List[T.Union[IdNode, IndexNode]]
- generated_filenames = [] # type: T.List[str]
- custom_targets = [] # type: T.List[ConverterCustomTarget]
- dependencies = [] # type: T.List[IdNode]
+ link_with: T.List[IdNode] = []
+ objec_libs: T.List[IdNode] = []
+ sources: T.List[Path] = []
+ generated: T.List[T.Union[IdNode, IndexNode]] = []
+ generated_filenames: T.List[str] = []
+ custom_targets: T.List[ConverterCustomTarget] = []
+ dependencies: T.List[IdNode] = []
for i in tgt.link_with:
assert isinstance(i, ConverterTarget)
if i.name not in processed:
install_tgt = options.get_install(tgt.cmake_name, tgt.install)
# Generate target kwargs
- tgt_kwargs = {
+ tgt_kwargs: TYPE_mixed_kwargs = {
'build_by_default': install_tgt,
'link_args': options.get_link_args(tgt.cmake_name, tgt.link_flags + tgt.link_libraries),
'link_with': link_with,
'install': install_tgt,
'override_options': options.get_override_options(tgt.cmake_name, tgt.override_options),
'objects': [method(x, 'extract_all_objects') for x in objec_libs],
- } # type: TYPE_mixed_kwargs
+ }
# Only set if installed and only override if it is set
if install_tgt and tgt.install_dir:
tgt_kwargs['pic'] = tgt.pie
# declare_dependency kwargs
- dep_kwargs = {
+ dep_kwargs: TYPE_mixed_kwargs = {
'link_args': tgt.link_flags + tgt.link_libraries,
'link_with': id_node(tgt_var),
'compile_args': tgt.public_compile_opts,
'include_directories': id_node(inc_var),
- } # type: TYPE_mixed_kwargs
+ }
if dependencies:
generated += dependencies
src_node = assign(src_var, function('files', sources))
tgt_node = assign(tgt_var, function(tgt_func, [tgt_var, id_node(src_var), *generated], tgt_kwargs))
node_list += [src_node, tgt_node]
- if tgt_func in ['static_library', 'shared_library']:
+ if tgt_func in {'static_library', 'shared_library'}:
dep_node = assign(dep_var, function('declare_dependency', kwargs=dep_kwargs))
node_list += [dep_node]
- elif tgt_func in ['shared_module']:
+ elif tgt_func == 'shared_module':
del dep_kwargs['link_with']
dep_node = assign(dep_var, function('declare_dependency', kwargs=dep_kwargs))
node_list += [dep_node]
# directory.
detect_cycle(tgt)
- tgt_var = tgt.name # type: str
+ tgt_var = tgt.name
def resolve_source(x: T.Union[str, ConverterTarget, ConverterCustomTarget, CustomTargetReference]) -> T.Union[str, IdNode, IndexNode]:
if isinstance(x, ConverterTarget):
return x
# Generate the command list
- command = [] # type: T.List[T.Union[str, IdNode, IndexNode]]
+ command: T.List[T.Union[str, IdNode, IndexNode]] = []
command += mesonlib.get_meson_command()
command += ['--internal', 'cmake_run_ctgt']
command += ['-o', '@OUTPUT@']
for cmd in tgt.command:
command += [resolve_source(x) for x in cmd] + [';;;']
- tgt_kwargs = {
+ tgt_kwargs: TYPE_mixed_kwargs = {
'input': [resolve_source(x) for x in tgt.inputs],
'output': tgt.outputs,
'command': command,
'depends': [resolve_source(x) for x in tgt.depends],
- } # type: TYPE_mixed_kwargs
+ }
root_cb.lines += [assign(tgt_var, function('custom_target', [tgt.name], tgt_kwargs))]
processed[tgt.name] = {'inc': None, 'src': None, 'dep': None, 'tgt': tgt_var, 'func': 'custom_target'}
if i in ignore:
continue
- if i in ['INTERFACE', 'LINK_INTERFACE_LIBRARIES', 'PUBLIC', 'PRIVATE', 'LINK_PUBLIC', 'LINK_PRIVATE']:
+ if i in {'INTERFACE', 'LINK_INTERFACE_LIBRARIES', 'PUBLIC', 'PRIVATE', 'LINK_PUBLIC', 'LINK_PRIVATE'}:
mode = i
continue
- if mode in ['INTERFACE', 'LINK_INTERFACE_LIBRARIES', 'PUBLIC', 'LINK_PUBLIC']:
+ if mode in {'INTERFACE', 'LINK_INTERFACE_LIBRARIES', 'PUBLIC', 'LINK_PUBLIC'}:
interface += i.split(';')
- if mode in ['PUBLIC', 'PRIVATE', 'LINK_PRIVATE']:
+ if mode in {'PUBLIC', 'PRIVATE', 'LINK_PRIVATE'}:
private += i.split(';')
if paths:
func = mo_file_line.group(4)
args = mo_file_line.group(5)
argl = args.split(' ')
- argl = list(map(lambda x: x.strip(), argl))
+ argl = [a.strip() for a in argl]
yield CMakeTraceLine(file, int(line), func, argl)
from .traceparser import CMakeTraceParser
from ..environment import Environment
from ..compilers import Compiler
+ from ..dependencies import MissingCompiler
class ResolvedTarget:
def __init__(self) -> None:
trace: 'CMakeTraceParser',
env: 'Environment',
*,
- clib_compiler: T.Optional['Compiler'] = None,
+ clib_compiler: T.Union['MissingCompiler', 'Compiler'] = None,
not_found_warning: T.Callable[[str], None] = lambda x: None) -> ResolvedTarget:
res = ResolvedTarget()
targets = [target_name]
res.libraries += [curr]
elif Path(curr).is_absolute() and Path(curr).exists():
res.libraries += [curr]
- elif env.machines.build.is_windows() and reg_is_maybe_bare_lib.match(curr) and clib_compiler is not None:
+ elif env.machines.build.is_windows() and reg_is_maybe_bare_lib.match(curr) and clib_compiler:
# On Windows, CMake library dependencies can be passed as bare library names,
# CMake brute-forces a combination of prefix/suffix combinations to find the
# right library. Assume any bare argument passed which is not also a CMake
from .mixins.arm import ArmCompiler, ArmclangCompiler
from .mixins.visualstudio import MSVCCompiler, ClangClCompiler
from .mixins.gnu import GnuCompiler
+from .mixins.gnu import gnu_common_warning_args, gnu_c_warning_args
from .mixins.intel import IntelGnuLikeCompiler, IntelVisualStudioLikeCompiler
from .mixins.clang import ClangCompiler
from .mixins.elbrus import ElbrusCompiler
self.warn_args = {'0': [],
'1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
- '3': default_warn_args + ['-Wextra', '-Wpedantic']}
+ '3': default_warn_args + ['-Wextra', '-Wpedantic'],
+ 'everything': ['-Weverything']}
def get_options(self) -> 'MutableKeyedOptionDictType':
opts = super().get_options()
self.warn_args = {'0': [],
'1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
- '3': default_warn_args + ['-Wextra', '-Wpedantic']}
+ '3': default_warn_args + ['-Wextra', '-Wpedantic'],
+ 'everything': ['-Weverything']}
def get_options(self) -> 'MutableKeyedOptionDictType':
opts = CCompiler.get_options(self)
self.warn_args = {'0': [],
'1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
- '3': default_warn_args + ['-Wextra', '-Wpedantic']}
+ '3': default_warn_args + ['-Wextra', '-Wpedantic'],
+ 'everything': (default_warn_args + ['-Wextra', '-Wpedantic'] +
+ self.supported_warn_args(gnu_common_warning_args) +
+ self.supported_warn_args(gnu_c_warning_args))}
def get_options(self) -> 'MutableKeyedOptionDictType':
opts = CCompiler.get_options(self)
info, exe_wrapper, linker=linker, full_version=full_version)
IntelGnuLikeCompiler.__init__(self)
self.lang_header = 'c-header'
- default_warn_args = ['-Wall', '-w3', '-diag-disable:remark']
+ default_warn_args = ['-Wall', '-w3']
self.warn_args = {'0': [],
- '1': default_warn_args,
- '2': default_warn_args + ['-Wextra'],
- '3': default_warn_args + ['-Wextra']}
+ '1': default_warn_args + ['-diag-disable:remark'],
+ '2': default_warn_args + ['-Wextra', '-diag-disable:remark'],
+ '3': default_warn_args + ['-Wextra', '-diag-disable:remark'],
+ 'everything': default_warn_args + ['-Wextra']}
def get_options(self) -> 'MutableKeyedOptionDictType':
opts = CCompiler.get_options(self)
from .mixins.ti import TICompiler
from .mixins.arm import ArmCompiler, ArmclangCompiler
from .mixins.visualstudio import MSVCCompiler, ClangClCompiler
-from .mixins.gnu import GnuCompiler
+from .mixins.gnu import GnuCompiler, gnu_common_warning_args, gnu_cpp_warning_args
from .mixins.intel import IntelGnuLikeCompiler, IntelVisualStudioLikeCompiler
from .mixins.clang import ClangCompiler
from .mixins.elbrus import ElbrusCompiler
CPPCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross,
info, exe_wrapper, linker=linker, full_version=full_version)
ClangCompiler.__init__(self, defines)
- default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor']
+ default_warn_args = ['-Wall', '-Winvalid-pch']
self.warn_args = {'0': [],
'1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
- '3': default_warn_args + ['-Wextra', '-Wpedantic']}
+ '3': default_warn_args + ['-Wextra', '-Wpedantic'],
+ 'everything': ['-Weverything']}
def get_options(self) -> 'MutableKeyedOptionDictType':
opts = CPPCompiler.get_options(self)
CPPCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross,
info, exe_wrapper, linker=linker, full_version=full_version)
ArmclangCompiler.__init__(self)
- default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor']
+ default_warn_args = ['-Wall', '-Winvalid-pch']
self.warn_args = {'0': [],
'1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
- '3': default_warn_args + ['-Wextra', '-Wpedantic']}
+ '3': default_warn_args + ['-Wextra', '-Wpedantic'],
+ 'everything': ['-Weverything']}
def get_options(self) -> 'MutableKeyedOptionDictType':
opts = CPPCompiler.get_options(self)
CPPCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross,
info, exe_wrapper, linker=linker, full_version=full_version)
GnuCompiler.__init__(self, defines)
- default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor']
+ default_warn_args = ['-Wall', '-Winvalid-pch']
self.warn_args = {'0': [],
'1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
- '3': default_warn_args + ['-Wextra', '-Wpedantic']}
+ '3': default_warn_args + ['-Wextra', '-Wpedantic'],
+ 'everything': (default_warn_args + ['-Wextra', '-Wpedantic'] +
+ self.supported_warn_args(gnu_common_warning_args) +
+ self.supported_warn_args(gnu_cpp_warning_args))}
def get_options(self) -> 'MutableKeyedOptionDictType':
key = OptionKey('std', machine=self.for_machine, lang=self.language)
info, exe_wrapper, linker=linker, full_version=full_version)
IntelGnuLikeCompiler.__init__(self)
self.lang_header = 'c++-header'
- default_warn_args = ['-Wall', '-w3', '-diag-disable:remark',
- '-Wpch-messages', '-Wnon-virtual-dtor']
+ default_warn_args = ['-Wall', '-w3', '-Wpch-messages']
self.warn_args = {'0': [],
- '1': default_warn_args,
- '2': default_warn_args + ['-Wextra'],
- '3': default_warn_args + ['-Wextra']}
+ '1': default_warn_args + ['-diag-disable:remark'],
+ '2': default_warn_args + ['-Wextra', '-diag-disable:remark'],
+ '3': default_warn_args + ['-Wextra', '-diag-disable:remark'],
+ 'everything': default_warn_args + ['-Wextra']}
def get_options(self) -> 'MutableKeyedOptionDictType':
opts = CPPCompiler.get_options(self)
# native option to override it; override it with /NODEFAULTLIB
host_link_arg_overrides = []
host_crt_compile_args = self.host_compiler.get_crt_compile_args(crt_val, buildtype)
- if any(arg in ['/MDd', '/MD', '/MTd'] for arg in host_crt_compile_args):
+ if any(arg in {'/MDd', '/MD', '/MTd'} for arg in host_crt_compile_args):
host_link_arg_overrides += ['/NODEFAULTLIB:LIBCMT.lib']
return self._to_host_flags(host_link_arg_overrides + self.host_compiler.get_crt_link_args(crt_val, buildtype), _Phase.LINKER)
CompileCheckMode,
)
from .mixins.gnu import GnuCompiler
+from .mixins.gnu import gnu_common_warning_args
if T.TYPE_CHECKING:
from ..dependencies import Dependency
continue
if arg.startswith('-fstack-protector'):
continue
- if arg.startswith('-D'):
+ if arg.startswith('-D') and not (arg == '-D' or arg.startswith(('-Dd', '-Df'))):
+ # ignore all '-D*' flags (like '-D_THREAD_SAFE')
+ # unless they are related to documentation
continue
if arg.startswith('-Wl,'):
# Translate linker arguments here.
if crt_val in self.mscrt_args:
return self.mscrt_args[crt_val]
- assert crt_val in ['from_buildtype', 'static_from_buildtype']
+ assert crt_val in {'from_buildtype', 'static_from_buildtype'}
dbg = 'mdd'
rel = 'md'
self.warn_args = {'0': [],
'1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
- '3': default_warn_args + ['-Wextra', '-Wpedantic']}
+ '3': default_warn_args + ['-Wextra', '-Wpedantic'],
+ 'everything': (default_warn_args + ['-Wextra', '-Wpedantic'] +
+ self.supported_warn_args(gnu_common_warning_args))}
+
self.base_options = {
OptionKey(o) for o in [
'b_colorout', 'b_sanitize', 'b_staticpic', 'b_vscrt',
)
from .mixins.clike import CLikeCompiler
from .mixins.gnu import (
- GnuCompiler, gnulike_buildtype_args, gnu_optimization_args,
+ GnuCompiler, gnulike_buildtype_args, gnu_optimization_args
)
from .mixins.intel import IntelGnuLikeCompiler, IntelVisualStudioLikeCompiler
from .mixins.clang import ClangCompiler
def module_name_to_filename(self, module_name: str) -> str:
if '_' in module_name: # submodule
s = module_name.lower()
- if self.id in ('gcc', 'intel', 'intel-cl'):
+ if self.id in {'gcc', 'intel', 'intel-cl'}:
filename = s.replace('_', '@') + '.smod'
- elif self.id in ('pgi', 'flang'):
+ elif self.id in {'pgi', 'flang'}:
filename = s.replace('_', '-') + '.mod'
else:
filename = s + '.mod'
self.warn_args = {'0': [],
'1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
- '3': default_warn_args + ['-Wextra', '-Wpedantic', '-fimplicit-none']}
+ '3': default_warn_args + ['-Wextra', '-Wpedantic', '-fimplicit-none'],
+ 'everything': default_warn_args + ['-Wextra', '-Wpedantic', '-fimplicit-none']}
def get_options(self) -> 'MutableKeyedOptionDictType':
opts = FortranCompiler.get_options(self)
self.warn_args = {'0': [],
'1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
- '3': default_warn_args + ['-Wextra', '-pedantic']}
+ '3': default_warn_args + ['-Wextra', '-pedantic'],
+ 'everything': default_warn_args + ['-Wextra', '-pedantic']}
def get_module_outdir_args(self, path: str) -> T.List[str]:
return ['-fmod=' + path]
self.warn_args = {'0': [],
'1': default_warn_args,
'2': default_warn_args + ['-warn', 'unused'],
- '3': ['-warn', 'all']}
+ '3': ['-warn', 'all'],
+ 'everything': ['-warn', 'all']}
def get_options(self) -> 'MutableKeyedOptionDictType':
opts = FortranCompiler.get_options(self)
self.warn_args = {'0': [],
'1': default_warn_args,
'2': default_warn_args + ['/warn:unused'],
- '3': ['/warn:all']}
+ '3': ['/warn:all'],
+ 'everything': ['/warn:all']}
def get_options(self) -> 'MutableKeyedOptionDictType':
opts = FortranCompiler.get_options(self)
self.warn_args = {'0': [],
'1': default_warn_args,
'2': default_warn_args,
- '3': default_warn_args}
+ '3': default_warn_args,
+ 'everything': default_warn_args}
def openmp_flags(self) -> T.List[str]:
return ['-mp']
self.warn_args = {'0': [],
'1': default_warn_args,
'2': default_warn_args,
- '3': default_warn_args + ['-Mdclchk']}
+ '3': default_warn_args + ['-Mdclchk'],
+ 'everything': default_warn_args + ['-Mdclchk']}
def language_stdlib_only_link_flags(self, env: 'Environment') -> T.List[str]:
# TODO: needs default search path added
self.warn_args = {'0': [],
'1': default_warn_args,
'2': default_warn_args,
- '3': default_warn_args + ['-Mdclchk']}
+ '3': default_warn_args + ['-Mdclchk'],
+ 'everything': default_warn_args + ['-Mdclchk']}
class FlangFortranCompiler(ClangCompiler, FortranCompiler):
self.warn_args = {'0': [],
'1': default_warn_args,
'2': default_warn_args,
- '3': default_warn_args}
+ '3': default_warn_args,
+ 'everything': default_warn_args}
def language_stdlib_only_link_flags(self, env: 'Environment') -> T.List[str]:
# We need to apply the search prefix here, as these link arguments may
self.warn_args = {'0': [],
'1': default_warn_args,
'2': default_warn_args,
- '3': default_warn_args}
+ '3': default_warn_args,
+ 'everything': default_warn_args}
def openmp_flags(self) -> T.List[str]:
return ['-mp']
'1': [],
'2': [],
'3': [],
+ 'everything': [],
}
def get_always_args(self) -> T.List[str]:
def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str],
build_dir: str) -> T.List[str]:
for idx, i in enumerate(parameter_list):
- if i in ['-cp', '-classpath', '-sourcepath'] and idx + 1 < len(parameter_list):
+ if i in {'-cp', '-classpath', '-sourcepath'} and idx + 1 < len(parameter_list):
path_list = parameter_list[idx + 1].split(os.pathsep)
path_list = [os.path.normpath(os.path.join(build_dir, x)) for x in path_list]
parameter_list[idx + 1] = os.pathsep.join(path_list)
self.warn_args = {'0': [],
'1': default_warn_args,
'2': default_warn_args + [],
- '3': default_warn_args + []} # type: T.Dict[str, T.List[str]]
+ '3': default_warn_args + [],
+ 'everything': default_warn_args + []} # type: T.Dict[str, T.List[str]]
# Assembly
self.can_compile_suffixes.add('s')
self.warn_args = {'0': [],
'1': default_warn_args,
'2': default_warn_args + [],
- '3': default_warn_args + []} # type: T.Dict[str, T.List[str]]
+ '3': default_warn_args + [],
+ 'everything': default_warn_args + []} # type: T.Dict[str, T.List[str]]
def get_pic_args(self) -> T.List[str]:
# PIC support is not enabled by default for CCRX,
# Remove system/default include paths added with -isystem
default_dirs = self.compiler.get_default_include_dirs()
if default_dirs:
- real_default_dirs = [os.path.realpath(i) for i in default_dirs]
+ real_default_dirs = [self._cached_realpath(i) for i in default_dirs]
bad_idx_list = [] # type: T.List[int]
for i, each in enumerate(new):
if not each.startswith('-isystem'):
# Remove the -isystem and the path if the path is a default path
if (each == '-isystem' and
i < (len(new) - 1) and
- os.path.realpath(new[i + 1]) in real_default_dirs):
+ self._cached_realpath(new[i + 1]) in real_default_dirs):
bad_idx_list += [i, i + 1]
- elif each.startswith('-isystem=') and os.path.realpath(each[9:]) in real_default_dirs:
+ elif each.startswith('-isystem=') and self._cached_realpath(each[9:]) in real_default_dirs:
bad_idx_list += [i]
- elif os.path.realpath(each[8:]) in real_default_dirs:
+ elif self._cached_realpath(each[8:]) in real_default_dirs:
bad_idx_list += [i]
for i in reversed(bad_idx_list):
new.pop(i)
return self.compiler.unix_args_to_native(new._container)
+ @staticmethod
+ @functools.lru_cache(maxsize=None)
+ def _cached_realpath(arg: str) -> str:
+ return os.path.realpath(arg)
+
def __repr__(self) -> str:
self.flush_pre_post()
return f'CLikeCompilerArgs({self.compiler!r}, {self._container!r})'
# don't work
m = env.machines[self.for_machine]
if not (m.is_windows() or m.is_cygwin()):
- if name in ['dllimport', 'dllexport']:
+ if name in {'dllimport', 'dllexport'}:
return False, False
return self.compiles(self.attribute_check_func(name), env,
self.warn_args = {'0': [],
'1': default_warn_args,
'2': default_warn_args + [],
- '3': default_warn_args + []} # type: T.Dict[str, T.List[str]]
+ '3': default_warn_args + [],
+ 'everything': default_warn_args + []} # type: T.Dict[str, T.List[str]]
def get_always_args(self) -> T.List[str]:
return []
self.warn_args = {'0': [],
'1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
- '3': default_warn_args + ['-Wextra', '-Wpedantic']}
+ '3': default_warn_args + ['-Wextra', '-Wpedantic'],
+ 'everything': default_warn_args + ['-Wextra', '-Wpedantic']}
# FIXME: use _build_wrapper to call this so that linker flags from the env
# get applied
'never': ['-fdiagnostics-color=never'],
} # type: T.Dict[str, T.List[str]]
+# Warnings collected from the GCC source and documentation. This is an
+# objective set of all the warnings flags that apply to general projects: the
+# only ones omitted are those that require a project-specific value, or are
+# related to non-standard or legacy language support. This behaves roughly
+# like -Weverything in clang. Warnings implied by -Wall, -Wextra, or
+# higher-level warnings already enabled here are not included in these lists to
+# keep them as short as possible. History goes back to GCC 3.0.0, everything
+# earlier is considered historical and listed under version 0.0.0.
+
+# GCC warnings for all C-family languages
+# Omitted non-general warnings:
+# -Wabi=
+# -Waggregate-return
+# -Walloc-size-larger-than=BYTES
+# -Walloca-larger-than=BYTES
+# -Wframe-larger-than=BYTES
+# -Wlarger-than=BYTES
+# -Wstack-usage=BYTES
+# -Wsystem-headers
+# -Wtrampolines
+# -Wvla-larger-than=BYTES
+#
+# Omitted warnings enabled elsewhere in meson:
+# -Winvalid-pch (GCC 3.4.0)
+gnu_common_warning_args = {
+ "0.0.0": [
+ "-Wcast-qual",
+ "-Wconversion",
+ "-Wfloat-equal",
+ "-Wformat=2",
+ "-Winline",
+ "-Wmissing-declarations",
+ "-Wredundant-decls",
+ "-Wshadow",
+ "-Wundef",
+ "-Wuninitialized",
+ "-Wwrite-strings",
+ ],
+ "3.0.0": [
+ "-Wdisabled-optimization",
+ "-Wpacked",
+ "-Wpadded",
+ ],
+ "3.3.0": [
+ "-Wmultichar",
+ "-Wswitch-default",
+ "-Wswitch-enum",
+ "-Wunused-macros",
+ ],
+ "4.0.0": [
+ "-Wmissing-include-dirs",
+ ],
+ "4.1.0": [
+ "-Wunsafe-loop-optimizations",
+ "-Wstack-protector",
+ ],
+ "4.2.0": [
+ "-Wstrict-overflow=5",
+ ],
+ "4.3.0": [
+ "-Warray-bounds=2",
+ "-Wlogical-op",
+ "-Wstrict-aliasing=3",
+ "-Wvla",
+ ],
+ "4.6.0": [
+ "-Wdouble-promotion",
+ "-Wsuggest-attribute=const",
+ "-Wsuggest-attribute=noreturn",
+ "-Wsuggest-attribute=pure",
+ "-Wtrampolines",
+ ],
+ "4.7.0": [
+ "-Wvector-operation-performance",
+ ],
+ "4.8.0": [
+ "-Wsuggest-attribute=format",
+ ],
+ "4.9.0": [
+ "-Wdate-time",
+ ],
+ "5.1.0": [
+ "-Wformat-signedness",
+ "-Wnormalized=nfc",
+ ],
+ "6.1.0": [
+ "-Wduplicated-cond",
+ "-Wnull-dereference",
+ "-Wshift-negative-value",
+ "-Wshift-overflow=2",
+ "-Wunused-const-variable=2",
+ ],
+ "7.1.0": [
+ "-Walloca",
+ "-Walloc-zero",
+ "-Wformat-overflow=2",
+ "-Wformat-truncation=2",
+ "-Wstringop-overflow=3",
+ ],
+ "7.2.0": [
+ "-Wduplicated-branches",
+ ],
+ "8.1.0": [
+ "-Wattribute-alias=2",
+ "-Wcast-align=strict",
+ "-Wsuggest-attribute=cold",
+ "-Wsuggest-attribute=malloc",
+ ],
+ "10.1.0": [
+ "-Wanalyzer-too-complex",
+ "-Warith-conversion",
+ ],
+ "12.1.0": [
+ "-Wbidi-chars=ucn",
+ "-Wopenacc-parallelism",
+ "-Wtrivial-auto-var-init",
+ ],
+} # type: T.Dict[str, T.List[str]]
+
+# GCC warnings for C
+# Omitted non-general or legacy warnings:
+# -Wc11-c2x-compat
+# -Wc90-c99-compat
+# -Wc99-c11-compat
+# -Wdeclaration-after-statement
+# -Wtraditional
+# -Wtraditional-conversion
+gnu_c_warning_args = {
+ "0.0.0": [
+ "-Wbad-function-cast",
+ "-Wmissing-prototypes",
+ "-Wnested-externs",
+ "-Wstrict-prototypes",
+ ],
+ "3.4.0": [
+ "-Wold-style-definition",
+ "-Winit-self",
+ ],
+ "4.1.0": [
+ "-Wc++-compat",
+ ],
+ "4.5.0": [
+ "-Wunsuffixed-float-constants",
+ ],
+} # type: T.Dict[str, T.List[str]]
+
+# GCC warnings for C++
+# Omitted non-general or legacy warnings:
+# -Wc++0x-compat
+# -Wc++1z-compat
+# -Wc++2a-compat
+# -Wctad-maybe-unsupported
+# -Wnamespaces
+# -Wtemplates
+gnu_cpp_warning_args = {
+ "0.0.0": [
+ "-Wctor-dtor-privacy",
+ "-Weffc++",
+ "-Wnon-virtual-dtor",
+ "-Wold-style-cast",
+ "-Woverloaded-virtual",
+ "-Wsign-promo",
+ ],
+ "4.0.1": [
+ "-Wstrict-null-sentinel",
+ ],
+ "4.6.0": [
+ "-Wnoexcept",
+ ],
+ "4.7.0": [
+ "-Wzero-as-null-pointer-constant",
+ ],
+ "4.8.0": [
+ "-Wabi-tag",
+ "-Wuseless-cast",
+ ],
+ "4.9.0": [
+ "-Wconditionally-supported",
+ ],
+ "5.1.0": [
+ "-Wsuggest-final-methods",
+ "-Wsuggest-final-types",
+ "-Wsuggest-override",
+ ],
+ "6.1.0": [
+ "-Wmultiple-inheritance",
+ "-Wplacement-new=2",
+ "-Wvirtual-inheritance",
+ ],
+ "7.1.0": [
+ "-Waligned-new=all",
+ "-Wnoexcept-type",
+ "-Wregister",
+ ],
+ "8.1.0": [
+ "-Wcatch-value=3",
+ "-Wextra-semi",
+ ],
+ "9.1.0": [
+ "-Wdeprecated-copy-dtor",
+ "-Wredundant-move",
+ ],
+ "10.1.0": [
+ "-Wcomma-subscript",
+ "-Wmismatched-tags",
+ "-Wredundant-tags",
+ "-Wvolatile",
+ ],
+ "11.1.0": [
+ "-Wdeprecated-enum-enum-conversion",
+ "-Wdeprecated-enum-float-conversion",
+ "-Winvalid-imported-macros",
+ ],
+} # type: T.Dict[str, T.List[str]]
+
+# GCC warnings for Objective C and Objective C++
+# Omitted non-general or legacy warnings:
+# -Wtraditional
+# -Wtraditional-conversion
+gnu_objc_warning_args = {
+ "0.0.0": [
+ "-Wselector",
+ ],
+ "3.3": [
+ "-Wundeclared-selector",
+ ],
+ "4.1.0": [
+ "-Wassign-intercept",
+ "-Wstrict-selector-match",
+ ],
+} # type: T.Dict[str, T.List[str]]
+
@functools.lru_cache(maxsize=None)
def gnulike_default_include_dirs(compiler: T.Tuple[str, ...], lang: str) -> 'ImmutableListProtocol[str]':
args[args.index('-Wpedantic')] = '-pedantic'
return args
+ def supported_warn_args(self, warn_args_by_version: T.Dict[str, T.List[str]]) -> T.List[str]:
+ result = []
+ for version, warn_args in warn_args_by_version.items():
+ if mesonlib.version_compare(self.version, '>=' + version):
+ result += warn_args
+ return result
+
def has_builtin_define(self, define: str) -> bool:
return define in self.defines
'0': [],
'1': default_warn_args,
'2': default_warn_args,
- '3': default_warn_args
+ '3': default_warn_args,
+ 'everything': default_warn_args
}
def get_module_incdir_args(self) -> T.Tuple[str]:
self.warn_args = {'0': [],
'1': default_warn_args,
'2': default_warn_args + [],
- '3': default_warn_args + []} # type: T.Dict[str, T.List[str]]
+ '3': default_warn_args + [],
+ 'everything': default_warn_args + []} # type: T.Dict[str, T.List[str]]
def get_pic_args(self) -> T.List[str]:
# PIC support is not enabled by default for TI compilers,
'1': ['/W2'],
'2': ['/W3'],
'3': ['/W4'],
+ 'everything': ['/Wall'],
} # type: T.Dict[str, T.List[str]]
INVOKES_LINKER = False
for i in args:
# -mms-bitfields is specific to MinGW-GCC
# -pthread is only valid for GCC
- if i in ('-mms-bitfields', '-pthread'):
+ if i in {'-mms-bitfields', '-pthread'}:
continue
if i.startswith('-LIBPATH:'):
i = '/LIBPATH:' + i[9:]
def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]:
if crt_val in self.crt_args:
return self.crt_args[crt_val]
- assert crt_val in ['from_buildtype', 'static_from_buildtype']
+ assert crt_val in {'from_buildtype', 'static_from_buildtype'}
dbg = 'mdd'
rel = 'md'
if crt_val == 'static_from_buildtype':
def has_func_attribute(self, name: str, env: 'Environment') -> T.Tuple[bool, bool]:
# MSVC doesn't have __attribute__ like Clang and GCC do, so just return
# false without compiling anything
- return name in ['dllimport', 'dllexport'], False
+ return name in {'dllimport', 'dllexport'}, False
def get_argument_syntax(self) -> str:
return 'msvc'
self.warn_args = {'0': [],
'1': default_warn_args,
'2': default_warn_args + [],
- '3': default_warn_args + []} # type: T.Dict[str, T.List[str]]
+ '3': default_warn_args + [],
+ 'everything': default_warn_args + []} # type: T.Dict[str, T.List[str]]
def get_always_args(self) -> T.List[str]:
return []
from .compilers import Compiler
from .mixins.clike import CLikeCompiler
-from .mixins.gnu import GnuCompiler
+from .mixins.gnu import GnuCompiler, gnu_common_warning_args, gnu_objc_warning_args
from .mixins.clang import ClangCompiler
if T.TYPE_CHECKING:
self.warn_args = {'0': [],
'1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
- '3': default_warn_args + ['-Wextra', '-Wpedantic']}
+ '3': default_warn_args + ['-Wextra', '-Wpedantic'],
+ 'everything': (default_warn_args + ['-Wextra', '-Wpedantic'] +
+ self.supported_warn_args(gnu_common_warning_args) +
+ self.supported_warn_args(gnu_objc_warning_args))}
class ClangObjCCompiler(ClangCompiler, ObjCCompiler):
self.warn_args = {'0': [],
'1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
- '3': default_warn_args + ['-Wextra', '-Wpedantic']}
+ '3': default_warn_args + ['-Wextra', '-Wpedantic'],
+ 'everything': ['-Weverything']}
def get_options(self) -> 'coredata.MutableKeyedOptionDictType':
opts = super().get_options()
from .mixins.clike import CLikeCompiler
from .compilers import Compiler
-from .mixins.gnu import GnuCompiler
+from .mixins.gnu import GnuCompiler, gnu_common_warning_args, gnu_objc_warning_args
from .mixins.clang import ClangCompiler
if T.TYPE_CHECKING:
ObjCPPCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross,
info, exe_wrapper, linker=linker, full_version=full_version)
GnuCompiler.__init__(self, defines)
- default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor']
+ default_warn_args = ['-Wall', '-Winvalid-pch']
self.warn_args = {'0': [],
'1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
- '3': default_warn_args + ['-Wextra', '-Wpedantic']}
+ '3': default_warn_args + ['-Wextra', '-Wpedantic'],
+ 'everything': (default_warn_args + ['-Wextra', '-Wpedantic'] +
+ self.supported_warn_args(gnu_common_warning_args) +
+ self.supported_warn_args(gnu_objc_warning_args))}
class ClangObjCPPCompiler(ClangCompiler, ObjCPPCompiler):
ObjCPPCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross,
info, exe_wrapper, linker=linker, full_version=full_version)
ClangCompiler.__init__(self, defines)
- default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor']
+ default_warn_args = ['-Wall', '-Winvalid-pch']
self.warn_args = {'0': [],
'1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
- '3': default_warn_args + ['-Wextra', '-Wpedantic']}
+ '3': default_warn_args + ['-Wextra', '-Wpedantic'],
+ 'everything': ['-Weverything']}
def get_options(self) -> 'coredata.MutableKeyedOptionDictType':
opts = super().get_options()
#
# Pip requires that RCs are named like this: '0.1.0.rc1'
# But the corresponding Git tag needs to be '0.1.0rc1'
-version = '0.64.1'
+version = '1.0.0'
backendlist = ['ninja', 'vs', 'vs2010', 'vs2012', 'vs2013', 'vs2015', 'vs2017', 'vs2019', 'vs2022', 'xcode']
continue
elif k in self.options:
self.set_option(k, v)
- elif k.machine != MachineChoice.BUILD:
+ elif k.machine != MachineChoice.BUILD and k.type != OptionType.COMPILER:
unknown_options.append(k)
if unknown_options:
unknown_options_str = ', '.join(sorted(str(s) for s in unknown_options))
(OptionKey('strip'), BuiltinOption(UserBooleanOption, 'Strip targets on install', False)),
(OptionKey('unity'), BuiltinOption(UserComboOption, 'Unity build', 'off', choices=['on', 'off', 'subprojects'])),
(OptionKey('unity_size'), BuiltinOption(UserIntegerOption, 'Unity block size', (2, None, 4))),
- (OptionKey('warning_level'), BuiltinOption(UserComboOption, 'Compiler warning level to use', '1', choices=['0', '1', '2', '3'], yielding=False)),
+ (OptionKey('warning_level'), BuiltinOption(UserComboOption, 'Compiler warning level to use', '1', choices=['0', '1', '2', '3', 'everything'], yielding=False)),
(OptionKey('werror'), BuiltinOption(UserBooleanOption, 'Treat warnings as errors', False, yielding=False)),
(OptionKey('wrap_mode'), BuiltinOption(UserComboOption, 'Wrap mode', 'default', choices=['default', 'nofallback', 'nodownload', 'forcefallback', 'nopromote'])),
(OptionKey('force_fallback_for'), BuiltinOption(UserArrayOption, 'Force fallback for those subprojects', [])),
from .boost import BoostDependency
from .cuda import CudaDependency
from .hdf5 import hdf5_factory
-from .base import Dependency, InternalDependency, ExternalDependency, NotFoundDependency
+from .base import Dependency, InternalDependency, ExternalDependency, NotFoundDependency, MissingCompiler
from .base import (
ExternalLibrary, DependencyException, DependencyMethods,
BuiltinDependency, SystemDependency, get_leaf_external_dependencies)
'ExternalLibrary',
'DependencyException',
'DependencyMethods',
+ 'MissingCompiler',
'CMakeDependency',
'ConfigToolDependency',
'''Exceptions raised while trying to find dependencies'''
+class MissingCompiler:
+ """Represent a missing compiler: stands in for None when no toolchain is found,
+ replacing AttributeError with DependencyException"""
+
+ def __getattr__(self, item: str) -> T.Any:
+ if item.startswith('__'):
+ raise AttributeError()
+ raise DependencyException('no toolchain found')
+
+ def __bool__(self) -> bool:
+ return False
+
+
class DependencyMethods(Enum):
# Auto means to use whatever dependency checking mechanisms in whatever order meson thinks is best.
AUTO = 'auto'
assert isinstance(result, InternalDependency)
memo[id(self)] = result
for k, v in self.__dict__.items():
- if k in ['libraries', 'whole_libraries']:
+ if k in {'libraries', 'whole_libraries'}:
setattr(result, k, copy.copy(v))
else:
setattr(result, k, copy.deepcopy(v, memo))
HasNativeKwarg.__init__(self, kwargs)
self.clib_compiler = detect_compiler(self.name, environment, self.for_machine, self.language)
- def get_compiler(self) -> 'Compiler':
+ def get_compiler(self) -> T.Union['MissingCompiler', 'Compiler']:
return self.clib_compiler
def get_partial_dependency(self, *, compile_args: bool = False,
return methods
def detect_compiler(name: str, env: 'Environment', for_machine: MachineChoice,
- language: T.Optional[str]) -> T.Optional['Compiler']:
+ language: T.Optional[str]) -> T.Union['MissingCompiler', 'Compiler']:
"""Given a language and environment find the compiler used."""
compilers = env.coredata.compilers[for_machine]
return compilers[lang]
except KeyError:
continue
- return None
+ return MissingCompiler()
class SystemDependency(ExternalDependency):
self.version_lib = '{}_{}'.format(self.vers_raw[0], self.vers_raw[1])
# Detecting library type
- if self.nvsuffix in ['so', 'dll', 'dll.a', 'dll.lib', 'dylib']:
+ if self.nvsuffix in {'so', 'dll', 'dll.a', 'dll.lib', 'dylib'}:
self.static = False
- elif self.nvsuffix in ['a', 'lib']:
+ elif self.nvsuffix in {'a', 'lib'}:
self.static = True
else:
raise UnknownFileException(self.path)
for i in tags:
if i == 'mt':
self.mt = True
- elif len(i) == 3 and i[1:] in ['32', '64']:
+ elif len(i) == 3 and i[1:] in {'32', '64'}:
self.arch = i
elif BoostLibraryFile.reg_abi_tag.match(i):
self.runtime_static = 's' in i
# If no vscrt tag present, assume that it fits ['/MD', '/MDd', '/MT', '/MTd']
if not vscrt:
return True
- if vscrt in ['/MD', '-MD']:
+ if vscrt in {'/MD', '-MD'}:
return not self.runtime_static and not self.runtime_debug
- elif vscrt in ['/MDd', '-MDd']:
+ elif vscrt in {'/MDd', '-MDd'}:
return not self.runtime_static and self.runtime_debug
- elif vscrt in ['/MT', '-MT']:
+ elif vscrt in {'/MT', '-MT'}:
return (self.runtime_static or not self.static) and not self.runtime_debug
- elif vscrt in ['/MTd', '-MTd']:
+ elif vscrt in {'/MTd', '-MTd'}:
return (self.runtime_static or not self.static) and self.runtime_debug
mlog.warning(f'Boost: unknown vscrt tag {vscrt}. This may cause the compilation to fail. Please consider reporting this as a bug.', once=True)
# Try to use old style variables if no module is specified
if len(libs) > 0:
- self.compile_args = list(map(lambda x: f'-I{x}', incDirs)) + defs
+ self.compile_args = [f'-I{x}' for x in incDirs] + defs
self.link_args = []
for j in libs:
rtgt = resolve_cmake_trace_targets(j, self.traceparser, self.env, clib_compiler=self.clib_compiler)
return candidates
def get_link_args(self, language: T.Optional[str] = None, raw: bool = False) -> T.List[str]:
- args = []
+ args: T.List[str] = []
if self.libdir:
args += self.clib_compiler.get_linker_search_args(self.libdir)
for lib in self.requested_modules:
# 'default_options' is only used in fallback case
# 'not_found_message' has no impact on the dependency lookup
# 'include_type' is handled after the dependency lookup
- if key in ('version', 'native', 'required', 'fallback', 'allow_fallback', 'default_options',
- 'not_found_message', 'include_type'):
+ if key in {'version', 'native', 'required', 'fallback', 'allow_fallback', 'default_options',
+ 'not_found_message', 'include_type'}:
continue
# All keyword arguments are strings, ints, or lists (or lists of lists)
if isinstance(value, list):
import re
import pathlib
import shutil
+import subprocess
import typing as T
from mesonbuild.interpreterbase.decorators import FeatureDeprecated
self.is_found = True
self.link_args = ['-lz']
else:
- # Without a clib_compiler we can't find zlib, so just give up.
- if self.clib_compiler is None:
- self.is_found = False
- return
-
if self.clib_compiler.get_argument_syntax() == 'msvc':
- libs = ['zlib1' 'zlib']
+ libs = ['zlib1', 'zlib']
else:
libs = ['z']
for lib in libs:
self.java_home = environment.properties[self.for_machine].get_java_home()
if not self.java_home:
self.java_home = pathlib.Path(shutil.which(self.javac.exelist[0])).resolve().parents[1]
+ if m.is_darwin():
+ problem_java_prefix = pathlib.Path('/System/Library/Frameworks/JavaVM.framework/Versions')
+ if problem_java_prefix in self.java_home.parents:
+ res = subprocess.run(['/usr/libexec/java_home', '--failfast', '--arch', m.cpu_family],
+ stdout=subprocess.PIPE)
+ if res.returncode != 0:
+ log = mlog.error if self.required else mlog.debug
+ log('JAVA_HOME could not be discovered on the system. Please set it explicitly.')
+ self.is_found = False
+ return
+ self.java_home = pathlib.Path(res.stdout.decode().strip())
platform_include_dir = self.__machine_info_to_platform_include_dir(m)
if platform_include_dir is None:
java_home_lib_server = java_home_lib
else:
if version_compare(self.version, '<= 1.8.0'):
- # The JDK and Meson have a disagreement here, so translate it
- # over. In the event more translation needs to be done, add to
- # following dict.
- def cpu_translate(cpu: str) -> str:
- java_cpus = {
- 'x86_64': 'amd64',
- }
-
- return java_cpus.get(cpu, cpu)
-
- java_home_lib = self.java_home / 'jre' / 'lib' / cpu_translate(m.cpu_family)
- java_home_lib_server = java_home_lib / "server"
+ java_home_lib = self.java_home / 'jre' / 'lib' / self.__cpu_translate(m.cpu_family)
else:
java_home_lib = self.java_home / 'lib'
- java_home_lib_server = java_home_lib / "server"
+
+ java_home_lib_server = java_home_lib / 'server'
if 'jvm' in modules:
jvm = self.clib_compiler.find_library('jvm', environment, extra_dirs=[str(java_home_lib_server)])
self.is_found = True
@staticmethod
+ def __cpu_translate(cpu: str) -> str:
+ '''
+ The JDK and Meson have a disagreement here, so translate it over. In the event more
+ translation needs to be done, add to the following dict.
+ '''
+ java_cpus = {
+ 'x86_64': 'amd64',
+ }
+
+ return java_cpus.get(cpu, cpu)
+
+ @staticmethod
def __machine_info_to_platform_include_dir(m: 'MachineInfo') -> T.Optional[str]:
- """Translates the machine information to the platform-dependent include directory
+ '''Translates the machine information to the platform-dependent include directory
When inspecting a JDK release tarball or $JAVA_HOME, inside the `include/` directory is a
platform-dependent directory that must be on the target's include path in addition to the
parent `include/` directory.
- """
+ '''
if m.is_linux():
return 'linux'
elif m.is_windows():
import functools
import os
import re
-import shutil
import subprocess
from pathlib import Path
from ..mesonlib import Popen_safe, OrderedSet, join_args
+from ..programs import ExternalProgram
from .base import DependencyException, DependencyMethods
from .configtool import ConfigToolDependency
from .pkgconfig import PkgConfigDependency
if DependencyMethods.PKGCONFIG in methods:
# Use an ordered set so that these remain the first tried pkg-config files
pkgconfig_files = OrderedSet(['hdf5', 'hdf5-serial'])
- # FIXME: This won't honor pkg-config paths, and cross-native files
- PCEXE = shutil.which('pkg-config')
+ PCEXE = PkgConfigDependency._detect_pkgbin(False, env, for_machine)
+ pcenv = PkgConfigDependency.setup_env(os.environ, env, for_machine)
if PCEXE:
+ assert isinstance(PCEXE, ExternalProgram)
# some distros put hdf5-1.2.3.pc with version number in .pc filename.
- ret, stdout, _ = Popen_safe([PCEXE, '--list-all'], stderr=subprocess.DEVNULL)
+ ret, stdout, _ = Popen_safe(PCEXE.get_command() + ['--list-all'], stderr=subprocess.DEVNULL, env=pcenv)
if ret.returncode == 0:
for pkg in stdout.split('\n'):
if pkg.startswith('hdf5'):
self.is_found = True
# Happens if you are using a language with threads
# concept without C, such as plain Cuda.
- if self.clib_compiler is None:
+ if not self.clib_compiler:
self.compile_args = []
self.link_args = []
else:
return None
elif pyplat == 'win32':
return '32'
- elif pyplat in ('win64', 'win-amd64'):
+ elif pyplat in {'win64', 'win-amd64'}:
return '64'
mlog.log(f'Unknown Windows Python platform {pyplat!r}')
return None
candidates: T.List['DependencyGenerator'] = []
compiler = detect_compiler('mpi', env, for_machine, language)
- if compiler is None:
+ if not compiler:
return []
compiler_is_intel = compiler.get_id() in {'intel', 'intel-cl'}
class _MPIConfigToolDependency(ConfigToolDependency):
- def _filter_compile_args(self, args: T.Sequence[str]) -> T.List[str]:
+ def _filter_compile_args(self, args: T.List[str]) -> T.List[str]:
"""
MPI wrappers return a bunch of garbage args.
Drop -O2 and everything that is not needed.
result.append(f)
return result
- def _filter_link_args(self, args: T.Sequence[str]) -> T.List[str]:
+ def _filter_link_args(self, args: T.List[str]) -> T.List[str]:
"""
MPI wrappers return a bunch of garbage args.
Drop -O2 and everything that is not needed.
for f in args:
if self._is_link_arg(f):
result.append(f)
- if f in ('-L', '-Xlinker'):
+ if f in {'-L', '-Xlinker'}:
include_next = True
elif include_next:
include_next = False
self.is_libtool = False
# Store a copy of the pkg-config path on the object itself so it is
# stored in the pickled coredata and recovered.
- self.pkgbin: T.Union[None, bool, ExternalProgram] = None
-
- # Only search for pkg-config for each machine the first time and store
- # the result in the class definition
- if PkgConfigDependency.class_pkgbin[self.for_machine] is False:
- mlog.debug(f'Pkg-config binary for {self.for_machine} is cached as not found.')
- elif PkgConfigDependency.class_pkgbin[self.for_machine] is not None:
- mlog.debug(f'Pkg-config binary for {self.for_machine} is cached.')
- else:
- assert PkgConfigDependency.class_pkgbin[self.for_machine] is None
- mlog.debug(f'Pkg-config binary for {self.for_machine} is not cached.')
- for potential_pkgbin in find_external_program(
- self.env, self.for_machine, 'pkgconfig', 'Pkg-config',
- environment.default_pkgconfig, allow_default_for_cross=False):
- version_if_ok = self.check_pkgconfig(potential_pkgbin)
- if not version_if_ok:
- continue
- if not self.silent:
- mlog.log('Found pkg-config:', mlog.bold(potential_pkgbin.get_path()),
- f'({version_if_ok})')
- PkgConfigDependency.class_pkgbin[self.for_machine] = potential_pkgbin
- break
- else:
- if not self.silent:
- mlog.log('Found Pkg-config:', mlog.red('NO'))
- # Set to False instead of None to signify that we've already
- # searched for it and not found it
- PkgConfigDependency.class_pkgbin[self.for_machine] = False
-
- self.pkgbin = PkgConfigDependency.class_pkgbin[self.for_machine]
+ self.pkgbin = self._detect_pkgbin(self.silent, self.env, self.for_machine)
if self.pkgbin is False:
self.pkgbin = None
msg = f'Pkg-config binary for machine {self.for_machine} not found. Giving up.'
return s.format(self.__class__.__name__, self.name, self.is_found,
self.version_reqs)
+ @classmethod
+ def _detect_pkgbin(cls, silent: bool, env: Environment,
+ for_machine: MachineChoice) -> T.Union[None, bool, ExternalProgram]:
+ # Only search for pkg-config for each machine the first time and store
+ # the result in the class definition
+ if cls.class_pkgbin[for_machine] is False:
+ mlog.debug(f'Pkg-config binary for {for_machine} is cached as not found.')
+ elif cls.class_pkgbin[for_machine] is not None:
+ mlog.debug(f'Pkg-config binary for {for_machine} is cached.')
+ else:
+ assert cls.class_pkgbin[for_machine] is None, 'for mypy'
+ mlog.debug(f'Pkg-config binary for {for_machine} is not cached.')
+ for potential_pkgbin in find_external_program(
+ env, for_machine, 'pkgconfig', 'Pkg-config',
+ env.default_pkgconfig, allow_default_for_cross=False):
+ version_if_ok = cls.check_pkgconfig(env, potential_pkgbin)
+ if not version_if_ok:
+ continue
+ if not silent:
+ mlog.log('Found pkg-config:', mlog.bold(potential_pkgbin.get_path()),
+ f'({version_if_ok})')
+ cls.class_pkgbin[for_machine] = potential_pkgbin
+ break
+ else:
+ if not silent:
+ mlog.log('Found Pkg-config:', mlog.red('NO'))
+ # Set to False instead of None to signify that we've already
+ # searched for it and not found it
+ cls.class_pkgbin[for_machine] = False
+
+ return cls.class_pkgbin[for_machine]
+
def _call_pkgbin_real(self, args: T.List[str], env: T.Dict[str, str]) -> T.Tuple[int, str, str]:
assert isinstance(self.pkgbin, ExternalProgram)
cmd = self.pkgbin.get_command() + args
mlog.debug(f'Got pkgconfig variable {variable_name} : {variable}')
return variable
- def check_pkgconfig(self, pkgbin: ExternalProgram) -> T.Optional[str]:
+ @staticmethod
+ def check_pkgconfig(env: Environment, pkgbin: ExternalProgram) -> T.Optional[str]:
if not pkgbin.found():
mlog.log(f'Did not find pkg-config by name {pkgbin.name!r}')
return None
return None
except PermissionError:
msg = f'Found pkg-config {command_as_string!r} but didn\'t have permissions to run it.'
- if not self.env.machines.build.is_windows():
+ if not env.machines.build.is_windows():
msg += '\n\nOn Unix-like systems this is often caused by scripts that are not executable.'
mlog.warning(msg)
return None
from ..compilers import Compiler
from ..envconfig import MachineInfo
from ..environment import Environment
+ from ..dependencies import MissingCompiler
def _qt_get_private_includes(mod_inc_dir: str, module: str, mod_version: str) -> T.List[str]:
"""Mixin class for shared components between PkgConfig and Qmake."""
link_args: T.List[str]
- clib_compiler: 'Compiler'
+ clib_compiler: T.Union['MissingCompiler', 'Compiler']
env: 'Environment'
libexecdir: T.Optional[str] = None
if c in {'\\', '$'}:
escape = c
continue
- elif c in (' ', '\n'):
+ elif c in {' ', '\n'}:
if out != '':
if in_deps:
deps.append(out)
search_version, MesonBugException
)
from . import mlog
-from .programs import (
- ExternalProgram, EmptyExternalProgram
-)
+from .programs import ExternalProgram
from .envconfig import (
BinaryTable, MachineInfo, Properties, known_cpu_families, CMakeVariables,
trial = platform.processor().lower()
else:
trial = platform.machine().lower()
+ mlog.debug(f'detecting CPU family based on trial={trial!r}')
if trial.startswith('i') and trial.endswith('86'):
trial = 'x86'
elif trial == 'bepc':
trial = 'ppc64'
elif trial.startswith(('powerpc', 'ppc')) or trial in {'macppc', 'power macintosh'}:
trial = 'ppc'
- elif trial in ('amd64', 'x64', 'i86pc'):
+ elif trial in {'amd64', 'x64', 'i86pc'}:
trial = 'x86_64'
elif trial in {'sun4u', 'sun4v'}:
trial = 'sparc64'
else:
trial = platform.machine().lower()
- if trial in ('amd64', 'x64', 'i86pc'):
+ if trial in {'amd64', 'x64', 'i86pc'}:
trial = 'x86_64'
if trial == 'x86_64':
# Same check as above for cpu_family
# acts as a linker driver, `-Dc_args` will not.
#
# We still use the original key as the base here, as
- # we want to inhert the machine and the compiler
+ # we want to inherit the machine and the compiler
# language
key = key.evolve('env_args')
env_opts[key].extend(p_list)
return value
return not machine_info_can_run(self.machines[for_machine])
- def get_exe_wrapper(self) -> ExternalProgram:
+ def get_exe_wrapper(self) -> T.Optional[ExternalProgram]:
if not self.need_exe_wrapper():
- return EmptyExternalProgram()
+ return None
return self.exe_wrapper
default=[],
listify=True,
)
-_PREFIX_KW = KwargInfo('prefix', str, default='')
+_PREFIX_KW: KwargInfo[str] = KwargInfo(
+ 'prefix',
+ (str, ContainerTypeInfo(list, str)),
+ default='',
+ since_values={list: '1.0.0'},
+ convertor=lambda x: '\n'.join(x) if isinstance(x, list) else x)
+
_NO_BUILTIN_ARGS_KW = KwargInfo('no_builtin_args', bool, default=False)
_NAME_KW = KwargInfo('name', str, default='')
REQUIRED_KW,
SOURCES_KW,
VARIABLES_KW,
+ TEST_KWS,
NoneType,
in_set_validator,
env_convertor_with_method
{'target_type'}
)
-TEST_KWARGS: T.List[KwargInfo] = [
- KwargInfo('args', ContainerTypeInfo(list, (str, mesonlib.File, build.BuildTarget, build.CustomTarget, build.CustomTargetIndex)),
- listify=True, default=[]),
- KwargInfo('should_fail', bool, default=False),
- KwargInfo('timeout', int, default=30),
- KwargInfo('workdir', (str, NoneType), default=None,
- validator=lambda x: 'must be an absolute path' if not os.path.isabs(x) else None),
- KwargInfo('protocol', str,
- default='exitcode',
- validator=in_set_validator({'exitcode', 'tap', 'gtest', 'rust'}),
- since_values={'gtest': '0.55.0', 'rust': '0.57.0'}),
- KwargInfo('priority', int, default=0, since='0.52.0'),
- # TODO: env needs reworks of the way the environment variable holder itself works probably
- ENV_KW,
- DEPENDS_KW.evolve(since='0.46.0'),
- KwargInfo('suite', ContainerTypeInfo(list, str), listify=True, default=['']), # yes, a list of empty string
- KwargInfo('verbose', bool, default=False, since='0.62.0'),
-]
-
class InterpreterRuleRelaxation(Enum):
''' Defines specific relaxations of the Meson rules.
def func_exception(self, node, args, kwargs):
raise Exception()
- def add_languages(self, args: T.Sequence[str], required: bool, for_machine: MachineChoice) -> bool:
+ def add_languages(self, args: T.List[str], required: bool, for_machine: MachineChoice) -> bool:
success = self.add_languages_for(args, required, for_machine)
if not self.coredata.is_cross_build():
self.coredata.copy_build_options_from_regular_ones()
assert isinstance(d, Dependency)
if not d.found() and not_found_message:
self.message_impl([not_found_message])
- self.message_impl([not_found_message])
# Ensure the correct include type
if 'include_type' in kwargs:
wanted = kwargs['include_type']
"""
for out in outputs:
if has_multi_in and ('@PLAINNAME@' in out or '@BASENAME@' in out):
- raise InvalidArguments(f'{name}: output cannot containe "@PLAINNAME@" or "@BASENAME@" '
+ raise InvalidArguments(f'{name}: output cannot contain "@PLAINNAME@" or "@BASENAME@" '
'when there is more than one input (we can\'t know which to use)')
@typed_pos_args('custom_target', optargs=[str])
tg = build.RunTarget(name, all_args, kwargs['depends'], self.subdir, self.subproject, self.environment,
kwargs['env'])
self.add_target(name, tg)
- full_name = (self.subproject, name)
- assert full_name not in self.build.run_target_names
- self.build.run_target_names.add(full_name)
return tg
@FeatureNew('alias_target', '0.52.0')
return gen
@typed_pos_args('benchmark', str, (build.Executable, build.Jar, ExternalProgram, mesonlib.File))
- @typed_kwargs('benchmark', *TEST_KWARGS)
+ @typed_kwargs('benchmark', *TEST_KWS)
def func_benchmark(self, node: mparser.BaseNode,
args: T.Tuple[str, T.Union[build.Executable, build.Jar, ExternalProgram, mesonlib.File]],
kwargs: 'kwtypes.FuncBenchmark') -> None:
self.add_test(node, args, kwargs, False)
@typed_pos_args('test', str, (build.Executable, build.Jar, ExternalProgram, mesonlib.File))
- @typed_kwargs('test', *TEST_KWARGS, KwargInfo('is_parallel', bool, default=True))
+ @typed_kwargs('test', *TEST_KWS, KwargInfo('is_parallel', bool, default=True))
def func_test(self, node: mparser.BaseNode,
args: T.Tuple[str, T.Union[build.Executable, build.Jar, ExternalProgram, mesonlib.File]],
kwargs: 'kwtypes.FuncTest') -> None:
mesonlib.do_conf_file(inputs_abs[0], ofile_abs, conf,
fmt, file_encoding)
if missing_variables:
- var_list = ", ".join(map(repr, sorted(missing_variables)))
+ var_list = ", ".join(repr(m) for m in sorted(missing_variables))
mlog.warning(
f"The variable(s) {var_list} in the input file '{inputs[0]}' are not "
"present in the given configuration data.", location=node)
continue
if not d.is_absolute():
raise InvalidCode(f'Search directory {d} is not an absolute path.')
- return list(map(str, search_dirs))
+ return [str(s) for s in search_dirs]
class FeatureOptionHolder(ObjectHolder[coredata.UserFeatureOption]):
def __init__(self, option: coredata.UserFeatureOption, interpreter: 'Interpreter'):
@typed_kwargs('configuration_data.set10', _CONF_DATA_SET_KWS)
def set10_method(self, args: T.Tuple[str, T.Union[int, bool]], kwargs: 'kwargs.ConfigurationDataSet') -> None:
self.__check_used()
- # bool is a subclass of int, so we need to check for bool excplicitly.
+ # bool is a subclass of int, so we need to check for bool explicitly.
# We already have typed_pos_args checking that this is either a bool or
# an int.
if not isinstance(args[1], bool):
"""Protocol for subdir arguments.
- This allows us to define any objec that has a found(self) -> bool method
+ This allows us to define any object that has a found(self) -> bool method
"""
def found(self) -> bool: ...
self.operators.update({
MesonOperator.DIV: self.op_div,
MesonOperator.INDEX: self.op_index,
+ MesonOperator.IN: self.op_in,
+ MesonOperator.NOT_IN: self.op_notin,
})
def display_name(self) -> str:
except IndexError:
raise InvalidArguments(f'Index {other} out of bounds of string of size {len(self.held_object)}.')
+ @FeatureNew('"in" string operator', '1.0.0')
+ @typed_operator(MesonOperator.IN, str)
+ def op_in(self, other: str) -> bool:
+ return other in self.held_object
+
+ @FeatureNew('"not in" string operator', '1.0.0')
+ @typed_operator(MesonOperator.NOT_IN, str)
+ def op_notin(self, other: str) -> bool:
+ return other not in self.held_object
+
class MesonVersionString(str):
pass
BothLibraries, SharedLibrary, StaticLibrary, Jar, Executable)
from ..coredata import UserFeatureOption
from ..dependencies import Dependency, InternalDependency
+from ..interpreterbase import FeatureNew
from ..interpreterbase.decorators import KwargInfo, ContainerTypeInfo
from ..mesonlib import (
File, FileMode, MachineChoice, listify, has_path_sep, OptionKey,
from typing_extensions import Literal
from ..interpreterbase import TYPE_var
+ from ..interpreterbase.decorators import FeatureCheckBase
def in_set_validator(choices: T.Set[str]) -> T.Callable[[str], T.Optional[str]]:
"""Check that the choice given was one of the given set."""
default=[],
)
+def include_dir_string_new(val: T.List[T.Union[str, IncludeDirs]]) -> T.Iterable[FeatureCheckBase]:
+ strs = [v for v in val if isinstance(v, str)]
+ if strs:
+ str_msg = ", ".join(f"'{s}'" for s in strs)
+ yield FeatureNew('include_directories kwarg of type string', '1.0.0',
+ f'Use include_directories({str_msg}) instead')
+
# for cases like default_options and override_options
DEFAULT_OPTIONS: KwargInfo[T.List[str]] = KwargInfo(
'default_options',
)
PRESERVE_PATH_KW: KwargInfo[bool] = KwargInfo('preserve_path', bool, default=False, since='0.63.0')
+
+TEST_KWS: T.List[KwargInfo] = [
+ KwargInfo('args', ContainerTypeInfo(list, (str, File, BuildTarget, CustomTarget, CustomTargetIndex)),
+ listify=True, default=[]),
+ KwargInfo('should_fail', bool, default=False),
+ KwargInfo('timeout', int, default=30),
+ KwargInfo('workdir', (str, NoneType), default=None,
+ validator=lambda x: 'must be an absolute path' if not os.path.isabs(x) else None),
+ KwargInfo('protocol', str,
+ default='exitcode',
+ validator=in_set_validator({'exitcode', 'tap', 'gtest', 'rust'}),
+ since_values={'gtest': '0.55.0', 'rust': '0.57.0'}),
+ KwargInfo('priority', int, default=0, since='0.52.0'),
+ # TODO: env needs reworks of the way the environment variable holder itself works probably
+ ENV_KW,
+ DEPENDS_KW.evolve(since='0.46.0'),
+ KwargInfo('suite', ContainerTypeInfo(list, str), listify=True, default=['']), # yes, a list of empty string
+ KwargInfo('verbose', bool, default=False, since='0.62.0'),
+]
added in.
:param not_set_warning: A warning message that is logged if the kwarg is not
set by the user.
+ :param feature_validator: A callable returning an iterable of FeatureNew | FeatureDeprecated objects.
"""
def __init__(self, name: str,
types: T.Union[T.Type[_T], T.Tuple[T.Union[T.Type[_T], ContainerTypeInfo], ...], ContainerTypeInfo],
deprecated: T.Optional[str] = None,
deprecated_message: T.Optional[str] = None,
deprecated_values: T.Optional[T.Dict[T.Union[_T, T.Type[T.List], T.Type[T.Dict]], T.Union[str, T.Tuple[str, str]]]] = None,
+ feature_validator: T.Optional[T.Callable[[_T], T.Iterable[FeatureCheckBase]]] = None,
validator: T.Optional[T.Callable[[T.Any], T.Optional[str]]] = None,
convertor: T.Optional[T.Callable[[_T], object]] = None,
not_set_warning: T.Optional[str] = None):
self.since = since
self.since_message = since_message
self.since_values = since_values
+ self.feature_validator = feature_validator
self.deprecated = deprecated
self.deprecated_message = deprecated_message
self.deprecated_values = deprecated_values
deprecated: T.Union[str, None, _NULL_T] = _NULL,
deprecated_message: T.Union[str, None, _NULL_T] = _NULL,
deprecated_values: T.Union[T.Dict[T.Union[_T, T.Type[T.List], T.Type[T.Dict]], T.Union[str, T.Tuple[str, str]]], None, _NULL_T] = _NULL,
+ feature_validator: T.Union[T.Callable[[_T], T.Iterable[FeatureCheckBase]], None, _NULL_T] = _NULL,
validator: T.Union[T.Callable[[_T], T.Optional[str]], None, _NULL_T] = _NULL,
convertor: T.Union[T.Callable[[_T], TYPE_var], None, _NULL_T] = _NULL) -> 'KwargInfo':
"""Create a shallow copy of this KwargInfo, with modifications.
deprecated=deprecated if not isinstance(deprecated, _NULL_T) else self.deprecated,
deprecated_message=deprecated_message if not isinstance(deprecated_message, _NULL_T) else self.deprecated_message,
deprecated_values=deprecated_values if not isinstance(deprecated_values, _NULL_T) else self.deprecated_values,
+ feature_validator=feature_validator if not isinstance(feature_validator, _NULL_T) else self.feature_validator,
validator=validator if not isinstance(validator, _NULL_T) else self.validator,
convertor=convertor if not isinstance(convertor, _NULL_T) else self.convertor,
)
if msg is not None:
raise InvalidArguments(f'{name} keyword argument "{info.name}" {msg}')
+ if info.feature_validator is not None:
+ for each in info.feature_validator(value):
+ each.use(subproject, node)
+
if info.deprecated_values is not None:
emit_feature_change(info.deprecated_values, FeatureDeprecated)
if isinstance(value, MutableInterpreterObject):
value = copy.deepcopy(value)
self.set_variable(var_name, value)
- return None
def set_variable(self, varname: str, variable: T.Union[TYPE_var, InterpreterObject], *, holderify: bool = False) -> None:
if variable is None:
prefix=comp_class.LINKER_PREFIX if use_linker_prefix else [],
exelist=compiler, version=search_version(o), direct=invoked_directly)
elif 'OPTLINK' in o:
- # Opltink's stdout *may* beging with a \r character.
+ # Optlink's stdout *may* begin with a \r character.
return OptlinkDynamicLinker(compiler, for_machine, version=search_version(o))
elif o.startswith('Microsoft') or e.startswith('Microsoft'):
out = o or e
def __init__(self, exelist: T.List[str], for_machine: mesonlib.MachineChoice,
*, version: str = 'unknown version'):
- # Use optlink instead of link so we don't interfer with other link.exe
+ # Use optlink instead of link so we don't interfere with other link.exe
# implementations.
super().__init__(exelist, for_machine, '', [], version=version)
if options.targets:
intro_data = parse_introspect_data(builddir)
- has_run_target = any(map(
- lambda t:
- get_target_from_intro_data(ParsedTargetName(t), builddir, intro_data)['type'] == 'run',
- options.targets
- ))
+ has_run_target = any(
+ get_target_from_intro_data(ParsedTargetName(t), builddir, intro_data)['type'] == 'run'
+ for t in options.targets)
if has_run_target:
# `run` target can't be used the same way as other targets on `vs` backend.
from pathlib import Path
from . import build, minstall, dependencies
-from .mesonlib import MesonException, RealPathAction, is_windows, setup_vsenv, OptionKey, quote_arg, get_wine_shortpath
+from .mesonlib import (MesonException, is_windows, setup_vsenv, OptionKey,
+ get_wine_shortpath, MachineChoice)
from . import mlog
import typing as T
POWERSHELL_EXES = {'pwsh.exe', 'powershell.exe'}
def add_arguments(parser: argparse.ArgumentParser) -> None:
- parser.add_argument('-C', dest='wd', action=RealPathAction,
- help='Directory to cd into before running')
+ parser.add_argument('-C', dest='builddir', type=Path, default='.',
+ help='Path to build directory')
+ parser.add_argument('--workdir', '-w', type=Path, default=None,
+ help='Directory to cd into before running (default: builddir, Since 1.0.0)')
parser.add_argument('--dump', action='store_true',
help='Only print required environment (Since 0.62.0)')
- parser.add_argument('command', nargs=argparse.REMAINDER,
+ parser.add_argument('devcmd', nargs=argparse.REMAINDER, metavar='command',
help='Command to run in developer environment (default: interactive shell)')
def get_windows_shell() -> T.Optional[str]:
env['WINEPATH'] = get_wine_shortpath([winecmd], winepath.split(';'))
mlog.log('Meson detected wine and has set WINEPATH accordingly')
-def get_env(b: build.Build) -> T.Tuple[T.Dict[str, str], T.Set[str]]:
+def get_env(b: build.Build, dump: bool) -> T.Tuple[T.Dict[str, str], T.Set[str]]:
extra_env = build.EnvironmentVariables()
extra_env.set('MESON_DEVENV', ['1'])
extra_env.set('MESON_PROJECT_NAME', [b.project_name])
- env = os.environ.copy()
+ sysroot = b.environment.properties[MachineChoice.HOST].get_sys_root()
+ if sysroot:
+ extra_env.set('QEMU_LD_PREFIX', [sysroot])
+
+ env = {} if dump else os.environ.copy()
varnames = set()
for i in itertools.chain(b.devenv, {extra_env}):
- env = i.get_env(env)
+ env = i.get_env(env, dump)
varnames |= i.get_names()
reduce_winepath(env)
except (FileExistsError, shutil.SameFileError):
pass
-def write_gdb_script(privatedir: Path, install_data: 'InstallData') -> None:
+def write_gdb_script(privatedir: Path, install_data: 'InstallData', workdir: Path) -> None:
if not shutil.which('gdb'):
return
bdir = privatedir.parent
gdbinit_path.write_text(gdbinit_line, encoding='utf-8')
first_time = True
if first_time:
- mlog.log('Meson detected GDB helpers and added config in', mlog.bold(str(gdbinit_path)))
+ gdbinit_path = gdbinit_path.resolve()
+ workdir_path = workdir.resolve()
+ rel_path = gdbinit_path.relative_to(workdir_path)
+ mlog.log('Meson detected GDB helpers and added config in', mlog.bold(str(rel_path)))
+ mlog.log('To load it automatically you might need to:')
+ mlog.log(' - Add', mlog.bold(f'add-auto-load-safe-path {gdbinit_path.parent}'),
+ 'in', mlog.bold('~/.gdbinit'))
+ if gdbinit_path.parent != workdir_path:
+ mlog.log(' - Change current workdir to', mlog.bold(str(rel_path.parent)),
+ 'or use', mlog.bold(f'--init-command {rel_path}'))
def run(options: argparse.Namespace) -> int:
- privatedir = Path(options.wd) / 'meson-private'
+ privatedir = Path(options.builddir) / 'meson-private'
buildfile = privatedir / 'build.dat'
if not buildfile.is_file():
- raise MesonException(f'Directory {options.wd!r} does not seem to be a Meson build directory.')
- b = build.load(options.wd)
+ raise MesonException(f'Directory {options.builddir!r} does not seem to be a Meson build directory.')
+ b = build.load(options.builddir)
+ workdir = options.workdir or options.builddir
- devenv, varnames = get_env(b)
+ devenv, varnames = get_env(b, options.dump)
if options.dump:
- if options.command:
+ if options.devcmd:
raise MesonException('--dump option does not allow running other command.')
for name in varnames:
- print(f'{name}={quote_arg(devenv[name])}')
+ print(f'{name}="{devenv[name]}"')
print(f'export {name}')
return 0
+ if b.environment.need_exe_wrapper():
+ m = 'An executable wrapper could be required'
+ exe_wrapper = b.environment.get_exe_wrapper()
+ if exe_wrapper:
+ cmd = ' '.join(exe_wrapper.get_command())
+ m += f': {cmd}'
+ mlog.log(m)
+
install_data = minstall.load_install_data(str(privatedir / 'install.dat'))
- write_gdb_script(privatedir, install_data)
+ write_gdb_script(privatedir, install_data, workdir)
setup_vsenv(b.need_vsenv)
- args = options.command
+ args = options.devcmd
if not args:
prompt_prefix = f'[{b.project_name}]'
shell_env = os.environ.get("SHELL")
try:
return subprocess.call(args, close_fds=False,
env=devenv,
- cwd=options.wd)
+ cwd=workdir)
except subprocess.CalledProcessError as e:
return e.returncode
except FileNotFoundError:
else:
mlog.error(msg + '\n' + 'Use --allow-dirty to ignore the warning and proceed anyway')
sys.exit(1)
- return
def process_submodules(src, distdir, options):
module_file = os.path.join(src, '.gitmodules')
def run(self, args):
implicit_setup_command_notice = False
- pending_python_deprecation_notice = False
# If first arg is not a known command, assume user wants to run the setup
# command.
known_commands = list(self.commands.keys()) + ['-h', '--help']
# Bump the version here in order to add a pre-exit warning that we are phasing out
# support for old python. If this is already the oldest supported version, then
# this can never be true and does nothing.
- if command in ('setup', 'compile', 'test', 'install') and sys.version_info < (3, 7):
- pending_python_deprecation_notice = True
+ pending_python_deprecation_notice = \
+ command in {'setup', 'compile', 'test', 'install'} and sys.version_info < (3, 7)
try:
return options.run_func(options)
raise SystemExit('No recognizable source files found.\n'
'Run meson init in an empty directory to create a sample project.')
options.srcfiles = srcfiles
- print("Detected source files: " + ' '.join(map(str, srcfiles)))
+ print("Detected source files: " + ' '.join(str(s) for s in srcfiles))
options.srcfiles = [Path(f) for f in options.srcfiles]
if not options.language:
for f in options.srcfiles:
if f.suffix == '.c':
options.language = 'c'
break
- if f.suffix in ('.cc', '.cpp'):
+ if f.suffix in {'.cc', '.cpp'}:
options.language = 'cpp'
break
if f.suffix == '.cs':
path = Path(git).parents[1] / 'usr' / 'bin'
less = shutil.which('less', path=str(path))
if less:
- # "R" : support color
- # "X" : do not clear the screen when leaving the pager
- # "F" : skip the pager if content fit into the screen
- pager_cmd = [less, '-RXF']
+ pager_cmd = [less]
if not pager_cmd:
return
global log_pager # pylint: disable=global-statement
assert log_pager is None
try:
+ # Set 'LESS' environment variable, rather than arguments in
+ # pager_cmd, to also support the case where the user has 'PAGER'
+ # set to 'less'. Arguments set are:
+ # "R" : support color
+ # "X" : do not clear the screen when leaving the pager
+ # "F" : skip the pager if content fits into the screen
+ env = os.environ.copy()
+ if 'LESS' not in env:
+ env['LESS'] = 'RXF'
+ # Set "-c" for lv to support color
+ if 'LV' not in env:
+ env['LV'] = '-c'
log_pager = subprocess.Popen(pager_cmd, stdin=subprocess.PIPE,
- text=True, encoding='utf-8')
+ text=True, encoding='utf-8', env=env)
except Exception as e:
# Ignore errors, unless it is a user defined pager.
if 'PAGER' in os.environ:
import typing as T
from .. import mesonlib
-from ..mesonlib import relpath, HoldableObject, MachineChoice
+from ..build import IncludeDirs
from ..interpreterbase.decorators import noKwargs, noPosargs
+from ..mesonlib import relpath, HoldableObject, MachineChoice
from ..programs import ExternalProgram
if T.TYPE_CHECKING:
key = mesonlib.OptionKey(name, subproject, machine, lang, module)
return key in self._interpreter.user_defined_options.cmd_line_options
+ def process_include_dirs(self, dirs: T.Iterable[T.Union[str, IncludeDirs]]) -> T.Iterable[IncludeDirs]:
+ """Convert raw include directory arguments to only IncludeDirs
+
+ :param dirs: An iterable of strings and IncludeDirs
+ :return: an iterator of IncludeDirs
+ :yield: IncludeDirs objects
+ """
+ for d in dirs:
+ if isinstance(d, IncludeDirs):
+ yield d
+ else:
+ yield self._interpreter.build_incdir_object([d])
+
class ModuleObject(HoldableObject):
"""Base class for all objects returned by modules
if hasattr(fname, 'fname'):
fname = fname.fname
suffix = fname.split('.')[-1]
- return suffix in ('gir', 'typelib')
+ return suffix in {'gir', 'typelib'}
class ModuleReturnValue:
cuda_version = args[0]
driver_version_table = [
+ {'cuda_version': '>=12.0.0', 'windows': '527.41', 'linux': '525.60.13'},
+ {'cuda_version': '>=11.8.0', 'windows': '522.06', 'linux': '520.61.05'},
+ {'cuda_version': '>=11.7.1', 'windows': '516.31', 'linux': '515.48.07'},
{'cuda_version': '>=11.7.0', 'windows': '516.01', 'linux': '515.43.04'},
{'cuda_version': '>=11.6.1', 'windows': '511.65', 'linux': '510.47.03'},
{'cuda_version': '>=11.6.0', 'windows': '511.23', 'linux': '510.39.01'},
cuda_hi_limit_gpu_architecture = '8.6' # noqa: E221
if version_compare(cuda_version, '>=11.1'):
- cuda_common_gpu_architectures += ['8.6', '8.6+PTX'] # noqa: E221
+ cuda_common_gpu_architectures += ['8.6'] # noqa: E221
cuda_all_gpu_architectures += ['8.6'] # noqa: E221
- if version_compare(cuda_version, '<12.0'):
- cuda_hi_limit_gpu_architecture = '9.0' # noqa: E221
+ if version_compare(cuda_version, '<11.8'):
+ cuda_common_gpu_architectures += ['8.6+PTX'] # noqa: E221
+ cuda_hi_limit_gpu_architecture = '8.7' # noqa: E221
+
+ if version_compare(cuda_version, '>=11.8'):
+ cuda_known_gpu_architectures += ['Orin', 'Lovelace', 'Hopper'] # noqa: E221
+ cuda_common_gpu_architectures += ['8.9', '9.0', '9.0+PTX'] # noqa: E221
+ cuda_all_gpu_architectures += ['8.7', '8.9', '9.0'] # noqa: E221
+
+ if version_compare(cuda_version, '<12'):
+ cuda_hi_limit_gpu_architecture = '9.1' # noqa: E221
+
+ if version_compare(cuda_version, '>=12.0'):
+ # https://docs.nvidia.com/cuda/cuda-toolkit-release-notes/index.html#deprecated-features (Current)
+ # https://docs.nvidia.com/cuda/archive/12.0/cuda-toolkit-release-notes/index.html#deprecated-features (Eventual?)
+ cuda_lo_limit_gpu_architecture = '5.0' # noqa: E221
+
+ if version_compare(cuda_version, '<13'):
+ cuda_hi_limit_gpu_architecture = '10.0' # noqa: E221
if not cuda_arch_list:
cuda_arch_list = 'Auto'
'Xavier': (['7.2'], []),
'Turing': (['7.5'], ['7.5']),
'Ampere': (['8.0'], ['8.0']),
+ 'Orin': (['8.7'], []),
+ 'Lovelace': (['8.9'], ['8.9']),
+ 'Hopper': (['9.0'], ['9.0']),
}.get(arch_name, (None, None))
if arch_bin is None:
if version_compare(arch, '<' + cuda_lo_limit_gpu_architecture):
continue
- if version_compare(arch, '>=' + cuda_hi_limit_gpu_architecture):
+ if cuda_hi_limit_gpu_architecture and version_compare(arch, '>=' + cuda_hi_limit_gpu_architecture):
continue
if codev:
if version_compare(arch, '<' + cuda_lo_limit_gpu_architecture):
continue
- if version_compare(arch, '>=' + cuda_hi_limit_gpu_architecture):
+ if cuda_hi_limit_gpu_architecture and version_compare(arch, '>=' + cuda_hi_limit_gpu_architecture):
continue
arch = arch.replace('.', '')
missing.update(missing_vars)
out.append(arg)
if missing:
- var_list = ", ".join(map(repr, sorted(missing)))
+ var_list = ", ".join(repr(m) for m in sorted(missing))
raise EnvironmentException(
f"Variables {var_list} in configure options are missing.")
return out
[dest],
build_by_default=True,
install=kwargs['install'],
- install_dir=kwargs['install_dir'],
+ install_dir=[kwargs['install_dir']],
install_mode=kwargs['install_mode'],
- install_tag=kwargs['install_tag'],
+ install_tag=[kwargs['install_tag']],
backend=state.backend,
)
from ..mesonlib import (
MachineChoice, MesonException, OrderedSet, Popen_safe, join_args,
)
-from ..programs import OverrideProgram, EmptyExternalProgram
+from ..programs import OverrideProgram
from ..scripts.gettext import read_linguas
if T.TYPE_CHECKING:
for girtarget in girtargets:
for lang, compiler in girtarget.compilers.items():
# XXX: Can you use g-i with any other language?
- if lang in ('c', 'cpp', 'objc', 'objcpp', 'd'):
+ if lang in {'c', 'cpp', 'objc', 'objcpp', 'd'}:
ret.append((lang, compiler))
break
return typelib_includes, new_depends
@staticmethod
- def _get_external_args_for_langs(state: 'ModuleState', langs: T.Sequence[str]) -> T.List[str]:
+ def _get_external_args_for_langs(state: 'ModuleState', langs: T.List[str]) -> T.List[str]:
ret: T.List[str] = []
for lang in langs:
ret += mesonlib.listify(state.environment.coredata.get_external_args(MachineChoice.HOST, lang))
t_args.append(f'--{program_name}={path}')
if namespace:
t_args.append('--namespace=' + namespace)
- # if not need_exe_wrapper, we get an EmptyExternalProgram. If none provided, we get NoneType
exe_wrapper = state.environment.get_exe_wrapper()
- if not isinstance(exe_wrapper, (NoneType, EmptyExternalProgram)):
+ if exe_wrapper:
t_args.append('--run=' + ' '.join(exe_wrapper.get_command()))
t_args.append(f'--htmlargs={"@@".join(kwargs["html_args"])}')
t_args.append(f'--scanargs={"@@".join(kwargs["scan_args"])}')
'''This module provides helper functions for generating documentation using hotdoc'''
import os
-from collections import OrderedDict
+import subprocess
from mesonbuild import mesonlib
from mesonbuild import mlog, build
self.name = name
self.state = state
self.interpreter = interpreter
- self.include_paths = OrderedDict()
+ self.include_paths = mesonlib.OrderedSet()
self.builddir = state.environment.get_build_dir()
self.sourcedir = state.environment.get_source_dir()
self.process_dependencies(dep.get_target_dependencies())
self._subprojects.extend(dep.subprojects)
self.process_dependencies(dep.subprojects)
- self.add_include_path(os.path.join(self.builddir, dep.hotdoc_conf.subdir))
+ self.include_paths.add(os.path.join(self.builddir, dep.hotdoc_conf.subdir))
self.cmd += ['--extra-assets=' + p for p in dep.extra_assets]
self.add_extension_paths(dep.extra_extension_paths)
elif isinstance(dep, (build.CustomTarget, build.BuildTarget)):
if arg in self.kwargs:
raise InvalidArguments(f'Argument "{arg}" is forbidden.')
- def add_include_path(self, path):
- self.include_paths[path] = path
-
def make_targets(self):
self.check_forbidden_args()
self.process_known_arg("--index", value_processor=self.ensure_file)
self.process_known_arg("--project-version")
self.process_known_arg("--sitemap", value_processor=self.ensure_file)
self.process_known_arg("--html-extra-theme", value_processor=self.ensure_dir)
- self.process_known_arg(None, "include_paths",
- value_processor=lambda x: [self.add_include_path(self.ensure_dir(v)) for v in x])
+ self.include_paths.update(self.ensure_dir(v) for v in self.kwargs.pop('include_paths'))
self.process_known_arg('--c-include-directories', argname="dependencies", value_processor=self.process_dependencies)
self.process_gi_c_source_roots()
self.process_extra_assets()
f.write('{}')
self.cmd += ['--conf-file', hotdoc_config_path]
- self.add_include_path(os.path.join(self.builddir, self.subdir))
- self.add_include_path(os.path.join(self.sourcedir, self.subdir))
+ self.include_paths.add(os.path.join(self.builddir, self.subdir))
+ self.include_paths.add(os.path.join(self.sourcedir, self.subdir))
depfile = os.path.join(self.builddir, self.subdir, self.name + '.deps')
self.cmd += ['--deps-file-dest', depfile]
- for path in self.include_paths.keys():
+ for path in self.include_paths:
self.cmd.extend(['--include-path', path])
if self.state.environment.coredata.get_option(mesonlib.OptionKey('werror', subproject=self.state.subproject)):
self.hotdoc = ExternalProgram('hotdoc')
if not self.hotdoc.found():
raise MesonException('hotdoc executable not found')
+ version = self.hotdoc.get_version(interpreter)
+ if not mesonlib.version_compare(version, f'>={MIN_HOTDOC_VERSION}'):
+ raise MesonException(f'hotdoc {MIN_HOTDOC_VERSION} required but not found.')
- try:
- from hotdoc.run_hotdoc import run # noqa: F401
- self.hotdoc.run_hotdoc = run
- except Exception as e:
- raise MesonException(f'hotdoc {MIN_HOTDOC_VERSION} required but not found. ({e})')
+ def run_hotdoc(cmd):
+ return subprocess.run(self.hotdoc.get_command() + cmd, stdout=subprocess.DEVNULL).returncode
+
+ self.hotdoc.run_hotdoc = run_hotdoc
self.methods.update({
'has_extensions': self.has_extensions,
'generate_doc': self.generate_doc,
if build_by_default is None:
build_by_default = kwargs['install']
- install_dir = [kwargs['install_dir']] if kwargs['install_dir'] is not None else None
install_tag = [kwargs['install_tag']] if kwargs['install_tag'] is not None else None
ct = build.CustomTarget(
[kwargs['output']],
build_by_default=build_by_default,
install=kwargs['install'],
- install_dir=install_dir,
+ install_dir=[kwargs['install_dir']] if kwargs['install_dir'] is not None else None,
install_tag=install_tag,
)
if build_by_default is None:
build_by_default = kwargs['install']
- install_dir = [kwargs['install_dir']] if kwargs['install_dir'] is not None else None
install_tag = [kwargs['install_tag']] if kwargs['install_tag'] is not None else None
ct = build.CustomTarget(
build_by_default=build_by_default,
extra_depends=mo_targets,
install=kwargs['install'],
- install_dir=install_dir,
+ install_dir=[kwargs['install_dir']] if kwargs['install_dir'] is not None else None,
install_tag=install_tag,
)
# limitations under the License.
from __future__ import annotations
-import os
import pathlib
import typing as T
from mesonbuild.interpreterbase.decorators import ContainerTypeInfo, FeatureDeprecated, FeatureNew, KwargInfo, typed_pos_args, typed_kwargs
from mesonbuild.mesonlib import version_compare, MachineChoice
from . import NewExtensionModule, ModuleReturnValue, ModuleInfo
+from ..interpreter.type_checking import NoneType
if T.TYPE_CHECKING:
from . import ModuleState
def __init__(self, interpreter: Interpreter):
super().__init__()
self.methods.update({
- 'generate_native_header': self.generate_native_header,
'generate_native_headers': self.generate_native_headers,
+ 'native_headers': self.native_headers,
})
def __get_java_compiler(self, state: ModuleState) -> Compiler:
detect_compiler_for(state.environment, 'java', MachineChoice.BUILD)
return state.environment.coredata.compilers[MachineChoice.BUILD]['java']
- @FeatureDeprecated('java.generate_native_header', '0.62.0', 'Use java.generate_native_headers instead')
- @typed_pos_args('java.generate_native_header', (str, mesonlib.File))
- @typed_kwargs('java.generate_native_header', KwargInfo('package', str, default=None))
- def generate_native_header(self, state: ModuleState, args: T.Tuple[T.Union[str, mesonlib.File]],
- kwargs: T.Dict[str, T.Optional[str]]) -> ModuleReturnValue:
- package = kwargs.get('package')
-
- if isinstance(args[0], mesonlib.File):
- file = args[0]
- else:
- file = mesonlib.File.from_source_file(state.source_root, state.subdir, args[0])
-
- if package:
- header = f'{package.replace(".", "_")}_{pathlib.Path(file.fname).stem}.h'
- else:
- header = f'{pathlib.Path(file.fname).stem}.h'
-
- javac = self.__get_java_compiler(state)
-
- target = CustomTarget(
- os.path.basename(header),
- state.subdir,
- state.subproject,
- state.environment,
- mesonlib.listify([
- javac.exelist,
- '-d',
- '@PRIVATE_DIR@',
- '-h',
- state.subdir,
- '@INPUT@',
- ]),
- [file],
- [header],
- backend=state.backend,
- )
- # It is only known that 1.8.0 won't pre-create the directory. 11 and 16
- # do not exhibit this behavior.
- if version_compare(javac.version, '1.8.0'):
- pathlib.Path(state.backend.get_target_private_dir_abs(target)).mkdir(parents=True, exist_ok=True)
-
- return ModuleReturnValue(target, [target])
-
@FeatureNew('java.generate_native_headers', '0.62.0')
+ @FeatureDeprecated('java.generate_native_headers', '1.0.0')
@typed_pos_args(
'java.generate_native_headers',
varargs=(str, mesonlib.File, Target, CustomTargetIndex, GeneratedList))
@typed_kwargs(
'java.generate_native_headers',
KwargInfo('classes', ContainerTypeInfo(list, str), default=[], listify=True, required=True),
- KwargInfo('package', str, default=None))
+ KwargInfo('package', (str, NoneType), default=None))
def generate_native_headers(self, state: ModuleState, args: T.Tuple[T.List[mesonlib.FileOrString]],
kwargs: T.Dict[str, T.Optional[str]]) -> ModuleReturnValue:
+ return self.__native_headers(state, args, kwargs)
+
+ @FeatureNew('java.native_headers', '1.0.0')
+ @typed_pos_args(
+ 'java.native_headers',
+ varargs=(str, mesonlib.File, Target, CustomTargetIndex, GeneratedList))
+ @typed_kwargs(
+ 'java.native_headers',
+ KwargInfo('classes', ContainerTypeInfo(list, str), default=[], listify=True, required=True),
+ KwargInfo('package', (str, NoneType), default=None))
+ def native_headers(self, state: ModuleState, args: T.Tuple[T.List[mesonlib.FileOrString]],
+ kwargs: T.Dict[str, T.Optional[str]]) -> ModuleReturnValue:
+ return self.__native_headers(state, args, kwargs)
+
+ def __native_headers(self, state: ModuleState, args: T.Tuple[T.List[mesonlib.FileOrString]],
+ kwargs: T.Dict[str, T.Optional[str]]) -> ModuleReturnValue:
classes = T.cast('T.List[str]', kwargs.get('classes'))
package = kwargs.get('package')
class _PythonDependencyBase(_Base):
def __init__(self, python_holder: 'PythonInstallation', embed: bool):
- self.name = 'python' # override the name from the "real" dependency lookup
self.embed = embed
self.version: str = python_holder.version
self.platform = python_holder.platform
if libpc and not self.is_found:
mlog.debug(f'"python-{self.version}" could not be found in LIBPC, this is likely due to a relocated python installation')
+ # The "-embed" version of python.pc was introduced in 3.8, and distutils
+ # extension linking was changed to be considered a non embed usage. Before
+ # then, this dependency always uses the embed=True file because that is the
+ # only one that exists.
+ #
+ # On macOS and some Linux distros (Debian) distutils doesn't link extensions
+ # against libpython, even on 3.7 and below. We call into distutils and
+ # mirror its behavior. See https://github.com/mesonbuild/meson/issues/4117
+ if not self.embed and not self.link_libpython and mesonlib.version_compare(self.version, '< 3.8'):
+ self.link_args = []
+
class PythonFrameworkDependency(ExtraFrameworkDependency, _PythonDependencyBase):
return None
elif self.platform == 'win32':
return '32'
- elif self.platform in ('win64', 'win-amd64'):
+ elif self.platform in {'win64', 'win-amd64'}:
return '64'
mlog.log(f'Unknown Windows Python platform {self.platform!r}')
return None
variables = sysconfig.get_config_vars()
variables.update({'base_prefix': getattr(sys, 'base_prefix', sys.prefix)})
+if sys.version_info < (3, 0):
+ suffix = variables.get('SO')
+elif sys.version_info < (3, 8, 7):
+ # https://bugs.python.org/issue?@action=redirect&bpo=39825
+ from distutils.sysconfig import get_config_var
+ suffix = get_config_var('EXT_SUFFIX')
+else:
+ suffix = variables.get('EXT_SUFFIX')
+
print(json.dumps({
'variables': variables,
'paths': paths,
'is_pypy': '__pypy__' in sys.builtin_module_names,
'is_venv': sys.prefix != variables['base_prefix'],
'link_libpython': links_against_libpython(),
+ 'suffix': suffix,
}))
'''
mlog.debug(stderr)
if info is not None and self._check_version(info['version']):
- variables = info['variables']
- info['suffix'] = variables.get('EXT_SUFFIX') or variables.get('SO') or variables.get('.so')
self.info = T.cast('PythonIntrospectionDict', info)
self.platlib = self._get_path(state, 'platlib')
self.purelib = self._get_path(state, 'purelib')
kwargs['install_dir'] = self._get_install_dir_impl(False, subdir)
- new_deps = []
- has_pydep = False
- for dep in mesonlib.extract_as_list(kwargs, 'dependencies'):
- if isinstance(dep, _PythonDependencyBase):
- has_pydep = True
- # On macOS and some Linux distros (Debian) distutils doesn't link
- # extensions against libpython. We call into distutils and mirror its
- # behavior. See https://github.com/mesonbuild/meson/issues/4117
- if not self.link_libpython:
- dep = dep.get_partial_dependency(compile_args=True)
- new_deps.append(dep)
+ new_deps = mesonlib.extract_as_list(kwargs, 'dependencies')
+ has_pydep = any(isinstance(dep, _PythonDependencyBase) for dep in new_deps)
if not has_pydep:
pydep = self._dependency_method_impl({})
if not pydep.found():
# on various platforms, let's not give up just yet, if an executable
# named python is available and has a compatible version, let's use
# it
- if not python.found() and name_or_path in ['python2', 'python3']:
+ if not python.found() and name_or_path in {'python2', 'python3'}:
python = PythonExternalProgram('python')
if python.found():
abspath: str
if isinstance(rcc_file, str):
abspath = os.path.join(state.environment.source_dir, state.subdir, rcc_file)
- rcc_dirname = os.path.dirname(abspath)
else:
abspath = rcc_file.absolute_path(state.environment.source_dir, state.environment.build_dir)
- rcc_dirname = os.path.dirname(abspath)
+ rcc_dirname = os.path.dirname(abspath)
- # FIXME: what error are we actually trying to check here?
+ # FIXME: what error are we actually trying to check here? (probably parse errors?)
try:
tree = ET.parse(abspath)
root = tree.getroot()
if child.tag != 'file':
mlog.warning("malformed rcc file: ", os.path.join(state.subdir, str(rcc_file)))
break
+ elif child.text is None:
+ raise MesonException(f'<file> element without a path in {os.path.join(state.subdir, str(rcc_file))}')
else:
result.append(child.text)
return rcc_dirname, result
+ except MesonException:
+ raise
except Exception:
raise MesonException(f'Unable to parse resource file {abspath}')
# See the License for the specific language governing permissions and
# limitations under the License.
+from __future__ import annotations
import os
import typing as T
from .. import mlog
from ..build import BothLibraries, BuildTarget, CustomTargetIndex, Executable, ExtractedObjects, GeneratedList, IncludeDirs, CustomTarget, StructuredSources
from ..dependencies import Dependency, ExternalLibrary
-from ..interpreter.interpreter import TEST_KWARGS, OUTPUT_KW
+from ..interpreter.type_checking import DEPENDENCIES_KW, TEST_KWS, OUTPUT_KW, INCLUDE_DIRECTORIES, include_dir_string_new
from ..interpreterbase import ContainerTypeInfo, InterpreterException, KwargInfo, typed_kwargs, typed_pos_args, noPosargs
from ..mesonlib import File
include_directories: T.List[IncludeDirs]
input: T.List[SourceInputs]
output: str
+ dependencies: T.List[T.Union[Dependency, ExternalLibrary]]
class RustModule(ExtensionModule):
"""A module that holds helper functions for rust."""
- INFO = ModuleInfo('rust', '0.57.0', unstable=True)
+ INFO = ModuleInfo('rust', '0.57.0', stabilized='1.0.0')
- def __init__(self, interpreter: 'Interpreter') -> None:
+ def __init__(self, interpreter: Interpreter) -> None:
super().__init__(interpreter)
- self._bindgen_bin: T.Optional['ExternalProgram'] = None
+ self._bindgen_bin: T.Optional[ExternalProgram] = None
self.methods.update({
'test': self.test,
'bindgen': self.bindgen,
@typed_pos_args('rust.test', str, BuildTarget)
@typed_kwargs(
'rust.test',
- *TEST_KWARGS,
+ *TEST_KWS,
+ DEPENDENCIES_KW,
KwargInfo('is_parallel', bool, default=False),
- KwargInfo(
- 'dependencies',
- ContainerTypeInfo(list, (Dependency, ExternalLibrary)),
- listify=True,
- default=[]),
)
- def test(self, state: 'ModuleState', args: T.Tuple[str, BuildTarget], kwargs: 'FuncTest') -> ModuleReturnValue:
+ def test(self, state: ModuleState, args: T.Tuple[str, BuildTarget], kwargs: FuncTest) -> ModuleReturnValue:
"""Generate a rust test target from a given rust target.
Rust puts it's unitests inside it's main source files, unlike most
'rust.bindgen',
KwargInfo('c_args', ContainerTypeInfo(list, str), default=[], listify=True),
KwargInfo('args', ContainerTypeInfo(list, str), default=[], listify=True),
- KwargInfo('include_directories', ContainerTypeInfo(list, IncludeDirs), default=[], listify=True),
KwargInfo(
'input',
ContainerTypeInfo(list, (File, GeneratedList, BuildTarget, BothLibraries, ExtractedObjects, CustomTargetIndex, CustomTarget, str), allow_empty=False),
listify=True,
required=True,
),
+ INCLUDE_DIRECTORIES.evolve(feature_validator=include_dir_string_new),
OUTPUT_KW,
+ DEPENDENCIES_KW.evolve(since='1.0.0'),
)
- def bindgen(self, state: 'ModuleState', args: T.List, kwargs: 'FuncBindgen') -> ModuleReturnValue:
+ def bindgen(self, state: ModuleState, args: T.List, kwargs: FuncBindgen) -> ModuleReturnValue:
"""Wrapper around bindgen to simplify it's use.
The main thing this simplifies is the use of `include_directory`
header, *_deps = self.interpreter.source_strings_to_files(kwargs['input'])
# Split File and Target dependencies to add pass to CustomTarget
- depends: T.List['SourceOutputs'] = []
+ depends: T.List[SourceOutputs] = []
depend_files: T.List[File] = []
for d in _deps:
if isinstance(d, File):
else:
depends.append(d)
- inc_strs: T.List[str] = []
- for i in kwargs['include_directories']:
+ clang_args: T.List[str] = []
+ for i in state.process_include_dirs(kwargs['include_directories']):
# bindgen always uses clang, so it's safe to hardcode -I here
- inc_strs.extend([f'-I{x}' for x in i.to_string_list(
+ clang_args.extend([f'-I{x}' for x in i.to_string_list(
state.environment.get_source_dir(), state.environment.get_build_dir())])
+ for de in kwargs['dependencies']:
+ for i in de.get_include_dirs():
+ clang_args.extend([f'-I{x}' for x in i.to_string_list(
+ state.environment.get_source_dir(), state.environment.get_build_dir())])
+ clang_args.extend(de.get_all_compile_args())
+ for s in de.get_sources():
+ if isinstance(s, File):
+ depend_files.append(s)
+ elif isinstance(s, CustomTarget):
+ depends.append(s)
+
if self._bindgen_bin is None:
self._bindgen_bin = state.find_program('bindgen')
'@INPUT@', '--output',
os.path.join(state.environment.build_dir, '@OUTPUT@')
] + \
- kwargs['args'] + ['--'] + kwargs['c_args'] + inc_strs + \
+ kwargs['args'] + ['--'] + kwargs['c_args'] + clang_args + \
['-MD', '-MQ', '@INPUT@', '-MF', '@DEPFILE@']
target = CustomTarget(
return ModuleReturnValue([target], [target])
-def initialize(interp: 'Interpreter') -> RustModule:
+def initialize(interp: Interpreter) -> RustModule:
return RustModule(interp)
"""Other sourcesets added when this rule's conditions are true"""
if_false: T.List[T.Union[mesonlib.FileOrString, build.GeneratedTypes]]
- """Source files added when this rule's conditons are false"""
+ """Source files added when this rule's conditions are false"""
class SourceFiles(T.NamedTuple):
return True
return False
- def accept_any(self, tids: T.Sequence[str]) -> str:
+ def accept_any(self, tids: T.Tuple[str, ...]) -> str:
tid = self.current.tid
if tid in tids:
self.getsym()
except Exception as e:
mintro.write_meson_info_file(b, [e])
raise
+
+ cdf: T.Optional[str] = None
try:
dumpfile = os.path.join(env.get_scratch_dir(), 'build.dat')
# We would like to write coredata as late as possible since we use the existence of
except Exception as e:
mintro.write_meson_info_file(b, [e])
- if 'cdf' in locals():
+ if cdf is not None:
old_cdf = cdf + '.prev'
if os.path.exists(old_cdf):
os.replace(old_cdf, cdf)
'TAPParser.Error',
'TAPParser.Version',
'TAPParser.Plan',
+ 'TAPParser.UnknownLine',
'TAPParser.Bailout']
class TAPParser:
class Error(T.NamedTuple):
message: str
+ class UnknownLine(T.NamedTuple):
+ message: str
+ lineno: int
+
class Version(T.NamedTuple):
version: int
self.state = self._MAIN
assert self.state == self._MAIN
- if line.startswith('#'):
+ if not line or line.startswith('#'):
return
m = self._RE_TEST.match(line)
else:
yield self.Version(version=self.version)
return
+
+ # unknown syntax
+ yield self.UnknownLine(line, self.lineno)
else:
# end of file
if self.state == self._YAML:
flush=True)
if result.verbose or result.res.is_bad():
self.print_log(harness, result)
+ if result.warnings:
+ print(flush=True)
+ for w in result.warnings:
+ print(w, flush=True)
+ print(flush=True)
if result.verbose or result.res.is_bad():
print(flush=True)
self.junit = None # type: T.Optional[et.ElementTree]
self.is_parallel = is_parallel
self.verbose = verbose
+ self.warnings = [] # type: T.List[str]
def start(self, cmd: T.List[str]) -> None:
self.res = TestResult.RUNNING
async def parse(self, harness: 'TestHarness', lines: T.AsyncIterator[str]) -> None:
res = None
+ warnings = [] # type: T.List[TAPParser.UnknownLine]
+ version: T.Optional[int] = None
async for i in TAPParser().parse_async(lines):
- if isinstance(i, TAPParser.Bailout):
+ if isinstance(i, TAPParser.Version):
+ version = i.version
+ elif isinstance(i, TAPParser.Bailout):
res = TestResult.ERROR
harness.log_subtest(self, i.message, res)
elif isinstance(i, TAPParser.Test):
if i.result.is_bad():
res = TestResult.FAIL
harness.log_subtest(self, i.name or f'subtest {i.number}', i.result)
+ elif isinstance(i, TAPParser.UnknownLine):
+ warnings.append(i)
elif isinstance(i, TAPParser.Error):
self.additional_error += 'TAP parsing error: ' + i.message
res = TestResult.ERROR
+ if version is None:
+ self.warnings.append('Unknown TAP version. The first line MUST be `TAP version <int>`. Assuming version 12.')
+ version = 12
+ if warnings:
+ unknown = str(mlog.yellow('UNKNOWN'))
+ width = len(str(max(i.lineno for i in warnings)))
+ for w in warnings:
+ self.warnings.append(f'stdout: {w.lineno:{width}}: {unknown}: {w.message}')
+ if version > 13:
+ self.warnings.append('Unknown TAP output lines have been ignored. Please open a feature request to\n'
+ 'implement them, or prefix them with a # if they are not TAP syntax.')
+ else:
+ self.warnings.append(str(mlog.red('ERROR')) + ': Unknown TAP output lines for a supported TAP version.\n'
+ 'This is probably a bug in the test; if they are not TAP syntax, prefix them with a #')
if all(t.result is TestResult.SKIP for t in self.results):
# This includes the case where self.results is empty
res = TestResult.SKIP
coredata = cdata.load(options.builddir)
backend = coredata.get_option(OptionKey('backend'))
for k, v in sorted(coredata.__dict__.items()):
- if k in ('backend_options', 'base_options', 'builtins', 'compiler_options', 'user_options'):
+ if k in {'backend_options', 'base_options', 'builtins', 'compiler_options', 'user_options'}:
# use `meson configure` to view these
pass
- elif k in ['install_guid', 'test_guid', 'regen_guid']:
+ elif k in {'install_guid', 'test_guid', 'regen_guid'}:
if all_backends or backend.startswith('vs'):
print(k + ': ' + v)
elif k == 'target_guids':
if all_backends or backend.startswith('vs'):
print(k + ':')
dump_guids(v)
- elif k in ['lang_guids']:
+ elif k == 'lang_guids':
if all_backends or backend.startswith('vs') or backend == 'xcode':
print(k + ':')
dump_guids(v)
return False
-class EmptyExternalProgram(ExternalProgram): # lgtm [py/missing-call-to-init]
- '''
- A program object that returns an empty list of commands. Used for cases
- such as a cross file exe_wrapper to represent that it's not required.
- '''
-
- def __init__(self) -> None:
- self.name = None
- self.command = []
- self.path = None
-
- def __repr__(self) -> str:
- r = '<{} {!r} -> {!r}>'
- return r.format(self.__class__.__name__, self.name, self.command)
-
- def found(self) -> bool:
- return True
-
-
class OverrideProgram(ExternalProgram):
"""A script overriding a program."""
if target in self.interpreter.assignments:
node = self.interpreter.assignments[target]
if isinstance(node, FunctionNode):
- if node.func_name in ['executable', 'jar', 'library', 'shared_library', 'shared_module', 'static_library', 'both_libraries']:
+ if node.func_name in {'executable', 'jar', 'library', 'shared_library', 'shared_module', 'static_library', 'both_libraries'}:
tgt = self.interpreter.assign_vals[target]
return tgt
if dependency in self.interpreter.assignments:
node = self.interpreter.assignments[dependency]
if isinstance(node, FunctionNode):
- if node.func_name in ['dependency']:
+ if node.func_name == 'dependency':
name = self.interpreter.flatten_args(node.args)[0]
dep = check_list(name)
while raw[end] != '=':
end += 1
end += 1 # Handle the '='
- while raw[end] in [' ', '\n', '\t']:
+ while raw[end] in {' ', '\n', '\t'}:
end += 1
files[i['file']]['raw'] = raw[:start] + i['str'] + raw[end:]
for i in str_list:
- if i['action'] in ['modify', 'rm']:
+ if i['action'] in {'modify', 'rm'}:
remove_node(i)
- elif i['action'] in ['add']:
+ elif i['action'] == 'add':
files[i['file']]['raw'] += i['str'] + '\n'
# Write the files back
from .run_tool import run_tool
from ..environment import detect_clangformat
+from ..mesonlib import version_compare
+from ..programs import ExternalProgram
import typing as T
def run_clang_format(fname: Path, exelist: T.List[str], check: bool) -> subprocess.CompletedProcess:
+ clangformat_10 = False
if check:
- original = fname.read_bytes()
+ cformat_ver = ExternalProgram('clang-format', exelist).get_version()
+ if version_compare(cformat_ver, '>=10'):
+ clangformat_10 = True
+ exelist = exelist + ['--dry-run', '--Werror']
+ else:
+ original = fname.read_bytes()
before = fname.stat().st_mtime
ret = subprocess.run(exelist + ['-style=file', '-i', str(fname)])
after = fname.stat().st_mtime
if before != after:
print('File reformatted: ', fname)
- if check:
+ if check and not clangformat_10:
# Restore the original if only checking.
fname.write_bytes(original)
ret.returncode = 1
capture_file = ''
for j in i:
- if j in ['>', '>>']:
+ if j in {'>', '>>'}:
stdout = subprocess.PIPE
continue
- elif j in ['&>', '&>>']:
+ elif j in {'&>', '&>>'}:
stdout = subprocess.PIPE
stderr = subprocess.STDOUT
continue
class SectionHeader(DataSizes):
def __init__(self, ifile: T.BinaryIO, ptrsize: int, is_le: bool) -> None:
super().__init__(ptrsize, is_le)
- if ptrsize == 64:
- is_64 = True
- else:
- is_64 = False
+ is_64 = ptrsize == 64
+
# Elf64_Word
self.sh_name = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
# Elf64_Word
raise SystemExit(err)
def fix_jar(fname: str) -> None:
- subprocess.check_call(['jar', 'xfv', fname, 'META-INF/MANIFEST.MF'])
+ subprocess.check_call(['jar', 'xf', fname, 'META-INF/MANIFEST.MF'])
with open('META-INF/MANIFEST.MF', 'r+', encoding='utf-8') as f:
lines = f.readlines()
f.seek(0)
if not line.startswith('Class-Path:'):
f.write(line)
f.truncate()
- subprocess.check_call(['jar', 'ufm', fname, 'META-INF/MANIFEST.MF'])
+ # jar -um doesn't allow removing existing attributes. Use -uM instead,
+ # which a) removes the existing manifest from the jar and b) disables
+ # special-casing for the manifest file, so we can re-add it as a normal
+ # archive member. This puts the manifest at the end of the jar rather
+ # than the beginning, but the spec doesn't forbid that.
+ subprocess.check_call(['jar', 'ufM', fname, 'META-INF/MANIFEST.MF'])
def fix_rpath(fname: str, rpath_dirs_to_remove: T.Set[bytes], new_rpath: T.Union[str, bytes], final_path: str, install_name_mappings: T.Dict[str, str], verbose: bool = True) -> None:
global INSTALL_NAME_TOOL # pylint: disable=global-statement
# Store the size of symbols pointing to data objects so we relink
# when those change, which is needed because of copy relocations
# https://github.com/mesonbuild/meson/pull/7132#issuecomment-628353702
- if line_split[1].upper() in ('B', 'G', 'D') and len(line_split) >= 4:
+ if line_split[1].upper() in {'B', 'G', 'D'} and len(line_split) >= 4:
entry += [line_split[3]]
result += [' '.join(entry)]
write_if_changed('\n'.join(result) + '\n', outfilename)
tool_name = args[0]
srcdir_name = args[1]
os.chdir(srcdir_name)
- assert tool_name in ['cscope', 'ctags', 'etags']
+ assert tool_name in {'cscope', 'ctags', 'etags'}
res = globals()[tool_name]()
assert isinstance(res, int)
return res
int main(int argc, char **argv) {{
if(argc != 1) {{
- std::cout << argv[0] << "takes no arguments.\\n";
+ std::cout << argv[0] << " takes no arguments.\\n";
return 1;
}}
std::cout << "This is project " << PROJECT_NAME << ".\\n";
self.envvars.append((self._prepend, name, values, separator))
@staticmethod
- def _set(env: T.Dict[str, str], name: str, values: T.List[str], separator: str) -> str:
+ def _set(env: T.Dict[str, str], name: str, values: T.List[str], separator: str, default_value: T.Optional[str]) -> str:
return separator.join(values)
@staticmethod
- def _append(env: T.Dict[str, str], name: str, values: T.List[str], separator: str) -> str:
- curr = env.get(name)
+ def _append(env: T.Dict[str, str], name: str, values: T.List[str], separator: str, default_value: T.Optional[str]) -> str:
+ curr = env.get(name, default_value)
return separator.join(values if curr is None else [curr] + values)
@staticmethod
- def _prepend(env: T.Dict[str, str], name: str, values: T.List[str], separator: str) -> str:
- curr = env.get(name)
+ def _prepend(env: T.Dict[str, str], name: str, values: T.List[str], separator: str, default_value: T.Optional[str]) -> str:
+ curr = env.get(name, default_value)
return separator.join(values if curr is None else values + [curr])
- def get_env(self, full_env: T.MutableMapping[str, str]) -> T.Dict[str, str]:
+ def get_env(self, full_env: T.MutableMapping[str, str], dump: bool = False) -> T.Dict[str, str]:
env = full_env.copy()
for method, name, values, separator in self.envvars:
- env[name] = method(env, name, values, separator)
+ default_value = f'${name}' if dump else None
+ env[name] = method(env, name, values, separator, default_value)
return env
from __future__ import annotations
from pathlib import Path
import argparse
-import ctypes
import enum
import sys
import stat
import platform, subprocess, operator, os, shlex, shutil, re
import collections
from functools import lru_cache, wraps, total_ordering
-from itertools import tee, filterfalse
+from itertools import tee
from tempfile import TemporaryDirectory, NamedTemporaryFile
import typing as T
import textwrap
import copy
import pickle
+import errno
from mesonbuild import mlog
from .core import MesonException, HoldableObject
if sys.platform != 'win32':
return ''
try:
+ import ctypes
process_arch = ctypes.c_ushort()
native_arch = ctypes.c_ushort()
kernel32 = ctypes.windll.kernel32
not_found.append(req)
else:
found.append(req)
- return not_found == [], not_found, found
+ return not not_found, not_found, found
# determine if the minimum version satisfying the condition |condition| exceeds
return unixdirs
# FIXME: this needs to be further genericized for aarch64 etc.
machine = platform.machine()
- if machine in ('i386', 'i486', 'i586', 'i686'):
+ if machine in {'i386', 'i486', 'i586', 'i686'}:
plat = 'i386'
elif machine.startswith('arm'):
plat = 'arm'
([0, 2, 4, 6, 8], [1, 3, 5, 7, 9])
"""
t1, t2 = tee(iterable)
- return filterfalse(pred, t1), filter(pred, t2)
+ return (t for t in t1 if not pred(t)), (t for t in t2 if pred(t))
def Popen_safe(args: T.List[str], write: T.Optional[str] = None,
# If write is not None, set stdin to PIPE so data can be sent.
if write is not None:
stdin = subprocess.PIPE
- if not sys.stdout.encoding or encoding.upper() != 'UTF-8':
- p, o, e = Popen_safe_legacy(args, write=write, stdin=stdin, stdout=stdout, stderr=stderr, **kwargs)
- else:
- p = subprocess.Popen(args, universal_newlines=True, encoding=encoding, close_fds=False,
- stdin=stdin, stdout=stdout, stderr=stderr, **kwargs)
- o, e = p.communicate(write)
+
+ try:
+ if not sys.stdout.encoding or encoding.upper() != 'UTF-8':
+ p, o, e = Popen_safe_legacy(args, write=write, stdin=stdin, stdout=stdout, stderr=stderr, **kwargs)
+ else:
+ p = subprocess.Popen(args, universal_newlines=True, encoding=encoding, close_fds=False,
+ stdin=stdin, stdout=stdout, stderr=stderr, **kwargs)
+ o, e = p.communicate(write)
+ except OSError as oserr:
+ if oserr.errno == errno.ENOEXEC:
+ raise MesonException(f'Failed running {args[0]!r}, binary or interpreter not executable.\n'
+ 'Possibly wrong architecture or the executable bit is not set.')
+ raise
# Sometimes the command that we run will call another command which will be
# without the above stdin workaround, so set the console mode again just in
# case.
input_ = write.encode('utf-8')
o, e = p.communicate(input_)
if o is not None:
- if sys.stdout.encoding:
+ if sys.stdout.encoding is not None:
o = o.decode(encoding=sys.stdout.encoding, errors='replace').replace('\r\n', '\n')
else:
o = o.decode(errors='replace').replace('\r\n', '\n')
setattr(namespace, self.dest, os.path.abspath(os.path.realpath(values)))
-def get_wine_shortpath(winecmd: T.List[str], wine_paths: T.Sequence[str],
+def get_wine_shortpath(winecmd: T.List[str], wine_paths: T.List[str],
workdir: T.Optional[str] = None) -> str:
'''
WINEPATH size is limited to 1024 bytes which can easily be exceeded when
verbose_git(['fetch', self.wrap.get('url'), revno], self.dirname, check=True)
verbose_git(checkout_cmd, self.dirname, check=True)
else:
- verbose_git(['-c', 'advice.detachedHead=false', 'clone', *depth_option, '--branch', revno, self.wrap.get('url'),
- self.directory], self.subdir_root, check=True)
+ args = ['-c', 'advice.detachedHead=false', 'clone', *depth_option]
+ if revno.lower() != 'head':
+ args += ['--branch', revno]
+ args += [self.wrap.get('url'), self.directory]
+ verbose_git(args, self.subdir_root, check=True)
if self.wrap.values.get('clone-recursive', '').lower() == 'true':
verbose_git(['submodule', 'update', '--init', '--checkout', '--recursive', *depth_option],
self.dirname, check=True)
def is_git_full_commit_id(self, revno: str) -> bool:
result = False
- if len(revno) in (40, 64): # 40 for sha1, 64 for upcoming sha256
+ if len(revno) in {40, 64}: # 40 for sha1, 64 for upcoming sha256
result = all(ch in '0123456789AaBbCcDdEeFf' for ch in revno)
return result
return p
if self.typ == 'python_lib':
return p.with_suffix(python_suffix)
- if self.typ in ['file', 'dir']:
+ if self.typ == 'py_implib':
+ p = p.with_suffix(python_suffix)
+ if env.machines.host.is_windows() and canonical_compiler == 'msvc':
+ return p.with_suffix('.lib')
+ elif env.machines.host.is_windows() or env.machines.host.is_cygwin():
+ return p.with_suffix('.dll.a')
+ else:
+ return None
+ elif self.typ in {'file', 'dir'}:
return p
elif self.typ == 'shared_lib':
if env.machines.host.is_windows() or env.machines.host.is_cygwin():
if self.version:
p = p.with_name('{}-{}'.format(p.name, self.version[0]))
return p.with_suffix('.pdb') if has_pdb else None
- elif self.typ in {'implib', 'implibempty', 'py_implib'}:
+ elif self.typ in {'implib', 'implibempty'}:
if env.machines.host.is_windows() and canonical_compiler == 'msvc':
# only MSVC doesn't generate empty implibs
if self.typ == 'implibempty' and compiler == 'msvc':
return None
return p.parent / (re.sub(r'^lib', '', p.name) + '.lib')
elif env.machines.host.is_windows() or env.machines.host.is_cygwin():
- if self.typ == 'py_implib':
- p = p.with_suffix(python_suffix)
return p.with_suffix('.dll.a')
else:
return None
t.stdout = stdout
return [t]
- new_opt_list: T.List[T.List[T.Tuple[str, bool, bool]]]
+ new_opt_list: T.List[T.List[T.Tuple[str, str, bool, bool]]]
# 'matrix; entry is present, so build multiple tests from matrix definition
- opt_list = [] # type: T.List[T.List[T.Tuple[str, bool, bool]]]
+ opt_list = [] # type: T.List[T.List[T.Tuple[str, str, bool, bool]]]
matrix = test_def['matrix']
assert "options" in matrix
for key, val in matrix["options"].items():
assert isinstance(val, list)
- tmp_opts = [] # type: T.List[T.Tuple[str, bool, bool]]
+ tmp_opts = [] # type: T.List[T.Tuple[str, str, bool, bool]]
for i in val:
assert isinstance(i, dict)
assert "val" in i
# Add an empty matrix entry
if i['val'] is None:
- tmp_opts += [(None, skip, skip_expected)]
+ tmp_opts += [(key, None, skip, skip_expected)]
continue
- tmp_opts += [('{}={}'.format(key, i['val']), skip, skip_expected)]
+ tmp_opts += [(key, i['val'], skip, skip_expected)]
if opt_list:
new_opt_list = []
new_opt_list = []
for i in opt_list:
exclude = False
- opt_names = [x[0] for x in i]
+ opt_tuple = [(x[0], x[1]) for x in i]
for j in matrix['exclude']:
- ex_list = [f'{k}={v}' for k, v in j.items()]
- if all([x in opt_names for x in ex_list]):
+ ex_list = [(k, v) for k, v in j.items()]
+ if all([x in opt_tuple for x in ex_list]):
exclude = True
break
opt_list = new_opt_list
for i in opt_list:
- name = ' '.join([x[0] for x in i if x[0] is not None])
- opts = ['-D' + x[0] for x in i if x[0] is not None]
- skip = any([x[1] for x in i])
- skip_expected = any([x[2] for x in i])
+ name = ' '.join([f'{x[0]}={x[1]}' for x in i if x[1] is not None])
+ opts = [f'-D{x[0]}={x[1]}' for x in i if x[1] is not None]
+ skip = any([x[2] for x in i])
+ skip_expected = any([x[3] for x in i])
test = TestDef(t.path, name, opts, skip or t.skip)
test.env.update(env)
test.installed_files = installed
TestCategory('platform-osx', 'osx', not mesonlib.is_osx()),
TestCategory('platform-windows', 'windows', not mesonlib.is_windows() and not mesonlib.is_cygwin()),
TestCategory('platform-linux', 'linuxlike', mesonlib.is_osx() or mesonlib.is_windows()),
- TestCategory('java', 'java', backend is not Backend.ninja or mesonlib.is_osx() or not have_java()),
+ TestCategory('java', 'java', backend is not Backend.ninja or not have_java()),
TestCategory('C#', 'csharp', skip_csharp(backend)),
TestCategory('vala', 'vala', backend is not Backend.ninja or not shutil.which(os.environ.get('VALAC', 'valac'))),
TestCategory('cython', 'cython', backend is not Backend.ninja or not shutil.which(os.environ.get('CYTHON', 'cython'))),
Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.9
Programming Language :: Python :: 3.10
+ Programming Language :: Python :: 3.11
Topic :: Software Development :: Build Tools
long_description = Meson is a cross-platform build system designed to be both as fast and as user friendly as possible. It supports many languages and compilers, including GCC, Clang, PGI, Intel, and Visual Studio. Its build definitions are written in a simple non-Turing complete DSL.
have = cc.get_define('MESON_FAIL_VALUE')
assert(have == '', 'MESON_FAIL_VALUE value is "@0@" instead of ""'.format(have))
+ # Check if prefix array works properly and has the expected order
+ have = cc.get_define('MESON_FAIL_VALUE', prefix: ['#define MESON_FAIL_VALUE 1', '#undef MESON_FAIL_VALUE'])
+ assert(have == '', 'MESON_FAIL_VALUE value is "@0@" instead of ""'.format(have))
+
+ have = cc.get_define('MESON_SUCCESS_VALUE', prefix: ['#undef MESON_SUCCESS_VALUE', '#define MESON_SUCCESS_VALUE 1'])
+ assert(have == '1', 'MESON_SUCCESS_VALUE value is "@0@" instead of "1"'.format(have))
+
# This is used in the test_preprocessor_checks_CPPFLAGS() unit test.
have = cc.get_define('MESON_TEST_DEFINE_VALUE')
expect = get_option('MESON_TEST_DEFINE_VALUE')
# Test the fs replacement
# Test copying of an empty configuration data object
inf = 'invalid-utf8.bin.in'
-outf = fs.copyfile(inf, 'invalid-utf8-1.bin')
+outf = fs.copyfile(inf, 'invalid-utf8-1.bin',
+ install: true,
+ install_dir: get_option('datadir') / meson.project_name(),
+ install_tag: 'copyfile',
+)
test('fs.copyfile string', check_file, args: [files(inf), outf])
# Test with default outname of string
{"type": "file", "file": "usr/share/appdir/config2.h"},
{"type": "file", "file": "usr/share/appdir/config2b.h"},
{"type": "file", "file": "usr/share/appdireh/config2-1.h"},
- {"type": "file", "file": "usr/share/appdirok/config2-2.h"}
+ {"type": "file", "file": "usr/share/appdirok/config2-2.h"},
+ {"type": "file", "file": "usr/share/configure file test/invalid-utf8-1.bin"}
]
}
assert('a' in {'a': 'b'}, '''1 should be in {'a': 'b'}''')
assert('b' not in {'a': 'b'}, '''1 should be in {'a': 'b'}''')
+
+assert('a' in 'abc')
+assert('b' not in 'def')
--- /dev/null
+BasedOnStyle: LLVM
run_target('ctags',
command : converter)
-run_target('clang-format',
- command : converter)
+clangf = run_target('clang-format',
+ command : [converter, files('.clang-format'), meson.current_build_dir() / 'clang-format'])
+custom_target('clang-tidy',
+ input: '.clang-tidy',
+ output: 'clang-tidy',
+ command : [converter, '@INPUT@', '@OUTPUT@'])
+alias_target('clang-format-check', clangf)
# Check we can pass env to the program. Also check some string substitutions
# that were added in 0.57.0 but not documented. This is documented behaviour
# ------------------------------------------------------------------------------
# multi-line format strings
# ------------------------------------------------------------------------------
-y_actual = f'''This is a multi-line comment with string substition:
+y_actual = f'''This is a multi-line comment with string substitution:
"@sub1@@sub2@@sub3@@sub4@"
And I can even substitute the entry multiple times!
@sub3@
'''
-y_expect = '''This is a multi-line comment with string substition:
+y_expect = '''This is a multi-line comment with string substitution:
"the quick
brown
fox"
{
"stdout": [
{
- "line": "test cases/failing/40 custom target plainname many inputs/meson.build:5:0: ERROR: custom_target: output cannot containe \"@PLAINNAME@\" or \"@BASENAME@\" when there is more than one input (we can't know which to use)"
+ "line": "test cases/failing/40 custom target plainname many inputs/meson.build:5:0: ERROR: custom_target: output cannot contain \"@PLAINNAME@\" or \"@BASENAME@\" when there is more than one input (we can't know which to use)"
}
]
}
{
- "skip_on_jobname": ["azure"]
+ "skip_on_jobname": ["azure", "msys2"]
}
fs = import('fs')
javamod = import('java')
+cc = meson.get_compiler('c')
java = find_program('java')
jni_dep = dependency('jni', version : '>=1.8', modules: ['jvm', 'awt'])
+# Assert that the header can actually be found with the dependency.
+cc.has_header('jni.h', dependencies: [jni_dep])
+# Assert that the platform-specific include directory is included in the compiler arguments.
+cc.has_header('jni_md.h', dependencies: [jni_dep])
+
# generate native headers
subdir('src')
subdir('lib')
sources += configured
-native_headers = javamod.generate_native_headers(
+native_headers = javamod.native_headers(
sources, package: 'com.mesonbuild', classes: ['JniTest'])
native_header_includes = include_directories('.')
--- /dev/null
+#include "internal_dep.h"
+
+int64_t add64(const int64_t first, const int64_t second) {
+ return first + second;
+}
--- /dev/null
+/* SPDX-License-Identifier: Apache-2.0 */
+/* Copyright © 2022 Intel Corporation */
+
+#include <zlib.h>
+
+struct External {
+ z_stream * stream;
+};
--- /dev/null
+// SPDX-License-Identifier: Apache-2.0
+// Copyright © 2022 Intel Corporation
+
+#include "gen.h"
+
+int64_t add64(const int64_t, const int64_t);
--- /dev/null
+// SPDX-License-Identifier: Apache-2.0
+// Copyright © 2021 Intel Corporation
+
+#![allow(non_upper_case_globals)]
+#![allow(non_camel_case_types)]
+#![allow(non_snake_case)]
+
+include!("internal_dep.rs");
+
+use std::convert::TryInto;
+
+fn main() {
+ unsafe {
+ std::process::exit(add64(0, 0).try_into().unwrap_or(5));
+ };
+}
--- /dev/null
+# SPDX-License-Identifier: Apache-2.0
+# Copyright © 2022 Intel Corporation
+
+dep_zlib = dependency('zlib', required : false, disabler : true)
+
+external_dep_rs = rust.bindgen(
+ input : 'external_dep.h',
+ output : 'external_dep.rs',
+ dependencies : dep_zlib
+)
+
+external_dep = static_library(
+ 'external_dep',
+ [external_dep_rs],
+ dependencies : dep_zlib.partial_dependency(links : true),
+)
+
+rust.test('external dep', external_dep)
+
+int_dep = declare_dependency(
+ sources : [gen_h, gen2_h],
+ include_directories : include_directories('..'),
+)
+
+internal_dep_rs = rust.bindgen(
+ input : 'internal_dep.h',
+ output : 'internal_dep.rs',
+ dependencies : int_dep,
+)
+
+c_lib2 = static_library(
+ 'clib2',
+ 'clib2.c',
+ dependencies : int_dep,
+)
+
+rust_bin_int_dep = executable(
+ 'rust_bin_int_dep',
+ structured_sources(['internal_main.rs', internal_dep_rs]),
+ link_with : [c_lib, c_lib2],
+)
+
+test('generated header dependency', rust_bin_int_dep)
# SPDX-license-identifer: Apache-2.0
-# Copyright © 2021 Intel Corporation
+# Copyright © 2021-2022 Intel Corporation
-project('rustmod bindgen', ['c', 'rust'])
+project('rustmod bindgen', ['c', 'rust'], meson_version : '>= 0.63')
prog_bindgen = find_program('bindgen', required : false)
if not prog_bindgen.found()
gen = rust.bindgen(
input : 'src/header.h',
output : 'header.rs',
- include_directories : inc,
+ include_directories : 'include',
)
# see: https://github.com/mesonbuild/meson/issues/8160
test('generated header', rust_bin2)
subdir('sub')
+subdir('dependencies')
--- /dev/null
+{
+ "stdout": [
+ {
+ "line": "test cases/rust/12 bindgen/meson.build:30: WARNING: Project targets '>= 0.63' but uses feature introduced in '1.0.0': include_directories kwarg of type string. Use include_directories('include') instead"
+ }
+ ]
+}
-project('python wihtout pkgconfig', 'c')
+project('python without pkgconfig', 'c')
# This unit test is ran with PKG_CONFIG=notfound
import('python').find_installation().dependency()
--- /dev/null
+package com.mesonbuild;
+
+class Simple {
+ public static void main(String [] args) {
+ System.out.println("Java is working.\n");
+ }
+}
--- /dev/null
+project('simplejava', 'java')
+
+one = jar('one', 'com/mesonbuild/Simple.java',
+ main_class : 'com.mesonbuild.Simple',
+ install : true,
+ install_dir : get_option('bindir'),
+)
+
+two = jar('two', 'com/mesonbuild/Simple.java',
+ main_class : 'com.mesonbuild.Simple',
+ install : true,
+ install_dir : get_option('bindir'),
+ link_with : one,
+)
--- /dev/null
+#! /usr/bin/env python3
+
+import sys
+from shutil import copyfile
+copyfile(*sys.argv[1:])
# declaration. run_tests.py generates the
# prebuilt object before running this test.
-e = executable('prog', 'main.c',
-objects : prebuilt)
+e = []
-test('objtest', e)
+e += executable('exe1', sources: 'main.c', objects: prebuilt)
+e += executable('exe2', sources: 'main.c', objects: files(prebuilt))
+
+sl1 = static_library('lib3', objects: prebuilt)
+e += executable('exe3', sources: 'main.c', objects: sl1.extract_all_objects(recursive: true))
+
+ct = custom_target(output: 'copy-' + prebuilt, input: prebuilt,
+ command: [find_program('cp.py'), '@INPUT@', '@OUTPUT@'])
+e += executable('exe4', 'main.c', ct)
+e += executable('exe5', 'main.c', ct[0])
+
+sl2 = static_library('lib6', sources: ct)
+e += executable('exe6', sources: 'main.c', objects: sl2.extract_all_objects(recursive: true))
+
+foreach i : e
+ test(i.name(), i)
+endforeach
self.setconf('--warnlevel=3')
obj = mesonbuild.coredata.load(self.builddir)
self.assertEqual(obj.options[OptionKey('warning_level')].value, '3')
+ self.setconf('--warnlevel=everything')
+ obj = mesonbuild.coredata.load(self.builddir)
+ self.assertEqual(obj.options[OptionKey('warning_level')].value, 'everything')
self.wipe()
# But when using -D syntax, it should be 'warning_level'
self.setconf('-Dwarning_level=3')
obj = mesonbuild.coredata.load(self.builddir)
self.assertEqual(obj.options[OptionKey('warning_level')].value, '3')
+ self.setconf('-Dwarning_level=everything')
+ obj = mesonbuild.coredata.load(self.builddir)
+ self.assertEqual(obj.options[OptionKey('warning_level')].value, 'everything')
self.wipe()
# Mixing --option and -Doption is forbidden
from pathlib import PurePath
from unittest import mock, TestCase, SkipTest
import json
+import io
import os
import re
import subprocess
newdir = os.path.realpath(newdir)
self.change_builddir(newdir)
- def _get_meson_log(self) -> T.Optional[str]:
+ def _open_meson_log(self) -> io.TextIOWrapper:
log = os.path.join(self.logdir, 'meson-log.txt')
- if not os.path.isfile(log):
- print(f"{log!r} doesn't exist", file=sys.stderr)
+ return open(log, encoding='utf-8')
+
+ def _get_meson_log(self) -> T.Optional[str]:
+ try:
+ with self._open_meson_log() as f:
+ return f.read()
+ except FileNotFoundError as e:
+ print(f"{e.filename!r} doesn't exist", file=sys.stderr)
return None
- with open(log, encoding='utf-8') as f:
- return f.read()
def _print_meson_log(self) -> None:
log = self._get_meson_log()
each['command'] = compiler + ' ' + f.read()
return contents
+ def get_meson_log_raw(self):
+ with self._open_meson_log() as f:
+ return f.read()
+
def get_meson_log(self):
- with open(os.path.join(self.builddir, 'meson-logs', 'meson-log.txt'), encoding='utf-8') as f:
+ with self._open_meson_log() as f:
return f.readlines()
def get_meson_log_compiler_checks(self):
Fetch a list command-lines run by meson for compiler checks.
Each command-line is returned as a list of arguments.
'''
- log = self.get_meson_log()
prefix = 'Command line:'
- cmds = [l[len(prefix):].split() for l in log if l.startswith(prefix)]
- return cmds
+ with self._open_meson_log() as log:
+ cmds = [l[len(prefix):].split() for l in log if l.startswith(prefix)]
+ return cmds
def get_meson_log_sanitychecks(self):
'''
Same as above, but for the sanity checks that were run
'''
- log = self.get_meson_log()
prefix = 'Sanity check compiler command line:'
- cmds = [l[len(prefix):].split() for l in log if l.startswith(prefix)]
- return cmds
+ with self._open_meson_log() as log:
+ cmds = [l[len(prefix):].split() for l in log if l.startswith(prefix)]
+ return cmds
def introspect(self, args):
if isinstance(args, str):
import functools
import re
import typing as T
+import zipfile
from pathlib import Path
from contextlib import contextmanager
return None
return final
+def get_classpath(fname: str) -> T.Optional[str]:
+ with zipfile.ZipFile(fname) as zip:
+ with zip.open('META-INF/MANIFEST.MF') as member:
+ contents = member.read().decode().strip()
+ lines = []
+ for line in contents.splitlines():
+ if line.startswith(' '):
+ # continuation line
+ lines[-1] += line[1:]
+ else:
+ lines.append(line)
+ manifest = {
+ k.lower(): v.strip() for k, v in [l.split(':', 1) for l in lines]
+ }
+ return manifest.get('class-path')
+
def get_path_without_cmd(cmd: str, path: str) -> str:
pathsep = os.pathsep
paths = OrderedSet([Path(p).resolve() for p in path.split(pathsep)])
self.assertEqual(libhello_nolib.get_compile_args(), [])
self.assertEqual(libhello_nolib.get_pkgconfig_variable('foo', [], None), 'bar')
self.assertEqual(libhello_nolib.get_pkgconfig_variable('prefix', [], None), self.prefix)
- if version_compare(libhello_nolib.check_pkgconfig(libhello_nolib.pkgbin),">=0.29.1"):
+ if version_compare(PkgConfigDependency.check_pkgconfig(env, libhello_nolib.pkgbin),">=0.29.1"):
self.assertEqual(libhello_nolib.get_pkgconfig_variable('escaped_var', [], None), r'hello\ world')
self.assertEqual(libhello_nolib.get_pkgconfig_variable('unescaped_var', [], None), 'hello world')
testdir = os.path.join(self.framework_test_dir, '4 qt')
self.init(testdir, extra_args=['-Dmethod=pkg-config'])
# Confirm that the dependency was found with pkg-config
- mesonlog = self.get_meson_log()
+ mesonlog = self.get_meson_log_raw()
if qt4 == 0:
- self.assertRegex('\n'.join(mesonlog),
+ self.assertRegex(mesonlog,
r'Run-time dependency qt4 \(modules: Core\) found: YES 4.* \(pkg-config\)')
if qt5 == 0:
- self.assertRegex('\n'.join(mesonlog),
+ self.assertRegex(mesonlog,
r'Run-time dependency qt5 \(modules: Core\) found: YES 5.* \(pkg-config\)')
@skip_if_not_base_option('b_sanitize')
testdir = os.path.join(self.framework_test_dir, '4 qt')
self.init(testdir, extra_args=['-Dmethod=qmake'])
# Confirm that the dependency was found with qmake
- mesonlog = self.get_meson_log()
- self.assertRegex('\n'.join(mesonlog),
+ mesonlog = self.get_meson_log_raw()
+ self.assertRegex(mesonlog,
r'Run-time dependency qt5 \(modules: Core\) found: YES .* \(qmake\)\n')
def test_qt6dependency_qmake_detection(self):
testdir = os.path.join(self.framework_test_dir, '4 qt')
self.init(testdir, extra_args=['-Dmethod=qmake'])
# Confirm that the dependency was found with qmake
- mesonlog = self.get_meson_log()
- self.assertRegex('\n'.join(mesonlog),
+ mesonlog = self.get_meson_log_raw()
+ self.assertRegex(mesonlog,
r'Run-time dependency qt6 \(modules: Core\) found: YES .* \(qmake\)\n')
def glob_sofiles_without_privdir(self, g):
from run_tests import (
+ Backend,
get_fake_env
)
self._single_implementation_compiler(
'java', 'javac', 'javac 9.99.77', '9.99.77')
+ @skip_if_not_language('java')
+ def test_java_classpath(self):
+ if self.backend is not Backend.ninja:
+ raise SkipTest('Jar is only supported with Ninja')
+ testdir = os.path.join(self.unit_test_dir, '110 classpath')
+ self.init(testdir)
+ self.build()
+ one_build_path = get_classpath(os.path.join(self.builddir, 'one.jar'))
+ self.assertIsNone(one_build_path)
+ two_build_path = get_classpath(os.path.join(self.builddir, 'two.jar'))
+ self.assertEqual(two_build_path, 'one.jar')
+ self.install()
+ one_install_path = get_classpath(os.path.join(self.installdir, 'usr/bin/one.jar'))
+ self.assertIsNone(one_install_path)
+ two_install_path = get_classpath(os.path.join(self.installdir, 'usr/bin/two.jar'))
+ self.assertIsNone(two_install_path)
+
@skip_if_not_language('swift')
def test_swift_compiler(self):
wrapper = self.helper_create_binary_wrapper(
self.assertNotIn(log_msg, output)
# Check if message is written to the meson log
- mesonlog = os.path.join(self.builddir, 'meson-logs/meson-log.txt')
- with open(mesonlog, mode='r', encoding='utf-8') as file:
- self.assertIn(log_msg, file.read())
+ mesonlog = self.get_meson_log_raw()
+ self.assertIn(log_msg, mesonlog)
def test_new_subproject_reconfigure(self):
testdir = os.path.join(self.unit_test_dir, '107 new subproject on reconfigure')
self._git_remote(['commit', '--no-gpg-sign', '--allow-empty', '-m', f'tag {tag} commit'], name)
self._git_remote(['tag', '--no-sign', tag], name)
- def _wrap_create_git(self, name, revision='master'):
+ def _wrap_create_git(self, name, revision='master', depth=None):
path = self.root_dir / name
with open(str((self.subprojects_dir / name).with_suffix('.wrap')), 'w', encoding='utf-8') as f:
+ if depth is None:
+ depth_line = ''
+ else:
+ depth_line = 'depth = {}'.format(depth)
f.write(textwrap.dedent(
'''
[wrap-git]
url={}
revision={}
- '''.format(os.path.abspath(str(path)), revision)))
+ {}
+ '''.format(os.path.abspath(str(path)), revision, depth_line)))
def _wrap_create_file(self, name, tarball='dummy.tar.gz'):
path = self.root_dir / tarball
self._subprojects_cmd(['update', '--reset'])
self.assertEqual(self._git_local_commit(subp_name), self._git_remote_commit(subp_name))
+ # Create a fake remote git repository and a wrap file targeting
+ # HEAD and depth = 1. Checks that "meson subprojects download" works.
+ subp_name = 'sub3'
+ self._git_create_remote_repo(subp_name)
+ self._wrap_create_git(subp_name, revision='head', depth='1')
+ self._subprojects_cmd(['download'])
+ self.assertPathExists(str(self.subprojects_dir / subp_name))
+ self._git_config(self.subprojects_dir / subp_name)
+
@skipIfNoExecutable('true')
def test_foreach(self):
self._create_project(self.subprojects_dir / 'sub_file')
def assert_error(self, events):
    # The next parsed TAP event must be exactly a TAPParser.Error
    # (type equality, not isinstance, to reject subclasses).
    event_type = type(next(events))
    self.assertEqual(event_type, TAPParser.Error)
def assert_unexpected(self, events, **kwargs):
    # The next event must be an UnknownLine whose fields match kwargs.
    expected = TAPParser.UnknownLine(**kwargs)
    self.assertEqual(next(events), expected)

def assert_bailout(self, events, **kwargs):
    # The next event must be a Bailout whose fields match kwargs.
    expected = TAPParser.Bailout(**kwargs)
    self.assertEqual(next(events), expected)
def test_unexpected(self):
    # A line that is neither a test point, plan, nor directive must surface
    # as an "unexpected" event without aborting the parse.
    tap_input = '1..1\ninvalid\nok 1'
    events = self.parse_tap(tap_input)
    self.assert_plan(events, num_tests=1, late=False)
    self.assert_unexpected(events, message='invalid', lineno=2)
    self.assert_test(events, number=1, name='', result=TestResult.OK)
    self.assert_last(events)