From e60b373cc98c74468683f48ed814338a5b652086 Mon Sep 17 00:00:00 2001 From: DongHun Kwak Date: Thu, 10 Jan 2019 10:54:01 +0900 Subject: [PATCH] Imported Upstream version 0.49.0 Change-Id: Ic6ce24b5d5911cc280317daa491d36736e841eff Signed-off-by: DongHun Kwak --- PKG-INFO | 2 +- README.md | 11 +- cross/ccrx.txt | 20 + data/syntax-highlighting/vim/README | 3 +- data/syntax-highlighting/vim/ftplugin/meson.vim | 15 + data/syntax-highlighting/vim/indent/meson.vim | 4 - man/meson.1 | 2 +- manual tests/4 standalone binaries/myapp.iss | 36 +- meson.egg-info/PKG-INFO | 2 +- meson.egg-info/SOURCES.txt | 63 ++ mesonbuild/astinterpreter.py | 94 ++- mesonbuild/backend/backends.py | 81 +-- mesonbuild/backend/ninjabackend.py | 59 +- mesonbuild/backend/vs2010backend.py | 74 ++- mesonbuild/backend/xcodebackend.py | 150 +++-- mesonbuild/build.py | 198 ++++-- mesonbuild/compilers/__init__.py | 10 + mesonbuild/compilers/c.py | 296 ++++++--- mesonbuild/compilers/c_function_attributes.py | 8 +- mesonbuild/compilers/compilers.py | 411 ++++++++++-- mesonbuild/compilers/cpp.py | 116 +++- mesonbuild/compilers/d.py | 62 +- mesonbuild/compilers/fortran.py | 26 +- mesonbuild/coredata.py | 144 ++++- mesonbuild/dependencies/__init__.py | 5 +- mesonbuild/dependencies/base.py | 684 +++++++++++++++++++- mesonbuild/dependencies/boost.py | 6 +- mesonbuild/dependencies/data/CMakeLists.txt | 93 +++ mesonbuild/dependencies/misc.py | 52 +- mesonbuild/dependencies/platform.py | 2 +- mesonbuild/dependencies/ui.py | 72 ++- mesonbuild/environment.py | 522 ++++++++++++---- mesonbuild/interpreter.py | 459 +++++++------- mesonbuild/interpreterbase.py | 85 ++- mesonbuild/linkers.py | 51 ++ mesonbuild/mconf.py | 33 +- mesonbuild/mesonlib.py | 142 ++++- mesonbuild/mesonmain.py | 466 ++++---------- mesonbuild/minit.py | 8 +- mesonbuild/minstall.py | 30 +- mesonbuild/mintro.py | 122 +++- mesonbuild/mlog.py | 5 + mesonbuild/modules/gnome.py | 78 ++- mesonbuild/modules/hotdoc.py | 2 +- mesonbuild/modules/i18n.py | 4 
+- mesonbuild/modules/pkgconfig.py | 63 +- mesonbuild/modules/python.py | 12 +- mesonbuild/modules/python3.py | 8 +- mesonbuild/modules/qt.py | 26 +- mesonbuild/modules/qt4.py | 7 +- mesonbuild/modules/qt5.py | 7 +- mesonbuild/modules/windows.py | 77 ++- mesonbuild/mparser.py | 21 +- mesonbuild/msetup.py | 218 +++++++ mesonbuild/msubprojects.py | 226 +++++++ mesonbuild/mtest.py | 93 ++- mesonbuild/rewriter.py | 11 +- mesonbuild/scripts/coverage.py | 6 +- mesonbuild/scripts/depfixer.py | 3 + mesonbuild/scripts/dist.py | 54 +- mesonbuild/scripts/gtkdochelper.py | 19 +- mesonbuild/scripts/meson_exe.py | 2 + mesonbuild/scripts/scanbuild.py | 21 +- mesonbuild/wrap/__init__.py | 14 + mesonbuild/wrap/wrap.py | 408 ++++++------ mesonbuild/wrap/wraptool.py | 25 +- run_cross_test.py | 26 +- run_meson_command_tests.py | 23 +- run_project_tests.py | 123 ++-- run_tests.py | 125 ++-- run_unittests.py | 689 +++++++++++++++++++-- setup.cfg | 1 + setup.py | 5 +- .../common/10 man install/installed_files.txt | 10 +- test cases/common/100 manygen/subdir/manygen.py | 32 +- test cases/common/100 manygen/subdir/meson.build | 5 +- test cases/common/112 spaces backslash/meson.build | 2 +- test cases/common/116 pathjoin/meson.build | 27 +- .../common/122 shared module/installed_files.txt | 5 +- test cases/common/122 shared module/meson.build | 8 +- .../common/123 llvm ir and assembly/meson.build | 11 +- test cases/common/124 cpp and asm/meson.build | 2 +- test cases/common/127 no buildincdir/meson.build | 2 +- test cases/common/13 pch/meson.build | 7 +- test cases/common/13 pch/mixed/meson.build | 3 +- .../common/132 generated assembly/meson.build | 4 +- test cases/common/137 get define/meson.build | 3 + test cases/common/138 c cpp and asm/meson.build | 2 +- .../14 configure file/differentafterbasename1.in | 0 .../14 configure file/differentafterbasename2.in | 0 test cases/common/14 configure file/meson.build | 87 ++- test cases/common/14 configure file/prog9.c | 18 + .../common/14 
configure file/sameafterbasename.in | 0 .../common/14 configure file/sameafterbasename.in2 | 0 test cases/common/143 C and CPP link/meson.build | 11 +- test cases/common/152 simd/simd_mmx.c | 4 + test cases/common/152 simd/simd_sse2.c | 2 +- test cases/common/152 simd/simd_sse3.c | 2 +- test cases/common/152 simd/simd_sse41.c | 2 +- test cases/common/152 simd/simd_sse42.c | 2 +- test cases/common/152 simd/simd_ssse3.c | 2 +- test cases/common/152 simd/simdchecker.c | 158 +++-- test cases/common/152 simd/simdfuncs.h | 8 + .../158 wrap file should not failed/meson.build | 5 +- .../subprojects/foo.wrap | 11 + .../subprojects/packagecache/foo-1.0-patch.tar.xz | Bin 0 -> 232 bytes .../subprojects/packagecache/foo-1.0.tar.xz | Bin 0 -> 180 bytes test cases/common/164 disabler/meson.build | 8 + test cases/common/17 comparison/meson.build | 15 + test cases/common/186 has link arg/meson.build | 2 +- test cases/common/190 openmp/meson.build | 3 + .../common/196 install_mode/installed_files.txt | 2 +- .../common/204 function attributes/meson.build | 10 +- .../common/205 broken subproject/meson.build | 2 + .../subprojects/broken/broken.c | 1 + .../subprojects/broken/meson.build | 4 + test cases/common/206 argument syntax/meson.build | 25 + .../installed_files.txt | 15 + .../207 install name_prefix name_suffix/libfile.c | 14 + .../meson.build | 10 + test cases/common/208 kwarg entry/inc/prog.h | 3 + .../common/208 kwarg entry/installed_files.txt | 2 + test cases/common/208 kwarg entry/meson.build | 7 + test cases/common/208 kwarg entry/prog.c | 7 + .../common/25 library versions/installed_files.txt | 1 + test cases/common/25 library versions/lib.c | 13 +- test cases/common/44 options/meson.build | 2 + .../common/48 pkgconfig-gen/dependencies/custom.c | 3 + .../48 pkgconfig-gen/dependencies/meson.build | 3 +- .../49 custom install dirs/installed_files.txt | 4 +- test cases/common/64 foreach/meson.build | 13 + test cases/common/91 default options/meson.build | 5 +- test 
cases/csharp/1 basic/installed_files.txt | 1 + test cases/csharp/2 library/installed_files.txt | 5 +- test cases/d/9 features/app.d | 24 + test cases/d/9 features/meson.build | 62 +- test cases/failing/88 dub compiler/meson.build | 6 + .../failing/90 subproj not-found dep/meson.build | 2 + .../subprojects/somesubproj/meson.build | 3 + test cases/failing/91 kwarg dupe/meson.build | 6 + test cases/failing/91 kwarg dupe/prog.c | 6 + test cases/fortran/9 cpp/meson.build | 13 +- test cases/frameworks/1 boost/meson.build | 6 + test cases/frameworks/10 gtk-doc/doc/meson.build | 15 + .../frameworks/10 gtk-doc/installed_files.txt | 28 + test cases/frameworks/15 llvm/meson.build | 25 +- test cases/frameworks/17 mpi/is_broken_ubuntu.py | 2 +- test cases/frameworks/17 mpi/meson.build | 8 +- test cases/frameworks/17 mpi/meson.build.orig | 49 ++ .../frameworks/23 hotdoc/installed_files.txt | 3 +- test cases/frameworks/23 hotdoc/meson.build | 6 + .../frameworks/24 libgcrypt/libgcrypt_prog.c | 8 + test cases/frameworks/24 libgcrypt/meson.build | 23 + test cases/frameworks/4 qt/meson.build | 4 + .../frameworks/6 gettext/data/data3/meson.build | 9 + .../6 gettext/data/data3/test.desktop.in | 6 + test cases/frameworks/6 gettext/data/meson.build | 2 + .../6 gettext/generated/desktopgenerator.py | 8 +- .../frameworks/6 gettext/generated/meson.build | 2 + .../frameworks/6 gettext/installed_files.txt | 2 + test cases/frameworks/7 gnome/resources/res3.txt | 1 + .../com/mesonbuild/Config.java.in | 5 + .../com/mesonbuild/Simple.java | 12 + .../com/mesonbuild/TextPrinter.java | 14 + .../com/mesonbuild/meson.build | 8 + .../java/8 codegen custom target/meson.build | 15 + .../linuxlike/13 cmake dependency/incdir/myinc.h | 3 + .../linuxlike/13 cmake dependency/meson.build | 56 ++ .../linuxlike/13 cmake dependency/prog-checkver.c | 15 + test cases/linuxlike/13 cmake dependency/prog.c | 8 + test cases/nasm/1 configure file/meson.build | 6 + test cases/unit/35 dist script/meson.build | 2 +- test 
cases/unit/35 dist script/replacer.py | 6 +- test cases/unit/45 vscpp17/main.cpp | 7 + test cases/unit/45 vscpp17/meson.build | 4 + .../46 native dep pkgconfig var/cross_pkgconfig.py | 12 + .../cross_pkgconfig/dep_tester.pc | 5 + .../unit/46 native dep pkgconfig var/meson.build | 15 + .../46 native dep pkgconfig var/meson_options.txt | 6 + .../native_pkgconfig/dep_tester.pc | 5 + test cases/unit/46 native file binary/meson.build | 21 + .../unit/46 native file binary/meson_options.txt | 5 + test cases/unit/46 reconfigure/main.c | 4 + test cases/unit/46 reconfigure/meson.build | 9 + test cases/unit/46 reconfigure/meson_options.txt | 4 + test cases/unit/47 testsetup default/envcheck.py | 11 + test cases/unit/47 testsetup default/meson.build | 23 + .../unit/48 pkgconfig csharp library/meson.build | 10 + .../unit/48 pkgconfig csharp library/somelib.cs | 12 + test cases/unit/49 ldflagdedup/bob.c | 5 + test cases/unit/49 ldflagdedup/meson.build | 12 + test cases/unit/49 ldflagdedup/prog.c | 7 + test cases/windows/16 gui app/meson.build | 10 +- .../windows/7 dll versioning/installed_files.txt | 10 +- test cases/windows/7 dll versioning/meson.build | 4 +- 195 files changed, 6436 insertions(+), 1980 deletions(-) create mode 100644 cross/ccrx.txt create mode 100644 data/syntax-highlighting/vim/ftplugin/meson.vim create mode 100644 mesonbuild/dependencies/data/CMakeLists.txt create mode 100644 mesonbuild/msetup.py create mode 100644 mesonbuild/msubprojects.py create mode 100644 test cases/common/14 configure file/differentafterbasename1.in create mode 100644 test cases/common/14 configure file/differentafterbasename2.in create mode 100644 test cases/common/14 configure file/prog9.c create mode 100644 test cases/common/14 configure file/sameafterbasename.in create mode 100644 test cases/common/14 configure file/sameafterbasename.in2 create mode 100644 test cases/common/158 wrap file should not failed/subprojects/foo.wrap create mode 100644 test cases/common/158 wrap file should 
not failed/subprojects/packagecache/foo-1.0-patch.tar.xz create mode 100644 test cases/common/158 wrap file should not failed/subprojects/packagecache/foo-1.0.tar.xz create mode 100644 test cases/common/205 broken subproject/meson.build create mode 100644 test cases/common/205 broken subproject/subprojects/broken/broken.c create mode 100644 test cases/common/205 broken subproject/subprojects/broken/meson.build create mode 100644 test cases/common/206 argument syntax/meson.build create mode 100644 test cases/common/207 install name_prefix name_suffix/installed_files.txt create mode 100644 test cases/common/207 install name_prefix name_suffix/libfile.c create mode 100644 test cases/common/207 install name_prefix name_suffix/meson.build create mode 100644 test cases/common/208 kwarg entry/inc/prog.h create mode 100644 test cases/common/208 kwarg entry/installed_files.txt create mode 100644 test cases/common/208 kwarg entry/meson.build create mode 100644 test cases/common/208 kwarg entry/prog.c create mode 100644 test cases/common/48 pkgconfig-gen/dependencies/custom.c create mode 100644 test cases/failing/90 subproj not-found dep/meson.build create mode 100644 test cases/failing/90 subproj not-found dep/subprojects/somesubproj/meson.build create mode 100644 test cases/failing/91 kwarg dupe/meson.build create mode 100644 test cases/failing/91 kwarg dupe/prog.c create mode 100644 test cases/frameworks/17 mpi/meson.build.orig create mode 100644 test cases/frameworks/24 libgcrypt/libgcrypt_prog.c create mode 100644 test cases/frameworks/24 libgcrypt/meson.build create mode 100644 test cases/frameworks/6 gettext/data/data3/meson.build create mode 100644 test cases/frameworks/6 gettext/data/data3/test.desktop.in create mode 100644 test cases/frameworks/7 gnome/resources/res3.txt create mode 100644 test cases/java/8 codegen custom target/com/mesonbuild/Config.java.in create mode 100644 test cases/java/8 codegen custom target/com/mesonbuild/Simple.java create mode 100644 test 
cases/java/8 codegen custom target/com/mesonbuild/TextPrinter.java create mode 100644 test cases/java/8 codegen custom target/com/mesonbuild/meson.build create mode 100644 test cases/java/8 codegen custom target/meson.build create mode 100644 test cases/linuxlike/13 cmake dependency/incdir/myinc.h create mode 100644 test cases/linuxlike/13 cmake dependency/meson.build create mode 100644 test cases/linuxlike/13 cmake dependency/prog-checkver.c create mode 100644 test cases/linuxlike/13 cmake dependency/prog.c create mode 100644 test cases/unit/45 vscpp17/main.cpp create mode 100644 test cases/unit/45 vscpp17/meson.build create mode 100755 test cases/unit/46 native dep pkgconfig var/cross_pkgconfig.py create mode 100644 test cases/unit/46 native dep pkgconfig var/cross_pkgconfig/dep_tester.pc create mode 100644 test cases/unit/46 native dep pkgconfig var/meson.build create mode 100644 test cases/unit/46 native dep pkgconfig var/meson_options.txt create mode 100644 test cases/unit/46 native dep pkgconfig var/native_pkgconfig/dep_tester.pc create mode 100644 test cases/unit/46 native file binary/meson.build create mode 100644 test cases/unit/46 native file binary/meson_options.txt create mode 100644 test cases/unit/46 reconfigure/main.c create mode 100644 test cases/unit/46 reconfigure/meson.build create mode 100644 test cases/unit/46 reconfigure/meson_options.txt create mode 100644 test cases/unit/47 testsetup default/envcheck.py create mode 100644 test cases/unit/47 testsetup default/meson.build create mode 100644 test cases/unit/48 pkgconfig csharp library/meson.build create mode 100644 test cases/unit/48 pkgconfig csharp library/somelib.cs create mode 100644 test cases/unit/49 ldflagdedup/bob.c create mode 100644 test cases/unit/49 ldflagdedup/meson.build create mode 100644 test cases/unit/49 ldflagdedup/prog.c diff --git a/PKG-INFO b/PKG-INFO index 7098b5a..7a807ad 100644 --- a/PKG-INFO +++ b/PKG-INFO @@ -1,6 +1,6 @@ Metadata-Version: 1.2 Name: meson -Version: 
0.48.2 +Version: 0.49.0 Summary: A high performance build system Home-page: http://mesonbuild.com Author: Jussi Pakkanen diff --git a/README.md b/README.md index 3a43b41..8f03b42 100644 --- a/README.md +++ b/README.md @@ -9,7 +9,10 @@ build system. [![PyPI](https://img.shields.io/pypi/v/meson.svg)](https://pypi.python.org/pypi/meson) [![Travis](https://travis-ci.org/mesonbuild/meson.svg?branch=master)](https://travis-ci.org/mesonbuild/meson) [![Appveyor](https://ci.appveyor.com/api/projects/status/7jfaotriu8d8ncov?svg=true)](https://ci.appveyor.com/project/mesonbuild/meson) +[![Build Status](https://dev.azure.com/jussi0947/jussi/_apis/build/status/mesonbuild.meson)](https://dev.azure.com/jussi0947/jussi/_build/latest?definitionId=1) [![Codecov](https://codecov.io/gh/mesonbuild/meson/coverage.svg?branch=master)](https://codecov.io/gh/mesonbuild/meson/branch/master) +[![Code Quality: Python](https://img.shields.io/lgtm/grade/python/g/mesonbuild/meson.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/mesonbuild/meson/context:python) +[![Total Alerts](https://img.shields.io/lgtm/alerts/g/mesonbuild/meson.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/mesonbuild/meson/alerts) #### Dependencies @@ -65,7 +68,7 @@ you may need to run this command with sudo. #### Contributing -We love code contributions. See the contributing.txt file for +We love code contributions. See the [contributing.md](contributing.md) file for details. @@ -73,10 +76,14 @@ details. The irc channel for Meson is `#mesonbuild` over at Freenode. +You can use [FreeNode's official webchat][meson_irc] +to connect to this channel. + +[meson_irc]: https://webchat.freenode.net/?channels=%23mesonbuild #### Further info More information about the Meson build system can be found at the [project's home page](http://mesonbuild.com). -Meson is a registered trademark of Jussi Pakkanen +Meson is a registered trademark of Jussi Pakkanen. 
diff --git a/cross/ccrx.txt b/cross/ccrx.txt new file mode 100644 index 0000000..5474bb0 --- /dev/null +++ b/cross/ccrx.txt @@ -0,0 +1,20 @@ +# This file assumes that path to the Renesas CC-RX toolchain is added +# to the environment(PATH) variable, so that Meson can find +# ccrx and rlink while building. +[binaries] +c = 'ccrx' +cpp = 'ccrx' +ar = 'rlink' +strip = 'rlink' + +[properties] +# The '--cpu' option with the appropriate target type should be mentioned +# to cross compile c/c++ code with ccrx,. +c_args = ['--cpu=rx600'] +cpp_args = ['--cpu=rx600'] + +[host_machine] +system = 'bare metal' # Update with your system name - bare metal/OS. +cpu_family = 'rx' +cpu = 'rx600' +endian = 'little' diff --git a/data/syntax-highlighting/vim/README b/data/syntax-highlighting/vim/README index 1afa243..95188fc 100644 --- a/data/syntax-highlighting/vim/README +++ b/data/syntax-highlighting/vim/README @@ -1,3 +1,4 @@ ftdetect sets the filetype +ftplugin sets Meson indentation rules +indent does Meson indentation syntax does Meson syntax highlighting -plugin does Meson indentation diff --git a/data/syntax-highlighting/vim/ftplugin/meson.vim b/data/syntax-highlighting/vim/ftplugin/meson.vim new file mode 100644 index 0000000..e21b61f --- /dev/null +++ b/data/syntax-highlighting/vim/ftplugin/meson.vim @@ -0,0 +1,15 @@ +" Vim filetype plugin file +" Language: meson +" Original Author: Laurent Pinchart +" Last Change: 2018 Nov 27 + +if exists("b:did_ftplugin") | finish | endif +let b:did_ftplugin = 1 +let s:keepcpo= &cpo +set cpo&vim + +setlocal shiftwidth=2 +setlocal softtabstop=2 + +let &cpo = s:keepcpo +unlet s:keepcpo diff --git a/data/syntax-highlighting/vim/indent/meson.vim b/data/syntax-highlighting/vim/indent/meson.vim index 8553ec0..a6444dc 100644 --- a/data/syntax-highlighting/vim/indent/meson.vim +++ b/data/syntax-highlighting/vim/indent/meson.vim @@ -29,10 +29,6 @@ setlocal cpo&vim let s:maxoff = 50 " maximum number of lines to look backwards for () -" Force sw=2 
sts=2 because that's required by convention -setlocal shiftwidth=2 -setlocal softtabstop=2 - function GetMesonIndent(lnum) echom getline(line(".")) diff --git a/man/meson.1 b/man/meson.1 index 04a1409..702ac4d 100644 --- a/man/meson.1 +++ b/man/meson.1 @@ -1,4 +1,4 @@ -.TH MESON "1" "November 2018" "meson 0.48.2" "User Commands" +.TH MESON "1" "December 2018" "meson 0.49.0" "User Commands" .SH NAME meson - a high productivity build system .SH DESCRIPTION diff --git a/manual tests/4 standalone binaries/myapp.iss b/manual tests/4 standalone binaries/myapp.iss index dda1537..2bd441d 100644 --- a/manual tests/4 standalone binaries/myapp.iss +++ b/manual tests/4 standalone binaries/myapp.iss @@ -1,18 +1,18 @@ -; Innosetup file for My app. - -[Setup] -AppName=My App -AppVersion=1.0 -DefaultDirName={pf}\My App -DefaultGroupName=My App -UninstallDisplayIcon={app}\myapp.exe -Compression=lzma2 -SolidCompression=yes -OutputDir=. - -[Files] -Source: "myapp.exe"; DestDir: "{app}" -Source: "SDL2.dll"; DestDir: "{app}" - -;[Icons] -;Name: "{group}\My App"; Filename: "{app}\myapp.exe" +; Innosetup file for My app. + +[Setup] +AppName=My App +AppVersion=1.0 +DefaultDirName={pf}\My App +DefaultGroupName=My App +UninstallDisplayIcon={app}\myapp.exe +Compression=lzma2 +SolidCompression=yes +OutputDir=. 
+ +[Files] +Source: "myapp.exe"; DestDir: "{app}" +Source: "SDL2.dll"; DestDir: "{app}" + +;[Icons] +;Name: "{group}\My App"; Filename: "{app}\myapp.exe" diff --git a/meson.egg-info/PKG-INFO b/meson.egg-info/PKG-INFO index 7098b5a..7a807ad 100644 --- a/meson.egg-info/PKG-INFO +++ b/meson.egg-info/PKG-INFO @@ -1,6 +1,6 @@ Metadata-Version: 1.2 Name: meson -Version: 0.48.2 +Version: 0.49.0 Summary: A high performance build system Home-page: http://mesonbuild.com Author: Jussi Pakkanen diff --git a/meson.egg-info/SOURCES.txt b/meson.egg-info/SOURCES.txt index aead6bc..249ff81 100644 --- a/meson.egg-info/SOURCES.txt +++ b/meson.egg-info/SOURCES.txt @@ -13,6 +13,7 @@ setup.cfg setup.py cross/armcc.txt cross/armclang.txt +cross/ccrx.txt cross/iphone.txt cross/linux-mingw-w64-32bit.txt cross/linux-mingw-w64-64bit.txt @@ -25,6 +26,7 @@ data/shell-completions/zsh/_meson data/syntax-highlighting/emacs/meson.el data/syntax-highlighting/vim/README data/syntax-highlighting/vim/ftdetect/meson.vim +data/syntax-highlighting/vim/ftplugin/meson.vim data/syntax-highlighting/vim/indent/meson.vim data/syntax-highlighting/vim/syntax/meson.vim graphics/meson_logo.svg @@ -98,6 +100,8 @@ mesonbuild/minstall.py mesonbuild/mintro.py mesonbuild/mlog.py mesonbuild/mparser.py +mesonbuild/msetup.py +mesonbuild/msubprojects.py mesonbuild/mtest.py mesonbuild/optinterpreter.py mesonbuild/rewriter.py @@ -129,6 +133,7 @@ mesonbuild/dependencies/dev.py mesonbuild/dependencies/misc.py mesonbuild/dependencies/platform.py mesonbuild/dependencies/ui.py +mesonbuild/dependencies/data/CMakeLists.txt mesonbuild/modules/__init__.py mesonbuild/modules/dlang.py mesonbuild/modules/gnome.py @@ -401,6 +406,8 @@ test cases/common/14 configure file/config5.h.in test cases/common/14 configure file/config6.h.in test cases/common/14 configure file/config7.h.in test cases/common/14 configure file/config8.h.in +test cases/common/14 configure file/differentafterbasename1.in +test cases/common/14 configure 
file/differentafterbasename2.in test cases/common/14 configure file/dummy.dat test cases/common/14 configure file/dumpprog.c test cases/common/14 configure file/file_contains.py @@ -417,6 +424,9 @@ test cases/common/14 configure file/prog4.c test cases/common/14 configure file/prog5.c test cases/common/14 configure file/prog6.c test cases/common/14 configure file/prog7.c +test cases/common/14 configure file/prog9.c +test cases/common/14 configure file/sameafterbasename.in +test cases/common/14 configure file/sameafterbasename.in2 test cases/common/14 configure file/test.py.in test cases/common/14 configure file/touch.py test cases/common/14 configure file/subdir/meson.build @@ -567,9 +577,12 @@ test cases/common/158 wrap file should not failed/meson.build test cases/common/158 wrap file should not failed/src/meson.build test cases/common/158 wrap file should not failed/src/subprojects/prog.c test cases/common/158 wrap file should not failed/src/subprojects/foo/prog2.c +test cases/common/158 wrap file should not failed/subprojects/foo.wrap test cases/common/158 wrap file should not failed/subprojects/zlib.wrap test cases/common/158 wrap file should not failed/subprojects/foo-1.0/foo.c test cases/common/158 wrap file should not failed/subprojects/foo-1.0/meson.build +test cases/common/158 wrap file should not failed/subprojects/packagecache/foo-1.0-patch.tar.xz +test cases/common/158 wrap file should not failed/subprojects/packagecache/foo-1.0.tar.xz test cases/common/158 wrap file should not failed/subprojects/packagecache/zlib-1.2.8-8-wrap.zip test cases/common/158 wrap file should not failed/subprojects/packagecache/zlib-1.2.8.tar.gz test cases/common/158 wrap file should not failed/subprojects/zlib-1.2.8/foo.c @@ -771,6 +784,17 @@ test cases/common/203 subproject with features/subprojects/sub/lib/meson.build test cases/common/203 subproject with features/subprojects/sub/lib/sub.c test cases/common/203 subproject with features/subprojects/sub/lib/sub.h test 
cases/common/204 function attributes/meson.build +test cases/common/205 broken subproject/meson.build +test cases/common/205 broken subproject/subprojects/broken/broken.c +test cases/common/205 broken subproject/subprojects/broken/meson.build +test cases/common/206 argument syntax/meson.build +test cases/common/207 install name_prefix name_suffix/installed_files.txt +test cases/common/207 install name_prefix name_suffix/libfile.c +test cases/common/207 install name_prefix name_suffix/meson.build +test cases/common/208 kwarg entry/installed_files.txt +test cases/common/208 kwarg entry/meson.build +test cases/common/208 kwarg entry/prog.c +test cases/common/208 kwarg entry/inc/prog.h test cases/common/21 global arg/meson.build test cases/common/21 global arg/prog.c test cases/common/21 global arg/prog.cc @@ -872,6 +896,7 @@ test cases/common/48 pkgconfig-gen/installed_files.txt test cases/common/48 pkgconfig-gen/meson.build test cases/common/48 pkgconfig-gen/simple.c test cases/common/48 pkgconfig-gen/simple.h +test cases/common/48 pkgconfig-gen/dependencies/custom.c test cases/common/48 pkgconfig-gen/dependencies/exposed.c test cases/common/48 pkgconfig-gen/dependencies/internal.c test cases/common/48 pkgconfig-gen/dependencies/meson.build @@ -1401,6 +1426,10 @@ test cases/failing/89 link_with custom target/lib_generator.py test cases/failing/89 link_with custom target/meson.build test cases/failing/9 missing extra file/meson.build test cases/failing/9 missing extra file/prog.c +test cases/failing/90 subproj not-found dep/meson.build +test cases/failing/90 subproj not-found dep/subprojects/somesubproj/meson.build +test cases/failing/91 kwarg dupe/meson.build +test cases/failing/91 kwarg dupe/prog.c test cases/fortran/1 basic/meson.build test cases/fortran/1 basic/simple.f90 test cases/fortran/10 find library/gzip.f90 @@ -1495,6 +1524,7 @@ test cases/frameworks/17 mpi/main.c test cases/frameworks/17 mpi/main.cpp test cases/frameworks/17 mpi/main.f90 test 
cases/frameworks/17 mpi/meson.build +test cases/frameworks/17 mpi/meson.build.orig test cases/frameworks/18 vulkan/meson.build test cases/frameworks/18 vulkan/vulkanprog.c test cases/frameworks/19 pcap/meson.build @@ -1520,6 +1550,8 @@ test cases/frameworks/23 hotdoc/meson.build test cases/frameworks/23 hotdoc/doc/index.md test cases/frameworks/23 hotdoc/doc/meson.build test cases/frameworks/23 hotdoc/doc/sitemap.txt +test cases/frameworks/24 libgcrypt/libgcrypt_prog.c +test cases/frameworks/24 libgcrypt/meson.build test cases/frameworks/3 gmock/gmocktest.cc test cases/frameworks/3 gmock/meson.build test cases/frameworks/4 qt/main.cpp @@ -1565,6 +1597,8 @@ test cases/frameworks/6 gettext/meson.build test cases/frameworks/6 gettext/data/meson.build test cases/frameworks/6 gettext/data/test.desktop.in test cases/frameworks/6 gettext/data/test2.desktop.in +test cases/frameworks/6 gettext/data/data3/meson.build +test cases/frameworks/6 gettext/data/data3/test.desktop.in test cases/frameworks/6 gettext/data2/meson.build test cases/frameworks/6 gettext/data2/test.desktop.in test cases/frameworks/6 gettext/generated/desktopgenerator.py @@ -1614,6 +1648,7 @@ test cases/frameworks/7 gnome/resources/generated-main.c test cases/frameworks/7 gnome/resources/generated.gresource.xml test cases/frameworks/7 gnome/resources/meson.build test cases/frameworks/7 gnome/resources/myresource.gresource.xml +test cases/frameworks/7 gnome/resources/res3.txt test cases/frameworks/7 gnome/resources/resources.py test cases/frameworks/7 gnome/resources/simple-main.c test cases/frameworks/7 gnome/resources/simple.gresource.xml @@ -1658,6 +1693,11 @@ test cases/java/7 linking/meson.build test cases/java/7 linking/com/mesonbuild/Linking.java test cases/java/7 linking/sub/meson.build test cases/java/7 linking/sub/com/mesonbuild/SimpleLib.java +test cases/java/8 codegen custom target/meson.build +test cases/java/8 codegen custom target/com/mesonbuild/Config.java.in +test cases/java/8 codegen custom 
target/com/mesonbuild/Simple.java +test cases/java/8 codegen custom target/com/mesonbuild/TextPrinter.java +test cases/java/8 codegen custom target/com/mesonbuild/meson.build test cases/linuxlike/1 pkg-config/meson.build test cases/linuxlike/1 pkg-config/prog-checkver.c test cases/linuxlike/1 pkg-config/prog.c @@ -1678,6 +1718,10 @@ test cases/linuxlike/12 subprojects in subprojects/subprojects/b/b.h test cases/linuxlike/12 subprojects in subprojects/subprojects/b/meson.build test cases/linuxlike/12 subprojects in subprojects/subprojects/c/c.h test cases/linuxlike/12 subprojects in subprojects/subprojects/c/meson.build +test cases/linuxlike/13 cmake dependency/meson.build +test cases/linuxlike/13 cmake dependency/prog-checkver.c +test cases/linuxlike/13 cmake dependency/prog.c +test cases/linuxlike/13 cmake dependency/incdir/myinc.h test cases/linuxlike/2 external library/meson.build test cases/linuxlike/2 external library/prog.c test cases/linuxlike/3 linker script/bob.c @@ -2006,6 +2050,25 @@ test cases/unit/44 promote wrap/subprojects/s1/meson.build test cases/unit/44 promote wrap/subprojects/s1/subprojects/ambiguous/meson.build test cases/unit/44 promote wrap/subprojects/s2/meson.build test cases/unit/44 promote wrap/subprojects/s2/subprojects/ambiguous.wrap +test cases/unit/45 vscpp17/main.cpp +test cases/unit/45 vscpp17/meson.build +test cases/unit/46 native dep pkgconfig var/cross_pkgconfig.py +test cases/unit/46 native dep pkgconfig var/meson.build +test cases/unit/46 native dep pkgconfig var/meson_options.txt +test cases/unit/46 native dep pkgconfig var/cross_pkgconfig/dep_tester.pc +test cases/unit/46 native dep pkgconfig var/native_pkgconfig/dep_tester.pc +test cases/unit/46 native file binary/meson.build +test cases/unit/46 native file binary/meson_options.txt +test cases/unit/46 reconfigure/main.c +test cases/unit/46 reconfigure/meson.build +test cases/unit/46 reconfigure/meson_options.txt +test cases/unit/47 testsetup default/envcheck.py +test 
cases/unit/47 testsetup default/meson.build +test cases/unit/48 pkgconfig csharp library/meson.build +test cases/unit/48 pkgconfig csharp library/somelib.cs +test cases/unit/49 ldflagdedup/bob.c +test cases/unit/49 ldflagdedup/meson.build +test cases/unit/49 ldflagdedup/prog.c test cases/unit/5 compiler detection/compiler wrapper.py test cases/unit/5 compiler detection/meson.build test cases/unit/5 compiler detection/trivial.c diff --git a/mesonbuild/astinterpreter.py b/mesonbuild/astinterpreter.py index 32d0845..a447a55 100644 --- a/mesonbuild/astinterpreter.py +++ b/mesonbuild/astinterpreter.py @@ -18,7 +18,7 @@ from . import interpreterbase, mlog, mparser, mesonlib from . import environment -from .interpreterbase import InterpreterException, InvalidArguments +from .interpreterbase import InterpreterException, InvalidArguments, BreakRequest, ContinueRequest import os, sys @@ -46,7 +46,6 @@ REMOVE_SOURCE = 1 class AstInterpreter(interpreterbase.InterpreterBase): def __init__(self, source_root, subdir): super().__init__(source_root, subdir) - self.asts = {} self.funcs.update({'project': self.func_do_nothing, 'test': self.func_do_nothing, 'benchmark': self.func_do_nothing, @@ -76,7 +75,72 @@ class AstInterpreter(interpreterbase.InterpreterBase): 'vcs_tag': self.func_do_nothing, 'add_languages': self.func_do_nothing, 'declare_dependency': self.func_do_nothing, - 'files': self.func_files, + 'files': self.func_do_nothing, + 'executable': self.func_do_nothing, + 'static_library': self.func_do_nothing, + 'shared_library': self.func_do_nothing, + 'library': self.func_do_nothing, + 'build_target': self.func_do_nothing, + 'custom_target': self.func_do_nothing, + 'run_target': self.func_do_nothing, + 'subdir': self.func_do_nothing, + 'set_variable': self.func_do_nothing, + 'get_variable': self.func_do_nothing, + 'is_variable': self.func_do_nothing, + }) + + def func_do_nothing(self, node, args, kwargs): + return True + + def method_call(self, node): + return True + + def 
evaluate_arithmeticstatement(self, cur): + return 0 + + def evaluate_plusassign(self, node): + return 0 + + def evaluate_indexing(self, node): + return 0 + + def unknown_function_called(self, func_name): + pass + + def reduce_arguments(self, args): + assert(isinstance(args, mparser.ArgumentNode)) + if args.incorrect_order(): + raise InvalidArguments('All keyword arguments must be after positional arguments.') + return args.arguments, args.kwargs + + def evaluate_comparison(self, node): + return False + + def evaluate_foreach(self, node): + try: + self.evaluate_codeblock(node.block) + except ContinueRequest: + pass + except BreakRequest: + pass + + def evaluate_if(self, node): + for i in node.ifs: + self.evaluate_codeblock(i.block) + if not isinstance(node.elseblock, mparser.EmptyNode): + self.evaluate_codeblock(node.elseblock) + + def get_variable(self, varname): + return 0 + + def assignment(self, node): + pass + +class RewriterInterpreter(AstInterpreter): + def __init__(self, source_root, subdir): + super().__init__(source_root, subdir) + self.asts = {} + self.funcs.update({'files': self.func_files, 'executable': self.func_executable, 'static_library': self.func_static_lib, 'shared_library': self.func_shared_lib, @@ -90,12 +154,6 @@ class AstInterpreter(interpreterbase.InterpreterBase): 'is_variable': self.func_is_variable, }) - def func_do_nothing(self, node, args, kwargs): - return True - - def method_call(self, node): - return True - def func_executable(self, node, args, kwargs): if args[0] == self.targetname: if self.operation == ADD_SOURCE: @@ -147,21 +205,6 @@ class AstInterpreter(interpreterbase.InterpreterBase): return [args] return args - def evaluate_arithmeticstatement(self, cur): - return 0 - - def evaluate_plusassign(self, node): - return 0 - - def evaluate_indexing(self, node): - return 0 - - def reduce_arguments(self, args): - assert(isinstance(args, mparser.ArgumentNode)) - if args.incorrect_order(): - raise InvalidArguments('All keyword arguments 
must be after positional arguments.') - return args.arguments, args.kwargs - def transform(self): self.load_root_meson_file() self.asts[''] = self.ast @@ -181,9 +224,6 @@ class AstInterpreter(interpreterbase.InterpreterBase): self.filename = filename self.transform() - def unknown_function_called(self, func_name): - mlog.warning('Unknown function called: ' + func_name) - def add_source_to_target(self, node, args, kwargs): namespan = node.args.arguments[0].bytespan buildfilename = os.path.join(self.source_root, self.subdir, environment.build_filename) diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index 019d5fc..4040251 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -23,15 +23,11 @@ import subprocess from ..mesonlib import MesonException, OrderedSet from ..mesonlib import classify_unity_sources from ..mesonlib import File -from ..compilers import CompilerArgs, get_macos_dylib_install_name +from ..compilers import CompilerArgs, VisualStudioCCompiler from collections import OrderedDict import shlex from functools import lru_cache -@lru_cache(maxsize=None) -def get_target_macos_dylib_install_name(ld): - return get_macos_dylib_install_name(ld.prefix, ld.name, ld.suffix, ld.soversion) - class CleanTrees: ''' @@ -135,8 +131,8 @@ class Backend: self.build = build self.environment = build.environment self.processed_targets = {} - self.build_to_src = os.path.relpath(self.environment.get_source_dir(), - self.environment.get_build_dir()) + self.build_to_src = mesonlib.relpath(self.environment.get_source_dir(), + self.environment.get_build_dir()) def get_target_filename(self, t): if isinstance(t, build.CustomTarget): @@ -215,6 +211,7 @@ class Backend: def get_target_private_dir_abs(self, target): return os.path.join(self.environment.get_build_dir(), self.get_target_private_dir(target)) + @lru_cache(maxsize=None) def get_target_generated_dir(self, target, gensrc, src): """ Takes a BuildTarget, a generator source 
(CustomTarget or GeneratedList), @@ -446,7 +443,13 @@ class Backend: sources.append(File(True, dirpart, fnamepart)) # Filter out headers and all non-source files - sources = [s for s in sources if self.environment.is_source(s) and not self.environment.is_header(s)] + filtered_sources = [] + for s in sources: + if self.environment.is_source(s) and not self.environment.is_header(s): + filtered_sources.append(s) + elif self.environment.is_object(s): + result.append(s.relative_name()) + sources = filtered_sources # extobj could contain only objects and no sources if not sources: @@ -488,7 +491,7 @@ class Backend: return args extra_args = [] # Compiler-specific escaping is needed for -D args but not for any others - if compiler.get_id() == 'msvc': + if isinstance(compiler, VisualStudioCCompiler): # MSVC needs escaping when a -D argument ends in \ or \" for arg in args: if arg.startswith('-D') or arg.startswith('/D'): @@ -555,6 +558,8 @@ class Backend: # Set -fPIC for static libraries by default unless explicitly disabled if isinstance(target, build.StaticLibrary) and target.pic: commands += compiler.get_pic_args() + if isinstance(target, build.Executable) and target.pie: + commands += compiler.get_pie_args() # Add compile args needed to find external dependencies. Link args are # added while generating the link command. # NOTE: We must preserve the order in which external deps are @@ -602,6 +607,8 @@ class Backend: continue if compiler.get_language() == 'd': arg = '-Wl,' + arg + else: + arg = compiler.get_linker_lib_prefix() + arg args.append(arg) return args @@ -972,61 +979,11 @@ class Backend: with open(install_data_file, 'wb') as ofile: pickle.dump(d, ofile) - def get_target_install_dirs(self, t): - # Find the installation directory. 
- if isinstance(t, build.SharedModule): - default_install_dir = self.environment.get_shared_module_dir() - elif isinstance(t, build.SharedLibrary): - default_install_dir = self.environment.get_shared_lib_dir() - elif isinstance(t, build.StaticLibrary): - default_install_dir = self.environment.get_static_lib_dir() - elif isinstance(t, build.Executable): - default_install_dir = self.environment.get_bindir() - elif isinstance(t, build.CustomTarget): - default_install_dir = None - else: - assert(isinstance(t, build.BuildTarget)) - # XXX: Add BuildTarget-specific install dir cases here - default_install_dir = self.environment.get_libdir() - outdirs = t.get_custom_install_dir() - if outdirs[0] is not None and outdirs[0] != default_install_dir and outdirs[0] is not True: - # Either the value is set to a non-default value, or is set to - # False (which means we want this specific output out of many - # outputs to not be installed). - custom_install_dir = True - else: - custom_install_dir = False - outdirs[0] = default_install_dir - return outdirs, custom_install_dir - - def get_target_link_deps_mappings(self, t, prefix): - ''' - On macOS, we need to change the install names of all built libraries - that a target depends on using install_name_tool so that the target - continues to work after installation. For this, we need a dictionary - mapping of the install_name value to the new one, so we can change them - on install. 
- ''' - result = {} - if isinstance(t, build.StaticLibrary): - return result - for ld in t.get_all_link_deps(): - if ld is t or not isinstance(ld, build.SharedLibrary): - continue - old = get_target_macos_dylib_install_name(ld) - if old in result: - continue - fname = ld.get_filename() - outdirs, _ = self.get_target_install_dirs(ld) - new = os.path.join(prefix, outdirs[0], fname) - result.update({old: new}) - return result - def generate_target_install(self, d): for t in self.build.get_targets().values(): if not t.should_install(): continue - outdirs, custom_install_dir = self.get_target_install_dirs(t) + outdirs, custom_install_dir = t.get_install_dir(self.environment) # Sanity-check the outputs and install_dirs num_outdirs, num_out = len(outdirs), len(t.get_outputs()) if num_outdirs != 1 and num_outdirs != num_out: @@ -1041,7 +998,7 @@ class Backend: # Install primary build output (library/executable/jar, etc) # Done separately because of strip/aliases/rpath if outdirs[0] is not False: - mappings = self.get_target_link_deps_mappings(t, d.prefix) + mappings = t.get_link_deps_mapping(d.prefix, self.environment) i = TargetInstallData(self.get_target_filename(t), outdirs[0], t.get_aliases(), should_strip, mappings, t.install_rpath, install_mode) @@ -1136,7 +1093,7 @@ class Backend: if subdir is None: subdir = os.path.join(manroot, 'man' + num) srcabs = f.absolute_path(self.environment.get_source_dir(), self.environment.get_build_dir()) - dstabs = os.path.join(subdir, os.path.basename(f.fname) + '.gz') + dstabs = os.path.join(subdir, os.path.basename(f.fname)) i = [srcabs, dstabs, m.get_custom_install_mode()] d.man.append(i) diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index b564311..f49649b 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -29,7 +29,7 @@ from .. import build from .. import mlog from .. import dependencies from .. 
import compilers -from ..compilers import CompilerArgs, CCompiler +from ..compilers import CompilerArgs, CCompiler, VisualStudioCCompiler from ..linkers import ArLinker from ..mesonlib import File, MesonException, OrderedSet from ..mesonlib import get_compiler_for_source, has_path_sep @@ -169,7 +169,7 @@ class NinjaBackend(backends.Backend): Detect the search prefix to use.''' for compiler in self.build.compilers.values(): # Have to detect the dependency format - if compiler.id == 'msvc': + if isinstance(compiler, VisualStudioCCompiler): break else: # None of our compilers are MSVC, we're done. @@ -185,7 +185,8 @@ int dummy; # and locale dependent. Any attempt at converting it to # Python strings leads to failure. We _must_ do this detection # in raw byte mode and write the result in raw bytes. - pc = subprocess.Popen(['cl', '/showIncludes', '/c', 'incdetect.c'], + pc = subprocess.Popen([compiler.get_exelist(), + '/showIncludes', '/c', 'incdetect.c'], cwd=self.environment.get_scratch_dir(), stdout=subprocess.PIPE, stderr=subprocess.PIPE) (stdo, _) = pc.communicate() @@ -195,7 +196,7 @@ int dummy; # different locales have different messages with a different # number of colons. Match up to the the drive name 'd:\'. 
matchre = re.compile(rb"^(.*\s)[a-zA-Z]:\\.*stdio.h$") - for line in stdo.split(b'\r\n'): + for line in re.split(rb'\r?\n', stdo): match = matchre.match(line) if match: with open(tempfilename, 'ab') as binfile: @@ -767,6 +768,15 @@ int dummy; main_class = target.get_main_class() if main_class != '': e = 'e' + + # Add possible java generated files to src list + generated_sources = self.get_target_generated_sources(target) + for rel_src, gensrc in generated_sources.items(): + dirpart, fnamepart = os.path.split(rel_src) + raw_src = File(True, dirpart, fnamepart) + if rel_src.endswith('.java'): + src_list.append(raw_src) + for src in src_list: plain_class_path = self.generate_single_java_compile(src, target, compiler, outfile) class_list.append(plain_class_path) @@ -867,6 +877,10 @@ int dummy; def generate_single_java_compile(self, src, target, compiler, outfile): deps = [os.path.join(self.get_target_dir(l), l.get_filename()) for l in target.link_targets] + generated_sources = self.get_target_generated_sources(target) + for rel_src, gensrc in generated_sources.items(): + if rel_src.endswith('.java'): + deps.append(rel_src) args = [] args += compiler.get_buildtype_args(self.get_option_for_target('buildtype', target)) args += self.build.get_global_args(compiler, target.is_cross) @@ -1527,7 +1541,7 @@ rule FORTRAN_DEP_HACK%s command_template = ' command = {executable} $ARGS {cross_args} {output_args} {compile_only_args} $in\n' command = command_template.format( executable=' '.join([ninja_quote(i) for i in compiler.get_exelist()]), - cross_args=' '.join(self.get_cross_info_lang_args(compiler.language, is_cross)), + cross_args=' '.join(compiler.get_cross_extra_flags(self.environment, False)) if is_cross else '', output_args=' '.join(compiler.get_output_args('$out')), compile_only_args=' '.join(compiler.get_compile_only_args()) ) @@ -1538,14 +1552,6 @@ rule FORTRAN_DEP_HACK%s outfile.write('\n') self.created_llvm_ir_rule = True - def get_cross_info_lang_args(self, lang, 
is_cross): - if is_cross: - try: - return self.environment.cross_info.config['properties'][lang + '_args'] - except KeyError: - pass - return [] - def generate_compile_rule_for(self, langname, compiler, is_cross, outfile): if langname == 'java': if not is_cross: @@ -1579,7 +1585,11 @@ rule FORTRAN_DEP_HACK%s if d != '$out' and d != '$in': d = quote_func(d) quoted_depargs.append(d) - cross_args = self.get_cross_info_lang_args(langname, is_cross) + + if is_cross: + cross_args = compiler.get_cross_extra_flags(self.environment, False) + else: + cross_args = '' if compiler.can_linker_accept_rsp(): command_template = ''' command = {executable} @$out.rsp rspfile = $out.rsp @@ -1595,7 +1605,7 @@ rule FORTRAN_DEP_HACK%s compile_only_args=' '.join(compiler.get_compile_only_args()) ) description = ' description = Compiling %s object $out.\n' % compiler.get_display_language() - if compiler.get_id() == 'msvc': + if isinstance(compiler, VisualStudioCCompiler): deps = ' deps = msvc\n' else: deps = ' deps = gcc\n' @@ -1618,7 +1628,7 @@ rule FORTRAN_DEP_HACK%s cross_args = [] if is_cross: try: - cross_args = self.environment.cross_info.config['properties'][langname + '_args'] + cross_args = compiler.get_cross_extra_flags(self.environment, False) except KeyError: pass @@ -1627,7 +1637,7 @@ rule FORTRAN_DEP_HACK%s if d != '$out' and d != '$in': d = quote_func(d) quoted_depargs.append(d) - if compiler.get_id() == 'msvc': + if isinstance(compiler, VisualStudioCCompiler): output = '' else: output = ' '.join(compiler.get_output_args('$out')) @@ -1639,7 +1649,7 @@ rule FORTRAN_DEP_HACK%s compile_only_args=' '.join(compiler.get_compile_only_args()) ) description = ' description = Precompiling header %s.\n' % '$in' - if compiler.get_id() == 'msvc': + if isinstance(compiler, VisualStudioCCompiler): deps = ' deps = msvc\n' else: deps = ' deps = gcc\n' @@ -1830,7 +1840,7 @@ rule FORTRAN_DEP_HACK%s return compiler.get_no_stdinc_args() def get_compile_debugfile_args(self, compiler, target, 
objfile): - if compiler.id != 'msvc': + if not isinstance(compiler, VisualStudioCCompiler): return [] # The way MSVC uses PDB files is documented exactly nowhere so # the following is what we have been able to decipher via @@ -2194,7 +2204,7 @@ rule FORTRAN_DEP_HACK%s ''.format(target.get_basename()) raise InvalidArguments(msg) compiler = target.compilers[lang] - if compiler.id == 'msvc': + if isinstance(compiler, VisualStudioCCompiler): src = os.path.join(self.build_to_src, target.get_source_subdir(), pch[-1]) (commands, dep, dst, objs) = self.generate_msvc_pch_command(target, compiler, pch) extradep = os.path.join(self.build_to_src, target.get_source_subdir(), pch[0]) @@ -2245,6 +2255,8 @@ rule FORTRAN_DEP_HACK%s # If implib, and that's significant on this platform (i.e. Windows using either GCC or Visual Studio) if target.import_filename: commands += linker.gen_import_library_args(os.path.join(self.get_target_dir(target), target.import_filename)) + if target.pie: + commands += linker.get_pie_link_args() elif isinstance(target, build.SharedLibrary): if isinstance(target, build.SharedModule): options = self.environment.coredata.base_options @@ -2384,7 +2396,8 @@ rule FORTRAN_DEP_HACK%s # Add buildtype linker args: optimization level, etc. commands += linker.get_buildtype_linker_args(self.get_option_for_target('buildtype', target)) # Add /DEBUG and the pdb filename when using MSVC - commands += self.get_link_debugfile_args(linker, target, outname) + if self.get_option_for_target('debug', target): + commands += self.get_link_debugfile_args(linker, target, outname) # Add link args specific to this BuildTarget type, such as soname args, # PIC, import library generation, etc. 
commands += self.get_target_type_link_args(target, linker) @@ -2429,7 +2442,7 @@ rule FORTRAN_DEP_HACK%s for dep in target.get_external_deps(): # Extend without reordering or de-dup to preserve `-L -l` sets # https://github.com/mesonbuild/meson/issues/1718 - commands.extend_direct(dep.get_link_args()) + commands.extend_preserving_lflags(dep.get_link_args()) need_threads |= dep.need_threads() need_openmp |= dep.need_openmp() for d in target.get_dependencies(): @@ -2437,7 +2450,7 @@ rule FORTRAN_DEP_HACK%s for dep in d.get_external_deps(): need_threads |= dep.need_threads() need_openmp |= dep.need_openmp() - commands.extend_direct(dep.get_link_args()) + commands.extend_preserving_lflags(dep.get_link_args()) if need_openmp: commands += linker.openmp_flags() if need_threads: diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py index 2e86ca9..8ac88ce 100644 --- a/mesonbuild/backend/vs2010backend.py +++ b/mesonbuild/backend/vs2010backend.py @@ -25,8 +25,8 @@ from .. import dependencies from .. import mlog from .. 
import compilers from ..compilers import CompilerArgs -from ..mesonlib import MesonException, File, python_command -from ..environment import Environment +from ..mesonlib import MesonException, File, python_command, replace_if_different +from ..environment import Environment, build_filename def autodetect_vs_version(build): vs_version = os.getenv('VisualStudioVersion', None) @@ -69,6 +69,9 @@ def split_o_flags_args(args): o_flags += ['/O' + f for f in flags] return o_flags +def generate_guid_from_path(path, path_type): + return str(uuid.uuid5(uuid.NAMESPACE_URL, 'meson-vs-' + path_type + ':' + str(path))).upper() + class RegenInfo: def __init__(self, source_dir, build_dir, depfiles): self.source_dir = source_dir @@ -189,6 +192,33 @@ class Vs2010Backend(backends.Backend): with open(filename, 'wb') as f: pickle.dump(regeninfo, f) + def get_vcvars_command(self): + has_arch_values = 'VSCMD_ARG_TGT_ARCH' in os.environ and 'VSCMD_ARG_HOST_ARCH' in os.environ + + # Use vcvarsall.bat if we found it. + if 'VCINSTALLDIR' in os.environ: + vs_version = os.environ['VisualStudioVersion'] \ + if 'VisualStudioVersion' in os.environ else None + relative_path = 'Auxiliary\\Build\\' if vs_version == '15.0' else '' + script_path = os.environ['VCINSTALLDIR'] + relative_path + 'vcvarsall.bat' + if os.path.exists(script_path): + if has_arch_values: + target_arch = os.environ['VSCMD_ARG_TGT_ARCH'] + host_arch = os.environ['VSCMD_ARG_HOST_ARCH'] + else: + target_arch = os.environ.get('Platform', 'x86') + host_arch = target_arch + arch = host_arch + '_' + target_arch if host_arch != target_arch else target_arch + return '"%s" %s' % (script_path, arch) + + # Otherwise try the VS2017 Developer Command Prompt. 
+ if 'VS150COMNTOOLS' in os.environ and has_arch_values: + script_path = os.environ['VS150COMNTOOLS'] + 'VsDevCmd.bat' + if os.path.exists(script_path): + return '"%s" -arch=%s -host_arch=%s' % \ + (script_path, os.environ['VSCMD_ARG_TGT_ARCH'], os.environ['VSCMD_ARG_HOST_ARCH']) + return '' + def get_obj_target_deps(self, obj_list): result = {} for o in obj_list: @@ -238,7 +268,7 @@ class Vs2010Backend(backends.Backend): for path in iterpaths: if path not in self.subdirs: basename = path.name - identifier = str(uuid.uuid4()).upper() + identifier = generate_guid_from_path(path, 'subdir') # top-level directories have None as their parent_dir parent_dir = path.parent parent_identifier = self.subdirs[parent_dir][0] \ @@ -252,7 +282,8 @@ class Vs2010Backend(backends.Backend): def generate_solution(self, sln_filename, projlist): default_projlist = self.get_build_by_default_targets() - with open(sln_filename, 'w', encoding='utf-8') as ofile: + sln_filename_tmp = sln_filename + '~' + with open(sln_filename_tmp, 'w', encoding='utf-8') as ofile: ofile.write('Microsoft Visual Studio Solution File, Format ' 'Version 11.00\n') ofile.write('# Visual Studio ' + self.vs_version + '\n') @@ -353,6 +384,7 @@ class Vs2010Backend(backends.Backend): ofile.write("\t\t{%s} = {%s}\n" % (subdir[0], subdir[1])) ofile.write('\tEndGlobalSection\n') ofile.write('EndGlobal\n') + replace_if_different(sln_filename, sln_filename_tmp) def generate_projects(self): startup_project = self.environment.coredata.backend_options['backend_startup_project'].value @@ -417,7 +449,7 @@ class Vs2010Backend(backends.Backend): pref = ET.SubElement(ig, 'ProjectReference', Include=include) ET.SubElement(pref, 'Project').text = '{%s}' % projid - def create_basic_crap(self, target): + def create_basic_crap(self, target, guid): project_name = target.name root = ET.Element('Project', {'DefaultTargets': "Build", 'ToolsVersion': '4.0', @@ -431,7 +463,7 @@ class Vs2010Backend(backends.Backend): pl.text = self.platform 
globalgroup = ET.SubElement(root, 'PropertyGroup', Label='Globals') guidelem = ET.SubElement(globalgroup, 'ProjectGuid') - guidelem.text = '{%s}' % self.environment.coredata.test_guid + guidelem.text = '{%s}' % guid kw = ET.SubElement(globalgroup, 'Keyword') kw.text = self.platform + 'Proj' p = ET.SubElement(globalgroup, 'Platform') @@ -460,7 +492,7 @@ class Vs2010Backend(backends.Backend): return root def gen_run_target_vcxproj(self, target, ofname, guid): - root = self.create_basic_crap(target) + root = self.create_basic_crap(target, guid) action = ET.SubElement(root, 'ItemDefinitionGroup') customstep = ET.SubElement(action, 'PostBuildEvent') cmd_raw = [target.command] + target.args @@ -486,7 +518,7 @@ class Vs2010Backend(backends.Backend): self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname) def gen_custom_target_vcxproj(self, target, ofname, guid): - root = self.create_basic_crap(target) + root = self.create_basic_crap(target, guid) action = ET.SubElement(root, 'ItemDefinitionGroup') customstep = ET.SubElement(action, 'CustomBuildStep') # We need to always use absolute paths because our invocation is always @@ -647,11 +679,14 @@ class Vs2010Backend(backends.Backend): raise MesonException('Could not find a C or C++ compiler. MSVC can only build C/C++ projects.') def _prettyprint_vcxproj_xml(self, tree, ofname): - tree.write(ofname, encoding='utf-8', xml_declaration=True) + ofname_tmp = ofname + '~' + tree.write(ofname_tmp, encoding='utf-8', xml_declaration=True) + # ElementTree can not do prettyprinting so do it manually - doc = xml.dom.minidom.parse(ofname) - with open(ofname, 'w', encoding='utf-8') as of: + doc = xml.dom.minidom.parse(ofname_tmp) + with open(ofname_tmp, 'w', encoding='utf-8') as of: of.write(doc.toprettyxml()) + replace_if_different(ofname, ofname_tmp) def gen_vcxproj(self, target, ofname, guid): mlog.debug('Generating vcxproj %s.' 
% target.name) @@ -730,8 +765,9 @@ class Vs2010Backend(backends.Backend): ET.SubElement(type_config, 'InlineFunctionExpansion').text = 'OnlyExplicitInline' elif '/Ob2' in o_flags: ET.SubElement(type_config, 'InlineFunctionExpansion').text = 'AnySuitable' - # Size-preserving flags - if '/Os' in o_flags: + # In modern MSVC parlance "/O1" means size optimization. + # "/Os" has been deprecated. + if '/O1' in o_flags: ET.SubElement(type_config, 'FavorSizeOrSpeed').text = 'Size' else: ET.SubElement(type_config, 'FavorSizeOrSpeed').text = 'Speed' @@ -945,7 +981,6 @@ class Vs2010Backend(backends.Backend): ET.SubElement(clconf, 'AdditionalIncludeDirectories').text = ';'.join(target_inc_dirs) target_defines.append('%(PreprocessorDefinitions)') ET.SubElement(clconf, 'PreprocessorDefinitions').text = ';'.join(target_defines) - ET.SubElement(clconf, 'MinimalRebuild').text = 'true' ET.SubElement(clconf, 'FunctionLevelLinking').text = 'true' pch_node = ET.SubElement(clconf, 'PrecompiledHeader') # Warning level @@ -1096,7 +1131,10 @@ class Vs2010Backend(backends.Backend): elif targetplatform == 'arm': targetmachine.text = 'MachineARM' else: - raise MesonException('Unsupported Visual Studio target machine: ' + targetmachine) + raise MesonException('Unsupported Visual Studio target machine: ' + targetplatform) + + meson_file_group = ET.SubElement(root, 'ItemGroup') + ET.SubElement(meson_file_group, 'None', Include=os.path.join(proj_to_src_dir, build_filename)) extra_files = target.extra_files if len(headers) + len(gen_hdrs) + len(extra_files) > 0: @@ -1173,7 +1211,7 @@ class Vs2010Backend(backends.Backend): pl.text = self.platform globalgroup = ET.SubElement(root, 'PropertyGroup', Label='Globals') guidelem = ET.SubElement(globalgroup, 'ProjectGuid') - guidelem.text = '{%s}' % self.environment.coredata.test_guid + guidelem.text = '{%s}' % self.environment.coredata.regen_guid kw = ET.SubElement(globalgroup, 'Keyword') kw.text = self.platform + 'Proj' p = ET.SubElement(globalgroup, 
'Platform') @@ -1210,7 +1248,9 @@ class Vs2010Backend(backends.Backend): ET.SubElement(midl, 'ProxyFileName').text = '%(Filename)_p.c' regen_command = self.environment.get_build_command() + ['--internal', 'regencheck'] private_dir = self.environment.get_scratch_dir() + vcvars_command = self.get_vcvars_command() cmd_templ = '''setlocal +call %s > NUL "%s" "%s" if %%errorlevel%% neq 0 goto :cmEnd :cmEnd @@ -1228,7 +1268,7 @@ if %%errorlevel%% neq 0 goto :VCEnd''' message = ET.SubElement(custombuild, 'Message') message.text = 'Checking whether solution needs to be regenerated.' ET.SubElement(custombuild, 'Command').text = cmd_templ % \ - ('" "'.join(regen_command), private_dir) + (vcvars_command, '" "'.join(regen_command), private_dir) ET.SubElement(custombuild, 'Outputs').text = Vs2010Backend.get_regen_stampfile(self.environment.get_build_dir()) deps = self.get_regen_filelist() ET.SubElement(custombuild, 'AdditionalInputs').text = ';'.join(deps) diff --git a/mesonbuild/backend/xcodebackend.py b/mesonbuild/backend/xcodebackend.py index b930c7f..a550d91 100644 --- a/mesonbuild/backend/xcodebackend.py +++ b/mesonbuild/backend/xcodebackend.py @@ -16,7 +16,8 @@ from . import backends from .. import build from .. import dependencies from .. import mesonlib -import uuid, os +from .. 
import mlog +import uuid, os, operator from ..mesonlib import MesonException @@ -43,6 +44,8 @@ class XCodeBackend(backends.Backend): 'inc': 'sourcecode.c.h', 'dylib': 'compiled.mach-o.dylib', 'o': 'compiled.mach-o.objfile', + 's': 'sourcecode.asm', + 'asm': 'sourcecode.asm', } self.maingroup_id = self.gen_id() self.all_id = self.gen_id() @@ -60,6 +63,12 @@ class XCodeBackend(backends.Backend): os.makedirs(os.path.join(self.environment.get_build_dir(), dirname), exist_ok=True) return dirname + def target_to_build_root(self, target): + if self.get_target_dir(target) == '': + return '' + directories = os.path.normpath(self.get_target_dir(target)).split(os.sep) + return os.sep.join(['..'] * len(directories)) + def write_line(self, text): self.ofile.write(self.indent * self.indent_level + text) if not text.endswith('\n'): @@ -105,7 +114,11 @@ class XCodeBackend(backends.Backend): self.generate_suffix() def get_xcodetype(self, fname): - return self.xcodetypemap[fname.split('.')[-1]] + xcodetype = self.xcodetypemap.get(fname.split('.')[-1].lower()) + if not xcodetype: + xcodetype = 'sourcecode.unknown' + mlog.warning('Unknown file type "%s" fallbacking to "%s". Xcode project might be malformed.' % (fname, xcodetype)) + return xcodetype def generate_filemap(self): self.filemap = {} # Key is source file relative to src root. 
@@ -202,38 +215,38 @@ class XCodeBackend(backends.Backend): self.source_phase[t] = self.gen_id() def generate_pbx_aggregate_target(self): + target_dependencies = list(map(lambda t: self.pbx_dep_map[t], self.build.targets)) + aggregated_targets = [] + aggregated_targets.append((self.all_id, 'ALL_BUILD', self.all_buildconf_id, [], target_dependencies)) + aggregated_targets.append((self.test_id, 'RUN_TESTS', self.test_buildconf_id, [self.test_command_id], [])) + # Sort objects by ID before writing + sorted_aggregated_targets = sorted(aggregated_targets, key=operator.itemgetter(0)) self.ofile.write('\n/* Begin PBXAggregateTarget section */\n') - self.write_line('%s /* ALL_BUILD */ = {' % self.all_id) - self.indent_level += 1 - self.write_line('isa = PBXAggregateTarget;') - self.write_line('buildConfigurationList = %s /* Build configuration list for PBXAggregateTarget "ALL_BUILD" */;' % self.all_buildconf_id) - self.write_line('buildPhases = (') - self.write_line(');') - self.write_line('dependencies = (') - self.indent_level += 1 - for t in self.build.targets: - self.write_line('%s /* PBXTargetDependency */,' % self.pbx_dep_map[t]) - self.indent_level -= 1 - self.write_line(');') - self.write_line('name = ALL_BUILD;') - self.write_line('productName = ALL_BUILD;') - self.indent_level -= 1 - self.write_line('};') - self.write_line('%s /* RUN_TESTS */ = {' % self.test_id) - self.indent_level += 1 - self.write_line('isa = PBXAggregateTarget;') - self.write_line('buildConfigurationList = %s /* Build configuration list for PBXAggregateTarget "RUN_TESTS" */;' % self.test_buildconf_id) - self.write_line('buildPhases = (') - self.indent_level += 1 - self.write_line('%s /* ShellScript */,' % self.test_command_id) - self.indent_level -= 1 - self.write_line(');') - self.write_line('dependencies = (') - self.write_line(');') - self.write_line('name = RUN_TESTS;') - self.write_line('productName = RUN_TESTS;') - self.indent_level -= 1 - self.write_line('};') + for t in 
sorted_aggregated_targets: + name = t[1] + buildconf_id = t[2] + build_phases = t[3] + dependencies = t[4] + self.write_line('%s /* %s */ = {' % (t[0], name)) + self.indent_level += 1 + self.write_line('isa = PBXAggregateTarget;') + self.write_line('buildConfigurationList = %s /* Build configuration list for PBXAggregateTarget "%s" */;' % (buildconf_id, name)) + self.write_line('buildPhases = (') + self.indent_level += 1 + for bp in build_phases: + self.write_line('%s /* ShellScript */,' % bp) + self.indent_level -= 1 + self.write_line(');') + self.write_line('dependencies = (') + self.indent_level += 1 + for td in dependencies: + self.write_line('%s /* PBXTargetDependency */,' % td) + self.indent_level -= 1 + self.write_line(');') + self.write_line('name = %s;' % name) + self.write_line('productName = %s;' % name) + self.indent_level -= 1 + self.write_line('};') self.ofile.write('/* End PBXAggregateTarget section */\n') def generate_pbx_build_file(self): @@ -594,14 +607,20 @@ class XCodeBackend(backends.Backend): self.ofile.write('/* End PBXSourcesBuildPhase section */\n') def generate_pbx_target_dependency(self): - self.ofile.write('\n/* Begin PBXTargetDependency section */\n') + targets = [] for t in self.build.targets: idval = self.pbx_dep_map[t] # VERIFY: is this correct? 
- self.write_line('%s /* PBXTargetDependency */ = {' % idval) + targets.append((idval, self.native_targets[t], t, self.containerproxy_map[t])) + + # Sort object by ID + sorted_targets = sorted(targets, key=operator.itemgetter(0)) + self.ofile.write('\n/* Begin PBXTargetDependency section */\n') + for t in sorted_targets: + self.write_line('%s /* PBXTargetDependency */ = {' % t[0]) self.indent_level += 1 self.write_line('isa = PBXTargetDependency;') - self.write_line('target = %s /* %s */;' % (self.native_targets[t], t)) - self.write_line('targetProxy = %s /* PBXContainerItemProxy */;' % self.containerproxy_map[t]) + self.write_line('target = %s /* %s */;' % (t[1], t[2])) + self.write_line('targetProxy = %s /* PBXContainerItemProxy */;' % t[3]) self.indent_level -= 1 self.write_line('};') self.ofile.write('/* End PBXTargetDependency section */\n') @@ -720,9 +739,13 @@ class XCodeBackend(backends.Backend): for lang in self.environment.coredata.compilers: if lang not in langnamemap: continue + # Add compile args added using add_project_arguments() + pargs = self.build.projects_args.get(target.subproject, {}).get(lang, []) + # Add compile args added using add_global_arguments() + # These override per-project arguments gargs = self.build.global_args.get(lang, []) targs = target.get_extra_args(lang) - args = gargs + targs + args = pargs + gargs + targs if len(args) > 0: langargs[langnamemap[lang]] = args symroot = os.path.join(self.environment.get_build_dir(), target.subdir) @@ -743,6 +766,19 @@ class XCodeBackend(backends.Backend): self.write_line('GCC_GENERATE_DEBUGGING_SYMBOLS = YES;') self.write_line('GCC_INLINES_ARE_PRIVATE_EXTERN = NO;') self.write_line('GCC_OPTIMIZATION_LEVEL = 0;') + if target.has_pch: + # Xcode uses GCC_PREFIX_HEADER which only allows one file per target/executable. Precompiling various header files and + # applying a particular pch to each source file will require custom scripts (as a build phase) and build flags per each + # file. 
Since Xcode itself already discourages precompiled headers in favor of modules we don't try much harder here. + pchs = target.get_pch('c') + target.get_pch('cpp') + target.get_pch('objc') + target.get_pch('objcpp') + # Make sure to use headers (other backends require implementation files like *.c *.cpp, etc; these should not be used here) + pchs = [pch for pch in pchs if pch.endswith('.h') or pch.endswith('.hh') or pch.endswith('hpp')] + if pchs: + if len(pchs) > 1: + mlog.warning('Unsupported Xcode configuration: More than 1 precompiled header found "%s". Target "%s" might not compile correctly.' % (str(pchs), target.name)) + relative_pch_path = os.path.join(target.get_subdir(), pchs[0]) # Path relative to target so it can be used with "$(PROJECT_DIR)" + self.write_line('GCC_PRECOMPILE_PREFIX_HEADER = YES;') + self.write_line('GCC_PREFIX_HEADER = "$(PROJECT_DIR)/%s";' % relative_pch_path) self.write_line('GCC_PREPROCESSOR_DEFINITIONS = "";') self.write_line('GCC_SYMBOLS_PRIVATE_EXTERN = NO;') if len(headerdirs) > 0: @@ -753,23 +789,24 @@ class XCodeBackend(backends.Backend): if isinstance(target, build.SharedLibrary): self.write_line('LIBRARY_STYLE = DYNAMIC;') for langname, args in langargs.items(): - argstr = ' '.join(args) - self.write_line('OTHER_%sFLAGS = "%s";' % (langname, argstr)) + self.write_build_setting_line('OTHER_%sFLAGS' % langname, args) self.write_line('OTHER_LDFLAGS = "%s";' % ldstr) self.write_line('OTHER_REZFLAGS = "";') self.write_line('PRODUCT_NAME = %s;' % product_name) self.write_line('SECTORDER_FLAGS = "";') self.write_line('SYMROOT = "%s";' % symroot) + self.write_build_setting_line('SYSTEM_HEADER_SEARCH_PATHS', [self.environment.get_build_dir()]) self.write_line('USE_HEADERMAP = NO;') self.write_build_setting_line('WARNING_CFLAGS', ['-Wmost', '-Wno-four-char-constants', '-Wno-unknown-pragmas']) self.indent_level -= 1 self.write_line('};') - self.write_line('name = "%s";' % buildtype) + self.write_line('name = %s;' % buildtype) 
self.indent_level -= 1 self.write_line('};') self.ofile.write('/* End XCBuildConfiguration section */\n') def generate_xc_configurationList(self): + # FIXME: sort items self.ofile.write('\n/* Begin XCConfigurationList section */\n') self.write_line('%s /* Build configuration list for PBXProject "%s" */ = {' % (self.project_conflist, self.build.project_name)) self.indent_level += 1 @@ -828,21 +865,34 @@ class XCodeBackend(backends.Backend): self.indent_level -= 1 self.write_line(');') self.write_line('defaultConfigurationIsVisible = 0;') - self.write_line('defaultConfigurationName = "%s";' % typestr) + self.write_line('defaultConfigurationName = %s;' % typestr) self.indent_level -= 1 self.write_line('};') self.ofile.write('/* End XCConfigurationList section */\n') - def write_build_setting_line(self, flag_name, flag_values): + def write_build_setting_line(self, flag_name, flag_values, explicit=False): if flag_values: - self.write_line('%s = (' % flag_name) - self.indent_level += 1 - for value in flag_values: - self.write_line('"%s",' % value) - self.indent_level -= 1 - self.write_line(');') + if len(flag_values) == 1: + value = flag_values[0] + if (' ' in value): + # If path contains spaces surround it with double colon + self.write_line('%s = "\\"%s\\"";' % (flag_name, value)) + else: + self.write_line('%s = "%s";' % (flag_name, value)) + else: + self.write_line('%s = (' % flag_name) + self.indent_level += 1 + for value in flag_values: + if (' ' in value): + # If path contains spaces surround it with double colon + self.write_line('"\\"%s\\"",' % value) + else: + self.write_line('"%s",' % value) + self.indent_level -= 1 + self.write_line(');') else: - self.write_line('%s = "";' % flag_name) + if explicit: + self.write_line('%s = "";' % flag_name) def generate_prefix(self): self.ofile.write('// !$*UTF8*$!\n{\n') diff --git a/mesonbuild/build.py b/mesonbuild/build.py index eb0e294..1fcbc04 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -15,17 +15,20 @@ 
import copy, os, re from collections import OrderedDict import itertools, pathlib +import hashlib import pickle from functools import lru_cache from . import environment from . import dependencies from . import mlog -from .mesonlib import File, MesonException, listify, extract_as_list, OrderedSet -from .mesonlib import typeslistify, stringlistify, classify_unity_sources -from .mesonlib import get_filenames_templates_dict, substitute_values -from .mesonlib import for_windows, for_darwin, for_cygwin, for_android, has_path_sep -from .compilers import is_object, clink_langs, sort_clink, lang_suffixes +from .mesonlib import ( + File, MesonException, listify, extract_as_list, OrderedSet, + typeslistify, stringlistify, classify_unity_sources, + get_filenames_templates_dict, substitute_values, + for_windows, for_darwin, for_cygwin, for_android, has_path_sep +) +from .compilers import is_object, clink_langs, sort_clink, lang_suffixes, get_macos_dylib_install_name from .interpreterbase import FeatureNew pch_kwargs = set(['c_pch', 'cpp_pch']) @@ -37,6 +40,7 @@ lang_arg_kwargs = set([ 'd_import_dirs', 'd_unittest', 'd_module_versions', + 'd_debug', 'fortran_args', 'java_args', 'objc_args', @@ -83,12 +87,16 @@ known_build_target_kwargs = ( rust_kwargs | cs_kwargs) -known_exe_kwargs = known_build_target_kwargs | {'implib', 'export_dynamic'} +known_exe_kwargs = known_build_target_kwargs | {'implib', 'export_dynamic', 'pie'} known_shlib_kwargs = known_build_target_kwargs | {'version', 'soversion', 'vs_module_defs', 'darwin_versions'} known_shmod_kwargs = known_build_target_kwargs known_stlib_kwargs = known_build_target_kwargs | {'pic'} known_jar_kwargs = known_exe_kwargs | {'main_class'} +@lru_cache(maxsize=None) +def get_target_macos_dylib_install_name(ld): + return get_macos_dylib_install_name(ld.prefix, ld.name, ld.suffix, ld.soversion) + class InvalidArguments(MesonException): pass @@ -130,9 +138,23 @@ class Build: self.dep_manifest = {} self.cross_stdlibs = {} 
self.test_setups = {} + self.test_setup_default_name = None self.find_overrides = {} self.searched_programs = set() # The list of all programs that have been searched for. + def copy(self): + other = Build(self.environment) + for k, v in self.__dict__.items(): + if isinstance(v, (list, dict, set, OrderedDict)): + other.__dict__[k] = v.copy() + else: + other.__dict__[k] = v + return other + + def merge(self, other): + for k, v in other.__dict__.items(): + self.__dict__[k] = v + def add_compiler(self, compiler): if self.static_linker is None and compiler.needs_static_linker(): self.static_linker = self.environment.detect_static_linker(compiler) @@ -324,25 +346,58 @@ a hard error in the future.''' % name) self.build_always_stale = False self.option_overrides = {} + def get_install_dir(self, environment): + # Find the installation directory. + default_install_dir = self.get_default_install_dir(environment) + outdirs = self.get_custom_install_dir() + if outdirs[0] is not None and outdirs[0] != default_install_dir and outdirs[0] is not True: + # Either the value is set to a non-default value, or is set to + # False (which means we want this specific output out of many + # outputs to not be installed). + custom_install_dir = True + else: + custom_install_dir = False + outdirs[0] = default_install_dir + return outdirs, custom_install_dir + def get_basename(self): return self.name def get_subdir(self): return self.subdir - def get_id(self): + @staticmethod + def _get_id_hash(target_id): + # We don't really need cryptographic security here. + # Small-digest hash function with unlikely collision is good enough. + h = hashlib.sha256() + h.update(target_id.encode(encoding='utf-8', errors='replace')) + # This ID should be case-insensitive and should work in Visual Studio, + # e.g. it should not start with leading '-'. + return h.hexdigest()[:7] + + @staticmethod + def construct_id_from_path(subdir, name, type_suffix): + """Construct target ID from subdir, name and type suffix. 
+ + This helper function is made public mostly for tests.""" # This ID must also be a valid file name on all OSs. # It should also avoid shell metacharacters for obvious # reasons. '@' is not used as often as '_' in source code names. # In case of collisions consider using checksums. # FIXME replace with assert when slash in names is prohibited - name_part = self.name.replace('/', '@').replace('\\', '@') - assert not has_path_sep(self.type_suffix()) - myid = name_part + self.type_suffix() - if self.subdir: - subdir_part = self.subdir.replace('/', '@').replace('\\', '@') - myid = subdir_part + '@@' + myid - return myid + name_part = name.replace('/', '@').replace('\\', '@') + assert not has_path_sep(type_suffix) + my_id = name_part + type_suffix + if subdir: + subdir_part = Target._get_id_hash(subdir) + # preserve myid for better debuggability + return subdir_part + '@@' + my_id + return my_id + + def get_id(self): + return self.construct_id_from_path( + self.subdir, self.name, self.type_suffix()) def process_kwargs(self, kwargs): if 'build_by_default' in kwargs: @@ -395,6 +450,8 @@ class BuildTarget(Target): self.generated = [] self.extra_files = [] self.d_features = {} + self.pic = False + self.pie = False # Sources can be: # 1. Pre-existing source files in the source tree # 2. 
Pre-existing sources generated by configure_file in the build tree @@ -422,7 +479,7 @@ class BuildTarget(Target): return repr_str.format(self.__class__.__name__, self.get_id(), self.filename) def validate_cross_install(self, environment): - if environment.is_cross_build() and not self.is_cross and self.install: + if environment.is_cross_build() and not self.is_cross and self.need_install: raise InvalidArguments('Tried to install a natively built target in a cross build.') def check_unknown_kwargs(self, kwargs): @@ -678,6 +735,20 @@ class BuildTarget(Target): result += i.get_all_link_deps() return result + def get_link_deps_mapping(self, prefix, environment): + return self.get_transitive_link_deps_mapping(prefix, environment) + + @lru_cache(maxsize=None) + def get_transitive_link_deps_mapping(self, prefix, environment): + result = {} + for i in self.link_targets: + mapping = i.get_link_deps_mapping(prefix, environment) + #we are merging two dictionaries, while keeping the earlier one dominant + result_tmp = mapping.copy() + result_tmp.update(result) + result = result_tmp + return result + @lru_cache(maxsize=None) def get_link_dep_subdirs(self): result = OrderedSet() @@ -686,6 +757,9 @@ class BuildTarget(Target): result.update(i.get_link_dep_subdirs()) return result + def get_default_install_dir(self, environment): + return environment.get_libdir() + def get_custom_install_dir(self): return self.install_dir @@ -737,9 +811,12 @@ just like those detected with the dependency() function.''') dfeature_unittest = kwargs.get('d_unittest', False) if dfeature_unittest: dfeatures['unittest'] = dfeature_unittest - dfeature_versions = kwargs.get('d_module_versions', None) + dfeature_versions = kwargs.get('d_module_versions', []) if dfeature_versions: dfeatures['versions'] = dfeature_versions + dfeature_debug = kwargs.get('d_debug', []) + if dfeature_debug: + dfeatures['debug'] = dfeature_debug if 'd_import_dirs' in kwargs: dfeature_import_dirs = extract_as_list(kwargs, 
'd_import_dirs', unholder=True) for d in dfeature_import_dirs: @@ -830,13 +907,14 @@ This will become a hard error in a future Meson release.''') # since library loading is done differently) if for_darwin(self.is_cross, self.environment) or for_windows(self.is_cross, self.environment): self.pic = True - elif '-fPIC' in clist + cpplist: - mlog.warning("Use the 'pic' kwarg instead of passing -fPIC manually to static library {!r}".format(self.name)) - self.pic = True else: - self.pic = kwargs.get('pic', False) - if not isinstance(self.pic, bool): - raise InvalidArguments('Argument pic to static library {!r} must be boolean'.format(self.name)) + self.pic = self._extract_pic_pie(kwargs, 'pic') + if isinstance(self, Executable): + # Executables must be PIE on Android + if for_android(self.is_cross, self.environment): + self.pie = True + else: + self.pie = self._extract_pic_pie(kwargs, 'pie') self.implicit_include_directories = kwargs.get('implicit_include_directories', True) if not isinstance(self.implicit_include_directories, bool): raise InvalidArguments('Implicit_include_directories must be a boolean.') @@ -849,6 +927,18 @@ This will become a hard error in a future Meson release.''') raise InvalidArguments('GNU symbol visibility arg %s not one of: %s', self.symbol_visibility, ', '.join(permitted)) + def _extract_pic_pie(self, kwargs, arg): + # Check if we have -fPIC, -fpic, -fPIE, or -fpie in cflags + all_flags = self.extra_args['c'] + self.extra_args['cpp'] + if '-f' + arg.lower() in all_flags or '-f' + arg.upper() in all_flags: + mlog.warning("Use the '{}' kwarg instead of passing '{}' manually to {!r}".format(arg, '-f' + arg, self.name)) + return True + + val = kwargs.get(arg, False) + if not isinstance(val, bool): + raise InvalidArguments('Argument {} to {!r} must be boolean'.format(arg, self.name)) + return val + def get_filename(self): return self.filename @@ -1101,7 +1191,7 @@ You probably should put it in link_with instead.''') ''' linker, _ = 
self.get_clink_dynamic_linker_and_stdlibs() # Mixing many languages with MSVC is not supported yet so ignore stdlibs. - if linker and linker.get_id() in ['msvc', 'llvm', 'dmd']: + if linker and linker.get_id() in ['msvc', 'clang-cl', 'llvm', 'dmd']: return True return False @@ -1268,6 +1358,8 @@ class Executable(BuildTarget): known_kwargs = known_exe_kwargs def __init__(self, name, subdir, subproject, is_cross, sources, objects, environment, kwargs): + if 'pie' not in kwargs and 'b_pie' in environment.coredata.base_options: + kwargs['pie'] = environment.coredata.base_options['b_pie'].value super().__init__(name, subdir, subproject, is_cross, sources, objects, environment, kwargs) # Unless overridden, executables have no suffix or prefix. Except on # Windows and with C#/Mono executables where the suffix is 'exe' @@ -1278,12 +1370,14 @@ class Executable(BuildTarget): if (for_windows(is_cross, environment) or for_cygwin(is_cross, environment) or 'cs' in self.compilers): self.suffix = 'exe' + elif ('c' in self.compilers and self.compilers['c'].get_id().startswith('arm') or + 'cpp' in self.compilers and self.compilers['cpp'].get_id().startswith('arm')): + self.suffix = 'axf' + elif ('c' in self.compilers and self.compilers['c'].get_id().startswith('ccrx') or + 'cpp' in self.compilers and self.compilers['cpp'].get_id().startswith('ccrx')): + self.suffix = 'abs' else: - if ('c' in self.compilers and self.compilers['c'].get_id().startswith('arm') or - 'cpp' in self.compilers and self.compilers['cpp'].get_id().startswith('arm')): - self.suffix = 'axf' - else: - self.suffix = '' + self.suffix = '' self.filename = self.name if self.suffix: self.filename += '.' 
+ self.suffix @@ -1323,6 +1417,9 @@ class Executable(BuildTarget): # Only linkwithable if using export_dynamic self.is_linkwithable = self.export_dynamic + def get_default_install_dir(self, environment): + return environment.get_bindir() + def description(self): '''Human friendly description of the executable''' return self.name @@ -1384,6 +1481,12 @@ class StaticLibrary(BuildTarget): self.filename = self.prefix + self.name + '.' + self.suffix self.outputs = [self.filename] + def get_link_deps_mapping(self, prefix, environment): + return {} + + def get_default_install_dir(self, environment): + return environment.get_static_lib_dir() + def type_suffix(self): return "@sta" @@ -1430,6 +1533,21 @@ class SharedLibrary(BuildTarget): self.basic_filename_tpl = '{0.prefix}{0.name}.{0.suffix}' self.determine_filenames(is_cross, environment) + def get_link_deps_mapping(self, prefix, environment): + result = {} + mappings = self.get_transitive_link_deps_mapping(prefix, environment) + old = get_target_macos_dylib_install_name(self) + if old not in mappings: + fname = self.get_filename() + outdirs, _ = self.get_install_dir(self.environment) + new = os.path.join(prefix, outdirs[0], fname) + result.update({old: new}) + mappings.update(result) + return mappings + + def get_default_install_dir(self, environment): + return environment.get_shared_lib_dir() + def determine_filenames(self, is_cross, env): """ See https://github.com/mesonbuild/meson/pull/417 for details. @@ -1454,13 +1572,9 @@ class SharedLibrary(BuildTarget): prefix = '' suffix = '' self.filename_tpl = self.basic_filename_tpl - # If the user already provided the prefix and suffix to us, we don't - # need to do any filename suffix/prefix detection. 
# NOTE: manual prefix/suffix override is currently only tested for C/C++ - if self.prefix is not None and self.suffix is not None: - pass # C# and Mono - elif 'cs' in self.compilers: + if 'cs' in self.compilers: prefix = '' suffix = 'dll' self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}' @@ -1469,8 +1583,8 @@ class SharedLibrary(BuildTarget): # For all other targets/platforms import_filename stays None elif for_windows(is_cross, env): suffix = 'dll' - self.vs_import_filename = '{0}.lib'.format(self.name) - self.gcc_import_filename = 'lib{0}.dll.a'.format(self.name) + self.vs_import_filename = '{0}{1}.lib'.format(self.prefix if self.prefix is not None else '', self.name) + self.gcc_import_filename = '{0}{1}.dll.a'.format(self.prefix if self.prefix is not None else 'lib', self.name) if self.get_using_msvc(): # Shared library is of the form foo.dll prefix = '' @@ -1489,7 +1603,7 @@ class SharedLibrary(BuildTarget): self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}' elif for_cygwin(is_cross, env): suffix = 'dll' - self.gcc_import_filename = 'lib{0}.dll.a'.format(self.name) + self.gcc_import_filename = '{0}{1}.dll.a'.format(self.prefix if self.prefix is not None else 'lib', self.name) # Shared library is of the form cygfoo.dll # (ld --dll-search-prefix=cyg is the default) prefix = 'cyg' @@ -1701,6 +1815,10 @@ class SharedModule(SharedLibrary): raise MesonException('Shared modules must not specify the soversion kwarg.') super().__init__(name, subdir, subproject, is_cross, sources, objects, environment, kwargs) + def get_default_install_dir(self, environment): + return environment.get_shared_module_dir() + + class CustomTarget(Target): known_kwargs = set([ 'input', @@ -1738,6 +1856,9 @@ class CustomTarget(Target): mlog.warning('Unknown keyword arguments in target %s: %s' % (self.name, ', '.join(unknowns))) + def get_default_install_dir(self, environment): + return None + def __lt__(self, other): return self.get_id() < other.get_id() @@ -2022,7 +2143,10 @@ class 
Jar(BuildTarget): def get_classpath_args(self): cp_paths = [os.path.join(l.get_subdir(), l.get_filename()) for l in self.link_targets] - return ['-cp', os.pathsep.join(cp_paths)] + cp_string = os.pathsep.join(cp_paths) + if cp_string: + return ['-cp', os.pathsep.join(cp_paths)] + return [] class CustomTargetIndex: diff --git a/mesonbuild/compilers/__init__.py b/mesonbuild/compilers/__init__.py index 677301e..31b7b89 100644 --- a/mesonbuild/compilers/__init__.py +++ b/mesonbuild/compilers/__init__.py @@ -45,6 +45,8 @@ __all__ = [ 'ClangCPPCompiler', 'ClangObjCCompiler', 'ClangObjCPPCompiler', + 'ClangClCCompiler', + 'ClangClCPPCompiler', 'CompilerArgs', 'CPPCompiler', 'DCompiler', @@ -76,6 +78,9 @@ __all__ = [ 'PathScaleFortranCompiler', 'PGIFortranCompiler', 'RustCompiler', + 'CcrxCCompiler', + 'CcrxCompiler', + 'CcrxCPPCompiler', 'SunFortranCompiler', 'SwiftCompiler', 'ValaCompiler', @@ -108,15 +113,18 @@ from .compilers import ( CompilerArgs, GnuCompiler, IntelCompiler, + CcrxCompiler, ) from .c import ( CCompiler, ArmCCompiler, ArmclangCCompiler, ClangCCompiler, + ClangClCCompiler, GnuCCompiler, ElbrusCCompiler, IntelCCompiler, + CcrxCCompiler, VisualStudioCCompiler, ) from .cpp import ( @@ -124,9 +132,11 @@ from .cpp import ( ArmCPPCompiler, ArmclangCPPCompiler, ClangCPPCompiler, + ClangClCPPCompiler, GnuCPPCompiler, ElbrusCPPCompiler, IntelCPPCompiler, + CcrxCPPCompiler, VisualStudioCPPCompiler, ) from .cs import MonoCompiler, VisualStudioCsCompiler diff --git a/mesonbuild/compilers/c.py b/mesonbuild/compilers/c.py index 7c6a43b..b009645 100644 --- a/mesonbuild/compilers/c.py +++ b/mesonbuild/compilers/c.py @@ -30,7 +30,6 @@ from ..mesonlib import ( from .c_function_attributes import C_FUNC_ATTRIBUTES from .compilers import ( - CompilerType, get_largefile_args, gnu_winlibs, msvc_winlibs, @@ -46,6 +45,7 @@ from .compilers import ( ElbrusCompiler, IntelCompiler, RunResult, + CcrxCompiler, ) gnu_compiler_internal_libs = ('m', 'c', 'pthread', 'dl', 'rt') @@ 
-121,10 +121,7 @@ class CCompiler(Compiler): # The default behavior is this, override in MSVC @functools.lru_cache(maxsize=None) def build_rpath_args(self, build_dir, from_dir, rpath_paths, build_rpath, install_rpath): - if getattr(self, 'compiler_type', False) and self.compiler_type.is_osx_compiler: - # Clang, GCC and ICC on macOS all use the same rpath arguments - return self.build_osx_rpath_args(build_dir, rpath_paths, build_rpath) - elif self.compiler_type.is_windows_compiler: + if self.compiler_type.is_windows_compiler: return [] return self.build_unix_rpath_args(build_dir, from_dir, rpath_paths, build_rpath, install_rpath) @@ -159,23 +156,6 @@ class CCompiler(Compiler): ''' return self.get_no_optimization_args() - def get_allow_undefined_link_args(self): - ''' - Get args for allowing undefined symbols when linking to a shared library - ''' - if self.id in ('clang', 'gcc'): - if self.compiler_type.is_osx_compiler: - # Apple ld - return ['-Wl,-undefined,dynamic_lookup'] - else: - # GNU ld and LLVM lld - return ['-Wl,--allow-shlib-undefined'] - elif self.id == 'msvc': - # link.exe - return ['/FORCE:UNRESOLVED'] - # FIXME: implement other linkers - return [] - def get_output_args(self, target): return ['-o', target] @@ -208,7 +188,9 @@ class CCompiler(Compiler): def _get_search_dirs(self, env): extra_args = ['--print-search-dirs'] stdo = None - with self._build_wrapper('', env, extra_args, None, 'compile', True) as p: + with self._build_wrapper('', env, extra_args=extra_args, + dependencies=None, mode='compile', + want_output=True) as p: stdo = p.stdo return stdo @@ -216,9 +198,25 @@ class CCompiler(Compiler): def _split_fetch_real_dirs(pathstr, sep=':'): paths = [] for p in pathstr.split(sep): - p = Path(p) - if p.exists(): - paths.append(p.resolve().as_posix()) + # GCC returns paths like this: + # /usr/lib/gcc/x86_64-linux-gnu/8/../../../../x86_64-linux-gnu/lib + # It would make sense to normalize them to get rid of the .. 
parts + # Sadly when you are on a merged /usr fs it also kills these: + # /lib/x86_64-linux-gnu + # since /lib is a symlink to /usr/lib. This would mean + # paths under /lib would be considered not a "system path", + # which is wrong and breaks things. Store everything, just to be sure. + pobj = Path(p) + unresolved = pobj.as_posix() + if pobj.exists(): + if unresolved not in paths: + paths.append(unresolved) + try: + resolved = Path(p).resolve().as_posix() + if resolved not in paths: + paths.append(resolved) + except FileNotFoundError: + pass return tuple(paths) def get_compiler_dirs(self, env, name): @@ -232,8 +230,34 @@ class CCompiler(Compiler): return () @functools.lru_cache() - def get_library_dirs(self, env): - return self.get_compiler_dirs(env, 'libraries') + def get_library_dirs(self, env, elf_class = None): + dirs = self.get_compiler_dirs(env, 'libraries') + if elf_class is None or elf_class == 0: + return dirs + + # if we do have an elf class for 32-bit or 64-bit, we want to check that + # the directory in question contains libraries of the appropriate class. Since + # system directories aren't mixed, we only need to check one file for each + # directory and go by that. If we can't check the file for some reason, assume + # the compiler knows what it's doing, and accept the directory anyway. 
+ retval = [] + for d in dirs: + files = [f for f in os.listdir(d) if f.endswith('.so') and os.path.isfile(os.path.join(d, f))] + # if no files, accept directory and move on + if len(files) == 0: + retval.append(d) + continue + file_to_check = os.path.join(d, files[0]) + with open(file_to_check, 'rb') as fd: + header = fd.read(5) + # if file is not an ELF file, it's weird, but accept dir + # if it is elf, and the class matches, accept dir + if header[1:4] != b'ELF' or int(header[4]) == elf_class: + retval.append(d) + # at this point, it's an ELF file which doesn't match the + # appropriate elf_class, so skip this one + pass + return tuple(retval) @functools.lru_cache() def get_program_dirs(self, env): @@ -334,13 +358,14 @@ class CCompiler(Compiler): code = 'int main(int argc, char **argv) { int class=0; return class; }\n' return self.sanity_check_impl(work_dir, environment, 'sanitycheckc.c', code) - def check_header(self, hname, prefix, env, extra_args=None, dependencies=None): + def check_header(self, hname, prefix, env, *, extra_args=None, dependencies=None): fargs = {'prefix': prefix, 'header': hname} code = '''{prefix} #include <{header}>''' - return self.compiles(code.format(**fargs), env, extra_args, dependencies) + return self.compiles(code.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies) - def has_header(self, hname, prefix, env, extra_args=None, dependencies=None): + def has_header(self, hname, prefix, env, *, extra_args=None, dependencies=None): fargs = {'prefix': prefix, 'header': hname} code = '''{prefix} #ifdef __has_include @@ -350,10 +375,10 @@ class CCompiler(Compiler): #else #include <{header}> #endif''' - return self.compiles(code.format(**fargs), env, extra_args, - dependencies, 'preprocess') + return self.compiles(code.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies, mode='preprocess') - def has_header_symbol(self, hname, symbol, prefix, env, extra_args=None, dependencies=None): + def 
has_header_symbol(self, hname, symbol, prefix, env, *, extra_args=None, dependencies=None): fargs = {'prefix': prefix, 'header': hname, 'symbol': symbol} t = '''{prefix} #include <{header}> @@ -363,13 +388,16 @@ class CCompiler(Compiler): {symbol}; #endif }}''' - return self.compiles(t.format(**fargs), env, extra_args, dependencies) + return self.compiles(t.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies) def _get_compiler_check_args(self, env, extra_args, dependencies, mode='compile'): if extra_args is None: extra_args = [] - elif isinstance(extra_args, str): - extra_args = [extra_args] + else: + extra_args = listify(extra_args) + extra_args = listify([e(mode) if callable(e) else e for e in extra_args]) + if dependencies is None: dependencies = [] elif not isinstance(dependencies, list): @@ -399,7 +427,13 @@ class CCompiler(Compiler): args += env.coredata.get_external_preprocess_args(self.language) elif mode == 'compile': # Add CFLAGS/CXXFLAGS/OBJCFLAGS/OBJCXXFLAGS from the env - args += env.coredata.get_external_args(self.language) + sys_args = env.coredata.get_external_args(self.language) + # Apparently it is a thing to inject linker flags both + # via CFLAGS _and_ LDFLAGS, even though the former are + # also used during linking. These flags can break + # argument checks. Thanks, Autotools. 
+ cleaned_sys_args = self.remove_linkerlike_args(sys_args) + args += cleaned_sys_args elif mode == 'link': # Add LDFLAGS from the env args += env.coredata.get_external_link_args(self.language) @@ -408,7 +442,7 @@ class CCompiler(Compiler): args += extra_args return args - def compiles(self, code, env, extra_args=None, dependencies=None, mode='compile'): + def compiles(self, code, env, *, extra_args=None, dependencies=None, mode='compile'): with self._build_wrapper(code, env, extra_args, dependencies, mode) as p: return p.returncode == 0 @@ -416,10 +450,11 @@ class CCompiler(Compiler): args = self._get_compiler_check_args(env, extra_args, dependencies, mode) return self.compile(code, args, mode, want_output=want_output) - def links(self, code, env, extra_args=None, dependencies=None): - return self.compiles(code, env, extra_args, dependencies, mode='link') + def links(self, code, env, *, extra_args=None, dependencies=None): + return self.compiles(code, env, extra_args=extra_args, + dependencies=dependencies, mode='link') - def run(self, code, env, extra_args=None, dependencies=None): + def run(self, code, env, *, extra_args=None, dependencies=None): if self.is_cross and self.exe_wrapper is None: raise CrossNoRunException('Can not run test applications in this cross environment.') with self._build_wrapper(code, env, extra_args, dependencies, mode='link', want_output=True) as p: @@ -449,7 +484,8 @@ class CCompiler(Compiler): t = '''#include {prefix} int main() {{ static int a[1-2*!({expression})]; a[0]=0; return 0; }}''' - return self.compiles(t.format(**fargs), env, extra_args, dependencies) + return self.compiles(t.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies) def cross_compute_int(self, expression, low, high, guess, prefix, env, extra_args, dependencies): # Try user's guess first @@ -499,7 +535,7 @@ class CCompiler(Compiler): return low - def compute_int(self, expression, low, high, guess, prefix, env, extra_args=None, 
dependencies=None): + def compute_int(self, expression, low, high, guess, prefix, env, *, extra_args=None, dependencies=None): if extra_args is None: extra_args = [] if self.is_cross: @@ -511,14 +547,15 @@ class CCompiler(Compiler): printf("%ld\\n", (long)({expression})); return 0; }};''' - res = self.run(t.format(**fargs), env, extra_args, dependencies) + res = self.run(t.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies) if not res.compiled: return -1 if res.returncode != 0: raise EnvironmentException('Could not run compute_int test binary.') return int(res.stdout) - def cross_sizeof(self, typename, prefix, env, extra_args=None, dependencies=None): + def cross_sizeof(self, typename, prefix, env, *, extra_args=None, dependencies=None): if extra_args is None: extra_args = [] fargs = {'prefix': prefix, 'type': typename} @@ -527,30 +564,33 @@ class CCompiler(Compiler): int main(int argc, char **argv) {{ {type} something; }}''' - if not self.compiles(t.format(**fargs), env, extra_args, dependencies): + if not self.compiles(t.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies): return -1 return self.cross_compute_int('sizeof(%s)' % typename, None, None, None, prefix, env, extra_args, dependencies) - def sizeof(self, typename, prefix, env, extra_args=None, dependencies=None): + def sizeof(self, typename, prefix, env, *, extra_args=None, dependencies=None): if extra_args is None: extra_args = [] fargs = {'prefix': prefix, 'type': typename} if self.is_cross: - return self.cross_sizeof(typename, prefix, env, extra_args, dependencies) + return self.cross_sizeof(typename, prefix, env, extra_args=extra_args, + dependencies=dependencies) t = '''#include {prefix} int main(int argc, char **argv) {{ printf("%ld\\n", (long)(sizeof({type}))); return 0; }};''' - res = self.run(t.format(**fargs), env, extra_args, dependencies) + res = self.run(t.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies) if not res.compiled: 
return -1 if res.returncode != 0: raise EnvironmentException('Could not run sizeof test binary.') return int(res.stdout) - def cross_alignment(self, typename, prefix, env, extra_args=None, dependencies=None): + def cross_alignment(self, typename, prefix, env, *, extra_args=None, dependencies=None): if extra_args is None: extra_args = [] fargs = {'prefix': prefix, 'type': typename} @@ -559,7 +599,8 @@ class CCompiler(Compiler): int main(int argc, char **argv) {{ {type} something; }}''' - if not self.compiles(t.format(**fargs), env, extra_args, dependencies): + if not self.compiles(t.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies): return -1 t = '''#include {prefix} @@ -569,11 +610,12 @@ class CCompiler(Compiler): }};''' return self.cross_compute_int('offsetof(struct tmp, target)', None, None, None, t.format(**fargs), env, extra_args, dependencies) - def alignment(self, typename, prefix, env, extra_args=None, dependencies=None): + def alignment(self, typename, prefix, env, *, extra_args=None, dependencies=None): if extra_args is None: extra_args = [] if self.is_cross: - return self.cross_alignment(typename, prefix, env, extra_args, dependencies) + return self.cross_alignment(typename, prefix, env, extra_args=extra_args, + dependencies=dependencies) fargs = {'prefix': prefix, 'type': typename} t = '''#include #include @@ -586,7 +628,8 @@ class CCompiler(Compiler): printf("%d", (int)offsetof(struct tmp, target)); return 0; }}''' - res = self.run(t.format(**fargs), env, extra_args, dependencies) + res = self.run(t.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies) if not res.compiled: raise EnvironmentException('Could not compile alignment test.') if res.returncode != 0: @@ -630,7 +673,7 @@ class CCompiler(Compiler): int main(int argc, char *argv[]) {{ printf ("{fmt}", {cast} {f}()); }}'''.format(**fargs) - res = self.run(code, env, extra_args, dependencies) + res = self.run(code, env, extra_args=extra_args, 
dependencies=dependencies) if not res.compiled: m = 'Could not get return value of {}()' raise EnvironmentException(m.format(fname)) @@ -699,7 +742,7 @@ class CCompiler(Compiler): }}''' return head, main - def has_function(self, funcname, prefix, env, extra_args=None, dependencies=None): + def has_function(self, funcname, prefix, env, *, extra_args=None, dependencies=None): """ First, this function looks for the symbol in the default libraries provided by the compiler (stdlib + a few others usually). If that @@ -747,7 +790,8 @@ class CCompiler(Compiler): head, main = self._no_prototype_templ() templ = head + stubs_fail + main - if self.links(templ.format(**fargs), env, extra_args, dependencies): + if self.links(templ.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies): return True # MSVC does not have compiler __builtin_-s. @@ -780,9 +824,10 @@ class CCompiler(Compiler): #endif #endif }}''' - return self.links(t.format(**fargs), env, extra_args, dependencies) + return self.links(t.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies) - def has_members(self, typename, membernames, prefix, env, extra_args=None, dependencies=None): + def has_members(self, typename, membernames, prefix, env, *, extra_args=None, dependencies=None): if extra_args is None: extra_args = [] fargs = {'prefix': prefix, 'type': typename, 'name': 'foo'} @@ -796,7 +841,8 @@ class CCompiler(Compiler): {type} {name}; {members} }};''' - return self.compiles(t.format(**fargs), env, extra_args, dependencies) + return self.compiles(t.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies) def has_type(self, typename, prefix, env, extra_args, dependencies=None): fargs = {'prefix': prefix, 'type': typename} @@ -804,7 +850,8 @@ class CCompiler(Compiler): void bar() {{ sizeof({type}); }};''' - return self.compiles(t.format(**fargs), env, extra_args, dependencies) + return self.compiles(t.format(**fargs), env, extra_args=extra_args, + 
dependencies=dependencies) def symbols_have_underscore_prefix(self, env): ''' @@ -864,17 +911,17 @@ class CCompiler(Compiler): stlibext = ['a'] # We've always allowed libname to be both `foo` and `libfoo`, # and now people depend on it - if strict and self.id != 'msvc': # lib prefix is not usually used with msvc + if strict and not isinstance(self, VisualStudioCCompiler): # lib prefix is not usually used with msvc prefixes = ['lib'] else: prefixes = ['lib', ''] # Library suffixes and prefixes if for_darwin(env.is_cross_build(), env): - shlibext = ['dylib'] + shlibext = ['dylib', 'so'] elif for_windows(env.is_cross_build(), env): # FIXME: .lib files can be import or static so we should read the # file, figure out which one it is, and reject the wrong kind. - if self.id == 'msvc': + if isinstance(self, VisualStudioCCompiler): shlibext = ['lib'] else: shlibext = ['dll.a', 'lib', 'dll'] @@ -935,6 +982,13 @@ class CCompiler(Compiler): return f return None + @functools.lru_cache() + def output_is_64bit(self, env): + ''' + returns true if the output produced is 64-bit, false if 32-bit + ''' + return self.sizeof('void *', '', env) == 8 + def find_library_real(self, libname, env, extra_dirs, code, libtype): # First try if we can just add the library as -l. # Gcc + co seem to prefer builtin lib dirs to -L dirs. @@ -950,8 +1004,18 @@ class CCompiler(Compiler): # Not found or we want to use a specific libtype? Try to find the # library file itself. patterns = self.get_library_naming(env, libtype) + # try to detect if we are 64-bit or 32-bit. 
If we can't + # detect, we will just skip path validity checks done in + # get_library_dirs() call + try: + if self.output_is_64bit(env): + elf_class = 2 + else: + elf_class = 1 + except: + elf_class = 0 # Search in the specified dirs, and then in the system libraries - for d in itertools.chain(extra_dirs, self.get_library_dirs(env)): + for d in itertools.chain(extra_dirs, self.get_library_dirs(env, elf_class)): for p in patterns: trial = self._get_trials_from_pattern(p, d, libname) if not trial: @@ -986,12 +1050,12 @@ class CCompiler(Compiler): return self.find_library_impl(libname, env, extra_dirs, code, libtype) def thread_flags(self, env): - if for_haiku(self.is_cross, env): + if for_haiku(self.is_cross, env) or for_darwin(self.is_cross, env): return [] return ['-pthread'] def thread_link_flags(self, env): - if for_haiku(self.is_cross, env): + if for_haiku(self.is_cross, env) or for_darwin(self.is_cross, env): return [] return ['-pthread'] @@ -1094,9 +1158,9 @@ class ClangCCompiler(ClangCompiler, CCompiler): class ArmclangCCompiler(ArmclangCompiler, CCompiler): - def __init__(self, exelist, version, is_cross, exe_wrapper=None, **kwargs): + def __init__(self, exelist, version, compiler_type, is_cross, exe_wrapper=None, **kwargs): CCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwargs) - ArmclangCompiler.__init__(self) + ArmclangCompiler.__init__(self, compiler_type) default_warn_args = ['-Wall', '-Winvalid-pch'] self.warn_args = {'1': default_warn_args, '2': default_warn_args + ['-Wextra'], @@ -1136,7 +1200,7 @@ class GnuCCompiler(GnuCompiler, CCompiler): ['none', 'c89', 'c99', 'c11', 'gnu89', 'gnu99', 'gnu11'], 'none')}) - if self.compiler_type == CompilerType.GCC_MINGW: + if self.compiler_type.is_windows_compiler: opts.update({ 'c_winlibs': coredata.UserArrayOption('c_winlibs', 'Standard Win libraries to link against', gnu_winlibs), }) @@ -1150,7 +1214,7 @@ class GnuCCompiler(GnuCompiler, CCompiler): return args def 
get_option_link_args(self, options): - if self.compiler_type == CompilerType.GCC_MINGW: + if self.compiler_type.is_windows_compiler: return options['c_winlibs'].value[:] return [] @@ -1175,11 +1239,13 @@ class ElbrusCCompiler(GnuCCompiler, ElbrusCompiler): # Elbrus C compiler does not have lchmod, but there is only linker warning, not compiler error. # So we should explicitly fail at this case. - def has_function(self, funcname, prefix, env, extra_args=None, dependencies=None): + def has_function(self, funcname, prefix, env, *, extra_args=None, dependencies=None): if funcname == 'lchmod': return False else: - return super().has_function(funcname, prefix, env, extra_args, dependencies) + return super().has_function(funcname, prefix, env, + extra_args=extra_args, + dependencies=dependencies) class IntelCCompiler(IntelCompiler, CCompiler): @@ -1187,7 +1253,7 @@ class IntelCCompiler(IntelCompiler, CCompiler): CCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwargs) IntelCompiler.__init__(self, compiler_type) self.lang_header = 'c-header' - default_warn_args = ['-Wall', '-w3', '-diag-disable:remark', '-Wpch-messages'] + default_warn_args = ['-Wall', '-w3', '-diag-disable:remark'] self.warn_args = {'1': default_warn_args, '2': default_warn_args + ['-Wextra'], '3': default_warn_args + ['-Wextra']} @@ -1253,7 +1319,7 @@ class VisualStudioCCompiler(CCompiler): def get_buildtype_args(self, buildtype): args = compilers.msvc_buildtype_args[buildtype] - if version_compare(self.version, '<18.0'): + if self.id == 'msvc' and version_compare(self.version, '<18.0'): args = [arg for arg in args if arg != '/Gw'] return args @@ -1271,6 +1337,8 @@ class VisualStudioCCompiler(CCompiler): def get_pch_use_args(self, pch_dir, header): base = os.path.basename(header) + if self.id == 'clang-cl': + base = header pchname = self.get_pch_name(header) return ['/FI' + base, '/Yu' + base, '/Fp' + os.path.join(pch_dir, pchname)] @@ -1298,7 +1366,12 @@ class 
VisualStudioCCompiler(CCompiler): return [] def get_linker_exelist(self): - return ['link'] # FIXME, should have same path as compiler. + # FIXME, should have same path as compiler. + # FIXME, should be controllable via cross-file. + if self.id == 'clang-cl': + return ['lld-link'] + else: + return ['link'] def get_linker_always_args(self): return ['/nologo'] @@ -1406,6 +1479,8 @@ class VisualStudioCCompiler(CCompiler): # http://stackoverflow.com/questions/15259720/how-can-i-make-the-microsoft-c-compiler-treat-unknown-flags-as-errors-rather-t def has_arguments(self, args, env, code, mode): warning_text = '4044' if mode == 'link' else '9002' + if self.id == 'clang-cl' and mode != 'link': + args = args + ['-Werror=unknown-argument'] with self._build_wrapper(code, env, extra_args=args, mode=mode) as p: if p.returncode != 0: return False @@ -1421,7 +1496,7 @@ class VisualStudioCCompiler(CCompiler): # build obviously, which is why we only do this when PCH is on. # This was added in Visual Studio 2013 (MSVC 18.0). Before that it was # always on: https://msdn.microsoft.com/en-us/library/dn502518.aspx - if pch and version_compare(self.version, '>=18.0'): + if pch and self.id == 'msvc' and version_compare(self.version, '>=18.0'): args = ['/FS'] + args return args @@ -1438,7 +1513,7 @@ class VisualStudioCCompiler(CCompiler): def get_instruction_set_args(self, instruction_set): if self.is_64: return vs64_instruction_set_args.get(instruction_set, None) - if self.version.split('.')[0] == '16' and instruction_set == 'avx': + if self.id == 'msvc' and self.version.split('.')[0] == '16' and instruction_set == 'avx': # VS documentation says that this exists and should work, but # it does not. The headers do not contain AVX intrinsics # and the can not be called. 
@@ -1446,6 +1521,10 @@ class VisualStudioCCompiler(CCompiler): return vs32_instruction_set_args.get(instruction_set, None) def get_toolset_version(self): + if self.id == 'clang-cl': + # I have no idea + return '14.1' + # See boost/config/compiler/visualc.cpp for up to date mapping try: version = int(''.join(self.version.split('.')[0:2])) @@ -1500,11 +1579,24 @@ class VisualStudioCCompiler(CCompiler): # false without compiling anything return name in ['dllimport', 'dllexport'] + def get_argument_syntax(self): + return 'msvc' + + def get_allow_undefined_link_args(self): + # link.exe + return ['/FORCE:UNRESOLVED'] + + +class ClangClCCompiler(VisualStudioCCompiler): + def __init__(self, exelist, version, is_cross, exe_wrap, is_64): + super().__init__(exelist, version, is_cross, exe_wrap, is_64) + self.id = 'clang-cl' + class ArmCCompiler(ArmCompiler, CCompiler): - def __init__(self, exelist, version, is_cross, exe_wrapper=None, **kwargs): + def __init__(self, exelist, version, compiler_type, is_cross, exe_wrapper=None, **kwargs): CCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwargs) - ArmCompiler.__init__(self) + ArmCompiler.__init__(self, compiler_type) def get_options(self): opts = CCompiler.get_options(self) @@ -1519,3 +1611,45 @@ class ArmCCompiler(ArmCompiler, CCompiler): if std.value != 'none': args.append('--' + std.value) return args + +class CcrxCCompiler(CcrxCompiler, CCompiler): + def __init__(self, exelist, version, compiler_type, is_cross, exe_wrapper=None, **kwargs): + CCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwargs) + CcrxCompiler.__init__(self, compiler_type) + + # Override CCompiler.get_always_args + def get_always_args(self): + return ['-nologo'] + + def get_options(self): + opts = CCompiler.get_options(self) + opts.update({'c_std': coredata.UserComboOption('c_std', 'C language standard to use', + ['none', 'c89', 'c99'], + 'none')}) + return opts + + def get_option_compile_args(self, options): + args = 
[] + std = options['c_std'] + if std.value == 'c89': + args.append('-lang=c') + elif std.value == 'c99': + args.append('-lang=c99') + return args + + def get_compile_only_args(self): + return [] + + def get_no_optimization_args(self): + return ['-optimize=0'] + + def get_output_args(self, target): + return ['-output=obj=%s' % target] + + def get_linker_output_args(self, outputname): + return ['-output=%s' % outputname] + + def get_include_args(self, path, is_system): + if path == '': + path = '.' + return ['-include=' + path] diff --git a/mesonbuild/compilers/c_function_attributes.py b/mesonbuild/compilers/c_function_attributes.py index 9aeaaf2..a522a1a 100644 --- a/mesonbuild/compilers/c_function_attributes.py +++ b/mesonbuild/compilers/c_function_attributes.py @@ -91,10 +91,10 @@ C_FUNC_ATTRIBUTES = { 'used': 'int foo(void) __attribute__((used));', 'visibility': ''' - int foo_def(void) __attribute__((visibility(("default")))); - int foo_hid(void) __attribute__((visibility(("hidden")))); - int foo_int(void) __attribute__((visibility(("internal")))); - int foo_pro(void) __attribute__((visibility(("protected"))));''', + int foo_def(void) __attribute__((visibility("default"))); + int foo_hid(void) __attribute__((visibility("hidden"))); + int foo_int(void) __attribute__((visibility("internal"))); + int foo_pro(void) __attribute__((visibility("protected")));''', 'warning': 'int foo(void) __attribute__((warning("")));', 'warn_unused_result': diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py index 7db5822..e27ae2b 100644 --- a/mesonbuild/compilers/compilers.py +++ b/mesonbuild/compilers/compilers.py @@ -19,7 +19,10 @@ from ..linkers import StaticLinker from .. import coredata from .. import mlog from .. 
import mesonlib -from ..mesonlib import EnvironmentException, MesonException, OrderedSet, version_compare, Popen_safe +from ..mesonlib import ( + EnvironmentException, MesonException, OrderedSet, version_compare, + Popen_safe +) """This file contains the data files of all compilers Meson knows about. To support a new compiler, add its information below. @@ -147,6 +150,14 @@ arm_buildtype_args = {'plain': [], 'custom': [], } +ccrx_buildtype_args = {'plain': [], + 'debug': [], + 'debugoptimized': [], + 'release': [], + 'minsize': [], + 'custom': [], + } + msvc_buildtype_args = {'plain': [], 'debug': ["/ZI", "/Ob0", "/Od", "/RTC1"], 'debugoptimized': ["/Zi", "/Ob1"], @@ -179,6 +190,14 @@ arm_buildtype_linker_args = {'plain': [], 'custom': [], } +ccrx_buildtype_linker_args = {'plain': [], + 'debug': [], + 'debugoptimized': [], + 'release': [], + 'minsize': [], + 'custom': [], + } + msvc_buildtype_linker_args = {'plain': [], 'debug': [], 'debugoptimized': [], @@ -295,6 +314,14 @@ gnu_optimization_args = {'0': [], 's': ['-Os'], } +ccrx_optimization_args = {'0': ['-optimize=0'], + 'g': ['-optimize=0'], + '1': ['-optimize=1'], + '2': ['-optimize=2'], + '3': ['-optimize=max'], + 's': ['-optimize=2', '-size'] + } + msvc_optimization_args = {'0': [], 'g': ['/O0'], '1': ['/O1'], @@ -309,6 +336,9 @@ clike_debug_args = {False: [], msvc_debug_args = {False: [], True: []} # Fixme! 
+ccrx_debug_args = {False: [], + True: ['-debug']} + base_options = {'b_pch': coredata.UserBooleanOption('b_pch', 'Use precompiled headers', True), 'b_lto': coredata.UserBooleanOption('b_lto', 'Use link time optimization', False), 'b_sanitize': coredata.UserComboOption('b_sanitize', @@ -331,6 +361,9 @@ base_options = {'b_pch': coredata.UserBooleanOption('b_pch', 'Use precompiled he 'b_staticpic': coredata.UserBooleanOption('b_staticpic', 'Build static libraries as position independent', True), + 'b_pie': coredata.UserBooleanOption('b_pie', + 'Build executables as position independent', + False), 'b_bitcode': coredata.UserBooleanOption('b_bitcode', 'Generate and embed bitcode (only macOS and iOS)', False), @@ -424,9 +457,9 @@ def get_base_compile_args(options, compiler): try: pgo_val = options['b_pgo'].value if pgo_val == 'generate': - args.append('-fprofile-generate') + args.extend(compiler.get_profile_generate_args()) elif pgo_val == 'use': - args.append('-fprofile-use') + args.extend(compiler.get_profile_use_args()) except KeyError: pass try: @@ -470,9 +503,9 @@ def get_base_link_args(options, linker, is_shared_module): try: pgo_val = options['b_pgo'].value if pgo_val == 'generate': - args.append('-fprofile-generate') + args.extend(linker.get_profile_generate_args()) elif pgo_val == 'use': - args.append('-fprofile-use') + args.extend(linker.get_profile_use_args()) except KeyError: pass try: @@ -503,6 +536,31 @@ def get_base_link_args(options, linker, is_shared_module): pass return args +def prepare_rpaths(raw_rpaths, build_dir, from_dir): + internal_format_rpaths = [evaluate_rpath(p, build_dir, from_dir) for p in raw_rpaths] + ordered_rpaths = order_rpaths(internal_format_rpaths) + return ordered_rpaths + +def order_rpaths(rpath_list): + # We want rpaths that point inside our build dir to always override + # those pointing to other places in the file system. 
This is so built + # binaries prefer our libraries to the ones that may lie somewhere + # in the file system, such as /lib/x86_64-linux-gnu. + # + # The correct thing to do here would be C++'s std::stable_partition. + # Python standard library does not have it, so replicate it with + # sort, which is guaranteed to be stable. + return sorted(rpath_list, key=os.path.isabs) + +def evaluate_rpath(p, build_dir, from_dir): + if p == from_dir: + return '' # relpath errors out in this case + elif os.path.isabs(p): + return p # These can be outside of build dir. + else: + return os.path.relpath(os.path.join(build_dir, p), os.path.join(build_dir, from_dir)) + + class CrossNoRunException(MesonException): pass @@ -554,7 +612,10 @@ class CompilerArgs(list): dedup2_suffixes = () dedup2_args = () # Arg prefixes and args that must be de-duped by returning 1 - dedup1_prefixes = ('-l', '-Wl,-l') + # + # NOTE: not thorough. A list of potential corner cases can be found in + # https://github.com/mesonbuild/meson/pull/4593#pullrequestreview-182016038 + dedup1_prefixes = ('-l', '-Wl,-l', '-Wl,--export-dynamic') dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a') # Match a .so of the form path/to/libfoo.so.0.1.0 # Only UNIX shared libraries require this. Others have a fixed extension. @@ -616,7 +677,6 @@ class CompilerArgs(list): to recursively search for symbols in the libraries. This is not needed with other linkers. ''' - # A standalone argument must never be deduplicated because it is # defined by what comes _after_ it. 
Thus dedupping this: # -D FOO -D BAR @@ -691,6 +751,17 @@ class CompilerArgs(list): for elem in iterable: self.append_direct(elem) + def extend_preserving_lflags(self, iterable): + normal_flags = [] + lflags = [] + for i in iterable: + if i.startswith('-l') or i.startswith('-L'): + lflags.append(i) + else: + normal_flags.append(i) + self.extend(normal_flags) + self.extend_direct(lflags) + def __add__(self, args): new = CompilerArgs(self, self.compiler) new += args @@ -813,10 +884,10 @@ class Compiler: def compute_int(self, expression, low, high, guess, prefix, env, extra_args, dependencies): raise EnvironmentException('%s does not support compute_int ' % self.get_id()) - def has_members(self, typename, membernames, prefix, env, extra_args=None, dependencies=None): + def has_members(self, typename, membernames, prefix, env, *, extra_args=None, dependencies=None): raise EnvironmentException('%s does not support has_member(s) ' % self.get_id()) - def has_type(self, typename, prefix, env, extra_args, dependencies=None): + def has_type(self, typename, prefix, env, extra_args, *, dependencies=None): raise EnvironmentException('%s does not support has_type ' % self.get_id()) def symbols_have_underscore_prefix(self, env): @@ -843,6 +914,9 @@ class Compiler: def get_linker_always_args(self): return [] + def get_linker_lib_prefix(self): + return '' + def gen_import_library_args(self, implibname): """ Used only on Windows for libraries that need an import library. @@ -1088,46 +1162,30 @@ class Compiler: def get_instruction_set_args(self, instruction_set): return None - def build_osx_rpath_args(self, build_dir, rpath_paths, build_rpath): - # Ensure that there is enough space for large RPATHs and install_name - args = ['-Wl,-headerpad_max_install_names'] - if not rpath_paths and not build_rpath: - return args - # On OSX, rpaths must be absolute. 
- abs_rpaths = [os.path.join(build_dir, p) for p in rpath_paths] - if build_rpath != '': - abs_rpaths.append(build_rpath) - # Need to deduplicate abs_rpaths, as rpath_paths and - # build_rpath are not guaranteed to be disjoint sets - args += ['-Wl,-rpath,' + rp for rp in OrderedSet(abs_rpaths)] - return args - def build_unix_rpath_args(self, build_dir, from_dir, rpath_paths, build_rpath, install_rpath): if not rpath_paths and not install_rpath and not build_rpath: return [] - # The rpaths we write must be relative, because otherwise - # they have different length depending on the build + args = [] + if mesonlib.is_osx(): + # Ensure that there is enough space for install_name_tool in-place editing of large RPATHs + args.append('-Wl,-headerpad_max_install_names') + # @loader_path is the equivalent of $ORIGIN on macOS + # https://stackoverflow.com/q/26280738 + origin_placeholder = '@loader_path' + else: + origin_placeholder = '$ORIGIN' + # The rpaths we write must be relative if they point to the build dir, + # because otherwise they have different length depending on the build # directory. This breaks reproducible builds. - rel_rpaths = [] - for p in rpath_paths: - if p == from_dir: - relative = '' # relpath errors out in this case - else: - relative = os.path.relpath(os.path.join(build_dir, p), os.path.join(build_dir, from_dir)) - rel_rpaths.append(relative) - paths = ':'.join([os.path.join('$ORIGIN', p) for p in rel_rpaths]) + processed_rpaths = prepare_rpaths(rpath_paths, build_dir, from_dir) + # Need to deduplicate rpaths, as macOS's install_name_tool + # is *very* allergic to duplicate -delete_rpath arguments + # when calling depfixer on installation. + all_paths = OrderedSet([os.path.join(origin_placeholder, p) for p in processed_rpaths]) # Build_rpath is used as-is (it is usually absolute). 
if build_rpath != '': - if paths != '': - paths += ':' - paths += build_rpath - if len(paths) < len(install_rpath): - padding = 'X' * (len(install_rpath) - len(paths)) - if not paths: - paths = padding - else: - paths = paths + ':' + padding - args = [] + all_paths.add(build_rpath) + if mesonlib.is_dragonflybsd() or mesonlib.is_openbsd(): # This argument instructs the compiler to record the value of # ORIGIN in the .dynamic section of the elf. On Linux this is done @@ -1135,7 +1193,23 @@ class Compiler: # $ORIGIN in the runtime path will be undefined and any binaries # linked against local libraries will fail to resolve them. args.append('-Wl,-z,origin') - args.append('-Wl,-rpath,' + paths) + + if mesonlib.is_osx(): + # macOS does not support colon-separated strings in LC_RPATH, + # hence we have to pass each path component individually + args += ['-Wl,-rpath,' + rp for rp in all_paths] + else: + # In order to avoid relinking for RPATH removal, the binary needs to contain just + # enough space in the ELF header to hold the final installation RPATH. + paths = ':'.join(all_paths) + if len(paths) < len(install_rpath): + padding = 'X' * (len(install_rpath) - len(paths)) + if not paths: + paths = padding + else: + paths = paths + ':' + padding + args.append('-Wl,-rpath,' + paths) + if get_compiler_is_linuxlike(self): # Rpaths to use while linking must be absolute. These are not # written to the binary. 
Needed only with GNU ld: @@ -1174,6 +1248,46 @@ class Compiler: raise EnvironmentException( 'Language {} does not support function attributes.'.format(self.get_display_language())) + def get_pic_args(self): + m = 'Language {} does not support position-independent code' + raise EnvironmentException(m.format(self.get_display_language())) + + def get_pie_args(self): + m = 'Language {} does not support position-independent executable' + raise EnvironmentException(m.format(self.get_display_language())) + + def get_pie_link_args(self): + m = 'Language {} does not support position-independent executable' + raise EnvironmentException(m.format(self.get_display_language())) + + def get_argument_syntax(self): + """Returns the argument family type. + + Compilers fall into families if they try to emulate the command line + interface of another compiler. For example, clang is in the GCC family + since it accepts most of the same arguments as GCC. ICL (ICC on + windows) is in the MSVC family since it accepts most of the same + arguments as MSVC. 
+ """ + return 'other' + + def get_profile_generate_args(self): + raise EnvironmentException( + '%s does not support get_profile_generate_args ' % self.get_id()) + + def get_profile_use_args(self): + raise EnvironmentException( + '%s does not support get_profile_use_args ' % self.get_id()) + + def get_undefined_link_args(self): + ''' + Get args for allowing undefined symbols when linking to a shared library + ''' + return [] + + def remove_linkerlike_args(self, args): + return [x for x in args if not x.startswith('-Wl')] + @enum.unique class CompilerType(enum.Enum): @@ -1191,6 +1305,10 @@ class CompilerType(enum.Enum): ICC_OSX = 21 ICC_WIN = 22 + ARM_WIN = 30 + + CCRX_WIN = 40 + @property def is_standard_compiler(self): return self.name in ('GCC_STANDARD', 'CLANG_STANDARD', 'ICC_STANDARD') @@ -1201,7 +1319,7 @@ class CompilerType(enum.Enum): @property def is_windows_compiler(self): - return self.name in ('GCC_MINGW', 'GCC_CYGWIN', 'CLANG_MINGW', 'ICC_WIN') + return self.name in ('GCC_MINGW', 'GCC_CYGWIN', 'CLANG_MINGW', 'ICC_WIN', 'ARM_WIN', 'CCRX_WIN') def get_macos_dylib_install_name(prefix, shlib_name, suffix, soversion): @@ -1314,9 +1432,11 @@ class GnuLikeCompiler(abc.ABC): def __init__(self, compiler_type): self.compiler_type = compiler_type self.base_options = ['b_pch', 'b_lto', 'b_pgo', 'b_sanitize', 'b_coverage', - 'b_ndebug', 'b_staticpic', 'b_asneeded'] - if not self.compiler_type.is_osx_compiler: + 'b_ndebug', 'b_staticpic', 'b_pie'] + if not self.compiler_type.is_osx_compiler and not self.compiler_type.is_windows_compiler: self.base_options.append('b_lundef') + if not self.compiler_type.is_windows_compiler: + self.base_options.append('b_asneeded') # All GCC-like backends can do assembly self.can_compile_suffixes.add('s') @@ -1335,6 +1455,12 @@ class GnuLikeCompiler(abc.ABC): return [] # On Window and OS X, pic is always on. 
return ['-fPIC'] + def get_pie_args(self): + return ['-fPIE'] + + def get_pie_link_args(self): + return ['-pie'] + def get_buildtype_args(self, buildtype): return gnulike_buildtype_args[buildtype] @@ -1389,6 +1515,36 @@ class GnuLikeCompiler(abc.ABC): def gnu_symbol_visibility_args(self, vistype): return gnu_symbol_visibility_args[vistype] + def gen_vs_module_defs_args(self, defsfile): + if not isinstance(defsfile, str): + raise RuntimeError('Module definitions file should be str') + # On Windows targets, .def files may be specified on the linker command + # line like an object file. + if self.compiler_type.is_windows_compiler: + return [defsfile] + # For other targets, discard the .def file. + return [] + + def get_argument_syntax(self): + return 'gcc' + + def get_profile_generate_args(self): + return ['-fprofile-generate'] + + def get_profile_use_args(self): + return ['-fprofile-use', '-fprofile-correction'] + + def get_allow_undefined_link_args(self): + if self.compiler_type.is_osx_compiler: + # Apple ld + return ['-Wl,-undefined,dynamic_lookup'] + elif self.compiler_type.is_windows_compiler: + # For PE/COFF this is impossible + return [] + else: + # GNU ld and LLVM lld + return ['-Wl,--allow-shlib-undefined'] + class GnuCompiler(GnuLikeCompiler): """ @@ -1427,18 +1583,8 @@ class GnuCompiler(GnuLikeCompiler): def get_pch_suffix(self): return 'gch' - def gen_vs_module_defs_args(self, defsfile): - if not isinstance(defsfile, str): - raise RuntimeError('Module definitions file should be str') - # On Windows targets, .def files may be specified on the linker command - # line like an object file. - if self.compiler_type in (CompilerType.GCC_CYGWIN, CompilerType.GCC_MINGW): - return [defsfile] - # For other targets, discard the .def file. 
- return [] - def get_gui_app_args(self, value): - if self.compiler_type in (CompilerType.GCC_CYGWIN, CompilerType.GCC_MINGW) and value: + if self.compiler_type.is_windows_compiler and value: return ['-mwindows'] return [] @@ -1518,7 +1664,7 @@ class ClangCompiler(GnuLikeCompiler): myargs + args, env) - def has_function(self, funcname, prefix, env, extra_args=None, dependencies=None): + def has_function(self, funcname, prefix, env, *, extra_args=None, dependencies=None): if extra_args is None: extra_args = [] # Starting with XCode 8, we need to pass this to force linker @@ -1527,7 +1673,8 @@ class ClangCompiler(GnuLikeCompiler): # https://github.com/Homebrew/homebrew-core/issues/3727 if self.compiler_type.is_osx_compiler and version_compare(self.version, '>=8.0'): extra_args.append('-Wl,-no_weak_imports') - return super().has_function(funcname, prefix, env, extra_args, dependencies) + return super().has_function(funcname, prefix, env, extra_args=extra_args, + dependencies=dependencies) def openmp_flags(self): if version_compare(self.version, '>=3.8.0'): @@ -1540,7 +1687,7 @@ class ClangCompiler(GnuLikeCompiler): class ArmclangCompiler: - def __init__(self): + def __init__(self, compiler_type): if not self.is_cross: raise EnvironmentException('armclang supports only cross-compilation.') # Check whether 'armlink.exe' is available in path @@ -1566,6 +1713,7 @@ class ArmclangCompiler: if not version_compare(self.version, '==' + linker_ver): raise EnvironmentException('armlink version does not match with compiler version') self.id = 'armclang' + self.compiler_type = compiler_type self.base_options = ['b_pch', 'b_lto', 'b_pgo', 'b_sanitize', 'b_coverage', 'b_ndebug', 'b_staticpic', 'b_colorout'] # Assembly @@ -1618,11 +1766,33 @@ class ArmclangCompiler: def get_debug_args(self, is_debug): return clike_debug_args[is_debug] + def gen_export_dynamic_link_args(self, env): + """ + The args for export dynamic + """ + return ['--export_dynamic'] + + def 
gen_import_library_args(self, implibname): + """ + The args of the outputted import library -# Tested on linux for ICC 14.0.3, 15.0.6, 16.0.4, 17.0.1 + ArmLinker's symdefs output can be used as implib + """ + return ['--symdefs=' + implibname] + + +# Tested on linux for ICC 14.0.3, 15.0.6, 16.0.4, 17.0.1, 19.0.0 class IntelCompiler(GnuLikeCompiler): + def __init__(self, compiler_type): super().__init__(compiler_type) + # As of 19.0.0 ICC doesn't have sanitizer, color, or lto support. + # + # It does have IPO, which serves much the same purpose as LOT, but + # there is an unfortunate rule for using IPO (you can't control the + # name of the output file) which break assumptions meson makes + self.base_options = ['b_pch', 'b_lundef', 'b_asneeded', 'b_pgo', + 'b_coverage', 'b_ndebug', 'b_staticpic', 'b_pie'] self.id = 'intel' self.lang_header = 'none' @@ -1645,17 +1815,32 @@ class IntelCompiler(GnuLikeCompiler): else: return ['-openmp'] - def has_arguments(self, args, env, code, mode): - # -diag-error 10148 is required to catch invalid -W options - return super().has_arguments(args + ['-diag-error', '10006', '-diag-error', '10148'], env, code, mode) + def compiles(self, *args, **kwargs): + # This covers a case that .get('foo', []) doesn't, that extra_args is + # defined and is None + extra_args = kwargs.get('extra_args') or [] + kwargs['extra_args'] = [ + extra_args, + '-diag-error', '10006', # ignoring unknown option + '-diag-error', '10148', # Option not supported + '-diag-error', '1292', # unknown __attribute__ + ] + return super().compiles(*args, **kwargs) + + def get_profile_generate_args(self): + return ['-prof-gen=threadsafe'] + + def get_profile_use_args(self): + return ['-prof-use'] class ArmCompiler: # Functionality that is common to all ARM family compilers. 
- def __init__(self): + def __init__(self, compiler_type): if not self.is_cross: raise EnvironmentException('armcc supports only cross-compilation.') self.id = 'arm' + self.compiler_type = compiler_type default_warn_args = [] self.warn_args = {'1': default_warn_args, '2': default_warn_args + [], @@ -1724,3 +1909,97 @@ class ArmCompiler: def get_debug_args(self, is_debug): return clike_debug_args[is_debug] + +class CcrxCompiler: + def __init__(self, compiler_type): + if not self.is_cross: + raise EnvironmentException('ccrx supports only cross-compilation.') + # Check whether 'rlink.exe' is available in path + self.linker_exe = 'rlink.exe' + args = '--version' + try: + p, stdo, stderr = Popen_safe(self.linker_exe, args) + except OSError as e: + err_msg = 'Unknown linker\nRunning "{0}" gave \n"{1}"'.format(' '.join([self.linker_exe] + [args]), e) + raise EnvironmentException(err_msg) + self.id = 'ccrx' + self.compiler_type = compiler_type + # Assembly + self.can_compile_suffixes.update('s') + default_warn_args = [] + self.warn_args = {'1': default_warn_args, + '2': default_warn_args + [], + '3': default_warn_args + []} + + def can_linker_accept_rsp(self): + return False + + def get_pic_args(self): + # PIC support is not enabled by default for CCRX, + # if users want to use it, they need to add the required arguments explicitly + return [] + + def get_buildtype_args(self, buildtype): + return ccrx_buildtype_args[buildtype] + + def get_buildtype_linker_args(self, buildtype): + return ccrx_buildtype_linker_args[buildtype] + + # Override CCompiler.get_std_shared_lib_link_args + def get_std_shared_lib_link_args(self): + return [] + + def get_pch_suffix(self): + return 'pch' + + def get_pch_use_args(self, pch_dir, header): + return [] + + # Override CCompiler.get_dependency_gen_args + def get_dependency_gen_args(self, outtarget, outfile): + return [] + + # Override CCompiler.build_rpath_args + def build_rpath_args(self, build_dir, from_dir, rpath_paths, build_rpath, 
install_rpath): + return [] + + def thread_flags(self, env): + return [] + + def thread_link_flags(self, env): + return [] + + def get_linker_exelist(self): + return [self.linker_exe] + + def get_linker_lib_prefix(self): + return '-lib=' + + def get_coverage_args(self): + return [] + + def get_coverage_link_args(self): + return [] + + def get_optimization_args(self, optimization_level): + return ccrx_optimization_args[optimization_level] + + def get_debug_args(self, is_debug): + return ccrx_debug_args[is_debug] + + @classmethod + def unix_args_to_native(cls, args): + result = [] + for i in args: + if i.startswith('-D'): + i = '-define=' + i[2:] + if i.startswith('-I'): + i = '-include=' + i[2:] + if i.startswith('-Wl,-rpath='): + continue + elif i == '--print-search-dirs': + continue + elif i.startswith('-L'): + continue + result.append(i) + return result diff --git a/mesonbuild/compilers/cpp.py b/mesonbuild/compilers/cpp.py index c68c529..87371c0 100644 --- a/mesonbuild/compilers/cpp.py +++ b/mesonbuild/compilers/cpp.py @@ -19,9 +19,8 @@ from .. import coredata from .. import mlog from ..mesonlib import MesonException, version_compare -from .c import CCompiler, VisualStudioCCompiler +from .c import CCompiler, VisualStudioCCompiler, ClangClCCompiler from .compilers import ( - CompilerType, gnu_winlibs, msvc_winlibs, ClangCompiler, @@ -30,6 +29,7 @@ from .compilers import ( IntelCompiler, ArmCompiler, ArmclangCompiler, + CcrxCompiler, ) from .c_function_attributes import CXX_FUNC_ATTRIBUTES @@ -61,9 +61,11 @@ class CPPCompiler(CCompiler): # too strict without this and always fails. 
return super().get_compiler_check_args() + ['-fpermissive'] - def has_header_symbol(self, hname, symbol, prefix, env, extra_args=None, dependencies=None): + def has_header_symbol(self, hname, symbol, prefix, env, *, extra_args=None, dependencies=None): # Check if it's a C-like symbol - if super().has_header_symbol(hname, symbol, prefix, env, extra_args, dependencies): + if super().has_header_symbol(hname, symbol, prefix, env, + extra_args=extra_args, + dependencies=dependencies): return True # Check if it's a class or a template if extra_args is None: @@ -73,7 +75,8 @@ class CPPCompiler(CCompiler): #include <{header}> using {symbol}; int main () {{ return 0; }}''' - return self.compiles(t.format(**fargs), env, extra_args, dependencies) + return self.compiles(t.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies) def _test_cpp_std_arg(self, cpp_std_value): # Test whether the compiler understands a -std=XY argument @@ -157,9 +160,9 @@ class ClangCPPCompiler(ClangCompiler, CPPCompiler): class ArmclangCPPCompiler(ArmclangCompiler, CPPCompiler): - def __init__(self, exelist, version, is_cross, exe_wrapper=None, **kwargs): + def __init__(self, exelist, version, compiler_type, is_cross, exe_wrapper=None, **kwargs): CPPCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwargs) - ArmclangCompiler.__init__(self) + ArmclangCompiler.__init__(self, compiler_type) default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor'] self.warn_args = {'1': default_warn_args, '2': default_warn_args + ['-Wextra'], @@ -202,7 +205,7 @@ class GnuCPPCompiler(GnuCompiler, CPPCompiler): 'cpp_debugstl': coredata.UserBooleanOption('cpp_debugstl', 'STL debug mode', False)}) - if self.compiler_type == CompilerType.GCC_MINGW: + if self.compiler_type.is_windows_compiler: opts.update({ 'cpp_winlibs': coredata.UserArrayOption('cpp_winlibs', 'Standard Win libraries to link against', gnu_winlibs), }) @@ -218,7 +221,7 @@ class GnuCPPCompiler(GnuCompiler, 
CPPCompiler): return args def get_option_link_args(self, options): - if self.compiler_type == CompilerType.GCC_MINGW: + if self.compiler_type.is_windows_compiler: return options['cpp_winlibs'].value[:] return [] @@ -245,11 +248,13 @@ class ElbrusCPPCompiler(GnuCPPCompiler, ElbrusCompiler): # Elbrus C++ compiler does not have lchmod, but there is only linker warning, not compiler error. # So we should explicitly fail at this case. - def has_function(self, funcname, prefix, env, extra_args=None, dependencies=None): + def has_function(self, funcname, prefix, env, *, extra_args=None, dependencies=None): if funcname == 'lchmod': return False else: - return super().has_function(funcname, prefix, env, extra_args, dependencies) + return super().has_function(funcname, prefix, env, + extra_args=extra_args, + dependencies=dependencies) class IntelCPPCompiler(IntelCompiler, CPPCompiler): @@ -309,11 +314,26 @@ class VisualStudioCPPCompiler(VisualStudioCCompiler, CPPCompiler): self.base_options = ['b_pch', 'b_vscrt'] # FIXME add lto, pgo and the like def get_options(self): + cpp_stds = ['none', 'c++11', 'vc++11'] + if self.id == 'clang-cl': + cpp_stds.extend(['c++14', 'vc++14', 'c++17', 'vc++17', 'c++latest']) + else: + # Visual Studio 2015 and later + if version_compare(self.version, '>=19'): + cpp_stds.extend(['c++14', 'vc++14', 'c++latest', 'vc++latest']) + # Visual Studio 2017 and later + if version_compare(self.version, '>=19.11'): + cpp_stds.extend(['c++17', 'vc++17']) + opts = CPPCompiler.get_options(self) opts.update({'cpp_eh': coredata.UserComboOption('cpp_eh', 'C++ exception handling type.', ['none', 'a', 's', 'sc'], 'sc'), + 'cpp_std': coredata.UserComboOption('cpp_std', + 'C++ language standard to use', + cpp_stds, + 'none'), 'cpp_winlibs': coredata.UserArrayOption('cpp_winlibs', 'Windows libs to link against.', msvc_winlibs)}) @@ -321,9 +341,41 @@ class VisualStudioCPPCompiler(VisualStudioCCompiler, CPPCompiler): def get_option_compile_args(self, options): args = [] 
- std = options['cpp_eh'] - if std.value != 'none': - args.append('/EH' + std.value) + + eh = options['cpp_eh'] + if eh.value != 'none': + args.append('/EH' + eh.value) + + vc_version_map = { + 'none': (True, None), + 'vc++11': (True, 11), + 'vc++14': (True, 14), + 'vc++17': (True, 17), + 'c++11': (False, 11), + 'c++14': (False, 14), + 'c++17': (False, 17)} + + permissive, ver = vc_version_map[options['cpp_std'].value] + + if ver is None: + pass + elif ver == 11: + # Note: there is no explicit flag for supporting C++11; we attempt to do the best we can + # which means setting the C++ standard version to C++14, in compilers that support it + # (i.e., after VS2015U3) + # if one is using anything before that point, one cannot set the standard. + if self.id == 'clang-cl' or version_compare(self.version, '>=19.00.24210'): + mlog.warning('MSVC does not support C++11; ' + 'attempting best effort; setting the standard to C++14') + args.append('/std:c++14') + else: + mlog.warning('This version of MSVC does not support cpp_std arguments') + else: + args.append('/std:c++{}'.format(ver)) + + if not permissive and version_compare(self.version, '>=19.11'): + args.append('/permissive-') + return args def get_option_link_args(self, options): @@ -334,11 +386,15 @@ class VisualStudioCPPCompiler(VisualStudioCCompiler, CPPCompiler): # so just use the plain C args. 
return VisualStudioCCompiler.get_compiler_check_args(self) +class ClangClCPPCompiler(VisualStudioCPPCompiler, ClangClCCompiler): + def __init__(self, exelist, version, is_cross, exe_wrap, is_64): + VisualStudioCPPCompiler.__init__(self, exelist, version, is_cross, exe_wrap, is_64) + self.id = 'clang-cl' class ArmCPPCompiler(ArmCompiler, CPPCompiler): - def __init__(self, exelist, version, is_cross, exe_wrap=None, **kwargs): + def __init__(self, exelist, version, compiler_type, is_cross, exe_wrap=None, **kwargs): CPPCompiler.__init__(self, exelist, version, is_cross, exe_wrap, **kwargs) - ArmCompiler.__init__(self) + ArmCompiler.__init__(self, compiler_type) def get_options(self): opts = CPPCompiler.get_options(self) @@ -361,3 +417,31 @@ class ArmCPPCompiler(ArmCompiler, CPPCompiler): def get_compiler_check_args(self): return [] + + +class CcrxCPPCompiler(CcrxCompiler, CPPCompiler): + def __init__(self, exelist, version, compiler_type, is_cross, exe_wrap=None, **kwargs): + CPPCompiler.__init__(self, exelist, version, is_cross, exe_wrap, **kwargs) + CcrxCompiler.__init__(self, compiler_type) + + # Override CCompiler.get_always_args + def get_always_args(self): + return ['-nologo', '-lang=cpp'] + + def get_option_compile_args(self, options): + return [] + + def get_compile_only_args(self): + return [] + + def get_output_args(self, target): + return ['-output=obj=%s' % target] + + def get_linker_output_args(self, outputname): + return ['-output=%s' % outputname] + + def get_option_link_args(self, options): + return [] + + def get_compiler_check_args(self): + return [] diff --git a/mesonbuild/compilers/d.py b/mesonbuild/compilers/d.py index e9ceafb..2cf0fbd 100644 --- a/mesonbuild/compilers/d.py +++ b/mesonbuild/compilers/d.py @@ -30,14 +30,17 @@ from .compilers import ( ) d_feature_args = {'gcc': {'unittest': '-funittest', + 'debug': '-fdebug', 'version': '-fversion', 'import_dir': '-J' }, 'llvm': {'unittest': '-unittest', + 'debug': '-d-debug', 'version': 
'-d-version', 'import_dir': '-J' }, 'dmd': {'unittest': '-unittest', + 'debug': '-debug', 'version': '-version', 'import_dir': '-J' } @@ -168,16 +171,53 @@ class DCompiler(Compiler): if unittest: res.append(unittest_arg) + if 'debug' in kwargs: + debug_level = -1 + debugs = kwargs.pop('debug') + if not isinstance(debugs, list): + debugs = [debugs] + + debug_arg = d_feature_args[self.id]['debug'] + if not debug_arg: + raise EnvironmentException('D compiler %s does not support conditional debug identifiers.' % self.name_string()) + + # Parse all debug identifiers and the largest debug level identifier + for d in debugs: + if isinstance(d, int): + if d > debug_level: + debug_level = d + elif isinstance(d, str) and d.isdigit(): + if int(d) > debug_level: + debug_level = int(d) + else: + res.append('{0}={1}'.format(debug_arg, d)) + + if debug_level >= 0: + res.append('{0}={1}'.format(debug_arg, debug_level)) + if 'versions' in kwargs: + version_level = -1 versions = kwargs.pop('versions') if not isinstance(versions, list): versions = [versions] version_arg = d_feature_args[self.id]['version'] if not version_arg: - raise EnvironmentException('D compiler %s does not support the "feature versions" feature.' % self.name_string()) + raise EnvironmentException('D compiler %s does not support conditional version identifiers.' 
% self.name_string()) + + # Parse all version identifiers and the largest version level identifier for v in versions: - res.append('{0}={1}'.format(version_arg, v)) + if isinstance(v, int): + if v > version_level: + version_level = v + elif isinstance(v, str) and v.isdigit(): + if int(v) > version_level: + version_level = int(v) + else: + res.append('{0}={1}'.format(version_arg, v)) + + if version_level >= 0: + res.append('{0}={1}'.format(version_arg, version_level)) if 'import_dirs' in kwargs: import_dirs = kwargs.pop('import_dirs') @@ -234,6 +274,8 @@ class DCompiler(Compiler): return ['-Wl,-rpath,{}'.format(paths)] def _get_compiler_check_args(self, env, extra_args, dependencies, mode='compile'): + if callable(extra_args): + extra_args = extra_args(mode) if extra_args is None: extra_args = [] elif isinstance(extra_args, str): @@ -261,7 +303,7 @@ class DCompiler(Compiler): args += extra_args return args - def compiles(self, code, env, extra_args=None, dependencies=None, mode='compile'): + def compiles(self, code, env, *, extra_args=None, dependencies=None, mode='compile'): args = self._get_compiler_check_args(env, extra_args, dependencies, mode) with self.compile(code, args, mode) as p: @@ -339,8 +381,12 @@ class DCompiler(Compiler): dcargs.append('-L=' + arg) continue - - dcargs.append(arg) + elif not arg.startswith('-') and arg.endswith(('.a', '.lib')): + # ensure static libraries are passed through to the linker + dcargs.append('-L=' + arg) + continue + else: + dcargs.append(arg) return dcargs @@ -378,7 +424,11 @@ class DCompiler(Compiler): return args def get_debug_args(self, is_debug): - return clike_debug_args[is_debug] + ddebug_args = [] + if is_debug: + ddebug_args = [d_feature_args[self.id]['debug']] + + return clike_debug_args[is_debug] + ddebug_args def get_crt_args(self, crt_val, buildtype): if not is_windows(): diff --git a/mesonbuild/compilers/fortran.py b/mesonbuild/compilers/fortran.py index b58c4e0..75db26d 100644 --- 
a/mesonbuild/compilers/fortran.py +++ b/mesonbuild/compilers/fortran.py @@ -144,9 +144,6 @@ end program prog def get_compiler_check_args(self): return CCompiler.get_compiler_check_args(self) - def get_allow_undefined_link_args(self): - return CCompiler.get_allow_undefined_link_args(self) - def get_output_args(self, target): return CCompiler.get_output_args(self, target) @@ -172,7 +169,7 @@ end program prog return ('-I', ) def get_module_outdir_args(self, path): - return ['-module' + path] + return ['-module', path] def module_name_to_filename(self, module_name): return module_name.lower() + '.mod' @@ -210,16 +207,18 @@ end program prog def _get_compiler_check_args(self, env, extra_args, dependencies, mode='compile'): return CCompiler._get_compiler_check_args(self, env, extra_args, dependencies, mode='compile') - def compiles(self, code, env, extra_args=None, dependencies=None, mode='compile'): - return CCompiler.compiles(self, code, env, extra_args, dependencies, mode) + def compiles(self, code, env, *, extra_args=None, dependencies=None, mode='compile'): + return CCompiler.compiles(self, code, env, extra_args=extra_args, + dependencies=dependencies, mode=mode) def _build_wrapper(self, code, env, extra_args, dependencies=None, mode='compile', want_output=False): return CCompiler._build_wrapper(self, code, env, extra_args, dependencies, mode, want_output) - def links(self, code, env, extra_args=None, dependencies=None): - return CCompiler.links(self, code, env, extra_args, dependencies) + def links(self, code, env, *, extra_args=None, dependencies=None): + return CCompiler.links(self, code, env, extra_args=extra_args, + dependencies=dependencies) - def run(self, code, env, extra_args=None, dependencies=None): + def run(self, code, env, *, extra_args=None, dependencies=None): return CCompiler.run(self, code, env, extra_args, dependencies) def _get_patterns(self, *args, **kwargs): @@ -340,6 +339,15 @@ class IntelFortranCompiler(IntelCompiler, FortranCompiler): def 
get_preprocess_only_args(self): return ['-cpp', '-EP'] + def get_always_args(self): + """Ifort doesn't have -pipe.""" + val = super().get_always_args() + val.remove('-pipe') + return val + + def language_stdlib_only_link_flags(self): + return ['-lifcore', '-limf'] + class PathScaleFortranCompiler(FortranCompiler): def __init__(self, exelist, version, is_cross, exe_wrapper=None, **kwags): diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index f8754d7..de401ce 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -17,13 +17,15 @@ import pickle, os, uuid, shlex import sys from pathlib import PurePath from collections import OrderedDict -from .mesonlib import MesonException -from .mesonlib import default_libdir, default_libexecdir, default_prefix +from .mesonlib import ( + MesonException, default_libdir, default_libexecdir, default_prefix +) from .wrap import WrapMode import ast import argparse +import configparser -version = '0.48.2' +version = '0.49.0' backendlist = ['ninja', 'vs', 'vs2010', 'vs2015', 'vs2017', 'xcode'] default_yielding = False @@ -182,6 +184,7 @@ class UserArrayOption(UserOption): ', '.join(bad), ', '.join(self.choices))) return newvalue + class UserFeatureOption(UserComboOption): static_choices = ['enabled', 'disabled', 'auto'] @@ -197,6 +200,72 @@ class UserFeatureOption(UserComboOption): def is_auto(self): return self.value == 'auto' + +def load_configs(filenames): + """Load native files.""" + def gen(): + for f in filenames: + f = os.path.expanduser(os.path.expandvars(f)) + if os.path.exists(f): + yield f + continue + elif sys.platform != 'win32': + f = os.path.basename(f) + paths = [ + os.environ.get('XDG_DATA_HOME', os.path.expanduser('~/.local/share')), + ] + os.environ.get('XDG_DATA_DIRS', '/usr/local/share:/usr/share').split(':') + for path in paths: + path_to_try = os.path.join(path, 'meson', 'native', f) + if os.path.isfile(path_to_try): + yield path_to_try + break + else: + raise MesonException('Cannot find 
specified native file: ' + f) + continue + + raise MesonException('Cannot find specified native file: ' + f) + + config = configparser.SafeConfigParser() + config.read(gen()) + return config + + +def _get_section(config, section): + if config.has_section(section): + final = {} + for k, v in config.items(section): + # Windows paths... + v = v.replace('\\', '\\\\') + try: + final[k] = ast.literal_eval(v) + except SyntaxError: + raise MesonException( + 'Malformed value in native file variable: {}'.format(v)) + return final + return {} + + +class ConfigData: + + """Contains configuration information provided by the user for the build.""" + + def __init__(self, config=None): + if config: + self.binaries = _get_section(config, 'binaries') + # global is a keyword and globals is a builtin, rather than mangle it, + # use a similar word + self.universal = _get_section(config, 'globals') + self.subprojects = {s: _get_section(config, s) for s in config.sections() + if s not in {'binaries', 'globals'}} + else: + self.binaries = {} + self.universal = {} + self.subprojects = {} + + def get_binaries(self, name): + return self.binaries.get(name, None) + + # This class contains all data that must persist over multiple # invocations of Meson. It is roughly the same thing as # cmakecache. @@ -223,12 +292,20 @@ class CoreData: self.base_options = {} self.external_preprocess_args = {} # CPPFLAGS only self.cross_file = self.__load_cross_file(options.cross_file) - self.wrap_mode = options.wrap_mode if options.wrap_mode is not None else WrapMode.default self.compilers = OrderedDict() self.cross_compilers = OrderedDict() self.deps = OrderedDict() # Only to print a warning if it changes between Meson invocations. 
self.pkgconf_envvar = os.environ.get('PKG_CONFIG_PATH', '') + self.config_files = self.__load_config_files(options.native_file) + + @staticmethod + def __load_config_files(filenames): + if not filenames: + return [] + filenames = [os.path.abspath(os.path.expanduser(os.path.expanduser(f))) + for f in filenames] + return filenames @staticmethod def __load_cross_file(filename): @@ -338,7 +415,10 @@ class CoreData: def get_builtin_option(self, optname): if optname in self.builtins: - return self.builtins[optname].value + v = self.builtins[optname] + if optname == 'wrap_mode': + return WrapMode.from_string(v.value) + return v.value raise RuntimeError('Tried to get unknown builtin option %s.' % optname) def set_builtin_option(self, optname, value): @@ -457,6 +537,51 @@ class CoreData: sub = 'In subproject {}: '.format(subproject) if subproject else '' mlog.warning('{}Unknown options: "{}"'.format(sub, unknown_options)) +class CmdLineFileParser(configparser.ConfigParser): + def __init__(self): + # We don't want ':' as key delimiter, otherwise it would break when + # storing subproject options like "subproject:option=value" + super().__init__(delimiters=['=']) + +def get_cmd_line_file(build_dir): + return os.path.join(build_dir, 'meson-private', 'cmd_line.txt') + +def read_cmd_line_file(build_dir, options): + filename = get_cmd_line_file(build_dir) + config = CmdLineFileParser() + config.read(filename) + + # Do a copy because config is not really a dict. options.cmd_line_options + # overrides values from the file. 
+ d = dict(config['options']) + d.update(options.cmd_line_options) + options.cmd_line_options = d + + properties = config['properties'] + if options.cross_file is None: + options.cross_file = properties.get('cross_file', None) + +def write_cmd_line_file(build_dir, options): + filename = get_cmd_line_file(build_dir) + config = CmdLineFileParser() + + properties = {} + if options.cross_file is not None: + properties['cross_file'] = options.cross_file + + config['options'] = options.cmd_line_options + config['properties'] = properties + with open(filename, 'w') as f: + config.write(f) + +def update_cmd_line_file(build_dir, options): + filename = get_cmd_line_file(build_dir) + config = CmdLineFileParser() + config.read(filename) + config['options'].update(options.cmd_line_options) + with open(filename, 'w') as f: + config.write(f) + def load(build_dir): filename = os.path.join(build_dir, 'meson-private', 'coredata.dat') load_fail_msg = 'Coredata file {!r} is corrupted. Try with a fresh build tree.'.format(filename) @@ -468,7 +593,8 @@ def load(build_dir): if not isinstance(obj, CoreData): raise MesonException(load_fail_msg) if obj.version != version: - raise MesonException('Build directory has been generated with Meson version %s, which is incompatible with current version %s.\nPlease delete this build directory AND create a new one.' 
% + raise MesonException('Build directory has been generated with Meson version %s, ' + 'which is incompatible with current version %s.\n' % (obj.version, version)) return obj @@ -616,7 +742,11 @@ builtin_options = { 'install_umask': [UserUmaskOption, 'Default umask to apply on permissions of installed files', '022'], 'auto_features': [UserFeatureOption, "Override value of all 'auto' features", 'auto'], 'optimization': [UserComboOption, 'Optimization level', ['0', 'g', '1', '2', '3', 's'], '0'], - 'debug': [UserBooleanOption, 'Debug', True] + 'debug': [UserBooleanOption, 'Debug', True], + 'wrap_mode': [UserComboOption, 'Wrap mode', ['default', + 'nofallback', + 'nodownload', + 'forcefallback'], 'default'], } # Special prefix-dependent defaults for installation directories that reside in diff --git a/mesonbuild/dependencies/__init__.py b/mesonbuild/dependencies/__init__.py index 00b6fa2..afe2a3b 100644 --- a/mesonbuild/dependencies/__init__.py +++ b/mesonbuild/dependencies/__init__.py @@ -16,9 +16,9 @@ from .boost import BoostDependency from .base import ( # noqa: F401 Dependency, DependencyException, DependencyMethods, ExternalProgram, EmptyExternalProgram, NonExistingExternalProgram, ExternalDependency, NotFoundDependency, ExternalLibrary, ExtraFrameworkDependency, InternalDependency, - PkgConfigDependency, find_external_dependency, get_dep_identifier, packages, _packages_accept_language) + PkgConfigDependency, CMakeDependency, find_external_dependency, get_dep_identifier, packages, _packages_accept_language) from .dev import GMockDependency, GTestDependency, LLVMDependency, ValgrindDependency -from .misc import (MPIDependency, OpenMPDependency, Python3Dependency, ThreadDependency, PcapDependency, CupsDependency, LibWmfDependency) +from .misc import (MPIDependency, OpenMPDependency, Python3Dependency, ThreadDependency, PcapDependency, CupsDependency, LibWmfDependency, LibGCryptDependency) from .platform import AppleFrameworks from .ui import GLDependency, 
GnuStepDependency, Qt4Dependency, Qt5Dependency, SDL2Dependency, WxDependency, VulkanDependency @@ -39,6 +39,7 @@ packages.update({ 'pcap': PcapDependency, 'cups': CupsDependency, 'libwmf': LibWmfDependency, + 'libgcrypt': LibGCryptDependency, # From platform: 'appleframeworks': AppleFrameworks, diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py index a80423f..cd02939 100644 --- a/mesonbuild/dependencies/base.py +++ b/mesonbuild/dependencies/base.py @@ -25,6 +25,8 @@ import shlex import shutil import textwrap import platform +import itertools +import ctypes from enum import Enum from pathlib import PurePath @@ -48,6 +50,7 @@ class DependencyMethods(Enum): AUTO = 'auto' PKGCONFIG = 'pkg-config' QMAKE = 'qmake' + CMAKE = 'cmake' # Just specify the standard link arguments, assuming the operating system provides the library. SYSTEM = 'system' # This is only supported on OSX - search the frameworks directory by name. @@ -174,7 +177,7 @@ class Dependency: parent (if any) and the requested values of any dependencies will be added as well. """ - RuntimeError('Unreachable code in partial_dependency called') + raise RuntimeError('Unreachable code in partial_dependency called') class InternalDependency(Dependency): @@ -399,6 +402,8 @@ class ConfigToolDependency(ExternalDependency): 'Falling back to searching PATH. This may find a ' 'native version of {0}!'.format(self.tool_name)) tools = self.tools + elif self.tool_name in self.env.config_info.binaries: + tools = [self.env.config_info.binaries[self.tool_name]] else: tools = self.tools @@ -448,6 +453,9 @@ class ConfigToolDependency(ExternalDependency): def get_config_value(self, args, stage): p, out, err = Popen_safe([self.config] + args) + # This is required to keep shlex from stripping path separators on + # Windows. Also, don't put escape sequences in config values, okay? 
+ out = out.replace('\\', '\\\\') if p.returncode != 0: if self.required: raise DependencyException( @@ -497,10 +505,10 @@ class PkgConfigDependency(ExternalDependency): if self.required: raise DependencyException('Pkg-config binary missing from cross file') else: - potential_pkgbin = ExternalProgram.from_cross_info(environment.cross_info, 'pkgconfig') + potential_pkgbin = ExternalProgram.from_bin_list( + environment.cross_info.config['binaries'], 'pkgconfig') if potential_pkgbin.found(): self.pkgbin = potential_pkgbin - PkgConfigDependency.class_pkgbin = self.pkgbin else: mlog.debug('Cross pkg-config %s not found.' % potential_pkgbin.name) # Only search for the native pkg-config the first time and @@ -843,6 +851,613 @@ class PkgConfigDependency(ExternalDependency): def log_tried(self): return self.type_name +class CMakeTraceLine: + def __init__(self, file, line, func, args): + self.file = file + self.line = line + self.func = func.lower() + self.args = args + + def __repr__(self): + s = 'CMake TRACE: {0}:{1} {2}({3})' + return s.format(self.file, self.line, self.func, self.args) + +class CMakeTarget: + def __init__(self, name, type, properies = {}): + self.name = name + self.type = type + self.properies = properies + + def __repr__(self): + s = 'CMake TARGET:\n -- name: {}\n -- type: {}\n -- properies: {{\n{} }}' + propSTR = '' + for i in self.properies: + propSTR += " '{}': {}\n".format(i, self.properies[i]) + return s.format(self.name, self.type, propSTR) + +class CMakeDependency(ExternalDependency): + # The class's copy of the CMake path. Avoids having to search for it + # multiple times in the same Meson invocation. 
+ class_cmakebin = None + class_cmakevers = None + # We cache all pkg-config subprocess invocations to avoid redundant calls + cmake_cache = {} + # Version string for the minimum CMake version + class_cmake_version = '>=3.4' + # CMake generators to try (empty for no generator) + class_cmake_generators = ['', 'Ninja', 'Unix Makefiles', 'Visual Studio 10 2010'] + + def _gen_exception(self, msg): + return DependencyException('Dependency {} not found: {}'.format(self.name, msg)) + + def __init__(self, name, environment, kwargs, language=None): + super().__init__('cmake', environment, language, kwargs) + self.name = name + self.is_libtool = False + # Store a copy of the CMake path on the object itself so it is + # stored in the pickled coredata and recovered. + self.cmakebin = None + self.cmakevers = None + + # Dict of CMake variables: '': ['list', 'of', 'values'] + self.vars = {} + + # Dict of CMakeTarget + self.targets = {} + + # Where all CMake "build dirs" are located + self.cmake_root_dir = environment.scratch_dir + + # When finding dependencies for cross-compiling, we don't care about + # the 'native' CMake binary + # TODO: Test if this works as expected + if self.want_cross: + if 'cmake' not in environment.cross_info.config['binaries']: + if self.required: + raise self._gen_exception('CMake binary missing from cross file') + else: + potential_cmake = ExternalProgram.from_cross_info(environment.cross_info, 'cmake') + if potential_cmake.found(): + self.cmakebin = potential_cmake + CMakeDependency.class_cmakebin = self.cmakebin + else: + mlog.debug('Cross CMake %s not found.' 
% potential_cmake.name) + # Only search for the native CMake the first time and + # store the result in the class definition + elif CMakeDependency.class_cmakebin is None: + self.cmakebin, self.cmakevers = self.check_cmake() + CMakeDependency.class_cmakebin = self.cmakebin + CMakeDependency.class_cmakevers = self.cmakevers + else: + self.cmakebin = CMakeDependency.class_cmakebin + self.cmakevers = CMakeDependency.class_cmakevers + + if not self.cmakebin: + if self.required: + raise self._gen_exception('CMake not found.') + return + + modules = kwargs.get('modules', []) + if not isinstance(modules, list): + modules = [modules] + self._detect_dep(name, modules) + + def __repr__(self): + s = '<{0} {1}: {2} {3}>' + return s.format(self.__class__.__name__, self.name, self.is_found, + self.version_reqs) + + def _detect_dep(self, name, modules): + # Detect a dependency with CMake using the '--find-package' mode + # and the trace output (stderr) + # + # When the trace output is enabled CMake prints all functions with + # parameters to stderr as they are executed. Since CMake 3.4.0 + # variables ("${VAR}") are also replaced in the trace output. 
+ mlog.debug('\nDetermining dependency {!r} with CMake executable ' + '{!r}'.format(name, self.cmakebin.get_path())) + + # Try different CMake generators since specifying no generator may fail + # in cygwin for some reason + for i in CMakeDependency.class_cmake_generators: + mlog.debug('Try CMake generator: {}'.format(i if len(i) > 0 else 'auto')) + + # Prepare options + cmake_opts = ['--trace-expand', '-DNAME={}'.format(name), '.'] + if len(i) > 0: + cmake_opts = ['-G', i] + cmake_opts + + # Run CMake + ret1, out1, err1 = self._call_cmake(cmake_opts) + + # Current generator was successful + if ret1 == 0: + break + + mlog.debug('CMake failed for generator {} and package {} with error code {}'.format(i, name, ret1)) + mlog.debug('OUT:\n{}\n\n\nERR:\n{}\n\n'.format(out1, err1)) + + # Check if any generator succeeded + if ret1 != 0: + return + + try: + # First parse the trace + lexer1 = self._lex_trace(err1) + + # All supported functions + functions = { + 'set': self._cmake_set, + 'unset': self._cmake_unset, + 'add_executable': self._cmake_add_executable, + 'add_library': self._cmake_add_library, + 'add_custom_target': self._cmake_add_custom_target, + 'set_property': self._cmake_set_property, + 'set_target_properties': self._cmake_set_target_properties + } + + # Primary pass -- parse everything + for l in lexer1: + # "Execute" the CMake function if supported + fn = functions.get(l.func, None) + if(fn): + fn(l) + + except DependencyException as e: + if self.required: + raise + else: + self.compile_args = [] + self.link_args = [] + self.is_found = False + self.reason = e + return + + # Whether the package is found or not is always stored in PACKAGE_FOUND + self.is_found = self._var_to_bool('PACKAGE_FOUND') + if not self.is_found: + return + + # Try to detect the version + vers_raw = self.get_first_cmake_var_of(['PACKAGE_VERSION']) + + if len(vers_raw) > 0: + self.version = vers_raw[0] + self.version.strip('"\' ') + + # Try guessing a CMake target if none is provided + 
if len(modules) == 0: + for i in self.targets: + tg = i.lower() + lname = name.lower() + if '{}::{}'.format(lname, lname) == tg or lname == tg.replace('::', ''): + mlog.debug('Guessed CMake target \'{}\''.format(i)) + modules = [i] + break + + # Failed to guess a target --> try the old-style method + if len(modules) == 0: + incDirs = self.get_first_cmake_var_of(['PACKAGE_INCLUDE_DIRS']) + libs = self.get_first_cmake_var_of(['PACKAGE_LIBRARIES']) + + # Try to use old style variables if no module is specified + if len(libs) > 0: + self.compile_args = list(map(lambda x: '-I{}'.format(x), incDirs)) + self.link_args = libs + mlog.debug('using old-style CMake variables for dependency {}'.format(name)) + return + + # Even the old-style approach failed. Nothing else we can do here + self.is_found = False + raise self._gen_exception('CMake: failed to guess a CMake target for {}.\n' + 'Try to explicitly specify one or more targets with the "modules" property.\n' + 'Valid targets are:\n{}'.format(name, list(self.targets.keys()))) + + # Set dependencies with CMake targets + processed_targets = [] + incDirs = [] + compileDefinitions = [] + compileOptions = [] + libraries = [] + for i in modules: + if i not in self.targets: + raise self._gen_exception('CMake: invalid CMake target {} for {}.\n' + 'Try to explicitly specify one or more targets with the "modules" property.\n' + 'Valid targets are:\n{}'.format(i, name, list(self.targets.keys()))) + + targets = [i] + while len(targets) > 0: + curr = targets.pop(0) + + # Skip already processed targets + if curr in processed_targets: + continue + + tgt = self.targets[curr] + cfgs = [] + cfg = '' + otherDeps = [] + mlog.debug(tgt) + + if 'INTERFACE_INCLUDE_DIRECTORIES' in tgt.properies: + incDirs += tgt.properies['INTERFACE_INCLUDE_DIRECTORIES'] + + if 'INTERFACE_COMPILE_DEFINITIONS' in tgt.properies: + tempDefs = list(tgt.properies['INTERFACE_COMPILE_DEFINITIONS']) + tempDefs = list(map(lambda x: '-D{}'.format(re.sub('^-D', '', x)), 
tempDefs)) + compileDefinitions += tempDefs + + if 'INTERFACE_COMPILE_OPTIONS' in tgt.properies: + compileOptions += tgt.properies['INTERFACE_COMPILE_OPTIONS'] + + if 'IMPORTED_CONFIGURATIONS' in tgt.properies: + cfgs = tgt.properies['IMPORTED_CONFIGURATIONS'] + cfg = cfgs[0] + + if 'RELEASE' in cfgs: + cfg = 'RELEASE' + + if 'IMPORTED_LOCATION_{}'.format(cfg) in tgt.properies: + libraries += tgt.properies['IMPORTED_LOCATION_{}'.format(cfg)] + elif 'IMPORTED_LOCATION' in tgt.properies: + libraries += tgt.properies['IMPORTED_LOCATION'] + + if 'INTERFACE_LINK_LIBRARIES' in tgt.properies: + otherDeps += tgt.properies['INTERFACE_LINK_LIBRARIES'] + + if 'IMPORTED_LINK_DEPENDENT_LIBRARIES_{}'.format(cfg) in tgt.properies: + otherDeps += tgt.properies['IMPORTED_LINK_DEPENDENT_LIBRARIES_{}'.format(cfg)] + elif 'IMPORTED_LINK_DEPENDENT_LIBRARIES' in tgt.properies: + otherDeps += tgt.properies['IMPORTED_LINK_DEPENDENT_LIBRARIES'] + + for j in otherDeps: + if j in self.targets: + targets += [j] + + processed_targets += [curr] + + # Make sure all elements in the lists are unique and sorted + incDirs = list(sorted(list(set(incDirs)))) + compileDefinitions = list(sorted(list(set(compileDefinitions)))) + compileOptions = list(sorted(list(set(compileOptions)))) + libraries = list(sorted(list(set(libraries)))) + + mlog.debug('Include Dirs: {}'.format(incDirs)) + mlog.debug('Compiler Definitions: {}'.format(compileDefinitions)) + mlog.debug('Compiler Options: {}'.format(compileOptions)) + mlog.debug('Libraries: {}'.format(libraries)) + + self.compile_args = compileOptions + compileDefinitions + list(map(lambda x: '-I{}'.format(x), incDirs)) + self.link_args = libraries + + def get_first_cmake_var_of(self, var_list): + # Return the first found CMake variable in list var_list + for i in var_list: + if i in self.vars: + return self.vars[i] + + return [] + + def get_cmake_var(self, var): + # Return the value of the CMake variable var or an empty list if var does not exist + for var in 
self.vars: + return self.vars[var] + + return [] + + def _var_to_bool(self, var): + if var not in self.vars: + return False + + if len(self.vars[var]) < 1: + return False + + if self.vars[var][0].upper() in ['1', 'ON', 'TRUE']: + return True + return False + + def _cmake_set(self, tline: CMakeTraceLine): + # DOC: https://cmake.org/cmake/help/latest/command/set.html + + # 1st remove PARENT_SCOPE and CACHE from args + args = [] + for i in tline.args: + if i == 'PARENT_SCOPE' or len(i) == 0: + continue + + # Discard everything after the CACHE keyword + if i == 'CACHE': + break + + args.append(i) + + if len(args) < 1: + raise self._gen_exception('CMake: set() requires at least one argument\n{}'.format(tline)) + + if len(args) == 1: + # Same as unset + if args[0] in self.vars: + del self.vars[args[0]] + else: + values = list(itertools.chain(*map(lambda x: x.split(';'), args[1:]))) + self.vars[args[0]] = values + + def _cmake_unset(self, tline: CMakeTraceLine): + # DOC: https://cmake.org/cmake/help/latest/command/unset.html + if len(tline.args) < 1: + raise self._gen_exception('CMake: unset() requires at least one argument\n{}'.format(tline)) + + if tline.args[0] in self.vars: + del self.vars[tline.args[0]] + + def _cmake_add_executable(self, tline: CMakeTraceLine): + # DOC: https://cmake.org/cmake/help/latest/command/add_executable.html + args = list(tline.args) # Make a working copy + + # Make sure the exe is imported + if 'IMPORTED' not in args: + raise self._gen_exception('CMake: add_executable() non imported executables are not supported\n{}'.format(tline)) + + args.remove('IMPORTED') + + if len(args) < 1: + raise self._gen_exception('CMake: add_executable() requires at least 1 argument\n{}'.format(tline)) + + self.targets[args[0]] = CMakeTarget(args[0], 'EXECUTABLE', {}) + + def _cmake_add_library(self, tline: CMakeTraceLine): + # DOC: https://cmake.org/cmake/help/latest/command/add_library.html + args = list(tline.args) # Make a working copy + + # Make sure the 
lib is imported + if 'IMPORTED' not in args: + raise self._gen_exception('CMake: add_library() non imported libraries are not supported\n{}'.format(tline)) + + args.remove('IMPORTED') + + # No only look at the first two arguments (target_name and target_type) and ignore the rest + if len(args) < 2: + raise self._gen_exception('CMake: add_library() requires at least 2 arguments\n{}'.format(tline)) + + self.targets[args[0]] = CMakeTarget(args[0], args[1], {}) + + def _cmake_add_custom_target(self, tline: CMakeTraceLine): + # DOC: https://cmake.org/cmake/help/latest/command/add_custom_target.html + # We only the first parameter (the target name) is interesting + if len(tline.args) < 1: + raise self._gen_exception('CMake: add_custom_target() requires at least one argument\n{}'.format(tline)) + + self.targets[tline.args[0]] = CMakeTarget(tline.args[0], 'CUSTOM', {}) + + def _cmake_set_property(self, tline: CMakeTraceLine): + # DOC: https://cmake.org/cmake/help/latest/command/set_property.html + args = list(tline.args) + + # We only care for TARGET properties + if args.pop(0) != 'TARGET': + return + + append = False + targets = [] + while len(args) > 0: + curr = args.pop(0) + if curr == 'APPEND' or curr == 'APPEND_STRING': + append = True + continue + + if curr == 'PROPERTY': + break + + targets.append(curr) + + if len(args) == 1: + # Tries to set property to nothing so nothing has to be done + return + + if len(args) < 2: + raise self._gen_exception('CMake: set_property() faild to parse argument list\n{}'.format(tline)) + + propName = args[0] + propVal = list(itertools.chain(*map(lambda x: x.split(';'), args[1:]))) + propVal = list(filter(lambda x: len(x) > 0, propVal)) + + if len(propVal) == 0: + return + + for i in targets: + if i not in self.targets: + raise self._gen_exception('CMake: set_property() TARGET {} not found\n{}'.format(i, tline)) + + if propName not in self.targets[i].properies: + self.targets[i].properies[propName] = [] + + if append: + 
self.targets[i].properies[propName] += propVal + else: + self.targets[i].properies[propName] = propVal + + def _cmake_set_target_properties(self, tline: CMakeTraceLine): + # DOC: https://cmake.org/cmake/help/latest/command/set_target_properties.html + args = list(tline.args) + + targets = [] + while len(args) > 0: + curr = args.pop(0) + if curr == 'PROPERTIES': + break + + targets.append(curr) + + if (len(args) % 2) != 0: + raise self._gen_exception('CMake: set_target_properties() uneven number of property arguments\n{}'.format(tline)) + + while len(args) > 0: + propName = args.pop(0) + propVal = args.pop(0).split(';') + propVal = list(filter(lambda x: len(x) > 0, propVal)) + + if len(propVal) == 0: + continue + + for i in targets: + if i not in self.targets: + raise self._gen_exception('CMake: set_target_properties() TARGET {} not found\n{}'.format(i, tline)) + + self.targets[i].properies[propName] = propVal + + def _lex_trace(self, trace): + # The trace format is: '(): ( )\n' + reg_tline = re.compile(r'\s*(.*\.(cmake|txt))\(([0-9]+)\):\s*(\w+)\(([\s\S]*?) 
?\)\s*\n', re.MULTILINE) + reg_other = re.compile(r'[^\n]*\n') + reg_genexp = re.compile(r'\$<.*>') + loc = 0 + while loc < len(trace): + mo_file_line = reg_tline.match(trace, loc) + if not mo_file_line: + skip_match = reg_other.match(trace, loc) + if not skip_match: + print(trace[loc:]) + raise self._gen_exception('Failed to parse CMake trace') + + loc = skip_match.end() + continue + + loc = mo_file_line.end() + + file = mo_file_line.group(1) + line = mo_file_line.group(3) + func = mo_file_line.group(4) + args = mo_file_line.group(5).split(' ') + args = list(map(lambda x: x.strip(), args)) + args = list(map(lambda x: reg_genexp.sub('', x), args)) # Remove generator expressions + + yield CMakeTraceLine(file, line, func, args) + + def _reset_cmake_cache(self, build_dir): + with open('{}/CMakeCache.txt'.format(build_dir), 'w') as fp: + fp.write('CMAKE_PLATFORM_INFO_INITIALIZED:INTERNAL=1\n') + + def _setup_compiler(self, build_dir): + comp_dir = '{}/CMakeFiles/{}'.format(build_dir, self.cmakevers) + os.makedirs(comp_dir, exist_ok=True) + + c_comp = '{}/CMakeCCompiler.cmake'.format(comp_dir) + cxx_comp = '{}/CMakeCXXCompiler.cmake'.format(comp_dir) + + if not os.path.exists(c_comp): + with open(c_comp, 'w') as fp: + fp.write('''# Fake CMake file to skip the boring and slow stuff +set(CMAKE_C_COMPILER "{}") # Just give CMake a valid full path to any file +set(CMAKE_C_COMPILER_ID "GNU") # Pretend we have found GCC +set(CMAKE_COMPILER_IS_GNUCC 1) +set(CMAKE_C_COMPILER_LOADED 1) +set(CMAKE_C_COMPILER_WORKS TRUE) +set(CMAKE_C_ABI_COMPILED TRUE) +set(CMAKE_SIZEOF_VOID_P "{}") +'''.format(os.path.realpath(__file__), ctypes.sizeof(ctypes.c_voidp))) + + if not os.path.exists(cxx_comp): + with open(cxx_comp, 'w') as fp: + fp.write('''# Fake CMake file to skip the boring and slow stuff +set(CMAKE_CXX_COMPILER "{}") # Just give CMake a valid full path to any file +set(CMAKE_CXX_COMPILER_ID "GNU") # Pretend we have found GCC +set(CMAKE_COMPILER_IS_GNUCXX 1) 
+set(CMAKE_CXX_COMPILER_LOADED 1) +set(CMAKE_CXX_COMPILER_WORKS TRUE) +set(CMAKE_CXX_ABI_COMPILED TRUE) +set(CMAKE_SIZEOF_VOID_P "{}") +'''.format(os.path.realpath(__file__), ctypes.sizeof(ctypes.c_voidp))) + + def _setup_cmake_dir(self): + # Setup the CMake build environment and return the "build" directory + build_dir = '{}/cmake_{}'.format(self.cmake_root_dir, self.name) + os.makedirs(build_dir, exist_ok=True) + + # Copy the CMakeLists.txt + cmake_lists = '{}/CMakeLists.txt'.format(build_dir) + if not os.path.exists(cmake_lists): + dir_path = os.path.dirname(os.path.realpath(__file__)) + src_cmake = '{}/data/CMakeLists.txt'.format(dir_path) + shutil.copyfile(src_cmake, cmake_lists) + + self._setup_compiler(build_dir) + self._reset_cmake_cache(build_dir) + return build_dir + + def _call_cmake_real(self, args, env): + build_dir = self._setup_cmake_dir() + cmd = self.cmakebin.get_command() + args + p, out, err = Popen_safe(cmd, env=env, cwd=build_dir) + rc = p.returncode + call = ' '.join(cmd) + mlog.debug("Called `{}` in {} -> {}".format(call, build_dir, rc)) + + return rc, out, err + + def _call_cmake(self, args, env=None): + if env is None: + fenv = env + env = os.environ + else: + fenv = frozenset(env.items()) + targs = tuple(args) + + # First check if cached, if not call the real cmake function + cache = CMakeDependency.cmake_cache + if (self.cmakebin, targs, fenv) not in cache: + cache[(self.cmakebin, targs, fenv)] = self._call_cmake_real(args, env) + return cache[(self.cmakebin, targs, fenv)] + + @staticmethod + def get_methods(): + return [DependencyMethods.CMAKE] + + def check_cmake(self): + evar = 'CMAKE' + if evar in os.environ: + cmakebin = os.environ[evar].strip() + else: + cmakebin = 'cmake' + cmakebin = ExternalProgram(cmakebin, silent=True) + cmvers = None + invalid_version = False + if cmakebin.found(): + try: + p, out = Popen_safe(cmakebin.get_command() + ['--version'])[0:2] + if p.returncode != 0: + mlog.warning('Found CMake {!r} but couldn\'t 
run it' + ''.format(' '.join(cmakebin.get_command()))) + # Set to False instead of None to signify that we've already + # searched for it and not found it + cmakebin = False + except (FileNotFoundError, PermissionError): + cmakebin = False + + cmvers = re.sub(r'\s*cmake version\s*', '', out.split('\n')[0]).strip() + if not version_compare(cmvers, CMakeDependency.class_cmake_version): + invalid_version = True + else: + cmakebin = False + if not self.silent: + if cmakebin and invalid_version: + mlog.log('Found CMake:', mlog.red('NO'), '(version of', mlog.bold(cmakebin.get_path()), + 'is', mlog.bold(cmvers), 'but version', mlog.bold(CMakeDependency.class_cmake_version), + 'is required)') + elif cmakebin: + mlog.log('Found CMake:', mlog.bold(cmakebin.get_path()), + '(%s)' % cmvers) + else: + mlog.log('Found CMake:', mlog.red('NO')) + + if invalid_version: + cmakebin = False + cmvers = None + + return cmakebin, cmvers + + def log_tried(self): + return self.type_name + class DubDependency(ExternalDependency): class_dubbin = None @@ -1074,10 +1689,10 @@ class ExternalProgram: return ' '.join(self.command) @staticmethod - def from_cross_info(cross_info, name): - if name not in cross_info.config['binaries']: + def from_bin_list(bins, name): + if name not in bins: return NonExistingExternalProgram() - command = cross_info.config['binaries'][name] + command = bins[name] if not isinstance(command, (list, str)): raise MesonException('Invalid type {!r} for binary {!r} in cross file' ''.format(command, name)) @@ -1236,8 +1851,8 @@ class ExternalProgram: class NonExistingExternalProgram(ExternalProgram): "A program that will never exist" - def __init__(self): - self.name = 'nonexistingprogram' + def __init__(self, name='nonexistingprogram'): + self.name = name self.command = [None] self.path = None @@ -1319,6 +1934,11 @@ class ExtraFrameworkDependency(ExternalDependency): self.link_args = ['-F' + self.path, '-framework', self.name.split('.')[0]] def detect(self, name, path): + # 
should use the compiler to look for frameworks, rather than peering at + # the filesystem, so we can also find them when cross-compiling + if self.want_cross: + return + lname = name.lower() if path is None: paths = ['/System/Library/Frameworks', '/Library/Frameworks'] @@ -1399,7 +2019,7 @@ def find_external_dependency(name, env, kwargs): # build a list of dependency methods to try candidates = _build_external_dependency_list(name, env, kwargs) - pkg_exc = None + pkg_exc = [] pkgdep = [] details = '' @@ -1410,11 +2030,10 @@ def find_external_dependency(name, env, kwargs): d._check_version() pkgdep.append(d) except Exception as e: + pkg_exc.append(e) mlog.debug(str(e)) - # store the first exception we see - if not pkg_exc: - pkg_exc = e else: + pkg_exc.append(None) details = d.log_details() if details: details = '(' + details + ') ' @@ -1449,14 +2068,16 @@ def find_external_dependency(name, env, kwargs): '(tried {})'.format(tried) if tried else '') if required: - # if exception(s) occurred, re-raise the first one (on the grounds that - # it came from a preferred dependency detection method) - if pkg_exc: - raise pkg_exc + # if an exception occurred with the first detection method, re-raise it + # (on the grounds that it came from the preferred dependency detection + # method) + if pkg_exc[0]: + raise pkg_exc[0] # we have a list of failed ExternalDependency objects, so we can report # the methods we tried to find the dependency - raise DependencyException('Dependency "%s" not found, tried %s' % (name, tried)) + raise DependencyException('Dependency "%s" not found' % (name) + + (', tried %s' % (tried) if tried else '')) # return the last failed dependency object if pkgdep: @@ -1467,6 +2088,10 @@ def find_external_dependency(name, env, kwargs): def _build_external_dependency_list(name, env, kwargs): + # First check if the method is valid + if 'method' in kwargs and kwargs['method'] not in [e.value for e in DependencyMethods]: + raise DependencyException('method {!r} is 
invalid'.format(kwargs['method'])) + # Is there a specific dependency detector for this dependency? lname = name.lower() if lname in packages: @@ -1485,15 +2110,26 @@ def _build_external_dependency_list(name, env, kwargs): if 'dub' == kwargs.get('method', ''): candidates.append(functools.partial(DubDependency, name, env, kwargs)) return candidates - # TBD: other values of method should control what method(s) are used - # Otherwise, just use the pkgconfig dependency detector - candidates.append(functools.partial(PkgConfigDependency, name, env, kwargs)) + # If it's explicitly requested, use the pkgconfig detection method (only) + if 'pkg-config' == kwargs.get('method', ''): + candidates.append(functools.partial(PkgConfigDependency, name, env, kwargs)) + return candidates + + # If it's explicitly requested, use the CMake detection method (only) + if 'cmake' == kwargs.get('method', ''): + candidates.append(functools.partial(CMakeDependency, name, env, kwargs)) + return candidates - # On OSX, also try framework dependency detector - if mesonlib.is_osx(): - candidates.append(functools.partial(ExtraFrameworkDependency, name, - False, None, env, None, kwargs)) + # Otherwise, just use the pkgconfig and cmake dependency detector + if 'auto' == kwargs.get('method', 'auto'): + candidates.append(functools.partial(PkgConfigDependency, name, env, kwargs)) + candidates.append(functools.partial(CMakeDependency, name, env, kwargs)) + + # On OSX, also try framework dependency detector + if mesonlib.is_osx(): + candidates.append(functools.partial(ExtraFrameworkDependency, name, + False, None, env, None, kwargs)) return candidates diff --git a/mesonbuild/dependencies/boost.py b/mesonbuild/dependencies/boost.py index b06f62d..6a8050d 100644 --- a/mesonbuild/dependencies/boost.py +++ b/mesonbuild/dependencies/boost.py @@ -288,7 +288,7 @@ class BoostDependency(ExternalDependency): tag = None compiler = self.env.detect_cpp_compiler(self.want_cross) if mesonlib.for_windows(self.want_cross, 
self.env): - if compiler.get_id() == 'msvc': + if compiler.get_id() in ['msvc', 'clang-cl']: comp_ts_version = compiler.get_toolset_version() compiler_ts = comp_ts_version.split('.') # FIXME - what about other compilers? @@ -320,7 +320,7 @@ class BoostDependency(ExternalDependency): def arch_tag(self): # currently only applies to windows msvc installed binaries - if self.env.detect_cpp_compiler(self.want_cross).get_id() != 'msvc': + if self.env.detect_cpp_compiler(self.want_cross).get_id() not in ['msvc', 'clang-cl']: return '' # pre-compiled binaries only added arch tag for versions > 1.64 if float(self.version) < 1.65: @@ -443,7 +443,7 @@ class BoostDependency(ExternalDependency): if self.libdir: libdirs = [self.libdir] elif self.boost_root is None: - libdirs = mesonlib.get_library_dirs(self.env) + libdirs = mesonlib.get_library_dirs() else: libdirs = [os.path.join(self.boost_root, 'lib')] for libdir in libdirs: diff --git a/mesonbuild/dependencies/data/CMakeLists.txt b/mesonbuild/dependencies/data/CMakeLists.txt new file mode 100644 index 0000000..144ffda --- /dev/null +++ b/mesonbuild/dependencies/data/CMakeLists.txt @@ -0,0 +1,93 @@ +cmake_minimum_required(VERSION ${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}.${CMAKE_PATCH_VERSION} ) + +# Inspired by CMakeDetermineCompilerABI.cmake to set CMAKE_LIBRARY_ARCHITECTURE +if(CMAKE_LIBRARY_ARCHITECTURE_REGEX) + if(NOT DEFINED CMAKE_LIBRARY_ARCHITECTURE) + file(GLOB implicit_dirs RELATIVE /lib /lib/*-linux-gnu* ) + foreach(dir ${implicit_dirs}) + if("${dir}" MATCHES "${CMAKE_LIBRARY_ARCHITECTURE_REGEX}") + set(CMAKE_LIBRARY_ARCHITECTURE "${dir}") + break() + endif() + endforeach() + endif() +endif() + +find_package("${NAME}" QUIET) + +set(PACKAGE_FOUND FALSE) +set(_packageName "${NAME}") +string(TOUPPER "${_packageName}" PACKAGE_NAME) + +if(${_packageName}_FOUND OR ${PACKAGE_NAME}_FOUND) + set(PACKAGE_FOUND TRUE) + + # Check the following variables: + # FOO_VERSION + # Foo_VERSION + # FOO_VERSION_STRING + # 
Foo_VERSION_STRING + if(NOT DEFINED PACKAGE_VERSION) + if(DEFINED ${_packageName}_VERSION) + set(PACKAGE_VERSION "${${_packageName}_VERSION}") + elseif(DEFINED ${PACKAGE_NAME}_VERSION) + set(PACKAGE_VERSION "${${PACKAGE_NAME}_VERSION}") + elseif(DEFINED ${_packageName}_VERSION_STRING) + set(PACKAGE_VERSION "${${_packageName}_VERSION_STRING}") + elseif(DEFINED ${PACKAGE_NAME}_VERSION_STRING) + set(PACKAGE_VERSION "${${PACKAGE_NAME}_VERSION_STRING}") + endif() + endif() + + # Check the following variables: + # FOO_LIBRARIES + # Foo_LIBRARIES + # FOO_LIBS + # Foo_LIBS + set(libs) + if(DEFINED ${_packageName}_LIBRARIES) + set(libs ${_packageName}_LIBRARIES) + elseif(DEFINED ${PACKAGE_NAME}_LIBRARIES) + set(libs ${PACKAGE_NAME}_LIBRARIES) + elseif(DEFINED ${_packageName}_LIBS) + set(libs ${_packageName}_LIBS) + elseif(DEFINED ${PACKAGE_NAME}_LIBS) + set(libs ${PACKAGE_NAME}_LIBS) + endif() + + # Check the following variables: + # FOO_INCLUDE_DIRS + # Foo_INCLUDE_DIRS + # FOO_INCLUDES + # Foo_INCLUDES + # FOO_INCLUDE_DIR + # Foo_INCLUDE_DIR + set(includes) + if(DEFINED ${_packageName}_INCLUDE_DIRS) + set(includes ${_packageName}_INCLUDE_DIRS) + elseif(DEFINED ${PACKAGE_NAME}_INCLUDE_DIRS) + set(includes ${PACKAGE_NAME}_INCLUDE_DIRS) + elseif(DEFINED ${_packageName}_INCLUDES) + set(includes ${_packageName}_INCLUDES) + elseif(DEFINED ${PACKAGE_NAME}_INCLUDES) + set(includes ${PACKAGE_NAME}_INCLUDES) + elseif(DEFINED ${_packageName}_INCLUDE_DIR) + set(includes ${_packageName}_INCLUDE_DIR) + elseif(DEFINED ${PACKAGE_NAME}_INCLUDE_DIR) + set(includes ${PACKAGE_NAME}_INCLUDE_DIR) + endif() + + # Check the following variables: + # FOO_DEFINITIONS + # Foo_DEFINITIONS + set(definitions) + if(DEFINED ${_packageName}_DEFINITIONS) + set(definitions ${_packageName}_DEFINITIONS) + elseif(DEFINED ${PACKAGE_NAME}_DEFINITIONS) + set(definitions ${PACKAGE_NAME}_DEFINITIONS) + endif() + + set(PACKAGE_INCLUDE_DIRS "${${includes}}") + set(PACKAGE_DEFINITIONS "${${definitions}}") + 
set(PACKAGE_LIBRARIES "${${libs}}") +endif() diff --git a/mesonbuild/dependencies/misc.py b/mesonbuild/dependencies/misc.py index 5164512..9e0a65a 100644 --- a/mesonbuild/dependencies/misc.py +++ b/mesonbuild/dependencies/misc.py @@ -29,7 +29,7 @@ from ..environment import detect_cpu_family from .base import ( DependencyException, DependencyMethods, ExternalDependency, ExternalProgram, ExtraFrameworkDependency, PkgConfigDependency, - ConfigToolDependency, + CMakeDependency, ConfigToolDependency, ) @@ -234,6 +234,8 @@ class MPIDependency(ExternalDependency): class OpenMPDependency(ExternalDependency): # Map date of specification release (which is the macro value) to a version. VERSIONS = { + '201811': '5.0', + '201611': '5.0-revision1', # This is supported by ICC 19.x '201511': '4.5', '201307': '4.0', '201107': '3.1', @@ -278,11 +280,14 @@ class ThreadDependency(ExternalDependency): class Python3Dependency(ExternalDependency): def __init__(self, environment, kwargs): super().__init__('python3', environment, None, kwargs) + + if self.want_cross: + return + self.name = 'python3' self.static = kwargs.get('static', False) # We can only be sure that it is Python 3 at this point self.version = '3' - self.pkgdep = None self._find_libpy3_windows(environment) @classmethod @@ -434,6 +439,11 @@ class PcapDependency(ExternalDependency): @staticmethod def get_pcap_lib_version(ctdep): + # Since we seem to need to run a program to discover the pcap version, + # we can't do that when cross-compiling + if ctdep.want_cross: + return None + v = ctdep.clib_compiler.get_return_value('pcap_lib_version', 'string', '#include ', ctdep.env, [], [ctdep]) v = re.sub(r'libpcap version ', '', v) @@ -465,6 +475,9 @@ class CupsDependency(ExternalDependency): ExtraFrameworkDependency, 'cups', False, None, environment, kwargs.get('language', None), kwargs)) + if DependencyMethods.CMAKE in methods: + candidates.append(functools.partial(CMakeDependency, 'Cups', environment, kwargs)) + return 
candidates @staticmethod @@ -475,9 +488,9 @@ class CupsDependency(ExternalDependency): @staticmethod def get_methods(): if mesonlib.is_osx(): - return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.EXTRAFRAMEWORK] + return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.EXTRAFRAMEWORK, DependencyMethods.CMAKE] else: - return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL] + return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.CMAKE] class LibWmfDependency(ExternalDependency): @@ -506,3 +519,34 @@ class LibWmfDependency(ExternalDependency): @staticmethod def get_methods(): return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL] + + +class LibGCryptDependency(ExternalDependency): + def __init__(self, environment, kwargs): + super().__init__('libgcrypt', environment, None, kwargs) + + @classmethod + def _factory(cls, environment, kwargs): + methods = cls._process_method_kw(kwargs) + candidates = [] + + if DependencyMethods.PKGCONFIG in methods: + candidates.append(functools.partial(PkgConfigDependency, 'libgcrypt', environment, kwargs)) + + if DependencyMethods.CONFIG_TOOL in methods: + candidates.append(functools.partial(ConfigToolDependency.factory, + 'libgcrypt', environment, None, kwargs, ['libgcrypt-config'], + 'libgcrypt-config', + LibGCryptDependency.tool_finish_init)) + + return candidates + + @staticmethod + def tool_finish_init(ctdep): + ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args') + ctdep.link_args = ctdep.get_config_value(['--libs'], 'link_args') + ctdep.version = ctdep.get_config_value(['--version'], 'version')[0] + + @staticmethod + def get_methods(): + return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL] diff --git a/mesonbuild/dependencies/platform.py b/mesonbuild/dependencies/platform.py index 5b2003f..c78ebed 100644 --- a/mesonbuild/dependencies/platform.py +++ 
b/mesonbuild/dependencies/platform.py @@ -33,7 +33,7 @@ class AppleFrameworks(ExternalDependency): for f in self.frameworks: self.link_args += ['-framework', f] - self.is_found = mesonlib.is_osx() + self.is_found = mesonlib.for_darwin(self.want_cross, self.env) def log_tried(self): return 'framework' diff --git a/mesonbuild/dependencies/ui.py b/mesonbuild/dependencies/ui.py index 016eace..e8fba91 100644 --- a/mesonbuild/dependencies/ui.py +++ b/mesonbuild/dependencies/ui.py @@ -30,7 +30,7 @@ from ..mesonlib import ( from ..environment import detect_cpu from .base import DependencyException, DependencyMethods -from .base import ExternalDependency, ExternalProgram +from .base import ExternalDependency, ExternalProgram, NonExistingExternalProgram from .base import ExtraFrameworkDependency, PkgConfigDependency from .base import ConfigToolDependency @@ -39,13 +39,13 @@ class GLDependency(ExternalDependency): def __init__(self, environment, kwargs): super().__init__('gl', environment, None, kwargs) - if mesonlib.is_osx(): + if mesonlib.for_darwin(self.want_cross, self.env): self.is_found = True # FIXME: Use AppleFrameworks dependency self.link_args = ['-framework', 'OpenGL'] # FIXME: Detect version using self.clib_compiler return - if mesonlib.is_windows(): + if mesonlib.for_windows(self.want_cross, self.env): self.is_found = True # FIXME: Use self.clib_compiler.find_library() self.link_args = ['-lopengl32'] @@ -230,21 +230,48 @@ class QtBaseDependency(ExternalDependency): self.from_text = mlog.format_list(methods) self.version = None - def compilers_detect(self): + def compilers_detect(self, interp_obj): "Detect Qt (4 or 5) moc, uic, rcc in the specified bindir or in PATH" - if self.bindir or for_windows(self.env.is_cross_build(), self.env): - moc = ExternalProgram(os.path.join(self.bindir, 'moc'), silent=True) - uic = ExternalProgram(os.path.join(self.bindir, 'uic'), silent=True) - rcc = ExternalProgram(os.path.join(self.bindir, 'rcc'), silent=True) - lrelease = 
ExternalProgram(os.path.join(self.bindir, 'lrelease'), silent=True) - else: - # We don't accept unsuffixed 'moc', 'uic', and 'rcc' because they - # are sometimes older, or newer versions. - moc = ExternalProgram('moc-' + self.name, silent=True) - uic = ExternalProgram('uic-' + self.name, silent=True) - rcc = ExternalProgram('rcc-' + self.name, silent=True) - lrelease = ExternalProgram('lrelease-' + self.name, silent=True) - return moc, uic, rcc, lrelease + # It is important that this list does not change order as the order of + # the returned ExternalPrograms will change as well + bins = ['moc', 'uic', 'rcc', 'lrelease'] + found = {b: NonExistingExternalProgram(name='{}-{}'.format(b, self.name)) + for b in bins} + + def gen_bins(): + for b in bins: + if self.bindir: + yield os.path.join(self.bindir, b), b, False + yield '{}-{}'.format(b, self.name), b, False + yield b, b, self.required + + for b, name, required in gen_bins(): + if found[name].found(): + continue + + # prefer the -qt of the tool to the plain one, as we + # don't know what the unsuffixed one points to without calling it. 
+ p = interp_obj.find_program_impl([b], silent=True, required=required).held_object + if not p.found(): + continue + + if name == 'lrelease': + arg = ['-version'] + elif mesonlib.version_compare(self.version, '>= 5'): + arg = ['--version'] + else: + arg = ['-v'] + + # Ensure that the version of qt and each tool are the same + _, out, err = mesonlib.Popen_safe(p.get_command() + arg) + if b.startswith('lrelease') or not self.version.startswith('4'): + care = out + else: + care = err + if mesonlib.version_compare(self.version, '== {}'.format(care.split(' ')[-1])): + found[name] = p + + return tuple([found[b] for b in bins]) def _pkgconfig_detect(self, mods, kwargs): # We set the value of required to False so that we can try the @@ -302,8 +329,15 @@ class QtBaseDependency(ExternalDependency): def _find_qmake(self, qmake): # Even when cross-compiling, if a cross-info qmake is not specified, we # fallback to using the qmake in PATH because that's what we used to do - if self.env.is_cross_build() and 'qmake' in self.env.cross_info.config['binaries']: - return ExternalProgram.from_cross_info(self.env.cross_info, 'qmake') + if self.env.is_cross_build(): + if 'qmake' in self.env.cross_info.config['binaries']: + return ExternalProgram.from_bin_list(self.env.cross_info.config['binaries'], 'qmake') + elif self.env.config_info: + # Prefer suffixed to unsuffixed version + p = ExternalProgram.from_bin_list(self.env.config_info.binaries, 'qmake-' + self.name) + if p.found(): + return p + return ExternalProgram.from_bin_list(self.env.config_info.binaries, 'qmake') return ExternalProgram(qmake, silent=True) def _qmake_detect(self, mods, kwargs): diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py index 6c27043..2ccd31e 100644 --- a/mesonbuild/environment.py +++ b/mesonbuild/environment.py @@ -12,12 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import configparser, os, platform, re, shlex, shutil, subprocess +import configparser, os, platform, re, sys, shlex, shutil, subprocess from . import coredata -from .linkers import ArLinker, ArmarLinker, VisualStudioLinker, DLinker +from .linkers import ArLinker, ArmarLinker, VisualStudioLinker, DLinker, CcrxLinker from . import mesonlib -from .mesonlib import EnvironmentException, Popen_safe +from .mesonlib import MesonException, EnvironmentException, PerMachine, Popen_safe from . import mlog from . import compilers @@ -39,6 +39,8 @@ from .compilers import ( ClangCPPCompiler, ClangObjCCompiler, ClangObjCPPCompiler, + ClangClCCompiler, + ClangClCPPCompiler, G95FortranCompiler, GnuCCompiler, GnuCPPCompiler, @@ -59,6 +61,8 @@ from .compilers import ( PathScaleFortranCompiler, PGIFortranCompiler, RustCompiler, + CcrxCCompiler, + CcrxCPPCompiler, SunFortranCompiler, ValaCompiler, VisualStudioCCompiler, @@ -69,6 +73,7 @@ build_filename = 'meson.build' known_cpu_families = ( 'aarch64', + 'arc', 'arm', 'e2k', 'ia64', @@ -79,6 +84,7 @@ known_cpu_families = ( 'ppc64', 'riscv32', 'riscv64', + 'rx', 's390x', 'sparc', 'sparc64', @@ -114,7 +120,7 @@ def find_coverage_tools(): return gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe def detect_ninja(version='1.5', log=False): - for n in ['ninja', 'ninja-build']: + for n in ['ninja', 'ninja-build', 'samu']: try: p, found = Popen_safe([n, '--version'])[0:2] except (FileNotFoundError, PermissionError): @@ -179,20 +185,41 @@ def detect_windows_arch(compilers): if float(compiler.get_toolset_version()) < 10.0: # On MSVC 2008 and earlier, check 'BUILD_PLAT', where # 'Win32' means 'x86' - platform = os.environ.get('BUILD_PLAT', 'x86') + platform = os.environ.get('BUILD_PLAT', os_arch) if platform == 'Win32': return 'x86' + elif 'VSCMD_ARG_TGT_ARCH' in os.environ: + # On MSVC 2017 'Platform' is not set in VsDevCmd.bat + return os.environ['VSCMD_ARG_TGT_ARCH'] else: - # On MSVC 2010 and later 'Platform' is only set when the + # 
Starting with VS 2017, `Platform` is not always set (f.ex., + # if you use VsDevCmd.bat directly instead of vcvars*.bat), but + # `VSCMD_ARG_HOST_ARCH` is always set, so try that first. + if 'VSCMD_ARG_HOST_ARCH' in os.environ: + platform = os.environ['VSCMD_ARG_HOST_ARCH'].lower() + # On VS 2010-2015, 'Platform' is only set when the # target arch is not 'x86'. It's 'x64' when targeting # x86_64 and 'arm' when targeting ARM. - platform = os.environ.get('Platform', 'x86').lower() + else: + platform = os.environ.get('Platform', 'x86').lower() if platform == 'x86': return platform + if compiler.id == 'clang-cl' and not compiler.is_64: + return 'x86' if compiler.id == 'gcc' and compiler.has_builtin_define('__i386__'): return 'x86' return os_arch +def any_compiler_has_define(compilers, define): + for c in compilers.values(): + try: + if c.has_builtin_define(define): + return True + except mesonlib.MesonException: + # Ignore compilers that do not support has_builtin_define. + pass + return False + def detect_cpu_family(compilers): """ Python is inconsistent in its platform module. @@ -205,26 +232,37 @@ def detect_cpu_family(compilers): else: trial = platform.machine().lower() if trial.startswith('i') and trial.endswith('86'): - return 'x86' - if trial.startswith('arm'): - return 'arm' - if trial.startswith('ppc64'): - return 'ppc64' - if trial in ('amd64', 'x64'): + trial = 'x86' + elif trial.startswith('arm'): + trial = 'arm' + elif trial.startswith('ppc64'): + trial = 'ppc64' + elif trial == 'powerpc': + trial = 'ppc' + # FreeBSD calls both ppc and ppc64 "powerpc". + # https://github.com/mesonbuild/meson/issues/4397 + try: + p, stdo, _ = Popen_safe(['uname', '-p']) + except (FileNotFoundError, PermissionError): + # Not much to go on here. 
+ if sys.maxsize > 2**32: + trial = 'ppc64' + if 'powerpc64' in stdo: + trial = 'ppc64' + elif trial in ('amd64', 'x64'): trial = 'x86_64' + + # On Linux (and maybe others) there can be any mixture of 32/64 bit code in + # the kernel, Python, system, 32-bit chroot on 64-bit host, etc. The only + # reliable way to know is to check the compiler defines. if trial == 'x86_64': - # On Linux (and maybe others) there can be any mixture of 32/64 bit - # code in the kernel, Python, system etc. The only reliable way - # to know is to check the compiler defines. - for c in compilers.values(): - try: - if c.has_builtin_define('__i386__'): - return 'x86' - except mesonlib.MesonException: - # Ignore compilers that do not support has_builtin_define. - pass - return 'x86_64' - # Add fixes here as bugs are reported. + if any_compiler_has_define(compilers, '__i386__'): + trial = 'x86' + elif trial == 'aarch64': + if any_compiler_has_define(compilers, '__arm__'): + trial = 'arm' + # Add more quirks here as bugs are reported. Keep in sync with detect_cpu() + # below. if trial not in known_cpu_families: mlog.warning('Unknown CPU family {!r}, please report this at ' @@ -242,17 +280,17 @@ def detect_cpu(compilers): trial = 'x86_64' if trial == 'x86_64': # Same check as above for cpu_family - for c in compilers.values(): - try: - if c.has_builtin_define('__i386__'): - return 'i686' # All 64 bit cpus have at least this level of x86 support. - except mesonlib.MesonException: - pass - return 'x86_64' - if trial == 'e2k': + if any_compiler_has_define(compilers, '__i386__'): + trial = 'i686' # All 64 bit cpus have at least this level of x86 support. + elif trial == 'aarch64': + # Same check as above for cpu_family + if any_compiler_has_define(compilers, '__arm__'): + trial = 'arm' + elif trial == 'e2k': # Make more precise CPU detection for Elbrus platform. trial = platform.processor().lower() - # Add fixes here as bugs are reported. + # Add more quirks here as bugs are reported. 
Keep in sync with + # detect_cpu_family() above. return trial def detect_system(): @@ -299,26 +337,50 @@ class Environment: self.coredata = coredata.load(self.get_build_dir()) self.first_invocation = False except FileNotFoundError: - # WARNING: Don't use any values from coredata in __init__. It gets - # re-initialized with project options by the interpreter during - # build file parsing. - self.coredata = coredata.CoreData(options) - # Used by the regenchecker script, which runs meson - self.coredata.meson_command = mesonlib.meson_command - self.first_invocation = True - self.cross_info = None + self.create_new_coredata(options) + except MesonException as e: + # If we stored previous command line options, we can recover from + # a broken/outdated coredata. + if os.path.isfile(coredata.get_cmd_line_file(self.build_dir)): + mlog.warning('Regenerating configuration from scratch.') + mlog.log('Reason:', mlog.red(str(e))) + coredata.read_cmd_line_file(self.build_dir, options) + self.create_new_coredata(options) + else: + raise e self.exe_wrapper = None + + self.machines = MachineInfos() + # Will be fully initialized later using compilers later. 
+ self.machines.detect_build() if self.coredata.cross_file: self.cross_info = CrossBuildInfo(self.coredata.cross_file) if 'exe_wrapper' in self.cross_info.config['binaries']: from .dependencies import ExternalProgram - self.exe_wrapper = ExternalProgram.from_cross_info(self.cross_info, 'exe_wrapper') + self.exe_wrapper = ExternalProgram.from_bin_list( + self.cross_info.config['binaries'], 'exe_wrapper') + if 'host_machine' in self.cross_info.config: + self.machines.host = MachineInfo.from_literal( + self.cross_info.config['host_machine']) + if 'target_machine' in self.cross_info.config: + self.machines.target = MachineInfo.from_literal( + self.cross_info.config['target_machine']) + else: + self.cross_info = None + self.machines.default_missing() + + if self.coredata.config_files: + self.config_info = coredata.ConfigData( + coredata.load_configs(self.coredata.config_files)) + else: + self.config_info = coredata.ConfigData() + self.cmd_line_options = options.cmd_line_options.copy() # List of potential compilers. 
if mesonlib.is_windows(): - self.default_c = ['cl', 'cc', 'gcc', 'clang'] - self.default_cpp = ['cl', 'c++', 'g++', 'clang++'] + self.default_c = ['cl', 'cc', 'gcc', 'clang', 'clang-cl'] + self.default_cpp = ['cl', 'c++', 'g++', 'clang++', 'clang-cl'] else: self.default_c = ['cc', 'gcc', 'clang'] self.default_cpp = ['c++', 'g++', 'clang++'] @@ -328,10 +390,15 @@ class Environment: self.default_cs = ['mcs', 'csc'] self.default_objc = ['cc'] self.default_objcpp = ['c++'] + self.default_d = ['ldc2', 'ldc', 'gdc', 'dmd'] self.default_fortran = ['gfortran', 'g95', 'f95', 'f90', 'f77', 'ifort'] + self.default_java = ['javac'] self.default_rust = ['rustc'] + self.default_swift = ['swiftc'] + self.default_vala = ['valac'] self.default_static_linker = ['ar'] self.vs_static_linker = ['lib'] + self.clang_cl_static_linker = ['llvm-lib'] self.gcc_static_linker = ['gcc-ar'] self.clang_static_linker = ['llvm-ar'] @@ -352,10 +419,20 @@ class Environment: self.object_suffix = 'o' self.win_libdir_layout = False if 'STRIP' in os.environ: - self.native_strip_bin = shlex.split(os.environ['STRIP']) + self.native_strip_bin = shlex.split( + os.environ[BinaryTable.evarMap['strip']]) else: self.native_strip_bin = ['strip'] + def create_new_coredata(self, options): + # WARNING: Don't use any values from coredata in __init__. It gets + # re-initialized with project options by the interpreter during + # build file parsing. 
+ self.coredata = coredata.CoreData(options) + # Used by the regenchecker script, which runs meson + self.coredata.meson_command = mesonlib.meson_command + self.first_invocation = True + def is_cross_build(self): return self.cross_info is not None @@ -451,49 +528,37 @@ class Environment: return CompilerType.GCC_CYGWIN return CompilerType.GCC_STANDARD - def warn_about_lang_pointing_to_cross(self, compiler_exe, evar): - evar_str = os.environ.get(evar, 'WHO_WOULD_CALL_THEIR_COMPILER_WITH_THIS_NAME') - if evar_str == compiler_exe: - mlog.warning('''Env var %s seems to point to the cross compiler. -This is probably wrong, it should always point to the native compiler.''' % evar) - - def _get_compilers(self, lang, evar, want_cross): + def _get_compilers(self, lang, want_cross): ''' The list of compilers is detected in the exact same way for C, C++, ObjC, ObjC++, Fortran, CS so consolidate it here. ''' + is_cross = False + exe_wrap = None + evar = BinaryTable.evarMap[lang] + if self.is_cross_build() and want_cross: if lang not in self.cross_info.config['binaries']: raise EnvironmentException('{!r} compiler binary not defined in cross file'.format(lang)) - compilers = mesonlib.stringlistify(self.cross_info.config['binaries'][lang]) - # Ensure ccache exists and remove it if it doesn't - if compilers[0] == 'ccache': - compilers = compilers[1:] - ccache = self.detect_ccache() - else: - ccache = [] - self.warn_about_lang_pointing_to_cross(compilers[0], evar) + compilers, ccache = BinaryTable.parse_entry( + mesonlib.stringlistify(self.cross_info.config['binaries'][lang])) + BinaryTable.warn_about_lang_pointing_to_cross(compilers[0], evar) # Return value has to be a list of compiler 'choices' compilers = [compilers] is_cross = True exe_wrap = self.get_exe_wrapper() elif evar in os.environ: - compilers = shlex.split(os.environ[evar]) - # Ensure ccache exists and remove it if it doesn't - if compilers[0] == 'ccache': - compilers = compilers[1:] - ccache = self.detect_ccache() - 
else: - ccache = [] + compilers, ccache = BinaryTable.parse_entry( + shlex.split(os.environ[evar])) # Return value has to be a list of compiler 'choices' compilers = [compilers] - is_cross = False - exe_wrap = None + elif lang in self.config_info.binaries: + compilers, ccache = BinaryTable.parse_entry( + mesonlib.stringlistify(self.config_info.binaries[lang])) + compilers = [compilers] else: compilers = getattr(self, 'default_' + lang) - ccache = self.detect_ccache() - is_cross = False - exe_wrap = None + ccache = BinaryTable.detect_ccache() return compilers, ccache, is_cross, exe_wrap def _handle_exceptions(self, exceptions, binaries, bintype='compiler'): @@ -504,13 +569,13 @@ This is probably wrong, it should always point to the native compiler.''' % evar errmsg += '\nRunning "{0}" gave "{1}"'.format(c, e) raise EnvironmentException(errmsg) - def _detect_c_or_cpp_compiler(self, lang, evar, want_cross): + def _detect_c_or_cpp_compiler(self, lang, want_cross): popen_exceptions = {} - compilers, ccache, is_cross, exe_wrap = self._get_compilers(lang, evar, want_cross) + compilers, ccache, is_cross, exe_wrap = self._get_compilers(lang, want_cross) for compiler in compilers: if isinstance(compiler, str): compiler = [compiler] - if 'cl' in compiler or 'cl.exe' in compiler: + if not set(['cl', 'cl.exe', 'clang-cl', 'clang-cl.exe']).isdisjoint(compiler): # Watcom C provides it's own cl.exe clone that mimics an older # version of Microsoft's compiler. Since Watcom's cl.exe is # just a wrapper, we skip using it if we detect its presence @@ -532,6 +597,8 @@ This is probably wrong, it should always point to the native compiler.''' % evar arg = '/?' 
elif 'armcc' in compiler[0]: arg = '--vsn' + elif 'ccrx' in compiler[0]: + arg = '-v' else: arg = '--version' try: @@ -539,8 +606,12 @@ This is probably wrong, it should always point to the native compiler.''' % evar except OSError as e: popen_exceptions[' '.join(compiler + [arg])] = e continue - version = search_version(out) + + if 'ccrx' in compiler[0]: + out = err + full_version = out.split('\n', 1)[0] + version = search_version(out) guess_gcc_or_lcc = False if 'Free Software Foundation' in out: @@ -576,8 +647,21 @@ This is probably wrong, it should always point to the native compiler.''' % evar # Override previous values version = search_version(arm_ver_str) full_version = arm_ver_str + compiler_type = CompilerType.ARM_WIN cls = ArmclangCCompiler if lang == 'c' else ArmclangCPPCompiler - return cls(ccache + compiler, version, is_cross, exe_wrap, full_version=full_version) + return cls(ccache + compiler, version, compiler_type, is_cross, exe_wrap, full_version=full_version) + if 'CL.EXE COMPATIBILITY' in out: + # if this is clang-cl masquerading as cl, detect it as cl, not + # clang + arg = '--version' + try: + p, out, err = Popen_safe(compiler + [arg]) + except OSError as e: + popen_exceptions[' '.join(compiler + [arg])] = e + version = search_version(out) + is_64 = 'Target: x86_64' in out + cls = ClangClCCompiler if lang == 'c' else ClangClCPPCompiler + return cls(compiler, version, is_cross, exe_wrap, is_64) if 'clang' in out: if 'Apple' in out or mesonlib.for_darwin(want_cross, self): compiler_type = CompilerType.CLANG_OSX @@ -612,19 +696,25 @@ This is probably wrong, it should always point to the native compiler.''' % evar cls = IntelCCompiler if lang == 'c' else IntelCPPCompiler return cls(ccache + compiler, version, compiler_type, is_cross, exe_wrap, full_version=full_version) if 'ARM' in out: + compiler_type = CompilerType.ARM_WIN cls = ArmCCompiler if lang == 'c' else ArmCPPCompiler - return cls(ccache + compiler, version, is_cross, exe_wrap, 
full_version=full_version) + return cls(ccache + compiler, version, compiler_type, is_cross, exe_wrap, full_version=full_version) + if 'RX Family' in out: + compiler_type = CompilerType.CCRX_WIN + cls = CcrxCCompiler if lang == 'c' else CcrxCPPCompiler + return cls(ccache + compiler, version, compiler_type, is_cross, exe_wrap, full_version=full_version) + self._handle_exceptions(popen_exceptions, compilers) def detect_c_compiler(self, want_cross): - return self._detect_c_or_cpp_compiler('c', 'CC', want_cross) + return self._detect_c_or_cpp_compiler('c', want_cross) def detect_cpp_compiler(self, want_cross): - return self._detect_c_or_cpp_compiler('cpp', 'CXX', want_cross) + return self._detect_c_or_cpp_compiler('cpp', want_cross) def detect_fortran_compiler(self, want_cross): popen_exceptions = {} - compilers, ccache, is_cross, exe_wrap = self._get_compilers('fortran', 'FC', want_cross) + compilers, ccache, is_cross, exe_wrap = self._get_compilers('fortran', want_cross) for compiler in compilers: if isinstance(compiler, str): compiler = [compiler] @@ -686,7 +776,7 @@ This is probably wrong, it should always point to the native compiler.''' % evar def detect_objc_compiler(self, want_cross): popen_exceptions = {} - compilers, ccache, is_cross, exe_wrap = self._get_compilers('objc', 'OBJC', want_cross) + compilers, ccache, is_cross, exe_wrap = self._get_compilers('objc', want_cross) for compiler in compilers: if isinstance(compiler, str): compiler = [compiler] @@ -713,7 +803,7 @@ This is probably wrong, it should always point to the native compiler.''' % evar def detect_objcpp_compiler(self, want_cross): popen_exceptions = {} - compilers, ccache, is_cross, exe_wrap = self._get_compilers('objcpp', 'OBJCXX', want_cross) + compilers, ccache, is_cross, exe_wrap = self._get_compilers('objcpp', want_cross) for compiler in compilers: if isinstance(compiler, str): compiler = [compiler] @@ -739,18 +829,23 @@ This is probably wrong, it should always point to the native 
compiler.''' % evar self._handle_exceptions(popen_exceptions, compilers) def detect_java_compiler(self): - exelist = ['javac'] + if 'java' in self.config_info.binaries: + exelist = mesonlib.stringlistify(self.config_info.binaries['java']) + else: + # TODO support fallback + exelist = [self.default_java[0]] + try: p, out, err = Popen_safe(exelist + ['-version']) except OSError: raise EnvironmentException('Could not execute Java compiler "%s"' % ' '.join(exelist)) - version = search_version(err) if 'javac' in out or 'javac' in err: + version = search_version(err if 'javac' in err else out) return JavaCompiler(exelist, version) raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"') def detect_cs_compiler(self): - compilers, ccache, is_cross, exe_wrap = self._get_compilers('cs', 'CSC', False) + compilers, ccache, is_cross, exe_wrap = self._get_compilers('cs', False) popen_exceptions = {} for comp in compilers: if not isinstance(comp, list): @@ -772,8 +867,11 @@ This is probably wrong, it should always point to the native compiler.''' % evar def detect_vala_compiler(self): if 'VALAC' in os.environ: exelist = shlex.split(os.environ['VALAC']) + elif 'vala' in self.config_info.binaries: + exelist = mesonlib.stringlistify(self.config_info.binaries['vala']) else: - exelist = ['valac'] + # TODO support fallback + exelist = [self.default_vala[0]] try: p, out = Popen_safe(exelist + ['--version'])[0:2] except OSError: @@ -785,7 +883,7 @@ This is probably wrong, it should always point to the native compiler.''' % evar def detect_rust_compiler(self, want_cross): popen_exceptions = {} - compilers, ccache, is_cross, exe_wrap = self._get_compilers('rust', 'RUSTC', want_cross) + compilers, ccache, is_cross, exe_wrap = self._get_compilers('rust', want_cross) for compiler in compilers: if isinstance(compiler, str): compiler = [compiler] @@ -816,16 +914,15 @@ This is probably wrong, it should always point to the native compiler.''' % evar elif self.is_cross_build() 
and want_cross: exelist = mesonlib.stringlistify(self.cross_info.config['binaries']['d']) is_cross = True - elif shutil.which("ldc2"): - exelist = ['ldc2'] - elif shutil.which("ldc"): - exelist = ['ldc'] - elif shutil.which("gdc"): - exelist = ['gdc'] - elif shutil.which("dmd"): - exelist = ['dmd'] + elif 'd' in self.config_info.binaries: + exelist = mesonlib.stringlistify(self.config_info.binaries['d']) else: - raise EnvironmentException('Could not find any supported D compiler.') + for d in self.default_d: + if shutil.which(d): + exelist = [d] + break + else: + raise EnvironmentException('Could not find any supported D compiler.') try: p, out = Popen_safe(exelist + ['--version'])[0:2] @@ -853,7 +950,11 @@ This is probably wrong, it should always point to the native compiler.''' % evar raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"') def detect_swift_compiler(self): - exelist = ['swiftc'] + if 'swift' in self.config_info.binaries: + exelist = mesonlib.stringlistify(self.config_info.binaries['swift']) + else: + # TODO support fallback + exelist = [self.default_swift[0]] try: p, _, err = Popen_safe(exelist + ['-v']) except OSError: @@ -870,11 +971,11 @@ This is probably wrong, it should always point to the native compiler.''' % evar linker = [linker] linkers = [linker] else: - evar = 'AR' + evar = BinaryTable.evarMap['ar'] if evar in os.environ: linkers = [shlex.split(os.environ[evar])] elif isinstance(compiler, compilers.VisualStudioCCompiler): - linkers = [self.vs_static_linker] + linkers = [self.vs_static_linker, self.clang_cl_static_linker] elif isinstance(compiler, compilers.GnuCompiler): # Use gcc-ar if available; needed for LTO linkers = [self.gcc_static_linker, self.default_static_linker] @@ -884,14 +985,14 @@ This is probably wrong, it should always point to the native compiler.''' % evar elif isinstance(compiler, compilers.DCompiler): # Prefer static linkers over linkers used by D compilers if mesonlib.is_windows(): - linkers = 
[self.vs_static_linker, compiler.get_linker_exelist()] + linkers = [self.vs_static_linker, self.clang_cl_static_linker, compiler.get_linker_exelist()] else: linkers = [self.default_static_linker, compiler.get_linker_exelist()] else: linkers = [self.default_static_linker] popen_exceptions = {} for linker in linkers: - if 'lib' in linker or 'lib.exe' in linker: + if not set(['lib', 'lib.exe', 'llvm-lib', 'llvm-lib.exe']).isdisjoint(linker): arg = '/?' else: arg = '--version' @@ -900,7 +1001,7 @@ This is probably wrong, it should always point to the native compiler.''' % evar except OSError as e: popen_exceptions[' '.join(linker + [arg])] = e continue - if '/OUT:' in out or '/OUT:' in err: + if '/OUT:' in out.upper() or '/OUT:' in err.upper(): return VisualStudioLinker(linker) if p.returncode == 0 and ('armar' in linker or 'armar.exe' in linker): return ArmarLinker(linker) @@ -910,24 +1011,17 @@ This is probably wrong, it should always point to the native compiler.''' % evar return DLinker(linker, compiler.arch) if 'GDC' in out and ' based on D ' in out: return DLinker(linker, compiler.arch) + if err.startswith('Renesas') and ('rlink' in linker or 'rlink.exe' in linker): + return CcrxLinker(linker) if p.returncode == 0: return ArLinker(linker) if p.returncode == 1 and err.startswith('usage'): # OSX return ArLinker(linker) + if p.returncode == 1 and err.startswith('Usage'): # AIX + return ArLinker(linker) self._handle_exceptions(popen_exceptions, linkers, 'linker') raise EnvironmentException('Unknown static linker "%s"' % ' '.join(linkers)) - def detect_ccache(self): - try: - has_ccache = subprocess.call(['ccache', '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) - except OSError: - has_ccache = 1 - if has_ccache == 0: - cmdlist = ['ccache'] - else: - cmdlist = [] - return cmdlist - def get_source_dir(self): return self.source_dir @@ -1036,9 +1130,6 @@ class CrossBuildInfo: except Exception: raise EnvironmentException('Malformed value in cross file 
variable %s.' % entry) - if entry == 'cpu_family' and res not in known_cpu_families: - mlog.warning('Unknown CPU family %s, please report this at https://github.com/mesonbuild/meson/issues/new' % value) - if self.ok_type(res): self.config[s][entry] = res elif isinstance(res, list): @@ -1095,10 +1186,203 @@ class CrossBuildInfo: return False return True - class MachineInfo: def __init__(self, system, cpu_family, cpu, endian): self.system = system self.cpu_family = cpu_family self.cpu = cpu self.endian = endian + + def __eq__(self, other): + if self.__class__ is not other.__class__: + return NotImplemented + return \ + self.system == other.system and \ + self.cpu_family == other.cpu_family and \ + self.cpu == other.cpu and \ + self.endian == other.endian + + def __ne__(self, other): + if self.__class__ is not other.__class__: + return NotImplemented + return not self.__eq__(other) + + @staticmethod + def detect(compilers = None): + """Detect the machine we're running on + + If compilers are not provided, we cannot know as much. None out those + fields to avoid accidentally depending on partial knowledge. The + underlying ''detect_*'' method can be called to explicitly use the + partial information. 
+ """ + return MachineInfo( + detect_system(), + detect_cpu_family(compilers) if compilers is not None else None, + detect_cpu(compilers) if compilers is not None else None, + sys.byteorder) + + @staticmethod + def from_literal(literal): + minimum_literal = {'cpu', 'cpu_family', 'endian', 'system'} + if set(literal) < minimum_literal: + raise EnvironmentException( + 'Machine info is currently {}\n'.format(literal) + + 'but is missing {}.'.format(minimum_literal - set(literal))) + + cpu_family = literal['cpu_family'] + if cpu_family not in known_cpu_families: + mlog.warning('Unknown CPU family %s, please report this at https://github.com/mesonbuild/meson/issues/new' % cpu_family) + + endian = literal['endian'] + if endian not in ('little', 'big'): + mlog.warning('Unknown endian %s' % endian) + + return MachineInfo( + literal['system'], + cpu_family, + literal['cpu'], + endian) + + def is_windows(self): + """ + Machine is windows? + """ + return self.system == 'windows' + + def is_cygwin(self): + """ + Machine is cygwin? + """ + return self.system == 'cygwin' + + def is_linux(self): + """ + Machine is linux? + """ + return self.system == 'linux' + + def is_darwin(self): + """ + Machine is Darwin (iOS/OS X)? + """ + return self.system in ('darwin', 'ios') + + def is_android(self): + """ + Machine is Android? + """ + return self.system == 'android' + + def is_haiku(self): + """ + Machine is Haiku? + """ + return self.system == 'haiku' + + def is_openbsd(self): + """ + Machine is OpenBSD? + """ + return self.system == 'openbsd' + + # Various prefixes and suffixes for import libraries, shared libraries, + # static libraries, and executables. + # Versioning is added to these names in the backends as-needed. 
+ + def get_exe_suffix(self): + if self.is_windows() or self.is_cygwin(): + return 'exe' + else: + return '' + + def get_object_suffix(self): + if self.is_windows(): + return 'obj' + else: + return 'o' + + def libdir_layout_is_win(self): + return self.is_windows() \ + or self.is_cygwin() + +class MachineInfos(PerMachine): + def __init__(self): + super().__init__(None, None, None) + + def default_missing(self): + """Default host to buid and target to host. + + This allows just specifying nothing in the native case, just host in the + cross non-compiler case, and just target in the native-built + cross-compiler case. + """ + if self.host is None: + self.host = self.build + if self.target is None: + self.target = self.host + + def miss_defaulting(self): + """Unset definition duplicated from their previous to None + + This is the inverse of ''default_missing''. By removing defaulted + machines, we can elaborate the original and then redefault them and thus + avoid repeating the elaboration explicitly. + """ + if self.target == self.host: + self.target = None + if self.host == self.build: + self.host = None + + def detect_build(self, compilers = None): + self.build = MachineInfo.detect(compilers) + +class BinaryTable: + # Map from language identifiers to environment variables. 
+ evarMap = { + # Compilers + 'c': 'CC', + 'cpp': 'CXX', + 'cs': 'CSC', + 'd': 'DC', + 'fortran': 'FC', + 'objc': 'OBJC', + 'objcpp': 'OBJCXX', + 'rust': 'RUSTC', + 'vala': 'VALAC', + + # Binutils + 'strip': 'STRIP', + 'ar': 'AR', + } + + @classmethod + def detect_ccache(cls): + try: + has_ccache = subprocess.call(['ccache', '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) + except OSError: + has_ccache = 1 + if has_ccache == 0: + cmdlist = ['ccache'] + else: + cmdlist = [] + return cmdlist + + @classmethod + def warn_about_lang_pointing_to_cross(cls, compiler_exe, evar): + evar_str = os.environ.get(evar, 'WHO_WOULD_CALL_THEIR_COMPILER_WITH_THIS_NAME') + if evar_str == compiler_exe: + mlog.warning('''Env var %s seems to point to the cross compiler. +This is probably wrong, it should always point to the native compiler.''' % evar) + + @classmethod + def parse_entry(cls, entry): + compiler = mesonlib.stringlistify(entry) + # Ensure ccache exists and remove it if it doesn't + if compiler[0] == 'ccache': + compiler = compiler[1:] + ccache = cls.detect_ccache() + else: + ccache = [] + # Return value has to be a list of compiler 'choices' + return compiler, ccache diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 131c24e..4f09c0f 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -27,16 +27,18 @@ from .dependencies import InternalDependency, Dependency, NotFoundDependency, De from .interpreterbase import InterpreterBase from .interpreterbase import check_stringlist, flatten, noPosargs, noKwargs, stringArgs, permittedKwargs, noArgsFlattening from .interpreterbase import InterpreterException, InvalidArguments, InvalidCode, SubdirDoneRequest -from .interpreterbase import InterpreterObject, MutableInterpreterObject, Disabler +from .interpreterbase import InterpreterObject, MutableInterpreterObject, Disabler, disablerIfNotFound from .interpreterbase import FeatureNew, FeatureDeprecated, FeatureNewKwargs +from 
.interpreterbase import ObjectHolder from .modules import ModuleReturnValue -import os, sys, shutil, uuid +import os, shutil, uuid import re, shlex import subprocess from collections import namedtuple from pathlib import PurePath import traceback +import functools import importlib @@ -57,14 +59,6 @@ def stringifyUserArguments(args): raise InvalidArguments('Function accepts only strings, integers, lists and lists thereof.') -class ObjectHolder: - def __init__(self, obj, subproject=None): - self.held_object = obj - self.subproject = subproject - - def __repr__(self): - return ''.format(self.held_object) - class FeatureOptionHolder(InterpreterObject, ObjectHolder): def __init__(self, env, option): InterpreterObject.__init__(self) @@ -573,57 +567,11 @@ class GeneratedListHolder(InterpreterObject, ObjectHolder): def add_file(self, a): self.held_object.add_file(a) -class BuildMachine(InterpreterObject, ObjectHolder): - def __init__(self, compilers): - self.compilers = compilers +# A machine that's statically known from the cross file +class MachineHolder(InterpreterObject, ObjectHolder): + def __init__(self, machine_info): InterpreterObject.__init__(self) - held_object = environment.MachineInfo(environment.detect_system(), - environment.detect_cpu_family(self.compilers), - environment.detect_cpu(self.compilers), - sys.byteorder) - ObjectHolder.__init__(self, held_object) - self.methods.update({'system': self.system_method, - 'cpu_family': self.cpu_family_method, - 'cpu': self.cpu_method, - 'endian': self.endian_method, - }) - - @noPosargs - @permittedKwargs({}) - def cpu_family_method(self, args, kwargs): - return self.held_object.cpu_family - - @noPosargs - @permittedKwargs({}) - def cpu_method(self, args, kwargs): - return self.held_object.cpu - - @noPosargs - @permittedKwargs({}) - def system_method(self, args, kwargs): - return self.held_object.system - - @noPosargs - @permittedKwargs({}) - def endian_method(self, args, kwargs): - return self.held_object.endian - -# 
This class will provide both host_machine and -# target_machine -class CrossMachineInfo(InterpreterObject, ObjectHolder): - def __init__(self, cross_info): - InterpreterObject.__init__(self) - minimum_cross_info = {'cpu', 'cpu_family', 'endian', 'system'} - if set(cross_info) < minimum_cross_info: - raise InterpreterException( - 'Machine info is currently {}\n'.format(cross_info) + - 'but is missing {}.'.format(minimum_cross_info - set(cross_info))) - self.info = cross_info - minfo = environment.MachineInfo(cross_info['system'], - cross_info['cpu_family'], - cross_info['cpu'], - cross_info['endian']) - ObjectHolder.__init__(self, minfo) + ObjectHolder.__init__(self, machine_info) self.methods.update({'system': self.system_method, 'cpu': self.cpu_method, 'cpu_family': self.cpu_family_method, @@ -988,8 +936,26 @@ class CompilerHolder(InterpreterObject): 'first_supported_link_argument': self.first_supported_link_argument_method, 'unittest_args': self.unittest_args_method, 'symbols_have_underscore_prefix': self.symbols_have_underscore_prefix_method, + 'get_argument_syntax': self.get_argument_syntax_method, }) + def _dep_msg(self, deps, endl): + msg_single = 'with dependency {}' + msg_many = 'with dependencies {}' + if not deps: + return endl + if endl is None: + endl = '' + tpl = msg_many if len(deps) > 1 else msg_single + names = [] + for d in deps: + if isinstance(d, dependencies.ExternalLibrary): + name = '-l' + d.name + else: + name = d.name + names.append(name) + return tpl.format(', '.join(names)) + endl + @noPosargs @permittedKwargs({}) def version_method(self, args, kwargs): @@ -1000,7 +966,7 @@ class CompilerHolder(InterpreterObject): def cmd_array_method(self, args, kwargs): return self.compiler.exelist - def determine_args(self, kwargs): + def determine_args(self, kwargs, mode='link'): nobuiltins = kwargs.get('no_builtin_args', False) if not isinstance(nobuiltins, bool): raise InterpreterException('Type of no_builtin_args not a boolean.') @@ -1016,11 +982,12 
@@ class CompilerHolder(InterpreterObject): if not nobuiltins: opts = self.environment.coredata.compiler_options args += self.compiler.get_option_compile_args(opts) - args += self.compiler.get_option_link_args(opts) + if mode == 'link': + args += self.compiler.get_option_link_args(opts) args += mesonlib.stringlistify(kwargs.get('args', [])) return args - def determine_dependencies(self, kwargs): + def determine_dependencies(self, kwargs, endl=':'): deps = kwargs.get('dependencies', None) if deps is not None: deps = listify(deps) @@ -1034,7 +1001,7 @@ class CompilerHolder(InterpreterObject): raise InterpreterException('Dependencies must be external dependencies') final_deps.append(d) deps = final_deps - return deps + return deps, self._dep_msg(deps, endl) @permittedKwargs({ 'prefix', @@ -1050,9 +1017,11 @@ class CompilerHolder(InterpreterObject): if not isinstance(prefix, str): raise InterpreterException('Prefix argument of sizeof must be a string.') extra_args = mesonlib.stringlistify(kwargs.get('args', [])) - deps = self.determine_dependencies(kwargs) - result = self.compiler.alignment(typename, prefix, self.environment, extra_args, deps) - mlog.log('Checking for alignment of', mlog.bold(typename, True), ':', result) + deps, msg = self.determine_dependencies(kwargs) + result = self.compiler.alignment(typename, prefix, self.environment, + extra_args=extra_args, + dependencies=deps) + mlog.log('Checking for alignment of', mlog.bold(typename, True), msg, result) return result @permittedKwargs({ @@ -1074,9 +1043,10 @@ class CompilerHolder(InterpreterObject): testname = kwargs.get('name', '') if not isinstance(testname, str): raise InterpreterException('Testname argument must be a string.') - extra_args = self.determine_args(kwargs) - deps = self.determine_dependencies(kwargs) - result = self.compiler.run(code, self.environment, extra_args, deps) + extra_args = functools.partial(self.determine_args, kwargs) + deps, msg = self.determine_dependencies(kwargs, endl=None) + 
result = self.compiler.run(code, self.environment, extra_args=extra_args, + dependencies=deps) if len(testname) > 0: if not result.compiled: h = mlog.red('DID NOT COMPILE') @@ -1084,7 +1054,7 @@ class CompilerHolder(InterpreterObject): h = mlog.green('YES') else: h = mlog.red('NO (%d)' % result.returncode) - mlog.log('Checking if', mlog.bold(testname, True), 'runs:', h) + mlog.log('Checking if', mlog.bold(testname, True), msg, 'runs:', h) return TryRunResultHolder(result) @noPosargs @@ -1129,16 +1099,18 @@ class CompilerHolder(InterpreterObject): prefix = kwargs.get('prefix', '') if not isinstance(prefix, str): raise InterpreterException('Prefix argument of has_member must be a string.') - extra_args = self.determine_args(kwargs) - deps = self.determine_dependencies(kwargs) + extra_args = functools.partial(self.determine_args, kwargs) + deps, msg = self.determine_dependencies(kwargs) had = self.compiler.has_members(typename, [membername], prefix, - self.environment, extra_args, deps) + self.environment, + extra_args=extra_args, + dependencies=deps) if had: hadtxt = mlog.green('YES') else: hadtxt = mlog.red('NO') mlog.log('Checking whether type', mlog.bold(typename, True), - 'has member', mlog.bold(membername, True), ':', hadtxt) + 'has member', mlog.bold(membername, True), msg, hadtxt) return had @permittedKwargs({ @@ -1157,17 +1129,19 @@ class CompilerHolder(InterpreterObject): prefix = kwargs.get('prefix', '') if not isinstance(prefix, str): raise InterpreterException('Prefix argument of has_members must be a string.') - extra_args = self.determine_args(kwargs) - deps = self.determine_dependencies(kwargs) + extra_args = functools.partial(self.determine_args, kwargs) + deps, msg = self.determine_dependencies(kwargs) had = self.compiler.has_members(typename, membernames, prefix, - self.environment, extra_args, deps) + self.environment, + extra_args=extra_args, + dependencies=deps) if had: hadtxt = mlog.green('YES') else: hadtxt = mlog.red('NO') members = 
mlog.bold(', '.join(['"{}"'.format(m) for m in membernames])) mlog.log('Checking whether type', mlog.bold(typename, True), - 'has members', members, ':', hadtxt) + 'has members', members, msg, hadtxt) return had @permittedKwargs({ @@ -1186,13 +1160,15 @@ class CompilerHolder(InterpreterObject): if not isinstance(prefix, str): raise InterpreterException('Prefix argument of has_function must be a string.') extra_args = self.determine_args(kwargs) - deps = self.determine_dependencies(kwargs) - had = self.compiler.has_function(funcname, prefix, self.environment, extra_args, deps) + deps, msg = self.determine_dependencies(kwargs) + had = self.compiler.has_function(funcname, prefix, self.environment, + extra_args=extra_args, + dependencies=deps) if had: hadtxt = mlog.green('YES') else: hadtxt = mlog.red('NO') - mlog.log('Checking for function', mlog.bold(funcname, True), ':', hadtxt) + mlog.log('Checking for function', mlog.bold(funcname, True), msg, hadtxt) return had @permittedKwargs({ @@ -1210,14 +1186,15 @@ class CompilerHolder(InterpreterObject): prefix = kwargs.get('prefix', '') if not isinstance(prefix, str): raise InterpreterException('Prefix argument of has_type must be a string.') - extra_args = self.determine_args(kwargs) - deps = self.determine_dependencies(kwargs) - had = self.compiler.has_type(typename, prefix, self.environment, extra_args, deps) + extra_args = functools.partial(self.determine_args, kwargs) + deps, msg = self.determine_dependencies(kwargs) + had = self.compiler.has_type(typename, prefix, self.environment, + extra_args=extra_args, dependencies=deps) if had: hadtxt = mlog.green('YES') else: hadtxt = mlog.red('NO') - mlog.log('Checking for type', mlog.bold(typename, True), ':', hadtxt) + mlog.log('Checking for type', mlog.bold(typename, True), msg, hadtxt) return had @FeatureNew('compiler.compute_int', '0.40.0') @@ -1248,10 +1225,12 @@ class CompilerHolder(InterpreterObject): raise InterpreterException('High argument of compute_int must be an 
int.') if guess is not None and not isinstance(guess, int): raise InterpreterException('Guess argument of compute_int must be an int.') - extra_args = self.determine_args(kwargs) - deps = self.determine_dependencies(kwargs) - res = self.compiler.compute_int(expression, low, high, guess, prefix, self.environment, extra_args, deps) - mlog.log('Computing int of "%s": %d' % (expression, res)) + extra_args = functools.partial(self.determine_args, kwargs) + deps, msg = self.determine_dependencies(kwargs) + res = self.compiler.compute_int(expression, low, high, guess, prefix, + self.environment, extra_args=extra_args, + dependencies=deps) + mlog.log('Computing int of', mlog.bold(expression, True), msg, res) return res @permittedKwargs({ @@ -1269,10 +1248,11 @@ class CompilerHolder(InterpreterObject): prefix = kwargs.get('prefix', '') if not isinstance(prefix, str): raise InterpreterException('Prefix argument of sizeof must be a string.') - extra_args = self.determine_args(kwargs) - deps = self.determine_dependencies(kwargs) - esize = self.compiler.sizeof(element, prefix, self.environment, extra_args, deps) - mlog.log('Checking for size of "%s": %d' % (element, esize)) + extra_args = functools.partial(self.determine_args, kwargs) + deps, msg = self.determine_dependencies(kwargs) + esize = self.compiler.sizeof(element, prefix, self.environment, + extra_args=extra_args, dependencies=deps) + mlog.log('Checking for size of', mlog.bold(element, True), msg, esize) return esize @FeatureNew('compiler.get_define', '0.40.0') @@ -1291,10 +1271,12 @@ class CompilerHolder(InterpreterObject): prefix = kwargs.get('prefix', '') if not isinstance(prefix, str): raise InterpreterException('Prefix argument of get_define() must be a string.') - extra_args = self.determine_args(kwargs) - deps = self.determine_dependencies(kwargs) - value = self.compiler.get_define(element, prefix, self.environment, extra_args, deps) - mlog.log('Fetching value of define "%s": %s' % (element, value)) + extra_args 
= functools.partial(self.determine_args, kwargs) + deps, msg = self.determine_dependencies(kwargs) + value = self.compiler.get_define(element, prefix, self.environment, + extra_args=extra_args, + dependencies=deps) + mlog.log('Fetching value of define', mlog.bold(element, True), msg, value) return value @permittedKwargs({ @@ -1316,15 +1298,17 @@ class CompilerHolder(InterpreterObject): testname = kwargs.get('name', '') if not isinstance(testname, str): raise InterpreterException('Testname argument must be a string.') - extra_args = self.determine_args(kwargs) - deps = self.determine_dependencies(kwargs) - result = self.compiler.compiles(code, self.environment, extra_args, deps) + extra_args = functools.partial(self.determine_args, kwargs) + deps, msg = self.determine_dependencies(kwargs, endl=None) + result = self.compiler.compiles(code, self.environment, + extra_args=extra_args, + dependencies=deps) if len(testname) > 0: if result: h = mlog.green('YES') else: h = mlog.red('NO') - mlog.log('Checking if', mlog.bold(testname, True), 'compiles:', h) + mlog.log('Checking if', mlog.bold(testname, True), msg, 'compiles:', h) return result @permittedKwargs({ @@ -1346,15 +1330,17 @@ class CompilerHolder(InterpreterObject): testname = kwargs.get('name', '') if not isinstance(testname, str): raise InterpreterException('Testname argument must be a string.') - extra_args = self.determine_args(kwargs) - deps = self.determine_dependencies(kwargs) - result = self.compiler.links(code, self.environment, extra_args, deps) + extra_args = functools.partial(self.determine_args, kwargs) + deps, msg = self.determine_dependencies(kwargs, endl=None) + result = self.compiler.links(code, self.environment, + extra_args=extra_args, + dependencies=deps) if len(testname) > 0: if result: h = mlog.green('YES') else: h = mlog.red('NO') - mlog.log('Checking if', mlog.bold(testname, True), 'links:', h) + mlog.log('Checking if', mlog.bold(testname, True), msg, 'links:', h) return result 
@FeatureNew('compiler.check_header', '0.47.0') @@ -1373,14 +1359,16 @@ class CompilerHolder(InterpreterObject): prefix = kwargs.get('prefix', '') if not isinstance(prefix, str): raise InterpreterException('Prefix argument of has_header must be a string.') - extra_args = self.determine_args(kwargs) - deps = self.determine_dependencies(kwargs) - haz = self.compiler.check_header(hname, prefix, self.environment, extra_args, deps) + extra_args = functools.partial(self.determine_args, kwargs) + deps, msg = self.determine_dependencies(kwargs) + haz = self.compiler.check_header(hname, prefix, self.environment, + extra_args=extra_args, + dependencies=deps) if haz: h = mlog.green('YES') else: h = mlog.red('NO') - mlog.log('Check usable header "%s":' % hname, h) + mlog.log('Check usable header', mlog.bold(hname, True), msg, h) return haz @permittedKwargs({ @@ -1398,14 +1386,15 @@ class CompilerHolder(InterpreterObject): prefix = kwargs.get('prefix', '') if not isinstance(prefix, str): raise InterpreterException('Prefix argument of has_header must be a string.') - extra_args = self.determine_args(kwargs) - deps = self.determine_dependencies(kwargs) - haz = self.compiler.has_header(hname, prefix, self.environment, extra_args, deps) + extra_args = functools.partial(self.determine_args, kwargs) + deps, msg = self.determine_dependencies(kwargs) + haz = self.compiler.has_header(hname, prefix, self.environment, + extra_args=extra_args, dependencies=deps) if haz: h = mlog.green('YES') else: h = mlog.red('NO') - mlog.log('Has header "%s":' % hname, h) + mlog.log('Has header', mlog.bold(hname, True), msg, h) return haz @permittedKwargs({ @@ -1424,16 +1413,20 @@ class CompilerHolder(InterpreterObject): prefix = kwargs.get('prefix', '') if not isinstance(prefix, str): raise InterpreterException('Prefix argument of has_header_symbol must be a string.') - extra_args = self.determine_args(kwargs) - deps = self.determine_dependencies(kwargs) - haz = self.compiler.has_header_symbol(hname, 
symbol, prefix, self.environment, extra_args, deps) + extra_args = functools.partial(self.determine_args, kwargs) + deps, msg = self.determine_dependencies(kwargs) + haz = self.compiler.has_header_symbol(hname, symbol, prefix, self.environment, + extra_args=extra_args, + dependencies=deps) if haz: h = mlog.green('YES') else: h = mlog.red('NO') - mlog.log('Header <{0}> has symbol "{1}":'.format(hname, symbol), h) + mlog.log('Header <{0}> has symbol'.format(hname), mlog.bold(symbol, True), msg, h) return haz + @FeatureNewKwargs('compiler.find_library', '0.49.0', ['disabler']) + @disablerIfNotFound @permittedKwargs({ 'required', 'dirs', @@ -1566,6 +1559,12 @@ class CompilerHolder(InterpreterObject): args = mesonlib.stringlistify(args) return [a for a in args if self.has_func_attribute_method(a, kwargs)] + @FeatureNew('compiler.get_argument_syntax_method', '0.49.0') + @noPosargs + @noKwargs + def get_argument_syntax_method(self, args, kwargs): + return self.compiler.get_argument_syntax() + ModuleState = namedtuple('ModuleState', [ 'build_to_src', 'subproject', 'subdir', 'current_lineno', 'environment', @@ -1593,8 +1592,8 @@ class ModuleHolder(InterpreterObject, ObjectHolder): # because the Build object contains dicts and lists. 
num_targets = len(self.interpreter.build.targets) state = ModuleState( - build_to_src=os.path.relpath(self.interpreter.environment.get_source_dir(), - self.interpreter.environment.get_build_dir()), + build_to_src=mesonlib.relpath(self.interpreter.environment.get_source_dir(), + self.interpreter.environment.get_build_dir()), subproject=self.interpreter.subproject, subdir=self.interpreter.subdir, current_lineno=self.interpreter.current_lineno, @@ -1686,12 +1685,15 @@ class MesonMain(InterpreterObject): @permittedKwargs({}) def add_dist_script_method(self, args, kwargs): - if len(args) != 1: - raise InterpreterException('add_dist_script takes exactly one argument') + if len(args) < 1: + raise InterpreterException('add_dist_script takes one or more arguments') + if len(args) > 1: + FeatureNew('Calling "add_dist_script" with multiple arguments', '0.49.0').use(self.interpreter.subproject) check_stringlist(args, 'add_dist_script argument must be a string') if self.interpreter.subproject != '': raise InterpreterException('add_dist_script may not be used in a subproject.') - self.build.dist_scripts.append(os.path.join(self.interpreter.subdir, args[0])) + script = self._find_source_script(args[0], args[1:]) + self.build.dist_scripts.append(script) @noPosargs @permittedKwargs({}) @@ -1934,20 +1936,23 @@ class Interpreter(InterpreterBase): self.build_def_files = [os.path.join(self.subdir, environment.build_filename)] if not mock: self.parse_project() - self.builtin['build_machine'] = BuildMachine(self.coredata.compilers) - if not self.build.environment.is_cross_build(): - self.builtin['host_machine'] = self.builtin['build_machine'] - self.builtin['target_machine'] = self.builtin['build_machine'] - else: - cross_info = self.build.environment.cross_info - if cross_info.has_host(): - self.builtin['host_machine'] = CrossMachineInfo(cross_info.config['host_machine']) - else: - self.builtin['host_machine'] = self.builtin['build_machine'] - if cross_info.has_target(): - 
self.builtin['target_machine'] = CrossMachineInfo(cross_info.config['target_machine']) - else: - self.builtin['target_machine'] = self.builtin['host_machine'] + + # Initialize machine descriptions. We can do a better job now because we + # have the compilers needed to gain more knowledge, so wipe out old + # inferrence and start over. + self.build.environment.machines.miss_defaulting() + self.build.environment.machines.detect_build(self.coredata.compilers) + self.build.environment.machines.default_missing() + assert self.build.environment.machines.build.cpu is not None + assert self.build.environment.machines.host.cpu is not None + assert self.build.environment.machines.target.cpu is not None + + self.builtin['build_machine'] = \ + MachineHolder(self.build.environment.machines.build) + self.builtin['host_machine'] = \ + MachineHolder(self.build.environment.machines.host) + self.builtin['target_machine'] = \ + MachineHolder(self.build.environment.machines.target) def get_non_matching_default_options(self): env = self.environment @@ -2229,14 +2234,7 @@ external dependencies (including libraries) must go to "dependencies".''') raise InterpreterException('Program or command {!r} not found ' 'or not executable'.format(cmd)) cmd = prog - try: - cmd_path = os.path.relpath(cmd.get_path(), start=srcdir) - except ValueError: - # On Windows a relative path can't be evaluated for - # paths on two different drives (i.e. c:\foo and f:\bar). - # The only thing left to is is to use the original absolute - # path. 
- cmd_path = cmd.get_path() + cmd_path = mesonlib.relpath(cmd.get_path(), start=srcdir) if not cmd_path.startswith('..') and cmd_path not in self.build_def_files: self.build_def_files.append(cmd_path) expanded_args = [] @@ -2253,7 +2251,7 @@ external dependencies (including libraries) must go to "dependencies".''') if not os.path.isabs(a): a = os.path.join(builddir if in_builddir else srcdir, self.subdir, a) if os.path.isfile(a): - a = os.path.relpath(a, start=srcdir) + a = mesonlib.relpath(a, start=srcdir) if not a.startswith('..'): if a not in self.build_def_files: self.build_def_files.append(a) @@ -2312,22 +2310,24 @@ external dependencies (including libraries) must go to "dependencies".''') return subproject subproject_dir_abs = os.path.join(self.environment.get_source_dir(), self.subproject_dir) - r = wrap.Resolver(subproject_dir_abs, self.coredata.wrap_mode) + r = wrap.Resolver(subproject_dir_abs, self.coredata.get_builtin_option('wrap_mode')) try: resolved = r.resolve(dirname) - except RuntimeError as e: - # if the reason subproject execution failed was because - # the directory doesn't exist, try to give some helpful - # advice if it's a nested subproject that needs - # promotion... 
- self.print_nested_info(dirname) - - if required: - msg = 'Subproject directory {!r} does not exist and cannot be downloaded:\n{}' - raise InterpreterException(msg.format(os.path.join(self.subproject_dir, dirname), e)) - - mlog.log('\nSubproject ', mlog.bold(dirname), 'is buildable:', mlog.red('NO'), '(disabling)\n') - return self.disabled_subproject(dirname) + except wrap.WrapException as e: + subprojdir = os.path.join(self.subproject_dir, r.directory) + if not required: + mlog.log('\nSubproject ', mlog.bold(subprojdir), 'is buildable:', mlog.red('NO'), '(disabling)\n') + return self.disabled_subproject(dirname) + + if isinstance(e, wrap.WrapNotFoundException): + # if the reason subproject execution failed was because + # the directory doesn't exist, try to give some helpful + # advice if it's a nested subproject that needs + # promotion... + self.print_nested_info(dirname) + + msg = 'Failed to initialize {!r}:\n{}' + raise InterpreterException(msg.format(subprojdir, e)) subdir = os.path.join(self.subproject_dir, resolved) os.makedirs(os.path.join(self.build.environment.get_build_dir(), subdir), exist_ok=True) @@ -2336,7 +2336,8 @@ external dependencies (including libraries) must go to "dependencies".''') with mlog.nested(): try: mlog.log('\nExecuting subproject', mlog.bold(dirname), '\n') - subi = Interpreter(self.build, self.backend, dirname, subdir, self.subproject_dir, + new_build = self.build.copy() + subi = Interpreter(new_build, self.backend, dirname, subdir, self.subproject_dir, self.modules, default_options) subi.subprojects = self.subprojects @@ -2362,6 +2363,7 @@ external dependencies (including libraries) must go to "dependencies".''') self.subprojects.update(subi.subprojects) self.subprojects[dirname] = SubprojectHolder(subi, self.subproject_dir, dirname) self.build_def_files += subi.build_def_files + self.build.merge(subi.build) return self.subprojects[dirname] def get_option_internal(self, optname): @@ -2419,9 +2421,18 @@ external dependencies 
(including libraries) must go to "dependencies".''') @noKwargs def func_configuration_data(self, node, args, kwargs): - if args: - raise InterpreterException('configuration_data takes no arguments') - return ConfigurationDataHolder(self.subproject) + if len(args) > 1: + raise InterpreterException('configuration_data takes only one optional positional arguments') + elif len(args) == 1: + initial_values = args[0] + if not isinstance(initial_values, dict): + raise InterpreterException('configuration_data first argument must be a dictionary') + else: + initial_values = {} + cdata = ConfigurationDataHolder(self.subproject) + for k, v in initial_values.items(): + cdata.set_method([k, v], {}) + return cdata def set_options(self, default_options): # Set default options as if they were passed to the command line. @@ -2737,8 +2748,7 @@ external dependencies (including libraries) must go to "dependencies".''') self.coredata. base_options[optname] = oobj self.emit_base_options_warnings(enabled_opts) - def program_from_cross_file(self, prognames, silent=False): - cross_info = self.environment.cross_info + def _program_from_file(self, prognames, bins, silent): for p in prognames: if hasattr(p, 'held_object'): p = p.held_object @@ -2746,11 +2756,19 @@ external dependencies (including libraries) must go to "dependencies".''') continue # Always points to a local (i.e. self generated) file. 
if not isinstance(p, str): raise InterpreterException('Executable name must be a string') - prog = ExternalProgram.from_cross_info(cross_info, p) + prog = ExternalProgram.from_bin_list(bins, p) if prog.found(): return ExternalProgramHolder(prog) return None + def program_from_cross_file(self, prognames, silent=False): + bins = self.environment.cross_info.config['binaries'] + return self._program_from_file(prognames, bins, silent) + + def program_from_config_file(self, prognames, silent=False): + bins = self.environment.config_info.binaries + return self._program_from_file(prognames, bins, silent) + def program_from_system(self, args, silent=False): # Search for scripts relative to current subdir. # Do not cache found programs because find_program('foobar') @@ -2805,10 +2823,14 @@ external dependencies (including libraries) must go to "dependencies".''') def find_program_impl(self, args, native=False, required=True, silent=True): if not isinstance(args, list): args = [args] + progobj = self.program_from_overrides(args, silent=silent) - if progobj is None and self.build.environment.is_cross_build(): - if not native: + if progobj is None: + if self.build.environment.is_cross_build() and not native: progobj = self.program_from_cross_file(args, silent=silent) + else: + progobj = self.program_from_config_file(args, silent=silent) + if progobj is None: progobj = self.program_from_system(args, silent=silent) if required and (progobj is None or not progobj.found()): @@ -2819,6 +2841,8 @@ external dependencies (including libraries) must go to "dependencies".''') self.store_name_lookups(args) return progobj + @FeatureNewKwargs('find_program', '0.49.0', ['disabler']) + @disablerIfNotFound @permittedKwargs(permitted_kwargs['find_program']) def func_find_program(self, node, args, kwargs): if not args: @@ -2884,35 +2908,33 @@ external dependencies (including libraries) must go to "dependencies".''') return True def get_subproject_dep(self, name, dirname, varname, required): + dep 
= DependencyHolder(NotFoundDependency(self.environment), self.subproject) try: subproject = self.subprojects[dirname] - if not subproject.found(): - if not required: - return DependencyHolder(NotFoundDependency(self.environment), self.subproject) + if subproject.found(): + dep = self.subprojects[dirname].get_variable_method([varname], {}) + except InvalidArguments as e: + pass - raise DependencyException('Subproject %s was not found.' % (name)) + if not isinstance(dep, DependencyHolder): + raise InvalidCode('Fetched variable {!r} in the subproject {!r} is ' + 'not a dependency object.'.format(varname, dirname)) - dep = self.subprojects[dirname].get_variable_method([varname], {}) - except InvalidArguments as e: + if not dep.found(): if required: - raise DependencyException('Could not find dependency {} in subproject {}; {}' - ''.format(varname, dirname, str(e))) + raise DependencyException('Could not find dependency {} in subproject {}' + ''.format(varname, dirname)) # If the dependency is not required, don't raise an exception subproj_path = os.path.join(self.subproject_dir, dirname) mlog.log('Dependency', mlog.bold(name), 'from subproject', mlog.bold(subproj_path), 'found:', mlog.red('NO')) - return None - if not isinstance(dep, DependencyHolder): - raise InvalidCode('Fetched variable {!r} in the subproject {!r} is ' - 'not a dependency object.'.format(varname, dirname)) + return dep def _find_cached_fallback_dep(self, name, dirname, varname, wanted, required): if dirname not in self.subprojects: return False dep = self.get_subproject_dep(name, dirname, varname, required) - if not dep: - return False if not dep.found(): return dep @@ -2947,8 +2969,10 @@ external dependencies (including libraries) must go to "dependencies".''') elif name == 'openmp': FeatureNew('OpenMP Dependency', '0.46.0').use(self.subproject) + @FeatureNewKwargs('dependency', '0.49.0', ['disabler']) @FeatureNewKwargs('dependency', '0.40.0', ['method']) @FeatureNewKwargs('dependency', '0.38.0', 
['default_options']) + @disablerIfNotFound @permittedKwargs(permitted_kwargs['dependency']) def func_dependency(self, node, args, kwargs): self.validate_arguments(args, 1, [str]) @@ -2989,7 +3013,7 @@ external dependencies (including libraries) must go to "dependencies".''') dep = NotFoundDependency(self.environment) # Unless a fallback exists and is forced ... - if self.coredata.wrap_mode == WrapMode.forcefallback and 'fallback' in kwargs: + if self.coredata.get_builtin_option('wrap_mode') == WrapMode.forcefallback and 'fallback' in kwargs: pass # ... search for it outside the project elif name != '': @@ -3026,26 +3050,21 @@ external dependencies (including libraries) must go to "dependencies".''') return Disabler() def print_nested_info(self, dependency_name): - message_templ = '''\nDependency %s not found but it is available in a sub-subproject. -To use it in the current project, promote it by going in the project source -root and issuing %s. - -''' + message = ['Dependency', mlog.bold(dependency_name), 'not found but it is available in a sub-subproject.\n' + + 'To use it in the current project, promote it by going in the project source\n' + 'root and issuing'] sprojs = mesonlib.detect_subprojects('subprojects', self.source_root) if dependency_name not in sprojs: return found = sprojs[dependency_name] if len(found) > 1: - suffix = 'one of the following commands' + message.append('one of the following commands:') else: - suffix = 'the following command' - message = message_templ % (dependency_name, suffix) - cmds = [] - command_templ = 'meson wrap promote ' + message.append('the following command:') + command_templ = '\nmeson wrap promote {}' for l in found: - cmds.append(command_templ + l[len(self.source_root) + 1:]) - final_message = message + '\n'.join(cmds) - print(final_message) + message.append(mlog.bold(command_templ.format(l[len(self.source_root) + 1:]))) + mlog.warning(*message) def get_subproject_infos(self, kwargs): fbinfo = kwargs['fallback'] @@ 
-3056,12 +3075,12 @@ root and issuing %s. def dependency_fallback(self, name, kwargs): display_name = name if name else '(anonymous)' - if self.coredata.wrap_mode in (WrapMode.nofallback, WrapMode.nodownload): + if self.coredata.get_builtin_option('wrap_mode') in (WrapMode.nofallback, WrapMode.nodownload): mlog.log('Not looking for a fallback subproject for the dependency', mlog.bold(display_name), 'because:\nUse of fallback' 'dependencies is disabled.') return None - elif self.coredata.wrap_mode == WrapMode.forcefallback: + elif self.coredata.get_builtin_option('wrap_mode') == WrapMode.forcefallback: mlog.log('Looking for a fallback subproject for the dependency', mlog.bold(display_name), 'because:\nUse of fallback dependencies is forced.') else: @@ -3093,8 +3112,8 @@ root and issuing %s. return None required = kwargs.get('required', True) dep = self.get_subproject_dep(name, dirname, varname, required) - if not dep: - return None + if not dep.found(): + return dep subproj_path = os.path.join(self.subproject_dir, dirname) # Check if the version of the declared dependency matches what we want if 'version' in kwargs: @@ -3233,6 +3252,12 @@ root and issuing %s. 'Implementation-only, without FeatureNew checks, for internal use' name = args[0] kwargs['install_mode'] = self._get_kwarg_install_mode(kwargs) + if 'input' in kwargs: + try: + kwargs['input'] = self.source_strings_to_files(extract_as_list(kwargs, 'input')) + except mesonlib.MesonException: + mlog.warning('''Custom target input \'%s\' can\'t be converted to File object(s). +This will become a hard error in the future.''' % kwargs['input']) tg = CustomTargetHolder(build.CustomTarget(name, self.subdir, self.subproject, kwargs), self) self.add_target(name, tg.held_object) return tg @@ -3580,9 +3605,13 @@ root and issuing %s. 
raise InterpreterException('@INPUT@ used as command argument, but no input file specified.') # Validate output output = kwargs['output'] - ofile_rpath = os.path.join(self.subdir, output) if not isinstance(output, str): raise InterpreterException('Output file name must be a string') + if ifile_abs: + values = mesonlib.get_filenames_templates_dict([ifile_abs], None) + outputs = mesonlib.substitute_values([output], values) + output = outputs[0] + ofile_rpath = os.path.join(self.subdir, output) if ofile_rpath in self.configure_file_outputs: mesonbuildfile = os.path.join(self.subdir, 'meson.build') current_call = "{}:{}".format(mesonbuildfile, self.current_lineno) @@ -3590,10 +3619,6 @@ root and issuing %s. mlog.warning('Output file', mlog.bold(ofile_rpath, True), 'for configure_file() at', current_call, 'overwrites configure_file() output at', first_call) else: self.configure_file_outputs[ofile_rpath] = self.current_lineno - if ifile_abs: - values = mesonlib.get_filenames_templates_dict([ifile_abs], None) - outputs = mesonlib.substitute_values([output], values) - output = outputs[0] if os.path.dirname(output) != '': raise InterpreterException('Output file name must not contain a subdirectory.') (ofile_path, ofile_fname) = os.path.split(os.path.join(self.subdir, output)) @@ -3601,7 +3626,12 @@ root and issuing %s. # Perform the appropriate action if 'configuration' in kwargs: conf = kwargs['configuration'] - if not isinstance(conf, ConfigurationDataHolder): + if isinstance(conf, dict): + cdata = ConfigurationDataHolder(self.subproject) + for k, v in conf.items(): + cdata.set_method([k, v], {}) + conf = cdata + elif not isinstance(conf, ConfigurationDataHolder): raise InterpreterException('Argument "configuration" is not of type configuration_data') mlog.log('Configuring', mlog.bold(output), 'using configuration') if inputfile is not None: @@ -3744,6 +3774,14 @@ different subdirectory. 
timeout_multiplier = kwargs.get('timeout_multiplier', 1) if not isinstance(timeout_multiplier, int): raise InterpreterException('Timeout multiplier must be a number.') + is_default = kwargs.get('is_default', False) + if not isinstance(is_default, bool): + raise InterpreterException('is_default option must be a boolean') + if is_default: + if self.build.test_setup_default_name is not None: + raise InterpreterException('\'%s\' is already set as default. ' + 'is_default can be set to true only once' % self.build.test_setup_default_name) + self.build.test_setup_default_name = setup_name env = self.unpack_env_kwarg(kwargs) self.build.test_setups[setup_name] = build.TestSetup(exe_wrapper=exe_wrapper, gdb=gdb, @@ -3830,7 +3868,7 @@ different subdirectory. @stringArgs @noKwargs def func_join_paths(self, node, args, kwargs): - return os.path.join(*args).replace('\\', '/') + return self.join_path_strings(args) def run(self): super().run() @@ -3851,7 +3889,8 @@ different subdirectory. return if 'b_sanitize' not in self.coredata.base_options: return - if self.coredata.base_options['b_lundef'].value: + if (self.coredata.base_options['b_lundef'].value and + self.coredata.base_options['b_sanitize'].value != 'none'): mlog.warning('''Trying to use {} sanitizer on Clang with b_lundef. This will probably not work. Try setting b_lundef to false instead.'''.format(self.coredata.base_options['b_sanitize'].value)) @@ -3910,7 +3949,7 @@ Try setting b_lundef to false instead.'''.format(self.coredata.base_options['b_s sources = [sources] for s in sources: if isinstance(s, (mesonlib.File, GeneratedListHolder, - CustomTargetHolder, CustomTargetIndexHolder)): + TargetHolder, CustomTargetIndexHolder)): pass elif isinstance(s, str): self.validate_within_subproject(self.subdir, s) diff --git a/mesonbuild/interpreterbase.py b/mesonbuild/interpreterbase.py index 1c74eeb..707b8f7 100644 --- a/mesonbuild/interpreterbase.py +++ b/mesonbuild/interpreterbase.py @@ -21,6 +21,14 @@ from . 
import environment, dependencies import os, copy, re, types from functools import wraps +class ObjectHolder: + def __init__(self, obj, subproject=None): + self.held_object = obj + self.subproject = subproject + + def __repr__(self): + return ''.format(self.held_object) + # Decorators for method calls. def check_stringlist(a, msg='Arguments must be strings.'): @@ -137,6 +145,17 @@ def noArgsFlattening(f): setattr(f, 'no-args-flattening', True) return f +def disablerIfNotFound(f): + @wraps(f) + def wrapped(*wrapped_args, **wrapped_kwargs): + kwargs = _get_callee_args(wrapped_args)[3] + disabler = kwargs.pop('disabler', False) + ret = f(*wrapped_args, **wrapped_kwargs) + if disabler and not ret.held_object.found(): + return Disabler() + return ret + return wrapped + class permittedKwargs: def __init__(self, permitted): @@ -292,6 +311,12 @@ class InvalidArguments(InterpreterException): class SubdirDoneRequest(BaseException): pass +class ContinueRequest(BaseException): + pass + +class BreakRequest(BaseException): + pass + class InterpreterObject: def __init__(self): self.methods = {} @@ -357,6 +382,9 @@ class InterpreterBase: me.file = environment.build_filename raise me + def join_path_strings(self, args): + return os.path.join(*args).replace('\\', '/') + def parse_project(self): """ Parses project() and initializes languages, compilers etc. 
Do this @@ -445,6 +473,10 @@ class InterpreterBase: return self.evaluate_indexing(cur) elif isinstance(cur, mparser.TernaryNode): return self.evaluate_ternary(cur) + elif isinstance(cur, mparser.ContinueNode): + raise ContinueRequest() + elif isinstance(cur, mparser.BreakNode): + raise BreakRequest() elif self.is_elementary_type(cur): return cur else: @@ -487,6 +519,13 @@ class InterpreterBase: return False return True + def evaluate_in(self, val1, val2): + if not isinstance(val1, (str, int, float, ObjectHolder)): + raise InvalidArguments('lvalue of "in" operator must be a string, integer, float, or object') + if not isinstance(val2, (list, dict)): + raise InvalidArguments('rvalue of "in" operator must be an array or a dict') + return val1 in val2 + def evaluate_comparison(self, node): val1 = self.evaluate_statement(node.left) if is_disabler(val1): @@ -494,6 +533,10 @@ class InterpreterBase: val2 = self.evaluate_statement(node.right) if is_disabler(val2): return val2 + if node.ctype == 'in': + return self.evaluate_in(val1, val2) + elif node.ctype == 'notin': + return not self.evaluate_in(val1, val2) valid = self.validate_comparison_types(val1, val2) # Ordering comparisons of different types isn't allowed since PR #1810 # (0.41.0). 
Since PR #2884 we also warn about equality comparisons of @@ -588,9 +631,13 @@ The result of this is undefined and will become a hard error in a future Meson r raise InvalidCode('Multiplication works only with integers.') return l * r elif cur.operation == 'div': - if not isinstance(l, int) or not isinstance(r, int): - raise InvalidCode('Division works only with integers.') - return l // r + if isinstance(l, str) and isinstance(r, str): + return self.join_path_strings((l, r)) + if isinstance(l, int) and isinstance(r, int): + if r == 0: + raise InvalidCode('Division by zero.') + return l // r + raise InvalidCode('Division works only with strings or integers.') elif cur.operation == 'mod': if not isinstance(l, int) or not isinstance(r, int): raise InvalidCode('Modulo works only with integers.') @@ -622,7 +669,12 @@ The result of this is undefined and will become a hard error in a future Meson r return items for item in items: self.set_variable(varname, item) - self.evaluate_codeblock(node.block) + try: + self.evaluate_codeblock(node.block) + except ContinueRequest: + continue + except BreakRequest: + break elif isinstance(items, dict): if len(node.varnames) != 2: raise InvalidArguments('Foreach on dict unpacks key and value') @@ -631,7 +683,12 @@ The result of this is undefined and will become a hard error in a future Meson r for key, value in items.items(): self.set_variable(node.varnames[0].value, key) self.set_variable(node.varnames[1].value, value) - self.evaluate_codeblock(node.block) + try: + self.evaluate_codeblock(node.block) + except ContinueRequest: + continue + except BreakRequest: + break else: raise InvalidArguments('Items of foreach loop must be an array or a dict') @@ -692,6 +749,7 @@ The result of this is undefined and will become a hard error in a future Meson r except IndexError: raise InterpreterException('Index %d out of bounds of array of size %d.' 
% (index, len(iobject))) + def function_call(self, node): func_name = node.func_name (posargs, kwargs) = self.reduce_arguments(node.args) @@ -933,7 +991,22 @@ The result of this is undefined and will become a hard error in a future Meson r a = args.kwargs[key] reduced_kw[key] = self.evaluate_statement(a) self.argument_depth -= 1 - return reduced_pos, reduced_kw + final_kw = self.expand_default_kwargs(reduced_kw) + return reduced_pos, final_kw + + def expand_default_kwargs(self, kwargs): + if 'kwargs' not in kwargs: + return kwargs + to_expand = kwargs.pop('kwargs') + if not isinstance(to_expand, dict): + raise InterpreterException('Value of "kwargs" must be dictionary.') + if 'kwargs' in to_expand: + raise InterpreterException('Kwargs argument must not contain a "kwargs" entry. Points for thinking meta, though. :P') + for k, v in to_expand.items(): + if k in kwargs: + raise InterpreterException('Entry "{}" defined both as a keyword argument and in a "kwarg" entry.'.format(k)) + kwargs[k] = v + return kwargs def assignment(self, node): assert(isinstance(node, mparser.AssignmentNode)) diff --git a/mesonbuild/linkers.py b/mesonbuild/linkers.py index 66586e4..5432514 100644 --- a/mesonbuild/linkers.py +++ b/mesonbuild/linkers.py @@ -192,3 +192,54 @@ class DLinker(StaticLinker): def get_link_debugfile_args(self, targetfile): return [] + +class CcrxLinker(StaticLinker): + + def __init__(self, exelist): + self.exelist = exelist + self.id = 'rlink' + pc, stdo = Popen_safe(self.exelist + ['-h'])[0:2] + self.std_args = [] + + def can_linker_accept_rsp(self): + return False + + def build_rpath_args(self, build_dir, from_dir, rpath_paths, build_rpath, install_rpath): + return [] + + def get_exelist(self): + return self.exelist[:] + + def get_std_link_args(self): + return self.std_args + + def get_output_args(self, target): + return ['-output=%s' % target] + + def get_buildtype_linker_args(self, buildtype): + return [] + + def get_linker_always_args(self): + return ['-nologo', 
'-form=library'] + + def get_coverage_link_args(self): + return [] + + def get_always_args(self): + return [] + + def thread_link_flags(self, env): + return [] + + def openmp_flags(self): + return [] + + def get_option_link_args(self, options): + return [] + + @classmethod + def unix_args_to_native(cls, args): + return args[:] + + def get_link_debugfile_args(self, targetfile): + return [] diff --git a/mesonbuild/mconf.py b/mesonbuild/mconf.py index 2fd69b0..28589da 100644 --- a/mesonbuild/mconf.py +++ b/mesonbuild/mconf.py @@ -13,17 +13,13 @@ # limitations under the License. import os -import argparse from . import (coredata, mesonlib, build) -def buildparser(): - parser = argparse.ArgumentParser(prog='meson configure') +def add_arguments(parser): coredata.register_builtin_arguments(parser) - parser.add_argument('builddir', nargs='?', default='.') parser.add_argument('--clearcache', action='store_true', default=False, help='Clear cached state (e.g. found dependencies)') - return parser class ConfException(mesonlib.MesonException): @@ -119,21 +115,21 @@ class Conf: print(' Source dir', self.build.environment.source_dir) print(' Build dir ', self.build.environment.build_dir) - dir_option_names = ['prefix', - 'libdir', - 'libexecdir', - 'bindir', - 'sbindir', - 'includedir', + dir_option_names = ['bindir', 'datadir', - 'mandir', + 'includedir', 'infodir', + 'libdir', + 'libexecdir', 'localedir', - 'sysconfdir', 'localstatedir', - 'sharedstatedir'] - test_option_names = ['stdsplit', - 'errorlogs'] + 'mandir', + 'prefix', + 'sbindir', + 'sharedstatedir', + 'sysconfdir'] + test_option_names = ['errorlogs', + 'stdsplit'] core_option_names = [k for k in self.coredata.builtins if k not in dir_option_names + test_option_names] dir_options = {k: o for k, o in self.coredata.builtins.items() if k in dir_option_names} @@ -149,9 +145,7 @@ class Conf: self.print_options('Testing options', test_options) -def run(args): - args = mesonlib.expand_arguments(args) - options = 
buildparser().parse_args(args) +def run(options): coredata.parse_cmd_line_options(options) builddir = os.path.abspath(os.path.realpath(options.builddir)) try: @@ -159,6 +153,7 @@ def run(args): save = False if len(options.cmd_line_options) > 0: c.set_options(options.cmd_line_options) + coredata.update_cmd_line_file(builddir, options) save = True elif options.clearcache: c.clear_cache() diff --git a/mesonbuild/mesonlib.py b/mesonbuild/mesonlib.py index 8648a0d..59d4f81 100644 --- a/mesonbuild/mesonlib.py +++ b/mesonbuild/mesonlib.py @@ -20,6 +20,9 @@ import stat import time import platform, subprocess, operator, os, shutil, re import collections +from enum import Enum +from functools import lru_cache + from mesonbuild import mlog have_fcntl = False @@ -48,6 +51,23 @@ else: python_command = [sys.executable] meson_command = None +def set_meson_command(mainfile): + global python_command + global meson_command + # On UNIX-like systems `meson` is a Python script + # On Windows `meson` and `meson.exe` are wrapper exes + if not mainfile.endswith('.py'): + meson_command = [mainfile] + elif os.path.isabs(mainfile) and mainfile.endswith('mesonmain.py'): + # Can't actually run meson with an absolute path to mesonmain.py, it must be run as -m mesonbuild.mesonmain + meson_command = python_command + ['-m', 'mesonbuild.mesonmain'] + else: + # Either run uninstalled, or full path to meson-script.py + meson_command = python_command + [mainfile] + # We print this value for unit tests. + if 'MESON_COMMAND_TESTS' in os.environ: + mlog.log('meson_command is {!r}'.format(meson_command)) + def is_ascii_string(astring): try: if isinstance(astring, str): @@ -204,6 +224,7 @@ class File: return ret.format(self.relative_name()) @staticmethod + @lru_cache(maxsize=None) def from_source_file(source_root, subdir, fname): if not os.path.isfile(os.path.join(source_root, subdir, fname)): raise MesonException('File %s does not exist.' 
% fname) @@ -217,12 +238,14 @@ class File: def from_absolute_file(fname): return File(False, '', fname) + @lru_cache(maxsize=None) def rel_to_builddir(self, build_to_src): if self.is_built: return self.relative_name() else: return os.path.join(build_to_src, self.subdir, self.fname) + @lru_cache(maxsize=None) def absolute_path(self, srcdir, builddir): absdir = srcdir if self.is_built: @@ -241,6 +264,7 @@ class File: def __hash__(self): return hash((self.fname, self.subdir, self.is_built)) + @lru_cache(maxsize=None) def relative_name(self): return os.path.join(self.subdir, self.fname) @@ -260,6 +284,53 @@ def classify_unity_sources(compilers, sources): compsrclist[comp].append(src) return compsrclist +class OrderedEnum(Enum): + """ + An Enum which additionally offers homogeneous ordered comparison. + """ + def __ge__(self, other): + if self.__class__ is other.__class__: + return self.value >= other.value + return NotImplemented + + def __gt__(self, other): + if self.__class__ is other.__class__: + return self.value > other.value + return NotImplemented + + def __le__(self, other): + if self.__class__ is other.__class__: + return self.value <= other.value + return NotImplemented + + def __lt__(self, other): + if self.__class__ is other.__class__: + return self.value < other.value + return NotImplemented + +MachineChoice = OrderedEnum('MachineChoice', ['BUILD', 'HOST', 'TARGET']) + +class PerMachine: + def __init__(self, build, host, target): + self.build = build + self.host = host + self.target = target + + def __getitem__(self, machine: MachineChoice): + return { + MachineChoice.BUILD: self.build, + MachineChoice.HOST: self.host, + MachineChoice.TARGET: self.target + }[machine] + + def __setitem__(self, machine: MachineChoice, val): + key = { + MachineChoice.BUILD: 'build', + MachineChoice.HOST: 'host', + MachineChoice.TARGET: 'target' + }[machine] + setattr(self, key, val) + def is_osx(): return platform.system().lower() == 'darwin' @@ -292,77 +363,93 @@ def 
is_dragonflybsd(): def is_freebsd(): return platform.system().lower() == 'freebsd' +def _get_machine_is_cross(env, is_cross): + """ + This is not morally correct, but works for now. For cross builds the build + and host machines differ. `is_cross == true` means the host machine, while + `is_cross == false` means the build machine. Both are used in practice, + even though the documentation refers to the host machine implying we should + hard-code it. For non-cross builds `is_cross == false` is passed but the + host and build machines are identical so it doesn't matter. + + Users for `for_*` should instead specify up front which machine they want + and query that like: + + env.machines[MachineChoice.HOST].is_haiku() + + """ + for_machine = MachineChoice.HOST if is_cross else MachineChoice.BUILD + return env.machines[for_machine] + def for_windows(is_cross, env): """ Host machine is windows? + Deprecated: Please use `env.machines[for_machine].is_windows()`. + Note: 'host' is the machine on which compiled binaries will run """ - if not is_cross: - return is_windows() - return env.cross_info.get_host_system() == 'windows' + return _get_machine_is_cross(env, is_cross).is_windows() def for_cygwin(is_cross, env): """ Host machine is cygwin? + Deprecated: Please use `env.machines[for_machine].is_cygwin()`. + Note: 'host' is the machine on which compiled binaries will run """ - if not is_cross: - return is_cygwin() - return env.cross_info.get_host_system() == 'cygwin' + return _get_machine_is_cross(env, is_cross).is_cygwin() def for_linux(is_cross, env): """ Host machine is linux? + Deprecated: Please use `env.machines[for_machine].is_linux()`. + Note: 'host' is the machine on which compiled binaries will run """ - if not is_cross: - return is_linux() - return env.cross_info.get_host_system() == 'linux' + return _get_machine_is_cross(env, is_cross).is_linux() def for_darwin(is_cross, env): """ Host machine is Darwin (iOS/OS X)? 
+ Deprecated: Please use `env.machines[for_machine].is_darwin()`. + Note: 'host' is the machine on which compiled binaries will run """ - if not is_cross: - return is_osx() - return env.cross_info.get_host_system() in ('darwin', 'ios') + return _get_machine_is_cross(env, is_cross).is_darwin() def for_android(is_cross, env): """ Host machine is Android? + Deprecated: Please use `env.machines[for_machine].is_android()`. + Note: 'host' is the machine on which compiled binaries will run """ - if not is_cross: - return is_android() - return env.cross_info.get_host_system() == 'android' + return _get_machine_is_cross(env, is_cross).is_android() def for_haiku(is_cross, env): """ Host machine is Haiku? + Deprecated: Please use `env.machines[for_machine].is_haiku()`. + Note: 'host' is the machine on which compiled binaries will run """ - if not is_cross: - return is_haiku() - return env.cross_info.get_host_system() == 'haiku' + return _get_machine_is_cross(env, is_cross).is_haiku() def for_openbsd(is_cross, env): """ Host machine is OpenBSD? + Deprecated: Please use `env.machines[for_machine].is_openbsd()`. + Note: 'host' is the machine on which compiled binaries will run """ - if not is_cross: - return is_openbsd() - elif env.cross_info.has_host(): - return env.cross_info.config['host_machine']['system'] == 'openbsd' - return False + return _get_machine_is_cross(env, is_cross).is_openbsd() def exe_exists(arglist): try: @@ -1126,3 +1213,12 @@ class BuildDirLock: elif have_msvcrt: msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_UNLCK, 1) self.lockfile.close() + +def relpath(path, start): + # On Windows a relative path can't be evaluated for paths on two different + # drives (i.e. c:\foo and f:\bar). The only thing left to do is to use the + # original absolute path. 
+ try: + return os.path.relpath(path, start) + except ValueError: + return path diff --git a/mesonbuild/mesonmain.py b/mesonbuild/mesonmain.py index dfad2e7..c11d044 100644 --- a/mesonbuild/mesonmain.py +++ b/mesonbuild/mesonmain.py @@ -12,261 +12,141 @@ # See the License for the specific language governing permissions and # limitations under the License. -import time -import sys, stat, traceback, argparse -import datetime +import sys import os.path -import platform -import cProfile as profile +import importlib +import traceback +import argparse -from . import environment, interpreter, mesonlib -from . import build -from . import mlog, coredata +from . import mesonlib +from . import mlog +from . import mconf, minit, minstall, mintro, msetup, mtest, rewriter, msubprojects from .mesonlib import MesonException from .environment import detect_msys2_arch -from .wrap import WrapMode - -default_warning = '1' - -def create_parser(): - p = argparse.ArgumentParser(prog='meson') - coredata.register_builtin_arguments(p) - p.add_argument('--cross-file', default=None, - help='File describing cross compilation environment.') - p.add_argument('-v', '--version', action='version', - version=coredata.version) - # See the mesonlib.WrapMode enum for documentation - p.add_argument('--wrap-mode', default=None, - type=wrapmodetype, choices=WrapMode, - help='Special wrap mode to use') - p.add_argument('--profile-self', action='store_true', dest='profile', - help=argparse.SUPPRESS) - p.add_argument('--fatal-meson-warnings', action='store_true', dest='fatal_warnings', - help='Make all Meson warnings fatal') - p.add_argument('--reconfigure', action='store_true', - help='Set options and reconfigure the project. 
Useful when new ' + - 'options have been added to the project and the default value ' + - 'is not working.') - p.add_argument('builddir', nargs='?', default=None) - p.add_argument('sourcedir', nargs='?', default=None) - return p - -def wrapmodetype(string): - try: - return getattr(WrapMode, string) - except AttributeError: - msg = ', '.join([t.name.lower() for t in WrapMode]) - msg = 'invalid argument {!r}, use one of {}'.format(string, msg) - raise argparse.ArgumentTypeError(msg) - -class MesonApp: - - def __init__(self, options): - (self.source_dir, self.build_dir) = self.validate_dirs(options.builddir, - options.sourcedir, - options.reconfigure) - self.options = options - - def has_build_file(self, dirname): - fname = os.path.join(dirname, environment.build_filename) - return os.path.exists(fname) - - def validate_core_dirs(self, dir1, dir2): - if dir1 is None: - if dir2 is None: - if not os.path.exists('meson.build') and os.path.exists('../meson.build'): - dir2 = '..' - else: - raise MesonException('Must specify at least one directory name.') - dir1 = os.getcwd() - if dir2 is None: - dir2 = os.getcwd() - ndir1 = os.path.abspath(os.path.realpath(dir1)) - ndir2 = os.path.abspath(os.path.realpath(dir2)) - if not os.path.exists(ndir1): - os.makedirs(ndir1) - if not os.path.exists(ndir2): - os.makedirs(ndir2) - if not stat.S_ISDIR(os.stat(ndir1).st_mode): - raise MesonException('%s is not a directory' % dir1) - if not stat.S_ISDIR(os.stat(ndir2).st_mode): - raise MesonException('%s is not a directory' % dir2) - if os.path.samefile(dir1, dir2): - raise MesonException('Source and build directories must not be the same. Create a pristine build directory.') - if self.has_build_file(ndir1): - if self.has_build_file(ndir2): - raise MesonException('Both directories contain a build file %s.' % environment.build_filename) - return ndir1, ndir2 - if self.has_build_file(ndir2): - return ndir2, ndir1 - raise MesonException('Neither directory contains a build file %s.' 
% environment.build_filename) - - def validate_dirs(self, dir1, dir2, reconfigure): - (src_dir, build_dir) = self.validate_core_dirs(dir1, dir2) - priv_dir = os.path.join(build_dir, 'meson-private/coredata.dat') - if os.path.exists(priv_dir): - if not reconfigure: - print('Directory already configured.\n' - '\nJust run your build command (e.g. ninja) and Meson will regenerate as necessary.\n' - 'If ninja fails, run "ninja reconfigure" or "meson --reconfigure"\n' - 'to force Meson to regenerate.\n' - '\nIf build failures persist, manually wipe your build directory to clear any\n' - 'stored system data.\n' - '\nTo change option values, run "meson configure" instead.') - sys.exit(0) +from .wrap import wraptool + + +class CommandLineParser: + def __init__(self): + self.commands = {} + self.hidden_commands = [] + self.parser = argparse.ArgumentParser(prog='meson') + self.subparsers = self.parser.add_subparsers(title='Commands', + description='If no command is specified it defaults to setup command.') + self.add_command('setup', msetup.add_arguments, msetup.run, + help='Configure the project') + self.add_command('configure', mconf.add_arguments, mconf.run, + help='Change project options',) + self.add_command('install', minstall.add_arguments, minstall.run, + help='Install the project') + self.add_command('introspect', mintro.add_arguments, mintro.run, + help='Introspect project') + self.add_command('init', minit.add_arguments, minit.run, + help='Create a new project') + self.add_command('test', mtest.add_arguments, mtest.run, + help='Run tests') + self.add_command('wrap', wraptool.add_arguments, wraptool.run, + help='Wrap tools') + self.add_command('subprojects', msubprojects.add_arguments, msubprojects.run, + help='Manage subprojects') + self.add_command('help', self.add_help_arguments, self.run_help_command, + help='Print help of a subcommand') + + # Hidden commands + self.add_command('rewrite', rewriter.add_arguments, rewriter.run, + help=argparse.SUPPRESS) + 
self.add_command('runpython', self.add_runpython_arguments, self.run_runpython_command, + help=argparse.SUPPRESS) + + def add_command(self, name, add_arguments_func, run_func, help): + # FIXME: Cannot have hidden subparser: + # https://bugs.python.org/issue22848 + if help == argparse.SUPPRESS: + p = argparse.ArgumentParser(prog='meson ' + name) + self.hidden_commands.append(name) else: - if reconfigure: - print('Directory does not contain a valid build tree:\n{}'.format(build_dir)) - sys.exit(1) - return src_dir, build_dir - - def check_pkgconfig_envvar(self, env): - curvar = os.environ.get('PKG_CONFIG_PATH', '') - if curvar != env.coredata.pkgconf_envvar: - mlog.warning('PKG_CONFIG_PATH has changed between invocations from "%s" to "%s".' % - (env.coredata.pkgconf_envvar, curvar)) - env.coredata.pkgconf_envvar = curvar - - def generate(self): - env = environment.Environment(self.source_dir, self.build_dir, self.options) - mlog.initialize(env.get_log_dir(), self.options.fatal_warnings) - if self.options.profile: - mlog.set_timestamp_start(time.monotonic()) - with mesonlib.BuildDirLock(self.build_dir): - self._generate(env) - - def _generate(self, env): - mlog.debug('Build started at', datetime.datetime.now().isoformat()) - mlog.debug('Main binary:', sys.executable) - mlog.debug('Python system:', platform.system()) - mlog.log(mlog.bold('The Meson build system')) - self.check_pkgconfig_envvar(env) - mlog.log('Version:', coredata.version) - mlog.log('Source dir:', mlog.bold(self.source_dir)) - mlog.log('Build dir:', mlog.bold(self.build_dir)) - if env.is_cross_build(): - mlog.log('Build type:', mlog.bold('cross build')) + p = self.subparsers.add_parser(name, help=help) + add_arguments_func(p) + p.set_defaults(run_func=run_func) + self.commands[name] = p + + def add_runpython_arguments(self, parser): + parser.add_argument('script_file') + parser.add_argument('script_args', nargs=argparse.REMAINDER) + + def run_runpython_command(self, options): + import runpy + 
sys.argv[1:] = options.script_args + runpy.run_path(options.script_file, run_name='__main__') + return 0 + + def add_help_arguments(self, parser): + parser.add_argument('command', nargs='?') + + def run_help_command(self, options): + if options.command: + self.commands[options.command].print_help() else: - mlog.log('Build type:', mlog.bold('native build')) - b = build.Build(env) - - intr = interpreter.Interpreter(b) - if env.is_cross_build(): - mlog.log('Host machine cpu family:', mlog.bold(intr.builtin['host_machine'].cpu_family_method([], {}))) - mlog.log('Host machine cpu:', mlog.bold(intr.builtin['host_machine'].cpu_method([], {}))) - mlog.log('Target machine cpu family:', mlog.bold(intr.builtin['target_machine'].cpu_family_method([], {}))) - mlog.log('Target machine cpu:', mlog.bold(intr.builtin['target_machine'].cpu_method([], {}))) - mlog.log('Build machine cpu family:', mlog.bold(intr.builtin['build_machine'].cpu_family_method([], {}))) - mlog.log('Build machine cpu:', mlog.bold(intr.builtin['build_machine'].cpu_method([], {}))) - if self.options.profile: - fname = os.path.join(self.build_dir, 'meson-private', 'profile-interpreter.log') - profile.runctx('intr.run()', globals(), locals(), filename=fname) + self.parser.print_help() + return 0 + + def run(self, args): + # If first arg is not a known command, assume user wants to run the setup + # command. 
+ known_commands = list(self.commands.keys()) + ['-h', '--help'] + if len(args) == 0 or args[0] not in known_commands: + args = ['setup'] + args + + # Hidden commands have their own parser instead of using the global one + if args[0] in self.hidden_commands: + parser = self.commands[args[0]] + args = args[1:] else: - intr.run() - # Print all default option values that don't match the current value - for def_opt_name, def_opt_value, cur_opt_value in intr.get_non_matching_default_options(): - mlog.log('Option', mlog.bold(def_opt_name), 'is:', - mlog.bold(str(cur_opt_value)), - '[default: {}]'.format(str(def_opt_value))) + parser = self.parser + + args = mesonlib.expand_arguments(args) + options = parser.parse_args(args) + try: - dumpfile = os.path.join(env.get_scratch_dir(), 'build.dat') - # We would like to write coredata as late as possible since we use the existence of - # this file to check if we generated the build file successfully. Since coredata - # includes settings, the build files must depend on it and appear newer. However, due - # to various kernel caches, we cannot guarantee that any time in Python is exactly in - # sync with the time that gets applied to any files. Thus, we dump this file as late as - # possible, but before build files, and if any error occurs, delete it. - cdf = env.dump_coredata() - if self.options.profile: - fname = 'profile-{}-backend.log'.format(intr.backend.name) - fname = os.path.join(self.build_dir, 'meson-private', fname) - profile.runctx('intr.backend.generate(intr)', globals(), locals(), filename=fname) - else: - intr.backend.generate(intr) - build.save(b, dumpfile) - # Post-conf scripts must be run after writing coredata or else introspection fails. 
- intr.backend.run_postconf_scripts() - except: - if 'cdf' in locals(): - old_cdf = cdf + '.prev' - if os.path.exists(old_cdf): - os.replace(old_cdf, cdf) - else: - os.unlink(cdf) - raise + return options.run_func(options) + except MesonException as e: + mlog.exception(e) + logfile = mlog.shutdown() + if logfile is not None: + mlog.log("\nA full log can be found at", mlog.bold(logfile)) + if os.environ.get('MESON_FORCE_BACKTRACE'): + raise + return 1 + except Exception as e: + if os.environ.get('MESON_FORCE_BACKTRACE'): + raise + traceback.print_exc() + return 2 + finally: + mlog.shutdown() -def run_script_command(args): - cmdname = args[0] - cmdargs = args[1:] - if cmdname == 'exe': - import mesonbuild.scripts.meson_exe as abc - cmdfunc = abc.run - elif cmdname == 'cleantrees': - import mesonbuild.scripts.cleantrees as abc - cmdfunc = abc.run - elif cmdname == 'commandrunner': - import mesonbuild.scripts.commandrunner as abc - cmdfunc = abc.run - elif cmdname == 'delsuffix': - import mesonbuild.scripts.delwithsuffix as abc - cmdfunc = abc.run - elif cmdname == 'dirchanger': - import mesonbuild.scripts.dirchanger as abc - cmdfunc = abc.run - elif cmdname == 'gtkdoc': - import mesonbuild.scripts.gtkdochelper as abc - cmdfunc = abc.run - elif cmdname == 'msgfmthelper': - import mesonbuild.scripts.msgfmthelper as abc - cmdfunc = abc.run - elif cmdname == 'hotdoc': - import mesonbuild.scripts.hotdochelper as abc - cmdfunc = abc.run - elif cmdname == 'regencheck': - import mesonbuild.scripts.regen_checker as abc - cmdfunc = abc.run - elif cmdname == 'symbolextractor': - import mesonbuild.scripts.symbolextractor as abc - cmdfunc = abc.run - elif cmdname == 'scanbuild': - import mesonbuild.scripts.scanbuild as abc - cmdfunc = abc.run - elif cmdname == 'vcstagger': - import mesonbuild.scripts.vcstagger as abc - cmdfunc = abc.run - elif cmdname == 'gettext': - import mesonbuild.scripts.gettext as abc - cmdfunc = abc.run - elif cmdname == 'yelphelper': - import 
mesonbuild.scripts.yelphelper as abc - cmdfunc = abc.run - elif cmdname == 'uninstall': - import mesonbuild.scripts.uninstall as abc - cmdfunc = abc.run - elif cmdname == 'dist': - import mesonbuild.scripts.dist as abc - cmdfunc = abc.run - elif cmdname == 'coverage': - import mesonbuild.scripts.coverage as abc - cmdfunc = abc.run - else: - raise MesonException('Unknown internal command {}.'.format(cmdname)) - return cmdfunc(cmdargs) +def run_script_command(script_name, script_args): + # Map script name to module name for those that doesn't match + script_map = {'exe': 'meson_exe', + 'install': 'meson_install', + 'delsuffix': 'delwithsuffix', + 'gtkdoc': 'gtkdochelper', + 'hotdoc': 'hotdochelper', + 'regencheck': 'regen_checker'} + module_name = script_map.get(script_name, script_name) -def set_meson_command(mainfile): - # On UNIX-like systems `meson` is a Python script - # On Windows `meson` and `meson.exe` are wrapper exes - if not mainfile.endswith('.py'): - mesonlib.meson_command = [mainfile] - elif os.path.isabs(mainfile) and mainfile.endswith('mesonmain.py'): - # Can't actually run meson with an absolute path to mesonmain.py, it must be run as -m mesonbuild.mesonmain - mesonlib.meson_command = mesonlib.python_command + ['-m', 'mesonbuild.mesonmain'] - else: - # Either run uninstalled, or full path to meson-script.py - mesonlib.meson_command = mesonlib.python_command + [mainfile] - # We print this value for unit tests. - if 'MESON_COMMAND_TESTS' in os.environ: - mlog.log('meson_command is {!r}'.format(mesonlib.meson_command)) + try: + module = importlib.import_module('mesonbuild.scripts.' 
+ module_name) + except ModuleNotFoundError as e: + mlog.exception(e) + return 1 + + try: + return module.run(script_args) + except MesonException as e: + mlog.error('Error in {} helper script:'.format(script_name)) + mlog.exception(e) + return 1 def run(original_args, mainfile): if sys.version_info < (3, 5): @@ -274,6 +154,7 @@ def run(original_args, mainfile): print('You have python %s.' % sys.version) print('Please update your environment') return 1 + # https://github.com/mesonbuild/meson/issues/3653 if sys.platform.lower() == 'msys': mlog.error('This python3 seems to be msys/python on MSYS2 Windows, which is known to have path semantics incompatible with Meson') @@ -283,104 +164,23 @@ def run(original_args, mainfile): else: mlog.error('Please download and use Python as detailed at: https://mesonbuild.com/Getting-meson.html') return 2 + # Set the meson command that will be used to run scripts and so on - set_meson_command(mainfile) + mesonlib.set_meson_command(mainfile) + args = original_args[:] - if len(args) > 0: - # First check if we want to run a subcommand. - cmd_name = args[0] - remaining_args = args[1:] - # "help" is a special case: Since printing of the help may be - # delegated to a subcommand, we edit cmd_name before executing - # the rest of the logic here. - if cmd_name == 'help': - remaining_args += ['--help'] - args = remaining_args - cmd_name = args[0] - if cmd_name == 'test': - from . import mtest - return mtest.run(remaining_args) - elif cmd_name == 'setup': - args = remaining_args - # FALLTHROUGH like it's 1972. - elif cmd_name == 'install': - from . import minstall - return minstall.run(remaining_args) - elif cmd_name == 'introspect': - from . import mintro - return mintro.run(remaining_args) - elif cmd_name == 'rewrite': - from . import rewriter - return rewriter.run(remaining_args) - elif cmd_name == 'configure': - try: - from . 
import mconf - return mconf.run(remaining_args) - except MesonException as e: - mlog.exception(e) - sys.exit(1) - elif cmd_name == 'wrap': - from .wrap import wraptool - return wraptool.run(remaining_args) - elif cmd_name == 'init': - from . import minit - return minit.run(remaining_args) - elif cmd_name == 'runpython': - import runpy - script_file = remaining_args[0] - sys.argv[1:] = remaining_args[1:] - runpy.run_path(script_file, run_name='__main__') - sys.exit(0) - # No special command? Do the basic setup/reconf. + # Special handling of internal commands called from backends, they don't + # need to go through argparse. if len(args) >= 2 and args[0] == '--internal': if args[1] == 'regenerate': # Rewrite "meson --internal regenerate" command line to # "meson --reconfigure" args = ['--reconfigure'] + args[2:] else: - script = args[1] - try: - sys.exit(run_script_command(args[1:])) - except MesonException as e: - mlog.error('\nError in {} helper script:'.format(script)) - mlog.exception(e) - sys.exit(1) - - parser = create_parser() - - args = mesonlib.expand_arguments(args) - options = parser.parse_args(args) - coredata.parse_cmd_line_options(options) - try: - app = MesonApp(options) - except Exception as e: - # Log directory does not exist, so just print - # to stdout. 
- print('Error during basic setup:\n') - print(e) - return 1 - try: - app.generate() - except Exception as e: - if isinstance(e, MesonException): - mlog.exception(e) - # Path to log file - mlog.shutdown() - logfile = os.path.join(app.build_dir, environment.Environment.log_dir, mlog.log_fname) - mlog.log("\nA full log can be found at", mlog.bold(logfile)) - if os.environ.get('MESON_FORCE_BACKTRACE'): - raise - return 1 - else: - if os.environ.get('MESON_FORCE_BACKTRACE'): - raise - traceback.print_exc() - return 2 - finally: - mlog.shutdown() + return run_script_command(args[1], args[2:]) - return 0 + return CommandLineParser().run(args) def main(): # Always resolve the command path so Ninja can find it for regen, tests, etc. diff --git a/mesonbuild/minit.py b/mesonbuild/minit.py index a66361f..394fe40 100644 --- a/mesonbuild/minit.py +++ b/mesonbuild/minit.py @@ -14,7 +14,7 @@ """Code that creates simple startup projects.""" -import os, sys, argparse, re, shutil, subprocess +import os, sys, re, shutil, subprocess from glob import glob from mesonbuild import mesonlib from mesonbuild.environment import detect_ninja @@ -425,8 +425,7 @@ def create_meson_build(options): open('meson.build', 'w').write(content) print('Generated meson.build file:\n\n' + content) -def run(args): - parser = argparse.ArgumentParser(prog='meson') +def add_arguments(parser): parser.add_argument("srcfiles", metavar="sourcefile", nargs="*", help="source files. default: all recognized files in current directory") parser.add_argument("-n", "--name", help="project name. 
default: name of current directory") @@ -441,7 +440,8 @@ def run(args): parser.add_argument('--type', default='executable', choices=['executable', 'library']) parser.add_argument('--version', default='0.1') - options = parser.parse_args(args) + +def run(options): if len(glob('*')) == 0: autodetect_options(options, sample=True) if not options.language: diff --git a/mesonbuild/minstall.py b/mesonbuild/minstall.py index 1d72179..8ac6aab 100644 --- a/mesonbuild/minstall.py +++ b/mesonbuild/minstall.py @@ -12,9 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -import sys, pickle, os, shutil, subprocess, gzip, errno +import sys, pickle, os, shutil, subprocess, errno import shlex -import argparse from glob import glob from .scripts import depfixer from .scripts import destdir_join @@ -33,15 +32,13 @@ build definitions so that it will not break when the change happens.''' selinux_updates = [] -def buildparser(): - parser = argparse.ArgumentParser(prog='meson install') +def add_arguments(parser): parser.add_argument('-C', default='.', dest='wd', help='directory to cd into before running') parser.add_argument('--no-rebuild', default=False, action='store_true', help='Do not rebuild before installing.') parser.add_argument('--only-changed', default=False, action='store_true', help='Only overwrite files that are older than the copied file.') - return parser class DirMaker: def __init__(self, lf): @@ -241,7 +238,7 @@ class Installer: 'a file'.format(to_file)) if self.should_preserve_existing_file(from_file, to_file): append_to_log(self.lf, '# Preserving old file %s\n' % to_file) - print('Preserving existing file %s.' 
% to_file) + print('Preserving existing file %s' % to_file) return False os.remove(to_file) print('Installing %s to %s' % (from_file, outdir)) @@ -319,8 +316,6 @@ class Installer: abs_dst = os.path.join(dst_dir, filepart) if os.path.isdir(abs_dst): print('Tried to copy file %s but a directory of that name already exists.' % abs_dst) - if os.path.exists(abs_dst): - os.remove(abs_dst) parent_dir = os.path.dirname(abs_dst) if not os.path.isdir(parent_dir): os.mkdir(parent_dir) @@ -382,17 +377,7 @@ class Installer: outdir = os.path.dirname(outfilename) d.dirmaker.makedirs(outdir, exist_ok=True) install_mode = m[2] - if outfilename.endswith('.gz') and not full_source_filename.endswith('.gz'): - with open(outfilename, 'wb') as of: - with open(full_source_filename, 'rb') as sf: - # Set mtime and filename for reproducibility. - with gzip.GzipFile(fileobj=of, mode='wb', filename='', mtime=0) as gz: - gz.write(sf.read()) - shutil.copystat(full_source_filename, outfilename) - print('Installing %s to %s' % (full_source_filename, outdir)) - append_to_log(self.lf, outfilename) - else: - self.do_copyfile(full_source_filename, outfilename) + self.do_copyfile(full_source_filename, outfilename) set_mode(outfilename, install_mode, d.install_umask) def install_headers(self, d): @@ -501,9 +486,7 @@ class Installer: else: raise -def run(args): - parser = buildparser() - opts = parser.parse_args(args) +def run(opts): datafilename = 'meson-private/install.dat' private_dir = os.path.dirname(datafilename) log_dir = os.path.join(private_dir, '../meson-logs') @@ -520,6 +503,3 @@ def run(args): append_to_log(lf, '# Does not contain files installed by custom scripts.') installer.do_install(datafilename) return 0 - -if __name__ == '__main__': - sys.exit(run(sys.argv[1:])) diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py index 188459a..48ec20f 100644 --- a/mesonbuild/mintro.py +++ b/mesonbuild/mintro.py @@ -22,13 +22,14 @@ project files and don't need this info.""" import json from . 
import build, mtest, coredata as cdata from . import mesonlib +from . import astinterpreter +from . import mparser +from .interpreterbase import InvalidArguments from .backend import ninjabackend -import argparse import sys, os import pathlib -def buildparser(): - parser = argparse.ArgumentParser(prog='meson introspect') +def add_arguments(parser): parser.add_argument('--targets', action='store_true', dest='list_targets', default=False, help='List top level targets.') parser.add_argument('--installed', action='store_true', dest='list_installed', default=False, @@ -48,7 +49,6 @@ def buildparser(): parser.add_argument('--projectinfo', action='store_true', dest='projectinfo', default=False, help='Information about projects.') parser.add_argument('builddir', nargs='?', default='.', help='The build directory') - return parser def determine_installed_path(target, installdata): install_target = None @@ -127,18 +127,43 @@ def list_target_files(target_name, coredata, builddata): def list_buildoptions(coredata, builddata): optlist = [] - add_keys(optlist, coredata.user_options) - add_keys(optlist, coredata.compiler_options) - add_keys(optlist, coredata.base_options) - add_keys(optlist, coredata.builtins) + + dir_option_names = ['bindir', + 'datadir', + 'includedir', + 'infodir', + 'libdir', + 'libexecdir', + 'localedir', + 'localstatedir', + 'mandir', + 'prefix', + 'sbindir', + 'sharedstatedir', + 'sysconfdir'] + test_option_names = ['errorlogs', + 'stdsplit'] + core_option_names = [k for k in coredata.builtins if k not in dir_option_names + test_option_names] + + dir_options = {k: o for k, o in coredata.builtins.items() if k in dir_option_names} + test_options = {k: o for k, o in coredata.builtins.items() if k in test_option_names} + core_options = {k: o for k, o in coredata.builtins.items() if k in core_option_names} + + add_keys(optlist, core_options, 'core') + add_keys(optlist, coredata.backend_options, 'backend') + add_keys(optlist, coredata.base_options, 'base') + 
add_keys(optlist, coredata.compiler_options, 'compiler') + add_keys(optlist, dir_options, 'directory') + add_keys(optlist, coredata.user_options, 'user') + add_keys(optlist, test_options, 'test') print(json.dumps(optlist)) -def add_keys(optlist, options): +def add_keys(optlist, options, section): keys = list(options.keys()) keys.sort() for key in keys: opt = options[key] - optdict = {'name': key, 'value': opt.value} + optdict = {'name': key, 'value': opt.value, 'section': section} if isinstance(opt, cdata.UserStringOption): typestr = 'string' elif isinstance(opt, cdata.UserBooleanOption): @@ -156,14 +181,18 @@ def add_keys(optlist, options): optdict['description'] = opt.description optlist.append(optdict) -def list_buildsystem_files(builddata): - src_dir = builddata.environment.get_source_dir() +def find_buildsystem_files_list(src_dir): # I feel dirty about this. But only slightly. filelist = [] for root, _, files in os.walk(src_dir): for f in files: if f == 'meson.build' or f == 'meson_options.txt': filelist.append(os.path.relpath(os.path.join(root, f), src_dir)) + return filelist + +def list_buildsystem_files(builddata): + src_dir = builddata.environment.get_source_dir() + filelist = find_buildsystem_files_list(src_dir) print(json.dumps(filelist)) def list_deps(coredata): @@ -197,20 +226,81 @@ def list_tests(testdata): print(json.dumps(result)) def list_projinfo(builddata): - result = {'name': builddata.project_name, 'version': builddata.project_version} + result = {'version': builddata.project_version, + 'descriptive_name': builddata.project_name} subprojects = [] for k, v in builddata.subprojects.items(): c = {'name': k, - 'version': v} + 'version': v, + 'descriptive_name': builddata.projects.get(k)} subprojects.append(c) result['subprojects'] = subprojects print(json.dumps(result)) -def run(args): +class ProjectInfoInterperter(astinterpreter.AstInterpreter): + def __init__(self, source_root, subdir): + super().__init__(source_root, subdir) + 
self.funcs.update({'project': self.func_project}) + self.project_name = None + self.project_version = None + + def func_project(self, node, args, kwargs): + if len(args) < 1: + raise InvalidArguments('Not enough arguments to project(). Needs at least the project name.') + self.project_name = args[0] + self.project_version = kwargs.get('version', 'undefined') + if isinstance(self.project_version, mparser.ElementaryNode): + self.project_version = self.project_version.value + + def set_variable(self, varname, variable): + pass + + def analyze(self): + self.load_root_meson_file() + self.sanity_check_ast() + self.parse_project() + self.run() + +def list_projinfo_from_source(sourcedir): + files = find_buildsystem_files_list(sourcedir) + + result = {'buildsystem_files': []} + subprojects = {} + + for f in files: + f = f.replace('\\', '/') + if f == 'meson.build': + interpreter = ProjectInfoInterperter(sourcedir, '') + interpreter.analyze() + version = None + if interpreter.project_version is str: + version = interpreter.project_version + result.update({'version': version, 'descriptive_name': interpreter.project_name}) + result['buildsystem_files'].append(f) + elif f.startswith('subprojects/'): + subproject_id = f.split('/')[1] + subproject = subprojects.setdefault(subproject_id, {'buildsystem_files': []}) + subproject['buildsystem_files'].append(f) + if f.count('/') == 2 and f.endswith('meson.build'): + interpreter = ProjectInfoInterperter(os.path.join(sourcedir, 'subprojects', subproject_id), '') + interpreter.analyze() + subproject.update({'name': subproject_id, 'version': interpreter.project_version, 'descriptive_name': interpreter.project_name}) + else: + result['buildsystem_files'].append(f) + + subprojects = [obj for name, obj in subprojects.items()] + result['subprojects'] = subprojects + print(json.dumps(result)) + +def run(options): datadir = 'meson-private' - options = buildparser().parse_args(args) if options.builddir is not None: datadir = 
os.path.join(options.builddir, datadir) + if options.builddir.endswith('/meson.build') or options.builddir.endswith('\\meson.build') or options.builddir == 'meson.build': + if options.projectinfo: + sourcedir = '.' if options.builddir == 'meson.build' else options.builddir[:-11] + list_projinfo_from_source(sourcedir) + return 0 if not os.path.isdir(datadir): print('Current directory is not a build dir. Please specify it or ' 'change the working directory to it.') diff --git a/mesonbuild/mlog.py b/mesonbuild/mlog.py index 095b8fd..ea99d09 100644 --- a/mesonbuild/mlog.py +++ b/mesonbuild/mlog.py @@ -96,6 +96,9 @@ def green(text): def yellow(text): return AnsiDecorator(text, "\033[1;33m") +def blue(text): + return AnsiDecorator(text, "\033[1;34m") + def cyan(text): return AnsiDecorator(text, "\033[1;36m") @@ -104,6 +107,8 @@ def process_markup(args, keep): if log_timestamp_start is not None: arr = ['[{:.3f}]'.format(time.monotonic() - log_timestamp_start)] for arg in args: + if arg is None: + continue if isinstance(arg, str): arr.append(arg) elif isinstance(arg, AnsiDecorator): diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py index 302538f..bf49770 100644 --- a/mesonbuild/modules/gnome.py +++ b/mesonbuild/modules/gnome.py @@ -154,10 +154,10 @@ class GnomeModule(ExtensionModule): # Make source dirs relative to build dir now source_dirs = [os.path.join(state.build_to_src, state.subdir, d) for d in source_dirs] - # Always include current directory, but after paths set by user - source_dirs.append(os.path.join(state.build_to_src, state.subdir)) # Ensure build directories of generated deps are included source_dirs += subdirs + # Always include current directory, but after paths set by user + source_dirs.append(os.path.join(state.build_to_src, state.subdir)) for source_dir in OrderedSet(source_dirs): cmd += ['--sourcedir', source_dir] @@ -327,19 +327,20 @@ class GnomeModule(ExtensionModule): for dep in deps: if isinstance(dep, InternalDependency): - 
cflags.update(dep.compile_args) + cflags.update(dep.get_compile_args()) cflags.update(get_include_args(dep.include_directories)) for lib in dep.libraries: if hasattr(lib, 'held_object'): lib = lib.held_object - internal_ldflags.update(self._get_link_args(state, lib, depends, include_rpath)) - libdepflags = self._get_dependencies_flags(lib.get_external_deps(), state, depends, include_rpath, - use_gir_args, True) - cflags.update(libdepflags[0]) - internal_ldflags.update(libdepflags[1]) - external_ldflags.update(libdepflags[2]) - external_ldflags_nodedup += libdepflags[3] - gi_includes.update(libdepflags[4]) + if isinstance(lib, build.SharedLibrary): + internal_ldflags.update(self._get_link_args(state, lib, depends, include_rpath)) + libdepflags = self._get_dependencies_flags(lib.get_external_deps(), state, depends, include_rpath, + use_gir_args, True) + cflags.update(libdepflags[0]) + internal_ldflags.update(libdepflags[1]) + external_ldflags.update(libdepflags[2]) + external_ldflags_nodedup += libdepflags[3] + gi_includes.update(libdepflags[4]) extdepflags = self._get_dependencies_flags(dep.ext_deps, state, depends, include_rpath, use_gir_args, True) cflags.update(extdepflags[0]) @@ -407,11 +408,21 @@ class GnomeModule(ExtensionModule): else: return cflags, internal_ldflags, external_ldflags, external_ldflags_nodedup, gi_includes - def _unwrap_gir_target(self, girtarget): + def _unwrap_gir_target(self, girtarget, state): while hasattr(girtarget, 'held_object'): girtarget = girtarget.held_object - if not isinstance(girtarget, (build.Executable, build.SharedLibrary)): - raise MesonException('Gir target must be an executable or shared library') + + if not isinstance(girtarget, (build.Executable, build.SharedLibrary, + build.StaticLibrary)): + raise MesonException('Gir target must be an executable or library') + + STATIC_BUILD_REQUIRED_VERSION = ">=1.58.1" + if isinstance(girtarget, (build.StaticLibrary)) and \ + not mesonlib.version_compare( + 
self._get_gir_dep(state)[0].get_version(), + STATIC_BUILD_REQUIRED_VERSION): + raise MesonException('Static libraries can only be introspected with GObject-Introspection ' + STATIC_BUILD_REQUIRED_VERSION) + return girtarget def _get_gir_dep(self, state): @@ -519,7 +530,12 @@ class GnomeModule(ExtensionModule): ret = [] for lang in langs: - for link_arg in state.environment.coredata.get_external_link_args(lang): + if state.environment.is_cross_build(): + link_args = state.environment.cross_info.config["properties"].get(lang + '_link_args', "") + else: + link_args = state.environment.coredata.get_external_link_args(lang) + + for link_arg in link_args: if link_arg.startswith('-L'): ret.append(link_arg) @@ -531,8 +547,13 @@ class GnomeModule(ExtensionModule): for girtarget in girtargets: if isinstance(girtarget, build.Executable): ret += ['--program', girtarget] - elif isinstance(girtarget, build.SharedLibrary): - libname = girtarget.get_basename() + else: + # Because of https://gitlab.gnome.org/GNOME/gobject-introspection/merge_requests/72 + # we can't use the full path until this is merged. 
+ if isinstance(girtarget, build.SharedLibrary): + libname = girtarget.get_basename() + else: + libname = os.path.join("@PRIVATE_OUTDIR_ABS_%s@" % girtarget.get_id(), girtarget.get_filename()) # Needed for the following binutils bug: # https://github.com/mesonbuild/meson/issues/1911 # However, g-ir-scanner does not understand -Wl,-rpath @@ -692,7 +713,10 @@ class GnomeModule(ExtensionModule): def _get_external_args_for_langs(self, state, langs): ret = [] for lang in langs: - ret += state.environment.coredata.get_external_args(lang) + if state.environment.is_cross_build(): + ret += state.environment.cross_info.config["properties"].get(lang + '_args', "") + else: + ret += state.environment.coredata.get_external_args(lang) return ret @staticmethod @@ -723,7 +747,7 @@ class GnomeModule(ExtensionModule): giscanner = self.interpreter.find_program_impl('g-ir-scanner') gicompiler = self.interpreter.find_program_impl('g-ir-compiler') - girtargets = [self._unwrap_gir_target(arg) for arg in args] + girtargets = [self._unwrap_gir_target(arg, state) for arg in args] if len(girtargets) > 1 and any([isinstance(el, build.Executable) for el in girtargets]): raise MesonException('generate_gir only accepts a single argument when one of the arguments is an executable') @@ -884,12 +908,14 @@ This will become a hard error in the future.''') return ModuleReturnValue(None, rv) @FeatureNewKwargs('gnome.gtkdoc', '0.48.0', ['c_args']) + @FeatureNewKwargs('gnome.gtkdoc', '0.48.0', ['module_version']) @FeatureNewKwargs('gnome.gtkdoc', '0.37.0', ['namespace', 'mode']) @permittedKwargs({'main_xml', 'main_sgml', 'src_dir', 'dependencies', 'install', 'install_dir', 'scan_args', 'scanobjs_args', 'gobject_typesfile', 'fixxref_args', 'html_args', 'html_assets', 'content_files', 'mkdb_args', 'ignore_headers', 'include_directories', - 'namespace', 'mode', 'expand_content_files'}) + 'namespace', 'mode', 'expand_content_files', 'module_version', + 'c_args'}) def gtkdoc(self, state, args, kwargs): if 
len(args) != 1: raise MesonException('Gtkdoc must have one positional argument.') @@ -904,11 +930,14 @@ This will become a hard error in the future.''') main_xml = kwargs.get('main_xml', '') if not isinstance(main_xml, str): raise MesonException('Main xml keyword argument must be a string.') + moduleversion = kwargs.get('module_version', '') + if not isinstance(moduleversion, str): + raise MesonException('Module version keyword argument must be a string.') if main_xml != '': if main_file != '': raise MesonException('You can only specify main_xml or main_sgml, not both.') main_file = main_xml - targetname = modulename + '-doc' + targetname = modulename + ('-' + moduleversion if moduleversion else '') + '-doc' command = state.environment.get_build_command() namespace = kwargs.get('namespace', '') @@ -939,6 +968,7 @@ This will become a hard error in the future.''') '--headerdirs=' + '@@'.join(header_dirs), '--mainfile=' + main_file, '--modulename=' + modulename, + '--moduleversion=' + moduleversion, '--mode=' + mode] if namespace: args.append('--namespace=' + namespace) @@ -1008,12 +1038,18 @@ This will become a hard error in the future.''') ldflags.update(external_ldflags) if state.environment.is_cross_build(): + cflags.update(state.environment.cross_info.config["properties"].get('c_args', "")) + ldflags.update(state.environment.cross_info.config["properties"].get('c_link_args', "")) compiler = state.environment.coredata.cross_compilers.get('c') else: cflags.update(state.environment.coredata.get_external_args('c')) ldflags.update(state.environment.coredata.get_external_link_args('c')) compiler = state.environment.coredata.compilers.get('c') + compiler_flags = self._get_langs_compilers_flags(state, [('c', compiler)]) + cflags.update(compiler_flags[0]) + ldflags.update(compiler_flags[1]) + ldflags.update(compiler_flags[2]) if compiler: args += ['--cc=%s' % ' '.join([shlex.quote(x) for x in compiler.get_exelist()])] args += ['--ld=%s' % ' '.join([shlex.quote(x) for x in 
compiler.get_linker_exelist()])] diff --git a/mesonbuild/modules/hotdoc.py b/mesonbuild/modules/hotdoc.py index 1080160..c07391e 100644 --- a/mesonbuild/modules/hotdoc.py +++ b/mesonbuild/modules/hotdoc.py @@ -379,7 +379,7 @@ class HotDocModule(ExtensionModule): @noKwargs def has_extensions(self, state, args, kwargs): - res = self.hotdoc.run_hotdoc(['--has-extension'] + args) == 0 + res = self.hotdoc.run_hotdoc(['--has-extension=%s' % extension for extension in args]) == 0 return ModuleReturnValue(res, [res]) def generate_doc(self, state, args, kwargs): diff --git a/mesonbuild/modules/i18n.py b/mesonbuild/modules/i18n.py index 32e080c..aeab813 100644 --- a/mesonbuild/modules/i18n.py +++ b/mesonbuild/modules/i18n.py @@ -91,14 +91,14 @@ class I18nModule(ExtensionModule): inputfile = inputfile[0] if isinstance(inputfile, str): inputfile = mesonlib.File.from_source_file(state.environment.source_dir, - state.subdir, inputfile) + state.subdir, inputfile) output = kwargs['output'] ifile_abs = inputfile.absolute_path(state.environment.source_dir, state.environment.build_dir) values = mesonlib.get_filenames_templates_dict([ifile_abs], None) outputs = mesonlib.substitute_values([output], values) output = outputs[0] - ct = build.CustomTarget(output + '_' + state.subdir + '_merge', state.subdir, state.subproject, kwargs) + ct = build.CustomTarget(output + '_' + state.subdir.replace('/', '@').replace('\\', '@') + '_merge', state.subdir, state.subproject, kwargs) return ModuleReturnValue(ct, [ct]) @FeatureNewKwargs('i18n.gettext', '0.37.0', ['preset']) diff --git a/mesonbuild/modules/pkgconfig.py b/mesonbuild/modules/pkgconfig.py index 8684864..eee3783 100644 --- a/mesonbuild/modules/pkgconfig.py +++ b/mesonbuild/modules/pkgconfig.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -import os +import os, types from pathlib import PurePath from .. 
import build @@ -50,11 +50,24 @@ class DependenciesHelper: def add_priv_reqs(self, reqs): self.priv_reqs += self._process_reqs(reqs) + def _check_generated_pc_deprecation(self, obj): + if hasattr(obj, 'generated_pc_warn'): + mlog.deprecation('Library', mlog.bold(obj.name), 'was passed to the ' + '"libraries" keyword argument of a previous call ' + 'to generate() method instead of first positional ' + 'argument.', 'Adding', mlog.bold(obj.generated_pc), + 'to "Requires" field, but this is a deprecated ' + 'behaviour that will change in a future version ' + 'of Meson. Please report the issue if this ' + 'warning cannot be avoided in your case.', + location=obj.generated_pc_warn) + def _process_reqs(self, reqs): '''Returns string names of requirements''' processed_reqs = [] for obj in mesonlib.listify(reqs, unholder=True): if hasattr(obj, 'generated_pc'): + self._check_generated_pc_deprecation(obj) processed_reqs.append(obj.generated_pc) elif hasattr(obj, 'pcdep'): pcdeps = mesonlib.listify(obj.pcdep) @@ -94,6 +107,7 @@ class DependenciesHelper: processed_reqs.append(d.name) self.add_version_reqs(d.name, obj.version_reqs) elif hasattr(obj, 'generated_pc'): + self._check_generated_pc_deprecation(obj) processed_reqs.append(obj.generated_pc) elif isinstance(obj, dependencies.PkgConfigDependency): if obj.found(): @@ -102,6 +116,14 @@ class DependenciesHelper: elif isinstance(obj, dependencies.ThreadDependency): processed_libs += obj.get_compiler().thread_link_flags(obj.env) processed_cflags += obj.get_compiler().thread_flags(obj.env) + elif isinstance(obj, dependencies.InternalDependency): + if obj.found(): + processed_libs += obj.get_link_args() + processed_cflags += obj.get_compile_args() + if public: + self.add_pub_libs(obj.libraries) + else: + self.add_priv_libs(obj.libraries) elif isinstance(obj, dependencies.Dependency): if obj.found(): processed_libs += obj.get_link_args() @@ -114,14 +136,8 @@ class DependenciesHelper: # than needed build deps. 
# See https://bugs.freedesktop.org/show_bug.cgi?id=105572 processed_libs.append(obj) - if public: - if not hasattr(obj, 'generated_pc'): - obj.generated_pc = self.name elif isinstance(obj, (build.SharedLibrary, build.StaticLibrary)): processed_libs.append(obj) - if public: - if not hasattr(obj, 'generated_pc'): - obj.generated_pc = self.name if isinstance(obj, build.StaticLibrary) and public: self.add_pub_libs(obj.get_dependencies(internal=False)) self.add_pub_libs(obj.get_external_deps()) @@ -276,10 +292,16 @@ class PkgConfigModule(ExtensionModule): install_dir = l.get_custom_install_dir()[0] if install_dir is False: continue - if isinstance(install_dir, str): - Lflag = '-L${prefix}/%s ' % self._escape(self._make_relative(prefix, install_dir)) - else: # install_dir is True - Lflag = '-L${libdir}' + if 'cs' in l.compilers: + if isinstance(install_dir, str): + Lflag = '-r${prefix}/%s/%s ' % (self._escape(self._make_relative(prefix, install_dir)), l.filename) + else: # install_dir is True + Lflag = '-r${libdir}/%s' % l.filename + else: + if isinstance(install_dir, str): + Lflag = '-L${prefix}/%s ' % self._escape(self._make_relative(prefix, install_dir)) + else: # install_dir is True + Lflag = '-L${libdir}' if Lflag not in Lflags: Lflags.append(Lflag) yield Lflag @@ -288,7 +310,8 @@ class PkgConfigModule(ExtensionModule): # find the library if l.name_suffix_set: mlog.warning(msg.format(l.name, 'name_suffix', lname, pcfile)) - yield '-l%s' % lname + if 'cs' not in l.compilers: + yield '-l%s' % lname if len(deps.pub_libs) > 0: ofile.write('Libs: {}\n'.format(' '.join(generate_libs_flags(deps.pub_libs)))) @@ -400,6 +423,22 @@ class PkgConfigModule(ExtensionModule): self.generate_pkgconfig_file(state, deps, subdirs, name, description, url, version, pcfile, conflicts, variables) res = build.Data(mesonlib.File(True, state.environment.get_scratch_dir(), pcfile), pkgroot) + # Associate the main library with this generated pc file. 
If the library + # is used in any subsequent call to the generated, it will generate a + # 'Requires:' or 'Requires.private:'. + # Backward compatibility: We used to set 'generated_pc' on all public + # libraries instead of just the main one. Keep doing that but warn if + # anyone is relying on that deprecated behaviour. + if mainlib: + if not hasattr(mainlib, 'generated_pc'): + mainlib.generated_pc = filebase + else: + mlog.warning('Already generated a pkg-config file for', mlog.bold(mainlib.name)) + for lib in deps.pub_libs: + if not isinstance(lib, str) and not hasattr(lib, 'generated_pc'): + lib.generated_pc = filebase + lib.generated_pc_warn = types.SimpleNamespace(subdir=state.subdir, + lineno=state.current_lineno) return ModuleReturnValue(res, [res]) def initialize(*args, **kwargs): diff --git a/mesonbuild/modules/python.py b/mesonbuild/modules/python.py index 2229949..9cfbd6f 100644 --- a/mesonbuild/modules/python.py +++ b/mesonbuild/modules/python.py @@ -23,7 +23,7 @@ from mesonbuild.modules import ModuleReturnValue from ..interpreterbase import ( noPosargs, noKwargs, permittedKwargs, InterpreterObject, InvalidArguments, - FeatureNew + FeatureNew, FeatureNewKwargs, disablerIfNotFound ) from ..interpreter import ExternalProgramHolder, extract_required_kwarg from ..interpreterbase import flatten @@ -151,8 +151,8 @@ class PythonDependency(ExternalDependency): largs = self.clib_compiler.find_library(libname, environment, libdirs) self.is_found = largs is not None - - self.link_args = largs + if self.is_found: + self.link_args = largs inc_paths = mesonlib.OrderedSet([ self.variables.get('INCLUDEPY'), @@ -467,6 +467,8 @@ class PythonModule(ExtensionModule): return mesonlib.version_compare(version, '>= 3.0') return True + @FeatureNewKwargs('python.find_installation', '0.49.0', ['disabler']) + @disablerIfNotFound @permittedKwargs(['required']) def find_installation(self, interpreter, state, args, kwargs): feature_check = FeatureNew('Passing "feature" option to 
find_installation', '0.48.0') @@ -478,7 +480,9 @@ class PythonModule(ExtensionModule): if len(args) > 1: raise InvalidArguments('find_installation takes zero or one positional argument.') - if args: + if 'python' in state.environment.config_info.binaries: + name_or_path = state.environment.config_info.binaries['python'] + elif args: name_or_path = args[0] if not isinstance(name_or_path, str): raise InvalidArguments('find_installation argument must be a string.') diff --git a/mesonbuild/modules/python3.py b/mesonbuild/modules/python3.py index 3cfc689..f664632 100644 --- a/mesonbuild/modules/python3.py +++ b/mesonbuild/modules/python3.py @@ -17,11 +17,12 @@ from .. import mesonlib, dependencies from . import ExtensionModule from mesonbuild.modules import ModuleReturnValue -from ..interpreterbase import noKwargs, permittedKwargs +from ..interpreterbase import noKwargs, permittedKwargs, FeatureDeprecated from ..build import known_shmod_kwargs class Python3Module(ExtensionModule): + @FeatureDeprecated('python3 module', '0.48.0') def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.snippets.add('extension_module') @@ -47,7 +48,10 @@ class Python3Module(ExtensionModule): @noKwargs def find_python(self, state, args, kwargs): - py3 = dependencies.ExternalProgram('python3', mesonlib.python_command, silent=True) + options = [state.environment.config_info.binaries.get('python3')] + if not options[0]: # because this would be [None] + options = ['python3', mesonlib.python_command] + py3 = dependencies.ExternalProgram(*options, silent=True) return ModuleReturnValue(py3, [py3]) @noKwargs diff --git a/mesonbuild/modules/qt.py b/mesonbuild/modules/qt.py index a8e916a..28fb98c 100644 --- a/mesonbuild/modules/qt.py +++ b/mesonbuild/modules/qt.py @@ -18,7 +18,7 @@ from .. 
import build from ..mesonlib import MesonException, Popen_safe, extract_as_list, File from ..dependencies import Dependency, Qt4Dependency, Qt5Dependency import xml.etree.ElementTree as ET -from . import ModuleReturnValue, get_include_args +from . import ModuleReturnValue, get_include_args, ExtensionModule from ..interpreterbase import permittedKwargs, FeatureNewKwargs _QT_DEPS_LUT = { @@ -27,10 +27,11 @@ _QT_DEPS_LUT = { } -class QtBaseModule: +class QtBaseModule(ExtensionModule): tools_detected = False - def __init__(self, qt_version=5): + def __init__(self, interpreter, qt_version=5): + ExtensionModule.__init__(self, interpreter) self.qt_version = qt_version def _detect_tools(self, env, method): @@ -43,7 +44,7 @@ class QtBaseModule: kwargs = {'required': 'true', 'modules': 'Core', 'silent': 'true', 'method': method} qt = _QT_DEPS_LUT[self.qt_version](env, kwargs) # Get all tools and then make sure that they are the right version - self.moc, self.uic, self.rcc, self.lrelease = qt.compilers_detect() + self.moc, self.uic, self.rcc, self.lrelease = qt.compilers_detect(self.interpreter) # Moc, uic and rcc write their version strings to stderr. # Moc and rcc return a non-zero result when doing so. # What kind of an idiot thought that was a good idea? 
@@ -116,11 +117,13 @@ class QtBaseModule: except Exception: return [] + @FeatureNewKwargs('qt.preprocess', '0.49.0', ['uic_extra_arguments']) @FeatureNewKwargs('qt.preprocess', '0.44.0', ['moc_extra_arguments']) - @permittedKwargs({'moc_headers', 'moc_sources', 'moc_extra_arguments', 'include_directories', 'dependencies', 'ui_files', 'qresources', 'method'}) + @FeatureNewKwargs('qt.preprocess', '0.49.0', ['rcc_extra_arguments']) + @permittedKwargs({'moc_headers', 'moc_sources', 'uic_extra_arguments', 'moc_extra_arguments', 'rcc_extra_arguments', 'include_directories', 'dependencies', 'ui_files', 'qresources', 'method'}) def preprocess(self, state, args, kwargs): - rcc_files, ui_files, moc_headers, moc_sources, moc_extra_arguments, sources, include_directories, dependencies \ - = extract_as_list(kwargs, 'qresources', 'ui_files', 'moc_headers', 'moc_sources', 'moc_extra_arguments', 'sources', 'include_directories', 'dependencies', pop = True) + rcc_files, ui_files, moc_headers, moc_sources, uic_extra_arguments, moc_extra_arguments, rcc_extra_arguments, sources, include_directories, dependencies \ + = extract_as_list(kwargs, 'qresources', 'ui_files', 'moc_headers', 'moc_sources', 'uic_extra_arguments', 'moc_extra_arguments', 'rcc_extra_arguments', 'sources', 'include_directories', 'dependencies', pop = True) sources += args[1:] method = kwargs.get('method', 'auto') self._detect_tools(state.environment, method) @@ -139,7 +142,7 @@ class QtBaseModule: name = args[0] rcc_kwargs = {'input': rcc_files, 'output': name + '.cpp', - 'command': [self.rcc, '-name', name, '-o', '@OUTPUT@', '@INPUT@'], + 'command': [self.rcc, '-name', name, '-o', '@OUTPUT@', rcc_extra_arguments, '@INPUT@'], 'depend_files': qrc_deps} res_target = build.CustomTarget(name, state.subdir, state.subproject, rcc_kwargs) sources.append(res_target) @@ -153,15 +156,16 @@ class QtBaseModule: name = 'qt' + str(self.qt_version) + '-' + basename.replace('.', '_') rcc_kwargs = {'input': rcc_file, 'output': name 
+ '.cpp', - 'command': [self.rcc, '-name', '@BASENAME@', '-o', '@OUTPUT@', '@INPUT@'], + 'command': [self.rcc, '-name', '@BASENAME@', '-o', '@OUTPUT@', rcc_extra_arguments, '@INPUT@'], 'depend_files': qrc_deps} res_target = build.CustomTarget(name, state.subdir, state.subproject, rcc_kwargs) sources.append(res_target) if len(ui_files) > 0: if not self.uic.found(): - raise MesonException(err_msg.format('UIC', 'uic-qt' + self.qt_version)) + raise MesonException(err_msg.format('UIC', 'uic-qt{}'.format(self.qt_version), self.qt_version)) + arguments = uic_extra_arguments + ['-o', '@OUTPUT@', '@INPUT@'] ui_kwargs = {'output': 'ui_@BASENAME@.h', - 'arguments': ['-o', '@OUTPUT@', '@INPUT@']} + 'arguments': arguments} ui_gen = build.Generator([self.uic], ui_kwargs) ui_output = ui_gen.process_files('Qt{} ui'.format(self.qt_version), ui_files, state) sources.append(ui_output) diff --git a/mesonbuild/modules/qt4.py b/mesonbuild/modules/qt4.py index 29992d5..112e3e4 100644 --- a/mesonbuild/modules/qt4.py +++ b/mesonbuild/modules/qt4.py @@ -14,14 +14,13 @@ from .. import mlog from .qt import QtBaseModule -from . import ExtensionModule -class Qt4Module(ExtensionModule, QtBaseModule): +class Qt4Module(QtBaseModule): def __init__(self, interpreter): - QtBaseModule.__init__(self, qt_version=4) - ExtensionModule.__init__(self, interpreter) + QtBaseModule.__init__(self, interpreter, qt_version=4) + def initialize(*args, **kwargs): mlog.warning('rcc dependencies will not work properly until this upstream issue is fixed:', diff --git a/mesonbuild/modules/qt5.py b/mesonbuild/modules/qt5.py index 19623ac..96a7964 100644 --- a/mesonbuild/modules/qt5.py +++ b/mesonbuild/modules/qt5.py @@ -14,14 +14,13 @@ from .. import mlog from .qt import QtBaseModule -from . 
import ExtensionModule -class Qt5Module(ExtensionModule, QtBaseModule): +class Qt5Module(QtBaseModule): def __init__(self, interpreter): - QtBaseModule.__init__(self, qt_version=5) - ExtensionModule.__init__(self, interpreter) + QtBaseModule.__init__(self, interpreter, qt_version=5) + def initialize(*args, **kwargs): mlog.warning('rcc dependencies will not work reliably until this upstream issue is fixed:', diff --git a/mesonbuild/modules/windows.py b/mesonbuild/modules/windows.py index 59e845c..d185d89 100644 --- a/mesonbuild/modules/windows.py +++ b/mesonbuild/modules/windows.py @@ -12,7 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +import enum import os +import re from .. import mlog from .. import mesonlib, build @@ -24,6 +26,10 @@ from ..interpreter import CustomTargetHolder from ..interpreterbase import permittedKwargs, FeatureNewKwargs from ..dependencies import ExternalProgram +class ResourceCompilerType(enum.Enum): + windres = 1 + rc = 2 + class WindowsModule(ExtensionModule): def detect_compiler(self, compilers): @@ -32,31 +38,19 @@ class WindowsModule(ExtensionModule): return compilers[l] raise MesonException('Resource compilation requires a C or C++ compiler.') - @FeatureNewKwargs('windows.compile_resources', '0.47.0', ['depend_files', 'depends']) - @permittedKwargs({'args', 'include_directories', 'depend_files', 'depends'}) - def compile_resources(self, state, args, kwargs): - comp = self.detect_compiler(state.compilers) + def _find_resource_compiler(self, state): + # FIXME: Does not handle `native: true` executables, see + # See https://github.com/mesonbuild/meson/issues/1531 - extra_args = mesonlib.stringlistify(kwargs.get('args', [])) - wrc_depend_files = extract_as_list(kwargs, 'depend_files', pop = True) - wrc_depends = extract_as_list(kwargs, 'depends', pop = True) - for d in wrc_depends: - if isinstance(d, CustomTargetHolder): - extra_args += 
get_include_args([d.outdir_include()]) - inc_dirs = extract_as_list(kwargs, 'include_directories', pop = True) - for incd in inc_dirs: - if not isinstance(incd.held_object, (str, build.IncludeDirs)): - raise MesonException('Resource include dirs should be include_directories().') - extra_args += get_include_args(inc_dirs) + if hasattr(self, '_rescomp'): + return self._rescomp rescomp = None - # FIXME: Does not handle `native: true` executables, see - # https://github.com/mesonbuild/meson/issues/1531 if state.environment.is_cross_build(): # If cross compiling see if windres has been specified in the # cross file before trying to find it another way. - cross_info = state.environment.cross_info - rescomp = ExternalProgram.from_cross_info(cross_info, 'windres') + bins = state.environment.cross_info.config['binaries'] + rescomp = ExternalProgram.from_bin_list(bins, 'windres') if not rescomp or not rescomp.found(): if 'WINDRES' in os.environ: @@ -65,7 +59,15 @@ class WindowsModule(ExtensionModule): rescomp = ExternalProgram('windres', command=os.environ.get('WINDRES'), silent=True) if not rescomp or not rescomp.found(): - if comp.id == 'msvc': + # Take windres from the config file after the environment, which is + # in keeping with the expectations on unix-like OSes that + # environment variables trump config files. 
+ bins = state.environment.config_info.binaries + rescomp = ExternalProgram.from_bin_list(bins, 'windres') + + if not rescomp or not rescomp.found(): + comp = self.detect_compiler(state.compilers) + if comp.id == 'msvc' or comp.id == 'clang-cl': rescomp = ExternalProgram('rc', silent=True) else: rescomp = ExternalProgram('windres', silent=True) @@ -73,7 +75,38 @@ class WindowsModule(ExtensionModule): if not rescomp.found(): raise MesonException('Could not find Windows resource compiler') - if 'rc' in rescomp.get_path(): + for (arg, match, type) in [ + ('/?', '^.*Microsoft.*Resource Compiler.*$', ResourceCompilerType.rc), + ('--version', '^.*GNU windres.*$', ResourceCompilerType.windres), + ]: + p, o, e = mesonlib.Popen_safe(rescomp.get_command() + [arg]) + m = re.search(match, o, re.MULTILINE) + if m: + mlog.log('Windows resource compiler: %s' % m.group()) + self._rescomp = (rescomp, type) + break + else: + raise MesonException('Could not determine type of Windows resource compiler') + + return self._rescomp + + @FeatureNewKwargs('windows.compile_resources', '0.47.0', ['depend_files', 'depends']) + @permittedKwargs({'args', 'include_directories', 'depend_files', 'depends'}) + def compile_resources(self, state, args, kwargs): + extra_args = mesonlib.stringlistify(kwargs.get('args', [])) + wrc_depend_files = extract_as_list(kwargs, 'depend_files', pop = True) + wrc_depends = extract_as_list(kwargs, 'depends', pop = True) + for d in wrc_depends: + if isinstance(d, CustomTargetHolder): + extra_args += get_include_args([d.outdir_include()]) + inc_dirs = extract_as_list(kwargs, 'include_directories', pop = True) + for incd in inc_dirs: + if not isinstance(incd.held_object, (str, build.IncludeDirs)): + raise MesonException('Resource include dirs should be include_directories().') + extra_args += get_include_args(inc_dirs) + + rescomp, rescomp_type = self._find_resource_compiler(state) + if rescomp_type == ResourceCompilerType.rc: # RC is used to generate .res files, a 
special binary resource # format, which can be passed directly to LINK (apparently LINK uses # CVTRES internally to convert this to a COFF object) @@ -129,7 +162,7 @@ class WindowsModule(ExtensionModule): } # instruct binutils windres to generate a preprocessor depfile - if 'windres' in rescomp.get_path(): + if rescomp_type == ResourceCompilerType.windres: res_kwargs['depfile'] = res_kwargs['output'] + '.d' res_kwargs['command'] += ['--preprocessor-arg=-MD', '--preprocessor-arg=-MQ@OUTPUT@', '--preprocessor-arg=-MF@DEPFILE@'] diff --git a/mesonbuild/mparser.py b/mesonbuild/mparser.py index 9af6dac..be5c807 100644 --- a/mesonbuild/mparser.py +++ b/mesonbuild/mparser.py @@ -90,8 +90,9 @@ class Lexer: def __init__(self, code): self.code = code self.keywords = {'true', 'false', 'if', 'else', 'elif', - 'endif', 'and', 'or', 'not', 'foreach', 'endforeach'} - self.future_keywords = {'continue', 'break', 'in', 'return'} + 'endif', 'and', 'or', 'not', 'foreach', 'endforeach', + 'in', 'continue', 'break'} + self.future_keywords = {'return'} self.token_specification = [ # Need to be sorted longest to shortest. ('ignore', re.compile(r'[ \t]')), @@ -242,6 +243,12 @@ class StringNode(ElementaryNode): def __str__(self): return "String node: '%s' (%d, %d)." % (self.value, self.lineno, self.colno) +class ContinueNode(ElementaryNode): + pass + +class BreakNode(ElementaryNode): + pass + class ArrayNode: def __init__(self, args): self.subdir = args.subdir @@ -436,7 +443,9 @@ comparison_map = {'equal': '==', 'lt': '<', 'le': '<=', 'gt': '>', - 'ge': '>=' + 'ge': '>=', + 'in': 'in', + 'notin': 'not in', } # Recursive descent parser for Meson's definition language. 
@@ -543,6 +552,8 @@ class Parser: for nodename, operator_type in comparison_map.items(): if self.accept(nodename): return ComparisonNode(operator_type, left, self.e5()) + if self.accept('not') and self.accept('in'): + return ComparisonNode('notin', left, self.e5()) return left def e5(self): @@ -754,6 +765,10 @@ class Parser: block = self.foreachblock() self.block_expect('endforeach', block_start) return block + if self.accept('continue'): + return ContinueNode(self.current) + if self.accept('break'): + return BreakNode(self.current) return self.statement() def codeblock(self): diff --git a/mesonbuild/msetup.py b/mesonbuild/msetup.py new file mode 100644 index 0000000..f9a5e1c --- /dev/null +++ b/mesonbuild/msetup.py @@ -0,0 +1,218 @@ +# Copyright 2016-2018 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import time +import sys, stat +import datetime +import os.path +import platform +import cProfile as profile +import argparse + +from . import environment, interpreter, mesonlib +from . import build +from . 
import mlog, coredata +from .mesonlib import MesonException + +def add_arguments(parser): + coredata.register_builtin_arguments(parser) + parser.add_argument('--cross-file', default=None, + help='File describing cross compilation environment.') + parser.add_argument('--native-file', + default=[], + action='append', + help='File containing overrides for native compilation environment.') + parser.add_argument('-v', '--version', action='version', + version=coredata.version) + parser.add_argument('--profile-self', action='store_true', dest='profile', + help=argparse.SUPPRESS) + parser.add_argument('--fatal-meson-warnings', action='store_true', dest='fatal_warnings', + help='Make all Meson warnings fatal') + parser.add_argument('--reconfigure', action='store_true', + help='Set options and reconfigure the project. Useful when new ' + + 'options have been added to the project and the default value ' + + 'is not working.') + parser.add_argument('--wipe', action='store_true', + help='Wipe build directory and reconfigure using previous command line options. ' + + 'Userful when build directory got corrupted, or when rebuilding with a ' + + 'newer version of meson.') + parser.add_argument('builddir', nargs='?', default=None) + parser.add_argument('sourcedir', nargs='?', default=None) + +class MesonApp: + def __init__(self, options): + (self.source_dir, self.build_dir) = self.validate_dirs(options.builddir, + options.sourcedir, + options.reconfigure, + options.wipe) + + if options.wipe: + # Make a copy of the cmd line file to make sure we can always + # restore that file if anything bad happens. For example if + # configuration fails we need to be able to wipe again. 
+ filename = coredata.get_cmd_line_file(self.build_dir) + with open(filename, 'r') as f: + content = f.read() + + coredata.read_cmd_line_file(self.build_dir, options) + + try: + mesonlib.windows_proof_rmtree(self.build_dir) + finally: + # Restore the file + path = os.path.dirname(filename) + os.makedirs(path, exist_ok=True) + with open(filename, 'w') as f: + f.write(content) + + self.options = options + + def has_build_file(self, dirname): + fname = os.path.join(dirname, environment.build_filename) + return os.path.exists(fname) + + def validate_core_dirs(self, dir1, dir2): + if dir1 is None: + if dir2 is None: + if not os.path.exists('meson.build') and os.path.exists('../meson.build'): + dir2 = '..' + else: + raise MesonException('Must specify at least one directory name.') + dir1 = os.getcwd() + if dir2 is None: + dir2 = os.getcwd() + ndir1 = os.path.abspath(os.path.realpath(dir1)) + ndir2 = os.path.abspath(os.path.realpath(dir2)) + if not os.path.exists(ndir1): + os.makedirs(ndir1) + if not os.path.exists(ndir2): + os.makedirs(ndir2) + if not stat.S_ISDIR(os.stat(ndir1).st_mode): + raise MesonException('%s is not a directory' % dir1) + if not stat.S_ISDIR(os.stat(ndir2).st_mode): + raise MesonException('%s is not a directory' % dir2) + if os.path.samefile(dir1, dir2): + raise MesonException('Source and build directories must not be the same. Create a pristine build directory.') + if self.has_build_file(ndir1): + if self.has_build_file(ndir2): + raise MesonException('Both directories contain a build file %s.' % environment.build_filename) + return ndir1, ndir2 + if self.has_build_file(ndir2): + return ndir2, ndir1 + raise MesonException('Neither directory contains a build file %s.' 
% environment.build_filename) + + def validate_dirs(self, dir1, dir2, reconfigure, wipe): + (src_dir, build_dir) = self.validate_core_dirs(dir1, dir2) + priv_dir = os.path.join(build_dir, 'meson-private/coredata.dat') + if os.path.exists(priv_dir): + if not reconfigure and not wipe: + print('Directory already configured.\n' + '\nJust run your build command (e.g. ninja) and Meson will regenerate as necessary.\n' + 'If ninja fails, run "ninja reconfigure" or "meson --reconfigure"\n' + 'to force Meson to regenerate.\n' + '\nIf build failures persist, run "meson setup --wipe" to rebuild from scratch\n' + 'using the same options as passed when configuring the build.' + '\nTo change option values, run "meson configure" instead.') + sys.exit(0) + else: + has_cmd_line_file = os.path.exists(coredata.get_cmd_line_file(build_dir)) + if (wipe and not has_cmd_line_file) or (not wipe and reconfigure): + print('Directory does not contain a valid build tree:\n{}'.format(build_dir)) + sys.exit(1) + return src_dir, build_dir + + def check_pkgconfig_envvar(self, env): + curvar = os.environ.get('PKG_CONFIG_PATH', '') + if curvar != env.coredata.pkgconf_envvar: + mlog.warning('PKG_CONFIG_PATH has changed between invocations from "%s" to "%s".' 
% + (env.coredata.pkgconf_envvar, curvar)) + env.coredata.pkgconf_envvar = curvar + + def generate(self): + env = environment.Environment(self.source_dir, self.build_dir, self.options) + mlog.initialize(env.get_log_dir(), self.options.fatal_warnings) + if self.options.profile: + mlog.set_timestamp_start(time.monotonic()) + with mesonlib.BuildDirLock(self.build_dir): + self._generate(env) + + def _generate(self, env): + mlog.debug('Build started at', datetime.datetime.now().isoformat()) + mlog.debug('Main binary:', sys.executable) + mlog.debug('Python system:', platform.system()) + mlog.log(mlog.bold('The Meson build system')) + self.check_pkgconfig_envvar(env) + mlog.log('Version:', coredata.version) + mlog.log('Source dir:', mlog.bold(self.source_dir)) + mlog.log('Build dir:', mlog.bold(self.build_dir)) + if env.is_cross_build(): + mlog.log('Build type:', mlog.bold('cross build')) + else: + mlog.log('Build type:', mlog.bold('native build')) + b = build.Build(env) + + intr = interpreter.Interpreter(b) + if env.is_cross_build(): + mlog.log('Host machine cpu family:', mlog.bold(intr.builtin['host_machine'].cpu_family_method([], {}))) + mlog.log('Host machine cpu:', mlog.bold(intr.builtin['host_machine'].cpu_method([], {}))) + mlog.log('Target machine cpu family:', mlog.bold(intr.builtin['target_machine'].cpu_family_method([], {}))) + mlog.log('Target machine cpu:', mlog.bold(intr.builtin['target_machine'].cpu_method([], {}))) + mlog.log('Build machine cpu family:', mlog.bold(intr.builtin['build_machine'].cpu_family_method([], {}))) + mlog.log('Build machine cpu:', mlog.bold(intr.builtin['build_machine'].cpu_method([], {}))) + if self.options.profile: + fname = os.path.join(self.build_dir, 'meson-private', 'profile-interpreter.log') + profile.runctx('intr.run()', globals(), locals(), filename=fname) + else: + intr.run() + # Print all default option values that don't match the current value + for def_opt_name, def_opt_value, cur_opt_value in 
intr.get_non_matching_default_options(): + mlog.log('Option', mlog.bold(def_opt_name), 'is:', + mlog.bold(str(cur_opt_value)), + '[default: {}]'.format(str(def_opt_value))) + try: + dumpfile = os.path.join(env.get_scratch_dir(), 'build.dat') + # We would like to write coredata as late as possible since we use the existence of + # this file to check if we generated the build file successfully. Since coredata + # includes settings, the build files must depend on it and appear newer. However, due + # to various kernel caches, we cannot guarantee that any time in Python is exactly in + # sync with the time that gets applied to any files. Thus, we dump this file as late as + # possible, but before build files, and if any error occurs, delete it. + cdf = env.dump_coredata() + if self.options.profile: + fname = 'profile-{}-backend.log'.format(intr.backend.name) + fname = os.path.join(self.build_dir, 'meson-private', fname) + profile.runctx('intr.backend.generate(intr)', globals(), locals(), filename=fname) + else: + intr.backend.generate(intr) + build.save(b, dumpfile) + # Post-conf scripts must be run after writing coredata or else introspection fails. + intr.backend.run_postconf_scripts() + if env.first_invocation: + coredata.write_cmd_line_file(self.build_dir, self.options) + else: + coredata.update_cmd_line_file(self.build_dir, self.options) + except: + if 'cdf' in locals(): + old_cdf = cdf + '.prev' + if os.path.exists(old_cdf): + os.replace(old_cdf, cdf) + else: + os.unlink(cdf) + raise + +def run(options): + coredata.parse_cmd_line_options(options) + app = MesonApp(options) + app.generate() + return 0 diff --git a/mesonbuild/msubprojects.py b/mesonbuild/msubprojects.py new file mode 100644 index 0000000..1536d96 --- /dev/null +++ b/mesonbuild/msubprojects.py @@ -0,0 +1,226 @@ +import os, subprocess + +from . 
import mlog +from .mesonlib import Popen_safe +from .wrap.wrap import API_ROOT, PackageDefinition, Resolver, WrapException +from .wrap import wraptool + +def update_wrapdb_file(wrap, repo_dir, options): + patch_url = wrap.get('patch_url') + branch, revision = wraptool.parse_patch_url(patch_url) + new_branch, new_revision = wraptool.get_latest_version(wrap.name) + if new_branch == branch and new_revision == revision: + mlog.log(' -> Up to date.') + return + wraptool.update_wrap_file(wrap.filename, wrap.name, new_branch, new_revision) + msg = [' -> New wrap file downloaded.'] + # Meson reconfigure won't use the new wrap file as long as the source + # directory exists. We don't delete it ourself to avoid data loss in case + # user has changes in their copy. + if os.path.isdir(repo_dir): + msg += ['To use it, delete', mlog.bold(repo_dir), 'and run', mlog.bold('meson --reconfigure')] + mlog.log(*msg) + +def update_file(wrap, repo_dir, options): + patch_url = wrap.values.get('patch_url', '') + if patch_url.startswith(API_ROOT): + update_wrapdb_file(wrap, repo_dir, options) + elif not os.path.isdir(repo_dir): + # The subproject is not needed, or it is a tarball extracted in + # 'libfoo-1.0' directory and the version has been bumped and the new + # directory is 'libfoo-2.0'. In that case forcing a meson + # reconfigure will download and use the new tarball. + mlog.log(' -> Subproject has not been checked out. Run', mlog.bold('meson --reconfigure'), 'to fetch it if needed.') + else: + # The subproject has not changed, or the new source and/or patch + # tarballs should be extracted in the same directory than previous + # version. 
+ mlog.log(' -> Subproject has not changed, or the new source/patch needs to be extracted on the same location.\n' + + ' In that case, delete', mlog.bold(repo_dir), 'and run', mlog.bold('meson --reconfigure')) + +def git(cmd, workingdir): + return subprocess.check_output(['git', '-C', workingdir] + cmd, + stderr=subprocess.STDOUT).decode() + +def git_show(repo_dir): + commit_message = git(['show', '--quiet', '--pretty=format:%h%n%d%n%s%n[%an]'], repo_dir) + parts = [s.strip() for s in commit_message.split('\n')] + mlog.log(' ->', mlog.yellow(parts[0]), mlog.red(parts[1]), parts[2], mlog.blue(parts[3])) + +def update_git(wrap, repo_dir, options): + if not os.path.isdir(repo_dir): + mlog.log(' -> Not used.') + return + revision = wrap.get('revision') + ret = git(['rev-parse', '--abbrev-ref', 'HEAD'], repo_dir).strip() + if ret == 'HEAD': + try: + # We are currently in detached mode, just checkout the new revision + git(['fetch'], repo_dir) + git(['checkout', revision], repo_dir) + except subprocess.CalledProcessError as e: + out = e.output.decode().strip() + mlog.log(' -> Could not checkout revision', mlog.cyan(revision)) + mlog.log(mlog.red(out)) + mlog.log(mlog.red(str(e))) + return + elif ret == revision: + try: + # We are in the same branch, pull latest commits + git(['-c', 'rebase.autoStash=true', 'pull', '--rebase'], repo_dir) + except subprocess.CalledProcessError as e: + out = e.output.decode().strip() + mlog.log(' -> Could not rebase', mlog.bold(repo_dir), 'please fix and try again.') + mlog.log(mlog.red(out)) + mlog.log(mlog.red(str(e))) + return + else: + # We are in another branch, probably user created their own branch and + # we should rebase it on top of wrap's branch. 
+ if options.rebase: + try: + git(['fetch'], repo_dir) + git(['-c', 'rebase.autoStash=true', 'rebase', revision], repo_dir) + except subprocess.CalledProcessError as e: + out = e.output.decode().strip() + mlog.log(' -> Could not rebase', mlog.bold(repo_dir), 'please fix and try again.') + mlog.log(mlog.red(out)) + mlog.log(mlog.red(str(e))) + return + else: + mlog.log(' -> Target revision is', mlog.bold(revision), 'but currently in branch is', mlog.bold(ret), '\n' + + ' To rebase your branch on top of', mlog.bold(revision), 'use', mlog.bold('--rebase'), 'option.') + return + + git(['submodule', 'update'], repo_dir) + git_show(repo_dir) + +def update_hg(wrap, repo_dir, options): + if not os.path.isdir(repo_dir): + mlog.log(' -> Not used.') + return + revno = wrap.get('revision') + if revno.lower() == 'tip': + # Failure to do pull is not a fatal error, + # because otherwise you can't develop without + # a working net connection. + subprocess.call(['hg', 'pull'], cwd=repo_dir) + else: + if subprocess.call(['hg', 'checkout', revno], cwd=repo_dir) != 0: + subprocess.check_call(['hg', 'pull'], cwd=repo_dir) + subprocess.check_call(['hg', 'checkout', revno], cwd=repo_dir) + +def update_svn(wrap, repo_dir, options): + if not os.path.isdir(repo_dir): + mlog.log(' -> Not used.') + return + revno = wrap.get('revision') + p, out = Popen_safe(['svn', 'info', '--show-item', 'revision', repo_dir]) + current_revno = out + if current_revno == revno: + return + if revno.lower() == 'head': + # Failure to do pull is not a fatal error, + # because otherwise you can't develop without + # a working net connection. + subprocess.call(['svn', 'update'], cwd=repo_dir) + else: + subprocess.check_call(['svn', 'update', '-r', revno], cwd=repo_dir) + +def update(wrap, repo_dir, options): + mlog.log('Updating %s...' 
% wrap.name) + if wrap.type == 'file': + update_file(wrap, repo_dir, options) + elif wrap.type == 'git': + update_git(wrap, repo_dir, options) + elif wrap.type == 'hg': + update_hg(wrap, repo_dir, options) + elif wrap.type == 'svn': + update_svn(wrap, repo_dir, options) + else: + mlog.log(' -> Cannot update', wrap.type, 'subproject') + +def checkout(wrap, repo_dir, options): + if wrap.type != 'git' or not os.path.isdir(repo_dir): + return + branch_name = options.branch_name if options.branch_name else wrap.get('revision') + cmd = ['checkout', branch_name, '--'] + if options.b: + cmd.insert(1, '-b') + mlog.log('Checkout %s in %s...' % (branch_name, wrap.name)) + try: + git(cmd, repo_dir) + git_show(repo_dir) + except subprocess.CalledProcessError as e: + out = e.output.decode().strip() + mlog.log(' -> ', mlog.red(out)) + +def download(wrap, repo_dir, options): + mlog.log('Download %s...' % wrap.name) + if os.path.isdir(repo_dir): + mlog.log(' -> Already downloaded') + return + try: + r = Resolver(os.path.dirname(repo_dir)) + r.resolve(wrap.name) + mlog.log(' -> done') + except WrapException as e: + mlog.log(' ->', mlog.red(str(e))) + +def add_common_arguments(p): + p.add_argument('--sourcedir', default='.', + help='Path to source directory') + p.add_argument('subprojects', nargs='*', + help='List of subprojects (default: all)') + +def add_arguments(parser): + subparsers = parser.add_subparsers(title='Commands', dest='command') + subparsers.required = True + + p = subparsers.add_parser('update', help='Update all subprojects from wrap files') + p.add_argument('--rebase', default=False, action='store_true', + help='Rebase your branch on top of wrap\'s revision (git only)') + add_common_arguments(p) + p.set_defaults(subprojects_func=update) + + p = subparsers.add_parser('checkout', help='Checkout a branch (git only)') + p.add_argument('-b', default=False, action='store_true', + help='Create a new branch') + p.add_argument('branch_name', nargs='?', + help='Name of the 
branch to checkout or create (default: revision set in wrap file)') + add_common_arguments(p) + p.set_defaults(subprojects_func=checkout) + + p = subparsers.add_parser('download', help='Ensure subprojects are fetched, even if not in use. ' + + 'Already downloaded subprojects are not modified. ' + + 'This can be used to pre-fetch all subprojects and avoid downloads during configure.') + add_common_arguments(p) + p.set_defaults(subprojects_func=download) + +def run(options): + src_dir = os.path.relpath(os.path.realpath(options.sourcedir)) + if not os.path.isfile(os.path.join(src_dir, 'meson.build')): + mlog.error('Directory', mlog.bold(src_dir), 'does not seem to be a Meson source directory.') + return 1 + subprojects_dir = os.path.join(src_dir, 'subprojects') + if not os.path.isdir(subprojects_dir): + mlog.log('Directory', mlog.bold(src_dir), 'does not seem to have subprojects.') + return 0 + files = [] + for name in options.subprojects: + f = os.path.join(subprojects_dir, name + '.wrap') + if not os.path.isfile(f): + mlog.error('Subproject', mlog.bold(name), 'not found.') + return 1 + else: + files.append(f) + if not files: + for f in os.listdir(subprojects_dir): + if f.endswith('.wrap'): + files.append(os.path.join(subprojects_dir, f)) + for f in files: + wrap = PackageDefinition(f) + directory = wrap.values.get('directory', wrap.name) + repo_dir = os.path.join(subprojects_dir, directory) + options.subprojects_func(wrap, repo_dir, options) + return 0 diff --git a/mesonbuild/mtest.py b/mesonbuild/mtest.py index 8d9a585..8ce9538 100644 --- a/mesonbuild/mtest.py +++ b/mesonbuild/mtest.py @@ -23,6 +23,7 @@ from mesonbuild.dependencies import ExternalProgram from mesonbuild.mesonlib import substring_is_in_list, MesonException from mesonbuild import mlog +import tempfile import time, datetime, multiprocessing, json import concurrent.futures as conc import platform @@ -60,8 +61,7 @@ def determine_worker_count(): num_workers = 1 return num_workers -def buildparser(): - 
parser = argparse.ArgumentParser(prog='meson test') +def add_arguments(parser): parser.add_argument('--repeat', default=1, dest='repeat', type=int, help='Number of times to run the tests.') parser.add_argument('--no-rebuild', default=False, action='store_true', @@ -102,7 +102,6 @@ def buildparser(): help='Arguments to pass to the specified test(s) or all tests') parser.add_argument('args', nargs='*', help='Optional list of tests to run') - return parser def returncode_to_status(retcode): @@ -145,6 +144,8 @@ class TestResult(enum.Enum): TIMEOUT = 'TIMEOUT' SKIP = 'SKIP' FAIL = 'FAIL' + EXPECTEDFAIL = 'EXPECTEDFAIL' + UNEXPECTEDPASS = 'UNEXPECTEDPASS' class TestRun: @@ -291,8 +292,8 @@ class SingleTestRunner: stdout = None stderr = None if not self.options.verbose: - stdout = subprocess.PIPE - stderr = subprocess.PIPE if self.options and self.options.split else subprocess.STDOUT + stdout = tempfile.TemporaryFile("wb+") + stderr = tempfile.TemporaryFile("wb+") if self.options and self.options.split else stdout # Let gdb handle ^C instead of us if self.options.gdb: @@ -326,7 +327,7 @@ class SingleTestRunner: else: timeout = self.test.timeout try: - (stdo, stde) = p.communicate(timeout=timeout) + p.communicate(timeout=timeout) except subprocess.TimeoutExpired: if self.options.verbose: print('%s time out (After %d seconds)' % (self.test.name, timeout)) @@ -339,6 +340,8 @@ class SingleTestRunner: # Let us accept ^C again signal.signal(signal.SIGINT, previous_sigint_handler) + additional_error = None + if kill_test or timed_out: # Python does not provide multiplatform support for # killing a process and all its children so we need @@ -355,33 +358,43 @@ class SingleTestRunner: # already died) so carry on. pass try: - (stdo, stde) = p.communicate(timeout=1) + p.communicate(timeout=1) except subprocess.TimeoutExpired: # An earlier kill attempt has not worked for whatever reason. # Try to kill it one last time with a direct call. 
# If the process has spawned children, they will remain around. p.kill() try: - (stdo, stde) = p.communicate(timeout=1) + p.communicate(timeout=1) except subprocess.TimeoutExpired: - stdo = b'Test process could not be killed.' - stde = b'' + additional_error = b'Test process could not be killed.' except ValueError: - stdo = b'Could not read output. Maybe the process has redirected its stdout/stderr?' - stde = b'' + additional_error = b'Could not read output. Maybe the process has redirected its stdout/stderr?' endtime = time.time() duration = endtime - starttime - stdo = decode(stdo) - if stde: - stde = decode(stde) + if additional_error is None: + if stdout is None: # if stdout is None stderr should be as well + stdo = '' + stde = '' + else: + stdout.seek(0) + stdo = decode(stdout.read()) + if stderr != stdout: + stderr.seek(0) + stde = decode(stderr.read()) + else: + stde = "" + else: + stdo = "" + stde = additional_error if timed_out: res = TestResult.TIMEOUT elif p.returncode == GNU_SKIP_RETURNCODE: res = TestResult.SKIP - elif self.test.should_fail == bool(p.returncode): - res = TestResult.OK + elif self.test.should_fail: + res = TestResult.EXPECTEDFAIL if bool(p.returncode) else TestResult.UNEXPECTEDPASS else: - res = TestResult.FAIL + res = TestResult.FAIL if bool(p.returncode) else TestResult.OK return TestRun(res, p.returncode, self.test.should_fail, duration, stdo, stde, cmd, self.test.env) @@ -390,6 +403,8 @@ class TestHarness: self.options = options self.collected_logs = [] self.fail_count = 0 + self.expectedfail_count = 0 + self.unexpectedpass_count = 0 self.success_count = 0 self.skip_count = 0 self.timeout_count = 0 @@ -435,6 +450,8 @@ class TestHarness: def get_test_runner(self, test): options = deepcopy(self.options) + if not options.setup: + options.setup = self.build_data.test_setup_default_name if options.setup: env = self.merge_suite_options(options, test) else: @@ -454,6 +471,10 @@ class TestHarness: self.success_count += 1 elif result.res is 
TestResult.FAIL: self.fail_count += 1 + elif result.res is TestResult.EXPECTEDFAIL: + self.expectedfail_count += 1 + elif result.res is TestResult.UNEXPECTEDPASS: + self.unexpectedpass_count += 1 else: sys.exit('Unknown test result encountered: {}'.format(result.res)) @@ -469,9 +490,10 @@ class TestHarness: result_str = '%s %s %s%s%s%5.2f s %s' % \ (num, name, padding1, result.res.value, padding2, result.duration, status) - if not self.options.quiet or result.res is not TestResult.OK: - if result.res is not TestResult.OK and mlog.colorize_console: - if result.res in (TestResult.FAIL, TestResult.TIMEOUT): + ok_statuses = (TestResult.OK, TestResult.EXPECTEDFAIL) + if not self.options.quiet or result.res not in ok_statuses: + if result.res not in ok_statuses and mlog.colorize_console: + if result.res in (TestResult.FAIL, TestResult.TIMEOUT, TestResult.UNEXPECTEDPASS): decorator = mlog.red elif result.res is TestResult.SKIP: decorator = mlog.yellow @@ -492,11 +514,14 @@ class TestHarness: def print_summary(self): msg = ''' -OK: %4d -FAIL: %4d -SKIP: %4d -TIMEOUT: %4d -''' % (self.success_count, self.fail_count, self.skip_count, self.timeout_count) +Ok: %4d +Expected Fail: %4d +Fail: %4d +Unexpected Pass: %4d +Skipped: %4d +Timeout: %4d +''' % (self.success_count, self.expectedfail_count, self.fail_count, + self.unexpectedpass_count, self.skip_count, self.timeout_count) print(msg) if self.logfile: self.logfile.write(msg) @@ -514,7 +539,11 @@ TIMEOUT: %4d print('--- Listing only the last 100 lines from a long log. 
---') lines = lines[-100:] for line in lines: - print(line) + try: + print(line) + except UnicodeEncodeError: + line = line.encode('ascii', errors='replace').decode() + print(line) def doit(self): if self.is_run: @@ -737,9 +766,7 @@ def rebuild_all(wd): return True -def run(args): - options = buildparser().parse_args(args) - +def run(options): if options.benchmark: options.num_processes = 1 @@ -784,3 +811,9 @@ def run(args): else: print(e) return 1 + +def run_with_args(args): + parser = argparse.ArgumentParser(prog='meson test') + add_arguments(parser) + options = parser.parse_args(args) + return run(options) diff --git a/mesonbuild/rewriter.py b/mesonbuild/rewriter.py index 1127288..37ed7ef 100644 --- a/mesonbuild/rewriter.py +++ b/mesonbuild/rewriter.py @@ -27,11 +27,8 @@ import mesonbuild.astinterpreter from mesonbuild.mesonlib import MesonException from mesonbuild import mlog import sys, traceback -import argparse - -def buildparser(): - parser = argparse.ArgumentParser(prog='meson rewrite') +def add_arguments(parser): parser.add_argument('--sourcedir', default='.', help='Path to source directory.') parser.add_argument('--target', default=None, @@ -39,14 +36,12 @@ def buildparser(): parser.add_argument('--filename', default=None, help='Name of source file to add or remove to target.') parser.add_argument('commands', nargs='+') - return parser -def run(args): - options = buildparser().parse_args(args) +def run(options): if options.target is None or options.filename is None: sys.exit("Must specify both target and filename.") print('This tool is highly experimental, use with care.') - rewriter = mesonbuild.astinterpreter.AstInterpreter(options.sourcedir, '') + rewriter = mesonbuild.astinterpreter.RewriterInterpreter(options.sourcedir, '') try: if options.commands[0] == 'add': rewriter.add_source(options.target, options.filename) diff --git a/mesonbuild/scripts/coverage.py b/mesonbuild/scripts/coverage.py index 916c84f..6d7e707 100644 --- 
a/mesonbuild/scripts/coverage.py +++ b/mesonbuild/scripts/coverage.py @@ -77,16 +77,19 @@ def coverage(outputs, source_root, subproject_root, build_root, log_dir): subprocess.check_call([lcov_exe, '-a', initial_tracefile, '-a', run_tracefile, + '--rc', 'lcov_branch_coverage=1', '-o', raw_tracefile]) # Remove all directories outside the source_root from the covinfo subprocess.check_call([lcov_exe, '--extract', raw_tracefile, os.path.join(source_root, '*'), + '--rc', 'lcov_branch_coverage=1', '--output-file', covinfo]) # Remove all directories inside subproject dir subprocess.check_call([lcov_exe, '--remove', covinfo, os.path.join(subproject_root, '*'), + '--rc', 'lcov_branch_coverage=1', '--output-file', covinfo]) subprocess.check_call([genhtml_exe, '--prefix', build_root, @@ -104,13 +107,14 @@ def coverage(outputs, source_root, subproject_root, build_root, log_dir): subprocess.check_call([gcovr_exe, '--html', '--html-details', + '--print-summary', '-r', build_root, '-e', subproject_root, '-o', os.path.join(htmloutdir, 'index.html'), ]) outfiles.append(('Html', pathlib.Path(htmloutdir, 'index.html'))) elif outputs: - print('lcov/genhtml or gcovr >= 3.1 needed to generate Html coverage report') + print('lcov/genhtml or gcovr >= 3.2 needed to generate Html coverage report') exitcode = 1 if not outputs and not outfiles: diff --git a/mesonbuild/scripts/depfixer.py b/mesonbuild/scripts/depfixer.py index f9d7692..7294186 100644 --- a/mesonbuild/scripts/depfixer.py +++ b/mesonbuild/scripts/depfixer.py @@ -432,6 +432,9 @@ def fix_rpath(fname, new_rpath, final_path, install_name_mappings, verbose=True) # Static libraries never have rpaths if fname.endswith('.a'): return + # DLLs never have rpaths + if fname.endswith('.dll'): + return try: if fname.endswith('.jar'): fix_jar(fname) diff --git a/mesonbuild/scripts/dist.py b/mesonbuild/scripts/dist.py index 6fa10ff..f49492c 100644 --- a/mesonbuild/scripts/dist.py +++ b/mesonbuild/scripts/dist.py @@ -24,8 +24,8 @@ import tarfile, 
zipfile import tempfile from glob import glob from mesonbuild.environment import detect_ninja -from mesonbuild.dependencies import ExternalProgram from mesonbuild.mesonlib import windows_proof_rmtree +from mesonbuild import mlog def create_hash(fname): hashname = fname + '.sha256sum' @@ -80,18 +80,27 @@ def run_dist_scripts(dist_root, dist_scripts): env = os.environ.copy() env['MESON_DIST_ROOT'] = dist_root for d in dist_scripts: - print('Processing dist script %s.' % d) - ddir, dname = os.path.split(d) - ep = ExternalProgram(dname, - search_dir=os.path.join(dist_root, ddir), - silent=True) - if not ep.found(): - sys.exit('Script %s could not be found in dist directory.' % d) - pc = subprocess.run(ep.command, env=env) - if pc.returncode != 0: - sys.exit('Dist script errored out.') + script = d['exe'] + args = d['args'] + name = ' '.join(script + args) + print('Running custom dist script {!r}'.format(name)) + try: + rc = subprocess.call(script + args, env=env) + if rc != 0: + sys.exit('Dist script errored out') + except OSError: + print('Failed to run dist script {!r}'.format(name)) + sys.exit(1) + + +def git_have_dirty_index(src_root): + '''Check whether there are uncommitted changes in git''' + ret = subprocess.call(['git', '-C', src_root, 'diff-index', '--quiet', 'HEAD']) + return ret == 1 def create_dist_git(dist_name, src_root, bld_root, dist_sub, dist_scripts): + if git_have_dirty_index(src_root): + mlog.warning('Repository has uncommitted changes that will not be included in the dist tarball') distdir = os.path.join(dist_sub, dist_name) if os.path.exists(distdir): shutil.rmtree(distdir) @@ -111,14 +120,21 @@ def create_dist_git(dist_name, src_root, bld_root, dist_sub, dist_scripts): return (xzname, ) +def hg_have_dirty_index(src_root): + '''Check whether there are uncommitted changes in hg''' + out = subprocess.check_output(['hg', '-R', src_root, 'summary']) + return b'commit: (clean)' not in out + def create_dist_hg(dist_name, src_root, bld_root, dist_sub, 
dist_scripts): - os.makedirs(dist_sub, exist_ok=True) + if hg_have_dirty_index(src_root): + mlog.warning('Repository has uncommitted changes that will not be included in the dist tarball') + os.makedirs(dist_sub, exist_ok=True) tarname = os.path.join(dist_sub, dist_name + '.tar') xzname = tarname + '.xz' subprocess.check_call(['hg', 'archive', '-R', src_root, '-S', '-t', 'tar', tarname]) if len(dist_scripts) > 0: - print('WARNING: dist scripts not supported in Mercurial projects.') + mlog.warning('dist scripts are not supported in Mercurial projects') with lzma.open(xzname, 'wb') as xf, open(tarname, 'rb') as tf: shutil.copyfileobj(tf, xf) os.unlink(tarname) @@ -129,7 +145,7 @@ def create_dist_hg(dist_name, src_root, bld_root, dist_sub, dist_scripts): def check_dist(packagename, meson_command): - print('Testing distribution package %s.' % packagename) + print('Testing distribution package %s' % packagename) unpackdir = tempfile.mkdtemp() builddir = tempfile.mkdtemp() installdir = tempfile.mkdtemp() @@ -142,21 +158,21 @@ def check_dist(packagename, meson_command): print('Running Meson on distribution package failed') return 1 if subprocess.call([ninja_bin], cwd=builddir) != 0: - print('Compiling the distribution package failed.') + print('Compiling the distribution package failed') return 1 if subprocess.call([ninja_bin, 'test'], cwd=builddir) != 0: - print('Running unit tests on the distribution package failed.') + print('Running unit tests on the distribution package failed') return 1 myenv = os.environ.copy() myenv['DESTDIR'] = installdir if subprocess.call([ninja_bin, 'install'], cwd=builddir, env=myenv) != 0: - print('Installing the distribution package failed.') + print('Installing the distribution package failed') return 1 finally: shutil.rmtree(unpackdir) shutil.rmtree(builddir) shutil.rmtree(installdir) - print('Distribution package %s tested.' 
% packagename) + print('Distribution package %s tested' % packagename) return 0 def run(args): @@ -177,7 +193,7 @@ def run(args): elif os.path.isdir(os.path.join(src_root, '.hg')): names = create_dist_hg(dist_name, src_root, bld_root, dist_sub, build.dist_scripts) else: - print('Dist currently only works with Git or Mercurial repos.') + print('Dist currently only works with Git or Mercurial repos') return 1 if names is None: return 1 diff --git a/mesonbuild/scripts/gtkdochelper.py b/mesonbuild/scripts/gtkdochelper.py index bf3d9f6..01ced5b 100644 --- a/mesonbuild/scripts/gtkdochelper.py +++ b/mesonbuild/scripts/gtkdochelper.py @@ -28,6 +28,7 @@ parser.add_argument('--subdir', dest='subdir') parser.add_argument('--headerdirs', dest='headerdirs') parser.add_argument('--mainfile', dest='mainfile') parser.add_argument('--modulename', dest='modulename') +parser.add_argument('--moduleversion', dest='moduleversion') parser.add_argument('--htmlargs', dest='htmlargs', default='') parser.add_argument('--scanargs', dest='scanargs', default='') parser.add_argument('--scanobjsargs', dest='scanobjsargs', default='') @@ -65,7 +66,7 @@ def gtkdoc_run_check(cmd, cwd, library_paths=None): # This preserves the order of messages. 
p, out = Popen_safe(cmd, cwd=cwd, env=env, stderr=subprocess.STDOUT)[0:2] if p.returncode != 0: - err_msg = ["{!r} failed with status {:d}".format(cmd[0], p.returncode)] + err_msg = ["{!r} failed with status {:d}".format(cmd, p.returncode)] if out: err_msg.append(out) raise MesonException('\n'.join(err_msg)) @@ -73,7 +74,7 @@ def gtkdoc_run_check(cmd, cwd, library_paths=None): print(out) def build_gtkdoc(source_root, build_root, doc_subdir, src_subdirs, - main_file, module, + main_file, module, module_version, html_args, scan_args, fixxref_args, mkdb_args, gobject_typesfile, scanobjs_args, run, ld, cc, ldflags, cflags, html_assets, content_files, ignore_headers, namespace, @@ -191,7 +192,7 @@ def build_gtkdoc(source_root, build_root, doc_subdir, src_subdirs, else: mkhtml_cmd.append('%s-docs.xml' % module) # html gen must be run in the HTML dir - gtkdoc_run_check(mkhtml_cmd, os.path.join(abs_out, 'html')) + gtkdoc_run_check(mkhtml_cmd, htmldir) # Fix cross-references in HTML files fixref_cmd = ['gtkdoc-fixxref', @@ -199,6 +200,10 @@ def build_gtkdoc(source_root, build_root, doc_subdir, src_subdirs, '--module-dir=html'] + fixxref_args gtkdoc_run_check(fixref_cmd, abs_out) + if module_version: + shutil.move(os.path.join(htmldir, '{}.devhelp2'.format(module)), + os.path.join(htmldir, '{}-{}.devhelp2'.format(module, module_version))) + def install_gtkdoc(build_root, doc_subdir, install_prefix, datadir, module): source = os.path.join(build_root, doc_subdir, 'html') final_destination = os.path.join(install_prefix, datadir, module) @@ -234,6 +239,7 @@ def run(args): options.headerdirs.split('@@'), options.mainfile, options.modulename, + options.moduleversion, htmlargs, scanargs, fixxrefargs, @@ -255,7 +261,12 @@ def run(args): if 'MESON_INSTALL_PREFIX' in os.environ: destdir = os.environ.get('DESTDIR', '') install_prefix = destdir_join(destdir, os.environ['MESON_INSTALL_PREFIX']) - install_dir = options.install_dir if options.install_dir else options.modulename + if 
options.install_dir: + install_dir = options.install_dir + else: + install_dir = options.modulename + if options.moduleversion: + install_dir += '-' + options.moduleversion if os.path.isabs(install_dir): install_dir = destdir_join(destdir, install_dir) install_gtkdoc(options.builddir, diff --git a/mesonbuild/scripts/meson_exe.py b/mesonbuild/scripts/meson_exe.py index 84abfc3..23c7334 100644 --- a/mesonbuild/scripts/meson_exe.py +++ b/mesonbuild/scripts/meson_exe.py @@ -81,6 +81,8 @@ def run_exe(exe): if exe.capture and p.returncode == 0: with open(exe.capture, 'wb') as output: output.write(stdout) + else: + sys.stdout.buffer.write(stdout) if stderr: sys.stderr.buffer.write(stderr) return p.returncode diff --git a/mesonbuild/scripts/scanbuild.py b/mesonbuild/scripts/scanbuild.py index f381552..1c86bf1 100644 --- a/mesonbuild/scripts/scanbuild.py +++ b/mesonbuild/scripts/scanbuild.py @@ -13,16 +13,17 @@ # limitations under the License. import os +import shlex import subprocess import shutil import tempfile from ..environment import detect_ninja from ..mesonlib import Popen_safe -def scanbuild(exename, srcdir, blddir, privdir, logdir, args): +def scanbuild(exelist, srcdir, blddir, privdir, logdir, args): with tempfile.TemporaryDirectory(dir=privdir) as scandir: - meson_cmd = [exename] + args - build_cmd = [exename, '-o', logdir, detect_ninja(), '-C', scandir] + meson_cmd = exelist + args + build_cmd = exelist + ['-o', logdir, detect_ninja(), '-C', scandir] rc = subprocess.call(meson_cmd + [srcdir, scandir]) if rc != 0: return rc @@ -58,8 +59,14 @@ def run(args): toolname = tool break - exename = os.environ.get('SCANBUILD', toolname) - if not shutil.which(exename): - print('Scan-build not installed.') + if 'SCANBUILD' in os.environ: + exelist = shlex.split(os.environ['SCANBUILD']) + else: + exelist = [toolname] + + try: + Popen_safe(exelist + ['--help']) + except OSError: + print('Could not execute scan-build "%s"' % ' '.join(exelist)) return 1 - return 
scanbuild(exename, srcdir, blddir, privdir, logdir, meson_cmd) + return scanbuild(exelist, srcdir, blddir, privdir, logdir, meson_cmd) diff --git a/mesonbuild/wrap/__init__.py b/mesonbuild/wrap/__init__.py index b792dfa..6be2c44 100644 --- a/mesonbuild/wrap/__init__.py +++ b/mesonbuild/wrap/__init__.py @@ -33,6 +33,15 @@ from enum import Enum # Note that these options do not affect subprojects that # are git submodules since those are only usable in git # repositories, and you almost always want to download them. + +# This did _not_ work when inside the WrapMode class. +# I don't know why. If you can fix this, patches welcome. +string_to_value = {'default': 1, + 'nofallback': 2, + 'nodownload': 3, + 'forcefallback': 4, + } + class WrapMode(Enum): default = 1 nofallback = 2 @@ -41,3 +50,8 @@ class WrapMode(Enum): def __str__(self): return self.name + + @staticmethod + def from_string(mode_name): + g = string_to_value[mode_name] + return WrapMode(g) diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py index 5cc2225..f4134d3 100644 --- a/mesonbuild/wrap/wrap.py +++ b/mesonbuild/wrap/wrap.py @@ -17,9 +17,9 @@ import contextlib import urllib.request, os, hashlib, shutil, tempfile, stat import subprocess import sys -from pathlib import Path +import configparser from . 
import WrapMode -from ..mesonlib import Popen_safe +from ..mesonlib import MesonException try: import ssl @@ -41,8 +41,11 @@ def build_ssl_context(): return ctx def quiet_git(cmd, workingdir): - pc = subprocess.Popen(['git', '-C', workingdir] + cmd, stdin=subprocess.DEVNULL, - stdout=subprocess.PIPE, stderr=subprocess.PIPE) + try: + pc = subprocess.Popen(['git', '-C', workingdir] + cmd, stdin=subprocess.DEVNULL, + stdout=subprocess.PIPE, stderr=subprocess.PIPE) + except FileNotFoundError as e: + return False, str(e) out, err = pc.communicate() if pc.returncode != 0: return False, err @@ -67,206 +70,183 @@ def open_wrapdburl(urlstring): urlstring = 'http' + urlstring[5:] return urllib.request.urlopen(urlstring, timeout=req_timeout) +class WrapException(MesonException): + pass + +class WrapNotFoundException(WrapException): + pass class PackageDefinition: def __init__(self, fname): - self.values = {} - with open(fname) as ifile: - first = ifile.readline().strip() - - if first == '[wrap-file]': - self.type = 'file' - elif first == '[wrap-git]': - self.type = 'git' - elif first == '[wrap-hg]': - self.type = 'hg' - elif first == '[wrap-svn]': - self.type = 'svn' - else: - raise RuntimeError('Invalid format of package file') - for line in ifile: - line = line.strip() - if line == '': - continue - (k, v) = line.split('=', 1) - k = k.strip() - v = v.strip() - self.values[k] = v + self.filename = fname + self.basename = os.path.basename(fname) + self.name = self.basename[:-5] + try: + self.config = configparser.ConfigParser(interpolation=None) + self.config.read(fname) + except: + raise WrapException('Failed to parse {}'.format(self.basename)) + if len(self.config.sections()) < 1: + raise WrapException('Missing sections in {}'.format(self.basename)) + self.wrap_section = self.config.sections()[0] + if not self.wrap_section.startswith('wrap-'): + m = '{!r} is not a valid first section in {}' + raise WrapException(m.format(self.wrap_section, self.basename)) + self.type = 
self.wrap_section[5:] + self.values = dict(self.config[self.wrap_section]) def get(self, key): - return self.values[key] + try: + return self.values[key] + except KeyError: + m = 'Missing key {!r} in {}' + raise WrapException(m.format(key, self.basename)) def has_patch(self): return 'patch_url' in self.values class Resolver: - def __init__(self, subdir_root, wrap_mode=WrapMode(1)): + def __init__(self, subdir_root, wrap_mode=WrapMode.default): self.wrap_mode = wrap_mode self.subdir_root = subdir_root self.cachedir = os.path.join(self.subdir_root, 'packagecache') def resolve(self, packagename): - # Check if the directory is already resolved - dirname = Path(os.path.join(self.subdir_root, packagename)) - subprojdir = os.path.join(*dirname.parts[-2:]) - if dirname.is_dir(): - if (dirname / 'meson.build').is_file(): - # The directory is there and has meson.build? Great, use it. - return packagename - # Is the dir not empty and also not a git submodule dir that is - # not checkout properly? Can't do anything, exception! - elif next(dirname.iterdir(), None) and not (dirname / '.git').is_file(): - m = '{!r} is not empty and has no meson.build files' - raise RuntimeError(m.format(subprojdir)) - elif dirname.exists(): - m = '{!r} already exists and is not a dir; cannot use as subproject' - raise RuntimeError(m.format(subprojdir)) - - dirname = str(dirname) + self.packagename = packagename + self.directory = packagename + # We always have to load the wrap file, if it exists, because it could + # override the default directory name. + self.wrap = self.load_wrap() + if self.wrap and 'directory' in self.wrap.values: + self.directory = self.wrap.get('directory') + if os.path.dirname(self.directory): + raise WrapException('Directory key must be a name and not a path') + self.dirname = os.path.join(self.subdir_root, self.directory) + meson_file = os.path.join(self.dirname, 'meson.build') + + # The directory is there and has meson.build? Great, use it. 
+ if os.path.exists(meson_file): + return self.directory + # Check if the subproject is a git submodule - if self.resolve_git_submodule(dirname): - return packagename + self.resolve_git_submodule() + + if os.path.exists(self.dirname): + if not os.path.isdir(self.dirname): + raise WrapException('Path already exists but is not a directory') + else: + # A wrap file is required to download + if not self.wrap: + m = 'Subproject directory not found and {}.wrap file not found' + raise WrapNotFoundException(m.format(self.packagename)) + + if self.wrap.type == 'file': + self.get_file() + else: + self.check_can_download() + if self.wrap.type == 'git': + self.get_git() + elif self.wrap.type == "hg": + self.get_hg() + elif self.wrap.type == "svn": + self.get_svn() + else: + raise WrapException('Unknown wrap type {!r}'.format(self.wrap.type)) + + # A meson.build file is required in the directory + if not os.path.exists(meson_file): + raise WrapException('Subproject exists but has no meson.build file') + return self.directory + + def load_wrap(self): + fname = os.path.join(self.subdir_root, self.packagename + '.wrap') + if os.path.isfile(fname): + return PackageDefinition(fname) + return None + + def check_can_download(self): # Don't download subproject data based on wrap file if requested. # Git submodules are ok (see above)! if self.wrap_mode is WrapMode.nodownload: m = 'Automatic wrap-based subproject downloading is disabled' - raise RuntimeError(m) - - # Check if there's a .wrap file for this subproject - fname = os.path.join(self.subdir_root, packagename + '.wrap') - if not os.path.isfile(fname): - # No wrap file with this name? Give up. 
- m = 'No {}.wrap found for {!r}' - raise RuntimeError(m.format(packagename, subprojdir)) - p = PackageDefinition(fname) - if p.type == 'file': - if not os.path.isdir(self.cachedir): - os.mkdir(self.cachedir) - self.download(p, packagename) - self.extract_package(p) - elif p.type == 'git': - self.get_git(p) - elif p.type == "hg": - self.get_hg(p) - elif p.type == "svn": - self.get_svn(p) - else: - raise AssertionError('Unreachable code.') - return p.get('directory') + raise WrapException(m) - def resolve_git_submodule(self, dirname): + def resolve_git_submodule(self): # Are we in a git repository? ret, out = quiet_git(['rev-parse'], self.subdir_root) if not ret: return False # Is `dirname` a submodule? - ret, out = quiet_git(['submodule', 'status', dirname], self.subdir_root) + ret, out = quiet_git(['submodule', 'status', self.dirname], self.subdir_root) if not ret: return False # Submodule has not been added, add it if out.startswith(b'+'): - mlog.warning('git submodule {} might be out of date'.format(dirname)) + mlog.warning('git submodule might be out of date') return True elif out.startswith(b'U'): - raise RuntimeError('submodule {} has merge conflicts'.format(dirname)) + raise WrapException('git submodule has merge conflicts') # Submodule exists, but is deinitialized or wasn't initialized elif out.startswith(b'-'): - if subprocess.call(['git', '-C', self.subdir_root, 'submodule', 'update', '--init', dirname]) == 0: + if subprocess.call(['git', '-C', self.subdir_root, 'submodule', 'update', '--init', self.dirname]) == 0: return True - raise RuntimeError('Failed to git submodule init {!r}'.format(dirname)) + raise WrapException('git submodule failed to init') # Submodule looks fine, but maybe it wasn't populated properly. Do a checkout. 
elif out.startswith(b' '): - subprocess.call(['git', 'checkout', '.'], cwd=dirname) + subprocess.call(['git', 'checkout', '.'], cwd=self.dirname) # Even if checkout failed, try building it anyway and let the user # handle any problems manually. return True + elif out == b'': + # It is not a submodule, just a folder that exists in the main repository. + return False m = 'Unknown git submodule output: {!r}' - raise RuntimeError(m.format(out)) + raise WrapException(m.format(out)) - def get_git(self, p): - checkoutdir = os.path.join(self.subdir_root, p.get('directory')) - revno = p.get('revision') - is_there = os.path.isdir(checkoutdir) - if is_there: - try: - subprocess.check_call(['git', 'rev-parse'], cwd=checkoutdir) - except subprocess.CalledProcessError: - raise RuntimeError('%s is not empty but is not a valid ' - 'git repository, we can not work with it' - ' as a subproject directory.' % ( - checkoutdir)) - - if revno.lower() == 'head': - # Failure to do pull is not a fatal error, - # because otherwise you can't develop without - # a working net connection. 
- subprocess.call(['git', 'pull'], cwd=checkoutdir) - else: - if subprocess.call(['git', 'checkout', revno], cwd=checkoutdir) != 0: - subprocess.check_call(['git', 'fetch', p.get('url'), revno], cwd=checkoutdir) - subprocess.check_call(['git', 'checkout', revno], - cwd=checkoutdir) - else: - if p.values.get('clone-recursive', '').lower() == 'true': - subprocess.check_call(['git', 'clone', '--recursive', p.get('url'), - p.get('directory')], cwd=self.subdir_root) - else: - subprocess.check_call(['git', 'clone', p.get('url'), - p.get('directory')], cwd=self.subdir_root) - if revno.lower() != 'head': - if subprocess.call(['git', 'checkout', revno], cwd=checkoutdir) != 0: - subprocess.check_call(['git', 'fetch', p.get('url'), revno], cwd=checkoutdir) - subprocess.check_call(['git', 'checkout', revno], - cwd=checkoutdir) - push_url = p.values.get('push-url') - if push_url: - subprocess.check_call(['git', 'remote', 'set-url', - '--push', 'origin', push_url], - cwd=checkoutdir) - - def get_hg(self, p): - checkoutdir = os.path.join(self.subdir_root, p.get('directory')) - revno = p.get('revision') - is_there = os.path.isdir(checkoutdir) - if is_there: - if revno.lower() == 'tip': - # Failure to do pull is not a fatal error, - # because otherwise you can't develop without - # a working net connection. 
- subprocess.call(['hg', 'pull'], cwd=checkoutdir) - else: - if subprocess.call(['hg', 'checkout', revno], cwd=checkoutdir) != 0: - subprocess.check_call(['hg', 'pull'], cwd=checkoutdir) - subprocess.check_call(['hg', 'checkout', revno], - cwd=checkoutdir) - else: - subprocess.check_call(['hg', 'clone', p.get('url'), - p.get('directory')], cwd=self.subdir_root) - if revno.lower() != 'tip': - subprocess.check_call(['hg', 'checkout', revno], - cwd=checkoutdir) - - def get_svn(self, p): - checkoutdir = os.path.join(self.subdir_root, p.get('directory')) - revno = p.get('revision') - is_there = os.path.isdir(checkoutdir) - if is_there: - p, out = Popen_safe(['svn', 'info', '--show-item', 'revision', checkoutdir]) - current_revno = out - if current_revno == revno: - return - - if revno.lower() == 'head': - # Failure to do pull is not a fatal error, - # because otherwise you can't develop without - # a working net connection. - subprocess.call(['svn', 'update'], cwd=checkoutdir) - else: - subprocess.check_call(['svn', 'update', '-r', revno], cwd=checkoutdir) + def get_file(self): + path = self.get_file_internal('source') + extract_dir = self.subdir_root + # Some upstreams ship packages that do not have a leading directory. + # Create one for them. 
+ if 'lead_directory_missing' in self.wrap.values: + os.mkdir(self.dirname) + extract_dir = self.dirname + shutil.unpack_archive(path, extract_dir) + if self.wrap.has_patch(): + self.apply_patch() + + def get_git(self): + revno = self.wrap.get('revision') + if self.wrap.values.get('clone-recursive', '').lower() == 'true': + subprocess.check_call(['git', 'clone', '--recursive', self.wrap.get('url'), + self.directory], cwd=self.subdir_root) else: - subprocess.check_call(['svn', 'checkout', '-r', revno, p.get('url'), - p.get('directory')], cwd=self.subdir_root) + subprocess.check_call(['git', 'clone', self.wrap.get('url'), + self.directory], cwd=self.subdir_root) + if revno.lower() != 'head': + if subprocess.call(['git', 'checkout', revno], cwd=self.dirname) != 0: + subprocess.check_call(['git', 'fetch', self.wrap.get('url'), revno], cwd=self.dirname) + subprocess.check_call(['git', 'checkout', revno], + cwd=self.dirname) + push_url = self.wrap.values.get('push-url') + if push_url: + subprocess.check_call(['git', 'remote', 'set-url', + '--push', 'origin', push_url], + cwd=self.dirname) + + def get_hg(self): + revno = self.wrap.get('revision') + subprocess.check_call(['hg', 'clone', self.wrap.get('url'), + self.directory], cwd=self.subdir_root) + if revno.lower() != 'tip': + subprocess.check_call(['hg', 'checkout', revno], + cwd=self.dirname) + + def get_svn(self): + revno = self.wrap.get('revision') + subprocess.check_call(['svn', 'checkout', '-r', revno, self.wrap.get('url'), + self.directory], cwd=self.subdir_root) def get_data(self, url): blocksize = 10 * 1024 @@ -312,41 +292,48 @@ class Resolver: hashvalue = h.hexdigest() return hashvalue, tmpfile.name - def get_hash(self, data): + def check_hash(self, what, path): + expected = self.wrap.get(what + '_hash') h = hashlib.sha256() - h.update(data) - hashvalue = h.hexdigest() - return hashvalue - - def download(self, p, packagename): - ofname = os.path.join(self.cachedir, p.get('source_filename')) - if 
os.path.exists(ofname): - mlog.log('Using', mlog.bold(packagename), 'from cache.') - else: - srcurl = p.get('source_url') - mlog.log('Downloading', mlog.bold(packagename), 'from', mlog.bold(srcurl)) - dhash, tmpfile = self.get_data(srcurl) - expected = p.get('source_hash') - if dhash != expected: - os.remove(tmpfile) - raise RuntimeError('Incorrect hash for source %s:\n %s expected\n %s actual.' % (packagename, expected, dhash)) - os.rename(tmpfile, ofname) - if p.has_patch(): - patch_filename = p.get('patch_filename') - filename = os.path.join(self.cachedir, patch_filename) - if os.path.exists(filename): - mlog.log('Using', mlog.bold(patch_filename), 'from cache.') - else: - purl = p.get('patch_url') - mlog.log('Downloading patch from', mlog.bold(purl)) - phash, tmpfile = self.get_data(purl) - expected = p.get('patch_hash') - if phash != expected: - os.remove(tmpfile) - raise RuntimeError('Incorrect hash for patch %s:\n %s expected\n %s actual' % (packagename, expected, phash)) - os.rename(tmpfile, filename) - else: - mlog.log('Package does not require patch.') + with open(path, 'rb') as f: + h.update(f.read()) + dhash = h.hexdigest() + if dhash != expected: + raise WrapException('Incorrect hash for %s:\n %s expected\n %s actual.' % (what, expected, dhash)) + + def download(self, what, ofname): + self.check_can_download() + srcurl = self.wrap.get(what + '_url') + mlog.log('Downloading', mlog.bold(self.packagename), what, 'from', mlog.bold(srcurl)) + dhash, tmpfile = self.get_data(srcurl) + expected = self.wrap.get(what + '_hash') + if dhash != expected: + os.remove(tmpfile) + raise WrapException('Incorrect hash for %s:\n %s expected\n %s actual.' 
% (what, expected, dhash)) + os.rename(tmpfile, ofname) + + def get_file_internal(self, what): + filename = self.wrap.get(what + '_filename') + cache_path = os.path.join(self.cachedir, filename) + + if os.path.exists(cache_path): + self.check_hash(what, cache_path) + mlog.log('Using', mlog.bold(self.packagename), what, 'from cache.') + return cache_path + + if not os.path.isdir(self.cachedir): + os.mkdir(self.cachedir) + self.download(what, cache_path) + return cache_path + + def apply_patch(self): + path = self.get_file_internal('patch') + try: + shutil.unpack_archive(path, self.subdir_root) + except Exception: + with tempfile.TemporaryDirectory() as workdir: + shutil.unpack_archive(path, workdir) + self.copy_tree(workdir, self.subdir_root) def copy_tree(self, root_src_dir, root_dst_dir): """ @@ -366,36 +353,3 @@ class Resolver: os.chmod(dst_file, stat.S_IWUSR) os.remove(dst_file) shutil.copy2(src_file, dst_dir) - - def extract_package(self, package): - if sys.version_info < (3, 5): - try: - import lzma # noqa: F401 - del lzma - except ImportError: - pass - else: - try: - shutil.register_unpack_format('xztar', ['.tar.xz', '.txz'], shutil._unpack_tarfile, [], "xz'ed tar-file") - except shutil.RegistryError: - pass - target_dir = os.path.join(self.subdir_root, package.get('directory')) - if os.path.isdir(target_dir): - return - extract_dir = self.subdir_root - # Some upstreams ship packages that do not have a leading directory. - # Create one for them. 
- try: - package.get('lead_directory_missing') - os.mkdir(target_dir) - extract_dir = target_dir - except KeyError: - pass - shutil.unpack_archive(os.path.join(self.cachedir, package.get('source_filename')), extract_dir) - if package.has_patch(): - try: - shutil.unpack_archive(os.path.join(self.cachedir, package.get('patch_filename')), self.subdir_root) - except Exception: - with tempfile.TemporaryDirectory() as workdir: - shutil.unpack_archive(os.path.join(self.cachedir, package.get('patch_filename')), workdir) - self.copy_tree(workdir, self.subdir_root) diff --git a/mesonbuild/wrap/wraptool.py b/mesonbuild/wrap/wraptool.py index 364452d..132decf 100644 --- a/mesonbuild/wrap/wraptool.py +++ b/mesonbuild/wrap/wraptool.py @@ -16,7 +16,6 @@ import json import sys, os import configparser import shutil -import argparse from glob import glob @@ -105,16 +104,24 @@ def install(options): f.write(data) print('Installed', name, 'branch', branch, 'revision', revision) +def parse_patch_url(patch_url): + arr = patch_url.split('/') + return arr[-3], int(arr[-2]) + def get_current_version(wrapfile): cp = configparser.ConfigParser() cp.read(wrapfile) cp = cp['wrap-file'] patch_url = cp['patch_url'] - arr = patch_url.split('/') - branch = arr[-3] - revision = int(arr[-2]) + branch, revision = parse_patch_url(patch_url) return branch, revision, cp['directory'], cp['source_filename'], cp['patch_filename'] +def update_wrap_file(wrapfile, name, new_branch, new_revision): + u = open_wrapdburl(API_ROOT + 'projects/%s/%s/%d/get_wrap' % (name, new_branch, new_revision)) + data = u.read() + with open(wrapfile, 'wb') as f: + f.write(data) + def update(options): name = options.name if not os.path.isdir('subprojects'): @@ -129,8 +136,7 @@ def update(options): if new_branch == branch and new_revision == revision: print('Project', name, 'is already up to date.') sys.exit(0) - u = open_wrapdburl(API_ROOT + 'projects/%s/%s/%d/get_wrap' % (name, new_branch, new_revision)) - data = u.read() + 
update_wrap_file(wrapfile, name, new_branch, new_revision) shutil.rmtree(os.path.join('subprojects', subdir), ignore_errors=True) try: os.unlink(os.path.join('subprojects/packagecache', src_file)) @@ -140,8 +146,6 @@ def update(options): os.unlink(os.path.join('subprojects/packagecache', patch_file)) except FileNotFoundError: pass - with open(wrapfile, 'wb') as f: - f.write(data) print('Updated', name, 'to branch', new_branch, 'revision', new_revision) def info(options): @@ -208,9 +212,6 @@ def status(options): else: print('', name, 'not up to date. Have %s %d, but %s %d is available.' % (current_branch, current_revision, latest_branch, latest_revision)) -def run(args): - parser = argparse.ArgumentParser(prog='wraptool') - add_arguments(parser) - options = parser.parse_args(args) +def run(options): options.wrap_func(options) return 0 diff --git a/run_cross_test.py b/run_cross_test.py index 7191402..b2ef6be 100755 --- a/run_cross_test.py +++ b/run_cross_test.py @@ -25,26 +25,34 @@ Eventually migrate to something fancier.''' import sys import os from pathlib import Path +import argparse from run_project_tests import gather_tests, run_tests, StopException, setup_commands from run_project_tests import failing_logs -def runtests(cross_file): +def runtests(cross_file, failfast): commontests = [('common', gather_tests(Path('test cases', 'common')), False)] try: - (passing_tests, failing_tests, skipped_tests) = run_tests(commontests, 'meson-cross-test-run', ['--cross', cross_file]) + (passing_tests, failing_tests, skipped_tests) = \ + run_tests(commontests, 'meson-cross-test-run', failfast, ['--cross', cross_file]) except StopException: pass print('\nTotal passed cross tests:', passing_tests) print('Total failed cross tests:', failing_tests) print('Total skipped cross tests:', skipped_tests) - if failing_tests > 0 and ('TRAVIS' in os.environ or 'APPVEYOR' in os.environ): + if failing_tests > 0 and ('CI' in os.environ): print('\nMesonlogs of failing tests\n') - for l in 
failing_logs: - print(l, '\n') - sys.exit(failing_tests) + for log in failing_logs: + print(log, '\n') + return failing_tests + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--failfast', action='store_true') + parser.add_argument('cross_file') + options = parser.parse_args() + setup_commands('ninja') + return runtests(options.cross_file, options.failfast) if __name__ == '__main__': - setup_commands('ninja') - cross_file = sys.argv[1] - runtests(cross_file) + sys.exit(main()) diff --git a/run_meson_command_tests.py b/run_meson_command_tests.py index fd33856..e7eab72 100755 --- a/run_meson_command_tests.py +++ b/run_meson_command_tests.py @@ -14,6 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import sys import os import tempfile import unittest @@ -23,11 +24,6 @@ from pathlib import Path from mesonbuild.mesonlib import windows_proof_rmtree, python_command, is_windows -# Find the meson.py adjacent to us -meson_py = Path(__file__).resolve().parent / 'meson.py' -if not meson_py.is_file(): - raise RuntimeError("meson.py not found: test must only run from git") - def get_pypath(): import sysconfig pypath = sysconfig.get_path('purelib', vars={'base': ''}) @@ -67,15 +63,14 @@ class CommandTests(unittest.TestCase): def _run(self, command, workdir=None): ''' - Run a command while printing the stdout and stderr to stdout, - and also return a copy of it + Run a command while printing the stdout, and also return a copy of it ''' # If this call hangs CI will just abort. It is very hard to distinguish # between CI issue and test bug in that case. Set timeout and fail loud # instead. 
p = subprocess.run(command, stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, env=os.environ.copy(), - universal_newlines=True, cwd=workdir, timeout=60 * 5) + env=os.environ.copy(), universal_newlines=True, + cwd=workdir, timeout=60 * 5) print(p.stdout) if p.returncode != 0: raise subprocess.CalledProcessError(p.returncode, command) @@ -128,7 +123,9 @@ class CommandTests(unittest.TestCase): pylibdir = prefix / get_pypath() bindir = prefix / get_pybindir() pylibdir.mkdir(parents=True) - os.environ['PYTHONPATH'] = str(pylibdir) + # XXX: join with empty name so it always ends with os.sep otherwise + # distutils complains that prefix isn't contained in PYTHONPATH + os.environ['PYTHONPATH'] = os.path.join(str(pylibdir), '') os.environ['PATH'] = str(bindir) + os.pathsep + os.environ['PATH'] self._run(python_command + ['setup.py', 'install', '--prefix', str(prefix)]) # Check that all the files were installed correctly @@ -176,8 +173,7 @@ class CommandTests(unittest.TestCase): builddir = str(self.tmpdir / 'build4') (bindir / 'meson').rename(bindir / 'meson.real') wrapper = (bindir / 'meson') - with open(str(wrapper), 'w') as f: - f.write('#!/bin/sh\n\nmeson.real "$@"') + wrapper.open('w').write('#!/bin/sh\n\nmeson.real "$@"') wrapper.chmod(0o755) meson_setup = [str(wrapper), 'setup'] meson_command = meson_setup + self.meson_args @@ -195,5 +191,6 @@ class CommandTests(unittest.TestCase): zipapp.create_archive(source=source, target=target, interpreter=python_command[0], main=None) self._run([target.as_posix(), '--help']) + if __name__ == '__main__': - unittest.main(buffer=True) + sys.exit(unittest.main(buffer=True)) diff --git a/run_project_tests.py b/run_project_tests.py index 27e588b..0d64f47 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -36,11 +36,12 @@ import argparse import xml.etree.ElementTree as ET import time import multiprocessing -from concurrent.futures import ProcessPoolExecutor +from concurrent.futures import ProcessPoolExecutor, 
CancelledError import re from run_tests import get_fake_options, run_configure, get_meson_script from run_tests import get_backend_commands, get_backend_args_for_dir, Backend from run_tests import ensure_backend_detects_changes +from run_tests import guess_backend class BuildStep(Enum): @@ -81,7 +82,7 @@ class AutoDeletedDir: failing_logs = [] print_debug = 'MESON_PRINT_TEST_OUTPUT' in os.environ -under_ci = not {'TRAVIS', 'APPVEYOR'}.isdisjoint(os.environ) +under_ci = 'CI' in os.environ do_debug = under_ci or print_debug no_meson_log_msg = 'No meson-log.txt found.' @@ -101,26 +102,7 @@ signal.signal(signal.SIGTERM, stop_handler) def setup_commands(optbackend): global do_debug, backend, backend_flags global compile_commands, clean_commands, test_commands, install_commands, uninstall_commands - backend = optbackend - msbuild_exe = shutil.which('msbuild') - # Auto-detect backend if unspecified - if backend is None: - if msbuild_exe is not None: - backend = 'vs' # Meson will auto-detect VS version to use - else: - backend = 'ninja' - # Set backend arguments for Meson - if backend.startswith('vs'): - backend_flags = ['--backend=' + backend] - backend = Backend.vs - elif backend == 'xcode': - backend_flags = ['--backend=xcode'] - backend = Backend.xcode - elif backend == 'ninja': - backend_flags = ['--backend=ninja'] - backend = Backend.ninja - else: - raise RuntimeError('Unknown backend: {!r}'.format(backend)) + backend, backend_flags = guess_backend(optbackend, shutil.which('msbuild')) compile_commands, clean_commands, test_commands, install_commands, \ uninstall_commands = get_backend_commands(backend, do_debug) @@ -136,10 +118,25 @@ def get_relative_files_list_from_dir(fromdir): return paths def platform_fix_name(fname, compiler, env): + # canonicalize compiler + if compiler == 'clang-cl': + canonical_compiler = 'msvc' + else: + canonical_compiler = compiler + if '?lib' in fname: - if mesonlib.for_cygwin(env.is_cross_build(), env): + if 
mesonlib.for_windows(env.is_cross_build(), env) and canonical_compiler == 'msvc': + fname = re.sub(r'lib/\?lib(.*)\.', r'bin/\1.', fname) + fname = re.sub(r'/\?lib/', r'/bin/', fname) + elif mesonlib.for_windows(env.is_cross_build(), env): + fname = re.sub(r'lib/\?lib(.*)\.', r'bin/lib\1.', fname) + fname = re.sub(r'\?lib(.*)\.dll$', r'lib\1.dll', fname) + fname = re.sub(r'/\?lib/', r'/bin/', fname) + elif mesonlib.for_cygwin(env.is_cross_build(), env): fname = re.sub(r'lib/\?lib(.*)\.so$', r'bin/cyg\1.dll', fname) + fname = re.sub(r'lib/\?lib(.*)\.', r'bin/cyg\1.', fname) fname = re.sub(r'\?lib(.*)\.dll$', r'cyg\1.dll', fname) + fname = re.sub(r'/\?lib/', r'/bin/', fname) else: fname = re.sub(r'\?lib', 'lib', fname) @@ -150,17 +147,47 @@ def platform_fix_name(fname, compiler, env): if fname.startswith('?msvc:'): fname = fname[6:] - if compiler != 'cl': + if canonical_compiler != 'msvc': return None if fname.startswith('?gcc:'): fname = fname[5:] - if compiler == 'cl': + if canonical_compiler == 'msvc': return None if fname.startswith('?cygwin:'): fname = fname[8:] - if compiler == 'cl' or not mesonlib.for_cygwin(env.is_cross_build(), env): + if not mesonlib.for_cygwin(env.is_cross_build(), env): + return None + + if fname.endswith('?so'): + if mesonlib.for_windows(env.is_cross_build(), env) and canonical_compiler == 'msvc': + fname = re.sub(r'lib/([^/]*)\?so$', r'bin/\1.dll', fname) + fname = re.sub(r'/(?:lib|)([^/]*?)\?so$', r'/\1.dll', fname) + return fname + elif mesonlib.for_windows(env.is_cross_build(), env): + fname = re.sub(r'lib/([^/]*)\?so$', r'bin/\1.dll', fname) + fname = re.sub(r'/([^/]*?)\?so$', r'/\1.dll', fname) + return fname + elif mesonlib.for_cygwin(env.is_cross_build(), env): + fname = re.sub(r'lib/([^/]*)\?so$', r'bin/\1.dll', fname) + fname = re.sub(r'/lib([^/]*?)\?so$', r'/cyg\1.dll', fname) + fname = re.sub(r'/([^/]*?)\?so$', r'/\1.dll', fname) + return fname + elif mesonlib.for_darwin(env.is_cross_build(), env): + return fname[:-3] + 
'.dylib' + else: + return fname[:-3] + '.so' + + if fname.endswith('?implib') or fname.endswith('?implibempty'): + if mesonlib.for_windows(env.is_cross_build(), env) and canonical_compiler == 'msvc': + # only MSVC doesn't generate empty implibs + if fname.endswith('?implibempty') and compiler == 'msvc': + return None + return re.sub(r'/(?:lib|)([^/]*?)\?implib(?:empty|)$', r'/\1.lib', fname) + elif mesonlib.for_windows(env.is_cross_build(), env) or mesonlib.for_cygwin(env.is_cross_build(), env): + return re.sub(r'\?implib(?:empty|)$', r'.dll.a', fname) + else: return None return fname @@ -247,12 +274,12 @@ def run_test_inprocess(testdir): os.chdir(testdir) test_log_fname = Path('meson-logs', 'testlog.txt') try: - returncode_test = mtest.run(['--no-rebuild']) + returncode_test = mtest.run_with_args(['--no-rebuild']) if test_log_fname.exists(): test_log = test_log_fname.open(errors='ignore').read() else: test_log = '' - returncode_benchmark = mtest.run(['--no-rebuild', '--benchmark', '--logbase', 'benchmarklog']) + returncode_benchmark = mtest.run_with_args(['--no-rebuild', '--benchmark', '--logbase', 'benchmarklog']) finally: sys.stdout = old_stdout sys.stderr = old_stderr @@ -393,6 +420,7 @@ def _run_test(testdir, test_build_dir, install_dir, extra_args, compiler, backen def gather_tests(testdir: Path): tests = [t.name for t in testdir.glob('*')] + tests = [t for t in tests if not t.startswith('.')] # Filter non-tests files (dot files, etc) testlist = [(int(t.split()[0]), t) for t in tests] testlist.sort() tests = [testdir / t[1] for t in testlist] @@ -522,14 +550,14 @@ def detect_tests_to_run(): gathered_tests = [(name, gather_tests(Path('test cases', subdir)), skip) for name, subdir, skip in all_tests] return gathered_tests -def run_tests(all_tests, log_name_base, extra_args): +def run_tests(all_tests, log_name_base, failfast, extra_args): global logfile txtname = log_name_base + '.txt' with open(txtname, 'w', encoding='utf-8', errors='ignore') as lf: logfile = 
lf - return _run_tests(all_tests, log_name_base, extra_args) + return _run_tests(all_tests, log_name_base, failfast, extra_args) -def _run_tests(all_tests, log_name_base, extra_args): +def _run_tests(all_tests, log_name_base, failfast, extra_args): global stop, executor, futures, system_compiler xmlname = log_name_base + '.xml' junit_root = ET.Element('testsuites') @@ -577,7 +605,10 @@ def _run_tests(all_tests, log_name_base, extra_args): futures.append((testname, t, result)) for (testname, t, result) in futures: sys.stdout.flush() - result = result.result() + try: + result = result.result() + except CancelledError: + continue if (result is None) or (('MESON_SKIP_TEST' in result.stdo) and (skippable(name, t.as_posix()))): print(yellow('Skipping:'), t.as_posix()) current_test = ET.SubElement(current_suite, 'testcase', {'name': testname, @@ -598,6 +629,10 @@ def _run_tests(all_tests, log_name_base, extra_args): else: failing_logs.append(result.stdo) failing_logs.append(result.stde) + if failfast: + print("Cancelling the rest of the tests") + for (_, _, res) in futures: + res.cancel() else: print('Succeeded test%s: %s' % (without_install, t.as_posix())) passing_tests += 1 @@ -615,6 +650,10 @@ def _run_tests(all_tests, log_name_base, extra_args): stdoel.text = result.stdo stdeel = ET.SubElement(current_test, 'system-err') stdeel.text = result.stde + + if failfast and failing_tests > 0: + break + print("\nTotal configuration time: %.2fs" % conf_time) print("Total build time: %.2fs" % build_time) print("Total test time: %.2fs" % test_time) @@ -693,14 +732,14 @@ def check_meson_commands_work(): def detect_system_compiler(): global system_compiler - if shutil.which('cl'): - system_compiler = 'cl' - elif shutil.which('cc'): - system_compiler = 'cc' - elif shutil.which('gcc'): - system_compiler = 'gcc' - else: - raise RuntimeError("Could not find C compiler.") + + with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir='.')) as build_dir: + env = environment.Environment(None, 
build_dir, get_fake_options('/')) + try: + comp = env.detect_c_compiler(env.is_cross_build()) + except: + raise RuntimeError("Could not find C compiler.") + system_compiler = comp.get_id() if __name__ == '__main__': parser = argparse.ArgumentParser(description="Run the test suite of Meson.") @@ -708,6 +747,8 @@ if __name__ == '__main__': help='arguments that are passed directly to Meson (remember to have -- before these).') parser.add_argument('--backend', default=None, dest='backend', choices=backendlist) + parser.add_argument('--failfast', action='store_true', + help='Stop running if test case fails') options = parser.parse_args() setup_commands(options.backend) @@ -719,7 +760,7 @@ if __name__ == '__main__': check_meson_commands_work() try: all_tests = detect_tests_to_run() - (passing_tests, failing_tests, skipped_tests) = run_tests(all_tests, 'meson-test-run', options.extra_args) + (passing_tests, failing_tests, skipped_tests) = run_tests(all_tests, 'meson-test-run', options.failfast, options.extra_args) except StopException: pass print('\nTotal passed tests:', green(str(passing_tests))) diff --git a/run_tests.py b/run_tests.py index a5fd7a5..ebee602 100755 --- a/run_tests.py +++ b/run_tests.py @@ -21,17 +21,40 @@ import shutil import subprocess import tempfile import platform +import argparse from io import StringIO from enum import Enum from glob import glob from pathlib import Path - import mesonbuild from mesonbuild import mesonlib from mesonbuild import mesonmain from mesonbuild import mtest from mesonbuild import mlog from mesonbuild.environment import Environment, detect_ninja +from mesonbuild.coredata import backendlist + +def guess_backend(backend, msbuild_exe): + # Auto-detect backend if unspecified + backend_flags = [] + if backend is None: + if msbuild_exe is not None: + backend = 'vs' # Meson will auto-detect VS version to use + else: + backend = 'ninja' + # Set backend arguments for Meson + if backend.startswith('vs'): + backend_flags = 
['--backend=' + backend] + backend = Backend.vs + elif backend == 'xcode': + backend_flags = ['--backend=xcode'] + backend = Backend.xcode + elif backend == 'ninja': + backend_flags = ['--backend=ninja'] + backend = Backend.ninja + else: + raise RuntimeError('Unknown backend: {!r}'.format(backend)) + return (backend, backend_flags) # Fake classes and objects for mocking @@ -50,6 +73,7 @@ def get_fake_options(prefix): opts.wrap_mode = None opts.prefix = prefix opts.cmd_line_options = {} + opts.native_file = [] return opts def get_fake_env(sdir, bdir, prefix): @@ -106,9 +130,9 @@ def find_vcxproj_with_target(builddir, target): import re, fnmatch t, ext = os.path.splitext(target) if ext: - p = '{}\s*\{}'.format(t, ext) + p = r'{}\s*\{}'.format(t, ext) else: - p = '{}'.format(t) + p = r'{}'.format(t) for root, dirs, files in os.walk(builddir): for f in fnmatch.filter(files, '*.vcxproj'): f = os.path.join(builddir, f) @@ -143,7 +167,9 @@ def get_backend_commands(backend, debug=False): test_cmd = cmd + ['RUN_TESTS.vcxproj'] elif backend is Backend.xcode: cmd = ['xcodebuild'] - clean_cmd = cmd + ['-alltargets', 'clean'] + # In Xcode9 new build system's clean command fails when using a custom build directory. 
+ # Maybe use it when CI uses Xcode10 we can remove '-UseNewBuildSystem=FALSE' + clean_cmd = cmd + ['-alltargets', 'clean', '-UseNewBuildSystem=FALSE'] test_cmd = cmd + ['-target', 'RUN_TESTS'] elif backend is Backend.ninja: # We need at least 1.6 because of -w dupbuild=err @@ -179,7 +205,7 @@ def run_mtest_inprocess(commandlist): old_stderr = sys.stderr sys.stderr = mystderr = StringIO() try: - returncode = mtest.run(commandlist) + returncode = mtest.run_with_args(commandlist) finally: sys.stdout = old_stdout sys.stderr = old_stderr @@ -216,34 +242,27 @@ def print_system_info(): print('System:', platform.system()) print('') -if __name__ == '__main__': +def main(): print_system_info() + parser = argparse.ArgumentParser() + parser.add_argument('--cov', action='store_true') + parser.add_argument('--backend', default=None, dest='backend', + choices=backendlist) + parser.add_argument('--cross', default=False, dest='cross', action='store_true') + parser.add_argument('--failfast', action='store_true') + (options, _) = parser.parse_known_args() # Enable coverage early... - enable_coverage = '--cov' in sys.argv + enable_coverage = options.cov if enable_coverage: os.makedirs('.coverage', exist_ok=True) sys.argv.remove('--cov') import coverage coverage.process_startup() returncode = 0 - # Iterate over list in reverse order to find the last --backend arg - backend = Backend.ninja - cross = False - # FIXME: PLEASE convert to argparse - for arg in reversed(sys.argv[1:]): - if arg.startswith('--backend'): - if arg.startswith('--backend=vs'): - backend = Backend.vs - elif arg == '--backend=xcode': - backend = Backend.xcode - if arg.startswith('--cross'): - cross = True - if arg == '--cross=mingw': - cross = 'mingw' - elif arg == '--cross=arm': - cross = 'arm' + cross = options.cross + backend, _ = guess_backend(options.backend, shutil.which('msbuild')) # Running on a developer machine? Be nice! 
- if not mesonlib.is_windows() and not mesonlib.is_haiku() and 'TRAVIS' not in os.environ: + if not mesonlib.is_windows() and not mesonlib.is_haiku() and 'CI' not in os.environ: os.nice(20) # Appveyor sets the `platform` environment variable which completely messes # up building with the vs2010 and vs2015 backends. @@ -265,26 +284,50 @@ if __name__ == '__main__': # Can't pass arguments to unit tests, so set the backend to use in the environment env = os.environ.copy() env['MESON_UNIT_TEST_BACKEND'] = backend.name - with tempfile.TemporaryDirectory() as td: + with tempfile.TemporaryDirectory() as temp_dir: # Enable coverage on all subsequent processes. if enable_coverage: - with open(os.path.join(td, 'usercustomize.py'), 'w') as f: - f.write('import coverage\n' - 'coverage.process_startup()\n') + Path(temp_dir, 'usercustomize.py').open('w').write( + 'import coverage\n' + 'coverage.process_startup()\n') env['COVERAGE_PROCESS_START'] = '.coveragerc' - env['PYTHONPATH'] = os.pathsep.join([td] + env.get('PYTHONPATH', [])) + if 'PYTHONPATH' in env: + env['PYTHONPATH'] = os.pathsep.join([temp_dir, env.get('PYTHONPATH')]) + else: + env['PYTHONPATH'] = temp_dir if not cross: - returncode += subprocess.call(mesonlib.python_command + ['run_meson_command_tests.py', '-v'], env=env) - returncode += subprocess.call(mesonlib.python_command + ['run_unittests.py', '-v'], env=env) - returncode += subprocess.call(mesonlib.python_command + ['run_project_tests.py'] + sys.argv[1:], env=env) + cmd = mesonlib.python_command + ['run_meson_command_tests.py', '-v'] + if options.failfast: + cmd += ['--failfast'] + returncode += subprocess.call(cmd, env=env) + if options.failfast and returncode != 0: + return returncode + cmd = mesonlib.python_command + ['run_unittests.py', '-v'] + if options.failfast: + cmd += ['--failfast'] + returncode += subprocess.call(cmd, env=env) + if options.failfast and returncode != 0: + return returncode + cmd = mesonlib.python_command + ['run_project_tests.py'] + 
sys.argv[1:] + returncode += subprocess.call(cmd, env=env) else: cross_test_args = mesonlib.python_command + ['run_cross_test.py'] - if cross is True or cross == 'arm': - print(mlog.bold('Running armhf cross tests.').get_text(mlog.colorize_console)) - print() - returncode += subprocess.call(cross_test_args + ['cross/ubuntu-armhf.txt'], env=env) - if cross is True or cross == 'mingw': - print(mlog.bold('Running mingw-w64 64-bit cross tests.').get_text(mlog.colorize_console)) - print() - returncode += subprocess.call(cross_test_args + ['cross/linux-mingw-w64-64bit.txt'], env=env) - sys.exit(returncode) + print(mlog.bold('Running armhf cross tests.').get_text(mlog.colorize_console)) + print() + cmd = cross_test_args + ['cross/ubuntu-armhf.txt'] + if options.failfast: + cmd += ['--failfast'] + returncode += subprocess.call(cmd, env=env) + if options.failfast and returncode != 0: + return returncode + print(mlog.bold('Running mingw-w64 64-bit cross tests.') + .get_text(mlog.colorize_console)) + print() + cmd = cross_test_args + ['cross/linux-mingw-w64-64bit.txt'] + if options.failfast: + cmd += ['--failfast'] + returncode += subprocess.call(cmd, env=env) + return returncode + +if __name__ == '__main__': + sys.exit(main()) diff --git a/run_unittests.py b/run_unittests.py index 8fe1c11..f1b2249 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -16,13 +16,17 @@ import stat import shlex import subprocess -import re, json +import re +import json import tempfile import textwrap import os import shutil +import sys import unittest import platform +import pickle +import functools from itertools import chain from unittest import mock from configparser import ConfigParser @@ -37,20 +41,22 @@ import mesonbuild.coredata import mesonbuild.modules.gnome from mesonbuild.interpreter import Interpreter, ObjectHolder from mesonbuild.mesonlib import ( - is_windows, is_osx, is_cygwin, is_dragonflybsd, is_openbsd, + is_windows, is_osx, is_cygwin, is_dragonflybsd, is_openbsd, is_haiku, 
windows_proof_rmtree, python_command, version_compare, BuildDirLock, Version ) from mesonbuild.environment import detect_ninja from mesonbuild.mesonlib import MesonException, EnvironmentException from mesonbuild.dependencies import PkgConfigDependency, ExternalProgram +from mesonbuild.build import Target import mesonbuild.modules.pkgconfig -from run_tests import exe_suffix, get_fake_env, get_meson_script -from run_tests import get_builddir_target_args, get_backend_commands, Backend -from run_tests import ensure_backend_detects_changes, run_configure_inprocess -from run_tests import run_mtest_inprocess -from run_tests import FakeBuild, FakeCompilerOptions +from run_tests import ( + Backend, FakeBuild, FakeCompilerOptions, + ensure_backend_detects_changes, exe_suffix, get_backend_commands, + get_builddir_target_args, get_fake_env, get_meson_script, + run_configure_inprocess, run_mtest_inprocess +) def get_dynamic_section_entry(fname, entry): if is_cygwin() or is_osx(): @@ -81,7 +87,7 @@ def is_tarball(): return False def is_ci(): - if 'TRAVIS' in os.environ or 'APPVEYOR' in os.environ: + if 'CI' in os.environ: return True return False @@ -98,19 +104,81 @@ def _git_init(project_dir): def skipIfNoPkgconfig(f): ''' - Skip this test if no pkg-config is found, unless we're on Travis or - Appveyor CI. This allows users to run our test suite without having + Skip this test if no pkg-config is found, unless we're on CI. + This allows users to run our test suite without having pkg-config installed on, f.ex., macOS, while ensuring that our CI does not silently skip the test because of misconfiguration. 
Note: Yes, we provide pkg-config even while running Windows CI ''' + @functools.wraps(f) def wrapped(*args, **kwargs): if not is_ci() and shutil.which('pkg-config') is None: raise unittest.SkipTest('pkg-config not found') return f(*args, **kwargs) return wrapped +def skipIfNoPkgconfigDep(depname): + ''' + Skip this test if the given pkg-config dep is not found, unless we're on CI. + ''' + def wrapper(func): + @functools.wraps(func) + def wrapped(*args, **kwargs): + if not is_ci() and shutil.which('pkg-config') is None: + raise unittest.SkipTest('pkg-config not found') + if not is_ci() and subprocess.call(['pkg-config', '--exists', depname]) != 0: + raise unittest.SkipTest('pkg-config dependency {} not found.'.format(depname)) + return func(*args, **kwargs) + return wrapped + return wrapper + +def skip_if_not_language(lang): + def wrapper(func): + @functools.wraps(func) + def wrapped(*args, **kwargs): + try: + env = get_fake_env('', '', '') + f = getattr(env, 'detect_{}_compiler'.format(lang)) + if lang in ['cs', 'vala', 'java', 'swift']: + f() + else: + f(False) + except EnvironmentException: + raise unittest.SkipTest('No {} compiler found.'.format(lang)) + return func(*args, **kwargs) + return wrapped + return wrapper + +def skip_if_env_value(value): + def wrapper(func): + @functools.wraps(func) + def wrapped(*args, **kwargs): + if value in os.environ: + raise unittest.SkipTest( + 'Environment variable "{}" set, skipping.'.format(value)) + return func(*args, **kwargs) + return wrapped + return wrapper + +def skip_if_not_base_option(feature): + """Skip tests if The compiler does not support a given base option. + + for example, ICC doesn't currently support b_sanitize. 
+ """ + def actual(f): + @functools.wraps(f) + def wrapped(*args, **kwargs): + env = get_fake_env('', '', '') + cc = env.detect_c_compiler(False) + if feature not in cc.base_options: + raise unittest.SkipTest( + '{} not available with {}'.format(feature, cc.id)) + return f(*args, **kwargs) + return wrapped + return actual + + class PatchModule: ''' Fancy monkey-patching! Whee! Can't use mock.patch because it only @@ -582,7 +650,7 @@ class InternalTests(unittest.TestCase): 'static': unix_static}, 'linux': {'shared': ('lib{}.so', '{}.so'), 'static': unix_static}, - 'darwin': {'shared': ('lib{}.dylib', '{}.dylib'), + 'darwin': {'shared': ('lib{}.dylib', 'lib{}.so', '{}.dylib', '{}.so'), 'static': unix_static}, 'cygwin': {'shared': ('cyg{}.dll', 'cyg{}.dll.a', 'lib{}.dll', 'lib{}.dll.a', '{}.dll', '{}.dll.a'), @@ -599,7 +667,7 @@ class InternalTests(unittest.TestCase): elif is_cygwin(): self._test_all_naming(cc, env, patterns, 'cygwin') elif is_windows(): - if cc.get_id() == 'msvc': + if cc.get_argument_syntax() == 'msvc': self._test_all_naming(cc, env, patterns, 'windows-msvc') else: self._test_all_naming(cc, env, patterns, 'windows-mingw') @@ -622,10 +690,6 @@ class InternalTests(unittest.TestCase): with PatchModule(mesonbuild.compilers.c.for_windows, 'mesonbuild.compilers.c.for_windows', true): self._test_all_naming(cc, env, patterns, 'windows-mingw') - cc.id = 'msvc' - with PatchModule(mesonbuild.compilers.c.for_windows, - 'mesonbuild.compilers.c.for_windows', true): - self._test_all_naming(cc, env, patterns, 'windows-msvc') def test_pkgconfig_parse_libs(self): ''' @@ -677,7 +741,7 @@ class InternalTests(unittest.TestCase): bar_dep = PkgConfigDependency('bar', env, kwargs) self.assertEqual(bar_dep.get_link_args(), [(p2 / 'libbar.a').as_posix()]) internal_dep = PkgConfigDependency('internal', env, kwargs) - if compiler.get_id() == 'msvc': + if compiler.get_argument_syntax() == 'msvc': self.assertEqual(internal_dep.get_link_args(), []) else: link_args = 
internal_dep.get_link_args() @@ -922,11 +986,11 @@ class BasePlatformTests(unittest.TestCase): # Misc stuff self.orig_env = os.environ.copy() if self.backend is Backend.ninja: - self.no_rebuild_stdout = 'ninja: no work to do.' + self.no_rebuild_stdout = ['ninja: no work to do.', 'samu: nothing to do'] else: # VS doesn't have a stable output when no changes are done # XCode backend is untested with unit tests, help welcome! - self.no_rebuild_stdout = 'UNKNOWN BACKEND {!r}'.format(self.backend.name) + self.no_rebuild_stdout = ['UNKNOWN BACKEND {!r}'.format(self.backend.name)] self.builddirs = [] self.new_builddir() @@ -1077,8 +1141,11 @@ class BasePlatformTests(unittest.TestCase): def get_compdb(self): if self.backend is not Backend.ninja: raise unittest.SkipTest('Compiler db not available with {} backend'.format(self.backend.name)) - with open(os.path.join(self.builddir, 'compile_commands.json')) as ifile: - contents = json.load(ifile) + try: + with open(os.path.join(self.builddir, 'compile_commands.json')) as ifile: + contents = json.load(ifile) + except FileNotFoundError: + raise unittest.SkipTest('Compiler db not found') # If Ninja is using .rsp files, generate them, read their contents, and # replace it as the command for all compile commands in the parsed json. 
if len(contents) > 0 and contents[0]['command'].endswith('.rsp'): @@ -1114,6 +1181,13 @@ class BasePlatformTests(unittest.TestCase): universal_newlines=True) return json.loads(out) + def introspect_directory(self, directory, args): + if isinstance(args, str): + args = [args] + out = subprocess.check_output(self.mintro_command + args + [directory], + universal_newlines=True) + return json.loads(out) + def assertPathEqual(self, path1, path2): ''' Handles a lot of platform-specific quirks related to paths such as @@ -1132,7 +1206,7 @@ class BasePlatformTests(unittest.TestCase): def assertBuildIsNoop(self): ret = self.build() if self.backend is Backend.ninja: - self.assertEqual(ret.split('\n')[-2], self.no_rebuild_stdout) + self.assertIn(ret.split('\n')[-2], self.no_rebuild_stdout) elif self.backend is Backend.vs: # Ensure that some target said that no rebuild was done self.assertIn('CustomBuild:\n All outputs are up-to-date.', ret) @@ -1462,6 +1536,38 @@ class AllPlatformTests(BasePlatformTests): self.assertRaises(subprocess.CalledProcessError, self._run, self.mtest_command + ['--setup=main:onlyinbar']) + def test_testsetup_default(self): + testdir = os.path.join(self.unit_test_dir, '47 testsetup default') + self.init(testdir) + self.build() + + # Run tests without --setup will cause the default setup to be used + self.run_tests() + with open(os.path.join(self.logdir, 'testlog.txt')) as f: + default_log = f.read() + + # Run tests with explicitly using the same setup that is set as default + self._run(self.mtest_command + ['--setup=mydefault']) + with open(os.path.join(self.logdir, 'testlog-mydefault.txt')) as f: + mydefault_log = f.read() + + # Run tests with another setup + self._run(self.mtest_command + ['--setup=other']) + with open(os.path.join(self.logdir, 'testlog-other.txt')) as f: + other_log = f.read() + + self.assertTrue('ENV_A is 1' in default_log) + self.assertTrue('ENV_B is 2' in default_log) + self.assertTrue('ENV_C is 2' in default_log) + + 
self.assertTrue('ENV_A is 1' in mydefault_log) + self.assertTrue('ENV_B is 2' in mydefault_log) + self.assertTrue('ENV_C is 2' in mydefault_log) + + self.assertTrue('ENV_A is 1' in other_log) + self.assertTrue('ENV_B is 3' in other_log) + self.assertTrue('ENV_C is 2' in other_log) + def assertFailedTestCount(self, failure_count, command): try: self._run(command) @@ -1551,7 +1657,8 @@ class AllPlatformTests(BasePlatformTests): incs = [a for a in shlex.split(execmd) if a.startswith("-I")] self.assertEqual(len(incs), 9) # target private dir - self.assertPathEqual(incs[0], "-Isub4/sub4@@someexe@exe") + someexe_id = Target.construct_id_from_path("sub4", "someexe", "@exe") + self.assertPathEqual(incs[0], "-I" + os.path.join("sub4", someexe_id)) # target build subdir self.assertPathEqual(incs[1], "-Isub4") # target source subdir @@ -1600,6 +1707,7 @@ class AllPlatformTests(BasePlatformTests): clang = mesonbuild.compilers.ClangCompiler intel = mesonbuild.compilers.IntelCompiler msvc = mesonbuild.compilers.VisualStudioCCompiler + clangcl = mesonbuild.compilers.ClangClCCompiler ar = mesonbuild.linkers.ArLinker lib = mesonbuild.linkers.VisualStudioLinker langs = [('c', 'CC'), ('cpp', 'CXX')] @@ -1621,6 +1729,9 @@ class AllPlatformTests(BasePlatformTests): if ebase.startswith('g') or ebase.endswith(('-gcc', '-g++')): self.assertIsInstance(ecc, gnu) self.assertIsInstance(elinker, ar) + elif 'clang-cl' in ebase: + self.assertIsInstance(ecc, clangcl) + self.assertIsInstance(elinker, lib) elif 'clang' in ebase: self.assertIsInstance(ecc, clang) self.assertIsInstance(elinker, ar) @@ -1671,7 +1782,7 @@ class AllPlatformTests(BasePlatformTests): self.assertIsInstance(linker, lib) self.assertEqual(cc.id, 'msvc') self.assertTrue(hasattr(cc, 'is_64')) - # If we're in the appveyor CI, we know what the compiler will be + # If we're on Windows CI, we know what the compiler will be if 'arch' in os.environ: if os.environ['arch'] == 'x64': self.assertTrue(cc.is_64) @@ -1694,6 +1805,8 @@ class 
AllPlatformTests(BasePlatformTests): wrapperlinker_s += shlex.quote(w) + ' ' os.environ['AR'] = wrapperlinker_s wlinker = env.detect_static_linker(wcc) + # Pop it so we don't use it for the next detection + evalue = os.environ.pop('AR') # Must be the same type since it's a wrapper around the same exelist self.assertIs(type(cc), type(wcc)) self.assertIs(type(linker), type(wlinker)) @@ -1992,7 +2105,7 @@ int main(int argc, char **argv) { def pbcompile(self, compiler, source, objectfile, extra_args=[]): cmd = compiler.get_exelist() - if compiler.id == 'msvc': + if compiler.get_argument_syntax() == 'msvc': cmd += ['/nologo', '/Fo' + objectfile, '/c', source] + extra_args else: cmd += ['-c', source, '-o', objectfile] + extra_args @@ -2014,7 +2127,7 @@ int main(int argc, char **argv) { def build_static_lib(self, compiler, linker, source, objectfile, outfile, extra_args=None): if extra_args is None: extra_args = [] - if compiler.id == 'msvc': + if compiler.get_argument_syntax() == 'msvc': link_cmd = ['lib', '/NOLOGO', '/OUT:' + outfile, objectfile] else: link_cmd = ['ar', 'csr', outfile, objectfile] @@ -2047,9 +2160,10 @@ int main(int argc, char **argv) { def build_shared_lib(self, compiler, source, objectfile, outfile, impfile, extra_args=None): if extra_args is None: extra_args = [] - if compiler.id == 'msvc': - link_cmd = ['link', '/NOLOGO', '/DLL', '/DEBUG', - '/IMPLIB:' + impfile, '/OUT:' + outfile, objectfile] + if compiler.get_argument_syntax() == 'msvc': + link_cmd = compiler.get_linker_exelist() + [ + '/NOLOGO', '/DLL', '/DEBUG', '/IMPLIB:' + impfile, + '/OUT:' + outfile, objectfile] else: extra_args += ['-fPIC'] link_cmd = compiler.get_exelist() + ['-shared', '-o', outfile, objectfile] @@ -2067,7 +2181,7 @@ int main(int argc, char **argv) { source = os.path.join(tdir, 'alexandria.c') objectfile = os.path.join(tdir, 'alexandria.' 
+ object_suffix) impfile = os.path.join(tdir, 'alexandria.lib') - if cc.id == 'msvc': + if cc.get_argument_syntax() == 'msvc': shlibfile = os.path.join(tdir, 'alexandria.' + shared_suffix) elif is_cygwin(): shlibfile = os.path.join(tdir, 'cygalexandria.' + shared_suffix) @@ -2105,7 +2219,7 @@ int main(int argc, char **argv) { objectfile = os.path.join(testdir, 'foo.' + objext) stlibfile = os.path.join(testdir, 'libfoo.a') impfile = os.path.join(testdir, 'foo.lib') - if cc.id == 'msvc': + if cc.get_argument_syntax() == 'msvc': shlibfile = os.path.join(testdir, 'foo.' + shext) elif is_cygwin(): shlibfile = os.path.join(testdir, 'cygfoo.' + shext) @@ -2162,6 +2276,7 @@ int main(int argc, char **argv) { expected = { 'name': 'list', 'description': 'list', + 'section': 'user', 'type': 'array', 'value': ['foo', 'bar'], } @@ -2186,6 +2301,7 @@ int main(int argc, char **argv) { expected = { 'name': 'list', 'description': 'list', + 'section': 'user', 'type': 'array', 'value': ['foo', 'bar'], } @@ -2210,6 +2326,7 @@ int main(int argc, char **argv) { expected = { 'name': 'list', 'description': 'list', + 'section': 'user', 'type': 'array', 'value': [], } @@ -2372,6 +2489,38 @@ int main(int argc, char **argv) { self.init(testdir, ['--cross-file=' + name], inprocess=True) self.wipe() + def test_introspect_target_files(self): + ''' + Tests that mesonintrospect --target-files returns expected output. 
+ ''' + testdir = os.path.join(self.common_test_dir, '8 install') + self.init(testdir) + expected = { + 'stat@sta': ['stat.c'], + 'prog@exe': ['prog.c'], + } + t_intro = self.introspect('--targets') + self.assertCountEqual([t['id'] for t in t_intro], expected) + for t in t_intro: + id = t['id'] + tf_intro = self.introspect(['--target-files', id]) + self.assertEqual(tf_intro, expected[id]) + self.wipe() + + testdir = os.path.join(self.common_test_dir, '53 custom target') + self.init(testdir) + expected = { + 'bindat@cus': ['data_source.txt'], + 'depfile@cus': [], + } + t_intro = self.introspect('--targets') + self.assertCountEqual([t['id'] for t in t_intro], expected) + for t in t_intro: + id = t['id'] + tf_intro = self.introspect(['--target-files', id]) + self.assertEqual(tf_intro, expected[id]) + self.wipe() + def test_compiler_run_command(self): ''' The test checks that the compiler object can be passed to @@ -2447,7 +2596,7 @@ recommended as it is not supported on some platforms''') testdirlib = os.path.join(testdirbase, 'lib') extra_args = None env = get_fake_env(testdirlib, self.builddir, self.prefix) - if env.detect_c_compiler(False).get_id() != 'msvc': + if env.detect_c_compiler(False).get_id() not in ['msvc', 'clang-cl']: # static libraries are not linkable with -l with msvc because meson installs them # as .a files which unix_args_to_native will not know as it expects libraries to use # .lib as extension. For a DLL the import library is installed as .lib. 
Thus for msvc @@ -2682,6 +2831,8 @@ recommended as it is not supported on some platforms''') self.assertRegex(out, "WARNING:.*\"double_output.txt\".*overwrites") self.assertRegex(out, "WARNING:.*\"subdir.double_output2.txt\".*overwrites") self.assertNotRegex(out, "WARNING:.*no_write_conflict.txt.*overwrites") + self.assertNotRegex(out, "WARNING:.*@BASENAME@.*overwrites") + self.assertRegex(out, "WARNING:.*\"sameafterbasename\".*overwrites") # No warnings about empty configuration data objects passed to files with substitutions self.assertNotRegex(out, "WARNING:.*empty configuration_data.*nosubst-nocopy1.txt.in") self.assertNotRegex(out, "WARNING:.*empty configuration_data.*nosubst-nocopy2.txt.in") @@ -2744,6 +2895,106 @@ recommended as it is not supported on some platforms''') self.assertEqual(opts['debug'], True) self.assertEqual(opts['optimization'], '0') + @skipIfNoPkgconfig + @unittest.skipIf(is_windows(), 'Help needed with fixing this test on windows') + def test_native_dep_pkgconfig(self): + testdir = os.path.join(self.unit_test_dir, + '46 native dep pkgconfig var') + with tempfile.NamedTemporaryFile(mode='w', delete=False) as crossfile: + crossfile.write(textwrap.dedent( + '''[binaries] + pkgconfig = r'{0}' + + [properties] + + [host_machine] + system = 'linux' + cpu_family = 'arm' + cpu = 'armv7' + endian = 'little' + '''.format(os.path.join(testdir, 'cross_pkgconfig.py')))) + crossfile.flush() + self.meson_cross_file = crossfile.name + + os.environ['PKG_CONFIG_LIBDIR'] = os.path.join(testdir, + 'native_pkgconfig') + self.init(testdir, extra_args=['-Dstart_native=false']) + self.wipe() + self.init(testdir, extra_args=['-Dstart_native=true']) + + def test_reconfigure(self): + testdir = os.path.join(self.unit_test_dir, '46 reconfigure') + self.init(testdir, extra_args=['-Dopt1=val1']) + self.setconf('-Dopt2=val2') + + # Set an older version to force a reconfigure from scratch + filename = os.path.join(self.privatedir, 'coredata.dat') + with open(filename, 
'rb') as f: + obj = pickle.load(f) + obj.version = '0.47.0' + with open(filename, 'wb') as f: + pickle.dump(obj, f) + + out = self.init(testdir, extra_args=['--reconfigure', '-Dopt3=val3']) + self.assertRegex(out, 'WARNING:.*Regenerating configuration from scratch') + self.assertRegex(out, 'opt1 val1') + self.assertRegex(out, 'opt2 val2') + self.assertRegex(out, 'opt3 val3') + self.assertRegex(out, 'opt4 default4') + self.build() + self.run_tests() + + # Create a file in builddir and verify wipe command removes it + filename = os.path.join(self.builddir, 'something') + open(filename, 'w').close() + self.assertTrue(os.path.exists(filename)) + out = self.init(testdir, extra_args=['--wipe', '-Dopt4=val4']) + self.assertFalse(os.path.exists(filename)) + self.assertRegex(out, 'opt1 val1') + self.assertRegex(out, 'opt2 val2') + self.assertRegex(out, 'opt3 val3') + self.assertRegex(out, 'opt4 val4') + self.build() + self.run_tests() + + def test_target_construct_id_from_path(self): + # This id is stable but not guessable. + # The test is supposed to prevent unintentional + # changes of target ID generation. 
+ target_id = Target.construct_id_from_path('some/obscure/subdir', + 'target-id', '@suffix') + self.assertEqual('5e002d3@@target-id@suffix', target_id) + target_id = Target.construct_id_from_path('subproject/foo/subdir/bar', + 'target2-id', '@other') + self.assertEqual('81d46d1@@target2-id@other', target_id) + + def test_introspect_projectinfo_without_configured_build(self): + testfile = os.path.join(self.common_test_dir, '36 run program', 'meson.build') + res = self.introspect_directory(testfile, '--projectinfo') + self.assertEqual(set(res['buildsystem_files']), set(['meson.build'])) + self.assertEqual(res['version'], None) + self.assertEqual(res['descriptive_name'], 'run command') + self.assertEqual(res['subprojects'], []) + + testfile = os.path.join(self.common_test_dir, '44 options', 'meson.build') + res = self.introspect_directory(testfile, '--projectinfo') + self.assertEqual(set(res['buildsystem_files']), set(['meson_options.txt', 'meson.build'])) + self.assertEqual(res['version'], None) + self.assertEqual(res['descriptive_name'], 'options') + self.assertEqual(res['subprojects'], []) + + testfile = os.path.join(self.common_test_dir, '47 subproject options', 'meson.build') + res = self.introspect_directory(testfile, '--projectinfo') + self.assertEqual(set(res['buildsystem_files']), set(['meson_options.txt', 'meson.build'])) + self.assertEqual(res['version'], None) + self.assertEqual(res['descriptive_name'], 'suboptions') + self.assertEqual(len(res['subprojects']), 1) + subproject_files = set(f.replace('\\', '/') for f in res['subprojects'][0]['buildsystem_files']) + self.assertEqual(subproject_files, set(['subprojects/subproject/meson_options.txt', 'subprojects/subproject/meson.build'])) + self.assertEqual(res['subprojects'][0]['name'], 'subproject') + self.assertEqual(res['subprojects'][0]['version'], 'undefined') + self.assertEqual(res['subprojects'][0]['descriptive_name'], 'subproject') + class FailureTests(BasePlatformTests): ''' @@ -2753,7 +3004,7 @@ 
class FailureTests(BasePlatformTests): function can fail, and creating failing tests for all of them is tedious and slows down testing. ''' - dnf = "[Dd]ependency.*not found" + dnf = "[Dd]ependency.*not found(:.*)?" nopkg = '[Pp]kg-config not found' def setUp(self): @@ -3054,7 +3305,7 @@ class WindowsTests(BasePlatformTests): testdir = os.path.join(self.platform_test_dir, '1 basic') env = get_fake_env(testdir, self.builddir, self.prefix) cc = env.detect_c_compiler(False) - if cc.id != 'msvc': + if cc.get_argument_syntax() != 'msvc': raise unittest.SkipTest('Not using MSVC') # To force people to update this test, and also test self.assertEqual(set(cc.ignore_libs), {'c', 'm', 'pthread', 'dl', 'rt'}) @@ -3066,7 +3317,7 @@ class WindowsTests(BasePlatformTests): # resource compiler depfile generation is not yet implemented for msvc env = get_fake_env(testdir, self.builddir, self.prefix) - depfile_works = env.detect_c_compiler(False).get_id() != 'msvc' + depfile_works = env.detect_c_compiler(False).get_id() not in ['msvc', 'clang-cl'] self.init(testdir) self.build() @@ -3093,6 +3344,24 @@ class WindowsTests(BasePlatformTests): self.utime(os.path.join(testdir, 'res', 'resource.h')) self.assertRebuiltTarget('prog_1') + def test_msvc_cpp17(self): + testdir = os.path.join(self.unit_test_dir, '45 vscpp17') + + env = get_fake_env(testdir, self.builddir, self.prefix) + cc = env.detect_c_compiler(False) + if cc.get_argument_syntax() != 'msvc': + raise unittest.SkipTest('Test only applies to MSVC-like compilers') + + try: + self.init(testdir) + except subprocess.CalledProcessError: + # According to Python docs, output is only stored when + # using check_output. We don't use it, so we can't check + # that the output is correct (i.e. that it failed due + # to the right reason). 
+ return + self.build() + class DarwinTests(BasePlatformTests): ''' Tests that should run on macOS @@ -3175,6 +3444,18 @@ class DarwinTests(BasePlatformTests): self.assertEqual(self._get_darwin_versions(targets['intstringver']), ('1111.0.0', '2.5.0')) self.assertEqual(self._get_darwin_versions(targets['stringlistvers']), ('2.6.0', '2.6.1')) + def test_duplicate_rpath(self): + testdir = os.path.join(self.unit_test_dir, '10 build_rpath') + # We purposely pass a duplicate rpath to Meson, in order + # to ascertain that Meson does not call install_name_tool + # with duplicate -delete_rpath arguments, which would + # lead to erroring out on installation + os.environ["LDFLAGS"] = "-Wl,-rpath,/foo/bar" + self.init(testdir) + self.build() + self.install() + del os.environ["LDFLAGS"] + class LinuxlikeTests(BasePlatformTests): ''' @@ -3270,17 +3551,17 @@ class LinuxlikeTests(BasePlatformTests): self.assertEqual(sorted(out), sorted(['libfoo >= 1.0'])) out = self._run(cmd + ['--cflags-only-other']).strip().split() - self.assertEqual(sorted(out), sorted(['-pthread', '-DCUSTOM'])) + self.check_pkg_flags_are_same(out, ['-pthread', '-DCUSTOM']) out = self._run(cmd + ['--libs-only-l', '--libs-only-other']).strip().split() - self.assertEqual(sorted(out), sorted(['-pthread', '-lcustom', - '-llibmain', '-llibexposed'])) + self.check_pkg_flags_are_same(out, ['-pthread', '-lcustom', + '-llibmain', '-llibexposed']) out = self._run(cmd + ['--libs-only-l', '--libs-only-other', '--static']).strip().split() - self.assertEqual(sorted(out), sorted(['-pthread', '-lcustom', - '-llibmain', '-llibexposed', - '-llibinternal', '-lcustom2', - '-lfoo'])) + self.check_pkg_flags_are_same(out, ['-pthread', '-lcustom', + '-llibmain', '-llibexposed', + '-llibinternal', '-lcustom2', + '-lfoo']) cmd = ['pkg-config', 'requires-test'] out = self._run(cmd + ['--print-requires']).strip().split('\n') @@ -3290,6 +3571,11 @@ class LinuxlikeTests(BasePlatformTests): out = self._run(cmd + 
['--print-requires-private']).strip().split('\n') self.assertEqual(sorted(out), sorted(['libexposed', 'libfoo >= 1.0', 'libhello'])) + def check_pkg_flags_are_same(self, output, expected): + if is_osx() or is_haiku(): + expected = [x for x in expected if x != '-pthread'] + self.assertEqual(sorted(output), sorted(expected)) + def test_pkg_unfound(self): testdir = os.path.join(self.unit_test_dir, '23 unfound pkgconfig') self.init(testdir) @@ -3353,6 +3639,7 @@ class LinuxlikeTests(BasePlatformTests): self.assertRegex('\n'.join(mesonlog), r'Dependency qt5 \(modules: Core\) found: YES 5.* \(pkg-config\)\n') + @skip_if_not_base_option('b_sanitize') def test_generate_gir_with_address_sanitizer(self): if is_cygwin(): raise unittest.SkipTest('asan not available on Cygwin') @@ -3521,8 +3808,10 @@ class LinuxlikeTests(BasePlatformTests): def test_unity_subproj(self): testdir = os.path.join(self.common_test_dir, '46 subproject') self.init(testdir, extra_args='--unity=subprojects') - self.assertPathExists(os.path.join(self.builddir, 'subprojects/sublib/subprojects@sublib@@simpletest@exe/simpletest-unity.c')) - self.assertPathExists(os.path.join(self.builddir, 'subprojects/sublib/subprojects@sublib@@sublib@sha/sublib-unity.c')) + simpletest_id = Target.construct_id_from_path('subprojects/sublib', 'simpletest', '@exe') + self.assertPathExists(os.path.join(self.builddir, 'subprojects/sublib', simpletest_id, 'simpletest-unity.c')) + sublib_id = Target.construct_id_from_path('subprojects/sublib', 'sublib', '@sha') + self.assertPathExists(os.path.join(self.builddir, 'subprojects/sublib', sublib_id, 'sublib-unity.c')) self.assertPathDoesNotExist(os.path.join(self.builddir, 'user@exe/user-unity.c')) self.build() @@ -3600,7 +3889,7 @@ class LinuxlikeTests(BasePlatformTests): ('share', 'drwxr-x---'), ('share/man', 'drwxr-x---'), ('share/man/man1', 'drwxr-x---'), - ('share/man/man1/foo.1.gz', '-r--r--r-T'), + ('share/man/man1/foo.1', '-r--r--r-T'), ('share/sub1', 'drwxr-x---'), 
('share/sub1/second.dat', '-rwxr-x--t'), ('subdir', 'drwxr-x---'), @@ -3673,7 +3962,7 @@ class LinuxlikeTests(BasePlatformTests): 'include/sample.h', 'share/datafile.cat', 'share/file.dat', - 'share/man/man1/prog.1.gz', + 'share/man/man1/prog.1', 'share/subdir/datafile.dog', ]: f = os.path.join(self.installdir, 'usr', *datafile.split('/')) @@ -3730,7 +4019,7 @@ class LinuxlikeTests(BasePlatformTests): # when all tests are run (but works when only this test is run), # but doing this explicitly works. env = os.environ.copy() - env['LD_LIBRARY_PATH'] = installed_libdir + env['LD_LIBRARY_PATH'] = ':'.join([installed_libdir, env.get('LD_LIBRARY_PATH', '')]) self.assertEqual(subprocess.call(installed_exe, env=env), 0) # Ensure that introspect --installed works installed = self.introspect('--installed') @@ -3815,6 +4104,7 @@ class LinuxlikeTests(BasePlatformTests): install_rpath = get_rpath(os.path.join(self.installdir, 'usr/bin/progcxx')) self.assertEqual(install_rpath, 'baz') + @skip_if_not_base_option('b_sanitize') def test_pch_with_address_sanitizer(self): if is_cygwin(): raise unittest.SkipTest('asan not available on Cygwin') @@ -3930,7 +4220,7 @@ endian = 'little' self.init(testdir2) self.build() myenv = os.environ.copy() - myenv['LD_LIBRARY_PATH'] = lib_dir + myenv['LD_LIBRARY_PATH'] = ':'.join([lib_dir, myenv.get('LD_LIBRARY_PATH', '')]) if is_cygwin(): bin_dir = os.path.join(tempdirname, 'bin') myenv['PATH'] = bin_dir + os.pathsep + myenv['PATH'] @@ -3973,6 +4263,17 @@ endian = 'little' deps.append(b'-lintl') self.assertEqual(set(deps), set(stdo.split())) + @skipIfNoPkgconfig + @skip_if_not_language('cs') + def test_pkgconfig_csharp_library(self): + testdir = os.path.join(self.unit_test_dir, '48 pkgconfig csharp library') + self.init(testdir) + myenv = os.environ.copy() + myenv['PKG_CONFIG_PATH'] = self.privatedir + stdo = subprocess.check_output(['pkg-config', '--libs', 'libsomething'], env=myenv) + + self.assertEqual("-r/usr/lib/libsomething.dll", 
str(stdo.decode('ascii')).strip()) + def test_deterministic_dep_order(self): ''' Test that the dependencies are always listed in a deterministic order. @@ -4107,6 +4408,19 @@ endian = 'little' def test_install_subdir_symlinks_with_default_umask_and_mode(self): self.install_subdir_invalid_symlinks('196 install_mode', 'sub1') + @skipIfNoPkgconfigDep('gmodule-2.0') + def test_ldflag_dedup(self): + testdir = os.path.join(self.unit_test_dir, '49 ldflagdedup') + if is_cygwin() or is_osx(): + raise unittest.SkipTest('Not applicable on Cygwin or OSX.') + self.init(testdir) + build_ninja = os.path.join(self.builddir, 'build.ninja') + max_count = 0 + search_term = '-Wl,--export-dynamic' + with open(build_ninja, 'r', encoding='utf-8') as f: + for line in f: + max_count = max(max_count, line.count(search_term)) + self.assertEqual(max_count, 1, 'Export dynamic incorrectly deduplicated.') class LinuxCrossArmTests(BasePlatformTests): ''' @@ -4307,6 +4621,275 @@ class RewriterTests(unittest.TestCase): self.assertEqual(s2, self.read_contents('sub2/meson.build')) +class NativeFileTests(BasePlatformTests): + + def setUp(self): + super().setUp() + self.testcase = os.path.join(self.unit_test_dir, '46 native file binary') + self.current_config = 0 + self.current_wrapper = 0 + + def helper_create_native_file(self, values): + """Create a config file as a temporary file. 
+ + values should be a nested dictionary structure of {section: {key: + value}} + """ + filename = os.path.join(self.builddir, 'generated{}.config'.format(self.current_config)) + self.current_config += 1 + with open(filename, 'wt') as f: + for section, entries in values.items(): + f.write('[{}]\n'.format(section)) + for k, v in entries.items(): + f.write("{}='{}'\n".format(k, v)) + return filename + + def helper_create_binary_wrapper(self, binary, **kwargs): + """Creates a wrapper around a binary that overrides specific values.""" + filename = os.path.join(self.builddir, 'binary_wrapper{}.py'.format(self.current_wrapper)) + self.current_wrapper += 1 + if is_haiku(): + chbang = '#!/bin/env python3' + else: + chbang = '#!/usr/bin/env python3' + + with open(filename, 'wt') as f: + f.write(textwrap.dedent('''\ + {} + import argparse + import subprocess + import sys + + def main(): + parser = argparse.ArgumentParser() + '''.format(chbang))) + for name in kwargs: + f.write(' parser.add_argument("-{0}", "--{0}", action="store_true")\n'.format(name)) + f.write(' args, extra_args = parser.parse_known_args()\n') + for name, value in kwargs.items(): + f.write(' if args.{}:\n'.format(name)) + f.write(' print("{}", file=sys.{})\n'.format(value, kwargs.get('outfile', 'stdout'))) + f.write(' sys.exit(0)\n') + f.write(textwrap.dedent(''' + ret = subprocess.run( + ["{}"] + extra_args, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + encoding='utf-8') + print(ret.stdout) + print(ret.stderr, file=sys.stderr) + sys.exit(ret.returncode) + + if __name__ == '__main__': + main() + '''.format(binary))) + + if not is_windows(): + os.chmod(filename, 0o755) + return filename + + # On windows we need yet another level of indirection, as cmd cannot + # invoke python files itself, so instead we generate a .bat file, which + # invokes our python wrapper + batfile = os.path.join(self.builddir, 'binary_wrapper{}.bat'.format(self.current_wrapper)) + with open(batfile, 'wt') as f: + f.write('py 
-3 {} %*'.format(filename)) + return batfile + + def helper_for_compiler(self, lang, cb): + """Helper for generating tests for overriding compilers for langaugages + with more than one implementation, such as C, C++, ObjC, ObjC++, and D. + """ + env = get_fake_env('', '', '') + getter = getattr(env, 'detect_{}_compiler'.format(lang)) + if lang not in ['cs']: + getter = functools.partial(getter, False) + cc = getter() + binary, newid = cb(cc) + env.config_info.binaries = {lang: binary} + compiler = getter() + self.assertEqual(compiler.id, newid) + + def test_multiple_native_files_override(self): + wrapper = self.helper_create_binary_wrapper('bash', version='foo') + config = self.helper_create_native_file({'binaries': {'bash': wrapper}}) + wrapper = self.helper_create_binary_wrapper('bash', version='12345') + config2 = self.helper_create_native_file({'binaries': {'bash': wrapper}}) + self.init(self.testcase, extra_args=[ + '--native-file', config, '--native-file', config2, + '-Dcase=find_program']) + + def test_multiple_native_files(self): + wrapper = self.helper_create_binary_wrapper('bash', version='12345') + config = self.helper_create_native_file({'binaries': {'bash': wrapper}}) + wrapper = self.helper_create_binary_wrapper('python') + config2 = self.helper_create_native_file({'binaries': {'python': wrapper}}) + self.init(self.testcase, extra_args=[ + '--native-file', config, '--native-file', config2, + '-Dcase=find_program']) + + def _simple_test(self, case, binary): + wrapper = self.helper_create_binary_wrapper(binary, version='12345') + config = self.helper_create_native_file({'binaries': {binary: wrapper}}) + self.init(self.testcase, extra_args=['--native-file', config, '-Dcase={}'.format(case)]) + + def test_find_program(self): + self._simple_test('find_program', 'bash') + + def test_config_tool_dep(self): + # Do the skip at this level to avoid screwing up the cache + if not shutil.which('llvm-config'): + raise unittest.SkipTest('No llvm-installed, cannot 
test') + self._simple_test('config_dep', 'llvm-config') + + def test_python3_module(self): + self._simple_test('python3', 'python3') + + def test_python_module(self): + if is_windows(): + # Bat adds extra crap to stdout, so the version check logic in the + # python module breaks. This is fine on other OSes because they + # don't need the extra indirection. + raise unittest.SkipTest('bat indirection breaks internal sanity checks.') + self._simple_test('python', 'python') + + @unittest.skipIf(is_windows(), 'Setting up multiple compilers on windows is hard') + @skip_if_env_value('CC') + def test_c_compiler(self): + def cb(comp): + if comp.id == 'gcc': + if not shutil.which('clang'): + raise unittest.SkipTest('Only one compiler found, cannot test.') + return 'clang', 'clang' + if not shutil.which('gcc'): + raise unittest.SkipTest('Only one compiler found, cannot test.') + return 'gcc', 'gcc' + self.helper_for_compiler('c', cb) + + @unittest.skipIf(is_windows(), 'Setting up multiple compilers on windows is hard') + @skip_if_env_value('CXX') + def test_cpp_compiler(self): + def cb(comp): + if comp.id == 'gcc': + if not shutil.which('clang++'): + raise unittest.SkipTest('Only one compiler found, cannot test.') + return 'clang++', 'clang' + if not shutil.which('g++'): + raise unittest.SkipTest('Only one compiler found, cannot test.') + return 'g++', 'gcc' + self.helper_for_compiler('cpp', cb) + + @skip_if_not_language('objc') + @skip_if_env_value('OBJC') + def test_objc_compiler(self): + def cb(comp): + if comp.id == 'gcc': + if not shutil.which('clang'): + raise unittest.SkipTest('Only one compiler found, cannot test.') + return 'clang', 'clang' + if not shutil.which('gcc'): + raise unittest.SkipTest('Only one compiler found, cannot test.') + return 'gcc', 'gcc' + self.helper_for_compiler('objc', cb) + + @skip_if_not_language('objcpp') + @skip_if_env_value('OBJCXX') + def test_objcpp_compiler(self): + def cb(comp): + if comp.id == 'gcc': + if not shutil.which('clang++'): 
+ raise unittest.SkipTest('Only one compiler found, cannot test.') + return 'clang++', 'clang' + if not shutil.which('g++'): + raise unittest.SkipTest('Only one compiler found, cannot test.') + return 'g++', 'gcc' + self.helper_for_compiler('objcpp', cb) + + @skip_if_not_language('d') + @skip_if_env_value('DC') + def test_d_compiler(self): + def cb(comp): + if comp.id == 'dmd': + if shutil.which('ldc'): + return 'ldc', 'ldc' + elif shutil.which('gdc'): + return 'gdc', 'gdc' + else: + raise unittest.SkipTest('No alternative dlang compiler found.') + if shutil.which('dmd'): + return 'dmd', 'dmd' + raise unittest.SkipTest('No alternative dlang compiler found.') + self.helper_for_compiler('d', cb) + + @skip_if_not_language('cs') + @skip_if_env_value('CSC') + def test_cs_compiler(self): + def cb(comp): + if comp.id == 'csc': + if not shutil.which('mcs'): + raise unittest.SkipTest('No alternate C# implementation.') + return 'mcs', 'mcs' + if not shutil.which('csc'): + raise unittest.SkipTest('No alternate C# implementation.') + return 'csc', 'csc' + self.helper_for_compiler('cs', cb) + + @skip_if_not_language('fortran') + @skip_if_env_value('FC') + def test_fortran_compiler(self): + def cb(comp): + if comp.id == 'gcc': + if shutil.which('ifort'): + return 'ifort', 'intel' + # XXX: there are several other fortran compilers meson + # supports, but I don't have any of them to test with + raise unittest.SkipTest('No alternate Fortran implementation.') + if not shutil.which('gfortran'): + raise unittest.SkipTest('No alternate C# implementation.') + return 'gfortran', 'gcc' + self.helper_for_compiler('fortran', cb) + + def _single_implementation_compiler(self, lang, binary, version_str, version): + """Helper for languages with a single (supported) implementation. + + Builds a wrapper around the compiler to override the version. 
+ """ + wrapper = self.helper_create_binary_wrapper(binary, version=version_str) + env = get_fake_env('', '', '') + getter = getattr(env, 'detect_{}_compiler'.format(lang)) + if lang in ['rust']: + getter = functools.partial(getter, False) + env.config_info.binaries = {lang: wrapper} + compiler = getter() + self.assertEqual(compiler.version, version) + + @skip_if_not_language('vala') + @skip_if_env_value('VALAC') + def test_vala_compiler(self): + self._single_implementation_compiler( + 'vala', 'valac', 'Vala 1.2345', '1.2345') + + @skip_if_not_language('rust') + @skip_if_env_value('RUSTC') + def test_rust_compiler(self): + self._single_implementation_compiler( + 'rust', 'rustc', 'rustc 1.2345', '1.2345') + + @skip_if_not_language('java') + def test_java_compiler(self): + self._single_implementation_compiler( + 'java', 'javac', 'javac 9.99.77', '9.99.77') + + @skip_if_not_language('swift') + def test_swift_compiler(self): + wrapper = self.helper_create_binary_wrapper( + 'swiftc', version='Swift 1.2345', outfile='stderr') + env = get_fake_env('', '', '') + env.config_info.binaries = {'swift': wrapper} + compiler = env.detect_swift_compiler() + self.assertEqual(compiler.version, '1.2345') + + def unset_envs(): # For unit tests we must fully control all command lines # so that there are no unexpected changes coming from the @@ -4322,9 +4905,10 @@ def should_run_cross_arm_tests(): def should_run_cross_mingw_tests(): return shutil.which('x86_64-w64-mingw32-gcc') and not (is_windows() or is_cygwin()) -if __name__ == '__main__': +def main(): unset_envs() - cases = ['InternalTests', 'DataTests', 'AllPlatformTests', 'FailureTests', 'PythonTests'] + cases = ['InternalTests', 'DataTests', 'AllPlatformTests', 'FailureTests', + 'PythonTests', 'NativeFileTests'] if not is_windows(): cases += ['LinuxlikeTests'] if should_run_cross_arm_tests(): @@ -4336,4 +4920,7 @@ if __name__ == '__main__': if is_osx(): cases += ['DarwinTests'] - unittest.main(defaultTest=cases, buffer=True) + 
return unittest.main(defaultTest=cases, buffer=True) + +if __name__ == '__main__': + sys.exit(main()) diff --git a/setup.cfg b/setup.cfg index bd453b3..7a232cf 100644 --- a/setup.cfg +++ b/setup.cfg @@ -13,6 +13,7 @@ ignore = E731 E741 E722 + W504 max-line-length = 120 [egg_info] diff --git a/setup.py b/setup.py index 41933cb..f1f2e81 100644 --- a/setup.py +++ b/setup.py @@ -16,13 +16,12 @@ import sys -from mesonbuild.coredata import version - if sys.version_info < (3, 5, 0): print('Tried to install with an unsupported version of Python. ' 'Meson requires Python 3.5.0 or greater') sys.exit(1) +from mesonbuild.coredata import version from setuptools import setup # On windows, will create Scripts/meson.exe and Scripts/meson-script.py @@ -35,6 +34,7 @@ packages = ['mesonbuild', 'mesonbuild.modules', 'mesonbuild.scripts', 'mesonbuild.wrap'] +package_data = {'mesonbuild.dependencies': ['data/CMakeLists.txt']} data_files = [] if sys.platform != 'win32': # Only useful on UNIX-like systems @@ -51,6 +51,7 @@ if __name__ == '__main__': license=' Apache License, Version 2.0', python_requires='>=3.5', packages=packages, + package_data=package_data, entry_points=entries, data_files=data_files, classifiers=['Development Status :: 5 - Production/Stable', diff --git a/test cases/common/10 man install/installed_files.txt b/test cases/common/10 man install/installed_files.txt index c13baa4..5aad8ea 100644 --- a/test cases/common/10 man install/installed_files.txt +++ b/test cases/common/10 man install/installed_files.txt @@ -1,5 +1,5 @@ -usr/share/man/man1/foo.1.gz -usr/share/man/man2/bar.2.gz -usr/share/man/man1/vanishing.1.gz -usr/share/man/man2/vanishing.2.gz -usr/share/man/man1/baz.1.gz +usr/share/man/man1/foo.1 +usr/share/man/man2/bar.2 +usr/share/man/man1/vanishing.1 +usr/share/man/man2/vanishing.2 +usr/share/man/man1/baz.1 diff --git a/test cases/common/100 manygen/subdir/manygen.py b/test cases/common/100 manygen/subdir/manygen.py index 7ffd435..0fbc2ec 100755 --- a/test 
cases/common/100 manygen/subdir/manygen.py +++ b/test cases/common/100 manygen/subdir/manygen.py @@ -6,38 +6,30 @@ from __future__ import print_function # file and a header file. import sys, os -import shutil, subprocess +import subprocess with open(sys.argv[1]) as f: funcname = f.readline().strip() outdir = sys.argv[2] buildtype_args = sys.argv[3] +compiler_type = sys.argv[4] +compiler = sys.argv[5:] if not os.path.isdir(outdir): print('Outdir does not exist.') sys.exit(1) -# Emulate the environment.detect_c_compiler() logic -compiler = os.environ.get('CC', None) -if not compiler: - compiler = shutil.which('cl') or \ - shutil.which('gcc') or \ - shutil.which('clang') or \ - shutil.which('cc') - -compbase = os.path.basename(compiler) -if 'cl' in compbase and 'clang' not in compbase: +if compiler_type == 'msvc': libsuffix = '.lib' is_vs = True - compiler = 'cl' - linker = 'lib' + if any(['clang-cl' in c for c in compiler]): + linker = 'llvm-lib' + else: + linker = 'lib' else: libsuffix = '.a' is_vs = False linker = 'ar' - if compiler is None: - print('No known compilers found.') - sys.exit(1) objsuffix = '.o' @@ -70,9 +62,9 @@ with open(tmpc, 'w') as f: ''' % funcname) if is_vs: - subprocess.check_call([compiler, '/nologo', '/c', buildtype_args, '/Fo' + outo, tmpc]) + subprocess.check_call(compiler + ['/nologo', '/c', buildtype_args, '/Fo' + outo, tmpc]) else: - subprocess.check_call([compiler, '-c', '-o', outo, tmpc]) + subprocess.check_call(compiler + ['-c', '-o', outo, tmpc]) with open(tmpc, 'w') as f: f.write('''int %s_in_lib() { @@ -81,10 +73,10 @@ with open(tmpc, 'w') as f: ''' % funcname) if is_vs: - subprocess.check_call([compiler, '/nologo', '/c', '/Fo' + tmpo, tmpc]) + subprocess.check_call(compiler + ['/nologo', '/c', '/Fo' + tmpo, tmpc]) subprocess.check_call([linker, '/NOLOGO', '/OUT:' + outa, tmpo]) else: - subprocess.check_call([compiler, '-c', '-o', tmpo, tmpc]) + subprocess.check_call(compiler + ['-c', '-o', tmpo, tmpc]) 
subprocess.check_call([linker, 'csr', outa, tmpo]) os.unlink(tmpo) diff --git a/test cases/common/100 manygen/subdir/meson.build b/test cases/common/100 manygen/subdir/meson.build index 73b4ff7..56f60e6 100644 --- a/test cases/common/100 manygen/subdir/meson.build +++ b/test cases/common/100 manygen/subdir/meson.build @@ -3,7 +3,8 @@ py3_bin = import('python3').find_python() buildtype = get_option('buildtype') buildtype_args = '-Dfooxxx' # a useless compiler argument -if meson.get_compiler('c').get_id() == 'msvc' +cc = meson.get_compiler('c') +if cc.get_argument_syntax() == 'msvc' # We need our manually generated code to use the same CRT as the executable. # Taken from compilers.py since build files do not have access to this. if buildtype == 'debug' @@ -21,5 +22,5 @@ endif generated = custom_target('manygen', output : outfiles, input : ['funcinfo.def'], - command : [py3_bin, gen[0], '@INPUT@', '@OUTDIR@', buildtype_args], + command : [py3_bin, gen[0], '@INPUT@', '@OUTDIR@', buildtype_args, cc.get_argument_syntax(), cc.cmd_array()], ) diff --git a/test cases/common/112 spaces backslash/meson.build b/test cases/common/112 spaces backslash/meson.build index bf614e8..d590494 100644 --- a/test cases/common/112 spaces backslash/meson.build +++ b/test cases/common/112 spaces backslash/meson.build @@ -7,7 +7,7 @@ project('comparer', 'c') include_dir = meson.current_source_dir() + '/include' default_c_args = ['-I' + include_dir] -if meson.get_compiler('c').get_id() == 'msvc' +if meson.get_compiler('c').get_argument_syntax() == 'msvc' default_c_args += ['/Faasm output\\'] # Hack to create the 'asm output' directory in the builddir subdir('asm output') diff --git a/test cases/common/116 pathjoin/meson.build b/test cases/common/116 pathjoin/meson.build index 751ca68..d3957dd 100644 --- a/test cases/common/116 pathjoin/meson.build +++ b/test cases/common/116 pathjoin/meson.build @@ -1,17 +1,24 @@ project('pathjoin', 'c') # Test string-args form since that is the canonical way 
-assert(join_paths('foo') == 'foo', 'Single argument join is broken') -assert(join_paths('foo', 'bar') == 'foo/bar', 'Path joining is broken') +assert(join_paths('foo') == 'foo', 'Single argument join is broken') +assert(join_paths('foo', 'bar') == 'foo/bar', 'Path joining is broken') assert(join_paths('foo', 'bar', 'baz') == 'foo/bar/baz', 'Path joining is broken') -assert(join_paths('/foo', 'bar') == '/foo/bar', 'Path joining is broken') -assert(join_paths('foo', '/bar') == '/bar', 'Absolute path joining is broken') -assert(join_paths('/foo', '/bar') == '/bar', 'Absolute path joining is broken') +assert(join_paths('/foo', 'bar') == '/foo/bar', 'Path joining is broken') +assert(join_paths('foo', '/bar') == '/bar', 'Absolute path joining is broken') +assert(join_paths('/foo', '/bar') == '/bar', 'Absolute path joining is broken') # Test array form since people are using that too -assert(join_paths(['foo']) == 'foo', 'Single argument join is broken') -assert(join_paths(['foo', 'bar']) == 'foo/bar', 'Path joining is broken') +assert(join_paths(['foo']) == 'foo', 'Single argument join is broken') +assert(join_paths(['foo', 'bar']) == 'foo/bar', 'Path joining is broken') assert(join_paths(['foo', 'bar', 'baz']) == 'foo/bar/baz', 'Path joining is broken') -assert(join_paths(['/foo', 'bar']) == '/foo/bar', 'Path joining is broken') -assert(join_paths(['foo', '/bar']) == '/bar', 'Absolute path joining is broken') -assert(join_paths(['/foo', '/bar']) == '/bar', 'Absolute path joining is broken') +assert(join_paths(['/foo', 'bar']) == '/foo/bar', 'Path joining is broken') +assert(join_paths(['foo', '/bar']) == '/bar', 'Absolute path joining is broken') +assert(join_paths(['/foo', '/bar']) == '/bar', 'Absolute path joining is broken') + +# Division operator should do the same as join_paths +assert('foo' / 'bar' == 'foo/bar', 'Path division is broken') +assert('foo' /'bar' /'baz' == 'foo/bar/baz', 'Path division is broken') +assert('/foo' / 'bar' == '/foo/bar', 'Path division 
is broken') +assert('foo' / '/bar' == '/bar', 'Absolute path division is broken') +assert('/foo' / '/bar' == '/bar', 'Absolute path division is broken') diff --git a/test cases/common/122 shared module/installed_files.txt b/test cases/common/122 shared module/installed_files.txt index 4542a55..d46527c 100644 --- a/test cases/common/122 shared module/installed_files.txt +++ b/test cases/common/122 shared module/installed_files.txt @@ -1,2 +1,3 @@ -usr/lib/libnosyms.so -?msvc:usr/lib/libnosyms.pdb +usr/lib/modules/libnosyms?so +usr/lib/modules/libnosyms?implibempty +?msvc:usr/lib/modules/nosyms.pdb diff --git a/test cases/common/122 shared module/meson.build b/test cases/common/122 shared module/meson.build index 9f9ad63..3d52300 100644 --- a/test cases/common/122 shared module/meson.build +++ b/test cases/common/122 shared module/meson.build @@ -13,8 +13,6 @@ e = executable('prog', 'prog.c', test('import test', e, args : m) # Shared module that does not export any symbols -shared_module('nosyms', 'nosyms.c', install : true, - # Because we don't have cross-platform library support in - # installed_files.txt - name_suffix : 'so', - name_prefix : 'lib') +shared_module('nosyms', 'nosyms.c', + install : true, + install_dir : join_paths(get_option('libdir'), 'modules')) diff --git a/test cases/common/123 llvm ir and assembly/meson.build b/test cases/common/123 llvm ir and assembly/meson.build index 51321fb..a67c6c6 100644 --- a/test cases/common/123 llvm ir and assembly/meson.build +++ b/test cases/common/123 llvm ir and assembly/meson.build @@ -28,15 +28,18 @@ foreach lang : ['c', 'cpp'] # MSVC cannot directly compile assembly files, so we pass it through the # cl.exe pre-processor first and then assemble it with the ml.exe assembler. # Then we can link it into the executable. 
- if cc_id == 'msvc' - cl = find_program('cl') + if cc.get_argument_syntax() == 'msvc' + cl = cc.cmd_array() if cpu == 'x86' - ml = find_program('ml') + ml = find_program('ml', required: false) elif cpu == 'x86_64' - ml = find_program('ml64') + ml = find_program('ml64', required: false) else error('Unsupported cpu family: "' + cpu + '"') endif + if not ml.found() + error('MESON_SKIP_TEST: ML (masm) not found') + endif # Preprocess file (ml doesn't support pre-processing) preproc_name = lang + square_base + '.i' square_preproc = custom_target(lang + square_impl + 'preproc', diff --git a/test cases/common/124 cpp and asm/meson.build b/test cases/common/124 cpp and asm/meson.build index 9160775..f097084 100644 --- a/test cases/common/124 cpp and asm/meson.build +++ b/test cases/common/124 cpp and asm/meson.build @@ -15,7 +15,7 @@ endif sources = ['trivial.cc'] # If the compiler cannot compile assembly, don't use it -if meson.get_compiler('cpp').get_id() != 'msvc' +if not ['msvc', 'clang-cl'].contains(meson.get_compiler('cpp').get_id()) sources += ['retval-' + cpu + '.S'] cpp_args = ['-DUSE_ASM'] message('Using ASM') diff --git a/test cases/common/127 no buildincdir/meson.build b/test cases/common/127 no buildincdir/meson.build index ac69e8e..53f1a7f 100644 --- a/test cases/common/127 no buildincdir/meson.build +++ b/test cases/common/127 no buildincdir/meson.build @@ -1,5 +1,5 @@ project('nobuilddir', 'c', - default_options : 'werror=true') + default_options : ['werror=true', 'buildtype=plain']) cc = meson.get_compiler('c') diff --git a/test cases/common/13 pch/meson.build b/test cases/common/13 pch/meson.build index 05b4037..d39527b 100644 --- a/test cases/common/13 pch/meson.build +++ b/test cases/common/13 pch/meson.build @@ -2,4 +2,9 @@ project('pch test', 'c', 'cpp') subdir('c') subdir('cpp') -subdir('mixed') + +if meson.backend() == 'xcode' + warning('Xcode backend only supports one precompiled header per target. 
Skipping "mixed" which has various precompiled headers.') +else + subdir('mixed') +endif diff --git a/test cases/common/13 pch/mixed/meson.build b/test cases/common/13 pch/mixed/meson.build index 7f6033d..f0c3eca 100644 --- a/test cases/common/13 pch/mixed/meson.build +++ b/test cases/common/13 pch/mixed/meson.build @@ -5,8 +5,9 @@ exe = executable( cpp_pch : ['pch/main_pch.cc', 'pch/main.h'], ) +# test pch when only a header is given (not supported by msvc) cc = meson.get_compiler('c') -if cc.get_id() != 'msvc' +if not ['msvc', 'clang-cl'].contains(cc.get_id()) exe2 = executable( 'prog2', files('main.cc', 'func.c'), diff --git a/test cases/common/132 generated assembly/meson.build b/test cases/common/132 generated assembly/meson.build index 6a8744b..5fb7429 100644 --- a/test cases/common/132 generated assembly/meson.build +++ b/test cases/common/132 generated assembly/meson.build @@ -2,8 +2,8 @@ project('generated assembly', 'c') cc = meson.get_compiler('c') -if cc.get_id() == 'msvc' - error('MESON_SKIP_TEST: assembly files cannot be compiled directly by MSVC') +if ['msvc', 'clang-cl'].contains(cc.get_id()) + error('MESON_SKIP_TEST: assembly files cannot be compiled directly by the compiler') endif cpu = host_machine.cpu_family() diff --git a/test cases/common/137 get define/meson.build b/test cases/common/137 get define/meson.build index b20c554..109f628 100644 --- a/test cases/common/137 get define/meson.build +++ b/test cases/common/137 get define/meson.build @@ -32,6 +32,9 @@ foreach lang : ['c', 'cpp'] elif host_system == 'netbsd' d = cc.get_define('__NetBSD__') assert(d == '1', '__NetBSD__ value is @0@ instead of 1'.format(d)) + elif host_system == 'gnu' + d = cc.get_define('__GNU__') + assert(d == '1', '__GNU__ value is @0@ instead of 1'.format(d)) else error('Please report a bug and help us improve support for this platform') endif diff --git a/test cases/common/138 c cpp and asm/meson.build b/test cases/common/138 c cpp and asm/meson.build index 
2c3610e..ca820e2 100644 --- a/test cases/common/138 c cpp and asm/meson.build +++ b/test cases/common/138 c cpp and asm/meson.build @@ -9,7 +9,7 @@ if not supported_cpus.contains(cpu) error('MESON_SKIP_TEST unsupported cpu:' + cpu) endif -if meson.get_compiler('c').get_id() == 'msvc' +if meson.get_compiler('c').get_argument_syntax() == 'msvc' error('MESON_SKIP_TEST MSVC can\'t compile assembly') endif diff --git a/test cases/common/14 configure file/differentafterbasename1.in b/test cases/common/14 configure file/differentafterbasename1.in new file mode 100644 index 0000000..e69de29 diff --git a/test cases/common/14 configure file/differentafterbasename2.in b/test cases/common/14 configure file/differentafterbasename2.in new file mode 100644 index 0000000..e69de29 diff --git a/test cases/common/14 configure file/meson.build b/test cases/common/14 configure file/meson.build index d7beeb1..53b06f3 100644 --- a/test cases/common/14 configure file/meson.build +++ b/test cases/common/14 configure file/meson.build @@ -12,20 +12,20 @@ assert(conf.get('var', 'default') == 'mystring', 'Get function is not working.') assert(conf.get('notthere', 'default') == 'default', 'Default value getting is not working.') cfile = configure_file(input : 'config.h.in', -output : 'config.h', -configuration : conf) + output : 'config.h', + configuration : conf) e = executable('inctest', 'prog.c', # Note that you should NOT do this. Don't add generated headers here # This tests that we do the right thing even if people add in conf files # to their sources. -cfile) + cfile) test('inctest', e) # Test if we can also pass files() as input configure_file(input : files('config.h.in'), - output : 'config2.h', - configuration : conf) + output : 'config2.h', + configuration : conf) # Now generate a header file with an external script. 
genprog = import('python3').find_python() @@ -93,8 +93,7 @@ dump = configuration_data() dump.set('ZERO', 0) config_templates = files(['config4a.h.in', 'config4b.h.in']) foreach config_template : config_templates - configure_file(input : config_template, output : '@BASENAME@', - configuration : dump) + configure_file(input : config_template, output : '@BASENAME@', configuration : dump) endforeach test('Substituted', executable('prog4', 'prog4.c')) @@ -123,8 +122,7 @@ conf5.set('var2', 'error') configure_file( input : 'config5.h.in', output : '@BASENAME@', - configuration : conf5 -) + configuration : conf5) test('test5', executable('prog5', 'prog5.c')) # Test escaping @@ -134,8 +132,7 @@ conf6.set('var2', 'bar') configure_file( input : 'config6.h.in', output : '@BASENAME@', - configuration : conf6 -) + configuration : conf6) test('test6', executable('prog6', 'prog6.c')) # test empty install dir string @@ -152,8 +149,7 @@ configure_file( input : 'config7.h.in', output : '@BASENAME@', format : 'cmake', - configuration : conf7 -) + configuration : conf7) test('test7', executable('prog7', 'prog7.c')) # Test copying of an empty configuration data object @@ -182,24 +178,21 @@ configure_file( input : 'config8.h.in', output : '@BASENAME@', encoding : 'koi8-r', - configuration : conf8 -) + configuration : conf8) # Test that passing an empty configuration_data() object to a file with # #mesondefine substitutions does not print the warning. configure_file( input: 'nosubst-nocopy1.txt.in', output: 'nosubst-nocopy1.txt', - configuration : configuration_data() -) + configuration : configuration_data()) # test that passing an empty configuration_data() object to a file with # @foo@ substitutions does not print the warning. 
configure_file( input: 'nosubst-nocopy2.txt.in', output: 'nosubst-nocopy2.txt', - configuration : configuration_data() -) + configuration : configuration_data()) # test that passing a configured file object to test() works, and that passing # an empty configuration_data() object to a file that leads to no substitutions @@ -207,25 +200,45 @@ configure_file( test_file = configure_file( input: 'test.py.in', output: 'test.py', - configuration: configuration_data() -) + configuration: configuration_data()) # Test that overwriting an existing file creates a warning. configure_file( input: 'test.py.in', output: 'double_output.txt', - configuration: conf -) + configuration: conf) configure_file( input: 'test.py.in', output: 'double_output.txt', - configuration: conf -) + configuration: conf) # Test that the same file name in a different subdir will not create a warning configure_file( input: 'test.py.in', output: 'no_write_conflict.txt', + configuration: conf) + +# Test that @BASENAME@ is substituted before checking and does not create a warning. +configure_file( + input: 'differentafterbasename1.in', + output: '@BASENAME@', + configuration: conf +) +configure_file( + input: 'differentafterbasename2.in', + output: '@BASENAME@', + configuration: conf +) + +# Test that @BASENAME@ is substituted before checking and does create a warning on conflict. 
+configure_file( + input: 'sameafterbasename.in', + output: '@BASENAME@', + configuration: conf +) +configure_file( + input: 'sameafterbasename.in2', + output: '@BASENAME@', configuration: conf ) @@ -233,3 +246,27 @@ test('configure-file', test_file) cdata = configuration_data() cdata.set('invalid_value', ['array']) + +# Dictionaries + +cdata = configuration_data({ + 'A_STRING' : '"foo"', + 'A_INT' : 42, + 'A_DEFINED' : true, + 'A_UNDEFINED' : false, +}) + +configure_file(output : 'config9a.h', + configuration : cdata, +) + +configure_file(output : 'config9b.h', + configuration : { + 'B_STRING' : '"foo"', + 'B_INT' : 42, + 'B_DEFINED' : true, + 'B_UNDEFINED' : false, + } +) + +test('test9', executable('prog9', 'prog9.c')) diff --git a/test cases/common/14 configure file/prog9.c b/test cases/common/14 configure file/prog9.c new file mode 100644 index 0000000..28c7354 --- /dev/null +++ b/test cases/common/14 configure file/prog9.c @@ -0,0 +1,18 @@ +#include +#include +#include + +#if defined(A_UNDEFINED) || defined(B_UNDEFINED) +#error "Should not be defined" +#endif + +#if !defined(A_DEFINED) || !defined(B_DEFINED) +#error "Should be defined" +#endif + +int main(int argc, char **argv) { + return strcmp(A_STRING, "foo") + || strcmp(B_STRING, "foo") + || A_INT != 42 + || B_INT != 42; +} diff --git a/test cases/common/14 configure file/sameafterbasename.in b/test cases/common/14 configure file/sameafterbasename.in new file mode 100644 index 0000000..e69de29 diff --git a/test cases/common/14 configure file/sameafterbasename.in2 b/test cases/common/14 configure file/sameafterbasename.in2 new file mode 100644 index 0000000..e69de29 diff --git a/test cases/common/143 C and CPP link/meson.build b/test cases/common/143 C and CPP link/meson.build index 55c1b87..79d6f67 100644 --- a/test cases/common/143 C and CPP link/meson.build +++ b/test cases/common/143 C and CPP link/meson.build @@ -25,9 +25,16 @@ libc = static_library('cfoo', ['foo.c', 'foo.h']) # ourselves at configure 
time and then 'find' it with cxx.find_library(). cxx = meson.get_compiler('cpp') -if cxx.get_id() == 'msvc' +if cxx.get_argument_syntax() == 'msvc' + if cxx.get_id() == 'msvc' + static_linker = find_program('lib') + elif cxx.get_id() == 'clang-cl' + static_linker = find_program('llvm-lib') + else + error('unable to determine static linker to use with this compiler') + endif compile_cmd = ['/c', '@INPUT@', '/Fo@OUTPUT@'] - stlib_cmd = ['lib', '/OUT:@OUTPUT@', '@INPUT@'] + stlib_cmd = [static_linker, '/OUT:@OUTPUT@', '@INPUT@'] else compile_cmd = ['-c', '-fPIC', '@INPUT@', '-o', '@OUTPUT@'] stlib_cmd = ['ar', 'csr', '@OUTPUT@', '@INPUT@'] diff --git a/test cases/common/152 simd/simd_mmx.c b/test cases/common/152 simd/simd_mmx.c index 731abd1..528ed3c 100644 --- a/test cases/common/152 simd/simd_mmx.c +++ b/test cases/common/152 simd/simd_mmx.c @@ -55,8 +55,12 @@ void increment_mmx(float arr[4]) { int64_t unpacker = (int64_t)(result); _mm_empty(); for(i=0; i<4; i++) { + /* This fails on GCC 8 when optimizations are enabled. + * Disable it. Patches welcome to fix this. 
arr[i] = (float)(unpacker & ((1<<16)-1)); unpacker >>= 16; + */ + arr[i] += 1.0f; } } diff --git a/test cases/common/152 simd/simd_sse2.c b/test cases/common/152 simd/simd_sse2.c index 0274533..271022e 100644 --- a/test cases/common/152 simd/simd_sse2.c +++ b/test cases/common/152 simd/simd_sse2.c @@ -21,7 +21,7 @@ int sse2_available() { #endif void increment_sse2(float arr[4]) { - double darr[4]; + ALIGN_16 double darr[4]; __m128d val1 = _mm_set_pd(arr[0], arr[1]); __m128d val2 = _mm_set_pd(arr[2], arr[3]); __m128d one = _mm_set_pd(1.0, 1.0); diff --git a/test cases/common/152 simd/simd_sse3.c b/test cases/common/152 simd/simd_sse3.c index e97d102..89c2f8b 100644 --- a/test cases/common/152 simd/simd_sse3.c +++ b/test cases/common/152 simd/simd_sse3.c @@ -22,7 +22,7 @@ int sse3_available() { #endif void increment_sse3(float arr[4]) { - double darr[4]; + ALIGN_16 double darr[4]; __m128d val1 = _mm_set_pd(arr[0], arr[1]); __m128d val2 = _mm_set_pd(arr[2], arr[3]); __m128d one = _mm_set_pd(1.0, 1.0); diff --git a/test cases/common/152 simd/simd_sse41.c b/test cases/common/152 simd/simd_sse41.c index 0308c7e..859fb43 100644 --- a/test cases/common/152 simd/simd_sse41.c +++ b/test cases/common/152 simd/simd_sse41.c @@ -24,7 +24,7 @@ int sse41_available() { #endif void increment_sse41(float arr[4]) { - double darr[4]; + ALIGN_16 double darr[4]; __m128d val1 = _mm_set_pd(arr[0], arr[1]); __m128d val2 = _mm_set_pd(arr[2], arr[3]); __m128d one = _mm_set_pd(1.0, 1.0); diff --git a/test cases/common/152 simd/simd_sse42.c b/test cases/common/152 simd/simd_sse42.c index 137ffc4..edd6e5b 100644 --- a/test cases/common/152 simd/simd_sse42.c +++ b/test cases/common/152 simd/simd_sse42.c @@ -27,7 +27,7 @@ int sse42_available() { #endif void increment_sse42(float arr[4]) { - double darr[4]; + ALIGN_16 double darr[4]; __m128d val1 = _mm_set_pd(arr[0], arr[1]); __m128d val2 = _mm_set_pd(arr[2], arr[3]); __m128d one = _mm_set_pd(1.0, 1.0); diff --git a/test cases/common/152 
simd/simd_ssse3.c b/test cases/common/152 simd/simd_ssse3.c index ab4dff4..0156f77 100644 --- a/test cases/common/152 simd/simd_ssse3.c +++ b/test cases/common/152 simd/simd_ssse3.c @@ -30,7 +30,7 @@ int ssse3_available() { #endif void increment_ssse3(float arr[4]) { - double darr[4]; + ALIGN_16 double darr[4]; __m128d val1 = _mm_set_pd(arr[0], arr[1]); __m128d val2 = _mm_set_pd(arr[2], arr[3]); __m128d one = _mm_set_pd(1.0, 1.0); diff --git a/test cases/common/152 simd/simdchecker.c b/test cases/common/152 simd/simdchecker.c index 222fbf3..cd6fe4f 100644 --- a/test cases/common/152 simd/simdchecker.c +++ b/test cases/common/152 simd/simdchecker.c @@ -1,93 +1,143 @@ #include #include +#include -/* - * A function that checks at runtime which simd accelerations are - * available and calls the best one. Falls - * back to plain C implementation if SIMD is not available. - */ +typedef void (*simd_func)(float*); + +int check_simd_implementation(float *four, + const float *four_initial, + const char *simd_type, + const float *expected, + simd_func fptr, + const int blocksize) { + int rv = 0; + memcpy(four, four_initial, blocksize*sizeof(float)); + printf("Using %s.\n", simd_type); + fptr(four); + for(int i=0; i +#ifdef _MSC_VER +#define ALIGN_16 __declspec(align(16)) +#else +#include +#define ALIGN_16 alignas(16) +#endif + + /* Yes, I do know that arr[4] decays into a pointer * as a function argument. Don't do this in real code * but for this test it is ok. 
diff --git a/test cases/common/158 wrap file should not failed/meson.build b/test cases/common/158 wrap file should not failed/meson.build index 9cf4e9a..f4ec2a8 100644 --- a/test cases/common/158 wrap file should not failed/meson.build +++ b/test cases/common/158 wrap file should not failed/meson.build @@ -1,6 +1,9 @@ -project('mainproj', 'c') +project('mainproj', 'c', + default_options : ['wrap_mode=nodownload'], +) subproject('zlib') +subproject('foo') executable('grabprog', files('src/subprojects/prog.c')) executable('grabprog2', files('src/subprojects/foo/prog2.c')) diff --git a/test cases/common/158 wrap file should not failed/subprojects/foo.wrap b/test cases/common/158 wrap file should not failed/subprojects/foo.wrap new file mode 100644 index 0000000..90d6d40 --- /dev/null +++ b/test cases/common/158 wrap file should not failed/subprojects/foo.wrap @@ -0,0 +1,11 @@ +[wrap-file] +directory = foo-1.0 + +source_url = http://something.invalid +source_filename = foo-1.0.tar.xz +source_hash = ae5fc03185654f76b459db16ca25809703f8821aeb39a433902244bb479c4b79 +lead_directory_missing = true + +patch_url = https://something.invalid/patch +patch_filename = foo-1.0-patch.tar.xz +patch_hash = 8f2e286a4b190228d4e0c25ddc91195449cfb5e5c52006355838964b244037da diff --git a/test cases/common/158 wrap file should not failed/subprojects/packagecache/foo-1.0-patch.tar.xz b/test cases/common/158 wrap file should not failed/subprojects/packagecache/foo-1.0-patch.tar.xz new file mode 100644 index 0000000000000000000000000000000000000000..26d2927e34aebacfe5bfd1b509f26d4d1fd87ae6 GIT binary patch literal 232 zcmVvv1>{>Aw`D=kij|b<7 zsqdHKkKOY}qnJrd$~)fZkv#uCjnMf!ZB=~5s{*ASD8Ai9$!4wI;AS*$QCf-96~39c zgh%6plgD_WL5^sCKSu2#Jt2vv1>|A32YI-8jc*Gk? 
zoaVyH{CG= 7.0.0') + if c.get_id() == 'gcc' and c.version().version_compare('>= 7.0.0') attributes += 'fallthrough' endif endif @@ -97,7 +95,7 @@ foreach a : ['dllexport', 'dllimport'] endforeach message('checking get_supported_function_attributes') -if c.get_id() != 'msvc' +if not ['msvc', 'clang-cl'].contains(c.get_id()) multi_expected = attributes else multi_expected = [] diff --git a/test cases/common/205 broken subproject/meson.build b/test cases/common/205 broken subproject/meson.build new file mode 100644 index 0000000..e3a6cae --- /dev/null +++ b/test cases/common/205 broken subproject/meson.build @@ -0,0 +1,2 @@ +project('test broken subproject') +subproject('broken', required : false) diff --git a/test cases/common/205 broken subproject/subprojects/broken/broken.c b/test cases/common/205 broken subproject/subprojects/broken/broken.c new file mode 100644 index 0000000..a9fc4b1 --- /dev/null +++ b/test cases/common/205 broken subproject/subprojects/broken/broken.c @@ -0,0 +1 @@ +#error This must not compile diff --git a/test cases/common/205 broken subproject/subprojects/broken/meson.build b/test cases/common/205 broken subproject/subprojects/broken/meson.build new file mode 100644 index 0000000..2d64fde --- /dev/null +++ b/test cases/common/205 broken subproject/subprojects/broken/meson.build @@ -0,0 +1,4 @@ +project('broken', 'c') + +executable('app', 'broken.c') +assert(false, 'This subproject must fail') diff --git a/test cases/common/206 argument syntax/meson.build b/test cases/common/206 argument syntax/meson.build new file mode 100644 index 0000000..216da45 --- /dev/null +++ b/test cases/common/206 argument syntax/meson.build @@ -0,0 +1,25 @@ +project( + 'argument syntax', + ['c'], +) + +cc = meson.get_compiler('c') + +if ['gcc', 'lcc', 'clang'].contains(cc.get_id()) + expected = 'gcc' +elif ['msvc', 'clang-cl'].contains(cc.get_id()) + expected = 'msvc' +elif cc.get_id() == 'intel' + if host_machine.system() == 'windows' + expected = 'msvc' + else + 
expected = 'gcc' + endif +else + # It's possible that other compilers end up here that shouldn't + expected = 'other' +endif + +assert(cc.get_argument_syntax() == expected, + 'Wrong output for compiler @0@. expected @1@ but got @2@'.format( + cc.get_id(), expected, cc.get_argument_syntax())) diff --git a/test cases/common/207 install name_prefix name_suffix/installed_files.txt b/test cases/common/207 install name_prefix name_suffix/installed_files.txt new file mode 100644 index 0000000..240a8be --- /dev/null +++ b/test cases/common/207 install name_prefix name_suffix/installed_files.txt @@ -0,0 +1,15 @@ +?msvc:usr/bin/baz.pdb +?msvc:usr/bin/bowcorge.pdb +?msvc:usr/bin/foo.pdb +?msvc:usr/lib/baz.pdb +?msvc:usr/lib/bowcorge.pdb +?msvc:usr/lib/foo.pdb +usr/?lib/bowcorge.stern +usr/lib/?libbaz.cheese +usr/lib/bar.a +usr/lib/bowcorge?implib +usr/lib/bowgrault.stern +usr/lib/foo?implib +usr/lib/foo?so +usr/lib/libbaz?implib +usr/lib/libqux.cheese diff --git a/test cases/common/207 install name_prefix name_suffix/libfile.c b/test cases/common/207 install name_prefix name_suffix/libfile.c new file mode 100644 index 0000000..44f7667 --- /dev/null +++ b/test cases/common/207 install name_prefix name_suffix/libfile.c @@ -0,0 +1,14 @@ +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +int DLL_PUBLIC func() { + return 0; +} diff --git a/test cases/common/207 install name_prefix name_suffix/meson.build b/test cases/common/207 install name_prefix name_suffix/meson.build new file mode 100644 index 0000000..4539999 --- /dev/null +++ b/test cases/common/207 install name_prefix name_suffix/meson.build @@ -0,0 +1,10 @@ +project('library with name_prefix name_suffix test', 'c') + +shared_library('foo', 'libfile.c', name_prefix: '', install : true) 
+static_library('bar', 'libfile.c', name_prefix: '', install : true) + +shared_library('baz', 'libfile.c', name_suffix: 'cheese', install : true) +static_library('qux', 'libfile.c', name_suffix: 'cheese', install : true) + +shared_library('corge', 'libfile.c', name_prefix: 'bow', name_suffix: 'stern', install : true) +static_library('grault', 'libfile.c', name_prefix: 'bow', name_suffix: 'stern', install : true) diff --git a/test cases/common/208 kwarg entry/inc/prog.h b/test cases/common/208 kwarg entry/inc/prog.h new file mode 100644 index 0000000..665521d --- /dev/null +++ b/test cases/common/208 kwarg entry/inc/prog.h @@ -0,0 +1,3 @@ +#pragma once + +#define MESSAGE "Hello there.\n" diff --git a/test cases/common/208 kwarg entry/installed_files.txt b/test cases/common/208 kwarg entry/installed_files.txt new file mode 100644 index 0000000..5e796b0 --- /dev/null +++ b/test cases/common/208 kwarg entry/installed_files.txt @@ -0,0 +1,2 @@ +usr/bin/prog?exe +?msvc:usr/bin/prog.pdb diff --git a/test cases/common/208 kwarg entry/meson.build b/test cases/common/208 kwarg entry/meson.build new file mode 100644 index 0000000..564ec37 --- /dev/null +++ b/test cases/common/208 kwarg entry/meson.build @@ -0,0 +1,7 @@ +project('kwarg', 'c') + +default_kwargs = {'install': true, + 'include_directories': include_directories('inc')} + +executable('prog', 'prog.c', + kwargs: default_kwargs) diff --git a/test cases/common/208 kwarg entry/prog.c b/test cases/common/208 kwarg entry/prog.c new file mode 100644 index 0000000..0c57f66 --- /dev/null +++ b/test cases/common/208 kwarg entry/prog.c @@ -0,0 +1,7 @@ +#include +#include + +int main(int argc, char **argv) { + printf(MESSAGE); + return 0; +} diff --git a/test cases/common/25 library versions/installed_files.txt b/test cases/common/25 library versions/installed_files.txt index c842ed8..938e063 100644 --- a/test cases/common/25 library versions/installed_files.txt +++ b/test cases/common/25 library versions/installed_files.txt 
@@ -1,2 +1,3 @@ usr/lib/prefixsomelib.suffix +usr/lib/prefixsomelib?implib ?msvc:usr/lib/prefixsomelib.pdb diff --git a/test cases/common/25 library versions/lib.c b/test cases/common/25 library versions/lib.c index 67b6f4d..10019dc 100644 --- a/test cases/common/25 library versions/lib.c +++ b/test cases/common/25 library versions/lib.c @@ -1,3 +1,14 @@ -int myFunc() { +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +int DLL_PUBLIC myFunc() { return 55; } diff --git a/test cases/common/44 options/meson.build b/test cases/common/44 options/meson.build index f177aa4..c6cf9c8 100644 --- a/test cases/common/44 options/meson.build +++ b/test cases/common/44 options/meson.build @@ -29,3 +29,5 @@ endif if get_option('integer_opt') != 3 error('Incorrect value in integer option.') endif + +assert(get_option('wrap_mode') == 'default', 'Wrap mode option is broken.') diff --git a/test cases/common/48 pkgconfig-gen/dependencies/custom.c b/test cases/common/48 pkgconfig-gen/dependencies/custom.c new file mode 100644 index 0000000..2cc5651 --- /dev/null +++ b/test cases/common/48 pkgconfig-gen/dependencies/custom.c @@ -0,0 +1,3 @@ +int custom_function() { + return 42; +} diff --git a/test cases/common/48 pkgconfig-gen/dependencies/meson.build b/test cases/common/48 pkgconfig-gen/dependencies/meson.build index 047e7e7..2dad393 100644 --- a/test cases/common/48 pkgconfig-gen/dependencies/meson.build +++ b/test cases/common/48 pkgconfig-gen/dependencies/meson.build @@ -6,6 +6,7 @@ pkgg = import('pkgconfig') exposed_lib = shared_library('libexposed', 'exposed.c') internal_lib = shared_library('libinternal', 'internal.c') main_lib = both_libraries('libmain', link_with : [exposed_lib, internal_lib]) +custom_lib = shared_library('custom', 
'custom.c') pkgg.generate(exposed_lib) @@ -14,7 +15,7 @@ pc_dep = dependency('libfoo', version : '>=1.0') pc_dep_dup = dependency('libfoo', version : '>= 1.0') notfound_dep = dependency('notfound', required : false) threads_dep = dependency('threads') -custom_dep = declare_dependency(link_args : ['-lcustom'], compile_args : ['-DCUSTOM']) +custom_dep = declare_dependency(link_with : custom_lib, compile_args : ['-DCUSTOM']) custom2_dep = declare_dependency(link_args : ['-lcustom2'], compile_args : ['-DCUSTOM2']) # Generate a PC file: diff --git a/test cases/common/49 custom install dirs/installed_files.txt b/test cases/common/49 custom install dirs/installed_files.txt index 7d24ce8..4e17c2d 100644 --- a/test cases/common/49 custom install dirs/installed_files.txt +++ b/test cases/common/49 custom install dirs/installed_files.txt @@ -4,8 +4,8 @@ usr/dib/dab/dub2/prog2?exe ?msvc:usr/dib/dab/dub2/prog2.pdb usr/some/dir/sample.h usr/some/dir2/sample.h -usr/woman/prog.1.gz -usr/woman2/prog.1.gz +usr/woman/prog.1 +usr/woman2/prog.1 usr/meow/datafile.cat usr/meow2/datafile.cat usr/woof/subdir/datafile.dog diff --git a/test cases/common/64 foreach/meson.build b/test cases/common/64 foreach/meson.build index e633de8..7084e80 100644 --- a/test cases/common/64 foreach/meson.build +++ b/test cases/common/64 foreach/meson.build @@ -18,3 +18,16 @@ foreach i : tests # we definitely don't want that. 
tests = ['test4', 'prog4', 'prog4.c'] endforeach + +items = ['a', 'continue', 'b', 'break', 'c'] +result = [] +foreach i : items + if i == 'continue' + continue + elif i == 'break' + break + endif + result += i +endforeach + +assert(result == ['a', 'b'], 'Continue or break in foreach failed') diff --git a/test cases/common/91 default options/meson.build b/test cases/common/91 default options/meson.build index 9f45df0..c4c72ef 100644 --- a/test cases/common/91 default options/meson.build +++ b/test cases/common/91 default options/meson.build @@ -6,11 +6,9 @@ project('default options', 'cpp', 'c', default_options : [ 'warning_level=3', ]) -cpp_id = meson.get_compiler('cpp').get_id() - assert(get_option('buildtype') == 'debugoptimized', 'Build type default value wrong.') -if cpp_id == 'msvc' +if meson.get_compiler('cpp').get_argument_syntax() == 'msvc' cpp_eh = get_option('cpp_eh') assert(cpp_eh == 'none', 'MSVC eh value is "' + cpp_eh + '" instead of "none"') else @@ -33,4 +31,3 @@ assert(w_level == '3', 'warning level "' + w_level + '" instead of "3"') # assert(not cc.compiles('int foobar;'), 'Default arg not used in test.') # assert(cc.compiles('int foobar;', no_builtin_args : true), 'No_builtin did not disable builtins.') # endif - diff --git a/test cases/csharp/1 basic/installed_files.txt b/test cases/csharp/1 basic/installed_files.txt index f64c68c..5022d28 100644 --- a/test cases/csharp/1 basic/installed_files.txt +++ b/test cases/csharp/1 basic/installed_files.txt @@ -1 +1,2 @@ usr/bin/prog.exe +?msvc:usr/bin/prog.pdb diff --git a/test cases/csharp/2 library/installed_files.txt b/test cases/csharp/2 library/installed_files.txt index 4ebea55..73e77a2 100644 --- a/test cases/csharp/2 library/installed_files.txt +++ b/test cases/csharp/2 library/installed_files.txt @@ -1,2 +1,5 @@ usr/bin/prog.exe -usr/lib/helper.dll +?msvc:usr/bin/prog.pdb +?msvc:usr/bin/helper.dll +?msvc:usr/bin/helper.pdb +?gcc:usr/lib/helper.dll diff --git a/test cases/d/9 features/app.d 
b/test cases/d/9 features/app.d index 6b43bf0..05c56ca 100644 --- a/test cases/d/9 features/app.d +++ b/test cases/d/9 features/app.d @@ -41,6 +41,30 @@ void main (string[] args) exit (1); } } + + version (With_VersionInteger) + version(3) exit(0); + + version (With_Debug) + debug exit(0); + + version (With_DebugInteger) + debug(3) exit(0); + + version (With_DebugIdentifier) + debug(DebugIdentifier) exit(0); + + version (With_DebugAll) { + int dbg = 0; + debug dbg++; + debug(2) dbg++; + debug(3) dbg++; + debug(4) dbg++; + debug(DebugIdentifier) dbg++; + + if (dbg == 5) + exit(0); + } // we fail here exit (1); diff --git a/test cases/d/9 features/meson.build b/test cases/d/9 features/meson.build index 694e488..06f0341 100644 --- a/test cases/d/9 features/meson.build +++ b/test cases/d/9 features/meson.build @@ -1,4 +1,4 @@ -project('D Features', 'd') +project('D Features', 'd', default_options : ['debug=false']) # ONLY FOR BACKWARDS COMPATIBILITY. # DO NOT DO THIS IN NEW CODE! @@ -44,3 +44,63 @@ e_test = executable('dapp_test', d_unittest: true ) test('dapp_test', e_test) + +# test version level +e_version_int = executable('dapp_version_int', + test_src, + d_import_dirs: [data_dir], + d_module_versions: ['With_VersionInteger', 3], +) +test('dapp_version_int_t', e_version_int, args: ['debug']) + +# test version level failure +e_version_int_fail = executable('dapp_version_int_fail', + test_src, + d_import_dirs: [data_dir], + d_module_versions: ['With_VersionInteger', 2], +) +test('dapp_version_int_t_fail', e_version_int_fail, args: ['debug'], should_fail: true) + +# test debug conditions: disabled +e_no_debug = executable('dapp_no_debug', + test_src, + d_import_dirs: [data_dir], + d_module_versions: ['With_Debug'], +) +test('dapp_no_debug_t_fail', e_no_debug, args: ['debug'], should_fail: true) + +# test debug conditions: enabled +e_debug = executable('dapp_debug', + test_src, + d_import_dirs: [data_dir], + d_module_versions: ['With_Debug'], + d_debug: 1, +) 
+test('dapp_debug_t', e_debug, args: ['debug']) + +# test debug conditions: integer +e_debug_int = executable('dapp_debug_int', + test_src, + d_import_dirs: [data_dir], + d_module_versions: ['With_DebugInteger'], + d_debug: 3, +) +test('dapp_debug_int_t', e_debug_int, args: ['debug']) + +# test debug conditions: identifier +e_debug_ident = executable('dapp_debug_ident', + test_src, + d_import_dirs: [data_dir], + d_module_versions: ['With_DebugIdentifier'], + d_debug: 'DebugIdentifier', +) +test('dapp_debug_ident_t', e_debug_ident, args: ['debug']) + +# test with all debug conditions at once, and with redundant values +e_debug_all = executable('dapp_debug_all', + test_src, + d_import_dirs: [data_dir], + d_module_versions: ['With_DebugAll'], + d_debug: ['4', 'DebugIdentifier', 2, 'DebugIdentifierUnused'], +) +test('dapp_debug_all_t', e_debug_all, args: ['debug']) diff --git a/test cases/failing/88 dub compiler/meson.build b/test cases/failing/88 dub compiler/meson.build index f5bc494..2f0b801 100644 --- a/test cases/failing/88 dub compiler/meson.build +++ b/test cases/failing/88 dub compiler/meson.build @@ -1,3 +1,9 @@ project('dub', 'd', meson_version: '0.48.0') +if meson.get_compiler('d').get_id() == 'dmd' + if host_machine.system() == 'windows' or host_machine.system() == 'cygwin' + error('MESON_SKIP_TEST Windows test environment lacks multiple D compilers.') + endif +endif + dependency('dubtestproject:test2', method: 'dub') # Compiler mismatch diff --git a/test cases/failing/90 subproj not-found dep/meson.build b/test cases/failing/90 subproj not-found dep/meson.build new file mode 100644 index 0000000..2b17df1 --- /dev/null +++ b/test cases/failing/90 subproj not-found dep/meson.build @@ -0,0 +1,2 @@ +project('dep-test') +missing = dependency('', fallback: ['somesubproj', 'notfound_dep'], required: true) diff --git a/test cases/failing/90 subproj not-found dep/subprojects/somesubproj/meson.build b/test cases/failing/90 subproj not-found 
dep/subprojects/somesubproj/meson.build new file mode 100644 index 0000000..5f451f4 --- /dev/null +++ b/test cases/failing/90 subproj not-found dep/subprojects/somesubproj/meson.build @@ -0,0 +1,3 @@ +project('dep', 'c') + +notfound_dep = dependency('', required : false) diff --git a/test cases/failing/91 kwarg dupe/meson.build b/test cases/failing/91 kwarg dupe/meson.build new file mode 100644 index 0000000..06821a2 --- /dev/null +++ b/test cases/failing/91 kwarg dupe/meson.build @@ -0,0 +1,6 @@ +project('dupe kwarg', 'c') + +dupedict = {'install': true} + +executable('prog', 'prog.c', install: true, + kwargs: dupedict) diff --git a/test cases/failing/91 kwarg dupe/prog.c b/test cases/failing/91 kwarg dupe/prog.c new file mode 100644 index 0000000..5f3fbe6 --- /dev/null +++ b/test cases/failing/91 kwarg dupe/prog.c @@ -0,0 +1,6 @@ +#include + +int main(int argc, char **argv) { + printf("I don't get built. It makes me saaaaaad. :(\n"); + return 0; +} diff --git a/test cases/fortran/9 cpp/meson.build b/test cases/fortran/9 cpp/meson.build index 49497c0..93037aa 100644 --- a/test cases/fortran/9 cpp/meson.build +++ b/test cases/fortran/9 cpp/meson.build @@ -6,5 +6,16 @@ if cpp.get_id() == 'clang' error('MESON_SKIP_TEST Clang C++ does not find -lgfortran for some reason.') endif -e = executable('cppfort', 'main.cpp', 'fortran.f') +fc = meson.get_compiler('fortran') +link_with = [] +if fc.get_id() == 'intel' + link_with += fc.find_library('ifport') +endif + +e = executable( + 'cppfort', + ['main.cpp', 'fortran.f'], + dependencies : [link_with], +) + test('C++ FORTRAN', e) diff --git a/test cases/frameworks/1 boost/meson.build b/test cases/frameworks/1 boost/meson.build index d1e1da4..1d29455 100644 --- a/test cases/frameworks/1 boost/meson.build +++ b/test cases/frameworks/1 boost/meson.build @@ -10,6 +10,12 @@ if not dep.found() error('MESON_SKIP_TEST boost not found.') endif +compiler = meson.get_compiler('cpp') +if compiler.has_argument('-permissive') + # boost 
1.64, the version we test against, doesn't work with -permissive + add_project_arguments('-permissive', language: 'cpp') +endif + # We want to have multiple separate configurations of Boost # within one project. The need to be independent of each other. # Use one without a library dependency and one with it. diff --git a/test cases/frameworks/10 gtk-doc/doc/meson.build b/test cases/frameworks/10 gtk-doc/doc/meson.build index 059d405..019be94 100644 --- a/test cases/frameworks/10 gtk-doc/doc/meson.build +++ b/test cases/frameworks/10 gtk-doc/doc/meson.build @@ -16,3 +16,18 @@ gnome.gtkdoc('foobar2', content_files : [docbook, version_xml], install : true, install_dir : 'foobar2') + +gnome.gtkdoc('foobar', + module_version : '3.0', + src_dir : inc, + main_sgml : 'foobar-docs.sgml', + content_files : [docbook, version_xml], + install : true) + +gnome.gtkdoc('foobar2', + module_version : '3.0', + src_dir : inc, + main_sgml : 'foobar-docs.sgml', + content_files : [docbook, version_xml], + install : true, + install_dir : 'foobar3') diff --git a/test cases/frameworks/10 gtk-doc/installed_files.txt b/test cases/frameworks/10 gtk-doc/installed_files.txt index 2bfb3f5..952a724 100644 --- a/test cases/frameworks/10 gtk-doc/installed_files.txt +++ b/test cases/frameworks/10 gtk-doc/installed_files.txt @@ -27,3 +27,31 @@ usr/share/gtk-doc/html/foobar2/right-insensitive.png usr/share/gtk-doc/html/foobar2/style.css usr/share/gtk-doc/html/foobar2/up.png usr/share/gtk-doc/html/foobar2/up-insensitive.png +usr/share/gtk-doc/html/foobar-3.0/BAR.html +usr/share/gtk-doc/html/foobar-3.0/foobar-3.0.devhelp2 +usr/share/gtk-doc/html/foobar-3.0/foobar.html +usr/share/gtk-doc/html/foobar-3.0/foobar-foo.html +usr/share/gtk-doc/html/foobar-3.0/foobar-foo-version.html +usr/share/gtk-doc/html/foobar-3.0/home.png +usr/share/gtk-doc/html/foobar-3.0/index.html +usr/share/gtk-doc/html/foobar-3.0/left.png +usr/share/gtk-doc/html/foobar-3.0/left-insensitive.png 
+usr/share/gtk-doc/html/foobar-3.0/right.png +usr/share/gtk-doc/html/foobar-3.0/right-insensitive.png +usr/share/gtk-doc/html/foobar-3.0/style.css +usr/share/gtk-doc/html/foobar-3.0/up.png +usr/share/gtk-doc/html/foobar-3.0/up-insensitive.png +usr/share/gtk-doc/html/foobar3/BAR.html +usr/share/gtk-doc/html/foobar3/foobar2-3.0.devhelp2 +usr/share/gtk-doc/html/foobar3/foobar.html +usr/share/gtk-doc/html/foobar3/foobar2-foo.html +usr/share/gtk-doc/html/foobar3/foobar2-foo-version.html +usr/share/gtk-doc/html/foobar3/home.png +usr/share/gtk-doc/html/foobar3/index.html +usr/share/gtk-doc/html/foobar3/left.png +usr/share/gtk-doc/html/foobar3/left-insensitive.png +usr/share/gtk-doc/html/foobar3/right.png +usr/share/gtk-doc/html/foobar3/right-insensitive.png +usr/share/gtk-doc/html/foobar3/style.css +usr/share/gtk-doc/html/foobar3/up.png +usr/share/gtk-doc/html/foobar3/up-insensitive.png diff --git a/test cases/frameworks/15 llvm/meson.build b/test cases/frameworks/15 llvm/meson.build index e05fddd..b43bb87 100644 --- a/test cases/frameworks/15 llvm/meson.build +++ b/test cases/frameworks/15 llvm/meson.build @@ -2,18 +2,29 @@ project('llvmtest', ['c', 'cpp'], default_options : ['c_std=c99']) d = dependency('llvm', required : false) if not d.found() - error('MESON_SKIP_TEST llvm not found.') + d = dependency('llvm', required : false, static : true) + if not d.found() + error('MESON_SKIP_TEST llvm not found.') + else + static = true + endif +else + static = false endif -d = dependency('llvm', modules : 'not-found', required : false) +d = dependency('llvm', modules : 'not-found', required : false, static : static) assert(d.found() == false, 'not-found llvm module found') -d = dependency('llvm', version : '<0.1', required : false) +d = dependency('llvm', version : '<0.1', required : false, static : static) assert(d.found() == false, 'ancient llvm module found') -d = dependency('llvm', optional_modules : 'not-found', required : false) +d = dependency('llvm', optional_modules : 
'not-found', required : false, static : static) assert(d.found() == true, 'optional module stopped llvm from being found.') +# Check we can apply a version constraint +d = dependency('llvm', version : ['< 500', '>=@0@'.format(d.version())], required: false, static : static) +assert(d.found() == true, 'Cannot set version constraints') + dep_tinfo = dependency('tinfo', required : false) if not dep_tinfo.found() cpp = meson.get_compiler('cpp') @@ -35,12 +46,10 @@ foreach static : [true, false] 'sum.c', dependencies : [ llvm_dep, dep_tinfo, - dependency('zlib'), + # zlib will be statically linked on windows + dependency('zlib', required : host_machine.system() != 'windows'), meson.get_compiler('c').find_library('dl', required : false), ] ) endif endforeach - -# Check we can apply a version constraint -dependency('llvm', version: '>=@0@'.format(d.version())) diff --git a/test cases/frameworks/17 mpi/is_broken_ubuntu.py b/test cases/frameworks/17 mpi/is_broken_ubuntu.py index d0c0d0d..27651ba 100755 --- a/test cases/frameworks/17 mpi/is_broken_ubuntu.py +++ b/test cases/frameworks/17 mpi/is_broken_ubuntu.py @@ -5,5 +5,5 @@ import sys fc = open('/etc/apt/sources.list').read() -if 'artful' not in fc and 'bionic' not in fc: +if 'artful' not in fc and 'bionic' not in fc and 'cosmic' not in fc: sys.exit(1) diff --git a/test cases/frameworks/17 mpi/meson.build b/test cases/frameworks/17 mpi/meson.build index 1085d40..2102b81 100644 --- a/test cases/frameworks/17 mpi/meson.build +++ b/test cases/frameworks/17 mpi/meson.build @@ -1,4 +1,4 @@ -project('mpi', 'c', 'cpp') +project('mpi', 'c', 'cpp', default_options: ['b_asneeded=false']) cc = meson.get_compiler('c') @@ -17,7 +17,7 @@ exec = executable('exec', test('MPI C', exec) if build_machine.system() != 'windows' - # C++ MPI not supported by MS-MPI used on AppVeyor. 
+ # C++ MPI not supported by MS-MPI mpicpp = dependency('mpi', language : 'cpp') execpp = executable('execpp', 'main.cpp', @@ -44,4 +44,6 @@ if uburesult.returncode() != 0 and add_languages('fortran', required : false) endif # Check we can apply a version constraint -dependency('mpi', version: '>=@0@'.format(mpic.version())) +if mpic.version() != 'unknown' + dependency('mpi', version: '>=@0@'.format(mpic.version())) +endif diff --git a/test cases/frameworks/17 mpi/meson.build.orig b/test cases/frameworks/17 mpi/meson.build.orig new file mode 100644 index 0000000..2102b81 --- /dev/null +++ b/test cases/frameworks/17 mpi/meson.build.orig @@ -0,0 +1,49 @@ +project('mpi', 'c', 'cpp', default_options: ['b_asneeded=false']) + +cc = meson.get_compiler('c') + +if build_machine.system() == 'windows' and cc.get_id() != 'msvc' + error('MESON_SKIP_TEST: MPI not available on Windows without MSVC.') +endif + +mpic = dependency('mpi', language : 'c', required : false) +if not mpic.found() + error('MESON_SKIP_TEST: MPI not found, skipping.') +endif +exec = executable('exec', + 'main.c', + dependencies : [mpic]) + +test('MPI C', exec) + +if build_machine.system() != 'windows' + # C++ MPI not supported by MS-MPI + mpicpp = dependency('mpi', language : 'cpp') + execpp = executable('execpp', + 'main.cpp', + dependencies : [mpicpp]) + + test('MPI C++', execpp) +endif + +# OpenMPI is broken with Fortran on Ubuntu Artful. 
+# Remove this once the following bug has been fixed: +# +# https://bugs.launchpad.net/ubuntu/+source/gcc-defaults/+bug/1727474 + +ubudetector = find_program('is_broken_ubuntu.py') +uburesult = run_command(ubudetector) + +if uburesult.returncode() != 0 and add_languages('fortran', required : false) + mpifort = dependency('mpi', language : 'fortran') + exef = executable('exef', + 'main.f90', + dependencies : [mpifort]) + + test('MPI Fortran', exef) +endif + +# Check we can apply a version constraint +if mpic.version() != 'unknown' + dependency('mpi', version: '>=@0@'.format(mpic.version())) +endif diff --git a/test cases/frameworks/23 hotdoc/installed_files.txt b/test cases/frameworks/23 hotdoc/installed_files.txt index 6804dbf..296dcf6 100644 --- a/test cases/frameworks/23 hotdoc/installed_files.txt +++ b/test cases/frameworks/23 hotdoc/installed_files.txt @@ -2,7 +2,8 @@ usr/share/doc/foobar/html/foo.html usr/share/doc/foobar/html/c-index.html usr/share/doc/foobar/html/index.html usr/share/doc/foobar/html/dumped.trie -usr/share/doc/foobar/html/assets/css/prism.css +usr/share/doc/foobar/html/assets/theme.json +usr/share/doc/foobar/html/assets/css/prism-tomorrow.css usr/share/doc/foobar/html/assets/css/bootstrap-toc.min.css usr/share/doc/foobar/html/assets/css/frontend.css usr/share/doc/foobar/html/assets/css/dumped.trie diff --git a/test cases/frameworks/23 hotdoc/meson.build b/test cases/frameworks/23 hotdoc/meson.build index 191569d..dd3c92a 100644 --- a/test cases/frameworks/23 hotdoc/meson.build +++ b/test cases/frameworks/23 hotdoc/meson.build @@ -7,3 +7,9 @@ endif subdir('doc') +assert(hotdoc.has_extensions(['gi-extension']) == true, + 'GI extension should always be found.') + +assert(hotdoc.has_extensions(['gi-extension', 'no-way-you-exist-extension']) == false, + 'A hotdoc extension called "no-way-you-exist-extension" should never be found.') + diff --git a/test cases/frameworks/24 libgcrypt/libgcrypt_prog.c b/test cases/frameworks/24 
libgcrypt/libgcrypt_prog.c new file mode 100644 index 0000000..f131359 --- /dev/null +++ b/test cases/frameworks/24 libgcrypt/libgcrypt_prog.c @@ -0,0 +1,8 @@ +#include + +int +main() +{ + gcry_check_version(NULL); + return 0; +} diff --git a/test cases/frameworks/24 libgcrypt/meson.build b/test cases/frameworks/24 libgcrypt/meson.build new file mode 100644 index 0000000..5aadb13 --- /dev/null +++ b/test cases/frameworks/24 libgcrypt/meson.build @@ -0,0 +1,23 @@ +project('libgcrypt test', 'c') + +wm = find_program('libgcrypt-config', required : false) +if not wm.found() + error('MESON_SKIP_TEST: libgcrypt-config not installed') +endif + +libgcrypt_dep = dependency('libgcrypt', version : '>= 1.0') +libgcrypt_ver = libgcrypt_dep.version() +assert(libgcrypt_ver.split('.').length() > 1, 'libgcrypt version is "@0@"'.format(libgcrypt_ver)) +message('libgcrypt version is "@0@"'.format(libgcrypt_ver)) +e = executable('libgcrypt_prog', 'libgcrypt_prog.c', dependencies : libgcrypt_dep) + +test('libgcrypttest', e) + +# Test using the method keyword: + +dependency('libgcrypt', method : 'config-tool') +dependency('libgcrypt', method : 'pkg-config', required: false) + +# Check we can apply a version constraint +dependency('libgcrypt', version: '>=@0@'.format(libgcrypt_dep.version()), method: 'pkg-config', required: false) +dependency('libgcrypt', version: '>=@0@'.format(libgcrypt_dep.version()), method: 'config-tool') diff --git a/test cases/frameworks/4 qt/meson.build b/test cases/frameworks/4 qt/meson.build index 7ac945e..15fd822 100644 --- a/test cases/frameworks/4 qt/meson.build +++ b/test cases/frameworks/4 qt/meson.build @@ -58,6 +58,10 @@ foreach qt : ['qt4', 'qt5'] # Test that setting a unique name with a positional argument works qtmodule.preprocess(qt + 'teststuff', qresources : files(['stuff.qrc', 'stuff2.qrc']), method : get_option('method')) + # Test that passing extra arguments to rcc works + # qt4-rcc and qt5-rcc take different arguments, for example qt4: 
['-compress', '3']; qt5: '--compress=3' + qtmodule.preprocess(qt + 'testrccarg', qresources : files(['stuff.qrc', 'stuff2.qrc']), rcc_extra_arguments : '--compress=3', method : get_option('method')) + qexe = executable(qt + 'app', sources : ['main.cpp', 'mainWindow.cpp', # Sources that don't need preprocessing. prep, prep_rcc], diff --git a/test cases/frameworks/6 gettext/data/data3/meson.build b/test cases/frameworks/6 gettext/data/data3/meson.build new file mode 100644 index 0000000..044b498 --- /dev/null +++ b/test cases/frameworks/6 gettext/data/data3/meson.build @@ -0,0 +1,9 @@ +# Target name will contain a path separator +i18n.merge_file( + input: 'test.desktop.in', + output: 'test4.desktop', + type: 'desktop', + po_dir: '../../po', + install: true, + install_dir: join_paths(get_option('datadir'), 'applications') +) diff --git a/test cases/frameworks/6 gettext/data/data3/test.desktop.in b/test cases/frameworks/6 gettext/data/data3/test.desktop.in new file mode 100644 index 0000000..33b9a9f --- /dev/null +++ b/test cases/frameworks/6 gettext/data/data3/test.desktop.in @@ -0,0 +1,6 @@ +[Desktop Entry] +Name=Test +GenericName=Application +Comment=Test Application +Type=Application + diff --git a/test cases/frameworks/6 gettext/data/meson.build b/test cases/frameworks/6 gettext/data/meson.build index 5e28bb6..d78c19e 100644 --- a/test cases/frameworks/6 gettext/data/meson.build +++ b/test cases/frameworks/6 gettext/data/meson.build @@ -26,3 +26,5 @@ i18n.merge_file( install: true, install_dir: join_paths(get_option('datadir'), 'applications') ) + +subdir('data3') diff --git a/test cases/frameworks/6 gettext/generated/desktopgenerator.py b/test cases/frameworks/6 gettext/generated/desktopgenerator.py index 150ed04..e49c2d6 100644 --- a/test cases/frameworks/6 gettext/generated/desktopgenerator.py +++ b/test cases/frameworks/6 gettext/generated/desktopgenerator.py @@ -1,9 +1,13 @@ #!/usr/bin/env python3 -import sys, shutil +import os, sys, shutil ifile = 
sys.argv[1] ofile = sys.argv[2] -os.unlink(ofile) +try: + os.unlink(ofile) +except FileNotFoundError: + pass + shutil.copy(ifile, ofile) diff --git a/test cases/frameworks/6 gettext/generated/meson.build b/test cases/frameworks/6 gettext/generated/meson.build index 02e61ba..5ed9205 100644 --- a/test cases/frameworks/6 gettext/generated/meson.build +++ b/test cases/frameworks/6 gettext/generated/meson.build @@ -11,4 +11,6 @@ i18n.merge_file( output : 'something.desktop', type : 'desktop', po_dir : '../po', + install: true, + install_dir: join_paths(get_option('datadir'), 'applications'), ) diff --git a/test cases/frameworks/6 gettext/installed_files.txt b/test cases/frameworks/6 gettext/installed_files.txt index 9298909..850711a 100644 --- a/test cases/frameworks/6 gettext/installed_files.txt +++ b/test cases/frameworks/6 gettext/installed_files.txt @@ -1,6 +1,8 @@ usr/bin/intlprog?exe usr/share/locale/de/LC_MESSAGES/intltest.mo usr/share/locale/fi/LC_MESSAGES/intltest.mo +usr/share/applications/something.desktop usr/share/applications/test.desktop usr/share/applications/test2.desktop usr/share/applications/test3.desktop +usr/share/applications/test4.desktop diff --git a/test cases/frameworks/7 gnome/resources/res3.txt b/test cases/frameworks/7 gnome/resources/res3.txt new file mode 100644 index 0000000..aeed4a5 --- /dev/null +++ b/test cases/frameworks/7 gnome/resources/res3.txt @@ -0,0 +1 @@ +This file is from the wrong directory. 
diff --git a/test cases/java/8 codegen custom target/com/mesonbuild/Config.java.in b/test cases/java/8 codegen custom target/com/mesonbuild/Config.java.in new file mode 100644 index 0000000..8845985 --- /dev/null +++ b/test cases/java/8 codegen custom target/com/mesonbuild/Config.java.in @@ -0,0 +1,5 @@ +package com.mesonbuild; + +public class Config { + public static final boolean FOOBAR = true; +} diff --git a/test cases/java/8 codegen custom target/com/mesonbuild/Simple.java b/test cases/java/8 codegen custom target/com/mesonbuild/Simple.java new file mode 100644 index 0000000..df3c53d --- /dev/null +++ b/test cases/java/8 codegen custom target/com/mesonbuild/Simple.java @@ -0,0 +1,12 @@ +package com.mesonbuild; + +import com.mesonbuild.Config; + +class Simple { + public static void main(String [] args) { + if (Config.FOOBAR) { + TextPrinter t = new TextPrinter("Printing from Java."); + t.print(); + } + } +} diff --git a/test cases/java/8 codegen custom target/com/mesonbuild/TextPrinter.java b/test cases/java/8 codegen custom target/com/mesonbuild/TextPrinter.java new file mode 100644 index 0000000..dc2771c --- /dev/null +++ b/test cases/java/8 codegen custom target/com/mesonbuild/TextPrinter.java @@ -0,0 +1,14 @@ +package com.mesonbuild; + +class TextPrinter { + + private String msg; + + TextPrinter(String s) { + msg = s; + } + + public void print() { + System.out.println(msg); + } +} diff --git a/test cases/java/8 codegen custom target/com/mesonbuild/meson.build b/test cases/java/8 codegen custom target/com/mesonbuild/meson.build new file mode 100644 index 0000000..0309941 --- /dev/null +++ b/test cases/java/8 codegen custom target/com/mesonbuild/meson.build @@ -0,0 +1,8 @@ +python = import('python').find_installation('python3') + +config_file = custom_target('confgen', + input : 'Config.java.in', + output : 'Config.java', + command : [python, '-c', + 'import shutil, sys, time; time.sleep(1); shutil.copy(sys.argv[1], sys.argv[2])', + '@INPUT@', '@OUTPUT@']) 
diff --git a/test cases/java/8 codegen custom target/meson.build b/test cases/java/8 codegen custom target/meson.build new file mode 100644 index 0000000..ab441a6 --- /dev/null +++ b/test cases/java/8 codegen custom target/meson.build @@ -0,0 +1,15 @@ +# If we generate code under the build directory then the backend needs to add +# the build directory to the -sourcepath passed to javac otherwise the compiler +# won't be able to handle the -implicit:class behaviour of automatically +# compiling dependency classes. + +project('codegenjava', 'java') + +subdir('com/mesonbuild') + +javaprog = jar('myprog', + config_file[0], + 'com/mesonbuild/Simple.java', + 'com/mesonbuild/TextPrinter.java', + main_class : 'com.mesonbuild.Simple') +test('subdirtest', javaprog) diff --git a/test cases/linuxlike/13 cmake dependency/incdir/myinc.h b/test cases/linuxlike/13 cmake dependency/incdir/myinc.h new file mode 100644 index 0000000..4b66a6c --- /dev/null +++ b/test cases/linuxlike/13 cmake dependency/incdir/myinc.h @@ -0,0 +1,3 @@ +#pragma once + +#include diff --git a/test cases/linuxlike/13 cmake dependency/meson.build b/test cases/linuxlike/13 cmake dependency/meson.build new file mode 100644 index 0000000..72773b2 --- /dev/null +++ b/test cases/linuxlike/13 cmake dependency/meson.build @@ -0,0 +1,56 @@ +project('external CMake dependency', 'c') + +if not find_program('cmake', required: false).found() + error('MESON_SKIP_TEST cmake binary not available.') +endif + +# Zlib is probably on all dev machines. 
+ +dep = dependency('ZLIB', version : '>=1.2', method : 'cmake') +exe = executable('zlibprog', 'prog-checkver.c', + dependencies : dep, + c_args : '-DFOUND_ZLIB="' + dep.version() + '"') + +assert(dep.version().version_compare('>=1.2'), 'CMake version numbers exposed incorrectly.') + +# Check that CMake targets are extracted +dept = dependency('ZLIB', version : '>=1.2', method : 'cmake', modules : 'ZLIB::ZLIB') +exet = executable('zlibprog_target', 'prog-checkver.c', + dependencies : dep, + c_args : '-DFOUND_ZLIB="' + dep.version() + '"') + +# Check that the version exposed by zlib internally is the same as the one we +# retrieve from the pkg-config file. This assumes that the packager didn't mess +# up, but we can be reasonably sure of that. +test('zlibtest', exe) + +# Test that dependencies of dependencies work. +dep2 = declare_dependency(dependencies : dep) +exe2 = executable('zlibprog2', 'prog.c', dependencies : dep2) +test('zlibtest2', exe2) + +# Try to find a nonexistent library to ensure requires:false works. 
+ +depf1 = dependency('nvakuhrabnsdfasdf', required : false, method : 'cmake') +depf2 = dependency('ZLIB', required : false, method : 'cmake', modules : 'dfggh::hgfgag') + +assert(depf2.found() == false, 'Invalid CMake targets should fail') + +# Try to compile a test that takes a dep and an include_directories + +cc = meson.get_compiler('c') +zlibdep = cc.find_library('z') +code = '''#include + +int main(int argc, char **argv) { + void * something = deflate; + if(something != 0) + return 0; + return 1; +} +''' + +inc = include_directories('incdir') + +r = cc.run(code, include_directories : inc, dependencies : zlibdep) +assert(r.returncode() == 0, 'Running manual zlib test failed.') diff --git a/test cases/linuxlike/13 cmake dependency/prog-checkver.c b/test cases/linuxlike/13 cmake dependency/prog-checkver.c new file mode 100644 index 0000000..16b7170 --- /dev/null +++ b/test cases/linuxlike/13 cmake dependency/prog-checkver.c @@ -0,0 +1,15 @@ +#include +#include +#include + +int main(int argc, char **argv) { + void * something = deflate; + if(strcmp(ZLIB_VERSION, FOUND_ZLIB) != 0) { + printf("Meson found '%s' but zlib is '%s'\n", FOUND_ZLIB, ZLIB_VERSION); + return 2; + } + if(something != 0) + return 0; + printf("Couldn't find 'deflate'\n"); + return 1; +} diff --git a/test cases/linuxlike/13 cmake dependency/prog.c b/test cases/linuxlike/13 cmake dependency/prog.c new file mode 100644 index 0000000..cea986d --- /dev/null +++ b/test cases/linuxlike/13 cmake dependency/prog.c @@ -0,0 +1,8 @@ +#include + +int main(int argc, char **argv) { + void * something = deflate; + if(something != 0) + return 0; + return 1; +} diff --git a/test cases/nasm/1 configure file/meson.build b/test cases/nasm/1 configure file/meson.build index e128325..85ecaf1 100644 --- a/test cases/nasm/1 configure file/meson.build +++ b/test cases/nasm/1 configure file/meson.build @@ -47,3 +47,9 @@ exe = executable('hello', asm_gen.process('hello.asm'), ) test('test-nasm-configure-file', exe) + 
+ +exe2 = executable('hello2', objects : exe.extract_all_objects(), + link_args: link_args, +) + +test('test-nasm-extract-all-objects', exe2) diff --git a/test cases/unit/35 dist script/meson.build b/test cases/unit/35 dist script/meson.build index 3415ec4..fd672a9 100644 --- a/test cases/unit/35 dist script/meson.build +++ b/test cases/unit/35 dist script/meson.build @@ -4,4 +4,4 @@ project('dist script', 'c', exe = executable('comparer', 'prog.c') test('compare', exe) -meson.add_dist_script('replacer.py') +meson.add_dist_script('replacer.py', '"incorrect"', '"correct"') diff --git a/test cases/unit/35 dist script/replacer.py b/test cases/unit/35 dist script/replacer.py index adda365..96ccdcc 100755 --- a/test cases/unit/35 dist script/replacer.py +++ b/test cases/unit/35 dist script/replacer.py @@ -2,11 +2,15 @@ import os import pathlib +import sys + +if len(sys.argv) < 3: + sys.exit('usage: replacer.py <pattern> <replacement>') source_root = pathlib.Path(os.environ['MESON_DIST_ROOT']) modfile = source_root / 'prog.c' contents = modfile.read_text() -contents = contents.replace('"incorrect"', '"correct"') +contents = contents.replace(sys.argv[1], sys.argv[2]) modfile.write_text(contents) diff --git a/test cases/unit/45 vscpp17/main.cpp b/test cases/unit/45 vscpp17/main.cpp new file mode 100644 index 0000000..36e4156 --- /dev/null +++ b/test cases/unit/45 vscpp17/main.cpp @@ -0,0 +1,7 @@ +[[nodiscard]] int foo() { + return 0; +} + +int main() { + return foo(); +} diff --git a/test cases/unit/45 vscpp17/meson.build b/test cases/unit/45 vscpp17/meson.build new file mode 100644 index 0000000..afe740b --- /dev/null +++ b/test cases/unit/45 vscpp17/meson.build @@ -0,0 +1,4 @@ +project('msvc_cpp17', 'cpp', default_options: ['cpp_std=c++17']) + +exe = executable('msvc_cpp17', 'main.cpp') +test('msvc_cpp17', exe) diff --git a/test cases/unit/46 native dep pkgconfig var/cross_pkgconfig.py b/test cases/unit/46 native dep pkgconfig var/cross_pkgconfig.py new file mode 100755 index 0000000..f0d89ee 
--- /dev/null +++ b/test cases/unit/46 native dep pkgconfig var/cross_pkgconfig.py @@ -0,0 +1,12 @@ +#!/usr/bin/env python3 + +import os +import sys +import subprocess + +environ = os.environ.copy() +environ['PKG_CONFIG_LIBDIR'] = os.path.join( + os.path.dirname(os.path.realpath(__file__)), 'cross_pkgconfig') + +sys.exit( + subprocess.run(['pkg-config'] + sys.argv[1:], env=environ).returncode) diff --git a/test cases/unit/46 native dep pkgconfig var/cross_pkgconfig/dep_tester.pc b/test cases/unit/46 native dep pkgconfig var/cross_pkgconfig/dep_tester.pc new file mode 100644 index 0000000..67d7afa --- /dev/null +++ b/test cases/unit/46 native dep pkgconfig var/cross_pkgconfig/dep_tester.pc @@ -0,0 +1,5 @@ +dep_type=cross + +Name: dependency() test +Description: dependency() test +Version: 0 diff --git a/test cases/unit/46 native dep pkgconfig var/meson.build b/test cases/unit/46 native dep pkgconfig var/meson.build new file mode 100644 index 0000000..d95dbcd --- /dev/null +++ b/test cases/unit/46 native dep pkgconfig var/meson.build @@ -0,0 +1,15 @@ +project('native dep pkgconfig test') + +if get_option('start_native') + dep_native = dependency('dep_tester', native: true, method: 'pkg-config') + dep_cross = dependency('dep_tester', native: false, method: 'pkg-config') +else + dep_cross = dependency('dep_tester', native: false, method: 'pkg-config') + dep_native = dependency('dep_tester', native: true, method: 'pkg-config') +endif + +dep_type = dep_native.get_pkgconfig_variable('dep_type') +assert(dep_type == 'native', 'Expected native') + +dep_type = dep_cross.get_pkgconfig_variable('dep_type') +assert(dep_type == 'cross', 'Expected cross') diff --git a/test cases/unit/46 native dep pkgconfig var/meson_options.txt b/test cases/unit/46 native dep pkgconfig var/meson_options.txt new file mode 100644 index 0000000..37006dd --- /dev/null +++ b/test cases/unit/46 native dep pkgconfig var/meson_options.txt @@ -0,0 +1,6 @@ +option( + 'start_native', + type : 'boolean', + 
value : 'false', + description : 'Start by creating a dependency() with native : true', +) diff --git a/test cases/unit/46 native dep pkgconfig var/native_pkgconfig/dep_tester.pc b/test cases/unit/46 native dep pkgconfig var/native_pkgconfig/dep_tester.pc new file mode 100644 index 0000000..affaa97 --- /dev/null +++ b/test cases/unit/46 native dep pkgconfig var/native_pkgconfig/dep_tester.pc @@ -0,0 +1,5 @@ +dep_type=native + +Name: dependency() test +Description: dependency() test +Version: 0 diff --git a/test cases/unit/46 native file binary/meson.build b/test cases/unit/46 native file binary/meson.build new file mode 100644 index 0000000..4489ac1 --- /dev/null +++ b/test cases/unit/46 native file binary/meson.build @@ -0,0 +1,21 @@ +project('test project') + +case = get_option('case') + +if case == 'find_program' + prog = find_program('bash') + result = run_command(prog, ['--version']) + assert(result.stdout().strip().endswith('12345'), 'Didn\'t load bash from config file') +elif case == 'config_dep' + add_languages('cpp') + dep = dependency('llvm') + assert(dep.get_configtool_variable('version').endswith('12345'), 'Didn\'t load llvm from config file') +elif case == 'python3' + prog = import('python3').find_python() + result = run_command(prog, ['--version']) + assert(result.stdout().strip().endswith('12345'), 'Didn\'t load python3 from config file') +elif case == 'python' + prog = import('python').find_installation() + result = run_command(prog, ['--version']) + assert(result.stdout().strip().endswith('12345'), 'Didn\'t load python from config file') +endif diff --git a/test cases/unit/46 native file binary/meson_options.txt b/test cases/unit/46 native file binary/meson_options.txt new file mode 100644 index 0000000..651da0e --- /dev/null +++ b/test cases/unit/46 native file binary/meson_options.txt @@ -0,0 +1,5 @@ +option( + 'case', + type : 'combo', + choices : ['find_program', 'config_dep', 'python3', 'python'] +) diff --git a/test cases/unit/46 
reconfigure/main.c b/test cases/unit/46 reconfigure/main.c new file mode 100644 index 0000000..25927f5 --- /dev/null +++ b/test cases/unit/46 reconfigure/main.c @@ -0,0 +1,4 @@ +int main(int argc, char *argv[]) +{ + return 0; +} diff --git a/test cases/unit/46 reconfigure/meson.build b/test cases/unit/46 reconfigure/meson.build new file mode 100644 index 0000000..6eaac5d --- /dev/null +++ b/test cases/unit/46 reconfigure/meson.build @@ -0,0 +1,9 @@ +project('test-reconfigure', 'c') + +message('opt1 ' + get_option('opt1')) +message('opt2 ' + get_option('opt2')) +message('opt3 ' + get_option('opt3')) +message('opt4 ' + get_option('opt4')) + +exe = executable('test1', 'main.c') +test('test1', exe) diff --git a/test cases/unit/46 reconfigure/meson_options.txt b/test cases/unit/46 reconfigure/meson_options.txt new file mode 100644 index 0000000..728f7b7 --- /dev/null +++ b/test cases/unit/46 reconfigure/meson_options.txt @@ -0,0 +1,4 @@ +option('opt1', type : 'string', value : 'default1') +option('opt2', type : 'string', value : 'default2') +option('opt3', type : 'string', value : 'default3') +option('opt4', type : 'string', value : 'default4') diff --git a/test cases/unit/47 testsetup default/envcheck.py b/test cases/unit/47 testsetup default/envcheck.py new file mode 100644 index 0000000..6ba3093 --- /dev/null +++ b/test cases/unit/47 testsetup default/envcheck.py @@ -0,0 +1,11 @@ +#!/usr/bin/env python3 + +import os + +assert('ENV_A' in os.environ) +assert('ENV_B' in os.environ) +assert('ENV_C' in os.environ) + +print('ENV_A is', os.environ['ENV_A']) +print('ENV_B is', os.environ['ENV_B']) +print('ENV_C is', os.environ['ENV_C']) diff --git a/test cases/unit/47 testsetup default/meson.build b/test cases/unit/47 testsetup default/meson.build new file mode 100644 index 0000000..bdd35b8 --- /dev/null +++ b/test cases/unit/47 testsetup default/meson.build @@ -0,0 +1,23 @@ +project('testsetup default', 'c') + +envcheck = find_program('envcheck.py') + +# Defining ENV_A in 
test-env should overwrite ENV_A from test setup +env_1 = environment() +env_1.set('ENV_A', '1') +test('test-env', envcheck, env: env_1) + +# Defining default env which is used unless --setup is given or the +# env variable is defined in the test. +env_2 = environment() +env_2.set('ENV_A', '2') +env_2.set('ENV_B', '2') +env_2.set('ENV_C', '2') +add_test_setup('mydefault', env: env_2, is_default: true) + +# Defining a test setup that will update some of the env variables +# from the default test setup. +env_3 = env_2 +env_3.set('ENV_A', '3') +env_3.set('ENV_B', '3') +add_test_setup('other', env: env_3) diff --git a/test cases/unit/48 pkgconfig csharp library/meson.build b/test cases/unit/48 pkgconfig csharp library/meson.build new file mode 100644 index 0000000..148d40f --- /dev/null +++ b/test cases/unit/48 pkgconfig csharp library/meson.build @@ -0,0 +1,10 @@ +project('pkgformat', 'cs', + version : '1.0') + +pkgg = import('pkgconfig') + +l = library('libsomething', 'somelib.cs') + +pkgg.generate(l, + version: '1.0', + description: 'A library that does something') diff --git a/test cases/unit/48 pkgconfig csharp library/somelib.cs b/test cases/unit/48 pkgconfig csharp library/somelib.cs new file mode 100644 index 0000000..24d37ed --- /dev/null +++ b/test cases/unit/48 pkgconfig csharp library/somelib.cs @@ -0,0 +1,12 @@ +using System; + +namespace Abc +{ + public static class Something + { + public static bool Api1(this String str) + { + return str == "foo"; + } + } +} diff --git a/test cases/unit/49 ldflagdedup/bob.c b/test cases/unit/49 ldflagdedup/bob.c new file mode 100644 index 0000000..a68d4b1 --- /dev/null +++ b/test cases/unit/49 ldflagdedup/bob.c @@ -0,0 +1,5 @@ +#include <gmodule.h> + +int func() { + return 0; +} diff --git a/test cases/unit/49 ldflagdedup/meson.build b/test cases/unit/49 ldflagdedup/meson.build new file mode 100644 index 0000000..0bbcc50 --- /dev/null +++ b/test cases/unit/49 ldflagdedup/meson.build @@ -0,0 +1,12 @@ +project('lddedup', 'c') + +# 
Chosen because its ldflags contains -Wl,--export-dynamic, +# which must be deduplicated. +gm = dependency('gmodule-2.0') + +lib = static_library('bob', 'bob.c', + dependencies: gm) + +executable('prog', 'prog.c', + link_with: lib, + dependencies: gm) diff --git a/test cases/unit/49 ldflagdedup/prog.c b/test cases/unit/49 ldflagdedup/prog.c new file mode 100644 index 0000000..02c599d --- /dev/null +++ b/test cases/unit/49 ldflagdedup/prog.c @@ -0,0 +1,7 @@ +#include <gmodule.h> + +int func(); + +int main(int argc, char **argv) { + return func(); +} diff --git a/test cases/windows/16 gui app/meson.build b/test cases/windows/16 gui app/meson.build index 2435218..224d708 100644 --- a/test cases/windows/16 gui app/meson.build +++ b/test cases/windows/16 gui app/meson.build @@ -17,6 +17,10 @@ console_prog = executable('console_prog', 'console_prog.c', gui_app: false) tester = find_program('gui_app_tester.py') -tool = find_program('objdump', 'dumpbin') -test('is_gui', tester, args: [tool.path(), gui_prog, '2']) -test('not_gui', tester, args: [tool.path(), console_prog, '3']) +tool = find_program('objdump', 'dumpbin', required: false) +# TODO: when 'llvm-objdump -f' emits the subsystem type, we could use that also + +if tool.found() + test('is_gui', tester, args: [tool.path(), gui_prog, '2']) + test('not_gui', tester, args: [tool.path(), console_prog, '3']) +endif diff --git a/test cases/windows/7 dll versioning/installed_files.txt b/test cases/windows/7 dll versioning/installed_files.txt index 517620e..62b5c9a 100644 --- a/test cases/windows/7 dll versioning/installed_files.txt +++ b/test cases/windows/7 dll versioning/installed_files.txt @@ -14,9 +14,9 @@ ?msvc:usr/libexec/customdir.dll ?msvc:usr/libexec/customdir.lib ?msvc:usr/libexec/customdir.pdb -?msvc:usr/lib/module.dll -?msvc:usr/lib/module.lib -?msvc:usr/lib/module.pdb +?msvc:usr/lib/modules/module.dll +?msvc:usr/lib/modules/module.lib +?msvc:usr/lib/modules/module.pdb ?gcc:usr/bin/?libsome-0.dll ?gcc:usr/lib/libsome.dll.a 
?gcc:usr/bin/?libnoversion.dll @@ -27,5 +27,5 @@ ?gcc:usr/lib/libonlysoversion.dll.a ?gcc:usr/libexec/?libcustomdir.dll ?gcc:usr/libexec/libcustomdir.dll.a -?gcc:usr/lib/?libmodule.dll -?gcc:usr/lib/libmodule.dll.a +?gcc:usr/lib/modules/?libmodule.dll +?gcc:usr/lib/modules/libmodule.dll.a diff --git a/test cases/windows/7 dll versioning/meson.build b/test cases/windows/7 dll versioning/meson.build index 80acf88..983c2c4 100644 --- a/test cases/windows/7 dll versioning/meson.build +++ b/test cases/windows/7 dll versioning/meson.build @@ -49,4 +49,6 @@ shared_library('customdir', 'lib.c', install : true, install_dir : get_option('libexecdir')) -shared_module('module', 'lib.c', install : true) +shared_module('module', 'lib.c', + install : true, + install_dir: join_paths(get_option('libdir'), 'modules')) -- 2.7.4