--- /dev/null
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
--- /dev/null
+graft test?cases
+graft manual?tests
+graft cross
+graft data
+graft graphics
+graft man
+graft syntax-highlighting
+graft tools
+include authors.txt
+include contributing.txt
+include COPYING
+include README.md
+include run_cross_test.py
+include run_tests.py
+include run_unittests.py
+include run_project_tests.py
+include mesonrewriter.py
+include ghwt.py
+include __main__.py
--- /dev/null
+Metadata-Version: 1.1
+Name: meson
+Version: 0.44.0
+Summary: A high performance build system
+Home-page: http://mesonbuild.com
+Author: Jussi Pakkanen
+Author-email: jpakkane@gmail.com
+License: Apache License, Version 2.0
+Description-Content-Type: UNKNOWN
+Description: Meson is a cross-platform build system designed to be both as
+ fast and as user friendly as possible. It supports many languages and compilers, including
+GCC, Clang and Visual Studio. Its build definitions are written in a simple non-Turing
+ complete DSL.
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Natural Language :: English
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Operating System :: POSIX :: BSD
+Classifier: Operating System :: POSIX :: Linux
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Topic :: Software Development :: Build Tools
--- /dev/null
+<p align="center">
+<img src="http://mesonbuild.com/assets/images/meson_logo.png">
+</p>
+Meson® is a project to create the best possible next-generation
+build system.
+
+#### Status
+
+[![PyPI](https://img.shields.io/pypi/v/meson.svg)](https://pypi.python.org/pypi/meson)
+[![Travis](https://travis-ci.org/mesonbuild/meson.svg?branch=master)](https://travis-ci.org/mesonbuild/meson)
+[![Appveyor](https://ci.appveyor.com/api/projects/status/l5c8v71ninew2i3p?svg=true)](https://ci.appveyor.com/project/jpakkane/meson)
+[![Codecov](https://codecov.io/gh/mesonbuild/meson/coverage.svg?branch=master)](https://codecov.io/gh/mesonbuild/meson/branch/master)
+
+#### Dependencies
+
+ - [Python](http://python.org) (version 3.4 or newer)
+ - [Ninja](https://ninja-build.org) (version 1.5 or newer)
+
+#### Installing from source
+
+You can run Meson directly from a revision control checkout or an
+extracted tarball. If you wish you can install it locally with the
+standard Python distutils command `python3 setup.py install <your
+options here>`.
+
+Meson is also available from
+[PyPI](https://pypi.python.org/pypi/meson), so it can be installed
+with `pip3 install meson` (this does not require a source checkout,
+pip will download the package automatically). The exact command to
+type to install with pip can vary between systems, be sure to use the
+Python 3 version of pip.
+
+#### Running
+
+Meson requires that you have a source directory and a build directory
+and that these two are different. In your source root must exist a file
+called 'meson.build'. To generate the build system run this command:
+
+`meson <source directory> <build directory>`
+
+Depending on how you obtained Meson the command might also be called
+`meson.py` instead of plain `meson`. In the rest of this document we
+are going to use the latter form.
+
+You can omit either of the two directories, and Meson will substitute
+the current directory and autodetect what you mean. This allows you to
+do things like this:
+
+`cd source_root; mkdir builddir; cd builddir; meson ..`
+
+or
+
+`cd source_root; mkdir builddir; meson builddir`
+
+To compile, cd into your build directory and type `ninja`. To run unit
+tests, type `ninja test`.
+
+Install is the same but it can take an extra argument:
+
+`DESTDIR=/destdir/path ninja install`
+
+`DESTDIR` can be omitted. If you are installing to system directories,
+you may need to run this command with sudo.
+
+
+#### Contributing
+
+We love code contributions. See the contributing.txt file for
+details.
+
+
+#### IRC
+
+The IRC channel for Meson is `#mesonbuild` over at Freenode.
+
+
+#### Further info
+
+More information about the Meson build system can be found at the
+[project's home page](http://mesonbuild.com).
+
+Meson is a registered trademark of Jussi Pakkanen
--- /dev/null
+#!/usr/bin/env python3
+
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import meson
+import sys
+
+sys.exit(meson.main())
--- /dev/null
+Contributing to the Meson build system
+
+There are two simple ways to submit your patches. The preferred way is
+to send a github pull request. Small changes can also be sent as
+patches as emails to the Meson mailing list.
+
+
+Python Coding style
+
+Meson follows the basic Python coding style. Additional rules are the
+following:
+
+- indent 4 spaces, no tabs ever
+- indent meson.build files with two spaces
+- try to keep the code as simple as possible
+- contact the mailing list before embarking on large scale projects
+ to avoid wasted effort
+- all new features must come with a test (or several if it is
+ a big feature)
+
+Meson uses Flake8 for style guide enforcement. The Flake8 options for
+the project are contained in setup.cfg.
+
+To run Flake8 on your local clone of Meson:
+
+ $ python3 -m pip install flake8
+ $ cd meson
+ $ flake8
+
+C/C++ coding style
+
+Meson has a bunch of test code in several languages. The rules for
+those are simple.
+
+- indent 4 spaces, no tabs ever
+- brace always on the same line as if/for/else/function definition
+
+
+External dependencies
+
+The goal of Meson is to be as easily usable as possible. The user
+experience should be "get Python3 and Ninja, run", even on
+Windows. Unfortunately this means that we can't have dependencies on
+projects outside of Python's standard library. This applies only to
+core functionality, though. For additional helper programs etc the use
+of external dependencies may be ok. If you feel that you are dealing
+with this kind of case, please raise the issue on the mailing list
+first.
+
+
+What not to contribute?
+
+There are a few things that people seem to want to add to Meson but which
+are not there by design and will not be added either.
+
+The first one is defining your own functions or a generalized for loop.
+These are bad because they would make Meson's DSL Turing complete. The
+second feature is a Make backend.
+
+The FAQ has specific information why these two features will not be
+added to Meson: http://mesonbuild.com/FAQ.html
+
+Merge requests adding either of these two features will be automatically
+rejected. Please save everyone's time (especially your own) and don't start
+working on these features.
+
+
+Do I need to sign a CLA?
+
+No. All contributions are welcome.
--- /dev/null
+# This is a cross compilation file from OSX Yosemite to iPhone
+# Apple keeps changing the location and names of files so
+# these might not work for you. Use Google and xcrun.
+
+[binaries]
+c = 'clang'
+cpp = 'clang++'
+ar = 'ar'
+strip = 'strip'
+
+[properties]
+root = '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer'
+
+c_args = ['-arch', 'armv7', '-miphoneos-version-min=8.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.4.sdk']
+cpp_args = ['-arch', 'armv7', '-miphoneos-version-min=8.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.4.sdk']
+c_link_args = ['-arch', 'armv7', '-miphoneos-version-min=8.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.4.sdk']
+cpp_link_args = ['-arch', 'armv7', '-miphoneos-version-min=8.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.4.sdk']
+
+has_function_printf = true
+has_function_hfkerhisadf = false
+
+[host_machine]
+system = 'darwin'
+cpu_family = 'arm'
+cpu = 'armv7'
+endian = 'little'
+
--- /dev/null
+# This is a setup for compiling a program that runs natively
+# but uses a custom std lib. This test will only work on
+# x86_64.
+
+[target_machine]
+system = 'linux'
+cpu_family = 'x86_64'
+cpu = 'x86_64'
+endian = 'little'
+
+[properties]
+
+c_stdlib = ['mylibc', 'mylibc_dep'] # Subproject name, dependency name
--- /dev/null
+[binaries]
+# we could set exe_wrapper = qemu-arm-static but to test the case
+# when cross compiled binaries can't be run we don't do that
+c = '/usr/bin/arm-linux-gnueabihf-gcc-7'
+cpp = '/usr/bin/arm-linux-gnueabihf-g++-7'
+ar = '/usr/arm-linux-gnueabihf/bin/ar'
+strip = '/usr/arm-linux-gnueabihf/bin/strip'
+pkgconfig = '/usr/bin/arm-linux-gnueabihf-pkg-config'
+
+[properties]
+root = '/usr/arm-linux-gnueabihf'
+# Used in unit test '140 get define'
+c_args = ['-DMESON_TEST_ISSUE_1665=1']
+cpp_args = ['-DMESON_TEST_ISSUE_1665=1']
+
+has_function_printf = true
+has_function_hfkerhisadf = false
+
+[host_machine]
+system = 'linux'
+cpu_family = 'arm'
+cpu = 'armv7' # Not sure if correct.
+endian = 'little'
--- /dev/null
+# This is a setup for compiling a program that runs natively
+# but produces output that runs on a different platform.
+# That is either a cross compiler or something like binutils.
+
+# We don't need to specify any properties or compilers,
+# for we use the native ones and can run the resulting
+# binaries directly.
+
+[target_machine]
+system = 'linux'
+cpu_family = 'mips'
+cpu = 'mips'
+endian = 'little'
--- /dev/null
+# Something crazy: compiling on Linux a crosscompiler that
+# runs on Windows and generates code for OSX.
+
+[binaries]
+exe_wrapper = 'wine' # A command used to run generated executables.
+c = '/usr/bin/i686-w64-mingw32-gcc'
+cpp = '/usr/bin/i686-w64-mingw32-g++'
+ar = '/usr/bin/i686-w64-mingw32-ar'
+strip = '/usr/bin/i686-w64-mingw32-strip'
+pkgconfig = '/usr/bin/mingw32-pkg-config'
+
+[properties]
+root = '/usr/i686-w64-mingw32'
+
+[host_machine]
+system = 'windows'
+cpu_family = 'x86'
+cpu = 'i686'
+endian = 'little'
+
+[target_machine]
+system = 'darwin'
+cpu_family = 'arm'
+cpu = 'armv7h' # Don't know if this is correct.
+endian = 'little'
--- /dev/null
+%__meson %{_bindir}/meson
+%__meson_wrap_mode nodownload
+
+%meson \
+ export CFLAGS="${CFLAGS:-%__global_cflags}" \
+ export CXXFLAGS="${CXXFLAGS:-%__global_cxxflags}" \
+ export FFLAGS="${FFLAGS:-%__global_fflags}" \
+ export FCFLAGS="${FCFLAGS:-%__global_fcflags}" \
+ export LDFLAGS="${LDFLAGS:-%__global_ldflags}" \
+ %{__meson} \\\
+ --buildtype=plain \\\
+ --prefix=%{_prefix} \\\
+ --libdir=%{_libdir} \\\
+ --libexecdir=%{_libexecdir} \\\
+ --bindir=%{_bindir} \\\
+ --sbindir=%{_sbindir} \\\
+ --includedir=%{_includedir} \\\
+ --datadir=%{_datadir} \\\
+ --mandir=%{_mandir} \\\
+ --infodir=%{_infodir} \\\
+ --localedir=%{_datadir}/locale \\\
+ --sysconfdir=%{_sysconfdir} \\\
+ --localstatedir=%{_localstatedir} \\\
+ --sharedstatedir=%{_sharedstatedir} \\\
+ --wrap-mode=%{__meson_wrap_mode} \\\
+ %{_vpath_srcdir} %{_vpath_builddir} \\\
+ %{nil}
+
+%meson_build \
+ %ninja_build -C %{_vpath_builddir}
+
+%meson_install \
+ %ninja_install -C %{_vpath_builddir}
+
+%meson_test \
+ %ninja_test -C %{_vpath_builddir} || \
+ { rc=$?; \
+ echo "-----BEGIN TESTLOG-----"; \
+ cat %{_vpath_builddir}/meson-logs/testlog.txt; \
+ echo "-----END TESTLOG-----"; \
+ exit $rc; }
--- /dev/null
+#compdef meson mesonconf=meson-configure mesontest=meson-test mesonintrospect=meson-introspect
+
+# vim:ts=2 sw=2
+
+# Copyright (c) 2017 Arseny Maslennikov
+# All rights reserved. Individual authors, whether or not
+# specifically named, retain copyright in all changes; in what follows, they
+# are referred to as `the Meson development team'. This is for convenience
+# only and this body has no legal status. This file is distributed under
+# the following licence.
+#
+# Permission is hereby granted, without written agreement and without
+# licence or royalty fees, to use, copy, modify, and distribute this
+# software and to distribute modified versions of this software for any
+# purpose, provided that the above copyright notice and the following
+# two paragraphs appear in all copies of this software.
+#
+# In no event shall the Meson development team be liable to any party for
+# direct, indirect, special, incidental, or consequential damages arising out
+# of the use of this software and its documentation, even if the Meson
+# development team have been advised of the possibility of such damage.
+#
+# The Meson development team specifically disclaim any warranties, including,
+# but not limited to, the implied warranties of merchantability and fitness
+# for a particular purpose. The software provided hereunder is on an "as is"
+# basis, and the Meson development team have no obligation to provide
+# maintenance, support, updates, enhancements, or modifications.
+
+# Completion state shared with the dispatcher at the bottom of this file.
+local curcontext="$curcontext" state line
+local -i ret
+
+# Candidate value lists offered for the matching meson options below.
+local __meson_backends="(ninja xcode ${(j. .)${:-vs{,2010,2015,2017}}})"
+local __meson_build_types="(plain debug debugoptimized minsize release)"
+local __meson_wrap_modes="(WrapMode.{default,nofallback,nodownload})"
+
+# Top-level subcommands (name:description pairs for _describe).
+local -a meson_commands=(
+'setup:set up a build directory'
+'configure:configure a project'
+'test:run tests'
+'introspect:query project properties'
+'wrap:manage source dependencies'
+)
+
+# Succeeds when the given directory (default: $PWD) looks like a
+# configured Meson build directory (has meson-private/{build,coredata}.dat).
+(( $+functions[__meson_is_build_dir] )) || __meson_is_build_dir() {
+  local mpd="${1:-$PWD}/meson-private"
+  [[ -f "$mpd/build.dat" && -f "$mpd/coredata.dat" ]]
+  return $?
+}
+
+# TODO: implement build option completion
+(( $+functions[__meson_build_options] )) || __meson_build_options() {}
+# TODO: implement target name completion
+(( $+functions[__meson_targets] )) || __meson_targets() {}
+# `meson introspect` currently can provide that information in JSON.
+# We can:
+# 1) pipe its output to python3 -m json.tool | grep "$alovelyregex" | cut <...>
+# 2) teach mintro.py to use a different output format
+# (or perhaps just to select the fields printed)
+
+# Completes test names by running `meson test --list`; needs a build
+# directory (either $PWD or the one the user passed via -C).
+(( $+functions[__meson_test_names] )) || __meson_test_names() {
+  local rtests
+  if rtests="$(_call_program meson meson test ${opt_args[-C]:+-C "$opt_args[-C]"} --list)";
+  then
+    # One test name per output line.
+    local -a tests=(${(@f)rtests})
+    _describe -t "tests" "Meson tests" tests
+  else
+    _message -r "current working directory is not a build directory"
+    _message -r 'use -C $build_dir or cd $build_dir'
+  fi
+}
+
+# Offers the top-level subcommand names declared in meson_commands.
+(( $+functions[_meson_commands] )) || _meson_commands() {
+  _describe -t commands "Meson subcommands" meson_commands
+}
+
+# Completer for `meson setup`.  If the cwd contains meson.build it is
+# taken to be the source tree, so the first positional completes as the
+# build directory (and vice versa), mirroring meson's own substitution.
+(( $+functions[_meson-setup] )) || _meson-setup() {
+  local firstd secondd
+  if [[ -f "meson.build" ]]; then
+    # if there's no second argument on the command line
+    # cwd will implicitly be substituted:
+    # - as the source directory if it has a file with the name "meson.build";
+    # - as the build directory otherwise
+    # more info in mesonbuild/mesonmain.py
+    firstd="build"
+    secondd="source"
+  else
+    firstd="source"
+    secondd="build"
+  fi
+
+  _arguments \
+    '*-D-[set the value of a build option]:build option:__meson_build_options' \
+    '--prefix=[installation prefix]: :_directories' \
+    '--libdir=[library directory]: :_directories' \
+    '--libexecdir=[library executable directory]: :_directories' \
+    '--bindir=[executable directory]: :_directories' \
+    '--sbindir=[system executable directory]: :_directories' \
+    '--includedir=[header file directory]: :_directories' \
+    '--datadir=[data file directory]: :_directories' \
+    '--mandir=[manual page directory]: :_directories' \
+    '--infodir=[info page directory]: :_directories' \
+    '--localedir=[locale data directory]: :_directories' \
+    '--sysconfdir=[system configuration directory]: :_directories' \
+    '--localstatedir=[local state data directory]: :_directories' \
+    '--sharedstatedir=[arch-independent data directory]: :_directories' \
+    '--backend=[backend to use]:Meson backend:'"$__meson_backends" \
+    '--buildtype=[build type to use]:Meson build type:'"$__meson_build_types" \
+    '--strip[strip targets on install]' \
+    '--unity=[unity builds on/off]:whether to do unity builds:(on off subprojects)' \
+    '--werror[treat warnings as errors]' \
+    '--layout=[build directory layout]:build directory layout:(flat mirror)' \
+    '--default-library=[default library type]:default library type:(shared static)' \
+    '--warnlevel=[compiler warning level]:compiler warning level:warning level:(1 2 3)' \
+    '--stdsplit=[split stdout and stderr in test logs]' \
+    '--errorlogs=[prints the logs from failing tests]' \
+    '--cross-file=[cross-compilation environment description]:cross file:_files' \
+    '--wrap-mode=[special wrap mode]:wrap mode:'"$__meson_wrap_modes" \
+    ":$firstd directory:_directories" \
+    "::$secondd directory:_directories" \
+    #
+}
+
+# Completer for `meson configure` (also reached via the mesonconf alias).
+(( $+functions[_meson-configure] )) || _meson-configure() {
+  local curcontext="$curcontext"
+  # TODO: implement 'mesonconf @file'
+  local -a specs=(
+    '--clearcache[clear cached state]'
+    '*-D-[set the value of a build option]:build option:__meson_build_options'
+    '::build directory:_directories'
+  )
+
+  _arguments \
+    '(: -)'{'--help','-h'}'[show a help message and quit]' \
+    "${(@)specs}"
+}
+
+# Completer for `meson test` (also reached via the mesontest alias).
+# Positional arguments complete as test names via __meson_test_names.
+(( $+functions[_meson-test] )) || _meson-test() {
+  local curcontext="$curcontext"
+
+  # TODO: complete test suites
+  local -a specs=(
+    '(--quiet -q)'{'--quiet','-q'}'[produce less output to the terminal]'
+    '(--verbose -v)'{'--verbose','-v'}'[do not redirect stdout and stderr]'
+    '(--timeout-multiplier -t)'{'--timeout-multiplier','-t'}'[a multiplier for test timeouts]:Python floating-point number: '
+    '-C[directory to cd into]: :_directories'
+    '--repeat[number of times to run the tests]:number of times to repeat: '
+    '--no-rebuild[do not rebuild before running tests]'
+    '--gdb[run tests under gdb]'
+    '--list[list available tests]'
+    '(--wrapper --wrap)'{'--wrapper=','--wrap='}'[wrapper to run tests with]:wrapper program:_path_commands'
+    '(--no-suite)--suite[only run tests from this suite]:test suite: '
+    '(--suite)--no-suite[do not run tests from this suite]:test suite: '
+    '--no-stdsplit[do not split stderr and stdout in logs]'
+    '--print-errorlogs[print logs for failing tests]'
+    '--benchmark[run benchmarks instead of tests]'
+    '--logbase[base name for log file]:filename: '
+    '--num-processes[how many threads to use]:number of processes: '
+    '--setup[which test setup to use]:test setup: '
+    '--test-args[arguments to pass to the tests]: : '
+    '*:Meson tests:__meson_test_names'
+  )
+
+  _arguments \
+    '(: -)'{'--help','-h'}'[show a help message and quit]' \
+    "${(@)specs}"
+}
+
+# Completer for `meson introspect` (also reached via mesonintrospect).
+(( $+functions[_meson-introspect] )) || _meson-introspect() {
+  local curcontext="$curcontext"
+  local -a specs=(
+    '--targets[list top level targets]'
+    '--installed[list all installed files and directories]'
+    '--target-files[list source files for a given target]:target:__meson_targets'
+    '--buildsystem-files[list files that belong to the build system]'
+    '--buildoptions[list all build options]'
+    '--tests[list all unit tests]'
+    '--benchmarks[list all benchmarks]'
+    '--dependencies[list external dependencies]'
+    '--projectinfo[show project information]'
+    '::build directory:_directories'
+  )
+_arguments \
+  '(: -)'{'--help','-h'}'[show a help message and quit]' \
+  "${(@)specs}"
+}
+
+(( $+functions[_meson-wrap] )) || _meson-wrap() {
+  # TODO
+}
+
+# When invoked under one of the #compdef aliases (mesonconf, mesontest,
+# mesonintrospect), $service is already meson-<subcommand>: dispatch
+# straight to that completer.
+if [[ $service != meson ]]; then
+  _call_function ret _$service
+  return ret
+fi
+
+# Plain `meson`: complete global flags and the subcommand name, then
+# hand the remaining words to the matching _meson-<subcommand>.
+_arguments -C -R \
+  '(: -)'{'--help','-h'}'[show a help message and quit]' \
+  '(: -)'{'--version','-v'}'[show version information and quit]' \
+  '(-): :_meson_commands' \
+  '*:: :->post-command' \
+#
+ret=$?
+
+# _arguments returns 300 when a ->state action was selected.
+[[ $ret = 300 ]] && case "$state" in
+  post-command)
+    service="meson-$words[1]"
+    curcontext=${curcontext%:*:*}:$service:
+    _call_function ret _$service
+    ;;
+esac
+
+return ret
+
--- /dev/null
+;; command to comment/uncomment text.
+;; Meson files use '#' line comments, so bind the comment syntax
+;; locally before delegating to the generic `comment-dwim'.
+(defun meson-comment-dwim (arg)
+  "Comment or uncomment current line or region in a smart way.
+For detail, see `comment-dwim'."
+  (interactive "*P")
+  (require 'newcomment)
+  (let (
+        (comment-start "#") (comment-end "")
+        )
+    (comment-dwim arg)))
+
+;;(setq mymeson-keywords-regex (regex-opt '("if", "endif", "foreach", "endforeach")))
+
+;; keywords for syntax coloring: Meson's control-flow keywords.
+;; Note: Meson has no "for" keyword -- its loop construct is
+;; foreach/endforeach -- and elif/else/endforeach were missing.
+(setq meson-keywords
+      `(
+        ( ,(regexp-opt '("if" "elif" "else" "endif" "foreach" "endforeach") 'word) . font-lock-keyword-face)
+        )
+      )
+
+;; syntax table: only comment syntax needs customizing.
+(defvar meson-syntax-table nil "Syntax table for `meson-mode'.")
+(setq meson-syntax-table
+      (let ((synTable (make-syntax-table)))
+
+        ;; bash style comment: "#" opens a comment, newline closes it
+        (modify-syntax-entry ?# "< b" synTable)
+        (modify-syntax-entry ?\n "> b" synTable)
+
+        synTable))
+
+;; define the major mode.
+;; The original passed the descriptive sentence as the NAME argument of
+;; `define-derived-mode' (so it briefly became the mode-line lighter)
+;; and then patched it with (setq mode-name "meson"); supply the name
+;; and docstring in their proper positions instead.
+(define-derived-mode meson-mode fundamental-mode "meson"
+  "Major mode for editing Meson build definition files."
+  :syntax-table meson-syntax-table
+
+  (setq font-lock-defaults '(meson-keywords))
+
+  ;; modify the keymap: use '#'-aware commenting
+  (define-key meson-mode-map [remap comment-dwim] 'meson-comment-dwim)
+)
+
--- /dev/null
ftdetect sets the filetype
syntax does Meson syntax highlighting
indent does Meson indentation
--- /dev/null
" Recognise Meson build definition files.
au BufNewFile,BufRead meson.build,meson_options.txt set filetype=meson
--- /dev/null
+" Vim indent file
+" Language: Meson
+" Maintainer: Nirbheek Chauhan <nirbheek.chauhan@gmail.com>
+" Original Authors: David Bustos <bustos@caltech.edu>
+" Bram Moolenaar <Bram@vim.org>
+" Last Change: 2015 Feb 23
+
" Only load this indent file when no other was loaded.
if exists("b:did_indent")
  finish
endif
let b:did_indent = 1

" Some preliminary settings
setlocal nolisp " Make sure lisp indenting doesn't supersede us
setlocal autoindent " indentexpr isn't much help otherwise

setlocal indentexpr=GetMesonIndent(v:lnum)
" Re-indent as soon as one of these dedenting keywords is completed,
" or when ')' is typed in column one.
setlocal indentkeys+==elif,=else,=endforeach,=endif,0)

" Only define the function once.
if exists("*GetMesonIndent")
  finish
endif
let s:keepcpo= &cpo
" NOTE(review): boilerplate usually uses ':set cpo&vim' here ('cpoptions'
" is a global option) — presumably equivalent in effect; confirm.
setlocal cpo&vim

" Come here when loading the script the first time.

let s:maxoff = 50 " maximum number of lines to look backwards for ()

" Force sw=2 sts=2 because that's required by convention
setlocal shiftwidth=2
setlocal softtabstop=2
+
function GetMesonIndent(lnum)
  " Compute the indent for line a:lnum; return -1 to keep the current
  " indent.  (v:lnum == a:lnum when invoked through 'indentexpr'.)
  " The leftover debug statement `echom getline(line("."))`, which echoed
  " a message on every indent computation, has been removed.

  " If this line is explicitly joined: If the previous line was also joined,
  " line it up with that one, otherwise add two 'shiftwidth'
  if getline(a:lnum - 1) =~ '\\$'
    if a:lnum > 1 && getline(a:lnum - 2) =~ '\\$'
      return indent(a:lnum - 1)
    endif
    return indent(a:lnum - 1) + (exists("g:mesonindent_continue") ? eval(g:mesonindent_continue) : (shiftwidth() * 2))
  endif

  " If the start of the line is in a string don't change the indent.
  if has('syntax_items')
        \ && synIDattr(synID(a:lnum, 1, 1), "name") =~ "String$"
    return -1
  endif

  " Search backwards for the previous non-empty line.
  let plnum = prevnonblank(v:lnum - 1)

  if plnum == 0
    " This is the first non-empty line, use zero indent.
    return 0
  endif

  " If the previous line is inside parenthesis, use the indent of the starting
  " line.
  " Trick: use the non-existing "dummy" variable to break out of the loop when
  " going too far back.
  call cursor(plnum, 1)
  let parlnum = searchpair('(\|{\|\[', '', ')\|}\|\]', 'nbW',
        \ "line('.') < " . (plnum - s:maxoff) . " ? dummy :"
        \ . " synIDattr(synID(line('.'), col('.'), 1), 'name')"
        \ . " =~ '\\(Comment\\|Todo\\|String\\)$'")
  if parlnum > 0
    let plindent = indent(parlnum)
    let plnumstart = parlnum
  else
    let plindent = indent(plnum)
    let plnumstart = plnum
  endif


  " When inside parenthesis: If at the first line below the parenthesis add
  " a 'shiftwidth', otherwise same as previous line.
  " i = (a
  "       + b
  "       + c)
  call cursor(a:lnum, 1)
  let p = searchpair('(\|{\|\[', '', ')\|}\|\]', 'bW',
        \ "line('.') < " . (a:lnum - s:maxoff) . " ? dummy :"
        \ . " synIDattr(synID(line('.'), col('.'), 1), 'name')"
        \ . " =~ '\\(Comment\\|Todo\\|String\\)$'")
  if p > 0
    if p == plnum
      " When the start is inside parenthesis, only indent one 'shiftwidth'.
      let pp = searchpair('(\|{\|\[', '', ')\|}\|\]', 'bW',
            \ "line('.') < " . (a:lnum - s:maxoff) . " ? dummy :"
            \ . " synIDattr(synID(line('.'), col('.'), 1), 'name')"
            \ . " =~ '\\(Comment\\|Todo\\|String\\)$'")
      " NOTE(review): the fallbacks below read the g:pyindent_* options
      " inherited from python.vim — presumably kept for compatibility;
      " confirm before renaming them to g:mesonindent_*.
      if pp > 0
        return indent(plnum) + (exists("g:pyindent_nested_paren") ? eval(g:pyindent_nested_paren) : shiftwidth())
      endif
      return indent(plnum) + (exists("g:pyindent_open_paren") ? eval(g:pyindent_open_paren) : shiftwidth())
    endif
    if plnumstart == p
      return indent(plnum)
    endif
    return plindent
  endif


  " Get the line and remove a trailing comment.
  " Use syntax highlighting attributes when possible.
  let pline = getline(plnum)
  let pline_len = strlen(pline)
  if has('syntax_items')
    " If the last character in the line is a comment, do a binary search for
    " the start of the comment.  synID() is slow, a linear search would take
    " too long on a long line.
    if synIDattr(synID(plnum, pline_len, 1), "name") =~ "\\(Comment\\|Todo\\)$"
      let min = 1
      let max = pline_len
      while min < max
        let col = (min + max) / 2
        if synIDattr(synID(plnum, col, 1), "name") =~ "\\(Comment\\|Todo\\)$"
          let max = col
        else
          let min = col + 1
        endif
      endwhile
      let pline = strpart(pline, 0, min - 1)
    endif
  else
    " No syntax info: fall back to a plain scan for the first '#'.
    let col = 0
    while col < pline_len
      if pline[col] == '#'
        let pline = strpart(pline, 0, col)
        break
      endif
      let col = col + 1
    endwhile
  endif

  " If the previous line ended the conditional/loop
  if getline(plnum) =~ '^\s*\(endif\|endforeach\)\>\s*'
    " Maintain indent
    return -1
  endif

  " If the previous line ended with a builtin, indent this line
  if pline =~ '^\s*\(foreach\|if\|else\|elif\)\>\s*'
    return plindent + shiftwidth()
  endif

  " If the current line begins with a header keyword, deindent
  if getline(a:lnum) =~ '^\s*\(else\|elif\|endif\|endforeach\)'

    " Unless the previous line was a one-liner
    if getline(plnumstart) =~ '^\s*\(foreach\|if\)\>\s*'
      return plindent
    endif

    " Or the user has already dedented
    if indent(a:lnum) <= plindent - shiftwidth()
      return -1
    endif

    return plindent - shiftwidth()
  endif

  " When after a () construct we probably want to go back to the start line.
  " a = (b
  "       + c)
  " here
  if parlnum > 0
    return plindent
  endif

  return -1

endfunction
+
" Restore the user's 'cpoptions' saved at the top of this script.
let &cpo = s:keepcpo
unlet s:keepcpo

" vim:sw=2
--- /dev/null
+" Vim syntax file
+" Language: Meson
+" Maintainer: Nirbheek Chauhan <nirbheek.chauhan@gmail.com>
+" Last Change: 2016 Dec 7
+" Credits: Zvezdan Petkovic <zpetkovic@acm.org>
+" Neil Schemenauer <nas@meson.ca>
+" Dmitry Vasiliev
+"
+" This version is copied and edited from python.vim
+" It's very basic, and doesn't do many things I'd like it to
+" For instance, it should show errors for syntax that is valid in
+" Python but not in Meson.
+"
+" Optional highlighting can be controlled using these variables.
+"
+" let meson_space_error_highlight = 1
+"
+
" For version 5.x: Clear all syntax items.
" For version 6.x: Quit when a syntax file was already loaded.
if version < 600
  syntax clear
elseif exists("b:current_syntax")
  finish
endif

" We need nocompatible mode in order to continue lines with backslashes.
" Original setting will be restored.
let s:cpo_save = &cpo
setlocal cpo&vim

" Control-flow keywords and operators.
" https://github.com/mesonbuild/meson/wiki/Syntax
syn keyword mesonConditional elif else if endif
syn keyword mesonRepeat foreach endforeach
syn keyword mesonOperator and not or

" '#' comments run to end of line; TODO-style markers inside them stand out.
syn match mesonComment "#.*$" contains=mesonTodo,@Spell
syn keyword mesonTodo FIXME NOTE NOTES TODO XXX contained

" Strings can either be single quoted or triple quoted across multiple lines,
" but always with a '
syn region mesonString
      \ start="\z('\)" end="\z1" skip="\\\\\|\\\z1"
      \ contains=mesonEscape,@Spell
syn region mesonString
      \ start="\z('''\)" end="\z1" keepend
      \ contains=mesonEscape,mesonSpaceError,@Spell

" Backslash escape sequences inside strings.
syn match mesonEscape "\\[abfnrtv'\\]" contained
syn match mesonEscape "\\\o\{1,3}" contained
syn match mesonEscape "\\x\x\{2}" contained
syn match mesonEscape "\%(\\u\x\{4}\|\\U\x\{8}\)" contained
" Meson allows case-insensitive Unicode IDs: http://www.unicode.org/charts/
syn match mesonEscape "\\N{\a\+\%(\s\a\+\)*}" contained
syn match mesonEscape "\\$"

" Meson only supports integer numbers
" https://github.com/mesonbuild/meson/wiki/Syntax#numbers
syn match mesonNumber "\<\d\+\>"

" booleans
syn keyword mesonConstant false true

" Built-in functions
syn keyword mesonBuiltin
  \ add_global_arguments
  \ add_global_link_arguments
  \ add_languages
  \ add_project_arguments
  \ add_project_link_arguments
  \ add_test_setup
  \ benchmark
  \ build_machine
  \ build_target
  \ configuration_data
  \ configure_file
  \ custom_target
  \ declare_dependency
  \ dependency
  \ environment
  \ error
  \ executable
  \ files
  \ find_library
  \ find_program
  \ generator
  \ get_option
  \ get_variable
  \ gettext
  \ host_machine
  \ import
  \ include_directories
  \ install_data
  \ install_headers
  \ install_man
  \ install_subdir
  \ is_variable
  \ jar
  \ join_paths
  \ library
  \ meson
  \ message
  \ project
  \ run_command
  \ run_target
  \ set_variable
  \ shared_library
  \ shared_module
  \ static_library
  \ subdir
  \ subproject
  \ target_machine
  \ test
  \ vcs_tag

" Optional whitespace diagnostics, enabled by the user variable documented
" in the file header.
if exists("meson_space_error_highlight")
  " trailing whitespace
  syn match mesonSpaceError display excludenl "\s\+$"
  " mixed tabs and spaces
  syn match mesonSpaceError display " \+\t"
  syn match mesonSpaceError display "\t\+ "
endif

" Standard HiLink boilerplate: 'hi link' for pre-5.8 Vims, 'hi def link'
" otherwise, via a temporary command that is deleted afterwards.
if version >= 508 || !exists("did_meson_syn_inits")
  if version <= 508
    let did_meson_syn_inits = 1
    command -nargs=+ HiLink hi link <args>
  else
    command -nargs=+ HiLink hi def link <args>
  endif

  " The default highlight links.  Can be overridden later.
  HiLink mesonStatement Statement
  HiLink mesonConditional Conditional
  HiLink mesonRepeat Repeat
  HiLink mesonOperator Operator
  HiLink mesonComment Comment
  HiLink mesonTodo Todo
  HiLink mesonString String
  HiLink mesonEscape Special
  HiLink mesonNumber Number
  HiLink mesonBuiltin Function
  HiLink mesonConstant Number
  if exists("meson_space_error_highlight")
    HiLink mesonSpaceError Error
  endif

  delcommand HiLink
endif

let b:current_syntax = "meson"

" Restore the user's 'cpoptions'.
let &cpo = s:cpo_save
unlet s:cpo_save

" vim:set sw=2 sts=2 ts=8 noet:
--- /dev/null
+#!/usr/bin/env python3
+
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# ghwt - GitHub WrapTool
+#
+# An emergency wraptool(1) replacement downloader that downloads
+# directly from GitHub in case wrapdb.mesonbuild.com is down.
+
+import urllib.request, json, sys, os, shutil, subprocess
+import configparser, hashlib
+
# Repositories under the mesonbuild GitHub org that are not wrap
# subprojects and must be excluded from listings.
private_repos = {'meson', 'wrapweb', 'meson-ci'}
+
def gh_get(url):
    """Fetch *url* and return the parsed JSON response.

    The HTTP response is now closed deterministically (the original left
    it to the garbage collector).
    """
    with urllib.request.urlopen(url) as response:
        payload = response.read().decode('utf-8')
    return json.loads(payload)
+
def list_projects():
    """Print the names of all public mesonbuild repositories, sorted."""
    repos = gh_get('https://api.github.com/orgs/mesonbuild/repos')
    names = sorted(r['name'] for r in repos
                   if r['name'] not in private_repos)
    for name in names:
        print(name)
    return 0
+
def unpack(sproj, branch, outdir):
    """Clone *sproj* at *branch* into *outdir*, then replace the checkout
    with the verified upstream source tree described by its upstream.wrap.

    Returns 0 on success, 1 if the downloaded tarball fails SHA-256
    verification.  Raises subprocess.CalledProcessError if git fails.
    """
    subprocess.check_call(['git', 'clone', '-b', branch, 'https://github.com/mesonbuild/%s.git' % sproj, outdir])
    usfile = os.path.join(outdir, 'upstream.wrap')
    assert(os.path.isfile(usfile))
    config = configparser.ConfigParser()
    config.read(usfile)
    us_url = config['wrap-file']['source_url']
    # Download the upstream tarball and verify its hash before unpacking.
    with urllib.request.urlopen(us_url) as resp:
        us = resp.read()
    h = hashlib.sha256()
    h.update(us)
    dig = h.hexdigest()
    should = config['wrap-file']['source_hash']
    if dig != should:
        # Bug fix: the labels were swapped — 'should' comes from
        # upstream.wrap (expected), 'dig' is the digest of what we got.
        print('Incorrect hash on download.')
        print(' expected:', should)
        print(' obtained:', dig)
        return 1
    spdir = os.path.split(outdir)[0]
    ofilename = os.path.join(spdir, config['wrap-file']['source_filename'])
    with open(ofilename, 'wb') as ofile:
        ofile.write(us)
    if 'lead_directory_missing' in config['wrap-file']:
        # Tarball has no top-level directory: extract straight into outdir.
        os.mkdir(outdir)
        shutil.unpack_archive(ofilename, outdir)
    else:
        # Extract next to the clone, graft the git metadata onto the
        # extracted tree, then swap it into place as outdir.
        shutil.unpack_archive(ofilename, spdir)
        extdir = os.path.join(spdir, config['wrap-file']['directory'])
        assert(os.path.isdir(extdir))
        shutil.move(os.path.join(outdir, '.git'), extdir)
        subprocess.check_call(['git', 'reset', '--hard'], cwd=extdir)
        shutil.rmtree(outdir)
        shutil.move(extdir, outdir)
    # Drop git metadata and the downloaded archive; only sources remain.
    shutil.rmtree(os.path.join(outdir, '.git'))
    os.unlink(ofilename)
    # Explicit success status (callers pass this through to sys.exit).
    return 0
+
def install(sproj):
    """Download subproject *sproj* into ./subprojects/<sproj>.

    Picks the lexicographically last non-master branch of the GitHub repo
    as the newest release.  Returns a process exit status (0 on success).
    """
    sproj_dir = os.path.join('subprojects', sproj)
    if not os.path.isdir('subprojects'):
        print('Run this in your source root and make sure there is a subprojects directory in it.')
        return 1
    if os.path.isdir(sproj_dir):
        print('Subproject is already there. To update, nuke the dir and reinstall.')
        return 1
    blist = gh_get('https://api.github.com/repos/mesonbuild/%s/branches' % sproj)
    blist = [b['name'] for b in blist]
    blist = [b for b in blist if b != 'master']
    blist.sort()
    # Bug fix: guard against a repo with only a master branch, which
    # previously raised IndexError on blist[-1].
    if not blist:
        print('No release branches found for', sproj)
        return 1
    branch = blist[-1]
    print('Using branch', branch)
    return unpack(sproj, branch, sproj_dir)
+
def run(args):
    """Command-line driver.

    *args* is sys.argv[1:]; returns a process exit status.
    """
    if not args or args[0] == '-h' or args[0] == '--help':
        print(sys.argv[0], 'list/install', 'package_name')
        return 1
    command = args[0]
    args = args[1:]
    if command == 'list':
        # Bug fix: propagate list_projects()'s status instead of
        # discarding it and returning 0 unconditionally.
        return list_projects()
    elif command == 'install':
        if len(args) != 1:
            print('Install requires exactly one argument.')
            return 1
        return install(args[0])
    else:
        print('Unknown command')
        return 1
+
if __name__ == '__main__':
    # Entry point: this tool is a fallback for wrapdb.mesonbuild.com
    # outages, hence the up-front warning.
    print('This is an emergency wrap downloader. Use only when wrapdb is down.')
    sys.exit(run(sys.argv[1:]))
--- /dev/null
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+
+<svg
+ xmlns:dc="http://purl.org/dc/elements/1.1/"
+ xmlns:cc="http://creativecommons.org/ns#"
+ xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:svg="http://www.w3.org/2000/svg"
+ xmlns="http://www.w3.org/2000/svg"
+ xmlns:xlink="http://www.w3.org/1999/xlink"
+ xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+ xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+ width="744.09448819"
+ height="1052.3622047"
+ id="svg2"
+ version="1.1"
+ inkscape:version="0.91 r13725"
+ sodipodi:docname="meson_logo.svg">
+ <defs
+ id="defs4" />
+ <sodipodi:namedview
+ id="base"
+ pagecolor="#ffffff"
+ bordercolor="#666666"
+ borderopacity="1.0"
+ inkscape:pageopacity="0.0"
+ inkscape:pageshadow="2"
+ inkscape:zoom="1.4"
+ inkscape:cx="333.91453"
+ inkscape:cy="840.95374"
+ inkscape:document-units="px"
+ inkscape:current-layer="layer1"
+ showgrid="false"
+ inkscape:window-width="1147"
+ inkscape:window-height="710"
+ inkscape:window-x="65"
+ inkscape:window-y="24"
+ inkscape:window-maximized="1"
+ showguides="true"
+ inkscape:guide-bbox="true">
+ <inkscape:grid
+ type="xygrid"
+ id="grid3755" />
+ <sodipodi:guide
+ orientation="0,1"
+ position="121.42857,983.92857"
+ id="guide3805" />
+ <sodipodi:guide
+ orientation="0,1"
+ position="133.57143,703.92857"
+ id="guide3807" />
+ <sodipodi:guide
+ orientation="0,1"
+ position="141.42857,892.14286"
+ id="guide3809" />
+ <sodipodi:guide
+ orientation="0,1"
+ position="340,757.14285"
+ id="guide3811" />
+ <sodipodi:guide
+ position="340,1017.8571"
+ orientation="1,0"
+ id="guide4163" />
+ </sodipodi:namedview>
+ <metadata
+ id="metadata7">
+ <rdf:RDF>
+ <cc:Work
+ rdf:about="">
+ <dc:format>image/svg+xml</dc:format>
+ <dc:type
+ rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+ <dc:title />
+ </cc:Work>
+ </rdf:RDF>
+ </metadata>
+ <g
+ inkscape:label="Layer 1"
+ inkscape:groupmode="layer"
+ id="layer1">
+ <image
+ y="362.68222"
+ x="91.928558"
+ id="image3780"
+ xlink:href="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAABH0AAAPYCAIAAADIAhIwAAAAA3NCSVQICAjb4U/gAAAgAElEQVR4 nO3d0XLjyA1AUWlr//+XmQdvPB5bkkmK6AbQ5zylajeJLJFg3yYt37dtuwEAABDmn9kvAAAAoDnd BQAAEEt3AQAAxNJdAAAAsXQXAABALN0FAAAQS3cBAADE0l0AAACxdBcAAEAs3QUAABBLdwEAAMTS XQAAALF0FwAAQCzdBQAAEEt3AQAAxNJdAAAAsXQXAABALN0FAAAQS3cBAADE0l0AAACxdBcAAEAs 3QUAABBLdwEAAMTSXQAAALF0FwAAQCzdBQAAEEt3AQAAxNJdAAAAsXQXAABALN0FAAAQS3cBAADE 0l0AAACxdBcAAEAs3QUAABBLdwEAAMTSXQAAALF0FwAAQCzdBQAAEEt3AQAAxNJdAAAAsXQXAABA LN0FAAAQS3cBAADE0l0AAACxdBcAAEAs3QUAABBLdwEAAMTSXQAAALF0FwAAQCzdBQAAEEt3AQAA xNJdAAAAsXQXAABALN0FAAAQS3cBAADE0l0AAACxdBcAAEAs3QUAABBLdwEAAMTSXQAAALF0FwAA QCzdBQAAEEt3AQAAxNJdAAAAsXQXAABALN0FAAAQS3cBAADE0l0AAACxdBcAAEAs3QUAABBLdwEA AMTSXQAAALF0FwAAQCzdBQAAEEt3AQAAxNJdAAAAsXQXAABALN0FAAAQS3cBAADE0l0AAACxdBcA AEAs3QXkcr/f7/f77FcBAHCl+7Zts18DsJCPptq27WhcGVYAQF26C7hY9N0qUwsAKEd3AW+Z9Uyg 2QUAFKK7gGMS/vKVOQYAJKe7gN8lbK1nzDQAICHdBfxRqK9eM9kAgFR0F6yuTWv9ZL4BRe2czKYc FKK7YEWNW+sZsw7IYMz4NfEgId0F/S1YWS8YekCQ5MPW9IO5dBc0kfx6X4WRCBxSdPaadTCe7oKq il7sqzMzYSlLTVrzDULpLihjqct/foYnZPNzSG7bZnK+w6CDC+kuSM2KITPzEwYwBpMw8eBNugvS scioy0SFPUy5Bow7OEp3QQpWIc0YrWCsrcCsg/10F8xkXdKbActSDLRlmXWwh+6CaaxRlmXw0oY5 xieTDV7TXTCBlQpfmcOUY4jxkGkGL+guGMpihRcMZDIzvtjJKIOHdBcMYsnCTsYyqZhdnGCOwU// zn4B0J9VC4d8HjAWLsxldgFcyP0uiGXhwlWMa8YwtbiKqQVf6S6IYu1CBEObIEYWEYws+KS7IIQV DNFMb65iXhHKsIIPugsuZgXDSGY4bzKyGMCkgpvuggtZvjCLSc5R5hWDGVOgu+Cw+/2vE8fyhQwM c/YztZjCmGJxugvO+EgvaxdSMc95wbwiA2OKlekuOMMKhrRMdb4xr0jFjGJZuguOsYIhP4OdD+YV OZlRrOmf2S8AKrGIoQQHKjeHAYk5OFmT7oK9XCcoxOG6OAcAQDaeM4RdLGKoyIRfjUlFIQYUq9Fd 8AvrGKoz51dgUlGR6cRSPGcIr1jK0IDDuD0fMUU5dFmK7oKnXA9ow8HcmA+X0hzArEN3wWOuBEB+ JhVAFboLHrCUoR9HdTP3+91nClCI7oLvLGWA5IwpOnE8swjdBX8x/WnM4d2Dz5F+HNWs4N/ZLwCy MPSB/EwqgKL8/S643SxlWImxX5QxRXumE715zhCsZoDsjCmA6nQXq7OaYTWO+XJ8ZAAN6C7W5VuY gfyMKdbhaKc33cWiDHdW5vivwifFahzzNKa7WJGxDuRnUrEmRz5d6S6WY6ADyXkKmsU5/mlJd7EW oxw+OBcAYCTdxUIsNIH8TCq4ORHoSHexChMcvnFSJORDgU9OB5rRXQDrsqxJxccB0JjuYglWM0By 
xhT85LygE90FsDTLmgx8CgDt6S76s6CB15wjc3n/4QUnCG3oLpozr4HMzCiAReguAJhDdMEezhR6 0F10ZlIDQAMu6DSguwBgAutIgKXoLtqypgEAIAndBYB9itG84XCUs4bqdBc9mc4A0IyLO6XpLhoy l4HMzCiABekuAABqsG1BXbqLbkxkIDMzCmBNuotWLGgAoDfXeorSXQAwiPUiwLJ0FwC3myQA6jCv qEh30YcpDGRmRsGFnFCUo7sAIJw1IsDidBdNWNMAwFJc+qlFdwEAAMTSXXRgxwsu4VQK4o2FIE4u CtFdAAAAsXQX5dnrAjIzoyCUU4wqdBcAAIVJL0rQXdRm1MK1nFNARWYX+ekuAIhiLQjDON1ITndR mAkLAHyyMCAz3QXAXyxcruKdhPGcd6Slu6jKYAUAoArdBQDXszcEszj7yEl3UZKRCqGcYgBwLd0F AL8QolCLc5aEdBf1GKYAANSiuwB4wAbHV9u2Hfr3vXsAfKO7KMZqBgD4lQUD2eguALiS1R4AP+ku AB7TDwBwFd1FJVaBAMBOlg2korsA4DLWeZCKU5I8dBdlGJ0AABSluwB4yn7HId4uSMiJSRK6CwAA IJbuogabVUByxhQAL+guAAA6sy1CBroLAAAglu6iANtUQHLGFACv6S4AAJqzOcJ0uovsDEoAAKrT XQAA9Gcnl7l0F6kZkQAANKC7AABYgv1cJtJd5GU4AiUYVgD8SneRlHUMAHA5Cwxm0V0AvGKNAgDv 010AAACxdBcZ2V8HAIJYZjCF7gIAAIilu0jHLhQAAM3oLgAAgFi6i1zc7AIAoB/dBcAvbIi84M2B ipy5jKe7SMQQBACgJd0FAAAQS3cBAADE0l1k4SFDAGAYCw8G010AAACxdBcAv7Mx/JC3BYCddBcp WLsAAINZfjCS7iK1bdtmvwQAAHiX7iI1G1EAADSguwAAWJQdXobRXcxn5EEJTlUAOE13AcAZQhSA /XQXAABALN3FZDaMAQBoT3cBALAuW8CMobsAAABi6S5mssMEFGV8AXCI7gIAAIiluwAAAGLpLgD2 8nAd0JLhxgC6i2nMOAAAFqG7AOAY20YAHKW7ANhr27bZLwEAStJdzGG3GADIw8qEaLoLgL2sSwDg HN3FBJZuAAAsRXcBsJff7wKAc3QXAHu5WQ0A5+guADhAfAJwgu5iNEsWAABWo7sAAMDWMLF0FwAA QCzdxVB2kgCAtCxUiKO7AAAAYukuAAD4j1teBNFdjGOQAQCwJt0FAHvZPwLgHN0FAAB/2GEhgu5i ECMMAKjCuoXL6S4AAIBYugsAACCW7mIEN+sBgFqsXriW7gIAAIiluwAA4AG3vLiQ7iKcmQVtbNs2 +yUAQEm6CwAAHrN9zFV0F7FMKwAA0F0AAACxdBcAADzl4R0uobsIZE4BAA1Y0vA+3QUAABBLdwEA wC/c8uJNugsAACCW7gIAgN+55cU7dBdRzCYAAPiguwAAAGLpLkK42QUA9GOFw2m6CwAA9pJenKO7 AAAAYukurmcfCABozFKHE3QXFzOJoKtt22a/BACoSncBwC72lYBPBgJH6S4AAIBYuguAXWzuAnxl KnKI7uJKBhDQmN9wA+A03QXALqoD4Bs7zuynu7iM0QMAAA/pLgAAOMm+MzvpLgB2sbYAgNN0FwDs ojwBOE13AQDAeTZl2EN3cQ0TBwAAntFdAAAAsXQXAAC8xYM//Ep3cQGzBgAAXtBdAADwLtvQvKa7 AAAAYukuAACAWLqLd7mrDgBwsyjiJd0FAAAQS3cBAADE0l0AAHANjxryjO4CAACIpbt4i00dAICv rI54SHcBAADE0l2cZzsHVrPyWb/yzw4cZWLwk+4CAACIpbsAAABi6S4AALiYRw35RncBcICVBMBO 
BiZf6S4AAIBYuouTbOEAALxmvcQn3QXAMQsuIxb8kQG4lu4CAIAoNm74oLsAACCQ9OKmuzjH+IDF GQIAcIjuAgAAiKW7AOAVN/cAeJ/u4jBLEACAQyyf0F0AAACxdBeHbds2+yUAAEAluovD3CgHbkYB wEHG5uJ0FwAAQCzdxTG2aoClGHoAXEJ3AQDACLZyVqa7AAAAYukuDrBJA3xlJgDATroLAB4TlgBc RXcBAMAgNnSWpbsAAABi6S72sj0DAADn6C4Azmu8I9P4RwPmMl7WpLsAAABi6S4A+M5uNBDKkFmQ 7gLgLVYPAPAr3cUu1lUAAHCa7gKAv9hpAuByugsAACCW7gLgXW4QARxlcq5GdwHAH1ZCAETQXfzO KgQAAN6huwDgP7aZAAiiu/iFVQiwh1kBcJTJuRTdBQAAEEt3AcDtZuMZgEi6i1esQoBFGHcAhNJd AAAwh02fdeguAFZn3QNANN3FUxYiAABwCd0FAAAQS3cBcI2iN8mLvmwAatFdAKxLdAEwhu7iMWsR AAC4iu4CAACIpbt4wM0u4Jxa06PWqwWgNN0FwIpEFwAj6S4AAIBY/85+AQAwmptdi9u27fM/OxiY 7n6/fz0m6Up3AXCl/AsI6+zFfTs+nx2uP49kRw7wjuxXR8ZzXQHelPzKYsqt7JKD0yFEhOSTk/fp Lr5zOQHel/biYsQtK+iYdERxobSTk0t4zhCAVVgic7nPhbKjC3hNdwGBfm7dWZosIuFveTn2VrNt 2+eHPuBoFGDAa+mui0zngsEl9s8Wh1xX2a4vjrQFfaTXrEPRIccJ2SYnF9JdfOc6wZvOTZWPA+/h t4d93UX+uoFNfnkuMQ6bNU0/Ah14HDX9oCWO7uIvrhC8Y9g8caBWkeES42hZU4Zj7+bw47gkhy4R /pn9AoAOtm0bealwWarifr/PXXda9TKXYcVRplZjugt415SFxeDS4x2zlhGWL8tKNRxSvRhKMLu6 0l384TznKPHDTuPHi4FGHuYkcNNdwDlJiivDa2CnkSEkupaVZDTBmwyxlnQXcFiqZU2qF8NroSuJ z/9x65VlZZ4GmV8bMIbuAo6xeuAd0eklupZlNAHJ+R55/mOxwh5pJ4YDuJZrDySfPmlH0zeOVQ6p cmCzk/tdwF6ZLwCZXxs/fb0xdXolev+/614XJTn9gRL+nf0CgAIsa4jwNb32H2NCC4CKPGfI7WYd w0uFpoQjuYHXx5uPmJ8KzaibY5iDah3evOY5Q+CVWhO/1qvloRdPD1qw8pOzHqjC/S5uN6sZnig6 HxzPsIKiA+pmRnFE3eOcn9zvAh6rO+vrvnJgJ6c5i1DpnegunNI8YE0DpGVAARXpLuC7BmuaBj8C 8FCDs7vBjwCcoLuAnqxsgA8e6wAy0F3AXzrlSqefBbidPamzjYJDf7AObBy0obsAAMb5iC7pBavR XcAf/dYB/X4ioA0Dip3c8upBd63OmUx7VjYAVGfB1oDuAv7TuE8a/2hAUZbRsBrdBdxuygRIr9+Y 8gUbHKLVq9NdS3MCsw6LGyCVz6FkOrGTQ6U63QUAMIFlNCxFdwGrXPsX+TGhn23bep+/vX864IPu AgBS81Q80IDugtXZZwWSW2FMrfAz8iYbENXpLmAhVjYAwBS6a112TQDIb53tknV+UliT7oKlLXiZ X/BHhrqcsEAbugvWZUEDZLbgjFrwR4Z16C5Y1MpX95V/dsjPGQq0pLsW5Ze7ACAh2ckL1m+l6S4A IBErS6Al3QUrsp8KADCS7gIA0ll5e2jlnx0a012wHFf0mzcBABhLd63Io/MAZGZnBJ6xiqtLd8Fa rGaA5IypmzcBOtJdAAAAsXQXLMQGKgBU51HDonTXcpyryxJd33hDAIBhdBcAkIUNkU/eCp5xbBSl 
uwCAFKwmgcZ0FwAwn+gCetNda/HLXcuyoHnI2wJJOBkf8rZAJ7oLAJhJXcAhttGL0l3QnzUNQFEG OLShuwAAoBK3vCrSXQDANO7nAIvQXdCcNQ1AacY49KC7FuKWNACpKApgHboLAAAglu6CzuwlAzRg mPOT55jK0V0AwARaAliK7gIAAIilu6Ate8l7eJeAEgwrqE53rcJDwAAAnVjd1aK7oCc7owDNGOxQ mu4CAEaTEMBqdBc0ZEEDACvwqGEhugtYnUwFqjCvoC7dBQAMJR6ABeku6MaCBgAgG921BM/+AgDA RLoLAKAMDzVAUboLAAAglu6CVuyDAslt2+bpd2BBuqs/lzf4lV6FkZxxcCErvSp0V3NORQDyUFzA snRXZ6JrNRY0ACsw7aEi3QUAAIXZai9BdwHcbvaPAYBIuguakA0AAGnprrbccQYAgCR0FwBAMZ5x gHJ0F3TgAnwJbyMAEER39eQhQwAAyEN3AQAAxNJdAAD1eDQaatFdUJ5LLwAszu+Y5Ke7AP4QsQBA BN0FAIxgX+Ny3lIoRHdBbS66AAD56a6GPOALALAaK8DkdBcU5mYXAEAJugvgL2oWIjizgnhjoQrd 1Y1bzAAACxLhyekuAAAoz+Z7crqrFecbAKzGXQ4oQXcBfGcRAwBcS3dBVdoAAKAK3QUAxLJPBKC7 AAAAYukuKMnmcTTvMFzI1z7BGM61zHQXABDLRkY07zDkp7v6sMMBAAA56S4AINC2bXYGB3DLC5LT XVCPiytQi6kFoLuasJUIAIA1YVq6C+oxUgH4yX1Fbg6DxHQXAABALN0FxWzbZisLyM+kgik8FJOW 7urACQZAKl+jy0VqJLkLaekugMcsX+BNTiIYz3mXlu6CSgxTID+TCuAn3QUAhPCEIcAn3VWeqxoA mW3b5lI1kvuNkNO/s18AANDH10X/x38WXQA33QWF2MIEKjK7AG6eM6zOJiIAAJ8sDtPSXQBP2aeH Q5wyAM/oLgCAVgQwJKS7AAAAYukuqMHm5SzeeQDgfbqrML83CQM40QCoxZUrJ90FAAAQS3cBABfw UG4qPg7IRncBAADE0l1VeXJ3KbYtJ/LmAwDv010AwLvsUAC8prsAAABi6S4AAIBYugsAoCEPf0Iq uqskX6oBAACF6C4AAGjFHn1CugsAACCW7oLsPKA/nY8AKMr4gjx0FwAAQCzdBQAA3fgVr2x0Vz3O IgAAqEV3AQBAQzbrU9FdxTh/AID9fLUGJKG7IDXXSwCABnQXAPAWO0QAv9JdAAAAsXQXAAD05KsB 8tBdlThzVuPRHSA/kwpgD90FANCZNoYMdFcZbnYBAHCUNWQSugsAOMmNFICddBcAAEAs3QVJ2UUG AC7hUcMMdBcAAEAs3VWDXQoAsnFbHmA/3QUZWc0AAHSiuwAAoDkPT02nuwAAAGLpLkjHQ4ZAfiYV wCG6CwAAIJbuAgBozv1JmE53AfzOkgW+ckYAHKW7AAAAYumuAnzv51LsIgMAESwp59JdAAAAsXQX AHCA2/IAJ+guAACAWLorO0/iLsUuMgBAS7oLANjL9hDAOboLAAAglu6CLOwiAwB0pbsAdhHGAMBp ugsA2MXuQ2k+PphLdwEAAMTSXQB72S1mZY5/aMAfKJpId0EKFjQAAI3prtTsSQAAQAO6C+AAdyZZ kyMf4E26CwAAIJbugvlsJAOZmVEA79NdAMdYgwIAR+kuAOApGw2d+DRhIt0FAAAQS3fBZHYfK/Kp sQiHOsBVdBcAAKzCn4edRXfl5awAYCI3uwAupLsAzrAkBQD2010AAACxdBcA8J07ul35ZGEW3QVw kuULALCT7oKZLNwBgMF8edsUugsA+IstIYDL6S4AgIXoaphCdyXl/i+UYPlCP45qgAi6CwAAIJbu 
AgAAiKW7YBoP8/Tgc6QTxzNAEN0FAAAQS3cBvMstAqAWUwtf4Tae7gIAbjdrcYBIugsAACCW7gIA AIiluwAu4AEtqnMMA4TSXQAAALF0FwCszs0uWJCvNBxMdwFcw8oVAHhGd8Ec1uhAEsYRwAC6KyO3 fQEYQ3Qty0cPg+kugMtYxwAAD+kuAFiUnQKAYXQXwJUsZKnCsQowku4CAACIpbvS8aUaUJ3bCOTn KAUYTHcBwFpEF8B4ugvgeta1pOXgBJhCdwHAKkQXwCy6CwCWILoAJtJdufhSDWjDGhcA+KS7YAIr cmAwYwdgLt0FEMVKlwy2bXMoAkynuwAAAGLpLoBA7jMwlyMQIAndBaNZBgFjmDYAeeiuRHyZIbRk 7ct4fqeLPRwkMJLuAghnccNIjjeAhHQXAABALN0FMIJbEAzg8UKAtHQXwCAWxIRygAFkprsAxrEy JohDCyA53QUwlPUxl3NQAef4Mu2RdFcWjntYh1UyF3I4AZSguwAmsFbmEg4k9rC3CxnoLoA5rJh5 k0OInRwqkIHuApjGYojTHDwAtegugJmsnjnBYQNcxWOow+gugMmsoQGgPd2Vgp0GWJz0Yqdt2xwt XMXyA0b6d/YLAOB2+396WQbxkNYCghgvw+gugETUF99YEgH04DlDgHQ8S8YHhwEQzU7fMO53ASTl 3tfKFBdAM7oLIDX1tRrFBdCS7prPcgr4lfpageICaMzvdwGUYV3emA8XoDf3uwAq+bo6d/urB8UF sALdBVCVhw+rU1wA6/CcIUBtvnS+Ip8akIf9uzHc7wLowL2vKuQWwJrc7wLow12UzHw6ZOOAhJHc 75rM5jRwuc+1lAmTgaUtADf3uwAac4NlLu8/UIV9ugHc7wJozu2v8eQWAN/oLoBVCLA4QouKjAIY SXcBLMeXH15CawGd3O93Yy2U7prJogeY6OH19ed1d81JZfEBwLV0FwB//OyNRTJMaAEQSncBcMCz u2Rf/2mhNpNbAIyhuwB417d6+fYFHq9TbTytBcB4uguAKC8K59k/erPHtm17+L+gtQB+5as1Quku ABL5esnfU1Bf/52Pf2TRAEBCuguApPYUlMqCcwr9Hib08M/sFwBrsUYEAFiQ7gIAAIiluwAAAGLp rmk8Vw0AAIvQXQAAa7H5yzOOjTi6CwAAIJbuAgAAiKW7pvF94gAAsAjdBQAA3G5uDETSXdP4tUUA AFKxQI2juwAAAGLprmncxgUAgEXoLgAAgFi6C4by2DQAwIJ0F4wmvQAAVqO7AAAAYukuAACAWLoL AAAglu6C0fwJAQCA1eguGM33agAArEZ3AQAA/7FBHER3AQAAxNJdMIGdJAAgJ7+IHkR3AQAAxNJd AAAAsXQXAABALN0Fc/gVLwCAdeguAACAWLoLAAAglu6CaTxqCACk4kvk4+guAACAWLoLAAAglu4C AFiIp9xhCt0FM7n4AQCsQHcBAADE0l0wmVteAADt6S4AgFXY7INZdBcAAEAs3QUAABBLdwEAAMTS XTCfp+0BAHrTXQAAALF0FwDAEjxeARPpLkjBtRAAmGvbttkvoTPdBQAAEEt3QRZueQEAdKW7AAAA YukuAACAWLoLEvGoIQBBXGJ4zZdqRNNdAAAAsXQXAABALN0FuXgOBACgH90FAAAQS3cBAADE0l2Q jkcNAbiWKwtMp7sAAABi6S4AAFiaP941gO4CAACIpbsgIw/iA3AV1xTIQHcBAMC6PGQ4hu4CAACI pbsAANrykCEkobumMQcBAJjOonQM3QVJGYIAvMmlBPLQXQAAALF0F+RlnxKA01xEIBXdBQAAEEt3 AQAAxNJdAADdeMiQ/fzd5DF0F6TmwgnAUa4dHOKAGUN3AQAAxNJdkN39frcRBcBOLhmQk+6CGtQX 
AEBdugsqkV4AvOAyAWnpLijGjS8AHnJ1gMx0F5SkvgD4ykWB03yP/Bi6CwpzlQXg5nIAFeguqM2N L4DFuQpACboLOnDRBViQrTcoRHdBE66+AEsx86EW3QWtuAwDrMC0h3J01zS+OoYgLsYAvZnzUJHu goZckgFa8kg51KW7oCfXZoBmTHWCOLTG0F3QmfoC6MEwh+p0F/Tnag1Qlx00BnCMDfDv7BcAjPAx T32bC0AhlsLQie6Chdzvd+kFkJbQgsZ0F6zFjS+APIQWrMPm92QGLhM5/QFmsQAgG6uCaLprMmOX uUwAgJFc98nMqiCU5wxhaR47BIimtUhu2zZH6QDud83nQCcPAwHgKq7vlGMZEMr9LuAPX3gIsNPD gam1gGf83WTgL/5AJ8AedqmAQ9zvAh7we18At91PAdiuAn6lu4Cn1BewuF8HoOICdtJdwC/U1we/ /AZ8klvAUboL2EV93aQXLE9uAadZQ6RgjlOO0QG05+rMalzcQ7nfBZzhzg/QldwCIugu4KSfSxMl Bs187rBUf9JYSgHT2bFOwfWAfswWKG3PhSntae6qCuekPal70F0puEKwCAMHkjtxPfp6Xr/+r3/e OgsaBS6mcI6r8xi6KwtXC1Zz+fDxK2dwQp6rz7Zt317MizM6z8uGBlw9x7BMycIlBD59m0tff7Hk kjPF3AMXHeCTy+IYuisLl0CYyzCkN1cZ4BlXwDF0VxauiFDFz9txBilpubgAe7iQDWC5kIirIzRg qJKBCwpwgktYKH+/CwD6UFwAOf0z+wUAtGLVC0BRLmGhdFci7u1CD/f/m/1CAIAsdBdAFPXFYI43 gLR0Vy5ueQEAMIvtmzi+VwMAyrNUAkjO/S6AQG5iAwA33QUA1bnZBVzFdmEc3QUQyIKYaI4xgBJ0 Vzq2GaAZy2LiOLoAqtBdAFCS6AIoRHcBhLM+BiA/T12F0l0ZOeihH+kFACvTXQCDSC8u5HACLne/ 382WOP5uclLbtjnuAQAYw/NW0dzvAhjHfgqXcCABlKO7AABgaW52DaC7AIZypwIAFqS78rLxAAAA PegugNHc8uIdjh/gWvb6x9BdABNYOgPAUnRXarYfAACgAd0FMIdbXpzgsAEoSncBAMCiPF01jO7K zskAwAc3uwDq0l0AAACxdFcBbnkB4GYXQGm6C2AaK2kApnMxGkN3AQDAirZtE13D6K4aPGoIXbng sYfjBKA63QUAAOuyvz+G7gIAAIiluwAAYDkft7nc7BpGd5XhrAAAgKJ0F8BkvjIBANrTXQDzSS8A RvIg1Xi6CwAAFiK6ptBdlThJoDG3vACgMd0FAKlpcuBC9vFn0V3FOFUAAKAc3QUAABBLd9Xjlhd0 5XEyAEJZRk6kuwAAAGLprpLsVQAAcIgF5Fy6CwAAIJbuAkjEr3gBEMHNrul0V1VOHoAVSHGAHnQX AAB0Zr8+A91VmFMIoDc3uwDa0F0AANCWnfokdFdtTiQAAJ6xVsxDd9nbn+0AAAiZSURBVAEAQEOi KxXdVZ4zCqAlv9wF0InuAgCAbmzNZ6O7OnBeAQDwyeIwId3VhLMLoBMPGQI08+/sFwAAAFzDXnxa 7nf14TQDAFiZ1WBm7ncBAEBhcqsE97tacdYBAEBCugsAAKqy7V6F7urGuQcAANnoroakF9Tl/AWA lnQXAACUZLeuEN3Vk5MQAKA3671adFdbTkUAgKXc7/fZL4GndBcAABRjh70c3dXZtm3OSSjECQvA O1xHMtNdAABQib6qSHf158wEAKjOiq463QUAAKl9jS4BVpTuWoLzE6AWcxv4JLp60F2rcJYCAJTz uYS73++Wc6XpLgAAyMidrk5010KcrgCFGNqwMn8NqB/dtRbnMABAclZrLekuAADI4iO67vf77BfC 
xfx+3rqcz5CNgcxPZjUsxYWgMfe71uXEBgDIw9qsN921NKc3QHIGNazAb+CvQHetzkkOADDRz8WY B4xb0l1IL4DUTGlo7NsJfr/fRVdXuovbzUUdAGCsn88WfhaXhVlLuov/OMMBAMZ4se6yJOtKd/GH 8xwAINqzFZdv1+hNd/EXZztAQoYzQHW6i+/stQAABHn2O1209+/sF0BS27YZBAAAV1Fci3O/i6fc 9QLIw0yG0nx1Ie538crHLLAfAwO49AJ09TC6jP3V3H3k7CG9IJppzB6mMZRjvPPBc4bsYmRAKKcY QD++q4yvdBd7mR0QxJkF0I/Zzje6i2MMEbiWc4pDHDBQglOVn3yvBof5sg0AgJ8e5tb97vsUuN3c 7+I0EwTe5zziBIcN5CS6eM2hwFvc9YJ3mMCcZvxCHoY5e7jfxVt82Qac5twBaMAwZyfdxQVMHDjK WcObHEIAtXjOkCt57gX2MHi5iqkLExnmHOJ+F1cygOBXThMu5HCCWZx9HKW7uJgxBAA05pfbOUd3 cT3DCJ5xdgDU9bC4PO7LTrqLEBaX8JPzggi23mEAJxrv011EMZ4AgOp+LS4LHnbyfYbEcvMdPhi2 RDNv4VrmNtdyv4tYZhbcnAgM4TCDCzmhuJzuIpzJxeKcAgCF/Hyw0M1kLuE5Q8YxtliQGcssRi4c 8nBcf55Hhjnv010MZR3AUgxY5jJyYY9ns1p0cS3dxQSWAqzAdCUD8xYeMqIZT3cxjdUAjRmt5GHY wgeTmbl0F5NZENCS0UoqJi0rM5BJQneRgjUBnZirJGTMsiDTmFR0F4lYFtCAoUpmxiyLMIpJSHeR jmUBdZmo5GfG0ps5TFq6i6SsDKjIRKUEA5aWTGCS012kZnFAFWYptZiudGICU4LuogZLBDIzSKnI XKUB45dCdBf1WCuQhxFKdSYqRRm/lKO7KMxygbnMT9owTinB1KU03UUTFg2MZHLSjylKQoYtnegu urF0IJqxSW+mKNMZs7Sku+jM6oHLmZmswPBkPNOV9nQXS7CG4BIGJuswNolmorIa3cWiLCnYz5yk sfv96UrAnORyxikr013wh0UG35iQYDB29THfxny+ZincdBfs8bkfbP2xCIMRfjIAT9u27ePd+3Yp +Tpqrn17P/4fjTJIxTkJ17Ai6cFIhD1MvGemzBCJBSU4UWE065WETEI4wTS7mR7AbroLUvjYrfz6 8IkFzRhmIFxlnallbgAn6C4oZp2VTSijD0K12UUyK4Cr6C5oovTKZgzjDnKKi7TGZ/3XN6rxjwmd 6C5YwrJVZsRBKr4BAliW8QcrapxhZhoAkJDuAjpkmFEGvPD1z2cBTKG7gMce/pXPVIwvAKAK3QUc 9jDD3hwmL9Lu81ftzSsAoCjdBQAAVfm6mip8TgAAALH+mf0CAAAAmtNdAAAAsXQXAABALN0FAAAQ S3cBAADE0l0AAACxdBcAAEAs3QUAABBLdwEAAMTSXQAAALF0FwAAQCzdBQAAEEt3AQAAxNJdAAAA sXQXAABALN0FAAAQS3cBAADE0l0AAACxdBcAAEAs3QUAABBLdwEAAMTSXQAAALF0FwAAQCzdBQAA EEt3AQAAxNJdAAAAsXQXAABALN0FAAAQS3cBAADE0l0AAACxdBcAAEAs3QUAABBLdwEAAMTSXQAA ALF0FwAAQCzdBQAAEEt3AQAAxNJdAAAAsXQXAABALN0FAAAQS3cBAADE0l0AAACxdBcAAEAs3QUA ABBLdwEAAMTSXQAAALF0FwAAQCzdBQAAEEt3AQAAxNJdAAAAsXQXAABALN0FAAAQS3cBAADE0l0A AACxdBcAAEAs3QUAABBLdwEAAMTSXQAAALF0FwAAQCzdBQAAEEt3AQAAxNJdAAAAsXQXAABALN0F 
AAAQS3cBAADE0l0AAACxdBcAAEAs3QUAABBLdwEAAMTSXQAAALF0FwAAQCzdBQAAEEt3AQAAxNJd AAAAsXQXAABALN0FAAAQS3cBAADE0l0AAACxdBfAYff7ffZLAAAquW/bNvs1AAAAdOZ+FwAAQCzd BQAAEEt3AQAAxNJdAAAAsXQXAABALN0FAAAQS3cl4i8CAQBAS/5+FwAAQCz3uwAAAGLpLgAAgFi6 CwAAIJbuAgAAiKW7AAAAYukuAACAWLoLAAAglu4CAACIpbsAAABi6S4AAIBYugsAACCW7gIAAIil uwCAJu73++yXAPDYfdu22a8BAACgM/e7AAAAYukuAACAWLoLAAAglu4CAACIpbsAAABi6S4AAIBY ugsAACCW7gIAAIiluwAAAGLpLgAAgFi6CwAAIJbuAgAAiKW7AAAAYukuAACAWLoLAAAglu4CAACI pbsAAABi6S4AAIBYugsAACCW7gIAAIiluwAAAGLpLgAAgFi6CwAAIJbuAgAAiKW7AAAAYukuAACA WLoLAAAglu4CAACIpbsAAABi6S4AAIBYugsAACCW7gIAAIiluwAAAGLpLgAAgFi6CwAAIJbuAgAA iKW7AAAAYukuAACAWLoLAAAglu4CAACI9T/cpLHUlkYrNQAAAABJRU5ErkJggg== "
+ height="367.39426"
+ width="428.99997" />
+ <path
+ style="fill:#389f00;fill-opacity:1;stroke:none;stroke-opacity:1"
+ d="m 252.71941,348.36581 c -17.21481,0 -47.14006,-0.84934 -60.97457,-13.17571 -13.83451,-12.32637 -16.84556,-17.45227 -19.09172,-35.51933 -2.08905,-16.80337 -2.6481,-33.71944 -1.49162,-50.37795 3.07225,-20.81514 5.13462,-47.62599 12.05413,-67.63597 6.23548,-30.42037 13.58284,-55.16491 25.9691,-83.71646 5.06527,-13.287784 16.76769,-22.557428 30.27757,-25.948958 15.50334,-3.30801 29.68022,-2.91134 46.74465,-1.2459 14.78626,1.27468 30.83798,17.066244 32.88238,34.597818 2.02303,17.34845 1.73385,22.0552 3.2728,37.6258 -0.19443,8.76313 6.34424,22.76616 16.45974,22.40304 9.56867,-0.34349 16.48036,-12.71843 17.51509,-20.93495 2.35599,-18.70837 -0.6551,-19.81684 2.94334,-37.3491 3.59844,-17.532268 14.18885,-33.043544 30.48873,-36.540734 16.26809,-2.12031 28.63094,-2.71731 46.02053,0.49697 18.07396,3.34079 26.93793,12.81813 33.62878,26.592994 6.73685,13.86957 10.49571,27.84178 16.21017,43.09018 4.88864,13.04479 8.56125,29.18304 10.94268,42.92888 3.32552,15.18288 3.65237,21.38717 6.77781,36.57323 0.77923,16.65538 4.45192,19.80725 5.02299,35.71533 0.35582,9.91195 -0.29524,39.12315 -2.43742,46.76046 -2.93782,13.52423 -7.12707,26.02687 -16.79636,33.20923 -12.64781,8.53102 -33.08952,11.60259 -47.55104,12.32486 -18.33849,0.12627 -30.65715,1.24014 -48.96624,-1.08017 -14.10098,-2.78518 -21.50029,-13.82094 -20.71192,-29.39227 1.06365,-21.00843 21.00671,-22.03777 35.65227,-22.0668 14.64556,-0.029 35.24942,-0.73393 37.06929,-19.42192 2.88023,-18.22229 0.38467,-36.76259 -2.58508,-54.95764 -2.30841,-16.30474 -8.74904,-35.18114 -15.47418,-50.2434 -1.96378,-11.35394 -19.50871,-15.47995 -23.80866,-4.84186 -5.41476,8.88334 -4.61169,19.77501 -7.41008,29.84511 -2.83889,10.21585 -1.44479,19.7209 -5.44492,32.54371 -4.76333,15.2693 -4.62217,18.45204 -20.66557,24.86079 -15.69676,2.99362 -39.52174,2.95141 -55.28152,0.43903 -14.06764,-2.44948 -22.74154,-14.03686 -24.98313,-27.54206 -3.16922,-11.4823 -2.40407,-23.59284 -4.92263,-35.45854 -2.5605,-12.06331 -6.37931,-35.47293 -22.0005,-29.70057 
-11.4991,4.24916 -17.39709,28.66088 -22.07262,43.97383 -4.41,20.15236 -9.59637,40.99697 -6.08472,61.67032 -2.34204,29.06652 17.85275,26.14283 32.49264,29.17728 12.50724,0 38.93213,0.75389 40.84562,17.16664 1.14528,9.82353 1.53864,23.62381 -11.36782,30.99327 -9.67605,5.54646 -25.93319,4.16152 -43.14799,4.16152 z M 425.78708,171.50203 c -1.14992,-1.69089 1.64273,0.31937 0,0 z"
+ id="path3783-1"
+ inkscape:connector-curvature="0"
+ sodipodi:nodetypes="zzscccccscsszcsssccsccccszccccsscccsscccsczcc"
+ inkscape:export-filename="/home/jpakkane/workspace/meson/graphics/meson_logo_big.png"
+ inkscape:export-xdpi="300"
+ inkscape:export-ydpi="300" />
+ </g>
+</svg>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+
+<svg
+ xmlns:dc="http://purl.org/dc/elements/1.1/"
+ xmlns:cc="http://creativecommons.org/ns#"
+ xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:svg="http://www.w3.org/2000/svg"
+ xmlns="http://www.w3.org/2000/svg"
+ xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+ xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+ width="744.09448819"
+ height="1052.3622047"
+ id="svg2"
+ version="1.1"
+ inkscape:version="0.91 r13725"
+ sodipodi:docname="wrap_logo.svg">
+ <defs
+ id="defs4" />
+ <sodipodi:namedview
+ id="base"
+ pagecolor="#ffffff"
+ bordercolor="#666666"
+ borderopacity="1.0"
+ inkscape:pageopacity="0.0"
+ inkscape:pageshadow="2"
+ inkscape:zoom="0.49497475"
+ inkscape:cx="357.33594"
+ inkscape:cy="840.95374"
+ inkscape:document-units="px"
+ inkscape:current-layer="layer1"
+ showgrid="false"
+ inkscape:window-width="901"
+ inkscape:window-height="504"
+ inkscape:window-x="196"
+ inkscape:window-y="73"
+ inkscape:window-maximized="0"
+ showguides="true"
+ inkscape:guide-bbox="true">
+ <inkscape:grid
+ type="xygrid"
+ id="grid3755" />
+ </sodipodi:namedview>
+ <metadata
+ id="metadata7">
+ <rdf:RDF>
+ <cc:Work
+ rdf:about="">
+ <dc:format>image/svg+xml</dc:format>
+ <dc:type
+ rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+ <dc:title />
+ </cc:Work>
+ </rdf:RDF>
+ </metadata>
+ <g
+ inkscape:label="Layer 1"
+ inkscape:groupmode="layer"
+ id="layer1">
+ <path
+ style="fill:#d0c719;fill-opacity:1;stroke:none;stroke-opacity:1"
+ d="m 434.2897,70.235258 c 17.21481,0 47.14006,0.84934 60.97457,13.17571 13.83451,12.32637 16.84556,17.452272 19.09172,35.519332 2.08905,16.80337 2.6481,33.71944 1.49162,50.37795 -3.07225,20.81514 -5.13462,47.62599 -12.05413,67.63597 -6.23548,30.42038 -13.58284,55.16492 -25.9691,83.71647 -5.06527,13.28778 -16.76769,22.55743 -30.27757,25.94896 -15.50334,3.30801 -29.68022,2.91134 -46.74465,1.2459 -14.78626,-1.27468 -30.83798,-17.06625 -32.88238,-34.59782 -2.02303,-17.34845 -1.73385,-22.0552 -3.2728,-37.6258 0.19443,-8.76313 -6.34424,-22.76616 -16.45974,-22.40304 -9.56867,0.34349 -16.48036,12.71843 -17.51509,20.93495 -2.35599,18.70837 0.6551,19.81684 -2.94334,37.3491 -3.59844,17.53227 -14.18885,33.04354 -30.48873,36.54073 -16.26809,2.12031 -28.63094,2.71731 -46.02053,-0.49697 -18.07396,-3.34079 -26.93793,-12.81813 -33.62878,-26.59299 -6.73685,-13.86957 -10.49571,-27.84178 -16.21017,-43.09018 -4.88864,-13.04479 -8.56125,-29.18305 -10.94268,-42.92889 -3.32552,-15.18288 -3.65237,-21.38717 -6.77781,-36.57323 -0.77923,-16.65538 -4.45192,-19.80725 -5.02299,-35.71533 -0.35582,-9.91195 0.29524,-39.12315 2.43742,-46.76046 2.93782,-13.52423 7.12707,-26.026872 16.79636,-33.209232 12.64781,-8.53102 33.08952,-11.60259 47.55104,-12.32486 18.33849,-0.12627 30.65715,-1.24014 48.96624,1.08017 14.10098,2.78518 21.50029,13.82094 20.71192,29.392272 -1.06365,21.00843 -21.00671,22.03777 -35.65227,22.0668 -14.64556,0.029 -35.24942,0.73393 -37.06929,19.42192 -2.88023,18.22229 -0.38467,36.76259 2.58508,54.95764 2.30841,16.30474 8.74904,35.18114 15.47418,50.2434 1.96378,11.35395 19.50871,15.47996 23.80866,4.84187 5.41476,-8.88335 4.61169,-19.77502 7.41008,-29.84512 2.83889,-10.21585 1.44479,-19.7209 5.44492,-32.54371 4.76333,-15.2693 4.62217,-18.45204 20.66557,-24.86079 15.69676,-2.99362 39.52174,-2.95141 55.28152,-0.43903 14.06764,2.44948 22.74154,14.03686 24.98313,27.54206 3.16922,11.4823 2.40407,23.59284 4.92263,35.45854 2.5605,12.06331 6.37931,35.47294 22.0005,29.70058 11.4991,-4.24916 
17.39709,-28.66089 22.07262,-43.97384 4.41,-20.15236 9.59637,-40.99697 6.08472,-61.67032 2.34204,-29.06652 -17.85275,-26.14283 -32.49264,-29.17728 -12.50724,0 -38.93213,-0.75389 -40.84562,-17.16664 -1.14528,-9.823532 -1.53864,-23.623812 11.36782,-30.993272 9.67605,-5.54646 25.93319,-4.16152 43.14799,-4.16152 z M 261.22203,247.09904 c 1.14992,1.69089 -1.64273,-0.31937 0,0 z"
+ id="path3783-1"
+ inkscape:connector-curvature="0"
+ sodipodi:nodetypes="zzscccccscsszcsssccsccccszccccsscccsscccsczcc"
+ inkscape:export-filename="/home/jpakkane/workspace/meson/graphics/meson_logo_big.png"
+ inkscape:export-xdpi="300"
+ inkscape:export-ydpi="300" />
+ </g>
+</svg>
--- /dev/null
+.TH MESON "1" "December 2017" "meson 0.44.0" "User Commands"
+.SH NAME
+meson - a high productivity build system
+.SH DESCRIPTION
+
+Meson is a build system designed to optimize programmer
+productivity. It aims to do this by providing simple, out-of-the-box
+support for modern software development tools and practices, such as
+unit tests, coverage reports, Valgrind, CCache and the like.
+
+The main Meson executable provides many subcommands to access all
+the functionality.
+
+.SH The setup command
+
+Using Meson is simple and follows the common two-phase
+process of most build systems. First you run Meson to
+configure your build:
+
+.B meson setup [
+.I options
+.B ] [
+.I source directory
+.B ] [
+.I build directory
+.B ]
+
+Note that the build directory must be different from the source
+directory. Meson does not support building inside the source directory
+and attempting to do that leads to an error.
+
+After a successful configuration step you can build the source by
+running the actual build command in the build directory. The default
+backend of Meson is Ninja, which can be invoked like this.
+
+\fBninja [\fR \fItarget\fR \fB]\fR
+
+You only need to run the Meson command once: when you first configure
+your build dir. After that you just run the build command. Meson will
+autodetect changes in your source tree and regenerate all files
+needed to build the project.
+
+The setup command is the default operation. If no actual command is
+specified, Meson will assume you meant to do a setup. That means
+that you can set up a build directory without the setup command
+like this:
+
+.B meson [
+.I options
+.B ] [
+.I source directory
+.B ] [
+.I build directory
+.B ]
+
+.SS "options:"
+.TP
+\fB\-\-version\fR
+print version number
+.TP
+\fB\-\-help\fR
+print command line help
+
+.SH The configure command
+
+.B meson configure
+provides a way to configure a Meson project from the command line.
+Its usage is simple:
+
+.B meson configure [
+.I build directory
+.B ] [
+.I options to set
+.B ]
+
+If build directory is omitted, the current directory is used instead.
+
+If no parameters are set,
+.B meson configure
+will print the value of all build options to the console.
+
+To set values, use the \-D command line argument like this.
+
+.B meson configure \-Dopt1=value1 \-Dopt2=value2
+
+.SH The introspect command
+
+Meson introspect is a command designed to make it simple to integrate with
+other tools, such as IDEs. The output of this command is in JSON.
+
+.B meson introspect [
+.I build directory
+.B ] [
+.I option
+.B ]
+
+If build directory is omitted, the current directory is used instead.
+
+.SS "options:"
+.TP
+\fB\-\-targets\fR
+print all top level targets (executables, libraries, etc)
+.TP
+\fB\-\-target\-files\fR
+print the source files of the given target
+.TP
+\fB\-\-buildsystem\-files\fR
+print all files that make up the build system (meson.build, meson_options.txt etc)
+.TP
+\fB\-\-tests\fR
+print all unit tests
+.TP
+\fB\-\-help\fR
+print command line help
+
+.SH The test command
+
+.B meson test
+is a helper tool for running test suites of projects using Meson.
+The default way of running tests is to invoke the default build command:
+
+\fBninja [\fR \fItest\fR \fB]\fR
+
+.B meson test
+provides a richer set of tools for invoking tests.
+
+.SS "options:"
+.TP
+\fB\-\-repeat\fR
+run tests as many times as specified
+.TP
+\fB\-\-gdb\fR
+run tests under gdb
+.TP
+\fB\-\-list\fR
+list all available tests
+.TP
+\fB\-\-wrapper\fR
+invoke all tests via the given wrapper (e.g. valgrind)
+.TP
+\fB\-C\fR
+Change into the given directory before running tests (must be root of build directory).
+.TP
+\fB\-\-suite\fR
+run tests in this suite
+.TP
+\fB\-\-no\-suite\fR
+do not run tests in this suite
+.TP
+\fB\-\-no\-stdsplit\fR
+do not split stderr and stdout in test logs
+.TP
+\fB\-\-benchmark\fR
+run benchmarks instead of tests
+.TP
+\fB\-\-logbase\fR
+base of file name to use for writing test logs
+.TP
+\fB\-\-num-processes\fR
+how many parallel processes to use to run tests
+.TP
+\fB\-\-verbose\fR
+do not redirect stdout and stderr
+.TP
+\fB\-t\fR
+a multiplier to use for test timeout values (usually something like 100 for Valgrind)
+.TP
+\fB\-\-setup\fR
+use the specified test setup
+
+.SH The wrap command
+
+Wraptool is a helper utility to manage source dependencies
+using the online wrapdb service.
+
+.B meson wrap <
+.I command
+.B > [
+.I options
+.B ]
+
+You should run this command in the top level source directory
+of your project.
+
+.SS "Commands:"
+.TP
+\fBlist\fR
+list all available projects
+.TP
+\fBsearch\fR
+search projects by name
+.TP
+\fBinstall\fR
+install a project with the given name
+.TP
+\fBupdate\fR
+update the specified project to latest available version
+.TP
+\fBinfo\fR
+show available versions of the specified project
+.TP
+\fBstatus\fR
+show installed and available versions of currently used subprojects
+
+.SH SEE ALSO
+
+http://mesonbuild.com/
+
+https://wrapdb.mesonbuild.com/
--- /dev/null
+.TH MESONCONF "1" "December 2017" "mesonconf 0.44.0" "User Commands"
+.SH NAME
+mesonconf - a tool to configure Meson builds
+.SH DESCRIPTION
+
+This executable is deprecated and will be removed in the future. The
+functionality that was in this executable can be invoked via the main Meson
+command like this:
+
+.B meson configure <options>
--- /dev/null
+.TH MESONINTROSPECT "1" "December 2017" "mesonintrospect 0.44.0" "User Commands"
+.SH NAME
+mesonintrospect - a tool to extract information about a Meson build
+.SH DESCRIPTION
+
+This executable is deprecated and will be removed in the future. The
+functionality that was in this executable can be invoked via the main Meson
+command like this:
+
+.B meson introspect <options>
+
+.SH SEE ALSO
+http://mesonbuild.com/
--- /dev/null
+.TH MESONTEST "1" "December 2017" "meson 0.44.0" "User Commands"
+.SH NAME
+mesontest - test tool for the Meson build system
+.SH DESCRIPTION
+
+This executable is deprecated and will be removed in the future. The
+functionality that was in this executable can be invoked via the main Meson
+command like this:
+
+.B meson test <options>
+
+.SH SEE ALSO
+http://mesonbuild.com/
--- /dev/null
+.TH WRAPTOOL "1" "December 2017" "meson 0.44.0" "User Commands"
+.SH NAME
+wraptool - source dependency downloader
+.SH DESCRIPTION
+
+This executable is deprecated and will be removed in the future. The
+functionality that was in this executable can be invoked via the main Meson
+command like this:
+
+.B meson wrap <options>
+
+.SH SEE ALSO
+http://wrapdb.mesonbuild.com/
--- /dev/null
+#include<sqlite3.h>
+#include<stdio.h>
+
+/* Smoke test: open an in-memory SQLite database and close it again.
+ * Exits 1 if sqlite3_open fails, 0 on success. */
+int main(int argc, char **argv) {
+ sqlite3 *db;
+ if(sqlite3_open(":memory:", &db) != SQLITE_OK) {
+ printf("Sqlite failed.\n");
+ return 1;
+ }
+ sqlite3_close(db);
+ return 0;
+}
--- /dev/null
+# Manual test: fetch the sqlite subproject via its wrap file and link a
+# small test program against it (plus threads and, where found, libdl).
+project('downloader', 'c')
+
+cc = meson.get_compiler('c')
+
+s = subproject('sqlite').get_variable('sqlite_dep')
+th = dependency('threads')
+
+# libdl is not required: it does not exist on some platforms.
+libdl = cc.find_library('dl', required : false)
+
+e = executable('dtest', 'main.c',
+ dependencies : [th, libdl, s])
+
+test('dltest', e)
--- /dev/null
+[wrap-file]
+directory = sqlite-amalgamation-3080802
+
+source_url = http://sqlite.com/2015/sqlite-amalgamation-3080802.zip
+source_filename = sqlite-amalgamation-3080802.zip
+source_hash = 5ebeea0dfb75d090ea0e7ff84799b2a7a1550db3fe61eb5f6f61c2e971e57663
+
+patch_url = https://wrapdb.mesonbuild.com/v1/projects/sqlite/3080802/5/get_zip
+patch_filename = sqlite-3080802-5-wrap.zip
+patch_hash = d66469a73fa1344562d56a1d7627d5d0ee4044a77b32d16cf4bbb85741d4c9fd
--- /dev/null
+# Manual test: check out a subproject from Subversion via a wrap file
+# and link a test program against it.
+project('Subversion outchecker', 'c')
+
+sp = subproject('samplesubproject')
+
+exe = executable('gitprog', 'prog.c',
+include_directories : sp.get_variable('subproj_inc'),
+link_with : sp.get_variable('subproj_lib'),
+)
+
+test('maintest', exe)
--- /dev/null
+#include"subproj.h"
+
+/* Call into the checked-out subproject to verify it built and linked. */
+int main(int argc, char **argv) {
+ subproj_function();
+ return 0;
+}
--- /dev/null
+[wrap-svn]
+directory=samplesubproject
+url=https://svn.code.sf.net/p/mesonsubproject/code/trunk
+revision=head
--- /dev/null
+project('multiwrap', 'c',
+ default_options : 'c_std=c99')
+
+# Using multiple downloaded projects for great justice.
+
+cc = meson.get_compiler('c')
+
+# Prefer system lua/libpng, falling back to the wrap subprojects.
+luadep = dependency('lua', fallback : ['lua', 'lua_dep'])
+pngdep = dependency('libpng', fallback : ['libpng', 'pngdep'])
+
+executable('prog', 'prog.c',
+ dependencies : [pngdep, luadep])
--- /dev/null
+#include<lua.h>
+#include<stdio.h>
+#include<stdlib.h>
+#include<png.h>
+#include<string.h>
+#if !defined(_MSC_VER)
+#include<unistd.h>
+#endif
+
+/* Custom Lua allocator (lua_Alloc signature): nsize == 0 frees the
+ * block, any other nsize (re)allocates it to that many bytes. */
+static void *l_alloc (void *ud, void *ptr, size_t osize,
+ size_t nsize) {
+ (void)ud; /* user data unused */
+ (void)osize; /* old size unused */
+ if (nsize == 0) {
+ free(ptr);
+ return NULL;
+ } else {
+ return realloc(ptr, nsize);
+ }
+}
+
+/* Decode the named PNG file into an RGBA buffer using libpng's
+ * "simplified" read API.  Both png_image_begin_read_from_file() and
+ * png_image_finish_read() return nonzero on success and 0 on failure,
+ * so the error branches must test for 0. */
+void open_image(const char *fname) {
+ png_image image;
+
+ memset(&image, 0, (sizeof image));
+ image.version = PNG_IMAGE_VERSION;
+
+ if(png_image_begin_read_from_file(&image, fname) != 0) {
+ png_bytep buffer;
+
+ image.format = PNG_FORMAT_RGBA;
+ buffer = malloc(PNG_IMAGE_SIZE(image));
+
+ /* Fix: png_image_finish_read returns nonzero on SUCCESS; the
+ * original inverted this test and printed the "read failed"
+ * message on the success path. */
+ if(png_image_finish_read(&image, NULL, buffer, 0, NULL) == 0) {
+ printf("Image %s read failed: %s\n", fname, image.message);
+ }
+// png_free_image(&image);
+ free(buffer);
+ } else {
+ printf("Image %s open failed: %s", fname, image.message);
+ }
+}
+
+/* Lua C function: expects one string argument (a PNG path) and hands
+ * it to open_image.  Pushes no results, so it always returns 0. */
+int printer(lua_State *l) {
+ if(!lua_isstring(l, 1)) {
+ fprintf(stderr, "Incorrect call.\n");
+ return 0;
+ }
+ open_image(lua_tostring(l, 1));
+ return 0;
+}
+
+
+/* Create a Lua state with the custom allocator, register printer()
+ * and call it with a fixed file name, exercising both the lua and
+ * libpng subproject dependencies in one program. */
+int main(int argc, char **argv) {
+ lua_State *l = lua_newstate(l_alloc, NULL);
+ if(!l) {
+ printf("Lua state allocation failed.\n");
+ return 1;
+ }
+ lua_register(l, "printer", printer);
+ lua_getglobal(l, "printer");
+ lua_pushliteral(l, "foobar.png");
+ lua_call(l, 1, 0);
+ lua_close(l);
+ return 0;
+}
--- /dev/null
+[wrap-file]
+directory = libpng-1.6.17
+
+source_url = ftp://ftp.simplesystems.org/pub/libpng/png/src/history/libpng16/libpng-1.6.17.tar.xz
+source_filename = libpng-1.6.17.tar.xz
+source_hash = 98507b55fbe5cd43c51981f2924e4671fd81fe35d52dc53357e20f2c77fa5dfd
+
+patch_url = https://wrapdb.mesonbuild.com/v1/projects/libpng/1.6.17/6/get_zip
+patch_filename = libpng-1.6.17-6-wrap.zip
+patch_hash = 8bd272e28e6ae84691935e84bca9f5eb02632221e6faccf427eb71bf745a7295
--- /dev/null
+[wrap-file]
+directory = lua-5.3.0
+
+source_url = http://www.lua.org/ftp/lua-5.3.0.tar.gz
+source_filename = lua-5.3.0.tar.gz
+source_hash = ae4a5eb2d660515eb191bfe3e061f2b8ffe94dce73d32cfd0de090ddcc0ddb01
+
+
+patch_url = https://wrapdb.mesonbuild.com/v1/projects/lua/5.3.0/5/get_zip
+patch_filename = lua-5.3.0-5-wrap.zip
+patch_hash = 439038309a0700adfb67d764b3fe935ed8601b31f819fc369e1438c6e79334dd
--- /dev/null
+[wrap-file]
+directory = zlib-1.2.8
+
+source_url = http://zlib.net/fossils/zlib-1.2.8.tar.gz
+source_filename = zlib-1.2.8.tar.gz
+source_hash = 36658cb768a54c1d4dec43c3116c27ed893e88b02ecfcb44f2166f9c0b7f2a0d
+
+patch_url = https://wrapdb.mesonbuild.com/v1/projects/zlib/1.2.8/8/get_zip
+patch_filename = zlib-1.2.8-8-wrap.zip
+patch_hash = 17c52a0e0c59ce926d3959005d5cd8178c6c7e2c9a4a1304279a8320c955ac60
--- /dev/null
+# Manual test: check out a subproject from Git via a wrap file and
+# link a test program against it.
+project('git outcheckker', 'c')
+
+sp = subproject('samplesubproject')
+
+exe = executable('gitprog', 'prog.c',
+include_directories : sp.get_variable('subproj_inc'),
+link_with : sp.get_variable('subproj_lib'),
+)
+
+test('maintest', exe)
--- /dev/null
+#include"subproj.h"
+
+/* Call into the checked-out subproject to verify it built and linked. */
+int main(int argc, char **argv) {
+ subproj_function();
+ return 0;
+}
--- /dev/null
+[wrap-git]
+directory=samplesubproject
+url=https://github.com/jpakkane/samplesubproject.git
+revision=head
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleGetInfoString</key>
+ <string>MyApp</string>
+ <key>CFBundleExecutable</key>
+ <string>myapp.sh</string>
+ <key>CFBundleIdentifier</key>
+ <string>com.example.me</string>
+ <key>CFBundleName</key>
+ <string>myapp</string>
+ <key>CFBundleIconFile</key>
+ <string>myapp.icns</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>IFMajorVersion</key>
+ <integer>0</integer>
+ <key>IFMinorVersion</key>
+ <integer>1</integer>
+</dict>
+</plist>
--- /dev/null
+#!/bin/sh -eu
+
+curdir=`pwd`
+rm -rf buildtmp
+mkdir buildtmp
+LDFLAGS=-static-libstdc++ ~/meson/meson.py buildtmp --buildtype=release --prefix=/tmp/myapp --libdir=lib --strip
+ninja -C buildtmp install
+rm -rf buildtmp
+cd /tmp/
+tar czf myapp.tar.gz myapp
+mv myapp.tar.gz "$curdir"
+rm -rf myapp
--- /dev/null
+#!/bin/sh -eu
+
+rm -rf buildtmp
+mkdir buildtmp
+~/meson/meson.py buildtmp --buildtype=release --prefix=/tmp/myapp.app --bindir=Contents/MacOS
+ninja -C buildtmp install
+rm -rf buildtmp
+mkdir -p mnttmp
+rm -f working.dmg
+gunzip < template.dmg.gz > working.dmg
+hdiutil attach working.dmg -noautoopen -quiet -mountpoint mnttmp
+rm -rf mnttmp/myapp.app
+mv /tmp/myapp.app mnttmp
+# NOTE: output of hdiutil changes every now and then.
+# Verify that this is still working.
+hdiutil detach $(hdiutil info|grep "mnttmp"|awk '{print $1}')
+rm -rf mnttmp
+rm -f myapp.dmg
+hdiutil convert working.dmg -quiet -format UDZO -imagekey zlib-level=9 -o myapp.dmg
+rm -f working.dmg
--- /dev/null
+#!/usr/bin/env python3
+
+import os, urllib.request, shutil, subprocess
+from glob import glob
+
+sdl_url = 'http://libsdl.org/release/SDL2-devel-2.0.3-VC.zip'
+sdl_filename = 'SDL2-devel-2.0.3-VC.zip'
+sdl_dir = 'SDL2-2.0.3'
+
+shutil.rmtree('build', ignore_errors=True)
+os.mkdir('build')
+
+if not os.path.exists(sdl_filename):
+ response = urllib.request.urlopen(sdl_url)
+ data = response.read()
+ open(sdl_filename, 'wb').write(data)
+
+shutil.unpack_archive(sdl_filename, 'build')
+
+libs = glob(os.path.join('build', sdl_dir, 'lib/x86/*'))
+[shutil.copy(x, 'build') for x in libs]
+
+# Sorry for this hack but this needs to work during development
+# when Meson is not in path.
+subprocess.check_call(['python3', r'..\..\meson.py', 'build',
+ '--backend=ninja', '--buildtype=release'])
+subprocess.check_call(['ninja'], cwd='build')
+shutil.copy('myapp.iss', 'build')
+subprocess.check_call([r'\Program Files\Inno Setup 5\ISCC.exe', 'myapp.iss'],
+ cwd='build')
+shutil.copy('build/setup.exe', 'myapp 1.0.exe')
+shutil.rmtree('build')
--- /dev/null
+#!/bin/sh -eu
+
+libdir="${MESON_INSTALL_PREFIX}/lib"
+mkdir -p $libdir
+sdlfile=`ldd ${MESON_INSTALL_PREFIX}/bin/myapp | grep libSDL | cut -d ' ' -f 3`
+cp $sdlfile "${libdir}"
+strip "${libdir}/libSDL"*
--- /dev/null
+project('myapp', 'cpp')
+
+# SDL2 is required everywhere except Windows, where the headers and
+# libraries are unpacked locally by build_windows_package.py.
+sdl = dependency('sdl2', required : host_machine.system() != 'windows')
+
+if meson.get_compiler('cpp').get_id() != 'msvc'
+ add_global_arguments('-std=c++11', language : 'cpp')
+endif
+
+# OSX: lay the files out as an app bundle and embed SDL2.framework.
+if host_machine.system() == 'darwin'
+ install_data('myapp.sh',
+ install_dir : 'Contents/MacOS')
+
+ install_data('myapp.icns',
+ install_dir : 'Contents/Resources')
+
+ install_data('Info.plist',
+ install_dir : 'Contents')
+
+ meson.add_install_script('osx_bundler.sh')
+endif
+
+# Linux: install the launcher script and bundle libSDL next to it.
+if host_machine.system() == 'linux'
+ install_data('myapp.sh', install_dir : '.')
+ meson.add_install_script('linux_bundler.sh')
+endif
+
+extra_link_args = []
+
+# Windows: point at the locally unpacked SDL2 tree and link its
+# import libraries directly.
+if host_machine.system() == 'windows'
+ str = '-I@0@/@1@'.format(meson.current_build_dir(), 'SDL2-2.0.3/include')
+ add_global_arguments(str, language : 'cpp')
+ extra_link_args = ['/SUBSYSTEM:CONSOLE', 'SDL2main.lib', 'SDL2.lib']
+endif
+
+prog = executable('myapp', 'myapp.cpp',
+dependencies : sdl,
+link_args : extra_link_args,
+install : true)
--- /dev/null
+#include<SDL.h>
+#include<memory>
+#include<iostream>
+#include<string>
+
+// Open a 640x480 window and repaint it solid red every 100 ms until
+// the user closes it (SDL_QUIT).
+int main(int argc, char *argv[]) {
+ SDL_Surface *screenSurface;
+ SDL_Event e;
+ int keepGoing = 1;
+ std::string message;
+
+ // NOTE(review): on init failure this only prints and carries on;
+ // confirm whether it should exit instead.
+ if(SDL_Init( SDL_INIT_VIDEO ) < 0) {
+ printf( "SDL could not initialize! SDL_Error: %s\n", SDL_GetError() );
+ }
+ atexit(SDL_Quit);
+
+ // unique_ptr with SDL_DestroyWindow as deleter so the window is
+ // released on every exit path.
+ std::unique_ptr<SDL_Window, void(*)(SDL_Window*)> window(SDL_CreateWindow( "My application", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, 640, 480, SDL_WINDOW_SHOWN), SDL_DestroyWindow);
+ screenSurface = SDL_GetWindowSurface(window.get());
+
+ // Use iostream to make sure we have not screwed
+ // up libstdc++ linking.
+ message = "Window created.";
+ message += " Starting main loop.";
+ std::cout << message << std::endl;
+
+ while(keepGoing) {
+ while(SDL_PollEvent(&e) != 0) {
+ if(e.type == SDL_QUIT) {
+ keepGoing = 0;
+ break;
+ }
+ }
+ SDL_FillRect(screenSurface, NULL, SDL_MapRGB(screenSurface->format, 0xFF, 0x00, 0x00));
+ SDL_UpdateWindowSurface(window.get());
+ SDL_Delay(100);
+ }
+
+ return 0;
+}
--- /dev/null
+; Innosetup file for My app.\r
+\r
+[Setup]\r
+AppName=My App\r
+AppVersion=1.0\r
+DefaultDirName={pf}\My App\r
+DefaultGroupName=My App\r
+UninstallDisplayIcon={app}\myapp.exe\r
+Compression=lzma2\r
+SolidCompression=yes\r
+OutputDir=.\r
+\r
+[Files]\r
+Source: "myapp.exe"; DestDir: "{app}"\r
+Source: "SDL2.dll"; DestDir: "{app}"\r
+\r
+;[Icons]\r
+;Name: "{group}\My App"; Filename: "{app}\myapp.exe"\r
--- /dev/null
+#!/bin/bash
+
+# Launcher: cd to the directory this script lives in, then run the
+# binary; on Linux point the loader at the bundled libs first.
+cd "${0%/*}"
+
+if [ `uname` == 'Darwin' ]; then
+ ./myapp
+else
+ export LD_LIBRARY_PATH="`pwd`/lib"
+ bin/myapp
+fi
--- /dev/null
+#!/bin/sh -eu
+
+mkdir -p ${MESON_INSTALL_PREFIX}/Contents/Frameworks
+cp -R /Library/Frameworks/SDL2.framework ${MESON_INSTALL_PREFIX}/Contents/Frameworks
+
+install_name_tool -change @rpath/SDL2.framework/Versions/A/SDL2 @executable_path/../Frameworks/SDL2.framework/Versions/A/SDL2 ${MESON_INSTALL_PREFIX}/Contents/MacOS/myapp
--- /dev/null
+This directory shows how you can build redistributable binaries. On
+OSX this means building an app bundle and a .dmg installer. On Linux
+it means building an archive that bundles its dependencies. On Windows
+it means building an .exe installer.
+
+To build each package you run the corresponding build_ARCH.sh build
+script.
+
+On Linux you must build the package on the oldest distribution you
+plan to support (Debian stable/oldstable and old CentOS are the common
+choice here).
+
--- /dev/null
+#include"lib.h"
+
+/* Return a pointer to a static greeting; callers must not modify
+ * or free it. */
+char *meson_print(void)
+{
+ return "Hello, world!";
+}
--- /dev/null
+char *meson_print(void);
--- /dev/null
+#include<lib.h>
+#include<stdio.h>
+/* Print the greeting provided by the shared library. */
+int main(int argc, char **argv)
+{
+ char *t = meson_print();
+ printf("%s", t);
+ return 0;
+}
--- /dev/null
+# Manual test: exercise the RPM module by generating a spec template
+# for a project with an installed shared library and executable.
+project('test spec', 'c')
+
+rpm = import('rpm')
+dependency('zlib')
+find_program('nonexistprog', required : false)
+
+lib = shared_library('mesontest_shared', ['lib.c', 'lib.h'],
+ version : '0.1', soversion : '0',
+ install : true)
+executable('mesontest-bin', 'main.c',
+ link_with : lib,
+ install : true)
+
+rpm.generate_spec_template()
--- /dev/null
+# Manual test: check out a subproject from Mercurial via a wrap file
+# and link a test program against it.
+project('Mercurial outcheckker', 'c')
+
+sp = subproject('samplesubproject')
+
+exe = executable('gitprog', 'prog.c',
+include_directories : sp.get_variable('subproj_inc'),
+link_with : sp.get_variable('subproj_lib'),
+)
+
+test('maintest', exe)
--- /dev/null
+#include"subproj.h"
+
+/* Call into the checked-out subproject to verify it built and linked. */
+int main(int argc, char **argv) {
+ subproj_function();
+ return 0;
+}
--- /dev/null
+[wrap-hg]
+directory=samplesubproject
+url=https://bitbucket.org/jpakkane/samplesubproject
+revision=tip
--- /dev/null
+# Manual test: build a Vala program whose composite widget template is
+# compiled into a GResource bundle and referenced via --gresources.
+project('composite', 'vala', 'c')
+gnome = import('gnome')
+deps = [
+ dependency('glib-2.0', version : '>=2.38'),
+ dependency('gobject-2.0'),
+ dependency('gtk+-3.0'),
+]
+res = files('my-resources.xml')
+gres = gnome.compile_resources(
+ 'my', res,
+ source_dir : '.',
+)
+executable(
+ 'demo',
+ sources : [
+ 'mywidget.vala',
+ gres,
+ ],
+ dependencies : deps,
+ vala_args : ['--gresources', res],
+)
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<gresources>
+ <gresource prefix="/org/foo/my">
+ <file compressed="true" preprocess="xml-stripblanks">mywidget.ui</file>
+ </gresource>
+</gresources>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<interface>
+ <!-- interface-requires gtk+ 3.8 -->
+ <template class="MyWidget" parent="GtkBox">
+ <property name="visible">True</property>
+ <property name="can_focus">False</property>
+ <property name="orientation">vertical</property>
+ <property name="spacing">4</property>
+ <child>
+ <object class="GtkLabel" id="label1">
+ <property name="visible">True</property>
+ <property name="can_focus">False</property>
+ <property name="halign">start</property>
+ <property name="valign">start</property>
+ <property name="xalign">0</property>
+ <property name="label" translatable="yes">This widget is defined with composite GtkBuilder script</property>
+ <property name="wrap">True</property>
+ </object>
+ <packing>
+ <property name="expand">False</property>
+ <property name="fill">True</property>
+ <property name="position">0</property>
+ </packing>
+ </child>
+ <child>
+ <object class="GtkEntry" id="entry">
+ <property name="visible">True</property>
+ <property name="can_focus">True</property>
+ <property name="invisible_char">•</property>
+ <signal name="changed" handler="on_entry_changed" object="MyWidget" swapped="no"/>
+ </object>
+ <packing>
+ <property name="expand">False</property>
+ <property name="fill">True</property>
+ <property name="position">1</property>
+ </packing>
+ </child>
+ <child>
+ <object class="GtkLabel" id="label2">
+ <property name="visible">True</property>
+ <property name="can_focus">False</property>
+ <property name="halign">start</property>
+ <property name="valign">start</property>
+ <property name="xalign">0</property>
+ <property name="label" translatable="yes">Press the button to fetch the internal entry text</property>
+ <property name="wrap">True</property>
+ </object>
+ <packing>
+ <property name="expand">False</property>
+ <property name="fill">True</property>
+ <property name="position">2</property>
+ </packing>
+ </child>
+ <child>
+ <object class="GtkButton" id="button">
+ <property name="label" translatable="yes">The Button</property>
+ <property name="visible">True</property>
+ <property name="can_focus">True</property>
+ <property name="receives_default">True</property>
+ <property name="halign">end</property>
+ <signal name="clicked" handler="on_button_clicked" swapped="no"/>
+ </object>
+ <packing>
+ <property name="expand">False</property>
+ <property name="fill">True</property>
+ <property name="position">3</property>
+ </packing>
+ </child>
+ </template>
+</interface>
--- /dev/null
+using Gtk;
+
+// Composite widget whose children and signal handlers are defined by
+// the GtkBuilder template bundled at /org/foo/my/mywidget.ui.
+[GtkTemplate (ui = "/org/foo/my/mywidget.ui")]
+public class MyWidget : Box {
+ // Mirrors the internal entry's text.
+ public string text {
+ get { return entry.text; }
+ set { entry.text = value; }
+ }
+
+ [GtkChild]
+ private Entry entry;
+
+ public MyWidget (string text) {
+ this.text = text;
+ }
+
+ [GtkCallback]
+ private void on_button_clicked (Button button) {
+ print ("The button was clicked with entry text: %s\n", entry.text);
+ }
+
+ [GtkCallback]
+ private void on_entry_changed (Editable editable) {
+ print ("The entry text changed: %s\n", entry.text);
+
+ // Keep the public "text" property in sync with the entry.
+ notify_property ("text");
+ }
+}
+
+// Show a window containing the composite widget and run the GTK
+// main loop until the window is destroyed.
+void main(string[] args) {
+ Gtk.init (ref args);
+ var win = new Window();
+ win.destroy.connect (Gtk.main_quit);
+
+ var widget = new MyWidget ("The entry text!");
+
+ win.add (widget);
+ win.show_all ();
+
+ Gtk.main ();
+}
--- /dev/null
+project('timeout', 'c')
+
+# This creates a test that times out. It is a manual test
+# because currently there is no test suite for tests that are expected
+# to fail during the unit test phase.
+
+exe = executable('sleepprog', 'sleepprog.c')
+test('timeout', exe, timeout : 1)
--- /dev/null
+#include<unistd.h>
+
+/* Sleep far longer than the 1 second test timeout so the test
+ * harness is forced to kill this process. */
+int main(int argc, char **argv) {
+ sleep(1000);
+ return 0;
+}
--- /dev/null
+# Manual test: link a program against the bundled minimal libc
+# instead of the system one (cross compilation only).
+project('own libc', 'c')
+
+# Not related to this test, but could not find a better place for this test.
+assert(meson.get_cross_property('nonexisting', 'defaultvalue') == 'defaultvalue',
+ 'Cross prop getting is broken.')
+
+# A simple project that uses its own libc.
+
+# Note that we don't need to specify anything, the flags to use
+# stdlib come from the cross file.
+
+exe = executable('selfcontained', 'prog.c')
+
+test('standalone test', exe)
--- /dev/null
+
+#include<stdio.h>
+
+/* Print a message using only the bundled minimal libc.
+ * NOTE(review): simple_print/simple_strlen are not declared in this
+ * file; presumably the mylibc subproject's header is on the include
+ * path — confirm, since implicit declarations are invalid in C99+. */
+int main() {
+ const char *message = "Hello without stdlib.\n";
+ return simple_print(message, simple_strlen(message));
+}
--- /dev/null
+/* Do not use this as the basis of your own libc.
+ * The code is probably unoptimal or wonky, as I
+ * had no prior experience with this, but instead
+ * just fiddled with the code until it worked.
+ */
+
+#include<stdio.h>
+
+#define STDOUT 1
+#define SYS_WRITE 4
+
+/* Write bufsize bytes of msg to stdout via a raw int $0x80 Linux
+ * 32-bit write(2) system call, looping until everything is written.
+ * Returns 0 on success, 1 if a write transfers zero bytes. */
+int simple_print(const char *msg, const long bufsize) {
+ int count;
+ long total_written = 0;
+ while(total_written < bufsize) {
+ /* eax=SYS_WRITE, ebx=fd, ecx=buf, edx=len; bytes written
+ * come back in eax ("=a"(count)). */
+ asm(
+ "int $0x80\n\t"
+ : "=a"(count)
+ : "0"(SYS_WRITE), "b"(STDOUT), "c"(msg+total_written), "d"(bufsize-total_written)
+ :);
+ if(count == 0) {
+ return 1;
+ }
+ total_written += count;
+ }
+ return 0;
+}
+
+/* Count the bytes of str up to (not including) the terminating NUL. */
+int simple_strlen(const char *str) {
+ int len = 0;
+ while(str[len] != '\0') {
+ len++;
+ }
+ return len;
+}
--- /dev/null
+# Minimal libc subproject: a static library plus entry-point stub,
+# exposed to consumers as mylibc_dep.
+project('own libc', 'c')
+
+# A very simple libc implementation
+
+# Do not specify -nostdlib & co. They come from cross specifications.
+
+libc = static_library('c', 'libc.c', 'stubstart.s')
+
+mylibc_dep = declare_dependency(link_with : libc,
+ include_directories : include_directories('.')
+)
--- /dev/null
+#pragma once
+
+int simple_print(const char *msg, const long bufsize);
+
+int simple_strlen(const char *str);
--- /dev/null
+# Minimal program entry point: call main(), then invoke the 32-bit
+# Linux exit system call (eax=1) with main's return value in ebx.
+.globl _start
+
+_start:
+
+ call main
+ movl %eax, %ebx
+ movl $1, %eax
+ int $0x80
--- /dev/null
+Metadata-Version: 1.1
+Name: meson
+Version: 0.44.0
+Summary: A high performance build system
+Home-page: http://mesonbuild.com
+Author: Jussi Pakkanen
+Author-email: jpakkane@gmail.com
+License: Apache License, Version 2.0
+Description-Content-Type: UNKNOWN
+Description: Meson is a cross-platform build system designed to be both as
+ fast and as user friendly as possible. It supports many languages and compilers, including
+ GCC, Clang and Visual Studio. Its build definitions are written in a simple non-turing
+ complete DSL.
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Natural Language :: English
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Operating System :: POSIX :: BSD
+Classifier: Operating System :: POSIX :: Linux
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Topic :: Software Development :: Build Tools
--- /dev/null
+COPYING
+MANIFEST.in
+README.md
+__main__.py
+contributing.txt
+ghwt.py
+meson.py
+mesonconf.py
+mesonintrospect.py
+mesonrewriter.py
+mesontest.py
+run_cross_test.py
+run_project_tests.py
+run_tests.py
+run_unittests.py
+setup.cfg
+setup.py
+wraptool.py
+cross/iphone.txt
+cross/ownstdlib.txt
+cross/ubuntu-armhf.txt
+cross/ubuntu-faketarget.txt
+cross/ubuntu-mingw.txt
+data/macros.meson
+data/shell-completions/zsh/_meson
+data/syntax-highlighting/emacs/meson.el
+data/syntax-highlighting/vim/README
+data/syntax-highlighting/vim/ftdetect/meson.vim
+data/syntax-highlighting/vim/indent/meson.vim
+data/syntax-highlighting/vim/syntax/meson.vim
+graphics/meson_logo.svg
+graphics/meson_logo_big.png
+graphics/wrap_logo.svg
+man/meson.1
+man/mesonconf.1
+man/mesonintrospect.1
+man/mesontest.1
+man/wraptool.1
+manual tests/1 wrap/main.c
+manual tests/1 wrap/meson.build
+manual tests/1 wrap/subprojects/sqlite.wrap
+manual tests/10 svn wrap/meson.build
+manual tests/10 svn wrap/prog.c
+manual tests/10 svn wrap/subprojects/samplesubproject.wrap
+manual tests/2 multiwrap/meson.build
+manual tests/2 multiwrap/prog.c
+manual tests/2 multiwrap/subprojects/libpng.wrap
+manual tests/2 multiwrap/subprojects/lua.wrap
+manual tests/2 multiwrap/subprojects/zlib.wrap
+manual tests/3 git wrap/meson.build
+manual tests/3 git wrap/prog.c
+manual tests/3 git wrap/subprojects/samplesubproject.wrap
+manual tests/4 standalone binaries/Info.plist
+manual tests/4 standalone binaries/build_linux_package.sh
+manual tests/4 standalone binaries/build_osx_package.sh
+manual tests/4 standalone binaries/build_windows_package.py
+manual tests/4 standalone binaries/linux_bundler.sh
+manual tests/4 standalone binaries/meson.build
+manual tests/4 standalone binaries/myapp.cpp
+manual tests/4 standalone binaries/myapp.icns
+manual tests/4 standalone binaries/myapp.iss
+manual tests/4 standalone binaries/myapp.sh
+manual tests/4 standalone binaries/osx_bundler.sh
+manual tests/4 standalone binaries/readme.txt
+manual tests/4 standalone binaries/template.dmg.gz
+manual tests/5 rpm/lib.c
+manual tests/5 rpm/lib.h
+manual tests/5 rpm/main.c
+manual tests/5 rpm/meson.build
+manual tests/6 hg wrap/meson.build
+manual tests/6 hg wrap/prog.c
+manual tests/6 hg wrap/subprojects/samplesubproject.wrap
+manual tests/7 vala composite widgets/meson.build
+manual tests/7 vala composite widgets/my-resources.xml
+manual tests/7 vala composite widgets/mywidget.ui
+manual tests/7 vala composite widgets/mywidget.vala
+manual tests/8 timeout/meson.build
+manual tests/8 timeout/sleepprog.c
+manual tests/9 nostdlib/meson.build
+manual tests/9 nostdlib/prog.c
+manual tests/9 nostdlib/subprojects/mylibc/libc.c
+manual tests/9 nostdlib/subprojects/mylibc/meson.build
+manual tests/9 nostdlib/subprojects/mylibc/stdio.h
+manual tests/9 nostdlib/subprojects/mylibc/stubstart.s
+meson.egg-info/PKG-INFO
+meson.egg-info/SOURCES.txt
+meson.egg-info/dependency_links.txt
+meson.egg-info/top_level.txt
+mesonbuild/__init__.py
+mesonbuild/astinterpreter.py
+mesonbuild/build.py
+mesonbuild/coredata.py
+mesonbuild/environment.py
+mesonbuild/interpreter.py
+mesonbuild/interpreterbase.py
+mesonbuild/linkers.py
+mesonbuild/mconf.py
+mesonbuild/mesonlib.py
+mesonbuild/mesonmain.py
+mesonbuild/mintro.py
+mesonbuild/mlog.py
+mesonbuild/mparser.py
+mesonbuild/mtest.py
+mesonbuild/optinterpreter.py
+mesonbuild/rewriter.py
+mesonbuild/backend/__init__.py
+mesonbuild/backend/backends.py
+mesonbuild/backend/ninjabackend.py
+mesonbuild/backend/vs2010backend.py
+mesonbuild/backend/vs2015backend.py
+mesonbuild/backend/vs2017backend.py
+mesonbuild/backend/xcodebackend.py
+mesonbuild/compilers/__init__.py
+mesonbuild/compilers/c.py
+mesonbuild/compilers/compilers.py
+mesonbuild/compilers/cpp.py
+mesonbuild/compilers/cs.py
+mesonbuild/compilers/d.py
+mesonbuild/compilers/fortran.py
+mesonbuild/compilers/java.py
+mesonbuild/compilers/objc.py
+mesonbuild/compilers/objcpp.py
+mesonbuild/compilers/rust.py
+mesonbuild/compilers/swift.py
+mesonbuild/compilers/vala.py
+mesonbuild/dependencies/__init__.py
+mesonbuild/dependencies/base.py
+mesonbuild/dependencies/dev.py
+mesonbuild/dependencies/misc.py
+mesonbuild/dependencies/platform.py
+mesonbuild/dependencies/ui.py
+mesonbuild/modules/__init__.py
+mesonbuild/modules/gnome.py
+mesonbuild/modules/i18n.py
+mesonbuild/modules/modtest.py
+mesonbuild/modules/pkgconfig.py
+mesonbuild/modules/python3.py
+mesonbuild/modules/qt.py
+mesonbuild/modules/qt4.py
+mesonbuild/modules/qt5.py
+mesonbuild/modules/rpm.py
+mesonbuild/modules/unstable_simd.py
+mesonbuild/modules/windows.py
+mesonbuild/scripts/__init__.py
+mesonbuild/scripts/cleantrees.py
+mesonbuild/scripts/commandrunner.py
+mesonbuild/scripts/coverage.py
+mesonbuild/scripts/delwithsuffix.py
+mesonbuild/scripts/depfixer.py
+mesonbuild/scripts/dirchanger.py
+mesonbuild/scripts/dist.py
+mesonbuild/scripts/gettext.py
+mesonbuild/scripts/gtkdochelper.py
+mesonbuild/scripts/meson_exe.py
+mesonbuild/scripts/meson_install.py
+mesonbuild/scripts/msgfmthelper.py
+mesonbuild/scripts/regen_checker.py
+mesonbuild/scripts/scanbuild.py
+mesonbuild/scripts/symbolextractor.py
+mesonbuild/scripts/uninstall.py
+mesonbuild/scripts/vcstagger.py
+mesonbuild/scripts/yelphelper.py
+mesonbuild/wrap/__init__.py
+mesonbuild/wrap/wrap.py
+mesonbuild/wrap/wraptool.py
+test cases/common/1 trivial/meson.build
+test cases/common/1 trivial/trivial.c
+test cases/common/10 man install/bar.2
+test cases/common/10 man install/baz.1.in
+test cases/common/10 man install/foo.1
+test cases/common/10 man install/installed_files.txt
+test cases/common/10 man install/meson.build
+test cases/common/10 man install/vanishing/meson.build
+test cases/common/10 man install/vanishing/vanishing.1
+test cases/common/10 man install/vanishing/vanishing.2
+test cases/common/100 test workdir/meson.build
+test cases/common/100 test workdir/opener.c
+test cases/common/101 suites/exe1.c
+test cases/common/101 suites/exe2.c
+test cases/common/101 suites/meson.build
+test cases/common/101 suites/subprojects/sub/meson.build
+test cases/common/101 suites/subprojects/sub/sub1.c
+test cases/common/101 suites/subprojects/sub/sub2.c
+test cases/common/102 threads/meson.build
+test cases/common/102 threads/threadprog.c
+test cases/common/102 threads/threadprog.cpp
+test cases/common/103 manygen/depuser.c
+test cases/common/103 manygen/meson.build
+test cases/common/103 manygen/subdir/funcinfo.def
+test cases/common/103 manygen/subdir/manygen.py
+test cases/common/103 manygen/subdir/meson.build
+test cases/common/104 stringdef/meson.build
+test cases/common/104 stringdef/stringdef.c
+test cases/common/105 find program path/meson.build
+test cases/common/105 find program path/program.py
+test cases/common/106 subproject subdir/meson.build
+test cases/common/106 subproject subdir/prog.c
+test cases/common/106 subproject subdir/subprojects/sub/meson.build
+test cases/common/106 subproject subdir/subprojects/sub/lib/meson.build
+test cases/common/106 subproject subdir/subprojects/sub/lib/sub.c
+test cases/common/106 subproject subdir/subprojects/sub/lib/sub.h
+test cases/common/107 postconf/meson.build
+test cases/common/107 postconf/postconf.py
+test cases/common/107 postconf/prog.c
+test cases/common/107 postconf/raw.dat
+test cases/common/108 postconf with args/meson.build
+test cases/common/108 postconf with args/postconf.py
+test cases/common/108 postconf with args/prog.c
+test cases/common/108 postconf with args/raw.dat
+test cases/common/109 testframework options/meson.build
+test cases/common/109 testframework options/meson_options.txt
+test cases/common/109 testframework options/test_args.txt
+test cases/common/11 subdir/meson.build
+test cases/common/11 subdir/subdir/meson.build
+test cases/common/11 subdir/subdir/prog.c
+test cases/common/110 extract same name/lib.c
+test cases/common/110 extract same name/main.c
+test cases/common/110 extract same name/meson.build
+test cases/common/110 extract same name/src/lib.c
+test cases/common/111 has header symbol/meson.build
+test cases/common/112 has arg/meson.build
+test cases/common/113 generatorcustom/catter.py
+test cases/common/113 generatorcustom/gen.py
+test cases/common/113 generatorcustom/main.c
+test cases/common/113 generatorcustom/meson.build
+test cases/common/113 generatorcustom/res1.txt
+test cases/common/113 generatorcustom/res2.txt
+test cases/common/114 multiple dir configure file/meson.build
+test cases/common/114 multiple dir configure file/subdir/foo.txt
+test cases/common/114 multiple dir configure file/subdir/meson.build
+test cases/common/114 multiple dir configure file/subdir/someinput.in
+test cases/common/115 spaces backslash/comparer-end-notstring.c
+test cases/common/115 spaces backslash/comparer-end.c
+test cases/common/115 spaces backslash/comparer.c
+test cases/common/115 spaces backslash/meson.build
+test cases/common/115 spaces backslash/asm output/meson.build
+test cases/common/115 spaces backslash/include/comparer.h
+test cases/common/116 ternary/meson.build
+test cases/common/117 custom target capture/data_source.txt
+test cases/common/117 custom target capture/installed_files.txt
+test cases/common/117 custom target capture/meson.build
+test cases/common/117 custom target capture/my_compiler.py
+test cases/common/118 allgenerate/converter.py
+test cases/common/118 allgenerate/foobar.cpp.in
+test cases/common/118 allgenerate/meson.build
+test cases/common/119 pathjoin/meson.build
+test cases/common/12 data/datafile.dat
+test cases/common/12 data/etcfile.dat
+test cases/common/12 data/fileobject_datafile.dat
+test cases/common/12 data/installed_files.txt
+test cases/common/12 data/meson.build
+test cases/common/12 data/runscript.sh
+test cases/common/12 data/vanishing/meson.build
+test cases/common/12 data/vanishing/vanishing.dat
+test cases/common/12 data/vanishing/vanishing2.dat
+test cases/common/120 subdir subproject/meson.build
+test cases/common/120 subdir subproject/prog/meson.build
+test cases/common/120 subdir subproject/prog/prog.c
+test cases/common/120 subdir subproject/subprojects/sub/meson.build
+test cases/common/120 subdir subproject/subprojects/sub/sub.c
+test cases/common/120 subdir subproject/subprojects/sub/sub.h
+test cases/common/121 interpreter copy mutable var on assignment/meson.build
+test cases/common/122 skip/meson.build
+test cases/common/123 subproject project arguments/exe.c
+test cases/common/123 subproject project arguments/exe.cpp
+test cases/common/123 subproject project arguments/meson.build
+test cases/common/123 subproject project arguments/subprojects/subexe/meson.build
+test cases/common/123 subproject project arguments/subprojects/subexe/subexe.c
+test cases/common/124 test skip/meson.build
+test cases/common/124 test skip/test_skip.c
+test cases/common/125 shared module/meson.build
+test cases/common/125 shared module/module.c
+test cases/common/125 shared module/prog.c
+test cases/common/125 shared module/runtime.c
+test cases/common/126 llvm ir and assembly/main.c
+test cases/common/126 llvm ir and assembly/main.cpp
+test cases/common/126 llvm ir and assembly/meson.build
+test cases/common/126 llvm ir and assembly/square-arm.S
+test cases/common/126 llvm ir and assembly/square-x86.S
+test cases/common/126 llvm ir and assembly/square-x86_64.S
+test cases/common/126 llvm ir and assembly/square.ll
+test cases/common/126 llvm ir and assembly/symbol-underscore.h
+test cases/common/127 cpp and asm/meson.build
+test cases/common/127 cpp and asm/retval-arm.S
+test cases/common/127 cpp and asm/retval-x86.S
+test cases/common/127 cpp and asm/retval-x86_64.S
+test cases/common/127 cpp and asm/symbol-underscore.h
+test cases/common/127 cpp and asm/trivial.cc
+test cases/common/128 extract all shared library/extractor.h
+test cases/common/128 extract all shared library/four.c
+test cases/common/128 extract all shared library/func1234.def
+test cases/common/128 extract all shared library/meson.build
+test cases/common/128 extract all shared library/one.c
+test cases/common/128 extract all shared library/prog.c
+test cases/common/128 extract all shared library/three.c
+test cases/common/128 extract all shared library/two.c
+test cases/common/129 object only target/installed_files.txt
+test cases/common/129 object only target/meson.build
+test cases/common/129 object only target/obj_generator.py
+test cases/common/129 object only target/prog.c
+test cases/common/129 object only target/source.c
+test cases/common/129 object only target/source2.c
+test cases/common/129 object only target/source2.def
+test cases/common/129 object only target/source3.c
+test cases/common/13 pch/meson.build
+test cases/common/13 pch/prog.c
+test cases/common/13 pch/pch/prog.h
+test cases/common/13 pch/pch/prog_pch.c
+test cases/common/130 no buildincdir/meson.build
+test cases/common/130 no buildincdir/prog.c
+test cases/common/130 no buildincdir/include/header.h
+test cases/common/131 custom target directory install/docgen.py
+test cases/common/131 custom target directory install/installed_files.txt
+test cases/common/131 custom target directory install/meson.build
+test cases/common/132 dependency file generation/main .c
+test cases/common/132 dependency file generation/meson.build
+test cases/common/133 configure file in generator/meson.build
+test cases/common/133 configure file in generator/inc/confdata.in
+test cases/common/133 configure file in generator/inc/meson.build
+test cases/common/133 configure file in generator/src/gen.py
+test cases/common/133 configure file in generator/src/main.c
+test cases/common/133 configure file in generator/src/meson.build
+test cases/common/133 configure file in generator/src/source
+test cases/common/134 generated llvm ir/copyfile.py
+test cases/common/134 generated llvm ir/main.c
+test cases/common/134 generated llvm ir/meson.build
+test cases/common/134 generated llvm ir/square.ll.in
+test cases/common/135 generated assembly/copyfile.py
+test cases/common/135 generated assembly/main.c
+test cases/common/135 generated assembly/meson.build
+test cases/common/135 generated assembly/square-arm.S.in
+test cases/common/135 generated assembly/square-x86.S.in
+test cases/common/135 generated assembly/square-x86_64.S.in
+test cases/common/135 generated assembly/symbol-underscore.h
+test cases/common/136 build by default targets in tests/main.c
+test cases/common/136 build by default targets in tests/meson.build
+test cases/common/136 build by default targets in tests/write_file.py
+test cases/common/137 build by default/foo.c
+test cases/common/137 build by default/meson.build
+test cases/common/137 build by default/mygen.py
+test cases/common/137 build by default/source.txt
+test cases/common/138 include order/meson.build
+test cases/common/138 include order/ctsub/copyfile.py
+test cases/common/138 include order/ctsub/emptyfile.c
+test cases/common/138 include order/ctsub/main.h
+test cases/common/138 include order/ctsub/meson.build
+test cases/common/138 include order/sub1/main.h
+test cases/common/138 include order/sub1/meson.build
+test cases/common/138 include order/sub1/some.c
+test cases/common/138 include order/sub1/some.h
+test cases/common/138 include order/sub2/main.h
+test cases/common/138 include order/sub2/meson.build
+test cases/common/138 include order/sub3/main.h
+test cases/common/138 include order/sub3/meson.build
+test cases/common/138 include order/sub4/main.c
+test cases/common/138 include order/sub4/main.h
+test cases/common/138 include order/sub4/meson.build
+test cases/common/139 override options/four.c
+test cases/common/139 override options/meson.build
+test cases/common/139 override options/one.c
+test cases/common/139 override options/three.c
+test cases/common/139 override options/two.c
+test cases/common/14 cpp pch/meson.build
+test cases/common/14 cpp pch/prog.cc
+test cases/common/14 cpp pch/pch/prog.hh
+test cases/common/14 cpp pch/pch/prog_pch.cc
+test cases/common/140 get define/meson.build
+test cases/common/140 get define/meson_options.txt
+test cases/common/141 c cpp and asm/main.c
+test cases/common/141 c cpp and asm/main.cpp
+test cases/common/141 c cpp and asm/meson.build
+test cases/common/141 c cpp and asm/retval-arm.S
+test cases/common/141 c cpp and asm/retval-x86.S
+test cases/common/141 c cpp and asm/retval-x86_64.S
+test cases/common/141 c cpp and asm/somelib.c
+test cases/common/141 c cpp and asm/symbol-underscore.h
+test cases/common/142 compute int/config.h.in
+test cases/common/142 compute int/foobar.h
+test cases/common/142 compute int/meson.build
+test cases/common/142 compute int/prog.c.in
+test cases/common/143 custom target object output/meson.build
+test cases/common/143 custom target object output/obj_generator.py
+test cases/common/143 custom target object output/objdir/meson.build
+test cases/common/143 custom target object output/objdir/source.c
+test cases/common/143 custom target object output/progdir/meson.build
+test cases/common/143 custom target object output/progdir/prog.c
+test cases/common/144 empty build file/meson.build
+test cases/common/144 empty build file/subdir/meson.build
+test cases/common/145 whole archive/dylib.c
+test cases/common/145 whole archive/libfile.c
+test cases/common/145 whole archive/meson.build
+test cases/common/145 whole archive/mylib.h
+test cases/common/145 whole archive/prog.c
+test cases/common/145 whole archive/allofme/meson.build
+test cases/common/145 whole archive/exe/meson.build
+test cases/common/145 whole archive/exe2/meson.build
+test cases/common/145 whole archive/shlib/meson.build
+test cases/common/145 whole archive/stlib/meson.build
+test cases/common/145 whole archive/wholeshlib/meson.build
+test cases/common/146 C and CPP link/dummy.c
+test cases/common/146 C and CPP link/foo.c
+test cases/common/146 C and CPP link/foo.cpp
+test cases/common/146 C and CPP link/foo.h
+test cases/common/146 C and CPP link/foo.hpp
+test cases/common/146 C and CPP link/foobar.c
+test cases/common/146 C and CPP link/foobar.h
+test cases/common/146 C and CPP link/meson.build
+test cases/common/146 C and CPP link/sub.c
+test cases/common/146 C and CPP link/sub.h
+test cases/common/147 mesonintrospect from scripts/check_env.py
+test cases/common/147 mesonintrospect from scripts/meson.build
+test cases/common/148 custom target multiple outputs/generator.py
+test cases/common/148 custom target multiple outputs/installed_files.txt
+test cases/common/148 custom target multiple outputs/meson.build
+test cases/common/149 special characters/check_quoting.py
+test cases/common/149 special characters/installed_files.txt
+test cases/common/149 special characters/meson.build
+test cases/common/15 mixed pch/func.c
+test cases/common/15 mixed pch/main.cc
+test cases/common/15 mixed pch/meson.build
+test cases/common/15 mixed pch/pch/func.h
+test cases/common/15 mixed pch/pch/func_pch.c
+test cases/common/15 mixed pch/pch/main.h
+test cases/common/15 mixed pch/pch/main_pch.cc
+test cases/common/150 nested links/meson.build
+test cases/common/150 nested links/xephyr.c
+test cases/common/151 list of file sources/foo
+test cases/common/151 list of file sources/gen.py
+test cases/common/151 list of file sources/meson.build
+test cases/common/152 link depends custom target/foo.c
+test cases/common/152 link depends custom target/make_file.py
+test cases/common/152 link depends custom target/meson.build
+test cases/common/153 recursive linking/lib.h
+test cases/common/153 recursive linking/main.c
+test cases/common/153 recursive linking/meson.build
+test cases/common/153 recursive linking/3rdorderdeps/lib.c.in
+test cases/common/153 recursive linking/3rdorderdeps/main.c.in
+test cases/common/153 recursive linking/3rdorderdeps/meson.build
+test cases/common/153 recursive linking/circular/lib1.c
+test cases/common/153 recursive linking/circular/lib2.c
+test cases/common/153 recursive linking/circular/lib3.c
+test cases/common/153 recursive linking/circular/main.c
+test cases/common/153 recursive linking/circular/meson.build
+test cases/common/153 recursive linking/circular/prop1.c
+test cases/common/153 recursive linking/circular/prop2.c
+test cases/common/153 recursive linking/circular/prop3.c
+test cases/common/153 recursive linking/edge-cases/libsto.c
+test cases/common/153 recursive linking/edge-cases/meson.build
+test cases/common/153 recursive linking/edge-cases/shstmain.c
+test cases/common/153 recursive linking/edge-cases/stobuilt.c
+test cases/common/153 recursive linking/edge-cases/stomain.c
+test cases/common/153 recursive linking/shnodep/lib.c
+test cases/common/153 recursive linking/shnodep/meson.build
+test cases/common/153 recursive linking/shshdep/lib.c
+test cases/common/153 recursive linking/shshdep/meson.build
+test cases/common/153 recursive linking/shstdep/lib.c
+test cases/common/153 recursive linking/shstdep/meson.build
+test cases/common/153 recursive linking/stnodep/lib.c
+test cases/common/153 recursive linking/stnodep/meson.build
+test cases/common/153 recursive linking/stshdep/lib.c
+test cases/common/153 recursive linking/stshdep/meson.build
+test cases/common/153 recursive linking/ststdep/lib.c
+test cases/common/153 recursive linking/ststdep/meson.build
+test cases/common/154 library at root/lib.c
+test cases/common/154 library at root/meson.build
+test cases/common/154 library at root/main/main.c
+test cases/common/154 library at root/main/meson.build
+test cases/common/155 simd/fallback.c
+test cases/common/155 simd/meson.build
+test cases/common/155 simd/simd_avx.c
+test cases/common/155 simd/simd_avx2.c
+test cases/common/155 simd/simd_mmx.c
+test cases/common/155 simd/simd_neon.c
+test cases/common/155 simd/simd_sse.c
+test cases/common/155 simd/simd_sse2.c
+test cases/common/155 simd/simd_sse3.c
+test cases/common/155 simd/simd_sse41.c
+test cases/common/155 simd/simd_sse42.c
+test cases/common/155 simd/simd_ssse3.c
+test cases/common/155 simd/simdchecker.c
+test cases/common/155 simd/simdfuncs.h
+test cases/common/155 simd/include/simdheader.h
+test cases/common/156 shared module resolving symbol in executable/meson.build
+test cases/common/156 shared module resolving symbol in executable/module.c
+test cases/common/156 shared module resolving symbol in executable/prog.c
+test cases/common/157 configure file in test/meson.build
+test cases/common/157 configure file in test/test.py.in
+test cases/common/158 dotinclude/dotproc.c
+test cases/common/158 dotinclude/meson.build
+test cases/common/158 dotinclude/stdio.h
+test cases/common/159 reserved targets/meson.build
+test cases/common/159 reserved targets/test.c
+test cases/common/159 reserved targets/all/meson.build
+test cases/common/159 reserved targets/benchmark/meson.build
+test cases/common/159 reserved targets/clean/meson.build
+test cases/common/159 reserved targets/clean-ctlist/meson.build
+test cases/common/159 reserved targets/clean-gcda/meson.build
+test cases/common/159 reserved targets/clean-gcno/meson.build
+test cases/common/159 reserved targets/coverage/meson.build
+test cases/common/159 reserved targets/coverage-html/meson.build
+test cases/common/159 reserved targets/coverage-text/meson.build
+test cases/common/159 reserved targets/coverage-xml/meson.build
+test cases/common/159 reserved targets/dist/meson.build
+test cases/common/159 reserved targets/distcheck/meson.build
+test cases/common/159 reserved targets/install/meson.build
+test cases/common/159 reserved targets/phony/meson.build
+test cases/common/159 reserved targets/reconfigure/meson.build
+test cases/common/159 reserved targets/runtarget/meson.build
+test cases/common/159 reserved targets/scan-build/meson.build
+test cases/common/159 reserved targets/test/meson.build
+test cases/common/159 reserved targets/uninstall/meson.build
+test cases/common/16 configure file/basename.py
+test cases/common/16 configure file/check_file.py
+test cases/common/16 configure file/config.h
+test cases/common/16 configure file/config.h.in
+test cases/common/16 configure file/config4a.h.in
+test cases/common/16 configure file/config4b.h.in
+test cases/common/16 configure file/config5.h.in
+test cases/common/16 configure file/config6.h.in
+test cases/common/16 configure file/dummy.dat
+test cases/common/16 configure file/dumpprog.c
+test cases/common/16 configure file/file_contains.py
+test cases/common/16 configure file/generator-without-input-file.py
+test cases/common/16 configure file/generator.py
+test cases/common/16 configure file/installed_files.txt
+test cases/common/16 configure file/meson.build
+test cases/common/16 configure file/prog.c
+test cases/common/16 configure file/prog2.c
+test cases/common/16 configure file/prog4.c
+test cases/common/16 configure file/prog5.c
+test cases/common/16 configure file/prog6.c
+test cases/common/16 configure file/touch.py
+test cases/common/16 configure file/subdir/meson.build
+test cases/common/160 duplicate source names/meson.build
+test cases/common/160 duplicate source names/dir1/file.c
+test cases/common/160 duplicate source names/dir1/meson.build
+test cases/common/160 duplicate source names/dir2/file.c
+test cases/common/160 duplicate source names/dir2/meson.build
+test cases/common/160 duplicate source names/dir2/dir1/file.c
+test cases/common/160 duplicate source names/dir3/file.c
+test cases/common/160 duplicate source names/dir3/meson.build
+test cases/common/160 duplicate source names/dir3/dir1/file.c
+test cases/common/161 index customtarget/gen_sources.py
+test cases/common/161 index customtarget/lib.c
+test cases/common/161 index customtarget/meson.build
+test cases/common/161 index customtarget/subdir/foo.c
+test cases/common/161 index customtarget/subdir/meson.build
+test cases/common/162 wrap file should not failed/meson.build
+test cases/common/162 wrap file should not failed/src/meson.build
+test cases/common/162 wrap file should not failed/src/subprojects/prog.c
+test cases/common/162 wrap file should not failed/src/subprojects/foo/prog2.c
+test cases/common/162 wrap file should not failed/subprojects/zlib.wrap
+test cases/common/162 wrap file should not failed/subprojects/packagecache/zlib-1.2.8-8-wrap.zip
+test cases/common/162 wrap file should not failed/subprojects/packagecache/zlib-1.2.8.tar.gz
+test cases/common/162 wrap file should not failed/subprojects/zlib-1.2.8/foo.c
+test cases/common/162 wrap file should not failed/subprojects/zlib-1.2.8/meson.build
+test cases/common/163 includedir subproj/meson.build
+test cases/common/163 includedir subproj/prog.c
+test cases/common/163 includedir subproj/subprojects/inctest/meson.build
+test cases/common/163 includedir subproj/subprojects/inctest/include/incfile.h
+test cases/common/164 subproject dir name collision/a.c
+test cases/common/164 subproject dir name collision/meson.build
+test cases/common/164 subproject dir name collision/custom_subproject_dir/B/b.c
+test cases/common/164 subproject dir name collision/custom_subproject_dir/B/meson.build
+test cases/common/164 subproject dir name collision/custom_subproject_dir/C/c.c
+test cases/common/164 subproject dir name collision/custom_subproject_dir/C/meson.build
+test cases/common/164 subproject dir name collision/other_subdir/meson.build
+test cases/common/164 subproject dir name collision/other_subdir/custom_subproject_dir/other.c
+test cases/common/165 config tool variable/meson.build
+test cases/common/166 custom target subdir depend files/copyfile.py
+test cases/common/166 custom target subdir depend files/meson.build
+test cases/common/166 custom target subdir depend files/subdir/dep.dat
+test cases/common/166 custom target subdir depend files/subdir/foo.c.in
+test cases/common/166 custom target subdir depend files/subdir/meson.build
+test cases/common/167 external program shebang parsing/input.txt
+test cases/common/167 external program shebang parsing/main.c
+test cases/common/167 external program shebang parsing/meson.build
+test cases/common/167 external program shebang parsing/script.int.in
+test cases/common/168 disabler/meson.build
+test cases/common/169 array option/meson.build
+test cases/common/169 array option/meson_options.txt
+test cases/common/17 if/meson.build
+test cases/common/17 if/prog.c
+test cases/common/170 custom target template substitution/checkcopy.py
+test cases/common/170 custom target template substitution/foo.c.in
+test cases/common/170 custom target template substitution/meson.build
+test cases/common/171 not-found dependency/meson.build
+test cases/common/172 subdir if_found/meson.build
+test cases/common/172 subdir if_found/subdir/meson.build
+test cases/common/18 else/meson.build
+test cases/common/18 else/prog.c
+test cases/common/19 comparison/meson.build
+test cases/common/19 comparison/prog.c
+test cases/common/2 cpp/meson.build
+test cases/common/2 cpp/something.txt
+test cases/common/2 cpp/trivial.cc
+test cases/common/20 array/func.c
+test cases/common/20 array/meson.build
+test cases/common/20 array/prog.c
+test cases/common/21 includedir/meson.build
+test cases/common/21 includedir/include/func.h
+test cases/common/21 includedir/src/func.c
+test cases/common/21 includedir/src/meson.build
+test cases/common/21 includedir/src/prog.c
+test cases/common/22 header in file list/header.h
+test cases/common/22 header in file list/meson.build
+test cases/common/22 header in file list/prog.c
+test cases/common/23 global arg/meson.build
+test cases/common/23 global arg/prog.c
+test cases/common/23 global arg/prog.cc
+test cases/common/24 target arg/func.c
+test cases/common/24 target arg/func2.c
+test cases/common/24 target arg/meson.build
+test cases/common/24 target arg/prog.cc
+test cases/common/24 target arg/prog2.cc
+test cases/common/25 object extraction/lib.c
+test cases/common/25 object extraction/lib2.c
+test cases/common/25 object extraction/main.c
+test cases/common/25 object extraction/meson.build
+test cases/common/25 object extraction/src/lib.c
+test cases/common/26 endian/meson.build
+test cases/common/26 endian/prog.c
+test cases/common/27 library versions/installed_files.txt
+test cases/common/27 library versions/lib.c
+test cases/common/27 library versions/meson.build
+test cases/common/27 library versions/subdir/meson.build
+test cases/common/28 config subdir/meson.build
+test cases/common/28 config subdir/include/config.h.in
+test cases/common/28 config subdir/include/meson.build
+test cases/common/28 config subdir/src/meson.build
+test cases/common/28 config subdir/src/prog.c
+test cases/common/29 pipeline/input_src.dat
+test cases/common/29 pipeline/meson.build
+test cases/common/29 pipeline/prog.c
+test cases/common/29 pipeline/srcgen.c
+test cases/common/3 static/libfile.c
+test cases/common/3 static/libfile2.c
+test cases/common/3 static/meson.build
+test cases/common/3 static/meson_options.txt
+test cases/common/30 pipeline/meson.build
+test cases/common/30 pipeline/src/input_src.dat
+test cases/common/30 pipeline/src/meson.build
+test cases/common/30 pipeline/src/prog.c
+test cases/common/30 pipeline/src/srcgen.c
+test cases/common/31 find program/meson.build
+test cases/common/31 find program/source.in
+test cases/common/32 multiline string/meson.build
+test cases/common/33 try compile/invalid.c
+test cases/common/33 try compile/meson.build
+test cases/common/33 try compile/valid.c
+test cases/common/34 compiler id/meson.build
+test cases/common/35 sizeof/config.h.in
+test cases/common/35 sizeof/meson.build
+test cases/common/35 sizeof/prog.c.in
+test cases/common/36 define10/config.h.in
+test cases/common/36 define10/meson.build
+test cases/common/36 define10/prog.c
+test cases/common/37 has header/meson.build
+test cases/common/37 has header/ouagadougou.h
+test cases/common/38 run program/meson.build
+test cases/common/38 run program/scripts/hello.bat
+test cases/common/38 run program/scripts/hello.sh
+test cases/common/39 tryrun/error.c
+test cases/common/39 tryrun/meson.build
+test cases/common/39 tryrun/no_compile.c
+test cases/common/39 tryrun/ok.c
+test cases/common/4 shared/libfile.c
+test cases/common/4 shared/meson.build
+test cases/common/40 logic ops/meson.build
+test cases/common/41 elif/meson.build
+test cases/common/42 string operations/meson.build
+test cases/common/43 has function/meson.build
+test cases/common/44 has member/meson.build
+test cases/common/45 alignment/meson.build
+test cases/common/46 library chain/installed_files.txt
+test cases/common/46 library chain/main.c
+test cases/common/46 library chain/meson.build
+test cases/common/46 library chain/subdir/lib1.c
+test cases/common/46 library chain/subdir/meson.build
+test cases/common/46 library chain/subdir/subdir2/lib2.c
+test cases/common/46 library chain/subdir/subdir2/meson.build
+test cases/common/46 library chain/subdir/subdir3/lib3.c
+test cases/common/46 library chain/subdir/subdir3/meson.build
+test cases/common/47 options/meson.build
+test cases/common/47 options/meson_options.txt
+test cases/common/48 test args/cmd_args.c
+test cases/common/48 test args/copyfile.py
+test cases/common/48 test args/env2vars.c
+test cases/common/48 test args/envvars.c
+test cases/common/48 test args/meson.build
+test cases/common/48 test args/tester.c
+test cases/common/48 test args/tester.py
+test cases/common/48 test args/testfile.txt
+test cases/common/49 subproject/installed_files.txt
+test cases/common/49 subproject/meson.build
+test cases/common/49 subproject/user.c
+test cases/common/49 subproject/subprojects/sublib/meson.build
+test cases/common/49 subproject/subprojects/sublib/simpletest.c
+test cases/common/49 subproject/subprojects/sublib/sublib.c
+test cases/common/49 subproject/subprojects/sublib/include/subdefs.h
+test cases/common/5 linkstatic/libfile.c
+test cases/common/5 linkstatic/libfile2.c
+test cases/common/5 linkstatic/libfile3.c
+test cases/common/5 linkstatic/libfile4.c
+test cases/common/5 linkstatic/main.c
+test cases/common/5 linkstatic/meson.build
+test cases/common/50 subproject options/meson.build
+test cases/common/50 subproject options/meson_options.txt
+test cases/common/50 subproject options/subprojects/subproject/meson.build
+test cases/common/50 subproject options/subprojects/subproject/meson_options.txt
+test cases/common/51 pkgconfig-gen/installed_files.txt
+test cases/common/51 pkgconfig-gen/meson.build
+test cases/common/51 pkgconfig-gen/simple.c
+test cases/common/51 pkgconfig-gen/simple.h
+test cases/common/52 custom install dirs/datafile.cat
+test cases/common/52 custom install dirs/installed_files.txt
+test cases/common/52 custom install dirs/meson.build
+test cases/common/52 custom install dirs/prog.1
+test cases/common/52 custom install dirs/prog.c
+test cases/common/52 custom install dirs/sample.h
+test cases/common/52 custom install dirs/subdir/datafile.dog
+test cases/common/53 subproject subproject/meson.build
+test cases/common/53 subproject subproject/prog.c
+test cases/common/53 subproject subproject/subprojects/a/a.c
+test cases/common/53 subproject subproject/subprojects/a/meson.build
+test cases/common/53 subproject subproject/subprojects/b/b.c
+test cases/common/53 subproject subproject/subprojects/b/meson.build
+test cases/common/54 same file name/meson.build
+test cases/common/54 same file name/prog.c
+test cases/common/54 same file name/d1/file.c
+test cases/common/54 same file name/d2/file.c
+test cases/common/55 file grabber/a.c
+test cases/common/55 file grabber/b.c
+test cases/common/55 file grabber/c.c
+test cases/common/55 file grabber/grabber.bat
+test cases/common/55 file grabber/grabber.sh
+test cases/common/55 file grabber/grabber2.bat
+test cases/common/55 file grabber/meson.build
+test cases/common/55 file grabber/prog.c
+test cases/common/55 file grabber/subdir/meson.build
+test cases/common/55 file grabber/subdir/suba.c
+test cases/common/55 file grabber/subdir/subb.c
+test cases/common/55 file grabber/subdir/subc.c
+test cases/common/55 file grabber/subdir/subprog.c
+test cases/common/56 custom target/data_source.txt
+test cases/common/56 custom target/installed_files.txt
+test cases/common/56 custom target/meson.build
+test cases/common/56 custom target/my_compiler.py
+test cases/common/56 custom target/depfile/dep.py
+test cases/common/56 custom target/depfile/meson.build
+test cases/common/57 custom target chain/data_source.txt
+test cases/common/57 custom target chain/installed_files.txt
+test cases/common/57 custom target chain/meson.build
+test cases/common/57 custom target chain/my_compiler.py
+test cases/common/57 custom target chain/my_compiler2.py
+test cases/common/57 custom target chain/usetarget/meson.build
+test cases/common/57 custom target chain/usetarget/myexe.c
+test cases/common/57 custom target chain/usetarget/subcomp.py
+test cases/common/58 run target/check_exists.py
+test cases/common/58 run target/configure.in
+test cases/common/58 run target/converter.py
+test cases/common/58 run target/fakeburner.py
+test cases/common/58 run target/helloprinter.c
+test cases/common/58 run target/meson.build
+test cases/common/58 run target/scripts/script.sh
+test cases/common/59 object generator/meson.build
+test cases/common/59 object generator/obj_generator.py
+test cases/common/59 object generator/prog.c
+test cases/common/59 object generator/source.c
+test cases/common/59 object generator/source2.c
+test cases/common/59 object generator/source3.c
+test cases/common/6 linkshared/cpplib.cpp
+test cases/common/6 linkshared/cppmain.cpp
+test cases/common/6 linkshared/installed_files.txt
+test cases/common/6 linkshared/libfile.c
+test cases/common/6 linkshared/main.c
+test cases/common/6 linkshared/meson.build
+test cases/common/60 install script/installed_files.txt
+test cases/common/60 install script/meson.build
+test cases/common/60 install script/myinstall.py
+test cases/common/60 install script/no-installed-files
+test cases/common/60 install script/prog.c
+test cases/common/60 install script/src/meson.build
+test cases/common/60 install script/src/myinstall.py
+test cases/common/61 custom target source output/generator.py
+test cases/common/61 custom target source output/main.c
+test cases/common/61 custom target source output/meson.build
+test cases/common/62 exe static shared/meson.build
+test cases/common/62 exe static shared/prog.c
+test cases/common/62 exe static shared/shlib2.c
+test cases/common/62 exe static shared/stat.c
+test cases/common/62 exe static shared/stat2.c
+test cases/common/62 exe static shared/subdir/exports.h
+test cases/common/62 exe static shared/subdir/meson.build
+test cases/common/62 exe static shared/subdir/shlib.c
+test cases/common/63 array methods/meson.build
+test cases/common/64 custom header generator/input.def
+test cases/common/64 custom header generator/makeheader.py
+test cases/common/64 custom header generator/meson.build
+test cases/common/64 custom header generator/prog.c
+test cases/common/64 custom header generator/somefile.txt
+test cases/common/65 multiple generators/data2.dat
+test cases/common/65 multiple generators/main.cpp
+test cases/common/65 multiple generators/meson.build
+test cases/common/65 multiple generators/mygen.py
+test cases/common/65 multiple generators/subdir/data.dat
+test cases/common/65 multiple generators/subdir/meson.build
+test cases/common/66 install subdir/installed_files.txt
+test cases/common/66 install subdir/meson.build
+test cases/common/66 install subdir/sub/sub1/third.dat
+test cases/common/66 install subdir/sub1/second.dat
+test cases/common/66 install subdir/sub2/excluded-three.dat
+test cases/common/66 install subdir/sub2/one.dat
+test cases/common/66 install subdir/sub2/dircheck/excluded-three.dat
+test cases/common/66 install subdir/sub2/excluded/two.dat
+test cases/common/66 install subdir/subdir/meson.build
+test cases/common/66 install subdir/subdir/sub1/data1.dat
+test cases/common/66 install subdir/subdir/sub1/sub2/data2.dat
+test cases/common/67 foreach/installed_files.txt
+test cases/common/67 foreach/meson.build
+test cases/common/67 foreach/prog1.c
+test cases/common/67 foreach/prog2.c
+test cases/common/67 foreach/prog3.c
+test cases/common/68 number arithmetic/meson.build
+test cases/common/69 string arithmetic/meson.build
+test cases/common/7 mixed/func.c
+test cases/common/7 mixed/main.cc
+test cases/common/7 mixed/meson.build
+test cases/common/70 array arithmetic/meson.build
+test cases/common/71 arithmetic bidmas/meson.build
+test cases/common/72 build always/main.c
+test cases/common/72 build always/meson.build
+test cases/common/72 build always/version.c.in
+test cases/common/72 build always/version.h
+test cases/common/72 build always/version_gen.py
+test cases/common/73 vcstag/meson.build
+test cases/common/73 vcstag/tagprog.c
+test cases/common/73 vcstag/vcstag.c.in
+test cases/common/74 modules/meson.build
+test cases/common/75 should fail/failing.c
+test cases/common/75 should fail/meson.build
+test cases/common/76 configure file in custom target/meson.build
+test cases/common/76 configure file in custom target/inc/confdata.in
+test cases/common/76 configure file in custom target/inc/meson.build
+test cases/common/76 configure file in custom target/src/meson.build
+test cases/common/76 configure file in custom target/src/mycompiler.py
+test cases/common/77 external test program/meson.build
+test cases/common/77 external test program/mytest.py
+test cases/common/78 ctarget dependency/gen1.py
+test cases/common/78 ctarget dependency/gen2.py
+test cases/common/78 ctarget dependency/input.dat
+test cases/common/78 ctarget dependency/meson.build
+test cases/common/79 shared subproject/a.c
+test cases/common/79 shared subproject/meson.build
+test cases/common/79 shared subproject/subprojects/B/b.c
+test cases/common/79 shared subproject/subprojects/B/meson.build
+test cases/common/79 shared subproject/subprojects/C/c.c
+test cases/common/79 shared subproject/subprojects/C/meson.build
+test cases/common/8 install/installed_files.txt
+test cases/common/8 install/meson.build
+test cases/common/8 install/prog.c
+test cases/common/8 install/stat.c
+test cases/common/80 shared subproject 2/a.c
+test cases/common/80 shared subproject 2/meson.build
+test cases/common/80 shared subproject 2/subprojects/B/b.c
+test cases/common/80 shared subproject 2/subprojects/B/meson.build
+test cases/common/80 shared subproject 2/subprojects/C/c.c
+test cases/common/80 shared subproject 2/subprojects/C/meson.build
+test cases/common/81 file object/lib.c
+test cases/common/81 file object/meson.build
+test cases/common/81 file object/prog.c
+test cases/common/81 file object/subdir1/lib.c
+test cases/common/81 file object/subdir1/meson.build
+test cases/common/81 file object/subdir1/prog.c
+test cases/common/81 file object/subdir2/lib.c
+test cases/common/81 file object/subdir2/meson.build
+test cases/common/81 file object/subdir2/prog.c
+test cases/common/82 custom subproject dir/a.c
+test cases/common/82 custom subproject dir/meson.build
+test cases/common/82 custom subproject dir/custom_subproject_dir/B/b.c
+test cases/common/82 custom subproject dir/custom_subproject_dir/B/meson.build
+test cases/common/82 custom subproject dir/custom_subproject_dir/C/c.c
+test cases/common/82 custom subproject dir/custom_subproject_dir/C/meson.build
+test cases/common/83 has type/meson.build
+test cases/common/84 extract from nested subdir/meson.build
+test cases/common/84 extract from nested subdir/src/meson.build
+test cases/common/84 extract from nested subdir/src/first/lib_first.c
+test cases/common/84 extract from nested subdir/src/first/meson.build
+test cases/common/84 extract from nested subdir/tst/meson.build
+test cases/common/84 extract from nested subdir/tst/first/exe_first.c
+test cases/common/84 extract from nested subdir/tst/first/meson.build
+test cases/common/85 internal dependency/meson.build
+test cases/common/85 internal dependency/proj1/meson.build
+test cases/common/85 internal dependency/proj1/proj1f1.c
+test cases/common/85 internal dependency/proj1/proj1f2.c
+test cases/common/85 internal dependency/proj1/proj1f3.c
+test cases/common/85 internal dependency/proj1/include/proj1.h
+test cases/common/85 internal dependency/src/main.c
+test cases/common/85 internal dependency/src/meson.build
+test cases/common/86 same basename/exe1.c
+test cases/common/86 same basename/exe2.c
+test cases/common/86 same basename/lib.c
+test cases/common/86 same basename/meson.build
+test cases/common/86 same basename/sharedsub/meson.build
+test cases/common/86 same basename/staticsub/meson.build
+test cases/common/87 declare dep/main.c
+test cases/common/87 declare dep/meson.build
+test cases/common/87 declare dep/entity/entity.h
+test cases/common/87 declare dep/entity/entity1.c
+test cases/common/87 declare dep/entity/entity2.c
+test cases/common/87 declare dep/entity/meson.build
+test cases/common/88 extract all/extractor.h
+test cases/common/88 extract all/four.c
+test cases/common/88 extract all/meson.build
+test cases/common/88 extract all/one.c
+test cases/common/88 extract all/prog.c
+test cases/common/88 extract all/three.c
+test cases/common/88 extract all/two.c
+test cases/common/89 add language/meson.build
+test cases/common/89 add language/prog.c
+test cases/common/89 add language/prog.cc
+test cases/common/9 header install/installed_files.txt
+test cases/common/9 header install/meson.build
+test cases/common/9 header install/rootdir.h
+test cases/common/9 header install/subdir.h
+test cases/common/9 header install/sub/fileheader.h
+test cases/common/9 header install/sub/meson.build
+test cases/common/9 header install/vanishing_subdir/meson.build
+test cases/common/9 header install/vanishing_subdir/vanished.h
+test cases/common/90 identical target name in subproject/bar.c
+test cases/common/90 identical target name in subproject/meson.build
+test cases/common/90 identical target name in subproject/subprojects/foo/bar.c
+test cases/common/90 identical target name in subproject/subprojects/foo/meson.build
+test cases/common/91 plusassign/meson.build
+test cases/common/92 skip subdir/meson.build
+test cases/common/92 skip subdir/subdir1/meson.build
+test cases/common/92 skip subdir/subdir1/subdir2/meson.build
+test cases/common/93 private include/meson.build
+test cases/common/93 private include/stlib/compiler.py
+test cases/common/93 private include/stlib/foo1.def
+test cases/common/93 private include/stlib/foo2.def
+test cases/common/93 private include/stlib/meson.build
+test cases/common/93 private include/user/libuser.c
+test cases/common/93 private include/user/meson.build
+test cases/common/94 default options/meson.build
+test cases/common/95 dep fallback/gensrc.py
+test cases/common/95 dep fallback/meson.build
+test cases/common/95 dep fallback/tester.c
+test cases/common/95 dep fallback/subprojects/boblib/bob.c
+test cases/common/95 dep fallback/subprojects/boblib/bob.h
+test cases/common/95 dep fallback/subprojects/boblib/genbob.py
+test cases/common/95 dep fallback/subprojects/boblib/meson.build
+test cases/common/95 dep fallback/subprojects/dummylib/meson.build
+test cases/common/96 default library/ef.cpp
+test cases/common/96 default library/ef.h
+test cases/common/96 default library/eftest.cpp
+test cases/common/96 default library/meson.build
+test cases/common/97 selfbuilt custom/data.dat
+test cases/common/97 selfbuilt custom/mainprog.cpp
+test cases/common/97 selfbuilt custom/meson.build
+test cases/common/97 selfbuilt custom/tool.cpp
+test cases/common/98 gen extra/meson.build
+test cases/common/98 gen extra/name.dat
+test cases/common/98 gen extra/name.l
+test cases/common/98 gen extra/plain.c
+test cases/common/98 gen extra/srcgen.py
+test cases/common/98 gen extra/srcgen2.py
+test cases/common/98 gen extra/srcgen3.py
+test cases/common/98 gen extra/upper.c
+test cases/common/99 benchmark/delayer.c
+test cases/common/99 benchmark/meson.build
+test cases/csharp/1 basic/installed_files.txt
+test cases/csharp/1 basic/meson.build
+test cases/csharp/1 basic/prog.cs
+test cases/csharp/2 library/helper.cs
+test cases/csharp/2 library/installed_files.txt
+test cases/csharp/2 library/meson.build
+test cases/csharp/2 library/prog.cs
+test cases/csharp/3 resource/TestRes.resx
+test cases/csharp/3 resource/meson.build
+test cases/csharp/3 resource/resprog.cs
+test cases/csharp/4 external dep/hello.txt
+test cases/csharp/4 external dep/meson.build
+test cases/csharp/4 external dep/prog.cs
+test cases/d/1 simple/app.d
+test cases/d/1 simple/installed_files.txt
+test cases/d/1 simple/meson.build
+test cases/d/1 simple/utils.d
+test cases/d/2 static library/app.d
+test cases/d/2 static library/installed_files.txt
+test cases/d/2 static library/libstuff.d
+test cases/d/2 static library/meson.build
+test cases/d/3 shared library/app.d
+test cases/d/3 shared library/installed_files.txt
+test cases/d/3 shared library/libstuff.d
+test cases/d/3 shared library/meson.build
+test cases/d/4 library versions/installed_files.txt
+test cases/d/4 library versions/lib.d
+test cases/d/4 library versions/meson.build
+test cases/d/5 mixed/app.d
+test cases/d/5 mixed/installed_files.txt
+test cases/d/5 mixed/libstuff.c
+test cases/d/5 mixed/meson.build
+test cases/d/6 unittest/app.d
+test cases/d/6 unittest/installed_files.txt
+test cases/d/6 unittest/meson.build
+test cases/d/7 multilib/app.d
+test cases/d/7 multilib/installed_files.txt
+test cases/d/7 multilib/meson.build
+test cases/d/7 multilib/say1.d
+test cases/d/7 multilib/say2.d
+test cases/d/8 has multi arguments/meson.build
+test cases/d/9 features/app.d
+test cases/d/9 features/meson.build
+test cases/d/9 features/data/food.txt
+test cases/d/9 features/data/people.txt
+test cases/failing build/1 vala c werror/meson.build
+test cases/failing build/1 vala c werror/prog.vala
+test cases/failing build/1 vala c werror/unused-var.c
+test cases/failing/1 project not first/meson.build
+test cases/failing/1 project not first/prog.c
+test cases/failing/10 out of bounds/meson.build
+test cases/failing/11 object arithmetic/meson.build
+test cases/failing/12 string arithmetic/meson.build
+test cases/failing/13 array arithmetic/meson.build
+test cases/failing/14 invalid option name/meson.build
+test cases/failing/14 invalid option name/meson_options.txt
+test cases/failing/15 kwarg before arg/meson.build
+test cases/failing/15 kwarg before arg/prog.c
+test cases/failing/16 extract from subproject/main.c
+test cases/failing/16 extract from subproject/meson.build
+test cases/failing/16 extract from subproject/subprojects/sub_project/meson.build
+test cases/failing/16 extract from subproject/subprojects/sub_project/sub_lib.c
+test cases/failing/17 same name/file.c
+test cases/failing/17 same name/meson.build
+test cases/failing/17 same name/sub/file2.c
+test cases/failing/17 same name/sub/meson.build
+test cases/failing/18 wrong plusassign/meson.build
+test cases/failing/19 target clash/clash.c
+test cases/failing/19 target clash/meson.build
+test cases/failing/2 missing file/meson.build
+test cases/failing/20 version/meson.build
+test cases/failing/21 subver/meson.build
+test cases/failing/21 subver/subprojects/foo/meson.build
+test cases/failing/22 assert/meson.build
+test cases/failing/23 rel testdir/meson.build
+test cases/failing/23 rel testdir/simple.c
+test cases/failing/25 int conversion/meson.build
+test cases/failing/26 badlang/meson.build
+test cases/failing/27 output subdir/foo.in
+test cases/failing/27 output subdir/meson.build
+test cases/failing/27 output subdir/subdir/dummy.txt
+test cases/failing/28 noprog use/meson.build
+test cases/failing/29 no crossprop/meson.build
+test cases/failing/3 missing subdir/meson.build
+test cases/failing/30 nested ternary/meson.build
+test cases/failing/31 invalid man extension/meson.build
+test cases/failing/32 no man extension/meson.build
+test cases/failing/33 exe static shared/meson.build
+test cases/failing/33 exe static shared/prog.c
+test cases/failing/33 exe static shared/shlib2.c
+test cases/failing/33 exe static shared/stat.c
+test cases/failing/34 non-root subproject/meson.build
+test cases/failing/34 non-root subproject/some/meson.build
+test cases/failing/35 dependency not-required then required/meson.build
+test cases/failing/36 project argument after target/exe.c
+test cases/failing/36 project argument after target/meson.build
+test cases/failing/37 pkgconfig dependency impossible conditions/meson.build
+test cases/failing/38 has function external dependency/meson.build
+test cases/failing/38 has function external dependency/mylib.c
+test cases/failing/39 libdir must be inside prefix/meson.build
+test cases/failing/4 missing meson.build/meson.build
+test cases/failing/4 missing meson.build/subdir/dummy.txt
+test cases/failing/40 prefix absolute/meson.build
+test cases/failing/41 kwarg assign/dummy.c
+test cases/failing/41 kwarg assign/meson.build
+test cases/failing/41 kwarg assign/prog.c
+test cases/failing/42 custom target plainname many inputs/1.txt
+test cases/failing/42 custom target plainname many inputs/2.txt
+test cases/failing/42 custom target plainname many inputs/catfiles.py
+test cases/failing/42 custom target plainname many inputs/meson.build
+test cases/failing/43 custom target outputs not matching install_dirs/generator.py
+test cases/failing/43 custom target outputs not matching install_dirs/installed_files.txt
+test cases/failing/43 custom target outputs not matching install_dirs/meson.build
+test cases/failing/44 project name colon/meson.build
+test cases/failing/45 abs subdir/meson.build
+test cases/failing/45 abs subdir/bob/meson.build
+test cases/failing/46 abspath to srcdir/meson.build
+test cases/failing/47 pkgconfig variables reserved/meson.build
+test cases/failing/47 pkgconfig variables reserved/simple.c
+test cases/failing/47 pkgconfig variables reserved/simple.h
+test cases/failing/48 pkgconfig variables zero length/meson.build
+test cases/failing/48 pkgconfig variables zero length/simple.c
+test cases/failing/48 pkgconfig variables zero length/simple.h
+test cases/failing/49 pkgconfig variables zero length value/meson.build
+test cases/failing/49 pkgconfig variables zero length value/simple.c
+test cases/failing/49 pkgconfig variables zero length value/simple.h
+test cases/failing/5 misplaced option/meson.build
+test cases/failing/50 pkgconfig variables not key value/meson.build
+test cases/failing/50 pkgconfig variables not key value/simple.c
+test cases/failing/50 pkgconfig variables not key value/simple.h
+test cases/failing/51 executable comparison/meson.build
+test cases/failing/51 executable comparison/prog.c
+test cases/failing/52 inconsistent comparison/meson.build
+test cases/failing/53 slashname/meson.build
+test cases/failing/53 slashname/sub/meson.build
+test cases/failing/53 slashname/sub/prog.c
+test cases/failing/54 reserved meson prefix/meson.build
+test cases/failing/54 reserved meson prefix/meson-foo/meson.build
+test cases/failing/55 wrong shared crate type/foo.rs
+test cases/failing/55 wrong shared crate type/meson.build
+test cases/failing/56 wrong static crate type/foo.rs
+test cases/failing/56 wrong static crate type/meson.build
+test cases/failing/57 or on new line/meson.build
+test cases/failing/57 or on new line/meson_options.txt
+test cases/failing/58 kwarg in module/meson.build
+test cases/failing/59 link with executable/meson.build
+test cases/failing/59 link with executable/module.c
+test cases/failing/59 link with executable/prog.c
+test cases/failing/6 missing incdir/meson.build
+test cases/failing/60 assign custom target index/meson.build
+test cases/failing/61 getoption prefix/meson.build
+test cases/failing/61 getoption prefix/subprojects/abc/meson.build
+test cases/failing/61 getoption prefix/subprojects/abc/meson_options.txt
+test cases/failing/62 bad option argument/meson.build
+test cases/failing/62 bad option argument/meson_options.txt
+test cases/failing/63 subproj filegrab/meson.build
+test cases/failing/63 subproj filegrab/prog.c
+test cases/failing/63 subproj filegrab/subprojects/a/meson.build
+test cases/failing/64 grab subproj/meson.build
+test cases/failing/64 grab subproj/subprojects/foo/meson.build
+test cases/failing/64 grab subproj/subprojects/foo/sub.c
+test cases/failing/65 grab sibling/meson.build
+test cases/failing/65 grab sibling/subprojects/a/meson.build
+test cases/failing/65 grab sibling/subprojects/b/meson.build
+test cases/failing/65 grab sibling/subprojects/b/sneaky.c
+test cases/failing/66 string as link target/meson.build
+test cases/failing/66 string as link target/prog.c
+test cases/failing/67 dependency not-found and required/meson.build
+test cases/failing/7 go to subproject/meson.build
+test cases/failing/7 go to subproject/subprojects/meson.build
+test cases/failing/8 recursive/meson.build
+test cases/failing/8 recursive/subprojects/a/meson.build
+test cases/failing/8 recursive/subprojects/b/meson.build
+test cases/failing/9 missing extra file/meson.build
+test cases/failing/9 missing extra file/prog.c
+test cases/fortran/1 basic/meson.build
+test cases/fortran/1 basic/simple.f90
+test cases/fortran/2 modules/meson.build
+test cases/fortran/2 modules/prog.f90
+test cases/fortran/2 modules/stuff.f90
+test cases/fortran/3 module procedure/meson.build
+test cases/fortran/3 module procedure/prog.f90
+test cases/fortran/4 self dependency/meson.build
+test cases/fortran/4 self dependency/selfdep.f90
+test cases/fortran/5 static/main.f90
+test cases/fortran/5 static/meson.build
+test cases/fortran/5 static/static_hello.f90
+test cases/fortran/6 dynamic/dynamic.f90
+test cases/fortran/6 dynamic/main.f90
+test cases/fortran/6 dynamic/meson.build
+test cases/fortran/7 generated/meson.build
+test cases/fortran/7 generated/mod1.fpp
+test cases/fortran/7 generated/mod2.fpp
+test cases/fortran/7 generated/prog.f90
+test cases/fortran/8 module names/meson.build
+test cases/fortran/8 module names/mod1.f90
+test cases/fortran/8 module names/mod2.f90
+test cases/fortran/8 module names/test.f90
+test cases/frameworks/1 boost/extralib.cpp
+test cases/frameworks/1 boost/linkexe.cc
+test cases/frameworks/1 boost/meson.build
+test cases/frameworks/1 boost/nomod.cpp
+test cases/frameworks/1 boost/unit_test.cpp
+test cases/frameworks/10 gtk-doc/installed_files.txt.bak
+test cases/frameworks/10 gtk-doc/meson.build
+test cases/frameworks/10 gtk-doc/doc/foobar-docs.sgml
+test cases/frameworks/10 gtk-doc/doc/meson.build
+test cases/frameworks/10 gtk-doc/doc/version.xml.in
+test cases/frameworks/10 gtk-doc/include/foo-version.h.in
+test cases/frameworks/10 gtk-doc/include/foo.h
+test cases/frameworks/10 gtk-doc/include/meson.build
+test cases/frameworks/11 gir subproject/installed_files.txt
+test cases/frameworks/11 gir subproject/meson.build
+test cases/frameworks/11 gir subproject/gir/meson-subsample.c
+test cases/frameworks/11 gir subproject/gir/meson-subsample.h
+test cases/frameworks/11 gir subproject/gir/meson.build
+test cases/frameworks/11 gir subproject/gir/prog.c
+test cases/frameworks/11 gir subproject/gir/prog.py
+test cases/frameworks/11 gir subproject/subprojects/mesongir/meson-sample.c
+test cases/frameworks/11 gir subproject/subprojects/mesongir/meson-sample.h
+test cases/frameworks/11 gir subproject/subprojects/mesongir/meson.build
+test cases/frameworks/12 multiple gir/installed_files.txt
+test cases/frameworks/12 multiple gir/meson.build
+test cases/frameworks/12 multiple gir/gir/meson-subsample.c
+test cases/frameworks/12 multiple gir/gir/meson-subsample.h
+test cases/frameworks/12 multiple gir/gir/meson.build
+test cases/frameworks/12 multiple gir/gir/prog.c
+test cases/frameworks/12 multiple gir/mesongir/meson-sample.c
+test cases/frameworks/12 multiple gir/mesongir/meson-sample.h.in
+test cases/frameworks/12 multiple gir/mesongir/meson.build
+test cases/frameworks/13 yelp/installed_files.txt
+test cases/frameworks/13 yelp/meson.build
+test cases/frameworks/13 yelp/help/LINGUAS
+test cases/frameworks/13 yelp/help/meson.build
+test cases/frameworks/13 yelp/help/C/index.page
+test cases/frameworks/13 yelp/help/C/media/test.txt
+test cases/frameworks/13 yelp/help/de/de.po
+test cases/frameworks/13 yelp/help/es/es.po
+test cases/frameworks/13 yelp/help/es/media/test.txt
+test cases/frameworks/14 doxygen/installed_files.txt
+test cases/frameworks/14 doxygen/meson.build
+test cases/frameworks/14 doxygen/doc/Doxyfile.in
+test cases/frameworks/14 doxygen/doc/meson.build
+test cases/frameworks/14 doxygen/include/comedian.h
+test cases/frameworks/14 doxygen/include/spede.h
+test cases/frameworks/14 doxygen/src/spede.cpp
+test cases/frameworks/15 llvm/meson.build
+test cases/frameworks/15 llvm/sum.c
+test cases/frameworks/16 sdl2/meson.build
+test cases/frameworks/16 sdl2/sdl2prog.c
+test cases/frameworks/17 mpi/is_artful.py
+test cases/frameworks/17 mpi/main.c
+test cases/frameworks/17 mpi/main.cpp
+test cases/frameworks/17 mpi/main.f90
+test cases/frameworks/17 mpi/meson.build
+test cases/frameworks/18 vulkan/meson.build
+test cases/frameworks/18 vulkan/vulkanprog.c
+test cases/frameworks/19 pcap/meson.build
+test cases/frameworks/19 pcap/pcap_prog.c
+test cases/frameworks/2 gtest/meson.build
+test cases/frameworks/2 gtest/test.cc
+test cases/frameworks/2 gtest/test_nomain.cc
+test cases/frameworks/20 cups/cups_prog.c
+test cases/frameworks/20 cups/meson.build
+test cases/frameworks/21 libwmf/libwmf_prog.c
+test cases/frameworks/21 libwmf/meson.build
+test cases/frameworks/3 gmock/gmocktest.cc
+test cases/frameworks/3 gmock/meson.build
+test cases/frameworks/4 qt/main.cpp
+test cases/frameworks/4 qt/mainWindow.cpp
+test cases/frameworks/4 qt/mainWindow.h
+test cases/frameworks/4 qt/mainWindow.ui
+test cases/frameworks/4 qt/manualinclude.cpp
+test cases/frameworks/4 qt/manualinclude.h
+test cases/frameworks/4 qt/meson.build
+test cases/frameworks/4 qt/meson_options.txt
+test cases/frameworks/4 qt/q5core.cpp
+test cases/frameworks/4 qt/qt4core_fr.ts
+test cases/frameworks/4 qt/qt5core_fr.ts
+test cases/frameworks/4 qt/stuff.qrc
+test cases/frameworks/4 qt/stuff2.qrc
+test cases/frameworks/4 qt/thing.png
+test cases/frameworks/4 qt/thing2.png
+test cases/frameworks/4 qt/plugin/plugin.cpp
+test cases/frameworks/4 qt/plugin/plugin.h
+test cases/frameworks/4 qt/plugin/plugin.json
+test cases/frameworks/4 qt/pluginInterface/plugin_if.h
+test cases/frameworks/5 protocol buffers/defs.proto
+test cases/frameworks/5 protocol buffers/main.cpp
+test cases/frameworks/5 protocol buffers/meson.build
+test cases/frameworks/5 protocol buffers/asubdir/defs.proto
+test cases/frameworks/5 protocol buffers/asubdir/main.cpp
+test cases/frameworks/5 protocol buffers/asubdir/meson.build
+test cases/frameworks/6 gettext/installed_files.txt
+test cases/frameworks/6 gettext/meson.build
+test cases/frameworks/6 gettext/data/meson.build
+test cases/frameworks/6 gettext/data/test.desktop.in
+test cases/frameworks/6 gettext/po/LINGUAS
+test cases/frameworks/6 gettext/po/POTFILES
+test cases/frameworks/6 gettext/po/de.po
+test cases/frameworks/6 gettext/po/fi.po
+test cases/frameworks/6 gettext/po/intltest.pot
+test cases/frameworks/6 gettext/po/meson.build
+test cases/frameworks/6 gettext/src/intlmain.c
+test cases/frameworks/6 gettext/src/meson.build
+test cases/frameworks/7 gnome/installed_files.txt
+test cases/frameworks/7 gnome/meson.build
+test cases/frameworks/7 gnome/gdbus/com.example.Sample.xml
+test cases/frameworks/7 gnome/gdbus/gdbusprog.c
+test cases/frameworks/7 gnome/gdbus/meson.build
+test cases/frameworks/7 gnome/genmarshal/main.c
+test cases/frameworks/7 gnome/genmarshal/marshaller.list
+test cases/frameworks/7 gnome/genmarshal/meson.build
+test cases/frameworks/7 gnome/gir/meson-sample.c
+test cases/frameworks/7 gnome/gir/meson-sample.h
+test cases/frameworks/7 gnome/gir/meson.build
+test cases/frameworks/7 gnome/gir/prog.c
+test cases/frameworks/7 gnome/gir/prog.py
+test cases/frameworks/7 gnome/gir/dep1/dep1.c
+test cases/frameworks/7 gnome/gir/dep1/dep1.h
+test cases/frameworks/7 gnome/gir/dep1/meson.build
+test cases/frameworks/7 gnome/gir/dep1/dep2/dep2.c
+test cases/frameworks/7 gnome/gir/dep1/dep2/dep2.h
+test cases/frameworks/7 gnome/gir/dep1/dep2/meson.build
+test cases/frameworks/7 gnome/mkenums/enums.c.in
+test cases/frameworks/7 gnome/mkenums/enums.h.in
+test cases/frameworks/7 gnome/mkenums/enums2.c.in
+test cases/frameworks/7 gnome/mkenums/enums2.h.in
+test cases/frameworks/7 gnome/mkenums/main.c
+test cases/frameworks/7 gnome/mkenums/main4.c
+test cases/frameworks/7 gnome/mkenums/main5.c
+test cases/frameworks/7 gnome/mkenums/meson-decls.h
+test cases/frameworks/7 gnome/mkenums/meson-sample.h
+test cases/frameworks/7 gnome/mkenums/meson.build
+test cases/frameworks/7 gnome/resources/copyfile.py
+test cases/frameworks/7 gnome/resources/generated-main.c
+test cases/frameworks/7 gnome/resources/generated.gresource.xml
+test cases/frameworks/7 gnome/resources/meson.build
+test cases/frameworks/7 gnome/resources/myresource.gresource.xml
+test cases/frameworks/7 gnome/resources/resources.py
+test cases/frameworks/7 gnome/resources/simple-main.c
+test cases/frameworks/7 gnome/resources/simple.gresource.xml
+test cases/frameworks/7 gnome/resources-data/meson.build
+test cases/frameworks/7 gnome/resources-data/res1.txt
+test cases/frameworks/7 gnome/resources-data/res3.txt.in
+test cases/frameworks/7 gnome/resources-data/subdir/meson.build
+test cases/frameworks/7 gnome/resources-data/subdir/res2.txt
+test cases/frameworks/7 gnome/resources-data/subdir/res4.txt.in
+test cases/frameworks/7 gnome/schemas/com.github.meson.gschema.xml
+test cases/frameworks/7 gnome/schemas/meson.build
+test cases/frameworks/7 gnome/schemas/schemaprog.c
+test cases/frameworks/8 flex/lexer.l
+test cases/frameworks/8 flex/meson.build
+test cases/frameworks/8 flex/parser.y
+test cases/frameworks/8 flex/prog.c
+test cases/frameworks/8 flex/test.txt
+test cases/frameworks/9 wxwidgets/mainwin.h
+test cases/frameworks/9 wxwidgets/meson.build
+test cases/frameworks/9 wxwidgets/wxprog.cpp
+test cases/java/1 basic/installed_files.txt
+test cases/java/1 basic/meson.build
+test cases/java/1 basic/com/mesonbuild/Simple.java
+test cases/java/2 subdir/meson.build
+test cases/java/2 subdir/sub/meson.build
+test cases/java/2 subdir/sub/com/mesonbuild/Simple.java
+test cases/java/2 subdir/sub/com/mesonbuild/TextPrinter.java
+test cases/java/3 args/meson.build
+test cases/java/3 args/com/mesonbuild/Simple.java
+test cases/java/4 inner class/meson.build
+test cases/java/4 inner class/com/mesonbuild/Simple.java
+test cases/linuxlike/1 pkg-config/meson.build
+test cases/linuxlike/1 pkg-config/prog-checkver.c
+test cases/linuxlike/1 pkg-config/prog.c
+test cases/linuxlike/1 pkg-config/incdir/myinc.h
+test cases/linuxlike/10 large file support/meson.build
+test cases/linuxlike/11 runpath rpath ldlibrarypath/lib.c
+test cases/linuxlike/11 runpath rpath ldlibrarypath/main.c
+test cases/linuxlike/11 runpath rpath ldlibrarypath/meson.build
+test cases/linuxlike/11 runpath rpath ldlibrarypath/lib1/meson.build
+test cases/linuxlike/11 runpath rpath ldlibrarypath/lib2/meson.build
+test cases/linuxlike/2 external library/meson.build
+test cases/linuxlike/2 external library/prog.c
+test cases/linuxlike/3 linker script/bob.c
+test cases/linuxlike/3 linker script/bob.h
+test cases/linuxlike/3 linker script/bob.map
+test cases/linuxlike/3 linker script/bob.map.in
+test cases/linuxlike/3 linker script/copy.py
+test cases/linuxlike/3 linker script/meson.build
+test cases/linuxlike/3 linker script/prog.c
+test cases/linuxlike/3 linker script/sub/foo.map
+test cases/linuxlike/3 linker script/sub/meson.build
+test cases/linuxlike/4 extdep static lib/lib.c
+test cases/linuxlike/4 extdep static lib/meson.build
+test cases/linuxlike/4 extdep static lib/prog.c
+test cases/linuxlike/5 dependency versions/meson.build
+test cases/linuxlike/5 dependency versions/subprojects/somelib/lib.c
+test cases/linuxlike/5 dependency versions/subprojects/somelib/meson.build
+test cases/linuxlike/5 dependency versions/subprojects/somelibnover/lib.c
+test cases/linuxlike/5 dependency versions/subprojects/somelibnover/meson.build
+test cases/linuxlike/5 dependency versions/subprojects/somelibver/lib.c
+test cases/linuxlike/5 dependency versions/subprojects/somelibver/meson.build
+test cases/linuxlike/6 subdir include order/meson.build
+test cases/linuxlike/6 subdir include order/prog.c
+test cases/linuxlike/6 subdir include order/subdir/glib.h
+test cases/linuxlike/7 library versions/exe.orig.c
+test cases/linuxlike/7 library versions/installed_files.txt
+test cases/linuxlike/7 library versions/lib.c
+test cases/linuxlike/7 library versions/meson.build
+test cases/linuxlike/8 subproject library install/installed_files.txt
+test cases/linuxlike/8 subproject library install/meson.build
+test cases/linuxlike/8 subproject library install/subprojects/sublib/meson.build
+test cases/linuxlike/8 subproject library install/subprojects/sublib/sublib.c
+test cases/linuxlike/8 subproject library install/subprojects/sublib/include/subdefs.h
+test cases/linuxlike/9 compiler checks with dependencies/meson.build
+test cases/objc/1 simple/meson.build
+test cases/objc/1 simple/prog.m
+test cases/objc/2 nsstring/meson.build
+test cases/objc/2 nsstring/stringprog.m
+test cases/objc/3 objc++/meson.build
+test cases/objc/3 objc++/prog.mm
+test cases/objc/4 objc args/meson.build
+test cases/objc/4 objc args/prog.m
+test cases/objc/5 objc++ args/meson.build
+test cases/objc/5 objc++ args/prog.mm
+test cases/osx/1 basic/main.c
+test cases/osx/1 basic/meson.build
+test cases/osx/2 library versions/CMakeLists.txt
+test cases/osx/2 library versions/exe.orig.c
+test cases/osx/2 library versions/installed_files.txt
+test cases/osx/2 library versions/lib.c
+test cases/osx/2 library versions/meson.build
+test cases/osx/3 has function xcode8/meson.build
+test cases/osx/4 framework/meson.build
+test cases/osx/4 framework/prog.c
+test cases/osx/4 framework/stat.c
+test cases/osx/4 framework/xcode-frameworks.png
+test cases/python3/1 basic/meson.build
+test cases/python3/1 basic/prog.py
+test cases/python3/1 basic/gluon/__init__.py
+test cases/python3/1 basic/gluon/gluonator.py
+test cases/python3/1 basic/gluon/__pycache__/__init__.cpython-35.pyc
+test cases/python3/1 basic/gluon/__pycache__/__init__.cpython-36.pyc
+test cases/python3/1 basic/gluon/__pycache__/gluonator.cpython-35.pyc
+test cases/python3/1 basic/gluon/__pycache__/gluonator.cpython-36.pyc
+test cases/python3/1 basic/subdir/meson.build
+test cases/python3/1 basic/subdir/subprog.py
+test cases/python3/2 extmodule/blaster.py
+test cases/python3/2 extmodule/meson.build
+test cases/python3/2 extmodule/ext/meson.build
+test cases/python3/2 extmodule/ext/tachyon_module.c
+test cases/python3/3 cython/cytest.py
+test cases/python3/3 cython/meson.build
+test cases/python3/3 cython/libdir/cstorer.pxd
+test cases/python3/3 cython/libdir/meson.build
+test cases/python3/3 cython/libdir/storer.c
+test cases/python3/3 cython/libdir/storer.h
+test cases/python3/3 cython/libdir/storer.pyx
+test cases/python3/4 custom target depends extmodule/blaster.py
+test cases/python3/4 custom target depends extmodule/meson.build
+test cases/python3/4 custom target depends extmodule/ext/meson.build
+test cases/python3/4 custom target depends extmodule/ext/tachyon_module.c
+test cases/python3/4 custom target depends extmodule/ext/lib/meson-tachyonlib.c
+test cases/python3/4 custom target depends extmodule/ext/lib/meson-tachyonlib.h
+test cases/python3/4 custom target depends extmodule/ext/lib/meson.build
+test cases/rewrite/1 basic/added.txt
+test cases/rewrite/1 basic/meson.build
+test cases/rewrite/1 basic/removed.txt
+test cases/rewrite/2 subdirs/meson.build
+test cases/rewrite/2 subdirs/sub1/after.txt
+test cases/rewrite/2 subdirs/sub1/meson.build
+test cases/rewrite/2 subdirs/sub2/meson.build
+test cases/rust/1 basic/installed_files.txt
+test cases/rust/1 basic/meson.build
+test cases/rust/1 basic/prog.rs
+test cases/rust/1 basic/subdir/meson.build
+test cases/rust/1 basic/subdir/prog.rs
+test cases/rust/2 sharedlib/installed_files.txt
+test cases/rust/2 sharedlib/meson.build
+test cases/rust/2 sharedlib/prog.rs
+test cases/rust/2 sharedlib/stuff.rs
+test cases/rust/3 staticlib/installed_files.txt
+test cases/rust/3 staticlib/meson.build
+test cases/rust/3 staticlib/prog.rs
+test cases/rust/3 staticlib/stuff.rs
+test cases/rust/4 polyglot/installed_files.txt
+test cases/rust/4 polyglot/meson.build
+test cases/rust/4 polyglot/prog.c
+test cases/rust/4 polyglot/stuff.rs
+test cases/rust/5 polyglot static/installed_files.txt
+test cases/rust/5 polyglot static/meson.build
+test cases/rust/5 polyglot static/prog.c
+test cases/rust/5 polyglot static/stuff.rs
+test cases/rust/6 named staticlib/installed_files.txt
+test cases/rust/6 named staticlib/meson.build
+test cases/rust/6 named staticlib/prog.rs
+test cases/rust/6 named staticlib/stuff.rs
+test cases/swift/1 exe/meson.build
+test cases/swift/1 exe/prog.swift
+test cases/swift/2 multifile/libfile.swift
+test cases/swift/2 multifile/main.swift
+test cases/swift/2 multifile/meson.build
+test cases/swift/3 library/meson.build
+test cases/swift/3 library/exe/main.swift
+test cases/swift/3 library/exe/meson.build
+test cases/swift/3 library/lib/datasource.swift
+test cases/swift/3 library/lib/meson.build
+test cases/swift/3 library/lib/othersource.swift
+test cases/swift/4 generate/meson.build
+test cases/swift/4 generate/gen/main.swift
+test cases/swift/4 generate/gen/meson.build
+test cases/swift/4 generate/user/main.swift
+test cases/swift/4 generate/user/meson.build
+test cases/swift/5 mixed/main.swift
+test cases/swift/5 mixed/meson.build
+test cases/swift/5 mixed/mylib.c
+test cases/swift/5 mixed/mylib.h
+test cases/swift/6 modulemap/main.swift
+test cases/swift/6 modulemap/meson.build
+test cases/swift/6 modulemap/module.modulemap
+test cases/swift/6 modulemap/mylib.c
+test cases/swift/6 modulemap/mylib.h
+test cases/swift/7 modulemap subdir/main.swift
+test cases/swift/7 modulemap subdir/meson.build
+test cases/swift/7 modulemap subdir/mylib/meson.build
+test cases/swift/7 modulemap subdir/mylib/module.modulemap
+test cases/swift/7 modulemap subdir/mylib/mylib.c
+test cases/swift/7 modulemap subdir/mylib/mylib.h
+test cases/unit/1 soname/CMakeLists.txt
+test cases/unit/1 soname/meson.build
+test cases/unit/1 soname/versioned.c
+test cases/unit/10 d dedup/meson.build
+test cases/unit/10 d dedup/prog.c
+test cases/unit/11 build_rpath/meson.build
+test cases/unit/11 build_rpath/prog.c
+test cases/unit/11 build_rpath/sub/meson.build
+test cases/unit/11 build_rpath/sub/stuff.c
+test cases/unit/12 cross prog/meson.build
+test cases/unit/12 cross prog/some_cross_tool.py
+test cases/unit/12 cross prog/sometool.py
+test cases/unit/13 reconfigure/meson.build
+test cases/unit/14 prebuilt object/main.c
+test cases/unit/14 prebuilt object/meson.build
+test cases/unit/14 prebuilt object/source.c
+test cases/unit/15 prebuilt static/main.c
+test cases/unit/15 prebuilt static/meson.build
+test cases/unit/15 prebuilt static/libdir/best.c
+test cases/unit/15 prebuilt static/libdir/best.h
+test cases/unit/15 prebuilt static/libdir/meson.build
+test cases/unit/16 prebuilt shared/alexandria.c
+test cases/unit/16 prebuilt shared/alexandria.h
+test cases/unit/16 prebuilt shared/another_visitor.c
+test cases/unit/16 prebuilt shared/meson.build
+test cases/unit/16 prebuilt shared/patron.c
+test cases/unit/17 pkgconfig static/foo.c
+test cases/unit/17 pkgconfig static/foo.pc.in
+test cases/unit/17 pkgconfig static/main.c
+test cases/unit/17 pkgconfig static/meson.build
+test cases/unit/17 pkgconfig static/include/foo.h
+test cases/unit/18 array option/meson.build
+test cases/unit/18 array option/meson_options.txt
+test cases/unit/2 testsetups/buggy.c
+test cases/unit/2 testsetups/impl.c
+test cases/unit/2 testsetups/impl.h
+test cases/unit/2 testsetups/meson.build
+test cases/unit/3 subproject defaults/meson.build
+test cases/unit/3 subproject defaults/meson_options.txt
+test cases/unit/3 subproject defaults/subprojects/foob/meson.build
+test cases/unit/3 subproject defaults/subprojects/foob/meson_options.txt
+test cases/unit/4 suite selection/failing_test.c
+test cases/unit/4 suite selection/meson.build
+test cases/unit/4 suite selection/successful_test.c
+test cases/unit/4 suite selection/subprojects/subprjfail/failing_test.c
+test cases/unit/4 suite selection/subprojects/subprjfail/meson.build
+test cases/unit/4 suite selection/subprojects/subprjmix/failing_test.c
+test cases/unit/4 suite selection/subprojects/subprjmix/meson.build
+test cases/unit/4 suite selection/subprojects/subprjmix/successful_test.c
+test cases/unit/4 suite selection/subprojects/subprjsucc/meson.build
+test cases/unit/4 suite selection/subprojects/subprjsucc/successful_test.c
+test cases/unit/5 compiler detection/compiler wrapper.py
+test cases/unit/5 compiler detection/meson.build
+test cases/unit/5 compiler detection/trivial.c
+test cases/unit/5 compiler detection/trivial.cc
+test cases/unit/5 compiler detection/trivial.m
+test cases/unit/5 compiler detection/trivial.mm
+test cases/unit/6 std override/meson.build
+test cases/unit/6 std override/prog03.cpp
+test cases/unit/6 std override/prog11.cpp
+test cases/unit/6 std override/progp.cpp
+test cases/unit/7 run installed/meson.build
+test cases/unit/7 run installed/prog.c
+test cases/unit/7 run installed/foo/foo.c
+test cases/unit/7 run installed/foo/meson.build
+test cases/unit/9 -L -l order/first.pc
+test cases/unit/9 -L -l order/meson.build
+test cases/unit/9 -L -l order/prog.c
+test cases/unit/9 -L -l order/second.pc
+test cases/vala/1 basic/meson.build
+test cases/vala/1 basic/prog.vala
+test cases/vala/10 mixed sources/meson.build
+test cases/vala/10 mixed sources/c/foo.c
+test cases/vala/10 mixed sources/c/meson.build
+test cases/vala/10 mixed sources/c/writec.py
+test cases/vala/10 mixed sources/vala/bar.vala
+test cases/vala/11 generated vapi/installed_files.txt
+test cases/vala/11 generated vapi/main.vala
+test cases/vala/11 generated vapi/meson.build
+test cases/vala/11 generated vapi/libbar/bar.c
+test cases/vala/11 generated vapi/libbar/bar.h
+test cases/vala/11 generated vapi/libbar/meson.build
+test cases/vala/11 generated vapi/libfoo/foo.c
+test cases/vala/11 generated vapi/libfoo/foo.h
+test cases/vala/11 generated vapi/libfoo/meson.build
+test cases/vala/12 custom output/bar.vala
+test cases/vala/12 custom output/foo.vala
+test cases/vala/12 custom output/meson.build
+test cases/vala/13 find library/meson.build
+test cases/vala/13 find library/test.vala
+test cases/vala/14 target glib version and gresources/meson.build
+test cases/vala/14 target glib version and gresources/test.vala
+test cases/vala/14 target glib version and gresources/gres/meson.build
+test cases/vala/14 target glib version and gresources/gres/test-resources.xml
+test cases/vala/14 target glib version and gresources/gres/test.ui
+test cases/vala/15 static vapi in source tree/meson.build
+test cases/vala/15 static vapi in source tree/test.vala
+test cases/vala/15 static vapi in source tree/vapi/config.vapi
+test cases/vala/16 mixed dependence/app.vala
+test cases/vala/16 mixed dependence/meson.build
+test cases/vala/16 mixed dependence/mixer-glue.c
+test cases/vala/16 mixed dependence/mixer.vala
+test cases/vala/17 plain consumer/app.c
+test cases/vala/17 plain consumer/badger.vala
+test cases/vala/17 plain consumer/meson.build
+test cases/vala/18 vapi consumed twice/app.vala
+test cases/vala/18 vapi consumed twice/beer.vala
+test cases/vala/18 vapi consumed twice/meson.build
+test cases/vala/18 vapi consumed twice/person.vala
+test cases/vala/19 genie/meson.build
+test cases/vala/19 genie/prog.gs
+test cases/vala/2 multiple files/class1.vala
+test cases/vala/2 multiple files/class2.vala
+test cases/vala/2 multiple files/main.vala
+test cases/vala/2 multiple files/meson.build
+test cases/vala/20 genie multiple mixed sources/c_test_one.c
+test cases/vala/20 genie multiple mixed sources/c_test_two.c
+test cases/vala/20 genie multiple mixed sources/init.gs
+test cases/vala/20 genie multiple mixed sources/meson.build
+test cases/vala/20 genie multiple mixed sources/test_one.gs
+test cases/vala/20 genie multiple mixed sources/test_two.gs
+test cases/vala/20 genie multiple mixed sources/vala_test_one.vala
+test cases/vala/20 genie multiple mixed sources/vala_test_two.vala
+test cases/vala/21 type module/foo.vala
+test cases/vala/21 type module/meson.build
+test cases/vala/21 type module/plugin-bar.vala
+test cases/vala/21 type module/plugin-module.vala
+test cases/vala/21 type module/plugin.vala
+test cases/vala/22 same target in directories/Test.vala
+test cases/vala/22 same target in directories/meson.build
+test cases/vala/22 same target in directories/prog.vala
+test cases/vala/22 same target in directories/Subdir/Test.vala
+test cases/vala/22 same target in directories/Subdir/Subdir2/Test.vala
+test cases/vala/22 same target in directories/Subdir2/Test.vala
+test cases/vala/3 dep/gioprog.vala
+test cases/vala/3 dep/meson.build
+test cases/vala/4 config/config.vapi
+test cases/vala/4 config/meson-something-else.vapi
+test cases/vala/4 config/meson.build
+test cases/vala/4 config/prog.vala
+test cases/vala/5 target glib/GLib.Thread.vala
+test cases/vala/5 target glib/meson.build
+test cases/vala/5 target glib/retcode.c
+test cases/vala/6 static library/installed_files.txt
+test cases/vala/6 static library/meson.build
+test cases/vala/6 static library/mylib.vala
+test cases/vala/6 static library/prog.vala
+test cases/vala/7 shared library/installed_files.txt
+test cases/vala/7 shared library/meson.build
+test cases/vala/7 shared library/lib/meson.build
+test cases/vala/7 shared library/lib/mylib.vala
+test cases/vala/7 shared library/prog/meson.build
+test cases/vala/7 shared library/prog/prog.vala
+test cases/vala/8 generated sources/installed_files.txt
+test cases/vala/8 generated sources/meson.build
+test cases/vala/8 generated sources/dependency-generated/enum-types.c.template
+test cases/vala/8 generated sources/dependency-generated/enum-types.h.template
+test cases/vala/8 generated sources/dependency-generated/enums.h
+test cases/vala/8 generated sources/dependency-generated/lib.vala
+test cases/vala/8 generated sources/dependency-generated/main.vala
+test cases/vala/8 generated sources/dependency-generated/meson.build
+test cases/vala/8 generated sources/dependency-generated/null.c
+test cases/vala/8 generated sources/onlygen/maingen.in
+test cases/vala/8 generated sources/onlygen/meson.build
+test cases/vala/8 generated sources/src/config.vala.in
+test cases/vala/8 generated sources/src/copy_file.py
+test cases/vala/8 generated sources/src/meson.build
+test cases/vala/8 generated sources/src/returncode.in
+test cases/vala/8 generated sources/src/test.vala
+test cases/vala/8 generated sources/src/write_wrapper.py
+test cases/vala/8 generated sources/tools/meson.build
+test cases/vala/9 gir/foo.vala
+test cases/vala/9 gir/installed_files.txt
+test cases/vala/9 gir/meson.build
+test cases/windows/1 basic/installed_files.txt
+test cases/windows/1 basic/meson.build
+test cases/windows/1 basic/prog.c
+test cases/windows/10 vs module defs generated/meson.build
+test cases/windows/10 vs module defs generated/prog.c
+test cases/windows/10 vs module defs generated/subdir/meson.build
+test cases/windows/10 vs module defs generated/subdir/somedll.c
+test cases/windows/10 vs module defs generated/subdir/somedll.def.in
+test cases/windows/11 vs module defs generated custom target/meson.build
+test cases/windows/11 vs module defs generated custom target/prog.c
+test cases/windows/11 vs module defs generated custom target/subdir/make_def.py
+test cases/windows/11 vs module defs generated custom target/subdir/meson.build
+test cases/windows/11 vs module defs generated custom target/subdir/somedll.c
+test cases/windows/12 exe implib/installed_files.txt
+test cases/windows/12 exe implib/meson.build
+test cases/windows/12 exe implib/prog.c
+test cases/windows/2 winmain/meson.build
+test cases/windows/2 winmain/prog.c
+test cases/windows/3 cpp/meson.build
+test cases/windows/3 cpp/prog.cpp
+test cases/windows/4 winmaincpp/meson.build
+test cases/windows/4 winmaincpp/prog.cpp
+test cases/windows/5 resources/meson.build
+test cases/windows/5 resources/prog.c
+test cases/windows/5 resources/inc/meson.build
+test cases/windows/5 resources/inc/resource/resource.h
+test cases/windows/5 resources/res/meson.build
+test cases/windows/5 resources/res/myres.rc
+test cases/windows/5 resources/res/sample.ico
+test cases/windows/6 vs module defs/meson.build
+test cases/windows/6 vs module defs/prog.c
+test cases/windows/6 vs module defs/subdir/meson.build
+test cases/windows/6 vs module defs/subdir/somedll.c
+test cases/windows/6 vs module defs/subdir/somedll.def
+test cases/windows/8 dll versioning/copyfile.py
+test cases/windows/8 dll versioning/exe.orig.c
+test cases/windows/8 dll versioning/installed_files.txt
+test cases/windows/8 dll versioning/lib.c
+test cases/windows/8 dll versioning/meson.build
+test cases/windows/9 find program/meson.build
+test cases/windows/9 find program/test-script
+test cases/windows/9 find program/test-script-ext.py
+tools/ac_converter.py
+tools/boost_names.py
+tools/cmake2meson.py
\ No newline at end of file
--- /dev/null
+mesonbuild
--- /dev/null
+#!/usr/bin/env python3
+
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from mesonbuild import mesonmain, mesonlib
+import sys, os, locale
+
def main():
    """Entry point for the meson launcher script.

    Warns when the active locale is not UTF-8 (except on Windows), then
    delegates to ``mesonmain.run`` with the command-line arguments and the
    resolved path of this launcher.

    Returns:
        Whatever ``mesonmain.run`` returns (used as the process exit status).
    """
    # Warn if the locale is not UTF-8. This can cause various unfixable issues
    # such as os.stat not being able to decode filenames with unicode in them.
    # There is no way to reset both the preferred encoding and the filesystem
    # encoding, so we can just warn about it.
    e = locale.getpreferredencoding()
    if e.upper() != 'UTF-8' and not mesonlib.is_windows():
        print('Warning: You are using {!r} which is not a Unicode-compatible '
              'locale.'.format(e), file=sys.stderr)
        print('You might see errors if you use UTF-8 strings as '
              'filenames, as strings, or as file contents.', file=sys.stderr)
        print('Please switch to a UTF-8 locale for your platform.', file=sys.stderr)
    # Always resolve the command path so Ninja can find it for regen, tests, etc.
    launcher = os.path.realpath(sys.argv[0])
    return mesonmain.run(sys.argv[1:], launcher)

if __name__ == '__main__':
    sys.exit(main())
--- /dev/null
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This class contains the basic functionality needed to run any interpreter
+# or an interpreter-based tool.
+
+from . import interpreterbase, mlog, mparser, mesonlib
+from . import environment
+
+from .interpreterbase import InterpreterException, InvalidArguments
+
+import os, sys
+
# Inert stand-ins returned by the AST interpreter in place of the real build
# objects: only their existence matters, their contents are never inspected.
class DontCareObject(interpreterbase.InterpreterObject):
    pass

class MockExecutable(interpreterbase.InterpreterObject):
    pass

class MockStaticLibrary(interpreterbase.InterpreterObject):
    pass

class MockSharedLibrary(interpreterbase.InterpreterObject):
    pass

class MockCustomTarget(interpreterbase.InterpreterObject):
    pass

class MockRunTarget(interpreterbase.InterpreterObject):
    pass

# Operation codes stored in AstInterpreter.operation: either append a source
# file to a target's argument list or remove one from it.
ADD_SOURCE = 0
REMOVE_SOURCE = 1
+
class AstInterpreter(interpreterbase.InterpreterBase):
    """Walk meson.build files as ASTs without performing real build steps.

    Used by the rewriter tool to add or remove a source file from a named
    target by editing the build definition files in place. Almost every
    build function is mapped to a no-op; only the target-creating functions
    and ``subdir()`` do real work.
    """

    def __init__(self, source_root, subdir):
        super().__init__(source_root, subdir)
        # Parsed code blocks keyed by subdir relative to source_root
        # ('' is the project root); filled by transform()/func_subdir().
        self.asts = {}
        self.funcs.update({'project': self.func_do_nothing,
                           'test': self.func_do_nothing,
                           'benchmark': self.func_do_nothing,
                           'install_headers': self.func_do_nothing,
                           'install_man': self.func_do_nothing,
                           'install_data': self.func_do_nothing,
                           'install_subdir': self.func_do_nothing,
                           'configuration_data': self.func_do_nothing,
                           'configure_file': self.func_do_nothing,
                           'find_program': self.func_do_nothing,
                           'include_directories': self.func_do_nothing,
                           'add_global_arguments': self.func_do_nothing,
                           'add_global_link_arguments': self.func_do_nothing,
                           'add_project_arguments': self.func_do_nothing,
                           'add_project_link_arguments': self.func_do_nothing,
                           'message': self.func_do_nothing,
                           'generator': self.func_do_nothing,
                           'error': self.func_do_nothing,
                           'run_command': self.func_do_nothing,
                           'assert': self.func_do_nothing,
                           'subproject': self.func_do_nothing,
                           'dependency': self.func_do_nothing,
                           'get_option': self.func_do_nothing,
                           'join_paths': self.func_do_nothing,
                           'environment': self.func_do_nothing,
                           'import': self.func_do_nothing,
                           'vcs_tag': self.func_do_nothing,
                           'add_languages': self.func_do_nothing,
                           'declare_dependency': self.func_do_nothing,
                           'files': self.func_files,
                           'executable': self.func_executable,
                           'static_library': self.func_static_lib,
                           'shared_library': self.func_shared_lib,
                           'library': self.func_library,
                           'build_target': self.func_build_target,
                           'custom_target': self.func_custom_target,
                           'run_target': self.func_run_target,
                           'subdir': self.func_subdir,
                           'set_variable': self.func_set_variable,
                           'get_variable': self.func_get_variable,
                           'is_variable': self.func_is_variable,
                           })

    def func_do_nothing(self, node, args, kwargs):
        # Most functions are irrelevant to source-list rewriting; report
        # success without doing anything.
        return True

    def method_call(self, node):
        # Method calls never affect the rewrite either.
        return True

    def func_executable(self, node, args, kwargs):
        # args[0] is the target name; rewrite only the requested target.
        if args[0] == self.targetname:
            if self.operation == ADD_SOURCE:
                self.add_source_to_target(node, args, kwargs)
            elif self.operation == REMOVE_SOURCE:
                self.remove_source_from_target(node, args, kwargs)
            else:
                raise NotImplementedError('Bleep bloop')
        return MockExecutable()

    def func_static_lib(self, node, args, kwargs):
        # NOTE(review): unlike func_executable, the library/custom/run target
        # functions do not handle add/remove operations -- confirm intended.
        return MockStaticLibrary()

    def func_shared_lib(self, node, args, kwargs):
        return MockSharedLibrary()

    def func_library(self, node, args, kwargs):
        # library() defaults to a shared library here.
        return self.func_shared_lib(node, args, kwargs)

    def func_custom_target(self, node, args, kwargs):
        return MockCustomTarget()

    def func_run_target(self, node, args, kwargs):
        return MockRunTarget()

    def func_subdir(self, node, args, kwargs):
        """Parse and evaluate the build file of a subdirectory.

        Restores ``self.subdir`` afterwards (and before raising when the
        build file does not exist) so nesting works correctly.
        """
        prev_subdir = self.subdir
        subdir = os.path.join(prev_subdir, args[0])
        self.subdir = subdir
        buildfilename = os.path.join(self.subdir, environment.build_filename)
        absname = os.path.join(self.source_root, buildfilename)
        if not os.path.isfile(absname):
            self.subdir = prev_subdir
            raise InterpreterException('Nonexistent build def file %s.' % buildfilename)
        with open(absname, encoding='utf8') as f:
            code = f.read()
        assert isinstance(code, str)
        try:
            codeblock = mparser.Parser(code, self.subdir).parse()
            self.asts[subdir] = codeblock
        except mesonlib.MesonException as me:
            # Attach the file name so the error message points at the
            # offending build file.
            me.file = buildfilename
            raise me
        self.evaluate_codeblock(codeblock)
        self.subdir = prev_subdir

    def func_files(self, node, args, kwargs):
        # Return the file arguments untouched, normalised to a list.
        if not isinstance(args, list):
            return [args]
        return args

    def evaluate_arithmeticstatement(self, cur):
        # Expression values are irrelevant for rewriting; any constant will do.
        return 0

    def evaluate_plusassign(self, node):
        return 0

    def evaluate_indexing(self, node):
        return 0

    def reduce_arguments(self, args):
        """Split an ArgumentNode into (positional args, keyword args)."""
        assert isinstance(args, mparser.ArgumentNode)
        if args.incorrect_order():
            raise InvalidArguments('All keyword arguments must be after positional arguments.')
        return args.arguments, args.kwargs

    def transform(self):
        # Load and traverse the whole project; the configured add/remove
        # operation fires when the requested target is encountered.
        self.load_root_meson_file()
        self.asts[''] = self.ast
        self.sanity_check_ast()
        self.parse_project()
        self.run()

    def add_source(self, targetname, filename):
        """Add *filename* to the sources of *targetname* (edits meson.build)."""
        self.operation = ADD_SOURCE
        self.targetname = targetname
        self.filename = filename
        self.transform()

    def remove_source(self, targetname, filename):
        """Remove *filename* from the sources of *targetname* (edits meson.build)."""
        self.operation = REMOVE_SOURCE
        self.targetname = targetname
        self.filename = filename
        self.transform()

    def unknown_function_called(self, func_name):
        # Unknown functions are merely warned about so traversal can continue.
        mlog.warning('Unknown function called: ' + func_name)

    def add_source_to_target(self, node, args, kwargs):
        """Splice the new file name into the raw build file text right after
        the target name argument, then terminate the process."""
        namespan = node.args.arguments[0].bytespan
        buildfilename = os.path.join(self.source_root, self.subdir, environment.build_filename)
        # Use context managers so the handles are closed even on error
        # (the previous bare open() calls leaked them).
        with open(buildfilename, 'r') as f:
            raw_data = f.read()
        updated = raw_data[0:namespan[1]] + (", '%s'" % self.filename) + raw_data[namespan[1]:]
        with open(buildfilename, 'w') as f:
            f.write(updated)
        sys.exit(0)

    def remove_argument_item(self, args, i):
        """Delete argument *i* (plus a neighbouring comma) from the build file
        by byte-span surgery, then terminate the process."""
        assert isinstance(args, mparser.ArgumentNode)
        namespan = args.arguments[i].bytespan
        # Usually remove the comma after this item but if it is
        # the last argument, we need to remove the one before.
        if i >= len(args.commas):
            i -= 1
        if i < 0:
            commaspan = (0, 0) # Removed every entry in the list.
        else:
            commaspan = args.commas[i].bytespan
        # Cut the later span first so the earlier offsets remain valid.
        if commaspan[0] < namespan[0]:
            commaspan, namespan = namespan, commaspan
        buildfilename = os.path.join(self.source_root, args.subdir, environment.build_filename)
        with open(buildfilename, 'r') as f:
            raw_data = f.read()
        intermediary = raw_data[0:commaspan[0]] + raw_data[commaspan[1]:]
        updated = intermediary[0:namespan[0]] + intermediary[namespan[1]:]
        with open(buildfilename, 'w') as f:
            f.write(updated)
        sys.exit(0)

    def hacky_find_and_remove(self, node_to_remove):
        # Locate the assignment that defined the array containing the file
        # and remove the matching entry from it. remove_argument_item() exits
        # the process on success, so reaching the raise means failure.
        for a in self.asts[node_to_remove.subdir].lines:
            if a.lineno == node_to_remove.lineno:
                if isinstance(a, mparser.AssignmentNode):
                    v = a.value
                    if not isinstance(v, mparser.ArrayNode):
                        raise NotImplementedError('Not supported yet, bro.')
                    args = v.args
                    for i, arg in enumerate(args.arguments):
                        if isinstance(arg, mparser.StringNode) and self.filename == arg.value:
                            self.remove_argument_item(args, i)
                    raise NotImplementedError('Sukkess')

    def remove_source_from_target(self, node, args, kwargs):
        # NOTE(review): this iterates range(1, len(node.args)); unless
        # ArgumentNode defines __len__ this likely should be
        # len(node.args.arguments) -- confirm against mparser.
        for i in range(1, len(node.args)):
            # Is file name directly in function call as a string.
            if isinstance(node.args.arguments[i], mparser.StringNode) and self.filename == node.args.arguments[i].value:
                self.remove_argument_item(node.args, i)
            # Is file name in a variable that gets expanded here.
            if isinstance(node.args.arguments[i], mparser.IdNode):
                avar = self.get_variable(node.args.arguments[i].value)
                if not isinstance(avar, list):
                    raise NotImplementedError('Non-arrays not supported yet, sorry.')
                for entry in avar:
                    if isinstance(entry, mparser.StringNode) and entry.value == self.filename:
                        self.hacky_find_and_remove(entry)
        sys.exit('Could not find source %s in target %s.' % (self.filename, args[0]))
--- /dev/null
+# Copyright 2012-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os, pickle, re
+from .. import build
+from .. import dependencies
+from .. import mesonlib
+from .. import mlog
+from .. import compilers
+import json
+import subprocess
+from ..mesonlib import MesonException
+from ..mesonlib import get_compiler_for_source, classify_unity_sources
+from ..compilers import CompilerArgs
+from collections import OrderedDict
+import shlex
+
class CleanTrees:
    """Record of custom-target output directories that must be removed
    explicitly, since on Linux ``ninja clean`` only deletes empty
    directories.
    """

    def __init__(self, build_dir, trees):
        # Build directory the tree paths are associated with.
        self.build_dir = build_dir
        # Directories that need manual removal on clean.
        self.trees = trees
+
class InstallData:
    """Container for everything the install step needs.

    The scalar fields come from the constructor; the per-kind collections
    (targets, headers, man pages, data files, translations, scripts,
    subdirs) start out empty and are filled in while generating the build.
    """

    def __init__(self, source_dir, build_dir, prefix, strip_bin, mesonintrospect):
        # Paths and tools supplied by the backend.
        self.source_dir = source_dir
        self.build_dir = build_dir
        self.prefix = prefix
        self.strip_bin = strip_bin
        self.mesonintrospect = mesonintrospect
        # Collections populated later, one entry per item to install.
        self.targets = []
        self.headers = []
        self.man = []
        self.data = []
        self.install_scripts = []
        self.install_subdirs = []
        # Gettext translation information.
        self.po_package_name = ''
        self.po = []
+
class ExecutableSerialisation:
    '''Pickled description of one wrapped-executable invocation: the
    command, its environment, working dir and capture settings.'''

    def __init__(self, name, fname, cmd_args, env, is_cross, exe_wrapper,
                 workdir, extra_paths, capture):
        self.name, self.fname = name, fname
        self.cmd_args, self.env = cmd_args, env
        self.is_cross = is_cross
        # Consumers read this back under the attribute name 'exe_runner'.
        self.exe_runner = exe_wrapper
        self.workdir = workdir
        self.extra_paths, self.capture = extra_paths, capture
+
class TestSerialisation:
    '''Pickled description of a single test: the command to run, how to
    run it (wrapper, parallelism, timeout) and how to judge the result.'''

    def __init__(self, name, suite, fname, is_cross_built, exe_wrapper, is_parallel, cmd_args, env,
                 should_fail, timeout, workdir, extra_paths):
        self.name, self.suite, self.fname = name, suite, fname
        self.is_cross_built = is_cross_built
        # Consumers read this back under the attribute name 'exe_runner'.
        self.exe_runner = exe_wrapper
        self.is_parallel = is_parallel
        self.cmd_args, self.env = cmd_args, env
        self.should_fail = should_fail
        self.timeout, self.workdir = timeout, workdir
        self.extra_paths = extra_paths
+
class OptionProxy:
    '''Lightweight stand-in exposing just an option's name and value.'''

    def __init__(self, name, value):
        self.name, self.value = name, value
+
class OptionOverrideProxy:
    '''Read-only lookalike of an option mapping that transparently
    substitutes overridden values for selected options.'''

    def __init__(self, overrides, options):
        self.overrides = overrides
        self.options = options

    def __getitem__(self, option_name):
        '''Return the base option, or a validated OptionProxy if this
        option has an override.'''
        base_opt = self.options[option_name]
        try:
            raw_override = self.overrides[option_name]
        except KeyError:
            return base_opt
        return OptionProxy(base_opt.name, base_opt.validate_value(raw_override))
+
# This class contains the basic functionality that is needed by all backends.
# Feel free to move stuff in and out of it as you see fit.
class Backend:
    def __init__(self, build):
        self.build = build
        self.environment = build.environment
        self.processed_targets = {}
        # Relative path that leads from the build dir back to the source dir.
        self.build_to_src = os.path.relpath(self.environment.get_source_dir(),
                                            self.environment.get_build_dir())
        # Pre-create each target's private dir so later steps can write
        # into it without checking for existence first.
        for t in self.build.targets:
            priv_dirname = self.get_target_private_dir_abs(t)
            os.makedirs(priv_dirname, exist_ok=True)

    def get_target_filename(self, t):
        '''Return a target's primary output filename relative to the build
        root. A CustomTarget with several outputs uses the first one (with
        a warning).'''
        if isinstance(t, build.CustomTarget):
            if len(t.get_outputs()) != 1:
                mlog.warning('custom_target {!r} has more than one output! '
                             'Using the first one.'.format(t.name))
            filename = t.get_outputs()[0]
        else:
            assert(isinstance(t, build.BuildTarget))
            filename = t.get_filename()
        return os.path.join(self.get_target_dir(t), filename)

    def get_target_filename_abs(self, target):
        '''Absolute path of the target's primary output file.'''
        return os.path.join(self.environment.get_build_dir(), self.get_target_filename(target))

    def get_option_for_target(self, option_name, target):
        '''Look up a builtin option value, honouring per-target overrides.'''
        if option_name in target.option_overrides:
            override = target.option_overrides[option_name]
            return self.environment.coredata.validate_option_value(option_name, override)
        return self.environment.coredata.get_builtin_option(option_name)

    def get_target_filename_for_linking(self, target):
        # On some platforms (msvc for instance), the file that is used for
        # dynamic linking is not the same as the dynamic library itself. This
        # file is called an import library, and we want to link against that.
        # On all other platforms, we link to the library directly.
        if isinstance(target, build.SharedLibrary):
            link_lib = target.get_import_filename() or target.get_filename()
            return os.path.join(self.get_target_dir(target), link_lib)
        elif isinstance(target, build.StaticLibrary):
            return os.path.join(self.get_target_dir(target), target.get_filename())
        elif isinstance(target, build.Executable):
            if target.import_filename:
                return os.path.join(self.get_target_dir(target), target.get_import_filename())
            else:
                # Executables without an import library cannot be linked to.
                return None
        raise AssertionError('BUG: Tried to link to {!r} which is not linkable'.format(target))

    def get_target_dir(self, target):
        '''Directory (relative to build root) holding the target's outputs.'''
        if self.environment.coredata.get_builtin_option('layout') == 'mirror':
            dirname = target.get_subdir()
        else:
            dirname = 'meson-out'
        return dirname

    def get_target_dir_relative_to(self, t, o):
        '''Get a target dir relative to another target's directory'''
        target_dir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(t))
        othert_dir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(o))
        return os.path.relpath(target_dir, othert_dir)

    def get_target_source_dir(self, target):
        '''Path from the build dir to the target's source subdirectory.'''
        # if target dir is empty, avoid extraneous trailing / from os.path.join()
        target_dir = self.get_target_dir(target)
        if target_dir:
            return os.path.join(self.build_to_src, target_dir)
        return self.build_to_src

    def get_target_private_dir(self, target):
        '''Per-target scratch directory, relative to the build root.'''
        dirname = os.path.join(self.get_target_dir(target), target.get_basename() + target.type_suffix())
        return dirname

    def get_target_private_dir_abs(self, target):
        '''Absolute path of the per-target scratch directory.'''
        dirname = os.path.join(self.environment.get_build_dir(), self.get_target_private_dir(target))
        return dirname

    def get_target_generated_dir(self, target, gensrc, src):
        """
        Takes a BuildTarget, a generator source (CustomTarget or GeneratedList),
        and a generated source filename.
        Returns the full path of the generated source relative to the build root
        """
        # CustomTarget generators output to the build dir of the CustomTarget
        if isinstance(gensrc, (build.CustomTarget, build.CustomTargetIndex)):
            return os.path.join(self.get_target_dir(gensrc), src)
        # GeneratedList generators output to the private build directory of the
        # target that the GeneratedList is used in
        return os.path.join(self.get_target_private_dir(target), src)

    def get_unity_source_file(self, target, suffix):
        '''File object for the target's single unity source of a language.'''
        osrc = target.name + '-unity.' + suffix
        return mesonlib.File.from_built_file(self.get_target_private_dir(target), osrc)

    def generate_unity_files(self, target, unity_src):
        '''Write one combined "unity" source per language that #includes all
        of the target's sources of that language. Returns the list of
        generated File objects.'''
        abs_files = []
        result = []
        compsrcs = classify_unity_sources(target.compilers.values(), unity_src)

        def init_language_file(suffix):
            unity_src = self.get_unity_source_file(target, suffix)
            outfileabs = unity_src.absolute_path(self.environment.get_source_dir(),
                                                 self.environment.get_build_dir())
            # Write to a .tmp first so unchanged files keep their mtime.
            outfileabs_tmp = outfileabs + '.tmp'
            abs_files.append(outfileabs)
            outfileabs_tmp_dir = os.path.dirname(outfileabs_tmp)
            if not os.path.exists(outfileabs_tmp_dir):
                os.makedirs(outfileabs_tmp_dir)
            result.append(unity_src)
            return open(outfileabs_tmp, 'w')

        # For each language, generate a unity source file and return the list
        for comp, srcs in compsrcs.items():
            with init_language_file(comp.get_default_suffix()) as ofile:
                for src in srcs:
                    ofile.write('#include<%s>\n' % src)
        # Plain loop instead of a side-effect list comprehension.
        for x in abs_files:
            mesonlib.replace_if_different(x, x + '.tmp')
        return result

    def relpath(self, todir, fromdir):
        '''Relative path between two dirs given relative to a common root.'''
        return os.path.relpath(os.path.join('dummyprefixdir', todir),
                               os.path.join('dummyprefixdir', fromdir))

    def flatten_object_list(self, target, proj_dir_to_build_root=''):
        '''Expand a target's object list (strings, Files, ExtractedObjects)
        into plain paths relative to the build root.'''
        obj_list = []
        for obj in target.get_objects():
            if isinstance(obj, str):
                o = os.path.join(proj_dir_to_build_root,
                                 self.build_to_src, target.get_subdir(), obj)
                obj_list.append(o)
            elif isinstance(obj, mesonlib.File):
                obj_list.append(obj.rel_to_builddir(self.build_to_src))
            elif isinstance(obj, build.ExtractedObjects):
                obj_list += self.determine_ext_objs(target, obj, proj_dir_to_build_root)
            else:
                raise MesonException('Unknown data type in object list.')
        return obj_list

    def serialize_executable(self, exe, cmd_args, workdir, env=None,
                             extra_paths=None, capture=None):
        '''Pickle an executable invocation into the scratch dir and return
        the path to the resulting .dat file. The filename includes a digest
        of the invocation so regeneration is deterministic.'''
        import hashlib
        if env is None:
            # Was a mutable default argument ({}), which is shared between
            # calls; use None as the sentinel instead.
            env = {}
        if extra_paths is None:
            # The callee didn't check if we needed extra paths, so check it here
            if mesonlib.is_windows() or mesonlib.is_cygwin():
                extra_paths = self.determine_windows_extra_paths(exe, [])
            else:
                extra_paths = []
        # Can't just use exe.name here; it will likely be run more than once
        if isinstance(exe, (dependencies.ExternalProgram,
                            build.BuildTarget, build.CustomTarget)):
            basename = exe.name
        else:
            basename = os.path.basename(exe)
        # Take a digest of the cmd args, env, workdir, and capture. This avoids
        # collisions and also makes the name deterministic over regenerations
        # which avoids a rebuild by Ninja because the cmdline stays the same.
        data = bytes(str(sorted(env.items())) + str(cmd_args) + str(workdir) + str(capture),
                     encoding='utf-8')
        digest = hashlib.sha1(data).hexdigest()
        scratch_file = 'meson_exe_{0}_{1}.dat'.format(basename, digest)
        exe_data = os.path.join(self.environment.get_scratch_dir(), scratch_file)
        with open(exe_data, 'wb') as f:
            if isinstance(exe, dependencies.ExternalProgram):
                exe_cmd = exe.get_command()
                exe_needs_wrapper = False
            elif isinstance(exe, (build.BuildTarget, build.CustomTarget)):
                exe_cmd = [self.get_target_filename_abs(exe)]
                exe_needs_wrapper = exe.is_cross
            else:
                exe_cmd = [exe]
                exe_needs_wrapper = False
            is_cross_built = exe_needs_wrapper and \
                self.environment.is_cross_build() and \
                self.environment.cross_info.need_cross_compiler() and \
                self.environment.cross_info.need_exe_wrapper()
            if is_cross_built:
                exe_wrapper = self.environment.cross_info.config['binaries'].get('exe_wrapper', None)
            else:
                exe_wrapper = None
            es = ExecutableSerialisation(basename, exe_cmd, cmd_args, env,
                                         is_cross_built, exe_wrapper, workdir,
                                         extra_paths, capture)
            pickle.dump(es, f)
        return exe_data

    def serialize_tests(self):
        '''Pickle test and benchmark definitions into the scratch dir;
        returns the two file paths.'''
        test_data = os.path.join(self.environment.get_scratch_dir(), 'meson_test_setup.dat')
        with open(test_data, 'wb') as datafile:
            self.write_test_file(datafile)
        benchmark_data = os.path.join(self.environment.get_scratch_dir(), 'meson_benchmark_setup.dat')
        with open(benchmark_data, 'wb') as datafile:
            self.write_benchmark_file(datafile)
        return test_data, benchmark_data

    def determine_linker(self, target):
        '''
        If we're building a static library, there is only one static linker.
        Otherwise, we query the target for the dynamic linker.
        '''
        if isinstance(target, build.StaticLibrary):
            if target.is_cross:
                return self.build.static_cross_linker
            else:
                return self.build.static_linker
        l = target.get_clike_dynamic_linker()
        if not l:
            m = "Couldn't determine linker for target {!r}"
            raise MesonException(m.format(target.name))
        return l

    def rpaths_for_bundled_shared_libraries(self, target):
        '''Rpath entries (relative to the build dir) for external library
        dependencies that live inside the source tree.'''
        paths = []
        for dep in target.external_deps:
            if isinstance(dep, dependencies.ExternalLibrary):
                la = dep.link_args
                if len(la) == 1 and os.path.isabs(la[0]):
                    # The only link argument is an absolute path to a library file.
                    libpath = la[0]
                    if libpath.startswith(('/usr/lib', '/lib')):
                        # No point in adding system paths.
                        continue
                    if os.path.splitext(libpath)[1] not in ['.dll', '.lib', '.so']:
                        continue
                    absdir = os.path.split(libpath)[0]
                    rel_to_src = absdir[len(self.environment.get_source_dir()) + 1:]
                    assert(not os.path.isabs(rel_to_src))
                    paths.append(os.path.join(self.build_to_src, rel_to_src))
        return paths

    def determine_rpath_dirs(self, target):
        '''Unique list of directories the target needs in its rpath.'''
        link_deps = target.get_all_link_deps()
        result = []
        for ld in link_deps:
            if ld is target:
                continue
            prospective = self.get_target_dir(ld)
            if prospective not in result:
                result.append(prospective)
        for rp in self.rpaths_for_bundled_shared_libraries(target):
            if rp not in result:
                result += [rp]
        return result

    def object_filename_from_source(self, target, source, is_unity):
        '''Map a source File to the object filename the compiler will emit
        for it, uniquified by flattening the relative path into the name.'''
        assert isinstance(source, mesonlib.File)
        build_dir = self.environment.get_build_dir()
        rel_src = source.rel_to_builddir(self.build_to_src)
        # foo.vala files compile down to foo.c and then foo.c.o, not foo.vala.o
        if rel_src.endswith(('.vala', '.gs')):
            # See description in generate_vala_compile for this logic.
            if source.is_built:
                if os.path.isabs(rel_src):
                    rel_src = rel_src[len(build_dir) + 1:]
                rel_src = os.path.relpath(rel_src, self.get_target_private_dir(target))
            else:
                rel_src = os.path.basename(rel_src)
            if is_unity:
                return 'meson-generated_' + rel_src[:-5] + '.c.' + self.environment.get_object_suffix()
            # A meson- prefixed directory is reserved; hopefully no-one creates a file name with such a weird prefix.
            source = 'meson-generated_' + rel_src[:-5] + '.c'
        elif source.is_built:
            if os.path.isabs(rel_src):
                rel_src = rel_src[len(build_dir) + 1:]
            targetdir = self.get_target_private_dir(target)
            # A meson- prefixed directory is reserved; hopefully no-one creates a file name with such a weird prefix.
            source = 'meson-generated_' + os.path.relpath(rel_src, targetdir)
        else:
            if os.path.isabs(rel_src):
                # Not from the source directory; hopefully this doesn't conflict with user's source files.
                source = os.path.basename(rel_src)
            else:
                source = os.path.relpath(os.path.join(build_dir, rel_src),
                                         os.path.join(self.environment.get_source_dir(), target.get_subdir()))
        return source.replace('/', '_').replace('\\', '_') + '.' + self.environment.get_object_suffix()

    def determine_ext_objs(self, target, extobj, proj_dir_to_build_root):
        '''Paths of the object files that an ExtractedObjects reference
        will produce, relative to proj_dir_to_build_root.'''
        result = []
        targetdir = self.get_target_private_dir(extobj.target)
        # With unity builds, there's just one object that contains all the
        # sources, and we only support extracting all the objects in this mode,
        # so just return that.
        if self.is_unity(target):
            comp = get_compiler_for_source(extobj.target.compilers.values(),
                                           extobj.srclist[0])
            # There is a potential conflict here, but it is unlikely that
            # anyone both enables unity builds and has a file called foo-unity.cpp.
            osrc = self.get_unity_source_file(extobj.target,
                                              comp.get_default_suffix())
            objname = self.object_filename_from_source(extobj.target, osrc, True)
            objname = objname.replace('/', '_').replace('\\', '_')
            objpath = os.path.join(proj_dir_to_build_root, targetdir, objname)
            return [objpath]
        for osrc in extobj.srclist:
            objname = self.object_filename_from_source(extobj.target, osrc, False)
            objpath = os.path.join(proj_dir_to_build_root, targetdir, objname)
            result.append(objpath)
        return result

    def get_pch_include_args(self, compiler, target):
        '''Compiler args for using the target's precompiled header, plus the
        include path of the private dir the PCH lives in (empty if no PCH).'''
        args = []
        pchpath = self.get_target_private_dir(target)
        includeargs = compiler.get_include_args(pchpath, False)
        for lang in ['c', 'cpp']:
            p = target.get_pch(lang)
            if not p:
                continue
            if compiler.can_compile(p[-1]):
                header = p[0]
                args += compiler.get_pch_use_args(pchpath, header)
        if args:
            args = includeargs + args
        return args

    @staticmethod
    def escape_extra_args(compiler, args):
        '''Apply Windows-specific escaping to -D/ /D define arguments.'''
        # No extra escaping/quoting needed when not running on Windows
        if not mesonlib.is_windows():
            return args
        extra_args = []
        # Compiler-specific escaping is needed for -D args but not for any others
        if compiler.get_id() == 'msvc':
            # MSVC needs escaping when a -D argument ends in \ or \"
            for arg in args:
                if arg.startswith('-D') or arg.startswith('/D'):
                    # Without extra escaping for these two, the next character
                    # gets eaten
                    if arg.endswith('\\'):
                        arg += '\\'
                    elif arg.endswith('\\"'):
                        arg = arg[:-2] + '\\\\"'
                extra_args.append(arg)
        else:
            # MinGW GCC needs all backslashes in defines to be doubly-escaped
            # FIXME: Not sure about Cygwin or Clang
            for arg in args:
                if arg.startswith('-D') or arg.startswith('/D'):
                    arg = arg.replace('\\', '\\\\')
                extra_args.append(arg)
        return extra_args

    def generate_basic_compiler_args(self, target, compiler, no_warn_args=False):
        # Create an empty commands list, and start adding arguments from
        # various sources in the order in which they must override each other
        # starting from hard-coded defaults followed by build options and so on.
        commands = CompilerArgs(compiler)

        copt_proxy = OptionOverrideProxy(target.option_overrides, self.environment.coredata.compiler_options)
        # First, the trivial ones that are impossible to override.
        #
        # Add -nostdinc/-nostdinc++ if needed; can't be overridden
        commands += self.get_cross_stdlib_args(target, compiler)
        # Add things like /NOLOGO or -pipe; usually can't be overridden
        commands += compiler.get_always_args()
        # Only add warning-flags by default if the buildtype enables it, and if
        # we weren't explicitly asked to not emit warnings (for Vala, f.ex)
        if no_warn_args:
            commands += compiler.get_no_warn_args()
        elif self.get_option_for_target('buildtype', target) != 'plain':
            commands += compiler.get_warn_args(self.get_option_for_target('warning_level', target))
        # Add -Werror if werror=true is set in the build options set on the
        # command-line or default_options inside project(). This only sets the
        # action to be done for warnings if/when they are emitted, so it's ok
        # to set it after get_no_warn_args() or get_warn_args().
        if self.get_option_for_target('werror', target):
            commands += compiler.get_werror_args()
        # Add compile args for c_* or cpp_* build options set on the
        # command-line or default_options inside project().
        commands += compiler.get_option_compile_args(copt_proxy)
        # Add buildtype args: optimization level, debugging, etc.
        commands += compiler.get_buildtype_args(self.get_option_for_target('buildtype', target))
        # Add compile args added using add_project_arguments()
        commands += self.build.get_project_args(compiler, target.subproject)
        # Add compile args added using add_global_arguments()
        # These override per-project arguments
        commands += self.build.get_global_args(compiler)
        if not target.is_cross:
            # Compile args added from the env: CFLAGS/CXXFLAGS, etc. We want these
            # to override all the defaults, but not the per-target compile args.
            commands += self.environment.coredata.external_args[compiler.get_language()]
        # Always set -fPIC for shared libraries
        if isinstance(target, build.SharedLibrary):
            commands += compiler.get_pic_args()
        # Set -fPIC for static libraries by default unless explicitly disabled
        if isinstance(target, build.StaticLibrary) and target.pic:
            commands += compiler.get_pic_args()
        # Add compile args needed to find external dependencies. Link args are
        # added while generating the link command.
        # NOTE: We must preserve the order in which external deps are
        # specified, so we reverse the list before iterating over it.
        for dep in reversed(target.get_external_deps()):
            if not dep.found():
                continue

            if compiler.language == 'vala':
                if isinstance(dep, dependencies.PkgConfigDependency):
                    if dep.name == 'glib-2.0' and dep.version_reqs is not None:
                        for req in dep.version_reqs:
                            if req.startswith(('>=', '==')):
                                commands += ['--target-glib', req[2:]]
                                break
                    commands += ['--pkg', dep.name]
                elif isinstance(dep, dependencies.ExternalLibrary):
                    commands += dep.get_link_args('vala')
            else:
                commands += dep.get_compile_args()
            # Qt needs -fPIC for executables
            # XXX: We should move to -fPIC for all executables
            if isinstance(target, build.Executable):
                commands += dep.get_exe_args(compiler)
            # For 'automagic' deps: Boost and GTest. Also dependency('threads').
            # pkg-config puts the thread flags itself via `Cflags:`
            if dep.need_threads():
                commands += compiler.thread_flags(self.environment)
        # Fortran requires extra include directives.
        if compiler.language == 'fortran':
            for lt in target.link_targets:
                priv_dir = os.path.join(self.get_target_dir(lt), lt.get_basename() + lt.type_suffix())
                incflag = compiler.get_include_args(priv_dir, False)
                commands += incflag
        return commands

    def build_target_link_arguments(self, compiler, deps):
        '''Link arguments pointing at each dependency's linkable output.'''
        args = []
        for d in deps:
            if not (d.is_linkable_target()):
                raise RuntimeError('Tried to link with a non-library target "%s".' % d.get_basename())
            d_arg = self.get_target_filename_for_linking(d)
            if not d_arg:
                continue
            if isinstance(compiler, (compilers.LLVMDCompiler, compilers.DmdDCompiler)):
                d_arg = '-L' + d_arg
            args.append(d_arg)
        return args

    def determine_windows_extra_paths(self, target, extra_bdeps):
        '''On Windows there is no such thing as an rpath.
        We must determine all locations of DLLs that this exe
        links to and return them so they can be used in unit
        tests.'''
        result = []
        prospectives = []
        if isinstance(target, build.Executable):
            prospectives = target.get_transitive_link_deps()
            # External deps
            for deppath in self.rpaths_for_bundled_shared_libraries(target):
                result.append(os.path.normpath(os.path.join(self.environment.get_build_dir(), deppath)))
        for bdep in extra_bdeps:
            prospectives += bdep.get_transitive_link_deps()
        # Internal deps
        for ld in prospectives:
            if ld == '' or ld == '.':
                continue
            dirseg = os.path.join(self.environment.get_build_dir(), self.get_target_dir(ld))
            if dirseg not in result:
                result.append(dirseg)
        return result

    def write_benchmark_file(self, datafile):
        '''Pickle benchmark definitions into an open binary file.'''
        self.write_test_serialisation(self.build.get_benchmarks(), datafile)

    def write_test_file(self, datafile):
        '''Pickle test definitions into an open binary file.'''
        self.write_test_serialisation(self.build.get_tests(), datafile)

    def write_test_serialisation(self, tests, datafile):
        '''Convert test objects into TestSerialisation records and pickle
        the resulting list into datafile.'''
        arr = []
        for t in tests:
            exe = t.get_exe()
            if isinstance(exe, dependencies.ExternalProgram):
                cmd = exe.get_command()
            else:
                cmd = [os.path.join(self.environment.get_build_dir(), self.get_target_filename(t.get_exe()))]
            is_cross = self.environment.is_cross_build() and \
                self.environment.cross_info.need_cross_compiler() and \
                self.environment.cross_info.need_exe_wrapper()
            if isinstance(exe, build.BuildTarget):
                is_cross = is_cross and exe.is_cross
            if isinstance(exe, dependencies.ExternalProgram):
                # E.g. an external verifier or simulator program run on a generated executable.
                # Can always be run.
                is_cross = False
            if is_cross:
                exe_wrapper = self.environment.cross_info.config['binaries'].get('exe_wrapper', None)
            else:
                exe_wrapper = None
            if mesonlib.is_windows() or mesonlib.is_cygwin():
                extra_paths = self.determine_windows_extra_paths(exe, [])
            else:
                extra_paths = []
            cmd_args = []
            for a in t.cmd_args:
                if hasattr(a, 'held_object'):
                    a = a.held_object
                if isinstance(a, mesonlib.File):
                    a = os.path.join(self.environment.get_build_dir(), a.rel_to_builddir(self.build_to_src))
                    cmd_args.append(a)
                elif isinstance(a, str):
                    cmd_args.append(a)
                elif isinstance(a, build.Target):
                    cmd_args.append(self.get_target_filename(a))
                else:
                    raise MesonException('Bad object in test command.')
            ts = TestSerialisation(t.get_name(), t.suite, cmd, is_cross, exe_wrapper,
                                   t.is_parallel, cmd_args, t.env, t.should_fail,
                                   t.timeout, t.workdir, extra_paths)
            arr.append(ts)
        pickle.dump(arr, datafile)

    def generate_depmf_install(self, d):
        '''Write depmf.json and register it for installation in d (an
        InstallData) if the project declared a dependency manifest.'''
        if self.build.dep_manifest_name is None:
            return
        ifilename = os.path.join(self.environment.get_build_dir(), 'depmf.json')
        ofilename = os.path.join(self.environment.get_prefix(), self.build.dep_manifest_name)
        mfobj = {'type': 'dependency manifest', 'version': '1.0', 'projects': self.build.dep_manifest}
        with open(ifilename, 'w') as f:
            f.write(json.dumps(mfobj))
        # Copy file from, to, and with mode unchanged
        d.data.append([ifilename, ofilename, None])

    def get_regen_filelist(self):
        '''List of all files whose alteration means that the build
        definition needs to be regenerated.'''
        # NOTE(review): self.interpreter is not set in __init__; it appears
        # to be provided by the concrete backend or its caller — confirm.
        deps = [os.path.join(self.build_to_src, df)
                for df in self.interpreter.get_build_def_files()]
        if self.environment.is_cross_build():
            deps.append(os.path.join(self.build_to_src,
                                     self.environment.coredata.cross_file))
        deps.append('meson-private/coredata.dat')
        if os.path.exists(os.path.join(self.environment.get_source_dir(), 'meson_options.txt')):
            deps.append(os.path.join(self.build_to_src, 'meson_options.txt'))
        for sp in self.build.subprojects.keys():
            fname = os.path.join(self.environment.get_source_dir(), sp, 'meson_options.txt')
            if os.path.isfile(fname):
                deps.append(os.path.join(self.build_to_src, sp, 'meson_options.txt'))
        return deps

    def exe_object_to_cmd_array(self, exe):
        '''Turn an executable object into an argv list, rejecting cross-built
        targets that have no exe wrapper.'''
        if self.environment.is_cross_build() and \
           self.environment.cross_info.need_exe_wrapper() and \
           isinstance(exe, build.BuildTarget) and exe.is_cross:
            if 'exe_wrapper' not in self.environment.cross_info.config['binaries']:
                s = 'Can not use target %s as a generator because it is cross-built\n'
                s += 'and no exe wrapper is defined. You might want to set it to native instead.'
                s = s % exe.name
                raise MesonException(s)
        if isinstance(exe, build.BuildTarget):
            exe_arr = [os.path.join(self.environment.get_build_dir(), self.get_target_filename(exe))]
        else:
            exe_arr = exe.get_command()
        return exe_arr

    def replace_extra_args(self, args, genlist):
        '''Expand the @EXTRA_ARGS@ placeholder with the generator's extras.'''
        final_args = []
        for a in args:
            if a == '@EXTRA_ARGS@':
                final_args += genlist.get_extra_args()
            else:
                final_args.append(a)
        return final_args

    def replace_outputs(self, args, private_dir, output_list):
        '''Expand every @OUTPUTn@ placeholder to the matching path inside
        private_dir.'''
        newargs = []
        # Raw string: avoids an invalid-escape warning for \d.
        regex = re.compile(r'@OUTPUT(\d+)@')
        for arg in args:
            m = regex.search(arg)
            while m is not None:
                index = int(m.group(1))
                src = '@OUTPUT%d@' % index
                arg = arg.replace(src, os.path.join(private_dir, output_list[index]))
                m = regex.search(arg)
            newargs.append(arg)
        return newargs

    def get_build_by_default_targets(self):
        '''Ordered mapping of targets that must be built by default.'''
        result = OrderedDict()
        # Get all build and custom targets that must be built by default
        for name, t in self.build.get_targets().items():
            if t.build_by_default or t.install or t.build_always:
                result[name] = t
        # Get all targets used as test executables and arguments. These must
        # also be built by default. XXX: Sometime in the future these should be
        # built only before running tests.
        for t in self.build.get_tests():
            exe = t.exe
            if hasattr(exe, 'held_object'):
                exe = exe.held_object
            if isinstance(exe, (build.CustomTarget, build.BuildTarget)):
                result[exe.get_id()] = exe
            for arg in t.cmd_args:
                if hasattr(arg, 'held_object'):
                    arg = arg.held_object
                if not isinstance(arg, (build.CustomTarget, build.BuildTarget)):
                    continue
                result[arg.get_id()] = arg
        return result

    def get_custom_target_provided_libraries(self, target):
        '''Library files emitted by the target's custom-target generators.'''
        libs = []
        for t in target.get_generated_sources():
            if not isinstance(t, build.CustomTarget):
                continue
            for f in t.get_outputs():
                if self.environment.is_library(f):
                    libs.append(os.path.join(self.get_target_dir(t), f))
        return libs

    def is_unity(self, target):
        '''True if unity builds apply to this target.'''
        optval = self.get_option_for_target('unity', target)
        if optval == 'on' or (optval == 'subprojects' and target.subproject != ''):
            return True
        return False

    def get_custom_target_sources(self, target):
        '''
        Custom target sources can be of various object types; strings, File,
        BuildTarget, even other CustomTargets.
        Returns the path to them relative to the build root directory.
        '''
        srcs = []
        for i in target.get_sources():
            if hasattr(i, 'held_object'):
                i = i.held_object
            if isinstance(i, str):
                fname = [os.path.join(self.build_to_src, target.subdir, i)]
            elif isinstance(i, build.BuildTarget):
                fname = [self.get_target_filename(i)]
            elif isinstance(i, build.CustomTarget):
                fname = [os.path.join(self.get_target_dir(i), p) for p in i.get_outputs()]
            elif isinstance(i, build.GeneratedList):
                fname = [os.path.join(self.get_target_private_dir(target), p) for p in i.get_outputs()]
            else:
                fname = [i.rel_to_builddir(self.build_to_src)]
            if target.absolute_paths:
                fname = [os.path.join(self.environment.get_build_dir(), f) for f in fname]
            srcs += fname
        return srcs

    def get_custom_target_depend_files(self, target, absolute_paths=False):
        '''Paths of the target's depend_files, relative to the build dir or
        absolute depending on absolute_paths.'''
        deps = []
        for i in target.depend_files:
            if isinstance(i, mesonlib.File):
                if absolute_paths:
                    deps.append(i.absolute_path(self.environment.get_source_dir(),
                                                self.environment.get_build_dir()))
                else:
                    deps.append(i.rel_to_builddir(self.build_to_src))
            else:
                if absolute_paths:
                    deps.append(os.path.join(self.environment.get_source_dir(), target.subdir, i))
                else:
                    deps.append(os.path.join(self.build_to_src, target.subdir, i))
        return deps

    def eval_custom_target_command(self, target, absolute_outputs=False):
        '''Evaluate a custom target's command into (inputs, outputs, cmd),
        expanding all the @...@ template placeholders.'''
        # We want the outputs to be absolute only when using the VS backend
        # XXX: Maybe allow the vs backend to use relative paths too?
        source_root = self.build_to_src
        build_root = '.'
        outdir = self.get_target_dir(target)
        if absolute_outputs:
            source_root = self.environment.get_source_dir()
            # Bug fix: this used to be get_source_dir(), which made
            # @BUILD_ROOT@ expand to the source dir in absolute mode.
            build_root = self.environment.get_build_dir()
            outdir = os.path.join(self.environment.get_build_dir(), outdir)
        outputs = []
        for i in target.get_outputs():
            outputs.append(os.path.join(outdir, i))
        inputs = self.get_custom_target_sources(target)
        # Evaluate the command list
        cmd = []
        for i in target.command:
            if isinstance(i, build.Executable):
                cmd += self.exe_object_to_cmd_array(i)
                continue
            elif isinstance(i, build.CustomTarget):
                # GIR scanner will attempt to execute this binary but
                # it assumes that it is in path, so always give it a full path.
                tmp = i.get_outputs()[0]
                i = os.path.join(self.get_target_dir(i), tmp)
            elif isinstance(i, mesonlib.File):
                i = i.rel_to_builddir(self.build_to_src)
                if target.absolute_paths:
                    i = os.path.join(self.environment.get_build_dir(), i)
            # FIXME: str types are blindly added ignoring 'target.absolute_paths'
            # because we can't know if they refer to a file or just a string
            elif not isinstance(i, str):
                err_msg = 'Argument {0} is of unknown type {1}'
                raise RuntimeError(err_msg.format(str(i), str(type(i))))
            elif '@SOURCE_ROOT@' in i:
                i = i.replace('@SOURCE_ROOT@', source_root)
            elif '@BUILD_ROOT@' in i:
                i = i.replace('@BUILD_ROOT@', build_root)
            elif '@DEPFILE@' in i:
                if target.depfile is None:
                    msg = 'Custom target {!r} has @DEPFILE@ but no depfile ' \
                          'keyword argument.'.format(target.name)
                    raise MesonException(msg)
                dfilename = os.path.join(outdir, target.depfile)
                i = i.replace('@DEPFILE@', dfilename)
            elif '@PRIVATE_OUTDIR_' in i:
                # Raw string: avoids an invalid-escape warning for \s.
                match = re.search(r'@PRIVATE_OUTDIR_(ABS_)?([^/\s*]*)@', i)
                if not match:
                    msg = 'Custom target {!r} has an invalid argument {!r}' \
                          ''.format(target.name, i)
                    raise MesonException(msg)
                source = match.group(0)
                if match.group(1) is None and not target.absolute_paths:
                    lead_dir = ''
                else:
                    lead_dir = self.environment.get_build_dir()
                i = i.replace(source, os.path.join(lead_dir, outdir))
            cmd.append(i)
        # Substitute the rest of the template strings
        values = mesonlib.get_filenames_templates_dict(inputs, outputs)
        cmd = mesonlib.substitute_values(cmd, values)
        # This should not be necessary but removing it breaks
        # building GStreamer on Windows. The underlying issue
        # is problems with quoting backslashes on Windows
        # which is the seventh circle of hell. The downside is
        # that this breaks custom targets whose command lines
        # have backslashes. If you try to fix this be sure to
        # check that it does not break GST.
        #
        # The bug causes file paths such as c:\foo to get escaped
        # into c:\\foo.
        #
        # Unfortunately we have not been able to come up with an
        # isolated test case for this so unless you manage to come up
        # with one, the only way is to test the building with Gst's
        # setup. Note this in your MR or ping us and we will get it
        # fixed.
        #
        # https://github.com/mesonbuild/meson/pull/737
        cmd = [i.replace('\\', '/') for i in cmd]
        return inputs, outputs, cmd

    def run_postconf_scripts(self):
        '''Run every postconf script with the Meson env vars set.'''
        env = {'MESON_SOURCE_ROOT': self.environment.get_source_dir(),
               'MESON_BUILD_ROOT': self.environment.get_build_dir(),
               'MESONINTROSPECT': ' '.join([shlex.quote(x) for x in self.environment.get_build_command() + ['introspect']]),
               }
        child_env = os.environ.copy()
        child_env.update(env)

        for s in self.build.postconf_scripts:
            cmd = s['exe'] + s['args']
            subprocess.check_call(cmd, env=child_env)
--- /dev/null
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os, pickle, re, shlex, subprocess, sys
+from collections import OrderedDict
+
+from . import backends
+from .. import modules
+from .. import environment, mesonlib
+from .. import build
+from .. import mlog
+from .. import dependencies
+from .. import compilers
+from ..compilers import CompilerArgs
+from ..linkers import ArLinker
+from ..mesonlib import File, MesonException, OrderedSet
+from ..mesonlib import get_compiler_for_source
+from .backends import CleanTrees, InstallData
+from ..build import InvalidArguments
+
# Shell-quoting helpers differ between cmd.exe on Windows and POSIX shells.
if mesonlib.is_windows():
    def quote_func(s):
        # cmd.exe has no shlex-style quoting; wrapping in double quotes
        # is the convention used throughout this backend.
        return '"{}"'.format(s)
    execute_wrapper = 'cmd /c'
    rmfile_prefix = 'del /f /s /q {} &&'
else:
    quote_func = shlex.quote
    execute_wrapper = ''
    rmfile_prefix = 'rm -f {} &&'
+
def ninja_quote(text):
    """Escape *text* for inclusion in a ninja file.

    '$', space and ':' are special to ninja and are escaped by prefixing
    them with '$'. Newlines cannot be represented at all and raise.
    """
    for special in '$ :':
        text = text.replace(special, '$' + special)
    if '\n' in text:
        raise MesonException('Ninja does not support newlines in rules. '
                             'Please report this error with a test case to the Meson bug tracker.')
    return text
+
+
class NinjaBuildElement:
    """A single ninja 'build' statement: output files, a rule name, input
    files, dependencies and per-edge variable assignments.

    ``all_outputs`` is a dict shared across all elements so that two
    elements claiming the same output file can be detected in
    check_outputs().
    """

    def __init__(self, all_outputs, outfilenames, rule, infilenames):
        # Both outputs and inputs may be given as a single filename or a list.
        if isinstance(outfilenames, str):
            self.outfilenames = [outfilenames]
        else:
            self.outfilenames = outfilenames
        assert(isinstance(rule, str))
        self.rule = rule
        if isinstance(infilenames, str):
            self.infilenames = [infilenames]
        else:
            self.infilenames = infilenames
        self.deps = set()       # explicit dependencies ('|' in ninja syntax)
        self.orderdeps = set()  # order-only dependencies ('||' in ninja syntax)
        self.elems = []         # (name, [values]) per-edge variable assignments
        self.all_outputs = all_outputs

    def add_dep(self, dep):
        # Accepts a single dependency or a list of them.
        if isinstance(dep, list):
            self.deps.update(dep)
        else:
            self.deps.add(dep)

    def add_orderdep(self, dep):
        # Accepts a single order-only dependency or a list of them.
        if isinstance(dep, list):
            self.orderdeps.update(dep)
        else:
            self.orderdeps.add(dep)

    def add_item(self, name, elems):
        # Record a per-edge variable assignment; values are kept as a list.
        if isinstance(elems, str):
            elems = [elems]
        self.elems.append((name, elems))

    def write(self, outfile):
        """Serialize this element to *outfile* in ninja syntax."""
        self.check_outputs()
        line = 'build %s: %s %s' % (' '.join([ninja_quote(i) for i in self.outfilenames]),
                                    self.rule,
                                    ' '.join([ninja_quote(i) for i in self.infilenames]))
        if len(self.deps) > 0:
            line += ' | ' + ' '.join([ninja_quote(x) for x in self.deps])
        if len(self.orderdeps) > 0:
            line += ' || ' + ' '.join([ninja_quote(x) for x in self.orderdeps])
        line += '\n'
        # This is the only way I could find to make this work on all
        # platforms including Windows command shell. Slash is a dir separator
        # on Windows, too, so all characters are unambiguous and, more importantly,
        # do not require quoting.
        line = line.replace('\\', '/')
        outfile.write(line)

        # All the entries that should remain unquoted
        raw_names = {'DEPFILE', 'DESC', 'pool', 'description'}

        for e in self.elems:
            (name, elems) = e
            should_quote = name not in raw_names
            line = ' %s = ' % name
            noq_templ = "%s"
            newelems = []
            for i in elems:
                if not should_quote or i == '&&': # Hackety hack hack
                    # Raw names and the shell '&&' separator get only
                    # ninja-level escaping, no shell quoting.
                    quoter = ninja_quote
                else:
                    quoter = lambda x: ninja_quote(quote_func(x))
                i = i.replace('\\', '\\\\')
                # quote_func('') == '""' means we are on Windows (see the
                # module-level definition), where embedded double quotes
                # must be backslash-escaped.
                if quote_func('') == '""':
                    i = i.replace('"', '\\"')
                newelems.append(quoter(i))
            line += ' '.join(newelems)
            line += '\n'
            outfile.write(line)
        outfile.write('\n')

    def check_outputs(self):
        # Raise if any of our outputs is already produced by another element.
        for n in self.outfilenames:
            if n in self.all_outputs:
                raise MesonException('Multiple producers for Ninja target "%s". Please rename your targets.' % n)
            self.all_outputs[n] = True
+
+class NinjaBackend(backends.Backend):
+
+ def __init__(self, build):
+ super().__init__(build)
+ self.name = 'ninja'
+ self.ninja_filename = 'build.ninja'
+ self.target_arg_cache = {}
+ self.fortran_deps = {}
+ self.all_outputs = {}
+
+ def create_target_alias(self, to_target, outfile):
+ # We need to use aliases for targets that might be used as directory
+ # names to workaround a Ninja bug that breaks `ninja -t clean`.
+ # This is used for 'reserved' targets such as 'test', 'install',
+ # 'benchmark', etc, and also for RunTargets.
+ # https://github.com/mesonbuild/meson/issues/1644
+ if not to_target.startswith('meson-'):
+ m = 'Invalid usage of create_target_alias with {!r}'
+ raise AssertionError(m.format(to_target))
+ from_target = to_target[len('meson-'):]
+ elem = NinjaBuildElement(self.all_outputs, from_target, 'phony', to_target)
+ elem.write(outfile)
+
    def detect_vs_dep_prefix(self, tempfilename):
        '''VS writes its dependency information in a locale-dependent format.
        Detect the search prefix to use, write an msvc_deps_prefix line to
        the ninja file when MSVC is in use, and return the file reopened in
        append mode for the caller to continue writing.'''
        for compiler in self.build.compilers.values():
            # Have to detect the dependency format
            if compiler.id == 'msvc':
                break
        else:
            # None of our compilers are MSVC, we're done.
            return open(tempfilename, 'a')
        filename = os.path.join(self.environment.get_scratch_dir(),
                                'incdetect.c')
        # A minimal C file whose only purpose is to make cl print the
        # '/showIncludes' line for stdio.h.
        with open(filename, 'w') as f:
            f.write('''#include<stdio.h>
int dummy;
''')

        # The output of cl dependency information is language
        # and locale dependent. Any attempt at converting it to
        # Python strings leads to failure. We _must_ do this detection
        # in raw byte mode and write the result in raw bytes.
        pc = subprocess.Popen(['cl', '/showIncludes', '/c', 'incdetect.c'],
                              cwd=self.environment.get_scratch_dir(),
                              stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        (stdo, _) = pc.communicate()

        # We want to match 'Note: including file: ' in the line
        # 'Note: including file: d:\MyDir\include\stdio.h', however
        # different locales have different messages with a different
        # number of colons. Match up to the drive name 'd:\'.
        matchre = re.compile(rb"^(.*\s)[a-zA-Z]:\\.*stdio.h$")
        for line in stdo.split(b'\r\n'):
            match = matchre.match(line)
            if match:
                # Append the detected prefix in raw bytes, then reopen in
                # text append mode for the rest of generation.
                with open(tempfilename, 'ab') as binfile:
                    binfile.write(b'msvc_deps_prefix = ' + match.group(1) + b'\n')
                return open(tempfilename, 'a')
        raise MesonException('Could not determine vs dep dependency prefix string.')
+
    def generate(self, interp):
        """Entry point: write build.ninja for the configured build.

        Everything is first written to 'build.ninja~' and only moved over
        the real file once generation has fully succeeded, so a failed run
        never leaves a half-written build file behind.
        """
        self.interpreter = interp
        self.ninja_command = environment.detect_ninja(log=True)
        if self.ninja_command is None:
            raise MesonException('Could not detect Ninja v1.5 or newer')
        outfilename = os.path.join(self.environment.get_build_dir(), self.ninja_filename)
        tempfilename = outfilename + '~'
        with open(tempfilename, 'w') as outfile:
            outfile.write('# This is the build file for project "%s"\n' %
                          self.build.get_project())
            outfile.write('# It is autogenerated by the Meson build system.\n')
            outfile.write('# Do not edit by hand.\n\n')
            outfile.write('ninja_required_version = 1.5.1\n\n')
        # detect_vs_dep_prefix may append an msvc_deps_prefix line and
        # hands back the file reopened in append mode.
        with self.detect_vs_dep_prefix(tempfilename) as outfile:
            self.generate_rules(outfile)
            self.generate_phony(outfile)
            outfile.write('# Build rules for targets\n\n')
            for t in self.build.get_targets().values():
                self.generate_target(t, outfile)
            outfile.write('# Test rules\n\n')
            self.generate_tests(outfile)
            outfile.write('# Install rules\n\n')
            self.generate_install(outfile)
            self.generate_dist(outfile)
            if 'b_coverage' in self.environment.coredata.base_options and \
                    self.environment.coredata.base_options['b_coverage'].value:
                outfile.write('# Coverage rules\n\n')
                self.generate_coverage_rules(outfile)
            outfile.write('# Suffix\n\n')
            self.generate_utils(outfile)
            self.generate_ending(outfile)
        # Only overwrite the old build file after the new one has been
        # fully created.
        os.replace(tempfilename, outfilename)
        self.generate_compdb()
+
+ # http://clang.llvm.org/docs/JSONCompilationDatabase.html
+ def generate_compdb(self):
+ pch_compilers = ['%s_PCH' % i for i in self.build.compilers]
+ native_compilers = ['%s_COMPILER' % i for i in self.build.compilers]
+ cross_compilers = ['%s_CROSS_COMPILER' % i for i in self.build.cross_compilers]
+ ninja_compdb = [self.ninja_command, '-t', 'compdb'] + pch_compilers + native_compilers + cross_compilers
+ builddir = self.environment.get_build_dir()
+ try:
+ jsondb = subprocess.check_output(ninja_compdb, cwd=builddir)
+ with open(os.path.join(builddir, 'compile_commands.json'), 'wb') as f:
+ f.write(jsondb)
+ except Exception:
+ mlog.warning('Could not create compilation database.')
+
+ # Get all generated headers. Any source file might need them so
+ # we need to add an order dependency to them.
+ def get_generated_headers(self, target):
+ header_deps = []
+ # XXX: Why don't we add deps to CustomTarget headers here?
+ for genlist in target.get_generated_sources():
+ if isinstance(genlist, (build.CustomTarget, build.CustomTargetIndex)):
+ continue
+ for src in genlist.get_outputs():
+ if self.environment.is_header(src):
+ header_deps.append(self.get_target_generated_dir(target, genlist, src))
+ if 'vala' in target.compilers and not isinstance(target, build.Executable):
+ vala_header = File.from_built_file(self.get_target_dir(target), target.vala_header)
+ header_deps.append(vala_header)
+ # Recurse and find generated headers
+ for dep in target.link_targets:
+ if isinstance(dep, (build.StaticLibrary, build.SharedLibrary)):
+ header_deps += self.get_generated_headers(dep)
+ return header_deps
+
+ def get_target_generated_sources(self, target):
+ """
+ Returns a dictionary with the keys being the path to the file
+ (relative to the build directory) of that type and the value
+ being the GeneratorList or CustomTarget that generated it.
+ """
+ srcs = OrderedDict()
+ for gensrc in target.get_generated_sources():
+ for s in gensrc.get_outputs():
+ f = self.get_target_generated_dir(target, gensrc, s)
+ srcs[f] = s
+ return srcs
+
+ def get_target_sources(self, target):
+ srcs = OrderedDict()
+ for s in target.get_sources():
+ # BuildTarget sources are always mesonlib.File files which are
+ # either in the source root, or generated with configure_file and
+ # in the build root
+ if not isinstance(s, File):
+ raise InvalidArguments('All sources in target {!r} must be of type mesonlib.File'.format(s))
+ f = s.rel_to_builddir(self.build_to_src)
+ srcs[f] = s
+ return srcs
+
    # Languages that can mix with C or C++ but don't support unity builds yet
    # because the syntax we use for unity builds is specific to C/++/ObjC/++.
    # Assembly files cannot be unitified and neither can LLVM IR files;
    # those two are handled separately in get_target_source_can_unity().
    langs_cant_unity = ('d', 'fortran')
+
+ def get_target_source_can_unity(self, target, source):
+ if isinstance(source, File):
+ source = source.fname
+ if self.environment.is_llvm_ir(source) or \
+ self.environment.is_assembly(source):
+ return False
+ suffix = os.path.splitext(source)[1][1:]
+ for lang in self.langs_cant_unity:
+ if lang not in target.compilers:
+ continue
+ if suffix in target.compilers[lang].file_suffixes:
+ return False
+ return True
+
    def generate_target(self, target, outfile):
        """Write all ninja rules needed to build one target.

        Dispatches to specialized generators for CustomTarget, RunTarget,
        Jar, Rust, C#, and Swift; the remainder of the method handles the
        C-linkable languages (ObjC++, ObjC, C++, C, D, Fortran, Vala),
        including unity builds and Vala's generated-C intermediate step.
        """
        if isinstance(target, build.CustomTarget):
            self.generate_custom_target(target, outfile)
        if isinstance(target, build.RunTarget):
            self.generate_run_target(target, outfile)
        name = target.get_id()
        if name in self.processed_targets:
            return
        self.processed_targets[name] = True
        # Generate rules for all dependency targets
        self.process_target_dependencies(target, outfile)
        # If target uses a language that cannot link to C objects,
        # just generate for that language and return.
        if isinstance(target, build.Jar):
            self.generate_jar_target(target, outfile)
            return
        if 'rust' in target.compilers:
            self.generate_rust_target(target, outfile)
            return
        if 'cs' in target.compilers:
            self.generate_cs_target(target, outfile)
            return
        if 'swift' in target.compilers:
            self.generate_swift_target(target, outfile)
            return

        # Now we handle the following languages:
        # ObjC++, ObjC, C++, C, D, Fortran, Vala

        # target_sources:
        #   Pre-existing target C/C++ sources to be built; dict of full path to
        #   source relative to build root and the original File object.
        # generated_sources:
        #   GeneratedList and CustomTarget sources to be built; dict of the full
        #   path to source relative to build root and the generating target/list
        # vala_generated_sources:
        #   Array of sources generated by valac that have to be compiled
        if 'vala' in target.compilers:
            # Sources consumed by valac are filtered out. These only contain
            # C/C++ sources, objects, generated libs, and unknown sources now.
            target_sources, generated_sources, \
                vala_generated_sources = self.generate_vala_compile(target, outfile)
        else:
            target_sources = self.get_target_sources(target)
            generated_sources = self.get_target_generated_sources(target)
            vala_generated_sources = []
        self.scan_fortran_module_outputs(target)
        # Generate rules for GeneratedLists
        self.generate_generator_list_rules(target, outfile)

        # Generate rules for building the remaining source files in this target
        outname = self.get_target_filename(target)
        obj_list = []
        use_pch = self.environment.coredata.base_options.get('b_pch', False)
        is_unity = self.is_unity(target)
        if use_pch and target.has_pch():
            pch_objects = self.generate_pch(target, outfile)
        else:
            pch_objects = []
        header_deps = []
        unity_src = []
        unity_deps = [] # Generated sources that must be built before compiling a Unity target.
        header_deps += self.get_generated_headers(target)

        if is_unity:
            # Warn about incompatible sources if a unity build is enabled
            langs = set(target.compilers.keys())
            langs_cant = langs.intersection(self.langs_cant_unity)
            if langs_cant:
                # NOTE(review): this rebinds `langs` to the joined string as
                # well, so both format slots below get the same text; the
                # message still renders, but the double assignment looks
                # accidental — confirm intent before touching.
                langs_are = langs = ', '.join(langs_cant).upper()
                langs_are += ' are' if len(langs_cant) > 1 else ' is'
                msg = '{} not supported in Unity builds yet, so {} ' \
                      'sources in the {!r} target will be compiled normally' \
                      ''.format(langs_are, langs, target.name)
                mlog.log(mlog.red('FIXME'), msg)

        # Get a list of all generated headers that will be needed while building
        # this target's sources (generated sources and pre-existing sources).
        # This will be set as dependencies of all the target's sources. At the
        # same time, also deal with generated sources that need to be compiled.
        generated_source_files = []
        for rel_src, gensrc in generated_sources.items():
            dirpart, fnamepart = os.path.split(rel_src)
            raw_src = File(True, dirpart, fnamepart)
            if self.environment.is_source(rel_src) and not self.environment.is_header(rel_src):
                if is_unity and self.get_target_source_can_unity(target, rel_src):
                    unity_deps.append(raw_src)
                    abs_src = os.path.join(self.environment.get_build_dir(), rel_src)
                    unity_src.append(abs_src)
                else:
                    generated_source_files.append(raw_src)
            elif self.environment.is_object(rel_src):
                obj_list.append(rel_src)
            elif self.environment.is_library(rel_src):
                pass
            else:
                # Assume anything not specifically a source file is a header. This is because
                # people generate files with weird suffixes (.inc, .fh) that they then include
                # in their source files.
                header_deps.append(raw_src)
        # These are the generated source files that need to be built for use by
        # this target. We create the Ninja build file elements for this here
        # because we need `header_deps` to be fully generated in the above loop.
        for src in generated_source_files:
            if self.environment.is_llvm_ir(src):
                o = self.generate_llvm_ir_compile(target, outfile, src)
            else:
                o = self.generate_single_compile(target, outfile, src, True,
                                                 header_deps=header_deps)
            obj_list.append(o)

        # Generate compilation targets for C sources generated from Vala
        # sources. This can be extended to other $LANG->C compilers later if
        # necessary. This needs to be separate for at least Vala
        vala_generated_source_files = []
        for src in vala_generated_sources:
            dirpart, fnamepart = os.path.split(src)
            raw_src = File(True, dirpart, fnamepart)
            if is_unity:
                unity_src.append(os.path.join(self.environment.get_build_dir(), src))
                header_deps.append(raw_src)
            else:
                # Generated targets are ordered deps because the must exist
                # before the sources compiling them are used. After the first
                # compile we get precise dependency info from dep files.
                # This should work in all cases. If it does not, then just
                # move them from orderdeps to proper deps.
                if self.environment.is_header(src):
                    header_deps.append(raw_src)
                else:
                    # We gather all these and generate compile rules below
                    # after `header_deps` (above) is fully generated
                    vala_generated_source_files.append(raw_src)
        for src in vala_generated_source_files:
            # Passing 'vala' here signifies that we want the compile
            # arguments to be specialized for C code generated by
            # valac. For instance, no warnings should be emitted.
            obj_list.append(self.generate_single_compile(target, outfile, src, 'vala', [], header_deps))

        # Generate compile targets for all the pre-existing sources for this target
        for f, src in target_sources.items():
            if not self.environment.is_header(src):
                if self.environment.is_llvm_ir(src):
                    obj_list.append(self.generate_llvm_ir_compile(target, outfile, src))
                elif is_unity and self.get_target_source_can_unity(target, src):
                    abs_src = os.path.join(self.environment.get_build_dir(),
                                           src.rel_to_builddir(self.build_to_src))
                    unity_src.append(abs_src)
                else:
                    obj_list.append(self.generate_single_compile(target, outfile, src, False, [], header_deps))
        obj_list += self.flatten_object_list(target)
        if is_unity:
            for src in self.generate_unity_files(target, unity_src):
                obj_list.append(self.generate_single_compile(target, outfile, src, True, unity_deps + header_deps))
        linker = self.determine_linker(target)
        elem = self.generate_link(target, outfile, outname, obj_list, linker, pch_objects)
        self.generate_shlib_aliases(target, self.get_target_dir(target))
        elem.write(outfile)
+
+ def process_target_dependencies(self, target, outfile):
+ for t in target.get_dependencies():
+ tname = t.get_basename() + t.type_suffix()
+ if tname not in self.processed_targets:
+ self.generate_target(t, outfile)
+
+ def custom_target_generator_inputs(self, target, outfile):
+ for s in target.sources:
+ if hasattr(s, 'held_object'):
+ s = s.held_object
+ if isinstance(s, build.GeneratedList):
+ self.generate_genlist_for_target(s, target, outfile)
+
+ def unwrap_dep_list(self, target):
+ deps = []
+ for i in target.get_dependencies():
+ # FIXME, should not grab element at zero but rather expand all.
+ if isinstance(i, list):
+ i = i[0]
+ # Add a dependency on all the outputs of this target
+ for output in i.get_outputs():
+ deps.append(os.path.join(self.get_target_dir(i), output))
+ return deps
+
    def generate_custom_target(self, target, outfile):
        """Emit the ninja rules for one CustomTarget.

        Falls back to the serialized meson_exe.py wrapper whenever plain
        ninja cannot express the command: captured stdout, embedded
        newlines, or extra PATH entries needed to locate in-tree DLLs on
        Windows/Cygwin.
        """
        self.custom_target_generator_inputs(target, outfile)
        (srcs, ofilenames, cmd) = self.eval_custom_target_command(target)
        deps = self.unwrap_dep_list(target)
        deps += self.get_custom_target_depend_files(target)
        desc = 'Generating {0} with a {1} command.'
        if target.build_always:
            deps.append('PHONY')
        if target.depfile is None:
            rulename = 'CUSTOM_COMMAND'
        else:
            rulename = 'CUSTOM_COMMAND_DEP'
        elem = NinjaBuildElement(self.all_outputs, ofilenames, rulename, srcs)
        elem.add_dep(deps)
        for d in target.extra_depends:
            # Add a dependency on all the outputs of this target
            for output in d.get_outputs():
                elem.add_dep(os.path.join(self.get_target_dir(d), output))
        serialize = False
        extra_paths = []
        # If the target requires capturing stdout, then use the serialized
        # executable wrapper to capture that output and save it to a file.
        if target.capture:
            serialize = True
        # If the command line requires a newline, also use the wrapper, as
        # ninja does not support them in its build rule syntax.
        if any('\n' in c for c in cmd):
            serialize = True
        # Windows doesn't have -rpath, so for EXEs that need DLLs built within
        # the project, we need to set PATH so the DLLs are found. We use
        # a serialized executable wrapper for that and check if the
        # CustomTarget command needs extra paths first.
        if mesonlib.is_windows() or mesonlib.is_cygwin():
            extra_bdeps = target.get_transitive_build_target_deps()
            extra_paths = self.determine_windows_extra_paths(target.command[0], extra_bdeps)
            if extra_paths:
                serialize = True
        if serialize:
            exe_data = self.serialize_executable(target.command[0], cmd[1:],
                                                 # All targets are built from the build dir
                                                 self.environment.get_build_dir(),
                                                 extra_paths=extra_paths,
                                                 capture=ofilenames[0] if target.capture else None)
            cmd = self.environment.get_build_command() + ['--internal', 'exe', exe_data]
            cmd_type = 'meson_exe.py custom'
        else:
            cmd_type = 'custom'
        if target.depfile is not None:
            rel_dfile = os.path.join(self.get_target_dir(target), target.depfile)
            abs_pdir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target))
            # The directory must exist before ninja tries to write the depfile.
            os.makedirs(abs_pdir, exist_ok=True)
            elem.add_item('DEPFILE', rel_dfile)
        cmd = self.replace_paths(target, cmd)
        elem.add_item('COMMAND', cmd)
        elem.add_item('description', desc.format(target.name, cmd_type))
        elem.write(outfile)
        self.processed_targets[target.get_id()] = True
+
    def generate_run_target(self, target, outfile):
        """Emit the ninja rules for one RunTarget.

        The command is routed through meson's internal 'commandrunner' so
        the script sees the source/build roots; the executable itself may
        be a built Executable (possibly behind a cross exe_wrapper), an
        ExternalProgram, a CustomTarget output, a File, or a plain string.
        """
        cmd = self.environment.get_build_command() + ['--internal', 'commandrunner']
        deps = self.unwrap_dep_list(target)
        arg_strings = []
        for i in target.args:
            if isinstance(i, str):
                arg_strings.append(i)
            elif isinstance(i, (build.BuildTarget, build.CustomTarget)):
                relfname = self.get_target_filename(i)
                arg_strings.append(os.path.join(self.environment.get_build_dir(), relfname))
                deps.append(relfname)
            elif isinstance(i, mesonlib.File):
                relfname = i.rel_to_builddir(self.build_to_src)
                arg_strings.append(os.path.join(self.environment.get_build_dir(), relfname))
            else:
                raise AssertionError('Unreachable code in generate_run_target: ' + str(i))
        # Subproject targets get a 'subproj@@' prefix to keep names unique.
        if target.subproject != '':
            subproject_prefix = '{}@@'.format(target.subproject)
        else:
            subproject_prefix = ''
        target_name = 'meson-{}{}'.format(subproject_prefix, target.name)
        elem = NinjaBuildElement(self.all_outputs, target_name, 'CUSTOM_COMMAND', [])
        cmd += [self.environment.get_source_dir(),
                self.environment.get_build_dir(),
                target.subdir] + self.environment.get_build_command()
        texe = target.command
        # Unwrap an interpreter holder object if present.
        try:
            texe = texe.held_object
        except AttributeError:
            pass
        if isinstance(texe, build.Executable):
            abs_exe = os.path.join(self.environment.get_build_dir(), self.get_target_filename(texe))
            deps.append(self.get_target_filename(texe))
            if self.environment.is_cross_build() and \
                    self.environment.cross_info.need_exe_wrapper():
                exe_wrap = self.environment.cross_info.config['binaries'].get('exe_wrapper', None)
                if exe_wrap is not None:
                    cmd += [exe_wrap]
            cmd.append(abs_exe)
        elif isinstance(texe, dependencies.ExternalProgram):
            cmd += texe.get_command()
        elif isinstance(texe, build.CustomTarget):
            deps.append(self.get_target_filename(texe))
            cmd += [os.path.join(self.environment.get_build_dir(), self.get_target_filename(texe))]
        elif isinstance(texe, mesonlib.File):
            cmd.append(texe.absolute_path(self.environment.get_source_dir(), self.environment.get_build_dir()))
        else:
            cmd.append(target.command)
        cmd += arg_strings

        elem.add_dep(deps)
        cmd = self.replace_paths(target, cmd)
        elem.add_item('COMMAND', cmd)
        elem.add_item('description', 'Running external command %s.' % target.name)
        # Run in the console pool so output is not buffered/interleaved.
        elem.add_item('pool', 'console')
        elem.write(outfile)
        # Alias that runs the target defined above with the name the user specified
        self.create_target_alias(target_name, outfile)
        self.processed_targets[target.get_id()] = True
+
+ def generate_coverage_rules(self, outfile):
+ e = NinjaBuildElement(self.all_outputs, 'meson-coverage', 'CUSTOM_COMMAND', 'PHONY')
+ e.add_item('COMMAND', self.environment.get_build_command() +
+ ['--internal', 'coverage',
+ self.environment.get_source_dir(),
+ self.environment.get_build_dir(),
+ self.environment.get_log_dir()])
+ e.add_item('description', 'Generates coverage reports.')
+ e.write(outfile)
+ # Alias that runs the target defined above
+ self.create_target_alias('meson-coverage', outfile)
+ self.generate_coverage_legacy_rules(outfile)
+
    def generate_coverage_legacy_rules(self, outfile):
        """Emit per-format coverage targets driven directly by gcovr
        (XML/text reports) and lcov+genhtml (HTML report).

        Warns if neither toolchain is available.
        """
        (gcovr_exe, lcov_exe, genhtml_exe) = environment.find_coverage_tools()
        added_rule = False
        if gcovr_exe:
            added_rule = True
            elem = NinjaBuildElement(self.all_outputs, 'meson-coverage-xml', 'CUSTOM_COMMAND', '')
            elem.add_item('COMMAND', [gcovr_exe, '-x', '-r', self.environment.get_source_dir(),
                                      '-o', os.path.join(self.environment.get_log_dir(), 'coverage.xml')])
            elem.add_item('DESC', 'Generating XML coverage report.')
            elem.write(outfile)
            # Alias that runs the target defined above
            self.create_target_alias('meson-coverage-xml', outfile)
            elem = NinjaBuildElement(self.all_outputs, 'meson-coverage-text', 'CUSTOM_COMMAND', '')
            elem.add_item('COMMAND', [gcovr_exe, '-r', self.environment.get_source_dir(),
                                      '-o', os.path.join(self.environment.get_log_dir(), 'coverage.txt')])
            elem.add_item('DESC', 'Generating text coverage report.')
            elem.write(outfile)
            # Alias that runs the target defined above
            self.create_target_alias('meson-coverage-text', outfile)
        if lcov_exe and genhtml_exe:
            added_rule = True
            htmloutdir = os.path.join(self.environment.get_log_dir(), 'coveragereport')
            covinfo = os.path.join(self.environment.get_log_dir(), 'coverage.info')
            # The user-facing target is a phony alias for the index.html rule.
            phony_elem = NinjaBuildElement(self.all_outputs, 'meson-coverage-html', 'phony', os.path.join(htmloutdir, 'index.html'))
            phony_elem.write(outfile)
            # Alias that runs the target defined above
            self.create_target_alias('meson-coverage-html', outfile)
            elem = NinjaBuildElement(self.all_outputs, os.path.join(htmloutdir, 'index.html'), 'CUSTOM_COMMAND', '')
            # lcov captures the data, then genhtml renders it ('&&' keeps
            # this a single ninja command line).
            command = [lcov_exe, '--directory', self.environment.get_build_dir(),
                       '--capture', '--output-file', covinfo, '--no-checksum',
                       '&&', genhtml_exe, '--prefix', self.environment.get_build_dir(),
                       '--output-directory', htmloutdir, '--title', 'Code coverage',
                       '--legend', '--show-details', covinfo]
            elem.add_item('COMMAND', command)
            elem.add_item('DESC', 'Generating HTML coverage report.')
            elem.write(outfile)
        if not added_rule:
            mlog.warning('coverage requested but neither gcovr nor lcov/genhtml found.')
+
    def generate_install(self, outfile):
        """Emit the 'meson-install' target and pickle the InstallData.

        The actual copying is done at install time by `meson --internal
        install`, which reads the pickled data from the scratch dir.
        """
        install_data_file = os.path.join(self.environment.get_scratch_dir(), 'install.dat')
        if self.environment.is_cross_build():
            bins = self.environment.cross_info.config['binaries']
            if 'strip' not in bins:
                mlog.warning('Cross file does not specify strip binary, result will not be stripped.')
                strip_bin = None
            else:
                strip_bin = mesonlib.stringlistify(bins['strip'])
        else:
            strip_bin = self.environment.native_strip_bin
        d = InstallData(self.environment.get_source_dir(),
                        self.environment.get_build_dir(),
                        self.environment.get_prefix(),
                        strip_bin, self.environment.get_build_command() + ['introspect'])
        elem = NinjaBuildElement(self.all_outputs, 'meson-install', 'CUSTOM_COMMAND', 'PHONY')
        # Installation implies a full build first.
        elem.add_dep('all')
        elem.add_item('DESC', 'Installing files.')
        elem.add_item('COMMAND', self.environment.get_build_command() + ['--internal', 'install', install_data_file])
        elem.add_item('pool', 'console')
        # Populate the install data before it is pickled below.
        self.generate_depmf_install(d)
        self.generate_target_install(d)
        self.generate_header_install(d)
        self.generate_man_install(d)
        self.generate_data_install(d)
        self.generate_custom_install_script(d)
        self.generate_subdir_install(d)
        elem.write(outfile)
        # Alias that runs the target defined above
        self.create_target_alias('meson-install', outfile)

        with open(install_data_file, 'wb') as ofile:
            pickle.dump(d, ofile)
+
    def generate_target_install(self, d):
        """Record every installable build target in the install data.

        Each entry appended to d.targets is a list of
        [filename, install_dir, aliases, strip?, install_rpath].
        An install_dir of False for an output means "do not install it".
        """
        for t in self.build.get_targets().values():
            if not t.should_install():
                continue
            # Find the installation directory.
            outdirs = t.get_custom_install_dir()
            custom_install_dir = False
            if outdirs[0] is not None and outdirs[0] is not True:
                # Either the value is set, or is set to False which means
                # we want this specific output out of many outputs to not
                # be installed.
                custom_install_dir = True
            elif isinstance(t, build.SharedModule):
                outdirs[0] = self.environment.get_shared_module_dir()
            elif isinstance(t, build.SharedLibrary):
                outdirs[0] = self.environment.get_shared_lib_dir()
            elif isinstance(t, build.StaticLibrary):
                outdirs[0] = self.environment.get_static_lib_dir()
            elif isinstance(t, build.Executable):
                outdirs[0] = self.environment.get_bindir()
            else:
                assert(isinstance(t, build.BuildTarget))
                # XXX: Add BuildTarget-specific install dir cases here
                outdirs[0] = self.environment.get_libdir()
            # Sanity-check the outputs and install_dirs
            num_outdirs, num_out = len(outdirs), len(t.get_outputs())
            if num_outdirs != 1 and num_outdirs != num_out:
                m = 'Target {!r} has {} outputs: {!r}, but only {} "install_dir"s were found.\n' \
                    "Pass 'false' for outputs that should not be installed and 'true' for\n" \
                    'using the default installation directory for an output.'
                raise MesonException(m.format(t.name, num_out, t.get_outputs(), num_outdirs))
            # Install the target output(s)
            if isinstance(t, build.BuildTarget):
                should_strip = self.get_option_for_target('strip', t)
                # Install primary build output (library/executable/jar, etc)
                # Done separately because of strip/aliases/rpath
                if outdirs[0] is not False:
                    i = [self.get_target_filename(t), outdirs[0],
                         t.get_aliases(), should_strip, t.install_rpath]
                    d.targets.append(i)
                    # On toolchains/platforms that use an import library for
                    # linking (separate from the shared library with all the
                    # code), we need to install that too (dll.a/.lib).
                    if (isinstance(t, build.SharedLibrary) or isinstance(t, build.Executable)) and t.get_import_filename():
                        if custom_install_dir:
                            # If the DLL is installed into a custom directory,
                            # install the import library into the same place so
                            # it doesn't go into a surprising place
                            implib_install_dir = outdirs[0]
                        else:
                            implib_install_dir = self.environment.get_import_lib_dir()
                        # Install the import library.
                        i = [self.get_target_filename_for_linking(t),
                             implib_install_dir,
                             # It has no aliases, should not be stripped, and
                             # doesn't have an install_rpath
                             {}, False, '']
                        d.targets.append(i)
                # Install secondary outputs. Only used for Vala right now.
                if num_outdirs > 1:
                    for output, outdir in zip(t.get_outputs()[1:], outdirs[1:]):
                        # User requested that we not install this output
                        if outdir is False:
                            continue
                        f = os.path.join(self.get_target_dir(t), output)
                        d.targets.append([f, outdir, {}, False, None])
            elif isinstance(t, build.CustomTarget):
                # If only one install_dir is specified, assume that all
                # outputs will be installed into it. This is for
                # backwards-compatibility and because it makes sense to
                # avoid repetition since this is a common use-case.
                #
                # To selectively install only some outputs, pass `false` as
                # the install_dir for the corresponding output by index
                if num_outdirs == 1 and num_out > 1:
                    for output in t.get_outputs():
                        f = os.path.join(self.get_target_dir(t), output)
                        d.targets.append([f, outdirs[0], {}, False, None])
                else:
                    for output, outdir in zip(t.get_outputs(), outdirs):
                        # User requested that we not install this output
                        if outdir is False:
                            continue
                        f = os.path.join(self.get_target_dir(t), output)
                        d.targets.append([f, outdir, {}, False, None])
+
+ def generate_custom_install_script(self, d):
+ result = []
+ srcdir = self.environment.get_source_dir()
+ builddir = self.environment.get_build_dir()
+ for i in self.build.install_scripts:
+ exe = i['exe']
+ args = i['args']
+ fixed_args = []
+ for a in args:
+ a = a.replace('@SOURCE_ROOT@', srcdir)
+ a = a.replace('@BUILD_ROOT@', builddir)
+ fixed_args.append(a)
+ result.append(build.RunScript(exe, fixed_args))
+ d.install_scripts = result
+
def generate_header_install(self, d):
    """Collect installable headers into d.headers as [abs_source, dest_dir] pairs."""
    incroot = self.environment.get_includedir()
    srcdir = self.environment.get_source_dir()
    builddir = self.environment.get_build_dir()
    for hdr in self.build.get_headers():
        outdir = hdr.get_custom_install_dir()
        if outdir is None:
            # No explicit install_dir: default to <includedir>/<subdir>.
            outdir = os.path.join(incroot, hdr.get_install_subdir())
        for src in hdr.get_sources():
            if not isinstance(src, File):
                raise MesonException(
                    'Invalid header type {!r} can\'t be installed'.format(src))
            d.headers.append([src.absolute_path(srcdir, builddir), outdir])
+
def generate_man_install(self, d):
    """Collect man pages into d.man as [abs_source, gzipped destination] pairs."""
    manroot = self.environment.get_mandir()
    src_root = self.environment.get_source_dir()
    build_root = self.environment.get_build_dir()
    for m in self.build.get_man():
        for page in m.get_sources():
            # The file's trailing extension is the manual section,
            # e.g. foo.1 goes into man1/.
            section = page.split('.')[-1]
            destdir = m.get_custom_install_dir()
            if destdir is None:
                destdir = os.path.join(manroot, 'man' + section)
            srcabs = page.absolute_path(src_root, build_root)
            # Installed pages are gzip-compressed, hence the '.gz' suffix.
            dstabs = os.path.join(destdir, os.path.split(page.fname)[1] + '.gz')
            d.man.append([srcabs, dstabs])
+
def generate_data_install(self, d):
    """Collect data files into d.data as [abs_source, destination, install_mode]."""
    srcdir = self.environment.get_source_dir()
    builddir = self.environment.get_build_dir()
    for de in self.build.get_data():
        assert isinstance(de, build.Data)
        for src in de.sources:
            assert isinstance(src, mesonlib.File)
            # Each file keeps its basename and lands in the entry's install_dir.
            dest = os.path.join(de.install_dir, os.path.split(src.fname)[1])
            d.data.append([src.absolute_path(srcdir, builddir), dest, de.install_mode])
+
def generate_subdir_install(self, d):
    """Collect install_subdir() entries into d.install_subdirs.

    Each entry is [abs_source_dir, dir_name_to_copy, abs_dest_dir,
    install_mode, exclude].
    """
    for sd in self.build.get_install_subdirs():
        # Split a nested installable dir like 'a/b/c' into its parent
        # path ('a/b') and the leaf directory that gets copied ('c').
        head, sep, leaf = sd.installable_subdir.rstrip('/').rpartition('/')
        if sep:
            subdir = os.path.join(sd.source_subdir, head)
            inst_dir = leaf
        else:
            subdir = sd.source_subdir
            inst_dir = sd.installable_subdir
        src_dir = os.path.join(self.environment.get_source_dir(), subdir)
        dst_dir = os.path.join(self.environment.get_prefix(), sd.install_dir)
        d.install_subdirs.append([src_dir, inst_dir, dst_dir, sd.install_mode, sd.exclude])
+
def generate_tests(self, outfile):
    """Emit the 'meson-test' and 'meson-benchmark' ninja targets.

    Both targets simply shell back out to `meson test` with the right
    arguments; the test definitions themselves are serialized separately
    by serialize_tests().
    """
    self.serialize_tests()
    cmd = self.environment.get_build_command(True) + ['test', '--no-rebuild']
    if not self.environment.coredata.get_builtin_option('stdsplit'):
        cmd += ['--no-stdsplit']
    if self.environment.coredata.get_builtin_option('errorlogs'):
        cmd += ['--print-errorlogs']
    elem = NinjaBuildElement(self.all_outputs, 'meson-test', 'CUSTOM_COMMAND', ['all', 'PHONY'])
    elem.add_item('COMMAND', cmd)
    elem.add_item('DESC', 'Running all tests.')
    # The console pool gives the test runner live, unbuffered output.
    elem.add_item('pool', 'console')
    elem.write(outfile)
    # Alias that runs the above-defined meson-test target
    self.create_target_alias('meson-test', outfile)

    # And then benchmarks.
    cmd = self.environment.get_build_command(True) + [
        'test', '--benchmark', '--logbase',
        'benchmarklog', '--num-processes=1', '--no-rebuild']
    elem = NinjaBuildElement(self.all_outputs, 'meson-benchmark', 'CUSTOM_COMMAND', ['all', 'PHONY'])
    elem.add_item('COMMAND', cmd)
    elem.add_item('DESC', 'Running benchmark suite.')
    elem.add_item('pool', 'console')
    elem.write(outfile)
    # Alias that runs the above-defined meson-benchmark target
    self.create_target_alias('meson-benchmark', outfile)
+
def generate_rules(self, outfile):
    """Write all ninja rules: compile rules, link rules and misc utility rules."""
    outfile.write('# Rules for compiling.\n\n')
    self.generate_compile_rules(outfile)
    outfile.write('# Rules for linking.\n\n')
    num_pools = self.environment.coredata.backend_options['backend_max_links'].value
    # A positive backend_max_links caps concurrent link jobs via a ninja pool.
    if num_pools > 0:
        outfile.write('''pool link_pool
  depth = %d

''' % num_pools)
    if self.environment.is_cross_build():
        self.generate_static_link_rules(True, outfile)
    self.generate_static_link_rules(False, outfile)
    self.generate_dynamic_link_rules(outfile)
    outfile.write('# Other rules\n\n')
    outfile.write('rule CUSTOM_COMMAND\n')
    outfile.write(' command = $COMMAND\n')
    outfile.write(' description = $DESC\n')
    outfile.write(' restat = 1\n\n')
    # Ninja errors out if you have deps = gcc but no depfile, so we must
    # have two rules for custom commands.
    outfile.write('rule CUSTOM_COMMAND_DEP\n')
    outfile.write(' command = $COMMAND\n')
    outfile.write(' description = $DESC\n')
    outfile.write(' deps = gcc\n')
    outfile.write(' depfile = $DEPFILE\n')
    outfile.write(' restat = 1\n\n')
    outfile.write('rule REGENERATE_BUILD\n')
    c = [ninja_quote(quote_func(x)) for x in self.environment.get_build_command()] + \
        ['--internal',
         'regenerate',
         ninja_quote(quote_func(self.environment.get_source_dir())),
         ninja_quote(quote_func(self.environment.get_build_dir()))]
    outfile.write(" command = " + ' '.join(c) + ' --backend ninja\n')
    outfile.write(' description = Regenerating build files.\n')
    # generator = 1 marks the rule's outputs as build-system files so
    # ninja does not delete them on clean.
    outfile.write(' generator = 1\n\n')
    outfile.write('\n')
+
def generate_phony(self, outfile):
    """Emit the always-out-of-date PHONY pseudo-target used as a dependency."""
    outfile.write('# Phony build target, always out of date\n'
                  'build PHONY: phony\n'
                  '\n')
+
def generate_jar_target(self, target, outfile):
    """Write build elements that compile Java sources and pack them into a JAR."""
    fname = target.get_filename()
    outname_rel = os.path.join(self.get_target_dir(target), fname)
    src_list = target.get_sources()
    class_list = []
    compiler = target.compilers['java']
    # Flag letters for the `jar` tool: c = create archive, e = entry
    # point (main class), f = output file name. 'm' (manifest) is
    # declared but never set in this method.
    c = 'c'
    m = ''
    e = ''
    f = 'f'
    main_class = target.get_main_class()
    if main_class != '':
        e = 'e'
    # Compile every source file; collect the resulting .class paths
    # (relative to the target's private directory).
    for src in src_list:
        plain_class_path = self.generate_single_java_compile(src, target, compiler, outfile)
        class_list.append(plain_class_path)
    class_dep_list = [os.path.join(self.get_target_private_dir(target), i) for i in class_list]
    jar_rule = 'java_LINKER'
    commands = [c + m + e + f]
    if e != '':
        commands.append(main_class)
    commands.append(self.get_target_filename(target))
    # Java compilation can produce an arbitrary number of output
    # class files for a single source file. Thus tell jar to just
    # grab everything in the final package.
    commands += ['-C', self.get_target_private_dir(target), '.']
    elem = NinjaBuildElement(self.all_outputs, outname_rel, jar_rule, [])
    elem.add_dep(class_dep_list)
    elem.add_item('ARGS', commands)
    elem.write(outfile)
+
def generate_cs_resource_tasks(self, target, outfile):
    """Set up resource-compilation build elements for a C# target.

    Returns a tuple (args, deps): '-resource:' compiler arguments and
    the generated files the compile step must depend on.

    Raises InvalidArguments for resource files of unknown type.
    """
    args = []
    deps = []
    for r in target.resources:
        rel_sourcefile = os.path.join(self.build_to_src, target.subdir, r)
        if r.endswith('.resources'):
            # Already compiled; pass straight to the compiler.
            a = '-resource:' + rel_sourcefile
        elif r.endswith('.txt') or r.endswith('.resx'):
            # Must be compiled with resgen into the private dir first.
            ofilebase = os.path.splitext(os.path.basename(r))[0] + '.resources'
            ofilename = os.path.join(self.get_target_private_dir(target), ofilebase)
            elem = NinjaBuildElement(self.all_outputs, ofilename, "CUSTOM_COMMAND", rel_sourcefile)
            elem.add_item('COMMAND', ['resgen', rel_sourcefile, ofilename])
            elem.add_item('DESC', 'Compiling resource %s.' % rel_sourcefile)
            elem.write(outfile)
            deps.append(ofilename)
            a = '-resource:' + ofilename
        else:
            raise InvalidArguments('Unknown resource file %s.' % r)
        args.append(a)
    return args, deps
+
def generate_cs_target(self, target, outfile):
    """Write the build elements for compiling and linking a C# target."""
    buildtype = self.get_option_for_target('buildtype', target)
    fname = target.get_filename()
    outname_rel = os.path.join(self.get_target_dir(target), fname)
    src_list = target.get_sources()
    compiler = target.compilers['cs']
    rel_srcs = [s.rel_to_builddir(self.build_to_src) for s in src_list]
    deps = []
    commands = CompilerArgs(compiler, target.extra_args.get('cs', []))
    commands += compiler.get_buildtype_args(buildtype)
    if isinstance(target, build.Executable):
        commands.append('-target:exe')
    elif isinstance(target, build.SharedLibrary):
        commands.append('-target:library')
    else:
        raise MesonException('Unknown C# target type.')
    (resource_args, resource_deps) = self.generate_cs_resource_tasks(target, outfile)
    commands += resource_args
    deps += resource_deps
    commands += compiler.get_output_args(outname_rel)
    # Reference each linked library and depend on its output file.
    for l in target.link_targets:
        lname = os.path.join(self.get_target_dir(l), l.get_filename())
        commands += compiler.get_link_args(lname)
        deps.append(lname)
    # With debug info ('-g') the compiler also writes a .mdb file
    # alongside the assembly.
    if '-g' in commands:
        outputs = [outname_rel, outname_rel + '.mdb']
    else:
        outputs = [outname_rel]
    # Generated sources: compile the .cs ones, depend on all of them.
    generated_sources = self.get_target_generated_sources(target)
    for rel_src in generated_sources.keys():
        dirpart, fnamepart = os.path.split(rel_src)
        if rel_src.lower().endswith('.cs'):
            rel_srcs.append(rel_src)
        deps.append(rel_src)

    for dep in target.get_external_deps():
        commands.extend_direct(dep.get_link_args())
    commands += self.build.get_project_args(compiler, target.subproject)
    commands += self.build.get_global_args(compiler)

    elem = NinjaBuildElement(self.all_outputs, outputs, 'cs_COMPILER', rel_srcs)
    elem.add_dep(deps)
    elem.add_item('ARGS', commands)
    elem.write(outfile)

    self.generate_generator_list_rules(target, outfile)
+
def generate_single_java_compile(self, src, target, compiler, outfile):
    """Write the build element that compiles one Java source file.

    Returns the produced .class file path relative to the target's
    private directory.
    """
    args = []
    args += compiler.get_buildtype_args(self.get_option_for_target('buildtype', target))
    args += self.build.get_global_args(compiler)
    args += self.build.get_project_args(compiler, target.subproject)
    args += target.get_java_args()
    # All class files go into the target's private directory.
    args += compiler.get_output_args(self.get_target_private_dir(target))
    for i in target.include_dirs:
        for idir in i.get_incdirs():
            args += ['-sourcepath', os.path.join(self.build_to_src, i.curdir, idir)]
    rel_src = src.rel_to_builddir(self.build_to_src)
    # foo.java -> foo.class: [:-4] strips the 'java' suffix, keeping the dot.
    plain_class_path = src.fname[:-4] + 'class'
    rel_obj = os.path.join(self.get_target_private_dir(target), plain_class_path)
    element = NinjaBuildElement(self.all_outputs, rel_obj, compiler.get_language() + '_COMPILER', rel_src)
    element.add_item('ARGS', args)
    element.write(outfile)
    return plain_class_path
+
def generate_java_link(self, outfile):
    """Write the ninja rule that packs compiled Java classes into a JAR."""
    for line in ('rule java_LINKER\n',
                 ' command = jar $ARGS\n',
                 ' description = Creating JAR $out.\n',
                 '\n'):
        outfile.write(line)
+
def determine_dep_vapis(self, target):
    """
    Peek into the sources of BuildTargets we're linking with, and if any of
    them was built with Vala, assume that it also generated a .vapi file of
    the same name as the BuildTarget and return the path to it relative to
    the build directory.
    """
    vapis = OrderedSet()
    for dep in target.link_targets + target.link_whole_targets:
        for src in dep.sources:
            name = src.fname if hasattr(src, 'fname') else src
            if not name.endswith('vala'):
                continue
            # One Vala source is enough to mark the whole dep as Vala-built.
            vapis.add(os.path.join(self.get_target_dir(dep), dep.vala_vapi))
            break
    return list(vapis)
+
def split_vala_sources(self, t):
    """
    Splits the target's sources into .vala, .gs, .vapi, and other sources.
    Handles both pre-existing and generated sources.

    Returns a tuple (vala, vapi, others) each of which is a dictionary with
    the keys being the path to the file (relative to the build directory)
    and the value being the object that generated or represents the file.
    """
    vala = OrderedDict()
    vapi = OrderedDict()
    others = OrderedDict()
    othersgen = OrderedDict()
    # Split pre-existing sources
    for s in t.get_sources():
        # BuildTarget sources are always mesonlib.File files which are
        # either in the source root, or generated with configure_file and
        # in the build root
        if not isinstance(s, File):
            msg = 'All sources in target {!r} must be of type ' \
                  'mesonlib.File, not {!r}'.format(t, s)
            raise InvalidArguments(msg)
        f = s.rel_to_builddir(self.build_to_src)
        # .gs is Genie, Vala's alternative syntax; both compile with valac.
        if s.endswith(('.vala', '.gs')):
            srctype = vala
        elif s.endswith('.vapi'):
            srctype = vapi
        else:
            srctype = others
        srctype[f] = s
    # Split generated sources
    for gensrc in t.get_generated_sources():
        for s in gensrc.get_outputs():
            f = self.get_target_generated_dir(t, gensrc, s)
            if s.endswith(('.vala', '.gs')):
                srctype = vala
            elif s.endswith('.vapi'):
                srctype = vapi
            # Generated non-Vala (C/C++) sources. Won't be used for
            # generating the Vala compile rule below.
            else:
                srctype = othersgen
            # Duplicate outputs are disastrous
            if f in srctype and srctype[f] is not gensrc:
                msg = 'Duplicate output {0!r} from {1!r} {2!r}; ' \
                      'conflicts with {0!r} from {4!r} {3!r}' \
                      ''.format(f, type(gensrc).__name__, gensrc.name,
                                srctype[f].name, type(srctype[f]).__name__)
                raise InvalidArguments(msg)
            # Store 'somefile.vala': GeneratedList (or CustomTarget)
            srctype[f] = gensrc
    return vala, vapi, (others, othersgen)
+
def generate_vala_compile(self, target, outfile):
    """Vala is compiled into C. Set up all necessary build steps here.

    Returns (other_sources, other_generated_sources, generated_c_sources)
    so the caller can feed the produced C files into the C compile steps.

    Raises InvalidArguments if the target has no Vala/Genie sources.
    """
    (vala_src, vapi_src, other_src) = self.split_vala_sources(target)
    extra_dep_files = []
    if not vala_src:
        msg = 'Vala library {!r} has no Vala or Genie source files.'
        raise InvalidArguments(msg.format(target.name))

    valac = target.compilers['vala']
    c_out_dir = self.get_target_private_dir(target)
    # C files generated by valac
    vala_c_src = []
    # Files generated by valac
    valac_outputs = []
    # All sources that are passed to valac on the commandline
    all_files = list(vapi_src.keys())
    for (vala_file, gensrc) in vala_src.items():
        all_files.append(vala_file)
        # Figure out where the Vala compiler will write the compiled C file
        # If the Vala file is in a subdir of the build dir (in our case
        # because it was generated/built by something else), the subdir path
        # components will be preserved in the output path. But if the Vala
        # file is outside the build directory, the path components will be
        # stripped and just the basename will be used.
        if isinstance(gensrc, (build.CustomTarget, build.GeneratedList)) or gensrc.is_built:
            vala_c_file = os.path.splitext(os.path.basename(vala_file))[0] + '.c'
        else:
            path_to_target = os.path.join(self.build_to_src, target.get_subdir())
            if vala_file.startswith(path_to_target):
                vala_c_file = os.path.splitext(os.path.relpath(vala_file, path_to_target))[0] + '.c'
            else:
                vala_c_file = os.path.splitext(os.path.basename(vala_file))[0] + '.c'
        # All this will be placed inside the c_out_dir
        vala_c_file = os.path.join(c_out_dir, vala_c_file)
        vala_c_src.append(vala_c_file)
        valac_outputs.append(vala_c_file)

    args = self.generate_basic_compiler_args(target, valac)
    args += valac.get_colorout_args(self.environment.coredata.base_options.get('b_colorout').value)
    # Tell Valac to output everything in our private directory. Sadly this
    # means it will also preserve the directory components of Vala sources
    # found inside the build tree (generated sources).
    args += ['--directory', c_out_dir]
    args += ['--basedir', os.path.join(self.build_to_src, target.get_subdir())]
    if not isinstance(target, build.Executable):
        # Library name
        args += ['--library', target.name]
        # Outputted header
        hname = os.path.join(self.get_target_dir(target), target.vala_header)
        args += ['--header', hname]
        if self.is_unity(target):
            # Without this the declarations will get duplicated in the .c
            # files and cause a build failure when all of them are
            # #include-d in one .c file.
            # https://github.com/mesonbuild/meson/issues/1969
            args += ['--use-header']
        valac_outputs.append(hname)
        # Outputted vapi file
        vapiname = os.path.join(self.get_target_dir(target), target.vala_vapi)
        # Force valac to write the vapi and gir files in the target build dir.
        # Without this, it will write it inside c_out_dir
        args += ['--vapi', os.path.join('..', target.vala_vapi)]
        valac_outputs.append(vapiname)
        target.outputs += [target.vala_header, target.vala_vapi]
        # Install header and vapi to default locations if user requests this
        if len(target.install_dir) > 1 and target.install_dir[1] is True:
            target.install_dir[1] = self.environment.get_includedir()
        if len(target.install_dir) > 2 and target.install_dir[2] is True:
            target.install_dir[2] = os.path.join(self.environment.get_datadir(), 'vala', 'vapi')
        # Generate GIR if requested
        if isinstance(target.vala_gir, str):
            girname = os.path.join(self.get_target_dir(target), target.vala_gir)
            args += ['--gir', os.path.join('..', target.vala_gir)]
            valac_outputs.append(girname)
            target.outputs.append(target.vala_gir)
            # Install GIR to default location if requested by user
            if len(target.install_dir) > 3 and target.install_dir[3] is True:
                target.install_dir[3] = os.path.join(self.environment.get_datadir(), 'gir-1.0')
    # Detect gresources and add --gresources arguments for each
    for (gres, gensrc) in other_src[1].items():
        if isinstance(gensrc, modules.GResourceTarget):
            gres_xml, = self.get_custom_target_sources(gensrc)
            args += ['--gresources=' + gres_xml]
    extra_args = []

    # Extra Vala args may reference files (e.g. extra vapis); those also
    # become dependencies of the compile step.
    for a in target.extra_args.get('vala', []):
        if isinstance(a, File):
            relname = a.rel_to_builddir(self.build_to_src)
            extra_dep_files.append(relname)
            extra_args.append(relname)
        else:
            extra_args.append(a)
    dependency_vapis = self.determine_dep_vapis(target)
    extra_dep_files += dependency_vapis
    args += extra_args
    element = NinjaBuildElement(self.all_outputs, valac_outputs,
                                valac.get_language() + '_COMPILER',
                                all_files + dependency_vapis)
    element.add_item('ARGS', args)
    element.add_dep(extra_dep_files)
    element.write(outfile)
    return other_src[0], other_src[1], vala_c_src
+
def generate_rust_target(self, target, outfile):
    """Write the build element that compiles a Rust crate with rustc.

    Raises InvalidArguments on non-Rust sources or unknown target types.
    """
    rustc = target.compilers['rust']
    relsrc = []
    for i in target.get_sources():
        if not rustc.can_compile(i):
            raise InvalidArguments('Rust target %s contains a non-rust source file.' % target.get_basename())
        relsrc.append(i.rel_to_builddir(self.build_to_src))
    target_name = os.path.join(target.subdir, target.get_filename())
    # Map the meson target type to rustc's crate type; an explicit
    # rust_crate_type attribute on the target wins over the default.
    args = ['--crate-type']
    if isinstance(target, build.Executable):
        cratetype = 'bin'
    elif hasattr(target, 'rust_crate_type'):
        cratetype = target.rust_crate_type
    elif isinstance(target, build.SharedLibrary):
        cratetype = 'dylib'
    elif isinstance(target, build.StaticLibrary):
        cratetype = 'rlib'
    else:
        raise InvalidArguments('Unknown target type for rustc.')
    args.append(cratetype)
    args += ['--crate-name', target.name]
    args += rustc.get_buildtype_args(self.get_option_for_target('buildtype', target))
    # Have rustc emit a Makefile-style dep file for ninja's deps = gcc.
    depfile = os.path.join(target.subdir, target.name + '.d')
    args += ['--emit', 'dep-info={}'.format(depfile), '--emit', 'link']
    args += target.get_extra_args('rust')
    args += ['-o', os.path.join(target.subdir, target.get_filename())]
    orderdeps = [os.path.join(t.subdir, t.get_filename()) for t in target.link_targets]
    # Deduplicate library search dirs while preserving first-seen order.
    linkdirs = OrderedDict()
    for d in target.link_targets:
        linkdirs[d.subdir] = True
    for d in linkdirs.keys():
        if d == '':
            d = '.'
        args += ['-L', d]
    has_shared_deps = False
    for dep in target.get_dependencies():
        if isinstance(dep, build.SharedLibrary):
            has_shared_deps = True
    if isinstance(target, build.SharedLibrary) or has_shared_deps:
        # add prefer-dynamic if any of the Rust libraries we link
        # against are dynamic, otherwise we'll end up with
        # multiple implementations of crates
        args += ['-C', 'prefer-dynamic']

    # build the usual rpath arguments as well...

    # Set runtime-paths so we can run executables without needing to set
    # LD_LIBRARY_PATH, etc in the environment. Doesn't work on Windows.
    if '/' in target.name or '\\' in target.name:
        # Target names really should not have slashes in them, but
        # unfortunately we did not check for that and some downstream projects
        # now have them. Once slashes are forbidden, remove this bit.
        target_slashname_workaround_dir = os.path.join(os.path.split(target.name)[0],
                                                       self.get_target_dir(target))
    else:
        target_slashname_workaround_dir = self.get_target_dir(target)
    rpath_args = rustc.build_rpath_args(self.environment.get_build_dir(),
                                        target_slashname_workaround_dir,
                                        self.determine_rpath_dirs(target),
                                        target.build_rpath,
                                        target.install_rpath)
    # ... but then add rustc's sysroot to account for rustup
    # installations
    for rpath_arg in rpath_args:
        args += ['-C', 'link-arg=' + rpath_arg + ':' + os.path.join(rustc.get_sysroot(), 'lib')]
    element = NinjaBuildElement(self.all_outputs, target_name, 'rust_COMPILER', relsrc)
    if len(orderdeps) > 0:
        element.add_orderdep(orderdeps)
    element.add_item('ARGS', args)
    element.add_item('targetdep', depfile)
    element.add_item('cratetype', cratetype)
    element.write(outfile)
    if isinstance(target, build.SharedLibrary):
        self.generate_shsym(outfile, target)
+
def swift_module_file_name(self, target):
    """Return the path of the target's .swiftmodule inside its private dir."""
    module = self.target_swift_modulename(target) + '.swiftmodule'
    return os.path.join(self.get_target_private_dir(target), module)
+
def target_swift_modulename(self, target):
    """Return the Swift module name, which is simply the target's name."""
    return target.name
+
def is_swift_target(self, target):
    """Return True if any of the target's sources is a Swift file."""
    return any(s.endswith('swift') for s in target.sources)
+
def determine_swift_dep_modules(self, target):
    """Collect .swiftmodule files of every Swift target we link against."""
    return [self.swift_module_file_name(dep)
            for dep in target.link_targets
            if self.is_swift_target(dep)]
+
def determine_swift_dep_dirs(self, target):
    """Absolute private dirs of link targets, used as module search paths."""
    return [self.get_target_private_dir_abs(dep) for dep in target.link_targets]
+
def get_swift_link_deps(self, target):
    """Filenames (relative to the build root) of all link targets."""
    return [self.get_target_filename(dep) for dep in target.link_targets]
+
def split_swift_generated_sources(self, target):
    """Partition generated sources into (.swift files, everything else)."""
    swift_srcs = []
    other_srcs = []
    for src in self.get_target_generated_sources(target):
        bucket = swift_srcs if src.endswith('.swift') else other_srcs
        bucket.append(src)
    return swift_srcs, other_srcs
+
def generate_swift_target(self, target, outfile):
    """Write all build elements for a Swift target.

    swiftc is invoked inside the target's private dir (via dirchanger),
    so paths passed to it are made absolute. Compilation happens twice:
    once to emit object files and once for the .swiftmodule, because
    swiftc cannot produce both in one invocation.

    Raises InvalidArguments on non-Swift sources and MesonException for
    unsupported target types.
    """
    module_name = self.target_swift_modulename(target)
    swiftc = target.compilers['swift']
    abssrc = []
    abs_headers = []
    header_imports = []
    for i in target.get_sources():
        if swiftc.can_compile(i):
            relsrc = i.rel_to_builddir(self.build_to_src)
            abss = os.path.normpath(os.path.join(self.environment.get_build_dir(), relsrc))
            abssrc.append(abss)
        elif self.environment.is_header(i):
            relh = i.rel_to_builddir(self.build_to_src)
            absh = os.path.normpath(os.path.join(self.environment.get_build_dir(), relh))
            abs_headers.append(absh)
            header_imports += swiftc.get_header_import_args(absh)
        else:
            raise InvalidArguments('Swift target %s contains a non-swift source file.' % target.get_basename())
    os.makedirs(self.get_target_private_dir_abs(target), exist_ok=True)
    compile_args = swiftc.get_compile_only_args()
    compile_args += swiftc.get_module_args(module_name)
    compile_args += self.build.get_project_args(swiftc, target.subproject)
    compile_args += self.build.get_global_args(swiftc)
    # Expand include dirs to absolute source-tree paths.
    for i in reversed(target.get_include_dirs()):
        basedir = i.get_curdir()
        for d in i.get_incdirs():
            if d not in ('', '.'):
                expdir = os.path.join(basedir, d)
            else:
                expdir = basedir
            srctreedir = os.path.normpath(os.path.join(self.environment.get_build_dir(), self.build_to_src, expdir))
            sargs = swiftc.get_include_args(srctreedir)
            compile_args += sargs
    link_args = swiftc.get_output_args(os.path.join(self.environment.get_build_dir(), self.get_target_filename(target)))
    link_args += self.build.get_project_link_args(swiftc, target.subproject)
    link_args += self.build.get_global_link_args(swiftc)
    rundir = self.get_target_private_dir(target)
    out_module_name = self.swift_module_file_name(target)
    in_module_files = self.determine_swift_dep_modules(target)
    abs_module_dirs = self.determine_swift_dep_dirs(target)
    module_includes = []
    for x in abs_module_dirs:
        module_includes += swiftc.get_include_args(x)
    link_deps = self.get_swift_link_deps(target)
    abs_link_deps = [os.path.join(self.environment.get_build_dir(), x) for x in link_deps]
    for d in target.link_targets:
        reldir = self.get_target_dir(d)
        if reldir == '':
            reldir = '.'
        link_args += ['-L', os.path.normpath(os.path.join(self.environment.get_build_dir(), reldir))]
    (rel_generated, _) = self.split_swift_generated_sources(target)
    abs_generated = [os.path.join(self.environment.get_build_dir(), x) for x in rel_generated]
    # We need absolute paths because swiftc needs to be invoked in a subdir
    # and this is the easiest way about it.
    objects = [] # Relative to swift invocation dir
    rel_objects = [] # Relative to build.ninja
    for i in abssrc + abs_generated:
        base = os.path.split(i)[1]
        oname = os.path.splitext(base)[0] + '.o'
        objects.append(oname)
        rel_objects.append(os.path.join(self.get_target_private_dir(target), oname))

    # Swiftc does not seem to be able to emit objects and module files in one go.
    elem = NinjaBuildElement(self.all_outputs, rel_objects,
                             'swift_COMPILER',
                             abssrc)
    elem.add_dep(in_module_files + rel_generated)
    elem.add_dep(abs_headers)
    elem.add_item('ARGS', compile_args + header_imports + abs_generated + module_includes)
    elem.add_item('RUNDIR', rundir)
    elem.write(outfile)
    elem = NinjaBuildElement(self.all_outputs, out_module_name,
                             'swift_COMPILER',
                             abssrc)
    elem.add_dep(in_module_files + rel_generated)
    elem.add_item('ARGS', compile_args + abs_generated + module_includes + swiftc.get_mod_gen_args())
    elem.add_item('RUNDIR', rundir)
    elem.write(outfile)
    if isinstance(target, build.StaticLibrary):
        elem = self.generate_link(target, outfile, self.get_target_filename(target),
                                  rel_objects, self.build.static_linker)
        elem.write(outfile)
    elif isinstance(target, build.Executable):
        elem = NinjaBuildElement(self.all_outputs, self.get_target_filename(target), 'swift_COMPILER', [])
        elem.add_dep(rel_objects)
        elem.add_dep(link_deps)
        elem.add_item('ARGS', link_args + swiftc.get_std_exe_link_args() + objects + abs_link_deps)
        elem.add_item('RUNDIR', rundir)
        elem.write(outfile)
    else:
        raise MesonException('Swift supports only executable and static library targets.')
+
def generate_static_link_rules(self, is_cross, outfile):
    """Write the STATIC_LINKER (or STATIC_CROSS_LINKER) ninja rule.

    Also emits the JAR 'linker' rule here since Java has no separate
    static/dynamic link split.
    """
    num_pools = self.environment.coredata.backend_options['backend_max_links'].value
    if 'java' in self.build.compilers:
        if not is_cross:
            self.generate_java_link(outfile)
    if is_cross:
        if self.environment.cross_info.need_cross_compiler():
            static_linker = self.build.static_cross_linker
        else:
            # There is no cross compiler proper; the native static
            # linker stands in for the cross one.
            static_linker = self.build.static_linker
        crstr = '_CROSS'
    else:
        static_linker = self.build.static_linker
        crstr = ''
    if static_linker is None:
        return
    rule = 'rule STATIC%s_LINKER\n' % crstr
    # We don't use @file.rsp on Windows with ArLinker because llvm-ar and
    # gcc-ar blindly pass the --plugin argument to `ar` and you cannot pass
    # options as arguments while using the @file.rsp syntax.
    # See: https://github.com/mesonbuild/meson/issues/1646
    if mesonlib.is_windows() and not isinstance(static_linker, ArLinker):
        command_template = ''' command = {executable} @$out.rsp
 rspfile = $out.rsp
 rspfile_content = $LINK_ARGS {output_args} $in
'''
    else:
        command_template = ' command = {executable} $LINK_ARGS {output_args} $in\n'
    cmdlist = []
    # FIXME: Must normalize file names with pathlib.Path before writing
    # them out to fix this properly on Windows. See:
    # https://github.com/mesonbuild/meson/issues/1517
    # https://github.com/mesonbuild/meson/issues/1526
    if isinstance(static_linker, ArLinker) and not mesonlib.is_windows():
        # `ar` has no options to overwrite archives. It always appends,
        # which is never what we want. Delete an existing library first if
        # it exists. https://github.com/mesonbuild/meson/issues/1355
        cmdlist = [execute_wrapper, rmfile_prefix.format('$out')]
    cmdlist += static_linker.get_exelist()
    command = command_template.format(
        executable=' '.join(cmdlist),
        output_args=' '.join(static_linker.get_output_args('$out')))
    description = ' description = Linking static target $out.\n\n'
    outfile.write(rule)
    outfile.write(command)
    if num_pools > 0:
        # Respect the backend_max_links cap via the shared link pool.
        outfile.write(' pool = link_pool\n')
    outfile.write(description)
+
def generate_dynamic_link_rules(self, outfile):
    """Write per-language dynamic LINKER rules plus the SHSYM rule.

    SHSYM runs meson's internal symbolextractor so relinking of
    dependents can be skipped when a shared library's ABI is unchanged.
    """
    num_pools = self.environment.coredata.backend_options['backend_max_links'].value
    # Pairs of (compiler dict, is_cross) to generate rules for.
    ctypes = [(self.build.compilers, False)]
    if self.environment.is_cross_build():
        if self.environment.cross_info.need_cross_compiler():
            ctypes.append((self.build.cross_compilers, True))
        else:
            # Native compiler masquerades as the cross compiler.
            ctypes.append((self.build.compilers, True))
    else:
        # NOTE(review): on native builds this appends build.cross_compilers,
        # which is presumably empty here so no rules result — confirm.
        ctypes.append((self.build.cross_compilers, True))
    for (complist, is_cross) in ctypes:
        for langname, compiler in complist.items():
            # These languages link through their own dedicated rules.
            if langname == 'java' \
                    or langname == 'vala' \
                    or langname == 'rust' \
                    or langname == 'cs':
                continue
            crstr = ''
            cross_args = []
            if is_cross:
                crstr = '_CROSS'
                try:
                    cross_args = self.environment.cross_info.config['properties'][langname + '_link_args']
                except KeyError:
                    pass
            rule = 'rule %s%s_LINKER\n' % (langname, crstr)
            if mesonlib.is_windows():
                # Use a response file to dodge the command-line length limit.
                command_template = ''' command = {executable} @$out.rsp
 rspfile = $out.rsp
 rspfile_content = $ARGS {output_args} $in $LINK_ARGS {cross_args} $aliasing
'''
            else:
                command_template = ' command = {executable} $ARGS {output_args} $in $LINK_ARGS {cross_args} $aliasing\n'
            command = command_template.format(
                executable=' '.join(compiler.get_linker_exelist()),
                cross_args=' '.join(cross_args),
                output_args=' '.join(compiler.get_linker_output_args('$out'))
            )
            description = ' description = Linking target $out.\n'
            outfile.write(rule)
            outfile.write(command)
            if num_pools > 0:
                outfile.write(' pool = link_pool\n')
            outfile.write(description)
            outfile.write('\n')
    outfile.write('\n')
    args = [ninja_quote(quote_func(x)) for x in self.environment.get_build_command()] + \
        ['--internal',
         'symbolextractor',
         '$in',
         '$out']
    symrule = 'rule SHSYM\n'
    symcmd = ' command = ' + ' '.join(args) + ' $CROSS\n'
    synstat = ' restat = 1\n'
    syndesc = ' description = Generating symbol file $out.\n'
    outfile.write(symrule)
    outfile.write(symcmd)
    outfile.write(synstat)
    outfile.write(syndesc)
    outfile.write('\n')
+
def generate_java_compile_rule(self, compiler, outfile):
    """Write the ninja rule for compiling Java sources."""
    exe = ' '.join(ninja_quote(part) for part in compiler.get_exelist())
    outfile.write('rule %s_COMPILER\n' % compiler.get_language())
    outfile.write(' command = %s $ARGS $in\n' % exe)
    outfile.write(' description = Compiling Java object $in.\n')
    outfile.write('\n')
+
def generate_cs_compile_rule(self, compiler, outfile):
    """Write the ninja rule for compiling C# targets."""
    exe = ' '.join(ninja_quote(part) for part in compiler.get_exelist())
    outfile.write('rule %s_COMPILER\n' % compiler.get_language())
    outfile.write(' command = %s $ARGS $in\n' % exe)
    outfile.write(' description = Compiling C Sharp target $out.\n')
    outfile.write('\n')
+
def generate_vala_compile_rules(self, compiler, outfile):
    """Write the ninja rule for compiling Vala sources into C."""
    exe = ' '.join(ninja_quote(part) for part in compiler.get_exelist())
    outfile.write('rule %s_COMPILER\n' % compiler.get_language())
    outfile.write(' command = %s $ARGS $in\n' % exe)
    outfile.write(' description = Compiling Vala source $in.\n')
    # ValaC does this always to take advantage of it.
    outfile.write(' restat = 1\n')
    outfile.write('\n')
+
def generate_rust_compile_rules(self, compiler, outfile):
    """Write the ninja rule for compiling Rust crates."""
    exe = ' '.join(ninja_quote(part) for part in compiler.get_exelist())
    outfile.write('rule %s_COMPILER\n' % compiler.get_language())
    outfile.write(' command = %s $ARGS $in\n' % exe)
    outfile.write(' description = Compiling Rust source $in.\n')
    # rustc emits a Makefile-style dep file ($targetdep); consume it
    # with ninja's gcc-style dep handling.
    outfile.write(' depfile = $targetdep\n')
    outfile.write(' deps = gcc\n')
    outfile.write('\n')
+
def generate_swift_compile_rules(self, compiler, outfile):
    """Write the ninja rule for compiling Swift sources.

    swiftc must run inside the target's private directory, so the command
    is wrapped in meson's internal 'dirchanger' helper.
    """
    dirchanger = [ninja_quote(x) for x in self.environment.get_build_command()] + [
        '--internal',
        'dirchanger',
        '$RUNDIR',
    ]
    invoc = (' '.join(dirchanger) + ' ' +
             ' '.join(ninja_quote(i) for i in compiler.get_exelist()))
    outfile.write('rule %s_COMPILER\n' % compiler.get_language())
    outfile.write(' command = %s $ARGS $in\n' % invoc)
    outfile.write(' description = Compiling Swift source $in.\n')
    outfile.write('\n')
+
+ def generate_fortran_dep_hack(self, outfile):
+ if mesonlib.is_windows():
+ cmd = 'cmd /C ""'
+ else:
+ cmd = 'true'
+ template = '''# Workaround for these issues:
+# https://groups.google.com/forum/#!topic/ninja-build/j-2RfBIOd_8
+# https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485
+rule FORTRAN_DEP_HACK
+ command = %s
+ description = Dep hack
+ restat = 1
+
+'''
+ outfile.write(template % cmd)
+
+ def generate_llvm_ir_compile_rule(self, compiler, is_cross, outfile):
+ if getattr(self, 'created_llvm_ir_rule', False):
+ return
+ rule = 'rule llvm_ir{}_COMPILER\n'.format('_CROSS' if is_cross else '')
+ if mesonlib.is_windows():
+ command_template = ' command = {executable} @$out.rsp\n' \
+ ' rspfile = $out.rsp\n' \
+ ' rspfile_content = {cross_args} $ARGS {output_args} {compile_only_args} $in\n'
+ else:
+ command_template = ' command = {executable} {cross_args} $ARGS {output_args} {compile_only_args} $in\n'
+ command = command_template.format(
+ executable=' '.join([ninja_quote(i) for i in compiler.get_exelist()]),
+ cross_args=' '.join(self.get_cross_info_lang_args(compiler.language, is_cross)),
+ output_args=' '.join(compiler.get_output_args('$out')),
+ compile_only_args=' '.join(compiler.get_compile_only_args())
+ )
+ description = ' description = Compiling LLVM IR object $in.\n'
+ outfile.write(rule)
+ outfile.write(command)
+ outfile.write(description)
+ outfile.write('\n')
+ self.created_llvm_ir_rule = True
+
+ def get_cross_info_lang_args(self, lang, is_cross):
+ if is_cross:
+ try:
+ return self.environment.cross_info.config['properties'][lang + '_args']
+ except KeyError:
+ pass
+ return []
+
    def generate_compile_rule_for(self, langname, compiler, is_cross, outfile):
        """Write the Ninja compile rule for one (language, compiler) pair.

        Languages with dedicated rule writers (Java, C#, Vala, Rust, Swift)
        are delegated to those and only exist in native form; every other
        language gets a generic %lang%[_CROSS]_COMPILER rule. Fortran
        additionally gets the FORTRAN_DEP_HACK rule first.
        """
        if langname == 'java':
            if not is_cross:
                self.generate_java_compile_rule(compiler, outfile)
            return
        if langname == 'cs':
            if not is_cross:
                self.generate_cs_compile_rule(compiler, outfile)
            return
        if langname == 'vala':
            if not is_cross:
                self.generate_vala_compile_rules(compiler, outfile)
            return
        if langname == 'rust':
            if not is_cross:
                self.generate_rust_compile_rules(compiler, outfile)
            return
        if langname == 'swift':
            if not is_cross:
                self.generate_swift_compile_rules(compiler, outfile)
            return
        if langname == 'fortran':
            # Fortran module dependencies need the extra no-op rule.
            self.generate_fortran_dep_hack(outfile)
        if is_cross:
            crstr = '_CROSS'
        else:
            crstr = ''
        rule = 'rule %s%s_COMPILER\n' % (langname, crstr)
        depargs = compiler.get_dependency_gen_args('$out', '$DEPFILE')
        quoted_depargs = []
        for d in depargs:
            # Leave Ninja variables unquoted so they still expand.
            if d != '$out' and d != '$in':
                d = quote_func(d)
            quoted_depargs.append(d)
        cross_args = self.get_cross_info_lang_args(langname, is_cross)
        if mesonlib.is_windows():
            # Use a response file on Windows to stay under the command-line
            # length limit.
            command_template = ''' command = {executable} @$out.rsp
 rspfile = $out.rsp
 rspfile_content = {cross_args} $ARGS {dep_args} {output_args} {compile_only_args} $in
'''
        else:
            command_template = ' command = {executable} {cross_args} $ARGS {dep_args} {output_args} {compile_only_args} $in\n'
        command = command_template.format(
            executable=' '.join([ninja_quote(i) for i in compiler.get_exelist()]),
            cross_args=' '.join(cross_args),
            dep_args=' '.join(quoted_depargs),
            output_args=' '.join(compiler.get_output_args('$out')),
            compile_only_args=' '.join(compiler.get_compile_only_args())
        )
        description = ' description = Compiling %s object $out.\n' % compiler.get_display_language()
        # Ninja's 'msvc' deps mode vs gcc-style depfile scanning.
        if compiler.get_id() == 'msvc':
            deps = ' deps = msvc\n'
        else:
            deps = ' deps = gcc\n'
            deps += ' depfile = $DEPFILE\n'
        outfile.write(rule)
        outfile.write(command)
        outfile.write(deps)
        outfile.write(description)
        outfile.write('\n')
+
+ def generate_pch_rule_for(self, langname, compiler, is_cross, outfile):
+ if langname != 'c' and langname != 'cpp':
+ return
+ if is_cross:
+ crstr = '_CROSS'
+ else:
+ crstr = ''
+ rule = 'rule %s%s_PCH\n' % (langname, crstr)
+ depargs = compiler.get_dependency_gen_args('$out', '$DEPFILE')
+ cross_args = []
+ if is_cross:
+ try:
+ cross_args = self.environment.cross_info.config['properties'][langname + '_args']
+ except KeyError:
+ pass
+
+ quoted_depargs = []
+ for d in depargs:
+ if d != '$out' and d != '$in':
+ d = quote_func(d)
+ quoted_depargs.append(d)
+ if compiler.get_id() == 'msvc':
+ output = ''
+ else:
+ output = ' '.join(compiler.get_output_args('$out'))
+ command = " command = {executable} {cross_args} $ARGS {dep_args} {output_args} {compile_only_args} $in\n".format(
+ executable=' '.join(compiler.get_exelist()),
+ cross_args=' '.join(cross_args),
+ dep_args=' '.join(quoted_depargs),
+ output_args=output,
+ compile_only_args=' '.join(compiler.get_compile_only_args())
+ )
+ description = ' description = Precompiling header %s.\n' % '$in'
+ if compiler.get_id() == 'msvc':
+ deps = ' deps = msvc\n'
+ else:
+ deps = ' deps = gcc\n'
+ deps += ' depfile = $DEPFILE\n'
+ outfile.write(rule)
+ outfile.write(command)
+ outfile.write(deps)
+ outfile.write(description)
+ outfile.write('\n')
+
+ def generate_compile_rules(self, outfile):
+ for langname, compiler in self.build.compilers.items():
+ if compiler.get_id() == 'clang':
+ self.generate_llvm_ir_compile_rule(compiler, False, outfile)
+ self.generate_compile_rule_for(langname, compiler, False, outfile)
+ self.generate_pch_rule_for(langname, compiler, False, outfile)
+ if self.environment.is_cross_build():
+ # In case we are going a target-only build, make the native compilers
+ # masquerade as cross compilers.
+ if self.environment.cross_info.need_cross_compiler():
+ cclist = self.build.cross_compilers
+ else:
+ cclist = self.build.compilers
+ for langname, compiler in cclist.items():
+ if compiler.get_id() == 'clang':
+ self.generate_llvm_ir_compile_rule(compiler, True, outfile)
+ self.generate_compile_rule_for(langname, compiler, True, outfile)
+ self.generate_pch_rule_for(langname, compiler, True, outfile)
+ outfile.write('\n')
+
+ def generate_generator_list_rules(self, target, outfile):
+ # CustomTargets have already written their rules and
+ # CustomTargetIndexes don't actually get generated, so write rules for
+ # GeneratedLists here
+ for genlist in target.get_generated_sources():
+ if isinstance(genlist, (build.CustomTarget, build.CustomTargetIndex)):
+ continue
+ self.generate_genlist_for_target(genlist, target, outfile)
+
+ def replace_paths(self, target, args):
+ source_target_dir = self.get_target_source_dir(target)
+ relout = self.get_target_private_dir(target)
+ args = [x.replace("@SOURCE_DIR@", self.build_to_src).replace("@BUILD_DIR@", relout)
+ for x in args]
+ args = [x.replace("@CURRENT_SOURCE_DIR@", source_target_dir) for x in args]
+ args = [x.replace("@SOURCE_ROOT@", self.build_to_src).replace("@BUILD_ROOT@", '.')
+ for x in args]
+ return args
+
+ def generate_genlist_for_target(self, genlist, target, outfile):
+ generator = genlist.get_generator()
+ exe = generator.get_exe()
+ exe_arr = self.exe_object_to_cmd_array(exe)
+ infilelist = genlist.get_inputs()
+ outfilelist = genlist.get_outputs()
+ extra_dependencies = [os.path.join(self.build_to_src, i) for i in genlist.extra_depends]
+ source_target_dir = self.get_target_source_dir(target)
+ for i in range(len(infilelist)):
+ if len(generator.outputs) == 1:
+ sole_output = os.path.join(self.get_target_private_dir(target), outfilelist[i])
+ else:
+ sole_output = ''
+ curfile = infilelist[i]
+ infilename = curfile.rel_to_builddir(self.build_to_src)
+ base_args = generator.get_arglist(infilename)
+ outfiles = genlist.get_outputs_for(curfile)
+ outfiles = [os.path.join(self.get_target_private_dir(target), of) for of in outfiles]
+ if generator.depfile is None:
+ rulename = 'CUSTOM_COMMAND'
+ args = base_args
+ else:
+ rulename = 'CUSTOM_COMMAND_DEP'
+ depfilename = generator.get_dep_outname(infilename)
+ depfile = os.path.join(self.get_target_private_dir(target), depfilename)
+ args = [x.replace('@DEPFILE@', depfile) for x in base_args]
+ args = [x.replace("@INPUT@", infilename).replace('@OUTPUT@', sole_output)
+ for x in args]
+ args = self.replace_outputs(args, self.get_target_private_dir(target), outfilelist)
+ # We have consumed output files, so drop them from the list of remaining outputs.
+ if sole_output == '':
+ outfilelist = outfilelist[len(generator.outputs):]
+ relout = self.get_target_private_dir(target)
+ args = self.replace_paths(target, args)
+ cmdlist = exe_arr + self.replace_extra_args(args, genlist)
+ if generator.capture:
+ exe_data = self.serialize_executable(
+ cmdlist[0],
+ cmdlist[1:],
+ self.environment.get_build_dir(),
+ capture=outfiles[0]
+ )
+ cmd = self.environment.get_build_command() + ['--internal', 'exe', exe_data]
+ abs_pdir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target))
+ os.makedirs(abs_pdir, exist_ok=True)
+ else:
+ cmd = cmdlist
+
+ elem = NinjaBuildElement(self.all_outputs, outfiles, rulename, infilename)
+ if generator.depfile is not None:
+ elem.add_item('DEPFILE', depfile)
+ if len(extra_dependencies) > 0:
+ elem.add_dep(extra_dependencies)
+ elem.add_item('DESC', 'Generating {!r}.'.format(sole_output))
+ if isinstance(exe, build.BuildTarget):
+ elem.add_dep(self.get_target_filename(exe))
+ elem.add_item('COMMAND', cmd)
+ elem.write(outfile)
+
+ def scan_fortran_module_outputs(self, target):
+ compiler = None
+ for lang, c in self.build.compilers.items():
+ if lang == 'fortran':
+ compiler = c
+ break
+ if compiler is None:
+ self.fortran_deps[target.get_basename()] = {}
+ return
+ modre = re.compile(r"\s*module\s+(\w+)", re.IGNORECASE)
+ module_files = {}
+ for s in target.get_sources():
+ # FIXME, does not work for Fortran sources generated by
+ # custom_target() and generator() as those are run after
+ # the configuration (configure_file() is OK)
+ if not compiler.can_compile(s):
+ continue
+ filename = s.absolute_path(self.environment.get_source_dir(),
+ self.environment.get_build_dir())
+ # Some Fortran editors save in weird encodings,
+ # but all the parts we care about are in ASCII.
+ with open(filename, errors='ignore') as f:
+ for line in f:
+ modmatch = modre.match(line)
+ if modmatch is not None:
+ modname = modmatch.group(1).lower()
+ if modname == 'procedure':
+ # MODULE PROCEDURE construct
+ continue
+ if modname in module_files:
+ raise InvalidArguments(
+ 'Namespace collision: module %s defined in '
+ 'two files %s and %s.' %
+ (modname, module_files[modname], s))
+ module_files[modname] = s
+ self.fortran_deps[target.get_basename()] = module_files
+
    def get_fortran_deps(self, compiler, src, target):
        """Scan *src* for USE statements and return the .mod files it needs.

        Returns a list of module-file paths inside the target's private
        dir. Modules that no source of this target provides (e.g. ones
        supplied by the compiler) are silently skipped, as is the module
        a file exports itself.
        """
        mod_files = []
        usere = re.compile(r"\s*use\s+(\w+)", re.IGNORECASE)
        dirname = self.get_target_private_dir(target)
        # Module -> defining-source map built by scan_fortran_module_outputs.
        tdeps = self.fortran_deps[target.get_basename()]
        with open(src) as f:
            for line in f:
                usematch = usere.match(line)
                if usematch is not None:
                    usename = usematch.group(1).lower()
                    if usename not in tdeps:
                        # The module is not provided by any source file. This
                        # is due to:
                        # a) missing file/typo/etc
                        # b) using a module provided by the compiler, such as
                        #    OpenMP
                        # There's no easy way to tell which is which (that I
                        # know of) so just ignore this and go on. Ideally we
                        # would print a warning message to the user but this is
                        # a common occurrence, which would lead to lots of
                        # distracting noise.
                        continue
                    mod_source_file = tdeps[usename]
                    # Check if a source uses a module it exports itself.
                    # Potential bug if multiple targets have a file with
                    # the same name.
                    if mod_source_file.fname == os.path.split(src)[1]:
                        continue
                    mod_name = compiler.module_name_to_filename(
                        usematch.group(1))
                    mod_files.append(os.path.join(dirname, mod_name))
    return mod_files
+
+ def get_cross_stdlib_args(self, target, compiler):
+ if not target.is_cross:
+ return []
+ if not self.environment.cross_info.has_stdlib(compiler.language):
+ return []
+ return compiler.get_no_stdinc_args()
+
    def get_compile_debugfile_args(self, compiler, target, objfile):
        """Return per-object debug-file compile flags (/Fd, /FS); [] for non-MSVC."""
        if compiler.id != 'msvc':
            return []
        # The way MSVC uses PDB files is documented exactly nowhere so
        # the following is what we have been able to decipher via
        # reverse engineering.
        #
        # Each object file gets the path of its PDB file written
        # inside it. This can be either the final PDB (for, say,
        # foo.exe) or an object pdb (for foo.obj). If the former, then
        # each compilation step locks the pdb file for writing, which
        # is a bottleneck and object files from one target can not be
        # used in a different target. The latter seems to be the
        # sensible one (and what Unix does) but there is a catch. If
        # you try to use precompiled headers MSVC will error out
        # because both source and pch pdbs go in the same file and
        # they must be the same.
        #
        # This means:
        #
        # - pch files must be compiled anew for every object file (negating
        #   the entire point of having them in the first place)
        # - when using pch, output must go to the target pdb
        #
        # Since both of these are broken in some way, use the one that
        # works for each target. This unfortunately means that you
        # can't combine pch and object extraction in a single target.
        #
        # PDB files also lead to filename collisions. A target foo.exe
        # has a corresponding foo.pdb. A shared library foo.dll _also_
        # has pdb file called foo.pdb. So will a static library
        # foo.lib, which clobbers both foo.pdb _and_ the dll file's
        # export library called foo.lib (by default, currently we name
        # them libfoo.a to avoid this issue). You can give the files
        # unique names such as foo_exe.pdb but VC also generates a
        # bunch of other files which take their names from the target
        # basename (i.e. "foo") and stomp on each other.
        #
        # CMake solves this problem by doing two things. First of all
        # static libraries do not generate pdb files at
        # all. Presumably you don't need them and VC is smart enough
        # to look up the original data when linking (speculation, not
        # tested). The second solution is that you can only have
        # target named "foo" as an exe, shared lib _or_ static
        # lib. This makes filename collisions not happen. The downside
        # is that you can't have an executable foo that uses a shared
        # library libfoo.so, which is a common idiom on Unix.
        #
        # If you feel that the above is completely wrong and all of
        # this is actually doable, please send patches.

        if target.has_pch():
            tfilename = self.get_target_filename_abs(target)
            return compiler.get_compile_debugfile_args(tfilename, pch=True)
        else:
            return compiler.get_compile_debugfile_args(objfile, pch=False)
+
+ def get_link_debugfile_args(self, linker, target, outname):
+ return linker.get_link_debugfile_args(outname)
+
    def generate_llvm_ir_compile(self, target, outfile, src):
        """Write the build statement compiling *src* into an LLVM IR object.

        Returns the relative path of the produced object file.
        """
        compiler = get_compiler_for_source(target.compilers.values(), src)
        commands = CompilerArgs(compiler)
        # Compiler args for compiling this target
        commands += compilers.get_base_compile_args(self.environment.coredata.base_options,
                                                    compiler)
        if isinstance(src, File):
            if src.is_built:
                src_filename = os.path.join(src.subdir, src.fname)
            else:
                src_filename = src.fname
        elif os.path.isabs(src):
            src_filename = os.path.basename(src)
        else:
            src_filename = src
        # Flatten the source path into a unique object name in the private dir.
        obj_basename = src_filename.replace('/', '_').replace('\\', '_')
        rel_obj = os.path.join(self.get_target_private_dir(target), obj_basename)
        rel_obj += '.' + self.environment.get_object_suffix()
        commands += self.get_compile_debugfile_args(compiler, target, rel_obj)
        if isinstance(src, File) and src.is_built:
            rel_src = src.fname
        elif isinstance(src, File):
            rel_src = src.rel_to_builddir(self.build_to_src)
        else:
            # Plain strings (non-File) are not accepted here.
            raise InvalidArguments('Invalid source type: {!r}'.format(src))
        # Write the Ninja build command
        compiler_name = 'llvm_ir{}_COMPILER'.format('_CROSS' if target.is_cross else '')
        element = NinjaBuildElement(self.all_outputs, rel_obj, compiler_name, rel_src)
        # Convert from GCC-style link argument naming to the naming used by the
        # current compiler.
        commands = commands.to_native()
        element.add_item('ARGS', commands)
        element.write(outfile)
        return rel_obj
+
+ def get_source_dir_include_args(self, target, compiler):
+ curdir = target.get_subdir()
+ tmppath = os.path.normpath(os.path.join(self.build_to_src, curdir))
+ return compiler.get_include_args(tmppath, False)
+
+ def get_build_dir_include_args(self, target, compiler):
+ curdir = target.get_subdir()
+ if curdir == '':
+ curdir = '.'
+ return compiler.get_include_args(curdir, False)
+
+ def get_custom_target_dir_include_args(self, target, compiler):
+ custom_target_include_dirs = []
+ for i in target.get_generated_sources():
+ # Generator output goes into the target private dir which is
+ # already in the include paths list. Only custom targets have their
+ # own target build dir.
+ if not isinstance(i, (build.CustomTarget, build.CustomTargetIndex)):
+ continue
+ idir = self.get_target_dir(i)
+ if idir not in custom_target_include_dirs:
+ custom_target_include_dirs.append(idir)
+ incs = []
+ for i in custom_target_include_dirs:
+ incs += compiler.get_include_args(i, False)
+ return incs
+
    def _generate_single_compile(self, target, compiler, is_generated=False):
        """Build the CompilerArgs shared by every source of *target*.

        Ordering is the whole contract here: each section intentionally
        overrides the ones added before it. The result is cached per
        (target, compiler, is_generated) by generate_single_compile.
        """
        base_proxy = backends.OptionOverrideProxy(target.option_overrides,
                                                  self.environment.coredata.base_options)
        # Create an empty commands list, and start adding arguments from
        # various sources in the order in which they must override each other
        commands = CompilerArgs(compiler)
        # Add compiler args for compiling this target derived from 'base' build
        # options passed on the command-line, in default_options, etc.
        # These have the lowest priority.
        commands += compilers.get_base_compile_args(base_proxy,
                                                    compiler)
        # The code generated by valac is usually crap and has tons of unused
        # variables and such, so disable warnings for Vala C sources.
        no_warn_args = (is_generated == 'vala')
        # Add compiler args and include paths from several sources; defaults,
        # build options, external dependencies, etc.
        commands += self.generate_basic_compiler_args(target, compiler, no_warn_args)
        # Add custom target dirs as includes automatically, but before
        # target-specific include directories.
        # XXX: Not sure if anyone actually uses this? It can cause problems in
        # situations which increase the likelihood for a header name collision,
        # such as in subprojects.
        commands += self.get_custom_target_dir_include_args(target, compiler)
        # Add include dirs from the `include_directories:` kwarg on the target
        # and from `include_directories:` of internal deps of the target.
        #
        # Target include dirs should override internal deps include dirs.
        # This is handled in BuildTarget.process_kwargs()
        #
        # Include dirs from internal deps should override include dirs from
        # external deps and must maintain the order in which they are specified.
        # Hence, we must reverse the list so that the order is preserved.
        for i in reversed(target.get_include_dirs()):
            basedir = i.get_curdir()
            for d in i.get_incdirs():
                # Avoid superfluous '/.' at the end of paths when d is '.'
                if d not in ('', '.'):
                    expdir = os.path.join(basedir, d)
                else:
                    expdir = basedir
                srctreedir = os.path.join(self.build_to_src, expdir)
                # Add source subdir first so that the build subdir overrides it
                sargs = compiler.get_include_args(srctreedir, i.is_system)
                commands += sargs
                # There may be include dirs where a build directory has not been
                # created for some source dir. For example if someone does this:
                #
                # inc = include_directories('foo/bar/baz')
                #
                # But never subdir()s into the actual dir.
                if os.path.isdir(os.path.join(self.environment.get_build_dir(), expdir)):
                    bargs = compiler.get_include_args(expdir, i.is_system)
                else:
                    bargs = []
                commands += bargs
            for d in i.get_extra_build_dirs():
                commands += compiler.get_include_args(d, i.is_system)
        # Add per-target compile args, f.ex, `c_args : ['-DFOO']`. We set these
        # near the end since these are supposed to override everything else.
        commands += self.escape_extra_args(compiler,
                                           target.get_extra_args(compiler.get_language()))
        # Add source dir and build dir. Project-specific and target-specific
        # include paths must override per-target compile args, include paths
        # from external dependencies, internal dependencies, and from
        # per-target `include_directories:`
        #
        # We prefer headers in the build dir over the source dir since, for
        # instance, the user might have an srcdir == builddir Autotools build
        # in their source tree. Many projects that are moving to Meson have
        # both Meson and Autotools in parallel as part of the transition.
        if target.implicit_include_directories:
            commands += self.get_source_dir_include_args(target, compiler)
        if target.implicit_include_directories:
            commands += self.get_build_dir_include_args(target, compiler)
        # Finally add the private dir for the target to the include path. This
        # must override everything else and must be the final path added.
        commands += compiler.get_include_args(self.get_target_private_dir(target), False)
        return commands
+
+ def generate_single_compile(self, target, outfile, src, is_generated=False, header_deps=[], order_deps=[]):
+ """
+ Compiles C/C++, ObjC/ObjC++, Fortran, and D sources
+ """
+ if isinstance(src, str) and src.endswith('.h'):
+ raise AssertionError('BUG: sources should not contain headers {!r}'.format(src))
+
+ compiler = get_compiler_for_source(target.compilers.values(), src)
+ key = (target, compiler, is_generated)
+ if key in self.target_arg_cache:
+ commands = self.target_arg_cache[key]
+ else:
+ commands = self._generate_single_compile(target, compiler, is_generated)
+ self.target_arg_cache[key] = commands
+ commands = CompilerArgs(commands.compiler, commands)
+
+ build_dir = self.environment.get_build_dir()
+ if isinstance(src, File):
+ rel_src = src.rel_to_builddir(self.build_to_src)
+ if os.path.isabs(rel_src):
+ # Source files may not be from the source directory if they originate in source-only libraries,
+ # so we can't assert that the absolute path is anywhere in particular.
+ if src.is_built:
+ assert rel_src.startswith(build_dir)
+ rel_src = rel_src[len(build_dir) + 1:]
+ elif is_generated:
+ raise AssertionError('BUG: broken generated source file handling for {!r}'.format(src))
+ else:
+ raise InvalidArguments('Invalid source type: {!r}'.format(src))
+ obj_basename = self.object_filename_from_source(target, src, self.is_unity(target))
+ rel_obj = os.path.join(self.get_target_private_dir(target), obj_basename)
+ dep_file = compiler.depfile_for_object(rel_obj)
+
+ # Add MSVC debug file generation compile flags: /Fd /FS
+ commands += self.get_compile_debugfile_args(compiler, target, rel_obj)
+
+ # PCH handling
+ if self.environment.coredata.base_options.get('b_pch', False):
+ commands += self.get_pch_include_args(compiler, target)
+ pchlist = target.get_pch(compiler.language)
+ else:
+ pchlist = []
+ if not pchlist:
+ pch_dep = []
+ elif compiler.id == 'intel':
+ pch_dep = []
+ else:
+ arr = []
+ i = os.path.join(self.get_target_private_dir(target), compiler.get_pch_name(pchlist[0]))
+ arr.append(i)
+ pch_dep = arr
+
+ crstr = ''
+ if target.is_cross:
+ crstr = '_CROSS'
+ compiler_name = '%s%s_COMPILER' % (compiler.get_language(), crstr)
+ extra_deps = []
+ if compiler.get_language() == 'fortran':
+ # Can't read source file to scan for deps if it's generated later
+ # at build-time. Skip scanning for deps, and just set the module
+ # outdir argument instead.
+ # https://github.com/mesonbuild/meson/issues/1348
+ if not is_generated:
+ abs_src = os.path.join(build_dir, rel_src)
+ extra_deps += self.get_fortran_deps(compiler, abs_src, target)
+ # Dependency hack. Remove once multiple outputs in Ninja is fixed:
+ # https://groups.google.com/forum/#!topic/ninja-build/j-2RfBIOd_8
+ for modname, srcfile in self.fortran_deps[target.get_basename()].items():
+ modfile = os.path.join(self.get_target_private_dir(target),
+ compiler.module_name_to_filename(modname))
+ if srcfile == src:
+ depelem = NinjaBuildElement(self.all_outputs, modfile, 'FORTRAN_DEP_HACK', rel_obj)
+ depelem.write(outfile)
+ commands += compiler.get_module_outdir_args(self.get_target_private_dir(target))
+
+ element = NinjaBuildElement(self.all_outputs, rel_obj, compiler_name, rel_src)
+ for d in header_deps:
+ if isinstance(d, File):
+ d = d.rel_to_builddir(self.build_to_src)
+ elif not self.has_dir_part(d):
+ d = os.path.join(self.get_target_private_dir(target), d)
+ element.add_dep(d)
+ for d in extra_deps:
+ element.add_dep(d)
+ for d in order_deps:
+ if isinstance(d, File):
+ d = d.rel_to_builddir(self.build_to_src)
+ elif not self.has_dir_part(d):
+ d = os.path.join(self.get_target_private_dir(target), d)
+ element.add_orderdep(d)
+ element.add_orderdep(pch_dep)
+ # Convert from GCC-style link argument naming to the naming used by the
+ # current compiler.
+ commands = commands.to_native()
+ for i in self.get_fortran_orderdeps(target, compiler):
+ element.add_orderdep(i)
+ element.add_item('DEPFILE', dep_file)
+ element.add_item('ARGS', commands)
+ element.write(outfile)
+ return rel_obj
+
+ def has_dir_part(self, fname):
+ # FIXME FIXME: The usage of this is a terrible and unreliable hack
+ if isinstance(fname, File):
+ return fname.subdir != ''
+ return '/' in fname or '\\' in fname
+
+ # Fortran is a bit weird (again). When you link against a library, just compiling a source file
+ # requires the mod files that are output when single files are built. To do this right we would need to
+ # scan all inputs and write out explicit deps for each file. That is stoo slow and too much effort so
+ # instead just have an ordered dependency on the library. This ensures all required mod files are created.
+ # The real deps are then detected via dep file generation from the compiler. This breaks on compilers that
+ # produce incorrect dep files but such is life.
+ def get_fortran_orderdeps(self, target, compiler):
+ if compiler.language != 'fortran':
+ return []
+ return [os.path.join(self.get_target_dir(lt), lt.get_filename()) for lt in target.link_targets]
+
+ def generate_msvc_pch_command(self, target, compiler, pch):
+ if len(pch) != 2:
+ raise RuntimeError('MSVC requires one header and one source to produce precompiled headers.')
+ header = pch[0]
+ source = pch[1]
+ pchname = compiler.get_pch_name(header)
+ dst = os.path.join(self.get_target_private_dir(target), pchname)
+
+ commands = []
+ commands += self.generate_basic_compiler_args(target, compiler)
+ just_name = os.path.split(header)[1]
+ (objname, pch_args) = compiler.gen_pch_args(just_name, source, dst)
+ commands += pch_args
+ commands += self.get_compile_debugfile_args(compiler, target, objname)
+ dep = dst + '.' + compiler.get_depfile_suffix()
+ return commands, dep, dst, [objname]
+
+ def generate_gcc_pch_command(self, target, compiler, pch):
+ commands = self._generate_single_compile(target, compiler)
+ dst = os.path.join(self.get_target_private_dir(target),
+ os.path.split(pch)[-1] + '.' + compiler.get_pch_suffix())
+ dep = dst + '.' + compiler.get_depfile_suffix()
+ return commands, dep, dst, [] # Gcc does not create an object file during pch generation.
+
    def generate_pch(self, target, outfile):
        """Write precompiled-header build statements for *target*.

        Handles C and C++ pchs only. Returns the object files produced as
        a side effect (MSVC only; empty otherwise).
        """
        cstr = ''
        pch_objects = []
        if target.is_cross:
            cstr = '_CROSS'
        for lang in ['c', 'cpp']:
            pch = target.get_pch(lang)
            if not pch:
                continue
            # The pch must live in its own subdirectory; see the message below.
            if '/' not in pch[0] or '/' not in pch[-1]:
                msg = 'Precompiled header of {!r} must not be in the same ' \
                      'directory as source, please put it in a subdirectory.' \
                      ''.format(target.get_basename())
                raise InvalidArguments(msg)
            compiler = target.compilers[lang]
            if compiler.id == 'msvc':
                # MSVC pchs take a (header, source) pair; compile the source.
                src = os.path.join(self.build_to_src, target.get_source_subdir(), pch[-1])
                (commands, dep, dst, objs) = self.generate_msvc_pch_command(target, compiler, pch)
                extradep = os.path.join(self.build_to_src, target.get_source_subdir(), pch[0])
            elif compiler.id == 'intel':
                # Intel generates on target generation
                continue
            else:
                # GCC-style pch: compile the header itself.
                src = os.path.join(self.build_to_src, target.get_source_subdir(), pch[0])
                (commands, dep, dst, objs) = self.generate_gcc_pch_command(target, compiler, pch[0])
                extradep = None
            pch_objects += objs
            rulename = compiler.get_language() + cstr + '_PCH'
            elem = NinjaBuildElement(self.all_outputs, dst, rulename, src)
            if extradep is not None:
                elem.add_dep(extradep)
            elem.add_item('ARGS', commands)
            elem.add_item('DEPFILE', dep)
            elem.write(outfile)
        return pch_objects
+
+ def generate_shsym(self, outfile, target):
+ target_name = self.get_target_filename(target)
+ targetdir = self.get_target_private_dir(target)
+ symname = os.path.join(targetdir, target_name + '.symbols')
+ elem = NinjaBuildElement(self.all_outputs, symname, 'SHSYM', target_name)
+ if self.environment.is_cross_build() and self.environment.cross_info.need_cross_compiler():
+ elem.add_item('CROSS', '--cross-host=' + self.environment.cross_info.config['host_machine']['system'])
+ elem.write(outfile)
+
+ def get_cross_stdlib_link_args(self, target, linker):
+ if isinstance(target, build.StaticLibrary) or not target.is_cross:
+ return []
+ if not self.environment.cross_info.has_stdlib(linker.language):
+ return []
+ return linker.get_no_stdlib_link_args()
+
    def get_target_type_link_args(self, target, linker):
        """Return link args determined by the target's type.

        Executables get GUI-app/implib handling, shared libraries get
        PIC/soname/module-definition handling, and static libraries get
        the plain static link args.
        """
        abspath = os.path.join(self.environment.get_build_dir(), target.subdir)
        commands = []
        if isinstance(target, build.Executable):
            # Currently only used with the Swift compiler to add '-emit-executable'
            commands += linker.get_std_exe_link_args()
            # If gui_app, and that's significant on this platform
            if target.gui_app and hasattr(linker, 'get_gui_app_args'):
                commands += linker.get_gui_app_args()
            # If implib, and that's significant on this platform (i.e. Windows using either GCC or Visual Studio)
            if target.import_filename:
                commands += linker.gen_import_library_args(os.path.join(self.get_target_dir(target), target.import_filename))
        elif isinstance(target, build.SharedLibrary):
            if isinstance(target, build.SharedModule):
                commands += linker.get_std_shared_module_link_args()
            else:
                commands += linker.get_std_shared_lib_link_args()
            # All shared libraries are PIC
            commands += linker.get_pic_args()
            # Add -Wl,-soname arguments on Linux, -install_name on OS X
            commands += linker.get_soname_args(target.prefix, target.name, target.suffix,
                                               abspath, target.soversion,
                                               isinstance(target, build.SharedModule))
            # This is only visited when building for Windows using either GCC or Visual Studio
            if target.vs_module_defs and hasattr(linker, 'gen_vs_module_defs_args'):
                commands += linker.gen_vs_module_defs_args(target.vs_module_defs.rel_to_builddir(self.build_to_src))
            # This is only visited when building for Windows using either GCC or Visual Studio
            if target.import_filename:
                commands += linker.gen_import_library_args(os.path.join(self.get_target_dir(target), target.import_filename))
        elif isinstance(target, build.StaticLibrary):
            commands += linker.get_std_link_args()
        else:
            raise RuntimeError('Unknown build target type.')
        return commands
+
+ def get_link_whole_args(self, linker, target):
+ target_args = self.build_target_link_arguments(linker, target.link_whole_targets)
+ return linker.get_link_whole_for(target_args) if len(target_args) else []
+
+ def generate_link(self, target, outfile, outname, obj_list, linker, extra_args=[]):
+ if isinstance(target, build.StaticLibrary):
+ linker_base = 'STATIC'
+ else:
+ linker_base = linker.get_language() # Fixme.
+ if isinstance(target, build.SharedLibrary):
+ self.generate_shsym(outfile, target)
+ crstr = ''
+ if target.is_cross:
+ crstr = '_CROSS'
+ linker_rule = linker_base + crstr + '_LINKER'
+
+ # Create an empty commands list, and start adding link arguments from
+ # various sources in the order in which they must override each other
+ # starting from hard-coded defaults followed by build options and so on.
+ #
+ # Once all the linker options have been passed, we will start passing
+ # libraries and library paths from internal and external sources.
+ commands = CompilerArgs(linker)
+ # First, the trivial ones that are impossible to override.
+ #
+ # Add linker args for linking this target derived from 'base' build
+ # options passed on the command-line, in default_options, etc.
+ # These have the lowest priority.
+ if not isinstance(target, build.StaticLibrary):
+ commands += compilers.get_base_link_args(self.environment.coredata.base_options,
+ linker,
+ isinstance(target, build.SharedModule))
+ # Add -nostdlib if needed; can't be overridden
+ commands += self.get_cross_stdlib_link_args(target, linker)
+ # Add things like /NOLOGO; usually can't be overridden
+ commands += linker.get_linker_always_args()
+ # Add buildtype linker args: optimization level, etc.
+ commands += linker.get_buildtype_linker_args(self.get_option_for_target('buildtype', target))
+ # Add /DEBUG and the pdb filename when using MSVC
+ commands += self.get_link_debugfile_args(linker, target, outname)
+ # Add link args specific to this BuildTarget type, such as soname args,
+ # PIC, import library generation, etc.
+ commands += self.get_target_type_link_args(target, linker)
+ # Archives that are copied wholesale in the result. Must be before any
+ # other link targets so missing symbols from whole archives are found in those.
+ if not isinstance(target, build.StaticLibrary):
+ commands += self.get_link_whole_args(linker, target)
+
+ if not isinstance(target, build.StaticLibrary):
+ # Add link args added using add_project_link_arguments()
+ commands += self.build.get_project_link_args(linker, target.subproject)
+ # Add link args added using add_global_link_arguments()
+ # These override per-project link arguments
+ commands += self.build.get_global_link_args(linker)
+ if not target.is_cross:
+ # Link args added from the env: LDFLAGS. We want these to
+ # override all the defaults but not the per-target link args.
+ commands += self.environment.coredata.external_link_args[linker.get_language()]
+
+ # Now we will add libraries and library paths from various sources
+
+ # Add link args to link to all internal libraries (link_with:) and
+ # internal dependencies needed by this target.
+ if linker_base == 'STATIC':
+ # Link arguments of static libraries are not put in the command
+ # line of the library. They are instead appended to the command
+ # line where the static library is used.
+ dependencies = []
+ else:
+ dependencies = target.get_dependencies()
+ commands += self.build_target_link_arguments(linker, dependencies)
+ # For 'automagic' deps: Boost and GTest. Also dependency('threads').
+ # pkg-config puts the thread flags itself via `Cflags:`
+ for d in target.external_deps:
+ if d.need_threads():
+ commands += linker.thread_link_flags(self.environment)
+ # Only non-static built targets need link args and link dependencies
+ if not isinstance(target, build.StaticLibrary):
+ commands += target.link_args
+ # External deps must be last because target link libraries may depend on them.
+ for dep in target.get_external_deps():
+ # Extend without reordering or de-dup to preserve `-L -l` sets
+ # https://github.com/mesonbuild/meson/issues/1718
+ commands.extend_direct(dep.get_link_args())
+ for d in target.get_dependencies():
+ if isinstance(d, build.StaticLibrary):
+ for dep in d.get_external_deps():
+ commands.extend_direct(dep.get_link_args())
+ # Add link args for c_* or cpp_* build options. Currently this only
+ # adds c_winlibs and cpp_winlibs when building for Windows. This needs
+ # to be after all internal and external libraries so that unresolved
+ # symbols from those can be found here. This is needed when the
+ # *_winlibs that we want to link to are static mingw64 libraries.
+ commands += linker.get_option_link_args(self.environment.coredata.compiler_options)
+ # Set runtime-paths so we can run executables without needing to set
+ # LD_LIBRARY_PATH, etc in the environment. Doesn't work on Windows.
+ if '/' in target.name or '\\' in target.name:
+ # Target names really should not have slashes in them, but
+ # unfortunately we did not check for that and some downstream projects
+ # now have them. Once slashes are forbidden, remove this bit.
+ target_slashname_workaround_dir = os.path.join(
+ os.path.split(target.name)[0],
+ self.get_target_dir(target))
+ else:
+ target_slashname_workaround_dir = self.get_target_dir(target)
+ commands += linker.build_rpath_args(self.environment.get_build_dir(),
+ target_slashname_workaround_dir,
+ self.determine_rpath_dirs(target),
+ target.build_rpath,
+ target.install_rpath)
+ # Add libraries generated by custom targets
+ custom_target_libraries = self.get_custom_target_provided_libraries(target)
+ commands += extra_args
+ commands += custom_target_libraries
+ # Convert from GCC-style link argument naming to the naming used by the
+ # current compiler.
+ commands = commands.to_native()
+ dep_targets = [self.get_dependency_filename(t) for t in dependencies]
+ dep_targets.extend([self.get_dependency_filename(t)
+ for t in target.link_depends])
+ elem = NinjaBuildElement(self.all_outputs, outname, linker_rule, obj_list)
+ elem.add_dep(dep_targets + custom_target_libraries)
+ elem.add_item('LINK_ARGS', commands)
+ return elem
+
+ def get_dependency_filename(self, t):
+ if isinstance(t, build.SharedLibrary):
+ return os.path.join(self.get_target_private_dir(t), self.get_target_filename(t) + '.symbols')
+ elif isinstance(t, mesonlib.File):
+ if t.is_built:
+ return t.relative_name()
+ else:
+ return t.absolute_path(self.environment.get_source_dir(),
+ self.environment.get_build_dir())
+ return self.get_target_filename(t)
+
+ def generate_shlib_aliases(self, target, outdir):
+ aliases = target.get_aliases()
+ for alias, to in aliases.items():
+ aliasfile = os.path.join(self.environment.get_build_dir(), outdir, alias)
+ try:
+ os.remove(aliasfile)
+ except Exception:
+ pass
+ try:
+ os.symlink(to, aliasfile)
+ except NotImplementedError:
+ mlog.debug("Library versioning disabled because symlinks are not supported.")
+ except OSError:
+ mlog.debug("Library versioning disabled because we do not have symlink creation privileges.")
+
+ def generate_custom_target_clean(self, outfile, trees):
+ e = NinjaBuildElement(self.all_outputs, 'meson-clean-ctlist', 'CUSTOM_COMMAND', 'PHONY')
+ d = CleanTrees(self.environment.get_build_dir(), trees)
+ d_file = os.path.join(self.environment.get_scratch_dir(), 'cleantrees.dat')
+ e.add_item('COMMAND', self.environment.get_build_command() + ['--internal', 'cleantrees', d_file])
+ e.add_item('description', 'Cleaning custom target directories.')
+ e.write(outfile)
+ # Alias that runs the target defined above
+ self.create_target_alias('meson-clean-ctlist', outfile)
+ # Write out the data file passed to the script
+ with open(d_file, 'wb') as ofile:
+ pickle.dump(d, ofile)
+ return 'clean-ctlist'
+
+ def generate_gcov_clean(self, outfile):
+ gcno_elem = NinjaBuildElement(self.all_outputs, 'meson-clean-gcno', 'CUSTOM_COMMAND', 'PHONY')
+ script_root = self.environment.get_script_dir()
+ clean_script = os.path.join(script_root, 'delwithsuffix.py')
+ gcno_elem.add_item('COMMAND', mesonlib.python_command + [clean_script, '.', 'gcno'])
+ gcno_elem.add_item('description', 'Deleting gcno files.')
+ gcno_elem.write(outfile)
+ # Alias that runs the target defined above
+ self.create_target_alias('meson-clean-gcno', outfile)
+
+ gcda_elem = NinjaBuildElement(self.all_outputs, 'meson-clean-gcda', 'CUSTOM_COMMAND', 'PHONY')
+ script_root = self.environment.get_script_dir()
+ clean_script = os.path.join(script_root, 'delwithsuffix.py')
+ gcda_elem.add_item('COMMAND', mesonlib.python_command + [clean_script, '.', 'gcda'])
+ gcda_elem.add_item('description', 'Deleting gcda files.')
+ gcda_elem.write(outfile)
+ # Alias that runs the target defined above
+ self.create_target_alias('meson-clean-gcda', outfile)
+
+ def get_user_option_args(self):
+ cmds = []
+ for (k, v) in self.environment.coredata.user_options.items():
+ cmds.append('-D' + k + '=' + (v.value if isinstance(v.value, str) else str(v.value).lower()))
+ # The order of these arguments must be the same between runs of Meson
+ # to ensure reproducible output. The order we pass them shouldn't
+ # affect behavior in any other way.
+ return sorted(cmds)
+
+ def generate_dist(self, outfile):
+ elem = NinjaBuildElement(self.all_outputs, 'meson-dist', 'CUSTOM_COMMAND', 'PHONY')
+ elem.add_item('DESC', 'Creating source packages')
+ elem.add_item('COMMAND', self.environment.get_build_command() + [
+ '--internal', 'dist',
+ self.environment.source_dir,
+ self.environment.build_dir,
+ ] + self.environment.get_build_command())
+ elem.add_item('pool', 'console')
+ elem.write(outfile)
+ # Alias that runs the target defined above
+ self.create_target_alias('meson-dist', outfile)
+
+ # For things like scan-build and other helper tools we might have.
+ def generate_utils(self, outfile):
+ cmd = self.environment.get_build_command() + \
+ ['--internal', 'scanbuild', self.environment.source_dir, self.environment.build_dir] + \
+ self.environment.get_build_command() + self.get_user_option_args()
+ elem = NinjaBuildElement(self.all_outputs, 'meson-scan-build', 'CUSTOM_COMMAND', 'PHONY')
+ elem.add_item('COMMAND', cmd)
+ elem.add_item('pool', 'console')
+ elem.write(outfile)
+ # Alias that runs the target defined above
+ self.create_target_alias('meson-scan-build', outfile)
+ cmd = self.environment.get_build_command() + ['--internal', 'uninstall']
+ elem = NinjaBuildElement(self.all_outputs, 'meson-uninstall', 'CUSTOM_COMMAND', 'PHONY')
+ elem.add_item('COMMAND', cmd)
+ elem.add_item('pool', 'console')
+ elem.write(outfile)
+ # Alias that runs the target defined above
+ self.create_target_alias('meson-uninstall', outfile)
+
    def generate_ending(self, outfile):
        """Write the closing section of build.ninja: the 'all' target, the
        clean targets, and the rules that regenerate the build files."""
        targetlist = []
        for t in self.get_build_by_default_targets().values():
            # Add the first output of each target to the 'all' target so that
            # they are all built
            targetlist.append(os.path.join(self.get_target_dir(t), t.get_outputs()[0]))

        elem = NinjaBuildElement(self.all_outputs, 'all', 'phony', targetlist)
        elem.write(outfile)

        # Make 'all' the target ninja builds when invoked with no arguments.
        default = 'default all\n\n'
        outfile.write(default)

        # NOTE: this element is written further below, after optional extra
        # deps (custom-target cleaning, gcov cleaning) have been attached.
        elem = NinjaBuildElement(self.all_outputs, 'meson-clean', 'CUSTOM_COMMAND', 'PHONY')
        elem.add_item('COMMAND', [self.ninja_command, '-t', 'clean'])
        elem.add_item('description', 'Cleaning.')
        # Alias that runs the above-defined meson-clean target
        self.create_target_alias('meson-clean', outfile)

        # If we have custom targets in this project, add all their outputs to
        # the list that is passed to the `cleantrees.py` script. The script
        # will manually delete all custom_target outputs that are directories
        # instead of files. This is needed because on platforms other than
        # Windows, Ninja only deletes directories while cleaning if they are
        # empty. https://github.com/mesonbuild/meson/issues/1220
        ctlist = []
        for t in self.build.get_targets().values():
            if isinstance(t, build.CustomTarget):
                # Create a list of all custom target outputs
                for o in t.get_outputs():
                    ctlist.append(os.path.join(self.get_target_dir(t), o))
        if ctlist:
            elem.add_dep(self.generate_custom_target_clean(outfile, ctlist))

        # When coverage is enabled, cleaning also removes the gcov data files.
        if 'b_coverage' in self.environment.coredata.base_options and \
                self.environment.coredata.base_options['b_coverage'].value:
            self.generate_gcov_clean(outfile)
            elem.add_dep('clean-gcda')
            elem.add_dep('clean-gcno')
        elem.write(outfile)

        # Regenerate build.ninja whenever any file Meson read (meson.build,
        # option files, etc.) changes.
        deps = self.get_regen_filelist()
        elem = NinjaBuildElement(self.all_outputs, 'build.ninja', 'REGENERATE_BUILD', deps)
        elem.add_item('pool', 'console')
        elem.write(outfile)

        # Manual entry point ('ninja reconfigure') using the same regen rule.
        elem = NinjaBuildElement(self.all_outputs, 'reconfigure', 'REGENERATE_BUILD', 'PHONY')
        elem.add_item('pool', 'console')
        elem.write(outfile)

        # Phony rule for the regen inputs so ninja does not error out when one
        # of them disappears.
        elem = NinjaBuildElement(self.all_outputs, deps, 'phony', '')
        elem.write(outfile)
--- /dev/null
+# Copyright 2014-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os, sys
+import pickle
+import xml.dom.minidom
+import xml.etree.ElementTree as ET
+
+from . import backends
+from .. import build
+from .. import dependencies
+from .. import mlog
+from .. import compilers
+from ..compilers import CompilerArgs
+from ..mesonlib import MesonException, File, python_command
+from ..environment import Environment
+
def autodetect_vs_version(build):
    """Pick the matching Visual Studio backend from the environment.

    Inspects the VisualStudioVersion and VSINSTALLDIR environment variables
    (set by vcvarsall.bat) and returns an instantiated backend object.
    Raises MesonException when no Visual Studio environment is detected.
    """
    vs_version = os.getenv('VisualStudioVersion', None)
    vs_install_dir = os.getenv('VSINSTALLDIR', None)
    if not vs_version and not vs_install_dir:
        raise MesonException('Could not detect Visual Studio: VisualStudioVersion and VSINSTALLDIR are unset!\n'
                             'Are we inside a Visual Studio build environment? '
                             'You can also try specifying the exact backend to use.')
    # Only one of the two variables may be set. Substitute '' so the
    # substring checks below never raise TypeError on a None value.
    vs_install_dir = vs_install_dir or ''
    # VisualStudioVersion is set since Visual Studio 12.0, but sometimes
    # vcvarsall.bat doesn't set it, so also use VSINSTALLDIR
    if vs_version == '14.0' or 'Visual Studio 14' in vs_install_dir:
        from mesonbuild.backend.vs2015backend import Vs2015Backend
        return Vs2015Backend(build)
    if vs_version == '15.0' or 'Visual Studio 17' in vs_install_dir or \
       'Visual Studio\\2017' in vs_install_dir:
        from mesonbuild.backend.vs2017backend import Vs2017Backend
        return Vs2017Backend(build)
    if 'Visual Studio 10.0' in vs_install_dir:
        return Vs2010Backend(build)
    raise MesonException('Could not detect Visual Studio using VisualStudioVersion: {!r} or VSINSTALLDIR: {!r}!\n'
                         'Please specify the exact backend to use.'.format(vs_version, vs_install_dir))
+
def split_o_flags_args(args):
    """
    Splits any /O args and returns them. Does not take care of flags overriding
    previous ones. Skips non-O flag arguments.

    ['/Ox', '/Ob1'] returns ['/Ox', '/Ob1']
    ['/Oxj', '/MP'] returns ['/Ox', '/Oj']
    """
    o_flags = []
    for arg in (a for a in args if a.startswith('/O')):
        chars = arg[2:]
        if 'b' in chars:
            # /Ob takes its own numeric argument, so it can't be clumped with
            # the single-letter flags; keep the argument intact.
            o_flags.append(arg)
        else:
            o_flags.extend('/O' + c for c in chars)
    return o_flags
+
class RegenInfo:
    """Pickled record of the data needed to decide whether the generated
    project files must be regenerated."""

    def __init__(self, source_dir, build_dir, depfiles):
        # Absolute source and build directories at generation time.
        self.source_dir = source_dir
        self.build_dir = build_dir
        # Files whose modification should trigger a regeneration.
        self.depfiles = depfiles
+
+class Vs2010Backend(backends.Backend):
+ def __init__(self, build):
+ super().__init__(build)
+ self.name = 'vs2010'
+ self.project_file_version = '10.0.30319.1'
+ self.platform_toolset = None
+ self.vs_version = '2010'
+ self.windows_target_platform_version = None
+
    def generate_custom_generator_commands(self, target, parent_node):
        """Emit CustomBuild entries for all generated sources of target.

        Returns a 3-tuple: (generator output files, custom-target output
        files, include dirs contributed by custom targets).  All paths are
        relative to the target's vcxproj directory.
        """
        generator_output_files = []
        custom_target_include_dirs = []
        custom_target_output_files = []
        target_private_dir = self.relpath(self.get_target_private_dir(target), self.get_target_dir(target))
        source_target_dir = self.get_target_source_dir(target)
        # Relative path from the target dir back up to the build root.
        down = self.target_to_build_root(target)
        for genlist in target.get_generated_sources():
            if isinstance(genlist, (build.CustomTarget, build.CustomTargetIndex)):
                # Custom targets already have their own build rules; just
                # collect their outputs and include dirs.
                for i in genlist.get_outputs():
                    # Path to the generated source from the current vcxproj dir via the build root
                    ipath = os.path.join(down, self.get_target_dir(genlist), i)
                    custom_target_output_files.append(ipath)
                idir = self.relpath(self.get_target_dir(genlist), self.get_target_dir(target))
                if idir not in custom_target_include_dirs:
                    custom_target_include_dirs.append(idir)
            else:
                # A generator list: one CustomBuild element per input file.
                generator = genlist.get_generator()
                exe = generator.get_exe()
                infilelist = genlist.get_inputs()
                outfilelist = genlist.get_outputs()
                exe_arr = self.exe_object_to_cmd_array(exe)
                idgroup = ET.SubElement(parent_node, 'ItemGroup')
                for i in range(len(infilelist)):
                    # @OUTPUT@ substitution is only well-defined with a 1:1
                    # input/output mapping.
                    if len(infilelist) == len(outfilelist):
                        sole_output = os.path.join(target_private_dir, outfilelist[i])
                    else:
                        sole_output = ''
                    curfile = infilelist[i]
                    infilename = os.path.join(down, curfile.rel_to_builddir(self.build_to_src))
                    base_args = generator.get_arglist(infilename)
                    outfiles_rel = genlist.get_outputs_for(curfile)
                    outfiles = [os.path.join(target_private_dir, of) for of in outfiles_rel]
                    generator_output_files += outfiles
                    # Expand the @...@ placeholders in the generator args.
                    args = [x.replace("@INPUT@", infilename).replace('@OUTPUT@', sole_output)
                            for x in base_args]
                    args = self.replace_outputs(args, target_private_dir, outfiles_rel)
                    args = [x.replace("@SOURCE_DIR@", self.environment.get_source_dir())
                            .replace("@BUILD_DIR@", target_private_dir)
                            for x in args]
                    args = [x.replace("@CURRENT_SOURCE_DIR@", source_target_dir) for x in args]
                    args = [x.replace("@SOURCE_ROOT@", self.environment.get_source_dir())
                            .replace("@BUILD_ROOT@", self.environment.get_build_dir())
                            for x in args]
                    cmd = exe_arr + self.replace_extra_args(args, genlist)
                    if generator.capture:
                        # Capturing generators are run through the meson 'exe'
                        # wrapper so stdout can be redirected to the output.
                        exe_data = self.serialize_executable(
                            cmd[0],
                            cmd[1:],
                            self.environment.get_build_dir(),
                            capture=outfiles[0]
                        )
                        cmd = self.environment.get_build_command() + ['--internal', 'exe', exe_data]
                        abs_pdir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target))
                        os.makedirs(abs_pdir, exist_ok=True)
                    cbs = ET.SubElement(idgroup, 'CustomBuild', Include=infilename)
                    ET.SubElement(cbs, 'Command').text = ' '.join(self.quote_arguments(cmd))
                    ET.SubElement(cbs, 'Outputs').text = ';'.join(outfiles)
        return generator_output_files, custom_target_output_files, custom_target_include_dirs
+
    def generate(self, interp):
        """Backend entry point: write the solution, per-target projects and
        the RUN_TESTS/REGEN helper projects into the build directory."""
        self.interpreter = interp
        target_machine = self.interpreter.builtin['target_machine'].cpu_family_method(None, None)
        # NOTE(review): the endswith('64') test runs before the 'arm' test,
        # so an 'aarch64' cpu family would be mapped to x64 here — confirm
        # whether ARM64 targets are expected to reach this backend.
        if target_machine.endswith('64'):
            # amd64 or x86_64
            self.platform = 'x64'
        elif target_machine == 'x86':
            # x86
            self.platform = 'Win32'
        elif 'arm' in target_machine.lower():
            self.platform = 'ARM'
        else:
            raise MesonException('Unsupported Visual Studio platform: ' + target_machine)
        self.buildtype = self.environment.coredata.get_builtin_option('buildtype')
        sln_filename = os.path.join(self.environment.get_build_dir(), self.build.project_name + '.sln')
        projlist = self.generate_projects()
        self.gen_testproj('RUN_TESTS', os.path.join(self.environment.get_build_dir(), 'RUN_TESTS.vcxproj'))
        self.gen_regenproj('REGEN', os.path.join(self.environment.get_build_dir(), 'REGEN.vcxproj'))
        self.generate_solution(sln_filename, projlist)
        self.generate_regen_info()
        # Stamp so the REGEN project can tell the solution is up to date.
        Vs2010Backend.touch_regen_timestamp(self.environment.get_build_dir())
+
+ @staticmethod
+ def get_regen_stampfile(build_dir):
+ return os.path.join(os.path.join(build_dir, Environment.private_dir), 'regen.stamp')
+
+ @staticmethod
+ def touch_regen_timestamp(build_dir):
+ with open(Vs2010Backend.get_regen_stampfile(build_dir), 'w'):
+ pass
+
+ def generate_regen_info(self):
+ deps = self.get_regen_filelist()
+ regeninfo = RegenInfo(self.environment.get_source_dir(),
+ self.environment.get_build_dir(),
+ deps)
+ filename = os.path.join(self.environment.get_scratch_dir(),
+ 'regeninfo.dump')
+ with open(filename, 'wb') as f:
+ pickle.dump(regeninfo, f)
+
+ def get_obj_target_deps(self, obj_list):
+ result = {}
+ for o in obj_list:
+ if isinstance(o, build.ExtractedObjects):
+ result[o.target.get_id()] = o.target
+ return result.items()
+
    def get_target_deps(self, t, recursive=False):
        """Return {target_id: target} for everything the targets in the dict
        t depend on; when recursive=True the transitive closure is returned."""
        all_deps = {}
        for target in t.values():
            if isinstance(target, build.CustomTarget):
                for d in target.get_target_dependencies():
                    all_deps[d.get_id()] = d
            elif isinstance(target, build.RunTarget):
                # The command itself plus any target-valued arguments.
                for d in [target.command] + target.args:
                    if isinstance(d, (build.BuildTarget, build.CustomTarget)):
                        all_deps[d.get_id()] = d
            elif isinstance(target, build.BuildTarget):
                for ldep in target.link_targets:
                    all_deps[ldep.get_id()] = ldep
                for ldep in target.link_whole_targets:
                    all_deps[ldep.get_id()] = ldep
                # Targets whose object files are extracted and reused here.
                for obj_id, objdep in self.get_obj_target_deps(target.objects):
                    all_deps[obj_id] = objdep
                for gendep in target.get_generated_sources():
                    if isinstance(gendep, build.CustomTarget):
                        all_deps[gendep.get_id()] = gendep
                    elif isinstance(gendep, build.CustomTargetIndex):
                        all_deps[gendep.target.get_id()] = gendep.target
                    else:
                        # Generator list: depend on the generating executable
                        # when it is itself built by this project.
                        gen_exe = gendep.generator.get_exe()
                        if isinstance(gen_exe, build.Executable):
                            all_deps[gen_exe.get_id()] = gen_exe
            else:
                raise MesonException('Unknown target type for target %s' % target)
        if not t or not recursive:
            return all_deps
        # Recurse on the direct deps, then merge them back in.
        ret = self.get_target_deps(all_deps, recursive)
        ret.update(all_deps)
        return ret
+
    def generate_solution(self, sln_filename, projlist):
        """Write the top-level .sln file that ties together the generated
        .vcxproj files plus the RUN_TESTS and REGEN helper projects."""
        default_projlist = self.get_build_by_default_targets()
        with open(sln_filename, 'w') as ofile:
            ofile.write('Microsoft Visual Studio Solution File, Format '
                        'Version 11.00\n')
            ofile.write('# Visual Studio ' + self.vs_version + '\n')
            prj_templ = 'Project("{%s}") = "%s", "%s", "{%s}"\n'
            for p in projlist:
                prj_line = prj_templ % (self.environment.coredata.guid,
                                        p[0], p[1], p[2])
                ofile.write(prj_line)
                target = self.build.targets[p[0]]
                t = {target.get_id(): target}
                # Get direct deps
                all_deps = self.get_target_deps(t)
                # Get recursive deps
                recursive_deps = self.get_target_deps(t, recursive=True)
                ofile.write('\tProjectSection(ProjectDependencies) = '
                            'postProject\n')
                # Every project depends on REGEN so stale build files are
                # refreshed before anything is built.
                regen_guid = self.environment.coredata.regen_guid
                ofile.write('\t\t{%s} = {%s}\n' % (regen_guid, regen_guid))
                for dep in all_deps.keys():
                    guid = self.environment.coredata.target_guids[dep]
                    ofile.write('\t\t{%s} = {%s}\n' % (guid, guid))
                ofile.write('EndProjectSection\n')
                ofile.write('EndProject\n')
                # Anything a build-by-default target depends on must itself
                # be built by default.
                for dep, target in recursive_deps.items():
                    if p[0] in default_projlist:
                        default_projlist[dep] = target
            test_line = prj_templ % (self.environment.coredata.guid,
                                     'RUN_TESTS', 'RUN_TESTS.vcxproj',
                                     self.environment.coredata.test_guid)
            ofile.write(test_line)
            ofile.write('EndProject\n')
            regen_line = prj_templ % (self.environment.coredata.guid,
                                      'REGEN', 'REGEN.vcxproj',
                                      self.environment.coredata.regen_guid)
            ofile.write(regen_line)
            ofile.write('EndProject\n')
            ofile.write('Global\n')
            ofile.write('\tGlobalSection(SolutionConfigurationPlatforms) = '
                        'preSolution\n')
            ofile.write('\t\t%s|%s = %s|%s\n' %
                        (self.buildtype, self.platform, self.buildtype,
                         self.platform))
            ofile.write('\tEndGlobalSection\n')
            ofile.write('\tGlobalSection(ProjectConfigurationPlatforms) = '
                        'postSolution\n')
            ofile.write('\t\t{%s}.%s|%s.ActiveCfg = %s|%s\n' %
                        (self.environment.coredata.regen_guid, self.buildtype,
                         self.platform, self.buildtype, self.platform))
            ofile.write('\t\t{%s}.%s|%s.Build.0 = %s|%s\n' %
                        (self.environment.coredata.regen_guid, self.buildtype,
                         self.platform, self.buildtype, self.platform))
            # Create the solution configuration
            for p in projlist:
                # Add to the list of projects in this solution
                ofile.write('\t\t{%s}.%s|%s.ActiveCfg = %s|%s\n' %
                            (p[2], self.buildtype, self.platform,
                             self.buildtype, self.platform))
                if p[0] in default_projlist and \
                        not isinstance(self.build.targets[p[0]], build.RunTarget):
                    # Add to the list of projects to be built
                    ofile.write('\t\t{%s}.%s|%s.Build.0 = %s|%s\n' %
                                (p[2], self.buildtype, self.platform,
                                 self.buildtype, self.platform))
            ofile.write('\t\t{%s}.%s|%s.ActiveCfg = %s|%s\n' %
                        (self.environment.coredata.test_guid, self.buildtype,
                         self.platform, self.buildtype, self.platform))
            ofile.write('\tEndGlobalSection\n')
            ofile.write('\tGlobalSection(SolutionProperties) = preSolution\n')
            ofile.write('\t\tHideSolutionNode = FALSE\n')
            ofile.write('\tEndGlobalSection\n')
            ofile.write('EndGlobal\n')
+
+ def generate_projects(self):
+ projlist = []
+ for name, target in self.build.targets.items():
+ outdir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target))
+ fname = name + '.vcxproj'
+ relname = os.path.join(target.subdir, fname)
+ projfile = os.path.join(outdir, fname)
+ uuid = self.environment.coredata.target_guids[name]
+ self.gen_vcxproj(target, projfile, uuid)
+ projlist.append((name, relname, uuid))
+ return projlist
+
+ def split_sources(self, srclist):
+ sources = []
+ headers = []
+ objects = []
+ languages = []
+ for i in srclist:
+ if self.environment.is_header(i):
+ headers.append(i)
+ elif self.environment.is_object(i):
+ objects.append(i)
+ elif self.environment.is_source(i):
+ sources.append(i)
+ lang = self.lang_from_source_file(i)
+ if lang not in languages:
+ languages.append(lang)
+ elif self.environment.is_library(i):
+ pass
+ else:
+ # Everything that is not an object or source file is considered a header.
+ headers.append(i)
+ return sources, headers, objects, languages
+
+ def target_to_build_root(self, target):
+ if target.subdir == '':
+ return ''
+
+ directories = os.path.normpath(target.subdir).split(os.sep)
+ return os.sep.join(['..'] * len(directories))
+
+ def quote_arguments(self, arr):
+ return ['"%s"' % i for i in arr]
+
+ def add_project_reference(self, root, include, projid):
+ ig = ET.SubElement(root, 'ItemGroup')
+ pref = ET.SubElement(ig, 'ProjectReference', Include=include)
+ ET.SubElement(pref, 'Project').text = '{%s}' % projid
+
+ def create_basic_crap(self, target):
+ project_name = target.name
+ root = ET.Element('Project', {'DefaultTargets': "Build",
+ 'ToolsVersion': '4.0',
+ 'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'})
+ confitems = ET.SubElement(root, 'ItemGroup', {'Label': 'ProjectConfigurations'})
+ prjconf = ET.SubElement(confitems, 'ProjectConfiguration',
+ {'Include': self.buildtype + '|' + self.platform})
+ p = ET.SubElement(prjconf, 'Configuration')
+ p.text = self.buildtype
+ pl = ET.SubElement(prjconf, 'Platform')
+ pl.text = self.platform
+ globalgroup = ET.SubElement(root, 'PropertyGroup', Label='Globals')
+ guidelem = ET.SubElement(globalgroup, 'ProjectGuid')
+ guidelem.text = '{%s}' % self.environment.coredata.test_guid
+ kw = ET.SubElement(globalgroup, 'Keyword')
+ kw.text = self.platform + 'Proj'
+ p = ET.SubElement(globalgroup, 'Platform')
+ p.text = self.platform
+ pname = ET.SubElement(globalgroup, 'ProjectName')
+ pname.text = project_name
+ if self.windows_target_platform_version:
+ ET.SubElement(globalgroup, 'WindowsTargetPlatformVersion').text = self.windows_target_platform_version
+ ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.Default.props')
+ type_config = ET.SubElement(root, 'PropertyGroup', Label='Configuration')
+ ET.SubElement(type_config, 'ConfigurationType')
+ ET.SubElement(type_config, 'CharacterSet').text = 'MultiByte'
+ ET.SubElement(type_config, 'UseOfMfc').text = 'false'
+ if self.platform_toolset:
+ ET.SubElement(type_config, 'PlatformToolset').text = self.platform_toolset
+ ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.props')
+ direlem = ET.SubElement(root, 'PropertyGroup')
+ fver = ET.SubElement(direlem, '_ProjectFileVersion')
+ fver.text = self.project_file_version
+ outdir = ET.SubElement(direlem, 'OutDir')
+ outdir.text = '.\\'
+ intdir = ET.SubElement(direlem, 'IntDir')
+ intdir.text = target.get_id() + '\\'
+ tname = ET.SubElement(direlem, 'TargetName')
+ tname.text = target.name
+ return root
+
    def gen_run_target_vcxproj(self, target, ofname, guid):
        """Write a .vcxproj that executes a run target's command as a
        PostBuildEvent, wrapped in the commandrunner.py helper script."""
        root = self.create_basic_crap(target)
        action = ET.SubElement(root, 'ItemDefinitionGroup')
        customstep = ET.SubElement(action, 'PostBuildEvent')
        cmd_raw = [target.command] + target.args
        # commandrunner.py supplies the build/source/target directories before
        # the actual command so it runs with the right working context.
        cmd = python_command + \
            [os.path.join(self.environment.get_script_dir(), 'commandrunner.py'),
             self.environment.get_build_dir(),
             self.environment.get_source_dir(),
             self.get_target_dir(target)] + self.environment.get_build_command()
        for i in cmd_raw:
            if isinstance(i, build.BuildTarget):
                # Built targets are referenced by their output file path.
                cmd.append(os.path.join(self.environment.get_build_dir(), self.get_target_filename(i)))
            elif isinstance(i, dependencies.ExternalProgram):
                cmd += i.get_command()
            elif isinstance(i, File):
                relfname = i.rel_to_builddir(self.build_to_src)
                cmd.append(os.path.join(self.environment.get_build_dir(), relfname))
            else:
                # Plain string argument; pass through unchanged.
                cmd.append(i)
        cmd_templ = '''"%s" ''' * len(cmd)
        ET.SubElement(customstep, 'Command').text = cmd_templ % tuple(cmd)
        ET.SubElement(customstep, 'Message').text = 'Running custom command.'
        ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.targets')
        self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)
+
    def gen_custom_target_vcxproj(self, target, ofname, guid):
        """Write a .vcxproj that runs a custom target's command as a
        CustomBuildStep, via the meson '--internal exe' wrapper."""
        root = self.create_basic_crap(target)
        action = ET.SubElement(root, 'ItemDefinitionGroup')
        customstep = ET.SubElement(action, 'CustomBuildStep')
        # We need to always use absolute paths because our invocation is always
        # from the target dir, not the build root.
        target.absolute_paths = True
        (srcs, ofilenames, cmd) = self.eval_custom_target_command(target, True)
        depend_files = self.get_custom_target_depend_files(target, True)
        # Always use a wrapper because MSBuild eats random characters when
        # there are many arguments.
        tdir_abs = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target))
        extra_bdeps = target.get_transitive_build_target_deps()
        extra_paths = self.determine_windows_extra_paths(target.command[0], extra_bdeps)
        exe_data = self.serialize_executable(target.command[0], cmd[1:],
                                             # All targets run from the target dir
                                             tdir_abs,
                                             extra_paths=extra_paths,
                                             capture=ofilenames[0] if target.capture else None)
        wrapper_cmd = self.environment.get_build_command() + ['--internal', 'exe', exe_data]
        ET.SubElement(customstep, 'Command').text = ' '.join(self.quote_arguments(wrapper_cmd))
        ET.SubElement(customstep, 'Outputs').text = ';'.join(ofilenames)
        # The serialized command file is an input too so edits retrigger it.
        ET.SubElement(customstep, 'Inputs').text = ';'.join([exe_data] + srcs + depend_files)
        ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.targets')
        self.generate_custom_generator_commands(target, root)
        self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)
+
+ @classmethod
+ def lang_from_source_file(cls, src):
+ ext = src.split('.')[-1]
+ if ext in compilers.c_suffixes:
+ return 'c'
+ if ext in compilers.cpp_suffixes:
+ return 'cpp'
+ raise MesonException('Could not guess language from source file %s.' % src)
+
+ def add_pch(self, inc_cl, proj_to_src_dir, pch_sources, source_file):
+ if len(pch_sources) <= 1:
+ # We only need per file precompiled headers if we have more than 1 language.
+ return
+ lang = Vs2010Backend.lang_from_source_file(source_file)
+ header = os.path.join(proj_to_src_dir, pch_sources[lang][0])
+ pch_file = ET.SubElement(inc_cl, 'PrecompiledHeaderFile')
+ pch_file.text = header
+ pch_include = ET.SubElement(inc_cl, 'ForcedIncludeFiles')
+ pch_include.text = header + ';%(ForcedIncludeFiles)'
+ pch_out = ET.SubElement(inc_cl, 'PrecompiledHeaderOutputFile')
+ pch_out.text = '$(IntDir)$(TargetName)-%s.pch' % lang
+
+ def add_additional_options(self, lang, parent_node, file_args):
+ args = []
+ for arg in file_args[lang].to_native():
+ if arg == '%(AdditionalOptions)':
+ args.append(arg)
+ else:
+ args.append(self.escape_additional_option(arg))
+ ET.SubElement(parent_node, "AdditionalOptions").text = ' '.join(args)
+
+ def add_preprocessor_defines(self, lang, parent_node, file_defines):
+ defines = []
+ for define in file_defines[lang]:
+ if define == '%(PreprocessorDefinitions)':
+ defines.append(define)
+ else:
+ defines.append(self.escape_preprocessor_define(define))
+ ET.SubElement(parent_node, "PreprocessorDefinitions").text = ';'.join(defines)
+
+ def add_include_dirs(self, lang, parent_node, file_inc_dirs):
+ dirs = file_inc_dirs[lang]
+ ET.SubElement(parent_node, "AdditionalIncludeDirectories").text = ';'.join(dirs)
+
+ @staticmethod
+ def has_objects(objects, additional_objects, generated_objects):
+ # Ignore generated objects, those are automatically used by MSBuild because they are part of
+ # the CustomBuild Outputs.
+ return len(objects) + len(additional_objects) > 0
+
+ @staticmethod
+ def add_generated_objects(node, generated_objects):
+ # Do not add generated objects to project file. Those are automatically used by MSBuild, because
+ # they are part of the CustomBuild Outputs.
+ return
+
+ @staticmethod
+ def escape_preprocessor_define(define):
+ # See: https://msdn.microsoft.com/en-us/library/bb383819.aspx
+ table = str.maketrans({'%': '%25', '$': '%24', '@': '%40',
+ "'": '%27', ';': '%3B', '?': '%3F', '*': '%2A',
+ # We need to escape backslash because it'll be un-escaped by
+ # Windows during process creation when it parses the arguments
+ # Basically, this converts `\` to `\\`.
+ '\\': '\\\\'})
+ return define.translate(table)
+
+ @staticmethod
+ def escape_additional_option(option):
+ # See: https://msdn.microsoft.com/en-us/library/bb383819.aspx
+ table = str.maketrans({'%': '%25', '$': '%24', '@': '%40',
+ "'": '%27', ';': '%3B', '?': '%3F', '*': '%2A', ' ': '%20'})
+ option = option.translate(table)
+ # Since we're surrounding the option with ", if it ends in \ that will
+ # escape the " when the process arguments are parsed and the starting
+ # " will not terminate. So we escape it if that's the case. I'm not
+ # kidding, this is how escaping works for process args on Windows.
+ if option.endswith('\\'):
+ option += '\\'
+ return '"{}"'.format(option)
+
+ @staticmethod
+ def split_link_args(args):
+ """
+ Split a list of link arguments into three lists:
+ * library search paths
+ * library filenames (or paths)
+ * other link arguments
+ """
+ lpaths = []
+ libs = []
+ other = []
+ for arg in args:
+ if arg.startswith('/LIBPATH:'):
+ lpath = arg[9:]
+ # De-dup library search paths by removing older entries when
+ # a new one is found. This is necessary because unlike other
+ # search paths such as the include path, the library is
+ # searched for in the newest (right-most) search path first.
+ if lpath in lpaths:
+ lpaths.remove(lpath)
+ lpaths.append(lpath)
+ elif arg.startswith(('/', '-')):
+ other.append(arg)
+ # It's ok if we miss libraries with non-standard extensions here.
+ # They will go into the general link arguments.
+ elif arg.endswith('.lib') or arg.endswith('.a'):
+ # De-dup
+ if arg not in libs:
+ libs.append(arg)
+ else:
+ other.append(arg)
+ return lpaths, libs, other
+
+ def _get_cl_compiler(self, target):
+ for lang, c in target.compilers.items():
+ if lang in ('c', 'cpp'):
+ return c
+ # No source files, only objects, but we still need a compiler, so
+ # return a found compiler
+ if len(target.objects) > 0:
+ for lang, c in self.environment.coredata.compilers.items():
+ if lang in ('c', 'cpp'):
+ return c
+ raise MesonException('Could not find a C or C++ compiler. MSVC can only build C/C++ projects.')
+
+ def _prettyprint_vcxproj_xml(self, tree, ofname):
+ tree.write(ofname, encoding='utf-8', xml_declaration=True)
+ # ElementTree can not do prettyprinting so do it manually
+ doc = xml.dom.minidom.parse(ofname)
+ with open(ofname, 'w') as of:
+ of.write(doc.toprettyxml())
+
+ def gen_vcxproj(self, target, ofname, guid):
+ mlog.debug('Generating vcxproj %s.' % target.name)
+ entrypoint = 'WinMainCRTStartup'
+ subsystem = 'Windows'
+ if isinstance(target, build.Executable):
+ conftype = 'Application'
+ if not target.gui_app:
+ subsystem = 'Console'
+ entrypoint = 'mainCRTStartup'
+ elif isinstance(target, build.StaticLibrary):
+ conftype = 'StaticLibrary'
+ elif isinstance(target, build.SharedLibrary):
+ conftype = 'DynamicLibrary'
+ entrypoint = '_DllMainCrtStartup'
+ elif isinstance(target, build.CustomTarget):
+ return self.gen_custom_target_vcxproj(target, ofname, guid)
+ elif isinstance(target, build.RunTarget):
+ return self.gen_run_target_vcxproj(target, ofname, guid)
+ else:
+ raise MesonException('Unknown target type for %s' % target.get_basename())
+ # Prefix to use to access the build root from the vcxproj dir
+ down = self.target_to_build_root(target)
+ # Prefix to use to access the source tree's root from the vcxproj dir
+ proj_to_src_root = os.path.join(down, self.build_to_src)
+ # Prefix to use to access the source tree's subdir from the vcxproj dir
+ proj_to_src_dir = os.path.join(proj_to_src_root, target.subdir)
+ (sources, headers, objects, languages) = self.split_sources(target.sources)
+ if self.is_unity(target):
+ sources = self.generate_unity_files(target, sources)
+ compiler = self._get_cl_compiler(target)
+ buildtype_args = compiler.get_buildtype_args(self.buildtype)
+ buildtype_link_args = compiler.get_buildtype_linker_args(self.buildtype)
+ project_name = target.name
+ target_name = target.name
+ root = ET.Element('Project', {'DefaultTargets': "Build",
+ 'ToolsVersion': '4.0',
+ 'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'})
+ confitems = ET.SubElement(root, 'ItemGroup', {'Label': 'ProjectConfigurations'})
+ prjconf = ET.SubElement(confitems, 'ProjectConfiguration',
+ {'Include': self.buildtype + '|' + self.platform})
+ p = ET.SubElement(prjconf, 'Configuration')
+ p.text = self.buildtype
+ pl = ET.SubElement(prjconf, 'Platform')
+ pl.text = self.platform
+ # Globals
+ globalgroup = ET.SubElement(root, 'PropertyGroup', Label='Globals')
+ guidelem = ET.SubElement(globalgroup, 'ProjectGuid')
+ guidelem.text = '{%s}' % guid
+ kw = ET.SubElement(globalgroup, 'Keyword')
+ kw.text = self.platform + 'Proj'
+ ns = ET.SubElement(globalgroup, 'RootNamespace')
+ ns.text = target_name
+ p = ET.SubElement(globalgroup, 'Platform')
+ p.text = self.platform
+ pname = ET.SubElement(globalgroup, 'ProjectName')
+ pname.text = project_name
+ if self.windows_target_platform_version:
+ ET.SubElement(globalgroup, 'WindowsTargetPlatformVersion').text = self.windows_target_platform_version
+ ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.Default.props')
+ # Start configuration
+ type_config = ET.SubElement(root, 'PropertyGroup', Label='Configuration')
+ ET.SubElement(type_config, 'ConfigurationType').text = conftype
+ ET.SubElement(type_config, 'CharacterSet').text = 'MultiByte'
+ if self.platform_toolset:
+ ET.SubElement(type_config, 'PlatformToolset').text = self.platform_toolset
+ # FIXME: Meson's LTO support needs to be integrated here
+ ET.SubElement(type_config, 'WholeProgramOptimization').text = 'false'
+ # Let VS auto-set the RTC level
+ ET.SubElement(type_config, 'BasicRuntimeChecks').text = 'Default'
+ o_flags = split_o_flags_args(buildtype_args)
+ if '/Oi' in o_flags:
+ ET.SubElement(type_config, 'IntrinsicFunctions').text = 'true'
+ if '/Ob1' in o_flags:
+ ET.SubElement(type_config, 'InlineFunctionExpansion').text = 'OnlyExplicitInline'
+ elif '/Ob2' in o_flags:
+ ET.SubElement(type_config, 'InlineFunctionExpansion').text = 'AnySuitable'
+ # Size-preserving flags
+ if '/Os' in o_flags:
+ ET.SubElement(type_config, 'FavorSizeOrSpeed').text = 'Size'
+ else:
+ ET.SubElement(type_config, 'FavorSizeOrSpeed').text = 'Speed'
+ # Incremental linking increases code size
+ if '/INCREMENTAL:NO' in buildtype_link_args:
+ ET.SubElement(type_config, 'LinkIncremental').text = 'false'
+ # CRT type; debug or release
+ if '/MDd' in buildtype_args:
+ ET.SubElement(type_config, 'UseDebugLibraries').text = 'true'
+ ET.SubElement(type_config, 'RuntimeLibrary').text = 'MultiThreadedDebugDLL'
+ else:
+ ET.SubElement(type_config, 'UseDebugLibraries').text = 'false'
+ ET.SubElement(type_config, 'RuntimeLibrary').text = 'MultiThreadedDLL'
+ # Debug format
+ if '/ZI' in buildtype_args:
+ ET.SubElement(type_config, 'DebugInformationFormat').text = 'EditAndContinue'
+ elif '/Zi' in buildtype_args:
+ ET.SubElement(type_config, 'DebugInformationFormat').text = 'ProgramDatabase'
+ elif '/Z7' in buildtype_args:
+ ET.SubElement(type_config, 'DebugInformationFormat').text = 'OldStyle'
+ # Runtime checks
+ if '/RTC1' in buildtype_args:
+ ET.SubElement(type_config, 'BasicRuntimeChecks').text = 'EnableFastChecks'
+ elif '/RTCu' in buildtype_args:
+ ET.SubElement(type_config, 'BasicRuntimeChecks').text = 'UninitializedLocalUsageCheck'
+ elif '/RTCs' in buildtype_args:
+ ET.SubElement(type_config, 'BasicRuntimeChecks').text = 'StackFrameRuntimeCheck'
+ # Optimization flags
+ if '/Ox' in o_flags:
+ ET.SubElement(type_config, 'Optimization').text = 'Full'
+ elif '/O2' in o_flags:
+ ET.SubElement(type_config, 'Optimization').text = 'MaxSpeed'
+ elif '/O1' in o_flags:
+ ET.SubElement(type_config, 'Optimization').text = 'MinSpace'
+ elif '/Od' in o_flags:
+ ET.SubElement(type_config, 'Optimization').text = 'Disabled'
+ # End configuration
+ ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.props')
+ generated_files, custom_target_output_files, generated_files_include_dirs = self.generate_custom_generator_commands(target, root)
+ (gen_src, gen_hdrs, gen_objs, gen_langs) = self.split_sources(generated_files)
+ (custom_src, custom_hdrs, custom_objs, custom_langs) = self.split_sources(custom_target_output_files)
+ gen_src += custom_src
+ gen_hdrs += custom_hdrs
+ gen_langs += custom_langs
+ # Project information
+ direlem = ET.SubElement(root, 'PropertyGroup')
+ fver = ET.SubElement(direlem, '_ProjectFileVersion')
+ fver.text = self.project_file_version
+ outdir = ET.SubElement(direlem, 'OutDir')
+ outdir.text = '.\\'
+ intdir = ET.SubElement(direlem, 'IntDir')
+ intdir.text = target.get_id() + '\\'
+ tfilename = os.path.splitext(target.get_filename())
+ ET.SubElement(direlem, 'TargetName').text = tfilename[0]
+ ET.SubElement(direlem, 'TargetExt').text = tfilename[1]
+
+ # Build information
+ compiles = ET.SubElement(root, 'ItemDefinitionGroup')
+ clconf = ET.SubElement(compiles, 'ClCompile')
+ # Arguments, include dirs, defines for all files in the current target
+ target_args = []
+ target_defines = []
+ target_inc_dirs = []
+ # Arguments, include dirs, defines passed to individual files in
+ # a target; perhaps because the args are language-specific
+ #
+ # file_args is also later split out into defines and include_dirs in
+ # case someone passed those in there
+ file_args = dict((lang, CompilerArgs(comp)) for lang, comp in target.compilers.items())
+ file_defines = dict((lang, []) for lang in target.compilers)
+ file_inc_dirs = dict((lang, []) for lang in target.compilers)
+ # The order in which these compile args are added must match
+ # generate_single_compile() and generate_basic_compiler_args()
+ for l, comp in target.compilers.items():
+ if l in file_args:
+ file_args[l] += compilers.get_base_compile_args(self.environment.coredata.base_options, comp)
+ file_args[l] += comp.get_option_compile_args(self.environment.coredata.compiler_options)
+ # Add compile args added using add_project_arguments()
+ for l, args in self.build.projects_args.get(target.subproject, {}).items():
+ if l in file_args:
+ file_args[l] += args
+ # Add compile args added using add_global_arguments()
+ # These override per-project arguments
+ for l, args in self.build.global_args.items():
+ if l in file_args:
+ file_args[l] += args
+ if not target.is_cross:
+ # Compile args added from the env: CFLAGS/CXXFLAGS, etc. We want these
+ # to override all the defaults, but not the per-target compile args.
+ for l, args in self.environment.coredata.external_args.items():
+ if l in file_args:
+ file_args[l] += args
+ for args in file_args.values():
+ # This is where Visual Studio will insert target_args, target_defines,
+ # etc, which are added later from external deps (see below).
+ args += ['%(AdditionalOptions)', '%(PreprocessorDefinitions)', '%(AdditionalIncludeDirectories)']
+ # Add custom target dirs as includes automatically, but before
+ # target-specific include dirs. See _generate_single_compile() in
+ # the ninja backend for caveats.
+ args += ['-I' + arg for arg in generated_files_include_dirs]
+ # Add include dirs from the `include_directories:` kwarg on the target
+ # and from `include_directories:` of internal deps of the target.
+ #
+ # Target include dirs should override internal deps include dirs.
+ # This is handled in BuildTarget.process_kwargs()
+ #
+ # Include dirs from internal deps should override include dirs from
+ # external deps and must maintain the order in which they are
+ # specified. Hence, we must reverse so that the order is preserved.
+ #
+ # These are per-target, but we still add them as per-file because we
+ # need them to be looked in first.
+ for d in reversed(target.get_include_dirs()):
+ for i in d.get_incdirs():
+ curdir = os.path.join(d.get_curdir(), i)
+ args.append('-I' + self.relpath(curdir, target.subdir)) # build dir
+ args.append('-I' + os.path.join(proj_to_src_root, curdir)) # src dir
+ for i in d.get_extra_build_dirs():
+ curdir = os.path.join(d.get_curdir(), i)
+ args.append('-I' + self.relpath(curdir, target.subdir)) # build dir
+ # Add per-target compile args, f.ex, `c_args : ['/DFOO']`. We set these
+ # near the end since these are supposed to override everything else.
+ for l, args in target.extra_args.items():
+ if l in file_args:
+ file_args[l] += args
+ # The highest priority includes. In order of directory search:
+ # target private dir, target build dir, target source dir
+ for args in file_args.values():
+ t_inc_dirs = [self.relpath(self.get_target_private_dir(target),
+ self.get_target_dir(target))]
+ if target.implicit_include_directories:
+ t_inc_dirs += ['.']
+ if target.implicit_include_directories:
+ t_inc_dirs += [proj_to_src_dir]
+ args += ['-I' + arg for arg in t_inc_dirs]
+
+ # Split preprocessor defines and include directories out of the list of
+ # all extra arguments. The rest go into %(AdditionalOptions).
+ for l, args in file_args.items():
+ for arg in args[:]:
+ if arg.startswith(('-D', '/D')) or arg == '%(PreprocessorDefinitions)':
+ file_args[l].remove(arg)
+ # Don't escape the marker
+ if arg == '%(PreprocessorDefinitions)':
+ define = arg
+ else:
+ define = arg[2:]
+ # De-dup
+ if define in file_defines[l]:
+ file_defines[l].remove(define)
+ file_defines[l].append(define)
+ elif arg.startswith(('-I', '/I')) or arg == '%(AdditionalIncludeDirectories)':
+ file_args[l].remove(arg)
+ # Don't escape the marker
+ if arg == '%(AdditionalIncludeDirectories)':
+ inc_dir = arg
+ else:
+ inc_dir = arg[2:]
+ # De-dup
+ if inc_dir not in file_inc_dirs[l]:
+ file_inc_dirs[l].append(inc_dir)
+
+ # Split compile args needed to find external dependencies
+ # Link args are added while generating the link command
+ for d in reversed(target.get_external_deps()):
+ # Cflags required by external deps might have UNIX-specific flags,
+ # so filter them out if needed
+ d_compile_args = compiler.unix_args_to_native(d.get_compile_args())
+ for arg in d_compile_args:
+ if arg.startswith(('-D', '/D')):
+ define = arg[2:]
+ # De-dup
+ if define in target_defines:
+ target_defines.remove(define)
+ target_defines.append(define)
+ elif arg.startswith(('-I', '/I')):
+ inc_dir = arg[2:]
+ # De-dup
+ if inc_dir not in target_inc_dirs:
+ target_inc_dirs.append(inc_dir)
+ else:
+ target_args.append(arg)
+
+ languages += gen_langs
+ if len(target_args) > 0:
+ target_args.append('%(AdditionalOptions)')
+ ET.SubElement(clconf, "AdditionalOptions").text = ' '.join(target_args)
+
+ target_inc_dirs.append('%(AdditionalIncludeDirectories)')
+ ET.SubElement(clconf, 'AdditionalIncludeDirectories').text = ';'.join(target_inc_dirs)
+ target_defines.append('%(PreprocessorDefinitions)')
+ ET.SubElement(clconf, 'PreprocessorDefinitions').text = ';'.join(target_defines)
+ ET.SubElement(clconf, 'MinimalRebuild').text = 'true'
+ ET.SubElement(clconf, 'FunctionLevelLinking').text = 'true'
+ pch_node = ET.SubElement(clconf, 'PrecompiledHeader')
+ # Warning level
+ warning_level = self.get_option_for_target('warning_level', target)
+ ET.SubElement(clconf, 'WarningLevel').text = 'Level' + str(1 + int(warning_level))
+ if self.get_option_for_target('werror', target):
+ ET.SubElement(clconf, 'TreatWarningAsError').text = 'true'
+ # Note: SuppressStartupBanner is /NOLOGO and is 'true' by default
+ pch_sources = {}
+ for lang in ['c', 'cpp']:
+ pch = target.get_pch(lang)
+ if not pch:
+ continue
+ pch_node.text = 'Use'
+ pch_sources[lang] = [pch[0], pch[1], lang]
+ if len(pch_sources) == 1:
+ # If there is only 1 language with precompiled headers, we can use it for the entire project, which
+ # is cleaner than specifying it for each source file.
+ pch_source = list(pch_sources.values())[0]
+ header = os.path.join(proj_to_src_dir, pch_source[0])
+ pch_file = ET.SubElement(clconf, 'PrecompiledHeaderFile')
+ pch_file.text = header
+ pch_include = ET.SubElement(clconf, 'ForcedIncludeFiles')
+ pch_include.text = header + ';%(ForcedIncludeFiles)'
+ pch_out = ET.SubElement(clconf, 'PrecompiledHeaderOutputFile')
+ pch_out.text = '$(IntDir)$(TargetName)-%s.pch' % pch_source[2]
+
+ resourcecompile = ET.SubElement(compiles, 'ResourceCompile')
+ ET.SubElement(resourcecompile, 'PreprocessorDefinitions')
+
+ # Linker options
+ link = ET.SubElement(compiles, 'Link')
+ extra_link_args = CompilerArgs(compiler)
+ # FIXME: Can these buildtype linker args be added as tags in the
+ # vcxproj file (similar to buildtype compiler args) instead of in
+ # AdditionalOptions?
+ extra_link_args += compiler.get_buildtype_linker_args(self.buildtype)
+ # Generate Debug info
+ if self.buildtype.startswith('debug'):
+ self.generate_debug_information(link)
+ if not isinstance(target, build.StaticLibrary):
+ if isinstance(target, build.SharedModule):
+ extra_link_args += compiler.get_std_shared_module_link_args()
+ # Add link args added using add_project_link_arguments()
+ extra_link_args += self.build.get_project_link_args(compiler, target.subproject)
+ # Add link args added using add_global_link_arguments()
+ # These override per-project link arguments
+ extra_link_args += self.build.get_global_link_args(compiler)
+ if not target.is_cross:
+ # Link args added from the env: LDFLAGS. We want these to
+ # override all the defaults but not the per-target link args.
+ extra_link_args += self.environment.coredata.external_link_args[compiler.get_language()]
+ # Only non-static built targets need link args and link dependencies
+ extra_link_args += target.link_args
+ # External deps must be last because target link libraries may depend on them.
+ for dep in target.get_external_deps():
+ # Extend without reordering or de-dup to preserve `-L -l` sets
+ # https://github.com/mesonbuild/meson/issues/1718
+ extra_link_args.extend_direct(dep.get_link_args())
+ for d in target.get_dependencies():
+ if isinstance(d, build.StaticLibrary):
+ for dep in d.get_external_deps():
+ extra_link_args.extend_direct(dep.get_link_args())
+ # Add link args for c_* or cpp_* build options. Currently this only
+ # adds c_winlibs and cpp_winlibs when building for Windows. This needs
+ # to be after all internal and external libraries so that unresolved
+ # symbols from those can be found here. This is needed when the
+ # *_winlibs that we want to link to are static mingw64 libraries.
+ extra_link_args += compiler.get_option_link_args(self.environment.coredata.compiler_options)
+ (additional_libpaths, additional_links, extra_link_args) = self.split_link_args(extra_link_args.to_native())
+
+ # Add more libraries to be linked if needed
+ for t in target.get_dependencies():
+ lobj = self.build.targets[t.get_id()]
+ linkname = os.path.join(down, self.get_target_filename_for_linking(lobj))
+ if t in target.link_whole_targets:
+ # /WHOLEARCHIVE:foo must go into AdditionalOptions
+ extra_link_args += compiler.get_link_whole_for(linkname)
+ # To force Visual Studio to build this project even though it
+ # has no sources, we include a reference to the vcxproj file
+ # that builds this target. Technically we should add this only
+ # if the current target has no sources, but it doesn't hurt to
+ # have 'extra' references.
+ trelpath = self.get_target_dir_relative_to(t, target)
+ tvcxproj = os.path.join(trelpath, t.get_id() + '.vcxproj')
+ tid = self.environment.coredata.target_guids[t.get_id()]
+ self.add_project_reference(root, tvcxproj, tid)
+ else:
+ # Other libraries go into AdditionalDependencies
+ additional_links.append(linkname)
+ for lib in self.get_custom_target_provided_libraries(target):
+ additional_links.append(self.relpath(lib, self.get_target_dir(target)))
+ additional_objects = []
+ for o in self.flatten_object_list(target, down):
+ assert(isinstance(o, str))
+ additional_objects.append(o)
+ for o in custom_objs:
+ additional_objects.append(o)
+
+ if len(extra_link_args) > 0:
+ extra_link_args.append('%(AdditionalOptions)')
+ ET.SubElement(link, "AdditionalOptions").text = ' '.join(extra_link_args)
+ if len(additional_libpaths) > 0:
+ additional_libpaths.insert(0, '%(AdditionalLibraryDirectories)')
+ ET.SubElement(link, 'AdditionalLibraryDirectories').text = ';'.join(additional_libpaths)
+ if len(additional_links) > 0:
+ additional_links.append('%(AdditionalDependencies)')
+ ET.SubElement(link, 'AdditionalDependencies').text = ';'.join(additional_links)
+ ofile = ET.SubElement(link, 'OutputFile')
+ ofile.text = '$(OutDir)%s' % target.get_filename()
+ subsys = ET.SubElement(link, 'SubSystem')
+ subsys.text = subsystem
+ if (isinstance(target, build.SharedLibrary) or isinstance(target, build.Executable)) and target.get_import_filename():
+ # DLLs built with MSVC always have an import library except when
+ # they're data-only DLLs, but we don't support those yet.
+ ET.SubElement(link, 'ImportLibrary').text = target.get_import_filename()
+ if isinstance(target, build.SharedLibrary):
+ # Add module definitions file, if provided
+ if target.vs_module_defs:
+ relpath = os.path.join(down, target.vs_module_defs.rel_to_builddir(self.build_to_src))
+ ET.SubElement(link, 'ModuleDefinitionFile').text = relpath
+ if '/ZI' in buildtype_args or '/Zi' in buildtype_args:
+ pdb = ET.SubElement(link, 'ProgramDataBaseFileName')
+ pdb.text = '$(OutDir}%s.pdb' % target_name
+ if isinstance(target, build.Executable):
+ ET.SubElement(link, 'EntryPointSymbol').text = entrypoint
+ targetmachine = ET.SubElement(link, 'TargetMachine')
+ targetplatform = self.platform.lower()
+ if targetplatform == 'win32':
+ targetmachine.text = 'MachineX86'
+ elif targetplatform == 'x64':
+ targetmachine.text = 'MachineX64'
+ elif targetplatform == 'arm':
+ targetmachine.text = 'MachineARM'
+ else:
+ raise MesonException('Unsupported Visual Studio target machine: ' + targetmachine)
+
+ extra_files = target.extra_files
+ if len(headers) + len(gen_hdrs) + len(extra_files) > 0:
+ inc_hdrs = ET.SubElement(root, 'ItemGroup')
+ for h in headers:
+ relpath = os.path.join(down, h.rel_to_builddir(self.build_to_src))
+ ET.SubElement(inc_hdrs, 'CLInclude', Include=relpath)
+ for h in gen_hdrs:
+ ET.SubElement(inc_hdrs, 'CLInclude', Include=h)
+ for h in target.extra_files:
+ relpath = os.path.join(down, h.rel_to_builddir(self.build_to_src))
+ ET.SubElement(inc_hdrs, 'CLInclude', Include=relpath)
+
+ if len(sources) + len(gen_src) + len(pch_sources) > 0:
+ inc_src = ET.SubElement(root, 'ItemGroup')
+ for s in sources:
+ relpath = os.path.join(down, s.rel_to_builddir(self.build_to_src))
+ inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=relpath)
+ lang = Vs2010Backend.lang_from_source_file(s)
+ self.add_pch(inc_cl, proj_to_src_dir, pch_sources, s)
+ self.add_additional_options(lang, inc_cl, file_args)
+ self.add_preprocessor_defines(lang, inc_cl, file_defines)
+ self.add_include_dirs(lang, inc_cl, file_inc_dirs)
+ ET.SubElement(inc_cl, 'ObjectFileName').text = "$(IntDir)" + self.object_filename_from_source(target, s, False)
+ for s in gen_src:
+ inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=s)
+ lang = Vs2010Backend.lang_from_source_file(s)
+ self.add_pch(inc_cl, proj_to_src_dir, pch_sources, s)
+ self.add_additional_options(lang, inc_cl, file_args)
+ self.add_preprocessor_defines(lang, inc_cl, file_defines)
+ self.add_include_dirs(lang, inc_cl, file_inc_dirs)
+ for lang in pch_sources:
+ header, impl, suffix = pch_sources[lang]
+ relpath = os.path.join(proj_to_src_dir, impl)
+ inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=relpath)
+ pch = ET.SubElement(inc_cl, 'PrecompiledHeader')
+ pch.text = 'Create'
+ pch_out = ET.SubElement(inc_cl, 'PrecompiledHeaderOutputFile')
+ pch_out.text = '$(IntDir)$(TargetName)-%s.pch' % suffix
+ pch_file = ET.SubElement(inc_cl, 'PrecompiledHeaderFile')
+ # MSBuild searches for the header relative from the implementation, so we have to use
+ # just the file name instead of the relative path to the file.
+ pch_file.text = os.path.split(header)[1]
+ self.add_additional_options(lang, inc_cl, file_args)
+ self.add_preprocessor_defines(lang, inc_cl, file_defines)
+ self.add_include_dirs(lang, inc_cl, file_inc_dirs)
+
+ if self.has_objects(objects, additional_objects, gen_objs):
+ inc_objs = ET.SubElement(root, 'ItemGroup')
+ for s in objects:
+ relpath = os.path.join(down, s.rel_to_builddir(self.build_to_src))
+ ET.SubElement(inc_objs, 'Object', Include=relpath)
+ for s in additional_objects:
+ ET.SubElement(inc_objs, 'Object', Include=s)
+ self.add_generated_objects(inc_objs, gen_objs)
+
+ ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.targets')
+ # Reference the regen target.
+ regen_vcxproj = os.path.join(self.environment.get_build_dir(), 'REGEN.vcxproj')
+ self.add_project_reference(root, regen_vcxproj, self.environment.coredata.regen_guid)
+ self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)
+
    def gen_regenproj(self, project_name, ofname):
        """Generate the utility vcxproj that re-checks, before every build,
        whether the solution must be regenerated by Meson.

        The project runs `meson --internal regencheck` as a CustomBuild step
        whose inputs are the Meson build files and whose output is the regen
        stamp file, so MSBuild only re-runs it when build files change.
        """
        root = ET.Element('Project', {'DefaultTargets': 'Build',
                                      'ToolsVersion': '4.0',
                                      'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'})
        confitems = ET.SubElement(root, 'ItemGroup', {'Label': 'ProjectConfigurations'})
        prjconf = ET.SubElement(confitems, 'ProjectConfiguration',
                                {'Include': self.buildtype + '|' + self.platform})
        p = ET.SubElement(prjconf, 'Configuration')
        p.text = self.buildtype
        pl = ET.SubElement(prjconf, 'Platform')
        pl.text = self.platform
        globalgroup = ET.SubElement(root, 'PropertyGroup', Label='Globals')
        guidelem = ET.SubElement(globalgroup, 'ProjectGuid')
        # NOTE(review): reuses coredata.test_guid for the regen project —
        # looks intentional but confirm it cannot clash with the test project.
        guidelem.text = '{%s}' % self.environment.coredata.test_guid
        kw = ET.SubElement(globalgroup, 'Keyword')
        kw.text = self.platform + 'Proj'
        p = ET.SubElement(globalgroup, 'Platform')
        p.text = self.platform
        pname = ET.SubElement(globalgroup, 'ProjectName')
        pname.text = project_name
        if self.windows_target_platform_version:
            ET.SubElement(globalgroup, 'WindowsTargetPlatformVersion').text = self.windows_target_platform_version
        ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.Default.props')
        type_config = ET.SubElement(root, 'PropertyGroup', Label='Configuration')
        # 'Utility' projects run build steps but produce no compiled output.
        ET.SubElement(type_config, 'ConfigurationType').text = "Utility"
        ET.SubElement(type_config, 'CharacterSet').text = 'MultiByte'
        ET.SubElement(type_config, 'UseOfMfc').text = 'false'
        if self.platform_toolset:
            ET.SubElement(type_config, 'PlatformToolset').text = self.platform_toolset
        ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.props')
        direlem = ET.SubElement(root, 'PropertyGroup')
        fver = ET.SubElement(direlem, '_ProjectFileVersion')
        fver.text = self.project_file_version
        outdir = ET.SubElement(direlem, 'OutDir')
        outdir.text = '.\\'
        intdir = ET.SubElement(direlem, 'IntDir')
        intdir.text = 'regen-temp\\'
        tname = ET.SubElement(direlem, 'TargetName')
        tname.text = project_name

        action = ET.SubElement(root, 'ItemDefinitionGroup')
        midl = ET.SubElement(action, 'Midl')
        ET.SubElement(midl, "AdditionalIncludeDirectories").text = '%(AdditionalIncludeDirectories)'
        ET.SubElement(midl, "OutputDirectory").text = '$(IntDir)'
        ET.SubElement(midl, 'HeaderFileName').text = '%(Filename).h'
        ET.SubElement(midl, 'TypeLibraryName').text = '%(Filename).tlb'
        ET.SubElement(midl, 'InterfaceIdentifierFilename').text = '%(Filename)_i.c'
        ET.SubElement(midl, 'ProxyFileName').text = '%(Filename)_p.c'
        regen_command = self.environment.get_build_command() + ['--internal', 'regencheck']
        private_dir = self.environment.get_scratch_dir()
        # Batch-file boilerplate in the style Visual Studio itself generates:
        # it propagates the command's errorlevel out of the setlocal scope so
        # a failed regen check fails the build.
        cmd_templ = '''setlocal
"%s" "%s"
if %%errorlevel%% neq 0 goto :cmEnd
:cmEnd
endlocal & call :cmErrorLevel %%errorlevel%% & goto :cmDone
:cmErrorLevel
exit /b %%1
:cmDone
if %%errorlevel%% neq 0 goto :VCEnd'''
        igroup = ET.SubElement(root, 'ItemGroup')
        rulefile = os.path.join(self.environment.get_scratch_dir(), 'regen.rule')
        # The rule file is a dummy CustomBuild item; its content is irrelevant.
        if not os.path.exists(rulefile):
            with open(rulefile, 'w') as f:
                f.write("# Meson regen file.")
        custombuild = ET.SubElement(igroup, 'CustomBuild', Include=rulefile)
        message = ET.SubElement(custombuild, 'Message')
        message.text = 'Checking whether solution needs to be regenerated.'
        ET.SubElement(custombuild, 'Command').text = cmd_templ % \
            ('" "'.join(regen_command), private_dir)
        ET.SubElement(custombuild, 'Outputs').text = Vs2010Backend.get_regen_stampfile(self.environment.get_build_dir())
        deps = self.get_regen_filelist()
        ET.SubElement(custombuild, 'AdditionalInputs').text = ';'.join(deps)
        ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.targets')
        ET.SubElement(root, 'ImportGroup', Label='ExtensionTargets')
        self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)
+
    def gen_testproj(self, target_name, ofname):
        """Generate the utility vcxproj whose post-build step runs the test
        suite via `meson test --no-rebuild`."""
        project_name = target_name
        root = ET.Element('Project', {'DefaultTargets': "Build",
                                      'ToolsVersion': '4.0',
                                      'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'})
        confitems = ET.SubElement(root, 'ItemGroup', {'Label': 'ProjectConfigurations'})
        prjconf = ET.SubElement(confitems, 'ProjectConfiguration',
                                {'Include': self.buildtype + '|' + self.platform})
        p = ET.SubElement(prjconf, 'Configuration')
        p.text = self.buildtype
        pl = ET.SubElement(prjconf, 'Platform')
        pl.text = self.platform
        globalgroup = ET.SubElement(root, 'PropertyGroup', Label='Globals')
        guidelem = ET.SubElement(globalgroup, 'ProjectGuid')
        guidelem.text = '{%s}' % self.environment.coredata.test_guid
        kw = ET.SubElement(globalgroup, 'Keyword')
        kw.text = self.platform + 'Proj'
        p = ET.SubElement(globalgroup, 'Platform')
        p.text = self.platform
        pname = ET.SubElement(globalgroup, 'ProjectName')
        pname.text = project_name
        if self.windows_target_platform_version:
            ET.SubElement(globalgroup, 'WindowsTargetPlatformVersion').text = self.windows_target_platform_version
        ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.Default.props')
        type_config = ET.SubElement(root, 'PropertyGroup', Label='Configuration')
        # NOTE(review): ConfigurationType is left empty here, whereas the
        # regen project sets 'Utility' — confirm this is intentional.
        ET.SubElement(type_config, 'ConfigurationType')
        ET.SubElement(type_config, 'CharacterSet').text = 'MultiByte'
        ET.SubElement(type_config, 'UseOfMfc').text = 'false'
        if self.platform_toolset:
            ET.SubElement(type_config, 'PlatformToolset').text = self.platform_toolset
        ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.props')
        direlem = ET.SubElement(root, 'PropertyGroup')
        fver = ET.SubElement(direlem, '_ProjectFileVersion')
        fver.text = self.project_file_version
        outdir = ET.SubElement(direlem, 'OutDir')
        outdir.text = '.\\'
        intdir = ET.SubElement(direlem, 'IntDir')
        intdir.text = 'test-temp\\'
        tname = ET.SubElement(direlem, 'TargetName')
        tname.text = target_name

        action = ET.SubElement(root, 'ItemDefinitionGroup')
        midl = ET.SubElement(action, 'Midl')
        ET.SubElement(midl, "AdditionalIncludeDirectories").text = '%(AdditionalIncludeDirectories)'
        ET.SubElement(midl, "OutputDirectory").text = '$(IntDir)'
        ET.SubElement(midl, 'HeaderFileName').text = '%(Filename).h'
        ET.SubElement(midl, 'TypeLibraryName').text = '%(Filename).tlb'
        ET.SubElement(midl, 'InterfaceIdentifierFilename').text = '%(Filename)_i.c'
        ET.SubElement(midl, 'ProxyFileName').text = '%(Filename)_p.c'
        postbuild = ET.SubElement(action, 'PostBuildEvent')
        ET.SubElement(postbuild, 'Message')
        # FIXME: No benchmarks?
        test_command = self.environment.get_build_command() + ['test', '--no-rebuild']
        if not self.environment.coredata.get_builtin_option('stdsplit'):
            test_command += ['--no-stdsplit']
        if self.environment.coredata.get_builtin_option('errorlogs'):
            test_command += ['--print-errorlogs']
        # Batch-file boilerplate in the style Visual Studio itself generates:
        # it propagates the test command's errorlevel out of the setlocal
        # scope so failing tests fail the build step.
        cmd_templ = '''setlocal
"%s"
if %%errorlevel%% neq 0 goto :cmEnd
:cmEnd
endlocal & call :cmErrorLevel %%errorlevel%% & goto :cmDone
:cmErrorLevel
exit /b %%1
:cmDone
if %%errorlevel%% neq 0 goto :VCEnd'''
        self.serialize_tests()
        ET.SubElement(postbuild, 'Command').text =\
            cmd_templ % ('" "'.join(test_command))
        ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.targets')
        self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)
+
+ def generate_debug_information(self, link):
+ # valid values for vs2015 is 'false', 'true', 'DebugFastLink'
+ ET.SubElement(link, 'GenerateDebugInformation').text = 'true'
--- /dev/null
+# Copyright 2014-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .vs2010backend import Vs2010Backend
+
+
class Vs2015Backend(Vs2010Backend):
    """Visual Studio 2015 project generator.

    Reuses the VS2010 backend wholesale; only the identifying name,
    platform toolset and VS version strings differ.
    """

    def __init__(self, build):
        super().__init__(build)
        self.vs_version = '2015'
        self.platform_toolset = 'v140'
        self.name = 'vs2015'
--- /dev/null
+# Copyright 2014-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import xml.etree.ElementTree as ET
+
+from .vs2010backend import Vs2010Backend
+
+
class Vs2017Backend(Vs2010Backend):
    """Visual Studio 2017 project generator based on the VS2010 backend."""

    def __init__(self, build):
        super().__init__(build)
        self.name = 'vs2017'
        self.platform_toolset = 'v141'
        self.vs_version = '2017'
        # The VS developer command prompt exports WindowsSDKVersion with a
        # trailing backslash; strip it before using it as the target
        # platform version.
        sdk_version = os.environ.get('WindowsSDKVersion')
        if sdk_version:
            self.windows_target_platform_version = sdk_version.rstrip('\\')

    def generate_debug_information(self, link):
        """Emit full debug information.

        VS2017 accepts 'false', 'true', 'DebugFastLink' and (new in this
        version) 'DebugFull'.
        """
        ET.SubElement(link, 'GenerateDebugInformation').text = 'DebugFull'
--- /dev/null
+# Copyright 2014-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from . import backends
+from .. import build
+from .. import dependencies
+from .. import mesonlib
+import uuid, os, sys
+
+from ..mesonlib import MesonException
+
+class XCodeBackend(backends.Backend):
+ def __init__(self, build):
+ super().__init__(build)
+ self.name = 'xcode'
+ self.project_uid = self.environment.coredata.guid.replace('-', '')[:24]
+ self.project_conflist = self.gen_id()
+ self.indent = ' '
+ self.indent_level = 0
+ self.xcodetypemap = {'c': 'sourcecode.c.c',
+ 'a': 'archive.ar',
+ 'cc': 'sourcecode.cpp.cpp',
+ 'cxx': 'sourcecode.cpp.cpp',
+ 'cpp': 'sourcecode.cpp.cpp',
+ 'c++': 'sourcecode.cpp.cpp',
+ 'm': 'sourcecode.c.objc',
+ 'mm': 'sourcecode.cpp.objcpp',
+ 'h': 'sourcecode.c.h',
+ 'hpp': 'sourcecode.cpp.h',
+ 'hxx': 'sourcecode.cpp.h',
+ 'hh': 'sourcecode.cpp.hh',
+ 'inc': 'sourcecode.c.h',
+ 'dylib': 'compiled.mach-o.dylib',
+ 'o': 'compiled.mach-o.objfile',
+ }
+ self.maingroup_id = self.gen_id()
+ self.all_id = self.gen_id()
+ self.all_buildconf_id = self.gen_id()
+ self.buildtypes = ['debug']
+ self.test_id = self.gen_id()
+ self.test_command_id = self.gen_id()
+ self.test_buildconf_id = self.gen_id()
+
+ def gen_id(self):
+ return str(uuid.uuid4()).upper().replace('-', '')[:24]
+
+ def get_target_dir(self, target):
+ dirname = os.path.join(target.get_subdir(), self.environment.coredata.get_builtin_option('buildtype'))
+ os.makedirs(os.path.join(self.environment.get_build_dir(), dirname), exist_ok=True)
+ return dirname
+
+ def write_line(self, text):
+ self.ofile.write(self.indent * self.indent_level + text)
+ if not text.endswith('\n'):
+ self.ofile.write('\n')
+
    def generate(self, interp):
        """Entry point: write <project-name>.xcodeproj/project.pbxproj.

        First builds every id/lookup table the section writers below need,
        then emits the pbxproj sections in order.  Section order matters:
        the writers reference ids created by the earlier generate_*_map
        calls.
        """
        self.interpreter = interp
        # Only the serialized-test data file path is needed here.
        test_data = self.serialize_tests()[0]
        self.generate_filemap()
        self.generate_buildmap()
        self.generate_buildstylemap()
        self.generate_build_phase_map()
        self.generate_build_configuration_map()
        self.generate_build_configurationlist_map()
        self.generate_project_configurations_map()
        self.generate_buildall_configurations_map()
        self.generate_test_configurations_map()
        self.generate_native_target_map()
        self.generate_native_frameworks_map()
        self.generate_source_phase_map()
        self.generate_target_dependency_map()
        self.generate_pbxdep_map()
        self.generate_containerproxy_map()
        self.proj_dir = os.path.join(self.environment.get_build_dir(), self.build.project_name + '.xcodeproj')
        os.makedirs(self.proj_dir, exist_ok=True)
        self.proj_file = os.path.join(self.proj_dir, 'project.pbxproj')
        # The section writers all write through self.ofile.
        with open(self.proj_file, 'w') as self.ofile:
            self.generate_prefix()
            self.generate_pbx_aggregate_target()
            self.generate_pbx_build_file()
            self.generate_pbx_build_style()
            self.generate_pbx_container_item_proxy()
            self.generate_pbx_file_reference()
            self.generate_pbx_frameworks_buildphase()
            self.generate_pbx_group()
            self.generate_pbx_native_target()
            self.generate_pbx_project()
            self.generate_pbx_shell_build_phase(test_data)
            self.generate_pbx_sources_build_phase()
            self.generate_pbx_target_dependency()
            self.generate_xc_build_configuration()
            self.generate_xc_configurationList()
            self.generate_suffix()
+
+ def get_xcodetype(self, fname):
+ return self.xcodetypemap[fname.split('.')[-1]]
+
+ def generate_filemap(self):
+ self.filemap = {} # Key is source file relative to src root.
+ self.target_filemap = {}
+ for name, t in self.build.targets.items():
+ for s in t.sources:
+ if isinstance(s, mesonlib.File):
+ s = os.path.join(s.subdir, s.fname)
+ self.filemap[s] = self.gen_id()
+ for o in t.objects:
+ if isinstance(o, str):
+ o = os.path.join(t.subdir, o)
+ self.filemap[o] = self.gen_id()
+ self.target_filemap[name] = self.gen_id()
+
+ def generate_buildmap(self):
+ self.buildmap = {}
+ for t in self.build.targets.values():
+ for s in t.sources:
+ s = os.path.join(s.subdir, s.fname)
+ self.buildmap[s] = self.gen_id()
+ for o in t.objects:
+ o = os.path.join(t.subdir, o)
+ if isinstance(o, str):
+ self.buildmap[o] = self.gen_id()
+
+ def generate_buildstylemap(self):
+ self.buildstylemap = {'debug': self.gen_id()}
+
+ def generate_build_phase_map(self):
+ for tname, t in self.build.targets.items():
+ # generate id for our own target-name
+ t.buildphasemap = {}
+ t.buildphasemap[tname] = self.gen_id()
+ # each target can have it's own Frameworks/Sources/..., generate id's for those
+ t.buildphasemap['Frameworks'] = self.gen_id()
+ t.buildphasemap['Resources'] = self.gen_id()
+ t.buildphasemap['Sources'] = self.gen_id()
+
+ def generate_build_configuration_map(self):
+ self.buildconfmap = {}
+ for t in self.build.targets:
+ bconfs = {'debug': self.gen_id()}
+ self.buildconfmap[t] = bconfs
+
+ def generate_project_configurations_map(self):
+ self.project_configurations = {'debug': self.gen_id()}
+
+ def generate_buildall_configurations_map(self):
+ self.buildall_configurations = {'debug': self.gen_id()}
+
+ def generate_test_configurations_map(self):
+ self.test_configurations = {'debug': self.gen_id()}
+
+ def generate_build_configurationlist_map(self):
+ self.buildconflistmap = {}
+ for t in self.build.targets:
+ self.buildconflistmap[t] = self.gen_id()
+
+ def generate_native_target_map(self):
+ self.native_targets = {}
+ for t in self.build.targets:
+ self.native_targets[t] = self.gen_id()
+
+ def generate_native_frameworks_map(self):
+ self.native_frameworks = {}
+ self.native_frameworks_fileref = {}
+ for t in self.build.targets.values():
+ for dep in t.get_external_deps():
+ if isinstance(dep, dependencies.AppleFrameworks):
+ for f in dep.frameworks:
+ self.native_frameworks[f] = self.gen_id()
+ self.native_frameworks_fileref[f] = self.gen_id()
+
+ def generate_target_dependency_map(self):
+ self.target_dependency_map = {}
+ for tname, t in self.build.targets.items():
+ for target in t.link_targets:
+ self.target_dependency_map[(tname, target.get_basename())] = self.gen_id()
+
+ def generate_pbxdep_map(self):
+ self.pbx_dep_map = {}
+ for t in self.build.targets:
+ self.pbx_dep_map[t] = self.gen_id()
+
+ def generate_containerproxy_map(self):
+ self.containerproxy_map = {}
+ for t in self.build.targets:
+ self.containerproxy_map[t] = self.gen_id()
+
+ def generate_source_phase_map(self):
+ self.source_phase = {}
+ for t in self.build.targets:
+ self.source_phase[t] = self.gen_id()
+
    def generate_pbx_aggregate_target(self):
        """Write the PBXAggregateTarget section: ALL_BUILD and RUN_TESTS.

        ALL_BUILD depends on every real target; RUN_TESTS has a single
        shell-script build phase (written in
        generate_pbx_shell_build_phase).
        """
        self.ofile.write('\n/* Begin PBXAggregateTarget section */\n')
        self.write_line('%s /* ALL_BUILD */ = {' % self.all_id)
        self.indent_level += 1
        self.write_line('isa = PBXAggregateTarget;')
        self.write_line('buildConfigurationList = %s;' % self.all_buildconf_id)
        self.write_line('buildPhases = (')
        self.write_line(');')
        self.write_line('dependencies = (')
        self.indent_level += 1
        # One dependency entry per build target.
        for t in self.build.targets:
            self.write_line('%s /* PBXTargetDependency */,' % self.pbx_dep_map[t])
        self.indent_level -= 1
        self.write_line(');')
        self.write_line('name = ALL_BUILD;')
        self.write_line('productName = ALL_BUILD;')
        self.indent_level -= 1
        self.write_line('};')
        self.write_line('%s /* RUN_TESTS */ = {' % self.test_id)
        self.indent_level += 1
        self.write_line('isa = PBXAggregateTarget;')
        self.write_line('buildConfigurationList = %s;' % self.test_buildconf_id)
        self.write_line('buildPhases = (')
        self.indent_level += 1
        self.write_line('%s /* test run command */,' % self.test_command_id)
        self.indent_level -= 1
        self.write_line(');')
        self.write_line('dependencies = (')
        self.write_line(');')
        self.write_line('name = RUN_TESTS;')
        self.write_line('productName = RUN_TESTS;')
        self.indent_level -= 1
        self.write_line('};')
        self.ofile.write('/* End PBXAggregateTarget section */\n')
+
    def generate_pbx_build_file(self):
        """Write the PBXBuildFile section: one entry per framework, source
        and object file of every target.
        """
        self.ofile.write('\n/* Begin PBXBuildFile section */\n')
        # Template for sources (with compiler flags) and objects (without).
        templ = '%s /* %s */ = { isa = PBXBuildFile; fileRef = %s /* %s */; settings = { COMPILER_FLAGS = "%s"; }; };\n'
        otempl = '%s /* %s */ = { isa = PBXBuildFile; fileRef = %s /* %s */;};\n'

        for t in self.build.targets.values():

            for dep in t.get_external_deps():
                if isinstance(dep, dependencies.AppleFrameworks):
                    for f in dep.frameworks:
                        self.ofile.write('%s /* %s.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = %s /* %s.framework */; };\n' % (self.native_frameworks[f], f, self.native_frameworks_fileref[f], f))

            for s in t.sources:
                if isinstance(s, mesonlib.File):
                    s = s.fname

                if isinstance(s, str):
                    s = os.path.join(t.subdir, s)
                    idval = self.buildmap[s]
                    fullpath = os.path.join(self.environment.get_source_dir(), s)
                    fileref = self.filemap[s]
                    # The template repeats the path in both comments.
                    fullpath2 = fullpath
                    compiler_args = ''
                    self.ofile.write(templ % (idval, fullpath, fileref, fullpath2, compiler_args))
            # NOTE(review): objects are joined without an isinstance check
            # here, unlike generate_filemap — verify t.objects only holds
            # strings on this path.
            for o in t.objects:
                o = os.path.join(t.subdir, o)
                idval = self.buildmap[o]
                fileref = self.filemap[o]
                fullpath = os.path.join(self.environment.get_source_dir(), o)
                fullpath2 = fullpath
                self.ofile.write(otempl % (idval, fullpath, fileref, fullpath2))
        self.ofile.write('/* End PBXBuildFile section */\n')
+
    def generate_pbx_build_style(self):
        """Write the (legacy) PBXBuildStyle section, one entry per style.

        The explicit '\\n' suffixes are harmless: write_line only appends a
        newline when one is missing.
        """
        self.ofile.write('\n/* Begin PBXBuildStyle section */\n')
        for name, idval in self.buildstylemap.items():
            self.write_line('%s /* %s */ = {\n' % (idval, name))
            self.indent_level += 1
            self.write_line('isa = PBXBuildStyle;\n')
            self.write_line('buildSettings = {\n')
            self.indent_level += 1
            self.write_line('COPY_PHASE_STRIP = NO;\n')
            self.indent_level -= 1
            self.write_line('};\n')
            self.write_line('name = "%s";\n' % name)
            self.indent_level -= 1
            self.write_line('};\n')
        self.ofile.write('/* End PBXBuildStyle section */\n')
+
    def generate_pbx_container_item_proxy(self):
        """Write one PBXContainerItemProxy per target, linking the project
        object to the target's native-target id.
        """
        self.ofile.write('\n/* Begin PBXContainerItemProxy section */\n')
        for t in self.build.targets:
            self.write_line('%s /* PBXContainerItemProxy */ = {' % self.containerproxy_map[t])
            self.indent_level += 1
            self.write_line('isa = PBXContainerItemProxy;')
            self.write_line('containerPortal = %s /* Project object */;' % self.project_uid)
            self.write_line('proxyType = 1;')
            self.write_line('remoteGlobalIDString = %s;' % self.native_targets[t])
            self.write_line('remoteInfo = "%s";' % t)
            self.indent_level -= 1
            self.write_line('};')
        self.ofile.write('/* End PBXContainerItemProxy section */\n')
+
    def generate_pbx_file_reference(self):
        """Write the PBXFileReference section: frameworks, source files and
        target output products.
        """
        self.ofile.write('\n/* Begin PBXFileReference section */\n')

        for t in self.build.targets.values():
            for dep in t.get_external_deps():
                if isinstance(dep, dependencies.AppleFrameworks):
                    for f in dep.frameworks:
                        self.ofile.write('%s /* %s.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = %s.framework; path = System/Library/Frameworks/%s.framework; sourceTree = SDKROOT; };\n' % (self.native_frameworks_fileref[f], f, f, f))
        # Source files, relative to the source root.
        src_templ = '%s /* %s */ = { isa = PBXFileReference; explicitFileType = "%s"; fileEncoding = 4; name = "%s"; path = "%s"; sourceTree = SOURCE_ROOT; };\n'
        for fname, idval in self.filemap.items():
            fullpath = os.path.join(self.environment.get_source_dir(), fname)
            xcodetype = self.get_xcodetype(fname)
            name = os.path.split(fname)[-1]
            path = fname
            self.ofile.write(src_templ % (idval, fullpath, xcodetype, name, path))
        # Target products, placed in BUILT_PRODUCTS_DIR.
        target_templ = '%s /* %s */ = { isa = PBXFileReference; explicitFileType = "%s"; path = %s; refType = %d; sourceTree = BUILT_PRODUCTS_DIR; };\n'
        for tname, idval in self.target_filemap.items():
            t = self.build.targets[tname]
            fname = t.get_filename()
            reftype = 0
            if isinstance(t, build.Executable):
                typestr = 'compiled.mach-o.executable'
                path = fname
            elif isinstance(t, build.SharedLibrary):
                typestr = self.get_xcodetype('dummy.dylib')
                path = fname
            else:
                # Static libraries and anything else: type from the suffix.
                typestr = self.get_xcodetype(fname)
                path = '"%s"' % t.get_filename()
            self.ofile.write(target_templ % (idval, tname, typestr, path, reftype))
        self.ofile.write('/* End PBXFileReference section */\n')
+
+ def generate_pbx_frameworks_buildphase(self):
+ for tname, t in self.build.targets.items():
+ self.ofile.write('\n/* Begin PBXFrameworksBuildPhase section */\n')
+ self.indent_level += 1
+ self.write_line('%s /* %s */ = {\n' % (t.buildphasemap['Frameworks'], 'Frameworks'))
+ self.indent_level += 1
+ self.write_line('isa = PBXFrameworksBuildPhase;\n')
+ self.write_line('buildActionMask = %s;\n' % (2147483647))
+ self.write_line('files = (\n')
+ self.indent_level += 1
+ for dep in t.get_external_deps():
+ if isinstance(dep, dependencies.AppleFrameworks):
+ for f in dep.frameworks:
+ self.write_line('%s /* %s.framework in Frameworks */,\n' % (self.native_frameworks[f], f))
+ self.indent_level -= 1
+ self.write_line(');\n')
+ self.write_line('runOnlyForDeploymentPostprocessing = 0;\n')
+ self.indent_level -= 1
+ self.write_line('};\n')
+ self.ofile.write('/* End PBXFrameworksBuildPhase section */\n')
+
+ def generate_pbx_group(self):
+ groupmap = {}
+ target_src_map = {}
+ for t in self.build.targets:
+ groupmap[t] = self.gen_id()
+ target_src_map[t] = self.gen_id()
+ self.ofile.write('\n/* Begin PBXGroup section */\n')
+ sources_id = self.gen_id()
+ resources_id = self.gen_id()
+ products_id = self.gen_id()
+ frameworks_id = self.gen_id()
+ self.write_line('%s = {' % self.maingroup_id)
+ self.indent_level += 1
+ self.write_line('isa = PBXGroup;')
+ self.write_line('children = (')
+ self.indent_level += 1
+ self.write_line('%s /* Sources */,' % sources_id)
+ self.write_line('%s /* Resources */,' % resources_id)
+ self.write_line('%s /* Products */,' % products_id)
+ self.write_line('%s /* Frameworks */,' % frameworks_id)
+ self.indent_level -= 1
+ self.write_line(');')
+ self.write_line('sourceTree = "<group>";')
+ self.indent_level -= 1
+ self.write_line('};')
+
+ # Sources
+ self.write_line('%s /* Sources */ = {' % sources_id)
+ self.indent_level += 1
+ self.write_line('isa = PBXGroup;')
+ self.write_line('children = (')
+ self.indent_level += 1
+ for t in self.build.targets:
+ self.write_line('%s /* %s */,' % (groupmap[t], t))
+ self.indent_level -= 1
+ self.write_line(');')
+ self.write_line('name = Sources;')
+ self.write_line('sourcetree = "<group>";')
+ self.indent_level -= 1
+ self.write_line('};')
+
+ self.write_line('%s /* Resources */ = {' % resources_id)
+ self.indent_level += 1
+ self.write_line('isa = PBXGroup;')
+ self.write_line('children = (')
+ self.write_line(');')
+ self.write_line('name = Resources;')
+ self.write_line('sourceTree = "<group>";')
+ self.indent_level -= 1
+ self.write_line('};')
+
+ self.write_line('%s /* Frameworks */ = {' % frameworks_id)
+ self.indent_level += 1
+ self.write_line('isa = PBXGroup;')
+ self.write_line('children = (')
+ # write frameworks
+ self.indent_level += 1
+
+ for t in self.build.targets.values():
+ for dep in t.get_external_deps():
+ if isinstance(dep, dependencies.AppleFrameworks):
+ for f in dep.frameworks:
+ self.write_line('%s /* %s.framework */,\n' % (self.native_frameworks_fileref[f], f))
+
+ self.indent_level -= 1
+ self.write_line(');')
+ self.write_line('name = Frameworks;')
+ self.write_line('sourceTree = "<group>";')
+ self.indent_level -= 1
+ self.write_line('};')
+
+ # Targets
+ for t in self.build.targets:
+ self.write_line('%s /* %s */ = {' % (groupmap[t], t))
+ self.indent_level += 1
+ self.write_line('isa = PBXGroup;')
+ self.write_line('children = (')
+ self.indent_level += 1
+ self.write_line('%s /* Source files */,' % target_src_map[t])
+ self.indent_level -= 1
+ self.write_line(');')
+ self.write_line('name = "%s";' % t)
+ self.write_line('sourceTree = "<group>";')
+ self.indent_level -= 1
+ self.write_line('};')
+ self.write_line('%s /* Source files */ = {' % target_src_map[t])
+ self.indent_level += 1
+ self.write_line('isa = PBXGroup;')
+ self.write_line('children = (')
+ self.indent_level += 1
+ for s in self.build.targets[t].sources:
+ s = os.path.join(s.subdir, s.fname)
+ if isinstance(s, str):
+ self.write_line('%s /* %s */,' % (self.filemap[s], s))
+ for o in self.build.targets[t].objects:
+ o = os.path.join(self.build.targets[t].subdir, o)
+ self.write_line('%s /* %s */,' % (self.filemap[o], o))
+ self.indent_level -= 1
+ self.write_line(');')
+ self.write_line('name = "Source files";')
+ self.write_line('sourceTree = "<group>";')
+ self.indent_level -= 1
+ self.write_line('};')
+
+ # And finally products
+ self.write_line('%s /* Products */ = {' % products_id)
+ self.indent_level += 1
+ self.write_line('isa = PBXGroup;')
+ self.write_line('children = (')
+ self.indent_level += 1
+ for t in self.build.targets:
+ self.write_line('%s /* %s */,' % (self.target_filemap[t], t))
+ self.indent_level -= 1
+ self.write_line(');')
+ self.write_line('name = Products;')
+ self.write_line('sourceTree = "<group>";')
+ self.indent_level -= 1
+ self.write_line('};')
+ self.ofile.write('/* End PBXGroup section */\n')
+
    def generate_pbx_native_target(self):
        """Write one PBXNativeTarget per target: build phases, dependency
        list, product reference and product type.

        Raises MesonException for target types other than
        Executable/StaticLibrary/SharedLibrary.
        """
        self.ofile.write('\n/* Begin PBXNativeTarget section */\n')
        for tname, idval in self.native_targets.items():
            t = self.build.targets[tname]
            self.write_line('%s /* %s */ = {' % (idval, tname))
            self.indent_level += 1
            self.write_line('isa = PBXNativeTarget;')
            self.write_line('buildConfigurationList = %s /* Build configuration list for PBXNativeTarget "%s" */;'
                            % (self.buildconflistmap[tname], tname))
            self.write_line('buildPhases = (')
            self.indent_level += 1
            for bpname, bpval in t.buildphasemap.items():
                self.write_line('%s /* %s yyy */,' % (bpval, bpname))
            self.indent_level -= 1
            self.write_line(');')
            self.write_line('buildRules = (')
            self.write_line(');')
            self.write_line('dependencies = (')
            self.indent_level += 1
            for lt in self.build.targets[tname].link_targets:
                # NOT DOCUMENTED, may need to make different links
                # to same target have different targetdependency item.
                idval = self.pbx_dep_map[lt.get_id()]
                self.write_line('%s /* PBXTargetDependency */,' % idval)
            self.indent_level -= 1
            self.write_line(");")
            self.write_line('name = "%s";' % tname)
            self.write_line('productName = "%s";' % tname)
            self.write_line('productReference = %s /* %s */;' % (self.target_filemap[tname], tname))
            if isinstance(t, build.Executable):
                typestr = 'com.apple.product-type.tool'
            elif isinstance(t, build.StaticLibrary):
                typestr = 'com.apple.product-type.library.static'
            elif isinstance(t, build.SharedLibrary):
                typestr = 'com.apple.product-type.library.dynamic'
            else:
                raise MesonException('Unknown target type for %s' % tname)
            self.write_line('productType = "%s";' % typestr)
            self.indent_level -= 1
            self.write_line('};')
        self.ofile.write('/* End PBXNativeTarget section */\n')
+
    def generate_pbx_project(self):
        """Write the PBXProject section: the root project object tying
        together the build styles, main group and target list.
        """
        self.ofile.write('\n/* Begin PBXProject section */\n')
        self.write_line('%s /* Project object */ = {' % self.project_uid)
        self.indent_level += 1
        self.write_line('isa = PBXProject;')
        self.write_line('attributes = {')
        self.indent_level += 1
        self.write_line('BuildIndependentTargetsInParallel = YES;')
        self.indent_level -= 1
        self.write_line('};')
        conftempl = 'buildConfigurationList = %s /* build configuration list for PBXProject "%s"*/;'
        self.write_line(conftempl % (self.project_conflist, self.build.project_name))
        self.write_line('buildSettings = {')
        self.write_line('};')
        self.write_line('buildStyles = (')
        self.indent_level += 1
        for name, idval in self.buildstylemap.items():
            self.write_line('%s /* %s */,' % (idval, name))
        self.indent_level -= 1
        self.write_line(');')
        self.write_line('compatibilityVersion = "Xcode 3.2";')
        self.write_line('hasScannedForEncodings = 0;')
        self.write_line('mainGroup = %s;' % self.maingroup_id)
        self.write_line('projectDirPath = "%s";' % self.build_to_src)
        self.write_line('projectRoot = "";')
        self.write_line('targets = (')
        self.indent_level += 1
        # The two aggregate targets first, then every real target.
        self.write_line('%s /* ALL_BUILD */,' % self.all_id)
        self.write_line('%s /* RUN_TESTS */,' % self.test_id)
        for t in self.build.targets:
            self.write_line('%s /* %s */,' % (self.native_targets[t], t))
        self.indent_level -= 1
        self.write_line(');')
        self.indent_level -= 1
        self.write_line('};')
        self.ofile.write('/* End PBXProject section */\n')
+
    def generate_pbx_shell_build_phase(self, test_data):
        """Write the PBXShellScriptBuildPhase that runs the test suite.

        The script invokes meson_test.py with the serialized *test_data*
        file from the build directory.
        """
        self.ofile.write('\n/* Begin PBXShellScriptBuildPhase section */\n')
        self.write_line('%s = {' % self.test_command_id)
        self.indent_level += 1
        self.write_line('isa = PBXShellScriptBuildPhase;')
        self.write_line('buildActionMask = 2147483647;')
        self.write_line('files = (')
        self.write_line(');')
        self.write_line('inputPaths = (')
        self.write_line(');')
        self.write_line('outputPaths = (')
        self.write_line(');')
        self.write_line('runOnlyForDeploymentPostprocessing = 0;')
        self.write_line('shellPath = /bin/sh;')
        script_root = self.environment.get_script_dir()
        test_script = os.path.join(script_root, 'meson_test.py')
        cmd = mesonlib.python_command + [test_script, test_data, '--wd', self.environment.get_build_dir()]
        # Each argument is single-quoted inside the pbxproj string.
        cmdstr = ' '.join(["'%s'" % i for i in cmd])
        self.write_line('shellScript = "%s";' % cmdstr)
        self.write_line('showEnvVarsInLog = 0;')
        self.indent_level -= 1
        self.write_line('};')
        self.ofile.write('/* End PBXShellScriptBuildPhase section */\n')
+
    def generate_pbx_sources_build_phase(self):
        """Write one PBXSourcesBuildPhase per target listing its compilable
        (non-header) sources.
        """
        self.ofile.write('\n/* Begin PBXSourcesBuildPhase section */\n')
        # NOTE(review): phase_id is unused; the phase id actually written
        # is t.buildphasemap[name] — verify whether source_phase is dead.
        for name, phase_id in self.source_phase.items():
            t = self.build.targets[name]
            self.write_line('%s /* Sources */ = {' % (t.buildphasemap[name]))
            self.indent_level += 1
            self.write_line('isa = PBXSourcesBuildPhase;')
            self.write_line('buildActionMask = 2147483647;')
            self.write_line('files = (')
            self.indent_level += 1
            for s in self.build.targets[name].sources:
                s = os.path.join(s.subdir, s.fname)
                # Headers are not compiled, so they are left out.
                if not self.environment.is_header(s):
                    self.write_line('%s /* %s */,' % (self.buildmap[s], os.path.join(self.environment.get_source_dir(), s)))
            self.indent_level -= 1
            self.write_line(');')
            self.write_line('runOnlyForDeploymentPostprocessing = 0;')
            self.indent_level -= 1
            self.write_line('};')
        self.ofile.write('/* End PBXSourcesBuildPhase section */\n')
+
+ def generate_pbx_target_dependency(self):
+ self.ofile.write('\n/* Begin PBXTargetDependency section */\n')
+ for t in self.build.targets:
+ idval = self.pbx_dep_map[t] # VERIFY: is this correct?
+ self.write_line('%s /* PBXTargetDependency */ = {' % idval)
+ self.indent_level += 1
+ self.write_line('isa = PBXTargetDependency;')
+ self.write_line('target = %s /* %s */;' % (self.native_targets[t], t))
+ self.write_line('targetProxy = %s /* PBXContainerItemProxy */;' % self.containerproxy_map[t])
+ self.indent_level -= 1
+ self.write_line('};')
+ self.ofile.write('/* End PBXTargetDependency section */\n')
+
    def generate_xc_build_configuration(self):
        """Write the XCBuildConfiguration section.

        Four groups, in order: the top-level project configuration, the
        ALL_BUILD aggregate, the RUN_TESTS aggregate, and finally one
        configuration per (target, buildtype) pair with its include dirs,
        link arguments and per-language compiler flags.
        """
        self.ofile.write('\n/* Begin XCBuildConfiguration section */\n')
        # First the setup for the toplevel project.
        for buildtype in self.buildtypes:
            self.write_line('%s /* %s */ = {' % (self.project_configurations[buildtype], buildtype))
            self.indent_level += 1
            self.write_line('isa = XCBuildConfiguration;')
            self.write_line('buildSettings = {')
            self.indent_level += 1
            self.write_line('ARCHS = "$(ARCHS_STANDARD_32_64_BIT)";')
            self.write_line('ONLY_ACTIVE_ARCH = YES;')
            self.write_line('SDKROOT = "macosx";')
            self.write_line('SYMROOT = "%s/build";' % self.environment.get_build_dir())
            self.indent_level -= 1
            self.write_line('};')
            self.write_line('name = "%s";' % buildtype)
            self.indent_level -= 1
            self.write_line('};')

        # Then the all target.
        for buildtype in self.buildtypes:
            self.write_line('%s /* %s */ = {' % (self.buildall_configurations[buildtype], buildtype))
            self.indent_level += 1
            self.write_line('isa = XCBuildConfiguration;')
            self.write_line('buildSettings = {')
            self.indent_level += 1
            self.write_line('COMBINE_HIDPI_IMAGES = YES;')
            self.write_line('GCC_GENERATE_DEBUGGING_SYMBOLS = NO;')
            self.write_line('GCC_INLINES_ARE_PRIVATE_EXTERN = NO;')
            self.write_line('GCC_OPTIMIZATION_LEVEL = 0;')
            self.write_line('GCC_PREPROCESSOR_DEFINITIONS = ("");')
            self.write_line('GCC_SYMBOLS_PRIVATE_EXTERN = NO;')
            self.write_line('INSTALL_PATH = "";')
            self.write_line('OTHER_CFLAGS = " ";')
            self.write_line('OTHER_LDFLAGS = " ";')
            self.write_line('OTHER_REZFLAGS = "";')
            self.write_line('PRODUCT_NAME = ALL_BUILD;')
            self.write_line('SECTORDER_FLAGS = "";')
            self.write_line('SYMROOT = "%s";' % self.environment.get_build_dir())
            self.write_line('USE_HEADERMAP = NO;')
            self.write_line('WARNING_CFLAGS = ("-Wmost", "-Wno-four-char-constants", "-Wno-unknown-pragmas", );')
            self.indent_level -= 1
            self.write_line('};')
            self.write_line('name = "%s";' % buildtype)
            self.indent_level -= 1
            self.write_line('};')

        # Then the test target.
        for buildtype in self.buildtypes:
            self.write_line('%s /* %s */ = {' % (self.test_configurations[buildtype], buildtype))
            self.indent_level += 1
            self.write_line('isa = XCBuildConfiguration;')
            self.write_line('buildSettings = {')
            self.indent_level += 1
            self.write_line('COMBINE_HIDPI_IMAGES = YES;')
            self.write_line('GCC_GENERATE_DEBUGGING_SYMBOLS = NO;')
            self.write_line('GCC_INLINES_ARE_PRIVATE_EXTERN = NO;')
            self.write_line('GCC_OPTIMIZATION_LEVEL = 0;')
            self.write_line('GCC_PREPROCESSOR_DEFINITIONS = ("");')
            self.write_line('GCC_SYMBOLS_PRIVATE_EXTERN = NO;')
            self.write_line('INSTALL_PATH = "";')
            self.write_line('OTHER_CFLAGS = " ";')
            self.write_line('OTHER_LDFLAGS = " ";')
            self.write_line('OTHER_REZFLAGS = "";')
            self.write_line('PRODUCT_NAME = RUN_TESTS;')
            self.write_line('SECTORDER_FLAGS = "";')
            self.write_line('SYMROOT = "%s";' % self.environment.get_build_dir())
            self.write_line('USE_HEADERMAP = NO;')
            self.write_line('WARNING_CFLAGS = ("-Wmost", "-Wno-four-char-constants", "-Wno-unknown-pragmas", );')
            self.indent_level -= 1
            self.write_line('};')
            self.write_line('name = "%s";' % buildtype)
            self.indent_level -= 1
            self.write_line('};')

        # Now finally targets.
        # Maps meson language names to the Xcode OTHER_*FLAGS infix.
        langnamemap = {'c': 'C', 'cpp': 'CPLUSPLUS', 'objc': 'OBJC', 'objcpp': 'OBJCPLUSPLUS'}
        for target_name, target in self.build.targets.items():
            for buildtype in self.buildtypes:
                dep_libs = []
                links_dylib = False
                headerdirs = []
                # Include dirs are added both from the source and the
                # build tree.
                for d in target.include_dirs:
                    for sd in d.incdirs:
                        cd = os.path.join(d.curdir, sd)
                        headerdirs.append(os.path.join(self.environment.get_source_dir(), cd))
                        headerdirs.append(os.path.join(self.environment.get_build_dir(), cd))
                for l in target.link_targets:
                    abs_path = os.path.join(self.environment.get_build_dir(),
                                            l.subdir, buildtype, l.get_filename())
                    dep_libs.append("'%s'" % abs_path)
                    if isinstance(l, build.SharedLibrary):
                        links_dylib = True
                if links_dylib:
                    dep_libs = ['-Wl,-search_paths_first', '-Wl,-headerpad_max_install_names'] + dep_libs
                dylib_version = None
                if isinstance(target, build.SharedLibrary):
                    ldargs = ['-dynamiclib', '-Wl,-headerpad_max_install_names'] + dep_libs
                    install_path = os.path.join(self.environment.get_build_dir(), target.subdir, buildtype)
                    dylib_version = target.version
                else:
                    ldargs = dep_libs
                    install_path = ''
                # Versioned dylibs carry the version in the product name.
                if dylib_version is not None:
                    product_name = target.get_basename() + '.' + dylib_version
                else:
                    product_name = target.get_basename()
                ldargs += target.link_args
                ldstr = ' '.join(ldargs)
                valid = self.buildconfmap[target_name][buildtype]
                langargs = {}
                # Global args first, then per-target extras.
                for lang in self.environment.coredata.compilers:
                    if lang not in langnamemap:
                        continue
                    gargs = self.build.global_args.get(lang, [])
                    targs = target.get_extra_args(lang)
                    args = gargs + targs
                    if len(args) > 0:
                        langargs[langnamemap[lang]] = args
                symroot = os.path.join(self.environment.get_build_dir(), target.subdir)
                self.write_line('%s /* %s */ = {' % (valid, buildtype))
                self.indent_level += 1
                self.write_line('isa = XCBuildConfiguration;')
                self.write_line('buildSettings = {')
                self.indent_level += 1
                self.write_line('COMBINE_HIDPI_IMAGES = YES;')
                if dylib_version is not None:
                    self.write_line('DYLIB_CURRENT_VERSION = "%s";' % dylib_version)
                self.write_line('EXECUTABLE_PREFIX = "%s";' % target.prefix)
                if target.suffix == '':
                    suffix = ''
                else:
                    suffix = '.' + target.suffix
                self.write_line('EXECUTABLE_SUFFIX = "%s";' % suffix)
                self.write_line('GCC_GENERATE_DEBUGGING_SYMBOLS = YES;')
                self.write_line('GCC_INLINES_ARE_PRIVATE_EXTERN = NO;')
                self.write_line('GCC_OPTIMIZATION_LEVEL = 0;')
                self.write_line('GCC_PREPROCESSOR_DEFINITIONS = ("");')
                self.write_line('GCC_SYMBOLS_PRIVATE_EXTERN = NO;')
                if len(headerdirs) > 0:
                    # Each dir is wrapped in escaped quotes inside the list.
                    quotedh = ','.join(['"\\"%s\\""' % i for i in headerdirs])
                    self.write_line('HEADER_SEARCH_PATHS=(%s);' % quotedh)
                self.write_line('INSTALL_PATH = "%s";' % install_path)
                self.write_line('LIBRARY_SEARCH_PATHS = "";')
                if isinstance(target, build.SharedLibrary):
                    self.write_line('LIBRARY_STYLE = DYNAMIC;')
                for langname, args in langargs.items():
                    argstr = ' '.join(args)
                    self.write_line('OTHER_%sFLAGS = "%s";' % (langname, argstr))
                self.write_line('OTHER_LDFLAGS = "%s";' % ldstr)
                self.write_line('OTHER_REZFLAGS = "";')
                self.write_line('PRODUCT_NAME = %s;' % product_name)
                self.write_line('SECTORDER_FLAGS = "";')
                self.write_line('SYMROOT = "%s";' % symroot)
                self.write_line('USE_HEADERMAP = NO;')
                self.write_line('WARNING_CFLAGS = ("-Wmost", "-Wno-four-char-constants", "-Wno-unknown-pragmas", );')
                self.indent_level -= 1
                self.write_line('};')
                self.write_line('name = "%s";' % buildtype)
                self.indent_level -= 1
                self.write_line('};')
        self.ofile.write('/* End XCBuildConfiguration section */\n')
+
+ def generate_xc_configurationList(self):
+ self.ofile.write('\n/* Begin XCConfigurationList section */\n')
+ self.write_line('%s /* Build configuration list for PBXProject "%s" */ = {' % (self.project_conflist, self.build.project_name))
+ self.indent_level += 1
+ self.write_line('isa = XCConfigurationList;')
+ self.write_line('buildConfigurations = (')
+ self.indent_level += 1
+ for buildtype in self.buildtypes:
+ self.write_line('%s /* %s */,' % (self.project_configurations[buildtype], buildtype))
+ self.indent_level -= 1
+ self.write_line(');')
+ self.write_line('defaultConfigurationIsVisible = 0;')
+ self.write_line('defaultConfigurationName = debug;')
+ self.indent_level -= 1
+ self.write_line('};')
+
+ # Now the all target
+ self.write_line('%s /* Build configuration list for PBXAggregateTarget "ALL_BUILD" */ = {' % self.all_buildconf_id)
+ self.indent_level += 1
+ self.write_line('isa = XCConfigurationList;')
+ self.write_line('buildConfigurations = (')
+ self.indent_level += 1
+ for buildtype in self.buildtypes:
+ self.write_line('%s /* %s */,' % (self.buildall_configurations[buildtype], buildtype))
+ self.indent_level -= 1
+ self.write_line(');')
+ self.write_line('defaultConfigurationIsVisible = 0;')
+ self.write_line('defaultConfigurationName = debug;')
+ self.indent_level -= 1
+ self.write_line('};')
+
+ # Test target
+ self.write_line('%s /* Build configuration list for PBXAggregateTarget "ALL_BUILD" */ = {' % self.test_buildconf_id)
+ self.indent_level += 1
+ self.write_line('isa = XCConfigurationList;')
+ self.write_line('buildConfigurations = (')
+ self.indent_level += 1
+ for buildtype in self.buildtypes:
+ self.write_line('%s /* %s */,' % (self.test_configurations[buildtype], buildtype))
+ self.indent_level -= 1
+ self.write_line(');')
+ self.write_line('defaultConfigurationIsVisible = 0;')
+ self.write_line('defaultConfigurationName = debug;')
+ self.indent_level -= 1
+ self.write_line('};')
+
+ for target_name in self.build.targets:
+ listid = self.buildconflistmap[target_name]
+ self.write_line('%s /* Build configuration list for PBXNativeTarget "%s" */ = {' % (listid, target_name))
+ self.indent_level += 1
+ self.write_line('isa = XCConfigurationList;')
+ self.write_line('buildConfigurations = (')
+ self.indent_level += 1
+ typestr = 'debug'
+ idval = self.buildconfmap[target_name][typestr]
+ self.write_line('%s /* %s */,' % (idval, typestr))
+ self.indent_level -= 1
+ self.write_line(');')
+ self.write_line('defaultConfigurationIsVisible = 0;')
+ self.write_line('defaultConfigurationName = "%s";' % typestr)
+ self.indent_level -= 1
+ self.write_line('};')
+ self.ofile.write('/* End XCConfigurationList section */\n')
+
+ def generate_prefix(self):
+ self.ofile.write('// !$*UTF8*$!\n{\n')
+ self.indent_level += 1
+ self.write_line('archiveVersion = 1;\n')
+ self.write_line('classes = {\n')
+ self.write_line('};\n')
+ self.write_line('objectVersion = 46;\n')
+ self.write_line('objects = {\n')
+ self.indent_level += 1
+
+ def generate_suffix(self):
+ self.indent_level -= 1
+ self.write_line('};\n')
+ self.write_line('rootObject = ' + self.project_uid + ';')
+ self.indent_level -= 1
+ self.write_line('}\n')
--- /dev/null
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import copy, os, re
+from collections import OrderedDict
+import itertools
+
+from . import environment
+from . import dependencies
+from . import mlog
+from .mesonlib import File, MesonException, listify, extract_as_list
+from .mesonlib import typeslistify, stringlistify, classify_unity_sources
+from .mesonlib import get_filenames_templates_dict, substitute_values
+from .mesonlib import for_windows, for_darwin, for_cygwin
+from .compilers import is_object, clike_langs, sort_clike, lang_suffixes
+
# Keyword arguments accepted by every build target type. Values are all True;
# the dict is used purely for membership tests in check_unknown_kwargs_int().
known_basic_kwargs = dict.fromkeys([
    'install', 'c_pch', 'cpp_pch', 'c_args', 'objc_args', 'objcpp_args',
    'cpp_args', 'cs_args', 'vala_args', 'fortran_args', 'd_args',
    'd_import_dirs', 'd_unittest', 'd_module_versions', 'java_args',
    'rust_args', 'link_args', 'link_depends', 'link_with', 'link_whole',
    'implicit_include_directories', 'include_directories', 'dependencies',
    'install_dir', 'main_class', 'name_suffix', 'gui_app', 'extra_files',
    'install_rpath', 'build_rpath', 'resources', 'sources', 'objects',
    'native', 'build_by_default', 'override_options',
], True)

# These contain kwargs supported by both static and shared libraries. These are
# combined here because a library() call might be shared_library() or
# static_library() at runtime based on the configuration.
# FIXME: Find a way to pass that info down here so we can have proper target
# kwargs checking when specifically using shared_library() or static_library().
known_lib_kwargs = known_basic_kwargs.copy()
known_lib_kwargs.update(dict.fromkeys([
    'version',          # Only for shared libs
    'soversion',        # Only for shared libs
    'name_prefix',
    'vs_module_defs',   # Only for shared libs
    'vala_header',
    'vala_vapi',
    'vala_gir',
    'pic',              # Only for static libs
    'rust_crate_type',  # Only for Rust libs
], True))

known_exe_kwargs = known_basic_kwargs.copy()
known_exe_kwargs.update({'implib': True})
+
class InvalidArguments(MesonException):
    """Raised when a build definition passes invalid arguments to a target."""
    pass
+
class Build:
    """A class that holds the status of one build including
    all dependencies and so on.
    """

    def __init__(self, environment):
        self.project_name = 'name of master project'
        self.project_version = None
        self.environment = environment
        self.projects = {}
        self.targets = OrderedDict()
        self.compilers = OrderedDict()
        self.cross_compilers = OrderedDict()
        self.global_args = {}
        self.projects_args = {}
        self.global_link_args = {}
        self.projects_link_args = {}
        self.tests = []
        self.benchmarks = []
        self.headers = []
        self.man = []
        self.data = []
        self.static_linker = None
        self.static_cross_linker = None
        self.subprojects = {}
        self.install_scripts = []
        self.postconf_scripts = []
        self.install_dirs = []
        self.dep_manifest_name = None
        self.dep_manifest = {}
        self.cross_stdlibs = {}
        self.test_setups = {}

    def add_compiler(self, compiler):
        """Register a native compiler; detect a static linker on first need."""
        if self.static_linker is None and compiler.needs_static_linker():
            self.static_linker = self.environment.detect_static_linker(compiler)
        # Keep the first compiler registered for each language.
        self.compilers.setdefault(compiler.get_language(), compiler)

    def add_cross_compiler(self, compiler):
        """Register a cross compiler; the first one also picks the linker."""
        if not self.cross_compilers:
            self.static_cross_linker = self.environment.detect_static_linker(compiler)
        self.cross_compilers.setdefault(compiler.get_language(), compiler)

    def get_project(self):
        # The master project is stored under the empty-string key.
        return self.projects['']

    def get_targets(self):
        return self.targets

    def get_tests(self):
        return self.tests

    def get_benchmarks(self):
        return self.benchmarks

    def get_headers(self):
        return self.headers

    def get_man(self):
        return self.man

    def get_data(self):
        return self.data

    def get_install_subdirs(self):
        return self.install_dirs

    def get_global_args(self, compiler):
        return self.global_args.get(compiler.get_language(), [])

    def get_project_args(self, compiler, project):
        """Per-project compile args for this compiler's language."""
        per_project = self.projects_args.get(project) or {}
        return per_project.get(compiler.get_language(), [])

    def get_global_link_args(self, compiler):
        return self.global_link_args.get(compiler.get_language(), [])

    def get_project_link_args(self, compiler, project):
        """Per-project link args for this compiler's language."""
        per_project = self.projects_link_args.get(project) or {}
        return per_project.get(compiler.get_language(), [])
+
class IncludeDirs:
    """Include directories, expressed relative to a source-tree subdir."""

    def __init__(self, curdir, dirs, is_system, extra_build_dirs=None):
        self.curdir = curdir
        self.incdirs = dirs
        self.is_system = is_system
        # The interpreter has already validated that all given directories
        # actually exist, so no checking is done here.
        self.extra_build_dirs = [] if extra_build_dirs is None else extra_build_dirs

    def __repr__(self):
        return '<{} {}/{}>'.format(self.__class__.__name__, self.curdir, self.incdirs)

    def get_curdir(self):
        return self.curdir

    def get_incdirs(self):
        return self.incdirs

    def get_extra_build_dirs(self):
        return self.extra_build_dirs
+
class ExtractedObjects:
    '''
    Holds a list of sources for which the objects must be extracted
    '''
    def __init__(self, target, srclist, is_unity):
        self.target = target
        self.srclist = srclist
        # Unity builds merge sources, so extraction needs extra validation.
        if is_unity:
            self.check_unity_compatible()

    def __repr__(self):
        return '<{0} {1!r}: {2}>'.format(self.__class__.__name__,
                                         self.target.name, self.srclist)

    def check_unity_compatible(self):
        # In a Unity build, one unified source file (and thus one object) is
        # created per compiler from that compiler's subset of the sources.
        # Extraction is therefore only possible if the requested sources are
        # exactly one of those per-compiler subsets, or all of the sources.
        wanted = set(self.srclist)
        # Objects for all the sources are required, so we're compatible
        if wanted == set(self.target.sources):
            return
        compsrcs = classify_unity_sources(self.target.compilers.values(),
                                          self.target.sources)
        if any(wanted == set(srcs) for srcs in compsrcs.values()):
            return
        msg = 'Single object files can not be extracted in Unity builds. ' \
              'You can only extract all the object files at once.'
        raise MesonException(msg)
+
+
class EnvironmentVariables:
    """Records set/append/prepend operations to apply to environment vars."""

    def __init__(self):
        # Each entry is a (method, name, values, kwargs) tuple.
        self.envvars = []

    def __repr__(self):
        return "<{0}: {1}>".format(self.__class__.__name__, self.envvars)

    def get_value(self, values, kwargs):
        """Join *values* with the separator; return (separator, joined)."""
        separator = kwargs.get('separator', os.pathsep)
        # Equivalent to accumulating sep+value pairs and stripping the edges.
        joined = (separator + separator.join(values)).strip(separator)
        return separator, joined

    def set(self, env, name, values, kwargs):
        return self.get_value(values, kwargs)[1]

    def append(self, env, name, values, kwargs):
        sep, value = self.get_value(values, kwargs)
        return env[name] + sep + value if name in env else value

    def prepend(self, env, name, values, kwargs):
        sep, value = self.get_value(values, kwargs)
        return value + sep + env[name] if name in env else value

    def get_env(self, full_env):
        """Apply all recorded operations against *full_env*; return the result."""
        return {name: method(full_env, name, values, kwargs)
                for method, name, values, kwargs in self.envvars}
+
class Target:
    """Base class of everything buildable: name, location, common flags."""

    def __init__(self, name, subdir, subproject, build_by_default):
        if '/' in name or '\\' in name:
            # Fix failing test 53 when this becomes an error.
            mlog.warning('''Target "%s" has a path separator in its name.
This is not supported, it can cause unexpected failures and will become
a hard error in the future.''' % name)
        self.name = name
        self.subdir = subdir
        self.subproject = subproject
        self.build_by_default = build_by_default
        self.install = False
        self.build_always = False
        self.option_overrides = {}

    def get_basename(self):
        return self.name

    def get_subdir(self):
        return self.subdir

    def get_id(self):
        # The ID doubles as a file name on every OS, and should avoid shell
        # metacharacters, hence the restricted '@@' separator format.
        base = self.name + self.type_suffix()
        if self.subproject == '':
            return base
        return self.subproject + '@@' + base

    def process_kwargs(self, kwargs):
        if 'build_by_default' in kwargs:
            self.build_by_default = kwargs['build_by_default']
            if not isinstance(self.build_by_default, bool):
                raise InvalidArguments('build_by_default must be a boolean value.')
        self.option_overrides = self.parse_overrides(kwargs)

    def parse_overrides(self, kwargs):
        """Parse 'key=value' strings from override_options into a dict."""
        result = {}
        for override in stringlistify(kwargs.get('override_options', [])):
            if '=' not in override:
                raise InvalidArguments('Overrides must be of form "key=value"')
            key, value = override.split('=', 1)
            result[key.strip()] = value.strip()
        return result
+
+
+class BuildTarget(Target):
+ def __init__(self, name, subdir, subproject, is_cross, sources, objects, environment, kwargs):
+ super().__init__(name, subdir, subproject, True)
+ self.is_cross = is_cross
+ unity_opt = environment.coredata.get_builtin_option('unity')
+ self.is_unity = unity_opt == 'on' or (unity_opt == 'subprojects' and subproject != '')
+ self.environment = environment
+ self.sources = []
+ self.compilers = OrderedDict()
+ self.objects = []
+ self.external_deps = []
+ self.include_dirs = []
+ self.link_targets = []
+ self.link_whole_targets = []
+ self.link_depends = []
+ self.name_prefix_set = False
+ self.name_suffix_set = False
+ self.filename = 'no_name'
+ # The list of all files outputted by this target. Useful in cases such
+ # as Vala which generates .vapi and .h besides the compiled output.
+ self.outputs = [self.filename]
+ self.need_install = False
+ self.pch = {}
+ self.extra_args = {}
+ self.generated = []
+ self.extra_files = []
+ # Sources can be:
+ # 1. Pre-existing source files in the source tree
+ # 2. Pre-existing sources generated by configure_file in the build tree
+ # 3. Sources files generated by another target or a Generator
+ self.process_sourcelist(sources)
+ # Objects can be:
+ # 1. Pre-existing objects provided by the user with the `objects:` kwarg
+ # 2. Compiled objects created by and extracted from another target
+ self.process_objectlist(objects)
+ self.process_compilers()
+ self.process_kwargs(kwargs, environment)
+ self.check_unknown_kwargs(kwargs)
+ if not any([self.sources, self.generated, self.objects, self.link_whole]):
+ raise InvalidArguments('Build target %s has no sources.' % name)
+ self.process_compilers_late()
+ self.validate_sources()
+ self.validate_cross_install(environment)
+
    def __lt__(self, other):
        # Targets order by their unique ID so sorted target lists are stable.
        return self.get_id() < other.get_id()
+
+ def __repr__(self):
+ repr_str = "<{0} {1}: {2}>"
+ return repr_str.format(self.__class__.__name__, self.get_id(), self.filename)
+
+ def validate_cross_install(self, environment):
+ if environment.is_cross_build() and not self.is_cross and self.install:
+ raise InvalidArguments('Tried to install a natively built target in a cross build.')
+
    def check_unknown_kwargs(self, kwargs):
        """Warn about kwargs not recognized by this target type."""
        # Override this method in derived classes that have more
        # keywords.
        self.check_unknown_kwargs_int(kwargs, known_basic_kwargs)
+
+ def check_unknown_kwargs_int(self, kwargs, known_kwargs):
+ unknowns = []
+ for k in kwargs:
+ if k not in known_kwargs:
+ unknowns.append(k)
+ if len(unknowns) > 0:
+ mlog.warning('Unknown keyword argument(s) in target %s: %s.' %
+ (self.name, ', '.join(unknowns)))
+
+ def process_objectlist(self, objects):
+ assert(isinstance(objects, list))
+ for s in objects:
+ if hasattr(s, 'held_object'):
+ s = s.held_object
+ if isinstance(s, (str, File, ExtractedObjects)):
+ self.objects.append(s)
+ elif isinstance(s, (GeneratedList, CustomTarget)):
+ msg = 'Generated files are not allowed in the \'objects\' kwarg ' + \
+ 'for target {!r}.\nIt is meant only for '.format(self.name) + \
+ 'pre-built object files that are shipped with the\nsource ' + \
+ 'tree. Try adding it in the list of sources.'
+ raise InvalidArguments(msg)
+ else:
+ msg = 'Bad object of type {!r} in target {!r}.'.format(type(s).__name__, self.name)
+ raise InvalidArguments(msg)
+
+ def process_sourcelist(self, sources):
+ sources = listify(sources)
+ added_sources = {} # If the same source is defined multiple times, use it only once.
+ for s in sources:
+ # Holder unpacking. Ugly.
+ if hasattr(s, 'held_object'):
+ s = s.held_object
+ if isinstance(s, File):
+ if s not in added_sources:
+ self.sources.append(s)
+ added_sources[s] = True
+ elif isinstance(s, (GeneratedList, CustomTarget, CustomTargetIndex)):
+ self.generated.append(s)
+ else:
+ msg = 'Bad source of type {!r} in target {!r}.'.format(type(s).__name__, self.name)
+ raise InvalidArguments(msg)
+
+ @staticmethod
+ def can_compile_remove_sources(compiler, sources):
+ removed = False
+ for s in sources[:]:
+ if compiler.can_compile(s):
+ sources.remove(s)
+ removed = True
+ return removed
+
    def process_compilers_late(self):
        """Processes additional compilers after kwargs have been evaluated.

        This can add extra compilers that might be required by keyword
        arguments, such as link_with or dependencies. It will also try to guess
        which compiler to use if one hasn't been selected already.
        """
        # Populate list of compilers
        if self.is_cross:
            compilers = self.environment.coredata.cross_compilers
        else:
            compilers = self.environment.coredata.compilers

        # If this library is linked against another library we need to consider
        # the languages of those libraries as well.
        if self.link_targets or self.link_whole_targets:
            extra = set()
            for t in itertools.chain(self.link_targets, self.link_whole_targets):
                for name, compiler in t.compilers.items():
                    if name in clike_langs:
                        extra.add((name, compiler))
            # Insert in the canonical c-like language order so linker
            # selection (which depends on dict order) stays deterministic.
            for name, compiler in sorted(extra, key=lambda p: sort_clike(p[0])):
                self.compilers[name] = compiler

        if not self.compilers:
            # No source files or parent targets, target consists of only object
            # files of unknown origin. Just add the first clike compiler
            # that we have and hope that it can link these objects
            for lang in clike_langs:
                if lang in compilers:
                    self.compilers[lang] = compilers[lang]
                    break
+
    def process_compilers(self):
        '''
        Populate self.compilers, which is the list of compilers that this
        target will use for compiling all its sources.
        We also add compilers that were used by extracted objects to simplify
        dynamic linker determination.
        '''
        if not self.sources and not self.generated and not self.objects:
            return
        # Populate list of compilers
        if self.is_cross:
            compilers = self.environment.coredata.cross_compilers
        else:
            compilers = self.environment.coredata.compilers
        # Pre-existing sources
        sources = list(self.sources)
        # All generated sources
        for gensrc in self.generated:
            for s in gensrc.get_outputs():
                # Generated objects can't be compiled, so don't use them for
                # compiler detection. If our target only has generated objects,
                # we will fall back to using the first c-like compiler we find,
                # which is what we need.
                if not is_object(s):
                    sources.append(s)
        # Sources that were used to create our extracted objects
        for o in self.objects:
            if not isinstance(o, ExtractedObjects):
                continue
            for s in o.srclist:
                # Don't add Vala sources since that will pull in the Vala
                # compiler even though we will never use it since we are
                # dealing with compiled C code.
                if not s.endswith(lang_suffixes['vala']):
                    sources.append(s)
        if sources:
            # For each source, try to add one compiler that can compile it.
            # It's ok if no compilers can do so, because users are expected to
            # be able to add arbitrary non-source files to the sources list.
            for s in sources:
                for lang, compiler in compilers.items():
                    if compiler.can_compile(s):
                        if lang not in self.compilers:
                            self.compilers[lang] = compiler
                        break
            # Re-sort according to clike_langs
            self.compilers = OrderedDict(sorted(self.compilers.items(),
                                                key=lambda t: sort_clike(t[0])))

        # If all our sources are Vala, our target also needs the C compiler but
        # it won't get added above.
        if 'vala' in self.compilers and 'c' not in self.compilers:
            self.compilers['c'] = compilers['c']
+
+ def validate_sources(self):
+ if not self.sources:
+ return
+ for lang in ('cs', 'java'):
+ if lang in self.compilers:
+ check_sources = list(self.sources)
+ compiler = self.compilers[lang]
+ if not self.can_compile_remove_sources(compiler, check_sources):
+ m = 'No {} sources found in target {!r}'.format(lang, self.name)
+ raise InvalidArguments(m)
+ if check_sources:
+ m = '{0} targets can only contain {0} files:\n'.format(lang.capitalize())
+ m += '\n'.join([repr(c) for c in check_sources])
+ raise InvalidArguments(m)
+ # CSharp and Java targets can't contain any other file types
+ assert(len(self.compilers) == 1)
+ return
+
+ def process_link_depends(self, sources, environment):
+ """Process the link_depends keyword argument.
+
+ This is designed to handle strings, Files, and the output of Custom
+ Targets. Notably it doesn't handle generator() returned objects, since
+ adding them as a link depends would inherently cause them to be
+ generated twice, since the output needs to be passed to the ld_args and
+ link_depends.
+ """
+ sources = listify(sources)
+ for s in sources:
+ if hasattr(s, 'held_object'):
+ s = s.held_object
+
+ if isinstance(s, File):
+ self.link_depends.append(s)
+ elif isinstance(s, str):
+ self.link_depends.append(
+ File.from_source_file(environment.source_dir, self.subdir, s))
+ elif hasattr(s, 'get_outputs'):
+ self.link_depends.extend(
+ [File.from_built_file(s.subdir, p) for p in s.get_outputs()])
+ else:
+ raise InvalidArguments(
+ 'Link_depends arguments must be strings, Files, '
+ 'or a Custom Target, or lists thereof.')
+
    def get_original_kwargs(self):
        # The (holder-unwrapped) copy stored by copy_kwargs().
        return self.kwargs
+
+ def unpack_holder(self, d):
+ d = listify(d)
+ newd = []
+ for i in d:
+ if isinstance(i, list):
+ i = self.unpack_holder(i)
+ elif hasattr(i, 'held_object'):
+ i = i.held_object
+ for t in ['dependencies', 'link_with', 'include_directories', 'sources']:
+ if hasattr(i, t):
+ setattr(i, t, self.unpack_holder(getattr(i, t)))
+ newd.append(i)
+ return newd
+
+ def copy_kwargs(self, kwargs):
+ self.kwargs = copy.copy(kwargs)
+ # This sucks quite badly. Arguments
+ # are holders but they can't be pickled
+ # so unpack those known.
+ for k, v in self.kwargs.items():
+ if isinstance(v, list):
+ self.kwargs[k] = self.unpack_holder(v)
+ if hasattr(v, 'held_object'):
+ self.kwargs[k] = v.held_object
+ for t in ['dependencies', 'link_with', 'include_directories', 'sources']:
+ if t in self.kwargs:
+ self.kwargs[t] = self.unpack_holder(self.kwargs[t])
+
    def extract_objects(self, srclist):
        """Return an ExtractedObjects for the named sources of this target.

        Each entry must be a plain string naming one of this target's own
        sources (relative to the target's subdir); raises otherwise.
        """
        obj_src = []
        for src in srclist:
            if not isinstance(src, str):
                raise MesonException('Object extraction arguments must be strings.')
            src = File(False, self.subdir, src)
            if src not in self.sources:
                raise MesonException('Tried to extract unknown source %s.' % src)
            obj_src.append(src)
        return ExtractedObjects(self, obj_src, self.is_unity)
+
    def extract_all_objects(self):
        """Return an ExtractedObjects covering every source of this target."""
        return ExtractedObjects(self, self.sources, self.is_unity)
+
    def get_all_link_deps(self):
        # Base implementation; subclasses may add themselves to the result.
        return self.get_transitive_link_deps()
+
+ def get_transitive_link_deps(self):
+ result = []
+ for i in self.link_targets:
+ result += i.get_all_link_deps()
+ return result
+
    def get_custom_install_dir(self):
        # List set in process_kwargs(); a False entry disables installation
        # of the output at the same index.
        return self.install_dir
+
    def process_kwargs(self, kwargs, environment):
        """Validate and apply all build-target keyword arguments.

        Order matters here: link_with/link_whole are processed before
        per-language args, include dirs are added before dependencies so
        target-local dirs take precedence, and name_prefix/name_suffix are
        recorded with *_set flags so filename generation knows whether the
        user overrode them.
        """
        super().process_kwargs(kwargs)
        self.copy_kwargs(kwargs)
        kwargs.get('modules', [])
        self.need_install = kwargs.get('install', self.need_install)
        llist = extract_as_list(kwargs, 'link_with')
        for linktarget in llist:
            # Sorry for this hack. Keyword targets are kept in holders
            # in kwargs. Unpack here without looking at the exact type.
            if hasattr(linktarget, "held_object"):
                linktarget = linktarget.held_object
            if isinstance(linktarget, dependencies.ExternalLibrary):
                raise MesonException('''An external library was used in link_with keyword argument, which
is reserved for libraries built as part of this project. External
libraries must be passed using the dependencies keyword argument
instead, because they are conceptually "external dependencies",
just like those detected with the dependency() function.''')
            self.link(linktarget)
        lwhole = extract_as_list(kwargs, 'link_whole')
        for linktarget in lwhole:
            # Sorry for this hack. Keyword targets are kept in holders
            # in kwargs. Unpack here without looking at the exact type.
            if hasattr(linktarget, "held_object"):
                linktarget = linktarget.held_object
            self.link_whole(linktarget)

        c_pchlist, cpp_pchlist, clist, cpplist, cslist, valalist, objclist, objcpplist, fortranlist, rustlist \
            = extract_as_list(kwargs, 'c_pch', 'cpp_pch', 'c_args', 'cpp_args', 'cs_args', 'vala_args', 'objc_args',
                              'objcpp_args', 'fortran_args', 'rust_args')

        self.add_pch('c', c_pchlist)
        self.add_pch('cpp', cpp_pchlist)
        compiler_args = {'c': clist, 'cpp': cpplist, 'cs': cslist, 'vala': valalist, 'objc': objclist, 'objcpp': objcpplist,
                         'fortran': fortranlist, 'rust': rustlist
                         }
        for key, value in compiler_args.items():
            self.add_compiler_args(key, value)

        # Vala output-name kwargs apply to libraries only.
        if not isinstance(self, Executable):
            self.vala_header = kwargs.get('vala_header', self.name + '.h')
            self.vala_vapi = kwargs.get('vala_vapi', self.name + '.vapi')
            self.vala_gir = kwargs.get('vala_gir', None)

        dlist = stringlistify(kwargs.get('d_args', []))
        self.add_compiler_args('d', dlist)
        # D language feature flags are translated to compiler args by the
        # D compiler itself, if one is in use.
        dfeatures = dict()
        dfeature_unittest = kwargs.get('d_unittest', False)
        if dfeature_unittest:
            dfeatures['unittest'] = dfeature_unittest
        dfeature_versions = kwargs.get('d_module_versions', None)
        if dfeature_versions:
            dfeatures['versions'] = dfeature_versions
        dfeature_import_dirs = kwargs.get('d_import_dirs', None)
        if dfeature_import_dirs:
            dfeatures['import_dirs'] = dfeature_import_dirs
        if dfeatures:
            if 'd' in self.compilers:
                self.add_compiler_args('d', self.compilers['d'].get_feature_args(dfeatures))

        self.link_args = extract_as_list(kwargs, 'link_args')
        for i in self.link_args:
            if not isinstance(i, str):
                raise InvalidArguments('Link_args arguments must be strings.')
        for l in self.link_args:
            if '-Wl,-rpath' in l or l.startswith('-rpath'):
                mlog.warning('''Please do not define rpath with a linker argument, use install_rpath or build_rpath properties instead.
This will become a hard error in a future Meson release.''')
        self.process_link_depends(kwargs.get('link_depends', []), environment)
        # Target-specific include dirs must be added BEFORE include dirs from
        # internal deps (added inside self.add_deps()) to override them.
        inclist = extract_as_list(kwargs, 'include_directories')
        self.add_include_dirs(inclist)
        # Add dependencies (which also have include_directories)
        deplist = extract_as_list(kwargs, 'dependencies')
        self.add_deps(deplist)
        # If an item in this list is False, the output corresponding to
        # the list index of that item will not be installed
        self.install_dir = typeslistify(kwargs.get('install_dir', [None]),
                                        (str, bool))
        main_class = kwargs.get('main_class', '')
        if not isinstance(main_class, str):
            raise InvalidArguments('Main class must be a string')
        self.main_class = main_class
        if isinstance(self, Executable):
            self.gui_app = kwargs.get('gui_app', False)
            if not isinstance(self.gui_app, bool):
                raise InvalidArguments('Argument gui_app must be boolean.')
        elif 'gui_app' in kwargs:
            raise InvalidArguments('Argument gui_app can only be used on executables.')
        extra_files = extract_as_list(kwargs, 'extra_files')
        for i in extra_files:
            assert(isinstance(i, File))
            trial = os.path.join(environment.get_source_dir(), i.subdir, i.fname)
            if not(os.path.isfile(trial)):
                raise InvalidArguments('Tried to add non-existing extra file %s.' % i)
        self.extra_files = extra_files
        self.install_rpath = kwargs.get('install_rpath', '')
        if not isinstance(self.install_rpath, str):
            raise InvalidArguments('Install_rpath is not a string.')
        self.build_rpath = kwargs.get('build_rpath', '')
        if not isinstance(self.build_rpath, str):
            raise InvalidArguments('Build_rpath is not a string.')
        resources = extract_as_list(kwargs, 'resources')
        for r in resources:
            if not isinstance(r, str):
                raise InvalidArguments('Resource argument is not a string.')
            trial = os.path.join(environment.get_source_dir(), self.subdir, r)
            if not os.path.isfile(trial):
                raise InvalidArguments('Tried to add non-existing resource %s.' % r)
        self.resources = resources
        # An empty list means "use the platform default prefix/suffix".
        if 'name_prefix' in kwargs:
            name_prefix = kwargs['name_prefix']
            if isinstance(name_prefix, list):
                if name_prefix:
                    raise InvalidArguments('name_prefix array must be empty to signify null.')
            elif not isinstance(name_prefix, str):
                raise InvalidArguments('name_prefix must be a string.')
            self.prefix = name_prefix
            self.name_prefix_set = True
        if 'name_suffix' in kwargs:
            name_suffix = kwargs['name_suffix']
            if isinstance(name_suffix, list):
                if name_suffix:
                    raise InvalidArguments('name_suffix array must be empty to signify null.')
            else:
                if not isinstance(name_suffix, str):
                    raise InvalidArguments('name_suffix must be a string.')
                self.suffix = name_suffix
            self.name_suffix_set = True
        if isinstance(self, StaticLibrary):
            # You can't disable PIC on OS X. The compiler ignores -fno-PIC.
            # PIC is always on for Windows (all code is position-independent
            # since library loading is done differently)
            if for_darwin(self.is_cross, self.environment) or for_windows(self.is_cross, self.environment):
                self.pic = True
            elif '-fPIC' in clist + cpplist:
                mlog.warning("Use the 'pic' kwarg instead of passing -fPIC manually to static library {!r}".format(self.name))
                self.pic = True
            else:
                self.pic = kwargs.get('pic', False)
                if not isinstance(self.pic, bool):
                    raise InvalidArguments('Argument pic to static library {!r} must be boolean'.format(self.name))
        self.implicit_include_directories = kwargs.get('implicit_include_directories', True)
        if not isinstance(self.implicit_include_directories, bool):
            raise InvalidArguments('Implicit_include_directories must be a boolean.')
+
    def get_filename(self):
        # Primary output file name of this target.
        return self.filename
+
    def get_outputs(self):
        # All files produced by this target (e.g. Vala emits .vapi/.h too).
        return self.outputs
+
    def get_extra_args(self, language):
        # Per-language extra compiler args; empty list if none were given.
        return self.extra_args.get(language, [])
+
+ def get_dependencies(self):
+ transitive_deps = []
+ for t in self.link_targets + self.link_whole_targets:
+ transitive_deps.append(t)
+ if isinstance(t, StaticLibrary):
+ transitive_deps += t.get_dependencies()
+ return transitive_deps
+
+ def get_source_subdir(self):
+ return self.subdir
+
+ def get_sources(self):
+ return self.sources
+
+ def get_objects(self):
+ return self.objects
+
+ def get_generated_sources(self):
+ return self.generated
+
+ def should_install(self):
+ return self.need_install
+
+ def has_pch(self):
+ return len(self.pch) > 0
+
+ def get_pch(self, language):
+ try:
+ return self.pch[language]
+ except KeyError:
+ return[]
+
+ def get_include_dirs(self):
+ return self.include_dirs
+
    def add_deps(self, deps):
        """Attach dependency objects to this target.

        Accepts a single dependency or a list; interpreter holder objects
        are unwrapped. Internal dependencies contribute sources, include
        dirs and libraries directly; external ones are stored as-is.
        Raises InvalidArguments for anything that is not dependency-like.
        """
        deps = listify(deps)
        for dep in deps:
            if hasattr(dep, 'held_object'):
                dep = dep.held_object
            if isinstance(dep, dependencies.InternalDependency):
                # Those parts that are internal.
                self.process_sourcelist(dep.sources)
                self.add_include_dirs(dep.include_directories)
                for l in dep.libraries:
                    self.link(l)
                # Those parts that are external.
                # Only the compile/link args are kept; sources and includes
                # were already merged above.
                extpart = dependencies.InternalDependency('undefined',
                                                          [],
                                                          dep.compile_args,
                                                          dep.link_args,
                                                          [], [], [])
                self.external_deps.append(extpart)
                # Deps of deps.
                self.add_deps(dep.ext_deps)
            elif isinstance(dep, dependencies.ExternalDependency):
                self.external_deps.append(dep)
                self.process_sourcelist(dep.get_sources())
            elif isinstance(dep, BuildTarget):
                raise InvalidArguments('''Tried to use a build target as a dependency.
You probably should put it in link_with instead.''')
            else:
                # This is a bit of a hack. We do not want Build to know anything
                # about the interpreter so we can't import it and use isinstance.
                # This should be reliable enough.
                if hasattr(dep, 'project_args_frozen') or hasattr(dep, 'global_args_frozen'):
                    raise InvalidArguments('Tried to use subproject object as a dependency.\n'
                                           'You probably wanted to use a dependency declared in it instead.\n'
                                           'Access it by calling get_variable() on the subproject object.')
                raise InvalidArguments('Argument is of an unacceptable type {!r}.\nMust be '
                                       'either an external dependency (returned by find_library() or '
                                       'dependency()) or an internal dependency (returned by '
                                       'declare_dependency()).'.format(type(dep).__name__))
+
+ def get_external_deps(self):
+ return self.external_deps
+
+ def link(self, target):
+ for t in listify(target, unholder=True):
+ if not isinstance(t, Target):
+ raise InvalidArguments('{!r} is not a target.'.format(t))
+ if not t.is_linkable_target():
+ raise InvalidArguments('Link target {!r} is not linkable.'.format(t))
+ if isinstance(self, SharedLibrary) and isinstance(t, StaticLibrary) and not t.pic:
+ msg = "Can't link non-PIC static library {!r} into shared library {!r}. ".format(t.name, self.name)
+ msg += "Use the 'pic' option to static_library to build with PIC."
+ raise InvalidArguments(msg)
+ if self.is_cross != t.is_cross:
+ raise InvalidArguments('Tried to mix cross built and native libraries in target {!r}'.format(self.name))
+ self.link_targets.append(t)
+
+ def link_whole(self, target):
+ for t in listify(target, unholder=True):
+ if not isinstance(t, StaticLibrary):
+ raise InvalidArguments('{!r} is not a static library.'.format(t))
+ if isinstance(self, SharedLibrary) and not t.pic:
+ msg = "Can't link non-PIC static library {!r} into shared library {!r}. ".format(t.name, self.name)
+ msg += "Use the 'pic' option to static_library to build with PIC."
+ raise InvalidArguments(msg)
+ if self.is_cross != t.is_cross:
+ raise InvalidArguments('Tried to mix cross built and native libraries in target {!r}'.format(self.name))
+ self.link_whole_targets.append(t)
+
    def add_pch(self, language, pchlist):
        """Validate and store a precompiled-header definition for `language`.

        Accepted forms: [header] or [header, source] in either order; when
        the source comes first the pair is normalized to header-first order.
        """
        if not pchlist:
            return
        elif len(pchlist) == 1:
            if not environment.is_header(pchlist[0]):
                raise InvalidArguments('PCH argument %s is not a header.' % pchlist[0])
        elif len(pchlist) == 2:
            if environment.is_header(pchlist[0]):
                if not environment.is_source(pchlist[1]):
                    raise InvalidArguments('PCH definition must contain one header and at most one source.')
            elif environment.is_source(pchlist[0]):
                if not environment.is_header(pchlist[1]):
                    raise InvalidArguments('PCH definition must contain one header and at most one source.')
                # Swap so the stored list is always [header, source].
                pchlist = [pchlist[1], pchlist[0]]
            else:
                raise InvalidArguments('PCH argument %s is of unknown type.' % pchlist[0])
        elif len(pchlist) > 2:
            raise InvalidArguments('PCH definition may have a maximum of 2 files.')
        self.pch[language] = pchlist
+
+ def add_include_dirs(self, args):
+ ids = []
+ for a in args:
+ # FIXME same hack, forcibly unpack from holder.
+ if hasattr(a, 'held_object'):
+ a = a.held_object
+ if not isinstance(a, IncludeDirs):
+ raise InvalidArguments('Include directory to be added is not an include directory object.')
+ ids.append(a)
+ self.include_dirs += ids
+
+ def add_compiler_args(self, language, args):
+ args = listify(args)
+ for a in args:
+ if not isinstance(a, (str, File)):
+ raise InvalidArguments('A non-string passed to compiler args.')
+ if language in self.extra_args:
+ self.extra_args[language] += args
+ else:
+ self.extra_args[language] = args
+
+ def get_aliases(self):
+ return {}
+
+ def get_langs_used_by_deps(self):
+ '''
+ Sometimes you want to link to a C++ library that exports C API, which
+ means the linker must link in the C++ stdlib, and we must use a C++
+ compiler for linking. The same is also applicable for objc/objc++, etc,
+ so we can keep using clike_langs for the priority order.
+
+ See: https://github.com/mesonbuild/meson/issues/1653
+ '''
+ langs = []
+ # Check if any of the external libraries were written in this language
+ for dep in self.external_deps:
+ if dep.language not in langs:
+ langs.append(dep.language)
+ # Check if any of the internal libraries this target links to were
+ # written in this language
+ for link_target in itertools.chain(self.link_targets, self.link_whole_targets):
+ for language in link_target.compilers:
+ if language not in langs:
+ langs.append(language)
+ return langs
+
    def get_clike_dynamic_linker(self):
        '''
        We use the order of languages in `clike_langs` to determine which
        linker to use in case the target has sources compiled with multiple
        compilers. All languages other than those in this list have their own
        linker.
        Note that Vala outputs C code, so Vala sources can use any linker
        that can link compiled C. We don't actually need to add an exception
        for Vala here because of that.
        '''
        # Populate list of all compilers, not just those being used to compile
        # sources in this target
        if self.is_cross:
            all_compilers = self.environment.coredata.cross_compilers
        else:
            all_compilers = self.environment.coredata.compilers
        # Languages used by dependencies
        dep_langs = self.get_langs_used_by_deps()
        # Pick a compiler based on the language priority-order
        for l in clike_langs:
            if l in self.compilers or l in dep_langs:
                try:
                    return all_compilers[l]
                except KeyError:
                    # The language is used but was never added as a project
                    # language, so no compiler object exists for it.
                    raise MesonException(
                        'Could not get a dynamic linker for build target {!r}. '
                        'Requires a linker for language "{}", but that is not '
                        'a project language.'.format(self.name, l))

        # No clike language at all; presumably callers prevent reaching this
        # point -- TODO confirm.
        m = 'Could not get a dynamic linker for build target {!r}'
        raise AssertionError(m.format(self.name))
+
+ def get_using_msvc(self):
+ '''
+ Check if the dynamic linker is MSVC. Used by Executable, StaticLibrary,
+ and SharedLibrary for deciding when to use MSVC-specific file naming
+ and debug filenames.
+
+ If at least some code is built with MSVC and the final library is
+ linked with MSVC, we can be sure that some debug info will be
+ generated. We only check the dynamic linker here because the static
+ linker is guaranteed to be of the same type.
+
+ Interesting cases:
+ 1. The Vala compiler outputs C code to be compiled by whatever
+ C compiler we're using, so all objects will still be created by the
+ MSVC compiler.
+ 2. If the target contains only objects, process_compilers guesses and
+ picks the first compiler that smells right.
+ '''
+ linker = self.get_clike_dynamic_linker()
+ if linker and linker.get_id() == 'msvc':
+ return True
+ return False
+
+ def is_linkable_target(self):
+ return False
+
+
class Generator:
    """Wraps an executable plus argument and output-name templates used to
    transform input files into output files (Meson's generator() object).

    The placeholders @BASENAME@ and @PLAINNAME@ are substituted per input
    file when computing arguments, output names and the depfile name.
    """

    def __init__(self, args, kwargs):
        if len(args) != 1:
            raise InvalidArguments('Generator requires exactly one positional argument: the executable')
        exe = args[0]
        # Unwrap interpreter holder objects.
        if hasattr(exe, 'held_object'):
            exe = exe.held_object
        if not isinstance(exe, (Executable, dependencies.ExternalProgram)):
            raise InvalidArguments('First generator argument must be an executable.')
        self.exe = exe
        self.depfile = None
        self.capture = False
        self.process_kwargs(kwargs)

    def __repr__(self):
        repr_str = "<{0}: {1}>"
        return repr_str.format(self.__class__.__name__, self.exe)

    def get_exe(self):
        return self.exe

    def process_kwargs(self, kwargs):
        """Validate and store 'arguments', 'output', 'depfile' and 'capture'."""
        if 'arguments' not in kwargs:
            raise InvalidArguments('Generator must have "arguments" keyword argument.')
        args = kwargs['arguments']
        if isinstance(args, str):
            args = [args]
        if not isinstance(args, list):
            raise InvalidArguments('"Arguments" keyword argument must be a string or a list of strings.')
        for a in args:
            if not isinstance(a, str):
                raise InvalidArguments('A non-string object in "arguments" keyword argument.')
        self.arglist = args
        if 'output' not in kwargs:
            raise InvalidArguments('Generator must have "output" keyword argument.')
        outputs = listify(kwargs['output'])
        for rule in outputs:
            if not isinstance(rule, str):
                raise InvalidArguments('"output" may only contain strings.')
            if '@BASENAME@' not in rule and '@PLAINNAME@' not in rule:
                raise InvalidArguments('Every element of "output" must contain @BASENAME@ or @PLAINNAME@.')
            if '/' in rule or '\\' in rule:
                # Message fixed: the keyword argument is called "output",
                # matching the other messages above.
                raise InvalidArguments('"output" must not contain a directory separator.')
        if len(outputs) > 1:
            # @OUTPUT@ is ambiguous when one rule produces several files.
            for o in outputs:
                if '@OUTPUT@' in o:
                    raise InvalidArguments('Tried to use @OUTPUT@ in a rule with more than one output.')
        self.outputs = outputs
        if 'depfile' in kwargs:
            depfile = kwargs['depfile']
            if not isinstance(depfile, str):
                raise InvalidArguments('Depfile must be a string.')
            if os.path.split(depfile)[1] != depfile:
                raise InvalidArguments('Depfile must be a plain filename without a subdirectory.')
            self.depfile = depfile
        if 'capture' in kwargs:
            capture = kwargs['capture']
            if not isinstance(capture, bool):
                raise InvalidArguments('Capture must be boolean.')
            self.capture = capture

    def get_base_outnames(self, inname):
        """Output filenames for input `inname`, with templates expanded."""
        plainname = os.path.split(inname)[1]
        basename = os.path.splitext(plainname)[0]
        return [x.replace('@BASENAME@', basename).replace('@PLAINNAME@', plainname) for x in self.outputs]

    def get_dep_outname(self, inname):
        """Depfile name for input `inname`; requires 'depfile' to have been set."""
        if self.depfile is None:
            raise InvalidArguments('Tried to get dep name for rule that does not have dependency file defined.')
        plainname = os.path.split(inname)[1]
        basename = os.path.splitext(plainname)[0]
        return self.depfile.replace('@BASENAME@', basename).replace('@PLAINNAME@', plainname)

    def get_arglist(self, inname):
        """Command-line arguments for input `inname`, with templates expanded."""
        plainname = os.path.split(inname)[1]
        basename = os.path.splitext(plainname)[0]
        return [x.replace('@BASENAME@', basename).replace('@PLAINNAME@', plainname) for x in self.arglist]

    def process_files(self, name, files, state, extra_args=None):
        """Build a GeneratedList by applying this generator to `files`.

        `extra_args` previously used a shared mutable default ([]); the None
        sentinel keeps [] as the effective default without the sharing bug.
        """
        output = GeneratedList(self, extra_args=[] if extra_args is None else extra_args)
        for f in files:
            if isinstance(f, str):
                f = File.from_source_file(state.environment.source_dir, state.subdir, f)
            elif not isinstance(f, File):
                raise InvalidArguments('{} arguments must be strings or files not {!r}.'.format(name, f))
            output.add_file(f)
        return output
+
+
class GeneratedList:
    """The result of applying a Generator to a list of input files: tracks
    inputs, the computed outputs, and the per-input output mapping."""

    def __init__(self, generator, extra_args=None):
        # BUG FIX: `extra_args=[]` was a mutable default argument shared
        # between every GeneratedList created without explicit extra_args;
        # use the None sentinel instead (behavioral default is still []).
        if hasattr(generator, 'held_object'):
            generator = generator.held_object
        self.generator = generator
        self.name = self.generator.exe
        self.infilelist = []
        self.outfilelist = []
        self.outmap = {}          # input File -> list of its output names
        self.extra_depends = []
        self.extra_args = [] if extra_args is None else extra_args

    def add_file(self, newfile):
        """Register an input file and record the outputs it will produce."""
        self.infilelist.append(newfile)
        outfiles = self.generator.get_base_outnames(newfile.fname)
        self.outfilelist += outfiles
        self.outmap[newfile] = outfiles

    def get_inputs(self):
        return self.infilelist

    def get_outputs(self):
        return self.outfilelist

    def get_outputs_for(self, filename):
        return self.outmap[filename]

    def get_generator(self):
        return self.generator

    def get_extra_args(self):
        return self.extra_args
+
class Executable(BuildTarget):
    """Build target that produces an executable program."""

    def __init__(self, name, subdir, subproject, is_cross, sources, objects, environment, kwargs):
        super().__init__(name, subdir, subproject, is_cross, sources, objects, environment, kwargs)
        # Executables are unprefixed and unsuffixed by default, except that
        # Windows, Cygwin and C#/Mono binaries get an 'exe' suffix.
        if not hasattr(self, 'prefix'):
            self.prefix = ''
        if not hasattr(self, 'suffix'):
            needs_exe_suffix = (for_windows(is_cross, environment) or
                                for_cygwin(is_cross, environment) or
                                'cs' in self.compilers)
            self.suffix = 'exe' if needs_exe_suffix else ''
        self.filename = self.name if not self.suffix else self.name + '.' + self.suffix
        self.outputs = [self.filename]

        # Import-library bookkeeping: the name this target will actually
        # generate, plus the names the MSVC and GCC toolchains would
        # respectively produce.
        self.import_filename = None
        self.vs_import_filename = None
        self.gcc_import_filename = None

        # An executable only becomes link_with:-able when an import library
        # is requested, and that only means something on Windows platforms.
        self.is_linkwithable = False
        if kwargs.get('implib'):
            implib_basename = self.name + '.exe'
            if not isinstance(kwargs['implib'], bool):
                implib_basename = kwargs['implib']
            self.is_linkwithable = True
            if for_windows(is_cross, environment) or for_cygwin(is_cross, environment):
                self.vs_import_filename = '{0}.lib'.format(implib_basename)
                self.gcc_import_filename = 'lib{0}.a'.format(implib_basename)
                if self.get_using_msvc():
                    self.import_filename = self.vs_import_filename
                else:
                    self.import_filename = self.gcc_import_filename

    def type_suffix(self):
        return "@exe"

    def check_unknown_kwargs(self, kwargs):
        self.check_unknown_kwargs_int(kwargs, known_exe_kwargs)

    def get_import_filename(self):
        """Name of the import library the toolchain will output, or None when
        this platform needs no import library."""
        return self.import_filename

    def get_import_filenameslist(self):
        """Both candidate import-library names, or [] when none is generated."""
        if not self.import_filename:
            return []
        return [self.vs_import_filename, self.gcc_import_filename]

    def is_linkable_target(self):
        return self.is_linkwithable
+
class StaticLibrary(BuildTarget):
    # Build target producing a static library/archive.
    def __init__(self, name, subdir, subproject, is_cross, sources, objects, environment, kwargs):
        # Apply the project-wide b_staticpic default for 'pic' before the
        # base class processes the kwargs.
        if 'pic' not in kwargs and 'b_staticpic' in environment.coredata.base_options:
            kwargs['pic'] = environment.coredata.base_options['b_staticpic'].value
        super().__init__(name, subdir, subproject, is_cross, sources, objects, environment, kwargs)
        if 'cs' in self.compilers:
            raise InvalidArguments('Static libraries not supported for C#.')
        if 'rust' in self.compilers:
            # If no crate type is specified, or it's the generic lib type, use rlib
            if not hasattr(self, 'rust_crate_type') or self.rust_crate_type == 'lib':
                mlog.debug('Defaulting Rust static library target crate type to rlib')
                self.rust_crate_type = 'rlib'
            # Don't let configuration proceed with a non-static crate type
            elif self.rust_crate_type not in ['rlib', 'staticlib']:
                raise InvalidArguments('Crate type "{0}" invalid for static libraries; must be "rlib" or "staticlib"'.format(self.rust_crate_type))
        # By default a static library is named libfoo.a even on Windows because
        # MSVC does not have a consistent convention for what static libraries
        # are called. The MSVC CRT uses libfoo.lib syntax but nothing else uses
        # it and GCC only looks for static libraries called foo.lib and
        # libfoo.a. However, we cannot use foo.lib because that's the same as
        # the import library. Using libfoo.a is ok because people using MSVC
        # always pass the library filename while linking anyway.
        if not hasattr(self, 'prefix'):
            self.prefix = 'lib'
        if not hasattr(self, 'suffix'):
            if 'rust' in self.compilers:
                if not hasattr(self, 'rust_crate_type') or self.rust_crate_type == 'rlib':
                    # default Rust static library suffix
                    self.suffix = 'rlib'
                elif self.rust_crate_type == 'staticlib':
                    self.suffix = 'a'
            else:
                self.suffix = 'a'
        self.filename = self.prefix + self.name + '.' + self.suffix
        self.outputs = [self.filename]

    def type_suffix(self):
        # Id suffix distinguishing static libraries from other target kinds.
        return "@sta"

    def check_unknown_kwargs(self, kwargs):
        self.check_unknown_kwargs_int(kwargs, known_lib_kwargs)

    def process_kwargs(self, kwargs, environment):
        # Extends the common kwarg processing with the Rust-specific
        # 'rust_crate_type' keyword.
        super().process_kwargs(kwargs, environment)
        if 'rust_crate_type' in kwargs:
            rust_crate_type = kwargs['rust_crate_type']
            if isinstance(rust_crate_type, str):
                self.rust_crate_type = rust_crate_type
            else:
                raise InvalidArguments('Invalid rust_crate_type "{0}": must be a string.'.format(rust_crate_type))

    def is_linkable_target(self):
        return True
+
class SharedLibrary(BuildTarget):
    # Build target producing a shared library (.so/.dylib/.dll), including
    # version handling, platform filename templates and import libraries.
    def __init__(self, name, subdir, subproject, is_cross, sources, objects, environment, kwargs):
        # Initialize version/import fields before super().__init__(), which
        # presumably runs process_kwargs() and may fill them in -- the
        # attributes must exist either way.
        self.soversion = None
        self.ltversion = None
        self.vs_module_defs = None
        # The import library this target will generate
        self.import_filename = None
        # The import library that Visual Studio would generate (and accept)
        self.vs_import_filename = None
        # The import library that GCC would generate (and prefer)
        self.gcc_import_filename = None
        super().__init__(name, subdir, subproject, is_cross, sources, objects, environment, kwargs)
        if 'rust' in self.compilers:
            # If no crate type is specified, or it's the generic lib type, use dylib
            if not hasattr(self, 'rust_crate_type') or self.rust_crate_type == 'lib':
                mlog.debug('Defaulting Rust dynamic library target crate type to "dylib"')
                self.rust_crate_type = 'dylib'
            # Don't let configuration proceed with a non-dynamic crate type
            elif self.rust_crate_type not in ['dylib', 'cdylib']:
                raise InvalidArguments('Crate type "{0}" invalid for dynamic libraries; must be "dylib" or "cdylib"'.format(self.rust_crate_type))
        if not hasattr(self, 'prefix'):
            self.prefix = None
        if not hasattr(self, 'suffix'):
            self.suffix = None
        # Unversioned filename template; also used for the linking alias.
        self.basic_filename_tpl = '{0.prefix}{0.name}.{0.suffix}'
        self.determine_filenames(is_cross, environment)

    def determine_filenames(self, is_cross, env):
        """
        See https://github.com/mesonbuild/meson/pull/417 for details.

        First we determine the filename template (self.filename_tpl), then we
        set the output filename (self.filename).

        The template is needed while creating aliases (self.get_aliases),
        which are needed while generating .so shared libraries for Linux.

        Besides this, there's also the import library name, which is only used
        on Windows since on that platform the linker uses a separate library
        called the "import library" during linking instead of the shared
        library (DLL). The toolchain will output an import library in one of
        two formats: GCC or Visual Studio.

        When we're building with Visual Studio, the import library that will be
        generated by the toolchain is self.vs_import_filename, and with
        MinGW/GCC, it's self.gcc_import_filename. self.import_filename will
        always contain the import library name this target will generate.
        """
        prefix = ''
        suffix = ''
        self.filename_tpl = self.basic_filename_tpl
        # If the user already provided the prefix and suffix to us, we don't
        # need to do any filename suffix/prefix detection.
        # NOTE: manual prefix/suffix override is currently only tested for C/C++
        if self.prefix is not None and self.suffix is not None:
            pass
        # C# and Mono
        elif 'cs' in self.compilers:
            prefix = ''
            suffix = 'dll'
            self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}'
        # C, C++, Swift, Vala
        # Only Windows uses a separate import library for linking
        # For all other targets/platforms import_filename stays None
        elif for_windows(is_cross, env):
            suffix = 'dll'
            self.vs_import_filename = '{0}.lib'.format(self.name)
            self.gcc_import_filename = 'lib{0}.dll.a'.format(self.name)
            if self.get_using_msvc():
                # Shared library is of the form foo.dll
                prefix = ''
                # Import library is called foo.lib
                self.import_filename = self.vs_import_filename
            # Assume GCC-compatible naming
            else:
                # Shared library is of the form libfoo.dll
                prefix = 'lib'
                # Import library is called libfoo.dll.a
                self.import_filename = self.gcc_import_filename
            # Shared library has the soversion if it is defined
            if self.soversion:
                self.filename_tpl = '{0.prefix}{0.name}-{0.soversion}.{0.suffix}'
            else:
                self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}'
        elif for_cygwin(is_cross, env):
            suffix = 'dll'
            self.gcc_import_filename = 'lib{0}.dll.a'.format(self.name)
            # Shared library is of the form cygfoo.dll
            # (ld --dll-search-prefix=cyg is the default)
            prefix = 'cyg'
            # Import library is called libfoo.dll.a
            self.import_filename = self.gcc_import_filename
            if self.soversion:
                self.filename_tpl = '{0.prefix}{0.name}-{0.soversion}.{0.suffix}'
            else:
                self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}'
        elif for_darwin(is_cross, env):
            prefix = 'lib'
            suffix = 'dylib'
            # On macOS, the filename can only contain the major version
            if self.soversion:
                # libfoo.X.dylib
                self.filename_tpl = '{0.prefix}{0.name}.{0.soversion}.{0.suffix}'
            else:
                # libfoo.dylib
                self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}'
        else:
            prefix = 'lib'
            suffix = 'so'
            if self.ltversion:
                # libfoo.so.X[.Y[.Z]] (.Y and .Z are optional)
                self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}.{0.ltversion}'
            elif self.soversion:
                # libfoo.so.X
                self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}.{0.soversion}'
            else:
                # No versioning, libfoo.so
                self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}'
        # User-provided prefix/suffix always win over the detected ones.
        if self.prefix is None:
            self.prefix = prefix
        if self.suffix is None:
            self.suffix = suffix
        self.filename = self.filename_tpl.format(self)
        self.outputs = [self.filename]

    def process_kwargs(self, kwargs, environment):
        # Handles version/soversion, vs_module_defs and rust_crate_type on
        # top of the common target kwargs.
        super().process_kwargs(kwargs, environment)
        # Shared library version
        if 'version' in kwargs:
            self.ltversion = kwargs['version']
            if not isinstance(self.ltversion, str):
                raise InvalidArguments('Shared library version needs to be a string, not ' + type(self.ltversion).__name__)
            if not re.fullmatch(r'[0-9]+(\.[0-9]+){0,2}', self.ltversion):
                raise InvalidArguments('Invalid Shared library version "{0}". Must be of the form X.Y.Z where all three are numbers. Y and Z are optional.'.format(self.ltversion))
        # Try to extract/deduce the soversion
        if 'soversion' in kwargs:
            self.soversion = kwargs['soversion']
            if isinstance(self.soversion, int):
                self.soversion = str(self.soversion)
            if not isinstance(self.soversion, str):
                raise InvalidArguments('Shared library soversion is not a string or integer.')
        elif self.ltversion:
            # library version is defined, get the soversion from that
            # We replicate what Autotools does here and take the first
            # number of the version by default.
            self.soversion = self.ltversion.split('.')[0]
        # Visual Studio module-definitions file
        if 'vs_module_defs' in kwargs:
            path = kwargs['vs_module_defs']
            if hasattr(path, 'held_object'):
                path = path.held_object
            if isinstance(path, str):
                if os.path.isabs(path):
                    self.vs_module_defs = File.from_absolute_file(path)
                else:
                    self.vs_module_defs = File.from_source_file(environment.source_dir, self.subdir, path)
                self.link_depends.append(self.vs_module_defs)
            elif isinstance(path, File):
                # When passing a generated file.
                self.vs_module_defs = path
                self.link_depends.append(path)
            elif hasattr(path, 'get_filename'):
                # When passing output of a Custom Target
                path = File.from_built_file(path.subdir, path.get_filename())
                self.vs_module_defs = path
                self.link_depends.append(path)
            else:
                raise InvalidArguments(
                    'Shared library vs_module_defs must be either a string, '
                    'a file object or a Custom Target')
        if 'rust_crate_type' in kwargs:
            rust_crate_type = kwargs['rust_crate_type']
            if isinstance(rust_crate_type, str):
                self.rust_crate_type = rust_crate_type
            else:
                raise InvalidArguments('Invalid rust_crate_type "{0}": must be a string.'.format(rust_crate_type))

    def check_unknown_kwargs(self, kwargs):
        self.check_unknown_kwargs_int(kwargs, known_lib_kwargs)

    def get_import_filename(self):
        """
        The name of the import library that will be outputted by the compiler

        Returns None if there is no import library required for this platform
        """
        return self.import_filename

    def get_import_filenameslist(self):
        # Both candidate names, or [] when no import library is generated.
        if self.import_filename:
            return [self.vs_import_filename, self.gcc_import_filename]
        return []

    def get_all_link_deps(self):
        # This target plus whatever the base class computes transitively.
        return [self] + self.get_transitive_link_deps()

    def get_aliases(self):
        """
        If the versioned library name is libfoo.so.0.100.0, aliases are:
        * libfoo.so.0 (soversion) -> libfoo.so.0.100.0
        * libfoo.so (unversioned; for linking) -> libfoo.so.0
        Same for dylib:
        * libfoo.dylib (unversioned; for linking) -> libfoo.0.dylib
        """
        aliases = {}
        # Aliases are only useful with .so and .dylib libraries. Also if
        # there's no self.soversion (no versioning), we don't need aliases.
        if self.suffix not in ('so', 'dylib') or not self.soversion:
            return {}
        # With .so libraries, the minor and micro versions are also in the
        # filename. If ltversion != soversion we create an soversion alias:
        # libfoo.so.0 -> libfoo.so.0.100.0
        # Where libfoo.so.0.100.0 is the actual library
        if self.suffix == 'so' and self.ltversion and self.ltversion != self.soversion:
            alias_tpl = self.filename_tpl.replace('ltversion', 'soversion')
            ltversion_filename = alias_tpl.format(self)
            aliases[ltversion_filename] = self.filename
        # libfoo.so.0/libfoo.0.dylib is the actual library
        else:
            ltversion_filename = self.filename
        # Unversioned alias:
        # libfoo.so -> libfoo.so.0
        # libfoo.dylib -> libfoo.0.dylib
        aliases[self.basic_filename_tpl.format(self)] = ltversion_filename
        return aliases

    def type_suffix(self):
        return "@sha"

    def is_linkable_target(self):
        return True
+
class SharedModule(SharedLibrary):
    """A shared library meant to be used with dlopen rather than linked into
    something else; version kwargs are therefore rejected."""

    def __init__(self, name, subdir, subproject, is_cross, sources, objects, environment, kwargs):
        for forbidden in ('version', 'soversion'):
            if forbidden in kwargs:
                raise MesonException('Shared modules must not specify the {} kwarg.'.format(forbidden))
        super().__init__(name, subdir, subproject, is_cross, sources, objects, environment, kwargs)
        # Modules are never linked against, so no import library is produced.
        self.import_filename = None
+
class CustomTarget(Target):
    # Keyword arguments accepted by custom_target(). Only key membership is
    # checked by the unknown-kwarg warning in __init__; the boolean values
    # are not read there ('capture': False looks deliberate but its effect
    # is not visible in this file -- TODO confirm).
    known_kwargs = {'input': True,
                    'output': True,
                    'command': True,
                    'capture': False,
                    'install': True,
                    'install_dir': True,
                    'build_always': True,
                    'depends': True,
                    'depend_files': True,
                    'depfile': True,
                    'build_by_default': True,
                    'override_options': True,
                    }
+
    def __init__(self, name, subdir, subproject, kwargs, absolute_paths=False):
        # The literal False presumably fills the base class's is_cross
        # slot -- TODO confirm against Target.__init__.
        super().__init__(name, subdir, subproject, False)
        self.dependencies = []
        self.extra_depends = []
        self.depend_files = [] # Files that this target depends on but are not on the command line.
        self.depfile = None
        self.process_kwargs(kwargs)
        self.extra_files = []
        # Whether to use absolute paths for all files on the commandline
        self.absolute_paths = absolute_paths
        # Warn (but do not fail) about keyword arguments we do not recognize.
        unknowns = []
        for k in kwargs:
            if k not in CustomTarget.known_kwargs:
                unknowns.append(k)
        if len(unknowns) > 0:
            mlog.warning('Unknown keyword arguments in target %s: %s' %
                         (self.name, ', '.join(unknowns)))
+
    def __lt__(self, other):
        # Order custom targets by their unique id (used for stable sorting).
        return self.get_id() < other.get_id()
+
+ def __repr__(self):
+ repr_str = "<{0} {1}: {2}>"
+ return repr_str.format(self.__class__.__name__, self.get_id(), self.command)
+
+ def get_id(self):
+ return self.name + self.type_suffix()
+
+ def get_target_dependencies(self):
+ deps = self.dependencies[:]
+ deps += self.extra_depends
+ for c in self.sources:
+ if hasattr(c, 'held_object'):
+ c = c.held_object
+ if isinstance(c, (BuildTarget, CustomTarget)):
+ deps.append(c)
+ return deps
+
+ def get_transitive_build_target_deps(self):
+ '''
+ Recursively fetch the build targets that this custom target depends on,
+ whether through `command:`, `depends:`, or `sources:` The recursion is
+ only performed on custom targets.
+ This is useful for setting PATH on Windows for finding required DLLs.
+ F.ex, if you have a python script that loads a C module that links to
+ other DLLs in your project.
+ '''
+ bdeps = set()
+ deps = self.get_target_dependencies()
+ for d in deps:
+ if isinstance(d, BuildTarget):
+ bdeps.add(d)
+ elif isinstance(d, CustomTarget):
+ bdeps.update(d.get_transitive_build_target_deps())
+ return bdeps
+
    def flatten_command(self, cmd):
        # Normalize the "command" array: strings pass through; File and
        # ExternalProgram entries are also recorded as file-level
        # dependencies; targets are recorded as target dependencies; nested
        # lists are flattened recursively.
        cmd = listify(cmd, unholder=True)
        final_cmd = []
        for c in cmd:
            if isinstance(c, str):
                final_cmd.append(c)
            elif isinstance(c, File):
                self.depend_files.append(c)
                final_cmd.append(c)
            elif isinstance(c, dependencies.ExternalProgram):
                if not c.found():
                    m = 'Tried to use not-found external program {!r} in "command"'
                    raise InvalidArguments(m.format(c.name))
                # Depend on the program binary itself so the target is
                # rebuilt when it changes.
                self.depend_files.append(File.from_absolute_file(c.get_path()))
                final_cmd += c.get_command()
            elif isinstance(c, (BuildTarget, CustomTarget)):
                self.dependencies.append(c)
                final_cmd.append(c)
            elif isinstance(c, list):
                final_cmd += self.flatten_command(c)
            else:
                raise InvalidArguments('Argument {!r} in "command" is invalid'.format(c))
        return final_cmd
+
    def process_kwargs(self, kwargs):
        """Validate and store all custom_target() keyword arguments.

        Handles input/output template substitution, capture, command
        flattening, install settings, and the depends/depend_files lists.
        """
        super().process_kwargs(kwargs)
        self.sources = extract_as_list(kwargs, 'input', unholder=True)
        if 'output' not in kwargs:
            raise InvalidArguments('Missing keyword argument "output".')
        self.outputs = listify(kwargs['output'])
        # This will substitute values from the input into output and return it.
        inputs = get_sources_string_names(self.sources)
        values = get_filenames_templates_dict(inputs, [])
        for i in self.outputs:
            if not(isinstance(i, str)):
                raise InvalidArguments('Output argument not a string.')
            if '/' in i:
                raise InvalidArguments('Output must not contain a path segment.')
            if '@INPUT@' in i or '@INPUT0@' in i:
                m = 'Output cannot contain @INPUT@ or @INPUT0@, did you ' \
                    'mean @PLAINNAME@ or @BASENAME@?'
                raise InvalidArguments(m)
            # We already check this during substitution, but the error message
            # will be unclear/confusing, so check it here.
            if len(inputs) != 1 and ('@PLAINNAME@' in i or '@BASENAME@' in i):
                m = "Output cannot contain @PLAINNAME@ or @BASENAME@ when " \
                    "there is more than one input (we can't know which to use)"
                raise InvalidArguments(m)
        self.outputs = substitute_values(self.outputs, values)
        self.capture = kwargs.get('capture', False)
        if self.capture and len(self.outputs) != 1:
            raise InvalidArguments('Capturing can only output to a single file.')
        if 'command' not in kwargs:
            raise InvalidArguments('Missing keyword argument "command".')
        if 'depfile' in kwargs:
            depfile = kwargs['depfile']
            if not isinstance(depfile, str):
                raise InvalidArguments('Depfile must be a string.')
            if os.path.split(depfile)[1] != depfile:
                raise InvalidArguments('Depfile must be a plain filename without a subdirectory.')
            self.depfile = depfile
        self.command = self.flatten_command(kwargs['command'])
        if self.capture:
            # @OUTPUT@ makes no sense when stdout is redirected to the output.
            for c in self.command:
                if isinstance(c, str) and '@OUTPUT@' in c:
                    raise InvalidArguments('@OUTPUT@ is not allowed when capturing output.')
        if 'install' in kwargs:
            self.install = kwargs['install']
            if not isinstance(self.install, bool):
                raise InvalidArguments('"install" must be boolean.')
            if self.install:
                if 'install_dir' not in kwargs:
                    raise InvalidArguments('"install_dir" must be specified '
                                           'when installing a target')
                # If an item in this list is False, the output corresponding to
                # the list index of that item will not be installed
                self.install_dir = typeslistify(kwargs['install_dir'], (str, bool))
        else:
            self.install = False
            self.install_dir = [None]
        self.build_always = kwargs.get('build_always', False)
        if not isinstance(self.build_always, bool):
            raise InvalidArguments('Argument build_always must be a boolean.')
        extra_deps, depend_files = extract_as_list(kwargs, 'depends', 'depend_files', pop = False)
        for ed in extra_deps:
            # Holders may be nested; unwrap fully.
            while hasattr(ed, 'held_object'):
                ed = ed.held_object
            if not isinstance(ed, (CustomTarget, BuildTarget)):
                raise InvalidArguments('Can only depend on toplevel targets: custom_target or build_target (executable or a library)')
            self.extra_depends.append(ed)
        for i in depend_files:
            if isinstance(i, (File, str)):
                self.depend_files.append(i)
            else:
                mlog.debug(i)
                raise InvalidArguments('Unknown type {!r} in depend_files.'.format(type(i).__name__))
+
+ def get_dependencies(self):
+ return self.dependencies
+
+ def should_install(self):
+ return self.install
+
+ def get_custom_install_dir(self):
+ return self.install_dir
+
+ def get_outputs(self):
+ return self.outputs
+
+ def get_filename(self):
+ return self.outputs[0]
+
+ def get_sources(self):
+ return self.sources
+
+ def get_generated_lists(self):
+ genlists = []
+ for c in self.sources:
+ if hasattr(c, 'held_object'):
+ c = c.held_object
+ if isinstance(c, GeneratedList):
+ genlists.append(c)
+ return genlists
+
+ def get_generated_sources(self):
+ return self.get_generated_lists()
+
+ def type_suffix(self):
+ return "@cus"
+
+ def __getitem__(self, index):
+ return CustomTargetIndex(self, self.outputs[index])
+
+ def __setitem__(self, index, value):
+ raise NotImplementedError
+
+ def __delitem__(self, index):
+ raise NotImplementedError
+
class RunTarget(Target):

    """A target that only runs a command; it produces no build outputs
    and is never installed."""

    def __init__(self, name, command, args, dependencies, subdir, subproject):
        super().__init__(name, subdir, subproject, False)
        self.command = command
        self.args = args
        self.dependencies = dependencies

    def __lt__(self, other):
        # Order targets by their unique id so sorting is deterministic.
        return self.get_id() < other.get_id()

    def __repr__(self):
        return '<{} {}: {}>'.format(self.__class__.__name__, self.get_id(), self.command)

    def get_dependencies(self):
        """Return the targets that must be built before this one runs."""
        return self.dependencies

    def get_generated_sources(self):
        """Run targets never generate sources."""
        return []

    def get_sources(self):
        """Run targets have no sources."""
        return []

    def should_install(self):
        """Run targets are never installed."""
        return False

    def get_filename(self):
        """There is no output file; use the target name as its filename."""
        return self.name

    def type_suffix(self):
        return "@run"
+
class Jar(BuildTarget):

    """Build target producing a Java .jar archive."""

    def __init__(self, name, subdir, subproject, is_cross, sources, objects, environment, kwargs):
        super().__init__(name, subdir, subproject, is_cross, sources, objects, environment, kwargs)
        # Only Java sources are allowed in a jar.
        for src in self.sources:
            if not src.endswith('.java'):
                raise InvalidArguments('Jar source %s is not a java file.' % src)
        self.filename = self.name + '.jar'
        self.outputs = [self.filename]
        self.java_args = kwargs.get('java_args', [])

    def get_main_class(self):
        """Return the configured main class for this jar."""
        return self.main_class

    def type_suffix(self):
        return "@jar"

    def get_java_args(self):
        """Extra arguments passed to the Java compiler."""
        return self.java_args

    def validate_cross_install(self, environment):
        """All jar targets are installable, so nothing to validate."""
        pass
+
+
class CustomTargetIndex:

    """A special opaque object returned by indexing a CustomTarget. This object
    exists in meson, but acts as a proxy in the backends, making targets depend
    on the CustomTarget it's derived from, but only adding one source file to
    the sources.
    """

    def __init__(self, target, output):
        # target: the CustomTarget being indexed
        # output: one entry of target's outputs
        self.target = target
        self.output = output

    def __repr__(self):
        # Bug fix: CustomTarget has no 'output' attribute (it stores its
        # outputs in 'outputs'), so this previously raised AttributeError.
        # Use the accessor instead.
        return '<CustomTargetIndex: {!r}[{}]>'.format(
            self.target, self.target.get_outputs().index(self.output))

    def get_outputs(self):
        """This proxy contributes exactly one output."""
        return [self.output]

    def get_subdir(self):
        """Delegate to the underlying target's subdirectory."""
        return self.target.get_subdir()
+
+
class ConfigureFile:

    """Describes a configure_file() invocation: a source template in a
    subdirectory expanded into a target file using configuration data."""

    def __init__(self, subdir, sourcename, targetname, configuration_data):
        self.subdir = subdir
        self.sourcename = sourcename
        self.targetname = targetname
        self.configuration_data = configuration_data

    def __repr__(self):
        source = os.path.join(self.subdir, self.sourcename)
        target = os.path.join(self.subdir, self.targetname)
        return '<{}: {} -> {}>'.format(self.__class__.__name__, source, target)

    def get_configuration_data(self):
        """Return the ConfigurationData used for substitution."""
        return self.configuration_data

    def get_subdir(self):
        return self.subdir

    def get_source_name(self):
        return self.sourcename

    def get_target_name(self):
        return self.targetname
+
class ConfigurationData:

    """Mapping of configuration variable names to (value, description)
    pairs, filled in by the interpreter."""

    def __init__(self):
        super().__init__()
        self.values = {}

    def __repr__(self):
        return repr(self.values)

    def __contains__(self, value):
        return value in self.values

    def get(self, name):
        """Return the (value, description) pair stored under name."""
        return self.values[name]

    def keys(self):
        return self.values.keys()
+
+# A bit poorly named, but this represents plain data files to copy
+# during install.
# A bit poorly named, but this represents plain data files to copy
# during install.
class Data:

    """Plain data files to be copied into place at install time.

    sources: File objects (a single item is normalized to a list)
    install_dir: destination directory for the files
    install_mode: optional install permissions/ownership
    """

    def __init__(self, sources, install_dir, install_mode=None):
        # Normalize in a single step; the old code assigned self.sources
        # twice (first the raw value, then the listified one), which was
        # a redundant dead store.
        self.sources = listify(sources)
        self.install_dir = install_dir
        self.install_mode = install_mode
        for s in self.sources:
            assert(isinstance(s, File))
+
class RunScript(dict):

    """A dict with fixed 'exe' and 'args' keys describing a script
    to run at build time."""

    def __init__(self, script, args):
        super().__init__()
        assert(isinstance(script, list))
        assert(isinstance(args, list))
        self.update({'exe': script, 'args': args})
+
class TestSetup:

    """Options of a single add_test_setup() definition (all keyword-only)."""

    def __init__(self, *, exe_wrapper=None, gdb=None, timeout_multiplier=None, env=None):
        # Stored verbatim; interpretation happens at test-run time.
        self.exe_wrapper = exe_wrapper
        self.gdb = gdb
        self.timeout_multiplier = timeout_multiplier
        self.env = env
+
def get_sources_string_names(sources):
    '''
    For the specified list of @sources which can be strings, Files, or targets,
    get all the output basenames.
    '''
    names = []
    for src in sources:
        # Unwrap interpreter holder objects (possibly nested).
        while hasattr(src, 'held_object'):
            src = src.held_object
        if isinstance(src, str):
            names.append(src)
        elif isinstance(src, (BuildTarget, CustomTarget, GeneratedList)):
            names.extend(src.get_outputs())
        elif isinstance(src, File):
            names.append(src.fname)
        else:
            raise AssertionError('Unknown source type: {!r}'.format(src))
    return names
--- /dev/null
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Public symbols for compilers sub-package when using 'from . import compilers'
__all__ = [
    # Compiler-type constants (one per toolchain/platform combination)
    'CLANG_OSX',
    'CLANG_STANDARD',
    'CLANG_WIN',
    'GCC_CYGWIN',
    'GCC_MINGW',
    'GCC_OSX',
    'GCC_STANDARD',
    'ICC_OSX',
    'ICC_STANDARD',
    'ICC_WIN',

    # Helper data and functions shared across compiler implementations
    'base_options',
    'clike_langs',
    'c_suffixes',
    'cpp_suffixes',
    'get_base_compile_args',
    'get_base_link_args',
    'is_assembly',
    'is_header',
    'is_library',
    'is_llvm_ir',
    'is_object',
    'is_source',
    'lang_suffixes',
    'sanitizer_compile_args',
    'sort_clike',

    # Concrete compiler classes and mixins
    'CCompiler',
    'ClangCCompiler',
    'ClangCompiler',
    'ClangCPPCompiler',
    'ClangObjCCompiler',
    'ClangObjCPPCompiler',
    'CompilerArgs',
    'CPPCompiler',
    'DCompiler',
    'DmdDCompiler',
    'FortranCompiler',
    'G95FortranCompiler',
    'GnuCCompiler',
    'GnuCompiler',
    'GnuCPPCompiler',
    'GnuDCompiler',
    'GnuFortranCompiler',
    'GnuObjCCompiler',
    'GnuObjCPPCompiler',
    'IntelCompiler',
    'IntelCCompiler',
    'IntelCPPCompiler',
    'IntelFortranCompiler',
    'JavaCompiler',
    'LLVMDCompiler',
    'MonoCompiler',
    'NAGFortranCompiler',
    'ObjCCompiler',
    'ObjCPPCompiler',
    'Open64FortranCompiler',
    'PathScaleFortranCompiler',
    'PGIFortranCompiler',
    'RustCompiler',
    'SunFortranCompiler',
    'SwiftCompiler',
    'ValaCompiler',
    'VisualStudioCCompiler',
    'VisualStudioCPPCompiler',
]
+
+# Bring symbols from each module into compilers sub-package namespace
+from .compilers import (
+ GCC_OSX,
+ GCC_MINGW,
+ GCC_CYGWIN,
+ GCC_STANDARD,
+ CLANG_OSX,
+ CLANG_WIN,
+ CLANG_STANDARD,
+ ICC_OSX,
+ ICC_WIN,
+ ICC_STANDARD,
+ base_options,
+ clike_langs,
+ c_suffixes,
+ cpp_suffixes,
+ get_base_compile_args,
+ get_base_link_args,
+ is_header,
+ is_source,
+ is_assembly,
+ is_llvm_ir,
+ is_object,
+ is_library,
+ lang_suffixes,
+ sanitizer_compile_args,
+ sort_clike,
+ ClangCompiler,
+ CompilerArgs,
+ GnuCompiler,
+ IntelCompiler,
+)
+from .c import (
+ CCompiler,
+ ClangCCompiler,
+ GnuCCompiler,
+ IntelCCompiler,
+ VisualStudioCCompiler,
+)
+from .cpp import (
+ CPPCompiler,
+ ClangCPPCompiler,
+ GnuCPPCompiler,
+ IntelCPPCompiler,
+ VisualStudioCPPCompiler,
+)
+from .cs import MonoCompiler
+from .d import (
+ DCompiler,
+ DmdDCompiler,
+ GnuDCompiler,
+ LLVMDCompiler,
+)
+from .fortran import (
+ FortranCompiler,
+ G95FortranCompiler,
+ GnuFortranCompiler,
+ IntelFortranCompiler,
+ NAGFortranCompiler,
+ Open64FortranCompiler,
+ PathScaleFortranCompiler,
+ PGIFortranCompiler,
+ SunFortranCompiler,
+)
+from .java import JavaCompiler
+from .objc import (
+ ObjCCompiler,
+ ClangObjCCompiler,
+ GnuObjCCompiler,
+)
+from .objcpp import (
+ ObjCPPCompiler,
+ ClangObjCPPCompiler,
+ GnuObjCPPCompiler,
+)
+from .rust import RustCompiler
+from .swift import SwiftCompiler
+from .vala import ValaCompiler
--- /dev/null
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import subprocess, os.path, tempfile
+
+from .. import mlog
+from .. import coredata
+from . import compilers
+from ..mesonlib import (
+ EnvironmentException, version_compare, Popen_safe, listify,
+ for_windows, for_darwin, for_cygwin, for_haiku,
+)
+
+from .compilers import (
+ GCC_MINGW,
+ get_largefile_args,
+ gnu_winlibs,
+ msvc_buildtype_args,
+ msvc_buildtype_linker_args,
+ msvc_winlibs,
+ vs32_instruction_set_args,
+ vs64_instruction_set_args,
+ ClangCompiler,
+ Compiler,
+ CompilerArgs,
+ CrossNoRunException,
+ GnuCompiler,
+ IntelCompiler,
+ RunResult,
+)
+
+
+class CCompiler(Compiler):
+ def __init__(self, exelist, version, is_cross, exe_wrapper=None):
+ # If a child ObjC or CPP class has already set it, don't set it ourselves
+ if not hasattr(self, 'language'):
+ self.language = 'c'
+ super().__init__(exelist, version)
+ self.id = 'unknown'
+ self.is_cross = is_cross
+ self.can_compile_suffixes.add('h')
+ if isinstance(exe_wrapper, str):
+ self.exe_wrapper = [exe_wrapper]
+ else:
+ self.exe_wrapper = exe_wrapper
+
+ def needs_static_linker(self):
+ return True # When compiling static libraries, so yes.
+
+ def get_always_args(self):
+ '''
+ Args that are always-on for all C compilers other than MSVC
+ '''
+ return ['-pipe'] + get_largefile_args(self)
+
+ def get_linker_debug_crt_args(self):
+ """
+ Arguments needed to select a debug crt for the linker
+ This is only needed for MSVC
+ """
+ return []
+
+ def get_no_stdinc_args(self):
+ return ['-nostdinc']
+
+ def get_no_stdlib_link_args(self):
+ return ['-nostdlib']
+
+ def get_warn_args(self, level):
+ return self.warn_args[level]
+
+ def get_no_warn_args(self):
+ # Almost every compiler uses this for disabling warnings
+ return ['-w']
+
+ def get_soname_args(self, prefix, shlib_name, suffix, path, soversion, is_shared_module):
+ return []
+
+ def split_shlib_to_parts(self, fname):
+ return None, fname
+
+ # The default behavior is this, override in MSVC
+ def build_rpath_args(self, build_dir, from_dir, rpath_paths, build_rpath, install_rpath):
+ if self.id == 'clang' and self.clang_type == compilers.CLANG_OSX:
+ return self.build_osx_rpath_args(build_dir, rpath_paths, build_rpath)
+ return self.build_unix_rpath_args(build_dir, from_dir, rpath_paths, build_rpath, install_rpath)
+
+ def get_dependency_gen_args(self, outtarget, outfile):
+ return ['-MMD', '-MQ', outtarget, '-MF', outfile]
+
+ def depfile_for_object(self, objfile):
+ return objfile + '.' + self.get_depfile_suffix()
+
+ def get_depfile_suffix(self):
+ return 'd'
+
+ def get_exelist(self):
+ return self.exelist[:]
+
+ def get_linker_exelist(self):
+ return self.exelist[:]
+
+ def get_preprocess_only_args(self):
+ return ['-E', '-P']
+
+ def get_compile_only_args(self):
+ return ['-c']
+
+ def get_no_optimization_args(self):
+ return ['-O0']
+
+ def get_compiler_check_args(self):
+ '''
+ Get arguments useful for compiler checks such as being permissive in
+ the code quality and not doing any optimization.
+ '''
+ return self.get_no_optimization_args()
+
+ def get_output_args(self, target):
+ return ['-o', target]
+
+ def get_linker_output_args(self, outputname):
+ return ['-o', outputname]
+
+ def get_coverage_args(self):
+ return ['--coverage']
+
+ def get_coverage_link_args(self):
+ return ['--coverage']
+
+ def get_werror_args(self):
+ return ['-Werror']
+
+ def get_std_exe_link_args(self):
+ return []
+
+ def get_include_args(self, path, is_system):
+ if path == '':
+ path = '.'
+ if is_system:
+ return ['-isystem', path]
+ return ['-I' + path]
+
+ def get_std_shared_lib_link_args(self):
+ return ['-shared']
+
+ def get_library_dirs(self):
+ stdo = Popen_safe(self.exelist + ['--print-search-dirs'])[1]
+ for line in stdo.split('\n'):
+ if line.startswith('libraries:'):
+ libstr = line.split('=', 1)[1]
+ return libstr.split(':')
+ return []
+
+ def get_pic_args(self):
+ return ['-fPIC']
+
+ def name_string(self):
+ return ' '.join(self.exelist)
+
+ def get_pch_use_args(self, pch_dir, header):
+ return ['-include', os.path.split(header)[-1]]
+
+ def get_pch_name(self, header_name):
+ return os.path.split(header_name)[-1] + '.' + self.get_pch_suffix()
+
+ def get_linker_search_args(self, dirname):
+ return ['-L' + dirname]
+
+ def get_default_include_dirs(self):
+ return []
+
+ def gen_import_library_args(self, implibname):
+ """
+ The name of the outputted import library
+
+ This implementation is used only on Windows by compilers that use GNU ld
+ """
+ return ['-Wl,--out-implib=' + implibname]
+
    def sanity_check_impl(self, work_dir, environment, sname, code):
        """
        Shared sanity-check driver: write `code` to file `sname` inside
        `work_dir`, compile it, and (when possible) run the resulting binary.

        Raises EnvironmentException if the program cannot be compiled, or if
        it can be run but exits with a non-zero status.
        """
        mlog.debug('Sanity testing ' + self.get_display_language() + ' compiler:', ' '.join(self.exelist))
        mlog.debug('Is cross compiler: %s.' % str(self.is_cross))

        extra_flags = []
        source_name = os.path.join(work_dir, sname)
        # Binary name is the source name minus its extension.
        binname = sname.rsplit('.', 1)[0]
        if self.is_cross:
            binname += '_cross'
            if self.exe_wrapper is None:
                # Linking cross built apps is painful. You can't really
                # tell if you should use -nostdlib or not and for example
                # on OSX the compiler binary is the same but you need
                # a ton of compiler flags to differentiate between
                # arm and x86_64. So just compile.
                extra_flags += self.get_cross_extra_flags(environment, link=False)
                extra_flags += self.get_compile_only_args()
            else:
                extra_flags += self.get_cross_extra_flags(environment, link=True)
        # Is a valid executable output for all toolchains and platforms
        binname += '.exe'
        # Write binary check source
        binary_name = os.path.join(work_dir, binname)
        with open(source_name, 'w') as ofile:
            ofile.write(code)
        # Compile sanity check
        cmdlist = self.exelist + extra_flags + [source_name] + self.get_output_args(binary_name)
        pc, stdo, stde = Popen_safe(cmdlist, cwd=work_dir)
        mlog.debug('Sanity check compiler command line:', ' '.join(cmdlist))
        mlog.debug('Sanity check compile stdout:')
        mlog.debug(stdo)
        mlog.debug('-----\nSanity check compile stderr:')
        mlog.debug(stde)
        mlog.debug('-----')
        if pc.returncode != 0:
            raise EnvironmentException('Compiler {0} can not compile programs.'.format(self.name_string()))
        # Run sanity check
        if self.is_cross:
            if self.exe_wrapper is None:
                # Can't check if the binaries run so we have to assume they do
                return
            cmdlist = self.exe_wrapper + [binary_name]
        else:
            cmdlist = [binary_name]
        mlog.debug('Running test binary command: ' + ' '.join(cmdlist))
        pe = subprocess.Popen(cmdlist)
        pe.wait()
        if pe.returncode != 0:
            raise EnvironmentException('Executables created by {0} compiler {1} are not runnable.'.format(self.language, self.name_string()))
+
+ def sanity_check(self, work_dir, environment):
+ code = 'int main(int argc, char **argv) { int class=0; return class; }\n'
+ return self.sanity_check_impl(work_dir, environment, 'sanitycheckc.c', code)
+
    def has_header(self, hname, prefix, env, extra_args=None, dependencies=None):
        """
        Check whether header `hname` is available. Uses __has_include when the
        preprocessor supports it, falling back to a plain #include otherwise;
        only the 'preprocess' step is run.
        """
        fargs = {'prefix': prefix, 'header': hname}
        code = '''{prefix}
        #ifdef __has_include
         #if !__has_include("{header}")
          #error "Header '{header}' could not be found"
         #endif
        #else
         #include <{header}>
        #endif'''
        return self.compiles(code.format(**fargs), env, extra_args,
                             dependencies, 'preprocess')
+
    def has_header_symbol(self, hname, symbol, prefix, env, extra_args=None, dependencies=None):
        """
        Check whether `symbol` is exposed by header `hname`, either as a macro
        or as an actual symbol that can be referenced.
        """
        fargs = {'prefix': prefix, 'header': hname, 'symbol': symbol}
        t = '''{prefix}
        #include <{header}>
        int main () {{
            /* If it's not defined as a macro, try to use as a symbol */
            #ifndef {symbol}
                {symbol};
            #endif
        }}'''
        return self.compiles(t.format(**fargs), env, extra_args, dependencies)
+
    def _get_compiler_check_args(self, env, extra_args, dependencies, mode='compile'):
        """
        Assemble the argument list for a compiler check.

        mode is 'preprocess', 'compile' or 'link'; it controls which
        dependency flags and which environment variables are folded in.
        extra_args may be None, a string, or a list; dependencies may be
        None, a single dependency, or a list.
        """
        if extra_args is None:
            extra_args = []
        elif isinstance(extra_args, str):
            extra_args = [extra_args]
        if dependencies is None:
            dependencies = []
        elif not isinstance(dependencies, list):
            dependencies = [dependencies]
        # Collect compiler arguments
        args = CompilerArgs(self)
        for d in dependencies:
            # Add compile flags needed by dependencies
            args += d.get_compile_args()
            if d.need_threads():
                args += self.thread_flags(env)
            if mode == 'link':
                # Add link flags needed to find dependencies
                args += d.get_link_args()
                if d.need_threads():
                    args += self.thread_link_flags(env)
        # Select a CRT if needed since we're linking
        if mode == 'link':
            args += self.get_linker_debug_crt_args()
        # Read c_args/cpp_args/etc from the cross-info file (if needed)
        args += self.get_cross_extra_flags(env, link=(mode == 'link'))
        if not self.is_cross:
            if mode == 'preprocess':
                # Add CPPFLAGS from the env.
                args += env.coredata.external_preprocess_args[self.language]
            elif mode == 'compile':
                # Add CFLAGS/CXXFLAGS/OBJCFLAGS/OBJCXXFLAGS from the env
                args += env.coredata.external_args[self.language]
            elif mode == 'link':
                # Add LDFLAGS from the env
                args += env.coredata.external_link_args[self.language]
        args += self.get_compiler_check_args()
        # extra_args must override all other arguments, so we add them last
        args += extra_args
        return args
+
+ def compiles(self, code, env, extra_args=None, dependencies=None, mode='compile'):
+ args = self._get_compiler_check_args(env, extra_args, dependencies, mode)
+ # We only want to compile; not link
+ with self.compile(code, args.to_native(), mode) as p:
+ return p.returncode == 0
+
+ def _links_wrapper(self, code, env, extra_args, dependencies):
+ "Shares common code between self.links and self.run"
+ args = self._get_compiler_check_args(env, extra_args, dependencies, mode='link')
+ return self.compile(code, args)
+
+ def links(self, code, env, extra_args=None, dependencies=None):
+ with self._links_wrapper(code, env, extra_args, dependencies) as p:
+ return p.returncode == 0
+
    def run(self, code, env, extra_args=None, dependencies=None):
        """
        Build the code fragment and execute the resulting binary, returning a
        RunResult. A failed compile or a failed launch yields RunResult(False)
        instead of raising; a cross build without an exe wrapper raises
        CrossNoRunException because the binary cannot be executed at all.
        """
        if self.is_cross and self.exe_wrapper is None:
            raise CrossNoRunException('Can not run test applications in this cross environment.')
        with self._links_wrapper(code, env, extra_args, dependencies) as p:
            if p.returncode != 0:
                mlog.debug('Could not compile test file %s: %d\n' % (
                    p.input_name,
                    p.returncode))
                return RunResult(False)
            if self.is_cross:
                # Run the cross-built binary through the configured wrapper.
                cmdlist = self.exe_wrapper + [p.output_name]
            else:
                cmdlist = p.output_name
            try:
                pe, so, se = Popen_safe(cmdlist)
            except Exception as e:
                mlog.debug('Could not run: %s (error: %s)\n' % (cmdlist, e))
                return RunResult(False)

        mlog.debug('Program stdout:\n')
        mlog.debug(so)
        mlog.debug('Program stderr:\n')
        mlog.debug(se)
        return RunResult(True, pe.returncode, so, se)
+
    def _compile_int(self, expression, prefix, env, extra_args, dependencies):
        """
        Compile-time truth test: the static array has a negative size (a
        compile error) exactly when `expression` evaluates to false, so the
        fragment compiles iff the expression is true.
        """
        fargs = {'prefix': prefix, 'expression': expression}
        t = '''#include <stdio.h>
        {prefix}
        int main() {{ static int a[1-2*!({expression})]; a[0]=0; return 0; }}'''
        return self.compiles(t.format(**fargs), env, extra_args, dependencies)
+
    def cross_compute_int(self, expression, low, high, guess, prefix, env, extra_args, dependencies):
        """
        Determine the integer value of a C `expression` without running any
        code (needed when cross compiling): try `guess` first, then bisect
        between `low` and `high` using compile-time comparisons.
        """
        if isinstance(guess, int):
            if self._compile_int('%s == %d' % (expression, guess), prefix, env, extra_args, dependencies):
                return guess

        # Binary search on 'expression >= cur'.
        cur = low
        while low < high:
            cur = int((low + high) / 2)
            if cur == low:
                break

            if self._compile_int('%s >= %d' % (expression, cur), prefix, env, extra_args, dependencies):
                low = cur
            else:
                high = cur

        # Confirm the candidate; if the true value lies outside the initial
        # bounds this check fails and we report an overflow.
        if self._compile_int('%s == %d' % (expression, cur), prefix, env, extra_args, dependencies):
            return cur
        raise EnvironmentException('Cross-compile check overflowed')
+
    def compute_int(self, expression, low, high, guess, prefix, env, extra_args=None, dependencies=None):
        """
        Compute the integer value of a C `expression`: natively by running a
        printf program, or via compile-time bisection when cross compiling.
        Returns -1 if the test program does not compile.
        """
        if extra_args is None:
            extra_args = []
        if self.is_cross:
            return self.cross_compute_int(expression, low, high, guess, prefix, env, extra_args, dependencies)
        fargs = {'prefix': prefix, 'expression': expression}
        t = '''#include<stdio.h>
        {prefix}
        int main(int argc, char **argv) {{
            printf("%ld\\n", (long)({expression}));
            return 0;
        }};'''
        res = self.run(t.format(**fargs), env, extra_args, dependencies)
        if not res.compiled:
            return -1
        if res.returncode != 0:
            raise EnvironmentException('Could not run compute_int test binary.')
        return int(res.stdout)
+
    def cross_sizeof(self, typename, prefix, env, extra_args=None, dependencies=None):
        """
        Determine sizeof(typename) without running code. First verify the
        type exists at all (returning -1 if not), then bisect the size via
        compile-time checks.
        """
        if extra_args is None:
            extra_args = []
        fargs = {'prefix': prefix, 'type': typename}
        t = '''#include <stdio.h>
        {prefix}
        int main(int argc, char **argv) {{
            {type} something;
        }}'''
        if not self.compiles(t.format(**fargs), env, extra_args, dependencies):
            return -1
        return self.cross_compute_int('sizeof(%s)' % typename, 1, 128, None, prefix, env, extra_args, dependencies)
+
    def sizeof(self, typename, prefix, env, extra_args=None, dependencies=None):
        """
        Determine sizeof(typename), natively via a printf program or with
        cross_sizeof() when cross compiling. Returns -1 if the test program
        does not compile.
        """
        if extra_args is None:
            extra_args = []
        fargs = {'prefix': prefix, 'type': typename}
        if self.is_cross:
            return self.cross_sizeof(typename, prefix, env, extra_args, dependencies)
        t = '''#include<stdio.h>
        {prefix}
        int main(int argc, char **argv) {{
            printf("%ld\\n", (long)(sizeof({type})));
            return 0;
        }};'''
        res = self.run(t.format(**fargs), env, extra_args, dependencies)
        if not res.compiled:
            return -1
        if res.returncode != 0:
            raise EnvironmentException('Could not run sizeof test binary.')
        return int(res.stdout)
+
    def cross_alignment(self, typename, prefix, env, extra_args=None, dependencies=None):
        """
        Determine the alignment of typename without running code: verify the
        type exists (returning -1 if not), then bisect offsetof() of a member
        placed after a single char in a struct.
        """
        if extra_args is None:
            extra_args = []
        fargs = {'prefix': prefix, 'type': typename}
        t = '''#include <stdio.h>
        {prefix}
        int main(int argc, char **argv) {{
            {type} something;
        }}'''
        if not self.compiles(t.format(**fargs), env, extra_args, dependencies):
            return -1
        # The padding inserted after 'c' equals the alignment of 'target'.
        t = '''#include <stddef.h>
        {prefix}
        struct tmp {{
            char c;
            {type} target;
        }};'''
        return self.cross_compute_int('offsetof(struct tmp, target)', 1, 1024, None, t.format(**fargs), env, extra_args, dependencies)
+
    def alignment(self, typename, prefix, env, extra_args=None, dependencies=None):
        """
        Determine the alignment of typename by printing offsetof() of a
        member placed after a single char; delegates to cross_alignment()
        when cross compiling. Raises EnvironmentException on any failure.
        """
        if extra_args is None:
            extra_args = []
        if self.is_cross:
            return self.cross_alignment(typename, prefix, env, extra_args, dependencies)
        fargs = {'prefix': prefix, 'type': typename}
        t = '''#include <stdio.h>
        #include <stddef.h>
        {prefix}
        struct tmp {{
            char c;
            {type} target;
        }};
        int main(int argc, char **argv) {{
            printf("%d", (int)offsetof(struct tmp, target));
            return 0;
        }}'''
        res = self.run(t.format(**fargs), env, extra_args, dependencies)
        if not res.compiled:
            raise EnvironmentException('Could not compile alignment test.')
        if res.returncode != 0:
            raise EnvironmentException('Could not run alignment test binary.')
        align = int(res.stdout)
        if align == 0:
            raise EnvironmentException('Could not determine alignment of %s. Sorry. You might want to file a bug.' % typename)
        return align
+
    def get_define(self, dname, prefix, env, extra_args, dependencies):
        """
        Return the preprocessor expansion of macro `dname` as a string.
        A unique delimiter string is emitted just before the macro so its
        expansion can be cut out of the preprocessor output.
        """
        delim = '"MESON_GET_DEFINE_DELIMITER"'
        fargs = {'prefix': prefix, 'define': dname, 'delim': delim}
        code = '''
        {prefix}
        #ifndef {define}
        # define {define}
        #endif
        {delim}\n{define}'''
        args = self._get_compiler_check_args(env, extra_args, dependencies,
                                             mode='preprocess').to_native()
        with self.compile(code.format(**fargs), args, 'preprocess') as p:
            if p.returncode != 0:
                raise EnvironmentException('Could not get define {!r}'.format(dname))
            # Get the preprocessed value after the delimiter,
            # minus the extra newline at the end
            return p.stdo.split(delim + '\n')[-1][:-1]
+
+ def get_return_value(self, fname, rtype, prefix, env, extra_args, dependencies):
+ if rtype == 'string':
+ fmt = '%s'
+ cast = '(char*)'
+ elif rtype == 'int':
+ fmt = '%lli'
+ cast = '(long long int)'
+ else:
+ raise AssertionError('BUG: Unknown return type {!r}'.format(rtype))
+ fargs = {'prefix': prefix, 'f': fname, 'cast': cast, 'fmt': fmt}
+ code = '''{prefix}
+ #include <stdio.h>
+ int main(int argc, char *argv[]) {{
+ printf ("{fmt}", {cast} {f}());
+ }}'''.format(**fargs)
+ res = self.run(code, env, extra_args, dependencies)
+ if not res.compiled:
+ m = 'Could not get return value of {}()'
+ raise EnvironmentException(m.format(fname))
+ if rtype == 'string':
+ return res.stdout
+ elif rtype == 'int':
+ try:
+ return int(res.stdout.strip())
+ except:
+ m = 'Return value of {}() is not an int'
+ raise EnvironmentException(m.format(fname))
+
    @staticmethod
    def _no_prototype_templ():
        """
        Try to find the function without a prototype from a header by defining
        our own dummy prototype and trying to link with the C library (and
        whatever else the compiler links in by default). This is very similar
        to the check performed by Autoconf for AC_CHECK_FUNCS.

        Returns a (head, main) pair of format templates expecting the keys
        'prefix' and 'func'.
        """
        # Define the symbol to something else since it is defined by the
        # includes or defines listed by the user or by the compiler. This may
        # include, for instance _GNU_SOURCE which must be defined before
        # limits.h, which includes features.h
        # Then, undef the symbol to get rid of it completely.
        head = '''
        #define {func} meson_disable_define_of_{func}
        {prefix}
        #include <limits.h>
        #undef {func}
        '''
        # Override any GCC internal prototype and declare our own definition for
        # the symbol. Use char because that's unlikely to be an actual return
        # value for a function which ensures that we override the definition.
        head += '''
        #ifdef __cplusplus
        extern "C"
        #endif
        char {func} ();
        '''
        # The actual function call
        main = '''
        int main () {{
          return {func} ();
        }}'''
        return head, main
+
    @staticmethod
    def _have_prototype_templ():
        """
        Returns a header and main() call that uses the headers listed by the
        user for the function prototype while checking if a function exists.
        """
        # Add the 'prefix', aka defines, includes, etc that the user provides
        # This may include, for instance _GNU_SOURCE which must be defined
        # before limits.h, which includes features.h
        head = '{prefix}\n#include <limits.h>\n'
        # We don't know what the function takes or returns, so return it as an int.
        # Just taking the address or comparing it to void is not enough because
        # compilers are smart enough to optimize it away. The resulting binary
        # is not run so we don't care what the return value is.
        main = '''\nint main() {{
            void *a = (void*) &{func};
            long b = (long) a;
            return (int) b;
        }}'''
        return head, main
+
    def has_function(self, funcname, prefix, env, extra_args=None, dependencies=None):
        """
        First, this function looks for the symbol in the default libraries
        provided by the compiler (stdlib + a few others usually). If that
        fails, it checks if any of the headers specified in the prefix provide
        an implementation of the function, and if that fails, it checks if it's
        implemented as a compiler-builtin.
        """
        if extra_args is None:
            extra_args = []

        # Short-circuit if the check is already provided by the cross-info file
        varname = 'has function ' + funcname
        varname = varname.replace(' ', '_')
        if self.is_cross:
            val = env.cross_info.config['properties'].get(varname, None)
            if val is not None:
                if isinstance(val, bool):
                    return val
                raise EnvironmentException('Cross variable {0} is not a boolean.'.format(varname))

        fargs = {'prefix': prefix, 'func': funcname}

        # glibc defines functions that are not available on Linux as stubs that
        # fail with ENOSYS (such as e.g. lchmod). In this case we want to fail
        # instead of detecting the stub as a valid symbol.
        # We already included limits.h earlier to ensure that these are defined
        # for stub functions.
        stubs_fail = '''
        #if defined __stub_{func} || defined __stub___{func}
        fail fail fail this function is not going to work
        #endif
        '''

        # If we have any includes in the prefix supplied by the user, assume
        # that the user wants us to use the symbol prototype defined in those
        # includes. If not, then try to do the Autoconf-style check with
        # a dummy prototype definition of our own.
        # This is needed when the linker determines symbol availability from an
        # SDK based on the prototype in the header provided by the SDK.
        # Ignoring this prototype would result in the symbol always being
        # marked as available.
        if '#include' in prefix:
            head, main = self._have_prototype_templ()
        else:
            head, main = self._no_prototype_templ()
        templ = head + stubs_fail + main

        # First attempt: link against the default libraries.
        if self.links(templ.format(**fargs), env, extra_args, dependencies):
            return True

        # MSVC does not have compiler __builtin_-s.
        if self.get_id() == 'msvc':
            return False

        # Detect function as a built-in
        #
        # Some functions like alloca() are defined as compiler built-ins which
        # are inlined by the compiler and you can't take their address, so we
        # need to look for them differently. On nice compilers like clang, we
        # can just directly use the __has_builtin() macro.
        fargs['no_includes'] = '#include' not in prefix
        t = '''{prefix}
        int main() {{
        #ifdef __has_builtin
            #if !__has_builtin(__builtin_{func})
                #error "__builtin_{func} not found"
            #endif
        #elif ! defined({func})
            /* Check for __builtin_{func} only if no includes were added to the
             * prefix above, which means no definition of {func} can be found.
             * We would always check for this, but we get false positives on
             * MSYS2 if we do. Their toolchain is broken, but we can at least
             * give them a workaround. */
            #if {no_includes:d}
                __builtin_{func};
            #else
                #error "No definition for __builtin_{func} found in the prefix"
            #endif
        #endif
        }}'''
        return self.links(t.format(**fargs), env, extra_args, dependencies)
+
    def has_members(self, typename, membernames, prefix, env, extra_args=None, dependencies=None):
        """
        Check that `typename` has all of the listed members, by compiling a
        fragment that accesses each of them on a local variable.
        """
        if extra_args is None:
            extra_args = []
        fargs = {'prefix': prefix, 'type': typename, 'name': 'foo'}
        # Create code that accesses all members
        members = ''
        for member in membernames:
            members += '{}.{};\n'.format(fargs['name'], member)
        fargs['members'] = members
        t = '''{prefix}
        void bar() {{
            {type} {name};
            {members}
        }};'''
        return self.compiles(t.format(**fargs), env, extra_args, dependencies)
+
+ def has_type(self, typename, prefix, env, extra_args, dependencies=None):
+ fargs = {'prefix': prefix, 'type': typename}
+ t = '''{prefix}
+ void bar() {{
+ sizeof({type});
+ }};'''
+ return self.compiles(t.format(**fargs), env, extra_args, dependencies)
+
    def symbols_have_underscore_prefix(self, env):
        '''
        Check if the compiler prefixes an underscore to global C symbols

        Compiles a tiny translation unit that defines one uniquely-named
        function, then scans the raw object file for the underscored vs
        plain form of that name. Raises RuntimeError if the probe itself
        fails, since that indicates a bug rather than a normal result.
        '''
        # Kept as bytes so it can be searched for directly in the
        # (binary) object file below.
        symbol_name = b'meson_uscore_prefix'
        code = '''#ifdef __cplusplus
        extern "C" {
        #endif
        void ''' + symbol_name.decode() + ''' () {}
        #ifdef __cplusplus
        }
        #endif
        '''
        args = self.get_cross_extra_flags(env, link=False)
        args += self.get_compiler_check_args()
        n = 'symbols_have_underscore_prefix'
        with self.compile(code, args, 'compile') as p:
            if p.returncode != 0:
                m = 'BUG: Unable to compile {!r} check: {}'
                raise RuntimeError(m.format(n, p.stdo))
            if not os.path.isfile(p.output_name):
                m = 'BUG: Can\'t find compiled test code for {!r} check'
                raise RuntimeError(m.format(n))
            with open(p.output_name, 'rb') as o:
                # Iterating a binary file still splits on b'\n'; that is fine
                # here since we only test substring membership per chunk.
                for line in o:
                    # Check if the underscore form of the symbol is somewhere
                    # in the output file.
                    if b'_' + symbol_name in line:
                        return True
                    # Else, check if the non-underscored form is present
                    elif symbol_name in line:
                        return False
        raise RuntimeError('BUG: {!r} check failed unexpectedly'.format(n))
+
+ def get_library_naming(self, env, libtype):
+ '''
+ Get library prefixes and suffixes for the target platform ordered by
+ priority
+ '''
+ stlibext = ['a']
+ # We've always allowed libname to be both `foo` and `libfoo`,
+ # and now people depend on it
+ prefixes = ['lib', '']
+ # Library suffixes and prefixes
+ if for_darwin(env.is_cross_build(), env):
+ shlibext = ['dylib']
+ elif for_windows(env.is_cross_build(), env):
+ # FIXME: .lib files can be import or static so we should read the
+ # file, figure out which one it is, and reject the wrong kind.
+ if self.id == 'msvc':
+ shlibext = ['lib']
+ else:
+ shlibext = ['dll.a', 'lib', 'dll']
+ # Yep, static libraries can also be foo.lib
+ stlibext += ['lib']
+ elif for_cygwin(env.is_cross_build(), env):
+ shlibext = ['dll', 'dll.a']
+ prefixes = ['cyg'] + prefixes
+ else:
+ # Linux/BSDs
+ shlibext = ['so']
+ # Search priority
+ if libtype in ('default', 'shared-static'):
+ suffixes = shlibext + stlibext
+ elif libtype == 'static-shared':
+ suffixes = stlibext + shlibext
+ elif libtype == 'shared':
+ suffixes = shlibext
+ elif libtype == 'static':
+ suffixes = stlibext
+ else:
+ raise AssertionError('BUG: unknown libtype {!r}'.format(libtype))
+ return prefixes, suffixes
+
+ def find_library(self, libname, env, extra_dirs, libtype='default'):
+ # These libraries are either built-in or invalid
+ if libname in self.ignore_libs:
+ return []
+ # First try if we can just add the library as -l.
+ code = 'int main(int argc, char **argv) { return 0; }'
+ if extra_dirs and isinstance(extra_dirs, str):
+ extra_dirs = [extra_dirs]
+ # Gcc + co seem to prefer builtin lib dirs to -L dirs.
+ # Only try to find std libs if no extra dirs specified.
+ if not extra_dirs and libtype == 'default':
+ args = ['-l' + libname]
+ if self.links(code, env, extra_args=args):
+ return args
+ # Not found or we want to use a specific libtype? Try to find the
+ # library file itself.
+ extra_dirs += self.get_library_dirs()
+ prefixes, suffixes = self.get_library_naming(env, libtype)
+ # Triply-nested loop!
+ for d in extra_dirs:
+ for suffix in suffixes:
+ for prefix in prefixes:
+ trial = os.path.join(d, prefix + libname + '.' + suffix)
+ if os.path.isfile(trial):
+ return [trial]
+ return None
+
+ def thread_flags(self, env):
+ if for_haiku(self.is_cross, env):
+ return []
+ return ['-pthread']
+
+ def thread_link_flags(self, env):
+ if for_haiku(self.is_cross, env):
+ return []
+ return ['-pthread']
+
    def has_multi_arguments(self, args, env):
        # Check whether the compiler accepts all of `args` together by
        # compiling a trivial translation unit with them appended.
        return self.compiles('int i;\n', env, extra_args=args)
+
+
class ClangCCompiler(ClangCompiler, CCompiler):
    """C compiler class for clang."""

    def __init__(self, exelist, version, clang_type, is_cross, exe_wrapper=None):
        CCompiler.__init__(self, exelist, version, is_cross, exe_wrapper)
        ClangCompiler.__init__(self, clang_type)
        base_warn = ['-Wall', '-Winvalid-pch']
        # Warning levels 1-3 build on the same base flags.
        self.warn_args = {
            '1': base_warn,
            '2': base_warn + ['-Wextra'],
            '3': base_warn + ['-Wextra', '-Wpedantic'],
        }

    def get_options(self):
        std_choices = ['none', 'c89', 'c99', 'c11', 'gnu89', 'gnu99', 'gnu11']
        return {'c_std': coredata.UserComboOption('c_std', 'C language standard to use',
                                                  std_choices, 'none')}

    def get_option_compile_args(self, options):
        std = options['c_std'].value
        return ['-std=' + std] if std != 'none' else []

    def get_option_link_args(self, options):
        return []

    def get_linker_always_args(self):
        args = super().get_linker_always_args()
        # On OS X, pad the Mach-O header so install names can be rewritten
        # in place later.
        if self.clang_type == compilers.CLANG_OSX:
            args = args + ['-Wl,-headerpad_max_install_names']
        return args
+
+
class GnuCCompiler(GnuCompiler, CCompiler):
    """C compiler class for GCC."""

    def __init__(self, exelist, version, gcc_type, is_cross, exe_wrapper=None, defines=None):
        CCompiler.__init__(self, exelist, version, is_cross, exe_wrapper)
        GnuCompiler.__init__(self, gcc_type, defines)
        base_warn = ['-Wall', '-Winvalid-pch']
        # Warning levels 1-3 build on the same base flags.
        self.warn_args = {
            '1': base_warn,
            '2': base_warn + ['-Wextra'],
            '3': base_warn + ['-Wextra', '-Wpedantic'],
        }

    def get_options(self):
        std_choices = ['none', 'c89', 'c99', 'c11', 'gnu89', 'gnu99', 'gnu11']
        opts = {'c_std': coredata.UserComboOption('c_std', 'C language standard to use',
                                                  std_choices, 'none')}
        # MinGW additionally lets the user pick which Windows system
        # libraries to link against.
        if self.gcc_type == GCC_MINGW:
            opts['c_winlibs'] = coredata.UserArrayOption(
                'c_winlibs', 'Standard Win libraries to link against', gnu_winlibs)
        return opts

    def get_option_compile_args(self, options):
        std = options['c_std'].value
        return ['-std=' + std] if std != 'none' else []

    def get_option_link_args(self, options):
        if self.gcc_type != GCC_MINGW:
            return []
        # Copy so callers cannot mutate the stored option value.
        return options['c_winlibs'].value[:]

    def get_std_shared_lib_link_args(self):
        return ['-shared']

    def get_pch_use_args(self, pch_dir, header):
        return ['-fpch-preprocess', '-include', os.path.basename(header)]
+
+
class IntelCCompiler(IntelCompiler, CCompiler):
    """C compiler class for the Intel compiler (ICC)."""

    def __init__(self, exelist, version, icc_type, is_cross, exe_wrapper=None):
        CCompiler.__init__(self, exelist, version, is_cross, exe_wrapper)
        IntelCompiler.__init__(self, icc_type)
        self.lang_header = 'c-header'
        base_warn = ['-Wall', '-w3', '-diag-disable:remark', '-Wpch-messages']
        # Warning levels 1-3 build on the same base flags.
        self.warn_args = {
            '1': base_warn,
            '2': base_warn + ['-Wextra'],
            '3': base_warn + ['-Wextra', '-Wpedantic'],
        }

    def get_options(self):
        # c11 is only offered from ICC 16 onwards (see version check).
        c_stds = ['c89', 'c99']
        if version_compare(self.version, '>=16.0.0'):
            c_stds.append('c11')
        return {'c_std': coredata.UserComboOption('c_std', 'C language standard to use',
                                                  ['none'] + c_stds + ['gnu89', 'gnu99'],
                                                  'none')}

    def get_option_compile_args(self, options):
        std = options['c_std'].value
        return ['-std=' + std] if std != 'none' else []

    def get_std_shared_lib_link_args(self):
        return ['-shared']

    def has_multi_arguments(self, args, env):
        # -diag-error 10006 promotes "ignoring unknown option" to an error
        # so unsupported flags are actually detected.
        return super().has_multi_arguments(args + ['-diag-error', '10006'], env)
+
+
class VisualStudioCCompiler(CCompiler):
    """C compiler class for Microsoft Visual C++ (cl.exe).

    Arguments are stored internally in GCC style; unix_args_to_native()
    translates them to MSVC-native form.
    """
    std_warn_args = ['/W3']
    std_opt_args = ['/O2']
    # Provided by the MS C runtime; -l lookups for these are dropped.
    ignore_libs = ('m', 'c', 'pthread')

    def __init__(self, exelist, version, is_cross, exe_wrap, is_64):
        CCompiler.__init__(self, exelist, version, is_cross, exe_wrap)
        self.id = 'msvc'
        # /showIncludes is needed for build dependency tracking in Ninja
        # See: https://ninja-build.org/manual.html#_deps
        self.always_args = ['/nologo', '/showIncludes']
        self.warn_args = {'1': ['/W2'],
                          '2': ['/W3'],
                          '3': ['/W4']}
        self.base_options = ['b_pch'] # FIXME add lto, pgo and the like
        self.is_64 = is_64

    # Override CCompiler.get_always_args
    def get_always_args(self):
        return self.always_args

    def get_linker_debug_crt_args(self):
        """
        Arguments needed to select a debug crt for the linker

        Sometimes we need to manually select the CRT (C runtime) to use with
        MSVC. One example is when trying to link with static libraries since
        MSVC won't auto-select a CRT for us in that case and will error out
        asking us to select one.
        """
        return ['/MDd']

    def get_buildtype_args(self, buildtype):
        return msvc_buildtype_args[buildtype]

    def get_buildtype_linker_args(self, buildtype):
        return msvc_buildtype_linker_args[buildtype]

    def get_pch_suffix(self):
        return 'pch'

    def get_pch_name(self, header):
        # foo/bar.h -> bar.pch
        chopped = os.path.split(header)[-1].split('.')[:-1]
        chopped.append(self.get_pch_suffix())
        pchname = '.'.join(chopped)
        return pchname

    def get_pch_use_args(self, pch_dir, header):
        base = os.path.split(header)[-1]
        pchname = self.get_pch_name(header)
        return ['/FI' + base, '/Yu' + base, '/Fp' + os.path.join(pch_dir, pchname)]

    def get_preprocess_only_args(self):
        return ['/EP']

    def get_compile_only_args(self):
        return ['/c']

    def get_no_optimization_args(self):
        return ['/Od']

    def get_output_args(self, target):
        if target.endswith('.exe'):
            return ['/Fe' + target]
        return ['/Fo' + target]

    def get_dependency_gen_args(self, outtarget, outfile):
        # Dependencies come from /showIncludes (see always_args), not from
        # a separate depfile-generation pass.
        return []

    def get_linker_exelist(self):
        return ['link'] # FIXME, should have same path as compiler.

    def get_linker_always_args(self):
        return ['/nologo']

    def get_linker_output_args(self, outputname):
        return ['/OUT:' + outputname]

    def get_linker_search_args(self, dirname):
        return ['/LIBPATH:' + dirname]

    def get_gui_app_args(self):
        return ['/SUBSYSTEM:WINDOWS']

    def get_pic_args(self):
        return [] # PIC is handled by the loader on Windows

    def get_std_shared_lib_link_args(self):
        return ['/DLL']

    def gen_vs_module_defs_args(self, defsfile):
        if not isinstance(defsfile, str):
            raise RuntimeError('Module definitions file should be str')
        # With MSVC, DLLs only export symbols that are explicitly exported,
        # so if a module defs file is specified, we use that to export symbols
        return ['/DEF:' + defsfile]

    def gen_pch_args(self, header, source, pchname):
        objname = os.path.splitext(pchname)[0] + '.obj'
        return objname, ['/Yc' + header, '/Fp' + pchname, '/Fo' + objname]

    def gen_import_library_args(self, implibname):
        "The name of the outputted import library"
        return ['/IMPLIB:' + implibname]

    def build_rpath_args(self, build_dir, from_dir, rpath_paths, build_rpath, install_rpath):
        # There is no rpath concept on Windows.
        return []

    # FIXME, no idea what these should be.
    def thread_flags(self, env):
        return []

    def thread_link_flags(self, env):
        return []

    def get_options(self):
        return {'c_winlibs': coredata.UserArrayOption('c_winlibs',
                                                      'Windows libs to link against.',
                                                      msvc_winlibs)
                }

    def get_option_link_args(self, options):
        return options['c_winlibs'].value[:]

    @classmethod
    def unix_args_to_native(cls, args):
        """Translate GCC-style args (-L, -l, ...) to MSVC-native form."""
        result = []
        for i in args:
            # -mms-bitfields is specific to MinGW-GCC
            # -pthread is only valid for GCC
            if i in ('-mms-bitfields', '-pthread'):
                continue
            if i.startswith('-L'):
                i = '/LIBPATH:' + i[2:]
            # Translate GNU-style -lfoo library name to the import library
            elif i.startswith('-l'):
                name = i[2:]
                if name in cls.ignore_libs:
                    # With MSVC, these are provided by the C runtime which is
                    # linked in by default
                    continue
                else:
                    i = name + '.lib'
            # NOTE: the old `elif i == '-pthread'` branch was unreachable
            # (already filtered by the first check above) and was removed.
            result.append(i)
        return result

    def get_werror_args(self):
        return ['/WX']

    def get_include_args(self, path, is_system):
        if path == '':
            path = '.'
        # msvc does not have a concept of system header dirs.
        return ['-I' + path]

    # Visual Studio is special. It ignores some arguments it does not
    # understand and you can't tell it to error out on those.
    # http://stackoverflow.com/questions/15259720/how-can-i-make-the-microsoft-c-compiler-treat-unknown-flags-as-errors-rather-t
    def has_multi_arguments(self, args, env):
        # D9002 is the "ignoring unknown option" warning; if it appears in
        # the output the argument was not actually accepted.
        warning_text = '9002'
        code = 'int i;\n'
        (fd, srcname) = tempfile.mkstemp(suffix='.' + self.default_suffix)
        os.close(fd)
        try:
            with open(srcname, 'w') as ofile:
                ofile.write(code)
            # Read c_args/cpp_args/etc from the cross-info file (if needed)
            extra_args = self.get_cross_extra_flags(env, link=False)
            extra_args += self.get_compile_only_args()
            commands = self.exelist + args + extra_args + [srcname]
            mlog.debug('Running VS compile:')
            mlog.debug('Command line: ', ' '.join(commands))
            mlog.debug('Code:\n', code)
            p, stdo, stde = Popen_safe(commands, cwd=os.path.split(srcname)[0])
        finally:
            # Bugfix: the temporary source file used to be leaked.
            os.remove(srcname)
        if p.returncode != 0:
            return False
        return not (warning_text in stde or warning_text in stdo)

    def get_compile_debugfile_args(self, rel_obj, pch=False):
        pdbarr = rel_obj.split('.')[:-1]
        pdbarr += ['pdb']
        args = ['/Fd' + '.'.join(pdbarr)]
        # When generating a PDB file with PCH, all compile commands write
        # to the same PDB file. Hence, we need to serialize the PDB
        # writes using /FS since we do parallel builds. This slows down the
        # build obviously, which is why we only do this when PCH is on.
        # This was added in Visual Studio 2013 (MSVC 18.0). Before that it was
        # always on: https://msdn.microsoft.com/en-us/library/dn502518.aspx
        if pch and version_compare(self.version, '>=18.0'):
            args = ['/FS'] + args
        return args

    def get_link_debugfile_args(self, targetfile):
        pdbarr = targetfile.split('.')[:-1]
        pdbarr += ['pdb']
        return ['/DEBUG', '/PDB:' + '.'.join(pdbarr)]

    def get_link_whole_for(self, args):
        # Only since VS2015
        args = listify(args)
        return ['/WHOLEARCHIVE:' + x for x in args]

    def get_instruction_set_args(self, instruction_set):
        if self.is_64:
            return vs64_instruction_set_args.get(instruction_set, None)
        if self.version.split('.')[0] == '16' and instruction_set == 'avx':
            # VS documentation says that this exists and should work, but
            # it does not. The headers do not contain AVX intrinsics
            # and they cannot be called.
            return None
        return vs32_instruction_set_args.get(instruction_set, None)

    def get_toolset_version(self):
        # See boost/config/compiler/visualc.cpp for up to date mapping
        try:
            version = int(''.join(self.version.split('.')[0:2]))
        except ValueError:
            # Bugfix: was a bare `except:` which also swallowed
            # KeyboardInterrupt/SystemExit. Only int() conversion of a
            # malformed version string should fall through to None.
            return None
        if version < 1310:
            return '7.0'
        elif version < 1400:
            return '7.1' # (Visual Studio 2003)
        elif version < 1500:
            return '8.0' # (Visual Studio 2005)
        elif version < 1600:
            return '9.0' # (Visual Studio 2008)
        elif version < 1700:
            return '10.0' # (Visual Studio 2010)
        elif version < 1800:
            return '11.0' # (Visual Studio 2012)
        elif version < 1900:
            return '12.0' # (Visual Studio 2013)
        elif version < 1910:
            return '14.0' # (Visual Studio 2015)
        elif version < 1920:
            return '14.1' # (Visual Studio 2017)
        return None

    def get_default_include_dirs(self):
        if 'INCLUDE' not in os.environ:
            return []
        return os.environ['INCLUDE'].split(os.pathsep)
--- /dev/null
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import contextlib, os.path, re, tempfile
+import subprocess
+
+from ..linkers import StaticLinker
+from .. import coredata
+from .. import mlog
+from .. import mesonlib
+from ..mesonlib import EnvironmentException, MesonException, version_compare, Popen_safe
+
+"""This file contains the data files of all compilers Meson knows
+about. To support a new compiler, add its information below.
+Also add corresponding autodetection code in environment.py."""
+
+header_suffixes = ('h', 'hh', 'hpp', 'hxx', 'H', 'ipp', 'moc', 'vapi', 'di')
+obj_suffixes = ('o', 'obj', 'res')
+lib_suffixes = ('a', 'lib', 'dll', 'dylib', 'so')
+# Mapping of language to suffixes of files that should always be in that language
+# This means we can't include .h headers here since they could be C, C++, ObjC, etc.
+lang_suffixes = {
+ 'c': ('c',),
+ 'cpp': ('cpp', 'cc', 'cxx', 'c++', 'hh', 'hpp', 'ipp', 'hxx'),
+ # f90, f95, f03, f08 are for free-form fortran ('f90' recommended)
+ # f, for, ftn, fpp are for fixed-form fortran ('f' or 'for' recommended)
+ 'fortran': ('f90', 'f95', 'f03', 'f08', 'f', 'for', 'ftn', 'fpp'),
+ 'd': ('d', 'di'),
+ 'objc': ('m',),
+ 'objcpp': ('mm',),
+ 'rust': ('rs',),
+ 'vala': ('vala', 'vapi', 'gs'),
+ 'cs': ('cs',),
+ 'swift': ('swift',),
+ 'java': ('java',),
+}
+cpp_suffixes = lang_suffixes['cpp'] + ('h',)
+c_suffixes = lang_suffixes['c'] + ('h',)
+# List of languages that can be linked with C code directly by the linker
+# used in build.py:process_compilers() and build.py:get_dynamic_linker()
+clike_langs = ('objcpp', 'objc', 'd', 'cpp', 'c', 'fortran',)
+clike_suffixes = ()
+for _l in clike_langs:
+ clike_suffixes += lang_suffixes[_l]
+clike_suffixes += ('h', 'll', 's')
+
+# XXX: Use this in is_library()?
+soregex = re.compile(r'.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$')
+
+# All these are only for C-like languages; see `clike_langs` above.
+
def sort_clike(lang):
    '''
    Sort key that orders languages according to reversed(compilers.clike_langs),
    putting unknown languages at the end. This prefers e.g. C over C++ for
    files both can compile (such as assembly or C); likewise ObjC, ObjC++, etc.
    '''
    try:
        return -clike_langs.index(lang)
    except ValueError:
        return 1
+
def is_header(fname):
    """Return True if fname (a path string or object with a .fname attribute)
    has a recognized header suffix."""
    fname = getattr(fname, 'fname', fname)
    return fname.rsplit('.', 1)[-1] in header_suffixes
+
def is_source(fname):
    """Return True if fname has a compilable C-like source suffix
    (case-insensitive)."""
    fname = getattr(fname, 'fname', fname)
    return fname.rsplit('.', 1)[-1].lower() in clike_suffixes
+
def is_assembly(fname):
    """Return True if fname has an assembly suffix ('.s', case-insensitive)."""
    fname = getattr(fname, 'fname', fname)
    return fname.rsplit('.', 1)[-1].lower() == 's'
+
def is_llvm_ir(fname):
    """Return True if fname has the LLVM IR suffix ('.ll', case-sensitive)."""
    fname = getattr(fname, 'fname', fname)
    return fname.rsplit('.', 1)[-1] == 'll'
+
def is_object(fname):
    """Return True if fname has an object-file suffix."""
    fname = getattr(fname, 'fname', fname)
    return fname.rsplit('.', 1)[-1] in obj_suffixes
+
def is_library(fname):
    """Return True if fname has a library suffix (static or shared)."""
    fname = getattr(fname, 'fname', fname)
    return fname.rsplit('.', 1)[-1] in lib_suffixes
+
# Per-buildtype compiler/linker argument tables, keyed by Meson buildtype
# name ('plain', 'debug', 'debugoptimized', 'release', 'minsize').
gnulike_buildtype_args = {'plain': [],
                          # -O0 is passed for improved debugging information with gcc
                          # See https://github.com/mesonbuild/meson/pull/509
                          'debug': ['-O0', '-g'],
                          'debugoptimized': ['-O2', '-g'],
                          'release': ['-O3'],
                          'minsize': ['-Os', '-g']}

msvc_buildtype_args = {'plain': [],
                       'debug': ["/MDd", "/ZI", "/Ob0", "/Od", "/RTC1"],
                       'debugoptimized': ["/MD", "/Zi", "/O2", "/Ob1"],
                       'release': ["/MD", "/O2", "/Ob2"],
                       'minsize': ["/MD", "/Zi", "/Os", "/Ob1"],
                       }

apple_buildtype_linker_args = {'plain': [],
                               'debug': [],
                               'debugoptimized': [],
                               'release': [],
                               'minsize': [],
                               }

gnulike_buildtype_linker_args = {'plain': [],
                                 'debug': [],
                                 'debugoptimized': [],
                                 'release': ['-Wl,-O1'],
                                 'minsize': [],
                                 }

msvc_buildtype_linker_args = {'plain': [],
                              'debug': [],
                              'debugoptimized': [],
                              'release': [],
                              'minsize': ['/INCREMENTAL:NO'],
                              }

java_buildtype_args = {'plain': [],
                       'debug': ['-g'],
                       'debugoptimized': ['-g'],
                       'release': [],
                       'minsize': [],
                       }

rust_buildtype_args = {'plain': [],
                       'debug': ['-C', 'debuginfo=2'],
                       'debugoptimized': ['-C', 'debuginfo=2', '-C', 'opt-level=2'],
                       'release': ['-C', 'opt-level=3'],
                       'minsize': [], # In a future release: ['-C', 'opt-level=s'],
                       }

# D has three major compilers (GDC, LDC, DMD) with differing flag spellings.
d_gdc_buildtype_args = {'plain': [],
                        'debug': ['-g', '-O0'],
                        'debugoptimized': ['-g', '-O'],
                        'release': ['-O3', '-frelease'],
                        'minsize': [],
                        }

d_ldc_buildtype_args = {'plain': [],
                        'debug': ['-g', '-O0'],
                        'debugoptimized': ['-g', '-O'],
                        'release': ['-O3', '-release'],
                        'minsize': [],
                        }

d_dmd_buildtype_args = {'plain': [],
                        'debug': ['-g'],
                        'debugoptimized': ['-g', '-O'],
                        'release': ['-O', '-release'],
                        'minsize': [],
                        }

mono_buildtype_args = {'plain': [],
                       'debug': ['-debug'],
                       'debugoptimized': ['-debug', '-optimize+'],
                       'release': ['-optimize+'],
                       'minsize': [],
                       }

swift_buildtype_args = {'plain': [],
                        'debug': ['-g'],
                        'debugoptimized': ['-g', '-O'],
                        'release': ['-O'],
                        'minsize': [],
                        }

# Default Windows system libraries, in the spelling each toolchain expects.
gnu_winlibs = ['-lkernel32', '-luser32', '-lgdi32', '-lwinspool', '-lshell32',
               '-lole32', '-loleaut32', '-luuid', '-lcomdlg32', '-ladvapi32']

msvc_winlibs = ['kernel32.lib', 'user32.lib', 'gdi32.lib',
                'winspool.lib', 'shell32.lib', 'ole32.lib', 'oleaut32.lib',
                'uuid.lib', 'comdlg32.lib', 'advapi32.lib']

# Colored-diagnostics flags for the b_colorout base option.
gnu_color_args = {'auto': ['-fdiagnostics-color=auto'],
                  'always': ['-fdiagnostics-color=always'],
                  'never': ['-fdiagnostics-color=never'],
                  }

clang_color_args = {'auto': ['-Xclang', '-fcolor-diagnostics'],
                    'always': ['-Xclang', '-fcolor-diagnostics'],
                    'never': ['-Xclang', '-fno-color-diagnostics'],
                    }
+
# Compiler-agnostic "base options" (b_*); each compiler advertises the
# subset it supports via its `base_options` attribute.
base_options = {'b_pch': coredata.UserBooleanOption('b_pch', 'Use precompiled headers', True),
                'b_lto': coredata.UserBooleanOption('b_lto', 'Use link time optimization', False),
                'b_sanitize': coredata.UserComboOption('b_sanitize',
                                                       'Code sanitizer to use',
                                                       ['none', 'address', 'thread', 'undefined', 'memory', 'address,undefined'],
                                                       'none'),
                'b_lundef': coredata.UserBooleanOption('b_lundef', 'Use -Wl,--no-undefined when linking', True),
                'b_asneeded': coredata.UserBooleanOption('b_asneeded', 'Use -Wl,--as-needed when linking', True),
                'b_pgo': coredata.UserComboOption('b_pgo', 'Use profile guided optimization',
                                                  ['off', 'generate', 'use'],
                                                  'off'),
                'b_coverage': coredata.UserBooleanOption('b_coverage',
                                                         'Enable coverage tracking.',
                                                         False),
                'b_colorout': coredata.UserComboOption('b_colorout', 'Use colored output',
                                                       ['auto', 'always', 'never'],
                                                       'always'),
                'b_ndebug': coredata.UserBooleanOption('b_ndebug',
                                                       'Disable asserts',
                                                       False),
                'b_staticpic': coredata.UserBooleanOption('b_staticpic',
                                                          'Build static libraries as position independent',
                                                          True),
                }
+
# Instruction-set name -> compiler flags; a value of None means the
# instruction set is not supported by that compiler/arch combination.
gnulike_instruction_set_args = {'mmx': ['-mmmx'],
                                'sse': ['-msse'],
                                'sse2': ['-msse2'],
                                'sse3': ['-msse3'],
                                'ssse3': ['-mssse3'],
                                'sse41': ['-msse4.1'],
                                'sse42': ['-msse4.2'],
                                'avx': ['-mavx'],
                                'avx2': ['-mavx2'],
                                'neon': ['-mfpu=neon'],
                                }

vs32_instruction_set_args = {'mmx': ['/arch:SSE'], # There does not seem to be a flag just for MMX
                             'sse': ['/arch:SSE'],
                             'sse2': ['/arch:SSE2'],
                             'sse3': ['/arch:AVX'], # VS leaped from SSE2 directly to AVX.
                             'sse41': ['/arch:AVX'],
                             'sse42': ['/arch:AVX'],
                             'avx': ['/arch:AVX'],
                             'avx2': ['/arch:AVX2'],
                             'neon': None,
                             }

# The 64 bit compiler defaults to /arch:avx.
vs64_instruction_set_args = {'mmx': ['/arch:AVX'],
                             'sse': ['/arch:AVX'],
                             'sse2': ['/arch:AVX'],
                             'sse3': ['/arch:AVX'],
                             'ssse3': ['/arch:AVX'],
                             'sse41': ['/arch:AVX'],
                             'sse42': ['/arch:AVX'],
                             'avx': ['/arch:AVX'],
                             'avx2': ['/arch:AVX2'],
                             'neon': None,
                             }
+
+
def sanitizer_compile_args(value):
    """Compile args for the b_sanitize value; empty list for 'none'.

    Address sanitizers additionally get -fno-omit-frame-pointer for
    usable stack traces.
    """
    if value == 'none':
        return []
    extra = ['-fno-omit-frame-pointer'] if 'address' in value else []
    return ['-fsanitize=' + value] + extra
+
def sanitizer_link_args(value):
    """Link args for the b_sanitize value; empty list for 'none'."""
    return [] if value == 'none' else ['-fsanitize=' + value]
+
def get_base_compile_args(options, compiler):
    """Compute compile arguments implied by the builtin 'b_*' base options.

    Every lookup is wrapped in try/except KeyError on purpose: a given
    build only carries the base options its compilers support, so missing
    keys are silently skipped rather than treated as errors.
    """
    args = []
    # FIXME, gcc/clang specific.
    try:
        if options['b_lto'].value:
            args.append('-flto')
    except KeyError:
        pass
    try:
        args += compiler.get_colorout_args(options['b_colorout'].value)
    except KeyError:
        pass
    try:
        args += sanitizer_compile_args(options['b_sanitize'].value)
    except KeyError:
        pass
    try:
        pgo_val = options['b_pgo'].value
        if pgo_val == 'generate':
            args.append('-fprofile-generate')
        elif pgo_val == 'use':
            args.append('-fprofile-use')
    except KeyError:
        pass
    try:
        if options['b_coverage'].value:
            args += compiler.get_coverage_args()
    except KeyError:
        pass
    try:
        if options['b_ndebug'].value:
            args += ['-DNDEBUG']
    except KeyError:
        pass
    return args
+
def get_base_link_args(options, linker, is_shared_module):
    """Compute link arguments implied by the builtin 'b_*' base options.

    Like get_base_compile_args(), missing option keys are skipped via
    try/except KeyError. Options gated on `linker.base_options` are only
    applied when that linker declares support for them.
    """
    args = []
    # FIXME, gcc/clang specific.
    try:
        if options['b_lto'].value:
            args.append('-flto')
    except KeyError:
        pass
    try:
        args += sanitizer_link_args(options['b_sanitize'].value)
    except KeyError:
        pass
    try:
        pgo_val = options['b_pgo'].value
        if pgo_val == 'generate':
            args.append('-fprofile-generate')
        elif pgo_val == 'use':
            args.append('-fprofile-use')
    except KeyError:
        pass
    try:
        # Shared modules may intentionally have undefined symbols resolved
        # at load time, so --no-undefined is skipped for them.
        if not is_shared_module and 'b_lundef' in linker.base_options and options['b_lundef'].value:
            args.append('-Wl,--no-undefined')
    except KeyError:
        pass
    try:
        if 'b_asneeded' in linker.base_options and options['b_asneeded'].value:
            args.append('-Wl,--as-needed')
    except KeyError:
        pass
    try:
        if options['b_coverage'].value:
            args += linker.get_coverage_link_args()
    except KeyError:
        pass
    return args
+
class CrossNoRunException(MesonException):
    # NOTE(review): by its name, raised when compiled test code cannot be
    # executed because we are cross-compiling — confirm at call sites.
    pass
+
class RunResult:
    """Outcome of compiling and running a test program."""

    def __init__(self, compiled, returncode=999, stdout='UNDEFINED', stderr='UNDEFINED'):
        # When compiled is False the other fields keep their placeholder
        # defaults (999 / 'UNDEFINED').
        self.compiled = compiled
        self.returncode = returncode
        self.stdout = stdout
        self.stderr = stderr
+
+class CompilerArgs(list):
+ '''
+ Class derived from list() that manages a list of compiler arguments. Should
+ be used while constructing compiler arguments from various sources. Can be
+ operated with ordinary lists, so this does not need to be used everywhere.
+
+ All arguments must be inserted and stored in GCC-style (-lfoo, -Idir, etc)
+ and can converted to the native type of each compiler by using the
+ .to_native() method to which you must pass an instance of the compiler or
+ the compiler class.
+
+ New arguments added to this class (either with .append(), .extend(), or +=)
+ are added in a way that ensures that they override previous arguments.
+ For example:
+
+ >>> a = ['-Lfoo', '-lbar']
+ >>> a += ['-Lpho', '-lbaz']
+ >>> print(a)
+ ['-Lpho', '-Lfoo', '-lbar', '-lbaz']
+
+ Arguments will also be de-duped if they can be de-duped safely.
+
+ Note that because of all this, this class is not commutative and does not
+ preserve the order of arguments if it is safe to not. For example:
+ >>> ['-Ifoo', '-Ibar'] + ['-Ifez', '-Ibaz', '-Werror']
+ ['-Ifez', '-Ibaz', '-Ifoo', '-Ibar', '-Werror']
+ >>> ['-Ifez', '-Ibaz', '-Werror'] + ['-Ifoo', '-Ibar']
+ ['-Ifoo', '-Ibar', '-Ifez', '-Ibaz', '-Werror']
+
+ '''
+ # NOTE: currently this class is only for C-like compilers, but it can be
+ # extended to other languages easily. Just move the following to the
+ # compiler class and initialize when self.compiler is set.
+
+ # Arg prefixes that override by prepending instead of appending
+ prepend_prefixes = ('-I', '-L')
+ # Arg prefixes and args that must be de-duped by returning 2
+ dedup2_prefixes = ('-I', '-L', '-D', '-U')
+ dedup2_suffixes = ()
+ dedup2_args = ()
+ # Arg prefixes and args that must be de-duped by returning 1
+ dedup1_prefixes = ('-l',)
+ dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a')
+ # Match a .so of the form path/to/libfoo.so.0.1.0
+ # Only UNIX shared libraries require this. Others have a fixed extension.
+ dedup1_regex = re.compile(r'([\/\\]|\A)lib.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$')
+ dedup1_args = ('-c', '-S', '-E', '-pipe', '-pthread')
+ compiler = None
+
+ def _check_args(self, args):
+ cargs = []
+ if len(args) > 2:
+ raise TypeError("CompilerArgs() only accepts at most 2 arguments: "
+ "The compiler, and optionally an initial list")
+ elif not args:
+ return cargs
+ elif len(args) == 1:
+ if isinstance(args[0], (Compiler, StaticLinker)):
+ self.compiler = args[0]
+ else:
+ raise TypeError("you must pass a Compiler instance as one of "
+ "the arguments")
+ elif len(args) == 2:
+ if isinstance(args[0], (Compiler, StaticLinker)):
+ self.compiler = args[0]
+ cargs = args[1]
+ elif isinstance(args[1], (Compiler, StaticLinker)):
+ cargs = args[0]
+ self.compiler = args[1]
+ else:
+ raise TypeError("you must pass a Compiler instance as one of "
+ "the two arguments")
+ else:
+ raise AssertionError('Not reached')
+ return cargs
+
+ def __init__(self, *args):
+ super().__init__(self._check_args(args))
+
+ @classmethod
+ def _can_dedup(cls, arg):
+ '''
+ Returns whether the argument can be safely de-duped. This is dependent
+ on three things:
+
+ a) Whether an argument can be 'overridden' by a later argument. For
+ example, -DFOO defines FOO and -UFOO undefines FOO. In this case, we
+ can safely remove the previous occurrence and add a new one. The same
+ is true for include paths and library paths with -I and -L. For
+ these we return `2`. See `dedup2_prefixes` and `dedup2_args`.
+ b) Arguments that once specified cannot be undone, such as `-c` or
+ `-pipe`. New instances of these can be completely skipped. For these
+ we return `1`. See `dedup1_prefixes` and `dedup1_args`.
+ c) Whether it matters where or how many times on the command-line
+ a particular argument is present. This can matter for symbol
+ resolution in static or shared libraries, so we cannot de-dup or
+ reorder them. For these we return `0`. This is the default.
+
+ In addition to these, we handle library arguments specially.
+ With GNU ld, we surround library arguments with -Wl,--start/end-group
+ to recursively search for symbols in the libraries. This is not needed
+ with other linkers.
+ '''
+
+ # A standalone argument must never be deduplicated because it is
+ # defined by what comes _after_ it. Thus dedupping this:
+ # -D FOO -D BAR
+ # would yield either
+ # -D FOO BAR
+ # or
+ # FOO -D BAR
+ # both of which are invalid.
+ if arg in cls.dedup2_prefixes:
+ return 0
+ if arg in cls.dedup2_args or \
+ arg.startswith(cls.dedup2_prefixes) or \
+ arg.endswith(cls.dedup2_suffixes):
+ return 2
+ if arg in cls.dedup1_args or \
+ arg.startswith(cls.dedup1_prefixes) or \
+ arg.endswith(cls.dedup1_suffixes) or \
+ re.search(cls.dedup1_regex, arg):
+ return 1
+ return 0
+
+ @classmethod
+ def _should_prepend(cls, arg):
+ if arg.startswith(cls.prepend_prefixes):
+ return True
+ return False
+
+ def to_native(self):
+ # Check if we need to add --start/end-group for circular dependencies
+ # between static libraries, and for recursively searching for symbols
+ # needed by static libraries that are provided by object files or
+ # shared libraries.
+ if get_compiler_uses_gnuld(self.compiler):
+ global soregex
+ group_start = -1
+ for each in self:
+ if not each.startswith('-l') and not each.endswith('.a') and \
+ not soregex.match(each):
+ continue
+ i = self.index(each)
+ if group_start < 0:
+ # First occurrence of a library
+ group_start = i
+ if group_start >= 0:
+ # Last occurrence of a library
+ self.insert(i + 1, '-Wl,--end-group')
+ self.insert(group_start, '-Wl,--start-group')
+ return self.compiler.unix_args_to_native(self)
+
+ def append_direct(self, arg):
+ '''
+ Append the specified argument without any reordering or de-dup
+ '''
+ super().append(arg)
+
    def extend_direct(self, iterable):
        '''
        Extend using the elements in the specified iterable without any
        reordering or de-dup (bypasses the overridden extend()/__iadd__
        logic).
        '''
        super().extend(iterable)
+
+ def __add__(self, args):
+ new = CompilerArgs(self, self.compiler)
+ new += args
+ return new
+
    def __iadd__(self, args):
        '''
        Add two CompilerArgs while taking into account overriding of arguments
        and while preserving the order of arguments as much as possible
        '''
        # Arguments that must go to the front (per _should_prepend) vs the end.
        pre = []
        post = []
        if not isinstance(args, list):
            raise TypeError('can only concatenate list (not "{}") to list'.format(args))
        for arg in args:
            # If the argument can be de-duped, do it either by removing the
            # previous occurrence of it and adding a new one, or not adding the
            # new occurrence.
            dedup = self._can_dedup(arg)
            if dedup == 1:
                # Argument already exists and adding a new instance is useless
                if arg in self or arg in pre or arg in post:
                    continue
            if dedup == 2:
                # Remove all previous occurrences of the arg and add it anew
                # NOTE(review): list.remove() drops only the FIRST previous
                # occurrence, not all of them -- presumably duplicates never
                # accumulate because of this very dedup logic; confirm.
                if arg in self:
                    self.remove(arg)
                if arg in pre:
                    pre.remove(arg)
                if arg in post:
                    post.remove(arg)
            if self._should_prepend(arg):
                pre.append(arg)
            else:
                post.append(arg)
        # Insert at the beginning
        self[:0] = pre
        # Append to the end
        super().__iadd__(post)
        return self
+
+ def __radd__(self, args):
+ new = CompilerArgs(args, self.compiler)
+ new += self
+ return new
+
    def __mul__(self, args):
        # Repeating compiler arguments is never meaningful; fail loudly
        # instead of inheriting list's repetition behavior.
        raise TypeError("can't multiply compiler arguments")
+
    def __imul__(self, args):
        # See __mul__: in-place repetition is equally meaningless.
        raise TypeError("can't multiply compiler arguments")
+
    def __rmul__(self, args):
        # See __mul__: reflected repetition is equally meaningless.
        raise TypeError("can't multiply compiler arguments")
+
    def append(self, arg):
        # Route through __iadd__ so de-duplication and reordering apply.
        self.__iadd__([arg])
+
    def extend(self, args):
        # Route through __iadd__ so de-duplication and reordering apply.
        self.__iadd__(args)
+
class Compiler:
    '''
    Abstract base class for all per-language compilers.

    Subclasses are expected to set ``self.language`` (before calling
    ``__init__``) and ``self.id``, and to override the hook methods below.
    Most hooks default to "no extra arguments" (empty list) or raise
    EnvironmentException for unsupported functionality.
    '''
    # Libraries to ignore in find_library() since they are provided by the
    # compiler or the C library. Currently only used for MSVC.
    ignore_libs = ()

    def __init__(self, exelist, version):
        # Normalize the compiler command to an argv list.
        if isinstance(exelist, str):
            self.exelist = [exelist]
        elif isinstance(exelist, list):
            self.exelist = exelist
        else:
            raise TypeError('Unknown argument to Compiler')
        # In case it's been overridden by a child class already
        if not hasattr(self, 'file_suffixes'):
            self.file_suffixes = lang_suffixes[self.language]
        if not hasattr(self, 'can_compile_suffixes'):
            self.can_compile_suffixes = set(self.file_suffixes)
        self.default_suffix = self.file_suffixes[0]
        self.version = version
        self.base_options = []

    def __repr__(self):
        repr_str = "<{0}: v{1} `{2}`>"
        return repr_str.format(self.__class__.__name__, self.version,
                               ' '.join(self.exelist))

    def can_compile(self, src):
        '''Return True if this compiler handles the file's suffix.'''
        # src may be a File-like object (has .fname) or a plain path string.
        if hasattr(src, 'fname'):
            src = src.fname
        suffix = os.path.splitext(src)[1].lower()
        if suffix and suffix[1:] in self.can_compile_suffixes:
            return True
        return False

    def get_id(self):
        return self.id

    def get_language(self):
        return self.language

    def get_display_language(self):
        # Human-readable name; overridden where capitalize() is wrong (C++ etc).
        return self.language.capitalize()

    def get_default_suffix(self):
        return self.default_suffix

    def get_exelist(self):
        # Return a copy so callers cannot mutate our argv list.
        return self.exelist[:]

    def get_builtin_define(self, *args, **kwargs):
        raise EnvironmentException('%s does not support get_builtin_define.' % self.id)

    def has_builtin_define(self, *args, **kwargs):
        raise EnvironmentException('%s does not support has_builtin_define.' % self.id)

    def get_always_args(self):
        # Arguments added to every compile; none by default.
        return []

    def get_linker_always_args(self):
        # Arguments added to every link; none by default.
        return []

    def gen_import_library_args(self, implibname):
        """
        Used only on Windows for libraries that need an import library.
        This currently means C, C++, Fortran.
        """
        return []

    def get_options(self):
        return {} # build afresh every time

    def get_option_compile_args(self, options):
        # Translate user options into compile args; none by default.
        return []

    def get_option_link_args(self, options):
        # Translate user options into link args; none by default.
        return []

    # The following checks raise by default; languages that support them
    # override with real implementations.
    def has_header(self, *args, **kwargs):
        raise EnvironmentException('Language %s does not support header checks.' % self.get_display_language())

    def has_header_symbol(self, *args, **kwargs):
        raise EnvironmentException('Language %s does not support header symbol checks.' % self.get_display_language())

    def compiles(self, *args, **kwargs):
        raise EnvironmentException('Language %s does not support compile checks.' % self.get_display_language())

    def links(self, *args, **kwargs):
        raise EnvironmentException('Language %s does not support link checks.' % self.get_display_language())

    def run(self, *args, **kwargs):
        raise EnvironmentException('Language %s does not support run checks.' % self.get_display_language())

    def sizeof(self, *args, **kwargs):
        raise EnvironmentException('Language %s does not support sizeof checks.' % self.get_display_language())

    def alignment(self, *args, **kwargs):
        raise EnvironmentException('Language %s does not support alignment checks.' % self.get_display_language())

    def has_function(self, *args, **kwargs):
        raise EnvironmentException('Language %s does not support function checks.' % self.get_display_language())

    @classmethod
    def unix_args_to_native(cls, args):
        "Always returns a copy that can be independently mutated"
        return args[:]

    def find_library(self, *args, **kwargs):
        raise EnvironmentException('Language {} does not support library finding.'.format(self.get_display_language()))

    def get_library_dirs(self):
        return []

    def has_argument(self, arg, env):
        # Single-argument convenience wrapper over has_multi_arguments.
        return self.has_multi_arguments([arg], env)

    def has_multi_arguments(self, args, env):
        raise EnvironmentException(
            'Language {} does not support has_multi_arguments.'.format(
                self.get_display_language()))

    def get_supported_arguments(self, args, env):
        '''Filter args down to those the compiler accepts.'''
        supported_args = []
        for arg in args:
            if self.has_argument(arg, env):
                supported_args.append(arg)
        return supported_args

    def get_cross_extra_flags(self, environment, link):
        '''
        Collect extra per-language (and optionally link) flags from the
        cross file's [properties] section.
        '''
        # NOTE(review): relies on self.is_cross being set by a subclass;
        # the base __init__ never sets it -- confirm all callers are
        # cross-aware compiler classes.
        extra_flags = []
        if self.is_cross and environment:
            if 'properties' in environment.cross_info.config:
                props = environment.cross_info.config['properties']
                lang_args_key = self.language + '_args'
                extra_flags += props.get(lang_args_key, [])
                lang_link_args_key = self.language + '_link_args'
                if link:
                    extra_flags += props.get(lang_link_args_key, [])
        return extra_flags

    def _get_compile_output(self, dirname, mode):
        '''Pick the output path (or None) for a test compile in `mode`.'''
        # In pre-processor mode, the output is sent to stdout and discarded
        if mode == 'preprocess':
            return None
        # Extension only matters if running results; '.exe' is
        # guaranteed to be executable on every platform.
        if mode == 'link':
            suffix = 'exe'
        else:
            suffix = 'obj'
        return os.path.join(dirname, 'output.' + suffix)

    @contextlib.contextmanager
    def compile(self, code, extra_args=None, mode='link'):
        '''
        Compile (or preprocess/link) a test snippet in a temp directory.

        code: source text, or a mesonlib.File naming an existing file
        extra_args: additional command-line arguments
        mode: 'preprocess', 'compile' or 'link'
        Yields the finished process object with stdo, stde, input_name and
        output_name attributes attached.
        '''
        if extra_args is None:
            extra_args = []
        try:
            with tempfile.TemporaryDirectory() as tmpdirname:
                if isinstance(code, str):
                    srcname = os.path.join(tmpdirname,
                                           'testfile.' + self.default_suffix)
                    with open(srcname, 'w') as ofile:
                        ofile.write(code)
                elif isinstance(code, mesonlib.File):
                    srcname = code.fname
                output = self._get_compile_output(tmpdirname, mode)

                # Construct the compiler command-line
                commands = CompilerArgs(self)
                commands.append(srcname)
                commands += extra_args
                commands += self.get_always_args()
                if mode == 'compile':
                    commands += self.get_compile_only_args()
                # Preprocess mode outputs to stdout, so no output args
                if mode == 'preprocess':
                    commands += self.get_preprocess_only_args()
                else:
                    commands += self.get_output_args(output)
                # Generate full command-line with the exelist
                commands = self.get_exelist() + commands.to_native()
                mlog.debug('Running compile:')
                mlog.debug('Working directory: ', tmpdirname)
                mlog.debug('Command line: ', ' '.join(commands), '\n')
                mlog.debug('Code:\n', code)
                # Popen_safe returns (proc, stdout, stderr); unpacking
                # targets are assigned left-to-right, so p is bound first
                # and then gets stdo/stde attributes attached to it.
                p, p.stdo, p.stde = Popen_safe(commands, cwd=tmpdirname)
                mlog.debug('Compiler stdout:\n', p.stdo)
                mlog.debug('Compiler stderr:\n', p.stde)
                p.input_name = srcname
                p.output_name = output
                yield p
        except (PermissionError, OSError):
            # On Windows antivirus programs and the like hold on to files so
            # they can't be deleted. There's not much to do in this case. Also,
            # catch OSError because the directory is then no longer empty.
            pass

    def get_colorout_args(self, colortype):
        # Colored diagnostics; none by default.
        return []

    # Some compilers (msvc) write debug info to a separate file.
    # These args specify where it should be written.
    def get_compile_debugfile_args(self, rel_obj, **kwargs):
        return []

    def get_link_debugfile_args(self, rel_obj):
        return []

    def get_std_shared_lib_link_args(self):
        return []

    def get_std_shared_module_link_args(self):
        # By default shared modules link the same way as shared libraries.
        return self.get_std_shared_lib_link_args()

    def get_link_whole_for(self, args):
        # Linking "whole" an empty list is trivially supported.
        if isinstance(args, list) and not args:
            return []
        raise EnvironmentException('Language %s does not support linking whole archives.' % self.get_display_language())

    # Compiler arguments needed to enable the given instruction set.
    # May be [] meaning nothing needed or None meaning the given set
    # is not supported.
    def get_instruction_set_args(self, instruction_set):
        return None

    def build_osx_rpath_args(self, build_dir, rpath_paths, build_rpath):
        '''Build -Wl,-rpath arguments for the Apple linker.'''
        if not rpath_paths and not build_rpath:
            return []
        # On OSX, rpaths must be absolute.
        abs_rpaths = [os.path.join(build_dir, p) for p in rpath_paths]
        if build_rpath != '':
            abs_rpaths.append(build_rpath)
        args = ['-Wl,-rpath,' + rp for rp in abs_rpaths]
        return args

    def build_unix_rpath_args(self, build_dir, from_dir, rpath_paths, build_rpath, install_rpath):
        '''
        Build $ORIGIN-relative -Wl,-rpath arguments for ELF platforms,
        padded so the (possibly longer) install_rpath fits in the same
        space later.
        '''
        if not rpath_paths and not install_rpath and not build_rpath:
            return []
        # The rpaths we write must be relative, because otherwise
        # they have different length depending on the build
        # directory. This breaks reproducible builds.
        rel_rpaths = []
        for p in rpath_paths:
            if p == from_dir:
                relative = '' # relpath errors out in this case
            else:
                relative = os.path.relpath(os.path.join(build_dir, p), os.path.join(build_dir, from_dir))
            rel_rpaths.append(relative)
        paths = ':'.join([os.path.join('$ORIGIN', p) for p in rel_rpaths])
        # Build_rpath is used as-is (it is usually absolute).
        if build_rpath != '':
            if paths != '':
                paths += ':'
            paths += build_rpath
        # Pad with 'X' so the string is at least as long as install_rpath --
        # presumably so the install step can overwrite it in place without
        # relinking; confirm against the install/depfixer logic.
        if len(paths) < len(install_rpath):
            padding = 'X' * (len(install_rpath) - len(paths))
            if not paths:
                paths = padding
            else:
                paths = paths + ':' + padding
        args = ['-Wl,-rpath,' + paths]
        if get_compiler_is_linuxlike(self):
            # Rpaths to use while linking must be absolute. These are not
            # written to the binary. Needed only with GNU ld:
            # https://sourceware.org/bugzilla/show_bug.cgi?id=16936
            # Not needed on Windows or other platforms that don't use RPATH
            # https://github.com/mesonbuild/meson/issues/1897
            lpaths = ':'.join([os.path.join(build_dir, p) for p in rpath_paths])
            args += ['-Wl,-rpath-link,' + lpaths]
        return args
+
+
# Platform flavours for each GCC-compatible compiler family.  These drive
# platform-specific decisions (soname/install_name flags, PIC defaults,
# linker argument tables) in the classes below.
GCC_STANDARD = 0
GCC_OSX = 1
GCC_MINGW = 2
GCC_CYGWIN = 3

CLANG_STANDARD = 0
CLANG_OSX = 1
CLANG_WIN = 2
# Possibly clang-cl?

ICC_STANDARD = 0
ICC_OSX = 1
ICC_WIN = 2
+
def get_gcc_soname_args(gcc_type, prefix, shlib_name, suffix, path, soversion, is_shared_module):
    '''
    Return the linker arguments that embed a shared library's soname
    (ELF/PE flavours) or install name (OS X) at link time.

    gcc_type is one of the GCC_* platform constants; soversion may be None
    for an unversioned library.  OS X shared modules get no install name.
    '''
    sostr = '' if soversion is None else '.' + soversion
    if gcc_type in (GCC_STANDARD, GCC_MINGW, GCC_CYGWIN):
        # Might not be correct for mingw but seems to work.
        return ['-Wl,-soname,%s%s.%s%s' % (prefix, shlib_name, suffix, sostr)]
    if gcc_type == GCC_OSX:
        if is_shared_module:
            return []
        # OS X dylibs always carry the .dylib suffix regardless of `suffix`.
        install_name = prefix + shlib_name + sostr + '.dylib'
        return ['-install_name', os.path.join('@rpath', install_name)]
    raise RuntimeError('Not implemented yet.')
+
def get_compiler_is_linuxlike(compiler):
    '''
    Return True when the compiler targets a "standard" (ELF, Linux-like)
    platform, i.e. its gcc/clang/icc platform type is the *_STANDARD one.
    '''
    gcc = getattr(compiler, 'gcc_type', None)
    clang = getattr(compiler, 'clang_type', None)
    icc = getattr(compiler, 'icc_type', None)
    return gcc == GCC_STANDARD or clang == CLANG_STANDARD or icc == ICC_STANDARD
+
def get_compiler_uses_gnuld(c):
    '''
    Heuristically decide whether the given compiler drives the GNU linker
    (and thus supports flags such as -Wl,--start-group).

    FIXME: Perhaps we should detect the linker in the environment?
    FIXME: Assumes that *BSD use GNU ld, but they might start using lld soon
    '''
    if getattr(c, 'gcc_type', None) in (GCC_STANDARD, GCC_MINGW, GCC_CYGWIN):
        return True
    if getattr(c, 'clang_type', None) in (CLANG_STANDARD, CLANG_WIN):
        return True
    return getattr(c, 'icc_type', None) in (ICC_STANDARD, ICC_WIN)
+
def get_largefile_args(compiler):
    '''
    Enable transparent large-file-support for 32-bit UNIX systems
    '''
    if not get_compiler_is_linuxlike(compiler):
        # We don't enable -D_LARGEFILE64_SOURCE since that enables
        # transitionary features and must be enabled by programs that use
        # those features explicitly.
        return []
    # Enable large-file support unconditionally on all platforms other
    # than macOS and Windows. macOS is now 64-bit-only so it doesn't
    # need anything special, and Windows doesn't have automatic LFS.
    # You must use the 64-bit counterparts explicitly.
    # glibc, musl, and uclibc, and all BSD libcs support this. On Android,
    # support for transparent LFS is available depending on the version of
    # Bionic: https://github.com/android/platform_bionic#32-bit-abi-bugs
    # https://code.google.com/p/android/issues/detail?id=64613
    #
    # If this breaks your code, fix it! It's been 20+ years!
    return ['-D_FILE_OFFSET_BITS=64']
+
# TODO: The result from calling compiler should be cached. So that calling this
# function multiple times don't add latency.
def gnulike_default_include_dirs(compiler, lang):
    '''
    Query a GCC-like compiler for its built-in header search directories
    by running `<compiler> -x<lang> -E -v -` and parsing the stderr banner.

    compiler: argv list for the compiler executable
    lang: meson language name ('cpp' is mapped to the driver's 'c++')
    Returns a list of directory strings; may be empty if parsing fails.
    '''
    if lang == 'cpp':
        lang = 'c++'
    env = os.environ.copy()
    # Force untranslated messages so the marker strings below match.
    env["LC_ALL"] = 'C'
    cmd = compiler + ['-x{}'.format(lang), '-E', '-v', '-']
    p = subprocess.Popen(
        cmd,
        stdin=subprocess.DEVNULL,
        stderr=subprocess.PIPE,
        stdout=subprocess.PIPE,
        env=env
    )
    # NOTE(review): the process is never wait()ed on and the stdout pipe is
    # left unread -- presumably harmless for '-E -v -' with empty stdin,
    # but could leave a zombie process; confirm.
    stderr = p.stderr.read().decode('utf-8')
    # Small state machine over the banner: 0 = before any list,
    # 1 = inside the '#include "..."' list, 2 = inside the '#include <...>' list.
    parse_state = 0
    paths = []
    for line in stderr.split('\n'):
        if parse_state == 0:
            if line == '#include "..." search starts here:':
                parse_state = 1
        elif parse_state == 1:
            if line == '#include <...> search starts here:':
                parse_state = 2
            else:
                # Directory lines are indented by one space; strip it.
                paths.append(line[1:])
        elif parse_state == 2:
            if line == 'End of search list.':
                break
            else:
                paths.append(line[1:])
    if len(paths) == 0:
        mlog.warning('No include directory found parsing "{cmd}" output'.format(cmd=" ".join(cmd)))
    return paths
+
class GnuCompiler:
    '''
    Mixin providing behaviour shared by all GNU (gcc-family) compilers.
    Must be combined with a language-specific Compiler subclass that
    supplies self.version, self.exelist, self.language, etc.
    '''
    # Functionality that is common to all GNU family compilers.
    def __init__(self, gcc_type, defines):
        self.id = 'gcc'
        # One of the GCC_* platform constants (standard/OSX/MinGW/Cygwin).
        self.gcc_type = gcc_type
        # Pre-detected builtin #defines; queried by (has|get)_builtin_define.
        self.defines = defines or {}
        self.base_options = ['b_pch', 'b_lto', 'b_pgo', 'b_sanitize', 'b_coverage',
                             'b_colorout', 'b_ndebug', 'b_staticpic']
        # These linker-related options are only added on non-OSX platforms.
        if self.gcc_type != GCC_OSX:
            self.base_options.append('b_lundef')
            self.base_options.append('b_asneeded')
        # All GCC backends can do assembly
        self.can_compile_suffixes.add('s')

    def get_colorout_args(self, colortype):
        # Colored diagnostics require GCC >= 4.9.
        if mesonlib.version_compare(self.version, '>=4.9.0'):
            return gnu_color_args[colortype][:]
        return []

    def get_warn_args(self, level):
        args = super().get_warn_args(level)
        if mesonlib.version_compare(self.version, '<4.8.0') and '-Wpedantic' in args:
            # -Wpedantic was added in 4.8.0
            # https://gcc.gnu.org/gcc-4.8/changes.html
            args[args.index('-Wpedantic')] = '-pedantic'
        return args

    def has_builtin_define(self, define):
        return define in self.defines

    def get_builtin_define(self, define):
        # Implicitly returns None when the define was not detected.
        if define in self.defines:
            return self.defines[define]

    def get_pic_args(self):
        if self.gcc_type in (GCC_CYGWIN, GCC_MINGW, GCC_OSX):
            return [] # On Windows and OS X, pic is always on.
        return ['-fPIC']

    def get_buildtype_args(self, buildtype):
        return gnulike_buildtype_args[buildtype]

    def get_buildtype_linker_args(self, buildtype):
        # OS X uses the Apple linker, which has its own buildtype table.
        if self.gcc_type == GCC_OSX:
            return apple_buildtype_linker_args[buildtype]
        return gnulike_buildtype_linker_args[buildtype]

    def get_pch_suffix(self):
        return 'gch'

    def split_shlib_to_parts(self, fname):
        # Returns (dirname, original path as given).
        return os.path.split(fname)[0], fname

    def get_soname_args(self, prefix, shlib_name, suffix, path, soversion, is_shared_module):
        # Delegate to the shared helper using our platform type.
        return get_gcc_soname_args(self.gcc_type, prefix, shlib_name, suffix, path, soversion, is_shared_module)

    def get_std_shared_lib_link_args(self):
        return ['-shared']

    def get_link_whole_for(self, args):
        # Pull every object out of the listed static archives, not just
        # the ones that resolve currently-undefined symbols.
        return ['-Wl,--whole-archive'] + args + ['-Wl,--no-whole-archive']

    def gen_vs_module_defs_args(self, defsfile):
        if not isinstance(defsfile, str):
            raise RuntimeError('Module definitions file should be str')
        # On Windows targets, .def files may be specified on the linker command
        # line like an object file.
        if self.gcc_type in (GCC_CYGWIN, GCC_MINGW):
            return [defsfile]
        # For other targets, discard the .def file.
        return []

    def get_gui_app_args(self):
        # -mwindows selects the GUI subsystem on Windows toolchains.
        if self.gcc_type in (GCC_CYGWIN, GCC_MINGW):
            return ['-mwindows']
        return []

    def get_instruction_set_args(self, instruction_set):
        # None signals that the instruction set is not supported.
        return gnulike_instruction_set_args.get(instruction_set, None)

    def get_default_include_dirs(self):
        return gnulike_default_include_dirs(self.exelist, self.language)
+
+
class ClangCompiler:
    '''
    Mixin providing behaviour shared by all Clang compilers.  Must be
    combined with a language-specific Compiler subclass that supplies
    self.version, self.exelist, self.language, etc.
    '''
    def __init__(self, clang_type):
        self.id = 'clang'
        # One of the CLANG_* platform constants (standard/OSX/Windows).
        self.clang_type = clang_type
        self.base_options = ['b_pch', 'b_lto', 'b_pgo', 'b_sanitize', 'b_coverage',
                             'b_ndebug', 'b_staticpic', 'b_colorout']
        # These linker-related options are only added on non-OSX platforms.
        if self.clang_type != CLANG_OSX:
            self.base_options.append('b_lundef')
            self.base_options.append('b_asneeded')
        # All Clang backends can do assembly and LLVM IR
        self.can_compile_suffixes.update(['ll', 's'])

    def get_pic_args(self):
        if self.clang_type in (CLANG_WIN, CLANG_OSX):
            return [] # On Windows and OS X, pic is always on.
        return ['-fPIC']

    def get_colorout_args(self, colortype):
        return clang_color_args[colortype][:]

    def get_buildtype_args(self, buildtype):
        return gnulike_buildtype_args[buildtype]

    def get_buildtype_linker_args(self, buildtype):
        # OS X uses the Apple linker, which has its own buildtype table.
        if self.clang_type == CLANG_OSX:
            return apple_buildtype_linker_args[buildtype]
        return gnulike_buildtype_linker_args[buildtype]

    def get_pch_suffix(self):
        return 'pch'

    def get_pch_use_args(self, pch_dir, header):
        # Workaround for Clang bug http://llvm.org/bugs/show_bug.cgi?id=15136
        # This flag is internal to Clang (or at least not documented on the man page)
        # so it might change semantics at any time.
        return ['-include-pch', os.path.join(pch_dir, self.get_pch_name(header))]

    def get_soname_args(self, prefix, shlib_name, suffix, path, soversion, is_shared_module):
        # Map our clang platform type to the equivalent gcc type and reuse
        # the shared soname helper.
        if self.clang_type == CLANG_STANDARD:
            gcc_type = GCC_STANDARD
        elif self.clang_type == CLANG_OSX:
            gcc_type = GCC_OSX
        elif self.clang_type == CLANG_WIN:
            gcc_type = GCC_MINGW
        else:
            raise MesonException('Unreachable code when converting clang type to gcc type.')
        return get_gcc_soname_args(gcc_type, prefix, shlib_name, suffix, path, soversion, is_shared_module)

    def has_multi_arguments(self, args, env):
        # By default Clang only warns about unknown/unused arguments; turn
        # those warnings into errors so the probe gives a real answer.
        myargs = ['-Werror=unknown-warning-option', '-Werror=unused-command-line-argument']
        if mesonlib.version_compare(self.version, '>=3.6.0'):
            myargs.append('-Werror=ignored-optimization-argument')
        return super().has_multi_arguments(
            myargs + args,
            env)

    def has_function(self, funcname, prefix, env, extra_args=None, dependencies=None):
        if extra_args is None:
            extra_args = []
        # Starting with XCode 8, we need to pass this to force linker
        # visibility to obey OS X and iOS minimum version targets with
        # -mmacosx-version-min, -miphoneos-version-min, etc.
        # https://github.com/Homebrew/homebrew-core/issues/3727
        # NOTE(review): this appends to a caller-supplied extra_args list
        # when one is given (visible side effect) -- confirm callers pass
        # throwaway lists.
        if self.clang_type == CLANG_OSX and version_compare(self.version, '>=8.0'):
            extra_args.append('-Wl,-no_weak_imports')
        return super().has_function(funcname, prefix, env, extra_args, dependencies)

    def get_std_shared_module_link_args(self):
        # On OS X, shared modules are bundles with load-time symbol lookup.
        if self.clang_type == CLANG_OSX:
            return ['-bundle', '-Wl,-undefined,dynamic_lookup']
        return ['-shared']

    def get_link_whole_for(self, args):
        if self.clang_type == CLANG_OSX:
            # The Apple linker has no --whole-archive; -force_load takes
            # exactly one archive per occurrence.
            result = []
            for a in args:
                result += ['-Wl,-force_load', a]
            return result
        return ['-Wl,--whole-archive'] + args + ['-Wl,--no-whole-archive']

    def get_instruction_set_args(self, instruction_set):
        # None signals that the instruction set is not supported.
        return gnulike_instruction_set_args.get(instruction_set, None)

    def get_default_include_dirs(self):
        return gnulike_default_include_dirs(self.exelist, self.language)
+
+
# Tested on linux for ICC 14.0.3, 15.0.6, 16.0.4, 17.0.1
class IntelCompiler:
    '''
    Mixin providing behaviour shared by all Intel (ICC) compilers.  Must be
    combined with a language-specific Compiler subclass.
    '''
    def __init__(self, icc_type):
        self.id = 'intel'
        # One of the ICC_* platform constants.
        self.icc_type = icc_type
        # Language name passed to '-x' for precompiled headers; overridden
        # by language subclasses (e.g. 'c++-header' in IntelCPPCompiler).
        self.lang_header = 'none'
        self.base_options = ['b_pch', 'b_lto', 'b_pgo', 'b_sanitize', 'b_coverage',
                             'b_colorout', 'b_ndebug', 'b_staticpic', 'b_lundef', 'b_asneeded']
        # Assembly
        self.can_compile_suffixes.add('s')

    def get_pic_args(self):
        return ['-fPIC']

    def get_buildtype_args(self, buildtype):
        return gnulike_buildtype_args[buildtype]

    def get_buildtype_linker_args(self, buildtype):
        return gnulike_buildtype_linker_args[buildtype]

    def get_pch_suffix(self):
        return 'pchi'

    def get_pch_use_args(self, pch_dir, header):
        # ICC needs the language asserted around the injected header and
        # then reset to 'none' for the actual sources.
        return ['-pch', '-pch_dir', os.path.join(pch_dir), '-x',
                self.lang_header, '-include', header, '-x', 'none']

    def get_pch_name(self, header_name):
        return os.path.split(header_name)[-1] + '.' + self.get_pch_suffix()

    def split_shlib_to_parts(self, fname):
        # Returns (dirname, original path as given).
        return os.path.split(fname)[0], fname

    def get_soname_args(self, prefix, shlib_name, suffix, path, soversion, is_shared_module):
        # Map our icc platform type to the equivalent gcc type and reuse
        # the shared soname helper.
        if self.icc_type == ICC_STANDARD:
            gcc_type = GCC_STANDARD
        elif self.icc_type == ICC_OSX:
            gcc_type = GCC_OSX
        elif self.icc_type == ICC_WIN:
            gcc_type = GCC_MINGW
        else:
            raise MesonException('Unreachable code when converting icc type to gcc type.')
        return get_gcc_soname_args(gcc_type, prefix, shlib_name, suffix, path, soversion, is_shared_module)

    def get_std_shared_lib_link_args(self):
        # FIXME: Don't know how icc works on OSX
        # if self.icc_type == ICC_OSX:
        #     return ['-bundle']
        return ['-shared']

    def get_default_include_dirs(self):
        return gnulike_default_include_dirs(self.exelist, self.language)
--- /dev/null
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os.path
+
+from .. import coredata
+from ..mesonlib import version_compare
+
+from .c import CCompiler, VisualStudioCCompiler
+from .compilers import (
+ GCC_MINGW,
+ gnu_winlibs,
+ msvc_winlibs,
+ ClangCompiler,
+ GnuCompiler,
+ IntelCompiler,
+)
+
class CPPCompiler(CCompiler):
    '''Base class for all C++ compilers; layers C++ behaviour on CCompiler.'''
    def __init__(self, exelist, version, is_cross, exe_wrap):
        # If a child ObjCPP class has already set it, don't set it ourselves
        if not hasattr(self, 'language'):
            self.language = 'cpp'
        CCompiler.__init__(self, exelist, version, is_cross, exe_wrap)

    def get_display_language(self):
        return 'C++'

    def get_no_stdinc_args(self):
        return ['-nostdinc++']

    def sanity_check(self, work_dir, environment):
        # 'class' is invalid C, so this also verifies that we are really
        # compiling in C++ mode.
        code = 'class breakCCompiler;int main(int argc, char **argv) { return 0; }\n'
        return self.sanity_check_impl(work_dir, environment, 'sanitycheckcpp.cc', code)

    def get_compiler_check_args(self):
        # -fpermissive allows non-conforming code to compile which is necessary
        # for many C++ checks. Particularly, the has_header_symbol check is
        # too strict without this and always fails.
        return super().get_compiler_check_args() + ['-fpermissive']

    def has_header_symbol(self, hname, symbol, prefix, env, extra_args=None, dependencies=None):
        '''
        Check whether `symbol` is exposed by header `hname`: first as a
        C-like symbol, then (C++-specific) as a class or template name.
        '''
        # Check if it's a C-like symbol
        if super().has_header_symbol(hname, symbol, prefix, env, extra_args, dependencies):
            return True
        # Check if it's a class or a template
        if extra_args is None:
            extra_args = []
        fargs = {'prefix': prefix, 'header': hname, 'symbol': symbol}
        t = '''{prefix}
        #include <{header}>
        using {symbol};
        int main () {{ return 0; }}'''
        return self.compiles(t.format(**fargs), env, extra_args, dependencies)
+
+
class ClangCPPCompiler(ClangCompiler, CPPCompiler):
    '''C++ compiler driver for Clang.'''

    def __init__(self, exelist, version, cltype, is_cross, exe_wrapper=None):
        CPPCompiler.__init__(self, exelist, version, is_cross, exe_wrapper)
        ClangCompiler.__init__(self, cltype)
        base = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor']
        # Warning levels 1-3 build on the same base set.
        self.warn_args = {'1': base,
                          '2': base + ['-Wextra'],
                          '3': base + ['-Wextra', '-Wpedantic']}

    def get_options(self):
        '''Expose the cpp_std project option.'''
        std_choices = ['none', 'c++98', 'c++03', 'c++11', 'c++14', 'c++17', 'c++1z',
                       'gnu++11', 'gnu++14', 'gnu++17', 'gnu++1z']
        return {'cpp_std': coredata.UserComboOption('cpp_std', 'C++ language standard to use',
                                                    std_choices, 'none')}

    def get_option_compile_args(self, options):
        '''Translate the selected cpp_std into a -std= flag.'''
        std = options['cpp_std']
        if std.value == 'none':
            return []
        return ['-std=' + std.value]

    def get_option_link_args(self, options):
        # No option-driven link arguments for Clang C++.
        return []
+
+
class GnuCPPCompiler(GnuCompiler, CPPCompiler):
    '''C++ compiler driver for GCC (g++).'''
    def __init__(self, exelist, version, gcc_type, is_cross, exe_wrap, defines):
        CPPCompiler.__init__(self, exelist, version, is_cross, exe_wrap)
        GnuCompiler.__init__(self, gcc_type, defines)
        # Warning levels 1-3 build on the same base set.
        default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor']
        self.warn_args = {'1': default_warn_args,
                          '2': default_warn_args + ['-Wextra'],
                          '3': default_warn_args + ['-Wextra', '-Wpedantic']}

    def get_options(self):
        '''Expose cpp_std, cpp_debugstl and (on MinGW) cpp_winlibs options.'''
        opts = {'cpp_std': coredata.UserComboOption('cpp_std', 'C++ language standard to use',
                                                    ['none', 'c++98', 'c++03', 'c++11', 'c++14', 'c++17', 'c++1z',
                                                     'gnu++03', 'gnu++11', 'gnu++14', 'gnu++17', 'gnu++1z'],
                                                    'none'),
                'cpp_debugstl': coredata.UserBooleanOption('cpp_debugstl',
                                                           'STL debug mode',
                                                           False)}
        if self.gcc_type == GCC_MINGW:
            opts.update({
                'cpp_winlibs': coredata.UserArrayOption('cpp_winlibs', 'Standard Win libraries to link against',
                                                        gnu_winlibs), })
        return opts

    def get_option_compile_args(self, options):
        args = []
        std = options['cpp_std']
        if std.value != 'none':
            args.append('-std=' + std.value)
        if options['cpp_debugstl'].value:
            # Enables libstdc++'s checked containers/iterators.
            args.append('-D_GLIBCXX_DEBUG=1')
        return args

    def get_option_link_args(self, options):
        if self.gcc_type == GCC_MINGW:
            # Return a copy so callers cannot mutate the stored option value.
            return options['cpp_winlibs'].value[:]
        return []

    def get_pch_use_args(self, pch_dir, header):
        return ['-fpch-preprocess', '-include', os.path.split(header)[-1]]
+
+
class IntelCPPCompiler(IntelCompiler, CPPCompiler):
    '''C++ compiler driver for the Intel compiler (icpc).'''
    def __init__(self, exelist, version, icc_type, is_cross, exe_wrap):
        CPPCompiler.__init__(self, exelist, version, is_cross, exe_wrap)
        IntelCompiler.__init__(self, icc_type)
        self.lang_header = 'c++-header'
        # Warning levels 1-3 build on the same base set.
        default_warn_args = ['-Wall', '-w3', '-diag-disable:remark',
                             '-Wpch-messages', '-Wnon-virtual-dtor']
        self.warn_args = {'1': default_warn_args,
                          '2': default_warn_args + ['-Wextra'],
                          '3': default_warn_args + ['-Wextra', '-Wpedantic']}

    def get_options(self):
        '''Expose cpp_std and cpp_debugstl; available standards depend on
        the detected ICC version.'''
        c_stds = []
        g_stds = ['gnu++98']
        if version_compare(self.version, '>=15.0.0'):
            c_stds += ['c++11', 'c++14']
            g_stds += ['gnu++11']
        if version_compare(self.version, '>=16.0.0'):
            c_stds += ['c++17']
        if version_compare(self.version, '>=17.0.0'):
            g_stds += ['gnu++14']
        opts = {'cpp_std': coredata.UserComboOption('cpp_std', 'C++ language standard to use',
                                                    ['none'] + c_stds + g_stds,
                                                    'none'),
                'cpp_debugstl': coredata.UserBooleanOption('cpp_debugstl',
                                                           'STL debug mode',
                                                           False)}
        return opts

    def get_option_compile_args(self, options):
        args = []
        std = options['cpp_std']
        if std.value != 'none':
            args.append('-std=' + std.value)
        if options['cpp_debugstl'].value:
            # Enables libstdc++'s checked containers/iterators.
            args.append('-D_GLIBCXX_DEBUG=1')
        return args

    def get_option_link_args(self, options):
        return []

    def has_multi_arguments(self, args, env):
        # Promote ICC's "ignoring unknown option" remark (#10006) to a hard
        # error so unsupported arguments actually fail the probe.
        return super().has_multi_arguments(args + ['-diag-error', '10006'], env)
+
+
class VisualStudioCPPCompiler(VisualStudioCCompiler, CPPCompiler):
    '''C++ personality of the MSVC compiler (cl.exe).'''

    def __init__(self, exelist, version, is_cross, exe_wrap, is_64):
        # Set the language before VisualStudioCCompiler.__init__ runs so
        # the shared setup sees 'cpp' rather than 'c'.
        self.language = 'cpp'
        VisualStudioCCompiler.__init__(self, exelist, version, is_cross, exe_wrap, is_64)
        self.base_options = ['b_pch'] # FIXME add lto, pgo and the like

    def get_options(self):
        '''Expose the exception-handling model and Windows link libraries.'''
        opts = {}
        opts['cpp_eh'] = coredata.UserComboOption('cpp_eh',
                                                  'C++ exception handling type.',
                                                  ['none', 'a', 's', 'sc'],
                                                  'sc')
        opts['cpp_winlibs'] = coredata.UserArrayOption('cpp_winlibs',
                                                       'Windows libs to link against.',
                                                       msvc_winlibs)
        return opts

    def get_option_compile_args(self, options):
        '''Translate cpp_eh into the corresponding /EH flag.'''
        eh = options['cpp_eh']
        if eh.value == 'none':
            return []
        return ['/EH' + eh.value]

    def get_option_link_args(self, options):
        # Copy so callers cannot mutate the stored option value.
        return options['cpp_winlibs'].value[:]

    def get_compiler_check_args(self):
        # Visual Studio C++ compiler doesn't support -fpermissive,
        # so just use the plain C args.
        return super(VisualStudioCCompiler, self).get_compiler_check_args()
--- /dev/null
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os.path, subprocess
+
+from ..mesonlib import EnvironmentException
+
+from .compilers import Compiler, mono_buildtype_args
+
class MonoCompiler(Compiler):
    '''
    Compiler class for the Mono C# compiler.  Most linker-related hooks are
    no-ops because C# assemblies have no separate native link step.
    '''
    def __init__(self, exelist, version):
        # Must be set before Compiler.__init__, which reads
        # lang_suffixes[self.language].
        self.language = 'cs'
        super().__init__(exelist, version)
        self.id = 'mono'
        # Runtime used to execute the produced .exe assemblies.
        self.monorunner = 'mono'

    def get_display_language(self):
        return 'C sharp'

    def get_output_args(self, fname):
        return ['-out:' + fname]

    def get_link_args(self, fname):
        # Reference another assembly.
        return ['-r:' + fname]

    def get_soname_args(self, prefix, shlib_name, suffix, path, soversion, is_shared_module):
        # Not applicable to C# assemblies.
        return []

    def get_werror_args(self):
        return ['-warnaserror']

    def split_shlib_to_parts(self, fname):
        return None, fname

    def build_rpath_args(self, build_dir, from_dir, rpath_paths, build_rpath, install_rpath):
        # No rpath concept for assemblies.
        return []

    def get_dependency_gen_args(self, outtarget, outfile):
        return []

    def get_linker_exelist(self):
        return self.exelist[:]

    def get_compile_only_args(self):
        return []

    def get_linker_output_args(self, outputname):
        return []

    def get_coverage_args(self):
        return []

    def get_coverage_link_args(self):
        return []

    def get_std_exe_link_args(self):
        return []

    def get_include_args(self, path):
        return []

    def get_pic_args(self):
        # Position-independent code is not applicable to C#.
        return []

    def name_string(self):
        return ' '.join(self.exelist)

    def get_pch_use_args(self, pch_dir, header):
        # Precompiled headers are not applicable to C#.
        return []

    def get_pch_name(self, header_name):
        return ''

    def sanity_check(self, work_dir, environment):
        '''Compile and run a trivial C# program to prove the toolchain works.'''
        src = 'sanity.cs'
        obj = 'sanity.exe'
        source_name = os.path.join(work_dir, src)
        with open(source_name, 'w') as ofile:
            ofile.write('''public class Sanity {
    static public void Main () {
    }
}
''')
        pc = subprocess.Popen(self.exelist + [src], cwd=work_dir)
        pc.wait()
        if pc.returncode != 0:
            raise EnvironmentException('Mono compiler %s can not compile programs.' % self.name_string())
        # Run the produced assembly through the mono runtime.
        cmdlist = [self.monorunner, obj]
        pe = subprocess.Popen(cmdlist, cwd=work_dir)
        pe.wait()
        if pe.returncode != 0:
            raise EnvironmentException('Executables created by Mono compiler %s are not runnable.' % self.name_string())

    def needs_static_linker(self):
        return False

    def get_buildtype_args(self, buildtype):
        return mono_buildtype_args[buildtype]
--- /dev/null
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os.path, subprocess
+
+from ..mesonlib import EnvironmentException, version_compare
+
+from .compilers import (
+ GCC_STANDARD,
+ d_dmd_buildtype_args,
+ d_gdc_buildtype_args,
+ d_ldc_buildtype_args,
+ get_gcc_soname_args,
+ gnu_color_args,
+ Compiler,
+ CompilerArgs,
+)
+
# Per-compiler-family spellings of the D language feature flags.
# Keyed by compiler id: 'gcc' == GDC, 'llvm' == LDC, 'dmd' == DMD.
d_feature_args = {
    'gcc': {'unittest': '-funittest',
            'version': '-fversion',
            'import_dir': '-J'},
    'llvm': {'unittest': '-unittest',
             'version': '-d-version',
             'import_dir': '-J'},
    'dmd': {'unittest': '-unittest',
            'version': '-version',
            'import_dir': '-J'},
}
+
class DCompiler(Compiler):
    """Base class shared by the GDC, LDC and DMD wrappers.

    Subclasses set ``self.id`` to 'gcc', 'llvm' or 'dmd'; that id selects
    the compiler-specific feature flags in ``d_feature_args``.
    """

    def __init__(self, exelist, version, is_cross):
        self.language = 'd'
        super().__init__(exelist, version)
        # Overridden by each concrete subclass.
        self.id = 'unknown'
        self.is_cross = is_cross

    def sanity_check(self, work_dir, environment):
        """Compile and run an empty D program; raise EnvironmentException on failure."""
        source_name = os.path.join(work_dir, 'sanity.d')
        output_name = os.path.join(work_dir, 'dtest')
        with open(source_name, 'w') as ofile:
            ofile.write('''void main() {
}
''')
        pc = subprocess.Popen(self.exelist + self.get_output_args(output_name) + [source_name], cwd=work_dir)
        pc.wait()
        if pc.returncode != 0:
            raise EnvironmentException('D compiler %s can not compile programs.' % self.name_string())
        if subprocess.call(output_name) != 0:
            raise EnvironmentException('Executables created by D compiler %s are not runnable.' % self.name_string())

    def needs_static_linker(self):
        return True

    def name_string(self):
        """Human-readable name of the compiler command line."""
        return ' '.join(self.exelist)

    def get_linker_exelist(self):
        # The compiler binary doubles as the linker driver; return a copy.
        return self.exelist[:]

    def get_preprocess_only_args(self):
        return ['-E']

    def get_compile_only_args(self):
        return ['-c']

    def depfile_for_object(self, objfile):
        return objfile + '.' + self.get_depfile_suffix()

    def get_depfile_suffix(self):
        return 'dep'

    def get_pic_args(self):
        return ['-fPIC']

    def get_std_shared_lib_link_args(self):
        return ['-shared']

    def get_soname_args(self, prefix, shlib_name, suffix, path, soversion, is_shared_module):
        # FIXME: Make this work for Windows, MacOS and cross-compiling
        return get_gcc_soname_args(GCC_STANDARD, prefix, shlib_name, suffix, path, soversion, is_shared_module)

    def get_feature_args(self, kwargs):
        """Translate D feature kwargs (unittest/versions/import_dirs) into flags.

        Consumes recognized keys from *kwargs*; raises EnvironmentException
        for features this compiler family does not support or for any keys
        left over afterwards.
        """
        res = []
        if 'unittest' in kwargs:
            unittest = kwargs.pop('unittest')
            unittest_arg = d_feature_args[self.id]['unittest']
            if not unittest_arg:
                raise EnvironmentException('D compiler %s does not support the "unittest" feature.' % self.name_string())
            if unittest:
                res.append(unittest_arg)

        if 'versions' in kwargs:
            versions = kwargs.pop('versions')
            # Accept a single value as well as a list.
            if not isinstance(versions, list):
                versions = [versions]

            version_arg = d_feature_args[self.id]['version']
            if not version_arg:
                raise EnvironmentException('D compiler %s does not support the "feature versions" feature.' % self.name_string())
            for v in versions:
                res.append('{0}={1}'.format(version_arg, v))

        if 'import_dirs' in kwargs:
            import_dirs = kwargs.pop('import_dirs')
            if not isinstance(import_dirs, list):
                import_dirs = [import_dirs]

            import_dir_arg = d_feature_args[self.id]['import_dir']
            if not import_dir_arg:
                raise EnvironmentException('D compiler %s does not support the "string import directories" feature.' % self.name_string())
            for d in import_dirs:
                res.append('{0}{1}'.format(import_dir_arg, d))

        if kwargs:
            raise EnvironmentException('Unknown D compiler feature(s) selected: %s' % ', '.join(kwargs.keys()))

        return res

    def get_buildtype_linker_args(self, buildtype):
        return []

    def get_std_exe_link_args(self):
        return []

    def build_rpath_args(self, build_dir, from_dir, rpath_paths, build_rpath, install_rpath):
        # This method is to be used by LDC and DMD.
        # GDC can deal with the verbatim flags.
        if not rpath_paths and not install_rpath:
            return []
        paths = ':'.join([os.path.join(build_dir, p) for p in rpath_paths])
        if build_rpath != '':
            paths += ':' + build_rpath
        # Pad the build rpath to at least the length of the install rpath so
        # that the dynamic-section entry can be patched in place at install time.
        if len(paths) < len(install_rpath):
            padding = 'X' * (len(install_rpath) - len(paths))
            if not paths:
                paths = padding
            else:
                paths = paths + ':' + padding
        # '-L' tells the D compiler to pass the rest through to the linker.
        return ['-L-rpath={}'.format(paths)]

    def _get_compiler_check_args(self, env, extra_args, dependencies, mode='compile'):
        """Assemble the argument list used by compile/link checks.

        extra_args may be a string or a list; dependencies a single dep or a
        list. extra_args are appended last so they override everything else.
        """
        if extra_args is None:
            extra_args = []
        elif isinstance(extra_args, str):
            extra_args = [extra_args]
        if dependencies is None:
            dependencies = []
        elif not isinstance(dependencies, list):
            dependencies = [dependencies]
        # Collect compiler arguments
        args = CompilerArgs(self)
        for d in dependencies:
            # Add compile flags needed by dependencies
            args += d.get_compile_args()
            if mode == 'link':
                # Add link flags needed to find dependencies
                args += d.get_link_args()

        if mode == 'compile':
            # Add DFLAGS from the env
            args += env.coredata.external_args[self.language]
        elif mode == 'link':
            # Add LDFLAGS from the env
            args += env.coredata.external_link_args[self.language]
        # extra_args must override all other arguments, so we add them last
        args += extra_args
        return args

    def compiles(self, code, env, extra_args=None, dependencies=None, mode='compile'):
        """Return True when *code* compiles (or links, per *mode*) cleanly."""
        args = self._get_compiler_check_args(env, extra_args, dependencies, mode)

        with self.compile(code, args, mode) as p:
            return p.returncode == 0

    def has_multi_arguments(self, args, env):
        # A trivial but valid D translation unit is enough to probe flags.
        return self.compiles('int i;\n', env, extra_args=args)

    @classmethod
    def translate_args_to_nongnu(cls, args):
        """Rewrite GCC-style flags into forms LDC/DMD understand."""
        dcargs = []
        # Translate common arguments to flags the LDC/DMD compilers
        # can understand.
        # The flags might have been added by pkg-config files,
        # and are therefore out of the user's control.
        for arg in args:
            if arg == '-pthread':
                continue
            if arg.startswith('-Wl,'):
                # Forward each comma-separated linker flag individually.
                linkargs = arg[arg.index(',') + 1:].split(',')
                for la in linkargs:
                    dcargs.append('-L' + la.strip())
                continue
            elif arg.startswith('-l'):
                # translate library link flag
                dcargs.append('-L' + arg)
                continue
            elif arg.startswith('-L/') or arg.startswith('-L./'):
                # we need to handle cases where -L is set by e.g. a pkg-config
                # setting to select a linker search path. We can however not
                # unconditionally prefix '-L' with '-L' because the user might
                # have set this flag too to do what it is intended to for this
                # compiler (pass flag through to the linker)
                # Hence, we guess here whether the flag was intended to pass
                # a linker search path.
                dcargs.append('-L' + arg)
                continue
            dcargs.append(arg)

        return dcargs
+
+
class GnuDCompiler(DCompiler):
    """The GDC (GCC-based) D compiler."""

    def __init__(self, exelist, version, is_cross):
        DCompiler.__init__(self, exelist, version, is_cross)
        self.id = 'gcc'
        basic_warnings = ['-Wall', '-Wdeprecated']
        self.warn_args = {
            '1': basic_warnings,
            '2': basic_warnings + ['-Wextra'],
            '3': basic_warnings + ['-Wextra', '-Wpedantic'],
        }
        self.base_options = ['b_colorout', 'b_sanitize', 'b_staticpic']

    def get_colorout_args(self, colortype):
        # Colored diagnostics arrived with GCC 4.9.
        if not version_compare(self.version, '>=4.9.0'):
            return []
        return gnu_color_args[colortype][:]

    def get_dependency_gen_args(self, outtarget, outfile):
        return ['-fmake-deps=' + outfile]

    def get_output_args(self, target):
        return ['-o', target]

    def get_linker_output_args(self, target):
        return ['-o', target]

    def get_include_args(self, path, is_system):
        return ['-I' + path]

    def get_warn_args(self, level):
        return self.warn_args[level]

    def get_werror_args(self):
        return ['-Werror']

    def get_linker_search_args(self, dirname):
        return ['-L' + dirname]

    def get_buildtype_args(self, buildtype):
        return d_gdc_buildtype_args[buildtype]

    def build_rpath_args(self, build_dir, from_dir, rpath_paths, build_rpath, install_rpath):
        # Unlike LDC/DMD, GDC accepts the verbatim GCC-style rpath flags.
        return self.build_unix_rpath_args(build_dir, from_dir, rpath_paths, build_rpath, install_rpath)
+
+
class LLVMDCompiler(DCompiler):
    """The LDC (LLVM-based) D compiler."""

    def __init__(self, exelist, version, is_cross):
        DCompiler.__init__(self, exelist, version, is_cross)
        self.id = 'llvm'
        self.base_options = ['b_coverage', 'b_colorout']

    def get_colorout_args(self, colortype):
        return ['-enable-color'] if colortype == 'always' else []

    def get_dependency_gen_args(self, outtarget, outfile):
        # LDC's -deps emits a non-Makefile dependency format the backends
        # cannot consume, so dependency generation is disabled for now.
        return []

    def get_output_args(self, target):
        return ['-of', target]

    def get_linker_output_args(self, target):
        return ['-of', target]

    def get_include_args(self, path, is_system):
        return ['-I' + path]

    def get_warn_args(self, level):
        if level in ('2', '3'):
            return ['-wi', '-dw']
        return ['-wi']

    def get_werror_args(self):
        return ['-w']

    def get_coverage_args(self):
        return ['-cov']

    def get_buildtype_args(self, buildtype):
        return d_ldc_buildtype_args[buildtype]

    def get_pic_args(self):
        return ['-relocation-model=pic']

    def get_linker_search_args(self, dirname):
        # -L is recognized as "add this to the search path" by the linker,
        # while the compiler recognizes it as "pass to linker". So, the first
        # -L is for the compiler, telling it to pass the second -L to the linker.
        return ['-L-L' + dirname]

    @classmethod
    def unix_args_to_native(cls, args):
        return cls.translate_args_to_nongnu(args)
+
+
class DmdDCompiler(DCompiler):
    """The reference DMD D compiler."""

    def __init__(self, exelist, version, is_cross):
        DCompiler.__init__(self, exelist, version, is_cross)
        self.id = 'dmd'
        self.base_options = ['b_coverage', 'b_colorout']

    def get_colorout_args(self, colortype):
        return ['-color=on'] if colortype == 'always' else []

    def get_dependency_gen_args(self, outtarget, outfile):
        # DMD's -deps emits a non-Makefile dependency format the backends
        # cannot consume, so dependency generation is disabled for now.
        return []

    def get_output_args(self, target):
        return ['-of' + target]

    def get_werror_args(self):
        return ['-w']

    def get_linker_output_args(self, target):
        return ['-of' + target]

    def get_include_args(self, path, is_system):
        return ['-I' + path]

    def get_warn_args(self, level):
        return ['-wi']

    def get_coverage_args(self):
        return ['-cov']

    def get_linker_search_args(self, dirname):
        # -L is recognized as "add this to the search path" by the linker,
        # while the compiler recognizes it as "pass to linker". So, the first
        # -L is for the compiler, telling it to pass the second -L to the linker.
        return ['-L-L' + dirname]

    def get_buildtype_args(self, buildtype):
        return d_dmd_buildtype_args[buildtype]

    def get_std_shared_lib_link_args(self):
        return ['-shared', '-defaultlib=libphobos2.so']

    @classmethod
    def unix_args_to_native(cls, args):
        return cls.translate_args_to_nongnu(args)
--- /dev/null
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os.path, subprocess
+
+from ..mesonlib import EnvironmentException, is_osx
+
+from .compilers import (
+ GCC_CYGWIN,
+ GCC_MINGW,
+ GCC_OSX,
+ GCC_STANDARD,
+ ICC_STANDARD,
+ apple_buildtype_linker_args,
+ get_gcc_soname_args,
+ gnulike_buildtype_args,
+ gnulike_buildtype_linker_args,
+ Compiler,
+ IntelCompiler,
+)
+
class FortranCompiler(Compiler):
    """Base class for all Fortran compiler wrappers.

    Fix: define a default ``std_warn_args`` on the class. ``get_std_warn_args``
    returned ``FortranCompiler.std_warn_args``, but the attribute was never
    defined here (only on some subclasses), so calling it on this base class
    raised AttributeError.
    """

    # Default standard-warning flags; subclasses override as needed.
    std_warn_args = []

    def __init__(self, exelist, version, is_cross, exe_wrapper=None):
        self.language = 'fortran'
        super().__init__(exelist, version)
        self.is_cross = is_cross
        # Wrapper command (e.g. an emulator) used to run cross-built binaries.
        self.exe_wrapper = exe_wrapper
        # Not really correct but I don't have Fortran compilers to test with. Sorry.
        self.gcc_type = GCC_STANDARD
        self.id = "IMPLEMENTATION CLASSES MUST SET THIS"

    def name_string(self):
        """Human-readable name of the compiler command line."""
        return ' '.join(self.exelist)

    def get_pic_args(self):
        if self.gcc_type in (GCC_CYGWIN, GCC_MINGW, GCC_OSX):
            return []  # On Window and OS X, pic is always on.
        return ['-fPIC']

    def get_std_shared_lib_link_args(self):
        return ['-shared']

    def needs_static_linker(self):
        return True

    def sanity_check(self, work_dir, environment):
        """Compile and (if possible) run a hello-world Fortran program.

        Raises EnvironmentException when compilation fails or the produced
        binary does not run. When cross compiling without an exe_wrapper the
        run step is skipped.
        """
        source_name = os.path.join(work_dir, 'sanitycheckf.f90')
        binary_name = os.path.join(work_dir, 'sanitycheckf')
        with open(source_name, 'w') as ofile:
            ofile.write('''program prog
     print *, "Fortran compilation is working."
end program prog
''')
        extra_flags = self.get_cross_extra_flags(environment, link=True)
        pc = subprocess.Popen(self.exelist + extra_flags + [source_name, '-o', binary_name])
        pc.wait()
        if pc.returncode != 0:
            raise EnvironmentException('Compiler %s can not compile programs.' % self.name_string())
        if self.is_cross:
            if self.exe_wrapper is None:
                # Can't check if the binaries run so we have to assume they do
                return
            cmdlist = self.exe_wrapper + [binary_name]
        else:
            cmdlist = [binary_name]
        pe = subprocess.Popen(cmdlist, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        pe.wait()
        if pe.returncode != 0:
            raise EnvironmentException('Executables created by Fortran compiler %s are not runnable.' % self.name_string())

    def get_std_warn_args(self, level):
        return FortranCompiler.std_warn_args

    def get_buildtype_args(self, buildtype):
        return gnulike_buildtype_args[buildtype]

    def get_buildtype_linker_args(self, buildtype):
        if is_osx():
            return apple_buildtype_linker_args[buildtype]
        return gnulike_buildtype_linker_args[buildtype]

    def split_shlib_to_parts(self, fname):
        return os.path.split(fname)[0], fname

    def get_soname_args(self, prefix, shlib_name, suffix, path, soversion, is_shared_module):
        return get_gcc_soname_args(self.gcc_type, prefix, shlib_name, suffix, path, soversion, is_shared_module)

    def get_dependency_gen_args(self, outtarget, outfile):
        # Disabled until this is fixed:
        # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=62162
        # return ['-cpp', '-MMD', '-MQ', outtarget]
        return []

    def get_output_args(self, target):
        return ['-o', target]

    def get_preprocess_only_args(self):
        return ['-E']

    def get_compile_only_args(self):
        return ['-c']

    def get_linker_exelist(self):
        # The compiler binary doubles as the linker driver; return a copy.
        return self.exelist[:]

    def get_linker_output_args(self, outputname):
        return ['-o', outputname]

    def get_include_args(self, path, is_system):
        return ['-I' + path]

    def get_module_incdir_args(self):
        # Flag(s) used to add a module search directory.
        return ('-I', )

    def get_module_outdir_args(self, path):
        # gfortran-style module output directory flag; overridden widely.
        return ['-J' + path]

    def depfile_for_object(self, objfile):
        return objfile + '.' + self.get_depfile_suffix()

    def get_depfile_suffix(self):
        return 'd'

    def get_std_exe_link_args(self):
        return []

    def build_rpath_args(self, build_dir, from_dir, rpath_paths, build_rpath, install_rpath):
        return self.build_unix_rpath_args(build_dir, from_dir, rpath_paths, build_rpath, install_rpath)

    def module_name_to_filename(self, module_name):
        # Fortran module files are conventionally lower-case '<name>.mod'.
        return module_name.lower() + '.mod'

    def get_warn_args(self, level):
        return ['-Wall']

    def get_no_warn_args(self):
        return ['-w']
+
+
class GnuFortranCompiler(FortranCompiler):
    """The GNU Fortran compiler (gfortran).

    Fix: forward *exe_wrapper* to the base class. The old code passed
    ``exe_wrapper=None``, silently discarding the wrapper and making
    cross sanity checks skip the run step even when a wrapper was given.
    """

    def __init__(self, exelist, version, gcc_type, is_cross, exe_wrapper=None, defines=None):
        # Pass the caller's wrapper through instead of dropping it.
        super().__init__(exelist, version, is_cross, exe_wrapper=exe_wrapper)
        self.gcc_type = gcc_type
        # Pre-detected builtin #defines, keyed by macro name.
        self.defines = defines or {}
        self.id = 'gcc'

    def has_builtin_define(self, define):
        return define in self.defines

    def get_builtin_define(self, define):
        # Implicitly returns None for unknown defines.
        if define in self.defines:
            return self.defines[define]

    def get_always_args(self):
        return ['-pipe']

    def get_coverage_args(self):
        return ['--coverage']

    def get_coverage_link_args(self):
        return ['--coverage']

    def gen_import_library_args(self, implibname):
        """
        The name of the outputted import library

        Used only on Windows
        """
        return ['-Wl,--out-implib=' + implibname]
+
+
class G95FortranCompiler(FortranCompiler):
    """The G95 Fortran compiler.

    Fix: forward *exe_wrapper* to the base class instead of passing
    ``exe_wrapper=None`` and silently discarding it.
    """

    def __init__(self, exelist, version, is_cross, exe_wrapper=None):
        super().__init__(exelist, version, is_cross, exe_wrapper=exe_wrapper)
        self.id = 'g95'

    def get_module_outdir_args(self, path):
        return ['-fmod=' + path]

    def get_always_args(self):
        return ['-pipe']

    def get_no_warn_args(self):
        # FIXME: Confirm that there's no compiler option to disable all warnings
        return []

    def gen_import_library_args(self, implibname):
        """
        The name of the outputted import library

        Used only on Windows
        """
        return ['-Wl,--out-implib=' + implibname]
+
+
class SunFortranCompiler(FortranCompiler):
    """The Oracle/Sun Studio Fortran compiler.

    Fix: forward *exe_wrapper* to the base class instead of passing
    ``exe_wrapper=None`` and silently discarding it.
    """

    def __init__(self, exelist, version, is_cross, exe_wrapper=None):
        super().__init__(exelist, version, is_cross, exe_wrapper=exe_wrapper)
        self.id = 'sun'

    def get_dependency_gen_args(self, outtarget, outfile):
        return ['-fpp']

    def get_always_args(self):
        return []

    def get_warn_args(self, level):
        return []

    def get_module_incdir_args(self):
        return ('-M', )

    def get_module_outdir_args(self, path):
        return ['-moddir=' + path]
+
+
class IntelFortranCompiler(IntelCompiler, FortranCompiler):
    """The Intel Fortran compiler (ifort)."""

    std_warn_args = ['-warn', 'all']

    def __init__(self, exelist, version, is_cross, exe_wrapper=None):
        # Must be set before the FortranCompiler initializer runs.
        self.file_suffixes = ('f90', 'f', 'for', 'ftn', 'fpp')
        FortranCompiler.__init__(self, exelist, version, is_cross, exe_wrapper)
        # FIXME: Add support for OS X and Windows in detect_fortran_compiler so
        # we are sent the type of compiler
        IntelCompiler.__init__(self, ICC_STANDARD)
        self.id = 'intel'

    def get_module_outdir_args(self, path):
        return ['-module', path]

    def get_warn_args(self, level):
        return IntelFortranCompiler.std_warn_args
+
+
class PathScaleFortranCompiler(FortranCompiler):
    """The PathScale Fortran compiler.

    Fix: forward *exe_wrapper* to the base class instead of passing
    ``exe_wrapper=None`` and silently discarding it.
    """

    std_warn_args = ['-fullwarn']

    def __init__(self, exelist, version, is_cross, exe_wrapper=None):
        super().__init__(exelist, version, is_cross, exe_wrapper=exe_wrapper)
        self.id = 'pathscale'

    def get_module_outdir_args(self, path):
        return ['-module', path]

    def get_std_warn_args(self, level):
        return PathScaleFortranCompiler.std_warn_args
+
class PGIFortranCompiler(FortranCompiler):
    """The PGI Fortran compiler.

    Fix: forward *exe_wrapper* to the base class instead of passing
    ``exe_wrapper=None`` and silently discarding it.
    """

    std_warn_args = ['-Minform=inform']

    def __init__(self, exelist, version, is_cross, exe_wrapper=None):
        super().__init__(exelist, version, is_cross, exe_wrapper=exe_wrapper)
        self.id = 'pgi'

    def get_module_incdir_args(self):
        return ('-module', )

    def get_module_outdir_args(self, path):
        return ['-module', path]

    def get_warn_args(self, level):
        return PGIFortranCompiler.std_warn_args

    def get_no_warn_args(self):
        return ['-silent']
+
+
class Open64FortranCompiler(FortranCompiler):
    """The Open64 Fortran compiler.

    Fix: forward *exe_wrapper* to the base class instead of passing
    ``exe_wrapper=None`` and silently discarding it.
    """

    std_warn_args = ['-fullwarn']

    def __init__(self, exelist, version, is_cross, exe_wrapper=None):
        super().__init__(exelist, version, is_cross, exe_wrapper=exe_wrapper)
        self.id = 'open64'

    def get_module_outdir_args(self, path):
        return ['-module', path]

    def get_warn_args(self, level):
        return Open64FortranCompiler.std_warn_args
+
+
class NAGFortranCompiler(FortranCompiler):
    """The NAG Fortran compiler (nagfor).

    Fix: forward *exe_wrapper* to the base class instead of passing
    ``exe_wrapper=None`` and silently discarding it.
    """

    std_warn_args = []

    def __init__(self, exelist, version, is_cross, exe_wrapper=None):
        super().__init__(exelist, version, is_cross, exe_wrapper=exe_wrapper)
        self.id = 'nagfor'

    def get_module_outdir_args(self, path):
        return ['-mdir', path]

    def get_warn_args(self, level):
        return NAGFortranCompiler.std_warn_args
--- /dev/null
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os.path, shutil, subprocess
+
+from ..mesonlib import EnvironmentException
+
+from .compilers import Compiler, java_buildtype_args
+
class JavaCompiler(Compiler):
    """Wrapper around javac; uses the 'java' launcher for sanity checks.

    Java has no native linking model, so the linker-related hooks are no-ops.
    """

    def __init__(self, exelist, version):
        self.language = 'java'
        super().__init__(exelist, version)
        self.id = 'unknown'
        # JVM launcher used to execute the compiled sanity-check class.
        self.javarunner = 'java'

    def get_soname_args(self, prefix, shlib_name, suffix, path, soversion, is_shared_module):
        return []

    def get_werror_args(self):
        return ['-Werror']

    def split_shlib_to_parts(self, fname):
        return None, fname

    def build_rpath_args(self, build_dir, from_dir, rpath_paths, build_rpath, install_rpath):
        return []

    def get_dependency_gen_args(self, outtarget, outfile):
        return []

    def get_linker_exelist(self):
        # Return a copy so callers may mutate the list safely.
        return list(self.exelist)

    def get_compile_only_args(self):
        return []

    def get_output_args(self, subdir):
        # javac takes a directory for both class (-d) and source (-s) output.
        outdir = subdir if subdir != '' else './'
        return ['-d', outdir, '-s', outdir]

    def get_linker_output_args(self, outputname):
        return []

    def get_coverage_args(self):
        return []

    def get_coverage_link_args(self):
        return []

    def get_std_exe_link_args(self):
        return []

    def get_include_args(self, path):
        return []

    def get_pic_args(self):
        # JVM bytecode is position independent by construction.
        return []

    def name_string(self):
        return ' '.join(self.exelist)

    def get_pch_use_args(self, pch_dir, header):
        return []

    def get_pch_name(self, header_name):
        return ''

    def get_buildtype_args(self, buildtype):
        return java_buildtype_args[buildtype]

    def sanity_check(self, work_dir, environment):
        """Compile a trivial class and run it if a JVM is available."""
        src_file = 'SanityCheck.java'
        class_name = 'SanityCheck'
        with open(os.path.join(work_dir, src_file), 'w') as srcfile:
            srcfile.write('''class SanityCheck {
  public static void main(String[] args) {
    int i;
  }
}
''')
        compile_proc = subprocess.Popen(self.exelist + [src_file], cwd=work_dir)
        compile_proc.wait()
        if compile_proc.returncode != 0:
            raise EnvironmentException('Java compiler %s can not compile programs.' % self.name_string())
        runner = shutil.which(self.javarunner)
        if not runner:
            m = "Java Virtual Machine wasn't found, but it's needed by Meson. " \
                "Please install a JRE.\nIf you have specific needs where this " \
                "requirement doesn't make sense, please open a bug at " \
                "https://github.com/mesonbuild/meson/issues/new and tell us " \
                "all about it."
            raise EnvironmentException(m)
        run_proc = subprocess.Popen([runner, class_name], cwd=work_dir)
        run_proc.wait()
        if run_proc.returncode != 0:
            raise EnvironmentException('Executables created by Java compiler %s are not runnable.' % self.name_string())

    def needs_static_linker(self):
        return False
--- /dev/null
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os.path, subprocess
+
+from ..mesonlib import EnvironmentException
+
+from .c import CCompiler
+from .compilers import ClangCompiler, GnuCompiler
+
class ObjCCompiler(CCompiler):
    """Base class for Objective-C compilers; reuses the C compiler machinery."""

    def __init__(self, exelist, version, is_cross, exe_wrap):
        self.language = 'objc'
        CCompiler.__init__(self, exelist, version, is_cross, exe_wrap)

    def get_display_language(self):
        return 'Objective-C'

    def sanity_check(self, work_dir, environment):
        """Compile (and run, when not cross compiling) a minimal ObjC program."""
        # TODO try to use sanity_check_impl instead of duplicated code
        src_path = os.path.join(work_dir, 'sanitycheckobjc.m')
        bin_path = os.path.join(work_dir, 'sanitycheckobjc')
        extra_flags = self.get_cross_extra_flags(environment, link=False)
        if self.is_cross:
            extra_flags += self.get_compile_only_args()
        with open(src_path, 'w') as srcfile:
            srcfile.write('#import<stdio.h>\n'
                          'int main(int argc, char **argv) { return 0; }\n')
        compile_proc = subprocess.Popen(self.exelist + extra_flags + [src_path, '-o', bin_path])
        compile_proc.wait()
        if compile_proc.returncode != 0:
            raise EnvironmentException('ObjC compiler %s can not compile programs.' % self.name_string())
        if self.is_cross:
            # Can't check if the binaries run so we have to assume they do
            return
        run_proc = subprocess.Popen(bin_path)
        run_proc.wait()
        if run_proc.returncode != 0:
            raise EnvironmentException('Executables created by ObjC compiler %s are not runnable.' % self.name_string())
+
+
class GnuObjCCompiler(GnuCompiler, ObjCCompiler):
    """Objective-C via GCC."""

    def __init__(self, exelist, version, gcc_type, is_cross, exe_wrapper=None, defines=None):
        ObjCCompiler.__init__(self, exelist, version, is_cross, exe_wrapper)
        GnuCompiler.__init__(self, gcc_type, defines)
        base_warnings = ['-Wall', '-Winvalid-pch']
        self.warn_args = {
            '1': base_warnings,
            '2': base_warnings + ['-Wextra'],
            '3': base_warnings + ['-Wextra', '-Wpedantic'],
        }
+
+
class ClangObjCCompiler(ClangCompiler, GnuObjCCompiler):
    """Objective-C via Clang."""

    def __init__(self, exelist, version, cltype, is_cross, exe_wrapper=None):
        GnuObjCCompiler.__init__(self, exelist, version, cltype, is_cross, exe_wrapper)
        ClangCompiler.__init__(self, cltype)
        self.base_options = ['b_pch', 'b_lto', 'b_pgo', 'b_sanitize', 'b_coverage']
--- /dev/null
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os.path, subprocess
+
+from ..mesonlib import EnvironmentException
+
+from .cpp import CPPCompiler
+from .compilers import ClangCompiler, GnuCompiler
+
class ObjCPPCompiler(CPPCompiler):
    """Base class for Objective-C++ compilers; reuses the C++ machinery."""

    def __init__(self, exelist, version, is_cross, exe_wrap):
        self.language = 'objcpp'
        CPPCompiler.__init__(self, exelist, version, is_cross, exe_wrap)

    def get_display_language(self):
        return 'Objective-C++'

    def sanity_check(self, work_dir, environment):
        """Compile (and run, when not cross compiling) a minimal ObjC++ program."""
        # TODO try to use sanity_check_impl instead of duplicated code
        src_path = os.path.join(work_dir, 'sanitycheckobjcpp.mm')
        bin_path = os.path.join(work_dir, 'sanitycheckobjcpp')
        extra_flags = self.get_cross_extra_flags(environment, link=False)
        if self.is_cross:
            extra_flags += self.get_compile_only_args()
        with open(src_path, 'w') as srcfile:
            srcfile.write('#import<stdio.h>\n'
                          'class MyClass;'
                          'int main(int argc, char **argv) { return 0; }\n')
        compile_proc = subprocess.Popen(self.exelist + extra_flags + [src_path, '-o', bin_path])
        compile_proc.wait()
        if compile_proc.returncode != 0:
            raise EnvironmentException('ObjC++ compiler %s can not compile programs.' % self.name_string())
        if self.is_cross:
            # Can't check if the binaries run so we have to assume they do
            return
        run_proc = subprocess.Popen(bin_path)
        run_proc.wait()
        if run_proc.returncode != 0:
            raise EnvironmentException('Executables created by ObjC++ compiler %s are not runnable.' % self.name_string())
+
+
class GnuObjCPPCompiler(GnuCompiler, ObjCPPCompiler):
    """Objective-C++ via GCC."""

    def __init__(self, exelist, version, gcc_type, is_cross, exe_wrapper=None, defines=None):
        ObjCPPCompiler.__init__(self, exelist, version, is_cross, exe_wrapper)
        GnuCompiler.__init__(self, gcc_type, defines)
        base_warnings = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor']
        self.warn_args = {
            '1': base_warnings,
            '2': base_warnings + ['-Wextra'],
            '3': base_warnings + ['-Wextra', '-Wpedantic'],
        }
+
+
class ClangObjCPPCompiler(ClangCompiler, GnuObjCPPCompiler):
    """Objective-C++ via Clang."""

    def __init__(self, exelist, version, cltype, is_cross, exe_wrapper=None):
        GnuObjCPPCompiler.__init__(self, exelist, version, cltype, is_cross, exe_wrapper)
        ClangCompiler.__init__(self, cltype)
        self.base_options = ['b_pch', 'b_lto', 'b_pgo', 'b_sanitize', 'b_coverage']
--- /dev/null
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import subprocess, os.path
+
+from ..mesonlib import EnvironmentException, Popen_safe
+
+from .compilers import Compiler, rust_buildtype_args
+
class RustCompiler(Compiler):
    """Wrapper around rustc."""

    def __init__(self, exelist, version):
        self.language = 'rust'
        super().__init__(exelist, version)
        self.id = 'rustc'

    def needs_static_linker(self):
        return False

    def name_string(self):
        return ' '.join(self.exelist)

    def sanity_check(self, work_dir, environment):
        """Build and run an empty Rust program to verify the toolchain."""
        src_path = os.path.join(work_dir, 'sanity.rs')
        bin_path = os.path.join(work_dir, 'rusttest')
        with open(src_path, 'w') as srcfile:
            srcfile.write('''fn main() {
}
''')
        build = subprocess.Popen(self.exelist + ['-o', bin_path, src_path], cwd=work_dir)
        build.wait()
        if build.returncode != 0:
            raise EnvironmentException('Rust compiler %s can not compile programs.' % self.name_string())
        if subprocess.call(bin_path) != 0:
            raise EnvironmentException('Executables created by Rust compiler %s are not runnable.' % self.name_string())

    def get_dependency_gen_args(self, outfile):
        return ['--dep-info', outfile]

    def get_buildtype_args(self, buildtype):
        return rust_buildtype_args[buildtype]

    def build_rpath_args(self, build_dir, from_dir, rpath_paths, build_rpath, install_rpath):
        return self.build_unix_rpath_args(build_dir, from_dir, rpath_paths, build_rpath, install_rpath)

    def get_sysroot(self):
        """Return rustc's sysroot: the first line of 'rustc --print sysroot'."""
        _, stdout, _ = Popen_safe(self.exelist + ['--print', 'sysroot'])
        return stdout.split('\n')[0]
--- /dev/null
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import subprocess, os.path
+
+from ..mesonlib import EnvironmentException
+
+from .compilers import Compiler, swift_buildtype_args
+
class SwiftCompiler(Compiler):
    """Compiler abstraction for the LLVM-based Swift compiler."""

    def __init__(self, exelist, version):
        self.language = 'swift'
        super().__init__(exelist, version)
        self.version = version
        self.id = 'llvm'
        self.is_cross = False

    def get_linker_exelist(self):
        # swiftc drives linking itself; hand out a copy callers may mutate.
        return self.exelist[:]

    def name_string(self):
        """Return the compiler command line as one printable string."""
        return ' '.join(self.exelist)

    def needs_static_linker(self):
        return True

    def get_werror_args(self):
        return ['--fatal-warnings']

    def get_dependency_gen_args(self, outtarget, outfile):
        # swiftc derives the depfile name itself; both arguments are unused.
        return ['-emit-dependencies']

    def depfile_for_object(self, objfile):
        """Map an object file name to its dependency-file name."""
        stem = os.path.splitext(objfile)[0]
        return stem + '.' + self.get_depfile_suffix()

    def get_depfile_suffix(self):
        return 'd'

    def get_output_args(self, target):
        return ['-o', target]

    def get_linker_output_args(self, target):
        return ['-o', target]

    def get_header_import_args(self, headername):
        return ['-import-objc-header', headername]

    def get_warn_args(self, level):
        # No per-level warning flags for Swift.
        return []

    def get_buildtype_args(self, buildtype):
        return swift_buildtype_args[buildtype]

    def get_buildtype_linker_args(self, buildtype):
        return []

    def get_std_exe_link_args(self):
        return ['-emit-executable']

    def get_module_args(self, modname):
        return ['-module-name', modname]

    def get_mod_gen_args(self):
        return ['-emit-module']

    def build_rpath_args(self, *args):
        return [] # FIXME

    def get_include_args(self, dirname):
        return ['-I' + dirname]

    def get_compile_only_args(self):
        return ['-c']

    def sanity_check(self, work_dir, environment):
        """Verify the compiler can build and run a trivial program."""
        src = 'swifttest.swift'
        source_name = os.path.join(work_dir, src)
        output_name = os.path.join(work_dir, 'swifttest')
        with open(source_name, 'w') as f:
            f.write('print("Swift compilation is working.")\n')
        extra_flags = self.get_cross_extra_flags(environment, link=True)
        cmd = self.exelist + extra_flags + ['-emit-executable', '-o', output_name, src]
        proc = subprocess.Popen(cmd, cwd=work_dir)
        proc.wait()
        if proc.returncode != 0:
            raise EnvironmentException('Swift compiler %s can not compile programs.' % self.name_string())
        if subprocess.call(output_name) != 0:
            raise EnvironmentException('Executables created by Swift compiler %s are not runnable.' % self.name_string())
--- /dev/null
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os.path
+
+from .. import mlog
+from ..mesonlib import EnvironmentException, version_compare
+
+from .compilers import Compiler
+
class ValaCompiler(Compiler):
    """Compiler abstraction for valac, which compiles Vala down to C."""

    def __init__(self, exelist, version):
        self.language = 'vala'
        super().__init__(exelist, version)
        self.version = version
        self.id = 'valac'
        self.is_cross = False
        self.base_options = ['b_colorout']

    def name_string(self):
        """Return the compiler command line as one printable string."""
        return ' '.join(self.exelist)

    def needs_static_linker(self):
        return False # Because compiles into C.

    def get_output_args(self, target):
        return ['-o', target]

    def get_compile_only_args(self):
        return ['-C']

    def get_pic_args(self):
        # PIC handling happens in the C compilation of the generated code.
        return []

    def get_always_args(self):
        # Always stop at C output; the C compiler takes over from there.
        return ['-C']

    def get_warn_args(self, warning_level):
        return []

    def get_no_warn_args(self):
        return ['--disable-warnings']

    def get_werror_args(self):
        return ['--fatal-warnings']

    def get_colorout_args(self, colortype):
        # --color only exists since valac 0.37.1.
        if not version_compare(self.version, '>=0.37.1'):
            return []
        return ['--color=' + colortype]

    def sanity_check(self, work_dir, environment):
        """Verify the compiler can compile a trivial class."""
        code = 'class MesonSanityCheck : Object { }'
        args = self.get_cross_extra_flags(environment, link=False)
        with self.compile(code, args, 'compile') as p:
            if p.returncode != 0:
                msg = 'Vala compiler {!r} can not compile programs' \
                      ''.format(self.name_string())
                raise EnvironmentException(msg)

    def get_buildtype_args(self, buildtype):
        if buildtype in ('debug', 'debugoptimized', 'minsize'):
            return ['--debug']
        return []

    def find_library(self, libname, env, extra_dirs):
        """Look up *libname* as a Vala package (or a .vapi file in
        *extra_dirs*); return compiler args to use it, or None."""
        if extra_dirs and isinstance(extra_dirs, str):
            extra_dirs = [extra_dirs]
        # Valac always looks in the default vapi dir, so only search there if
        # no extra dirs are specified.
        if not extra_dirs:
            code = 'class MesonFindLibrary : Object { }'
            vapi_args = ['--pkg', libname]
            args = self.get_cross_extra_flags(env, link=False)
            args += vapi_args
            with self.compile(code, args, 'compile') as p:
                if p.returncode == 0:
                    return vapi_args
        # Not found? Try to find the vapi file itself.
        for d in extra_dirs:
            vapi = os.path.join(d, libname + '.vapi')
            if os.path.isfile(vapi):
                return [vapi]
        mlog.debug('Searched {!r} and {!r} wasn\'t found'.format(extra_dirs, libname))
        return None

    def thread_flags(self):
        return []

    def thread_link_flags(self):
        return []
--- /dev/null
+
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pickle, os, uuid
+import sys
+from pathlib import PurePath
+from collections import OrderedDict
+from .mesonlib import MesonException, commonpath
+from .mesonlib import default_libdir, default_libexecdir, default_prefix
+import ast
+
# Meson's own version; stamped into CoreData and checked by load()/save()
# below so that stale build directories are rejected.
version = '0.44.0'
# All backend names accepted by the 'backend' builtin option.
backendlist = ['ninja', 'vs', 'vs2010', 'vs2015', 'vs2017', 'xcode']
+
class UserOption:
    """Base class for all user-configurable option types."""

    def __init__(self, name, description, choices):
        super().__init__()
        self.name = name
        self.choices = choices
        self.description = description

    def validate_value(self, value):
        """Check that *value* is legal and return its "cleaned"/"native"
        form — e.g. a boolean option turns the string "true" into True.
        Subclasses must override this."""
        raise RuntimeError('Derived option class did not override validate_value.')
+
class UserStringOption(UserOption):
    """A free-form string option."""

    def __init__(self, name, description, value, choices=None):
        super().__init__(name, description, choices)
        self.set_value(value)

    def validate(self, value):
        """Raise MesonException unless *value* is a str."""
        if isinstance(value, str):
            return
        raise MesonException('Value "%s" for string option "%s" is not a string.' % (str(value), self.name))

    def set_value(self, newvalue):
        self.validate(newvalue)
        self.value = newvalue

    def validate_value(self, value):
        self.validate(value)
        return value
+
class UserBooleanOption(UserOption):
    """A true/false option; accepts bools or the strings 'true'/'false'."""

    def __init__(self, name, description, value):
        super().__init__(name, description, [True, False])
        self.set_value(value)

    def tobool(self, thing):
        """Coerce *thing* (a bool, or 'true'/'false' in any case) to bool."""
        if isinstance(thing, bool):
            return thing
        lowered = thing.lower()
        if lowered == 'true':
            return True
        if lowered == 'false':
            return False
        raise MesonException('Value %s is not boolean (true or false).' % thing)

    def set_value(self, newvalue):
        self.value = self.tobool(newvalue)

    def __bool__(self):
        # Let the option object itself be used in boolean context.
        return self.value

    def validate_value(self, value):
        return self.tobool(value)
+
class UserIntegerOption(UserOption):
    """An integer option constrained to [min_value, max_value]."""

    def __init__(self, name, description, min_value, max_value, value):
        # NOTE(review): the choices list [True, False] looks copy-pasted from
        # UserBooleanOption and is meaningless for integers; kept unchanged
        # for compatibility with anything inspecting .choices.
        super().__init__(name, description, [True, False])
        self.min_value = min_value  # None means unbounded below
        self.max_value = max_value  # None means unbounded above
        self.set_value(value)

    def set_value(self, newvalue):
        """Validate *newvalue* (converting strings first) and store it."""
        if isinstance(newvalue, str):
            newvalue = self.toint(newvalue)
        if not isinstance(newvalue, int):
            raise MesonException('New value for integer option is not an integer.')
        if self.min_value is not None and newvalue < self.min_value:
            raise MesonException('New value %d is less than minimum value %d.' % (newvalue, self.min_value))
        if self.max_value is not None and newvalue > self.max_value:
            raise MesonException('New value %d is more than maximum value %d.' % (newvalue, self.max_value))
        self.value = newvalue

    def toint(self, valuestring):
        """Convert *valuestring* to int, raising MesonException on failure."""
        try:
            return int(valuestring)
        # Fix: a bare 'except:' previously swallowed everything, including
        # KeyboardInterrupt/SystemExit. Only conversion failures belong here.
        except (TypeError, ValueError):
            raise MesonException('Value string "%s" is not convertable to an integer.' % valuestring)

    def validate_value(self, value):
        # NOTE(review): only the conversion is checked here — the min/max
        # bounds are not re-validated; confirm whether callers rely on that.
        return self.toint(value)
+
class UserComboOption(UserOption):
    """An option restricted to a fixed list of string choices."""

    def __init__(self, name, description, choices, value):
        super().__init__(name, description, choices)
        if not isinstance(self.choices, list):
            raise MesonException('Combo choices must be an array.')
        if not all(isinstance(choice, str) for choice in self.choices):
            raise MesonException('Combo choice elements must be strings.')
        self.set_value(value)

    def set_value(self, newvalue):
        if newvalue not in self.choices:
            optionsstring = ', '.join('"%s"' % (item,) for item in self.choices)
            raise MesonException('Value "%s" for combo option "%s" is not one of the choices. Possible choices are: %s.' % (newvalue, self.name, optionsstring))
        self.value = newvalue

    def validate_value(self, value):
        """Return *value* unchanged provided it is an accepted choice."""
        if value not in self.choices:
            raise MesonException('Value %s not one of accepted values.' % value)
        return value
+
class UserArrayOption(UserOption):
    """An option holding a list of strings.

    Command-line input (user_input=True) may be a comma-separated string or
    a Python list literal; meson_options.txt input (user_input=False) must
    be a list, or a string that is its literal representation.
    """

    def __init__(self, name, description, value, **kwargs):
        super().__init__(name, description, kwargs.get('choices', []))
        self.set_value(value, user_input=False)

    def validate(self, value, user_input):
        """Parse and check *value*; return the resulting list of strings."""
        # User input is for options defined on the command line (via -D
        # options). Users can put their input in as a comma separated
        # string, but for defining options in meson_options.txt the format
        # should match that of a combo
        if not user_input:
            if isinstance(value, str):
                if not value.startswith('['):
                    raise MesonException('Valuestring does not define an array: ' + value)
                newvalue = ast.literal_eval(value)
            else:
                newvalue = value
        else:
            assert isinstance(value, str)
            if value.startswith('['):
                newvalue = ast.literal_eval(value)
            else:
                newvalue = [v.strip() for v in value.split(',')]
        if not isinstance(newvalue, list):
            raise MesonException('"{0}" should be a string array, but it is not'.format(str(newvalue)))
        for i in newvalue:
            if not isinstance(i, str):
                # Fix: the message previously formatted the whole array
                # (newvalue) instead of the offending element.
                raise MesonException('String array element "{0}" is not a string.'.format(str(i)))
        if self.choices:
            bad = [x for x in newvalue if x not in self.choices]
            if bad:
                raise MesonException('Options "{}" are not in allowed choices: "{}"'.format(
                    ', '.join(bad), ', '.join(self.choices)))
        return newvalue

    def set_value(self, newvalue, user_input=True):
        self.value = self.validate(newvalue, user_input)

    def validate_value(self, value):
        # NOTE(review): returns the raw input rather than the parsed list
        # produced by validate(); callers get the unnormalized value.
        self.validate(value)
        return value
+
+# This class contains all data that must persist over multiple
+# invocations of Meson. It is roughly the same thing as
+# cmakecache.
+
class CoreData:
    """All configuration state that must persist across Meson invocations
    (roughly the equivalent of CMake's cache). Instances are pickled and
    restored by the module-level save()/load() functions."""

    def __init__(self, options):
        # GUIDs — presumably consumed by the Visual Studio backends for
        # solution/project entries; confirm at the usage sites.
        self.guid = str(uuid.uuid4()).upper()
        self.test_guid = str(uuid.uuid4()).upper()
        self.regen_guid = str(uuid.uuid4()).upper()
        self.target_guids = {}
        # Stamp with the writing Meson version; checked in load()/save().
        self.version = version
        self.init_builtins(options)
        self.init_backend_options(self.builtins['backend'].value)
        self.user_options = {}
        self.compiler_options = {}
        self.base_options = {}
        # These external_*args, are set via env vars CFLAGS, LDFLAGS, etc
        # but only when not cross-compiling.
        self.external_preprocess_args = {} # CPPFLAGS only
        self.external_args = {} # CPPFLAGS + CFLAGS
        self.external_link_args = {} # CFLAGS + LDFLAGS (with MSVC: only LDFLAGS)
        self.cross_file = self.__load_cross_file(options.cross_file)
        self.wrap_mode = options.wrap_mode
        # OrderedDicts — presumably to keep iteration order deterministic
        # across regeneration; confirm before relying on it.
        self.compilers = OrderedDict()
        self.cross_compilers = OrderedDict()
        self.deps = OrderedDict()
        self.modules = {}
        # Only to print a warning if it changes between Meson invocations.
        self.pkgconf_envvar = os.environ.get('PKG_CONFIG_PATH', '')

    @staticmethod
    def __load_cross_file(filename):
        """Try to load the cross file.

        If the filename is None return None. If the filename is an absolute
        (after resolving variables and ~), return that absolute path. Next,
        check if the file is relative to the current source dir. If the path
        still isn't resolved do the following:
        Linux + BSD:
            - $XDG_DATA_HOME/meson/cross (or ~/.local/share/meson/cross if
              undefined)
            - $XDG_DATA_DIRS/meson/cross (or
              /usr/local/share/meson/cross:/usr/share/meson/cross if undefined)
            - Error
        *:
            - Error
        BSD follows the Linux path and will honor XDG_* if set. This simplifies
        the implementation somewhat, especially since most BSD users won't set
        those environment variables.
        """
        if filename is None:
            return None
        filename = os.path.expanduser(os.path.expandvars(filename))
        if os.path.isabs(filename):
            return filename
        path_to_try = os.path.abspath(filename)
        if os.path.exists(path_to_try):
            return path_to_try
        if sys.platform == 'linux' or 'bsd' in sys.platform.lower():
            paths = [
                os.environ.get('XDG_DATA_HOME', os.path.expanduser('~/.local/share')),
            ] + os.environ.get('XDG_DATA_DIRS', '/usr/local/share:/usr/share').split(':')
            for path in paths:
                path_to_try = os.path.join(path, 'meson', 'cross', filename)
                if os.path.exists(path_to_try):
                    return path_to_try
            raise MesonException('Cannot find specified cross file: ' + filename)

        # Non-Linux/BSD platforms have no search path at all.
        raise MesonException('Cannot find specified cross file: ' + filename)

    def sanitize_prefix(self, prefix):
        """Check that *prefix* is absolute and strip a trailing slash,
        except where the slash is load-bearing ('C:\\' and '/')."""
        if not os.path.isabs(prefix):
            raise MesonException('prefix value {!r} must be an absolute path'
                                 ''.format(prefix))
        if prefix.endswith('/') or prefix.endswith('\\'):
            # On Windows we need to preserve the trailing slash if the
            # string is of type 'C:\' because 'C:' is not an absolute path.
            if len(prefix) == 3 and prefix[1] == ':':
                pass
            # If prefix is a single character, preserve it since it is
            # the root directory.
            elif len(prefix) == 1:
                pass
            else:
                prefix = prefix[:-1]
        return prefix

    def sanitize_dir_option_value(self, prefix, option, value):
        '''
        If the option is an installation directory option and the value is an
        absolute path, check that it resides within prefix and return the value
        as a path relative to the prefix.

        This way everyone can do f.ex, get_option('libdir') and be sure to get
        the library directory relative to prefix.
        '''
        if option.endswith('dir') and os.path.isabs(value) and \
           option not in builtin_dir_noprefix_options:
            # Value must be a subdir of the prefix
            # commonpath will always return a path in the native format, so we
            # must use pathlib.PurePath to do the same conversion before
            # comparing.
            if commonpath([value, prefix]) != str(PurePath(prefix)):
                m = 'The value of the {!r} option is {!r} which must be a ' \
                    'subdir of the prefix {!r}.\nNote that if you pass a ' \
                    'relative path, it is assumed to be a subdir of prefix.'
                raise MesonException(m.format(option, value, prefix))
            # Convert path to be relative to prefix
            skip = len(prefix) + 1
            value = value[skip:]
        return value

    def init_builtins(self, options):
        """Create UserOption objects for all builtin options, taking values
        from *options* (the parsed command line) where present and from the
        builtin defaults otherwise."""
        self.builtins = {}
        # Sanitize prefix
        options.prefix = self.sanitize_prefix(options.prefix)
        # Initialize other builtin options
        for key in get_builtin_options():
            if hasattr(options, key):
                value = getattr(options, key)
                value = self.sanitize_dir_option_value(options.prefix, key, value)
                setattr(options, key, value)
            else:
                value = get_builtin_option_default(key, prefix=options.prefix)
            # builtin_options rows are [class, description, *ctor args,
            # default]; splice the chosen value in as the final ctor arg.
            args = [key] + builtin_options[key][1:-1] + [value]
            self.builtins[key] = builtin_options[key][0](*args)

    def init_backend_options(self, backend_name):
        """Register options that only exist for a particular backend."""
        self.backend_options = {}
        if backend_name == 'ninja':
            self.backend_options['backend_max_links'] = UserIntegerOption('backend_max_links',
                                                                          'Maximum number of linker processes to run or 0 for no limit',
                                                                          0, None, 0)

    def get_builtin_option(self, optname):
        """Return the current value of builtin option *optname*."""
        if optname in self.builtins:
            return self.builtins[optname].value
        raise RuntimeError('Tried to get unknown builtin option %s.' % optname)

    def set_builtin_option(self, optname, value):
        """Set builtin option *optname*, sanitizing prefix/dir values."""
        if optname == 'prefix':
            value = self.sanitize_prefix(value)
        elif optname in self.builtins:
            prefix = self.builtins['prefix'].value
            value = self.sanitize_dir_option_value(prefix, optname, value)
        else:
            raise RuntimeError('Tried to set unknown builtin option %s.' % optname)
        self.builtins[optname].set_value(value)

    def validate_option_value(self, option_name, override_value):
        """Validate *override_value* against whichever option table defines
        *option_name*, returning the cleaned value."""
        for opts in (self.builtins, self.base_options, self.compiler_options, self.user_options):
            if option_name in opts:
                opt = opts[option_name]
                return opt.validate_value(override_value)
        raise MesonException('Tried to validate unknown option %s.' % option_name)
+
def load(filename):
    """Unpickle a CoreData object from *filename*, checking both its type
    and that it was written by this exact Meson version."""
    corrupted = 'Coredata file {!r} is corrupted. Try with a fresh build tree.'.format(filename)
    try:
        with open(filename, 'rb') as f:
            obj = pickle.load(f)
    except pickle.UnpicklingError:
        raise MesonException(corrupted)
    if not isinstance(obj, CoreData):
        raise MesonException(corrupted)
    if obj.version != version:
        raise MesonException('Build directory has been generated with Meson version %s, which is incompatible with current version %s.\nPlease delete this build directory AND create a new one.' %
                             (obj.version, version))
    return obj
+
def save(obj, filename):
    """Pickle CoreData *obj* to *filename*; refuse to write an object whose
    version stamp does not match this Meson (would corrupt the build dir)."""
    if obj.version != version:
        raise MesonException('Fatal version mismatch corruption.')
    with open(filename, 'wb') as f:
        pickle.dump(obj, f)
+
def get_builtin_options():
    """Return the names of all builtin (universal) options."""
    return list(builtin_options)
+
def is_builtin_option(optname):
    """Return True when *optname* names a builtin option."""
    return optname in get_builtin_options()
+
def get_builtin_option_choices(optname):
    """Return the allowed values for builtin option *optname*: None for
    free-form strings, [True, False] for booleans, the choice list for
    combo options."""
    if not is_builtin_option(optname):
        raise RuntimeError('Tried to get the supported values for an unknown builtin option \'%s\'.' % optname)
    opt_type = builtin_options[optname][0]
    if opt_type == UserStringOption:
        return None
    if opt_type == UserBooleanOption:
        return [True, False]
    return builtin_options[optname][2]
+
def get_builtin_option_description(optname):
    """Return the human-readable description of builtin option *optname*."""
    if not is_builtin_option(optname):
        raise RuntimeError('Tried to get the description for an unknown builtin option \'%s\'.' % optname)
    return builtin_options[optname][1]
+
def get_builtin_option_default(optname, prefix='', noneIfSuppress=False):
    """Return the default value of builtin option *optname*.

    Combo options store their default after the choices list. Some
    directory options have prefix-dependent defaults; when noneIfSuppress
    is True those return None so that argparse does not pre-fill them and
    the real default can be computed from the prefix later.
    """
    if not is_builtin_option(optname):
        raise RuntimeError('Tried to get the default value for an unknown builtin option \'%s\'.' % optname)
    o = builtin_options[optname]
    if o[0] == UserComboOption:
        return o[3]
    if optname in builtin_dir_noprefix_options:
        if noneIfSuppress:
            # Return None if argparse defaulting should be suppressed for
            # this option (so we can determine the default later based on
            # prefix)
            return None
        if prefix in builtin_dir_noprefix_options[optname]:
            return builtin_dir_noprefix_options[optname][prefix]
    return o[2]
+
# Schema: name -> [option class, description, *extra ctor args, default].
# Combo rows are [class, description, choices, default]; string/boolean
# rows are [class, description, default]. Consumed positionally by
# CoreData.init_builtins and the get_builtin_option_* helpers.
builtin_options = {
    'buildtype': [UserComboOption, 'Build type to use.', ['plain', 'debug', 'debugoptimized', 'release', 'minsize'], 'debug'],
    'strip': [UserBooleanOption, 'Strip targets on install.', False],
    'unity': [UserComboOption, 'Unity build.', ['on', 'off', 'subprojects'], 'off'],
    'prefix': [UserStringOption, 'Installation prefix.', default_prefix()],
    'libdir': [UserStringOption, 'Library directory.', default_libdir()],
    'libexecdir': [UserStringOption, 'Library executable directory.', default_libexecdir()],
    'bindir': [UserStringOption, 'Executable directory.', 'bin'],
    'sbindir': [UserStringOption, 'System executable directory.', 'sbin'],
    'includedir': [UserStringOption, 'Header file directory.', 'include'],
    'datadir': [UserStringOption, 'Data file directory.', 'share'],
    'mandir': [UserStringOption, 'Manual page directory.', 'share/man'],
    'infodir': [UserStringOption, 'Info page directory.', 'share/info'],
    'localedir': [UserStringOption, 'Locale data directory.', 'share/locale'],
    'sysconfdir': [UserStringOption, 'Sysconf data directory.', 'etc'],
    'localstatedir': [UserStringOption, 'Localstate data directory.', 'var'],
    'sharedstatedir': [UserStringOption, 'Architecture-independent data directory.', 'com'],
    'werror': [UserBooleanOption, 'Treat warnings as errors.', False],
    'warning_level': [UserComboOption, 'Compiler warning level to use.', ['1', '2', '3'], '1'],
    'layout': [UserComboOption, 'Build directory layout.', ['mirror', 'flat'], 'mirror'],
    'default_library': [UserComboOption, 'Default library type.', ['shared', 'static'], 'shared'],
    'backend': [UserComboOption, 'Backend to use.', backendlist, 'ninja'],
    'stdsplit': [UserBooleanOption, 'Split stdout and stderr in test logs.', True],
    'errorlogs': [UserBooleanOption, "Whether to print the logs from failing tests.", True],
}
+
# Special prefix-dependent defaults for installation directories that reside in
# a path outside of the prefix in FHS and common usage.
# Maps option name -> {prefix: default}, e.g. with prefix=/usr the default
# sysconfdir is /etc rather than /usr/etc.
builtin_dir_noprefix_options = {
    'sysconfdir': {'/usr': '/etc'},
    'localstatedir': {'/usr': '/var', '/usr/local': '/var/local'},
    'sharedstatedir': {'/usr': '/var/lib', '/usr/local': '/var/local/lib'},
}
+
# Target names reserved by the build backends; user targets may not use
# these. Kept as a dict (name -> None) for membership tests.
forbidden_target_names = dict.fromkeys([
    'clean',
    'clean-ctlist',
    'clean-gcno',
    'clean-gcda',
    'coverage',
    'coverage-text',
    'coverage-xml',
    'coverage-html',
    'phony',
    'PHONY',
    'all',
    'test',
    'benchmark',
    'install',
    'uninstall',
    'build.ninja',
    'scan-build',
    'reconfigure',
    'dist',
    'distcheck',
])
--- /dev/null
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .base import ( # noqa: F401
+ Dependency, DependencyException, DependencyMethods, ExternalProgram,
+ ExternalDependency, ExternalLibrary, ExtraFrameworkDependency, InternalDependency,
+ PkgConfigDependency, find_external_dependency, get_dep_identifier, packages, _packages_accept_language)
+from .dev import GMockDependency, GTestDependency, LLVMDependency, ValgrindDependency
+from .misc import (BoostDependency, MPIDependency, Python3Dependency, ThreadDependency, PcapDependency, CupsDependency, LibWmfDependency)
+from .platform import AppleFrameworks
+from .ui import GLDependency, GnuStepDependency, Qt4Dependency, Qt5Dependency, SDL2Dependency, WxDependency, VulkanDependency
+
+
# Populate the name -> dependency-class registry declared in base.py;
# dependency('name') looks its handler up here.
packages.update({
    # From dev:
    'gtest': GTestDependency,
    'gmock': GMockDependency,
    'llvm': LLVMDependency,
    'valgrind': ValgrindDependency,

    # From misc:
    'boost': BoostDependency,
    'mpi': MPIDependency,
    'python3': Python3Dependency,
    'threads': ThreadDependency,
    'pcap': PcapDependency,
    'cups': CupsDependency,
    'libwmf': LibWmfDependency,

    # From platform:
    'appleframeworks': AppleFrameworks,

    # From ui:
    'gl': GLDependency,
    'gnustep': GnuStepDependency,
    'qt4': Qt4Dependency,
    'qt5': Qt5Dependency,
    'sdl2': SDL2Dependency,
    'wxwidgets': WxDependency,
    'vulkan': VulkanDependency,
})
# Dependencies that accept a 'language' keyword — currently only MPI.
_packages_accept_language.update({
    'mpi',
})
--- /dev/null
+# Copyright 2013-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the detection logic for external dependencies.
+# Custom logic for several other packages are in separate files.
+
+import os
+import sys
+import stat
+import shlex
+import shutil
+import textwrap
+from enum import Enum
+from pathlib import PurePath
+
+from .. import mlog
+from .. import mesonlib
+from ..mesonlib import (
+ MesonException, Popen_safe, version_compare_many, version_compare, listify
+)
+
+
# These must be defined in this file to avoid cyclical references.
# Registry of dependency name -> Dependency subclass; populated by
# dependencies/__init__.py.
packages = {}
# Names of dependencies that accept a 'language' keyword — presumably
# checked by the dependency() lookup; confirm at the call sites.
_packages_accept_language = set()
+
+
class DependencyException(MesonException):
    """Raised when detection or use of an external dependency fails."""
+
+
class DependencyMethods(Enum):
    """All accepted values for the dependency() 'method' keyword."""
    # Auto means to use whatever dependency checking mechanisms in whatever order meson thinks is best.
    AUTO = 'auto'
    PKGCONFIG = 'pkg-config'
    QMAKE = 'qmake'
    # Just specify the standard link arguments, assuming the operating system provides the library.
    SYSTEM = 'system'
    # This is only supported on OSX - search the frameworks directory by name.
    EXTRAFRAMEWORK = 'extraframework'
    # Detect using the sysconfig module.
    SYSCONFIG = 'sysconfig'
    # Specify using a "program"-config style tool
    CONFIG_TOOL = 'config-tool'
    # For backwards compatibility; deprecated aliases remapped to
    # CONFIG_TOOL in Dependency.__init__.
    SDLCONFIG = 'sdlconfig'
    CUPSCONFIG = 'cups-config'
    PCAPCONFIG = 'pcap-config'
    LIBWMFCONFIG = 'libwmf-config'
+
+
class Dependency:
    """Base class for every dependency object handed back to build files."""

    def __init__(self, type_name, kwargs):
        self.name = "null"
        self.version = 'none'
        self.language = None # None means C-like
        self.is_found = False
        self.type_name = type_name
        self.compile_args = []
        self.link_args = []
        self.sources = []
        method = kwargs.get('method', 'auto')
        if method not in [e.value for e in DependencyMethods]:
            raise DependencyException('method {!r} is invalid'.format(method))
        method = DependencyMethods(method)

        # This sets per-tool config methods which are deprecated to the new
        # generic CONFIG_TOOL value.
        if method in [DependencyMethods.SDLCONFIG, DependencyMethods.CUPSCONFIG,
                      DependencyMethods.PCAPCONFIG, DependencyMethods.LIBWMFCONFIG]:
            mlog.warning(textwrap.dedent("""\
                Configuration method {} has been deprecated in favor of
                'config-tool'. This will be removed in a future version of
                meson.""".format(method)))
            method = DependencyMethods.CONFIG_TOOL

        # Set the detection method. If the method is set to auto, use any available method.
        # If method is set to a specific string, allow only that detection method.
        if method == DependencyMethods.AUTO:
            self.methods = self.get_methods()
        elif method in self.get_methods():
            self.methods = [method]
        else:
            raise DependencyException(
                'Unsupported detection method: {}, allowed methods are {}'.format(
                    method.value,
                    mlog.format_list([x.value for x in [DependencyMethods.AUTO] + self.get_methods()])))

    def __repr__(self):
        s = '<{0} {1}: {2}>'
        return s.format(self.__class__.__name__, self.name, self.is_found)

    def get_compile_args(self):
        """Compiler flags needed to build against this dependency."""
        return self.compile_args

    def get_link_args(self):
        """Linker flags needed to link against this dependency."""
        return self.link_args

    def found(self):
        return self.is_found

    def get_sources(self):
        """Source files that need to be added to the target.
        As an example, gtest-all.cc when using GTest."""
        return self.sources

    def get_methods(self):
        # Subclasses override this to advertise their detection methods.
        return [DependencyMethods.AUTO]

    def get_name(self):
        return self.name

    def get_version(self):
        return self.version

    def get_exe_args(self, compiler):
        # Extra args for compiling an executable against this dependency.
        return []

    def need_threads(self):
        return False

    def get_pkgconfig_variable(self, variable_name, kwargs):
        # Overridden by PkgConfigDependency; anything else is an error.
        raise DependencyException('{!r} is not a pkgconfig dependency'.format(self.name))

    def get_configtool_variable(self, variable_name):
        # Overridden by ConfigToolDependency; anything else is an error.
        raise DependencyException('{!r} is not a config-tool dependency'.format(self.name))
+
+
class InternalDependency(Dependency):
    """A dependency on targets/files built inside this same project.

    Always considered found; carries its include dirs, libraries, sources
    and nested external deps directly instead of detecting anything.
    """

    def __init__(self, version, incdirs, compile_args, link_args, libraries, sources, ext_deps):
        super().__init__('internal', {})
        self.version = version
        self.is_found = True
        self.include_directories = incdirs
        self.compile_args = compile_args
        self.link_args = link_args
        self.libraries = libraries
        self.sources = sources
        self.ext_deps = ext_deps

    def get_pkgconfig_variable(self, variable_name, kwargs):
        raise DependencyException('Method "get_pkgconfig_variable()" is '
                                  'invalid for an internal dependency')

    def get_configtool_variable(self, variable_name):
        raise DependencyException('Method "get_configtool_variable()" is '
                                  'invalid for an internal dependency')
+
+
class ExternalDependency(Dependency):
    """Base class for dependencies detected on the host system."""

    def __init__(self, type_name, environment, language, kwargs):
        super().__init__(type_name, kwargs)
        self.env = environment
        self.name = type_name # default
        self.is_found = False
        self.language = language
        self.version_reqs = kwargs.get('version', None)
        self.required = kwargs.get('required', True)
        self.silent = kwargs.get('silent', False)
        self.static = kwargs.get('static', False)
        if not isinstance(self.static, bool):
            raise DependencyException('Static keyword must be boolean')
        # Is this dependency for cross-compilation?
        if 'native' in kwargs and self.env.is_cross_build():
            self.want_cross = not kwargs['native']
        else:
            self.want_cross = self.env.is_cross_build()
        # Set the compiler that will be used by this dependency
        # This is only used for configuration checks
        if self.want_cross:
            compilers = self.env.coredata.cross_compilers
        else:
            compilers = self.env.coredata.compilers
        # Set the compiler for this dependency if a language is specified,
        # else try to pick something that looks usable.
        if self.language:
            if self.language not in compilers:
                m = self.name.capitalize() + ' requires a {} compiler'
                raise DependencyException(m.format(self.language.capitalize()))
            self.compiler = compilers[self.language]
        else:
            # Try to find a compiler that this dependency can use for compiler
            # checks. It's ok if we don't find one.
            for lang in ('c', 'cpp', 'objc', 'objcpp', 'fortran', 'd'):
                self.compiler = compilers.get(lang, None)
                if self.compiler:
                    break

    def get_compiler(self):
        """Return the compiler chosen for this dependency's config checks
        (may be None when no usable language compiler was found)."""
        return self.compiler
+
+
+class ConfigToolDependency(ExternalDependency):
+
+ """Class representing dependencies found using a config tool."""
+
+ tools = None
+ tool_name = None
+
    def __init__(self, name, environment, language, kwargs):
        super().__init__('config-tool', environment, language, kwargs)
        self.name = name
        # The 'tools' kwarg may override the class-level candidate list.
        self.tools = listify(kwargs.get('tools', self.tools))

        req_version = kwargs.get('version', None)
        tool, version = self.find_config(req_version)
        self.config = tool
        # report_config also logs the outcome (and raises when required).
        self.is_found = self.report_config(version, req_version)
        if not self.is_found:
            self.config = None
            return
        self.version = version
+
    @classmethod
    def factory(cls, name, environment, language, kwargs, tools, tool_name):
        """Constructor for use in dependencies that can be found multiple ways.

        In addition to the standard constructor values, this constructor sets
        the tool_name and tools values of the instance.
        """
        # This deserves some explanation, because metaprogramming is hard.
        # This uses type() to create a dynamic subclass of ConfigToolDependency
        # with the tools and tool_name class attributes set, this class is then
        # instantiated and returned. The reduce function (method) is also
        # attached, since python's pickle module won't be able to do anything
        # with this dynamically generated class otherwise.
        def reduce(_):
            # Re-run the factory on unpickle instead of pickling the
            # dynamically created class itself.
            return (cls.factory,
                    (name, environment, language, kwargs, tools, tool_name))
        sub = type('{}Dependency'.format(name.capitalize()), (cls, ),
                   {'tools': tools, 'tool_name': tool_name, '__reduce__': reduce})

        return sub(name, environment, language, kwargs)
+
+ def find_config(self, versions=None):
+ """Helper method that searchs for config tool binaries in PATH and
+ returns the one that best matches the given version requirements.
+ """
+ if not isinstance(versions, list) and versions is not None:
+ versions = listify(versions)
+
+ best_match = (None, None)
+ for tool in self.tools:
+ try:
+ p, out = Popen_safe([tool, '--version'])[:2]
+ except (FileNotFoundError, PermissionError):
+ continue
+ if p.returncode != 0:
+ continue
+
+ out = out.strip()
+ # Some tools, like pcap-config don't supply a version, but also
+ # dont fail with --version, in that case just assume that there is
+ # only one verison and return it.
+ if not out:
+ return (tool, 'none')
+ if versions:
+ is_found = version_compare_many(out, versions)[0]
+ # This allows returning a found version without a config tool,
+ # which is useful to inform the user that you found version x,
+ # but y was required.
+ if not is_found:
+ tool = None
+ if best_match[1]:
+ if version_compare(out, '> {}'.format(best_match[1])):
+ best_match = (tool, out)
+ else:
+ best_match = (tool, out)
+
+ return best_match
+
+ def report_config(self, version, req_version):
+ """Helper method to print messages about the tool."""
+ if self.config is None:
+ if version is not None:
+ mlog.log('found {} {!r} but need:'.format(self.tool_name, version),
+ req_version)
+ else:
+ mlog.log("No {} found; can't detect dependency".format(self.tool_name))
+ mlog.log('Dependency {} found:'.format(self.name), mlog.red('NO'))
+ if self.required:
+ raise DependencyException('Dependency {} not found'.format(self.name))
+ return False
+ mlog.log('Found {}:'.format(self.tool_name), mlog.bold(shutil.which(self.config)),
+ '({})'.format(version))
+ mlog.log('Dependency {} found:'.format(self.name), mlog.green('YES'))
+ return True
+
+ def get_config_value(self, args, stage):
+ p, out, err = Popen_safe([self.config] + args)
+ if p.returncode != 0:
+ if self.required:
+ raise DependencyException(
+ 'Could not generate {} for {}.\n{}'.format(
+ stage, self.name, err))
+ return []
+ return shlex.split(out)
+
+ def get_methods(self):
+ return [DependencyMethods.AUTO, DependencyMethods.CONFIG_TOOL]
+
+ def get_configtool_variable(self, variable_name):
+ p, out, _ = Popen_safe([self.config, '--{}'.format(variable_name)])
+ if p.returncode != 0:
+ if self.required:
+ raise DependencyException(
+ 'Could not get variable "{}" for dependency {}'.format(
+ variable_name, self.name))
+ variable = out.strip()
+ mlog.debug('Got config-tool variable {} : {}'.format(variable_name, variable))
+ return variable
+
+
class PkgConfigDependency(ExternalDependency):
    """Dependency found by querying a pkg-config (or cross pkg-config) binary.

    Fixes vs. previous revision:
    - extract_field() now splits .la lines on the first '=' only, so values
      containing '=' are no longer truncated.
    - The libtool error message is assembled with proper spacing between the
      adjacent string literals.
    """

    # The class's copy of the pkg-config path. Avoids having to search for it
    # multiple times in the same Meson invocation.
    class_pkgbin = None

    def __init__(self, name, environment, kwargs, language=None):
        super().__init__('pkgconfig', environment, language, kwargs)
        self.name = name
        self.is_libtool = False
        # Store a copy of the pkg-config path on the object itself so it is
        # stored in the pickled coredata and recovered.
        self.pkgbin = None

        # When finding dependencies for cross-compiling, we don't care about
        # the 'native' pkg-config
        if self.want_cross:
            if 'pkgconfig' not in environment.cross_info.config['binaries']:
                if self.required:
                    raise DependencyException('Pkg-config binary missing from cross file')
            else:
                pkgname = environment.cross_info.config['binaries']['pkgconfig']
                potential_pkgbin = ExternalProgram(pkgname, silent=True)
                if potential_pkgbin.found():
                    # FIXME, we should store all pkg-configs in ExternalPrograms.
                    # However that is too destabilizing a change to do just before release.
                    self.pkgbin = potential_pkgbin.get_command()[0]
                    PkgConfigDependency.class_pkgbin = self.pkgbin
                else:
                    mlog.debug('Cross pkg-config %s not found.' % potential_pkgbin.name)
        # Only search for the native pkg-config the first time and
        # store the result in the class definition
        elif PkgConfigDependency.class_pkgbin is None:
            self.pkgbin = self.check_pkgconfig()
            PkgConfigDependency.class_pkgbin = self.pkgbin
        else:
            self.pkgbin = PkgConfigDependency.class_pkgbin

        if not self.pkgbin:
            if self.required:
                raise DependencyException('Pkg-config not found.')
            return
        if self.want_cross:
            self.type_string = 'Cross'
        else:
            self.type_string = 'Native'

        mlog.debug('Determining dependency {!r} with pkg-config executable '
                   '{!r}'.format(name, self.pkgbin))
        ret, self.version = self._call_pkgbin(['--modversion', name])
        if ret != 0:
            if self.required:
                raise DependencyException('{} dependency {!r} not found'
                                          ''.format(self.type_string, name))
            return
        found_msg = [self.type_string + ' dependency', mlog.bold(name), 'found:']
        if self.version_reqs is None:
            self.is_found = True
        else:
            if not isinstance(self.version_reqs, (str, list)):
                raise DependencyException('Version argument must be string or list.')
            if isinstance(self.version_reqs, str):
                self.version_reqs = [self.version_reqs]
            (self.is_found, not_found, found) = \
                version_compare_many(self.version, self.version_reqs)
            if not self.is_found:
                found_msg += [mlog.red('NO'),
                              'found {!r} but need:'.format(self.version),
                              ', '.join(["'{}'".format(e) for e in not_found])]
                if found:
                    found_msg += ['; matched:',
                                  ', '.join(["'{}'".format(e) for e in found])]
                if not self.silent:
                    mlog.log(*found_msg)
                if self.required:
                    m = 'Invalid version of dependency, need {!r} {!r} found {!r}.'
                    raise DependencyException(m.format(name, not_found, self.version))
                return
        found_msg += [mlog.green('YES'), self.version]
        # Fetch cargs to be used while using this dependency
        self._set_cargs()
        # Fetch the libraries and library paths needed for using this
        self._set_libs()
        # Print the found message only at the very end because fetching cflags
        # and libs can also fail if other needed pkg-config files aren't found.
        if not self.silent:
            mlog.log(*found_msg)

    def __repr__(self):
        s = '<{0} {1}: {2} {3}>'
        return s.format(self.__class__.__name__, self.name, self.is_found,
                        self.version_reqs)

    def _call_pkgbin(self, args, env=None):
        """Invoke pkg-config with args; return (returncode, stripped stdout)."""
        if not env:
            env = os.environ
        p, out = Popen_safe([self.pkgbin] + args, env=env)[0:2]
        return p.returncode, out.strip()

    def _convert_mingw_paths(self, args):
        '''
        Both MSVC and native Python on Windows cannot handle MinGW-esque /c/foo
        paths so convert them to C:/foo. We cannot resolve other paths starting
        with / like /home/foo so leave them as-is so that the user gets an
        error/warning from the compiler/linker.
        '''
        if not mesonlib.is_windows():
            return args
        converted = []
        for arg in args:
            pargs = []
            # Library search path
            if arg.startswith('-L/'):
                pargs = PurePath(arg[2:]).parts
                tmpl = '-L{}:/{}'
            elif arg.startswith('-I/'):
                pargs = PurePath(arg[2:]).parts
                tmpl = '-I{}:/{}'
            # Full path to library or .la file
            elif arg.startswith('/'):
                pargs = PurePath(arg).parts
                tmpl = '{}:/{}'
            # '/c/foo' parses to ('/', 'c', 'foo'); a single-letter second
            # component identifies a MinGW drive path worth rewriting.
            if len(pargs) > 1 and len(pargs[1]) == 1:
                arg = tmpl.format(pargs[1], '/'.join(pargs[2:]))
            converted.append(arg)
        return converted

    def _set_cargs(self):
        """Populate self.compile_args from `pkg-config --cflags`."""
        ret, out = self._call_pkgbin(['--cflags', self.name])
        if ret != 0:
            raise DependencyException('Could not generate cargs for %s:\n\n%s' %
                                      (self.name, out))
        self.compile_args = self._convert_mingw_paths(shlex.split(out))

    def _set_libs(self):
        """Populate self.link_args from `pkg-config --libs`, resolving static
        libraries and libtool .la files to real paths where necessary."""
        env = None
        libcmd = [self.name, '--libs']
        if self.static:
            libcmd.append('--static')
            # Force pkg-config to output -L fields even if they are system
            # paths so we can do manual searching with cc.find_library() later.
            env = os.environ.copy()
            env['PKG_CONFIG_ALLOW_SYSTEM_LIBS'] = '1'
        ret, out = self._call_pkgbin(libcmd, env=env)
        if ret != 0:
            raise DependencyException('Could not generate libs for %s:\n\n%s' %
                                      (self.name, out))
        self.link_args = []
        libpaths = []
        for lib in self._convert_mingw_paths(shlex.split(out)):
            # If we want to use only static libraries, we have to look for the
            # file ourselves instead of depending on the compiler to find it
            # with -lfoo or foo.lib. However, we can only do this if we already
            # have some library paths gathered.
            if self.static:
                if lib.startswith('-L'):
                    libpaths.append(lib[2:])
                    continue
                elif lib.startswith('-l') and libpaths:
                    args = self.compiler.find_library(lib[2:], self.env, libpaths, libtype='static')
                    if not args or len(args) < 1:
                        raise DependencyException('Static library not found for {!r}'
                                                  ''.format(lib[2:]))
                    lib = args[0]
            elif lib.endswith(".la"):
                shared_libname = self.extract_libtool_shlib(lib)
                shared_lib = os.path.join(os.path.dirname(lib), shared_libname)
                if not os.path.exists(shared_lib):
                    shared_lib = os.path.join(os.path.dirname(lib), ".libs", shared_libname)

                if not os.path.exists(shared_lib):
                    # FIX: the literals previously concatenated without spaces,
                    # producing a garbled error message.
                    raise DependencyException('Got a libtools specific "%s" dependencies '
                                              'but we could not compute the actual shared '
                                              'library path' % lib)
                lib = shared_lib
                self.is_libtool = True
            self.link_args.append(lib)

    def get_pkgconfig_variable(self, variable_name, kwargs):
        """Query `pkg-config --variable=<variable_name>`; supports the
        'define_variable' kwarg ([VARIABLENAME, VARIABLEVALUE])."""
        options = ['--variable=' + variable_name, self.name]

        if 'define_variable' in kwargs:
            definition = kwargs.get('define_variable', [])
            if not isinstance(definition, list):
                raise MesonException('define_variable takes a list')

            if len(definition) != 2 or not all(isinstance(i, str) for i in definition):
                raise MesonException('define_variable must be made up of 2 strings for VARIABLENAME and VARIABLEVALUE')

            options = ['--define-variable=' + '='.join(definition)] + options

        ret, out = self._call_pkgbin(options)
        variable = ''
        if ret != 0:
            if self.required:
                raise DependencyException('%s dependency %s not found.' %
                                          (self.type_string, self.name))
        else:
            variable = out.strip()
        mlog.debug('Got pkgconfig variable %s : %s' % (variable_name, variable))
        return variable

    def get_methods(self):
        return [DependencyMethods.PKGCONFIG]

    def check_pkgconfig(self):
        """Locate a working pkg-config binary ($PKG_CONFIG or 'pkg-config').

        Returns the (preferably absolute) path, or False when none works.
        """
        evar = 'PKG_CONFIG'
        if evar in os.environ:
            pkgbin = os.environ[evar].strip()
        else:
            pkgbin = 'pkg-config'
        try:
            p, out = Popen_safe([pkgbin, '--version'])[0:2]
            if p.returncode != 0:
                # Set to False instead of None to signify that we've already
                # searched for it and not found it
                pkgbin = False
        except (FileNotFoundError, PermissionError):
            pkgbin = False
        if pkgbin and not os.path.isabs(pkgbin) and shutil.which(pkgbin):
            # Sometimes shutil.which fails where Popen succeeds, so
            # only find the abs path if it can be found by shutil.which
            pkgbin = shutil.which(pkgbin)
        if not self.silent:
            if pkgbin:
                mlog.log('Found pkg-config:', mlog.bold(pkgbin),
                         '(%s)' % out.strip())
            else:
                mlog.log('Found Pkg-config:', mlog.red('NO'))
        return pkgbin

    def extract_field(self, la_file, fieldname):
        """Return the (unquoted) value of `fieldname` in a libtool .la file,
        or None if the field is absent."""
        with open(la_file) as f:
            for line in f:
                # FIX: split on the first '=' only; values may contain '='.
                arr = line.strip().split('=', 1)
                if arr[0] == fieldname:
                    # Strip the surrounding quotes.
                    return arr[1][1:-1]
        return None

    def extract_dlname_field(self, la_file):
        return self.extract_field(la_file, 'dlname')

    def extract_libdir_field(self, la_file):
        return self.extract_field(la_file, 'libdir')

    def extract_libtool_shlib(self, la_file):
        '''
        Returns the path to the shared library
        corresponding to this .la file
        '''
        dlname = self.extract_dlname_field(la_file)
        if dlname is None:
            return None

        # Darwin uses absolute paths where possible; since the libtool files never
        # contain absolute paths, use the libdir field
        if mesonlib.is_osx():
            dlbasename = os.path.basename(dlname)
            libdir = self.extract_libdir_field(la_file)
            if libdir is None:
                return dlbasename
            return os.path.join(libdir, dlbasename)
        # From the comments in extract_libtool(), older libtools had
        # a path rather than the raw dlname
        return os.path.basename(dlname)
+
+
class ExternalProgram:
    """A program found on the system (or given explicitly as a command list),
    with the platform-specific search logic needed to actually run it."""

    # Extensions that Windows can execute directly, without an interpreter.
    windows_exts = ('exe', 'msc', 'com', 'bat', 'cmd')

    def __init__(self, name, command=None, silent=False, search_dir=None):
        self.name = name
        if command is not None:
            self.command = listify(command)
        else:
            # self.command becomes [None] when the search fails; see found().
            self.command = self._search(name, search_dir)
        if not silent:
            if self.found():
                mlog.log('Program', mlog.bold(name), 'found:', mlog.green('YES'),
                         '(%s)' % ' '.join(self.command))
            else:
                mlog.log('Program', mlog.bold(name), 'found:', mlog.red('NO'))

    def __repr__(self):
        r = '<{} {!r} -> {!r}>'
        return r.format(self.__class__.__name__, self.name, self.command)

    @staticmethod
    def _shebang_to_cmd(script):
        """
        Check if the file has a shebang and manually parse it to figure out
        the interpreter to use. This is useful if the script is not executable
        or if we're on Windows (which does not understand shebangs).

        Returns a command list ([interpreter, ..., script]) or False.
        """
        try:
            with open(script) as f:
                first_line = f.readline().strip()
            if first_line.startswith('#!'):
                # In a shebang, everything before the first space is assumed to
                # be the command to run and everything after the first space is
                # the single argument to pass to that command. So we must split
                # exactly once.
                commands = first_line[2:].split('#')[0].strip().split(maxsplit=1)
                if mesonlib.is_windows():
                    # Windows does not have UNIX paths so remove them,
                    # but don't remove Windows paths
                    if commands[0].startswith('/'):
                        commands[0] = commands[0].split('/')[-1]
                    if len(commands) > 0 and commands[0] == 'env':
                        commands = commands[1:]
                    # Windows does not ship python3.exe, but we know the path to it
                    if len(commands) > 0 and commands[0] == 'python3':
                        commands = mesonlib.python_command + commands[1:]
                elif mesonlib.is_haiku():
                    # Haiku does not have /usr, but a lot of scripts assume that
                    # /usr/bin/env always exists. Detect that case and run the
                    # script with the interpreter after it.
                    if commands[0] == '/usr/bin/env':
                        commands = commands[1:]
                    # We know what python3 is, we're running on it
                    if len(commands) > 0 and commands[0] == 'python3':
                        commands = mesonlib.python_command + commands[1:]
                return commands + [script]
        except Exception as e:
            # Unreadable/undecodable file: fall through and report unusable.
            mlog.debug(e)
            pass
        mlog.debug('Unusable script {!r}'.format(script))
        return False

    def _is_executable(self, path):
        """Return True if `path` can be executed directly on this platform."""
        suffix = os.path.splitext(path)[-1].lower()[1:]
        if mesonlib.is_windows():
            if suffix in self.windows_exts:
                return True
        elif os.access(path, os.X_OK):
            # Directories can have the x bit set; exclude them.
            return not os.path.isdir(path)
        return False

    def _search_dir(self, name, search_dir):
        """Look for `name` inside `search_dir` only.

        Returns a command list or False.
        """
        if search_dir is None:
            return False
        trial = os.path.join(search_dir, name)
        if os.path.exists(trial):
            if self._is_executable(trial):
                return [trial]
            # Now getting desperate. Maybe it is a script file that is
            # a) not chmodded executable, or
            # b) we are on windows so they can't be directly executed.
            return self._shebang_to_cmd(trial)
        else:
            if mesonlib.is_windows():
                for ext in self.windows_exts:
                    trial_ext = '{}.{}'.format(trial, ext)
                    if os.path.exists(trial_ext):
                        return [trial_ext]
            return False

    def _search_windows_special_cases(self, name, command):
        '''
        Lots of weird Windows quirks:
        1. PATH search for @name returns files with extensions from PATHEXT,
           but only self.windows_exts are executable without an interpreter.
        2. @name might be an absolute path to an executable, but without the
           extension. This works inside MinGW so people use it a lot.
        3. The script is specified without an extension, in which case we have
           to manually search in PATH.
        4. More special-casing for the shebang inside the script.
        '''
        if command:
            # On Windows, even if the PATH search returned a full path, we can't be
            # sure that it can be run directly if it's not a native executable.
            # For instance, interpreted scripts sometimes need to be run explicitly
            # with an interpreter if the file association is not done properly.
            name_ext = os.path.splitext(command)[1]
            if name_ext[1:].lower() in self.windows_exts:
                # Good, it can be directly executed
                return [command]
            # Try to extract the interpreter from the shebang
            commands = self._shebang_to_cmd(command)
            if commands:
                return commands
            return [None]
        # Maybe the name is an absolute path to a native Windows
        # executable, but without the extension. This is technically wrong,
        # but many people do it because it works in the MinGW shell.
        if os.path.isabs(name):
            for ext in self.windows_exts:
                command = '{}.{}'.format(name, ext)
                if os.path.exists(command):
                    return [command]
        # On Windows, interpreted scripts must have an extension otherwise they
        # cannot be found by a standard PATH search. So we do a custom search
        # where we manually search for a script with a shebang in PATH.
        search_dirs = os.environ.get('PATH', '').split(';')
        for search_dir in search_dirs:
            commands = self._search_dir(name, search_dir)
            if commands:
                return commands
        return [None]

    def _search(self, name, search_dir):
        '''
        Search in the specified dir for the specified executable by name
        and if not found search in PATH
        '''
        commands = self._search_dir(name, search_dir)
        if commands:
            return commands
        # Do a standard search in PATH
        command = shutil.which(name)
        if mesonlib.is_windows():
            return self._search_windows_special_cases(name, command)
        # On UNIX-like platforms, shutil.which() is enough to find
        # all executables whether in PATH or with an absolute path
        return [command]

    def found(self):
        """Return True if the program was located."""
        return self.command[0] is not None

    def get_command(self):
        """Return a copy of the command list used to run the program."""
        return self.command[:]

    def get_path(self):
        """Return the path to the script/binary being run, or None."""
        if self.found():
            # Assume that the last element is the full path to the script or
            # binary being run
            return self.command[-1]
        return None

    def get_name(self):
        return self.name
+
+
class ExternalLibrary(ExternalDependency):
    """A bare library found externally (e.g. via a compiler search), carrying
    only link arguments and the language it was detected with."""

    def __init__(self, name, link_args, environment, language, silent=False):
        super().__init__('external', environment, language, {})
        self.name = name
        self.language = language
        # Found exactly when we have something to link against.
        self.is_found = bool(link_args)
        if self.is_found:
            self.link_args = link_args
        if not silent:
            verdict = mlog.green('YES') if self.is_found else mlog.red('NO')
            mlog.log('Library', mlog.bold(name), 'found:', verdict)

    def get_link_args(self, language=None):
        '''
        External libraries detected using a compiler must only be used with
        compatible code. For instance, Vala libraries (.vapi files) cannot be
        used with C code, and not all Rust library types can be linked with
        C-like code. Note that C++ libraries *can* be linked with C code with
        a C++ linker (and vice-versa).
        '''
        # Using a vala library in a non-vala target, or a non-vala library in a vala target
        # XXX: This should be extended to other non-C linkers such as Rust
        # (the check is an XOR: incompatible iff exactly one side is vala).
        if (self.language == 'vala') != (language == 'vala'):
            return []
        return self.link_args
+
+
class ExtraFrameworkDependency(ExternalDependency):
    """An OS X framework found by scanning the framework directories.

    Fix vs. previous revision: detect() skips search directories that do not
    exist instead of crashing in os.listdir() with FileNotFoundError.
    """

    def __init__(self, name, required, path, env, lang, kwargs):
        super().__init__('extraframeworks', env, lang, kwargs)
        self.name = None
        self.required = required
        self.detect(name, path)
        if self.found():
            mlog.log('Dependency', mlog.bold(name), 'found:', mlog.green('YES'),
                     os.path.join(self.path, self.name))
        else:
            mlog.log('Dependency', name, 'found:', mlog.red('NO'))

    def detect(self, name, path):
        """Search for a directory matching `name` (case-insensitively, up to
        the first '.') in `path` or the standard framework directories.

        Sets self.path/self.name/self.is_found on success; raises
        DependencyException when required and not found.
        """
        lname = name.lower()
        if path is None:
            paths = ['/System/Library/Frameworks', '/Library/Frameworks']
        else:
            paths = [path]
        for p in paths:
            # A standard location may be absent; skip it rather than
            # letting os.listdir() raise FileNotFoundError.
            if not os.path.isdir(p):
                continue
            for d in os.listdir(p):
                fullpath = os.path.join(p, d)
                if lname != d.split('.')[0].lower():
                    continue
                if not stat.S_ISDIR(os.stat(fullpath).st_mode):
                    continue
                self.path = p
                self.name = d
                self.is_found = True
                return
        if not self.found() and self.required:
            raise DependencyException('Framework dependency %s not found.' % (name, ))

    def get_compile_args(self):
        """Return -I pointing at the framework's Headers dir (empty if not found)."""
        if self.found():
            return ['-I' + os.path.join(self.path, self.name, 'Headers')]
        return []

    def get_link_args(self):
        """Return the -F/-framework link arguments (empty if not found)."""
        if self.found():
            return ['-F' + self.path, '-framework', self.name.split('.')[0]]
        return []

    def get_version(self):
        # Frameworks do not expose a version we can query here.
        return 'unknown'
+
+
def get_dep_identifier(name, kwargs, want_cross):
    """Build an immutable, hashable identifier for caching dependency lookups."""
    # The identifier is used as a dict key, so every element must be hashable.
    reqs = listify(kwargs.get('version', []))
    if isinstance(reqs, list):
        reqs = frozenset(reqs)
    ident = (name, reqs, want_cross)
    # 'version' is embedded above as the second element for easy access;
    # 'native' is handled above with `want_cross`; 'required' is irrelevant
    # for caching (the caller handles it separately); 'fallback' subprojects
    # cannot be cached -- they must be initialized.
    skipped_keys = ('version', 'native', 'required', 'fallback')
    for key, value in kwargs.items():
        if key in skipped_keys:
            continue
        # All keyword arguments are strings, ints, or lists (or lists of lists)
        if isinstance(value, list):
            value = frozenset(listify(value))
        ident += (key, value)
    return ident
+
+
def find_external_dependency(name, env, kwargs):
    """Find an external dependency by name.

    Resolution order: a specialized handler from the `packages` registry,
    then pkg-config, then (macOS only) extra frameworks. Raises
    DependencyException for invalid keyword arguments or when a required
    dependency is not found.
    """
    required = kwargs.get('required', True)
    if not isinstance(required, bool):
        raise DependencyException('Keyword "required" must be a boolean.')
    if not isinstance(kwargs.get('method', ''), str):
        raise DependencyException('Keyword "method" must be a string.')
    lname = name.lower()
    if lname in packages:
        # Only some specialized handlers accept a 'language' kwarg.
        if lname not in _packages_accept_language and 'language' in kwargs:
            raise DependencyException('%s dependency does not accept "language" keyword argument' % (lname, ))
        dep = packages[lname](env, kwargs)
        if required and not dep.found():
            raise DependencyException('Dependency "%s" not found' % name)
        return dep
    if 'language' in kwargs:
        # Remove check when PkgConfigDependency supports language.
        raise DependencyException('%s dependency does not accept "language" keyword argument' % (lname, ))
    pkg_exc = None
    pkgdep = None
    try:
        pkgdep = PkgConfigDependency(name, env, kwargs)
        if pkgdep.found():
            return pkgdep
    except Exception as e:
        # Defer the pkg-config failure: on macOS the framework fallback may
        # still succeed; otherwise the exception is re-raised below.
        pkg_exc = e
    if mesonlib.is_osx():
        # Framework lookup is attempted as non-required so we can compose a
        # combined error message mentioning both mechanisms.
        fwdep = ExtraFrameworkDependency(name, False, None, env, None, kwargs)
        if required and not fwdep.found():
            m = 'Dependency {!r} not found, tried Extra Frameworks ' \
                'and Pkg-Config:\n\n' + str(pkg_exc)
            raise DependencyException(m.format(name))
        return fwdep
    if pkg_exc is not None:
        raise pkg_exc
    mlog.log('Dependency', mlog.bold(name), 'found:', mlog.red('NO'))
    # Not required and not found: return the (not-found) pkg-config dep.
    return pkgdep
+
+
def strip_system_libdirs(environment, link_args):
    """Drop -L flags that point into compiler system directories.

    Leaving these in will break builds where a user has a version of a library
    in the system path, and a different version not in the system path if they
    want to link against the non-system path version.
    """
    excluded = frozenset('-L' + d for d in environment.get_compiler_system_dirs())
    return [arg for arg in link_args if arg not in excluded]
--- /dev/null
+# Copyright 2013-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the detection logic for external dependencies useful for
+# development purposes, such as testing, debugging, etc..
+
+import os
+import re
+import shutil
+
+from .. import mlog
+from .. import mesonlib
+from ..mesonlib import version_compare, Popen_safe, stringlistify, extract_as_list
+from .base import (
+ DependencyException, ExternalDependency, PkgConfigDependency,
+ strip_system_libdirs, ConfigToolDependency,
+)
+
+
class GTestDependency(ExternalDependency):
    """Detect Google Test, either as a prebuilt library or as sources that
    get compiled into the consuming target."""

    def __init__(self, environment, kwargs):
        super().__init__('gtest', environment, 'cpp', kwargs)
        self.main = kwargs.get('main', False)
        self.src_dirs = ['/usr/src/gtest/src', '/usr/src/googletest/googletest/src']
        self.detect()

    def detect(self):
        """Populate is_found, compile_args, link_args and sources."""
        self.version = '1.something_maybe'
        lib_args = self.compiler.find_library("gtest", self.env, [])
        main_args = self.compiler.find_library("gtest_main", self.env, [])
        # Prebuilt libraries take precedence; gtest_main is only needed
        # when 'main' was requested.
        if lib_args and (main_args or not self.main):
            self.is_found = True
            self.compile_args = []
            self.link_args = lib_args
            if self.main:
                self.link_args += main_args
            self.sources = []
            mlog.log('Dependency GTest found:', mlog.green('YES'), '(prebuilt)')
            return
        if self.detect_srcdir():
            self.is_found = True
            self.compile_args = ['-I' + self.src_include_dir]
            self.link_args = []
            self.sources = [self.all_src, self.main_src] if self.main else [self.all_src]
            mlog.log('Dependency GTest found:', mlog.green('YES'), '(building self)')
            return
        mlog.log('Dependency GTest found:', mlog.red('NO'))
        self.is_found = False

    def detect_srcdir(self):
        """Look for unpacked GTest sources; on success set up the source
        fields and return True."""
        for candidate in self.src_dirs:
            if not os.path.exists(candidate):
                continue
            self.src_dir = candidate
            self.all_src = mesonlib.File.from_absolute_file(
                os.path.join(candidate, 'gtest-all.cc'))
            self.main_src = mesonlib.File.from_absolute_file(
                os.path.join(candidate, 'gtest_main.cc'))
            self.src_include_dir = os.path.normpath(os.path.join(candidate, '..'))
            return True
        return False

    def need_threads(self):
        # GTest consumers always get the threads dependency added.
        return True
+
+
class GMockDependency(ExternalDependency):
    """Detect Google Mock, either as a prebuilt library or as sources."""

    def __init__(self, environment, kwargs):
        super().__init__('gmock', environment, 'cpp', kwargs)
        self.version = '1.something_maybe'
        # GMock may be a library or just source.
        # Work with both.
        lib_args = self.compiler.find_library("gmock", self.env, [])
        if lib_args:
            self.is_found = True
            self.compile_args = []
            self.link_args = lib_args
            self.sources = []
            mlog.log('Dependency GMock found:', mlog.green('YES'), '(prebuilt)')
            return

        candidates = ['/usr/src/googletest/googlemock/src', '/usr/src/gmock/src', '/usr/src/gmock']
        for src_dir in candidates:
            if not os.path.exists(src_dir):
                continue
            self.is_found = True
            # Yes, we need both because there are multiple
            # versions of gmock that do different things.
            parent_dir = os.path.normpath(os.path.join(src_dir, '..'))
            self.compile_args = ['-I' + src_dir, '-I' + parent_dir]
            self.link_args = []
            all_src = mesonlib.File.from_absolute_file(os.path.join(src_dir, 'gmock-all.cc'))
            main_src = mesonlib.File.from_absolute_file(os.path.join(src_dir, 'gmock_main.cc'))
            if kwargs.get('main', False):
                self.sources = [all_src, main_src]
            else:
                self.sources = [all_src]
            mlog.log('Dependency GMock found:', mlog.green('YES'), '(building self)')
            return

        mlog.log('Dependency GMock found:', mlog.red('NO'))
        self.is_found = False
+
+
class LLVMDependency(ConfigToolDependency):
    """
    LLVM uses a special tool, llvm-config, which has arguments for getting
    c args, cxx args, and ldargs as well as version.

    Fixes vs. previous revision:
    - The library-name regex escapes the version string and the extension
      separator; previously '.' matched any character.
    - The development-snapshot 'svn' suffix is removed with an explicit
      endswith() check; rstrip('svn') would strip any trailing 's', 'v' or
      'n' characters, not the literal suffix.
    """

    # Ordered list of llvm-config binaries to try. Start with base, then try
    # newest back to oldest (3.5 is arbitrary), and finally the devel version.
    # Please note that llvm-config-6.0 is a development snapshot and it should
    # not be moved to the beginning of the list. The only difference between
    # llvm-config-6.0 and llvm-config-devel is that the former is used by
    # Debian and the latter is used by FreeBSD.
    tools = [
        'llvm-config', # base
        'llvm-config-5.0', 'llvm-config50', # latest stable release
        'llvm-config-4.0', 'llvm-config40', # old stable releases
        'llvm-config-3.9', 'llvm-config39',
        'llvm-config-3.8', 'llvm-config38',
        'llvm-config-3.7', 'llvm-config37',
        'llvm-config-3.6', 'llvm-config36',
        'llvm-config-3.5', 'llvm-config35',
        'llvm-config-6.0', 'llvm-config-devel', # development snapshot
    ]
    tool_name = 'llvm-config'
    # Flags from --cppflags we never want to propagate to consumers.
    __cpp_blacklist = {'-DNDEBUG'}

    def __init__(self, environment, kwargs):
        # It's necessary for LLVM <= 3.8 to use the C++ linker. For 3.9 and 4.0
        # the C linker works fine if only using the C API.
        super().__init__('config-tool', environment, 'cpp', kwargs)
        self.provided_modules = []
        self.required_modules = set()
        if not self.is_found:
            return
        self.static = kwargs.get('static', False)

        # Currently meson doesn't really attempt to handle pre-release versions,
        # so strip the 'svn' suffix, since it will probably cause problems
        # for users who want the patch version.
        if self.version.endswith('svn'):
            self.version = self.version[:-3]

        self.provided_modules = self.get_config_value(['--components'], 'modules')
        modules = stringlistify(extract_as_list(kwargs, 'modules'))
        self.check_components(modules)
        opt_modules = stringlistify(extract_as_list(kwargs, 'optional_modules'))
        self.check_components(opt_modules, required=False)

        cargs = set(self.get_config_value(['--cppflags'], 'compile_args'))
        self.compile_args = list(cargs.difference(self.__cpp_blacklist))

        if version_compare(self.version, '>= 3.9'):
            self._set_new_link_args()
        else:
            self._set_old_link_args()
        self.link_args = strip_system_libdirs(environment, self.link_args)

    def _set_new_link_args(self):
        """How to set linker args for LLVM versions >= 3.9"""
        link_args = ['--link-static', '--system-libs'] if self.static else ['--link-shared']
        self.link_args = self.get_config_value(
            ['--libs', '--ldflags'] + link_args + list(self.required_modules),
            'link_args')

    def _set_old_link_args(self):
        """Setting linker args for older versions of llvm.

        Old versions of LLVM bring an extra level of insanity with them.
        llvm-config will provide the correct arguments for static linking, but
        not for shared-linking, we have to figure those out ourselves, because
        of course we do.
        """
        if self.static:
            self.link_args = self.get_config_value(
                ['--libs', '--ldflags', '--system-libs'] + list(self.required_modules),
                'link_args')
        else:
            # llvm-config will provide arguments for static linking, so we get
            # to figure out for ourselves what to link with. We'll do that by
            # checking in the directory provided by --libdir for a library
            # called libLLVM-<ver>.(so|dylib|dll)
            libdir = self.get_config_value(['--libdir'], 'link_args')[0]

            expected_name = 'libLLVM-{}'.format(self.version)
            # Escape the name so the dots inside the version match literally,
            # and escape the extension separator (previously '.' matched any
            # character).
            re_name = re.compile(r'{}\.(so|dll|dylib)'.format(re.escape(expected_name)))

            for file_ in os.listdir(libdir):
                if re_name.match(file_):
                    self.link_args = ['-L{}'.format(libdir),
                                      '-l{}'.format(os.path.splitext(file_.lstrip('lib'))[0])]
                    break
            else:
                raise DependencyException(
                    'Could not find a dynamically linkable library for LLVM.')

    def check_components(self, modules, required=True):
        """Check for llvm components (modules in meson terms).

        The required option is whether the module is required, not whether LLVM
        is required.
        """
        for mod in sorted(set(modules)):
            if mod not in self.provided_modules:
                mlog.log('LLVM module', mod, 'found:', mlog.red('NO'),
                         '(optional)' if not required else '')
                if required:
                    self.is_found = False
                    if self.required:
                        raise DependencyException(
                            'Could not find required LLVM Component: {}'.format(mod))
            else:
                self.required_modules.add(mod)
                mlog.log('LLVM module', mod, 'found:', mlog.green('YES'))

    def need_threads(self):
        # LLVM consumers always get the threads dependency added.
        return True
+
+
class ValgrindDependency(PkgConfigDependency):
    '''
    Consumers of Valgrind usually only need the compile args and do not want to
    link to its (static) libraries.
    '''

    def __init__(self, env, kwargs):
        super().__init__('valgrind', env, kwargs)

    def get_link_args(self):
        # Deliberately suppress the link args found by pkg-config; only the
        # compile args (headers) are useful to consumers.
        return []
--- /dev/null
+# Copyright 2013-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the detection logic for miscellaneous external dependencies.
+
+import glob
+import os
+import re
+import shlex
+import shutil
+import sysconfig
+
+from pathlib import Path
+
+from .. import mlog
+from .. import mesonlib
+from ..mesonlib import Popen_safe, extract_as_list
+from ..environment import detect_cpu_family
+
+from .base import (
+ DependencyException, DependencyMethods, ExternalDependency,
+ ExternalProgram, ExtraFrameworkDependency, PkgConfigDependency,
+ ConfigToolDependency,
+)
+
+# On windows 3 directory layouts are supported:
+# * The default layout (versioned) installed:
+# - $BOOST_ROOT/include/boost-x_x/boost/*.hpp
+# - $BOOST_ROOT/lib/*.lib
+# * The non-default layout (system) installed:
+# - $BOOST_ROOT/include/boost/*.hpp
+# - $BOOST_ROOT/lib/*.lib
+# * The pre-built binaries from sf.net:
+# - $BOOST_ROOT/boost/*.hpp
+# - $BOOST_ROOT/lib<arch>-<compiler>/*.lib where arch=32/64 and compiler=msvc-14.1
+#
+# Library names supported:
+# - libboost_<module>-<compiler>-mt-gd-x_x.lib (static)
+# - boost_<module>-<compiler>-mt-gd-x_x.lib|.dll (shared)
+# - libboost_<module>.lib (static)
+# - boost_<module>.lib|.dll (shared)
+# where compiler is vc141 for example.
+#
# NOTE: -gd means runtime and build time debugging is on
+# -mt means threading=multi
+#
# The `modules` argument accepts library names. This is because every module that
+# has libraries to link against also has multiple options regarding how to
+# link. See for example:
+# * http://www.boost.org/doc/libs/1_65_1/libs/test/doc/html/boost_test/usage_variants.html
+# * http://www.boost.org/doc/libs/1_65_1/doc/html/stacktrace/configuration_and_build.html
+# * http://www.boost.org/doc/libs/1_65_1/libs/math/doc/html/math_toolkit/main_tr1.html
+
class BoostDependency(ExternalDependency):
    """Detect Boost headers and compiled Boost module libraries.

    Honours the $BOOST_ROOT, $BOOST_INCLUDEDIR and $BOOST_LIBRARYDIR
    environment variables, and otherwise falls back to platform-default
    search locations on Windows and Unix.
    """

    def __init__(self, environment, kwargs):
        super().__init__('boost', environment, 'cpp', kwargs)
        # Modules that Boost only ships as static archives.
        self.need_static_link = ['boost_exception', 'boost_test_exec_monitor']
        self.is_debug = environment.cmd_line_options.buildtype.startswith('debug')
        threading = kwargs.get("threading", "multi")
        self.is_multithreading = threading == "multi"

        self.requested_modules = self.get_requested(kwargs)
        invalid_modules = [c for c in self.requested_modules if 'boost_' + c not in BOOST_LIBS]
        if invalid_modules:
            mlog.warning('Invalid Boost modules: ' + ', '.join(invalid_modules))
            self.log_fail()
            return

        self.boost_root = None
        self.boost_roots = []
        self.incdir = None
        self.libdir = None

        if 'BOOST_ROOT' in os.environ:
            self.boost_root = os.environ['BOOST_ROOT']
            self.boost_roots = [self.boost_root]
            if not os.path.isabs(self.boost_root):
                raise DependencyException('BOOST_ROOT must be an absolute path.')
        if 'BOOST_INCLUDEDIR' in os.environ:
            self.incdir = os.environ['BOOST_INCLUDEDIR']
        if 'BOOST_LIBRARYDIR' in os.environ:
            self.libdir = os.environ['BOOST_LIBRARYDIR']

        if self.boost_root is None:
            if mesonlib.is_windows():
                self.boost_roots = self.detect_win_roots()
            else:
                self.boost_roots = self.detect_nix_roots()

        if self.boost_root is None and not self.boost_roots:
            self.log_fail()
            return

        if self.incdir is None:
            if mesonlib.is_windows():
                self.incdir = self.detect_win_incdir()
            else:
                self.incdir = self.detect_nix_incdir()

        if self.incdir is None:
            self.log_fail()
            return

        mlog.debug('Boost library root dir is', mlog.bold(self.boost_root))
        mlog.debug('Boost include directory is', mlog.bold(self.incdir))

        # Maps module name ('boost_thread') -> library file name.
        self.lib_modules = {}
        self.detect_version()
        if self.is_found:
            self.detect_lib_modules()
            mlog.debug('Boost library directory is', mlog.bold(self.libdir))
            self.validate_requested()
            self.log_success()
        else:
            self.log_fail()

    def log_fail(self):
        module_str = ', '.join(self.requested_modules)
        mlog.log("Dependency Boost (%s) found:" % module_str, mlog.red('NO'))

    def log_success(self):
        module_str = ', '.join(self.requested_modules)
        if self.boost_root:
            info = self.version + ', ' + self.boost_root
        else:
            info = self.version
        mlog.log('Dependency Boost (%s) found:' % module_str, mlog.green('YES'), info)

    def detect_nix_roots(self):
        """Guess Boost roots as the parents of the compiler include dirs."""
        return [os.path.abspath(os.path.join(x, '..'))
                for x in self.compiler.get_default_include_dirs()]

    def detect_win_roots(self):
        res = []
        # Where boost documentation says it should be
        globtext = 'C:\\Program Files\\boost\\boost_*'
        files = glob.glob(globtext)
        res.extend(files)

        # Where boost built from source actually installs it
        if os.path.isdir('C:\\Boost'):
            res.append('C:\\Boost')

        # Where boost prebuilt binaries are
        globtext = 'C:\\local\\boost_*'
        files = glob.glob(globtext)
        res.extend(files)
        return res

    def detect_nix_incdir(self):
        """Return the first root whose include/boost directory exists."""
        for root in self.boost_roots:
            incdir = os.path.join(root, 'include', 'boost')
            if os.path.isdir(incdir):
                return os.path.join(root, 'include')
        return None

    # FIXME: Should pick a version that matches the requested version
    # Returns the folder that contains the boost folder.
    def detect_win_incdir(self):
        for root in self.boost_roots:
            # Layout=versioned install: include/boost-x_x/boost/*.hpp
            globtext = os.path.join(root, 'include', 'boost-*')
            incdirs = glob.glob(globtext)
            if len(incdirs) > 0:
                return incdirs[0]
            # Layout=system install: include/boost/*.hpp
            incboostdir = os.path.join(root, 'include', 'boost')
            if os.path.isdir(incboostdir):
                return os.path.join(root, 'include')
            # sf.net prebuilt binaries: boost/*.hpp directly under the root
            incboostdir = os.path.join(root, 'boost')
            if os.path.isdir(incboostdir):
                return root
        return None

    def get_compile_args(self):
        args = []
        include_dir = self.incdir

        # Use "-isystem" when including boost headers instead of "-I"
        # to avoid compiler warnings/failures when "-Werror" is used

        # Careful not to use "-isystem" on default include dirs as it
        # breaks some of the headers for certain gcc versions

        # For example, doing g++ -isystem /usr/include on a simple
        # "int main()" source results in the error:
        # "/usr/include/c++/6.3.1/cstdlib:75:25: fatal error: stdlib.h: No such file or directory"

        # See https://gcc.gnu.org/bugzilla/show_bug.cgi?id=70129
        # and http://stackoverflow.com/questions/37218953/isystem-on-a-system-include-directory-causes-errors
        # for more details

        if include_dir and include_dir not in self.compiler.get_default_include_dirs():
            args.append("".join(self.compiler.get_include_args(include_dir, True)))
        return args

    def get_requested(self, kwargs):
        """Return the validated list of requested module names."""
        candidates = extract_as_list(kwargs, 'modules')
        for c in candidates:
            if not isinstance(c, str):
                raise DependencyException('Boost module argument is not a string.')
        return candidates

    def validate_requested(self):
        """Raise if any requested module was not found as a library."""
        for m in self.requested_modules:
            if 'boost_' + m not in self.lib_modules:
                msg = 'Requested Boost library {!r} not found'
                raise DependencyException(msg.format(m))

    def detect_version(self):
        try:
            ifile = open(os.path.join(self.incdir, 'boost', 'version.hpp'))
        except FileNotFoundError:
            return
        except TypeError:
            # self.incdir may be None; treat the same as not found.
            return
        with ifile:
            for line in ifile:
                if line.startswith("#define") and 'BOOST_LIB_VERSION' in line:
                    # e.g. '#define BOOST_LIB_VERSION "1_65_1"' -> '1.65.1'
                    ver = line.split()[-1]
                    ver = ver[1:-1]
                    self.version = ver.replace('_', '.')
                    self.is_found = True
                    return

    def detect_lib_modules(self):
        if mesonlib.is_windows():
            return self.detect_lib_modules_win()
        return self.detect_lib_modules_nix()

    def detect_lib_modules_win(self):
        arch = detect_cpu_family(self.env.coredata.compilers)
        comp_ts_version = self.env.detect_cpp_compiler(self.want_cross).get_toolset_version()
        compiler_ts = comp_ts_version.split('.')
        compiler = 'vc{}{}'.format(compiler_ts[0], compiler_ts[1])
        if not self.libdir:
            # The libdirs in the distributed binaries (from sf)
            if arch == 'x86':
                lib_sf = 'lib32-msvc-{}'.format(comp_ts_version)
            elif arch == 'x86_64':
                lib_sf = 'lib64-msvc-{}'.format(comp_ts_version)
            else:
                # Does anyone do Boost cross-compiling to other archs on Windows?
                lib_sf = None
            if self.boost_root:
                roots = [self.boost_root]
            else:
                roots = self.boost_roots
            for root in roots:
                # The default libdir when building
                libdir = os.path.join(root, 'lib')
                if os.path.isdir(libdir):
                    self.libdir = libdir
                    break
                if lib_sf:
                    full_path = os.path.join(root, lib_sf)
                    if os.path.isdir(full_path):
                        self.libdir = full_path
                        break

        if not self.libdir:
            return

        for name in self.need_static_link:
            libname = "lib{}".format(name) + '-' + compiler
            if self.is_multithreading:
                libname = libname + '-mt'
            if self.is_debug:
                libname = libname + '-gd'
            libname = libname + "-{}.lib".format(self.version.replace('.', '_'))
            if os.path.isfile(os.path.join(self.libdir, libname)):
                modname = libname.split('-', 1)[0][3:]
                self.lib_modules[modname] = libname
            else:
                libname = "lib{}.lib".format(name)
                if os.path.isfile(os.path.join(self.libdir, libname)):
                    # BUGFIX: 'name' is already 'boost_...' without a 'lib'
                    # prefix, so it is the module key as-is; the old
                    # name[3:] produced broken keys like 'st_exception'.
                    self.lib_modules[name] = libname

        # globber1 applies to a layout=system installation
        # globber2 applies to a layout=versioned installation
        globber1 = 'libboost_*' if self.static else 'boost_*'
        globber2 = globber1 + '-' + compiler
        if self.is_multithreading:
            globber2 = globber2 + '-mt'
        if self.is_debug:
            globber2 = globber2 + '-gd'
        globber2 = globber2 + '-{}'.format(self.version.replace('.', '_'))
        globber2_matches = glob.glob(os.path.join(self.libdir, globber2 + '.lib'))
        for entry in globber2_matches:
            (_, fname) = os.path.split(entry)
            modname = fname.split('-', 1)
            if len(modname) > 1:
                modname = modname[0]
            else:
                # BUGFIX: split the string element, not the one-element list
                # (the old code raised AttributeError for names without '-').
                modname = modname[0].split('.', 1)[0]
            if self.static:
                # Drop the 'lib' prefix of static archives.
                modname = modname[3:]
            self.lib_modules[modname] = fname
        if len(globber2_matches) == 0:
            for entry in glob.glob(os.path.join(self.libdir, globber1 + '.lib')):
                (_, fname) = os.path.split(entry)
                modname = fname.split('.', 1)[0]
                if self.static:
                    modname = modname[3:]
                self.lib_modules[modname] = fname

    def detect_lib_modules_nix(self):
        if self.static:
            libsuffix = 'a'
        elif mesonlib.is_osx() and not self.want_cross:
            libsuffix = 'dylib'
        else:
            libsuffix = 'so'

        globber = 'libboost_*.{}'.format(libsuffix)
        if self.libdir:
            libdirs = [self.libdir]
        elif self.boost_root is None:
            libdirs = mesonlib.get_library_dirs()
        else:
            libdirs = [os.path.join(self.boost_root, 'lib')]
        for libdir in libdirs:
            for name in self.need_static_link:
                libname = 'lib{}.a'.format(name)
                if os.path.isfile(os.path.join(libdir, libname)):
                    self.lib_modules[name] = libname
            for entry in glob.glob(os.path.join(libdir, globber)):
                lib = os.path.basename(entry)
                name = lib.split('.')[0][3:]
                # I'm not 100% sure what to do here. Some distros
                # have modules such as thread only as -mt versions.
                # On debian all packages are built threading=multi
                # but not suffixed with -mt.
                # FIXME: implement detect_lib_modules_{debian, redhat, ...}
                if self.is_multithreading and mesonlib.is_debianlike():
                    self.lib_modules[name] = lib
                elif self.is_multithreading and entry.endswith('-mt.{}'.format(libsuffix)):
                    self.lib_modules[name] = lib
                elif not entry.endswith('-mt.{}'.format(libsuffix)):
                    self.lib_modules[name] = lib

    def get_win_link_args(self):
        args = []
        # TODO: should this check self.libdir?
        if self.libdir:
            args.append('-L' + self.libdir)
        for lib in self.requested_modules:
            args.append(self.lib_modules['boost_' + lib])
        return args

    def get_link_args(self):
        if mesonlib.is_windows():
            return self.get_win_link_args()
        args = []
        if self.boost_root:
            args.append('-L' + os.path.join(self.boost_root, 'lib'))
        elif self.libdir:
            args.append('-L' + self.libdir)
        for lib in self.requested_modules:
            # The compiler's library detector is the most reliable so use that first.
            boost_lib = 'boost_' + lib
            default_detect = self.compiler.find_library(boost_lib, self.env, [])
            if default_detect is not None:
                args += default_detect
            elif boost_lib in self.lib_modules:
                linkcmd = '-l' + boost_lib
                args.append(linkcmd)
        return args

    def get_sources(self):
        return []

    def need_threads(self):
        return 'thread' in self.requested_modules
+
+
class MPIDependency(ExternalDependency):
    """Detect an MPI implementation.

    Tries pkg-config (OpenMPI only), then compiler wrapper scripts
    (from $MPICC-style environment variables or well-known names), and
    finally MS-MPI environment variables on Windows.
    """

    def __init__(self, environment, kwargs):
        language = kwargs.get('language', 'c')
        super().__init__('mpi', environment, language, kwargs)
        required = kwargs.pop('required', True)
        kwargs['required'] = False
        kwargs['silent'] = True
        self.is_found = False

        # NOTE: Only OpenMPI supplies a pkg-config file at the moment.
        if language == 'c':
            env_vars = ['MPICC']
            pkgconfig_files = ['ompi-c']
            default_wrappers = ['mpicc']
        elif language == 'cpp':
            env_vars = ['MPICXX']
            pkgconfig_files = ['ompi-cxx']
            default_wrappers = ['mpic++', 'mpicxx', 'mpiCC']
        elif language == 'fortran':
            env_vars = ['MPIFC', 'MPIF90', 'MPIF77']
            pkgconfig_files = ['ompi-fort']
            default_wrappers = ['mpifort', 'mpif90', 'mpif77']
        else:
            raise DependencyException('Language {} is not supported with MPI.'.format(language))

        for pkg in pkgconfig_files:
            try:
                pkgdep = PkgConfigDependency(pkg, environment, kwargs, language=self.language)
                if pkgdep.found():
                    self.compile_args = pkgdep.get_compile_args()
                    self.link_args = pkgdep.get_link_args()
                    self.version = pkgdep.get_version()
                    self.is_found = True
                    break
            except Exception:
                pass

        if not self.is_found:
            # Prefer environment.
            for var in env_vars:
                if var in os.environ:
                    wrappers = [os.environ[var]]
                    break
            else:
                # Or search for default wrappers.
                wrappers = default_wrappers

            for prog in wrappers:
                result = self._try_openmpi_wrapper(prog)
                if result is not None:
                    self.is_found = True
                    self.version = result[0]
                    self.compile_args = self._filter_compile_args(result[1])
                    self.link_args = self._filter_link_args(result[2])
                    break
                result = self._try_other_wrapper(prog)
                if result is not None:
                    self.is_found = True
                    self.version = result[0]
                    self.compile_args = self._filter_compile_args(result[1])
                    self.link_args = self._filter_link_args(result[2])
                    break

        if not self.is_found and mesonlib.is_windows():
            result = self._try_msmpi()
            if result is not None:
                self.is_found = True
                self.version, self.compile_args, self.link_args = result

        if self.is_found:
            mlog.log('Dependency', mlog.bold(self.name), 'for', self.language, 'found:', mlog.green('YES'), self.version)
        else:
            mlog.log('Dependency', mlog.bold(self.name), 'for', self.language, 'found:', mlog.red('NO'))
            if required:
                raise DependencyException('MPI dependency {!r} not found'.format(self.name))

    def _filter_compile_args(self, args):
        """
        MPI wrappers return a bunch of garbage args.
        Drop -O2 and everything that is not needed.
        """
        result = []
        multi_args = ('-I', )
        if self.language == 'fortran':
            fc = self.env.coredata.compilers['fortran']
            multi_args += fc.get_module_incdir_args()

        include_next = False
        for f in args:
            if f.startswith(('-D', '-f') + multi_args) or f == '-pthread' \
                    or (f.startswith('-W') and f != '-Wall' and not f.startswith('-Werror')):
                result.append(f)
                if f in multi_args:
                    # Path is a separate argument.
                    include_next = True
            elif include_next:
                include_next = False
                result.append(f)
        return result

    def _filter_link_args(self, args):
        """
        MPI wrappers return a bunch of garbage args.
        Drop -O2 and everything that is not needed.
        """
        result = []
        include_next = False
        for f in args:
            if f.startswith(('-L', '-l', '-Xlinker')) or f == '-pthread' \
                    or (f.startswith('-W') and f != '-Wall' and not f.startswith('-Werror')):
                result.append(f)
                if f in ('-L', '-Xlinker'):
                    include_next = True
            elif include_next:
                include_next = False
                result.append(f)
        return result

    def _try_openmpi_wrapper(self, prog):
        """Query an OpenMPI wrapper; return (version, cflags, libs) or None."""
        prog = ExternalProgram(prog, silent=True)
        if prog.found():
            cmd = prog.get_command() + ['--showme:compile']
            p, o, e = mesonlib.Popen_safe(cmd)
            p.wait()
            if p.returncode != 0:
                mlog.debug('Command', mlog.bold(cmd), 'failed to run:')
                mlog.debug(mlog.bold('Standard output\n'), o)
                mlog.debug(mlog.bold('Standard error\n'), e)
                return
            cargs = shlex.split(o)

            cmd = prog.get_command() + ['--showme:link']
            p, o, e = mesonlib.Popen_safe(cmd)
            p.wait()
            if p.returncode != 0:
                mlog.debug('Command', mlog.bold(cmd), 'failed to run:')
                mlog.debug(mlog.bold('Standard output\n'), o)
                mlog.debug(mlog.bold('Standard error\n'), e)
                return
            libs = shlex.split(o)

            cmd = prog.get_command() + ['--showme:version']
            p, o, e = mesonlib.Popen_safe(cmd)
            p.wait()
            if p.returncode != 0:
                mlog.debug('Command', mlog.bold(cmd), 'failed to run:')
                mlog.debug(mlog.bold('Standard output\n'), o)
                mlog.debug(mlog.bold('Standard error\n'), e)
                return
            # BUGFIX: use a raw string and escape the dots; '\d+.\d+.\d+'
            # emitted a DeprecationWarning and '.' matched any character.
            version = re.search(r'\d+\.\d+\.\d+', o)
            if version:
                version = version.group(0)
            else:
                version = 'none'

            return version, cargs, libs

    def _try_other_wrapper(self, prog):
        """Query a generic MPI wrapper via -show; return (version, args, args) or None."""
        prog = ExternalProgram(prog, silent=True)
        if prog.found():
            cmd = prog.get_command() + ['-show']
            p, o, e = mesonlib.Popen_safe(cmd)
            p.wait()
            if p.returncode != 0:
                mlog.debug('Command', mlog.bold(cmd), 'failed to run:')
                mlog.debug(mlog.bold('Standard output\n'), o)
                mlog.debug(mlog.bold('Standard error\n'), e)
                return
            args = shlex.split(o)

            version = 'none'

            return version, args, args

    def _try_msmpi(self):
        """Detect MS-MPI from its environment variables; return (version, cflags, libs) or None."""
        if self.language == 'cpp':
            # MS-MPI does not support the C++ version of MPI, only the standard C API.
            return
        if 'MSMPI_INC' not in os.environ:
            return
        incdir = os.environ['MSMPI_INC']
        arch = detect_cpu_family(self.env.coredata.compilers)
        if arch == 'x86':
            if 'MSMPI_LIB32' not in os.environ:
                return
            libdir = os.environ['MSMPI_LIB32']
            post = 'x86'
        elif arch == 'x86_64':
            if 'MSMPI_LIB64' not in os.environ:
                return
            libdir = os.environ['MSMPI_LIB64']
            post = 'x64'
        else:
            return
        if self.language == 'fortran':
            return ('none',
                    ['-I' + incdir, '-I' + os.path.join(incdir, post)],
                    [os.path.join(libdir, 'msmpi.lib'), os.path.join(libdir, 'msmpifec.lib')])
        else:
            return ('none',
                    ['-I' + incdir, '-I' + os.path.join(incdir, post)],
                    [os.path.join(libdir, 'msmpi.lib')])
+
+
class ThreadDependency(ExternalDependency):
    """Dependency representing the platform's native thread support.

    Always found; its only job is to make the build pull in thread flags.
    """

    def __init__(self, environment, kwargs):
        super().__init__('threads', environment, None, {})
        self.name = 'threads'
        self.is_found = True
        mlog.log('Dependency', mlog.bold(self.name), 'found:', mlog.green('YES'))

    def need_threads(self):
        # The entire point of this dependency.
        return True

    def get_version(self):
        return 'unknown'
+
+
class Python3Dependency(ExternalDependency):
    """Detect the Python 3 development libraries.

    Tries pkg-config first, then sysconfig on Windows and the framework
    layout on macOS.
    """

    def __init__(self, environment, kwargs):
        super().__init__('python3', environment, None, kwargs)
        self.name = 'python3'
        self.static = kwargs.get('static', False)
        # We can only be sure that it is Python 3 at this point
        self.version = '3'
        self.pkgdep = None
        if DependencyMethods.PKGCONFIG in self.methods:
            try:
                self.pkgdep = PkgConfigDependency('python3', environment, kwargs)
                if self.pkgdep.found():
                    self.compile_args = self.pkgdep.get_compile_args()
                    self.link_args = self.pkgdep.get_link_args()
                    self.version = self.pkgdep.get_version()
                    self.is_found = True
                    return
                else:
                    self.pkgdep = None
            except Exception:
                pass
        if not self.is_found:
            if mesonlib.is_windows() and DependencyMethods.SYSCONFIG in self.methods:
                self._find_libpy3_windows(environment)
            elif mesonlib.is_osx() and DependencyMethods.EXTRAFRAMEWORK in self.methods:
                # In OSX the Python 3 framework does not have a version
                # number in its name.
                # There is a python in /System/Library/Frameworks, but that's
                # python 2, Python 3 will always be in /Library
                fw = ExtraFrameworkDependency(
                    'python', False, '/Library/Frameworks', self.env, self.language, kwargs)
                if fw.found():
                    self.compile_args = fw.get_compile_args()
                    self.link_args = fw.get_link_args()
                    self.is_found = True
        if self.is_found:
            mlog.log('Dependency', mlog.bold(self.name), 'found:', mlog.green('YES'))
        else:
            mlog.log('Dependency', mlog.bold(self.name), 'found:', mlog.red('NO'))

    @staticmethod
    def get_windows_python_arch():
        """Return '32' or '64' for the running Python, or None if unknown."""
        pyplat = sysconfig.get_platform()
        if pyplat == 'mingw':
            pycc = sysconfig.get_config_var('CC')
            if pycc.startswith('x86_64'):
                return '64'
            elif pycc.startswith(('i686', 'i386')):
                return '32'
            else:
                # BUGFIX: the adjacent literals previously concatenated
                # without a space ("please filea bug").
                mlog.log('MinGW Python built with unknown CC {!r}, please file '
                         'a bug'.format(pycc))
                return None
        elif pyplat == 'win32':
            return '32'
        elif pyplat in ('win64', 'win-amd64'):
            return '64'
        mlog.log('Unknown Windows Python platform {!r}'.format(pyplat))
        return None

    def get_windows_link_args(self):
        """Return the library path to link against, or None if not found.

        Only meaningful on Windows ('win*' or 'mingw' sysconfig platforms).
        """
        pyplat = sysconfig.get_platform()
        if pyplat.startswith('win'):
            vernum = sysconfig.get_config_var('py_version_nodot')
            if self.static:
                libname = 'libpython{}.a'.format(vernum)
            else:
                libname = 'python{}.lib'.format(vernum)
            lib = Path(sysconfig.get_config_var('base')) / 'libs' / libname
        elif pyplat == 'mingw':
            if self.static:
                libname = sysconfig.get_config_var('LIBRARY')
            else:
                libname = sysconfig.get_config_var('LDLIBRARY')
            lib = Path(sysconfig.get_config_var('LIBDIR')) / libname
        if not lib.exists():
            mlog.log('Could not find Python3 library {!r}'.format(str(lib)))
            return None
        return [str(lib)]

    def _find_libpy3_windows(self, env):
        '''
        Find python3 libraries on Windows and also verify that the arch matches
        what we are building for.
        '''
        pyarch = self.get_windows_python_arch()
        if pyarch is None:
            self.is_found = False
            return
        arch = detect_cpu_family(env.coredata.compilers)
        if arch == 'x86':
            arch = '32'
        elif arch == 'x86_64':
            arch = '64'
        else:
            # We can't cross-compile Python 3 dependencies on Windows yet
            mlog.log('Unknown architecture {!r} for'.format(arch),
                     mlog.bold(self.name))
            self.is_found = False
            return
        # Pyarch ends in '32' or '64'
        if arch != pyarch:
            mlog.log('Need', mlog.bold(self.name), 'for {}-bit, but '
                     'found {}-bit'.format(arch, pyarch))
            self.is_found = False
            return
        # This can fail if the library is not found
        largs = self.get_windows_link_args()
        if largs is None:
            self.is_found = False
            return
        self.link_args = largs
        # Compile args
        inc = sysconfig.get_path('include')
        platinc = sysconfig.get_path('platinclude')
        self.compile_args = ['-I' + inc]
        if inc != platinc:
            self.compile_args.append('-I' + platinc)
        self.version = sysconfig.get_config_var('py_version')
        self.is_found = True

    def get_methods(self):
        if mesonlib.is_windows():
            return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSCONFIG]
        elif mesonlib.is_osx():
            return [DependencyMethods.PKGCONFIG, DependencyMethods.EXTRAFRAMEWORK]
        else:
            return [DependencyMethods.PKGCONFIG]

    def get_pkgconfig_variable(self, variable_name, kwargs):
        # Delegate to the pkg-config dependency when we found one.
        if self.pkgdep:
            return self.pkgdep.get_pkgconfig_variable(variable_name, kwargs)
        else:
            return super().get_pkgconfig_variable(variable_name, kwargs)
+
+
class PcapDependency(ExternalDependency):
    """Detect libpcap, via pkg-config or the pcap-config tool."""

    def __init__(self, environment, kwargs):
        super().__init__('pcap', environment, None, kwargs)
        kwargs['required'] = False
        if DependencyMethods.PKGCONFIG in self.methods:
            try:
                pkgdep = PkgConfigDependency('pcap', environment, kwargs)
                if pkgdep.found():
                    self.type_name = 'pkgconfig'
                    self.is_found = True
                    self.compile_args = pkgdep.get_compile_args()
                    self.link_args = pkgdep.get_link_args()
                    self.version = pkgdep.get_version()
                    return
            except Exception as e:
                mlog.debug('Pcap not found via pkgconfig. Trying next, error was:', str(e))
        if DependencyMethods.CONFIG_TOOL in self.methods:
            try:
                tooldep = ConfigToolDependency.factory(
                    'pcap', environment, None, kwargs, ['pcap-config'], 'pcap-config')
                if tooldep.found():
                    self.config = tooldep.config
                    self.type_name = 'config-tool'
                    self.compile_args = tooldep.get_config_value(['--cflags'], 'compile_args')
                    self.link_args = tooldep.get_config_value(['--libs'], 'link_args')
                    # pcap-config has no --version; ask the library itself.
                    self.version = self.get_pcap_lib_version()
                    self.is_found = True
                    return
            except Exception as e:
                mlog.debug('Pcap not found via pcap-config. Trying next, error was:', str(e))

    def get_methods(self):
        methods = [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL]
        if mesonlib.is_osx():
            methods.append(DependencyMethods.EXTRAFRAMEWORK)
        return methods

    def get_pcap_lib_version(self):
        # Run pcap_lib_version() through the compiler to get a version string.
        return self.compiler.get_return_value('pcap_lib_version', 'string',
                                              '#include <pcap.h>', self.env, [], [self])
+
+
class CupsDependency(ExternalDependency):
    """Detect the CUPS printing library via pkg-config, cups-config, or framework."""

    def __init__(self, environment, kwargs):
        super().__init__('cups', environment, None, kwargs)
        kwargs['required'] = False
        if DependencyMethods.PKGCONFIG in self.methods:
            try:
                pkgdep = PkgConfigDependency('cups', environment, kwargs)
                if pkgdep.found():
                    self.type_name = 'pkgconfig'
                    self.is_found = True
                    self.compile_args = pkgdep.get_compile_args()
                    self.link_args = pkgdep.get_link_args()
                    self.version = pkgdep.get_version()
                    return
            except Exception as e:
                mlog.debug('cups not found via pkgconfig. Trying next, error was:', str(e))
        if DependencyMethods.CONFIG_TOOL in self.methods:
            try:
                tooldep = ConfigToolDependency.factory(
                    'cups', environment, None, kwargs, ['cups-config'], 'cups-config')
                if tooldep.found():
                    self.config = tooldep.config
                    self.type_name = 'config-tool'
                    self.version = tooldep.version
                    self.compile_args = tooldep.get_config_value(['--cflags'], 'compile_args')
                    self.link_args = tooldep.get_config_value(['--libs'], 'link_args')
                    self.is_found = True
                    return
            except Exception as e:
                mlog.debug('cups not found via cups-config. Trying next, error was:', str(e))
        if DependencyMethods.EXTRAFRAMEWORK in self.methods and mesonlib.is_osx():
            fwdep = ExtraFrameworkDependency('cups', False, None, self.env,
                                             self.language, kwargs)
            if fwdep.found():
                self.is_found = True
                self.compile_args = fwdep.get_compile_args()
                self.link_args = fwdep.get_link_args()
                self.version = fwdep.get_version()
                return
        mlog.log('Dependency', mlog.bold('cups'), 'found:', mlog.red('NO'))

    def get_methods(self):
        methods = [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL]
        if mesonlib.is_osx():
            methods.append(DependencyMethods.EXTRAFRAMEWORK)
        return methods
+
+
class LibWmfDependency(ExternalDependency):
    """Detect libwmf via pkg-config or the libwmf-config tool."""

    def __init__(self, environment, kwargs):
        super().__init__('libwmf', environment, None, kwargs)
        if DependencyMethods.PKGCONFIG in self.methods:
            try:
                kwargs['required'] = False
                pcdep = PkgConfigDependency('libwmf', environment, kwargs)
                if pcdep.found():
                    self.type_name = 'pkgconfig'
                    self.is_found = True
                    self.compile_args = pcdep.get_compile_args()
                    self.link_args = pcdep.get_link_args()
                    self.version = pcdep.get_version()
                    return
            except Exception as e:
                mlog.debug('LibWmf not found via pkgconfig. Trying next, error was:', str(e))
        if DependencyMethods.CONFIG_TOOL in self.methods:
            try:
                ctdep = ConfigToolDependency.factory(
                    'libwmf', environment, None, kwargs, ['libwmf-config'], 'libwmf-config')
                if ctdep.found():
                    self.config = ctdep.config
                    # BUGFIX: was the typo 'config-too'; siblings use 'config-tool'.
                    self.type_name = 'config-tool'
                    self.version = ctdep.version
                    self.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args')
                    self.link_args = ctdep.get_config_value(['--libs'], 'link_args')
                    self.is_found = True
                    return
            except Exception as e:
                # BUGFIX: message said 'cups' (copy-paste from CupsDependency).
                mlog.debug('libwmf not found via libwmf-config. Trying next, error was:', str(e))

    def get_methods(self):
        if mesonlib.is_osx():
            return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.EXTRAFRAMEWORK]
        else:
            return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL]
+
+
# Generated with boost_names.py; duplicate entries removed by hand
# (the original repeated boost_chrono and the boost_math_* names many
# times). Only membership tests of the form `'boost_' + mod in BOOST_LIBS`
# are performed on this list, so deduplication does not change behavior.
BOOST_LIBS = [
    'boost_atomic',
    'boost_chrono',
    'boost_container',
    'boost_context',
    'boost_coroutine',
    'boost_date_time',
    'boost_exception',
    'boost_fiber',
    'boost_filesystem',
    'boost_graph',
    'boost_iostreams',
    'boost_locale',
    'boost_log',
    'boost_log_setup',
    'boost_math_tr1',
    'boost_math_tr1f',
    'boost_math_tr1l',
    'boost_math_c99',
    'boost_math_c99f',
    'boost_math_c99l',
    'boost_mpi',
    'boost_program_options',
    'boost_python',
    'boost_python3',
    'boost_numpy',
    'boost_numpy3',
    'boost_random',
    'boost_regex',
    'boost_serialization',
    'boost_wserialization',
    'boost_signals',
    'boost_stacktrace_noop',
    'boost_stacktrace_backtrace',
    'boost_stacktrace_addr2line',
    'boost_stacktrace_basic',
    'boost_stacktrace_windbg',
    'boost_stacktrace_windbg_cached',
    'boost_system',
    'boost_prg_exec_monitor',
    'boost_test_exec_monitor',
    'boost_unit_test_framework',
    'boost_thread',
    'boost_timer',
    'boost_type_erasure',
    'boost_wave',
]
--- /dev/null
+# Copyright 2013-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the detection logic for external dependencies that are
+# platform-specific (generally speaking).
+
+from .. import mesonlib
+
+from .base import ExternalDependency, DependencyException
+
+
class AppleFrameworks(ExternalDependency):
    """Dependency on one or more Apple frameworks, linked with -framework."""

    def __init__(self, env, kwargs):
        super().__init__('appleframeworks', env, None, kwargs)
        modules = kwargs.get('modules', [])
        if isinstance(modules, str):
            modules = [modules]
        if not modules:
            raise DependencyException("AppleFrameworks dependency requires at least one module.")
        self.frameworks = modules
        # FIXME: Use self.compiler to check if the frameworks are available
        for framework in self.frameworks:
            self.link_args += ['-framework', framework]

    def found(self):
        # Frameworks only exist on macOS.
        return mesonlib.is_osx()

    def get_version(self):
        return 'unknown'
--- /dev/null
+# Copyright 2013-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the detection logic for external dependencies that
+# are UI-related.
+
+import os
+import re
+import shutil
+import subprocess
+from collections import OrderedDict
+
+from .. import mlog
+from .. import mesonlib
+from ..mesonlib import (
+ MesonException, Popen_safe, extract_as_list, for_windows,
+ version_compare_many
+)
+from ..environment import detect_cpu
+
+from .base import DependencyException, DependencyMethods
+from .base import ExternalDependency, ExternalProgram
+from .base import ExtraFrameworkDependency, PkgConfigDependency
+from .base import ConfigToolDependency
+
+
class GLDependency(ExternalDependency):
    """OpenGL dependency.

    Detection order: pkg-config where available, then the platform's
    built-in OpenGL (framework on OSX, opengl32 on Windows).
    """

    def __init__(self, environment, kwargs):
        super().__init__('gl', environment, None, kwargs)
        if DependencyMethods.PKGCONFIG in self.methods:
            try:
                pcdep = PkgConfigDependency('gl', environment, kwargs)
                if pcdep.found():
                    self._adopt_pkgconfig(pcdep)
                    return
            except Exception:
                pass
        if DependencyMethods.SYSTEM in self.methods:
            if mesonlib.is_osx():
                self.is_found = True
                # FIXME: Use AppleFrameworks dependency
                self.link_args = ['-framework', 'OpenGL']
                # FIXME: Detect version using self.compiler
                self.version = '1'
                return
            if mesonlib.is_windows():
                self.is_found = True
                # FIXME: Use self.compiler.find_library()
                self.link_args = ['-lopengl32']
                # FIXME: Detect version using self.compiler
                self.version = '1'
                return

    def _adopt_pkgconfig(self, pcdep):
        # Copy everything over from a successful pkg-config lookup.
        self.type_name = 'pkgconfig'
        self.is_found = True
        self.compile_args = pcdep.get_compile_args()
        self.link_args = pcdep.get_link_args()
        self.version = pcdep.get_version()

    def get_methods(self):
        # The system fallback only makes sense on OSX and Windows.
        if mesonlib.is_osx() or mesonlib.is_windows():
            return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM]
        return [DependencyMethods.PKGCONFIG]
+
+
class GnuStepDependency(ConfigToolDependency):
    """GNUstep dependency, configured through the gnustep-config tool."""

    tools = ['gnustep-config']
    tool_name = 'gnustep-config'

    def __init__(self, environment, kwargs):
        super().__init__('gnustep', environment, 'objc', kwargs)
        if not self.is_found:
            return
        self.modules = kwargs.get('modules', [])
        self.compile_args = self.filter_args(
            self.get_config_value(['--objc-flags'], 'compile_args'))
        # The 'gui' module needs the GUI library set; everything else only
        # needs the base libraries.
        libs_flag = '--gui-libs' if 'gui' in self.modules else '--base-libs'
        self.link_args = self.weird_filter(
            self.get_config_value([libs_flag], 'link_args'))

    def find_config(self, versions=None):
        """Probe gnustep-config and return (tool, version) or (None, ...)."""
        tool = self.tools[0]
        try:
            proc, out = Popen_safe([tool, '--help'])[:2]
        except (FileNotFoundError, PermissionError):
            return (None, None)
        if proc.returncode != 0:
            return (None, None)
        self.config = tool
        found_version = self.detect_version()
        if versions and not version_compare_many(found_version, versions)[0]:
            return (None, found_version)
        return (tool, found_version)

    def weird_filter(self, elems):
        """When building packages, the output of the enclosing Make is
        sometimes mixed among the subprocess output. I have no idea why. As a
        hack filter out everything that is not a flag.
        """
        return [e for e in elems if e.startswith('-')]

    def filter_args(self, args):
        """gnustep-config returns a bunch of garbage args such as -O2 and so
        on. Drop everything that is not needed.
        """
        kept = []
        for arg in args:
            if arg.startswith(('-D', '-f', '-I')) \
                    or arg == '-pthread' \
                    or (arg.startswith('-W') and arg != '-Wall'):
                kept.append(arg)
        return kept

    def detect_version(self):
        """Extract the GNUstep base version by querying its own Makefiles."""
        gmake = self.get_config_value(['--variable=GNUMAKE'], 'variable')[0]
        makefile_dir = self.get_config_value(['--variable=GNUSTEP_MAKEFILES'], 'variable')[0]
        # This Makefile has the GNUStep version set
        base_make = os.path.join(makefile_dir, 'Additional', 'base.make')
        # Print the Makefile variable passed as the argument. For instance, if
        # you run the make target `print-SOME_VARIABLE`, this will print the
        # value of the variable `SOME_VARIABLE`.
        printver = "print-%:\n\t@echo '$($*)'"
        env = os.environ.copy()
        # See base.make to understand why this is set
        env['FOUNDATION_LIB'] = 'gnu'
        p, o, e = Popen_safe([gmake, '-f', '-', '-f', base_make,
                              'print-GNUSTEP_BASE_VERSION'],
                             env=env, write=printver, stdin=subprocess.PIPE)
        version = o.strip()
        if not version:
            mlog.debug("Couldn't detect GNUStep version, falling back to '1'")
            # Fallback to setting some 1.x version
            version = '1'
        return version
+
+
class QtBaseDependency(ExternalDependency):
    """Shared detection logic for Qt4/Qt5.

    Tries pkg-config first, then falls back to `qmake -query`; on OSX the
    qmake path delegates to framework detection. Subclasses supply
    get_pkgconfig_host_bins().
    """

    def __init__(self, name, env, kwargs):
        super().__init__(name, env, 'cpp', kwargs)
        self.qtname = name.capitalize()
        # 'qt4' -> '4', 'qt5' -> '5'
        self.qtver = name[-1]
        if self.qtver == "4":
            # Qt4 pkg-config files are named 'Qt<Module>', Qt5 ones 'Qt5<Module>'.
            self.qtpkgname = 'Qt'
        else:
            self.qtpkgname = self.qtname
        self.root = '/usr'
        # Directory holding moc/uic/rcc; set by the detection methods and
        # used by self.compilers_detect().
        self.bindir = None
        mods = kwargs.get('modules', [])
        if isinstance(mods, str):
            mods = [mods]
        if not mods:
            raise DependencyException('No ' + self.qtname + ' modules specified.')
        type_text = 'cross' if env.is_cross_build() else 'native'
        found_msg = '{} {} {{}} dependency (modules: {}) found:' \
                    ''.format(self.qtname, type_text, ', '.join(mods))
        from_text = 'pkg-config'

        # Keep track of the detection methods used, for logging purposes.
        methods = []
        # Prefer pkg-config, then fallback to `qmake -query`
        if DependencyMethods.PKGCONFIG in self.methods:
            self._pkgconfig_detect(mods, kwargs)
            methods.append('pkgconfig')
        if not self.is_found and DependencyMethods.QMAKE in self.methods:
            from_text = self._qmake_detect(mods, kwargs)
            methods.append('qmake-' + self.name)
            methods.append('qmake')
        if not self.is_found:
            # Reset compile args and link args
            self.compile_args = []
            self.link_args = []
            from_text = '(checked {})'.format(mlog.format_list(methods))
            self.version = 'none'
            if self.required:
                err_msg = '{} {} dependency not found {}' \
                          ''.format(self.qtname, type_text, from_text)
                raise DependencyException(err_msg)
            if not self.silent:
                mlog.log(found_msg.format(from_text), mlog.red('NO'))
            return
        from_text = '`{}`'.format(from_text)
        if not self.silent:
            mlog.log(found_msg.format(from_text), mlog.green('YES'))

    def compilers_detect(self):
        "Detect Qt (4 or 5) moc, uic, rcc in the specified bindir or in PATH"
        if self.bindir:
            moc = ExternalProgram(os.path.join(self.bindir, 'moc'), silent=True)
            uic = ExternalProgram(os.path.join(self.bindir, 'uic'), silent=True)
            rcc = ExternalProgram(os.path.join(self.bindir, 'rcc'), silent=True)
            lrelease = ExternalProgram(os.path.join(self.bindir, 'lrelease'), silent=True)
        else:
            # We don't accept unsuffixed 'moc', 'uic', and 'rcc' because they
            # are sometimes older, or newer versions.
            moc = ExternalProgram('moc-' + self.name, silent=True)
            uic = ExternalProgram('uic-' + self.name, silent=True)
            rcc = ExternalProgram('rcc-' + self.name, silent=True)
            lrelease = ExternalProgram('lrelease-' + self.name, silent=True)
        return moc, uic, rcc, lrelease

    def _pkgconfig_detect(self, mods, kwargs):
        # We set the value of required to False so that we can try the
        # qmake-based fallback if pkg-config fails.
        kwargs['required'] = False
        modules = OrderedDict()
        for module in mods:
            modules[module] = PkgConfigDependency(self.qtpkgname + module, self.env,
                                                  kwargs, language=self.language)
        for m in modules.values():
            if not m.found():
                self.is_found = False
                return
            self.compile_args += m.get_compile_args()
            self.link_args += m.get_link_args()
        self.is_found = True
        # NOTE(review): version taken from the last module checked —
        # presumably all modules share one Qt version.
        self.version = m.version
        # Try to detect moc, uic, rcc
        if 'Core' in modules:
            core = modules['Core']
        else:
            corekwargs = {'required': 'false', 'silent': 'true'}
            core = PkgConfigDependency(self.qtpkgname + 'Core', self.env, corekwargs,
                                       language=self.language)
        # Used by self.compilers_detect()
        self.bindir = self.get_pkgconfig_host_bins(core)
        if not self.bindir:
            # If exec_prefix is not defined, the pkg-config file is broken
            prefix = core.get_pkgconfig_variable('exec_prefix', {})
            if prefix:
                self.bindir = os.path.join(prefix, 'bin')

    def _find_qmake(self, qmake):
        # Even when cross-compiling, if we don't get a cross-info qmake, we
        # fallback to using the qmake in PATH because that's what we used to do
        if self.env.is_cross_build():
            qmake = self.env.cross_info.config['binaries'].get('qmake', qmake)
        return ExternalProgram(qmake, silent=True)

    def _qmake_detect(self, mods, kwargs):
        """Detect Qt via `qmake -query`; returns the qmake name used."""
        for qmake in ('qmake-' + self.name, 'qmake'):
            self.qmake = self._find_qmake(qmake)
            if not self.qmake.found():
                continue
            # Check that the qmake is for qt5
            pc, stdo = Popen_safe(self.qmake.get_command() + ['-v'])[0:2]
            if pc.returncode != 0:
                continue
            if not 'Qt version ' + self.qtver in stdo:
                mlog.log('QMake is not for ' + self.qtname)
                continue
            # Found qmake for Qt5!
            break
        else:
            # Didn't find qmake :(
            self.is_found = False
            return
        self.version = re.search(self.qtver + '(\.\d+)+', stdo).group(0)
        # Query library path, header path, and binary path
        mlog.log("Found qmake:", mlog.bold(self.qmake.get_name()), '(%s)' % self.version)
        stdo = Popen_safe(self.qmake.get_command() + ['-query'])[1]
        qvars = {}
        # `qmake -query` prints 'KEY:value' lines.
        for line in stdo.split('\n'):
            line = line.strip()
            if line == '':
                continue
            (k, v) = tuple(line.split(':', 1))
            qvars[k] = v
        if mesonlib.is_osx():
            return self._framework_detect(qvars, mods, kwargs)
        incdir = qvars['QT_INSTALL_HEADERS']
        self.compile_args.append('-I' + incdir)
        libdir = qvars['QT_INSTALL_LIBS']
        # Used by self.compilers_detect()
        self.bindir = self.get_qmake_host_bins(qvars)
        self.is_found = True
        for module in mods:
            mincdir = os.path.join(incdir, 'Qt' + module)
            self.compile_args.append('-I' + mincdir)
            if for_windows(self.env.is_cross_build(), self.env):
                # Windows debug libraries carry a 'd' suffix.
                is_debug = self.env.cmd_line_options.buildtype.startswith('debug')
                dbg = 'd' if is_debug else ''
                if self.qtver == '4':
                    base_name = 'Qt' + module + dbg + '4'
                else:
                    base_name = 'Qt5' + module + dbg
                libfile = os.path.join(libdir, base_name + '.lib')
                if not os.path.isfile(libfile):
                    # MinGW can link directly to .dll
                    libfile = os.path.join(self.bindir, base_name + '.dll')
                    if not os.path.isfile(libfile):
                        self.is_found = False
                        break
            else:
                libfile = os.path.join(libdir, 'lib{}{}.so'.format(self.qtpkgname, module))
                if not os.path.isfile(libfile):
                    self.is_found = False
                    break
            self.link_args.append(libfile)
        return qmake

    def _framework_detect(self, qvars, modules, kwargs):
        # OSX: each Qt module is a framework under QT_INSTALL_LIBS.
        libdir = qvars['QT_INSTALL_LIBS']
        for m in modules:
            fname = 'Qt' + m
            fwdep = ExtraFrameworkDependency(fname, False, libdir, self.env,
                                             self.language, kwargs)
            self.compile_args.append('-F' + libdir)
            if fwdep.found():
                self.is_found = True
                self.compile_args += fwdep.get_compile_args()
                self.link_args += fwdep.get_link_args()
        # Used by self.compilers_detect()
        self.bindir = self.get_qmake_host_bins(qvars)

    def get_qmake_host_bins(self, qvars):
        # Prefer QT_HOST_BINS (qt5, correct for cross and native compiling)
        # but fall back to QT_INSTALL_BINS (qt4)
        if 'QT_HOST_BINS' in qvars:
            return qvars['QT_HOST_BINS']
        else:
            return qvars['QT_INSTALL_BINS']

    def get_methods(self):
        return [DependencyMethods.PKGCONFIG, DependencyMethods.QMAKE]

    def get_exe_args(self, compiler):
        # Originally this was -fPIE but nowadays the default
        # for upstream and distros seems to be -reduce-relocations
        # which requires -fPIC. This may cause a performance
        # penalty when using self-built Qt or on platforms
        # where -fPIC is not required. If this is an issue
        # for you, patches are welcome.
        return compiler.get_pic_args()
+
+
class Qt4Dependency(QtBaseDependency):
    """Qt4 flavour of the shared Qt detection logic."""

    def __init__(self, env, kwargs):
        super().__init__('qt4', env, kwargs)

    def get_pkgconfig_host_bins(self, core):
        # Only return one bins dir, because the tools are generally all in one
        # directory for Qt4, in Qt5, they must all be in one directory. Return
        # the first one found among the bin variables, in case one tool is not
        # configured to be built.
        for tool in ('moc', 'uic', 'rcc', 'lupdate', 'lrelease'):
            try:
                location = core.get_pkgconfig_variable('%s_location' % tool, {})
                return os.path.dirname(location)
            except MesonException:
                pass
+
+
class Qt5Dependency(QtBaseDependency):
    """Qt5 flavour of the shared Qt detection logic."""

    def __init__(self, env, kwargs):
        super().__init__('qt5', env, kwargs)

    def get_pkgconfig_host_bins(self, core):
        # Qt5 pkg-config files export the tool directory as 'host_bins'.
        return core.get_pkgconfig_variable('host_bins', {})
+
+
# There are three different ways of depending on SDL2:
# sdl2-config, pkg-config and OSX framework
class SDL2Dependency(ExternalDependency):
    """SDL2 dependency.

    Detection order: pkg-config, then sdl2-config, then (on OSX) the
    system framework. Each method is best-effort; failures fall through
    to the next one.
    """

    def __init__(self, environment, kwargs):
        super().__init__('sdl2', environment, None, kwargs)
        # Sub-dependencies must not raise on failure, so we can try the
        # next detection method ourselves.
        kwargs['required'] = False
        if DependencyMethods.PKGCONFIG in self.methods:
            try:
                pcdep = PkgConfigDependency('sdl2', environment, kwargs)
                if pcdep.found():
                    self.type_name = 'pkgconfig'
                    self.is_found = True
                    self.compile_args = pcdep.get_compile_args()
                    self.link_args = pcdep.get_link_args()
                    self.version = pcdep.get_version()
                    return
            except Exception as e:
                mlog.debug('SDL 2 not found via pkgconfig. Trying next, error was:', str(e))
        if DependencyMethods.CONFIG_TOOL in self.methods:
            try:
                ctdep = ConfigToolDependency.factory(
                    'sdl2', environment, None, kwargs, ['sdl2-config'], 'sdl2-config')
                if ctdep.found():
                    self.type_name = 'config-tool'
                    self.config = ctdep.config
                    self.version = ctdep.version
                    self.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args')
                    # FIX: this was assigned to a misspelled 'links_args'
                    # attribute, so the linker flags from sdl2-config were
                    # silently dropped.
                    self.link_args = ctdep.get_config_value(['--libs'], 'link_args')
                    self.is_found = True
                    return
            except Exception as e:
                mlog.debug('SDL 2 not found via sdl2-config. Trying next, error was:', str(e))
        if DependencyMethods.EXTRAFRAMEWORK in self.methods:
            if mesonlib.is_osx():
                fwdep = ExtraFrameworkDependency('sdl2', False, None, self.env,
                                                 self.language, kwargs)
                if fwdep.found():
                    self.is_found = True
                    self.compile_args = fwdep.get_compile_args()
                    self.link_args = fwdep.get_link_args()
                    self.version = '2'  # FIXME
                    return
            mlog.log('Dependency', mlog.bold('sdl2'), 'found:', mlog.red('NO'))

    def get_methods(self):
        if mesonlib.is_osx():
            return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.EXTRAFRAMEWORK]
        else:
            return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL]
+
+
class WxDependency(ConfigToolDependency):
    """wxWidgets dependency, configured via wx-config."""

    tools = ['wx-config-3.0', 'wx-config']
    tool_name = 'wx-config'

    def __init__(self, environment, kwargs):
        super().__init__('WxWidgets', environment, None, kwargs)
        if not self.is_found:
            return
        self.requested_modules = self.get_requested(kwargs)
        # wx-config seems to have a cflags as well but since it requires C++,
        # this should be good, at least for now.
        self.compile_args = self.get_config_value(['--cxxflags'], 'compile_args')
        self.link_args = self.get_config_value(['--libs'], 'link_args')

    def get_requested(self, kwargs):
        """Validate and return the 'modules' kwarg as a list of strings."""
        if 'modules' not in kwargs:
            return []
        modules = extract_as_list(kwargs, 'modules')
        if any(not isinstance(module, str) for module in modules):
            raise DependencyException('wxwidgets module argument is not a string')
        return modules
+
+
class VulkanDependency(ExternalDependency):
    """Vulkan dependency.

    Detection order: pkg-config, then an explicit VULKAN_SDK environment
    variable, then a plain system search for the library and header.
    """

    def __init__(self, environment, kwargs):
        super().__init__('vulkan', environment, None, kwargs)

        if DependencyMethods.PKGCONFIG in self.methods:
            try:
                pcdep = PkgConfigDependency('vulkan', environment, kwargs)
                if pcdep.found():
                    self.type_name = 'pkgconfig'
                    self.is_found = True
                    self.compile_args = pcdep.get_compile_args()
                    self.link_args = pcdep.get_link_args()
                    self.version = pcdep.get_version()
                    return
            except Exception:
                pass

        if DependencyMethods.SYSTEM in self.methods:
            try:
                self.vulkan_sdk = os.environ['VULKAN_SDK']
                if not os.path.isabs(self.vulkan_sdk):
                    raise DependencyException('VULKAN_SDK must be an absolute path.')
            except KeyError:
                self.vulkan_sdk = None

            if self.vulkan_sdk:
                # TODO: this config might not work on some platforms, fix bugs as reported
                # we should at least detect other 64-bit platforms (e.g. armv8)
                lib_name = 'vulkan'
                if mesonlib.is_windows():
                    lib_name = 'vulkan-1'
                    lib_dir = 'Lib32'
                    inc_dir = 'Include'
                    if detect_cpu({}) == 'x86_64':
                        lib_dir = 'Lib'
                else:
                    lib_name = 'vulkan'
                    lib_dir = 'lib'
                    inc_dir = 'include'

                # make sure header and lib are valid
                inc_path = os.path.join(self.vulkan_sdk, inc_dir)
                header = os.path.join(inc_path, 'vulkan', 'vulkan.h')
                lib_path = os.path.join(self.vulkan_sdk, lib_dir)
                find_lib = self.compiler.find_library(lib_name, environment, lib_path)

                if not find_lib:
                    raise DependencyException('VULKAN_SDK point to invalid directory (no lib)')

                if not os.path.isfile(header):
                    raise DependencyException('VULKAN_SDK point to invalid directory (no include)')

                self.type_name = 'vulkan_sdk'
                self.is_found = True
                self.compile_args.append('-I' + inc_path)
                self.link_args.append('-L' + lib_path)
                self.link_args.append('-l' + lib_name)

                # TODO: find a way to retrieve the version from the sdk?
                # Usually it is a part of the path to it (but does not have to be)
                self.version = '1'
                return
            else:
                # simply try to guess it, usually works on linux
                libs = self.compiler.find_library('vulkan', environment, [])
                if libs is not None and self.compiler.has_header('vulkan/vulkan.h', '', environment):
                    self.type_name = 'system'
                    self.is_found = True
                    # FIX: this was the integer 1; every other detection path
                    # (and version comparison) expects version to be a string.
                    self.version = '1'  # TODO
                    for lib in libs:
                        self.link_args.append(lib)
                    return

    def get_methods(self):
        return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM]
--- /dev/null
+# Copyright 2012-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import configparser, os, platform, re, shlex, shutil, subprocess
+
+from . import coredata
+from .linkers import ArLinker, VisualStudioLinker
+from . import mesonlib
+from .mesonlib import EnvironmentException, Popen_safe
+from . import mlog
+import sys
+
+from . import compilers
+from .compilers import (
+ CLANG_OSX,
+ CLANG_STANDARD,
+ CLANG_WIN,
+ GCC_CYGWIN,
+ GCC_MINGW,
+ GCC_OSX,
+ GCC_STANDARD,
+ ICC_STANDARD,
+ is_assembly,
+ is_header,
+ is_library,
+ is_llvm_ir,
+ is_object,
+ is_source,
+)
+from .compilers import (
+ ClangCCompiler,
+ ClangCPPCompiler,
+ ClangObjCCompiler,
+ ClangObjCPPCompiler,
+ G95FortranCompiler,
+ GnuCCompiler,
+ GnuCPPCompiler,
+ GnuFortranCompiler,
+ GnuObjCCompiler,
+ GnuObjCPPCompiler,
+ IntelCCompiler,
+ IntelCPPCompiler,
+ IntelFortranCompiler,
+ JavaCompiler,
+ MonoCompiler,
+ NAGFortranCompiler,
+ Open64FortranCompiler,
+ PathScaleFortranCompiler,
+ PGIFortranCompiler,
+ RustCompiler,
+ SunFortranCompiler,
+ ValaCompiler,
+ VisualStudioCCompiler,
+ VisualStudioCPPCompiler,
+)
+
+build_filename = 'meson.build'
+
+# Environment variables that each lang uses.
+cflags_mapping = {'c': 'CFLAGS',
+ 'cpp': 'CXXFLAGS',
+ 'objc': 'OBJCFLAGS',
+ 'objcpp': 'OBJCXXFLAGS',
+ 'fortran': 'FFLAGS',
+ 'd': 'DFLAGS',
+ 'vala': 'VALAFLAGS'}
+
+
def find_coverage_tools():
    """Locate the coverage tools; each slot is None when not runnable."""
    def probe(exe):
        # A tool counts as present only if it answers '--version'.
        return exe if mesonlib.exe_exists([exe, '--version']) else None

    return probe('gcovr'), probe('lcov'), probe('genhtml')
+
def detect_ninja(version='1.5', log=False):
    """Return the name of a ninja binary of at least *version*, else None."""
    for candidate in ['ninja', 'ninja-build']:
        try:
            proc, out = Popen_safe([candidate, '--version'])[0:2]
        except (FileNotFoundError, PermissionError):
            # Doesn't exist in PATH or isn't executable
            continue
        out = out.strip()
        # Perhaps we should add a way for the caller to know the failure mode
        # (not found or too old)
        if proc.returncode != 0 or not mesonlib.version_compare(out, '>=' + version):
            continue
        if log:
            mlog.log('Found ninja-{} at {}'.format(out, shlex.quote(shutil.which(candidate))))
        return candidate
+
def detect_native_windows_arch():
    """
    The architecture of Windows itself: x86 or amd64
    """
    # These env variables are always available. See:
    # https://msdn.microsoft.com/en-us/library/aa384274(VS.85).aspx
    # https://blogs.msdn.microsoft.com/david.wang/2006/03/27/howto-detect-process-bitness/
    arch = os.environ.get('PROCESSOR_ARCHITEW6432', '').lower()
    if arch:
        return arch
    try:
        # If this doesn't exist, something is messing with the environment
        return os.environ['PROCESSOR_ARCHITECTURE'].lower()
    except KeyError:
        raise EnvironmentException('Unable to detect native OS architecture')
+
def detect_windows_arch(compilers):
    """
    Detecting the 'native' architecture of Windows is not a trivial task. We
    cannot trust that the architecture that Python is built for is the 'native'
    one because you can run 32-bit apps on 64-bit Windows using WOW64 and
    people sometimes install 32-bit Python on 64-bit Windows.

    We also can't rely on the architecture of the OS itself, since it's
    perfectly normal to compile and run 32-bit applications on Windows as if
    they were native applications. It's a terrible experience to require the
    user to supply a cross-info file to compile 32-bit applications on 64-bit
    Windows. Thankfully, the only way to compile things with Visual Studio on
    Windows is by entering the 'msvc toolchain' environment, which can be
    easily detected.

    In the end, the sanest method is as follows:
    1. Check if we're in an MSVC toolchain environment, and if so, return the
       MSVC toolchain architecture as our 'native' architecture.
    2. If not, check environment variables that are set by Windows and WOW64 to
       find out the architecture that Windows is built for, and use that as our
       'native' architecture.
    """
    os_arch = detect_native_windows_arch()
    if os_arch != 'amd64':
        # 32-bit Windows can't run 64-bit apps, so nothing more to decide.
        return os_arch
    # If we're on 64-bit Windows, 32-bit apps can be compiled without
    # cross-compilation. So if we're doing that, just set the native arch as
    # 32-bit and pretend like we're running under WOW64. Else, return the
    # actual Windows architecture that we deduced above.
    for compiler in compilers.values():
        # Check if we're using and inside an MSVC toolchain environment
        if compiler.id == 'msvc' and 'VCINSTALLDIR' in os.environ:
            # 'Platform' is only set when the target arch is not 'x86'.
            # It's 'x64' when targeting x86_64 and 'arm' when targeting ARM.
            platform = os.environ.get('Platform', 'x86').lower()
            if platform == 'x86':
                return platform
        # A GCC that targets 32-bit x86 also makes the 'native' arch 32-bit.
        if compiler.id == 'gcc' and compiler.has_builtin_define('__i386__'):
            return 'x86'
    return os_arch
+
def detect_cpu_family(compilers):
    """
    Python is inconsistent in its platform module.
    It returns different values for the same cpu.
    For x86 it might return 'x86', 'i686' or somesuch.
    Do some canonicalization.
    """
    if mesonlib.is_windows():
        trial = detect_windows_arch(compilers)
    else:
        trial = platform.machine().lower()
    # Canonicalize the many spellings of 32-bit x86 ('i386', 'i686', ...).
    if trial.startswith('i') and trial.endswith('86'):
        return 'x86'
    if trial.startswith('arm'):
        return 'arm'
    if trial in ('amd64', 'x64'):
        trial = 'x86_64'
    if trial == 'x86_64':
        # On Linux (and maybe others) there can be any mixture of 32/64 bit
        # code in the kernel, Python, system etc. The only reliable way
        # to know is to check the compiler defines.
        for c in compilers.values():
            try:
                if c.has_builtin_define('__i386__'):
                    return 'x86'
            except mesonlib.MesonException:
                # Ignore compilers that do not support has_builtin_define.
                pass
        return 'x86_64'
    # Add fixes here as bugs are reported.
    return trial
+
def detect_cpu(compilers):
    """Return a canonical CPU name, e.g. 'x86_64' or 'i686'."""
    if mesonlib.is_windows():
        arch = detect_windows_arch(compilers)
    else:
        arch = platform.machine().lower()
    if arch in ('amd64', 'x64', 'x86_64'):
        # Same check as above for cpu_family: a compiler targeting 32-bit
        # overrides whatever the platform module claims.
        for comp in compilers.values():
            try:
                if comp.has_builtin_define('__i386__'):
                    # All 64 bit cpus have at least this level of x86 support.
                    return 'i686'
            except mesonlib.MesonException:
                pass
        return 'x86_64'
    # Add fixes here as bugs are reported.
    return arch
+
def detect_system():
    """Return the lowercased OS name, collapsing Cygwin variants."""
    # Cygwin reports itself as e.g. 'cygwin_nt-10.0'; normalize that.
    name = platform.system().lower()
    return 'cygwin' if name.startswith('cygwin') else name
+
def search_version(text):
    """Extract a dotted version number from *text*, or 'unknown version'.

    Usually of the type 4.1.4 but compiler output may contain
    stuff like this:
    (Sourcery CodeBench Lite 2014.05-29) 4.8.3 20140320 (prerelease)
    Limiting major version number to two digits seems to work
    thus far. When we get to GCC 100, this will break, but
    if we are still relevant when that happens, it can be
    considered an achievement in itself.

    This regex is reaching magic levels. If it ever needs
    to be updated, do not complexify but convert to something
    saner instead.
    """
    version_regex = '(?<!(\d|\.))(\d{1,2}(\.\d+)+(-[a-zA-Z0-9]+)?)'
    found = re.search(version_regex, text)
    return found.group(0) if found else 'unknown version'
+
class Environment:
    # Well-known directory names created inside the build dir.
    private_dir = 'meson-private'
    log_dir = 'meson-logs'
    # Pickled CoreData is stored in this file under the build dir.
    coredata_file = os.path.join(private_dir, 'coredata.dat')
+ def __init__(self, source_dir, build_dir, main_script_launcher, options, original_cmd_line_args):
+ self.source_dir = source_dir
+ self.build_dir = build_dir
+ self.meson_script_launcher = main_script_launcher
+ self.scratch_dir = os.path.join(build_dir, Environment.private_dir)
+ self.log_dir = os.path.join(build_dir, Environment.log_dir)
+ os.makedirs(self.scratch_dir, exist_ok=True)
+ os.makedirs(self.log_dir, exist_ok=True)
+ try:
+ cdf = os.path.join(self.get_build_dir(), Environment.coredata_file)
+ self.coredata = coredata.load(cdf)
+ self.first_invocation = False
+ except FileNotFoundError:
+ # WARNING: Don't use any values from coredata in __init__. It gets
+ # re-initialized with project options by the interpreter during
+ # build file parsing.
+ self.coredata = coredata.CoreData(options)
+ self.coredata.meson_script_launcher = self.meson_script_launcher
+ self.first_invocation = True
+ if self.coredata.cross_file:
+ self.cross_info = CrossBuildInfo(self.coredata.cross_file)
+ else:
+ self.cross_info = None
+ self.cmd_line_options = options
+ self.original_cmd_line_args = original_cmd_line_args
+
+ # List of potential compilers.
+ if mesonlib.is_windows():
+ self.default_c = ['cl', 'cc', 'gcc', 'clang']
+ self.default_cpp = ['cl', 'c++', 'g++', 'clang++']
+ else:
+ self.default_c = ['cc', 'gcc', 'clang']
+ self.default_cpp = ['c++', 'g++', 'clang++']
+ self.default_objc = ['cc']
+ self.default_objcpp = ['c++']
+ self.default_fortran = ['gfortran', 'g95', 'f95', 'f90', 'f77', 'ifort']
+ self.default_static_linker = ['ar']
+ self.vs_static_linker = ['lib']
+ self.gcc_static_linker = ['gcc-ar']
+ self.clang_static_linker = ['llvm-ar']
+
+ # Various prefixes and suffixes for import libraries, shared libraries,
+ # static libraries, and executables.
+ # Versioning is added to these names in the backends as-needed.
+ cross = self.is_cross_build()
+ if (not cross and mesonlib.is_windows()) \
+ or (cross and self.cross_info.has_host() and self.cross_info.config['host_machine']['system'] == 'windows'):
+ self.exe_suffix = 'exe'
+ self.object_suffix = 'obj'
+ self.win_libdir_layout = True
+ elif (not cross and mesonlib.is_cygwin()) \
+ or (cross and self.cross_info.has_host() and self.cross_info.config['host_machine']['system'] == 'cygwin'):
+ self.exe_suffix = 'exe'
+ self.object_suffix = 'o'
+ self.win_libdir_layout = True
+ else:
+ self.exe_suffix = ''
+ self.object_suffix = 'o'
+ self.win_libdir_layout = False
+ if 'STRIP' in os.environ:
+ self.native_strip_bin = shlex.split('STRIP')
+ else:
+ self.native_strip_bin = ['strip']
+
    def is_cross_build(self):
        # A cross build is in effect exactly when a cross file was loaded.
        return self.cross_info is not None

    def dump_coredata(self):
        # Persist coredata into the build dir; returns the path written.
        cdf = os.path.join(self.get_build_dir(), Environment.coredata_file)
        coredata.save(self.coredata, cdf)
        return cdf
+
    def get_script_dir(self):
        # Directory containing Meson's bundled helper scripts.
        import mesonbuild.scripts
        return os.path.dirname(mesonbuild.scripts.__file__)

    def get_log_dir(self):
        return self.log_dir

    def get_coredata(self):
        return self.coredata

    def get_build_command(self, unbuffered=False):
        # Return a copy so callers can append arguments freely.
        cmd = mesonlib.meson_command[:]
        if unbuffered and 'python' in cmd[0]:
            # -u must go right after the interpreter, before the script path.
            cmd.insert(1, '-u')
        return cmd
+
    # Thin wrappers over the filename-classification helpers imported from
    # ..compilers, exposed as methods for convenience.
    def is_header(self, fname):
        return is_header(fname)

    def is_source(self, fname):
        return is_source(fname)

    def is_assembly(self, fname):
        return is_assembly(fname)

    def is_llvm_ir(self, fname):
        return is_llvm_ir(fname)

    def is_object(self, fname):
        return is_object(fname)

    def is_library(self, fname):
        return is_library(fname)
+
+ def had_argument_for(self, option):
+ trial1 = '--' + option
+ trial2 = '-D' + option
+ previous_is_plaind = False
+ for i in self.original_cmd_line_args:
+ if i.startswith(trial1) or i.startswith(trial2):
+ return True
+ if previous_is_plaind and i.startswith(option):
+ return True
+ previous_is_plaind = i == '-D'
+ return False
+
+ def merge_options(self, options):
+ for (name, value) in options.items():
+ if name not in self.coredata.user_options:
+ self.coredata.user_options[name] = value
+ else:
+ oldval = self.coredata.user_options[name]
+ if type(oldval) != type(value):
+ self.coredata.user_options[name] = value
+
+ @staticmethod
+ def get_gnu_compiler_defines(compiler):
+ """
+ Detect GNU compiler platform type (Apple, MinGW, Unix)
+ """
+ # Arguments to output compiler pre-processor defines to stdout
+ # gcc, g++, and gfortran all support these arguments
+ args = compiler + ['-E', '-dM', '-']
+ p, output, error = Popen_safe(args, write='', stdin=subprocess.PIPE)
+ if p.returncode != 0:
+ raise EnvironmentException('Unable to detect GNU compiler type:\n' + output + error)
+ # Parse several lines of the type:
+ # `#define ___SOME_DEF some_value`
+ # and extract `___SOME_DEF`
+ defines = {}
+ for line in output.split('\n'):
+ if not line:
+ continue
+ d, *rest = line.split(' ', 2)
+ if d != '#define':
+ continue
+ if len(rest) == 1:
+ defines[rest] = True
+ if len(rest) == 2:
+ defines[rest[0]] = rest[1]
+ return defines
+
+ @staticmethod
+ def get_gnu_version_from_defines(defines):
+ dot = '.'
+ major = defines.get('__GNUC__', '0')
+ minor = defines.get('__GNUC_MINOR__', '0')
+ patch = defines.get('__GNUC_PATCHLEVEL__', '0')
+ return dot.join((major, minor, patch))
+
+ @staticmethod
+ def get_gnu_compiler_type(defines):
+ # Detect GCC type (Apple, MinGW, Cygwin, Unix)
+ if '__APPLE__' in defines:
+ return GCC_OSX
+ elif '__MINGW32__' in defines or '__MINGW64__' in defines:
+ return GCC_MINGW
+ elif '__CYGWIN__' in defines:
+ return GCC_CYGWIN
+ return GCC_STANDARD
+
    def _get_compilers(self, lang, evar, want_cross):
        '''
        The list of compilers is detected in the exact same way for
        C, C++, ObjC, ObjC++, Fortran so consolidate it here.

        Returns (compilers, ccache, is_cross, exe_wrap) where compilers is a
        list of compiler 'choices' (each itself a command list).
        '''
        # Precedence: cross file (when cross-compiling) > environment
        # variable (e.g. CC) > built-in defaults.
        if self.is_cross_build() and want_cross:
            compilers = mesonlib.stringlistify(self.cross_info.config['binaries'][lang])
            # Ensure ccache exists and remove it if it doesn't
            if compilers[0] == 'ccache':
                compilers = compilers[1:]
                ccache = self.detect_ccache()
            else:
                ccache = []
            # Return value has to be a list of compiler 'choices'
            compilers = [compilers]
            is_cross = True
            if self.cross_info.need_exe_wrapper():
                exe_wrap = self.cross_info.config['binaries'].get('exe_wrapper', None)
            else:
                # NOTE(review): empty list here vs None in the native
                # branches below — presumably meaning "no wrapper needed"
                # vs "not cross"; confirm against the callers.
                exe_wrap = []
        elif evar in os.environ:
            compilers = shlex.split(os.environ[evar])
            # Ensure ccache exists and remove it if it doesn't
            if compilers[0] == 'ccache':
                compilers = compilers[1:]
                ccache = self.detect_ccache()
            else:
                ccache = []
            # Return value has to be a list of compiler 'choices'
            compilers = [compilers]
            is_cross = False
            exe_wrap = None
        else:
            compilers = getattr(self, 'default_' + lang)
            ccache = self.detect_ccache()
            is_cross = False
            exe_wrap = None
        return compilers, ccache, is_cross, exe_wrap
+
+ def _handle_exceptions(self, exceptions, binaries, bintype='compiler'):
+ errmsg = 'Unknown {}(s): {}'.format(bintype, binaries)
+ if exceptions:
+ errmsg += '\nThe follow exceptions were encountered:'
+ for (c, e) in exceptions.items():
+ errmsg += '\nRunning "{0}" gave "{1}"'.format(c, e)
+ raise EnvironmentException(errmsg)
+
    def _detect_c_or_cpp_compiler(self, lang, evar, want_cross):
        """Shared C/C++ detection: run each candidate command, classify it
        from its version banner, and return the matching compiler object
        (GNU, Clang, MSVC or Intel). Raises via _handle_exceptions when
        every candidate errored out."""
        popen_exceptions = {}
        compilers, ccache, is_cross, exe_wrap = self._get_compilers(lang, evar, want_cross)
        for compiler in compilers:
            if isinstance(compiler, str):
                compiler = [compiler]
            if 'cl' in compiler or 'cl.exe' in compiler:
                # Watcom C provides it's own cl.exe clone that mimics an older
                # version of Microsoft's compiler. Since Watcom's cl.exe is
                # just a wrapper, we skip using it if we detect its presence
                # so as not to confuse Meson when configuring for MSVC.
                #
                # Additionally the help text of Watcom's cl.exe is paged, and
                # the binary will not exit without human intervention. In
                # practice, Meson will block waiting for Watcom's cl.exe to
                # exit, which requires user input and thus will never exit.
                if 'WATCOM' in os.environ:
                    def sanitize(p):
                        return os.path.normcase(os.path.abspath(p))

                    watcom_cls = [sanitize(os.path.join(os.environ['WATCOM'], 'BINNT', 'cl')),
                                  sanitize(os.path.join(os.environ['WATCOM'], 'BINNT', 'cl.exe'))]
                    found_cl = sanitize(shutil.which('cl'))
                    if found_cl in watcom_cls:
                        continue
                # cl.exe has no --version; /? prints a usable banner instead.
                arg = '/?'
            else:
                arg = '--version'
            try:
                p, out, err = Popen_safe(compiler + [arg])
            except OSError as e:
                popen_exceptions[' '.join(compiler + [arg])] = e
                continue
            version = search_version(out)
            if 'Free Software Foundation' in out:
                # GNU: the pre-processor defines both classify the compiler
                # and give a more precise version than the banner.
                defines = self.get_gnu_compiler_defines(compiler)
                if not defines:
                    popen_exceptions[' '.join(compiler)] = 'no pre-processor defines'
                    continue
                gtype = self.get_gnu_compiler_type(defines)
                version = self.get_gnu_version_from_defines(defines)
                cls = GnuCCompiler if lang == 'c' else GnuCPPCompiler
                return cls(ccache + compiler, version, gtype, is_cross, exe_wrap, defines)
            if 'clang' in out:
                if 'Apple' in out or mesonlib.for_darwin(want_cross, self):
                    cltype = CLANG_OSX
                elif 'windows' in out or mesonlib.for_windows(want_cross, self):
                    cltype = CLANG_WIN
                else:
                    cltype = CLANG_STANDARD
                cls = ClangCCompiler if lang == 'c' else ClangCPPCompiler
                return cls(ccache + compiler, version, cltype, is_cross, exe_wrap)
            if 'Microsoft' in out or 'Microsoft' in err:
                # Latest versions of Visual Studio print version
                # number to stderr but earlier ones print version
                # on stdout. Why? Lord only knows.
                # Check both outputs to figure out version.
                version = search_version(err)
                if version == 'unknown version':
                    version = search_version(out)
                if version == 'unknown version':
                    m = 'Failed to detect MSVC compiler arch: stderr was\n{!r}'
                    raise EnvironmentException(m.format(err))
                is_64 = err.split('\n')[0].endswith(' x64')
                cls = VisualStudioCCompiler if lang == 'c' else VisualStudioCPPCompiler
                return cls(compiler, version, is_cross, exe_wrap, is_64)
            if '(ICC)' in out:
                # TODO: add microsoft add check OSX
                inteltype = ICC_STANDARD
                cls = IntelCCompiler if lang == 'c' else IntelCPPCompiler
                return cls(ccache + compiler, version, inteltype, is_cross, exe_wrap)
        self._handle_exceptions(popen_exceptions, compilers)
+
    def detect_c_compiler(self, want_cross):
        """Detect the C compiler, honouring the CC environment variable
        and the cross file when want_cross is True."""
        return self._detect_c_or_cpp_compiler('c', 'CC', want_cross)
+
    def detect_cpp_compiler(self, want_cross):
        """Detect the C++ compiler, honouring the CXX environment variable
        and the cross file when want_cross is True."""
        return self._detect_c_or_cpp_compiler('cpp', 'CXX', want_cross)
+
    def detect_fortran_compiler(self, want_cross):
        """Detect the Fortran compiler (FC environment variable or cross
        file), classifying it by its --version / -V banner."""
        popen_exceptions = {}
        compilers, ccache, is_cross, exe_wrap = self._get_compilers('fortran', 'FC', want_cross)
        # NOTE(review): unlike the C/C++ path, 'ccache' is fetched above but
        # never prefixed to the returned command — confirm this is intended.
        for compiler in compilers:
            if isinstance(compiler, str):
                compiler = [compiler]
            # Some compilers only accept -V, so try both flags.
            for arg in ['--version', '-V']:
                try:
                    p, out, err = Popen_safe(compiler + [arg])
                except OSError as e:
                    popen_exceptions[' '.join(compiler + [arg])] = e
                    continue

                version = search_version(out)

                if 'GNU Fortran' in out:
                    # GNU: use pre-processor defines for type and version.
                    defines = self.get_gnu_compiler_defines(compiler)
                    if not defines:
                        popen_exceptions[' '.join(compiler)] = 'no pre-processor defines'
                        continue
                    gtype = self.get_gnu_compiler_type(defines)
                    version = self.get_gnu_version_from_defines(defines)
                    return GnuFortranCompiler(compiler, version, gtype, is_cross, exe_wrap, defines)

                if 'G95' in out:
                    return G95FortranCompiler(compiler, version, is_cross, exe_wrap)

                # Some vendors print their banner to stderr instead of stdout.
                if 'Sun Fortran' in err:
                    version = search_version(err)
                    return SunFortranCompiler(compiler, version, is_cross, exe_wrap)

                if 'ifort (IFORT)' in out:
                    return IntelFortranCompiler(compiler, version, is_cross, exe_wrap)

                if 'PathScale EKOPath(tm)' in err:
                    return PathScaleFortranCompiler(compiler, version, is_cross, exe_wrap)

                if 'PGI Compilers' in out:
                    return PGIFortranCompiler(compiler, version, is_cross, exe_wrap)

                if 'Open64 Compiler Suite' in err:
                    return Open64FortranCompiler(compiler, version, is_cross, exe_wrap)

                if 'NAG Fortran' in err:
                    return NAGFortranCompiler(compiler, version, is_cross, exe_wrap)
        self._handle_exceptions(popen_exceptions, compilers)
+
    def get_scratch_dir(self):
        # Private directory for Meson's own temporary/bookkeeping files.
        return self.scratch_dir
+
+ def get_depfixer(self):
+ path = os.path.split(__file__)[0]
+ return os.path.join(path, 'depfixer.py')
+
+ def detect_objc_compiler(self, want_cross):
+ popen_exceptions = {}
+ compilers, ccache, is_cross, exe_wrap = self._get_compilers('objc', 'OBJC', want_cross)
+ for compiler in compilers:
+ if isinstance(compiler, str):
+ compiler = [compiler]
+ arg = ['--version']
+ try:
+ p, out, err = Popen_safe(compiler + arg)
+ except OSError as e:
+ popen_exceptions[' '.join(compiler + arg)] = e
+ version = search_version(out)
+ if 'Free Software Foundation' in out:
+ defines = self.get_gnu_compiler_defines(compiler)
+ if not defines:
+ popen_exceptions[' '.join(compiler)] = 'no pre-processor defines'
+ continue
+ gtype = self.get_gnu_compiler_type(defines)
+ version = self.get_gnu_version_from_defines(defines)
+ return GnuObjCCompiler(ccache + compiler, version, gtype, is_cross, exe_wrap, defines)
+ if out.startswith('Apple LLVM'):
+ return ClangObjCCompiler(ccache + compiler, version, CLANG_OSX, is_cross, exe_wrap)
+ if out.startswith('clang'):
+ return ClangObjCCompiler(ccache + compiler, version, CLANG_STANDARD, is_cross, exe_wrap)
+ self._handle_exceptions(popen_exceptions, compilers)
+
+ def detect_objcpp_compiler(self, want_cross):
+ popen_exceptions = {}
+ compilers, ccache, is_cross, exe_wrap = self._get_compilers('objcpp', 'OBJCXX', want_cross)
+ for compiler in compilers:
+ if isinstance(compiler, str):
+ compiler = [compiler]
+ arg = ['--version']
+ try:
+ p, out, err = Popen_safe(compiler + arg)
+ except OSError as e:
+ popen_exceptions[' '.join(compiler + arg)] = e
+ version = search_version(out)
+ if 'Free Software Foundation' in out:
+ defines = self.get_gnu_compiler_defines(compiler)
+ if not defines:
+ popen_exceptions[' '.join(compiler)] = 'no pre-processor defines'
+ continue
+ gtype = self.get_gnu_compiler_type(defines)
+ version = self.get_gnu_version_from_defines(defines)
+ return GnuObjCPPCompiler(ccache + compiler, version, gtype, is_cross, exe_wrap, defines)
+ if out.startswith('Apple LLVM'):
+ return ClangObjCPPCompiler(ccache + compiler, version, CLANG_OSX, is_cross, exe_wrap)
+ if out.startswith('clang'):
+ return ClangObjCPPCompiler(ccache + compiler, version, CLANG_STANDARD, is_cross, exe_wrap)
+ self._handle_exceptions(popen_exceptions, compilers)
+
    def detect_java_compiler(self):
        """Detect javac; note that javac prints its version to stderr,
        which is why 'err' is searched below."""
        exelist = ['javac']
        try:
            p, out, err = Popen_safe(exelist + ['-version'])
        except OSError:
            raise EnvironmentException('Could not execute Java compiler "%s"' % ' '.join(exelist))
        version = search_version(err)
        if 'javac' in err:
            return JavaCompiler(exelist, version)
        raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"')
+
    def detect_cs_compiler(self):
        """Detect the C# compiler; only Mono's 'mcs' is recognized."""
        exelist = ['mcs']
        try:
            p, out, err = Popen_safe(exelist + ['--version'])
        except OSError:
            raise EnvironmentException('Could not execute C# compiler "%s"' % ' '.join(exelist))
        version = search_version(out)
        if 'Mono' in out:
            return MonoCompiler(exelist, version)
        raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"')
+
    def detect_vala_compiler(self):
        """Detect valac; the VALAC environment variable overrides the
        default command."""
        if 'VALAC' in os.environ:
            exelist = shlex.split(os.environ['VALAC'])
        else:
            exelist = ['valac']
        try:
            p, out = Popen_safe(exelist + ['--version'])[0:2]
        except OSError:
            raise EnvironmentException('Could not execute Vala compiler "%s"' % ' '.join(exelist))
        version = search_version(out)
        if 'Vala' in out:
            return ValaCompiler(exelist, version)
        raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"')
+
+ def detect_rust_compiler(self):
+ exelist = ['rustc']
+ try:
+ p, out = Popen_safe(exelist + ['--version'])[0:2]
+ except OSError:
+ raise EnvironmentException('Could not execute Rust compiler "%s"' % ' '.join(exelist))
+ version = search_version(out)
+ if 'rustc' in out:
+ return RustCompiler(exelist, version)
+ raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"')
+
    def detect_d_compiler(self, want_cross):
        """Detect a D compiler (DC environment variable, cross file, or the
        first of ldc2/ldc/gdc/dmd found on PATH)."""
        is_cross = False
        # Search for a D compiler.
        # We prefer LDC over GDC unless overridden with the DC
        # environment variable because LDC has a much more
        # up to date language version at time (2016).
        if 'DC' in os.environ:
            exelist = shlex.split(os.environ['DC'])
        elif self.is_cross_build() and want_cross:
            exelist = mesonlib.stringlistify(self.cross_info.config['binaries']['d'])
            is_cross = True
        elif shutil.which("ldc2"):
            exelist = ['ldc2']
        elif shutil.which("ldc"):
            exelist = ['ldc']
        elif shutil.which("gdc"):
            exelist = ['gdc']
        elif shutil.which("dmd"):
            exelist = ['dmd']
        else:
            raise EnvironmentException('Could not find any supported D compiler.')

        try:
            p, out = Popen_safe(exelist + ['--version'])[0:2]
        except OSError:
            raise EnvironmentException('Could not execute D compiler "%s"' % ' '.join(exelist))
        version = search_version(out)
        # Classify the binary by its version banner.
        if 'LLVM D compiler' in out:
            return compilers.LLVMDCompiler(exelist, version, is_cross)
        elif 'gdc' in out:
            return compilers.GnuDCompiler(exelist, version, is_cross)
        elif 'Digital Mars' in out:
            return compilers.DmdDCompiler(exelist, version, is_cross)
        raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"')
+
    def detect_swift_compiler(self):
        """Detect swiftc; swiftc -v prints its banner to stderr, which is
        why 'err' is searched below."""
        exelist = ['swiftc']
        try:
            p, _, err = Popen_safe(exelist + ['-v'])
        except OSError:
            raise EnvironmentException('Could not execute Swift compiler "%s"' % ' '.join(exelist))
        version = search_version(err)
        if 'Swift' in err:
            return compilers.SwiftCompiler(exelist, version)
        raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"')
+
+ def detect_static_linker(self, compiler):
+ if compiler.is_cross:
+ linker = self.cross_info.config['binaries']['ar']
+ if isinstance(linker, str):
+ linker = [linker]
+ linkers = [linker]
+ else:
+ evar = 'AR'
+ if evar in os.environ:
+ linkers = [shlex.split(os.environ[evar])]
+ elif isinstance(compiler, compilers.VisualStudioCCompiler):
+ linkers = [self.vs_static_linker]
+ elif isinstance(compiler, compilers.GnuCompiler):
+ # Use gcc-ar if available; needed for LTO
+ linkers = [self.gcc_static_linker, self.default_static_linker]
+ elif isinstance(compiler, compilers.ClangCompiler):
+ # Use llvm-ar if available; needed for LTO
+ linkers = [self.clang_static_linker, self.default_static_linker]
+ else:
+ linkers = [self.default_static_linker]
+ popen_exceptions = {}
+ for linker in linkers:
+ if 'lib' in linker or 'lib.exe' in linker:
+ arg = '/?'
+ else:
+ arg = '--version'
+ try:
+ p, out, err = Popen_safe(linker + [arg])
+ except OSError as e:
+ popen_exceptions[' '.join(linker + [arg])] = e
+ continue
+ if '/OUT:' in out or '/OUT:' in err:
+ return VisualStudioLinker(linker)
+ if p.returncode == 0:
+ return ArLinker(linker)
+ if p.returncode == 1 and err.startswith('usage'): # OSX
+ return ArLinker(linker)
+ self._handle_exceptions(popen_exceptions, linkers, 'linker')
+ raise EnvironmentException('Unknown static linker "%s"' % ' '.join(linkers))
+
+ def detect_ccache(self):
+ try:
+ has_ccache = subprocess.call(['ccache', '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ except OSError:
+ has_ccache = 1
+ if has_ccache == 0:
+ cmdlist = ['ccache']
+ else:
+ cmdlist = []
+ return cmdlist
+
    def get_source_dir(self):
        # Root of the project's source tree.
        return self.source_dir

    def get_build_dir(self):
        # Root of the build tree.
        return self.build_dir

    def get_exe_suffix(self):
        # Filename suffix used for executables on this platform.
        return self.exe_suffix
+
    def get_import_lib_dir(self):
        "Install dir for the import library (library used for linking)"
        return self.get_libdir()

    def get_shared_module_dir(self):
        "Install dir for shared modules that are loaded at runtime"
        return self.get_libdir()

    def get_shared_lib_dir(self):
        "Install dir for the shared library"
        # With the Windows-style layout, shared libraries are installed
        # next to the executables instead of into libdir.
        if self.win_libdir_layout:
            return self.get_bindir()
        return self.get_libdir()

    def get_static_lib_dir(self):
        "Install dir for the static library"
        return self.get_libdir()

    def get_object_suffix(self):
        # Filename suffix used for object files on this platform.
        return self.object_suffix
+
    # Convenience accessors for the standard builtin directory options.

    def get_prefix(self):
        return self.coredata.get_builtin_option('prefix')

    def get_libdir(self):
        return self.coredata.get_builtin_option('libdir')

    def get_libexecdir(self):
        return self.coredata.get_builtin_option('libexecdir')

    def get_bindir(self):
        return self.coredata.get_builtin_option('bindir')

    def get_includedir(self):
        return self.coredata.get_builtin_option('includedir')

    def get_mandir(self):
        return self.coredata.get_builtin_option('mandir')

    def get_datadir(self):
        return self.coredata.get_builtin_option('datadir')
+
+ def get_compiler_system_dirs(self):
+ for comp in self.coredata.compilers.values():
+ if isinstance(comp, compilers.ClangCompiler):
+ index = 1
+ break
+ elif isinstance(comp, compilers.GnuCompiler):
+ index = 2
+ break
+ else:
+ # This option is only supported by gcc and clang. If we don't get a
+ # GCC or Clang compiler return and empty list.
+ return []
+
+ p, out, _ = Popen_safe(comp.get_exelist() + ['-print-search-dirs'])
+ if p.returncode != 0:
+ raise mesonlib.MesonException('Could not calculate system search dirs')
+ out = out.split('\n')[index].lstrip('libraries: =').split(':')
+ return [os.path.normpath(p) for p in out]
+
def get_args_from_envvars(compiler):
    """
    @compiler: Compiler to fetch environment flags for

    Returns a tuple of (preproc_flags, compile_flags, link_flags) for the
    specified language from the inherited environment.

    (Bug fix: the old docstring claimed a two-element tuple, and
    preproc_flags leaked a str instead of a list for languages without
    CPPFLAGS support.)
    """
    def log_var(var, val):
        if val:
            mlog.log('Appending {} from environment: {!r}'.format(var, val))

    lang = compiler.get_language()
    # When the compiler doubles as the linker (GCC/Clang style), compile
    # flags are also relevant at link time.
    compiler_is_linker = False
    if hasattr(compiler, 'get_linker_exelist'):
        compiler_is_linker = (compiler.get_exelist() == compiler.get_linker_exelist())

    if lang not in cflags_mapping:
        return [], [], []

    compile_flags = os.environ.get(cflags_mapping[lang], '')
    log_var(cflags_mapping[lang], compile_flags)
    compile_flags = shlex.split(compile_flags)

    # Link flags (same for all languages)
    link_flags = os.environ.get('LDFLAGS', '')
    log_var('LDFLAGS', link_flags)
    link_flags = shlex.split(link_flags)
    if compiler_is_linker:
        # When the compiler is used as a wrapper around the linker (such as
        # with GCC and Clang), the compile flags can be needed while linking
        # too. This is also what Autotools does. However, we don't want to do
        # this when the linker is stand-alone such as with MSVC C/C++, etc.
        link_flags = compile_flags + link_flags

    # Pre-processor flags (not for fortran or D). Always a list so the
    # return type is consistent across languages.
    preproc_flags = []
    if lang in ('c', 'cpp', 'objc', 'objcpp'):
        raw_preproc_flags = os.environ.get('CPPFLAGS', '')
        log_var('CPPFLAGS', raw_preproc_flags)
        preproc_flags = shlex.split(raw_preproc_flags)
        compile_flags += preproc_flags

    return preproc_flags, compile_flags, link_flags
+
class CrossBuildInfo:
    """Parsed representation of a Meson cross file.

    self.config maps section name -> {key: value}; the 'properties'
    section always exists. A cross file must describe either a target
    machine or a host machine (with 'binaries').
    """
    def __init__(self, filename):
        self.config = {'properties': {}}
        self.parse_datafile(filename)
        # A target-machine-only file needs no 'binaries' section.
        if 'target_machine' in self.config:
            return
        if 'host_machine' not in self.config:
            raise mesonlib.MesonException('Cross info file must have either host or a target machine.')
        if 'binaries' not in self.config:
            raise mesonlib.MesonException('Cross file is missing "binaries".')

    def ok_type(self, i):
        # Only scalars (and, at the call site, lists of scalars) are
        # permitted as cross-file values.
        return isinstance(i, (str, int, bool))

    def parse_datafile(self, filename):
        """Read and validate the cross file into self.config."""
        config = configparser.ConfigParser()
        try:
            with open(filename, 'r') as f:
                config.read_file(f, filename)
        except FileNotFoundError:
            raise EnvironmentException('File not found: %s.' % filename)
        # This is a bit hackish at the moment.
        for s in config.sections():
            self.config[s] = {}
            for entry in config[s]:
                value = config[s][entry]
                if ' ' in entry or '\t' in entry or "'" in entry or '"' in entry:
                    raise EnvironmentException('Malformed variable name %s in cross file..' % entry)
                try:
                    # NOTE(review): values are eval()'d as Python literals.
                    # __builtins__ is blanked, but eval on untrusted input
                    # is still risky — cross files must be trusted.
                    res = eval(value, {'__builtins__': None}, {'true': True, 'false': False})
                except Exception:
                    raise EnvironmentException('Malformed value in cross file variable %s.' % entry)
                if self.ok_type(res):
                    self.config[s][entry] = res
                elif isinstance(res, list):
                    for i in res:
                        if not self.ok_type(i):
                            raise EnvironmentException('Malformed value in cross file variable %s.' % entry)
                    self.config[s][entry] = res
                else:
                    raise EnvironmentException('Malformed value in cross file variable %s.' % entry)

    def has_host(self):
        return 'host_machine' in self.config

    def has_target(self):
        return 'target_machine' in self.config

    def has_stdlib(self, language):
        # True if the cross file specifies a custom stdlib for 'language'.
        return language + '_stdlib' in self.config['properties']

    def get_stdlib(self, language):
        return self.config['properties'][language + '_stdlib']

    def get_properties(self):
        return self.config['properties']

    # When compiling a cross compiler we use the native compiler for everything.
    # But not when cross compiling a cross compiler.
    def need_cross_compiler(self):
        return 'host_machine' in self.config

    def need_exe_wrapper(self):
        """Whether host binaries need a wrapper (e.g. an emulator) to run
        on the build machine. Can be forced via 'needs_exe_wrapper'."""
        value = self.config['properties'].get('needs_exe_wrapper', None)
        if value is not None:
            return value
        # Can almost always run 32-bit binaries on 64-bit natively if the host
        # and build systems are the same. We don't pass any compilers to
        # detect_cpu_family() here because we always want to know the OS
        # architecture, not what the compiler environment tells us.
        if self.has_host() and detect_cpu_family({}) == 'x86_64' and \
           self.config['host_machine']['cpu_family'] == 'x86' and \
           self.config['host_machine']['system'] == detect_system():
            return False
        return True
+
+
class MachineInfo:
    """Simple value object describing one machine: OS name, CPU family,
    exact CPU string and byte order."""
    def __init__(self, system, cpu_family, cpu, endian):
        self.system = system            # OS name, e.g. from detect_system()
        self.cpu_family = cpu_family    # broad CPU family, e.g. 'x86_64'
        self.cpu = cpu                  # exact CPU string
        self.endian = endian            # 'little' or 'big'
--- /dev/null
+# Copyright 2012-2017 The Meson development team
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from . import mparser
+from . import environment
+from . import coredata
+from . import dependencies
+from . import mlog
+from . import build
+from . import optinterpreter
+from . import compilers
+from .wrap import wrap, WrapMode
+from . import mesonlib
+from .mesonlib import FileMode, Popen_safe, listify, extract_as_list
+from .dependencies import ExternalProgram
+from .dependencies import InternalDependency, Dependency, DependencyException
+from .interpreterbase import InterpreterBase
+from .interpreterbase import check_stringlist, noPosargs, noKwargs, stringArgs, permittedKwargs
+from .interpreterbase import InterpreterException, InvalidArguments, InvalidCode
+from .interpreterbase import InterpreterObject, MutableInterpreterObject, Disabler
+from .modules import ModuleReturnValue
+
+import os, sys, shutil, uuid
+import re, shlex
+from collections import namedtuple
+
+import importlib
+
+run_depr_printed = False
+
def stringifyUserArguments(args):
    """Render a string, integer or (nested) list of them as Meson DSL
    source text; anything else raises InvalidArguments."""
    if isinstance(args, list):
        rendered = ', '.join(stringifyUserArguments(item) for item in args)
        return '[%s]' % rendered
    if isinstance(args, int):
        return str(args)
    if isinstance(args, str):
        return "'%s'" % args
    raise InvalidArguments('Function accepts only strings, integers, lists and lists thereof.')
+
+
class ObjectHolder:
    """Thin wrapper that carries a backend object through the interpreter
    as 'held_object'."""

    def __init__(self, obj):
        self.held_object = obj

    def __repr__(self):
        return '<Holder: %r>' % (self.held_object,)
+
+
class TryRunResultHolder(InterpreterObject):
    """Exposes a try_run() result to the DSL via returncode()/compiled()/
    stdout()/stderr() methods."""
    def __init__(self, res):
        super().__init__()
        self.res = res
        self.methods.update({'returncode': self.returncode_method,
                             'compiled': self.compiled_method,
                             'stdout': self.stdout_method,
                             'stderr': self.stderr_method,
                             })

    def returncode_method(self, args, kwargs):
        return self.res.returncode

    def compiled_method(self, args, kwargs):
        return self.res.compiled

    def stdout_method(self, args, kwargs):
        return self.res.stdout

    def stderr_method(self, args, kwargs):
        return self.res.stderr
+
class RunProcess(InterpreterObject):
    """Implements run_command(): executes an external program at configure
    time and captures its return code, stdout and stderr."""

    def __init__(self, cmd, args, source_dir, build_dir, subdir, mesonintrospect, in_builddir=False):
        super().__init__()
        if not isinstance(cmd, ExternalProgram):
            raise AssertionError('BUG: RunProcess must be passed an ExternalProgram')
        pc, self.stdout, self.stderr = self.run_command(cmd, args, source_dir, build_dir, subdir, mesonintrospect, in_builddir)
        self.returncode = pc.returncode
        self.methods.update({'returncode': self.returncode_method,
                             'stdout': self.stdout_method,
                             'stderr': self.stderr_method,
                             })

    def run_command(self, cmd, args, source_dir, build_dir, subdir, mesonintrospect, in_builddir):
        """Run the command in the source (or build) subdir with the MESON_*
        helper variables added to the environment; returns (Popen, stdout,
        stderr). Raises InterpreterException if the binary is missing."""
        command_array = cmd.get_command() + args
        env = {'MESON_SOURCE_ROOT': source_dir,
               'MESON_BUILD_ROOT': build_dir,
               'MESON_SUBDIR': subdir,
               'MESONINTROSPECT': ' '.join([shlex.quote(x) for x in mesonintrospect]),
               }
        if in_builddir:
            cwd = os.path.join(build_dir, subdir)
        else:
            cwd = os.path.join(source_dir, subdir)
        # Extend, don't replace, the inherited environment.
        child_env = os.environ.copy()
        child_env.update(env)
        mlog.debug('Running command:', ' '.join(command_array))
        try:
            p, o, e = Popen_safe(command_array, env=child_env, cwd=cwd)
            mlog.debug('--- stdout----')
            mlog.debug(o)
            mlog.debug('----stderr----')
            mlog.debug(e)
            mlog.debug('')
            return p, o, e
        except FileNotFoundError:
            raise InterpreterException('Could not execute command "%s".' % ' '.join(command_array))

    def returncode_method(self, args, kwargs):
        return self.returncode

    def stdout_method(self, args, kwargs):
        return self.stdout

    def stderr_method(self, args, kwargs):
        return self.stderr
+
class ConfigureFileHolder(InterpreterObject, ObjectHolder):
    """Holds a build.ConfigureFile created by configure_file()."""

    def __init__(self, subdir, sourcename, targetname, configuration_data):
        InterpreterObject.__init__(self)
        ObjectHolder.__init__(self, build.ConfigureFile(subdir, sourcename,
                                                        targetname, configuration_data))
+
+
class EnvironmentVariablesHolder(MutableInterpreterObject, ObjectHolder):
    """Holds a build.EnvironmentVariables object and records the
    set/append/prepend operations requested from the Meson DSL."""

    def __init__(self):
        MutableInterpreterObject.__init__(self)
        ObjectHolder.__init__(self, build.EnvironmentVariables())
        self.methods.update({'set': self.set_method,
                             'append': self.append_method,
                             'prepend': self.prepend_method,
                             })

    def __repr__(self):
        repr_str = "<{0}: {1}>"
        return repr_str.format(self.__class__.__name__, self.held_object.envvars)

    @stringArgs
    def add_var(self, method, args, kwargs):
        # Validate and record the operation; it is applied later when the
        # environment object is consumed.
        if not isinstance(kwargs.get("separator", ""), str):
            raise InterpreterException("EnvironmentVariablesHolder methods 'separator'"
                                       " argument needs to be a string.")
        if len(args) < 2:
            # Bug fix: the implicitly concatenated message fragments were
            # missing spaces ("at least2 arguments").
            raise InterpreterException("EnvironmentVariablesHolder methods require at least "
                                       "2 arguments, first is the name of the variable and "
                                       "following ones are values")
        self.held_object.envvars.append((method, args[0], args[1:], kwargs))

    def set_method(self, args, kwargs):
        self.add_var(self.held_object.set, args, kwargs)

    def append_method(self, args, kwargs):
        self.add_var(self.held_object.append, args, kwargs)

    def prepend_method(self, args, kwargs):
        self.add_var(self.held_object.prepend, args, kwargs)
+
+
class ConfigurationDataHolder(MutableInterpreterObject, ObjectHolder):
    """Holds a build.ConfigurationData and exposes its mutation and query
    methods to the DSL. Becomes immutable once consumed by
    configure_file() (see mark_used)."""
    def __init__(self):
        MutableInterpreterObject.__init__(self)
        self.used = False # These objects become immutable after use in configure_file.
        ObjectHolder.__init__(self, build.ConfigurationData())
        self.methods.update({'set': self.set_method,
                             'set10': self.set10_method,
                             'set_quoted': self.set_quoted_method,
                             'has': self.has_method,
                             'get': self.get_method,
                             'get_unquoted': self.get_unquoted_method,
                             'merge_from': self.merge_from_method,
                             })

    def is_used(self):
        return self.used

    def mark_used(self):
        # Called once the data has been consumed; further set_* calls fail.
        self.used = True

    def validate_args(self, args, kwargs):
        """Common validation for the set_* methods; returns
        (name, value, description)."""
        if len(args) != 2:
            raise InterpreterException("Configuration set requires 2 arguments.")
        if self.used:
            raise InterpreterException("Can not set values on configuration object that has been used.")
        name = args[0]
        val = args[1]
        desc = kwargs.get('description', None)
        if not isinstance(name, str):
            raise InterpreterException("First argument to set must be a string.")
        if desc is not None and not isinstance(desc, str):
            raise InterpreterException('Description must be a string.')

        return name, val, desc

    def set_method(self, args, kwargs):
        (name, val, desc) = self.validate_args(args, kwargs)
        self.held_object.values[name] = (val, desc)

    def set_quoted_method(self, args, kwargs):
        # Stores the value wrapped in double quotes, escaping embedded ones.
        (name, val, desc) = self.validate_args(args, kwargs)
        if not isinstance(val, str):
            raise InterpreterException("Second argument to set_quoted must be a string.")
        escaped_val = '\\"'.join(val.split('"'))
        self.held_object.values[name] = ('"' + escaped_val + '"', desc)

    def set10_method(self, args, kwargs):
        # Stores 1 or 0 depending on the truthiness of the value.
        (name, val, desc) = self.validate_args(args, kwargs)
        if val:
            self.held_object.values[name] = (1, desc)
        else:
            self.held_object.values[name] = (0, desc)

    def has_method(self, args, kwargs):
        return args[0] in self.held_object.values

    def get_method(self, args, kwargs):
        # Optional second argument is a fallback default.
        if len(args) < 1 or len(args) > 2:
            raise InterpreterException('Get method takes one or two arguments.')
        name = args[0]
        if name in self.held_object:
            return self.held_object.get(name)[0]
        if len(args) > 1:
            return args[1]
        raise InterpreterException('Entry %s not in configuration data.' % name)

    def get_unquoted_method(self, args, kwargs):
        # Like get(), but strips one level of surrounding double quotes.
        # NOTE(review): the indexing below assumes a str value — confirm
        # non-string entries never reach this method.
        if len(args) < 1 or len(args) > 2:
            raise InterpreterException('Get method takes one or two arguments.')
        name = args[0]
        if name in self.held_object:
            val = self.held_object.get(name)[0]
        elif len(args) > 1:
            val = args[1]
        else:
            raise InterpreterException('Entry %s not in configuration data.' % name)
        if val[0] == '"' and val[-1] == '"':
            return val[1:-1]
        return val

    def get(self, name):
        return self.held_object.values[name] # (val, desc)

    def keys(self):
        return self.held_object.values.keys()

    def merge_from_method(self, args, kwargs):
        # Copies (and overwrites) entries from another configuration object.
        if len(args) != 1:
            raise InterpreterException('Merge_from takes one positional argument.')
        from_object = args[0]
        if not isinstance(from_object, ConfigurationDataHolder):
            raise InterpreterException('Merge_from argument must be a configuration data object.')
        from_object = from_object.held_object
        for k, v in from_object.values.items():
            self.held_object.values[k] = v
+
+# Interpreter objects can not be pickled so we must have
+# these wrappers.
+
class DependencyHolder(InterpreterObject, ObjectHolder):
    """Wraps an external Dependency object for use from the Meson DSL."""
    def __init__(self, dep):
        InterpreterObject.__init__(self)
        ObjectHolder.__init__(self, dep)
        self.methods.update({'found': self.found_method,
                             'type_name': self.type_name_method,
                             'version': self.version_method,
                             'get_pkgconfig_variable': self.pkgconfig_method,
                             'get_configtool_variable': self.configtool_method,
                             })

    def type_name_method(self, args, kwargs):
        return self.held_object.type_name

    def found_method(self, args, kwargs):
        # Internal (sub-project) dependencies are always considered found.
        if self.held_object.type_name == 'internal':
            return True
        return self.held_object.found()

    def version_method(self, args, kwargs):
        return self.held_object.get_version()

    def pkgconfig_method(self, args, kwargs):
        """Look up a pkg-config variable on the dependency."""
        args = listify(args)
        if len(args) != 1:
            raise InterpreterException('get_pkgconfig_variable takes exactly one argument.')
        varname = args[0]
        if not isinstance(varname, str):
            raise InterpreterException('Variable name must be a string.')
        return self.held_object.get_pkgconfig_variable(varname, kwargs)

    def configtool_method(self, args, kwargs):
        """Look up a config-tool variable on the dependency."""
        args = listify(args)
        if len(args) != 1:
            raise InterpreterException('get_configtool_variable takes exactly one argument.')
        varname = args[0]
        if not isinstance(varname, str):
            raise InterpreterException('Variable name must be a string.')
        return self.held_object.get_configtool_variable(varname)
+
class InternalDependencyHolder(InterpreterObject, ObjectHolder):
    """Wraps an InternalDependency (declare_dependency()); always found."""
    def __init__(self, dep):
        InterpreterObject.__init__(self)
        ObjectHolder.__init__(self, dep)
        self.methods.update({'found': self.found_method,
                             'version': self.version_method,
                             })

    def found_method(self, args, kwargs):
        return True

    def version_method(self, args, kwargs):
        return self.held_object.get_version()
+
class ExternalProgramHolder(InterpreterObject, ObjectHolder):
    """Wraps an ExternalProgram (find_program()) for the Meson DSL."""
    def __init__(self, ep):
        InterpreterObject.__init__(self)
        ObjectHolder.__init__(self, ep)
        self.methods.update({'found': self.found_method,
                             'path': self.path_method})

    def found_method(self, args, kwargs):
        return self.found()

    def path_method(self, args, kwargs):
        return self.held_object.get_path()

    def found(self):
        return self.held_object.found()

    def get_command(self):
        return self.held_object.get_command()

    def get_name(self):
        return self.held_object.get_name()
+
class ExternalLibraryHolder(InterpreterObject, ObjectHolder):
    """Wraps an external library (find_library()) for the Meson DSL."""
    def __init__(self, el):
        InterpreterObject.__init__(self)
        ObjectHolder.__init__(self, el)
        self.methods.update({'found': self.found_method})

    def found(self):
        return self.held_object.found()

    def found_method(self, args, kwargs):
        return self.found()

    def get_name(self):
        return self.held_object.name

    def get_compile_args(self):
        return self.held_object.get_compile_args()

    def get_link_args(self):
        return self.held_object.get_link_args()

    def get_exe_args(self):
        return self.held_object.get_exe_args()
+
class GeneratorHolder(InterpreterObject, ObjectHolder):
    """Holds a build.Generator created by generator(); process() runs it
    over a list of input files."""
    def __init__(self, interpreter, args, kwargs):
        InterpreterObject.__init__(self)
        self.interpreter = interpreter
        ObjectHolder.__init__(self, build.Generator(args, kwargs))
        self.methods.update({'process': self.process_method})

    def process_method(self, args, kwargs):
        # 'extra_args' appends per-invocation arguments to the generator.
        extras = mesonlib.stringlistify(kwargs.get('extra_args', []))
        gl = self.held_object.process_files('Generator', args, self.interpreter, extra_args=extras)
        return GeneratedListHolder(gl)
+
+
class GeneratedListHolder(InterpreterObject, ObjectHolder):
    """Holds a build.GeneratedList; constructed either from a
    GeneratorHolder plus extra arguments, or from an existing
    GeneratedList object."""
    def __init__(self, arg1, extra_args=None):
        # Bug fix: the default used to be a mutable list ([]), which is
        # shared between every call that omits the argument.
        InterpreterObject.__init__(self)
        if extra_args is None:
            extra_args = []
        if isinstance(arg1, GeneratorHolder):
            ObjectHolder.__init__(self, build.GeneratedList(arg1.held_object, extra_args))
        else:
            ObjectHolder.__init__(self, arg1)

    def __repr__(self):
        r = '<{}: {!r}>'
        return r.format(self.__class__.__name__, self.held_object.get_outputs())

    def add_file(self, a):
        self.held_object.add_file(a)
+
class BuildMachine(InterpreterObject, ObjectHolder):
    """Implements the 'build_machine' DSL object: properties of the
    machine Meson itself runs on, detected from the live system."""
    def __init__(self, compilers):
        self.compilers = compilers
        InterpreterObject.__init__(self)
        # CPU detection may consult the available compilers.
        held_object = environment.MachineInfo(environment.detect_system(),
                                              environment.detect_cpu_family(self.compilers),
                                              environment.detect_cpu(self.compilers),
                                              sys.byteorder)
        ObjectHolder.__init__(self, held_object)
        self.methods.update({'system': self.system_method,
                             'cpu_family': self.cpu_family_method,
                             'cpu': self.cpu_method,
                             'endian': self.endian_method,
                             })

    def cpu_family_method(self, args, kwargs):
        return self.held_object.cpu_family

    def cpu_method(self, args, kwargs):
        return self.held_object.cpu

    def system_method(self, args, kwargs):
        return self.held_object.system

    def endian_method(self, args, kwargs):
        return self.held_object.endian
+
+# This class will provide both host_machine and
+# target_machine
class CrossMachineInfo(InterpreterObject, ObjectHolder):
    """Machine object (host_machine / target_machine) built from a cross file.

    The four keys 'system', 'cpu_family', 'cpu' and 'endian' are mandatory.
    """

    def __init__(self, cross_info):
        InterpreterObject.__init__(self)
        minimum_cross_info = {'cpu', 'cpu_family', 'endian', 'system'}
        # The previous strict-subset test (set(cross_info) < minimum) missed
        # absent mandatory keys whenever unrelated extra keys were present;
        # check for the missing keys directly instead.
        missing = minimum_cross_info - set(cross_info)
        if missing:
            raise InterpreterException(
                'Machine info is currently {}\n'.format(cross_info) +
                'but is missing {}.'.format(missing))
        self.info = cross_info
        minfo = environment.MachineInfo(cross_info['system'],
                                        cross_info['cpu_family'],
                                        cross_info['cpu'],
                                        cross_info['endian'])
        ObjectHolder.__init__(self, minfo)
        self.methods.update({'system': self.system_method,
                             'cpu': self.cpu_method,
                             'cpu_family': self.cpu_family_method,
                             'endian': self.endian_method,
                             })

    def cpu_family_method(self, args, kwargs):
        return self.held_object.cpu_family

    def cpu_method(self, args, kwargs):
        return self.held_object.cpu

    def system_method(self, args, kwargs):
        return self.held_object.system

    def endian_method(self, args, kwargs):
        return self.held_object.endian
+
class IncludeDirsHolder(InterpreterObject, ObjectHolder):
    """Thin wrapper exposing an include_directories object to build files."""
    def __init__(self, idobj):
        InterpreterObject.__init__(self)
        ObjectHolder.__init__(self, idobj)
+
class Headers(InterpreterObject):
    """Result of an install_headers() call: sources plus install location."""

    def __init__(self, sources, kwargs):
        InterpreterObject.__init__(self)
        self.sources = sources
        self.install_subdir = kwargs.get('subdir', '')
        self.custom_install_dir = kwargs.get('install_dir', None)
        # install_dir, when given, must be a plain string.
        if self.custom_install_dir is not None and not isinstance(self.custom_install_dir, str):
            raise InterpreterException('Custom_install_dir must be a string.')

    def set_install_subdir(self, subdir):
        self.install_subdir = subdir

    def get_install_subdir(self):
        return self.install_subdir

    def get_sources(self):
        return self.sources

    def get_custom_install_dir(self):
        return self.custom_install_dir
+
class DataHolder(InterpreterObject, ObjectHolder):
    """Wraps an install_data() result object; plain delegating accessors."""
    def __init__(self, data):
        InterpreterObject.__init__(self)
        ObjectHolder.__init__(self, data)

    def get_source_subdir(self):
        return self.held_object.source_subdir

    def get_sources(self):
        return self.held_object.sources

    def get_install_dir(self):
        return self.held_object.install_dir
+
class InstallDir(InterpreterObject):
    """Plain data record describing an install_subdir() request."""
    def __init__(self, src_subdir, inst_subdir, install_dir, install_mode, exclude):
        InterpreterObject.__init__(self)
        # Subdir in the source tree the files come from.
        self.source_subdir = src_subdir
        # Subdir (relative to source_subdir) that actually gets installed.
        self.installable_subdir = inst_subdir
        self.install_dir = install_dir
        self.install_mode = install_mode
        # Patterns/files to skip during installation.
        self.exclude = exclude
+
class Man(InterpreterObject):
    """Result of an install_man() call.

    Every source file name must end in '.<n>' where n is the man section,
    a number between 1 and 8.
    """

    def __init__(self, sources, kwargs):
        InterpreterObject.__init__(self)
        self.sources = sources
        self.validate_sources()
        self.custom_install_dir = kwargs.get('install_dir', None)
        if self.custom_install_dir is not None and not isinstance(self.custom_install_dir, str):
            raise InterpreterException('Custom_install_dir must be a string.')

    def validate_sources(self):
        for src in self.sources:
            try:
                section = int(src.split('.')[-1])
            except (IndexError, ValueError):
                # No numeric extension at all; treat as invalid below.
                section = 0
            if not 1 <= section <= 8:
                raise InvalidArguments('Man file must have a file extension of a number between 1 and 8')

    def get_custom_install_dir(self):
        return self.custom_install_dir

    def get_sources(self):
        return self.sources
+
class GeneratedObjectsHolder(InterpreterObject, ObjectHolder):
    """Wraps the object list produced by extract_objects()/extract_all_objects()."""
    def __init__(self, held_object):
        InterpreterObject.__init__(self)
        ObjectHolder.__init__(self, held_object)
+
class TargetHolder(InterpreterObject, ObjectHolder):
    """Base holder for build targets; keeps a back-reference to the interpreter."""
    def __init__(self, target, interp):
        InterpreterObject.__init__(self)
        ObjectHolder.__init__(self, target)
        self.interpreter = interp
+
class BuildTargetHolder(TargetHolder):
    """Common interpreter wrapper for compiled targets (executables, libraries)."""

    def __init__(self, target, interp):
        super().__init__(target, interp)
        self.methods.update({
            'extract_objects': self.extract_objects_method,
            'extract_all_objects': self.extract_all_objects_method,
            'get_id': self.get_id_method,
            'outdir': self.outdir_method,
            'full_path': self.full_path_method,
            'private_dir_include': self.private_dir_include_method,
        })

    def __repr__(self):
        held = self.held_object
        return '<{} {}: {}>'.format(self.__class__.__name__, held.get_id(), held.filename)

    def is_cross(self):
        return self.held_object.is_cross()

    def private_dir_include_method(self, args, kwargs):
        # Expose the target's private build directory as an
        # include_directories object.
        priv_dir = self.interpreter.backend.get_target_private_dir(self.held_object)
        return IncludeDirsHolder(build.IncludeDirs('', [], False, [priv_dir]))

    def full_path_method(self, args, kwargs):
        return self.interpreter.backend.get_target_filename_abs(self.held_object)

    def outdir_method(self, args, kwargs):
        return self.interpreter.backend.get_target_dir(self.held_object)

    def extract_objects_method(self, args, kwargs):
        return GeneratedObjectsHolder(self.held_object.extract_objects(args))

    def extract_all_objects_method(self, args, kwargs):
        return GeneratedObjectsHolder(self.held_object.extract_all_objects())

    def get_id_method(self, args, kwargs):
        return self.held_object.get_id()
+
class ExecutableHolder(BuildTargetHolder):
    """Holder for executable() targets."""
    def __init__(self, target, interp):
        super().__init__(target, interp)
+
class StaticLibraryHolder(BuildTargetHolder):
    """Holder for static_library() targets."""
    def __init__(self, target, interp):
        super().__init__(target, interp)
+
class SharedLibraryHolder(BuildTargetHolder):
    """Holder for shared_library() targets."""
    def __init__(self, target, interp):
        super().__init__(target, interp)
+
class SharedModuleHolder(BuildTargetHolder):
    """Holder for shared_module() targets."""
    def __init__(self, target, interp):
        super().__init__(target, interp)
+
class JarHolder(BuildTargetHolder):
    """Holder for jar() targets."""
    def __init__(self, target, interp):
        super().__init__(target, interp)
+
class CustomTargetIndexHolder(InterpreterObject, ObjectHolder):
    """Wraps a single indexed output of a custom target (target[i])."""
    def __init__(self, object_to_hold):
        InterpreterObject.__init__(self)
        ObjectHolder.__init__(self, object_to_hold)
+
class CustomTargetHolder(TargetHolder):
    """Interpreter wrapper for custom_target() results.

    Supports read-only indexing: target[i] yields a holder for the i'th
    output; assignment and deletion are rejected.
    """

    def __init__(self, target, interp):
        super().__init__(target, interp)
        self.methods.update({'full_path': self.full_path_method,
                             })

    def __repr__(self):
        held = self.held_object
        return '<{} {}: {}>'.format(self.__class__.__name__, held.get_id(), held.command)

    def full_path_method(self, args, kwargs):
        return self.interpreter.backend.get_target_filename_abs(self.held_object)

    def __getitem__(self, index):
        return CustomTargetIndexHolder(self.held_object[index])

    def __setitem__(self, index, value):
        raise InterpreterException('Cannot set a member of a CustomTarget')

    def __delitem__(self, index):
        raise InterpreterException('Cannot delete a member of a CustomTarget')
+
class RunTargetHolder(InterpreterObject, ObjectHolder):
    """Holder for run_target() results; constructs the underlying RunTarget."""
    def __init__(self, name, command, args, dependencies, subdir, subproject):
        InterpreterObject.__init__(self)
        ObjectHolder.__init__(self, build.RunTarget(name, command, args, dependencies, subdir, subproject))

    def __repr__(self):
        r = '<{} {}: {}>'
        h = self.held_object
        return r.format(self.__class__.__name__, h.get_id(), h.command)
+
class Test(InterpreterObject):
    """Plain data record describing a single test() or benchmark() entry."""
    def __init__(self, name, suite, exe, is_parallel, cmd_args, env, should_fail, timeout, workdir):
        InterpreterObject.__init__(self)
        self.name = name
        self.suite = suite
        self.exe = exe
        self.is_parallel = is_parallel
        self.cmd_args = cmd_args
        self.env = env
        self.should_fail = should_fail
        self.timeout = timeout
        self.workdir = workdir

    def get_exe(self):
        return self.exe

    def get_name(self):
        return self.name
+
class SubprojectHolder(InterpreterObject, ObjectHolder):
    """Wraps a subproject's interpreter; exposes get_variable() to build files."""

    def __init__(self, subinterpreter):
        InterpreterObject.__init__(self)
        ObjectHolder.__init__(self, subinterpreter)
        self.methods.update({'get_variable': self.get_variable_method,
                             })

    def get_variable_method(self, args, kwargs):
        if len(args) != 1:
            raise InterpreterException('Get_variable takes one argument.')
        varname = args[0]
        if not isinstance(varname, str):
            raise InterpreterException('Get_variable takes a string argument.')
        try:
            return self.held_object.variables[varname]
        except KeyError:
            # Report unknown names as a regular interpreter error instead of
            # leaking a raw Python KeyError traceback to the user.
            raise InvalidArguments('Requested variable "{}" not found.'.format(varname))
+
class CompilerHolder(InterpreterObject):
    """Exposes a compiler object's checks to meson.build files.

    Each *_method implements one method available on compiler objects in the
    DSL (sizeof, has_function, compiles, ...); results are logged via mlog
    and returned to the interpreter.
    """

    def __init__(self, compiler, env):
        InterpreterObject.__init__(self)
        self.compiler = compiler
        self.environment = env
        self.methods.update({'compiles': self.compiles_method,
                             'links': self.links_method,
                             'get_id': self.get_id_method,
                             'compute_int': self.compute_int_method,
                             'sizeof': self.sizeof_method,
                             'get_define': self.get_define_method,
                             'has_header': self.has_header_method,
                             'has_header_symbol': self.has_header_symbol_method,
                             'run': self.run_method,
                             'has_function': self.has_function_method,
                             'has_member': self.has_member_method,
                             'has_members': self.has_members_method,
                             'has_type': self.has_type_method,
                             'alignment': self.alignment_method,
                             'version': self.version_method,
                             'cmd_array': self.cmd_array_method,
                             'find_library': self.find_library_method,
                             'has_argument': self.has_argument_method,
                             'has_multi_arguments': self.has_multi_arguments_method,
                             'get_supported_arguments': self.get_supported_arguments_method,
                             'first_supported_argument': self.first_supported_argument_method,
                             'unittest_args': self.unittest_args_method,
                             'symbols_have_underscore_prefix': self.symbols_have_underscore_prefix_method,
                             })

    def version_method(self, args, kwargs):
        return self.compiler.version

    def cmd_array_method(self, args, kwargs):
        return self.compiler.exelist

    def determine_args(self, kwargs):
        """Build the extra compiler argument list shared by the check methods.

        Combines include_directories, the built-in option args (unless
        no_builtin_args is true) and the user-supplied 'args' kwarg.
        """
        nobuiltins = kwargs.get('no_builtin_args', False)
        if not isinstance(nobuiltins, bool):
            raise InterpreterException('Type of no_builtin_args not a boolean.')
        args = []
        incdirs = extract_as_list(kwargs, 'include_directories')
        for i in incdirs:
            if not isinstance(i, IncludeDirsHolder):
                raise InterpreterException('Include directories argument must be an include_directories object.')
            for idir in i.held_object.get_incdirs():
                # Include dirs are given relative to the source tree.
                idir = os.path.join(self.environment.get_source_dir(),
                                    i.held_object.get_curdir(), idir)
                args += self.compiler.get_include_args(idir, False)
        if not nobuiltins:
            opts = self.environment.coredata.compiler_options
            args += self.compiler.get_option_compile_args(opts)
            args += self.compiler.get_option_link_args(opts)
        args += mesonlib.stringlistify(kwargs.get('args', []))
        return args

    def determine_dependencies(self, kwargs):
        """Extract and validate the 'dependencies' kwarg.

        Unwraps holders and rejects anything that is not an external
        Dependency (internal deps cannot be used in compiler checks).
        """
        deps = kwargs.get('dependencies', None)
        if deps is not None:
            deps = listify(deps)
            final_deps = []
            for d in deps:
                try:
                    d = d.held_object
                except Exception:
                    pass
                if isinstance(d, InternalDependency) or not isinstance(d, Dependency):
                    raise InterpreterException('Dependencies must be external dependencies')
                final_deps.append(d)
            deps = final_deps
        return deps

    def alignment_method(self, args, kwargs):
        if len(args) != 1:
            raise InterpreterException('Alignment method takes exactly one positional argument.')
        check_stringlist(args)
        typename = args[0]
        prefix = kwargs.get('prefix', '')
        if not isinstance(prefix, str):
            # The message used to say 'sizeof' here; corrected to this method.
            raise InterpreterException('Prefix argument of alignment must be a string.')
        # NOTE(review): unlike the other checks this uses the raw 'args'
        # kwarg instead of determine_args() -- presumably intentional, verify.
        extra_args = mesonlib.stringlistify(kwargs.get('args', []))
        deps = self.determine_dependencies(kwargs)
        result = self.compiler.alignment(typename, prefix, self.environment, extra_args, deps)
        mlog.log('Checking for alignment of "', mlog.bold(typename), '": ', result, sep='')
        return result

    def run_method(self, args, kwargs):
        """Compile and run a snippet; returns a TryRunResultHolder."""
        if len(args) != 1:
            raise InterpreterException('Run method takes exactly one positional argument.')
        code = args[0]
        if isinstance(code, mesonlib.File):
            code = mesonlib.File.from_absolute_file(
                code.rel_to_builddir(self.environment.source_dir))
        elif not isinstance(code, str):
            raise InvalidArguments('Argument must be string or file.')
        testname = kwargs.get('name', '')
        if not isinstance(testname, str):
            raise InterpreterException('Testname argument must be a string.')
        extra_args = self.determine_args(kwargs)
        deps = self.determine_dependencies(kwargs)
        result = self.compiler.run(code, self.environment, extra_args, deps)
        if len(testname) > 0:
            if not result.compiled:
                h = mlog.red('DID NOT COMPILE')
            elif result.returncode == 0:
                h = mlog.green('YES')
            else:
                h = mlog.red('NO (%d)' % result.returncode)
            mlog.log('Checking if "', mlog.bold(testname), '" runs: ', h, sep='')
        return TryRunResultHolder(result)

    def get_id_method(self, args, kwargs):
        return self.compiler.get_id()

    def symbols_have_underscore_prefix_method(self, args, kwargs):
        '''
        Check if the compiler prefixes _ (underscore) to global C symbols
        See: https://en.wikipedia.org/wiki/Name_mangling#C
        '''
        return self.compiler.symbols_have_underscore_prefix(self.environment)

    def unittest_args_method(self, args, kwargs):
        '''
        This function is deprecated and should not be used.
        It can be removed in a future version of Meson.
        '''
        if not hasattr(self.compiler, 'get_feature_args'):
            raise InterpreterException('This {} compiler has no feature arguments.'.format(self.compiler.get_display_language()))
        return self.compiler.get_feature_args({'unittest': 'true'})

    def has_member_method(self, args, kwargs):
        if len(args) != 2:
            raise InterpreterException('Has_member takes exactly two arguments.')
        check_stringlist(args)
        typename = args[0]
        membername = args[1]
        prefix = kwargs.get('prefix', '')
        if not isinstance(prefix, str):
            raise InterpreterException('Prefix argument of has_member must be a string.')
        extra_args = self.determine_args(kwargs)
        deps = self.determine_dependencies(kwargs)
        # Single-member case delegates to the plural compiler check.
        had = self.compiler.has_members(typename, [membername], prefix,
                                        self.environment, extra_args, deps)
        if had:
            hadtxt = mlog.green('YES')
        else:
            hadtxt = mlog.red('NO')
        mlog.log('Checking whether type "', mlog.bold(typename),
                 '" has member "', mlog.bold(membername), '": ', hadtxt, sep='')
        return had

    def has_members_method(self, args, kwargs):
        check_stringlist(args)
        typename = args[0]
        membernames = args[1:]
        prefix = kwargs.get('prefix', '')
        if not isinstance(prefix, str):
            raise InterpreterException('Prefix argument of has_members must be a string.')
        extra_args = self.determine_args(kwargs)
        deps = self.determine_dependencies(kwargs)
        had = self.compiler.has_members(typename, membernames, prefix,
                                        self.environment, extra_args, deps)
        if had:
            hadtxt = mlog.green('YES')
        else:
            hadtxt = mlog.red('NO')
        members = mlog.bold(', '.join(['"{}"'.format(m) for m in membernames]))
        mlog.log('Checking whether type "', mlog.bold(typename),
                 '" has members ', members, ': ', hadtxt, sep='')
        return had

    def has_function_method(self, args, kwargs):
        if len(args) != 1:
            raise InterpreterException('Has_function takes exactly one argument.')
        check_stringlist(args)
        funcname = args[0]
        prefix = kwargs.get('prefix', '')
        if not isinstance(prefix, str):
            raise InterpreterException('Prefix argument of has_function must be a string.')
        extra_args = self.determine_args(kwargs)
        deps = self.determine_dependencies(kwargs)
        had = self.compiler.has_function(funcname, prefix, self.environment, extra_args, deps)
        if had:
            hadtxt = mlog.green('YES')
        else:
            hadtxt = mlog.red('NO')
        mlog.log('Checking for function "', mlog.bold(funcname), '": ', hadtxt, sep='')
        return had

    def has_type_method(self, args, kwargs):
        if len(args) != 1:
            raise InterpreterException('Has_type takes exactly one argument.')
        check_stringlist(args)
        typename = args[0]
        prefix = kwargs.get('prefix', '')
        if not isinstance(prefix, str):
            raise InterpreterException('Prefix argument of has_type must be a string.')
        extra_args = self.determine_args(kwargs)
        deps = self.determine_dependencies(kwargs)
        had = self.compiler.has_type(typename, prefix, self.environment, extra_args, deps)
        if had:
            hadtxt = mlog.green('YES')
        else:
            hadtxt = mlog.red('NO')
        mlog.log('Checking for type "', mlog.bold(typename), '": ', hadtxt, sep='')
        return had

    def compute_int_method(self, args, kwargs):
        """Cross-compilation-safe evaluation of an integer expression."""
        if len(args) != 1:
            raise InterpreterException('Compute_int takes exactly one argument.')
        check_stringlist(args)
        expression = args[0]
        prefix = kwargs.get('prefix', '')
        l = kwargs.get('low', -1024)
        h = kwargs.get('high', 1024)
        guess = kwargs.get('guess', None)
        if not isinstance(prefix, str):
            raise InterpreterException('Prefix argument of compute_int must be a string.')
        if not isinstance(l, int):
            raise InterpreterException('Low argument of compute_int must be an int.')
        if not isinstance(h, int):
            raise InterpreterException('High argument of compute_int must be an int.')
        if guess is not None and not isinstance(guess, int):
            raise InterpreterException('Guess argument of compute_int must be an int.')
        extra_args = self.determine_args(kwargs)
        deps = self.determine_dependencies(kwargs)
        res = self.compiler.compute_int(expression, l, h, guess, prefix, self.environment, extra_args, deps)
        mlog.log('Computing int of "%s": %d' % (expression, res))
        return res

    def sizeof_method(self, args, kwargs):
        if len(args) != 1:
            raise InterpreterException('Sizeof takes exactly one argument.')
        check_stringlist(args)
        element = args[0]
        prefix = kwargs.get('prefix', '')
        if not isinstance(prefix, str):
            raise InterpreterException('Prefix argument of sizeof must be a string.')
        extra_args = self.determine_args(kwargs)
        deps = self.determine_dependencies(kwargs)
        esize = self.compiler.sizeof(element, prefix, self.environment, extra_args, deps)
        mlog.log('Checking for size of "%s": %d' % (element, esize))
        return esize

    def get_define_method(self, args, kwargs):
        if len(args) != 1:
            raise InterpreterException('get_define() takes exactly one argument.')
        check_stringlist(args)
        element = args[0]
        prefix = kwargs.get('prefix', '')
        if not isinstance(prefix, str):
            raise InterpreterException('Prefix argument of get_define() must be a string.')
        extra_args = self.determine_args(kwargs)
        deps = self.determine_dependencies(kwargs)
        value = self.compiler.get_define(element, prefix, self.environment, extra_args, deps)
        mlog.log('Fetching value of define "%s": %s' % (element, value))
        return value

    def compiles_method(self, args, kwargs):
        if len(args) != 1:
            raise InterpreterException('compiles method takes exactly one argument.')
        code = args[0]
        if isinstance(code, mesonlib.File):
            code = mesonlib.File.from_absolute_file(
                code.rel_to_builddir(self.environment.source_dir))
        elif not isinstance(code, str):
            raise InvalidArguments('Argument must be string or file.')
        testname = kwargs.get('name', '')
        if not isinstance(testname, str):
            raise InterpreterException('Testname argument must be a string.')
        extra_args = self.determine_args(kwargs)
        deps = self.determine_dependencies(kwargs)
        result = self.compiler.compiles(code, self.environment, extra_args, deps)
        if len(testname) > 0:
            if result:
                h = mlog.green('YES')
            else:
                h = mlog.red('NO')
            mlog.log('Checking if "', mlog.bold(testname), '" compiles: ', h, sep='')
        return result

    def links_method(self, args, kwargs):
        if len(args) != 1:
            raise InterpreterException('links method takes exactly one argument.')
        code = args[0]
        if isinstance(code, mesonlib.File):
            code = mesonlib.File.from_absolute_file(
                code.rel_to_builddir(self.environment.source_dir))
        elif not isinstance(code, str):
            raise InvalidArguments('Argument must be string or file.')
        testname = kwargs.get('name', '')
        if not isinstance(testname, str):
            raise InterpreterException('Testname argument must be a string.')
        extra_args = self.determine_args(kwargs)
        deps = self.determine_dependencies(kwargs)
        result = self.compiler.links(code, self.environment, extra_args, deps)
        if len(testname) > 0:
            if result:
                h = mlog.green('YES')
            else:
                h = mlog.red('NO')
            mlog.log('Checking if "', mlog.bold(testname), '" links: ', h, sep='')
        return result

    def has_header_method(self, args, kwargs):
        if len(args) != 1:
            raise InterpreterException('has_header method takes exactly one argument.')
        check_stringlist(args)
        hname = args[0]
        prefix = kwargs.get('prefix', '')
        if not isinstance(prefix, str):
            raise InterpreterException('Prefix argument of has_header must be a string.')
        extra_args = self.determine_args(kwargs)
        deps = self.determine_dependencies(kwargs)
        haz = self.compiler.has_header(hname, prefix, self.environment, extra_args, deps)
        if haz:
            h = mlog.green('YES')
        else:
            h = mlog.red('NO')
        mlog.log('Has header "%s":' % hname, h)
        return haz

    def has_header_symbol_method(self, args, kwargs):
        if len(args) != 2:
            raise InterpreterException('has_header_symbol method takes exactly two arguments.')
        check_stringlist(args)
        hname = args[0]
        symbol = args[1]
        prefix = kwargs.get('prefix', '')
        if not isinstance(prefix, str):
            raise InterpreterException('Prefix argument of has_header_symbol must be a string.')
        extra_args = self.determine_args(kwargs)
        deps = self.determine_dependencies(kwargs)
        haz = self.compiler.has_header_symbol(hname, symbol, prefix, self.environment, extra_args, deps)
        if haz:
            h = mlog.green('YES')
        else:
            h = mlog.red('NO')
        mlog.log('Header <{0}> has symbol "{1}":'.format(hname, symbol), h)
        return haz

    def find_library_method(self, args, kwargs):
        # TODO add dependencies support?
        if len(args) != 1:
            raise InterpreterException('find_library method takes one argument.')
        libname = args[0]
        if not isinstance(libname, str):
            raise InterpreterException('Library name not a string.')
        required = kwargs.get('required', True)
        if not isinstance(required, bool):
            raise InterpreterException('required must be boolean.')
        search_dirs = mesonlib.stringlistify(kwargs.get('dirs', []))
        for i in search_dirs:
            if not os.path.isabs(i):
                raise InvalidCode('Search directory %s is not an absolute path.' % i)
        linkargs = self.compiler.find_library(libname, self.environment, search_dirs)
        if required and not linkargs:
            raise InterpreterException('{} library {!r} not found'.format(self.compiler.get_display_language(), libname))
        lib = dependencies.ExternalLibrary(libname, linkargs, self.environment,
                                           self.compiler.language)
        return ExternalLibraryHolder(lib)

    def has_argument_method(self, args, kwargs):
        args = mesonlib.stringlistify(args)
        if len(args) != 1:
            raise InterpreterException('Has_arg takes exactly one argument.')
        result = self.compiler.has_argument(args[0], self.environment)
        if result:
            h = mlog.green('YES')
        else:
            h = mlog.red('NO')
        mlog.log('Compiler for {} supports argument {}:'.format(self.compiler.get_display_language(), args[0]), h)
        return result

    def has_multi_arguments_method(self, args, kwargs):
        args = mesonlib.stringlistify(args)
        result = self.compiler.has_multi_arguments(args, self.environment)
        if result:
            h = mlog.green('YES')
        else:
            h = mlog.red('NO')
        mlog.log(
            'Compiler for {} supports arguments {}:'.format(
                self.compiler.get_display_language(), ' '.join(args)),
            h)
        return result

    def get_supported_arguments_method(self, args, kwargs):
        """Return the subset of the given arguments the compiler accepts."""
        args = mesonlib.stringlistify(args)
        result = self.compiler.get_supported_arguments(args, self.environment)
        if len(result) == len(args):
            h = mlog.green('YES')
        elif len(result) > 0:
            h = mlog.yellow('SOME')
        else:
            h = mlog.red('NO')
        mlog.log(
            'Compiler for {} supports arguments {}:'.format(
                self.compiler.get_display_language(), ' '.join(args)),
            h)
        return result

    def first_supported_argument_method(self, args, kwargs):
        for i in mesonlib.stringlistify(args):
            if self.compiler.has_argument(i, self.environment):
                mlog.log('First supported argument:', mlog.bold(i))
                return [i]
        mlog.log('First supported argument:', mlog.red('None'))
        return []
+
# Object passed to extension-module methods: an immutable snapshot of the
# interpreter state a module is allowed to read.
ModuleState = namedtuple('ModuleState', [
    'build_to_src', 'subproject', 'subdir', 'current_lineno', 'environment',
    'project_name', 'project_version', 'backend', 'compilers', 'targets',
    'data', 'headers', 'man', 'global_args', 'project_args', 'build_machine',
    'host_machine', 'target_machine'])
+
class ModuleHolder(InterpreterObject, ObjectHolder):
    """Wraps a loaded extension module and dispatches method calls into it."""
    def __init__(self, modname, module, interpreter):
        InterpreterObject.__init__(self)
        ObjectHolder.__init__(self, module)
        self.modname = modname
        self.interpreter = interpreter

    def method_call(self, method_name, args, kwargs):
        """Invoke a module method with a read-only snapshot of interpreter state.

        Raises InvalidArguments for unknown or private (underscore-prefixed)
        names, and InterpreterException if a non-snippet method mutated the
        target list.
        """
        try:
            fn = getattr(self.held_object, method_name)
        except AttributeError:
            raise InvalidArguments('Module %s does not have method %s.' % (self.modname, method_name))
        if method_name.startswith('_'):
            raise InvalidArguments('Function {!r} in module {!r} is private.'.format(method_name, self.modname))
        # This is not 100% reliable but we can't use hash()
        # because the Build object contains dicts and lists.
        num_targets = len(self.interpreter.build.targets)
        state = ModuleState(
            build_to_src=os.path.relpath(self.interpreter.environment.get_source_dir(),
                                         self.interpreter.environment.get_build_dir()),
            subproject=self.interpreter.subproject,
            subdir=self.interpreter.subdir,
            current_lineno=self.interpreter.current_lineno,
            environment=self.interpreter.environment,
            project_name=self.interpreter.build.project_name,
            project_version=self.interpreter.build.dep_manifest[self.interpreter.active_projectname],
            # The backend object is under-used right now, but we will need it:
            # https://github.com/mesonbuild/meson/issues/1419
            backend=self.interpreter.backend,
            compilers=self.interpreter.build.compilers,
            targets=self.interpreter.build.targets,
            data=self.interpreter.build.data,
            headers=self.interpreter.build.get_headers(),
            man=self.interpreter.build.get_man(),
            global_args=self.interpreter.build.global_args,
            project_args=self.interpreter.build.projects_args.get(self.interpreter.subproject, {}),
            build_machine=self.interpreter.builtin['build_machine'].held_object,
            host_machine=self.interpreter.builtin['host_machine'].held_object,
            target_machine=self.interpreter.builtin['target_machine'].held_object,
        )
        # Snippet methods get full interpreter access; regular methods only
        # see the read-only state and must not create targets themselves.
        if self.held_object.is_snippet(method_name):
            value = fn(self.interpreter, state, args, kwargs)
            return self.interpreter.holderify(value)
        else:
            value = fn(state, args, kwargs)
            if num_targets != len(self.interpreter.build.targets):
                raise InterpreterException('Extension module altered internal state illegally.')
            return self.interpreter.module_method_callback(value)
+
+class MesonMain(InterpreterObject):
+ def __init__(self, build, interpreter):
+ InterpreterObject.__init__(self)
+ self.build = build
+ self.interpreter = interpreter
+ self._found_source_scripts = {}
+ self.methods.update({'get_compiler': self.get_compiler_method,
+ 'is_cross_build': self.is_cross_build_method,
+ 'has_exe_wrapper': self.has_exe_wrapper_method,
+ 'is_unity': self.is_unity_method,
+ 'is_subproject': self.is_subproject_method,
+ 'current_source_dir': self.current_source_dir_method,
+ 'current_build_dir': self.current_build_dir_method,
+ 'source_root': self.source_root_method,
+ 'build_root': self.build_root_method,
+ 'add_install_script': self.add_install_script_method,
+ 'add_postconf_script': self.add_postconf_script_method,
+ 'install_dependency_manifest': self.install_dependency_manifest_method,
+ 'project_version': self.project_version_method,
+ 'version': self.version_method,
+ 'project_name': self.project_name_method,
+ 'get_cross_property': self.get_cross_property_method,
+ 'backend': self.backend_method,
+ })
+
+ def _find_source_script(self, name, args):
+ # Prefer scripts in the current source directory
+ search_dir = os.path.join(self.interpreter.environment.source_dir,
+ self.interpreter.subdir)
+ key = (name, search_dir)
+ if key in self._found_source_scripts:
+ found = self._found_source_scripts[key]
+ else:
+ found = dependencies.ExternalProgram(name, search_dir=search_dir)
+ if found.found():
+ self._found_source_scripts[key] = found
+ else:
+ m = 'Script or command {!r} not found or not executable'
+ raise InterpreterException(m.format(name))
+ return build.RunScript(found.get_command(), args)
+
+ def add_install_script_method(self, args, kwargs):
+ if len(args) < 1:
+ raise InterpreterException('add_install_script takes one or more arguments')
+ check_stringlist(args, 'add_install_script args must be strings')
+ script = self._find_source_script(args[0], args[1:])
+ self.build.install_scripts.append(script)
+
+ def add_postconf_script_method(self, args, kwargs):
+ if len(args) < 1:
+ raise InterpreterException('add_postconf_script takes one or more arguments')
+ check_stringlist(args, 'add_postconf_script arguments must be strings')
+ script = self._find_source_script(args[0], args[1:])
+ self.build.postconf_scripts.append(script)
+
+ def current_source_dir_method(self, args, kwargs):
+ src = self.interpreter.environment.source_dir
+ sub = self.interpreter.subdir
+ if sub == '':
+ return src
+ return os.path.join(src, sub)
+
+ def current_build_dir_method(self, args, kwargs):
+ src = self.interpreter.environment.build_dir
+ sub = self.interpreter.subdir
+ if sub == '':
+ return src
+ return os.path.join(src, sub)
+
+ def backend_method(self, args, kwargs):
+ return self.interpreter.backend.name
+
+ def source_root_method(self, args, kwargs):
+ return self.interpreter.environment.source_dir
+
+ def build_root_method(self, args, kwargs):
+ return self.interpreter.environment.build_dir
+
+ def has_exe_wrapper_method(self, args, kwargs):
+ if self.is_cross_build_method(None, None) and \
+ 'binaries' in self.build.environment.cross_info.config and \
+ self.build.environment.cross_info.need_exe_wrapper():
+ exe_wrap = self.build.environment.cross_info.config['binaries'].get('exe_wrapper', None)
+ if exe_wrap is None:
+ return False
+ # We return True when exe_wrap is defined, when it's not needed, and
+ # when we're compiling natively. The last two are semantically confusing.
+ # Need to revisit this.
+ return True
+
+ def is_cross_build_method(self, args, kwargs):
+ return self.build.environment.is_cross_build()
+
+ def get_compiler_method(self, args, kwargs):
+ if len(args) != 1:
+ raise InterpreterException('get_compiler_method must have one and only one argument.')
+ cname = args[0]
+ native = kwargs.get('native', None)
+ if native is None:
+ if self.build.environment.is_cross_build():
+ native = False
+ else:
+ native = True
+ if not isinstance(native, bool):
+ raise InterpreterException('Type of "native" must be a boolean.')
+ if native:
+ clist = self.build.compilers
+ else:
+ clist = self.build.cross_compilers
+ if cname in clist:
+ return CompilerHolder(clist[cname], self.build.environment)
+ raise InterpreterException('Tried to access compiler for unspecified language "%s".' % cname)
+
+ def is_unity_method(self, args, kwargs):
+ optval = self.interpreter.environment.coredata.get_builtin_option('unity')
+ if optval == 'on' or (optval == 'subprojects' and self.interpreter.subproject != ''):
+ return True
+ return False
+
+ def is_subproject_method(self, args, kwargs):
+ return self.interpreter.is_subproject()
+
+ def install_dependency_manifest_method(self, args, kwargs):
+ if len(args) != 1:
+ raise InterpreterException('Must specify manifest install file name')
+ if not isinstance(args[0], str):
+ raise InterpreterException('Argument must be a string.')
+ self.build.dep_manifest_name = args[0]
+
+ def project_version_method(self, args, kwargs):
+ return self.build.dep_manifest[self.interpreter.active_projectname]['version']
+
    def version_method(self, args, kwargs):
        # Version of Meson itself, not of the project being configured.
        return coredata.version
+
    def project_name_method(self, args, kwargs):
        # Name passed to the most recent project() call.
        return self.interpreter.active_projectname
+
+ def get_cross_property_method(self, args, kwargs):
+ if len(args) < 1 or len(args) > 2:
+ raise InterpreterException('Must have one or two arguments.')
+ propname = args[0]
+ if not isinstance(propname, str):
+ raise InterpreterException('Property name must be string.')
+ try:
+ props = self.interpreter.environment.cross_info.get_properties()
+ return props[propname]
+ except Exception:
+ if len(args) == 2:
+ return args[1]
+ raise InterpreterException('Unknown cross property: %s.' % propname)
+
# Keyword-argument whitelists for the build-definition functions. These are
# plain sets; the permitted_kwargs table at the bottom maps each function
# name to the set of keywords it accepts.

pch_kwargs = {'c_pch', 'cpp_pch'}

# Per-language argument keywords shared by every build target type.
lang_arg_kwargs = {
    'c_args',
    'cpp_args',
    'd_args',
    'd_import_dirs',
    'd_unittest',
    'd_module_versions',
    'fortran_args',
    'java_args',
    'objc_args',
    'objcpp_args',
    'rust_args',
    'vala_args',
    'cs_args',
}

vala_kwargs = {'vala_header', 'vala_gir', 'vala_vapi'}
rust_kwargs = {'rust_crate_type'}
cs_kwargs = {'resources', 'cs_args'}

# Generic keywords accepted by every build-target function.
buildtarget_kwargs = {
    'build_by_default',
    'build_rpath',
    'dependencies',
    'extra_files',
    'gui_app',
    'link_with',
    'link_whole',
    'link_args',
    'link_depends',
    'implicit_include_directories',
    'include_directories',
    'install',
    'install_rpath',
    'install_dir',
    'name_prefix',
    'name_suffix',
    'native',
    'objects',
    'override_options',
    'pic',
    'sources',
    'vs_module_defs',
}

build_target_common_kwargs = (
    buildtarget_kwargs |
    lang_arg_kwargs |
    pch_kwargs |
    vala_kwargs |
    rust_kwargs |
    cs_kwargs)

# Specializations per target type.
exe_kwargs = build_target_common_kwargs | {'implib'}
shlib_kwargs = build_target_common_kwargs | {'version', 'soversion'}
shmod_kwargs = shlib_kwargs
stlib_kwargs = shlib_kwargs

jar_kwargs = exe_kwargs | {'main_class'}

build_target_kwargs = exe_kwargs | {'target_type'}

permitted_kwargs = {'add_global_arguments': {'language'},
                    'add_global_link_arguments': {'language'},
                    'add_project_link_arguments': {'language'},
                    'add_languages': {'required'},
                    'add_project_arguments': {'language'},
                    'add_test_setup': {'exe_wrapper', 'gdb', 'timeout_multiplier', 'env'},
                    'benchmark': {'args', 'env', 'should_fail', 'timeout', 'workdir', 'suite'},
                    'build_target': build_target_kwargs,
                    'configure_file': {'input', 'output', 'configuration', 'command', 'install_dir', 'capture', 'install'},
                    'custom_target': {'input', 'output', 'command', 'install', 'install_dir', 'build_always', 'capture', 'depends', 'depend_files', 'depfile', 'build_by_default'},
                    'dependency': {'default_options', 'fallback', 'language', 'method', 'modules', 'optional_modules', 'native', 'required', 'static', 'version'},
                    'declare_dependency': {'include_directories', 'link_with', 'sources', 'dependencies', 'compile_args', 'link_args', 'version'},
                    'executable': exe_kwargs,
                    'find_program': {'required', 'native'},
                    'generator': {'arguments', 'output', 'depfile', 'capture'},
                    'include_directories': {'is_system'},
                    'install_data': {'install_dir', 'install_mode', 'sources'},
                    'install_headers': {'install_dir', 'subdir'},
                    'install_man': {'install_dir'},
                    'install_subdir': {'exclude_files', 'exclude_directories', 'install_dir', 'install_mode'},
                    'jar': jar_kwargs,
                    'project': {'version', 'meson_version', 'default_options', 'license', 'subproject_dir'},
                    'run_target': {'command', 'depends'},
                    'shared_library': shlib_kwargs,
                    'shared_module': shmod_kwargs,
                    'static_library': stlib_kwargs,
                    'subdir': {'if_found'},
                    'subproject': {'version', 'default_options'},
                    'test': {'args', 'env', 'is_parallel', 'should_fail', 'timeout', 'workdir', 'suite'},
                    'vcs_tag': {'input', 'output', 'fallback', 'command', 'replace_string'},
                    }
+
+
+class Interpreter(InterpreterBase):
+
+ def __init__(self, build, backend, subproject='', subdir='', subproject_dir='subprojects',
+ default_project_options=[]):
+ super().__init__(build.environment.get_source_dir(), subdir)
+ self.an_unpicklable_object = mesonlib.an_unpicklable_object
+ self.build = build
+ self.environment = build.environment
+ self.coredata = self.environment.get_coredata()
+ self.backend = backend
+ self.subproject = subproject
+ # Subproject directory is usually the name of the subproject, but can
+ # be different for dependencies provided by wrap files.
+ self.subproject_directory_name = subdir.split(os.path.sep)[-1]
+ self.subproject_dir = subproject_dir
+ self.option_file = os.path.join(self.source_root, self.subdir, 'meson_options.txt')
+ self.load_root_meson_file()
+ self.sanity_check_ast()
+ self.builtin.update({'meson': MesonMain(build, self)})
+ self.generators = []
+ self.visited_subdirs = {}
+ self.project_args_frozen = False
+ self.global_args_frozen = False # implies self.project_args_frozen
+ self.subprojects = {}
+ self.subproject_stack = []
+ self.default_project_options = default_project_options[:] # Passed from the outside, only used in subprojects.
+ self.build_func_dict()
+ # build_def_files needs to be defined before parse_project is called
+ self.build_def_files = [os.path.join(self.subdir, environment.build_filename)]
+ self.parse_project()
+ self.builtin['build_machine'] = BuildMachine(self.coredata.compilers)
+ if not self.build.environment.is_cross_build():
+ self.builtin['host_machine'] = self.builtin['build_machine']
+ self.builtin['target_machine'] = self.builtin['build_machine']
+ else:
+ cross_info = self.build.environment.cross_info
+ if cross_info.has_host():
+ self.builtin['host_machine'] = CrossMachineInfo(cross_info.config['host_machine'])
+ else:
+ self.builtin['host_machine'] = self.builtin['build_machine']
+ if cross_info.has_target():
+ self.builtin['target_machine'] = CrossMachineInfo(cross_info.config['target_machine'])
+ else:
+ self.builtin['target_machine'] = self.builtin['host_machine']
+
+ def build_func_dict(self):
+ self.funcs.update({'add_global_arguments': self.func_add_global_arguments,
+ 'add_project_arguments': self.func_add_project_arguments,
+ 'add_global_link_arguments': self.func_add_global_link_arguments,
+ 'add_project_link_arguments': self.func_add_project_link_arguments,
+ 'add_test_setup': self.func_add_test_setup,
+ 'add_languages': self.func_add_languages,
+ 'assert': self.func_assert,
+ 'benchmark': self.func_benchmark,
+ 'build_target': self.func_build_target,
+ 'configuration_data': self.func_configuration_data,
+ 'configure_file': self.func_configure_file,
+ 'custom_target': self.func_custom_target,
+ 'declare_dependency': self.func_declare_dependency,
+ 'dependency': self.func_dependency,
+ 'disabler': self.func_disabler,
+ 'environment': self.func_environment,
+ 'error': self.func_error,
+ 'executable': self.func_executable,
+ 'generator': self.func_generator,
+ 'gettext': self.func_gettext,
+ 'get_option': self.func_get_option,
+ 'get_variable': self.func_get_variable,
+ 'files': self.func_files,
+ 'find_library': self.func_find_library,
+ 'find_program': self.func_find_program,
+ 'include_directories': self.func_include_directories,
+ 'import': self.func_import,
+ 'install_data': self.func_install_data,
+ 'install_headers': self.func_install_headers,
+ 'install_man': self.func_install_man,
+ 'install_subdir': self.func_install_subdir,
+ 'is_variable': self.func_is_variable,
+ 'jar': self.func_jar,
+ 'join_paths': self.func_join_paths,
+ 'library': self.func_library,
+ 'message': self.func_message,
+ 'warning': self.func_warning,
+ 'option': self.func_option,
+ 'project': self.func_project,
+ 'run_target': self.func_run_target,
+ 'run_command': self.func_run_command,
+ 'set_variable': self.func_set_variable,
+ 'subdir': self.func_subdir,
+ 'subproject': self.func_subproject,
+ 'shared_library': self.func_shared_lib,
+ 'shared_module': self.func_shared_module,
+ 'static_library': self.func_static_lib,
+ 'test': self.func_test,
+ 'vcs_tag': self.func_vcs_tag,
+ })
+
+ def holderify(self, item):
+ if isinstance(item, list):
+ return [self.holderify(x) for x in item]
+ if isinstance(item, build.CustomTarget):
+ return CustomTargetHolder(item, self)
+ elif isinstance(item, (int, str)) or item is None:
+ return item
+ elif isinstance(item, build.Executable):
+ return ExecutableHolder(item, self)
+ elif isinstance(item, build.GeneratedList):
+ return GeneratedListHolder(item)
+ elif isinstance(item, build.RunTarget):
+ raise RuntimeError('This is not a pipe.')
+ elif isinstance(item, build.RunScript):
+ raise RuntimeError('Do not do this.')
+ elif isinstance(item, build.Data):
+ return DataHolder(item)
+ elif isinstance(item, dependencies.InternalDependency):
+ return InternalDependencyHolder(item)
+ elif isinstance(item, dependencies.ExternalProgram):
+ return ExternalProgramHolder(item)
+ elif hasattr(item, 'held_object'):
+ return item
+ else:
+ raise InterpreterException('Module returned a value of unknown type.')
+
+ def process_new_values(self, invalues):
+ invalues = listify(invalues)
+ for v in invalues:
+ if isinstance(v, (build.BuildTarget, build.CustomTarget, build.RunTarget)):
+ self.add_target(v.name, v)
+ elif isinstance(v, list):
+ self.module_method_callback(v)
+ elif isinstance(v, build.GeneratedList):
+ pass
+ elif isinstance(v, build.RunScript):
+ self.build.install_scripts.append(v)
+ elif isinstance(v, build.Data):
+ self.build.data.append(v)
+ elif isinstance(v, dependencies.ExternalProgram):
+ return ExternalProgramHolder(v)
+ elif isinstance(v, dependencies.InternalDependency):
+ # FIXME: This is special cased and not ideal:
+ # The first source is our new VapiTarget, the rest are deps
+ self.process_new_values(v.sources[0])
+ elif hasattr(v, 'held_object'):
+ pass
+ else:
+ raise InterpreterException('Module returned a value of unknown type.')
+
+ def module_method_callback(self, return_object):
+ if not isinstance(return_object, ModuleReturnValue):
+ raise InterpreterException('Bug in module, it returned an invalid object')
+ invalues = return_object.new_objects
+ self.process_new_values(invalues)
+ return self.holderify(return_object.return_value)
+
    def get_build_def_files(self):
        # Files (meson.build, scripts, subproject definitions) whose change
        # should trigger a reconfigure.
        return self.build_def_files
+
    def get_variables(self):
        # Mapping of all variables assigned in the build definition so far.
        return self.variables
+
+ def check_cross_stdlibs(self):
+ if self.build.environment.is_cross_build():
+ cross_info = self.build.environment.cross_info
+ for l, c in self.build.cross_compilers.items():
+ try:
+ di = mesonlib.stringlistify(cross_info.get_stdlib(l))
+ if len(di) != 2:
+ raise InterpreterException('Stdlib definition for %s should have exactly two elements.'
+ % l)
+ projname, depname = di
+ subproj = self.do_subproject(projname, {})
+ self.build.cross_stdlibs[l] = subproj.get_variable_method([depname], {})
+ except KeyError:
+ pass
+
+ @stringArgs
+ @noKwargs
+ def func_import(self, node, args, kwargs):
+ if len(args) != 1:
+ raise InvalidCode('Import takes one argument.')
+ modname = args[0]
+ if modname.startswith('unstable-'):
+ plainname = modname.split('-', 1)[1]
+ mlog.warning('Module %s has no backwards or forwards compatibility and might not exist in future releases.' % modname)
+ modname = 'unstable_' + plainname
+ if modname not in self.environment.coredata.modules:
+ try:
+ module = importlib.import_module('mesonbuild.modules.' + modname)
+ except ImportError:
+ raise InvalidArguments('Module "%s" does not exist' % (modname, ))
+ self.environment.coredata.modules[modname] = module.initialize()
+ return ModuleHolder(modname, self.environment.coredata.modules[modname], self)
+
+ @stringArgs
+ @noKwargs
+ def func_files(self, node, args, kwargs):
+ return [mesonlib.File.from_source_file(self.environment.source_dir, self.subdir, fname) for fname in args]
+
+ @permittedKwargs(permitted_kwargs['declare_dependency'])
+ @noPosargs
+ def func_declare_dependency(self, node, args, kwargs):
+ version = kwargs.get('version', self.project_version)
+ if not isinstance(version, str):
+ raise InterpreterException('Version must be a string.')
+ incs = extract_as_list(kwargs, 'include_directories', unholder=True)
+ libs = extract_as_list(kwargs, 'link_with', unholder=True)
+ sources = extract_as_list(kwargs, 'sources')
+ sources = listify(self.source_strings_to_files(sources), unholder=True)
+ deps = extract_as_list(kwargs, 'dependencies', unholder=True)
+ compile_args = mesonlib.stringlistify(kwargs.get('compile_args', []))
+ link_args = mesonlib.stringlistify(kwargs.get('link_args', []))
+ final_deps = []
+ for d in deps:
+ try:
+ d = d.held_object
+ except Exception:
+ pass
+ if not isinstance(d, (dependencies.Dependency, dependencies.ExternalLibrary, dependencies.InternalDependency)):
+ raise InterpreterException('Dependencies must be external deps')
+ final_deps.append(d)
+ for l in libs:
+ if isinstance(l, dependencies.Dependency):
+ raise InterpreterException('''Entries in "link_with" may only be self-built targets,
+external dependencies (including libraries) must go to "dependencies".''')
+ dep = dependencies.InternalDependency(version, incs, compile_args,
+ link_args, libs, sources, final_deps)
+ return DependencyHolder(dep)
+
+ @noKwargs
+ def func_assert(self, node, args, kwargs):
+ if len(args) != 2:
+ raise InterpreterException('Assert takes exactly two arguments')
+ value, message = args
+ if not isinstance(value, bool):
+ raise InterpreterException('Assert value not bool.')
+ if not isinstance(message, str):
+ raise InterpreterException('Assert message not a string.')
+ if not value:
+ raise InterpreterException('Assert failed: ' + message)
+
+ def validate_arguments(self, args, argcount, arg_types):
+ if argcount is not None:
+ if argcount != len(args):
+ raise InvalidArguments('Expected %d arguments, got %d.' %
+ (argcount, len(args)))
+ for i in range(min(len(args), len(arg_types))):
+ wanted = arg_types[i]
+ actual = args[i]
+ if wanted is not None:
+ if not isinstance(actual, wanted):
+ raise InvalidArguments('Incorrect argument type.')
+
    @noKwargs
    def func_run_command(self, node, args, kwargs):
        # Thin wrapper; run_command_impl also serves callers that need the
        # in_builddir variant (e.g. vcs_tag).
        return self.run_command_impl(node, args, kwargs)
+
+ def run_command_impl(self, node, args, kwargs, in_builddir=False):
+ if len(args) < 1:
+ raise InterpreterException('Not enough arguments')
+ cmd = args[0]
+ cargs = args[1:]
+ srcdir = self.environment.get_source_dir()
+ builddir = self.environment.get_build_dir()
+ m = 'must be a string, or the output of find_program(), files(), or ' \
+ 'configure_file(); not {!r}'
+ if isinstance(cmd, ExternalProgramHolder):
+ cmd = cmd.held_object
+ else:
+ if isinstance(cmd, mesonlib.File):
+ cmd = cmd.absolute_path(srcdir, builddir)
+ elif not isinstance(cmd, str):
+ raise InterpreterException('First argument ' + m.format(cmd))
+ # Prefer scripts in the current source directory
+ search_dir = os.path.join(srcdir, self.subdir)
+ prog = ExternalProgram(cmd, silent=True, search_dir=search_dir)
+ if not prog.found():
+ raise InterpreterException('Program or command {!r} not found'
+ 'or not executable'.format(cmd))
+ cmd = prog
+ cmd_path = os.path.relpath(cmd.get_path(), start=srcdir)
+ if not cmd_path.startswith('..') and cmd_path not in self.build_def_files:
+ self.build_def_files.append(cmd_path)
+ expanded_args = []
+ for a in listify(cargs):
+ if isinstance(a, str):
+ expanded_args.append(a)
+ elif isinstance(a, mesonlib.File):
+ expanded_args.append(a.absolute_path(srcdir, builddir))
+ elif isinstance(a, ExternalProgramHolder):
+ expanded_args.append(a.held_object.get_path())
+ else:
+ raise InterpreterException('Arguments ' + m.format(a))
+ for a in expanded_args:
+ if not os.path.isabs(a):
+ a = os.path.join(builddir if in_builddir else srcdir, self.subdir, a)
+ if os.path.exists(a):
+ a = os.path.relpath(a, start=srcdir)
+ if not a.startswith('..'):
+ if a not in self.build_def_files:
+ self.build_def_files.append(a)
+ return RunProcess(cmd, expanded_args, srcdir, builddir, self.subdir,
+ self.environment.get_build_command() + ['introspect'], in_builddir)
+
    @stringArgs
    def func_gettext(self, nodes, args, kwargs):
        # Removed builtin; kept only to point users at the i18n module.
        raise InterpreterException('Gettext() function has been moved to module i18n. Import it and use i18n.gettext() instead')
+
    def func_option(self, nodes, args, kwargs):
        # option() is only legal in meson_options.txt, never in meson.build.
        raise InterpreterException('Tried to call option() in build description file. All options must be in the option file.')
+
+ @permittedKwargs(permitted_kwargs['subproject'])
+ @stringArgs
+ def func_subproject(self, nodes, args, kwargs):
+ if len(args) != 1:
+ raise InterpreterException('Subproject takes exactly one argument')
+ dirname = args[0]
+ return self.do_subproject(dirname, kwargs)
+
    def do_subproject(self, dirname, kwargs):
        """Configure the subproject named dirname and return its holder.

        Already-configured subprojects come from cache; recursive inclusion
        raises InvalidCode. The 'version' kwarg, when given, is checked
        against the subproject's own project() version.
        """
        if '/' in dirname or '\\' in dirname:
            raise InterpreterException('Subproject name must not contain a path separator.')
        if dirname in self.subproject_stack:
            fullstack = self.subproject_stack + [dirname]
            incpath = ' => '.join(fullstack)
            raise InvalidCode('Recursive include of subprojects: %s.' % incpath)
        if dirname in self.subprojects:
            return self.subprojects[dirname]
        # The wrap resolver may download the subproject on demand.
        subproject_dir_abs = os.path.join(self.environment.get_source_dir(), self.subproject_dir)
        r = wrap.Resolver(subproject_dir_abs, self.coredata.wrap_mode)
        try:
            resolved = r.resolve(dirname)
        except RuntimeError as e:
            msg = 'Subproject directory {!r} does not exist and cannot be downloaded:\n{}'
            raise InterpreterException(msg.format(os.path.join(self.subproject_dir, dirname), e))
        subdir = os.path.join(self.subproject_dir, resolved)
        os.makedirs(os.path.join(self.build.environment.get_build_dir(), subdir), exist_ok=True)
        # Once any subproject runs, nobody may add global arguments any more.
        self.global_args_frozen = True
        mlog.log('\nExecuting subproject ', mlog.bold(dirname), '.\n', sep='')
        subi = Interpreter(self.build, self.backend, dirname, subdir, self.subproject_dir,
                           mesonlib.stringlistify(kwargs.get('default_options', [])))
        subi.subprojects = self.subprojects

        subi.subproject_stack = self.subproject_stack + [dirname]
        # subi.run() changes the active project name; restore ours afterwards.
        current_active = self.active_projectname
        subi.run()
        if 'version' in kwargs:
            pv = subi.project_version
            wanted = kwargs['version']
            if pv == 'undefined' or not mesonlib.version_compare(pv, wanted):
                raise InterpreterException('Subproject %s version is %s but %s required.' % (dirname, pv, wanted))
        self.active_projectname = current_active
        mlog.log('\nSubproject', mlog.bold(dirname), 'finished.')
        self.build.subprojects[dirname] = subi.project_version
        self.subprojects.update(subi.subprojects)
        self.subprojects[dirname] = SubprojectHolder(subi)
        self.build_def_files += subi.build_def_files
        return self.subprojects[dirname]
+
    @stringArgs
    @noKwargs
    def func_get_option(self, nodes, args, kwargs):
        """Implement get_option(): search every option table in order.

        Lookup order: base options, builtin options, per-compiler options,
        user options (with the subproject prefix when inside one), the
        <lang>_link_args / <lang>_args aliases, and finally the base-option
        defaults. Unknown names raise InterpreterException.
        """
        if len(args) != 1:
            raise InterpreterException('Argument required for get_option.')
        optname = args[0]
        if ':' in optname:
            raise InterpreterException('''Having a colon in option name is forbidden, projects are not allowed
to directly access options of other subprojects.''')
        try:
            return self.environment.get_coredata().base_options[optname].value
        except KeyError:
            pass
        try:
            return self.environment.coredata.get_builtin_option(optname)
        except RuntimeError:
            pass
        try:
            return self.environment.coredata.compiler_options[optname].value
        except KeyError:
            pass
        # User options of a subproject are stored with a 'subproject:' prefix.
        if not coredata.is_builtin_option(optname) and self.is_subproject():
            optname = self.subproject + ':' + optname
        try:
            return self.environment.coredata.user_options[optname].value
        except KeyError:
            pass
        # Compat aliases mapping '<lang>_link_args'/'<lang>_args' to the
        # external argument tables.
        if optname.endswith('_link_args'):
            try:
                lang = optname[:-10]
                return self.coredata.external_link_args[lang]
            except KeyError:
                pass
        if optname.endswith('_args'):
            try:
                lang = optname[:-5]
                return self.coredata.external_args[lang]
            except KeyError:
                pass
        # Some base options are not defined in some environments, return the default value.
        try:
            return compilers.base_options[optname].value
        except KeyError:
            pass
        raise InterpreterException('Tried to access unknown option "%s".' % optname)
+
+ @noKwargs
+ def func_configuration_data(self, node, args, kwargs):
+ if args:
+ raise InterpreterException('configuration_data takes no arguments')
+ return ConfigurationDataHolder()
+
+ def parse_default_options(self, default_options):
+ default_options = listify(default_options)
+ for option in default_options:
+ if not isinstance(option, str):
+ mlog.debug(option)
+ raise InterpreterException('Default options must be strings')
+ if '=' not in option:
+ raise InterpreterException('All default options must be of type key=value.')
+ key, value = option.split('=', 1)
+ if coredata.is_builtin_option(key):
+ if self.subproject != '':
+ continue # Only the master project is allowed to set global options.
+ if not self.environment.had_argument_for(key):
+ self.coredata.set_builtin_option(key, value)
+ # If this was set on the command line, do not override.
+ else:
+ # Option values set with subproject() default_options override those
+ # set in project() default_options.
+ pref = key + '='
+ for i in self.default_project_options:
+ if i.startswith(pref):
+ option = i
+ break
+ # If we are in a subproject, add the subproject prefix to option
+ # name.
+ if self.subproject != '':
+ option = self.subproject + ':' + option
+ newoptions = [option] + self.environment.cmd_line_options.projectoptions
+ self.environment.cmd_line_options.projectoptions = newoptions
+ # Add options that are only in default_options.
+ for defopt in self.default_project_options:
+ key, value = defopt.split('=')
+ pref = key + '='
+ for i in default_options:
+ if i.startswith(pref):
+ break
+ else:
+ defopt = self.subproject + ':' + defopt
+ newoptions = [defopt] + self.environment.cmd_line_options.projectoptions
+ self.environment.cmd_line_options.projectoptions = newoptions
+
    @stringArgs
    @permittedKwargs(permitted_kwargs['project'])
    def func_project(self, node, args, kwargs):
        """Implement project(): record name/version/license, process option
        files and default options, enforce meson_version, and detect the
        compilers for the listed languages.
        """
        if len(args) < 1:
            raise InvalidArguments('Not enough arguments to project(). Needs at least the project name.')
        proj_name = args[0]
        proj_langs = args[1:]
        if ':' in proj_name:
            raise InvalidArguments("Project name {!r} must not contain ':'".format(proj_name))
        default_options = kwargs.get('default_options', [])
        # Default options are only applied on the first configuration;
        # reconfiguring keeps the already-chosen values.
        if self.environment.first_invocation and (len(default_options) > 0 or
                                                  len(self.default_project_options) > 0):
            self.parse_default_options(default_options)
        if not self.is_subproject():
            self.build.project_name = proj_name
        if os.path.exists(self.option_file):
            oi = optinterpreter.OptionInterpreter(self.subproject,
                                                  self.build.environment.cmd_line_options.projectoptions,
                                                  )
            oi.process(self.option_file)
            self.build.environment.merge_options(oi.options)
        self.active_projectname = proj_name
        self.project_version = kwargs.get('version', 'undefined')
        # The top-level project's version becomes the overall build version.
        if self.build.project_version is None:
            self.build.project_version = self.project_version
        proj_license = mesonlib.stringlistify(kwargs.get('license', 'unknown'))
        self.build.dep_manifest[proj_name] = {'version': self.project_version,
                                              'license': proj_license}
        if self.subproject in self.build.projects:
            raise InvalidCode('Second call to project().')
        # Only the top-level project may customize subproject_dir, and only
        # to a plain directory name.
        if not self.is_subproject() and 'subproject_dir' in kwargs:
            spdirname = kwargs['subproject_dir']
            if '/' in spdirname or '\\' in spdirname:
                raise InterpreterException('Subproject_dir must not contain a path segment.')
            if spdirname.startswith('.'):
                raise InterpreterException('Subproject_dir must not begin with a period.')
            self.subproject_dir = spdirname

        if 'meson_version' in kwargs:
            cv = coredata.version
            pv = kwargs['meson_version']
            if not mesonlib.version_compare(cv, pv):
                raise InterpreterException('Meson version is %s but project requires %s.' % (cv, pv))
        self.build.projects[self.subproject] = proj_name
        mlog.log('Project name: ', mlog.bold(proj_name), sep='')
        self.add_languages(proj_langs, True)
        langs = self.coredata.compilers.keys()
        if 'vala' in langs:
            if 'c' not in langs:
                raise InterpreterException('Compiling Vala requires C. Add C to your project languages and rerun Meson.')
        if not self.is_subproject():
            self.check_cross_stdlibs()
+
+ @permittedKwargs(permitted_kwargs['add_languages'])
+ @stringArgs
+ def func_add_languages(self, node, args, kwargs):
+ return self.add_languages(args, kwargs.get('required', True))
+
+ def get_message_string_arg(self, node):
+ # reduce arguments again to avoid flattening posargs
+ (posargs, _) = self.reduce_arguments(node.args)
+ if len(posargs) != 1:
+ raise InvalidArguments('Expected 1 argument, got %d' % len(posargs))
+
+ arg = posargs[0]
+ if isinstance(arg, list):
+ argstr = stringifyUserArguments(arg)
+ elif isinstance(arg, str):
+ argstr = arg
+ elif isinstance(arg, int):
+ argstr = str(arg)
+ else:
+ raise InvalidArguments('Function accepts only strings, integers, lists and lists thereof.')
+
+ return argstr
+
+ @noKwargs
+ def func_message(self, node, args, kwargs):
+ argstr = self.get_message_string_arg(node)
+ mlog.log(mlog.bold('Message:'), argstr)
+
+ @noKwargs
+ def func_warning(self, node, args, kwargs):
+ argstr = self.get_message_string_arg(node)
+ mlog.warning(argstr)
+
    @noKwargs
    def func_error(self, node, args, kwargs):
        # Implement error(): abort configuration with the given message.
        self.validate_arguments(args, 1, [str])
        raise InterpreterException('Error encountered: ' + args[0])
+
    def detect_compilers(self, lang, need_cross_compiler):
        """Detect, sanity-check and register the compiler(s) for a language.

        Returns (comp, cross_comp); cross_comp is None when no cross
        compiler is needed. Platform-independent languages (Java, C#, Vala)
        reuse the native compiler as the cross one. Also merges the
        compiler's per-language options with any command-line overrides.
        Raises InvalidCode for unknown languages.
        """
        cross_comp = None
        if lang == 'c':
            comp = self.environment.detect_c_compiler(False)
            if need_cross_compiler:
                cross_comp = self.environment.detect_c_compiler(True)
        elif lang == 'cpp':
            comp = self.environment.detect_cpp_compiler(False)
            if need_cross_compiler:
                cross_comp = self.environment.detect_cpp_compiler(True)
        elif lang == 'objc':
            comp = self.environment.detect_objc_compiler(False)
            if need_cross_compiler:
                cross_comp = self.environment.detect_objc_compiler(True)
        elif lang == 'objcpp':
            comp = self.environment.detect_objcpp_compiler(False)
            if need_cross_compiler:
                cross_comp = self.environment.detect_objcpp_compiler(True)
        elif lang == 'java':
            comp = self.environment.detect_java_compiler()
            if need_cross_compiler:
                cross_comp = comp # Java is platform independent.
        elif lang == 'cs':
            comp = self.environment.detect_cs_compiler()
            if need_cross_compiler:
                cross_comp = comp # C# is platform independent.
        elif lang == 'vala':
            comp = self.environment.detect_vala_compiler()
            if need_cross_compiler:
                cross_comp = comp # Vala compiles to platform-independent C
        elif lang == 'd':
            comp = self.environment.detect_d_compiler(False)
            if need_cross_compiler:
                cross_comp = self.environment.detect_d_compiler(True)
        elif lang == 'rust':
            comp = self.environment.detect_rust_compiler()
            if need_cross_compiler:
                cross_comp = comp # FIXME, not correct.
        elif lang == 'fortran':
            comp = self.environment.detect_fortran_compiler(False)
            if need_cross_compiler:
                cross_comp = self.environment.detect_fortran_compiler(True)
        elif lang == 'swift':
            comp = self.environment.detect_swift_compiler()
            if need_cross_compiler:
                raise InterpreterException('Cross compilation with Swift is not working yet.')
                # cross_comp = self.environment.detect_fortran_compiler(True)
        else:
            raise InvalidCode('Tried to use unknown language "%s".' % lang)
        comp.sanity_check(self.environment.get_scratch_dir(), self.environment)
        self.coredata.compilers[lang] = comp
        # Native compiler always exist so always add its options.
        new_options = comp.get_options()
        if cross_comp is not None:
            cross_comp.sanity_check(self.environment.get_scratch_dir(), self.environment)
            self.coredata.cross_compilers[lang] = cross_comp
            new_options.update(cross_comp.get_options())
        # Apply any -D<lang>_...=value command-line overrides to the freshly
        # detected options, then let already-stored options win on conflict.
        optprefix = lang + '_'
        for i in new_options:
            if not i.startswith(optprefix):
                raise InterpreterException('Internal error, %s has incorrect prefix.' % i)
            cmd_prefix = i + '='
            for cmd_arg in self.environment.cmd_line_options.projectoptions:
                if cmd_arg.startswith(cmd_prefix):
                    value = cmd_arg.split('=', 1)[1]
                    new_options[i].set_value(value)
        new_options.update(self.coredata.compiler_options)
        self.coredata.compiler_options = new_options
        return comp, cross_comp
+
    def add_languages(self, args, required):
        """Detect (or fetch cached) compilers for every language in args.

        Returns True when all languages were found. When required is False,
        a missing compiler only logs a message and turns the result False
        instead of raising.
        """
        success = True
        need_cross_compiler = self.environment.is_cross_build() and self.environment.cross_info.need_cross_compiler()
        for lang in sorted(args, key=compilers.sort_clike):
            lang = lang.lower()
            if lang in self.coredata.compilers:
                # Cached from a previous project()/add_languages() call.
                comp = self.coredata.compilers[lang]
                cross_comp = self.coredata.cross_compilers.get(lang, None)
            else:
                try:
                    (comp, cross_comp) = self.detect_compilers(lang, need_cross_compiler)
                except Exception:
                    if not required:
                        mlog.log('Compiler for language', mlog.bold(lang), 'not found.')
                        success = False
                        continue
                    else:
                        raise
            mlog.log('Native %s compiler: ' % comp.get_display_language(), mlog.bold(' '.join(comp.get_exelist())), ' (%s %s)' % (comp.id, comp.version), sep='')
            # Seed per-language external args from environment variables
            # (CFLAGS and friends) the first time a language is added.
            if not comp.get_language() in self.coredata.external_args:
                (preproc_args, compile_args, link_args) = environment.get_args_from_envvars(comp)
                self.coredata.external_preprocess_args[comp.get_language()] = preproc_args
                self.coredata.external_args[comp.get_language()] = compile_args
                self.coredata.external_link_args[comp.get_language()] = link_args
            self.build.add_compiler(comp)
            if need_cross_compiler:
                mlog.log('Cross %s compiler: ' % cross_comp.get_display_language(), mlog.bold(' '.join(cross_comp.get_exelist())), ' (%s %s)' % (cross_comp.id, cross_comp.version), sep='')
                self.build.add_cross_compiler(cross_comp)
            if self.environment.is_cross_build() and not need_cross_compiler:
                # Cross build that needs no distinct cross compiler: reuse
                # the native compiler in the cross table.
                self.build.add_cross_compiler(comp)
            self.add_base_options(comp)
        return success
+
    def add_base_options(self, compiler):
        """Register the compiler's base options, applying any command-line
        overrides; options already registered are left untouched."""
        proj_opt = self.environment.cmd_line_options.projectoptions
        for optname in compiler.base_options:
            if optname in self.coredata.base_options:
                continue
            # NOTE(review): this stores (and mutates via set_value) the shared
            # compilers.base_options object rather than a copy — presumably
            # fine within a single configure run, but worth confirming.
            oobj = compilers.base_options[optname]
            for po in proj_opt:
                if po.startswith(optname + '='):
                    oobj.set_value(po.split('=', 1)[1])
                    break
            self.coredata.base_options[optname] = oobj
+
    def program_from_cross_file(self, prognames):
        """Look the given program names up in the cross file's [binaries].

        Returns an ExternalProgramHolder for the first name with a
        [binaries] entry; File objects are skipped. Implicitly returns None
        when nothing matches, which callers treat as "not found".
        """
        bins = self.environment.cross_info.config['binaries']
        for p in prognames:
            if hasattr(p, 'held_object'):
                p = p.held_object
            if isinstance(p, mesonlib.File):
                continue # Always points to a local (i.e. self generated) file.
            if not isinstance(p, str):
                raise InterpreterException('Executable name must be a string.')
            if p in bins:
                exename = bins[p]
                extprog = dependencies.ExternalProgram(exename)
                progobj = ExternalProgramHolder(extprog)
                return progobj
+
+ def program_from_system(self, args):
+ # Search for scripts relative to current subdir.
+ # Do not cache found programs because find_program('foobar')
+ # might give different results when run from different source dirs.
+ source_dir = os.path.join(self.environment.get_source_dir(), self.subdir)
+ for exename in args:
+ if isinstance(exename, mesonlib.File):
+ if exename.is_built:
+ search_dir = os.path.join(self.environment.get_build_dir(),
+ exename.subdir)
+ else:
+ search_dir = os.path.join(self.environment.get_source_dir(),
+ exename.subdir)
+ exename = exename.fname
+ elif isinstance(exename, str):
+ search_dir = source_dir
+ else:
+ raise InvalidArguments('find_program only accepts strings and '
+ 'files, not {!r}'.format(exename))
+ extprog = dependencies.ExternalProgram(exename, search_dir=search_dir)
+ progobj = ExternalProgramHolder(extprog)
+ if progobj.found():
+ return progobj
+
+ @permittedKwargs(permitted_kwargs['find_program'])
+ def func_find_program(self, node, args, kwargs):
+ if not args:
+ raise InterpreterException('No program name specified.')
+ required = kwargs.get('required', True)
+ if not isinstance(required, bool):
+ raise InvalidArguments('"required" argument must be a boolean.')
+ progobj = None
+ if self.build.environment.is_cross_build():
+ use_native = kwargs.get('native', False)
+ if not isinstance(use_native, bool):
+ raise InvalidArguments('Argument to "native" must be a boolean.')
+ if not use_native:
+ progobj = self.program_from_cross_file(args)
+ if progobj is None:
+ progobj = self.program_from_system(args)
+ if required and (progobj is None or not progobj.found()):
+ raise InvalidArguments('Program "%s" not found or not executable' % args[0])
+ if progobj is None:
+ return ExternalProgramHolder(dependencies.ExternalProgram('nonexistingprogram'))
+ return progobj
+
    def func_find_library(self, node, args, kwargs):
        # Removed API: logs a deprecation notice and returns None.
        # Callers should use the compiler object's find_library() method instead.
        mlog.log(mlog.red('DEPRECATION:'), 'find_library() is removed, use the corresponding method in compiler object instead.')
+
+ def _find_cached_dep(self, name, kwargs):
+ # Check if we want this as a cross-dep or a native-dep
+ # FIXME: Not all dependencies support such a distinction right now,
+ # and we repeat this check inside dependencies that do. We need to
+ # consolidate this somehow.
+ is_cross = self.environment.is_cross_build()
+ if 'native' in kwargs and is_cross:
+ want_cross = not kwargs['native']
+ else:
+ want_cross = is_cross
+ identifier = dependencies.get_dep_identifier(name, kwargs, want_cross)
+ cached_dep = None
+ # Check if we've already searched for and found this dep
+ if identifier in self.coredata.deps:
+ cached_dep = self.coredata.deps[identifier]
+ else:
+ # Check if exactly the same dep with different version requirements
+ # was found already.
+ wanted = identifier[1]
+ for trial, trial_dep in self.coredata.deps.items():
+ # trial[1], identifier[1] are the version requirements
+ if trial[0] != identifier[0] or trial[2:] != identifier[2:]:
+ continue
+ found = trial_dep.get_version()
+ if not wanted or mesonlib.version_compare_many(found, wanted)[0]:
+ # We either don't care about the version, or our
+ # version requirements matched the trial dep's version.
+ cached_dep = trial_dep
+ break
+ return identifier, cached_dep
+
+ @permittedKwargs(permitted_kwargs['dependency'])
+ def func_dependency(self, node, args, kwargs):
+ self.validate_arguments(args, 1, [str])
+ name = args[0]
+
+ if name == '':
+ if kwargs.get('required', True):
+ raise InvalidArguments('Dependency is both required and not-found')
+ return DependencyHolder(Dependency('not-found', {}))
+
+ if '<' in name or '>' in name or '=' in name:
+ raise InvalidArguments('Characters <, > and = are forbidden in dependency names. To specify'
+ 'version\n requirements use the \'version\' keyword argument instead.')
+ identifier, cached_dep = self._find_cached_dep(name, kwargs)
+
+ if cached_dep:
+ if kwargs.get('required', True) and not cached_dep.found():
+ m = 'Dependency {!r} was already checked and was not found'
+ raise DependencyException(m.format(name))
+ dep = cached_dep
+ else:
+ # If the dependency has already been configured, possibly by
+ # a higher level project, try to use it first.
+ if 'fallback' in kwargs:
+ dirname, varname = self.get_subproject_infos(kwargs)
+ if dirname in self.subprojects:
+ subproject = self.subprojects[dirname]
+ try:
+ # Never add fallback deps to self.coredata.deps
+ return subproject.get_variable_method([varname], {})
+ except KeyError:
+ pass
+
+ # We need to actually search for this dep
+ exception = None
+ dep = None
+
+ # Search for it outside the project
+ try:
+ dep = dependencies.find_external_dependency(name, self.environment, kwargs)
+ except DependencyException as e:
+ exception = e
+
+ # Search inside the projects list
+ if not dep or not dep.found():
+ if 'fallback' in kwargs:
+ fallback_dep = self.dependency_fallback(name, kwargs)
+ if fallback_dep:
+ # Never add fallback deps to self.coredata.deps since we
+ # cannot cache them. They must always be evaluated else
+ # we won't actually read all the build files.
+ return fallback_dep
+ if not dep:
+ assert(exception is not None)
+ raise exception
+
+ # Only store found-deps in the cache
+ if dep.found():
+ self.coredata.deps[identifier] = dep
+ return DependencyHolder(dep)
+
    @noKwargs
    @noPosargs
    def func_disabler(self, node, args, kwargs):
        """Implement disabler(): return an object that disables whatever it touches."""
        return Disabler()
+
    def get_subproject_infos(self, kwargs):
        """Return the (subproject dirname, variable name) pair from the
        'fallback' kwarg; the kwarg must be a two-element string list."""
        fbinfo = kwargs['fallback']
        check_stringlist(fbinfo)
        if len(fbinfo) != 2:
            raise InterpreterException('Fallback info must have exactly two items.')
        return fbinfo
+
+ def dependency_fallback(self, name, kwargs):
+ if self.coredata.wrap_mode in (WrapMode.nofallback, WrapMode.nodownload):
+ mlog.log('Not looking for a fallback subproject for the dependency',
+ mlog.bold(name), 'because:\nAutomatic wrap-based fallback '
+ 'dependency downloading is disabled.')
+ return None
+ dirname, varname = self.get_subproject_infos(kwargs)
+ # Try to execute the subproject
+ try:
+ sp_kwargs = {}
+ try:
+ sp_kwargs['default_options'] = kwargs['default_options']
+ except KeyError:
+ pass
+ self.do_subproject(dirname, sp_kwargs)
+ # Invalid code is always an error
+ except InvalidCode:
+ raise
+ # If the subproject execution failed in a non-fatal way, don't raise an
+ # exception; let the caller handle things.
+ except:
+ mlog.log('Also couldn\'t find a fallback subproject in',
+ mlog.bold(os.path.join(self.subproject_dir, dirname)),
+ 'for the dependency', mlog.bold(name))
+ return None
+ try:
+ dep = self.subprojects[dirname].get_variable_method([varname], {})
+ except KeyError:
+ if kwargs.get('required', True):
+ m = 'Fallback variable {!r} in the subproject {!r} does not exist'
+ raise DependencyException(m.format(varname, dirname))
+ # If the dependency is not required, don't raise an exception
+ mlog.log('Also couldn\'t find the dependency', mlog.bold(name),
+ 'in the fallback subproject',
+ mlog.bold(os.path.join(self.subproject_dir, dirname)))
+ return None
+ if not isinstance(dep, DependencyHolder):
+ raise InvalidCode('Fallback variable {!r} in the subproject {!r} is '
+ 'not a dependency object.'.format(varname, dirname))
+ # Check if the version of the declared dependency matches what we want
+ if 'version' in kwargs:
+ wanted = kwargs['version']
+ found = dep.version_method([], {})
+ if found == 'undefined' or not mesonlib.version_compare(found, wanted):
+ mlog.log('Subproject', mlog.bold(dirname), 'dependency',
+ mlog.bold(varname), 'version is', mlog.bold(found),
+ 'but', mlog.bold(wanted), 'is required.')
+ return None
+ mlog.log('Found a', mlog.green('fallback'), 'subproject',
+ mlog.bold(os.path.join(self.subproject_dir, dirname)), 'for',
+ mlog.bold(name))
+ return dep
+
    @permittedKwargs(permitted_kwargs['executable'])
    def func_executable(self, node, args, kwargs):
        """Implement executable(): build_target with an ExecutableHolder."""
        return self.build_target(node, args, kwargs, ExecutableHolder)
+
    @permittedKwargs(permitted_kwargs['static_library'])
    def func_static_lib(self, node, args, kwargs):
        """Implement static_library(): build_target with a StaticLibraryHolder."""
        return self.build_target(node, args, kwargs, StaticLibraryHolder)
+
    @permittedKwargs(permitted_kwargs['shared_library'])
    def func_shared_lib(self, node, args, kwargs):
        """Implement shared_library(): build_target with a SharedLibraryHolder."""
        return self.build_target(node, args, kwargs, SharedLibraryHolder)
+
    @permittedKwargs(permitted_kwargs['shared_module'])
    def func_shared_module(self, node, args, kwargs):
        """Implement shared_module(): build_target with a SharedModuleHolder."""
        return self.build_target(node, args, kwargs, SharedModuleHolder)
+
+ def func_library(self, node, args, kwargs):
+ if self.coredata.get_builtin_option('default_library') == 'shared':
+ return self.func_shared_lib(node, args, kwargs)
+ return self.func_static_lib(node, args, kwargs)
+
    @permittedKwargs(permitted_kwargs['jar'])
    def func_jar(self, node, args, kwargs):
        """Implement jar(): build_target with target_type forced to 'jar'."""
        kwargs['target_type'] = 'jar'
        return self.build_target(node, args, kwargs, JarHolder)
+
+ @permittedKwargs(permitted_kwargs['build_target'])
+ def func_build_target(self, node, args, kwargs):
+ if 'target_type' not in kwargs:
+ raise InterpreterException('Missing target_type keyword argument')
+ target_type = kwargs.pop('target_type')
+ if target_type == 'executable':
+ return self.func_executable(node, args, kwargs)
+ elif target_type == 'shared_library':
+ return self.func_shared_lib(node, args, kwargs)
+ elif target_type == 'static_library':
+ return self.func_static_lib(node, args, kwargs)
+ elif target_type == 'library':
+ return self.func_library(node, args, kwargs)
+ elif target_type == 'jar':
+ return self.func_jar(node, args, kwargs)
+ else:
+ raise InterpreterException('Unknown target_type.')
+
    @permittedKwargs(permitted_kwargs['vcs_tag'])
    def func_vcs_tag(self, node, args, kwargs):
        """Implement vcs_tag(): generate a file with the VCS revision substituted.

        Wraps the internal 'vcstagger' helper in a custom_target that
        replaces 'replace_string' (default @VCS_TAG@) in the input file,
        using either a user-supplied command or an auto-detected VCS.
        """
        if 'input' not in kwargs or 'output' not in kwargs:
            raise InterpreterException('Keyword arguments input and output must exist')
        # The project version is the fallback string when no VCS info is found.
        fallback = kwargs.pop('fallback', self.project_version)
        if not isinstance(fallback, str):
            raise InterpreterException('Keyword argument fallback must be a string.')
        replace_string = kwargs.pop('replace_string', '@VCS_TAG@')
        regex_selector = '(.*)' # default regex selector for custom command: use complete output
        vcs_cmd = kwargs.get('command', None)
        if vcs_cmd and not isinstance(vcs_cmd, list):
            vcs_cmd = [vcs_cmd]
        source_dir = os.path.normpath(os.path.join(self.environment.get_source_dir(), self.subdir))
        if vcs_cmd:
            # Is the command an executable in path or maybe a script in the source tree?
            vcs_cmd[0] = shutil.which(vcs_cmd[0]) or os.path.join(source_dir, vcs_cmd[0])
        else:
            vcs = mesonlib.detect_vcs(source_dir)
            if vcs:
                mlog.log('Found %s repository at %s' % (vcs['name'], vcs['wc_dir']))
                vcs_cmd = vcs['get_rev'].split()
                regex_selector = vcs['rev_regex']
            else:
                vcs_cmd = [' '] # executing this cmd will fail in vcstagger.py and force to use the fallback string
        # vcstagger.py parameters: infile, outfile, fallback, source_dir, replace_string, regex_selector, command...
        kwargs['command'] = self.environment.get_build_command() + \
            ['--internal',
             'vcstagger',
             '@INPUT0@',
             '@OUTPUT0@',
             fallback,
             source_dir,
             replace_string,
             regex_selector] + vcs_cmd
        # Always rebuild so the tag tracks the working copy state.
        kwargs.setdefault('build_always', True)
        return self.func_custom_target(node, [kwargs['output']], kwargs)
+
+ @stringArgs
+ @permittedKwargs(permitted_kwargs['custom_target'])
+ def func_custom_target(self, node, args, kwargs):
+ if len(args) != 1:
+ raise InterpreterException('custom_target: Only one positional argument is allowed, and it must be a string name')
+ name = args[0]
+ tg = CustomTargetHolder(build.CustomTarget(name, self.subdir, self.subproject, kwargs), self)
+ self.add_target(name, tg.held_object)
+ return tg
+
    @permittedKwargs(permitted_kwargs['run_target'])
    def func_run_target(self, node, args, kwargs):
        """Implement run_target().

        Supports the deprecated positional form run_target(name, cmd, ...)
        and the keyword form run_target(name, command: [...], depends: [...]).
        """
        global run_depr_printed
        if len(args) > 1:
            # Deprecated positional form; warn only once per configure run.
            if not run_depr_printed:
                mlog.log(mlog.red('DEPRECATION'), 'positional version of run_target is deprecated, use the keyword version instead.')
                run_depr_printed = True
            if 'command' in kwargs:
                raise InterpreterException('Can not have command both in positional and keyword arguments.')
            all_args = args[1:]
            # NOTE(review): a 'depends' kwarg is silently ignored in this
            # legacy branch — presumably intentional; confirm.
            deps = []
        elif len(args) == 1:
            if 'command' not in kwargs:
                raise InterpreterException('Missing "command" keyword argument')
            all_args = extract_as_list(kwargs, 'command')
            deps = extract_as_list(kwargs, 'depends')
        else:
            raise InterpreterException('Run_target needs at least one positional argument.')

        # Unwrap holders and validate each command element's type.
        cleaned_args = []
        for i in listify(all_args, unholder=True):
            if not isinstance(i, (str, build.BuildTarget, build.CustomTarget, dependencies.ExternalProgram, mesonlib.File)):
                mlog.debug('Wrong type:', str(i))
                raise InterpreterException('Invalid argument to run_target.')
            cleaned_args.append(i)
        name = args[0]
        if not isinstance(name, str):
            raise InterpreterException('First argument must be a string.')
        cleaned_deps = []
        for d in deps:
            # Unwrap interpreter holders to the underlying build objects.
            try:
                d = d.held_object
            except AttributeError:
                pass
            if not isinstance(d, (build.BuildTarget, build.CustomTarget)):
                raise InterpreterException('Depends items must be build targets.')
            cleaned_deps.append(d)
        command = cleaned_args[0]
        cmd_args = cleaned_args[1:]
        tg = RunTargetHolder(name, command, cmd_args, cleaned_deps, self.subdir, self.subproject)
        self.add_target(name, tg.held_object)
        return tg
+
+ @permittedKwargs(permitted_kwargs['generator'])
+ def func_generator(self, node, args, kwargs):
+ gen = GeneratorHolder(self, args, kwargs)
+ self.generators.append(gen)
+ return gen
+
    @permittedKwargs(permitted_kwargs['benchmark'])
    def func_benchmark(self, node, args, kwargs):
        """Implement benchmark(): add_test with is_base_test=False."""
        self.add_test(node, args, kwargs, False)
+
    @permittedKwargs(permitted_kwargs['test'])
    def func_test(self, node, args, kwargs):
        """Implement test(): add_test with is_base_test=True."""
        self.add_test(node, args, kwargs, True)
+
+ def unpack_env_kwarg(self, kwargs):
+ envlist = kwargs.get('env', EnvironmentVariablesHolder())
+ if isinstance(envlist, EnvironmentVariablesHolder):
+ env = envlist.held_object
+ else:
+ envlist = listify(envlist)
+ # Convert from array to environment object
+ env = EnvironmentVariablesHolder()
+ for e in envlist:
+ if '=' not in e:
+ raise InterpreterException('Env var definition must be of type key=val.')
+ (k, val) = e.split('=', 1)
+ k = k.strip()
+ val = val.strip()
+ if ' ' in k:
+ raise InterpreterException('Env var key must not have spaces in it.')
+ env.set_method([k, val], {})
+ env = env.held_object
+ return env
+
    def add_test(self, node, args, kwargs, is_base_test):
        """Shared implementation of test() and benchmark().

        args is (name, executable); is_base_test selects the destination
        list (True -> self.build.tests, False -> self.build.benchmarks).
        """
        if len(args) != 2:
            raise InterpreterException('Incorrect number of arguments')
        if not isinstance(args[0], str):
            raise InterpreterException('First argument of test must be a string.')
        exe = args[1]
        if not isinstance(exe, (ExecutableHolder, JarHolder, ExternalProgramHolder)):
            if isinstance(exe, mesonlib.File):
                # A File is resolved via find_program() so scripts can be tests.
                exe = self.func_find_program(node, (args[1], ), {})
            else:
                raise InterpreterException('Second argument must be executable.')
        par = kwargs.get('is_parallel', True)
        if not isinstance(par, bool):
            raise InterpreterException('Keyword argument is_parallel must be a boolean.')
        cmd_args = extract_as_list(kwargs, 'args', unholder=True)
        for i in cmd_args:
            if not isinstance(i, (str, mesonlib.File, build.Target)):
                raise InterpreterException('Command line arguments must be strings, files or targets.')
        env = self.unpack_env_kwarg(kwargs)
        should_fail = kwargs.get('should_fail', False)
        if not isinstance(should_fail, bool):
            raise InterpreterException('Keyword argument should_fail must be a boolean.')
        timeout = kwargs.get('timeout', 30)
        if 'workdir' in kwargs:
            workdir = kwargs['workdir']
            if not isinstance(workdir, str):
                raise InterpreterException('Workdir keyword argument must be a string.')
            if not os.path.isabs(workdir):
                raise InterpreterException('Workdir keyword argument must be an absolute path.')
        else:
            workdir = None
        if not isinstance(timeout, int):
            raise InterpreterException('Timeout must be an integer.')
        suite = []
        # Prefix each suite name with the sanitized (sub)project name.
        for s in mesonlib.stringlistify(kwargs.get('suite', '')):
            if len(s) > 0:
                s = ':' + s
            if self.is_subproject():
                suite.append(self.subproject.replace(' ', '_').replace(':', '_') + s)
            else:
                suite.append(self.build.project_name.replace(' ', '_').replace(':', '_') + s)
        t = Test(args[0], suite, exe.held_object, par, cmd_args, env, should_fail, timeout, workdir)
        if is_base_test:
            self.build.tests.append(t)
            mlog.debug('Adding test "', mlog.bold(args[0]), '".', sep='')
        else:
            self.build.benchmarks.append(t)
            mlog.debug('Adding benchmark "', mlog.bold(args[0]), '".', sep='')
+
+ @permittedKwargs(permitted_kwargs['install_headers'])
+ def func_install_headers(self, node, args, kwargs):
+ source_files = self.source_strings_to_files(args)
+ h = Headers(source_files, kwargs)
+ self.build.headers.append(h)
+ return h
+
+ @permittedKwargs(permitted_kwargs['install_man'])
+ def func_install_man(self, node, args, kwargs):
+ fargs = self.source_strings_to_files(args)
+ m = Man(fargs, kwargs)
+ self.build.man.append(m)
+ return m
+
    @permittedKwargs(permitted_kwargs['subdir'])
    def func_subdir(self, node, args, kwargs):
        """Implement subdir(): evaluate a subdirectory's meson.build.

        Temporarily switches self.subdir, parses and evaluates the child
        build file, then restores the previous subdir.
        """
        self.validate_arguments(args, 1, [str])
        if '..' in args[0]:
            raise InvalidArguments('Subdir contains ..')
        if self.subdir == '' and args[0] == self.subproject_dir:
            raise InvalidArguments('Must not go into subprojects dir with subdir(), use subproject() instead.')
        if self.subdir == '' and args[0].startswith('meson-'):
            raise InvalidArguments('The "meson-" prefix is reserved and cannot be used for top-level subdir().')
        # if_found: skip this subdir unless every listed object was found.
        for i in mesonlib.extract_as_list(kwargs, 'if_found'):
            if not hasattr(i, 'found_method'):
                raise InterpreterException('Object used in if_found does not have a found method.')
            if not i.found_method([], {}):
                return
        prev_subdir = self.subdir
        subdir = os.path.join(prev_subdir, args[0])
        if os.path.isabs(subdir):
            raise InvalidArguments('Subdir argument must be a relative path.')
        absdir = os.path.join(self.environment.get_source_dir(), subdir)
        # Resolve symlinks so the same directory cannot be visited twice
        # under different names.
        symlinkless_dir = os.path.realpath(absdir)
        if symlinkless_dir in self.visited_subdirs:
            raise InvalidArguments('Tried to enter directory "%s", which has already been visited.'
                                   % subdir)
        self.visited_subdirs[symlinkless_dir] = True
        self.subdir = subdir
        os.makedirs(os.path.join(self.environment.build_dir, subdir), exist_ok=True)
        buildfilename = os.path.join(self.subdir, environment.build_filename)
        self.build_def_files.append(buildfilename)
        absname = os.path.join(self.environment.get_source_dir(), buildfilename)
        if not os.path.isfile(absname):
            # Restore state before failing so the error is recoverable.
            self.subdir = prev_subdir
            raise InterpreterException('Non-existent build file {!r}'.format(buildfilename))
        with open(absname, encoding='utf8') as f:
            code = f.read()
        assert(isinstance(code, str))
        try:
            codeblock = mparser.Parser(code, self.subdir).parse()
        except mesonlib.MesonException as me:
            # Attach the file name so the user sees where parsing failed.
            me.file = buildfilename
            raise me
        self.evaluate_codeblock(codeblock)
        self.subdir = prev_subdir
+
+ def _get_kwarg_install_mode(self, kwargs):
+ if 'install_mode' not in kwargs:
+ return None
+ install_mode = []
+ mode = mesonlib.typeslistify(kwargs.get('install_mode', []), (str, int))
+ for m in mode:
+ # We skip any arguments that are set to `false`
+ if m is False:
+ m = None
+ install_mode.append(m)
+ if len(install_mode) > 3:
+ raise InvalidArguments('Keyword argument install_mode takes at '
+ 'most 3 arguments.')
+ if len(install_mode) > 0 and install_mode[0] is not None and \
+ not isinstance(install_mode[0], str):
+ raise InvalidArguments('Keyword argument install_mode requires the '
+ 'permissions arg to be a string or false')
+ return FileMode(*install_mode)
+
+ @permittedKwargs(permitted_kwargs['install_data'])
+ def func_install_data(self, node, args, kwargs):
+ kwsource = mesonlib.stringlistify(kwargs.get('sources', []))
+ raw_sources = args + kwsource
+ sources = []
+ source_strings = []
+ for s in raw_sources:
+ if isinstance(s, mesonlib.File):
+ sources.append(s)
+ else:
+ source_strings.append(s)
+ sources += self.source_strings_to_files(source_strings)
+ install_dir = kwargs.get('install_dir', None)
+ if not isinstance(install_dir, (str, type(None))):
+ raise InvalidArguments('Keyword argument install_dir not a string.')
+ install_mode = self._get_kwarg_install_mode(kwargs)
+ data = DataHolder(build.Data(sources, install_dir, install_mode))
+ self.build.data.append(data.held_object)
+ return data
+
+ @permittedKwargs(permitted_kwargs['install_subdir'])
+ @stringArgs
+ def func_install_subdir(self, node, args, kwargs):
+ if len(args) != 1:
+ raise InvalidArguments('Install_subdir requires exactly one argument.')
+ subdir = args[0]
+ if 'install_dir' not in kwargs:
+ raise InvalidArguments('Missing keyword argument install_dir')
+ install_dir = kwargs['install_dir']
+ if not isinstance(install_dir, str):
+ raise InvalidArguments('Keyword argument install_dir not a string.')
+ if 'exclude_files' in kwargs:
+ exclude = extract_as_list(kwargs, 'exclude_files')
+ for f in exclude:
+ if not isinstance(f, str):
+ raise InvalidArguments('Exclude argument not a string.')
+ elif os.path.isabs(f):
+ raise InvalidArguments('Exclude argument cannot be absolute.')
+ exclude_files = {os.path.join(subdir, f) for f in exclude}
+ else:
+ exclude_files = set()
+ if 'exclude_directories' in kwargs:
+ exclude = extract_as_list(kwargs, 'exclude_directories')
+ for d in exclude:
+ if not isinstance(d, str):
+ raise InvalidArguments('Exclude argument not a string.')
+ elif os.path.isabs(d):
+ raise InvalidArguments('Exclude argument cannot be absolute.')
+ exclude_directories = {os.path.join(subdir, f) for f in exclude}
+ else:
+ exclude_directories = set()
+ exclude = (exclude_files, exclude_directories)
+ install_mode = self._get_kwarg_install_mode(kwargs)
+ idir = InstallDir(self.subdir, subdir, install_dir, install_mode, exclude)
+ self.build.install_dirs.append(idir)
+ return idir
+
    @permittedKwargs(permitted_kwargs['configure_file'])
    def func_configure_file(self, node, args, kwargs):
        """Implement configure_file().

        Two mutually exclusive modes: 'configuration' substitutes values
        into the (optional) input at configure time; 'command' runs an
        external command, optionally capturing its stdout as the output.
        Returns a File pointing at the generated file in the build dir.
        """
        if len(args) > 0:
            raise InterpreterException("configure_file takes only keyword arguments.")
        if 'output' not in kwargs:
            raise InterpreterException('Required keyword argument "output" not defined.')
        if 'configuration' in kwargs and 'command' in kwargs:
            raise InterpreterException('Must not specify both "configuration" '
                                       'and "command" keyword arguments since '
                                       'they are mutually exclusive.')
        if 'capture' in kwargs:
            if not isinstance(kwargs['capture'], bool):
                raise InterpreterException('"capture" keyword must be a boolean.')
            if 'command' not in kwargs:
                raise InterpreterException('"capture" keyword requires "command" keyword.')

        # Validate input
        inputfile = None
        ifile_abs = None
        if 'input' in kwargs:
            inputfile = kwargs['input']
            if isinstance(inputfile, list):
                if len(inputfile) != 1:
                    m = "Keyword argument 'input' requires exactly one file"
                    raise InterpreterException(m)
                inputfile = inputfile[0]
            if not isinstance(inputfile, (str, mesonlib.File)):
                raise InterpreterException('Input must be a string or a file')
            if isinstance(inputfile, str):
                inputfile = mesonlib.File.from_source_file(self.environment.source_dir,
                                                           self.subdir, inputfile)
            ifile_abs = inputfile.absolute_path(self.environment.source_dir,
                                                self.environment.build_dir)
        elif 'command' in kwargs and '@INPUT@' in kwargs['command']:
            raise InterpreterException('@INPUT@ used as command argument, but no input file specified.')
        # Validate output
        output = kwargs['output']
        if not isinstance(output, str):
            raise InterpreterException('Output file name must be a string')
        if ifile_abs:
            # Allow @PLAINNAME@/@BASENAME@ style templates in the output name.
            values = mesonlib.get_filenames_templates_dict([ifile_abs], None)
            outputs = mesonlib.substitute_values([output], values)
            output = outputs[0]
        if os.path.split(output)[0] != '':
            raise InterpreterException('Output file name must not contain a subdirectory.')
        (ofile_path, ofile_fname) = os.path.split(os.path.join(self.subdir, output))
        ofile_abs = os.path.join(self.environment.build_dir, ofile_path, ofile_fname)
        if 'configuration' in kwargs:
            conf = kwargs['configuration']
            if not isinstance(conf, ConfigurationDataHolder):
                raise InterpreterException('Argument "configuration" is not of type configuration_data')
            mlog.log('Configuring', mlog.bold(output), 'using configuration')
            if inputfile is not None:
                os.makedirs(os.path.join(self.environment.build_dir, self.subdir), exist_ok=True)
                missing_variables = mesonlib.do_conf_file(ifile_abs, ofile_abs,
                                                          conf.held_object)
                if missing_variables:
                    var_list = ", ".join(map(repr, sorted(missing_variables)))
                    mlog.warning(
                        "The variable(s) %s in the input file %s are not "
                        "present in the given configuration data" % (
                            var_list, inputfile))
            else:
                # No input: emit a C-style header from the configuration data.
                mesonlib.dump_conf_header(ofile_abs, conf.held_object)
            conf.mark_used()
        elif 'command' in kwargs:
            # We use absolute paths for input and output here because the cwd
            # that the command is run from is 'unspecified', so it could change.
            # Currently it's builddir/subdir for in_builddir else srcdir/subdir.
            if ifile_abs:
                values = mesonlib.get_filenames_templates_dict([ifile_abs], [ofile_abs])
            else:
                values = mesonlib.get_filenames_templates_dict(None, [ofile_abs])
            # Substitute @INPUT@, @OUTPUT@, etc here.
            cmd = mesonlib.substitute_values(kwargs['command'], values)
            mlog.log('Configuring', mlog.bold(output), 'with command')
            res = self.run_command_impl(node, cmd, {}, True)
            if res.returncode != 0:
                raise InterpreterException('Running configure command failed.\n%s\n%s' %
                                           (res.stdout, res.stderr))
            if 'capture' in kwargs and kwargs['capture']:
                # Write captured stdout via a temp file; only replace the
                # target when the contents actually changed.
                dst_tmp = ofile_abs + '~'
                with open(dst_tmp, 'w', encoding='utf-8') as f:
                    f.writelines(res.stdout)
                if ifile_abs:
                    shutil.copymode(ifile_abs, dst_tmp)
                mesonlib.replace_if_different(ofile_abs, dst_tmp)
        else:
            raise InterpreterException('Configure_file must have either "configuration" or "command".')
        # If the input is a source file, add it to the list of files that we
        # need to reconfigure on when they change. FIXME: Do the same for
        # files() objects in the command: kwarg.
        if inputfile and not inputfile.is_built:
            # Normalize the path of the conffile (relative to the
            # source root) to avoid duplicates. This is especially
            # important to convert '/' to '\' on Windows
            conffile = os.path.normpath(inputfile.relative_name())
            if conffile not in self.build_def_files:
                self.build_def_files.append(conffile)
        # Install file if requested
        idir = kwargs.get('install_dir', None)
        if isinstance(idir, str):
            cfile = mesonlib.File.from_built_file(ofile_path, ofile_fname)
            self.build.data.append(build.Data([cfile], idir))
        return mesonlib.File.from_built_file(self.subdir, output)
+
    @permittedKwargs(permitted_kwargs['include_directories'])
    @stringArgs
    def func_include_directories(self, node, args, kwargs):
        """Implement include_directories().

        Each argument must be a relative path that exists in the source
        or build tree; absolute source-tree paths are rejected with a
        tutorial message.
        """
        src_root = self.environment.get_source_dir()
        build_root = self.environment.get_build_dir()
        absbase_src = os.path.join(src_root, self.subdir)
        absbase_build = os.path.join(build_root, self.subdir)

        for a in args:
            if a.startswith(src_root):
                raise InvalidArguments('''Tried to form an absolute path to a source dir. You should not do that but use
relative paths instead.

To get include path to any directory relative to the current dir do

incdir = include_directories(dirname)

After this incdir will contain both the current source dir as well as the
corresponding build dir. It can then be used in any subdirectory and
Meson will take care of all the busywork to make paths work.

Dirname can even be '.' to mark the current directory. Though you should
remember that the current source and build directories are always
put in the include directories by default so you only need to do
include_directories('.') if you intend to use the result in a
different subdirectory.
''')
            # The dir may exist only in the source tree, only in the build
            # tree (generated headers), or both.
            absdir_src = os.path.join(absbase_src, a)
            absdir_build = os.path.join(absbase_build, a)
            if not os.path.isdir(absdir_src) and not os.path.isdir(absdir_build):
                raise InvalidArguments('Include dir %s does not exist.' % a)
        is_system = kwargs.get('is_system', False)
        if not isinstance(is_system, bool):
            raise InvalidArguments('Is_system must be boolean.')
        i = IncludeDirsHolder(build.IncludeDirs(self.subdir, args, is_system))
        return i
+
    @permittedKwargs(permitted_kwargs['add_test_setup'])
    @stringArgs
    def func_add_test_setup(self, node, args, kwargs):
        """Implement add_test_setup(): register a named test setup
        (exe wrapper, gdb flag, timeout multiplier, environment)."""
        if len(args) != 1:
            raise InterpreterException('Add_test_setup needs one argument for the setup name.')
        setup_name = args[0]
        if re.fullmatch('[_a-zA-Z][_0-9a-zA-Z]*', setup_name) is None:
            raise InterpreterException('Setup name may only contain alphanumeric characters.')
        try:
            inp = extract_as_list(kwargs, 'exe_wrapper')
            exe_wrapper = []
            for i in inp:
                if hasattr(i, 'held_object'):
                    i = i.held_object
                if isinstance(i, str):
                    exe_wrapper.append(i)
                elif isinstance(i, dependencies.ExternalProgram):
                    if not i.found():
                        raise InterpreterException('Tried to use non-found executable.')
                    exe_wrapper += i.get_command()
                else:
                    raise InterpreterException('Exe wrapper can only contain strings or external binaries.')
        except KeyError:
            # NOTE(review): presumably reached when 'exe_wrapper' is absent;
            # confirm extract_as_list raises KeyError in that case.
            exe_wrapper = None
        gdb = kwargs.get('gdb', False)
        if not isinstance(gdb, bool):
            raise InterpreterException('Gdb option must be a boolean')
        timeout_multiplier = kwargs.get('timeout_multiplier', 1)
        if not isinstance(timeout_multiplier, int):
            raise InterpreterException('Timeout multiplier must be a number.')
        env = self.unpack_env_kwarg(kwargs)
        setupobj = build.TestSetup(exe_wrapper=exe_wrapper,
                                   gdb=gdb,
                                   timeout_multiplier=timeout_multiplier,
                                   env=env)
        if self.subproject == '':
            # Dunno what we should do with subprojects really. Let's start simple
            # and just use the master project ones.
            self.build.test_setups[setup_name] = setupobj
+
    @permittedKwargs(permitted_kwargs['add_global_arguments'])
    @stringArgs
    def func_add_global_arguments(self, node, args, kwargs):
        """Implement add_global_arguments(): global compiler args."""
        self.add_global_arguments(node, self.build.global_args, args, kwargs)
+
    @permittedKwargs(permitted_kwargs['add_global_link_arguments'])
    @stringArgs
    def func_add_global_link_arguments(self, node, args, kwargs):
        """Implement add_global_link_arguments(): global linker args."""
        self.add_global_arguments(node, self.build.global_link_args, args, kwargs)
+
    @permittedKwargs(permitted_kwargs['add_project_arguments'])
    @stringArgs
    def func_add_project_arguments(self, node, args, kwargs):
        """Implement add_project_arguments(): per-project compiler args."""
        self.add_project_arguments(node, self.build.projects_args, args, kwargs)
+
    @permittedKwargs(permitted_kwargs['add_project_link_arguments'])
    @stringArgs
    def func_add_project_link_arguments(self, node, args, kwargs):
        """Implement add_project_link_arguments(): per-project linker args."""
        self.add_project_arguments(node, self.build.projects_link_args, args, kwargs)
+
    def add_global_arguments(self, node, argsdict, args, kwargs):
        """Validate and record global (whole-build) compiler/link args.

        Forbidden in subprojects; also forbidden once any args have been
        frozen by a declared build target.
        """
        if self.subproject != '':
            msg = 'Function \'{}\' cannot be used in subprojects because ' \
                  'there is no way to make that reliable.\nPlease only call ' \
                  'this if is_subproject() returns false. Alternatively, ' \
                  'define a variable that\ncontains your language-specific ' \
                  'arguments and add it to the appropriate *_args kwarg ' \
                  'in each target.'.format(node.func_name)
            raise InvalidCode(msg)
        frozen = self.project_args_frozen or self.global_args_frozen
        self.add_arguments(node, argsdict, frozen, args, kwargs)
+
+ def add_project_arguments(self, node, argsdict, args, kwargs):
+ if self.subproject not in argsdict:
+ argsdict[self.subproject] = {}
+ self.add_arguments(node, argsdict[self.subproject],
+ self.project_args_frozen, args, kwargs)
+
+ def add_arguments(self, node, argsdict, args_frozen, args, kwargs):
+ if args_frozen:
+ msg = 'Tried to use \'{}\' after a build target has been declared.\n' \
+ 'This is not permitted. Please declare all ' \
+ 'arguments before your targets.'.format(node.func_name)
+ raise InvalidCode(msg)
+
+ if 'language' not in kwargs:
+ raise InvalidCode('Missing language definition in {}'.format(node.func_name))
+
+ for lang in mesonlib.stringlistify(kwargs['language']):
+ lang = lang.lower()
+ argsdict[lang] = argsdict.get(lang, []) + args
+
    @noKwargs
    @noPosargs
    def func_environment(self, node, args, kwargs):
        """Implement environment(): return a fresh, empty env-vars object."""
        return EnvironmentVariablesHolder()
+
+ @stringArgs
+ @noKwargs
+ def func_join_paths(self, node, args, kwargs):
+ return os.path.join(*args).replace('\\', '/')
+
    def run(self):
        # Evaluate the whole build definition, then report the target count.
        super().run()
        mlog.log('Build targets in project:', mlog.bold(str(len(self.build.targets))))
+
+ def evaluate_subproject_info(self, path_from_source_root, subproject_dirname):
+ depth = 0
+ subproj_name = ''
+ segs = path_from_source_root.split(os.path.sep)
+ while segs and segs[0] == subproject_dirname:
+ depth += 1
+ subproj_name = segs[1]
+ segs = segs[2:]
+ return (depth, subproj_name)
+
    # Check that the indicated file is within the same subproject
    # as we currently are. This is to stop people doing
    # nasty things like:
    #
    # f = files('../../master_src/file.c')
    #
    # Note that this is validated only when the file
    # object is generated. The result can be used in a different
    # subproject than it is defined in (due to e.g. a
    # declare_dependency).
    def validate_within_subproject(self, subdir, fname):
        """Raise InterpreterException if subdir/fname escapes the current
        (sub)project's sandbox; absolute paths outside the source tree pass."""
        norm = os.path.normpath(os.path.join(subdir, fname))
        if os.path.isabs(norm):
            if not norm.startswith(self.environment.source_dir):
                # Grabbing files outside the source tree is ok.
                # This is for vendor stuff like:
                #
                # /opt/vendorsdk/src/file_with_license_restrictions.c
                return
            # Inside the source tree: re-check as a source-relative path.
            norm = os.path.relpath(norm, self.environment.source_dir)
            assert(not os.path.isabs(norm))
        (num_sps, sproj_name) = self.evaluate_subproject_info(norm, self.subproject_dir)
        plain_filename = os.path.split(norm)[-1]
        if num_sps == 0:
            # Top-level file: only allowed from the top-level project.
            if self.subproject == '':
                return
            raise InterpreterException('Sandbox violation: Tried to grab file %s from a different subproject.' % plain_filename)
        if num_sps > 1:
            raise InterpreterException('Sandbox violation: Tried to grab file %s from a nested subproject.' % plain_filename)
        if sproj_name != self.subproject_directory_name:
            raise InterpreterException('Sandbox violation: Tried to grab file %s from a different subproject.' % plain_filename)
+
    def source_strings_to_files(self, sources):
        # Normalize a heterogeneous source list: plain strings become
        # mesonlib.File objects (after the subproject sandbox check);
        # already-wrapped objects pass through unchanged.
        results = []
        for s in sources:
            if isinstance(s, (mesonlib.File, GeneratedListHolder,
                              CustomTargetHolder, CustomTargetIndexHolder)):
                pass
            elif isinstance(s, str):
                self.validate_within_subproject(self.subdir, s)
                s = mesonlib.File.from_source_file(self.environment.source_dir, self.subdir, s)
            else:
                raise InterpreterException('Source item is {!r} instead of '
                                           'string or File-type object'.format(s))
            results.append(s)
        return results
+
    def add_target(self, name, tobj):
        # Register a freshly created target, enforcing the naming rules,
        # and allocate a stable GUID for project generators that need one.
        if name == '':
            raise InterpreterException('Target name must not be empty.')
        if name.startswith('meson-'):
            raise InvalidArguments("Target names starting with 'meson-' are reserved "
                                   "for Meson's internal use. Please rename.")
        if name in coredata.forbidden_target_names:
            raise InvalidArguments("Target name '%s' is reserved for Meson's "
                                   "internal use. Please rename." % name)
        # To permit an executable and a shared library to have the
        # same name, such as "foo.exe" and "libfoo.a".
        idname = tobj.get_id()
        if idname in self.build.targets:
            raise InvalidCode('Tried to create target "%s", but a target of that name already exists.' % name)
        self.build.targets[idname] = tobj
        if idname not in self.coredata.target_guids:
            self.coredata.target_guids[idname] = str(uuid.uuid4()).upper()
+
    def build_target(self, node, args, kwargs, targetholder):
        # Common implementation behind executable(), shared_library() etc.
        # args[0] is the target name; the rest (plus kwargs['sources'])
        # are its sources. Returns the holder wrapping the new target.
        if not args:
            raise InterpreterException('Target does not have a name.')
        name = args[0]
        sources = listify(args[1:])
        # 'native: true' forces a host-machine build even in a cross build.
        if self.environment.is_cross_build():
            if kwargs.get('native', False):
                is_cross = False
            else:
                is_cross = True
        else:
            is_cross = False
        if 'sources' in kwargs:
            sources += listify(kwargs['sources'])
        sources = self.source_strings_to_files(sources)
        objs = extract_as_list(kwargs, 'objects')
        kwargs['dependencies'] = extract_as_list(kwargs, 'dependencies')
        if 'extra_files' in kwargs:
            ef = extract_as_list(kwargs, 'extra_files')
            kwargs['extra_files'] = self.source_strings_to_files(ef)
        self.check_sources_exist(os.path.join(self.source_root, self.subdir), sources)
        # Map the holder class requested by the caller to the build-level
        # target class it wraps.
        if targetholder is ExecutableHolder:
            targetclass = build.Executable
        elif targetholder is SharedLibraryHolder:
            targetclass = build.SharedLibrary
        elif targetholder is SharedModuleHolder:
            targetclass = build.SharedModule
        elif targetholder is StaticLibraryHolder:
            targetclass = build.StaticLibrary
        elif targetholder is JarHolder:
            targetclass = build.Jar
        else:
            mlog.debug('Unknown target type:', str(targetholder))
            raise RuntimeError('Unreachable code')
        target = targetclass(name, self.subdir, self.subproject, is_cross, sources, objs, self.environment, kwargs)
        if is_cross:
            self.add_cross_stdlib_info(target)
        l = targetholder(target, self)
        self.add_target(name, l.held_object)
        # From now on add_global_arguments()/add_project_arguments() must fail.
        self.project_args_frozen = True
        return l
+
+ def get_used_languages(self, target):
+ result = {}
+ for i in target.sources:
+ for lang, c in self.build.compilers.items():
+ if c.can_compile(i):
+ result[lang] = True
+ break
+ return result
+
    def add_cross_stdlib_info(self, target):
        # Attach the cross-file standard library dependency for each
        # language the target uses, except inside the subproject that
        # provides that stdlib itself (which would be circular).
        for l in self.get_used_languages(target):
            if self.environment.cross_info.has_stdlib(l) \
                    and self.subproject != self.environment.cross_info.get_stdlib(l)[0]:
                target.add_deps(self.build.cross_stdlibs[l])
+
+ def check_sources_exist(self, subdir, sources):
+ for s in sources:
+ if not isinstance(s, str):
+ continue # This means a generated source and they always exist.
+ fname = os.path.join(subdir, s)
+ if not os.path.isfile(fname):
+ raise InterpreterException('Tried to add non-existing source file %s.' % s)
+
    def format_string(self, templ, args):
        # Implementation of str.format(): substitute @0@, @1@, ... in templ
        # with the stringified evaluated arguments.
        if isinstance(args, mparser.ArgumentNode):
            args = args.arguments
        arg_strings = []
        for arg in args:
            arg = self.evaluate_statement(arg)
            if isinstance(arg, bool): # Python boolean is upper case.
                arg = str(arg).lower()
            arg_strings.append(str(arg))

        def arg_replace(match):
            idx = int(match.group(1))
            if idx >= len(arg_strings):
                raise InterpreterException('Format placeholder @{}@ out of range.'.format(idx))
            return arg_strings[idx]
        return re.sub(r'@(\d+)@', arg_replace, templ)
+
    # Only permit object extraction from the same subproject
    def validate_extraction(self, buildtarget):
        # Main project may only extract from main-project targets; a
        # subproject may only extract from targets of that same subproject.
        if not self.subdir.startswith(self.subproject_dir):
            # We are in the main project.
            if buildtarget.subdir.startswith(self.subproject_dir):
                raise InterpreterException('Tried to extract objects from a subproject target.')
        else:
            # We are inside some subproject.
            if not buildtarget.subdir.startswith(self.subproject_dir):
                raise InterpreterException('Tried to extract objects from the main project from a subproject.')
            # Path component [1] is the subproject name under subproject_dir.
            if self.subdir.split('/')[1] != buildtarget.subdir.split('/')[1]:
                raise InterpreterException('Tried to extract objects from a different subproject.')
+
+ def check_contains(self, obj, args):
+ if len(args) != 1:
+ raise InterpreterException('Contains method takes exactly one argument.')
+ item = args[0]
+ for element in obj:
+ if isinstance(element, list):
+ found = self.check_contains(element, args)
+ if found:
+ return True
+ if element == item:
+ return True
+ return False
+
    def is_subproject(self):
        # The top-level project always has an empty subproject name.
        return self.subproject != ''
+
+ @noKwargs
+ def func_set_variable(self, node, args, kwargs):
+ if len(args) != 2:
+ raise InvalidCode('Set_variable takes two arguments.')
+ varname = args[0]
+ value = args[1]
+ self.set_variable(varname, value)
+
+ @noKwargs
+ def func_get_variable(self, node, args, kwargs):
+ if len(args) < 1 or len(args) > 2:
+ raise InvalidCode('Get_variable takes one or two arguments.')
+ varname = args[0]
+ if not isinstance(varname, str):
+ raise InterpreterException('First argument must be a string.')
+ try:
+ return self.variables[varname]
+ except KeyError:
+ pass
+ if len(args) == 2:
+ return args[1]
+ raise InterpreterException('Tried to get unknown variable "%s".' % varname)
+
+ @stringArgs
+ @noKwargs
+ def func_is_variable(self, node, args, kwargs):
+ if len(args) != 1:
+ raise InvalidCode('Is_variable takes two arguments.')
+ varname = args[0]
+ return varname in self.variables
--- /dev/null
+# Copyright 2016-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This class contains the basic functionality needed to run any interpreter
+# or an interpreter-based tool.
+
+from . import mparser, mesonlib, mlog
+from . import environment, dependencies
+
+import os, copy, re
+from functools import wraps
+
+# Decorators for method calls.
+
+def check_stringlist(a, msg='Arguments must be strings.'):
+ if not isinstance(a, list):
+ mlog.debug('Not a list:', str(a))
+ raise InvalidArguments('Argument not a list.')
+ if not all(isinstance(s, str) for s in a):
+ mlog.debug('Element not a string:', str(a))
+ raise InvalidArguments(msg)
+
def noPosargs(f):
    """Decorator for interpreter functions that accept no positional args."""
    @wraps(f)
    def wrapped(self, node, args, kwargs):
        if len(args) > 0:
            raise InvalidArguments('Function does not take positional arguments.')
        return f(self, node, args, kwargs)
    return wrapped
+
def noKwargs(f):
    """Decorator for interpreter functions that accept no keyword args."""
    @wraps(f)
    def wrapped(self, node, args, kwargs):
        if len(kwargs) > 0:
            raise InvalidArguments('Function does not take keyword arguments.')
        return f(self, node, args, kwargs)
    return wrapped
+
def stringArgs(f):
    # Decorator that requires every positional argument to be a string.
    @wraps(f)
    def wrapped(self, node, args, kwargs):
        assert(isinstance(args, list))
        check_stringlist(args)
        return f(self, node, args, kwargs)
    return wrapped
+
class permittedKwargs:
    # Decorator (with arguments) that warns about keyword arguments not in
    # the permitted set; the warning says it will become a hard error later.

    def __init__(self, permitted):
        # permitted: collection of allowed keyword argument names.
        self.permitted = permitted

    def __call__(self, f):
        @wraps(f)
        def wrapped(s, node_or_state, args, kwargs):
            # The decorated callable may be an interpreter method (self has
            # 'subdir') or a module method (the state argument has it).
            # NOTE(review): if neither object has 'subdir', the warning
            # below would hit unbound subdir/lineno — presumably one of the
            # two branches always matches in practice; confirm.
            if hasattr(s, 'subdir'):
                subdir = s.subdir
                lineno = s.current_lineno
            elif hasattr(node_or_state, 'subdir'):
                subdir = node_or_state.subdir
                lineno = node_or_state.current_lineno
            for k in kwargs:
                if k not in self.permitted:
                    fname = os.path.join(subdir, environment.build_filename)
                    mlog.warning('''Passed invalid keyword argument "%s" in %s line %d.
This will become a hard error in the future.''' % (k, fname, lineno))
            return f(s, node_or_state, args, kwargs)
        return wrapped
+
+
class InterpreterException(mesonlib.MesonException):
    # Base class for all errors raised while interpreting build files.
    pass
+
class InvalidCode(InterpreterException):
    # The build definition itself is malformed or used incorrectly.
    pass
+
class InvalidArguments(InterpreterException):
    # A function or method was called with bad arguments.
    pass
+
class InterpreterObject:
    """Base class for every object exposed to the build DSL.

    Subclasses register their callable methods in ``self.methods``; calls
    are dispatched by name through :meth:`method_call`.
    """
    def __init__(self):
        self.methods = {}

    def method_call(self, method_name, args, kwargs):
        try:
            handler = self.methods[method_name]
        except KeyError:
            raise InvalidCode('Unknown method "%s" in object.' % method_name)
        return handler(args, kwargs)
+
class MutableInterpreterObject(InterpreterObject):
    # Marker base class: objects of this type are deep-copied on assignment
    # (see InterpreterBase.assignment) rather than shared by reference.
    def __init__(self):
        super().__init__()
+
class Disabler(InterpreterObject):
    # Sentinel object that disables an entire feature chain: it reports
    # found() == False and propagates through most expressions.
    def __init__(self):
        super().__init__()
        self.methods.update({'found': self.found_method})

    def found_method(self, args, kwargs):
        return False
+
def is_disabler(i):
    # True if *i* is the sentinel that disables a feature chain.
    return isinstance(i, Disabler)
+
def is_disabled(args, kwargs):
    # True if any positional argument, or any keyword value (including one
    # level of list nesting for keyword values), is a Disabler.
    # NOTE(review): positional args are not scanned inside lists here,
    # while kwarg values are — presumably because positional args get
    # flattened by callers; confirm before relying on it.
    for i in args:
        if isinstance(i, Disabler):
            return True
    for i in kwargs.values():
        if isinstance(i, Disabler):
            return True
        if isinstance(i, list):
            for j in i:
                if isinstance(j, Disabler):
                    return True
    return False
+
+class InterpreterBase:
    def __init__(self, source_root, subdir):
        self.source_root = source_root   # absolute root of the source tree
        self.funcs = {}                  # name -> DSL function implementation
        self.builtin = {}                # built-in variables (read-only to users)
        self.subdir = subdir             # current directory relative to source_root
        self.variables = {}              # user-defined variables
        self.argument_depth = 0          # >0 while reducing an argument list
        self.current_lineno = -1         # line currently being evaluated
+
    def load_root_meson_file(self):
        # Read and parse the meson.build file of the current subdir into
        # self.ast, annotating parse errors with the file name.
        mesonfile = os.path.join(self.source_root, self.subdir, environment.build_filename)
        if not os.path.isfile(mesonfile):
            raise InvalidArguments('Missing Meson file in %s' % mesonfile)
        with open(mesonfile, encoding='utf8') as mf:
            code = mf.read()
        if code.isspace():
            raise InvalidCode('Builder file is empty.')
        assert(isinstance(code, str))
        try:
            self.ast = mparser.Parser(code, self.subdir).parse()
        except mesonlib.MesonException as me:
            me.file = environment.build_filename
            raise me
+
    def parse_project(self):
        """
        Parses project() and initializes languages, compilers etc. Do this
        early because we need this before we parse the rest of the AST.

        Only the first statement of the AST (the project() call) is run here;
        run() later evaluates the remainder.
        """
        self.evaluate_codeblock(self.ast, end=1)
+
    def sanity_check_ast(self):
        # Ensure the parsed AST is a non-empty code block whose first
        # statement is the mandatory project() call.
        if not isinstance(self.ast, mparser.CodeBlockNode):
            raise InvalidCode('AST is of invalid type. Possibly a bug in the parser.')
        if not self.ast.lines:
            raise InvalidCode('No statements in code.')
        first = self.ast.lines[0]
        if not isinstance(first, mparser.FunctionNode) or first.func_name != 'project':
            raise InvalidCode('First statement must be a call to project')
+
    def run(self):
        # Evaluate everything after the first line, which is project() because
        # we already parsed that in self.parse_project()
        self.evaluate_codeblock(self.ast, start=1)
+
    def evaluate_codeblock(self, node, start=0, end=None):
        # Execute the statements of a code block in order, tagging any
        # escaping exception with file/line/column information for the user.
        if node is None:
            return
        if not isinstance(node, mparser.CodeBlockNode):
            e = InvalidCode('Tried to execute a non-codeblock. Possibly a bug in the parser.')
            e.lineno = node.lineno
            e.colno = node.colno
            raise e
        statements = node.lines[start:end]
        i = 0
        while i < len(statements):
            cur = statements[i]
            try:
                self.current_lineno = cur.lineno
                self.evaluate_statement(cur)
            except Exception as e:
                # Do not overwrite location info a nested evaluation set.
                if not(hasattr(e, 'lineno')):
                    e.lineno = cur.lineno
                    e.colno = cur.colno
                    e.file = os.path.join(self.subdir, 'meson.build')
                raise e
            i += 1 # In THE FUTURE jump over blocks and stuff.
+
    def evaluate_statement(self, cur):
        # Central dispatcher: evaluate one AST node and return its value.
        if isinstance(cur, mparser.FunctionNode):
            return self.function_call(cur)
        elif isinstance(cur, mparser.AssignmentNode):
            return self.assignment(cur)
        elif isinstance(cur, mparser.MethodNode):
            return self.method_call(cur)
        elif isinstance(cur, mparser.StringNode):
            return cur.value
        elif isinstance(cur, mparser.BooleanNode):
            return cur.value
        elif isinstance(cur, mparser.IfClauseNode):
            return self.evaluate_if(cur)
        elif isinstance(cur, mparser.IdNode):
            return self.get_variable(cur.value)
        elif isinstance(cur, mparser.ComparisonNode):
            return self.evaluate_comparison(cur)
        elif isinstance(cur, mparser.ArrayNode):
            return self.evaluate_arraystatement(cur)
        elif isinstance(cur, mparser.NumberNode):
            return cur.value
        elif isinstance(cur, mparser.AndNode):
            return self.evaluate_andstatement(cur)
        elif isinstance(cur, mparser.OrNode):
            return self.evaluate_orstatement(cur)
        elif isinstance(cur, mparser.NotNode):
            return self.evaluate_notstatement(cur)
        elif isinstance(cur, mparser.UMinusNode):
            return self.evaluate_uminusstatement(cur)
        elif isinstance(cur, mparser.ArithmeticNode):
            return self.evaluate_arithmeticstatement(cur)
        elif isinstance(cur, mparser.ForeachClauseNode):
            return self.evaluate_foreach(cur)
        elif isinstance(cur, mparser.PlusAssignmentNode):
            return self.evaluate_plusassign(cur)
        elif isinstance(cur, mparser.IndexNode):
            return self.evaluate_indexing(cur)
        elif isinstance(cur, mparser.TernaryNode):
            return self.evaluate_ternary(cur)
        elif self.is_elementary_type(cur):
            # Already-evaluated values pass through unchanged.
            return cur
        else:
            raise InvalidCode("Unknown statement.")
+
+ def evaluate_arraystatement(self, cur):
+ (arguments, kwargs) = self.reduce_arguments(cur.args)
+ if len(kwargs) > 0:
+ raise InvalidCode('Keyword arguments are invalid in array construction.')
+ return arguments
+
+ def evaluate_notstatement(self, cur):
+ v = self.evaluate_statement(cur.value)
+ if not isinstance(v, bool):
+ raise InterpreterException('Argument to "not" is not a boolean.')
+ return not v
+
    def evaluate_if(self, node):
        # Evaluate if/elif branches in order; run the first block whose
        # condition is true, otherwise the else block (if any). A disabled
        # condition short-circuits the whole clause.
        assert(isinstance(node, mparser.IfClauseNode))
        for i in node.ifs:
            result = self.evaluate_statement(i.condition)
            if is_disabler(result):
                return result
            if not(isinstance(result, bool)):
                raise InvalidCode('If clause {!r} does not evaluate to true or false.'.format(result))
            if result:
                self.evaluate_codeblock(i.block)
                return
        if not isinstance(node.elseblock, mparser.EmptyNode):
            self.evaluate_codeblock(node.elseblock)
+
    def evaluate_comparison(self, node):
        # Equality comparisons are allowed between any two values; ordering
        # comparisons require both sides to be elementary values of the
        # same type.
        val1 = self.evaluate_statement(node.left)
        if is_disabler(val1):
            return val1
        val2 = self.evaluate_statement(node.right)
        if is_disabler(val2):
            return val2
        if node.ctype == '==':
            return val1 == val2
        elif node.ctype == '!=':
            return val1 != val2
        elif not isinstance(val1, type(val2)):
            raise InterpreterException(
                'Values of different types ({}, {}) cannot be compared using {}.'.format(type(val1).__name__,
                                                                                         type(val2).__name__,
                                                                                         node.ctype))
        elif not self.is_elementary_type(val1):
            raise InterpreterException('{} can only be compared for equality.'.format(node.left.value))
        elif not self.is_elementary_type(val2):
            raise InterpreterException('{} can only be compared for equality.'.format(node.right.value))
        elif node.ctype == '<':
            return val1 < val2
        elif node.ctype == '<=':
            return val1 <= val2
        elif node.ctype == '>':
            return val1 > val2
        elif node.ctype == '>=':
            return val1 >= val2
        else:
            raise InvalidCode('You broke my compare eval.')
+
    def evaluate_andstatement(self, cur):
        # Short-circuiting boolean 'and'; disablers propagate from either side.
        l = self.evaluate_statement(cur.left)
        if is_disabler(l):
            return l
        if not isinstance(l, bool):
            raise InterpreterException('First argument to "and" is not a boolean.')
        if not l:
            # Left side is false: right side is never evaluated.
            return False
        r = self.evaluate_statement(cur.right)
        if is_disabler(r):
            return r
        if not isinstance(r, bool):
            raise InterpreterException('Second argument to "and" is not a boolean.')
        return r
+
    def evaluate_orstatement(self, cur):
        # Short-circuiting boolean 'or'; disablers propagate from either side.
        l = self.evaluate_statement(cur.left)
        if is_disabler(l):
            return l
        if not isinstance(l, bool):
            raise InterpreterException('First argument to "or" is not a boolean.')
        if l:
            # Left side is true: right side is never evaluated.
            return True
        r = self.evaluate_statement(cur.right)
        if is_disabler(r):
            return r
        if not isinstance(r, bool):
            raise InterpreterException('Second argument to "or" is not a boolean.')
        return r
+
    def evaluate_uminusstatement(self, cur):
        # Unary minus: only defined for integers; disablers propagate.
        v = self.evaluate_statement(cur.value)
        if is_disabler(v):
            return v
        if not isinstance(v, int):
            raise InterpreterException('Argument to negation is not an integer.')
        return -v
+
    def evaluate_arithmeticstatement(self, cur):
        # Binary arithmetic. 'add' supports any types Python can add
        # (strings, lists, ints); the other operators are integer-only.
        # Division is floor division.
        l = self.evaluate_statement(cur.left)
        if is_disabler(l):
            return l
        r = self.evaluate_statement(cur.right)
        if is_disabler(r):
            return r

        if cur.operation == 'add':
            try:
                return l + r
            except Exception as e:
                raise InvalidCode('Invalid use of addition: ' + str(e))
        elif cur.operation == 'sub':
            if not isinstance(l, int) or not isinstance(r, int):
                raise InvalidCode('Subtraction works only with integers.')
            return l - r
        elif cur.operation == 'mul':
            if not isinstance(l, int) or not isinstance(r, int):
                raise InvalidCode('Multiplication works only with integers.')
            return l * r
        elif cur.operation == 'div':
            if not isinstance(l, int) or not isinstance(r, int):
                raise InvalidCode('Division works only with integers.')
            return l // r
        elif cur.operation == 'mod':
            if not isinstance(l, int) or not isinstance(r, int):
                raise InvalidCode('Modulo works only with integers.')
            return l % r
        else:
            raise InvalidCode('You broke me.')
+
    def evaluate_ternary(self, node):
        # cond ? a : b — only the selected branch is evaluated.
        assert(isinstance(node, mparser.TernaryNode))
        result = self.evaluate_statement(node.condition)
        if is_disabler(result):
            return result
        if not isinstance(result, bool):
            raise InterpreterException('Ternary condition is not boolean.')
        if result:
            return self.evaluate_statement(node.trueblock)
        else:
            return self.evaluate_statement(node.falseblock)
+
    def evaluate_foreach(self, node):
        # foreach var : items — rebinds the loop variable for each element
        # and evaluates the loop body.
        assert(isinstance(node, mparser.ForeachClauseNode))
        varname = node.varname.value
        items = self.evaluate_statement(node.items)
        if is_disabler(items):
            return items
        if not isinstance(items, list):
            raise InvalidArguments('Items of foreach loop is not an array')
        for item in items:
            self.set_variable(varname, item)
            self.evaluate_codeblock(node.block)
+
+ def evaluate_plusassign(self, node):
+ assert(isinstance(node, mparser.PlusAssignmentNode))
+ varname = node.var_name
+ addition = self.evaluate_statement(node.value)
+ if is_disabler(addition):
+ set_variable(varname, addition)
+ return
+ # Remember that all variables are immutable. We must always create a
+ # full new variable and then assign it.
+ old_variable = self.get_variable(varname)
+ if isinstance(old_variable, str):
+ if not isinstance(addition, str):
+ raise InvalidArguments('The += operator requires a string on the right hand side if the variable on the left is a string')
+ new_value = old_variable + addition
+ elif isinstance(old_variable, int):
+ if not isinstance(addition, int):
+ raise InvalidArguments('The += operator requires an int on the right hand side if the variable on the left is an int')
+ new_value = old_variable + addition
+ elif not isinstance(old_variable, list):
+ raise InvalidArguments('The += operator currently only works with arrays, strings or ints ')
+ # Add other data types here.
+ else:
+ if isinstance(addition, list):
+ new_value = old_variable + addition
+ else:
+ new_value = old_variable + [addition]
+ self.set_variable(varname, new_value)
+
    def evaluate_indexing(self, node):
        # obj[index] — requires an integer index; out-of-range access is
        # reported as an interpreter error rather than a Python traceback.
        assert(isinstance(node, mparser.IndexNode))
        iobject = self.evaluate_statement(node.iobject)
        if is_disabler(iobject):
            return iobject
        if not hasattr(iobject, '__getitem__'):
            raise InterpreterException(
                'Tried to index an object that doesn\'t support indexing.')
        index = self.evaluate_statement(node.index)
        if not isinstance(index, int):
            raise InterpreterException('Index value is not an integer.')
        try:
            return iobject[index]
        except IndexError:
            raise InterpreterException('Index %d out of bounds of array of size %d.' % (index, len(iobject)))
+
    def function_call(self, node):
        # Evaluate a free function call: reduce arguments, short-circuit on
        # disablers, then dispatch through the self.funcs table.
        func_name = node.func_name
        (posargs, kwargs) = self.reduce_arguments(node.args)
        if is_disabled(posargs, kwargs):
            return Disabler()
        if func_name in self.funcs:
            return self.funcs[func_name](node, self.flatten(posargs), kwargs)
        else:
            self.unknown_function_called(func_name)
+
+ def method_call(self, node):
+ invokable = node.source_object
+ if isinstance(invokable, mparser.IdNode):
+ object_name = invokable.value
+ obj = self.get_variable(object_name)
+ else:
+ obj = self.evaluate_statement(invokable)
+ method_name = node.name
+ args = node.args
+ if isinstance(obj, str):
+ return self.string_method_call(obj, method_name, args)
+ if isinstance(obj, bool):
+ return self.bool_method_call(obj, method_name, args)
+ if isinstance(obj, int):
+ return self.int_method_call(obj, method_name, args)
+ if isinstance(obj, list):
+ return self.array_method_call(obj, method_name, args)
+ if isinstance(obj, mesonlib.File):
+ raise InvalidArguments('File object "%s" is not callable.' % obj)
+ if not isinstance(obj, InterpreterObject):
+ raise InvalidArguments('Variable "%s" is not callable.' % object_name)
+ (args, kwargs) = self.reduce_arguments(args)
+ # Special case. This is the only thing you can do with a disabler
+ # object. Every other use immediately returns the disabler object.
+ if isinstance(obj, Disabler) and method_name == 'found':
+ return False
+ if is_disabled(args, kwargs):
+ return Disabler()
+ if method_name == 'extract_objects':
+ self.validate_extraction(obj.held_object)
+ return obj.method_call(method_name, self.flatten(args), kwargs)
+
    def bool_method_call(self, obj, method_name, args):
        # Built-in methods on booleans: to_string() and to_int().
        (posargs, kwargs) = self.reduce_arguments(args)
        if is_disabled(posargs, kwargs):
            return Disabler()
        if method_name == 'to_string':
            if not posargs:
                if obj:
                    return 'true'
                else:
                    return 'false'
            elif len(posargs) == 2 and isinstance(posargs[0], str) and isinstance(posargs[1], str):
                # Custom strings for the true and false cases.
                if obj:
                    return posargs[0]
                else:
                    return posargs[1]
            else:
                raise InterpreterException('bool.to_string() must have either no arguments or exactly two string arguments that signify what values to return for true and false.')
        elif method_name == 'to_int':
            if obj:
                return 1
            else:
                return 0
        else:
            raise InterpreterException('Unknown method "%s" for a boolean.' % method_name)
+
    def int_method_call(self, obj, method_name, args):
        # Built-in methods on integers: is_even(), is_odd(), to_string().
        (posargs, kwargs) = self.reduce_arguments(args)
        if is_disabled(posargs, kwargs):
            return Disabler()
        if method_name == 'is_even':
            if not posargs:
                return obj % 2 == 0
            else:
                raise InterpreterException('int.is_even() must have no arguments.')
        elif method_name == 'is_odd':
            if not posargs:
                return obj % 2 != 0
            else:
                raise InterpreterException('int.is_odd() must have no arguments.')
        elif method_name == 'to_string':
            if not posargs:
                return str(obj)
            else:
                raise InterpreterException('int.to_string() must have no arguments.')
        else:
            raise InterpreterException('Unknown method "%s" for an integer.' % method_name)
+
+ @staticmethod
+ def _get_one_string_posarg(posargs, method_name):
+ if len(posargs) > 1:
+ m = '{}() must have zero or one arguments'
+ raise InterpreterException(m.format(method_name))
+ elif len(posargs) == 1:
+ s = posargs[0]
+ if not isinstance(s, str):
+ m = '{}() argument must be a string'
+ raise InterpreterException(m.format(method_name))
+ return s
+ return None
+
    def string_method_call(self, obj, method_name, args):
        # Built-in methods on strings. Note that 'format' receives the raw
        # argument node because it evaluates arguments itself.
        (posargs, kwargs) = self.reduce_arguments(args)
        if is_disabled(posargs, kwargs):
            return Disabler()
        if method_name == 'strip':
            s = self._get_one_string_posarg(posargs, 'strip')
            if s is not None:
                return obj.strip(s)
            return obj.strip()
        elif method_name == 'format':
            return self.format_string(obj, args)
        elif method_name == 'to_upper':
            return obj.upper()
        elif method_name == 'to_lower':
            return obj.lower()
        elif method_name == 'underscorify':
            # Every non-alphanumeric character becomes an underscore.
            return re.sub(r'[^a-zA-Z0-9]', '_', obj)
        elif method_name == 'split':
            s = self._get_one_string_posarg(posargs, 'split')
            if s is not None:
                return obj.split(s)
            return obj.split()
        elif method_name == 'startswith' or method_name == 'contains' or method_name == 'endswith':
            s = posargs[0]
            if not isinstance(s, str):
                raise InterpreterException('Argument must be a string.')
            if method_name == 'startswith':
                return obj.startswith(s)
            elif method_name == 'contains':
                return obj.find(s) >= 0
            return obj.endswith(s)
        elif method_name == 'to_int':
            try:
                return int(obj)
            except Exception:
                raise InterpreterException('String {!r} cannot be converted to int'.format(obj))
        elif method_name == 'join':
            if len(posargs) != 1:
                raise InterpreterException('Join() takes exactly one argument.')
            strlist = posargs[0]
            check_stringlist(strlist)
            return obj.join(strlist)
        elif method_name == 'version_compare':
            if len(posargs) != 1:
                raise InterpreterException('Version_compare() takes exactly one argument.')
            cmpr = posargs[0]
            if not isinstance(cmpr, str):
                raise InterpreterException('Version_compare() argument must be a string.')
            return mesonlib.version_compare(obj, cmpr)
        raise InterpreterException('Unknown method "%s" for a string.' % method_name)
+
    def unknown_function_called(self, func_name):
        # Hook for subclasses; the base implementation always errors out.
        raise InvalidCode('Unknown function "%s".' % func_name)
+
    def array_method_call(self, obj, method_name, args):
        # Built-in methods on arrays: contains(), length() and get().
        (posargs, kwargs) = self.reduce_arguments(args)
        if is_disabled(posargs, kwargs):
            return Disabler()
        if method_name == 'contains':
            return self.check_contains(obj, posargs)
        elif method_name == 'length':
            return len(obj)
        elif method_name == 'get':
            index = posargs[0]
            fallback = None
            if len(posargs) == 2:
                fallback = posargs[1]
            elif len(posargs) > 2:
                m = 'Array method \'get()\' only takes two arguments: the ' \
                    'index and an optional fallback value if the index is ' \
                    'out of range.'
                raise InvalidArguments(m)
            if not isinstance(index, int):
                raise InvalidArguments('Array index must be a number.')
            # Negative indices count from the end, like in Python.
            if index < -len(obj) or index >= len(obj):
                if fallback is None:
                    m = 'Array index {!r} is out of bounds for array of size {!r}.'
                    raise InvalidArguments(m.format(index, len(obj)))
                return fallback
            return obj[index]
        m = 'Arrays do not have a method called {!r}.'
        raise InterpreterException(m.format(method_name))
+
    def reduce_arguments(self, args):
        # Evaluate an argument node into (positional list, keyword dict).
        # argument_depth guards against assignments inside argument lists.
        assert(isinstance(args, mparser.ArgumentNode))
        if args.incorrect_order():
            raise InvalidArguments('All keyword arguments must be after positional arguments.')
        self.argument_depth += 1
        reduced_pos = [self.evaluate_statement(arg) for arg in args.arguments]
        reduced_kw = {}
        for key in args.kwargs.keys():
            if not isinstance(key, str):
                raise InvalidArguments('Keyword argument name is not a string.')
            a = args.kwargs[key]
            reduced_kw[key] = self.evaluate_statement(a)
        self.argument_depth -= 1
        return reduced_pos, reduced_kw
+
    def flatten(self, args):
        # Recursively flatten nested lists and unwrap string nodes into
        # plain values; scalars pass through unchanged.
        if isinstance(args, mparser.StringNode):
            return args.value
        if isinstance(args, (int, str, mesonlib.File, InterpreterObject)):
            return args
        result = []
        for a in args:
            if isinstance(a, list):
                rest = self.flatten(a)
                result = result + rest
            elif isinstance(a, mparser.StringNode):
                result.append(a.value)
            else:
                result.append(a)
        return result
+
    def assignment(self, node):
        # Handle 'var = expression'. Assignments inside an argument list
        # are almost always a mistyped keyword argument, hence the hint.
        assert(isinstance(node, mparser.AssignmentNode))
        if self.argument_depth != 0:
            raise InvalidArguments('''Tried to assign values inside an argument list.
To specify a keyword argument, use : instead of =.''')
        var_name = node.var_name
        if not isinstance(var_name, str):
            raise InvalidArguments('Tried to assign value to a non-variable.')
        value = self.evaluate_statement(node.value)
        if not self.is_assignable(value):
            raise InvalidCode('Tried to assign an invalid value to variable.')
        # For mutable objects we need to make a copy on assignment
        if isinstance(value, MutableInterpreterObject):
            value = copy.deepcopy(value)
        self.set_variable(var_name, value)
        return None
+
    def set_variable(self, varname, variable):
        # Bind a value to a user variable after validating the value, the
        # identifier syntax, and that no built-in name is being shadowed.
        if variable is None:
            raise InvalidCode('Can not assign None to variable.')
        if not isinstance(varname, str):
            raise InvalidCode('First argument to set_variable must be a string.')
        if not self.is_assignable(variable):
            raise InvalidCode('Assigned value not of assignable type.')
        if re.match('[_a-zA-Z][_0-9a-zA-Z]*$', varname) is None:
            raise InvalidCode('Invalid variable name: ' + varname)
        if varname in self.builtin:
            raise InvalidCode('Tried to overwrite internal variable "%s"' % varname)
        self.variables[varname] = variable
+
+ def get_variable(self, varname):
+ if varname in self.builtin:
+ return self.builtin[varname]
+ if varname in self.variables:
+ return self.variables[varname]
+ raise InvalidCode('Unknown variable "%s".' % varname)
+
    def is_assignable(self, value):
        # Only these types may be stored in a DSL variable.
        return isinstance(value, (InterpreterObject, dependencies.Dependency,
                                  str, int, list, mesonlib.File))
+
    def is_elementary_type(self, v):
        # Values of these types evaluate to themselves in the DSL.
        return isinstance(v, (int, float, str, bool, list))
--- /dev/null
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .mesonlib import Popen_safe
+
class StaticLinker:
    # Marker base class for static-archive linkers (lib.exe, ar, ...).
    pass
+
+
class VisualStudioLinker(StaticLinker):
    # Static linker wrapping Microsoft's lib.exe. Most hooks return empty
    # lists because static archives take no build-type, coverage, rpath,
    # thread or per-option flags.
    always_args = ['/NOLOGO']

    def __init__(self, exelist):
        # exelist: command list used to invoke the linker.
        self.exelist = exelist

    def get_exelist(self):
        # Return a copy so callers cannot mutate our state.
        return self.exelist[:]

    def get_std_link_args(self):
        return []

    def get_buildtype_linker_args(self, buildtype):
        return []

    def get_output_args(self, target):
        return ['/OUT:' + target]

    def get_coverage_link_args(self):
        return []

    def get_always_args(self):
        return VisualStudioLinker.always_args[:]

    def get_linker_always_args(self):
        return VisualStudioLinker.always_args[:]

    def build_rpath_args(self, build_dir, from_dir, rpath_paths, build_rpath, install_rpath):
        return []

    def thread_link_flags(self, env):
        return []

    def get_option_link_args(self, options):
        return []

    @classmethod
    def unix_args_to_native(cls, args):
        # Reuse the MSVC compiler's argument translation; imported lazily
        # to avoid a circular module dependency.
        from .compilers import VisualStudioCCompiler
        return VisualStudioCCompiler.unix_args_to_native(args)

    def get_link_debugfile_args(self, targetfile):
        # Static libraries do not have PDB files
        return []
+
+
class ArLinker(StaticLinker):
    # Static linker wrapping Unix 'ar'. Most hooks return empty lists
    # because static archives take no build-type, coverage, rpath, thread
    # or per-option flags.

    def __init__(self, exelist):
        # exelist: command list used to invoke ar.
        self.exelist = exelist
        self.id = 'ar'
        # Probe the help output to detect the 'D' (deterministic) flag.
        pc, stdo = Popen_safe(self.exelist + ['-h'])[0:2]
        # Enable deterministic builds if they are available.
        if '[D]' in stdo:
            self.std_args = ['csrD']
        else:
            self.std_args = ['csr']

    def build_rpath_args(self, build_dir, from_dir, rpath_paths, build_rpath, install_rpath):
        return []

    def get_exelist(self):
        # Return a copy so callers cannot mutate our state.
        return self.exelist[:]

    def get_std_link_args(self):
        return self.std_args

    def get_output_args(self, target):
        return [target]

    def get_buildtype_linker_args(self, buildtype):
        return []

    def get_linker_always_args(self):
        return []

    def get_coverage_link_args(self):
        return []

    def get_always_args(self):
        return []

    def thread_link_flags(self, env):
        return []

    def get_option_link_args(self, options):
        return []

    @classmethod
    def unix_args_to_native(cls, args):
        # ar already takes Unix-style arguments; return a copy unchanged.
        return args[:]

    def get_link_debugfile_args(self, targetfile):
        return []
--- /dev/null
+# Copyright 2014-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys, os
+import pickle
+import argparse
+from . import coredata, mesonlib
+
# Command-line interface of "meson configure". Defined at module level so
# run() (and external callers) can reuse a single parser instance.
parser = argparse.ArgumentParser(prog='meson configure')

# Repeatable -Dkey=value option assignments, collected into options.sets.
parser.add_argument('-D', action='append', default=[], dest='sets',
                    help='Set an option to the given value.')
parser.add_argument('directory', nargs='*')
parser.add_argument('--clearcache', action='store_true', default=False,
                    help='Clear cached state (e.g. found dependencies)')
+
class ConfException(mesonlib.MesonException):
    """Raised for user-facing errors while running "meson configure"."""
    pass
+
class Conf:
    """Read/modify/save access to an existing Meson build directory.

    Loads the pickled coredata and build objects from
    <build_dir>/meson-private and lets callers inspect or change options
    before writing coredata back with save().
    """

    def __init__(self, build_dir):
        self.build_dir = build_dir
        self.coredata_file = os.path.join(build_dir, 'meson-private/coredata.dat')
        self.build_file = os.path.join(build_dir, 'meson-private/build.dat')
        if not os.path.isfile(self.coredata_file) or not os.path.isfile(self.build_file):
            raise ConfException('Directory %s does not seem to be a Meson build directory.' % build_dir)
        with open(self.coredata_file, 'rb') as f:
            self.coredata = pickle.load(f)
        with open(self.build_file, 'rb') as f:
            self.build = pickle.load(f)
        # The pickled layout is not stable across Meson versions; refuse to
        # operate on state written by a different version.
        if self.coredata.version != coredata.version:
            raise ConfException('Version mismatch (%s vs %s)' %
                                (coredata.version, self.coredata.version))

    def clear_cache(self):
        # Drop cached dependency lookups so they are re-detected next time.
        self.coredata.deps = {}

    def save(self):
        # Only called if something has changed so overwrite unconditionally.
        with open(self.coredata_file, 'wb') as f:
            pickle.dump(self.coredata, f)
        # We don't write the build file because any changes to it
        # are erased when Meson is executed the next time, i.e. when
        # Ninja is run.

    def print_aligned(self, arr):
        """Print a table of option dicts (keys 'name', 'descr', 'value',
        'choices') with each column padded to its widest entry. The
        'choices' column is omitted entirely when no row has choices.
        Note: mutates the dicts in *arr* (stringifies value/choices)."""
        if not arr:
            return
        titles = {'name': 'Option', 'descr': 'Description', 'value': 'Current Value', 'choices': 'Possible Values'}
        len_name = longest_name = len(titles['name'])
        len_descr = longest_descr = len(titles['descr'])
        len_value = longest_value = len(titles['value'])
        longest_choices = 0 # not printed if we don't get any optional values

        # calculate the max length of each
        for x in arr:
            name = x['name']
            descr = x['descr']
            # Booleans etc. are printed lower-case ('true'/'false').
            value = x['value'] if isinstance(x['value'], str) else str(x['value']).lower()
            choices = ''
            if isinstance(x['choices'], list):
                if x['choices']:
                    x['choices'] = [s if isinstance(s, str) else str(s).lower() for s in x['choices']]
                    choices = '[%s]' % ', '.join(map(str, x['choices']))
            elif x['choices']:
                choices = x['choices'] if isinstance(x['choices'], str) else str(x['choices']).lower()

            longest_name = max(longest_name, len(name))
            longest_descr = max(longest_descr, len(descr))
            longest_value = max(longest_value, len(value))
            longest_choices = max(longest_choices, len(choices))

            # update possible non strings
            x['value'] = value
            x['choices'] = choices

        # prints header
        namepad = ' ' * (longest_name - len_name)
        valuepad = ' ' * (longest_value - len_value)
        if longest_choices:
            len_choices = len(titles['choices'])
            longest_choices = max(longest_choices, len_choices)
            choicepad = ' ' * (longest_choices - len_choices)
            print(' %s%s %s%s %s%s %s' % (titles['name'], namepad, titles['value'], valuepad, titles['choices'], choicepad, titles['descr']))
            print(' %s%s %s%s %s%s %s' % ('-' * len_name, namepad, '-' * len_value, valuepad, '-' * len_choices, choicepad, '-' * len_descr))
        else:
            print(' %s%s %s%s %s' % (titles['name'], namepad, titles['value'], valuepad, titles['descr']))
            print(' %s%s %s%s %s' % ('-' * len_name, namepad, '-' * len_value, valuepad, '-' * len_descr))

        # print values
        for i in arr:
            name = i['name']
            descr = i['descr']
            value = i['value']
            choices = i['choices']

            namepad = ' ' * (longest_name - len(name))
            valuepad = ' ' * (longest_value - len(value))
            if longest_choices:
                choicespad = ' ' * (longest_choices - len(choices))
                f = ' %s%s %s%s %s%s %s' % (name, namepad, value, valuepad, choices, choicespad, descr)
            else:
                f = ' %s%s %s%s %s' % (name, namepad, value, valuepad, descr)

            print(f)

    def set_options(self, options):
        """Apply a list of 'key=value' strings to the stored coredata.

        Keys are matched against builtin, backend, user, compiler and base
        options in that order; '<lang>_link_args'/'<lang>_args' keys update
        the per-language external linker/compiler argument lists.
        Raises ConfException for malformed or unknown keys."""
        for o in options:
            if '=' not in o:
                raise ConfException('Value "%s" not of type "a=b".' % o)
            (k, v) = o.split('=', 1)
            if coredata.is_builtin_option(k):
                self.coredata.set_builtin_option(k, v)
            elif k in self.coredata.backend_options:
                tgt = self.coredata.backend_options[k]
                tgt.set_value(v)
            elif k in self.coredata.user_options:
                tgt = self.coredata.user_options[k]
                tgt.set_value(v)
            elif k in self.coredata.compiler_options:
                tgt = self.coredata.compiler_options[k]
                tgt.set_value(v)
            elif k in self.coredata.base_options:
                tgt = self.coredata.base_options[k]
                tgt.set_value(v)
            elif k.endswith('_link_args'):
                lang = k[:-10]
                if lang not in self.coredata.external_link_args:
                    raise ConfException('Unknown language %s in linkargs.' % lang)
                # TODO, currently split on spaces, make it so that user
                # can pass in an array string.
                newvalue = v.split()
                self.coredata.external_link_args[lang] = newvalue
            elif k.endswith('_args'):
                lang = k[:-5]
                if lang not in self.coredata.external_args:
                    raise ConfException('Unknown language %s in compile args' % lang)
                # TODO same fix as above
                newvalue = v.split()
                self.coredata.external_args[lang] = newvalue
            else:
                raise ConfException('Unknown option %s.' % k)

    def print_conf(self):
        """Pretty-print every configurable option, grouped by category."""
        print('Core properties:')
        print(' Source dir', self.build.environment.source_dir)
        print(' Build dir ', self.build.environment.build_dir)
        print('')
        print('Core options:')
        carr = []
        for key in ['buildtype', 'warning_level', 'werror', 'strip', 'unity', 'default_library']:
            carr.append({'name': key,
                         'descr': coredata.get_builtin_option_description(key),
                         'value': self.coredata.get_builtin_option(key),
                         'choices': coredata.get_builtin_option_choices(key)})
        self.print_aligned(carr)
        print('')
        bekeys = sorted(self.coredata.backend_options.keys())
        if not bekeys:
            print(' No backend options\n')
        else:
            bearr = []
            for k in bekeys:
                o = self.coredata.backend_options[k]
                bearr.append({'name': k, 'descr': o.description, 'value': o.value, 'choices': ''})
            self.print_aligned(bearr)
        print('')
        print('Base options:')
        okeys = sorted(self.coredata.base_options.keys())
        if not okeys:
            print(' No base options\n')
        else:
            coarr = []
            for k in okeys:
                o = self.coredata.base_options[k]
                coarr.append({'name': k, 'descr': o.description, 'value': o.value, 'choices': o.choices})
            self.print_aligned(coarr)
        print('')
        print('Compiler arguments:')
        for (lang, args) in self.coredata.external_args.items():
            print(' ' + lang + '_args', str(args))
        print('')
        print('Linker args:')
        for (lang, args) in self.coredata.external_link_args.items():
            print(' ' + lang + '_link_args', str(args))
        print('')
        print('Compiler options:')
        okeys = sorted(self.coredata.compiler_options.keys())
        if not okeys:
            print(' No compiler options\n')
        else:
            coarr = []
            for k in okeys:
                o = self.coredata.compiler_options[k]
                coarr.append({'name': k, 'descr': o.description, 'value': o.value, 'choices': ''})
            self.print_aligned(coarr)
        print('')
        print('Directories:')
        parr = []
        for key in ['prefix',
                    'libdir',
                    'libexecdir',
                    'bindir',
                    'sbindir',
                    'includedir',
                    'datadir',
                    'mandir',
                    'infodir',
                    'localedir',
                    'sysconfdir',
                    'localstatedir',
                    'sharedstatedir',
                    ]:
            parr.append({'name': key,
                         'descr': coredata.get_builtin_option_description(key),
                         'value': self.coredata.get_builtin_option(key),
                         'choices': coredata.get_builtin_option_choices(key)})
        self.print_aligned(parr)
        print('')
        print('Project options:')
        if not self.coredata.user_options:
            print(' This project does not have any options')
        else:
            options = self.coredata.user_options
            keys = list(options.keys())
            keys.sort()
            optarr = []
            for key in keys:
                opt = options[key]
                if (opt.choices is None) or (not opt.choices):
                    # Zero length list or string
                    choices = ''
                else:
                    # A non zero length list or string, convert to string
                    choices = str(opt.choices)
                optarr.append({'name': key,
                               'descr': opt.description,
                               'value': opt.value,
                               'choices': choices})
            self.print_aligned(optarr)
        print('')
        print('Testing options:')
        tarr = []
        for key in ['stdsplit', 'errorlogs']:
            tarr.append({'name': key,
                         'descr': coredata.get_builtin_option_description(key),
                         'value': self.coredata.get_builtin_option(key),
                         'choices': coredata.get_builtin_option_choices(key)})
        self.print_aligned(tarr)
+
def run(args):
    """Entry point of "meson configure"; returns a process exit code."""
    args = mesonlib.expand_arguments(args)
    if not args:
        args = [os.getcwd()]
    options = parser.parse_args(args)
    if len(options.directory) > 1:
        print('%s <build directory>' % args[0])
        print('If you omit the build directory, the current directory is substituted.')
        return 1
    builddir = options.directory[0] if options.directory else os.getcwd()
    try:
        conf = Conf(builddir)
        if options.sets:
            conf.set_options(options.sets)
            conf.save()
        elif options.clearcache:
            conf.clear_cache()
            conf.save()
        else:
            # Nothing changed, so nothing to save either.
            conf.print_conf()
    except ConfException as e:
        print('Meson configurator encountered an error:\n')
        print(e)
        return 1
    return 0

if __name__ == '__main__':
    sys.exit(run(sys.argv[1:]))
--- /dev/null
+# Copyright 2012-2015 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A library of random helper functionality."""
+
+import sys
+import stat
+import time
+import platform, subprocess, operator, os, shutil, re
+import collections
+
+from glob import glob
+
def detect_meson_py_location():
    """Return an absolute-ish path to the running Meson entry point.

    Build files embed a command that re-invokes Meson later (e.g. from
    Ninja, possibly from a different working directory), so the path must
    remain valid after a chdir. Raises RuntimeError when no strategy works.
    """
    c = sys.argv[0]
    c_fname = os.path.split(c)[1]
    if c_fname == 'meson' or c_fname == 'meson.py':
        # $ /foo/meson.py <args>
        if os.path.isabs(c):
            return c
        # $ meson <args> (gets run from /usr/bin/meson)
        in_path_exe = shutil.which(c_fname)
        if in_path_exe:
            # Special case: when run like "./meson.py <opts>" and user has
            # period in PATH, we need to expand it out, because, for example,
            # "ninja test" will be run from a different directory.
            if '.' in os.environ['PATH'].split(':'):
                p, f = os.path.split(in_path_exe)
                if p == '' or p == '.':
                    return os.path.join(os.getcwd(), f)
            return in_path_exe
    # $ python3 ./meson.py <args>
    if os.path.exists(c):
        return os.path.join(os.getcwd(), c)

    # The only thing remaining is to try to find the bundled executable and
    # pray distro packagers have not moved it.
    fname = os.path.join(os.path.dirname(__file__), '..', 'meson.py')
    if not os.path.exists(fname):
        raise RuntimeError('Could not determine how to run Meson. Please file a bug with details.')
    return fname
+
# Work out, once at import time, how to re-invoke Python and Meson itself;
# other parts of Meson embed these commands into generated build files.
if os.path.basename(sys.executable) == 'meson.exe':
    # In Windows and using the MSI installed executable.
    meson_command = [sys.executable]
    python_command = [sys.executable, 'runpython']
else:
    python_command = [sys.executable]
    meson_command = python_command + [detect_meson_py_location()]

# Put this in objects that should not get dumped to pickle files
# by accident.
import threading
an_unpicklable_object = threading.Lock()
+
class MesonException(Exception):
    '''Base class for all exceptions thrown by Meson.'''
+
class EnvironmentException(MesonException):
    '''Exceptions thrown while processing and creating the build environment.'''
+
class FileMode:
    """Install-time file mode: a symbolic 'rwxr-xr-x' permission string
    parsed to st_mode bits, plus optional owner/group names."""
    # The first triad is for owner permissions, the second for group permissions,
    # and the third for others (everyone else).
    # For the 1st character:
    #  'r' means can read
    #  '-' means not allowed
    # For the 2nd character:
    #  'w' means can write
    #  '-' means not allowed
    # For the 3rd character:
    #  'x' means can execute
    #  's' means can execute and setuid/setgid is set (owner/group triads only)
    #  'S' means cannot execute and setuid/setgid is set (owner/group triads only)
    #  't' means can execute and sticky bit is set ("others" triads only)
    #  'T' means cannot execute and sticky bit is set ("others" triads only)
    #  '-' means none of these are allowed
    #
    # The meanings of 'rwx' perms is not obvious for directories; see:
    # https://www.hackinglinuxexposed.com/articles/20030424.html
    #
    # For information on this notation such as setuid/setgid/sticky bits, see:
    # https://en.wikipedia.org/wiki/File_system_permissions#Symbolic_notation
    symbolic_perms_regex = re.compile('[r-][w-][xsS-]' # Owner perms
                                      '[r-][w-][xsS-]' # Group perms
                                      '[r-][w-][xtT-]') # Others perms

    def __init__(self, perms=None, owner=None, group=None):
        # Original symbolic string, or None meaning "leave perms untouched".
        self.perms_s = perms
        # Numeric st_mode bits usable with os.chmod(), or -1 when perms is None.
        self.perms = self.perms_s_to_bits(perms)
        self.owner = owner
        self.group = group

    def __repr__(self):
        # Bug fix: the format string previously lacked the closing '>'.
        ret = '<FileMode: {!r} owner={} group={}>'
        return ret.format(self.perms_s, self.owner, self.group)

    @classmethod
    def perms_s_to_bits(cls, perms_s):
        '''
        Does the opposite of stat.filemode(), converts strings of the form
        'rwxr-xr-x' to st_mode enums which can be passed to os.chmod().
        Returns -1 when perms_s is None (permissions will not be touched).
        Raises MesonException for non-string or malformed input.
        '''
        if perms_s is None:
            # No perms specified, we will not touch the permissions
            return -1
        eg = 'rwxr-xr-x'
        if not isinstance(perms_s, str):
            msg = 'Install perms must be a string. For example, {!r}'
            raise MesonException(msg.format(eg))
        if len(perms_s) != 9 or not cls.symbolic_perms_regex.match(perms_s):
            msg = 'File perms {!r} must be exactly 9 chars. For example, {!r}'
            raise MesonException(msg.format(perms_s, eg))
        perms = 0
        # Owner perms
        if perms_s[0] == 'r':
            perms |= stat.S_IRUSR
        if perms_s[1] == 'w':
            perms |= stat.S_IWUSR
        if perms_s[2] == 'x':
            perms |= stat.S_IXUSR
        elif perms_s[2] == 'S':
            # setuid set but not executable
            perms |= stat.S_ISUID
        elif perms_s[2] == 's':
            perms |= stat.S_IXUSR
            perms |= stat.S_ISUID
        # Group perms
        if perms_s[3] == 'r':
            perms |= stat.S_IRGRP
        if perms_s[4] == 'w':
            perms |= stat.S_IWGRP
        if perms_s[5] == 'x':
            perms |= stat.S_IXGRP
        elif perms_s[5] == 'S':
            perms |= stat.S_ISGID
        elif perms_s[5] == 's':
            perms |= stat.S_IXGRP
            perms |= stat.S_ISGID
        # Others perms
        if perms_s[6] == 'r':
            perms |= stat.S_IROTH
        if perms_s[7] == 'w':
            perms |= stat.S_IWOTH
        if perms_s[8] == 'x':
            perms |= stat.S_IXOTH
        elif perms_s[8] == 'T':
            # sticky bit set but not executable
            perms |= stat.S_ISVTX
        elif perms_s[8] == 't':
            perms |= stat.S_IXOTH
            perms |= stat.S_ISVTX
        return perms
+
class File:
    """A file in the source or build tree, addressed by (subdir, fname).

    is_built distinguishes generated files (relative to the build dir)
    from checked-in sources (relative to the source dir)."""

    def __init__(self, is_built, subdir, fname):
        self.is_built = is_built
        self.subdir = subdir
        self.fname = fname
        assert(isinstance(self.subdir, str))
        assert(isinstance(self.fname, str))

    def __str__(self):
        return self.relative_name()

    def __repr__(self):
        suffix = '>' if self.is_built else ' (not built)>'
        return '<File: {0}'.format(self.relative_name()) + suffix

    @staticmethod
    def from_source_file(source_root, subdir, fname):
        # Source files must already exist on disk.
        path = os.path.join(source_root, subdir, fname)
        if not os.path.isfile(path):
            raise MesonException('File %s does not exist.' % fname)
        return File(False, subdir, fname)

    @staticmethod
    def from_built_file(subdir, fname):
        return File(True, subdir, fname)

    @staticmethod
    def from_absolute_file(fname):
        return File(False, '', fname)

    def rel_to_builddir(self, build_to_src):
        # Built files are already relative to the build dir.
        if self.is_built:
            return self.relative_name()
        return os.path.join(build_to_src, self.subdir, self.fname)

    def absolute_path(self, srcdir, builddir):
        base = builddir if self.is_built else srcdir
        return os.path.join(base, self.relative_name())

    def endswith(self, ending):
        return self.fname.endswith(ending)

    def split(self, s):
        return self.fname.split(s)

    def _key(self):
        # Identity triple shared by equality and hashing.
        return (self.fname, self.subdir, self.is_built)

    def __eq__(self, other):
        return self._key() == other._key()

    def __hash__(self):
        return hash(self._key())

    def relative_name(self):
        return os.path.join(self.subdir, self.fname)
+
def get_compiler_for_source(compilers, src):
    """Return the first compiler in *compilers* that can compile *src*;
    raise RuntimeError when none can."""
    for candidate in compilers:
        if candidate.can_compile(src):
            return candidate
    raise RuntimeError('No specified compiler can handle file {!s}'.format(src))
+
def classify_unity_sources(compilers, sources):
    """Group *sources* by the compiler that handles each of them.

    Returns a dict mapping compiler -> list of its sources, preserving the
    order in which sources were given. Raises RuntimeError (via
    get_compiler_for_source) when a source has no matching compiler.
    """
    compsrclist = {}
    for src in sources:
        comp = get_compiler_for_source(compilers, src)
        # setdefault replaces the manual "first hit creates the list" branch.
        compsrclist.setdefault(comp, []).append(src)
    return compsrclist
+
def is_osx():
    """True when the build machine runs macOS (Darwin)."""
    sysname = platform.system().lower()
    return sysname == 'darwin'
+
def is_linux():
    """True when the build machine runs Linux."""
    sysname = platform.system().lower()
    return sysname == 'linux'
+
def is_haiku():
    """True when the build machine runs Haiku."""
    sysname = platform.system().lower()
    return sysname == 'haiku'
+
def is_windows():
    """True on native Windows, including MinGW environments."""
    sysname = platform.system().lower()
    return 'mingw' in sysname or sysname == 'windows'
+
def is_cygwin():
    """True when running under Cygwin."""
    return platform.system().lower().startswith('cygwin')
+
def is_debianlike():
    # Debian and its derivatives all ship /etc/debian_version.
    return os.path.isfile('/etc/debian_version')
+
def for_windows(is_cross, env):
    """
    Host machine is windows?

    Note: 'host' is the machine on which compiled binaries will run
    """
    if not is_cross:
        return is_windows()
    cross = env.cross_info
    if cross.has_host():
        return cross.config['host_machine']['system'] == 'windows'
    return False
+
def for_cygwin(is_cross, env):
    """
    Host machine is cygwin?

    Note: 'host' is the machine on which compiled binaries will run
    """
    if not is_cross:
        return is_cygwin()
    cross = env.cross_info
    if cross.has_host():
        return cross.config['host_machine']['system'] == 'cygwin'
    return False
+
def for_darwin(is_cross, env):
    """
    Host machine is Darwin (iOS/OS X)?

    Note: 'host' is the machine on which compiled binaries will run
    """
    if not is_cross:
        return is_osx()
    cross = env.cross_info
    if cross.has_host():
        return cross.config['host_machine']['system'] == 'darwin'
    return False
+
def for_haiku(is_cross, env):
    """
    Host machine is Haiku?

    Note: 'host' is the machine on which compiled binaries will run
    """
    if not is_cross:
        return is_haiku()
    cross = env.cross_info
    if cross.has_host():
        return cross.config['host_machine']['system'] == 'haiku'
    return False
+
def exe_exists(arglist):
    """Return True when running *arglist* succeeds (exit status 0).

    A missing executable is reported as False rather than an error.
    """
    try:
        proc = subprocess.Popen(arglist, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        proc.communicate()
        return proc.returncode == 0
    except FileNotFoundError:
        return False
+
def detect_vcs(source_dir):
    """Find the version control system managing *source_dir*, if any.

    Walks from source_dir up to the filesystem root, checking for a known
    repository directory whose command-line tool is also installed.
    Returns the matching descriptor dict with 'wc_dir' set to the working
    copy root, or None when nothing matched.
    """
    vcs_systems = [
        {'name': 'git', 'cmd': 'git', 'repo_dir': '.git', 'get_rev': 'git describe --dirty=+', 'rev_regex': '(.*)', 'dep': '.git/logs/HEAD'},
        {'name': 'mercurial', 'cmd': 'hg', 'repo_dir': '.hg', 'get_rev': 'hg id -i', 'rev_regex': '(.*)', 'dep': '.hg/dirstate'},
        {'name': 'subversion', 'cmd': 'svn', 'repo_dir': '.svn', 'get_rev': 'svn info', 'rev_regex': 'Revision: (.*)', 'dep': '.svn/wc.db'},
        {'name': 'bazaar', 'cmd': 'bzr', 'repo_dir': '.bzr', 'get_rev': 'bzr revno', 'rev_regex': '(.*)', 'dep': '.bzr'},
    ]

    segments = source_dir.replace('\\', '/').split('/')
    # Deepest directory first, then each ancestor up to (and including) the root.
    for depth in range(len(segments), -1, -1):
        candidate = '/'.join(segments[:depth])
        for vcs in vcs_systems:
            has_repo = os.path.isdir(os.path.join(candidate, vcs['repo_dir']))
            if has_repo and shutil.which(vcs['cmd']):
                vcs['wc_dir'] = candidate
                return vcs
    return None
+
def grab_leading_numbers(vstr, strict=False):
    """Parse the leading dotted-numeric components of *vstr* into ints.

    Parsing stops at the first non-numeric component; with strict=True a
    non-numeric component raises MesonException instead.
    """
    parts = []
    for piece in vstr.rstrip('.').split('.'):
        try:
            parts.append(int(piece))
        except ValueError as e:
            if strict:
                msg = 'Invalid version to compare against: {!r}; only ' \
                      'numeric digits separated by "." are allowed: ' + str(e)
                raise MesonException(msg.format(vstr))
            break
    return parts
+
# Leading digits-and-dots portion of a version string; only ever used via
# .match(), so it grabs the numeric prefix.
numpart = re.compile('[0-9.]+')
+
def version_compare(vstr1, vstr2, strict=False):
    """Compare version *vstr1* against condition *vstr2*.

    vstr2 may start with one of >=, <=, !=, ==, =, >, < (default: equality).
    Both sides are reduced to lists of leading numeric components before
    comparison. Raises MesonException when vstr1 has no numeric prefix.
    """
    match = numpart.match(vstr1.strip())
    if match is None:
        msg = 'Uncomparable version string {!r}.'
        raise MesonException(msg.format(vstr1))
    vstr1 = match.group(0)
    cmpop = operator.eq
    # Two-character operators must be tried before their one-char prefixes.
    for prefix, op in (('>=', operator.ge), ('<=', operator.le),
                       ('!=', operator.ne), ('==', operator.eq),
                       ('=', operator.eq), ('>', operator.gt),
                       ('<', operator.lt)):
        if vstr2.startswith(prefix):
            cmpop = op
            vstr2 = vstr2[len(prefix):]
            break
    varr1 = grab_leading_numbers(vstr1, strict)
    varr2 = grab_leading_numbers(vstr2, strict)
    return cmpop(varr1, varr2)
+
def version_compare_many(vstr1, conditions):
    """Check *vstr1* against one or many conditions.

    Returns (all_passed, failed_conditions, passed_conditions).
    """
    if not isinstance(conditions, (list, tuple, frozenset)):
        conditions = [conditions]
    found = []
    not_found = []
    for req in conditions:
        bucket = found if version_compare(vstr1, req, strict=True) else not_found
        bucket.append(req)
    return not not_found, not_found, found
+
def default_libdir():
    """Guess the default library directory for this system.

    Debian-likes get their multiarch triplet ('lib/<triplet>'); systems
    with a real /usr/lib64 get 'lib64'; everything else gets 'lib'.
    """
    if is_debianlike():
        try:
            pc = subprocess.Popen(['dpkg-architecture', '-qDEB_HOST_MULTIARCH'],
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.DEVNULL)
            stdo = pc.communicate()[0]
            if pc.returncode == 0:
                return 'lib/' + stdo.decode().strip()
        except Exception:
            # Best effort: fall through to the generic guesses below.
            pass
    if os.path.isdir('/usr/lib64') and not os.path.islink('/usr/lib64'):
        return 'lib64'
    return 'lib'
+
def default_libexecdir():
    # There is no way to auto-detect this, so it must be set at build time
    return 'libexec'
+
def default_prefix():
    """Default install prefix: 'c:/' on Windows, '/usr/local' elsewhere."""
    if is_windows():
        return 'c:/'
    return '/usr/local'
+
def get_library_dirs():
    """Return the system library search directories for this platform.

    Windows and macOS use fixed paths; other Unixes get the standard
    locations plus per-architecture multiarch subdirectories.
    """
    if is_windows():
        return ['C:/mingw/lib'] # Fixme
    if is_osx():
        return ['/usr/lib'] # Fix me as well.
    # The following is probably Debian/Ubuntu specific.
    # /usr/local/lib is first because it contains stuff
    # installed by the sysadmin and is probably more up-to-date
    # than /usr/lib. If you feel that this search order is
    # problematic, please raise the issue on the mailing list.
    unixdirs = ['/usr/local/lib', '/usr/lib', '/lib']
    plat = subprocess.check_output(['uname', '-m']).decode().strip()
    # This is a terrible hack. I admit it and I'm really sorry.
    # I just don't know what the correct solution is.
    if plat == 'i686':
        plat = 'i386'
    if plat.startswith('arm'):
        plat = 'arm'
    unixdirs += glob('/usr/lib/' + plat + '*')
    if os.path.exists('/usr/lib64'):
        unixdirs.append('/usr/lib64')
    # Bug fix: this glob was previously executed twice, producing duplicate
    # '/lib/<plat>*' entries in the search path; do it only once.
    unixdirs += glob('/lib/' + plat + '*')
    if os.path.exists('/lib64'):
        unixdirs.append('/lib64')
    return unixdirs
+
+
def do_replacement(regex, line, confdata):
    """Expand @var@ templates in *line* using values from *confdata*.

    Returns (new_line, missing_variables): the substituted text plus the set
    of referenced template names that had no confdata entry (those expand to
    the empty string). Escaping: '\\@' yields a literal '@' and runs of
    backslashes directly before an '@' are halved.
    """
    missing_variables = set()

    def variable_replace(match):
        # Pairs of escape characters before '@' or '\@'
        if match.group(0).endswith('\\'):
            num_escapes = match.end(0) - match.start(0)
            return '\\' * (num_escapes // 2)
        # Single escape character and '@'
        elif match.group(0) == '\\@':
            return '@'
        # Template variable to be replaced
        else:
            varname = match.group(1)
            if varname in confdata:
                # confdata entries are (value, description) pairs.
                (var, desc) = confdata.get(varname)
                if isinstance(var, str):
                    pass
                elif isinstance(var, int):
                    # re.sub requires a str replacement; coerce ints.
                    var = str(var)
                else:
                    raise RuntimeError('Tried to replace a variable with something other than a string or int.')
            else:
                missing_variables.add(varname)
                var = ''
            return var
    return re.sub(regex, variable_replace, line), missing_variables
+
def do_mesondefine(line, confdata):
    """Expand a '#mesondefine VAR' template line using *confdata*.

    Returns the replacement line (including trailing newline):
    '#define'/'#undef' for bools, a valued '#define' for ints and strings,
    or a commented-out '#undef' when VAR is absent from confdata.
    Raises MesonException for malformed lines or unsupported value types.
    """
    arr = line.split()
    if len(arr) != 2:
        # Bug fix: the offending line was previously passed as a second
        # constructor argument instead of being %-formatted into the message,
        # so it never actually appeared in the error text.
        raise MesonException('#mesondefine does not contain exactly two tokens: %s' % line.strip())
    varname = arr[1]
    try:
        # confdata entries are (value, description) pairs; .get raises
        # KeyError for unknown names.
        (v, desc) = confdata.get(varname)
    except KeyError:
        return '/* #undef %s */\n' % varname
    if isinstance(v, bool):
        if v:
            return '#define %s\n' % varname
        else:
            return '#undef %s\n' % varname
    elif isinstance(v, int):
        return '#define %s %d\n' % (varname, v)
    elif isinstance(v, str):
        return '#define %s %s\n' % (varname, v)
    else:
        raise MesonException('#mesondefine argument "%s" is of unknown type.' % varname)
+
+
def do_conf_file(src, dst, confdata):
    """Generate *dst* from the template file *src* using *confdata*.

    Processes '#mesondefine' lines and @var@ substitutions line by line,
    writes to a temporary file, and only replaces *dst* when content
    changed so unchanged outputs do not trigger rebuilds. Returns the set
    of template variables referenced but missing from confdata.
    """
    try:
        with open(src, encoding='utf-8') as f:
            data = f.readlines()
    except Exception as e:
        raise MesonException('Could not read input file %s: %s' % (src, str(e)))
    # Only allow (a-z, A-Z, 0-9, _, -) as valid characters for a define
    # Also allow escaping '@' with '\@'
    regex = re.compile(r'(?:\\\\)+(?=\\?@)|\\@|@([-a-zA-Z0-9_]+)@')
    result = []
    missing_variables = set()
    for line in data:
        if line.startswith('#mesondefine'):
            line = do_mesondefine(line, confdata)
        else:
            line, missing = do_replacement(regex, line, confdata)
            missing_variables.update(missing)
        result.append(line)
    # Write then atomically swap in, preserving the template's permissions.
    dst_tmp = dst + '~'
    with open(dst_tmp, 'w', encoding='utf-8') as f:
        f.writelines(result)
    shutil.copymode(src, dst_tmp)
    replace_if_different(dst, dst_tmp)
    return missing_variables
+
def dump_conf_header(ofilename, cdata):
    """Write configuration data *cdata* (name -> (value, description)) out
    as a C header of #define/#undef lines at *ofilename*."""
    with open(ofilename, 'w', encoding='utf-8') as ofile:
        ofile.write('''/*
 * Autogenerated by the Meson build system.
 * Do not edit, your changes will be lost.
 */

#pragma once

''')
        for key in sorted(cdata.keys()):
            value, desc = cdata.get(key)
            if desc:
                ofile.write('/* %s */\n' % desc)
            if isinstance(value, bool):
                directive = '#define %s\n\n' % key if value else '#undef %s\n\n' % key
                ofile.write(directive)
            elif isinstance(value, (int, str)):
                ofile.write('#define %s %s\n\n' % (key, value))
            else:
                raise MesonException('Unknown data type in configuration file entry: ' + key)
+
def replace_if_different(dst, dst_tmp):
    """Move *dst_tmp* over *dst* only when their contents differ.

    Leaving an identical *dst* untouched preserves its mtime and prevents
    unnecessary rebuilds; *dst_tmp* is always consumed.
    """
    try:
        with open(dst, 'rb') as f1, open(dst_tmp, 'rb') as f2:
            identical = f1.read() == f2.read()
    except FileNotFoundError:
        # No existing dst (or tmp vanished): treat as different.
        identical = False
    if identical:
        os.unlink(dst_tmp)
    else:
        os.replace(dst_tmp, dst)
+
def listify(item, flatten=True, unholder=False):
    '''
    Return *item* embedded in a list if it is not already one, preserving
    order.
    @flatten: recursively flatten nested lists into a single flat list
    @unholder: replace each element with its .held_object, when present

    Note: unholding only works recursively when flattening.
    '''
    if not isinstance(item, list):
        if unholder and hasattr(item, 'held_object'):
            return [item.held_object]
        return [item]
    out = []
    for element in item:
        if unholder and hasattr(element, 'held_object'):
            element = element.held_object
        if flatten and isinstance(element, list):
            out.extend(listify(element, flatten=True, unholder=unholder))
        else:
            out.append(element)
    return out
+
+
def extract_as_list(dict_object, *keys, pop=False, **kwargs):
    '''
    Fetch the value for each key from *dict_object* and listify it
    (missing keys yield []). Extra kwargs are passed to listify.
    With pop=True the keys are removed from the dict as they are read.
    With a single key the plain list is returned instead of a
    one-element list of lists.
    '''
    fetch = dict_object.pop if pop else dict_object.get
    if len(keys) == 1:
        return listify(fetch(keys[0], []), **kwargs)
    return [listify(fetch(key, []), **kwargs) for key in keys]
+
+
def typeslistify(item, types):
    '''
    Ensure *item* is an instance of *types*, or a list whose elements all
    are; a bare instance is wrapped in a single-element list.
    '''
    if isinstance(item, types):
        return [item]
    if not isinstance(item, list):
        raise MesonException('Item must be a list or one of {!r}'.format(types))
    for entry in item:
        # None entries are tolerated; anything else must match.
        if entry is not None and not isinstance(entry, types):
            raise MesonException('List item must be one of {!r}'.format(types))
    return item
+
def stringlistify(item):
    # Convenience wrapper: a string, or list of strings, as a list of strings.
    return typeslistify(item, str)
+
def expand_arguments(args):
    """Expand '@file' response-file arguments into the whitespace-separated
    tokens of that file; other arguments pass through unchanged.
    Returns the expanded list, or None when a response file cannot be read.
    """
    expanded = []
    for arg in args:
        if not arg.startswith('@'):
            expanded.append(arg)
            continue

        args_file = arg[1:]
        try:
            with open(args_file) as f:
                expanded.extend(f.read().split())
        except Exception as e:
            print('Error expanding command line arguments, %s not found' % args_file)
            print(e)
            return None
    return expanded
+
def Popen_safe(args, write=None, stderr=subprocess.PIPE, **kwargs):
    """Run *args*, capturing stdout (and, by default, stderr) as text.

    Returns (Popen, stdout_str, stderr_str). When the interpreter is older
    than 3.6 or stdout has no usable encoding, decoding is delegated to
    Popen_safe_legacy.
    """
    needs_manual_decoding = sys.version_info < (3, 6) or not sys.stdout.encoding
    if needs_manual_decoding:
        return Popen_safe_legacy(args, write=write, stderr=stderr, **kwargs)
    p = subprocess.Popen(args, universal_newlines=True,
                         close_fds=False,
                         stdout=subprocess.PIPE,
                         stderr=stderr, **kwargs)
    o, e = p.communicate(write)
    return p, o, e
+
def Popen_safe_legacy(args, write=None, stderr=subprocess.PIPE, **kwargs):
    """Byte-mode fallback for Popen_safe: runs *args*, decodes the captured
    streams manually (using the console encodings when known) and
    normalizes Windows line endings. Returns (Popen, stdout, stderr)."""
    p = subprocess.Popen(args, universal_newlines=False,
                         stdout=subprocess.PIPE,
                         stderr=stderr, **kwargs)
    input_bytes = None if write is None else write.encode('utf-8')
    o, e = p.communicate(input_bytes)

    def _decode(data, stream):
        # Prefer the console's encoding; undecodable bytes are replaced
        # rather than raising.
        if data is None:
            return None
        if stream.encoding:
            text = data.decode(encoding=stream.encoding, errors='replace')
        else:
            text = data.decode(errors='replace')
        return text.replace('\r\n', '\n')

    return p, _decode(o, sys.stdout), _decode(e, sys.stderr)
+
def commonpath(paths):
    '''
    For use on Python 3.4 where os.path.commonpath is not available.
    We currently use it everywhere so this receives enough testing.
    '''
    # XXX: Replace me with os.path.commonpath when we start requiring Python 3.5
    import pathlib
    if not paths:
        raise ValueError('arg is an empty sequence')
    common = pathlib.PurePath(paths[0])
    for path in paths[1:]:
        shared = []
        for left, right in zip(common.parts, pathlib.PurePath(path).parts):
            if left != right:
                break
            shared.append(left)
        # Don't convert '' into '.'
        if not shared:
            common = ''
            break
        common = pathlib.PurePath(os.path.join(*shared))
    return str(common)
+
def iter_regexin_iter(regexiter, initer):
    '''
    Search every string in @initer for each regular expression in
    @regexiter, in order; return the first matched text, or False when
    nothing matches. Non-string items in @initer are skipped.
    '''
    for pattern in regexiter:
        for candidate in initer:
            if not isinstance(candidate, str):
                continue
            found = re.search(pattern, candidate)
            if found:
                return found.group()
    return False
+
def _substitute_values_check_errors(command, values):
    """Validate that the template holes in *command* can be filled by *values*.

    Raises MesonException when the command references input/output templates
    that the supplied values cannot satisfy: any input template with zero
    inputs, @PLAINNAME@/@BASENAME@ with multiple inputs, an out-of-range
    @INPUTnn@/@OUTPUTnn@ index, or output templates with zero outputs.
    """
    # Error checking
    inregex = ('@INPUT([0-9]+)?@', '@PLAINNAME@', '@BASENAME@')
    outregex = ('@OUTPUT([0-9]+)?@', '@OUTDIR@')
    if '@INPUT@' not in values:
        # Error out if any input-derived templates are present in the command
        match = iter_regexin_iter(inregex, command)
        if match:
            m = 'Command cannot have {!r}, since no input files were specified'
            raise MesonException(m.format(match))
    else:
        if len(values['@INPUT@']) > 1:
            # Error out if @PLAINNAME@ or @BASENAME@ is present in the command
            match = iter_regexin_iter(inregex[1:], command)
            if match:
                raise MesonException('Command cannot have {!r} when there is '
                                     'more than one input file'.format(match))
        # Error out if an invalid @INPUTnn@ template was specified
        for each in command:
            if not isinstance(each, str):
                continue
            match = re.search(inregex[0], each)
            # A matching @INPUTnn@ is valid only if values has that exact key.
            if match and match.group() not in values:
                m = 'Command cannot have {!r} since there are only {!r} inputs'
                raise MesonException(m.format(match.group(), len(values['@INPUT@'])))
    if '@OUTPUT@' not in values:
        # Error out if any output-derived templates are present in the command
        match = iter_regexin_iter(outregex, command)
        if match:
            m = 'Command cannot have {!r} since there are no outputs'
            raise MesonException(m.format(match))
    else:
        # Error out if an invalid @OUTPUTnn@ template was specified
        for each in command:
            if not isinstance(each, str):
                continue
            match = re.search(outregex[0], each)
            if match and match.group() not in values:
                m = 'Command cannot have {!r} since there are only {!r} outputs'
                raise MesonException(m.format(match.group(), len(values['@OUTPUT@'])))
+
def substitute_values(command, values):
    '''
    Return a new list built from @command with every template string found in
    the @values dict substituted. See get_filenames_templates_dict() for the
    full list of templates.

    When there are multiple inputs/outputs, @INPUT@ / @OUTPUT@ are expanded
    only when an argument is exactly that template (and then all of them are
    inserted); using them inside a longer string raises MesonException.
    '''
    # Validate templates against available values first.
    _substitute_values_check_errors(command, values)
    # Build a single regex matching every remaining (single-valued) template.
    escaped = [re.escape(key) for key in values if key not in ('@INPUT@', '@OUTPUT@')]
    template_rx = re.compile('|'.join(escaped)) if escaped else None
    result = []
    for item in command:
        if not isinstance(item, str):
            result.append(item)
            continue
        if '@INPUT@' in item:
            inputs = values['@INPUT@']
            if item == '@INPUT@':
                result.extend(inputs)
            elif len(inputs) == 1:
                result.append(item.replace('@INPUT@', inputs[0]))
            else:
                raise MesonException("Command has '@INPUT@' as part of a "
                                     "string and more than one input file")
        elif '@OUTPUT@' in item:
            outputs = values['@OUTPUT@']
            if item == '@OUTPUT@':
                result.extend(outputs)
            elif len(outputs) == 1:
                result.append(item.replace('@OUTPUT@', outputs[0]))
            else:
                raise MesonException("Command has '@OUTPUT@' as part of a "
                                     "string and more than one output file")
        elif item in values:
            # Exact template match: direct lookup beats a string replace.
            result.append(values[item])
        elif template_rx:
            # Substitute every embedded single-valued template.
            result.append(template_rx.sub(lambda m: values[m.group(0)], item))
        else:
            result.append(item)
    return result
+
def get_filenames_templates_dict(inputs, outputs):
    '''
    Build a dict mapping template strings to their substitution values:

    @INPUT@  - list of all input file paths, from @inputs
    @OUTPUT@ - list of all output file paths, from @outputs
    @OUTDIR@ - directory containing the output files ('.' when empty)

    Per-index keys @INPUT0@, @INPUT1@, ... and @OUTPUT0@, @OUTPUT1@, ...
    are always generated, one per file.

    With exactly one input, these are also generated:

    @PLAINNAME@ - the input's filename
    @BASENAME@  - the input's filename without its extension
    '''
    values = {}
    if inputs:
        # We want to substitute all the inputs.
        values['@INPUT@'] = inputs
        for idx, path in enumerate(inputs):
            # Write out @INPUT0@, @INPUT1@, ...
            values['@INPUT{}@'.format(idx)] = path
        if len(inputs) == 1:
            # Just one value, substitute @PLAINNAME@ and @BASENAME@
            plain = os.path.split(inputs[0])[1]
            values['@PLAINNAME@'] = plain
            values['@BASENAME@'] = os.path.splitext(plain)[0]
    if outputs:
        # Gather values derived from the outputs, similar to above.
        values['@OUTPUT@'] = outputs
        for idx, path in enumerate(outputs):
            values['@OUTPUT{}@'.format(idx)] = path
        # Outdir should be the same for all outputs.
        outdir = os.path.split(outputs[0])[0]
        # Many external programs fail on empty arguments.
        values['@OUTDIR@'] = outdir if outdir else '.'
    return values
+
+
def windows_proof_rmtree(f):
    """Remove the directory tree *f*, retrying on failure.

    On Windows a file cannot be deleted while anyone holds it open (for
    example an anti-virus scanner). The only workaround is to retry with
    increasing delays.
    """
    for delay in (0.1, 0.1, 0.2, 0.2, 0.2, 0.5, 0.5, 1, 1, 1, 1, 2):
        try:
            shutil.rmtree(f)
            return
        except (OSError, PermissionError):
            time.sleep(delay)
    # One final attempt; let any exception propagate this time.
    shutil.rmtree(f)
+
# The abstract base classes moved to collections.abc in Python 3.3 and the
# collections.* aliases were removed in Python 3.10, so import explicitly.
import collections.abc

class OrderedSet(collections.abc.MutableSet):
    """A set that preserves the order in which items are added, by first
    insertion.
    """
    def __init__(self, iterable=None):
        # An OrderedDict with None values gives us ordered-set semantics.
        self.__container = collections.OrderedDict()
        if iterable:
            self.update(iterable)

    def __contains__(self, value):
        return value in self.__container

    def __iter__(self):
        return iter(self.__container.keys())

    def __len__(self):
        return len(self.__container)

    def __repr__(self):
        # Don't print 'OrderedSet("")' for an empty set.
        if self.__container:
            return 'OrderedSet("{}")'.format(
                '", "'.join(repr(e) for e in self.__container.keys()))
        return 'OrderedSet()'

    def add(self, value):
        self.__container[value] = None

    def discard(self, value):
        # Unlike remove(), silently ignores missing values.
        if value in self.__container:
            del self.__container[value]

    def update(self, iterable):
        for item in iterable:
            self.__container[item] = None

    def difference(self, set_):
        """Return a new OrderedSet with the elements of self not in set_."""
        return type(self)(e for e in self if e not in set_)
--- /dev/null
+# Copyright 2012-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys, stat, traceback, pickle, argparse
+import time, datetime
+import os.path
+from . import environment, interpreter, mesonlib
+from . import build
+from . import mconf, mintro, mtest, rewriter
+import platform
+from . import mlog, coredata
+from .mesonlib import MesonException
+from .wrap import WrapMode, wraptool
+
# NOTE(review): module-level default; no uses are visible in this file —
# presumably consumed by the option machinery elsewhere (TODO confirm).
default_warning = '1'
+
def add_builtin_argument(p, name, **kwargs):
    """Add a '--<name>' option to parser *p*, pulling help text, choices and
    default value from coredata's builtin option tables."""
    dest = kwargs.get('dest', name.replace('-', '_'))
    choices = coredata.get_builtin_option_choices(dest)
    # store_true/store_false flags take no value, so no default/choices text.
    is_flag = kwargs.get('action', None) in ['store_true', 'store_false']
    help_text = coredata.get_builtin_option_description(dest)
    if not is_flag:
        help_text = help_text.rstrip('.') + ' (default: %s).' % coredata.get_builtin_option_default(dest)
    if choices and not is_flag:
        kwargs['choices'] = choices
    default = coredata.get_builtin_option_default(dest, noneIfSuppress=True)
    # SUPPRESS keeps unset options out of the parsed namespace entirely.
    kwargs['default'] = default if default is not None else argparse.SUPPRESS
    p.add_argument('--' + name, help=help_text, **kwargs)
+
def create_parser():
    """Build the argparse parser for the main meson command line."""
    p = argparse.ArgumentParser(prog='meson')
    # Plain builtin options, registered in display order.
    for name in ('prefix', 'libdir', 'libexecdir', 'bindir', 'sbindir',
                 'includedir', 'datadir', 'mandir', 'infodir', 'localedir',
                 'sysconfdir', 'localstatedir', 'sharedstatedir', 'backend',
                 'buildtype'):
        add_builtin_argument(p, name)
    # Builtin options needing a custom action or destination.
    add_builtin_argument(p, 'strip', action='store_true')
    add_builtin_argument(p, 'unity')
    add_builtin_argument(p, 'werror', action='store_true')
    add_builtin_argument(p, 'layout')
    add_builtin_argument(p, 'default-library')
    add_builtin_argument(p, 'warnlevel', dest='warning_level')
    add_builtin_argument(p, 'stdsplit', action='store_false')
    add_builtin_argument(p, 'errorlogs', action='store_false')
    p.add_argument('--cross-file', default=None,
                   help='File describing cross compilation environment.')
    p.add_argument('-D', action='append', dest='projectoptions', default=[], metavar="option",
                   help='Set the value of an option, can be used several times to set multiple options.')
    p.add_argument('-v', '--version', action='version',
                   version=coredata.version)
    # See the mesonlib.WrapMode enum for documentation
    p.add_argument('--wrap-mode', default=WrapMode.default,
                   type=wrapmodetype, choices=WrapMode,
                   help='Special wrap mode to use')
    p.add_argument('directories', nargs='*')
    return p
+
def wrapmodetype(string):
    """argparse type= converter: map an attribute name to a WrapMode member,
    raising ArgumentTypeError with the list of valid names otherwise."""
    try:
        return getattr(WrapMode, string)
    except AttributeError:
        valid = ', '.join(t.name.lower() for t in WrapMode)
        raise argparse.ArgumentTypeError(
            'invalid argument {!r}, use one of {}'.format(string, valid))
+
class MesonApp:
    """Drives one meson configure run: validates the source/build directory
    pair, then generates the build files with the selected backend."""

    def __init__(self, dir1, dir2, script_launcher, handshake, options, original_cmd_line_args):
        # dir1/dir2 may be passed in either order; validate_dirs determines
        # which one holds the build file.
        (self.source_dir, self.build_dir) = self.validate_dirs(dir1, dir2, handshake)
        self.meson_script_launcher = script_launcher
        self.options = options
        self.original_cmd_line_args = original_cmd_line_args

    def has_build_file(self, dirname):
        """Return True if dirname contains a meson build file."""
        fname = os.path.join(dirname, environment.build_filename)
        return os.path.exists(fname)

    def validate_core_dirs(self, dir1, dir2):
        """Resolve both directories to absolute real paths and return them as
        (source_dir, build_dir) — the source dir is the one containing the
        build file. Missing directories are created. Raises RuntimeError if
        the pair is invalid (same dir, both or neither have a build file)."""
        ndir1 = os.path.abspath(os.path.realpath(dir1))
        ndir2 = os.path.abspath(os.path.realpath(dir2))
        if not os.path.exists(ndir1):
            os.makedirs(ndir1)
        if not os.path.exists(ndir2):
            os.makedirs(ndir2)
        if not stat.S_ISDIR(os.stat(ndir1).st_mode):
            raise RuntimeError('%s is not a directory' % dir1)
        if not stat.S_ISDIR(os.stat(ndir2).st_mode):
            raise RuntimeError('%s is not a directory' % dir2)
        if os.path.samefile(dir1, dir2):
            raise RuntimeError('Source and build directories must not be the same. Create a pristine build directory.')
        if self.has_build_file(ndir1):
            if self.has_build_file(ndir2):
                raise RuntimeError('Both directories contain a build file %s.' % environment.build_filename)
            return ndir1, ndir2
        if self.has_build_file(ndir2):
            return ndir2, ndir1
        raise RuntimeError('Neither directory contains a build file %s.' % environment.build_filename)

    def validate_dirs(self, dir1, dir2, handshake):
        """Validate the directory pair and enforce (re)configure rules.

        If the build dir is already configured and this is a fresh invocation
        (handshake False), print advice and exit(0); if it is an internal
        regeneration (handshake True) but coredata is missing, something is
        badly wrong.
        """
        (src_dir, build_dir) = self.validate_core_dirs(dir1, dir2)
        # NOTE(review): despite the name, this is the path to the coredata
        # *file*, not a directory.
        priv_dir = os.path.join(build_dir, 'meson-private/coredata.dat')
        if os.path.exists(priv_dir):
            if not handshake:
                print('Directory already configured, exiting Meson. Just run your build command\n'
                      '(e.g. ninja) and Meson will regenerate as necessary. If ninja fails, run ninja\n'
                      'reconfigure to force Meson to regenerate.\n'
                      '\nIf build failures persist, manually wipe your build directory to clear any\n'
                      'stored system data.\n'
                      '\nTo change option values, run meson configure instead.')
                sys.exit(0)
        else:
            if handshake:
                raise RuntimeError('Something went terribly wrong. Please file a bug.')
        return src_dir, build_dir

    def check_pkgconfig_envvar(self, env):
        # Warn when PKG_CONFIG_PATH changed since the last configure, then
        # record the current value in coredata for the next comparison.
        curvar = os.environ.get('PKG_CONFIG_PATH', '')
        if curvar != env.coredata.pkgconf_envvar:
            mlog.warning('PKG_CONFIG_PATH has changed between invocations from "%s" to "%s".' %
                         (env.coredata.pkgconf_envvar, curvar))
        env.coredata.pkgconf_envvar = curvar

    def generate(self):
        """Create the Environment, set up logging, run generation, and always
        shut the logger down afterwards."""
        env = environment.Environment(self.source_dir, self.build_dir, self.meson_script_launcher, self.options, self.original_cmd_line_args)
        mlog.initialize(env.get_log_dir())
        try:
            self._generate(env)
        finally:
            mlog.shutdown()

    def _generate(self, env):
        """Log configuration info, run the interpreter and write build files."""
        mlog.debug('Build started at', datetime.datetime.now().isoformat())
        mlog.debug('Main binary:', sys.executable)
        mlog.debug('Python system:', platform.system())
        mlog.log(mlog.bold('The Meson build system'))
        self.check_pkgconfig_envvar(env)
        mlog.log('Version:', coredata.version)
        mlog.log('Source dir:', mlog.bold(self.source_dir))
        mlog.log('Build dir:', mlog.bold(self.build_dir))
        if env.is_cross_build():
            mlog.log('Build type:', mlog.bold('cross build'))
        else:
            mlog.log('Build type:', mlog.bold('native build'))
        b = build.Build(env)
        # Import only the selected backend module to keep startup cheap.
        if self.options.backend == 'ninja':
            from .backend import ninjabackend
            g = ninjabackend.NinjaBackend(b)
        elif self.options.backend == 'vs':
            from .backend import vs2010backend
            g = vs2010backend.autodetect_vs_version(b)
            mlog.log('Auto detected Visual Studio backend:', mlog.bold(g.name))
        elif self.options.backend == 'vs2010':
            from .backend import vs2010backend
            g = vs2010backend.Vs2010Backend(b)
        elif self.options.backend == 'vs2015':
            from .backend import vs2015backend
            g = vs2015backend.Vs2015Backend(b)
        elif self.options.backend == 'vs2017':
            from .backend import vs2017backend
            g = vs2017backend.Vs2017Backend(b)
        elif self.options.backend == 'xcode':
            from .backend import xcodebackend
            g = xcodebackend.XCodeBackend(b)
        else:
            raise RuntimeError('Unknown backend "%s".' % self.options.backend)

        intr = interpreter.Interpreter(b, g)
        if env.is_cross_build():
            mlog.log('Host machine cpu family:', mlog.bold(intr.builtin['host_machine'].cpu_family_method([], {})))
            mlog.log('Host machine cpu:', mlog.bold(intr.builtin['host_machine'].cpu_method([], {})))
            mlog.log('Target machine cpu family:', mlog.bold(intr.builtin['target_machine'].cpu_family_method([], {})))
            mlog.log('Target machine cpu:', mlog.bold(intr.builtin['target_machine'].cpu_method([], {})))
        mlog.log('Build machine cpu family:', mlog.bold(intr.builtin['build_machine'].cpu_family_method([], {})))
        mlog.log('Build machine cpu:', mlog.bold(intr.builtin['build_machine'].cpu_method([], {})))
        intr.run()
        try:
            # We would like to write coredata as late as possible since we use the existence of
            # this file to check if we generated the build file successfully. Since coredata
            # includes settings, the build files must depend on it and appear newer. However, due
            # to various kernel caches, we cannot guarantee that any time in Python is exactly in
            # sync with the time that gets applied to any files. Thus, we dump this file as late as
            # possible, but before build files, and if any error occurs, delete it.
            cdf = env.dump_coredata()
            g.generate(intr)
            dumpfile = os.path.join(env.get_scratch_dir(), 'build.dat')
            with open(dumpfile, 'wb') as f:
                pickle.dump(b, f)
            # Post-conf scripts must be run after writing coredata or else introspection fails.
            g.run_postconf_scripts()
        except:
            # Bare except on purpose: even on KeyboardInterrupt the partially
            # written coredata must be removed before re-raising.
            os.unlink(cdf)
            raise
+
+
def run_script_command(args):
    """Dispatch an internal helper script invocation.

    args[0] names the helper, the rest are passed to its run() function.
    Raises MesonException for an unknown helper name.
    """
    cmdname = args[0]
    cmdargs = args[1:]
    # Import lazily so only the requested helper module is loaded.
    if cmdname == 'exe':
        from mesonbuild.scripts import meson_exe as helper
    elif cmdname == 'cleantrees':
        from mesonbuild.scripts import cleantrees as helper
    elif cmdname == 'install':
        from mesonbuild.scripts import meson_install as helper
    elif cmdname == 'commandrunner':
        from mesonbuild.scripts import commandrunner as helper
    elif cmdname == 'delsuffix':
        from mesonbuild.scripts import delwithsuffix as helper
    elif cmdname == 'depfixer':
        from mesonbuild.scripts import depfixer as helper
    elif cmdname == 'dirchanger':
        from mesonbuild.scripts import dirchanger as helper
    elif cmdname == 'gtkdoc':
        from mesonbuild.scripts import gtkdochelper as helper
    elif cmdname == 'msgfmthelper':
        from mesonbuild.scripts import msgfmthelper as helper
    elif cmdname == 'regencheck':
        from mesonbuild.scripts import regen_checker as helper
    elif cmdname == 'symbolextractor':
        from mesonbuild.scripts import symbolextractor as helper
    elif cmdname == 'scanbuild':
        from mesonbuild.scripts import scanbuild as helper
    elif cmdname == 'vcstagger':
        from mesonbuild.scripts import vcstagger as helper
    elif cmdname == 'gettext':
        from mesonbuild.scripts import gettext as helper
    elif cmdname == 'yelphelper':
        from mesonbuild.scripts import yelphelper as helper
    elif cmdname == 'uninstall':
        from mesonbuild.scripts import uninstall as helper
    elif cmdname == 'dist':
        from mesonbuild.scripts import dist as helper
    elif cmdname == 'coverage':
        from mesonbuild.scripts import coverage as helper
    else:
        raise MesonException('Unknown internal command {}.'.format(cmdname))
    return helper.run(cmdargs)
+
def run(original_args, mainfile=None):
    """Main entry point for the meson command line.

    Dispatches to subcommands (test, setup, introspect, rewrite, configure,
    wrap, runpython) or performs the basic setup/regenerate step. Returns a
    process exit code; some paths call sys.exit() directly.
    """
    if sys.version_info < (3, 4):
        print('Meson works correctly only with python 3.4+.')
        print('You have python %s.' % sys.version)
        print('Please update your environment')
        return 1
    args = original_args[:]
    if len(args) > 0:
        # First check if we want to run a subcommand.
        cmd_name = args[0]
        remaining_args = args[1:]
        if cmd_name == 'test':
            return mtest.run(remaining_args)
        elif cmd_name == 'setup':
            args = remaining_args
            # FALLTHROUGH like it's 1972.
        # NOTE: a second, unreachable 'test' branch used to live here; it was
        # removed because the identical check above always matched first.
        elif cmd_name == 'introspect':
            return mintro.run(remaining_args)
        elif cmd_name == 'rewrite':
            return rewriter.run(remaining_args)
        elif cmd_name == 'configure':
            try:
                return mconf.run(remaining_args)
            except MesonException as e:
                mlog.log(mlog.red('\nError configuring project:'), e)
                sys.exit(1)
        elif cmd_name == 'wrap':
            return wraptool.run(remaining_args)
        elif cmd_name == 'runpython':
            import runpy
            script_file = remaining_args[0]
            sys.argv[1:] = remaining_args[1:]
            runpy.run_path(script_file, run_name='__main__')
            sys.exit(0)

    # No special command? Do the basic setup/reconf.
    if len(args) >= 2 and args[0] == '--internal':
        if args[1] != 'regenerate':
            # Internal helper script invocation.
            script = args[1]
            try:
                sys.exit(run_script_command(args[1:]))
            except MesonException as e:
                mlog.log(mlog.red('\nError in {} helper script:'.format(script)))
                mlog.log(e)
                sys.exit(1)
        args = args[2:]
        handshake = True
    else:
        handshake = False

    parser = create_parser()

    args = mesonlib.expand_arguments(args)
    options = parser.parse_args(args)
    args = options.directories
    if not args or len(args) > 2:
        # If there's a meson.build in the dir above, and not in the current
        # directory, assume we're in the build directory.
        if not args and not os.path.exists('meson.build') and os.path.exists('../meson.build'):
            dir1 = '..'
            dir2 = '.'
        else:
            print('{} <source directory> <build directory>'.format(sys.argv[0]))
            print('If you omit either directory, the current directory is substituted.')
            print('Run {} --help for more information.'.format(sys.argv[0]))
            return 1
    else:
        dir1 = args[0]
        if len(args) > 1:
            dir2 = args[1]
        else:
            dir2 = '.'
    try:
        if mainfile is None:
            raise AssertionError('I iz broken. Sorry.')
        app = MesonApp(dir1, dir2, mainfile, handshake, options, original_args)
    except Exception as e:
        # Log directory does not exist, so just print
        # to stdout.
        print('Error during basic setup:\n')
        print(e)
        return 1
    try:
        app.generate()
    except Exception as e:
        if isinstance(e, MesonException):
            # MesonExceptions may carry a source location to report.
            if hasattr(e, 'file') and hasattr(e, 'lineno') and hasattr(e, 'colno'):
                mlog.log(mlog.red('\nMeson encountered an error in file %s, line %d, column %d:' % (e.file, e.lineno, e.colno)))
            else:
                mlog.log(mlog.red('\nMeson encountered an error:'))
            # Error message
            mlog.log(e)
            # Path to log file
            logfile = os.path.join(app.build_dir, environment.Environment.log_dir, mlog.log_fname)
            mlog.log("\nA full log can be found at", mlog.bold(logfile))
            if os.environ.get('MESON_FORCE_BACKTRACE'):
                raise
        else:
            if os.environ.get('MESON_FORCE_BACKTRACE'):
                raise
            traceback.print_exc()
        return 1
    return 0
--- /dev/null
+# Copyright 2014-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This is a helper script for IDE developers. It allows you to
+extract information such as list of targets, files, compiler flags,
+tests and so on. All output is in JSON for simple parsing.
+
+Currently only works for the Ninja backend. Others use generated
+project files and don't need this info."""
+
+import json, pickle
+from . import coredata, build
+import argparse
+import sys, os
+import pathlib
+
# Module-level parser: built once at import time so run() can use it directly.
parser = argparse.ArgumentParser(prog='meson introspect')
parser.add_argument('--targets', action='store_true', dest='list_targets', default=False,
                    help='List top level targets.')
parser.add_argument('--installed', action='store_true', dest='list_installed', default=False,
                    help='List all installed files and directories.')
parser.add_argument('--target-files', action='store', dest='target_files', default=None,
                    help='List source files for a given target.')
parser.add_argument('--buildsystem-files', action='store_true', dest='buildsystem_files', default=False,
                    help='List files that make up the build system.')
parser.add_argument('--buildoptions', action='store_true', dest='buildoptions', default=False,
                    help='List all build options.')
parser.add_argument('--tests', action='store_true', dest='tests', default=False,
                    help='List all unit tests.')
parser.add_argument('--benchmarks', action='store_true', dest='benchmarks', default=False,
                    help='List all benchmarks.')
parser.add_argument('--dependencies', action='store_true', dest='dependencies', default=False,
                    help='List external dependencies.')
parser.add_argument('--projectinfo', action='store_true', dest='projectinfo', default=False,
                    help='Information about projects.')
parser.add_argument('builddir', nargs='?', help='The build directory')
+
def determine_installed_path(target, installdata):
    """Return the normalized installed path of *target*.

    Looks up the install entry whose source basename matches the target's
    filename and joins it with the install prefix and its destination dir.
    Raises RuntimeError if no entry matches.
    """
    install_target = None
    for i in installdata.targets:
        if os.path.split(i[0])[1] == target.get_filename(): # FIXME, might clash due to subprojects.
            install_target = i
            break
    if install_target is None:
        raise RuntimeError('Something weird happened. File a bug.')
    # Use the matched entry, not the leaked loop variable: the old code read
    # 'i' here, which only worked by accident because i == install_target.
    fname = install_target[0]
    outdir = install_target[1]
    outname = os.path.join(installdata.prefix, outdir, os.path.split(fname)[-1])
    # Normalize the path by using os.path.sep consistently, etc.
    # Does not change the effective path.
    return str(pathlib.PurePath(outname))
+
+
def list_installed(installdata):
    """Print a JSON object mapping each source/build path to the absolute
    path it will be installed to. Prints '{}' when installdata is None."""
    mapping = {}
    if installdata is not None:
        # Built targets: keyed by their location in the build dir.
        for path, installdir, aliases, unknown1, unknown2 in installdata.targets:
            mapping[os.path.join(installdata.build_dir, path)] = os.path.join(
                installdata.prefix, installdir, os.path.basename(path))
        # Plain data files keep their full destination path.
        for path, installpath, unused_prefix in installdata.data:
            mapping[path] = os.path.join(installdata.prefix, installpath)
        for path, installdir in installdata.headers:
            mapping[path] = os.path.join(installdata.prefix, installdir, os.path.basename(path))
        for path, installpath in installdata.man:
            mapping[path] = os.path.join(installdata.prefix, installpath)
    print(json.dumps(mapping))
+
+
def list_targets(coredata, builddata, installdata):
    """Print a JSON array with one descriptor per build target."""
    tlist = []
    for (idname, target) in builddata.get_targets().items():
        entry = {'name': target.get_basename(), 'id': idname}
        fname = target.get_filename()
        # A target may produce one file or a list of files.
        if isinstance(fname, list):
            fname = [os.path.join(target.subdir, x) for x in fname]
        else:
            fname = os.path.join(target.subdir, fname)
        entry['filename'] = fname
        # First matching class wins; order mirrors the class hierarchy checks.
        for cls, typename in ((build.Executable, 'executable'),
                              (build.SharedLibrary, 'shared library'),
                              (build.StaticLibrary, 'static library'),
                              (build.CustomTarget, 'custom'),
                              (build.RunTarget, 'run')):
            if isinstance(target, cls):
                break
        else:
            typename = 'unknown'
        entry['type'] = typename
        if installdata and target.should_install():
            entry['installed'] = True
            entry['install_filename'] = determine_installed_path(target, installdata)
        else:
            entry['installed'] = False
        entry['build_by_default'] = target.build_by_default
        tlist.append(entry)
    print(json.dumps(tlist))
+
def list_target_files(target_name, coredata, builddata):
    """Print a JSON list of the source/extra files of the named target;
    prints an error and exits(1) when the target does not exist."""
    try:
        target = builddata.targets[target_name]
        files = target.sources + target.extra_files
    except KeyError:
        print("Unknown target %s." % target_name)
        sys.exit(1)
    print(json.dumps([os.path.join(f.subdir, f.fname) for f in files]))
+
def list_buildoptions(coredata, builddata):
    """Print a JSON array describing every configurable build option."""
    optlist = []
    # Order matters for the output: user, compiler, base, then builtins.
    for optgroup in (coredata.user_options, coredata.compiler_options,
                     coredata.base_options, coredata.builtins):
        add_keys(optlist, optgroup)
    print(json.dumps(optlist))
+
def add_keys(optlist, options):
    """Append a JSON-friendly descriptor for each option in *options* to
    *optlist*, sorted by option name."""
    for key in sorted(options.keys()):
        opt = options[key]
        entry = {'name': key, 'value': opt.value}
        if isinstance(opt, coredata.UserStringOption):
            typestr = 'string'
        elif isinstance(opt, coredata.UserBooleanOption):
            typestr = 'boolean'
        elif isinstance(opt, coredata.UserComboOption):
            # Combo options also expose their valid choices.
            entry['choices'] = opt.choices
            typestr = 'combo'
        elif isinstance(opt, coredata.UserArrayOption):
            typestr = 'array'
        else:
            raise RuntimeError("Unknown option type")
        entry['type'] = typestr
        entry['description'] = opt.description
        optlist.append(entry)
+
def list_buildsystem_files(coredata, builddata):
    """Print a JSON list of all meson.build / meson_options.txt files,
    relative to the source root."""
    src_dir = builddata.environment.get_source_dir()
    # I feel dirty about this. But only slightly.
    filelist = []
    for root, _, files in os.walk(src_dir):
        filelist.extend(os.path.relpath(os.path.join(root, fname), src_dir)
                        for fname in files
                        if fname in ('meson.build', 'meson_options.txt'))
    print(json.dumps(filelist))
+
def list_deps(coredata):
    """Print a JSON list of the external dependencies that were found."""
    found = []
    for dep in coredata.deps.values():
        if dep.found():
            found.append({'name': dep.name,
                          'compile_args': dep.get_compile_args(),
                          'link_args': dep.get_link_args()})
    print(json.dumps(found))
+
def list_tests(testdata):
    """Print a JSON array describing each test (command, environment, name,
    workdir, timeout and suite)."""
    result = []
    for t in testdata:
        entry = {}
        # The test executable may be a single path or already a list.
        fname = [t.fname] if isinstance(t.fname, str) else t.fname
        entry['cmd'] = fname + t.cmd_args
        if isinstance(t.env, build.EnvironmentVariables):
            # Resolve declarative env modifications against the real environment.
            entry['env'] = t.env.get_env(os.environ)
        else:
            entry['env'] = t.env
        entry['name'] = t.name
        entry['workdir'] = t.workdir
        entry['timeout'] = t.timeout
        entry['suite'] = t.suite
        result.append(entry)
    print(json.dumps(result))
+
def list_projinfo(builddata):
    """Print JSON with the project's name/version and those of its subprojects."""
    info = {'name': builddata.project_name,
            'version': builddata.project_version}
    info['subprojects'] = [{'name': name, 'version': version}
                           for name, version in builddata.subprojects.items()]
    print(json.dumps(info))
+
def run(args):
    """Entry point: parse arguments, unpickle the build data from the build
    directory and dispatch to the requested listing function."""
    datadir = 'meson-private'
    options = parser.parse_args(args)
    if options.builddir is not None:
        datadir = os.path.join(options.builddir, datadir)
    if not os.path.isdir(datadir):
        print('Current directory is not a build dir. Please specify it or '
              'change the working directory to it.')
        return 1

    def load(fname):
        # All data files are pickles written by the configure step.
        with open(os.path.join(datadir, fname), 'rb') as f:
            return pickle.load(f)

    coredata = load('coredata.dat')
    builddata = load('build.dat')
    testdata = load('meson_test_setup.dat')
    benchmarkdata = load('meson_benchmark_setup.dat')
    # Install data is only available with the Ninja backend.
    installfile = os.path.join(datadir, 'install.dat')
    installdata = None
    if os.path.isfile(installfile):
        with open(installfile, 'rb') as f:
            installdata = pickle.load(f)

    if options.list_targets:
        list_targets(coredata, builddata, installdata)
    elif options.list_installed:
        list_installed(installdata)
    elif options.target_files is not None:
        list_target_files(options.target_files, coredata, builddata)
    elif options.buildsystem_files:
        list_buildsystem_files(coredata, builddata)
    elif options.buildoptions:
        list_buildoptions(coredata, builddata)
    elif options.tests:
        list_tests(testdata)
    elif options.benchmarks:
        list_tests(benchmarkdata)
    elif options.dependencies:
        list_deps(coredata)
    elif options.projectinfo:
        list_projinfo(builddata)
    else:
        print('No command specified')
        return 1
    return 0
+
+if __name__ == '__main__':
+ sys.exit(run(sys.argv[1:]))
--- /dev/null
+# Copyright 2013-2014 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys, os, platform, io
+
+"""This is (mostly) a standalone module used to write logging
+information about Meson runs. Some output goes to screen,
+some to logging dir and some goes to both."""
+
# Decide once, at import time, whether colored output is safe: stdout must be
# a tty, and on Windows the ANSICON env var signals an ANSI-capable console,
# while elsewhere TERM=dumb rules colors out.
if platform.system().lower() == 'windows':
    colorize_console = os.isatty(sys.stdout.fileno()) and os.environ.get('ANSICON')
else:
    colorize_console = os.isatty(sys.stdout.fileno()) and os.environ.get('TERM') != 'dumb'
log_dir = None   # set by initialize()
log_file = None  # open log file handle, or None once shutdown() has run
log_fname = 'meson-log.txt'
+
def initialize(logdir):
    """Open the log file inside *logdir* and remember both in module globals."""
    global log_dir, log_file
    log_dir = logdir
    log_file = open(os.path.join(logdir, log_fname), 'w', encoding='utf8')
+
def shutdown():
    """Close the log file if one is open.

    The global is cleared *before* close() so that an exception during close
    cannot leave a half-closed file behind in log_file.
    """
    global log_file
    if log_file is None:
        return
    open_file, log_file = log_file, None
    open_file.close()
+
class AnsiDecorator:
    """A piece of text paired with an ANSI escape code.

    get_text() renders it either plain or wrapped in the code plus a reset.
    """
    plain_code = "\033[0m"

    def __init__(self, text, code):
        self.text = text
        self.code = code

    def get_text(self, with_codes):
        """Return the text, wrapped in its ANSI code iff with_codes is true."""
        if not with_codes:
            return self.text
        return self.code + self.text + AnsiDecorator.plain_code
+
# Convenience factories: each wraps *text* in the corresponding ANSI SGR
# code (bold, or a bold/bright foreground color).
def bold(text):
    return AnsiDecorator(text, "\033[1m")

def red(text):
    return AnsiDecorator(text, "\033[1;31m")

def green(text):
    return AnsiDecorator(text, "\033[1;32m")

def yellow(text):
    return AnsiDecorator(text, "\033[1;33m")

def cyan(text):
    return AnsiDecorator(text, "\033[1;36m")
+
def process_markup(args, keep):
    """Render *args* to a list of plain strings.

    AnsiDecorators are rendered with their codes only when *keep* is true;
    anything that is not a str is stringified.
    """
    def render(item):
        if isinstance(item, str):
            return item
        if isinstance(item, AnsiDecorator):
            return item.get_text(keep)
        return str(item)
    return [render(item) for item in args]
+
def force_print(*args, **kwargs):
    """Print no matter what: if the output stream cannot encode the message,
    re-render it with unencodable characters replaced and print that."""
    try:
        print(*args, **kwargs)
    except UnicodeEncodeError:
        # Render into a buffer, strip anything non-ASCII, then print the
        # sanitized text to the default stream.
        buffer = io.StringIO()
        kwargs['file'] = buffer
        print(*args, **kwargs)
        sanitized = buffer.getvalue().encode('ascii', 'replace').decode('ascii')
        print(sanitized)
+
def debug(*args, **kwargs):
    """Write *args* to the log file only (never to the console)."""
    plain = process_markup(args, False)
    if log_file is None:
        return
    # The log file never gets ANSI codes.
    print(*plain, file=log_file, **kwargs)
    log_file.flush()
+
def log(*args, **kwargs):
    """Write *args* to the log file and echo them to the console.

    The log file copy never contains ANSI codes; the console copy keeps
    them only when colorization is enabled for this terminal.
    """
    plain = process_markup(args, False)
    if log_file is not None:
        print(*plain, file=log_file, **kwargs)
        log_file.flush()
    console = process_markup(args, True) if colorize_console else plain
    force_print(*console, **kwargs)
+
def warning(*args, **kwargs):
    """Log *args* prefixed with a yellow 'WARNING:' tag."""
    log(yellow('WARNING:'), *args, **kwargs)
+
def format_list(list):
    """Format a list for logging purposes as a string.

    All but the last item are separated by commas; the final item is
    joined with 'and'.  Empty input yields the empty string.
    """
    if not list:
        return ''
    if len(list) == 1:
        return list[0]
    # For two items this degenerates to 'a and b'; for more it gives
    # 'a, b and c'.
    return ' and '.join([', '.join(list[:-1]), list[-1]])
--- /dev/null
+import os
+
+from .. import build
+from .. import dependencies
+from .. import mlog
+from ..mesonlib import MesonException
+
class permittedSnippetKwargs:
    """Decorator that validates the kwargs of a snippet method.

    Unknown keyword arguments only produce a warning for now; the
    wrapped function is always called.
    """

    def __init__(self, permitted):
        self.permitted = permitted

    def __call__(self, f):
        def wrapped(s, interpreter, state, args, kwargs):
            for key in kwargs:
                if key not in self.permitted:
                    mlog.warning('Passed invalid keyword argument "%s". This will become a hard error in the future.' % key)
            return f(s, interpreter, state, args, kwargs)
        return wrapped
+
+_found_programs = {}
+
+
class ExtensionModule:
    """Base class for Meson extension modules."""

    def __init__(self):
        # Set of method names that operate only on the interpreter.
        self.snippets = set()

    def is_snippet(self, funcname):
        """Return whether *funcname* is registered as an interpreter snippet."""
        return funcname in self.snippets
+
def find_program(program_name, target_name):
    """Locate external program *program_name*, caching lookups across calls.

    Raises MesonException (mentioning *target_name*) when the program
    cannot be found.
    """
    if program_name not in _found_programs:
        program = dependencies.ExternalProgram(program_name)
        if not program.found():
            m = "Target {!r} can't be generated as {!r} could not be found"
            raise MesonException(m.format(target_name, program_name))
        _found_programs[program_name] = program
    return _found_programs[program_name]
+
+
def get_include_args(include_dirs, prefix='-I'):
    '''
    Expand include arguments to refer to the source and build dirs
    by using @SOURCE_ROOT@ and @BUILD_ROOT@ for later substitution
    '''
    if not include_dirs:
        return []

    expanded = []
    for entry in include_dirs:
        # Unwrap interpreter holder objects.
        dirs = getattr(entry, 'held_object', entry)

        if isinstance(dirs, str):
            expanded.append('%s%s' % (prefix, dirs))
            continue

        # Otherwise this should be a build.IncludeDirs object.
        basedir = dirs.get_curdir()
        for inc in dirs.get_incdirs():
            reldir = os.path.join(basedir, inc)
            # Build-tree dir first, then source-tree dir.
            expanded.append('%s%s' % (prefix, os.path.join('@BUILD_ROOT@', reldir)))
            expanded.append('%s%s' % (prefix, os.path.join('@SOURCE_ROOT@', reldir)))
        for extra in dirs.get_extra_build_dirs():
            expanded.append('%s%s' % (prefix, extra))

    return expanded
+
class ModuleReturnValue:
    """Pairs a module method's return value with any new build objects it created."""

    def __init__(self, return_value, new_objects):
        assert isinstance(new_objects, list)
        self.return_value = return_value
        # Objects (targets etc.) the interpreter must register.
        self.new_objects = new_objects
+
# The following CustomTarget subclasses add no behavior of their own; they
# are marker types so other code can distinguish module-generated targets
# via isinstance checks (e.g. GirTarget is tested for in gnome.py).

class GResourceTarget(build.CustomTarget):
    def __init__(self, name, subdir, subproject, kwargs):
        super().__init__(name, subdir, subproject, kwargs)

class GResourceHeaderTarget(build.CustomTarget):
    def __init__(self, name, subdir, subproject, kwargs):
        super().__init__(name, subdir, subproject, kwargs)

class GirTarget(build.CustomTarget):
    def __init__(self, name, subdir, subproject, kwargs):
        super().__init__(name, subdir, subproject, kwargs)

class TypelibTarget(build.CustomTarget):
    def __init__(self, name, subdir, subproject, kwargs):
        super().__init__(name, subdir, subproject, kwargs)

class VapiTarget(build.CustomTarget):
    def __init__(self, name, subdir, subproject, kwargs):
        super().__init__(name, subdir, subproject, kwargs)
--- /dev/null
+# Copyright 2015-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''This module provides helper functions for Gnome/GLib related
+functionality such as gobject-introspection, gresources and gtk-doc'''
+
+from .. import build
+import os
+import copy
+import subprocess
+from . import ModuleReturnValue
+from ..mesonlib import MesonException, OrderedSet, Popen_safe, extract_as_list
+from ..dependencies import Dependency, PkgConfigDependency, InternalDependency
+from .. import mlog
+from .. import mesonlib
+from .. import compilers
+from .. import interpreter
+from . import GResourceTarget, GResourceHeaderTarget, GirTarget, TypelibTarget, VapiTarget
+from . import find_program, get_include_args
+from . import ExtensionModule
+from ..interpreterbase import noKwargs, permittedKwargs
+
# gresource compilation is broken due to the way
# the resource compiler and Ninja clash about it
#
# https://github.com/ninja-build/ninja/issues/1184
# https://bugzilla.gnome.org/show_bug.cgi?id=774368
gresource_dep_needed_version = '>= 2.51.1'

# Module-level caches and once-only warning flags.
# native_glib_version is filled lazily by _get_native_glib_version();
# _gir_has_extra_lib_arg is filled lazily by gir_has_extra_lib_arg();
# the *_printed flags ensure each warning is emitted at most once per run.
native_glib_version = None
girwarning_printed = False
gdbuswarning_printed = False
gresource_warning_printed = False
_gir_has_extra_lib_arg = None
+
def gir_has_extra_lib_arg():
    """Detect, once per run, whether g-ir-scanner supports --extra-library.

    Probes 'g-ir-scanner --help' and caches the result in the module-level
    _gir_has_extra_lib_arg; any failure to run the scanner is treated as
    'not supported'.
    """
    global _gir_has_extra_lib_arg
    if _gir_has_extra_lib_arg is None:
        _gir_has_extra_lib_arg = False
        try:
            scanner_cmd = find_program('g-ir-scanner', '').get_command()
            help_text = Popen_safe(scanner_cmd + ['--help'], stderr=subprocess.STDOUT)[1]
            _gir_has_extra_lib_arg = '--extra-library' in help_text
        except (MesonException, FileNotFoundError, subprocess.CalledProcessError):
            pass
    return _gir_has_extra_lib_arg
+
+class GnomeModule(ExtensionModule):
+ gir_dep = None
+
    @staticmethod
    def _get_native_glib_version(state):
        # Query (once per run) the build machine's glib-2.0 version via
        # pkg-config; the result is cached in the module-level
        # native_glib_version.
        global native_glib_version
        if native_glib_version is None:
            glib_dep = PkgConfigDependency('glib-2.0', state.environment,
                                           {'native': True})
            native_glib_version = glib_dep.get_version()
        return native_glib_version
+
    def __print_gresources_warning(self, state):
        # Warn (once per run) when the native GLib is too old for reliable
        # gresource dependency tracking; see gresource_dep_needed_version.
        global gresource_warning_printed
        if not gresource_warning_printed:
            if not mesonlib.version_compare(self._get_native_glib_version(state), gresource_dep_needed_version):
                mlog.warning('GLib compiled dependencies do not work reliably with \n'
                             'the current version of GLib. See the following upstream issue:',
                             mlog.bold('https://bugzilla.gnome.org/show_bug.cgi?id=774368'))
            gresource_warning_printed = True
        return []
+
    @staticmethod
    def _print_gdbus_warning():
        # Warn (once per run) that gdbus_codegen() output needs the root
        # directory added to include_directories with GLib older than 2.51.3.
        global gdbuswarning_printed
        if not gdbuswarning_printed:
            mlog.warning('Code generated with gdbus_codegen() requires the root directory be added to\n'
                         '  include_directories of targets with GLib < 2.51.3:',
                         mlog.bold('https://github.com/mesonbuild/meson/issues/1387'))
            gdbuswarning_printed = True
+
    @permittedKwargs({'source_dir', 'c_name', 'dependencies', 'export', 'gresource_bundle', 'install_header',
                      'install', 'install_dir', 'extra_args', 'build_by_default'})
    def compile_resources(self, state, args, kwargs):
        """Create custom target(s) running glib-compile-resources.

        args[0] is the target basename, args[1] the .gresource.xml input.
        Returns a ModuleReturnValue holding either a single .gresource
        bundle target (when gresource_bundle is set) or a [C source,
        header] target pair.
        """
        self.__print_gresources_warning(state)
        glib_version = self._get_native_glib_version(state)

        cmd = ['glib-compile-resources', '@INPUT@']

        source_dirs, dependencies = mesonlib.extract_as_list(kwargs, 'source_dir', 'dependencies', pop=True)

        if len(args) < 2:
            raise MesonException('Not enough arguments; the name of the resource '
                                 'and the path to the XML file are required')

        # Validate dependencies: only files and custom targets are accepted,
        # and custom targets require a glib new enough to emit depfiles.
        for (ii, dep) in enumerate(dependencies):
            if hasattr(dep, 'held_object'):
                dependencies[ii] = dep = dep.held_object
            if not isinstance(dep, (mesonlib.File, build.CustomTarget, build.CustomTargetIndex)):
                m = 'Unexpected dependency type {!r} for gnome.compile_resources() ' \
                    '"dependencies" argument.\nPlease pass the return value of ' \
                    'custom_target() or configure_file()'
                raise MesonException(m.format(dep))
            if isinstance(dep, (build.CustomTarget, build.CustomTargetIndex)):
                if not mesonlib.version_compare(glib_version, gresource_dep_needed_version):
                    m = 'The "dependencies" argument of gnome.compile_resources() can not\n' \
                        'be used with the current version of glib-compile-resources due to\n' \
                        '<https://bugzilla.gnome.org/show_bug.cgi?id=774368>'
                    raise MesonException(m)

        # Normalize the input-file argument to a path usable by the scanner.
        ifile = args[1]
        if isinstance(ifile, mesonlib.File):
            # glib-compile-resources will be run inside the source dir,
            # so we need either 'src_to_build' or the absolute path.
            # Absolute path is the easiest choice.
            if ifile.is_built:
                ifile = os.path.join(state.environment.get_build_dir(), ifile.subdir, ifile.fname)
            else:
                ifile = os.path.join(ifile.subdir, ifile.fname)
        elif isinstance(ifile, str):
            ifile = os.path.join(state.subdir, ifile)
        elif isinstance(ifile, (interpreter.CustomTargetHolder,
                                interpreter.CustomTargetIndexHolder,
                                interpreter.GeneratedObjectsHolder)):
            m = 'Resource xml files generated at build-time cannot be used ' \
                'with gnome.compile_resources() because we need to scan ' \
                'the xml for dependencies. Use configure_file() instead ' \
                'to generate it at configure-time.'
            raise MesonException(m)
        else:
            raise MesonException('Invalid file argument: {!r}'.format(ifile))

        depend_files, depends, subdirs = self._get_gresource_dependencies(
            state, ifile, source_dirs, dependencies)

        # Make source dirs relative to build dir now
        source_dirs = [os.path.join(state.build_to_src, state.subdir, d) for d in source_dirs]
        # Always include current directory, but after paths set by user
        source_dirs.append(os.path.join(state.build_to_src, state.subdir))
        # Ensure build directories of generated deps are included
        source_dirs += subdirs

        for source_dir in OrderedSet(source_dirs):
            cmd += ['--sourcedir', source_dir]

        if 'c_name' in kwargs:
            cmd += ['--c-name', kwargs.pop('c_name')]
        export = kwargs.pop('export', False)
        if not export:
            cmd += ['--internal']

        cmd += ['--generate', '--target', '@OUTPUT@']

        cmd += mesonlib.stringlistify(kwargs.pop('extra_args', []))

        gresource = kwargs.pop('gresource_bundle', False)
        if gresource:
            output = args[0] + '.gresource'
            name = args[0] + '_gresource'
        else:
            output = args[0] + '.c'
            name = args[0] + '_c'

        if kwargs.get('install', False) and not gresource:
            raise MesonException('The install kwarg only applies to gresource bundles, see install_header')

        install_header = kwargs.pop('install_header', False)
        if install_header and gresource:
            raise MesonException('The install_header kwarg does not apply to gresource bundles')
        if install_header and not export:
            raise MesonException('GResource header is installed yet export is not enabled')

        # Remaining kwargs are forwarded to the CustomTarget constructor.
        kwargs['input'] = args[1]
        kwargs['output'] = output
        kwargs['depends'] = depends
        if not mesonlib.version_compare(glib_version, gresource_dep_needed_version):
            # This will eventually go out of sync if dependencies are added
            kwargs['depend_files'] = depend_files
            kwargs['command'] = cmd
        else:
            # Newer glib can emit a depfile itself, keeping deps up to date.
            depfile = kwargs['output'] + '.d'
            kwargs['depfile'] = depfile
            kwargs['command'] = copy.copy(cmd) + ['--dependency-file', '@DEPFILE@']
        target_c = GResourceTarget(name, state.subdir, state.subproject, kwargs)

        if gresource: # Only one target for .gresource files
            return ModuleReturnValue(target_c, [target_c])

        # Companion header target for the generated C source.
        h_kwargs = {
            'command': cmd,
            'input': args[1],
            'output': args[0] + '.h',
            # The header doesn't actually care about the files yet it errors if missing
            'depends': depends
        }
        if 'build_by_default' in kwargs:
            h_kwargs['build_by_default'] = kwargs['build_by_default']
        if install_header:
            h_kwargs['install'] = install_header
            h_kwargs['install_dir'] = kwargs.get('install_dir',
                                                 state.environment.coredata.get_builtin_option('includedir'))
        target_h = GResourceHeaderTarget(args[0] + '_h', state.subdir, state.subproject, h_kwargs)
        rv = [target_c, target_h]
        return ModuleReturnValue(rv, rv)
+
    def _get_gresource_dependencies(self, state, input_file, source_dirs, dependencies):
        """Scan a .gresource.xml for the resource files it references.

        Runs 'glib-compile-resources --generate-dependencies' in the source
        dir, then maps each reported filename back to either a source-tree
        file or a generated file supplied through *dependencies*.

        Returns (dep_files, depends, subdirs): files to depend on, custom
        targets to depend on, and build subdirs holding generated files.
        """

        cmd = ['glib-compile-resources',
               input_file,
               '--generate-dependencies']

        # Prefer generated files over source files
        cmd += ['--sourcedir', state.subdir] # Current build dir
        for source_dir in source_dirs:
            cmd += ['--sourcedir', os.path.join(state.subdir, source_dir)]

        pc, stdout, stderr = Popen_safe(cmd, cwd=state.environment.get_source_dir())
        if pc.returncode != 0:
            m = 'glib-compile-resources failed to get dependencies for {}:\n{}'
            mlog.warning(m.format(cmd[1], stderr))
            raise subprocess.CalledProcessError(pc.returncode, cmd)

        # One filename per line; drop the trailing empty entry.
        dep_files = stdout.split('\n')[:-1]

        # In generate-dependencies mode, glib-compile-resources doesn't raise
        # an error for missing resources but instead prints whatever filename
        # was listed in the input file. That's good because it means we can
        # handle resource files that get generated as part of the build, as
        # follows.
        #
        # If there are multiple generated resource files with the same basename
        # then this code will get confused.
        def exists_in_srcdir(f):
            return os.path.exists(os.path.join(state.environment.get_source_dir(), f))

        depends = []
        subdirs = []
        # Iterate a copy: dep_files is mutated inside the loop.
        for resfile in dep_files[:]:
            resbasename = os.path.basename(resfile)
            for dep in dependencies:
                if hasattr(dep, 'held_object'):
                    dep = dep.held_object
                if isinstance(dep, mesonlib.File):
                    if dep.fname != resbasename:
                        continue
                    dep_files.remove(resfile)
                    dep_files.append(dep)
                    subdirs.append(dep.subdir)
                    break
                elif isinstance(dep, (build.CustomTarget, build.CustomTargetIndex)):
                    # Match the resource by basename against the target's outputs.
                    fname = None
                    outputs = {(o, os.path.basename(o)) for o in dep.get_outputs()}
                    for o, baseo in outputs:
                        if baseo == resbasename:
                            fname = o
                            break
                    if fname is not None:
                        dep_files.remove(resfile)
                        dep_files.append(
                            mesonlib.File(
                                is_built=True,
                                subdir=dep.get_subdir(),
                                fname=fname))
                        depends.append(dep)
                        subdirs.append(dep.get_subdir())
                        break
            else:
                # No dependency provides this file, so it must already exist
                # in the source tree.
                if not exists_in_srcdir(resfile):
                    raise MesonException(
                        'Resource "%s" listed in "%s" was not found. If this is a '
                        'generated file, pass the target that generates it to '
                        'gnome.compile_resources() using the "dependencies" '
                        'keyword argument.' % (resfile, input_file))
        return dep_files, depends, subdirs
+
    def _get_link_args(self, state, lib, depends=None, include_rpath=False,
                       use_gir_args=False):
        """Build g-ir-scanner-compatible link arguments for *lib*.

        For shared libraries, -L entries (and optionally rpaths) for the
        build dirs are added and *lib* is appended to *depends* (when
        given).  When *use_gir_args* is set and the scanner supports it,
        --extra-library is used instead of -l.
        """
        link_command = []
        # Construct link args
        if isinstance(lib, build.SharedLibrary):
            libdir = os.path.join(state.environment.get_build_dir(), state.backend.get_target_dir(lib))
            link_command.append('-L' + libdir)
            # Needed for the following binutils bug:
            # https://github.com/mesonbuild/meson/issues/1911
            # However, g-ir-scanner does not understand -Wl,-rpath
            # so we need to use -L instead
            for d in state.backend.determine_rpath_dirs(lib):
                d = os.path.join(state.environment.get_build_dir(), d)
                link_command.append('-L' + d)
                if include_rpath:
                    link_command.append('-Wl,-rpath,' + d)
            if include_rpath:
                link_command.append('-Wl,-rpath,' + libdir)
            if depends:
                depends.append(lib)
        if gir_has_extra_lib_arg() and use_gir_args:
            link_command.append('--extra-library=' + lib.name)
        else:
            link_command.append('-l' + lib.name)
        return link_command
+
    def _get_dependencies_flags(self, deps, state, depends=None, include_rpath=False,
                                use_gir_args=False):
        """Recursively collect flags needed to scan/compile against *deps*.

        Returns a (cflags, ldflags, gi_includes) triple of OrderedSets.
        InternalDependency entries are walked recursively (their libraries,
        external deps and GirTarget sources); plain Dependency objects
        contribute their compile/link args; built libraries contribute
        include dirs.
        """
        cflags = OrderedSet()
        ldflags = OrderedSet()
        gi_includes = OrderedSet()
        deps = mesonlib.listify(deps, unholder=True)

        for dep in deps:
            if isinstance(dep, InternalDependency):
                cflags.update(get_include_args(dep.include_directories))
                for lib in dep.libraries:
                    if hasattr(lib, 'held_object'):
                        lib = lib.held_object
                    ldflags.update(self._get_link_args(state, lib, depends, include_rpath))
                    libdepflags = self._get_dependencies_flags(lib.get_external_deps(), state, depends, include_rpath,
                                                               use_gir_args)
                    cflags.update(libdepflags[0])
                    ldflags.update(libdepflags[1])
                    gi_includes.update(libdepflags[2])
                extdepflags = self._get_dependencies_flags(dep.ext_deps, state, depends, include_rpath,
                                                           use_gir_args)
                cflags.update(extdepflags[0])
                ldflags.update(extdepflags[1])
                gi_includes.update(extdepflags[2])
                for source in dep.sources:
                    if hasattr(source, 'held_object'):
                        source = source.held_object
                    if isinstance(source, GirTarget):
                        # Generated .gir files live in the build dir.
                        gi_includes.update([os.path.join(state.environment.get_build_dir(),
                                            source.get_subdir())])
            # This should be any dependency other than an internal one.
            elif isinstance(dep, Dependency):
                cflags.update(dep.get_compile_args())
                for lib in dep.get_link_args():
                    if (os.path.isabs(lib) and
                            # For PkgConfigDependency only:
                            getattr(dep, 'is_libtool', False)):
                        # Rewrite an absolute libtool path into -L/-l form.
                        lib_dir = os.path.dirname(lib)
                        ldflags.update(["-L%s" % lib_dir])
                        if include_rpath:
                            ldflags.update(['-Wl,-rpath {}'.format(lib_dir)])
                        libname = os.path.basename(lib)
                        if libname.startswith("lib"):
                            libname = libname[3:]
                        libname = libname.split(".so")[0]
                        lib = "-l%s" % libname
                    # Hack to avoid passing some compiler options in
                    if lib.startswith("-W"):
                        continue
                    ldflags.update([lib])

                if isinstance(dep, PkgConfigDependency):
                    girdir = dep.get_pkgconfig_variable("girdir", {})
                    if girdir:
                        gi_includes.update([girdir])
            elif isinstance(dep, (build.StaticLibrary, build.SharedLibrary)):
                cflags.update(get_include_args(dep.get_include_dirs()))
            else:
                mlog.log('dependency {!r} not handled to build gir files'.format(dep))
                continue

        if gir_has_extra_lib_arg() and use_gir_args:
            # Translate -l flags into the scanner's --extra-library form.
            fixed_ldflags = set()
            for ldflag in ldflags:
                if ldflag.startswith("-l"):
                    fixed_ldflags.add(ldflag.replace('-l', '--extra-library=', 1))
                else:
                    fixed_ldflags.add(ldflag)
            ldflags = fixed_ldflags
        return cflags, ldflags, gi_includes
+
    @permittedKwargs({'sources', 'nsversion', 'namespace', 'symbol_prefix', 'identifier_prefix',
                      'export_packages', 'includes', 'dependencies', 'link_with', 'include_directories',
                      'install', 'install_dir_gir', 'install_dir_typelib', 'extra_args',
                      'packages', 'header', 'build_by_default'})
    def generate_gir(self, state, args, kwargs):
        """Generate a .gir (g-ir-scanner) and .typelib (g-ir-compiler) pair.

        args[0] must be the executable or shared library to introspect.
        Returns a ModuleReturnValue with [GirTarget, TypelibTarget].
        """
        if len(args) != 1:
            raise MesonException('Gir takes one argument')
        if kwargs.get('install_dir'):
            raise MesonException('install_dir is not supported with generate_gir(), see "install_dir_gir" and "install_dir_typelib"')
        giscanner = find_program('g-ir-scanner', 'Gir')
        gicompiler = find_program('g-ir-compiler', 'Gir')
        girtarget = args[0]
        while hasattr(girtarget, 'held_object'):
            girtarget = girtarget.held_object
        if not isinstance(girtarget, (build.Executable, build.SharedLibrary)):
            raise MesonException('Gir target must be an executable or shared library')
        # gobject-introspection itself is needed for the scanner's cflags;
        # the dependency lookup is cached on the instance.
        try:
            if not self.gir_dep:
                self.gir_dep = PkgConfigDependency('gobject-introspection-1.0',
                                                   state.environment,
                                                   {'native': True})
            pkgargs = self.gir_dep.get_compile_args()
        except Exception:
            raise MesonException('gobject-introspection dependency was not found, gir cannot be generated.')
        ns = kwargs.pop('namespace')
        nsversion = kwargs.pop('nsversion')
        libsources = mesonlib.extract_as_list(kwargs, 'sources', pop=True)
        girfile = '%s-%s.gir' % (ns, nsversion)
        srcdir = os.path.join(state.environment.get_source_dir(), state.subdir)
        builddir = os.path.join(state.environment.get_build_dir(), state.subdir)
        depends = [girtarget]
        gir_inc_dirs = []

        scan_command = [giscanner]
        scan_command += pkgargs
        scan_command += ['--no-libtool', '--namespace=' + ns, '--nsversion=' + nsversion, '--warn-all',
                         '--output', '@OUTPUT@']

        header = kwargs.pop('header', None)
        if header:
            if not isinstance(header, str):
                raise MesonException('header must be a string')
            scan_command += ['--c-include=' + header]

        extra_args = mesonlib.stringlistify(kwargs.pop('extra_args', []))
        scan_command += extra_args
        scan_command += ['-I' + srcdir,
                         '-I' + builddir]
        scan_command += get_include_args(girtarget.get_include_dirs())

        # Sources are passed to the scanner through a filelist written into
        # the target's private dir.
        gir_filelist_dir = state.backend.get_target_private_dir_abs(girtarget)
        if not os.path.isdir(gir_filelist_dir):
            os.mkdir(gir_filelist_dir)
        gir_filelist_filename = os.path.join(gir_filelist_dir, '%s_%s_gir_filelist' % (ns, nsversion))

        with open(gir_filelist_filename, 'w', encoding='utf-8') as gir_filelist:
            for s in libsources:
                if hasattr(s, 'held_object'):
                    s = s.held_object
                if isinstance(s, (build.CustomTarget, build.CustomTargetIndex)):
                    gir_filelist.write(os.path.join(state.environment.get_build_dir(),
                                                    state.backend.get_target_dir(s),
                                                    s.get_outputs()[0]) + '\n')
                elif isinstance(s, mesonlib.File):
                    gir_filelist.write(s.rel_to_builddir(state.build_to_src) + '\n')
                elif isinstance(s, build.GeneratedList):
                    for gen_src in s.get_outputs():
                        gir_filelist.write(os.path.join(srcdir, gen_src) + '\n')
                else:
                    gir_filelist.write(os.path.join(srcdir, s) + '\n')
        scan_command += ['--filelist=' + gir_filelist_filename]

        if 'link_with' in kwargs:
            link_with = mesonlib.extract_as_list(kwargs, 'link_with', pop = True)

            for link in link_with:
                scan_command += self._get_link_args(state, link.held_object, depends,
                                                    use_gir_args=True)

        if 'includes' in kwargs:
            includes = mesonlib.extract_as_list(kwargs, 'includes', pop = True)
            for inc in includes:
                if hasattr(inc, 'held_object'):
                    inc = inc.held_object
                if isinstance(inc, str):
                    scan_command += ['--include=%s' % (inc, )]
                elif isinstance(inc, GirTarget):
                    # A not-yet-installed .gir generated by this build.
                    gir_inc_dirs += [
                        os.path.join(state.environment.get_build_dir(),
                                     inc.get_subdir()),
                    ]
                    scan_command += [
                        "--include-uninstalled=%s" % (os.path.join(inc.get_subdir(), inc.get_basename()), )
                    ]
                    depends += [inc]
                else:
                    raise MesonException(
                        'Gir includes must be str, GirTarget, or list of them')

        cflags = []
        ldflags = []
        # Pick the first introspectable language the target is built with
        # (for-else: lang/compiler stay None when nothing matches).
        for lang, compiler in girtarget.compilers.items():
            # XXX: Can you use g-i with any other language?
            if lang in ('c', 'cpp', 'objc', 'objcpp', 'd'):
                break
        else:
            lang = None
            compiler = None
        if lang and compiler:
            if state.global_args.get(lang):
                cflags += state.global_args[lang]
            if state.project_args.get(lang):
                cflags += state.project_args[lang]
            if 'b_sanitize' in compiler.base_options:
                sanitize = state.environment.coredata.base_options['b_sanitize'].value
                cflags += compilers.sanitizer_compile_args(sanitize)
                if 'address' in sanitize.split(','):
                    ldflags += ['-lasan']
                # FIXME: Linking directly to libasan is not recommended but g-ir-scanner
                # does not understand -f LDFLAGS. https://bugzilla.gnome.org/show_bug.cgi?id=783892
                # ldflags += compilers.sanitizer_link_args(sanitize)
        if 'symbol_prefix' in kwargs:
            sym_prefixes = mesonlib.stringlistify(kwargs.pop('symbol_prefix', []))
            scan_command += ['--symbol-prefix=%s' % sym_prefix for sym_prefix in sym_prefixes]
        if 'identifier_prefix' in kwargs:
            identifier_prefix = kwargs.pop('identifier_prefix')
            if not isinstance(identifier_prefix, str):
                raise MesonException('Gir identifier prefix must be str')
            scan_command += ['--identifier-prefix=%s' % identifier_prefix]
        if 'export_packages' in kwargs:
            pkgs = kwargs.pop('export_packages')
            if isinstance(pkgs, str):
                scan_command += ['--pkg-export=%s' % pkgs]
            elif isinstance(pkgs, list):
                scan_command += ['--pkg-export=%s' % pkg for pkg in pkgs]
            else:
                raise MesonException('Gir export packages must be str or list')

        deps = (girtarget.get_all_link_deps() + girtarget.get_external_deps() +
                extract_as_list(kwargs, 'dependencies', pop=True, unholder=True))
        # Need to recursively add deps on GirTarget sources from our
        # dependencies and also find the include directories needed for the
        # typelib generation custom target below.
        typelib_includes = []
        for dep in deps:
            if hasattr(dep, 'held_object'):
                dep = dep.held_object
            # Add a dependency on each GirTarget listed in dependencies and add
            # the directory where it will be generated to the typelib includes
            if isinstance(dep, InternalDependency):
                for source in dep.sources:
                    if hasattr(source, 'held_object'):
                        source = source.held_object
                    if isinstance(source, GirTarget) and source not in depends:
                        depends.append(source)
                        subdir = os.path.join(state.environment.get_build_dir(),
                                              source.get_subdir())
                        if subdir not in typelib_includes:
                            typelib_includes.append(subdir)
            # Do the same, but for dependencies of dependencies. These are
            # stored in the list of generated sources for each link dep (from
            # girtarget.get_all_link_deps() above).
            # FIXME: Store this in the original form from declare_dependency()
            # so it can be used here directly.
            elif isinstance(dep, build.SharedLibrary):
                for source in dep.generated:
                    if isinstance(source, GirTarget):
                        subdir = os.path.join(state.environment.get_build_dir(),
                                              source.get_subdir())
                        if subdir not in typelib_includes:
                            typelib_includes.append(subdir)
            elif isinstance(dep, PkgConfigDependency):
                girdir = dep.get_pkgconfig_variable("girdir", {})
                if girdir and girdir not in typelib_includes:
                    typelib_includes.append(girdir)
        # ldflags will be misinterpreted by gir scanner (showing
        # spurious dependencies) but building GStreamer fails if they
        # are not used here.
        dep_cflags, dep_ldflags, gi_includes = self._get_dependencies_flags(deps, state, depends,
                                                                            use_gir_args=True)
        cflags += list(dep_cflags)
        ldflags += list(dep_ldflags)
        scan_command += ['--cflags-begin']
        scan_command += cflags
        scan_command += ['--cflags-end']
        # need to put our output directory first as we need to use the
        # generated libraries instead of any possibly installed system/prefix
        # ones.
        if isinstance(girtarget, build.SharedLibrary):
            scan_command += ["-L@PRIVATE_OUTDIR_ABS_%s@" % girtarget.get_id()]
        scan_command += list(ldflags)
        for i in gi_includes:
            scan_command += ['--add-include-path=%s' % i]

        inc_dirs = mesonlib.extract_as_list(kwargs, 'include_directories', pop = True)
        for incd in inc_dirs:
            if not isinstance(incd.held_object, (str, build.IncludeDirs)):
                raise MesonException(
                    'Gir include dirs should be include_directories().')
        scan_command += get_include_args(inc_dirs)
        scan_command += get_include_args(gir_inc_dirs + inc_dirs, prefix='--add-include-path=')

        if isinstance(girtarget, build.Executable):
            scan_command += ['--program', girtarget]
        elif isinstance(girtarget, build.SharedLibrary):
            libname = girtarget.get_basename()
            # Needed for the following binutils bug:
            # https://github.com/mesonbuild/meson/issues/1911
            # However, g-ir-scanner does not understand -Wl,-rpath
            # so we need to use -L instead
            for d in state.backend.determine_rpath_dirs(girtarget):
                d = os.path.join(state.environment.get_build_dir(), d)
                scan_command.append('-L' + d)
            scan_command += ['--library', libname]
        scankwargs = {'output': girfile,
                      'command': scan_command,
                      'depends': depends}
        if 'install' in kwargs:
            scankwargs['install'] = kwargs['install']
            scankwargs['install_dir'] = kwargs.get('install_dir_gir',
                                                   os.path.join(state.environment.get_datadir(), 'gir-1.0'))
        if 'build_by_default' in kwargs:
            scankwargs['build_by_default'] = kwargs['build_by_default']
        scan_target = GirTarget(girfile, state.subdir, state.subproject, scankwargs)

        # Second target: compile the scanned .gir into a binary .typelib.
        typelib_output = '%s-%s.typelib' % (ns, nsversion)
        typelib_cmd = [gicompiler, scan_target, '--output', '@OUTPUT@']
        typelib_cmd += get_include_args(gir_inc_dirs, prefix='--includedir=')
        for incdir in typelib_includes:
            typelib_cmd += ["--includedir=" + incdir]

        typelib_kwargs = {
            'output': typelib_output,
            'command': typelib_cmd,
        }
        if 'install' in kwargs:
            typelib_kwargs['install'] = kwargs['install']
            typelib_kwargs['install_dir'] = kwargs.get('install_dir_typelib',
                                                       os.path.join(state.environment.get_libdir(), 'girepository-1.0'))
        if 'build_by_default' in kwargs:
            typelib_kwargs['build_by_default'] = kwargs['build_by_default']
        typelib_target = TypelibTarget(typelib_output, state.subdir, state.subproject, typelib_kwargs)
        rv = [scan_target, typelib_target]
        return ModuleReturnValue(rv, rv)
+
+ @permittedKwargs({'build_by_default'})
+ def compile_schemas(self, state, args, kwargs):
+ if args:
+ raise MesonException('Compile_schemas does not take positional arguments.')
+ srcdir = os.path.join(state.build_to_src, state.subdir)
+ outdir = state.subdir
+
+ cmd = [find_program('glib-compile-schemas', 'gsettings-compile')]
+ cmd += ['--targetdir', outdir, srcdir]
+ kwargs['command'] = cmd
+ kwargs['input'] = []
+ kwargs['output'] = 'gschemas.compiled'
+ if state.subdir == '':
+ targetname = 'gsettings-compile'
+ else:
+ targetname = 'gsettings-compile-' + state.subdir.replace('/', '_')
+ target_g = build.CustomTarget(targetname, state.subdir, state.subproject, kwargs)
+ return ModuleReturnValue(target_g, [target_g])
+
    @permittedKwargs({'sources', 'media', 'symlink_media', 'languages'})
    def yelp(self, state, args, kwargs):
        """Set up install/pot/update-po targets for Yelp help files.

        args[0] is the project id; sources come from the 'sources' kwarg
        or, failing that, from the remaining positional arguments.
        Returns a ModuleReturnValue whose new objects are the install
        script and the two run targets.
        """
        if len(args) < 1:
            raise MesonException('Yelp requires a project id')

        project_id = args[0]
        sources = mesonlib.stringlistify(kwargs.pop('sources', []))
        if not sources:
            if len(args) > 1:
                sources = mesonlib.stringlistify(args[1:])
            if not sources:
                raise MesonException('Yelp requires a list of sources')
        # '@@' is the list separator understood by the internal yelphelper
        # script.
        source_str = '@@'.join(sources)

        langs = mesonlib.stringlistify(kwargs.pop('languages', []))
        if langs:
            mlog.log(mlog.red('DEPRECATION:'), '''The "languages" argument of gnome.yelp() is deprecated.
Use a LINGUAS file in the sources directory instead.
This will become a hard error in the future.''')

        media = mesonlib.stringlistify(kwargs.pop('media', []))
        symlinks = kwargs.pop('symlink_media', True)

        if not isinstance(symlinks, bool):
            raise MesonException('symlink_media must be a boolean')

        # All kwargs were popped above; anything left over is unknown.
        if kwargs:
            raise MesonException('Unknown arguments passed: {}'.format(', '.join(kwargs.keys())))

        # Install step, run via 'meson --internal yelphelper install'.
        script = state.environment.get_build_command()
        args = ['--internal',
                'yelphelper',
                'install',
                '--subdir=' + state.subdir,
                '--id=' + project_id,
                '--installdir=' + os.path.join(state.environment.get_datadir(), 'help'),
                '--sources=' + source_str]
        if symlinks:
            args.append('--symlinks=true')
        if media:
            args.append('--media=' + '@@'.join(media))
        if langs:
            args.append('--langs=' + '@@'.join(langs))
        inscript = build.RunScript(script, args)

        # Run target that regenerates the translation template (.pot).
        potargs = state.environment.get_build_command() + [
            '--internal', 'yelphelper', 'pot',
            '--subdir=' + state.subdir,
            '--id=' + project_id,
            '--sources=' + source_str,
        ]
        pottarget = build.RunTarget('help-' + project_id + '-pot', potargs[0],
                                    potargs[1:], [], state.subdir, state.subproject)

        # Run target that updates the per-language .po files.
        poargs = state.environment.get_build_command() + [
            '--internal', 'yelphelper', 'update-po',
            '--subdir=' + state.subdir,
            '--id=' + project_id,
            '--sources=' + source_str,
            '--langs=' + '@@'.join(langs),
        ]
        potarget = build.RunTarget('help-' + project_id + '-update-po', poargs[0],
                                   poargs[1:], [], state.subdir, state.subproject)

        rv = [inscript, pottarget, potarget]
        return ModuleReturnValue(None, rv)
+
    @permittedKwargs({'main_xml', 'main_sgml', 'src_dir', 'dependencies', 'install',
                      'install_dir', 'scan_args', 'scanobjs_args', 'gobject_typesfile',
                      'fixxref_args', 'html_args', 'html_assets', 'content_files',
                      'mkdb_args', 'ignore_headers', 'include_directories',
                      'namespace', 'mode', 'expand_content_files'})
    def gtkdoc(self, state, args, kwargs):
        """Create targets that build gtk-doc API documentation for a module.

        args: exactly one string, the gtk-doc module name.
        kwargs: see the permitted list above; 'src_dir' is required and may
        hold plain path strings or include_directories() objects.

        Returns a ModuleReturnValue whose created objects are the
        '<module>-doc' run target and, unless 'install: false', a RunScript
        that installs the generated documentation.
        """
        if len(args) != 1:
            raise MesonException('Gtkdoc must have one positional argument.')
        modulename = args[0]
        if not isinstance(modulename, str):
            raise MesonException('Gtkdoc arg must be string.')
        if 'src_dir' not in kwargs:
            raise MesonException('Keyword argument src_dir missing.')
        main_file = kwargs.get('main_sgml', '')
        if not isinstance(main_file, str):
            raise MesonException('Main sgml keyword argument must be a string.')
        main_xml = kwargs.get('main_xml', '')
        if not isinstance(main_xml, str):
            raise MesonException('Main xml keyword argument must be a string.')
        # main_xml and main_sgml name the same master document; only one of
        # the two spellings may be given.
        if main_xml != '':
            if main_file != '':
                raise MesonException('You can only specify main_xml or main_sgml, not both.')
            main_file = main_xml
        targetname = modulename + '-doc'
        command = state.environment.get_build_command()

        namespace = kwargs.get('namespace', '')
        mode = kwargs.get('mode', 'auto')
        VALID_MODES = ('xml', 'sgml', 'none', 'auto')
        if mode not in VALID_MODES:
            raise MesonException('gtkdoc: Mode {} is not a valid mode: {}'.format(mode, VALID_MODES))

        src_dirs = mesonlib.extract_as_list(kwargs, 'src_dir')
        header_dirs = []
        for src_dir in src_dirs:
            if hasattr(src_dir, 'held_object'):
                # include_directories() object: expand every entry to both its
                # source-tree and build-tree absolute paths.
                src_dir = src_dir.held_object
                if not isinstance(src_dir, build.IncludeDirs):
                    raise MesonException('Invalid keyword argument for src_dir.')
                for inc_dir in src_dir.get_incdirs():
                    header_dirs.append(os.path.join(state.environment.get_source_dir(),
                                                    src_dir.get_curdir(), inc_dir))
                    header_dirs.append(os.path.join(state.environment.get_build_dir(),
                                                    src_dir.get_curdir(), inc_dir))
            else:
                # Plain string path, used as-is.
                header_dirs.append(src_dir)

        # Arguments for the internal 'gtkdoc' helper script; list-valued
        # options are joined with '@@' and split again on the other side.
        args = ['--internal', 'gtkdoc',
                '--sourcedir=' + state.environment.get_source_dir(),
                '--builddir=' + state.environment.get_build_dir(),
                '--subdir=' + state.subdir,
                '--headerdirs=' + '@@'.join(header_dirs),
                '--mainfile=' + main_file,
                '--modulename=' + modulename,
                '--mode=' + mode]
        if namespace:
            args.append('--namespace=' + namespace)
        args += self._unpack_args('--htmlargs=', 'html_args', kwargs)
        args += self._unpack_args('--scanargs=', 'scan_args', kwargs)
        args += self._unpack_args('--scanobjsargs=', 'scanobjs_args', kwargs)
        args += self._unpack_args('--gobjects-types-file=', 'gobject_typesfile', kwargs, state)
        args += self._unpack_args('--fixxrefargs=', 'fixxref_args', kwargs)
        args += self._unpack_args('--mkdbargs=', 'mkdb_args', kwargs)
        args += self._unpack_args('--html-assets=', 'html_assets', kwargs, state)
        args += self._unpack_args('--content-files=', 'content_files', kwargs, state)
        args += self._unpack_args('--expand-content-files=', 'expand_content_files', kwargs, state)
        args += self._unpack_args('--ignore-headers=', 'ignore_headers', kwargs)
        args += self._unpack_args('--installdir=', 'install_dir', kwargs, state)
        args += self._get_build_args(kwargs, state)
        res = [build.RunTarget(targetname, command[0], command[1:] + args, [], state.subdir, state.subproject)]
        # install defaults to True; the RunScript performs the actual install.
        if kwargs.get('install', True):
            res.append(build.RunScript(command, args))
        return ModuleReturnValue(None, res)
+
    def _get_build_args(self, kwargs, state):
        """Assemble --cflags/--ldflags/--cc/--ld arguments for the gtkdoc
        helper from the 'dependencies' and 'include_directories' kwargs."""
        args = []
        deps = extract_as_list(kwargs, 'dependencies', unholder=True)
        # gi_includes is unused here; only the compile/link flags matter.
        cflags, ldflags, gi_includes = self._get_dependencies_flags(deps, state, include_rpath=True)
        inc_dirs = mesonlib.extract_as_list(kwargs, 'include_directories')
        for incd in inc_dirs:
            if not isinstance(incd.held_object, (str, build.IncludeDirs)):
                raise MesonException(
                    'Gir include dirs should be include_directories().')
        # cflags is set-like (it supports update()); merge the -I flags in.
        cflags.update(get_include_args(inc_dirs))
        if cflags:
            args += ['--cflags=%s' % ' '.join(cflags)]
        if ldflags:
            args += ['--ldflags=%s' % ' '.join(ldflags)]
        # Pass the configured C toolchain so gtkdoc-scangobj compiles with the
        # same compiler/linker as the project (when a C compiler is present).
        compiler = state.environment.coredata.compilers.get('c')
        if compiler:
            args += ['--cc=%s' % ' '.join(compiler.get_exelist())]
            args += ['--ld=%s' % ' '.join(compiler.get_linker_exelist())]

        return args
+
+ @noKwargs
+ def gtkdoc_html_dir(self, state, args, kwargs):
+ if len(args) != 1:
+ raise MesonException('Must have exactly one argument.')
+ modulename = args[0]
+ if not isinstance(modulename, str):
+ raise MesonException('Argument must be a string')
+ return ModuleReturnValue(os.path.join('share/gtk-doc/html', modulename), [])
+
    @staticmethod
    def _unpack_args(arg, kwarg_name, kwargs, expend_file_state=None):
        """Collapse the list kwarg *kwarg_name* into one '<arg>a@@b@@c'
        command-line argument.

        If *expend_file_state* (sic — presumably meant 'expand'; verify with
        callers before renaming) is given, mesonlib.File entries are converted
        to absolute paths; otherwise every value must be a string.  Returns []
        when the kwarg is absent or resolves to an empty list.
        """
        if kwarg_name not in kwargs:
            return []

        new_args = mesonlib.extract_as_list(kwargs, kwarg_name)
        args = []
        for i in new_args:
            if expend_file_state and isinstance(i, mesonlib.File):
                i = i.absolute_path(expend_file_state.environment.get_source_dir(), expend_file_state.environment.get_build_dir())
            elif not isinstance(i, str):
                raise MesonException(kwarg_name + ' values must be strings.')
            args.append(i)

        if args:
            return [arg + '@@'.join(args)]

        return []
+
+ @permittedKwargs({'interface_prefix', 'namespace', 'object_manager', 'build_by_default',
+ 'annotations', 'docbook'})
+ def gdbus_codegen(self, state, args, kwargs):
+ if len(args) != 2:
+ raise MesonException('Gdbus_codegen takes two arguments, name and xml file.')
+ namebase = args[0]
+ xml_file = args[1]
+ target_name = namebase + '-gdbus'
+ cmd = [find_program('gdbus-codegen', target_name)]
+ if 'interface_prefix' in kwargs:
+ cmd += ['--interface-prefix', kwargs.pop('interface_prefix')]
+ if 'namespace' in kwargs:
+ cmd += ['--c-namespace', kwargs.pop('namespace')]
+ if kwargs.get('object_manager', False):
+ cmd += ['--c-generate-object-manager']
+ if 'docbook' in kwargs:
+ cmd += ['--generate-docbook', kwargs.pop('docbook')]
+
+ # Annotations are a bit ugly in that they are a list of lists of strings...
+ annotations = kwargs.pop('annotations', [])
+ if not isinstance(annotations, list):
+ raise MesonException('annotations takes a list')
+ if annotations and isinstance(annotations, list) and not isinstance(annotations[0], list):
+ annotations = [annotations]
+
+ for annotation in annotations:
+ if len(annotation) != 3 or not all(isinstance(i, str) for i in annotation):
+ raise MesonException('Annotations must be made up of 3 strings for ELEMENT, KEY, and VALUE')
+ cmd += ['--annotate'] + annotation
+
+ # https://git.gnome.org/browse/glib/commit/?id=ee09bb704fe9ccb24d92dd86696a0e6bb8f0dc1a
+ if mesonlib.version_compare(self._get_native_glib_version(state), '>= 2.51.3'):
+ cmd += ['--output-directory', '@OUTDIR@', '--generate-c-code', namebase, '@INPUT@']
+ else:
+ self._print_gdbus_warning()
+ cmd += ['--generate-c-code', '@OUTDIR@/' + namebase, '@INPUT@']
+ outputs = [namebase + '.c', namebase + '.h']
+ custom_kwargs = {'input': xml_file,
+ 'output': outputs,
+ 'command': cmd
+ }
+ if 'build_by_default' in kwargs:
+ custom_kwargs['build_by_default'] = kwargs['build_by_default']
+ ct = build.CustomTarget(target_name, state.subdir, state.subproject, custom_kwargs)
+ return ModuleReturnValue(ct, [ct])
+
    @permittedKwargs({'sources', 'c_template', 'h_template', 'install_header', 'install_dir',
                      'comments', 'identifier_prefix', 'symbol_prefix', 'eprod', 'vprod',
                      'fhead', 'fprod', 'ftail', 'vhead', 'vtail', 'depends'})
    def mkenums(self, state, args, kwargs):
        """Wrap glib-mkenums to generate enum registration sources.

        args: one string, the output basename used when no template is given.
        Creates a target per supplied c_template/h_template (or a single
        target driven purely by inline fhead/vhead/... kwargs) and returns
        them in a ModuleReturnValue.
        """
        if len(args) != 1:
            raise MesonException('Mkenums requires one positional argument.')
        basename = args[0]

        if 'sources' not in kwargs:
            raise MesonException('Missing keyword argument "sources".')
        sources = kwargs.pop('sources')
        if isinstance(sources, str):
            sources = [sources]
        elif not isinstance(sources, list):
            raise MesonException(
                'Sources keyword argument must be a string or array.')

        cmd = []
        # kwargs forwarded to glib-mkenums as '--<name>' options.
        known_kwargs = ['comments', 'eprod', 'fhead', 'fprod', 'ftail',
                        'identifier_prefix', 'symbol_prefix', 'template',
                        'vhead', 'vprod', 'vtail']
        # kwargs forwarded to the CustomTarget instead of the command line.
        known_custom_target_kwargs = ['install_dir', 'build_always',
                                      'depends', 'depend_files']
        c_template = h_template = None
        install_header = False
        for arg, value in kwargs.items():
            if arg == 'sources':
                # 'sources' was popped above; seeing it here is impossible.
                raise AssertionError("sources should've already been handled")
            elif arg == 'c_template':
                c_template = value
                if 'template' in kwargs:
                    raise MesonException('Mkenums does not accept both '
                                         'c_template and template keyword '
                                         'arguments at the same time.')
            elif arg == 'h_template':
                h_template = value
                if 'template' in kwargs:
                    raise MesonException('Mkenums does not accept both '
                                         'h_template and template keyword '
                                         'arguments at the same time.')
            elif arg == 'install_header':
                install_header = value
            elif arg in known_kwargs:
                cmd += ['--' + arg.replace('_', '-'), value]
            elif arg not in known_custom_target_kwargs:
                raise MesonException(
                    'Mkenums does not take a %s keyword argument.' % (arg, ))
        cmd = [find_program('glib-mkenums', 'mkenums')] + cmd
        custom_kwargs = {}
        for arg in known_custom_target_kwargs:
            if arg in kwargs:
                custom_kwargs[arg] = kwargs[arg]

        targets = []

        # NOTE: custom_kwargs is shared and deliberately mutated between the
        # header and C target setups below ('install' is overridden, 'depends'
        # is extended), so the statement order here matters.
        if h_template is not None:
            h_output = os.path.splitext(h_template)[0]
            # We always set template as the first element in the source array
            # so --template consumes it.
            h_cmd = cmd + ['--template', '@INPUT@']
            h_sources = [h_template] + sources
            custom_kwargs['install'] = install_header
            if 'install_dir' not in custom_kwargs:
                custom_kwargs['install_dir'] = \
                    state.environment.coredata.get_builtin_option('includedir')
            h_target = self._make_mkenum_custom_target(state, h_sources,
                                                       h_output, h_cmd,
                                                       custom_kwargs)
            targets.append(h_target)

        if c_template is not None:
            c_output = os.path.splitext(c_template)[0]
            # We always set template as the first element in the source array
            # so --template consumes it.
            c_cmd = cmd + ['--template', '@INPUT@']
            c_sources = [c_template] + sources
            # Never install the C file. Complain on bug tracker if you need it.
            custom_kwargs['install'] = False
            if h_template is not None:
                # The C file includes the generated header, so it must be
                # built after it.
                if 'depends' in custom_kwargs:
                    custom_kwargs['depends'] += [h_target]
                else:
                    custom_kwargs['depends'] = h_target
            c_target = self._make_mkenum_custom_target(state, c_sources,
                                                       c_output, c_cmd,
                                                       custom_kwargs)
            # The C target goes first so callers indexing [0] get the source.
            targets.insert(0, c_target)

        if c_template is None and h_template is None:
            generic_cmd = cmd + ['@INPUT@']
            custom_kwargs['install'] = install_header
            if 'install_dir' not in custom_kwargs:
                custom_kwargs['install_dir'] = \
                    state.environment.coredata.get_builtin_option('includedir')
            target = self._make_mkenum_custom_target(state, sources, basename,
                                                     generic_cmd, custom_kwargs)
            return ModuleReturnValue(target, [target])
        elif len(targets) == 1:
            return ModuleReturnValue(targets[0], [targets[0]])
        else:
            return ModuleReturnValue(targets, targets)
+
+ def mkenums_simple(self, state, args, kwargs):
+ hdr_filename = args[0] + '.h'
+ body_filename = args[0] + '.c'
+
+ # not really needed, just for sanity checking
+ forbidden_kwargs = ['c_template', 'h_template', 'eprod', 'fhead',
+ 'fprod', 'ftail', 'vhead', 'vtail', 'comments']
+ for arg in forbidden_kwargs:
+ if arg in kwargs:
+ raise MesonException('mkenums_simple() does not take a %s keyword argument' % (arg, ))
+
+ # kwargs to pass as-is from mkenums_simple() to mkenums()
+ shared_kwargs = ['sources', 'install_header', 'install_dir',
+ 'identifier_prefix', 'symbol_prefix']
+ mkenums_kwargs = {}
+ for arg in shared_kwargs:
+ if arg in kwargs:
+ mkenums_kwargs[arg] = kwargs[arg]
+
+ # .c file generation
+ c_file_kwargs = copy.deepcopy(mkenums_kwargs)
+ if 'sources' not in kwargs:
+ raise MesonException('Missing keyword argument "sources".')
+ sources = kwargs['sources']
+ if isinstance(sources, str):
+ sources = [sources]
+ elif not isinstance(sources, list):
+ raise MesonException(
+ 'Sources keyword argument must be a string or array.')
+
+ header_prefix = kwargs.get('header_prefix', '')
+ decl_decorator = kwargs.get('decorator', '')
+ func_prefix = kwargs.get('function_prefix', '')
+ body_prefix = kwargs.get('body_prefix', '')
+
+ # Maybe we should write our own template files into the build dir
+ # instead, but that seems like much more work, nice as it would be.
+ fhead = ''
+ if body_prefix != '':
+ fhead += '%s\n' % body_prefix
+ fhead += '#include "%s"\n' % hdr_filename
+ for hdr in sources:
+ fhead += '#include "%s"\n' % hdr
+ fhead += '''
+#define C_ENUM(v) ((gint) v)
+#define C_FLAGS(v) ((guint) v)
+'''
+ c_file_kwargs['fhead'] = fhead
+
+ c_file_kwargs['fprod'] = '''
+/* enumerations from "@basename@" */
+'''
+
+ c_file_kwargs['vhead'] = '''
+GType
+%s@enum_name@_get_type (void)
+{
+ static volatile gsize gtype_id = 0;
+ static const G@Type@Value values[] = {''' % func_prefix
+
+ c_file_kwargs['vprod'] = ' { C_@TYPE@(@VALUENAME@), "@VALUENAME@", "@valuenick@" },'
+
+ c_file_kwargs['vtail'] = ''' { 0, NULL, NULL }
+ };
+ if (g_once_init_enter (>ype_id)) {
+ GType new_type = g_@type@_register_static ("@EnumName@", values);
+ g_once_init_leave (>ype_id, new_type);
+ }
+ return (GType) gtype_id;
+}'''
+
+ rv = self.mkenums(state, [body_filename], c_file_kwargs)
+ c_file = rv.return_value
+
+ # .h file generation
+ h_file_kwargs = copy.deepcopy(mkenums_kwargs)
+
+ h_file_kwargs['fhead'] = '''#pragma once
+
+#include <glib-object.h>
+{}
+
+G_BEGIN_DECLS
+'''.format(header_prefix)
+
+ h_file_kwargs['fprod'] = '''
+/* enumerations from "@basename@" */
+'''
+
+ h_file_kwargs['vhead'] = '''
+{}
+GType {}@enum_name@_get_type (void);
+#define @ENUMPREFIX@_TYPE_@ENUMSHORT@ ({}@enum_name@_get_type())'''.format(decl_decorator, func_prefix, func_prefix)
+
+ h_file_kwargs['ftail'] = '''
+G_END_DECLS'''
+
+ rv = self.mkenums(state, [hdr_filename], h_file_kwargs)
+ h_file = rv.return_value
+
+ return ModuleReturnValue([c_file, h_file], [c_file, h_file])
+
+ @staticmethod
+ def _make_mkenum_custom_target(state, sources, output, cmd, kwargs):
+ custom_kwargs = {
+ 'input': sources,
+ 'output': output,
+ 'capture': True,
+ 'command': cmd
+ }
+ custom_kwargs.update(kwargs)
+ return build.CustomTarget(output, state.subdir, state.subproject, custom_kwargs,
+ # https://github.com/mesonbuild/meson/issues/973
+ absolute_paths=True)
+
    @permittedKwargs({'sources', 'prefix', 'install_header', 'install_dir', 'stdinc',
                      'nostdinc', 'internal', 'skip_source', 'valist_marshallers',
                      'extra_args'})
    def genmarshal(self, state, args, kwargs):
        """Wrap glib-genmarshal: create <output>.c and <output>.h custom
        targets generating GObject marshaller code from 'sources'.

        Returns a ModuleReturnValue of [body_target, header_target].
        """
        if len(args) != 1:
            raise MesonException(
                'Genmarshal requires one positional argument.')
        output = args[0]

        if 'sources' not in kwargs:
            raise MesonException('Missing keyword argument "sources".')
        sources = kwargs.pop('sources')
        if isinstance(sources, str):
            sources = [sources]
        elif not isinstance(sources, list):
            raise MesonException(
                'Sources keyword argument must be a string or array.')

        # glib >= 2.53.3 is required for extra arguments and --pragma-once.
        new_genmarshal = mesonlib.version_compare(self._get_native_glib_version(state), '>= 2.53.3')

        cmd = [find_program('glib-genmarshal', output + '_genmarshal')]
        # Boolean flags forwarded to glib-genmarshal as '--<name>'.
        known_kwargs = ['internal', 'nostdinc', 'skip_source', 'stdinc',
                        'valist_marshallers', 'extra_args']
        # kwargs forwarded to the CustomTargets instead of the command line.
        known_custom_target_kwargs = ['build_always', 'depends',
                                      'depend_files', 'install_dir',
                                      'install_header']
        for arg, value in kwargs.items():
            if arg == 'prefix':
                cmd += ['--prefix', value]
            elif arg == 'extra_args':
                if new_genmarshal:
                    cmd += mesonlib.stringlistify(value)
                else:
                    # Best-effort: warn and drop rather than fail the build.
                    mlog.warning('The current version of GLib does not support extra arguments \n'
                                 'for glib-genmarshal. You need at least GLib 2.53.3. See ',
                                 mlog.bold('https://github.com/mesonbuild/meson/pull/2049'))
            elif arg in known_kwargs and value:
                cmd += ['--' + arg.replace('_', '-')]
            elif arg not in known_custom_target_kwargs:
                raise MesonException(
                    'Genmarshal does not take a %s keyword argument.' % (
                        arg, ))

        # Popped here so the copy loop below does not forward them verbatim.
        install_header = kwargs.pop('install_header', False)
        install_dir = kwargs.pop('install_dir', None)

        custom_kwargs = {
            'input': sources,
        }

        # https://github.com/GNOME/glib/commit/0fbc98097fac4d3e647684f344e508abae109fdf
        if mesonlib.version_compare(self._get_native_glib_version(state), '>= 2.51.0'):
            cmd += ['--output', '@OUTPUT@']
        else:
            # Older glib writes to stdout; capture it into the output file.
            custom_kwargs['capture'] = True

        for arg in known_custom_target_kwargs:
            if arg in kwargs:
                custom_kwargs[arg] = kwargs[arg]

        # custom_kwargs is reused (and mutated) for both the body and the
        # header targets below, so keep the statement order intact.
        header_file = output + '.h'
        custom_kwargs['command'] = cmd + ['--body', '@INPUT@']
        if mesonlib.version_compare(self._get_native_glib_version(state), '>= 2.53.4'):
            # Silence any warnings about missing prototypes
            custom_kwargs['command'] += ['--include-header', header_file]
        custom_kwargs['output'] = output + '.c'
        body = build.CustomTarget(output + '_c', state.subdir, state.subproject, custom_kwargs)

        # Only the header honours install_header/install_dir; the C file is
        # never installed.
        custom_kwargs['install'] = install_header
        if install_dir is not None:
            custom_kwargs['install_dir'] = install_dir
        if new_genmarshal:
            cmd += ['--pragma-once']
        custom_kwargs['command'] = cmd + ['--header', '@INPUT@']
        custom_kwargs['output'] = header_file
        header = build.CustomTarget(output + '_h', state.subdir, state.subproject, custom_kwargs)

        rv = [body, header]
        return ModuleReturnValue(rv, rv)
+
+ @staticmethod
+ def _vapi_args_to_command(prefix, variable, kwargs, accept_vapi=False):
+ arg_list = mesonlib.extract_as_list(kwargs, variable)
+ ret = []
+ for arg in arg_list:
+ if not isinstance(arg, str):
+ types = 'strings' + ' or InternalDependencys' if accept_vapi else ''
+ raise MesonException('All {} must be {}'.format(variable, types))
+ ret.append(prefix + arg)
+ return ret
+
    def _extract_vapi_packages(self, state, kwargs):
        '''
        Packages are special because we need to:
        - Get a list of packages for the .deps file
        - Get a list of depends for any VapiTargets
        - Get package name from VapiTargets
        - Add include dirs for any VapiTargets

        Returns (vapi_args, vapi_depends, vapi_packages, vapi_includes).
        '''
        arg_list = kwargs.get('packages')
        if not arg_list:
            return [], [], [], []
        arg_list = mesonlib.listify(arg_list)
        vapi_depends = []    # VapiTargets the new target must depend on
        vapi_packages = []   # package names for the generated .deps file
        vapi_includes = []   # source dirs to expose as include directories
        ret = []             # extra vapigen args (--vapidir/--girdir/--pkg)
        remaining_args = []  # plain string packages, written back to kwargs
        for arg in arg_list:
            if hasattr(arg, 'held_object'):
                arg = arg.held_object
            if isinstance(arg, InternalDependency):
                targets = [t for t in arg.sources if isinstance(t, VapiTarget)]
                for target in targets:
                    srcdir = os.path.join(state.environment.get_source_dir(),
                                          target.get_subdir())
                    outdir = os.path.join(state.environment.get_build_dir(),
                                          target.get_subdir())
                    outfile = target.get_outputs()[0][:-5] # Strip .vapi
                    ret.append('--vapidir=' + outdir)
                    ret.append('--girdir=' + outdir)
                    ret.append('--pkg=' + outfile)
                    vapi_depends.append(target)
                    vapi_packages.append(outfile)
                    vapi_includes.append(srcdir)
            else:
                vapi_packages.append(arg)
                remaining_args.append(arg)

        # The remaining string packages become --pkg= args via the helper.
        kwargs['packages'] = remaining_args
        vapi_args = ret + self._vapi_args_to_command('--pkg=', 'packages', kwargs, accept_vapi=True)
        return vapi_args, vapi_depends, vapi_packages, vapi_includes
+
    def _generate_deps(self, state, library, packages, install_dir):
        """Write <library>.deps (one package name per line) into the scratch
        dir and return a Data object installing it to *install_dir*."""
        outdir = state.environment.scratch_dir
        fname = os.path.join(outdir, library + '.deps')
        with open(fname, 'w') as ofile:
            for package in packages:
                ofile.write(package + '\n')
        # NOTE(review): fname already contains outdir, yet it is passed as the
        # file name component of mesonlib.File(True, outdir, fname) — this may
        # duplicate the directory; verify File's join semantics.
        return build.Data(mesonlib.File(True, outdir, fname), install_dir)
+
+ def _get_vapi_link_with(self, target):
+ link_with = []
+ for dep in target.get_target_dependencies():
+ if isinstance(dep, build.SharedLibrary):
+ link_with.append(dep)
+ elif isinstance(dep, GirTarget):
+ link_with += self._get_vapi_link_with(dep)
+ return link_with
+
    @permittedKwargs({'sources', 'packages', 'metadata_dirs', 'gir_dirs',
                      'vapi_dirs', 'install', 'install_dir'})
    def generate_vapi(self, state, args, kwargs):
        """Run vapigen to produce <library>.vapi from Vala sources and/or GIR
        targets, returning it wrapped in an InternalDependency."""
        if len(args) != 1:
            raise MesonException('The library name is required')

        if not isinstance(args[0], str):
            raise MesonException('The first argument must be the name of the library')
        created_values = []

        library = args[0]
        build_dir = os.path.join(state.environment.get_build_dir(), state.subdir)
        source_dir = os.path.join(state.environment.get_source_dir(), state.subdir)
        pkg_cmd, vapi_depends, vapi_packages, vapi_includes = self._extract_vapi_packages(state, kwargs)
        target_name = 'generate_vapi({})'.format(library)
        # The vapigen binary can be overridden through the environment.
        if 'VAPIGEN' in os.environ:
            cmd = [find_program(os.environ['VAPIGEN'], target_name)]
        else:
            cmd = [find_program('vapigen', target_name)]
        cmd += ['--quiet', '--library=' + library, '--directory=' + build_dir]
        cmd += self._vapi_args_to_command('--vapidir=', 'vapi_dirs', kwargs)
        cmd += self._vapi_args_to_command('--metadatadir=', 'metadata_dirs', kwargs)
        cmd += self._vapi_args_to_command('--girdir=', 'gir_dirs', kwargs)
        cmd += pkg_cmd
        cmd += ['--metadatadir=' + source_dir]

        if 'sources' not in kwargs:
            raise MesonException('sources are required to generate the vapi file')

        inputs = mesonlib.extract_as_list(kwargs, 'sources')

        link_with = []
        for i in inputs:
            if isinstance(i, str):
                # Plain Vala source, relative to the current source dir.
                cmd.append(os.path.join(source_dir, i))
            elif hasattr(i, 'held_object') and isinstance(i.held_object, GirTarget):
                link_with += self._get_vapi_link_with(i.held_object)
                subdir = os.path.join(state.environment.get_build_dir(),
                                      i.held_object.get_subdir())
                gir_file = os.path.join(subdir, i.held_object.get_outputs()[0])
                cmd.append(gir_file)
            else:
                raise MesonException('Input must be a str or GirTarget')

        vapi_output = library + '.vapi'
        custom_kwargs = {
            'command': cmd,
            'input': inputs,
            'output': vapi_output,
            'depends': vapi_depends,
        }
        install_dir = kwargs.get('install_dir',
                                 os.path.join(state.environment.coredata.get_builtin_option('datadir'),
                                              'vala', 'vapi'))
        if kwargs.get('install'):
            custom_kwargs['install'] = kwargs['install']
            custom_kwargs['install_dir'] = install_dir

            # We shouldn't need this locally but we install it
            deps_target = self._generate_deps(state, library, vapi_packages, install_dir)
            created_values.append(deps_target)
        vapi_target = VapiTarget(vapi_output, state.subdir, state.subproject, custom_kwargs)

        # So to try our best to get this to just work we need:
        # - link with with the correct library
        # - include the vapi and dependent vapi files in sources
        # - add relevant directories to include dirs
        incs = [build.IncludeDirs(state.subdir, ['.'] + vapi_includes, False)]
        sources = [vapi_target] + vapi_depends
        rv = InternalDependency(None, incs, [], [], link_with, sources, [])
        created_values.append(rv)
        return ModuleReturnValue(rv, created_values)
+
def initialize():
    """Module entry point used by Meson: return a fresh GnomeModule."""
    return GnomeModule()
--- /dev/null
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import shutil
+
+from os import path
+from .. import coredata, mesonlib, build
+from ..mesonlib import MesonException
+from . import ModuleReturnValue
+from . import ExtensionModule
+from ..interpreterbase import permittedKwargs
+
# Named presets of xgettext arguments, selectable through the 'preset'
# keyword argument of i18n.gettext().
PRESET_ARGS = {
    'glib': [
        '--from-code=UTF-8',
        '--add-comments',

        # https://developer.gnome.org/glib/stable/glib-I18N.html
        '--keyword=_',
        '--keyword=N_',
        '--keyword=C_:1c,2',
        '--keyword=NC_:1c,2',
        '--keyword=g_dcgettext:2',
        '--keyword=g_dngettext:2,3',
        '--keyword=g_dpgettext2:2c,3',

        '--flag=N_:1:pass-c-format',
        '--flag=C_:2:pass-c-format',
        '--flag=NC_:2:pass-c-format',
        '--flag=g_dngettext:2:pass-c-format',
        '--flag=g_strdup_printf:1:c-format',
        '--flag=g_string_printf:2:c-format',
        '--flag=g_string_append_printf:2:c-format',
        '--flag=g_error_new:3:c-format',
        '--flag=g_set_error:4:c-format',
    ]
}
+
class I18nModule(ExtensionModule):
    """Meson i18n module: gettext catalog targets and translated file merging."""

    @staticmethod
    def _get_data_dirs(state, dirs):
        """Returns source directories of relative paths"""
        src_dir = path.join(state.environment.get_source_dir(), state.subdir)
        return [path.join(src_dir, d) for d in dirs]

    @permittedKwargs({'languages', 'data_dirs', 'preset', 'args', 'po_dir', 'type',
                      'input', 'output', 'install', 'install_dir'})
    def merge_file(self, state, args, kwargs):
        """Create a CustomTarget that merges translations into an XML or
        desktop file via the internal msgfmthelper script."""
        podir = kwargs.pop('po_dir', None)
        if not podir:
            raise MesonException('i18n: po_dir is a required kwarg')
        podir = path.join(state.build_to_src, state.subdir, podir)

        file_type = kwargs.pop('type', 'xml')
        VALID_TYPES = ('xml', 'desktop')
        if file_type not in VALID_TYPES:
            raise MesonException('i18n: "{}" is not a valid type {}'.format(file_type, VALID_TYPES))

        datadirs = self._get_data_dirs(state, mesonlib.stringlistify(kwargs.pop('data_dirs', [])))
        datadirs = '--datadirs=' + ':'.join(datadirs) if datadirs else None

        command = state.environment.get_build_command() + [
            '--internal', 'msgfmthelper',
            '@INPUT@', '@OUTPUT@', file_type, podir
        ]
        if datadirs:
            command.append(datadirs)

        # Remaining kwargs (input/output/install/...) pass through to the
        # CustomTarget.  NOTE(review): a missing 'output' kwarg surfaces as a
        # KeyError here rather than a MesonException.
        kwargs['command'] = command
        ct = build.CustomTarget(kwargs['output'] + '_merge', state.subdir, state.subproject, kwargs)
        return ModuleReturnValue(ct, [ct])

    @permittedKwargs({'po_dir', 'data_dirs', 'type', 'languages', 'args', 'preset', 'install'})
    def gettext(self, state, args, kwargs):
        """Set up gettext targets for a package: <pkg>-pot, <pkg>-gmo,
        <pkg>-update-po and (unless install: false) an install script."""
        if len(args) != 1:
            raise coredata.MesonException('Gettext requires one positional argument (package name).')
        if not shutil.which('xgettext'):
            raise coredata.MesonException('Can not do gettext because xgettext is not installed.')
        packagename = args[0]
        languages = mesonlib.stringlistify(kwargs.get('languages', []))
        datadirs = self._get_data_dirs(state, mesonlib.stringlistify(kwargs.get('data_dirs', [])))
        extra_args = mesonlib.stringlistify(kwargs.get('args', []))

        preset = kwargs.pop('preset', None)
        if preset:
            preset_args = PRESET_ARGS.get(preset)
            if not preset_args:
                raise coredata.MesonException('i18n: Preset "{}" is not one of the valid options: {}'.format(
                    preset, list(PRESET_ARGS.keys())))
            # Deduplicate while preserving order.  The original used set(),
            # which made the generated xgettext argument order (and therefore
            # the build command line) non-deterministic between runs.
            seen = set()
            merged = []
            for arg in preset_args + extra_args:
                if arg not in seen:
                    seen.add(arg)
                    merged.append(arg)
            extra_args = merged

        pkg_arg = '--pkgname=' + packagename
        lang_arg = '--langs=' + '@@'.join(languages) if languages else None
        datadirs = '--datadirs=' + ':'.join(datadirs) if datadirs else None
        extra_args = '--extra-args=' + '@@'.join(extra_args) if extra_args else None

        potargs = state.environment.get_build_command() + ['--internal', 'gettext', 'pot', pkg_arg]
        if datadirs:
            potargs.append(datadirs)
        if extra_args:
            potargs.append(extra_args)
        pottarget = build.RunTarget(packagename + '-pot', potargs[0], potargs[1:], [], state.subdir, state.subproject)

        gmoargs = state.environment.get_build_command() + ['--internal', 'gettext', 'gen_gmo']
        if lang_arg:
            gmoargs.append(lang_arg)
        gmotarget = build.RunTarget(packagename + '-gmo', gmoargs[0], gmoargs[1:], [], state.subdir, state.subproject)

        updatepoargs = state.environment.get_build_command() + ['--internal', 'gettext', 'update_po', pkg_arg]
        if lang_arg:
            updatepoargs.append(lang_arg)
        if datadirs:
            updatepoargs.append(datadirs)
        if extra_args:
            updatepoargs.append(extra_args)
        updatepotarget = build.RunTarget(packagename + '-update-po', updatepoargs[0], updatepoargs[1:], [], state.subdir, state.subproject)

        targets = [pottarget, gmotarget, updatepotarget]

        install = kwargs.get('install', True)
        if install:
            script = state.environment.get_build_command()
            args = ['--internal', 'gettext', 'install',
                    '--subdir=' + state.subdir,
                    '--localedir=' + state.environment.coredata.get_builtin_option('localedir'),
                    pkg_arg]
            if lang_arg:
                args.append(lang_arg)
            iscript = build.RunScript(script, args)
            targets.append(iscript)

        return ModuleReturnValue(None, targets)
+
def initialize():
    """Module entry point used by Meson: return a fresh I18nModule."""
    return I18nModule()
--- /dev/null
+# Copyright 2015 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from . import ModuleReturnValue
+from . import ExtensionModule
+from ..interpreterbase import noKwargs
+
class TestModule(ExtensionModule):
    """Trivial module used to exercise Meson's extension-module machinery."""

    @noKwargs
    def print_hello(self, state, args, kwargs):
        """Print a greeting and return an empty module result."""
        print('Hello from a Meson module')
        return ModuleReturnValue(None, [])
+
def initialize():
    """Module entry point used by Meson: return a fresh TestModule."""
    return TestModule()
--- /dev/null
+# Copyright 2015 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+from pathlib import PurePath
+
+from .. import build
+from .. import mesonlib
+from .. import mlog
+from . import ModuleReturnValue
+from . import ExtensionModule
+from ..interpreterbase import permittedKwargs
+
+
+class PkgConfigModule(ExtensionModule):
+
    def _get_lname(self, l, msg, pcfile):
        """Return the name to emit in a '-l' flag for library target *l*,
        warning via *msg* when the flag may not resolve."""
        # Nothing special
        if not l.name_prefix_set:
            return l.name
        # Sometimes people want the library to start with 'lib' everywhere,
        # which is achieved by setting name_prefix to '' and the target name to
        # 'libfoo'. In that case, try to get the pkg-config '-lfoo' arg correct.
        if l.prefix == '' and l.name.startswith('lib'):
            return l.name[3:]
        # If the library is imported via an import library which is always
        # named after the target name, '-lfoo' is correct.
        if l.import_filename:
            return l.name
        # In other cases, we can't guarantee that the compiler will be able to
        # find the library via '-lfoo', so tell the user that.
        mlog.warning(msg.format(l.name, 'name_prefix', l.name, pcfile))
        return l.name
+
+ def _escape(self, value):
+ '''
+ We cannot use shlex.quote because it quotes with ' and " which does not
+ work with pkg-config and pkgconf at all.
+ '''
+ # We should always write out paths with / because pkg-config requires
+ # spaces to be quoted with \ and that messes up on Windows:
+ # https://bugs.freedesktop.org/show_bug.cgi?id=103203
+ if isinstance(value, PurePath):
+ value = value.as_posix()
+ return value.replace(' ', '\ ')
+
+ def _make_relative(self, prefix, subdir):
+ if isinstance(prefix, PurePath):
+ prefix = prefix.as_posix()
+ if isinstance(subdir, PurePath):
+ subdir = subdir.as_posix()
+ if subdir.startswith(prefix):
+ subdir = subdir.replace(prefix, '')
+ return subdir
+
    def generate_pkgconfig_file(self, state, libraries, subdirs, name, description,
                                url, version, pcfile, pub_reqs, priv_reqs,
                                conflicts, priv_libs, extra_cflags, variables):
        """Write the pkg-config file *pcfile* into the scratch directory.

        libraries/priv_libs may hold strings or library targets; subdirs are
        include subdirectories relative to ${includedir}; variables is an
        iterable of (name, value) pairs emitted verbatim at the top.
        """
        coredata = state.environment.get_coredata()
        outdir = state.environment.scratch_dir
        fname = os.path.join(outdir, pcfile)
        prefix = PurePath(coredata.get_builtin_option('prefix'))
        # These always return paths relative to prefix
        libdir = PurePath(coredata.get_builtin_option('libdir'))
        incdir = PurePath(coredata.get_builtin_option('includedir'))
        with open(fname, 'w') as ofile:
            ofile.write('prefix={}\n'.format(self._escape(prefix)))
            # str / PurePath works through PurePath.__rtruediv__.
            ofile.write('libdir={}\n'.format(self._escape('${prefix}' / libdir)))
            ofile.write('includedir={}\n'.format(self._escape('${prefix}' / incdir)))
            for k, v in variables:
                ofile.write('{}={}\n'.format(k, self._escape(v)))
            ofile.write('\n')
            ofile.write('Name: %s\n' % name)
            if len(description) > 0:
                ofile.write('Description: %s\n' % description)
            if len(url) > 0:
                ofile.write('URL: %s\n' % url)
            ofile.write('Version: %s\n' % version)
            if len(pub_reqs) > 0:
                ofile.write('Requires: {}\n'.format(' '.join(pub_reqs)))
            if len(priv_reqs) > 0:
                ofile.write(
                    'Requires.private: {}\n'.format(' '.join(priv_reqs)))
            if len(conflicts) > 0:
                ofile.write('Conflicts: {}\n'.format(' '.join(conflicts)))

            def generate_libs_flags(libs):
                # Yield the -L/-l flags for each entry; plain strings pass
                # through untouched.
                msg = 'Library target {0!r} has {1!r} set. Compilers ' \
                      'may not find it from its \'-l{2}\' linker flag in the ' \
                      '{3!r} pkg-config file.'
                for l in libs:
                    if isinstance(l, str):
                        yield l
                    else:
                        install_dir = l.get_custom_install_dir()[0]
                        if install_dir is False:
                            # Library is not installed; nothing to link against.
                            continue
                        if isinstance(install_dir, str):
                            yield '-L${prefix}/%s ' % self._escape(self._make_relative(prefix, install_dir))
                        else: # install_dir is True
                            yield '-L${libdir}'
                        lname = self._get_lname(l, msg, pcfile)
                        # If using a custom suffix, the compiler may not be able to
                        # find the library
                        if l.name_suffix_set:
                            mlog.warning(msg.format(l.name, 'name_suffix', lname, pcfile))
                        yield '-l%s' % lname

            if len(libraries) > 0:
                ofile.write('Libs: {}\n'.format(' '.join(generate_libs_flags(libraries))))
            if len(priv_libs) > 0:
                ofile.write('Libs.private: {}\n'.format(' '.join(generate_libs_flags(priv_libs))))
            ofile.write('Cflags:')
            for h in subdirs:
                ofile.write(' ')
                if h == '.':
                    ofile.write('-I${includedir}')
                else:
                    ofile.write(self._escape(PurePath('-I${includedir}') / h))
            for f in extra_cflags:
                ofile.write(' ')
                ofile.write(self._escape(f))
            ofile.write('\n')
+
def process_libs(self, libs):
    """Validate and unwrap a 'libraries'-style keyword argument.

    Accepts a single item or a list. Interpreter holder objects are
    unwrapped to their held_object. Every entry must be a shared
    library, a static library, or a plain string; anything else raises
    a MesonException. Returns the unwrapped entries as a list.
    """
    result = []
    for entry in mesonlib.listify(libs):
        unwrapped = getattr(entry, 'held_object', entry)
        if not isinstance(unwrapped, (build.SharedLibrary, build.StaticLibrary, str)):
            raise mesonlib.MesonException('Library argument not a library object nor a string.')
        result.append(unwrapped)
    return result
+
@permittedKwargs({'libraries', 'version', 'name', 'description', 'filebase',
                  'subdirs', 'requires', 'requires_private', 'libraries_private',
                  'install_dir', 'extra_cflags', 'variables', 'url', 'd_module_versions'})
def generate(self, state, args, kwargs):
    """Entry point for pkgconfig.generate().

    Validates all keyword arguments, writes '<filebase>.pc' into the
    scratch directory via generate_pkgconfig_file(), and returns a
    build.Data object that installs it (default: <libdir>/pkgconfig).
    Raises MesonException on any invalid or missing argument.
    """
    if len(args) > 0:
        raise mesonlib.MesonException('Pkgconfig_gen takes no positional arguments.')
    libs = self.process_libs(kwargs.get('libraries', []))
    priv_libs = self.process_libs(kwargs.get('libraries_private', []))
    subdirs = mesonlib.stringlistify(kwargs.get('subdirs', ['.']))
    # 'version' and 'name' are mandatory; the rest have usable defaults.
    version = kwargs.get('version', None)
    if not isinstance(version, str):
        raise mesonlib.MesonException('Version must be specified.')
    name = kwargs.get('name', None)
    if not isinstance(name, str):
        raise mesonlib.MesonException('Name not specified.')
    filebase = kwargs.get('filebase', name)
    if not isinstance(filebase, str):
        raise mesonlib.MesonException('Filebase must be a string.')
    description = kwargs.get('description', None)
    if not isinstance(description, str):
        raise mesonlib.MesonException('Description is not a string.')
    url = kwargs.get('url', '')
    if not isinstance(url, str):
        raise mesonlib.MesonException('URL is not a string.')
    pub_reqs = mesonlib.stringlistify(kwargs.get('requires', []))
    priv_reqs = mesonlib.stringlistify(kwargs.get('requires_private', []))
    conflicts = mesonlib.stringlistify(kwargs.get('conflicts', []))
    extra_cflags = mesonlib.stringlistify(kwargs.get('extra_cflags', []))

    # D module versions translate to extra compiler feature flags, but only
    # if a D compiler is actually configured.
    dversions = kwargs.get('d_module_versions', None)
    if dversions:
        compiler = state.environment.coredata.compilers.get('d')
        if compiler:
            extra_cflags.extend(compiler.get_feature_args({'versions': dversions}))

    def parse_variable_list(stringlist):
        # Parse 'name=value' strings into (name, value) tuples, rejecting
        # variables that would shadow the standard pkg-config variables.
        reserved = ['prefix', 'libdir', 'includedir']
        variables = []
        for var in stringlist:
            # foo=bar=baz is ('foo', 'bar=baz')
            l = var.split('=', 1)
            if len(l) < 2:
                raise mesonlib.MesonException('Invalid variable "{}". Variables must be in \'name=value\' format'.format(var))

            name, value = l[0].strip(), l[1].strip()
            if not name or not value:
                raise mesonlib.MesonException('Invalid variable "{}". Variables must be in \'name=value\' format'.format(var))

            # Variable names must not contain whitespaces
            if any(c.isspace() for c in name):
                raise mesonlib.MesonException('Invalid whitespace in assignment "{}"'.format(var))

            if name in reserved:
                raise mesonlib.MesonException('Variable "{}" is reserved'.format(name))

            variables.append((name, value))

        return variables

    variables = parse_variable_list(mesonlib.stringlistify(kwargs.get('variables', [])))

    pcfile = filebase + '.pc'
    pkgroot = kwargs.get('install_dir', None)
    if pkgroot is None:
        # Default install location for .pc files.
        pkgroot = os.path.join(state.environment.coredata.get_builtin_option('libdir'), 'pkgconfig')
    if not isinstance(pkgroot, str):
        raise mesonlib.MesonException('Install_dir must be a string.')
    self.generate_pkgconfig_file(state, libs, subdirs, name, description, url,
                                 version, pcfile, pub_reqs, priv_reqs,
                                 conflicts, priv_libs, extra_cflags, variables)
    res = build.Data(mesonlib.File(True, state.environment.get_scratch_dir(), pcfile), pkgroot)
    return ModuleReturnValue(res, [res])
+
def initialize():
    # Module entry point called by Meson's module loader.
    return PkgConfigModule()
--- /dev/null
+# Copyright 2016-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import sysconfig
+from .. import mesonlib, dependencies
+
+from . import ExtensionModule
+from mesonbuild.modules import ModuleReturnValue
+from . import permittedSnippetKwargs
+from ..interpreterbase import noKwargs
+from ..interpreter import shlib_kwargs
+
# Keyword arguments accepted by extension_module(): the same set that
# shared library/module targets accept ('name_prefix'/'name_suffix' are
# rejected explicitly inside extension_module() itself).
mod_kwargs = set(shlib_kwargs)
+
+
class Python3Module(ExtensionModule):
    """Helpers for building CPython 3 extension modules and querying the
    interpreter's configuration."""

    def __init__(self):
        super().__init__()
        self.snippets.add('extension_module')

    @permittedSnippetKwargs(mod_kwargs)
    def extension_module(self, interpreter, state, args, kwargs):
        """Wrap shared_module() with the prefix/suffix a Python extension
        needs on the host platform."""
        if 'name_prefix' in kwargs:
            raise mesonlib.MesonException('Name_prefix is set automatically, specifying it is forbidden.')
        if 'name_suffix' in kwargs:
            raise mesonlib.MesonException('Name_suffix is set automatically, specifying it is forbidden.')
        host = state.host_machine.system
        if host == 'darwin':
            # macOS shared libraries default to .dylib, but Python only
            # loads extensions named .so.
            ext_suffix = 'so'
        elif host == 'windows':
            # On Windows the extension is pyd for some unexplainable reason.
            ext_suffix = 'pyd'
        else:
            # Empty list: presumably falls through to the target's default
            # suffix inside func_shared_module -- confirm before changing.
            ext_suffix = []
        kwargs.update(name_prefix='', name_suffix=ext_suffix)
        return interpreter.func_shared_module(None, args, kwargs)

    @noKwargs
    def find_python(self, state, args, kwargs):
        """Return an ExternalProgram pointing at the running Python 3."""
        prog = dependencies.ExternalProgram('python3', mesonlib.python_command, silent=True)
        return ModuleReturnValue(prog, [prog])

    @noKwargs
    def language_version(self, state, args, kwargs):
        """Return the interpreter's MAJOR.MINOR version string."""
        return ModuleReturnValue(sysconfig.get_python_version(), [])

    @noKwargs
    def sysconfig_path(self, state, args, kwargs):
        """Look up an installation path by sysconfig name, relative to the
        installation prefix (e.g. lib/python3.6/site-packages)."""
        if len(args) != 1:
            raise mesonlib.MesonException('sysconfig_path() requires passing the name of path to get.')
        path_name = args[0]
        valid_names = sysconfig.get_path_names()
        if path_name not in valid_names:
            raise mesonlib.MesonException('{} is not a valid path name {}.'.format(path_name, valid_names))

        # Clearing the base variables yields a prefix-relative path; the
        # slice drops the leading path separator.
        empty_bases = {'base': '', 'platbase': '', 'installed_base': ''}
        rel_path = sysconfig.get_path(path_name, vars=empty_bases)[1:]
        return ModuleReturnValue(rel_path, [])
+
+
def initialize():
    # Module entry point called by Meson's module loader.
    return Python3Module()
--- /dev/null
+# Copyright 2015 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+from .. import mlog
+from .. import build
+from ..mesonlib import MesonException, Popen_safe, extract_as_list
+from ..dependencies import Qt4Dependency, Qt5Dependency
+import xml.etree.ElementTree as ET
+from . import ModuleReturnValue, get_include_args
+from ..interpreterbase import permittedKwargs
+
# Lookup table from Qt major version to the dependency class used to
# detect that Qt installation and its tools.
_QT_DEPS_LUT = {
    4: Qt4Dependency,
    5: Qt5Dependency
}
+
+
class QtBaseModule:
    """Shared implementation for the qt4/qt5 modules: tool detection,
    moc/uic/rcc preprocessing and lrelease translation compilation."""

    # Class-level default; flipped to True on the instance after the first
    # successful _detect_tools() run so detection only happens once.
    tools_detected = False

    def __init__(self, qt_version=5):
        self.qt_version = qt_version

    def _detect_tools(self, env, method):
        """Locate moc, uic, rcc and lrelease and verify each belongs to
        the expected Qt major version.

        Results are cached on the instance (self.moc/uic/rcc/lrelease);
        repeated calls return immediately. Raises MesonException when a
        found tool reports a different Qt version.
        """
        if self.tools_detected:
            return
        mlog.log('Detecting Qt{version} tools'.format(version=self.qt_version))
        # FIXME: We currently require QtX to exist while importing the module.
        # We should make it gracefully degrade and not create any targets if
        # the import is marked as 'optional' (not implemented yet)
        kwargs = {'required': 'true', 'modules': 'Core', 'silent': 'true', 'method': method}
        qt = _QT_DEPS_LUT[self.qt_version](env, kwargs)
        # Get all tools and then make sure that they are the right version
        self.moc, self.uic, self.rcc, self.lrelease = qt.compilers_detect()
        # Moc, uic and rcc write their version strings to stderr; moc and
        # rcc even return a non-zero exit status when doing so.
        for compiler, compiler_name in ((self.moc, "Moc"), (self.uic, "Uic"), (self.rcc, "Rcc"), (self.lrelease, "lrelease")):
            if compiler.found():
                # There is no easy way to know which tool/version supports
                # which flag, so try -v first and fall back to -version.
                for flag in ['-v', '-version']:
                    p, stdout, stderr = Popen_safe(compiler.get_command() + [flag])[0:3]
                    if p.returncode == 0:
                        break
                stdout = stdout.strip()
                stderr = stderr.strip()
                # Accept the version string wherever this tool printed it.
                if 'Qt {}'.format(self.qt_version) in stderr:
                    compiler_ver = stderr
                elif 'version {}.'.format(self.qt_version) in stderr:
                    compiler_ver = stderr
                elif ' {}.'.format(self.qt_version) in stdout:
                    compiler_ver = stdout
                else:
                    raise MesonException('{name} preprocessor is not for Qt {version}. Output:\n{stdo}\n{stderr}'.format(
                        name=compiler_name, version=self.qt_version, stdo=stdout, stderr=stderr))
                mlog.log(' {}:'.format(compiler_name.lower()), mlog.green('YES'), '({path}, {version})'.format(
                    path=compiler.get_path(), version=compiler_ver.split()[-1]))
            else:
                mlog.log(' {}:'.format(compiler_name.lower()), mlog.red('NO'))
        self.tools_detected = True

    def parse_qrc(self, state, fname):
        """Return the file paths referenced by the .qrc file *fname*
        (relative to the state subdir), or [] on any parse error.

        Best effort by design: a malformed qrc only costs dependency
        tracking, it must not abort the build setup.
        """
        abspath = os.path.join(state.environment.source_dir, state.subdir, fname)
        relative_part = os.path.split(fname)[0]
        try:
            tree = ET.parse(abspath)
            root = tree.getroot()
            result = []
            for child in root[0]:
                if child.tag != 'file':
                    mlog.warning("malformed rcc file: ", os.path.join(state.subdir, fname))
                    break
                else:
                    result.append(os.path.join(state.subdir, relative_part, child.text))
            return result
        except Exception:
            return []

    @permittedKwargs({'moc_headers', 'moc_sources', 'moc_extra_arguments', 'include_directories', 'ui_files', 'qresources', 'method'})
    def preprocess(self, state, args, kwargs):
        """Create rcc/uic/moc build rules for the given inputs.

        args[0], if present, is a custom output name for a single combined
        resource file; remaining positional args are passed through as
        plain sources. Returns all generated (and passed-through) sources
        in a ModuleReturnValue. Raises MesonException when a required tool
        was not found.
        """
        rcc_files, ui_files, moc_headers, moc_sources, moc_extra_arguments, sources, include_directories \
            = extract_as_list(kwargs, 'qresources', 'ui_files', 'moc_headers', 'moc_sources', 'moc_extra_arguments', 'sources', 'include_directories', pop = True)
        sources += args[1:]
        method = kwargs.get('method', 'auto')
        self._detect_tools(state.environment, method)
        err_msg = "{0} sources specified and couldn't find {1}, " \
                  "please check your qt{2} installation"
        if len(moc_headers) + len(moc_sources) > 0 and not self.moc.found():
            raise MesonException(err_msg.format('MOC', 'moc-qt{}'.format(self.qt_version), self.qt_version))
        if len(rcc_files) > 0:
            if not self.rcc.found():
                raise MesonException(err_msg.format('RCC', 'rcc-qt{}'.format(self.qt_version), self.qt_version))
            # Track the files listed inside each .qrc so the custom targets
            # rebuild when a resource changes.
            qrc_deps = []
            for i in rcc_files:
                qrc_deps += self.parse_qrc(state, i)
            # custom output name set? -> one output file, multiple otherwise
            if len(args) > 0:
                name = args[0]
                rcc_kwargs = {'input': rcc_files,
                              'output': name + '.cpp',
                              'command': [self.rcc, '-name', name, '-o', '@OUTPUT@', '@INPUT@'],
                              'depend_files': qrc_deps}
                res_target = build.CustomTarget(name, state.subdir, state.subproject, rcc_kwargs)
                sources.append(res_target)
            else:
                for rcc_file in rcc_files:
                    basename = os.path.split(rcc_file)[1]
                    name = 'qt' + str(self.qt_version) + '-' + basename.replace('.', '_')
                    rcc_kwargs = {'input': rcc_file,
                                  'output': name + '.cpp',
                                  'command': [self.rcc, '-name', '@BASENAME@', '-o', '@OUTPUT@', '@INPUT@'],
                                  'depend_files': qrc_deps}
                    res_target = build.CustomTarget(name, state.subdir, state.subproject, rcc_kwargs)
                    sources.append(res_target)
        if len(ui_files) > 0:
            if not self.uic.found():
                # Bug fix: this previously did err_msg.format('UIC',
                # 'uic-qt' + self.qt_version), concatenating str + int
                # (TypeError) and supplying only two of the three fields.
                raise MesonException(err_msg.format('UIC', 'uic-qt{}'.format(self.qt_version), self.qt_version))
            ui_kwargs = {'output': 'ui_@BASENAME@.h',
                         'arguments': ['-o', '@OUTPUT@', '@INPUT@']}
            ui_gen = build.Generator([self.uic], ui_kwargs)
            ui_output = ui_gen.process_files('Qt{} ui'.format(self.qt_version), ui_files, state)
            sources.append(ui_output)
        inc = get_include_args(include_dirs=include_directories)
        if len(moc_headers) > 0:
            arguments = moc_extra_arguments + inc + ['@INPUT@', '-o', '@OUTPUT@']
            moc_kwargs = {'output': 'moc_@BASENAME@.cpp',
                          'arguments': arguments}
            moc_gen = build.Generator([self.moc], moc_kwargs)
            moc_output = moc_gen.process_files('Qt{} moc header'.format(self.qt_version), moc_headers, state)
            sources.append(moc_output)
        if len(moc_sources) > 0:
            arguments = moc_extra_arguments + inc + ['@INPUT@', '-o', '@OUTPUT@']
            moc_kwargs = {'output': '@BASENAME@.moc',
                          'arguments': arguments}
            moc_gen = build.Generator([self.moc], moc_kwargs)
            moc_output = moc_gen.process_files('Qt{} moc source'.format(self.qt_version), moc_sources, state)
            sources.append(moc_output)
        return ModuleReturnValue(sources, sources)

    @permittedKwargs({'ts_files', 'install', 'install_dir', 'build_by_default', 'method'})
    def compile_translations(self, state, args, kwargs):
        """Create one lrelease custom target per .ts file, producing the
        corresponding .qm file. Returns the targets in a ModuleReturnValue.
        """
        ts_files, install_dir = extract_as_list(kwargs, 'ts_files', 'install_dir', pop=True)
        self._detect_tools(state.environment, kwargs.get('method', 'auto'))
        translations = []
        for ts in ts_files:
            cmd = [self.lrelease, '@INPUT@', '-qm', '@OUTPUT@']
            lrelease_kwargs = {'output': '@BASENAME@.qm',
                               'input': ts,
                               'install': kwargs.get('install', False),
                               'build_by_default': kwargs.get('build_by_default', False),
                               'command': cmd}
            if install_dir is not None:
                lrelease_kwargs['install_dir'] = install_dir
            lrelease_target = build.CustomTarget('qt{}-compile-{}'.format(self.qt_version, ts), state.subdir, state.subproject, lrelease_kwargs)
            translations.append(lrelease_target)
        return ModuleReturnValue(translations, translations)
--- /dev/null
+# Copyright 2015 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .. import mlog
+from .qt import QtBaseModule
+from . import ExtensionModule
+
+
class Qt4Module(ExtensionModule, QtBaseModule):
    # Thin binding of the shared QtBaseModule machinery to Qt 4.

    def __init__(self):
        # Explicit base-class calls: QtBaseModule records the Qt major
        # version, ExtensionModule sets up common module state.
        QtBaseModule.__init__(self, qt_version=4)
        ExtensionModule.__init__(self)
+
def initialize():
    # Module entry point; warns up-front about a known upstream Qt rcc
    # dependency-tracking limitation.
    mlog.warning('rcc dependencies will not work properly until this upstream issue is fixed:',
                 mlog.bold('https://bugreports.qt.io/browse/QTBUG-45460'))
    return Qt4Module()
--- /dev/null
+# Copyright 2015 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .. import mlog
+from .qt import QtBaseModule
+from . import ExtensionModule
+
+
class Qt5Module(ExtensionModule, QtBaseModule):
    # Thin binding of the shared QtBaseModule machinery to Qt 5.

    def __init__(self):
        # Explicit base-class calls: QtBaseModule records the Qt major
        # version, ExtensionModule sets up common module state.
        QtBaseModule.__init__(self, qt_version=5)
        ExtensionModule.__init__(self)
+
def initialize():
    # Module entry point; warns up-front about a known upstream Qt rcc
    # dependency-tracking limitation.
    mlog.warning('rcc dependencies will not work reliably until this upstream issue is fixed:',
                 mlog.bold('https://bugreports.qt.io/browse/QTBUG-45460'))
    return Qt5Module()
--- /dev/null
+# Copyright 2015 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''This module provides helper functions for RPM related
+functionality such as generating template RPM spec file.'''
+
+from .. import build
+from .. import compilers
+import datetime
+from .. import mlog
+from . import GirTarget, TypelibTarget
+from . import ModuleReturnValue
+from . import ExtensionModule
+from ..interpreterbase import noKwargs
+
+import os
+
class RPMModule(ExtensionModule):
    """Generates a template RPM spec file for the current project."""

    @noKwargs
    def generate_spec_template(self, state, args, kwargs):
        """Write '<project>.spec' into the build directory.

        The template pre-fills BuildRequires from the detected compilers,
        dependencies and external programs, the %files lists from the
        installed targets/headers/man pages, and the standard meson
        build/install/check sections. FIXME markers are left where the
        packager must fill in values by hand.
        """
        # Map the detected compilers to the distro packages providing them.
        compiler_deps = set()
        for compiler in state.compilers.values():
            if isinstance(compiler, compilers.GnuCCompiler):
                compiler_deps.add('gcc')
            elif isinstance(compiler, compilers.GnuCPPCompiler):
                compiler_deps.add('gcc-c++')
            elif isinstance(compiler, compilers.ValaCompiler):
                compiler_deps.add('vala')
            elif isinstance(compiler, compilers.GnuFortranCompiler):
                compiler_deps.add('gcc-gfortran')
            elif isinstance(compiler, compilers.GnuObjCCompiler):
                compiler_deps.add('gcc-objc')
            # Bug fix: this branch used '== compilers.GnuObjCPPCompiler',
            # comparing the instance to the class itself, which is never
            # true, so ObjC++ projects fell into the error branch below.
            elif isinstance(compiler, compilers.GnuObjCPPCompiler):
                compiler_deps.add('gcc-objc++')
            else:
                mlog.log('RPM spec file will not be created, generating not allowed for:',
                         mlog.bold(compiler.get_id()))
                # NOTE(review): bare return (None) kept to preserve the
                # original control flow; callers expecting a
                # ModuleReturnValue should confirm this is handled.
                return
        proj = state.project_name.replace(' ', '_').replace('\t', '_')
        so_installed = False
        devel_subpkg = False
        files = set()
        files_devel = set()
        to_delete = set()
        # Sort installed targets into the main and -devel file lists.
        for target in state.targets.values():
            if isinstance(target, build.Executable) and target.need_install:
                files.add('%%{_bindir}/%s' % target.get_filename())
            elif isinstance(target, build.SharedLibrary) and target.need_install:
                files.add('%%{_libdir}/%s' % target.get_filename())
                for alias in target.get_aliases():
                    if alias.endswith('.so'):
                        # Unversioned .so symlinks belong in the -devel package.
                        files_devel.add('%%{_libdir}/%s' % alias)
                    else:
                        files.add('%%{_libdir}/%s' % alias)
                so_installed = True
            elif isinstance(target, build.StaticLibrary) and target.need_install:
                to_delete.add('%%{buildroot}%%{_libdir}/%s' % target.get_filename())
                mlog.warning('removing', mlog.bold(target.get_filename()),
                             'from package because packaging static libs not recommended')
            elif isinstance(target, GirTarget) and target.should_install():
                files_devel.add('%%{_datadir}/gir-1.0/%s' % target.get_filename()[0])
            elif isinstance(target, TypelibTarget) and target.should_install():
                files.add('%%{_libdir}/girepository-1.0/%s' % target.get_filename()[0])
        for header in state.headers:
            if len(header.get_install_subdir()) > 0:
                files_devel.add('%%{_includedir}/%s/' % header.get_install_subdir())
            else:
                for hdr_src in header.get_sources():
                    files_devel.add('%%{_includedir}/%s' % hdr_src)
        for man in state.man:
            for man_file in man.get_sources():
                # The man section is taken from the file suffix (foo.1 -> man1).
                files.add('%%{_mandir}/man%u/%s.*' % (int(man_file.split('.')[-1]), man_file))
        if len(files_devel) > 0:
            devel_subpkg = True
        filename = os.path.join(state.environment.get_build_dir(),
                                '%s.spec' % proj)
        with open(filename, 'w+') as fn:
            fn.write('Name: %s\n' % proj)
            fn.write('Version: # FIXME\n')
            fn.write('Release: 1%{?dist}\n')
            fn.write('Summary: # FIXME\n')
            fn.write('License: # FIXME\n')
            fn.write('\n')
            fn.write('Source0: %{name}-%{version}.tar.xz # FIXME\n')
            fn.write('\n')
            for compiler in compiler_deps:
                fn.write('BuildRequires: %s\n' % compiler)
            for dep in state.environment.coredata.deps:
                fn.write('BuildRequires: pkgconfig(%s)\n' % dep[0])
            for lib in state.environment.coredata.ext_libs.values():
                # Plain library deps have no pkg-config name we can emit;
                # point the packager at the owning package instead.
                name = lib.get_name()
                fn.write('BuildRequires: {} # FIXME\n'.format(name))
                mlog.warning('replace', mlog.bold(name), 'with the real package.',
                             'You can use following command to find package which '
                             'contains this lib:',
                             mlog.bold("dnf provides '*/lib{}.so'".format(name)))
            for prog in state.environment.coredata.ext_progs.values():
                if not prog.found():
                    fn.write('BuildRequires: %%{_bindir}/%s # FIXME\n' %
                             prog.get_name())
                else:
                    fn.write('BuildRequires: {}\n'.format(prog.get_path()))
            fn.write('BuildRequires: meson\n')
            fn.write('\n')
            fn.write('%description\n')
            fn.write('\n')
            if devel_subpkg:
                fn.write('%package devel\n')
                fn.write('Summary: Development files for %{name}\n')
                # Bug fix: '{version}' was missing its leading '%', so it
                # was emitted literally instead of expanding as a macro.
                fn.write('Requires: %{name}%{?_isa} = %{?epoch:%{epoch}:}%{version}-%{release}\n')
                fn.write('\n')
                fn.write('%description devel\n')
                fn.write('Development files for %{name}.\n')
                fn.write('\n')
            fn.write('%prep\n')
            fn.write('%autosetup\n')
            fn.write('\n')
            fn.write('%build\n')
            fn.write('%meson\n')
            fn.write('%meson_build\n')
            fn.write('\n')
            fn.write('%install\n')
            fn.write('%meson_install\n')
            if len(to_delete) > 0:
                # Static libraries are installed by meson but stripped from
                # the package (see the warning above).
                fn.write('rm -vf %s\n' % ' '.join(to_delete))
            fn.write('\n')
            fn.write('%check\n')
            fn.write('%meson_test\n')
            fn.write('\n')
            fn.write('%files\n')
            for f in files:
                fn.write('%s\n' % f)
            fn.write('\n')
            if devel_subpkg:
                fn.write('%files devel\n')
                for f in files_devel:
                    fn.write('%s\n' % f)
                fn.write('\n')
            if so_installed:
                # Shared libraries were installed: refresh the ldconfig cache.
                fn.write('%post -p /sbin/ldconfig\n')
                fn.write('%postun -p /sbin/ldconfig\n')
            fn.write('\n')
            fn.write('%changelog\n')
            fn.write('* %s meson <meson@example.com> - \n' %
                     datetime.date.today().strftime('%a %b %d %Y'))
            fn.write('- \n')
            fn.write('\n')
        mlog.log('RPM spec template written to %s.spec.\n' % proj)
        return ModuleReturnValue(None, [])
+
def initialize():
    # Module entry point called by Meson's module loader.
    return RPMModule()
--- /dev/null
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .. import mesonlib, compilers, mlog
+
+from . import ExtensionModule
+
class SimdModule(ExtensionModule):
    """Builds per-instruction-set static libraries via simd.check()."""

    def __init__(self):
        super().__init__()
        self.snippets.add('check')
        # FIXME add Altivec and AVX512.
        # Instruction set names recognized as keyword arguments to check().
        self.isets = ('mmx',
                      'sse',
                      'sse2',
                      'sse3',
                      'ssse3',
                      'sse41',
                      'sse42',
                      'avx',
                      'avx2',
                      'neon',
                      )

    def check(self, interpreter, state, args, kwargs):
        """For each instruction set named in kwargs, probe whether the given
        compiler supports it; if so, build a static library
        '<prefix>_<iset>' from the supplied sources and set HAVE_<ISET>
        in a fresh configuration_data object.

        Returns [list_of_static_libraries, configuration_data].
        Raises MesonException on invalid arguments.
        """
        result = []
        if len(args) != 1:
            raise mesonlib.MesonException('Check requires one argument, a name prefix for checks.')
        prefix = args[0]
        if not isinstance(prefix, str):
            raise mesonlib.MesonException('Argument must be a string.')
        if 'compiler' not in kwargs:
            raise mesonlib.MesonException('Must specify compiler keyword')
        if 'sources' in kwargs:
            raise mesonlib.MesonException('SIMD module does not support the "sources" keyword')
        # Everything that is neither an iset name nor 'compiler' is passed
        # through to static_library() unchanged.
        basic_kwargs = {}
        for key, value in kwargs.items():
            if key not in self.isets and key != 'compiler':
                basic_kwargs[key] = value
        compiler = kwargs['compiler'].compiler
        if not isinstance(compiler, compilers.compilers.Compiler):
            raise mesonlib.MesonException('Compiler argument must be a compiler object.')
        cdata = interpreter.func_configuration_data(None, [], {})
        conf = cdata.held_object
        for iset in self.isets:
            if iset not in kwargs:
                continue
            iset_fname = kwargs[iset]  # Might also be an array or Files. static_library will validate.
            # Renamed from 'args' to avoid shadowing the method parameter.
            compile_args = compiler.get_instruction_set_args(iset)
            if compile_args is None:
                mlog.log('Compiler supports %s:' % iset, mlog.red('NO'))
                continue
            if len(compile_args) > 0:
                if not compiler.has_multi_arguments(compile_args, state.environment):
                    mlog.log('Compiler supports %s:' % iset, mlog.red('NO'))
                    continue
            mlog.log('Compiler supports %s:' % iset, mlog.green('YES'))
            conf.values['HAVE_' + iset.upper()] = ('1', 'Compiler supports %s.' % iset)
            libname = prefix + '_' + iset
            lib_kwargs = {'sources': iset_fname,
                          }
            lib_kwargs.update(basic_kwargs)
            # Append the iset flags to any user-provided <lang>_args.
            langarg_key = compiler.get_language() + '_args'
            old_lang_args = mesonlib.extract_as_list(lib_kwargs, langarg_key)
            all_lang_args = old_lang_args + compile_args
            lib_kwargs[langarg_key] = all_lang_args
            result.append(interpreter.func_static_lib(None, [libname], lib_kwargs))
        return [result, cdata]
+
def initialize():
    # Module entry point called by Meson's module loader.
    return SimdModule()
--- /dev/null
+# Copyright 2015 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+from .. import mlog
+from .. import mesonlib, dependencies, build
+from ..mesonlib import MesonException, extract_as_list
+from . import get_include_args
+from . import ModuleReturnValue
+from . import ExtensionModule
+from ..interpreterbase import permittedKwargs
+
class WindowsModule(ExtensionModule):
    """Windows-specific helpers; currently only resource compilation."""

    def detect_compiler(self, compilers):
        """Return the project's C compiler, falling back to C++; raise if
        neither is available."""
        for lang in ('c', 'cpp'):
            if lang in compilers:
                return compilers[lang]
        raise MesonException('Resource compilation requires a C or C++ compiler.')

    @permittedKwargs({'args', 'include_directories'})
    def compile_resources(self, state, args, kwargs):
        """Compile Windows .rc resource scripts through a Generator, using
        rc for MSVC toolchains and windres otherwise. Positional args are
        the input files; 'args' and 'include_directories' add flags."""
        comp = self.detect_compiler(state.compilers)

        user_args = mesonlib.stringlistify(kwargs.get('args', []))
        include_dirs = extract_as_list(kwargs, 'include_directories', pop = True)
        for inc in include_dirs:
            if not isinstance(inc.held_object, (str, build.IncludeDirs)):
                raise MesonException('Resource include dirs should be include_directories().')
        user_args += get_include_args(include_dirs)

        if comp.id == 'msvc':
            rescomp = dependencies.ExternalProgram('rc', silent=True)
            command_args = user_args + ['/nologo', '/fo@OUTPUT@', '@INPUT@']
            out_suffix = 'res'
        else:
            warn_fmt = 'Argument {!r} has a space which may not work with windres due to ' \
                       'a MinGW bug: https://sourceware.org/bugzilla/show_bug.cgi?id=4933'
            for a in user_args:
                if ' ' in a:
                    mlog.warning(warn_fmt.format(a))
            # FIXME: Does not handle `native: true` executables, see
            # https://github.com/mesonbuild/meson/issues/1531
            windres_name = None
            if state.environment.is_cross_build():
                # A windres named in the cross file takes precedence over
                # any other way of finding it.
                windres_name = state.environment.cross_info.config['binaries'].get('windres')
            if windres_name is None:
                # Honour the WINDRES environment variable, commonly used to
                # select an arch-specific windres.
                windres_name = os.environ.get('WINDRES', 'windres')
            rescomp = dependencies.ExternalProgram(windres_name, silent=True)
            command_args = user_args + ['@INPUT@', '@OUTPUT@']
            out_suffix = 'o'
        if not rescomp.found():
            raise MesonException('Could not find Windows resource compiler %s.' % ' '.join(rescomp.get_command()))
        gen_kwargs = {'output': '@BASENAME@.' + out_suffix,
                      'arguments': command_args}
        generator = build.Generator([rescomp], gen_kwargs)
        generated = generator.process_files('Windows resource', args, state)
        return ModuleReturnValue(generated, [generated])
+
def initialize():
    # Module entry point called by Meson's module loader.
    return WindowsModule()
--- /dev/null
+# Copyright 2014-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+from .mesonlib import MesonException
+from . import mlog
+
class ParseException(MesonException):
    """Lexer/parser error at a single source position."""

    def __init__(self, text, line, lineno, colno):
        # Message layout: the error text, then the offending source line,
        # then a caret pointing at the error column.
        caret = '%s^' % (' ' * colno)
        super().__init__('%s\n%s\n%s' % (text, line, caret))
        self.lineno = lineno
        self.colno = colno
+
class BlockParseException(MesonException):
    """Parse error for a block construct, pointing at both the error
    position and where the block started."""

    def __init__(self, text, line, lineno, colno, start_line, start_lineno, start_colno):
        if lineno == start_lineno:
            # Block start and error are on the same line: draw one marker
            # line with a caret at the start, underscores across the span,
            # and a caret at the error column.
            marker = '%s^%s^' % (' ' * start_colno, '_' * (colno - start_colno - 1))
            message = "%s\n%s\n%s" % (text, line, marker)
        else:
            # Start and error are on different lines: show the error line
            # with its caret, then repeat where the block began with its
            # own caret.
            error_caret = '%s^' % (' ' * colno)
            start_caret = "%s^" % (' ' * start_colno)
            message = "%s\n%s\n%s\nFor a block that started at %d,%d\n%s\n%s" % (
                text, line, error_caret, start_lineno, start_colno, start_line, start_caret)
        super().__init__(message)
        self.lineno = lineno
        self.colno = colno
+
class Token:
    """A single lexed token.

    Compares equal to a plain string matching its tid, or to another
    Token with the same tid; the value is deliberately not compared.
    """

    def __init__(self, tid, subdir, line_start, lineno, colno, bytespan, value):
        self.tid = tid                  # token type id, e.g. 'id', 'number'
        self.subdir = subdir            # subdir of the file being lexed
        self.line_start = line_start    # offset into the source where this line begins
        self.lineno = lineno
        self.colno = colno
        self.bytespan = bytespan        # (start, end) offsets of the token text
        self.value = value              # token payload (matched text or parsed value)

    def __eq__(self, other):
        other_tid = other if isinstance(other, str) else other.tid
        return self.tid == other_tid
+
+class Lexer:
def __init__(self, code):
    """Prepare to lex *code* (the full source text of one build file)."""
    self.code = code
    # Identifiers that are reserved words rather than plain 'id' tokens.
    self.keywords = {'true', 'false', 'if', 'else', 'elif',
                     'endif', 'and', 'or', 'not', 'foreach', 'endforeach'}
    # (token id, compiled regex) pairs tried in order at each position;
    # ordering matters so that longer operators match before their
    # prefixes (e.g. '+=' before '+', '==' before '=', '<=' before '<').
    self.token_specification = [
        # Need to be sorted longest to shortest.
        ('ignore', re.compile(r'[ \t]')),
        ('id', re.compile('[_a-zA-Z][_0-9a-zA-Z]*')),
        ('number', re.compile(r'\d+')),
        ('eol_cont', re.compile(r'\\\n')),
        ('eol', re.compile(r'\n')),
        ('multiline_string', re.compile(r"'''(.|\n)*?'''", re.M)),
        ('comment', re.compile(r'#.*')),
        ('lparen', re.compile(r'\(')),
        ('rparen', re.compile(r'\)')),
        ('lbracket', re.compile(r'\[')),
        ('rbracket', re.compile(r'\]')),
        ('dblquote', re.compile(r'"')),
        ('string', re.compile(r"'([^'\\]|(\\.))*'")),
        ('comma', re.compile(r',')),
        ('plusassign', re.compile(r'\+=')),
        ('dot', re.compile(r'\.')),
        ('plus', re.compile(r'\+')),
        ('dash', re.compile(r'-')),
        ('star', re.compile(r'\*')),
        ('percent', re.compile(r'%')),
        ('fslash', re.compile(r'/')),
        ('colon', re.compile(r':')),
        ('equal', re.compile(r'==')),
        ('nequal', re.compile(r'!=')),
        ('assign', re.compile(r'=')),
        ('le', re.compile(r'<=')),
        ('lt', re.compile(r'<')),
        ('ge', re.compile(r'>=')),
        ('gt', re.compile(r'>')),
        ('questionmark', re.compile(r'\?')),
    ]
+
+ def getline(self, line_start):
+ return self.code[line_start:self.code.find('\n', line_start)]
+
+ def lex(self, subdir):
+ line_start = 0
+ lineno = 1
+ loc = 0
+ par_count = 0
+ bracket_count = 0
+ col = 0
+ newline_rx = re.compile(r'(?<!\\)((?:\\\\)*)\\n')
+ while loc < len(self.code):
+ matched = False
+ value = None
+ for (tid, reg) in self.token_specification:
+ mo = reg.match(self.code, loc)
+ if mo:
+ curline = lineno
+ curline_start = line_start
+ col = mo.start() - line_start
+ matched = True
+ span_start = loc
+ loc = mo.end()
+ span_end = loc
+ bytespan = (span_start, span_end)
+ match_text = mo.group()
+ if tid == 'ignore' or tid == 'comment':
+ break
+ elif tid == 'lparen':
+ par_count += 1
+ elif tid == 'rparen':
+ par_count -= 1
+ elif tid == 'lbracket':
+ bracket_count += 1
+ elif tid == 'rbracket':
+ bracket_count -= 1
+ elif tid == 'dblquote':
+ raise ParseException('Double quotes are not supported. Use single quotes.', self.getline(line_start), lineno, col)
+ elif tid == 'string':
+ value = match_text[1:-1].replace(r"\'", "'")
+ value = newline_rx.sub(r'\1\n', value)
+ value = value.replace(r" \\ ".strip(), r" \ ".strip())
+ elif tid == 'multiline_string':
+ tid = 'string'
+ value = match_text[3:-3]
+ lines = match_text.split('\n')
+ if len(lines) > 1:
+ lineno += len(lines) - 1
+ line_start = mo.end() - len(lines[-1])
+ elif tid == 'number':
+ value = int(match_text)
+ elif tid == 'eol' or tid == 'eol_cont':
+ lineno += 1
+ line_start = loc
+ if par_count > 0 or bracket_count > 0:
+ break
+ elif tid == 'id':
+ if match_text in self.keywords:
+ tid = match_text
+ else:
+ value = match_text
+ yield Token(tid, subdir, curline_start, curline, col, bytespan, value)
+ break
+ if not matched:
+ raise ParseException('lexer', self.getline(line_start), lineno, col)
+
class ElementaryNode:
    """Base class for leaf AST nodes; copies location/value from a token."""
    def __init__(self, token):
        self.lineno, self.colno = token.lineno, token.colno
        self.subdir = token.subdir
        self.value = token.value
        self.bytespan = token.bytespan
+
class BooleanNode(ElementaryNode):
    """Literal 'true' or 'false'; value is the Python bool."""
    def __init__(self, token, value):
        super().__init__(token)
        self.value = value
        assert isinstance(self.value, bool)
+
class IdNode(ElementaryNode):
    """Identifier reference; value holds the identifier text."""
    def __init__(self, token):
        super().__init__(token)
        assert isinstance(self.value, str)

    def __str__(self):
        return "Id node: '%s' (%d, %d)." % (self.value, self.lineno, self.colno)
+
class NumberNode(ElementaryNode):
    """Integer literal; value holds the parsed int."""
    def __init__(self, token):
        super().__init__(token)
        assert isinstance(self.value, int)
+
class StringNode(ElementaryNode):
    """String literal; value holds the decoded text."""
    def __init__(self, token):
        super().__init__(token)
        assert isinstance(self.value, str)

    def __str__(self):
        return "String node: '%s' (%d, %d)." % (self.value, self.lineno, self.colno)
+
class ArrayNode:
    """Array literal; location info is taken from its ArgumentNode."""
    def __init__(self, args):
        self.args = args
        self.subdir, self.lineno, self.colno = args.subdir, args.lineno, args.colno
+
class EmptyNode:
    """Placeholder node for an absent expression or statement."""
    def __init__(self, lineno, colno):
        self.lineno, self.colno = lineno, colno
        self.subdir = ''
        self.value = None
+
class OrNode:
    """Boolean 'or' of two subexpressions; located at the left operand."""
    def __init__(self, left, right):
        self.left, self.right = left, right
        self.subdir = left.subdir
        self.lineno, self.colno = left.lineno, left.colno
+
class AndNode:
    """Boolean 'and' of two subexpressions; located at the left operand."""
    def __init__(self, left, right):
        self.left, self.right = left, right
        self.subdir = left.subdir
        self.lineno, self.colno = left.lineno, left.colno
+
class ComparisonNode:
    """Binary comparison; ctype is the operator string ('==', '<', ...)."""
    def __init__(self, ctype, left, right):
        self.ctype = ctype
        self.left, self.right = left, right
        self.subdir = left.subdir
        self.lineno, self.colno = left.lineno, left.colno
+
class ArithmeticNode:
    """Binary arithmetic; operation is 'add'/'sub'/'mul'/'div'/'mod'."""
    def __init__(self, operation, left, right):
        self.operation = operation
        self.left, self.right = left, right
        self.subdir = left.subdir
        self.lineno, self.colno = left.lineno, left.colno
+
class NotNode:
    """Logical negation of 'value'; location comes from location_node."""
    def __init__(self, location_node, value):
        self.value = value
        self.subdir = location_node.subdir
        self.lineno, self.colno = location_node.lineno, location_node.colno
+
class CodeBlockNode:
    """Sequence of statements; 'lines' is filled in by the parser."""
    def __init__(self, location_node):
        self.subdir = location_node.subdir
        self.lineno, self.colno = location_node.lineno, location_node.colno
        self.lines = []
+
class IndexNode:
    """Subscript expression iobject[index]; located at the indexed object."""
    def __init__(self, iobject, index):
        self.iobject, self.index = iobject, index
        self.subdir = iobject.subdir
        self.lineno, self.colno = iobject.lineno, iobject.colno
+
class MethodNode:
    """Method invocation: source_object.name(args)."""
    def __init__(self, subdir, lineno, colno, source_object, name, args):
        self.subdir, self.lineno, self.colno = subdir, lineno, colno
        self.source_object = source_object
        self.name = name
        assert isinstance(self.name, str)
        self.args = args
+
class FunctionNode:
    """Free-function invocation: func_name(args)."""
    def __init__(self, subdir, lineno, colno, func_name, args):
        self.subdir, self.lineno, self.colno = subdir, lineno, colno
        assert isinstance(func_name, str)
        self.func_name = func_name
        self.args = args
+
class AssignmentNode:
    """Assignment statement: var_name = value."""
    def __init__(self, lineno, colno, var_name, value):
        assert isinstance(var_name, str)
        self.lineno, self.colno = lineno, colno
        self.var_name = var_name
        self.value = value
+
class PlusAssignmentNode:
    """Augmented assignment statement: var_name += value."""
    def __init__(self, lineno, colno, var_name, value):
        assert isinstance(var_name, str)
        self.lineno, self.colno = lineno, colno
        self.var_name = var_name
        self.value = value
+
class ForeachClauseNode:
    """foreach varname : items ... endforeach loop."""
    def __init__(self, lineno, colno, varname, items, block):
        self.lineno, self.colno = lineno, colno
        self.varname = varname
        self.items = items
        self.block = block
+
class IfClauseNode:
    """Full if/elif/else chain: 'ifs' holds IfNode entries in order,
    'elseblock' is the else body (or an EmptyNode if absent)."""
    def __init__(self, lineno, colno):
        self.lineno, self.colno = lineno, colno
        self.ifs = []
        self.elseblock = EmptyNode(lineno, colno)
+
class UMinusNode:
    """Unary minus applied to 'value'; located at current_location."""
    def __init__(self, current_location, value):
        self.value = value
        self.subdir = current_location.subdir
        self.lineno, self.colno = current_location.lineno, current_location.colno
+
class IfNode:
    """One condition/body pair inside an IfClauseNode."""
    def __init__(self, lineno, colno, condition, block):
        self.lineno, self.colno = lineno, colno
        self.condition = condition
        self.block = block
+
class TernaryNode:
    """Ternary expression: condition ? trueblock : falseblock."""
    def __init__(self, lineno, colno, condition, trueblock, falseblock):
        self.lineno, self.colno = lineno, colno
        self.condition = condition
        self.trueblock, self.falseblock = trueblock, falseblock
+
class ArgumentNode:
    """Positional and keyword arguments of a call or array literal.

    'arguments' holds positional argument nodes, 'kwargs' the keyword
    arguments, and 'commas' the comma tokens (used for position info).
    'order_error' records that a positional argument appeared after a
    keyword argument.
    """
    def __init__(self, token):
        self.lineno = token.lineno
        self.colno = token.colno
        self.subdir = token.subdir
        self.arguments = []
        self.commas = []
        self.kwargs = {}
        self.order_error = False

    def prepend(self, statement):
        """Insert a positional argument at the front (EmptyNodes dropped)."""
        if self.num_kwargs() > 0:
            self.order_error = True
        if not isinstance(statement, EmptyNode):
            self.arguments = [statement] + self.arguments

    def append(self, statement):
        """Append a positional argument (EmptyNodes dropped)."""
        if self.num_kwargs() > 0:
            self.order_error = True
        if not isinstance(statement, EmptyNode):
            self.arguments += [statement]

    def set_kwarg(self, name, value):
        """Record a keyword argument, warning on duplicates."""
        if name in self.kwargs:
            # Fixed grammar: was "be a an error".
            mlog.warning('Keyword argument "%s" defined multiple times. This will be an error in future Meson releases.' % name)
        self.kwargs[name] = value

    def num_args(self):
        """Number of positional arguments."""
        return len(self.arguments)

    def num_kwargs(self):
        """Number of keyword arguments."""
        return len(self.kwargs)

    def incorrect_order(self):
        """True if a positional argument followed a keyword argument."""
        return self.order_error

    def __len__(self):
        return self.num_args() # Fixme
+
# Maps comparison-operator token ids (from the Lexer) to the operator
# strings stored in ComparisonNode.ctype.
comparison_map = {'equal': '==',
                  'nequal': '!=',
                  'lt': '<',
                  'le': '<=',
                  'gt': '>',
                  'ge': '>='
                  }
+
+# Recursive descent parser for Meson's definition language.
+# Very basic, apart from the fact that we have so many precedence
+# levels that there are not enough words to describe them all.
+# Hence the numbering:
+#
+# 1 assignment
+# 2 or
+# 3 and
+# 4 comparison
+# 5 arithmetic
+# 6 negation
+# 7 funcall, method call
+# 8 parentheses
+# 9 plain token
+
class Parser:
    """Recursive descent parser turning Meson source into an AST.

    Each eN() method handles one precedence level (see the numbering in
    the comment above); parse() returns the root CodeBlockNode.
    """
    def __init__(self, code, subdir):
        self.lexer = Lexer(code)
        self.stream = self.lexer.lex(subdir)
        # Dummy token so the first getsym() has something to replace.
        self.current = Token('eof', '', 0, 0, 0, (0, 0), None)
        self.getsym()
        self.in_ternary = False

    def getsym(self):
        """Advance self.current to the next token ('eof' at end of input)."""
        try:
            self.current = next(self.stream)
        except StopIteration:
            self.current = Token('eof', '', self.current.line_start, self.current.lineno, self.current.colno + self.current.bytespan[1] - self.current.bytespan[0], (0, 0), None)

    def getline(self):
        """Return the source line containing the current token."""
        return self.lexer.getline(self.current.line_start)

    def accept(self, s):
        """If the current token id is s, consume it and return True."""
        if self.current.tid == s:
            self.getsym()
            return True
        return False

    def expect(self, s):
        """Like accept(), but raise ParseException on mismatch."""
        if self.accept(s):
            return True
        raise ParseException('Expecting %s got %s.' % (s, self.current.tid), self.getline(), self.current.lineno, self.current.colno)

    def block_expect(self, s, block_start):
        """expect() variant that also reports where the enclosing block began."""
        if self.accept(s):
            return True
        raise BlockParseException('Expecting %s got %s.' % (s, self.current.tid), self.getline(), self.current.lineno, self.current.colno, self.lexer.getline(block_start.line_start), block_start.lineno, block_start.colno)

    def parse(self):
        """Parse the entire input and return the root CodeBlockNode."""
        block = self.codeblock()
        self.expect('eof')
        return block

    def statement(self):
        """A statement is an expression at the lowest precedence level."""
        return self.e1()

    def e1(self):
        """Level 1: assignment, plus-assignment and the ternary operator."""
        left = self.e2()
        if self.accept('plusassign'):
            value = self.e1()
            if not isinstance(left, IdNode):
                raise ParseException('Plusassignment target must be an id.', self.getline(), left.lineno, left.colno)
            return PlusAssignmentNode(left.lineno, left.colno, left.value, value)
        elif self.accept('assign'):
            value = self.e1()
            if not isinstance(left, IdNode):
                raise ParseException('Assignment target must be an id.',
                                     self.getline(), left.lineno, left.colno)
            return AssignmentNode(left.lineno, left.colno, left.value, value)
        elif self.accept('questionmark'):
            # Ternaries may not nest; in_ternary guards against it.
            if self.in_ternary:
                raise ParseException('Nested ternary operators are not allowed.',
                                     self.getline(), left.lineno, left.colno)
            self.in_ternary = True
            trueblock = self.e1()
            self.expect('colon')
            falseblock = self.e1()
            self.in_ternary = False
            return TernaryNode(left.lineno, left.colno, left, trueblock, falseblock)
        return left

    def e2(self):
        """Level 2: 'or' (left associative via the while loop)."""
        left = self.e3()
        while self.accept('or'):
            if isinstance(left, EmptyNode):
                raise ParseException('Invalid or clause.',
                                     self.getline(), left.lineno, left.colno)
            left = OrNode(left, self.e3())
        return left

    def e3(self):
        """Level 3: 'and' (left associative)."""
        left = self.e4()
        while self.accept('and'):
            if isinstance(left, EmptyNode):
                raise ParseException('Invalid and clause.',
                                     self.getline(), left.lineno, left.colno)
            left = AndNode(left, self.e4())
        return left

    def e4(self):
        """Level 4: comparison operators (non-associative)."""
        left = self.e5()
        for nodename, operator_type in comparison_map.items():
            if self.accept(nodename):
                return ComparisonNode(operator_type, left, self.e5())
        return left

    def e5(self):
        """Level 5: arithmetic, split into one sub-level per operator."""
        return self.e5add()

    def e5add(self):
        left = self.e5sub()
        if self.accept('plus'):
            return ArithmeticNode('add', left, self.e5add())
        return left

    def e5sub(self):
        left = self.e5mod()
        if self.accept('dash'):
            return ArithmeticNode('sub', left, self.e5sub())
        return left

    def e5mod(self):
        left = self.e5mul()
        if self.accept('percent'):
            return ArithmeticNode('mod', left, self.e5mod())
        return left

    def e5mul(self):
        left = self.e5div()
        if self.accept('star'):
            return ArithmeticNode('mul', left, self.e5mul())
        return left

    def e5div(self):
        left = self.e6()
        if self.accept('fslash'):
            return ArithmeticNode('div', left, self.e5div())
        return left

    def e6(self):
        """Level 6: unary 'not' and unary minus."""
        if self.accept('not'):
            return NotNode(self.current, self.e7())
        if self.accept('dash'):
            return UMinusNode(self.current, self.e7())
        return self.e7()

    def e7(self):
        """Level 7: function calls, then chained method/index accesses."""
        left = self.e8()
        block_start = self.current
        if self.accept('lparen'):
            args = self.args()
            self.block_expect('rparen', block_start)
            if not isinstance(left, IdNode):
                raise ParseException('Function call must be applied to plain id',
                                     self.getline(), left.lineno, left.colno)
            left = FunctionNode(left.subdir, left.lineno, left.colno, left.value, args)
        # Keep consuming '.' method calls and '[' index expressions in any
        # order until neither applies.
        go_again = True
        while go_again:
            go_again = False
            if self.accept('dot'):
                go_again = True
                left = self.method_call(left)
            if self.accept('lbracket'):
                go_again = True
                left = self.index_call(left)
        return left

    def e8(self):
        """Level 8: parenthesized expression or array literal."""
        block_start = self.current
        if self.accept('lparen'):
            e = self.statement()
            self.block_expect('rparen', block_start)
            return e
        elif self.accept('lbracket'):
            args = self.args()
            self.block_expect('rbracket', block_start)
            return ArrayNode(args)
        else:
            return self.e9()

    def e9(self):
        """Level 9: plain tokens (literals and identifiers)."""
        t = self.current
        if self.accept('true'):
            return BooleanNode(t, True)
        if self.accept('false'):
            return BooleanNode(t, False)
        if self.accept('id'):
            return IdNode(t)
        if self.accept('number'):
            return NumberNode(t)
        if self.accept('string'):
            return StringNode(t)
        return EmptyNode(self.current.lineno, self.current.colno)

    def args(self):
        """Parse a (possibly empty) argument list into an ArgumentNode."""
        s = self.statement()
        a = ArgumentNode(s)

        while not isinstance(s, EmptyNode):
            potential = self.current
            if self.accept('comma'):
                a.commas.append(potential)
                a.append(s)
            elif self.accept('colon'):
                # 'id : value' form is a keyword argument.
                if not isinstance(s, IdNode):
                    raise ParseException('Keyword argument must be a plain identifier.',
                                         self.getline(), s.lineno, s.colno)
                a.set_kwarg(s.value, self.statement())
                potential = self.current
                if not self.accept('comma'):
                    return a
                a.commas.append(potential)
            else:
                a.append(s)
                return a
            s = self.statement()
        return a

    def method_call(self, source_object):
        """Parse '.name(args)' applied to source_object; recurses on chains."""
        methodname = self.e9()
        if not(isinstance(methodname, IdNode)):
            raise ParseException('Method name must be plain id',
                                 self.getline(), self.current.lineno, self.current.colno)
        self.expect('lparen')
        args = self.args()
        self.expect('rparen')
        method = MethodNode(methodname.subdir, methodname.lineno, methodname.colno, source_object, methodname.value, args)
        if self.accept('dot'):
            return self.method_call(method)
        return method

    def index_call(self, source_object):
        """Parse '[expr]' applied to source_object ('[' already consumed)."""
        index_statement = self.statement()
        self.expect('rbracket')
        return IndexNode(source_object, index_statement)

    def foreachblock(self):
        """Parse 'varname : items <block>' after the 'foreach' keyword."""
        t = self.current
        self.expect('id')
        varname = t
        self.expect('colon')
        items = self.statement()
        block = self.codeblock()
        return ForeachClauseNode(varname.lineno, varname.colno, varname, items, block)

    def ifblock(self):
        """Parse the full if/elif/else chain after the 'if' keyword."""
        condition = self.statement()
        clause = IfClauseNode(condition.lineno, condition.colno)
        self.expect('eol')
        block = self.codeblock()
        clause.ifs.append(IfNode(clause.lineno, clause.colno, condition, block))
        self.elseifblock(clause)
        clause.elseblock = self.elseblock()
        return clause

    def elseifblock(self, clause):
        """Append any number of elif branches to clause."""
        while self.accept('elif'):
            s = self.statement()
            self.expect('eol')
            b = self.codeblock()
            clause.ifs.append(IfNode(s.lineno, s.colno, s, b))

    def elseblock(self):
        """Parse an optional else branch; returns None if absent."""
        if self.accept('else'):
            self.expect('eol')
            return self.codeblock()

    def line(self):
        """Parse one logical line: empty, if-block, foreach-block or statement."""
        block_start = self.current
        if self.current == 'eol':
            return EmptyNode(self.current.lineno, self.current.colno)
        if self.accept('if'):
            block = self.ifblock()
            self.block_expect('endif', block_start)
            return block
        if self.accept('foreach'):
            block = self.foreachblock()
            self.block_expect('endforeach', block_start)
            return block
        return self.statement()

    def codeblock(self):
        """Parse consecutive lines until one is not followed by 'eol'."""
        block = CodeBlockNode(self.current)
        cond = True
        while cond:
            curline = self.line()
            if not isinstance(curline, EmptyNode):
                block.lines.append(curline)
            cond = self.accept('eol')
        return block
--- /dev/null
+# Copyright 2016-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# A tool to run tests in many different ways.
+
+import shlex
+import subprocess, sys, os, argparse
+import pickle
+from mesonbuild import build
+from mesonbuild import environment
+from mesonbuild.dependencies import ExternalProgram
+from mesonbuild import mesonlib
+from mesonbuild import mlog
+
+import time, datetime, multiprocessing, json
+import concurrent.futures as conc
+import platform
+import signal
+import random
+
# GNU autotools interprets a return code of 77 from tests it executes to
# mean that the test should be skipped.
# This runner follows the same convention (see run_single_test/print_stats).
GNU_SKIP_RETURNCODE = 77
+
def is_windows():
    """Return True on native Windows, including MinGW environments."""
    sysname = platform.system().lower()
    return 'mingw' in sysname or sysname == 'windows'
+
def is_cygwin():
    """Return True when running under Cygwin."""
    return 'cygwin' in platform.system().lower()
+
def determine_worker_count():
    """Number of parallel test processes to use.

    $MESON_TESTTHREADS wins when set (falling back to 1 on a non-integer
    value); otherwise the detected CPU count, with 1 as the last resort.
    """
    varname = 'MESON_TESTTHREADS'
    if varname not in os.environ:
        try:
            # Fails in some weird environments such as Debian
            # reproducible build.
            return multiprocessing.cpu_count()
        except Exception:
            return 1
    try:
        return int(os.environ[varname])
    except ValueError:
        print('Invalid value in %s, using 1 thread.' % varname)
        return 1
+
# Command-line interface of the test runner.  Note that --no-stdsplit
# stores into dest 'split' (default True: stdout/stderr are split), and
# --num-processes defaults to the value computed by determine_worker_count().
parser = argparse.ArgumentParser(prog='meson test')
parser.add_argument('--repeat', default=1, dest='repeat', type=int,
                    help='Number of times to run the tests.')
parser.add_argument('--no-rebuild', default=False, action='store_true',
                    help='Do not rebuild before running tests.')
parser.add_argument('--gdb', default=False, dest='gdb', action='store_true',
                    help='Run test under gdb.')
parser.add_argument('--list', default=False, dest='list', action='store_true',
                    help='List available tests.')
parser.add_argument('--wrapper', default=None, dest='wrapper', type=shlex.split,
                    help='wrapper to run tests with (e.g. Valgrind)')
parser.add_argument('-C', default='.', dest='wd',
                    help='directory to cd into before running')
parser.add_argument('--suite', default=[], dest='include_suites', action='append', metavar='SUITE',
                    help='Only run tests belonging to the given suite.')
parser.add_argument('--no-suite', default=[], dest='exclude_suites', action='append', metavar='SUITE',
                    help='Do not run tests belonging to the given suite.')
parser.add_argument('--no-stdsplit', default=True, dest='split', action='store_false',
                    help='Do not split stderr and stdout in test logs.')
parser.add_argument('--print-errorlogs', default=False, action='store_true',
                    help="Whether to print failing tests' logs.")
parser.add_argument('--benchmark', default=False, action='store_true',
                    help="Run benchmarks instead of tests.")
parser.add_argument('--logbase', default='testlog',
                    help="Base name for log file.")
parser.add_argument('--num-processes', default=determine_worker_count(), type=int,
                    help='How many parallel processes to use.')
parser.add_argument('-v', '--verbose', default=False, action='store_true',
                    help='Do not redirect stdout and stderr')
parser.add_argument('-q', '--quiet', default=False, action='store_true',
                    help='Produce less output to the terminal.')
parser.add_argument('-t', '--timeout-multiplier', type=float, default=None,
                    help='Define a multiplier for test timeout, for example '
                    ' when running tests in particular conditions they might take'
                    ' more time to execute.')
parser.add_argument('--setup', default=None, dest='setup',
                    help='Which test setup to use.')
parser.add_argument('--test-args', default=[], type=shlex.split,
                    help='Arguments to pass to the specified test(s) or all tests')
parser.add_argument('args', nargs='*',
                    help='Optional list of tests to run')
+
+
class TestException(mesonlib.MesonException):
    """Raised for test-runner-specific errors (e.g. not a build directory)."""
    pass
+
+
class TestRun:
    """Outcome of one executed test: status string, exit code, timing,
    captured output, the command that was run and its environment."""

    def __init__(self, res, returncode, should_fail, duration, stdo, stde, cmd,
                 env):
        self.res = res
        self.returncode = returncode
        self.should_fail = should_fail
        self.duration = duration
        self.stdo = stdo
        self.stde = stde
        self.cmd = cmd
        self.env = env

    def get_log(self):
        """Render a human-readable log block for this run."""
        if self.cmd is None:
            cmd_part = 'NONE\n'
        else:
            env_part = ''.join(["%s='%s' " % (k, v) for k, v in self.env.items()])
            cmd_part = "%s%s\n" % (env_part, ' '.join(self.cmd))
        log = '--- command ---\n' + cmd_part
        if self.stdo:
            log += '--- stdout ---\n' + self.stdo
        if self.stde:
            if not log.endswith('\n'):
                log += '\n'
            log += '--- stderr ---\n' + self.stde
        if not log.endswith('\n'):
            log += '\n'
        return log + '-------\n\n'
+
def decode(stream):
    """Decode captured test output bytes to str.

    None becomes ''.  UTF-8 is tried first; anything that is not valid
    UTF-8 falls back to lossy latin-1 decoding.
    """
    if stream is None:
        return ''
    try:
        text = stream.decode('utf-8')
    except UnicodeDecodeError:
        text = stream.decode('iso-8859-1', errors='ignore')
    return text
+
def write_json_log(jsonlogfile, test_name, result):
    """Append one JSON-lines record describing *result* to *jsonlogfile*."""
    record = {
        'name': test_name,
        'stdout': result.stdo,
        'result': result.res,
        'duration': result.duration,
        'returncode': result.returncode,
        'command': result.cmd,
    }
    if isinstance(result.env, dict):
        record['env'] = result.env
    else:
        # Non-dict env objects expand themselves against the live environment.
        record['env'] = result.env.get_env(os.environ)
    if result.stde:
        record['stderr'] = result.stde
    jsonlogfile.write(json.dumps(record) + '\n')
+
def run_with_mono(fname):
    """Return True if *fname* is a .exe that must be launched via mono,
    i.e. we are not on Windows or Cygwin where it runs natively.

    Idiom fix: return the boolean expression directly instead of the
    'if cond: return True / return False' pattern.
    """
    return fname.endswith('.exe') and not (is_windows() or is_cygwin())
+
+class TestHarness:
+ def __init__(self, options):
+ self.options = options
+ self.collected_logs = []
+ self.fail_count = 0
+ self.success_count = 0
+ self.skip_count = 0
+ self.timeout_count = 0
+ self.is_run = False
+ self.tests = None
+ self.suites = None
+ self.logfilename = None
+ self.logfile = None
+ self.jsonlogfile = None
+ if self.options.benchmark:
+ datafile = os.path.join(options.wd, 'meson-private', 'meson_benchmark_setup.dat')
+ else:
+ datafile = os.path.join(options.wd, 'meson-private', 'meson_test_setup.dat')
+ if not os.path.isfile(datafile):
+ raise TestException('Directory %s does not seem to be a Meson build directory.' % options.wd)
+ self.load_datafile(datafile)
+
+ def __del__(self):
+ if self.logfile:
+ self.logfile.close()
+ if self.jsonlogfile:
+ self.jsonlogfile.close()
+
+ def run_single_test(self, wrap, test):
+ if test.fname[0].endswith('.jar'):
+ cmd = ['java', '-jar'] + test.fname
+ elif not test.is_cross_built and run_with_mono(test.fname[0]):
+ cmd = ['mono'] + test.fname
+ else:
+ if test.is_cross_built:
+ if test.exe_runner is None:
+ # Can not run test on cross compiled executable
+ # because there is no execute wrapper.
+ cmd = None
+ else:
+ cmd = [test.exe_runner] + test.fname
+ else:
+ cmd = test.fname
+
+ if cmd is None:
+ res = 'SKIP'
+ duration = 0.0
+ stdo = 'Not run because can not execute cross compiled binaries.'
+ stde = None
+ returncode = GNU_SKIP_RETURNCODE
+ else:
+ cmd = wrap + cmd + test.cmd_args + self.options.test_args
+ starttime = time.time()
+ child_env = os.environ.copy()
+ child_env.update(self.options.global_env.get_env(child_env))
+ if isinstance(test.env, build.EnvironmentVariables):
+ test.env = test.env.get_env(child_env)
+
+ child_env.update(test.env)
+ if len(test.extra_paths) > 0:
+ child_env['PATH'] = os.pathsep.join(test.extra_paths + ['']) + child_env['PATH']
+
+ # If MALLOC_PERTURB_ is not set, or if it is set to an empty value,
+ # (i.e., the test or the environment don't explicitly set it), set
+ # it ourselves. We do this unconditionally because it is extremely
+ # useful to have in tests.
+ # Setting MALLOC_PERTURB_="0" will completely disable this feature.
+ if 'MALLOC_PERTURB_' not in child_env or not child_env['MALLOC_PERTURB_']:
+ child_env['MALLOC_PERTURB_'] = str(random.randint(1, 255))
+
+ setsid = None
+ stdout = None
+ stderr = None
+ if not self.options.verbose:
+ stdout = subprocess.PIPE
+ stderr = subprocess.PIPE if self.options and self.options.split else subprocess.STDOUT
+
+ if not is_windows():
+ setsid = os.setsid
+
+ p = subprocess.Popen(cmd,
+ stdout=stdout,
+ stderr=stderr,
+ env=child_env,
+ cwd=test.workdir,
+ preexec_fn=setsid)
+ timed_out = False
+ if test.timeout is None:
+ timeout = None
+ else:
+ timeout = test.timeout * self.options.timeout_multiplier
+ try:
+ (stdo, stde) = p.communicate(timeout=timeout)
+ except subprocess.TimeoutExpired:
+ if self.options.verbose:
+ print("%s time out (After %d seconds)" % (test.name, timeout))
+ timed_out = True
+ # Python does not provide multiplatform support for
+ # killing a process and all its children so we need
+ # to roll our own.
+ if is_windows():
+ subprocess.call(['taskkill', '/F', '/T', '/PID', str(p.pid)])
+ else:
+ try:
+ os.killpg(os.getpgid(p.pid), signal.SIGKILL)
+ except ProcessLookupError:
+ # Sometimes (e.g. with Wine) this happens.
+ # There's nothing we can do (maybe the process
+ # already died) so carry on.
+ pass
+ (stdo, stde) = p.communicate()
+ endtime = time.time()
+ duration = endtime - starttime
+ stdo = decode(stdo)
+ if stde:
+ stde = decode(stde)
+ if timed_out:
+ res = 'TIMEOUT'
+ self.timeout_count += 1
+ self.fail_count += 1
+ elif p.returncode == GNU_SKIP_RETURNCODE:
+ res = 'SKIP'
+ self.skip_count += 1
+ elif test.should_fail == bool(p.returncode):
+ res = 'OK'
+ self.success_count += 1
+ else:
+ res = 'FAIL'
+ self.fail_count += 1
+ returncode = p.returncode
+ result = TestRun(res, returncode, test.should_fail, duration, stdo, stde, cmd, test.env)
+
+ return result
+
+ def print_stats(self, numlen, tests, name, result, i):
+ startpad = ' ' * (numlen - len('%d' % (i + 1)))
+ num = '%s%d/%d' % (startpad, i + 1, len(tests))
+ padding1 = ' ' * (38 - len(name))
+ padding2 = ' ' * (8 - len(result.res))
+ result_str = '%s %s %s%s%s%5.2f s' % \
+ (num, name, padding1, result.res, padding2, result.duration)
+ if not self.options.quiet or result.res != 'OK':
+ if result.res != 'OK' and mlog.colorize_console:
+ if result.res == 'FAIL' or result.res == 'TIMEOUT':
+ decorator = mlog.red
+ elif result.res == 'SKIP':
+ decorator = mlog.yellow
+ else:
+ sys.exit('Unreachable code was ... well ... reached.')
+ print(decorator(result_str).get_text(True))
+ else:
+ print(result_str)
+ result_str += "\n\n" + result.get_log()
+ if (result.returncode != GNU_SKIP_RETURNCODE) \
+ and (result.returncode != 0) != result.should_fail:
+ if self.options.print_errorlogs:
+ self.collected_logs.append(result_str)
+ if self.logfile:
+ self.logfile.write(result_str)
+ if self.jsonlogfile:
+ write_json_log(self.jsonlogfile, name, result)
+
+ def print_summary(self):
+ msg = '''
+OK: %4d
+FAIL: %4d
+SKIP: %4d
+TIMEOUT: %4d
+''' % (self.success_count, self.fail_count, self.skip_count, self.timeout_count)
+ print(msg)
+ if self.logfile:
+ self.logfile.write(msg)
+
+ def print_collected_logs(self):
+ if len(self.collected_logs) > 0:
+ if len(self.collected_logs) > 10:
+ print('\nThe output from 10 first failed tests:\n')
+ else:
+ print('\nThe output from the failed tests:\n')
+ for log in self.collected_logs[:10]:
+ lines = log.splitlines()
+ if len(lines) > 104:
+ print('\n'.join(lines[0:4]))
+ print('--- Listing only the last 100 lines from a long log. ---')
+ lines = lines[-100:]
+ for line in lines:
+ print(line)
+
+ def doit(self):
+ if self.is_run:
+ raise RuntimeError('Test harness object can only be used once.')
+ if not os.path.isfile(self.datafile):
+ print('Test data file. Probably this means that you did not run this in the build directory.')
+ return 1
+ self.is_run = True
+ tests = self.get_tests()
+ if not tests:
+ return 0
+ self.run_tests(tests)
+ return self.fail_count
+
+ @staticmethod
+ def split_suite_string(suite):
+ if ':' in suite:
+ return suite.split(':', 1)
+ else:
+ return suite, ""
+
+ @staticmethod
+ def test_in_suites(test, suites):
+ for suite in suites:
+ (prj_match, st_match) = TestHarness.split_suite_string(suite)
+ for prjst in test.suite:
+ (prj, st) = TestHarness.split_suite_string(prjst)
+ if prj_match and prj != prj_match:
+ continue
+ if st_match and st != st_match:
+ continue
+ return True
+ return False
+
+ def test_suitable(self, test):
+ return (not self.options.include_suites or TestHarness.test_in_suites(test, self.options.include_suites)) \
+ and not TestHarness.test_in_suites(test, self.options.exclude_suites)
+
+ def load_suites(self):
+ ss = set()
+ for t in self.tests:
+ for s in t.suite:
+ ss.add(s)
+ self.suites = list(ss)
+
+ def load_tests(self):
+ with open(self.datafile, 'rb') as f:
+ self.tests = pickle.load(f)
+
+ def load_datafile(self, datafile):
+ self.datafile = datafile
+ self.load_tests()
+ self.load_suites()
+
+ def get_tests(self):
+ if not self.tests:
+ print('No tests defined.')
+ return []
+
+ if len(self.options.include_suites) or len(self.options.exclude_suites):
+ tests = []
+ for tst in self.tests:
+ if self.test_suitable(tst):
+ tests.append(tst)
+ else:
+ tests = self.tests
+
+ if self.options.args:
+ tests = [t for t in tests if t.name in self.options.args]
+
+ if not tests:
+ print('No suitable tests defined.')
+ return []
+
+ for test in tests:
+ test.rebuilt = False
+
+ return tests
+
+ def open_log_files(self):
+ if not self.options.logbase or self.options.verbose:
+ return None, None, None, None
+
+ namebase = None
+ logfile_base = os.path.join(self.options.wd, 'meson-logs', self.options.logbase)
+
+ if self.options.wrapper:
+ namebase = os.path.split(self.get_wrapper()[0])[1]
+ elif self.options.setup:
+ namebase = self.options.setup
+
+ if namebase:
+ logfile_base += '-' + namebase.replace(' ', '_')
+ self.logfilename = logfile_base + '.txt'
+ self.jsonlogfilename = logfile_base + '.json'
+
+ self.jsonlogfile = open(self.jsonlogfilename, 'w')
+ self.logfile = open(self.logfilename, 'w')
+
+ self.logfile.write('Log of Meson test suite run on %s\n\n'
+ % datetime.datetime.now().isoformat())
+
+ def get_wrapper(self):
+ wrap = []
+ if self.options.gdb:
+ wrap = ['gdb', '--quiet', '--nh']
+ if self.options.repeat > 1:
+ wrap += ['-ex', 'run', '-ex', 'quit']
+ # Signal the end of arguments to gdb
+ wrap += ['--args']
+ if self.options.wrapper:
+ wrap += self.options.wrapper
+ assert(isinstance(wrap, list))
+ return wrap
+
+ def get_pretty_suite(self, test):
+ if len(self.suites) > 1:
+ rv = TestHarness.split_suite_string(test.suite[0])[0]
+ s = "+".join(TestHarness.split_suite_string(s)[1] for s in test.suite)
+ if len(s):
+ rv += ":"
+ return rv + s + " / " + test.name
+ else:
+ return test.name
+
def run_tests(self, tests):
    """Run all given tests, honoring --repeat, --gdb and each test's
    is_parallel flag.

    Serial tests run inline; parallel ones are submitted to a lazily
    created thread pool. The original working directory is restored even
    if something raises.
    """
    executor = None
    futures = []
    # Field width needed to right-align test indices in progress output.
    numlen = len('%d' % len(tests))
    self.open_log_files()
    wrap = self.get_wrapper()
    startdir = os.getcwd()
    if self.options.wd:
        os.chdir(self.options.wd)

    try:
        for _ in range(self.options.repeat):
            for i, test in enumerate(tests):
                visible_name = self.get_pretty_suite(test)

                if self.options.gdb:
                    # Interactive debugging: never time the test out.
                    test.timeout = None

                if not test.is_parallel or self.options.gdb:
                    # Serial (or gdb) test: wait for all pending parallel
                    # tests first so output stays ordered, then run inline.
                    self.drain_futures(futures)
                    futures = []
                    res = self.run_single_test(wrap, test)
                    self.print_stats(numlen, tests, visible_name, res, i)
                else:
                    if not executor:
                        # Pool is created lazily on the first parallel test.
                        executor = conc.ThreadPoolExecutor(max_workers=self.options.num_processes)
                    f = executor.submit(self.run_single_test, wrap, test)
                    futures.append((f, numlen, tests, visible_name, i))
                # In --repeat mode, stop on the first recorded failure.
                if self.options.repeat > 1 and self.fail_count:
                    break
            if self.options.repeat > 1 and self.fail_count:
                break

        self.drain_futures(futures)
        self.print_summary()
        self.print_collected_logs()

        if self.logfilename:
            print('Full log written to %s' % self.logfilename)
    finally:
        os.chdir(startdir)
+
def drain_futures(self, futures):
    """Wait for all pending test futures and print their results.

    *futures* is a list of (future, numlen, tests, name, index) tuples as
    queued by run_tests().
    """
    for (result, numlen, tests, name, idx) in futures:
        if self.options.repeat > 1 and self.fail_count:
            # A test already failed in --repeat mode: try to cancel the
            # remaining queued work. If cancel() succeeds the future never
            # ran, and calling result() on it would raise CancelledError —
            # the previous code did exactly that and crashed — so skip it.
            if result.cancel():
                continue
        if self.options.verbose:
            # Propagate any worker exception immediately in verbose mode.
            result.result()
        self.print_stats(numlen, tests, name, result.result(), idx)
+
def run_special(self):
    """Tests run by the user, usually something like "under gdb 1000 times".

    Returns the number of failed tests.
    """
    if self.is_run:
        raise RuntimeError('Can not use run_special after a full run.')
    special_tests = self.get_tests()
    if not special_tests:
        return 0
    self.run_tests(special_tests)
    return self.fail_count
+
+
def list_tests(th):
    """Print the pretty name of every test known to the given TestHarness."""
    for t in th.get_tests():
        print(th.get_pretty_suite(t))
+
def merge_suite_options(options):
    """Fold the chosen test setup's settings into the parsed options.

    Explicit command-line values win over the setup's values; specifying
    an exe wrapper both on the command line and in the setup is an error.
    Returns the environment object stored in the setup.
    """
    datafile = os.path.join(options.wd, 'meson-private/build.dat')
    with open(datafile, 'rb') as bf:
        build = pickle.load(bf)
    try:
        setup = build.test_setups[options.setup]
    except KeyError:
        sys.exit('Unknown test setup: %s' % options.setup)
    if not options.gdb:
        options.gdb = setup.gdb
    if options.timeout_multiplier is None:
        options.timeout_multiplier = setup.timeout_multiplier
#    if options.env is None:
#        options.env = setup.env # FIXME, should probably merge options here.
    if options.wrapper is not None and setup.exe_wrapper is not None:
        sys.exit('Conflict: both test setup and command line specify an exe wrapper.')
    if options.wrapper is None:
        options.wrapper = setup.exe_wrapper
    return setup.env
+
def rebuild_all(wd):
    """Rebuild the project in *wd* with ninja before running tests.

    Returns True when the test run may proceed (build succeeded, or the
    backend is not ninja and the rebuild is simply skipped), False when
    ninja is missing or the rebuild failed.
    """
    if not os.path.isfile(os.path.join(wd, 'build.ninja')):
        print("Only ninja backend is supported to rebuild tests before running them.")
        # Not fatal: other backends just skip the automatic rebuild.
        return True

    ninja = environment.detect_ninja()
    if not ninja:
        print("Can't find ninja, can't rebuild test.")
        return False

    proc = subprocess.Popen([ninja, '-C', wd])
    proc.communicate()
    if proc.returncode != 0:
        print("Could not rebuild")
        return False
    return True
+
def run(args):
    """Entry point for 'meson test': parse arguments, validate them,
    optionally rebuild the project, then run or list the tests.

    Returns a process exit code (0 on success).
    """
    options = parser.parse_args(args)

    if options.benchmark:
        # Benchmarks must not compete with each other for resources.
        options.num_processes = 1

    if options.setup is not None:
        global_env = merge_suite_options(options)
    else:
        global_env = build.EnvironmentVariables()
        if options.timeout_multiplier is None:
            options.timeout_multiplier = 1
    options.global_env = global_env

    if options.verbose and options.quiet:
        print('Can not be both quiet and verbose at the same time.')
        return 1

    # Work out which helper binary (gdb or the wrapper) must exist.
    check_bin = None
    if options.gdb:
        options.verbose = True
        if options.wrapper:
            print('Must not specify both a wrapper and gdb at the same time.')
            return 1
        check_bin = 'gdb'
    if options.wrapper:
        check_bin = options.wrapper[0]
    if check_bin is not None:
        exe = ExternalProgram(check_bin, silent=True)
        if not exe.found():
            sys.exit("Could not find requested program: %s" % check_bin)

    options.wd = os.path.abspath(options.wd)

    if not options.list and not options.no_rebuild:
        if not rebuild_all(options.wd):
            sys.exit(-1)

    try:
        th = TestHarness(options)
        if options.list:
            list_tests(th)
            return 0
        if not options.args:
            return th.doit()
        return th.run_special()
    except TestException as e:
        print('Meson test encountered an error:\n')
        print(e)
        return 1
--- /dev/null
+# Copyright 2013-2014 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os, re
+import functools
+
+from . import mparser
+from . import coredata
+from . import mesonlib
+
+
# Option names that clash with Meson's built-in options are reserved.
forbidden_option_names = coredata.get_builtin_options()
# Reserved per-language / per-section option namespaces.
forbidden_prefixes = {'c_', 'cpp_', 'd_', 'rust_', 'fortran_', 'objc_',
                      'objcpp_', 'vala_', 'csharp_', 'swift_', 'b_',
                      'backend_'}

def is_invalid_name(name):
    """Return True when *name* is reserved for Meson's own use, either as
    a builtin option or via a reserved prefix."""
    if name in forbidden_option_names:
        return True
    return name.split('_')[0] + '_' in forbidden_prefixes
+
class OptionException(mesonlib.MesonException):
    """Error raised for any invalid or malformed option definition."""
    pass
+
+
def permitted_kwargs(permitted):
    """Decorator factory: reject keyword arguments outside *permitted*.

    The wrapped parser is called as func(name, description, kwargs); any
    key of kwargs not in the permitted set raises OptionException.
    """
    def _wraps(func):
        @functools.wraps(func)
        def _inner(name, description, kwargs):
            unknown = [key for key in kwargs if key not in permitted]
            if unknown:
                raise OptionException('Invalid kwargs for option "{}": "{}"'.format(
                    name, ' '.join(unknown)))
            return func(name, description, kwargs)
        return _inner
    return _wraps
+
+
# Matches any character that is NOT allowed in an option name.
optname_regex = re.compile('[^a-zA-Z0-9_-]')
+
@permitted_kwargs({'value'})
def StringParser(name, description, kwargs):
    """Create a UserStringOption; the value defaults to an empty string."""
    default = kwargs.get('value', '')
    # NOTE(review): 'choices' can never appear in kwargs here (the
    # decorator only permits 'value'), so this second get() always
    # produces [] — kept for call-signature compatibility.
    return coredata.UserStringOption(name, description, default,
                                     kwargs.get('choices', []))
+
@permitted_kwargs({'value'})
def BooleanParser(name, description, kwargs):
    """Create a UserBooleanOption; the value defaults to True."""
    default = kwargs.get('value', True)
    return coredata.UserBooleanOption(name, description, default)
+
@permitted_kwargs({'value', 'choices'})
def ComboParser(name, description, kwargs):
    """Create a UserComboOption.

    'choices' is mandatory and must be a list of strings; the default
    value is the first choice.
    """
    if 'choices' not in kwargs:
        raise OptionException('Combo option missing "choices" keyword.')
    choices = kwargs['choices']
    if not isinstance(choices, list):
        raise OptionException('Combo choices must be an array.')
    for choice in choices:
        if not isinstance(choice, str):
            raise OptionException('Combo choice elements must be strings.')
    default = kwargs.get('value', choices[0])
    return coredata.UserComboOption(name, description, choices, default)
+
@permitted_kwargs({'value', 'choices'})
def string_array_parser(name, description, kwargs):
    """Create a UserArrayOption.

    An optional 'choices' list constrains the permitted entries and also
    serves as the default value when no 'value' is given.
    """
    if 'choices' in kwargs:
        choices = kwargs['choices']
        if not isinstance(choices, list):
            raise OptionException('Array choices must be an array.')
        for entry in choices:
            if not isinstance(entry, str):
                raise OptionException('Array choice elements must be strings.')
        value = kwargs.get('value', choices)
    else:
        choices = None
        value = kwargs.get('value', [])
    if not isinstance(value, list):
        raise OptionException('Array choices must be passed as an array.')
    return coredata.UserArrayOption(name, description, value, choices=choices)
+
# Dispatch table mapping the 'type' keyword of option() to its parser.
option_types = {'string': StringParser,
                'boolean': BooleanParser,
                'combo': ComboParser,
                'array': string_array_parser,
                }
+
class OptionInterpreter:
    """Evaluates a meson_options.txt file and collects the declared options.

    Only option() calls are permitted in option files; each call is
    validated and turned into a coredata user option, stored in
    self.options keyed by (possibly subproject-prefixed) option name.
    """

    def __init__(self, subproject, command_line_options):
        # Final result: option name -> user option object.
        self.options = {}
        self.subproject = subproject
        # Subproject options arrive on the command line as 'subproj:name=value'.
        self.sbprefix = subproject + ':'
        self.cmd_line_options = {}
        for o in command_line_options:
            if self.subproject != '': # Strip the beginning.
                # Ignore options that aren't for this subproject
                if not o.startswith(self.sbprefix):
                    continue
            try:
                (key, value) = o.split('=', 1)
            except ValueError:
                raise OptionException('Option {!r} must have a value separated by equals sign.'.format(o))
            # Ignore subproject options if not fetching subproject options
            if self.subproject == '' and ':' in key:
                continue
            # NOTE: for subprojects the key keeps its 'subproj:' prefix;
            # evaluate_statement() prefixes opt_name the same way so the
            # lookup below still matches.
            self.cmd_line_options[key] = value

    def process(self, option_file):
        """Parse and evaluate *option_file*, populating self.options.

        Any failure is re-raised with file/line/column attributes attached
        for user-friendly error reporting.
        """
        try:
            with open(option_file, 'r', encoding='utf8') as f:
                ast = mparser.Parser(f.read(), '').parse()
        except mesonlib.MesonException as me:
            me.file = option_file
            raise me
        if not isinstance(ast, mparser.CodeBlockNode):
            e = OptionException('Option file is malformed.')
            e.lineno = ast.lineno()
            raise e
        for cur in ast.lines:
            try:
                self.evaluate_statement(cur)
            except Exception as e:
                # Annotate the failure with its location in the option file.
                e.lineno = cur.lineno
                e.colno = cur.colno
                e.file = os.path.join('meson_options.txt')
                raise e

    def reduce_single(self, arg):
        """Reduce one AST argument node to a plain Python value
        (str, bool, number, or a list of those)."""
        if isinstance(arg, str):
            return arg
        elif isinstance(arg, (mparser.StringNode, mparser.BooleanNode,
                              mparser.NumberNode)):
            return arg.value
        elif isinstance(arg, mparser.ArrayNode):
            return [self.reduce_single(curarg) for curarg in arg.args.arguments]
        else:
            raise OptionException('Arguments may only be string, int, bool, or array of those.')

    def reduce_arguments(self, args):
        """Reduce an ArgumentNode to (positional list, keyword dict)."""
        assert(isinstance(args, mparser.ArgumentNode))
        if args.incorrect_order():
            raise OptionException('All keyword arguments must be after positional arguments.')
        reduced_pos = [self.reduce_single(arg) for arg in args.arguments]
        reduced_kw = {}
        for key in args.kwargs.keys():
            if not isinstance(key, str):
                raise OptionException('Keyword argument name is not a string.')
            a = args.kwargs[key]
            reduced_kw[key] = self.reduce_single(a)
        return reduced_pos, reduced_kw

    def evaluate_statement(self, node):
        """Validate a single option() call and store the resulting option."""
        if not isinstance(node, mparser.FunctionNode):
            raise OptionException('Option file may only contain option definitions')
        func_name = node.func_name
        if func_name != 'option':
            raise OptionException('Only calls to option() are allowed in option files.')
        (posargs, kwargs) = self.reduce_arguments(node.args)
        if 'type' not in kwargs:
            raise OptionException('Option call missing mandatory "type" keyword argument')
        opt_type = kwargs.pop('type')
        if opt_type not in option_types:
            raise OptionException('Unknown type %s.' % opt_type)
        if len(posargs) != 1:
            raise OptionException('Option() must have one (and only one) positional argument')
        opt_name = posargs[0]
        if not isinstance(opt_name, str):
            raise OptionException('Positional argument must be a string.')
        if optname_regex.search(opt_name) is not None:
            raise OptionException('Option names can only contain letters, numbers or dashes.')
        if is_invalid_name(opt_name):
            raise OptionException('Option name %s is reserved.' % opt_name)
        if self.subproject != '':
            # Namespace subproject options, matching the prefixed keys
            # kept in cmd_line_options.
            opt_name = self.subproject + ':' + opt_name
        opt = option_types[opt_type](opt_name, kwargs.pop('description', ''), kwargs)
        if opt.description == '':
            opt.description = opt_name
        if opt_name in self.cmd_line_options:
            # A command-line value overrides the default from the file.
            opt.set_value(self.cmd_line_options[opt_name])
        self.options[opt_name] = opt
--- /dev/null
+#!/usr/bin/env python3
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This class contains the basic functionality needed to run any interpreter
+# or an interpreter-based tool.
+
+# This tool is used to manipulate an existing Meson build definition.
+#
+# - add a file to a target
+# - remove files from a target
+# - move targets
+# - reindent?
+
+import mesonbuild.astinterpreter
+from mesonbuild.mesonlib import MesonException
+from mesonbuild import mlog
+import sys, traceback
+import argparse
+
# Command line definition for the standalone 'meson rewrite' tool.
parser = argparse.ArgumentParser(prog='meson rewrite')

parser.add_argument('--sourcedir', default='.',
                    help='Path to source directory.')
parser.add_argument('--target', default=None,
                    help='Name of target to edit.')
parser.add_argument('--filename', default=None,
                    help='Name of source file to add or remove to target.')
# Positional: the rewrite command ('add' or 'remove') and its arguments.
parser.add_argument('commands', nargs='+')
+
def run(args):
    """Entry point for 'meson rewrite'.

    Dispatches on the first positional command ('add' or 'remove') and
    returns a process exit code.
    """
    options = parser.parse_args(args)
    if options.target is None or options.filename is None:
        sys.exit("Must specify both target and filename.")
    print('This tool is highly experimental, use with care.')
    rewriter = mesonbuild.astinterpreter.AstInterpreter(options.sourcedir, '')
    try:
        command = options.commands[0]
        if command == 'add':
            rewriter.add_source(options.target, options.filename)
        elif command == 'remove':
            rewriter.remove_source(options.target, options.filename)
        else:
            sys.exit('Unknown command: ' + command)
    except MesonException as e:
        # Meson's own errors carry location info when available.
        if hasattr(e, 'file') and hasattr(e, 'lineno') and hasattr(e, 'colno'):
            mlog.log(mlog.red('\nMeson encountered an error in file %s, line %d, column %d:' % (e.file, e.lineno, e.colno)))
        else:
            mlog.log(mlog.red('\nMeson encountered an error:'))
        mlog.log(e)
        return 1
    except Exception:
        traceback.print_exc()
        return 1
    return 0
--- /dev/null
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
def destdir_join(d1, d2):
    """Join a DESTDIR staging prefix *d1* with an absolute install path *d2*.

    'c:\\destdir' + 'c:\\prefix' must produce 'c:\\destdir\\prefix', not
    'c:\\destdir\\c:\\prefix', so when both sides carry a drive letter the
    drive is stripped from *d2* before concatenation.
    """
    both_have_drive = (len(d1) > 1 and d1[1] == ':'
                       and len(d2) > 1 and d2[1] == ':')
    if both_have_drive:
        return d1 + d2[2:]
    return d1 + d2
--- /dev/null
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+import shutil
+import pickle
+
def rmtrees(build_dir, trees):
    """Delete the given directory trees, each interpreted relative to
    *build_dir*.

    Absolute paths are refused so nothing outside the build directory can
    ever be removed; entries that are missing or not directories are
    silently skipped.
    """
    for tree in trees:
        # Never delete trees outside of the builddir.
        if os.path.isabs(tree):
            print('Cannot delete dir with absolute path {!r}'.format(tree))
            continue
        victim = os.path.join(build_dir, tree)
        # Skip if it doesn't exist, or if it is not a directory.
        if os.path.isdir(victim):
            shutil.rmtree(victim, ignore_errors=True)
+
def run(args):
    """Read the pickled cleaning data file named by the sole argument and
    delete the directory trees it lists.

    Returns 1 on bad usage; otherwise always 0, so that cleaning can
    never fail the build.
    """
    if len(args) != 1:
        print('Cleaner script for Meson. Do not run on your own please.')
        print('cleantrees.py <data-file>')
        return 1
    with open(args[0], 'rb') as datafile:
        data = pickle.load(datafile)
    rmtrees(data.build_dir, data.trees)
    # Never fail cleaning.
    return 0

if __name__ == '__main__':
    run(sys.argv[1:])
--- /dev/null
+# Copyright 2014 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This program is a wrapper to run external commands. It determines
+what to run, sets up the environment and executes the command."""
+
+import sys, os, subprocess, shutil, shlex
+
def run_command(source_dir, build_dir, subdir, meson_command, command, arguments):
    """Launch *command* with the MESON_* environment variables set.

    The command is looked up in PATH first; failing that it is treated as
    a script inside the current subdir of the source tree. Returns the
    Popen object, or exits the process if the command cannot be started.
    """
    env = {'MESON_SOURCE_ROOT': source_dir,
           'MESON_BUILD_ROOT': build_dir,
           'MESON_SUBDIR': subdir,
           'MESONINTROSPECT': ' '.join([shlex.quote(x) for x in meson_command + ['introspect']]),
           }
    cwd = os.path.join(source_dir, subdir)
    child_env = os.environ.copy()
    child_env.update(env)

    # Prefer an executable found in PATH; otherwise assume it is a script
    # living in the source tree.
    exe = shutil.which(command)
    if exe is None:
        exe = os.path.join(source_dir, subdir, command)
    try:
        return subprocess.Popen([exe] + arguments, env=child_env, cwd=cwd)
    except FileNotFoundError:
        print('Could not execute command "%s". File not found.' % command)
        sys.exit(1)
    except PermissionError:
        print('Could not execute command "%s". File not executable.' % command)
        sys.exit(1)
    except OSError as err:
        print('Could not execute command "{}": {}'.format(command, err))
        sys.exit(1)
    except subprocess.SubprocessError as err:
        print('Could not execute command "{}": {}'.format(command, err))
        sys.exit(1)
+
+
def run(args):
    """Entry point: unpack the positional arguments and run the command.

    Usage: commandrunner.py <source dir> <build dir> <subdir> <command> [args]
    Returns the child process's exit code, or 1 on bad usage.
    """
    if len(args) < 4:
        print('commandrunner.py <source dir> <build dir> <subdir> <command> [arguments]')
        return 1
    src_dir, build_dir, subdir, meson_command = args[:4]
    if 'python' in meson_command: # Hack.
        # Meson was invoked as 'python .../meson.py': keep interpreter and
        # script together as the meson command.
        meson_command = [meson_command, args[4]]
        command = args[5]
        arguments = args[6:]
    else:
        meson_command = [meson_command]
        command = args[4]
        arguments = args[5:]
    pc = run_command(src_dir, build_dir, subdir, meson_command, command, arguments)
    pc.wait()
    return pc.returncode

if __name__ == '__main__':
    sys.exit(run(sys.argv[1:]))
--- /dev/null
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from mesonbuild import environment
+
+import sys, os, subprocess
+
def remove_dir_from_trace(lcov_command, covfile, dirname):
    """Strip all entries matching *dirname* from an lcov tracefile, in place.

    lcov cannot filter in place, so filter into a temp file and atomically
    move it over the original.
    """
    filtered = covfile + '.tmp'
    subprocess.check_call([lcov_command, '--remove', covfile, dirname, '-o', filtered])
    os.replace(filtered, covfile)
+
def coverage(source_root, build_root, log_dir):
    """Generate coverage reports into *log_dir* with the available tools.

    gcovr (if found) produces coverage.xml and coverage.txt; lcov plus
    genhtml (if both found) produce an HTML report under coveragereport/.
    Missing tools are silently skipped. Returns 0.
    """
    (gcovr_exe, lcov_exe, genhtml_exe) = environment.find_coverage_tools()
    if gcovr_exe:
        # XML report (e.g. for CI consumption).
        subprocess.check_call([gcovr_exe,
                               '-x',
                               '-r', source_root,
                               '-o', os.path.join(log_dir, 'coverage.xml'),
                               ])
        # Plain-text summary.
        subprocess.check_call([gcovr_exe,
                               '-r', source_root,
                               '-o', os.path.join(log_dir, 'coverage.txt'),
                               ])
    if lcov_exe and genhtml_exe:
        htmloutdir = os.path.join(log_dir, 'coveragereport')
        covinfo = os.path.join(log_dir, 'coverage.info')
        initial_tracefile = covinfo + '.initial'
        run_tracefile = covinfo + '.run'
        # Baseline capture (--initial) so files never exercised by the
        # tests still appear in the report.
        subprocess.check_call([lcov_exe,
                               '--directory', build_root,
                               '--capture',
                               '--initial',
                               '--output-file',
                               initial_tracefile])
        # Capture of the actual data produced by the test run.
        subprocess.check_call([lcov_exe,
                               '--directory', build_root,
                               '--capture',
                               '--output-file', run_tracefile,
                               '--no-checksum',
                               '--rc', 'lcov_branch_coverage=1',
                               ])
        # Join initial and test results.
        subprocess.check_call([lcov_exe,
                               '-a', initial_tracefile,
                               '-a', run_tracefile,
                               '-o', covinfo])
        # Drop system headers and sources from the combined trace.
        remove_dir_from_trace(lcov_exe, covinfo, '/usr/include/*')
        remove_dir_from_trace(lcov_exe, covinfo, '/usr/local/include/*')
        remove_dir_from_trace(lcov_exe, covinfo, '/usr/src/*')
        subprocess.check_call([genhtml_exe,
                               '--prefix', build_root,
                               '--output-directory', htmloutdir,
                               '--title', 'Code coverage',
                               '--legend',
                               '--show-details',
                               '--branch-coverage',
                               covinfo])
    return 0
+
def run(args):
    """Generate coverage reports; *args* is [source_root, build_root, log_dir].

    Returns 1 when not run from a ninja build directory.
    """
    if not os.path.isfile('build.ninja'):
        print('Coverage currently only works with the Ninja backend.')
        return 1
    source_root, build_root, log_dir = args
    return coverage(source_root, build_root, log_dir)

if __name__ == '__main__':
    sys.exit(run(sys.argv[1:]))
--- /dev/null
+# Copyright 2013 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os, sys
+
def run(args):
    """Delete every file under a directory tree whose name ends with a
    given suffix.

    *args* is [top directory, suffix]; a leading dot is prepended to the
    suffix when missing. Exits with status 1 on bad usage, returns 0
    otherwise.
    """
    if len(args) != 2:
        print('delwithsuffix.py <root of subdir to process> <suffix to delete>')
        sys.exit(1)

    topdir, suffix = args
    if suffix[0] != '.':
        suffix = '.' + suffix

    for root, _, files in os.walk(topdir):
        for name in files:
            if name.endswith(suffix):
                os.unlink(os.path.join(root, name))
    return 0

if __name__ == '__main__':
    run(sys.argv[1:])
--- /dev/null
+# Copyright 2013-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import sys, struct
+
# ELF section header type: string table.
SHT_STRTAB = 3
# Dynamic section (.dynamic) entry tags, as defined by the ELF spec.
DT_NEEDED = 1
DT_RPATH = 15
DT_RUNPATH = 29
DT_STRTAB = 5
DT_SONAME = 14
# MIPS-specific (0x70000035): debug-map offset stored relative to the
# tag's own position, see remove_rpath_entry below.
DT_MIPS_RLD_MAP_REL = 1879048245
+
class DataSizes:
    """struct format strings and byte sizes for the primitive ELF data
    types, parameterized by pointer size (32/64) and endianness."""

    def __init__(self, ptrsize, is_le):
        endian = '<' if is_le else '>'
        self.Half = endian + 'h'
        self.HalfSize = 2
        self.Word = endian + 'I'
        self.WordSize = 4
        self.Sword = endian + 'i'
        self.SwordSize = 4
        if ptrsize == 64:
            self.Addr = endian + 'Q'
            self.AddrSize = 8
            self.Off = endian + 'Q'
            self.OffSize = 8
            self.XWord = endian + 'Q'
            self.XWordSize = 8
            self.Sxword = endian + 'q'
            self.SxwordSize = 8
        else:
            # 32-bit ELF has no Xword/Sxword types; Addr/Off shrink to 4 bytes.
            self.Addr = endian + 'I'
            self.AddrSize = 4
            self.Off = endian + 'I'
            self.OffSize = 4

class DynamicEntry(DataSizes):
    """A single (d_tag, value) entry of the .dynamic section."""

    def __init__(self, ifile, ptrsize, is_le):
        super().__init__(ptrsize, is_le)
        self.ptrsize = ptrsize
        if ptrsize == 64:
            tag_fmt, tag_size = self.Sxword, self.SxwordSize
            val_fmt, val_size = self.XWord, self.XWordSize
        else:
            tag_fmt, tag_size = self.Sword, self.SwordSize
            val_fmt, val_size = self.Word, self.WordSize
        self.d_tag = struct.unpack(tag_fmt, ifile.read(tag_size))[0]
        self.val = struct.unpack(val_fmt, ifile.read(val_size))[0]

    def write(self, ofile):
        """Serialize the entry back in the same layout it was read with."""
        if self.ptrsize == 64:
            ofile.write(struct.pack(self.Sxword, self.d_tag))
            ofile.write(struct.pack(self.XWord, self.val))
        else:
            ofile.write(struct.pack(self.Sword, self.d_tag))
            ofile.write(struct.pack(self.Word, self.val))
+
class SectionHeader(DataSizes):
    """One ELF section header, consumed field-by-field from *ifile*.

    The reads must happen in exactly this order since the header is read
    sequentially; fields that are Xword-sized on 64-bit ELF shrink to
    Word on 32-bit, hence the is_64 guards.
    """
    def __init__(self, ifile, ptrsize, is_le):
        super().__init__(ptrsize, is_le)
        if ptrsize == 64:
            is_64 = True
        else:
            is_64 = False
        # Elf64_Word: offset of the section name in the string table.
        self.sh_name = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
        # Elf64_Word
        self.sh_type = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
        # Elf64_Xword
        if is_64:
            self.sh_flags = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0]
        else:
            self.sh_flags = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
        # Elf64_Addr
        self.sh_addr = struct.unpack(self.Addr, ifile.read(self.AddrSize))[0]
        # Elf64_Off: file offset of the section's contents.
        self.sh_offset = struct.unpack(self.Off, ifile.read(self.OffSize))[0]
        # Elf64_Xword
        if is_64:
            self.sh_size = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0]
        else:
            self.sh_size = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
        # Elf64_Word
        self.sh_link = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
        # Elf64_Word
        self.sh_info = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
        # Elf64_Xword
        if is_64:
            self.sh_addralign = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0]
        else:
            self.sh_addralign = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
        # Elf64_Xword
        if is_64:
            self.sh_entsize = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0]
        else:
            self.sh_entsize = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+
class Elf(DataSizes):
    """Minimal ELF reader/writer used to inspect and patch the .dynamic
    section of a binary in place (DT_NEEDED, DT_RPATH/DT_RUNPATH,
    DT_SONAME)."""

    def __init__(self, bfile, verbose=True):
        self.bfile = bfile
        self.verbose = verbose
        # Opened read-write: the fix_* methods patch the file in place.
        self.bf = open(bfile, 'r+b')
        try:
            (self.ptrsize, self.is_le) = self.detect_elf_type()
            super().__init__(self.ptrsize, self.is_le)
            self.parse_header()
            self.parse_sections()
            self.parse_dynamic()
        except:
            # Close the handle on any parse failure, then re-raise.
            self.bf.close()
            raise

    def __enter__(self):
        return self

    def __del__(self):
        # Last-resort cleanup when the object is used without 'with'.
        if self.bf:
            self.bf.close()

    def __exit__(self, exc_type, exc_value, traceback):
        self.bf.close()
        self.bf = None

    def detect_elf_type(self):
        """Inspect the e_ident bytes; return (pointer size in bits,
        is_little_endian). Exits the process for non-ELF or unknown
        class/endianness."""
        data = self.bf.read(6)
        if data[1:4] != b'ELF':
            # This script gets called to non-elf targets too
            # so just ignore them.
            if self.verbose:
                print('File "%s" is not an ELF file.' % self.bfile)
            sys.exit(0)
        if data[4] == 1:
            ptrsize = 32
        elif data[4] == 2:
            ptrsize = 64
        else:
            sys.exit('File "%s" has unknown ELF class.' % self.bfile)
        if data[5] == 1:
            is_le = True
        elif data[5] == 2:
            is_le = False
        else:
            sys.exit('File "%s" has unknown ELF endianness.' % self.bfile)
        return ptrsize, is_le

    def parse_header(self):
        """Read the fixed ELF header fields, sequentially from offset 0."""
        self.bf.seek(0)
        self.e_ident = struct.unpack('16s', self.bf.read(16))[0]
        self.e_type = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
        self.e_machine = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
        self.e_version = struct.unpack(self.Word, self.bf.read(self.WordSize))[0]
        self.e_entry = struct.unpack(self.Addr, self.bf.read(self.AddrSize))[0]
        self.e_phoff = struct.unpack(self.Off, self.bf.read(self.OffSize))[0]
        self.e_shoff = struct.unpack(self.Off, self.bf.read(self.OffSize))[0]
        self.e_flags = struct.unpack(self.Word, self.bf.read(self.WordSize))[0]
        self.e_ehsize = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
        self.e_phentsize = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
        self.e_phnum = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
        self.e_shentsize = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
        self.e_shnum = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
        self.e_shstrndx = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]

    def parse_sections(self):
        """Read all e_shnum section headers starting at e_shoff."""
        self.bf.seek(self.e_shoff)
        self.sections = []
        for i in range(self.e_shnum):
            self.sections.append(SectionHeader(self.bf, self.ptrsize, self.is_le))

    def read_str(self):
        """Read a NUL-terminated byte string at the current file offset."""
        arr = []
        x = self.bf.read(1)
        while x != b'\0':
            arr.append(x)
            x = self.bf.read(1)
            if x == b'':
                raise RuntimeError('Tried to read past the end of the file')
        return b''.join(arr)

    def find_section(self, target_name):
        """Return the section whose name equals *target_name* (bytes),
        or None when absent (implicit)."""
        # e_shstrndx points at the section-name string table.
        section_names = self.sections[self.e_shstrndx]
        for i in self.sections:
            self.bf.seek(section_names.sh_offset + i.sh_name)
            name = self.read_str()
            if name == target_name:
                return i

    def parse_dynamic(self):
        """Collect .dynamic entries, up to and including the DT_NULL
        terminator (d_tag == 0). Leaves self.dynamic empty for static
        binaries."""
        sec = self.find_section(b'.dynamic')
        self.dynamic = []
        if sec is None:
            return
        self.bf.seek(sec.sh_offset)
        while True:
            e = DynamicEntry(self.bf, self.ptrsize, self.is_le)
            self.dynamic.append(e)
            if e.d_tag == 0:
                break

    def print_section_names(self):
        """Print the decoded name of every section, one per line."""
        section_names = self.sections[self.e_shstrndx]
        for i in self.sections:
            self.bf.seek(section_names.sh_offset + i.sh_name)
            name = self.read_str()
            print(name.decode())

    def print_soname(self):
        """Print the DT_SONAME string, or a message when there is none."""
        soname = None
        strtab = None
        for i in self.dynamic:
            if i.d_tag == DT_SONAME:
                soname = i
            if i.d_tag == DT_STRTAB:
                strtab = i
        if soname is None or strtab is None:
            print("This file does not have a soname")
            return
        # NOTE(review): DT_STRTAB's value is used directly as a file
        # offset here, while get_entry_offset() uses the .dynstr section
        # offset instead; this assumes vaddr == file offset — confirm for
        # unusual binaries.
        self.bf.seek(strtab.val + soname.val)
        print(self.read_str())

    def get_entry_offset(self, entrynum):
        """Return the file offset of the string referenced by the dynamic
        entry with tag *entrynum*, or None if the tag is absent."""
        sec = self.find_section(b'.dynstr')
        for i in self.dynamic:
            if i.d_tag == entrynum:
                # The entry's value is an offset into .dynstr.
                return sec.sh_offset + i.val
        return None

    def print_rpath(self):
        """Print the DT_RPATH string, or a message when there is none."""
        offset = self.get_entry_offset(DT_RPATH)
        if offset is None:
            print("This file does not have an rpath.")
        else:
            self.bf.seek(offset)
            print(self.read_str())

    def print_runpath(self):
        """Print the DT_RUNPATH string, or a message when there is none."""
        offset = self.get_entry_offset(DT_RUNPATH)
        if offset is None:
            print("This file does not have a runpath.")
        else:
            self.bf.seek(offset)
            print(self.read_str())

    def print_deps(self):
        """Print the name of every DT_NEEDED (shared library) dependency."""
        sec = self.find_section(b'.dynstr')
        deps = []
        for i in self.dynamic:
            if i.d_tag == DT_NEEDED:
                deps.append(i)
        for i in deps:
            offset = sec.sh_offset + i.val
            self.bf.seek(offset)
            name = self.read_str()
            print(name)

    def fix_deps(self, prefix):
        """Rewrite DT_NEEDED entries that start with *prefix* to just their
        basename, padding with NULs so the string table size is unchanged."""
        sec = self.find_section(b'.dynstr')
        deps = []
        for i in self.dynamic:
            if i.d_tag == DT_NEEDED:
                deps.append(i)
        for i in deps:
            offset = sec.sh_offset + i.val
            self.bf.seek(offset)
            name = self.read_str()
            if name.startswith(prefix):
                basename = name.split(b'/')[-1]
                # Pad so the replacement occupies exactly the same bytes.
                padding = b'\0' * (len(name) - len(basename))
                newname = basename + padding
                assert(len(newname) == len(name))
                self.bf.seek(offset)
                self.bf.write(newname)

    def fix_rpath(self, new_rpath):
        # The path to search for can be either rpath or runpath.
        # Fix both of them to be sure.
        self.fix_rpathtype_entry(new_rpath, DT_RPATH)
        self.fix_rpathtype_entry(new_rpath, DT_RUNPATH)

    def fix_rpathtype_entry(self, new_rpath, entrynum):
        """Overwrite the rpath-type string for *entrynum* in place; remove
        the entry entirely when *new_rpath* is empty.

        The new rpath must not be longer than the old one since the string
        is patched in place inside the string table.
        """
        if isinstance(new_rpath, str):
            new_rpath = new_rpath.encode('utf8')
        rp_off = self.get_entry_offset(entrynum)
        if rp_off is None:
            if self.verbose:
                print('File does not have rpath. It should be a fully static executable.')
            return
        self.bf.seek(rp_off)
        old_rpath = self.read_str()
        if len(old_rpath) < len(new_rpath):
            sys.exit("New rpath must not be longer than the old one.")
        # The linker does read-only string deduplication. If there is a
        # string that shares a suffix with the rpath, they might get
        # dedupped. This means changing the rpath string might break something
        # completely unrelated. This has already happened once with X.org.
        # Thus we want to keep this change as small as possible to minimize
        # the chance of obliterating other strings. It might still happen
        # but our behavior is identical to what chrpath does and it has
        # been in use for ages so based on that this should be rare.
        if not new_rpath:
            self.remove_rpath_entry(entrynum)
        else:
            self.bf.seek(rp_off)
            # Write the new string plus its NUL terminator; the rest of
            # the old (longer) string is left in place.
            self.bf.write(new_rpath)
            self.bf.write(b'\0')

    def remove_rpath_entry(self, entrynum):
        """Drop the dynamic entry with tag *entrynum* by zeroing its tag and
        moving it to the end of the table, then rewrite the whole .dynamic
        section."""
        sec = self.find_section(b'.dynamic')
        if sec is None:
            return None
        for (i, entry) in enumerate(self.dynamic):
            if entry.d_tag == entrynum:
                rpentry = self.dynamic[i]
                # Tag 0 == DT_NULL, i.e. an end-of-table marker.
                rpentry.d_tag = 0
                self.dynamic = self.dynamic[:i] + self.dynamic[i + 1:] + [rpentry]
                break
        # DT_MIPS_RLD_MAP_REL is relative to the offset of the tag. Adjust it consequently.
        # NOTE(review): if no entry matched above, 'i' is the last index
        # from the loop (or undefined for an empty table) — confirm
        # callers only reach this with a matching tag present.
        for entry in self.dynamic[i:]:
            if entry.d_tag == DT_MIPS_RLD_MAP_REL:
                entry.val += 2 * (self.ptrsize // 8)
                break
        self.bf.seek(sec.sh_offset)
        for entry in self.dynamic:
            entry.write(self.bf)
        return None
+
def run(args):
    """Print (one argument) or rewrite (two arguments) the rpath of an
    ELF binary. Exits with status 1 on bad usage, returns 0 otherwise."""
    if not 1 <= len(args) <= 2:
        print('This application resets target rpath.')
        print('Don\'t run this unless you know what you are doing.')
        print('%s: <binary file> <prefix>' % sys.argv[0])
        sys.exit(1)
    with Elf(args[0]) as e:
        if len(args) == 1:
            # Inspection mode: show the current rpath and runpath.
            e.print_rpath()
            e.print_runpath()
        else:
            e.fix_rpath(args[1])
    return 0

if __name__ == '__main__':
    run(sys.argv[1:])
--- /dev/null
+# Copyright 2015-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''CD into dir given as first argument and execute
+the command given in the rest of the arguments.'''
+
+import os, subprocess, sys
+
def run(args):
    """Change into args[0] and execute the remaining args as a command.

    Returns the command's exit status.
    """
    dirname, *command = args
    os.chdir(dirname)
    return subprocess.call(command)

if __name__ == '__main__':
    sys.exit(run(sys.argv[1:]))
--- /dev/null
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import lzma
+import os
+import shutil
+import subprocess
+import pickle
+import hashlib
+import tarfile, zipfile
+import tempfile
+from glob import glob
+from mesonbuild.environment import detect_ninja
+from mesonbuild.mesonlib import windows_proof_rmtree
+
def create_hash(fname):
    """Write fname + '.sha256sum' containing the SHA-256 of fname.

    The line format is '<hexdigest> <basename>\n'.
    """
    hashname = fname + '.sha256sum'
    m = hashlib.sha256()
    # Close the source file deterministically; the original leaked the
    # handle returned by open() to the garbage collector.
    with open(fname, 'rb') as f:
        m.update(f.read())
    with open(hashname, 'w') as f:
        f.write('%s %s\n' % (m.hexdigest(), os.path.basename(fname)))
+
+
def create_zip(zipfilename, packaging_dir):
    """Zip packaging_dir (and everything below it) into zipfilename.

    Archive member names are relative to the parent of packaging_dir, so
    the zip unpacks into a single top-level directory.
    """
    removelen = len(os.path.dirname(packaging_dir)) + 1
    with zipfile.ZipFile(zipfilename,
                         'w',
                         compression=zipfile.ZIP_DEFLATED,
                         allowZip64=True) as zf:
        # The top-level directory entry itself, then everything under it.
        zf.write(packaging_dir, packaging_dir[removelen:])
        for root, dirs, files in os.walk(packaging_dir):
            for entry in dirs + files:
                fullpath = os.path.join(root, entry)
                zf.write(fullpath, fullpath[removelen:])
+
def del_gitfiles(dirname):
    """Delete every '.git*' entry directly under dirname.

    Real directories are removed recursively; files and symlinks are
    simply unlinked.
    """
    for gitpath in glob(os.path.join(dirname, '.git*')):
        is_real_dir = os.path.isdir(gitpath) and not os.path.islink(gitpath)
        if is_real_dir:
            windows_proof_rmtree(gitpath)
        else:
            os.unlink(gitpath)
+
def process_submodules(dirname):
    """Initialize git submodules under dirname and strip their git metadata.

    No-op when the checkout has no .gitmodules file.
    """
    module_file = os.path.join(dirname, '.gitmodules')
    if not os.path.exists(module_file):
        return
    subprocess.check_call(['git', 'submodule', 'update', '--init'], cwd=dirname)
    # .gitmodules is INI-like; we only need the 'path = ...' values, so a
    # line scan is enough.  Use a context manager instead of leaking the
    # open file object as the original did.
    with open(module_file) as f:
        for line in f:
            line = line.strip()
            if '=' not in line:
                continue
            k, v = line.split('=', 1)
            if k.strip() != 'path':
                continue
            del_gitfiles(os.path.join(dirname, v.strip()))
+
+
def create_dist_git(dist_name, src_root, bld_root, dist_sub):
    """Build <dist_name>.tar.xz under dist_sub from the git repo at src_root.

    Returns a tuple of generated archive paths.
    """
    staging = os.path.join(dist_sub, dist_name)
    tarball = staging + '.tar.xz'
    # Always start from a pristine staging checkout.
    if os.path.exists(staging):
        shutil.rmtree(staging)
    os.makedirs(staging)
    subprocess.check_call(['git', 'clone', '--shared', src_root, staging])
    process_submodules(staging)
    del_gitfiles(staging)
    # Should use shutil but it got xz support only in 3.5.
    with tarfile.open(tarball, 'w:xz') as tf:
        tf.add(staging, dist_name)
    # Create only .tar.xz for now; zip generation stays disabled.
    shutil.rmtree(staging)
    return (tarball, )
+
+
def create_dist_hg(dist_name, src_root, bld_root, dist_sub):
    """Build <dist_name>.tar.xz under dist_sub from the hg repo at src_root.

    Returns a tuple of generated archive paths.
    """
    os.makedirs(dist_sub, exist_ok=True)

    tarname = os.path.join(dist_sub, dist_name + '.tar')
    xzname = tarname + '.xz'
    subprocess.check_call(['hg', 'archive', '-R', src_root, '-S', '-t', 'tar', tarname])
    # hg cannot emit .tar.xz directly, so compress the plain tar ourselves
    # and then drop it.
    with open(tarname, 'rb') as tf, lzma.open(xzname, 'wb') as xf:
        shutil.copyfileobj(tf, xf)
    os.unlink(tarname)
    # Create only .tar.xz for now; zip generation stays disabled.
    return (xzname, )
+
+
def check_dist(packagename, meson_command):
    """Smoke-test a distribution tarball: configure, build, test, install.

    Unpacks packagename into a scratch dir, configures it with the ninja
    backend, then runs ninja, 'ninja test' and 'ninja install' (into a
    scratch DESTDIR).  Returns 0 on success, 1 on the first failing step.
    Scratch directories are always cleaned up.
    """
    print('Testing distribution package %s.' % packagename)
    unpackdir = tempfile.mkdtemp()
    builddir = tempfile.mkdtemp()
    installdir = tempfile.mkdtemp()
    ninja_bin = detect_ninja()
    try:
        # Close the archive deterministically; the original leaked the
        # tarfile object.
        with tarfile.open(packagename) as tf:
            tf.extractall(unpackdir)
        srcdir = glob(os.path.join(unpackdir, '*'))[0]
        if subprocess.call(meson_command + ['--backend=ninja', srcdir, builddir]) != 0:
            print('Running Meson on distribution package failed')
            return 1
        if subprocess.call([ninja_bin], cwd=builddir) != 0:
            print('Compiling the distribution package failed.')
            return 1
        if subprocess.call([ninja_bin, 'test'], cwd=builddir) != 0:
            print('Running unit tests on the distribution package failed.')
            return 1
        myenv = os.environ.copy()
        myenv['DESTDIR'] = installdir
        if subprocess.call([ninja_bin, 'install'], cwd=builddir, env=myenv) != 0:
            print('Installing the distribution package failed.')
            return 1
    finally:
        shutil.rmtree(unpackdir)
        shutil.rmtree(builddir)
        shutil.rmtree(installdir)
    print('Distribution package %s tested.' % packagename)
    return 0
+
def run(args):
    """Entry point: create (and test) a dist tarball for a build tree.

    args: [src_root, bld_root, *meson_command].  Returns 0 on success,
    1 on failure.
    """
    src_root = args[0]
    bld_root = args[1]
    meson_command = args[2:]
    priv_dir = os.path.join(bld_root, 'meson-private')
    dist_sub = os.path.join(bld_root, 'meson-dist')

    buildfile = os.path.join(priv_dir, 'build.dat')

    # Close the data file promptly instead of leaking the handle as the
    # original pickle.load(open(...)) did.
    with open(buildfile, 'rb') as f:
        build = pickle.load(f)

    dist_name = build.project_name + '-' + build.project_version

    if os.path.isdir(os.path.join(src_root, '.git')):
        names = create_dist_git(dist_name, src_root, bld_root, dist_sub)
    elif os.path.isdir(os.path.join(src_root, '.hg')):
        names = create_dist_hg(dist_name, src_root, bld_root, dist_sub)
    else:
        print('Dist currently only works with Git or Mercurial repos.')
        return 1
    if names is None:
        return 1
    error_count = 0
    for name in names:
        rc = check_dist(name, meson_command) # Check only one.
        if rc == 0:
            create_hash(name)
        error_count += rc
    return 1 if error_count else 0
--- /dev/null
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import shutil
+import argparse
+import subprocess
+from . import destdir_join
+
# Command-line interface shared by all subcommands of this helper
# ('pot', 'gen_gmo', 'update_po', 'install' — dispatched in run()).
# Options that carry lists are encoded as single '@@'-separated strings
# and split in run().
parser = argparse.ArgumentParser()
parser.add_argument('command')
parser.add_argument('--pkgname', default='')
parser.add_argument('--datadirs', default='')
parser.add_argument('--langs', default='')
parser.add_argument('--localedir', default='')
parser.add_argument('--subdir', default='')
parser.add_argument('--extra-args', default='')
+
def read_linguas(src_sub):
    """Return the language codes listed in src_sub/LINGUAS.

    Blank lines and '#' comment lines are ignored.  A missing or
    unreadable file produces a diagnostic and an empty list.
    """
    # Syntax of this file is documented here:
    # https://www.gnu.org/software/gettext/manual/html_node/po_002fLINGUAS.html
    linguas = os.path.join(src_sub, 'LINGUAS')
    try:
        with open(linguas) as f:
            return [lang
                    for line in f
                    if line.strip() and not line.strip().startswith('#')
                    for lang in line.split()]
    except (FileNotFoundError, PermissionError):
        print('Could not find file LINGUAS in {}'.format(src_sub))
        return []
+
def run_potgen(src_sub, pkgname, datadirs, args):
    """Run xgettext to (re)generate <pkgname>.pot inside src_sub.

    The file list is read from POTFILES, falling back to POTFILES.in.
    Returns xgettext's exit status, or 1 when no file list exists.
    """
    listfile = os.path.join(src_sub, 'POTFILES')
    if not os.path.exists(listfile):
        listfile = os.path.join(src_sub, 'POTFILES.in')
        if not os.path.exists(listfile):
            print('Could not find file POTFILES in %s' % src_sub)
            return 1

    child_env = os.environ.copy()
    if datadirs:
        child_env['GETTEXTDATADIRS'] = datadirs

    ofile = os.path.join(src_sub, pkgname + '.pot')
    cmd = ['xgettext', '--package-name=' + pkgname, '-p', src_sub, '-f', listfile,
           '-D', os.environ['MESON_SOURCE_ROOT'], '-k_', '-o', ofile] + args
    return subprocess.call(cmd, env=child_env)
+
def gen_gmo(src_sub, bld_sub, langs):
    """Compile each <lang>.po in src_sub into <lang>.gmo in bld_sub."""
    for lang in langs:
        po = os.path.join(src_sub, lang + '.po')
        gmo = os.path.join(bld_sub, lang + '.gmo')
        subprocess.check_call(['msgfmt', po, '-o', gmo])
    return 0
+
def update_po(src_sub, pkgname, langs):
    """Merge the generated .pot into each language's .po file.

    Languages with no existing .po get a fresh one via msginit.
    """
    potfile = os.path.join(src_sub, pkgname + '.pot')
    for lang in langs:
        pofile = os.path.join(src_sub, lang + '.po')
        if not os.path.exists(pofile):
            subprocess.check_call(['msginit', '--input', potfile, '--output-file', pofile, '--locale', lang, '--no-translator'])
        else:
            subprocess.check_call(['msgmerge', '-q', '-o', pofile, pofile, potfile])
    return 0
+
def do_install(src_sub, bld_sub, dest, pkgname, langs):
    """Copy each compiled <lang>.gmo to <dest>/<lang>/LC_MESSAGES/<pkgname>.mo."""
    for lang in langs:
        srcfile = os.path.join(bld_sub, lang + '.gmo')
        outfile = os.path.join(dest, lang, 'LC_MESSAGES', pkgname + '.mo')
        os.makedirs(os.path.dirname(outfile), exist_ok=True)
        shutil.copyfile(srcfile, outfile)
        shutil.copystat(srcfile, outfile)
        print('Installing %s to %s' % (srcfile, outfile))
    return 0
+
def run(args):
    """Dispatch to the requested gettext subcommand.

    Reads MESON_SOURCE_ROOT / MESON_BUILD_ROOT (and, for 'install',
    MESON_INSTALL_PREFIX and DESTDIR) from the environment.  Returns a
    process exit status (0 on success, 1 on failure).
    """
    options = parser.parse_args(args)
    subcmd = options.command
    langs = options.langs.split('@@') if options.langs else None
    extra_args = options.extra_args.split('@@') if options.extra_args else []
    subdir = os.environ.get('MESON_SUBDIR', '')
    if options.subdir:
        subdir = options.subdir
    src_sub = os.path.join(os.environ['MESON_SOURCE_ROOT'], subdir)
    bld_sub = os.path.join(os.environ['MESON_BUILD_ROOT'], subdir)

    if not langs:
        langs = read_linguas(src_sub)

    if subcmd == 'pot':
        return run_potgen(src_sub, options.pkgname, options.datadirs, extra_args)
    elif subcmd == 'gen_gmo':
        return gen_gmo(src_sub, bld_sub, langs)
    elif subcmd == 'update_po':
        if run_potgen(src_sub, options.pkgname, options.datadirs, extra_args) != 0:
            return 1
        return update_po(src_sub, options.pkgname, langs)
    elif subcmd == 'install':
        destdir = os.environ.get('DESTDIR', '')
        dest = destdir_join(destdir, os.path.join(os.environ['MESON_INSTALL_PREFIX'],
                                                  options.localedir))
        if gen_gmo(src_sub, bld_sub, langs) != 0:
            return 1
        # Bug fix: propagate the install result instead of falling off the
        # end of the function and implicitly returning None.
        return do_install(src_sub, bld_sub, dest, options.pkgname, langs)
    else:
        print('Unknown subcommand.')
        return 1
--- /dev/null
+# Copyright 2015-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys, os
+import subprocess
+import shutil
+import argparse
+from ..mesonlib import MesonException, Popen_safe
+from . import destdir_join
+
# Command-line options for the gtk-doc helper.  Options that carry lists
# are passed as single '@@'-separated strings and split in run().
parser = argparse.ArgumentParser()

parser.add_argument('--sourcedir', dest='sourcedir')
parser.add_argument('--builddir', dest='builddir')
parser.add_argument('--subdir', dest='subdir')
parser.add_argument('--headerdirs', dest='headerdirs')
parser.add_argument('--mainfile', dest='mainfile')
parser.add_argument('--modulename', dest='modulename')
parser.add_argument('--htmlargs', dest='htmlargs', default='')
parser.add_argument('--scanargs', dest='scanargs', default='')
parser.add_argument('--scanobjsargs', dest='scanobjsargs', default='')
parser.add_argument('--gobjects-types-file', dest='gobject_typesfile', default='')
parser.add_argument('--fixxrefargs', dest='fixxrefargs', default='')
parser.add_argument('--mkdbargs', dest='mkdbargs', default='')
parser.add_argument('--ld', dest='ld', default='')
parser.add_argument('--cc', dest='cc', default='')
parser.add_argument('--ldflags', dest='ldflags', default='')
parser.add_argument('--cflags', dest='cflags', default='')
parser.add_argument('--content-files', dest='content_files', default='')
parser.add_argument('--expand-content-files', dest='expand_content_files', default='')
parser.add_argument('--html-assets', dest='html_assets', default='')
parser.add_argument('--ignore-headers', dest='ignore_headers', default='')
parser.add_argument('--namespace', dest='namespace', default='')
parser.add_argument('--mode', dest='mode', default='')
parser.add_argument('--installdir', dest='install_dir')
+
def gtkdoc_run_check(cmd, cwd):
    """Run cmd in cwd; raise MesonException (with the output) on failure."""
    # Put stderr into stdout since we want to print it out anyway.
    # This preserves the order of messages.
    p, out = Popen_safe(cmd, cwd=cwd, stderr=subprocess.STDOUT)[0:2]
    if p.returncode == 0:
        return
    err_msg = ["{!r} failed with status {:d}".format(cmd[0], p.returncode)]
    if out:
        err_msg.append(out)
    raise MesonException('\n'.join(err_msg))
+
def build_gtkdoc(source_root, build_root, doc_subdir, src_subdirs,
                 main_file, module,
                 html_args, scan_args, fixxref_args, mkdb_args,
                 gobject_typesfile, scanobjs_args, ld, cc, ldflags, cflags,
                 html_assets, content_files, ignore_headers, namespace,
                 expand_content_files, mode):
    """Run the full gtk-doc pipeline for one module.

    Stages, in order: gtkdoc-scan, optional gtkdoc-scangobj (only when a
    GObject types file is given), gtkdoc-mkdb, gtkdoc-mkhtml and
    gtkdoc-fixxref, all executed inside the build-tree copy of
    doc_subdir.  'mode' selects the mkdb docbook mode (xml/sgml/auto/
    anything else meaning none).  Raises MesonException via
    gtkdoc_run_check when a tool fails.
    """
    print("Building documentation for %s" % module)

    # Relative source dirs are resolved against both the source and the
    # build tree, so generated headers are scanned too.
    src_dir_args = []
    for src_dir in src_subdirs:
        if not os.path.isabs(src_dir):
            dirs = [os.path.join(source_root, src_dir),
                    os.path.join(build_root, src_dir)]
        else:
            dirs = [src_dir]
        src_dir_args += ['--source-dir=' + d for d in dirs]

    doc_src = os.path.join(source_root, doc_subdir)
    abs_out = os.path.join(build_root, doc_subdir)
    htmldir = os.path.join(abs_out, 'html')

    # The main file plus any -sections/-overrides files are treated as
    # content and copied into the build dir below.
    content_files += [main_file]
    sections = os.path.join(doc_src, module + "-sections.txt")
    if os.path.exists(sections):
        content_files.append(sections)

    overrides = os.path.join(doc_src, module + "-overrides.txt")
    if os.path.exists(overrides):
        content_files.append(overrides)

    # Copy files to build directory
    for f in content_files:
        f_abs = os.path.join(doc_src, f)
        shutil.copyfile(f_abs, os.path.join(
            abs_out, os.path.basename(f_abs)))

    # Recreate the html output dir from scratch; mkdir may still fail
    # harmlessly (e.g. a racing rebuild), hence the broad except.
    shutil.rmtree(htmldir, ignore_errors=True)
    try:
        os.mkdir(htmldir)
    except Exception:
        pass

    for f in html_assets:
        f_abs = os.path.join(doc_src, f)
        shutil.copyfile(f_abs, os.path.join(htmldir, os.path.basename(f_abs)))

    scan_cmd = ['gtkdoc-scan', '--module=' + module] + src_dir_args
    if ignore_headers:
        scan_cmd.append('--ignore-headers=' + ' '.join(ignore_headers))
    # Add user-specified arguments
    scan_cmd += scan_args
    gtkdoc_run_check(scan_cmd, abs_out)

    if gobject_typesfile:
        scanobjs_cmd = ['gtkdoc-scangobj'] + scanobjs_args + ['--types=' + gobject_typesfile,
                                                             '--module=' + module,
                                                             '--cflags=' + cflags,
                                                             '--ldflags=' + ldflags,
                                                             '--ld=' + ld]

        gtkdoc_run_check(scanobjs_cmd, abs_out)

    # Make docbook files
    if mode == 'auto':
        # Guessing is probably a poor idea but these keeps compat
        # with previous behavior
        if main_file.endswith('sgml'):
            modeflag = '--sgml-mode'
        else:
            modeflag = '--xml-mode'
    elif mode == 'xml':
        modeflag = '--xml-mode'
    elif mode == 'sgml':
        modeflag = '--sgml-mode'
    else: # none
        modeflag = None

    mkdb_cmd = ['gtkdoc-mkdb',
                '--module=' + module,
                '--output-format=xml',
                '--expand-content-files=' + ' '.join(expand_content_files),
                ] + src_dir_args
    if namespace:
        mkdb_cmd.append('--name-space=' + namespace)
    if modeflag:
        mkdb_cmd.append(modeflag)
    if len(main_file) > 0:
        # Yes, this is the flag even if the file is in xml.
        mkdb_cmd.append('--main-sgml-file=' + main_file)
    # Add user-specified arguments
    mkdb_cmd += mkdb_args
    gtkdoc_run_check(mkdb_cmd, abs_out)

    # Make HTML documentation
    mkhtml_cmd = ['gtkdoc-mkhtml',
                  '--path=' + ':'.join((doc_src, abs_out)),
                  module,
                  ] + html_args
    if len(main_file) > 0:
        mkhtml_cmd.append('../' + main_file)
    else:
        mkhtml_cmd.append('%s-docs.xml' % module)
    # html gen must be run in the HTML dir
    gtkdoc_run_check(mkhtml_cmd, os.path.join(abs_out, 'html'))

    # Fix cross-references in HTML files
    fixref_cmd = ['gtkdoc-fixxref',
                  '--module=' + module,
                  '--module-dir=html'] + fixxref_args
    gtkdoc_run_check(fixref_cmd, abs_out)
+
def install_gtkdoc(build_root, doc_subdir, install_prefix, datadir, module):
    """Copy the generated html tree to <install_prefix>/<datadir>/<module>."""
    html_source = os.path.join(build_root, doc_subdir, 'html')
    destination = os.path.join(install_prefix, datadir, module)
    # Replace any previous installation wholesale.
    shutil.rmtree(destination, ignore_errors=True)
    shutil.copytree(html_source, destination)
+
def _split_arg(value):
    """Split an '@@'-separated option string into a list ('' -> [])."""
    return value.split('@@') if len(value) > 0 else []

def run(args):
    """Build (and optionally install) gtk-doc documentation.

    Installation happens only when MESON_INSTALL_PREFIX is present in
    the environment, i.e. when invoked as part of an install step.
    Returns 0.
    """
    options = parser.parse_args(args)
    # The five copy-pasted if/else splitting stanzas are collapsed into
    # one helper; behavior is identical.
    htmlargs = _split_arg(options.htmlargs)
    scanargs = _split_arg(options.scanargs)
    scanobjsargs = _split_arg(options.scanobjsargs)
    fixxrefargs = _split_arg(options.fixxrefargs)
    mkdbargs = _split_arg(options.mkdbargs)
    build_gtkdoc(
        options.sourcedir,
        options.builddir,
        options.subdir,
        options.headerdirs.split('@@'),
        options.mainfile,
        options.modulename,
        htmlargs,
        scanargs,
        fixxrefargs,
        mkdbargs,
        options.gobject_typesfile,
        scanobjsargs,
        options.ld,
        options.cc,
        options.ldflags,
        options.cflags,
        _split_arg(options.html_assets),
        _split_arg(options.content_files),
        _split_arg(options.ignore_headers),
        options.namespace,
        _split_arg(options.expand_content_files),
        options.mode)

    if 'MESON_INSTALL_PREFIX' in os.environ:
        destdir = os.environ.get('DESTDIR', '')
        install_prefix = destdir_join(destdir, os.environ['MESON_INSTALL_PREFIX'])
        install_dir = options.install_dir if options.install_dir else options.modulename
        if os.path.isabs(install_dir):
            install_dir = destdir_join(destdir, install_dir)
        install_gtkdoc(options.builddir,
                       options.subdir,
                       install_prefix,
                       'share/gtk-doc/html',
                       install_dir)
    return 0

if __name__ == '__main__':
    sys.exit(run(sys.argv[1:]))
--- /dev/null
+# Copyright 2013-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+import argparse
+import pickle
+import platform
+
+from ..mesonlib import Popen_safe
+
# Parsed command-line options; populated by run().
options = None

# The single positional argument is the pickled executable data file.
parser = argparse.ArgumentParser()
parser.add_argument('args', nargs='+')
+
def is_windows():
    """True on native Windows or a MinGW environment."""
    system = platform.system().lower()
    return 'mingw' in system or system == 'windows'

def is_cygwin():
    """True when running under Cygwin."""
    return 'cygwin' in platform.system().lower()

def run_with_mono(fname):
    """True when fname is a .exe that must be launched through mono."""
    return fname.endswith('.exe') and not (is_windows() or is_cygwin())
+
def run_exe(exe):
    """Execute a deserialised executable description; return its exit code.

    'exe' is an unpickled object providing the fields read below: fname,
    is_cross, exe_runner, env, extra_paths, cmd_args, workdir and
    capture.
    """
    # Pick a launcher: .jar -> java, .exe off Windows/Cygwin -> mono,
    # cross-compiled -> the configured wrapper, otherwise run directly.
    if exe.fname[0].endswith('.jar'):
        cmd = ['java', '-jar'] + exe.fname
    elif not exe.is_cross and run_with_mono(exe.fname[0]):
        cmd = ['mono'] + exe.fname
    else:
        if exe.is_cross:
            if exe.exe_runner is None:
                raise AssertionError('BUG: Trying to run cross-compiled exes with no wrapper')
            else:
                cmd = [exe.exe_runner] + exe.fname
        else:
            cmd = exe.fname
    child_env = os.environ.copy()
    child_env.update(exe.env)
    if len(exe.extra_paths) > 0:
        # Prepend extra search paths; the trailing '' supplies the
        # separator between the extras and the inherited PATH.
        child_env['PATH'] = (os.pathsep.join(exe.extra_paths + ['']) +
                             child_env['PATH'])
    p, stdout, stderr = Popen_safe(cmd + exe.cmd_args, env=child_env, cwd=exe.workdir)
    # Capture stdout only on success; stderr always passes through.
    if exe.capture and p.returncode == 0:
        with open(exe.capture, 'w') as output:
            output.write(stdout)
    if stderr:
        sys.stderr.write(stderr)
    return p.returncode
+
def run(args):
    """Load a pickled executable description and run it.

    Returns the wrapped program's exit status, or 1 on bad usage.
    """
    global options
    options = parser.parse_args(args)
    if len(options.args) != 1:
        print('Test runner for Meson. Do not run on your own, mmm\'kay?')
        print(sys.argv[0] + ' [data file]')
        # Bug fix: previously this fell through after printing the usage
        # message and tried to run anyway.
        return 1
    exe_data_file = options.args[0]
    with open(exe_data_file, 'rb') as f:
        exe = pickle.load(f)
    return run_exe(exe)

if __name__ == '__main__':
    sys.exit(run(sys.argv[1:]))
--- /dev/null
+# Copyright 2013-2014 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys, pickle, os, shutil, subprocess, gzip, platform, errno
+import shlex
+from glob import glob
+from . import depfixer
+from . import destdir_join
+from ..mesonlib import is_windows, Popen_safe
+
# Handle to meson-logs/install-log.txt; bound by run() for the duration
# of the install and used by append_to_log().
install_log_file = None
# Files whose SELinux context should be restored after installation.
selinux_updates = []
+
class DirMaker:
    """Creates directories and remembers which ones it actually created.

    Used as a context manager: on exit, every created directory is
    appended to the install log, deepest directory first.
    """

    def __init__(self):
        self.dirs = []

    def makedirs(self, path, exist_ok=False):
        """Create path (and any missing parents), recording the new dirs."""
        dirname = os.path.normpath(path)
        missing = []
        while dirname != os.path.dirname(dirname):
            if not os.path.exists(dirname):
                missing.append(dirname)
            dirname = os.path.dirname(dirname)
        os.makedirs(path, exist_ok=exist_ok)

        # Record parents before children, so the reversal on __exit__
        # logs the last-created (deepest) directory first.
        self.dirs += reversed(missing)

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.dirs.reverse()
        for d in self.dirs:
            append_to_log(d)
+
def set_mode(path, mode):
    """Apply a FileMode-like object (perms_s, perms, owner, group) to path.

    A None mode, or one with nothing set, leaves the file untouched.
    Ownership and permission failures are reported and ignored.
    """
    if mode is None:
        # Keep mode unchanged
        return
    if (mode.perms_s or mode.owner or mode.group) is None:
        # Nothing to set
        # NOTE(review): 'a or b or c' yields the first truthy operand or
        # the last operand, so this is None only when all three fields
        # are unset — assumes unset fields are None; confirm vs FileMode.
        return
    # No chown() on Windows, and must set one of owner/group
    if not is_windows() and (mode.owner or mode.group) is not None:
        try:
            shutil.chown(path, mode.owner, mode.group)
        except PermissionError as e:
            msg = '{!r}: Unable to set owner {!r} and group {!r}: {}, ignoring...'
            print(msg.format(path, mode.owner, mode.group, e.strerror))
        except LookupError:
            # Named owner/group not present on this system.
            msg = '{!r}: Non-existent owner {!r} or group {!r}: ignoring...'
            print(msg.format(path, mode.owner, mode.group))
        except OSError as e:
            if e.errno == errno.EINVAL:
                # Numeric uid/gid out of range.
                msg = '{!r}: Non-existent numeric owner {!r} or group {!r}: ignoring...'
                print(msg.format(path, mode.owner, mode.group))
            else:
                raise
    # Must set permissions *after* setting owner/group otherwise the
    # setuid/setgid bits will get wiped by chmod
    # NOTE: On Windows you can set read/write perms; the rest are ignored
    if mode.perms_s is not None:
        try:
            os.chmod(path, mode.perms)
        except PermissionError as e:
            msg = '{!r}: Unable to set permissions {!r}: {}, ignoring...'
            print(msg.format(path, mode.perms_s, e.strerror))
+
def restore_selinux_contexts():
    '''
    Restores the SELinux context for files in @selinux_updates

    If $DESTDIR is set, do not warn if the call fails.
    '''
    try:
        subprocess.check_call(['selinuxenabled'])
    except (FileNotFoundError, PermissionError, subprocess.CalledProcessError):
        # No SELinux tooling, or SELinux disabled: silently do nothing.
        return

    # restorecon reads NUL-separated file names from stdin with -f- -0.
    payload = b'\0'.join(os.fsencode(f) for f in selinux_updates) + b'\0'
    with subprocess.Popen(['restorecon', '-F', '-f-', '-0'],
                          stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc:
        out, err = proc.communicate(input=payload)
        if proc.returncode != 0 and not os.environ.get('DESTDIR'):
            print('Failed to restore SELinux context of installed files...',
                  'Standard output:', out.decode(),
                  'Standard error:', err.decode(), sep='\n')
+
def append_to_log(line):
    """Append one newline-terminated entry to the install log and flush."""
    suffix = '' if line.endswith('\n') else '\n'
    install_log_file.write(line + suffix)
    install_log_file.flush()
+
def do_copyfile(from_file, to_file):
    """Copy from_file to to_file (contents and stat), logging the result.

    Overwrites an existing regular file at the destination; refuses to
    clobber anything that is not a regular file.
    """
    if not os.path.isfile(from_file):
        raise RuntimeError('Tried to install something that isn\'t a file:'
                           '{!r}'.format(from_file))
    # copyfile fails if the target already exists, so an existing regular
    # file is removed first; any other kind of entry is a hard error.
    if os.path.exists(to_file) and not os.path.isfile(to_file):
        raise RuntimeError('Destination {!r} already exists and is not '
                           'a file'.format(to_file))
    if os.path.exists(to_file):
        os.unlink(to_file)
    shutil.copyfile(from_file, to_file)
    shutil.copystat(from_file, to_file)
    selinux_updates.append(to_file)
    append_to_log(to_file)
+
def do_copydir(data, src_prefix, src_dir, dst_dir, exclude):
    '''
    Copies the directory @src_prefix (full path) into @dst_dir

    @src_dir is simply the parent directory of @src_prefix
    @exclude is None or a (files, dirs) pair of sets of names relative
    to src_dir that must be skipped.
    '''
    if exclude is not None:
        exclude_files, exclude_dirs = exclude
    else:
        exclude_files = exclude_dirs = set()
    for root, dirs, files in os.walk(src_prefix):
        # Iterate over a copy so excluded dirs can be pruned in place.
        for d in dirs[:]:
            abs_src = os.path.join(src_dir, root, d)
            # Path relative to src_dir; used both for exclusion matching
            # and as the destination-relative path.
            filepart = abs_src[len(src_dir) + 1:]
            abs_dst = os.path.join(dst_dir, filepart)
            # Remove these so they aren't visited by os.walk at all.
            if filepart in exclude_dirs:
                dirs.remove(d)
                continue
            if os.path.isdir(abs_dst):
                continue
            if os.path.exists(abs_dst):
                print('Tried to copy directory %s but a file of that name already exists.' % abs_dst)
                sys.exit(1)
            data.dirmaker.makedirs(abs_dst)
            shutil.copystat(abs_src, abs_dst)
        for f in files:
            abs_src = os.path.join(src_dir, root, f)
            filepart = abs_src[len(src_dir) + 1:]
            if filepart in exclude_files:
                continue
            abs_dst = os.path.join(dst_dir, filepart)
            # NOTE(review): only warns here; if a directory really is in
            # the way, the copy below will fail — confirm intended.
            if os.path.isdir(abs_dst):
                print('Tried to copy file %s but a directory of that name already exists.' % abs_dst)
            if os.path.exists(abs_dst):
                os.unlink(abs_dst)
            parent_dir = os.path.split(abs_dst)[0]
            if not os.path.isdir(parent_dir):
                os.mkdir(parent_dir)
                shutil.copystat(os.path.split(abs_src)[0], parent_dir)
            # follow_symlinks=False preserves symlinks as symlinks.
            shutil.copy2(abs_src, abs_dst, follow_symlinks=False)
            append_to_log(abs_dst)
+
def get_destdir_path(d, path):
    """Map an install path to its final location under DESTDIR.

    Absolute paths are re-rooted via destdir_join; relative paths are
    resolved against the (already destdir-prefixed) install prefix.
    """
    if os.path.isabs(path):
        return destdir_join(d.destdir, path)
    return os.path.join(d.fullprefix, path)
+
def do_install(datafilename):
    """Perform the full installation described by the pickled data file.

    Reads DESTDIR from the environment and runs every install phase in
    order inside a DirMaker context so created directories get logged.
    """
    with open(datafilename, 'rb') as ifile:
        d = pickle.load(ifile)
    d.destdir = os.environ.get('DESTDIR', '')
    d.fullprefix = destdir_join(d.destdir, d.prefix)

    d.dirmaker = DirMaker()
    with d.dirmaker:
        install_subdirs(d) # Must be first, because it needs to delete the old subtree.
        install_targets(d)
        install_headers(d)
        install_man(d)
        install_data(d)
        restore_selinux_contexts()
        run_install_script(d)
+
def install_subdirs(d):
    """Install every subdirectory recorded in d.install_subdirs."""
    for (src_dir, inst_dir, dst_dir, mode, exclude) in d.install_subdirs:
        # Drop a single trailing separator so the joins behave.
        if src_dir.endswith(('/', '\\')):
            src_dir = src_dir[:-1]
        src_prefix = os.path.join(src_dir, inst_dir)
        print('Installing subdir %s to %s' % (src_prefix, dst_dir))
        dst_dir = get_destdir_path(d, dst_dir)
        d.dirmaker.makedirs(dst_dir, exist_ok=True)
        do_copydir(d, src_prefix, src_dir, dst_dir, exclude)
        set_mode(os.path.join(dst_dir, inst_dir), mode)
+
def install_data(d):
    """Install the plain data files recorded in d.data."""
    for i in d.data:
        fullfilename = i[0]
        outfilename = get_destdir_path(d, i[1])
        mode = i[2]
        outdir = os.path.dirname(outfilename)
        d.dirmaker.makedirs(outdir, exist_ok=True)
        print('Installing %s to %s' % (fullfilename, outdir))
        do_copyfile(fullfilename, outfilename)
        set_mode(outfilename, mode)
+
def install_man(d):
    """Install man pages from d.man, gzipping on the fly when the
    destination name ends in .gz but the source does not."""
    for m in d.man:
        full_source_filename = m[0]
        outfilename = get_destdir_path(d, m[1])
        outdir = os.path.split(outfilename)[0]
        d.dirmaker.makedirs(outdir, exist_ok=True)
        print('Installing %s to %s' % (full_source_filename, outdir))
        if outfilename.endswith('.gz') and not full_source_filename.endswith('.gz'):
            with open(outfilename, 'wb') as of:
                with open(full_source_filename, 'rb') as sf:
                    # Set mtime and filename for reproducibility.
                    with gzip.GzipFile(fileobj=of, mode='wb', filename='', mtime=0) as gz:
                        gz.write(sf.read())
            shutil.copystat(full_source_filename, outfilename)
            # Logged here because the gzip path bypasses do_copyfile.
            append_to_log(outfilename)
        else:
            do_copyfile(full_source_filename, outfilename)
+
def install_headers(d):
    """Install each header recorded in d.headers (source path, dest dir)."""
    for t in d.headers:
        fullfilename = t[0]
        outdir = get_destdir_path(d, t[1])
        fname = os.path.basename(fullfilename)
        outfilename = os.path.join(outdir, fname)
        print('Installing %s to %s' % (fname, outdir))
        d.dirmaker.makedirs(outdir, exist_ok=True)
        do_copyfile(fullfilename, outfilename)
+
def run_install_script(d):
    """Run every custom install script with the MESON_* environment set.

    Exits the installer with the script's own status when a script fails,
    and with 1 when a script cannot be launched at all.
    """
    env = {'MESON_SOURCE_ROOT': d.source_dir,
           'MESON_BUILD_ROOT': d.build_dir,
           'MESON_INSTALL_PREFIX': d.prefix,
           'MESON_INSTALL_DESTDIR_PREFIX': d.fullprefix,
           'MESONINTROSPECT': ' '.join([shlex.quote(x) for x in d.mesonintrospect]),
           }

    child_env = os.environ.copy()
    child_env.update(env)

    for i in d.install_scripts:
        script = i['exe']
        args = i['args']
        name = ' '.join(script + args)
        print('Running custom install script {!r}'.format(name))
        try:
            rc = subprocess.call(script + args, env=child_env)
        except OSError:
            # Bug fix: the old bare 'except:' also caught the SystemExit
            # raised below for a non-zero script status, which replaced
            # the script's exit code with 1 and printed a misleading
            # "Failed to run" message.  Only launch failures belong here.
            print('Failed to run install script {!r}'.format(name))
            sys.exit(1)
        if rc != 0:
            sys.exit(rc)
+
def is_elf_platform():
    """True unless the host platform is one that does not use ELF binaries."""
    return platform.system().lower() not in ('darwin', 'windows', 'cygwin')
+
def check_for_stampfile(fname):
    '''Some languages e.g. Rust have output files
    whose names are not known at configure time.
    Check if this is the case and return the real
    file instead.'''
    if fname.endswith(('.so', '.dll')):
        # A zero-byte library is a stamp; find the real versioned file.
        if os.stat(fname).st_size == 0:
            base, suffix = os.path.splitext(fname)
            matches = glob(base + '-*' + suffix)
            if len(matches) > 1:
                print("Stale dynamic library files in build dir. Can't install.")
                sys.exit(1)
            if len(matches) == 1:
                return matches[0]
    elif fname.endswith(('.a', '.lib')):
        if os.stat(fname).st_size == 0:
            base = os.path.splitext(fname)[0]
            matches = glob(base + '-*' + '.rlib')
            if len(matches) > 1:
                print("Stale static library files in build dir. Can't install.")
                sys.exit(1)
            if len(matches) == 1:
                return matches[0]
    return fname
+
def install_targets(d):
    """Install every build target listed in d.targets.

    Each entry is (fname, outdir, aliases, should_strip, install_rpath),
    as unpacked below.  Handles stripping, accompanying .pdb files,
    alias symlinks and rpath fixing on ELF platforms.
    """
    for t in d.targets:
        fname = check_for_stampfile(t[0])
        outdir = get_destdir_path(d, t[1])
        outname = os.path.join(outdir, os.path.split(fname)[-1])
        aliases = t[2]
        should_strip = t[3]
        install_rpath = t[4]
        print('Installing %s to %s' % (fname, outname))
        d.dirmaker.makedirs(outdir, exist_ok=True)
        if not os.path.exists(fname):
            raise RuntimeError('File {!r} could not be found'.format(fname))
        elif os.path.isfile(fname):
            do_copyfile(fname, outname)
            if should_strip and d.strip_bin is not None:
                if fname.endswith('.jar'):
                    print('Not stripping jar target:', os.path.split(fname)[1])
                    continue
                print('Stripping target {!r}'.format(fname))
                # Strip the installed copy, never the build-tree file.
                ps, stdo, stde = Popen_safe(d.strip_bin + [outname])
                if ps.returncode != 0:
                    print('Could not strip file.\n')
                    print('Stdout:\n%s\n' % stdo)
                    print('Stderr:\n%s\n' % stde)
                    sys.exit(1)
            # MSVC-style debug info travels alongside the binary.
            pdb_filename = os.path.splitext(fname)[0] + '.pdb'
            if not should_strip and os.path.exists(pdb_filename):
                pdb_outname = os.path.splitext(outname)[0] + '.pdb'
                print('Installing pdb file %s to %s' % (pdb_filename, pdb_outname))
                do_copyfile(pdb_filename, pdb_outname)
        elif os.path.isdir(fname):
            fname = os.path.join(d.build_dir, fname.rstrip('/'))
            do_copydir(d, fname, os.path.dirname(fname), outdir, None)
        else:
            raise RuntimeError('Unknown file type for {!r}'.format(fname))
        printed_symlink_error = False
        for alias, to in aliases.items():
            try:
                symlinkfilename = os.path.join(outdir, alias)
                try:
                    # Replace any stale symlink from a previous install.
                    os.unlink(symlinkfilename)
                except FileNotFoundError:
                    pass
                os.symlink(to, symlinkfilename)
                append_to_log(symlinkfilename)
            except (NotImplementedError, OSError):
                # Filesystem or platform without symlink support; warn once.
                if not printed_symlink_error:
                    print("Symlink creation does not work on this platform. "
                          "Skipping all symlinking.")
                    printed_symlink_error = True
        if is_elf_platform() and os.path.isfile(outname):
            try:
                e = depfixer.Elf(outname, False)
                e.fix_rpath(install_rpath)
            except SystemExit as e:
                # depfixer exits 0 for benign cases; re-raise real errors.
                if isinstance(e.code, int) and e.code == 0:
                    pass
                else:
                    raise
+
def run(args):
    '''Entry point for the install script.

    args must contain exactly one element: the path to the install data
    file generated by Meson.  Returns 0 on success, 1 on bad usage.'''
    global install_log_file
    if len(args) != 1:
        print('Installer script for Meson. Do not run on your own, mmm\'kay?')
        print('meson_install.py [install info file]')
        # BUGFIX: the original fell through after printing usage, crashing
        # on an empty argv or silently using a stray argument.
        return 1
    datafilename = args[0]
    private_dir = os.path.split(datafilename)[0]
    log_dir = os.path.join(private_dir, '../meson-logs')
    # Record every installed file so `meson uninstall` can undo the install.
    with open(os.path.join(log_dir, 'install-log.txt'), 'w') as lf:
        install_log_file = lf
        append_to_log('# List of files installed by Meson')
        append_to_log('# Does not contain files installed by custom scripts.')
        do_install(datafilename)
    install_log_file = None
    return 0

if __name__ == '__main__':
    sys.exit(run(sys.argv[1:]))
--- /dev/null
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import subprocess
+import os
+
# Command line: the input template, output file, msgfmt output --type
# (passed through as '--<type>'), the po directory, and optional extra
# gettext data directories.
parser = argparse.ArgumentParser()
parser.add_argument('input')
parser.add_argument('output')
parser.add_argument('type')
parser.add_argument('podir')
parser.add_argument('--datadirs', default='')
+
+
def run(args):
    """Invoke msgfmt on the given template and return its exit status."""
    opts = parser.parse_args(args)
    child_env = None
    if opts.datadirs:
        # Point gettext at extra data directories beyond the defaults.
        child_env = os.environ.copy()
        child_env['GETTEXTDATADIRS'] = opts.datadirs
    cmd = ['msgfmt', '--' + opts.type, '-d', opts.podir,
           '--template', opts.input, '-o', opts.output]
    return subprocess.call(cmd, env=child_env)
--- /dev/null
+# Copyright 2015-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys, os
+import pickle, subprocess
+from mesonbuild.mesonlib import meson_command
+
+# This could also be used for XCode.
+
def need_regen(regeninfo, regen_timestamp):
    """Return True when any dependency file is newer than regen_timestamp."""
    for dep in regeninfo.depfiles:
        dep_path = os.path.join(regeninfo.build_dir, dep)
        if os.stat(dep_path).st_mtime > regen_timestamp:
            return True
    # The timestamp file gets automatically deleted by MSBuild during a 'Clean' build.
    # We must make sure to recreate it, even if we do not regenerate the solution.
    # Otherwise, Visual Studio will always consider the REGEN project out of date.
    print("Everything is up-to-date, regeneration of build files is not needed.")
    from ..backend.vs2010backend import Vs2010Backend
    Vs2010Backend.touch_regen_timestamp(regeninfo.build_dir)
    return False
+
def regen(regeninfo, mesonscript, backend):
    """Re-run Meson to regenerate the build files for the given backend.

    mesonscript is kept for interface compatibility; the command actually
    executed is the module-level meson_command."""
    argv = meson_command + ['--internal',
                            'regenerate',
                            regeninfo.build_dir,
                            regeninfo.source_dir,
                            '--backend=' + backend]
    subprocess.check_call(argv)
+
def run(args):
    '''Regenerate build files if any dependency is newer than the last regen.

    args[0] is the build dir's private directory, which holds the pickles
    regeninfo.dump and coredata.dat written by Meson at configure time.'''
    private_dir = args[0]
    dumpfile = os.path.join(private_dir, 'regeninfo.dump')
    coredata = os.path.join(private_dir, 'coredata.dat')
    # NOTE: the pickles are trusted input written by Meson itself.
    with open(dumpfile, 'rb') as f:
        regeninfo = pickle.load(f)
    with open(coredata, 'rb') as f:
        coredata = pickle.load(f)
    mesonscript = coredata.meson_script_launcher
    backend = coredata.get_builtin_option('backend')
    # The dump file's own mtime marks the time of the last regeneration.
    regen_timestamp = os.stat(dumpfile).st_mtime
    if need_regen(regeninfo, regen_timestamp):
        regen(regeninfo, mesonscript, backend)
    sys.exit(0)

if __name__ == '__main__':
    run(sys.argv[1:])
--- /dev/null
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import subprocess
+import shutil
+import tempfile
+from ..environment import detect_ninja
+
def scanbuild(exename, srcdir, blddir, privdir, logdir, args):
    """Configure the project into a scratch dir and build it under
    scan-build, returning the exit code of the failing step (or the build)."""
    with tempfile.TemporaryDirectory(dir=privdir) as scandir:
        configure_cmd = [exename] + args + [srcdir, scandir]
        analyze_cmd = [exename, '-o', logdir, detect_ninja(), '-C', scandir]
        rc = subprocess.call(configure_cmd)
        if rc != 0:
            return rc
        return subprocess.call(analyze_cmd)
+
def run(args):
    """Entry point: wipe old scan-build logs, locate the analyzer, run it."""
    srcdir, blddir = args[0], args[1]
    meson_args = args[2:]
    private_dir = os.path.join(blddir, 'meson-private')
    log_dir = os.path.join(blddir, 'meson-logs/scanbuild')
    shutil.rmtree(log_dir, ignore_errors=True)
    # The analyzer binary can be overridden, e.g. SCANBUILD=scan-build-7.
    tool = os.environ.get('SCANBUILD', 'scan-build')
    if not shutil.which(tool):
        print('Scan-build not installed.')
        return 1
    return scanbuild(tool, srcdir, blddir, private_dir, log_dir, meson_args)
--- /dev/null
+# Copyright 2013-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This script extracts the symbols of a given shared library
+# into a file. If the symbols have not changed, the file is not
+# touched. This information is used to skip link steps if the
+# ABI has not changed.
+
+# This file is basically a reimplementation of
+# http://cgit.freedesktop.org/libreoffice/core/commit/?id=3213cd54b76bc80a6f0516aac75a48ff3b2ad67c
+
+import os, sys
+from .. import mesonlib
+from ..mesonlib import Popen_safe
+import argparse
+
# Command line: an optional --cross-host flag plus positional arguments;
# run() insists on exactly two positionals (library file, output file).
parser = argparse.ArgumentParser()

parser.add_argument('--cross-host', default=None, dest='cross_host',
                    help='cross compilation host platform')
parser.add_argument('args', nargs='+')
+
def dummy_syms(outfilename):
    """Just touch it so relinking happens always."""
    open(outfilename, 'w').close()
+
def write_if_changed(text, outfilename):
    """Write text to outfilename only when the content differs, leaving the
    file's timestamp untouched when nothing changed (skips relinking)."""
    try:
        with open(outfilename, 'r') as existing:
            if existing.read() == text:
                return
    except FileNotFoundError:
        pass
    with open(outfilename, 'w') as out:
        out.write(text)
+
def linux_syms(libfilename, outfilename):
    '''Write the SONAME and exported dynamic symbols of an ELF shared
    library to outfilename (only rewritten when the content changed).'''
    # The binutils tools can be overridden via the environment.
    evar = 'READELF'
    if evar in os.environ:
        readelfbin = os.environ[evar].strip()
    else:
        readelfbin = 'readelf'
    evar = 'NM'
    if evar in os.environ:
        nmbin = os.environ[evar].strip()
    else:
        nmbin = 'nm'
    pe, output = Popen_safe([readelfbin, '-d', libfilename])[0:2]
    if pe.returncode != 0:
        raise RuntimeError('Readelf does not work')
    # At most one SONAME dynamic entry is expected.
    result = [x for x in output.split('\n') if 'SONAME' in x]
    assert(len(result) <= 1)
    # Exported, defined dynamic symbols; keep the first two posix-format
    # columns (name and type) per line.
    pnm, output = Popen_safe([nmbin, '--dynamic', '--extern-only',
                              '--defined-only', '--format=posix',
                              libfilename])[0:2]
    if pnm.returncode != 0:
        raise RuntimeError('nm does not work.')
    result += [' '.join(x.split()[0:2]) for x in output.split('\n') if len(x) > 0]
    write_if_changed('\n'.join(result) + '\n', outfilename)
+
def osx_syms(libfilename, outfilename):
    '''Write the install name info and exported symbols of an OSX dylib
    to outfilename (only rewritten when the content changed).'''
    pe, output = Popen_safe(['otool', '-l', libfilename])[0:2]
    if pe.returncode != 0:
        raise RuntimeError('Otool does not work.')
    arr = output.split('\n')
    # BUGFIX: if LC_ID_DYLIB never appeared, 'match' was left unbound and
    # the next line raised a confusing NameError; fail explicitly instead.
    match = None
    for (i, val) in enumerate(arr):
        if 'LC_ID_DYLIB' in val:
            match = i
            break
    if match is None:
        raise RuntimeError('Could not find LC_ID_DYLIB in otool output of %s.' % libfilename)
    result = [arr[match + 2], arr[match + 5]] # Libreoffice stores all 5 lines but the others seem irrelevant.
    pnm, output = Popen_safe(['nm', '-g', '-P', libfilename])[0:2]
    if pnm.returncode != 0:
        raise RuntimeError('nm does not work.')
    # Keep name and type of every defined external symbol (drop undefined 'U').
    result += [' '.join(x.split()[0:2]) for x in output.split('\n') if len(x) > 0 and not x.endswith('U')]
    write_if_changed('\n'.join(result) + '\n', outfilename)
+
def gen_symbols(libfilename, outfilename, cross_host):
    """Dispatch to the platform-specific symbol extractor."""
    if cross_host is not None:
        # In case of cross builds just always relink: in theory we could
        # determine the correct toolset, but there are more important
        # things to do.
        dummy_syms(outfilename)
        return
    if mesonlib.is_linux():
        linux_syms(libfilename, outfilename)
    elif mesonlib.is_osx():
        osx_syms(libfilename, outfilename)
    else:
        dummy_syms(outfilename)
+
def run(args):
    """Command-line entry point: extract the symbols of a shared library."""
    options = parser.parse_args(args)
    if len(options.args) != 2:
        print('symbolextractor.py <shared library file> <output file>')
        sys.exit(1)
    libfile, outfile = options.args
    gen_symbols(libfile, outfile, options.cross_host)
    return 0

if __name__ == '__main__':
    sys.exit(run(sys.argv[1:]))
--- /dev/null
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
# Path (relative to the build dir) of the log written by the installer;
# its presence is what makes an uninstall possible.
logfile = 'meson-logs/install-log.txt'
+
def do_uninstall(log):
    '''Delete every path recorded in the install log and print a summary.

    Lines starting with '#' are comments.  Directories are removed with
    rmdir (so only when empty); everything else is unlinked.  Failures are
    reported and counted but do not abort the run.'''
    failures = 0
    successes = 0
    # BUGFIX: use a context manager so the log file handle is closed
    # deterministically (the original leaked the handle from open()).
    with open(log) as log_lines:
        for line in log_lines:
            if line.startswith('#'):
                continue
            fname = line.strip()
            try:
                if os.path.isdir(fname) and not os.path.islink(fname):
                    os.rmdir(fname)
                else:
                    os.unlink(fname)
                print('Deleted:', fname)
                successes += 1
            except Exception as e:
                print('Could not delete %s: %s.' % (fname, e))
                failures += 1
    print('\nUninstall finished.\n')
    print('Deleted:', successes)
    print('Failed:', failures)
    print('\nRemember that files created by custom scripts have not been removed.')
+
def run(args):
    """Entry point for the uninstaller; expects no extra arguments."""
    if args:
        print('Weird error.')
        return 1
    if os.path.exists(logfile):
        do_uninstall(logfile)
    else:
        print('Log file does not exist, no installation has been done.')
    return 0
--- /dev/null
+# Copyright 2015-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys, os, subprocess, re
+
def config_vcs_tag(infile, outfile, fallback, source_dir, replace_string, regex_selector, cmd):
    """Substitute replace_string in infile with VCS-derived version info.

    Runs cmd in source_dir and extracts group 1 of regex_selector from its
    output; any failure (command missing, bad exit, no regex match) falls
    back to the fallback string.  outfile is rewritten only when its
    content would actually change."""
    try:
        raw = subprocess.check_output(cmd, cwd=source_dir)
        version = re.search(regex_selector, raw.decode()).group(1).strip()
    except Exception:
        version = fallback

    with open(infile) as src:
        new_data = src.read().replace(replace_string, version)
    needs_update = True
    if os.path.exists(outfile):
        with open(outfile) as existing:
            needs_update = existing.read() != new_data
    if needs_update:
        with open(outfile, 'w') as dst:
            dst.write(new_data)
+
def run(args):
    """CLI entry: the first six args are fixed parameters, the remainder
    is the VCS command to execute."""
    infile, outfile, fallback, source_dir, replace_string, regex_selector = args[:6]
    config_vcs_tag(infile, outfile, fallback, source_dir,
                   replace_string, regex_selector, args[6:])
    return 0

if __name__ == '__main__':
    sys.exit(run(sys.argv[1:]))
--- /dev/null
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import subprocess
+import shutil
+import argparse
+from .. import mlog
+from . import destdir_join
+from .gettext import read_linguas
+
# Command-line interface shared by the yelp subcommands
# (pot / update-po / build / install); list-valued options arrive
# '@@'-separated and are split in run().
parser = argparse.ArgumentParser()
parser.add_argument('command')
parser.add_argument('--id', dest='project_id')
parser.add_argument('--subdir', dest='subdir')
parser.add_argument('--installdir', dest='install_dir')
parser.add_argument('--sources', dest='sources')
parser.add_argument('--media', dest='media', default='')
parser.add_argument('--langs', dest='langs', default='')
# NOTE(review): argparse's type=bool does not parse booleans -- any
# non-empty string (including "False") is truthy and only an empty value
# yields False.  Verify what callers actually pass before relying on it.
parser.add_argument('--symlinks', type=bool, dest='symlinks', default=False)
+
def build_pot(srcdir, project_id, sources):
    """Generate the translation template (.pot) from the C pages via itstool."""
    # itstool must be given paths relative to srcdir, all under C/.
    relative_sources = [os.path.join('C', src) for src in sources]
    potfile = os.path.join(srcdir, project_id + '.pot')
    subprocess.call(['itstool', '-o', potfile] + relative_sources)
+
def update_po(srcdir, project_id, langs):
    """Merge the current .pot template into every language's .po file."""
    potfile = os.path.join(srcdir, project_id + '.pot')
    for language in langs:
        po_path = os.path.join(srcdir, language, language + '.po')
        subprocess.call(['msgmerge', '-q', '-o', po_path, po_path, potfile])
+
def build_translations(srcdir, blddir, langs):
    """Compile each language's .po into a binary .gmo inside the build dir."""
    for language in langs:
        outdir = os.path.join(blddir, language)
        os.makedirs(outdir, exist_ok=True)
        cmd = ['msgfmt', os.path.join(srcdir, language, language + '.po'),
               '-o', os.path.join(outdir, language + '.gmo')]
        subprocess.call(cmd)
+
def merge_translations(blddir, sources, langs):
    """Apply each language's compiled .gmo to the source pages with itstool."""
    for language in langs:
        gmo_file = os.path.join(blddir, language, language + '.gmo')
        cmd = ['itstool', '-m', gmo_file,
               '-o', os.path.join(blddir, language)] + sources
        subprocess.call(cmd)
+
def install_help(srcdir, blddir, sources, media, langs, install_dir, destdir, project_id, symlinks):
    '''Install the help pages and media files for every language.

    'C' pages come straight from the source tree, translated pages from
    the build tree.  Media missing for a translation is either symlinked
    to the installed 'C' copy (if symlinks is truthy) or copied from the
    'C' original.'''
    c_install_dir = os.path.join(install_dir, 'C', project_id)
    for lang in langs + ['C']:
        indir = destdir_join(destdir, os.path.join(install_dir, lang, project_id))
        os.makedirs(indir, exist_ok=True)
        for source in sources:
            infile = os.path.join(srcdir if lang == 'C' else blddir, lang, source)
            outfile = os.path.join(indir, source)
            mlog.log('Installing %s to %s' % (infile, outfile))
            shutil.copyfile(infile, outfile)
            shutil.copystat(infile, outfile)
        for m in media:
            infile = os.path.join(srcdir, lang, m)
            outfile = os.path.join(indir, m)
            if not os.path.exists(infile):
                if lang == 'C':
                    # The untranslated original itself is missing: warn and skip.
                    mlog.warning('Media file "%s" did not exist in C directory' % m)
                    continue
                elif symlinks:
                    # Link the translation's media to the installed 'C' copy.
                    srcfile = os.path.join(c_install_dir, m)
                    mlog.log('Symlinking %s to %s.' % (outfile, srcfile))
                    if '/' in m or '\\' in m:
                        os.makedirs(os.path.dirname(outfile), exist_ok=True)
                    try:
                        try:
                            os.symlink(srcfile, outfile)
                        except FileExistsError:
                            os.remove(outfile)
                            os.symlink(srcfile, outfile)
                        continue
                    except (NotImplementedError, OSError):
                        # Fall through to the copy code below.
                        mlog.warning('Symlinking not supported, falling back to copying')
                else:
                    # Lang doesn't have media file so copy it over 'C' one
                    infile = os.path.join(srcdir, 'C', m)
                    mlog.log('Installing %s to %s' % (infile, outfile))
            if '/' in m or '\\' in m:
                os.makedirs(os.path.dirname(outfile), exist_ok=True)
            shutil.copyfile(infile, outfile)
            shutil.copystat(infile, outfile)
+
def run(args):
    '''Dispatch a yelp helper subcommand: pot, update-po, build or install.

    List-valued options (--langs, --media, --sources) arrive '@@'-separated.'''
    options = parser.parse_args(args)
    langs = options.langs.split('@@') if options.langs else []
    media = options.media.split('@@') if options.media else []
    sources = options.sources.split('@@')
    destdir = os.environ.get('DESTDIR', '')
    # These environment variables are provided by Meson when it runs us.
    src_subdir = os.path.join(os.environ['MESON_SOURCE_ROOT'], options.subdir)
    build_subdir = os.path.join(os.environ['MESON_BUILD_ROOT'], options.subdir)
    abs_sources = [os.path.join(src_subdir, 'C', source) for source in sources]

    if not langs:
        # No explicit languages: ask read_linguas() for them (presumably
        # the LINGUAS file in the source subdir -- confirm in gettext.py).
        langs = read_linguas(src_subdir)

    if options.command == 'pot':
        build_pot(src_subdir, options.project_id, sources)
    elif options.command == 'update-po':
        build_pot(src_subdir, options.project_id, sources)
        update_po(src_subdir, options.project_id, langs)
    elif options.command == 'build':
        if langs:
            build_translations(src_subdir, build_subdir, langs)
    elif options.command == 'install':
        install_dir = os.path.join(os.environ['MESON_INSTALL_PREFIX'], options.install_dir)
        if langs:
            build_translations(src_subdir, build_subdir, langs)
            merge_translations(build_subdir, abs_sources, langs)
        install_help(src_subdir, build_subdir, sources, media, langs, install_dir,
                     destdir, options.project_id, options.symlinks)
--- /dev/null
from enum import Enum

# Used for the --wrap-mode command-line argument
#
# Special wrap modes:
#   nofallback: Don't download wraps for dependency() fallbacks
#   nodownload: Don't download wraps for all subproject() calls
#
# Subprojects are used for two purposes:
# 1. To download and build dependencies via .wrap files when they are
#    not provided by the system, usually expressed as
#    dependency(..., fallback: ...).
# 2. To download and build 'copylibs', which are meant to be used by
#    copying them into your project; always an explicit subproject() call.
#
# --wrap-mode=nofallback will never do (1)
# --wrap-mode=nodownload will do neither (1) nor (2)
#
# If you are building from a release tarball, you should be able to
# safely use 'nodownload' since upstream is expected to ship all
# required sources with the tarball.
#
# If you are building from a git repository, you will want 'nofallback'
# so that any 'copylib' wraps are still downloaded as subprojects.
#
# Note that these options do not affect subprojects that are git
# submodules, since those are only usable in git repositories and you
# almost always want to download them.
class WrapMode(Enum):
    default = 1
    nofallback = 2
    nodownload = 3
--- /dev/null
+# Copyright 2015 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .. import mlog
+import contextlib
+import urllib.request, os, hashlib, shutil, tempfile, stat
+import subprocess
+import sys
+from pathlib import Path
+from . import WrapMode
+from ..mesonlib import Popen_safe
+
# Probe for SSL support at import time: some Python builds ship without
# the ssl module, in which case wrapdb traffic falls back to plain http.
try:
    import ssl
    has_ssl = True
    API_ROOT = 'https://wrapdb.mesonbuild.com/v1/'
except ImportError:
    has_ssl = False
    API_ROOT = 'http://wrapdb.mesonbuild.com/v1/'

# Ensures the "traffic not authenticated" warning is only printed once.
ssl_warning_printed = False
+
def build_ssl_context():
    """Create a TLS client context that rejects SSLv2/SSLv3 and requires a
    verified certificate chain from the system's default CA store."""
    ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
    # PROTOCOL_SSLv23 negotiates the best available protocol; explicitly
    # mask out the two broken legacy ones.
    ctx.options |= ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3
    ctx.verify_mode = ssl.CERT_REQUIRED
    ctx.load_default_certs()
    return ctx
+
def quiet_git(cmd, workingdir):
    """Run a git command in workingdir, capturing its output.

    Returns (True, stdout bytes) on success, (False, stderr bytes) on a
    nonzero exit."""
    child = subprocess.Popen(['git', '-C', workingdir] + cmd,
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = child.communicate()
    if child.returncode == 0:
        return True, out
    return False, err
+
def open_wrapdburl(urlstring):
    '''Open a URL to the wrap database, degrading to plain http when SSL
    is unavailable or the https connection fails; warns at most once.'''
    global ssl_warning_printed
    if has_ssl:
        try:
            # NOTE(review): the custom verifying context is commented out
            # in the original; the default urlopen context is used instead.
            return urllib.request.urlopen(urlstring)# , context=build_ssl_context())
        except urllib.error.URLError:
            if not ssl_warning_printed:
                print('SSL connection failed. Falling back to unencrypted connections.')
                ssl_warning_printed = True
    if not ssl_warning_printed:
        print('Warning: SSL not available, traffic not authenticated.',
              file=sys.stderr)
        ssl_warning_printed = True
    # Trying to open SSL connection to wrapdb fails because the
    # certificate is not known.
    if urlstring.startswith('https'):
        urlstring = 'http' + urlstring[5:]
    return urllib.request.urlopen(urlstring)
+
+
class PackageDefinition:
    """Parsed representation of a .wrap file.

    The first line selects the wrap type ([wrap-file], [wrap-git],
    [wrap-hg] or [wrap-svn]); every following non-empty line must be a
    'key = value' pair stored in self.values."""

    # Maps the section header on the first line to the wrap type.
    _TYPES = {'[wrap-file]': 'file',
              '[wrap-git]': 'git',
              '[wrap-hg]': 'hg',
              '[wrap-svn]': 'svn'}

    def __init__(self, fname):
        self.values = {}
        with open(fname) as ifile:
            header = ifile.readline().strip()

            if header not in self._TYPES:
                raise RuntimeError('Invalid format of package file')
            self.type = self._TYPES[header]
            for line in ifile:
                line = line.strip()
                if not line:
                    continue
                key, value = line.split('=', 1)
                self.values[key.strip()] = value.strip()

    def get(self, key):
        """Return the value for key; raises KeyError when absent."""
        return self.values[key]

    def has_patch(self):
        """True when this wrap carries an overlay patch archive."""
        return 'patch_url' in self.values
+
class Resolver:
    """Resolves subproject directories, downloading them when necessary.

    A subproject named N under subdir_root is satisfied, in order, by: an
    existing directory containing meson.build, a git submodule, or an
    N.wrap file describing a source archive / git / hg / svn location.
    """

    def __init__(self, subdir_root, wrap_mode=WrapMode(1)):
        # wrap_mode (a WrapMode) controls whether wrap-based downloads run.
        self.wrap_mode = wrap_mode
        self.subdir_root = subdir_root
        # Downloaded source and patch archives are cached here so that
        # re-configuring does not need the network again.
        self.cachedir = os.path.join(self.subdir_root, 'packagecache')

    def resolve(self, packagename):
        """Return the subproject's directory name, fetching it if needed.

        Raises RuntimeError when the subproject cannot be provided."""
        # Check if the directory is already resolved
        dirname = Path(os.path.join(self.subdir_root, packagename))
        subprojdir = os.path.join(*dirname.parts[-2:])
        if dirname.is_dir():
            if (dirname / 'meson.build').is_file():
                # The directory is there and has meson.build? Great, use it.
                return packagename
            # Is the dir not empty and also not a git submodule dir that is
            # not checked out properly? Can't do anything, exception!
            elif next(dirname.iterdir(), None) and not (dirname / '.git').is_file():
                m = '{!r} is not empty and has no meson.build files'
                raise RuntimeError(m.format(subprojdir))
        elif dirname.exists():
            m = '{!r} already exists and is not a dir; cannot use as subproject'
            raise RuntimeError(m.format(subprojdir))

        dirname = str(dirname)
        # Check if the subproject is a git submodule
        if self.resolve_git_submodule(dirname):
            return packagename

        # Don't download subproject data based on wrap file if requested.
        # Git submodules are ok (see above)!
        if self.wrap_mode is WrapMode.nodownload:
            m = 'Automatic wrap-based subproject downloading is disabled'
            raise RuntimeError(m)

        # Check if there's a .wrap file for this subproject
        fname = os.path.join(self.subdir_root, packagename + '.wrap')
        if not os.path.isfile(fname):
            # No wrap file with this name? Give up.
            m = 'No {}.wrap found for {!r}'
            raise RuntimeError(m.format(packagename, subprojdir))
        p = PackageDefinition(fname)
        if p.type == 'file':
            if not os.path.isdir(self.cachedir):
                os.mkdir(self.cachedir)
            self.download(p, packagename)
            self.extract_package(p)
        elif p.type == 'git':
            self.get_git(p)
        elif p.type == "hg":
            self.get_hg(p)
        elif p.type == "svn":
            self.get_svn(p)
        else:
            raise AssertionError('Unreachable code.')
        return p.get('directory')

    def resolve_git_submodule(self, dirname):
        """Try to populate dirname as a git submodule; True on success."""
        # Are we in a git repository?
        ret, out = quiet_git(['rev-parse'], self.subdir_root)
        if not ret:
            return False
        # Is `dirname` a submodule?
        ret, out = quiet_git(['submodule', 'status', dirname], self.subdir_root)
        if not ret:
            return False
        # Submodule has not been added, add it
        if out.startswith(b'-'):
            if subprocess.call(['git', '-C', self.subdir_root, 'submodule', 'update', '--init', dirname]) != 0:
                return False
        # Submodule was added already, but it wasn't populated. Do a checkout.
        elif out.startswith(b' '):
            # NOTE(review): this early-returns True when the checkout FAILS
            # (nonzero call()); the fall-through below also returns True, so
            # net behavior is "always True" here -- confirm intent upstream.
            if subprocess.call(['git', 'checkout', '.'], cwd=dirname):
                return True
        else:
            m = 'Unknown git submodule output: {!r}'
            raise AssertionError(m.format(out))
        return True

    def get_git(self, p):
        """Clone or update the wrap's git checkout at the pinned revision."""
        checkoutdir = os.path.join(self.subdir_root, p.get('directory'))
        revno = p.get('revision')
        is_there = os.path.isdir(checkoutdir)
        if is_there:
            try:
                subprocess.check_call(['git', 'rev-parse'], cwd=checkoutdir)
            except subprocess.CalledProcessError:
                raise RuntimeError('%s is not empty but is not a valid '
                                   'git repository, we can not work with it'
                                   ' as a subproject directory.' % (
                                       checkoutdir))

            if revno.lower() == 'head':
                # Failure to do pull is not a fatal error,
                # because otherwise you can't develop without
                # a working net connection.
                subprocess.call(['git', 'pull'], cwd=checkoutdir)
            else:
                # Fetch only when the pinned revision isn't already local.
                if subprocess.call(['git', 'checkout', revno], cwd=checkoutdir) != 0:
                    subprocess.check_call(['git', 'fetch'], cwd=checkoutdir)
                    subprocess.check_call(['git', 'checkout', revno],
                                          cwd=checkoutdir)
        else:
            subprocess.check_call(['git', 'clone', p.get('url'),
                                   p.get('directory')], cwd=self.subdir_root)
            if revno.lower() != 'head':
                subprocess.check_call(['git', 'checkout', revno],
                                      cwd=checkoutdir)
            # Configure a separate push URL when the wrap provides one.
            push_url = p.values.get('push-url')
            if push_url:
                subprocess.check_call(['git', 'remote', 'set-url',
                                       '--push', 'origin', push_url],
                                      cwd=checkoutdir)

    def get_hg(self, p):
        """Clone or update the wrap's mercurial checkout at the pinned revision."""
        checkoutdir = os.path.join(self.subdir_root, p.get('directory'))
        revno = p.get('revision')
        is_there = os.path.isdir(checkoutdir)
        if is_there:
            if revno.lower() == 'tip':
                # Failure to do pull is not a fatal error,
                # because otherwise you can't develop without
                # a working net connection.
                subprocess.call(['hg', 'pull'], cwd=checkoutdir)
            else:
                if subprocess.call(['hg', 'checkout', revno], cwd=checkoutdir) != 0:
                    subprocess.check_call(['hg', 'pull'], cwd=checkoutdir)
                    subprocess.check_call(['hg', 'checkout', revno],
                                          cwd=checkoutdir)
        else:
            subprocess.check_call(['hg', 'clone', p.get('url'),
                                   p.get('directory')], cwd=self.subdir_root)
            if revno.lower() != 'tip':
                subprocess.check_call(['hg', 'checkout', revno],
                                      cwd=checkoutdir)

    def get_svn(self, p):
        """Check out or update the wrap's svn working copy at the pinned revision."""
        checkoutdir = os.path.join(self.subdir_root, p.get('directory'))
        revno = p.get('revision')
        is_there = os.path.isdir(checkoutdir)
        if is_there:
            # BUGFIX: Popen_safe returns (process, stdout, stderr); the old
            # code unpacked that 3-tuple into two names (ValueError at
            # runtime) and shadowed the parameter 'p'.  Also strip the
            # trailing newline so the comparison with the wrap's revision
            # can ever succeed.
            out = Popen_safe(['svn', 'info', '--show-item', 'revision', checkoutdir])[1]
            current_revno = out.strip()
            if current_revno == revno:
                return

            if revno.lower() == 'head':
                # Failure to do pull is not a fatal error,
                # because otherwise you can't develop without
                # a working net connection.
                subprocess.call(['svn', 'update'], cwd=checkoutdir)
            else:
                subprocess.check_call(['svn', 'update', '-r', revno], cwd=checkoutdir)
        else:
            subprocess.check_call(['svn', 'checkout', '-r', revno, p.get('url'),
                                   p.get('directory')], cwd=self.subdir_root)

    def get_data(self, url):
        """Download url into a temp file inside the cache dir.

        Returns (sha256 hexdigest, temp file name); prints progress dots
        when the server reports a Content-Length."""
        blocksize = 10 * 1024
        h = hashlib.sha256()
        tmpfile = tempfile.NamedTemporaryFile(mode='wb', dir=self.cachedir, delete=False)
        if url.startswith('https://wrapdb.mesonbuild.com'):
            resp = open_wrapdburl(url)
        else:
            resp = urllib.request.urlopen(url)
        with contextlib.closing(resp) as resp:
            try:
                dlsize = int(resp.info()['Content-Length'])
            except TypeError:
                dlsize = None
            if dlsize is None:
                # No Content-Length header: stream to EOF, no progress bar.
                print('Downloading file of unknown size.')
                while True:
                    block = resp.read(blocksize)
                    if block == b'':
                        break
                    h.update(block)
                    tmpfile.write(block)
                hashvalue = h.hexdigest()
                return hashvalue, tmpfile.name
            print('Download size:', dlsize)
            print('Downloading: ', end='')
            sys.stdout.flush()
            printed_dots = 0
            downloaded = 0
            while True:
                block = resp.read(blocksize)
                if block == b'':
                    break
                downloaded += len(block)
                h.update(block)
                tmpfile.write(block)
                # One dot per 10% of the download.
                ratio = int(downloaded / dlsize * 10)
                while printed_dots < ratio:
                    print('.', end='')
                    sys.stdout.flush()
                    printed_dots += 1
            print('')
            hashvalue = h.hexdigest()
        return hashvalue, tmpfile.name

    def get_hash(self, data):
        """Return the sha256 hexdigest of the given bytes."""
        h = hashlib.sha256()
        h.update(data)
        hashvalue = h.hexdigest()
        return hashvalue

    def download(self, p, packagename):
        """Fetch the wrap's source (and optional patch) archive into the
        cache, verifying each sha256 hash against the wrap file."""
        ofname = os.path.join(self.cachedir, p.get('source_filename'))
        if os.path.exists(ofname):
            mlog.log('Using', mlog.bold(packagename), 'from cache.')
        else:
            srcurl = p.get('source_url')
            mlog.log('Downloading', mlog.bold(packagename), 'from', mlog.bold(srcurl))
            dhash, tmpfile = self.get_data(srcurl)
            expected = p.get('source_hash')
            if dhash != expected:
                os.remove(tmpfile)
                raise RuntimeError('Incorrect hash for source %s:\n %s expected\n %s actual.' % (packagename, expected, dhash))
            # Only publish the file under its final name once verified.
            os.rename(tmpfile, ofname)
        if p.has_patch():
            patch_filename = p.get('patch_filename')
            filename = os.path.join(self.cachedir, patch_filename)
            if os.path.exists(filename):
                mlog.log('Using', mlog.bold(patch_filename), 'from cache.')
            else:
                purl = p.get('patch_url')
                mlog.log('Downloading patch from', mlog.bold(purl))
                phash, tmpfile = self.get_data(purl)
                expected = p.get('patch_hash')
                if phash != expected:
                    os.remove(tmpfile)
                    raise RuntimeError('Incorrect hash for patch %s:\n %s expected\n %s actual' % (packagename, expected, phash))
                os.rename(tmpfile, filename)
        else:
            mlog.log('Package does not require patch.')

    def copy_tree(self, root_src_dir, root_dst_dir):
        """
        Copy directory tree. Overwrites also read only files.
        """
        for src_dir, dirs, files in os.walk(root_src_dir):
            dst_dir = src_dir.replace(root_src_dir, root_dst_dir, 1)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            for file_ in files:
                src_file = os.path.join(src_dir, file_)
                dst_file = os.path.join(dst_dir, file_)
                if os.path.exists(dst_file):
                    try:
                        os.remove(dst_file)
                    except PermissionError:
                        # Read-only destination: make it writable and retry.
                        os.chmod(dst_file, stat.S_IWUSR)
                        os.remove(dst_file)
                shutil.copy2(src_file, dst_dir)

    def extract_package(self, package):
        """Unpack the cached source archive (plus overlay patch, if any)
        into the subproject dir; no-op when the target dir already exists."""
        if sys.version_info < (3, 5):
            # shutil on old Pythons lacks xz support; register it manually
            # when the lzma module is available.
            try:
                import lzma # noqa: F401
                del lzma
            except ImportError:
                pass
            else:
                try:
                    shutil.register_unpack_format('xztar', ['.tar.xz', '.txz'], shutil._unpack_tarfile, [], "xz'ed tar-file")
                except shutil.RegistryError:
                    pass
        target_dir = os.path.join(self.subdir_root, package.get('directory'))
        if os.path.isdir(target_dir):
            return
        extract_dir = self.subdir_root
        # Some upstreams ship packages that do not have a leading directory.
        # Create one for them.
        try:
            package.get('lead_directory_missing')
            os.mkdir(target_dir)
            extract_dir = target_dir
        except KeyError:
            pass
        shutil.unpack_archive(os.path.join(self.cachedir, package.get('source_filename')), extract_dir)
        if package.has_patch():
            try:
                shutil.unpack_archive(os.path.join(self.cachedir, package.get('patch_filename')), self.subdir_root)
            except Exception:
                # Formats that cannot be overlaid onto an existing tree:
                # unpack to a scratch dir and copy the files over.
                with tempfile.TemporaryDirectory() as workdir:
                    shutil.unpack_archive(os.path.join(self.cachedir, package.get('patch_filename')), workdir)
                    self.copy_tree(workdir, self.subdir_root)
--- /dev/null
+# Copyright 2015-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import sys, os
+import configparser
+import shutil
+
+from glob import glob
+
+from .wrap import API_ROOT, open_wrapdburl
+
# Usage text for the wraptool command-line interface; the '%s' placeholder
# is filled with the program name (sys.argv[0]) by print_help().
help_templ = '''This program allows you to manage your Wrap dependencies
using the online wrap database http://wrapdb.mesonbuild.com.

Run this command in your top level source directory.

Usage:

%s <command> [options]

Commands:

 list - show all available projects
 search - search the db by name
 install - install the specified project
 update - update the project to its newest available release
 info - show available versions of a project
 status - show installed and available versions of your projects

'''
+
+
def print_help():
    """Print the usage text with the current program name filled in."""
    message = help_templ % sys.argv[0]
    print(message)
+
def get_result(urlstring):
    """Fetch *urlstring* from the wrap database and return the decoded
    JSON document. Exits the program if the server reports an error."""
    response = open_wrapdburl(urlstring)
    raw = response.read().decode('utf-8')
    parsed = json.loads(raw)
    if parsed['output'] == 'ok':
        return parsed
    print('Got bad output from server.')
    print(raw)
    sys.exit(1)
+
def get_projectlist():
    """Return the list of all project names known to the wrap database."""
    return get_result(API_ROOT + 'projects')['projects']
+
def list_projects():
    """Print every project available in the wrap database, one per line."""
    for project in get_projectlist():
        print(project)
+
def search(name):
    """Print the names of all wrapdb projects matching *name*."""
    matches = get_result(API_ROOT + 'query/byname/' + name)
    for project in matches['projects']:
        print(project)
+
def get_latest_version(name):
    """Return (branch, revision) of the newest release of *name*."""
    result = get_result(API_ROOT + 'query/get_latest/' + name)
    return result['branch'], result['revision']
+
def install(name):
    """Download the wrap file for the latest release of *name* into the
    subprojects directory. Exits with a message if preconditions fail."""
    if not os.path.isdir('subprojects'):
        print('Subprojects dir not found. Run this script in your source root directory.')
        sys.exit(1)
    if os.path.isdir(os.path.join('subprojects', name)):
        print('Subproject directory for this project already exists.')
        sys.exit(1)
    wrapfile = os.path.join('subprojects', name + '.wrap')
    if os.path.exists(wrapfile):
        print('Wrap file already exists.')
        sys.exit(1)
    branch, revision = get_latest_version(name)
    response = open_wrapdburl(API_ROOT + 'projects/%s/%s/%s/get_wrap' % (name, branch, revision))
    with open(wrapfile, 'wb') as f:
        f.write(response.read())
    print('Installed', name, 'branch', branch, 'revision', revision)
+
def get_current_version(wrapfile):
    """Parse *wrapfile* and return (branch, revision, directory,
    source_filename, patch_filename).

    The branch and revision are recovered from the patch_url, whose
    final components are .../<branch>/<revision>/get_zip.
    """
    parser = configparser.ConfigParser()
    parser.read(wrapfile)
    section = parser['wrap-file']
    url_parts = section['patch_url'].split('/')
    return (url_parts[-3], int(url_parts[-2]), section['directory'],
            section['source_filename'], section['patch_filename'])
+
def update(name):
    """Replace the wrap file of *name* with the newest wrapdb release and
    drop the stale extracted sources and cached archives."""
    if not os.path.isdir('subprojects'):
        print('Subprojects dir not found. Run this command in your source root directory.')
        sys.exit(1)
    wrapfile = os.path.join('subprojects', name + '.wrap')
    if not os.path.exists(wrapfile):
        print('Project', name, 'is not in use.')
        sys.exit(1)
    branch, revision, subdir, src_file, patch_file = get_current_version(wrapfile)
    new_branch, new_revision = get_latest_version(name)
    if (new_branch, new_revision) == (branch, revision):
        print('Project', name, 'is already up to date.')
        sys.exit(0)
    response = open_wrapdburl(API_ROOT + 'projects/%s/%s/%d/get_wrap' % (name, new_branch, new_revision))
    data = response.read()
    # Remove the extracted tree and cached downloads of the old version;
    # they get re-fetched on the next configure.
    shutil.rmtree(os.path.join('subprojects', subdir), ignore_errors=True)
    for stale in (src_file, patch_file):
        try:
            os.unlink(os.path.join('subprojects/packagecache', stale))
        except FileNotFoundError:
            pass
    with open(wrapfile, 'wb') as f:
        f.write(data)
    print('Updated', name, 'to branch', new_branch, 'revision', new_revision)
+
def info(name):
    """List every available (branch, revision) of *name* in the wrap db."""
    result = get_result(API_ROOT + 'projects/' + name)
    versions = result['versions']
    if not versions:
        print('No available versions of', name)
        sys.exit(0)
    print('Available versions of %s:' % name)
    for version in versions:
        print(' ', version['branch'], version['revision'])
+
def status():
    """For each wrap file in subprojects/, report whether a newer release
    is available in the wrap database."""
    print('Subproject status')
    for wrapfile in glob('subprojects/*.wrap'):
        # Strip the directory part and the trailing '.wrap' suffix.
        name = os.path.split(wrapfile)[1][:-5]
        try:
            latest_branch, latest_revision = get_latest_version(name)
        except Exception:
            print('', name, 'not available in wrapdb.')
            continue
        try:
            current_branch, current_revision, _, _, _ = get_current_version(wrapfile)
        except Exception:
            print('Wrap file not from wrapdb.')
            continue
        if (current_branch, current_revision) == (latest_branch, latest_revision):
            print('', name, 'up to date. Branch %s, revision %d.' % (current_branch, current_revision))
        else:
            print('', name, 'not up to date. Have %s %d, but %s %d is available.' % (current_branch, current_revision, latest_branch, latest_revision))
+
def run(args):
    """Dispatch the wraptool command line; returns the process exit code."""
    if not args or args[0] in ('-h', '--help'):
        print_help()
        return 0
    command = args[0]
    rest = args[1:]
    # Commands that take no argument.
    if command == 'list':
        list_projects()
        return 0
    if command == 'status':
        status()
        return 0
    # Commands that take exactly one argument.
    if command == 'search':
        if len(rest) != 1:
            print('Search requires exactly one argument.')
            return 1
        search(rest[0])
        return 0
    if command == 'install':
        if len(rest) != 1:
            print('Install requires exactly one argument.')
            return 1
        install(rest[0])
        return 0
    if command == 'update':
        if len(rest) != 1:
            print('update requires exactly one argument.')
            return 1
        update(rest[0])
        return 0
    if command == 'info':
        if len(rest) != 1:
            print('info requires exactly one argument.')
            return 1
        info(rest[0])
        return 0
    print('Unknown command', command)
    return 1
+
# Allow running this module directly as a script.
if __name__ == '__main__':
    sys.exit(run(sys.argv[1:]))
--- /dev/null
+#!/usr/bin/env python3
+
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from mesonbuild import mesonmain
+import sys
+
# Deprecated standalone entry point kept for backwards compatibility;
# forwards everything to the unified "meson configure" command.
if __name__ == '__main__':
    print('Warning: This executable is deprecated. Use "meson configure" instead.',
          file=sys.stderr)
    sys.exit(mesonmain.run(['configure'] + sys.argv[1:]))
--- /dev/null
+#!/usr/bin/env python3
+
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from mesonbuild import mesonmain
+import sys
+
# Deprecated standalone entry point kept for backwards compatibility;
# forwards everything to the unified "meson introspect" command.
if __name__ == '__main__':
    print('Warning: This executable is deprecated. Use "meson introspect" instead.',
          file=sys.stderr)
    sys.exit(mesonmain.run(['introspect'] + sys.argv[1:]))
--- /dev/null
+#!/usr/bin/env python3
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This class contains the basic functionality needed to run any interpreter
+# or an interpreter-based tool.
+
+# This tool is used to manipulate an existing Meson build definition.
+#
+# - add a file to a target
+# - remove files from a target
+# - move targets
+# - reindent?
+
+from mesonbuild import mesonmain
+import sys
+
# Deprecated standalone entry point kept for backwards compatibility;
# forwards everything to the unified "meson rewrite" command.
if __name__ == '__main__':
    print('Warning: This executable is deprecated. Use "meson rewrite" instead.',
          file=sys.stderr)
    sys.exit(mesonmain.run(['rewrite'] + sys.argv[1:]))
--- /dev/null
+#!/usr/bin/env python3
+
+# Copyright 2016-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# A tool to run tests in many different ways.
+
+from mesonbuild import mesonmain
+import sys
+
# Deprecated standalone entry point kept for backwards compatibility;
# forwards everything to the unified "meson test" command.
if __name__ == '__main__':
    print('Warning: This executable is deprecated. Use "meson test" instead.',
          file=sys.stderr)
    sys.exit(mesonmain.run(['test'] + sys.argv[1:]))
--- /dev/null
+#!/usr/bin/env python3
+
+# Copyright 2013-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''Runs the basic test suite through a cross compiler.
+Not part of the main test suite because of two reasons:
+
+1) setup of the cross build is platform specific
+2) it can be slow (e.g. when invoking test apps via wine)
+
+Eventually migrate to something fancier.'''
+
+import sys, os
+
+from run_project_tests import gather_tests, run_tests, StopException, setup_commands
+from run_project_tests import failing_logs
+
def runtests(cross_file):
    """Run the 'common' project tests through the given cross file and
    exit with the number of failing tests as the status code."""
    commontests = [('common', gather_tests('test cases/common'), False)]
    # Initialize the counters up front: if the run is interrupted and
    # run_tests raises StopException before returning, the summary below
    # would otherwise hit unbound locals (NameError).
    passing_tests = failing_tests = skipped_tests = 0
    try:
        (passing_tests, failing_tests, skipped_tests) = run_tests(commontests, 'meson-cross-test-run', ['--cross', cross_file])
    except StopException:
        pass
    print('\nTotal passed cross tests:', passing_tests)
    print('Total failed cross tests:', failing_tests)
    print('Total skipped cross tests:', skipped_tests)
    if failing_tests > 0 and ('TRAVIS' in os.environ or 'APPVEYOR' in os.environ):
        # On CI, dump the meson logs of failing tests for debugging.
        print('\nMesonlogs of failing tests\n')
        for log in failing_logs:
            print(log, '\n')
    sys.exit(failing_tests)
+
# Usage: run_cross_test.py <cross file>; always uses the ninja backend.
if __name__ == '__main__':
    setup_commands('ninja')
    cross_file = sys.argv[1]
    runtests(cross_file)
--- /dev/null
+#!/usr/bin/env python3
+
+# Copyright 2012-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from glob import glob
+import itertools
+import os, subprocess, shutil, sys, signal
+from io import StringIO
+from ast import literal_eval
+from enum import Enum
+import tempfile
+from mesonbuild import mtest
+from mesonbuild import environment
+from mesonbuild import mesonlib
+from mesonbuild import mlog
+from mesonbuild.mesonlib import stringlistify, Popen_safe
+from mesonbuild.coredata import backendlist
+import argparse
+import xml.etree.ElementTree as ET
+import time
+import multiprocessing
+import concurrent.futures as conc
+import re
+from run_unittests import get_fake_options, run_configure
+
+from run_tests import get_backend_commands, get_backend_args_for_dir, Backend
+from run_tests import ensure_backend_detects_changes
+
+
class BuildStep(Enum):
    """Phases of a single project test; used to report where it failed."""
    configure = 1
    build = 2
    test = 3
    install = 4
    clean = 5
    validate = 6
+
+
class TestResult:
    """Plain record describing one project test's outcome and timings."""

    def __init__(self, msg, step, stdo, stde, mlog, conftime=0, buildtime=0, testtime=0):
        # Failure description; the empty string means the test passed.
        self.msg = msg
        # The BuildStep during which this result was produced.
        self.step = step
        # Captured stdout / stderr.
        self.stdo = stdo
        self.stde = stde
        # Contents of meson-log.txt.
        self.mlog = mlog
        # Elapsed wall-clock seconds for configure / build / test phases.
        self.conftime = conftime
        self.buildtime = buildtime
        self.testtime = testtime
+
class DummyFuture(conc.Future):
    '''
    Lazily-evaluated Future: the stored function only runs when result()
    is requested. Used on platforms where sem_open() is not available:
    MSYS2, OpenBSD, etc: https://bugs.python.org/issue3770
    '''
    def set_function(self, fn, *args, **kwargs):
        # Remember the call to perform later.
        self.fn = fn
        self.fn_args = args
        self.fn_kwargs = kwargs

    def result(self, **kwargs):
        # Execute now, record outcome, then let the base class report it.
        try:
            value = self.fn(*self.fn_args, **self.fn_kwargs)
        except BaseException as e:
            self.set_exception(e)
        else:
            self.set_result(value)
        return super().result(**kwargs)
+
+
class DummyExecutor(conc.Executor):
    '''
    Single-threaded stand-in for a concurrent executor, for platforms
    where sem_open is not available: https://bugs.python.org/issue3770
    '''

    def __init__(self):
        from threading import Lock
        self._shutdown = False
        self._shutdownLock = Lock()

    def submit(self, fn, *args, **kwargs):
        """Return a DummyFuture that runs *fn* when its result is asked for."""
        with self._shutdownLock:
            if self._shutdown:
                raise RuntimeError('Cannot schedule new futures after shutdown')
            future = DummyFuture()
            future.set_function(fn, *args, **kwargs)
            return future

    def shutdown(self, wait=True):
        """Refuse any further submissions."""
        with self._shutdownLock:
            self._shutdown = True
+
+
class AutoDeletedDir:
    """Context manager that creates a directory on entry and force-deletes
    it on exit."""

    def __init__(self, d):
        self.dir = d

    def __enter__(self):
        os.makedirs(self.dir, exist_ok=True)
        return self.dir

    def __exit__(self, _type, value, traceback):
        # tempfile.TemporaryDirectory is deliberately not used: deletion
        # can transiently fail on Windows because antivirus programs hold
        # files open, and windows_proof_rmtree copes with that.
        mesonlib.windows_proof_rmtree(self.dir)
+
# Meson logs of failed tests, dumped at the end of a CI run.
failing_logs = []
# Echo each test's stdout/stderr as it runs.
print_debug = 'MESON_PRINT_TEST_OUTPUT' in os.environ
# True when any of these environment variables is set.
do_debug = not {'MESON_PRINT_TEST_OUTPUT', 'TRAVIS', 'APPVEYOR'}.isdisjoint(os.environ)
no_meson_log_msg = 'No meson-log.txt found.'

# Compiler used by the test projects; populated elsewhere at startup.
system_compiler = None

# Locate the main Meson entry script relative to the current working
# directory, trying 'meson' first and then 'meson.py'.
meson_command = os.path.join(os.getcwd(), 'meson')
if not os.path.exists(meson_command):
    meson_command += '.py'
    if not os.path.exists(meson_command):
        raise RuntimeError('Could not find main Meson script to run.')
+
class StopException(Exception):
    """Raised to unwind the test runner after the user requested a stop."""

    def __init__(self):
        super().__init__('Stopped by user')
+
# Global flag flipped by the signal handlers below; polled between tests
# so the run can stop gracefully instead of dying mid-test.
stop = False
def stop_handler(signal, frame):
    global stop
    stop = True
signal.signal(signal.SIGINT, stop_handler)
signal.signal(signal.SIGTERM, stop_handler)
+
def setup_commands(optbackend):
    """Select the build backend (auto-detecting when *optbackend* is None)
    and populate the module-global backend/command variables.

    Sets: backend, backend_flags, compile_commands, clean_commands,
    test_commands, install_commands, uninstall_commands.
    """
    global do_debug, backend, backend_flags
    global compile_commands, clean_commands, test_commands, install_commands, uninstall_commands
    backend = optbackend
    msbuild_exe = shutil.which('msbuild')
    # Auto-detect backend if unspecified
    if backend is None:
        if msbuild_exe is not None:
            backend = 'vs'  # Meson will auto-detect VS version to use
        elif mesonlib.is_osx():
            backend = 'xcode'
        else:
            backend = 'ninja'
    # Set backend arguments for Meson
    if backend.startswith('vs'):
        # Accepts both plain 'vs' and a versioned name such as 'vs2015';
        # the specific name is passed through to Meson.
        backend_flags = ['--backend=' + backend]
        backend = Backend.vs
    elif backend == 'xcode':
        backend_flags = ['--backend=xcode']
        backend = Backend.xcode
    elif backend == 'ninja':
        backend_flags = ['--backend=ninja']
        backend = Backend.ninja
    else:
        raise RuntimeError('Unknown backend: {!r}'.format(backend))
    compile_commands, clean_commands, test_commands, install_commands, \
        uninstall_commands = get_backend_commands(backend, do_debug)
+
def get_relative_files_list_from_dir(fromdir):
    """Return all file paths found under *fromdir*, relative to it, always
    using forward slashes as separators."""
    collected = []
    for root, _, files in os.walk(fromdir):
        reldir = os.path.relpath(root, start=fromdir)
        for name in files:
            relpath = os.path.join(reldir, name).replace('\\', '/')
            # relpath() yields '.' for the top directory; strip the prefix.
            if relpath.startswith('./'):
                relpath = relpath[2:]
            collected.append(relpath)
    return collected
+
def platform_fix_name(fname, compiler):
    """Resolve the '?'-prefixed platform placeholders used in
    installed_files.txt entries.

    Returns the adjusted file name, or None when the entry does not apply
    to the current platform/compiler combination.
    """
    # '?lib' marks a shared library: cygwin renames to a 'cyg' prefix,
    # everything else uses plain 'lib'.
    if '?lib' in fname:
        if mesonlib.is_cygwin():
            fname = re.sub(r'\?lib(.*)\.dll$', r'cyg\1.dll', fname)
        else:
            fname = re.sub(r'\?lib', 'lib', fname)

    # '?exe' suffix: executables gain '.exe' only on Windows/cygwin.
    if fname.endswith('?exe'):
        fname = fname[:-4]
        if mesonlib.is_windows() or mesonlib.is_cygwin():
            return fname + '.exe'

    # '?msvc:' entries only exist when compiling with cl.
    if fname.startswith('?msvc:'):
        fname = fname[6:]
        if compiler != 'cl':
            return None

    # '?gcc:' entries only exist when NOT compiling with cl.
    if fname.startswith('?gcc:'):
        fname = fname[5:]
        if compiler == 'cl':
            return None

    return fname
+
def validate_install(srcdir, installdir, compiler):
    """Compare the contents of *installdir* against the expectations in
    <srcdir>/installed_files.txt.

    Returns '' on success, otherwise a message listing every missing or
    unexpected file.
    """
    # List of installed files
    info_file = os.path.join(srcdir, 'installed_files.txt')
    # If this exists, the test does not install any other files
    noinst_file = 'usr/no-installed-files'
    expected = {}
    ret_msg = ''
    # Generate list of expected files
    if os.path.exists(os.path.join(installdir, noinst_file)):
        expected[noinst_file] = False
    elif os.path.exists(info_file):
        with open(info_file) as f:
            for line in f:
                # platform_fix_name returns None for entries that do not
                # apply on this platform/compiler; those are skipped.
                line = platform_fix_name(line.strip(), compiler)
                if line:
                    expected[line] = False
    # Check if expected files were found
    for fname in expected:
        file_path = os.path.join(installdir, fname)
        if os.path.exists(file_path) or os.path.islink(file_path):
            expected[fname] = True
    for (fname, found) in expected.items():
        if not found:
            # Ignore missing PDB files if we aren't using cl
            if fname.endswith('.pdb') and compiler != 'cl':
                continue
            ret_msg += 'Expected file {0} missing.\n'.format(fname)
    # Check if there are any unexpected files
    found = get_relative_files_list_from_dir(installdir)
    for fname in found:
        # Windows-specific tests check for the existence of installed PDB
        # files, but common tests do not, for obvious reasons. Ignore any
        # extra PDB files found.
        # NOTE(review): extra files are only reported when compiler == 'cl';
        # with any other compiler unexpected files pass silently. Confirm
        # whether that restriction is intentional.
        if fname not in expected and not fname.endswith('.pdb') and compiler == 'cl':
            ret_msg += 'Extra file {0} found.\n'.format(fname)
    return ret_msg
+
def log_text_file(logfile, testdir, stdo, stde):
    """Append one test's stdout/stderr to the combined text log.

    If a stop was requested via SIGINT/SIGTERM (see stop_handler), cancel
    all outstanding futures and abort the run by raising StopException.
    """
    global stop, executor, futures
    logfile.write('%s\nstdout\n\n---\n' % testdir)
    logfile.write(stdo)
    logfile.write('\n\n---\n\nstderr\n\n---\n')
    logfile.write(stde)
    logfile.write('\n\n---\n\n')
    if print_debug:
        # Mirror the captured output to the console when requested.
        print(stdo)
        print(stde, file=sys.stderr)
    if stop:
        print("Aborting..")
        # futures entries are (testname, testdir, future) tuples.
        for f in futures:
            f[2].cancel()
        executor.shutdown()
        raise StopException()
+
+
def _console_text(decorated):
    # Render one of mlog's decorated strings to plain text, honoring
    # whether the console supports color.
    return decorated.get_text(mlog.colorize_console)


def bold(text):
    return _console_text(mlog.bold(text))


def green(text):
    return _console_text(mlog.green(text))


def red(text):
    return _console_text(mlog.red(text))


def yellow(text):
    return _console_text(mlog.yellow(text))
+
+
def run_test_inprocess(testdir):
    """Run 'meson test' and 'meson benchmark' for a configured build dir
    inside this process, capturing their stdout/stderr.

    Returns (max of both return codes, stdout, stderr, testlog contents).
    """
    # Swap in StringIO buffers so in-process mtest output is captured.
    old_stdout = sys.stdout
    sys.stdout = mystdout = StringIO()
    old_stderr = sys.stderr
    sys.stderr = mystderr = StringIO()
    old_cwd = os.getcwd()
    os.chdir(testdir)
    test_log_fname = 'meson-logs/testlog.txt'
    try:
        returncode_test = mtest.run(['--no-rebuild'])
        if os.path.exists(test_log_fname):
            test_log = open(test_log_fname, errors='ignore').read()
        else:
            test_log = ''
        returncode_benchmark = mtest.run(['--no-rebuild', '--benchmark', '--logbase', 'benchmarklog'])
    finally:
        # Always restore the real streams and working directory, even when
        # mtest raises.
        sys.stdout = old_stdout
        sys.stderr = old_stderr
        os.chdir(old_cwd)
    return max(returncode_test, returncode_benchmark), mystdout.getvalue(), mystderr.getvalue(), test_log
+
def parse_test_args(testdir):
    """Read extra meson arguments from <testdir>/test_args.txt.

    Returns [] when the file does not exist; raises when the file exists
    but does not hold a valid Python literal.
    """
    try:
        with open(os.path.join(testdir, 'test_args.txt'), 'r') as f:
            raw = f.read()
        try:
            parsed = literal_eval(raw)
        except Exception:
            raise Exception('Malformed test_args file.')
        return stringlistify(parsed)
    except FileNotFoundError:
        return []
+
def run_test(skipped, testdir, extra_args, compiler, backend, flags, commands, should_fail):
    """Run one project test in throwaway build/install directories.

    Returns the TestResult, or None when the test is skipped.
    """
    if skipped:
        return None
    with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir='.')) as build_dir, \
            AutoDeletedDir(tempfile.mkdtemp(prefix='i ', dir=os.getcwd())) as install_dir:
        try:
            return _run_test(testdir, build_dir, install_dir, extra_args, compiler, backend, flags, commands, should_fail)
        finally:
            # Close the log file because otherwise Windows wets itself.
            mlog.shutdown()
+
def pass_prefix_to_test(dirname):
    """The '40 prefix' test sets its own --prefix; skip the default one."""
    return '40 prefix' not in dirname
+
def pass_libdir_to_test(dirname):
    """Tests that exercise install/libdir handling set --libdir themselves;
    skip the default for those."""
    return '8 install' not in dirname and '39 libdir' not in dirname
+
def _run_test(testdir, test_build_dir, install_dir, extra_args, compiler, backend, flags, commands, should_fail):
    """Configure, build, unit-test, install and validate one project test.

    *should_fail* is either False or the name of the phase ('meson',
    'build', 'test') that is expected to fail. Returns a TestResult.
    """
    compile_commands, clean_commands, install_commands, uninstall_commands = commands
    test_args = parse_test_args(testdir)
    gen_start = time.time()
    # Configure in-process
    if pass_prefix_to_test(testdir):
        gen_args = ['--prefix', '/usr']
    else:
        gen_args = []
    if pass_libdir_to_test(testdir):
        gen_args += ['--libdir', 'lib']
    gen_args += [testdir, test_build_dir] + flags + test_args + extra_args
    (returncode, stdo, stde) = run_configure(meson_command, gen_args)
    try:
        logfile = os.path.join(test_build_dir, 'meson-logs/meson-log.txt')
        with open(logfile, errors='ignore') as f:
            mesonlog = f.read()
    except Exception:
        mesonlog = no_meson_log_msg
    gen_time = time.time() - gen_start
    if should_fail == 'meson':
        # A configure failure is the expected outcome here.
        if returncode != 0:
            return TestResult('', BuildStep.configure, stdo, stde, mesonlog, gen_time)
        return TestResult('Test that should have failed succeeded', BuildStep.configure, stdo, stde, mesonlog, gen_time)
    if returncode != 0:
        return TestResult('Generating the build system failed.', BuildStep.configure, stdo, stde, mesonlog, gen_time)
    # Touch the meson.build file to force a regenerate so we can test that
    # regeneration works before a build is run.
    ensure_backend_detects_changes(backend)
    os.utime(os.path.join(testdir, 'meson.build'))
    # Build with subprocess
    dir_args = get_backend_args_for_dir(backend, test_build_dir)
    build_start = time.time()
    pc, o, e = Popen_safe(compile_commands + dir_args, cwd=test_build_dir)
    build_time = time.time() - build_start
    stdo += o
    stde += e
    if should_fail == 'build':
        # A build failure is the expected outcome here.
        if pc.returncode != 0:
            return TestResult('', BuildStep.build, stdo, stde, mesonlog, gen_time)
        return TestResult('Test that should have failed to build succeeded', BuildStep.build, stdo, stde, mesonlog, gen_time)
    if pc.returncode != 0:
        return TestResult('Compiling source code failed.', BuildStep.build, stdo, stde, mesonlog, gen_time, build_time)
    # Touch the meson.build file to force a regenerate so we can test that
    # regeneration works after a build is complete.
    ensure_backend_detects_changes(backend)
    os.utime(os.path.join(testdir, 'meson.build'))
    test_start = time.time()
    # Test in-process
    (returncode, tstdo, tstde, test_log) = run_test_inprocess(test_build_dir)
    test_time = time.time() - test_start
    stdo += tstdo
    stde += tstde
    mesonlog += test_log
    if should_fail == 'test':
        # A unit-test failure is the expected outcome here.
        if returncode != 0:
            return TestResult('', BuildStep.test, stdo, stde, mesonlog, gen_time)
        return TestResult('Test that should have failed to run unit tests succeeded', BuildStep.test, stdo, stde, mesonlog, gen_time)
    if returncode != 0:
        return TestResult('Running unit tests failed.', BuildStep.test, stdo, stde, mesonlog, gen_time, build_time, test_time)
    # Do installation, if the backend supports it
    if install_commands:
        env = os.environ.copy()
        env['DESTDIR'] = install_dir
        # Install with subprocess
        pi, o, e = Popen_safe(install_commands, cwd=test_build_dir, env=env)
        stdo += o
        stde += e
        if pi.returncode != 0:
            return TestResult('Running install failed.', BuildStep.install, stdo, stde, mesonlog, gen_time, build_time, test_time)
    # Clean with subprocess
    env = os.environ.copy()
    pi, o, e = Popen_safe(clean_commands + dir_args, cwd=test_build_dir, env=env)
    stdo += o
    stde += e
    if pi.returncode != 0:
        return TestResult('Running clean failed.', BuildStep.clean, stdo, stde, mesonlog, gen_time, build_time, test_time)
    if not install_commands:
        # Nothing to validate when the backend cannot install.
        return TestResult('', BuildStep.install, '', '', mesonlog, gen_time, build_time, test_time)
    return TestResult(validate_install(testdir, install_dir, compiler), BuildStep.validate, stdo, stde, mesonlog, gen_time, build_time, test_time)
+
def gather_tests(testdir):
    """Return the test-case directories under *testdir* sorted by their
    leading number (numeric, not lexical, so '10 x' follows '2 y')."""
    # Keep only the path component after 'test cases/<suite>/'.
    names = [entry.replace('\\', '/').split('/', 2)[2] for entry in glob(testdir + '/*')]
    ordered = sorted((int(name.split()[0]), name) for name in names)
    return [os.path.join(testdir, name) for _, name in ordered]
+
def have_d_compiler():
    """Return True when any known D compiler is found on PATH."""
    for candidate in ('ldc2', 'ldc', 'gdc', 'dmd'):
        if shutil.which(candidate):
            return True
    return False
+
def have_objc_compiler():
    """Return True when working Objective-C and Objective-C++ compilers
    are available (each verified with a sanity check)."""
    with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir='.')) as build_dir:
        env = environment.Environment(None, build_dir, None, get_fake_options('/'), [])
        try:
            objc_comp = env.detect_objc_compiler(False)
        except Exception:
            # Narrowed from a bare except: detection failures raise
            # ordinary exceptions; don't swallow KeyboardInterrupt.
            return False
        if not objc_comp:
            return False
        try:
            objc_comp.sanity_check(env.get_scratch_dir(), env)
            # Bug fix: this previously re-detected the Objective-C compiler
            # instead of the Objective-C++ one.
            objcpp_comp = env.detect_objcpp_compiler(False)
        except Exception:
            return False
        if not objcpp_comp:
            return False
        try:
            objcpp_comp.sanity_check(env.get_scratch_dir(), env)
        except Exception:
            return False
        return True
+
def have_java():
    """Return True when both a Java compiler and a Java runtime are on PATH."""
    return bool(shutil.which('javac') and shutil.which('java'))
+
def detect_tests_to_run():
    """Build the list of (suite name, test case paths, skipped) tuples for
    every test suite, marking suites whose toolchain or backend is not
    available on this machine as skipped."""
    # Name, subdirectory, skip condition.
    all_tests = [
        ('common', 'common', False),
        ('failing-meson', 'failing', False),
        ('failing-build', 'failing build', False),
        ('failing-tests', 'failing tests', False),

        ('platform-osx', 'osx', not mesonlib.is_osx()),
        ('platform-windows', 'windows', not mesonlib.is_windows() and not mesonlib.is_cygwin()),
        ('platform-linux', 'linuxlike', mesonlib.is_osx() or mesonlib.is_windows()),

        # Language suites are skipped when the compiler is missing or the
        # current backend cannot build them.
        ('java', 'java', backend is not Backend.ninja or mesonlib.is_osx() or not have_java()),
        ('C#', 'csharp', backend is not Backend.ninja or not shutil.which('mcs')),
        ('vala', 'vala', backend is not Backend.ninja or not shutil.which('valac')),
        ('rust', 'rust', backend is not Backend.ninja or not shutil.which('rustc')),
        ('d', 'd', backend is not Backend.ninja or not have_d_compiler()),
        ('objective c', 'objc', backend not in (Backend.ninja, Backend.xcode) or mesonlib.is_windows() or not have_objc_compiler()),
        ('fortran', 'fortran', backend is not Backend.ninja or not shutil.which('gfortran')),
        ('swift', 'swift', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('swiftc')),
        ('python3', 'python3', backend is not Backend.ninja),
    ]
    gathered_tests = [(name, gather_tests('test cases/' + subdir), skip) for name, subdir, skip in all_tests]
    if mesonlib.is_windows():
        # TODO: Set BOOST_ROOT in .appveyor.yml
        gathered_tests += [('framework', ['test cases/frameworks/1 boost'], 'BOOST_ROOT' not in os.environ)]
    elif mesonlib.is_osx() or mesonlib.is_cygwin():
        gathered_tests += [('framework', gather_tests('test cases/frameworks'), True)]
    else:
        gathered_tests += [('framework', gather_tests('test cases/frameworks'), False)]
    return gathered_tests
+
def run_tests(all_tests, log_name_base, extra_args):
    """Open the combined text log and delegate to _run_tests.

    Returns the (passing, failing, skipped) counters.
    """
    global logfile
    with open(log_name_base + '.txt', 'w', encoding="utf_8") as log_handle:
        logfile = log_handle
        return _run_tests(all_tests, log_name_base, extra_args)
+
def _run_tests(all_tests, log_name_base, extra_args):
    """Execute every test case in *all_tests*, in parallel where possible,
    writing a JUnit XML report next to the text log.

    Returns (passing_tests, failing_tests, skipped_tests).
    """
    global stop, executor, futures, system_compiler
    xmlname = log_name_base + '.xml'
    junit_root = ET.Element('testsuites')
    conf_time = 0
    build_time = 0
    test_time = 0
    passing_tests = 0
    failing_tests = 0
    skipped_tests = 0
    commands = (compile_commands, clean_commands, install_commands, uninstall_commands)

    try:
        # This fails in some CI environments for unknown reasons.
        num_workers = multiprocessing.cpu_count()
    except Exception as e:
        print('Could not determine number of CPUs due to the following reason:' + str(e))
        print('Defaulting to using only one process')
        num_workers = 1
    # Due to Ninja deficiency, almost 50% of build time
    # is spent waiting. Do something useful instead.
    #
    # Remove this once the following issue has been resolved:
    # https://github.com/mesonbuild/meson/pull/2082
    num_workers *= 2
    try:
        executor = conc.ProcessPoolExecutor(max_workers=num_workers)
    except ImportError:
        # sem_open() unavailable (MSYS2, OpenBSD, ...); see DummyExecutor.
        print('Platform doesn\'t ProcessPoolExecutor, falling back to single-threaded testing\n')
        executor = DummyExecutor()

    for name, test_cases, skipped in all_tests:
        current_suite = ET.SubElement(junit_root, 'testsuite', {'name': name, 'tests': str(len(test_cases))})
        print()
        if skipped:
            print(bold('Not running %s tests.' % name))
        else:
            print(bold('Running %s tests.' % name))
        print()
        # Submit every test case first, then collect results in order.
        futures = []
        for t in test_cases:
            # Jenkins screws us over by automatically sorting test cases by name
            # and getting it wrong by not doing logical number sorting.
            (testnum, testbase) = os.path.split(t)[-1].split(' ', 1)
            testname = '%.3d %s' % (int(testnum), testbase)
            should_fail = False
            if name.startswith('failing'):
                # 'failing-meson' / 'failing-build' / 'failing-tests':
                # the suffix names the phase that is expected to fail.
                should_fail = name.split('failing-')[1]
            result = executor.submit(run_test, skipped, t, extra_args, system_compiler, backend, backend_flags, commands, should_fail)
            futures.append((testname, t, result))
        for (testname, t, result) in futures:
            sys.stdout.flush()
            result = result.result()
            if result is None or 'MESON_SKIP_TEST' in result.stdo:
                print(yellow('Skipping:'), t)
                current_test = ET.SubElement(current_suite, 'testcase', {'name': testname,
                                                                        'classname': name})
                ET.SubElement(current_test, 'skipped', {})
                skipped_tests += 1
            else:
                without_install = "" if len(install_commands) > 0 else " (without install)"
                if result.msg != '':
                    print(red('Failed test{} during {}: {!r}'.format(without_install, result.step.name, t)))
                    print('Reason:', result.msg)
                    failing_tests += 1
                    if result.step == BuildStep.configure and result.mlog != no_meson_log_msg:
                        # For configure failures, instead of printing stdout,
                        # print the meson log if available since it's a superset
                        # of stdout and often has very useful information.
                        failing_logs.append(result.mlog)
                    else:
                        failing_logs.append(result.stdo)
                    failing_logs.append(result.stde)
                else:
                    print('Succeeded test%s: %s' % (without_install, t))
                    passing_tests += 1
                conf_time += result.conftime
                build_time += result.buildtime
                test_time += result.testtime
                # NOTE(review): total_time accumulates across all tests, so
                # the per-testcase 'time' attribute below grows monotonically.
                # Confirm whether per-test time was intended instead.
                total_time = conf_time + build_time + test_time
                log_text_file(logfile, t, result.stdo, result.stde)
                current_test = ET.SubElement(current_suite, 'testcase', {'name': testname,
                                                                        'classname': name,
                                                                        'time': '%.3f' % total_time})
                if result.msg != '':
                    ET.SubElement(current_test, 'failure', {'message': result.msg})
                stdoel = ET.SubElement(current_test, 'system-out')
                stdoel.text = result.stdo
                stdeel = ET.SubElement(current_test, 'system-err')
                stdeel.text = result.stde
    print("\nTotal configuration time: %.2fs" % conf_time)
    print("Total build time: %.2fs" % build_time)
    print("Total test time: %.2fs" % test_time)
    ET.ElementTree(element=junit_root).write(xmlname, xml_declaration=True, encoding='UTF-8')
    return passing_tests, failing_tests, skipped_tests
+
def check_file(fname):
    '''Verify that fname uses spaces for indentation and unix line endings.

    Prints a diagnostic and exits the process with status 1 on the first
    offending line; returns None if the file is clean.
    '''
    # Read in binary mode so line endings are seen verbatim (text mode
    # would translate '\r\n' and hide the very thing we check for).
    with open(fname, 'rb') as f:
        lines = f.readlines()
    for linenum, line in enumerate(lines, start=1):
        if line.startswith(b'\t'):
            print("File %s contains a literal tab on line %d. Only spaces are permitted." % (fname, linenum))
            sys.exit(1)
        if b'\r' in line:
            print("File %s contains DOS line ending on line %d. Only unix-style line endings are permitted." % (fname, linenum))
            sys.exit(1)
+
def check_format():
    '''Run the whitespace/line-ending check on every Meson source file
    found below the current working directory.'''
    # Files we police: Python sources, meson.build files, and option files.
    checked_suffixes = ('.py', '.build')
    for root, _, files in os.walk('.'):
        for filename in files:
            if filename.endswith(checked_suffixes) or filename == 'meson_options.txt':
                check_file(os.path.join(root, filename))
+
def check_meson_commands_work():
    '''Smoke-test configure/build/test (and install, when the backend
    supports it) on the trivial project before running the full suite.'''
    global backend, meson_command, compile_commands, test_commands, install_commands
    testdir = 'test cases/common/1 trivial'

    def run_checked(phase, fail_verb, command, **popen_kwargs):
        # Execute one phase, aborting with the captured output on failure.
        print('Checking that %s works...' % phase)
        pc, o, e = Popen_safe(command, **popen_kwargs)
        if pc.returncode != 0:
            raise RuntimeError('Failed to {} {!r}:\n{}\n{}'.format(fail_verb, testdir, e, o))

    with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir='.')) as build_dir:
        run_checked('configuring', 'configure',
                    mesonlib.meson_command + [testdir, build_dir] + backend_flags)
        # The solution/project files only exist after configuring, so the
        # backend dir arguments must be computed here.
        dir_args = get_backend_args_for_dir(backend, build_dir)
        run_checked('building', 'build', compile_commands + dir_args, cwd=build_dir)
        run_checked('testing', 'test', test_commands, cwd=build_dir)
        if install_commands:
            run_checked('installing', 'install', install_commands, cwd=build_dir)
+
+
def detect_system_compiler():
    '''Find a usable C compiler on PATH and record its name in the
    module-global system_compiler. Raises RuntimeError if none exists.'''
    global system_compiler
    # Preference order mirrors the platforms we test on: MSVC's cl first,
    # then the generic cc alias, then gcc.
    for candidate in ('cl', 'cc', 'gcc'):
        if shutil.which(candidate):
            system_compiler = candidate
            return
    raise RuntimeError("Could not find C compiler.")
+
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Run the test suite of Meson.")
    parser.add_argument('extra_args', nargs='*',
                        help='arguments that are passed directly to Meson (remember to have -- before these).')
    parser.add_argument('--backend', default=None, dest='backend',
                        choices=backendlist)
    options = parser.parse_args()
    setup_commands(options.backend)

    detect_system_compiler()
    # Make all relative paths (test dirs, logs) resolve against the script dir.
    script_dir = os.path.split(__file__)[0]
    if script_dir != '':
        os.chdir(script_dir)
    check_format()
    check_meson_commands_work()
    # Initialize results up front: a StopException raised inside the try
    # would otherwise leave these names unbound and the report below would
    # die with a NameError instead of printing the totals.
    all_tests = []
    (passing_tests, failing_tests, skipped_tests) = (0, 0, 0)
    try:
        all_tests = detect_tests_to_run()
        (passing_tests, failing_tests, skipped_tests) = run_tests(all_tests, 'meson-test-run', options.extra_args)
    except StopException:
        pass
    print('\nTotal passed tests:', green(str(passing_tests)))
    print('Total failed tests:', red(str(failing_tests)))
    print('Total skipped tests:', yellow(str(skipped_tests)))
    if failing_tests > 0:
        print('\nMesonlogs of failing tests\n')
        for l in failing_logs:
            print(l, '\n')
    # Warn about test dirs whose numeric prefix is duplicated within a suite.
    for name, dirs, skip in all_tests:
        dirs = (os.path.basename(x) for x in dirs)
        for k, g in itertools.groupby(dirs, key=lambda x: x.split()[0]):
            tests = list(g)
            if len(tests) != 1:
                print('WARNING: The %s suite contains duplicate "%s" tests: "%s"' % (name, k, '", "'.join(tests)))
    sys.exit(failing_tests)
--- /dev/null
+#!/usr/bin/env python3
+
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+import time
+import shutil
+import subprocess
+import tempfile
+import platform
+from mesonbuild import mesonlib
+from mesonbuild import mesonmain
+from mesonbuild import mlog
+from mesonbuild.environment import detect_ninja
+from io import StringIO
+from enum import Enum
+from glob import glob
+
# The build backends the test runner knows how to drive.
Backend = Enum('Backend', 'ninja vs xcode')

# MESON_EXE lets CI point the suite at an externally-installed meson
# binary; when unset, configure is run in-process instead.
meson_exe = None
if 'MESON_EXE' in os.environ:
    import shlex
    meson_exe = shlex.split(os.environ['MESON_EXE'])

# Executables carry a suffix on Windows/Cygwin only.
exe_suffix = '.exe' if mesonlib.is_windows() or mesonlib.is_cygwin() else ''
+
def get_backend_args_for_dir(backend, builddir):
    '''
    Return the extra arguments the backend needs to build inside builddir.
    The Visual Studio backend must be told which solution file to build;
    every other backend needs nothing.
    '''
    if backend is not Backend.vs:
        return []
    # Pick the (single) generated solution in the build directory.
    sln_name = glob(os.path.join(builddir, '*.sln'))[0]
    return [os.path.split(sln_name)[-1]]
+
def find_vcxproj_with_target(builddir, target):
    '''Search builddir recursively for the .vcxproj that builds target.

    target may include an extension (e.g. 'foo.exe'), in which case both
    the TargetName and TargetExt project elements must match. Returns the
    path of the matching project file; raises RuntimeError if none match.
    '''
    import re, fnmatch
    t, ext = os.path.splitext(target)
    if ext:
        # re.escape the extension: it starts with '.', a regex metachar.
        # Raw string avoids the invalid-escape warning the old '\s' had.
        p = r'<TargetName>{}</TargetName>\s*<TargetExt>{}</TargetExt>'.format(t, re.escape(ext))
    else:
        p = '<TargetName>{}</TargetName>'.format(t)
    for root, dirs, files in os.walk(builddir):
        for f in fnmatch.filter(files, '*.vcxproj'):
            # Join with the directory currently being walked, not the
            # top-level builddir, so projects in subdirectories resolve
            # to an existing path.
            f = os.path.join(root, f)
            with open(f, 'r', encoding='utf-8') as o:
                if re.search(p, o.read(), flags=re.MULTILINE):
                    return f
    raise RuntimeError('No vcxproj matching {!r} in {!r}'.format(p, builddir))
+
def get_builddir_target_args(backend, builddir, target):
    '''
    Build the backend-specific argument list for building `target` (or
    everything, when target is None) inside builddir.
    '''
    # Directory arguments are only needed when no explicit target was
    # requested (this covers both None and an empty target).
    dir_args = get_backend_args_for_dir(backend, builddir) if not target else []
    if target is None:
        return dir_args
    if backend is Backend.vs:
        # MSBuild must be pointed at the project file that owns the target.
        target_args = [find_vcxproj_with_target(builddir, target)]
    elif backend is Backend.xcode:
        target_args = ['-target', target]
    elif backend is Backend.ninja:
        target_args = [target]
    else:
        raise AssertionError('Unknown backend: {!r}'.format(backend))
    return target_args + dir_args
+
def get_backend_commands(backend, debug=False):
    '''
    Return (build, clean, test, install, uninstall) command lists for the
    given backend. Backends without install support return empty install
    and uninstall command lists.
    '''
    install_cmd = []
    uninstall_cmd = []
    if backend is Backend.ninja:
        # We need at least 1.6 because of -w dupbuild=err
        cmd = [detect_ninja('1.6'), '-w', 'dupbuild=err', '-d', 'explain']
        if cmd[0] is None:
            raise RuntimeError('Could not find Ninja v1.6 or newer')
        if debug:
            cmd += ['-v']
        clean_cmd = cmd + ['clean']
        test_cmd = cmd + ['test', 'benchmark']
        install_cmd = cmd + ['install']
        uninstall_cmd = cmd + ['uninstall']
    elif backend is Backend.vs:
        cmd = ['msbuild']
        clean_cmd = cmd + ['/target:Clean']
        test_cmd = cmd + ['RUN_TESTS.vcxproj']
    elif backend is Backend.xcode:
        cmd = ['xcodebuild']
        clean_cmd = cmd + ['-alltargets', 'clean']
        test_cmd = cmd + ['-target', 'RUN_TESTS']
    else:
        raise AssertionError('Unknown backend: {!r}'.format(backend))
    return cmd, clean_cmd, test_cmd, install_cmd, uninstall_cmd
+
def ensure_backend_detects_changes(backend):
    '''Sleep if necessary so that files modified after the previous build
    get timestamps the backend recognizes as newer than build.ninja's.'''
    # We're using a ninja with QuLogic's patch for sub-1s resolution timestamps
    # and not running on HFS+ which only stores dates in seconds:
    # https://developer.apple.com/legacy/library/technotes/tn/tn1150.html#HFSPlusDates
    # FIXME: Upgrade Travis image to Apple FS when that becomes available
    if 'MESON_FIXED_NINJA' in os.environ and not mesonlib.is_osx():
        return
    # This is needed to increase the difference between build.ninja's
    # timestamp and the timestamp of whatever you changed due to a Ninja
    # bug: https://github.com/ninja-build/ninja/issues/371
    if backend is Backend.ninja:
        time.sleep(1)
+
def get_fake_options(prefix):
    '''Create a minimal stand-in for parsed command-line options, carrying
    only the fields the environment setup code reads.'''
    import argparse
    return argparse.Namespace(cross_file=None, wrap_mode=None, prefix=prefix)
+
def should_run_linux_cross_tests():
    '''True-ish when an ARM cross compiler is available and we are not
    already running on an ARM host.'''
    cross_cc = shutil.which('arm-linux-gnueabihf-gcc-7')
    on_arm_host = platform.machine().lower().startswith('arm')
    return cross_cc and not on_arm_host
+
def run_configure_inprocess(meson_command, commandlist):
    '''
    Run meson configure inside this process, capturing stdout and stderr.
    Returns (returncode, stdout_text, stderr_text).
    '''
    captured_out, captured_err = StringIO(), StringIO()
    saved_out, saved_err = sys.stdout, sys.stderr
    sys.stdout, sys.stderr = captured_out, captured_err
    try:
        returncode = mesonmain.run(commandlist, meson_command)
    finally:
        # Always restore the real streams, even if configure raised.
        sys.stdout, sys.stderr = saved_out, saved_err
    return returncode, captured_out.getvalue(), captured_err.getvalue()
+
def run_configure_external(full_command):
    '''Run meson configure as a subprocess.
    Returns (returncode, stdout_text, stderr_text).'''
    proc, stdout, stderr = mesonlib.Popen_safe(full_command)
    return proc.returncode, stdout, stderr
+
def run_configure(meson_command, commandlist):
    '''Dispatch configure to an external meson executable (when MESON_EXE
    was set) or run it in-process.'''
    global meson_exe
    if not meson_exe:
        return run_configure_inprocess(meson_command, commandlist)
    return run_configure_external(meson_exe + commandlist)
+
def print_system_info():
    '''Print basic details about the host machine for CI log diagnostics.'''
    print(mlog.bold('System information.').get_text(mlog.colorize_console))
    # NOTE(review): 'Platform:' and 'System:' intentionally print the same
    # platform.system() value to keep the historical log format unchanged.
    details = [
        ('Architecture:', platform.architecture()),
        ('Machine:', platform.machine()),
        ('Platform:', platform.system()),
        ('Processor:', platform.processor()),
        ('System:', platform.system()),
    ]
    for label, value in details:
        print(label, value)
    print('')
+
if __name__ == '__main__':
    print_system_info()
    # Enable coverage early...
    enable_coverage = '--cov' in sys.argv
    if enable_coverage:
        os.makedirs('.coverage', exist_ok=True)
        sys.argv.remove('--cov')
        import coverage
        coverage.process_startup()
    returncode = 0
    # Iterate over list in reverse order to find the last --backend arg
    backend = Backend.ninja
    for arg in reversed(sys.argv[1:]):
        if arg.startswith('--backend'):
            if arg.startswith('--backend=vs'):
                backend = Backend.vs
            elif arg == '--backend=xcode':
                backend = Backend.xcode
            break
    # Running on a developer machine? Be nice!
    if not mesonlib.is_windows() and not mesonlib.is_haiku() and 'TRAVIS' not in os.environ:
        os.nice(20)
    # Appveyor sets the `platform` environment variable which completely messes
    # up building with the vs2010 and vs2015 backends.
    #
    # Specifically, MSBuild reads the `platform` environment variable to set
    # the configured value for the platform (Win32/x64/arm), which breaks x86
    # builds.
    #
    # Appveyor setting this also breaks our 'native build arch' detection for
    # Windows in environment.py:detect_windows_arch() by overwriting the value
    # of `platform` set by vcvarsall.bat.
    #
    # While building for x86, `platform` should be unset.
    # Use .get() so a missing `arch` variable cannot raise KeyError.
    if 'APPVEYOR' in os.environ and os.environ.get('arch') == 'x86':
        os.environ.pop('platform')
    # Run tests
    print(mlog.bold('Running unittests.').get_text(mlog.colorize_console))
    print()
    # Can't pass arguments to unit tests, so set the backend to use in the environment
    env = os.environ.copy()
    env['MESON_UNIT_TEST_BACKEND'] = backend.name
    with tempfile.TemporaryDirectory() as td:
        # Enable coverage on all subsequent processes.
        if enable_coverage:
            with open(os.path.join(td, 'usercustomize.py'), 'w') as f:
                f.write('import coverage\n'
                        'coverage.process_startup()\n')
            env['COVERAGE_PROCESS_START'] = '.coveragerc'
            # PYTHONPATH is an os.pathsep-separated *string*; the previous
            # code concatenated a list with it ([td] + env.get(...)), which
            # raised TypeError whenever PYTHONPATH was already set.
            existing_path = env.get('PYTHONPATH')
            env['PYTHONPATH'] = os.pathsep.join([td, existing_path]) if existing_path else td
        returncode += subprocess.call(mesonlib.python_command + ['run_unittests.py', '-v'], env=env)
        # Ubuntu packages do not have a binary without -6 suffix.
        if should_run_linux_cross_tests():
            print(mlog.bold('Running cross compilation tests.').get_text(mlog.colorize_console))
            print()
            returncode += subprocess.call(mesonlib.python_command + ['run_cross_test.py', 'cross/ubuntu-armhf.txt'],
                                          env=env)
        returncode += subprocess.call(mesonlib.python_command + ['run_project_tests.py'] + sys.argv[1:], env=env)
    sys.exit(returncode)
--- /dev/null
+#!/usr/bin/env python3
+# Copyright 2016-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import stat
+import shlex
+import subprocess
+import re, json
+import tempfile
+import textwrap
+import os
+import shutil
+import sys
+import unittest
+from unittest import mock
+from configparser import ConfigParser
+from glob import glob
+from pathlib import PurePath
+
+import mesonbuild.mlog
+import mesonbuild.compilers
+import mesonbuild.environment
+import mesonbuild.mesonlib
+import mesonbuild.coredata
+from mesonbuild.interpreter import ObjectHolder
+from mesonbuild.mesonlib import is_linux, is_windows, is_osx, is_cygwin, windows_proof_rmtree
+from mesonbuild.mesonlib import python_command, meson_command, version_compare
+from mesonbuild.environment import Environment
+from mesonbuild.dependencies import DependencyException
+from mesonbuild.dependencies import PkgConfigDependency, ExternalProgram
+
+from run_tests import exe_suffix, get_fake_options
+from run_tests import get_builddir_target_args, get_backend_commands, Backend
+from run_tests import ensure_backend_detects_changes, run_configure, meson_exe
+from run_tests import should_run_linux_cross_tests
+
+
def get_dynamic_section_entry(fname, entry):
    '''Return the value of `entry` from fname's ELF dynamic section, as
    reported by readelf, or None when the entry is absent. Skips the
    calling test when readelf is not installed.'''
    try:
        raw_out = subprocess.check_output(['readelf', '-d', fname],
                                          universal_newlines=True)
    except FileNotFoundError:
        # FIXME: Try using depfixer.py:Elf() as a fallback
        raise unittest.SkipTest('readelf not found')
    # '.' never matches a newline, so searching the whole output is
    # equivalent to the old per-line search.
    match = re.search(entry + r': \[(.*?)\]', raw_out)
    if match is None:
        # The file did not contain the specified entry.
        return None
    return match.group(1)
+
def get_soname(fname):
    '''Return the SONAME recorded in the given shared library, or None.'''
    return get_dynamic_section_entry(fname, 'soname')
+
def get_rpath(fname):
    '''Return the RPATH or RUNPATH entry of the given binary, whichever
    is present, or None.'''
    return get_dynamic_section_entry(fname, r'(?:rpath|runpath)')
+
class InternalTests(unittest.TestCase):
    '''Unit tests for helpers inside mesonbuild that can be exercised
    without configuring or building a real project.'''

    def test_version_number(self):
        '''search_version must extract a dotted version from free-form
        compiler output, ignoring date-like tokens.'''
        searchfunc = mesonbuild.environment.search_version
        self.assertEqual(searchfunc('foobar 1.2.3'), '1.2.3')
        self.assertEqual(searchfunc('1.2.3'), '1.2.3')
        self.assertEqual(searchfunc('foobar 2016.10.28 1.2.3'), '1.2.3')
        self.assertEqual(searchfunc('2016.10.28 1.2.3'), '1.2.3')
        self.assertEqual(searchfunc('foobar 2016.10.128'), 'unknown version')
        self.assertEqual(searchfunc('2016.10.128'), 'unknown version')

    def test_mode_symbolic_to_bits(self):
        '''FileMode.perms_s_to_bits must map 'rwxrwxrwx'-style strings to
        the matching stat permission bits, including setuid/setgid/sticky.'''
        modefunc = mesonbuild.mesonlib.FileMode.perms_s_to_bits
        self.assertEqual(modefunc('---------'), 0)
        self.assertEqual(modefunc('r--------'), stat.S_IRUSR)
        self.assertEqual(modefunc('---r-----'), stat.S_IRGRP)
        self.assertEqual(modefunc('------r--'), stat.S_IROTH)
        self.assertEqual(modefunc('-w-------'), stat.S_IWUSR)
        self.assertEqual(modefunc('----w----'), stat.S_IWGRP)
        self.assertEqual(modefunc('-------w-'), stat.S_IWOTH)
        self.assertEqual(modefunc('--x------'), stat.S_IXUSR)
        self.assertEqual(modefunc('-----x---'), stat.S_IXGRP)
        self.assertEqual(modefunc('--------x'), stat.S_IXOTH)
        self.assertEqual(modefunc('--S------'), stat.S_ISUID)
        self.assertEqual(modefunc('-----S---'), stat.S_ISGID)
        self.assertEqual(modefunc('--------T'), stat.S_ISVTX)
        self.assertEqual(modefunc('--s------'), stat.S_ISUID | stat.S_IXUSR)
        self.assertEqual(modefunc('-----s---'), stat.S_ISGID | stat.S_IXGRP)
        self.assertEqual(modefunc('--------t'), stat.S_ISVTX | stat.S_IXOTH)
        self.assertEqual(modefunc('rwx------'), stat.S_IRWXU)
        self.assertEqual(modefunc('---rwx---'), stat.S_IRWXG)
        self.assertEqual(modefunc('------rwx'), stat.S_IRWXO)
        # We could keep listing combinations exhaustively but that seems
        # tedious and pointless. Just test a few more.
        self.assertEqual(modefunc('rwxr-xr-x'),
                         stat.S_IRWXU |
                         stat.S_IRGRP | stat.S_IXGRP |
                         stat.S_IROTH | stat.S_IXOTH)
        self.assertEqual(modefunc('rw-r--r--'),
                         stat.S_IRUSR | stat.S_IWUSR |
                         stat.S_IRGRP |
                         stat.S_IROTH)
        self.assertEqual(modefunc('rwsr-x---'),
                         stat.S_IRWXU | stat.S_ISUID |
                         stat.S_IRGRP | stat.S_IXGRP)

    def test_compiler_args_class(self):
        '''CompilerArgs must validate construction, de-duplicate on
        addition, and keep -I/-L ordering semantics intact.'''
        cargsfunc = mesonbuild.compilers.CompilerArgs
        c = mesonbuild.compilers.CCompiler([], 'fake', False)
        # Test that bad initialization fails
        self.assertRaises(TypeError, cargsfunc, [])
        self.assertRaises(TypeError, cargsfunc, [], [])
        self.assertRaises(TypeError, cargsfunc, c, [], [])
        # Test that empty initialization works
        a = cargsfunc(c)
        self.assertEqual(a, [])
        # Test that list initialization works
        a = cargsfunc(['-I.', '-I..'], c)
        self.assertEqual(a, ['-I.', '-I..'])
        # Test that there is no de-dup on initialization
        self.assertEqual(cargsfunc(['-I.', '-I.'], c), ['-I.', '-I.'])

        ## Test that appending works
        a.append('-I..')
        self.assertEqual(a, ['-I..', '-I.'])
        a.append('-O3')
        self.assertEqual(a, ['-I..', '-I.', '-O3'])

        ## Test that in-place addition works
        a += ['-O2', '-O2']
        self.assertEqual(a, ['-I..', '-I.', '-O3', '-O2', '-O2'])
        # Test that removal works
        a.remove('-O2')
        self.assertEqual(a, ['-I..', '-I.', '-O3', '-O2'])
        # Test that de-dup happens on addition
        a += ['-Ifoo', '-Ifoo']
        self.assertEqual(a, ['-Ifoo', '-I..', '-I.', '-O3', '-O2'])

        # .extend() is just +=, so we don't test it

        ## Test that addition works
        # Test that adding a list with just one old arg works and yields the same array
        a = a + ['-Ifoo']
        self.assertEqual(a, ['-Ifoo', '-I..', '-I.', '-O3', '-O2'])
        # Test that adding a list with one arg new and one old works
        a = a + ['-Ifoo', '-Ibaz']
        self.assertEqual(a, ['-Ifoo', '-Ibaz', '-I..', '-I.', '-O3', '-O2'])
        # Test that adding args that must be prepended and appended works
        a = a + ['-Ibar', '-Wall']
        self.assertEqual(a, ['-Ibar', '-Ifoo', '-Ibaz', '-I..', '-I.', '-O3', '-O2', '-Wall'])

        ## Test that reflected addition works
        # Test that adding to a list with just one old arg works and yields the same array
        a = ['-Ifoo'] + a
        self.assertEqual(a, ['-Ibar', '-Ifoo', '-Ibaz', '-I..', '-I.', '-O3', '-O2', '-Wall'])
        # Test that adding to a list with just one new arg that is not pre-pended works
        a = ['-Werror'] + a
        self.assertEqual(a, ['-Ibar', '-Ifoo', '-Ibaz', '-I..', '-I.', '-Werror', '-O3', '-O2', '-Wall'])
        # Test that adding to a list with two new args preserves the order
        a = ['-Ldir', '-Lbah'] + a
        self.assertEqual(a, ['-Ibar', '-Ifoo', '-Ibaz', '-I..', '-I.', '-Ldir', '-Lbah', '-Werror', '-O3', '-O2', '-Wall'])
        # Test that adding to a list with old args does nothing
        a = ['-Ibar', '-Ibaz', '-Ifoo'] + a
        self.assertEqual(a, ['-Ibar', '-Ifoo', '-Ibaz', '-I..', '-I.', '-Ldir', '-Lbah', '-Werror', '-O3', '-O2', '-Wall'])

        ## Test that adding libraries works
        l = cargsfunc(c, ['-Lfoodir', '-lfoo'])
        self.assertEqual(l, ['-Lfoodir', '-lfoo'])
        # Adding a library and a libpath appends both correctly
        l += ['-Lbardir', '-lbar']
        self.assertEqual(l, ['-Lbardir', '-Lfoodir', '-lfoo', '-lbar'])
        # Adding the same library again does nothing
        l += ['-lbar']
        self.assertEqual(l, ['-Lbardir', '-Lfoodir', '-lfoo', '-lbar'])

        ## Test that 'direct' append and extend works
        l = cargsfunc(c, ['-Lfoodir', '-lfoo'])
        self.assertEqual(l, ['-Lfoodir', '-lfoo'])
        # Direct-adding a library and a libpath appends both correctly
        l.extend_direct(['-Lbardir', '-lbar'])
        self.assertEqual(l, ['-Lfoodir', '-lfoo', '-Lbardir', '-lbar'])
        # Direct-adding the same library again still adds it
        l.append_direct('-lbar')
        self.assertEqual(l, ['-Lfoodir', '-lfoo', '-Lbardir', '-lbar', '-lbar'])

    def test_commonpath(self):
        '''mesonlib.commonpath must behave like os.path.commonpath for
        absolute, relative, and unrelated paths.'''
        from os.path import sep
        commonpath = mesonbuild.mesonlib.commonpath
        self.assertRaises(ValueError, commonpath, [])
        self.assertEqual(commonpath(['/usr', '/usr']), sep + 'usr')
        self.assertEqual(commonpath(['/usr', '/usr/']), sep + 'usr')
        self.assertEqual(commonpath(['/usr', '/usr/bin']), sep + 'usr')
        self.assertEqual(commonpath(['/usr/', '/usr/bin']), sep + 'usr')
        self.assertEqual(commonpath(['/usr/./', '/usr/bin']), sep + 'usr')
        self.assertEqual(commonpath(['/usr/bin', '/usr/bin']), sep + 'usr' + sep + 'bin')
        self.assertEqual(commonpath(['/usr//bin', '/usr/bin']), sep + 'usr' + sep + 'bin')
        self.assertEqual(commonpath(['/usr/./bin', '/usr/bin']), sep + 'usr' + sep + 'bin')
        self.assertEqual(commonpath(['/usr/local', '/usr/lib']), sep + 'usr')
        self.assertEqual(commonpath(['/usr', '/bin']), sep)
        self.assertEqual(commonpath(['/usr', 'bin']), '')
        self.assertEqual(commonpath(['blam', 'bin']), '')
        prefix = '/some/path/to/prefix'
        libdir = '/some/path/to/prefix/libdir'
        self.assertEqual(commonpath([prefix, libdir]), str(PurePath(prefix)))

    def test_string_templates_substitution(self):
        '''@INPUT@/@OUTPUT@ template dicts must be built correctly and
        substitution must reject invalid placeholder usage.'''
        dictfunc = mesonbuild.mesonlib.get_filenames_templates_dict
        substfunc = mesonbuild.mesonlib.substitute_values
        ME = mesonbuild.mesonlib.MesonException

        # Identity
        self.assertEqual(dictfunc([], []), {})

        # One input, no outputs
        inputs = ['bar/foo.c.in']
        outputs = []
        ret = dictfunc(inputs, outputs)
        d = {'@INPUT@': inputs, '@INPUT0@': inputs[0],
             '@PLAINNAME@': 'foo.c.in', '@BASENAME@': 'foo.c'}
        # Check dictionary
        self.assertEqual(ret, d)
        # Check substitutions
        cmd = ['some', 'ordinary', 'strings']
        self.assertEqual(substfunc(cmd, d), cmd)
        cmd = ['@INPUT@.out', 'ordinary', 'strings']
        self.assertEqual(substfunc(cmd, d), [inputs[0] + '.out'] + cmd[1:])
        cmd = ['@INPUT0@.out', '@PLAINNAME@.ok', 'strings']
        self.assertEqual(substfunc(cmd, d),
                         [inputs[0] + '.out'] + [d['@PLAINNAME@'] + '.ok'] + cmd[2:])
        cmd = ['@INPUT@', '@BASENAME@.hah', 'strings']
        self.assertEqual(substfunc(cmd, d),
                         inputs + [d['@BASENAME@'] + '.hah'] + cmd[2:])
        cmd = ['@OUTPUT@']
        self.assertRaises(ME, substfunc, cmd, d)

        # One input, one output
        inputs = ['bar/foo.c.in']
        outputs = ['out.c']
        ret = dictfunc(inputs, outputs)
        d = {'@INPUT@': inputs, '@INPUT0@': inputs[0],
             '@PLAINNAME@': 'foo.c.in', '@BASENAME@': 'foo.c',
             '@OUTPUT@': outputs, '@OUTPUT0@': outputs[0], '@OUTDIR@': '.'}
        # Check dictionary
        self.assertEqual(ret, d)
        # Check substitutions
        cmd = ['some', 'ordinary', 'strings']
        self.assertEqual(substfunc(cmd, d), cmd)
        cmd = ['@INPUT@.out', '@OUTPUT@', 'strings']
        self.assertEqual(substfunc(cmd, d),
                         [inputs[0] + '.out'] + outputs + cmd[2:])
        cmd = ['@INPUT0@.out', '@PLAINNAME@.ok', '@OUTPUT0@']
        self.assertEqual(substfunc(cmd, d),
                         [inputs[0] + '.out', d['@PLAINNAME@'] + '.ok'] + outputs)
        cmd = ['@INPUT@', '@BASENAME@.hah', 'strings']
        self.assertEqual(substfunc(cmd, d),
                         inputs + [d['@BASENAME@'] + '.hah'] + cmd[2:])

        # One input, one output with a subdir
        outputs = ['dir/out.c']
        ret = dictfunc(inputs, outputs)
        d = {'@INPUT@': inputs, '@INPUT0@': inputs[0],
             '@PLAINNAME@': 'foo.c.in', '@BASENAME@': 'foo.c',
             '@OUTPUT@': outputs, '@OUTPUT0@': outputs[0], '@OUTDIR@': 'dir'}
        # Check dictionary
        self.assertEqual(ret, d)

        # Two inputs, no outputs
        inputs = ['bar/foo.c.in', 'baz/foo.c.in']
        outputs = []
        ret = dictfunc(inputs, outputs)
        d = {'@INPUT@': inputs, '@INPUT0@': inputs[0], '@INPUT1@': inputs[1]}
        # Check dictionary
        self.assertEqual(ret, d)
        # Check substitutions
        cmd = ['some', 'ordinary', 'strings']
        self.assertEqual(substfunc(cmd, d), cmd)
        cmd = ['@INPUT@', 'ordinary', 'strings']
        self.assertEqual(substfunc(cmd, d), inputs + cmd[1:])
        cmd = ['@INPUT0@.out', 'ordinary', 'strings']
        self.assertEqual(substfunc(cmd, d), [inputs[0] + '.out'] + cmd[1:])
        cmd = ['@INPUT0@.out', '@INPUT1@.ok', 'strings']
        self.assertEqual(substfunc(cmd, d), [inputs[0] + '.out', inputs[1] + '.ok'] + cmd[2:])
        cmd = ['@INPUT0@', '@INPUT1@', 'strings']
        self.assertEqual(substfunc(cmd, d), inputs + cmd[2:])
        # Many inputs, can't use @INPUT@ like this
        cmd = ['@INPUT@.out', 'ordinary', 'strings']
        self.assertRaises(ME, substfunc, cmd, d)
        # Not enough inputs
        cmd = ['@INPUT2@.out', 'ordinary', 'strings']
        self.assertRaises(ME, substfunc, cmd, d)
        # Too many inputs
        cmd = ['@PLAINNAME@']
        self.assertRaises(ME, substfunc, cmd, d)
        cmd = ['@BASENAME@']
        self.assertRaises(ME, substfunc, cmd, d)
        # No outputs
        cmd = ['@OUTPUT@']
        self.assertRaises(ME, substfunc, cmd, d)
        cmd = ['@OUTPUT0@']
        self.assertRaises(ME, substfunc, cmd, d)
        cmd = ['@OUTDIR@']
        self.assertRaises(ME, substfunc, cmd, d)

        # Two inputs, one output
        outputs = ['dir/out.c']
        ret = dictfunc(inputs, outputs)
        d = {'@INPUT@': inputs, '@INPUT0@': inputs[0], '@INPUT1@': inputs[1],
             '@OUTPUT@': outputs, '@OUTPUT0@': outputs[0], '@OUTDIR@': 'dir'}
        # Check dictionary
        self.assertEqual(ret, d)
        # Check substitutions
        cmd = ['some', 'ordinary', 'strings']
        self.assertEqual(substfunc(cmd, d), cmd)
        cmd = ['@OUTPUT@', 'ordinary', 'strings']
        self.assertEqual(substfunc(cmd, d), outputs + cmd[1:])
        cmd = ['@OUTPUT@.out', 'ordinary', 'strings']
        self.assertEqual(substfunc(cmd, d), [outputs[0] + '.out'] + cmd[1:])
        cmd = ['@OUTPUT0@.out', '@INPUT1@.ok', 'strings']
        self.assertEqual(substfunc(cmd, d), [outputs[0] + '.out', inputs[1] + '.ok'] + cmd[2:])
        # Many inputs, can't use @INPUT@ like this
        cmd = ['@INPUT@.out', 'ordinary', 'strings']
        self.assertRaises(ME, substfunc, cmd, d)
        # Not enough inputs
        cmd = ['@INPUT2@.out', 'ordinary', 'strings']
        self.assertRaises(ME, substfunc, cmd, d)
        # Not enough outputs
        cmd = ['@OUTPUT2@.out', 'ordinary', 'strings']
        self.assertRaises(ME, substfunc, cmd, d)

        # Two inputs, two outputs
        outputs = ['dir/out.c', 'dir/out2.c']
        ret = dictfunc(inputs, outputs)
        d = {'@INPUT@': inputs, '@INPUT0@': inputs[0], '@INPUT1@': inputs[1],
             '@OUTPUT@': outputs, '@OUTPUT0@': outputs[0], '@OUTPUT1@': outputs[1],
             '@OUTDIR@': 'dir'}
        # Check dictionary
        self.assertEqual(ret, d)
        # Check substitutions
        cmd = ['some', 'ordinary', 'strings']
        self.assertEqual(substfunc(cmd, d), cmd)
        cmd = ['@OUTPUT@', 'ordinary', 'strings']
        self.assertEqual(substfunc(cmd, d), outputs + cmd[1:])
        cmd = ['@OUTPUT0@', '@OUTPUT1@', 'strings']
        self.assertEqual(substfunc(cmd, d), outputs + cmd[2:])
        cmd = ['@OUTPUT0@.out', '@INPUT1@.ok', '@OUTDIR@']
        self.assertEqual(substfunc(cmd, d), [outputs[0] + '.out', inputs[1] + '.ok', 'dir'])
        # Many inputs, can't use @INPUT@ like this
        cmd = ['@INPUT@.out', 'ordinary', 'strings']
        self.assertRaises(ME, substfunc, cmd, d)
        # Not enough inputs
        cmd = ['@INPUT2@.out', 'ordinary', 'strings']
        self.assertRaises(ME, substfunc, cmd, d)
        # Not enough outputs
        cmd = ['@OUTPUT2@.out', 'ordinary', 'strings']
        self.assertRaises(ME, substfunc, cmd, d)
        # Many outputs, can't use @OUTPUT@ like this
        cmd = ['@OUTPUT@.out', 'ordinary', 'strings']
        self.assertRaises(ME, substfunc, cmd, d)

    def test_needs_exe_wrapper_override(self):
        '''A 'needs_exe_wrapper' property in the cross file must override
        the auto-detected value.'''
        config = ConfigParser()
        config['binaries'] = {
            'c': '\'/usr/bin/gcc\'',
        }
        config['host_machine'] = {
            'system': '\'linux\'',
            'cpu_family': '\'arm\'',
            'cpu': '\'armv7\'',
            'endian': '\'little\'',
        }
        # Can not be used as context manager because we need to
        # open it a second time and this is not possible on
        # Windows.
        configfile = tempfile.NamedTemporaryFile(mode='w+', delete=False)
        configfilename = configfile.name
        config.write(configfile)
        configfile.flush()
        configfile.close()
        detected_value = mesonbuild.environment.CrossBuildInfo(configfile.name).need_exe_wrapper()
        os.unlink(configfilename)

        desired_value = not detected_value
        config['properties'] = {
            'needs_exe_wrapper': 'true' if desired_value else 'false'
        }

        configfile = tempfile.NamedTemporaryFile(mode='w+', delete=False)
        configfilename = configfile.name
        config.write(configfile)
        configfile.close()
        forced_value = mesonbuild.environment.CrossBuildInfo(configfile.name).need_exe_wrapper()
        os.unlink(configfilename)

        self.assertEqual(forced_value, desired_value)

    def test_listify(self):
        '''mesonlib.listify must wrap scalars, optionally flatten nested
        lists, and optionally unhold ObjectHolders.'''
        listify = mesonbuild.mesonlib.listify
        # Test sanity
        self.assertEqual([1], listify(1))
        self.assertEqual([], listify([]))
        self.assertEqual([1], listify([1]))
        # Test flattening
        self.assertEqual([1, 2, 3], listify([1, [2, 3]]))
        self.assertEqual([1, 2, 3], listify([1, [2, [3]]]))
        self.assertEqual([1, [2, [3]]], listify([1, [2, [3]]], flatten=False))
        # Test flattening and unholdering
        holder1 = ObjectHolder(1)
        holder3 = ObjectHolder(3)
        self.assertEqual([holder1], listify(holder1))
        self.assertEqual([holder1], listify([holder1]))
        self.assertEqual([holder1, 2], listify([holder1, 2]))
        self.assertEqual([holder1, 2, 3], listify([holder1, 2, [3]]))
        self.assertEqual([1], listify(holder1, unholder=True))
        self.assertEqual([1], listify([holder1], unholder=True))
        self.assertEqual([1, 2], listify([holder1, 2], unholder=True))
        self.assertEqual([1, 2, 3], listify([holder1, 2, [holder3]], unholder=True))
        # Unholding doesn't work recursively when not flattening
        self.assertEqual([1, [2], [holder3]], listify([holder1, [2], [holder3]], unholder=True, flatten=False))

    def test_extract_as_list(self):
        '''mesonlib.extract_as_list must extract kwargs values as lists,
        optionally popping, unholding, and handling multiple keys.'''
        extract = mesonbuild.mesonlib.extract_as_list
        # Test sanity
        kwargs = {'sources': [1, 2, 3]}
        self.assertEqual([1, 2, 3], extract(kwargs, 'sources'))
        self.assertEqual(kwargs, {'sources': [1, 2, 3]})
        self.assertEqual([1, 2, 3], extract(kwargs, 'sources', pop=True))
        self.assertEqual(kwargs, {})
        # Test unholding
        holder3 = ObjectHolder(3)
        kwargs = {'sources': [1, 2, holder3]}
        self.assertEqual([1, 2, 3], extract(kwargs, 'sources', unholder=True))
        self.assertEqual(kwargs, {'sources': [1, 2, holder3]})
        self.assertEqual([1, 2, 3], extract(kwargs, 'sources', unholder=True, pop=True))
        self.assertEqual(kwargs, {})
        # Test listification
        kwargs = {'sources': [1, 2, 3], 'pch_sources': [4, 5, 6]}
        self.assertEqual([[1, 2, 3], [4, 5, 6]], extract(kwargs, 'sources', 'pch_sources'))
+
+
+class BasePlatformTests(unittest.TestCase):
    def setUp(self):
        '''Create a fresh build directory and compute all the per-test
        paths and meson/backend command lines.'''
        super().setUp()
        src_root = os.path.dirname(__file__)
        src_root = os.path.join(os.getcwd(), src_root)
        self.src_root = src_root
        # In case the directory is inside a symlinked directory, find the real
        # path otherwise we might not find the srcdir from inside the builddir.
        self.builddir = os.path.realpath(tempfile.mkdtemp())
        self.logdir = os.path.join(self.builddir, 'meson-logs')
        self.prefix = '/usr'
        self.libdir = os.path.join(self.prefix, 'lib')
        self.installdir = os.path.join(self.builddir, 'install')
        self.distdir = os.path.join(self.builddir, 'meson-dist')
        # Get the backend
        # FIXME: Extract this from argv?
        self.backend = getattr(Backend, os.environ.get('MESON_UNIT_TEST_BACKEND', 'ninja'))
        self.meson_mainfile = os.path.join(src_root, 'meson.py')
        self.meson_args = ['--backend=' + self.backend.name]
        self.meson_command = meson_command + self.meson_args
        self.mconf_command = meson_command + ['configure']
        self.mintro_command = meson_command + ['introspect']
        self.mtest_command = meson_command + ['test', '-C', self.builddir]
        # Backend-specific build commands
        self.build_command, self.clean_command, self.test_command, self.install_command, \
            self.uninstall_command = get_backend_commands(self.backend)
        # Test directories
        self.common_test_dir = os.path.join(src_root, 'test cases/common')
        self.vala_test_dir = os.path.join(src_root, 'test cases/vala')
        self.framework_test_dir = os.path.join(src_root, 'test cases/frameworks')
        self.unit_test_dir = os.path.join(src_root, 'test cases/unit')
        # Misc stuff
        # Saved so tearDown can restore the environment a test mutated.
        self.orig_env = os.environ.copy()
        if self.backend is Backend.ninja:
            self.no_rebuild_stdout = 'ninja: no work to do.'
        else:
            # VS doesn't have a stable output when no changes are done
            # XCode backend is untested with unit tests, help welcome!
            self.no_rebuild_stdout = 'UNKNOWN BACKEND {!r}'.format(self.backend.name)
+
+ def _print_meson_log(self):
+ log = os.path.join(self.logdir, 'meson-log.txt')
+ if not os.path.isfile(log):
+ print("{!r} doesn't exist".format(log))
+ return
+ with open(log, 'r', encoding='utf-8') as f:
+ print(f.read())
+
+ def tearDown(self):
+ try:
+ windows_proof_rmtree(self.builddir)
+ except FileNotFoundError:
+ pass
+ os.environ = self.orig_env
+ super().tearDown()
+
+ def _run(self, command, workdir=None):
+ '''
+ Run a command while printing the stdout and stderr to stdout,
+ and also return a copy of it
+ '''
+ p = subprocess.Popen(command, stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT, env=os.environ.copy(),
+ universal_newlines=True, cwd=workdir)
+ output = p.communicate()[0]
+ print(output)
+ if p.returncode != 0:
+ if 'MESON_SKIP_TEST' in output:
+ raise unittest.SkipTest('Project requested skipping.')
+ raise subprocess.CalledProcessError(p.returncode, command)
+ return output
+
+ def init(self, srcdir, extra_args=None, default_args=True, inprocess=False):
+ self.assertPathExists(srcdir)
+ if extra_args is None:
+ extra_args = []
+ if not isinstance(extra_args, list):
+ extra_args = [extra_args]
+ args = [srcdir, self.builddir]
+ if default_args:
+ args += ['--prefix', self.prefix,
+ '--libdir', self.libdir]
+ self.privatedir = os.path.join(self.builddir, 'meson-private')
+ if inprocess:
+ try:
+ out = run_configure(self.meson_mainfile, self.meson_args + args + extra_args)[1]
+ except:
+ self._print_meson_log()
+ raise
+ finally:
+ # Close log file to satisfy Windows file locking
+ mesonbuild.mlog.shutdown()
+ mesonbuild.mlog.log_dir = None
+ mesonbuild.mlog.log_file = None
+ else:
+ try:
+ out = self._run(self.meson_command + args + extra_args)
+ except unittest.SkipTest:
+ raise unittest.SkipTest('Project requested skipping: ' + srcdir)
+ except:
+ self._print_meson_log()
+ raise
+ return out
+
+ def build(self, target=None, extra_args=None):
+ if extra_args is None:
+ extra_args = []
+ # Add arguments for building the target (if specified),
+ # and using the build dir (if required, with VS)
+ args = get_builddir_target_args(self.backend, self.builddir, target)
+ return self._run(self.build_command + args + extra_args, workdir=self.builddir)
+
+ def clean(self):
+ dir_args = get_builddir_target_args(self.backend, self.builddir, None)
+ self._run(self.clean_command + dir_args, workdir=self.builddir)
+
+ def run_tests(self):
+ self._run(self.test_command, workdir=self.builddir)
+
+ def install(self):
+ if self.backend is not Backend.ninja:
+ raise unittest.SkipTest('{!r} backend can\'t install files'.format(self.backend.name))
+ os.environ['DESTDIR'] = self.installdir
+ self._run(self.install_command, workdir=self.builddir)
+
+ def uninstall(self):
+ self._run(self.uninstall_command, workdir=self.builddir)
+
+ def run_target(self, target):
+ '''
+ Run a Ninja target while printing the stdout and stderr to stdout,
+ and also return a copy of it
+ '''
+ return self.build(target=target)
+
+ def setconf(self, arg, will_build=True):
+ if will_build:
+ ensure_backend_detects_changes(self.backend)
+ self._run(self.mconf_command + [arg, self.builddir])
+
+ def wipe(self):
+ windows_proof_rmtree(self.builddir)
+
+ def utime(self, f):
+ ensure_backend_detects_changes(self.backend)
+ os.utime(f)
+
+ def get_compdb(self):
+ if self.backend is not Backend.ninja:
+ raise unittest.SkipTest('Compiler db not available with {} backend'.format(self.backend.name))
+ with open(os.path.join(self.builddir, 'compile_commands.json')) as ifile:
+ contents = json.load(ifile)
+ # If Ninja is using .rsp files, generate them, read their contents, and
+ # replace it as the command for all compile commands in the parsed json.
+ if len(contents) > 0 and contents[0]['command'].endswith('.rsp'):
+ # Pretend to build so that the rsp files are generated
+ self.build(extra_args=['-d', 'keeprsp', '-n'])
+ for each in contents:
+ # Extract the actual command from the rsp file
+ compiler, rsp = each['command'].split(' @')
+ rsp = os.path.join(self.builddir, rsp)
+ # Replace the command with its contents
+ with open(rsp, 'r', encoding='utf-8') as f:
+ each['command'] = compiler + ' ' + f.read()
+ return contents
+
+ def get_meson_log(self):
+ with open(os.path.join(self.builddir, 'meson-logs', 'meson-log.txt')) as f:
+ return f.readlines()
+
+ def get_meson_log_compiler_checks(self):
+ '''
+ Fetch a list command-lines run by meson for compiler checks.
+ Each command-line is returned as a list of arguments.
+ '''
+ log = self.get_meson_log()
+ prefix = 'Command line:'
+ cmds = [l[len(prefix):].split() for l in log if l.startswith(prefix)]
+ return cmds
+
+ def introspect(self, arg):
+ out = subprocess.check_output(self.mintro_command + [arg, self.builddir],
+ universal_newlines=True)
+ return json.loads(out)
+
+ def assertPathEqual(self, path1, path2):
+ '''
+ Handles a lot of platform-specific quirks related to paths such as
+ separator, case-sensitivity, etc.
+ '''
+ self.assertEqual(PurePath(path1), PurePath(path2))
+
+ def assertPathBasenameEqual(self, path, basename):
+ msg = '{!r} does not end with {!r}'.format(path, basename)
+ # We cannot use os.path.basename because it returns '' when the path
+ # ends with '/' for some silly reason. This is not how the UNIX utility
+ # `basename` works.
+ path_basename = PurePath(path).parts[-1]
+ self.assertEqual(PurePath(path_basename), PurePath(basename), msg)
+
+ def assertBuildIsNoop(self):
+ ret = self.build()
+ if self.backend is Backend.ninja:
+ self.assertEqual(ret.split('\n')[-2], self.no_rebuild_stdout)
+ elif self.backend is Backend.vs:
+ # Ensure that some target said that no rebuild was done
+ self.assertIn('CustomBuild:\n All outputs are up-to-date.', ret)
+ self.assertIn('ClCompile:\n All outputs are up-to-date.', ret)
+ self.assertIn('Link:\n All outputs are up-to-date.', ret)
+ # Ensure that no targets were built
+ clre = re.compile('ClCompile:\n [^\n]*cl', flags=re.IGNORECASE)
+ linkre = re.compile('Link:\n [^\n]*link', flags=re.IGNORECASE)
+ self.assertNotRegex(ret, clre)
+ self.assertNotRegex(ret, linkre)
+ elif self.backend is Backend.xcode:
+ raise unittest.SkipTest('Please help us fix this test on the xcode backend')
+ else:
+ raise RuntimeError('Invalid backend: {!r}'.format(self.backend.name))
+
+ def assertRebuiltTarget(self, target):
+ ret = self.build()
+ if self.backend is Backend.ninja:
+ self.assertIn('Linking target {}'.format(target), ret)
+ elif self.backend is Backend.vs:
+ # Ensure that this target was rebuilt
+ clre = re.compile('ClCompile:\n [^\n]*cl[^\n]*' + target, flags=re.IGNORECASE)
+ linkre = re.compile('Link:\n [^\n]*link[^\n]*' + target, flags=re.IGNORECASE)
+ self.assertRegex(ret, clre)
+ self.assertRegex(ret, linkre)
+ elif self.backend is Backend.xcode:
+ raise unittest.SkipTest('Please help us fix this test on the xcode backend')
+ else:
+ raise RuntimeError('Invalid backend: {!r}'.format(self.backend.name))
+
+ def assertPathExists(self, path):
+ m = 'Path {!r} should exist'.format(path)
+ self.assertTrue(os.path.exists(path), msg=m)
+
+ def assertPathDoesNotExist(self, path):
+ m = 'Path {!r} should not exist'.format(path)
+ self.assertFalse(os.path.exists(path), msg=m)
+
+
+class AllPlatformTests(BasePlatformTests):
+ '''
+ Tests that should run on all platforms
+ '''
+ def test_default_options_prefix(self):
+ '''
+ Tests that setting a prefix in default_options in project() works.
+ Can't be an ordinary test because we pass --prefix to meson there.
+ https://github.com/mesonbuild/meson/issues/1349
+ '''
+ testdir = os.path.join(self.common_test_dir, '94 default options')
+ self.init(testdir, default_args=False)
+ opts = self.introspect('--buildoptions')
+ for opt in opts:
+ if opt['name'] == 'prefix':
+ prefix = opt['value']
+ self.assertEqual(prefix, '/absoluteprefix')
+
+ def test_absolute_prefix_libdir(self):
+ '''
+ Tests that setting absolute paths for --prefix and --libdir work. Can't
+ be an ordinary test because these are set via the command-line.
+ https://github.com/mesonbuild/meson/issues/1341
+ https://github.com/mesonbuild/meson/issues/1345
+ '''
+ testdir = os.path.join(self.common_test_dir, '94 default options')
+ prefix = '/someabs'
+ libdir = 'libdir'
+ extra_args = ['--prefix=' + prefix,
+ # This can just be a relative path, but we want to test
+ # that passing this as an absolute path also works
+ '--libdir=' + prefix + '/' + libdir]
+ self.init(testdir, extra_args, default_args=False)
+ opts = self.introspect('--buildoptions')
+ for opt in opts:
+ if opt['name'] == 'prefix':
+ self.assertEqual(prefix, opt['value'])
+ elif opt['name'] == 'libdir':
+ self.assertEqual(libdir, opt['value'])
+
+ def test_libdir_must_be_inside_prefix(self):
+ '''
+ Tests that libdir is forced to be inside prefix no matter how it is set.
+ Must be a unit test for obvious reasons.
+ '''
+ testdir = os.path.join(self.common_test_dir, '1 trivial')
+ # libdir being inside prefix is ok
+ args = ['--prefix', '/opt', '--libdir', '/opt/lib32']
+ self.init(testdir, args)
+ self.wipe()
+ # libdir not being inside prefix is not ok
+ args = ['--prefix', '/usr', '--libdir', '/opt/lib32']
+ self.assertRaises(subprocess.CalledProcessError, self.init, testdir, args)
+ self.wipe()
+ # libdir must be inside prefix even when set via mesonconf
+ self.init(testdir)
+ self.assertRaises(subprocess.CalledProcessError, self.setconf, '-Dlibdir=/opt', False)
+
+ def test_prefix_dependent_defaults(self):
+ '''
+ Tests that configured directory paths are set to prefix dependent
+ defaults.
+ '''
+ testdir = os.path.join(self.common_test_dir, '1 trivial')
+ expected = {
+ '/opt': {'prefix': '/opt',
+ 'bindir': 'bin', 'datadir': 'share', 'includedir': 'include',
+ 'infodir': 'share/info',
+ 'libexecdir': 'libexec', 'localedir': 'share/locale',
+ 'localstatedir': 'var', 'mandir': 'share/man',
+ 'sbindir': 'sbin', 'sharedstatedir': 'com',
+ 'sysconfdir': 'etc'},
+ '/usr': {'prefix': '/usr',
+ 'bindir': 'bin', 'datadir': 'share', 'includedir': 'include',
+ 'infodir': 'share/info',
+ 'libexecdir': 'libexec', 'localedir': 'share/locale',
+ 'localstatedir': '/var', 'mandir': 'share/man',
+ 'sbindir': 'sbin', 'sharedstatedir': '/var/lib',
+ 'sysconfdir': '/etc'},
+ '/usr/local': {'prefix': '/usr/local',
+ 'bindir': 'bin', 'datadir': 'share',
+ 'includedir': 'include', 'infodir': 'share/info',
+ 'libexecdir': 'libexec',
+ 'localedir': 'share/locale',
+ 'localstatedir': '/var/local', 'mandir': 'share/man',
+ 'sbindir': 'sbin', 'sharedstatedir': '/var/local/lib',
+ 'sysconfdir': 'etc'},
+ # N.B. We don't check 'libdir' as it's platform dependent, see
+ # default_libdir():
+ }
+ for prefix in expected:
+ args = ['--prefix', prefix]
+ self.init(testdir, args, default_args=False)
+ opts = self.introspect('--buildoptions')
+ for opt in opts:
+ name = opt['name']
+ value = opt['value']
+ if name in expected[prefix]:
+ self.assertEqual(value, expected[prefix][name])
+ self.wipe()
+
+ def test_static_library_overwrite(self):
+ '''
+ Tests that static libraries are never appended to, always overwritten.
+ Has to be a unit test because this involves building a project,
+ reconfiguring, and building it again so that `ar` is run twice on the
+ same static library.
+ https://github.com/mesonbuild/meson/issues/1355
+ '''
+ testdir = os.path.join(self.common_test_dir, '3 static')
+ env = Environment(testdir, self.builddir, self.meson_command,
+ get_fake_options(self.prefix), [])
+ cc = env.detect_c_compiler(False)
+ static_linker = env.detect_static_linker(cc)
+ if is_windows():
+ raise unittest.SkipTest('https://github.com/mesonbuild/meson/issues/1526')
+ if not isinstance(static_linker, mesonbuild.linkers.ArLinker):
+ raise unittest.SkipTest('static linker is not `ar`')
+ # Configure
+ self.init(testdir)
+ # Get name of static library
+ targets = self.introspect('--targets')
+ self.assertEqual(len(targets), 1)
+ libname = targets[0]['filename']
+ # Build and get contents of static library
+ self.build()
+ before = self._run(['ar', 't', os.path.join(self.builddir, libname)]).split()
+ # Filter out non-object-file contents
+ before = [f for f in before if f.endswith(('.o', '.obj'))]
+ # Static library should contain only one object
+ self.assertEqual(len(before), 1, msg=before)
+ # Change the source to be built into the static library
+ self.setconf('-Dsource=libfile2.c')
+ self.build()
+ after = self._run(['ar', 't', os.path.join(self.builddir, libname)]).split()
+ # Filter out non-object-file contents
+ after = [f for f in after if f.endswith(('.o', '.obj'))]
+ # Static library should contain only one object
+ self.assertEqual(len(after), 1, msg=after)
+ # and the object must have changed
+ self.assertNotEqual(before, after)
+
+ def test_static_compile_order(self):
+ '''
+ Test that the order of files in a compiler command-line while compiling
+ and linking statically is deterministic. This can't be an ordinary test
+ case because we need to inspect the compiler database.
+ https://github.com/mesonbuild/meson/pull/951
+ '''
+ testdir = os.path.join(self.common_test_dir, '5 linkstatic')
+ self.init(testdir)
+ compdb = self.get_compdb()
+ # Rules will get written out in this order
+ self.assertTrue(compdb[0]['file'].endswith("libfile.c"))
+ self.assertTrue(compdb[1]['file'].endswith("libfile2.c"))
+ self.assertTrue(compdb[2]['file'].endswith("libfile3.c"))
+ self.assertTrue(compdb[3]['file'].endswith("libfile4.c"))
+ # FIXME: We don't have access to the linker command
+
+ def test_run_target_files_path(self):
+ '''
+ Test that run_targets are run from the correct directory
+ https://github.com/mesonbuild/meson/issues/957
+ '''
+ testdir = os.path.join(self.common_test_dir, '58 run target')
+ self.init(testdir)
+ self.run_target('check_exists')
+
+ def test_install_introspection(self):
+ '''
+ Tests that the Meson introspection API exposes install filenames correctly
+ https://github.com/mesonbuild/meson/issues/829
+ '''
+ if self.backend is not Backend.ninja:
+ raise unittest.SkipTest('{!r} backend can\'t install files'.format(self.backend.name))
+ testdir = os.path.join(self.common_test_dir, '8 install')
+ self.init(testdir)
+ intro = self.introspect('--targets')
+ if intro[0]['type'] == 'executable':
+ intro = intro[::-1]
+ self.assertPathEqual(intro[0]['install_filename'], '/usr/lib/libstat.a')
+ self.assertPathEqual(intro[1]['install_filename'], '/usr/bin/prog' + exe_suffix)
+
+ def test_uninstall(self):
+ exename = os.path.join(self.installdir, 'usr/bin/prog' + exe_suffix)
+ testdir = os.path.join(self.common_test_dir, '8 install')
+ self.init(testdir)
+ self.assertPathDoesNotExist(exename)
+ self.install()
+ self.assertPathExists(exename)
+ self.uninstall()
+ self.assertPathDoesNotExist(exename)
+
+ def test_testsetups(self):
+ if not shutil.which('valgrind'):
+ raise unittest.SkipTest('Valgrind not installed.')
+ testdir = os.path.join(self.unit_test_dir, '2 testsetups')
+ self.init(testdir)
+ self.build()
+ # Run tests without setup
+ self.run_tests()
+ with open(os.path.join(self.logdir, 'testlog.txt')) as f:
+ basic_log = f.read()
+ # Run buggy test with setup that has env that will make it fail
+ self.assertRaises(subprocess.CalledProcessError,
+ self._run, self.mtest_command + ['--setup=valgrind'])
+ with open(os.path.join(self.logdir, 'testlog-valgrind.txt')) as f:
+ vg_log = f.read()
+ self.assertFalse('TEST_ENV is set' in basic_log)
+ self.assertFalse('Memcheck' in basic_log)
+ self.assertTrue('TEST_ENV is set' in vg_log)
+ self.assertTrue('Memcheck' in vg_log)
+ # Run buggy test with setup without env that will pass
+ self._run(self.mtest_command + ['--setup=wrapper'])
+ # Setup with no properties works
+ self._run(self.mtest_command + ['--setup=empty'])
+ # Setup with only env works
+ self._run(self.mtest_command + ['--setup=onlyenv'])
+ self._run(self.mtest_command + ['--setup=onlyenv2'])
+ self._run(self.mtest_command + ['--setup=onlyenv3'])
+ # Setup with only a timeout works
+ self._run(self.mtest_command + ['--setup=timeout'])
+
+ def assertFailedTestCount(self, failure_count, command):
+ try:
+ self._run(command)
+ self.assertEqual(0, failure_count, 'Expected %d tests to fail.' % failure_count)
+ except subprocess.CalledProcessError as e:
+ self.assertEqual(e.returncode, failure_count)
+
+ def test_suite_selection(self):
+ testdir = os.path.join(self.unit_test_dir, '4 suite selection')
+ self.init(testdir)
+ self.build()
+
+ self.assertFailedTestCount(3, self.mtest_command)
+
+ self.assertFailedTestCount(0, self.mtest_command + ['--suite', ':success'])
+ self.assertFailedTestCount(3, self.mtest_command + ['--suite', ':fail'])
+ self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', ':success'])
+ self.assertFailedTestCount(0, self.mtest_command + ['--no-suite', ':fail'])
+
+ self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'mainprj'])
+ self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'subprjsucc'])
+ self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'subprjfail'])
+ self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'subprjmix'])
+ self.assertFailedTestCount(2, self.mtest_command + ['--no-suite', 'mainprj'])
+ self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'subprjsucc'])
+ self.assertFailedTestCount(2, self.mtest_command + ['--no-suite', 'subprjfail'])
+ self.assertFailedTestCount(2, self.mtest_command + ['--no-suite', 'subprjmix'])
+
+ self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'mainprj:fail'])
+ self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'mainprj:success'])
+ self.assertFailedTestCount(2, self.mtest_command + ['--no-suite', 'mainprj:fail'])
+ self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'mainprj:success'])
+
+ self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'subprjfail:fail'])
+ self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'subprjfail:success'])
+ self.assertFailedTestCount(2, self.mtest_command + ['--no-suite', 'subprjfail:fail'])
+ self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'subprjfail:success'])
+
+ self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'subprjsucc:fail'])
+ self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'subprjsucc:success'])
+ self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'subprjsucc:fail'])
+ self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'subprjsucc:success'])
+
+ self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'subprjmix:fail'])
+ self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'subprjmix:success'])
+ self.assertFailedTestCount(2, self.mtest_command + ['--no-suite', 'subprjmix:fail'])
+ self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'subprjmix:success'])
+
+ self.assertFailedTestCount(2, self.mtest_command + ['--suite', 'subprjfail', '--suite', 'subprjmix:fail'])
+ self.assertFailedTestCount(3, self.mtest_command + ['--suite', 'subprjfail', '--suite', 'subprjmix', '--suite', 'mainprj'])
+ self.assertFailedTestCount(2, self.mtest_command + ['--suite', 'subprjfail', '--suite', 'subprjmix', '--suite', 'mainprj', '--no-suite', 'subprjmix:fail'])
+ self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'subprjfail', '--suite', 'subprjmix', '--suite', 'mainprj', '--no-suite', 'subprjmix:fail', 'mainprj-failing_test'])
+
+ self.assertFailedTestCount(1, self.mtest_command + ['--no-suite', 'subprjfail:fail', '--no-suite', 'subprjmix:fail'])
+
+ def test_build_by_default(self):
+ testdir = os.path.join(self.common_test_dir, '137 build by default')
+ self.init(testdir)
+ self.build()
+ genfile = os.path.join(self.builddir, 'generated.dat')
+ exe = os.path.join(self.builddir, 'fooprog' + exe_suffix)
+ self.assertPathExists(genfile)
+ self.assertPathDoesNotExist(exe)
+ self.build(target=('fooprog' + exe_suffix))
+ self.assertPathExists(exe)
+
+ def test_internal_include_order(self):
+ testdir = os.path.join(self.common_test_dir, '138 include order')
+ self.init(testdir)
+ execmd = fxecmd = None
+ for cmd in self.get_compdb():
+ if 'someexe' in cmd['command']:
+ execmd = cmd['command']
+ continue
+ if 'somefxe' in cmd['command']:
+ fxecmd = cmd['command']
+ continue
+ if not execmd or not fxecmd:
+ raise Exception('Could not find someexe and somfxe commands')
+ # Check include order for 'someexe'
+ incs = [a for a in shlex.split(execmd) if a.startswith("-I")]
+ self.assertEqual(len(incs), 9)
+ # target private dir
+ self.assertPathEqual(incs[0], "-Isub4/someexe@exe")
+ # target build subdir
+ self.assertPathEqual(incs[1], "-Isub4")
+ # target source subdir
+ self.assertPathBasenameEqual(incs[2], 'sub4')
+ # include paths added via per-target c_args: ['-I'...]
+ self.assertPathBasenameEqual(incs[3], 'sub3')
+ # target include_directories: build dir
+ self.assertPathEqual(incs[4], "-Isub2")
+ # target include_directories: source dir
+ self.assertPathBasenameEqual(incs[5], 'sub2')
+ # target internal dependency include_directories: build dir
+ self.assertPathEqual(incs[6], "-Isub1")
+ # target internal dependency include_directories: source dir
+ self.assertPathBasenameEqual(incs[7], 'sub1')
+ # custom target include dir
+ self.assertPathEqual(incs[8], '-Ictsub')
+ # Check include order for 'somefxe'
+ incs = [a for a in shlex.split(fxecmd) if a.startswith('-I')]
+ self.assertEqual(len(incs), 9)
+ # target private dir
+ self.assertPathEqual(incs[0], '-Isomefxe@exe')
+ # target build dir
+ self.assertPathEqual(incs[1], '-I.')
+ # target source dir
+ self.assertPathBasenameEqual(incs[2], os.path.basename(testdir))
+ # target internal dependency correct include_directories: build dir
+ self.assertPathEqual(incs[3], "-Isub4")
+ # target internal dependency correct include_directories: source dir
+ self.assertPathBasenameEqual(incs[4], 'sub4')
+ # target internal dependency dep include_directories: build dir
+ self.assertPathEqual(incs[5], "-Isub1")
+ # target internal dependency dep include_directories: source dir
+ self.assertPathBasenameEqual(incs[6], 'sub1')
+ # target internal dependency wrong include_directories: build dir
+ self.assertPathEqual(incs[7], "-Isub2")
+ # target internal dependency wrong include_directories: source dir
+ self.assertPathBasenameEqual(incs[8], 'sub2')
+
    def test_compiler_detection(self):
        '''
        Test that automatic compiler detection and setting from the environment
        both work just fine. This is needed because while running project tests
        and other unit tests, we always read CC/CXX/etc from the environment.
        '''
        # Shorthands for the compiler/linker classes detection should return.
        gnu = mesonbuild.compilers.GnuCompiler
        clang = mesonbuild.compilers.ClangCompiler
        intel = mesonbuild.compilers.IntelCompiler
        msvc = mesonbuild.compilers.VisualStudioCCompiler
        ar = mesonbuild.linkers.ArLinker
        lib = mesonbuild.linkers.VisualStudioLinker
        # (language, environment variable) pairs to exercise.
        langs = [('c', 'CC'), ('cpp', 'CXX')]
        if not is_windows():
            langs += [('objc', 'OBJC'), ('objcpp', 'OBJCXX')]
        testdir = os.path.join(self.unit_test_dir, '5 compiler detection')
        env = Environment(testdir, self.builddir, self.meson_command,
                          get_fake_options(self.prefix), [])
        for lang, evar in langs:
            # Detect with evar and do sanity checks on that
            if evar in os.environ:
                ecc = getattr(env, 'detect_{}_compiler'.format(lang))(False)
                self.assertTrue(ecc.version)
                elinker = env.detect_static_linker(ecc)
                # Pop it so we don't use it for the next detection
                evalue = os.environ.pop(evar)
                # Very rough/strict heuristics. Would never work for actual
                # compiler detection, but should be ok for the tests.
                ebase = os.path.basename(evalue)
                if ebase.startswith('g') or ebase.endswith(('-gcc', '-g++')):
                    self.assertIsInstance(ecc, gnu)
                    self.assertIsInstance(elinker, ar)
                elif 'clang' in ebase:
                    self.assertIsInstance(ecc, clang)
                    self.assertIsInstance(elinker, ar)
                elif ebase.startswith('ic'):
                    self.assertIsInstance(ecc, intel)
                    self.assertIsInstance(elinker, ar)
                elif ebase.startswith('cl'):
                    self.assertIsInstance(ecc, msvc)
                    self.assertIsInstance(elinker, lib)
                else:
                    raise AssertionError('Unknown compiler {!r}'.format(evalue))
                # Check that we actually used the evalue correctly as the compiler
                self.assertEqual(ecc.get_exelist(), shlex.split(evalue))
            # Do auto-detection of compiler based on platform, PATH, etc.
            cc = getattr(env, 'detect_{}_compiler'.format(lang))(False)
            self.assertTrue(cc.version)
            linker = env.detect_static_linker(cc)
            # Check compiler type
            if isinstance(cc, gnu):
                self.assertIsInstance(linker, ar)
                if is_osx():
                    self.assertEqual(cc.gcc_type, mesonbuild.compilers.GCC_OSX)
                elif is_windows():
                    self.assertEqual(cc.gcc_type, mesonbuild.compilers.GCC_MINGW)
                elif is_cygwin():
                    self.assertEqual(cc.gcc_type, mesonbuild.compilers.GCC_CYGWIN)
                else:
                    self.assertEqual(cc.gcc_type, mesonbuild.compilers.GCC_STANDARD)
            if isinstance(cc, clang):
                self.assertIsInstance(linker, ar)
                if is_osx():
                    self.assertEqual(cc.clang_type, mesonbuild.compilers.CLANG_OSX)
                elif is_windows():
                    # Not implemented yet
                    self.assertEqual(cc.clang_type, mesonbuild.compilers.CLANG_WIN)
                else:
                    self.assertEqual(cc.clang_type, mesonbuild.compilers.CLANG_STANDARD)
            if isinstance(cc, intel):
                self.assertIsInstance(linker, ar)
                if is_osx():
                    self.assertEqual(cc.icc_type, mesonbuild.compilers.ICC_OSX)
                elif is_windows():
                    self.assertEqual(cc.icc_type, mesonbuild.compilers.ICC_WIN)
                else:
                    self.assertEqual(cc.icc_type, mesonbuild.compilers.ICC_STANDARD)
            if isinstance(cc, msvc):
                self.assertTrue(is_windows())
                self.assertIsInstance(linker, lib)
                self.assertEqual(cc.id, 'msvc')
                self.assertTrue(hasattr(cc, 'is_64'))
                # If we're in the appveyor CI, we know what the compiler will be
                if 'arch' in os.environ:
                    if os.environ['arch'] == 'x64':
                        self.assertTrue(cc.is_64)
                    else:
                        self.assertFalse(cc.is_64)
            # Set evar ourselves to a wrapper script that just calls the same
            # exelist + some argument. This is meant to test that setting
            # something like `ccache gcc -pipe` or `distcc ccache gcc` works.
            wrapper = os.path.join(testdir, 'compiler wrapper.py')
            wrappercc = python_command + [wrapper] + cc.get_exelist() + ['-DSOME_ARG']
            wrappercc_s = ''
            for w in wrappercc:
                wrappercc_s += shlex.quote(w) + ' '
            os.environ[evar] = wrappercc_s
            wcc = getattr(env, 'detect_{}_compiler'.format(lang))(False)
            # Check static linker too
            wrapperlinker = python_command + [wrapper] + linker.get_exelist() + linker.get_always_args()
            wrapperlinker_s = ''
            for w in wrapperlinker:
                wrapperlinker_s += shlex.quote(w) + ' '
            # NOTE(review): this leaves AR set for subsequent loop iterations;
            # presumably intentional since every iteration overwrites it — confirm.
            os.environ['AR'] = wrapperlinker_s
            wlinker = env.detect_static_linker(wcc)
            # Must be the same type since it's a wrapper around the same exelist
            self.assertIs(type(cc), type(wcc))
            self.assertIs(type(linker), type(wlinker))
            # Ensure that the exelist is correct
            self.assertEqual(wcc.get_exelist(), wrappercc)
            self.assertEqual(wlinker.get_exelist(), wrapperlinker)
            # Ensure that the version detection worked correctly
            self.assertEqual(cc.version, wcc.version)
            if hasattr(cc, 'is_64'):
                self.assertEqual(cc.is_64, wcc.is_64)
+
+ def test_always_prefer_c_compiler_for_asm(self):
+ testdir = os.path.join(self.common_test_dir, '141 c cpp and asm')
+ # Skip if building with MSVC
+ env = Environment(testdir, self.builddir, self.meson_command,
+ get_fake_options(self.prefix), [])
+ if env.detect_c_compiler(False).get_id() == 'msvc':
+ raise unittest.SkipTest('MSVC can\'t compile assembly')
+ self.init(testdir)
+ commands = {'c-asm': {}, 'cpp-asm': {}, 'cpp-c-asm': {}, 'c-cpp-asm': {}}
+ for cmd in self.get_compdb():
+ # Get compiler
+ split = shlex.split(cmd['command'])
+ if split[0] == 'ccache':
+ compiler = split[1]
+ else:
+ compiler = split[0]
+ # Classify commands
+ if 'Ic-asm' in cmd['command']:
+ if cmd['file'].endswith('.S'):
+ commands['c-asm']['asm'] = compiler
+ elif cmd['file'].endswith('.c'):
+ commands['c-asm']['c'] = compiler
+ else:
+ raise AssertionError('{!r} found in cpp-asm?'.format(cmd['command']))
+ elif 'Icpp-asm' in cmd['command']:
+ if cmd['file'].endswith('.S'):
+ commands['cpp-asm']['asm'] = compiler
+ elif cmd['file'].endswith('.cpp'):
+ commands['cpp-asm']['cpp'] = compiler
+ else:
+ raise AssertionError('{!r} found in cpp-asm?'.format(cmd['command']))
+ elif 'Ic-cpp-asm' in cmd['command']:
+ if cmd['file'].endswith('.S'):
+ commands['c-cpp-asm']['asm'] = compiler
+ elif cmd['file'].endswith('.c'):
+ commands['c-cpp-asm']['c'] = compiler
+ elif cmd['file'].endswith('.cpp'):
+ commands['c-cpp-asm']['cpp'] = compiler
+ else:
+ raise AssertionError('{!r} found in c-cpp-asm?'.format(cmd['command']))
+ elif 'Icpp-c-asm' in cmd['command']:
+ if cmd['file'].endswith('.S'):
+ commands['cpp-c-asm']['asm'] = compiler
+ elif cmd['file'].endswith('.c'):
+ commands['cpp-c-asm']['c'] = compiler
+ elif cmd['file'].endswith('.cpp'):
+ commands['cpp-c-asm']['cpp'] = compiler
+ else:
+ raise AssertionError('{!r} found in cpp-c-asm?'.format(cmd['command']))
+ else:
+ raise AssertionError('Unknown command {!r} found'.format(cmd['command']))
+ # Check that .S files are always built with the C compiler
+ self.assertEqual(commands['c-asm']['asm'], commands['c-asm']['c'])
+ self.assertEqual(commands['c-asm']['asm'], commands['cpp-asm']['asm'])
+ self.assertEqual(commands['cpp-asm']['asm'], commands['c-cpp-asm']['c'])
+ self.assertEqual(commands['c-cpp-asm']['asm'], commands['c-cpp-asm']['c'])
+ self.assertEqual(commands['cpp-c-asm']['asm'], commands['cpp-c-asm']['c'])
+ self.assertNotEqual(commands['cpp-asm']['asm'], commands['cpp-asm']['cpp'])
+ self.assertNotEqual(commands['c-cpp-asm']['c'], commands['c-cpp-asm']['cpp'])
+ self.assertNotEqual(commands['cpp-c-asm']['c'], commands['cpp-c-asm']['cpp'])
+ # Check that the c-asm target is always linked with the C linker
+ build_ninja = os.path.join(self.builddir, 'build.ninja')
+ with open(build_ninja, 'r', encoding='utf-8') as f:
+ contents = f.read()
+ m = re.search('build c-asm.*: c_LINKER', contents)
+ self.assertIsNotNone(m, msg=contents)
+
+ def test_preprocessor_checks_CPPFLAGS(self):
+ '''
+ Test that preprocessor compiler checks read CPPFLAGS but not CFLAGS
+ '''
+ testdir = os.path.join(self.common_test_dir, '140 get define')
+ define = 'MESON_TEST_DEFINE_VALUE'
+ # NOTE: this list can't have \n, ' or "
+ # \n is never substituted by the GNU pre-processor via a -D define
+ # ' and " confuse shlex.split() even when they are escaped
+ # % and # confuse the MSVC preprocessor
+ value = 'spaces and fun!@$^&*()-=_+{}[]:;<>?,./~`'
+ os.environ['CPPFLAGS'] = '-D{}="{}"'.format(define, value)
+ os.environ['CFLAGS'] = '-DMESON_FAIL_VALUE=cflags-read'.format(define)
+ self.init(testdir, ['-D{}={}'.format(define, value)])
+
+ def test_custom_target_exe_data_deterministic(self):
+ testdir = os.path.join(self.common_test_dir, '117 custom target capture')
+ self.init(testdir)
+ meson_exe_dat1 = glob(os.path.join(self.privatedir, 'meson_exe*.dat'))
+ self.wipe()
+ self.init(testdir)
+ meson_exe_dat2 = glob(os.path.join(self.privatedir, 'meson_exe*.dat'))
+ self.assertListEqual(meson_exe_dat1, meson_exe_dat2)
+
+ def test_source_changes_cause_rebuild(self):
+ '''
+ Test that changes to sources and headers cause rebuilds, but not
+ changes to unused files (as determined by the dependency file) in the
+ input files list.
+ '''
+ testdir = os.path.join(self.common_test_dir, '22 header in file list')
+ self.init(testdir)
+ self.build()
+ # Immediately rebuilding should not do anything
+ self.assertBuildIsNoop()
+ # Changing mtime of header.h should rebuild everything
+ self.utime(os.path.join(testdir, 'header.h'))
+ self.assertRebuiltTarget('prog')
+
+ def test_custom_target_changes_cause_rebuild(self):
+ '''
+ Test that in a custom target, changes to the input files, the
+ ExternalProgram, and any File objects on the command-line cause
+ a rebuild.
+ '''
+ testdir = os.path.join(self.common_test_dir, '64 custom header generator')
+ self.init(testdir)
+ self.build()
+ # Immediately rebuilding should not do anything
+ self.assertBuildIsNoop()
+ # Changing mtime of these should rebuild everything
+ for f in ('input.def', 'makeheader.py', 'somefile.txt'):
+ self.utime(os.path.join(testdir, f))
+ self.assertRebuiltTarget('prog')
+
+ def test_static_library_lto(self):
+ '''
+ Test that static libraries can be built with LTO and linked to
+ executables. On Linux, this requires the use of gcc-ar.
+ https://github.com/mesonbuild/meson/issues/1646
+ '''
+ testdir = os.path.join(self.common_test_dir, '5 linkstatic')
+ self.init(testdir, extra_args='-Db_lto=true')
+ self.build()
+ self.run_tests()
+
+ def test_dist_git(self):
+ if not shutil.which('git'):
+ raise unittest.SkipTest('Git not found')
+
+ def git_init(project_dir):
+ subprocess.check_call(['git', 'init'], cwd=project_dir, stdout=subprocess.DEVNULL)
+ subprocess.check_call(['git', 'config',
+ 'user.name', 'Author Person'], cwd=project_dir)
+ subprocess.check_call(['git', 'config',
+ 'user.email', 'teh_coderz@example.com'], cwd=project_dir)
+ subprocess.check_call(['git', 'add', 'meson.build', 'distexe.c'], cwd=project_dir,
+ stdout=subprocess.DEVNULL)
+ subprocess.check_call(['git', 'commit', '-a', '-m', 'I am a project'], cwd=project_dir,
+ stdout=subprocess.DEVNULL)
+
+ try:
+ self.dist_impl(git_init)
+ except PermissionError:
+ # When run under Windows CI, something (virus scanner?)
+ # holds on to the git files so cleaning up the dir
+ # fails sometimes.
+ pass
+
+ def test_dist_hg(self):
+ if not shutil.which('hg'):
+ raise unittest.SkipTest('Mercurial not found')
+ if self.backend is not Backend.ninja:
+ raise unittest.SkipTest('Dist is only supported with Ninja')
+
+ def hg_init(project_dir):
+ subprocess.check_call(['hg', 'init'], cwd=project_dir)
+ with open(os.path.join(project_dir, '.hg', 'hgrc'), 'w') as f:
+ print('[ui]', file=f)
+ print('username=Author Person <teh_coderz@example.com>', file=f)
+ subprocess.check_call(['hg', 'add', 'meson.build', 'distexe.c'], cwd=project_dir)
+ subprocess.check_call(['hg', 'commit', '-m', 'I am a project'], cwd=project_dir)
+
+ try:
+ self.dist_impl(hg_init)
+ except PermissionError:
+ # When run under Windows CI, something (virus scanner?)
+ # holds on to the hg files so cleaning up the dir
+ # fails sometimes.
+ pass
+
    def dist_impl(self, vcs_init):
        # Shared implementation for the git/hg dist tests: write a minimal
        # C project into a temp dir, let @vcs_init put it under version
        # control, then configure, build the `dist` target, and check that
        # the tarball and its checksum file were produced.
        #
        # Create this on the fly because having rogue .git directories inside
        # the source tree leads to all kinds of trouble.
        with tempfile.TemporaryDirectory() as project_dir:
            with open(os.path.join(project_dir, 'meson.build'), 'w') as ofile:
                ofile.write('''project('disttest', 'c', version : '1.4.3')
e = executable('distexe', 'distexe.c')
test('dist test', e)
''')
            with open(os.path.join(project_dir, 'distexe.c'), 'w') as ofile:
                ofile.write('''#include<stdio.h>

int main(int argc, char **argv) {
    printf("I am a distribution test.\\n");
    return 0;
}
''')
            vcs_init(project_dir)
            self.init(project_dir)
            self.build('dist')
            # Archive name must match the project() name/version above.
            distfile = os.path.join(self.distdir, 'disttest-1.4.3.tar.xz')
            checksumfile = distfile + '.sha256sum'
            self.assertPathExists(distfile)
            self.assertPathExists(checksumfile)
+
+ def test_rpath_uses_ORIGIN(self):
+ '''
+ Test that built targets use $ORIGIN in rpath, which ensures that they
+ are relocatable and ensures that builds are reproducible since the
+ build directory won't get embedded into the built binaries.
+ '''
+ if is_windows() or is_cygwin():
+ raise unittest.SkipTest('Windows PE/COFF binaries do not use RPATH')
+ testdir = os.path.join(self.common_test_dir, '46 library chain')
+ self.init(testdir)
+ self.build()
+ for each in ('prog', 'subdir/liblib1.so', ):
+ rpath = get_rpath(os.path.join(self.builddir, each))
+ self.assertTrue(rpath)
+ for path in rpath.split(':'):
+ self.assertTrue(path.startswith('$ORIGIN'), msg=(each, path))
+ # These two don't link to anything else, so they do not need an rpath entry.
+ for each in ('subdir/subdir2/liblib2.so', 'subdir/subdir3/liblib3.so'):
+ rpath = get_rpath(os.path.join(self.builddir, each))
+ self.assertTrue(rpath is None)
+
+ def test_dash_d_dedup(self):
+ testdir = os.path.join(self.unit_test_dir, '10 d dedup')
+ self.init(testdir)
+ cmd = self.get_compdb()[0]['command']
+ self.assertTrue('-D FOO -D BAR' in cmd or
+ '"-D" "FOO" "-D" "BAR"' in cmd or
+ '/D FOO /D BAR' in cmd or
+ '"/D" "FOO" "/D" "BAR"' in cmd)
+
+ def test_all_forbidden_targets_tested(self):
+ '''
+ Test that all forbidden targets are tested in the '159 reserved targets'
+ test. Needs to be a unit test because it accesses Meson internals.
+ '''
+ testdir = os.path.join(self.common_test_dir, '159 reserved targets')
+ targets = mesonbuild.coredata.forbidden_target_names
+ # We don't actually define a target with this name
+ targets.pop('build.ninja')
+ # Remove this to avoid multiple entries with the same name
+ # but different case.
+ targets.pop('PHONY')
+ for i in targets:
+ self.assertPathExists(os.path.join(testdir, i))
+
+ def detect_prebuild_env(self):
+ env = Environment('', self.builddir, self.meson_command,
+ get_fake_options(self.prefix), [])
+ cc = env.detect_c_compiler(False)
+ stlinker = env.detect_static_linker(cc)
+ if mesonbuild.mesonlib.is_windows():
+ object_suffix = 'obj'
+ shared_suffix = 'dll'
+ elif mesonbuild.mesonlib.is_cygwin():
+ object_suffix = 'o'
+ shared_suffix = 'dll'
+ elif mesonbuild.mesonlib.is_osx():
+ object_suffix = 'o'
+ shared_suffix = 'dylib'
+ else:
+ object_suffix = 'o'
+ shared_suffix = 'so'
+ return (cc, stlinker, object_suffix, shared_suffix)
+
+ def pbcompile(self, compiler, source, objectfile, extra_args=[]):
+ cmd = compiler.get_exelist()
+ if compiler.id == 'msvc':
+ cmd += ['/nologo', '/Fo' + objectfile, '/c', source] + extra_args
+ else:
+ cmd += ['-c', source, '-o', objectfile] + extra_args
+ subprocess.check_call(cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
+
+
+ def test_prebuilt_object(self):
+ (compiler, _, object_suffix, _) = self.detect_prebuild_env()
+ tdir = os.path.join(self.unit_test_dir, '14 prebuilt object')
+ source = os.path.join(tdir, 'source.c')
+ objectfile = os.path.join(tdir, 'prebuilt.' + object_suffix)
+ self.pbcompile(compiler, source, objectfile)
+ try:
+ self.init(tdir)
+ self.build()
+ self.run_tests()
+ finally:
+ os.unlink(objectfile)
+
+ def build_static_lib(self, compiler, linker, source, objectfile, outfile, extra_args=None):
+ if extra_args is None:
+ extra_args = []
+ if compiler.id == 'msvc':
+ link_cmd = ['lib', '/NOLOGO', '/OUT:' + outfile, objectfile]
+ else:
+ link_cmd = ['ar', 'csr', outfile, objectfile]
+ link_cmd = linker.get_exelist()
+ link_cmd += linker.get_always_args()
+ link_cmd += linker.get_std_link_args()
+ link_cmd += linker.get_output_args(outfile)
+ link_cmd += [objectfile]
+ self.pbcompile(compiler, source, objectfile, extra_args=extra_args)
+ try:
+ subprocess.check_call(link_cmd)
+ finally:
+ os.unlink(objectfile)
+
+ def test_prebuilt_static_lib(self):
+ (cc, stlinker, object_suffix, _) = self.detect_prebuild_env()
+ tdir = os.path.join(self.unit_test_dir, '15 prebuilt static')
+ source = os.path.join(tdir, 'libdir/best.c')
+ objectfile = os.path.join(tdir, 'libdir/best.' + object_suffix)
+ stlibfile = os.path.join(tdir, 'libdir/libbest.a')
+ self.build_static_lib(cc, stlinker, source, objectfile, stlibfile)
+ # Run the test
+ try:
+ self.init(tdir)
+ self.build()
+ self.run_tests()
+ finally:
+ os.unlink(stlibfile)
+
+ def build_shared_lib(self, compiler, source, objectfile, outfile, impfile, extra_args=None):
+ if extra_args is None:
+ extra_args = []
+ if compiler.id == 'msvc':
+ link_cmd = ['link', '/NOLOGO', '/DLL', '/DEBUG',
+ '/IMPLIB:' + impfile, '/OUT:' + outfile, objectfile]
+ else:
+ extra_args += ['-fPIC']
+ link_cmd = compiler.get_exelist() + ['-shared', '-o', outfile, objectfile]
+ if not mesonbuild.mesonlib.is_osx():
+ link_cmd += ['-Wl,-soname=' + os.path.basename(outfile)]
+ self.pbcompile(compiler, source, objectfile, extra_args=extra_args)
+ try:
+ subprocess.check_call(link_cmd)
+ finally:
+ os.unlink(objectfile)
+
    def test_prebuilt_shared_lib(self):
        # Build a shared library by hand, then verify that a Meson project
        # can find and link against it as a prebuilt library.
        (cc, _, object_suffix, shared_suffix) = self.detect_prebuild_env()
        tdir = os.path.join(self.unit_test_dir, '16 prebuilt shared')
        source = os.path.join(tdir, 'alexandria.c')
        objectfile = os.path.join(tdir, 'alexandria.' + object_suffix)
        impfile = os.path.join(tdir, 'alexandria.lib')
        # Shared library filename convention differs per platform/toolchain.
        if cc.id == 'msvc':
            shlibfile = os.path.join(tdir, 'alexandria.' + shared_suffix)
        elif is_cygwin():
            shlibfile = os.path.join(tdir, 'cygalexandria.' + shared_suffix)
        else:
            shlibfile = os.path.join(tdir, 'libalexandria.' + shared_suffix)
        self.build_shared_lib(cc, source, objectfile, shlibfile, impfile)
        # Run the test
        try:
            self.init(tdir)
            self.build()
            self.run_tests()
        finally:
            # The library was built into the source tree; always remove it.
            os.unlink(shlibfile)
            if mesonbuild.mesonlib.is_windows():
                # Clean up all the garbage MSVC writes in the
                # source tree.
                for fname in glob(os.path.join(tdir, 'alexandria.*')):
                    if os.path.splitext(fname)[1] not in ['.c', '.h']:
                        os.unlink(fname)
+
    def test_pkgconfig_static(self):
        '''
        Test that the we only use static libraries when `static: true` is
        passed to dependency() with pkg-config. Can't be an ordinary test
        because we need to build libs and try to find them from meson.build
        '''
        if not shutil.which('pkg-config'):
            raise unittest.SkipTest('pkg-config not found')
        (cc, stlinker, objext, shext) = self.detect_prebuild_env()
        testdir = os.path.join(self.unit_test_dir, '17 pkgconfig static')
        source = os.path.join(testdir, 'foo.c')
        objectfile = os.path.join(testdir, 'foo.' + objext)
        stlibfile = os.path.join(testdir, 'libfoo.a')
        impfile = os.path.join(testdir, 'foo.lib')
        # Shared library filename convention differs per platform/toolchain.
        if cc.id == 'msvc':
            shlibfile = os.path.join(testdir, 'foo.' + shext)
        elif is_cygwin():
            shlibfile = os.path.join(testdir, 'cygfoo.' + shext)
        else:
            shlibfile = os.path.join(testdir, 'libfoo.' + shext)
        # Build libs
        self.build_static_lib(cc, stlinker, source, objectfile, stlibfile, extra_args=['-DFOO_STATIC'])
        self.build_shared_lib(cc, source, objectfile, shlibfile, impfile)
        # Run test
        # NOTE(review): PKG_CONFIG_LIBDIR is set but never restored here,
        # so it leaks into subsequent tests — confirm the harness resets env.
        os.environ['PKG_CONFIG_LIBDIR'] = self.builddir
        try:
            self.init(testdir)
            self.build()
            self.run_tests()
        finally:
            # Both libraries were built into the source tree; remove them.
            os.unlink(stlibfile)
            os.unlink(shlibfile)
            if mesonbuild.mesonlib.is_windows():
                # Clean up all the garbage MSVC writes in the
                # source tree.
                for fname in glob(os.path.join(testdir, 'foo.*')):
                    if os.path.splitext(fname)[1] not in ['.c', '.h', '.in']:
                        os.unlink(fname)
+
+ def test_pkgconfig_gen_escaping(self):
+ if not shutil.which('pkg-config'):
+ raise unittest.SkipTest('pkg-config not found')
+ testdir = os.path.join(self.common_test_dir, '51 pkgconfig-gen')
+ prefix = '/usr/with spaces'
+ libdir = 'lib'
+ self.init(testdir, extra_args=['--prefix=' + prefix,
+ '--libdir=' + libdir])
+ # Find foo dependency
+ os.environ['PKG_CONFIG_LIBDIR'] = self.privatedir
+ env = Environment(testdir, self.builddir, self.meson_command,
+ get_fake_options(self.prefix), [])
+ kwargs = {'required': True, 'silent': True}
+ foo_dep = PkgConfigDependency('libfoo', env, kwargs)
+ # Ensure link_args are properly quoted
+ libdir = PurePath(prefix) / PurePath(libdir)
+ link_args = ['-L' + libdir.as_posix(), '-lfoo']
+ self.assertEqual(foo_dep.get_link_args(), link_args)
+ # Ensure include args are properly quoted
+ incdir = PurePath(prefix) / PurePath('include')
+ cargs = ['-I' + incdir.as_posix()]
+ self.assertEqual(foo_dep.get_compile_args(), cargs)
+
+ def test_array_option_change(self):
+ def get_opt():
+ opts = self.introspect('--buildoptions')
+ for x in opts:
+ if x.get('name') == 'list':
+ return x
+ raise Exception(opts)
+
+ expected = {
+ 'name': 'list',
+ 'description': 'list',
+ 'type': 'array',
+ 'value': ['foo', 'bar'],
+ }
+ tdir = os.path.join(self.unit_test_dir, '18 array option')
+ self.init(tdir)
+ original = get_opt()
+ self.assertDictEqual(original, expected)
+
+ expected['value'] = ['oink', 'boink']
+ self.setconf('-Dlist=oink,boink')
+ changed = get_opt()
+ self.assertEqual(changed, expected)
+
+ def test_array_option_bad_change(self):
+ def get_opt():
+ opts = self.introspect('--buildoptions')
+ for x in opts:
+ if x.get('name') == 'list':
+ return x
+ raise Exception(opts)
+
+ expected = {
+ 'name': 'list',
+ 'description': 'list',
+ 'type': 'array',
+ 'value': ['foo', 'bar'],
+ }
+ tdir = os.path.join(self.unit_test_dir, '18 array option')
+ self.init(tdir)
+ original = get_opt()
+ self.assertDictEqual(original, expected)
+ with self.assertRaises(subprocess.CalledProcessError):
+ self.setconf('-Dlist=bad')
+ changed = get_opt()
+ self.assertDictEqual(changed, expected)
+
+ def opt_has(self, name, value):
+ res = self.introspect('--buildoptions')
+ found = False
+ for i in res:
+ if i['name'] == name:
+ self.assertEqual(i['value'], value)
+ found = True
+ break
+ self.assertTrue(found, "Array option not found in introspect data.")
+
+ def test_free_stringarray_setting(self):
+ testdir = os.path.join(self.common_test_dir, '47 options')
+ self.init(testdir)
+ self.opt_has('free_array_opt', [])
+ self.setconf('-Dfree_array_opt=foo,bar', will_build=False)
+ self.opt_has('free_array_opt', ['foo', 'bar'])
+ self.setconf("-Dfree_array_opt=['a,b', 'c,d']", will_build=False)
+ self.opt_has('free_array_opt', ['a,b', 'c,d'])
+
+
class FailureTests(BasePlatformTests):
    '''
    Tests that test failure conditions. Build files here should be dynamically
    generated and static tests should go into `test cases/failing*`.
    This is useful because there can be many ways in which a particular
    function can fail, and creating failing tests for all of them is tedious
    and slows down testing.
    '''
    # Regex matching the generic "dependency not found" error; shared by
    # most of the tests below.
    dnf = "[Dd]ependency.*not found"

    def setUp(self):
        # Each test writes its own meson.build into a fresh temp source dir.
        super().setUp()
        self.srcdir = os.path.realpath(tempfile.mkdtemp())
        self.mbuild = os.path.join(self.srcdir, 'meson.build')

    def tearDown(self):
        super().tearDown()
        windows_proof_rmtree(self.srcdir)

    def assertMesonRaises(self, contents, match, extra_args=None, langs=None):
        '''
        Assert that running meson configure on the specified @contents raises
        a error message matching regex @match.
        '''
        if meson_exe is not None:
            # Because the exception happens in a different process.
            raise unittest.SkipTest('Can not test assert raise tests with an external Meson command.')
        if langs is None:
            langs = []
        with open(self.mbuild, 'w') as f:
            f.write("project('failure test', 'c', 'cpp')\n")
            for lang in langs:
                f.write("add_languages('{}', required : false)\n".format(lang))
            f.write(contents)
        # Force tracebacks so we can detect them properly
        # NOTE(review): this env var is never restored, so it leaks into
        # later tests in the same process — consider saving/restoring it.
        os.environ['MESON_FORCE_BACKTRACE'] = '1'
        with self.assertRaisesRegex(DependencyException, match, msg=contents):
            # Must run in-process or we'll get a generic CalledProcessError
            self.init(self.srcdir, extra_args=extra_args, inprocess=True)

    def assertMesonOutputs(self, contents, match, extra_args=None, langs=None):
        '''
        Assert that running meson configure on the specified @contents outputs
        something that matches regex @match.
        '''
        if langs is None:
            langs = []
        with open(self.mbuild, 'w') as f:
            f.write("project('output test', 'c', 'cpp')\n")
            for lang in langs:
                f.write("add_languages('{}', required : false)\n".format(lang))
            f.write(contents)
        # Run in-process for speed and consistency with assertMesonRaises
        out = self.init(self.srcdir, extra_args=extra_args, inprocess=True)
        self.assertRegex(out, match)

    def test_dependency(self):
        # Each bad dependency() kwarg must produce its specific error.
        if not shutil.which('pkg-config'):
            raise unittest.SkipTest('pkg-config not found')
        if subprocess.call(['pkg-config', '--exists', 'zlib']) != 0:
            raise unittest.SkipTest('zlib not found with pkg-config')
        a = (("dependency('zlib', method : 'fail')", "'fail' is invalid"),
             ("dependency('zlib', static : '1')", "[Ss]tatic.*boolean"),
             ("dependency('zlib', version : 1)", "[Vv]ersion.*string or list"),
             ("dependency('zlib', required : 1)", "[Rr]equired.*boolean"),
             ("dependency('zlib', method : 1)", "[Mm]ethod.*string"),
             ("dependency('zlibfail')", self.dnf),)
        for contents, match in a:
            self.assertMesonRaises(contents, match)

    def test_apple_frameworks_dependency(self):
        if not is_osx():
            raise unittest.SkipTest('only run on macOS')
        self.assertMesonRaises("dependency('appleframeworks')",
                               "requires at least one module")

    def test_sdl2_notfound_dependency(self):
        # Want to test failure, so skip if available
        if shutil.which('sdl2-config'):
            raise unittest.SkipTest('sdl2-config found')
        self.assertMesonRaises("dependency('sdl2', method : 'sdlconfig')", self.dnf)
        self.assertMesonRaises("dependency('sdl2', method : 'pkg-config')", self.dnf)

    def test_gnustep_notfound_dependency(self):
        # Want to test failure, so skip if available
        if shutil.which('gnustep-config'):
            raise unittest.SkipTest('gnustep-config found')
        self.assertMesonRaises("dependency('gnustep')",
                               "(requires a Objc compiler|{})".format(self.dnf),
                               langs = ['objc'])

    def test_wx_notfound_dependency(self):
        # Want to test failure, so skip if available
        if shutil.which('wx-config-3.0') or shutil.which('wx-config'):
            raise unittest.SkipTest('wx-config or wx-config-3.0 found')
        self.assertMesonRaises("dependency('wxwidgets')", self.dnf)
        self.assertMesonOutputs("dependency('wxwidgets', required : false)",
                                "No wx-config found;")

    def test_wx_dependency(self):
        # Bad 'modules' kwarg must be rejected even when wx is present.
        if not shutil.which('wx-config-3.0') and not shutil.which('wx-config'):
            raise unittest.SkipTest('Neither wx-config nor wx-config-3.0 found')
        self.assertMesonRaises("dependency('wxwidgets', modules : 1)",
                               "module argument is not a string")

    def test_llvm_dependency(self):
        # Works whether LLVM is installed (bad module) or missing entirely.
        self.assertMesonRaises("dependency('llvm', modules : 'fail')",
                               "(required.*fail|{})".format(self.dnf))

    def test_boost_notfound_dependency(self):
        # Can be run even if Boost is found or not
        self.assertMesonRaises("dependency('boost', modules : 1)",
                               "module.*not a string")
        self.assertMesonRaises("dependency('boost', modules : 'fail')",
                               "(fail.*not found|{})".format(self.dnf))

    def test_boost_BOOST_ROOT_dependency(self):
        # Test BOOST_ROOT; can be run even if Boost is found or not
        # NOTE(review): BOOST_ROOT is never unset afterwards and may leak
        # into later tests — confirm whether the harness resets env.
        os.environ['BOOST_ROOT'] = 'relative/path'
        self.assertMesonRaises("dependency('boost')",
                               "(BOOST_ROOT.*absolute|{})".format(self.dnf))

    def test_dependency_invalid_method(self):
        # Method calls that don't match the dependency's kind must fail.
        code = '''zlib_dep = dependency('zlib', required : false)
        zlib_dep.get_configtool_variable('foo')
        '''
        self.assertMesonRaises(code, "'zlib' is not a config-tool dependency")
        code = '''zlib_dep = dependency('zlib', required : false)
        dep = declare_dependency(dependencies : zlib_dep)
        dep.get_pkgconfig_variable('foo')
        '''
        self.assertMesonRaises(code, "Method.*pkgconfig.*is invalid.*internal")
        code = '''zlib_dep = dependency('zlib', required : false)
        dep = declare_dependency(dependencies : zlib_dep)
        dep.get_configtool_variable('foo')
        '''
        self.assertMesonRaises(code, "Method.*configtool.*is invalid.*internal")
+
+
class WindowsTests(BasePlatformTests):
    '''
    Tests that should run on Cygwin, MinGW, and MSVC
    '''
    def setUp(self):
        super().setUp()
        # Root of the Windows-specific project test cases.
        self.platform_test_dir = os.path.join(self.src_root, 'test cases/windows')

    def test_find_program(self):
        '''
        Test that Windows-specific edge-cases in find_program are functioning
        correctly. Cannot be an ordinary test because it involves manipulating
        PATH to point to a directory with Python scripts.
        '''
        testdir = os.path.join(self.platform_test_dir, '9 find program')
        # Find `cmd` and `cmd.exe`
        prog1 = ExternalProgram('cmd')
        self.assertTrue(prog1.found(), msg='cmd not found')
        prog2 = ExternalProgram('cmd.exe')
        self.assertTrue(prog2.found(), msg='cmd.exe not found')
        # Both spellings must resolve to the same executable.
        self.assertPathEqual(prog1.get_path(), prog2.get_path())
        # Find cmd with an absolute path that's missing the extension
        cmd_path = prog2.get_path()[:-4]
        prog = ExternalProgram(cmd_path)
        self.assertTrue(prog.found(), msg='{!r} not found'.format(cmd_path))
        # Finding a script with no extension inside a directory works
        prog = ExternalProgram(os.path.join(testdir, 'test-script'))
        self.assertTrue(prog.found(), msg='test-script not found')
        # Finding a script with an extension inside a directory works
        prog = ExternalProgram(os.path.join(testdir, 'test-script-ext.py'))
        self.assertTrue(prog.found(), msg='test-script-ext.py not found')
        # Finding a script in PATH w/o extension works and adds the interpreter
        # NOTE(review): PATH is extended but never restored, leaking into
        # later tests in this process — consider save/restore.
        os.environ['PATH'] += os.pathsep + testdir
        prog = ExternalProgram('test-script-ext')
        self.assertTrue(prog.found(), msg='test-script-ext not found in PATH')
        self.assertPathEqual(prog.get_command()[0], python_command[0])
        self.assertPathBasenameEqual(prog.get_path(), 'test-script-ext.py')
        # Finding a script in PATH with extension works and adds the interpreter
        prog = ExternalProgram('test-script-ext.py')
        self.assertTrue(prog.found(), msg='test-script-ext.py not found in PATH')
        self.assertPathEqual(prog.get_command()[0], python_command[0])
        self.assertPathBasenameEqual(prog.get_path(), 'test-script-ext.py')

    def test_ignore_libs(self):
        '''
        Test that find_library on libs that are to be ignored returns an empty
        array of arguments. Must be a unit test because we cannot inspect
        ExternalLibraryHolder from build files.
        '''
        testdir = os.path.join(self.platform_test_dir, '1 basic')
        env = Environment(testdir, self.builddir, self.meson_command,
                          get_fake_options(self.prefix), [])
        cc = env.detect_c_compiler(False)
        if cc.id != 'msvc':
            raise unittest.SkipTest('Not using MSVC')
        # To force people to update this test, and also test
        self.assertEqual(set(cc.ignore_libs), {'c', 'm', 'pthread'})
        for l in cc.ignore_libs:
            # Ignored libs must produce no linker arguments at all.
            self.assertEqual(cc.find_library(l, env, []), [])
+
+
+class LinuxlikeTests(BasePlatformTests):
+ '''
+ Tests that should run on Linux and *BSD
+ '''
+ def test_basic_soname(self):
+ '''
+ Test that the soname is set correctly for shared libraries. This can't
+ be an ordinary test case because we need to run `readelf` and actually
+ check the soname.
+ https://github.com/mesonbuild/meson/issues/785
+ '''
+ testdir = os.path.join(self.common_test_dir, '4 shared')
+ self.init(testdir)
+ self.build()
+ lib1 = os.path.join(self.builddir, 'libmylib.so')
+ soname = get_soname(lib1)
+ self.assertEqual(soname, 'libmylib.so')
+
+ def test_custom_soname(self):
+ '''
+ Test that the soname is set correctly for shared libraries when
+ a custom prefix and/or suffix is used. This can't be an ordinary test
+ case because we need to run `readelf` and actually check the soname.
+ https://github.com/mesonbuild/meson/issues/785
+ '''
+ testdir = os.path.join(self.common_test_dir, '27 library versions')
+ self.init(testdir)
+ self.build()
+ lib1 = os.path.join(self.builddir, 'prefixsomelib.suffix')
+ soname = get_soname(lib1)
+ self.assertEqual(soname, 'prefixsomelib.suffix')
+
+ def test_pic(self):
+ '''
+ Test that -fPIC is correctly added to static libraries when b_staticpic
+ is true and not when it is false. This can't be an ordinary test case
+ because we need to inspect the compiler database.
+ '''
+ testdir = os.path.join(self.common_test_dir, '3 static')
+ self.init(testdir)
+ compdb = self.get_compdb()
+ self.assertIn('-fPIC', compdb[0]['command'])
+ self.setconf('-Db_staticpic=false')
+ # Regenerate build
+ self.build()
+ compdb = self.get_compdb()
+ self.assertNotIn('-fPIC', compdb[0]['command'])
+
+ def test_pkgconfig_gen(self):
+ '''
+ Test that generated pkg-config files can be found and have the correct
+ version and link args. This can't be an ordinary test case because we
+ need to run pkg-config outside of a Meson build file.
+ https://github.com/mesonbuild/meson/issues/889
+ '''
+ testdir = os.path.join(self.common_test_dir, '51 pkgconfig-gen')
+ self.init(testdir)
+ env = Environment(testdir, self.builddir, self.meson_command,
+ get_fake_options(self.prefix), [])
+ kwargs = {'required': True, 'silent': True}
+ os.environ['PKG_CONFIG_LIBDIR'] = self.privatedir
+ foo_dep = PkgConfigDependency('libfoo', env, kwargs)
+ self.assertTrue(foo_dep.found())
+ self.assertEqual(foo_dep.get_version(), '1.0')
+ self.assertIn('-lfoo', foo_dep.get_link_args())
+ self.assertEqual(foo_dep.get_pkgconfig_variable('foo', {}), 'bar')
+ self.assertPathEqual(foo_dep.get_pkgconfig_variable('datadir', {}), '/usr/data')
+
    def test_vala_c_warnings(self):
        '''
        Test that no warnings are emitted for C code generated by Vala. This
        can't be an ordinary test case because we need to inspect the compiler
        database.
        https://github.com/mesonbuild/meson/issues/864
        '''
        testdir = os.path.join(self.vala_test_dir, '5 target glib')
        self.init(testdir)
        compdb = self.get_compdb()
        vala_command = None
        c_command = None
        # Classify every compdb entry by filename; anything unexpected is a
        # hard failure so the test stays in sync with the project layout.
        for each in compdb:
            if each['file'].endswith('GLib.Thread.c'):
                vala_command = each['command']
            elif each['file'].endswith('GLib.Thread.vala'):
                continue
            elif each['file'].endswith('retcode.c'):
                c_command = each['command']
            else:
                m = 'Unknown file {!r} in vala_c_warnings test'.format(each['file'])
                raise AssertionError(m)
        self.assertIsNotNone(vala_command)
        self.assertIsNotNone(c_command)
        # -w suppresses all warnings, should be there in Vala but not in C
        self.assertIn(" -w ", vala_command)
        self.assertNotIn(" -w ", c_command)
        # -Wall enables all warnings, should be there in C but not in Vala
        self.assertNotIn(" -Wall ", vala_command)
        self.assertIn(" -Wall ", c_command)
        # -Werror converts warnings to errors, should always be there since it's
        # injected by an unrelated piece of code and the project has werror=true
        self.assertIn(" -Werror ", vala_command)
        self.assertIn(" -Werror ", c_command)
+
+ def test_qt5dependency_pkgconfig_detection(self):
+ '''
+ Test that qt4 and qt5 detection with pkgconfig works.
+ '''
+ # Verify Qt4 or Qt5 can be found with pkg-config
+ if not shutil.which('pkg-config'):
+ raise unittest.SkipTest('pkg-config not found')
+ qt4 = subprocess.call(['pkg-config', '--exists', 'QtCore'])
+ qt5 = subprocess.call(['pkg-config', '--exists', 'Qt5Core'])
+ if qt4 != 0 or qt5 != 0:
+ raise unittest.SkipTest('Qt not found with pkg-config')
+ testdir = os.path.join(self.framework_test_dir, '4 qt')
+ self.init(testdir, ['-Dmethod=pkg-config'])
+ # Confirm that the dependency was found with qmake
+ msg = 'Qt4 native `pkg-config` dependency (modules: Core, Gui) found: YES\n'
+ msg2 = 'Qt5 native `pkg-config` dependency (modules: Core, Gui) found: YES\n'
+ mesonlog = self.get_meson_log()
+ self.assertTrue(msg in mesonlog or msg2 in mesonlog)
+
+ def test_qt5dependency_qmake_detection(self):
+ '''
+ Test that qt5 detection with qmake works. This can't be an ordinary
+ test case because it involves setting the environment.
+ '''
+ # Verify that qmake is for Qt5
+ if not shutil.which('qmake-qt5'):
+ if not shutil.which('qmake'):
+ raise unittest.SkipTest('QMake not found')
+ # For some inexplicable reason qmake --version gives different
+ # results when run from the command line vs invoked by Python.
+ # Check for both cases in case this behavior changes in the future.
+ output = subprocess.getoutput(['qmake', '--version'])
+ if 'Qt version 5' not in output and 'qt5' not in output:
+ raise unittest.SkipTest('Qmake found, but it is not for Qt 5.')
+ # Disable pkg-config codepath and force searching with qmake/qmake-qt5
+ testdir = os.path.join(self.framework_test_dir, '4 qt')
+ self.init(testdir, ['-Dmethod=qmake'])
+ # Confirm that the dependency was found with qmake
+ msg = 'Qt5 native `qmake-qt5` dependency (modules: Core) found: YES\n'
+ msg2 = 'Qt5 native `qmake` dependency (modules: Core) found: YES\n'
+ mesonlog = self.get_meson_log()
+ self.assertTrue(msg in mesonlog or msg2 in mesonlog)
+
    def _test_soname_impl(self, libpath, install):
        # Shared implementation for test_soname/test_installed_soname:
        # builds the '1 soname' project (installing when @install is true)
        # and checks filename, symlink aliases, soname and alias count for
        # every version/soversion combination under @libpath.
        testdir = os.path.join(self.unit_test_dir, '1 soname')
        self.init(testdir)
        self.build()
        if install:
            self.install()

        # File without aliases set.
        nover = os.path.join(libpath, 'libnover.so')
        self.assertPathExists(nover)
        self.assertFalse(os.path.islink(nover))
        self.assertEqual(get_soname(nover), 'libnover.so')
        self.assertEqual(len(glob(nover[:-3] + '*')), 1)

        # File with version set
        verset = os.path.join(libpath, 'libverset.so')
        self.assertPathExists(verset + '.4.5.6')
        self.assertEqual(os.readlink(verset), 'libverset.so.4')
        self.assertEqual(get_soname(verset), 'libverset.so.4')
        self.assertEqual(len(glob(verset[:-3] + '*')), 3)

        # File with soversion set
        soverset = os.path.join(libpath, 'libsoverset.so')
        self.assertPathExists(soverset + '.1.2.3')
        self.assertEqual(os.readlink(soverset), 'libsoverset.so.1.2.3')
        self.assertEqual(get_soname(soverset), 'libsoverset.so.1.2.3')
        self.assertEqual(len(glob(soverset[:-3] + '*')), 2)

        # File with version and soversion set to same values
        settosame = os.path.join(libpath, 'libsettosame.so')
        self.assertPathExists(settosame + '.7.8.9')
        self.assertEqual(os.readlink(settosame), 'libsettosame.so.7.8.9')
        self.assertEqual(get_soname(settosame), 'libsettosame.so.7.8.9')
        self.assertEqual(len(glob(settosame[:-3] + '*')), 2)

        # File with version and soversion set to different values
        bothset = os.path.join(libpath, 'libbothset.so')
        self.assertPathExists(bothset + '.1.2.3')
        self.assertEqual(os.readlink(bothset), 'libbothset.so.1.2.3')
        self.assertEqual(os.readlink(bothset + '.1.2.3'), 'libbothset.so.4.5.6')
        self.assertEqual(get_soname(bothset), 'libbothset.so.1.2.3')
        self.assertEqual(len(glob(bothset[:-3] + '*')), 3)
+
    def test_soname(self):
        # Check sonames/aliases of libraries in the build directory.
        self._test_soname_impl(self.builddir, False)
+
    def test_installed_soname(self):
        # Check sonames/aliases of libraries after installation.
        # NOTE(review): plain string concatenation here relies on
        # self.libdir beginning with a path separator — confirm against
        # BasePlatformTests before changing.
        self._test_soname_impl(self.installdir + self.libdir, True)
+
    def test_compiler_check_flags_order(self):
        '''
        Test that compiler check flags override all other flags. This can't be
        an ordinary test case because it needs the environment to be set.
        '''
        Oflag = '-O3'
        # NOTE(review): CFLAGS/CXXFLAGS are set but never restored and may
        # leak into later tests — confirm whether the harness resets env.
        os.environ['CFLAGS'] = os.environ['CXXFLAGS'] = Oflag
        testdir = os.path.join(self.common_test_dir, '43 has function')
        self.init(testdir)
        cmds = self.get_meson_log_compiler_checks()
        for cmd in cmds:
            # Strip the ccache wrapper so positional checks line up below.
            if cmd[0] == 'ccache':
                cmd = cmd[1:]
            # Verify that -I flags from the `args` kwarg are first
            # This is set in the '43 has function' test case
            self.assertEqual(cmd[1], '-I/tmp')
            # Verify that -O3 set via the environment is overridden by -O0
            Oargs = [arg for arg in cmd if arg.startswith('-O')]
            self.assertEqual(Oargs, [Oflag, '-O0'])
+
    def _test_stds_impl(self, testdir, compiler, p):
        # Shared implementation for the C/C++ std tests. @p is the language
        # prefix ('c' or 'cpp'), used to form both the option name
        # (c_std/cpp_std) and the environment variable name below.
        lang_std = p + '_std'
        # Check that all the listed -std=xxx options for this compiler work
        # just fine when used
        for v in compiler.get_options()[lang_std].choices:
            # clang < 5.0 advertises but rejects the '17' std values.
            if compiler.get_id() == 'clang' and version_compare(compiler.version, '<5.0.0') and '17' in v:
                continue
            std_opt = '{}={}'.format(lang_std, v)
            self.init(testdir, ['-D' + std_opt])
            cmd = self.get_compdb()[0]['command']
            if v != 'none':
                cmd_std = " -std={} ".format(v)
                self.assertIn(cmd_std, cmd)
            try:
                self.build()
            except:
                # Report which std value failed before re-raising.
                print('{} was {!r}'.format(lang_std, v))
                raise
            self.wipe()
        # Check that an invalid std option in CFLAGS/CPPFLAGS fails
        # Needed because by default ICC ignores invalid options
        # NOTE(review): the env var set here is never restored and leaks
        # into later tests — confirm whether the harness resets env.
        cmd_std = '-std=FAIL'
        env_flags = p.upper() + 'FLAGS'
        os.environ[env_flags] = cmd_std
        self.init(testdir)
        cmd = self.get_compdb()[0]['command']
        qcmd_std = " {} ".format(cmd_std)
        self.assertIn(qcmd_std, cmd)
        with self.assertRaises(subprocess.CalledProcessError,
                               msg='{} should have failed'.format(qcmd_std)):
            self.build()
+
+ def test_compiler_c_stds(self):
+ '''
+ Test that C stds specified for this compiler can all be used. Can't be
+ an ordinary test because it requires passing options to meson.
+ '''
+ testdir = os.path.join(self.common_test_dir, '1 trivial')
+ env = Environment(testdir, self.builddir, self.meson_command,
+ get_fake_options(self.prefix), [])
+ cc = env.detect_c_compiler(False)
+ self._test_stds_impl(testdir, cc, 'c')
+
+ def test_compiler_cpp_stds(self):
+ '''
+ Test that C++ stds specified for this compiler can all be used. Can't
+ be an ordinary test because it requires passing options to meson.
+ '''
+ testdir = os.path.join(self.common_test_dir, '2 cpp')
+ env = Environment(testdir, self.builddir, self.meson_command,
+ get_fake_options(self.prefix), [])
+ cpp = env.detect_cpp_compiler(False)
+ self._test_stds_impl(testdir, cpp, 'cpp')
+
+ def test_unity_subproj(self):
+ testdir = os.path.join(self.common_test_dir, '49 subproject')
+ self.init(testdir, extra_args='--unity=subprojects')
+ self.assertPathExists(os.path.join(self.builddir, 'subprojects/sublib/simpletest@exe/simpletest-unity.c'))
+ self.assertPathExists(os.path.join(self.builddir, 'subprojects/sublib/sublib@sha/sublib-unity.c'))
+ self.assertPathDoesNotExist(os.path.join(self.builddir, 'user@exe/user-unity.c'))
+ self.build()
+
    def test_installed_modes(self):
        '''
        Test that files installed by these tests have the correct permissions.
        Can't be an ordinary test because our installed_files.txt is very basic.
        '''
        # Test file modes
        testdir = os.path.join(self.common_test_dir, '12 data')
        self.init(testdir)
        self.install()

        f = os.path.join(self.installdir, 'etc', 'etcfile.dat')
        found_mode = stat.filemode(os.stat(f).st_mode)
        want_mode = 'rw------T'
        # [1:] drops the leading file-type character ('-' for regular files)
        self.assertEqual(want_mode, found_mode[1:])

        f = os.path.join(self.installdir, 'usr', 'bin', 'runscript.sh')
        statf = os.stat(f)
        found_mode = stat.filemode(statf.st_mode)
        # 's' = setgid bit must survive installation
        want_mode = 'rwxr-sr-x'
        self.assertEqual(want_mode, found_mode[1:])
        if os.getuid() == 0:
            # The chown failed nonfatally if we're not root
            self.assertEqual(0, statf.st_uid)
            self.assertEqual(0, statf.st_gid)

        # A file installed via a files() object keeps the source file's mode
        f = os.path.join(self.installdir, 'usr', 'share', 'progname',
                         'fileobject_datafile.dat')
        orig = os.path.join(testdir, 'fileobject_datafile.dat')
        statf = os.stat(f)
        statorig = os.stat(orig)
        found_mode = stat.filemode(statf.st_mode)
        orig_mode = stat.filemode(statorig.st_mode)
        self.assertEqual(orig_mode[1:], found_mode[1:])
        self.assertEqual(os.getuid(), statf.st_uid)
        if os.getuid() == 0:
            # The chown failed nonfatally if we're not root
            self.assertEqual(0, statf.st_gid)

        self.wipe()
        # Test directory modes
        testdir = os.path.join(self.common_test_dir, '66 install subdir')
        self.init(testdir)
        self.install()

        f = os.path.join(self.installdir, 'usr', 'share', 'sub1')
        statf = os.stat(f)
        found_mode = stat.filemode(statf.st_mode)
        # 't' = sticky bit on the installed directory
        want_mode = 'rwxr-x--t'
        self.assertEqual(want_mode, found_mode[1:])
        if os.getuid() == 0:
            # The chown failed nonfatally if we're not root
            self.assertEqual(0, statf.st_uid)
+
+ def test_cpp_std_override(self):
+ testdir = os.path.join(self.unit_test_dir, '6 std override')
+ self.init(testdir)
+ compdb = self.get_compdb()
+ for i in compdb:
+ if 'prog03' in i['file']:
+ c03_comp = i['command']
+ if 'prog11' in i['file']:
+ c11_comp = i['command']
+ if 'progp' in i['file']:
+ plain_comp = i['command']
+ self.assertNotEqual(len(plain_comp), 0)
+ self.assertIn('-std=c++03', c03_comp)
+ self.assertNotIn('-std=c++11', c03_comp)
+ self.assertIn('-std=c++11', c11_comp)
+ self.assertNotIn('-std=c++03', c11_comp)
+ self.assertNotIn('-std=c++03', plain_comp)
+ self.assertNotIn('-std=c++11', plain_comp)
+ # Now werror
+ self.assertIn('-Werror', plain_comp)
+ self.assertNotIn('-Werror', c03_comp)
+
    def test_run_installed(self):
        '''
        Test that an installed executable runs only when its library dir is
        on LD_LIBRARY_PATH, proving the build-dir rpath was stripped on
        install.
        '''
        testdir = os.path.join(self.unit_test_dir, '7 run installed')
        self.init(testdir)
        self.build()
        self.install()
        installed_exe = os.path.join(self.installdir, 'usr/bin/prog')
        installed_libdir = os.path.join(self.installdir, 'usr/foo')
        installed_lib = os.path.join(installed_libdir, 'libfoo.so')
        self.assertTrue(os.path.isfile(installed_exe))
        self.assertTrue(os.path.isdir(installed_libdir))
        self.assertTrue(os.path.isfile(installed_lib))
        # Must fail when run without LD_LIBRARY_PATH to ensure that
        # rpath has been properly stripped rather than pointing to the builddir.
        self.assertNotEqual(subprocess.call(installed_exe, stderr=subprocess.DEVNULL), 0)
        # When LD_LIBRARY_PATH is set it should start working.
        # For some reason setting LD_LIBRARY_PATH in os.environ fails
        # when all tests are run (but works when only this test is run),
        # but doing this explicitly works.
        env = os.environ.copy()
        env['LD_LIBRARY_PATH'] = installed_libdir
        self.assertEqual(subprocess.call(installed_exe, env=env), 0)
+
    def test_order_of_l_arguments(self):
        '''
        Test that -L and -l link arguments from a .pc file keep their
        relative order in the generated ninja link command.
        '''
        testdir = os.path.join(self.unit_test_dir, '9 -L -l order')
        # NOTE(review): PKG_CONFIG_PATH is set without being restored
        # afterwards, matching the style of neighbouring tests.
        os.environ['PKG_CONFIG_PATH'] = testdir
        self.init(testdir)
        # NOTE: .pc file has -Lfoo -lfoo -Lbar -lbar but pkg-config reorders
        # the flags before returning them to -Lfoo -Lbar -lfoo -lbar
        # but pkgconf seems to not do that. Sigh. Support both.
        # Each pair (a, b) asserts that a appears before b on the link line.
        expected_order = [('-L/me/first', '-lfoo1'),
                          ('-L/me/second', '-lfoo2'),
                          ('-L/me/first', '-L/me/second'),
                          ('-lfoo1', '-lfoo2'),
                          ('-L/me/second', '-L/me/third'),
                          ('-L/me/third', '-L/me/fourth',),
                          ('-L/me/third', '-lfoo3'),
                          ('-L/me/fourth', '-lfoo4'),
                          ('-lfoo3', '-lfoo4'),
                          ]
        with open(os.path.join(self.builddir, 'build.ninja')) as ifile:
            for line in ifile:
                # Only the link rule line contains the first -L flag;
                # check all orderings on that line and stop.
                if expected_order[0][0] in line:
                    for first, second in expected_order:
                        self.assertLess(line.index(first), line.index(second))
                    return
        raise RuntimeError('Linker entries not found in the Ninja file.')
+
+ def test_introspect_dependencies(self):
+ '''
+ Tests that mesonintrospect --dependencies returns expected output.
+ '''
+ testdir = os.path.join(self.framework_test_dir, '7 gnome')
+ self.init(testdir)
+ glib_found = False
+ gobject_found = False
+ deps = self.introspect('--dependencies')
+ self.assertIsInstance(deps, list)
+ for dep in deps:
+ self.assertIsInstance(dep, dict)
+ self.assertIn('name', dep)
+ self.assertIn('compile_args', dep)
+ self.assertIn('link_args', dep)
+ if dep['name'] == 'glib-2.0':
+ glib_found = True
+ elif dep['name'] == 'gobject-2.0':
+ gobject_found = True
+ self.assertTrue(glib_found)
+ self.assertTrue(gobject_found)
+
    def test_build_rpath(self):
        '''
        Test that build_rpath is used in the build tree and replaced by
        install_rpath on install.
        '''
        testdir = os.path.join(self.unit_test_dir, '11 build_rpath')
        self.init(testdir)
        self.build()
        # In the build tree: $ORIGIN/sub (for the built lib) plus /foo/bar
        build_rpath = get_rpath(os.path.join(self.builddir, 'prog'))
        self.assertEqual(build_rpath, '$ORIGIN/sub:/foo/bar')
        self.install()
        # After install only the explicit install_rpath remains
        install_rpath = get_rpath(os.path.join(self.installdir, 'usr/bin/prog'))
        self.assertEqual(install_rpath, '/baz')
+
+ def test_pch_with_address_sanitizer(self):
+ testdir = os.path.join(self.common_test_dir, '13 pch')
+ self.init(testdir, ['-Db_sanitize=address'])
+ self.build()
+ compdb = self.get_compdb()
+ for i in compdb:
+ self.assertIn("-fsanitize=address", i["command"])
+
    def test_coverage(self):
        '''
        Test that the coverage-html target works when gcovr and genhtml
        are available; skipped otherwise.
        '''
        if not shutil.which('gcovr'):
            raise unittest.SkipTest('gcovr not found')
        if not shutil.which('genhtml'):
            raise unittest.SkipTest('genhtml not found')
        if 'clang' in os.environ.get('CC', ''):
            # We need to use llvm-cov instead of gcovr with clang
            raise unittest.SkipTest('Coverage does not work with clang right now, help wanted!')
        testdir = os.path.join(self.common_test_dir, '1 trivial')
        self.init(testdir, ['-Db_coverage=true'])
        self.build()
        self.run_tests()
        self.run_target('coverage-html')
+
+ def test_cross_find_program(self):
+ testdir = os.path.join(self.unit_test_dir, '12 cross prog')
+ crossfile = tempfile.NamedTemporaryFile(mode='w')
+ print(os.path.join(testdir, 'some_cross_tool.py'))
+ crossfile.write('''[binaries]
+c = '/usr/bin/cc'
+ar = '/usr/bin/ar'
+strip = '/usr/bin/ar'
+sometool.py = '%s'
+
+[properties]
+
+[host_machine]
+system = 'linux'
+cpu_family = 'arm'
+cpu = 'armv7' # Not sure if correct.
+endian = 'little'
+''' % os.path.join(testdir, 'some_cross_tool.py'))
+ crossfile.flush()
+ self.init(testdir, ['--cross-file=' + crossfile.name])
+
    def test_reconfigure(self):
        '''Test that the `reconfigure` target works with b_lto enabled.'''
        testdir = os.path.join(self.unit_test_dir, '13 reconfigure')
        # default_args=False: do not pass the usual prefix/libdir arguments
        self.init(testdir, ['-Db_lto=true'], default_args=False)
        self.build('reconfigure')
+
    def test_cross_file_system_paths(self):
        '''
        Test that a bare cross-file name is looked up under
        $XDG_DATA_HOME/meson/cross, $XDG_DATA_DIRS/meson/cross and
        ~/.local/share/meson/cross.
        '''
        testdir = os.path.join(self.common_test_dir, '1 trivial')
        cross_content = textwrap.dedent("""\
            [binaries]
            c = '/usr/bin/cc'
            ar = '/usr/bin/ar'
            strip = '/usr/bin/ar'

            [properties]

            [host_machine]
            system = 'linux'
            cpu_family = 'x86'
            cpu = 'i686'
            endian = 'little'
            """)

        with tempfile.TemporaryDirectory() as d:
            dir_ = os.path.join(d, 'meson', 'cross')
            os.makedirs(dir_)
            with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f:
                f.write(cross_content)
            name = os.path.basename(f.name)

            # Lookup via $XDG_DATA_HOME/meson/cross
            with mock.patch.dict(os.environ, {'XDG_DATA_HOME': d}):
                self.init(testdir, ['--cross-file=' + name], inprocess=True)
                self.wipe()

            # XDG_DATA_DIRS is only consulted when XDG_DATA_HOME is unset
            with mock.patch.dict(os.environ, {'XDG_DATA_DIRS': d}):
                os.environ.pop('XDG_DATA_HOME', None)
                self.init(testdir, ['--cross-file=' + name], inprocess=True)
                self.wipe()

        with tempfile.TemporaryDirectory() as d:
            dir_ = os.path.join(d, '.local', 'share', 'meson', 'cross')
            os.makedirs(dir_)
            with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f:
                f.write(cross_content)
            name = os.path.basename(f.name)

            # Fallback lookup under ~/.local/share/meson/cross
            with mock.patch('mesonbuild.coredata.os.path.expanduser', lambda x: x.replace('~', d)):
                self.init(testdir, ['--cross-file=' + name], inprocess=True)
                self.wipe()
+
+
class LinuxArmCrossCompileTests(BasePlatformTests):
    '''
    Tests that verify cross-compilation to Linux/ARM
    '''
    def setUp(self):
        super().setUp()
        src_root = os.path.dirname(__file__)
        # Every init() in this class configures with the armhf cross file
        self.meson_command += ['--cross=' + os.path.join(src_root, 'cross', 'ubuntu-armhf.txt')]

    def test_cflags_cross_environment_pollution(self):
        '''
        Test that the CFLAGS environment variable does not pollute the cross
        environment. This can't be an ordinary test case because we need to
        inspect the compiler database.
        '''
        testdir = os.path.join(self.common_test_dir, '3 static')
        # NOTE(review): CFLAGS is set without being restored afterwards,
        # matching the style of neighbouring tests.
        os.environ['CFLAGS'] = '-DBUILD_ENVIRONMENT_ONLY'
        self.init(testdir)
        compdb = self.get_compdb()
        self.assertNotIn('-DBUILD_ENVIRONMENT_ONLY', compdb[0]['command'])
+
class RewriterTests(unittest.TestCase):
    '''
    Tests for mesonrewriter.py: adding and removing source files from
    meson.build definitions without a full reconfigure.
    '''

    def setUp(self):
        super().setUp()
        src_root = os.path.dirname(__file__)
        self.testroot = os.path.realpath(tempfile.mkdtemp())
        self.rewrite_command = python_command + [os.path.join(src_root, 'mesonrewriter.py')]
        self.tmpdir = os.path.realpath(tempfile.mkdtemp())
        self.workdir = os.path.join(self.tmpdir, 'foo')
        self.test_dir = os.path.join(src_root, 'test cases/rewrite')

    def tearDown(self):
        # Remove both temporary trees; self.testroot was previously created
        # in setUp but never cleaned up, leaking one directory per test.
        windows_proof_rmtree(self.tmpdir)
        windows_proof_rmtree(self.testroot)

    def read_contents(self, fname):
        '''Return the text of fname inside the working copy.'''
        with open(os.path.join(self.workdir, fname)) as f:
            return f.read()

    def check_effectively_same(self, mainfile, truth):
        mf = self.read_contents(mainfile)
        t = self.read_contents(truth)
        # Rewriting is not guaranteed to do a perfect job of
        # maintaining whitespace.
        self.assertEqual(mf.replace(' ', ''), t.replace(' ', ''))

    def prime(self, dirname):
        '''Copy the named rewrite test case into a fresh working directory.'''
        shutil.copytree(os.path.join(self.test_dir, dirname), self.workdir)

    def test_basic(self):
        # remove -> add -> remove must round-trip through the known-good
        # 'removed.txt' and 'added.txt' reference files.
        self.prime('1 basic')
        subprocess.check_call(self.rewrite_command + ['remove',
                                                      '--target=trivialprog',
                                                      '--filename=notthere.c',
                                                      '--sourcedir', self.workdir],
                              universal_newlines=True)
        self.check_effectively_same('meson.build', 'removed.txt')
        subprocess.check_call(self.rewrite_command + ['add',
                                                      '--target=trivialprog',
                                                      '--filename=notthere.c',
                                                      '--sourcedir', self.workdir],
                              universal_newlines=True)
        self.check_effectively_same('meson.build', 'added.txt')
        subprocess.check_call(self.rewrite_command + ['remove',
                                                      '--target=trivialprog',
                                                      '--filename=notthere.c',
                                                      '--sourcedir', self.workdir],
                              universal_newlines=True)
        self.check_effectively_same('meson.build', 'removed.txt')

    def test_subdir(self):
        # Removing a file in sub1 must not touch the top-level or sub2
        # build files.
        self.prime('2 subdirs')
        top = self.read_contents('meson.build')
        s2 = self.read_contents('sub2/meson.build')
        subprocess.check_call(self.rewrite_command + ['remove',
                                                      '--target=something',
                                                      '--filename=second.c',
                                                      '--sourcedir', self.workdir],
                              universal_newlines=True)
        self.check_effectively_same('sub1/meson.build', 'sub1/after.txt')
        self.assertEqual(top, self.read_contents('meson.build'))
        self.assertEqual(s2, self.read_contents('sub2/meson.build'))
+
+
def unset_envs():
    # For unit tests we must fully control all command lines
    # so that there are no unexpected changes coming from the
    # environment, for example when doing a package build.
    varnames = ['CPPFLAGS', 'LDFLAGS'] + list(mesonbuild.environment.cflags_mapping.values())
    for v in varnames:
        # pop() with a default is a no-op when the variable is absent
        os.environ.pop(v, None)
+
if __name__ == '__main__':
    # Scrub compiler-related environment variables so results are hermetic
    unset_envs()
    cases = ['InternalTests', 'AllPlatformTests', 'FailureTests']
    if is_linux():
        cases += ['LinuxlikeTests']
        if should_run_linux_cross_tests():
            cases += ['LinuxArmCrossCompileTests']
    elif is_windows():
        cases += ['WindowsTests']

    # buffer=True captures each test's stdout/stderr and shows it on failure
    unittest.main(defaultTest=cases, buffer=True)
--- /dev/null
+[flake8]
+ignore =
+ E241,
+ E251,
+ E261,
+ E501,
+ E302,
+ E305,
+ E401,
+ E266,
+ E402,
+ E731
+max-line-length = 120
+
+[egg_info]
+tag_build =
+tag_date = 0
+
--- /dev/null
+#!/usr/bin/env python3
+
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+
+from mesonbuild.coredata import version
+
# Refuse to run under Python 2 with a clear error message instead of
# failing later with an obscure SyntaxError.
if sys.version_info[0] < 3:
    print('Tried to install with Python 2, Meson only supports Python 3.')
    sys.exit(1)

# We need to support Python installations that have nothing but the basic
# Python installation. Use setuptools when possible and fall back to
# plain distutils when setuptools is not available.
try:
    from setuptools import setup
    from setuptools.command.install_scripts import install_scripts as orig
except ImportError:
    from distutils.core import setup
    from distutils.command.install_scripts import install_scripts as orig
+
class install_scripts(orig):
    '''Install scripts without their .py suffix on Unix-like platforms.'''

    def run(self):
        # Windows relies on the .py suffix for launcher association,
        # so keep the stock behaviour there.
        if sys.platform == 'win32':
            super().run()
            return

        if not self.skip_build:
            self.run_command('build_scripts')
        self.outfiles = []
        if not self.dry_run:
            self.mkpath(self.install_dir)

        # We want the files to be installed without a suffix on Unix
        for script in self.get_inputs():
            basename = os.path.basename(script)
            built = os.path.join(self.build_dir, basename)
            if basename.endswith('.py'):
                basename = basename[:-3]
            target = os.path.join(self.install_dir, basename)
            # NOTE: Mode is preserved by default
            self.copy_file(built, target)
            self.outfiles.append(target)
+
# Package metadata. The license string previously carried a stray leading
# space (' Apache License, ...') which ended up verbatim in the metadata.
setup(name='meson',
      version=version,
      description='A high performance build system',
      author='Jussi Pakkanen',
      author_email='jpakkane@gmail.com',
      url='http://mesonbuild.com',
      license='Apache License, Version 2.0',
      packages=['mesonbuild',
                'mesonbuild.backend',
                'mesonbuild.compilers',
                'mesonbuild.dependencies',
                'mesonbuild.modules',
                'mesonbuild.scripts',
                'mesonbuild.wrap'],
      # Installed via the custom install_scripts command above, which strips
      # the .py suffix on Unix.
      scripts=['meson.py',
               'mesonconf.py',
               'mesontest.py',
               'mesonintrospect.py',
               'wraptool.py'],
      cmdclass={'install_scripts': install_scripts},
      data_files=[('share/man/man1', ['man/meson.1',
                                      'man/mesonconf.1',
                                      'man/mesonintrospect.1',
                                      'man/mesontest.1',
                                      'man/wraptool.1'])],
      classifiers=['Development Status :: 5 - Production/Stable',
                   'Environment :: Console',
                   'Intended Audience :: Developers',
                   'License :: OSI Approved :: Apache Software License',
                   'Natural Language :: English',
                   'Operating System :: MacOS :: MacOS X',
                   'Operating System :: Microsoft :: Windows',
                   'Operating System :: POSIX :: BSD',
                   'Operating System :: POSIX :: Linux',
                   'Programming Language :: Python :: 3 :: Only',
                   'Topic :: Software Development :: Build Tools',
                   ],
      long_description='''Meson is a cross-platform build system designed to be both as
fast and as user friendly as possible. It supports many languages and compilers, including
GCC, Clang and Visual Studio. Its build definitions are written in a simple non-turing
complete DSL.''')
--- /dev/null
# Comment on the first line
project('trivial test',
  # Comment inside a function call + array for language list
  ['c'],
  meson_version : '>=0.27.0')
#this is a comment
sources = 'trivial.c'

if meson.get_compiler('c').get_id() == 'intel'
  # Error out if the -std=xxx option is incorrect
  add_project_arguments('-diag-error', '10159', language : 'c')
endif

if meson.is_cross_build()
  # When cross compiling, also build and run a native copy
  native_exe = executable('native-trivialprog', sources : sources, native : true)
  test('native exe in cross build', native_exe)
endif

exe = executable('trivialprog', sources : sources)

test('runtest', exe) # This is a comment
--- /dev/null
/* Minimal fixture program: print a marker and exit successfully. */
#include<stdio.h>

int main(int argc, char **argv) {
    printf("Trivial test is working.\n");
    return 0;
}
--- /dev/null
+this is a man page of bar.2, its contents are irrelevant
\ No newline at end of file
--- /dev/null
+This is a man page of baz.1 it was generated @TODAY@.
+
+You should not put generation timestamps in real world projects
+because they break reproducible builds. This manpage is written
+by professionals or under the supervision of professionals. Do
+not try this at home.
--- /dev/null
+this is a man page of foo.1 its contents are irrelevant
--- /dev/null
+usr/share/man/man1/foo.1.gz
+usr/share/man/man2/bar.2.gz
+usr/share/man/man1/vanishing.1.gz
+usr/share/man/man2/vanishing.2.gz
+usr/share/man/man1/baz.1.gz
--- /dev/null
# Fixture: installs man pages from plain files, a subdir, and a
# configure_file() output.
project('man install', 'c')
m1 = install_man('foo.1')
m2 = install_man('bar.2')
install_man('vanishing/vanishing.2')
subdir('vanishing')

cdata = configuration_data()
# Replaces @TODAY@ in baz.1.in; placeholder value, not a real date.
cdata.set('TODAY', '$this_day')
b1 = configure_file(input : 'baz.1.in',
  output : 'baz.1',
  configuration : cdata)

install_man(b1)
--- /dev/null
# Installs the man page that lives in this subdir.
install_man('vanishing.1')
--- /dev/null
+This is a man page of the vanishing subdirectory.
--- /dev/null
+This is a second man page of the vanishing subdirectory.
--- /dev/null
# Fixture: exercises the test() workdir and should_fail keyword args.
project('test workdir', 'c')

exe = executable('opener', 'opener.c')

# opener succeeds only when run from the source root (it opens opener.c)
test('basic', exe, workdir : meson.source_root())
test('shouldfail', exe, should_fail : true)
--- /dev/null
// This test only succeeds if run in the source root dir.

#include<stdio.h>

int main(int arg, char **argv) {
    FILE *fp = fopen("opener.c", "r");
    if(!fp)
        return 1;
    fclose(fp);
    return 0;
}
--- /dev/null
/* Fixture: prints its identity so the suite runner can be verified. */
#include<stdio.h>

int main(int argc, char **argv) {
    printf("I am test exe1.\n");
    return 0;
}
--- /dev/null
/* Fixture: prints its identity so the suite runner can be verified. */
#include<stdio.h>

int main(int argc, char **argv) {
    printf("I am test exe2.\n");
    return 0;
}
--- /dev/null
# Fixture: a test with no explicit suite plus one in named suites, and a
# subproject that contributes its own suites.
project('multiple test suites', 'c')

subproject('sub')

exe1 = executable('exe1', 'exe1.c')
exe2 = executable('exe2', 'exe2.c')

test('exe1', exe1)
test('exe2', exe2, suite : ['suite2', 'super-special'])
--- /dev/null
# Subproject fixture: its suites get namespaced under the subproject name.
project('subproject test suites', 'c')

sub1 = executable('sub1', 'sub1.c')
sub2 = executable('sub2', 'sub2.c')

test('sub1', sub1)
test('sub2', sub2, suite : 'suite2')
--- /dev/null
/* Subproject fixture: prints its identity. */
#include<stdio.h>

int main(int argc, char **argv) {
    printf("I am test sub1.\n");
    return 0;
}
--- /dev/null
/* Subproject fixture: prints its identity. */
#include<stdio.h>

int main(int argc, char **argv) {
    printf("I am test sub2.\n");
    return 0;
}
--- /dev/null
# Fixture: the 'threads' dependency must work for both C and C++ targets.
project('threads', 'cpp', 'c',
  default_options : ['cpp_std=c++11'])

threaddep = dependency('threads')

test('cppthreadtest',
  executable('cppthreadprog', 'threadprog.cpp',
    dependencies : threaddep
  )
)

test('cthreadtest',
  executable('cthreadprog', 'threadprog.c',
    dependencies : threaddep
  )
)
--- /dev/null
/* Thread fixture: spawn one thread, wait for it, exit with its status.
 * Uses Win32 threads on Windows and pthreads elsewhere. */
#if defined _WIN32

#include<windows.h>
#include<stdio.h>

DWORD WINAPI thread_func(LPVOID ignored) {
    printf("Printing from a thread.\n");
    return 0;
}

int main(int argc, char **argv) {
    DWORD id;
    HANDLE th;
    printf("Starting thread.\n");
    th = CreateThread(NULL, 0, thread_func, NULL, 0, &id);
    WaitForSingleObject(th, INFINITE);
    printf("Stopped thread.\n");
    return 0;
}
#else

#include<pthread.h>
#include<stdio.h>

void* main_func(void* ignored) {
    printf("Printing from a thread.\n");
    return NULL;
}

int main(int argc, char** argv) {
    pthread_t thread;
    int rc;
    printf("Starting thread.\n");
    rc = pthread_create(&thread, NULL, main_func, NULL);
    if (rc != 0) {
        /* Previously the create result was overwritten by pthread_join;
         * joining a thread that was never created is undefined behaviour,
         * so bail out on creation failure. */
        return rc;
    }
    rc = pthread_join(thread, NULL);
    printf("Stopped thread.\n");
    return rc;
}

#endif
--- /dev/null
/* On Windows not all versions of VS support C++11 and
 * some (most?) versions of mingw don't support std::thread,
 * even though they do support c++11. Since we only care about
 * threads working, do the test with raw win threads.
 */

#if defined _WIN32

#include<windows.h>
#include<stdio.h>

DWORD WINAPI thread_func(LPVOID) {
    printf("Printing from a thread.\n");
    return 0;
}

int main(int, char**) {
    printf("Starting thread.\n");
    HANDLE th;
    DWORD id;
    th = CreateThread(NULL, 0, thread_func, NULL, 0, &id);
    WaitForSingleObject(th, INFINITE);
    printf("Stopped thread.\n");
    return 0;
}
#else

#include<thread>
#include<cstdio>

/* Thread entry point: just prove the thread ran. */
void main_func() {
    printf("Printing from a thread.\n");
}

int main(int, char**) {
    printf("Starting thread.\n");
    std::thread th(main_func);
    th.join();
    printf("Stopped thread.\n");
    return 0;
}

#endif
--- /dev/null
+#include"gen_func.h"
+
+int main(int argc, char **argv) {
+ unsigned int i = (unsigned int) gen_func_in_lib();
+ unsigned int j = (unsigned int) gen_func_in_obj();
+ unsigned int k = (unsigned int) gen_func_in_src();
+ return (int)(i + j + k);
+}
--- /dev/null
# Fixture: a custom_target that generates a lib, object, source and header
# in one invocation, consumed by depuser.
project('manygen', 'c')

if meson.is_cross_build()
  # FIXME error out with skip message once cross test runner
  # recognizes it.
  message('Not running this test during cross build.')
else
  subdir('subdir')

  exe = executable('depuser', 'depuser.c',
    generated)

  test('depuser test', exe)
endif
--- /dev/null
+#!/usr/bin/env python
+
+from __future__ import print_function
+
+# Generates a static library, object file, source
+# file and a header file.
+
+import sys, os
+import shutil, subprocess
+
+with open(sys.argv[1]) as f:
+ funcname = f.readline().strip()
+outdir = sys.argv[2]
+buildtype_args = sys.argv[3]
+
+if not os.path.isdir(outdir):
+ print('Outdir does not exist.')
+ sys.exit(1)
+
+# Emulate the environment.detect_c_compiler() logic
+compiler = os.environ.get('CC', None)
+if not compiler:
+ compiler = shutil.which('cl') or \
+ shutil.which('gcc') or \
+ shutil.which('clang') or \
+ shutil.which('cc')
+
+compbase = os.path.basename(compiler)
+if 'cl' in compbase and 'clang' not in compbase:
+ libsuffix = '.lib'
+ is_vs = True
+ compiler = 'cl'
+ linker = 'lib'
+else:
+ libsuffix = '.a'
+ is_vs = False
+ linker = 'ar'
+ if compiler is None:
+ print('No known compilers found.')
+ sys.exit(1)
+
+objsuffix = '.o'
+
+outo = os.path.join(outdir, funcname + objsuffix)
+outa = os.path.join(outdir, funcname + libsuffix)
+outh = os.path.join(outdir, funcname + '.h')
+outc = os.path.join(outdir, funcname + '.c')
+
+tmpc = 'diibadaaba.c'
+tmpo = 'diibadaaba' + objsuffix
+
+with open(outc, 'w') as f:
+ f.write('''#include"%s.h"
+int %s_in_src() {
+ return 0;
+}
+''' % (funcname, funcname))
+
+with open(outh, 'w') as f:
+ f.write('''#pragma once
+int %s_in_lib();
+int %s_in_obj();
+int %s_in_src();
+''' % (funcname, funcname, funcname))
+
+with open(tmpc, 'w') as f:
+ f.write('''int %s_in_obj() {
+ return 0;
+}
+''' % funcname)
+
+if is_vs:
+ subprocess.check_call([compiler, '/nologo', '/c', buildtype_args, '/Fo' + outo, tmpc])
+else:
+ subprocess.check_call([compiler, '-c', '-o', outo, tmpc])
+
+with open(tmpc, 'w') as f:
+ f.write('''int %s_in_lib() {
+ return 0;
+}
+''' % funcname)
+
+if is_vs:
+ subprocess.check_call([compiler, '/nologo', '/c', '/Fo' + tmpo, tmpc])
+ subprocess.check_call([linker, '/NOLOGO', '/OUT:' + outa, tmpo])
+else:
+ subprocess.check_call([compiler, '-c', '-o', tmpo, tmpc])
+ subprocess.check_call([linker, 'csr', outa, tmpo])
+
+os.unlink(tmpo)
+os.unlink(tmpc)
--- /dev/null
# Drives manygen.py: one custom_target invocation produces the lib, source,
# header and object consumed by the parent directory.
gen = files('manygen.py')
py3_bin = import('python3').find_python()

buildtype = get_option('buildtype')
buildtype_args = '-Dfooxxx' # a useless compiler argument
if meson.get_compiler('c').get_id() == 'msvc'
  # We need our manually generated code to use the same CRT as the executable.
  # Taken from compilers.py since build files do not have access to this.
  if buildtype == 'debug'
    buildtype_args = '/MDd'
  elif buildtype == 'debugoptimized'
    buildtype_args = '/MDd'
  elif buildtype == 'release'
    buildtype_args = '/MD'
  endif
  outfiles = ['gen_func.lib', 'gen_func.c', 'gen_func.h', 'gen_func.o']
else
  outfiles = ['gen_func.a', 'gen_func.c', 'gen_func.h', 'gen_func.o']
endif

generated = custom_target('manygen',
  output : outfiles,
  input : ['funcinfo.def'],
  command : [py3_bin, gen[0], '@INPUT@', '@OUTDIR@', buildtype_args],
)
--- /dev/null
# Fixture: a quoted string passed via c_args must survive shell quoting.
project('stringdef', 'c')

test('stringdef', executable('stringdef', 'stringdef.c', c_args : '-DFOO="bar"'))
--- /dev/null
/* Verifies that the FOO macro supplied on the command line expands to the
 * exact string "bar" (i.e. quoting survived the build system). */
#include<stdio.h>
#include<string.h>

int main(int argc, char **argv) {
    if(strcmp(FOO, "bar")) {
        printf("FOO is misquoted: %s\n", FOO);
        return 1;
    }
    return 0;
}
--- /dev/null
# Fixture: find_program() and run_command() must accept strings, files()
# objects and configure_file() outputs interchangeably.
project('find program', 'c')

python = import('python3').find_python()

# Source file via string
prog = find_program('program.py')
# Source file via files()
progf = files('program.py')
# Built file
py = configure_file(input : 'program.py',
  output : 'builtprogram.py',
  configuration : configuration_data())

foreach f : [prog, progf, py, find_program(py), find_program(progf)]
  # Run via the interpreter and directly; both must print 'Found'
  ret = run_command(python, f)
  assert(ret.returncode() == 0, 'can\'t manually run @0@'.format(prog.path()))
  assert(ret.stdout().strip() == 'Found', 'wrong output from manually-run @0@'.format(prog.path()))

  ret = run_command(f)
  assert(ret.returncode() == 0, 'can\'t run @0@'.format(prog.path()))
  assert(ret.stdout().strip() == 'Found', 'wrong output from @0@'.format(prog.path()))
endforeach
--- /dev/null
#!/usr/bin/env python3

# Marker checked by the run_command() assertions in meson.build.
print("Found")
--- /dev/null
# Fixture: a dependency with a subproject fallback declared in a subdir
# of the subproject.
project('proj', 'c')
subproject('sub')
libSub = dependency('sub', fallback: ['sub', 'libSub'])

exe = executable('prog', 'prog.c', dependencies: libSub)
test('subproject subdir', exe)
--- /dev/null
/* Links against the subproject's libSub; sub() returns 0 on success. */
#include <sub.h>

int main() {
    return sub();
}
--- /dev/null
# Builds the subproject library and exposes it as a declared dependency.
lib = static_library('sub', 'sub.c')
libSub = declare_dependency(include_directories: include_directories('.'), link_with: lib)
--- /dev/null
+#include "sub.h"
+
+int sub() {
+ return 0;
+}
--- /dev/null
/* Public header of the 'sub' subproject library. */
#ifndef SUB_H
#define SUB_H

int sub();

#endif
--- /dev/null
# Subproject entry point; the library lives in lib/.
project('sub', 'c')
subdir('lib')
--- /dev/null
# Fixture: a postconf script generates a header into the build dir that
# the test program includes.
project('postconf script', 'c')

meson.add_postconf_script('postconf.py')

test('post', executable('prog', 'prog.c'))
--- /dev/null
#!/usr/bin/env python3

# Post-configure script: reads a number from raw.dat in the source tree
# and writes it into generated.h in the build tree.

import os

template = '''#pragma once

#define THE_NUMBER {}
'''

source_root = os.environ['MESON_SOURCE_ROOT']
build_root = os.environ['MESON_BUILD_ROOT']

with open(os.path.join(source_root, 'raw.dat')) as raw:
    number = raw.readline().strip()

with open(os.path.join(build_root, 'generated.h'), 'w') as header:
    header.write(template.format(number))
--- /dev/null
+#include"generated.h"
+
+int main(int argc, char **argv) {
+ return THE_NUMBER != 9;
+}
--- /dev/null
# Fixture: like the plain postconf test, but passes extra arguments to
# the script.
project('postconf script', 'c')

meson.add_postconf_script('postconf.py', '5', '33')

test('post', executable('prog', 'prog.c'))
--- /dev/null
#!/usr/bin/env python3

# Post-configure script: writes generated.h containing the number from
# raw.dat plus the two command-line arguments it was given.

import sys, os

template = '''#pragma once

#define THE_NUMBER {}
#define THE_ARG1 {}
#define THE_ARG2 {}
'''

source_root = os.environ['MESON_SOURCE_ROOT']
build_root = os.environ['MESON_BUILD_ROOT']

with open(os.path.join(source_root, 'raw.dat')) as raw:
    number = raw.readline().strip()

with open(os.path.join(build_root, 'generated.h'), 'w') as header:
    header.write(template.format(number, sys.argv[1], sys.argv[2]))
--- /dev/null
+#include"generated.h"
+
+int main(int argc, char **argv) {
+ return THE_NUMBER != 9 || THE_ARG1 != 5 || THE_ARG2 != 33;
+}
--- /dev/null
# Fixture: verifies that -D command-line values override the defaults
# declared in meson_options.txt.
project('options', 'c')

assert(get_option('testoption') == 'A string with spaces', 'Incorrect value for testoption option.')
assert(get_option('other_one') == true, 'Incorrect value for other_one option.')
assert(get_option('combo_opt') == 'one', 'Incorrect value for combo_opt option.')
--- /dev/null
# Option declarations; defaults here are deliberately different from the
# values asserted in meson.build, which arrive via -D arguments.
option('testoption', type : 'string', value : 'optval', description : 'An option to do something')
option('other_one', type : 'boolean', value : false)
option('combo_opt', type : 'combo', choices : ['one', 'two', 'combo'], value : 'combo')
--- /dev/null
+# This file is not read by meson itself, but by the test framework.
+# It is not possible to pass arguments to meson from a file.
+['--werror', '-D', 'testoption=A string with spaces', '-D', 'other_one=true', \
+ '-D', 'combo_opt=one']
--- /dev/null
# Fixture: target defined in a subdirectory build file.
project('subdir test', 'c')
subdir('subdir')
--- /dev/null
# Source path is relative to this subdirectory.
executable('prog', 'prog.c')
--- /dev/null
/* Minimal program; only its successful compilation matters. */
int main(int argc, char **argv) { return 0; }
--- /dev/null
/* Object-extraction fixture: main.c checks for exactly this value. */
int func1() {
    return 23;
}
--- /dev/null
/* Links against objects extracted from the shared library; exits 0 only
 * when both extracted functions return their expected values. */
int func1();
int func2();

int main(int argc, char **argv) {
    return !(func1() == 23 && func2() == 42);
}
--- /dev/null
# Fixture: extract_objects() with a nested source list.
project('object extraction', 'c')

lib = shared_library('somelib', ['lib.c', 'src/lib.c'])
# Also tests that the object list is flattened properly
obj = lib.extract_objects(['lib.c', ['src/lib.c']])
exe = executable('main', 'main.c', objects: obj)
test('extraction', exe)
--- /dev/null
/* Object-extraction fixture: main.c checks for exactly this value. */
int func2() {
    return 42;
}
--- /dev/null
# Fixture: compiler.has_header_symbol() for C and C++, covering base types,
# functions, structs, macros, missing symbols and missing headers.
project('has header symbol', 'c', 'cpp')

cc = meson.get_compiler('c')
cpp = meson.get_compiler('cpp')

foreach comp : [cc, cpp]
  assert (comp.has_header_symbol('stdio.h', 'int'), 'base types should always be available')
  assert (comp.has_header_symbol('stdio.h', 'printf'), 'printf function not found')
  assert (comp.has_header_symbol('stdio.h', 'FILE'), 'FILE structure not found')
  assert (comp.has_header_symbol('limits.h', 'INT_MAX'), 'INT_MAX define not found')
  assert (not comp.has_header_symbol('limits.h', 'guint64'), 'guint64 is not defined in limits.h')
  assert (not comp.has_header_symbol('stdlib.h', 'FILE'), 'FILE structure is defined in stdio.h, not stdlib.h')
  assert (not comp.has_header_symbol('stdlol.h', 'printf'), 'stdlol.h shouldn\'t exist')
  assert (not comp.has_header_symbol('stdlol.h', 'int'), 'shouldn\'t be able to find "int" with invalid header')
endforeach

# This is available on Glibc, Solaris & the BSD's, so just test for _GNU_SOURCE
# on Linux
if cc.has_function('ppoll') and host_machine.system() == 'linux'
  assert (not cc.has_header_symbol('poll.h', 'ppoll'), 'ppoll should not be accessible without _GNU_SOURCE')
  assert (cc.has_header_symbol('poll.h', 'ppoll', prefix : '#define _GNU_SOURCE'), 'ppoll should be accessible with _GNU_SOURCE')
endif

# Namespaced C++ symbols must also be found
assert (cpp.has_header_symbol('iostream', 'std::iostream'), 'iostream not found in iostream.h')
assert (cpp.has_header_symbol('vector', 'std::vector'), 'vector not found in vector.h')
assert (not cpp.has_header_symbol('limits.h', 'std::iostream'), 'iostream should not be defined in limits.h')

# Cross compilation and boost do not mix.
if not meson.is_cross_build()
  boost = dependency('boost', required : false)
  if boost.found()
    assert (cpp.has_header_symbol('boost/math/quaternion.hpp', 'boost::math::quaternion', dependencies : boost), 'quaternion not found')
  else
    assert (not cpp.has_header_symbol('boost/math/quaternion.hpp', 'boost::math::quaternion', dependencies : boost), 'quaternion found?!')
  endif
endif
--- /dev/null
+project('has arg', 'c', 'cpp')
+
+cc = meson.get_compiler('c')
+cpp = meson.get_compiler('cpp')
+
+if cc.get_id() == 'msvc'
+ is_arg = '/O2'
+ useless = '/DFOO'
+else
+ is_arg = '-O2'
+ useless = '-DFOO'
+endif
+
+isnt_arg = '-fiambroken'
+
+assert(cc.has_argument(is_arg), 'Arg that should have worked does not work.')
+assert(not cc.has_argument(isnt_arg), 'Arg that should be broken is not.')
+
+assert(cpp.has_argument(is_arg), 'Arg that should have worked does not work.')
+assert(not cpp.has_argument(isnt_arg), 'Arg that should be broken is not.')
+
+assert(cc.get_supported_arguments([is_arg, isnt_arg, useless]) == [is_arg, useless], 'Arg filtering returned different result.')
+assert(cpp.get_supported_arguments([is_arg, isnt_arg, useless]) == [is_arg, useless], 'Arg filtering returned different result.')
+
+# Have useless at the end to ensure that the search goes from front to back.
+l1 = cc.first_supported_argument([isnt_arg, is_arg, isnt_arg, useless])
+l2 = cc.first_supported_argument(isnt_arg, isnt_arg, isnt_arg)
+
+assert(l1.length() == 1, 'First supported returned wrong result.')
+assert(l1.get(0) == is_arg, 'First supported returned wrong argument.')
+assert(l2.length() == 0, 'First supported did not return empty array.')
+
+l1 = cpp.first_supported_argument([isnt_arg, is_arg, isnt_arg, useless])
+l2 = cpp.first_supported_argument(isnt_arg, isnt_arg, isnt_arg)
+
+assert(l1.length() == 1, 'First supported returned wrong result.')
+assert(l1.get(0) == is_arg, 'First supported returned wrong argument.')
+assert(l2.length() == 0, 'First supported did not return empty array.')
+
+if cc.get_id() == 'gcc'
+ pre_arg = '-Wformat'
+ anti_pre_arg = '-Wno-format'
+ arg = '-Werror=format-security'
+ assert(not cc.has_multi_arguments([anti_pre_arg, arg]), 'Arg that should be broken is not.')
+ assert(cc.has_multi_arguments(pre_arg), 'Arg that should have worked does not work.')
+ assert(cc.has_multi_arguments([pre_arg, arg]), 'Arg that should have worked does not work.')
+endif
+
+if cc.get_id() == 'clang' and cc.version().version_compare('<=4.0.0')
+ # 4.0.0 does not support -fpeel-loops. Newer versions may.
+ # Please adjust above version number as new versions of clang are released.
+ notyet_arg = '-fpeel-loops'
+ assert(not cc.has_argument(notyet_arg), 'Arg that should be broken (unless clang added support recently) is not.')
+endif
--- /dev/null
#!/usr/bin/env python3

"""Concatenate input files into one header: catter.py in1 [in2 ...] out.

The output starts with '#pragma once' and each input is followed by a
newline so inputs cannot run together.
"""

import sys


def cat_files(inputs, output):
    """Write '#pragma once' then the contents of *inputs*, in order, to *output*.

    A newline is appended after each input file's contents.
    """
    with open(output, 'w') as ofile:
        ofile.write('#pragma once\n')
        for path in inputs:
            with open(path, 'r') as ifile:
                ofile.write(ifile.read())
            ofile.write('\n')


if __name__ == '__main__':
    # Last argument is the output file; everything before it is an input.
    # Guarded so importing this module performs no file I/O.
    cat_files(sys.argv[1:-1], sys.argv[-1])
--- /dev/null
#!/usr/bin/env python3

"""Generate a C string constant from a resource file: gen.py input output.

The first line of the input names the resource; the output defines a
'const char' array with that name whose value is the name itself.
"""

import sys


def generate(ifile, ofile):
    """Read the resource name from the first line of *ifile* and write
    'const char <name>[] = "<name>";' to *ofile*."""
    with open(ifile, 'r') as f:
        resname = f.readline().strip()

    templ = 'const char %s[] = "%s";\n'
    with open(ofile, 'w') as f:
        f.write(templ % (resname, resname))


if __name__ == '__main__':
    # Guarded so importing this module performs no file I/O.
    generate(sys.argv[1], sys.argv[2])
--- /dev/null
+#include<stdio.h>
+
+#include"alltogether.h"
+
+int main(int argc, char **argv) {
+ return 0;
+}
--- /dev/null
+project('generatorcustom', 'c')
+
+creator = find_program('gen.py')
+catter = find_program('catter.py')
+
+gen = generator(creator,
+ output: '@BASENAME@.h',
+ arguments : ['@INPUT@', '@OUTPUT@'])
+
+hs = gen.process('res1.txt', 'res2.txt')
+
+allinone = custom_target('alltogether',
+ input : hs,
+ output : 'alltogether.h',
+ command : [catter, '@INPUT@', '@OUTPUT@'])
+
+executable('proggie', 'main.c', allinone)
+
--- /dev/null
+project('multiple dir configure file', 'c')
+
+subdir('subdir')
+
+configure_file(input : 'subdir/someinput.in',
+ output : 'outputhere',
+ configuration : configuration_data())
+
+configure_file(input : cfile1,
+ output : '@BASENAME@',
+ configuration : configuration_data())
--- /dev/null
+configure_file(input : 'someinput.in',
+ output : 'outputsubdir',
+ install : false,
+ configuration : configuration_data())
+
+py3 = import('python3').find_python()
+
+cfile1 = configure_file(input : 'foo.txt',
+ output : 'foo.h.in',
+ capture : true,
+ command : [py3, '-c', 'print("#mesondefine FOO_BAR")'])
--- /dev/null
+configure_file(output : 'blank.txt', configuration : configuration_data())
+
--- /dev/null
/* Test fixture: checks that a NON-string define (DEF_WITH_BACKSLASH,
 * passed as -DDEF_WITH_BACKSLASH=foo\\bar\\ in meson.build) reaches the
 * compiler with its trailing backslash intact once stringified here. */
#include "comparer.h"

#ifndef COMPARER_INCLUDED
#error "comparer.h not included"
#endif

/* This converts foo\\\\bar\\\\ to "foo\\bar\\" (string literal) */
#define Q(x) #x
#define QUOTE(x) Q(x)

#define COMPARE_WITH "foo\\bar\\" /* This is the literal `foo\bar\` */

int main(int argc, char **argv) {
    /* Non-zero strcmp means the build system mangled the define's quoting. */
    if(strcmp(QUOTE(DEF_WITH_BACKSLASH), COMPARE_WITH)) {
        printf("Arg string is quoted incorrectly: %s instead of %s\n",
               QUOTE(DEF_WITH_BACKSLASH), COMPARE_WITH);
        return 1;
    }
    return 0;
}
--- /dev/null
/* Test fixture: checks that a string define ending in a backslash
 * (DEF_WITH_BACKSLASH="foo\\bar\\" in meson.build) survives the compiler
 * command line without extra escaping. */
#include "comparer.h"

#ifndef COMPARER_INCLUDED
#error "comparer.h not included"
#endif

#define COMPARE_WITH "foo\\bar\\" /* This is `foo\bar\` */

int main (int argc, char **argv) {
    /* Non-zero strcmp means the build system mangled the define's quoting. */
    if (strcmp (DEF_WITH_BACKSLASH, COMPARE_WITH)) {
        printf ("Arg string is quoted incorrectly: %s vs %s\n",
                DEF_WITH_BACKSLASH, COMPARE_WITH);
        return 1;
    }
    return 0;
}
--- /dev/null
/* Test fixture: checks that a string define containing a backslash
 * (DEF_WITH_BACKSLASH="foo\\bar" in meson.build) arrives unmangled. */
#include "comparer.h"

#ifndef COMPARER_INCLUDED
#error "comparer.h not included"
#endif

#define COMPARE_WITH "foo\\bar" /* This is the literal `foo\bar` */

int main (int argc, char **argv) {
    /* Non-zero strcmp means the build system mangled the define's quoting. */
    if (strcmp (DEF_WITH_BACKSLASH, COMPARE_WITH)) {
        printf ("Arg string is quoted incorrectly: %s instead of %s\n",
                DEF_WITH_BACKSLASH, COMPARE_WITH);
        return 1;
    }
    return 0;
}
--- /dev/null
+#include <string.h>
+#include <stdio.h>
+
+#define COMPARER_INCLUDED
--- /dev/null
+project('comparer', 'c')
+
+# Added manually as a c_arg to test handling of include paths with backslashes
+# and spaces. This is especially useful on Windows in vcxproj files since it
+# stores include directories in a separate element that has its own
+# context-specific escaping/quoting.
+include_dir = meson.current_source_dir() + '/include'
+default_c_args = ['-I' + include_dir]
+
+if meson.get_compiler('c').get_id() == 'msvc'
+ default_c_args += ['/Faasm output\\']
+ # Hack to create the 'asm output' directory in the builddir
+ subdir('asm output')
+endif
+
+# Path can contain \. Here we're sending `"foo\bar"`.
+test('backslash quoting',
+ executable('comparer', 'comparer.c',
+ c_args : default_c_args + ['-DDEF_WITH_BACKSLASH="foo\\bar"']))
+# Path can end in \ without any special quoting. Here we send `"foo\bar\"`.
+test('backslash end quoting',
+ executable('comparer-end', 'comparer-end.c',
+ c_args : default_c_args + ['-DDEF_WITH_BACKSLASH="foo\\bar\\"']))
+# Path can (really) end in \ if we're not passing a string literal without any
+# special quoting. Here we're sending `foo\bar\`.
+test('backslash end quoting when not a string literal',
+ executable('comparer-end-notstring', 'comparer-end-notstring.c',
+ c_args : default_c_args + ['-DDEF_WITH_BACKSLASH=foo\\bar\\']))
--- /dev/null
project('ternary operator', 'c')

# The ternary must evaluate only the selected branch: error() in the
# unselected branch would abort configuration if evaluation were eager.
one = true ? 1 : error('False branch should not be evaluated')
two = false ? error('True branch should not be evaluated.') : 2

# Check the ternary also yields the value of the selected branch.
assert(one == 1, 'Return value from ternary true is wrong.')
assert(two == 2, 'Return value from ternary false is wrong.')
--- /dev/null
+This is a text only input file.
--- /dev/null
+usr/subdir/data.dat
--- /dev/null
+project('custom target', 'c')
+
+python3 = import('python3').find_python()
+
+# Note that this will not add a dependency to the compiler executable.
+# Code will not be rebuilt if it changes.
+comp = '@0@/@1@'.format(meson.current_source_dir(), 'my_compiler.py')
+
+mytarget = custom_target('bindat',
+ output : 'data.dat',
+ input : 'data_source.txt',
+ capture : true,
+ command : [python3, comp, '@INPUT@'],
+ install : true,
+ install_dir : 'subdir'
+)
+
+ct_output_exists = '''import os, sys
+if not os.path.exists(sys.argv[1]):
+ print("could not find {!r} in {!r}".format(sys.argv[1], os.getcwd()))
+ sys.exit(1)
+'''
+
+test('capture-wrote', python3, args : ['-c', ct_output_exists, mytarget])
--- /dev/null
#!/usr/bin/env python3

"""Fake compiler: validate the single input file, emit fake binary output."""

import sys

if __name__ == '__main__':
    args = sys.argv
    # Exactly one argument is expected: the input file to check.
    if len(args) != 2:
        print(args[0], 'input_file')
        sys.exit(1)
    with open(args[1]) as infile:
        contents = infile.read()
    # The input must match the fixture text byte for byte.
    if contents != 'This is a text only input file.\n':
        print('Malformed input')
        sys.exit(1)
    # The "compiled" result is captured from stdout by the build system.
    print('This is a binary output file.')
--- /dev/null
#!/usr/bin/env python3

"""Copy one file to another: compiler.py input output."""

import sys


def copy_file(src, dest):
    """Copy the contents of *src* to *dest*.

    Context managers guarantee both handles are closed (the original
    one-liner left both files open until interpreter cleanup).
    """
    with open(src) as ifile, open(dest, 'w') as ofile:
        ofile.write(ifile.read())


if __name__ == '__main__':
    # Guarded so importing this module performs no file I/O.
    copy_file(sys.argv[1], sys.argv[2])
--- /dev/null
+#include<stdio.h>
+
+int main(int argc, char **argv) {
+ printf("I am a program.\n");
+ return 0;
+}
--- /dev/null
+# Must have two languages here to exercise linker language
+# selection bug
+project('all sources generated', 'c', 'cpp')
+
+comp = find_program('converter.py')
+
+g = generator(comp,
+ output : '@BASENAME@',
+ arguments : ['@INPUT@', '@OUTPUT@'])
+
+c = g.process('foobar.cpp.in')
+
+prog = executable('genexe', c)
+
+c2 = custom_target('c2gen',
+ output : '@BASENAME@',
+ input : 'foobar.cpp.in',
+ command : [comp, '@INPUT@', '@OUTPUT@'])
+
+prog2 = executable('genexe2', c2)
--- /dev/null
project('pathjoin', 'c')

# Test string-args form since that is the canonical way
assert(join_paths('foo') == 'foo', 'Single argument join is broken')
assert(join_paths('foo', 'bar') == 'foo/bar', 'Path joining is broken')
assert(join_paths('foo', 'bar', 'baz') == 'foo/bar/baz', 'Path joining is broken')
assert(join_paths('/foo', 'bar') == '/foo/bar', 'Path joining is broken')
# An absolute component discards everything joined before it.
assert(join_paths('foo', '/bar') == '/bar', 'Absolute path joining is broken')
assert(join_paths('/foo', '/bar') == '/bar', 'Absolute path joining is broken')

# Test array form since people are using that too
assert(join_paths(['foo']) == 'foo', 'Single argument join is broken')
assert(join_paths(['foo', 'bar']) == 'foo/bar', 'Path joining is broken')
assert(join_paths(['foo', 'bar', 'baz']) == 'foo/bar/baz', 'Path joining is broken')
assert(join_paths(['/foo', 'bar']) == '/foo/bar', 'Path joining is broken')
# Same absolute-component behaviour must hold for the array form.
assert(join_paths(['foo', '/bar']) == '/bar', 'Absolute path joining is broken')
assert(join_paths(['/foo', '/bar']) == '/bar', 'Absolute path joining is broken')
--- /dev/null
+this is a data file
--- /dev/null
+This goes into /etc/etcfile.dat
--- /dev/null
+This is a data file that is installed via a File object.
--- /dev/null
+usr/share/progname/datafile.dat
+usr/share/progname/fileobject_datafile.dat
+usr/share/progname/vanishing.dat
+usr/share/progname/vanishing2.dat
+etc/etcfile.dat
+usr/bin/runscript.sh
--- /dev/null
+project('data install test', 'c')
+install_data(sources : 'datafile.dat', install_dir : 'share/progname')
+# Some file in /etc that is only read-write by root; add a sticky bit for testing
+install_data(sources : 'etcfile.dat', install_dir : '/etc', install_mode : 'rw------T')
+# Some script that needs to be executable by the group
+install_data('runscript.sh',
+ install_dir : get_option('bindir'),
+ install_mode : ['rwxr-sr-x', 'root', 0])
+install_data(files('fileobject_datafile.dat'),
+ install_dir : 'share/progname',
+ install_mode : [false, false, 0])
+
+subdir('vanishing')
+
+install_data(sources : 'vanishing/vanishing2.dat', install_dir : 'share/progname')
--- /dev/null
+#!/bin/sh
+
+echo "Runscript"
--- /dev/null
+install_data(sources : 'vanishing.dat', install_dir : 'share/progname')
--- /dev/null
+This is a data file to be installed in a subdirectory.
--- /dev/null
+This is a data file to be installed in a subdirectory.
+
+It is installed from a different subdir to test that the
+installer strips the source tree dir prefix.
--- /dev/null
+project('proj', 'c')
+subdir('prog')
--- /dev/null
+subproject('sub')
+libSub = dependency('sub', fallback: ['sub', 'libSub'])
+
+exe = executable('prog', 'prog.c', dependencies: libSub)
+test('subdir subproject', exe)
--- /dev/null
#include <sub.h>

/* Exercise the subproject dependency: sub() always returns 0 (success). */
int main() {
    return sub();
}
--- /dev/null
+project('sub', 'c')
+lib = static_library('sub', 'sub.c')
+libSub = declare_dependency(include_directories: include_directories('.'), link_with: lib)
--- /dev/null
+#include "sub.h"
+
+int sub() {
+ return 0;
+}
--- /dev/null
+#ifndef SUB_H
+#define SUB_H
+
+int sub();
+
+#endif
--- /dev/null
+project('foo', 'c')
+
+a = configuration_data()
+a.set('HELLO', 1)
+
+b = a
+
+assert(a.has('HELLO'), 'Original config data should be set on a')
+assert(b.has('HELLO'), 'Original config data should be set on copy')
+
+configure_file(output : 'b.h', configuration : b)
+
+# This should still work, as we didn't use the original above but a copy!
+a.set('WORLD', 1)
+
+assert(a.has('WORLD'), 'New config data should have been set')
+assert(not b.has('WORLD'), 'New config data set should not affect var copied earlier')
+
+configure_file(output : 'a.h', configuration : a)
+
--- /dev/null
+project('skip', 'c')
+
+error('MESON_SKIP_TEST this test is always skipped.')
+
--- /dev/null
+#ifndef PROJECT_OPTION
+#error
+#endif
+
+#ifndef PROJECT_OPTION_1
+#error
+#endif
+
+#ifndef GLOBAL_ARGUMENT
+#error
+#endif
+
+#ifdef SUBPROJECT_OPTION
+#error
+#endif
+
+#ifdef OPTION_CPP
+#error
+#endif
+
+#ifndef PROJECT_OPTION_C_CPP
+#error
+#endif
+
+int main(int argc, char **argv) {
+ return 0;
+}
--- /dev/null
+#ifdef PROJECT_OPTION
+#error
+#endif
+
+#ifdef PROJECT_OPTION_1
+#error
+#endif
+
+#ifdef GLOBAL_ARGUMENT
+#error
+#endif
+
+#ifdef SUBPROJECT_OPTION
+#error
+#endif
+
+#ifndef PROJECT_OPTION_CPP
+#error
+#endif
+
+#ifndef PROJECT_OPTION_C_CPP
+#error
+#endif
+
+int main(int argc, char **argv) {
+ return 0;
+}
+
--- /dev/null
+project('project options tester', 'c', 'cpp',
+ version : '2.3.4',
+ license : 'mylicense')
+
+add_global_arguments('-DGLOBAL_ARGUMENT', language: 'c')
+add_project_arguments('-DPROJECT_OPTION', language: 'c')
+add_project_arguments('-DPROJECT_OPTION_CPP', language: 'cpp')
+add_project_arguments('-DPROJECT_OPTION_C_CPP', language: ['c', 'cpp'])
+
+sub = subproject('subexe', version : '1.0.0')
+
+add_project_arguments('-DPROJECT_OPTION_1', language: 'c')
+
+e = executable('exe', 'exe.c')
+e = executable('execpp', 'exe.cpp')
+test('exetest', e)
+test('execpptest', e)
--- /dev/null
+project('subproject', 'c',
+ version : '1.0.0',
+ license : ['sublicense1', 'sublicense2'])
+
+if not meson.is_subproject()
+ error('Claimed to be master project even though we are a subproject.')
+endif
+
+assert(meson.project_name() == 'subproject', 'Incorrect subproject name')
+
+add_project_arguments('-DSUBPROJECT_OPTION', language: 'c')
+e = executable('subexe', 'subexe.c')
+test('subexetest', e)
--- /dev/null
+#ifdef PROJECT_OPTION
+#error
+#endif
+
+#ifdef PROJECT_OPTION_1
+#error
+#endif
+
+#ifdef PROJECT_OPTION_C_CPP
+#error
+#endif
+
+#ifndef GLOBAL_ARGUMENT
+#error
+#endif
+
+#ifndef SUBPROJECT_OPTION
+#error
+#endif
+
+#ifdef OPTION_CPP
+#error
+#endif
+
+int main(int argc, char **argv) {
+ return 0;
+}
--- /dev/null
+project('test skip', 'c')
+
+exe_test_skip = executable('test_skip', 'test_skip.c')
+test('test_skip', exe_test_skip)
--- /dev/null
/* NOTE(review): exit status 77 is the conventional "test was skipped"
 * code understood by Automake/Meson-style test harnesses — this fixture
 * presumably checks that the harness reports it as a skip; confirm. */
int main(int argc, char *argv[])
{
    return 77;
}
--- /dev/null
+project('shared module', 'c')
+
+dl = meson.get_compiler('c').find_library('dl', required : false)
+l = shared_library('runtime', 'runtime.c')
+# Do NOT link the module with the runtime library. This
+# is a common approach for plugins that are only used
+# with dlopen. Any symbols are resolved dynamically
+# at runtime. This requires extra help on Windows, so
+# should be avoided unless really necessary.
+m = shared_module('mymodule', 'module.c')
+e = executable('prog', 'prog.c', link_with : l, dependencies : dl)
+test('import test', e, args : m)
+
--- /dev/null
+#if defined _WIN32 || defined __CYGWIN__
+ #define DLL_PUBLIC __declspec(dllexport)
+#else
+ #if defined __GNUC__
+ #define DLL_PUBLIC __attribute__ ((visibility("default")))
+ #else
+ #pragma message ("Compiler does not support symbol visibility.")
+ #define DLL_PUBLIC
+ #endif
+#endif
+
+#if defined(_WIN32) || defined(__CYGWIN__)
+
+#include <stdio.h>
+
+typedef int (*fptr) (void);
+
+#ifdef __CYGWIN__
+
+#include <dlfcn.h>
+
+fptr find_any_f (const char *name) {
+ return (fptr) dlsym(RTLD_DEFAULT, name);
+}
+#else /* _WIN32 */
+
+#include <windows.h>
+#include <tlhelp32.h>
+
+/* Unlike Linux and OS X, when a library is loaded, all the symbols aren't
+ * loaded into a single namespace. You must fetch the symbol by iterating over
+ * all loaded modules. Code for finding the function from any of the loaded
+ * modules is taken from gmodule.c in glib */
+fptr find_any_f (const char *name) {
+ fptr f;
+ HANDLE snapshot;
+ MODULEENTRY32 me32;
+
+ snapshot = CreateToolhelp32Snapshot (TH32CS_SNAPMODULE, 0);
+ if (snapshot == (HANDLE) -1) {
+ printf("Could not get snapshot\n");
+ return 0;
+ }
+
+ me32.dwSize = sizeof (me32);
+
+ f = NULL;
+ if (Module32First (snapshot, &me32)) {
+ do {
+ if ((f = (fptr) GetProcAddress (me32.hModule, name)) != NULL)
+ break;
+ } while (Module32Next (snapshot, &me32));
+ }
+
+ CloseHandle (snapshot);
+ return f;
+}
+#endif
+
+int DLL_PUBLIC func() {
+ fptr f;
+
+ f = find_any_f ("func_from_language_runtime");
+ if (f != NULL)
+ return f();
+ printf ("Could not find function\n");
+ return 1;
+}
+
+#else
+/*
+ * Shared modules often have references to symbols that are not defined
+ * at link time, but which will be provided from deps of the executable that
+ * dlopens it. We need to make sure that this works, i.e. that we do
+ * not pass -Wl,--no-undefined when linking modules.
+ */
+int func_from_language_runtime();
+
+int DLL_PUBLIC func(void) {
+ return func_from_language_runtime();
+}
+#endif
--- /dev/null
+
+#include <stdio.h>
+
+int func_from_language_runtime(void);
+typedef int (*fptr) (void);
+
+#ifdef _WIN32
+
+#include <windows.h>
+
+wchar_t*
+win32_get_last_error (void)
+{
+ wchar_t *msg = NULL;
+
+ FormatMessageW (FORMAT_MESSAGE_ALLOCATE_BUFFER
+ | FORMAT_MESSAGE_IGNORE_INSERTS
+ | FORMAT_MESSAGE_FROM_SYSTEM,
+ NULL, GetLastError (), 0,
+ (LPWSTR) &msg, 0, NULL);
+ return msg;
+}
+
+int
+main (int argc, char **argv)
+{
+ HINSTANCE handle;
+ fptr importedfunc;
+ int expected, actual;
+ int ret = 1;
+
+ handle = LoadLibraryA (argv[1]);
+ if (!handle) {
+ wchar_t *msg = win32_get_last_error ();
+ printf ("Could not open %s: %S\n", argv[1], msg);
+ goto nohandle;
+ }
+
+ importedfunc = (fptr) GetProcAddress (handle, "func");
+ if (importedfunc == NULL) {
+ wchar_t *msg = win32_get_last_error ();
+ printf ("Could not find 'func': %S\n", msg);
+ goto out;
+ }
+
+ actual = importedfunc ();
+ expected = func_from_language_runtime ();
+ if (actual != expected) {
+ printf ("Got %i instead of %i\n", actual, expected);
+ goto out;
+ }
+
+ ret = 0;
+out:
+ FreeLibrary (handle);
+nohandle:
+ return ret;
+}
+
+#else
+
+#include<dlfcn.h>
+#include<assert.h>
+
+int main(int argc, char **argv) {
+ void *dl;
+ fptr importedfunc;
+ int expected, actual;
+ char *error;
+ int ret = 1;
+
+ dlerror();
+ dl = dlopen(argv[1], RTLD_LAZY);
+ error = dlerror();
+ if(error) {
+ printf("Could not open %s: %s\n", argv[1], error);
+ goto nodl;
+ }
+
+ importedfunc = (fptr) dlsym(dl, "func");
+ if (importedfunc == NULL) {
+ printf ("Could not find 'func'\n");
+ goto out;
+ }
+
+ assert(importedfunc != func_from_language_runtime);
+
+ actual = (*importedfunc)();
+ expected = func_from_language_runtime ();
+ if (actual != expected) {
+ printf ("Got %i instead of %i\n", actual, expected);
+ goto out;
+ }
+
+ ret = 0;
+out:
+ dlclose(dl);
+nodl:
+ return ret;
+}
+
+#endif
--- /dev/null
+#if defined _WIN32 || defined __CYGWIN__
+ #define DLL_PUBLIC __declspec(dllexport)
+#else
+ #if defined __GNUC__
+ #define DLL_PUBLIC __attribute__ ((visibility("default")))
+ #else
+ #pragma message ("Compiler does not support symbol visibility.")
+ #define DLL_PUBLIC
+ #endif
+#endif
+
+/*
+ * This file pretends to be a language runtime that supports extension
+ * modules.
+ */
+
+int DLL_PUBLIC func_from_language_runtime(void) {
+ return 86;
+}
--- /dev/null
+#include <stdio.h>
+
+unsigned square_unsigned (unsigned a);
+
+int
+main (int argc, char * argv[])
+{
+ unsigned int ret = square_unsigned (2);
+ if (ret != 4) {
+ printf("Got %u instead of 4\n", ret);
+ return 1;
+ }
+ return 0;
+}
--- /dev/null
+#include <stdio.h>
+
+extern "C" {
+ unsigned square_unsigned (unsigned a);
+}
+
+int
+main (int argc, char * argv[])
+{
+ unsigned int ret = square_unsigned (2);
+ if (ret != 4) {
+ printf("Got %u instead of 4\n", ret);
+ return 1;
+ }
+ return 0;
+}
--- /dev/null
+project('llvm-ir', 'c', 'cpp')
+
+cpu = host_machine.cpu_family()
+supported_cpus = ['arm', 'x86', 'x86_64']
+
+foreach lang : ['c', 'cpp']
+ cc = meson.get_compiler(lang)
+ cc_id = cc.get_id()
+ ## Build a trivial executable with mixed LLVM IR source
+ if cc_id == 'clang'
+ e = executable('square_ir_' + lang, 'square.ll', 'main.' + lang)
+ test('test IR square' + lang, e)
+ endif
+ ## Build a trivial executable with mixed assembly source
+ # This also helps test whether cc.symbols_have_underscore_prefix() is working
+ # properly. This is done by assembling some assembly into an object that will
+ # provide the unsigned_squared() symbol to main.c/cpp. This requires the
+ # C symbol mangling to be known in advance.
+ if cc.symbols_have_underscore_prefix()
+ uscore_args = ['-DMESON_TEST__UNDERSCORE_SYMBOL']
+ message('underscore is prefixed')
+ else
+ uscore_args = []
+ message('underscore is NOT prefixed')
+ endif
+ square_base = 'square-' + cpu
+ square_impl = square_base + '.S'
+ # MSVC cannot directly compile assembly files, so we pass it through the
+ # cl.exe pre-processor first and then assemble it with the ml.exe assembler.
+ # Then we can link it into the executable.
+ if cc_id == 'msvc'
+ cl = find_program('cl')
+ if cpu == 'x86'
+ ml = find_program('ml')
+ elif cpu == 'x86_64'
+ ml = find_program('ml64')
+ else
+ error('Unsupported cpu family: "' + cpu + '"')
+ endif
+ # Preprocess file (ml doesn't support pre-processing)
+ preproc_name = lang + square_base + '.i'
+ square_preproc = custom_target(lang + square_impl + 'preproc',
+ input : square_impl,
+ output : preproc_name,
+ command : [cl, '/EP', '/P', '/Fi' + preproc_name, '@INPUT@'] + uscore_args)
+ # Use assembled object file instead of the original .S assembly source
+ square_impl = custom_target(lang + square_impl,
+ input : square_preproc,
+ output : lang + square_base + '.obj',
+ command : [ml, '/Fo', '@OUTPUT@', '/c', '@INPUT@'])
+ endif
+ if supported_cpus.contains(cpu)
+ e = executable('square_asm_' + lang, square_impl, 'main.' + lang,
+ c_args : uscore_args, cpp_args : uscore_args)
+ test('test ASM square' + lang, e)
+ elif cc_id != 'clang'
+ error('MESON_SKIP_TEST: Unsupported cpu: "' + cpu + '", and LLVM not found')
+ endif
+endforeach
--- /dev/null
+#include "symbol-underscore.h"
+
+.text
+.globl SYMBOL_NAME(square_unsigned)
+# ifdef __linux__
+.type square_unsigned, %function
+#endif
+
+SYMBOL_NAME(square_unsigned):
+ mul r1, r0, r0
+ mov r0, r1
+ mov pc, lr
--- /dev/null
+#include "symbol-underscore.h"
+
+/* This sadly doesn't test the symbol underscore stuff. I can't figure out how
+ * to not use an automatic stdcall mechanism and do everything manually. */
+#ifdef _MSC_VER
+
+.386
+.MODEL FLAT, C
+
+PUBLIC square_unsigned
+_TEXT SEGMENT
+
+square_unsigned PROC var1:DWORD
+ mov eax, var1
+ imul eax, eax
+ ret
+square_unsigned ENDP
+
+_TEXT ENDS
+END
+
+#else
+
+.text
+.globl SYMBOL_NAME(square_unsigned)
+# ifdef __linux__
+.type square_unsigned, %function
+#endif
+
+SYMBOL_NAME(square_unsigned):
+ movl 4(%esp), %eax
+ imull %eax, %eax
+ retl
+
+#endif
--- /dev/null
+#include "symbol-underscore.h"
+
+#ifdef _MSC_VER /* MSVC on Windows */
+
+PUBLIC SYMBOL_NAME(square_unsigned)
+_TEXT SEGMENT
+
+SYMBOL_NAME(square_unsigned) PROC
+ mov eax, ecx
+ imul eax, eax
+ ret
+SYMBOL_NAME(square_unsigned) ENDP
+
+_TEXT ENDS
+END
+
+#else
+
+.text
+.globl SYMBOL_NAME(square_unsigned)
+# ifdef __linux__
+.type square_unsigned, %function
+#endif
+
+# if defined(_WIN32) || defined(__CYGWIN__) /* msabi */
+SYMBOL_NAME(square_unsigned):
+ imull %ecx, %ecx
+ movl %ecx, %eax
+ retq
+# else /* sysvabi */
+SYMBOL_NAME(square_unsigned):
+ imull %edi, %edi
+ movl %edi, %eax
+ retq
+# endif
+
+#endif
--- /dev/null
+define i32 @square_unsigned(i32 %a) {
+ %1 = mul i32 %a, %a
+ ret i32 %1
+}
--- /dev/null
+#if defined(MESON_TEST__UNDERSCORE_SYMBOL)
+# define SYMBOL_NAME(name) _##name
+#else
+# define SYMBOL_NAME(name) name
+#endif
--- /dev/null
+project('c++ and assembly test', 'cpp')
+
+cpp = meson.get_compiler('cpp')
+cpu = host_machine.cpu_family()
+
+supported_cpus = ['arm', 'x86', 'x86_64']
+
+if not supported_cpus.contains(cpu)
+ error('MESON_SKIP_TEST unsupported cpu:' + cpu)
+endif
+
+if cpp.symbols_have_underscore_prefix()
+ add_project_arguments('-DMESON_TEST__UNDERSCORE_SYMBOL', language : 'cpp')
+endif
+
+sources = ['trivial.cc']
+# If the compiler cannot compile assembly, don't use it
+if meson.get_compiler('cpp').get_id() != 'msvc'
+ sources += ['retval-' + cpu + '.S']
+ cpp_args = ['-DUSE_ASM']
+ message('Using ASM')
+else
+ cpp_args = ['-DNO_USE_ASM']
+endif
+
+exe = executable('trivialprog', sources,
+ cpp_args : cpp_args)
+test('runtest', exe)
--- /dev/null
+#include "symbol-underscore.h"
+
+.text
+.globl SYMBOL_NAME(get_retval)
+# ifdef __linux__
+.type get_retval, %function
+#endif
+
+SYMBOL_NAME(get_retval):
+ mov r0, #0
+ mov pc, lr
--- /dev/null
+#include "symbol-underscore.h"
+
+.text
+.globl SYMBOL_NAME(get_retval)
+# ifdef __linux__
+.type get_retval, %function
+#endif
+
+SYMBOL_NAME(get_retval):
+ xorl %eax, %eax
+ retl
--- /dev/null
+#include "symbol-underscore.h"
+
+.text
+.globl SYMBOL_NAME(get_retval)
+# ifdef __linux__
+.type get_retval, %function
+#endif
+
+SYMBOL_NAME(get_retval):
+ xorl %eax, %eax
+ retq
--- /dev/null
+#if defined(MESON_TEST__UNDERSCORE_SYMBOL)
+# define SYMBOL_NAME(name) _##name
+#else
+# define SYMBOL_NAME(name) name
+#endif
--- /dev/null
+#include<iostream>
+
+extern "C" {
+ int get_retval(void);
+}
+
+int main(int argc, char **argv) {
+ std::cout << "C++ seems to be working." << std::endl;
+#if defined(USE_ASM)
+ return get_retval();
+#elif defined(NO_USE_ASM)
+ return 0;
+#else
+ #error "Forgot to pass asm define"
+#endif
+}
--- /dev/null
+#pragma once
+
+int func1();
+int func2();
+int func3();
+int func4();
--- /dev/null
+#include"extractor.h"
+
+int func4() {
+ return 4;
+}
--- /dev/null
+EXPORTS
+ func1
+ func2
+ func3
+ func4
--- /dev/null
+project('extract all', 'c', 'cpp')
+
+a = static_library('a', 'one.c', 'two.c')
+b = static_library('b', 'three.c', 'four.c')
+c = shared_library('c',
+ objects : [a.extract_all_objects(), b.extract_all_objects()],
+ vs_module_defs : 'func1234.def')
+
+e = executable('proggie', 'prog.c', link_with : c)
+test('extall', e)
--- /dev/null
+#include"extractor.h"
+
+int func1() {
+ return 1;
+}
--- /dev/null
+#include"extractor.h"
+#include<stdio.h>
+
+int main(int argc, char **argv) {
+ if((1+2+3+4) != (func1() + func2() + func3() + func4())) {
+ printf("Arithmetic is fail.\n");
+ return 1;
+ }
+ return 0;
+}
--- /dev/null
+#include"extractor.h"
+
+int func3() {
+ return 3;
+}
--- /dev/null
+#include"extractor.h"
+
+int func2() {
+ return 2;
+}
--- /dev/null
+usr/bin/prog?exe
--- /dev/null
+project('object generator', 'c')
+
+# FIXME: Note that this will not add a dependency to the compiler executable.
+# Code will not be rebuilt if it changes.
+comp = find_program('obj_generator.py')
+
+if host_machine.system() == 'windows'
+ ext = '.obj'
+else
+ ext = '.o'
+endif
+
+cc = meson.get_compiler('c').cmd_array().get(-1)
+
+# Generate an object file with configure_file to mimic prebuilt objects
+# provided by the source tree
+source1 = configure_file(input : 'source.c',
+ output : 'source' + ext,
+ command : [comp, cc, files('source.c'),
+ join_paths(meson.current_build_dir(), 'source' + ext)])
+
+obj = static_library('obj', objects : source1)
+
+# Generate an object file manually.
+gen = generator(comp,
+ output : '@BASENAME@' + ext,
+ arguments : [cc, '@INPUT@', '@OUTPUT@'])
+
+generated = gen.process(['source2.c'])
+
+shr = shared_library('shr', generated,
+ vs_module_defs : 'source2.def')
+
+# Generate an object file with indexed OUTPUT replacement.
+gen2 = generator(comp,
+ output : '@BASENAME@' + ext,
+ arguments : [cc, '@INPUT@', '@OUTPUT0@'])
+generated2 = gen2.process(['source3.c'])
+
+stc = static_library('stc', generated2)
+
+e = executable('prog', 'prog.c', link_with : [obj, shr, stc],
+ install : true)
+
+test('objgen', e)
--- /dev/null
#!/usr/bin/env python3

# Mimic a binary that generates an object file (e.g. windres).

import sys, subprocess

if __name__ == '__main__':
    # Usage: obj_generator.py compiler input_file output_file
    if len(sys.argv) != 4:
        print(sys.argv[0], 'compiler input_file output_file')
        sys.exit(1)
    compiler, ifile, ofile = sys.argv[1], sys.argv[2], sys.argv[3]
    if compiler.endswith('cl'):
        # MSVC-style driver: /Fo names the object file.
        cmd = [compiler, '/nologo', '/MDd', '/Fo' + ofile, '/c', ifile]
    else:
        # Unix-style driver.
        cmd = [compiler, '-c', ifile, '-o', ofile]
    # Propagate the compiler's exit status as our own.
    sys.exit(subprocess.call(cmd))
--- /dev/null
/* Each function is provided by a separately generated object file (see
 * this test's meson.build). All return 0, so a successful link and run
 * exits with 0. */
int func1_in_obj();
int func2_in_obj();
int func3_in_obj();

int main(int argc, char **argv) {
    return func1_in_obj() + func2_in_obj() + func3_in_obj();
}
--- /dev/null
+int func1_in_obj() {
+ return 0;
+}
--- /dev/null
+int func2_in_obj() {
+ return 0;
+}
--- /dev/null
+EXPORTS
+ func2_in_obj
--- /dev/null
+int func3_in_obj() {
+ return 0;
+}
--- /dev/null
+project('pch test', 'c')
+
+exe = executable('prog', 'prog.c',
+c_pch : ['pch/prog_pch.c', 'pch/prog.h'])
--- /dev/null
+#include<stdio.h>
--- /dev/null
+#if !defined(_MSC_VER)
+#error "This file is only for use with MSVC."
+#endif
+
+#include "prog.h"
--- /dev/null
+// No includes here, they need to come from the PCH
+
+void func() {
+ fprintf(stdout, "This is a function that fails if stdio is not #included.\n");
+}
+
+int main(int argc, char **argv) {
+ return 0;
+}
+
--- /dev/null
+#pragma once
+
+int foobar();
--- /dev/null
+project('nobuilddir', 'c',
+ default_options : 'werror=true')
+
+cc = meson.get_compiler('c')
+
+incwarg = '-Wmissing-include-dirs'
+
+if cc.has_argument(incwarg)
+ executable('prog', 'prog.c',
+ c_args : incwarg,
+ include_directories : include_directories('include'))
+else
+ error('MESON_SKIP_TEST compiler does not support bad inc dir argument.')
+endif
--- /dev/null
+#include"header.h"
+
+int main(int argc, char **argv) {
+ return 0;
+}
--- /dev/null
#!/usr/bin/env python3

"""Generate a tiny HTML doc tree: docgen.py output_dir."""

import os
import sys


def generate_docs(out):
    """Create directory *out* and write a one-word page for each of
    'a', 'b' and 'c' into it (<name>.html containing just <name>).

    Raises FileExistsError if *out* already exists, matching the original
    os.mkdir behaviour.
    """
    os.mkdir(out)
    for name in ('a', 'b', 'c'):
        with open(os.path.join(out, name + '.html'), 'w') as f:
            f.write(name)


if __name__ == '__main__':
    # Guarded so importing this module performs no filesystem side effects.
    generate_docs(sys.argv[1])
--- /dev/null
+usr/share/doc/testpkgname/html/a.html
+usr/share/doc/testpkgname/html/b.html
+usr/share/doc/testpkgname/html/c.html
--- /dev/null
+project('custom-target-dir-install', 'c')
+
+docgen = find_program('docgen.py')
+
+custom_target('docgen',
+ output : 'html',
+ command : [docgen, '@OUTPUT@'],
+ install : true,
+ install_dir : join_paths(get_option('datadir'), 'doc/testpkgname'))
--- /dev/null
+int main(int argc, char *argv[]) {
+ return 0;
+}
--- /dev/null
+project('dep file gen', 'c')
+
+cc_id = meson.get_compiler('c').get_id()
+if cc_id == 'intel'
+ # ICC does not escape spaces in paths in the dependency file, so Ninja
+ # (correctly) thinks that the rule has multiple outputs and errors out:
+ # 'depfile has multiple output paths'
+ error('MESON_SKIP_TEST: Skipping test with Intel compiler because it generates broken dependency files')
+endif
+
+e = executable('main file', 'main .c')
+test('test it', e)
--- /dev/null
+cdata = configuration_data()
+cdata.set('VALUE', '42')
+
+cfile = configure_file(input : 'confdata.in',
+output : 'confdata',
+configuration : cdata)
--- /dev/null
+project('conf file in generator', 'c')
+
+subdir('inc')
+subdir('src')
--- /dev/null
#!/usr/bin/env python3

"""Generate a C header defining RESULT: gen.py input output.

The first line of the input supplies the value.
"""

import sys


def generate(ifile, ofile):
    """Read a value from the first line of *ifile* and write
    '#define RESULT (<value>)' to *ofile*."""
    with open(ifile, 'r') as f:
        resval = f.readline().strip()

    templ = '#define RESULT (%s)\n'
    with open(ofile, 'w') as f:
        f.write(templ % (resval, ))


if __name__ == '__main__':
    # Guarded so importing this module performs no file I/O.
    generate(sys.argv[1], sys.argv[2])
--- /dev/null
+#include<stdio.h>
+
+#include"confdata.h"
+#if RESULT != 42
+#error Configuration RESULT is not defined correctly
+#endif
+
+#undef RESULT
+
+#include"source.h"
+#if RESULT != 23
+#error Source RESULT is not defined correctly
+#endif
+
+int main(int argc, char **argv) {
+ return 0;
+}
--- /dev/null
# gen.py rewrites its input's first line as '#define RESULT (...)'.
compiler = find_program('gen.py')
gen = generator(compiler,
  output: '@BASENAME@.h',
  arguments : ['@INPUT@', '@OUTPUT@'])
# 'cfile' is a configure_file() result from a sibling subdir; processing it
# alongside a plain files() object checks generators accept both kinds.
hs = gen.process(cfile, files('source'))

executable('proggie', 'main.c', hs)
--- /dev/null
#!/usr/bin/env python3
"""Copy the file at argv[1] to argv[2] (contents only, no metadata)."""

import sys
import shutil

shutil.copyfile(sys.argv[1], sys.argv[2])
--- /dev/null
#include <stdio.h>

unsigned square_unsigned (unsigned a);

/* Driver: square_unsigned(2) must yield 4; report and fail otherwise. */
int
main (int argc, char * argv[])
{
  unsigned int result = square_unsigned (2);
  if (result == 4)
    return 0;
  printf("Got %u instead of 4\n", result);
  return 1;
}
--- /dev/null
project('generated llvm ir', 'c')

if meson.get_compiler('c').get_id() != 'clang'
  error('MESON_SKIP_TEST: LLVM IR files can only be built with clang')
endif

copy = find_program('copyfile.py')

# '@BASENAME@' drops the trailing '.in', so square.ll.in becomes square.ll.
copygen = generator(copy,
  arguments : ['@INPUT@', '@OUTPUT@'],
  output : '@BASENAME@')

# Produce the .ll via a generator ...
l = shared_library('square-gen', copygen.process('square.ll.in'))

test('square-gen-test', executable('square-gen-test', 'main.c', link_with : l))

# ... and again via a custom target, covering both build paths.
copyct = custom_target('square',
  input : 'square.ll.in',
  output : 'square.ll',
  command : [copy, '@INPUT@', '@OUTPUT@'])

l = shared_library('square-ct', copyct)

test('square-ct-test', executable('square-ct-test', 'main.c', link_with : l))
--- /dev/null
+define i32 @square_unsigned(i32 %a) {
+ %1 = mul i32 %a, %a
+ ret i32 %1
+}
--- /dev/null
#!/usr/bin/env python3
"""Copy the file at argv[1] to argv[2] (contents only, no metadata)."""

import sys
import shutil

shutil.copyfile(sys.argv[1], sys.argv[2])
--- /dev/null
+#include <stdio.h>
+
+#if defined(_WIN32) || defined(__CYGWIN__)
+ __declspec(dllimport)
+#endif
+unsigned square_unsigned (unsigned a);
+
+int
+main (int argc, char * argv[])
+{
+ unsigned int ret = square_unsigned (2);
+ if (ret != 4) {
+ printf("Got %u instead of 4\n", ret);
+ return 1;
+ }
+ return 0;
+}
--- /dev/null
project('generated assembly', 'c')

cc = meson.get_compiler('c')

if cc.get_id() == 'msvc'
  error('MESON_SKIP_TEST: assembly files cannot be compiled directly by MSVC')
endif

cpu = host_machine.cpu_family()
supported_cpus = ['arm', 'x86', 'x86_64']

if not supported_cpus.contains(cpu)
  error('MESON_SKIP_TEST: unsupported cpu family: ' + cpu)
endif

# Platforms whose C ABI prefixes symbols with an underscore need the .S
# sources to decorate their symbols to match (see symbol-underscore.h).
if cc.symbols_have_underscore_prefix()
  add_project_arguments('-DMESON_TEST__UNDERSCORE_SYMBOL', language : 'c')
endif

copy = find_program('copyfile.py')
# e.g. square-x86_64.S, copied from square-x86_64.S.in
output = 'square-@0@.S'.format(cpu)
input = output + '.in'

# '@BASENAME@' drops the trailing '.in' from the input name.
copygen = generator(copy,
  arguments : ['@INPUT@', '@OUTPUT@'],
  output : '@BASENAME@')

# Produce the .S via a generator ...
l = shared_library('square-gen', copygen.process(input))

test('square-gen-test', executable('square-gen-test', 'main.c', link_with : l))

# ... and again via a custom target, covering both build paths.
copyct = custom_target('square',
  input : input,
  output : output,
  command : [copy, '@INPUT@', '@OUTPUT@'])

l = shared_library('square-ct', copyct)

test('square-ct-test', executable('square-ct-test', 'main.c', link_with : l))
--- /dev/null
+#include "symbol-underscore.h"
+
+.text
+.globl SYMBOL_NAME(square_unsigned)
+/* Only supported on Linux with GAS */
+# ifdef __linux__
+.type square_unsigned,%function
+#endif
+
+SYMBOL_NAME(square_unsigned):
+ mul r1, r0, r0
+ mov r0, r1
+ mov pc, lr
--- /dev/null
+#include "symbol-underscore.h"
+
+#ifdef _MSC_VER
+
+.386
+.MODEL FLAT, C
+
+PUBLIC square_unsigned
+_TEXT SEGMENT
+
+square_unsigned PROC var1:DWORD
+ mov eax, var1
+ imul eax, eax
+ ret
+square_unsigned ENDP
+
+_TEXT ENDS
+END
+
+#else
+
+.text
+.globl SYMBOL_NAME(square_unsigned)
+/* Only supported on Linux with GAS */
+# ifdef __linux__
+.type square_unsigned,@function
+# endif
+
+SYMBOL_NAME(square_unsigned):
+ movl 4(%esp), %eax
+ imull %eax, %eax
+ retl
+
+#endif
--- /dev/null
+#include "symbol-underscore.h"
+
+#ifdef _MSC_VER /* MSVC on Windows */
+
+PUBLIC SYMBOL_NAME(square_unsigned)
+_TEXT SEGMENT
+
+SYMBOL_NAME(square_unsigned) PROC
+ mov eax, ecx
+ imul eax, eax
+ ret
+SYMBOL_NAME(square_unsigned) ENDP
+
+_TEXT ENDS
+END
+
+#else
+
+.text
+.globl SYMBOL_NAME(square_unsigned)
+/* Only supported on Linux with GAS */
+# ifdef __linux__
+.type square_unsigned,@function
+# endif
+
+# if defined(_WIN32) || defined(__CYGWIN__) /* msabi */
+SYMBOL_NAME(square_unsigned):
+ imull %ecx, %ecx
+ movl %ecx, %eax
+ retq
+# else /* sysvabi */
+SYMBOL_NAME(square_unsigned):
+ imull %edi, %edi
+ movl %edi, %eax
+ retq
+# endif
+
+#endif
--- /dev/null
/* Prefix assembly symbol names with an underscore on platforms whose C ABI
 * does so; the define is set from meson.build based on
 * symbols_have_underscore_prefix(). */
#if defined(MESON_TEST__UNDERSCORE_SYMBOL)
# define SYMBOL_NAME(name) _##name
#else
# define SYMBOL_NAME(name) name
#endif
--- /dev/null
+int main (int argc, char *argv[]) {
+ return 0;
+}
--- /dev/null
project('unit-test', 'c', version : '1.0')

write_file = find_program('write_file.py')

# A test that consumes and verifies the output generated by a custom target.
# Should work even if target is not built by default. Makes sure that foo.out
# is actually created before the test command that uses foo_out is run.
foo_out = custom_target('foo.out',
  output : 'foo.out',
  command : [write_file, '@OUTPUT@'])

# Also verify that a build_by_default : false BuildTarget added to a test is
# built before the test is run.
exe_out = executable('out', 'main.c', build_by_default : false)

# Inline script run by the test below; argv[1]/argv[2] receive the file
# paths of foo_out and exe_out.
py_file_exists = '''import os, sys
if not os.path.exists(sys.argv[1]) or not os.path.exists(sys.argv[2]):
    print("could not find {!r} or {!r} in {!r}"
          "".format(sys.argv[1], sys.argv[2], os.getcwd()))
    sys.exit(1)'''

python = import('python3').find_python()
test('output-check', python, args : ['-c', py_file_exists, foo_out, exe_out])
--- /dev/null
#!/usr/bin/env python3
"""Write the literal string 'Test' to the path given as argv[1]."""

import sys

with open(sys.argv[1], 'w') as f:
    f.write('Test')
--- /dev/null
+#include<stdio.h>
+
+int main(int argc, char **argv) {
+ printf("Existentialism.\n");
+ return 0;
+}
--- /dev/null
project('build on all', 'c')

py3_mod = import('python3')
py3 = py3_mod.find_python()

# Not built by default; the check below expects it to be absent.
executable('fooprog', 'foo.c', build_by_default : false)
comp = files('mygen.py')
mytarget = custom_target('gendat',
  output : 'generated.dat',
  input : 'source.txt',
  command : [py3] + comp + ['@INPUT@', '@OUTPUT@'],
  build_by_default : true,
)

ct_output = join_paths(meson.build_root(), 'generated.dat')
exe_output = join_paths(meson.build_root(), 'fooprog')
if host_machine.system() == 'windows'
  exe_output += '.exe'
endif

# Inline script: exit non-zero (test failure) when the custom-target output
# is missing AND the non-default executable exists.
# NOTE(review): to assert *both* "ct exists" and "exe does not exist" the
# condition would need 'or' instead of 'and' -- confirm intent upstream.
ct_exists_exe_nexists = 'import os.path, sys; sys.exit(not os.path.exists(sys.argv[1]) and os.path.exists(sys.argv[2]))'

test('check-build-by-default', py3,
  args : ['-c', ct_exists_exe_nexists, ct_output, exe_output])
--- /dev/null
#!/usr/bin/env python3
"""Copy the text file at argv[1] to argv[2].

Both files are opened with context managers so they are closed (and the
output flushed) deterministically, instead of leaking open handles and
relying on interpreter shutdown to flush the write.
"""

import sys

with open(sys.argv[1]) as ifile, open(sys.argv[2], 'w') as ofile:
    ofile.write(ifile.read())
--- /dev/null
+I am a bunch of text.
--- /dev/null
#!/usr/bin/env python3
"""Copy the file at argv[1] to argv[2] (contents only, no metadata)."""

import sys
import shutil

shutil.copyfile(sys.argv[1], sys.argv[2])
--- /dev/null
+#error "ctsub/main.h included"
--- /dev/null
+# https://github.com/mesonbuild/meson/pull/2291
+copy = find_program('copyfile.py')
+configure_file(input : 'main.h',
+ output : 'main.h',
+ command : [copy, '@INPUT@', '@OUTPUT@'])
+ctfile = custom_target('emptyfile',
+ input : 'emptyfile.c',
+ output : 'emptyfile.c',
+ command : [copy, '@INPUT@', '@OUTPUT@'])
--- /dev/null
project('include order', 'c')

# Test that the order of priority of include paths (from first to last) is:
#
# 1. Target's current build directory
# 2. Target's current source directory
# 3. Include paths added with the `c_args:` kwarg
# 4. Include paths added with the `include_directories`: kwarg
#    Within this, the build dir takes precedence over the source dir
# 5. Include paths added via `include_directories:` of internal deps
#    Within this, the build dir takes precedence over the source dir

# Custom target dir with a built header
subdir('ctsub')
# Defines an internal dep
subdir('sub1')
# Defines a per-target include path
subdir('sub2')
# Directory for `c_args:` include path
subdir('sub3')
# The directory where the target resides
subdir('sub4')

# Test that the order in which internal dependencies are specified is
# preserved. This is needed especially when subprojects get involved and
# multiple build-root config.h files exist, and we must be sure that the
# correct one is found: https://github.com/mesonbuild/meson/issues/1495
# correctinc, dep and wronginc come from the subdirs included above.
f = executable('somefxe', 'sub4/main.c',
  dependencies : [correctinc, dep, wronginc])

# 'e' is defined in sub4/meson.build.
test('eh', e)
test('oh', f)
--- /dev/null
+#error "sub1/main.h included"
--- /dev/null
+i = include_directories('.')
+l = shared_library('somelib', 'some.c')
+dep = declare_dependency(link_with : l,
+ include_directories : i)
--- /dev/null
+#if defined _WIN32 || defined __CYGWIN__
+ __declspec(dllexport)
+#endif
+int somefunc(void) {
+ return 1984;
+}
--- /dev/null
+#pragma once
+
+#if defined _WIN32 || defined __CYGWIN__
+ #define DLL_PUBLIC __declspec(dllimport)
+#else
+ #define DLL_PUBLIC
+#endif
+
+DLL_PUBLIC
+int somefunc(void);
--- /dev/null
+#error "sub2/main.h included"
--- /dev/null
+j = include_directories('.')
+wronginc = declare_dependency(include_directories : j)
--- /dev/null
+#error "sub3/main.h included"
--- /dev/null
+sub3 = meson.current_source_dir()
--- /dev/null
+/* Use the <> include notation to force searching in include directories */
+#include <main.h>
+
+int main(int argc, char *argv[]) {
+ if (somefunc() == 1984)
+ return 0;
+ return 1;
+}
--- /dev/null
+#pragma once
+
+#include "some.h"
--- /dev/null
+e = executable('someexe', 'main.c', ctfile,
+ c_args : ['-I' + sub3],
+ include_directories : j,
+ dependencies : dep)
+
+correctinc = declare_dependency(include_directories : include_directories('.'))
--- /dev/null
+int func();
+
+static int duplicate_func() {
+ return -4;
+}
+
+int main(int argc, char **argv) {
+ return duplicate_func() + func();
+}
--- /dev/null
+project('option override', 'c',
+ default_options : 'unity=on')
+
+executable('mustunity', 'one.c', 'two.c')
+executable('notunity', 'three.c', 'four.c',
+ override_options : ['unity=off'])
--- /dev/null
+static int hidden_func() {
+ return 0;
+}
--- /dev/null
+static int duplicate_func() {
+ return 4;
+}
+
+int func() {
+ return duplicate_func();
+}
--- /dev/null
+/*
+ * Requires a Unity build. Otherwise hidden_func is not specified.
+ */
+int main(int argc, char **argv) {
+ return hidden_func();
+}
--- /dev/null
+project('c++ pch test', 'cpp')
+exe = executable('prog', 'prog.cc', cpp_pch : ['pch/prog.hh', 'pch/prog_pch.cc'])
--- /dev/null
+#include<iostream>
--- /dev/null
+#if !defined(_MSC_VER)
+#error "This file is only for use with MSVC."
+#endif
+
+#include "prog.hh"
--- /dev/null
+void func() {
+ std::cout << "This is a function that fails to compile if iostream is not included."
+ << std::endl;
+}
+
+int main(int argc, char **argv) {
+ return 0;
+}
--- /dev/null
project('get define', 'c', 'cpp')

host_system = host_machine.system()

# Run every check for both languages so get_define() behaves identically on
# the C and C++ compiler objects.
foreach lang : ['c', 'cpp']
  cc = meson.get_compiler(lang)
  # Sanity-check a well-known platform macro first.
  if host_system == 'linux'
    d = cc.get_define('__linux__')
    assert(d == '1', '__linux__ value is @0@ instead of 1'.format(d))
  elif host_system == 'darwin'
    d = cc.get_define('__APPLE__')
    assert(d == '1', '__APPLE__ value is @0@ instead of 1'.format(d))
  elif host_system == 'windows'
    d = cc.get_define('_WIN32')
    assert(d == '1', '_WIN32 value is @0@ instead of 1'.format(d))
  elif host_system == 'cygwin'
    d = cc.get_define('__CYGWIN__')
    assert(d == '1', '__CYGWIN__ value is @0@ instead of 1'.format(d))
  elif host_system == 'haiku'
    d = cc.get_define('__HAIKU__')
    assert(d == '1', '__HAIKU__ value is @0@ instead of 1'.format(d))
  else
    error('Please report a bug and help us improve support for this platform')
  endif

  if cc.find_library('z', required : false).found()
    # When a C file containing #include <foo.h> is pre-processed and foo.h is
    # found in the compiler's default search path, GCC inserts an extra comment
    # between the delimiter and the define which causes a parsing error.
    # https://github.com/mesonbuild/meson/issues/1726
    ver = cc.get_define('ZLIB_VER_MAJOR', prefix : '#include <zlib.h>')
    assert(ver == '1', 'ZLIB_VER_MAJOR value is "@0@" instead of "1"'.format(ver))
  endif

  # Check that an undefined value is empty.
  have = cc.get_define('MESON_FAIL_VALUE')
  assert(have == '', 'MESON_FAIL_VALUE value is "@0@" instead of ""'.format(have))

  # This is used in the test_preprocessor_checks_CPPFLAGS() unit test.
  have = cc.get_define('MESON_TEST_DEFINE_VALUE')
  expect = get_option('MESON_TEST_DEFINE_VALUE')
  assert(have == expect, 'MESON_TEST_DEFINE_VALUE value is "@0@" instead of "@1@"'.format(have, expect))

  run_1665_test = false
  if meson.is_cross_build()
    # Can't use an empty array as a fallback here because of
    # https://github.com/mesonbuild/meson/issues/1481
    lang_args = meson.get_cross_property(lang + '_args', false)
    if lang_args != false
      # The cross file opts in by putting MESON_TEST_ISSUE_1665 in its
      # per-language compiler arguments.
      foreach lang_arg : lang_args
        if lang_arg.contains('MESON_TEST_ISSUE_1665')
          run_1665_test = true
        endif
      endforeach
    endif
  endif

  if run_1665_test
    have = cc.get_define('MESON_TEST_ISSUE_1665')
    assert(have == '1', 'MESON_TEST_ISSUE_1665 value is "@0@" instead of "1"'.format(have))
  endif
endforeach
--- /dev/null
+option('MESON_TEST_DEFINE_VALUE', type : 'string', value : '')
--- /dev/null
+#include <stdio.h>
+
+int get_retval(void);
+
+int main(int argc, char **argv) {
+ printf("C seems to be working.\n");
+ return get_retval();
+}
--- /dev/null
+#include <iostream>
+
+extern "C" {
+ int get_retval(void);
+ int get_cval(void);
+}
+
+int main(int argc, char **argv) {
+ std::cout << "C++ seems to be working." << std::endl;
+ return get_retval();
+}
--- /dev/null
project('c cpp and asm', 'c', 'cpp')

cpu = host_machine.cpu_family()
cc = meson.get_compiler('c')

supported_cpus = ['arm', 'x86', 'x86_64']

if not supported_cpus.contains(cpu)
  error('MESON_SKIP_TEST unsupported cpu:' + cpu)
endif

if meson.get_compiler('c').get_id() == 'msvc'
  error('MESON_SKIP_TEST MSVC can\'t compile assembly')
endif

# Platforms whose C ABI prefixes symbols with an underscore need the .S
# sources to decorate their symbols to match (see symbol-underscore.h).
if cc.symbols_have_underscore_prefix()
  add_project_arguments('-DMESON_TEST__UNDERSCORE_SYMBOL', language: 'c')
endif

# Cover every ordering of C, C++ and assembly sources within a target.
test('test-c-asm', executable('c-asm', ['main.c', 'retval-' + cpu + '.S']))
test('test-cpp-asm', executable('cpp-asm', ['main.cpp', 'retval-' + cpu + '.S']))
test('test-c-cpp-asm', executable('c-cpp-asm', ['somelib.c', 'main.cpp', 'retval-' + cpu + '.S']))
test('test-cpp-c-asm', executable('cpp-c-asm', ['main.cpp', 'somelib.c', 'retval-' + cpu + '.S']))
--- /dev/null
+#include "symbol-underscore.h"
+
+.text
+.globl SYMBOL_NAME(get_retval)
+# ifdef __linux__
+.type get_retval, %function
+#endif
+
+SYMBOL_NAME(get_retval):
+ mov r0, #0
+ mov pc, lr
--- /dev/null
+#include "symbol-underscore.h"
+
+.text
+.globl SYMBOL_NAME(get_retval)
+/* Only supported on Linux with GAS */
+# ifdef __linux__
+.type get_retval, %function
+#endif
+
+SYMBOL_NAME(get_retval):
+ xorl %eax, %eax
+ retl
--- /dev/null
+#include "symbol-underscore.h"
+
+.text
+.globl SYMBOL_NAME(get_retval)
+# ifdef __linux__
+.type get_retval, %function
+#endif
+
+SYMBOL_NAME(get_retval):
+ xorl %eax, %eax
+ retq
--- /dev/null
+int get_cval (void) {
+ return 0;
+}
--- /dev/null
+#if defined(MESON_TEST__UNDERSCORE_SYMBOL)
+# define SYMBOL_NAME(name) _##name
+#else
+# define SYMBOL_NAME(name) name
+#endif
--- /dev/null
+#define INTSIZE @INTSIZE@
+#define FOOBAR_IN_CONFIG_H @FOOBAR@
--- /dev/null
+#ifndef __FOOBAR_H__
+#define __FOOBAR_H__
+
+#define FOOBAR_IN_FOOBAR_H 10
+
+#endif /*__FOOBAR_H__*/
--- /dev/null
project('compute int', 'c', 'cpp')

inc = include_directories('.')

# Test with C
cc = meson.get_compiler('c')

# low/high/guess bound the search compute_int() uses when the value cannot
# be obtained by running a program (e.g. cross builds).
intsize = cc.compute_int('sizeof(int)', low : 1, high : 16, guess : 4)
foobar = cc.compute_int('FOOBAR_IN_FOOBAR_H', prefix : '#include "foobar.h"', include_directories : inc)

# Bake the computed values into a header and a source, then verify them at
# run time (prog.c.in compares against the real compile-time values).
cd = configuration_data()
cd.set('INTSIZE', intsize)
cd.set('FOOBAR', foobar)
cd.set('CONFIG', 'config.h')
configure_file(input : 'config.h.in', output : 'config.h', configuration : cd)
s = configure_file(input : 'prog.c.in', output : 'prog.c', configuration : cd)

e = executable('prog', s)
test('compute int test', e)

# Test with C++
cpp = meson.get_compiler('cpp')

intsize = cpp.compute_int('sizeof(int)')
foobar = cpp.compute_int('FOOBAR_IN_FOOBAR_H', prefix : '#include "foobar.h"', include_directories : inc)

cdpp = configuration_data()
cdpp.set('INTSIZE', intsize)
cdpp.set('FOOBAR', foobar)
cdpp.set('CONFIG', 'config.hpp')
configure_file(input : 'config.h.in', output : 'config.hpp', configuration : cdpp)
spp = configure_file(input : 'prog.c.in', output : 'prog.cc', configuration : cdpp)

epp = executable('progpp', spp)
test('compute int test c++', epp)
--- /dev/null
/* @CONFIG@ is replaced by configure_file() with "config.h" or "config.hpp". */
#include "@CONFIG@"
#include <stdio.h>
#include <wchar.h>
#include "foobar.h"

/* Compare the values Meson computed at configure time (from config.h.in)
 * against the real compile-time values. */
int main(int argc, char **argv) {
    if(INTSIZE != sizeof(int)) {
        fprintf(stderr, "Mismatch: computed int size %d, actual size %d.\n", INTSIZE, (int)sizeof(int));
        return 1;
    }
    if(FOOBAR_IN_CONFIG_H != FOOBAR_IN_FOOBAR_H) {
        fprintf(stderr, "Mismatch: computed int %d, should be %d.\n", FOOBAR_IN_CONFIG_H, FOOBAR_IN_FOOBAR_H);
        return 1;
    }
    return 0;
}
--- /dev/null
project('custom target object output', 'c')

comp = find_program('obj_generator.py')

if host_machine.system() == 'windows'
  outputname = '@BASENAME@.obj'
else
  outputname = '@BASENAME@.o'
endif

# Take the last element of the compiler command array -- presumably the
# compiler binary itself, skipping any wrapper entries. TODO confirm.
cc = meson.get_compiler('c').cmd_array().get(-1)

subdir('objdir')
subdir('progdir')

# 'e' is defined in progdir/meson.build.
test('objgen', e)
--- /dev/null
#!/usr/bin/env python3

# Mimic a binary that generates an object file (e.g. windres).

import sys, subprocess


def compile_command(compiler, input_file, output_file):
    """Return the argv list compiling *input_file* into *output_file*.

    Compilers whose name ends in 'cl' (MSVC's cl, clang-cl) take the
    MSVC-style /Fo and /c flags; everything else gets Unix-style -c/-o.
    """
    if compiler.endswith('cl'):
        return [compiler, '/nologo', '/MDd', '/Fo' + output_file, '/c', input_file]
    return [compiler, '-c', input_file, '-o', output_file]


if __name__ == '__main__':
    if len(sys.argv) != 4:
        print(sys.argv[0], 'compiler input_file output_file')
        sys.exit(1)
    compiler = sys.argv[1]
    ifile = sys.argv[2]
    ofile = sys.argv[3]
    # Propagate the compiler's exit status as our own.
    sys.exit(subprocess.call(compile_command(compiler, ifile, ofile)))
--- /dev/null
+# Generate an object file manually.
+object = custom_target('object',
+ input : 'source.c',
+ output : outputname,
+ command : [comp, cc, '@INPUT@', '@OUTPUT@'])
--- /dev/null
+int func1_in_obj() {
+ return 0;
+}
--- /dev/null
+e = executable('prog', 'prog.c', object)
--- /dev/null
+int func1_in_obj();
+
+int main(int argc, char **argv) {
+ return func1_in_obj();
+}
--- /dev/null
+project('subdir with empty meson.build test', 'c')
+subdir('subdir')
--- /dev/null
+stlib = static_library('allofme', '../libfile.c')
--- /dev/null
+#define BUILDING_DLL
+
+#include<mylib.h>
+
+int func2() {
+ return 42;
+}
--- /dev/null
+exe = executable('prog', '../prog.c',
+ link_with : dylib)
--- /dev/null
+exe2 = executable('prog2', '../prog.c', link_with : dylib2)
--- /dev/null
+#define BUILDING_DLL
+
+#include<mylib.h>
+
+int func1() {
+ return 42;
+}
--- /dev/null
project('whole archive', 'c')

# Make mylib.h (at the source root) visible from every subdir.
add_project_arguments('-I' + meson.source_root(), language : 'c')

cc = meson.get_compiler('c')

if cc.get_id() == 'msvc'
  if cc.version().version_compare('<19')
    error('MESON_SKIP_TEST link_whole only works on VS2015 or newer.')
  endif
endif

# link_whole of a static lib into a shared lib, then link an exe against it.
subdir('allofme')
subdir('shlib')
subdir('exe')

test('prog', exe)

# link_whole only
subdir('stlib')
subdir('wholeshlib')
subdir('exe2')

test('prog2', exe2)
--- /dev/null
#pragma once

/* Both funcs here for simplicity. */

/* Windows/Cygwin need an explicit dllexport when building the DLL and
 * dllimport when consuming it; elsewhere use default ELF visibility when
 * the compiler supports the attribute. */
#if defined _WIN32 || defined __CYGWIN__
#if defined BUILDING_DLL
  #define DLL_PUBLIC __declspec(dllexport)
#else
  #define DLL_PUBLIC __declspec(dllimport)
#endif
#else
  #if defined __GNUC__
    #define DLL_PUBLIC __attribute__ ((visibility("default")))
  #else
    #pragma message ("Compiler does not support symbol visibility.")
    #define DLL_PUBLIC
  #endif
#endif

int DLL_PUBLIC func1();
int DLL_PUBLIC func2();
--- /dev/null
+#include<mylib.h>
+
+int main(int argc, char **argv) {
+ return func1() - func2();
+}
--- /dev/null
+# Nothing in dylib.c uses func1, so the linker would throw it
+# away and thus linking the exe would fail.
+dylib = shared_library('shlib', '../dylib.c',
+ link_whole : stlib)
--- /dev/null
+static = static_library('static', '../dylib.c')
--- /dev/null
+dylib2 = shared_library('link_whole', link_whole : [stlib, static])
--- /dev/null
+/* Copyright © 2017 Dylan Baker
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "foo.h"
+
+int forty_two(void) {
+ return 42;
+}
--- /dev/null
+/* Copyright © 2017 Dylan Baker
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
#include <vector>

const int cnums[] = {0, 61};

/* Build a std::vector from a C array; N is deduced from the array bound. */
template<typename T, int N>
std::vector<T> makeVector(const T (&data)[N])
{
    return std::vector<T>(data, data+N);
}

namespace {
    std::vector<int> numbers = makeVector(cnums);
}

/* C linkage so the C side of the test can call into this C++ TU;
 * returns the second element (61). */
extern "C" int six_one(void) {
    return numbers[1];
}
--- /dev/null
+/* Copyright © 2017 Dylan Baker
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+int forty_two(void);
--- /dev/null
+/* Copyright © 2017 Dylan Baker
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+int six_one(void);
+
+#ifdef __cplusplus
+}
+#endif
--- /dev/null
+/* Copyright © 2017 Dylan Baker
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "foo.h"
+#include "foo.hpp"
+#include "foobar.h"
+
+void mynumbers(int nums[]) {
+ nums[0] = forty_two();
+ nums[1] = six_one();
+}
--- /dev/null
+/* Copyright © 2017 Dylan Baker
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+void mynumbers(int nums[]);
--- /dev/null
+# Copyright © 2017 Dylan Baker
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
project('C and C++ static link test', ['c', 'cpp'])

# Verify that adding link arguments works.
# (Empty argument strings are deliberately passed and must be accepted.)
add_global_link_arguments('', language : 'c')
add_project_link_arguments('', language : 'c')

libc = static_library('cfoo', ['foo.c', 'foo.h'])

# Test that linking C libs to external static C++ libs uses the C++ linker
# Since we can't depend on the test system to provide this, we create one
# ourselves at configure time and then 'find' it with cxx.find_library().
cxx = meson.get_compiler('cpp')

if cxx.get_id() == 'msvc'
  compile_cmd = ['/c', '@INPUT@', '/Fo@OUTPUT@']
  stlib_cmd = ['lib', '/OUT:@OUTPUT@', '@INPUT@']
else
  compile_cmd = ['-c', '-fPIC', '@INPUT@', '-o', '@OUTPUT@']
  stlib_cmd = ['ar', 'csr', '@OUTPUT@', '@INPUT@']
endif

# Abuse configure_file() to compile and archive at configure time, so the
# external static C++ lib exists before the build starts.
foo_cpp_o = configure_file(
  input : 'foo.cpp',
  output : 'foo.cpp.o',
  command : cxx.cmd_array() + compile_cmd)

configure_file(
  input : foo_cpp_o,
  output : 'libstcppext.a',
  command : stlib_cmd)

libstcppext = cxx.find_library('stcppext', dirs : meson.current_build_dir())

libfooext = shared_library(
  'fooext',
  ['foobar.c', 'foobar.h'],
  link_with : libc,
  dependencies : libstcppext,
)

# Test that linking C libs to internal static C++ libs uses the C++ linker
libcpp = static_library('cppfoo', ['foo.cpp', 'foo.hpp'])

libfoo = shared_library(
  'foo',
  ['foobar.c', 'foobar.h'],
  link_with : [libc, libcpp],
)

# Test that link_whole is also honored
#
# VS2010 lacks the /WHOLEARCHIVE option that later versions of MSVC support, so
# don't run this tests on that backend.
if not (cxx.get_id() == 'msvc' and cxx.version().version_compare('<19'))
  libfoowhole = shared_library(
    'foowhole',
    ['foobar.c', 'foobar.h'],
    link_whole : [libc, libcpp],
  )
endif

# Test sublinking (linking C and C++, then linking that to C)
libfoo_static = static_library(
  'foo_static',
  ['foobar.c', 'foobar.h'],
  link_with : [libc, libcpp],
)

libsub = shared_library(
  'sub',
  ['sub.c', 'sub.h'],
  link_with : libfoo_static,
)

if not (cxx.get_id() == 'msvc' and cxx.version().version_compare('<19'))
  libsubwhole = shared_library(
    'subwhole',
    ['sub.c', 'sub.h'],
    link_whole : libfoo_static,
  )
endif

# Test that it really is recursive
libsub_static = static_library(
  'sub_static',
  ['sub.c', 'sub.h'],
  link_with : libfoo_static,
)

libsubsub = shared_library(
  'subsub',
  ['dummy.c'],
  link_with : libsub_static,
)
--- /dev/null
+/* Copyright © 2017 Dylan Baker
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "sub.h"
+
+float a_half(void) {
+ return .5;
+}
--- /dev/null
+/* Copyright © 2017 Dylan Baker
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+float a_half(void);
--- /dev/null
#!/usr/bin/env python3
"""Verify that MESONINTROSPECT is set and names an existing executable.

Used as a run_command() script and as a postconf/install script. When the
first argument is truthy, print the introspection binary's path (no
trailing newline) so the caller can consume it.
"""

import os
import sys
import shlex

do_print = False

if len(sys.argv) > 1:
    # bool(sys.argv[1]) would be True for ANY non-empty string, including
    # '0' -- compare against the known falsy spellings instead.
    do_print = sys.argv[1].lower() not in ('', '0', 'false')

if 'MESONINTROSPECT' not in os.environ:
    raise RuntimeError('MESONINTROSPECT not found')

mesonintrospect = os.environ['MESONINTROSPECT']

# MESONINTROSPECT may contain an interpreter plus script plus flags, so
# split it the way a shell would rather than treating it as one path.
introspect_arr = shlex.split(mesonintrospect)

some_executable = introspect_arr[0]

if not os.path.isfile(some_executable):
    raise RuntimeError('{!r} does not exist'.format(mesonintrospect))

if do_print:
    print(some_executable, end='')
--- /dev/null
project('mesonintrospect from scripts', 'c')

python = import('python3').find_python()

# check_env.py validates MESONINTROSPECT and, given a truthy argument,
# prints the introspection binary's path for find_program() to verify.
ret = run_command(python, ['check_env.py', '1'])
if ret.returncode() == 0
  find_program(ret.stdout())
else
  message(ret.stdout())
  message(ret.stderr())
endif

# The same script must also see MESONINTROSPECT when run as a postconf
# script and as an install script.
meson.add_postconf_script('check_env.py')
meson.add_install_script('check_env.py')
--- /dev/null
#!/usr/bin/env python3
"""Emit a pair of outputs (<namespace>.h and <namespace>.sh) into the given
output directory, mimicking a tool that produces multiple outputs."""

import sys, os

if len(sys.argv) != 3:
    print(sys.argv[0], '<namespace>', '<output dir>')
    # Without this, execution falls through and crashes with an IndexError
    # on sys.argv[1] instead of failing cleanly after the usage message.
    sys.exit(1)

name = sys.argv[1]
odir = sys.argv[2]

with open(os.path.join(odir, name + '.h'), 'w') as f:
    f.write('int func();\n')
with open(os.path.join(odir, name + '.sh'), 'w') as f:
    f.write('#!/bin/bash')
--- /dev/null
+usr/include/diff.h
+usr/include/first.h
+usr/bin/diff.sh
+usr/bin/second.sh
+opt/same.h
+opt/same.sh
--- /dev/null
project('multiple outputs install', 'c')

gen = find_program('generator.py')

# Each output can be installed to its own directory ...
custom_target('different-install-dirs',
  output : ['diff.h', 'diff.sh'],
  command : [gen, 'diff', '@OUTDIR@'],
  install : true,
  install_dir : [join_paths(get_option('prefix'), get_option('includedir')),
                 join_paths(get_option('prefix'), get_option('bindir'))])

# ... or all outputs can share a single directory ...
custom_target('same-install-dir',
  output : ['same.h', 'same.sh'],
  command : [gen, 'same', '@OUTDIR@'],
  install : true,
  install_dir : '/opt')

# ... and a 'false' entry in the list skips installing that output.
custom_target('only-install-first',
  output : ['first.h', 'first.sh'],
  command : [gen, 'first', '@OUTDIR@'],
  install : true,
  install_dir : [join_paths(get_option('prefix'), get_option('includedir')), false])

custom_target('only-install-second',
  output : ['second.h', 'second.sh'],
  command : [gen, 'second', '@OUTDIR@'],
  install : true,
  install_dir : [false, join_paths(get_option('prefix'), get_option('bindir'))])
--- /dev/null
#!/usr/bin/env python3
"""Check that special characters survive the build system's quoting.

Every name=value argument is compared against the expected table below; a
lone argument (no '=') names the output file, which receives 'Success!'
once all comparisons pass.
"""

import sys

expected = {
    'newline': '\n',
    'dollar': '$',
    'colon': ':',
    'space': ' ',
    'multi1': ' ::$$ ::$$',
    'multi2': ' ::$$\n\n \n\n::$$',
}

output = None

for arg in sys.argv[1:]:
    if '=' not in arg:
        # No key=value separator: this argument is the output path.
        output = arg
        continue
    name, value = arg.split('=', 1)
    if expected[name] != value:
        raise RuntimeError('{!r} is {!r} but should be {!r}'.format(name, value, expected[name]))

if output is not None:
    with open(output, 'w') as f:
        f.write('Success!')
--- /dev/null
+usr/share/result
+usr/share/result2
--- /dev/null
+project('ninja special characters', 'c')
+
+python = import('python3').find_python()
+
+# Without newlines, this should appear directly in build.ninja.
+# check_quoting.py raises if any value arrives mangled.
+gen = custom_target('gen',
+  command : [
+    python,
+    files('check_quoting.py'),
+    'dollar=$',
+    'colon=:',
+    'space= ',
+    '''multi1= ::$$ ::$$''',
+    '@OUTPUT@'],
+  output : 'result',
+  install : true,
+  install_dir : get_option('datadir'))
+
+# With newlines, this should go through the exe wrapper.
+gen2 = custom_target('gen2',
+  command : [
+    python,
+    files('check_quoting.py'),
+    '''newline=
+''',
+    'dollar=$',
+    'colon=:',
+    'space= ',
+    '''multi2= ::$$
+
+
+
+::$$''',
+    '@OUTPUT@'],
+  output : 'result2',
+  install : true,
+  install_dir : get_option('datadir'))
--- /dev/null
+/* NOTE: <stdio.h> is deliberately NOT included here.  This file only
+ * compiles if the precompiled header (pch/func.h) provides it, which is
+ * exactly what the enclosing PCH test verifies. */
+void tmp_func() {
+    fprintf(stdout, "This is a function that fails if stdio is not #included.\n");
+}
+
+int cfunc() {
+    return 0;
+}
--- /dev/null
+// cfunc() is compiled as C (func.c); suppress C++ name mangling for it.
+extern "C" int cfunc();
+
+// NOTE: <iostream> is deliberately not included; the C++ precompiled
+// header (pch/main.h) must provide it for this to compile.
+void func() {
+    std::cout << "This is a function that fails to compile if iostream is not included."
+              << std::endl;
+}
+
+int main(int argc, char **argv) {
+    return cfunc();
+}
--- /dev/null
+project('mixed C and C++ pch test', 'cpp', 'c')
+
+# Each language gets its own PCH; the sources intentionally omit the
+# includes that the respective PCH must supply.
+exe = executable('prog', 'main.cc', 'func.c',
+  c_pch : ['pch/func.h', 'pch/func_pch.c'],
+  cpp_pch : ['pch/main_pch.cc', 'pch/main.h'])
--- /dev/null
+#include<stdio.h>
--- /dev/null
+#include"func.h"
--- /dev/null
+#include<iostream>
--- /dev/null
+#include"main.h"
--- /dev/null
+project('test', 'c')
+
+# Nested (and empty) lists must be flattened by Meson.
+libxserver_dri3 = []
+libxserver = [ libxserver_dri3 ]
+
+# link_with receives a list containing an empty nested list.
+executable('Xephyr', 'xephyr.c', link_with: [ libxserver ])
+
+# link_args receives a list of empty lists; must flatten to nothing.
+executable('Zephyr', 'xephyr.c', link_args: [[], []])
--- /dev/null
+/* Minimal test program: does nothing and exits successfully. */
+int main(void) {
+    return 0;
+}
--- /dev/null
+# Copy helper used as a custom_target command: copies argv[1] to argv[2].
+import shutil
+import sys
+
+if __name__ == '__main__':
+    if len(sys.argv) != 3:
+        raise Exception('Requires exactly 2 args')
+    # copy2 also preserves file metadata (e.g. timestamps).
+    shutil.copy2(sys.argv[1], sys.argv[2])
--- /dev/null
+project('test', 'c')
+
+mod_py = import('python3')
+python = mod_py.find_python()
+
+# A script passed via @INPUT0@ copies @INPUT1@ to @OUTPUT@; checks that
+# files() results can be used as positional inputs to a custom_target.
+test_target = custom_target(
+  'test_target',
+  input : [files('gen.py'), files('foo')],
+  output : 'bar',
+  command : [python, '@INPUT0@', '@INPUT1@', '@OUTPUT@'],
+  build_by_default : true,
+)
--- /dev/null
+#include <stdio.h>
+
+/* Open the path baked in at compile time via the DEPFILE macro to prove
+ * that the link_depends custom target ran before this program. */
+int main(void) {
+    const char *fn = DEPFILE;
+    FILE *f = fopen(fn, "r");
+    if (!f) {
+        printf("could not open %s", fn);
+        return 1;
+    }
+    printf("successfully opened %s", fn);
+    fclose(f); /* was leaked before: close the handle on the success path */
+    return 0;
+}
--- /dev/null
+#!/usr/bin/env python3
+# Write a placeholder file at the path given as the first argument.
+import sys
+
+with open(sys.argv[1], 'w') as f:
+    print('# this file does nothing', file=f)
--- /dev/null
+project('link_depends_custom_target', 'c')
+
+if meson.backend().startswith('vs')
+  # FIXME: Broken on the VS backends
+  error('MESON_SKIP_TEST see https://github.com/mesonbuild/meson/issues/1799')
+endif
+
+cmd = find_program('make_file.py')
+
+# Target whose only job is to produce the file the executable's link step
+# declares a dependency on.
+dep_file = custom_target('gen_dep',
+  command: [cmd, '@OUTPUT@'],
+  output: 'dep_file')
+
+# Bake the generated file's path into the binary so it can verify at run
+# time that the file exists.
+exe = executable('foo', 'foo.c',
+  link_depends: dep_file,
+  c_args: ['-DDEPFILE="' + dep_file.full_path()+ '"'])
+
+# check that dep_file exists, which means that link_depends target ran
+test('runtest', exe)
--- /dev/null
+/* configure_file template: @DEPENDENCY@ and @LIBTYPE@ are substituted per
+ * permutation.  The generated library forwards to its dependency's value
+ * function. */
+#include "../lib.h"
+
+int get_@DEPENDENCY@dep_value (void);
+
+SYMBOL_EXPORT
+int get_@LIBTYPE@@DEPENDENCY@dep_value (void) {
+    return get_@DEPENDENCY@dep_value ();
+}
--- /dev/null
+/* configure_file template: @LIBTYPE@, @DEPENDENCY@ and @VALUE@ are
+ * substituted per permutation.  Checks the generated library returns the
+ * value configured for its dependency chain. */
+#include <stdio.h>
+
+#include "../lib.h"
+
+SYMBOL_IMPORT int get_@LIBTYPE@@DEPENDENCY@dep_value (void);
+
+int main(int argc, char *argv[]) {
+    int val;
+
+    val = get_@LIBTYPE@@DEPENDENCY@dep_value ();
+    if (val != @VALUE@) {
+        printf("@LIBTYPE@@DEPENDENCY@ was %i instead of @VALUE@\n", val);
+        return -1;
+    }
+    return 0;
+}
--- /dev/null
+dep3_libs = []
+
+# Permutate all combinations of shared and static libraries up to three levels
+# executable -> shared -> static -> shared (etc)
+foreach dep2 : ['sh', 'st']
+  foreach dep1 : ['sh', 'st']
+    foreach libtype : ['sh', 'st']
+      name = libtype + dep1 + dep2
+      # The innermost dependency determines the expected return value.
+      if dep2 == 'sh'
+        libret = 1
+      elif dep2 == 'st'
+        libret = 2
+      else
+        error('Unknown dep2 "@0@"'.format(dep2))
+      endif
+
+      if libtype == 'sh'
+        target = 'shared_library'
+        build_args = []
+      elif libtype == 'st'
+        target = 'static_library'
+        build_args = ['-DMESON_STATIC_BUILD']
+      else
+        error('Unknown libtype "@0@"'.format(libtype))
+      endif
+
+      # Fill the lib.c.in / main.c.in templates for this permutation.
+      cdata = configuration_data()
+      cdata.set('DEPENDENCY', dep1 + dep2)
+      cdata.set('LIBTYPE', libtype)
+      cdata.set('VALUE', libret)
+
+      lib_c = configure_file(input : 'lib.c.in',
+        output : name + '-lib.c',
+        configuration : cdata)
+      # The 2nd-order dep libraries were stored in variables named
+      # '<dep1><dep2>dep' by the sibling subdirs.
+      dep = get_variable(dep1 + dep2 + 'dep')
+      dep3_lib = build_target(name, lib_c, link_with : dep,
+        target_type : target,
+        c_args : build_args)
+      dep3_libs += [dep3_lib]
+
+      main_c = configure_file(input : 'main.c.in',
+        output : name + '-main.c',
+        configuration : cdata)
+      dep3_bin = executable(name, main_c, link_with : dep3_lib,
+        c_args : build_args)
+      test(name + 'test', dep3_bin)
+    endforeach
+  endforeach
+endforeach
--- /dev/null
+int get_st2_prop (void);
+int get_st3_prop (void);
+
+/* Value depends on symbols from both other libs in the cycle. */
+int get_st1_value (void) {
+    int sum = get_st2_prop ();
+    sum += get_st3_prop ();
+    return sum;
+}
--- /dev/null
+int get_st1_prop (void);
+int get_st3_prop (void);
+
+/* Value depends on symbols from both other libs in the cycle. */
+int get_st2_value (void) {
+    int sum = get_st1_prop ();
+    sum += get_st3_prop ();
+    return sum;
+}
--- /dev/null
+int get_st1_prop (void);
+int get_st2_prop (void);
+
+/* Value depends on symbols from both other libs in the cycle. */
+int get_st3_value (void) {
+    int sum = get_st1_prop ();
+    sum += get_st2_prop ();
+    return sum;
+}
--- /dev/null
+#include <stdio.h>
+
+#include "../lib.h"
+
+int get_st1_value (void);
+int get_st2_value (void);
+int get_st3_value (void);
+
+/* Exercises three static libraries that depend on each other in a cycle;
+ * each value is the sum of the other two libs' property constants. */
+int main(int argc, char *argv[]) {
+    int val;
+
+    val = get_st1_value ();
+    if (val != 5) {
+        printf("st1 value was %i instead of 5\n", val);
+        return -1;
+    }
+    val = get_st2_value ();
+    if (val != 4) {
+        printf("st2 value was %i instead of 4\n", val);
+        return -2;
+    }
+    val = get_st3_value ();
+    if (val != 3) {
+        printf("st3 value was %i instead of 3\n", val);
+        return -3;
+    }
+    return 0;
+}
--- /dev/null
+# Three static libs with circular dependencies; linking all of them needs
+# --start-group/--end-group (or repetition) with GNU ld.
+st1 = static_library('st1', 'lib1.c', 'prop1.c')
+st2 = static_library('st2', 'lib2.c', 'prop2.c')
+st3 = static_library('st3', 'lib3.c', 'prop3.c')
+
+test('circular', executable('circular', 'main.c', link_with : [st1, st2, st3]))
--- /dev/null
+/* Property constant contributed to the circular-link test sums. */
+int get_st1_prop (void) { return 1; }
--- /dev/null
+/* Property constant contributed to the circular-link test sums. */
+int get_st2_prop (void) { return 2; }
--- /dev/null
+/* Property constant contributed to the circular-link test sums. */
+int get_st3_prop (void) { return 3; }
--- /dev/null
+#include "../lib.h"
+
+/* Defined in stobuilt.c, which is compiled directly into the executable. */
+int get_builto_value (void);
+
+SYMBOL_EXPORT
+int get_stodep_value (void) {
+    return get_builto_value ();
+}
--- /dev/null
+# Test https://github.com/mesonbuild/meson/issues/2096
+# Note that removing 'shnodep' from link_with: makes the error go away because
+# then it is added after the static library is added to the link command.
+test('shared-static', executable('shstexe', 'shstmain.c', link_with : [shnodep, stshdep]))
+
+# Static library that needs a symbol defined in an object file. This already
+# works, but good to add a test case early.
+stodep = static_library('stodep', 'libsto.c')
+test('stodep', executable('stodep', 'stomain.c', 'stobuilt.c', link_with : stodep))
--- /dev/null
+#include <stdio.h>
+
+#include "../lib.h"
+
+int get_stshdep_value (void);
+
+/* Verify the static-lib-with-shared-lib-dep edge case (issue #2096). */
+int main(int argc, char *argv[]) {
+    int val;
+
+    val = get_stshdep_value ();
+    if (val != 1) {
+        /* Fixed copy-pasted message: this checks stshdep, not st1. */
+        printf("stshdep value was %i instead of 1\n", val);
+        return -1;
+    }
+    return 0;
+}
--- /dev/null
+#include "../lib.h"
+
+/* Compiled straight into the 'stodep' executable; the static library
+ * libsto.c resolves this symbol from the executable's object files. */
+SYMBOL_EXPORT
+int get_builto_value (void) {
+    return 1;
+}
--- /dev/null
+#include <stdio.h>
+
+#include "../lib.h"
+
+int get_stodep_value (void);
+
+/* Verify a static library can use a symbol from the executable's objects. */
+int main(int argc, char *argv[]) {
+    int val;
+
+    val = get_stodep_value ();
+    if (val != 1) {
+        /* Fixed copy-pasted message: this checks stodep, not st1. */
+        printf("stodep value was %i instead of 1\n", val);
+        return -1;
+    }
+    return 0;
+}
--- /dev/null
+/* Portable export/import annotations for the recursive-linking tests.
+ * On Windows, shared builds need dllexport/dllimport; static builds
+ * (MESON_STATIC_BUILD) must define both to nothing. */
+#if defined _WIN32
+  #ifdef MESON_STATIC_BUILD
+    #define SYMBOL_EXPORT
+    #define SYMBOL_IMPORT
+  #else
+    #define SYMBOL_IMPORT __declspec(dllimport)
+    #define SYMBOL_EXPORT __declspec(dllexport)
+  #endif
+#else
+  /* ELF platforms need no import decoration; exports rely on default
+   * visibility where the compiler supports it. */
+  #define SYMBOL_IMPORT
+  #if defined __GNUC__
+    #define SYMBOL_EXPORT __attribute__ ((visibility("default")))
+  #else
+    #pragma message ("Compiler does not support symbol visibility.")
+    #define SYMBOL_EXPORT
+  #endif
+#endif
--- /dev/null
+#include <stdio.h>
+
+#include "lib.h"
+
+int get_stnodep_value (void);
+int get_stshdep_value (void);
+int get_ststdep_value (void);
+SYMBOL_IMPORT int get_shnodep_value (void);
+SYMBOL_IMPORT int get_shshdep_value (void);
+SYMBOL_IMPORT int get_shstdep_value (void);
+
+/* Links against every shared/static dependency permutation; shared-rooted
+ * chains return 1, static-rooted chains return 2. */
+int main(int argc, char *argv[]) {
+    int val;
+
+    val = get_shnodep_value ();
+    if (val != 1) {
+        printf("shnodep was %i instead of 1\n", val);
+        return -1;
+    }
+    val = get_stnodep_value ();
+    if (val != 2) {
+        printf("stnodep was %i instead of 2\n", val);
+        return -2;
+    }
+    val = get_shshdep_value ();
+    if (val != 1) {
+        printf("shshdep was %i instead of 1\n", val);
+        return -3;
+    }
+    val = get_shstdep_value ();
+    if (val != 2) {
+        printf("shstdep was %i instead of 2\n", val);
+        return -4;
+    }
+    val = get_stshdep_value ();
+    if (val != 1) {
+        /* Fixed copy-pasted message: this branch checks stshdep. */
+        printf("stshdep was %i instead of 1\n", val);
+        return -5;
+    }
+    val = get_ststdep_value ();
+    if (val != 2) {
+        printf("ststdep was %i instead of 2\n", val);
+        return -6;
+    }
+    return 0;
+}
--- /dev/null
+project('recursive dependencies', 'c')
+
+# Test that you can link a shared executable to:
+# - A shared library with no other deps
+subdir('shnodep')
+# - A static library with no other deps
+subdir('stnodep')
+# - A shared library with a shared library dep
+subdir('shshdep')
+# - A shared library with a static library dep
+subdir('shstdep')
+# - A static library with a shared library dep
+subdir('stshdep')
+# - A static library with a static library dep
+subdir('ststdep')
+
+# One executable linking all four dependent permutations at once.
+test('alldeps',
+  executable('alldeps', 'main.c',
+    link_with : [shshdep, shstdep, ststdep, stshdep]))
+
+# More combinations of static and shared libraries
+subdir('3rdorderdeps')
+
+# Circular dependencies between static libraries
+# This requires the use of --start/end-group with GNU ld
+subdir('circular')
+
+# Various edge cases that have been reported
+subdir('edge-cases')
--- /dev/null
+#include "../lib.h"
+
+/* Shared libraries in this test suite return 1. */
+SYMBOL_EXPORT
+int get_shnodep_value (void) {
+    return 1;
+}
--- /dev/null
+shnodep = shared_library('shnodep', 'lib.c', version: '0.0.0')
--- /dev/null
+#include "../lib.h"
+
+/* Provided by the shnodep shared library. */
+int get_shnodep_value (void);
+
+SYMBOL_EXPORT
+int get_shshdep_value (void) {
+    return get_shnodep_value ();
+}
--- /dev/null
+shshdep = shared_library('shshdep', 'lib.c', link_with : shnodep)
--- /dev/null
+#include "../lib.h"
+
+/* Provided by the stnodep static library. */
+int get_stnodep_value (void);
+
+SYMBOL_EXPORT
+int get_shstdep_value (void) {
+    return get_stnodep_value ();
+}
--- /dev/null
+shstdep = shared_library('shstdep', 'lib.c', link_with : stnodep)
--- /dev/null
+#include "../lib.h"
+
+/* Static libraries in this test suite return 2. */
+SYMBOL_EXPORT
+int get_stnodep_value (void) {
+    return 2;
+}
--- /dev/null
+# Static library; MESON_STATIC_BUILD makes lib.h skip dllexport decoration.
+stnodep = static_library('stnodep', 'lib.c',
+  c_args : '-DMESON_STATIC_BUILD')
--- /dev/null
+#include "../lib.h"
+
+/* Provided by the shnodep shared library. */
+int get_shnodep_value (void);
+
+SYMBOL_EXPORT
+int get_stshdep_value (void) {
+    return get_shnodep_value ();
+}
--- /dev/null
+# Static library depending on a shared library.
+stshdep = static_library('stshdep', 'lib.c', link_with : shnodep,
+  c_args : '-DMESON_STATIC_BUILD')
--- /dev/null
+#include "../lib.h"
+
+/* Provided by the stnodep static library. */
+int get_stnodep_value (void);
+
+SYMBOL_EXPORT
+int get_ststdep_value (void) {
+    return get_stnodep_value ();
+}
--- /dev/null
+# Static library depending on another static library.
+ststdep = static_library('ststdep', 'lib.c', link_with : stnodep,
+  c_args : '-DMESON_STATIC_BUILD')
--- /dev/null
+/* Exported so the executable in main/ can link against this library. */
+#if defined _WIN32 || defined __CYGWIN__
+__declspec(dllexport)
+#endif
+int fn(void) {
+    return -1;
+}
--- /dev/null
+extern int fn(void);
+
+/* Exit status is 1 + whatever fn() returns; with fn() == -1 that is 0. */
+int main() {
+    return 1 + fn();
+}
--- /dev/null
+# 'lib' was defined in the parent directory's meson.build.
+exe = executable('main', 'main.c', link_with : lib)
+test('stuff works', exe)
--- /dev/null
+project('lib@root', 'c')
+# Library defined at the project root, consumed from a subdirectory.
+lib = shared_library('lib', 'lib.c')
+subdir('main')
--- /dev/null
+#include<simdfuncs.h>
+
+/* Plain C implementation used when no SIMD instruction set is available. */
+void increment_fallback(float arr[4]) {
+    int idx = 0;
+    while (idx < 4) {
+        arr[idx] = arr[idx] + 1.0f;
+        idx++;
+    }
+}
--- /dev/null
+#pragma once
+
+/* Marker macro: simdchecker.c fails to compile unless this internal
+ * header (from include_directories) was the one picked up. */
+#define I_CAN_HAZ_SIMD
--- /dev/null
+project('simd', 'c')
+
+simd = import('unstable-simd')
+
+cc = meson.get_compiler('c')
+
+cdata = configuration_data()
+
+if not meson.is_cross_build() and host_machine.cpu_family() == 'arm' and cc.get_id() == 'clang'
+  message('Adding -march=armv7 because assuming that this build happens on Raspbian.')
+  message('Its Clang seems to be misconfigured and does not support NEON by default.')
+  add_project_arguments('-march=armv7', language : 'c')
+endif
+
+if cc.get_id() == 'msvc' and cc.version().version_compare('<17')
+  error('MESON_SKIP_TEST VS2010 produces broken binaries on x86.')
+endif
+
+# FIXME add [a, b] = function()
+# simd.check returns [libraries, configuration_data]: one static lib per
+# instruction set the compiler supports, plus HAVE_XXX defines.
+rval = simd.check('mysimds',
+  mmx : 'simd_mmx.c',
+  sse : 'simd_sse.c',
+  sse2 : 'simd_sse2.c',
+  sse3 : 'simd_sse3.c',
+  ssse3 : 'simd_ssse3.c',
+  sse41 : 'simd_sse41.c',
+  sse42 : 'simd_sse42.c',
+  avx : 'simd_avx.c',
+  avx2 : 'simd_avx2.c',
+  neon : 'simd_neon.c',
+  compiler : cc,
+  include_directories : include_directories('include'))
+
+simdlibs = rval[0]
+cdata.merge_from(rval[1])
+
+# Expose the HAVE_XXX results to the C sources.
+configure_file(output : 'simdconfig.h',
+  configuration : cdata)
+
+p = executable('simdtest', 'simdchecker.c', 'fallback.c',
+  link_with : simdlibs)
+
+test('simdtest', p)
+
--- /dev/null
+#include<simdheader.h>
+
+/* Compile-time proof that the project's internal include dir won. */
+#ifndef I_CAN_HAZ_SIMD
+#error The correct internal header was not used
+#endif
+
+#include<simdconfig.h>
+#include<simdfuncs.h>
+#include<stdint.h>
+
+#ifdef _MSC_VER
+#include<intrin.h>
+/* FIXME: no real runtime detection on MSVC; assume AVX is present. */
+int avx_available() {
+    return 1;
+}
+#else
+#include<immintrin.h>
+#include<cpuid.h>
+
+#ifdef __APPLE__
+/*
+ * Apple ships a broken __builtin_cpu_supports and
+ * some machines in the CI farm seem to be too
+ * old to have AVX so just always return 0 here.
+ */
+int avx_available() { return 0; }
+#else
+
+int avx_available() {
+    return __builtin_cpu_supports("avx");
+}
+#endif
+#endif
+
+/* Add 1.0 to each element, round-tripping through doubles so a 256-bit
+ * AVX register is actually exercised. */
+void increment_avx(float arr[4]) {
+    double darr[4];
+    darr[0] = arr[0];
+    darr[1] = arr[1];
+    darr[2] = arr[2];
+    darr[3] = arr[3];
+    __m256d val = _mm256_loadu_pd(darr);
+    __m256d one = _mm256_set1_pd(1.0);
+    __m256d result = _mm256_add_pd(val, one);
+    _mm256_storeu_pd(darr, result);
+    arr[0] = (float)darr[0];
+    arr[1] = (float)darr[1];
+    arr[2] = (float)darr[2];
+    arr[3] = (float)darr[3];
+}
--- /dev/null
+#include<simdconfig.h>
+#include<simdfuncs.h>
+#include<stdint.h>
+
+/*
+ * FIXME add proper runtime detection for VS.
+ */
+
+#ifdef _MSC_VER
+#include<intrin.h>
+int avx2_available() {
+    return 0;
+}
+#else
+#include<immintrin.h>
+#include<cpuid.h>
+
+/* __builtin_cpu_supports is unreliable on Apple; report unavailable. */
+#if defined(__APPLE__)
+int avx2_available() { return 0; }
+#else
+int avx2_available() {
+    return __builtin_cpu_supports("avx2");
+}
+#endif
+#endif
+
+/* Add 1.0 to each element via 256-bit double math; the permute is a no-op
+ * inserted purely so an AVX2-only instruction gets emitted. */
+void increment_avx2(float arr[4]) {
+    double darr[4];
+    darr[0] = arr[0];
+    darr[1] = arr[1];
+    darr[2] = arr[2];
+    darr[3] = arr[3];
+    __m256d val = _mm256_loadu_pd(darr);
+    __m256d one = _mm256_set1_pd(1.0);
+    __m256d result = _mm256_add_pd(val, one);
+    _mm256_storeu_pd(darr, result);
+    one = _mm256_permute4x64_pd(one, 66); /* A no-op, just here to use AVX2. */
+    arr[0] = (float)darr[0];
+    arr[1] = (float)darr[1];
+    arr[2] = (float)darr[2];
+    arr[3] = (float)darr[3];
+}
--- /dev/null
+#include<simdconfig.h>
+#include<simdfuncs.h>
+
+#include<stdint.h>
+
+#ifdef _MSC_VER
+#include<intrin.h>
+int mmx_available() {
+    return 1;
+}
+/* Contrary to MSDN documentation, MMX intrinsics
+ * just plain don't work.
+ */
+void increment_mmx(float arr[4]) {
+    arr[0]++;
+    arr[1]++;
+    arr[2]++;
+    arr[3]++;
+}
+#elif defined(__MINGW32__)
+int mmx_available() {
+    return 1;
+}
+/* MinGW does not seem to ship with MMX or it is broken.
+ */
+void increment_mmx(float arr[4]) {
+    arr[0]++;
+    arr[1]++;
+    arr[2]++;
+    arr[3]++;
+}
+#else
+#include<mmintrin.h>
+#include<cpuid.h>
+
+/* __builtin_cpu_supports is unreliable on Apple; assume MMX exists. */
+#if defined(__APPLE__)
+int mmx_available() { return 1; }
+#else
+int mmx_available() {
+    return __builtin_cpu_supports("mmx");
+}
+#endif
+void increment_mmx(float arr[4]) {
+    /* Super ugly but we know that values in arr are always small
+     * enough to fit in int16;
+     */
+    int i;
+    __m64 packed = _mm_set_pi16(arr[3], arr[2], arr[1], arr[0]);
+    __m64 incr = _mm_set1_pi16(1);
+    __m64 result = _mm_add_pi16(packed, incr);
+    /* Should be
+     * int64_t unpacker = _m_to_int64(result);
+     * but it does not exist on 32 bit platforms for some reason.
+     */
+    int64_t unpacker = (int64_t)(result);
+    _mm_empty();
+    /* Unpack the four 16-bit lanes back into floats, lowest lane first. */
+    for(i=0; i<4; i++) {
+        arr[i] = (float)(unpacker & ((1<<16)-1));
+        unpacker >>= 16;
+    }
+}
+
+#endif
--- /dev/null
+#include<simdconfig.h>
+#include<simdfuncs.h>
+
+#include<arm_neon.h>
+#include<stdint.h>
+
+int neon_available() {
+    return 1; /* Incorrect, but I don't know how to check this properly. */
+}
+
+/* Add 1.0 to each element using two 2-lane NEON float vectors. */
+void increment_neon(float arr[4]) {
+    float32x2_t a1, a2, one;
+    a1 = vld1_f32(arr);
+    a2 = vld1_f32(&arr[2]);
+    one = vdup_n_f32(1.0);
+    a1 = vadd_f32(a1, one);
+    a2 = vadd_f32(a2, one);
+    vst1_f32(arr, a1);
+    vst1_f32(&arr[2], a2);
+}
--- /dev/null
+#include<simdconfig.h>
+#include<simdfuncs.h>
+
+#ifdef _MSC_VER
+#include<intrin.h>
+int sse_available() {
+    return 1;
+}
+#else
+
+#include<xmmintrin.h>
+#include<cpuid.h>
+#include<stdint.h>
+
+#if defined(__APPLE__)
+int sse_available() { return 1; }
+#else
+int sse_available() {
+    return __builtin_cpu_supports("sse");
+}
+#endif
+#endif
+
+/* Add 1.0 to each element.  Use the unaligned load to match the unaligned
+ * store below: the caller's float[4] carries no 16-byte alignment
+ * guarantee, and _mm_load_ps on an unaligned address is undefined
+ * behaviour (it faults on most CPUs). */
+void increment_sse(float arr[4]) {
+    __m128 val = _mm_loadu_ps(arr);
+    __m128 one = _mm_set_ps1(1.0);
+    __m128 result = _mm_add_ps(val, one);
+    _mm_storeu_ps(arr, result);
+}
--- /dev/null
+#include<simdconfig.h>
+#include<simdfuncs.h>
+#include<emmintrin.h>
+
+#ifdef _MSC_VER
+int sse2_available() {
+    return 1;
+}
+
+#else
+#include<cpuid.h>
+#include<stdint.h>
+
+#if defined(__APPLE__)
+int sse2_available() { return 1; }
+#else
+int sse2_available() {
+    return __builtin_cpu_supports("sse2");
+}
+#endif
+#endif
+
+/* Add 1.0 to each element via double-precision SSE2 pairs.  _mm_set_pd
+ * stores its first operand in the high lane, so darr holds each pair
+ * swapped; the final assignments swap them back. */
+void increment_sse2(float arr[4]) {
+    double darr[4];
+    __m128d val1 = _mm_set_pd(arr[0], arr[1]);
+    __m128d val2 = _mm_set_pd(arr[2], arr[3]);
+    __m128d one = _mm_set_pd(1.0, 1.0);
+    __m128d result = _mm_add_pd(val1, one);
+    _mm_store_pd(darr, result);
+    result = _mm_add_pd(val2, one);
+    _mm_store_pd(&darr[2], result);
+    arr[0] = (float)darr[1];
+    arr[1] = (float)darr[0];
+    arr[2] = (float)darr[3];
+    arr[3] = (float)darr[2];
+}
+
--- /dev/null
+#include<simdconfig.h>
+#include<simdfuncs.h>
+
+#ifdef _MSC_VER
+#include<intrin.h>
+int sse3_available() {
+    return 1;
+}
+#else
+
+#include<pmmintrin.h>
+#include<cpuid.h>
+#include<stdint.h>
+
+#if defined(__APPLE__)
+int sse3_available() { return 1; }
+#else
+int sse3_available() {
+    return __builtin_cpu_supports("sse3");
+}
+#endif
+#endif
+
+/* Add 1.0 to each element via double pairs; _mm_set_pd reverses lane
+ * order, which the final swapped assignments undo. */
+void increment_sse3(float arr[4]) {
+    double darr[4];
+    __m128d val1 = _mm_set_pd(arr[0], arr[1]);
+    __m128d val2 = _mm_set_pd(arr[2], arr[3]);
+    __m128d one = _mm_set_pd(1.0, 1.0);
+    __m128d result = _mm_add_pd(val1, one);
+    _mm_store_pd(darr, result);
+    result = _mm_add_pd(val2, one);
+    _mm_store_pd(&darr[2], result);
+    result = _mm_hadd_pd(val1, val2); /* This does nothing. Only here so we use an SSE3 instruction. */
+    arr[0] = (float)darr[1];
+    arr[1] = (float)darr[0];
+    arr[2] = (float)darr[3];
+    arr[3] = (float)darr[2];
+}
--- /dev/null
+#include<simdconfig.h>
+#include<simdfuncs.h>
+
+#include<stdint.h>
+
+#ifdef _MSC_VER
+#include<intrin.h>
+
+int sse41_available() {
+    return 1;
+}
+
+#else
+#include<smmintrin.h>
+#include<cpuid.h>
+
+#if defined(__APPLE__)
+int sse41_available() { return 1; }
+#else
+int sse41_available() {
+    return __builtin_cpu_supports("sse4.1");
+}
+#endif
+#endif
+
+/* Add 1.0 to each element via double pairs; _mm_set_pd reverses lane
+ * order, which the final swapped assignments undo. */
+void increment_sse41(float arr[4]) {
+    double darr[4];
+    __m128d val1 = _mm_set_pd(arr[0], arr[1]);
+    __m128d val2 = _mm_set_pd(arr[2], arr[3]);
+    __m128d one = _mm_set_pd(1.0, 1.0);
+    __m128d result = _mm_add_pd(val1, one);
+    result = _mm_ceil_pd(result); /* A no-op, only here to use a SSE4.1 intrinsic. */
+    _mm_store_pd(darr, result);
+    result = _mm_add_pd(val2, one);
+    _mm_store_pd(&darr[2], result);
+    arr[0] = (float)darr[1];
+    arr[1] = (float)darr[0];
+    arr[2] = (float)darr[3];
+    arr[3] = (float)darr[2];
+}
--- /dev/null
+#include<simdconfig.h>
+#include<simdfuncs.h>
+#include<stdint.h>
+
+#ifdef _MSC_VER
+#include<intrin.h>
+
+int sse42_available() {
+    return 1;
+}
+
+#else
+
+#include<nmmintrin.h>
+#include<cpuid.h>
+
+#ifdef __APPLE__
+int sse42_available() {
+    return 1;
+}
+#else
+int sse42_available() {
+    return __builtin_cpu_supports("sse4.2");
+}
+#endif
+
+#endif
+
+/* Add 1.0 to each element via double pairs; _mm_set_pd reverses lane
+ * order, which the final swapped assignments undo. */
+void increment_sse42(float arr[4]) {
+    double darr[4];
+    __m128d val1 = _mm_set_pd(arr[0], arr[1]);
+    __m128d val2 = _mm_set_pd(arr[2], arr[3]);
+    __m128d one = _mm_set_pd(1.0, 1.0);
+    __m128d result = _mm_add_pd(val1, one);
+    _mm_store_pd(darr, result);
+    result = _mm_add_pd(val2, one);
+    _mm_store_pd(&darr[2], result);
+    _mm_crc32_u32(42, 99); /* A no-op, only here to use an SSE4.2 instruction. */
+    arr[0] = (float)darr[1];
+    arr[1] = (float)darr[0];
+    arr[2] = (float)darr[3];
+    arr[3] = (float)darr[2];
+}
--- /dev/null
+#include<simdconfig.h>
+#include<simdfuncs.h>
+
+#include<emmintrin.h>
+#include<tmmintrin.h>
+
+#ifdef _MSC_VER
+#include<intrin.h>
+
+int ssse3_available() {
+    return 1;
+}
+
+#else
+
+#include<cpuid.h>
+#include<stdint.h>
+
+int ssse3_available() {
+#ifdef __APPLE__
+    return 1;
+#elif defined(__clang__)
+    /* https://github.com/numpy/numpy/issues/8130 */
+    return __builtin_cpu_supports("sse4.1");
+#else
+    return __builtin_cpu_supports("ssse3");
+#endif
+}
+
+#endif
+
+/* Add 1.0 to each element via double pairs; _mm_set_pd reverses lane
+ * order, which the final swapped assignments undo. */
+void increment_ssse3(float arr[4]) {
+    double darr[4];
+    __m128d val1 = _mm_set_pd(arr[0], arr[1]);
+    __m128d val2 = _mm_set_pd(arr[2], arr[3]);
+    __m128d one = _mm_set_pd(1.0, 1.0);
+    __m128d result = _mm_add_pd(val1, one);
+    __m128i tmp1, tmp2;
+    tmp1 = tmp2 = _mm_set1_epi16(0);
+    _mm_store_pd(darr, result);
+    result = _mm_add_pd(val2, one);
+    _mm_store_pd(&darr[2], result);
+    tmp1 = _mm_hadd_epi32(tmp1, tmp2); /* This does nothing. Only here so we use an SSSE3 instruction. */
+    arr[0] = (float)darr[1];
+    arr[1] = (float)darr[0];
+    arr[2] = (float)darr[3];
+    arr[3] = (float)darr[2];
+}
--- /dev/null
+#include<simdfuncs.h>
+#include<stdio.h>
+
+/*
+ * A function that checks at runtime which simd accelerations are
+ * available and calls the best one. Falls
+ * back to plain C implementation if SIMD is not available.
+ */
+
+int main(int argc, char **argv) {
+    float four[4] = {2.0, 3.0, 4.0, 5.0};
+    const float expected[4] = {3.0, 4.0, 5.0, 6.0};
+    void (*fptr)(float[4]) = NULL;
+    const char *type;
+    int i;
+
+/* Add here. The first matched one is used so put "better" instruction
+ * sets at the top.
+ */
+/* Each HAVE_XXX macro comes from simdconfig.h, generated by the build. */
+#if HAVE_NEON
+    if(fptr == NULL && neon_available()) {
+        fptr = increment_neon;
+        type = "NEON";
+    }
+#endif
+#if HAVE_AVX2
+    if(fptr == NULL && avx2_available()) {
+        fptr = increment_avx2;
+        type = "AVX2";
+    }
+#endif
+#if HAVE_AVX
+    if(fptr == NULL && avx_available()) {
+        fptr = increment_avx;
+        type = "AVX";
+    }
+#endif
+#if HAVE_SSE42
+    if(fptr == NULL && sse42_available()) {
+        fptr = increment_sse42;
+        type = "SSE42";
+    }
+#endif
+#if HAVE_SSE41
+    if(fptr == NULL && sse41_available()) {
+        fptr = increment_sse41;
+        type = "SSE41";
+    }
+#endif
+#if HAVE_SSSE3
+    if(fptr == NULL && ssse3_available()) {
+        fptr = increment_ssse3;
+        type = "SSSE3";
+    }
+#endif
+#if HAVE_SSE3
+    if(fptr == NULL && sse3_available()) {
+        fptr = increment_sse3;
+        type = "SSE3";
+    }
+#endif
+#if HAVE_SSE2
+    if(fptr == NULL && sse2_available()) {
+        fptr = increment_sse2;
+        type = "SSE2";
+    }
+#endif
+#if HAVE_SSE
+    if(fptr == NULL && sse_available()) {
+        fptr = increment_sse;
+        type = "SSE";
+    }
+#endif
+#if HAVE_MMX
+    if(fptr == NULL && mmx_available()) {
+        fptr = increment_mmx;
+        type = "MMX";
+    }
+#endif
+    /* Guarantees fptr and type are always set before use. */
+    if(fptr == NULL) {
+        fptr = increment_fallback;
+        type = "fallback";
+    }
+    printf("Using %s.\n", type);
+    fptr(four);
+    /* All implementations must produce exactly input+1 per element. */
+    for(i=0; i<4; i++) {
+        if(four[i] != expected[i]) {
+            printf("Increment function failed, got %f expected %f.\n", four[i], expected[i]);
+            return 1;
+        }
+    }
+    return 0;
+}
--- /dev/null
+#pragma once
+
+#include<simdconfig.h>
+
+/* Declarations for every per-instruction-set implementation; each pair is
+ * only visible when the build detected compiler support (HAVE_XXX from
+ * simdconfig.h). */
+
+/* Yes, I do know that arr[4] decays into a pointer
+ * as a function argument. Don't do this in real code
+ * but for this test it is ok.
+ */
+
+void increment_fallback(float arr[4]);
+
+#if HAVE_MMX
+int mmx_available();
+void increment_mmx(float arr[4]);
+#endif
+
+#if HAVE_SSE
+int sse_available();
+void increment_sse(float arr[4]);
+#endif
+
+#if HAVE_SSE2
+int sse2_available();
+void increment_sse2(float arr[4]);
+#endif
+
+#if HAVE_SSE3
+int sse3_available();
+void increment_sse3(float arr[4]);
+#endif
+
+#if HAVE_SSSE3
+int ssse3_available();
+void increment_ssse3(float arr[4]);
+#endif
+
+#if HAVE_SSE41
+int sse41_available();
+void increment_sse41(float arr[4]);
+#endif
+
+#if HAVE_SSE42
+int sse42_available();
+void increment_sse42(float arr[4]);
+#endif
+
+#if HAVE_AVX
+int avx_available();
+void increment_avx(float arr[4]);
+#endif
+
+#if HAVE_AVX2
+int avx2_available();
+void increment_avx2(float arr[4]);
+#endif
+
+#if HAVE_NEON
+int neon_available();
+void increment_neon(float arr[4]);
+#endif
+
+#if HAVE_ALTIVEC
+int altivec_available();
+void increment_altivec(float arr[4]);
+#endif
+
+/* And so on. */
--- /dev/null
+project('shared module resolving symbol in executable', 'c')
+
+# The shared module contains a reference to the symbol 'func_from_executable',
+# which is always provided by the executable which loads it. This symbol can be
+# resolved at run-time by an ELF loader. But when building PE/COFF objects, all
+# symbols must be resolved at link-time, so an implib is generated for the
+# executable, and the shared module linked with it.
+#
+# See testcase 125 for an example of the more complex portability gymnastics
+# required if we do not know (at link-time) what provides the symbol.
+
+link_flags = []
+if host_machine.system() != 'windows'
+  # Needed to export dynamic symbols from the executable
+  link_flags += ['-rdynamic']
+endif
+
+dl = meson.get_compiler('c').find_library('dl', required: false)
+# implib: true generates the import library the module links against on Windows.
+e = executable('prog', 'prog.c', dependencies: dl, implib: true, link_args: link_flags)
+m = shared_module('module', 'module.c', link_with: e)
+test('test', e, args: m.full_path())
--- /dev/null
+#if defined _WIN32 || defined __CYGWIN__
+  #define DLL_PUBLIC __declspec(dllexport)
+#else
+  #if defined __GNUC__
+    #define DLL_PUBLIC __attribute__ ((visibility("default")))
+  #else
+    #pragma message ("Compiler does not support symbol visibility.")
+    #define DLL_PUBLIC
+  #endif
+#endif
+
+/* Defined in the executable that dlopens/LoadLibrarys this module. */
+extern int func_from_executable(void);
+
+/* Looked up by name ("func") from the loading executable. */
+int DLL_PUBLIC func(void) {
+    return func_from_executable();
+}
--- /dev/null
+#include <stdio.h>
+#include <assert.h>
+#ifdef _WIN32
+#include <windows.h>
+#else
+#include <dlfcn.h>
+#endif
+
+#if defined _WIN32 || defined __CYGWIN__
+  #define DLL_PUBLIC __declspec(dllexport)
+#else
+  #if defined __GNUC__
+    #define DLL_PUBLIC __attribute__ ((visibility("default")))
+  #else
+    #pragma message ("Compiler does not support symbol visibility.")
+    #define DLL_PUBLIC
+  #endif
+#endif
+
+typedef int (*fptr) (void);
+
+/* Exported so the dynamically loaded module can resolve it back from us. */
+int DLL_PUBLIC
+func_from_executable(void)
+{
+  return 42;
+}
+
+/* argv[1] is the path of the shared module to load (passed by the test). */
+int
+main (int argc, char **argv)
+{
+  int expected, actual;
+  fptr importedfunc;
+
+#ifdef _WIN32
+  HMODULE h = LoadLibraryA(argv[1]);
+#else
+  void *h = dlopen(argv[1], RTLD_NOW);
+#endif
+  assert(h != NULL);
+
+#ifdef _WIN32
+  importedfunc = (fptr) GetProcAddress (h, "func");
+#else
+  importedfunc = (fptr) dlsym(h, "func");
+#endif
+  assert(importedfunc != NULL);
+  /* The module's "func" must be a distinct symbol, not ours. */
+  assert(importedfunc != func_from_executable);
+
+  /* The module's func() calls back into func_from_executable(). */
+  actual = (*importedfunc)();
+  expected = func_from_executable();
+  assert(actual == expected);
+
+#ifdef _WIN32
+  FreeLibrary(h);
+#else
+  dlclose(h);
+#endif
+
+  return 0;
+}
--- /dev/null
+project('conf file in test')
+
+# A configure_file result must be usable directly as a test executable.
+test_file = configure_file(
+  input: 'test.py.in',
+  output: 'test.py',
+  configuration: configuration_data()
+)
+
+test('configure-file', test_file)
--- /dev/null
+#!/usr/bin/env python3
+
+# Trivial always-passing test script (run via configure_file output).
+import sys
+sys.exit(0)
--- /dev/null
+/* Quoted include: with implicit_include_directories disabled this must
+ * resolve to the wrapper stdio.h next to this file. */
+#include"stdio.h"
+
+#ifndef WRAPPER_INCLUDED
+#error The wrapper stdio.h was not included.
+#endif
+
+int main(int argc, char **argv) {
+    printf("Eventually I got printed.\n");
+    return 0;
+}
--- /dev/null
+project('dotinclude', 'c')
+
+# The source dir is NOT added to the include path; dotproc.c still finds
+# the local wrapper via the quoted-include search rules.
+executable('dotproc', 'dotproc.c',
+  implicit_include_directories : false)
+
--- /dev/null
+// There is no #pragma once because we _want_ to cause an eternal loop
+// if this wrapper invokes itself.
+
+// Marker checked by dotproc.c to prove this wrapper was the header used.
+#define WRAPPER_INCLUDED
+
+#include<stdio.h>
--- /dev/null
+executable('test-all', '../test.c')
--- /dev/null
+executable('test-benchmark', '../test.c')
--- /dev/null
+executable('test-clean-ctlist', '../test.c')
--- /dev/null
+executable('test-clean-gcda', '../test.c')
--- /dev/null
+executable('test-clean-gcno', '../test.c')
--- /dev/null
+executable('test-clean', '../test.c')
--- /dev/null
+executable('test-coverage-html', '../test.c')
--- /dev/null
+executable('test-coverage-text', '../test.c')
--- /dev/null
+executable('test-coverage-xml', '../test.c')
--- /dev/null
+executable('test-coverage', '../test.c')
--- /dev/null
+executable('test-dist', '../test.c')
--- /dev/null
+executable('test-distcheck', '../test.c')
--- /dev/null
+executable('test-install', '../test.c')
--- /dev/null
+# Each subdir defines an executable whose name collides with one of
+# Meson's reserved/built-in target names; all of them must build.
+project('reserved target names', 'c')
+  # FIXME: Setting this causes it to leak to all other tests
+  #default_options : ['b_coverage=true']
+
+subdir('all')
+subdir('benchmark')
+subdir('clean')
+subdir('clean-ctlist')
+subdir('clean-gcda')
+subdir('clean-gcno')
+subdir('coverage')
+subdir('coverage-html')
+subdir('coverage-text')
+subdir('coverage-xml')
+subdir('dist')
+subdir('distcheck')
+subdir('install')
+# We don't have a 'PHONY' directory because Windows and OSX
+# choke horribly when there are two entries with the same
+# name but different case.
+subdir('phony')
+subdir('reconfigure')
+subdir('scan-build')
+subdir('test')
+subdir('uninstall')
+
+subdir('runtarget')
+
+py3 = import('python3').find_python()
+
+# Custom target named after the reserved 'ctlist' helper; captures the
+# command's stdout into out.txt.
+custom_target('ctlist-test', output : 'out.txt',
+  command : [py3, '-c', 'print("")'],
+  capture : true,
+  build_by_default : true)
--- /dev/null
+executable('test-phony', '../test.c')
--- /dev/null
+executable('test-reconfigure', '../test.c')
--- /dev/null
+# A run_target named after Meson's reserved 'runtarget' helper name.
+configure_file(output : 'config.h', configuration: configuration_data())
+run_target('runtarget', command : ['echo'])
--- /dev/null
+executable('test-scan-build', '../test.c')
--- /dev/null
+/* Shared do-nothing source for every reserved-target-name executable. */
+int main(int argc, char *argv[]) {
+    return 0;
+}
--- /dev/null
+executable('test-test', '../test.c')
--- /dev/null
+executable('test-uninstall', '../test.c')
--- /dev/null
+#!/usr/bin/env python3
+
+import sys
+import argparse
+import os
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('text', nargs='*', type=str)
+ args = parser.parse_args()
+
+ text = args.text if isinstance(args.text, list) else [args.text]
+
+ output = ''
+ for t in text:
+ t = os.path.basename(t)
+
+ if not output:
+ output += t
+ else:
+ output += ' ' + t
+
+ output += '\n'
+
+ sys.stdout.write(output)
+
+if __name__ == '__main__':
+ sys.exit(main())
--- /dev/null
+#!/usr/bin/env python3
+
+import os
+import sys
+
+assert(os.path.exists(sys.argv[1]))
--- /dev/null
+#error "This file should not be included. Build dir must become before source dir in search order"
--- /dev/null
+#define MESSAGE "@var@"
+#define OTHER "@other@" "@second@" "@empty@"
+
+#mesondefine BE_TRUE
+#mesondefine SHOULD_BE_UNDEF
--- /dev/null
+/* Dummy file */
+#define RESULTA @ZERO@
--- /dev/null
+/* Dummy file */
+#define RESULTB @ZERO@
--- /dev/null
+#define MESSAGE "@var@"
--- /dev/null
+/* No escape */
+#define MESSAGE1 "@var1@"
+
+/* Single escape means no replace */
+#define MESSAGE2 "\@var1@"
+
+/* Replace pairs of escapes before '@' or '\@' with escape characters
+ * (note we have to double number of pairs due to C string escaping)
+ */
+#define MESSAGE3 "\\\\@var1@"
+
+/* Pairs of escapes and then single escape to avoid replace */
+#define MESSAGE4 "\\\\\@var1@"
+
+/* Check escaped variable does not overlap following variable */
+#define MESSAGE5 "\@var1@var2@"
+
+/* Check escape character outside variables */
+#define MESSAGE6 "\\ @ \@ \\\\@ \\\\\@"
--- /dev/null
+#define SHOULD_BE_UNDEFINED 1
+
+#include"config3.h"
+#include<string.h>
+#include<stdio.h>
+
+#ifdef SHOULD_BE_UNDEFINED
+#error Token did not get undefined.
+#endif
+
+#ifndef SHOULD_BE_DEFINED
+#error Token did not get defined
+#endif
+
+#define stringify(s) str(s)
+#define str(s) #s
+
+int main(int argc, char **argv) {
+#if !(SHOULD_BE_UNQUOTED_STRING == string)
+ printf("String token (unquoted) defined wrong.\n");
+ return 1;
+#endif
+ if(strcmp(SHOULD_BE_STRING, "string") != 0) {
+ printf("String token defined wrong.\n");
+ return 1;
+ }
+ if(strcmp(SHOULD_BE_STRING2, "A \"B\" C") != 0) {
+ printf("String token 2 defined wrong.\n");
+ return 1;
+ }
+ if(strcmp(SHOULD_BE_STRING3, "A \"\" C") != 0) {
+ printf("String token 3 defined wrong.\n");
+ return 1;
+ }
+ if(strcmp(SHOULD_BE_STRING4, "A \" C") != 0) {
+ printf("String token 4 defined wrong.\n");
+ return 1;
+ }
+ if(SHOULD_BE_ONE != 1) {
+ printf("One defined incorrectly.\n");
+ return 1;
+ }
+ if(SHOULD_BE_ZERO != 0) {
+ printf("Zero defined incorrectly.\n");
+ return 1;
+ }
+ if(strcmp(SHOULD_BE_QUOTED_ONE, "1") != 0) {
+ printf("Quoted number defined incorrectly.\n");
+ return 1;
+ }
+ SHOULD_BE_RETURN 0;
+}
--- /dev/null
+#!/usr/bin/env python3
+
+import sys
+import argparse
+
def main():
    """Exit 0 if the given file contains a line equal to `text` (after
    stripping surrounding whitespace), 1 otherwise."""
    parser = argparse.ArgumentParser()
    parser.add_argument('file', nargs=1, type=str)
    parser.add_argument('text', nargs=1, type=str)
    args = parser.parse_args()

    [fname] = args.file
    [needle] = args.text

    with open(fname, 'r', encoding='utf-8') as fh:
        found = any(line.strip() == needle for line in fh)

    return 0 if found else 1

if __name__ == '__main__':
    sys.exit(main())
--- /dev/null
#!/usr/bin/env python3

# Generator used by configure_file(command:) tests; writes a fixed header
# to the output path given as the sole argument. MESON_BUILD_ROOT and
# MESON_SUBDIR are read only to verify Meson exports them.

import sys, os
from pathlib import Path

if len(sys.argv) != 2:
    print("Wrong amount of parameters.")
    # Previously execution continued after the warning and crashed later
    # with an unrelated error; fail fast with a clear exit status instead.
    sys.exit(1)

build_dir = Path(os.environ['MESON_BUILD_ROOT'])
subdir = Path(os.environ['MESON_SUBDIR'])
outputf = Path(sys.argv[1])

with outputf.open('w') as ofile:
    ofile.write("#define ZERO_RESULT 0\n")
--- /dev/null
#!/usr/bin/env python3

# Generator used by configure_file(command:) tests; takes an input path
# (existence-checked only) and an output path, and writes a fixed header.
# MESON_BUILD_ROOT and MESON_SUBDIR are read only to verify Meson exports
# them.

import sys, os
from pathlib import Path

if len(sys.argv) != 3:
    print("Wrong amount of parameters.")
    # Previously execution continued after the warning and crashed later
    # with a confusing IndexError; fail fast with a clear exit status.
    sys.exit(1)

build_dir = Path(os.environ['MESON_BUILD_ROOT'])
subdir = Path(os.environ['MESON_SUBDIR'])
inputf = Path(sys.argv[1])
outputf = Path(sys.argv[2])

assert(inputf.exists())

with outputf.open('w') as ofile:
    ofile.write("#define ZERO_RESULT 0\n")
--- /dev/null
+usr/share/appdir/config2.h
+usr/share/appdir/config2b.h
+usr/share/appdireh/config2-1.h
+usr/share/appdirok/config2-2.h
--- /dev/null
+project('configure file test', 'c')
+
+conf = configuration_data()
+
+conf.set('var', 'mystring')
+conf.set('other', 'string 2')
+conf.set('second', ' bonus')
+conf.set('BE_TRUE', true)
+
+assert(conf.get('var') == 'mystring', 'Get function is not working.')
+assert(conf.get('var', 'default') == 'mystring', 'Get function is not working.')
+assert(conf.get('notthere', 'default') == 'default', 'Default value getting is not working.')
+
+cfile = configure_file(input : 'config.h.in',
+output : 'config.h',
+configuration : conf)
+
+e = executable('inctest', 'prog.c',
+# Note that you should NOT do this. Don't add generated headers here
+# This tests that we do the right thing even if people add in conf files
+# to their sources.
+cfile)
+test('inctest', e)
+
+# Test if we can also pass files() as input
+configure_file(input : files('config.h.in'),
+ output : 'config2.h',
+ configuration : conf)
+
+# Now generate a header file with an external script.
+genprog = import('python3').find_python()
+scriptfile = '@0@/generator.py'.format(meson.current_source_dir())
+ifile = '@0@/dummy.dat'.format(meson.current_source_dir())
+ofile = '@0@/config2.h'.format(meson.current_build_dir())
+
+check_file = find_program('check_file.py')
+# Configure in source root with command and absolute paths
+configure_file(input : 'dummy.dat',
+ output : 'config2.h',
+ command : [genprog, scriptfile, ifile, ofile],
+ install_dir : 'share/appdir')
+run_command(check_file, join_paths(meson.current_build_dir(), 'config2.h'))
+
+# Same again as before, but an input file should not be required in
+# this case where we use a command/script to generate the output file.
+genscript2b = '@0@/generator-without-input-file.py'.format(meson.current_source_dir())
+ofile2b = '@0@/config2b.h'.format(meson.current_build_dir())
+configure_file(
+ output : 'config2b.h',
+ command : [genprog, genscript2b, ofile2b],
+ install_dir : 'share/appdir')
+run_command(check_file, join_paths(meson.current_build_dir(), 'config2b.h'))
+
+found_script = find_program('generator.py')
+# More configure_file tests in here
+subdir('subdir')
+
+test('inctest2', executable('prog2', 'prog2.c'))
+
+# Generate a conf file without an input file.
+
+dump = configuration_data()
+dump.set_quoted('SHOULD_BE_STRING', 'string', description : 'A string')
+dump.set_quoted('SHOULD_BE_STRING2', 'A "B" C')
+dump.set_quoted('SHOULD_BE_STRING3', 'A "" C')
+dump.set_quoted('SHOULD_BE_STRING4', 'A " C')
+dump.set('SHOULD_BE_RETURN', 'return')
+dump.set('SHOULD_BE_DEFINED', true)
+dump.set('SHOULD_BE_UNDEFINED', false)
+dump.set('SHOULD_BE_ONE', 1)
+dump.set('SHOULD_BE_ZERO', 0, description : 'Absolutely zero')
+dump.set('SHOULD_BE_QUOTED_ONE', '"1"')
+
+dump.set_quoted('INTEGER_AS_STRING', '12')
+if dump.get_unquoted('INTEGER_AS_STRING').to_int() == 12
+ dump.set('SHOULD_BE_UNQUOTED_STRING', dump.get_unquoted('SHOULD_BE_STRING'))
+endif
+
+configure_file(output : 'config3.h',
+ configuration : dump)
+
+test('Configless.', executable('dumpprog', 'dumpprog.c'))
+
+
+# Config file generation in a loop with @BASENAME@ substitution
+dump = configuration_data()
+dump.set('ZERO', 0)
+config_templates = files(['config4a.h.in', 'config4b.h.in'])
+foreach config_template : config_templates
+ configure_file(input : config_template, output : '@BASENAME@',
+ configuration : dump)
+endforeach
+
+test('Substituted', executable('prog4', 'prog4.c'))
+
+# Test `capture` keyword
+
+basename_py = find_program('basename.py')
+file_contains_py = find_program('file_contains.py')
+test_string = 'hello world'
+test_input_file = join_paths(meson.current_build_dir(), test_string)
+run_command(find_program('touch.py'), test_input_file)
+configs = [
+ # no input
+ configure_file(command: [ basename_py, test_string ], capture: true, output: 'capture test 1'),
+ # with input
+ configure_file(input: test_input_file, command: [ basename_py, '@INPUT@' ], capture: true, output: 'capture test 2'),
+]
+foreach c : configs
+ test('@0@'.format(c), file_contains_py, args: [ c, test_string ])
+endforeach
+
+# Test variable is substituted only once
+conf5 = configuration_data()
+conf5.set('var', '@var2@')
+conf5.set('var2', 'error')
+configure_file(
+ input : 'config5.h.in',
+ output : '@BASENAME@',
+ configuration : conf5
+)
+test('test5', executable('prog5', 'prog5.c'))
+
+# Test escaping
+conf6 = configuration_data()
+conf6.set('var1', 'foo')
+conf6.set('var2', 'bar')
+configure_file(
+ input : 'config6.h.in',
+ output : '@BASENAME@',
+ configuration : conf6
+)
+test('test6', executable('prog6', 'prog6.c'))
--- /dev/null
+#include <string.h>
+/* config.h must not be in quotes:
+ * https://gcc.gnu.org/onlinedocs/cpp/Search-Path.html
+ */
+#include <config.h>
+
+#ifdef SHOULD_BE_UNDEF
+#error "FAIL!"
+#endif
+
+int main(int argc, char **argv) {
+#ifndef BE_TRUE
+ return 1;
+#else
+ return strcmp(MESSAGE, "mystring");
+#endif
+}
--- /dev/null
+#include<config2.h>
+
+int main(int argc, char **argv) {
+ return ZERO_RESULT;
+}
--- /dev/null
+#include <config4a.h>
+#include <config4b.h>
+
+int main(int argc, char **argv) {
+ return RESULTA + RESULTB;
+}
--- /dev/null
+#include <string.h>
+#include <config5.h>
+
+int main(int argc, char **argv) {
+ return strcmp(MESSAGE, "@var2@");
+}
--- /dev/null
+#include <string.h>
+#include <config6.h>
+
+int main(int argc, char **argv) {
+ return strcmp(MESSAGE1, "foo")
+ || strcmp(MESSAGE2, "@var1@")
+ || strcmp(MESSAGE3, "\\foo")
+ || strcmp(MESSAGE4, "\\@var1@")
+ || strcmp(MESSAGE5, "@var1bar")
+ || strcmp(MESSAGE6, "\\ @ @ \\@ \\@");
+}
--- /dev/null
+# Configure in subdir with absolute paths for input and relative for output
+configure_file(input : '../dummy.dat',
+ output : 'config2-1.h',
+ command : [genprog, scriptfile, ifile, 'config2-1.h'],
+ install_dir : 'share/appdireh')
+run_command(check_file, join_paths(meson.current_build_dir(), 'config2-1.h'))
+
+# Configure in subdir with files() for input and relative for output
+configure_file(input : '../dummy.dat',
+ output : 'config2-2.h',
+ command : [genprog, scriptfile, files('../dummy.dat'), 'config2-2.h'],
+ install_dir : 'share/appdirok')
+run_command(check_file, join_paths(meson.current_build_dir(), 'config2-2.h'))
+
+# Configure in subdir with string templates for input and output
+configure_file(input : '../dummy.dat',
+ output : 'config2-3.h',
+ command : [found_script, '@INPUT@', '@OUTPUT@'])
+run_command(check_file, join_paths(meson.current_build_dir(), 'config2-3.h'))
--- /dev/null
+#!/usr/bin/env python3
+
+import sys
+import argparse
+from pathlib import Path
+
def main():
    """Create (or refresh the mtime of) every file named on the command line,
    like the POSIX `touch` utility."""
    parser = argparse.ArgumentParser()
    parser.add_argument('files', nargs='*', type=str)
    options = parser.parse_args()

    for name in options.files:
        Path(name).touch()

if __name__ == '__main__':
    sys.exit(main())
--- /dev/null
+extern int dir2;
+extern int dir2_dir1;
+extern int dir3;
+extern int dir3_dir1;
+
+int main() {
+ if (dir2 != 20)
+ return 1;
+ if (dir2_dir1 != 21)
+ return 1;
+ if (dir3 != 30)
+ return 1;
+ if (dir3_dir1 != 31)
+ return 1;
+ return 0;
+}
--- /dev/null
+sources += files('file.c')
--- /dev/null
+int dir2_dir1 = 21;
--- /dev/null
+int dir2 = 20;
--- /dev/null
+sources += files('file.c', 'dir1/file.c')
--- /dev/null
+int dir3_dir1 = 31;
--- /dev/null
+int dir3 = 30;
--- /dev/null
+lib = static_library('lib', 'file.c', 'dir1/file.c')
--- /dev/null
+project('proj', 'c')
+
+sources = []
+subdir('dir1')
+subdir('dir2')
+subdir('dir3')
+executable('a.out', sources : sources, objects : lib.extract_all_objects())
--- /dev/null
+# Copyright © 2017 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import textwrap
+
# Literal contents for the generated header and implementation files.
HEADER = textwrap.dedent('''\
    void stringify(int foo, char * buffer);
    ''')

CODE = textwrap.dedent('''\
    #include <stdio.h>

    #ifndef WORKS
    # error "This shouldn't have been included"
    #endif

    void stringify(int foo, char * buffer) {
        sprintf(buffer, "%i", foo);
    }
    ''')


def main():
    """Write HEADER to --header and CODE to --code, as a custom_target()
    source generator."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--header')
    parser.add_argument('--code')
    opts = parser.parse_args()

    for path, contents in ((opts.header, HEADER), (opts.code, CODE)):
        with open(path, 'w') as out:
            out.write(contents)


if __name__ == '__main__':
    main()
--- /dev/null
+/* Copyright © 2017 Intel Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "gen.h"
+
+void func(char * buffer) {
+ stringify(1, buffer);
+}
--- /dev/null
+# Copyright © 2017 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+project('custom_target_index', 'c', default_options : 'c_std=c89')
+
+py_mod = import('python3')
+prog_python = py_mod.find_python()
+
+gen = custom_target(
+ 'gen.[ch]',
+ input : 'gen_sources.py',
+ output : ['gen.c', 'gen.h'],
+ command : [prog_python, '@INPUT@', '--header', '@OUTPUT1@', '--code', '@OUTPUT0@'],
+)
+
+lib = static_library(
+ 'libfoo',
+ ['lib.c', gen[1]],
+)
+
+subdir('subdir')
--- /dev/null
+/* Copyright © 2017 Intel Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "gen.h"
+
+int main(void) {
+ char buf[50];
+ stringify(10, buf);
+ return 0;
+}
--- /dev/null
+# Copyright © 2017 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+foo = executable(
+ 'foo',
+ ['foo.c', gen[0], gen[1]],
+ c_args : '-DWORKS',
+)
--- /dev/null
+project('mainproj', 'c')
+
+subproject('zlib')
+
+executable('grabprog', files('src/subprojects/prog.c'))
+executable('grabprog2', files('src/subprojects/foo/prog2.c'))
+subdir('src')
--- /dev/null
+executable('grabprog3', files('subprojects/prog.c'))
+executable('grabprog4', files('subprojects/foo/prog2.c'))
--- /dev/null
+#include<stdio.h>
+
+int main(int argc, char **argv) {
+ printf("Do not have a file layout like this in your own projects.\n");
+ printf("This is only to test that this works.\n");
+ return 0;
+}
--- /dev/null
+#include<stdio.h>
+
+int main(int argc, char **argv) {
+ printf("Do not have a file layout like this in your own projects.\n");
+ printf("This is only to test that this works.\n");
+ return 0;
+}
--- /dev/null
+int dummy_func() {
+ return 42;
+}
--- /dev/null
+project('shared lib', 'c')
+shared_library('foo', 'foo.c')
--- /dev/null
+[wrap-file]
+directory = zlib-1.2.8
+
+source_url = http://zlib.net/fossils/zlib-1.2.8.tar.gz
+source_filename = zlib-1.2.8.tar.gz
+source_hash = 36658cb768a54c1d4dec43c3116c27ed893e88b02ecfcb44f2166f9c0b7f2a0d
+
+patch_url = https://wrapdb.mesonbuild.com/v1/projects/zlib/1.2.8/8/get_zip
+patch_filename = zlib-1.2.8-8-wrap.zip
+patch_hash = 17c52a0e0c59ce926d3959005d5cd8178c6c7e2c9a4a1304279a8320c955ac60
--- /dev/null
+project('include dir in subproj test', 'c')
+
+
+subproject('inctest')
+
+
+exe = executable('prog', 'prog.c')
+
+test('dummy', exe)
--- /dev/null
+
+int main(int argc, char **argv) {
+ return 0;
+}
--- /dev/null
+
+/* file which is used in the subproject */
--- /dev/null
+
+project('subproj with includedir', 'c')
+
+
+
+compile_check = '''
+#include "incfile.h"
+'''
+
+if not meson.get_compiler('c').compiles(compile_check, name : 'include in subproj',
+ include_directories: include_directories('include'))
+ error('failed')
+endif
--- /dev/null
+#include<assert.h>
+char func_b();
+char func_c();
+
+int main(int argc, char **argv) {
+ if(func_b() != 'b') {
+ return 1;
+ }
+ if(func_c() != 'c') {
+ return 2;
+ }
+ return 0;
+}
--- /dev/null
+#include<stdlib.h>
+char func_c();
+
+#if defined _WIN32 || defined __CYGWIN__
+#define DLL_PUBLIC __declspec(dllexport)
+#else
+ #if defined __GNUC__
+ #define DLL_PUBLIC __attribute__ ((visibility("default")))
+ #else
+ #pragma message ("Compiler does not support symbol visibility.")
+ #define DLL_PUBLIC
+ #endif
+#endif
+
+char DLL_PUBLIC func_b() {
+ if(func_c() != 'c') {
+ exit(3);
+ }
+ return 'b';
+}
--- /dev/null
+project('B', 'c')
+C = subproject('C')
+c = C.get_variable('c')
+b = shared_library('b', 'b.c', link_with : c)
--- /dev/null
+#if defined _WIN32 || defined __CYGWIN__
+#define DLL_PUBLIC __declspec(dllexport)
+#else
+ #if defined __GNUC__
+ #define DLL_PUBLIC __attribute__ ((visibility("default")))
+ #else
+ #pragma message ("Compiler does not support symbol visibility.")
+ #define DLL_PUBLIC
+ #endif
+#endif
+
+char DLL_PUBLIC func_c() {
+ return 'c';
+}
--- /dev/null
+project('C', 'c')
+c = shared_library('c', 'c.c')
--- /dev/null
+project('A', 'c', subproject_dir:'custom_subproject_dir')
+
+B = subproject('B')
+b = B.get_variable('b')
+
+C = subproject('C')
+c = C.get_variable('c')
+
+subdir('other_subdir')
+
+a = executable('a', 'a.c', link_with : [b, c])
+test('a test', a)
--- /dev/null
+#include<stdlib.h>
+
+#if defined _WIN32 || defined __CYGWIN__
+#define DLL_PUBLIC __declspec(dllexport)
+#else
+ #if defined __GNUC__
+ #define DLL_PUBLIC __attribute__ ((visibility("default")))
+ #else
+ #pragma message ("Compiler does not support symbol visibility.")
+ #define DLL_PUBLIC
+ #endif
+#endif
+
+char DLL_PUBLIC func_b() {
+ if('c' != 'c') {
+ exit(3);
+ }
+ return 'b';
+}
--- /dev/null
+other = shared_library('other', 'custom_subproject_dir/other.c')
--- /dev/null
+# Copyright © 2017 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+project('config tool variable', 'cpp')
+
+
+dep_llvm = dependency('llvm', required : false)
+if not dep_llvm.found()
+ error('MESON_SKIP_TEST LLVM not installed.')
+endif
+
+includedir = dep_llvm.get_configtool_variable('includedir')
+includedir = join_paths(includedir, 'llvm')
+if host_machine.system() == 'windows'
+ cmd = run_command(['dir', includedir])
+else
+ cmd = run_command(['ls', includedir])
+endif
+
+assert(cmd.returncode() == 0, 'did not run successfully')
--- /dev/null
#!/usr/bin/env python3

# Minimal cross-platform `cp`: copies the file named by the first argument
# to the path named by the second. Used as a custom_target() command.

import sys
import shutil

shutil.copyfile(sys.argv[1], sys.argv[2])
--- /dev/null
+project('custom target subdir depend files', 'c')
+
+copy = find_program('copyfile.py')
+
+subdir('subdir')
+
+executable('foo', foo_src)
--- /dev/null
+You can depend on this file.
\ No newline at end of file
--- /dev/null
+#include <stdio.h>
+
+int main() {
+ printf("foo is working.\n");
+ return 0;
+}
--- /dev/null
+foo_src = custom_target('foo_src',
+ depend_files : 'dep.dat',
+ input : 'foo.c.in',
+ output : 'foo.c',
+ command : [copy, '@INPUT@', '@OUTPUT@']
+)
--- /dev/null
+some stuff here
--- /dev/null
+#include <stdio.h>
+#include <fcntl.h>
+#include <errno.h>
+#include <string.h>
+#include <stdlib.h>
+#include <sys/types.h>
+
+#ifdef _WIN32
+ #include <io.h>
+ #include <windows.h>
+#else
+ #include <unistd.h>
+#endif
+
+/* Who cares about stack sizes in test programs anyway */
+#define LINE_LENGTH 4096
+
/* Copy src to dest: CopyFile() on Windows, exec of `cp` elsewhere.
 * On POSIX this replaces the process image, so it only returns (the
 * execlp error value) when the exec itself fails. */
static int
intrp_copyfile (char * src, char * dest)
{
#ifdef _WIN32
  if (!CopyFile (src, dest, FALSE))
    return 1;
  return 0;
#else
  /* execlp is variadic: POSIX requires the terminating sentinel to be a
   * (char *) NULL so it is pointer-sized even where NULL is plain 0. */
  return execlp ("cp", "copyfile", src, dest, (char *) NULL);
#endif
}
+
/* Read one line (up to LINE_LENGTH-1 chars) from f into line.
 * On fgets failure the errno message is printed to stderr and the buffer
 * is left untouched; callers receive no explicit error indication. */
static void
parser_get_line (FILE * f, char line[LINE_LENGTH])
{
  if (!fgets (line, LINE_LENGTH, f))
    fprintf (stderr, "%s\n", strerror (errno));
}
+
+int
+main (int argc, char * argv[])
+{
+ FILE *f = NULL;
+ char line[LINE_LENGTH];
+
+ if (argc != 4) {
+ fprintf (stderr, "Invalid number of arguments: %i\n", argc);
+ goto err;
+ }
+
+ if ((f = fopen (argv[1], "r")) == NULL) {
+ fprintf (stderr, "%s\n", strerror (errno));
+ goto err;
+ }
+
+ parser_get_line (f, line);
+
+ if (!line || line[0] != '#' || line[1] != '!') {
+ fprintf (stderr, "Invalid script\n");
+ goto err;
+ }
+
+ parser_get_line (f, line);
+
+ if (!line || strncmp (line, "copy", 4) != 0) {
+ fprintf (stderr, "Syntax error: %s\n", line);
+ goto err;
+ }
+
+ return intrp_copyfile (argv[2], argv[3]);
+
+err:
+ fclose (f);
+ return 1;
+}
--- /dev/null
+project('shebang parsing', 'c')
+
+interpreter = executable('aninterp', 'main.c', native : true)
+
+cdata = configuration_data()
+cdata.set('INTRP', interpreter.full_path())
+
+f = configure_file(input : 'script.int.in',
+ output : 'script.int',
+ configuration : cdata)
+
+# Test that parsing a shebang with spaces works properly. See `man execve`,
+# specifically the section on "Interpreter scripts" and the one under "NOTES".
+script = find_program(f)
+
+custom_target('interpthis',
+ input : 'input.txt',
+ output : 'output.txt',
+ depends : interpreter,
+ command : [script, '@INPUT@', '@OUTPUT@'],
+ build_by_default : true)
--- /dev/null
+#!/usr/bin/env @INTRP@
+copy
--- /dev/null
+project('dolphin option', 'c')
+
+d = disabler()
+
+d2 = dependency(d)
+d3 = (d == d2)
+d4 = d + 0
+d5 = d2 or true
+
+assert(d, 'Disabler did not cause this to be skipped.')
+assert(d2, 'Function laundered disabler did not cause this to be skipped.')
+assert(d3, 'Disabler comparison should yield disabler and thus this would not be called.')
+assert(d4, 'Disabler addition should yield disabler and thus this would not be called.')
+assert(d5, 'Disabler logic op should yield disabler and thus this would not be called.')
+
+number = 0
+
+if d
+ number = 1
+else
+ number = 2
+endif
+
+assert(d == 0, 'Plain if handled incorrectly, value should be 0 but is @0@'.format(number))
+
+if d.found()
+ number = 1
+else
+ number = 2
+endif
+
+assert(d == 1, 'If found handled incorrectly, value should be 1 but is @0@'.format(number))
+
+
--- /dev/null
+# Copyright © 2017 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+project('array default options')
+
+assert(get_option('array') == ['foo', 'bar'], 'Default value for array is not equal to choices')
--- /dev/null
+# Copyright © 2017 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+option(
+ 'array',
+ type : 'array',
+ choices : ['foo', 'bar'],
+)
--- /dev/null
+project('if test', 'c')
+
+var1 = true
+set_variable('var2', false)
+
+if var1
+ exe = executable('prog', 'prog.c')
+endif
+
+if var2
+ exe = executable('breakbreakbreak', 'crashing.c')
+endif
+
+test('iftest', exe)
+
+if not is_variable('var1')
+ error('Is_variable fail.')
+endif
+
+if is_variable('nonexisting')
+ error('Is_variable fail 2.')
+endif
+
+if not get_variable('var1', false)
+ error('Get_variable fail.')
+endif
+
+if get_variable('nonexisting', false)
+ error('Get_variable fail.')
+endif
--- /dev/null
+int main(int argc, char **argv) { return 0; }
--- /dev/null
#!/usr/bin/env python3

# Verifies custom_target() template substitution: succeeds (and copies
# argv[2] to argv[3]) only if the literal token '@INPUT1@' survived,
# unsubstituted, inside the first command-line argument; otherwise exits
# with an explanatory error message.

import sys
import shutil

if '@INPUT1@' in sys.argv[1]:
    shutil.copyfile(sys.argv[2], sys.argv[3])
else:
    sys.exit('String @INPUT1@ not found in "{}"'.format(sys.argv[1]))
--- /dev/null
+#include <stdio.h>
+
+int main() {
+ printf("foo is working.\n");
+ return 0;
+}
--- /dev/null
+project('custom target template substitution', 'c')
+
+check = find_program('checkcopy.py')
+
+config = configuration_data()
+
+in = configure_file(configuration : config, output : 'x@IN')
+
+# Check that substitution does not find @FOO@ and then misses @INPUT0@.
+# Check the resulting x@INPUT1@ is not replaced.
+foo = custom_target('runcheck',
+ input : [in, 'foo.c.in'],
+ output : 'foo.c',
+ command : [check, '-D@FOO@INPUT0@PUT1@', '@INPUT1@', '@OUTPUT@']
+)
+
+executable('foo', foo)
--- /dev/null
+project('dep-test')
+
+dep = dependency('', required:false)
+if dep.found()
+ error('not-found dependency was found')
+endif
+
+assert(dep.type_name() == 'not-found', 'dependency should be of type "not-found" not ' + dep.type_name())
--- /dev/null
+project('subdir if found', 'c')
+
+found_dep = declare_dependency()
+not_found_dep = dependency('nonexisting', required : false)
+
+subdir('nonexisting_dir', if_found : not_found_dep)
+
+variable = 3
+
+subdir('subdir', if_found : found_dep)
+assert(variable == 5, 'Subdir was not properly entered.')
--- /dev/null
+variable = 5
--- /dev/null
+project('else test', 'c')
+
+var = false
+
+if var
+ exe = executable('break', 'break.c')
+else
+ exe = executable('prog', 'prog.c')
+endif
+
+test('elsetest', exe)
--- /dev/null
+int main(int argc, char **argv) { return 0; }
--- /dev/null
+project('comparison', 'c')
+
+# Compare equality of strings
+
+var1 = 'foo'
+var2 = 'bar'
+
+if var1 == var2
+ exe1 = executable('broken', 'broken.c')
+else
+ exe1 = executable('prog1', 'prog.c')
+endif
+
+if var1 == var1
+ exe2 = executable('prog2', 'prog.c')
+else
+ exe2 = executable('broken', 'broken.c')
+endif
+
+if var1 != var2
+ exe3 = executable('prog3', 'prog.c')
+else
+ exe3 = executable('broken', 'broken.c')
+endif
+
+if var1 != var1
+ exe4 = executable('broken', 'broken.c')
+else
+ exe4 = executable('prog4', 'prog.c')
+endif
+
+test('equalfalse', exe1)
+test('equaltrue', exe2)
+test('nequaltrue', exe3)
+test('nequalfalse', exe4)
+
+# Non-equality comparisons
+
+var3 = 3
+var4 = 4
+
+if var3 < var4
+ exe5 = executable('prog5', 'prog.c')
+else
+ exe5 = executable('broken', 'broken.c')
+endif
+
+if var3 < var3
+ exe6 = executable('broken', 'broken.c')
+else
+ exe6 = executable('prog6', 'prog.c')
+endif
+
+if var4 > var3
+ exe7 = executable('prog7', 'prog.c')
+else
+ exe7 = executable('broken', 'broken.c')
+endif
+
+if var3 > var3
+ exe8 = executable('broken', 'broken.c')
+else
+ exe8 = executable('prog8', 'prog.c')
+endif
+
+if var4 <= var3
+ exe9 = executable('broken', 'broken.c')
+else
+ exe9 = executable('prog9', 'prog.c')
+endif
+
+if var3 <= var3
+ exe10 = executable('prog10', 'prog.c')
+else
+ exe10 = executable('broken', 'broken.c')
+endif
+
+if var3 >= var4
+ exe11 = executable('broken', 'broken.c')
+else
+ exe11 = executable('prog11', 'prog.c')
+endif
+
+if var3 >= var3
+ exe12 = executable('prog12', 'prog.c')
+else
+ exe12 = executable('broken', 'broken.c')
+endif
+
+test('lttrue', exe5)
+test('ltfalse', exe6)
+test('gttrue', exe7)
+test('gtfalse', exe8)
+test('lefalse', exe9)
+test('letrue', exe10)
+test('gefalse', exe11)
+test('getrue', exe12)
+
+# Non-elementary type comparisons
+
+if exe1 == exe2
+ exe13 = executable('broken', 'broken.c')
+else
+ exe13 = executable('prog13', 'prog.c')
+endif
+
+if exe1 == exe1
+ exe14 = executable('prog14', 'prog.c')
+else
+ exe14 = executable('broken', 'broken.c')
+endif
+
+if exe1 != exe2
+ exe15 = executable('prog15', 'prog.c')
+else
+ exe15 = executable('broken', 'broken.c')
+endif
+
+if exe1 != exe1
+ exe16 = executable('broken', 'broken.c')
+else
+ exe16 = executable('prog16', 'prog.c')
+endif
+
+test('equalfalse', exe13)
+test('equaltrue', exe14)
+test('nequaltrue', exe15)
+test('nequalfalse', exe16)
--- /dev/null
+int main(int argc, char **argv) { return 0; }
--- /dev/null
+project('c++ test', 'cpp')
+
+if meson.get_compiler('cpp').get_id() == 'intel'
+ # Error out if the -std=xxx option is incorrect
+ add_project_arguments('-diag-error', '10159', language : 'cpp')
+endif
+
+exe = executable('trivialprog', 'trivial.cc', extra_files : 'something.txt')
+test('runtest', exe)
--- /dev/null
+This file is only here so it shows up in IDEs as part of this target.
--- /dev/null
+#include<iostream>
+
+int main(int argc, char **argv) {
+ std::cout << "C++ seems to be working." << std::endl;
+ return 0;
+}
--- /dev/null
+int func() { return 0; }
--- /dev/null
+project('array test', 'c')
+
+arr = [
+ 'func.c',
+ 'prog.c']
+
+exe = executable('prog', sources : arr)
+test('arr test', exe)
--- /dev/null
+extern int func();
+
+int main(int argc, char **argv) { return func(); }
--- /dev/null
+#ifndef FUNC_H__
+#define FUNC_H__
+
+int func();
+
+#endif
--- /dev/null
+project('include dir test', 'c')
+
+inc = include_directories('include')
+subdir('src')
--- /dev/null
+#include "func.h"
+
+int func() {
+ return 0;
+}
--- /dev/null
+exe = executable('prog', 'prog.c', 'func.c', include_directories : inc)
+test('inc test', exe)
--- /dev/null
+#include "func.h"
+
+int main(int argc, char **argv) {
+ return func();
+}
--- /dev/null
+#include<stdio.h>
--- /dev/null
+project('header in file list', 'c')
+
+exe = executable('prog', 'prog.c', 'header.h')
+test('basic', exe)
--- /dev/null
+#include "header.h"
+
+int main(int argc, char **argv) { return 0; }
--- /dev/null
+project('global arg test', 'cpp', 'c')
+
+add_global_arguments('-DMYTHING', language : 'c')
+add_global_arguments('-DMYCPPTHING', language : 'cpp')
+
+add_global_arguments('-DMYCANDCPPTHING', language: ['c', 'cpp'])
+
+exe1 = executable('prog', 'prog.c')
+exe2 = executable('prog2', 'prog.cc')
+
+test('prog1', exe1)
+test('prog2', exe2)
+
--- /dev/null
+#ifndef MYTHING
+#error "Global argument not set"
+#endif
+
+#ifdef MYCPPTHING
+#error "Wrong global argument set"
+#endif
+
+#ifndef MYCANDCPPTHING
+#error "Global argument not set"
+#endif
+
+int main(int argc, char **argv) {
+ return 0;
+}
--- /dev/null
+#ifdef MYTHING
+#error "Wrong global argument set"
+#endif
+
+#ifndef MYCPPTHING
+#error "Global argument not set"
+#endif
+
+#ifndef MYCANDCPPTHING
+#error "Global argument not set"
+#endif
+
+int main(int argc, char **argv) {
+ return 0;
+}
--- /dev/null
+#ifndef CTHING
+#error "Local argument not set"
+#endif
+
+#ifdef CPPTHING
+#error "Wrong local argument set"
+#endif
+
+int func() { return 0; }
--- /dev/null
+#ifdef CTHING
+#error "Local C argument set in wrong target"
+#endif
+
+#ifdef CPPTHING
+#error "Local CPP argument set in wrong target"
+#endif
+
+int func() { return 0; }
--- /dev/null
+project('local arg test', 'cpp', 'c')
+
+exe1 = executable('prog', 'prog.cc', 'func.c', \
+c_args : '-DCTHING', \
+cpp_args : '-DCPPTHING')
+exe2 = executable('prog2', 'prog2.cc', 'func2.c')
+
+test('prog1', exe1)
+test('prog2', exe2)
--- /dev/null
+/* C++ half of exe1: only the cpp_args define may be visible here. */
+#ifdef CTHING
+#error "Wrong local argument set"
+#endif
+
+#ifndef CPPTHING
+#error "Local argument not set"
+#endif
+
+extern "C" int func();
+
+int main(int argc, char **argv) {
+ return func();
+}
--- /dev/null
+/* C++ half of exe2 (no per-target args): neither define may be visible. */
+#ifdef CTHING
+#error "Local C argument set in wrong target"
+#endif
+
+#ifdef CPPTHING
+#error "Local CPP argument set in wrong target"
+#endif
+
+extern "C" int func();
+
+int main(int argc, char **argv) {
+ return func();
+}
--- /dev/null
+/* Fixture symbol; main.c checks that this returns exactly 42. */
+int func() { return 42; }
--- /dev/null
+/* Second object in the extraction test; value is arbitrary but fixed. */
+int retval() { return 43; }
--- /dev/null
+int func();
+
+/* Exits 0 only when linked against the object that defines func() == 42. */
+int main(int argc, char **argv) {
+ return func() == 42 ? 0 : 1;
+}
--- /dev/null
+project('object extraction', 'c')
+
+# extract_objects() cannot work in unity builds (sources are merged into a
+# single compilation unit), so skip the whole test there.
+if meson.is_unity()
+ message('Skipping extraction test because this is a Unity build.')
+else
+ lib1 = shared_library('somelib', 'src/lib.c')
+ lib2 = shared_library('somelib2', 'lib.c', 'lib2.c')
+
+ # Both the single-file and the list forms of extract_objects() are exercised.
+ obj1 = lib1.extract_objects('src/lib.c')
+ obj2 = lib2.extract_objects(['lib.c'])
+
+ e1 = executable('main1', 'main.c', objects : obj1)
+ e2 = executable('main2', 'main.c', objects : obj2)
+
+ test('extraction test 1', e1)
+ test('extraction test 2', e2)
+endif
--- /dev/null
+/* Same contract as src/lib.c: the extracted object must provide 42. */
+int func() { return 42; }
--- /dev/null
+project('endian check', 'c')
+
+# Propagate the configure-time endianness detection into the C test, which
+# re-detects at runtime and compares the two results.
+if host_machine.endian() == 'big'
+ add_global_arguments('-DIS_BE', language : 'c')
+endif
+
+test('endiantest', executable('prog', 'prog.c'))
--- /dev/null
+#include<stdint.h>
+
+/* Runtime byte-order probe: 1 on big-endian hosts, 0 on little-endian. */
+int is_big_endian(void) {
+    const uint32_t probe = 1;
+    const uint8_t *first_byte = (const uint8_t *) &probe;
+    return (*first_byte == 1) ? 0 : 1;
+}
+
+
+/* Succeed only when the runtime probe agrees with Meson's configure-time
+ detection (communicated via the IS_BE define). */
+int main(int argc, char **argv) {
+    int detected = is_big_endian();
+    int expected;
+#ifdef IS_BE
+    expected = 1;
+#else
+    expected = 0;
+#endif
+    return (detected == expected) ? 0 : 1;
+}
--- /dev/null
+usr/lib/prefixsomelib.suffix
--- /dev/null
+/* Payload of the versioned library; the value itself is arbitrary. */
+int myFunc() { return 55; }
--- /dev/null
+project('library versions', 'c')
+
+# Custom name_prefix/name_suffix must be honoured on install; the expected
+# installed path is listed in the installed-files fixture.
+shared_library('somelib', 'lib.c',
+ name_prefix : 'prefix',
+ name_suffix : 'suffix',
+ install_dir : 'lib',
+ install : true)
+
+subdir('subdir')
--- /dev/null
+# Test that using files generated with configure_file as sources works.
+# We do this inside a subdir so that the path isn't accidentally correct
+# because there is no structure in the build dir.
+# An empty configuration_data() makes configure_file a plain copy.
+genlib = configure_file(input : '../lib.c',
+ output : 'genlib.c',
+ configuration : configuration_data())
+shared_library('genlib', genlib,
+ install : false)
--- /dev/null
+/* Template processed by configure_file(); @number@ is substituted from
+ the configuration_data set in include/meson.build. */
+#ifndef CONFIG_H_
+#define CONFIG_H_
+
+#define RETURN_VALUE @number@
+
+#endif
--- /dev/null
+# RETURN_VALUE must expand to 0 so the test binary exits successfully.
+conf_data = configuration_data()
+conf_data.set('number', '0')
+
+configure_file(input:'config.h.in', output:'config.h', configuration:conf_data)
--- /dev/null
+project('subdirconfig', 'c')
+
+# The generated config.h lives in the build tree; include_directories()
+# must make it visible to sources compiled from src/.
+inc = include_directories('include')
+
+subdir('include')
+subdir('src')
--- /dev/null
+# 'inc' is inherited from the top-level meson.build.
+exe = executable('prog', 'prog.c', include_directories : inc)
+test('subdir config', exe)
--- /dev/null
+#include "config.h"
+
+/* RETURN_VALUE is generated as 0 by configure_file(). */
+int main(int argc, char **argv) {
+ return RETURN_VALUE;
+}
--- /dev/null
+int func() { return 0; }
--- /dev/null
+project('pipeline test', 'c')
+
+# We need to run this executable locally so build it with
+# the host compiler.
+e1 = executable('srcgen', 'srcgen.c', native : true)
+
+# Generate a source file that needs to be included in the build.
+# The generator also writes a Make-style depfile so the backend can track
+# the input -> output dependency.
+gen = generator(e1, \
+ depfile : '@BASENAME@.d',
+ output : '@BASENAME@.c', # Line continuation inside arguments should work without needing a "\".
+ arguments : ['@INPUT@', '@OUTPUT@', '@DEPFILE@'])
+
+generated = gen.process(['input_src.dat'])
+
+e2 = executable('prog', 'prog.c', generated)
+
+test('pipelined', e2)
--- /dev/null
+int func();
+
+/* func() comes from the generated source (copied from input_src.dat). */
+int main(int argc, char **argv) {
+ return func();
+}
--- /dev/null
+#include<stdio.h>
+#include<assert.h>
+#include<string.h>
+
+#define ARRSIZE 80
+
+/*
+ * Test helper: copies a small source fixture to the output path and writes
+ * a Make-style depfile ("output: input") with spaces escaped as "\ ".
+ * Usage: srcgen <input file> <output file> <dependency file>
+ */
+int main(int argc, char **argv) {
+    char arr[ARRSIZE];
+    char *ofilename;
+    char *ifilename;
+    char *dfilename;
+    FILE *ifile;
+    FILE *ofile;
+    FILE *depfile;
+    size_t bytes;
+    size_t i;
+
+    if(argc != 4) {
+        fprintf(stderr, "%s <input file> <output file> <dependency file>\n", argv[0]);
+        return 1;
+    }
+    ifilename = argv[1];
+    ofilename = argv[2];
+    dfilename = argv[3];
+    ifile = fopen(ifilename, "r"); /* was fopen(argv[1], ...): use the alias consistently */
+    if(!ifile) {
+        fprintf(stderr, "Could not open source file %s.\n", ifilename);
+        return 1;
+    }
+    ofile = fopen(ofilename, "w");
+    if(!ofile) {
+        fprintf(stderr, "Could not open target file %s\n", ofilename);
+        fclose(ifile);
+        return 1;
+    }
+    /* The input fixture must fit the buffer with room to spare. */
+    bytes = fread(arr, 1, ARRSIZE, ifile);
+    assert(bytes < ARRSIZE); /* was a magic 80; keep in sync with the buffer size */
+    assert(bytes > 0);
+    fwrite(arr, 1, bytes, ofile);
+
+    depfile = fopen(dfilename, "w");
+    if(!depfile) {
+        /* BUG FIX: this diagnostic previously printed ofilename. */
+        fprintf(stderr, "Could not open depfile %s\n", dfilename);
+        fclose(ifile);
+        fclose(ofile);
+        return 1;
+    }
+    /* Write "output: input", escaping spaces so Make parses the paths. */
+    for(i = 0; i < strlen(ofilename); i++) {
+        if(ofilename[i] == ' ') {
+            fwrite("\\ ", 1, 2, depfile);
+        } else {
+            fwrite(&ofilename[i], 1, 1, depfile);
+        }
+    }
+    fwrite(": ", 1, 2, depfile);
+    for(i = 0; i < strlen(ifilename); i++) {
+        if(ifilename[i] == ' ') {
+            fwrite("\\ ", 1, 2, depfile);
+        } else {
+            fwrite(&ifilename[i], 1, 1, depfile);
+        }
+    }
+    fwrite("\n", 1, 1, depfile);
+
+    fclose(ifile);
+    fclose(ofile);
+    fclose(depfile);
+    return 0;
+}
--- /dev/null
+/* Static-library payload; value is arbitrary but fixed. */
+int libfunc() { return 3; }
--- /dev/null
+/* Alternate static-library payload selected via the 'source' option. */
+int libfunc2() { return 4; }
--- /dev/null
+project('static library test', 'c')
+
+# The source file is chosen through a build option; the bogus link arg is
+# deliberate garbage that static archiving must silently ignore.
+lib = static_library('mylib', get_option('source'),
+ link_args : '-THISMUSTNOBEUSED') # Static linker needs to ignore all link args.
--- /dev/null
+option('source', type : 'combo', choices : ['libfile.c', 'libfile2.c'], value : 'libfile.c')
--- /dev/null
+project('pipeline test', 'c')
+
+# This is in a subdirectory to make sure
+# we write proper subdir paths to output.
+subdir('src')
--- /dev/null
+#include<stdio.h>
--- /dev/null
+# Build the generator with the host compiler so it can run during the build.
+e1 = executable('srcgen', 'srcgen.c', native : true)
+
+# Generate a header file that needs to be included.
+gen = generator(e1,
+ output : '@BASENAME@.h',
+ arguments : ['@INPUT@', '@OUTPUT@'])
+
+generated = gen.process('input_src.dat')
+
+e2 = executable('prog', 'prog.c', generated)
+
+test('pipelined', e2)
--- /dev/null
+#include"input_src.h"
+
+/* input_src.h is generated; it pulls in stdio.h so printf is declared.
+ NOTE(review): converting a function pointer to void* is not strictly
+ conforming ISO C, but is accepted by the compilers this test targets. */
+int main(int argc, char **argv) {
+ void *foo = printf;
+ if(foo) {
+ return 0;
+ }
+ return 1;
+}
--- /dev/null
+#include<stdio.h>
+#include<assert.h>
+
+#define ARRSIZE 80
+
+/*
+ * Test helper: echoes the input path to stdout and copies the input file
+ * verbatim to the output path.
+ * Usage: srcgen <input file> <output file>
+ */
+int main(int argc, char **argv) {
+    char arr[ARRSIZE];
+    char *ifilename;
+    char *ofilename;
+    FILE *ifile;
+    FILE *ofile;
+    size_t bytes;
+
+    if(argc != 3) {
+        fprintf(stderr, "%s <input file> <output file>\n", argv[0]);
+        return 1;
+    }
+    ifilename = argv[1];
+    ofilename = argv[2];
+    printf("%s\n", ifilename);
+    ifile = fopen(ifilename, "r");
+    if(!ifile) {
+        fprintf(stderr, "Could not open source file %s.\n", ifilename);
+        return 1;
+    }
+    ofile = fopen(ofilename, "w");
+    if(!ofile) {
+        fprintf(stderr, "Could not open target file %s\n", ofilename);
+        fclose(ifile);
+        return 1;
+    }
+    /* The input fixture must fit the buffer with room to spare. */
+    bytes = fread(arr, 1, ARRSIZE, ifile);
+    assert(bytes < ARRSIZE); /* was a magic 80; keep in sync with the buffer size */
+    assert(bytes > 0);
+    fwrite(arr, 1, bytes, ofile);
+
+    fclose(ifile);
+    fclose(ofile);
+    return 0;
+}
--- /dev/null
+project('find program', 'c')
+
+if build_machine.system() == 'windows'
+ # Things Windows does not provide:
+ # - an executable to copy files without prompting
+ # - working command line quoting
+ # - anything that you might actually need
+ # Because of these reasons we only check that
+ # the program can be found.
+ cp = find_program('xcopy')
+else
+ # The first name must not exist; find_program() must fall through to 'cp'.
+ cp = find_program('donotfindme', 'cp')
+ gen = generator(cp, \
+ output : '@BASENAME@.c', \
+ arguments : ['@INPUT@', '@OUTPUT@'])
+
+ generated = gen.process('source.in')
+ e = executable('prog', generated)
+ test('external exe', e)
+endif
--- /dev/null
+/* Copied by the external 'cp' generator; always succeeds. */
+int main(int argc, char **argv) { return 0; }
--- /dev/null
+project('multiline string', 'c')
+
+# The literal newline inside the triple-quoted string must be preserved,
+# so x and y must compare unequal.
+x = '''hello again'''
+y = '''hello
+again'''
+
+if x == y
+ error('Things are wrong.')
+endif
+
+multieol = '''
+'''
+singleeol = '\n'
+
+if multieol != singleeol
+ error('Newline quoting is broken.')
+endif
+
+# And one more for good measure.
+quote1 = ''' ' '''.strip()
+quote2 = '\''
+
+if quote1 != quote2
+ error('Single quote quoting is broken.')
+endif
--- /dev/null
+/* Deliberately broken (nonexistent header): compiler.compiles() must
+ report failure for this file. */
+#include<nonexisting.h>
+void func() { printf("This won't work.\n"); }
--- /dev/null
+project('try compile', 'c', 'cpp')
+
+# Both a string snippet and a files() object must be accepted by
+# compiler.compiles(), for both success and failure cases.
+code = '''#include<stdio.h>
+void func() { printf("Something.\n"); }
+'''
+
+breakcode = '''#include<nonexisting.h>
+void func() { printf("This won't work.\n"); }
+'''
+
+foreach compiler : [meson.get_compiler('c'), meson.get_compiler('cpp')]
+ if compiler.compiles(code, name : 'should succeed') == false
+ error('Compiler ' + compiler.get_id() + ' is fail.')
+ endif
+
+ if compiler.compiles(files('valid.c'), name : 'should succeed') == false
+ error('Compiler ' + compiler.get_id() + ' is fail.')
+ endif
+
+ if compiler.compiles(breakcode, name : 'should fail')
+ error('Compiler ' + compiler.get_id() + ' returned true on broken code.')
+ endif
+
+ if compiler.compiles(files('invalid.c'), name : 'should fail')
+ error('Compiler ' + compiler.get_id() + ' returned true on broken code.')
+ endif
+endforeach
--- /dev/null
+/* Minimal translation unit that must always compile. */
+#include<stdio.h>
+void func() { printf("Something.\n"); }
--- /dev/null
+project('compiler id', 'c')
+
+# Smoke test: get_id() must return a printable string.
+comp = meson.get_compiler('c')
+str = comp.get_id()
+
+message('Compiler name is:')
+message(str)
--- /dev/null
+/* Template: placeholders are filled with cc.sizeof() results. */
+#define INTSIZE @INTSIZE@
+#define WCHARSIZE @WCHARSIZE@
--- /dev/null
+project('sizeof', 'c', 'cpp')
+
+# Test with C
+# The detected sizes are baked into a generated header + program, which
+# then re-checks them against sizeof at runtime.
+cc = meson.get_compiler('c')
+
+intsize = cc.sizeof('int')
+wcharsize = cc.sizeof('wchar_t', prefix : '#include<wchar.h>')
+
+cd = configuration_data()
+cd.set('INTSIZE', intsize)
+cd.set('WCHARSIZE', wcharsize)
+cd.set('CONFIG', 'config.h')
+configure_file(input : 'config.h.in', output : 'config.h', configuration : cd)
+s = configure_file(input : 'prog.c.in', output : 'prog.c', configuration : cd)
+
+e = executable('prog', s)
+test('sizeof test', e)
+
+# Test with C++
+cpp = meson.get_compiler('cpp')
+
+intsize = cpp.sizeof('int')
+wcharsize = cpp.sizeof('wchar_t', prefix : '#include<wchar.h>')
+
+cdpp = configuration_data()
+cdpp.set('INTSIZE', intsize)
+cdpp.set('WCHARSIZE', wcharsize)
+cdpp.set('CONFIG', 'config.hpp')
+configure_file(input : 'config.h.in', output : 'config.hpp', configuration : cdpp)
+spp = configure_file(input : 'prog.c.in', output : 'prog.cc', configuration : cdpp)
+
+epp = executable('progpp', spp)
+test('sizeof test c++', epp)
--- /dev/null
+/* Template: @CONFIG@ is substituted with config.h or config.hpp so the
+ same source serves both the C and the C++ variant of the test. */
+#include "@CONFIG@"
+#include <stdio.h>
+#include <wchar.h>
+
+int main(int argc, char **argv) {
+ if(INTSIZE != sizeof(int)) {
+ fprintf(stderr, "Mismatch: detected int size %d, actual size %d.\n", INTSIZE, (int)sizeof(int));
+ return 1;
+ }
+ if(WCHARSIZE != sizeof(wchar_t)) {
+ fprintf(stderr, "Mismatch: detected wchar size %d, actual size %d.\n", WCHARSIZE, (int)sizeof(wchar_t));
+ return 1;
+ }
+ return 0;
+}
--- /dev/null
+/* Template: set10() turns these into "#define ONE 1" / "#define ZERO 0". */
+#mesondefine ONE
+#mesondefine ZERO
--- /dev/null
+project('set10test', 'c')
+
+# set10() maps booleans to the literals 1 and 0 in the generated header.
+conf = configuration_data()
+conf.set10('ONE', true)
+conf.set10('ZERO', false)
+
+configure_file(input : 'config.h.in',
+ output : 'config.h',
+ configuration : conf)
+
+exe = executable('prog', 'prog.c')
+test('10test', exe)
--- /dev/null
+#include<stdio.h>
+#include"config.h"
+
+/* Verify that set10() produced ONE == 1 and ZERO == 0 in config.h. */
+int main(int argc, char **argv) {
+    if(ONE != 1) {
+        fprintf(stderr, "ONE is not 1.\n");
+        return 1;
+    }
+    if(ZERO != 0) {
+        fprintf(stderr, "ZERO is not 0.\n");
+        return 1; /* BUG FIX: failure was printed but not propagated via the exit code */
+    }
+    return 0;
+}
--- /dev/null
+project('has header', 'c', 'cpp')
+
+host_system = host_machine.system()
+
+non_existant_header = 'ouagadougou.h'
+
+# Copy it into the builddir to ensure that it isn't found even if it's there
+configure_file(input : non_existant_header,
+ output : non_existant_header,
+ configuration : configuration_data())
+
+# Test that the fallback to __has_include also works on all compilers
+if host_system != 'darwin'
+ fallbacks = ['', '\n#undef __has_include']
+else
+ # On Darwin's clang you can't redefine builtin macros so the above doesn't work
+ fallbacks = ['']
+endif
+
+foreach fallback : fallbacks
+ foreach comp : [meson.get_compiler('c'), meson.get_compiler('cpp')]
+ assert(comp.has_header('stdio.h', prefix : fallback), 'Stdio missing.')
+
+ # stdio.h doesn't actually need stdlib.h, but just test that setting the
+ # prefix does not result in an error.
+ assert(comp.has_header('stdio.h', prefix : '#include <stdlib.h>' + fallback),
+ 'Stdio missing.')
+
+ # XInput.h should not require type definitions from windows.h, but it does
+ # require macro definitions. Specifically, it requires an arch setting for
+ # VS2015 at least.
+ # We only do this check on MSVC because MinGW often defines its own wrappers
+ # that pre-include windows.h
+ if comp.get_id() == 'msvc'
+ assert(comp.has_header('XInput.h', prefix : '#include <windows.h>' + fallback),
+ 'XInput.h should not be missing on Windows')
+ assert(comp.has_header('XInput.h', prefix : '#define _X86_' + fallback),
+ 'XInput.h should not need windows.h')
+ endif
+
+ # Test that the following GCC bug doesn't happen:
+ # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=80005
+ # https://github.com/mesonbuild/meson/issues/1458
+ if host_system == 'linux'
+ assert(comp.has_header('linux/if.h', prefix : fallback),
+ 'Could not find <linux/if.h>')
+ endif
+
+ # This header exists in the source and the builddir, but we still must not
+ # find it since we are looking in the system directories.
+ assert(not comp.has_header(non_existant_header, prefix : fallback),
+ 'Found non-existant header.')
+ endforeach
+endforeach
--- /dev/null
+#define OMG_THIS_SHOULDNT_BE_FOUND
--- /dev/null
+project('run command', 'c')
+
+# Plain executable invocation: stdout/stderr/returncode must round-trip.
+if build_machine.system() == 'windows'
+ c = run_command('cmd', '/c', 'echo', 'hello')
+else
+ c = run_command('echo', 'hello')
+endif
+
+correct = 'hello'
+
+if c.returncode() != 0
+ error('Executing echo failed.')
+endif
+
+result = c.stdout().strip()
+
+if result != correct
+ error('Getting stdout failed.')
+endif
+
+if c.stderr() != ''
+ error('Extra text in stderr.')
+endif
+
+# Now the same with a script.
+
+if build_machine.system() == 'windows'
+ cs = run_command('scripts/hello.bat')
+else
+ cs = run_command('scripts/hello.sh')
+endif
+
+if cs.returncode() != 0
+ error('Executing script failed.')
+endif
+
+if cs.stdout().strip() != correct
+ error('Getting stdout failed (script).')
+endif
+
+if cs.stderr() != ''
+ error('Extra text in stderr (script).')
+endif
+
+# We should be able to have files() in argument
+f = files('meson.build')
+
+if build_machine.system() == 'windows'
+ c = run_command('cmd', '/c', 'echo', f)
+else
+ c = run_command('echo', f)
+endif
+
+if c.returncode() != 0
+ error('Using files() in argument failed.')
+endif
+
--- /dev/null
+@ECHO OFF
+REM Emit exactly "hello" for the run_command stdout check.
+ECHO hello
--- /dev/null
+#!/bin/sh
+
+echo hello
--- /dev/null
+/* Fixture for cc.run(): compiles fine but exits with status 1. */
+int main(int argc, char **argv) { return 1; }
--- /dev/null
+project('tryrun', 'c', 'cpp')
+
+# Complex to exercise all code paths.
+# When cross compiling, cc.run() needs either an exe wrapper (use the cross
+# compilers) or must fall back to the native compilers.
+if meson.is_cross_build()
+ if meson.has_exe_wrapper()
+ compilers = [meson.get_compiler('c', native : false), meson.get_compiler('cpp', native : false)]
+ else
+ compilers = [meson.get_compiler('c', native : true), meson.get_compiler('cpp', native : true)]
+ endif
+else
+ compilers = [meson.get_compiler('c'), meson.get_compiler('cpp')]
+endif
+
+ok_code = '''#include<stdio.h>
+int main(int argc, char **argv) {
+ printf("%s\n", "stdout");
+ fprintf(stderr, "%s\n", "stderr");
+ return 0;
+}
+'''
+
+error_code = '''int main(int argc, char **argv) {
+ return 1;
+}
+'''
+
+no_compile_code = '''int main(int argc, char **argv) {
+'''
+
+# Each case is exercised both as a string snippet and as a files() object.
+INPUTS = [
+ ['String', ok_code, error_code, no_compile_code],
+ ['File', files('ok.c'), files('error.c'), files('no_compile.c')],
+]
+
+foreach cc : compilers
+ foreach input : INPUTS
+ type = input[0]
+ ok = cc.run(input[1], name : type + ' should succeed')
+ err = cc.run(input[2], name : type + ' should fail')
+ noc = cc.run(input[3], name : type + ' does not compile')
+
+ if noc.compiled()
+ error(type + ' compilation fail test failed.')
+ else
+ message(type + ' fail detected properly.')
+ endif
+
+ if ok.compiled()
+ message(type + ' compilation worked.')
+ else
+ error(type + ' compilation did not work.')
+ endif
+
+ if ok.returncode() == 0
+ message(type + ' return code ok.')
+ else
+ error(type + ' return code fail')
+ endif
+
+ if err.returncode() == 1
+ message(type + ' bad return code ok.')
+ else
+ error(type + ' bad return code fail.')
+ endif
+
+ if ok.stdout().strip() == 'stdout'
+ message(type + ' stdout ok.')
+ else
+ message(type + ' bad stdout.')
+ endif
+
+ if ok.stderr().strip() == 'stderr'
+ message(type + ' stderr ok.')
+ else
+ message(type + ' bad stderr.')
+ endif
+ endforeach
+endforeach
--- /dev/null
+int main(int argc, char **argv) {
--- /dev/null
+/* Fixture for cc.run(): prints fixed strings on stdout/stderr, exits 0. */
+#include<stdio.h>
+int main(int argc, char **argv) {
+ printf("%s\n", "stdout");
+ fprintf(stderr, "%s\n", "stderr");
+ return 0;
+}
--- /dev/null
+/* Standard cross-platform export macro used throughout these fixtures. */
+#if defined _WIN32 || defined __CYGWIN__
+ #define DLL_PUBLIC __declspec(dllexport)
+#else
+ #if defined __GNUC__
+ #define DLL_PUBLIC __attribute__ ((visibility("default")))
+ #else
+ #pragma message ("Compiler does not support symbol visibility.")
+ #define DLL_PUBLIC
+ #endif
+#endif
+
+int DLL_PUBLIC libfunc() {
+ return 3;
+}
--- /dev/null
+# Smoke test: building a shared library must succeed.
+project('shared library test', 'c')
+lib = shared_library('mylib', 'libfile.c')
--- /dev/null
+project('logicopts', 'c')
+
+# Exhaustive truth-table checks for literals, variables, and/or chains,
+# 'not', and and/or precedence in the Meson interpreter.
+t = true
+f = false
+
+if (true)
+ message('Ok.')
+else
+ error('Not ok.')
+endif
+
+if (false)
+ error('Not ok.')
+else
+ message('Ok.')
+endif
+
+if (f)
+ error('Not ok.')
+else
+ message('Ok.')
+endif
+
+if (t)
+ message('Ok.')
+else
+ error('Not ok.')
+endif
+
+if true and t
+ message('Ok.')
+else
+ error('Not ok.')
+endif
+
+if t and false
+ error('Not ok.')
+else
+ message('Ok.')
+endif
+
+if f and t
+ error('Not ok.')
+else
+ message('Ok.')
+endif
+
+if f or false
+ error('Not ok.')
+else
+ message('Ok.')
+endif
+
+if true or f
+ message('Ok.')
+else
+ error('Not ok.')
+endif
+
+if t or true
+ message('Ok.')
+else
+ error('Not ok.')
+endif
+
+if not true
+ error('Negation failed.')
+else
+ message('Ok.')
+endif
+
+if not f
+ message('Ok.')
+else
+ error('Negation failed.')
+endif
+
+
+if f or f or f or f or f or f or f or f or t
+ message('Ok.')
+else
+ error('Chain of ors failed.')
+endif
+
+if t and t and t and t and t and t and t and t and f
+ error('Chain of ands failed.')
+else
+ message('Ok.')
+endif
+
+# 'and' binds tighter than 'or', so this is (t and t) or t.
+if t and t or t
+ message('Ok.')
+else
+ error('Combination of and-or failed.')
+endif
--- /dev/null
+project('elseif', 'c')
+
+# Covers all three elif outcomes: first branch taken, elif taken, else taken.
+t = true
+f = false
+
+if true
+ message('Ok.')
+elif true
+ error('Error')
+else
+ error('Error')
+endif
+
+if f
+ error('Error.')
+elif t
+ message('Ok')
+else
+ error('Error')
+endif
+
+if f
+ error('Error.')
+elif false
+ error('Error')
+else
+ message('Ok')
+endif
--- /dev/null
+project('string formatting', 'c')
+
+# Covers format(), predicates (startswith/endswith/contains), case mapping,
+# underscorify, conversions, join, version_compare, strip, and the exact
+# backslash-escape semantics of single- vs triple-quoted literals.
+templ = '@0@bar@1@'
+
+assert(templ.format('foo', 'baz') == 'foobarbaz', 'Basic string formatting is broken.')
+
+assert('@0@'.format(1) == '1', 'String number formatting is broken.')
+
+assert('@0@'.format(true) == 'true', 'String boolean formatting is broken.')
+
+templ2 = '@0@'
+subs2 = '42'
+
+assert(templ2.format(subs2) == '42', 'String formatting with variables is broken.')
+
+assert('@@0@@ @@1@@'.format(1, 2) == '@1@ @2@', 'String format is recursive.')
+
+long = 'abcde'
+prefix = 'abc'
+suffix = 'cde'
+
+assert(long.startswith(prefix), 'Prefix.')
+
+assert(not long.startswith(suffix), 'Not prefix.')
+
+assert(long.endswith(suffix), 'Suffix.')
+
+assert(not long.endswith(prefix), 'Not suffix.')
+
+assert(long.contains(prefix), 'Does not contain prefix')
+
+assert(long.contains(suffix), 'Does not contain suffix')
+
+assert(long.contains('bcd'), 'Does not contain middle part')
+
+assert(not long.contains('dc'), 'Broken contains')
+
+assert(long.to_upper() == 'ABCDE', 'Broken to_upper')
+
+assert(long.to_upper().to_lower() == long, 'Broken to_lower')
+
+assert('struct stat.st_foo'.underscorify() == 'struct_stat_st_foo', 'Broken underscorify')
+
+assert('#include <foo/bar.h>'.underscorify() == '_include__foo_bar_h_', 'Broken underscorify')
+
+# case should not change, space should be replaced, numbers are ok too
+assert('Do SomeThing 09'.underscorify() == 'Do_SomeThing_09', 'Broken underscorify')
+
+assert('3'.to_int() == 3, 'String int conversion does not work.')
+
+assert(true.to_string() == 'true', 'bool string conversion failed')
+assert(false.to_string() == 'false', 'bool string conversion failed')
+assert(true.to_string('yes', 'no') == 'yes', 'bool string conversion with args failed')
+assert(false.to_string('yes', 'no') == 'no', 'bool string conversion with args failed')
+assert('@0@'.format(true) == 'true', 'bool string formatting failed')
+
+assert(' '.join(['a', 'b', 'c']) == 'a b c', 'join() array broken')
+assert(''.join(['a', 'b', 'c']) == 'abc', 'empty join() broken')
+assert(' '.join(['a']) == 'a', 'single join broken')
+
+version_number = '1.2.8'
+
+assert(version_number.version_compare('>=1.2.8'), 'Version_compare gt broken')
+assert(not version_number.version_compare('>1.2.8'), 'Version_compare greater broken')
+assert(not version_number.version_compare('<1.2.8'), 'Version_compare less broken')
+assert(version_number.version_compare('<=1.2.8'), 'Version_compare le broken')
+assert(version_number.version_compare('==1.2.8'), 'Version_compare eq broken')
+assert(not version_number.version_compare('!=1.2.8'), 'Version_compare neq broken')
+
+assert(version_number.version_compare('<2.0'), 'Version_compare major less broken')
+assert(version_number.version_compare('>0.9'), 'Version_compare major greater broken')
+
+assert(' spaces tabs '.strip() == 'spaces tabs', 'Spaces and tabs badly stripped')
+assert('''
+multiline string '''.strip() == '''multiline string''', 'Newlines badly stripped')
+assert('"1.1.20"'.strip('"') == '1.1.20', '" badly stripped')
+assert('"1.1.20"'.strip('".') == '1.1.20', '". badly stripped')
+assert('"1.1.20" '.strip('" ') == '1.1.20', '". badly stripped')
+
+# Triple-quoted strings take backslashes literally; single-quoted strings
+# process escapes. The pairs below pin down exactly that difference.
+bs_b = '''\b'''
+bs_bs_b = '''\\b'''
+nl = '''
+'''
+bs_n = '''\n'''
+bs_nl = '''\
+'''
+bs_bs_n = '''\\n'''
+bs_bs_nl = '''\\
+'''
+
+assert('\b' == bs_b, 'Single backslash broken')
+assert('\\b' == bs_b, 'Double backslash broken')
+assert('\\\b' == bs_bs_b, 'Three backslash broken')
+assert('\\\\b' == bs_bs_b, 'Four backslash broken')
+assert('\n' == nl, 'Newline escape broken')
+assert('\\n' == bs_n, 'Double backslash broken before n')
+assert('\\\n' == bs_nl, 'Three backslash broken before n')
+assert('\\\\n' == bs_bs_n, 'Four backslash broken before n')
+assert('\\\\\n' == bs_bs_nl, 'Five backslash broken before n')
--- /dev/null
+project('has function', 'c', 'cpp')
+
+host_system = host_machine.system()
+
+# This is used in the `test_compiler_check_flags_order` unit test
+unit_test_args = '-I/tmp'
+defines_has_builtin = '''#ifndef __has_builtin
+#error "no __has_builtin"
+#endif
+'''
+compilers = [meson.get_compiler('c'), meson.get_compiler('cpp')]
+
+foreach cc : compilers
+ if not cc.has_function('printf', prefix : '#include<stdio.h>',
+ args : unit_test_args)
+ error('"printf" function not found (should always exist).')
+ endif
+
+ # Should also be able to detect it without specifying the header
+ # We check for a different function here to make sure the result is
+ # not taken from a cache (ie. the check above)
+ # On MSVC fprintf is defined as an inline function in the header, so it cannot
+ # be found without the include.
+ if cc.get_id() != 'msvc'
+ assert(cc.has_function('fprintf', args : unit_test_args),
+ '"fprintf" function not found without include (on !msvc).')
+ else
+ assert(cc.has_function('fprintf', prefix : '#include <stdio.h>',
+ args : unit_test_args),
+ '"fprintf" function not found with include (on msvc).')
+ # Compiler intrinsics
+ assert(cc.has_function('strcmp'),
+ 'strcmp intrinsic should have been found on MSVC')
+ assert(cc.has_function('strcmp', prefix : '#include <string.h>'),
+ 'strcmp intrinsic should have been found with #include on MSVC')
+ endif
+
+ if cc.has_function('hfkerhisadf', prefix : '#include<stdio.h>',
+ args : unit_test_args)
+ error('Found non-existent function "hfkerhisadf".')
+ endif
+
+ if cc.has_function('hfkerhisadf', args : unit_test_args)
+ error('Found non-existent function "hfkerhisadf".')
+ endif
+
+ # With glibc on Linux lchmod is a stub that will always return an error,
+ # we want to detect that and declare that the function is not available.
+ # We can't check for the C library used here of course, but if it's not
+ # implemented in glibc it's probably not implemented in any other 'slimmer'
+ # C library variants either, so the check should be safe either way hopefully.
+ if host_system == 'linux' or host_system == 'darwin'
+ assert (cc.has_function('poll', prefix : '#include <poll.h>',
+ args : unit_test_args),
+ 'couldn\'t detect "poll" when defined by a header')
+ lchmod_prefix = '#include <sys/stat.h>\n#include <unistd.h>'
+ if host_system == 'linux'
+ assert (not cc.has_function('lchmod', prefix : lchmod_prefix,
+ args : unit_test_args),
+ '"lchmod" check should have failed')
+ else
+ # macOS and *BSD have lchmod
+ assert (cc.has_function('lchmod', prefix : lchmod_prefix,
+ args : unit_test_args),
+ '"lchmod" check should have succeeded')
+ endif
+ # Check that built-ins are found properly both with and without headers
+ assert(cc.has_function('alloca', args : unit_test_args),
+ 'built-in alloca must be found on ' + host_system)
+ assert(cc.has_function('alloca', prefix : '#include <alloca.h>',
+ args : unit_test_args),
+ 'built-in alloca must be found with #include')
+ if not cc.compiles(defines_has_builtin, args : unit_test_args)
+ assert(not cc.has_function('alloca',
+ prefix : '#include <alloca.h>\n#undef alloca',
+ args : unit_test_args),
+ 'built-in alloca must not be found with #include and #undef')
+ endif
+ endif
+
+ # For some functions one needs to define _GNU_SOURCE before including the
+ # right headers to get them picked up. Make sure we can detect these functions
+ # as well without any prefix
+ if cc.has_header_symbol('sys/socket.h', 'recvmmsg',
+ prefix : '#define _GNU_SOURCE',
+ args : unit_test_args)
+ # We assume that if recvmmsg exists sendmmsg does too
+ assert (cc.has_function('sendmmsg', args : unit_test_args),
+ 'Failed to detect function "sendmmsg" (should always exist).')
+ endif
+endforeach
--- /dev/null
+project('has member', 'c', 'cpp')
+
+# has_member()/has_members() checks against struct tm, which every libc has.
+compilers = [meson.get_compiler('c'), meson.get_compiler('cpp')]
+
+foreach cc : compilers
+ if not cc.has_member('struct tm', 'tm_sec', prefix : '#include<time.h>')
+ error('Did not detect member of "struct tm" that exists: "tm_sec"')
+ endif
+
+ if cc.has_member('struct tm', 'tm_nonexistent', prefix : '#include<time.h>')
+ error('Not existing member "tm_nonexistent" found.')
+ endif
+
+ if not cc.has_members('struct tm', 'tm_sec', 'tm_min', prefix : '#include<time.h>')
+ error('Did not detect members of "struct tm" that exist: "tm_sec" "tm_min"')
+ endif
+
+ if cc.has_members('struct tm', 'tm_sec', 'tm_nonexistent2', prefix : '#include<time.h>')
+ error('Not existing member "tm_nonexistent2" found.')
+ endif
+endforeach
--- /dev/null
+project('alignment', 'c', 'cpp')
+
+compilers = [meson.get_compiler('c'), meson.get_compiler('cpp')]
+
+foreach cc : compilers
+ # These tests should return the same value on all
+ # platforms. If (and when) they don't, fix 'em up.
+ if cc.alignment('char') != 1
+ error('Alignment of char misdetected.')
+ endif
+
+ ptr_size = cc.sizeof('void*')
+ dbl_alignment = cc.alignment('double')
+
+ # These tests are not thorough. Doing this properly
+ # would take a lot of work because it is strongly
+ # platform and compiler dependent. So just check
+ # that they produce something fairly sane.
+
+ if ptr_size == 8 or ptr_size == 4
+ message('Size of ptr ok.')
+ else
+ error('Size of ptr misdetected.')
+ endif
+
+ if dbl_alignment == 8 or dbl_alignment == 4
+ message('Alignment of double ok.')
+ else
+ error('Alignment of double misdetected.')
+ endif
+endforeach
--- /dev/null
+usr/bin/prog?exe
--- /dev/null
+int libfun();
+
+/* libfun() lives in lib1, which itself links lib2 and lib3; exit status is
+ lib2fun() + lib3fun() == 0 when the whole chain resolved. */
+int main() {
+ return libfun();
+}
--- /dev/null
+project('libchain', 'c')
+
+# lib1 is defined in subdir/ and transitively links lib2 and lib3.
+subdir('subdir')
+e = executable('prog', 'main.c', link_with : lib1, install : true)
+test('tst', e)
--- /dev/null
+int lib2fun();
+int lib3fun();
+
+/* Standard cross-platform export macro used throughout these fixtures. */
+#if defined _WIN32 || defined __CYGWIN__
+ #define DLL_PUBLIC __declspec(dllexport)
+#else
+ #if defined __GNUC__
+ #define DLL_PUBLIC __attribute__ ((visibility("default")))
+ #else
+ #pragma message ("Compiler does not support symbol visibility.")
+ #define DLL_PUBLIC
+ #endif
+#endif
+
+int DLL_PUBLIC libfun() {
+ return lib2fun() + lib3fun();
+}
--- /dev/null
+subdir('subdir2')
+subdir('subdir3')
+
+# lib1 pulls in both sibling libraries; the executable links only lib1.
+lib1 = shared_library('lib1', 'lib1.c', install : false, link_with : [lib2, lib3])
--- /dev/null
+/* Standard cross-platform export macro used throughout these fixtures. */
+#if defined _WIN32 || defined __CYGWIN__
+ #define DLL_PUBLIC __declspec(dllexport)
+#else
+ #if defined __GNUC__
+ #define DLL_PUBLIC __attribute__ ((visibility("default")))
+ #else
+ #pragma message ("Compiler does not support symbol visibility.")
+ #define DLL_PUBLIC
+ #endif
+#endif
+
+int DLL_PUBLIC lib2fun() {
+ return 0;
+}
--- /dev/null
+lib2 = shared_library('lib2', 'lib2.c', install : false)
--- /dev/null
+/* Standard cross-platform export macro used throughout these fixtures. */
+#if defined _WIN32 || defined __CYGWIN__
+ #define DLL_PUBLIC __declspec(dllexport)
+#else
+ #if defined __GNUC__
+ #define DLL_PUBLIC __attribute__ ((visibility("default")))
+ #else
+ #pragma message ("Compiler does not support symbol visibility.")
+ #define DLL_PUBLIC
+ #endif
+#endif
+
+int DLL_PUBLIC lib3fun() {
+ return 0;
+}
--- /dev/null
+lib3 = shared_library('lib3', 'lib3.c', install : false)
--- /dev/null
+project('options', 'c')
+
+# Every option kind (string, boolean, combo, array) plus a base option and
+# a builtin option must report its declared default.
+if get_option('testoption') != 'optval'
+ error('Incorrect value to test option')
+endif
+
+if get_option('other_one') != false
+ error('Incorrect value to boolean option.')
+endif
+
+if get_option('combo_opt') != 'combo'
+ error('Incorrect value to combo option.')
+endif
+
+if get_option('array_opt') != ['one', 'two']
+ message(get_option('array_opt'))
+ error('Incorrect value for array option')
+endif
+
+# If the default changes, update test cases/unit/13 reconfigure
+if get_option('b_lto') != false
+ error('Incorrect value in base option.')
+endif
+
+if get_option('includedir') != 'include'
+ error('Incorrect value in builtin option.')
+endif
--- /dev/null
+# One declaration per option kind; defaults are asserted by meson.build.
+option('testoption', type : 'string', value : 'optval', description : 'An option to do something')
+option('other_one', type : 'boolean', value : false)
+option('combo_opt', type : 'combo', choices : ['one', 'two', 'combo'], value : 'combo')
+option('array_opt', type : 'array', choices : ['one', 'two', 'three'], value : ['one', 'two'])
+option('free_array_opt', type : 'array')
--- /dev/null
+#include<stdio.h>
+#include<string.h>
+
+/* Exits 0 only when invoked exactly as: prog first second */
+int main(int argc, char **argv) {
+    const char *arg_one;
+    const char *arg_two;
+
+    if(argc != 3) {
+        fprintf(stderr, "Incorrect number of arguments.\n");
+        return 1;
+    }
+    arg_one = argv[1];
+    arg_two = argv[2];
+    if(strcmp(arg_one, "first") != 0) {
+        fprintf(stderr, "First argument is wrong.\n");
+        return 1;
+    }
+    if(strcmp(arg_two, "second") != 0) {
+        fprintf(stderr, "Second argument is wrong.\n");
+        return 1;
+    }
+    return 0;
+}
--- /dev/null
+#!/usr/bin/env python3
+
+import sys
+import shutil
+
+shutil.copyfile(sys.argv[1], sys.argv[2])
--- /dev/null
+#include<stdio.h>
+#include<string.h>
+#include<stdlib.h>
+
+/*
+ * Checks the environment prepared by the copied environment() object:
+ * "first" must hold the overridden value, the other variables keep their
+ * values, and PATH must have had "fakepath" appended (so "fakepath:" must
+ * not appear as a prefix or middle component).
+ */
+int main(int argc, char **argv) {
+    if(strcmp(getenv("first"), "something-else") != 0) {
+        fprintf(stderr, "First envvar is wrong. %s\n", getenv("first"));
+        return 1;
+    }
+    if(strcmp(getenv("second"), "val2") != 0) {
+        fprintf(stderr, "Second envvar is wrong.\n");
+        return 1;
+    }
+    if(strcmp(getenv("third"), "val3:and_more") != 0) {
+        fprintf(stderr, "Third envvar is wrong.\n");
+        return 1;
+    }
+    if(strstr(getenv("PATH"), "fakepath:") != NULL) {
+        /* BUG FIX: this diagnostic was a copy-paste of the "third" message. */
+        fprintf(stderr, "PATH is wrong.\n");
+        return 1;
+    }
+    return 0;
+}
--- /dev/null
+#include<stdio.h>
+#include<string.h>
+#include<stdlib.h>
+
+/*
+ * Checks the environment prepared by the original environment() object:
+ * all three variables keep their declared values, and PATH must have had
+ * "fakepath" appended (so "fakepath:" must not appear as a prefix or
+ * middle component).
+ */
+int main(int argc, char **argv) {
+    if(strcmp(getenv("first"), "val1") != 0) {
+        fprintf(stderr, "First envvar is wrong. %s\n", getenv("first"));
+        return 1;
+    }
+    if(strcmp(getenv("second"), "val2") != 0) {
+        fprintf(stderr, "Second envvar is wrong.\n");
+        return 1;
+    }
+    if(strcmp(getenv("third"), "val3:and_more") != 0) {
+        fprintf(stderr, "Third envvar is wrong.\n");
+        return 1;
+    }
+    if(strstr(getenv("PATH"), "fakepath:") != NULL) {
+        /* BUG FIX: this diagnostic was a copy-paste of the "third" message. */
+        fprintf(stderr, "PATH is wrong.\n");
+        return 1;
+    }
+    return 0;
+}
--- /dev/null
+project('test features', 'c')
+
+e1 = executable('cmd_args', 'cmd_args.c')
+e2 = executable('envvars', 'envvars.c')
+e3 = executable('env2vars', 'env2vars.c')
+
+env = environment()
+env.set('first', 'val1')
+env.set('second', 'val2')
+env.set('third', 'val3', 'and_more', separator: ':')
+env.append('PATH', 'fakepath', separator: ':')
+
+# Make sure environment objects are copied on assignment and we can
+# change the copy without affecting the original environment object.
+env2 = env
+env2.set('first', 'something-else')
+
+test('command line arguments', e1, args : ['first', 'second'])
+test('environment variables', e2, env : env)
+test('environment variables 2', e3, env : env2)
+
+# https://github.com/mesonbuild/meson/issues/2211#issuecomment-327741571
+env_array = ['MESONTESTING=picklerror']
+testfile = files('testfile.txt')
+testerpy = find_program('tester.py')
+test('file arg', testerpy, args : testfile, env : env_array)
+
+copy = find_program('copyfile.py')
+tester = executable('tester', 'tester.c')
+testfilect = custom_target('testfile',
+ input : testfile,
+ output : 'outfile.txt',
+ build_by_default : true,
+ command : [copy, '@INPUT@', '@OUTPUT@'])
+test('custom target arg', tester, args : testfilect, env : env_array)
--- /dev/null
+#include <stdio.h>
+#include <string.h>
+#include <fcntl.h>
+#include <errno.h>
+
+#ifndef _MSC_VER
+#include <unistd.h>
+#endif
+
+int main(int argc, char **argv) {
+ char data[10];
+ int fd, size;
+
+ if (argc != 2) {
+ fprintf(stderr, "Incorrect number of arguments, got %i\n", argc);
+ return 1;
+ }
+ fd = open(argv[1], O_RDONLY);
+ if (fd < 0) {
+ fprintf(stderr, "First argument is wrong.\n");
+ return 1;
+ }
+
+ size = read(fd, data, 8);
+ if (size < 0) {
+ fprintf(stderr, "Failed to read: %s\n", strerror(errno));
+ return 1;
+ }
+ if (strncmp(data, "contents", 8) != 0) {
+ fprintf(stderr, "Contents don't match, got %s\n", data);
+ return 1;
+ }
+ return 0;
+}
--- /dev/null
#!/usr/bin/env python3

"""Exit non-zero unless the file named by argv[1] holds exactly the expected text."""

import sys

expected = 'contents\n'
with open(sys.argv[1]) as infile:
    actual = infile.read()
if actual != expected:
    sys.exit(1)
--- /dev/null
+usr/bin/user?exe
+usr/share/sublib/sublib.depmf
--- /dev/null
+project('subproj user', 'c',
+ version : '2.3.4',
+ license : 'mylicense')
+
+assert(meson.project_name() == 'subproj user', 'Incorrect project name')
+
+sub = subproject('sublib', version : '1.0.0')
+
+if meson.project_version() != '2.3.4'
+ error('Incorrect master project version string:' + meson.project_version())
+endif
+
+if meson.is_subproject()
+ error('Claimed to be a subproject even though we are the master project.')
+endif
+
+inc = sub.get_variable('i')
+lib = sub.get_variable('l')
+
+e = executable('user', 'user.c', include_directories : inc, link_with : lib, install : true)
+test('subdirtest', e)
+
+meson.install_dependency_manifest('share/sublib/sublib.depmf')
--- /dev/null
+#ifndef SUBDEFS_H_
+#define SUBDEFS_H_
+
+#if defined _WIN32 || defined __CYGWIN__
+#if defined BUILDING_SUB
+ #define DLL_PUBLIC __declspec(dllexport)
+#else
+ #define DLL_PUBLIC __declspec(dllimport)
+#endif
+#else
+ #if defined __GNUC__
+ #define DLL_PUBLIC __attribute__ ((visibility("default")))
+ #else
+ #pragma message ("Compiler does not support symbol visibility.")
+ #define DLL_PUBLIC
+ #endif
+#endif
+
+int DLL_PUBLIC subfunc();
+
+#endif
--- /dev/null
+project('subproject', 'c',
+ version : '1.0.0',
+ license : ['sublicense1', 'sublicense2'])
+
+if not meson.is_subproject()
+ error('Claimed to be master project even though we are a subproject.')
+endif
+
+assert(meson.project_name() == 'subproject', 'Incorrect subproject name')
+
+if meson.project_version() != '1.0.0'
+ error('Incorrect version string in subproject.')
+endif
+
+i = include_directories('include')
+l = shared_library('sublib', 'sublib.c', include_directories : i, install : false,
+ c_args : '-DBUILDING_SUB=2')
+t = executable('simpletest', 'simpletest.c', include_directories : i, link_with : l)
+test('plain', t)
--- /dev/null
+#include<subdefs.h>
+
+int main(int argc, char **argv) {
+ return subfunc() == 42 ? 0 : 1;
+}
--- /dev/null
+#include<subdefs.h>
+
+int DLL_PUBLIC subfunc() {
+ return 42;
+}
--- /dev/null
+#include<subdefs.h>
+#include<stdio.h>
+
+
+int main(int argc, char **argv) {
+ int res;
+ printf("Calling into sublib now.\n");
+ res = subfunc();
+ if(res == 42) {
+ printf("Everything is fine.\n");
+ return 0;
+ } else {
+ printf("Something went wrong.\n");
+ return 1;
+ }
+}
--- /dev/null
+int func() {
+ return 0;
+}
--- /dev/null
+int func2() {
+ return 2;
+}
--- /dev/null
+int func3() {
+ return 3;
+}
--- /dev/null
+int func4() {
+ return 4;
+}
--- /dev/null
+int func();
+
+int main(int argc, char **arg) {
+ return func();
+}
--- /dev/null
+project('static library linking test', 'c')
+
+lib = build_target('mylib', 'libfile.c', 'libfile2.c', 'libfile3.c', 'libfile4.c', target_type : 'static_library')
+exe = executable('prog', 'main.c', link_with : lib)
+
+test('runtest', exe)
--- /dev/null
+project('suboptions', 'c')
+
+subproject('subproject')
+
+if not get_option('opt')
+ error('option unset when it should be set')
+endif
--- /dev/null
+option('opt', type : 'boolean', value : true, description : 'main project option')
--- /dev/null
+project('subproject', 'c')
+
+if get_option('opt')
+ error('option set when it should be unset.')
+endif
--- /dev/null
+option('opt', type : 'boolean', value : false, description : 'subproject option')
--- /dev/null
+usr/include/simple.h
+usr/lib/pkgconfig/simple.pc
+usr/lib/pkgconfig/libfoo.pc
--- /dev/null
+project('pkgconfig-gen', 'c')
+
+pkgg = import('pkgconfig')
+
+lib = shared_library('simple', 'simple.c')
+libver = '1.0'
+h = install_headers('simple.h')
+
+pkgg.generate(
+ libraries : [lib, '-lz'],
+ subdirs : '.',
+ version : libver,
+ name : 'libsimple',
+ filebase : 'simple',
+ description : 'A simple demo library.',
+ requires : 'glib-2.0', # Not really, but only here to test that this works.
+ requires_private : ['gio-2.0', 'gobject-2.0'],
+ libraries_private : [lib, '-lz'],
+)
+
+pkgconfig = find_program('pkg-config', required: false)
+if pkgconfig.found()
+ v = run_command(pkgconfig, '--version').stdout().strip()
+ if v.version_compare('>=0.29')
+ test('pkgconfig-validation', pkgconfig,
+ args: ['--validate', 'simple'],
+ env: ['PKG_CONFIG_PATH=' + meson.current_build_dir() + '/meson-private' ])
+ else
+ message('pkg-config version \'' + v + '\' too old, skipping validate test')
+ endif
+else
+ message('pkg-config not found, skipping validate test')
+endif
+
+# Test that name_prefix='' and name='libfoo' results in '-lfoo'
+lib2 = shared_library('libfoo', 'simple.c',
+ name_prefix : '',
+ version : libver)
+
+pkgg.generate(
+ libraries : lib2,
+ name : 'libfoo',
+ version : libver,
+ description : 'A foo library.',
+ variables : ['foo=bar', 'datadir=${prefix}/data']
+)
--- /dev/null
+#include"simple.h"
+
+int simple_function() {
+ return 42;
+}
--- /dev/null
+#ifndef SIMPLE_H_
+#define SIMPLE_H_
+
+int simple_function();
+
+#endif
--- /dev/null
+Installed cat is installed.
--- /dev/null
+usr/dib/dab/dub/prog?exe
+usr/dib/dab/dub2/prog2?exe
+usr/some/dir/sample.h
+usr/some/dir2/sample.h
+usr/woman/prog.1.gz
+usr/woman2/prog.1.gz
+usr/meow/datafile.cat
+usr/meow2/datafile.cat
+usr/woof/subdir/datafile.dog
+usr/woof2/subdir/datafile.dog
--- /dev/null
+project('custom install dirs', 'c')
+executable('prog', 'prog.c', install : true, install_dir : 'dib/dab/dub')
+executable('prog2', 'prog.c', install : true, install_dir : get_option('prefix') + '/dib/dab/dub2')
+install_headers('sample.h', install_dir : 'some/dir')
+install_headers('sample.h', install_dir : get_option('prefix') + '/some/dir2')
+install_man('prog.1', install_dir : 'woman')
+install_man('prog.1', install_dir : get_option('prefix') + '/woman2')
+install_data('datafile.cat', install_dir : 'meow')
+install_data('datafile.cat', install_dir : get_option('prefix') + '/meow2')
+install_subdir('subdir', install_dir : 'woof')
+install_subdir('subdir', install_dir : get_option('prefix') + '/woof2')
--- /dev/null
+Man up, you.
--- /dev/null
+int main(int argc, char **arv) {
+ return 0;
+}
--- /dev/null
+#ifndef SAMPLE_H
+#define SAMPLE_H
+
+int wackiness();
+
+#endif
--- /dev/null
+Installed dog is installed.
--- /dev/null
+project('sub sub', 'c')
+
+a = subproject('a')
+lib = a.get_variable('l')
+
+exe = executable('prog', 'prog.c', link_with : lib)
+test('basic', exe)
\ No newline at end of file
--- /dev/null
+int func();
+
+int main(int argc, char **argv) {
+ return func() == 42 ? 0 : 1;
+}
--- /dev/null
int func2();

/* Export macro: dllexport on Windows/Cygwin, default visibility with
 * GCC-compatible compilers, empty (with a build-time note) otherwise. */
#if defined _WIN32 || defined __CYGWIN__
  #define DLL_PUBLIC __declspec(dllexport)
#else
  #if defined __GNUC__
    #define DLL_PUBLIC __attribute__ ((visibility("default")))
  #else
    #pragma message ("Compiler does not support symbol visibility.")
    #define DLL_PUBLIC
  #endif
#endif

/* Exported from subproject 'a'; forwards to func2() provided by the
 * nested subproject 'b' this library links against. */
int DLL_PUBLIC func() { return func2(); }

--- /dev/null
+project('a', 'c')
+
+b = subproject('b')
+l = shared_library('a', 'a.c', link_with : b.get_variable('lb'))
--- /dev/null
+#if defined _WIN32 || defined __CYGWIN__
+ #define DLL_PUBLIC __declspec(dllexport)
+#else
+ #if defined __GNUC__
+ #define DLL_PUBLIC __attribute__ ((visibility("default")))
+ #else
+ #pragma message ("Compiler does not support symbol visibility.")
+ #define DLL_PUBLIC
+ #endif
+#endif
+
+int DLL_PUBLIC func2() {
+ return 42;
+}
--- /dev/null
+project('b', 'c')
+
+lb = shared_library('b', 'b.c')
--- /dev/null
+int func1() { return 42; }
--- /dev/null
+int func2() { return 42; }
--- /dev/null
+project('samefile', 'c')
+
+test('basic', executable('prog', 'prog.c', 'd1/file.c', 'd2/file.c'))
--- /dev/null
+int func1();
+int func2();
+
+int main(int argc, char **argv) {
+ return func1() - func2();
+}
--- /dev/null
+int funca() { return 0; }
--- /dev/null
+int funcb() { return 0; }
--- /dev/null
+int funcc() { return 0; }
--- /dev/null
+@ECHO OFF
+echo a.c
+echo b.c
+echo c.c
+echo prog.c
--- /dev/null
+#!/bin/sh
+
+for i in *.c; do
+ echo $i
+done
--- /dev/null
+@ECHO OFF
+echo suba.c
+echo subb.c
+echo subc.c
+echo subprog.c
--- /dev/null
+project('grabber', 'c')
+
+# What this script does is NOT reliable. Simply adding a file in this directory
+# will NOT make it automatically appear in the build. You have to manually
+# re-invoke Meson (not just Ninja) for that to happen. The simplest way
+# is to touch meson-private/coredata.dat.
+
+# This is not the recommended way to do things, but if the tradeoffs are
+# acceptable to you, then we're certainly not going to stop you. Just don't
+# file bugs when it fails. :)
+
+if build_machine.system() == 'windows'
+ c = run_command('grabber.bat')
+ grabber = find_program('grabber2.bat')
+else
+ c = run_command('grabber.sh')
+ grabber = find_program('grabber.sh')
+endif
+
+
+# First test running command explicitly.
+if c.returncode() != 0
+ error('Executing script failed.')
+endif
+
+newline = '''
+'''
+
+sources = c.stdout().strip().split(newline)
+
+e = executable('prog', sources)
+test('grabtest', e)
+
+# Then test using program with find_program
+subdir('subdir')
--- /dev/null
+int funca();
+int funcb();
+int funcc();
+
+int main(int argc, char **argv) {
+ return funca() + funcb() + funcc();
+}
--- /dev/null
+sc = run_command(grabber)
+subsources = sc.stdout().strip().split(newline)
+
+se = executable('subprog', subsources)
+test('subgrabtest', se)
--- /dev/null
+int funca() { return 0; }
--- /dev/null
+int funcb() { return 0; }
--- /dev/null
+int funcc() { return 0; }
--- /dev/null
+int funca();
+int funcb();
+int funcc();
+
+int main(int argc, char **argv) {
+ return funca() + funcb() + funcc();
+}
--- /dev/null
+This is a text only input file.
--- /dev/null
#!/usr/bin/env python

"""Write a dummy output file plus a Make-style depfile listing every file
in the given source directory as a dependency of the output.

Usage: dep.py <srcdir> <depfile> <output>
"""

import sys, os
from glob import glob

_, srcdir, depfile, output = sys.argv

# Every file currently in srcdir becomes a dependency of the output.
depfiles = glob(os.path.join(srcdir, '*'))

# Escape spaces for the Make depfile syntax.  Bug fix: the original used
# the invalid escape sequence '\ ' (a SyntaxWarning/error in modern
# Python); '\\ ' produces the identical backslash-space string.
quoted_depfiles = [x.replace(' ', '\\ ') for x in depfiles]

with open(output, 'w') as f:
    f.write('I am the result of globbing.')
with open(depfile, 'w') as f:
    f.write('%s: %s\n' % (output, ' '.join(quoted_depfiles)))
--- /dev/null
+
+
+mytarget = custom_target('depfile',
+ output : 'dep.dat',
+ depfile : 'dep.dat.d',
+ command : [find_program('dep.py'), meson.current_source_dir(), '@DEPFILE@', '@OUTPUT@'],
+)
--- /dev/null
+usr/subdir/data.dat
--- /dev/null
+project('custom target', 'c')
+
+python = find_program('python3', required : false)
+if not python.found()
+ python = find_program('python')
+endif
+
+# Note that this will not add a dependency to the compiler executable.
+# Code will not be rebuilt if it changes.
+comp = '@0@/@1@'.format(meson.current_source_dir(), 'my_compiler.py')
+# Test that files() in command: works. The compiler just discards it.
+useless = files('installed_files.txt')
+
+mytarget = custom_target('bindat',
+output : 'data.dat',
+input : 'data_source.txt',
+command : [python, comp, '--input=@INPUT@', '--output=@OUTPUT@', useless],
+install : true,
+install_dir : 'subdir'
+)
+
+subdir('depfile')
--- /dev/null
#!/usr/bin/env python3

"""Fake compiler: turns the known text input into a fixed 'binary' output.

Usage: my_compiler.py --input=<file> --output=<file> <extra file>
The trailing extra argument must exist on disk but is otherwise ignored.
"""

import os
import sys

# The extra trailing argument (a files() object in meson.build) must
# exist; it is dropped from the argument list right after the check.
assert(os.path.exists(sys.argv[3]))

args = sys.argv[:-1]

if __name__ == '__main__':
    well_formed = (len(args) == 3
                   and args[1].startswith('--input')
                   and args[2].startswith('--output'))
    if not well_formed:
        print(args[0], '--input=input_file --output=output_file')
        sys.exit(1)
    input_path = args[1].split('=')[1]
    output_path = args[2].split('=')[1]
    with open(input_path) as f:
        contents = f.read()
    if contents != 'This is a text only input file.\n':
        print('Malformed input')
        sys.exit(1)
    with open(output_path, 'w') as ofile:
        ofile.write('This is a binary output file.\n')
--- /dev/null
+This is a text only input file.
--- /dev/null
+usr/subdir/data2.dat
+usr/subdir/data3.dat
--- /dev/null
+project('custom target', 'c')
+
+python = find_program('python3', required : false)
+if not python.found()
+ python = find_program('python')
+endif
+
+# files() is the correct way to do this, but some people
+# do this so test that it works.
+comp = '@0@/@1@'.format(meson.current_source_dir(), 'my_compiler.py')
+comp2 = '@0@/@1@'.format(meson.current_source_dir(), 'my_compiler2.py')
+infile = files('data_source.txt')[0]
+
+mytarget = custom_target('bindat',
+ output : 'data.dat',
+ command : [python, comp, infile, '@OUTPUT@'],
+)
+
+mytarget2 = custom_target('bindat2',
+ output : 'data2.dat',
+ command : [python, comp2, mytarget, '@OUTPUT@'],
+ install : true,
+ install_dir : 'subdir'
+)
+
+mytarget3 = custom_target('bindat3',
+ output : 'data3.dat',
+ input : [mytarget],
+ command : [python, comp2, '@INPUT@', '@OUTPUT@'],
+ install : true,
+ install_dir : 'subdir'
+)
+
+subdir('usetarget')
--- /dev/null
#!/usr/bin/env python

"""Fake compiler: validates the known text input and writes a fixed 'binary' output."""

import sys


def compile_file(src_path, dest_path):
    """Check src_path holds the expected text, then emit the canned output."""
    with open(src_path) as f:
        if f.read() != 'This is a text only input file.\n':
            print('Malformed input')
            sys.exit(1)
    with open(dest_path, 'w') as ofile:
        ofile.write('This is a binary output file.\n')


if __name__ == '__main__':
    if len(sys.argv) != 3:
        print(sys.argv[0], 'input_file output_file')
        sys.exit(1)
    compile_file(sys.argv[1], sys.argv[2])
--- /dev/null
#!/usr/bin/env python

"""Second-stage fake compiler: consumes the first stage's 'binary' output."""

import sys


def compile_file(src_path, dest_path):
    """Check src_path holds the stage-one output, then emit the stage-two output."""
    with open(src_path) as f:
        if f.read() != 'This is a binary output file.\n':
            print('Malformed input')
            sys.exit(1)
    with open(dest_path, 'w') as ofile:
        ofile.write('This is a different binary output file.\n')


if __name__ == '__main__':
    if len(sys.argv) != 3:
        print(sys.argv[0], 'input_file output_file')
        sys.exit(1)
    compile_file(sys.argv[1], sys.argv[2])
--- /dev/null
+e = executable('myexe', 'myexe.c')
+subexe = find_program('subcomp.py')
+
+custom_target('use_exe',
+ input : e,
+ output : 'subout.res',
+ command : [subexe, '@INPUT@', '@OUTPUT@'],
+)
--- /dev/null
+#include<stdio.h>
+
+int main(int argc, char **argv) {
+ printf("I am myexe.\n");
+ return 0;
+}
--- /dev/null
#!/usr/bin/env python

"""Open the 'binary' input (argv[1]) and write a fixed OK marker to argv[2].

The input contents are never read; opening it just proves it was built.
"""

import sys

with open(sys.argv[1], 'rb') as binary_input, open(sys.argv[2], 'w') as result:
    result.write('Everything ok.\n')
--- /dev/null
#!/usr/bin/env python3

"""Fail loudly when the path given as argv[1] is not a regular file."""

import os
import sys

target = sys.argv[1]
if not os.path.isfile(target):
    raise Exception("Couldn't find {!r}".format(target))
--- /dev/null
#!/usr/bin/env python3

"""Print a fixed success marker."""

print('Success')
\ No newline at end of file
--- /dev/null
#!/usr/bin/env python3

"""Byte-for-byte copy of argv[1] into argv[2] (stand-in for a hex converter)."""

import sys

source_path, dest_path = sys.argv[1], sys.argv[2]

with open(source_path, 'rb') as src:
    payload = src.read()
with open(dest_path, 'wb') as dst:
    dst.write(payload)
--- /dev/null
+#!/usr/bin/env python3
+
+from __future__ import print_function
+
+import sys
+
+plain_arg = sys.argv[1]
+_, filename, _ = plain_arg.split(':')
+try:
+ with open(filename, 'rb') as f:
+ content = f.read()
+except FileNotFoundError:
+ print('Could not open file. Missing dependency?')
+ sys.exit(1)
+print('File opened, pretending to send it somewhere.')
+print(len(content), 'bytes uploaded')
--- /dev/null
+#include<stdio.h>
+
+int main(int argc, char **argv) {
+ if(argc != 2) {
+ printf("I can not haz argument.\n");
+ return 1;
+ } else {
+ printf("I can haz argument: %s\n", argv[1]);
+ }
+ return 0;
+}
--- /dev/null
+project('run target', 'c')
+
+# deprecated format, fix once we remove support for it.
+run_target('mycommand','scripts/script.sh')
+
+# Make it possible to run built programs.
+# In cross builds exe_wrapper should be added if it exists.
+
+exe = executable('helloprinter', 'helloprinter.c')
+run_target('runhello',
+ command : [exe, 'argument'])
+
+converter = find_program('converter.py')
+
+hex = custom_target('exe.hex',
+ input : exe,
+ output : 'exe.hex',
+ command : [converter, '@INPUT@', '@OUTPUT@',
+ ],
+)
+
fakeburner = find_program('fakeburner.py')

# This emulates the Arduino flasher application. It sandwiches the filename
# inside a packed argument. Thus we need to declare it manually.
run_target('upload',
  command : [fakeburner, 'x:@0@:y'.format(exe.full_path())],
  depends : exe,
)

run_target('upload2',
  command : [fakeburner, 'x:@0@:y'.format(hex.full_path())],
  depends : hex,
)
+
+python3 = find_program('python3', required : false)
+if not python3.found()
+ python3 = find_program('python')
+endif
+
+run_target('py3hi',
+ command : [python3, '-c', 'print("I am Python3.")'])
+
+run_target('check_exists',
+ command : [find_program('check_exists.py'), files('helloprinter.c')])
+
+# What if the output of a custom_target is the command to
+# execute. Obviously this will not work as hex is not an
+# executable but test that the output is generated correctly.
+run_target('donotrunme',
+ command : hex)
+
+# Ensure configure files can be passed
+conf = configure_file(
+ input: 'configure.in',
+ output: 'configure',
+ configuration: configuration_data()
+)
+
+run_target('configure_script',
+ command : conf
+)
--- /dev/null
+#!/bin/sh
+
+cd "$MESON_SOURCE_ROOT"
+echo My current directory is `pwd`
+echo Build dir is at $MESON_BUILD_ROOT
--- /dev/null
+project('object generator', 'c')
+
+python = find_program('python3', required : false)
+if not python.found()
+ python = find_program('python')
+endif
+
+# Note that this will not add a dependency to the compiler executable.
+# Code will not be rebuilt if it changes.
+comp = '@0@/@1@'.format(meson.current_source_dir(), 'obj_generator.py')
+
+if host_machine.system() == 'windows'
+ outputname = '@BASENAME@.obj'
+else
+ outputname = '@BASENAME@.o'
+endif
+
+cc = meson.get_compiler('c').cmd_array().get(-1)
+# Generate an object file manually.
+gen = generator(python,
+ output : outputname,
+ arguments : [comp, cc, '@INPUT@', '@OUTPUT@'])
+
+generated = gen.process(['source.c', 'source2.c'])
+
+# Generate an object file with indexed OUTPUT replacement.
+gen2 = generator(python,
+ output : outputname,
+ arguments : [comp, cc, '@INPUT@', '@OUTPUT0@'])
+generated2 = gen2.process(['source3.c'])
+
+e = executable('prog', 'prog.c', generated, generated2)
+
+test('objgen', e)
\ No newline at end of file
--- /dev/null
#!/usr/bin/env python3

# Mimic a binary that generates an object file (e.g. windres).

import sys, subprocess


def build_command(compiler, ifile, ofile):
    """Return the compile-to-object command line for the given compiler."""
    # cl.exe (MSVC) uses its own option syntax; everything else is gcc-like.
    if compiler.endswith('cl'):
        return [compiler, '/nologo', '/MDd', '/Fo' + ofile, '/c', ifile]
    return [compiler, '-c', ifile, '-o', ofile]


if __name__ == '__main__':
    if len(sys.argv) != 4:
        print(sys.argv[0], 'compiler input_file output_file')
        sys.exit(1)
    compiler, ifile, ofile = sys.argv[1:4]
    sys.exit(subprocess.call(build_command(compiler, ifile, ofile)))
--- /dev/null
+int func1_in_obj();
+int func2_in_obj();
+int func3_in_obj();
+
+int main(int argc, char **argv) {
+ return func1_in_obj() + func2_in_obj() + func3_in_obj();
+}
--- /dev/null
+int func1_in_obj() {
+ return 0;
+}
--- /dev/null
+int func2_in_obj() {
+ return 0;
+}
--- /dev/null
+int func3_in_obj() {
+ return 0;
+}
--- /dev/null
+#if defined _WIN32
+ #define DLL_PUBLIC __declspec(dllexport)
+#else
+ #define DLL_PUBLIC __attribute__ ((visibility ("default")))
+#endif
+
+int DLL_PUBLIC cppfunc() {
+ return 42;
+}
--- /dev/null
+int cppfunc();
+
+int main(int argc, char **argv) {
+ return cppfunc() != 42;
+}
--- /dev/null
+usr/bin/prog?exe
--- /dev/null
+#if defined _WIN32 || defined __CYGWIN__
+ #define DLL_PUBLIC __declspec(dllexport)
+#else
+ #if defined __GNUC__
+ #define DLL_PUBLIC __attribute__ ((visibility("default")))
+ #else
+ #pragma message ("Compiler does not support symbol visibility.")
+ #define DLL_PUBLIC
+ #endif
+#endif
+
+int DLL_PUBLIC func() {
+ return 0;
+}
--- /dev/null
/* DLL_IMPORT marks func() as imported from a shared library on
 * Windows/Cygwin; it expands to nothing on other platforms. */
#if defined _WIN32 || defined __CYGWIN__
  #define DLL_IMPORT __declspec(dllimport)
#else
  #define DLL_IMPORT
#endif

int DLL_IMPORT func();

/* Exit with whatever the shared library's func() returns (0 = success). */
int main(int argc, char **arg) {
    return func();
}
--- /dev/null
+project('shared library linking test', 'c', 'cpp')
+
+lib = shared_library('mylib',
+ 'libfile.c' # Split to different lines before and after the comma to test parser.
+ , install : false) # Don't install libraries in common tests; the path is platform-specific
+exe = executable('prog', 'main.c', link_with : lib, install : true)
+
+test('runtest', exe)
+
+cpplib = shared_library('mycpplib', 'cpplib.cpp')
+cppexe = executable('cppprog', 'cppmain.cpp', link_with : cpplib)
+test('cpptest', cppexe)
--- /dev/null
+usr/bin/prog?exe
+usr/diiba/daaba/file.dat
+usr/this/should/also-work.dat
+usr/this/does/something-different.dat.in
--- /dev/null
+project('custom install script', 'c')
+
+executable('prog', 'prog.c', install : true)
+meson.add_install_script('myinstall.py', 'diiba/daaba', 'file.dat')
+meson.add_install_script('myinstall.py', 'this/should', 'also-work.dat')
+
+subdir('src')
--- /dev/null
#!/usr/bin/env python3

"""Custom install script run via meson.add_install_script().

Creates the directory <MESON_INSTALL_DESTDIR_PREFIX>/<argv[1]> and an
empty file named argv[2] inside it.
"""

import os
import sys

prefix = os.environ['MESON_INSTALL_DESTDIR_PREFIX']

dirname = os.path.join(prefix, sys.argv[1])

# exist_ok lets the install be repeated (e.g. a second `ninja install`)
# without crashing on the already-created directory.
os.makedirs(dirname, exist_ok=True)
with open(os.path.join(dirname, sys.argv[2]), 'w') as f:
    f.write('')
--- /dev/null
+#include<stdio.h>
+
+int main(int argc, char **argv) {
+ printf("This is text.\n");
+ return 0;
+}
--- /dev/null
+meson.add_install_script('myinstall.py', 'this/does', 'something-different.dat')
--- /dev/null
#!/usr/bin/env python3

"""Custom install script run via meson.add_install_script().

Creates the directory <MESON_INSTALL_DESTDIR_PREFIX>/<argv[1]> and an
empty file named argv[2] + '.in' inside it.
"""

import os
import sys

prefix = os.environ['MESON_INSTALL_DESTDIR_PREFIX']

dirname = os.path.join(prefix, sys.argv[1])

# exist_ok lets the install be repeated (e.g. a second `ninja install`)
# without crashing on the already-created directory.
os.makedirs(dirname, exist_ok=True)
with open(os.path.join(dirname, sys.argv[2] + '.in'), 'w') as f:
    f.write('')
--- /dev/null
#!/usr/bin/env python3

"""Generate a trivial mylib.h / mylib.c pair in the directory given as argv[1]."""

import sys, os

if len(sys.argv) != 2:
    print(sys.argv[0], '<output dir>')
    # Bug fix: the script used to fall through after printing usage and
    # then crash (or silently ignore extra arguments).
    sys.exit(1)

odir = sys.argv[1]

with open(os.path.join(odir, 'mylib.h'), 'w') as f:
    f.write('int func();\n')
with open(os.path.join(odir, 'mylib.c'), 'w') as f:
    f.write('''int func() {
    return 0;
}
''')
--- /dev/null
+#include"mylib.h"
+
+int main(int argc, char **argv) {
+ return func();
+}
--- /dev/null
+project('source generation', 'c')
+
+ct = custom_target('gen',
+output : ['mylib.h', 'mylib.c'],
+command : [find_program('generator.py'), '@OUTDIR@'],
+)
+
+e = executable('prog', 'main.c', ct)
+test('gentest', e)
--- /dev/null
+project('statchain', 'c')
+
+subdir('subdir')
+# Test that -fPIC in c_args is also accepted
+statlib2 = static_library('stat2', 'stat2.c', c_args : '-fPIC', pic : false)
+# Test that pic is needed for both direct and indirect static library
+# dependencies of shared libraries (on Linux and BSD)
+statlib = static_library('stat', 'stat.c', link_with : [shlib, statlib2], pic : true)
+shlib2 = shared_library('shr2', 'shlib2.c', link_with : statlib)
+exe = executable('prog', 'prog.c', link_with : shlib2)
+test('runtest', exe)
--- /dev/null
int shlibfunc2();
int statlibfunc();

/* Entry point for the static/shared chain test: statlibfunc() forwards
 * to the shared lib's shlibfunc() (42), and shlibfunc2() returns
 * 42 - 18 = 24.  Any other values fail the test. */
int main(int argc, char **argv) {
    if (statlibfunc() != 42)
        return 1;
    if (shlibfunc2() != 24)
        return 1;
    return 0;
}
--- /dev/null
#include "subdir/exports.h"

int statlibfunc(void);
int statlibfunc2(void);

/* Exported from the shr2 shared library: combines the two static-library
 * helpers (42 - 18), so callers expect 24. */
int DLL_PUBLIC shlibfunc2(void) {
    return statlibfunc() - statlibfunc2();
}
--- /dev/null
+#include "subdir/exports.h"
+
+int shlibfunc();
+
+int DLL_PUBLIC statlibfunc() {
+ return shlibfunc();
+}
--- /dev/null
+int statlibfunc2() {
+ return 18;
+}
--- /dev/null
+#pragma once
+
+#if defined _WIN32 || defined __CYGWIN__
+ #define DLL_PUBLIC __declspec(dllexport)
+#else
+ #if defined __GNUC__
+ #define DLL_PUBLIC __attribute__ ((visibility("default")))
+ #else
+ #pragma message ("Compiler does not support symbol visibility.")
+ #define DLL_PUBLIC
+ #endif
+#endif
--- /dev/null
+shlib = shared_library('shar', 'shlib.c')
--- /dev/null
+#include "exports.h"
+
+int DLL_PUBLIC shlibfunc() {
+ return 42;
+}
--- /dev/null
+project('array methods', 'c')
+
+empty = []
+one = ['abc']
+two = ['def', 'ghi']
+combined = [empty, one, two]
+
+if empty.contains('abc')
+ error('Empty is not empty.')
+endif
+
+if one.contains('a')
+ error('One claims to contain a')
+endif
+
+if not one.contains('abc')
+ error('One claims to not contain abc.')
+endif
+
+if one.contains('abcd')
+ error('One claims to contain abcd.')
+endif
+
+if two.contains('abc')
+ error('Two claims to contain abc.')
+endif
+
+if not two.contains('def')
+ error('Two claims not to contain def.')
+endif
+
+if not two.contains('ghi')
+ error('Two claims not to contain ghi.')
+endif
+
+if two.contains('defg')
+ error('Two claims to contain defg.')
+endif
+
+if not combined.contains('abc')
+ error('Combined claims not to contain abc.')
+endif
+
+if not combined.contains('ghi')
+ error('Combined claims not to contain ghi.')
+endif
--- /dev/null
#!/usr/bin/env python3

# NOTE: this file does not have the executable bit set. This tests that
# Meson can automatically parse shebang lines.

"""Read one value from argv[1] and emit '#define RET_VAL <value>' into argv[2]."""

import sys

TEMPLATE = '#define RET_VAL %s\n'

with open(sys.argv[1]) as infile:
    value = infile.readline().strip()
with open(sys.argv[2], 'w') as outfile:
    outfile.write(TEMPLATE % (value, ))
--- /dev/null
+project('custom header generator', 'c')
+
+gen = find_program('makeheader.py')
+
+generated_h = custom_target('makeheader.py',
+output : 'myheader.lh', # Suffix not .h to ensure this works with custom suffixes, too.
+input : 'input.def',
+command : [gen, '@INPUT0@', '@OUTPUT0@', files('somefile.txt')])
+
+prog = executable('prog', 'prog.c', generated_h)
+test('gentest', prog)
--- /dev/null
+#include"myheader.lh"
+
+int main(int argc, char **argv) {
+ return RET_VAL;
+}
--- /dev/null
+#include"source1.h"
+#include"source2.h"
+
+int main(int argc, char **argv) {
+ return func1() + func2();
+}
--- /dev/null
+project('trickier generator', 'cpp')
+
+comp = find_program('mygen.py')
+subdir('subdir')
+
+generated2 = custom_target('generated2',
+ output : ['source2.h', 'source2.cpp'],
+ input : 'data2.dat',
+ command : [comp, '@INPUT0@', '@OUTDIR@'])
+
+exe = executable('prog', 'main.cpp', generated, generated2,
+ include_directories : include_directories('subdir'))
+ test('generated test', exe)
--- /dev/null
#!/usr/bin/env python3

"""Generate sourceN.h / sourceN.cpp in argv[2], where N is read from argv[1]."""

import sys, os

if len(sys.argv) != 3:
    print("You is fail.")
    sys.exit(1)

with open(sys.argv[1]) as datafile:
    val = datafile.read().strip()
outdir = sys.argv[2]

header_path = os.path.join(outdir, 'source%s.h' % val)
source_path = os.path.join(outdir, 'source%s.cpp' % val)

with open(header_path, 'w') as header:
    header.write('int func%s();\n' % val)
with open(source_path, 'w') as source:
    source.write('''int func%s() {
    return 0;
}
''' % val)
--- /dev/null
+generated = custom_target('generated',
+output : ['source1.h', 'source1.cpp'],
+input : 'data.dat',
+command : [comp, '@INPUT0@', '@OUTDIR@'])
--- /dev/null
+usr/share/sub1/data1.dat
+usr/share/sub1/second.dat
+usr/share/sub1/third.dat
+usr/share/sub1/sub2/data2.dat
+usr/share/sub2/one.dat
+usr/share/sub2/dircheck/excluded-three.dat
--- /dev/null
+project('install a whole subdir', 'c')
+
+# A subdir with an exclusion:
+install_subdir('sub2',
+ exclude_files : ['excluded-three.dat'],
+ exclude_directories : ['excluded'],
+ install_dir : 'share')
+
+subdir('subdir')
+# A subdir with write perms only for the owner
+# and read-list perms for owner and group
+install_subdir('sub1', install_dir : 'share', install_mode : ['rwxr-x--t', 'root'])
+install_subdir('sub/sub1', install_dir : 'share')
--- /dev/null
+This is a third data file for sub1 dir.
--- /dev/null
+Test that multiple install_subdirs meld their results.
\ No newline at end of file
--- /dev/null
+install_subdir('sub1', install_dir : 'share',
+ # This mode will be overridden by the mode set in the outer install_subdir
+ install_mode : 'rwxr-x---')
--- /dev/null
+This is a data file in a subdir.
--- /dev/null
+This is a data file in a deeper subdir.
--- /dev/null
+usr/bin/prog1?exe
+usr/bin/prog2?exe
+usr/bin/prog3?exe
--- /dev/null
+project('foreach', 'c')
+
+tests = [['test1', 'prog1', 'prog1.c'],
+ ['test2', 'prog2', 'prog2.c', 'fallback'],
+ ['test3', 'prog3', 'prog3.c', 'urgh']]
+
+assert(tests[0].get(3, 'fallbck') == 'fallbck', 'array #1 fallback did not match')
+assert(tests[1].get(3, 'failbk') == 'fallback', 'array #2 value did not match')
+assert(tests[2].get(3, 'urgh') == 'urgh', 'array #3 value did not match')
+
+foreach i : tests
+ test(i.get(0), executable(i.get(1), i.get(2), install : true))
+
+ # Ensure that changing the tests variable does not
+ # affect ongoing iteration in the foreach loop.
+ #
+ # Being able to do that would make Meson Turing complete and
+ # we definitely don't want that.
+ tests = ['test4', 'prog4', 'prog4.c']
+endforeach
--- /dev/null
+#include<stdio.h>
+
+int main(int argc, char **argv) {
+ printf("This is test #1.\n");
+ return 0;
+}
--- /dev/null
+#include<stdio.h>
+
+int main(int argc, char **argv) {
+ printf("This is test #2.\n");
+ return 0;
+}
--- /dev/null
+#include<stdio.h>
+
+int main(int argc, char **argv) {
+ printf("This is test #3.\n");
+ return 0;
+}
--- /dev/null
+project('number arithmetic', 'c')
+
+if 6 + 4 != 10
+ error('Number addition is broken')
+endif
+if 6 - 4 != 2
+ error('Number subtraction is broken')
+endif
+
+if 6 * 4 != 24
+ error('Number multiplication is broken')
+endif
+if 16 / 4 != 4
+ error('Number division is broken')
+endif
+
+#if (1 / 3) * 3 != 1
+# error('Float interconversion broken')
+#endif
+if (5 / 3) * 3 != 3
+ error('Integer division is broken')
+endif
+
+assert((5 % 2) == 1, 'Integer modulo (odd) is broken')
+assert((4 % 2) == 0, 'Integer modulo (even) is broken')
+
+if 2 * 1 % 2 != 0
+ error('Modulo precedence with multiplication is broken')
+endif
+if 2 + 1 % 2 != 3
+ error('Modulo precedence with addition is broken')
+endif
+if 9 / 9 % 2 != 1
+ error('Modulo precedence with division is broken')
+endif
+if 9 - 9 % 2 != 8
+ error('Modulo precedence with subtraction is broken')
+endif
+
+assert(2.is_even(), 'int is_even() broken for even value')
+assert(not(2.is_odd()), 'int is_odd() broken for even value')
+assert(not(3.is_even()), 'int is_even() broken for odd value')
+assert(3.is_odd(), 'int is_odd() broken for odd value')
+
+assert(3 < 4, 'Lt broken')
+assert(not(4 < 3), 'Lt broken')
+assert(3 <= 4, 'Lte broken')
+assert(not(4 <= 3), 'Lte broken')
+assert(3 <= 3, 'Lte broken')
+
+assert(4 > 3, 'Gt broken')
+assert(not(3 > 4), 'Gt broken')
+assert(4 >= 3, 'Gte broken')
+assert(not(3 >= 4), 'Gte broken')
+assert(3 >= 3, 'Gte broken')
+
+assert(true.to_int() == 1,'bool to_int() broken')
+assert(false.to_int() == 0,'bool to_int() broken')
--- /dev/null
+project('string arithmetic', 'c')
+
+if 'foo' + 'bar' != 'foobar'
+ error('String concatenation is broken')
+endif
+
+if 'foo' + 'bar' + 'baz' != 'foobarbaz'
+ error('Many-string concatenation is broken')
+endif
+
+a = 'a'
+b = 'b'
+
+if a + b + 'c' != 'abc'
+ error('String concat with variables is broken.')
+endif
--- /dev/null
+int func() {
+ int class = 0;
+ return class;
+}
--- /dev/null
+extern "C" int func();
+
+class BreakPlainCCompiler;
+
+int main(int argc, char **argv) {
+ return func();
+}
--- /dev/null
+project('mixed C and C++', 'c', 'cpp')
+exe = executable('prog', 'main.cc', 'func.c')
+test('mixtest', exe)
--- /dev/null
+project('array arithmetic', 'c')
+
+array1 = ['foo', 'bar']
+array2 = ['qux', 'baz']
+
+if array1 + array2 != ['foo', 'bar', 'qux', 'baz']
+ error('Array concatenation is broken')
+endif
+if array2 + array1 != ['qux', 'baz', 'foo', 'bar']
+ error('Array concatenation is broken')
+endif
+
+if array1 + array1 + array1 != ['foo', 'bar', 'foo', 'bar', 'foo', 'bar']
+ error('Many-array concatenation is broken')
+endif
--- /dev/null
+project('arithmetic bidmas', 'c')
+
+if 5 * 3 - 6 / 2 + 1 != 13
+ error('Arithmetic bidmas broken')
+endif
+if 5 * (3 - 6 / 2) + 1 != 1
+ error('Arithmetic bidmas with brackets broken')
+endif
+
+if 5 * 12 / 2 * 3 != 90
+ error('Sequential multiplication and division broken')
+endif
+if 5 * (12 / (2 * 3)) != 10
+ error('Sequential multiplication and division with brackets broken')
+endif
--- /dev/null
+#include<stdio.h>
+#include"version.h"
+
+int main(int argc, char **argv) {
+ printf("Version is %s.\n", version_string);
+ return 0;
+}
--- /dev/null
+project('run always', 'c')
+
+version = '1.0.0'
+
+vgen = find_program('version_gen.py')
+
+version_src = custom_target('Version string',
+input : 'version.c.in',
+output : 'version.c',
+command : [vgen, '@INPUT@', '@OUTPUT@', version],
+build_always : true,
+)
+
+executable('versionprinter', 'main.c', version_src)
--- /dev/null
+#include"version.h"
+
+const char *version_string = "@VERSION@";
--- /dev/null
+#pragma once
+
+const char *version_string;
--- /dev/null
+#!/usr/bin/env python3
+
+import sys, os, subprocess
+
def generate(infile, outfile, fallback):
    """Render *outfile* from *infile*, substituting the @VERSION@ token.

    The version string is taken from ``git describe`` run in the input
    file's directory; if git is missing or fails, *fallback* is used.
    The output file is rewritten only when its content would change, so
    downstream build steps are not retriggered needlessly.
    """
    workdir = os.path.split(infile)[0]
    if workdir == '':
        workdir = '.'
    version = fallback
    try:
        # Only failures to launch or run git fall back to the default;
        # anything else (e.g. KeyboardInterrupt) propagates instead of
        # being silently swallowed by a bare `except:`.
        p = subprocess.Popen(['git', 'describe'], cwd=workdir,
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        (stdo, _) = p.communicate()
        if p.returncode == 0:
            version = stdo.decode().strip()
    except (OSError, subprocess.SubprocessError):
        pass
    with open(infile) as f:
        newdata = f.read().replace('@VERSION@', version)
    try:
        with open(outfile) as f:
            if f.read() == newdata:
                return  # unchanged: keep the old mtime
    except (OSError, UnicodeDecodeError):
        pass  # missing or unreadable output: just (re)write it
    with open(outfile, 'w') as f:
        f.write(newdata)
+
if __name__ == '__main__':
    # Usage: version_gen.py <input> <output> <fallback-version>
    generate(sys.argv[1], sys.argv[2], sys.argv[3])
--- /dev/null
+project('vcstag', 'c')
+
+version_src = vcs_tag(input : 'vcstag.c.in',
+output : 'vcstag.c',
+fallback : '1.0.0')
+
+version_src_custom = vcs_tag(input : 'vcstag.c.in',
+output : 'vcstag-custom.c',
+command : ['git', 'show-ref', '-s', 'refs/heads/master'],
+fallback : '1.0.0')
+
+version_src_fallback = vcs_tag(input : 'vcstag.c.in',
+output : 'vcstag-fallback.c')
+
+executable('tagprog', 'tagprog.c', version_src)
+executable('tagprog-custom', 'tagprog.c', version_src_custom)
+executable('tagprog-fallback', 'tagprog.c', version_src_fallback)
+
--- /dev/null
+#include<stdio.h>
+
+const char *vcstag;
+
+int main(int argc, char **argv) {
+ printf("Version is %s\n", vcstag);
+ return 0;
+}
+
--- /dev/null
+const char *vcstag = "@VCS_TAG@";
+
--- /dev/null
+project('module test', 'c')
+
+modtest = import('modtest')
+modtest.print_hello()
--- /dev/null
+int main(int argc, char **argv) {
+ return 1;
+}
--- /dev/null
+project('should fail', 'c')
+
+exe = executable('prog', 'failing.c')
+test('failing', exe, should_fail : true)
--- /dev/null
+cdata = configuration_data()
+cdata.set('VALUE', '42')
+
+cfile = configure_file(input : 'confdata.in',
+output : 'confdata',
+configuration : cdata)
--- /dev/null
+project('conf file in custom target', 'c')
+
+subdir('inc')
+subdir('src')
--- /dev/null
+custom_target('thing',
+output : 'final.dat',
+input : cfile,
+command : [find_program('mycompiler.py'), '@INPUT@', '@OUTPUT@'])
+
+# Test usage of a `configure_file` as part of the command list
+py3 = find_program('python3', required : false)
+if not py3.found()
+ # Maybe 'python' is Python 3
+ py3 = find_program('python')
+endif
+
+compiler = configure_file(input : 'mycompiler.py',
+ output : 'mycompiler2.py',
+ configuration : configuration_data())
+
+custom_target('thing2',
+output : 'final2.dat',
+input : cfile,
+command : [py3, compiler, '@INPUT@', '@OUTPUT@'])
--- /dev/null
+#!/usr/bin/env python3
+
import sys

# Sanity-check the configured input file, then emit a marker output.
input_path, output_path = sys.argv[1], sys.argv[2]

with open(input_path) as src:
    first_line = src.readline().strip()
if first_line != '42':
    print('Incorrect input')

with open(output_path, 'w') as dst:
    dst.write('Success\n')
--- /dev/null
+project('test is external', 'c')
+
+test('external', find_program('mytest.py'), args : ['correct'])
--- /dev/null
+#!/usr/bin/env python3
+
import sys

# Exit 0 only when the single command-line argument is the literal
# string 'correct'; otherwise report the bad argument and exit 1.
# (Dropped `from __future__ import print_function`: the python3
# shebang makes it a no-op.)
if sys.argv[1] == 'correct':
    print('Argument is correct.')
    sys.exit(0)
print('Argument is incorrect:', sys.argv[1])
sys.exit(1)
--- /dev/null
+#!/usr/bin/env python3
+
import sys
import time

# Deliberate delay: if the dependency edge between the two custom
# targets is missing, the other generator gets to run first and the
# test exposes the bug.
time.sleep(0.5)

with open(sys.argv[1], 'r') as src:
    data = src.read()
with open(sys.argv[2], 'w') as dst:
    dst.write(data)
--- /dev/null
+#!/usr/bin/env python3
+
import os
import sys
from glob import glob

# Exactly one .tmp file must exist in the given directory (produced by
# the other custom target); copy its contents to the output path.
matches = glob(os.path.join(sys.argv[1], '*.tmp'))
assert len(matches) == 1

with open(matches[0], 'r') as source:
    payload = source.read()
with open(sys.argv[2], 'w') as target:
    target.write(payload)
--- /dev/null
+This is a piece of text.
--- /dev/null
+project('custom target dependency', 'c')
+
+# Sometimes custom targets do not take input files
+# but instead do globbing or some similar wackiness.
+# In this case we need to be able to specify a
+# manual dependency between two custom targets,
+# if one needs to be run before the other.
+
+g1 = find_program('gen1.py')
+g2 = find_program('gen2.py')
+
+c1 = custom_target('medput',
+input : 'input.dat',
+output : 'medput.tmp',
+command : [g1, '@INPUT@', '@OUTPUT@'])
+
+custom_target('output',
+output : 'output.dat',
+command : [g2, '@OUTDIR@', '@OUTPUT@'],
+depends : c1)
--- /dev/null
+#include<assert.h>
+char func_b();
+char func_c();
+
+int main(int argc, char **argv) {
+ if(func_b() != 'b') {
+ return 1;
+ }
+ if(func_c() != 'c') {
+ return 2;
+ }
+ return 0;
+}
--- /dev/null
+project('A', 'c')
+
+B = subproject('B')
+b = B.get_variable('b')
+
+C = subproject('C')
+c = C.get_variable('c')
+
+a = executable('a', 'a.c', link_with : [b, c])
+test('a test', a)
--- /dev/null
+#include<stdlib.h>
+#if defined _WIN32 || defined __CYGWIN__
+#define DLL_PUBLIC __declspec(dllexport)
+#else
+ #if defined __GNUC__
+ #define DLL_PUBLIC __attribute__ ((visibility("default")))
+ #else
+ #pragma message ("Compiler does not support symbol visibility.")
+ #define DLL_PUBLIC
+ #endif
+#endif
+
+
+char func_c();
+
+char DLL_PUBLIC func_b() {
+ if(func_c() != 'c') {
+ exit(3);
+ }
+ return 'b';
+}
--- /dev/null
+project('B', 'c')
+C = subproject('C')
+c = C.get_variable('c')
+b = shared_library('b', 'b.c', link_with : c)
--- /dev/null
+#if defined _WIN32 || defined __CYGWIN__
+#define DLL_PUBLIC __declspec(dllexport)
+#else
+ #if defined __GNUC__
+ #define DLL_PUBLIC __attribute__ ((visibility("default")))
+ #else
+ #pragma message ("Compiler does not support symbol visibility.")
+ #define DLL_PUBLIC
+ #endif
+#endif
+
+char DLL_PUBLIC func_c() {
+ return 'c';
+}
--- /dev/null
+project('C', 'c')
+c = shared_library('c', 'c.c')
--- /dev/null
+usr/bin/prog?exe
+usr/libtest/libstat.a
--- /dev/null
+project('install test', 'c', default_options : ['libdir=libtest'])
+
+stlib = static_library('stat', 'stat.c', install : true)
+exe = executable('prog', 'prog.c', install : true)
--- /dev/null
+int main(int argc, char **argv) {
+ return 0;
+}
--- /dev/null
+int func() { return 933; }
--- /dev/null
+#include<assert.h>
+char func_b();
+char func_c();
+
+int main(int argc, char **argv) {
+ if(func_b() != 'b') {
+ return 1;
+ }
+ if(func_c() != 'c') {
+ return 2;
+ }
+ return 0;
+}
--- /dev/null
+project('A', 'c')
+
+# Same as the previous test but use C and B in
+# the opposite order.
+
+C = subproject('C')
+c = C.get_variable('c')
+
+B = subproject('B')
+b = B.get_variable('b')
+
+a = executable('a', 'a.c', link_with : [b, c])
+test('a test', a)
--- /dev/null
+#include<stdlib.h>
+char func_c();
+
+#if defined _WIN32 || defined __CYGWIN__
+#define DLL_PUBLIC __declspec(dllexport)
+#else
+ #if defined __GNUC__
+ #define DLL_PUBLIC __attribute__ ((visibility("default")))
+ #else
+ #pragma message ("Compiler does not support symbol visibility.")
+ #define DLL_PUBLIC
+ #endif
+#endif
+
+char DLL_PUBLIC func_b() {
+ if(func_c() != 'c') {
+ exit(3);
+ }
+ return 'b';
+}
--- /dev/null
+project('B', 'c')
+C = subproject('C')
+c = C.get_variable('c')
+b = shared_library('b', 'b.c', link_with : c)
--- /dev/null
+#if defined _WIN32 || defined __CYGWIN__
+#define DLL_PUBLIC __declspec(dllexport)
+#else
+ #if defined __GNUC__
+ #define DLL_PUBLIC __attribute__ ((visibility("default")))
+ #else
+ #pragma message ("Compiler does not support symbol visibility.")
+ #define DLL_PUBLIC
+ #endif
+#endif
+
+char DLL_PUBLIC func_c() {
+ return 'c';
+}
--- /dev/null
+project('C', 'c')
+c = shared_library('c', 'c.c')
--- /dev/null
+int func() {
+ return 0;
+}
--- /dev/null
+project('file object', 'c')
+
+prog0 = files('prog.c')
+lib0 = files('lib.c')
+test('fobj', executable('fobj', prog0, lib0))
+
+subdir('subdir1')
+subdir('subdir2')
+
--- /dev/null
+#include<stdio.h>
+
+int func(); /* Files in different subdirs return different values. */
+
+int main(int argc, char **argv) {
+ if(func() == 0) {
+ printf("Iz success.\n");
+ } else {
+ printf("Iz fail.\n");
+ return 1;
+ }
+ return 0;
+}
--- /dev/null
+int func() {
+ return 1;
+}
--- /dev/null
+prog1 = files('prog.c')
+lib1 = files('lib.c')
+
+test('subdir0', executable('subdir0', prog0, lib1), should_fail : true)
+test('subdir1', executable('subdir1', prog1, lib0), should_fail : true)
+
+test('subdir2', executable('subdir2', prog1, lib1))
\ No newline at end of file
--- /dev/null
+#include<stdio.h>
+
+int func();
+
+int main(int argc, char **argv) {
+ if(func() == 1) {
+ printf("Iz success.\n");
+ } else {
+ printf("Iz fail.\n");
+ return 1;
+ }
+ return 0;
+}
--- /dev/null
+int func() {
+ return 2;
+}
--- /dev/null
+prog2 = files('prog.c')
+lib2 = files('lib.c')
+
+test('subdir3', executable('subdir3', prog1, lib2), should_fail : true)
+test('subdir4', executable('subdir4', prog2, lib1), should_fail : true)
+
+test('subdir4', executable('subdir5', prog2, lib2))
\ No newline at end of file
--- /dev/null
+#include<stdio.h>
+
+int func();
+
+int main(int argc, char **argv) {
+ if(func() == 2) {
+ printf("Iz success.\n");
+ } else {
+ printf("Iz fail.\n");
+ return 1;
+ }
+ return 0;
+}
--- /dev/null
+#include<assert.h>
+char func_b();
+char func_c();
+
+int main(int argc, char **argv) {
+ if(func_b() != 'b') {
+ return 1;
+ }
+ if(func_c() != 'c') {
+ return 2;
+ }
+ return 0;
+}
--- /dev/null
+#include<stdlib.h>
+char func_c();
+
+#if defined _WIN32 || defined __CYGWIN__
+#define DLL_PUBLIC __declspec(dllexport)
+#else
+ #if defined __GNUC__
+ #define DLL_PUBLIC __attribute__ ((visibility("default")))
+ #else
+ #pragma message ("Compiler does not support symbol visibility.")
+ #define DLL_PUBLIC
+ #endif
+#endif
+
+char DLL_PUBLIC func_b() {
+ if(func_c() != 'c') {
+ exit(3);
+ }
+ return 'b';
+}
--- /dev/null
+project('B', 'c')
+C = subproject('C')
+c = C.get_variable('c')
+b = shared_library('b', 'b.c', link_with : c)
--- /dev/null
+#if defined _WIN32 || defined __CYGWIN__
+#define DLL_PUBLIC __declspec(dllexport)
+#else
+ #if defined __GNUC__
+ #define DLL_PUBLIC __attribute__ ((visibility("default")))
+ #else
+ #pragma message ("Compiler does not support symbol visibility.")
+ #define DLL_PUBLIC
+ #endif
+#endif
+
+char DLL_PUBLIC func_c() {
+ return 'c';
+}
--- /dev/null
+project('C', 'c')
+c = shared_library('c', 'c.c')
--- /dev/null
+project('A', 'c', subproject_dir:'custom_subproject_dir')
+
+B = subproject('B')
+b = B.get_variable('b')
+
+C = subproject('C')
+c = C.get_variable('c')
+
+a = executable('a', 'a.c', link_with : [b, c])
+test('a test', a)
--- /dev/null
+project('has type', 'c', 'cpp')
+
+compilers = [meson.get_compiler('c'), meson.get_compiler('cpp')]
+
+foreach cc : compilers
+ if not cc.has_type('time_t', prefix : '#include<time.h>')
+ error('Did not detect type that exists.')
+ endif
+
+ if cc.has_type('no_time_t', prefix : '#include<time.h>')
+ error('Not existing type found.')
+ endif
+endforeach
--- /dev/null
+project('Extract objects from subdirs.', 'c')
+
+if meson.is_unity()
+ message('Unity build: skipping incompatible test')
+else
+ subdir('src')
+ subdir('tst')
+endif
--- /dev/null
+int first() {
+ return 1001;
+}
--- /dev/null
+first_lib = shared_library('first_lib', 'lib_first.c')
--- /dev/null
+subdir('first')
--- /dev/null
+int first(void);
+
+int main() {
+ return first() - 1001;
+}
--- /dev/null
+first_exe = executable('first_exe', 'exe_first.c',
+ objects : first_lib.extract_objects('lib_first.c'))
+
+test('first_test', first_exe)
--- /dev/null
+subdir('first')
--- /dev/null
+project('internal dependency', 'c')
+
+subdir('proj1')
+subdir('src')
--- /dev/null
+#pragma once
+
+void proj1_func1();
+void proj1_func2();
+void proj1_func3();
--- /dev/null
+incdirs = include_directories('include')
+
+p1lib = static_library('proj1', 'proj1f1.c',
+ include_directories : incdirs
+)
+
+indirect_source = files('proj1f2.c')
+
+proj1_dep = declare_dependency(include_directories : incdirs,
+ link_with : p1lib,
+ sources : ['proj1f3.c', indirect_source])
--- /dev/null
+#include<proj1.h>
+#include<stdio.h>
+
+void proj1_func1() {
+ printf("In proj1_func1.\n");
+}
--- /dev/null
+#include<proj1.h>
+#include<stdio.h>
+
+void proj1_func2() {
+ printf("In proj1_func2.\n");
+}
--- /dev/null
+#include<proj1.h>
+#include<stdio.h>
+
+void proj1_func3() {
+ printf("In proj1_func3.\n");
+}
--- /dev/null
+#include<stdio.h>
+#include<proj1.h>
+
+int main(int argc, char **argv) {
+ printf("Now calling into library.\n");
+ proj1_func1();
+ proj1_func2();
+ proj1_func3();
+ return 0;
+}
--- /dev/null
+exe = executable('projtest', 'main.c', dependencies : proj1_dep)
+test('projtest', exe)
--- /dev/null
+int func();
+
+int main(int argc, char **argv) {
+ return func();
+}
--- /dev/null
+int func();
+
+int main(int argc, char **argv) {
+ return func() == 1 ? 0 : 1;
+}
--- /dev/null
+#if defined _WIN32 || defined __CYGWIN__
+#define DLL_PUBLIC __declspec(dllexport)
+#else
+ #if defined __GNUC__
+ #define DLL_PUBLIC __attribute__ ((visibility("default")))
+ #else
+ #pragma message ("Compiler does not support symbol visibility.")
+ #define DLL_PUBLIC
+ #endif
+#endif
+
+#if defined SHAR
+int DLL_PUBLIC func() {
+ return 1;
+}
+#elif defined STAT
+int func() {
+ return 0;
+}
+#else
+#error "Missing type definition."
+#endif
+
--- /dev/null
+project('same basename', 'c')
+
+subdir('sharedsub')
+subdir('staticsub')
+
+# Use the same source file to check that each top level target
+# has its own unique working directory. If they don't
+# then the .o files will clobber each other.
+
+exe1 = executable('name', 'exe1.c', link_with : stlib)
+exe2 = executable('name2', 'exe2.c', link_with : shlib)
+
+test('static', exe1)
+test('shared', exe2)
--- /dev/null
+shlib = shared_library('name', '../lib.c', c_args : '-DSHAR')
--- /dev/null
+# On Windows a static lib is now libfoo.a, so it does not conflict with foo.lib
+# from the shared library above
+stlib = static_library('name', '../lib.c', c_args : '-DSTAT')
--- /dev/null
+#pragma once
+
+int entity_func1();
+int entity_func2();
--- /dev/null
+#include"entity.h"
+
+#ifdef USING_ENT
+#error "Entity use flag leaked into entity compilation."
+#endif
+
+int entity_func1() {
+ return 5;
+}
--- /dev/null
+#include<entity.h>
+
+int entity_func2() {
+ return 9;
+}
--- /dev/null
+entity_lib = static_library('entity', 'entity1.c')
+
+entity_dep = declare_dependency(link_with : [[entity_lib]],
+ include_directories : include_directories('.'),
+ sources : 'entity2.c',
+ compile_args : ['-DUSING_ENT=1'],
+ version : '1.2.3',
+ link_args : []) # No simple way of testing linker flags :(.
+
+assert(entity_dep.version().version_compare('==1.2.3'), 'Declare_dep has incorrect version string.')
--- /dev/null
+#include<entity.h>
+#include<stdio.h>
+
+#ifndef USING_ENT
+#error "Entity use flag not used for compilation."
+#endif
+
+int main(int argc, char **argv) {
+ if(entity_func1() != 5) {
+ printf("Error in func1.\n");
+ return 1;
+ }
+ if(entity_func2() != 9) {
+ printf("Error in func2.\n");
+ return 2;
+ }
+ return 0;
+}
--- /dev/null
+project('declare dependency', 'c')
+
+subdir('entity')
+
+exe = executable('dep_user', 'main.c',
+ dependencies : entity_dep)
+test('dep', exe)
+
+# just to make sure [] works as a no-op dep here
+executable('dummy', 'main.c',
+ dependencies : [entity_dep, []])
+
+# simple case
+declare_dependency(dependencies : entity_dep)
+
+# nested deps should be flattened
+declare_dependency(dependencies : [entity_dep])
+declare_dependency(dependencies : [[entity_dep]])
+
+# check that [] properly works as a no-op dep in declare_dependency() too
+declare_dependency(dependencies : [])
+declare_dependency(dependencies : [[]])
+declare_dependency(dependencies : [entity_dep, []])
+declare_dependency(dependencies : [[], entity_dep])
--- /dev/null
+#pragma once
+
+int func1();
+int func2();
+int func3();
+int func4();
--- /dev/null
+#include"extractor.h"
+
+int func4() {
+ return 4;
+}
--- /dev/null
+project('extract all', 'c')
+
+a = static_library('a', 'one.c', 'two.c')
+b = static_library('b', 'three.c', 'four.c')
+c = static_library('c',
+ objects : [a.extract_all_objects(), b.extract_all_objects()])
+
+e = executable('proggie', 'prog.c', link_with : c)
+test('extall', e)
--- /dev/null
+#include"extractor.h"
+
+int func1() {
+ return 1;
+}
--- /dev/null
+#include"extractor.h"
+#include<stdio.h>
+
+int main(int argc, char **argv) {
+ if((1+2+3+4) != (func1() + func2() + func3() + func4())) {
+ printf("Arithmetic is fail.\n");
+ return 1;
+ }
+ return 0;
+}
--- /dev/null
+#include"extractor.h"
+
+int func3() {
+ return 3;
+}
--- /dev/null
+#include"extractor.h"
+
+int func2() {
+ return 2;
+}
--- /dev/null
+project('add language', 'c')
+
+test('C', executable('cprog', 'prog.c'))
+
+assert(add_languages('cpp'), 'Add_languages returned false on success')
+assert(not add_languages('klingon', required : false), 'Add_languages returned true on failure.')
+
+test('C++', executable('cppprog', 'prog.cc'))
--- /dev/null
+#include<stdio.h>
+
+int main(int argc, char **argv) {
+ printf("I am plain C.\n");
+ return 0;
+}
--- /dev/null
+#include<iostream>
+
+int main(int, char**) {
+ std::cout << "I am C++.\n";
+ return 0;
+}
--- /dev/null
+usr/include/rootdir.h
+usr/include/subdir/subdir.h
+usr/include/vanished.h
+usr/include/fileheader.h
--- /dev/null
+project('header install')
+
+as_array = ['subdir.h']
+
+subdir('vanishing_subdir')
+subdir('sub')
+
+h1 = install_headers('rootdir.h')
+h2 = install_headers(as_array, subdir : 'subdir')
+h3 = install_headers(subheader)
+
--- /dev/null
+/* This header goes to include dir root. */
+
+int root_func();
--- /dev/null
+#pragma once
+
+#define LIFE "Is life! Na naa, naa-na na."
--- /dev/null
+subheader = files('fileheader.h')
+
--- /dev/null
+/* This file goes to subdirectory of include root. */
+
+int subdir_func();
--- /dev/null
+install_headers('vanished.h')
--- /dev/null
+#pragma once
+
+/* This is a header in a subdirectory. Make sure it installs into
+ * /prefix/include and not /prefix/include/vanishing_subdir.
+ */
--- /dev/null
+#include<stdio.h>
+
+int main(int argc, char **argv) {
+ printf("I'm a main project bar.\n");
+ return 0;
+}
--- /dev/null
+project('toplevel bar', 'c')
+
+subproject('foo')
+
+executable('bar', 'bar.c')
+run_target('nop', 'true')
--- /dev/null
+#include<stdio.h>
+
+int main(int argc, char **argv) {
+ printf("I'm a subproject bar.\n");
+ return 0;
+}
--- /dev/null
+project('subfoo', 'c')
+
+executable('bar', 'bar.c')
+run_target('nop', 'true')
--- /dev/null
+project('plusassign', 'c')
+
+x = []
+
+x += 'a'
+
+if x.length() != 1
+ error('Incorrect append')
+endif
+
+if x[0] != 'a'
+ error('Incorrect append 2.')
+endif
+
+y = x
+
+x += 'b'
+
+if y.length() != 1
+ error('Immutability broken.')
+endif
+
+if y[0] != 'a'
+ error('Immutability broken 2.')
+endif
+
+if x.length() != 2
+ error('Incorrect append 3')
+endif
+
+if x[0] != 'a'
+ error('Incorrect append 4.')
+endif
+
+if x[1] != 'b'
+ error('Incorrect append 5.')
+endif
+
+# Now with evil added: append yourself.
+
+x += x
+
+if x.length() != 4
+ error('Incorrect selfappend.')
+endif
+
+# += on strings
+
+bra = 'bra'
+foo = 'A'
+foo += bra
+foo += 'cada'
+foo += bra
+assert (foo == 'Abracadabra', 'string += failure [@0@]'.format(foo))
+assert (bra == 'bra', 'string += modified right argument!')
+foo += ' ' + foo
+assert (foo == 'Abracadabra Abracadabra', 'string += failure [@0@]'.format(foo))
+
+# += on ints
+
+foo = 5
+foo += 6
+assert (foo == 11, 'int += failure [@0@]'.format(foo))
+bar = 99
+foo += bar
+assert (foo == 110, 'int += failure [@0@]'.format(foo))
+assert (bar == 99, 'int += modified right argument"')
+bar += foo + 1
+assert (bar == 210, 'int += failure [@0@]'.format(bar))
+assert (foo == 110, 'int += modified right argument"')
--- /dev/null
+project('foo', 'c')
+
+subdir('subdir1/subdir2')
--- /dev/null
+error('This should not be called.')
--- /dev/null
+message('I\'m in subdir subdir.')
--- /dev/null
+project('access private', 'c')
+
+subdir('stlib')
+subdir('user')
--- /dev/null
+#!/usr/bin/env python3
+
import sys, os

# Usage: compiler.py <input.def> <output-dir>
# Fake "compiler" for the generator test: for each input definition
# file it emits a matching C source/header pair in the output dir.
assert(len(sys.argv) == 3)

# Header template: declares one function named after the input file.
h_templ = '''#pragma once
unsigned int %s();
'''

# Source template: defines that function; it simply returns 0.
c_templ = '''#include"%s.h"

unsigned int %s() {
    return 0;
}
'''

ifile = sys.argv[1]
outdir = sys.argv[2]

# Function/file stem: the input's basename with its extension removed.
base = os.path.splitext(os.path.split(ifile)[-1])[0]

cfile = os.path.join(outdir, base + '.c')
hfile = os.path.join(outdir, base + '.h')

c_code = c_templ % (base, base)
h_code = h_templ % base

with open(cfile, 'w') as f:
    f.write(c_code)
with open(hfile, 'w') as f:
    f.write(h_code)
--- /dev/null
+genbin = find_program('compiler.py')
+
+gen = generator(genbin,
+ output : ['@BASENAME@.h', '@BASENAME@.c'],
+ arguments : ['@INPUT@', '@BUILD_DIR@']
+ )
+
+defs = ['foo1.def', 'foo2.def']
+generated = gen.process(defs)
+
+stlib = static_library('st', generated)
+st_priv_inc = stlib.private_dir_include()
--- /dev/null
+#include"foo1.h"
+#include"foo2.h"
+
+int main(int argc, char **argv) {
+ return foo1() + foo2();
+}
--- /dev/null
+exe = executable('libuser', 'libuser.c',
+ link_with : stlib,
+ include_directories : st_priv_inc)
+
+test('libuser', exe)
--- /dev/null
+project('default options', 'cpp', 'c', default_options : [
+ 'prefix=/absoluteprefix',
+ 'buildtype=debugoptimized',
+ 'cpp_std=c++11',
+ 'cpp_eh=none',
+ 'warning_level=3',
+ ])
+
+cpp_id = meson.get_compiler('cpp').get_id()
+
+assert(get_option('buildtype') == 'debugoptimized', 'Build type default value wrong.')
+
+if cpp_id == 'msvc'
+ cpp_eh = get_option('cpp_eh')
+ assert(cpp_eh == 'none', 'MSVC eh value is "' + cpp_eh + '" instead of "none"')
+else
+ cpp_std = get_option('cpp_std')
+ assert(cpp_std == 'c++11', 'C++ std value is "' + cpp_std + '" instead of c++11.')
+endif
+
+w_level = get_option('warning_level')
+assert(w_level == '3', 'warning level "' + w_level + '" instead of "3"')
+
+# FIXME. Since we no longer accept invalid options to c_std etc,
+# there is no simple way to test this. Gcc does not seem to expose
+# the C std used in a preprocessor token so we can't check for it.
+# Think of a way to fix this.
+#
+# # Verify that project args are not used when told not to.
+# # MSVC plain C does not have a simple arg to test so skip it.
+# if cpp.get_id() != 'msvc'
+# cc = meson.get_compiler('c')
+# assert(not cc.compiles('int foobar;'), 'Default arg not used in test.')
+# assert(cc.compiles('int foobar;', no_builtin_args : true), 'No_builtin did not disable builtins.')
+# endif
+
--- /dev/null
+#!/usr/bin/env python3
+
import sys
import shutil

# Minimal copy helper: duplicate argv[1] to argv[2].
source, destination = sys.argv[1], sys.argv[2]
shutil.copyfile(source, destination)
--- /dev/null
+project('dep fallback', 'c')
+
+bob = dependency('boblib', fallback : ['boblib', 'bob_dep'], required: false,
+ default_options : 'warning_level=1')
+if not bob.found()
+ error('Bob is actually needed')
+endif
+# boblib subproject exists, but sita_dep doesn't exist
+sita = dependency('sitalib', fallback : ['boblib', 'sita_dep'], required: false)
+# jimmylib subproject doesn't exist
+jimmy = dependency('jimmylib', fallback : ['jimmylib', 'jimmy_dep'], required: false)
+# dummylib subproject fails to configure
+dummy = dependency('dummylib', fallback : ['dummylib', 'dummy_dep'], required: false)
+
+gensrc_py = find_program('gensrc.py')
+gensrc = custom_target('gensrc.c',
+ input : 'tester.c',
+ output : 'gensrc.c',
+ command : [gensrc_py, '@INPUT@', '@OUTPUT@'])
+
+exe = executable('bobtester',
+ [gensrc],
+ dependencies : bob)
+
+test('bobtester', exe)
--- /dev/null
+#include"bob.h"
+
+#ifdef _MSC_VER
+__declspec(dllexport)
+#endif
+const char* get_bob() {
+ return "bob";
+}
--- /dev/null
+#pragma once
+
+#ifdef _MSC_VER
+__declspec(dllimport)
+#endif
+const char* get_bob();
--- /dev/null
+#!/usr/bin/env python3
+
import sys

# Create (or truncate) the target path as an empty file.
target = sys.argv[1]
with open(target, 'w') as handle:
    handle.write('')
--- /dev/null
+project('bob', 'c')
+
+gensrc_py = find_program('genbob.py')
+genbob_h = custom_target('genbob.h',
+ output : 'genbob.h',
+ command : [gensrc_py, '@OUTPUT@'])
+genbob_c = custom_target('genbob.c',
+ output : 'genbob.c',
+ command : [gensrc_py, '@OUTPUT@'])
+
+boblib = library('bob', ['bob.c', genbob_c])
+bobinc = include_directories('.')
+
+bob_dep = declare_dependency(link_with : boblib,
+ sources : [genbob_h],
+ include_directories : bobinc)
--- /dev/null
+project('dummylib', 'c')
+
+dummy_dep = declare_dependency()
+error('this subproject fails to configure')
--- /dev/null
+#include"bob.h"
+#include"genbob.h"
+#include<string.h>
+#include<stdio.h>
+
+int main(int argc, char **argv) {
+ if(strcmp("bob", get_bob()) == 0) {
+ printf("Bob is indeed bob.\n");
+ } else {
+ printf("ERROR: bob is not bob.\n");
+ return 1;
+ }
+ return 0;
+}
--- /dev/null
+#include"ef.h"
+
+DLL_PUBLIC Ef::Ef() : x(99) {
+}
+
+int DLL_PUBLIC Ef::get_x() const {
+ return x;
+}
--- /dev/null
+#pragma once
+
+#if defined _WIN32 || defined __CYGWIN__
+ #define DLL_PUBLIC __declspec(dllexport)
+#else
+ #if defined __GNUC__
+ #define DLL_PUBLIC __attribute__ ((visibility("default")))
+ #else
+ #pragma message ("Compiler does not support symbol visibility.")
+ #define DLL_PUBLIC
+ #endif
+#endif
+
+class Ef {
+private:
+ int x;
+
+public:
+
+ DLL_PUBLIC Ef();
+ int DLL_PUBLIC get_x() const;
+};
--- /dev/null
+#include"ef.h"
+
+#include<iostream>
+
+int main(int, char **) {
+ Ef var;
+ if(var.get_x() == 99) {
+ std::cout << "All is fine.\n";
+ return 0;
+ } else {
+ std::cout << "Something went wrong.\n";
+ return 1;
+ }
+}
--- /dev/null
+project('default library', 'cpp')
+
+flib = library('ef', 'ef.cpp')
+exe = executable('eftest', 'eftest.cpp', link_with : flib)
+test('eftest', exe)
--- /dev/null
+generated_function
--- /dev/null
+#include"data.h"
+
+int main(int, char **) {
+ return generated_function() != 52;
+}
--- /dev/null
+project('selfbuilt custom', 'cpp')
+
+# Build an exe and use it in a custom target
+# whose output is used to build a different exe.
+
+tool = executable('tool', 'tool.cpp', native : true)
+
+hfile = custom_target('datah',
+ output : 'data.h',
+ input : 'data.dat',
+ command : [tool, '@INPUT@', '@OUTPUT@'],
+)
+
+main = executable('mainprog', 'mainprog.cpp', hfile)
+
+test('maintest', main)
--- /dev/null
+#include<iostream>
+#include<fstream>
+#include<string>
+
+using namespace std;
+
+const char prefix[] = "int ";
+const char suffix[] = " () {\n return 52;}\n";
+
// Code generator tool: reads a function name from argv[1] and writes a
// C++ function with that name (returning 52) to argv[2].
int main(int argc, char **argv) {
    if(argc != 3) {
        cout << "You is fail.\n";
        return 1;
    }
    ifstream is(argv[1], ifstream::binary);
    if(!is) {
        cout << "Opening input file failed.\n";
        return 1;
    }
    string funcname;
    is >> funcname; // first whitespace-delimited token is the name
    ofstream os(argv[2], ofstream::binary);
    if(!os) {
        cout << "Opening output file failed.\n";
        return 1;
    }
    os << prefix << funcname << suffix;
    os.close();
    // Check stream state after close() so buffered write errors are
    // reported, not silently dropped.
    if(!os.good()) {
        cout << "Writing data out failed.\n";
        return 1;
    }
    return 0;
}
--- /dev/null
+project('extra args in gen', 'c')
+
+prog = find_program('srcgen.py')
+
+gen = generator(prog,
+ output : '@BASENAME@.c',
+ arguments : ['--input=@INPUT@', '--output=@OUTPUT@', '@EXTRA_ARGS@'])
+
+g1 = gen.process('name.dat')
+g2 = gen.process('name.dat', extra_args: '--upper')
+
+test('basic', executable('basic', 'plain.c', g1))
+test('upper', executable('upper', 'upper.c', g2))
+
+prog2 = find_program('srcgen2.py')
+basename_gen = generator(prog2,
+ output : ['@BASENAME@.tab.c', '@BASENAME@.tab.h'],
+ arguments : ['@BUILD_DIR@', '@BASENAME@', '@INPUT@'])
+
+basename_src = basename_gen.process('name.l')
+
+test('basename', executable('basename', basename_src))
+
+plainname_gen = generator(prog2,
+ output : ['@PLAINNAME@.tab.c', '@PLAINNAME@.tab.h'],
+ arguments : ['@BUILD_DIR@', '@PLAINNAME@', '@INPUT@'])
+
+plainname_src = plainname_gen.process('name.l')
+
+test('plainname', executable('plainname', plainname_src))
+
+prog3 = find_program('srcgen3.py')
+capture_gen = generator(prog3,
+ output : ['@BASENAME@.yy.c'],
+ arguments : ['@INPUT@'],
+ capture : true)
+
+capture_src = capture_gen.process('name.l')
+
+test('capture', executable('capture', capture_src))
--- /dev/null
+int main() {
+return 0;
+}
--- /dev/null
+int bob_mcbob();
+
+int main(int argc, char **argv) {
+ return bob_mcbob();
+}
--- /dev/null
+#!/usr/bin/env python3
+
import sys
import argparse

# Generator test helper: read a function name from --input and write a
# trivial C function with that name to --output; --upper upcases it.
parser = argparse.ArgumentParser()
parser.add_argument('--input', dest='input',
                    help='the input file')
parser.add_argument('--output', dest='output',
                    help='the output file')
parser.add_argument('--upper', dest='upper', action='store_true', default=False,
                    help='Convert to upper case.')

# C source template: one no-op function returning 0.
c_templ = '''int %s() {
    return 0;
}
'''

options = parser.parse_args(sys.argv[1:])

# The first line of the input file is the function name.
with open(options.input) as f:
    funcname = f.readline().strip()
if options.upper:
    funcname = funcname.upper()

with open(options.output, 'w') as f:
    f.write(c_templ % funcname)
--- /dev/null
+#!/usr/bin/env python3
+
import os
import sys
import argparse

# Generator test helper exercising multi-output generators: copies the
# input to <stem>.tab.c and writes a fixed <stem>.tab.h next to it.
parser = argparse.ArgumentParser()
parser.add_argument('target_dir',
                    help='the target dir')
parser.add_argument('stem',
                    help='the stem')
parser.add_argument('input',
                    help='the input file')

options = parser.parse_args(sys.argv[1:])

with open(options.input) as f:
    content = f.read()


# The .tab.c output is a verbatim copy of the input.
output_c = os.path.join(options.target_dir, options.stem + ".tab.c")
with open(output_c, 'w') as f:
    f.write(content)


# The .tab.h output is always this fixed declaration.
output_h = os.path.join(options.target_dir, options.stem + ".tab.h")
h_content = '''#pragma once

int myfun(void);
'''
with open(output_h, 'w') as f:
    f.write(h_content)
--- /dev/null
+#!/usr/bin/env python3
+
import sys
import argparse

# Generator test helper for `capture : true`: echo the stripped
# contents of the input file to stdout; the build system captures
# stdout into the generated source file.
# (Removed the unused `import os` the original carried.)
parser = argparse.ArgumentParser()
parser.add_argument('input',
                    help='the input file')

options = parser.parse_args(sys.argv[1:])

with open(options.input) as f:
    content = f.read().strip()

print(content)
--- /dev/null
+int BOB_MCBOB();
+
+int main(int argc, char **argv) {
+ return BOB_MCBOB();
+}
--- /dev/null
+/* Simple prog that sleeps for a random time. */
+
+#include<stdlib.h>
+#include<time.h>
+#if defined(_WIN32)
+#include<windows.h>
+#endif
+
+/* Sleep for a random duration and exit with success.
+ * POSIX path: nanosleep for up to ~0.2 s; Windows path: Sleep for up
+ * to ~50 ms.  The durations are intentionally random (seeded from the
+ * wall clock) so repeated benchmark runs vary. */
+int main(int argc, char **argv) {
+ srand(time(NULL));
+#if !defined(_WIN32)
+ /* tv_nsec is scaled from rand() into [0, 199999999] nanoseconds. */
+ struct timespec t;
+ t.tv_sec = 0;
+ t.tv_nsec = 199999999.0*rand()/RAND_MAX;
+ nanosleep(&t, NULL);
+#else
+ /* Sleep() takes milliseconds. */
+ Sleep(50.0*rand()/RAND_MAX);
+#endif
+ return 0;
+}
--- /dev/null
+project('benchmark', 'c')
+
+delayer = executable('delayer', 'delayer.c', c_args : '-D_GNU_SOURCE')
+benchmark('delayer', delayer)
--- /dev/null
+usr/bin/prog.exe
--- /dev/null
+project('simple c#', 'cs')
+
+e = executable('prog', 'prog.cs', install : true)
+test('basic', e)
--- /dev/null
+using System;
+
+public class Prog {
+ static public void Main () {
+ Console.WriteLine("C# is working.");
+ }
+}
--- /dev/null
+using System;
+
+public class Helper {
+ public void print() {
+ Console.WriteLine("Library class called.");
+ }
+}
--- /dev/null
+usr/bin/prog.exe
+usr/lib/helper.dll
--- /dev/null
+project('C# library', 'cs')
+
+python3 = import('python3').find_python()
+generated_sources = custom_target('gen_sources',
+ input: 'helper.cs',
+ output: 'helper.cs',
+ command: [python3, '-c',
+ 'import shutil, sys; shutil.copyfile(sys.argv[1], sys.argv[2])',
+ '@INPUT@', '@OUTPUT@']
+)
+
+l = shared_library('helper', generated_sources, install : true)
+
+e = executable('prog', 'prog.cs', link_with : l, install : true)
+test('libtest', e)
--- /dev/null
+using System;
+
+public class Prog {
+ static public void Main () {
+ Helper h = new Helper();
+ h.print();
+ }
+}
--- /dev/null
+<?xml version="1.0" encoding="utf-8"?>
+<root>
+ <xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:msdata="urn:schemas-microsoft-com:xml-msdata">
+ <xsd:element name="root" msdata:IsDataSet="true">
+ <xsd:complexType>
+ <xsd:choice maxOccurs="unbounded">
+ <xsd:element name="data">
+ <xsd:complexType>
+ <xsd:sequence>
+ <xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
+ <xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" />
+ </xsd:sequence>
+ <xsd:attribute name="name" type="xsd:string" msdata:Ordinal="1" />
+ <xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" />
+ <xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" />
+ </xsd:complexType>
+ </xsd:element>
+ <xsd:element name="resheader">
+ <xsd:complexType>
+ <xsd:sequence>
+ <xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
+ </xsd:sequence>
+ <xsd:attribute name="name" type="xsd:string" use="required" />
+ </xsd:complexType>
+ </xsd:element>
+ </xsd:choice>
+ </xsd:complexType>
+ </xsd:element>
+ </xsd:schema>
+<resheader name="resmimetype"><value>text/microsoft-resx</value></resheader><resheader name="version"><value>1.3</value></resheader><resheader name="reader"><value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value></resheader><resheader name="writer"><value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value></resheader><data name="message"><value>Hello from resources!</value></data>
+ </root>
\ No newline at end of file
--- /dev/null
+project('C# resource', 'cs')
+
+e = executable('resprog', 'resprog.cs',
+resources : 'TestRes.resx')
+
+test('restest', e)
--- /dev/null
+using System;
+using System.Resources;
+
+public class Prog {
+
+ static public void Main () {
+ ResourceManager res = new ResourceManager(typeof(TestRes));
+ Console.WriteLine(res.GetString("message"));
+ }
+
+ internal class TestRes {
+ }
+}
--- /dev/null
+Hello World!
--- /dev/null
+project('C# external library', 'cs')
+glib_sharp_2 = dependency('glib-sharp-2.0')
+e = executable('prog', 'prog.cs', dependencies: glib_sharp_2, install : true)
+test('libtest', e, args: [join_paths(meson.current_source_dir(), 'hello.txt')])
--- /dev/null
+using System;
+using GLib;
+
+public class Prog {
+ static public void Main (string[] args) {
+ Console.WriteLine(GLib.FileUtils.GetFileContents(args[0]));
+ }
+}
--- /dev/null
+
+import std.stdio;
+import utils;
+
+void main ()
+{
+ printGreeting ("a Meson D test");
+}
--- /dev/null
+usr/bin/dsimpleapp?exe
--- /dev/null
+project('D Simple Test', 'd')
+
+e = executable('dsimpleapp', ['app.d', 'utils.d'], install : true)
+test('apptest', e)
--- /dev/null
+
+import std.stdio;
+import std.string : format;
+
+void printGreeting (string name)
+{
+ writeln ("Hello, I am %s.".format (name));
+}
--- /dev/null
+
+import libstuff;
+
+void main ()
+{
+ immutable ret = printLibraryString ("foo");
+ assert (ret == 4);
+}
--- /dev/null
+usr/bin/app_s?exe
+usr/lib/libstuff.a
--- /dev/null
+
+import std.stdio;
+import std.string : format;
+
+int printLibraryString (string str)
+{
+ writeln ("Static Library says: %s".format (str));
+ return 4;
+}
--- /dev/null
+project('D Static Library', 'd')
+
+lstatic = static_library('stuff', 'libstuff.d', install : true)
+es = executable('app_s', 'app.d', link_with : lstatic, install : true)
+test('linktest_static', es)
--- /dev/null
+
+import libstuff;
+
+void main ()
+{
+ immutable ret = printLibraryString ("foo");
+ assert (ret == 4);
+}
--- /dev/null
+usr/bin/app_d?exe
+usr/lib/libstuff.so
--- /dev/null
+
+import std.stdio;
+import std.string : format;
+
+int printLibraryString (string str)
+{
+ writeln ("Library says: %s".format (str));
+ return 4;
+}
--- /dev/null
+project('D Shared Library', 'd')
+
+dc = meson.get_compiler('d')
+if dc.get_id() == 'gcc'
+ if dc.version().version_compare('< 8')
+ error('MESON_SKIP_TEST: GDC < 8.0 can not build shared libraries')
+ endif
+endif
+
+ldyn = shared_library('stuff', 'libstuff.d', install : true)
+ed = executable('app_d', 'app.d', link_with : ldyn, install : true)
+test('linktest_dyn', ed)
--- /dev/null
+usr/lib/libsome.so
+usr/lib/libsome.so.0
+usr/lib/libsome.so.1.2.3
+usr/lib/libnoversion.so
+usr/lib/libonlyversion.so
+usr/lib/libonlyversion.so.1
+usr/lib/libonlyversion.so.1.4.5
+usr/lib/libonlysoversion.so
+usr/lib/libonlysoversion.so.5
--- /dev/null
+
+import std.stdio;
+import std.string : format;
+
+@safe
+int printLibraryString (string str)
+{
+ writeln ("Library says: %s".format (str));
+ return 4;
+}
--- /dev/null
+project('D library versions', 'd')
+
+dc = meson.get_compiler('d')
+if dc.get_id() == 'gcc'
+ if dc.version().version_compare('< 8')
+ error('MESON_SKIP_TEST: GDC < 8.0 can not build shared libraries')
+ endif
+endif
+
+shared_library('some', 'lib.d',
+ version : '1.2.3',
+ soversion : '0',
+ install : true)
+
+shared_library('noversion', 'lib.d',
+ install : true)
+
+shared_library('onlyversion', 'lib.d',
+ version : '1.4.5',
+ install : true)
+
+shared_library('onlysoversion', 'lib.d',
+ # Also test that int soversion is acceptable
+ soversion : 5,
+ install : true)
--- /dev/null
+
+extern(C) int printLibraryString(const char *str);
+
+void main ()
+{
+ immutable ret = printLibraryString ("C foo");
+ assert (ret == 3);
+}
--- /dev/null
+usr/bin/appdc_d?exe
+usr/lib/libstuff.so
+usr/bin/appdc_s?exe
+usr/lib/libstuff.a
--- /dev/null
+#if defined _WIN32 || defined __CYGWIN__
+ #define DLL_PUBLIC __declspec(dllexport)
+#else
+ #if defined __GNUC__
+ #define DLL_PUBLIC __attribute__ ((visibility("default")))
+ #else
+ #pragma message ("Compiler does not support symbol visibility.")
+ #define DLL_PUBLIC
+ #endif
+#endif
+
+#include <stdio.h>
+
+int DLL_PUBLIC printLibraryString(const char *str)
+{
+ printf("C library says: %s", str);
+ return 3;
+}
--- /dev/null
+project('Mixing C and D', 'd', 'c')
+
+ldyn = shared_library('stuff', 'libstuff.c', install : true)
+ed = executable('appdc_d', 'app.d', link_with : ldyn, install : true)
+test('linktest_cdyn', ed)
+
+lstatic = static_library('stuff', 'libstuff.c', install : true)
+es = executable('appdc_s', 'app.d', link_with : lstatic, install : true)
+test('linktest_cstatic', es)
--- /dev/null
+
+import std.stdio;
+
+uint getFour ()
+{
+ auto getTwo ()
+ {
+ return 1 + 1;
+ }
+
+ return getTwo () + getTwo ();
+}
+
+void main ()
+{
+ import core.stdc.stdlib : exit;
+
+ writeln ("Four: ", getFour ());
+ exit (4);
+}
+
+unittest
+{
+ writeln ("TEST");
+ import core.stdc.stdlib : exit;
+
+ assert (getFour () > 2);
+ assert (getFour () == 4);
+
+ // we explicitly terminate here to give the unittest program a different exit
+ // code than the main application has.
+ // (this prevents the regular main() from being executed)
+ exit (0);
+}
--- /dev/null
+usr/bin/dapp?exe
--- /dev/null
+project('D Unittests', 'd')
+
+e = executable('dapp', 'app.d', install : true)
+test('dapp_run', e, should_fail: true)
+
+e_test = executable('dapp_test', 'app.d',
+ d_args: meson.get_compiler('d').unittest_args())
+test('dapp_test', e_test)
--- /dev/null
+
+import say1;
+import say2;
+
+void main ()
+{
+ assert (sayHello1 ("Dave") == 4);
+ assert (sayHello2 ("HAL 9000") == 8);
+}
--- /dev/null
+usr/bin/app_d?exe
+usr/lib/libsay1.so
+usr/lib/libsay1.so.0
+usr/lib/libsay1.so.1.2.3
+usr/lib/libsay2.so
+usr/lib/libsay2.so.1
+usr/lib/libsay2.so.1.2.4
--- /dev/null
+project('D Multiple Versioned Shared Libraries', 'd')
+
+dc = meson.get_compiler('d')
+if dc.get_id() == 'gcc'
+ if dc.version().version_compare('< 8')
+ error('MESON_SKIP_TEST: GDC < 8.0 can not build shared libraries')
+ endif
+endif
+
+ldyn1 = shared_library('say1',
+ 'say1.d',
+ install: true,
+ version : '1.2.3',
+ soversion : '0'
+)
+ldyn2 = shared_library('say2',
+ 'say2.d',
+ install: true,
+ version : '1.2.4',
+ soversion : '1'
+)
+
+ed = executable('app_d', 'app.d', link_with: [ldyn1, ldyn2], install: true)
+test('multilink_test', ed)
--- /dev/null
+
+import std.stdio;
+import std.string : format;
+
+int sayHello1 (string str)
+{
+ writeln ("Hello %s from library 1.".format (str));
+ return 4;
+}
--- /dev/null
+
+import std.stdio;
+import std.string : format;
+
+int sayHello2 (string str)
+{
+ writeln ("Hello %s from library 2.".format (str));
+ return 8;
+}
--- /dev/null
+project('D has arguments test', 'd')
+
+compiler = meson.get_compiler('d')
+
+assert(compiler.compiles('int i;'), 'Basic code test does not compile: ' + compiler.get_id())
+assert(compiler.has_multi_arguments(['-I.', '-J.']), 'Multi argument test does not work: ' + compiler.get_id())
+assert(compiler.has_argument('-I.'), 'Basic argument test does not work: ' + compiler.get_id())
+assert(compiler.has_argument('-flag_a_d_compiler_definitely_does_not_have') == false, 'Basic argument test does not work: ' + compiler.get_id())
--- /dev/null
+
+import std.stdio;
+import std.array : split;
+import std.string : strip;
+
+auto getMenu ()
+{
+ auto foods = import ("food.txt").strip.split ("\n");
+ return foods;
+}
+
+auto getPeople ()
+{
+ return import ("people.txt").strip.split ("\n");
+}
+
+void main (string[] args)
+{
+ import std.array : join;
+ import core.stdc.stdlib : exit;
+
+ immutable request = args[1];
+ if (request == "menu") {
+ version (No_Menu) {
+ } else {
+ writeln ("On the menu: ", getMenu.join (", "));
+ exit (0);
+ }
+ }
+
+ version (With_People) {
+ if (request == "people") {
+ writeln ("People: ", getPeople.join (", "));
+ exit (0);
+ }
+ }
+
+ // we fail here
+ exit (1);
+}
+
+unittest
+{
+ writeln ("TEST");
+ import core.stdc.stdlib : exit;
+
+ writeln(getMenu);
+ assert (getMenu () == ["Spam", "Eggs", "Spam", "Baked Beans", "Spam", "Spam"]);
+
+ exit (0);
+}
--- /dev/null
+Spam
+Eggs
+Spam
+Baked Beans
+Spam
+Spam
--- /dev/null
+Rick
+Morty
+Summer
+Beth
+Jerry
--- /dev/null
+project('D Features', 'd')
+
+# directory for data
+data_dir = join_paths(meson.current_source_dir(), 'data')
+
+e_plain = executable('dapp_menu',
+ 'app.d',
+ d_import_dirs: [data_dir]
+)
+test('dapp_menu_t_fail', e_plain, should_fail: true)
+test('dapp_menu_t', e_plain, args: ['menu'])
+
+# test feature versions and string imports
+e_versions = executable('dapp_versions',
+ 'app.d',
+ d_import_dirs: [data_dir],
+ d_module_versions: ['No_Menu', 'With_People']
+)
+test('dapp_versions_t_fail', e_versions, args: ['menu'], should_fail: true)
+test('dapp_versions_t', e_versions, args: ['people'])
+
+# test everything and unittests
+e_test = executable('dapp_test',
+ 'app.d',
+ d_import_dirs: [data_dir],
+ d_module_versions: ['No_Menu', 'With_People'],
+ d_unittest: true
+)
+test('dapp_test', e_test)
--- /dev/null
+project('valatest', 'c', default_options : 'werror=true')
+
+if find_program('valac', required : false).found()
+ add_languages('vala')
+ valadeps = [dependency('glib-2.0'), dependency('gobject-2.0')]
+ # Must fail due to -Werror and unused variable in C file
+ executable('valaprog', 'prog.vala', 'unused-var.c', dependencies : valadeps)
+else
+ executable('failprog', 'unused-var.c')
+endif
--- /dev/null
+class MainProg : GLib.Object {
+
+ public static int main(string[] args) {
+ stdout.printf("Vala is working.\n");
+ return 0;
+ }
+}
--- /dev/null
+#warning "something"
+
+int
+somelib(void)
+{
+ int unused_var;
+ return 33;
+}
--- /dev/null
+var = 'assignment before project() call'
+project('no worky', 'c')
+
+test('not run', executable('prog', 'prog.c'))
--- /dev/null
+int main(int argc, char **argv) { return 0; }
--- /dev/null
+project('out of bounds', 'c')
+
+x = []
+y = x[0]
--- /dev/null
+project('object arithmetic', 'c')
+
+foo = '5' + meson
--- /dev/null
+project('string arithmetic', 'c')
+
+foo = 'a' + 3
--- /dev/null
+project('array arithmetic', 'c')
+
+foo = ['a', 'b'] * 3
--- /dev/null
+project('foo', 'c')
--- /dev/null
+option('invalid:name', type : 'boolean', value : false)
--- /dev/null
+project('kwarg before arg', 'c')
+
+executable(sources : 'prog.c', 'prog')
--- /dev/null
+int main(int argc, char **argv) { return 0; }
--- /dev/null
+int sub_lib_method(void);
+
+int main() {
+ return 1337 - sub_lib_method();
+}
--- /dev/null
+project('extract subproject object', 'c')
+
+sub = subproject('sub_project')
+lib = sub.get_variable('lib')
+
+exe = executable('exe', 'main.c',
+ objects : lib.extract_objects('sub_lib.c'))
+
+test('extraction test', exe)
--- /dev/null
+project('extract subproject object -- subproject', 'c')
+
+lib = shared_library('sub_lib', 'sub_lib.c')
--- /dev/null
+int sub_lib_method() {
+ return 1337;
+}
--- /dev/null
+int func() {
+ return 0;
+}
--- /dev/null
+project('same name', 'c')
+
+static_library('foo', 'file.c')
+subdir('sub')
--- /dev/null
+int func() {
+ return 5;
+}
--- /dev/null
+static_library('foo', 'file2.c')
--- /dev/null
+project('false plusassign', 'c')
+
+3 += 4
--- /dev/null
+#include<stdio.h>
+
+int main(int argc, char **argv) {
+ printf("Clash 2.\n");
+ return 0;
+}
--- /dev/null
+project('clash', 'c')
+
+# This setup causes a namespace clash when two Meson targets would
+# produce Ninja targets with the same name. It only works on
+# unix, because on Windows the target has a '.exe' suffix.
+#
+# This test might fail to work on different backends or when
+# output location is redirected.
+
+if host_machine.system() == 'windows' or host_machine.system() == 'cygwin'
+ error('This is expected.')
+endif
+
+executable('clash', 'clash.c')
+run_target('clash', 'echo', 'clash 1')
--- /dev/null
+project('missing file', 'c')
+
+executable('prog', 'missing.c')
--- /dev/null
+project('version mismatch', 'c', meson_version : '>100.0.0')
--- /dev/null
+project('master', 'c')
+
+x = subproject('foo', version : '>1.0.0')
--- /dev/null
+project('foo', 'c', version : '1.0.0')
--- /dev/null
+project('failing assert', 'c')
+
+assert(false, 'I am fail.')
--- /dev/null
+project('nonabs workdir', 'c')
+
+exe = executable('simple', 'simple.c')
+test('simple', exe, workdir : '.')
--- /dev/null
+int main(int argc, char **argv) {
+ return 0;
+}
--- /dev/null
+project('int conversion', 'c')
+
+'notanumber'.to_int()
--- /dev/null
+project('badlang', 'c')
+
+add_languages('nonexisting')
--- /dev/null
+Nothing here.
--- /dev/null
+project('outdir path', 'c')
+
+configure_file(input : 'foo.in',
+ output : 'subdir/foo',
+ configuration : configuration_data())
--- /dev/null
+I'm only here because Git is stupid about empty dirs.
+
--- /dev/null
+project('using not found exe', 'c')
+
+nope = find_program('nonexisting', required : false)
+
+custom_target( 'aa',
+ input: 'meson.build',
+ output: 'foobar',
+ command: [nope, '@INPUT@', '@OUTPUT@']
+)
--- /dev/null
+project('no crossprop', 'c')
+
+message(meson.get_cross_property('nonexisting'))
--- /dev/null
+project('subdir', 'c')
+
+subdir('missing')
--- /dev/null
+project('nested ternary', 'c')
+
+x = true ? (false ? 1 : 0) : 2
--- /dev/null
+project('man install', 'c')
+m1 = install_man('foo.a1')
--- /dev/null
+project('man install', 'c')
+m1 = install_man('foo')
--- /dev/null
+project('statchain', 'c')
+
+host_system = host_machine.system()
+if host_system == 'windows' or host_system == 'darwin'
+ error('Test only fails on Linux and BSD')
+endif
+
+statlib = static_library('stat', 'stat.c', pic : false)
+shlib2 = shared_library('shr2', 'shlib2.c', link_with : statlib)
+exe = executable('prog', 'prog.c', link_with : shlib2)
+test('runtest', exe)
--- /dev/null
+int shlibfunc2();
+int statlibfunc();
+
+int main(int argc, char **argv) {
+ if (statlibfunc() != 42)
+ return 1;
+ if (shlibfunc2() != 24)
+ return 1;
+ return 0;
+}
--- /dev/null
+#if defined _WIN32 || defined __CYGWIN__
+ #define DLL_PUBLIC __declspec(dllexport)
+#else
+ #if defined __GNUC__
+ #define DLL_PUBLIC __attribute__ ((visibility("default")))
+ #else
+ #pragma message ("Compiler does not support symbol visibility.")
+ #define DLL_PUBLIC
+ #endif
+#endif
+
+int statlibfunc(void);
+
+int DLL_PUBLIC shlibfunc2(void) {
+ return 24;
+}
--- /dev/null
+int statlibfunc() {
+ return 42;
+}
--- /dev/null
+project('non-root subproject', 'c')
+
+subdir('some')
--- /dev/null
+dependency('definitely-doesnt-exist', fallback : ['someproj', 'some_dep'])
--- /dev/null
+project('dep-test', 'c', version : '1.0')
+
+foo_dep = dependency('foo-bar-xyz-12.3', required : false)
+bar_dep = dependency('foo-bar-xyz-12.3')
--- /dev/null
+int main(int argc, char **argv) {
+ return 0;
+}
--- /dev/null
+project('project argument after target failing', 'c',
+ version : '2.3.4',
+ license : 'mylicense')
+
+add_project_arguments('-DPROJECT_OPTION', language: 'c')
+e = executable('exe', 'exe.c')
+add_project_arguments('-DPROJECT_OPTION1', language: 'c')
--- /dev/null
+project('impossible-dep-test', 'c', version : '1.0')
+
+dependency('zlib', version : ['>=1.0', '<1.0'])
--- /dev/null
+project('has function ext dep', 'c')
+
+cc = meson.get_compiler('c')
+
+mylib = shared_library('mylib', 'mylib.c')
+mylib_dep = declare_dependency(link_with : mylib)
+# Only external dependencies can work here
+cc.has_function('malloc', dependencies : mylib_dep)
--- /dev/null
+int testfunc(void) { return 0; }
--- /dev/null
+project('libdir prefix', 'c',
+ default_options : ['libdir=/opt/lib'])
--- /dev/null
+project('missing meson.build', 'c')
+
+subdir('subdir')
--- /dev/null
+This needs to be here because Git can't handle empty dirs.
--- /dev/null
+project('prefix-abs', 'c',
+ default_options : ['prefix=some/path/notabs'])
--- /dev/null
+const char* dummy() {
+ return "I do nothing.";
+}
--- /dev/null
+project('assign in kwarg', 'c')
+
+executable('prog', 'dummy.c', args = 'prog.c')
+
--- /dev/null
+int main(int argc, char **argv) {
+ return 0;
+}
--- /dev/null
+#!/usr/bin/env python3
+
+import sys
+
+out = sys.argv[-1]
+with open(out, 'wb') as o:
+ for infile in sys.argv[1:-1]:
+ with open(infile, 'rb') as f:
+ o.write(f.read())
--- /dev/null
+project('plain name many inputs', 'c')
+
+catfiles = find_program('catfiles.py')
+
+custom_target('plainname-inputs',
+ input : ['1.txt', '2.txt'],
+ output : '@PLAINNAME@.dat',
+ command : [catfiles, '@INPUT@', '@OUTPUT@'])
--- /dev/null
+#!/usr/bin/env python3
+
+import sys, os
+
+if len(sys.argv) != 3:
+ print(sys.argv[0], '<namespace>', '<output dir>')
+
+name = sys.argv[1]
+odir = sys.argv[2]
+
+with open(os.path.join(odir, name + '.h'), 'w') as f:
+ f.write('int func();\n')
+with open(os.path.join(odir, name + '.c'), 'w') as f:
+ f.write('int main(int argc, char *argv[]) { return 0; }')
+with open(os.path.join(odir, name + '.sh'), 'w') as f:
+ f.write('#!/bin/bash')
--- /dev/null
+usr/include/diff.h
+usr/include/first.h
+usr/bin/diff.sh
+usr/bin/second.sh
+opt/same.h
+opt/same.sh
--- /dev/null
+project('outputs not matching install_dirs', 'c')
+
+gen = find_program('generator.py')
+
+if meson.backend() != 'ninja'
+ error('Failing manually, test is only for the ninja backend')
+endif
+
+custom_target('too-few-install-dirs',
+ output : ['toofew.h', 'toofew.c', 'toofew.sh'],
+ command : [gen, 'toofew', '@OUTDIR@'],
+ install : true,
+ install_dir : [join_paths(get_option('prefix'), get_option('includedir')), false])
--- /dev/null
+project('name with :')
--- /dev/null
+# This file is never reached.
+x = 3
--- /dev/null
+project('abs subdir', 'c')
+
+# For some reason people insist on doing this, probably
+# because Make has taught them to never rely on anything.
+subdir(join_paths(meson.source_root(), 'bob'))
+
--- /dev/null
+project('meson', 'c')
+
+include_directories(meson.current_source_dir())
--- /dev/null
+project('variables-reserved-test', 'c', version : '1.0')
+
+pkgg = import('pkgconfig')
+lib = shared_library('simple', 'simple.c')
+libver = '1.0'
+h = install_headers('simple.h')
+
+pkgg.generate(
+ libraries : [lib, '-lz'],
+ subdirs : '.',
+ version : libver,
+ name : 'libsimple',
+ filebase : 'simple',
+ description : 'A simple demo library.',
+ variables : [ 'prefix=/tmp/' ]
+)
--- /dev/null
+#include"simple.h"
+
+int simple_function() {
+ return 42;
+}
--- /dev/null
+#ifndef SIMPLE_H_
+#define SIMPLE_H_
+
+int simple_function();
+
+#endif
--- /dev/null
+project('variables-zero-length-test', 'c', version : '1.0')
+
+pkgg = import('pkgconfig')
+lib = shared_library('simple', 'simple.c')
+libver = '1.0'
+h = install_headers('simple.h')
+
+pkgg.generate(
+ libraries : [lib, '-lz'],
+ subdirs : '.',
+ version : libver,
+ name : 'libsimple',
+ filebase : 'simple',
+ description : 'A simple demo library.',
+ variables : [ '=value' ]
+)
--- /dev/null
+#include"simple.h"
+
+int simple_function() {
+ return 42;
+}
--- /dev/null
+#ifndef SIMPLE_H_
+#define SIMPLE_H_
+
+int simple_function();
+
+#endif
--- /dev/null
+project('variables-zero-length-value-test', 'c', version : '1.0')
+
+pkgg = import('pkgconfig')
+lib = shared_library('simple', 'simple.c')
+libver = '1.0'
+h = install_headers('simple.h')
+
+pkgg.generate(
+ libraries : [lib, '-lz'],
+ subdirs : '.',
+ version : libver,
+ name : 'libsimple',
+ filebase : 'simple',
+ description : 'A simple demo library.',
+ variables : [ 'key=' ]
+)
--- /dev/null
+#include"simple.h"
+
+int simple_function() {
+ return 42;
+}
--- /dev/null
+#ifndef SIMPLE_H_
+#define SIMPLE_H_
+
+int simple_function();
+
+#endif
--- /dev/null
+project('misplaced option', 'c')
+
+option('dummy', type : 'string')
--- /dev/null
+project('variables-not-key-value-test', 'c', version : '1.0')
+
+pkgg = import('pkgconfig')
+lib = shared_library('simple', 'simple.c')
+libver = '1.0'
+h = install_headers('simple.h')
+
+pkgg.generate(
+ libraries : [lib, '-lz'],
+ subdirs : '.',
+ version : libver,
+ name : 'libsimple',
+ filebase : 'simple',
+ description : 'A simple demo library.',
+ variables : [ 'this_should_be_key_value' ]
+)
--- /dev/null
+#include"simple.h"
+
+int simple_function() {
+ return 42;
+}
--- /dev/null
+#ifndef SIMPLE_H_
+#define SIMPLE_H_
+
+int simple_function();
+
+#endif
--- /dev/null
+project('executable comparison', 'c')
+
+exe1 = executable('prog1', sources : 'prog.c')
+exe2 = executable('prog2', sources : 'prog.c')
+
+assert(exe1 < exe2, 'should fail')
--- /dev/null
+int main(int argc, char **argv) { return 0; }
--- /dev/null
+project('kwarg before arg', 'c')
+
+# All of these should fail, though only the first one will error out if
+# everything's working correctly.
+assert([] < 'st', 'should fail')
+assert([] < 1, 'should fail')
+assert(2 < 'st', 'should fail')
--- /dev/null
+project('slashname', 'c')
+
+# Traverse this subdir so the corresponding dir
+# is created inside the build dir.
+subdir('sub')
+
+# Try to create an executable that would go in the "sub" dir
+# inside the build dir. This is prohibited.
+executable('sub/prog', pf)
+
+error('Re-enable me once slash in name is finally prohibited.')
+
--- /dev/null
+pf = files('prog.c')
+
--- /dev/null
+#include<stdio.h>
+
+int main(int argc, char **argv) {
+ printf("I should not be run ever.\n");
+ return 1;
+}
--- /dev/null
+project('test')
+
+subdir('meson-foo')
--- /dev/null
+project('test', 'rust')
+
+shared_library('test', 'foo.rs', rust_crate_type : 'staticlib')
--- /dev/null
+project('test', 'rust')
+
+static_library('test', 'foo.rs', rust_crate_type : 'cdylib')
--- /dev/null
+project('silent_or', 'c')
+
+if get_option('foo') == 'true'
+ or get_option('foo') == 'auto'
+else
+ message('If this message is printed then something is wrong. The or above should give a syntax error.')
+endif
--- /dev/null
+option('foo', type: 'combo', choices: ['true', 'false', 'auto'], value: 'auto')
--- /dev/null
+project('module test', 'c')
+
+modtest = import('modtest', i_cause: 'a_build_failure')
+modtest.print_hello()
+
--- /dev/null
+project('link with exe', 'c')
+
+e = executable('prog', 'prog.c')
+m = shared_module('module', 'module.c', link_with: e)
--- /dev/null
+
+int func(void) {
+ return 42;
+}
--- /dev/null
+int
+main (int argc, char **argv)
+{
+ return 0;
+}
--- /dev/null
+project('missing incdir', 'c')
+
+inc = include_directories('nosuchdir')
--- /dev/null
+# Copyright © 2017 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+prog_python = import('python3').find_python()
+
+target = custom_target(
+ 'target',
+ output : ['1', '2'],
+ command : [prog_python, '-c',
+ 'with open("1", "w") as f: f.write("foo"); with open("2", "w") as f: f.write("foo")'],
+)
+
+target[0] = 'foo'
--- /dev/null
+project('getopt prefix')
+
+subproject('abc')
+
+get_option('abc:foo')
--- /dev/null
+project('abc', 'c')
--- /dev/null
+option('foo', type : 'boolean')
--- /dev/null
+project('bad option')
+
+get_option('name')
--- /dev/null
+option('name', type : 'string', vaule : 'foo')
--- /dev/null
+project('mainproj', 'c')
+
+# Try to grab a file from a parent project.
+
+subproject('a')
--- /dev/null
+int main(int argc, char **argv) { return 0; }
--- /dev/null
+project('a', 'c')
+
+executable('prog', '../../prog.c')
--- /dev/null
+project('grabber', 'c')
+
+# Try to grab a file from a child subproject.
+
+subproject('foo')
+
+executable('foo', 'subprojects/foo/sub.c')
--- /dev/null
+project('foo', 'c')
+
+message('I do nothing.')
--- /dev/null
+#include<stdio.h>
+
+int main(int argc, char **argv) {
+ printf("I am a subproject executable file.\n");
+ return 0;
+}
--- /dev/null
+project('master', 'c')
+
+subproject('a')
--- /dev/null
+project('a', 'c')
+
+executable('sneaky', '../b/sneaky.c')
--- /dev/null
+projecT('b', 'c')
+
+message('I do nothing.')
--- /dev/null
+#include<stdio.h>
+
+int main(int argc, char **argv) {
+ printf("I can only come into existence via trickery.\n");
+ return 0;
+}
--- /dev/null
+project('string as link argument', 'c')
+executable('myprog', 'prog.c', link_with: [ '' ])
--- /dev/null
+int main(int argc, char **argv) { return 0; }
--- /dev/null
+project('dep-test')
+dep = dependency('', required:true)
--- /dev/null
+project('fff', 'c')
+
+subdir('subprojects')
--- /dev/null
+project('recursive', 'c')
+
+a = subproject('a')
--- /dev/null
+project('a', 'c')
+
+b = subproject('b')
--- /dev/null
+project('b', 'c')
+
+a = subproject('a')
--- /dev/null
+project('missing extra file', 'c')
+
+executable('myprog', 'prog.c', extra_files : 'missing.txt')
--- /dev/null
+int main(int argc, char **argv) {
+ return 0;
+}
--- /dev/null
+project('simple fortran', 'fortran')
+
+add_global_arguments('-fbounds-check', language : 'fortran')
+
+e = executable('simple', 'simple.f90',
+ fortran_args : '-ffree-form')
+test('Simple Fortran', e)
--- /dev/null
+program prog
+ print *, "Fortran compilation is working."
+end program prog
--- /dev/null
+project('modules', 'fortran')
+
+e = executable('modprog', 'stuff.f90', 'prog.f90')
+test('moduletest', e)
--- /dev/null
+PROGRAM prog
+
+use Circle
+IMPLICIT NONE
+
+END PROGRAM prog
+
--- /dev/null
+MODULE Circle
+ REAL, PARAMETER :: Pi = 3.1415927
+ REAL :: radius
+END MODULE Circle
+
--- /dev/null
+project('use in same file', 'fortran')
+
+e = executable('prog', 'prog.f90')
+test('simple', e)
--- /dev/null
+MODULE Circle
+ REAL, PARAMETER :: Pi = 3.1415927
+ REAL :: radius
+ INTERFACE DEFAULT
+ MODULE PROCEDURE func
+ END INTERFACE
+ CONTAINS
+ FUNCTION func()
+ func = 0
+ END FUNCTION
+END MODULE Circle
+
+PROGRAM PROG
+ print *, "Module procedure is working."
+END PROGRAM PROG
--- /dev/null
+project('selfdep', 'fortran')
+
+e = executable('selfdep', 'selfdep.f90')
+test('selfdep', e)
--- /dev/null
+MODULE Circle
+ REAL, PARAMETER :: Pi = 3.1415927
+ REAL :: radius
+END MODULE Circle
+
+PROGRAM prog
+
+use Circle
+IMPLICIT NONE
+
+END PROGRAM prog
--- /dev/null
+program hello
+ use static_hello
+ implicit none
+
+ call static_say_hello()
+end program hello
--- /dev/null
+project('try-static-library', 'fortran')
+
+static_hello = static_library('static_hello', 'static_hello.f90')
+
+exe = executable('test_exe', 'main.f90', link_with : static_hello)
+test('static-fortran', exe)
--- /dev/null
+module static_hello
+ implicit none
+
+ private
+ public :: static_say_hello
+
+ interface static_say_hello
+ module procedure say_hello
+ end interface static_say_hello
+
+contains
+
+ subroutine say_hello
+ print *, "Static library called."
+ end subroutine say_hello
+
+end module static_hello
--- /dev/null
+module dynamic
+ implicit none
+
+ private
+ public :: hello
+
+ interface hello
+ module procedure say
+ end interface hello
+
+contains
+
+ subroutine say
+ print *, "Hello, hello..."
+ end subroutine say
+
+end module dynamic
--- /dev/null
+program main
+ use dynamic
+ implicit none
+
+ call hello()
+end program main
--- /dev/null
+project('dynamic_fortran', 'fortran')
+
+dynamic = shared_library('dynamic', 'dynamic.f90')
+exe = executable('test_exe', 'main.f90', link_with : dynamic)
+test('dynamic-fortran', exe)
--- /dev/null
+# Tests whether fortran sources files created during configuration are properly
+# scanned for dependency information
+
+project('generated', 'fortran')
+
+conf_data = configuration_data()
+conf_data.set('ONE', 1)
+conf_data.set('TWO', 2)
+
+templates_basenames = ['mod2', 'mod1']
+generated_sources = []
+foreach template_basename : templates_basenames
+ infilename = '@0@.fpp'.format(template_basename)
+ outfilename = '@0@.f90'.format(template_basename)
+ outfile = configure_file(
+ input : infilename, output : outfilename, configuration : conf_data)
+ generated_sources += [outfile]
+endforeach
+
+sources = ['prog.f90'] + generated_sources
+exe = executable('generated', sources)
+test('generated', exe)
--- /dev/null
+module mod1
+ implicit none
+
+ integer, parameter :: modval1 = @ONE@
+
+end module mod1
--- /dev/null
+module mod2
+ use mod1
+ implicit none
+
+ integer, parameter :: modval2 = @TWO@
+
+end module mod2
--- /dev/null
+program prog
+ use mod2
+ implicit none
+
+ ! modval1 arrives transitively through mod2's 'use mod1'; both values
+ ! are substituted at configure time (@ONE@ = 1, @TWO@ = 2).
+ if (modval1 + modval2 /= 3) then
+ stop 1
+ end if
+
+end program prog
--- /dev/null
+project('mod_name_case', 'fortran')
+
+# The sources reference module names with differing letter case; the
+# dependency scanner must treat Fortran module names case-insensitively.
+sources = ['test.f90', 'mod1.f90', 'mod2.f90']
+
+exe = executable('mod_name_case', sources)
+test('mod_name_case', exe)
--- /dev/null
+module MyMod1
+ implicit none
+
+ integer, parameter :: myModVal1 = 1
+
+end module MyMod1
--- /dev/null
+module mymod2
+ implicit none
+
+ integer, parameter :: myModVal2 = 2
+
+end module mymod2
--- /dev/null
+! Deliberately references each module with different letter case than its
+! definition (MyMod1 via mymod1, mymod2 via MyMod2) to exercise
+! case-insensitive module name handling. Do not "fix" the case.
+program test
+ use mymod1
+ use MyMod2
+ implicit none
+
+ integer, parameter :: testVar = myModVal1 + myModVal2
+
+end program test
--- /dev/null
+#include <iostream>
+#include <boost/log/trivial.hpp>
+#include <boost/log/expressions.hpp>
+#include <boost/log/utility/setup/console.hpp>
+#include <boost/log/utility/setup/common_attributes.hpp>
+
+using namespace std;
+namespace logging = boost::log;
+
+void InitLogger() {
+ // Register the standard attributes (TimeStamp etc.) and a formatter
+ // factory so the "Severity" placeholder in the format string resolves.
+ logging::add_common_attributes();
+ logging::register_simple_formatter_factory<logging::trivial::severity_level, char>("Severity");
+ string log_format = "%TimeStamp% [%Severity%] - %Message%";
+
+ // Route all log records to stdout using the format above.
+ logging::add_console_log(
+ cout,
+ logging::keywords::format = log_format
+ );
+}
+
+int main(int argc, char **argv) {
+ InitLogger();
+ // Emitting one record exercises the Boost log_setup/log libraries.
+ BOOST_LOG_TRIVIAL(trace) << "SOMETHING";
+ return 0;
+}
--- /dev/null
+#include<boost/thread.hpp>
+
+boost::recursive_mutex m;
+
+// Functor handed to boost::thread; taking the global recursive mutex
+// verifies that Boost.Thread linked and runs correctly.
+struct callable {
+ void operator()() {
+ boost::recursive_mutex::scoped_lock l(m);
+ } // no semicolon after a member-function body (stray one removed)
+};
+
+int main(int argc, char **argv) {
+ // Run the functor on a new thread and wait for it to finish;
+ // success simply means Boost.Thread linked and ran.
+ callable x;
+ boost::thread thr(x);
+ thr.join();
+ return 0;
+}
--- /dev/null
+project('boosttest', 'cpp',
+ default_options : ['cpp_std=c++11'])
+
+# Boost.Log is linked dynamically here, so tell its headers not to
+# expect static linkage.
+add_project_arguments(['-DBOOST_LOG_DYN_LINK'],
+ language : 'cpp'
+)
+
+# We want to have multiple separate configurations of Boost
+# within one project. They need to be independent of each other.
+# Use one without a library dependency and one with it.
+
+linkdep = dependency('boost', modules : ['thread', 'system'])
+staticdep = dependency('boost', modules : ['thread', 'system'], static : true)
+testdep = dependency('boost', modules : ['unit_test_framework'])
+nomoddep = dependency('boost')
+extralibdep = dependency('boost', modules : ['thread', 'system', 'log_setup', 'log'])
+
+linkexe = executable('linkedexe', 'linkexe.cc', dependencies : linkdep)
+staticexe = executable('staticlinkedexe', 'linkexe.cc', dependencies : staticdep)
+unitexe = executable('utf', 'unit_test.cpp', dependencies: testdep)
+nomodexe = executable('nomod', 'nomod.cpp', dependencies : nomoddep)
+extralibexe = executable('extralibexe', 'extralib.cpp', dependencies : extralibdep)
+
+test('Boost linktest', linkexe)
+test('Boost statictest', staticexe)
+test('Boost UTF test', unitexe)
+test('Boost nomod', nomodexe)
+test('Boost extralib test', extralibexe)
--- /dev/null
+#include<boost/any.hpp>
+#include<iostream>
+
+// Wrap the integer 3 in a boost::any and hand it back by value.
+boost::any get_any() {
+ return boost::any(3);
+}
+
+int main(int argc, char **argv) {
+ // Round-trip an int through boost::any; any_cast must recover it intact.
+ boost::any result = get_any();
+ if(boost::any_cast<int>(result) != 3) {
+ std::cout << "Mathematics stopped working.\n";
+ return 1;
+ }
+ std::cout << "Everything is fine in the world.\n";
+ return 0;
+}
--- /dev/null
+#define BOOST_TEST_DYN_LINK
+#define BOOST_TEST_MODULE "MesonTest"
+#define BOOST_TEST_MAIN
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE(m_test) {
+ // Trivial assertions; the point is linking against unit_test_framework.
+ int x = 2+2;
+ BOOST_CHECK(true);
+ BOOST_CHECK_EQUAL(x, 4);
+}
--- /dev/null
+<?xml version="1.0"?>
+<!DOCTYPE book PUBLIC "-//OASIS//DTD DocBook XML V4.1.2//EN"
+ "http://www.oasis-open.org/docbook/xml/4.1.2/docbookx.dtd" [
+<!ENTITY version SYSTEM "version.xml">
+]>
+<book id="index" xmlns:xi="http://www.w3.org/2001/XInclude">
+ <bookinfo>
+ <title>Foolib Reference Manual</title>
+ <releaseinfo>
+ for Foobar &version;
+ </releaseinfo>
+ <authorgroup>
+ <author>
+ <firstname>Jonny</firstname>
+ <surname>Example</surname>
+ <affiliation>
+ <address>
+ <email>unknown@example.com</email>
+ </address>
+ </affiliation>
+ </author>
+ </authorgroup>
+ <copyright>
+ <year>2015</year>
+ <holder>Foobar corporation holdings ltd</holder>
+ </copyright>
+ </bookinfo>
+
+ <reference id="foobar">
+ <title>Foobar library</title>
+ <partintro>
+ <para>
+ This part documents Foobar libs.
+ </para>
+ </partintro>
+ <xi:include href="xml/foo.xml"/>
+ <xi:include href="xml/foo-version.xml"/>
+ </reference>
+
+</book>
--- /dev/null
+cdata = configuration_data()
+cdata.set('VERSION', '1.0')
+configure_file(input : 'version.xml.in',
+ output : 'version.xml',
+ configuration : cdata)
+
+# Generate and install the gtk-doc HTML for 'foobar'; 'gnome' and 'inc'
+# are defined in the parent meson.build.
+gnome.gtkdoc('foobar', src_dir : inc, main_sgml : 'foobar-docs.sgml', install : true)
--- /dev/null
+#pragma once
+
+/**
+ * SECTION:version
+ * @section_id: foo-version
+ * @short_description: <filename>foo-version.h</filename>
+ * @title: Foo Versioning
+ */
+
+/**
+ * FOO_MAJOR_VERSION:
+ *
+ * The major version of foo.
+ */
+#define FOO_MAJOR_VERSION (@FOO_MAJOR_VERSION@)
+
+/**
+ * FOO_MINOR_VERSION:
+ *
+ * The minor version of foo.
+ */
+#define FOO_MINOR_VERSION (@FOO_MINOR_VERSION@)
+
+/**
+ * FOO_MICRO_VERSION:
+ *
+ * The micro version of foo.
+ */
+#define FOO_MICRO_VERSION (@FOO_MICRO_VERSION@)
--- /dev/null
+#pragma once
+
+/**
+ * FooIndecision:
+ * @FOO_MAYBE: Something maybe
+ * @FOO_POSSIBLY: Something possible
+ *
+ * The indecision type.
+ **/
+
+typedef enum {
+ FOO_MAYBE,
+ FOO_POSSIBLY,
+} FooIndecision;
+
--- /dev/null
+cdata = configuration_data()
+# Split the project version (e.g. '1.0.0') into major/minor/micro parts
+# for substitution into foo-version.h.in.
+parts = meson.project_version().split('.')
+cdata.set('FOO_MAJOR_VERSION', parts[0])
+cdata.set('FOO_MINOR_VERSION', parts[1])
+cdata.set('FOO_MICRO_VERSION', parts[2])
+configure_file(input : 'foo-version.h.in',
+ output : 'foo-version.h',
+ configuration : cdata,
+ install : true,
+ install_dir : get_option('includedir'))
--- /dev/null
+usr/share/gtk-doc/html/foobar/foobar.devhelp2
+usr/share/gtk-doc/html/foobar/foobar-foo.html
+usr/share/gtk-doc/html/foobar/foobar.html
+usr/share/gtk-doc/html/foobar/home.png
+usr/share/gtk-doc/html/foobar/index.html
+usr/share/gtk-doc/html/foobar/index.sgml
+usr/share/gtk-doc/html/foobar/left-insensitive.png
+usr/share/gtk-doc/html/foobar/left.png
+usr/share/gtk-doc/html/foobar/right-insensitive.png
+usr/share/gtk-doc/html/foobar/right.png
+usr/share/gtk-doc/html/foobar/style.css
+usr/share/gtk-doc/html/foobar/up-insensitive.png
+usr/share/gtk-doc/html/foobar/up.png
--- /dev/null
+project('gtkdoctest', 'c', version : '1.0.0')
+
+gnome = import('gnome')
+
+assert(gnome.gtkdoc_html_dir('foobar') == 'share/gtk-doc/html/foobar', 'Gtkdoc install dir is incorrect.')
+
+inc = include_directories('include')
+
+subdir('include')
+
+# We have to disable this test until this bug fix has landed to
+# distros https://bugzilla.gnome.org/show_bug.cgi?id=753145
+# The error() below aborts configuration, so subdir('doc') is never reached.
+error('MESON_SKIP_TEST can not enable gtk-doc test until upstream fixes have landed.')
+
+subdir('doc')
--- /dev/null
+#include "meson-subsample.h"
+
+struct _MesonSubSample
+{
+ MesonSample parent_instance;
+
+ gchar *msg;
+};
+
+G_DEFINE_TYPE (MesonSubSample, meson_sub_sample, MESON_TYPE_SAMPLE)
+
+enum {
+ PROP_0,
+ PROP_MSG,
+ LAST_PROP
+};
+
+static GParamSpec *gParamSpecs [LAST_PROP];
+
+/**
+ * meson_sub_sample_new:
+ * @msg: The message to set.
+ *
+ * Allocates a new #MesonSubSample.
+ *
+ * Returns: (transfer full): a #MesonSubSample.
+ */
+MesonSubSample *
+meson_sub_sample_new (const gchar *msg)
+{
+ g_return_val_if_fail (msg != NULL, NULL);
+
+ return g_object_new (MESON_TYPE_SUB_SAMPLE,
+ "message", msg,
+ NULL);
+}
+
+static void
+meson_sub_sample_finalize (GObject *object)
+{
+ MesonSubSample *self = (MesonSubSample *)object;
+
+ g_clear_pointer (&self->msg, g_free);
+
+ G_OBJECT_CLASS (meson_sub_sample_parent_class)->finalize (object);
+}
+
+static void
+meson_sub_sample_get_property (GObject *object,
+ guint prop_id,
+ GValue *value,
+ GParamSpec *pspec)
+{
+ MesonSubSample *self = MESON_SUB_SAMPLE (object);
+
+ switch (prop_id)
+ {
+ case PROP_MSG:
+ g_value_set_string (value, self->msg);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+}
+
+/* GObject property setter; only the "message" (PROP_MSG) property exists. */
+static void
+meson_sub_sample_set_property (GObject *object,
+ guint prop_id,
+ const GValue *value,
+ GParamSpec *pspec)
+{
+ MesonSubSample *self = MESON_SUB_SAMPLE (object);
+
+ switch (prop_id)
+ {
+ case PROP_MSG:
+ /* Free any previous value so a repeated set cannot leak (the property
+ * is CONSTRUCT_ONLY today, but this keeps the setter safe anyway). */
+ g_free (self->msg);
+ self->msg = g_value_dup_string (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+}
+
+static void
+meson_sub_sample_class_init (MesonSubSampleClass *klass)
+{
+ GObjectClass *object_class = G_OBJECT_CLASS (klass);
+
+ object_class->finalize = meson_sub_sample_finalize;
+ object_class->get_property = meson_sub_sample_get_property;
+ object_class->set_property = meson_sub_sample_set_property;
+
+ gParamSpecs [PROP_MSG] =
+ g_param_spec_string ("message",
+ "Message",
+ "The message to print.",
+ NULL,
+ (G_PARAM_READWRITE |
+ G_PARAM_CONSTRUCT_ONLY |
+ G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_properties (object_class, LAST_PROP, gParamSpecs);
+}
+
+static void
+meson_sub_sample_init (MesonSubSample *self)
+{
+}
+
+/**
+ * meson_sub_sample_print_message:
+ * @self: a #MesonSubSample.
+ *
+ * Prints the message to standard output.
+ */
+void
+meson_sub_sample_print_message (MesonSubSample *self)
+{
+ g_return_if_fail (MESON_IS_SUB_SAMPLE (self));
+
+ g_print ("Message: %s\n", self->msg);
+}
--- /dev/null
+#ifndef MESON_SUB_SAMPLE_H
+#define MESON_SUB_SAMPLE_H
+
+#if !defined (MESON_TEST)
+#error "MESON_TEST not defined."
+#endif
+
+#include <glib-object.h>
+#include <meson-sample.h>
+
+G_BEGIN_DECLS
+
+#define MESON_TYPE_SUB_SAMPLE (meson_sub_sample_get_type())
+
+G_DECLARE_FINAL_TYPE (MesonSubSample, meson_sub_sample, MESON, SUB_SAMPLE, MesonSample)
+
+MesonSubSample *meson_sub_sample_new (const gchar *msg);
+
+G_END_DECLS
+
+#endif /* MESON_SUB_SAMPLE_H */
--- /dev/null
+libsources = ['meson-subsample.c', 'meson-subsample.h']
+
+girsubproject = shared_library(
+ 'girsubproject',
+ sources : libsources,
+ dependencies : [gobj, meson_gir],
+ install : true
+)
+
+girexe = executable(
+ 'girprog',
+ sources : 'prog.c',
+ dependencies : [gobj, meson_gir],
+ link_with : girsubproject
+)
+
+gnome.generate_gir(
+ girsubproject,
+ sources : libsources,
+ dependencies : [gobj, meson_gir],
+ nsversion : '1.0',
+ namespace : 'MesonSub',
+ symbol_prefix : 'meson_sub_',
+ identifier_prefix : 'MesonSub',
+ includes : ['GObject-2.0', 'Meson-1.0'],
+ install : true
+)
+
+message('TEST: ' + girsubproject.outdir())
+
+envdata = environment()
+envdata.append('GI_TYPELIB_PATH', girsubproject.outdir(), 'subprojects/mesongir', separator : ':')
+envdata.append('LD_LIBRARY_PATH', girsubproject.outdir(), 'subprojects/mesongir')
+
+test('gobject introspection/subproject/c', girexe)
+test('gobject introspection/subproject/py', find_program('prog.py'),
+ env : envdata)
--- /dev/null
+#include "meson-subsample.h"
+
+gint
+main (gint argc,
+ gchar *argv[])
+{
+ MesonSample * i = (MesonSample*) meson_sub_sample_new ("Hello, sub/meson/c!");
+ meson_sample_print_message (i);
+ g_object_unref (i);
+
+ return 0;
+}
--- /dev/null
+#!/usr/bin/env python3
+"""Smoke-test the MesonSub typelib via GObject introspection."""
+from gi.repository import MesonSub
+
+if __name__ == "__main__":
+ s = MesonSub.Sample.new("Hello, sub/meson/py!")
+ s.print_message()
--- /dev/null
+usr/lib/girepository-1.0/Meson-1.0.typelib
+usr/lib/girepository-1.0/MesonSub-1.0.typelib
+usr/share/gir-1.0/Meson-1.0.gir
+usr/share/gir-1.0/MesonSub-1.0.gir
+usr/lib/libgirsubproject.so
+usr/lib/libgirlib.so
--- /dev/null
+project('gobject-introspection-with-subproject', 'c')
+
+gnome = import('gnome')
+gobj = dependency('gobject-2.0')
+
+add_global_arguments('-DMESON_TEST', language : 'c')
+meson_gir = dependency('meson-gir', fallback : ['mesongir', 'meson_gir'])
+
+subdir('gir')
+
--- /dev/null
+#include "meson-sample.h"
+
+typedef struct _MesonSamplePrivate
+{
+ gchar *msg;
+} MesonSamplePrivate;
+
+
+G_DEFINE_TYPE_WITH_PRIVATE (MesonSample, meson_sample, G_TYPE_OBJECT)
+
+enum {
+ PROP_0,
+ PROP_MSG,
+ LAST_PROP
+};
+
+static GParamSpec *gParamSpecs [LAST_PROP];
+
+/**
+ * meson_sample_new:
+ * @msg: The message to set.
+ *
+ * Allocates a new #MesonSample.
+ *
+ * Returns: (transfer full): a #MesonSample.
+ */
+MesonSample *
+meson_sample_new (const gchar *msg)
+{
+ g_return_val_if_fail (msg != NULL, NULL);
+
+ return g_object_new (MESON_TYPE_SAMPLE,
+ "message", msg,
+ NULL);
+}
+
+static void
+meson_sample_finalize (GObject *object)
+{
+ MesonSamplePrivate *priv = meson_sample_get_instance_private ((MesonSample *) object);
+
+ g_clear_pointer (&priv->msg, g_free);
+
+ G_OBJECT_CLASS (meson_sample_parent_class)->finalize (object);
+}
+
+static void
+meson_sample_get_property (GObject *object,
+ guint prop_id,
+ GValue *value,
+ GParamSpec *pspec)
+{
+ MesonSamplePrivate *priv = meson_sample_get_instance_private ((MesonSample *) object);
+
+ switch (prop_id)
+ {
+ case PROP_MSG:
+ g_value_set_string (value, priv->msg);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+}
+
+static void
+meson_sample_set_property (GObject *object,
+ guint prop_id,
+ const GValue *value,
+ GParamSpec *pspec)
+{
+ MesonSamplePrivate *priv = meson_sample_get_instance_private ((MesonSample *) object);
+
+ switch (prop_id)
+ {
+ case PROP_MSG:
+ priv->msg = g_value_dup_string (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+}
+
+static void
+meson_sample_class_init (MesonSampleClass *klass)
+{
+ GObjectClass *object_class = G_OBJECT_CLASS (klass);
+
+ object_class->finalize = meson_sample_finalize;
+ object_class->get_property = meson_sample_get_property;
+ object_class->set_property = meson_sample_set_property;
+
+ gParamSpecs [PROP_MSG] =
+ g_param_spec_string ("message",
+ "Message",
+ "The message to print.",
+ NULL,
+ (G_PARAM_READWRITE |
+ G_PARAM_CONSTRUCT_ONLY |
+ G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_properties (object_class, LAST_PROP, gParamSpecs);
+}
+
+static void
+meson_sample_init (MesonSample *self)
+{
+}
+
+/**
+ * meson_sample_print_message:
+ * @self: a #MesonSample.
+ *
+ * Prints the message to standard output.
+ */
+void
+meson_sample_print_message (MesonSample *self)
+{
+ MesonSamplePrivate *priv;
+
+ g_return_if_fail (MESON_IS_SAMPLE (self));
+
+ priv = meson_sample_get_instance_private (self);
+
+ g_print ("Message: %s\n", priv->msg);
+}
--- /dev/null
+#ifndef MESON_SAMPLE_H
+#define MESON_SAMPLE_H
+
+#if !defined (MESON_TEST)
+#error "MESON_TEST not defined."
+#endif
+
+#include <glib-object.h>
+
+G_BEGIN_DECLS
+
+#define MESON_TYPE_SAMPLE (meson_sample_get_type())
+
+G_DECLARE_DERIVABLE_TYPE (MesonSample, meson_sample, MESON, SAMPLE, GObject)
+
+struct _MesonSampleClass {
+ GObjectClass parent_class;
+};
+
+
+MesonSample *meson_sample_new (const gchar *msg);
+void meson_sample_print_message (MesonSample *self);
+
+G_END_DECLS
+
+#endif /* MESON_SAMPLE_H */
--- /dev/null
+project('gobject-introspection-subproject', 'c')
+
+gnome = import('gnome')
+gobj = dependency('gobject-2.0')
+
+libsources = ['meson-sample.c', 'meson-sample.h']
+
+girlib = shared_library(
+ 'girlib',
+ sources : libsources,
+ dependencies : gobj,
+ install : true
+)
+
+girtarget = gnome.generate_gir(
+ girlib,
+ sources : libsources,
+ nsversion : '1.0',
+ namespace : 'Meson',
+ symbol_prefix : 'meson_',
+ identifier_prefix : 'Meson',
+ includes : ['GObject-2.0'],
+ install : true
+)
+
+meson_gir = declare_dependency(link_with : girlib,
+ include_directories : [include_directories('.')],
+ dependencies : [gobj],
+ # Everything that uses libgst needs this built to compile
+ sources : girtarget,
+)
--- /dev/null
+#include "meson-subsample.h"
+
+struct _MesonSubSample
+{
+ MesonSample parent_instance;
+
+ gchar *msg;
+};
+
+G_DEFINE_TYPE (MesonSubSample, meson_sub_sample, MESON_TYPE_SAMPLE)
+
+enum {
+ PROP_0,
+ PROP_MSG,
+ LAST_PROP
+};
+
+static GParamSpec *gParamSpecs [LAST_PROP];
+
+/**
+ * meson_sub_sample_new:
+ * @msg: The message to set.
+ *
+ * Allocates a new #MesonSubSample.
+ *
+ * Returns: (transfer full): a #MesonSubSample.
+ */
+MesonSubSample *
+meson_sub_sample_new (const gchar *msg)
+{
+ g_return_val_if_fail (msg != NULL, NULL);
+
+ return g_object_new (MESON_TYPE_SUB_SAMPLE,
+ "message", msg,
+ NULL);
+}
+
+static void
+meson_sub_sample_finalize (GObject *object)
+{
+ MesonSubSample *self = (MesonSubSample *)object;
+
+ g_clear_pointer (&self->msg, g_free);
+
+ G_OBJECT_CLASS (meson_sub_sample_parent_class)->finalize (object);
+}
+
+static void
+meson_sub_sample_get_property (GObject *object,
+ guint prop_id,
+ GValue *value,
+ GParamSpec *pspec)
+{
+ MesonSubSample *self = MESON_SUB_SAMPLE (object);
+
+ switch (prop_id)
+ {
+ case PROP_MSG:
+ g_value_set_string (value, self->msg);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+}
+
+static void
+meson_sub_sample_set_property (GObject *object,
+ guint prop_id,
+ const GValue *value,
+ GParamSpec *pspec)
+{
+ MesonSubSample *self = MESON_SUB_SAMPLE (object);
+
+ switch (prop_id)
+ {
+ case PROP_MSG:
+ self->msg = g_value_dup_string (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+}
+
+static void
+meson_sub_sample_class_init (MesonSubSampleClass *klass)
+{
+ GObjectClass *object_class = G_OBJECT_CLASS (klass);
+
+ object_class->finalize = meson_sub_sample_finalize;
+ object_class->get_property = meson_sub_sample_get_property;
+ object_class->set_property = meson_sub_sample_set_property;
+
+ gParamSpecs [PROP_MSG] =
+ g_param_spec_string ("message",
+ "Message",
+ "The message to print.",
+ NULL,
+ (G_PARAM_READWRITE |
+ G_PARAM_CONSTRUCT_ONLY |
+ G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_properties (object_class, LAST_PROP, gParamSpecs);
+}
+
+static void
+meson_sub_sample_init (MesonSubSample *self)
+{
+}
+
+/**
+ * meson_sub_sample_print_message:
+ * @self: a #MesonSubSample.
+ *
+ * Prints the message.
+ *
+ * Returns: Nothing.
+ */
+void
+meson_sub_sample_print_message (MesonSubSample *self)
+{
+ g_return_if_fail (MESON_IS_SUB_SAMPLE (self));
+
+ g_print ("Message: %s\n", self->msg);
+}
--- /dev/null
+#ifndef MESON_SUB_SAMPLE_H
+#define MESON_SUB_SAMPLE_H
+
+#include <glib-object.h>
+#include <meson-sample.h>
+
+G_BEGIN_DECLS
+
+#define MESON_TYPE_SUB_SAMPLE (meson_sub_sample_get_type())
+
+G_DECLARE_FINAL_TYPE (MesonSubSample, meson_sub_sample, MESON, SUB_SAMPLE, MesonSample)
+
+MesonSubSample *meson_sub_sample_new (const gchar *msg);
+
+G_END_DECLS
+
+#endif /* MESON_SUB_SAMPLE_H */
--- /dev/null
+libsources = ['meson-subsample.c', 'meson-subsample.h']
+
+girsubproject = shared_library(
+ 'girsubproject',
+ sources : libsources,
+ dependencies : [gobj, girlib_dep],
+ install : true
+)
+
+girexe = executable(
+ 'girprog',
+ sources : 'prog.c',
+ dependencies : [gobj, girlib_dep],
+ link_with : girsubproject
+)
+
+gnome.generate_gir(
+ girsubproject,
+ sources : libsources,
+ nsversion : '1.0',
+ namespace : 'MesonSub',
+ symbol_prefix : 'meson_sub_',
+ identifier_prefix : 'MesonSub',
+ includes : ['GObject-2.0', meson_gir],
+ install : true
+)
+
+message('TEST: ' + girsubproject.outdir())
+
+test('gobject introspection/subproject/c', girexe)
--- /dev/null
+#include "meson-subsample.h"
+
+gint
+main (gint argc,
+ gchar *argv[])
+{
+ MesonSample * i = (MesonSample*) meson_sub_sample_new ("Hello, sub/meson/c!");
+ meson_sample_print_message (i);
+ g_object_unref (i);
+
+ return 0;
+}
--- /dev/null
+usr/lib/girepository-1.0/Meson-1.0.typelib
+usr/lib/girepository-1.0/MesonSub-1.0.typelib
+usr/lib/libgirlib.so
+usr/lib/libgirsubproject.so
+usr/share/gir-1.0/Meson-1.0.gir
+usr/share/gir-1.0/MesonSub-1.0.gir
--- /dev/null
+project('multiple-gobject-introspection', 'c')
+
+gnome = import('gnome')
+gobj = dependency('gobject-2.0')
+
+subdir('mesongir')
+subdir('gir')
--- /dev/null
+#include "meson-sample.h"
+
+typedef struct _MesonSamplePrivate
+{
+ gchar *msg;
+} MesonSamplePrivate;
+
+
+G_DEFINE_TYPE_WITH_PRIVATE (MesonSample, meson_sample, G_TYPE_OBJECT)
+
+enum {
+ PROP_0,
+ PROP_MSG,
+ LAST_PROP
+};
+
+static GParamSpec *gParamSpecs [LAST_PROP];
+
+/**
+ * meson_sample_new:
+ * @msg: The message to set.
+ *
+ * Allocates a new #MesonSample.
+ *
+ * Returns: (transfer full): a #MesonSample.
+ */
+MesonSample *
+meson_sample_new (const gchar *msg)
+{
+ g_return_val_if_fail (msg != NULL, NULL);
+
+ return g_object_new (MESON_TYPE_SAMPLE,
+ "message", msg,
+ NULL);
+}
+
+static void
+meson_sample_finalize (GObject *object)
+{
+ MesonSamplePrivate *priv = meson_sample_get_instance_private ((MesonSample *) object);
+
+ g_clear_pointer (&priv->msg, g_free);
+
+ G_OBJECT_CLASS (meson_sample_parent_class)->finalize (object);
+}
+
+static void
+meson_sample_get_property (GObject *object,
+ guint prop_id,
+ GValue *value,
+ GParamSpec *pspec)
+{
+ MesonSamplePrivate *priv = meson_sample_get_instance_private ((MesonSample *) object);
+
+ switch (prop_id)
+ {
+ case PROP_MSG:
+ g_value_set_string (value, priv->msg);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+}
+
+static void
+meson_sample_set_property (GObject *object,
+ guint prop_id,
+ const GValue *value,
+ GParamSpec *pspec)
+{
+ MesonSamplePrivate *priv = meson_sample_get_instance_private ((MesonSample *) object);
+
+ switch (prop_id)
+ {
+ case PROP_MSG:
+ priv->msg = g_value_dup_string (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+}
+
+static void
+meson_sample_class_init (MesonSampleClass *klass)
+{
+ GObjectClass *object_class = G_OBJECT_CLASS (klass);
+
+ object_class->finalize = meson_sample_finalize;
+ object_class->get_property = meson_sample_get_property;
+ object_class->set_property = meson_sample_set_property;
+
+ gParamSpecs [PROP_MSG] =
+ g_param_spec_string ("message",
+ "Message",
+ "The message to print.",
+ NULL,
+ (G_PARAM_READWRITE |
+ G_PARAM_CONSTRUCT_ONLY |
+ G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_properties (object_class, LAST_PROP, gParamSpecs);
+}
+
+static void
+meson_sample_init (MesonSample *self)
+{
+}
+
+/**
+ * meson_sample_print_message:
+ * @self: a #MesonSample.
+ *
+ * Prints the message.
+ *
+ */
+void
+meson_sample_print_message (MesonSample *self)
+{
+ MesonSamplePrivate *priv;
+
+ g_return_if_fail (MESON_IS_SAMPLE (self));
+
+ priv = meson_sample_get_instance_private (self);
+
+ g_print ("Message: %s\n", priv->msg);
+}
--- /dev/null
+#ifndef MESON_SAMPLE_H
+#define MESON_SAMPLE_H
+
+#include <@HEADER@>
+
+G_BEGIN_DECLS
+
+#define MESON_TYPE_SAMPLE (meson_sample_get_type())
+
+G_DECLARE_DERIVABLE_TYPE (MesonSample, meson_sample, MESON, SAMPLE, GObject)
+
+struct _MesonSampleClass {
+ GObjectClass parent_class;
+};
+
+
+MesonSample *meson_sample_new (const gchar *msg);
+void meson_sample_print_message (MesonSample *self);
+
+G_END_DECLS
+
+#endif /* MESON_SAMPLE_H */
--- /dev/null
+conf = configuration_data()
+conf.set('HEADER', 'glib-object.h')
+
+meson_sample_header = configure_file(
+ input : 'meson-sample.h.in',
+ output : 'meson-sample.h',
+ configuration : conf)
+
+libsources = ['meson-sample.c', meson_sample_header]
+
+girlib = shared_library(
+ 'girlib',
+ sources : libsources,
+ dependencies : gobj,
+ install : true
+)
+
+girtarget = gnome.generate_gir(
+ girlib,
+ sources : libsources,
+ nsversion : '1.0',
+ namespace : 'Meson',
+ symbol_prefix : 'meson_',
+ identifier_prefix : 'Meson',
+ includes : ['GObject-2.0'],
+ export_packages : 'meson',
+ install : true
+)
+meson_gir = girtarget[0]
+meson_typelib = girtarget[1]
+
+girlib_inc = include_directories('.')
+girlib_dep = declare_dependency(link_with : girlib,
+ include_directories : [girlib_inc],
+ dependencies : [gobj],
+ # Everything that uses libgst needs this built to compile
+ sources : girtarget,
+)
--- /dev/null
+<page xmlns="http://projectmallard.org/1.0/"
+ xmlns:its="http://www.w3.org/2005/11/its"
+ type="guide"
+ id="index">
+ <title>
+ Hello!
+ </title>
+</page>
--- /dev/null
+msgid ""
+msgstr ""
+"Project-Id-Version: meson master\n"
+"Language: de\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Plural-Forms: nplurals=2; plural=(n != 1);\n"
+
+#. (itstool) path: page/title
+#: C/index.page:5
+msgid "Hello!"
+msgstr "Hallo!"
--- /dev/null
+msgid ""
+msgstr ""
+"Project-Id-Version: meson master\n"
+"Language: es\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Plural-Forms: nplurals=2; plural=(n != 1);\n"
+
+#. (itstool) path: page/title
+#: C/index.page:5
+msgid "Hello!"
+msgstr "¡Hola!"
--- /dev/null
+gnome = import('gnome')
+
+gnome.yelp('meson',
+ sources: 'index.page',
+ media: 'media/test.txt',
+ symlink_media: false,
+ languages: ['de', 'es'],
+)
+
+gnome.yelp('meson-symlink',
+ sources: 'index.page',
+ media: 'media/test.txt',
+ symlink_media: true,
+ languages: ['de', 'es'],
+)
+
+gnome.yelp('meson-linguas',
+ sources: 'index.page',
+ media: 'media/test.txt',
+ symlink_media: false,
+)
--- /dev/null
+usr/share/help/C/meson/index.page
+usr/share/help/C/meson/media/test.txt
+usr/share/help/es/meson/index.page
+usr/share/help/es/meson/media/test.txt
+usr/share/help/de/meson/index.page
+usr/share/help/de/meson/media/test.txt
+usr/share/help/C/meson-symlink/index.page
+usr/share/help/C/meson-symlink/media/test.txt
+usr/share/help/es/meson-symlink/media/test.txt
+usr/share/help/es/meson-symlink/index.page
+usr/share/help/de/meson-symlink/index.page
+usr/share/help/de/meson-symlink/media/test.txt
+usr/share/help/C/meson-linguas/index.page
+usr/share/help/C/meson-linguas/media/test.txt
+usr/share/help/es/meson-linguas/media/test.txt
+usr/share/help/es/meson-linguas/index.page
+usr/share/help/de/meson-linguas/index.page
+usr/share/help/de/meson-linguas/media/test.txt
--- /dev/null
+project('yelp', 'c')
+subdir('help')
--- /dev/null
+# Doxyfile 1.8.13
+
+# This file describes the settings to be used by the documentation system
+# doxygen (www.doxygen.org) for a project.
+#
+# All text after a double hash (##) is considered a comment and is placed in
+# front of the TAG it is preceding.
+#
+# All text after a single hash (#) is considered a comment and will be ignored.
+# The format is:
+# TAG = value [value, ...]
+# For lists, items can also be appended using:
+# TAG += value [value, ...]
+# Values that contain spaces should be placed between quotes (\" \").
+
+#---------------------------------------------------------------------------
+# Project related configuration options
+#---------------------------------------------------------------------------
+
+# This tag specifies the encoding used for all characters in the config file
+# that follow. The default is UTF-8 which is also the encoding used for all text
+# before the first occurrence of this tag. Doxygen uses libiconv (or the iconv
+# built into libc) for the transcoding. See http://www.gnu.org/software/libiconv
+# for the list of possible encodings.
+# The default value is: UTF-8.
+
+DOXYFILE_ENCODING = UTF-8
+
+# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by
+# double-quotes, unless you are using Doxywizard) that should identify the
+# project for which the documentation is generated. This name is used in the
+# title of most generated pages and in a few other places.
+# The default value is: My Project.
+
+PROJECT_NAME = "The Vast Comedian Project"
+
+# The PROJECT_NUMBER tag can be used to enter a project or revision number. This
+# could be handy for archiving the generated documentation or if some version
+# control system is used.
+
+PROJECT_NUMBER = @VERSION@
+
+# Using the PROJECT_BRIEF tag one can provide an optional one line description
+# for a project that appears at the top of each page and should give viewer a
+# quick idea about the purpose of the project. Keep the description short.
+
+PROJECT_BRIEF = Comedy generator
+
+# With the PROJECT_LOGO tag one can specify a logo or an icon that is included
+# in the documentation. The maximum height of the logo should not exceed 55
+# pixels and the maximum width should not exceed 200 pixels. Doxygen will copy
+# the logo to the output directory.
+
+PROJECT_LOGO =
+
+# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path
+# into which the generated documentation will be written. If a relative path is
+# entered, it will be relative to the location where doxygen was started. If
+# left blank the current directory will be used.
+
+OUTPUT_DIRECTORY = doc
+
+# If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub-
+# directories (in 2 levels) under the output directory of each output format and
+# will distribute the generated files over these directories. Enabling this
+# option can be useful when feeding doxygen a huge amount of source files, where
+# putting all generated files in the same directory would otherwise cause
+# performance problems for the file system.
+# The default value is: NO.
+
+CREATE_SUBDIRS = NO
+
+# If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII
+# characters to appear in the names of generated files. If set to NO, non-ASCII
+# characters will be escaped, for example _xE3_x81_x84 will be used for Unicode
+# U+3044.
+# The default value is: NO.
+
+ALLOW_UNICODE_NAMES = YES
+
+# The OUTPUT_LANGUAGE tag is used to specify the language in which all
+# documentation generated by doxygen is written. Doxygen will use this
+# information to generate all constant output in the proper language.
+# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese,
+# Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States),
+# Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian,
+# Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages),
+# Korean, Korean-en (Korean with English messages), Latvian, Lithuanian,
+# Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian,
+# Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish,
+# Ukrainian and Vietnamese.
+# The default value is: English.
+
+OUTPUT_LANGUAGE = English
+
+# If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member
+# descriptions after the members that are listed in the file and class
+# documentation (similar to Javadoc). Set to NO to disable this.
+# The default value is: YES.
+
+BRIEF_MEMBER_DESC = YES
+
+# If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief
+# description of a member or function before the detailed description
+#
+# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the
+# brief descriptions will be completely suppressed.
+# The default value is: YES.
+
+REPEAT_BRIEF = YES
+
+# This tag implements a quasi-intelligent brief description abbreviator that is
+# used to form the text in various listings. Each string in this list, if found
+# as the leading text of the brief description, will be stripped from the text
+# and the result, after processing the whole list, is used as the annotated
+# text. Otherwise, the brief description is used as-is. If left blank, the
+# following values are used ($name is automatically replaced with the name of
+# the entity):The $name class, The $name widget, The $name file, is, provides,
+# specifies, contains, represents, a, an and the.
+
+ABBREVIATE_BRIEF = "The $name class" \
+ "The $name widget" \
+ "The $name file" \
+ is \
+ provides \
+ specifies \
+ contains \
+ represents \
+ a \
+ an \
+ the
+
+# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then
+# doxygen will generate a detailed section even if there is only a brief
+# description.
+# The default value is: NO.
+
+ALWAYS_DETAILED_SEC = NO
+
+# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all
+# inherited members of a class in the documentation of that class as if those
+# members were ordinary class members. Constructors, destructors and assignment
+# operators of the base classes will not be shown.
+# The default value is: NO.
+
+INLINE_INHERITED_MEMB = NO
+
+# If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path
+# before files name in the file list and in the header files. If set to NO the
+# shortest path that makes the file name unique will be used
+# The default value is: YES.
+
+FULL_PATH_NAMES = YES
+
+# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path.
+# Stripping is only done if one of the specified strings matches the left-hand
+# part of the path. The tag can be used to show relative paths in the file list.
+# If left blank the directory from which doxygen is run is used as the path to
+# strip.
+#
+# Note that you can specify absolute paths here, but also relative paths, which
+# will be relative from the directory where doxygen is started.
+# This tag requires that the tag FULL_PATH_NAMES is set to YES.
+
+STRIP_FROM_PATH =
+
+# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the
+# path mentioned in the documentation of a class, which tells the reader which
+# header file to include in order to use a class. If left blank only the name of
+# the header file containing the class definition is used. Otherwise one should
+# specify the list of include paths that are normally passed to the compiler
+# using the -I flag.
+
+STRIP_FROM_INC_PATH =
+
+# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but
+# less readable) file names. This can be useful if your file system doesn't
+# support long names like on DOS, Mac, or CD-ROM.
+# The default value is: NO.
+
+SHORT_NAMES = NO
+
+# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the
+# first line (until the first dot) of a Javadoc-style comment as the brief
+# description. If set to NO, the Javadoc-style will behave just like regular Qt-
+# style comments (thus requiring an explicit @brief command for a brief
+# description.)
+# The default value is: NO.
+
+JAVADOC_AUTOBRIEF = NO
+
+# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first
+# line (until the first dot) of a Qt-style comment as the brief description. If
+# set to NO, the Qt-style will behave just like regular Qt-style comments (thus
+# requiring an explicit \brief command for a brief description.)
+# The default value is: NO.
+
+QT_AUTOBRIEF = NO
+
+# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a
+# multi-line C++ special comment block (i.e. a block of //! or /// comments) as
+# a brief description. This used to be the default behavior. The new default is
+# to treat a multi-line C++ comment block as a detailed description. Set this
+# tag to YES if you prefer the old behavior instead.
+#
+# Note that setting this tag to YES also means that rational rose comments are
+# not recognized any more.
+# The default value is: NO.
+
+MULTILINE_CPP_IS_BRIEF = NO
+
+# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the
+# documentation from any documented member that it re-implements.
+# The default value is: YES.
+
+INHERIT_DOCS = YES
+
+# If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new
+# page for each member. If set to NO, the documentation of a member will be part
+# of the file/class/namespace that contains it.
+# The default value is: NO.
+
+SEPARATE_MEMBER_PAGES = NO
+
+# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen
+# uses this value to replace tabs by spaces in code fragments.
+# Minimum value: 1, maximum value: 16, default value: 4.
+
+TAB_SIZE = 4
+
+# This tag can be used to specify a number of aliases that act as commands in
+# the documentation. An alias has the form:
+# name=value
+# For example adding
+# "sideeffect=@par Side Effects:\n"
+# will allow you to put the command \sideeffect (or @sideeffect) in the
+# documentation, which will result in a user-defined paragraph with heading
+# "Side Effects:". You can put \n's in the value part of an alias to insert
+# newlines.
+
+ALIASES =
+
+# This tag can be used to specify a number of word-keyword mappings (TCL only).
+# A mapping has the form "name=value". For example adding "class=itcl::class"
+# will allow you to use the command class in the itcl::class meaning.
+
+TCL_SUBST =
+
+# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources
+# only. Doxygen will then generate output that is more tailored for C. For
+# instance, some of the names that are used will be different. The list of all
+# members will be omitted, etc.
+# The default value is: NO.
+
+OPTIMIZE_OUTPUT_FOR_C = NO
+
+# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or
+# Python sources only. Doxygen will then generate output that is more tailored
+# for that language. For instance, namespaces will be presented as packages,
+# qualified scopes will look different, etc.
+# The default value is: NO.
+
+OPTIMIZE_OUTPUT_JAVA = NO
+
+# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran
+# sources. Doxygen will then generate output that is tailored for Fortran.
+# The default value is: NO.
+
+OPTIMIZE_FOR_FORTRAN = NO
+
+# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL
+# sources. Doxygen will then generate output that is tailored for VHDL.
+# The default value is: NO.
+
+OPTIMIZE_OUTPUT_VHDL = NO
+
+# Doxygen selects the parser to use depending on the extension of the files it
+# parses. With this tag you can assign which parser to use for a given
+# extension. Doxygen has a built-in mapping, but you can override or extend it
+# using this tag. The format is ext=language, where ext is a file extension, and
+# language is one of the parsers supported by doxygen: IDL, Java, Javascript,
+# C#, C, C++, D, PHP, Objective-C, Python, Fortran (fixed format Fortran:
+# FortranFixed, free formatted Fortran: FortranFree, unknown formatted Fortran:
+# Fortran. In the latter case the parser tries to guess whether the code is fixed
+# or free formatted code, this is the default for Fortran type files), VHDL. For
+# instance to make doxygen treat .inc files as Fortran files (default is PHP),
+# and .f files as C (default is Fortran), use: inc=Fortran f=C.
+#
+# Note: For files without extension you can use no_extension as a placeholder.
+#
+# Note that for custom extensions you also need to set FILE_PATTERNS otherwise
+# the files are not read by doxygen.
+
+EXTENSION_MAPPING =
+
+# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments
+# according to the Markdown format, which allows for more readable
+# documentation. See http://daringfireball.net/projects/markdown/ for details.
+# The output of markdown processing is further processed by doxygen, so you can
+# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in
+# case of backward compatibility issues.
+# The default value is: YES.
+
+MARKDOWN_SUPPORT = YES
+
+# When the TOC_INCLUDE_HEADINGS tag is set to a non-zero value, all headings up
+# to that level are automatically included in the table of contents, even if
+# they do not have an id attribute.
+# Note: This feature currently applies only to Markdown headings.
+# Minimum value: 0, maximum value: 99, default value: 0.
+# This tag requires that the tag MARKDOWN_SUPPORT is set to YES.
+
+TOC_INCLUDE_HEADINGS = 0
+
+# When enabled doxygen tries to link words that correspond to documented
+# classes, or namespaces to their corresponding documentation. Such a link can
+# be prevented in individual cases by putting a % sign in front of the word or
+# globally by setting AUTOLINK_SUPPORT to NO.
+# The default value is: YES.
+
+AUTOLINK_SUPPORT = YES
+
+# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want
+# to include (a tag file for) the STL sources as input, then you should set this
+# tag to YES in order to let doxygen match functions declarations and
+# definitions whose arguments contain STL classes (e.g. func(std::string);
+# versus func(std::string) {}). This also makes the inheritance and collaboration
+# diagrams that involve STL classes more complete and accurate.
+# The default value is: NO.
+
+BUILTIN_STL_SUPPORT = NO
+
+# If you use Microsoft's C++/CLI language, you should set this option to YES to
+# enable parsing support.
+# The default value is: NO.
+
+CPP_CLI_SUPPORT = NO
+
+# Set the SIP_SUPPORT tag to YES if your project consists of sip (see:
+# http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen
+# will parse them like normal C++ but will assume all classes use public instead
+# of private inheritance when no explicit protection keyword is present.
+# The default value is: NO.
+
+SIP_SUPPORT = NO
+
+# For Microsoft's IDL there are propget and propput attributes to indicate
+# getter and setter methods for a property. Setting this option to YES will make
+# doxygen replace the get and set methods by a property in the documentation.
+# This will only work if the methods are indeed getting or setting a simple
+# type. If this is not the case, or you want to show the methods anyway, you
+# should set this option to NO.
+# The default value is: YES.
+
+IDL_PROPERTY_SUPPORT = YES
+
+# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
+# tag is set to YES then doxygen will reuse the documentation of the first
+# member in the group (if any) for the other members of the group. By default
+# all members of a group must be documented explicitly.
+# The default value is: NO.
+
+DISTRIBUTE_GROUP_DOC = NO
+
+# If one adds a struct or class to a group and this option is enabled, then also
+# any nested class or struct is added to the same group. By default this option
+# is disabled and one has to add nested compounds explicitly via \ingroup.
+# The default value is: NO.
+
+GROUP_NESTED_COMPOUNDS = NO
+
+# Set the SUBGROUPING tag to YES to allow class member groups of the same type
+# (for instance a group of public functions) to be put as a subgroup of that
+# type (e.g. under the Public Functions section). Set it to NO to prevent
+# subgrouping. Alternatively, this can be done per class using the
+# \nosubgrouping command.
+# The default value is: YES.
+
+SUBGROUPING = YES
+
+# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions
+# are shown inside the group in which they are included (e.g. using \ingroup)
+# instead of on a separate page (for HTML and Man pages) or section (for LaTeX
+# and RTF).
+#
+# Note that this feature does not work in combination with
+# SEPARATE_MEMBER_PAGES.
+# The default value is: NO.
+
+INLINE_GROUPED_CLASSES = NO
+
+# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions
+# with only public data fields or simple typedef fields will be shown inline in
+# the documentation of the scope in which they are defined (i.e. file,
+# namespace, or group documentation), provided this scope is documented. If set
+# to NO, structs, classes, and unions are shown on a separate page (for HTML and
+# Man pages) or section (for LaTeX and RTF).
+# The default value is: NO.
+
+INLINE_SIMPLE_STRUCTS = NO
+
+# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or
+# enum is documented as struct, union, or enum with the name of the typedef. So
+# typedef struct TypeS {} TypeT, will appear in the documentation as a struct
+# with name TypeT. When disabled the typedef will appear as a member of a file,
+# namespace, or class. And the struct will be named TypeS. This can typically be
+# useful for C code in case the coding convention dictates that all compound
+# types are typedef'ed and only the typedef is referenced, never the tag name.
+# The default value is: NO.
+
+TYPEDEF_HIDES_STRUCT = NO
+
+# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This
+# cache is used to resolve symbols given their name and scope. Since this can be
+# an expensive process and often the same symbol appears multiple times in the
+# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small
+# doxygen will become slower. If the cache is too large, memory is wasted. The
+# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range
+# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536
+# symbols. At the end of a run doxygen will report the cache usage and suggest
+# the optimal cache size from a speed point of view.
+# Minimum value: 0, maximum value: 9, default value: 0.
+
+LOOKUP_CACHE_SIZE = 0
+
+#---------------------------------------------------------------------------
+# Build related configuration options
+#---------------------------------------------------------------------------
+
+# If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in
+# documentation are documented, even if no documentation was available. Private
+# class members and static file members will be hidden unless the
+# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES.
+# Note: This will also disable the warnings about undocumented members that are
+# normally produced when WARNINGS is set to YES.
+# The default value is: NO.
+
+EXTRACT_ALL = NO
+
+# If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will
+# be included in the documentation.
+# The default value is: NO.
+
+EXTRACT_PRIVATE = NO
+
+# If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal
+# scope will be included in the documentation.
+# The default value is: NO.
+
+EXTRACT_PACKAGE = NO
+
+# If the EXTRACT_STATIC tag is set to YES, all static members of a file will be
+# included in the documentation.
+# The default value is: NO.
+
+EXTRACT_STATIC = NO
+
+# If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined
+# locally in source files will be included in the documentation. If set to NO,
+# only classes defined in header files are included. Does not have any effect
+# for Java sources.
+# The default value is: YES.
+
+EXTRACT_LOCAL_CLASSES = YES
+
+# This flag is only useful for Objective-C code. If set to YES, local methods,
+# which are defined in the implementation section but not in the interface are
+# included in the documentation. If set to NO, only methods in the interface are
+# included.
+# The default value is: NO.
+
+EXTRACT_LOCAL_METHODS = NO
+
+# If this flag is set to YES, the members of anonymous namespaces will be
+# extracted and appear in the documentation as a namespace called
+# 'anonymous_namespace{file}', where file will be replaced with the base name of
+# the file that contains the anonymous namespace. By default anonymous namespace
+# are hidden.
+# The default value is: NO.
+
+EXTRACT_ANON_NSPACES = NO
+
+# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all
+# undocumented members inside documented classes or files. If set to NO these
+# members will be included in the various overviews, but no documentation
+# section is generated. This option has no effect if EXTRACT_ALL is enabled.
+# The default value is: NO.
+
+HIDE_UNDOC_MEMBERS = NO
+
+# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all
+# undocumented classes that are normally visible in the class hierarchy. If set
+# to NO, these classes will be included in the various overviews. This option
+# has no effect if EXTRACT_ALL is enabled.
+# The default value is: NO.
+
+HIDE_UNDOC_CLASSES = NO
+
+# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend
+# (class|struct|union) declarations. If set to NO, these declarations will be
+# included in the documentation.
+# The default value is: NO.
+
+HIDE_FRIEND_COMPOUNDS = NO
+
+# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any
+# documentation blocks found inside the body of a function. If set to NO, these
+# blocks will be appended to the function's detailed documentation block.
+# The default value is: NO.
+
+HIDE_IN_BODY_DOCS = NO
+
+# The INTERNAL_DOCS tag determines if documentation that is typed after a
+# \internal command is included. If the tag is set to NO then the documentation
+# will be excluded. Set it to YES to include the internal documentation.
+# The default value is: NO.
+
+INTERNAL_DOCS = NO
+
+# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file
+# names in lower-case letters. If set to YES, upper-case letters are also
+# allowed. This is useful if you have classes or files whose names only differ
+# in case and if your file system supports case sensitive file names. Windows
+# and Mac users are advised to set this option to NO.
+# The default value is: system dependent.
+
+CASE_SENSE_NAMES = YES
+
+# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with
+# their full class and namespace scopes in the documentation. If set to YES, the
+# scope will be hidden.
+# The default value is: NO.
+
+HIDE_SCOPE_NAMES = NO
+
+# If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will
+# append additional text to a page's title, such as Class Reference. If set to
+# YES the compound reference will be hidden.
+# The default value is: NO.
+
+HIDE_COMPOUND_REFERENCE= NO
+
+# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of
+# the files that are included by a file in the documentation of that file.
+# The default value is: YES.
+
+SHOW_INCLUDE_FILES = YES
+
+# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each
+# grouped member an include statement to the documentation, telling the reader
+# which file to include in order to use the member.
+# The default value is: NO.
+
+SHOW_GROUPED_MEMB_INC = NO
+
+# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include
+# files with double quotes in the documentation rather than with sharp brackets.
+# The default value is: NO.
+
+FORCE_LOCAL_INCLUDES = NO
+
+# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the
+# documentation for inline members.
+# The default value is: YES.
+
+INLINE_INFO = YES
+
+# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the
+# (detailed) documentation of file and class members alphabetically by member
+# name. If set to NO, the members will appear in declaration order.
+# The default value is: YES.
+
+SORT_MEMBER_DOCS = YES
+
+# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief
+# descriptions of file, namespace and class members alphabetically by member
+# name. If set to NO, the members will appear in declaration order. Note that
+# this will also influence the order of the classes in the class list.
+# The default value is: NO.
+
+SORT_BRIEF_DOCS = NO
+
+# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the
+# (brief and detailed) documentation of class members so that constructors and
+# destructors are listed first. If set to NO the constructors will appear in the
+# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS.
+# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief
+# member documentation.
+# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting
+# detailed member documentation.
+# The default value is: NO.
+
+SORT_MEMBERS_CTORS_1ST = NO
+
+# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy
+# of group names into alphabetical order. If set to NO the group names will
+# appear in their defined order.
+# The default value is: NO.
+
+SORT_GROUP_NAMES = NO
+
+# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by
+# fully-qualified names, including namespaces. If set to NO, the class list will
+# be sorted only by class name, not including the namespace part.
+# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES.
+# Note: This option applies only to the class list, not to the alphabetical
+# list.
+# The default value is: NO.
+
+SORT_BY_SCOPE_NAME = NO
+
+# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper
+# type resolution of all parameters of a function it will reject a match between
+# the prototype and the implementation of a member function even if there is
+# only one candidate or it is obvious which candidate to choose by doing a
+# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still
+# accept a match between prototype and implementation in such cases.
+# The default value is: NO.
+
+STRICT_PROTO_MATCHING = NO
+
+# The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo
+# list. This list is created by putting \todo commands in the documentation.
+# The default value is: YES.
+
+GENERATE_TODOLIST = YES
+
+# The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test
+# list. This list is created by putting \test commands in the documentation.
+# The default value is: YES.
+
+GENERATE_TESTLIST = YES
+
+# The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug
+# list. This list is created by putting \bug commands in the documentation.
+# The default value is: YES.
+
+GENERATE_BUGLIST = YES
+
+# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO)
+# the deprecated list. This list is created by putting \deprecated commands in
+# the documentation.
+# The default value is: YES.
+
+GENERATE_DEPRECATEDLIST= YES
+
+# The ENABLED_SECTIONS tag can be used to enable conditional documentation
+# sections, marked by \if <section_label> ... \endif and \cond <section_label>
+# ... \endcond blocks.
+
+ENABLED_SECTIONS =
+
+# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the
+# initial value of a variable or macro / define can have for it to appear in the
+# documentation. If the initializer consists of more lines than specified here
+# it will be hidden. Use a value of 0 to hide initializers completely. The
+# appearance of the value of individual variables and macros / defines can be
+# controlled using \showinitializer or \hideinitializer command in the
+# documentation regardless of this setting.
+# Minimum value: 0, maximum value: 10000, default value: 30.
+
+MAX_INITIALIZER_LINES = 30
+
+# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at
+# the bottom of the documentation of classes and structs. If set to YES, the
+# list will mention the files that were used to generate the documentation.
+# The default value is: YES.
+
+SHOW_USED_FILES = YES
+
+# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This
+# will remove the Files entry from the Quick Index and from the Folder Tree View
+# (if specified).
+# The default value is: YES.
+
+SHOW_FILES = YES
+
+# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces
+# page. This will remove the Namespaces entry from the Quick Index and from the
+# Folder Tree View (if specified).
+# The default value is: YES.
+
+SHOW_NAMESPACES = YES
+
+# The FILE_VERSION_FILTER tag can be used to specify a program or script that
+# doxygen should invoke to get the current version for each file (typically from
+# the version control system). Doxygen will invoke the program by executing (via
+# popen()) the command command input-file, where command is the value of the
+# FILE_VERSION_FILTER tag, and input-file is the name of an input file provided
+# by doxygen. Whatever the program writes to standard output is used as the file
+# version. For an example see the documentation.
+
+FILE_VERSION_FILTER =
+
+# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed
+# by doxygen. The layout file controls the global structure of the generated
+# output files in an output format independent way. To create the layout file
+# that represents doxygen's defaults, run doxygen with the -l option. You can
+# optionally specify a file name after the option, if omitted DoxygenLayout.xml
+# will be used as the name of the layout file.
+#
+# Note that if you run doxygen from a directory containing a file called
+# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE
+# tag is left empty.
+
+LAYOUT_FILE =
+
+# The CITE_BIB_FILES tag can be used to specify one or more bib files containing
+# the reference definitions. This must be a list of .bib files. The .bib
+# extension is automatically appended if omitted. This requires the bibtex tool
+# to be installed. See also http://en.wikipedia.org/wiki/BibTeX for more info.
+# For LaTeX the style of the bibliography can be controlled using
+# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the
+# search path. See also \cite for info how to create references.
+
+CITE_BIB_FILES =
+
+#---------------------------------------------------------------------------
+# Configuration options related to warning and progress messages
+#---------------------------------------------------------------------------
+
+# The QUIET tag can be used to turn on/off the messages that are generated to
+# standard output by doxygen. If QUIET is set to YES this implies that the
+# messages are off.
+# The default value is: NO.
+
+QUIET = NO
+
+# The WARNINGS tag can be used to turn on/off the warning messages that are
+# generated to standard error (stderr) by doxygen. If WARNINGS is set to YES
+# this implies that the warnings are on.
+#
+# Tip: Turn warnings on while writing the documentation.
+# The default value is: YES.
+
+WARNINGS = YES
+
+# If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate
+# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag
+# will automatically be disabled.
+# The default value is: YES.
+
+WARN_IF_UNDOCUMENTED = YES
+
+# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for
+# potential errors in the documentation, such as not documenting some parameters
+# in a documented function, or documenting parameters that don't exist or using
+# markup commands wrongly.
+# The default value is: YES.
+
+WARN_IF_DOC_ERROR = YES
+
+# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that
+# are documented, but have no documentation for their parameters or return
+# value. If set to NO, doxygen will only warn about wrong or incomplete
+# parameter documentation, but not about the absence of documentation.
+# The default value is: NO.
+
+WARN_NO_PARAMDOC = NO
+
+# If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when
+# a warning is encountered.
+# The default value is: NO.
+
+WARN_AS_ERROR = NO
+
+# The WARN_FORMAT tag determines the format of the warning messages that doxygen
+# can produce. The string should contain the $file, $line, and $text tags, which
+# will be replaced by the file and line number from which the warning originated
+# and the warning text. Optionally the format may contain $version, which will
+# be replaced by the version of the file (if it could be obtained via
+# FILE_VERSION_FILTER)
+# The default value is: $file:$line: $text.
+
+WARN_FORMAT = "$file:$line: $text"
+
+# The WARN_LOGFILE tag can be used to specify a file to which warning and error
+# messages should be written. If left blank the output is written to standard
+# error (stderr).
+
+WARN_LOGFILE =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the input files
+#---------------------------------------------------------------------------
+
+# The INPUT tag is used to specify the files and/or directories that contain
+# documented source files. You may enter file names like myfile.cpp or
+# directories like /usr/src/myproject. Separate the files or directories with
+# spaces. See also FILE_PATTERNS and EXTENSION_MAPPING
+# Note: If this tag is empty the current directory is searched.
+
+INPUT = "@TOP_SRCDIR@/include" "@TOP_SRCDIR@/src"
+
+# This tag can be used to specify the character encoding of the source files
+# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
+# libiconv (or the iconv built into libc) for the transcoding. See the libiconv
+# documentation (see: http://www.gnu.org/software/libiconv) for the list of
+# possible encodings.
+# The default value is: UTF-8.
+
+INPUT_ENCODING = UTF-8
+
+# If the value of the INPUT tag contains directories, you can use the
+# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and
+# *.h) to filter out the source-files in the directories.
+#
+# Note that for custom extensions or not directly supported extensions you also
+# need to set EXTENSION_MAPPING for the extension otherwise the files are not
+# read by doxygen.
+#
+# If left blank the following patterns are tested: *.c, *.cc, *.cxx, *.cpp,
+# *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h,
+# *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc,
+# *.m, *.markdown, *.md, *.mm, *.dox, *.py, *.pyw, *.f90, *.f95, *.f03, *.f08,
+# *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf and *.qsf.
+
+FILE_PATTERNS = *.c \
+ *.cc \
+ *.cxx \
+ *.cpp \
+ *.c++ \
+ *.java \
+ *.ii \
+ *.ixx \
+ *.ipp \
+ *.i++ \
+ *.inl \
+ *.idl \
+ *.ddl \
+ *.odl \
+ *.h \
+ *.hh \
+ *.hxx \
+ *.hpp \
+ *.h++ \
+ *.cs \
+ *.d \
+ *.php \
+ *.php4 \
+ *.php5 \
+ *.phtml \
+ *.inc \
+ *.m \
+ *.markdown \
+ *.md \
+ *.mm \
+ *.dox \
+ *.py \
+ *.pyw \
+ *.f90 \
+ *.f95 \
+ *.f03 \
+ *.f08 \
+ *.f \
+ *.for \
+ *.tcl \
+ *.vhd \
+ *.vhdl \
+ *.ucf \
+ *.qsf
+
+# The RECURSIVE tag can be used to specify whether or not subdirectories should
+# be searched for input files as well.
+# The default value is: NO.
+
+RECURSIVE = NO
+
+# The EXCLUDE tag can be used to specify files and/or directories that should be
+# excluded from the INPUT source files. This way you can easily exclude a
+# subdirectory from a directory tree whose root is specified with the INPUT tag.
+#
+# Note that relative paths are relative to the directory from which doxygen is
+# run.
+
+EXCLUDE =
+
+# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
+# directories that are symbolic links (a Unix file system feature) are excluded
+# from the input.
+# The default value is: NO.
+
+EXCLUDE_SYMLINKS = NO
+
+# If the value of the INPUT tag contains directories, you can use the
+# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
+# certain files from those directories.
+#
+# Note that the wildcards are matched against the file with absolute path, so to
+# exclude all test directories for example use the pattern */test/*
+
+EXCLUDE_PATTERNS =
+
+# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
+# (namespaces, classes, functions, etc.) that should be excluded from the
+# output. The symbol name can be a fully qualified name, a word, or if the
+# wildcard * is used, a substring. Examples: ANamespace, AClass,
+# AClass::ANamespace, ANamespace::*Test
+#
+# Note that the wildcards are matched against the file with absolute path, so to
+# exclude all test directories use the pattern */test/*
+
+EXCLUDE_SYMBOLS =
+
+# The EXAMPLE_PATH tag can be used to specify one or more files or directories
+# that contain example code fragments that are included (see the \include
+# command).
+
+EXAMPLE_PATH =
+
+# If the value of the EXAMPLE_PATH tag contains directories, you can use the
+# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and
+# *.h) to filter out the source-files in the directories. If left blank all
+# files are included.
+
+EXAMPLE_PATTERNS = *
+
+# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
+# searched for input files to be used with the \include or \dontinclude commands
+# irrespective of the value of the RECURSIVE tag.
+# The default value is: NO.
+
+EXAMPLE_RECURSIVE = NO
+
+# The IMAGE_PATH tag can be used to specify one or more files or directories
+# that contain images that are to be included in the documentation (see the
+# \image command).
+
+IMAGE_PATH =
+
+# The INPUT_FILTER tag can be used to specify a program that doxygen should
+# invoke to filter for each input file. Doxygen will invoke the filter program
+# by executing (via popen()) the command:
+#
+# <filter> <input-file>
+#
+# where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the
+# name of an input file. Doxygen will then use the output that the filter
+# program writes to standard output. If FILTER_PATTERNS is specified, this tag
+# will be ignored.
+#
+# Note that the filter must not add or remove lines; it is applied before the
+# code is scanned, but not when the output code is generated. If lines are added
+# or removed, the anchors will not be placed correctly.
+#
+# Note that for custom extensions or not directly supported extensions you also
+# need to set EXTENSION_MAPPING for the extension otherwise the files are not
+# properly processed by doxygen.
+
+INPUT_FILTER =
+
+# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
+# basis. Doxygen will compare the file name with each pattern and apply the
+# filter if there is a match. The filters are a list of the form: pattern=filter
+# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how
+# filters are used. If the FILTER_PATTERNS tag is empty or if none of the
+# patterns match the file name, INPUT_FILTER is applied.
+#
+# Note that for custom extensions or not directly supported extensions you also
+# need to set EXTENSION_MAPPING for the extension otherwise the files are not
+# properly processed by doxygen.
+
+FILTER_PATTERNS =
+
+# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
+# INPUT_FILTER) will also be used to filter the input files that are used for
+# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES).
+# The default value is: NO.
+
+FILTER_SOURCE_FILES = NO
+
+# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file
+# pattern. A pattern will override the setting for FILTER_PATTERN (if any) and
+# it is also possible to disable source filtering for a specific pattern using
+# *.ext= (so without naming a filter).
+# This tag requires that the tag FILTER_SOURCE_FILES is set to YES.
+
+FILTER_SOURCE_PATTERNS =
+
+# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that
+# is part of the input, its contents will be placed on the main page
+# (index.html). This can be useful if you have a project on for instance GitHub
+# and want to reuse the introduction page also for the doxygen output.
+
+USE_MDFILE_AS_MAINPAGE =
+
+#---------------------------------------------------------------------------
+# Configuration options related to source browsing
+#---------------------------------------------------------------------------
+
+# If the SOURCE_BROWSER tag is set to YES then a list of source files will be
+# generated. Documented entities will be cross-referenced with these sources.
+#
+# Note: To get rid of all source code in the generated output, make sure that
+# also VERBATIM_HEADERS is set to NO.
+# The default value is: NO.
+
+SOURCE_BROWSER = NO
+
+# Setting the INLINE_SOURCES tag to YES will include the body of functions,
+# classes and enums directly into the documentation.
+# The default value is: NO.
+
+INLINE_SOURCES = NO
+
+# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any
+# special comment blocks from generated source code fragments. Normal C, C++ and
+# Fortran comments will always remain visible.
+# The default value is: YES.
+
+STRIP_CODE_COMMENTS = YES
+
+# If the REFERENCED_BY_RELATION tag is set to YES then for each documented
+# function all documented functions referencing it will be listed.
+# The default value is: NO.
+
+REFERENCED_BY_RELATION = NO
+
+# If the REFERENCES_RELATION tag is set to YES then for each documented function
+# all documented entities called/used by that function will be listed.
+# The default value is: NO.
+
+REFERENCES_RELATION = NO
+
+# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set
+# to YES then the hyperlinks from functions in REFERENCES_RELATION and
+# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will
+# link to the documentation.
+# The default value is: YES.
+
+REFERENCES_LINK_SOURCE = YES
+
+# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the
+# source code will show a tooltip with additional information such as prototype,
+# brief description and links to the definition and documentation. Since this
+# will make the HTML file larger and loading of large files a bit slower, you
+# can opt to disable this feature.
+# The default value is: YES.
+# This tag requires that the tag SOURCE_BROWSER is set to YES.
+
+SOURCE_TOOLTIPS = YES
+
+# If the USE_HTAGS tag is set to YES then the references to source code will
+# point to the HTML generated by the htags(1) tool instead of doxygen built-in
+# source browser. The htags tool is part of GNU's global source tagging system
+# (see http://www.gnu.org/software/global/global.html). You will need version
+# 4.8.6 or higher.
+#
+# To use it do the following:
+# - Install the latest version of global
+# - Enable SOURCE_BROWSER and USE_HTAGS in the config file
+# - Make sure the INPUT points to the root of the source tree
+# - Run doxygen as normal
+#
+# Doxygen will invoke htags (and that will in turn invoke gtags), so these
+# tools must be available from the command line (i.e. in the search path).
+#
+# The result: instead of the source browser generated by doxygen, the links to
+# source code will now point to the output of htags.
+# The default value is: NO.
+# This tag requires that the tag SOURCE_BROWSER is set to YES.
+
+USE_HTAGS = NO
+
+# If the VERBATIM_HEADERS tag is set to YES then doxygen will generate a
+# verbatim copy of the header file for each class for which an include is
+# specified. Set to NO to disable this.
+# See also: Section \class.
+# The default value is: YES.
+
+VERBATIM_HEADERS = YES
+
+#---------------------------------------------------------------------------
+# Configuration options related to the alphabetical class index
+#---------------------------------------------------------------------------
+
+# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all
+# compounds will be generated. Enable this if the project contains a lot of
+# classes, structs, unions or interfaces.
+# The default value is: YES.
+
+ALPHABETICAL_INDEX = YES
+
+# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in
+# which the alphabetical index list will be split.
+# Minimum value: 1, maximum value: 20, default value: 5.
+# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
+
+COLS_IN_ALPHA_INDEX = 5
+
+# In case all classes in a project start with a common prefix, all classes will
+# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag
+# can be used to specify a prefix (or a list of prefixes) that should be ignored
+# while generating the index headers.
+# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
+
+IGNORE_PREFIX =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the HTML output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output
+# The default value is: YES.
+
+GENERATE_HTML = YES
+
+# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: html.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_OUTPUT = html
+
+# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each
+# generated HTML page (for example: .htm, .php, .asp).
+# The default value is: .html.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_FILE_EXTENSION = .html
+
+# The HTML_HEADER tag can be used to specify a user-defined HTML header file for
+# each generated HTML page. If the tag is left blank doxygen will generate a
+# standard header.
+#
+# To get valid HTML, the header file must include any scripts and style sheets
+# that doxygen needs, which is dependent on the configuration options used (e.g.
+# the setting GENERATE_TREEVIEW). It is highly recommended to start with a
+# default header using
+# doxygen -w html new_header.html new_footer.html new_stylesheet.css
+# YourConfigFile
+# and then modify the file new_header.html. See also section "Doxygen usage"
+# for information on how to generate the default header that doxygen normally
+# uses.
+# Note: The header is subject to change so you typically have to regenerate the
+# default header when upgrading to a newer version of doxygen. For a description
+# of the possible markers and block names see the documentation.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_HEADER =
+
+# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each
+# generated HTML page. If the tag is left blank doxygen will generate a standard
+# footer. See HTML_HEADER for more information on how to generate a default
+# footer and what special commands can be used inside the footer. See also
+# section "Doxygen usage" for information on how to generate the default footer
+# that doxygen normally uses.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_FOOTER =
+
+# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style
+# sheet that is used by each HTML page. It can be used to fine-tune the look of
+# the HTML output. If left blank doxygen will generate a default style sheet.
+# See also section "Doxygen usage" for information on how to generate the style
+# sheet that doxygen normally uses.
+# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as
+# it is more robust and this tag (HTML_STYLESHEET) will in the future become
+# obsolete.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_STYLESHEET =
+
+# The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined
+# cascading style sheets that are included after the standard style sheets
+# created by doxygen. Using this option one can overrule certain style aspects.
+# This is preferred over using HTML_STYLESHEET since it does not replace the
+# standard style sheet and is therefore more robust against future updates.
+# Doxygen will copy the style sheet files to the output directory.
+# Note: The order of the extra style sheet files is of importance (e.g. the last
+# style sheet in the list overrules the setting of the previous ones in the
+# list). For an example see the documentation.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_EXTRA_STYLESHEET =
+
+# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or
+# other source files which should be copied to the HTML output directory. Note
+# that these files will be copied to the base HTML output directory. Use the
+# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these
+# files. In the HTML_STYLESHEET file, use the file name only. Also note that the
+# files will be copied as-is; there are no commands or markers available.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_EXTRA_FILES =
+
+# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen
+# will adjust the colors in the style sheet and background images according to
+# this color. Hue is specified as an angle on a colorwheel, see
+# http://en.wikipedia.org/wiki/Hue for more information. For instance the value
+# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300
+# purple, and 360 is red again.
+# Minimum value: 0, maximum value: 359, default value: 220.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_COLORSTYLE_HUE = 220
+
+# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors
+# in the HTML output. For a value of 0 the output will use grayscales only. A
+# value of 255 will produce the most vivid colors.
+# Minimum value: 0, maximum value: 255, default value: 100.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_COLORSTYLE_SAT = 100
+
+# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the
+# luminance component of the colors in the HTML output. Values below 100
+# gradually make the output lighter, whereas values above 100 make the output
+# darker. The value divided by 100 is the actual gamma applied, so 80 represents
+# a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not
+# change the gamma.
+# Minimum value: 40, maximum value: 240, default value: 80.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_COLORSTYLE_GAMMA = 80
+
+# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML
+# page will contain the date and time when the page was generated. Setting this
+# to YES can help to show when doxygen was last run and thus if the
+# documentation is up to date.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_TIMESTAMP = NO
+
+# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML
+# documentation will contain sections that can be hidden and shown after the
+# page has loaded.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_DYNAMIC_SECTIONS = NO
+
+# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries
+# shown in the various tree structured indices initially; the user can expand
+# and collapse entries dynamically later on. Doxygen will expand the tree to
+# such a level that at most the specified number of entries are visible (unless
+# a fully collapsed tree already exceeds this amount). So setting the number of
+# entries 1 will produce a full collapsed tree by default. 0 is a special value
+# representing an infinite number of entries and will result in a full expanded
+# tree by default.
+# Minimum value: 0, maximum value: 9999, default value: 100.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_INDEX_NUM_ENTRIES = 100
+
+# If the GENERATE_DOCSET tag is set to YES, additional index files will be
+# generated that can be used as input for Apple's Xcode 3 integrated development
+# environment (see: http://developer.apple.com/tools/xcode/), introduced with
+# OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a
+# Makefile in the HTML output directory. Running make will produce the docset in
+# that directory and running make install will install the docset in
+# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at
+# startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html
+# for more information.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_DOCSET = NO
+
+# This tag determines the name of the docset feed. A documentation feed provides
+# an umbrella under which multiple documentation sets from a single provider
+# (such as a company or product suite) can be grouped.
+# The default value is: Doxygen generated docs.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_FEEDNAME = "Doxygen generated docs"
+
+# This tag specifies a string that should uniquely identify the documentation
+# set bundle. This should be a reverse domain-name style string, e.g.
+# com.mycompany.MyDocSet. Doxygen will append .docset to the name.
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_BUNDLE_ID = org.doxygen.Project
+
+# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify
+# the documentation publisher. This should be a reverse domain-name style
+# string, e.g. com.mycompany.MyDocSet.documentation.
+# The default value is: org.doxygen.Publisher.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_PUBLISHER_ID = org.doxygen.Publisher
+
+# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher.
+# The default value is: Publisher.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_PUBLISHER_NAME = Publisher
+
+# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three
+# additional HTML index files: index.hhp, index.hhc, and index.hhk. The
+# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop
+# (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on
+# Windows.
+#
+# The HTML Help Workshop contains a compiler that can convert all HTML output
+# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML
+# files are now used as the Windows 98 help format, and will replace the old
+# Windows help format (.hlp) on all Windows platforms in the future. Compressed
+# HTML files also contain an index, a table of contents, and you can search for
+# words in the documentation. The HTML workshop also contains a viewer for
+# compressed HTML files.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_HTMLHELP = NO
+
+# The CHM_FILE tag can be used to specify the file name of the resulting .chm
+# file. You can add a path in front of the file if the result should not be
+# written to the html output directory.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+CHM_FILE =
+
+# The HHC_LOCATION tag can be used to specify the location (absolute path
+# including file name) of the HTML help compiler (hhc.exe). If non-empty,
+# doxygen will try to run the HTML help compiler on the generated index.hhp.
+# The file has to be specified with full path.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+HHC_LOCATION =
+
+# The GENERATE_CHI flag controls if a separate .chi index file is generated
+# (YES) or that it should be included in the master .chm file (NO).
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+GENERATE_CHI = NO
+
+# The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc)
+# and project file content.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+CHM_INDEX_ENCODING =
+
+# The BINARY_TOC flag controls whether a binary table of contents is generated
+# (YES) or a normal table of contents (NO) in the .chm file. Furthermore it
+# enables the Previous and Next buttons.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+BINARY_TOC = NO
+
+# The TOC_EXPAND flag can be set to YES to add extra items for group members to
+# the table of contents of the HTML help documentation and to the tree view.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+TOC_EXPAND = NO
+
+# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and
+# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that
+# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help
+# (.qch) of the generated HTML documentation.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_QHP = NO
+
+# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify
+# the file name of the resulting .qch file. The path specified is relative to
+# the HTML output folder.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QCH_FILE =
+
+# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help
+# Project output. For more information please see Qt Help Project / Namespace
+# (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace).
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_NAMESPACE = org.doxygen.Project
+
+# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt
+# Help Project output. For more information please see Qt Help Project / Virtual
+# Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual-
+# folders).
+# The default value is: doc.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_VIRTUAL_FOLDER = doc
+
+# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom
+# filter to add. For more information please see Qt Help Project / Custom
+# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
+# filters).
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_CUST_FILTER_NAME =
+
+# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the
+# custom filter to add. For more information please see Qt Help Project / Custom
+# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
+# filters).
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_CUST_FILTER_ATTRS =
+
+# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this
+# project's filter section matches. Qt Help Project / Filter Attributes (see:
+# http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes).
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_SECT_FILTER_ATTRS =
+
+# The QHG_LOCATION tag can be used to specify the location of Qt's
+# qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the
+# generated .qhp file.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHG_LOCATION =
+
+# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be
+# generated, together with the HTML files, they form an Eclipse help plugin. To
+# install this plugin and make it available under the help contents menu in
+# Eclipse, the contents of the directory containing the HTML and XML files needs
+# to be copied into the plugins directory of eclipse. The name of the directory
+# within the plugins directory should be the same as the ECLIPSE_DOC_ID value.
+# After copying Eclipse needs to be restarted before the help appears.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_ECLIPSEHELP = NO
+
+# A unique identifier for the Eclipse help plugin. When installing the plugin
+# the directory name containing the HTML and XML files should also have this
+# name. Each documentation set should have its own identifier.
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES.
+
+ECLIPSE_DOC_ID = org.doxygen.Project
+
+# If you want full control over the layout of the generated HTML pages it might
+# be necessary to disable the index and replace it with your own. The
+# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top
+# of each HTML page. A value of NO enables the index and the value YES disables
+# it. Since the tabs in the index contain the same information as the navigation
+# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+DISABLE_INDEX = NO
+
+# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index
+# structure should be generated to display hierarchical information. If the tag
+# value is set to YES, a side panel will be generated containing a tree-like
+# index structure (just like the one that is generated for HTML Help). For this
+# to work a browser that supports JavaScript, DHTML, CSS and frames is required
+# (i.e. any modern browser). Windows users are probably better off using the
+# HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can
+# further fine-tune the look of the index. As an example, the default style
+# sheet generated by doxygen has an example that shows how to put an image at
+# the root of the tree instead of the PROJECT_NAME. Since the tree basically has
+# the same information as the tab index, you could consider setting
+# DISABLE_INDEX to YES when enabling this option.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_TREEVIEW = NO
+
+# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that
+# doxygen will group on one line in the generated HTML documentation.
+#
+# Note that a value of 0 will completely suppress the enum values from appearing
+# in the overview section.
+# Minimum value: 0, maximum value: 20, default value: 4.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+ENUM_VALUES_PER_LINE = 4
+
+# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used
+# to set the initial width (in pixels) of the frame in which the tree is shown.
+# Minimum value: 0, maximum value: 1500, default value: 250.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+TREEVIEW_WIDTH = 250
+
+# If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to
+# external symbols imported via tag files in a separate window.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+EXT_LINKS_IN_WINDOW = NO
+
+# Use this tag to change the font size of LaTeX formulas included as images in
+# the HTML documentation. When you change the font size after a successful
+# doxygen run you need to manually remove any form_*.png images from the HTML
+# output directory to force them to be regenerated.
+# Minimum value: 8, maximum value: 50, default value: 10.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+FORMULA_FONTSIZE = 10
+
+# Use the FORMULA_TRANSPARENT tag to determine whether or not the images
+# generated for formulas are transparent PNGs. Transparent PNGs are not
+# supported properly for IE 6.0, but are supported on all modern browsers.
+#
+# Note that when changing this option you need to delete any form_*.png files in
+# the HTML output directory before the changes have effect.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+FORMULA_TRANSPARENT = YES
+
+# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see
+# http://www.mathjax.org) which uses client side Javascript for the rendering
+# instead of using pre-rendered bitmaps. Use this if you do not have LaTeX
+# installed or if you want your formulas to look prettier in the HTML output.
+# enabled you may also need to install MathJax separately and configure the path
+# to it using the MATHJAX_RELPATH option.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+USE_MATHJAX = NO
+
+# When MathJax is enabled you can set the default output format to be used for
+# the MathJax output. See the MathJax site (see:
+# http://docs.mathjax.org/en/latest/output.html) for more details.
+# Possible values are: HTML-CSS (which is slower, but has the best
+# compatibility), NativeMML (i.e. MathML) and SVG.
+# The default value is: HTML-CSS.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_FORMAT = HTML-CSS
+
+# When MathJax is enabled you need to specify the location relative to the HTML
+# output directory using the MATHJAX_RELPATH option. The destination directory
+# should contain the MathJax.js script. For instance, if the mathjax directory
+# is located at the same level as the HTML output directory, then
+# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax
+# Content Delivery Network so you can quickly see the result without installing
+# MathJax. However, it is strongly recommended to install a local copy of
+# MathJax from http://www.mathjax.org before deployment.
+# The default value is: http://cdn.mathjax.org/mathjax/latest.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest
+
+# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax
+# extension names that should be enabled during MathJax rendering. For example
+# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_EXTENSIONS =
+
+# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces
+# of code that will be used on startup of the MathJax code. See the MathJax site
+# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an
+# example see the documentation.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_CODEFILE =
+
+# When the SEARCHENGINE tag is enabled doxygen will generate a search box for
+# the HTML output. The underlying search engine uses javascript and DHTML and
+# should work on any modern browser. Note that when using HTML help
+# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET)
+# there is already a search function so this one should typically be disabled.
+# For large projects the javascript based search engine can be slow, then
+# enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to
+# search using the keyboard; to jump to the search box use <access key> + S
+# (what the <access key> is depends on the OS and browser, but it is typically
+# <CTRL>, <ALT>/<option>, or both). Inside the search box use the <cursor down
+# key> to jump into the search results window, the results can be navigated
+# using the <cursor keys>. Press <Enter> to select an item or <escape> to cancel
+# the search. The filter options can be selected when the cursor is inside the
+# search box by pressing <Shift>+<cursor down>. Also here use the <cursor keys>
+# to select a filter and <Enter> or <escape> to activate or cancel the filter
+# option.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+SEARCHENGINE = YES
+
+# When the SERVER_BASED_SEARCH tag is enabled the search engine will be
+# implemented using a web server instead of a web client using Javascript. There
+# are two flavors of web server based searching depending on the EXTERNAL_SEARCH
+# setting. When disabled, doxygen will generate a PHP script for searching and
+# an index file used by the script. When EXTERNAL_SEARCH is enabled the indexing
+# and searching needs to be provided by external tools. See the section
+# "External Indexing and Searching" for details.
+# The default value is: NO.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+SERVER_BASED_SEARCH = NO
+
+# When EXTERNAL_SEARCH tag is enabled doxygen will no longer generate the PHP
+# script for searching. Instead the search results are written to an XML file
+# which needs to be processed by an external indexer. Doxygen will invoke an
+# external search engine pointed to by the SEARCHENGINE_URL option to obtain the
+# search results.
+#
+# Doxygen ships with an example indexer (doxyindexer) and search engine
+# (doxysearch.cgi) which are based on the open source search engine library
+# Xapian (see: http://xapian.org/).
+#
+# See the section "External Indexing and Searching" for details.
+# The default value is: NO.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+EXTERNAL_SEARCH = NO
+
+# The SEARCHENGINE_URL should point to a search engine hosted by a web server
+# which will return the search results when EXTERNAL_SEARCH is enabled.
+#
+# Doxygen ships with an example indexer (doxyindexer) and search engine
+# (doxysearch.cgi) which are based on the open source search engine library
+# Xapian (see: http://xapian.org/). See the section "External Indexing and
+# Searching" for details.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+SEARCHENGINE_URL =
+
+# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the unindexed
+# search data is written to a file for indexing by an external tool. With the
+# SEARCHDATA_FILE tag the name of this file can be specified.
+# The default file is: searchdata.xml.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+SEARCHDATA_FILE = searchdata.xml
+
+# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the
+# EXTERNAL_SEARCH_ID tag can be used as an identifier for the project. This is
+# useful in combination with EXTRA_SEARCH_MAPPINGS to search through multiple
+# projects and redirect the results back to the right project.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+EXTERNAL_SEARCH_ID =
+
+# The EXTRA_SEARCH_MAPPINGS tag can be used to enable searching through doxygen
+# projects other than the one defined by this configuration file, but that are
+# all added to the same external search index. Each project needs to have a
+# unique id set via EXTERNAL_SEARCH_ID. The search mapping then maps the id to
+# a relative location where the documentation can be found. The format is:
+# EXTRA_SEARCH_MAPPINGS = tagname1=loc1 tagname2=loc2 ...
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+EXTRA_SEARCH_MAPPINGS =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the LaTeX output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_LATEX tag is set to YES, doxygen will generate LaTeX output.
+# The default value is: YES.
+
+GENERATE_LATEX = YES
+
+# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: latex.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_OUTPUT = latex
+
+# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be
+# invoked.
+#
+# Note that when enabling USE_PDFLATEX this option is only used for generating
+# bitmaps for formulas in the HTML output, but not in the Makefile that is
+# written to the output directory.
+# The default file is: latex.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_CMD_NAME = latex
+
+# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to generate
+# index for LaTeX.
+# The default file is: makeindex.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+MAKEINDEX_CMD_NAME = makeindex
+
+# If the COMPACT_LATEX tag is set to YES, doxygen generates more compact LaTeX
+# documents. This may be useful for small projects and may help to save some
+# trees in general.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+COMPACT_LATEX = NO
+
+# The PAPER_TYPE tag can be used to set the paper type that is used by the
+# printer.
+# Possible values are: a4 (210 x 297 mm), letter (8.5 x 11 inches), legal (8.5 x
+# 14 inches) and executive (7.25 x 10.5 inches).
+# The default value is: a4.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+PAPER_TYPE = a4
+
+# The EXTRA_PACKAGES tag can be used to specify one or more LaTeX package names
+# that should be included in the LaTeX output. The package can be specified just
+# by its name or with the correct syntax as to be used with the LaTeX
+# \usepackage command. To get the times font for instance you can specify:
+# EXTRA_PACKAGES=times or EXTRA_PACKAGES={times}
+# To use the option intlimits with the amsmath package you can specify:
+# EXTRA_PACKAGES=[intlimits]{amsmath}
+# If left blank no extra packages will be included.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+EXTRA_PACKAGES =
+
+# The LATEX_HEADER tag can be used to specify a personal LaTeX header for the
+# generated LaTeX document. The header should contain everything until the first
+# chapter. If it is left blank doxygen will generate a standard header. See
+# section "Doxygen usage" for information on how to let doxygen write the
+# default header to a separate file.
+#
+# Note: Only use a user-defined header if you know what you are doing! The
+# following commands have a special meaning inside the header: $title,
+# $datetime, $date, $doxygenversion, $projectname, $projectnumber,
+# $projectbrief, $projectlogo. Doxygen will replace $title with the empty
+# string, for the replacement values of the other commands the user is referred
+# to HTML_HEADER.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_HEADER =
+
+# The LATEX_FOOTER tag can be used to specify a personal LaTeX footer for the
+# generated LaTeX document. The footer should contain everything after the last
+# chapter. If it is left blank doxygen will generate a standard footer. See
+# LATEX_HEADER for more information on how to generate a default footer and what
+# special commands can be used inside the footer.
+#
+# Note: Only use a user-defined footer if you know what you are doing!
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_FOOTER =
+
+# The LATEX_EXTRA_STYLESHEET tag can be used to specify additional user-defined
+# LaTeX style sheets that are included after the standard style sheets created
+# by doxygen. Using this option one can overrule certain style aspects. Doxygen
+# will copy the style sheet files to the output directory.
+# Note: The order of the extra style sheet files is of importance (e.g. the last
+# style sheet in the list overrules the setting of the previous ones in the
+# list).
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_EXTRA_STYLESHEET =
+
+# The LATEX_EXTRA_FILES tag can be used to specify one or more extra images or
+# other source files which should be copied to the LATEX_OUTPUT output
+# directory. Note that the files will be copied as-is; there are no commands or
+# markers available.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_EXTRA_FILES =
+
+# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated is
+# prepared for conversion to PDF (using ps2pdf or pdflatex). The PDF file will
+# contain links (just like the HTML output) instead of page references. This
+# makes the output suitable for online browsing using a PDF viewer.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+PDF_HYPERLINKS = YES
+
+# If the USE_PDFLATEX tag is set to YES, doxygen will use pdflatex to generate
+# the PDF file directly from the LaTeX files. Set this option to YES, to get a
+# higher quality PDF documentation.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+USE_PDFLATEX = YES
+
+# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \batchmode
+# command to the generated LaTeX files. This will instruct LaTeX to keep running
+# if errors occur, instead of asking the user for help. This option is also used
+# when generating formulas in HTML.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_BATCHMODE = NO
+
+# If the LATEX_HIDE_INDICES tag is set to YES then doxygen will not include the
+# index chapters (such as File Index, Compound Index, etc.) in the output.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_HIDE_INDICES = NO
+
+# If the LATEX_SOURCE_CODE tag is set to YES then doxygen will include source
+# code with syntax highlighting in the LaTeX output.
+#
+# Note that which sources are shown also depends on other settings such as
+# SOURCE_BROWSER.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_SOURCE_CODE = NO
+
+# The LATEX_BIB_STYLE tag can be used to specify the style to use for the
+# bibliography, e.g. plainnat, or ieeetr. See
+# http://en.wikipedia.org/wiki/BibTeX and \cite for more info.
+# The default value is: plain.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_BIB_STYLE = plain
+
+# If the LATEX_TIMESTAMP tag is set to YES then the footer of each generated
+# page will contain the date and time when the page was generated. Setting this
+# to NO can help when comparing the output of multiple runs.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_TIMESTAMP = NO
+
+#---------------------------------------------------------------------------
+# Configuration options related to the RTF output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_RTF tag is set to YES, doxygen will generate RTF output. The
+# RTF output is optimized for Word 97 and may not look too pretty with other RTF
+# readers/editors.
+# The default value is: NO.
+
+GENERATE_RTF = NO
+
+# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: rtf.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_OUTPUT = rtf
+
+# If the COMPACT_RTF tag is set to YES, doxygen generates more compact RTF
+# documents. This may be useful for small projects and may help to save some
+# trees in general.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+COMPACT_RTF = NO
+
+# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated will
+# contain hyperlink fields. The RTF file will contain links (just like the HTML
+# output) instead of page references. This makes the output suitable for online
+# browsing using Word or some other Word compatible readers that support those
+# fields.
+#
+# Note: WordPad (write) and others do not support links.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_HYPERLINKS = NO
+
+# Load stylesheet definitions from file. Syntax is similar to doxygen's config
+# file, i.e. a series of assignments. You only have to provide replacements,
+# missing definitions are set to their default value.
+#
+# See also section "Doxygen usage" for information on how to generate the
+# default style sheet that doxygen normally uses.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_STYLESHEET_FILE =
+
+# Set optional variables used in the generation of an RTF document. Syntax is
+# similar to doxygen's config file. A template extensions file can be generated
+# using doxygen -e rtf extensionFile.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_EXTENSIONS_FILE =
+
+# If the RTF_SOURCE_CODE tag is set to YES then doxygen will include source code
+# with syntax highlighting in the RTF output.
+#
+# Note that which sources are shown also depends on other settings such as
+# SOURCE_BROWSER.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_SOURCE_CODE = NO
+
+#---------------------------------------------------------------------------
+# Configuration options related to the man page output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_MAN tag is set to YES, doxygen will generate man pages for
+# classes and files.
+# The default value is: NO.
+
+GENERATE_MAN = NO
+
+# The MAN_OUTPUT tag is used to specify where the man pages will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it. A directory man3 will be created inside the directory specified by
+# MAN_OUTPUT.
+# The default directory is: man.
+# This tag requires that the tag GENERATE_MAN is set to YES.
+
+MAN_OUTPUT = man
+
+# The MAN_EXTENSION tag determines the extension that is added to the generated
+# man pages. In case the manual section does not start with a number, the number
+# 3 is prepended. The dot (.) at the beginning of the MAN_EXTENSION tag is
+# optional.
+# The default value is: .3.
+# This tag requires that the tag GENERATE_MAN is set to YES.
+
+MAN_EXTENSION = .3
+
+# The MAN_SUBDIR tag determines the name of the directory created within
+# MAN_OUTPUT in which the man pages are placed. It defaults to man followed by
+# MAN_EXTENSION with the initial . removed.
+# This tag requires that the tag GENERATE_MAN is set to YES.
+
+MAN_SUBDIR =
+
+# If the MAN_LINKS tag is set to YES and doxygen generates man output, then it
+# will generate one additional man file for each entity documented in the real
+# man page(s). These additional files only source the real man page, but without
+# them the man command would be unable to find the correct page.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_MAN is set to YES.
+
+MAN_LINKS = NO
+
+#---------------------------------------------------------------------------
+# Configuration options related to the XML output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_XML tag is set to YES, doxygen will generate an XML file that
+# captures the structure of the code including all documentation.
+# The default value is: NO.
+
+GENERATE_XML = NO
+
+# The XML_OUTPUT tag is used to specify where the XML pages will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: xml.
+# This tag requires that the tag GENERATE_XML is set to YES.
+
+XML_OUTPUT = xml
+
+# If the XML_PROGRAMLISTING tag is set to YES, doxygen will dump the program
+# listings (including syntax highlighting and cross-referencing information) to
+# the XML output. Note that enabling this will significantly increase the size
+# of the XML output.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_XML is set to YES.
+
+XML_PROGRAMLISTING = YES
+
+#---------------------------------------------------------------------------
+# Configuration options related to the DOCBOOK output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_DOCBOOK tag is set to YES, doxygen will generate Docbook files
+# that can be used to generate PDF.
+# The default value is: NO.
+
+GENERATE_DOCBOOK = NO
+
+# The DOCBOOK_OUTPUT tag is used to specify where the Docbook pages will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be put in
+# front of it.
+# The default directory is: docbook.
+# This tag requires that the tag GENERATE_DOCBOOK is set to YES.
+
+DOCBOOK_OUTPUT = docbook
+
+# If the DOCBOOK_PROGRAMLISTING tag is set to YES, doxygen will include the
+# program listings (including syntax highlighting and cross-referencing
+# information) to the DOCBOOK output. Note that enabling this will significantly
+# increase the size of the DOCBOOK output.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_DOCBOOK is set to YES.
+
+DOCBOOK_PROGRAMLISTING = NO
+
+#---------------------------------------------------------------------------
+# Configuration options for the AutoGen Definitions output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_AUTOGEN_DEF tag is set to YES, doxygen will generate an
+# AutoGen Definitions (see http://autogen.sf.net) file that captures the
+# structure of the code including all documentation. Note that this feature is
+# still experimental and incomplete at the moment.
+# The default value is: NO.
+
+GENERATE_AUTOGEN_DEF = NO
+
+#---------------------------------------------------------------------------
+# Configuration options related to the Perl module output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_PERLMOD tag is set to YES, doxygen will generate a Perl module
+# file that captures the structure of the code including all documentation.
+#
+# Note that this feature is still experimental and incomplete at the moment.
+# The default value is: NO.
+
+GENERATE_PERLMOD = NO
+
+# If the PERLMOD_LATEX tag is set to YES, doxygen will generate the necessary
+# Makefile rules, Perl scripts and LaTeX code to be able to generate PDF and DVI
+# output from the Perl module output.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_PERLMOD is set to YES.
+
+PERLMOD_LATEX = NO
+
+# If the PERLMOD_PRETTY tag is set to YES, the Perl module output will be nicely
+# formatted so it can be parsed by a human reader. This is useful if you want to
+# understand what is going on. On the other hand, if this tag is set to NO, the
+# size of the Perl module output will be much smaller and Perl will parse it
+# just the same.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_PERLMOD is set to YES.
+
+PERLMOD_PRETTY = YES
+
+# The names of the make variables in the generated doxyrules.make file are
+# prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. This is useful
+# so different doxyrules.make files included by the same Makefile don't
+# overwrite each other's variables.
+# This tag requires that the tag GENERATE_PERLMOD is set to YES.
+
+PERLMOD_MAKEVAR_PREFIX =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the preprocessor
+#---------------------------------------------------------------------------
+
+# If the ENABLE_PREPROCESSING tag is set to YES, doxygen will evaluate all
+# C-preprocessor directives found in the sources and include files.
+# The default value is: YES.
+
+ENABLE_PREPROCESSING = YES
+
+# If the MACRO_EXPANSION tag is set to YES, doxygen will expand all macro names
+# in the source code. If set to NO, only conditional compilation will be
+# performed. Macro expansion can be done in a controlled way by setting
+# EXPAND_ONLY_PREDEF to YES.
+# The default value is: NO.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+MACRO_EXPANSION = NO
+
+# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES then
+# the macro expansion is limited to the macros specified with the PREDEFINED and
+# EXPAND_AS_DEFINED tags.
+# The default value is: NO.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+EXPAND_ONLY_PREDEF = NO
+
+# If the SEARCH_INCLUDES tag is set to YES, the include files in the
+# INCLUDE_PATH will be searched if a #include is found.
+# The default value is: YES.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+SEARCH_INCLUDES = YES
+
+# The INCLUDE_PATH tag can be used to specify one or more directories that
+# contain include files that are not input files but should be processed by the
+# preprocessor.
+# This tag requires that the tag SEARCH_INCLUDES is set to YES.
+
+INCLUDE_PATH = "@TOP_SRCDIR@/include" "@TOP_BUILDDIR@/include"
+
+# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard
+# patterns (like *.h and *.hpp) to filter out the header-files in the
+# directories. If left blank, the patterns specified with FILE_PATTERNS will be
+# used.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+INCLUDE_FILE_PATTERNS =
+
+# The PREDEFINED tag can be used to specify one or more macro names that are
+# defined before the preprocessor is started (similar to the -D option of e.g.
+# gcc). The argument of the tag is a list of macros of the form: name or
+# name=definition (no spaces). If the definition and the "=" are omitted, "=1"
+# is assumed. To prevent a macro definition from being undefined via #undef or
+# recursively expanded use the := operator instead of the = operator.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+PREDEFINED =
+
+# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this
+# tag can be used to specify a list of macro names that should be expanded. The
+# macro definition that is found in the sources will be used. Use the PREDEFINED
+# tag if you want to use a different macro definition that overrules the
+# definition found in the source code.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+EXPAND_AS_DEFINED =
+
+# If the SKIP_FUNCTION_MACROS tag is set to YES then doxygen's preprocessor will
+# remove all references to function-like macros that are alone on a line, have
+# an all uppercase name, and do not end with a semicolon. Such function macros
+# are typically used for boiler-plate code, and will confuse the parser if not
+# removed.
+# The default value is: YES.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
+
+SKIP_FUNCTION_MACROS = YES
+
+#---------------------------------------------------------------------------
+# Configuration options related to external references
+#---------------------------------------------------------------------------
+
+# The TAGFILES tag can be used to specify one or more tag files. For each tag
+# file the location of the external documentation should be added. The format of
+# a tag file without this location is as follows:
+# TAGFILES = file1 file2 ...
+# Adding location for the tag files is done as follows:
+# TAGFILES = file1=loc1 "file2 = loc2" ...
+# where loc1 and loc2 can be relative or absolute paths or URLs. See the
+# section "Linking to external documentation" for more information about the use
+# of tag files.
+# Note: Each tag file must have a unique name (where the name does NOT include
+# the path). If a tag file is not located in the directory in which doxygen is
+# run, you must also specify the path to the tagfile here.
+
+TAGFILES =
+
+# When a file name is specified after GENERATE_TAGFILE, doxygen will create a
+# tag file that is based on the input files it reads. See section "Linking to
+# external documentation" for more information about the usage of tag files.
+
+GENERATE_TAGFILE =
+
+# If the ALLEXTERNALS tag is set to YES, all external class will be listed in
+# the class index. If set to NO, only the inherited external classes will be
+# listed.
+# The default value is: NO.
+
+ALLEXTERNALS = NO
+
+# If the EXTERNAL_GROUPS tag is set to YES, all external groups will be listed
+# in the modules index. If set to NO, only the current project's groups will be
+# listed.
+# The default value is: YES.
+
+EXTERNAL_GROUPS = YES
+
+# If the EXTERNAL_PAGES tag is set to YES, all external pages will be listed in
+# the related pages index. If set to NO, only the current project's pages will
+# be listed.
+# The default value is: YES.
+
+EXTERNAL_PAGES = YES
+
+# The PERL_PATH should be the absolute path and name of the perl script
+# interpreter (i.e. the result of 'which perl').
+# The default file (with absolute path) is: /usr/bin/perl.
+
+PERL_PATH = /usr/bin/perl
+
+#---------------------------------------------------------------------------
+# Configuration options related to the dot tool
+#---------------------------------------------------------------------------
+
+# If the CLASS_DIAGRAMS tag is set to YES, doxygen will generate a class diagram
+# (in HTML and LaTeX) for classes with base or super classes. Setting the tag to
+# NO turns the diagrams off. Note that this option also works with HAVE_DOT
+# disabled, but it is recommended to install and use dot, since it yields more
+# powerful graphs.
+# The default value is: YES.
+
+CLASS_DIAGRAMS = YES
+
+# You can define message sequence charts within doxygen comments using the \msc
+# command. Doxygen will then run the mscgen tool (see:
+# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the
+# documentation. The MSCGEN_PATH tag allows you to specify the directory where
+# the mscgen tool resides. If left empty the tool is assumed to be found in the
+# default search path.
+
+MSCGEN_PATH =
+
+# You can include diagrams made with dia in doxygen documentation. Doxygen will
+# then run dia to produce the diagram and insert it in the documentation. The
+# DIA_PATH tag allows you to specify the directory where the dia binary resides.
+# If left empty dia is assumed to be found in the default search path.
+
+DIA_PATH =
+
+# If set to YES the inheritance and collaboration graphs will hide inheritance
+# and usage relations if the target is undocumented or is not a class.
+# The default value is: YES.
+
+HIDE_UNDOC_RELATIONS = YES
+
+# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is
+# available from the path. This tool is part of Graphviz (see:
+# http://www.graphviz.org/), a graph visualization toolkit from AT&T and Lucent
+# Bell Labs. The other options in this section have no effect if this option is
+# set to NO
+# The default value is: NO.
+
+HAVE_DOT = @HAVE_DOT@
+
+# The DOT_NUM_THREADS specifies the number of dot invocations doxygen is allowed
+# to run in parallel. When set to 0 doxygen will base this on the number of
+# processors available in the system. You can set it explicitly to a value
+# larger than 0 to get control over the balance between CPU load and processing
+# speed.
+# Minimum value: 0, maximum value: 32, default value: 0.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_NUM_THREADS = 0
+
+# When you want a differently looking font in the dot files that doxygen
+# generates you can specify the font name using DOT_FONTNAME. You need to make
+# sure dot is able to find the font, which can be done by putting it in a
+# standard location or by setting the DOTFONTPATH environment variable or by
+# setting DOT_FONTPATH to the directory containing the font.
+# The default value is: Helvetica.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_FONTNAME = Helvetica
+
+# The DOT_FONTSIZE tag can be used to set the size (in points) of the font of
+# dot graphs.
+# Minimum value: 4, maximum value: 24, default value: 10.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_FONTSIZE = 10
+
+# By default doxygen will tell dot to use the default font as specified with
+# DOT_FONTNAME. If you specify a different font using DOT_FONTNAME you can set
+# the path where dot can find it using this tag.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_FONTPATH =
+
+# If the CLASS_GRAPH tag is set to YES then doxygen will generate a graph for
+# each documented class showing the direct and indirect inheritance relations.
+# Setting this tag to YES will force the CLASS_DIAGRAMS tag to NO.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+CLASS_GRAPH = YES
+
+# If the COLLABORATION_GRAPH tag is set to YES then doxygen will generate a
+# graph for each documented class showing the direct and indirect implementation
+# dependencies (inheritance, containment, and class references variables) of the
+# class with other documented classes.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+COLLABORATION_GRAPH = YES
+
+# If the GROUP_GRAPHS tag is set to YES then doxygen will generate a graph for
+# groups, showing the direct groups dependencies.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+GROUP_GRAPHS = YES
+
+# If the UML_LOOK tag is set to YES, doxygen will generate inheritance and
+# collaboration diagrams in a style similar to the OMG's Unified Modeling
+# Language.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+UML_LOOK = NO
+
+# If the UML_LOOK tag is enabled, the fields and methods are shown inside the
+# class node. If there are many fields or methods and many nodes the graph may
+# become too big to be useful. The UML_LIMIT_NUM_FIELDS threshold limits the
+# number of items for each type to make the size more manageable. Set this to 0
+# for no limit. Note that the threshold may be exceeded by 50% before the limit
+# is enforced. So when you set the threshold to 10, up to 15 fields may appear,
+# but if the number exceeds 15, the total amount of fields shown is limited to
+# 10.
+# Minimum value: 0, maximum value: 100, default value: 10.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+UML_LIMIT_NUM_FIELDS = 10
+
+# If the TEMPLATE_RELATIONS tag is set to YES then the inheritance and
+# collaboration graphs will show the relations between templates and their
+# instances.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+TEMPLATE_RELATIONS = NO
+
+# If the INCLUDE_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are set to
+# YES then doxygen will generate a graph for each documented file showing the
+# direct and indirect include dependencies of the file with other documented
+# files.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+INCLUDE_GRAPH = YES
+
+# If the INCLUDED_BY_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are
+# set to YES then doxygen will generate a graph for each documented file showing
+# the direct and indirect include dependencies of the file with other documented
+# files.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+INCLUDED_BY_GRAPH = YES
+
+# If the CALL_GRAPH tag is set to YES then doxygen will generate a call
+# dependency graph for every global function or class method.
+#
+# Note that enabling this option will significantly increase the time of a run.
+# So in most cases it will be better to enable call graphs for selected
+# functions only using the \callgraph command. Disabling a call graph can be
+# accomplished by means of the command \hidecallgraph.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+CALL_GRAPH = NO
+
+# If the CALLER_GRAPH tag is set to YES then doxygen will generate a caller
+# dependency graph for every global function or class method.
+#
+# Note that enabling this option will significantly increase the time of a run.
+# So in most cases it will be better to enable caller graphs for selected
+# functions only using the \callergraph command. Disabling a caller graph can be
+# accomplished by means of the command \hidecallergraph.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+CALLER_GRAPH = NO
+
+# If the GRAPHICAL_HIERARCHY tag is set to YES then doxygen will show a
+# graphical hierarchy of all classes instead of a textual one.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+GRAPHICAL_HIERARCHY = YES
+
+# If the DIRECTORY_GRAPH tag is set to YES then doxygen will show the
+# dependencies a directory has on other directories in a graphical way. The
+# dependency relations are determined by the #include relations between the
+# files in the directories.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DIRECTORY_GRAPH = YES
+
+# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images
+# generated by dot. For an explanation of the image formats see the section
+# output formats in the documentation of the dot tool (Graphviz (see:
+# http://www.graphviz.org/)).
+# Note: If you choose svg you need to set HTML_FILE_EXTENSION to xhtml in order
+# to make the SVG files visible in IE 9+ (other browsers do not have this
+# requirement).
+# Possible values are: png, jpg, gif, svg, png:gd, png:gd:gd, png:cairo,
+# png:cairo:gd, png:cairo:cairo, png:cairo:gdiplus, png:gdiplus and
+# png:gdiplus:gdiplus.
+# The default value is: png.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_IMAGE_FORMAT = png
+
+# If DOT_IMAGE_FORMAT is set to svg, then this option can be set to YES to
+# enable generation of interactive SVG images that allow zooming and panning.
+#
+# Note that this requires a modern browser other than Internet Explorer. Tested
+# and working are Firefox, Chrome, Safari, and Opera.
+# Note: For IE 9+ you need to set HTML_FILE_EXTENSION to xhtml in order to make
+# the SVG files visible. Older versions of IE do not have SVG support.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+INTERACTIVE_SVG = NO
+
+# The DOT_PATH tag can be used to specify the path where the dot tool can be
+# found. If left blank, it is assumed the dot tool can be found in the path.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_PATH =
+
+# The DOTFILE_DIRS tag can be used to specify one or more directories that
+# contain dot files that are included in the documentation (see the \dotfile
+# command).
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOTFILE_DIRS =
+
+# The MSCFILE_DIRS tag can be used to specify one or more directories that
+# contain msc files that are included in the documentation (see the \mscfile
+# command).
+
+MSCFILE_DIRS =
+
+# The DIAFILE_DIRS tag can be used to specify one or more directories that
+# contain dia files that are included in the documentation (see the \diafile
+# command).
+
+DIAFILE_DIRS =
+
+# When using plantuml, the PLANTUML_JAR_PATH tag should be used to specify the
+# path where java can find the plantuml.jar file. If left blank, it is assumed
+# PlantUML is not used or called during a preprocessing step. Doxygen will
+# generate a warning when it encounters a \startuml command in this case and
+# will not generate output for the diagram.
+
+PLANTUML_JAR_PATH =
+
+# When using plantuml, the PLANTUML_CFG_FILE tag can be used to specify a
+# configuration file for plantuml.
+
+PLANTUML_CFG_FILE =
+
+# When using plantuml, the specified paths are searched for files specified by
+# the !include statement in a plantuml block.
+
+PLANTUML_INCLUDE_PATH =
+
+# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of nodes
+# that will be shown in the graph. If the number of nodes in a graph becomes
+# larger than this value, doxygen will truncate the graph, which is visualized
+# by representing a node as a red box. Note that if the number of direct
+# children of the root node in a graph is already larger than
+# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note that
+# the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH.
+# Minimum value: 0, maximum value: 10000, default value: 50.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_GRAPH_MAX_NODES = 50
+
+# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the graphs
+# generated by dot. A depth value of 3 means that only nodes reachable from the
+# root by following a path via at most 3 edges will be shown. Nodes that lay
+# further from the root node will be omitted. Note that setting this option to 1
+# or 2 may greatly reduce the computation time needed for large code bases. Also
+# note that the size of a graph can be further restricted by
+# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction.
+# Minimum value: 0, maximum value: 1000, default value: 0.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+MAX_DOT_GRAPH_DEPTH = 0
+
+# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent
+# background. This is disabled by default, because dot on Windows does not seem
+# to support this out of the box.
+#
+# Warning: Depending on the platform used, enabling this option may lead to
+# badly anti-aliased labels on the edges of a graph (i.e. they become hard to
+# read).
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_TRANSPARENT = NO
+
+# Set the DOT_MULTI_TARGETS tag to YES to allow dot to generate multiple output
+# files in one run (i.e. multiple -o and -T options on the command line). This
+# makes dot run faster, but since only newer versions of dot (>1.8.10) support
+# this, this feature is disabled by default.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_MULTI_TARGETS = NO
+
+# If the GENERATE_LEGEND tag is set to YES doxygen will generate a legend page
+# explaining the meaning of the various boxes and arrows in the dot generated
+# graphs.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+GENERATE_LEGEND = YES
+
+# If the DOT_CLEANUP tag is set to YES, doxygen will remove the intermediate dot
+# files that are used to generate the various graphs.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_CLEANUP = YES
--- /dev/null
+# Make the source and build roots available to the Doxyfile template.
+cdata.set('TOP_SRCDIR', meson.source_root())
+cdata.set('TOP_BUILDDIR', meson.build_root())
+
+# Expand Doxyfile.in -> Doxyfile with the configuration data above.
+doxyfile = configure_file(input: 'Doxyfile.in',
+ output: 'Doxyfile',
+ configuration: cdata,
+ install: false)
+
+datadir = join_paths(get_option('datadir'), 'doc', 'spede')
+
+# Run doxygen at build time; the generated 'html' tree is installed
+# under <datadir>/doc/spede.
+html_target = custom_target('spede-docs',
+ input: doxyfile,
+ output: 'html',
+ command: [doxygen, doxyfile],
+ install: true,
+ install_dir: datadir)
+
--- /dev/null
+#pragma once
+
+namespace Comedy {
+
+    /**
+     * Interface for a funnyperson.
+     */
+    class Comedian {
+    public:
+        /**
+         * Do the thing people want to happen.
+         */
+        virtual void tell_joke() = 0;
+
+        /// Virtual destructor so deleting a subclass through a
+        /// Comedian* is well-defined.
+        virtual ~Comedian();
+    };
+
+}
--- /dev/null
+#pragma once
+#include<comedian.h>
+#include<stdexcept>
+
+/**
+ * \file spede.h
+ *
+ * Spede definition.
+ */
+
+namespace Comedy {
+
+    /**
+     * Spede is the funniest person in the world.
+     */
+    class Spede : public Comedian {
+    public:
+        /**
+         * Creates a new spede.
+         */
+        Spede();
+
+        /**
+         * Make him do the funny thing he is known for.
+         */
+        void slap_forehead();
+
+        virtual void tell_joke() {
+            throw std::runtime_error("Not implemented");
+        }
+
+    private:
+        // Moved back inside the class body: the original had the
+        // access specifier and this member after the closing '};',
+        // which is invalid C++. The constructor in spede.cpp
+        // initializes num_movies(100), so the member must exist here.
+        int num_movies; ///< How many movies has he done.
+    };
+
+}
--- /dev/null
+usr/share/doc/spede/html/annotated.html
+usr/share/doc/spede/html/bc_s.png
+usr/share/doc/spede/html/bdwn.png
+usr/share/doc/spede/html/classComedy_1_1Comedian.html
+usr/share/doc/spede/html/classComedy_1_1Comedian.png
+usr/share/doc/spede/html/classComedy_1_1Comedian-members.html
+usr/share/doc/spede/html/classComedy_1_1Spede.html
+usr/share/doc/spede/html/classComedy_1_1Spede.png
+usr/share/doc/spede/html/classComedy_1_1Spede-members.html
+usr/share/doc/spede/html/classes.html
+usr/share/doc/spede/html/closed.png
+usr/share/doc/spede/html/comedian_8h_source.html
+usr/share/doc/spede/html/dir_7bdce917e28dfbd493cadd1d2e5c7d80.html
+usr/share/doc/spede/html/dir_44a4667d36a4476878de085754f6d2b9.html
+usr/share/doc/spede/html/dir_68b523c5b3a2dcea45d5ce70397fb722.html
+usr/share/doc/spede/html/dir_a7e6472d2301212032fd74682f8217f3.html
+usr/share/doc/spede/html/dir_ee191f21c02d247cc959e80c1a3acadf.html
+usr/share/doc/spede/html/doc.png
+usr/share/doc/spede/html/doxygen.css
+usr/share/doc/spede/html/doxygen.png
+usr/share/doc/spede/html/dynsections.js
+usr/share/doc/spede/html/files.html
+usr/share/doc/spede/html/folderclosed.png
+usr/share/doc/spede/html/folderopen.png
+usr/share/doc/spede/html/functions.html
+usr/share/doc/spede/html/functions_func.html
+usr/share/doc/spede/html/hierarchy.html
+usr/share/doc/spede/html/index.html
+usr/share/doc/spede/html/jquery.js
+usr/share/doc/spede/html/menu.js
+usr/share/doc/spede/html/menudata.js
+usr/share/doc/spede/html/namespaceComedy.html
+usr/share/doc/spede/html/namespacemembers.html
+usr/share/doc/spede/html/namespacemembers_func.html
+usr/share/doc/spede/html/namespaces.html
+usr/share/doc/spede/html/nav_f.png
+usr/share/doc/spede/html/nav_g.png
+usr/share/doc/spede/html/nav_h.png
+usr/share/doc/spede/html/open.png
+usr/share/doc/spede/html/search/all_0.html
+usr/share/doc/spede/html/search/all_0.js
+usr/share/doc/spede/html/search/all_1.html
+usr/share/doc/spede/html/search/all_1.js
+usr/share/doc/spede/html/search/all_2.html
+usr/share/doc/spede/html/search/all_2.js
+usr/share/doc/spede/html/search/all_3.html
+usr/share/doc/spede/html/search/all_3.js
+usr/share/doc/spede/html/search/classes_0.html
+usr/share/doc/spede/html/search/classes_0.js
+usr/share/doc/spede/html/search/classes_1.html
+usr/share/doc/spede/html/search/classes_1.js
+usr/share/doc/spede/html/search/close.png
+usr/share/doc/spede/html/search/files_0.html
+usr/share/doc/spede/html/search/files_0.js
+usr/share/doc/spede/html/search/functions_0.html
+usr/share/doc/spede/html/search/functions_0.js
+usr/share/doc/spede/html/search/functions_1.html
+usr/share/doc/spede/html/search/functions_1.js
+usr/share/doc/spede/html/search/functions_2.html
+usr/share/doc/spede/html/search/functions_2.js
+usr/share/doc/spede/html/search/mag_sel.png
+usr/share/doc/spede/html/search/namespaces_0.html
+usr/share/doc/spede/html/search/namespaces_0.js
+usr/share/doc/spede/html/search/nomatches.html
+usr/share/doc/spede/html/search/pages_0.html
+usr/share/doc/spede/html/search/pages_0.js
+usr/share/doc/spede/html/search/search.css
+usr/share/doc/spede/html/search/search.js
+usr/share/doc/spede/html/search/searchdata.js
+usr/share/doc/spede/html/search/search_l.png
+usr/share/doc/spede/html/search/search_m.png
+usr/share/doc/spede/html/search/search_r.png
+usr/share/doc/spede/html/spede_8cpp.html
+usr/share/doc/spede/html/spede_8h.html
+usr/share/doc/spede/html/spede_8h_source.html
+usr/share/doc/spede/html/splitbar.png
+usr/share/doc/spede/html/sync_off.png
+usr/share/doc/spede/html/sync_on.png
+usr/share/doc/spede/html/tabs.css
+usr/share/doc/spede/html/tab_a.png
+usr/share/doc/spede/html/tab_b.png
+usr/share/doc/spede/html/tab_h.png
+usr/share/doc/spede/html/tab_s.png
--- /dev/null
+project('doxygen test', 'cpp', version : '0.1.0')
+
+doxygen = find_program('doxygen', required : false)
+if not doxygen.found()
+ error('MESON_SKIP_TEST doxygen not found.')
+endif
+
+cdata = configuration_data()
+cdata.set('VERSION', meson.project_version())
+
+if find_program('dot', required : false).found()
+ # In the real world this would set the variable
+ # to YES. However we set it to NO so that the
+ # list of generated files is always the same
+ # so tests always pass.
+ cdata.set('HAVE_DOT', 'NO')
+else
+ cdata.set('HAVE_DOT', 'NO')
+endif
+
+subdir('doc')
+
--- /dev/null
+#include<spede.h>
+
+/**
+ * \file spede.cpp
+ *
+ * This file contains the implementation of the king of comedy.
+ */
+
+/**
+ * \mainpage The Vast Comedian Project
+ *
+ * \section intro Introduction
+ *
+ * The purpose of this project is to model every single comedian
+ * who has ever lived.
+ *
+ * \section sched Project schedule
+ *
+ * There is no real estimate on when this will be finished.
+ */
+
+/**
+ * \namespace Comedy
+ *
+ * This contains everything that is funny.
+ */
+
+namespace Comedy {
+
+/**
+ * Do all the delicate movements that lead to a comical sound
+ * emanating from a person.
+ *
+ * \param force how hard to move the hand.
+ * \return something or another
+ */
+int gesticulate(int force) {
+    // FIXME add implementation.
+    return 0;
+}
+
+Spede::Spede() : num_movies(100) {
+}
+
+// Fixed: the definition was missing its 'void' return type, which is
+// invalid C++ (implicit int is not allowed).
+void Spede::slap_forehead() {
+    gesticulate(42);
+}
+
+}
--- /dev/null
+project('llvmtest', ['c', 'cpp'], default_options : ['c_std=c99'])
+
+d = dependency('llvm', modules : 'not-found', required : false)
+assert(d.found() == false, 'not-found llvm module found')
+
+d = dependency('llvm', version : '<0.1', required : false)
+assert(d.found() == false, 'ancient llvm module found')
+
+d = dependency('llvm', optional_modules : 'not-found', required : false)
+assert(d.found() == true, 'optional module stopped llvm from being found.')
+
+dep_tinfo = dependency('tinfo', required : false)
+if not dep_tinfo.found()
+ cpp = meson.get_compiler('cpp')
+ dep_tinfo = cpp.find_library('tinfo')
+endif
+
+foreach static : [true, false]
+ llvm_dep = dependency(
+ 'llvm',
+ modules : ['bitwriter', 'asmprinter', 'executionengine', 'target',
+ 'mcjit', 'nativecodegen'],
+ required : true,
+ static : static,
+ )
+ name = static ? 'static' : 'dynamic'
+ executable(
+ 'sum-@0@'.format(name),
+ 'sum.c',
+ dependencies : [
+ llvm_dep, dep_tinfo,
+ dependency('zlib'),
+ meson.get_compiler('c').find_library('dl', required : false),
+ ]
+ )
+endforeach
--- /dev/null
+/** This code is public domain, and taken from
+ * https://github.com/paulsmith/getting-started-llvm-c-api/blob/master/sum.c
+ */
+/**
+ * LLVM equivalent of:
+ *
+ * int sum(int a, int b) {
+ * return a + b;
+ * }
+ */
+
+#include <llvm-c/Core.h>
+#include <llvm-c/ExecutionEngine.h>
+#include <llvm-c/Target.h>
+#include <llvm-c/Analysis.h>
+#include <llvm-c/BitWriter.h>
+
+#include <inttypes.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+int main(int argc, char const *argv[]) {
+ LLVMModuleRef mod = LLVMModuleCreateWithName("my_module");
+
+ LLVMTypeRef param_types[] = { LLVMInt32Type(), LLVMInt32Type() };
+ LLVMTypeRef ret_type = LLVMFunctionType(LLVMInt32Type(), param_types, 2, 0);
+ LLVMValueRef sum = LLVMAddFunction(mod, "sum", ret_type);
+
+ LLVMBasicBlockRef entry = LLVMAppendBasicBlock(sum, "entry");
+
+ LLVMBuilderRef builder = LLVMCreateBuilder();
+ LLVMPositionBuilderAtEnd(builder, entry);
+ LLVMValueRef tmp = LLVMBuildAdd(builder, LLVMGetParam(sum, 0), LLVMGetParam(sum, 1), "tmp");
+ LLVMBuildRet(builder, tmp);
+
+ char *error = NULL;
+ LLVMVerifyModule(mod, LLVMAbortProcessAction, &error);
+ LLVMDisposeMessage(error);
+
+ LLVMExecutionEngineRef engine;
+ error = NULL;
+ LLVMLinkInMCJIT();
+ LLVMInitializeNativeAsmPrinter();
+ LLVMInitializeNativeTarget();
+ if (LLVMCreateExecutionEngineForModule(&engine, mod, &error) != 0) {
+ fprintf(stderr, "failed to create execution engine\n");
+ abort();
+ }
+ if (error) {
+ fprintf(stderr, "error: %s\n", error);
+ LLVMDisposeMessage(error);
+ exit(EXIT_FAILURE);
+ }
+
+ if (argc < 3) {
+ fprintf(stderr, "usage: %s x y\n", argv[0]);
+ exit(EXIT_FAILURE);
+ }
+ long long x = strtoll(argv[1], NULL, 10);
+ long long y = strtoll(argv[2], NULL, 10);
+
+ LLVMGenericValueRef args[] = {
+ LLVMCreateGenericValueOfInt(LLVMInt32Type(), x, 0),
+ LLVMCreateGenericValueOfInt(LLVMInt32Type(), y, 0)
+ };
+ LLVMGenericValueRef res = LLVMRunFunction(engine, sum, 2, args);
+ printf("%d\n", (int)LLVMGenericValueToInt(res, 0));
+
+ // Write out bitcode to file
+ if (LLVMWriteBitcodeToFile(mod, "sum.bc") != 0) {
+ fprintf(stderr, "error writing bitcode to file, skipping\n");
+ }
+
+ LLVMDisposeBuilder(builder);
+ LLVMDisposeExecutionEngine(engine);
+}
--- /dev/null
+project('sdl2 test', 'c')
+
+sdl2_dep = dependency('sdl2', version : '>=2.0.0')
+
+e = executable('sdl2prog', 'sdl2prog.c', dependencies : sdl2_dep)
+
+test('sdl2test', e)
+
+# Ensure that we can find it with sdl2-config too, using the legacy method name
+configdep = dependency('sdl2', method : 'sdlconfig')
+
+# And the modern method name
+configdep = dependency('sdl2', method : 'config-tool')
--- /dev/null
+/* vim: set sts=4 sw=4 et : */
+
+#include <stdio.h>
+#include <SDL_version.h>
+
+int main(int argc, char *argv[]) {
+ SDL_version compiled;
+ SDL_version linked;
+
+ SDL_VERSION(&compiled);
+ SDL_GetVersion(&linked);
+
+ if (compiled.major != linked.major) {
+ fprintf(stderr, "Compiled major '%u' != linked major '%u'",
+ compiled.major, linked.major);
+ return -1;
+ }
+
+ if (compiled.minor != linked.minor) {
+ fprintf(stderr, "Compiled minor '%u' != linked minor '%u'",
+ compiled.minor, linked.minor);
+ return -2;
+ }
+#if 0
+ /* Disabled because sometimes this is 'micro' and sometimes 'patch' */
+ if (compiled.micro != linked.micro) {
+ fprintf(stderr, "Compiled micro '%u' != linked micro '%u'",
+ compiled.micro, linked.micro);
+ return -3;
+ }
+#endif
+ return 0;
+}
--- /dev/null
+#!/usr/bin/env python3
+
+# Any exception causes return value to be not zero, which is sufficient.
+
+import sys
+
+fc = open('/etc/apt/sources.list').read()
+if 'artful' not in fc:
+ sys.exit(1)
--- /dev/null
+#include <stdio.h>
+#include <mpi.h>
+
+int main(int argc, char **argv)
+{
+    /* Exercise the minimal MPI lifecycle: init, query state, finalize.
+     * Every MPI call's return code is checked; non-zero aborts with 1. */
+    int rc;
+    int initialized;
+
+    rc = MPI_Init(&argc, &argv);
+    if (rc) {
+        printf("Unable to initialize MPI: %d\n", rc);
+        return 1;
+    }
+
+    rc = MPI_Initialized(&initialized);
+    if (rc) {
+        printf("Unable to check MPI initialization state: %d\n", rc);
+        return 1;
+    }
+    if (!initialized) {
+        printf("MPI did not initialize!\n");
+        return 1;
+    }
+
+    rc = MPI_Finalize();
+    if (rc) {
+        printf("Unable to finalize MPI: %d\n", rc);
+        return 1;
+    }
+    return 0;
+}
--- /dev/null
+// stdio.h was missing: printf was only available through a transitive
+// include from mpi.h, which is not guaranteed by any MPI implementation.
+#include <stdio.h>
+#include <mpi.h>
+
+// Smoke test for the (deprecated) MPI C++ bindings: initialize, verify
+// initialization, finalize. main() returns 0 implicitly in C++.
+int main(int argc, char **argv)
+{
+    MPI::Init(argc, argv);
+    if (!MPI::Is_initialized()) {
+        printf("MPI did not initialize!\n");
+        return 1;
+    }
+    MPI::Finalize();
+}
--- /dev/null
+program mpitest
+ implicit none
+ include 'mpif.h'
+ logical :: flag
+ integer :: ier
+ call MPI_Init(ier)
+ if (ier /= 0) then
+ print *, 'Unable to initialize MPI: ', ier
+ stop 1
+ endif
+ call MPI_Initialized(flag, ier)
+ if (ier /= 0) then
+ print *, 'Unable to check MPI initialization state: ', ier
+ stop 1
+ endif
+ call MPI_Finalize(ier)
+ if (ier /= 0) then
+ print *, 'Unable to finalize MPI: ', ier
+ stop 1
+ endif
+end program mpitest
--- /dev/null
+project('mpi', 'c', 'cpp')
+
+cc = meson.get_compiler('c')
+
+if build_machine.system() == 'windows' and cc.get_id() != 'msvc'
+ error('MESON_SKIP_TEST: MPI not available on Windows without MSVC.')
+endif
+
+mpic = dependency('mpi', language : 'c')
+exec = executable('exec',
+ 'main.c',
+ dependencies : [mpic])
+
+test('MPI C', exec)
+
+if build_machine.system() != 'windows'
+ # C++ MPI not supported by MS-MPI used on AppVeyor.
+ mpicpp = dependency('mpi', language : 'cpp')
+ execpp = executable('execpp',
+ 'main.cpp',
+ dependencies : [mpicpp])
+
+ test('MPI C++', execpp)
+endif
+
+# OpenMPI is broken with Fortran on Ubuntu Artful.
+# Remove this once the following bug has been fixed:
+#
+# https://bugs.launchpad.net/ubuntu/+source/gcc-defaults/+bug/1727474
+
+ubudetector = find_program('is_artful.py')
+uburesult = run_command(ubudetector)
+
+if uburesult.returncode() != 0 and add_languages('fortran', required : false)
+ mpifort = dependency('mpi', language : 'fortran')
+ exef = executable('exef',
+ 'main.f90',
+ dependencies : [mpifort])
+
+ test('MPI Fortran', exef)
+endif
--- /dev/null
+project('vulkan test', 'c')
+
+vulkan_dep = dependency('vulkan')
+
+e = executable('vulkanprog', 'vulkanprog.c', dependencies : vulkan_dep)
+
+test('vulkantest', e)
--- /dev/null
+#include <vulkan/vulkan.h>
+#include <stdio.h>
+
+int main()
+{
+ VkInstanceCreateInfo instance_create_info = {
+ VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
+ NULL,
+ 0,
+ NULL,
+ 0,
+ NULL,
+ 0,
+ NULL,
+ };
+
+ // we don't actually require instance creation to succeed since
+ // we cannot expect test environments to have a vulkan driver installed.
+ // As long as this does not produce as segmentation fault or similar,
+ // everything's alright.
+ VkInstance instance;
+ if(vkCreateInstance(&instance_create_info, NULL, &instance) == VK_SUCCESS)
+ vkDestroyInstance(instance, NULL);
+
+ return 0;
+}
\ No newline at end of file
--- /dev/null
+project('pcap test', 'c')
+
+pcap_dep = dependency('pcap', version : '>=1.0')
+
+pcap_ver = pcap_dep.version()
+assert(pcap_ver.split('.').length() > 1, 'pcap version is "@0@"'.format(pcap_ver))
+
+e = executable('pcap_prog', 'pcap_prog.c', dependencies : pcap_dep)
+
+test('pcaptest', e)
+
+# Ensure discovery via the configuration tools works also
+pcap_dep = dependency('pcap', version : '>=1.0', method : 'pcap-config')
+pcap_dep = dependency('pcap', version : '>=1.0', method : 'config-tool')
--- /dev/null
+#include <pcap/pcap.h>
+
+int
+main()
+{
+ char errbuf[PCAP_ERRBUF_SIZE];
+ pcap_t *p = pcap_create(NULL, errbuf);
+ return p == NULL;
+}
--- /dev/null
+project('gtest', 'cpp')
+
+gtest = dependency('gtest', main : true)
+gtest_nomain = dependency('gtest', main : false)
+
+e = executable('testprog', 'test.cc', dependencies : gtest)
+test('gtest test', e)
+
+e = executable('testprog_nomain', 'test_nomain.cc', dependencies : gtest_nomain)
+test('gtest nomain test', e)
--- /dev/null
+#include<gtest/gtest.h>
+
+TEST(basic_test, eq_works) {
+ ASSERT_EQ(0, 1-1) << "Equality is broken. Mass panic!";
+}
+
+TEST(basic_test, neq_works) {
+ ASSERT_NE(15, 106) << "Inequal is equal. The foundations of space and time are in jeopardy.";
+}
--- /dev/null
+#include<gtest/gtest.h>
+
+TEST(basic_test, eq_works) {
+ ASSERT_EQ(0, 1-1) << "Equality is broken. Mass panic!";
+}
+
+TEST(basic_test, neq_works) {
+ ASSERT_NE(15, 106) << "Inequal is equal. The foundations of space and time are in jeopardy.";
+}
+
+int main(int argc, char **argv) {
+ ::testing::InitGoogleTest(&argc, argv);
+ return RUN_ALL_TESTS();
+}
--- /dev/null
+#include <cups/cups.h>
+
+int
+main()
+{
+ cupsGetDefault();
+ return 0;
+}
--- /dev/null
+project('cups test', 'c')
+
+cups_dep = dependency('cups', version : '>=1.4')
+
+e = executable('cups_prog', 'cups_prog.c', dependencies : cups_dep)
+
+test('cupstest', e)
+
+# ensure we can find the cups dependency via the legacy and modern config-tool
+# options
+dep = dependency('cups', version : '>=1.4', method : 'cups-config')
+dep = dependency('cups', version : '>=1.4', method : 'config-tool')
--- /dev/null
+#include <libwmf/api.h>
+
+int
+main()
+{
+ wmf_help();
+ return 0;
+}
--- /dev/null
+project('libwmf test', 'c')
+
+libwmf_dep = dependency('libwmf', version : '>= 0.2.8')
+libwmf_ver = libwmf_dep.version()
+assert(libwmf_ver.split('.').length() > 1, 'libwmf version is "@0@"'.format(libwmf_ver))
+message('libwmf version is "@0@"'.format(libwmf_ver))
+e = executable('libwmf_prog', 'libwmf_prog.c', dependencies : libwmf_dep)
+
+test('libwmftest', e)
+
+# Test using the method keyword:
+
+dependency('libwmf', method : 'config-tool')
+dependency('libwmf', method : 'libwmf-config')
--- /dev/null
+#include<gtest/gtest.h>
+#include<gmock/gmock.h>
+
+using ::testing::Return;
+
+class Foo {
+public:
+ Foo() { x = 42; }
+ virtual ~Foo() {};
+
+ virtual int getValue() const { return x; }
+
+private:
+ int x;
+};
+
+class MockFoo : public Foo {
+public:
+ MOCK_CONST_METHOD0(getValue, int());
+};
+
+TEST(counttest, once) {
+ MockFoo f;
+ EXPECT_CALL(f, getValue()).Times(1).WillOnce(Return(42));
+
+ EXPECT_EQ(f.getValue(), 42) << "Got wrong value";
+}
--- /dev/null
+project('gmock test', 'cpp')
+
+# Using gmock without gtest is a pain so just
+# don't support that then.
+
+gtest = dependency('gtest', main : true)
+gmock = dependency('gmock')
+
+e = executable('gmocktest', 'gmocktest.cc', dependencies : [gtest, gmock])
+test('gmock test', e)
--- /dev/null
+#include <QApplication>
+#include "mainWindow.h"
+
+// Build the main window, verify both compiled-in Qt resources are
+// loadable and have the expected width, display them in labels, then
+// enter the Qt event loop.
+int main(int argc, char **argv) {
+    #ifndef UNITY_BUILD
+    Q_INIT_RESOURCE(stuff);
+    Q_INIT_RESOURCE(stuff2);
+    #endif
+    QApplication app(argc, argv);
+    MainWindow *win = new MainWindow();
+    QImage qi(":/thing.png");
+    if(qi.width() != 640) {
+        return 1;
+    }
+    QImage qi2(":/thing2.png");
+    if(qi2.width() != 640) {
+        return 1;
+    }
+    win->setWindowTitle("Meson Qt5 build test");
+    QLabel *label_stuff = win->findChild<QLabel *>("label_stuff");
+    if(label_stuff == nullptr) {
+        return 1;
+    }
+    int w = label_stuff->width();
+    int h = label_stuff->height();
+    label_stuff->setPixmap(QPixmap::fromImage(qi).scaled(w,h,Qt::KeepAspectRatio));
+    QLabel *label_stuff2 = win->findChild<QLabel *>("label_stuff2");
+    if(label_stuff2 == nullptr) {
+        return 1;
+    }
+    w = label_stuff2->width();
+    h = label_stuff2->height();
+    label_stuff2->setPixmap(QPixmap::fromImage(qi2).scaled(w,h,Qt::KeepAspectRatio));
+    win->show();
+    // Fixed: the original had an unreachable 'return 0;' after this
+    // statement; app.exec() already yields the process exit code.
+    return app.exec();
+}
--- /dev/null
+#include "mainWindow.h"
+
+MainWindow::MainWindow(QWidget *parent) : QMainWindow(parent) {
+ setupUi(this);
+}
+
+MainWindow::~MainWindow() {
+}
--- /dev/null
+#ifndef MES_MAINWINDOW
+#define MES_MAINWINDOW
+
+#include <QObject>
+#include <QMainWindow>
+#include "ui_mainWindow.h"
+
+class NotificationModel;
+
+class MainWindow : public QMainWindow, private Ui_MainWindow {
+ Q_OBJECT
+
+public:
+ explicit MainWindow(QWidget *parent=0);
+ ~MainWindow();
+
+private:
+};
+
+#endif
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<ui version="4.0">
+ <class>MainWindow</class>
+ <widget class="QMainWindow" name="MainWindow">
+ <property name="geometry">
+ <rect>
+ <x>0</x>
+ <y>0</y>
+ <width>260</width>
+ <height>313</height>
+ </rect>
+ </property>
+ <property name="windowTitle">
+ <string>MainWindow</string>
+ </property>
+ <widget class="QWidget" name="centralwidget">
+ <widget class="QPushButton" name="pushButton">
+ <property name="geometry">
+ <rect>
+ <x>10</x>
+ <y>10</y>
+ <width>241</width>
+ <height>91</height>
+ </rect>
+ </property>
+ <property name="text">
+ <string>I am a button</string>
+ </property>
+ </widget>
+ <widget class="QLabel" name="label_stuff">
+ <property name="geometry">
+ <rect>
+ <x>10</x>
+ <y>112</y>
+ <width>241</width>
+ <height>91</height>
+ </rect>
+ </property>
+ </widget>
+ <widget class="QLabel" name="label_stuff2">
+ <property name="geometry">
+ <rect>
+ <x>10</x>
+ <y>212</y>
+ <width>241</width>
+ <height>91</height>
+ </rect>
+ </property>
+ </widget>
+ </widget>
+ </widget>
+ <resources/>
+ <connections/>
+</ui>
--- /dev/null
+#include"manualinclude.h"
+#include<QCoreApplication>
+
+#include<QObject>
+
+ManualInclude::ManualInclude() {
+}
+
+void ManualInclude::myslot(void) {
+ ;
+}
+
+class MocClass : public QObject {
+ Q_OBJECT
+};
+
+int main(int argc, char **argv) {
+ ManualInclude mi;
+ MocClass mc;
+ QObject::connect(&mi, SIGNAL(mysignal(void)),
+ &mi, SLOT(myslot(void)));
+ emit mi.mysignal();
+ return 0;
+}
+
+#include"manualinclude.moc"
+
--- /dev/null
+#ifndef MANUALINCLUDE_H_
+#define MANUALINCLUDE_H_
+
+#include<QObject>
+
+class ManualInclude : public QObject {
+ Q_OBJECT
+
+public:
+ ManualInclude();
+#if defined(MOC_EXTRA_FLAG)
+public slots:
+#endif
+ void myslot(void);
+
+#if defined(MOC_EXTRA_FLAG)
+signals:
+#endif
+ int mysignal();
+};
+
+#endif
--- /dev/null
+project('qt4 and 5 build test', 'cpp',
+ # Qt5 now requires C++ 11 support
+ default_options : ['cpp_std=c++11'])
+
+qt5_modules = ['Widgets']
+foreach qt : ['qt4', 'qt5']
+ qt_modules = ['Core', 'Gui']
+ if qt == 'qt5'
+ qt_modules += qt5_modules
+ endif
+
+ # Test that invalid modules are indeed not found
+ fakeqtdep = dependency(qt, modules : ['DefinitelyNotFound'], required : false, method : get_option('method'))
+ if fakeqtdep.found()
+ error('Invalid qt dep incorrectly found!')
+ endif
+
+ # Test that partially-invalid modules are indeed not found
+ fakeqtdep = dependency(qt, modules : ['Core', 'DefinitelyNotFound'], required : false, method : get_option('method'))
+ if fakeqtdep.found()
+ error('Invalid qt dep incorrectly found!')
+ endif
+
+ # Ensure that the "no-Core-module-specified" code branch is hit
+ nocoredep = dependency(qt, modules : ['Gui'], required : qt == 'qt5', method : get_option('method'))
+
+ # If qt4 modules are found, test that. qt5 is required.
+ qtdep = dependency(qt, modules : qt_modules, required : qt == 'qt5', method : get_option('method'))
+ if qtdep.found()
+ qtmodule = import(qt)
+
+ # The following has two resource files because having two in one target
+ # requires you to do it properly or you get linker symbol clashes.
+
+ prep = qtmodule.preprocess(
+ moc_headers : ['mainWindow.h'], # These need to be fed through the moc tool before use.
+ ui_files : 'mainWindow.ui', # XML files that need to be compiled with the uic tol.
+ method : get_option('method')
+ )
+
+ # Resource file(s) for rcc compiler
+ extra_cpp_args = []
+ if meson.is_unity()
+ extra_cpp_args += '-DUNITY_BUILD'
+ prep_rcc = qtmodule.preprocess(qt + '_unity_ressource', qresources : ['stuff.qrc', 'stuff2.qrc'], method : get_option('method'))
+ else
+ prep_rcc = qtmodule.preprocess(qresources : ['stuff.qrc', 'stuff2.qrc'], method : get_option('method'))
+ endif
+
+ # Test that setting a unique name with a positional argument works
+ qtmodule.preprocess(qt + 'teststuff', qresources : ['stuff.qrc', 'stuff2.qrc'], method : get_option('method'))
+
+ qexe = executable(qt + 'app',
+ sources : ['main.cpp', 'mainWindow.cpp', # Sources that don't need preprocessing.
+ prep, prep_rcc],
+ dependencies : qtdep,
+ cpp_args: extra_cpp_args)
+
+ # We need a console test application because some test environments
+ # do not have an X server.
+
+ translations = qtmodule.compile_translations(ts_files : qt+'core_fr.ts', build_by_default : true)
+
+ qtcore = dependency(qt, modules : 'Core', method : get_option('method'))
+
+ qtcoreapp = executable(qt + 'core', 'q5core.cpp',
+ dependencies : qtcore)
+
+ test(qt + 'test', qtcoreapp)
+
+ # The build system needs to include the cpp files from
+ # headers but the user must manually include moc
+ # files from sources.
+ manpreprocessed = qtmodule.preprocess(
+ moc_extra_arguments : ['-DMOC_EXTRA_FLAG'], # This is just a random macro to test `moc_extra_arguments`
+ moc_sources : 'manualinclude.cpp',
+ moc_headers : 'manualinclude.h',
+ method : get_option('method'))
+
+ qtmaninclude = executable(qt + 'maninclude',
+ sources : ['manualinclude.cpp', manpreprocessed],
+ dependencies : qtcore)
+
+ test(qt + 'maninclude', qtmaninclude)
+
+ # building Qt plugins implies to give include path to moc
+ plugin_includes = include_directories('pluginInterface', 'plugin')
+ pluginpreprocess = qtmodule.preprocess(
+ moc_headers : 'plugin/plugin.h',
+ include_directories : plugin_includes
+ )
+ plugin = library(qt + 'plugin', 'plugin/plugin.cpp', pluginpreprocess,
+ include_directories : plugin_includes,
+ dependencies : qtcore)
+ endif
+endforeach
--- /dev/null
+option('method', type : 'string', value : 'auto', description : 'The method to use to find Qt')
--- /dev/null
+#include "plugin.h"
+#include <QFile>
+
+QString plugin1::getResource()
+{
+ return "hello world";
+}
+
+
+#if QT_VERSION < 0x050000
+ Q_EXPORT_PLUGIN2(Plugin1, plugin1)
+#endif
\ No newline at end of file
--- /dev/null
+#pragma once
+#include <plugin_if.h>
+
+class plugin1:public QObject,public PluginInterface
+{
+ Q_OBJECT
+ Q_INTERFACES(PluginInterface)
+#if QT_VERSION >= 0x050000
+ Q_PLUGIN_METADATA(IID "demo.PluginInterface" FILE "plugin.json")
+#endif
+
+public:
+ QString getResource() override;
+};
--- /dev/null
+{
+ "name" : "Plugin1"
+}
--- /dev/null
+#ifndef PLUGIN_IF_H
+#define PLUGIN_IF_H
+
+#include <QString>
+#include <QtPlugin>
+
+/**
+ * @brief Interface for a plugin
+ */
+class PluginInterface
+{
+public:
+ virtual ~PluginInterface() = default;
+
+ /// Initializes the plugin
+ virtual QString getResource() = 0;
+};
+
+Q_DECLARE_INTERFACE(PluginInterface, "demo.PluginInterface")
+
+#endif
--- /dev/null
+#include <QCoreApplication>
+#include <QtGlobal>
+#include <QString>
+#include <QTranslator>
+#include <QLocale>
+#include <QLibraryInfo>
+#include <QDebug>
+
+int main(int argc, char **argv) {
+ QCoreApplication app(argc, argv);
+
+ QTranslator qtTranslator;
+ qtTranslator.load("qt_" + QLocale::system().name(),
+ QLibraryInfo::location(QLibraryInfo::TranslationsPath));
+ app.installTranslator(&qtTranslator);
+
+ QTranslator myappTranslator;
+ if(!myappTranslator.load("qt5core_fr") )
+ return 1;
+
+ app.installTranslator(&myappTranslator);
+
+ qDebug() << QObject::tr("Translate me!");
+ // Don't actually start the main loop so this
+ // can be run as a unit test.
+ //return app.exec();
+ return 0;
+}
--- /dev/null
+<?xml version="1.0" encoding="utf-8"?>
+<!DOCTYPE TS>
+<TS version="2.0" language="fr_FR">
+<context>
+ <name>QObject</name>
+ <message>
+ <location filename="q5core.cpp" line="23"/>
+ <source>Translate me!</source>
+ <translation>Traduisez moi!</translation>
+ </message>
+</context>
+</TS>
--- /dev/null
+<?xml version="1.0" encoding="utf-8"?>
+<!DOCTYPE TS>
+<TS version="2.1" language="fr_FR">
+<context>
+ <name>QObject</name>
+ <message>
+ <location filename="q5core.cpp" line="23"/>
+ <source>Translate me!</source>
+ <translation>Traduisez moi!</translation>
+ </message>
+</context>
+</TS>
--- /dev/null
+<!DOCTYPE RCC>
+<RCC version="1.0">
+ <qresource>
+ <file>thing.png</file>
+ </qresource>
+</RCC>
--- /dev/null
+<!DOCTYPE RCC>
+<RCC version="1.0">
+ <qresource>
+ <file>thing2.png</file>
+ </qresource>
+</RCC>
--- /dev/null
+message Dummy {
+ required string text = 1;
+}
--- /dev/null
+#include "defs.pb.h"
+
+// Smoke test: confirms the protoc-generated code compiles and links, and
+// that a message object can be heap-allocated and destroyed cleanly.
+int main(int argc, char **argv) {
+ GOOGLE_PROTOBUF_VERIFY_VERSION;
+ Dummy *msg = new Dummy;
+ delete msg;
+ // Release all globals owned by libprotobuf before exiting.
+ google::protobuf::ShutdownProtobufLibrary();
+ return 0;
+}
--- /dev/null
+subdirgen = generator(protoc, \
+ output : ['@BASENAME@.pb.cc', '@BASENAME@.pb.h'],
+ arguments : ['--proto_path=@CURRENT_SOURCE_DIR@', '--cpp_out=@BUILD_DIR@', '@INPUT@'])
+
+generated = subdirgen.process('defs.proto')
+e = executable('subdir-prog', 'main.cpp', generated,
+ dependencies : dep)
+test('subdir-prototest', e)
--- /dev/null
+message Dummy {
+ required string text = 1;
+}
--- /dev/null
+#include "defs.pb.h"
+
+int main(int argc, char **argv) {
+ // Abort if the generated code was built against an incompatible
+ // protobuf runtime.
+ GOOGLE_PROTOBUF_VERIFY_VERSION;
+ Dummy *d = new Dummy;
+ delete d;
+ // Release all globals owned by libprotobuf.
+ google::protobuf::ShutdownProtobufLibrary();
+ return 0;
+}
--- /dev/null
+project('protocol buffer test', 'cpp')
+
+# Both the protoc compiler and the protobuf runtime must be available;
+# otherwise the whole test is skipped rather than failed.
+protoc = find_program('protoc', required : false)
+dep = dependency('protobuf', required : false)
+
+if not protoc.found() or not dep.found()
+ error('MESON_SKIP_TEST: protoc tool and/or protobuf pkg-config dependency not found')
+endif
+
+
+# Run protoc on each .proto input, emitting the .pb.cc/.pb.h pair into the
+# build directory.
+gen = generator(protoc, \
+ output : ['@BASENAME@.pb.cc', '@BASENAME@.pb.h'],
+ arguments : ['--proto_path=@SOURCE_DIR@', '--cpp_out=@BUILD_DIR@', '@INPUT@'])
+
+generated = gen.process('defs.proto')
+e = executable('prog', 'main.cpp', generated,
+ dependencies : dep)
+test('prototest', e)
+
+# Repeat the same generation for sources living in a subdirectory.
+subdir('asubdir')
--- /dev/null
+i18n.merge_file(
+ input: 'test.desktop.in',
+ output: 'test.desktop',
+ type: 'desktop',
+ po_dir: '../po',
+ install: true,
+ install_dir: join_paths(get_option('datadir'), 'applications')
+)
--- /dev/null
+[Desktop Entry]
+Name=Test
+GenericName=Application
+Comment=Test Application
+Type=Application
+
--- /dev/null
+usr/bin/intlprog
+usr/share/locale/de/LC_MESSAGES/intltest.mo
+usr/share/locale/fi/LC_MESSAGES/intltest.mo
+usr/share/applications/test.desktop
--- /dev/null
+project('gettext example', 'c')
+
+i18n = import('i18n')
+
+subdir('po')
+subdir('src')
+subdir('data')
--- /dev/null
+src/intlmain.c
+data/test.desktop.in
--- /dev/null
+# German translations for PACKAGE package.
+# Copyright (C) 2013 THE PACKAGE'S COPYRIGHT HOLDER
+# This file is distributed under the same license as the PACKAGE package.
+# Jussi Pakkanen <jpakkane@brash>, 2013.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2013-09-12 18:53+0300\n"
+"PO-Revision-Date: 2013-09-12 18:57+0300\n"
+"Last-Translator: Jussi Pakkanen <jpakkane@brash>\n"
+"Language-Team: German\n"
+"Language: de\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=ASCII\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Plural-Forms: nplurals=2; plural=(n != 1);\n"
+
+#: src/intlmain.c:15
+msgid "International greeting."
+msgstr "Internationale Gruss."
--- /dev/null
+# Finnish translations for PACKAGE package.
+# Copyright (C) 2013 THE PACKAGE'S COPYRIGHT HOLDER
+# This file is distributed under the same license as the PACKAGE package.
+# Jussi Pakkanen <jpakkane@brash>, 2013.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2013-09-12 18:53+0300\n"
+"PO-Revision-Date: 2013-09-12 18:57+0300\n"
+"Last-Translator: Jussi Pakkanen <jpakkane@brash>\n"
+"Language-Team: Finnish\n"
+"Language: fi\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=ASCII\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Plural-Forms: nplurals=2; plural=(n != 1);\n"
+
+#: src/intlmain.c:15
+msgid "International greeting."
+msgstr "Maailman tervehdys."
--- /dev/null
+# SOME DESCRIPTIVE TITLE.
+# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER
+# This file is distributed under the same license as the intltest package.
+# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: intltest\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2017-05-31 05:16-0500\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
+"Language-Team: LANGUAGE <LL@li.org>\n"
+"Language: \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=CHARSET\n"
+"Content-Transfer-Encoding: 8bit\n"
+
+#: src/intlmain.c:15
+msgid "International greeting."
+msgstr ""
+
+#: data/test.desktop.in:3
+msgid "Test"
+msgstr ""
+
+#: data/test.desktop.in:4
+msgid "Application"
+msgstr ""
+
+#: data/test.desktop.in:5
+msgid "Test Application"
+msgstr ""
--- /dev/null
+langs = ['fi', 'de']
+
+i18n.gettext('intltest', languages : langs)
--- /dev/null
+#include<libintl.h>
+#include<locale.h>
+#include<stdio.h>
+
+#define _(String) gettext (String)
+
+#define PACKAGE "intltest"
+// WRONG, but enough for this test.
+#define LOCALEDIR "/usr/share/locale"
+
+int main(int argc, char **argv) {
+ setlocale(LC_ALL, "");
+ bindtextdomain(PACKAGE, LOCALEDIR);
+ textdomain(PACKAGE);
+ printf("%s\n", _("International greeting."));
+ return 0;
+}
--- /dev/null
+executable('intlprog', 'intlmain.c', install : true,
+ dependencies : meson.get_compiler('c').find_library('intl', required : false))
--- /dev/null
+<!DOCTYPE node PUBLIC "-//freedesktop//DTD D-BUS Object Introspection 1.0//EN"
+"http://www.freedesktop.org/standards/dbus/1.0/introspect.dtd">
+<node>
+ <interface name="com.example">
+ <method name="Hello">
+ <arg direction="in" type="s" name="name"/>
+ <arg direction="out" type="s" name="greeting"/>
+ </method>
+ </interface>
+</node>
--- /dev/null
+#include"generated-gdbus.h"
+
+// Smoke test for gdbus-codegen output: construct and release the generated
+// D-Bus skeleton object to prove the generated code compiles and links.
+int main(int argc, char **argv) {
+ SampleComExample *s;
+ s = sample_com_example_skeleton_new();
+ g_object_unref(s);
+ return 0;
+}
--- /dev/null
+gdbus_src = gnome.gdbus_codegen('generated-gdbus', 'com.example.Sample.xml',
+ interface_prefix : 'com.example.',
+ namespace : 'Sample',
+ annotations : [
+ ['com.example.Hello()', 'org.freedesktop.DBus.Deprecated', 'true']
+ ],
+ docbook : 'generated-gdbus-doc'
+)
+
+gdbus_exe = executable('gdbus-test', 'gdbusprog.c',
+ gdbus_src,
+ include_directories : include_directories('..'),
+ dependencies : giounix)
+
+test('gdbus', gdbus_exe)
--- /dev/null
+#include<stdio.h>
+#include<stdlib.h>
+#include<glib-object.h>
+#include"marshaller.h"
+
+static int singleton = 42;
+
+// Callback marshalled as VOID:VOID; checks the closure delivered the
+// expected user_data pointer (exit code 1 on mismatch).
+void foo(gpointer user_data, gpointer data) {
+ if (user_data != &singleton) {
+ fprintf(stderr, "Invoked foo function was passed incorrect user data.\n");
+ exit(1);
+ }
+}
+
+// Callback marshalled as VOID:INT; verifies both the marshalled int
+// parameter and the user_data pointer arrive intact (exit codes 2/3).
+void bar(gpointer user_data, gint param1, gpointer data) {
+ if (param1 != singleton) {
+ // Include the offending value to make marshalling failures debuggable.
+ // (The original message "incorrect param1, but %d" was a garbled mix of
+ // two templates.)
+ fprintf(stderr, "Invoked bar function was passed incorrect param1: %d.\n", param1);
+ exit(2);
+ }
+ if (user_data != &singleton) {
+ fprintf(stderr, "Invoked bar function was passed incorrect user data.\n");
+ exit(3);
+ }
+}
+
+// Callback marshalled as FLOAT:BOOLEAN,UCHAR; validates each marshalled
+// argument (exit codes 4-6) and returns param2 so the caller can also
+// check the marshaller's float return path.
+gfloat baz(gpointer user_data, gboolean param1, guchar param2, gpointer data) {
+ if (param1 != TRUE) {
+ fprintf(stderr, "Invoked baz function was passed incorrect param1.\n");
+ exit(4);
+ }
+ if (param2 != singleton) {
+ fprintf(stderr, "Invoked baz function was passed incorrect param2.\n");
+ exit(5);
+ }
+ if (user_data != &singleton) {
+ fprintf(stderr, "Invoked baz function was passed incorrect user data.\n");
+ exit(6);
+ }
+ return (gfloat)param2;
+}
+
+/* Exercises each glib-genmarshal-generated marshaller: invokes closures for
+ * foo, bar and baz via g_closure_invoke and checks parameter passing and
+ * return values. Exit codes 1-6 come from the callbacks; 7-9 from the
+ * checks below.
+ *
+ * FIX: every "&param_values" had been corrupted to the mojibake
+ * "¶m_values" ("&para" decoded as an HTML entity); restored here. */
+int main(int argc, char **argv) {
+ GClosure *cc_foo, *cc_bar, *cc_baz;
+ GValue return_value = G_VALUE_INIT;
+ GValue param_values[3] = {G_VALUE_INIT, G_VALUE_INIT, G_VALUE_INIT};
+
+ /* VOID:VOID marshaller: only user_data (param 0) is passed. */
+ fprintf(stderr, "Invoking foo function.\n");
+ cc_foo = g_cclosure_new(G_CALLBACK(foo), NULL, NULL);
+ g_closure_set_marshal(cc_foo, g_cclosure_user_marshal_VOID__VOID);
+ g_value_init(&param_values[0], G_TYPE_POINTER);
+ g_value_set_pointer(&param_values[0], &singleton);
+ g_closure_invoke(cc_foo, &return_value, 1, param_values, NULL);
+ if (G_VALUE_TYPE(&return_value) != G_TYPE_INVALID) {
+ fprintf(stderr, "Invoked foo function did not return empty value, but %s.\n",
+ G_VALUE_TYPE_NAME(&return_value));
+ return 7;
+ }
+ g_value_unset(&param_values[0]);
+ g_value_unset(&return_value);
+ g_closure_unref(cc_foo);
+
+ /* VOID:INT marshaller: user_data plus one int parameter. */
+ fprintf(stderr, "Invoking bar function.\n");
+ cc_bar = g_cclosure_new(G_CALLBACK(bar), NULL, NULL);
+ g_closure_set_marshal(cc_bar, g_cclosure_user_marshal_VOID__INT);
+ g_value_init(&param_values[0], G_TYPE_POINTER);
+ g_value_set_pointer(&param_values[0], &singleton);
+ g_value_init(&param_values[1], G_TYPE_INT);
+ g_value_set_int(&param_values[1], 42);
+ g_closure_invoke(cc_bar, &return_value, 2, param_values, NULL);
+ if (G_VALUE_TYPE(&return_value) != G_TYPE_INVALID) {
+ fprintf(stderr, "Invoked bar function did not return empty value.\n");
+ return 8;
+ }
+ g_value_unset(&param_values[0]);
+ g_value_unset(&param_values[1]);
+ g_value_unset(&return_value);
+ g_closure_unref(cc_bar);
+
+ /* FLOAT:BOOLEAN,UCHAR marshaller: two parameters and a float return,
+ * which must be pre-initialized to G_TYPE_FLOAT before invoking. */
+ fprintf(stderr, "Invoking baz function.\n");
+ cc_baz = g_cclosure_new(G_CALLBACK(baz), NULL, NULL);
+ g_closure_set_marshal(cc_baz, g_cclosure_user_marshal_FLOAT__BOOLEAN_UCHAR);
+ g_value_init(&param_values[0], G_TYPE_POINTER);
+ g_value_set_pointer(&param_values[0], &singleton);
+ g_value_init(&param_values[1], G_TYPE_BOOLEAN);
+ g_value_set_boolean(&param_values[1], TRUE);
+ g_value_init(&param_values[2], G_TYPE_UCHAR);
+ g_value_set_uchar(&param_values[2], 42);
+ g_value_init(&return_value, G_TYPE_FLOAT);
+ g_closure_invoke(cc_baz, &return_value, 3, param_values, NULL);
+ if (g_value_get_float(&return_value) != 42.0f) {
+ fprintf(stderr, "Invoked baz function did not return expected value.\n");
+ return 9;
+ }
+ g_value_unset(&param_values[0]);
+ g_value_unset(&param_values[1]);
+ g_value_unset(&param_values[2]);
+ g_value_unset(&return_value);
+ g_closure_unref(cc_baz);
+
+ fprintf(stderr, "All ok.\n");
+ return 0;
+}
--- /dev/null
+VOID:VOID
+VOID:INT
+FLOAT:BOOLEAN,UCHAR
--- /dev/null
+# Generate marshaller.c/.h from the signatures in marshaller.list and build
+# a test program against them.
+marshallers = gnome.genmarshal('marshaller',
+sources : 'marshaller.list',
+install_header : true,
+install_dir : get_option('includedir'),
+extra_args : ['-UG_ENABLE_DEBUG', '--prototypes'])
+
+# genmarshal returns [c_source, header].
+marshaller_c = marshallers[0]
+marshaller_h = marshallers[1]
+
+genmarshalexe = executable('genmarshalprog', 'main.c', marshaller_c, marshaller_h,
+dependencies : gobj)
+test('genmarshal test', genmarshalexe)
--- /dev/null
+#include "dep1.h"
+
+struct _MesonDep1
+{
+ GObject parent_instance;
+};
+
+G_DEFINE_TYPE (MesonDep1, meson_dep1, G_TYPE_OBJECT)
+
+/**
+ * meson_dep1_new:
+ *
+ * Allocates a new #MesonDep1.
+ *
+ * Returns: (transfer full): a #MesonDep1.
+ */
+MesonDep1 *
+meson_dep1_new (void)
+{
+ return g_object_new (MESON_TYPE_DEP1, NULL);
+}
+
+static void
+meson_dep1_finalize (GObject *object)
+{
+ G_OBJECT_CLASS (meson_dep1_parent_class)->finalize (object);
+}
+
+static void
+meson_dep1_class_init (MesonDep1Class *klass)
+{
+ GObjectClass *object_class = G_OBJECT_CLASS (klass);
+
+ object_class->finalize = meson_dep1_finalize;
+}
+
+static void
+meson_dep1_init (MesonDep1 *self)
+{
+}
+
+/**
+ * meson_dep1_just_return_it:
+ * @self: a #MesonDep1.
+ * @dep: a #MesonDep2.
+ *
+ * Returns the #MesonDep2 that is passed in
+ *
+ * Returns: (transfer none): a #MesonDep2
+ */
+MesonDep2*
+meson_dep1_just_return_it (MesonDep1 *self, MesonDep2 *dep)
+{
+ g_return_val_if_fail (MESON_IS_DEP1 (self), NULL);
+
+ return dep;
+}
--- /dev/null
+#ifndef MESON_DEP1_H
+#define MESON_DEP1_H
+
+#if !defined (MESON_TEST)
+#error "MESON_TEST not defined."
+#endif
+
+#include <glib-object.h>
+#include "dep2/dep2.h"
+
+G_BEGIN_DECLS
+
+#define MESON_TYPE_DEP1 (meson_dep1_get_type())
+
+G_DECLARE_FINAL_TYPE (MesonDep1, meson_dep1, MESON, DEP1, GObject)
+
+MesonDep1 *meson_dep1_new (void);
+MesonDep2 *meson_dep1_just_return_it (MesonDep1 *self,
+ MesonDep2 *dep);
+
+G_END_DECLS
+
+#endif /* MESON_DEP1_H */
--- /dev/null
+#include "dep2.h"
+
+struct _MesonDep2
+{
+ GObject parent_instance;
+
+ gchar *msg;
+};
+
+G_DEFINE_TYPE (MesonDep2, meson_dep2, G_TYPE_OBJECT)
+
+enum {
+ PROP_0,
+ PROP_MSG,
+ LAST_PROP
+};
+
+static GParamSpec *gParamSpecs [LAST_PROP];
+
+/**
+ * meson_dep2_new:
+ * @msg: The message to set.
+ *
+ * Allocates a new #MesonDep2.
+ *
+ * Returns: (transfer full): a #MesonDep2.
+ */
+MesonDep2 *
+meson_dep2_new (const gchar *msg)
+{
+ g_return_val_if_fail (msg != NULL, NULL);
+
+ return g_object_new (MESON_TYPE_DEP2,
+ "message", msg,
+ NULL);
+}
+
+static void
+meson_dep2_finalize (GObject *object)
+{
+ MesonDep2 *self = (MesonDep2 *)object;
+
+ g_clear_pointer (&self->msg, g_free);
+
+ G_OBJECT_CLASS (meson_dep2_parent_class)->finalize (object);
+}
+
+static void
+meson_dep2_get_property (GObject *object,
+ guint prop_id,
+ GValue *value,
+ GParamSpec *pspec)
+{
+ MesonDep2 *self = MESON_DEP2 (object);
+
+ switch (prop_id)
+ {
+ case PROP_MSG:
+ g_value_set_string (value, self->msg);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+}
+
+static void
+meson_dep2_set_property (GObject *object,
+ guint prop_id,
+ const GValue *value,
+ GParamSpec *pspec)
+{
+ MesonDep2 *self = MESON_DEP2 (object);
+
+ switch (prop_id)
+ {
+ case PROP_MSG:
+ self->msg = g_value_dup_string (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+}
+
+static void
+meson_dep2_class_init (MesonDep2Class *klass)
+{
+ GObjectClass *object_class = G_OBJECT_CLASS (klass);
+
+ object_class->finalize = meson_dep2_finalize;
+ object_class->get_property = meson_dep2_get_property;
+ object_class->set_property = meson_dep2_set_property;
+
+ gParamSpecs [PROP_MSG] =
+ g_param_spec_string ("message",
+ "Message",
+ "The message to print.",
+ NULL,
+ (G_PARAM_READWRITE |
+ G_PARAM_CONSTRUCT_ONLY |
+ G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_properties (object_class, LAST_PROP, gParamSpecs);
+}
+
+static void
+meson_dep2_init (MesonDep2 *self)
+{
+}
+
+/**
+ * meson_dep2_return_message:
+ * @self: a #MesonDep2.
+ *
+ * Returns the message.
+ *
+ * Returns: (transfer none): a const gchar*
+ */
+const gchar*
+meson_dep2_return_message (MesonDep2 *self)
+{
+ g_return_val_if_fail (MESON_IS_DEP2 (self), NULL);
+
+ return (const gchar*) self->msg;
+}
--- /dev/null
+#ifndef MESON_DEP2_H
+#define MESON_DEP2_H
+
+#if !defined (MESON_TEST)
+#error "MESON_TEST not defined."
+#endif
+
+#include <glib-object.h>
+
+G_BEGIN_DECLS
+
+#define MESON_TYPE_DEP2 (meson_dep2_get_type())
+
+G_DECLARE_FINAL_TYPE (MesonDep2, meson_dep2, MESON, DEP2, GObject)
+
+MesonDep2 *meson_dep2_new (const gchar *msg);
+const gchar *meson_dep2_return_message (MesonDep2 *self);
+
+G_END_DECLS
+
+#endif /* MESON_DEP2_H */
--- /dev/null
+dep2sources = ['dep2.c', 'dep2.h']
+
+dep2lib = shared_library(
+ 'dep2lib',
+ sources : dep2sources,
+ dependencies : gobj,
+ install : true
+)
+
+dep2gir = gnome.generate_gir(
+ dep2lib,
+ sources : dep2sources,
+ nsversion : '1.0',
+ namespace : 'MesonDep2',
+ symbol_prefix : 'meson',
+ identifier_prefix : 'Meson',
+ includes : ['GObject-2.0'],
+ install : true
+)
+
+dep2_dep = declare_dependency(link_with : dep2lib,
+ sources : [dep2gir])
--- /dev/null
+subdir('dep2')
+
+dep1sources = ['dep1.c', 'dep1.h']
+
+# Do not need to link to dep2lib because we don't use any symbols from it
+dep1lib = shared_library(
+ 'dep1lib',
+ sources : dep1sources,
+ dependencies : gobj,
+ install : true
+)
+
+# But the gir does need it because we use the MesonDep2* structure defined
+# in the header
+dep1gir = gnome.generate_gir(
+ dep1lib,
+ sources : dep1sources,
+ nsversion : '1.0',
+ namespace : 'MesonDep1',
+ symbol_prefix : 'meson',
+ identifier_prefix : 'Meson',
+ header: 'dep1.h',
+ includes : ['GObject-2.0', 'MesonDep2-1.0'],
+ dependencies : [dep2_dep],
+ install : true
+)
+
+dep1_dep = declare_dependency(link_with : dep1lib,
+ dependencies : [dep2_dep],
+ sources : [dep1gir])
--- /dev/null
+#include "meson-sample.h"
+
+struct _MesonSample
+{
+ GObject parent_instance;
+
+ gchar *msg;
+};
+
+G_DEFINE_TYPE (MesonSample, meson_sample, G_TYPE_OBJECT)
+
+enum {
+ PROP_0,
+ PROP_MSG,
+ LAST_PROP
+};
+
+static GParamSpec *gParamSpecs [LAST_PROP];
+
+/**
+ * meson_sample_new:
+ *
+ * Allocates a new #MesonSample.
+ *
+ * Returns: (transfer full): a #MesonSample.
+ */
+MesonSample *
+meson_sample_new (void)
+{
+ return g_object_new (MESON_TYPE_SAMPLE, NULL);
+}
+
+static void
+meson_sample_finalize (GObject *object)
+{
+ MesonSample *self = (MesonSample *)object;
+
+ g_clear_pointer (&self->msg, g_free);
+
+ G_OBJECT_CLASS (meson_sample_parent_class)->finalize (object);
+}
+
+static void
+meson_sample_get_property (GObject *object,
+ guint prop_id,
+ GValue *value,
+ GParamSpec *pspec)
+{
+ MesonSample *self = MESON_SAMPLE (object);
+
+ switch (prop_id)
+ {
+ case PROP_MSG:
+ g_value_set_string (value, self->msg);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+}
+
+static void
+meson_sample_set_property (GObject *object,
+ guint prop_id,
+ const GValue *value,
+ GParamSpec *pspec)
+{
+ MesonSample *self = MESON_SAMPLE (object);
+
+ switch (prop_id)
+ {
+ case PROP_MSG:
+ self->msg = g_value_dup_string (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+}
+
+static void
+meson_sample_class_init (MesonSampleClass *klass)
+{
+ GObjectClass *object_class = G_OBJECT_CLASS (klass);
+
+ object_class->finalize = meson_sample_finalize;
+ object_class->get_property = meson_sample_get_property;
+ object_class->set_property = meson_sample_set_property;
+
+ gParamSpecs [PROP_MSG] =
+ g_param_spec_string ("message",
+ "Message",
+ "The message to print.",
+ NULL,
+ (G_PARAM_READWRITE |
+ G_PARAM_CONSTRUCT_ONLY |
+ G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_properties (object_class, LAST_PROP, gParamSpecs);
+}
+
+static void
+meson_sample_init (MesonSample *self)
+{
+}
+
+/**
+ * meson_sample_print_message:
+ * @self: a #MesonSample.
+ * @dep1: a #MesonDep1.
+ * @dep2: a #MesonDep2.
+ *
+ * Prints the message held by @dep2, obtained by passing it through @dep1.
+ */
+void
+meson_sample_print_message (MesonSample *self, MesonDep1 *dep1, MesonDep2 *dep2)
+{
+ MesonDep2 *samedep;
+ g_return_if_fail (MESON_IS_SAMPLE (self));
+
+ samedep = meson_dep1_just_return_it (dep1, dep2);
+ g_print ("Message: %s\n", meson_dep2_return_message (samedep));
+}
--- /dev/null
+#ifndef MESON_SAMPLE_H
+#define MESON_SAMPLE_H
+
+#if !defined (MESON_TEST)
+#error "MESON_TEST not defined."
+#endif
+
+#include <glib-object.h>
+#include "dep1/dep1.h"
+
+G_BEGIN_DECLS
+
+#define MESON_TYPE_SAMPLE (meson_sample_get_type())
+
+G_DECLARE_FINAL_TYPE (MesonSample, meson_sample, MESON, SAMPLE, GObject)
+
+MesonSample *meson_sample_new (void);
+void meson_sample_print_message (MesonSample *self,
+ MesonDep1 *dep1,
+ MesonDep2 *dep2);
+
+G_END_DECLS
+
+#endif /* MESON_SAMPLE_H */
--- /dev/null
+subdir('dep1')
+
+libsources = ['meson-sample.c', 'meson-sample.h']
+
+girlib = shared_library(
+ 'gir_lib',
+ sources : libsources,
+ dependencies : [gobj, dep1_dep],
+ install : true
+)
+
+girexe = executable(
+ 'girprog',
+ sources : 'prog.c',
+ dependencies : [glib, gobj, gir, dep1_dep],
+ link_with : girlib
+)
+
+fake_dep = dependency('no-way-this-exists', required: false)
+
+gnome.generate_gir(
+ girlib,
+ sources : libsources,
+ nsversion : '1.0',
+ namespace : 'Meson',
+ symbol_prefix : 'meson',
+ identifier_prefix : 'Meson',
+ includes : ['GObject-2.0', 'MesonDep1-1.0'],
+ # dep1_dep pulls in dep2_dep for us
+ dependencies : [[fake_dep, dep1_dep]],
+ install : true,
+ build_by_default : true,
+ # Test that unknown kwargs do not crash the parser.
+ # Unknown kwargs will eventually become a hard error.
+ # Once that happens remove this.
+ unknown_kwarg : true,
+)
+
+test('gobject introspection/c', girexe)
+gir_paths = ':'.join([girlib.outdir(), dep1lib.outdir(), dep2lib.outdir()])
+envdata = environment()
+envdata.append('GI_TYPELIB_PATH', gir_paths, separator : ':')
+envdata.append('LD_LIBRARY_PATH', gir_paths)
+test('gobject introspection/py', find_program('prog.py'),
+ env : envdata)
--- /dev/null
+#include <girepository.h>
+
+#include "meson-sample.h"
+
+gint
+main (gint argc,
+ gchar *argv[])
+{
+ GError * error = NULL;
+
+ GOptionContext * ctx = g_option_context_new (NULL);
+ g_option_context_add_group (ctx, g_irepository_get_option_group ());
+
+ if (!g_option_context_parse (ctx, &argc, &argv, &error)) {
+ g_print ("sample: %s\n", error->message);
+ g_option_context_free (ctx);
+ if (error) {
+ g_error_free (error);
+ }
+
+ return 1;
+ }
+
+ MesonSample * i = meson_sample_new ();
+ MesonDep1 * dep1 = meson_dep1_new ();
+ MesonDep2 * dep2 = meson_dep2_new ("Hello, meson/c!");
+ meson_sample_print_message (i, dep1, dep2);
+
+ g_object_unref (i);
+ g_object_unref (dep1);
+ g_object_unref (dep2);
+ g_option_context_free (ctx);
+
+ return 0;
+}
--- /dev/null
+#!/usr/bin/env python3
+"""Smoke test for the generated GObject introspection bindings.
+
+Loads the Meson, MesonDep1 and MesonDep2 typelibs through PyGObject and
+round-trips a message, mirroring the C program prog.c.
+"""
+from gi.repository import Meson, MesonDep1, MesonDep2
+
+if __name__ == "__main__":
+ s = Meson.Sample.new()
+ dep1 = MesonDep1.Dep1.new()
+ dep2 = MesonDep2.Dep2.new("Hello, meson/py!")
+ s.print_message(dep1, dep2)
--- /dev/null
+usr/include/enums.h
+usr/include/enums2.h
+usr/include/enums3.h
+usr/include/marshaller.h
+usr/lib/libgir_lib.so
+usr/lib/libdep1lib.so
+usr/lib/libdep2lib.so
+usr/lib/girepository-1.0/Meson-1.0.typelib
+usr/lib/girepository-1.0/MesonDep1-1.0.typelib
+usr/lib/girepository-1.0/MesonDep2-1.0.typelib
+usr/share/gir-1.0/Meson-1.0.gir
+usr/share/gir-1.0/MesonDep1-1.0.gir
+usr/share/gir-1.0/MesonDep2-1.0.gir
+usr/share/glib-2.0/schemas/com.github.meson.gschema.xml
+usr/share/simple-resources.gresource
+usr/include/simple-resources.h
--- /dev/null
+project('gobject-introspection', 'c')
+
+cc = meson.get_compiler('c')
+
+add_global_arguments('-DMESON_TEST', language : 'c')
+if cc.get_id() == 'intel'
+ # Ignore invalid GCC pragma warnings from glib
+ # https://bugzilla.gnome.org/show_bug.cgi?id=776562
+ add_global_arguments('-wd2282', language : 'c')
+endif
+
+gnome = import('gnome')
+gio = dependency('gio-2.0')
+giounix = dependency('gio-unix-2.0')
+glib = dependency('glib-2.0')
+gobj = dependency('gobject-2.0')
+gir = dependency('gobject-introspection-1.0')
+gmod = dependency('gmodule-2.0')
+
+subdir('resources-data')
+subdir('resources')
+subdir('gir')
+subdir('schemas')
+subdir('gdbus')
+subdir('mkenums')
+subdir('genmarshal')
--- /dev/null
+/*** BEGIN file-header ***/
+
+#include "enums.h"
+
+/*** END file-header ***/
+/*** BEGIN file-production ***/
+
+/* enumerations from "@basename@" */
+#include "@basename@"
+
+/*** END file-production ***/
+
+/*** BEGIN value-header ***/
+GType
+@enum_name@_get_type(void) {
+ static volatile gsize g_define_type_id__volatile = 0;
+
+ if(g_once_init_enter(&g_define_type_id__volatile)) {
+ static const G@Type@Value values [] = {
+/*** END value-header ***/
+
+/*** BEGIN value-production ***/
+ { @VALUENAME@, "@VALUENAME@", "@valuenick@" },
+/*** END value-production ***/
+
+/*** BEGIN value-tail ***/
+ { 0, NULL, NULL }
+ };
+
+ GType g_define_type_id =
+ g_@type@_register_static(g_intern_static_string("@EnumName@"), values);
+ g_once_init_leave(&g_define_type_id__volatile, g_define_type_id);
+ }
+
+ return g_define_type_id__volatile;
+}
+
+/*** END value-tail ***/
+
+/*** BEGIN file-tail ***/
+/*** END file-tail ***/
--- /dev/null
+/*** BEGIN file-header ***/
+#ifndef MESON_ENUMS_H
+#define MESON_ENUMS_H
+
+#include <glib-object.h>
+
+G_BEGIN_DECLS
+/*** END file-header ***/
+
+/*** BEGIN file-production ***/
+
+/* enumerations from "@basename@" */
+/*** END file-production ***/
+/*** BEGIN value-header ***/
+GType @enum_name@_get_type(void) G_GNUC_CONST;
+#define @ENUMPREFIX@_TYPE_@ENUMSHORT@ (@enum_name@_get_type())
+/*** END value-header ***/
+
+/*** BEGIN file-tail ***/
+
+G_END_DECLS
+
+#endif /* MESON_ENUMS_H */
+/*** END file-tail ***/
--- /dev/null
+/*** BEGIN file-header ***/
+
+#include "enums.h"
+
+/*** END file-header ***/
+/*** BEGIN file-production ***/
+
+/* enumerations from "@basename@" */
+#include "@basename@"
+
+/*** END file-production ***/
+
+/*** BEGIN value-header ***/
+GType
+@enum_name@_get_type(void) {
+ static volatile gsize g_define_type_id__volatile = 0;
+
+ if(g_once_init_enter(&g_define_type_id__volatile)) {
+ static const G@Type@Value values [] = {
+/*** END value-header ***/
+
+/*** BEGIN value-production ***/
+ { @VALUENAME@, "@VALUENAME@", "@valuenick@" },
+/*** END value-production ***/
+
+/*** BEGIN value-tail ***/
+ { 0, NULL, NULL }
+ };
+
+ GType g_define_type_id =
+ g_@type@_register_static(g_intern_static_string("@EnumName@"), values);
+ g_once_init_leave(&g_define_type_id__volatile, g_define_type_id);
+ }
+
+ return g_define_type_id__volatile;
+}
+
+/*** END value-tail ***/
+
+/*** BEGIN file-tail ***/
+/*** END file-tail ***/
--- /dev/null
+/*** BEGIN file-header ***/
+#ifndef MESON_ENUMS_H
+#define MESON_ENUMS_H
+
+#include <glib-object.h>
+
+G_BEGIN_DECLS
+/*** END file-header ***/
+
+/*** BEGIN file-production ***/
+
+/* enumerations from "@basename@" */
+/*** END file-production ***/
+/*** BEGIN value-header ***/
+GType @enum_name@_get_type(void) G_GNUC_CONST;
+#define @ENUMPREFIX@_TYPE_@ENUMSHORT@ (@enum_name@_get_type())
+/*** END value-header ***/
+
+/*** BEGIN file-tail ***/
+
+G_END_DECLS
+
+#endif /* MESON_ENUMS_H */
+/*** END file-tail ***/
--- /dev/null
+#include<stdio.h>
+#include<string.h>
+#include<glib-object.h>
+#include"meson-sample.h"
+#include"@ENUM_FILE@"
+
+/* Validates the mkenums-generated GType registration: enum and flags values
+ * must be resolvable both by name and by nick, and must match the constants
+ * declared in meson-sample.h. Each failing lookup returns a distinct code. */
+int main(int argc, char **argv) {
+ GEnumClass *xenum = g_type_class_ref(MESON_TYPE_THE_XENUM);
+ GFlagsClass *flags_enum = g_type_class_ref(MESON_TYPE_THE_FLAGS_ENUM);
+ if (g_enum_get_value_by_name(xenum, "MESON_THE_XVALUE")->value != MESON_THE_XVALUE) {
+ fprintf(stderr, "Get MESON_THE_XVALUE by name failed.\n");
+ return 1;
+ }
+ if (g_enum_get_value_by_nick(xenum, "the-xvalue")->value != MESON_THE_XVALUE) {
+ fprintf(stderr, "Get MESON_THE_XVALUE by nick failed.\n");
+ return 2;
+ }
+ if (g_flags_get_value_by_name(flags_enum, "MESON_THE_FIRST_VALUE")->value != MESON_THE_FIRST_VALUE) {
+ fprintf(stderr, "Get MESON_THE_FIRST_VALUE by name failed.\n");
+ return 3;
+ }
+ if (g_flags_get_value_by_nick(flags_enum, "the-first-value")->value != MESON_THE_FIRST_VALUE) {
+ fprintf(stderr, "Get MESON_THE_FIRST_VALUE by nick failed.\n");
+ return 4;
+ }
+ g_type_class_unref(xenum);
+ g_type_class_unref(flags_enum);
+ fprintf(stderr, "All ok.\n");
+ return 0;
+}
--- /dev/null
+#include <stdio.h>
+#include <string.h>
+#include <glib-object.h>
+#include "enums4.h"
+#include "meson-sample.h"
+
+int main(int argc, char **argv) {
+ GEnumClass *xenum = g_type_class_ref(MESON_TYPE_THE_XENUM);
+ GFlagsClass *flags_enum = g_type_class_ref(MESON_TYPE_THE_FLAGS_ENUM);
+ if (g_enum_get_value_by_name(xenum, "MESON_THE_XVALUE")->value != MESON_THE_XVALUE) {
+ fprintf(stderr, "Get MESON_THE_XVALUE by name failed.\n");
+ return 1;
+ }
+ if (g_enum_get_value_by_nick(xenum, "the-xvalue")->value != MESON_THE_XVALUE) {
+ fprintf(stderr, "Get MESON_THE_XVALUE by nick failed.\n");
+ return 2;
+ }
+ if (g_flags_get_value_by_name(flags_enum, "MESON_THE_FIRST_VALUE")->value != MESON_THE_FIRST_VALUE) {
+ fprintf(stderr, "Get MESON_THE_FIRST_VALUE by name failed.\n");
+ return 3;
+ }
+ if (g_flags_get_value_by_nick(flags_enum, "the-first-value")->value != MESON_THE_FIRST_VALUE) {
+ fprintf(stderr, "Get MESON_THE_FIRST_VALUE by nick failed.\n");
+ return 4;
+ }
+
+ /* Make sure that funcs are generated with leading underscore as requested */
+ if (!_meson_the_xenum_get_type())
+ g_error ("Bad!");
+
+ g_type_class_unref(xenum);
+ g_type_class_unref(flags_enum);
+ fprintf(stderr, "All ok.\n");
+ return 0;
+}
--- /dev/null
+#include <stdio.h>
+#include <string.h>
+#include <glib-object.h>
+#include "enums5.h"
+#include "meson-sample.h"
+
+int main(int argc, char **argv) {
+ GEnumClass *xenum = g_type_class_ref(MESON_TYPE_THE_XENUM);
+ GFlagsClass *flags_enum = g_type_class_ref(MESON_TYPE_THE_FLAGS_ENUM);
+ if (g_enum_get_value_by_name(xenum, "MESON_THE_XVALUE")->value != MESON_THE_XVALUE) {
+ fprintf(stderr, "Get MESON_THE_XVALUE by name failed.\n");
+ return 1;
+ }
+ if (g_enum_get_value_by_nick(xenum, "the-xvalue")->value != MESON_THE_XVALUE) {
+ fprintf(stderr, "Get MESON_THE_XVALUE by nick failed.\n");
+ return 2;
+ }
+ if (g_flags_get_value_by_name(flags_enum, "MESON_THE_FIRST_VALUE")->value != MESON_THE_FIRST_VALUE) {
+ fprintf(stderr, "Get MESON_THE_FIRST_VALUE by name failed.\n");
+ return 3;
+ }
+ if (g_flags_get_value_by_nick(flags_enum, "the-first-value")->value != MESON_THE_FIRST_VALUE) {
+ fprintf(stderr, "Get MESON_THE_FIRST_VALUE by nick failed.\n");
+ return 4;
+ }
+
+ /* Make sure that funcs do not have any extra prefix */
+ if (!meson_the_xenum_get_type())
+ g_error ("Bad!");
+
+ g_type_class_unref(xenum);
+ g_type_class_unref(flags_enum);
+ fprintf(stderr, "All ok.\n");
+ return 0;
+}
--- /dev/null
+#pragma once
+#define MESON_EXPORT extern
--- /dev/null
+#pragma once
+
+typedef enum
+{
+ MESON_THE_XVALUE,
+ MESON_ANOTHER_VALUE
+} MesonTheXEnum;
+
+typedef enum /*< skip >*/
+{
+ MESON_FOO
+} MesonThisEnumWillBeSkipped;
+
+typedef enum /*< flags,prefix=MESON >*/
+{
+ MESON_THE_ZEROTH_VALUE, /*< skip >*/
+ MESON_THE_FIRST_VALUE,
+ MESON_THE_SECOND_VALUE,
+ MESON_THE_THIRD_VALUE, /*< nick=the-last-value >*/
+} MesonTheFlagsEnum;
--- /dev/null
+# Generate both header and source via template together.
+
+myenums = gnome.mkenums('abc1',
+ sources : 'meson-sample.h',
+ h_template : 'enums.h.in',
+ c_template : 'enums.c.in',
+ install_header : true,
+ install_dir : get_option('includedir'))
+
+enums_c1 = myenums[0]
+enums_h1 = myenums[1]
+
+conf = configuration_data()
+conf.set('ENUM_FILE', 'enums.h')
+main = configure_file(
+ input : 'main.c',
+ output : 'main1.c',
+ configuration : conf)
+
+enumexe1 = executable('enumprog1', main, enums_c1, enums_h1,
+dependencies : gobj)
+test('enum test 1', enumexe1)
+
+# Generate both header and source via template individually and overriding.
+
+enums_h2 = gnome.mkenums('abc2',
+ sources : 'meson-sample.h',
+ h_template : 'enums2.h.in',
+ ftail : '/* trailing header file info */',
+ install_header : true,
+ install_dir : get_option('includedir'))
+
+enums_c2 = gnome.mkenums('abc2',
+ sources : 'meson-sample.h',
+ depends : [enums_h1, enums_h2],
+ c_template : 'enums2.c.in',
+ ftail : '/* trailing source file info */',
+ install_header : true,
+ install_dir : get_option('includedir'))
+
+conf = configuration_data()
+conf.set('ENUM_FILE', 'enums2.h')
+main = configure_file(
+ input : 'main.c',
+ output : 'main2.c',
+ configuration : conf)
+
+enumexe2 = executable('enumprog2', main, enums_c2, enums_h2,
+dependencies : gobj)
+test('enum test 2', enumexe2)
+
# Generate both header and source by options only.
# These are specified in a way that should produce the same result as above
# (modulo any filename changes.)

# Header: the boilerplate normally supplied by *.h.in templates is passed
# inline via fhead/fprod/vhead/ftail keyword arguments instead.
enums_h3 = gnome.mkenums('enums3.h',
 sources : 'meson-sample.h',
 fhead : '''#ifndef MESON_ENUMS_H
#define MESON_ENUMS_H

#include <glib-object.h>

G_BEGIN_DECLS
''',
 fprod : '''
/* enumerations from "@basename@" */
''',
 vhead : '''GType @enum_name@_get_type(void) G_GNUC_CONST;
#define @ENUMPREFIX@_TYPE_@ENUMSHORT@ (@enum_name@_get_type())
''',
 ftail : '''
G_END_DECLS

#endif /* MESON_ENUMS_H */
''',
 install_header : true,
 install_dir : get_option('includedir'))

# Source: the generated GType registration code, again supplied inline.
enums_c3 = gnome.mkenums('enums3.c',
 sources : 'meson-sample.h',
 depends : enums_h3,
 fhead : '''#include "enums3.h"
''',
 fprod : '''

/* enumerations from "@basename@" */
#include "@basename@"
''',
 vhead : '''
GType
@enum_name@_get_type(void) {
 static volatile gsize g_define_type_id__volatile = 0;

 if(g_once_init_enter(&g_define_type_id__volatile)) {
 static const G@Type@Value values [] = {
''',
 vprod : ''' { @VALUENAME@, "@VALUENAME@", "@valuenick@" },''',
 vtail : ''' { 0, NULL, NULL }
 };

 GType g_define_type_id =
 g_@type@_register_static(g_intern_static_string("@EnumName@"), values);
 g_once_init_leave(&g_define_type_id__volatile, g_define_type_id);
 }

 return g_define_type_id__volatile;
}
''')

# Point main.c at the options-only generated header and build a test program.
conf = configuration_data()
conf.set('ENUM_FILE', 'enums3.h')
main = configure_file(
 input : 'main.c',
 output : 'main3.c',
 configuration : conf)

enumexe3 = executable('enumprog3', main, enums_c3, enums_h3,
dependencies : gobj)
test('enum test 3', enumexe3)

# mkenums_simple variants: function_prefix prepends '_' to generated getters.
enums4 = gnome.mkenums_simple('enums4', sources : 'meson-sample.h',
 function_prefix : '_')
enumexe4 = executable('enumprog4', 'main4.c', enums4, dependencies : gobj)

# decorator/header_prefix exercise symbol-export annotations in the output.
enums5 = gnome.mkenums_simple('enums5', sources : 'meson-sample.h',
 decorator : 'MESON_EXPORT',
 header_prefix : '#include "meson-decls.h"')
enumexe5 = executable('enumprog5', main, enums5, dependencies : gobj)
--- /dev/null
subdir('subdir')

python3 = import('python3').find_python()

# Inline script run with `python -c`: asserts its input file exists, then
# prints the fixed resource content, which `capture: true` redirects into
# the output file below.
fake_generator_script = '''
import os, sys
assert os.path.exists(sys.argv[1]), "File %s not found" % sys.argv[1]
print("This is a generated resource.")
'''

# Generate file res3.txt from file res3.txt.in. This is then included
# in a GResource file, driven by resources/meson.build.
res3_txt = custom_target('res3',
 input: 'res3.txt.in',
 output: 'res3.txt',
 command: [python3, '-c', fake_generator_script, '@INPUT@'],
 capture: true,
)
--- /dev/null
+This is a resource.
--- /dev/null
+This content is ignored, but Meson doesn't need to know that.
--- /dev/null
# Produce res4.txt from res4.txt.in via configure_file substitution
# (replaces @NOISE@ in the template with 'BARK').
cdata = configuration_data()
cdata.set('NOISE', 'BARK')

res4_txt = configure_file(
 input: 'res4.txt.in',
 output: 'res4.txt',
 configuration: cdata
)
--- /dev/null
+This is a resource in a subdirectory.
--- /dev/null
+@NOISE@ @NOISE@ @NOISE@
--- /dev/null
#!/usr/bin/env python3
"""Copy the file named by the first argument to the second.

Used by the build as a trivial stand-in "generator" program.
"""

import sys
import shutil


def copy(src, dst):
    """Copy *src* to *dst* (contents and permission bits, like `cp`)."""
    shutil.copy(src, dst)


# Guard the side effect so importing this module does not copy anything;
# the sibling copy.py helper in this test suite uses the same pattern.
if __name__ == '__main__':
    copy(sys.argv[1], sys.argv[2])
--- /dev/null
#include<stdio.h>
#include<string.h>
#include<gio/gio.h>
#include"generated-resources.h"

/* Content that the fake generator script writes into res3.txt. */
#define EXPECTED "This is a generated resource.\n"

/* Look up the generated resource from the compiled bundle and verify its
 * contents match what the generator produced.  Exit codes: 0 ok, 1 failure.
 * NOTE(review): the early-return failure paths leak `data`/`res`; harmless
 * in a short-lived test program. */
int main(int argc, char **argv) {
 GResource *res = generated_resources_get_resource();
 GError *err = NULL;
 GBytes *data = g_resources_lookup_data("/com/example/myprog/res3.txt",
 G_RESOURCE_LOOKUP_FLAGS_NONE, &err);

 if(data == NULL) {
 fprintf(stderr, "Data lookup failed: %s\n", err->message);
 return 1;
 }
 if(strcmp(g_bytes_get_data(data, NULL), EXPECTED) != 0) {
 fprintf(stderr, "Resource contents are wrong:\n %s\n",
 (const char*)g_bytes_get_data(data, NULL));
 return 1;
 }
 fprintf(stdout, "All ok.\n");
 g_bytes_unref(data);
 g_resource_unref(res);
 return 0;
}
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<gresources>
+ <gresource prefix="/com/example/myprog">
+ <file>res1.txt</file>
+ <file>subdir/res2.txt</file>
+ <file>res3.txt</file>
+ <file>subdir/res4.txt</file>
+ </gresource>
+</gresources>
--- /dev/null
# There are two tests here, because the 2nd one depends on a version of
# GLib (2.51.1) that is very recent at the time of writing.

copyfile = find_program('copyfile.py')

# Copy the XML so compile_resources consumes a generated (build-dir) file.
simple_gresource = configure_file(
 input : 'simple.gresource.xml',
 output : 'simple-gen.gresource.xml',
 command : [copyfile, '@INPUT@', '@OUTPUT@'])

simple_resources = gnome.compile_resources('simple-resources',
 simple_gresource,
 install_header : true,
 export : true,
 source_dir : '../resources-data',
 c_name : 'simple_resources')

simple_res_exe = executable('simple-resources-test',
 'simple-main.c', simple_resources,
 dependencies: gio)
test('simple resource test', simple_res_exe)

# Standalone .gresource bundle (no C source), loaded at runtime by resources.py.
gnome.compile_resources('simple-resources',
 'simple.gresource.xml',
 gresource_bundle: true,
 install: true,
 install_dir: get_option('datadir'),
 source_dir : '../resources-data',
)
test('simple resource test (gresource)', find_program('resources.py'))

if glib.version() >= '2.52.0'
 # This test cannot pass if the GLib version is older than 2.52.0.
 # Meson will raise an error if the user tries to use the 'dependencies'
 # argument and the version of GLib is too old for generated resource
 # dependencies to work correctly.
 generated_resources = gnome.compile_resources('generated-resources',
 'generated.gresource.xml',
 source_dir : '../resources-data',
 c_name : 'generated_resources',
 dependencies : [res3_txt, res4_txt])

 generated_res_exe = executable('generated-resources-test',
 'generated-main.c', generated_resources,
 dependencies: gio)
 test('generated resource test', generated_res_exe)
endif

# Test build_by_default
gnome.compile_resources('build-resources',
 'simple.gresource.xml',
 gresource_bundle : true,
 build_by_default : true,
 source_dir : '../resources-data',
)
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<gresources>
+ <gresource prefix="/com/example/myprog">
+ <file>res1.txt</file>
+ <file>subdir/res2.txt</file>
+ <file>res3.txt</file>
+ <file>subdir/res4.txt</file>
+ </gresource>
+</gresources>
--- /dev/null
#!/usr/bin/env python3
"""Load the installed .gresource bundle and verify res1.txt's contents."""
import os
from gi.repository import Gio

if __name__ == '__main__':
    # Bundle path is relative to the build dir the test runs from.
    res = Gio.resource_load(os.path.join('resources', 'simple-resources.gresource'))
    Gio.Resource._register(res)

    data = Gio.resources_lookup_data('/com/example/myprog/res1.txt', Gio.ResourceLookupFlags.NONE)
    assert(data.get_data() == b'This is a resource.\n')
--- /dev/null
#include<stdio.h>
#include<string.h>
#include<gio/gio.h>
#include"simple-resources.h"

/* Content of resources-data/res1.txt as listed in simple.gresource.xml. */
#define EXPECTED "This is a resource.\n"

/* Look up res1.txt in the compiled-in resource bundle and check its
 * contents.  Exit codes: 0 ok, 1 failure.
 * NOTE(review): the early-return failure paths leak `data`/`res`; harmless
 * in a short-lived test program. */
int main(int argc, char **argv) {
 GResource *res = simple_resources_get_resource();
 GError *err = NULL;
 GBytes *data = g_resources_lookup_data("/com/example/myprog/res1.txt",
 G_RESOURCE_LOOKUP_FLAGS_NONE, &err);

 if(data == NULL) {
 fprintf(stderr, "Data lookup failed: %s\n", err->message);
 return 1;
 }
 if(strcmp(g_bytes_get_data(data, NULL), EXPECTED) != 0) {
 fprintf(stderr, "Resource contents are wrong:\n %s\n",
 (const char*)g_bytes_get_data(data, NULL));
 return 1;
 }
 fprintf(stdout, "All ok.\n");
 g_bytes_unref(data);
 g_resource_unref(res);
 return 0;
}
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<gresources>
+ <gresource prefix="/com/example/myprog">
+ <file>res1.txt</file>
+ <file>subdir/res2.txt</file>
+ </gresource>
+</gresources>
--- /dev/null
+<schemalist>
+ <schema id="com.github.meson" path="/com/github/meson/" gettext-domain="test">
+
+ <key name="greeting" type="s">
+ <default l10n="messages">"Hello"</default>
+ <summary>A greeting</summary>
+ <description>
+ Sample text to test schema compilation
+ </description>
+ </key>
+ </schema>
+</schemalist>
\ No newline at end of file
--- /dev/null
+
# Compile the GSettings schemas in this directory; built by default so the
# test program can load them from the build tree.
compiled = gnome.compile_schemas(build_by_default: true)
install_data('com.github.meson.gschema.xml',
install_dir : 'share/glib-2.0/schemas')

schemaexe = executable('schemaprog', 'schemaprog.c', dependencies : gio)
test('schema test', schemaexe)
--- /dev/null
#include<gio/gio.h>
#include<stdio.h>
#include<string.h>

/* Load the compiled schema from the local "schemas" directory and verify
 * that the 'greeting' key has its declared default value.  Each failure
 * mode returns a distinct non-zero exit code.
 * NOTE(review): error paths exit without unreffing earlier objects;
 * acceptable for a short-lived test program. */
int main(int argc, char **argv) {
 GSettingsSchemaSource *src;
 GSettingsSchema *schema;
 GSettings *settings;
 GVariant *value;

 GError *error = NULL;
 /* TRUE = trusted source: skips schema validity checks. */
 src = g_settings_schema_source_new_from_directory("schemas",
 g_settings_schema_source_get_default(), TRUE, &error);
 if(error) {
 fprintf(stderr, "Fail: %s\n", error->message);
 g_error_free(error);
 return 1;
 }

 schema = g_settings_schema_source_lookup(src, "com.github.meson", FALSE);
 if(!schema) {
 fprintf(stderr, "Could not get schema from source.\n");
 return 2;
 }

 settings = g_settings_new_full(schema, NULL, NULL);
 if(!settings) {
 fprintf(stderr, "Could not get settings object.\n");
 return 3;
 }

 value = g_settings_get_value(settings, "greeting");
 if(!value) {
 fprintf(stderr, "Could not get value from settings.\n");
 return 4;
 }

 /* Default declared in com.github.meson.gschema.xml is "Hello". */
 if(strcmp("Hello", g_variant_get_string(value, NULL)) != 0) {
 fprintf(stderr, "Value of setting is incorrect.\n");
 return 5;
 }
 g_variant_unref(value);
 g_object_unref(settings);
 g_settings_schema_unref(schema);
 g_settings_schema_source_unref(src);
 return 0;
}
--- /dev/null
+%{
+#include <stdlib.h>
+#include "parser.tab.h"
+%}
+
+%%
+("true"|"false") {return BOOLEAN;}
+. { yyerror(); }
--- /dev/null
project('flex and bison', 'c')

# The point of this test is that one generator
# may output headers that are necessary to build
# the sources of a different generator.

flex = find_program('flex')
bison = find_program('bison')

# Lexer generator: one .l input -> one .c output.
lgen = generator(flex,
output : '@PLAINNAME@.yy.c',
arguments : ['-o', '@OUTPUT@', '@INPUT@'])

lfiles = lgen.process('lexer.l')

# Parser generator: one .y input -> .c plus the .tab.h header that the
# flex-generated lexer #includes.
pgen = generator(bison,
output : ['@BASENAME@.tab.c', '@BASENAME@.tab.h'],
arguments : ['@INPUT@', '--defines=@OUTPUT1@', '--output=@OUTPUT0@'])

pfiles = pgen.process('parser.y')

e = executable('pgen', 'prog.c',
lfiles, pfiles)

test('parsertest', e)
+
--- /dev/null
/* Minimal grammar: valid input is exactly one BOOLEAN token. */
%token BOOLEAN

%%
input:
 BOOLEAN { $$ = $1;}
;
--- /dev/null
#include"parser.tab.h"
#include<unistd.h>
#include<sys/types.h>
#include<sys/stat.h>
#include<fcntl.h>
#include<stdio.h>
#include<stdlib.h>

/* Link-time smoke test: only verifies that the flex/bison outputs compile
 * and link; yyparse is deliberately never called (see comment below). */
int main(int argc, char **argv) {
 /*
 int input;
 if(argc != 2) {
 printf("%s <input file>");
 return 1;
 }
 input = open(argv[1], O_RDONLY);
 dup2(input, STDIN_FILENO);
 close(input);
 return yyparse();
 */
 /* We really should test that the
 * generated parser works with input
 * but it froze and I don't want to waste
 * time debugging that. For this test what
 * we care about is that it compiles and links.
 */
 void* __attribute__((unused)) dummy = (void*)yyparse;
 return 0;
}

/* Called by the lexer at end of input; non-zero would mean "more input". */
int yywrap(void) {
 return 0;
}

/* Parser/lexer error callback; aborts the program. */
int yyerror(void) {
 printf("Parse error\n");
 exit(1);
}
--- /dev/null
#pragma once

#include <wx/wx.h>

// Application object; OnInit creates and shows the main frame.
class MyApp: public wxApp
{
public:
    virtual bool OnInit();
};
// Main window with File/Help menus; menu events are routed through the
// event table declared below and defined in wxprog.cpp.
class MyFrame: public wxFrame
{
public:
    MyFrame(const wxString& title, const wxPoint& pos, const wxSize& size);
private:
    void OnHello(wxCommandEvent& event);
    void OnExit(wxCommandEvent& event);
    void OnAbout(wxCommandEvent& event);
    wxDECLARE_EVENT_TABLE();
};

// Custom menu-item ID for the Hello entry (wxID_* covers the others).
enum {
    ID_Hello = 1
};
--- /dev/null
project('wxwidgets test', 'cpp', default_options : ['cpp_std=c++11'])

# First lookup uses a version that is not expected to exist, checking that
# a failed optional dependency lookup is harmless; the second overwrites it
# with the real (optional) lookup.
wxd = dependency('wxwidgets', version : '>=5', required : false)
wxd = dependency('wxwidgets', version : '>=3.0.0', required : false)

if wxd.found()
 wp = executable('wxprog', 'wxprog.cpp', dependencies : wxd)

 test('wxtest', wp)
endif
--- /dev/null
#include"mainwin.h"

// Route menu events to the MyFrame handlers declared in mainwin.h.
wxBEGIN_EVENT_TABLE(MyFrame, wxFrame)
EVT_MENU(ID_Hello, MyFrame::OnHello)
EVT_MENU(wxID_EXIT, MyFrame::OnExit)
EVT_MENU(wxID_ABOUT, MyFrame::OnAbout)
wxEND_EVENT_TABLE()

// Create and show the main frame (only used when wxIMPLEMENT_APP is active;
// see the #if 0 block at the bottom).
bool MyApp::OnInit() {
 MyFrame *frame = new MyFrame("Hello World", wxPoint(50, 50), wxSize(450, 340));
 frame->Show( true );
 return true;
}

// Build the menu bar (File with Hello/Exit, Help with About) and status bar.
MyFrame::MyFrame(const wxString& title, const wxPoint& pos, const wxSize& size)
 : wxFrame(NULL, wxID_ANY, title, pos, size) {
 wxMenu *menuFile = new wxMenu;
 menuFile->Append(ID_Hello, "&Hello...\tCtrl-H",
 "Help string shown in status bar for this menu item");
 menuFile->AppendSeparator();
 menuFile->Append(wxID_EXIT);
 wxMenu *menuHelp = new wxMenu;
 menuHelp->Append(wxID_ABOUT);
 wxMenuBar *menuBar = new wxMenuBar;
 menuBar->Append(menuFile, "&File");
 menuBar->Append(menuHelp, "&Help");
 SetMenuBar(menuBar);
 CreateStatusBar();
 SetStatusText("This is status." );
}

void MyFrame::OnExit(wxCommandEvent& event) {
 Close( true );
}

void MyFrame::OnAbout(wxCommandEvent& event) {
 //wxMessageBox("Some text", wxOK | wxICON_INFORMATION);
}

void MyFrame::OnHello(wxCommandEvent& event) {
 wxLogMessage("Some more text.");
}

// A real wx app entry point would open a window; this test must run
// headless, so a plain main() that only constructs wx value types is used.
#if 0
wxIMPLEMENT_APP(MyApp);
#else
// Don't open a window because this is an unit test and needs to
// run headless.
int main(int, char **) {
 wxString name("Some app");
 wxPoint p(0, 0);
 wxSize s(100, 100);
 return 0;
}

#endif
--- /dev/null
package com.mesonbuild;

/** Smoke test: prints a fixed message to prove the Java toolchain works.
 *  Note the literal ends with '\n', so println emits a trailing blank line. */
class Simple {
    public static void main(String [] args) {
        System.out.println("Java is working.\n");
    }
}
--- /dev/null
+usr/bin/myprog.jar
--- /dev/null
project('simplejava', 'java')

# Build a runnable jar with its entry point set, install it into bindir,
# and run it as the test.
javaprog = jar('myprog', 'com/mesonbuild/Simple.java',
 main_class : 'com.mesonbuild.Simple',
 install : true,
 install_dir : get_option('bindir'))
test('mytest', javaprog)
--- /dev/null
project('subdirjava', 'java')

# All targets live in the subdirectory's meson.build.
subdir('sub')
--- /dev/null
package com.mesonbuild;

/** Entry point: exercises a second class (TextPrinter) compiled into the
 *  same jar from a subdirectory build. */
class Simple {
    public static void main(String [] args) {
        TextPrinter t = new TextPrinter("Printing from Java.");
        t.print();
    }
}
--- /dev/null
package com.mesonbuild;

/**
 * Wraps a single text message and prints it to standard output on demand.
 */
class TextPrinter {

    /** The message supplied at construction time. */
    private final String text;

    TextPrinter(String s) {
        this.text = s;
    }

    /** Write the stored message, followed by a newline, to System.out. */
    public void print() {
        System.out.println(this.text);
    }
}
--- /dev/null
# Jar built from sources in a subdirectory; include_directories('.') makes
# the package root resolvable from here.
javaprog = jar('myprog',
 'com/mesonbuild/Simple.java',
 'com/mesonbuild/TextPrinter.java',
 main_class : 'com.mesonbuild.Simple',
 include_directories : include_directories('.'))
test('subdirtest', javaprog)
--- /dev/null
+package com.mesonbuild;
+
+class Simple {
+ public static void main(String [] args) {
+ System.out.println("Java is working.\n");
+ }
+}
--- /dev/null
project('simplejava', 'java')

# Project-wide javac flag; paired with the per-target '-source' flag below
# so both halves of the -source/-target pair are exercised.
add_project_arguments('-target', '1.6', language : 'java')

javaprog = jar('myprog', 'com/mesonbuild/Simple.java',
 main_class : 'com.mesonbuild.Simple',
 java_args : ['-source', '1.6'])
test('mytest', javaprog)
+
--- /dev/null
package com.mesonbuild;

/** Exercises a non-static inner class, which the compiler emits as a
 *  separate Simple$Inner class file that must land in the jar. */
class Simple {
    class Inner {
        public String getString() {
            return "Inner class is working.\n";
        }
    }

    public static void main(String [] args) {
        Simple s = new Simple();
        // Inner is non-static, so it must be instantiated via an outer instance.
        Simple.Inner ic = s.new Inner();
        System.out.println(ic.getString());
    }
}
--- /dev/null
project('simplejava', 'java')

# The source contains an inner class; this checks the resulting
# Simple$Inner class file is packaged correctly.
javaprog = jar('myprog', 'com/mesonbuild/Simple.java',
 main_class : 'com.mesonbuild.Simple')
test('mytest', javaprog)
--- /dev/null
#pragma once

/* Wrapper header found only via include_directories('incdir'); re-exports
 * zlib so the cc.run() snippet can test dep + include dir together. */
#include<zlib.h>
--- /dev/null
project('external dependency', 'c')

# Zlib is probably on all dev machines.

dep = dependency('zlib', version : '>=1.2')
# Pass the pkg-config-reported version into the program so it can compare
# against ZLIB_VERSION from the header at runtime.
exe = executable('zlibprog', 'prog-checkver.c',
 dependencies : dep,
 c_args : '-DFOUND_ZLIB="' + dep.version() + '"')

assert(dep.version().version_compare('>=1.2'), 'Pkg-config version numbers exposed incorrectly.')

# Check that the version exposed by zlib internally is the same as the one we
# retrieve from the pkg-config file. This assumes that the packager didn't mess
# up, but we can be reasonably sure of that.
test('zlibtest', exe)

zprefix = dep.get_pkgconfig_variable('prefix') # Always set but we can't be sure what the value is.
# pkg-config returns empty string for not defined variables
assert(dep.get_pkgconfig_variable('nonexisting') == '', 'Value of unknown variable is not empty.')
# pkg-config is able to replace variables
assert(dep.get_pkgconfig_variable('prefix', define_variable: ['prefix', '/tmp']) == '/tmp', 'prefix variable has not been replaced.')

# Test that dependencies of dependencies work.
dep2 = declare_dependency(dependencies : dep)
exe2 = executable('zlibprog2', 'prog.c', dependencies : dep2)
test('zlibtest2', exe2)

# Try to find a nonexistent library to ensure requires:false works.

dep = dependency('nvakuhrabnsdfasdf', required : false)

# Try to compile a test that takes a dep and an include_directories

cc = meson.get_compiler('c')
zlibdep = cc.find_library('z')
code = '''#include<myinc.h>

int main(int argc, char **argv) {
 void * something = deflate;
 if(something != 0)
 return 0;
 return 1;
}
'''

inc = include_directories('incdir')

# myinc.h lives in incdir and itself includes zlib.h, so this exercises
# cc.run() with both an include directory and a library dependency.
r = cc.run(code, include_directories : inc, dependencies : zlibdep)
assert(r.returncode() == 0, 'Running manual zlib test failed.')
--- /dev/null
#include <zlib.h>
#include <stdio.h>
#include <string.h>

/* FOUND_ZLIB is injected by the build (-DFOUND_ZLIB="...") with the version
 * pkg-config reported; compare it against the header's ZLIB_VERSION and
 * check the deflate symbol resolves.  Exit: 0 ok, 1 no deflate, 2 mismatch. */
int main(int argc, char **argv) {
 void * something = deflate;
 if(strcmp(ZLIB_VERSION, FOUND_ZLIB) != 0) {
 printf("Meson found '%s' but zlib is '%s'\n", FOUND_ZLIB, ZLIB_VERSION);
 return 2;
 }
 if(something != 0)
 return 0;
 printf("Couldn't find 'deflate'\n");
 return 1;
}
--- /dev/null
+#include<zlib.h>
+
+int main(int argc, char **argv) {
+ void * something = deflate;
+ if(something != 0)
+ return 0;
+ return 1;
+}
--- /dev/null
project('trivial test', 'c')

if host_machine.system() == 'cygwin'
 error('MESON_SKIP_TEST _FILE_OFFSET_BITS not yet supported on Cygwin.')
endif

cc = meson.get_compiler('c')

# With large-file support enabled, off_t must be 64-bit even on 32-bit hosts.
size = cc.sizeof('off_t')
assert(size == 8, 'off_t size is @0@ bytes instead of 8'.format(size))

# Double-check via the preprocessor that _FILE_OFFSET_BITS=64 is in effect.
code = '''#if !defined(_FILE_OFFSET_BITS) || (_FILE_OFFSET_BITS != 64)
#error "Large-file support was not enabled"
#endif'''

assert(cc.compiles(code, name : 'checking for LFS'), 'Large file support was not enabled')
--- /dev/null
/* RET_VALUE is injected per-target via c_args (-DRET_VALUE=n), so each
 * library built from this one source reports a distinct value. */
int some_symbol (void) {
 return RET_VALUE;
}
--- /dev/null
# libtesteh.so reporting 1 — the value main.c expects.
lib1 = shared_library('testeh', libsrc,
 c_args : '-DRET_VALUE=1')
--- /dev/null
# name_prefix 'libt' + 'esteh' yields the same file name as lib1's
# libtesteh, but in a different directory and reporting 2 — apparently so
# LD_LIBRARY_PATH resolution between the two can be observed.
lib2 = shared_library('esteh', libsrc,
 name_prefix : 'libt',
 c_args : '-DRET_VALUE=2')
--- /dev/null
#include <stdio.h>

int some_symbol (void);

/* Exit 0 when the library linked at runtime reports the expected value 1;
 * otherwise report the value seen and fail. */
int main (int argc, char *argv[]) {
  const int value = some_symbol ();
  if (value != 1) {
    fprintf (stderr, "ret was %i instead of 1\n", value);
    return -1;
  }
  return 0;
}
--- /dev/null
project('runpath rpath ldlibrarypath', 'c')

# Whole test is disabled; keep the skip before any targets are defined.
error('MESON_SKIP_TEST test disabled due to bug #1635.')

libsrc = files('lib.c')

# Each subdir builds a library from the same source with a different
# RET_VALUE (see lib1/lib2 meson.build files).
subdir('lib1')
subdir('lib2')

lib2dir = meson.current_build_dir() + '/lib2'

e = executable('testexe', 'main.c',
 link_with : lib1)

# Point LD_LIBRARY_PATH at lib2's directory while linking against lib1.
test('ld-library-path-test', e,
 env : ['LD_LIBRARY_PATH=' + lib2dir])
--- /dev/null
project('external library', 'c')

cc = meson.get_compiler('c')
zlib = cc.find_library('z')

# Verify that link testing works.
linkcode = '''#include<zlib.h>
int main(int argc, char **argv) {
 void *ptr = (void*)(deflate);
 return ptr == 0;
}
'''

# References an undefined symbol, so linking must fail.
nolinkcode='''int nonexisting();
int main(int argc, char **argv) {
 return nonexisting();
}
'''

assert(cc.links(linkcode, args : '-lz', name : 'Test link against zlib'), 'Linking test failed.')
assert(not cc.links(nolinkcode, name : 'Failing link'), 'Linking succeeded when it should have failed.')

e = executable('zprog', 'prog.c', dependencies : zlib)
test('libtest', e)

e2 = executable('zprog_alt', 'prog.c', dependencies : zlib)
test('libtest_alt', e2)

# Test that ext deps work via an internal dep.
intdep = declare_dependency(dependencies : zlib)
exe2 = executable('zprog2', 'prog.c', dependencies : intdep)
test('libtest2', exe2)

# Test that deps that use find_library deps work.
depdep = declare_dependency(dependencies : intdep)
exe3 = executable('zprog3', 'prog.c', dependencies : depdep)
test('libtest3', exe3)
--- /dev/null
#include<zlib.h>

/* Link smoke test: exit 0 iff zlib's deflate symbol resolves. */
int main(int argc, char **argv) {
 void * something = deflate;
 if(something != 0)
 return 0;
 return 1;
}
--- /dev/null
+#include"bob.h"
+
+int hiddenFunction() {
+ return 42;
+}
+
+int bobMcBob() {
+ return hiddenFunction();
+}
--- /dev/null
#ifndef BOB_H_
#define BOB_H_

/* The only symbol exported by libbob; always returns 42 (see bob.c). */
int bobMcBob();

#endif
--- /dev/null
+V1_0_0 {
+ global:
+ "bobMcBob";
+ local:
+ *;
+};
--- /dev/null
+V1_0_0 {
+ global:
+ "@in@";
+ local:
+ *;
+};
--- /dev/null
"""Helper script: copy the file named by argv[1] to the path in argv[2]."""
import shutil
import sys

if __name__ == '__main__':
    source, destination = sys.argv[1], sys.argv[2]
    shutil.copy(source, destination)
--- /dev/null
project('linker script', 'c')

# Static map file
mapfile = 'bob.map'
vflag = '-Wl,--version-script,@0@/@1@'.format(meson.current_source_dir(), mapfile)

# link_depends makes ninja re-link when the map file changes.
l = shared_library('bob', 'bob.c', link_args : vflag, link_depends : mapfile)
e = executable('prog', 'prog.c', link_with : l)
test('core', e)

# configure_file
conf = configuration_data()
conf.set('in', 'bobMcBob')
m = configure_file(
 input : 'bob.map.in',
 output : 'bob-conf.map',
 configuration : conf,
)
vflag = '-Wl,--version-script,@0@'.format(m)

l = shared_library('bob-conf', 'bob.c', link_args : vflag, link_depends : m)
e = executable('prog-conf', 'prog.c', link_with : l)
test('core', e)

# custom_target
python = find_program('python3')
m = custom_target(
 'bob-ct.map',
 command : [python, '@INPUT0@', '@INPUT1@', 'bob-ct.map'],
 input : ['copy.py', 'bob.map'],
 output : 'bob-ct.map',
 depend_files : 'bob.map',
)
vflag = '-Wl,--version-script,@0@'.format(m.full_path())

# The custom target output is also listed as a source so it is built
# before the library links against it.
l = shared_library('bob-ct', ['bob.c', m], link_args : vflag, link_depends : m)
e = executable('prog-ct', 'prog.c', link_with : l)
test('core', e)

# File
mapfile = files('bob.map')
vflag = '-Wl,--version-script,@0@/@1@'.format(meson.current_source_dir(), mapfile[0])

l = shared_library('bob-files', 'bob.c', link_args : vflag, link_depends : mapfile)
e = executable('prog-files', 'prog.c', link_with : l)
test('core', e)

subdir('sub')

# With map file in subdir
mapfile = 'sub/foo.map'
vflag = '-Wl,--version-script,@0@/@1@'.format(meson.current_source_dir(), mapfile)

l = shared_library('bar', 'bob.c', link_args : vflag, link_depends : mapfile)
e = executable('prog-bar', 'prog.c', link_with : l)
test('core', e)
--- /dev/null
+#include"bob.h"
+
+int main(int argc, char **argv) {
+ return bobMcBob() != 42;
+}
--- /dev/null
+V1_0_0 {
+ global:
+ "bobMcBob";
+ local:
+ *;
+};
--- /dev/null
# Same version-script pattern as the parent dir, but with the map file
# referenced from a subdirectory build file.
mapfile = 'foo.map'
vflag = '-Wl,--version-script,@0@/@1@'.format(meson.current_source_dir(), mapfile)

l = shared_library('foo', '../bob.c', link_args : vflag, link_depends : mapfile)
e = executable('prog-foo', '../prog.c', link_with : l)
test('core', e)
--- /dev/null
#include<zlib.h>

/* Returns 0 iff zlib's deflate symbol resolves; compiled into a static
 * library whose zlib dependency must propagate to the final link. */
int statlibfunc() {
 void * something = deflate;
 if(something != 0)
 return 0;
 return 1;
}
--- /dev/null
project('external dependency with static', 'c')

# Zlib is probably on all dev machines.

# The external dep is attached to the static library only; linking the
# executable must still pull in zlib transitively.
dep = dependency('zlib')
statlib = static_library('statlib', 'lib.c', dependencies : dep)
exe = executable('prog', 'prog.c', link_with : statlib)


test('zlibtest', exe)
--- /dev/null
int statlibfunc();

/* Delegate entirely to the static library; its return value (0 on
 * success) becomes the process exit status. */
int main(int argc, char **argv) {
    const int status = statlibfunc();
    return status;
}
--- /dev/null
project('dep versions', 'c', 'cpp')

# Find external dependency without version
zlib = dependency('zlib')
# Find external dependency with version
zlibver = dependency('zlib', version : '>1.0')
assert(zlib.version() == zlibver.version(), 'zlib versions did not match!')
# Find external dependency with conflicting version
assert(zlib.type_name() == 'pkgconfig', 'zlib should be of type "pkgconfig" not ' + zlib.type_name())
zlibver = dependency('zlib', version : '<1.0', required : false)
assert(zlibver.found() == false, 'zlib <1.0 should not be found!')

# Find external dependencies with various version restrictions
dependency('zlib', version : '>=1.0')
dependency('zlib', version : '<=9999')
dependency('zlib', version : '=' + zlib.version())

# Find external dependencies with multiple version restrictions
dependency('zlib', version : ['>=1.0', '<=9999'])
if dependency('zlib', version : ['<=1.0', '>=9999', '=' + zlib.version()], required : false).found()
 error('zlib <=1.0 >=9999 should not have been found')
endif

# Test that a versionless zlib is found after not finding an optional zlib dep with version reqs
zlibopt = dependency('zlib', required : false)
assert(zlibopt.found() == true, 'zlib not found')

# Test https://github.com/mesonbuild/meson/pull/610
dependency('somebrokenlib', version : '>=2.0', required : false)
dependency('somebrokenlib', version : '>=1.0', required : false)

# Search for an external dependency that won't be found, but must later be
# found via fallbacks
somelibnotfound = dependency('somelib', required : false)
assert(somelibnotfound.found() == false, 'somelibnotfound was found?')
# Find internal dependency without version
somelibver = dependency('somelib',
 fallback : ['somelibnover', 'some_dep'])
assert(somelibver.type_name() == 'internal', 'somelibver should be of type "internal", not ' + somelibver.type_name())
# Find an internal dependency again with the same name and a specific version
somelib = dependency('somelib',
 version : '== 0.1',
 fallback : ['somelib', 'some_dep'])
# Find an internal dependency again with the same name and incompatible version
somelibver = dependency('somelib',
 version : '>= 0.3',
 fallback : ['somelibver', 'some_dep'])
# Find somelib again, but with a fallback that will fail because subproject does not exist
somelibfail = dependency('somelib',
 version : '>= 0.2',
 required : false,
 fallback : ['somelibfail', 'some_dep'])
assert(somelibfail.found() == false, 'somelibfail found via wrong fallback')
# Find somelib again, but with a fallback that will fail because dependency does not exist
somefail_dep = dependency('somelib',
 version : '>= 0.2',
 required : false,
 fallback : ['somelib', 'somefail_dep'])
assert(somefail_dep.found() == false, 'somefail_dep found via wrong fallback')

# Fallback should only be used if the primary was not found
fallbackzlib_dep = dependency('zlib',
 fallback : ['somelib', 'fakezlib_dep'])
assert(fallbackzlib_dep.type_name() == 'pkgconfig', 'fallbackzlib_dep should be of type "pkgconfig", not ' + fallbackzlib_dep.type_name())
# Check that the above dependency was pkgconfig because the fallback wasn't
# checked, not because the fallback didn't work
fakezlib_dep = dependency('fakezlib',
 fallback : ['somelib', 'fakezlib_dep'])
assert(fakezlib_dep.type_name() == 'internal', 'fakezlib_dep should be of type "internal", not ' + fakezlib_dep.type_name())

# Check that you can find a dependency by not specifying a version after not
# finding it by specifying a version. We add `static: true` here so that the
# previously cached zlib dependencies don't get checked.
dependency('zlib', static : true, version : '>=8000', required : false)
dependency('zlib', static : true)

# Check that you can find a dependency by specifying a correct version after
# not finding it by specifying a wrong one. We add `method: pkg-config` here so that
# the previously cached zlib dependencies don't get checked.
# NOTE(review): the variable name 'bzip2' looks like a copy-paste leftover;
# it actually holds a zlib dependency.
bzip2 = dependency('zlib', method : 'pkg-config', version : '>=9000', required : false)
bzip2 = dependency('zlib', method : 'pkg-config', version : '>=1.0')

if meson.is_cross_build()
 # Test caching of native and cross dependencies
 # https://github.com/mesonbuild/meson/issues/1736
 cross_prefix = dependency('zlib').get_pkgconfig_variable('prefix')
 native_prefix = dependency('zlib', native : true).get_pkgconfig_variable('prefix')
 assert(cross_prefix != '', 'cross zlib prefix is not defined')
 assert(native_prefix != '', 'native zlib prefix is not defined')
 assert(native_prefix != cross_prefix, 'native prefix == cross_prefix == ' + native_prefix)
endif

objc_found = add_languages('objc', required : false)

# Probe a batch of optional system dependencies; only requirement is that
# a found dependency reports a version without erroring.
foreach d : ['sdl2', 'gnustep', 'wxwidgets', 'gl', 'python3', 'boost', 'gtest', 'gmock', 'valgrind']
 if d == 'gnustep' and not objc_found
 message('Skipping gnustep because no ObjC compiler found')
 else
 dep = dependency(d, required : false)
 if dep.found()
 dep.version()
 endif
 endif
endforeach
--- /dev/null
# Define version only in project, should get inherited by declare_dependency
project('some', 'c', version : '0.1')

somelib = shared_library('some', 'lib.c')
someinc = include_directories('.')

some_dep = declare_dependency(link_with : somelib,
 include_directories : someinc)

# Second dependency used by the parent's "fallback not checked" test.
fakezlib_dep = declare_dependency(link_with : somelib,
 include_directories : someinc)
--- /dev/null
project('some', 'c')

somelib = shared_library('some', 'lib.c')
someinc = include_directories('.')

# No version is defined here — neither in project() above nor in
# declare_dependency() below — so the dependency's version is undefined.
some_dep = declare_dependency(link_with : somelib,
 include_directories : someinc)
--- /dev/null
project('some', 'c')

somelib = shared_library('some', 'lib.c')
someinc = include_directories('.')

# Define version only in declare_dependency
some_dep = declare_dependency(link_with : somelib,
 include_directories : someinc,
 version : '0.3')
--- /dev/null
project('subdir include order', 'c')

# Ensure that headers in subdirs override external dependencies
cc = meson.get_compiler('c')

glib = dependency('glib-2.0')

# This will fail to compile if it picks up the system-installed glib instead of
# the glib in the subdir
# (subdir/glib.h defines MESON_OUR_GLIB, which prog.c asserts via #error).
executable('prog', 'prog.c',
 include_directories : include_directories('subdir'),
 dependencies : glib)
--- /dev/null
#include <glib.h>

/* Compilation fails unless the fake subdir/glib.h (which defines
 * MESON_OUR_GLIB) shadowed the real system header. */
#ifndef MESON_OUR_GLIB
#error "Failed"
#endif

int main() { return 0; }
--- /dev/null
/* Stand-in glib.h: defines MESON_OUR_GLIB so prog.c can verify this subdir
 * header was picked up ahead of the system glib header. */
#define MESON_OUR_GLIB 1
--- /dev/null
int myFunc (void);

/* Exit 0 when the linked library's myFunc() yields the expected value 55,
 * exit 1 otherwise. */
int
main (int argc, char *argv[])
{
  return (myFunc () == 55) ? 0 : 1;
}
--- /dev/null
+usr/lib/libsome.so
+usr/lib/libsome.so.0
+usr/lib/libsome.so.1.2.3
+usr/lib/libnoversion.so
+usr/lib/libonlyversion.so
+usr/lib/libonlyversion.so.1
+usr/lib/libonlyversion.so.1.4.5
+usr/lib/libonlysoversion.so
+usr/lib/libonlysoversion.so.5
+usr/lib/libmodule.so
--- /dev/null
/* Sole library entry point: always reports the sentinel value 55 that the
 * manually-linked test executables check for. */
int myFunc() {
    const int sentinel = 55;
    return sentinel;
}
--- /dev/null
project('library versions', 'c')

if host_machine.system() == 'cygwin'
 error('MESON_SKIP_TEST linuxlike soversions not supported on Cygwin.')
endif

# Full version + explicit soversion.
some = shared_library('some', 'lib.c',
 version : '1.2.3',
 soversion : '0',
 install : true)

# Neither version nor soversion.
noversion = shared_library('noversion', 'lib.c',
 install : true)

# Version only; soversion is derived from it.
onlyversion = shared_library('onlyversion', 'lib.c',
 version : '1.4.5',
 install : true)

onlysoversion = shared_library('onlysoversion', 'lib.c',
 # Also test that int soversion is acceptable
 soversion : 5,
 install : true)

# Hack to make the executables below depend on the shared libraries above
# without actually adding them as `link_with` dependencies since we want to try
# linking to them with -lfoo linker arguments.
out = custom_target('library-dependency-hack',
 input : 'exe.orig.c',
 output : 'exe.c',
 depends : [some, noversion, onlyversion, onlysoversion],
 command : ['cp', '@INPUT@', '@OUTPUT@'])

# Need to add this manually because Meson can't add it automatically because
# it doesn't know that we are linking to libraries in the build directory.
rpath_dir = meson.current_build_dir()

# Manually test if the linker can find the above libraries
# i.e., whether they were generated with the right naming scheme
test('manually linked 1', executable('manuallink1', out,
 link_args : ['-L.', '-lsome'],
 build_rpath : rpath_dir))

test('manually linked 2', executable('manuallink2', out,
 link_args : ['-L.', '-lnoversion'],
 build_rpath : rpath_dir))

test('manually linked 3', executable('manuallink3', out,
 link_args : ['-L.', '-lonlyversion'],
 build_rpath : rpath_dir))

test('manually linked 4', executable('manuallink4', out,
 link_args : ['-L.', '-lonlysoversion'],
 build_rpath : rpath_dir))

# Modules get no version suffixes at all; see the installed-files list.
shared_module('module', 'lib.c', install : true)
--- /dev/null
+usr/lib/libsublib.so
+usr/lib/libsublib.so.5
+usr/lib/libsublib.so.2.1.0
--- /dev/null
project('subproj lib install', 'c',
 version : '2.3.4',
 license : 'mylicense')

if host_machine.system() == 'cygwin'
 error('MESON_SKIP_TEST linuxlike soversions not supported on Cygwin.')
endif

# Test that the subproject library gets installed
subproject('sublib', version : '1.0.0')
--- /dev/null
#ifndef SUBDEFS_H_
#define SUBDEFS_H_

/* DLL_PUBLIC marks exported symbols: dllexport/dllimport on Windows and
 * Cygwin (direction chosen by BUILDING_SUB, set by the library's c_args),
 * default visibility with GCC elsewhere, and a no-op otherwise. */
#if defined _WIN32 || defined __CYGWIN__
#if defined BUILDING_SUB
 #define DLL_PUBLIC __declspec(dllexport)
#else
 #define DLL_PUBLIC __declspec(dllimport)
#endif
#else
 #if defined __GNUC__
 #define DLL_PUBLIC __attribute__ ((visibility("default")))
 #else
 #pragma message ("Compiler does not support symbol visibility.")
 #define DLL_PUBLIC
 #endif
#endif

/* The library's only exported function; returns 42 (see sublib.c). */
int DLL_PUBLIC subfunc();

#endif
--- /dev/null
project('subproject', 'c',
 version : '1.0.0',
 license : ['sublicense1', 'sublicense2'])

# BUILDING_SUB selects the dllexport branch of DLL_PUBLIC in subdefs.h.
i = include_directories('include')
shared_library('sublib', 'sublib.c',
 version : '2.1.0',
 soversion : 5,
 include_directories : i, install : true,
 c_args : '-DBUILDING_SUB=2')
--- /dev/null
#include<subdefs.h>

/* The single exported function of the installed subproject library. */
int DLL_PUBLIC subfunc() {
 return 42;
}
--- /dev/null
project('compiler checks with dependencies', 'c')

cc = meson.get_compiler('c')

# Every compiler check below passes `dependencies :` so the dep's compile
# and link flags are applied during the check.
glib = dependency ('glib-2.0')
if glib.found()
 assert (cc.has_header('glib.h', dependencies : glib), 'glib.h not found')
 assert (cc.has_type('gint32', prefix : '#include <glib.h>', dependencies : glib), 'gint32 not found')
 assert (cc.has_function('g_print', dependencies : glib), 'g_print not found')
 assert (cc.has_member('GError', 'message', prefix : '#include <glib.h>', dependencies : glib), 'GError::message not found')
 assert (cc.has_header_symbol('glib.h', 'gint32', dependencies : glib), 'gint32 symbol not found')
 linkcode = '''#include <glib.h>
int main (int argc, char *argv[]) {
 GError *error = g_error_new_literal (0, 0, NULL);
 return error == NULL;
}
 '''
 assert (cc.links(linkcode, dependencies : glib, name : 'Test link against glib'), 'Linking test against glib failed')
endif

# Same checks against a find_library() result instead of a pkg-config dep.
zlib = cc.find_library ('z')
if zlib.found()
 linkcode = '''#include<zlib.h>
int main(int argc, char *argv[]) {
 void *ptr = (void*)(deflate);
 return ptr == 0;
}
'''
 assert (cc.has_function('deflate', prefix : '#include<zlib.h>', dependencies : zlib, name : 'Test for function in zlib'), 'has_function test failed.')
 assert (cc.links(linkcode, dependencies : zlib, name : 'Test link against zlib'), 'Linking test failed against zlib.')
endif

# And against the special 'threads' dependency.
assert(cc.has_function('pthread_create',
 dependencies : dependency('threads'),
 prefix : '#include <pthread.h>'),
 'Could not detect pthread_create with a thread dependency.')
+
+
--- /dev/null
+project('objective c', 'objc')
+
+exe = executable('prog', 'prog.m')
+test('objctest', exe)
--- /dev/null
+#import<stdio.h>
+
+int main(int argc, char **argv) {
+ return 0;
+}
\ No newline at end of file
--- /dev/null
+project('nsstring', 'objc')
+
+if host_machine.system() == 'darwin'
+ dep = dependency('appleframeworks', modules : 'foundation')
+elif host_machine.system() == 'cygwin'
+ error('MESON_SKIP_TEST GNUstep is not packaged for Cygwin.')
+else
+ dep = dependency('gnustep')
+ if host_machine.system() == 'linux' and meson.get_compiler('objc').get_id() == 'clang'
+ error('MESON_SKIP_TEST: GNUstep is broken on Linux with Clang')
+ endif
+endif
+exe = executable('stringprog', 'stringprog.m', dependencies : dep)
+test('stringtest', exe)
--- /dev/null
+#import<Foundation/NSString.h>
+
+/* Exit status is the length of a freshly created NSString — an empty
+ string — so a working Foundation/GNUstep setup exits 0. Uses manual
+ reference counting (no ARC assumed). */
+int main(int argc, char **argv) {
+ int result;
+ NSString *str = [NSString new];
+ result = [str length];
+ [str release];
+ return result;
+}
+
--- /dev/null
+project('Objective C++', 'objcpp')
+
+exe = executable('objcppprog', 'prog.mm')
+test('objcpp', exe)
--- /dev/null
+#import<stdio.h>
+
+class MyClass {
+};
+
+int main(int argc, char **argv) {
+ return 0;
+}
+
--- /dev/null
+project('objective c args', 'objc')
+
+exe = executable('prog', 'prog.m', objc_args : ['-DMESON_TEST'])
+test('objective c args', exe)
--- /dev/null
+#import<stdio.h>
+
+/* x is only declared when MESON_TEST is passed via objc_args, so this
+ file deliberately fails to compile unless the per-language args were
+ actually applied to Objective-C sources. */
+int main(int argc, char **argv)
+{
+#ifdef MESON_TEST
+ int x = 3;
+#endif
+
+ printf("x = %d\n", x);
+ return 0;
+}
--- /dev/null
+project('objective c++ args', 'objcpp')
+
+exe = executable('prog', 'prog.mm', objcpp_args : ['-DMESON_OBJCPP_TEST'])
+test('objective c++ args', exe)
--- /dev/null
+#import<stdio.h>
+
+class TestClass
+{
+};
+
+/* x is only declared when MESON_OBJCPP_TEST is passed via objcpp_args, so
+ this file deliberately fails to compile unless the per-language args were
+ actually applied to Objective-C++ sources. */
+int main(int argc, char **argv)
+{
+#ifdef MESON_OBJCPP_TEST
+int x = 1;
+#endif
+
+ printf("x = %x\n", x);
+
+ return 0;
+}
--- /dev/null
+#include <CoreFoundation/CoreFoundation.h>
+
+int main(int argc, char **argv) {
+ return 0;
+}
--- /dev/null
+project('osx fundamentals', 'c')
+e = executable('prog', 'main.c')
+test('basic', e)
--- /dev/null
+cmake_minimum_required(VERSION 3.6.0)
+project(dylibversion C)
+
+# This file is here for debugging purposes to easily compare how
+# CMake does it.
+
+# libnoversion.dylib
+add_library(noversion SHARED lib.c)
+
+# libonlysoversion.dylib -> libonlysoversion.5.dylib
+# libonlyversion.1.4.5.dylib
+# -current_version 1.4.5
+
+add_library(onlyversion SHARED lib.c)
+set_target_properties(onlyversion PROPERTIES VERSION 1.4.5)
+
+# libonlysoversion.6.dylib
+# -compatibility_version 6.0.0
+
+add_library(onlysoversion SHARED lib.c)
+set_target_properties(onlysoversion PROPERTIES SOVERSION 6)
+
+# libsome.1.4.5.dylib
+# libsome.6.dylib -> libsome.1.4.5.dylib
+# libsome.dylib -> libsome.6.dylib
+# -current_version 1.4.5 -compatibility_version 5.0.0
+
+add_library(some SHARED lib.c)
+set_target_properties(some PROPERTIES VERSION 1.4.5 SOVERSION 6)
--- /dev/null
+int myFunc (void);
+
+/* Exit 0 iff the manually linked library's myFunc returns the expected 55. */
+int main (int argc, char *argv[])
+{
+ return myFunc () == 55 ? 0 : 1;
+}
--- /dev/null
+usr/lib/libsome.dylib
+usr/lib/libsome.0.dylib
+usr/lib/libnoversion.dylib
+usr/lib/libonlyversion.dylib
+usr/lib/libonlyversion.1.dylib
+usr/lib/libonlysoversion.dylib
+usr/lib/libonlysoversion.5.dylib
+usr/lib/libmodule.dylib
--- /dev/null
+/* Library payload for the manual-link tests; declared as
+ `int myFunc (void);` in exe.orig.c, so define it with the matching
+ (void) prototype instead of an old-style empty parameter list. */
+int myFunc(void) {
+ return 55;
+}
--- /dev/null
+# Exercise every version/soversion combination to verify the generated
+# file names and symlink aliases.
+project('library versions', 'c')
+
+some = shared_library('some', 'lib.c',
+ version : '1.2.3',
+ soversion : '0',
+ install : true)
+
+noversion = shared_library('noversion', 'lib.c',
+ install : true)
+
+onlyversion = shared_library('onlyversion', 'lib.c',
+ version : '1.4.5',
+ install : true)
+
+onlysoversion = shared_library('onlysoversion', 'lib.c',
+ # Also test that int soversion is acceptable
+ soversion : 5,
+ install : true)
+
+# Hack to make the executables below depend on the shared libraries above
+# without actually adding them as `link_with` dependencies since we want to try
+# linking to them with -lfoo linker arguments.
+out = custom_target('library-dependency-hack',
+ input : 'exe.orig.c',
+ output : 'exe.c',
+ depends : [some, noversion, onlyversion, onlysoversion],
+ command : ['cp', '@INPUT@', '@OUTPUT@'])
+
+# Manually test if the linker can find the above libraries
+# i.e., whether they were generated with the right naming scheme
+test('manually linked 1', executable('manuallink1', out,
+ link_args : ['-L.', '-lsome'],
+ build_rpath : meson.current_build_dir()))
+
+test('manually linked 2', executable('manuallink2', out,
+ link_args : ['-L.', '-lnoversion'],
+ build_rpath : meson.current_build_dir()))
+
+test('manually linked 3', executable('manuallink3', out,
+ link_args : ['-L.', '-lonlyversion'],
+ build_rpath : meson.current_build_dir()))
+
+test('manually linked 4', executable('manuallink4', out,
+ link_args : ['-L.', '-lonlysoversion'],
+ build_rpath : meson.current_build_dir()))
+
+# A loadable module: no versioned aliases, but still installable.
+shared_module('module', 'lib.c', install : true)
--- /dev/null
+project('has function xcode8', 'c')
+
+cc = meson.get_compiler('c')
+
+# Xcode 8 location for the macOS 10.12 SDK
+sdk_args = ['-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk']
+args_10_11 = ['-mmacosx-version-min=10.11'] + sdk_args
+args_10_12 = ['-mmacosx-version-min=10.12'] + sdk_args
+
+# Xcode 9 location for the macOS 10.13 SDK
+sdk_args = ['-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk']
+args_10_12 = ['-mmacosx-version-min=10.13'] + sdk_args
+
+# NOTE(review): the two assignments above unconditionally overwrite sdk_args
+# and args_10_12 set for Xcode 8, so the branch below actually uses the
+# 10.13 SDK arguments even on Xcode 8 — confirm this is intended.
+
+# Test requires Xcode 8 which has the MacOSX 10.12 SDK
+if cc.version().version_compare('>=8.0') and cc.version().version_compare('<8.1')
+ if cc.has_function('clock_gettime', args : args_10_11, prefix : '#include <time.h>')
+ error('Should not have found clock_gettime via <time.h> when targeting Mac OS X 10.11')
+ endif
+ if not cc.has_function('clock_gettime', args : args_10_12, prefix : '#include <time.h>')
+ error('Did NOT find clock_gettime via <time.h> when targeting Mac OS X 10.12')
+ endif
+ if not cc.has_function('clock_gettime', args : args_10_11)
+ error('Did NOT find clock_gettime w/o a prototype when targeting Mac OS X 10.11')
+ endif
+ if not cc.has_function('clock_gettime', args : args_10_12)
+ error('Did NOT find clock_gettime w/o a prototype when targeting Mac OS X 10.12')
+ endif
+else
+ error('MESON_SKIP_TEST Test needs XCode 8.')
+endif
--- /dev/null
+# Primitive test for adding frameworks in XCode
+# When opening the xcodeproj, the Folder "Frameworks" should contain two frameworks (OpenGL.framework and Foundation.framework)
+# "Target Membership" of ...
+# - OpenGL.framework should be only to prog@exe
+# - Foundation.framework should be only to stat@sta
+# "Build Phase" / "Link Binary with Libraries" for the target
+# - "prog@exe" should be only "Foundation.framework"
+# - "stat@sta" should be only "OpenGL.framework"
+# see "xcode-frameworks.png" for an example
+
+project('xcode framework test', 'c', default_options : ['libdir=libtest'])
+
+dep_libs = dependency('appleframeworks', modules : ['OpenGL'], required : false)
+if not dep_libs.found()
+ error('OpenGL framework not found')
+endif
+assert(dep_libs.type_name() == 'appleframeworks', 'type_name is wrong')
+
+dep_main = dependency('appleframeworks', modules : ['Foundation'])
+
+stlib = static_library('stat', 'stat.c', install : true, dependencies: dep_libs)
+exe = executable('prog', 'prog.c', install : true, dependencies: dep_main)
+
--- /dev/null
+int main(int argc, char **argv) {
+ return 0;
+}
--- /dev/null
+/* Prebuilt-shared-library payload; use a (void) prototype rather than an
+ old-style empty parameter list. Callers check for the value 933. */
+int func(void) { return 933; }
--- /dev/null
+def gluoninate():
+ return 42
--- /dev/null
+project('python sample', 'c')
+
+py3_mod = import('python3')
+py3 = py3_mod.find_python()
+
+py3_version = py3_mod.language_version()
+if py3_version.version_compare('< 3.2')
+ error('Invalid python version!?')
+endif
+
+py3_purelib = py3_mod.sysconfig_path('purelib')
+if not py3_purelib.to_lower().startswith('lib') or not py3_purelib.endswith('site-packages')
+ error('Python3 purelib path seems invalid?')
+endif
+
+# could be 'lib64' or 'Lib' on some systems
+py3_platlib = py3_mod.sysconfig_path('platlib')
+if not py3_platlib.to_lower().startswith('lib') or not py3_platlib.endswith('site-packages')
+ error('Python3 platlib path seems invalid?')
+endif
+
+# could be 'Include' on Windows
+py3_include = py3_mod.sysconfig_path('include')
+if not py3_include.to_lower().startswith('include')
+ error('Python3 include path seems invalid?')
+endif
+
+main = files('prog.py')
+
+test('toplevel', py3, args : main)
+
+subdir('subdir')
--- /dev/null
+#!/usr/bin/env python3
+
+# Smoke test: the in-source 'gluon' package must be importable when the
+# test runs from the source root (no PYTHONPATH manipulation needed here).
+from gluon import gluonator
+import sys
+
+print('Running mainprog from root dir.')
+
+# Non-zero exit signals failure to the meson test runner.
+if gluonator.gluoninate() != 42:
+ sys.exit(1)
--- /dev/null
+test('subdir',
+ py3,
+ args : files('subprog.py'),
+ env : 'PYTHONPATH=' + meson.source_root())
--- /dev/null
+#!/usr/bin/env python3
+
+# In order to run this program, PYTHONPATH must be set to
+# point to source root.
+
+from gluon import gluonator
+import sys
+
+print('Running mainprog from subdir.')
+
+if gluonator.gluoninate() != 42:
+ sys.exit(1)
--- /dev/null
+#!/usr/bin/env python3
+
+# Imports the compiled 'tachyon' extension module; PYTHONPATH is set by the
+# meson test definition to point at the build directory containing it.
+import tachyon
+import sys
+
+result = tachyon.phaserize('shoot')
+
+if not isinstance(result, int):
+ print('Returned result not an integer.')
+ sys.exit(1)
+
+# phaserize() returns 1 exactly when its argument is 'shoot'.
+if result != 1:
+ print('Returned result {} is not 1.'.format(result))
+ sys.exit(1)
--- /dev/null
+pylib = py3_mod.extension_module('tachyon',
+ 'tachyon_module.c',
+ dependencies : py3_dep,
+)
+
+pypathdir = meson.current_build_dir()
--- /dev/null
+/*
+ Copyright 2016 The Meson development team
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+*/
+
+/* A very simple Python extension module. */
+
+#include <Python.h>
+#include <string.h>
+
+/* phaserize(str) -> 1 if the argument is exactly "shoot", else 0. */
+static PyObject* phaserize(PyObject *self, PyObject *args) {
+ const char *message;
+ int result;
+
+ if(!PyArg_ParseTuple(args, "s", &message))
+ return NULL;
+
+ result = strcmp(message, "shoot") ? 0 : 1;
+ return PyLong_FromLong(result);
+}
+
+/* Method table: the module exposes only phaserize. */
+static PyMethodDef TachyonMethods[] = {
+ {"phaserize", phaserize, METH_VARARGS,
+ "Shoot tachyon cannons."},
+ {NULL, NULL, 0, NULL}
+};
+
+/* Module definition for 'tachyon'; -1 means no per-module state. */
+static struct PyModuleDef tachyonmodule = {
+ PyModuleDef_HEAD_INIT,
+ "tachyon",
+ NULL,
+ -1,
+ TachyonMethods
+};
+
+/* Entry point called by the interpreter on `import tachyon`. */
+PyMODINIT_FUNC PyInit_tachyon(void) {
+ return PyModule_Create(&tachyonmodule);
+}
--- /dev/null
+project('Python extension module', 'c',
+ default_options : ['buildtype=release'])
+# Because Windows Python ships only with optimized libs,
+# we must build this project the same way.
+
+py3_mod = import('python3')
+py3 = py3_mod.find_python()
+py3_dep = dependency('python3', required : false)
+
+if py3_dep.found()
+ subdir('ext')
+
+ test('extmod',
+ py3,
+ args : files('blaster.py'),
+ env : ['PYTHONPATH=' + pypathdir])
+else
+ error('MESON_SKIP_TEST: Python3 libraries not found, skipping test.')
+endif
+
+py3_pkg_dep = dependency('python3', method: 'pkg-config', required : false)
+if py3_pkg_dep.found()
+ python_lib_dir = py3_pkg_dep.get_pkgconfig_variable('libdir')
+endif
--- /dev/null
+#!/usr/bin/env python3
+
+# Exercises the Cython-wrapped Storer: default value, setter round-trip,
+# and type checking of the typed set_value(int) signature.
+from storer import Storer
+import sys
+
+s = Storer()
+
+if s.get_value() != 0:
+ print('Initial value incorrect.')
+ sys.exit(1)
+
+s.set_value(42)
+
+if s.get_value() != 42:
+ print('Setting value failed.')
+ sys.exit(1)
+
+# Cython's `int value` parameter must reject non-numeric arguments.
+try:
+ s.set_value('not a number')
+ print('Using wrong argument type did not fail.')
+ sys.exit(1)
+except TypeError:
+ pass
--- /dev/null
+
+cdef extern from "storer.h":
+ ctypedef struct Storer:
+ pass
+
+ Storer* storer_new();
+ void storer_destroy(Storer *s);
+ int storer_get_value(Storer *s);
+ void storer_set_value(Storer *s, int v);
--- /dev/null
+pyx_c = custom_target('storer_pyx',
+ output : 'storer_pyx.c',
+ input : 'storer.pyx',
+ command : [cython, '@INPUT@', '-o', '@OUTPUT@'],
+)
+
+slib = py3_mod.extension_module('storer',
+ 'storer.c', pyx_c,
+ dependencies : py3_dep)
+
+pydir = meson.current_build_dir()
--- /dev/null
+#include"storer.h"
+#include<stdlib.h>
+
+struct _Storer {
+ int value;
+};
+
+/* Allocate a zero-initialised Storer. Returns NULL if allocation fails
+ instead of dereferencing the unchecked malloc() result. */
+Storer* storer_new() {
+ Storer *s = malloc(sizeof(struct _Storer));
+ if (s != NULL) {
+ s->value = 0;
+ }
+ return s;
+}
+
+/* Release a Storer created by storer_new(); free(NULL) is a no-op. */
+void storer_destroy(Storer *s) {
+ free(s);
+}
+
+/* Plain accessor; s must be a valid Storer. */
+int storer_get_value(Storer *s) {
+ return s->value;
+}
+
+/* Plain mutator; s must be a valid Storer. */
+void storer_set_value(Storer *s, int v) {
+ s->value = v;
+}
--- /dev/null
+#pragma once
+
+/* Opaque handle: the struct layout is private to storer.c. */
+typedef struct _Storer Storer;
+
+Storer* storer_new();
+void storer_destroy(Storer *s);
+int storer_get_value(Storer *s);
+void storer_set_value(Storer *s, int v);
--- /dev/null
+cimport cstorer
+
+# Thin Python wrapper around the C Storer object; owns the underlying
+# pointer for the lifetime of the Python instance.
+cdef class Storer:
+ cdef cstorer.Storer* _c_storer
+
+ def __cinit__(self):
+ self._c_storer = cstorer.storer_new()
+
+ def __dealloc__(self):
+ cstorer.storer_destroy(self._c_storer)
+
+ cpdef int get_value(self):
+ return cstorer.storer_get_value(self._c_storer)
+
+ # `int value` makes Cython raise TypeError for non-numeric arguments.
+ cpdef set_value(self, int value):
+ cstorer.storer_set_value(self._c_storer, value)
--- /dev/null
+project('cython', 'c',
+ default_options : ['warning_level=3'])
+
+cython = find_program('cython3', required : false)
+py3_dep = dependency('python3', required : false)
+
+if cython.found() and py3_dep.found()
+ py3_dep = dependency('python3')
+ py3_mod = import('python3')
+ py3 = py3_mod.find_python()
+ subdir('libdir')
+
+ test('cython tester',
+ py3,
+ args : files('cytest.py'),
+ env : ['PYTHONPATH=' + pydir]
+ )
+else
+ error('MESON_SKIP_TEST: Cython3 or Python3 libraries not found, skipping test.')
+endif
--- /dev/null
+#!/usr/bin/env python3
+
+import os
+import sys
+import argparse
+
+from pathlib import Path
+
+# Prefer a tachyon extension built next to this script (build-dir layout)
+# over anything already on sys.path.
+filedir = Path(os.path.dirname(__file__)).resolve()
+if list(filedir.glob('ext/*tachyon.*')):
+ sys.path.insert(0, (filedir / 'ext').as_posix())
+
+import tachyon
+
+parser = argparse.ArgumentParser()
+parser.add_argument('-o', dest='output', default=None)
+
+options = parser.parse_args(sys.argv[1:])
+
+result = tachyon.phaserize('shoot')
+
+# When run as a custom_target command, -o names a sentinel file whose
+# contents are later checked by the 'flux' test.
+if options.output:
+ with open(options.output, 'w') as f:
+ f.write('success')
+
+if not isinstance(result, int):
+ print('Returned result not an integer.')
+ sys.exit(1)
+
+if result != 1:
+ print('Returned result {} is not 1.'.format(result))
+ sys.exit(1)
--- /dev/null
+#ifdef _MSC_VER
+__declspec(dllexport)
+#endif
+const char*
+tachyon_phaser_command (void)
+{
+ return "shoot";
+}
--- /dev/null
+#pragma once
+
+#ifdef _MSC_VER
+__declspec(dllimport)
+#endif
+const char* tachyon_phaser_command (void);
--- /dev/null
+libtachyon = shared_library('tachyonlib', 'meson-tachyonlib.c')
+
+libtachyon_dep = declare_dependency(link_with : libtachyon,
+ include_directories : include_directories('.'))
--- /dev/null
+subdir('lib')
+
+pylib = py3_mod.extension_module('tachyon',
+ 'tachyon_module.c',
+ dependencies : [libtachyon_dep, py3_dep],
+)
--- /dev/null
+/*
+ Copyright 2016 The Meson development team
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+*/
+
+/* A very simple Python extension module. */
+
+#include <Python.h>
+#include <string.h>
+
+#include "meson-tachyonlib.h"
+
+static PyObject* phaserize(PyObject *self, PyObject *args) {
+ const char *message;
+ int result;
+
+ if(!PyArg_ParseTuple(args, "s", &message))
+ return NULL;
+
+ result = strcmp(message, tachyon_phaser_command()) ? 0 : 1;
+ return PyLong_FromLong(result);
+}
+
+static PyMethodDef TachyonMethods[] = {
+ {"phaserize", phaserize, METH_VARARGS,
+ "Shoot tachyon cannons."},
+ {NULL, NULL, 0, NULL}
+};
+
+static struct PyModuleDef tachyonmodule = {
+ PyModuleDef_HEAD_INIT,
+ "tachyon",
+ NULL,
+ -1,
+ TachyonMethods
+};
+
+PyMODINIT_FUNC PyInit_tachyon(void) {
+ return PyModule_Create(&tachyonmodule);
+}
--- /dev/null
+project('Python extension module', 'c',
+ default_options : ['buildtype=release'])
+# Because Windows Python ships only with optimized libs,
+# we must build this project the same way.
+
+py3_mod = import('python3')
+py3 = py3_mod.find_python()
+py3_dep = dependency('python3', required : false)
+
+# Copy to the builddir so that blaster.py can find the built tachyon module
+# FIXME: We should automatically detect this case and append the correct paths
+# to PYTHONLIBDIR
+blaster_py = configure_file(input : 'blaster.py',
+ output : 'blaster.py',
+ configuration : configuration_data())
+
+check_exists = '''
+import os, sys
+with open(sys.argv[1], 'rb') as f:
+ assert(f.read() == b'success')
+'''
+if py3_dep.found()
+ subdir('ext')
+
+ out_txt = custom_target('tachyon flux',
+ input : blaster_py,
+ output : 'out.txt',
+ command : [py3, '@INPUT@', '-o', '@OUTPUT@'],
+ depends : pylib,
+ build_by_default: true)
+
+ test('flux', py3, args : ['-c', check_exists, out_txt])
+else
+ error('MESON_SKIP_TEST: Python3 libraries not found, skipping test.')
+endif
--- /dev/null
+project('rewritetest', 'c')
+
+sources = ['trivial.c']
+
+exe = executable('trivialprog', 'notthere.c', sources)
--- /dev/null
+project('rewritetest', 'c')
+
+sources = ['trivial.c', 'notthere.c']
+
+exe = executable('trivialprog', sources)
--- /dev/null
+project('rewritetest', 'c')
+
+sources = ['trivial.c']
+
+exe = executable('trivialprog', sources)
--- /dev/null
+project('subdir rewrite', 'c')
+
+subdir('sub1')
+subdir('sub2')
+
--- /dev/null
+srcs = ['first.c']
--- /dev/null
+srcs = ['first.c', 'second.c']
--- /dev/null
+executable('something', srcs)
+
--- /dev/null
+usr/bin/program?exe
+usr/bin/program2?exe
--- /dev/null
+project('rustprog', 'rust')
+
+e = executable('program', 'prog.rs',
+ rust_args : ['-C', 'lto'], # Just a test
+ install : true
+)
+test('rusttest', e)
+
+subdir('subdir')
--- /dev/null
+fn main() {
+ println!("rust compiler is working");
+}
--- /dev/null
+e = executable('program2', 'prog.rs', install : true)
+test('rusttest2', e)
--- /dev/null
+fn main() {
+ println!("rust compiler is working");
+}
--- /dev/null
+usr/bin/prog?exe
+usr/lib/libstuff.so
--- /dev/null
+project('rust shared library', 'rust')
+
+l = shared_library('stuff', 'stuff.rs', install : true)
+e = executable('prog', 'prog.rs', link_with : l, install : true)
+test('linktest', e)
--- /dev/null
+extern crate stuff;
+
+fn main() { println!("printing: {}", stuff::explore()); }
--- /dev/null
+#![crate_name = "stuff"]
+
+pub fn explore() -> &'static str { "librarystring" }
--- /dev/null
+usr/bin/prog?exe
+usr/lib/libstuff.rlib
--- /dev/null
+project('rust static library', 'rust')
+
+l = static_library('stuff', 'stuff.rs', install : true)
+e = executable('prog', 'prog.rs', link_with : l, install : true)
+test('linktest', e)
--- /dev/null
+extern crate stuff;
+
+fn main() { println!("printing: {}", stuff::explore()); }
--- /dev/null
+#![crate_name = "stuff"]
+
+pub fn explore() -> &'static str { "librarystring" }
--- /dev/null
+usr/bin/prog?exe
+usr/lib/libstuff.so
--- /dev/null
+project('rust and c polyglot executable', 'c', 'rust')
+
+l = library('stuff', 'stuff.rs', install : true)
+e = executable('prog', 'prog.c', link_with : l, install : true)
+test('polyglottest', e)
--- /dev/null
+#include <stdio.h>
+
+void f();
+
+int main() {
+ printf("Hello from C!\n");
+ f();
+}
--- /dev/null
+#![crate_name = "stuff"]
+
+#[no_mangle]
+pub extern fn f() {
+ println!("Hello from Rust!");
+}
--- /dev/null
+usr/bin/prog?exe
+usr/lib/libstuff.a
--- /dev/null
+project('static rust and c polyglot executable', 'c', 'rust')
+
+deps = [
+ meson.get_compiler('c').find_library('dl'),
+ dependency('threads'),
+]
+
+l = static_library('stuff', 'stuff.rs', rust_crate_type : 'staticlib', install : true)
+e = executable('prog', 'prog.c', dependencies: deps, link_with : l, install : true)
+test('polyglottest', e)
--- /dev/null
+#include <stdio.h>
+
+void f();
+
+int main() {
+ printf("Hello from C!\n");
+ f();
+}
--- /dev/null
+#![crate_name = "stuff"]
+
+#[no_mangle]
+pub extern fn f() {
+ println!("Hello from Rust!");
+}
--- /dev/null
+usr/bin/prog?exe
+usr/lib/libnamed_stuff.rlib
--- /dev/null
+project('rust static library', 'rust')
+
+l = static_library('named_stuff', 'stuff.rs', install : true)
+e = executable('prog', 'prog.rs', link_with : l, install : true)
+test('linktest', e)
--- /dev/null
+extern crate named_stuff;
+
+fn main() { println!("printing: {}", named_stuff::explore()); }
--- /dev/null
+pub fn explore() -> &'static str { "librarystring" }
--- /dev/null
+project('swift exe', 'swift')
+
+test('swifttest', executable('swifttest', 'prog.swift'))
--- /dev/null
+print("Swift executable is working.")
--- /dev/null
+func printSomething(text: String) {
+ print("Got this: \(text)")
+}
--- /dev/null
+#if swift(>=3.0)
+printSomething(text:"String from main")
+#else
+printSomething("String from main")
+#endif
--- /dev/null
+project('2 files', 'swift')
+
+test('2files', executable('twofiles', 'main.swift', 'libfile.swift'))
--- /dev/null
+import DataSource
+
+let data = getData()
+let data2 = getOther()
+
+print("String from module: \(data)")
+print("Other string: \(data2)")
--- /dev/null
+exe = executable('dataprog', 'main.swift', link_with : datasource)
+test('dataprog', exe)
--- /dev/null
+public func getData() -> String {
+ return "String from module."
+}
--- /dev/null
+datasource = static_library('DataSource', 'datasource.swift', 'othersource.swift')
--- /dev/null
+public func getOther() -> String {
+ return "String from other source."
+}
--- /dev/null
+project('linking', 'swift')
+
+subdir('lib')
+subdir('exe')
--- /dev/null
+#if os(OSX)
+ import Darwin
+#else
+ import Glibc
+#endif
+
+#if swift(>=3.0)
+let fname = CommandLine.arguments[1]
+#else
+let fname = Process.arguments[1]
+#endif
+let code = "public func getGenerated() -> Int {\n return 42\n}\n"
+
+let f = fopen(fname, "w")
+
+fwrite(code, 1, Int(strlen(code)), f)
+print("Name: \(fname)")
+fclose(f)
--- /dev/null
+gen = executable('gen', 'main.swift')
+
+srcs = custom_target('gensrc',
+ output : 'gen.swift',
+ command : [gen, '@OUTPUT@']
+)
--- /dev/null
+project('swift generator', 'swift')
+
+subdir('gen')
+subdir('user')
--- /dev/null
+let generated = getGenerated()
+
+print("Generated number is: \(generated).")
--- /dev/null
+user = executable('user', 'main.swift', srcs)
+test('User test', user)
--- /dev/null
+let num = getNumber()
+
+print("The number returned from C code is: \(num).")
--- /dev/null
+project('mixed', 'c', 'swift')
+
+lib = static_library('mylib', 'mylib.c')
+exe = executable('prog', 'main.swift', 'mylib.h',
+ link_with : lib)
+test('c interface', exe)
--- /dev/null
+#include"mylib.h"
+
+int getNumber() {
+ return 42;
+}
--- /dev/null
+#pragma once
+
+int getNumber();
--- /dev/null
+import mylib
+
+let num = getNumber()
+
+print("The number returned from C code is: \(num).")
--- /dev/null
+project('mixed', 'c', 'swift')
+
+i = include_directories('.')
+lib = static_library('mylib', 'mylib.c')
+exe = executable('prog', 'main.swift',
+ include_directories : [i],
+ link_with : lib)
+test('c module', exe)
--- /dev/null
+module mylib [extern_c] {
+ header "mylib.h"
+ link "mylib"
+ export *
+}
--- /dev/null
+#include"mylib.h"
+
+int getNumber() {
+ return 42;
+}
--- /dev/null
+#pragma once
+
+int getNumber();
--- /dev/null
+import mylib
+
+let num = getNumber()
+
+print("The number returned from C code is: \(num).")
--- /dev/null
+project('mixed', 'c', 'swift')
+add_project_arguments('-embed-bitcode', language : 'swift')
+subdir('mylib')
+exe = executable('prog', 'main.swift',
+ dependencies : dep)
+test('c module', exe)
--- /dev/null
+
+i = include_directories('.')
+lib = static_library('mylib', 'mylib.c')
+dep = declare_dependency(include_directories : i, link_with : lib)
--- /dev/null
+module mylib [extern_c] {
+ header "mylib.h"
+ link "mylib"
+ export *
+}
--- /dev/null
+#include"mylib.h"
+
+int getNumber() {
+ return 42;
+}
--- /dev/null
+#pragma once
+
+int getNumber();
--- /dev/null
+# This is a CMake version of this test. It behaves slightly differently
+# so in case you ever need to debug this, here it is.
+#
+# The biggest difference is that if SOVERSION is not set, it
+# is set to VERSION. Autotools sets it to the first number
+# of VERSION. That is, for version number 1.2.3 CMake sets
+# soname to 1.2.3 but Autotools sets it to 1.
+
+project(vertest C)
+cmake_minimum_required(VERSION 3.5)
+
+add_library(nover SHARED versioned.c)
+
+add_library(verset SHARED versioned.c)
+set_target_properties(verset PROPERTIES VERSION 4.5.6)
+
+add_library(soverset SHARED versioned.c)
+set_target_properties(soverset PROPERTIES SOVERSION 1.2.3)
+
+add_library(bothset SHARED versioned.c)
+set_target_properties(bothset PROPERTIES SOVERSION 1.2.3)
+set_target_properties(bothset PROPERTIES VERSION 4.5.6)
+
+add_library(settosame SHARED versioned.c)
+set_target_properties(settosame PROPERTIES SOVERSION 7.8.9)
+set_target_properties(settosame PROPERTIES VERSION 7.8.9)
--- /dev/null
+project('vertest', 'c')
+
+shared_library('nover', 'versioned.c',
+ install : true)
+
+shared_library('verset', 'versioned.c',
+ install : true,
+ version : '4.5.6')
+
+shared_library('soverset', 'versioned.c',
+ install : true,
+ soversion : '1.2.3')
+
+shared_library('bothset', 'versioned.c',
+ install : true,
+ soversion : '1.2.3',
+ version : '4.5.6')
+
+shared_library('settosame', 'versioned.c',
+ install : true,
+ soversion : '7.8.9',
+ version : '7.8.9')
--- /dev/null
+/* Dummy exported symbol; the test only checks the library file naming
+ produced by version/soversion, not this value. (void) prototype
+ replaces the old-style empty parameter list. */
+int versioned_func(void) {
+ return 0;
+}
--- /dev/null
+project('d dedup', 'c')
+
+add_project_arguments('-D', 'FOO', '-D', 'BAR', language : 'c')
+
+executable('prog', 'prog.c')
+
--- /dev/null
+#include<stdio.h>
+
+#ifndef FOO
+#error FOO is not defined.
+#endif
+
+#ifndef BAR
+#error BAR is not defined.
+#endif
+
+int main(int argc, char **argv) {
+ printf("All is well.\n");
+ return 0;
+}
--- /dev/null
+project('build rpath', 'c')
+
+subdir('sub')
+executable('prog', 'prog.c',
+ link_with : l,
+ build_rpath : '/foo/bar',
+ install_rpath : '/baz',
+ install : true,
+ )
--- /dev/null
+int get_stuff();
+
+int main(int argc, char **argv) {
+ return get_stuff();
+}
--- /dev/null
+l = shared_library('stuff', 'stuff.c')
--- /dev/null
+/* Shared-library payload for the build_rpath test; returning 0 makes the
+ linked test program exit successfully. (void) prototype replaces the
+ old-style empty parameter list. */
+int get_stuff(void) {
+ return 0;
+}
--- /dev/null
+project('cross find program', 'c')
+
+native_exe = find_program('sometool.py', native : true)
+cross_exe = find_program('sometool.py')
+
+native_out = run_command(native_exe).stdout().strip()
+cross_out = run_command(cross_exe).stdout().strip()
+
+assert(native_out == 'native',
+ 'Native output incorrect:' + native_out)
+assert(cross_out == 'cross',
+ 'Cross output incorrect:' + cross_out)
--- /dev/null
+#!/usr/bin/env python
+
+from __future__ import print_function
+
+print('cross')
--- /dev/null
+#!/usr/bin/env python
+
+from __future__ import print_function
+
+print('native')
--- /dev/null
+project('reconfigure test', ['c'])
+
+if get_option('b_lto') != true
+ error('b_lto not set')
+endif
--- /dev/null
+int func();
+
+int main(int argc, char **argv) {
+ return func() == 42 ? 0 : 99;
+}
--- /dev/null
+# This test is on its own because it is special.
+# To run the test you need the prebuilt object
+# file for the given platform.
+#
+# Combined with cross compilation this would make
+# the state space explode so let's just keep this
+# in its own subdir so it's not run during cross
+# compilation tests.
+
+project('prebuilt object', 'c')
+
+if host_machine.system() == 'windows'
+ prebuilt = 'prebuilt.obj'
+else
+ prebuilt = 'prebuilt.o'
+endif
+
+# Remember: do not put source.c in this
+# declaration. run_tests.py generates the
+# prebuilt object before running this test.
+
+e = executable('prog', 'main.c',
+objects : prebuilt)
+
+test('objtest', e)
--- /dev/null
+/*
+ * Compile this manually on new platforms and add the
+ * object file to revision control and Meson configuration.
+ */
+
+int func() {
+ return 42;
+}
--- /dev/null
+/* Prebuilt-static-library payload; the string is checked by main.c in the
+ 'prebuilt static lib' test. (void) prototype replaces the old-style
+ empty parameter list. */
+const char *msg(void) {
+ return "I am the best.";
+}
--- /dev/null
+#pragma once
+
+const char *msg();
--- /dev/null
+cc = meson.get_compiler('c')
+stlib = cc.find_library('best', dirs : meson.current_source_dir())
+
+best_dep = declare_dependency(dependencies : stlib,
+ include_directories : include_directories('.'))
--- /dev/null
+#include<stdio.h>
+#include<best.h>
+
+int main(int argc, char **argv) {
+ printf("%s\n", msg());
+ return 0;
+}
--- /dev/null
+project('prebuilt static lib', 'c')
+
+subdir('libdir')
+
+test('static', executable('mainprog', 'main.c', dependencies : best_dep))
--- /dev/null
+#include"alexandria.h"
+#include<stdio.h>
+
+void alexandria_visit() {
+ printf("You are surrounded by wisdom and knowledge. You feel enlightened.\n");
+}
--- /dev/null
+#pragma once
+
+/* Both funcs here for simplicity. */
+
+#if defined _WIN32 || defined __CYGWIN__
+#if defined BUILDING_DLL
+ #define DLL_PUBLIC __declspec(dllexport)
+#else
+ #define DLL_PUBLIC __declspec(dllimport)
+#endif
+#else
+ #if defined __GNUC__
+ #define DLL_PUBLIC __attribute__ ((visibility("default")))
+ #else
+ #pragma message ("Compiler does not support symbol visibility.")
+ #define DLL_PUBLIC
+ #endif
+#endif
+
+void DLL_PUBLIC alexandria_visit();
--- /dev/null
+#include<alexandria.h>
+#include<stdio.h>
+
+int main(int argc, char **argv) {
+ printf("Ahh, another visitor. Stay a while.\n");
+ printf("You enter the library.\n\n");
+ alexandria_visit();
+ printf("\nYou decided not to stay forever.\n");
+ return 0;
+}
--- /dev/null
+project('prebuilt shared library', 'c')
+
+cc = meson.get_compiler('c')
+shlib = cc.find_library('alexandria', dirs : meson.current_source_dir())
+
+exe = executable('patron', 'patron.c', dependencies : shlib)
+test('visitation', exe)
+
+d = declare_dependency(dependencies : shlib)
+
+exe2 = executable('another_visitor', 'another_visitor.c',
+ dependencies : d)
+test('another', exe2)
+
--- /dev/null
+#include<alexandria.h>
+#include<stdio.h>
+
+int main(int argc, char **argv) {
+ printf("You are standing outside the Great Library of Alexandria.\n");
+ printf("You decide to go inside.\n\n");
+ alexandria_visit();
+}
--- /dev/null
+/* Returns a different value depending on how the library was built:
+ * FOO_STATIC is defined only for the static variant. The test program
+ * requires a value over 9000, i.e. the static build. */
+int power_level (void)
+{
+#ifdef FOO_STATIC
+ return 9001;
+#else
+ return 8999;
+#endif
+}
--- /dev/null
+prefix=@PREFIX@
+libdir=${prefix}
+includedir=${prefix}/include
+datadir=${prefix}/data
+
+Name: libfoo
+Description: A foo library.
+Version: 1.0
+Libs: -L${libdir} -lfoo
+Cflags: -I${includedir}
--- /dev/null
+#pragma once
+
+int power_level (void);
--- /dev/null
+#include <foo.h>
+#include <stdio.h>
+
+/* Fails (exit code 1) unless the statically linked power_level() was
+ * used, i.e. the reported value is over 9000. */
+int
+main (int argc, char * argv[])
+{
+ int value = power_level ();
+ if (value < 9000) {
+ printf ("Power level is %i\n", value);
+ return 1;
+ }
+ printf ("IT'S OVER 9000!!!\n");
+ return 0;
+}
--- /dev/null
+project('pkg-config static', 'c')
+
+# Build a prefix string that is usable inside a .pc file. On Windows
+# the native path contains backslashes and a drive letter, neither of
+# which belong in pkg-config files, so rewrite C:\foo\bar as /c/foo/bar.
+if build_machine.system() != 'windows'
+ prefix = meson.source_root()
+else
+ # pkg-config files should not use paths with \
+ prefix_parts = meson.source_root().split('\\')
+ # If the path is C:/foo/bar, convert it to /c/foo/bar so we can test if our
+ # automatic conversion to C:/foo/bar inside PkgConfigDependency is working.
+ if prefix_parts[0][1] == ':'
+ drive = prefix_parts[0][0]
+ else
+ drive = prefix_parts[0]
+ endif
+ new_parts = []
+ foreach part : prefix_parts
+ if part != prefix_parts[0]
+ new_parts += part
+ endif
+ endforeach
+ prefix = '/@0@/@1@'.format(drive, '/'.join(new_parts))
+endif
+message(prefix)
+
+# Escape spaces
+prefix_parts = prefix.split(' ')
+prefix = '\ '.join(prefix_parts)
+
+# Substitute the prefix into the pkg-config template so the
+# dependency() lookup below can resolve 'foo' from the build directory.
+conf = configuration_data()
+conf.set('PREFIX', prefix)
+configure_file(input : 'foo.pc.in',
+ output : 'foo.pc',
+ configuration : conf)
+
+foo_dep = dependency('foo', static : true)
+
+test('footest', executable('foomain', 'main.c', dependencies : foo_dep))
--- /dev/null
+# Copyright © 2017 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+project('array option test')
--- /dev/null
+# Copyright © 2017 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+option(
+ 'list',
+ type : 'array',
+ value : ['foo', 'bar'],
+ choices : ['foo', 'bar', 'oink', 'boink'],
+)
--- /dev/null
+#include<stdio.h>
+#include<stdlib.h>
+
+#include<impl.h>
+
+/* Deliberately buggy program: when TEST_ENV is set it writes one byte
+ * past the end of a 10-byte heap allocation, so that the Valgrind test
+ * setup (see meson.build) can detect the error. Without TEST_ENV the
+ * program is clean. Do NOT "fix" the overflow. */
+int main(int argc, char **argv) {
+ char *ten = malloc(10);
+ if(getenv("TEST_ENV")) {
+ do_nasty(ten);
+ printf("TEST_ENV is set.\n");
+ }
+ free(ten);
+ return 0;
+}
--- /dev/null
+/* Write past the end. */
+
+/* Intentional out-of-bounds store: ptr is expected to point at a
+ * 10-byte buffer, so ptr[10] is one byte past its end. Exists to make
+ * Valgrind report an error in the 'valgrind' test setup. */
+void do_nasty(char *ptr) {
+ ptr[10] = 'n';
+}
--- /dev/null
+#pragma once
+
+void do_nasty(char *ptr);
--- /dev/null
+project('testsetups', 'c')
+
+vg = find_program('valgrind')
+
+# This is only set when running under Valgrind test setup.
+env = environment()
+env.set('TEST_ENV', '1')
+
+# Main setup: run tests under Valgrind with a generous timeout and with
+# TEST_ENV set so the buggy code path is actually exercised.
+add_test_setup('valgrind',
+ exe_wrapper : [vg, '--error-exitcode=1', '--leak-check=full'],
+ timeout_multiplier : 100,
+ env : env)
+
+buggy = executable('buggy', 'buggy.c', 'impl.c')
+test('Test buggy', buggy)
+
+# Additional setups exercising each keyword argument in isolation,
+# including the three accepted forms of 'env' (environment object,
+# plain string, list of strings).
+add_test_setup('empty')
+add_test_setup('onlyenv', env : env)
+add_test_setup('onlyenv2', env : 'TEST_ENV=1')
+add_test_setup('onlyenv3', env : ['TEST_ENV=1'])
+add_test_setup('wrapper', exe_wrapper : [vg, '--error-exitcode=1'])
+add_test_setup('timeout', timeout_multiplier : 20)
--- /dev/null
+project('subproject defaults', 'c',
+ default_options : ['defopoverride=defopt', # This should be overridden.
+ 'fromcmdline=defopt'] # This should get the value set in command line.
+ )
+
+subproject('foob', default_options : ['fromspfunc=spfunc', 'fromspfunconly=spfunc'])
+
+assert(get_option('fromcmdline') == 'cmdline', 'Default option defined in cmd line is incorrect: ' + get_option('fromcmdline'))
+assert(get_option('defopoverride') == 'defopt', 'Default option without cmd line override is incorrect: ' + get_option('defopoverride'))
+assert(get_option('fromoptfile') == 'optfile', 'Default value from option file is incorrect: ' + get_option('fromoptfile'))
+
--- /dev/null
+option('defopoverride', type : 'string', value : 'optfile', description : 'A value for overriding.')
+option('fromcmdline', type : 'string', value : 'optfile', description : 'A value for overriding.')
+option('fromoptfile', type : 'string', value : 'optfile', description : 'A value for not overriding.')
--- /dev/null
+project('foob', 'c',
+ default_options : ['defopoverride=s_defopt', # This should be overridden.
+ 'fromspfunc=s_defopt', # This is specified with a default_options kwarg to subproject()
+ 'fromcmdline=s_defopt'] # This should get the value set in command line.
+ )
+
+assert(get_option('fromcmdline') == 's_cmdline', 'Default option defined in cmd line is incorrect: ' + get_option('fromcmdline'))
+assert(get_option('fromspfunc') == 'spfunc', 'Default option set with subproject() incorrect: ' + get_option('fromspfunc'))
+assert(get_option('fromspfunconly') == 'spfunc', 'Default option set with subproject() incorrect: ' + get_option('fromspfunc'))
+assert(get_option('defopoverride') == 's_defopt', 'Default option without cmd line override is incorrect: ' + get_option('defopoverride'))
+assert(get_option('fromoptfile') == 's_optfile', 'Default value from option file is incorrect: ' + get_option('fromoptfile'))
+
--- /dev/null
+option('defopoverride', type : 'string', value : 's_optfile', description : 'A value for overriding.')
+option('fromcmdline', type : 'string', value : 's_optfile', description : 'A value for overriding.')
+option('fromspfunc', type : 'string', value : 's_optfile', description : 'A value for overriding.')
+option('fromspfunconly', type : 'string', value : 's_optfile', description : 'A value for overriding.')
+option('fromoptfile', type : 'string', value : 's_optfile', description : 'A value for not overriding.')
--- /dev/null
+int main() { return -1 ; }
--- /dev/null
+project('mainprj', 'c')
+
+subproject('subprjfail')
+subproject('subprjsucc')
+subproject('subprjmix')
+
+test('mainprj-failing_test',
+ executable('failing_test', 'failing_test.c'),
+ suite : 'fail')
+
+test('mainprj-successful_test',
+ executable('successful_test', 'successful_test.c'),
+ suite : 'success')
--- /dev/null
+int main() { return -1 ; }
--- /dev/null
+project('subprjfail', 'c')
+
+test('subprjfail-failing_test',
+ executable('failing_test', 'failing_test.c'),
+ suite : 'fail')
--- /dev/null
+int main() { return -1 ; }
--- /dev/null
+project('subprjmix', 'c')
+
+test('subprjmix-failing_test',
+ executable('failing_test', 'failing_test.c'),
+ suite : 'fail')
+
+test('subprjmix-successful_test',
+ executable('successful_test', 'successful_test.c'),
+ suite : 'success')
--- /dev/null
+int main() { return 0 ; }
--- /dev/null
+project('subprjsucc', 'c')
+
+test('subprjsucc-successful_test',
+ executable('successful_test', 'successful_test.c'),
+ suite : 'success')
--- /dev/null
+int main() { return 0 ; }
--- /dev/null
+int main() { return 0 ; }
--- /dev/null
+#!/usr/bin/env python3
+
+# Minimal exe wrapper used by the test suite: forwards its command-line
+# arguments to a child process and propagates the child's exit status.
+import sys
+import subprocess
+
+sys.exit(subprocess.call(sys.argv[1:]))
--- /dev/null
+project('trivial test',
+ ['c', 'cpp', 'objc', 'objcpp'],
+ meson_version : '>=0.27.0')
+
+executable('trivialc', 'trivial.c')
+executable('trivialcpp', 'trivial.cc')
+executable('trivialobjc', 'trivial.m')
+executable('trivialobjcpp', 'trivial.mm')
--- /dev/null
+#include<stdio.h>
+
+int main(int argc, char **argv) {
+ printf("Trivial test is working.\n");
+ return 0;
+}
--- /dev/null
+#include<iostream>
+
+int main(int argc, char **argv) {
+ std::cout << "C++ seems to be working." << std::endl;
+ return 0;
+}
--- /dev/null
+#import<stdio.h>
+
+int main(int argc, char **argv) {
+ return 0;
+}
\ No newline at end of file
--- /dev/null
+#import<stdio.h>
+
+class MyClass {
+};
+
+int main(int argc, char **argv) {
+ return 0;
+}
+
--- /dev/null
+project('cpp std override', 'cpp',
+ default_options : ['cpp_std=c++03',
+ 'werror=true'])
+
+executable('plain', 'progp.cpp',
+ override_options : 'cpp_std=none')
+executable('v03', 'prog03.cpp',
+ override_options : 'werror=false')
+executable('v11', 'prog11.cpp',
+ override_options : 'cpp_std=c++11')
--- /dev/null
+#include<iostream>
+
+int main(int argc, char **argv) {
+ std::cout << "I am a c++03 test program.\n";
+ return 0;
+}
--- /dev/null
+#include<iostream>
+
+int main(int argc, char **argv) {
+ std::cout << "I am a C++11 test program.\n";
+ return 0;
+}
--- /dev/null
+#include<iostream>
+
+int main(int argc, char **argv) {
+ std::cout << "I am a test program of undefined C++ standard.\n";
+ return 0;
+}
--- /dev/null
+int foo() {
+ return 0;
+}
--- /dev/null
+# Try to invoke linker constant string deduplication,
+# to ensure we are not clobbering shared strings.
+# Name everything possible just as "foo".
+foolib = shared_library('foo', 'foo.c',
+ install_dir : 'foo',
+ install : true)
+
--- /dev/null
+project('foo', 'c',
+ default_options : 'libdir=lib')
+
+subdir('foo')
+
+executable('prog', 'prog.c',
+ link_with : foolib,
+ install : true)
+
--- /dev/null
+int foo();
+
+int main(int argc, char **argv) {
+ return foo();
+}
--- /dev/null
+prefix=/usr
+exec_prefix=${prefix}
+libdir=${prefix}/lib/x86_64-linux-gnu
+sharedlibdir=${libdir}
+includedir=${prefix}/include
+
+Name: jonne
+Description: jonne library
+Version: 1.0.0
+
+Requires:
+Libs: -L/me/first -lfoo1 -L/me/second -lfoo2
+Cflags: -I${includedir}
--- /dev/null
+project('jonne', 'c')
+
+firstdep = dependency('first')
+seconddep = dependency('second')
+
+executable('lprog', 'prog.c', dependencies : [firstdep, seconddep])
--- /dev/null
+#include<stdio.h>
+
+int main(int argc, char **argv) {
+ return 0;
+}
--- /dev/null
+prefix=/usr
+exec_prefix=${prefix}
+libdir=${prefix}/lib/x86_64-linux-gnu
+sharedlibdir=${libdir}
+includedir=${prefix}/include
+
+Name: jonne2
+Description: jonne2 library
+Version: 1.0.0
+
+Requires:
+Libs: -L/me/third -lfoo3 -L/me/fourth -lfoo4
+Cflags: -I${includedir}
--- /dev/null
+project('valatest', 'vala', 'c')
+
+valadeps = [dependency('glib-2.0'), dependency('gobject-2.0')]
+
+e = executable('valaprog', 'prog.vala', dependencies : valadeps)
+test('valatest', e)
--- /dev/null
+class MainProg : GLib.Object {
+
+ public static int main(string[] args) {
+ stdout.printf("Vala is working.\n");
+ return 0;
+ }
+}
--- /dev/null
+int retval (void);
+
+int test (void) {
+ return retval ();
+}
--- /dev/null
+writec = find_program('writec.py')
+
+retval = custom_target('writec',
+ output : 'retval.c',
+ command : [writec, '@OUTPUT@'])
--- /dev/null
+#!/usr/bin/env python3
+
+# Generates a tiny C source file (path given as argv[1]) defining
+# retval(); used to test custom_target() output feeding a mixed
+# C/Vala build.
+import sys
+
+c = '''int
+retval(void) {
+ return 0;
+}
+'''
+
+with open(sys.argv[1], 'w') as f:
+ f.write(c)
--- /dev/null
+project('foo', 'c', 'vala')
+
+glib = dependency('glib-2.0')
+
+subdir('c')
+e = executable('foo', 'c/foo.c', retval, 'vala/bar.vala', dependencies: [glib])
+test('test foo', e)
--- /dev/null
+extern int test ();
+
+public int main (string[] args) {
+ return test ();
+}
--- /dev/null
+usr/bin/vapigen-test
+usr/lib/libfoo.so
+usr/lib/libbar.so
+usr/share/vala/vapi/foo-1.0.vapi
+usr/share/vala/vapi/foo-1.0.deps
+usr/share/vala/vapi/bar-1.0.vapi
+usr/share/vala/vapi/bar-1.0.deps
--- /dev/null
+#include "bar.h"
+#include "foo.h"
+
+/**
+ * bar_return_success:
+ *
+ * Returns 0
+ */
+int bar_return_success(void)
+{
+ return foo_return_success();
+}
--- /dev/null
+#include <glib-object.h>
+
+#pragma once
+
+int bar_return_success(void);
--- /dev/null
+libbar_sources = [
+ 'bar.c',
+ 'bar.h',
+]
+
+libbar_deps = [
+ dependency('gobject-2.0'),
+ libfoo_dep,
+]
+
+libbar = shared_library('bar', libbar_sources,
+ dependencies: libbar_deps,
+ install: true,
+)
+
+libbar_api_ver = '1.0'
+
+libbar_gir = gnome.generate_gir(libbar,
+ sources: libbar_sources,
+ namespace: 'Bar',
+ nsversion: libbar_api_ver,
+ packages: 'gobject-2.0',
+ symbol_prefix: 'bar',
+ extra_args: [
+ '--c-include=bar.h',
+ ],
+)
+
+libbar_vapi = gnome.generate_vapi('bar-' + libbar_api_ver,
+ sources: libbar_gir[0],
+ packages: libfoo_vapi,
+ install: true,
+)
--- /dev/null
+#include "foo.h"
+
+/**
+ * foo_return_success:
+ *
+ * Returns 0
+ */
+int foo_return_success(void)
+{
+ return 0;
+}
--- /dev/null
+#include <glib-object.h>
+
+#pragma once
+
+int foo_return_success(void);
--- /dev/null
+libfoo_sources = [
+ 'foo.c',
+ 'foo.h',
+]
+
+libfoo_deps = [
+ dependency('gobject-2.0')
+]
+
+libfoo = shared_library('foo', libfoo_sources,
+ dependencies: libfoo_deps,
+ install: true,
+)
+
+libfoo_api_ver = '1.0'
+
+libfoo_gir = gnome.generate_gir(libfoo,
+ sources: libfoo_sources,
+ namespace: 'Foo',
+ nsversion: libfoo_api_ver,
+ packages: 'gobject-2.0',
+ symbol_prefix: 'foo',
+ extra_args: [
+ '--c-include=foo.h',
+ ],
+)
+
+libfoo_vapi = gnome.generate_vapi('foo-' + libfoo_api_ver,
+ sources: libfoo_gir[0],
+ install: true,
+)
+
+libfoo_dep = declare_dependency(
+ link_with: libfoo,
+ include_directories: include_directories('.'),
+)
--- /dev/null
+using Foo;
+using Bar;
+
+class Main : GLib.Object {
+ public static int main(string[] args) {
+ var ignore = Foo.return_success();
+ return Bar.return_success();
+ }
+}
--- /dev/null
+project('vapi-test', ['c', 'vala'])
+
+gnome = import('gnome')
+subdir('libfoo')
+subdir('libbar')
+
+vapiexe = executable('vapigen-test',
+ 'main.vala',
+ dependencies: [dependency('gobject-2.0'), libfoo_vapi, libbar_vapi],
+ install: true,
+)
+
+test('vapigen-test', vapiexe)
--- /dev/null
+project('valatest', 'c', 'vala')
+
+glib = dependency('glib-2.0')
+gobject = dependency('gobject-2.0')
+
+foo_lib = library('foo-1.0', 'foo.vala',
+ vala_header: 'foo.h',
+ vala_vapi: 'foo.vapi',
+ dependencies: [glib, gobject])
+
+library('bar', 'bar.vala',
+ link_with: [foo_lib],
+ dependencies: [glib, gobject])
--- /dev/null
+project('find vala library', 'vala', 'c')
+
+valac = meson.get_compiler('vala')
+
+gobject = dependency('gobject-2.0')
+zlib = valac.find_library('zlib')
+
+e = executable('zlibtest', 'test.vala', dependencies : [gobject, zlib])
+test('testzlib', e)
--- /dev/null
+using ZLib;
+
+public static int main(string[] args) {
+ stdout.printf("ZLIB_VERSION is: %s\n", ZLib.VERSION.STRING);
+ return 0;
+}
--- /dev/null
+res = gnome.compile_resources('testui',
+ 'test-resources.xml',
+ source_dir : '.')
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<gresources>
+ <gresource prefix="/org/Meson">
+ <file compressed="true" preprocess="xml-stripblanks">test.ui</file>
+ </gresource>
+</gresources>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<interface>
+ <!-- interface-requires gtk+ 3.8 -->
+ <template class="TestWidget" parent="GtkBox">
+ <property name="visible">True</property>
+ <property name="can_focus">False</property>
+ <property name="orientation">vertical</property>
+ <property name="spacing">4</property>
+ <child>
+ <object class="GtkEntry" id="entry">
+ <property name="visible">True</property>
+ <property name="can_focus">True</property>
+ </object>
+ <packing>
+ <property name="position">0</property>
+ </packing>
+ </child>
+ </template>
+</interface>
--- /dev/null
+project('test glib target version and gresources', 'c', 'vala')
+
+gnome = import('gnome')
+
+glib = dependency('glib-2.0', version : '>=2.38')
+gtk = dependency('gtk+-3.0')
+
+subdir('gres')
+
+e = executable('gtktemplate', 'test.vala', res, dependencies : [glib, gtk])
+# No X on the CI, so disable this for now
+#test('test-target-glib', e)
--- /dev/null
+using Gtk;
+using GLib;
+
+[GtkTemplate (ui = "/org/Meson/test.ui")]
+public class TestWidget : Box {
+ public string text {
+ get { return entry.text; }
+ set { entry.text = value; }
+ }
+
+ [GtkChild]
+ private Entry entry;
+
+ public TestWidget (string text) {
+ this.text = text;
+ }
+}
+
+void main(string[] args) {
+ Gtk.init (ref args);
+ var win = new Window();
+ win.destroy.connect (Gtk.main_quit);
+
+ var widget = new TestWidget ("SOME TEXT HERE");
+
+ win.add (widget);
+ win.show_all ();
+
+ /* Exit immediately */
+ Timeout.add_full (Priority.DEFAULT_IDLE, 1, () =>
+ {
+ Gtk.main_quit();
+ return false;
+ });
+
+ Gtk.main ();
+}
--- /dev/null
+project('static vapi', 'c', 'vala')
+
+glib = dependency('glib-2.0')
+
+conf = configuration_data()
+conf.set_quoted('VERSION', '1.0.0')
+config_h = configure_file(output : 'config.h',
+ configuration : conf)
+
+e = executable('static-vapi', 'vapi/config.vapi', 'test.vala',
+ dependencies : glib)
+
+test('test-config', e)
--- /dev/null
+using GLib;
+using Config;
+
+public int main (string[] args) {
+ return GLib.strcmp(VERSION, "1.0.0");
+}
--- /dev/null
+[CCode (cprefix = "", lower_case_cprefix = "", cheader_filename = "config.h")]
+namespace Config {
+ public const string VERSION;
+}
--- /dev/null
+namespace App {
+ public static int main(string[] args) {
+ var mixer = new Mixer();
+ print("Current volume is %u\n", mixer.get_volume());
+ return 0;
+ }
+}
--- /dev/null
+project('mixed dependence', 'vala', 'c')
+
+cc = meson.get_compiler('c')
+
+deps = [dependency('glib-2.0'), dependency('gobject-2.0'),
+ # Should be ignored, see https://github.com/mesonbuild/meson/issues/1939
+ cc.find_library('z')]
+
+mixer = static_library('mixer', 'mixer.vala', 'mixer-glue.c',
+ dependencies : deps)
+
+app = executable('app', 'app.vala',
+ link_with : mixer,
+ dependencies : deps)
+
+test('valamixeddependencetest', app)
--- /dev/null
+#include "mixer.h"
+
+guint mixer_get_volume(Mixer *mixer) {
+ return 11;
+}
--- /dev/null
+public class Mixer : Object {
+ public extern uint get_volume();
+}
--- /dev/null
+#include "badger.h"
+
+int main(int argc, char *argv[]) {
+ Badger *badger;
+
+ badger = g_object_new(TYPE_BADGER, NULL);
+ g_print("Badger whose name is '%s'\n", badger_get_name(badger));
+ g_object_unref(badger);
+
+ return 0;
+}
--- /dev/null
+public class Badger : Object {
+ public string name {
+ get;
+ construct;
+ }
+
+ Badger() {
+ Object(name: "Joe");
+ }
+}
--- /dev/null
+project('plain consumer', 'vala', 'c')
+
+deps = [dependency('glib-2.0'), dependency('gobject-2.0')]
+
+badger = static_library('badger', 'badger.vala',
+ dependencies : deps)
+
+app = executable('app', 'app.c',
+ link_with : badger,
+ dependencies : deps)
+
+test('valaplainconsumertest', app)
--- /dev/null
+namespace App {
+ public static int main(string[] args) {
+ var person = new Person();
+ print("Favorite beer of \"%s\" is %s\n", person.name, person.favorite_beer.flavor);
+
+ var beer = new Beer("tasty");
+ print("This beer is %s\n", beer.flavor);
+
+ return 0;
+ }
+}
--- /dev/null
+public class Beer : Object {
+ public string flavor {
+ get;
+ construct;
+ }
+
+ public Beer(string flavor) {
+ Object(flavor: flavor);
+ }
+}
--- /dev/null
+project('vapi consumed twice', 'vala', 'c')
+
+base_deps = [dependency('glib-2.0'), dependency('gobject-2.0')]
+
+beer = library('beer', 'beer.vala', dependencies : base_deps)
+beer_dep = declare_dependency(link_with : beer)
+
+person = library('person', 'person.vala', link_with : beer,
+ dependencies : base_deps)
+person_dep = declare_dependency(link_with : person, dependencies : [beer_dep])
+
+app = executable('app', 'app.vala',
+ dependencies : base_deps + [person_dep, beer_dep])
+
+test('valavapiconsumedtwicetest', app)
--- /dev/null
+public class Person : Object {
+ public string name {
+ get {
+ return "Joe Badger";
+ }
+ }
+
+ public Beer favorite_beer {
+ get;
+ construct;
+ }
+
+ public Person() {
+ Object(favorite_beer: new Beer("smooth"));
+ }
+}
--- /dev/null
+project('genietest', 'vala', 'c')
+
+valadeps = [dependency('glib-2.0'), dependency('gobject-2.0')]
+
+e = executable('genieprog', 'prog.gs', dependencies : valadeps)
+test('genietest', e)
--- /dev/null
+init
+ print ("Genie is working.")
--- /dev/null
+class Class1 : GLib.Object {
+
+ public void hello() {
+ var c2 = new Class2();
+ c2.hello();
+ }
+}
--- /dev/null
+class Class2 : GLib.Object {
+
+ public void hello() {
+ stdout.printf("Multiple file Vala project is working.\n");
+ }
+}
--- /dev/null
+class MainProg : GLib.Object {
+
+ public static int main(string[] args) {
+ var c1 = new Class1();
+ c1.hello();
+ return 0;
+ }
+}
--- /dev/null
+project('multiple files', 'vala', 'c')
+
+glib = dependency('glib-2.0')
+gobject = dependency('gobject-2.0')
+
+e = executable('prog', 'main.vala', 'class1.vala', 'class2.vala',
+dependencies : [glib, gobject])
+test('multiple file test', e)
--- /dev/null
+#include <glib.h>
+
+gboolean c_test_one_is_true (void) {
+ return TRUE;
+}
--- /dev/null
+#include <glib.h>
+
+gboolean c_test_two_is_true (void) {
+ return TRUE;
+}
--- /dev/null
+def extern c_test_one_is_true():bool
+def extern c_test_two_is_true():bool
+
+init
+ assert( new Genie.TestOne().is_true() )
+ assert( new Genie.TestTwo().is_true() )
+ assert( new Vala.TestOne().is_true() )
+ assert( new Vala.TestTwo().is_true() )
+ assert( c_test_one_is_true() )
+ assert( c_test_two_is_true() )
+
--- /dev/null
+project( 'Genie multiple and mixed sources', 'vala', 'c' )
+
+genie_deps = [
+ dependency( 'glib-2.0' ),
+ dependency( 'gobject-2.0' ),
+]
+
+sources = [
+ 'init.gs',
+ 'test_one.gs',
+ 'test_two.gs',
+ 'vala_test_one.vala',
+ 'vala_test_two.vala',
+ 'c_test_one.c',
+ 'c_test_two.c',
+]
+
+prog = executable( 'genie_prog', sources, dependencies: genie_deps )
+test( 'Given a Genie program when it is compiled from multiple mixed sources then it should work', prog )
--- /dev/null
+namespace Genie
+
+ class TestOne
+ def is_true():bool
+ return true
--- /dev/null
+namespace Genie
+
+ class TestTwo
+ def is_true():bool
+ return true
--- /dev/null
+namespace Vala {
+ public class TestOne {
+ public bool is_true() {
+ return true;
+ }
+ }
+}
--- /dev/null
+namespace Vala {
+ public class TestTwo {
+ public bool is_true() {
+ return true;
+ }
+ }
+}
--- /dev/null
+public extern const string FOO_PLUGIN_PATH;
+
+Foo.PluginModule plugin_module;
+
+public int main () {
+ plugin_module = new Foo.PluginModule (FOO_PLUGIN_PATH, "bar");
+
+ if (!plugin_module.load ()) {
+ return 1;
+ }
+
+ var plugin = Object.new (plugin_module.plugin_type) as Foo.Plugin;
+
+ assert ("bar" == plugin.bar ());
+
+ return 0;
+}
--- /dev/null
+project('valatest', 'c', 'vala')
+
+glib_dep = dependency('glib-2.0')
+gobject_dep = dependency('gobject-2.0')
+gmodule_dep = dependency('gmodule-2.0')
+
+foo_sources = ['plugin.vala', 'plugin-module.vala']
+foo_lib = shared_library('foo', foo_sources,
+ dependencies: [glib_dep, gobject_dep, gmodule_dep])
+
+shared_module('bar', 'plugin-bar.vala',
+ dependencies: [glib_dep, gobject_dep],
+ link_with: foo_lib)
+
+foo_bin = executable('foo', 'foo.vala',
+ c_args: ['-DFOO_PLUGIN_PATH="@0@"'.format(meson.current_build_dir())],
+ dependencies: [glib_dep, gobject_dep],
+ link_with: foo_lib)
+
+test('shared module', foo_bin)
--- /dev/null
+[ModuleInit]
+public GLib.Type plugin_init (GLib.TypeModule tm) {
+ return typeof (Bar.Plugin);
+}
+
+public class Bar.Plugin : Foo.Plugin, GLib.Object {
+
+ public string bar () {
+ return "bar";
+ }
+}
--- /dev/null
+/* Loads a GModule-based plugin by name from a directory and resolves
+ * its plugin_init() entry point, which must return a GType that
+ * implements Foo.Plugin. */
+public class Foo.PluginModule : TypeModule {
+
+ [CCode (has_target = false)]
+ private delegate Type PluginInit (TypeModule type_module);
+
+ public string? directory { get; construct; default = null; }
+
+ public string name { get; construct; }
+
+ public string path { get; construct; }
+
+ public Type plugin_type { get; private set; }
+
+ private Module? module = null;
+
+ public PluginModule (string? directory, string name) {
+ Object (directory: directory, name: name);
+ }
+
+ construct {
+ // Build the platform-specific shared-module path (lib<name>.so etc.).
+ path = Module.build_path (directory, name);
+ }
+
+ // Returns false (after logging) on any failure: missing module,
+ // missing or null plugin_init symbol, or a type that is not a Plugin.
+ public override bool load () {
+ module = Module.open (path, ModuleFlags.BIND_LAZY);
+
+ if (module == null) {
+ critical (Module.error ());
+ return false;
+ }
+
+ void* plugin_init;
+ if (!module.symbol ("plugin_init", out plugin_init)){
+ critical (Module.error ());
+ return false;
+ }
+
+ if (plugin_init == null) {
+ return false;
+ }
+
+ plugin_type = ((PluginInit) plugin_init) (this);
+
+ if (!plugin_type.is_a (typeof (Plugin))) {
+ return false;
+ }
+
+ return true;
+ }
+
+ public override void unload () {
+ // Dropping the reference closes the module.
+ module = null;
+ }
+}
--- /dev/null
+public interface Foo.Plugin : GLib.Object {
+
+ public abstract string bar ();
+}
--- /dev/null
+public class Subdir.Subdir2.Test : GLib.Object {
+ construct {
+ stdout.printf("Test from Subdir/Subdir2/\n");
+ }
+}
--- /dev/null
+public class Subdir.Test : GLib.Object {
+ construct {
+ stdout.printf("Test from Subdir/\n");
+ }
+}
--- /dev/null
+public class Subdir2.Test : GLib.Object {
+ construct {
+ stdout.printf("Test from Subdir2/\n");
+ }
+}
--- /dev/null
+public class Test : GLib.Object {
+ construct {
+ stdout.printf("Test from main directory\n");
+ }
+}
--- /dev/null
+project('valatest', 'vala', 'c')
+
+valadeps = [dependency('glib-2.0'), dependency('gobject-2.0')]
+valafiles = files(
+ 'prog.vala',
+ 'Test.vala',
+ 'Subdir/Test.vala',
+ 'Subdir/Subdir2/Test.vala',
+ 'Subdir2/Test.vala',
+)
+
+e = executable('multidir_prog', valafiles, dependencies : valadeps)
+test('valatest', e)
--- /dev/null
+int main() {
+ var test1 = new Test ();
+ var test2 = new Subdir.Test ();
+ var test3 = new Subdir2.Test ();
+ var test4 = new Subdir.Subdir2.Test ();
+ stdout.printf("Vala is working.\n");
+ return 0;
+}
--- /dev/null
+class GioProg {
+
+ public static int main(string[] args) {
+ var homedir = File.new_for_path(Environment.get_home_dir());
+ stdout.printf("Home directory as told by gio is " + homedir.get_path() + "\n");
+ return 0;
+ }
+}
--- /dev/null
+project('giotest', 'vala', 'c')
+
+glib = dependency('glib-2.0')
+gobject = dependency('gobject-2.0')
+gio = [dependency('gio-2.0'),
+ # https://github.com/mesonbuild/meson/issues/1484
+ dependency('gio-unix-2.0', required : false),
+ dependency('gio-windows-2.0', required : false)]
+
+e = executable('gioprog', 'gioprog.vala',
+dependencies : [glib, gobject, gio])
+test('giotest', e)
--- /dev/null
+public const string DATA_DIRECTORY;
--- /dev/null
+public const string SOMETHING_ELSE;
--- /dev/null
+project('valatest', 'vala', 'c')
+
+valac = meson.get_compiler('vala')
+# Try to find our library
+valadeps = [valac.find_library('meson-something-else', dirs : meson.current_source_dir())]
+valadeps += [dependency('glib-2.0'), dependency('gobject-2.0')]
+
+e = executable(
+'valaprog',
+sources : ['config.vapi', 'prog.vala'],
+dependencies : valadeps,
+c_args : ['-DDATA_DIRECTORY="@0@"'.format(meson.current_source_dir()),
+ '-DSOMETHING_ELSE="Out of this world!"']
+)
+test('valatest', e)
--- /dev/null
+class MainProg : GLib.Object {
+
+ public static int main(string[] args) {
+ stdout.printf("DATA_DIRECTORY is: %s.\n", DATA_DIRECTORY);
+ stdout.printf("SOMETHING_ELSE is: %s.\n", SOMETHING_ELSE);
+ return 0;
+ }
+}
--- /dev/null
+extern int get_ret_code ();
+
+public class MyThread : Object {
+ public int x_times { get; private set; }
+
+ public MyThread (int times) {
+ this.x_times = times;
+ }
+
+ public int run () {
+ for (int i = 0; i < this.x_times; i++) {
+ stdout.printf ("ping! %d/%d\n", i + 1, this.x_times);
+ Thread.usleep (10000);
+ }
+
+ // return & exit have the same effect
+ Thread.exit (get_ret_code ());
+ return 43;
+ }
+}
+
+public static int main (string[] args) {
+ // Check whether threads are supported:
+ if (Thread.supported () == false) {
+ stderr.printf ("Threads are not supported!\n");
+ return -1;
+ }
+
+ try {
+ // Start a thread:
+ MyThread my_thread = new MyThread (10);
+ Thread<int> thread = new Thread<int>.try ("My fst. thread", my_thread.run);
+
+ // Wait until thread finishes:
+ int result = thread.join ();
+ // Output: `Thread stopped! Return value: 42`
+ stdout.printf ("Thread stopped! Return value: %d\n", result);
+ } catch (Error e) {
+ stdout.printf ("Error: %s\n", e.message);
+ }
+
+ return 0;
+}
--- /dev/null
+project('valatest', 'vala', 'c')
+
+if not meson.is_unity()
+ add_global_arguments('-Werror', language : 'c')
+endif
+
+valadeps = [dependency('glib-2.0', version : '>=2.32'), dependency('gobject-2.0')]
+
+e = executable('valaprog', 'GLib.Thread.vala', 'retcode.c', dependencies : valadeps)
+test('valatest', e)
--- /dev/null
+int
+get_ret_code (void)
+{
+ return 42;
+}
--- /dev/null
+usr/lib/libextractedlib.a
--- /dev/null
+project('valastatic', 'vala', 'c')
+
+valadeps = [dependency('glib-2.0'), dependency('gobject-2.0')]
+
+l = static_library('valalib', 'mylib.vala', dependencies : valadeps)
+# NOTE: This static library is not usable from Vala because it does not carry
+# forward the .vapi and .h files generated by Valac to the next BuildTarget.
+# Will have to be fixed with https://github.com/mesonbuild/meson/issues/891
+m = static_library('extractedlib',
+ objects : l.extract_all_objects(),
+ install : true)
+
+e = executable('valaprog', 'prog.vala',
+link_with : l,
+dependencies : valadeps)
+
+test('valastatictest', e)
--- /dev/null
+public class LibraryObject : Object {
+ public void func() {
+ stdout.printf("Method in library called.");
+ }
+}
--- /dev/null
+class MainApp : Object {
+ public static int main(string[] args) {
+ var l = new LibraryObject();
+ l.func();
+ return 0;
+ }
+}
--- /dev/null
+usr/lib/libinstalled_vala_lib.so
+usr/lib/libinstalled_vala_all.so
+usr/include/installed_vala_all.h
+usr/include/valah/installed_vala_all_nolib.h
+usr/include/installed_vala_onlyh.h
+usr/share/vala/vapi/installed_vala_all.vapi
+usr/share/vala-1.0/vapi/installed_vala_all_nolib.vapi
+usr/share/vala/vapi/installed_vala_onlyvapi.vapi
--- /dev/null
+# Extra valac arguments for the non-installed library below.
+args = []
+# https://github.com/mesonbuild/meson/issues/1969
+if get_option('unity') == 'on'
+  # Fix: assign to 'args' — the previous code assigned to a separate
+  # 'vala_args' variable that was never used, so the -H flag was
+  # silently dropped in unity builds.
+  args = ['-H', 'mylib.h']
+endif
+
+l = shared_library('valalib', 'mylib.vala',
+ vala_args : args,
+ dependencies : valadeps)
+
+shared_library('installed_vala_lib', 'mylib.vala',
+ dependencies : valadeps,
+ install : true)
+
+shared_library('installed_vala_all', 'mylib.vala',
+ dependencies : valadeps,
+ install : true,
+ install_dir : [true, true, true])
+
+shared_library('installed_vala_all_nolib', 'mylib.vala',
+ dependencies : valadeps,
+ install : true,
+ install_dir : [false,
+ join_paths(get_option('includedir'), 'valah'),
+ join_paths(get_option('datadir'), 'vala-1.0', 'vapi')])
+
+shared_library('installed_vala_onlyh', 'mylib.vala',
+ dependencies : valadeps,
+ install : true,
+ install_dir : [false, get_option('includedir'), false])
+
+shared_library('installed_vala_onlyvapi', 'mylib.vala',
+ dependencies : valadeps,
+ install : true,
+ install_dir : [false, false, join_paths(get_option('datadir'), 'vala', 'vapi')])
--- /dev/null
+public class LibraryObject : Object {
+ public void func() {
+ stdout.printf("Method in library called.");
+ }
+}
--- /dev/null
+project('shared library', 'vala', 'c')
+
+valadeps = [dependency('glib-2.0'), dependency('gobject-2.0')]
+
+libinc = include_directories('lib')
+
+subdir('lib')
+subdir('prog')
+
+test('valasharedtest', e)
--- /dev/null
+e = executable('valaprog', 'prog.vala',
+ link_with : l,
+ include_directories : libinc,
+ dependencies : valadeps)
--- /dev/null
+class MainApp : Object {
+ public static int main(string[] args) {
+ var l = new LibraryObject();
+ l.func();
+ return 0;
+ }
+}
--- /dev/null
+/*** BEGIN file-header ***/
+
+#include "enum-types.h"
+
+/*** END file-header ***/
+
+/*** BEGIN file-production ***/
+/* enumerations from "@filename@" */
+#include "@filename@"
+/*** END file-production ***/
+
+
+/*** BEGIN value-header ***/
+GType
+@enum_name@_get_type (void)
+{
+ static volatile gsize g_define_type_id__volatile = 0;
+
+ if (g_once_init_enter (&g_define_type_id__volatile)) {
+ static const G@Type@Value values[] = {
+/*** END value-header ***/
+
+/*** BEGIN value-production ***/
+ { @VALUENAME@, "@VALUENAME@", "@valuenick@" },
+/*** END value-production ***/
+
+/*** BEGIN value-tail ***/
+ { 0, NULL, NULL }
+ };
+ GType g_define_type_id =
+ g_@type@_register_static (g_intern_static_string ("@EnumName@"), values);
+
+ g_once_init_leave (&g_define_type_id__volatile, g_define_type_id);
+ }
+
+ return g_define_type_id__volatile;
+}
+
+/*** END value-tail ***/
+
+/*** BEGIN file-tail ***/
+
+/*** END file-tail ***/
--- /dev/null
+/*** BEGIN file-header ***/
+
+#ifndef __EXAMPLE_ENUMS_TYPES_H__
+#define __EXAMPLE_ENUMS_TYPES_H__
+
+#include <glib-object.h>
+#include "enums.h"
+
+G_BEGIN_DECLS
+/*** END file-header ***/
+
+/*** BEGIN file-production ***/
+
+/* enumerations from "@filename@" */
+/*** END file-production ***/
+
+/*** BEGIN value-header ***/
+GType @enum_name@_get_type (void) G_GNUC_CONST;
+#define EXAMPLE_TYPE_@ENUMSHORT@ (@enum_name@_get_type ())
+/*** END value-header ***/
+
+/*** BEGIN file-tail ***/
+G_END_DECLS
+
+#endif /* __EXAMPLE_ENUMS_TYPES_H__ */
+/*** END file-tail ***/
--- /dev/null
+#ifndef __EXAMPLE_ENUMS_H__
+#define __EXAMPLE_ENUMS_H__
+
+G_BEGIN_DECLS
+
+typedef enum {
+ EXAMPLE_VERBOSITY_ERRORS,
+ EXAMPLE_VERBOSITY_MINIMAL,
+ EXAMPLE_VERBOSITY_DETAILED,
+ EXAMPLE_VERBOSITY_DEBUG,
+} ExampleVerbosity;
+
+G_END_DECLS
+
+#endif /* __EXAMPLE_ENUMS_H__ */
--- /dev/null
+int whatever() {
+ return 0;
+}
--- /dev/null
+int main() {
+ return 0;
+}
--- /dev/null
+# Test that dependencies with their own generated sources don't
+# confuse the Vala build instruction generator.
+
+# Test case for https://github.com/mesonbuild/meson/issues/1084
+
+gnome = import('gnome')
+
+gobject = dependency('gobject-2.0')
+
+enums = gnome.mkenums('enum-types',
+ sources: 'enums.h',
+ c_template: 'enum-types.c.template',
+ h_template: 'enum-types.h.template',
+)
+
+libcommon = library('common',
+ enums[0], enums[1],
+ dependencies: gobject)
+
+common_dep = declare_dependency(
+ # This is required so that whoever depends on this also depends
+ # on the generated header; that won't happen implicitly.
+ # See: https://github.com/mesonbuild/meson/issues/1084
+ sources: enums[1],
+ link_with: libcommon,
+)
+
+libplover_vala = library('plover',
+ 'lib.vala',
+ dependencies: [common_dep, gobject]
+)
+
+plover_dep = declare_dependency(
+ link_with: libplover_vala,
+ dependencies: common_dep
+)
+
+vala_prog = executable('hello',
+ 'main.vala',
+ link_with: libplover_vala,
+ # There's no need to specify common_dep here since plover_dep pulls it
+ # in, but it should be harmless to do so.
+ dependencies: [common_dep, plover_dep, gobject]
+)
--- /dev/null
+usr/bin/generatedtest
+usr/bin/onlygentest
--- /dev/null
+project('mytest', 'vala', 'c')
+
+cd = configuration_data()
+cd.set('x', 'y')
+
+subdir('src')
+subdir('tools')
+subdir('onlygen')
+subdir('dependency-generated')
--- /dev/null
+int main() {
+ return 0;
+}
--- /dev/null
+onlygen = generator(copy,
+ output : '@BASENAME@.vala',
+ arguments : ['@INPUT@', '@OUTPUT@'])
+
+executable('onlygentest', onlygen.process('maingen.in'),
+ install : true,
+ dependencies: [dependency('glib-2.0'), dependency('gobject-2.0')])
--- /dev/null
+namespace Config {
+ public static const string x = "@x@";
+}
--- /dev/null
#!/usr/bin/env python3
"""Build-time helper: copy <source> to <destination>."""

import sys
import shutil


def main() -> None:
    """Copy sys.argv[1] to sys.argv[2] with shutil.copyfile.

    Wrapped in a function so importing the module has no side effects;
    the original copied at import time and raised IndexError when run
    (or imported) without arguments.
    """
    shutil.copyfile(sys.argv[1], sys.argv[2])


if __name__ == '__main__':
    main()
--- /dev/null
+config = configure_file(input: 'config.vala.in',
+ output: 'config.vala',
+ configuration: cd)
+
+print = find_program('write_wrapper.py')
+wrapper = custom_target('wrapper',
+ output : 'print_wrapper.vala',
+ command : [print, '@OUTPUT@'])
+
+copy = find_program('copy_file.py')
+gen = generator(copy,
+ output : '@BASENAME@.vala',
+ arguments : ['@INPUT@', '@OUTPUT@'])
+
+returncode = gen.process('returncode.in')
+
+src = files('test.vala')
--- /dev/null
+int return_code() {
+ return 0;
+}
--- /dev/null
+int main() {
+ print_wrapper (Config.x);
+ return return_code ();
+}
--- /dev/null
#!/usr/bin/env python3
"""Generate print_wrapper.vala, a tiny Vala shim used by the test build."""

import sys

# Vala source written verbatim to the output file (do not reformat:
# this text is the program's output).
contents = '''
void print_wrapper(string arg) {
 print (arg);
}
'''


def main() -> None:
    """Write the Vala snippet to the path given as the first CLI argument.

    Wrapped in a function so importing the module has no side effects;
    the original wrote the file at import time and raised IndexError
    when sys.argv was too short.
    """
    with open(sys.argv[1], 'w') as f:
        f.write(contents)


if __name__ == '__main__':
    main()
--- /dev/null
+executable('generatedtest', [src, config, returncode, wrapper],
+ install : true,
+ dependencies: [dependency('glib-2.0'), dependency('gobject-2.0')])
--- /dev/null
+namespace Foo
+{
+ public int bar ()
+ {
+ return 0;
+ }
+}
--- /dev/null
+usr/lib/libfoo.so
+usr/share/gir-1.0/Foo-1.0.gir
--- /dev/null
+project('foo', 'c', 'vala')
+
+glib = dependency('glib-2.0')
+gobject = dependency('gobject-2.0')
+g_ir_compiler = find_program('g-ir-compiler')
+
+foo = shared_library('foo', 'foo.vala',
+ install : true,
+ install_dir : [true, false, false, true],
+ vala_gir: 'Foo-1.0.gir',
+ dependencies: [glib, gobject])
+
+custom_target('foo-typelib',
+ command: [g_ir_compiler, '--output', '@OUTPUT@', '@INPUT@'],
+ input: meson.current_build_dir() + '/Foo-1.0.gir',
+ output: 'Foo-1.0.typelib',
+ depends: foo)
+
--- /dev/null
+usr/bin/prog.exe
+usr/bin/prog.pdb
--- /dev/null
+project('wintest', 'c')
+
+prog = executable('prog', 'prog.c', install : true)
+test('wintest', prog)
--- /dev/null
+#include <windows.h>
+
+int main(int argc, char **argv) {
+ return 0;
+}
--- /dev/null
+project('generated_dll_module_defs', 'c')
+
+subdir('subdir')
+exe = executable('prog', 'prog.c', link_with : shlib)
+test('runtest', exe)
--- /dev/null
/* Resolved at link time from the somedll shared library built in subdir/. */
int somedllfunc();

/* Exit 0 only when the DLL returns the sentinel value 42 — i.e. when the
 * symbol exported via the module-definition file was actually linked. */
int main(int argc, char **argv) {
    return somedllfunc() == 42 ? 0 : 1;
}
--- /dev/null
+conf = configuration_data()
+conf.set('func', 'somedllfunc')
+def_file = configure_file(
+ input: 'somedll.def.in',
+ output: 'somedll.def',
+ configuration : conf,
+)
+
+shlib = shared_library('somedll', 'somedll.c', vs_module_defs : def_file)
--- /dev/null
+int somedllfunc() {
+ return 42;
+}
--- /dev/null
+EXPORTS
+ @func@
--- /dev/null
+project('generated_dll_module_defs', 'c')
+
+if meson.backend().startswith('vs')
+ # FIXME: Broken on the VS backends
+ error('MESON_SKIP_TEST see https://github.com/mesonbuild/meson/issues/1799')
+endif
+
+subdir('subdir')
+exe = executable('prog', 'prog.c', link_with : shlib)
+test('runtest', exe)
--- /dev/null
+int somedllfunc();
+
+int main(int argc, char **argv) {
+ return somedllfunc() == 42 ? 0 : 1;
+}
--- /dev/null
#!/usr/bin/env python3
"""Generate a Windows module-definition (.def) file for the somedll test."""

import sys


def main() -> None:
    """Write an EXPORTS section naming somedllfunc to sys.argv[1].

    Wrapped in a function so importing the module has no side effects;
    the original wrote the file at import time and raised IndexError
    when invoked without an argument.
    """
    with open(sys.argv[1], 'w') as f:
        print('EXPORTS', file=f)
        print(' somedllfunc', file=f)


if __name__ == '__main__':
    main()
--- /dev/null
+make_def = find_program('make_def.py')
+
+def_file = custom_target('gen_def',
+ command: [make_def, '@OUTPUT@'],
+ output: 'somedll.def')
+
+shlib = shared_library('somedll', 'somedll.c', vs_module_defs: def_file)
--- /dev/null
+int somedllfunc() {
+ return 42;
+}
--- /dev/null
+usr/bin/prog.exe
+usr/bin/prog.pdb
+usr/bin/prog2.exe
+usr/bin/prog2.pdb
+?gcc:usr/lib/libprog.exe.a
+?gcc:usr/lib/libburble.a
+?msvc:usr/lib/prog.exe.lib
+?msvc:usr/lib/burble.lib
--- /dev/null
+project('wintest', 'c')
+
+# Test that we can produce an implib for an executable on Windows, and that it's
+# name can be set, and that it is installed along with the executable
+
+executable('prog', 'prog.c', install: true, implib: true)
+executable('prog2', 'prog.c', install: true, implib: 'burble')
--- /dev/null
+#include <windows.h>
+
+int __declspec(dllexport)
+main(int argc, char **argv) {
+ return 0;
+}
--- /dev/null
+project('winmain', 'c')
+
+exe = executable('prog', 'prog.c', gui_app : true)
+test('winmain', exe)
--- /dev/null
+#include<windows.h>
+
+int APIENTRY
+WinMain(
+ HINSTANCE hInstance,
+ HINSTANCE hPrevInstance,
+ LPSTR lpszCmdLine,
+ int nCmdShow) {
+ return 0;
+}
--- /dev/null
+project('wincpp', 'cpp')
+
+exe = executable('prog', 'prog.cpp')
+test('wincpp', exe)
--- /dev/null
+#include<windows.h>
+
+class Foo;
+
+int main(int argc, char **argv) {
+ return 0;
+}
--- /dev/null
+project('winmaincpp', 'cpp')
+
+exe = executable('prog', 'prog.cpp', gui_app : true)
+test('winmaincpp', exe)
--- /dev/null
+#include<windows.h>
+
+class Foo;
+
+int APIENTRY
+WinMain(
+ HINSTANCE hInstance,
+ HINSTANCE hPrevInstance,
+ LPSTR lpszCmdLine,
+ int nCmdShow) {
+ return 0;
+}
--- /dev/null
+inc = include_directories('resource')
--- /dev/null
+#define ICON_ID 1
--- /dev/null
+project('winmain', 'c')
+
+# MinGW windres has a bug due to which it doesn't parse args with space properly:
+# https://github.com/mesonbuild/meson/pull/1346
+# https://sourceware.org/bugzilla/show_bug.cgi?id=4933
+if meson.get_compiler('c').get_id() == 'gcc' and host_machine.system() == 'windows'
+ # Construct build_to_src and skip this test if it has spaces
+ # because then the -I flag to windres will also have spaces
+ # and we know the test will fail
+ src_parts = meson.source_root().split('/')
+ build_parts = meson.build_root().split('/')
+
+ # Get the common path (which might just be '/' or 'C:/')
+ common = []
+ done = false
+ count = 0
+ if src_parts.length() > build_parts.length()
+ parts = build_parts
+ other = src_parts
+ else
+ parts = src_parts
+ other = build_parts
+ endif
+ foreach part : parts
+ if not done and part == other.get(count)
+ common += [part]
+ else
+ done = true
+ endif
+ count += 1
+ endforeach
+
+ # Create path components to go down from the build root to the common path
+ count = 0
+ rel = build_parts
+ foreach build : build_parts
+ if count < build_parts.length() - common.length()
+ rel += ['..']
+ endif
+ count += 1
+ endforeach
+
+ # Create path components to go up from the common path to the build root
+ count = 0
+ foreach src : src_parts
+ if count >= common.length()
+ rel += [src]
+ endif
+ count += 1
+ endforeach
+
+ build_to_src = '/'.join(rel)
+
+ if build_to_src.contains(' ')
+ message('build_to_src is: ' + build_to_src)
+ error('MESON_SKIP_TEST build_to_src has spaces')
+ endif
+ # Welcome to the end of this conditional.
+ # We hope you never have to implement something like this.
+endif
+
+subdir('inc')
+subdir('res')
+
+exe = executable('prog', 'prog.c',
+ res,
+ gui_app : true)
+
+test('winmain', exe)
--- /dev/null
+#include<windows.h>
+
+#define MY_ICON 1
+
+int APIENTRY
+WinMain(
+ HINSTANCE hInstance,
+ HINSTANCE hPrevInstance,
+ LPSTR lpszCmdLine,
+ int nCmdShow) {
+ HICON hIcon;
+ hIcon = LoadIcon(NULL, IDI_APPLICATION);
+ return hIcon ? 0 : 1;
+}
--- /dev/null
+win = import('windows')
+
+res = win.compile_resources('myres.rc',
+ include_directories : inc)
--- /dev/null
+#include<windows.h>
+#include"resource.h"
+
+ICON_ID ICON "sample.ico"
--- /dev/null
+project('dll_module_defs', 'c')
+
+subdir('subdir')
+exe = executable('prog', 'prog.c', link_with : shlib)
+test('runtest', exe)
--- /dev/null
+int somedllfunc();
+
+int main(int argc, char **argv) {
+ return somedllfunc() == 42 ? 0 : 1;
+}
--- /dev/null
+shlib = shared_library('somedll', 'somedll.c', vs_module_defs : 'somedll.def')
--- /dev/null
+int somedllfunc() {
+ return 42;
+}
--- /dev/null
+EXPORTS
+ somedllfunc
+
--- /dev/null
#!/usr/bin/env python3
"""Build-time helper: copy <source> to <destination>."""

import sys
import shutil


def main() -> None:
    """Copy sys.argv[1] to sys.argv[2] with shutil.copyfile.

    Wrapped in a function so importing the module has no side effects;
    the original copied at import time and raised IndexError when run
    (or imported) without arguments.
    """
    shutil.copyfile(sys.argv[1], sys.argv[2])


if __name__ == '__main__':
    main()
--- /dev/null
+int myFunc (void);
+
+int
+main (int argc, char *argv[])
+{
+ if (myFunc() == 55)
+ return 0;
+ return 1;
+}
--- /dev/null
+?msvc:usr/bin/some-0.dll
+?msvc:usr/bin/some-0.pdb
+?msvc:usr/lib/some.lib
+?msvc:usr/bin/noversion.dll
+?msvc:usr/bin/noversion.pdb
+?msvc:usr/lib/noversion.lib
+?msvc:usr/bin/onlyversion-1.dll
+?msvc:usr/lib/onlyversion.lib
+?msvc:usr/bin/onlysoversion-5.dll
+?msvc:usr/lib/onlysoversion.lib
+?msvc:usr/libexec/customdir.dll
+?msvc:usr/libexec/customdir.lib
+?msvc:usr/lib/module.dll
+?gcc:usr/bin/?libsome-0.dll
+?gcc:usr/lib/libsome.dll.a
+?gcc:usr/bin/?libnoversion.dll
+?gcc:usr/lib/libnoversion.dll.a
+?gcc:usr/bin/?libonlyversion-1.dll
+?gcc:usr/lib/libonlyversion.dll.a
+?gcc:usr/bin/?libonlysoversion-5.dll
+?gcc:usr/lib/libonlysoversion.dll.a
+?gcc:usr/libexec/?libcustomdir.dll
+?gcc:usr/libexec/libcustomdir.dll.a
+?gcc:usr/lib/?libmodule.dll
--- /dev/null
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+int myFunc() {
+ return 55;
+}
--- /dev/null
+project('msvc dll versioning', 'c')
+
+cc = meson.get_compiler('c')
+
+# Test that we create correctly-named dll and import lib files,
+# and also install them in the right place
+some = shared_library('some', 'lib.c',
+ version : '1.2.3',
+ soversion : '0',
+ install : true)
+
+noversion = shared_library('noversion', 'lib.c',
+ install : true)
+
+onlyversion = shared_library('onlyversion', 'lib.c',
+ version : '1.4.5',
+ install : true)
+
+onlysoversion = shared_library('onlysoversion', 'lib.c',
+ # Also test that int soversion is acceptable
+ soversion : 5,
+ install : true)
+
+# Hack to make the executables below depend on the shared libraries above
+# without actually adding them as `link_with` dependencies since we want to try
+# linking to them with -lfoo linker arguments.
+cp = find_program('copyfile.py')
+out = custom_target('library-dependency-hack',
+ input : 'exe.orig.c',
+ output : 'exe.c',
+ depends : [some, noversion, onlyversion, onlysoversion],
+ command : [cp, '@INPUT@', '@OUTPUT@'])
+
+# Manually test if the linker can find the above libraries
+# i.e., whether they were generated with the right naming scheme
+test('manually linked 1', executable('manuallink1', out,
+ link_args : ['-L.', '-lsome']))
+
+test('manually linked 2', executable('manuallink2', out,
+ link_args : ['-L.', '-lnoversion']))
+
+test('manually linked 3', executable('manuallink3', out,
+ link_args : ['-L.', '-lonlyversion']))
+
+test('manually linked 4', executable('manuallink4', out,
+ link_args : ['-L.', '-lonlysoversion']))
+
+shared_library('customdir', 'lib.c',
+ install : true,
+ install_dir : get_option('libexecdir'))
+
+shared_module('module', 'lib.c', install : true)
--- /dev/null
+project('find program', 'c')
+
+# Test that we can find native windows executables
+find_program('cmd')
+find_program('cmd.exe')
+
+# Test that a script file with an extension can be found
+ext = find_program('test-script-ext.py')
+test('ext', ext)
+# Test that a script file without an extension can be found
+prog = find_program('test-script')
+test('script', prog)
--- /dev/null
+#!/usr/bin/env python
+
+print('1')
--- /dev/null
+#!/usr/bin/env python3
+
+print('ext/noext')
--- /dev/null
+#!/usr/bin/env python3
+
+# Copyright 2015 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+help_message = """Usage: %s <config.h.meson>
+
+This script reads config.h.meson, looks for header
+checks and writes the corresponding meson declaration.
+
+Copy config.h.in to config.h.meson, replace #undef
+with #mesondefine and run this. We can't do this automatically
+because some configure scripts have #undef statements
+that are unrelated to configure checks.
+"""
+
+import sys
+
+
# Add stuff here as it is encountered.
# Maps an autoconf-style HAVE_* macro to the (function, declaring header)
# pair that cc.has_function() needs to reproduce the check. Headers follow
# POSIX where applicable; entries marked "was:" are fixes to the original
# table (typos and wrong declaring headers would have produced permanently
# failing checks, i.e. silent false negatives).
function_data = {
    'HAVE_FEENABLEEXCEPT': ('feenableexcept', 'fenv.h'),
    'HAVE_FECLEAREXCEPT': ('feclearexcept', 'fenv.h'),
    'HAVE_FEDISABLEEXCEPT': ('fedisableexcept', 'fenv.h'),
    'HAVE_MMAP': ('mmap', 'sys/mman.h'),
    'HAVE_GETPAGESIZE': ('getpagesize', 'unistd.h'),
    'HAVE_GETISAX': ('getisax', 'sys/auxv.h'),
    'HAVE_GETTIMEOFDAY': ('gettimeofday', 'sys/time.h'),
    'HAVE_MPROTECT': ('mprotect', 'sys/mman.h'),
    'HAVE_POSIX_MEMALIGN': ('posix_memalign', 'stdlib.h'),
    'HAVE_SIGACTION': ('sigaction', 'signal.h'),
    'HAVE_ALARM': ('alarm', 'unistd.h'),
    'HAVE_CTIME_R': ('ctime_r', 'time.h'),
    'HAVE_DRAND48': ('drand48', 'stdlib.h'),
    'HAVE_FLOCKFILE': ('flockfile', 'stdio.h'),
    'HAVE_FORK': ('fork', 'unistd.h'),
    'HAVE_FUNLOCKFILE': ('funlockfile', 'stdio.h'),
    'HAVE_GETLINE': ('getline', 'stdio.h'),
    'HAVE_LINK': ('link', 'unistd.h'),
    'HAVE_RAISE': ('raise', 'signal.h'),
    'HAVE_STRNDUP': ('strndup', 'string.h'),
    'HAVE_SCHED_GETAFFINITY': ('sched_getaffinity', 'sched.h'),
    'HAVE_WAITPID': ('waitpid', 'sys/wait.h'),
    # NOTE(review): XRender* functions are Xlib API; 'xcb/render.h' looks
    # suspect (the XCB render header declares xcb_render_*) — kept as-is,
    # verify against the projects that use these macros.
    'HAVE_XRENDERCREATECONICALGRADIENT': ('XRenderCreateConicalGradient', 'xcb/render.h'),
    'HAVE_XRENDERCREATELINEARGRADIENT': ('XRenderCreateLinearGradient', 'xcb/render.h'),
    'HAVE_XRENDERCREATERADIALGRADIENT': ('XRenderCreateRadialGradient', 'xcb/render.h'),
    'HAVE_XRENDERCREATESOLIDFILL': ('XRenderCreateSolidFill', 'xcb/render.h'),
    'HAVE_DCGETTEXT': ('dcgettext', 'libintl.h'),
    'HAVE_ENDMNTENT': ('endmntent', 'mntent.h'),
    'HAVE_ENDSERVENT': ('endservent', 'netdb.h'),
    'HAVE_EVENTFD': ('eventfd', 'sys/eventfd.h'),
    'HAVE_FALLOCATE': ('fallocate', 'fcntl.h'),
    'HAVE_FCHMOD': ('fchmod', 'sys/stat.h'),
    'HAVE_FCHOWN': ('fchown', 'unistd.h'),
    'HAVE_FDWALK': ('fdwalk', 'stdlib.h'),
    'HAVE_FSYNC': ('fsync', 'unistd.h'),
    'HAVE_GETC_UNLOCKED': ('getc_unlocked', 'stdio.h'),
    'HAVE_GETFSSTAT': ('getfsstat', 'sys/mount.h'),
    'HAVE_GETMNTENT_R': ('getmntent_r', 'mntent.h'),
    'HAVE_GETPROTOBYNAME_R': ('getprotobyname_r', 'netdb.h'),
    'HAVE_GETRESUID': ('getresuid', 'unistd.h'),
    'HAVE_GETVFSSTAT': ('getvfsstat', 'sys/statvfs.h'),
    'HAVE_GMTIME_R': ('gmtime_r', 'time.h'),
    'HAVE_HASMNTOPT': ('hasmntopt', 'mntent.h'),
    'HAVE_IF_INDEXTONAME': ('if_indextoname', 'net/if.h'),
    'HAVE_IF_NAMETOINDEX': ('if_nametoindex', 'net/if.h'),
    'HAVE_INOTIFY_INIT1': ('inotify_init1', 'sys/inotify.h'),
    'HAVE_ISSETUGID': ('issetugid', 'unistd.h'),
    'HAVE_KEVENT': ('kevent', 'sys/event.h'),
    'HAVE_KQUEUE': ('kqueue', 'sys/event.h'),
    'HAVE_LCHMOD': ('lchmod', 'sys/stat.h'),
    'HAVE_LCHOWN': ('lchown', 'unistd.h'),
    'HAVE_LSTAT': ('lstat', 'sys/stat.h'),
    'HAVE_MEMCPY': ('memcpy', 'string.h'),
    'HAVE_MEMALIGN': ('memalign', 'stdlib.h'),
    'HAVE_MEMMEM': ('memmem', 'string.h'),
    'HAVE_NEWLOCALE': ('newlocale', 'locale.h'),
    'HAVE_PIPE2': ('pipe2', 'fcntl.h'),
    'HAVE_POLL': ('poll', 'poll.h'),
    'HAVE_PRLIMIT': ('prlimit', 'sys/resource.h'),
    'HAVE_PTHREAD_ATTR_SETSTACKSIZE': ('pthread_attr_setstacksize', 'pthread.h'),
    'HAVE_PTHREAD_CONDATTR_SETCLOCK': ('pthread_condattr_setclock', 'pthread.h'),
    'HAVE_PTHREAD_COND_TIMEDWAIT_RELATIVE_NP': ('pthread_cond_timedwait_relative_np', 'pthread.h'),
    'HAVE_READLINK': ('readlink', 'unistd.h'),
    'HAVE_RES_INIT': ('res_init', 'resolv.h'),
    'HAVE_SENDMMSG': ('sendmmsg', 'sys/socket.h'),
    'HAVE_SOCKET': ('socket', 'sys/socket.h'),
    'HAVE_GETENV': ('getenv', 'stdlib.h'),
    'HAVE_SETENV': ('setenv', 'stdlib.h'),
    'HAVE_PUTENV': ('putenv', 'stdlib.h'),
    'HAVE_UNSETENV': ('unsetenv', 'stdlib.h'),
    'HAVE_SETMNTENT': ('setmntent', 'mntent.h'),
    'HAVE_SNPRINTF': ('snprintf', 'stdio.h'),
    'HAVE_SPLICE': ('splice', 'fcntl.h'),
    'HAVE_STATFS': ('statfs', 'mount.h'),
    'HAVE_STATVFS': ('statvfs', 'sys/statvfs.h'),
    # NOTE(review): the C function is spelled stpcpy (macro HAVE_STPCPY);
    # kept as-is to match whatever config files already use this token.
    'HAVE_STPCOPY': ('stpcopy', 'string.h'),
    'HAVE_STRCASECMP': ('strcasecmp', 'strings.h'),
    'HAVE_STRLCPY': ('strlcpy', 'string.h'),
    'HAVE_STRNCASECMP': ('strncasecmp', 'strings.h'),
    'HAVE_STRSIGNAL': ('strsignal', 'signal.h'),
    'HAVE_STRTOD_L': ('strtod_l', 'stdlib.h'),
    'HAVE_STRTOLL_L': ('strtoll_l', 'stdlib.h'),
    'HAVE_STRTOULL_L': ('strtoull_l', 'stdlib.h'),
    'HAVE_SYMLINK': ('symlink', 'unistd.h'),
    'HAVE_SYSCTLBYNAME': ('sysctlbyname', 'sys/sysctl.h'),
    'HAVE_TIMEGM': ('timegm', 'time.h'),
    'HAVE_USELOCALE': ('uselocale', 'xlocale.h'),
    'HAVE_UTIMES': ('utimes', 'sys/time.h'),
    'HAVE_VALLOC': ('valloc', 'stdlib.h'),
    'HAVE_VASPRINTF': ('vasprintf', 'stdio.h'),
    'HAVE_VSNPRINTF': ('vsnprintf', 'stdio.h'),
    'HAVE_BCOPY': ('bcopy', 'strings.h'),
    'HAVE_STRERROR': ('strerror', 'string.h'),
    'HAVE_MEMMOVE': ('memmove', 'string.h'),
    'HAVE_STRTOIMAX': ('strtoimax', 'inttypes.h'),
    'HAVE_STRTOLL': ('strtoll', 'stdlib.h'),
    'HAVE_STRTOQ': ('strtoq', 'stdlib.h'),
    'HAVE_ACCEPT4': ('accept4', 'sys/socket.h'),
    'HAVE_CHMOD': ('chmod', 'sys/stat.h'),
    'HAVE_CHOWN': ('chown', 'unistd.h'),
    'HAVE_FSTAT': ('fstat', 'sys/stat.h'),
    'HAVE_GETADDRINFO': ('getaddrinfo', 'netdb.h'),
    'HAVE_GETGRGID_R': ('getgrgid_r', 'grp.h'),
    'HAVE_GETGRNAM_R': ('getgrnam_r', 'grp.h'),
    'HAVE_GETGROUPS': ('getgroups', 'grp.h'),
    'HAVE_GETOPT_LONG': ('getopt_long', 'getopt.h'),
    'HAVE_GETPWNAM_R': ('getpwnam_r', 'pwd.h'),  # was: 'getpwnam' (typo)
    'HAVE_GETPWUID_R': ('getpwuid_r', 'pwd.h'),
    'HAVE_GETUID': ('getuid', 'unistd.h'),
    'HAVE_LRINTF': ('lrintf', 'math.h'),
    'HAVE_DECL_ISNAN': ('isnan', 'math.h'),
    'HAVE_DECL_ISINF': ('isinf', 'math.h'),
    'HAVE_ROUND': ('round', 'math.h'),
    'HAVE_NEARBYINT': ('nearbyint', 'math.h'),
    'HAVE_RINT': ('rint', 'math.h'),
    'HAVE_MKFIFO': ('mkfifo', 'sys/stat.h'),
    'HAVE_MLOCK': ('mlock', 'sys/mman.h'),
    'HAVE_NANOSLEEP': ('nanosleep', 'time.h'),
    'HAVE_PIPE': ('pipe', 'unistd.h'),
    'HAVE_PPOLL': ('ppoll', 'poll.h'),
    'HAVE_REGEXEC': ('regexec', 'regex.h'),
    'HAVE_SETEGID': ('setegid', 'unistd.h'),
    'HAVE_SETEUID': ('seteuid', 'unistd.h'),
    'HAVE_SETPGID': ('setpgid', 'unistd.h'),
    'HAVE_SETREGID': ('setregid', 'unistd.h'),
    'HAVE_SETRESGID': ('setresgid', 'unistd.h'),
    'HAVE_SETRESUID': ('setresuid', 'unistd.h'),
    'HAVE_SHM_OPEN': ('shm_open', 'fcntl.h'),
    'HAVE_SLEEP': ('sleep', 'unistd.h'),
    'HAVE_STRERROR_R': ('strerror_r', 'string.h'),
    'HAVE_STRTOF': ('strtof', 'stdlib.h'),
    'HAVE_SYSCONF': ('sysconf', 'unistd.h'),
    'HAVE_USLEEP': ('usleep', 'unistd.h'),
    'HAVE_VFORK': ('vfork', 'unistd.h'),
    'HAVE_MALLOC': ('malloc', 'stdlib.h'),
    'HAVE_CALLOC': ('calloc', 'stdlib.h'),
    'HAVE_REALLOC': ('realloc', 'stdlib.h'),
    'HAVE_FREE': ('free', 'stdlib.h'),
    'HAVE_ALLOCA': ('alloca', 'alloca.h'),
    'HAVE_QSORT': ('qsort', 'stdlib.h'),
    'HAVE_ABS': ('abs', 'stdlib.h'),
    'HAVE_MEMSET': ('memset', 'string.h'),
    'HAVE_MEMCMP': ('memcmp', 'string.h'),
    'HAVE_STRLEN': ('strlen', 'string.h'),
    'HAVE_STRLCAT': ('strlcat', 'string.h'),
    'HAVE_STRDUP': ('strdup', 'string.h'),
    'HAVE__STRREV': ('_strrev', 'string.h'),
    'HAVE__STRUPR': ('_strupr', 'string.h'),
    'HAVE__STRLWR': ('_strlwr', 'string.h'),
    'HAVE_INDEX': ('index', 'strings.h'),
    'HAVE_RINDEX': ('rindex', 'strings.h'),
    'HAVE_STRCHR': ('strchr', 'string.h'),
    'HAVE_STRRCHR': ('strrchr', 'string.h'),
    'HAVE_STRSTR': ('strstr', 'string.h'),
    'HAVE_STRTOL': ('strtol', 'stdlib.h'),
    'HAVE_STRTOUL': ('strtoul', 'stdlib.h'),
    'HAVE_STRTOULL': ('strtoull', 'stdlib.h'),
    'HAVE_STRTOD': ('strtod', 'stdlib.h'),
    'HAVE_ATOI': ('atoi', 'stdlib.h'),
    'HAVE_ATOF': ('atof', 'stdlib.h'),
    'HAVE_STRCMP': ('strcmp', 'string.h'),
    'HAVE_STRNCMP': ('strncmp', 'string.h'),
    'HAVE_VSSCANF': ('vsscanf', 'stdio.h'),
    'HAVE_CHROOT': ('chroot', 'unistd.h'),
    'HAVE_CLOCK': ('clock', 'time.h'),
    'HAVE_CLOCK_GETRES': ('clock_getres', 'time.h'),
    'HAVE_CLOCK_GETTIME': ('clock_gettime', 'time.h'),
    'HAVE_CLOCK_SETTIME': ('clock_settime', 'time.h'),
    'HAVE_CONFSTR': ('confstr', 'unistd.h'),  # was: time.h; POSIX declares confstr in unistd.h
    'HAVE_CTERMID': ('ctermid', 'stdio.h'),
    'HAVE_DIRFD': ('dirfd', 'dirent.h'),
    'HAVE_DLOPEN': ('dlopen', 'dlfcn.h'),
    'HAVE_DUP2': ('dup2', 'unistd.h'),
    'HAVE_DUP3': ('dup3', 'unistd.h'),
    'HAVE_EPOLL_CREATE1': ('epoll_create1', 'sys/epoll.h'),
    'HAVE_ERF': ('erf', 'math.h'),
    'HAVE_ERFC': ('erfc', 'math.h'),
    'HAVE_EXECV': ('execv', 'unistd.h'),
    'HAVE_FACCESSAT': ('faccessat', 'unistd.h'),
    'HAVE_FCHDIR': ('fchdir', 'unistd.h'),
    'HAVE_FCHMODAT': ('fchmodat', 'sys/stat.h'),
    'HAVE_FDATASYNC': ('fdatasync', 'unistd.h'),
    'HAVE_FDOPENDIR': ('fdopendir', 'dirent.h'),
    'HAVE_FEXECVE': ('fexecve', 'unistd.h'),
    'HAVE_FLOCK': ('flock', 'sys/file.h'),
    'HAVE_FORKPTY': ('forkpty', 'pty.h'),
    'HAVE_FPATHCONF': ('fpathconf', 'unistd.h'),
    'HAVE_FSTATAT': ('fstatat', 'sys/stat.h'),  # was: unistd.h; POSIX declares fstatat in sys/stat.h
    'HAVE_FSTATVFS': ('fstatvfs', 'sys/statvfs.h'),
    'HAVE_FTELLO': ('ftello', 'stdio.h'),
    'HAVE_FTIME': ('ftime', 'sys/timeb.h'),
    'HAVE_FTRUNCATE': ('ftruncate', 'unistd.h'),
    'HAVE_FUTIMENS': ('futimens', 'sys/stat.h'),
    'HAVE_FUTIMES': ('futimes', 'sys/time.h'),
    'HAVE_GAI_STRERROR': ('gai_strerror', 'netdb.h'),
    'HAVE_GETGROUPLIST': ('getgrouplist', 'grp.h'),
    'HAVE_GETHOSTBYNAME': ('gethostbyname', 'netdb.h'),
    'HAVE_GETHOSTBYNAME_R': ('gethostbyname_r', 'netdb.h'),
    'HAVE_GETITIMER': ('getitimer', 'sys/time.h'),
    'HAVE_GETLOADAVG': ('getloadavg', 'stdlib.h'),
    'HAVE_GETLOGIN': ('getlogin', 'unistd.h'),
    'HAVE_GETNAMEINFO': ('getnameinfo', 'netdb.h'),
    'HAVE_GETPEERNAME': ('getpeername', 'sys/socket.h'),
    'HAVE_GETPGID': ('getpgid', 'unistd.h'),
    'HAVE_GETPGRP': ('getpgrp', 'unistd.h'),
    'HAVE_GETPID': ('getpid', 'unistd.h'),
    'HAVE_GETPRIORITY': ('getpriority', 'sys/resource.h'),
    'HAVE_GETPWENT': ('getpwent', 'pwd.h'),
    'HAVE_GETRANDOM': ('getrandom', 'linux/random.h'),
    'HAVE_GETRESGID': ('getresgid', 'unistd.h'),
    'HAVE_GETSID': ('getsid', 'unistd.h'),
    'HAVE_GETSPENT': ('getspent', 'shadow.h'),
    'HAVE_GETSPNAM': ('getspnam', 'shadow.h'),
    'HAVE_GETWD': ('getwd', 'unistd.h'),
    'HAVE_HSTRERROR': ('hstrerror', 'netdb.h'),
    'HAVE_HTOLE64': ('htole64', 'endian.h'),
    'HAVE_IF_NAMEINDEX': ('if_nameindex', 'net/if.h'),
    'HAVE_INET_ATON': ('inet_aton', 'arpa/inet.h'),
    'HAVE_INET_PTON': ('inet_pton', 'arpa/inet.h'),
    'HAVE_INITGROUPS': ('initgroups', 'grp.h'),
    'HAVE_KILL': ('kill', 'signal.h'),
    'HAVE_KILLPG': ('killpg', 'signal.h'),
    'HAVE_LINKAT': ('linkat', 'unistd.h'),
    'HAVE_LOCKF': ('lockf', 'unistd.h'),
    'HAVE_LUTIMES': ('lutimes', 'sys/time.h'),
    'HAVE_MAKEDEV': ('makedev', 'sys/sysmacros.h'),
    'HAVE_MBRTOWC': ('mbrtowc', 'wchar.h'),
    'HAVE_MEMRCHR': ('memrchr', 'string.h'),
    'HAVE_MKDIRAT': ('mkdirat', 'sys/stat.h'),
    'HAVE_MKFIFOAT': ('mkfifoat', 'sys/stat.h'),
    'HAVE_MKNOD': ('mknod', 'sys/stat.h'),  # was: unistd.h; POSIX declares mknod in sys/stat.h
    'HAVE_MKNODAT': ('mknodat', 'sys/stat.h'),  # was: unistd.h
    'HAVE_MKTIME': ('mktime', 'time.h'),  # was: unistd.h; mktime lives in time.h
    'HAVE_MREMAP': ('mremap', 'sys/mman.h'),  # was: HAVE_MKREMAP/'mkremap' (typo; no such symbol)
    'HAVE_NICE': ('nice', 'unistd.h'),
    'HAVE_OPENAT': ('openat', 'fcntl.h'),
    'HAVE_OPENPTY': ('openpty', 'pty.h'),
    'HAVE_PATHCONF': ('pathconf', 'unistd.h'),
    'HAVE_PAUSE': ('pause', 'unistd.h'),
    'HAVE_PREAD': ('pread', 'unistd.h'),
    'HAVE_PTHREAD_KILL': ('pthread_kill', 'signal.h'),
    'HAVE_PTHREAD_SIGMASK': ('pthread_sigmask', 'signal.h'),
    'HAVE_PWRITE': ('pwrite', 'unistd.h'),
    'HAVE_READLINKAT': ('readlinkat', 'unistd.h'),
    'HAVE_READV': ('readv', 'sys/uio.h'),
    'HAVE_RENAMEAT': ('renameat', 'stdio.h'),  # was: 'renamat' (typo)
    'HAVE_SCHED_GET_PRIORITY_MAX': ('sched_get_priority_max', 'sched.h'),
    'HAVE_SCHED_RR_GET_INTERVAL': ('sched_rr_get_interval', 'sched.h'),
    'HAVE_SCHED_SETAFFINITY': ('sched_setaffinity', 'sched.h'),
    'HAVE_SCHED_SETPARAM': ('sched_setparam', 'sched.h'),
    'HAVE_SCHED_SETSCHEDULER': ('sched_setscheduler', 'sched.h'),
    'HAVE_SELECT': ('select', 'sys/select.h'),
    'HAVE_SEM_GETVALUE': ('sem_getvalue', 'semaphore.h'),
    'HAVE_SEM_OPEN': ('sem_open', 'semaphore.h'),
    'HAVE_SEM_TIMEDWAIT': ('sem_timedwait', 'semaphore.h'),
    'HAVE_SEM_UNLINK': ('sem_unlink', 'semaphore.h'),
    'HAVE_SENDFILE': ('sendfile', 'sys/sendfile.h'),
    'HAVE_SETGID': ('setgid', 'unistd.h'),
    'HAVE_SETGROUPS': ('setgroups', 'grp.h'),
    'HAVE_SETHOSTNAME': ('sethostname', 'unistd.h'),
    'HAVE_SETITIMER': ('setitimer', 'sys/time.h'),
    'HAVE_SETLOCALE': ('setlocale', 'locale.h'),
    'HAVE_SETPGRP': ('setpgrp', 'unistd.h'),
    'HAVE_SETPRIORITY': ('setpriority', 'sys/resource.h'),
    'HAVE_SETREUID': ('setreuid', 'unistd.h'),
    'HAVE_SETSID': ('setsid', 'unistd.h'),
    'HAVE_SETUID': ('setuid', 'unistd.h'),
    'HAVE_SETVBUF': ('setvbuf', 'stdio.h'),  # was: unistd.h; setvbuf is a stdio function
    'HAVE_SIGALTSTACK': ('sigaltstack', 'signal.h'),
    'HAVE_SIGINTERRUPT': ('siginterrupt', 'signal.h'),
    'HAVE_SIGPENDING': ('sigpending', 'signal.h'),
    'HAVE_SIGRELSE': ('sigrelse', 'signal.h'),
    'HAVE_SIGTIMEDWAIT': ('sigtimedwait', 'signal.h'),
    'HAVE_SIGWAIT': ('sigwait', 'signal.h'),
    'HAVE_SIGWAITINFO': ('sigwaitinfo', 'signal.h'),
    'HAVE_SOCKETPAIR': ('socketpair', 'sys/socket.h'),
    'HAVE_STRFTIME': ('strftime', 'time.h'),
    'HAVE_SYMLINKAT': ('symlinkat', 'unistd.h'),
    'HAVE_SYNC': ('sync', 'unistd.h'),
    'HAVE_TCGETPGRP': ('tcgetpgrp', 'unistd.h'),
    'HAVE_TCSETPGRP': ('tcsetpgrp', 'unistd.h'),
    'HAVE_TEMPNAM': ('tempnam', 'stdio.h'),
    'HAVE_TIMES': ('times', 'sys/times.h'),
    'HAVE_TEMPFILE': ('tempfile', 'stdio.h'),
    'HAVE_TMPNAM': ('tmpnam', 'stdio.h'),
    'HAVE_TMPNAM_R': ('tmpnam_r', 'stdio.h'),
    'HAVE_TRUNCATE': ('truncate', 'unistd.h'),
    'HAVE_TZNAME': ('tzname', 'time.h'),
    'HAVE_UNAME': ('uname', 'sys/utsname.h'),
    'HAVE_UNLINKAT': ('unlinkat', 'unistd.h'),
    'HAVE_UTIMENSAT': ('utimensat', 'sys/stat.h'),
    'HAVE_WAIT3': ('wait3', 'sys/wait.h'),
    'HAVE_WAIT4': ('wait4', 'sys/wait.h'),
    'HAVE_WAITID': ('waitid', 'sys/wait.h'),
    'HAVE_WRITEV': ('writev', 'sys/uio.h'),
    'HAVE_WMEMCMP': ('wmemcmp', 'wchar.h'),
    'HAVE_ATAN': ('atan', 'math.h'),
    'HAVE_ATAN2': ('atan2', 'math.h'),
    'HAVE_ACOS': ('acos', 'math.h'),
    'HAVE_ACOSH': ('acosh', 'math.h'),
    'HAVE_ASIN': ('asin', 'math.h'),
    'HAVE_ASINH': ('asinh', 'math.h'),
    'HAVE_ATANH': ('atanh', 'math.h'),
    'HAVE_CEIL': ('ceil', 'math.h'),
    'HAVE_COPYSIGN': ('copysign', 'math.h'),
    'HAVE_COS': ('cos', 'math.h'),
    'HAVE_COSH': ('cosh', 'math.h'),
    'HAVE_COSF': ('cosf', 'math.h'),
    'HAVE_EXPM1': ('expm1', 'math.h'),
    'HAVE_FABS': ('fabs', 'math.h'),
    'HAVE_FINITE': ('finite', 'math.h'),
    'HAVE_FLOOR': ('floor', 'math.h'),
    'HAVE_GAMMA': ('gamma', 'math.h'),
    'HAVE_HYPOT': ('hypot', 'math.h'),
    'HAVE_ISINF': ('isinf', 'math.h'),
    'HAVE_LOG': ('log', 'math.h'),
    'HAVE_LOG1P': ('log1p', 'math.h'),
    'HAVE_LOG2': ('log2', 'math.h'),
    'HAVE_LGAMMA': ('lgamma', 'math.h'),
    'HAVE_POW': ('pow', 'math.h'),
    'HAVE_SCALBN': ('scalbn', 'math.h'),
    'HAVE_SIN': ('sin', 'math.h'),
    'HAVE_SINF': ('sinf', 'math.h'),
    'HAVE_SINH': ('sinh', 'math.h'),
    'HAVE_SQRT': ('sqrt', 'math.h'),
    'HAVE_TGAMMA': ('tgamma', 'math.h'),
    'HAVE_FSEEKO': ('fseeko', 'stdio.h'),
    'HAVE_FSEEKO64': ('fseeko64', 'stdio.h'),
    'HAVE_SETJMP': ('setjmp', 'setjmp.h'),
    'HAVE_PTHREAD_SETNAME_NP': ('pthread_setname_np', 'pthread.h'),
    'HAVE_PTHREAD_SET_NAME_NP': ('pthread_set_name_np', 'pthread.h'),
    }
+
# Results collected while scanning config.h.meson.
headers, functions, sizes = [], [], []

# Exactly one argument (the path to config.h.meson) is required;
# otherwise show the usage text and stop.
if len(sys.argv) != 2:
    print(help_message % sys.argv[0])
    sys.exit(0)
+
# Scan the input file line by line, collecting three kinds of checks:
# header presence (#mesondefine ..._H), function presence (HAVE_...)
# and type sizes (SIZEOF_...).
with open(sys.argv[1]) as f:
    for line in f:
        line = line.strip()
        arr = line.split()

        # Check for headers.
        # '#mesondefine HAVE_SYS_MMAN_H' -> ('HAVE_SYS_MMAN_H', 'sys/mman.h'):
        # drop the leading HAVE and trailing H, lowercase, join with '/'.
        if line.startswith('#mesondefine') and line.endswith('_H'):
            token = line.split()[1]
            tarr = token.split('_')[1:-1]
            tarr = [x.lower() for x in tarr]
            hname = '/'.join(tarr) + '.h'
            headers.append((token, hname))

        # Check for functions.
        # Macros present in function_data become (TOKEN, func, header)
        # triples; unknown HAVE_* macros are kept as 1-tuples so they can
        # be reported for manual follow-up in the generated output.
        try:
            token = arr[1]
            if token in function_data:
                fdata = function_data[token]
                functions.append((token, fdata[0], fdata[1]))
            elif token.startswith('HAVE_') and not token.endswith('_H'):
                functions.append((token, ))
        except Exception:
            # Deliberate best-effort: blank or one-word lines have no
            # arr[1]; anything unparseable is simply skipped.
            pass

        # Check for sizeof tests.
        # e.g. SIZEOF_SIZE_T -> 'size_t', SIZEOF_VOID_P -> 'void*'
        # (the '_P' suffix denotes a pointer type).
        if len(arr) != 2:
            continue
        elem = arr[1]
        if elem.startswith('SIZEOF_'):
            typename = elem.split('_', 1)[1] \
                .replace('_P', '*') \
                .replace('_', ' ') \
                .lower() \
                .replace('size t', 'size_t')
            sizes.append((elem, typename))
+
# Emit a ready-to-paste Meson snippet on stdout: a C compiler object plus a
# configuration_data() populated from the checks collected above. The exact
# text of every print() below IS the program's output — do not reflow.
print('''cc = meson.get_compiler('c')
cdata = configuration_data()''')

# Convert header checks.

print('check_headers = [')
for token, hname in headers:
    print(" ['%s', '%s']," % (token, hname))
print(']\n')

print('''foreach h : check_headers
 if cc.has_header(h.get(1))
 cdata.set(h.get(0), 1)
 endif
endforeach
''')

# Convert function checks.

print('check_functions = [')
for token in functions:
    # 3-tuples are known functions with their declaring header; 1-tuples
    # are unrecognized HAVE_ macros, emitted as a comment for manual review.
    if len(token) == 3:
        token, fdata0, fdata1 = token
        print(" ['%s', '%s', '#include<%s>']," % (token, fdata0, fdata1))
    else:
        print('# check token', token)
print(']\n')

print('''foreach f : check_functions
 if cc.has_function(f.get(1), prefix : f.get(2))
 cdata.set(f.get(0), 1)
 endif
endforeach
''')

# Convert sizeof checks.

for elem, typename in sizes:
    print("cdata.set('%s', cc.sizeof('%s'))" % (elem, typename))

print('''
configure_file(input : 'config.h.meson',
 output : 'config.h',
 configuration : cdata)''')
--- /dev/null
+#!/usr/bin/env python3
+
+# Copyright 2017 Niklas Claesson
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This is two implementations for how to get module names from the boost
+sources. One relies on json metadata files in the sources, the other relies on
+the folder names.
+
+Run the tool in the boost directory and append the stdout to the misc.py:
+
+boost/$ path/to/meson/tools/boost_names.py >> path/to/meson/dependencies/misc.py
+"""
+
+import sys
+import os
+import collections
+import pprint
+import json
+import re
+
# A Boost module: its source directory name, its human-readable name, and
# the binary library names it builds (empty for header-only modules).
Module = collections.namedtuple('Module', ['dirname', 'name', 'libnames'])
Module.__repr__ = lambda self: str((self.dirname, self.name, self.libnames))

# Boost sources keep all modules under this directory; the script must be
# run from the boost source root.
LIBS = 'libs'

# Directory names whose documented pretty name cannot be derived by simple
# title-casing of the directory name.
manual_map = {
    'callable_traits': 'Call Traits',
    'crc': 'CRC',
    'dll': 'DLL',
    'gil': 'GIL',
    'graph_parallel': 'GraphParallel',
    'icl': 'ICL',
    'io': 'IO State Savers',
    'msm': 'Meta State Machine',
    'mpi': 'MPI',
    'mpl': 'MPL',
    'multi_array': 'Multi-Array',
    'multi_index': 'Multi-Index',
    'numeric': 'Numeric Conversion',
    'ptr_container': 'Pointer Container',
    'poly_collection': 'PolyCollection',
    'qvm': 'QVM',
    'throw_exception': 'ThrowException',
    'tti': 'TTI',
    'vmd': 'VMD',
}

# Modules that live inside another module's directory (so a directory scan
# would not discover them); seeded into get_modules()' result.
extra = [
    Module('utility', 'Compressed Pair', []),
    Module('core', 'Enable If', []),
    Module('functional', 'Functional/Factory', []),
    Module('functional', 'Functional/Forward', []),
    Module('functional', 'Functional/Hash', []),
    Module('functional', 'Functional/Overloaded Function', []),
    Module('utility', 'Identity Type', []),
    Module('utility', 'In Place Factory, Typed In Place Factory', []),
    Module('numeric', 'Interval', []),
    Module('math', 'Math Common Factor', []),
    Module('math', 'Math Octonion', []),
    Module('math', 'Math Quaternion', []),
    Module('math', 'Math/Special Functions', []),
    Module('math', 'Math/Statistical Distributions', []),
    Module('bind', 'Member Function', []),
    Module('algorithm', 'Min-Max', []),
    Module('numeric', 'Odeint', []),
    Module('utility', 'Operators', []),
    Module('core', 'Ref', []),
    Module('utility', 'Result Of', []),
    Module('algorithm', 'String Algo', []),
    Module('core', 'Swap', []),
    Module('', 'Tribool', []),
    Module('numeric', 'uBLAS', []),
    Module('utility', 'Value Initialized', []),
]

# Cannot find the following modules in the documentation of boost
not_modules = ['beast', 'logic', 'mp11', 'winapi']
+
def eprint(message):
    """Write *message* (plus a newline) to standard error."""
    sys.stderr.write('{}\n'.format(message))
+
def get_library_names(jamfile):
    """Return the binary library names declared in the Boost.Build *jamfile*.

    Scans for ``lib`` and ``boost-lib`` declarations; only declarations
    with a ``:``-separated requirements section count.  ``boost-lib``
    names receive the conventional ``boost_`` prefix.  All plain ``lib``
    matches come before the ``boost-lib`` matches, mirroring two passes
    over the file.
    """
    with open(jamfile) as jamfh:
        jam = jamfh.read()
    found = []
    passes = [
        (r'^lib[\s]+([A-Za-z0-9_]+)([^;]*);', '{}'),
        (r'^boost-lib[\s]+([A-Za-z0-9_]+)([^;]*);', 'boost_{}'),
    ]
    for pattern, template in passes:
        for m in re.finditer(pattern, jam, re.MULTILINE | re.DOTALL):
            if ':' in m.group(2):
                found.append(template.format(m.group(1)))
    return found
+
def exists(modules, module):
    """Return True if *modules* already has an entry for *module*'s directory.

    Uses any() with a generator so the scan short-circuits instead of
    materializing a throwaway list just to take its length.
    """
    return any(x.dirname == module.dirname for x in modules)
+
def get_modules(init=extra):
    """Guess modules from the directory names under ``libs/``.

    Library names are read from each module's Jamfile when one exists;
    pretty names come from ``manual_map``, or from title-casing the
    directory name.  Entries from *init* are included in the result.
    """
    # Copy *init*: the default is the module-level 'extra' list, and the
    # original code appended straight into it, so repeated calls would
    # accumulate entries in the shared default (mutable-default bug).
    modules = list(init)
    for directory in os.listdir(LIBS):
        if not os.path.isdir(os.path.join(LIBS, directory)):
            continue
        if directory in not_modules:
            continue
        jamfile = os.path.join(LIBS, directory, 'build', 'Jamfile.v2')
        if os.path.isfile(jamfile):
            libs = get_library_names(jamfile)
        else:
            libs = []
        # Prefer the hand-maintained pretty name where title-casing is wrong.
        if directory in manual_map:
            modname = manual_map[directory]
        else:
            modname = directory.replace('_', ' ').title()
        modules.append(Module(directory, modname, libs))
    return modules
+
def get_modules_2():
    """Collect modules from the meta/libraries.json files under ``libs/``.

    Falls back to directory-name guessing (via get_modules) for
    subprojects that ship no json metadata.
    """
    # The python module uses an older build system format and is not easily
    # parseable, so its libraries are listed by hand.
    modules = [Module('python', 'Python',
                      ['boost_python', 'boost_python3', 'boost_numpy', 'boost_numpy3'])]
    for root, _, files in os.walk(LIBS):
        if 'libraries.json' not in files:
            continue
        # root is the module's meta/ directory; its parent is the project dir.
        projectdir = os.path.dirname(root)

        jamfile = os.path.join(projectdir, 'build', 'Jamfile.v2')
        if os.path.isfile(jamfile):
            libs = get_library_names(jamfile)
        else:
            libs = []

        # Get metadata for the module; a file may hold one dict or a list.
        with open(os.path.join(root, 'libraries.json')) as jsonfh:
            boost_modules = json.load(jsonfh)
        if isinstance(boost_modules, dict):
            boost_modules = [boost_modules]
        for boost_module in boost_modules:
            modules.append(Module(boost_module['key'], boost_module['name'], libs))

    # Some subprojects do not have a meta directory with a json file; guess those.
    jsonless_modules = [x for x in get_modules([]) if not exists(modules, x)]
    for module in jsonless_modules:
        eprint("WARNING: {} does not have meta/libraries.json. Will guess pretty name '{}'".format(module.dirname, module.name))
    modules.extend(jsonless_modules)

    return modules
+
def main(args):
    """Print the sorted boost binary library names; *args* is sys.argv."""
    if not os.path.isdir(LIBS):
        eprint("ERROR: script must be run in boost source directory")
        # Previously execution continued and crashed later with a raw
        # traceback from os.listdir; stop with a proper error status.
        sys.exit(1)

    # It will pick jsonless algorithm if 1 is given as argument
    impl = 0
    if len(args) > 1 and args[1] == '1':
        impl = 1

    if impl == 1:
        modules = get_modules()
    else:
        modules = get_modules_2()

    # Flatten to a sorted, deduplicated-by-source list of boost_* library
    # names: sort by pretty name, keep only modules that build libraries,
    # concatenate their library lists, then drop non-boost entries.
    sorted_modules = sorted(modules, key=lambda module: module.name.lower())
    sorted_modules = [x[2] for x in sorted_modules if x[2]]
    sorted_modules = sum(sorted_modules, [])
    sorted_modules = [x for x in sorted_modules if x.startswith('boost')]

    pp = pprint.PrettyPrinter()
    pp.pprint(sorted_modules)
+
if __name__ == '__main__':
    # Pass the raw argv so main() can read the optional algorithm flag.
    main(sys.argv)
--- /dev/null
+#!/usr/bin/env python3
+
+# Copyright 2014 Jussi Pakkanen
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys, os
+import re
+
class Token:
    """A lexed token: type id, text value, and (line, column) position."""

    def __init__(self, tid, value):
        self.tid = tid
        self.value = value
        self.lineno = self.colno = 0  # positions are stamped later, default 0
+
class Statement:
    """A parsed CMake command: lower-cased name plus its argument tokens."""

    def __init__(self, name, args):
        self.name, self.args = name.lower(), args
+
class Lexer:
    """Tokenizer for CMakeLists.txt source text."""

    def __init__(self):
        self.token_specification = [
            # Need to be sorted longest to shortest.
            ('ignore', re.compile(r'[ \t]')),
            ('string', re.compile(r'"([^\\]|(\\.))*?"', re.M)),
            ('varexp', re.compile(r'\${[-_0-9a-z/A-Z.]+}')),
            ('id', re.compile('''[,-><${}=+_0-9a-z/A-Z|@.*]+''')),
            ('eol', re.compile(r'\n')),
            ('comment', re.compile(r'#.*')),
            ('lparen', re.compile(r'\(')),
            ('rparen', re.compile(r'\)')),
        ]

    def lex(self, code):
        """Yield Token objects for *code*.

        Spaces/tabs and newlines produce no tokens; newlines only advance
        the line counter.  Raises RuntimeError when no pattern matches.
        """
        lineno = 1
        line_start = 0
        loc = 0
        col = 0
        while loc < len(code):
            matched = False
            for (tid, reg) in self.token_specification:
                mo = reg.match(code, loc)
                if mo:
                    col = mo.start() - line_start
                    matched = True
                    loc = mo.end()
                    match_text = mo.group()
                    if tid == 'ignore':
                        break
                    if tid == 'eol':
                        lineno += 1
                        col = 1
                        line_start = mo.end()
                        break
                    if tid == 'comment' or tid == 'id':
                        token = Token(tid, match_text)
                    elif tid == 'lparen':
                        token = Token('lparen', '(')
                    elif tid == 'rparen':
                        token = Token('rparen', ')')
                    elif tid == 'string':
                        # Strip the surrounding double quotes.
                        token = Token('string', match_text[1:-1])
                    elif tid == 'varexp':
                        # Strip the ${ } wrapper.
                        token = Token('varexp', match_text[2:-1])
                    else:
                        raise RuntimeError('Wharrgarbl')
                    # Fix: stamp the source position onto the token.  The
                    # attributes existed but were never set, so parser
                    # errors always reported line 0 column 0.
                    token.lineno = lineno
                    token.colno = col
                    yield token
                    break
            if not matched:
                raise RuntimeError('Lexer got confused line %d column %d' % (lineno, col))
+
class Parser:
    """Recursive-descent parser turning CMake code into Statement objects."""

    def __init__(self, code):
        self.stream = Lexer().lex(code)
        self.getsym()

    def getsym(self):
        """Advance to the next token; a synthetic 'eof' token marks the end."""
        try:
            self.current = next(self.stream)
        except StopIteration:
            self.current = Token('eof', '')

    def accept(self, s):
        """Consume the current token when its type is *s*; report success."""
        if self.current.tid != s:
            return False
        self.getsym()
        return True

    def expect(self, s):
        """Like accept(), but raise RuntimeError if the token type differs."""
        if not self.accept(s):
            raise RuntimeError('Expecting %s got %s.' % (s, self.current.tid), self.current.lineno, self.current.colno)
        return True

    def statement(self):
        """Parse one statement: either a comment or a command invocation."""
        cur = self.current
        if self.accept('comment'):
            # Comments become pseudo-statements named '_'.
            return Statement('_', [cur.value])
        self.accept('id')
        self.expect('lparen')
        args = self.arguments()
        self.expect('rparen')
        return Statement(cur.value, args)

    def arguments(self):
        """Parse a (possibly empty) argument list; nested parens nest lists."""
        args = []
        if self.accept('lparen'):
            args.append(self.arguments())
            self.expect('rparen')
        arg = self.current
        if self.accept('comment'):
            # Skip inline comments and keep collecting arguments.
            args += self.arguments()
        elif self.accept('string') or self.accept('varexp') or self.accept('id'):
            args.append(arg)
            args += self.arguments()
        return args

    def parse(self):
        """Yield Statement objects until the token stream is exhausted."""
        while not self.accept('eof'):
            yield self.statement()
+
class Converter:
    """Converts a CMake project tree into meson.build files.

    Each CMakeLists.txt is parsed and translated statement by statement;
    commands without a known translation are emitted as comments, and
    option() commands are collected into meson_options.txt.
    """

    # Commands with no Meson counterpart are silently dropped.
    ignored_funcs = {'cmake_minimum_required': True,
                     'enable_testing': True,
                     'include': True}

    def __init__(self, cmake_root):
        self.cmake_root = cmake_root
        self.indent_unit = '  '
        self.indent_level = 0
        self.options = []  # (name, description, default) tuples from option()

    def convert_args(self, args, as_array=True):
        """Format argument tokens as a Meson expression.

        Returns a bracketed array when *as_array* is true and there are
        several values, the bare value for exactly one, and '' for none.
        """
        res = []
        if as_array:
            start = '['
            end = ']'
        else:
            start = ''
            end = ''
        for i in args:
            if i.tid == 'id':
                res.append("'%s'" % i.value)
            elif i.tid == 'varexp':
                # CMake variable references become lower-cased Meson variables.
                res.append('%s' % i.value.lower())
            elif i.tid == 'string':
                res.append("'%s'" % i.value)
            else:
                print(i)
                raise RuntimeError('Unknown arg type.')
        if len(res) > 1:
            return start + ', '.join(res) + end
        if len(res) == 1:
            return res[0]
        return ''

    def write_entry(self, outfile, t):
        """Translate one Statement *t* and write the result to *outfile*.

        if/elseif/else/endif adjust the indentation level before/after the
        line is written.
        """
        if t.name in Converter.ignored_funcs:
            return
        preincrement = 0
        postincrement = 0
        if t.name == '_':
            # Pseudo-statement created from a comment; pass it through.
            line = t.args[0]
        elif t.name == 'add_subdirectory':
            line = "subdir('" + t.args[0].value + "')"
        elif t.name == 'pkg_search_module' or t.name == 'pkg_search_modules':
            varname = t.args[0].value.lower()
            mods = ["dependency('%s')" % i.value for i in t.args[1:]]
            if len(mods) == 1:
                line = '%s = %s' % (varname, mods[0])
            else:
                # Join the dependency() calls directly; re-quoting each one
                # (the previous behaviour) produced invalid Meson code like
                # ['dependency('x')', ...].
                line = '%s = [%s]' % (varname, ', '.join(mods))
        elif t.name == 'find_package':
            line = "%s_dep = dependency('%s')" % (t.args[0].value, t.args[0].value)
        elif t.name == 'find_library':
            line = "%s = find_library('%s')" % (t.args[0].value.lower(), t.args[0].value)
        elif t.name == 'add_executable':
            line = '%s_exe = executable(%s)' % (t.args[0].value, self.convert_args(t.args, False))
        elif t.name == 'add_library':
            if t.args[1].value == 'SHARED':
                libcmd = 'shared_library'
                args = [t.args[0]] + t.args[2:]
            elif t.args[1].value == 'STATIC':
                libcmd = 'static_library'
                args = [t.args[0]] + t.args[2:]
            else:
                # No explicit kind given: default to static, keep all args.
                libcmd = 'static_library'
                args = t.args
            line = '%s_lib = %s(%s)' % (t.args[0].value, libcmd, self.convert_args(args, False))
        elif t.name == 'add_test':
            line = 'test(%s)' % self.convert_args(t.args, False)
        elif t.name == 'option':
            # Options are not written here; they go to meson_options.txt.
            optname = t.args[0].value
            description = t.args[1].value
            if len(t.args) > 2:
                default = t.args[2].value
            else:
                default = None
            self.options.append((optname, description, default))
            return
        elif t.name == 'project':
            pname = t.args[0].value
            args = [pname]
            for l in t.args[1:]:
                l = l.value.lower()
                if l == 'cxx':
                    # Meson calls the C++ language 'cpp'.
                    l = 'cpp'
                args.append(l)
            args = ["'%s'" % i for i in args]
            line = 'project(' + ', '.join(args) + ')'
        elif t.name == 'set':
            varname = t.args[0].value.lower()
            line = '%s = %s\n' % (varname, self.convert_args(t.args[1:]))
        elif t.name == 'if':
            postincrement = 1
            line = 'if %s' % self.convert_args(t.args, False)
        elif t.name == 'elseif':
            preincrement = -1
            postincrement = 1
            line = 'elif %s' % self.convert_args(t.args, False)
        elif t.name == 'else':
            preincrement = -1
            postincrement = 1
            line = 'else'
        elif t.name == 'endif':
            preincrement = -1
            line = 'endif'
        else:
            # Unknown command: keep it visible as a comment.
            line = '''# %s(%s)''' % (t.name, self.convert_args(t.args))
        self.indent_level += preincrement
        indent = self.indent_level * self.indent_unit
        outfile.write(indent)
        outfile.write(line)
        if not(line.endswith('\n')):
            outfile.write('\n')
        self.indent_level += postincrement

    def convert(self, subdir=''):
        """Convert CMakeLists.txt in *subdir* (default: project root).

        Recurses into add_subdirectory() targets before translating each
        statement, and writes meson_options.txt once back at the root.
        """
        if subdir == '':
            subdir = self.cmake_root
        cfile = os.path.join(subdir, 'CMakeLists.txt')
        try:
            with open(cfile) as f:
                cmakecode = f.read()
        except FileNotFoundError:
            print('\nWarning: No CMakeLists.txt in', subdir, '\n')
            return
        p = Parser(cmakecode)
        with open(os.path.join(subdir, 'meson.build'), 'w') as outfile:
            for t in p.parse():
                if t.name == 'add_subdirectory':
                    self.convert(os.path.join(subdir, t.args[0].value))
                self.write_entry(outfile, t)
        if subdir == self.cmake_root and len(self.options) > 0:
            self.write_options()

    def write_options(self):
        """Write collected option() commands to meson_options.txt."""
        filename = os.path.join(self.cmake_root, 'meson_options.txt')
        with open(filename, 'w') as optfile:
            for o in self.options:
                (optname, description, default) = o
                if default is None:
                    typestr = ''
                    defaultstr = ''
                else:
                    # ON/OFF defaults indicate a boolean option; everything
                    # else is treated as a string.
                    if default == 'OFF':
                        typestr = ' type : \'boolean\','
                        default = 'false'
                    elif default == 'ON':
                        default = 'true'
                        typestr = ' type : \'boolean\','
                    else:
                        typestr = ' type : \'string\','
                    # NOTE(review): string defaults are emitted unquoted —
                    # likely needs quoting for valid Meson; confirm.
                    defaultstr = ' value : %s,' % default
                line = "option(%r,%s%s description : '%s')\n" % (optname,
                                                                 typestr,
                                                                 defaultstr,
                                                                 description)
                optfile.write(line)
+
if __name__ == '__main__':
    # Entry point: exactly one argument, the root of the CMake project.
    if len(sys.argv) != 2:
        print(sys.argv[0], '<CMake project root>')
        sys.exit(1)
    c = Converter(sys.argv[1])
    c.convert()
--- /dev/null
+#!/usr/bin/env python3
+
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
# Thin command-line shim: delegate the whole command line to mesonbuild's
# wraptool implementation and exit with whatever status it returns.
from mesonbuild.wrap import wraptool
import sys

sys.exit(wraptool.run(sys.argv[1:]))