doctooldir = $(pkgdatadir)/doctool
dist_doctool_DATA = \
util/doc-install.pl \
+ util/doc_install.py \
util/doc-postprocess.pl \
+ util/doc_postprocess.py \
util/doxygen.css \
util/doxygen-extra.css \
util/tagfile-to-devhelp2.xsl
doctags/mm-common-libstdc++-uninstalled.pc \
util/mm-common-util-uninstalled.pc
-dist_doc_DATA = README
+dist_doc_DATA = README.md OVERVIEW.md
doc_DATA = skeletonmm.tar.gz
skeletonmm_data_files = \
+mm-common 1.0.5 (2022-12-02)
+
+* Convert README to README.md and OVERVIEW.md.
+ (Kjell Ahlstedt)
+
+Meson build:
+* util/meson_aux: Make tarball reproducible.
+ (Jelle van der Waa) Merge request 6
+* Simplify lookup of python command.
+ (Kjell Ahlstedt) Pull request libsigcplusplus#83 (wael)
+
+Skeletonmm:
+* Avoid configuration warnings.
+ (Kjell Ahlstedt)
+* Detect if we build from a git subtree.
+ (Kjell Ahlstedt) Merge request gtkmm!72 (William Roy)
+* Simplify lookup of python command.
+ (Kjell Ahlstedt) Pull request libsigcplusplus#83 (wael)
+* Add build-tests option.
+ (Kjell Ahlstedt) Pull request libsigcplusplus#84 (Fabrice Fontaine)
+
+
+mm-common 1.0.4 (2022-02-11)
+
+* Add util/doc_postprocess.py and util/doc_install.py.
+ Generating documentation in modules that use mm-common
+ does not require Perl in Meson builds.
+ (Kjell Ahlstedt)
+
+Meson build and skeletonmm:
+* Specify 'check' option in run_command().
+ The default value will change in future Meson releases.
+ (Kjell Ahlstedt)
+
+
mm-common 1.0.3 (2021-05-20)
Meson build:
-This module is part of the GNOME C++ bindings effort <http://www.gtkmm.org/>.
-
-The mm-common module provides the build infrastructure and utilities
-shared among the GNOME C++ binding libraries. It is only a required
-dependency for building the C++ bindings from the gnome.org version
-control repository. An installation of mm-common is not required for
-building tarball releases, unless configured to use maintainer-mode.
-
-Release archives of mm-common include the Doxygen tag file for the
-GNU C++ Library reference documentation. It is covered by the same
-licence as the source code it was extracted from. More information
-is available at <http://gcc.gnu.org/onlinedocs/libstdc++/>.
-
-Autotools or Meson?
-===================
-
-mm-common can be built with Autotools or Meson. Building with Autotools
-may be phased out in the future.
-
-The files that mm-common installs and mm-common-prepare copies to other
-modules are useful in modules that are built with Autotools.
-The files that mm-common installs and mm-common-get copies to other
-modules are useful in modules that are built with Meson.
-
-The files in the skeletonmm directory show the start of a project that will
-use Meson.
-
-Skeleton C++ binding module
-===========================
-
-When creating a new C++ binding module based on mm-common, the easiest way
-to get started is to copy the skeletonmm directory shipped with mm-common.
-It contains the build support files required for a C++ binding module using
-Meson, gmmproc and glibmm.
-
-In order to create a new binding project from the copied skeleton directory,
-any files which have "skeleton" in the filename must be renamed. References
-to the project name or author in the files need to be substituted with the
-actual name and author of the new binding.
-
-mm-common overview
-==================
+# mm-common overview
The following sections provide an overview of the various files shipped
with mm-common, and briefly explain their purpose. Detailed documentation
and usage instructions can be found in the files themselves.
-mm-common-prepare and Autotools
--------------------------------
+## mm-common-prepare and Autotools
The mm-common-prepare shell script is installed in ${bindir} and must be
invoked from the bootstrap script of a binding module in order to set up
before any of Autotools' own setup utilities. The classic command line
options such as --copy and --force can be used to adjust the behavior of
mm-common-prepare. A typical autogen.sh would look like this:
-
+```
#! /bin/sh -e
test -n "$srcdir" || srcdir=`dirname "$0"`
test -n "$srcdir" || srcdir=.
mm-common-prepare --copy --force "$srcdir"
autoreconf --force --install --verbose "$srcdir"
test -n "$NOCONFIGURE" || "$srcdir/configure" --enable-maintainer-mode "$@"
-
+```
Do not forget to set:
-
+```
ACLOCAL_AMFLAGS = -I build ${ACLOCAL_FLAGS}
-
+```
in your project's top-level Makefile.am. "build" should be changed to the
name of the Autoconf M4 macro subdirectory of your project's source tree.
Also note that mm-common-prepare inspects the project's configure.ac file
for the AC_CONFIG_AUX_DIR([...]) argument. This is explained in further
detail below in the section on Automake include files.
-mm-common-get and Meson
------------------------
+## mm-common-get and Meson
The mm-common-get shell script is installed in ${bindir} and must be
invoked with run_command() early in a meson.build file. The meson.build file
should contain code similar to
-
- python = import('python').find_installation('python3')
+```
+ python3 = import('python').find_installation()
cmd_py = '''
import os
import sys
- sys.exit(0 if os.path.isdir("@0@") else 1)
+ sys.exit(os.path.isdir("@0@") or os.path.isfile("@0@"))
'''.format(project_source_root / '.git')
- is_git_build = run_command(python, '-c', cmd_py).returncode() == 0
+ is_git_build = run_command(python3, '-c', cmd_py, check: false).returncode() != 0
maintainer_mode_opt = get_option('maintainer-mode')
maintainer_mode = maintainer_mode_opt == 'true' or \
(maintainer_mode_opt == 'if-git-build' and is_git_build)
- mm_common_get = find_program('mm-common-get', required: maintainer_mode)
-
- if maintainer_mode and mm_common_get.found()
+ mm_common_get = find_program('mm-common-get', required: false)
+ if maintainer_mode and not mm_common_get.found()
+ message('Maintainer mode requires the \'mm-common-get\' command. If it is not found,\n' +
+ 'install the \'mm-common\' package, version 1.0.0 or higher.')
+ # If meson --wrap-mode != forcefallback, Meson falls back to the mm-common
+ # subproject only if mm-common-get is required.
+ mm_common_get = find_program('mm-common-get', required: true)
+ endif
+ if maintainer_mode
# Copy files to untracked/build_scripts and untracked/docs.
run_command(mm_common_get, '--force',
project_source_root / 'untracked' / 'build_scripts',
- project_source_root / 'untracked' / 'docs')
+ project_source_root / 'untracked' / 'docs',
+ check: true,
+ )
endif
+```
-In a Unix-like system the first few lines can be replaced with
-
- is_git_build = run_command('test', '-d', project_source_root/'.git').returncode() == 0
-
-
-Autoconf M4 macros (Autotools)
-------------------------------
+## Autoconf M4 macros (Autotools)
The Autoconf M4 macros are installed into the system-wide macro repository
in the ${datadir}/aclocal directory. Since all used M4 macros are copied
mm-common-prepare. If mm-common is installed to a different prefix than
Automake, it may be necessary to adjust ACLOCAL_PATH accordingly so that
aclocal can find the M4 files:
-
+```
export ACLOCAL_PATH="${mm_common_prefix}/share/aclocal"
-
+```
This step is not necessary when using jhbuild, as it takes care of setting
up the environment for using the locally built modules.
-macros/mm-common.m4: (generated from macros/mm-common.m4.in)
+- macros/mm-common.m4: (generated from macros/mm-common.m4.in) \
Provides MM_PREREQ() for requiring a minimum version of mm-common, and
an internal initialization macro shared by the other mm-common macros.
-macros/mm-warnings.m4:
+- macros/mm-warnings.m4: \
Implements the MM_ARG_ENABLE_WARNINGS() Autoconf macro for easy setup
of compiler diagnostics through the --enable-warnings configure option.
-macros/mm-doc.m4:
- Implements the MM_ARG_ENABLE_DOCUMENTATION() Autoconf macro to intialize
+- macros/mm-doc.m4: \
+ Implements the MM_ARG_ENABLE_DOCUMENTATION() Autoconf macro to initialize
the documentation support for a C++ binding package. Among other things,
it provides the --enable-documentation configure option, and checks for
the required utilities.
default base paths for substitution into the configuration Doxyfile. It
also generates the command line options for doc-install.pl.
-macros/mm-module.m4:
+- macros/mm-module.m4: \
The magic MM_INIT_MODULE() macro takes care of defining the various
substitution variables and preprocessor macros to identify the name,
version and API version of a C++ binding module.
-macros/mm-pkg.m4:
+- macros/mm-pkg.m4: \
The helper macro MM_PKG_CONFIG_SUBST, which simplifies the retrieval of
specific configuration values from pkg-config. Checks for particular
utility programs are also defined here, such as MM_CHECK_GNU_MAKE and
MM_CHECK_PERL.
-macros/mm-dietlib.m4:
+- macros/mm-dietlib.m4: \
Implements Autoconf macros which provide options intended to reduce the
binary size of the generated binding library, typically for embedded use.
The MM_PROG_GCC_VISIBILITY macro is defined in this file as well.
-macros/mm-ax_cxx_compile_stdcxx.m4:
+- macros/mm-ax_cxx_compile_stdcxx.m4: \
Implements the MM_AX_CXX_COMPILE_STDCXX() macro to test and set flags
for C++11/14/17 compatibility of the C++ compiler. This is identical to the
AX_CXX_COMPILE_STDCXX() macro described at
- http://www.gnu.org/software/autoconf-archive/ax_cxx_compile_stdcxx.html,
+ <http://www.gnu.org/software/autoconf-archive/ax_cxx_compile_stdcxx.html>,
except for the MM_ prefix.
-Automake include files (Autotools)
-----------------------------------
+## Automake include files (Autotools)
The Automake include files are located in the am_include/ directory.
The installed mm-common-prepare program copies all of the .am files into
the configure.ac file, the .am files will be placed in the indicated
subdirectory.
-am_include/generate-binding.am:
+- am_include/generate-binding.am: \
Variables and rules for running the gmmproc code generator to produce
the source code files for a C++ binding module.
-am_include/compile-binding.am:
+- am_include/compile-binding.am: \
Variables and rules for compiling and linking the shared library which
implements a C++ binding module.
-am_include/doc-reference.am:
+- am_include/doc-reference.am: \
Variables and rules for building the API reference documentation using
Doxygen, and to create a Devhelp book for the library. The installation
rules also take care of translating references to external documentation
in the generated hypertext documents.
-am_include/dist-changelog.am:
+- am_include/dist-changelog.am: \
A dist-hook rule to automatically generate a ChangeLog file when making
a release, intended to be used by modules which use the version control
log exclusively to document changes.
-Python build scripts (Meson)
-----------------------------
+## Python build scripts (Meson)
These scripts can be called from meson.build files with run_command(),
custom_target(), meson.add_postconf_script(), meson.add_install_script()
and meson.add_dist_script().
-util/build_scripts/generate-binding.py:
+- util/build_scripts/generate-binding.py: \
Commands for running the gmmproc code generator to produce
the source code files for a C++ binding module.
-util/build_scripts/doc-reference.py:
+- util/build_scripts/doc-reference.py: \
Commands for building the API reference documentation using
Doxygen, and to create a Devhelp book for the library. The installation
rules also take care of translating references to external documentation
in the generated hypertext documents.
-util/build_scripts/dist-changelog.py:
+- util/build_scripts/dist-changelog.py: \
A git command to generate a ChangeLog file when making a release,
intended to be used by modules which use the version control
log exclusively to document changes.
-util/build_scripts/dist-build-scripts.py:
+- util/build_scripts/dist-build-scripts.py: \
Commands that trim the distribution directory before a tarball is made.
The scripts copied by mm-common-get are distributed, although they are
not checked into the git repository. All .gitignore files and an empty build/
directory are removed
-util/build_scripts/check-dllexport-usage.py:
- Command that checks on the gmmproc version that is to be used or been used
+- util/build_scripts/check-dllexport-usage.py: \
+ Command that checks on the gmmproc version that is to be used or has been used
to generate the sources, to check whether to use compiler directives to
export symbols. Only used for Visual Studio or clang-cl builds.
-Documentation utilities (Meson and Autotools)
----------------------------------------------
+## Documentation utilities (Meson and Autotools)
-These are two Perl scripts, a style sheet, and one XSL transformation
-which assist with the task of generating and installing the Doxygen
-reference documentation. At least doc-install.pl is also required for
-tarball builds.
+These are two Perl scripts and two equivalent Python scripts, a style sheet,
+and one XSL transformation which assist with the task of generating and installing
+the Doxygen reference documentation. At least doc-install.pl or doc_install.py
+the Doxygen reference documentation. At least doc-install.pl or doc-install.py
+is also required for tarball builds. Autotools uses the Perl scripts.
+Meson uses the Python scripts.
Autotools: To avoid copying these files into all binding modules, they are
distributed and installed with the mm-common module. Those binding modules
that it should copy the documentation utilities into the project's source tree.
Otherwise the files installed with mm-common will be used automatically.
-util/doc-postprocess.pl:
+- util/doc-postprocess.pl: \
+ util/doc_postprocess.py: \
A simple script to post-process the HTML files generated by Doxygen.
It replaces various code constructs that do not match the coding style
used throughout the C++ bindings. For instance, it rewrites function
prototypes in order to place the reference symbol (&) next to the type
instead of the name of the argument.
-util/doc-install.pl:
+- util/doc-install.pl: \
+ util/doc_install.py: \
A replacement for the installdox script generated by Doxygen. Its
purpose is to translate references to external documentation at the
time the documentation is installed. This step is necessary because
as well, and will happily pass through unrecognized files without any
alterations.
-util/doxygen.css:
+- util/doxygen.css: \
A Cascading Style Sheet to unify the appearance of the HTML reference
documentation generated by Doxygen for each C++ binding module.
This file is deprecated. Use util/doxygen-extra.css instead.
-util/doxygen-extra.css:
+- util/doxygen-extra.css: \
A Cascading Style Sheet to unify the appearance of the HTML reference
documentation generated by Doxygen for each C++ binding module.
-util/tagfile-to-devhelp2.xsl:
+- util/tagfile-to-devhelp2.xsl: \
An XSLT script to generate a Devhelp2 book for the Doxygen reference
documentation. The generated Doxygen tag file serves as the input of
the translation.
-GNU C++ Library tag file
-------------------------
+## GNU C++ Library tag file
All modules in the GNOME C++ bindings set make use of the C++ standard
library in the API. As the GNU C++ Library shipped with GCC also uses
Doxygen for its reference documentation, its tag file is made available
by mm-common at a shared location for use by all C++ binding modules.
-doctags/libstdc++.tag:
+- doctags/libstdc++.tag: \
The Doxygen tag file for the GNU libstdc++ reference documentation
hosted at <http://gcc.gnu.org/onlinedocs/libstdc++/latest-doxygen/>.
This file is distributed with release archives of mm-common, but not
--- /dev/null
+# mm-common
+
+This module is part of the GNOME C++ bindings effort <http://www.gtkmm.org/>.
+
+The mm-common module provides the build infrastructure and utilities
+shared among the GNOME C++ binding libraries. It is only a required
+dependency for building the C++ bindings from the gnome.org version
+control repository. An installation of mm-common is not required for
+building tarball releases, unless configured to use maintainer-mode.
+
+Release archives of mm-common include the Doxygen tag file for the
+GNU C++ Library reference documentation. It is covered by the same
+licence as the source code it was extracted from. More information
+is available at <http://gcc.gnu.org/onlinedocs/libstdc++/>.
+
+# Autotools or Meson?
+
+mm-common can be built with Autotools or Meson. Autotools support may be
+dropped in the future.
+
+The files that mm-common installs and mm-common-prepare copies to other
+modules are useful in modules that are built with Autotools.
+The files that mm-common installs and mm-common-get copies to other
+modules are useful in modules that are built with Meson.
+
+The files in the skeletonmm directory show the start of a project that will
+use Meson.
+
+# Skeleton C++ binding module
+
+When creating a new C++ binding module based on mm-common, the easiest way
+to get started is to copy the `skeletonmm` directory shipped with mm-common.
+It contains the build support files required for a C++ binding module using
+Meson, gmmproc and glibmm.
+
+In order to create a new binding project from the copied skeleton directory,
+any files which have `skeleton` in the filename must be renamed. References
+to the project name or author in the files need to be substituted with the
+actual name and author of the new binding.
+
+# mm-common overview
+
+See [OVERVIEW.md](OVERVIEW.md) for an overview of the files shipped with mm-common.
## You should have received a copy of the GNU General Public License
## along with mm-common. If not, see <http://www.gnu.org/licenses/>.
-AC_INIT([mm-common], [1.0.3], [https://gitlab.gnome.org/GNOME/mm-common/issues/],
+AC_INIT([mm-common], [1.0.5], [https://gitlab.gnome.org/GNOME/mm-common/issues/],
[mm-common], [http://www.gtkmm.org/])
AC_PREREQ([2.59])
AC_CONFIG_SRCDIR([util/mm-common-prepare.in])
-AM_INIT_AUTOMAKE([1.9 -Wno-portability check-news no-dist-gzip dist-xz no-define std-options])
+AM_INIT_AUTOMAKE([1.9 -Wno-portability check-news no-dist-gzip dist-xz no-define std-options foreign])
# Support silent build rules.
# Disable by either passing --disable-silent-rules to configure or passing V=1 to make.
m4_ifdef([AM_SILENT_RULES], [AM_SILENT_RULES([yes])])
# This file is part of mm-common.
project('mm-common',
- version: '1.0.3',
- meson_version: '>= 0.54.0', # required for meson.override_dependency()
- license: 'GPLv2+'
+ version: '1.0.5',
+ license: 'GPLv2+',
+ meson_version: '>= 0.55.0', # required for meson.add_dist_script(python3, ...)
+ # and meson.add_install_script(python3, ...)
)
-python3 = import('python').find_installation('python3')
-python_version = python3.language_version()
-python_version_req = '>= 3.5'
-if not python_version.version_compare(python_version_req)
- error('Requires Python @0@, found @1@.'.format(python_version_req, python_version))
-endif
+python3 = find_program('python3', version: '>= 3.5')
# Use these instead of meson.source_root() and meson.build_root().
# source_root() and build_root() are not useful, if this is a subproject.
if find_program('aclocal', required: false).found()
meson.add_install_script(
- python3.path(),
- script_dir / 'extra-install-cmd.py',
+ python3, script_dir / 'extra-install-cmd.py',
install_prefix / install_aclocal_macrodir
)
endif
# into projects at Meson setup or configure time.
doctool_basefiles = [
'doc-install.pl',
+ 'doc_install.py',
'doc-postprocess.pl',
+ 'doc_postprocess.py',
'doxygen.css',
'doxygen-extra.css',
'tagfile-to-devhelp2.xsl',
# Create tar archive of skeletonmm for installation.
skeletonmm_tarball_script = script_dir / 'skeletonmm-tarball.py'
-tarball_filetype = run_command(python3, skeletonmm_tarball_script, 'check')
+tarball_filetype = run_command(python3,
+ skeletonmm_tarball_script, 'check',
+ check: true,
+)
tarball_filename = 'skeletonmm' + tarball_filetype.stdout()
custom_target(tarball_filename,
input: skeletonmm_files,
install_dir: install_docdir,
install: true
)
-install_data('README', install_dir: install_docdir)
+install_data('OVERVIEW.md', 'README.md', install_dir: install_docdir)
# Either curl or wget is required for downloading libstdc++.tag,
# used by Doxygen.
if get_option('use-network')
curl = find_program('curl', required: false)
if curl.found()
- download_cmd = curl.path()
+ download_cmd = curl.full_path()
else
wget = find_program('wget', required: true)
- download_cmd = wget.path()
+ download_cmd = wget.full_path()
endif
endif
# Modify the contents of the distribution directory.
# (not allowed in a subproject if meson.version() < 0.58.0)
meson.add_dist_script(
- python3.path(),
- script_dir / 'extra-dist-cmd.py',
+ python3, script_dir / 'extra-dist-cmd.py',
project_source_root,
project_build_root,
)
import os
import sys
os.chmod(sys.argv[1], 0o755)'''
- run_command(python3, '-c', cmd_py, project_build_root / 'mm-common-get2')
+ run_command(python3, '-c', cmd_py,
+ project_build_root / 'mm-common-get2',
+ check: true,
+ )
# A main project that looks for mm-common-get shall find mm_common_get2.
meson.override_find_program('mm-common-get', mm_common_get2)
project_source_root / 'util' / 'build_scripts',
project_build_root / meson.project_name() / 'build',
meson_build_support_basefiles,
+ check: true,
)
run_command(python3, script_dir / 'copy-files.py',
project_source_root / 'util',
project_build_root / meson.project_name() / 'doctool',
doctool_basefiles,
+ check: true,
)
mm_common_libstdc_dep = declare_dependency(
# Install Devhelp file and html files.
meson.add_install_script(
- python3.path(), doc_reference, 'install_doc',
+ python3, doc_reference, 'install_doc',
doctool_dir,
devhelp_file.full_path(),
install_devhelpdir,
if can_add_dist_script
# Distribute built files and files copied by mm-common-prepare.
meson.add_dist_script(
- python3.path(), doc_reference, 'dist_doc',
+ python3, doc_reference, 'dist_doc',
doctool_dir,
doctool_dist_dir,
meson.current_build_dir(),
exe_file = executable(ex_name, ex_sources, resources,
dependencies: skeletonmm_own_dep,
implicit_include_directories: false,
- gui_app: true,
+ win_subsystem: 'windows',
build_by_default: build_examples
)
endforeach
version: '0.1.0',
license: 'LGPLv2.1+',
default_options: [
- 'cpp_std=c++17'
+ 'cpp_std=c++17',
+ 'warning_level=1',
],
- meson_version: '>= 0.54.0', # required for meson.override_dependency()
- # and dep.get_variable(internal:)
+ meson_version: '>= 0.56.0', # required for executable(..., win_subsystem: ...)
)
skeletonmm_api_version = '1.0'
project_source_root = meson.current_source_dir()
project_build_root = meson.current_build_dir()
-python3 = import('python').find_installation('python3')
-python_version = python3.language_version()
-python_version_req = '>= 3.5'
-if not python_version.version_compare(python_version_req)
- error('Requires Python @0@, found @1@.'.format(python_version_req, python_version))
-endif
+python3 = find_program('python3', version: '>= 3.5')
# Do we build from a git repository?
-# Suppose we do if and only if a '.git' directory or file exists.
+# Suppose we do if and only if the meson.build file is tracked by git.
cmd_py = '''
-import os
-import sys
-sys.exit(os.path.isdir("@0@") or os.path.isfile("@0@"))
-'''.format(project_source_root / '.git')
-is_git_build = run_command(python3, '-c', cmd_py).returncode() != 0
+import shutil, subprocess, sys
+if not shutil.which('git'):
+ sys.exit(1)
+cmd = [ 'git', 'ls-files', '--error-unmatch', 'meson.build' ]
+sys.exit(subprocess.run(cmd, cwd="@0@", stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL).returncode)
+'''.format(project_source_root)
+is_git_build = run_command(python3, '-c', cmd_py, check: false).returncode() == 0
# Are we testing a dist tarball while it's being built?
is_dist_check = project_source_root.contains('dist-unpack') and \
if is_dist_check
message('Looks like a tarball is being tested. ' + \
'Option "dist-warnings" is used instead of "warnings".')
- warning_level = get_option('dist-warnings')
+ cpp_warnings = get_option('dist-warnings')
else
- warning_level = get_option('warnings')
+ cpp_warnings = get_option('warnings')
endif
+warning_level = get_option('warning_level').to_int()
+werror = get_option('werror')
build_deprecated_api = get_option('build-deprecated-api')
build_documentation_opt = get_option('build-documentation')
build_documentation = build_documentation_opt == 'true' or \
(build_documentation_opt == 'if-maintainer-mode' and maintainer_mode)
build_examples = get_option('build-examples')
+build_tests = get_option('build-tests')
# Installation directories are relative to {prefix}.
install_prefix = get_option('prefix')
mm_common_get = find_program('mm-common-get', required: true)
endif
m4 = find_program('m4', required: maintainer_mode) # Used by gmmproc (in glibmm)
-perl = find_program('perl', required: maintainer_mode or build_documentation)
doxygen = find_program('doxygen', required: build_documentation)
dot = find_program('dot', required: build_documentation) # Used by Doxygen
xsltproc = find_program('xsltproc', required: build_documentation)
if maintainer_mode
# Copy files to untracked/build_scripts and untracked/doc.
run_command(mm_common_get, '--force', script_dir,
- project_source_root / 'untracked' / 'doc')
+ project_source_root / 'untracked' / 'doc',
+ check: true,
+ )
else
cmd_py = '''
import os
import sys
sys.exit(os.path.isfile("@0@"))
'''.format(generate_binding)
- file_exists = run_command(python3, '-c', cmd_py).returncode() != 0
+ file_exists = run_command(python3, '-c', cmd_py, check: false).returncode() != 0
if not file_exists
error('Missing files in untracked/. You must enable maintainer-mode.')
endif
endif
+# Check if perl is required and available.
+doc_perl_prop = run_command(
+ python3, doc_reference, 'get_script_property',
+ '', # MMDOCTOOLDIR is not used
+ 'requires_perl',
+ check: false,
+)
+doc_requires_perl = true
+if doc_perl_prop.returncode() == 0 and doc_perl_prop.stdout() == 'false'
+ doc_requires_perl = false
+endif
+
+perl = find_program('perl', required: maintainer_mode or \
+ (build_documentation and doc_requires_perl))
+
cpp_compiler = meson.get_compiler('cpp')
# Set compiler warnings.
+# Meson warns if any of the /W1, /W2, /W3, /W4, /Wall, -Wall, -Wextra, -Werror
+# compiler options are added with add_project_arguments().
+# Avoid such warnings, when possible.
+# See _warn_about_builtin_args() in meson/mesonbuild/interpreter/interpreter.py.
warning_flags = []
-if warning_level == 'min'
- warning_flags = ['-Wall']
-elif warning_level == 'max' or warning_level == 'fatal'
- warning_flags = '-pedantic -Wall -Wextra -Wformat-security -Wsuggest-override -Wshadow -Wno-long-long'.split()
- if warning_level == 'fatal'
- warning_flags += ['-Werror']
+if cpp_warnings == 'min'
+ if warning_level == 0
+ warning_flags = ['-Wall']
+ endif
+elif cpp_warnings == 'max' or cpp_warnings == 'fatal'
+ if warning_level < 3
+ warning_flags = ['-pedantic', '-Wall', '-Wextra']
+ endif
+ warning_flags += '-Wformat-security -Wsuggest-override -Wshadow -Wno-long-long'.split()
+ if cpp_warnings == 'fatal'
+ if not werror
+ warning_flags += ['-Werror']
+ endif
deprecations = 'G SKELETON GLIBMM SIGCXX'.split()
foreach d : deprecations
warning_flags += '-D@0@_DISABLE_DEPRECATED'.format(d)
if can_add_dist_script
# Add a ChangeLog file to the distribution directory.
meson.add_dist_script(
- python3.path(), dist_changelog,
+ python3, dist_changelog,
project_source_root,
)
# Add build scripts to the distribution directory, and delete .gitignore
# files and an empty $MESON_PROJECT_DIST_ROOT/build/ directory.
meson.add_dist_script(
- python3.path(), dist_build_scripts,
+ python3, dist_build_scripts,
project_source_root,
'untracked' / 'build_scripts',
)
meson.project_name() + ' ' + meson.project_version(),
'',
' Maintainer mode: @0@@1@'.format(maintainer_mode_opt, real_maintainer_mode),
- ' Compiler warnings: @0@'.format(warning_level),
+ ' Compiler warnings: @0@ (warning_level: @1@, werror: @2@)'. \
+ format(cpp_warnings, warning_level, werror),
' Build deprecated API: @0@'.format(build_deprecated_api),
'Build HTML documentation: @0@@1@'.format(build_documentation_opt, real_build_documentation),
' Build example programs: @0@'.format(build_examples),
+ ' Build test programs: @0@'.format(build_tests),
'Directories:',
' prefix: @0@'.format(install_prefix),
' includedir: @0@'.format(install_prefix / install_includedir),
value: 'if-maintainer-mode', description: 'Build and install the documentation')
option('build-examples', type: 'boolean', value: true,
description: 'Build example programs')
+option('build-tests', type: 'boolean', value: true,
+ description: 'Build test programs')
meson.current_build_dir(),
src_untracked_skeletonmm,
hg_ccg_basenames,
+ check: true,
)
built_cc_files = [ rel_untracked_skeletonmm / 'wrap_init.cc' ]
# Install built .h and _p.h files.
meson.add_install_script(
- python3.path(), generate_binding, 'install_built_h_files',
+ python3, generate_binding, 'install_built_h_files',
built_h_cc_dir,
install_includedir / skeletonmm_pcname / 'skeletonmm', # subdir below {prefix}
hg_ccg_basenames
if can_add_dist_script
# Distribute built files.
meson.add_dist_script(
- python3.path(), generate_binding, 'dist_built_files',
+ python3, generate_binding, 'dist_built_files',
built_h_cc_dir,
untracked_skeletonmm,
hg_ccg_basenames,
# tests
-# input: gnome, skeletonmm_own_dep
+# input: gnome, skeletonmm_own_dep, build_tests
tests = [
# [[dir-name], exe-name, [sources]]
exe_file = executable(ex_name, ex_sources, resources,
dependencies: skeletonmm_own_dep,
implicit_include_directories: false,
- gui_app: false,
- build_by_default: true
+ build_by_default: build_tests
)
- test(dir.underscorify(), exe_file)
+ # If exe_file is a test program, it is built by default unconditionally.
+ if build_tests
+ test(ex_name, exe_file)
+ endif
endforeach
1. Files copied by mm-common-get
--------------------------------
-untracked/doc/doc-install.pl
- doc-postprocess.pl
+untracked/doc/doc_install.py
+ doc_postprocess.py
doxygen-extra.css
tagfile-to-devhelp2.xsl
untracked/build_scripts/check-dllexport-usage.py
]
# MESON_PROJECT_DIST_ROOT is set only if meson.version() >= 0.58.0.
project_dist_root = os.getenv('MESON_PROJECT_DIST_ROOT', os.getenv('MESON_DIST_ROOT'))
-with open(os.path.join(project_dist_root, 'ChangeLog'), mode='w') as logfile:
+logfilename = os.path.join(project_dist_root, 'ChangeLog')
+with open(logfilename, mode='w', encoding='utf-8') as logfile:
sys.exit(subprocess.run(cmd, stdout=logfile).returncode)
#!/usr/bin/env python3
-# External command, intended to be called with custom_target(),
+# External command, intended to be called with run_command(), custom_target(),
# meson.add_install_script() or meson.add_dist_script() in meson.build.
# argv[1] argv[2] argv[3:]
doxytagfile = sys.argv[3]
doc_outdir = os.path.dirname(doxytagfile)
+ # Search for doc_postprocess.py first in MMDOCTOOLDIR.
+ sys.path.insert(0, MMDOCTOOLDIR)
+ from doc_postprocess import doc_postprocess
+
# Export this variable for use in the Doxygen configuration file.
child_env = os.environ.copy()
child_env['MMDOCTOOLDIR'] = MMDOCTOOLDIR
if result.returncode:
return result.returncode
- cmd = [
- 'perl',
- '--',
- os.path.join(MMDOCTOOLDIR, 'doc-postprocess.pl'),
- os.path.join(doc_outdir, 'html', '*.html'),
- ]
- return subprocess.run(cmd).returncode
+ return doc_postprocess(os.path.join(doc_outdir, 'html', '*.html'))
# Invoked from custom_target() in meson.build.
def devhelp():
prefix_htmlrefdir = os.path.join(os.getenv('MESON_INSTALL_PREFIX'), sys.argv[5])
build_dir = os.path.dirname(devhelpfile)
+ # Search for doc_install.py first in MMDOCTOOLDIR.
+ sys.path.insert(0, MMDOCTOOLDIR)
+ from doc_install import doc_install_cmdargs, doc_install_funcargs
+
# Create the installation directories, if they do not exist.
os.makedirs(destdir_htmlrefdir, exist_ok=True)
os.makedirs(destdir_devhelpdir, exist_ok=True)
verbose = ['--verbose']
# Install html files.
- cmd = [
- 'perl',
- '--',
- os.path.join(MMDOCTOOLDIR, 'doc-install.pl'),
- '--mode=0644',
+ cmdargs = [
+ '--mode=0o644',
] + verbose + sys.argv[6:] + [
'-t', destdir_htmlrefdir,
'--glob',
'--',
os.path.join(build_dir, 'html', '*'),
]
- result1 = subprocess.run(cmd)
+ result1 = doc_install_cmdargs(cmdargs)
# Install the Devhelp file.
# rstrip('/') means remove trailing /, if any.
- cmd = [
- 'perl',
- '--',
- os.path.join(MMDOCTOOLDIR, 'doc-install.pl'),
- '--mode=0644',
- ] + verbose + [
- '--book-base=' + prefix_htmlrefdir.rstrip('/'),
- '-t', destdir_devhelpdir,
- '--',
- devhelpfile,
- ]
- result2 = subprocess.run(cmd)
+ result2 = doc_install_funcargs(
+ sources=[devhelpfile],
+ target=destdir_devhelpdir,
+ target_is_dir=True,
+ mode=0o644,
+ verbose=bool(verbose),
+ book_base=prefix_htmlrefdir.rstrip('/'),
+ )
- if result1.returncode:
- return result1.returncode
- return result2.returncode
+ return max(result1, result2)
# Invoked from meson.add_dist_script().
def dist_doc():
# Distribute files that mm-common-get has copied to MMDOCTOOLDIR.
# shutil.copy() does not copy timestamps.
- for file in ['doc-install.pl', 'doc-postprocess.pl', 'doxygen-extra.css', 'tagfile-to-devhelp2.xsl']:
+ for file in ['doc_install.py', 'doc_postprocess.py', 'doxygen-extra.css', 'tagfile-to-devhelp2.xsl']:
shutil.copy(os.path.join(MMDOCTOOLDIR, file), doctool_dist_dir)
# Distribute built files: tag file, devhelp file, html files.
copy_function=shutil.copy)
return 0
# Invoked from run_command() in meson.build.
def get_script_property():
    '''Report a property of the documentation scripts on stdout.

    argv[3] is the property name; argv[2] (MMDOCTOOLDIR) is not used.
    Returns 0 on success, 1 for an unknown property.
    '''
    prop = sys.argv[3]
    if prop != 'requires_perl':
        print(sys.argv[0], ': unknown property,', prop)
        return 1
    # These Python scripts replace the old Perl utilities.
    print('false', end='')  # stdout can be read in the meson.build file.
    return 0
+
# ----- Main -----
if subcommand == 'doxygen':
sys.exit(doxygen())
sys.exit(install_doc())
if subcommand == 'dist_doc':
sys.exit(dist_doc())
+if subcommand == 'get_script_property':
+ sys.exit(get_script_property())
print(sys.argv[0], ': illegal subcommand,', subcommand)
sys.exit(1)
'--namespace=' + namespace,
'--parent_dir=' + parent_dir,
] + sys.argv[5:]
- with open(output_file, mode='w') as output_file_obj:
+ with open(output_file, mode='w', encoding='utf-8') as output_file_obj:
return subprocess.run(cmd, stdout=output_file_obj).returncode
# Invoked from custom_target() in meson.build.
--- /dev/null
+#!/usr/bin/env python3
+
+# doc_install.py [OPTION]... [-T] SOURCE DEST
+# doc_install.py [OPTION]... SOURCE... DIRECTORY
+# doc_install.py [OPTION]... -t DIRECTORY SOURCE...
+
+# Copy SOURCE to DEST or multiple SOURCE files to the existing DIRECTORY,
+# while setting permission modes. For HTML files, translate references to
+# external documentation.
+
+# Mandatory arguments to long options are mandatory for short options, too.
+# --book-base=BASEPATH use reference BASEPATH for Devhelp book
+# -l, --tag-base=TAGFILE@BASEPATH use BASEPATH for references from TAGFILE (Doxygen <= 1.8.15)
+# -l, --tag-base=s@BASEPUB@BASEPATH substitute BASEPATH for BASEPUB (Doxygen >= 1.8.16)
+# -m, --mode=MODE override file permission MODE (octal)
+# -t, --target-directory=DIRECTORY copy all SOURCE arguments into DIRECTORY
+# -T, --no-target-directory treat DEST as normal file
+# --glob expand SOURCE as filename glob pattern
+# -v, --verbose enable informational messages
+# -h, --help display help and exit
+
+import os
+import sys
+import re
+import glob
+
# Globals
g_verbose = False        # True => notice() prints informational messages.
tags_dict = {}           # TAGFILE (bytes) -> BASEPATH (bytes), Doxygen <= 1.8.15.
subst_dict = {}          # BASEPUB (bytes) -> BASEPATH (bytes), Doxygen >= 1.8.16.
perm_mode = 0o644        # Permission mode applied to each installed file.
g_book_base = None       # Devhelp book base path (bytes), or None.
html_doxygen_count = 0   # Number of rewritten references in the current file.

message_prefix = os.path.basename(__file__) + ':'

# The installed files are read and written in binary mode.
# All regular expressions and replacement strings must be bytes objects.
html_start_pattern = re.compile(rb'\s*(?:<[?!][^<]+)*<html[>\s]')
html_split1_pattern = re.compile(rb'''
    \bdoxygen="([^:"]+):([^"]*)"   # doxygen="(TAGFILE):(BASEPATH)"
    \s+((?:href|src)=")\2([^"]*")  # (href="|src=")BASEPATH(RELPATH")
    ''', re.VERBOSE)
html_split2_pattern = re.compile(rb'''
    \b((?:href|src)=")([^"]+")  # (href="|src=")(BASEPUB RELPATH")
    ''', re.VERBOSE)

devhelp_start_pattern = re.compile(rb'\s*(?:<[?!][^<]+)*<book\s')
devhelp_subst_pattern = re.compile(rb'(<book\s+[^<>]*?\bbase=")[^"]*(?=")')
+
def notice(*msg):
    '''Print an informational message, but only in verbose mode.'''
    if not g_verbose:
        return
    print(message_prefix, ''.join(msg))
+
def error(*msg):
    '''Print an error message on stderr, then abort by raising RuntimeError.'''
    text = ''.join(msg)
    print(message_prefix, 'Error:', text, file=sys.stderr)
    raise RuntimeError(text)
+
def html_split1_func(group1, group2):
    '''Return the mapped base path for tag file group1, else group2 unchanged.

    Counts each successful mapping in the global html_doxygen_count.
    '''
    global html_doxygen_count
    mapped = tags_dict.get(group1)
    if mapped is None:
        # Unknown tag file: keep the original base path.
        return group2
    html_doxygen_count += 1
    return mapped
+
def html_split2_func(group2):
    '''Return group2 with a matching BASEPUB prefix replaced, or None.'''
    for old_prefix, new_prefix in subst_dict.items():
        # Don't use regular expressions here. A key may contain characters
        # that are special in regular expressions.
        if group2.startswith(old_prefix):
            return new_prefix + group2[len(old_prefix):]
    return None
+
def install_file(in_name, out_name):
    '''
    Copy file to destination while translating references on the fly.

    in_name: path of the source file.
    out_name: path of the installed file.
    Reads the globals tags_dict, subst_dict, g_book_base and perm_mode,
    which are set by doc_install_funcargs().
    '''
    global html_doxygen_count

    # Some installed files are binary (e.g. .png).
    # Read and write all files in binary mode, thus avoiding decoding/encoding errors.
    in_basename = os.path.basename(in_name)
    with open(in_name, mode='rb') as in_file:
        # Read the whole file into a string buffer.
        buf = in_file.read()

    if (tags_dict or subst_dict) and html_start_pattern.match(buf):
        # Probably an html file. Modify it, if appropriate.
        #
        # It would be possible to modify with a call to Pattern.sub() or Pattern.subn()
        # and let a function calculate the replacement string. Example:
        # (buf, number_of_subs) = html_split2_pattern.subn(html_subst2_func, buf)
        # A previous Perl script does just that. However, calling a function from
        # sub() or subn() is a slow operation. Installing doc files for a typical
        # module such as glibmm or gtkmm takes about 8 times as long as with the
        # present split+join solution. (Measured with python 3.9.5)
        html_doxygen_count = 0
        number_of_subs = 0
        change = 'no'
        if tags_dict and b'doxygen="' in buf:
            # Doxygen 1.8.15 and earlier stores the tag file name and BASEPATH in the html files.
            # split() yields 5 elements per match: the text before the match
            # followed by the pattern's 4 capture groups.
            split_buf = html_split1_pattern.split(buf)
            for i in range(0, len(split_buf)-4, 5):
                basepath = html_split1_func(split_buf[i+1], split_buf[i+2])
                # Drop the doxygen="TAGFILE:BASEPATH" attribute and prepend
                # the (possibly remapped) base path to the relative path.
                split_buf[i+1] = b''
                split_buf[i+2] = b''
                split_buf[i+3] += basepath
            number_of_subs = len(split_buf) // 5
            if number_of_subs > 0:
                buf = b''.join(split_buf)
                change = 'rewrote ' + str(html_doxygen_count) + ' of ' + str(number_of_subs)

        if number_of_subs == 0 and subst_dict:
            # Doxygen 1.8.16 and later does not store the tag file name and BASEPATH in the html files.
            # The previous html_split1_pattern.split() won't find anything to substitute.
            # split() yields 3 elements per match; group 2 (the quoted URL)
            # is at every index i where i % 3 == 2.
            split_buf = html_split2_pattern.split(buf)
            for i in range(2, len(split_buf), 3):
                basepath = html_split2_func(split_buf[i])
                if basepath:
                    split_buf[i] = basepath
                    html_doxygen_count += 1
            number_of_subs = len(split_buf) // 3
            if html_doxygen_count > 0:
                buf = b''.join(split_buf)
            if number_of_subs > 0:
                change = 'rewrote ' + str(html_doxygen_count)
        notice('Translating ', in_basename, ' (', change, ' references)')

    elif g_book_base and devhelp_start_pattern.match(buf):
        # Probably a devhelp file.
        # Substitute new value for attribute "base" of element <book>.
        (buf, number_of_subs) = devhelp_subst_pattern.subn(rb'\1' + g_book_base, buf, 1)
        change = 'rewrote base path' if number_of_subs else 'base path not set'
        notice('Translating ', in_basename, ' (', change, ')')
    else:
        # A file that shall not be modified.
        notice('Copying ', in_basename)

    with open(out_name, mode='wb') as out_file:
        # Write the whole buffer into the target file.
        out_file.write(buf)

    # Apply the permission mode requested with --mode (default 0o644).
    os.chmod(out_name, perm_mode)
+
def split_key_value(mapping):
    '''
    Split a TAGFILE@BASEPATH or s@BASEPUB@BASEPATH argument into a triple.

    Returns (name, path, False) for a tag-file mapping (Doxygen <= 1.8.15),
    (name, path, True) for a substitution mapping (Doxygen >= 1.8.16), or
    (None, None, None) when no usable path is present.
    Calls error() (which raises RuntimeError) for a malformed mapping.
    '''
    (name, path) = mapping.split('@', 1)
    if name != 's':  # Doxygen 1.8.15 and earlier
        if not name:
            error('Invalid base path mapping: ', mapping)
        # NOTE(review): after a successful split(), path is never None, so the
        # fall-through notice below looks unreachable — kept for parity with
        # the Perl original; confirm before removing.
        if path is not None:
            return (name, path, False)
        notice('Not changing base path for tag file ', name)

    else:  # name == 's', Doxygen 1.8.16 and later
        (name, path) = path.split('@', 1)
        if not name:
            error('Invalid base path mapping: ', mapping)
        if path is not None:
            return (name, path, True)
        notice('Not changing base path for ', name)

    return (None, None, None)
+
def string_to_bytes(s):
    '''Encode a str as UTF-8 bytes; pass any other value (e.g. None) through.'''
    return s.encode('utf-8') if isinstance(s, str) else s
+
def make_dicts(tags):
    '''Rebuild the global tag-file and substitution dictionaries.

    tags is a sequence of TAGFILE@BASEPATH / s@BASEPUB@BASEPATH mappings
    (or None/empty). Both dictionaries are always reset first.
    '''
    global tags_dict, subst_dict

    tags_dict = {}
    subst_dict = {}
    for mapping in tags or []:
        (name, path, subst) = split_key_value(mapping)
        if subst is None:
            continue
        # Translate a local absolute path to URI.
        uri = path.replace('\\', '/').replace(' ', '%20')
        if uri.startswith('/'):
            uri = 'file://' + uri
        uri = re.sub(r'^([A-Za-z]:/)', r'file:///\1', uri, count=1)  # Windows: C:/path
        if not uri.endswith('/'):
            uri += '/'
        if subst:
            notice('Using base path ', uri, ' for ', name)
            subst_dict[string_to_bytes(name)] = string_to_bytes(uri)
        else:
            notice('Using base path ', uri, ' for tag file ', name)
            tags_dict[string_to_bytes(name)] = string_to_bytes(uri)
+
def doc_install_funcargs(sources=None, target=None, book_base=None, tags=None,
                         mode=0o644, target_is_dir=True, expand_glob=False, verbose=False):
    '''
    Copy source files to target files or target directory.

    sources: list of source filenames (or glob patterns if expand_glob).
    target: target filename (target_is_dir=False) or directory.
    book_base: new base path for the "base" attribute of a Devhelp book.
    tags: TAGFILE@BASEPATH / s@BASEPUB@BASEPATH mappings, see split_key_value().
    mode: permission mode applied to each installed file.
    Returns 0 on success; error() raises RuntimeError on failure.
    '''
    global g_verbose, perm_mode, g_book_base

    # Don't use mutable default arguments; normalize None to empty lists.
    sources = [] if sources is None else sources
    tags = [] if tags is None else tags

    g_verbose = verbose
    perm_mode = mode
    make_dicts(tags)
    g_book_base = string_to_bytes(book_base)

    if not target:
        error('Target file or directory required.')
    if book_base:
        notice('Using base path ', book_base, ' for Devhelp book')

    if not target_is_dir:
        if expand_glob:
            error('Filename globbing requires target directory.')
        if len(sources) != 1:
            error('Only one source file allowed when target is a filename.')
        install_file(sources[0], target)
        return 0

    if expand_glob:
        expanded_sources = []
        for source in sources:
            expanded_sources += glob.glob(source)
        sources = expanded_sources

    basename_set = set()
    for source in sources:
        basename = os.path.basename(source)

        # If there are multiple files with the same base name in the list, only
        # the first one will be installed. This behavior makes it very easy to
        # implement a VPATH search for each individual file.
        if basename not in basename_set:
            basename_set.add(basename)
            install_file(source, os.path.join(target, basename))
    return 0
+
def doc_install_cmdargs(args=None):
    '''
    Parse command line parameters, or a sequence of strings equal to
    command line parameters. Then copy source files to target file or
    target directory.
    '''
    import argparse

    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter,
        prog=os.path.basename(__file__),
        usage='''
 %(prog)s [OPTION]... [-T] SOURCE DEST
 %(prog)s [OPTION]... SOURCE... DIRECTORY
 %(prog)s [OPTION]... -t DIRECTORY SOURCE...''',
        description='''
Copy SOURCE to DEST or multiple SOURCE files to the existing DIRECTORY,
while setting permission modes. For HTML files, translate references to
external documentation.'''
    )
    parser.add_argument('--book-base', dest='book_base', metavar='BASEPATH',
                        help='use reference BASEPATH for Devhelp book')
    parser.add_argument('-l', '--tag-base', action='append', dest='tags', metavar='SUBST',
                        help='''TAGFILE@BASEPATH use BASEPATH for references from TAGFILE (Doxygen <= 1.8.15)
s@BASEPUB@BASEPATH substitute BASEPATH for BASEPUB (Doxygen >= 1.8.16)'''
    )
    # MODE is parsed as an octal number; both 0644 and 0o644 are accepted.
    parser.add_argument('-m', '--mode', dest='mode', metavar='MODE', default='0o644',
                        help='override file permission MODE (octal)')

    # -t (target is a directory) and -T (target is a plain file) exclude
    # each other; target_is_dir defaults to True.
    group = parser.add_mutually_exclusive_group()
    group.add_argument('-t', '--target-directory', dest='target_dir', metavar='DIRECTORY',
                       help='copy all SOURCE arguments into DIRECTORY')
    group.add_argument('-T', '--no-target-directory', action='store_false', dest='target_is_dir',
                       help='treat DEST as normal file')

    parser.add_argument('--glob', action='store_true', dest='expand_glob',
                        help='expand SOURCE as filename glob pattern')
    parser.add_argument('-v', '--verbose', action='store_true', dest='verbose',
                        help='enable informational messages')
    parser.add_argument('source_dest', nargs='+',
                        help='''SOURCE DEST
SOURCE... DIRECTORY
SOURCE...'''
    )
    parsed_args = parser.parse_args(args)

    # Disambiguate the positional arguments:
    # with -T they are exactly SOURCE DEST; without -t the last one is the
    # target directory; with -t all of them are sources.
    if not parsed_args.target_is_dir:
        if len(parsed_args.source_dest) != 2:
            error('Source and destination filenames expected.')
        sources = [parsed_args.source_dest[0]]
        target = parsed_args.source_dest[1]
    else:
        target = parsed_args.target_dir
        if not target:
            if len(parsed_args.source_dest) < 2:
                error('At least one source file and destination directory expected.')
            target = parsed_args.source_dest[-1]
            sources = parsed_args.source_dest[0:-1]
        else:
            sources = parsed_args.source_dest

    return doc_install_funcargs(
        sources=sources,
        target=target,
        book_base=parsed_args.book_base,
        tags=parsed_args.tags,
        mode=int(parsed_args.mode, base=8),
        target_is_dir=parsed_args.target_is_dir,
        expand_glob=parsed_args.expand_glob,
        verbose=parsed_args.verbose
    )

# ----- Main -----
if __name__ == '__main__':
    sys.exit(doc_install_cmdargs())
--- /dev/null
+#!/usr/bin/env python3
+
+# doc_postprocess.py [-h|--help] <pattern>...
+
+# Post-process the Doxygen-generated HTML files matching pattern.
+
+import os
+import sys
+import re
+import glob
+
+# Substitutions with regular expressions are somewhat slow in Python 3.9.5.
+# Use str.replace() rather than re.sub() where possible.
+
+# [search string, compiled regular expression or None, substitution string, count]
+class_el_patterns = [
+ # return value
+ [ ' & ', re.compile(r' & *'), '& ', 1],
+ [ ' * ', re.compile(r' \* *'), '* ', 1],
+ # parameters
+ [ ' &', None, '&', 0],
+ [ '&', re.compile(r'&\b'), '& ', 0],
+ [ ' *', None, '*', 0],
+ [ '*', re.compile(r'\*\b'), '* ', 0],
+ # templates
+ [ 'template<', re.compile(r'\btemplate<'), 'template <', 1]
+]
+
+class_md_patterns = [
+ # left parenthesis
+ [ '( ', re.compile(r'\( *'), '(', 1],
+ # return value
+ [ ' & ', None, '& ', 0],
+ [ ' * ', None, '* ', 0],
+ # parameters
+ [ ' & ', re.compile(r' & *'), '& ', 0],
+ [ ' * ', re.compile(r' \* *'), '* ', 0],
+ # templates
+ [ 'template<', re.compile(r'\btemplate<'), 'template <', 1]
+]
+
+else_patterns = [
+ # template decls
+ [ 'template<', re.compile(r'^(<h\d>|)template<'), '\\1template <', 1]
+]
+
+all_lines_patterns = [
+ # For some reason, some versions of Doxygen output the full path to
+ # referenced tag files. This is bad since it breaks doc_install.py,
+ # and also because it leaks local path names into source tarballs.
+ # Thus, strip the directory prefix here.
+ [ ' doxygen="', re.compile(r' doxygen="[^":]*/([^":]+\.tag):'), ' doxygen="\\1:', 0],
+
+ [ '©', None, '©', 0],
+ [ '—', None, '—', 0],
+ [ '–', None, '–', 0],
+ [ ' ', re.compile(r' * *'), ' ', 0]
+]
+
def _apply_substs(line, substs):
    '''Apply one substitution table to a single line; return the new line.

    Each entry is [search string, compiled regex or None, substitution, count].
    The plain search string acts as a cheap pre-filter; when the entry has a
    compiled regex, the regex performs the substitution, otherwise a plain
    str.replace() is used.
    '''
    for (search, regex, replacement, count) in substs:
        if search in line:
            if regex:
                line = regex.sub(replacement, line, count=count)
            else:
                line = line.replace(search, replacement, count)
    return line

def doc_postprocess(patterns):
    '''Post-process the Doxygen-generated HTML files matching the patterns.

    patterns may be a single glob pattern, a list/tuple of patterns, or None.
    Each matching file is rewritten in place. Returns 0.
    '''
    if not isinstance(patterns, (list, tuple)):
        patterns = [] if patterns is None else [patterns]

    filepaths = []
    for pattern in patterns:
        filepaths += glob.glob(pattern)

    for filepath in filepaths:
        # Assume that the file is UTF-8 encoded.
        # If illegal UTF-8 bytes in the range 0x80..0xff are encountered, they are
        # replaced by Unicode Private Use characters in the range 0xdc80..0xdcff
        # and restored to their original values when the file is rewritten.
        with open(filepath, mode='r', encoding='utf-8', errors='surrogateescape') as file:
            # Read the whole file into a buffer, a list with one line per element.
            buf = file.readlines()

        for line_number in range(len(buf)):
            line = buf[line_number]

            # Pick the substitution table matching this kind of line,
            # then apply the table that holds for all lines.
            if '<a class="el"' in line:
                line = _apply_substs(line, class_el_patterns)
            elif ('<td class="md"' in line) or ('<td class="mdname"' in line):
                line = _apply_substs(line, class_md_patterns)
            else:
                line = _apply_substs(line, else_patterns)
            buf[line_number] = _apply_substs(line, all_lines_patterns)

        with open(filepath, mode='w', encoding='utf-8', errors='surrogateescape') as file:
            # Write the whole buffer back into the file.
            file.writelines(buf)

    return 0
+
# ----- Main -----
if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser(
        description='Post-process the Doxygen-generated HTML files matching pattern.')
    parser.add_argument('patterns', nargs='*', metavar='pattern', help='filename pattern')
    args = parser.parse_args()

    # Note: a leftover debug print of args.patterns was removed here; it
    # polluted stdout every time the script ran from a build system.
    sys.exit(doc_postprocess(args.patterns))
]
# MESON_PROJECT_DIST_ROOT is set only if meson.version() >= 0.58.0.
project_dist_root = os.getenv('MESON_PROJECT_DIST_ROOT', os.getenv('MESON_DIST_ROOT'))
-with open(os.path.join(project_dist_root, 'ChangeLog'), mode='w') as logfile:
+logfilename = os.path.join(project_dist_root, 'ChangeLog')
+with open(logfilename, mode='w', encoding='utf-8') as logfile:
result = subprocess.run(cmd, stdout=logfile)
# Distribute the libstdc++.tag file in addition to the files in the local git clone.
else:
mode = 'w'
def reset(tarinfo):
    '''tarfile add() filter: normalize ownership so tarballs are reproducible.

    Clears the build user's uid/gid and names, like GNU tar's
    --owner=root --group=root.
    '''
    tarinfo.uid = 0
    tarinfo.gid = 0
    tarinfo.uname = "root"
    tarinfo.gname = "root"
    return tarinfo
+
+
with tarfile.open(output_file, mode=mode) as tar_file:
os.chdir(source_dir) # Input filenames are relative to source_dir.
for file in sys.argv[3:]:
- tar_file.add(file)
+ tar_file.add(file, filter=reset)
# Errors raise exceptions. If an exception is raised, Meson+ninja will notice
# that the command failed, despite exit(0).
sys.exit(0)
.BI "Documentation utilities copied to " DOCTOOL-DIR :
.PD 0
.IP
-.I doc-install.pl
+.I doc_install.py
.IP
-.I doc-postprocess.pl
+.I doc_postprocess.py
.IP
.I doxygen.css
.IP
print(progname + ': putting documentation utilities in ' + doctooldir)
os.makedirs(doctooldir, exist_ok=True)
-for file in ['doc-install.pl', 'doc-postprocess.pl',
+for file in ['doc_install.py', 'doc_postprocess.py',
'doxygen.css', 'doxygen-extra.css', 'tagfile-to-devhelp2.xsl']:
src_file = os.path.join(pkgdatadir, 'doctool', file)
dest_file = os.path.join(doctooldir, file)