import os
assert('GST_ENV' not in os.environ)
'''
-cmdres = run_command(python3, '-c', ensure_not_uninstalled)
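+# check: false keeps the previous behaviour: a failing command does not abort
+# configuration here, and the result is inspected via returncode() below.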
+cmdres = run_command(python3, '-c', ensure_not_uninstalled, check: false)
if cmdres.returncode() != 0
error('Do not run `ninja` or `meson` for gst-build inside the uninstalled environment, you will run into problems')
endif
# Install gst-indent pre-commit hook
-run_command(python3, '-c', 'import shutil; shutil.copy("scripts/git-hooks/multi-pre-commit.hook", ".git/hooks/pre-commit")')
+run_command(python3, '-c', 'import shutil; shutil.copy("scripts/git-hooks/multi-pre-commit.hook", ".git/hooks/pre-commit")', check: false)
# Ensure that the user does not have Strawberry Perl in PATH, since it ships
# with a pkg-config.bat and broken pkgconfig files for libffi and zlib. Will
assert(r'Strawberry\perl\bin' not in os.environ['PATH'])
'''
if build_system == 'windows' and meson.version().version_compare('<0.60.0')
- cmdres = run_command(python3, '-c', ensure_no_strawberry_perl)
+ cmdres = run_command(python3, '-c', ensure_no_strawberry_perl, check: false)
if cmdres.returncode() != 0
error('You have Strawberry Perl in PATH which is known to cause build issues with Meson < 0.60.0. Please remove it from PATH, uninstall it, or upgrade Meson.')
endif
if not meson.is_subproject() and cc.get_id() == 'msvc'
uname = find_program('uname', required: false)
if uname.found()
- ret = run_command(uname, '-o')
+ ret = run_command(uname, '-o', check: false)
if ret.returncode() == 0 and ret.stdout().to_lower() == 'msys'
- ret = run_command(uname, '-r')
+ ret = run_command(uname, '-r', check: false)
# The kernel version returned by uname is actually the msys version
if ret.returncode() == 0 and ret.stdout().startswith('2')
# If a system zlib is found, disable UNIX features in zlib.h and zconf.h
subdir('data')
-if run_command(python3,
- '-c', 'import gi; gi.require_version("Gtk", "3.0")').returncode() == 0
+if run_command(python3, '-c', 'import gi; gi.require_version("Gtk", "3.0")', check: false).returncode() == 0
test('gst-debug-viewer', python3, args: ['-m', 'unittest'],
workdir: meson.current_source_dir())
endif
subdir('debug-viewer')
endif
subdir('docs')
-
-run_command(python3, '-c', 'import shutil; shutil.copy("hooks/multi-pre-commit.hook", ".git/hooks/pre-commit")')
endif
hotdoc_req = '>= 0.12.2'
-hotdoc_version = run_command(hotdoc_p, '--version').stdout()
+hotdoc_version = run_command(hotdoc_p, '--version', check: false).stdout()
if not hotdoc_version.version_compare(hotdoc_req)
error('Hotdoc version @0@ not found, got @1@'.format(hotdoc_req, hotdoc_version))
endif
python3,
'-c', read_file_contents,
fname,
+ check: false,
)
if cmdres.returncode() == 0
built_subprojects = cmdres.stdout().strip()
endif
hotdoc_req = '>= 0.11.0'
-hotdoc_version = run_command(hotdoc_p, '--version').stdout()
+hotdoc_version = run_command(hotdoc_p, '--version', check: false).stdout()
if not hotdoc_version.version_compare(hotdoc_req)
if get_option('doc').enabled()
error('Hotdoc version @0@ not found, got @1@'.format(hotdoc_req, hotdoc_version))
with open("@0@") as f:
print(':'.join(json.load(f).keys()), end='')
-'''.format(plugins_cache))
-
-assert(list_plugin_res.returncode() == 0,
- 'Could not list plugins from @0@\n@1@\n@1@'.format(plugins_cache, list_plugin_res.stdout(), list_plugin_res.stderr()))
-
-
+'''.format(plugins_cache),
+ check: true)
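+# With check: true, Meson aborts configuration if the command fails, so the
+# former assert on returncode() is no longer needed.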
foreach plugin_name: list_plugin_res.stdout().split(':')
plugins_doc += [hotdoc.generate_doc(plugin_name,
project_version: apiversion,
if not cc.compiles('#include <Python.h>', dependencies: [python_dep])
error_msg = 'Could not compile a simple program against python'
elif pylib_loc == ''
- check_path_exists = 'import os, sys; assert(os.path.exists(sys.argv[1]))'
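+  # Use the fs module to test whether the library exists instead of spawning
+  # Python for a path check.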
+ fsmod = import('fs')
pylib_loc = python.get_variable('LIBPL', '')
if host_machine.system() != 'windows' and host_machine.system() != 'darwin'
pylib_ldlibrary = python.get_variable('LDLIBRARY', '')
- if run_command(python, '-c', check_path_exists, join_paths(pylib_loc, pylib_ldlibrary)).returncode() != 0
+ if not fsmod.exists(pylib_loc / pylib_ldlibrary)
# Workaround for Fedora
pylib_loc = python.get_variable('LIBDIR', '')
message('pylib_loc = @0@'.format(pylib_loc))
endif
- res = run_command(python, '-c', check_path_exists, join_paths(pylib_loc, pylib_ldlibrary))
- if res.returncode() != 0
+ if not fsmod.exists(pylib_loc / pylib_ldlibrary)
      error_msg = '@0@ doesn\'t exist, can\'t use python'.format(join_paths(pylib_loc, pylib_ldlibrary))
endif
endif
'''
pygi_override_dir = get_option('pygi-overrides-dir')
if pygi_override_dir == ''
- cres = run_command(python3, '-c', override_detector, get_option('prefix'))
+ cres = run_command(python3, '-c', override_detector, get_option('prefix'), check: false)
if cres.returncode() == 0
pygi_override_dir = cres.stdout().strip()
endif
# Set release date
if gst_version_nano == 0
extract_release_date = find_program('scripts/extract-release-date-from-doap-file.py')
- run_result = run_command(extract_release_date, gst_version, files('gst-editing-services.doap'))
- if run_result.returncode() == 0
- release_date = run_result.stdout().strip()
- cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
- message('Package release date: ' + release_date)
- else
- # Error out if our release can't be found in the .doap file
- error(run_result.stderr())
- endif
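+  # check: true errors out automatically if the release date cannot be
+  # extracted from the .doap file, replacing the manual error() branch.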
+ run_result = run_command(extract_release_date, gst_version, files('gst-editing-services.doap'), check: true)
+ release_date = run_result.stdout().strip()
+ cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
+ message('Package release date: ' + release_date)
endif
if gio_dep.version().version_compare('< 2.67.4')
]
check_test = find_program ('configure_test_check.py')
-have_webrtc_check_deps = run_command (check_test).returncode() == 0
+have_webrtc_check_deps = run_command (check_test, check: false).returncode() == 0
if openssl.found() and have_webrtc_check_deps
test_deps = [certs]
dependencies = []
foreach dependency, version: { 'Newtonsoft.Json': '11.0.2', 'WebSocketSharp': '1.0.3-rc11'}
message('Getting @0@:@1@'.format(dependency, version))
- get_dep= run_command(nuget, 'get',
+ get_dep = run_command(nuget, 'get',
'--builddir', dependency,
'--nuget-name', dependency,
'--nuget-version', version,
'--csharp-version=net45',
'--current-builddir', meson.current_build_dir(),
'--builddir', meson.global_build_root(), # FIXME: --builddir specified twice?!
+ check: true,
)
- if get_dep.returncode() != 0
- error('Failed to get @0@-@1@: @2@'.format(dependency, version, get_dep.stderr()))
- endif
-
link_args = get_dep.stdout().split()
dependencies += [declare_dependency(link_args: link_args, version: version)]
foreach path: get_dep.stdout().split()
endif
hotdoc_req = '>= 0.11.0'
-hotdoc_version = run_command(hotdoc_p, '--version').stdout()
+hotdoc_version = run_command(hotdoc_p, '--version', check: false).stdout()
if not hotdoc_version.version_compare(hotdoc_req)
if get_option('doc').enabled()
error('Hotdoc version @0@ not found, got @1@'.format(hotdoc_req, hotdoc_version))
# Set release date
if gst_version_nano == 0
extract_release_date = find_program('scripts/extract-release-date-from-doap-file.py')
- run_result = run_command(extract_release_date, gst_version, files('gst-libav.doap'))
- if run_result.returncode() == 0
- release_date = run_result.stdout().strip()
- cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
- message('Package release date: ' + release_date)
- else
- # Error out if our release can't be found in the .doap file
- error(run_result.stderr())
- endif
+ run_result = run_command(extract_release_date, gst_version, files('gst-libav.doap'), check: true)
+ release_date = run_result.stdout().strip()
+ cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
+ message('Package release date: ' + release_date)
endif
configure_file(output: 'config.h', configuration: cdata)
# Set release date
if gst_version_nano == 0
extract_release_date = find_program('scripts/extract-release-date-from-doap-file.py')
- run_result = run_command(extract_release_date, gst_version, files('gst-omx.doap'))
- if run_result.returncode() == 0
- release_date = run_result.stdout().strip()
- cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
- message('Package release date: ' + release_date)
- else
- # Error out if our release can't be found in the .doap file
- error(run_result.stderr())
- endif
+ run_result = run_command(extract_release_date, gst_version, files('gst-omx.doap'), check: true)
+ release_date = run_result.stdout().strip()
+ cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
+ message('Package release date: ' + release_date)
endif
configure_file(output: 'config.h', configuration: cdata)
endif
hotdoc_req = '>= 0.11.0'
-hotdoc_version = run_command(hotdoc_p, '--version').stdout()
+hotdoc_version = run_command(hotdoc_p, '--version', check: false).stdout()
if not hotdoc_version.version_compare(hotdoc_req)
if get_option('doc').enabled()
error('Hotdoc version @0@ not found, got @1@'.format(hotdoc_req, hotdoc_version))
with open("@0@") as f:
print(':'.join(json.load(f).keys()), end='')
-'''.format(plugins_cache))
-
-assert(list_plugin_res.returncode() == 0,
- 'Could not list plugins from @0@'.format(plugins_cache))
+'''.format(plugins_cache),
+ check: true)
plugins_doc = []
foreach plugin_name: list_plugin_res.stdout().split(':')
# Check the data dir used by opencv for its xml data files
# Use prefix from pkg-config to be compatible with cross-compilation
- r = run_command('test', '-d', opencv_prefix + '/share/opencv')
+ r = run_command('test', '-d', opencv_prefix + '/share/opencv', check: false)
if r.returncode() == 0
gstopencv_cargs += '-DOPENCV_PATH_NAME="opencv"'
else
- r = run_command('test', '-d', opencv_prefix + '/share/OpenCV')
+ r = run_command('test', '-d', opencv_prefix + '/share/OpenCV', check: false)
if r.returncode() == 0
gstopencv_cargs += '-DOPENCV_PATH_NAME="OpenCV"'
else
- r = run_command('test', '-d', opencv_prefix + '/share/opencv4')
+ r = run_command('test', '-d', opencv_prefix + '/share/opencv4', check: false)
if r.returncode() == 0
gstopencv_cargs += '-DOPENCV_PATH_NAME="opencv4"'
else
# https://github.com/KhronosGroup/MoltenVK/issues/492
vulkan_dep = cc.find_library('MoltenVK', required : get_option('vulkan'))
elif host_system == 'windows'
- vulkan_root = run_command(python3, '-c', 'import os; print(os.environ.get("VK_SDK_PATH"))').stdout().strip()
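+  # check: false is fine here: the one-liner only prints an environment
+  # variable, and an empty or 'None' result is handled below.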
+ vulkan_root = run_command(python3, '-c', 'import os; print(os.environ.get("VK_SDK_PATH"))', check: false).stdout().strip()
if vulkan_root != '' and vulkan_root != 'None'
vulkan_lib_dir = ''
if build_machine.cpu_family() == 'x86_64'
# Set release date
if gst_version_nano == 0
extract_release_date = find_program('scripts/extract-release-date-from-doap-file.py')
- run_result = run_command(extract_release_date, gst_version, files('gst-plugins-bad.doap'))
- if run_result.returncode() == 0
- release_date = run_result.stdout().strip()
- cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
- message('Package release date: ' + release_date)
- else
- # Error out if our release can't be found in the .doap file
- error(run_result.stderr())
- endif
+ run_result = run_command(extract_release_date, gst_version, files('gst-plugins-bad.doap'), check: true)
+ release_date = run_result.stdout().strip()
+ cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
+ message('Package release date: ' + release_date)
endif
if glib_dep.version().version_compare('< 2.67.4')
use_msdk = true
else
# Old versions of MediaSDK don't provide a pkg-config file
- mfx_root = run_command(python3, '-c', 'import os; print(os.environ.get("INTELMEDIASDKROOT", os.environ.get("MFX_HOME", "")))').stdout().strip()
+ mfx_root = run_command(python3, '-c', 'import os; print(os.environ.get("INTELMEDIASDKROOT", os.environ.get("MFX_HOME", "")))', check: false).stdout().strip()
if mfx_root != ''
mfx_libdir = [mfx_root + '/lib/lin_x64', mfx_root + '/lib/x64', mfx_root + '/lib64', mfx_root + '/lib']
endif
hotdoc_req = '>= 0.11.0'
-hotdoc_version = run_command(hotdoc_p, '--version').stdout()
+hotdoc_version = run_command(hotdoc_p, '--version', check: false).stdout()
if not hotdoc_version.version_compare(hotdoc_req)
if get_option('doc').enabled()
error('Hotdoc version @0@ not found, got @1@'.format(hotdoc_req, hotdoc_version))
with open("@0@") as f:
print(':'.join(json.load(f).keys()), end='')
-'''.format(plugins_cache))
-
-assert(list_plugin_res.returncode() == 0,
- 'Could not list plugins from @0@\n@1@\n@1@'.format(plugins_cache, list_plugin_res.stdout(), list_plugin_res.stderr()))
-
-
+'''.format(plugins_cache),
+ check: true)
foreach plugin_name: list_plugin_res.stdout().split(':')
plugins_doc += [hotdoc.generate_doc(plugin_name,
project_version: api_version,
# Set release date
if gst_version_nano == 0
extract_release_date = find_program('scripts/extract-release-date-from-doap-file.py')
- run_result = run_command(extract_release_date, gst_version, files('gst-plugins-base.doap'))
- if run_result.returncode() == 0
- release_date = run_result.stdout().strip()
- core_conf.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
- message('Package release date: ' + release_date)
- else
- # Error out if our release can't be found in the .doap file
- error(run_result.stderr())
- endif
+ run_result = run_command(extract_release_date, gst_version, files('gst-plugins-base.doap'), check: true)
+ release_date = run_result.stdout().strip()
+ core_conf.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
+ message('Package release date: ' + release_date)
endif
if gio_dep.version().version_compare('< 2.67.4')
endif
hotdoc_req = '>= 0.11.0'
-hotdoc_version = run_command(hotdoc_p, '--version').stdout()
+hotdoc_version = run_command(hotdoc_p, '--version', check: false).stdout()
if not hotdoc_version.version_compare(hotdoc_req)
if get_option('doc').enabled()
error('Hotdoc version @0@ not found, got @1@'.format(hotdoc_req, hotdoc_version))
with open("@0@") as f:
print(':'.join(json.load(f).keys()), end='')
-'''.format(plugins_cache))
-
-assert(list_plugin_res.returncode() == 0,
- 'Could not list plugins from @0@'.format(plugins_cache))
-
+'''.format(plugins_cache),
+ check: true)
foreach plugin_name: list_plugin_res.stdout().split(':')
plugins_doc += [hotdoc.generate_doc(plugin_name,
project_version: api_version,
if nasm.found()
# We can't use the version: kwarg for find_program because old versions
# of nasm don't support --version
- ret = run_command(nasm, '-v')
+ ret = run_command(nasm, '-v', check: false)
if ret.returncode() == 0
nasm_version = ret.stdout().strip().split()[2]
nasm_req = '>=2.13'
# Set release date
if gst_version_nano == 0
extract_release_date = find_program('scripts/extract-release-date-from-doap-file.py')
- run_result = run_command(extract_release_date, gst_version, files('gst-plugins-good.doap'))
- if run_result.returncode() == 0
- release_date = run_result.stdout().strip()
- cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
- message('Package release date: ' + release_date)
- else
- # Error out if our release can't be found in the .doap file
- error(run_result.stderr())
- endif
+ run_result = run_command(extract_release_date, gst_version, files('gst-plugins-good.doap'), check: true)
+ release_date = run_result.stdout().strip()
+ cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
+ message('Package release date: ' + release_date)
endif
if gio_dep.version().version_compare('< 2.67.4')
endif
hotdoc_req = '>= 0.11.0'
-hotdoc_version = run_command(hotdoc_p, '--version').stdout()
+hotdoc_version = run_command(hotdoc_p, '--version', check: false).stdout()
if not hotdoc_version.version_compare(hotdoc_req)
if get_option('doc').enabled()
error('Hotdoc version @0@ not found, got @1@'.format(hotdoc_req, hotdoc_version))
with open("@0@") as f:
print(':'.join(json.load(f).keys()), end='')
-'''.format(plugins_cache))
-
-assert(list_plugin_res.returncode() == 0,
- 'Could not list plugins from @0@'.format(plugins_cache))
-
+'''.format(plugins_cache),
+ check: true)
foreach plugin_name: list_plugin_res.stdout().split(':')
plugins_doc += [hotdoc.generate_doc(plugin_name,
project_version: api_version,
# Set release date
if gst_version_nano == 0
extract_release_date = find_program('scripts/extract-release-date-from-doap-file.py')
- run_result = run_command(extract_release_date, gst_version, files('gst-plugins-ugly.doap'))
- if run_result.returncode() == 0
- release_date = run_result.stdout().strip()
- cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
- message('Package release date: ' + release_date)
- else
- # Error out if our release can't be found in the .doap file
- error(run_result.stderr())
- endif
+ run_result = run_command(extract_release_date, gst_version, files('gst-plugins-ugly.doap'), check: true)
+ release_date = run_result.stdout().strip()
+ cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
+ message('Package release date: ' + release_date)
endif
configure_file(output : 'config.h', configuration : cdata)
python_abi_flags = python.get_variable('ABIFLAGS', '')
pylib_loc = get_option('libpython-dir')
if pylib_loc == ''
- check_path_exists = 'import os, sys; assert(os.path.exists(sys.argv[1]))'
+ fsmod = import('fs')
pylib_loc = python.get_variable('LIBPL', '')
if host_machine.system() != 'windows' and host_machine.system() != 'darwin'
pylib_ldlibrary = python.get_variable('LDLIBRARY', '')
- if run_command(python, '-c', check_path_exists, join_paths(pylib_loc, pylib_ldlibrary)).returncode() != 0
+ if not fsmod.exists(pylib_loc / pylib_ldlibrary)
# Workaround for Fedora
pylib_loc = python.get_variable('LIBDIR', '')
message('pylib_loc = @0@'.format(pylib_loc))
endif
- assert(
- run_command(python, '-c', check_path_exists, join_paths(pylib_loc, pylib_ldlibrary)).returncode() == 0,
- 'Python dynamic library path could not be determined'
- )
+ if not fsmod.exists(pylib_loc / pylib_ldlibrary)
+ error('Python dynamic library path could not be determined')
+ endif
endif
endif
if not meson.is_subproject()
pkgconfig = find_program('pkg-config')
runcmd = run_command(pkgconfig, '--variable=pluginsdir',
- 'gstreamer-' + api_version)
- if runcmd.returncode() == 0
- pluginsdirs = runcmd.stdout().split()
- else
- error('Could not determine GStreamer core plugins directory for unit tests.')
- endif
+ 'gstreamer-' + api_version, check: true)
+ pluginsdirs = runcmd.stdout().split()
endif
runcmd = run_command(python, '-c', '''with open("@0@/mesonconfig.py", "w") as f:
f.write("path='@1@'")'''.format(
- join_paths(meson.current_build_dir()), join_paths(meson.current_build_dir(), '..')))
-
-if runcmd.returncode() != 0
- error('Could not configure testsuite config file.' + runcmd.stderr())
-endif
+ join_paths(meson.current_build_dir()), join_paths(meson.current_build_dir(), '..')),
+ check: true)
pluginsdirs = []
if gst_dep.type_name() == 'pkgconfig'
endif
hotdoc_req = '>= 0.11.0'
-hotdoc_version = run_command(hotdoc_p, '--version').stdout()
+hotdoc_version = run_command(hotdoc_p, '--version', check: false).stdout()
if not hotdoc_version.version_compare(hotdoc_req)
if get_option('doc').enabled()
error('Hotdoc version @0@ not found, got @1@'.format(hotdoc_req, hotdoc_version))
# Set release date
if gst_version_nano == 0
extract_release_date = find_program('scripts/extract-release-date-from-doap-file.py')
- run_result = run_command(extract_release_date, gst_version, files('gst-rtsp-server.doap'))
- if run_result.returncode() == 0
- release_date = run_result.stdout().strip()
- cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
- message('Package release date: ' + release_date)
- else
- # Error out if our release can't be found in the .doap file
- error(run_result.stderr())
- endif
+ run_result = run_command(extract_release_date, gst_version, files('gst-rtsp-server.doap'), check: true)
+ release_date = run_result.stdout().strip()
+ cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
+ message('Package release date: ' + release_date)
endif
configure_file(output: 'config.h', configuration: cdata)
'--csharp-version=net45',
'--current-builddir', meson.current_build_dir(),
'--builddir', meson.build_root(), # FIXME: --builddir specified twice?!
+ check: true,
)
nunit_mono_path = []
nunit_dep = dependency('mono-nunit', required: false, version: ['>=2.6', '< 2.7'])
if not nunit_dep.found()
- if get_nunit_res.returncode() != 0
- message('Failed to get NUnit: ' + get_nunit_res.stderr())
- else
- foreach path: get_nunit_res.stdout().split()
- nunit_mono_path += [join_paths(meson.build_root(), path.strip('-r:'), '..')]
- endforeach
+ foreach path: get_nunit_res.stdout().split()
+ nunit_mono_path += [meson.build_root() / path.strip('-r:') / '..']
+ endforeach
- nunit_dep = declare_dependency(link_args: get_nunit_res.stdout().split(),
- version: nunit_version)
- endif
+ nunit_dep = declare_dependency(link_args: get_nunit_res.stdout().split(),
+ version: nunit_version)
endif
if nunit_mono_path.length() > 0
endif
hotdoc_req = '>= 0.11.0'
-hotdoc_version = run_command(hotdoc_p, '--version').stdout()
+hotdoc_version = run_command(hotdoc_p, '--version', check: false).stdout()
if not hotdoc_version.version_compare(hotdoc_req)
if get_option('doc').enabled()
error('Hotdoc version @0@ not found, got @1@'.format(hotdoc_req, hotdoc_version))
# Set release date
if gst_version_nano == 0
extract_release_date = find_program('scripts/extract-release-date-from-doap-file.py')
- run_result = run_command(extract_release_date, gst_version, files('gstreamer-vaapi.doap'))
- if run_result.returncode() == 0
- release_date = run_result.stdout().strip()
- cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
- message('Package release date: ' + release_date)
- else
- # Error out if our release can't be found in the .doap file
- error(run_result.stderr())
- endif
+ run_result = run_command(extract_release_date, gst_version, files('gstreamer-vaapi.doap'), check: true)
+ release_date = run_result.stdout().strip()
+ cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
+ message('Package release date: ' + release_date)
endif
if gmodule_dep.version().version_compare('< 2.67.4')
endif
hotdoc_req = '>= 0.11.0'
-hotdoc_version = run_command(hotdoc_p, '--version').stdout()
+hotdoc_version = run_command(hotdoc_p, '--version', check: false).stdout()
if not hotdoc_version.version_compare(hotdoc_req)
if get_option('doc').enabled()
error('Hotdoc version @0@ not found, got @1@'.format(hotdoc_req, hotdoc_version))
bison_min_version='2.4'
bison = find_program('bison', 'win_bison')
-bversion_res = run_command([bison, '--version'])
-if bversion_res.returncode() != 0
- error('Could not get bison version (@0@)'.format(bversion_res.stderr()))
-endif
-
+bversion_res = run_command([bison, '--version'], check: true)
bversion = bversion_res.stdout().split('\n')[0].split(' ')[-1].strip()
if bversion.version_compare('<' + bison_min_version)
error('bison version @0@ >= @1@: NO'.format(bversion, bison_min_version))
# Set release date
if gst_version_nano == 0
extract_release_date = find_program('scripts/extract-release-date-from-doap-file.py')
- run_result = run_command(extract_release_date, gst_version, files('gstreamer.doap'))
- if run_result.returncode() == 0
- release_date = run_result.stdout().strip()
- cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
- message('Package release date: ' + release_date)
- else
- # Error out if our release can't be found in the .doap file
- error(run_result.stderr())
- endif
+ run_result = run_command(extract_release_date, gst_version, files('gstreamer.doap'), check: true)
+ release_date = run_result.stdout().strip()
+ cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
+ message('Package release date: ' + release_date)
endif
configure_file(output : 'config.h', configuration : cdata)
root_rel = '../..'
python = import('python').find_installation()
-if run_command(python, '-c', 'import gi').returncode() != 0
+if run_command(python, '-c', 'import gi', check: false).returncode() != 0
message('PyGObject not found, not running PyGObject tests')
subdir_done()
endif