Merge pull request #1751 from centricular/fix-cached-deps

Fix caching of external dependencies of various types
pull/1688/head
Jussi Pakkanen 8 years ago committed by GitHub
commit 69abca2a67
  1. 2
      mesonbuild/coredata.py
  2. 38
      mesonbuild/dependencies.py
  3. 78
      mesonbuild/interpreter.py
  4. 2
      mesonbuild/mesonlib.py
  5. 6
      mesonbuild/mintro.py
  6. 10
      run_project_tests.py
  7. 8
      run_tests.py
  8. 12
      run_unittests.py
  9. 1
      test cases/linuxlike/1 pkg-config/meson.build
  10. 46
      test cases/linuxlike/5 dependency versions/meson.build

@@ -160,7 +160,7 @@ class CoreData:
self.wrap_mode = options.wrap_mode
self.compilers = OrderedDict()
self.cross_compilers = OrderedDict()
self.deps = {}
self.deps = OrderedDict()
self.modules = {}
# Only to print a warning if it changes between Meson invocations.
self.pkgconf_envvar = os.environ.get('PKG_CONFIG_PATH', '')

@@ -27,9 +27,10 @@ import subprocess
import sysconfig
from enum import Enum
from collections import OrderedDict
from . mesonlib import MesonException, version_compare, version_compare_many, Popen_safe
from . import mlog
from . import mesonlib
from .mesonlib import Popen_safe, flatten
from .mesonlib import MesonException, version_compare, version_compare_many
from .environment import detect_cpu_family, for_windows
class DependencyException(MesonException):
@@ -103,6 +104,7 @@ class InternalDependency(Dependency):
def __init__(self, version, incdirs, compile_args, link_args, libraries, sources, ext_deps):
super().__init__('internal', {})
self.version = version
self.is_found = True
self.include_directories = incdirs
self.compile_args = compile_args
self.link_args = link_args
@@ -127,6 +129,7 @@ class PkgConfigDependency(Dependency):
def __init__(self, name, environment, kwargs):
Dependency.__init__(self, 'pkgconfig', kwargs)
self.is_libtool = False
self.version_reqs = kwargs.get('version', None)
self.required = kwargs.get('required', True)
self.static = kwargs.get('static', False)
self.silent = kwargs.get('silent', False)
@@ -187,7 +190,6 @@ class PkgConfigDependency(Dependency):
''.format(self.type_string, name))
return
found_msg = [self.type_string + ' dependency', mlog.bold(name), 'found:']
self.version_reqs = kwargs.get('version', None)
if self.version_reqs is None:
self.is_found = True
else:
@@ -1743,21 +1745,23 @@ class LLVMDependency(Dependency):
return True
def get_dep_identifier(name, kwargs):
elements = [name]
modlist = kwargs.get('modules', [])
if isinstance(modlist, str):
modlist = [modlist]
for module in modlist:
elements.append(module)
# We use a tuple because we need a non-mutable structure to use as the key
# of a dictionary and a string has potential for name collisions
identifier = tuple(elements)
identifier += ('main', kwargs.get('main', False))
identifier += ('static', kwargs.get('static', False))
if 'fallback' in kwargs:
f = kwargs.get('fallback')
identifier += ('fallback', f[0], f[1])
def get_dep_identifier(name, kwargs, want_cross):
    """Build an immutable, hashable identifier for a dependency lookup.

    The identifier is used as a key into coredata.deps, so every element
    must be hashable. 'version', 'native', 'required' and 'fallback' are
    deliberately excluded from the per-kwarg loop (see inline comments).
    """
    # Need immutable objects since the identifier will be used as a dict key
    version_reqs = flatten(kwargs.get('version', []))
    if isinstance(version_reqs, list):
        version_reqs = frozenset(version_reqs)
    identifier = (name, version_reqs, want_cross)
    # Sort the keys so that two kwargs dicts with the same contents but a
    # different insertion order always produce the same cache key.
    for key in sorted(kwargs):
        # 'version' is embedded above as the second element for easy access
        # 'native' is handled above with `want_cross`
        # 'required' is irrelevant for caching; the caller handles it separately
        # 'fallback' subprojects cannot be cached -- they must be initialized
        if key in ('version', 'native', 'required', 'fallback'):
            continue
        value = kwargs[key]
        # All keyword arguments are strings, ints, or lists (or lists of lists)
        if isinstance(value, list):
            value = frozenset(flatten(value))
        identifier += (key, value)
    return identifier
def find_external_dependency(name, environment, kwargs):

@@ -23,7 +23,8 @@ from . import compilers
from .wrap import wrap, WrapMode
from . import mesonlib
from .mesonlib import FileMode, Popen_safe, get_meson_script
from .dependencies import InternalDependency, Dependency, ExternalProgram
from .dependencies import ExternalProgram
from .dependencies import InternalDependency, Dependency, DependencyException
from .interpreterbase import InterpreterBase
from .interpreterbase import check_stringlist, noPosargs, noKwargs, stringArgs
from .interpreterbase import InterpreterException, InvalidArguments, InvalidCode
@@ -1852,13 +1853,7 @@ class Interpreter(InterpreterBase):
def func_find_library(self, node, args, kwargs):
mlog.log(mlog.red('DEPRECATION:'), 'find_library() is removed, use the corresponding method in compiler object instead.')
def func_dependency(self, node, args, kwargs):
self.validate_arguments(args, 1, [str])
name = args[0]
if '<' in name or '>' in name or '=' in name:
raise InvalidArguments('Characters <, > and = are forbidden in dependency names. To specify'
'version\n requirements use the \'version\' keyword argument instead.')
identifier = dependencies.get_dep_identifier(name, kwargs)
def _find_cached_dep(self, name, kwargs):
# Check if we want this as a cross-dep or a native-dep
# FIXME: Not all dependencies support such a distinction right now,
# and we repeat this check inside dependencies that do. We need to
@@ -1868,60 +1863,79 @@ class Interpreter(InterpreterBase):
want_cross = not kwargs['native']
else:
want_cross = is_cross
# Check if we've already searched for and found this dep
identifier = dependencies.get_dep_identifier(name, kwargs, want_cross)
cached_dep = None
# Check if we've already searched for and found this dep
if identifier in self.coredata.deps:
cached_dep = self.coredata.deps[identifier]
if 'version' in kwargs:
wanted = kwargs['version']
found = cached_dep.get_version()
if not cached_dep.found() or \
not mesonlib.version_compare_many(found, wanted)[0]:
# Cached dep has the wrong version. Check if an external
# dependency or a fallback dependency provides it.
cached_dep = None
# Don't re-use cached dep if it wasn't required but this one is,
# so we properly go into fallback/error code paths
if kwargs.get('required', True) and not getattr(cached_dep, 'required', False):
cached_dep = None
# Don't reuse cached dep if one is a cross-dep and the other is a native dep
if not getattr(cached_dep, 'want_cross', is_cross) == want_cross:
cached_dep = None
else:
# Check if exactly the same dep with different version requirements
# was found already.
wanted = identifier[1]
for trial, trial_dep in self.coredata.deps.items():
# trial[1], identifier[1] are the version requirements
if trial[0] != identifier[0] or trial[2:] != identifier[2:]:
continue
found = trial_dep.get_version()
if not wanted or mesonlib.version_compare_many(found, wanted)[0]:
# We either don't care about the version, or our
# version requirements matched the trial dep's version.
cached_dep = trial_dep
break
return identifier, cached_dep
def func_dependency(self, node, args, kwargs):
self.validate_arguments(args, 1, [str])
name = args[0]
if '<' in name or '>' in name or '=' in name:
raise InvalidArguments('Characters <, > and = are forbidden in dependency names. To specify'
'version\n requirements use the \'version\' keyword argument instead.')
identifier, cached_dep = self._find_cached_dep(name, kwargs)
if cached_dep:
if kwargs.get('required', True) and not cached_dep.found():
m = 'Dependency {!r} was already checked and was not found'
raise DependencyException(m.format(name))
dep = cached_dep
else:
# We need to actually search for this dep
exception = None
dep = None
# If the fallback has already been configured (possibly by a higher level project)
# try to use it before using the native version
# If the dependency has already been configured, possibly by
# a higher level project, try to use it first.
if 'fallback' in kwargs:
dirname, varname = self.get_subproject_infos(kwargs)
if dirname in self.subprojects:
subproject = self.subprojects[dirname]
try:
dep = self.subprojects[dirname].get_variable_method([varname], {})
dep = dep.held_object
# Never add fallback deps to self.coredata.deps
return subproject.get_variable_method([varname], {})
except KeyError:
pass
# Search for it outside the project
if not dep:
try:
dep = dependencies.find_external_dependency(name, self.environment, kwargs)
except dependencies.DependencyException as e:
except DependencyException as e:
exception = e
pass
# Search inside the projects list
if not dep or not dep.found():
if 'fallback' in kwargs:
fallback_dep = self.dependency_fallback(name, kwargs)
if fallback_dep:
# Never add fallback deps to self.coredata.deps since we
# cannot cache them. They must always be evaluated else
# we won't actually read all the build files.
return fallback_dep
if not dep:
raise exception
self.coredata.deps[identifier] = dep
# Only store found-deps in the cache
if dep.found():
self.coredata.deps[identifier] = dep
return DependencyHolder(dep)
def get_subproject_infos(self, kwargs):
@@ -2230,7 +2244,7 @@ class Interpreter(InterpreterBase):
absname = os.path.join(self.environment.get_source_dir(), buildfilename)
if not os.path.isfile(absname):
self.subdir = prev_subdir
raise InterpreterException('Nonexistent build def file %s.' % buildfilename)
raise InterpreterException('Non-existent build file {!r}'.format(buildfilename))
with open(absname, encoding='utf8') as f:
code = f.read()
assert(isinstance(code, str))

@@ -305,7 +305,7 @@ def version_compare(vstr1, vstr2, strict=False):
return cmpop(varr1, varr2)
def version_compare_many(vstr1, conditions):
if not isinstance(conditions, (list, tuple)):
if not isinstance(conditions, (list, tuple, frozenset)):
conditions = [conditions]
found = []
not_found = []

@@ -161,12 +161,12 @@ def list_buildsystem_files(coredata, builddata):
print(json.dumps(filelist))
def list_deps(coredata):
    """Print (as JSON) the name and compiler/linker args of every cached,
    found external dependency.

    coredata.deps maps opaque identifier tuples to Dependency objects, so we
    must iterate .values(); iterating the dict itself would yield the tuple
    keys, which have no found()/get_*_args() methods.
    """
    result = []
    for d in coredata.deps.values():
        if d.found():
            args = {'compile_args': d.get_compile_args(),
                    'link_args': d.get_link_args()}
            result += [d.name, args]
    print(json.dumps(result))
def list_tests(testdata):

@@ -36,6 +36,7 @@ import concurrent.futures as conc
import re
from run_tests import get_backend_commands, get_backend_args_for_dir, Backend
from run_tests import ensure_backend_detects_changes
class BuildStep(Enum):
@@ -342,6 +343,10 @@ def _run_test(testdir, test_build_dir, install_dir, extra_args, compiler, backen
return TestResult('Test that should have failed succeeded', BuildStep.configure, stdo, stde, mesonlog, gen_time)
if returncode != 0:
return TestResult('Generating the build system failed.', BuildStep.configure, stdo, stde, mesonlog, gen_time)
# Touch the meson.build file to force a regenerate so we can test that
# regeneration works before a build is run.
ensure_backend_detects_changes(backend)
os.utime(os.path.join(testdir, 'meson.build'))
# Build with subprocess
dir_args = get_backend_args_for_dir(backend, test_build_dir)
build_start = time.time()
@@ -356,9 +361,8 @@ def _run_test(testdir, test_build_dir, install_dir, extra_args, compiler, backen
if pc.returncode != 0:
return TestResult('Compiling source code failed.', BuildStep.build, stdo, stde, mesonlog, gen_time, build_time)
# Touch the meson.build file to force a regenerate so we can test that
# regeneration works. We need to sleep for 0.2s because Ninja tracks mtimes
# at a low resolution: https://github.com/ninja-build/ninja/issues/371
time.sleep(0.2)
# regeneration works after a build is complete.
ensure_backend_detects_changes(backend)
os.utime(os.path.join(testdir, 'meson.build'))
test_start = time.time()
# Test in-process

@@ -16,6 +16,7 @@
import os
import sys
import time
import shutil
import subprocess
import platform
@@ -98,6 +99,13 @@ def get_backend_commands(backend, debug=False):
raise AssertionError('Unknown backend: {!r}'.format(backend))
return cmd, clean_cmd, test_cmd, install_cmd, uninstall_cmd
def ensure_backend_detects_changes(backend):
    # Ninja records mtimes at a coarse resolution, so a file touched right
    # after build.ninja was written can look unchanged. Sleep to widen the
    # timestamp gap: https://github.com/ninja-build/ninja/issues/371
    if backend is not Backend.ninja:
        return
    time.sleep(1)
def get_fake_options(prefix):
import argparse
opts = argparse.Namespace()

@@ -30,6 +30,7 @@ from mesonbuild.dependencies import PkgConfigDependency, ExternalProgram
from run_tests import exe_suffix, get_fake_options, FakeEnvironment
from run_tests import get_builddir_target_args, get_backend_commands, Backend
from run_tests import ensure_backend_detects_changes
def get_soname(fname):
@@ -355,13 +356,6 @@ class BasePlatformTests(unittest.TestCase):
# XCode backend is untested with unit tests, help welcome!
self.no_rebuild_stdout = 'UNKNOWN BACKEND {!r}'.format(self.backend.name)
def ensure_backend_detects_changes(self):
# This is needed to increase the difference between build.ninja's
# timestamp and the timestamp of whatever you changed due to a Ninja
# bug: https://github.com/ninja-build/ninja/issues/371
if self.backend is Backend.ninja:
time.sleep(1)
def _print_meson_log(self):
log = os.path.join(self.logdir, 'meson-log.txt')
if not os.path.isfile(log):
@@ -439,14 +433,14 @@ class BasePlatformTests(unittest.TestCase):
def setconf(self, arg, will_build=True):
if will_build:
self.ensure_backend_detects_changes()
ensure_backend_detects_changes(self.backend)
self._run(self.mconf_command + [arg, self.builddir])
def wipe(self):
shutil.rmtree(self.builddir)
def utime(self, f):
self.ensure_backend_detects_changes()
ensure_backend_detects_changes(self.backend)
os.utime(f)
def get_compdb(self):

@@ -45,4 +45,3 @@ inc = include_directories('incdir')
r = cc.run(code, include_directories : inc, dependencies : zlibdep)
assert(r.returncode() == 0, 'Running manual zlib test failed.')

@@ -21,10 +21,18 @@ if dependency('zlib', version : ['<=1.0', '>=9999', '=' + zlib.version()], requi
error('zlib <=1.0 >=9999 should not have been found')
endif
# Test that a versionless zlib is found after not finding an optional zlib dep with version reqs
zlibopt = dependency('zlib', required : false)
assert(zlibopt.found() == true, 'zlib not found')
# Test https://github.com/mesonbuild/meson/pull/610
dependency('somebrokenlib', version : '>=2.0', required : false)
dependency('somebrokenlib', version : '>=1.0', required : false)
# Search for an external dependency that won't be found, but must later be
# found via fallbacks
somelibnotfound = dependency('somelib', required : false)
assert(somelibnotfound.found() == false, 'somelibnotfound was found?')
# Find internal dependency without version
somelibver = dependency('somelib',
fallback : ['somelibnover', 'some_dep'])
@@ -37,17 +45,51 @@ somelib = dependency('somelib',
somelibver = dependency('somelib',
version : '>= 0.3',
fallback : ['somelibver', 'some_dep'])
# Find somelib again, but with a fallback that will fail
# Find somelib again, but with a fallback that will fail because subproject does not exist
somelibfail = dependency('somelib',
version : '>= 0.2',
required : false,
fallback : ['somelibfail', 'some_dep'])
assert(somelibfail.found() == false, 'somelibfail found via wrong fallback')
# Find somelib again, but with a fallback that will fail because dependency does not exist
somefail_dep = dependency('somelib',
version : '>= 0.2',
required : false,
fallback : ['somelib', 'somefail_dep'])
assert(somefail_dep.found() == false, 'somefail_dep found via wrong fallback')
fakezlib_dep = dependency('zlib',
# Fallback should only be used if the primary was not found
fallbackzlib_dep = dependency('zlib',
fallback : ['somelib', 'fakezlib_dep'])
assert(fallbackzlib_dep.type_name() == 'pkgconfig', 'fallbackzlib_dep should be of type "pkgconfig", not ' + fallbackzlib_dep.type_name())
# Check that the above dependency was pkgconfig because the fallback wasn't
# checked, not because the fallback didn't work
fakezlib_dep = dependency('fakezlib',
fallback : ['somelib', 'fakezlib_dep'])
assert(fakezlib_dep.type_name() == 'internal', 'fakezlib_dep should be of type "internal", not ' + fakezlib_dep.type_name())
# Check that you can find a dependency by not specifying a version after not
# finding it by specifying a version. We add `static: true` here so that the
# previously cached zlib dependencies don't get checked.
dependency('zlib', static : true, version : '>=8000', required : false)
dependency('zlib', static : true)
# Check that you can find a dependency by specifying a correct version after
# not finding it by specifying a wrong one. We add `method: pkg-config` here so that
# the previously cached zlib dependencies don't get checked.
bzip2 = dependency('zlib', method : 'pkg-config', version : '>=9000', required : false)
bzip2 = dependency('zlib', method : 'pkg-config', version : '>=1.0')
if meson.is_cross_build()
# Test caching of native and cross dependencies
# https://github.com/mesonbuild/meson/issues/1736
cross_prefix = dependency('zlib').get_pkgconfig_variable('prefix')
native_prefix = dependency('zlib', native : true).get_pkgconfig_variable('prefix')
assert(cross_prefix != '', 'cross zlib prefix is not defined')
assert(native_prefix != '', 'native zlib prefix is not defined')
assert(native_prefix != cross_prefix, 'native prefix == cross_prefix == ' + native_prefix)
endif
foreach d : ['sdl2', 'gnustep', 'wx', 'gl', 'python3', 'boost', 'gtest', 'gmock']
dep = dependency(d, required : false)
if dep.found()

Loading…
Cancel
Save