Merge pull request #3150 from trhd/setups2

Fix a data pickling bug.
Jussi Pakkanen committed via GitHub, 7 years ago
commit a383c5c1a5
Changed files:
  1. mesonbuild/backend/ninjabackend.py (11 changed lines)
  2. mesonbuild/build.py (20 changed lines)
  3. mesonbuild/coredata.py (6 changed lines)
  4. mesonbuild/environment.py (9 changed lines)
  5. mesonbuild/mconf.py (19 changed lines)
  6. mesonbuild/mesonmain.py (10 changed lines)
  7. mesonbuild/mintro.py (44 changed lines)
  8. mesonbuild/mtest.py (86 changed lines)
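
Taken together, the change replaces ad-hoc open()/pickle calls at each entry point with per-module load()/save() helpers that take the build directory and handle the path joining, unpickling, and error checking themselves. A minimal usage sketch of the resulting API (the 'builddir' path is a placeholder, not part of the commit):

    from mesonbuild import build, coredata

    builddir = 'builddir'            # placeholder: any directory already configured by Meson
    core = coredata.load(builddir)   # reads meson-private/coredata.dat and checks its version
    bld = build.load(builddir)       # reads meson-private/build.dat and checks the object type
    coredata.save(core, builddir)    # writes meson-private/coredata.dat back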

mesonbuild/backend/ninjabackend.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import os, pickle, re, shlex, subprocess, sys
+import os, pickle, re, shlex, subprocess
 from collections import OrderedDict
 from pathlib import PurePath
@@ -115,7 +115,6 @@ class NinjaBuildElement:
             (name, elems) = e
             should_quote = name not in raw_names
             line = ' %s = ' % name
-            noq_templ = "%s"
             newelems = []
             for i in elems:
                 if not should_quote or i == '&&': # Hackety hack hack
@@ -1852,7 +1851,6 @@ rule FORTRAN_DEP_HACK
         infilelist = genlist.get_inputs()
         outfilelist = genlist.get_outputs()
         extra_dependencies = [os.path.join(self.build_to_src, i) for i in genlist.extra_depends]
-        source_target_dir = self.get_target_source_dir(target)
         for i in range(len(infilelist)):
             if len(generator.outputs) == 1:
                 sole_output = os.path.join(self.get_target_private_dir(target), outfilelist[i])
@@ -1877,7 +1875,6 @@ rule FORTRAN_DEP_HACK
             # We have consumed output files, so drop them from the list of remaining outputs.
             if sole_output == '':
                 outfilelist = outfilelist[len(generator.outputs):]
-            relout = self.get_target_private_dir(target)
             args = self.replace_paths(target, args, override_subdir=subdir)
             cmdlist = exe_arr + self.replace_extra_args(args, genlist)
             if generator.capture:
@@ -2694,3 +2691,9 @@ rule FORTRAN_DEP_HACK
             elem = NinjaBuildElement(self.all_outputs, deps, 'phony', '')
             elem.write(outfile)
+
+def load(build_dir):
+    filename = os.path.join(build_dir, 'meson-private', 'install.dat')
+    with open(filename, 'rb') as f:
+        obj = pickle.load(f)
+    return obj
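
The new ninjabackend.load() intentionally performs no existence check: install data is produced only by the Ninja backend, so callers that must tolerate other backends catch FileNotFoundError themselves, as the mintro.py hunk further below does. A sketch of that caller-side pattern, with 'builddir' as a placeholder path:

    from mesonbuild.backend import ninjabackend

    try:
        installdata = ninjabackend.load('builddir')   # reads meson-private/install.dat
    except FileNotFoundError:
        installdata = None                            # non-Ninja backend: no install data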

mesonbuild/build.py
@@ -15,6 +15,7 @@
 import copy, os, re
 from collections import OrderedDict
 import itertools, pathlib
+import pickle
 from . import environment
 from . import dependencies
@@ -1928,3 +1929,22 @@ def get_sources_string_names(sources):
         else:
             raise AssertionError('Unknown source type: {!r}'.format(s))
     return names
+
+def load(build_dir):
+    filename = os.path.join(build_dir, 'meson-private', 'build.dat')
+    load_fail_msg = 'Build data file {!r} is corrupted. Try with a fresh build tree.'.format(filename)
+    nonexisting_fail_msg = 'No such build data file as "{!r}".'.format(filename)
+    try:
+        with open(filename, 'rb') as f:
+            obj = pickle.load(f)
+    except FileNotFoundError:
+        raise MesonException(nonexisting_fail_msg)
+    except pickle.UnpicklingError:
+        raise MesonException(load_fail_msg)
+    if not isinstance(obj, Build):
+        raise MesonException(load_fail_msg)
+    return obj
+
+def save(obj, filename):
+    with open(filename, 'wb') as f:
+        pickle.dump(obj, f)
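
Note the asymmetry this hunk keeps: build.load() takes the build directory and maps a missing, corrupted, or wrong-typed file onto MesonException, while build.save() still takes an explicit filename (mesonmain.py below passes the scratch-dir path). A sketch of the caller-side handling under those assumptions:

    from mesonbuild import build
    from mesonbuild.mesonlib import MesonException

    try:
        b = build.load('builddir')    # placeholder build directory
    except MesonException as e:
        raise SystemExit('Could not read build data: {}'.format(e))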

mesonbuild/coredata.py
@@ -340,7 +340,8 @@ class CoreData:
                 return opt.validate_value(override_value)
         raise MesonException('Tried to validate unknown option %s.' % option_name)

-def load(filename):
+def load(build_dir):
+    filename = os.path.join(build_dir, 'meson-private', 'coredata.dat')
     load_fail_msg = 'Coredata file {!r} is corrupted. Try with a fresh build tree.'.format(filename)
     try:
         with open(filename, 'rb') as f:
@@ -354,7 +355,8 @@ def load(filename):
                              (obj.version, version))
     return obj

-def save(obj, filename):
+def save(obj, build_dir):
+    filename = os.path.join(build_dir, 'meson-private', 'coredata.dat')
     if obj.version != version:
         raise MesonException('Fatal version mismatch corruption.')
     with open(filename, 'wb') as f:

mesonbuild/environment.py
@@ -19,7 +19,6 @@ from .linkers import ArLinker, VisualStudioLinker
 from . import mesonlib
 from .mesonlib import EnvironmentException, Popen_safe
 from . import mlog
-import sys
 from . import compilers
 from .compilers import (
@@ -252,8 +251,7 @@ class Environment:
         os.makedirs(self.scratch_dir, exist_ok=True)
         os.makedirs(self.log_dir, exist_ok=True)
         try:
-            cdf = os.path.join(self.get_build_dir(), Environment.coredata_file)
-            self.coredata = coredata.load(cdf)
+            self.coredata = coredata.load(self.get_build_dir())
            self.first_invocation = False
         except FileNotFoundError:
             # WARNING: Don't use any values from coredata in __init__. It gets
@@ -316,9 +314,8 @@ class Environment:
         return self.cross_info is not None

     def dump_coredata(self):
-        cdf = os.path.join(self.get_build_dir(), Environment.coredata_file)
-        coredata.save(self.coredata, cdf)
-        return cdf
+        coredata.save(self.coredata, self.get_build_dir())
+        return os.path.join(self.get_build_dir(), Environment.coredata_file)

     def get_script_dir(self):
         import mesonbuild.scripts

mesonbuild/mconf.py
@@ -13,9 +13,8 @@
 # limitations under the License.

 import sys, os
-import pickle
 import argparse

-from . import coredata, mesonlib
+from . import coredata, mesonlib, build

 parser = argparse.ArgumentParser(prog='meson configure')
@@ -31,25 +30,17 @@ class ConfException(mesonlib.MesonException):

 class Conf:
     def __init__(self, build_dir):
         self.build_dir = build_dir
-        self.coredata_file = os.path.join(build_dir, 'meson-private/coredata.dat')
-        self.build_file = os.path.join(build_dir, 'meson-private/build.dat')
-        if not os.path.isfile(self.coredata_file) or not os.path.isfile(self.build_file):
+        if not os.path.isdir(os.path.join(build_dir, 'meson-private')):
             raise ConfException('Directory %s does not seem to be a Meson build directory.' % build_dir)
-        with open(self.coredata_file, 'rb') as f:
-            self.coredata = pickle.load(f)
-        with open(self.build_file, 'rb') as f:
-            self.build = pickle.load(f)
-        if self.coredata.version != coredata.version:
-            raise ConfException('Version mismatch (%s vs %s)' %
-                                (coredata.version, self.coredata.version))
+        self.build = build.load(self.build_dir)
+        self.coredata = coredata.load(self.build_dir)

     def clear_cache(self):
         self.coredata.deps = {}

     def save(self):
         # Only called if something has changed so overwrite unconditionally.
-        with open(self.coredata_file, 'wb') as f:
-            pickle.dump(self.coredata, f)
+        coredata.save(self.coredata, self.build_dir)
         # We don't write the build file because any changes to it
         # are erased when Meson is executed the next time, i.e. when
         # Ninja is run.
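
With the loaders in place, Conf drops its own pickle handling and version check; coredata.load() already rejects a version mismatch or a corrupted file. A sketch of how the class is driven (the 'builddir' argument is a placeholder):

    from mesonbuild.mconf import Conf

    c = Conf('builddir')   # raises ConfException if builddir has no meson-private/ directory
    c.clear_cache()        # drops cached dependency lookups from coredata
    c.save()               # persists coredata only; build.dat is rewritten on the next Meson run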

mesonbuild/mesonmain.py
@@ -12,8 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-import sys, stat, traceback, pickle, argparse
-import time, datetime
+import sys, stat, traceback, argparse
+import datetime
 import os.path
 from . import environment, interpreter, mesonlib
 from . import build
@@ -196,6 +196,7 @@ class MesonApp:
         mlog.log('Build machine cpu:', mlog.bold(intr.builtin['build_machine'].cpu_method([], {})))
         intr.run()
         try:
+            dumpfile = os.path.join(env.get_scratch_dir(), 'build.dat')
             # We would like to write coredata as late as possible since we use the existence of
             # this file to check if we generated the build file successfully. Since coredata
             # includes settings, the build files must depend on it and appear newer. However, due
@@ -204,16 +205,13 @@ class MesonApp:
             # possible, but before build files, and if any error occurs, delete it.
             cdf = env.dump_coredata()
             g.generate(intr)
-            dumpfile = os.path.join(env.get_scratch_dir(), 'build.dat')
-            with open(dumpfile, 'wb') as f:
-                pickle.dump(b, f)
+            build.save(b, dumpfile)
             # Post-conf scripts must be run after writing coredata or else introspection fails.
             g.run_postconf_scripts()
         except:
             os.unlink(cdf)
             raise

 def run_script_command(args):
     cmdname = args[0]
     cmdargs = args[1:]
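
The comment in this hunk carries the reasoning: coredata must reach disk after the interpreter runs but before the build files, because its presence marks a successfully configured tree, and it is unlinked again if anything later fails. A hypothetical standalone helper illustrating that ordering (not the literal Meson code):

    import os, pickle

    def persist_configuration(coredata_path, build_path, coredata_obj, build_obj, generate):
        # Write the "configured" marker first, then the build files, and roll the
        # marker back if generation or the build-data dump fails.
        with open(coredata_path, 'wb') as f:
            pickle.dump(coredata_obj, f)
        try:
            generate()                          # emit backend build files
            with open(build_path, 'wb') as f:
                pickle.dump(build_obj, f)       # then persist the build data
        except Exception:
            os.unlink(coredata_path)            # any failure: remove the marker and re-raise
            raise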

mesonbuild/mintro.py
@@ -19,8 +19,9 @@ tests and so on. All output is in JSON for simple parsing.
 Currently only works for the Ninja backend. Others use generated
 project files and don't need this info."""

-import json, pickle
-from . import coredata, build
+import json
+from . import build, mtest, coredata as cdata
+from .backend import ninjabackend
 import argparse
 import sys, os
 import pathlib
@@ -132,16 +133,16 @@ def add_keys(optlist, options):
     for key in keys:
         opt = options[key]
         optdict = {'name': key, 'value': opt.value}
-        if isinstance(opt, coredata.UserStringOption):
+        if isinstance(opt, cdata.UserStringOption):
             typestr = 'string'
-        elif isinstance(opt, coredata.UserBooleanOption):
+        elif isinstance(opt, cdata.UserBooleanOption):
             typestr = 'boolean'
-        elif isinstance(opt, coredata.UserComboOption):
+        elif isinstance(opt, cdata.UserComboOption):
             optdict['choices'] = opt.choices
             typestr = 'combo'
-        elif isinstance(opt, coredata.UserIntegerOption):
+        elif isinstance(opt, cdata.UserIntegerOption):
             typestr = 'integer'
-        elif isinstance(opt, coredata.UserArrayOption):
+        elif isinstance(opt, cdata.UserArrayOption):
             typestr = 'array'
         else:
             raise RuntimeError("Unknown option type")
@@ -149,7 +150,7 @@ def add_keys(optlist, options):
         optdict['description'] = opt.description
         optlist.append(optdict)

-def list_buildsystem_files(coredata, builddata):
+def list_buildsystem_files(builddata):
     src_dir = builddata.environment.get_source_dir()
     # I feel dirty about this. But only slightly.
     filelist = []
@@ -208,26 +209,15 @@ def run(args):
               'change the working directory to it.')
         return 1

-    corefile = os.path.join(datadir, 'coredata.dat')
-    buildfile = os.path.join(datadir, 'build.dat')
-    installfile = os.path.join(datadir, 'install.dat')
-    testfile = os.path.join(datadir, 'meson_test_setup.dat')
-    benchmarkfile = os.path.join(datadir, 'meson_benchmark_setup.dat')
-
-    # Load all data files
-    with open(corefile, 'rb') as f:
-        coredata = pickle.load(f)
-    with open(buildfile, 'rb') as f:
-        builddata = pickle.load(f)
-    with open(testfile, 'rb') as f:
-        testdata = pickle.load(f)
-    with open(benchmarkfile, 'rb') as f:
-        benchmarkdata = pickle.load(f)
+    coredata = cdata.load(options.builddir)
+    builddata = build.load(options.builddir)
+    testdata = mtest.load_tests(options.builddir)
+    benchmarkdata = mtest.load_benchmarks(options.builddir)

     # Install data is only available with the Ninja backend
-    if os.path.isfile(installfile):
-        with open(installfile, 'rb') as f:
-            installdata = pickle.load(f)
-    else:
+    try:
+        installdata = ninjabackend.load(options.builddir)
+    except FileNotFoundError:
         installdata = None

     if options.list_targets:
@@ -237,7 +227,7 @@ def run(args):
     elif options.target_files is not None:
         list_target_files(options.target_files, coredata, builddata)
     elif options.buildsystem_files:
-        list_buildsystem_files(coredata, builddata)
+        list_buildsystem_files(builddata)
     elif options.buildoptions:
         list_buildoptions(coredata, builddata)
     elif options.tests:

mesonbuild/mtest.py
@@ -165,6 +165,22 @@ def run_with_mono(fname):
         return True
     return False

+def load_benchmarks(build_dir):
+    datafile = os.path.join(build_dir, 'meson-private', 'meson_benchmark_setup.dat')
+    if not os.path.isfile(datafile):
+        raise TestException('Directory ${!r} does not seem to be a Meson build directory.'.format(build_dir))
+    with open(datafile, 'rb') as f:
+        obj = pickle.load(f)
+    return obj
+
+def load_tests(build_dir):
+    datafile = os.path.join(build_dir, 'meson-private', 'meson_test_setup.dat')
+    if not os.path.isfile(datafile):
+        raise TestException('Directory ${!r} does not seem to be a Meson build directory.'.format(build_dir))
+    with open(datafile, 'rb') as f:
+        obj = pickle.load(f)
+    return obj
+
 class TestHarness:
     def __init__(self, options):
         self.options = options
@@ -180,12 +196,10 @@ class TestHarness:
         self.logfile = None
         self.jsonlogfile = None
         if self.options.benchmark:
-            datafile = os.path.join(options.wd, 'meson-private', 'meson_benchmark_setup.dat')
+            self.tests = load_benchmarks(options.wd)
         else:
-            datafile = os.path.join(options.wd, 'meson-private', 'meson_test_setup.dat')
-        if not os.path.isfile(datafile):
-            raise TestException('Directory %s does not seem to be a Meson build directory.' % options.wd)
-        self.load_datafile(datafile)
+            self.tests = load_tests(options.wd)
+        self.load_suites()

     def __del__(self):
         if self.logfile:
@@ -193,9 +207,31 @@ class TestHarness:
         if self.jsonlogfile:
             self.jsonlogfile.close()

+    def merge_suite_options(self, options, test):
+        if ":" in options.setup:
+            if options.setup not in self.build_data.test_setups:
+                sys.exit("Unknown test setup '%s'." % options.setup)
+            current = self.build_data.test_setups[options.setup]
+        else:
+            full_name = test.project_name + ":" + options.setup
+            if full_name not in self.build_data.test_setups:
+                sys.exit("Test setup '%s' not found from project '%s'." % (options.setup, test.project_name))
+            current = self.build_data.test_setups[full_name]
+        if not options.gdb:
+            options.gdb = current.gdb
+        if options.timeout_multiplier is None:
+            options.timeout_multiplier = current.timeout_multiplier
+        # if options.env is None:
+        #     options.env = current.env # FIXME, should probably merge options here.
+        if options.wrapper is not None and current.exe_wrapper is not None:
+            sys.exit('Conflict: both test setup and command line specify an exe wrapper.')
+        if options.wrapper is None:
+            options.wrapper = current.exe_wrapper
+        return current.env.get_env(os.environ.copy())
+
     def get_test_env(self, options, test):
         if options.setup:
-            env = merge_suite_options(options, test)
+            env = self.merge_suite_options(options, test)
         else:
             env = os.environ.copy()
         if isinstance(test.env, build.EnvironmentVariables):
@@ -374,9 +410,6 @@ TIMEOUT: %4d
     def doit(self):
         if self.is_run:
             raise RuntimeError('Test harness object can only be used once.')
-        if not os.path.isfile(self.datafile):
-            print('Test data file. Probably this means that you did not run this in the build directory.')
-            return 1
         self.is_run = True
         tests = self.get_tests()
         if not tests:
@@ -415,15 +448,6 @@ TIMEOUT: %4d
                 ss.add(s)
         self.suites = list(ss)

-    def load_tests(self):
-        with open(self.datafile, 'rb') as f:
-            self.tests = pickle.load(f)
-
-    def load_datafile(self, datafile):
-        self.datafile = datafile
-        self.load_tests()
-        self.load_suites()
-
     def get_tests(self):
         if not self.tests:
             print('No tests defined.')
@@ -503,6 +527,7 @@ TIMEOUT: %4d
         startdir = os.getcwd()
         if self.options.wd:
             os.chdir(self.options.wd)
+        self.build_data = build.load(os.getcwd())
         try:
             for _ in range(self.options.repeat):
@@ -558,31 +583,6 @@ def list_tests(th):
     for t in tests:
         print(th.get_pretty_suite(t))

-def merge_suite_options(options, test):
-    buildfile = os.path.join(options.wd, 'meson-private/build.dat')
-    with open(buildfile, 'rb') as f:
-        build = pickle.load(f)
-    if ":" in options.setup:
-        if options.setup not in build.test_setups:
-            sys.exit("Unknown test setup '%s'." % options.setup)
-        current = build.test_setups[options.setup]
-    else:
-        full_name = test.project_name + ":" + options.setup
-        if full_name not in build.test_setups:
-            sys.exit("Test setup '%s' not found from project '%s'." % (options.setup, test.project_name))
-        current = build.test_setups[full_name]
-    if not options.gdb:
-        options.gdb = current.gdb
-    if options.timeout_multiplier is None:
-        options.timeout_multiplier = current.timeout_multiplier
-    # if options.env is None:
-    #     options.env = current.env # FIXME, should probably merge options here.
-    if options.wrapper is not None and current.exe_wrapper is not None:
-        sys.exit('Conflict: both test setup and command line specify an exe wrapper.')
-    if options.wrapper is None:
-        options.wrapper = current.exe_wrapper
-    return current.env.get_env(os.environ.copy())
-
 def rebuild_all(wd):
     if not os.path.isfile(os.path.join(wd, 'build.ninja')):
         print("Only ninja backend is supported to rebuild tests before running them.")
