Use context manager for file I/O.

There are a few cases where a context manager cannot be used, such as
the logger.
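
For illustration, this is the general shape of the change applied throughout (a minimal sketch with hypothetical names, not code from the diff below): the manual open/close pair becomes a with block, so the file is closed even when an exception is raised mid-write.

    # Before: if serialise() raises, the file is only closed whenever
    # the garbage collector gets around to the abandoned file object.
    datafile = open(path, 'wb')
    serialise(datafile)
    datafile.close()

    # After: closed deterministically on both the normal and error paths.
    with open(path, 'wb') as datafile:
        serialise(datafile)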
Branch: pull/716/head
Elliott Sales de Andrade, 8 years ago
parent 7830cb61c3
commit 4c71695e41
24 changed files (number of lines changed in parentheses):

  1. mesonbuild/backend/backends.py (59)
  2. mesonbuild/backend/ninjabackend.py (141)
  3. mesonbuild/backend/vs2010backend.py (120)
  4. mesonbuild/backend/xcodebackend.py (36)
  5. mesonbuild/compilers.py (73)
  6. mesonbuild/coredata.py (6)
  7. mesonbuild/dependencies.py (25)
  8. mesonbuild/interpreter.py (6)
  9. mesonbuild/mconf.py (9)
 10. mesonbuild/mesonlib.py (16)
 11. mesonbuild/mesonmain.py (3)
 12. mesonbuild/mintro.py (12)
 13. mesonbuild/modules/pkgconfig.py (62)
 14. mesonbuild/modules/rpm.py (147)
 15. mesonbuild/optinterpreter.py (3)
 16. mesonbuild/scripts/meson_benchmark.py (43)
 17. mesonbuild/scripts/meson_exe.py (3)
 18. mesonbuild/scripts/meson_install.py (11)
 19. mesonbuild/scripts/meson_test.py (45)
 20. mesonbuild/scripts/regen_checker.py (6)
 21. mesonbuild/scripts/symbolextractor.py (9)
 22. mesonbuild/scripts/vcstagger.py (13)
 23. mesonbuild/wrap/wrap.py (39)
 24. mesonbuild/wrap/wraptool.py (6)

mesonbuild/backend/backends.py

@@ -139,24 +139,34 @@ class Backend():
         langlist = {}
         abs_files = []
         result = []
-        for src in unity_src:
-            comp = self.get_compiler_for_source(src, target.is_cross)
-            language = comp.get_language()
-            suffix = '.' + comp.get_default_suffix()
-            if language not in langlist:
-                outfilename = os.path.join(self.get_target_private_dir_abs(target), target.name + '-unity' + suffix)
-                outfileabs = os.path.join(self.environment.get_build_dir(), outfilename)
-                outfileabs_tmp = outfileabs + '.tmp'
-                abs_files.append(outfileabs)
-                outfileabs_tmp_dir = os.path.dirname(outfileabs_tmp)
-                if not os.path.exists(outfileabs_tmp_dir):
-                    os.makedirs(outfileabs_tmp_dir)
-                outfile = open(outfileabs_tmp, 'w')
-                langlist[language] = outfile
-                result.append(outfilename)
-            ofile = langlist[language]
-            ofile.write('#include<%s>\n' % src)
-        [x.close() for x in langlist.values()]
+
+        def init_language_file(language, suffix):
+            outfilename = os.path.join(self.get_target_private_dir_abs(target),
+                                       target.name + '-unity' + suffix)
+            outfileabs = os.path.join(self.environment.get_build_dir(),
+                                      outfilename)
+            outfileabs_tmp = outfileabs + '.tmp'
+            abs_files.append(outfileabs)
+            outfileabs_tmp_dir = os.path.dirname(outfileabs_tmp)
+            if not os.path.exists(outfileabs_tmp_dir):
+                os.makedirs(outfileabs_tmp_dir)
+            result.append(outfilename)
+            return open(outfileabs_tmp, 'w')
+
+        try:
+            for src in unity_src:
+                comp = self.get_compiler_for_source(src, target.is_cross)
+                language = comp.get_language()
+                try:
+                    ofile = langlist[language]
+                except KeyError:
+                    suffix = '.' + comp.get_default_suffix()
+                    ofile = langlist[language] = init_language_file(language,
+                                                                    suffix)
+                ofile.write('#include<%s>\n' % src)
+        finally:
+            for x in langlist.values():
+                x.close()
         [mesonlib.replace_if_different(x, x + '.tmp') for x in abs_files]
         return result

@@ -215,13 +225,11 @@ class Backend():
     def serialise_tests(self):
         test_data = os.path.join(self.environment.get_scratch_dir(), 'meson_test_setup.dat')
-        datafile = open(test_data, 'wb')
-        self.write_test_file(datafile)
-        datafile.close()
+        with open(test_data, 'wb') as datafile:
+            self.write_test_file(datafile)
         benchmark_data = os.path.join(self.environment.get_scratch_dir(), 'meson_benchmark_setup.dat')
-        datafile = open(benchmark_data, 'wb')
-        self.write_benchmark_file(datafile)
-        datafile.close()
+        with open(benchmark_data, 'wb') as datafile:
+            self.write_benchmark_file(datafile)
         return (test_data, benchmark_data)

     def has_source_suffix(self, target, suffix):

@@ -442,7 +450,8 @@ class Backend():
         mfobj = {'type': 'dependency manifest',
                  'version': '1.0'}
         mfobj['projects'] = self.build.dep_manifest
-        open(ifilename, 'w').write(json.dumps(mfobj))
+        with open(ifilename, 'w') as f:
+            f.write(json.dumps(mfobj))
         d.data.append([ifilename, ofilename])

     def get_regen_filelist(self):

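The unity-file hunk above is the main place where a single with block does not fit: a variable number of files, one per language, must stay open across the whole loop, so the commit uses try/finally instead. A minimal sketch of that pattern (hypothetical names; contextlib.ExitStack would be an equivalent alternative):

    files = {}
    try:
        for lang, src in [('c', 'a.c'), ('cpp', 'b.cpp')]:
            if lang not in files:
                files[lang] = open('unity-%s.tmp' % lang, 'w')
            files[lang].write('#include<%s>\n' % src)
    finally:
        # Close every file that was opened, even if a write failed.
        for f in files.values():
            f.close()
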
mesonbuild/backend/ninjabackend.py

@@ -133,17 +133,18 @@ class NinjaBackend(backends.Backend):
         self.all_outputs = {}
         self.valgrind = environment.find_valgrind()

-    def detect_vs_dep_prefix(self, outfile, tempfilename):
+    def detect_vs_dep_prefix(self, tempfilename):
         '''VS writes its dependency in a locale dependent format.
         Detect the search prefix to use.'''
         # Of course there is another program called 'cl' on
         # some platforms. Let's just require that on Windows
         # cl points to msvc.
         if not mesonlib.is_windows() or shutil.which('cl') is None:
-            return outfile
-        outfile.close()
-        open(os.path.join(self.environment.get_scratch_dir(), 'incdetect.c'),
-             'w').write('''#include<stdio.h>
+            return open(tempfilename, 'a')
+        filename = os.path.join(self.environment.get_scratch_dir(),
+                                'incdetect.c')
+        with open(filename, 'w') as f:
+            f.write('''#include<stdio.h>
 int dummy;
 ''')

@@ -157,9 +158,8 @@ int dummy;
         for line in stdo.split(b'\r\n'):
             if line.endswith(b'stdio.h'):
                 matchstr = b':'.join(line.split(b':')[0:2]) + b':'
-                binfile = open(tempfilename, 'ab')
-                binfile.write(b'msvc_deps_prefix = ' + matchstr + b'\r\n')
-                binfile.close()
+                with open(tempfilename, 'ab') as binfile:
+                    binfile.write(b'msvc_deps_prefix = ' + matchstr + b'\r\n')
                 return open(tempfilename, 'a')
         raise MesonException('Could not determine vs dep dependency prefix string.')

@@ -167,30 +167,31 @@ int dummy;
         self.interpreter = interp
         outfilename = os.path.join(self.environment.get_build_dir(), self.ninja_filename)
         tempfilename = outfilename + '~'
-        outfile = open(tempfilename, 'w')
-        outfile.write('# This is the build file for project "%s"\n' % self.build.get_project())
-        outfile.write('# It is autogenerated by the Meson build system.\n')
-        outfile.write('# Do not edit by hand.\n\n')
-        outfile.write('ninja_required_version = 1.5.1\n\n')
-        outfile = self.detect_vs_dep_prefix(outfile, tempfilename)
-        self.generate_rules(outfile)
-        self.generate_phony(outfile)
-        outfile.write('# Build rules for targets\n\n')
-        [self.generate_target(t, outfile) for t in self.build.get_targets().values()]
-        outfile.write('# Test rules\n\n')
-        self.generate_tests(outfile)
-        outfile.write('# Install rules\n\n')
-        self.generate_install(outfile)
-        if 'b_coverage' in self.environment.coredata.base_options and \
-                self.environment.coredata.base_options['b_coverage'].value:
-            outfile.write('# Coverage rules\n\n')
-            self.generate_coverage_rules(outfile)
-        outfile.write('# Suffix\n\n')
-        self.generate_utils(outfile)
-        self.generate_ending(outfile)
+        with open(tempfilename, 'w') as outfile:
+            outfile.write('# This is the build file for project "%s"\n' %
+                          self.build.get_project())
+            outfile.write('# It is autogenerated by the Meson build system.\n')
+            outfile.write('# Do not edit by hand.\n\n')
+            outfile.write('ninja_required_version = 1.5.1\n\n')
+        with self.detect_vs_dep_prefix(tempfilename) as outfile:
+            self.generate_rules(outfile)
+            self.generate_phony(outfile)
+            outfile.write('# Build rules for targets\n\n')
+            for t in self.build.get_targets().values():
+                self.generate_target(t, outfile)
+            outfile.write('# Test rules\n\n')
+            self.generate_tests(outfile)
+            outfile.write('# Install rules\n\n')
+            self.generate_install(outfile)
+            if 'b_coverage' in self.environment.coredata.base_options and \
+                    self.environment.coredata.base_options['b_coverage'].value:
+                outfile.write('# Coverage rules\n\n')
+                self.generate_coverage_rules(outfile)
+            outfile.write('# Suffix\n\n')
+            self.generate_utils(outfile)
+            self.generate_ending(outfile)
         # Only ovewrite the old build file after the new one has been
         # fully created.
-        outfile.close()
         os.replace(tempfilename, outfilename)
         self.generate_compdb()

@@ -202,7 +203,8 @@ int dummy;
             jsondb = subprocess.check_output([ninja_exe, '-t', 'compdb', 'c_COMPILER', 'cpp_COMPILER'], cwd=builddir)
         except Exception:
             raise MesonException('Could not create compilation database.')
-        open(os.path.join(builddir, 'compile_commands.json'), 'wb').write(jsondb)
+        with open(os.path.join(builddir, 'compile_commands.json'), 'wb') as f:
+            f.write(jsondb)

     # Get all generated headers. Any source file might need them so
     # we need to add an order dependency to them.

@@ -505,8 +507,8 @@ int dummy;
             self.generate_subdir_install(d)
         elem.write(outfile)

-        ofile = open(install_data_file, 'wb')
-        pickle.dump(d, ofile)
+        with open(install_data_file, 'wb') as ofile:
+            pickle.dump(d, ofile)

     def generate_target_install(self, d):
         should_strip = self.environment.coredata.get_builtin_option('strip')

@@ -1416,16 +1418,22 @@ rule FORTRAN_DEP_HACK
             # but those are really rare. I hope.
             if not compiler.can_compile(s):
                 continue
-            for line in open(os.path.join(self.environment.get_source_dir(), s.subdir, s.fname)):
-                modmatch = modre.match(line)
-                if modmatch is not None:
-                    modname = modmatch.group(1)
-                    if modname.lower() == 'procedure': # MODULE PROCEDURE construct
-                        continue
-                    if modname in module_files:
-                        raise InvalidArguments('Namespace collision: module %s defined in two files %s and %s.' %
-                                               (modname, module_files[modname], s))
-                    module_files[modname] = s
+            filename = os.path.join(self.environment.get_source_dir(),
+                                    s.subdir, s.fname)
+            with open(filename) as f:
+                for line in f:
+                    modmatch = modre.match(line)
+                    if modmatch is not None:
+                        modname = modmatch.group(1)
+                        if modname.lower() == 'procedure':
+                            # MODULE PROCEDURE construct
+                            continue
+                        if modname in module_files:
+                            raise InvalidArguments(
+                                'Namespace collision: module %s defined in '
+                                'two files %s and %s.' %
+                                (modname, module_files[modname], s))
+                        module_files[modname] = s
         self.fortran_deps[target.get_basename()] = module_files

     def get_fortran_deps(self, compiler, src, target):

@@ -1433,27 +1441,32 @@ rule FORTRAN_DEP_HACK
         usere = re.compile(r"\s*use\s+(\w+)", re.IGNORECASE)
         dirname = self.get_target_private_dir(target)
         tdeps= self.fortran_deps[target.get_basename()]
-        for line in open(src):
-            usematch = usere.match(line)
-            if usematch is not None:
-                usename = usematch.group(1)
-                if usename not in tdeps:
-                    # The module is not provided by any source file. This is due to
-                    # a) missing file/typo/etc
-                    # b) using a module provided by the compiler, such as OpenMP
-                    # There's no easy way to tell which is which (that I know of)
-                    # so just ignore this and go on. Ideally we would print a
-                    # warning message to the user but this is a common occurrance,
-                    # which would lead to lots of distracting noise.
-                    continue
-                mod_source_file = tdeps[usename]
-                # Check if a source uses a module it exports itself.
-                # Potential bug if multiple targets have a file with
-                # the same name.
-                if mod_source_file.fname == os.path.split(src)[1]:
-                    continue
-                mod_name = compiler.module_name_to_filename(usematch.group(1))
-                mod_files.append(os.path.join(dirname, mod_name))
+        with open(src) as f:
+            for line in f:
+                usematch = usere.match(line)
+                if usematch is not None:
+                    usename = usematch.group(1)
+                    if usename not in tdeps:
+                        # The module is not provided by any source file. This
+                        # is due to:
+                        # a) missing file/typo/etc
+                        # b) using a module provided by the compiler, such as
+                        #    OpenMP
+                        # There's no easy way to tell which is which (that I
+                        # know of) so just ignore this and go on. Ideally we
+                        # would print a warning message to the user but this is
+                        # a common occurrence, which would lead to lots of
+                        # distracting noise.
+                        continue
+                    mod_source_file = tdeps[usename]
+                    # Check if a source uses a module it exports itself.
+                    # Potential bug if multiple targets have a file with
+                    # the same name.
+                    if mod_source_file.fname == os.path.split(src)[1]:
+                        continue
+                    mod_name = compiler.module_name_to_filename(
+                        usematch.group(1))
+                    mod_files.append(os.path.join(dirname, mod_name))
         return mod_files

     def get_cross_stdlib_args(self, target, compiler):

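Note how detect_vs_dep_prefix changed shape: instead of receiving an already-open file, it now returns one, and the caller adopts it in its own with statement (with self.detect_vs_dep_prefix(tempfilename) as outfile:). This works because Python file objects are their own context managers. A sketch of the handoff, with hypothetical names:

    def reopen_for_append(path):
        # The callee opens the file; the caller owns and closes it.
        return open(path, 'a')

    with reopen_for_append('build.ninja~') as outfile:
        outfile.write('# more rules\n')
    # outfile is closed here, even if the writes raise
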
mesonbuild/backend/vs2010backend.py

@@ -175,14 +175,18 @@ class Vs2010Backend(backends.Backend):
     @staticmethod
     def touch_regen_timestamp(build_dir):
-        open(Vs2010Backend.get_regen_stampfile(build_dir), 'w').close()
+        with open(Vs2010Backend.get_regen_stampfile(build_dir), 'w'):
+            pass

     def generate_regen_info(self):
         deps = self.get_regen_filelist()
         regeninfo = RegenInfo(self.environment.get_source_dir(),
                               self.environment.get_build_dir(),
                               deps)
-        pickle.dump(regeninfo, open(os.path.join(self.environment.get_scratch_dir(), 'regeninfo.dump'), 'wb'))
+        filename = os.path.join(self.environment.get_scratch_dir(),
+                                'regeninfo.dump')
+        with open(filename, 'wb') as f:
+            pickle.dump(regeninfo, f)

     def get_obj_target_deps(self, obj_list):
         result = {}

@@ -217,57 +221,66 @@ class Vs2010Backend(backends.Backend):
         return all_deps

     def generate_solution(self, sln_filename, projlist):
-        ofile = open(sln_filename, 'w')
-        ofile.write('Microsoft Visual Studio Solution File, Format Version 11.00\n')
-        ofile.write('# Visual Studio ' + self.vs_version + '\n')
-        prj_templ = prj_line = 'Project("{%s}") = "%s", "%s", "{%s}"\n'
-        for p in projlist:
-            prj_line = prj_templ % (self.environment.coredata.guid, p[0], p[1], p[2])
-            ofile.write(prj_line)
-            all_deps = self.determine_deps(p)
-            ofile.write('\tProjectSection(ProjectDependencies) = postProject\n')
-            regen_guid = self.environment.coredata.regen_guid
-            ofile.write('\t\t{%s} = {%s}\n' % (regen_guid, regen_guid))
-            for dep in all_deps.keys():
-                guid = self.environment.coredata.target_guids[dep]
-                ofile.write('\t\t{%s} = {%s}\n' % (guid, guid))
-            ofile.write('EndProjectSection\n')
-            ofile.write('EndProject\n')
-        test_line = prj_templ % (self.environment.coredata.guid,
-                                 'RUN_TESTS', 'RUN_TESTS.vcxproj', self.environment.coredata.test_guid)
-        ofile.write(test_line)
-        ofile.write('EndProject\n')
-        regen_line = prj_templ % (self.environment.coredata.guid,
-                                  'REGEN', 'REGEN.vcxproj', self.environment.coredata.regen_guid)
-        ofile.write(regen_line)
-        ofile.write('EndProject\n')
-        ofile.write('Global\n')
-        ofile.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\n')
-        ofile.write('\t\t%s|%s = %s|%s\n' % (self.buildtype, self.platform, self.buildtype, self.platform))
-        ofile.write('\tEndGlobalSection\n')
-        ofile.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\n')
-        ofile.write('\t\t{%s}.%s|%s.ActiveCfg = %s|%s\n' %
-                    (self.environment.coredata.regen_guid, self.buildtype, self.platform,
-                     self.buildtype, self.platform))
-        ofile.write('\t\t{%s}.%s|%s.Build.0 = %s|%s\n' %
-                    (self.environment.coredata.regen_guid, self.buildtype, self.platform,
-                     self.buildtype, self.platform))
-        for p in projlist:
-            ofile.write('\t\t{%s}.%s|%s.ActiveCfg = %s|%s\n' %
-                        (p[2], self.buildtype, self.platform,
-                         self.buildtype, self.platform))
-            if not isinstance(self.build.targets[p[0]], build.RunTarget):
-                ofile.write('\t\t{%s}.%s|%s.Build.0 = %s|%s\n' %
-                            (p[2], self.buildtype, self.platform,
-                             self.buildtype, self.platform))
-        ofile.write('\t\t{%s}.%s|%s.ActiveCfg = %s|%s\n' %
-                    (self.environment.coredata.test_guid, self.buildtype, self.platform,
-                     self.buildtype, self.platform))
-        ofile.write('\tEndGlobalSection\n')
-        ofile.write('\tGlobalSection(SolutionProperties) = preSolution\n')
-        ofile.write('\t\tHideSolutionNode = FALSE\n')
-        ofile.write('\tEndGlobalSection\n')
-        ofile.write('EndGlobal\n')
+        with open(sln_filename, 'w') as ofile:
+            ofile.write('Microsoft Visual Studio Solution File, Format '
+                        'Version 11.00\n')
+            ofile.write('# Visual Studio ' + self.vs_version + '\n')
+            prj_templ = prj_line = 'Project("{%s}") = "%s", "%s", "{%s}"\n'
+            for p in projlist:
+                prj_line = prj_templ % (self.environment.coredata.guid,
+                                        p[0], p[1], p[2])
+                ofile.write(prj_line)
+                all_deps = self.determine_deps(p)
+                ofile.write('\tProjectSection(ProjectDependencies) = '
+                            'postProject\n')
+                regen_guid = self.environment.coredata.regen_guid
+                ofile.write('\t\t{%s} = {%s}\n' % (regen_guid, regen_guid))
+                for dep in all_deps.keys():
+                    guid = self.environment.coredata.target_guids[dep]
+                    ofile.write('\t\t{%s} = {%s}\n' % (guid, guid))
+                ofile.write('EndProjectSection\n')
+                ofile.write('EndProject\n')
+            test_line = prj_templ % (self.environment.coredata.guid,
+                                     'RUN_TESTS', 'RUN_TESTS.vcxproj',
+                                     self.environment.coredata.test_guid)
+            ofile.write(test_line)
+            ofile.write('EndProject\n')
+            regen_line = prj_templ % (self.environment.coredata.guid,
+                                      'REGEN', 'REGEN.vcxproj',
+                                      self.environment.coredata.regen_guid)
+            ofile.write(regen_line)
+            ofile.write('EndProject\n')
+            ofile.write('Global\n')
+            ofile.write('\tGlobalSection(SolutionConfigurationPlatforms) = '
+                        'preSolution\n')
+            ofile.write('\t\t%s|%s = %s|%s\n' %
+                        (self.buildtype, self.platform, self.buildtype,
+                         self.platform))
+            ofile.write('\tEndGlobalSection\n')
+            ofile.write('\tGlobalSection(ProjectConfigurationPlatforms) = '
+                        'postSolution\n')
+            ofile.write('\t\t{%s}.%s|%s.ActiveCfg = %s|%s\n' %
+                        (self.environment.coredata.regen_guid, self.buildtype,
+                         self.platform, self.buildtype, self.platform))
+            ofile.write('\t\t{%s}.%s|%s.Build.0 = %s|%s\n' %
+                        (self.environment.coredata.regen_guid, self.buildtype,
+                         self.platform, self.buildtype, self.platform))
+            for p in projlist:
+                ofile.write('\t\t{%s}.%s|%s.ActiveCfg = %s|%s\n' %
+                            (p[2], self.buildtype, self.platform,
+                             self.buildtype, self.platform))
+                if not isinstance(self.build.targets[p[0]], build.RunTarget):
+                    ofile.write('\t\t{%s}.%s|%s.Build.0 = %s|%s\n' %
+                                (p[2], self.buildtype, self.platform,
+                                 self.buildtype, self.platform))
+            ofile.write('\t\t{%s}.%s|%s.ActiveCfg = %s|%s\n' %
+                        (self.environment.coredata.test_guid, self.buildtype,
+                         self.platform, self.buildtype, self.platform))
+            ofile.write('\tEndGlobalSection\n')
+            ofile.write('\tGlobalSection(SolutionProperties) = preSolution\n')
+            ofile.write('\t\tHideSolutionNode = FALSE\n')
+            ofile.write('\tEndGlobalSection\n')
+            ofile.write('EndGlobal\n')

     def generate_projects(self):
         projlist = []

@@ -862,12 +875,15 @@ class Vs2010Backend(backends.Backend):
         tree.write(ofname, encoding='utf-8', xml_declaration=True)
         # ElementTree can not do prettyprinting so do it manually
         doc = xml.dom.minidom.parse(ofname)
-        open(ofname, 'w').write(doc.toprettyxml())
+        with open(ofname, 'w') as of:
+            of.write(doc.toprettyxml())
         # World of horror! Python insists on not quoting quotes and
         # fixing the escaped &quot; into &amp;quot; whereas MSVS
         # requires quoted but not fixed elements. Enter horrible hack.
-        txt = open(ofname, 'r').read()
-        open(ofname, 'w').write(txt.replace('&amp;quot;', '&quot;'))
+        with open(ofname, 'r') as of:
+            txt = of.read()
+        with open(ofname, 'w') as of:
+            of.write(txt.replace('&amp;quot;', '&quot;'))

     def gen_regenproj(self, project_name, ofname):
         root = ET.Element('Project', {'DefaultTargets': 'Build',

mesonbuild/backend/xcodebackend.py

@@ -82,26 +82,22 @@ class XCodeBackend(backends.Backend):
         self.proj_dir = os.path.join(self.environment.get_build_dir(), self.build.project_name + '.xcodeproj')
         os.makedirs(self.proj_dir, exist_ok=True)
         self.proj_file = os.path.join(self.proj_dir, 'project.pbxproj')
-        self.ofile = open(self.proj_file, 'w')
-        self.generate_prefix()
-        self.generate_pbx_aggregate_target()
-        self.generate_pbx_build_file()
-        self.generate_pbx_build_style()
-        self.generate_pbx_container_item_proxy()
-        self.generate_pbx_file_reference()
-        self.generate_pbx_group()
-        self.generate_pbx_native_target()
-        self.generate_pbx_project()
-        self.generate_pbx_shell_build_phase(test_data)
-        self.generate_pbx_sources_build_phase()
-        self.generate_pbx_target_dependency()
-        self.generate_xc_build_configuration()
-        self.generate_xc_configurationList()
-        self.generate_suffix()
-        # for some reason, the entire file was not being flushed to the disk.
-        # closing it explicitly forces a flush and fixes the issue
-        self.ofile.close()
+        with open(self.proj_file, 'w') as self.ofile:
+            self.generate_prefix()
+            self.generate_pbx_aggregate_target()
+            self.generate_pbx_build_file()
+            self.generate_pbx_build_style()
+            self.generate_pbx_container_item_proxy()
+            self.generate_pbx_file_reference()
+            self.generate_pbx_group()
+            self.generate_pbx_native_target()
+            self.generate_pbx_project()
+            self.generate_pbx_shell_build_phase(test_data)
+            self.generate_pbx_sources_build_phase()
+            self.generate_pbx_target_dependency()
+            self.generate_xc_build_configuration()
+            self.generate_xc_configurationList()
+            self.generate_suffix()

     def get_xcodetype(self, fname):
         return self.xcodetypemap[fname.split('.')[-1]]

mesonbuild/compilers.py

@@ -538,9 +538,8 @@ class CCompiler(Compiler):
             binname += '.exe'
         # Write binary check source
         binary_name = os.path.join(work_dir, binname)
-        ofile = open(source_name, 'w')
-        ofile.write(code)
-        ofile.close()
+        with open(source_name, 'w') as ofile:
+            ofile.write(code)
         # Compile sanity check
         cmdlist = self.exelist + extra_flags + [source_name] + self.get_output_args(binary_name)
         pc = subprocess.Popen(cmdlist, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=work_dir)

@@ -617,9 +616,8 @@ int main () {{ {1}; }}'''
         suflen = len(self.default_suffix)
         (fd, srcname) = tempfile.mkstemp(suffix='.'+self.default_suffix)
         os.close(fd)
-        ofile = open(srcname, 'w')
-        ofile.write(code)
-        ofile.close()
+        with open(srcname, 'w') as ofile:
+            ofile.write(code)
         # Convert flags to the native type of the selected compiler
         args = self.unix_link_flags_to_native(extra_args)
         # Read c_args/cpp_args/etc from the cross-info file (if needed)

@@ -647,9 +645,8 @@ int main () {{ {1}; }}'''
         os.close(fd)
         (fd, dstname) = tempfile.mkstemp()
         os.close(fd)
-        ofile = open(srcname, 'w')
-        ofile.write(code)
-        ofile.close()
+        with open(srcname, 'w') as ofile:
+            ofile.write(code)
         # Convert flags to the native type of the selected compiler
         args = self.unix_link_flags_to_native(extra_args)
         # Select a CRT if needed since we're linking

@@ -672,9 +669,8 @@ int main () {{ {1}; }}'''
             raise CrossNoRunException('Can not run test applications in this cross environment.')
         (fd, srcname) = tempfile.mkstemp(suffix='.'+self.default_suffix)
         os.close(fd)
-        ofile = open(srcname, 'w')
-        ofile.write(code)
-        ofile.close()
+        with open(srcname, 'w') as ofile:
+            ofile.write(code)
         # Convert flags to the native type of the selected compiler
         args = self.unix_link_flags_to_native(extra_args)
         # Select a CRT if needed since we're linking

@@ -997,9 +993,9 @@ class ObjCCompiler(CCompiler):
         extra_flags = self.get_cross_extra_flags(environment, compile=True, link=False)
         if self.is_cross:
             extra_flags += self.get_compile_only_args()
-        ofile = open(source_name, 'w')
-        ofile.write('#import<stdio.h>\nint main(int argc, char **argv) { return 0; }\n')
-        ofile.close()
+        with open(source_name, 'w') as ofile:
+            ofile.write('#import<stdio.h>\n'
+                        'int main(int argc, char **argv) { return 0; }\n')
         pc = subprocess.Popen(self.exelist + extra_flags + [source_name, '-o', binary_name])
         pc.wait()
         if pc.returncode != 0:

@@ -1031,9 +1027,10 @@ class ObjCPPCompiler(CPPCompiler):
         extra_flags = self.get_cross_extra_flags(environment, compile=True, link=False)
         if self.is_cross:
             extra_flags += self.get_compile_only_args()
-        ofile = open(source_name, 'w')
-        ofile.write('#import<stdio.h>\nclass MyClass;int main(int argc, char **argv) { return 0; }\n')
-        ofile.close()
+        with open(source_name, 'w') as ofile:
+            ofile.write('#import<stdio.h>\n'
+                        'class MyClass;'
+                        'int main(int argc, char **argv) { return 0; }\n')
         pc = subprocess.Popen(self.exelist + extra_flags + [source_name, '-o', binary_name])
         pc.wait()
         if pc.returncode != 0:

@@ -1133,13 +1130,12 @@ class MonoCompiler(Compiler):
         src = 'sanity.cs'
         obj = 'sanity.exe'
         source_name = os.path.join(work_dir, src)
-        ofile = open(source_name, 'w')
-        ofile.write('''public class Sanity {
+        with open(source_name, 'w') as ofile:
+            ofile.write('''public class Sanity {
     static public void Main () {
     }
 }
 ''')
-        ofile.close()
         pc = subprocess.Popen(self.exelist + [src], cwd=work_dir)
         pc.wait()
         if pc.returncode != 0:

@@ -1245,14 +1241,13 @@ class JavaCompiler(Compiler):
         src = 'SanityCheck.java'
         obj = 'SanityCheck'
         source_name = os.path.join(work_dir, src)
-        ofile = open(source_name, 'w')
-        ofile.write('''class SanityCheck {
+        with open(source_name, 'w') as ofile:
+            ofile.write('''class SanityCheck {
   public static void main(String[] args) {
     int i;
   }
 }
 ''')
-        ofile.close()
         pc = subprocess.Popen(self.exelist + [src], cwd=work_dir)
         pc.wait()
         if pc.returncode != 0:

@@ -1292,11 +1287,10 @@ class ValaCompiler(Compiler):
     def sanity_check(self, work_dir, environment):
         src = 'valatest.vala'
         source_name = os.path.join(work_dir, src)
-        ofile = open(source_name, 'w')
-        ofile.write('''class SanityCheck : Object {
+        with open(source_name, 'w') as ofile:
+            ofile.write('''class SanityCheck : Object {
 }
 ''')
-        ofile.close()
         extra_flags = self.get_cross_extra_flags(environment, compile=True, link=False)
         pc = subprocess.Popen(self.exelist + extra_flags + ['-C', '-c', src], cwd=work_dir)
         pc.wait()

@@ -1336,11 +1330,10 @@ class RustCompiler(Compiler):
     def sanity_check(self, work_dir, environment):
         source_name = os.path.join(work_dir, 'sanity.rs')
         output_name = os.path.join(work_dir, 'rusttest')
-        ofile = open(source_name, 'w')
-        ofile.write('''fn main() {
+        with open(source_name, 'w') as ofile:
+            ofile.write('''fn main() {
 }
 ''')
-        ofile.close()
         pc = subprocess.Popen(self.exelist + ['-o', output_name, source_name], cwd=work_dir)
         pc.wait()
         if pc.returncode != 0:

@@ -1435,10 +1428,9 @@ class SwiftCompiler(Compiler):
         src = 'swifttest.swift'
         source_name = os.path.join(work_dir, src)
         output_name = os.path.join(work_dir, 'swifttest')
-        ofile = open(source_name, 'w')
-        ofile.write('''1 + 2
+        with open(source_name, 'w') as ofile:
+            ofile.write('''1 + 2
 ''')
-        ofile.close()
         extra_flags = self.get_cross_extra_flags(environment, compile=True, link=True)
         pc = subprocess.Popen(self.exelist + extra_flags + ['-emit-executable', '-o', output_name, src], cwd=work_dir)
         pc.wait()

@@ -1461,11 +1453,10 @@ class DCompiler(Compiler):
     def sanity_check(self, work_dir, environment):
         source_name = os.path.join(work_dir, 'sanity.d')
         output_name = os.path.join(work_dir, 'dtest')
-        ofile = open(source_name, 'w')
-        ofile.write('''void main() {
+        with open(source_name, 'w') as ofile:
+            ofile.write('''void main() {
 }
 ''')
-        ofile.close()
         pc = subprocess.Popen(self.exelist + self.get_output_args(output_name) + [source_name], cwd=work_dir)
         pc.wait()
         if pc.returncode != 0:

@@ -1872,9 +1863,8 @@ class VisualStudioCCompiler(CCompiler):
         code = 'int i;\n'
         (fd, srcname) = tempfile.mkstemp(suffix='.'+self.default_suffix)
         os.close(fd)
-        ofile = open(srcname, 'w')
-        ofile.write(code)
-        ofile.close()
+        with open(srcname, 'w') as ofile:
+            ofile.write(code)
         # Read c_args/cpp_args/etc from the cross-info file (if needed)
         extra_args = self.get_cross_extra_flags(env, compile=True, link=False)
         extra_args += self.get_compile_only_args()

@@ -2286,12 +2276,11 @@ class FortranCompiler(Compiler):
     def sanity_check(self, work_dir, environment):
         source_name = os.path.join(work_dir, 'sanitycheckf.f90')
         binary_name = os.path.join(work_dir, 'sanitycheckf')
-        ofile = open(source_name, 'w')
-        ofile.write('''program prog
+        with open(source_name, 'w') as ofile:
+            ofile.write('''program prog
      print *, "Fortran compilation is working."
 end program prog
 ''')
-        ofile.close()
         extra_flags = self.get_cross_extra_flags(environment, compile=True, link=True)
         pc = subprocess.Popen(self.exelist + extra_flags + [source_name, '-o', binary_name])
         pc.wait()

mesonbuild/coredata.py

@@ -154,7 +154,8 @@ class CoreData():
         raise RuntimeError('Tried to set unknown builtin option %s.' % optname)

 def load(filename):
-    obj = pickle.load(open(filename, 'rb'))
+    with open(filename, 'rb') as f:
+        obj = pickle.load(f)
     if not isinstance(obj, CoreData):
         raise RuntimeError('Core data file is corrupted.')
     if obj.version != version:

@@ -165,7 +166,8 @@ def load(filename):
 def save(obj, filename):
     if obj.version != version:
         raise RuntimeError('Fatal version mismatch corruption.')
-    pickle.dump(obj, open(filename, 'wb'))
+    with open(filename, 'wb') as f:
+        pickle.dump(obj, f)

 def get_builtin_options():
     return list(builtin_options.keys())

mesonbuild/dependencies.py

@@ -228,10 +228,11 @@ class PkgConfigDependency(Dependency):
         return self.is_found

     def extract_field(self, la_file, fieldname):
-        for line in open(la_file):
-            arr = line.strip().split('=')
-            if arr[0] == fieldname:
-                return arr[1][1:-1]
+        with open(la_file) as f:
+            for line in f:
+                arr = line.strip().split('=')
+                if arr[0] == fieldname:
+                    return arr[1][1:-1]
         return None

     def extract_dlname_field(self, la_file):

@@ -374,7 +375,8 @@ class ExternalProgram():
         shebang and manually parse it to figure out the interpreter to use
         """
         try:
-            first_line = open(script).readline().strip()
+            with open(script) as f:
+                first_line = f.readline().strip()
             if first_line.startswith('#!'):
                 commands = first_line[2:].split('#')[0].strip().split()
                 if mesonlib.is_windows():

@@ -552,12 +554,13 @@ class BoostDependency(Dependency):
         except FileNotFoundError:
             self.version = None
             return
-        for line in ifile:
-            if line.startswith("#define") and 'BOOST_LIB_VERSION' in line:
-                ver = line.split()[-1]
-                ver = ver[1:-1]
-                self.version = ver.replace('_', '.')
-                return
+        with ifile:
+            for line in ifile:
+                if line.startswith("#define") and 'BOOST_LIB_VERSION' in line:
+                    ver = line.split()[-1]
+                    ver = ver[1:-1]
+                    self.version = ver.replace('_', '.')
+                    return
         self.version = None

     def detect_src_modules(self):

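The BoostDependency hunk relies on the same property as detect_vs_dep_prefix: ifile was opened earlier, inside a try/except FileNotFoundError, and `with ifile:` then guarantees it is closed after the scan. A condensed sketch of the idiom (hypothetical file name):

    try:
        ifile = open('boost/version.hpp')
    except FileNotFoundError:
        version = None
    else:
        with ifile:  # an already-open file works as a context manager
            version = ifile.readline().strip()
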
mesonbuild/interpreter.py

@@ -1032,7 +1032,8 @@ class Interpreter():
         mesonfile = os.path.join(self.source_root, self.subdir, environment.build_filename)
         if not os.path.isfile(mesonfile):
             raise InvalidArguments('Missing Meson file in %s' % mesonfile)
-        code = open(mesonfile, encoding='utf8').read()
+        with open(mesonfile, encoding='utf8') as mf:
+            code = mf.read()
         if len(code.strip()) == 0:
             raise InvalidCode('Builder file is empty.')
         assert(isinstance(code, str))

@@ -1994,7 +1995,8 @@ class Interpreter():
         if not os.path.isfile(absname):
             self.subdir = prev_subdir
             raise InterpreterException('Nonexistant build def file %s.' % buildfilename)
-        code = open(absname, encoding='utf8').read()
+        with open(absname, encoding='utf8') as f:
+            code = f.read()
         assert(isinstance(code, str))
         try:
             codeblock = mparser.Parser(code).parse()

mesonbuild/mconf.py

@@ -36,15 +36,18 @@ class Conf:
         self.build_file = os.path.join(build_dir, 'meson-private/build.dat')
         if not os.path.isfile(self.coredata_file) or not os.path.isfile(self.build_file):
             raise ConfException('Directory %s does not seem to be a Meson build directory.' % build_dir)
-        self.coredata = pickle.load(open(self.coredata_file, 'rb'))
-        self.build = pickle.load(open(self.build_file, 'rb'))
+        with open(self.coredata_file, 'rb') as f:
+            self.coredata = pickle.load(f)
+        with open(self.build_file, 'rb') as f:
+            self.build = pickle.load(f)
         if self.coredata.version != coredata.version:
             raise ConfException('Version mismatch (%s vs %s)' %
                                 (coredata.version, self.coredata.version))

     def save(self):
         # Only called if something has changed so overwrite unconditionally.
-        pickle.dump(self.coredata, open(self.coredata_file, 'wb'))
+        with open(self.coredata_file, 'wb') as f:
+            pickle.dump(self.coredata, f)
         # We don't write the build file because any changes to it
         # are erased when Meson is executed the nex time, i.e. the next
         # time Ninja is run.

mesonbuild/mesonlib.py

@@ -96,7 +96,8 @@ def is_32bit():

 def is_debianlike():
     try:
-        open('/etc/debian_version', 'r')
+        with open('/etc/debian_version', 'r'):
+            pass
         return True
     except FileNotFoundError:
         return False

@@ -262,7 +263,8 @@ def do_mesondefine(line, confdata):

 def do_conf_file(src, dst, confdata):
     try:
-        data = open(src).readlines()
+        with open(src) as f:
+            data = f.readlines()
     except Exception:
         raise MesonException('Could not read input file %s.' % src)
     # Only allow (a-z, A-Z, 0-9, _, -) as valid characters for a define

@@ -276,7 +278,8 @@ def do_conf_file(src, dst, confdata):
         line = do_replacement(regex, line, confdata)
         result.append(line)
     dst_tmp = dst + '~'
-    open(dst_tmp, 'w').writelines(result)
+    with open(dst_tmp, 'w') as f:
+        f.writelines(result)
     shutil.copymode(src, dst_tmp)
     replace_if_different(dst, dst_tmp)

@@ -306,9 +309,10 @@ def replace_if_different(dst, dst_tmp):
     # If contents are identical, don't touch the file to prevent
     # unnecessary rebuilds.
     try:
-        if open(dst, 'r').read() == open(dst_tmp, 'r').read():
-            os.unlink(dst_tmp)
-            return
+        with open(dst, 'r') as f1, open(dst_tmp, 'r') as f2:
+            if f1.read() == f2.read():
+                os.unlink(dst_tmp)
+                return
     except FileNotFoundError:
         pass
     os.replace(dst_tmp, dst)

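replace_if_different now opens both files in a single with statement. The comma form enters the managers left to right and exits them in reverse order, so both handles are released even if the comparison raises, and a failure opening the second file still closes the first. A minimal sketch:

    with open(dst, 'r') as f1, open(dst_tmp, 'r') as f2:
        identical = (f1.read() == f2.read())
    # both files are closed here
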
mesonbuild/mesonmain.py

@@ -169,7 +169,8 @@ itself as required.'''
     g.generate(intr)
     g.run_postconf_scripts()
     dumpfile = os.path.join(env.get_scratch_dir(), 'build.dat')
-    pickle.dump(b, open(dumpfile, 'wb'))
+    with open(dumpfile, 'wb') as f:
+        pickle.dump(b, f)
     # Write this last since we use the existence of this file to check if
     # we generated the build file successfully, so we don't want an error
     # that pops up during generation, post-conf scripts, etc to cause us to

mesonbuild/mintro.py

@@ -177,10 +177,14 @@ def run(args):
     buildfile = os.path.join(bdir, 'meson-private/build.dat')
     testfile = os.path.join(bdir, 'meson-private/meson_test_setup.dat')
     benchmarkfile = os.path.join(bdir, 'meson-private/meson_benchmark_setup.dat')
-    coredata = pickle.load(open(corefile, 'rb'))
-    builddata = pickle.load(open(buildfile, 'rb'))
-    testdata = pickle.load(open(testfile, 'rb'))
-    benchmarkdata = pickle.load(open(benchmarkfile, 'rb'))
+    with open(corefile, 'rb') as f:
+        coredata = pickle.load(f)
+    with open(buildfile, 'rb') as f:
+        builddata = pickle.load(f)
+    with open(testfile, 'rb') as f:
+        testdata = pickle.load(f)
+    with open(benchmarkfile, 'rb') as f:
+        benchmarkdata = pickle.load(f)
     if options.list_targets:
         list_targets(coredata, builddata)
     elif options.target_files is not None:

mesonbuild/modules/pkgconfig.py

@@ -23,37 +23,41 @@ class PkgConfigModule:
     def generate_pkgconfig_file(self, state, libraries, subdirs, name, description, version, filebase,
                                 pub_reqs, priv_reqs, priv_libs):
+        coredata = state.environment.get_coredata()
         outdir = state.environment.scratch_dir
         fname = os.path.join(outdir, filebase + '.pc')
-        ofile = open(fname, 'w')
-        coredata = state.environment.get_coredata()
-        ofile.write('prefix=%s\n' % coredata.get_builtin_option('prefix'))
-        ofile.write('libdir=${prefix}/%s\n' % coredata.get_builtin_option('libdir'))
-        ofile.write('includedir=${prefix}/%s\n\n' % coredata.get_builtin_option('includedir'))
-        ofile.write('Name: %s\n' % name)
-        if len(description) > 0:
-            ofile.write('Description: %s\n' % description)
-        if len(version) > 0:
-            ofile.write('Version: %s\n' % version)
-        if len(pub_reqs) > 0:
-            ofile.write('Requires: {}\n'.format(' '.join(pub_reqs)))
-        if len(priv_reqs) > 0:
-            ofile.write('Requires.private: {}\n'.format(' '.join(priv_reqs)))
-        if len(priv_libs) > 0:
-            ofile.write('Libraries.private: {}\n'.format(' '.join(priv_libs)))
-        ofile.write('Libs: -L${libdir} ')
-        for l in libraries:
-            if l.custom_install_dir:
-                ofile.write('-L${prefix}/%s ' % l.custom_install_dir)
-            ofile.write('-l%s ' % l.name)
-        ofile.write('\n')
-        ofile.write('CFlags: ')
-        for h in subdirs:
-            if h == '.':
-                h = ''
-            ofile.write(os.path.join('-I${includedir}', h))
-            ofile.write(' ')
-        ofile.write('\n')
+        with open(fname, 'w') as ofile:
+            ofile.write('prefix=%s\n' % coredata.get_builtin_option('prefix'))
+            ofile.write('libdir=${prefix}/%s\n' %
+                        coredata.get_builtin_option('libdir'))
+            ofile.write('includedir=${prefix}/%s\n\n' %
+                        coredata.get_builtin_option('includedir'))
+            ofile.write('Name: %s\n' % name)
+            if len(description) > 0:
+                ofile.write('Description: %s\n' % description)
+            if len(version) > 0:
+                ofile.write('Version: %s\n' % version)
+            if len(pub_reqs) > 0:
+                ofile.write('Requires: {}\n'.format(' '.join(pub_reqs)))
+            if len(priv_reqs) > 0:
+                ofile.write(
+                    'Requires.private: {}\n'.format(' '.join(priv_reqs)))
+            if len(priv_libs) > 0:
+                ofile.write(
+                    'Libraries.private: {}\n'.format(' '.join(priv_libs)))
+            ofile.write('Libs: -L${libdir} ')
+            for l in libraries:
+                if l.custom_install_dir:
+                    ofile.write('-L${prefix}/%s ' % l.custom_install_dir)
+                ofile.write('-l%s ' % l.name)
+            ofile.write('\n')
+            ofile.write('CFlags: ')
+            for h in subdirs:
+                if h == '.':
+                    h = ''
+                ofile.write(os.path.join('-I${includedir}', h))
+                ofile.write(' ')
+            ofile.write('\n')

     def generate(self, state, args, kwargs):
         if len(args) > 0:

mesonbuild/modules/rpm.py

@@ -80,82 +80,87 @@ class RPMModule:
             files.add('%%{_mandir}/man%u/%s.*' % (int(man_file.split('.')[-1]), man_file))
         if len(files_devel) > 0:
             devel_subpkg = True
-        fn = open('%s.spec' % os.path.join(state.environment.get_build_dir(), proj), 'w+')
-        fn.write('Name: %s\n' % proj)
-        fn.write('Version: # FIXME\n')
-        fn.write('Release: 1%{?dist}\n')
-        fn.write('Summary: # FIXME\n')
-        fn.write('License: # FIXME\n')
-        fn.write('\n')
-        fn.write('Source0: %{name}-%{version}.tar.xz # FIXME\n')
-        fn.write('\n')
-        for compiler in compiler_deps:
-            fn.write('BuildRequires: %s\n' % compiler)
-        for dep in state.environment.coredata.deps:
-            fn.write('BuildRequires: pkgconfig(%s)\n' % dep)
-        for lib in state.environment.coredata.ext_libs.values():
-            fn.write('BuildRequires: %s # FIXME\n' % lib.fullpath)
-            mlog.log('Warning, replace', mlog.bold(lib.fullpath), 'with real package.',
-                     'You can use following command to find package which contains this lib:',
-                     mlog.bold('dnf provides %s' % lib.fullpath))
-        for prog in state.environment.coredata.ext_progs.values():
-            if not prog.found():
-                fn.write('BuildRequires: /usr/bin/%s # FIXME\n' % prog.get_name())
-            else:
-                fn.write('BuildRequires: %s\n' % ' '.join(prog.fullpath))
-        fn.write('BuildRequires: meson\n')
-        fn.write('\n')
-        fn.write('%description\n')
-        fn.write('\n')
-        if devel_subpkg:
-            fn.write('%package devel\n')
-            fn.write('Summary: Development files for %{name}\n')
-            fn.write('Requires: %{name}%{?_isa} = %{version}-%{release}\n')
-            fn.write('\n')
-            fn.write('%description devel\n')
-            fn.write('Development files for %{name}.\n')
-            fn.write('\n')
-        fn.write('%prep\n')
-        fn.write('%autosetup\n')
-        fn.write('rm -rf rpmbuilddir && mkdir rpmbuilddir\n')
-        fn.write('\n')
-        fn.write('%build\n')
-        fn.write('pushd rpmbuilddir\n')
-        fn.write('  %meson ..\n')
-        fn.write('  ninja-build -v\n')
-        fn.write('popd\n')
-        fn.write('\n')
-        fn.write('%install\n')
-        fn.write('pushd rpmbuilddir\n')
-        fn.write('  DESTDIR=%{buildroot} ninja-build -v install\n')
-        fn.write('popd\n')
-        if len(to_delete) > 0:
-            fn.write('rm -rf %s\n' % ' '.join(to_delete))
-        fn.write('\n')
-        fn.write('%check\n')
-        fn.write('pushd rpmbuilddir\n')
-        fn.write('  ninja-build -v test\n')
-        fn.write('popd\n')
-        fn.write('\n')
-        fn.write('%files\n')
-        for f in files:
-            fn.write('%s\n' % f)
-        fn.write('\n')
-        if devel_subpkg:
-            fn.write('%files devel\n')
-            for f in files_devel:
-                fn.write('%s\n' % f)
-            fn.write('\n')
-        if so_installed:
-            fn.write('%post -p /sbin/ldconfig\n')
-            fn.write('\n')
-            fn.write('%postun -p /sbin/ldconfig\n')
-            fn.write('\n')
-        fn.write('%changelog\n')
-        fn.write('* %s meson <meson@example.com> - \n' % datetime.date.today().strftime('%a %b %d %Y'))
-        fn.write('- \n')
-        fn.write('\n')
-        fn.close()
+        filename = os.path.join(state.environment.get_build_dir(),
+                                '%s.spec' % proj)
+        with open(filename, 'w+') as fn:
+            fn.write('Name: %s\n' % proj)
+            fn.write('Version: # FIXME\n')
+            fn.write('Release: 1%{?dist}\n')
+            fn.write('Summary: # FIXME\n')
+            fn.write('License: # FIXME\n')
+            fn.write('\n')
+            fn.write('Source0: %{name}-%{version}.tar.xz # FIXME\n')
+            fn.write('\n')
+            for compiler in compiler_deps:
+                fn.write('BuildRequires: %s\n' % compiler)
+            for dep in state.environment.coredata.deps:
+                fn.write('BuildRequires: pkgconfig(%s)\n' % dep)
+            for lib in state.environment.coredata.ext_libs.values():
+                fn.write('BuildRequires: %s # FIXME\n' % lib.fullpath)
+                mlog.log('Warning, replace', mlog.bold(lib.fullpath),
+                         'with real package.',
+                         'You can use following command to find package which '
+                         'contains this lib:',
+                         mlog.bold('dnf provides %s' % lib.fullpath))
+            for prog in state.environment.coredata.ext_progs.values():
+                if not prog.found():
+                    fn.write('BuildRequires: /usr/bin/%s # FIXME\n' %
+                             prog.get_name())
+                else:
+                    fn.write('BuildRequires: %s\n' % ' '.join(prog.fullpath))
+            fn.write('BuildRequires: meson\n')
+            fn.write('\n')
+            fn.write('%description\n')
+            fn.write('\n')
+            if devel_subpkg:
+                fn.write('%package devel\n')
+                fn.write('Summary: Development files for %{name}\n')
+                fn.write('Requires: %{name}%{?_isa} = %{version}-%{release}\n')
+                fn.write('\n')
+                fn.write('%description devel\n')
+                fn.write('Development files for %{name}.\n')
+                fn.write('\n')
+            fn.write('%prep\n')
+            fn.write('%autosetup\n')
+            fn.write('rm -rf rpmbuilddir && mkdir rpmbuilddir\n')
+            fn.write('\n')
+            fn.write('%build\n')
+            fn.write('pushd rpmbuilddir\n')
+            fn.write('  %meson ..\n')
+            fn.write('  ninja-build -v\n')
+            fn.write('popd\n')
+            fn.write('\n')
+            fn.write('%install\n')
+            fn.write('pushd rpmbuilddir\n')
+            fn.write('  DESTDIR=%{buildroot} ninja-build -v install\n')
+            fn.write('popd\n')
+            if len(to_delete) > 0:
+                fn.write('rm -rf %s\n' % ' '.join(to_delete))
+            fn.write('\n')
+            fn.write('%check\n')
+            fn.write('pushd rpmbuilddir\n')
+            fn.write('  ninja-build -v test\n')
+            fn.write('popd\n')
+            fn.write('\n')
+            fn.write('%files\n')
+            for f in files:
+                fn.write('%s\n' % f)
+            fn.write('\n')
+            if devel_subpkg:
+                fn.write('%files devel\n')
+                for f in files_devel:
+                    fn.write('%s\n' % f)
+                fn.write('\n')
+            if so_installed:
+                fn.write('%post -p /sbin/ldconfig\n')
+                fn.write('\n')
+                fn.write('%postun -p /sbin/ldconfig\n')
+                fn.write('\n')
+            fn.write('%changelog\n')
+            fn.write('* %s meson <meson@example.com> - \n' %
+                     datetime.date.today().strftime('%a %b %d %Y'))
+            fn.write('- \n')
+            fn.write('\n')
         mlog.log('RPM spec template written to %s.spec.\n' % proj)

 def initialize():

mesonbuild/optinterpreter.py

@@ -78,7 +78,8 @@ class OptionInterpreter:
     def process(self, option_file):
         try:
-            ast = mparser.Parser(open(option_file, 'r', encoding='utf8').read()).parse()
+            with open(option_file, 'r', encoding='utf8') as f:
+                ast = mparser.Parser(f.read()).parse()
         except mesonlib.MesonException as me:
             me.file = option_file
             raise me

mesonbuild/scripts/meson_benchmark.py

@@ -52,33 +52,34 @@ def run_benchmarks(options, datafile):
     failed_tests = 0
     logfile_base = 'meson-logs/benchmarklog'
     jsonlogfilename = logfile_base+ '.json'
-    jsonlogfile = open(jsonlogfilename, 'w')
-    tests = pickle.load(open(datafile, 'rb'))
+    with open(datafile, 'rb') as f:
+        tests = pickle.load(f)
     num_tests = len(tests)
     if num_tests == 0:
         print('No benchmarks defined.')
         return 0
     iteration_count = 5
     wrap = [] # Benchmarks on cross builds are pointless so don't support them.
-    for i, test in enumerate(tests):
-        runs = []
-        durations = []
-        failed = False
-        for _ in range(iteration_count):
-            res = meson_test.run_single_test(wrap, test)
-            runs.append(res)
-            durations.append(res.duration)
-            if res.returncode != 0:
-                failed = True
-        mean = statistics.mean(durations)
-        stddev = statistics.stdev(durations)
-        if failed:
-            resultstr = 'FAIL'
-            failed_tests += 1
-        else:
-            resultstr = 'OK'
-        print_stats(3, num_tests, test.name, resultstr, i, mean, stddev)
-        print_json_log(jsonlogfile, runs, test.name, i)
+    with open(jsonlogfilename, 'w') as jsonlogfile:
+        for i, test in enumerate(tests):
+            runs = []
+            durations = []
+            failed = False
+            for _ in range(iteration_count):
+                res = meson_test.run_single_test(wrap, test)
+                runs.append(res)
+                durations.append(res.duration)
+                if res.returncode != 0:
+                    failed = True
+            mean = statistics.mean(durations)
+            stddev = statistics.stdev(durations)
+            if failed:
+                resultstr = 'FAIL'
+                failed_tests += 1
+            else:
+                resultstr = 'OK'
+            print_stats(3, num_tests, test.name, resultstr, i, mean, stddev)
+            print_json_log(jsonlogfile, runs, test.name, i)
     print('\nFull log written to meson-logs/benchmarklog.json.')
     return failed_tests

mesonbuild/scripts/meson_exe.py

@@ -74,7 +74,8 @@ def run(args):
         print('Test runner for Meson. Do not run on your own, mmm\'kay?')
         print(sys.argv[0] + ' [data file]')
     exe_data_file = options.args[0]
-    exe = pickle.load(open(exe_data_file, 'rb'))
+    with open(exe_data_file, 'rb') as f:
+        exe = pickle.load(f)
     return run_exe(exe)

 if __name__ == '__main__':

mesonbuild/scripts/meson_install.py

@@ -38,8 +38,8 @@ def do_copy(from_file, to_file):
     append_to_log(to_file)

 def do_install(datafilename):
-    ifile = open(datafilename, 'rb')
-    d = pickle.load(ifile)
+    with open(datafilename, 'rb') as ifile:
+        d = pickle.load(ifile)
     d.destdir = os.environ.get('DESTDIR', '')
     d.fullprefix = destdir_join(d.destdir, d.prefix)

@@ -112,7 +112,9 @@ def install_man(d):
         os.makedirs(outdir, exist_ok=True)
         print('Installing %s to %s.' % (full_source_filename, outdir))
         if outfilename.endswith('.gz') and not full_source_filename.endswith('.gz'):
-            open(outfilename, 'wb').write(gzip.compress(open(full_source_filename, 'rb').read()))
+            with open(outfilename, 'wb') as of:
+                with open(full_source_filename, 'rb') as sf:
+                    of.write(gzip.compress(sf.read()))
             shutil.copystat(full_source_filename, outfilename)
             append_to_log(outfilename)
         else:

@@ -142,7 +144,8 @@ def run_install_script(d):
         print('Running custom install script %s' % script)
         suffix = os.path.splitext(script)[1].lower()
         if platform.system().lower() == 'windows' and suffix != '.bat':
-            first_line = open(script, encoding='latin_1', errors='ignore').readline().strip()
+            with open(script, encoding='latin_1', errors='ignore') as f:
+                first_line = f.readline().strip()
             if first_line.startswith('#!'):
                 if shutil.which(first_line[2:]):
                     commands = [first_line[2:]]

mesonbuild/scripts/meson_test.py

@@ -202,10 +202,8 @@ def run_tests(datafilename):
         wrap = [options.wrapper]
         logfilename = logfile_base + '-' + options.wrapper.replace(' ', '_') + '.txt'
         jsonlogfilename = logfile_base + '-' + options.wrapper.replace(' ', '_') + '.json'
-    logfile = open(logfilename, 'w')
-    jsonlogfile = open(jsonlogfilename, 'w')
-    logfile.write('Log of Meson test suite run on %s.\n\n' % datetime.datetime.now().isoformat())
-    tests = pickle.load(open(datafilename, 'rb'))
+    with open(datafilename, 'rb') as f:
+        tests = pickle.load(f)
     if len(tests) == 0:
         print('No tests defined.')
         return

@@ -222,24 +220,31 @@ def run_tests(datafilename):
     executor = conc.ThreadPoolExecutor(max_workers=num_workers)
     futures = []
     filtered_tests = filter_tests(options.suite, tests)
-    for i, test in enumerate(filtered_tests):
-        if test.suite[0] == '':
-            visible_name = test.name
-        else:
-            if options.suite is not None:
-                visible_name = options.suite + ' / ' + test.name
-            else:
-                visible_name = test.suite[0] + ' / ' + test.name
-        if not test.is_parallel:
-            drain_futures(futures)
-            futures = []
-            res = run_single_test(wrap, test)
-            print_stats(numlen, filtered_tests, visible_name, res, i, logfile, jsonlogfile)
-        else:
-            f = executor.submit(run_single_test, wrap, test)
-            futures.append((f, numlen, filtered_tests, visible_name, i, logfile, jsonlogfile))
-    drain_futures(futures)
+
+    with open(jsonlogfilename, 'w') as jsonlogfile, \
+            open(logfilename, 'w') as logfile:
+        logfile.write('Log of Meson test suite run on %s.\n\n' %
+                      datetime.datetime.now().isoformat())
+        for i, test in enumerate(filtered_tests):
+            if test.suite[0] == '':
+                visible_name = test.name
+            else:
+                if options.suite is not None:
+                    visible_name = options.suite + ' / ' + test.name
+                else:
+                    visible_name = test.suite[0] + ' / ' + test.name
+            if not test.is_parallel:
+                drain_futures(futures)
+                futures = []
+                res = run_single_test(wrap, test)
+                print_stats(numlen, filtered_tests, visible_name, res, i,
+                            logfile, jsonlogfile)
+            else:
+                f = executor.submit(run_single_test, wrap, test)
+                futures.append((f, numlen, filtered_tests, visible_name, i,
+                                logfile, jsonlogfile))
+        drain_futures(futures)
     return logfilename

 def run(args):
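run_tests shows the multi-file form spread over two lines with a backslash continuation; the with block scopes both log files around the whole scheduling loop, including the final drain_futures() call, so worker threads never write to a closed file. A minimal sketch of the continuation syntax (hypothetical file names):

    with open('log.json', 'w') as jsonlogfile, \
            open('log.txt', 'w') as logfile:
        logfile.write('both files stay open for this whole block\n')
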

mesonbuild/scripts/regen_checker.py

@@ -48,8 +48,10 @@ def run(args):
     private_dir = args[0]
     dumpfile = os.path.join(private_dir, 'regeninfo.dump')
     coredata = os.path.join(private_dir, 'coredata.dat')
-    regeninfo = pickle.load(open(dumpfile, 'rb'))
-    coredata = pickle.load(open(coredata, 'rb'))
+    with open(dumpfile, 'rb') as f:
+        regeninfo = pickle.load(f)
+    with open(coredata, 'rb') as f:
+        coredata = pickle.load(f)
     mesonscript = coredata.meson_script_file
     backend = coredata.get_builtin_option('backend')
     regen_timestamp = os.stat(dumpfile).st_mtime

mesonbuild/scripts/symbolextractor.py

@@ -34,16 +34,19 @@ parser.add_argument('args', nargs='+')

 def dummy_syms(outfilename):
     """Just touch it so relinking happens always."""
-    open(outfilename, 'w').close()
+    with open(outfilename, 'w'):
+        pass

 def write_if_changed(text, outfilename):
     try:
-        oldtext = open(outfilename, 'r').read()
+        with open(outfilename, 'r') as f:
+            oldtext = f.read()
         if text == oldtext:
             return
     except FileNotFoundError:
         pass
-    open(outfilename, 'w').write(text)
+    with open(outfilename, 'w') as f:
+        f.write(text)

 def linux_syms(libfilename, outfilename):
     pe = subprocess.Popen(['readelf', '-d', libfilename], stdout=subprocess.PIPE, stderr=subprocess.PIPE)

mesonbuild/scripts/vcstagger.py

@@ -23,9 +23,16 @@ def config_vcs_tag(infile, outfile, fallback, source_dir, replace_string, regex_selector):
     except Exception:
         new_string = fallback

-    new_data = open(infile).read().replace(replace_string, new_string)
-    if (not os.path.exists(outfile)) or (open(outfile).read() != new_data):
-        open(outfile, 'w').write(new_data)
+    with open(infile) as f:
+        new_data = f.read().replace(replace_string, new_string)
+    if os.path.exists(outfile):
+        with open(outfile) as f:
+            needs_update = (f.read() != new_data)
+    else:
+        needs_update = True
+    if needs_update:
+        with open(outfile, 'w') as f:
+            f.write(new_data)

 def run(args):
     infile, outfile, fallback, source_dir, replace_string, regex_selector = args[0:6]

mesonbuild/wrap/wrap.py

@@ -58,23 +58,23 @@ def open_wrapdburl(urlstring):
 class PackageDefinition:
     def __init__(self, fname):
         self.values = {}
-        ifile = open(fname)
-        first = ifile.readline().strip()
-        if first == '[wrap-file]':
-            self.type = 'file'
-        elif first == '[wrap-git]':
-            self.type = 'git'
-        else:
-            raise RuntimeError('Invalid format of package file')
-        for line in ifile:
-            line = line.strip()
-            if line == '':
-                continue
-            (k, v) = line.split('=', 1)
-            k = k.strip()
-            v = v.strip()
-            self.values[k] = v
+        with open(fname) as ifile:
+            first = ifile.readline().strip()
+            if first == '[wrap-file]':
+                self.type = 'file'
+            elif first == '[wrap-git]':
+                self.type = 'git'
+            else:
+                raise RuntimeError('Invalid format of package file')
+            for line in ifile:
+                line = line.strip()
+                if line == '':
+                    continue
+                (k, v) = line.split('=', 1)
+                k = k.strip()
+                v = v.strip()
+                self.values[k] = v

     def get(self, key):
         return self.values[key]

@@ -177,7 +177,8 @@ class Resolver:
         expected = p.get('source_hash')
         if dhash != expected:
             raise RuntimeError('Incorrect hash for source %s:\n %s expected\n %s actual.' % (packagename, expected, dhash))
-        open(ofname, 'wb').write(srcdata)
+        with open(ofname, 'wb') as f:
+            f.write(srcdata)
         if p.has_patch():
             purl = p.get('patch_url')
             mlog.log('Downloading patch from', mlog.bold(purl))

@@ -186,7 +187,9 @@ class Resolver:
             expected = p.get('patch_hash')
             if phash != expected:
                 raise RuntimeError('Incorrect hash for patch %s:\n %s expected\n %s actual' % (packagename, expected, phash))
-            open(os.path.join(self.cachedir, p.get('patch_filename')), 'wb').write(pdata)
+            filename = os.path.join(self.cachedir, p.get('patch_filename'))
+            with open(filename, 'wb') as f:
+                f.write(pdata)
         else:
             mlog.log('Package does not require patch.')

mesonbuild/wrap/wraptool.py

@@ -92,7 +92,8 @@ def install(name):
     (branch, revision) = get_latest_version(name)
     u = open_wrapdburl(API_ROOT + 'projects/%s/%s/%s/get_wrap' % (name, branch, revision))
     data = u.read()
-    open(wrapfile, 'wb').write(data)
+    with open(wrapfile, 'wb') as f:
+        f.write(data)
     print('Installed', name, 'branch', branch, 'revision', revision)

 def get_current_version(wrapfile):

@@ -129,7 +130,8 @@ def update(name):
             os.unlink(os.path.join('subprojects/packagecache', patch_file))
         except FileNotFoundError:
             pass
-    open(wrapfile, 'wb').write(data)
+    with open(wrapfile, 'wb') as f:
+        f.write(data)
     print('Updated', name, 'to branch', new_branch, 'revision', new_revision)

 def info(name):
