# Copyright 2012 Jussi Pakkanen

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

#     http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import mparser
import os, sys, re, pickle
import build
from coredata import MesonException

def do_replacement(regex, line, confdata):
    match = re.search(regex, line)
    while match:
        varname = match.group(1)
        if varname in confdata.keys():
            var = confdata.get(varname)
            if isinstance(var, str):
                pass
            elif isinstance(var, mparser.StringNode):
                var = var.value
            elif isinstance(var, int):
                var = str(var)
            else:
                raise RuntimeError('Tried to replace a variable with something other than a string or int.')
        else:
            var = ''
        line = line.replace('@' + varname + '@', var)
        match = re.search(regex, line)
    return line

def do_mesondefine(line, confdata):
    arr = line.split()
    if len(arr) != 2:
        raise build.InvalidArguments('#mesondefine does not contain exactly two tokens: %s' % line.strip())
    varname = arr[1]
    try:
        v = confdata.get(varname)
    except KeyError:
        return '/* undef %s */\n' % varname
    if isinstance(v, mparser.BooleanNode):
        v = v.value
    if isinstance(v, bool):
        if v:
            return '#define %s\n' % varname
        else:
            return '#undef %s\n' % varname
    elif isinstance(v, int):
        return '#define %s %d\n' % (varname, v)
    elif isinstance(v, str):
        return '#define %s %s\n' % (varname, v)
    else:
        raise build.InvalidArguments('#mesondefine argument "%s" is of unknown type.' % varname)

def replace_if_different(dst, dst_tmp):
    # If contents are identical, don't touch the file to prevent
    # unnecessary rebuilds.
    try:
        if open(dst, 'r').read() == open(dst_tmp, 'r').read():
            os.unlink(dst_tmp)
            return
    except FileNotFoundError:
        pass
    os.replace(dst_tmp, dst)

def do_conf_file(src, dst, confdata):
    data = open(src).readlines()
    regex = re.compile('@(.*?)@')
    result = []
    for line in data:
        if line.startswith('#mesondefine'):
            line = do_mesondefine(line, confdata)
        else:
            line = do_replacement(regex, line, confdata)
        result.append(line)
    dst_tmp = dst + '~'
    open(dst_tmp, 'w').writelines(result)
    replace_if_different(dst, dst_tmp)

class TestSerialisation:
    def __init__(self, name, fname, is_cross, exe_wrapper, is_parallel, cmd_args, env):
        self.name = name
        self.fname = fname
        self.is_cross = is_cross
        self.exe_runner = exe_wrapper
        self.is_parallel = is_parallel
        self.cmd_args = cmd_args
        self.env = env
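
# A minimal sketch of what the configure-file helpers above do. The file and
# variable names are hypothetical; only the @var@ and #mesondefine syntax
# comes from the code itself.
#
#   config.h.in                      confdata                config.h
#   -------------------------------  ----------------------  ------------------------
#   #define VERSION "@version@"      version = '1.0'         #define VERSION "1.0"
#   #mesondefine HAVE_FOO            HAVE_FOO = True         #define HAVE_FOO
#   #mesondefine HAVE_BAR            (not set)               /* undef HAVE_BAR */
#
# do_conf_file('config.h.in', 'config.h', confdata) writes the result to a
# temporary 'config.h~' and only renames it over the destination when the
# contents actually changed, so it does not trigger needless rebuilds.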

# This class contains the basic functionality that is needed by all backends.
# Feel free to move stuff in and out of it as you see fit.
class Backend():
    def __init__(self, build, interp):
        self.build = build
        self.environment = build.environment
        self.interpreter = interp
        self.processed_targets = {}
        self.dep_rules = {}
        self.build_to_src = os.path.relpath(self.environment.get_source_dir(),
                                            self.environment.get_build_dir())

    def get_compiler_for_lang(self, lang):
        for i in self.build.compilers:
            if i.language == lang:
                return i
        raise RuntimeError('No compiler for language ' + lang)

    def get_compiler_for_source(self, src):
        for i in self.build.compilers:
            if i.can_compile(src):
                return i
        raise RuntimeError('No specified compiler can handle file ' + src)

    def get_target_filename(self, target):
        targetdir = self.get_target_dir(target)
        filename = os.path.join(targetdir, target.get_filename())
        return filename

    def get_target_dir(self, target):
        dirname = target.get_subdir()
        os.makedirs(os.path.join(self.environment.get_build_dir(), dirname), exist_ok=True)
        return dirname

    def get_target_private_dir(self, target):
        dirname = os.path.join(self.get_target_dir(target), target.get_basename() + '.dir')
        os.makedirs(os.path.join(self.environment.get_build_dir(), dirname), exist_ok=True)
        return dirname

    def generate_unity_files(self, target, unity_src):
        langlist = {}
        abs_files = []
        result = []
        for src in unity_src:
            comp = self.get_compiler_for_source(src)
            language = comp.get_language()
            suffix = '.' + comp.get_default_suffix()
            if language not in langlist:
                outfilename = os.path.join(self.get_target_private_dir(target),
                                           target.name + '-unity' + suffix)
                outfileabs = os.path.join(self.environment.get_build_dir(), outfilename)
                outfileabs_tmp = outfileabs + '.tmp'
                abs_files.append(outfileabs)
                outfile = open(outfileabs_tmp, 'w')
                langlist[language] = outfile
                result.append(outfilename)
            ofile = langlist[language]
            ofile.write('#include<%s>\n' % src)
        for of in langlist.values():
            of.close()
        for f in abs_files:
            replace_if_different(f, f + '.tmp')
        return result

    def relpath(self, todir, fromdir):
        return os.path.relpath(os.path.join('dummyprefixdir', todir),
                               os.path.join('dummyprefixdir', fromdir))

    def flatten_object_list(self, target, proj_dir_to_build_root=''):
        obj_list = []
        for obj in target.get_objects():
            if isinstance(obj, str):
                o = os.path.join(proj_dir_to_build_root, self.build_to_src,
                                 target.get_subdir(), obj)
                obj_list.append(o)
            elif isinstance(obj, build.ExtractedObjects):
                obj_list += self.determine_ext_objs(obj, proj_dir_to_build_root)
            else:
                raise MesonException('Unknown data type in object list.')
        return obj_list

    def serialise_tests(self):
        test_data = os.path.join(self.environment.get_scratch_dir(), 'meson_test_setup.dat')
        datafile = open(test_data, 'wb')
        self.write_test_file(datafile)
        datafile.close()

    def generate_target(self, target, outfile):
        name = target.get_basename()
        if name in self.processed_targets:
            return
        if isinstance(target, build.Jar):
            self.generate_jar_target(target, outfile)
            return
        # The following deals with C/C++ compilation.
        (gen_src_deps, gen_other_deps) = self.process_dep_gens(outfile, target)
        self.process_target_dependencies(target, outfile)
        self.generate_custom_generator_rules(target, outfile)
        outname = self.get_target_filename(target)
        obj_list = []
        use_pch = self.environment.coredata.use_pch
        is_unity = self.environment.coredata.unity
        if use_pch and target.has_pch():
            self.generate_pch(target, outfile)
        header_deps = gen_other_deps
        unity_src = []
        unity_deps = []  # Generated sources that must be built before compiling a Unity target.
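        # In a unity build the non-header sources are not compiled one by one;
        # generate_unity_files() later concatenates them (as #include lines)
        # into one file per language, so the unity branches below only record
        # source paths and their ordering dependencies.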
        for genlist in target.get_generated_sources():
            for src in genlist.get_outfilelist():
                if not self.environment.is_header(src):
                    if is_unity:
                        if '/' in src:
                            rel_src = src
                        else:
                            rel_src = os.path.join(self.get_target_private_dir(target), src)
                        unity_deps.append(rel_src)
                        abs_src = os.path.join(self.environment.get_build_dir(), rel_src)
                        unity_src.append(abs_src)
                    else:
                        obj_list.append(self.generate_single_compile(target, outfile, src, True))
                else:
                    header_deps.append(src)
        src_list = []
        for src in gen_src_deps:
            src_list.append(src)
            if is_unity:
                unity_src.append(src)
            else:
                # Generated targets are ordered deps because they must exist
                # before the sources that use them are compiled. After the
                # first compile we get precise dependency info from dep files.
                # This should work in all cases. If it does not, then just
                # move them from orderdeps to proper deps.
                obj_list.append(self.generate_single_compile(target, outfile, src, True, [], header_deps))
        for src in target.get_sources():
            if not self.environment.is_header(src):
                src_list.append(src)
                if is_unity:
                    abs_src = os.path.join(self.environment.get_source_dir(),
                                           target.get_subdir(), src)
                    unity_src.append(abs_src)
                else:
                    obj_list.append(self.generate_single_compile(target, outfile, src, False, [], header_deps))
        obj_list += self.flatten_object_list(target)
        if is_unity:
            for src in self.generate_unity_files(target, unity_src):
                obj_list.append(self.generate_single_compile(target, outfile, src, True, unity_deps + header_deps))
        linker = self.determine_linker(target, src_list)
        elem = self.generate_link(target, outfile, outname, obj_list, linker)
        self.generate_shlib_aliases(target, self.get_target_dir(target), outfile, elem)
        self.processed_targets[name] = True

    def determine_linker(self, target, src):
        if isinstance(target, build.StaticLibrary):
            return self.build.static_linker
        if len(self.build.compilers) == 1:
            return self.build.compilers[0]
        # Currently a bit naive. C++ must be linked with a C++ compiler, but
        # otherwise we don't care. This will become trickier if and when
        # Fortran and the like become supported.
        cpp = None
        for c in self.build.compilers:
            if c.get_language() == 'cpp':
                cpp = c
                break
        if cpp is not None:
            for s in src:
                if cpp.can_compile(s):
                    return cpp
        return self.build.compilers[0]
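
    # Sketch of what determine_ext_objs() below computes, with hypothetical
    # names: for an extracted source 'sub/foo.c' belonging to target 'bar', it
    # returns something like 'sub/bar.dir/foo.c.o', or 'sub/bar.dir/foo.o'
    # when the backend does not keep source suffixes in object names
    # (self.source_suffix_in_objs is expected to be set by the concrete
    # backend subclass).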

    def determine_ext_objs(self, extobj, proj_dir_to_build_root=''):
        result = []
        targetdir = self.get_target_private_dir(extobj.target)
        suffix = '.' + self.environment.get_object_suffix()
        for osrc in extobj.srclist:
            if not self.source_suffix_in_objs:
                osrc = '.'.join(osrc.split('.')[:-1])
            objname = os.path.join(proj_dir_to_build_root, targetdir,
                                   os.path.basename(osrc) + suffix)
            result.append(objname)
        return result

    def process_target_dependencies(self, target, outfile):
        for t in target.get_dependencies():
            tname = t.get_basename()
            if tname not in self.processed_targets:
                self.generate_target(t, outfile)

    def get_pch_include_args(self, compiler, target):
        args = []
        pchpath = self.get_target_private_dir(target)
        includearg = compiler.get_include_arg(pchpath)
        for lang in ['c', 'cpp']:
            p = target.get_pch(lang)
            if len(p) == 0:
                continue
            if compiler.can_compile(p[-1]):
                header = p[0]
                args += compiler.get_pch_use_args(pchpath, header)
        if len(args) > 0:
            args = [includearg] + args
        return args

    def generate_basic_compiler_flags(self, target, compiler):
        commands = []
        commands += compiler.get_always_flags()
        commands += self.build.get_global_flags(compiler)
        commands += target.get_extra_args(compiler.get_language())
        if self.environment.coredata.buildtype != 'plain':
            commands += compiler.get_debug_flags()
        commands += compiler.get_std_warn_flags()
        commands += compiler.get_buildtype_flags(self.environment.coredata.buildtype)
        if self.environment.coredata.coverage:
            commands += compiler.get_coverage_flags()
        if isinstance(target, build.SharedLibrary):
            commands += compiler.get_pic_flags()
        for dep in target.get_external_deps():
            commands += dep.get_compile_flags()
            if isinstance(target, build.Executable):
                commands += dep.get_exe_flags()
        return commands
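
    # A hedged example of what build_target_link_arguments() below produces
    # for a hypothetical shared library dependency whose output file is
    # 'sub/libfoo.so': the returned list simply contains 'sub/libfoo.so'.
    # With the MSVC toolchain a filename ending in 'dll' is swapped for its
    # import library, e.g. 'sub/foo.dll' becomes 'sub/foo.lib'.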

    def build_target_link_arguments(self, compiler, deps):
        args = []
        for d in deps:
            if not isinstance(d, build.StaticLibrary) and \
               not isinstance(d, build.SharedLibrary):
                raise RuntimeError('Tried to link with a non-library target "%s".'
                                   % d.get_basename())
            fname = self.get_target_filename(d)
            if compiler.id == 'msvc':
                if fname.endswith('dll'):
                    fname = fname[:-3] + 'lib'
            args.append(fname)
        return args

    def generate_configure_files(self):
        for cf in self.build.get_configure_files():
            infile = os.path.join(self.environment.get_source_dir(),
                                  cf.get_subdir(),
                                  cf.get_source_name())
            outdir = os.path.join(self.environment.get_build_dir(), cf.get_subdir())
            os.makedirs(outdir, exist_ok=True)
            outfile = os.path.join(outdir, cf.get_target_name())
            confdata = cf.get_configuration_data()
            do_conf_file(infile, outfile, confdata)

    def write_test_file(self, datafile):
        arr = []
        for t in self.build.get_tests():
            fname = os.path.join(self.environment.get_build_dir(),
                                 self.get_target_filename(t.get_exe()))
            is_cross = self.environment.is_cross_build()
            if is_cross:
                exe_wrapper = self.environment.cross_info.get('exe_wrapper', None)
            else:
                exe_wrapper = None
            ts = TestSerialisation(t.get_name(), fname, is_cross, exe_wrapper,
                                   t.is_parallel, t.cmd_args, t.env)
            arr.append(ts)
        pickle.dump(arr, datafile)

    def generate_pkgconfig_files(self):
        for p in self.build.pkgconfig_gens:
            outdir = self.environment.scratch_dir
            fname = os.path.join(outdir, p.filebase + '.pc')
            ofile = open(fname, 'w')
            ofile.write('prefix=%s\n' % self.environment.get_coredata().prefix)
            ofile.write('libdir=${prefix}/%s\n' % self.environment.get_coredata().libdir)
            ofile.write('includedir=${prefix}/%s\n\n' % self.environment.get_coredata().includedir)
            ofile.write('Name: %s\n' % p.name)
            if len(p.description) > 0:
                ofile.write('Description: %s\n' % p.description)
            if len(p.version) > 0:
                ofile.write('Version: %s\n' % p.version)
            ofile.write('Libs: -L${libdir} ')
            for l in p.libraries:
                ofile.write('-l%s ' % l.name)
            ofile.write('\n')
            ofile.write('Cflags: ')
            for h in p.subdirs:
                if h == '.':
                    h = ''
                ofile.write(os.path.join('-I${includedir}', h))
                ofile.write(' ')
            ofile.write('\n')
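
# For illustration, a pkgconfig generator with (hypothetical) name 'foo',
# filebase 'foo', version '1.0', a library target 'foo' and subdir '.' would
# make generate_pkgconfig_files() emit roughly:
#
#   prefix=/usr/local
#   libdir=${prefix}/lib
#   includedir=${prefix}/include
#
#   Name: foo
#   Version: 1.0
#   Libs: -L${libdir} -lfoo
#   Cflags: -I${includedir}/
#
# The actual prefix, libdir and includedir come from coredata, so the values
# shown above are only an assumed default configuration.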