| # Copyright 2012-2014 The Meson development team |
| |
| # Licensed under the Apache License, Version 2.0 (the "License"); |
| # you may not use this file except in compliance with the License. |
| # You may obtain a copy of the License at |
| |
| # http://www.apache.org/licenses/LICENSE-2.0 |
| |
| # Unless required by applicable law or agreed to in writing, software |
| # distributed under the License is distributed on an "AS IS" BASIS, |
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| # See the License for the specific language governing permissions and |
| # limitations under the License. |
| |
| import backends |
| import environment, mesonlib |
| import build |
| import mlog |
| import dependencies |
| from mesonlib import File |
| from meson_install import InstallData |
| from build import InvalidArguments |
| from coredata import MesonException |
| import os, sys, shutil, pickle, re |
| |
| if mesonlib.is_windows(): |
| quote_char = '"' |
| execute_wrapper = 'cmd /c' |
| else: |
| quote_char = "'" |
| execute_wrapper = '' |
| |
| def ninja_quote(text): |
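| # Escape characters Ninja treats specially in paths: spaces and colons get a '$' prefix. |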
| return text.replace(' ', '$ ').replace(':', '$:') |
| |
| class RawFilename(): |
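| # Wrapper for a file name that is already correct as-is (typically a generated file |
| # relative to the build dir) and must not be resolved like an ordinary source path. |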
| def __init__(self, fname): |
| self.fname = fname |
| |
| def split(self, c): |
| return self.fname.split(c) |
| |
| def startswith(self, s): |
| return self.fname.startswith(s) |
| |
| class NinjaBuildElement(): |
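| # One 'build' statement in the Ninja file: outputs, rule and inputs, plus explicit |
| # dependencies, order-only dependencies and per-build variables. |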
| def __init__(self, outfilenames, rule, infilenames): |
| if isinstance(outfilenames, str): |
| self.outfilenames = [outfilenames] |
| else: |
| self.outfilenames = outfilenames |
| assert(isinstance(rule, str)) |
| self.rule = rule |
| if isinstance(infilenames, str): |
| self.infilenames = [infilenames] |
| else: |
| self.infilenames = infilenames |
| self.deps = [] |
| self.orderdeps = [] |
| self.elems = [] |
| |
| def add_dep(self, dep): |
| if isinstance(dep, list): |
| self.deps += dep |
| else: |
| self.deps.append(dep) |
| |
| def add_orderdep(self, dep): |
| if isinstance(dep, list): |
| self.orderdeps += dep |
| else: |
| self.orderdeps.append(dep) |
| |
| def add_item(self, name, elems): |
| if isinstance(elems, str): |
| elems = [elems] |
| self.elems.append((name, elems)) |
| |
| def write(self, outfile): |
| # Write the build statement: outputs, rule and inputs, followed by the |
| # explicit dependencies (after '|') and order-only dependencies (after '||'). |
| line = 'build %s: %s %s' % (' '.join([ninja_quote(i) for i in self.outfilenames]),\ |
| self.rule, |
| ' '.join([ninja_quote(i) for i in self.infilenames])) |
| if len(self.deps) > 0: |
| line += ' | ' + ' '.join([ninja_quote(x) for x in self.deps]) |
| if len(self.orderdeps) > 0: |
| line += ' || ' + ' '.join([ninja_quote(x) for x in self.orderdeps]) |
| line += '\n' |
| outfile.write(line) |
| |
| for e in self.elems: |
| (name, elems) = e |
| should_quote = True |
| if name == 'DEPFILE' or name == 'DESC' or name == 'pool': |
| should_quote = False |
| line = ' %s = ' % name |
| q_templ = quote_char + "%s" + quote_char |
| noq_templ = "%s" |
| newelems = [] |
| for i in elems: |
| if not should_quote or i == '&&': # Never quote '&&'; it must reach the shell unquoted. |
| templ = noq_templ |
| else: |
| templ = q_templ |
| newelems.append(templ % ninja_quote(i)) |
| line += ' '.join(newelems) |
| line += '\n' |
| outfile.write(line) |
| outfile.write('\n') |
| |
| class NinjaBackend(backends.Backend): |
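| # Backend that writes a build.ninja file for the Ninja build tool. |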
| |
| def __init__(self, build, interp): |
| super().__init__(build, interp) |
| self.source_suffix_in_objs = True |
| self.ninja_filename = 'build.ninja' |
| self.fortran_deps = {} |
| |
| def generate(self): |
| outfilename = os.path.join(self.environment.get_build_dir(), self.ninja_filename) |
| tempfilename = outfilename + '~' |
| outfile = open(tempfilename, 'w') |
| self.generate_pkgconfig_files() |
| outfile.write('# This is the build file for project "%s"\n' % self.build.get_project()) |
| outfile.write('# It is autogenerated by the Meson build system.\n') |
| outfile.write('# Do not edit by hand.\n\n') |
| outfile.write('ninja_required_version = 1.5.1\n\n') |
| self.generate_rules(outfile) |
| self.generate_phony(outfile) |
| outfile.write('# Build rules for targets\n\n') |
| [self.generate_target(t, outfile) for t in self.build.get_targets().values()] |
| if len(self.build.pot) > 0: |
| outfile.write('# Build rules for localisation.\n\n') |
| self.generate_po(outfile) |
| outfile.write('# Test rules\n\n') |
| self.generate_tests(outfile) |
| outfile.write('# Install rules\n\n') |
| self.generate_install(outfile) |
| if self.environment.coredata.coverage: |
| outfile.write('# Coverage rules\n\n') |
| self.generate_coverage_rules(outfile) |
| outfile.write('# Suffix\n\n') |
| self.generate_ending(outfile) |
| # Only overwrite the old build file after the new one has been |
| # fully created. |
| outfile.close() |
| os.replace(tempfilename, outfilename) |
| |
| # Get all generated headers. Any source file might need them so |
| # we need to add an order dependency to them. |
| def get_generated_headers(self, target): |
| header_deps = [] |
| for gensource in target.get_generated_sources(): |
| if isinstance(gensource, build.CustomTarget): |
| continue |
| for src in gensource.get_outfilelist(): |
| if self.environment.is_header(src): |
| header_deps.append(src) |
| return header_deps |
| |
| def generate_target(self, target, outfile): |
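| # Dispatch to the specialised generators (custom, run, jar, rust, cs, vala) and |
| # fall through to the generic compile-and-link path for C-like languages. |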
| if isinstance(target, build.CustomTarget): |
| self.generate_custom_target(target, outfile) |
| if isinstance(target, build.RunTarget): |
| self.generate_run_target(target, outfile) |
| name = target.get_basename() |
| gen_src_deps = [] |
| if name in self.processed_targets: |
| return |
| if isinstance(target, build.Jar): |
| self.generate_jar_target(target, outfile) |
| return |
| if 'rust' in self.environment.coredata.compilers.keys() and self.has_rust(target): |
| self.generate_rust_target(target, outfile) |
| return |
| if 'cs' in self.environment.coredata.compilers.keys() and self.has_cs(target): |
| self.generate_cs_target(target, outfile) |
| return |
| if 'vala' in self.environment.coredata.compilers.keys() and self.has_vala(target): |
| gen_src_deps += self.generate_vala_compile(target, outfile) |
| self.scan_fortran_module_outputs(target) |
| # The following deals with C/C++ compilation. |
| (gen_src, gen_other_deps) = self.process_dep_gens(outfile, target) |
| gen_src_deps += gen_src |
| self.process_target_dependencies(target, outfile) |
| self.generate_custom_generator_rules(target, outfile) |
| outname = self.get_target_filename(target) |
| obj_list = [] |
| use_pch = self.environment.coredata.use_pch |
| is_unity = self.environment.coredata.unity |
| if use_pch and target.has_pch(): |
| pch_objects = self.generate_pch(target, outfile) |
| else: |
| pch_objects = [] |
| header_deps = gen_other_deps |
| unity_src = [] |
| unity_deps = [] # Generated sources that must be built before compiling a Unity target. |
| header_deps += self.get_generated_headers(target) |
| for gensource in target.get_generated_sources(): |
| if isinstance(gensource, build.CustomTarget): |
| for src in gensource.output: |
| src = os.path.join(gensource.subdir, src) |
| if self.environment.is_source(src) and not self.environment.is_header(src): |
| if is_unity: |
| unity_deps.append(RawFilename(os.path.join(self.environment.get_build_dir(), src))) |
| else: |
| obj_list.append(self.generate_single_compile(target, outfile, RawFilename(src), True, |
| header_deps)) |
| elif self.environment.is_object(src): |
| obj_list.append(src) |
| else: |
| # Assume anything not specifically a source file is a header. This is because |
| # people generate files with weird suffixes (.inc, .fh) that they then include |
| # in their source files. |
| header_deps.append(RawFilename(src)) |
| else: |
| for src in gensource.get_outfilelist(): |
| if self.environment.is_object(src): |
| obj_list.append(os.path.join(self.get_target_dir(target), target.get_basename() + '.dir', src)) |
| elif not self.environment.is_header(src): |
| if is_unity: |
| if '/' in src: |
| rel_src = src |
| else: |
| rel_src = os.path.join(self.get_target_private_dir(target), src) |
| unity_deps.append(rel_src) |
| abs_src = os.path.join(self.environment.get_build_dir(), rel_src) |
| unity_src.append(abs_src) |
| else: |
| obj_list.append(self.generate_single_compile(target, outfile, src, True, |
| header_deps=header_deps)) |
| src_list = [] |
| for src in gen_src_deps: |
| src_list.append(src) |
| if is_unity: |
| unity_src.append(os.path.join(self.environment.get_build_dir(), src)) |
| header_deps.append(src) |
| else: |
| # Generated targets are order-only deps because they must exist |
| # before the sources that use them are compiled. After the first |
| # compile we get precise dependency info from dep files. |
| # This should work in all cases. If it does not, then just |
| # move them from orderdeps to proper deps. |
| obj_list.append(self.generate_single_compile(target, outfile, src, True, [], header_deps)) |
| for src in target.get_sources(): |
| if src.endswith('.vala'): |
| continue |
| if not self.environment.is_header(src): |
| src_list.append(src) |
| if is_unity: |
| abs_src = os.path.join(self.environment.get_source_dir(), |
| target.get_subdir(), src) |
| unity_src.append(abs_src) |
| else: |
| obj_list.append(self.generate_single_compile(target, outfile, src, False, [], header_deps)) |
| obj_list += self.flatten_object_list(target) |
| if is_unity: |
| for src in self.generate_unity_files(target, unity_src): |
| obj_list.append(self.generate_single_compile(target, outfile, src, True, unity_deps + header_deps)) |
| linker = self.determine_linker(target, src_list) |
| # Sort object list to preserve command line over multiple invocations. |
| elem = self.generate_link(target, outfile, outname, sorted(obj_list), linker, pch_objects) |
| self.generate_shlib_aliases(target, self.get_target_dir(target), outfile, elem) |
| self.processed_targets[name] = True |
| |
| def process_target_dependencies(self, target, outfile): |
| for t in target.get_dependencies(): |
| tname = t.get_basename() |
| if not tname in self.processed_targets: |
| self.generate_target(t, outfile) |
| |
| def hackety_hack(self, hack): |
| if isinstance(hack, list): |
| return hack[0] |
| return hack |
| |
| def generate_custom_target(self, target, outfile): |
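| # Emit a CUSTOM_COMMAND build statement for a custom_target(), expanding the |
| # @INPUT@/@OUTPUT@ placeholders in its command line. |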
| ofilenames = [os.path.join(target.subdir, i) for i in target.output] |
| # FIXME, should not grab element at zero but rather expand all. |
| deps = [os.path.join(i.get_subdir(), self.hackety_hack(i.get_filename())) for i in target.get_dependencies()] |
| srcs = [os.path.join(self.build_to_src, target.subdir, i) for i in target.sources] |
| deps += srcs |
| if target.build_always: |
| deps.append('PHONY') |
| elem = NinjaBuildElement(ofilenames, 'CUSTOM_COMMAND', deps) |
| for d in target.extra_depends: |
| tmp = d.get_filename() |
| if not isinstance(tmp, list): |
| tmp = [tmp] |
| for fname in tmp: |
| elem.add_dep(os.path.join(d.get_subdir(), fname)) |
| cmd = [] |
| for i in target.command: |
| for (j, src) in enumerate(srcs): |
| i = i.replace('@INPUT%d@' % j, src) |
| for (j, res) in enumerate(ofilenames): |
| i = i.replace('@OUTPUT%d@' % j, res) |
| if i == '@INPUT@': |
| cmd += srcs |
| elif i == '@OUTPUT@': |
| cmd += ofilenames |
| else: |
| cmd.append(i) |
| elem.add_item('COMMAND', cmd) |
| elem.add_item('description', 'Generating %s with a custom command.' % target.name) |
| elem.write(outfile) |
| self.processed_targets[target.name] = True |
| |
| def generate_run_target(self, target, outfile): |
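| # run_target()s execute an external command via the commandrunner.py script; |
| # they run in the console pool so their output is shown to the user. |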
| runnerscript = os.path.join(self.environment.get_script_dir(), 'commandrunner.py') |
| elem = NinjaBuildElement(target.name, 'CUSTOM_COMMAND', []) |
| cmd = [sys.executable, runnerscript, self.environment.get_source_dir(), self.environment.get_build_dir(), |
| target.subdir, target.command] + target.args |
| elem.add_item('COMMAND', cmd) |
| elem.add_item('description', 'Running external command %s.' % target.name) |
| elem.add_item('pool', 'console') |
| elem.write(outfile) |
| self.processed_targets[target.name] = True |
| |
| def generate_po(self, outfile): |
| for p in self.build.pot: |
| (packagename, languages, subdir) = p |
| input_file = os.path.join(subdir, 'POTFILES') |
| elem = NinjaBuildElement('pot', 'GEN_POT', []) |
| elem.add_item('PACKAGENAME', packagename) |
| elem.add_item('OUTFILE', packagename + '.pot') |
| elem.add_item('FILELIST', os.path.join(self.environment.get_source_dir(), input_file)) |
| elem.add_item('OUTDIR', os.path.join(self.environment.get_source_dir(), subdir)) |
| elem.write(outfile) |
| for l in languages: |
| infile = os.path.join(self.environment.get_source_dir(), subdir, l + '.po') |
| outfilename = os.path.join(subdir, l + '.gmo') |
| lelem = NinjaBuildElement(outfilename, 'GEN_GMO', infile) |
| lelem.add_item('INFILE', infile) |
| lelem.add_item('OUTFILE', outfilename) |
| lelem.write(outfile) |
| |
| def generate_coverage_rules(self, outfile): |
| (gcovr_exe, lcov_exe, genhtml_exe) = environment.find_coverage_tools() |
| added_rule = False |
| if gcovr_exe: |
| added_rule = True |
| elem = NinjaBuildElement('coverage-xml', 'CUSTOM_COMMAND', '') |
| elem.add_item('COMMAND', [gcovr_exe, '-x', '-r', self.environment.get_build_dir(),\ |
| '-o', os.path.join(self.environment.get_log_dir(), 'coverage.xml')]) |
| elem.add_item('DESC', 'Generating XML coverage report.') |
| elem.write(outfile) |
| elem = NinjaBuildElement('coverage-text', 'CUSTOM_COMMAND', '') |
| elem.add_item('COMMAND', [gcovr_exe, '-r', self.environment.get_build_dir(),\ |
| '-o', os.path.join(self.environment.get_log_dir(), 'coverage.txt')]) |
| elem.add_item('DESC', 'Generating text coverage report.') |
| elem.write(outfile) |
| if lcov_exe and genhtml_exe: |
| added_rule = True |
| phony_elem = NinjaBuildElement('coverage-html', 'phony', 'coveragereport/index.html') |
| phony_elem.write(outfile) |
| |
| elem = NinjaBuildElement('coveragereport/index.html', 'CUSTOM_COMMAND', '') |
| command = [lcov_exe, '--directory', self.environment.get_build_dir(),\ |
| '--capture', '--output-file', 'coverage.info', '--no-checksum',\ |
| '&&', genhtml_exe, '--prefix', self.environment.get_build_dir(),\ |
| '--output-directory', self.environment.get_log_dir(), '--title', 'Code coverage',\ |
| '--legend', '--show-details', 'coverage.info'] |
| elem.add_item('COMMAND', command) |
| elem.add_item('DESC', 'Generating HTML coverage report.') |
| elem.write(outfile) |
| if not added_rule: |
| mlog.log(mlog.red('Warning:'), 'coverage requested but neither gcovr nor lcov/genhtml found.') |
| |
| def generate_install(self, outfile): |
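| # Write the 'install' target, which runs meson_install.py on a pickled |
| # InstallData object assembled by the generate_*_install helpers below. |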
| script_root = self.environment.get_script_dir() |
| install_script = os.path.join(script_root, 'meson_install.py') |
| install_data_file = os.path.join(self.environment.get_scratch_dir(), 'install.dat') |
| depfixer = os.path.join(script_root, 'depfixer.py') |
| d = InstallData(self.environment.get_source_dir(), |
| self.environment.get_build_dir(), |
| self.environment.get_prefix(), depfixer) |
| elem = NinjaBuildElement('install', 'CUSTOM_COMMAND', '') |
| elem.add_dep('all') |
| elem.add_item('DESC', 'Installing files.') |
| elem.add_item('COMMAND', [sys.executable, install_script, install_data_file]) |
| elem.add_item('pool', 'console') |
| self.generate_target_install(d) |
| self.generate_header_install(d) |
| self.generate_man_install(d) |
| self.generate_data_install(d) |
| self.generate_po_install(d, elem) |
| self.generate_pkgconfig_install(d) |
| self.generate_custom_install_script(d) |
| self.generate_subdir_install(d) |
| elem.write(outfile) |
| |
| ofile = open(install_data_file, 'wb') |
| pickle.dump(d, ofile) |
| ofile.close() |
| |
| def generate_po_install(self, d, elem): |
| for p in self.build.pot: |
| (package_name, languages, subdir) = p |
| # FIXME: assumes only one po package per source |
| d.po_package_name = package_name |
| for lang in languages: |
| rel_src = os.path.join(subdir, lang + '.gmo') |
| src_file = os.path.join(self.environment.get_build_dir(), rel_src) |
| d.po.append((src_file, self.environment.coredata.localedir, lang)) |
| elem.add_dep(rel_src) |
| |
| def generate_target_install(self, d): |
| libdir = self.environment.get_libdir() |
| bindir = self.environment.get_bindir() |
| |
| should_strip = self.environment.coredata.strip |
| for t in self.build.get_targets().values(): |
| if t.should_install(): |
| outdir = t.get_custom_install_dir() |
| if outdir is None: |
| if isinstance(t, build.Executable): |
| outdir = bindir |
| else: |
| outdir = libdir |
| i = [self.get_target_filename(t), outdir, t.get_aliaslist(),\ |
| should_strip, t.install_rpath] |
| d.targets.append(i) |
| |
| def generate_pkgconfig_install(self, d): |
| pkgroot = os.path.join(self.environment.coredata.libdir, 'pkgconfig') |
| |
| for p in self.build.pkgconfig_gens: |
| pcfile = p.filebase + '.pc' |
| srcabs = os.path.join(self.environment.get_scratch_dir(), |
| pcfile) |
| dstrel = os.path.join(pkgroot, pcfile) |
| i = [srcabs, dstrel] |
| d.man.append(i) |
| |
| def generate_custom_install_script(self, d): |
| d.install_script = self.build.install_script |
| |
| |
| def generate_header_install(self, d): |
| incroot = self.environment.get_includedir() |
| headers = self.build.get_headers() |
| |
| for h in headers: |
| outdir = h.get_custom_install_dir() |
| if outdir is None: |
| outdir = os.path.join(incroot, h.get_install_subdir()) |
| for f in h.get_sources(): |
| abspath = os.path.join(self.environment.get_source_dir(), h.get_source_subdir(), f) |
| i = [abspath, outdir] |
| d.headers.append(i) |
| |
| def generate_man_install(self, d): |
| manroot = self.environment.get_mandir() |
| man = self.build.get_man() |
| for m in man: |
| for f in m.get_sources(): |
| num = f.split('.')[-1] |
| subdir = m.get_custom_install_dir() |
| if subdir is None: |
| subdir = os.path.join(manroot, 'man' + num) |
| srcabs = os.path.join(self.environment.get_source_dir(), m.get_source_subdir(), f) |
| dstabs = os.path.join(subdir, f + '.gz') |
| i = [srcabs, dstabs] |
| d.man.append(i) |
| |
| def generate_data_install(self, d): |
| dataroot = self.environment.get_datadir() |
| data = self.build.get_data() |
| for de in data: |
| subdir = de.get_install_dir() |
| for f in de.get_sources(): |
| if de.in_sourcetree: |
| srcprefix = self.environment.get_source_dir() |
| else: |
| srcprefix = self.environment.get_build_dir() |
| srcabs = os.path.join(srcprefix, de.get_source_subdir(), f) |
| dstabs = os.path.join(subdir, f) |
| i = [srcabs, dstabs] |
| d.data.append(i) |
| |
| def generate_subdir_install(self, d): |
| for sd in self.build.get_install_subdirs(): |
| src_dir = os.path.join(self.environment.get_source_dir(), sd.source_subdir, sd.installable_subdir) |
| dst_dir = os.path.join(self.environment.get_prefix(), sd.install_dir) |
| d.install_subdirs.append([src_dir, dst_dir]) |
| |
| def generate_tests(self, outfile): |
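| # The 'test' target runs meson_test.py on the serialised test definitions; |
| # a 'test-valgrind' variant is added when valgrind is available. |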
| self.serialise_tests() |
| valgrind = environment.find_valgrind() |
| script_root = self.environment.get_script_dir() |
| test_script = os.path.join(script_root, 'meson_test.py') |
| test_data = os.path.join(self.environment.get_scratch_dir(), 'meson_test_setup.dat') |
| cmd = [sys.executable, test_script, test_data] |
| elem = NinjaBuildElement('test', 'CUSTOM_COMMAND', ['all', 'PHONY']) |
| elem.add_item('COMMAND', cmd) |
| elem.add_item('DESC', 'Running test suite.') |
| elem.add_item('pool', 'console') |
| elem.write(outfile) |
| |
| if valgrind: |
| velem = NinjaBuildElement('test-valgrind', 'CUSTOM_COMMAND', ['all', 'PHONY']) |
| velem.add_item('COMMAND', cmd + ['--wrapper=' + valgrind]) |
| velem.add_item('DESC', 'Running test suite under Valgrind.') |
| velem.add_item('pool', 'console') |
| velem.write(outfile) |
| |
| def generate_rules(self, outfile): |
| outfile.write('# Rules for compiling.\n\n') |
| self.generate_compile_rules(outfile) |
| outfile.write('# Rules for linking.\n\n') |
| if self.environment.is_cross_build(): |
| self.generate_static_link_rules(True, outfile) |
| self.generate_static_link_rules(False, outfile) |
| self.generate_dynamic_link_rules(outfile) |
| outfile.write('# Other rules\n\n') |
| outfile.write('rule CUSTOM_COMMAND\n') |
| outfile.write(' command = $COMMAND\n') |
| outfile.write(' description = $DESC\n') |
| outfile.write(' restat = 1\n\n') |
| outfile.write('rule REGENERATE_BUILD\n') |
| c = (quote_char + ninja_quote(sys.executable) + quote_char, |
| quote_char + ninja_quote(self.environment.get_build_command()) + quote_char, |
| quote_char + ninja_quote(self.environment.get_source_dir()) + quote_char, |
| quote_char + ninja_quote(self.environment.get_build_dir()) + quote_char) |
| outfile.write(" command = %s %s %s %s --backend ninja secret-handshake\n" % c) |
| outfile.write(' description = Regenerating build files\n') |
| outfile.write(' generator = 1\n\n') |
| if len(self.build.pot) > 0: |
| self.generate_gettext_rules(outfile) |
| outfile.write('\n') |
| |
| def generate_gettext_rules(self, outfile): |
| rule = 'rule GEN_POT\n' |
| command = " command = xgettext --package-name=$PACKAGENAME -p $OUTDIR -f $FILELIST -D '%s' -k_ -o $OUTFILE\n" % \ |
| self.environment.get_source_dir() |
| desc = " description = Creating pot file for package $PACKAGENAME.\n" |
| outfile.write(rule) |
| outfile.write(command) |
| outfile.write(desc) |
| outfile.write('\n') |
| rule = 'rule GEN_GMO\n' |
| command = ' command = msgfmt $INFILE -o $OUTFILE\n' |
| desc = ' description = Generating gmo file $OUTFILE\n' |
| outfile.write(rule) |
| outfile.write(command) |
| outfile.write(desc) |
| outfile.write('\n') |
| |
| def generate_phony(self, outfile): |
| outfile.write('# Phony build target, always out of date\n') |
| outfile.write('build PHONY: phony\n') |
| outfile.write('\n') |
| |
| def generate_jar_target(self, target, outfile): |
| fname = target.get_filename() |
| subdir = target.get_subdir() |
| outname_rel = os.path.join(subdir, fname) |
| src_list = target.get_sources() |
| class_list = [] |
| compiler = self.get_compiler_for_source(src_list[0]) |
| assert(compiler.get_language() == 'java') |
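| # Compose the flag string for the 'jar' tool: c = create, f = archive file name, |
| # e = entry point (only when a main class is set); m (manifest) is unused here. |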
| c = 'c' |
| m = '' |
| e = '' |
| f = 'f' |
| main_class = target.get_main_class() |
| if main_class != '': |
| e = 'e' |
| for src in src_list: |
| class_list.append(self.generate_single_java_compile(subdir, src, target, compiler, outfile)) |
| jar_rule = 'java_LINKER' |
| commands = [c+m+e+f] |
| if e != '': |
| commands.append(main_class) |
| commands.append(self.get_target_filename(target)) |
| commands += ['-C', self.get_target_private_dir(target)] |
| commands += class_list |
| elem = NinjaBuildElement(outname_rel, jar_rule, []) |
| elem.add_dep([os.path.join(self.get_target_private_dir(target), i) for i in class_list]) |
| elem.add_item('ARGS', commands) |
| elem.write(outfile) |
| |
| def generate_cs_resource_tasks(self, target, outfile): |
| args = [] |
| deps = [] |
| for r in target.resources: |
| rel_sourcefile = os.path.join(self.build_to_src, target.subdir, r) |
| if r.endswith('.resources'): |
| a = '-resource:' + rel_sourcefile |
| elif r.endswith('.txt') or r.endswith('.resx'): |
| ofilebase = os.path.splitext(os.path.basename(r))[0] + '.resources' |
| ofilename = os.path.join(self.get_target_dir(target), target.get_basename() + '.dir', ofilebase) |
| elem = NinjaBuildElement(ofilename, "CUSTOM_COMMAND", rel_sourcefile) |
| elem.add_item('COMMAND', ['resgen', rel_sourcefile, ofilename]) |
| elem.add_item('DESC', 'Compiling resource %s.' % rel_sourcefile) |
| elem.write(outfile) |
| deps.append(ofilename) |
| a = '-resource:' + ofilename |
| else: |
| raise InvalidArguments('Unknown resource file %s.' % r) |
| args.append(a) |
| return (args, deps) |
| |
| def generate_cs_target(self, target, outfile): |
| buildtype = self.environment.coredata.buildtype |
| fname = target.get_filename() |
| subdir = target.get_subdir() |
| outname_rel = os.path.join(subdir, fname) |
| src_list = target.get_sources() |
| compiler = self.get_compiler_for_source(src_list[0]) |
| assert(compiler.get_language() == 'cs') |
| rel_srcs = [s.rel_to_builddir(self.build_to_src) for s in src_list] |
| deps = [] |
| commands = target.extra_args.get('cs', []) |
| commands += compiler.get_buildtype_args(buildtype) |
| if isinstance(target, build.Executable): |
| commands.append('-target:exe') |
| elif isinstance(target, build.SharedLibrary): |
| commands.append('-target:library') |
| else: |
| raise MesonException('Unknown C# target type.') |
| (resource_args, resource_deps) = self.generate_cs_resource_tasks(target, outfile) |
| commands += resource_args |
| deps += resource_deps |
| commands += compiler.get_output_args(outname_rel) |
| for l in target.link_targets: |
| commands += compiler.get_link_args(l.get_filename()) |
| deps.append(l.get_filename()) |
| if '-g' in commands: |
| outputs = [outname_rel, outname_rel + '.mdb'] |
| else: |
| outputs = [outname_rel] |
| elem = NinjaBuildElement(outputs, 'cs_COMPILER', rel_srcs) |
| elem.add_dep(deps) |
| elem.add_item('ARGS', commands) |
| elem.write(outfile) |
| |
| def generate_single_java_compile(self, subdir, src, target, compiler, outfile): |
| args = [] |
| args += compiler.get_buildtype_args(self.environment.coredata.buildtype) |
| args += compiler.get_output_args(self.get_target_private_dir(target)) |
| rel_src = src.rel_to_builddir(self.build_to_src) |
| plain_class_path = src.fname[:-4] + 'class' |
| rel_obj = os.path.join(self.get_target_private_dir(target), plain_class_path) |
| element = NinjaBuildElement(rel_obj, |
| compiler.get_language() + '_COMPILER', rel_src) |
| element.add_item('ARGS', args) |
| element.write(outfile) |
| return plain_class_path |
| |
| def generate_java_link(self, outfile): |
| rule = 'rule java_LINKER\n' |
| command = ' command = jar $ARGS\n' |
| description = ' description = Creating jar $out.\n' |
| outfile.write(rule) |
| outfile.write(command) |
| outfile.write(description) |
| outfile.write('\n') |
| |
| def generate_fastvapi_compile(self, target, valac, outfile): |
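| # Produce a --fast-vapi stub for each Vala source so the other sources can be |
| # compiled against its declarations without reparsing the file itself. |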
| fastvapis = {} |
| for s in target.get_sources(): |
| if not s.endswith('.vala'): |
| continue |
| vapibase = os.path.basename(s.fname)[:-4] + 'vapi' |
| rel_vapi = os.path.join(self.get_target_dir(target), target.get_basename() + '.dir', vapibase) |
| args = ['--fast-vapi=' + rel_vapi] |
| rel_s = s.rel_to_builddir(self.build_to_src) |
| element = NinjaBuildElement(rel_vapi, valac.get_language() + '_COMPILER', rel_s) |
| element.add_item('ARGS', args) |
| element.write(outfile) |
| fastvapis[s] = (vapibase, rel_vapi) |
| return fastvapis |
| |
| def split_vala_sources(self, sources): |
| src = [] |
| vapi_src = [] |
| for s in sources: |
| if s.endswith('.vapi'): |
| vapi_src.append(s) |
| else: |
| src.append(s) |
| return (src, vapi_src) |
| |
| def generate_vala_compile(self, target, outfile): |
| """Vala is compiled into C. Set up all necessary build steps here.""" |
| valac = self.environment.coredata.compilers['vala'] |
| fast_vapis = self.generate_fastvapi_compile(target, valac, outfile) |
| generated_c = [] |
| (src, vapi_src) = self.split_vala_sources(target.get_sources()) |
| vapi_src = [x.rel_to_builddir(self.build_to_src) for x in vapi_src] |
| for s in src: |
| if not s.endswith('.vala'): |
| continue |
| args = ['-d', self.get_target_private_dir(target)] |
| sc = os.path.basename(s.fname)[:-4] + 'c' |
| args += ['-C'] |
| vapi_order_deps = [] |
| for (sourcefile, vapi_info) in fast_vapis.items(): |
| if sourcefile == s: |
| continue |
| (vapibase, rel_vapi) = vapi_info |
| args += ['--use-fast-vapi=' + rel_vapi] |
| vapi_order_deps.append(rel_vapi) |
| relsc = os.path.join(self.get_target_dir(target), target.get_basename() + '.dir', sc) |
| rel_s = s.rel_to_builddir(self.build_to_src) |
| args += ['--deps', relsc + '.d'] |
| if self.environment.coredata.werror: |
| args += valac.get_werror_args() |
| for d in target.external_deps: |
| if isinstance(d, dependencies.PkgConfigDependency): |
| args += ['--pkg', d.name] |
| args += vapi_src |
| generated_c += [relsc] |
| element = NinjaBuildElement(relsc, valac.get_language() + '_COMPILER', rel_s) |
| element.add_item('ARGS', args) |
| element.add_orderdep(vapi_order_deps) |
| element.write(outfile) |
| return generated_c |
| |
| def generate_rust_target(self, target, outfile): |
| rustc = self.environment.coredata.compilers['rust'] |
| relsrc = [] |
| for i in target.get_sources(): |
| if not rustc.can_compile(i): |
| raise InvalidArguments('Rust target %s contains a non-rust source file.' % target.get_basename()) |
| relsrc.append(os.path.join(self.build_to_src, i)) |
| target_name = os.path.join(target.subdir, target.get_filename()) |
| args = ['--crate-type'] |
| if isinstance(target, build.Executable): |
| cratetype = 'bin' |
| elif isinstance(target, build.SharedLibrary): |
| cratetype = 'dylib' |
| elif isinstance(target, build.StaticLibrary): |
| cratetype = 'lib' |
| else: |
| raise InvalidArguments('Unknown target type for rustc.') |
| args.append(cratetype) |
| args += rustc.get_buildtype_args(self.environment.coredata.buildtype) |
| depfile = target_name + '.d' |
| args += ['--out-dir', target.subdir] |
| args += ['--dep-info', depfile] |
| orderdeps = [os.path.join(t.subdir, t.get_filename()) for t in target.link_targets] |
| linkdirs = {} |
| for d in target.link_targets: |
| linkdirs[d.subdir] = True |
| for d in linkdirs.keys(): |
| if d == '': |
| d = '.' |
| args += ['-L', d] |
| element = NinjaBuildElement(target_name, 'rust_COMPILER', relsrc) |
| if len(orderdeps) > 0: |
| element.add_orderdep(orderdeps) |
| element.add_item('ARGS', args) |
| element.add_item('targetdep', depfile) |
| element.add_item('cratetype', cratetype) |
| element.write(outfile) |
| |
| def generate_static_link_rules(self, is_cross, outfile): |
| if self.build.has_language('java'): |
| if not is_cross: |
| self.generate_java_link(outfile) |
| if is_cross: |
| static_linker = self.build.static_cross_linker |
| crstr = '_CROSS' |
| else: |
| static_linker = self.build.static_linker |
| crstr = '' |
| if static_linker is None: |
| return |
| rule = 'rule STATIC%s_LINKER\n' % crstr |
| command = ' command = %s $LINK_ARGS %s $in\n' % \ |
| (' '.join(static_linker.get_exelist()), |
| ' '.join(static_linker.get_output_args('$out'))) |
| description = ' description = Static linking library $out\n\n' |
| outfile.write(rule) |
| outfile.write(command) |
| outfile.write(description) |
| |
| def generate_dynamic_link_rules(self, outfile): |
| ctypes = [(self.build.compilers, False), (self.build.cross_compilers, True)] |
| for (complist, is_cross) in ctypes: |
| for compiler in complist: |
| langname = compiler.get_language() |
| if langname == 'java' or langname == 'vala' or\ |
| langname == 'rust' or langname == 'cs': |
| continue |
| crstr = '' |
| if is_cross: |
| crstr = '_CROSS' |
| rule = 'rule %s%s_LINKER\n' % (langname, crstr) |
| command = ' command = %s $ARGS %s $in $LINK_ARGS $aliasing\n' % \ |
| (' '.join(compiler.get_linker_exelist()),\ |
| ' '.join(compiler.get_linker_output_args('$out'))) |
| description = ' description = Linking target $out' |
| outfile.write(rule) |
| outfile.write(command) |
| outfile.write(description) |
| outfile.write('\n') |
| scriptdir = self.environment.get_script_dir() |
| outfile.write('\n') |
| symrule = 'rule SHSYM\n' |
| symcmd = ' command = "%s" "%s" "%s" "%s" $CROSS\n' % (ninja_quote(sys.executable), |
| ninja_quote(os.path.join(scriptdir, 'symbolextractor.py')), |
| '$in', '$out') |
| symstat = ' restat = 1\n' |
| symdesc = ' description = Generating symbol file $out.\n' |
| outfile.write(symrule) |
| outfile.write(symcmd) |
| outfile.write(symstat) |
| outfile.write(symdesc) |
| outfile.write('\n') |
| |
| def generate_java_compile_rule(self, compiler, outfile): |
| rule = 'rule %s_COMPILER\n' % compiler.get_language() |
| invoc = ' '.join([ninja_quote(i) for i in compiler.get_exelist()]) |
| command = ' command = %s $ARGS $in\n' % invoc |
| description = ' description = Compiling Java object $in.\n' |
| outfile.write(rule) |
| outfile.write(command) |
| outfile.write(description) |
| outfile.write('\n') |
| |
| def generate_cs_compile_rule(self, compiler, outfile): |
| rule = 'rule %s_COMPILER\n' % compiler.get_language() |
| invoc = ' '.join([ninja_quote(i) for i in compiler.get_exelist()]) |
| command = ' command = %s $ARGS $in\n' % invoc |
| description = ' description = Compiling cs target $out.\n' |
| outfile.write(rule) |
| outfile.write(command) |
| outfile.write(description) |
| outfile.write('\n') |
| |
| def generate_vala_compile_rules(self, compiler, outfile): |
| rule = 'rule %s_COMPILER\n' % compiler.get_language() |
| invoc = ' '.join([ninja_quote(i) for i in compiler.get_exelist()]) |
| command = ' command = %s $ARGS $in\n' % invoc |
| description = ' description = Compiling Vala source $in.\n' |
| restat = ' restat = 1\n' # Valac does not touch unchanged output files, so restat takes advantage of that. |
| depfile = ' depfile = $out.d\n' |
| depstyle = ' deps = gcc\n' |
| outfile.write(rule) |
| outfile.write(command) |
| outfile.write(description) |
| outfile.write(restat) |
| outfile.write(depfile) |
| outfile.write(depstyle) |
| outfile.write('\n') |
| |
| def generate_rust_compile_rules(self, compiler, outfile): |
| rule = 'rule %s_COMPILER\n' % compiler.get_language() |
| invoc = ' '.join([ninja_quote(i) for i in compiler.get_exelist()]) |
| command = ' command = %s %s $out $cratetype %s $ARGS $in\n' % \ |
| (ninja_quote(sys.executable), |
| ninja_quote(os.path.join(os.path.split(__file__)[0], "rustrunner.py")), |
| invoc) |
| description = ' description = Compiling Rust source $in.\n' |
| depfile = ' depfile = $out.d\n' |
| |
| depstyle = ' deps = gcc\n' |
| outfile.write(rule) |
| outfile.write(command) |
| outfile.write(description) |
| outfile.write(depfile) |
| outfile.write(depstyle) |
| outfile.write('\n') |
| |
| def generate_fortran_dep_hack(self, outfile): |
| if mesonlib.is_windows(): |
| cmd = 'cmd /C ""' |
| else: |
| cmd = 'true' |
| template = '''# Workaround for these issues: |
| # https://groups.google.com/forum/#!topic/ninja-build/j-2RfBIOd_8 |
| # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485 |
| rule FORTRAN_DEP_HACK |
| command = %s |
| description = Dep hack |
| restat = 1 |
| |
| ''' |
| outfile.write(template % cmd) |
| |
| def generate_compile_rule_for(self, langname, compiler, qstr, is_cross, outfile): |
| if langname == 'java': |
| if not is_cross: |
| self.generate_java_compile_rule(compiler, outfile) |
| return |
| if langname == 'cs': |
| if not is_cross: |
| self.generate_cs_compile_rule(compiler, outfile) |
| return |
| if langname == 'vala': |
| if not is_cross: |
| self.generate_vala_compile_rules(compiler, outfile) |
| return |
| if langname == 'rust': |
| if not is_cross: |
| self.generate_rust_compile_rules(compiler, outfile) |
| return |
| if langname == 'fortran': |
| self.generate_fortran_dep_hack(outfile) |
| if is_cross: |
| crstr = '_CROSS' |
| else: |
| crstr = '' |
| rule = 'rule %s%s_COMPILER\n' % (langname, crstr) |
| depargs = compiler.get_dependency_gen_args('$out', '$DEPFILE') |
| quoted_depargs = [] |
| for d in depargs: |
| if d != '$out' and d != '$in': |
| d = qstr % d |
| quoted_depargs.append(d) |
| command = " command = %s $ARGS %s %s %s $in\n" % \ |
| (' '.join(compiler.get_exelist()),\ |
| ' '.join(quoted_depargs),\ |
| ' '.join(compiler.get_output_args('$out')),\ |
| ' '.join(compiler.get_compile_only_args())) |
| description = ' description = Compiling %s object $out\n' % langname |
| if compiler.get_id() == 'msvc': |
| deps = ' deps = msvc\n' |
| else: |
| deps = ' deps = gcc\n' |
| deps += ' depfile = $DEPFILE\n' |
| outfile.write(rule) |
| outfile.write(command) |
| outfile.write(deps) |
| outfile.write(description) |
| outfile.write('\n') |
| |
| def generate_pch_rule_for(self, langname, compiler, qstr, is_cross, outfile): |
| if langname != 'c' and langname != 'cpp': |
| return |
| if is_cross: |
| crstr = '_CROSS' |
| else: |
| crstr = '' |
| rule = 'rule %s%s_PCH\n' % (langname, crstr) |
| depargs = compiler.get_dependency_gen_args('$out', '$DEPFILE') |
| quoted_depargs = [] |
| for d in depargs: |
| if d != '$out' and d != '$in': |
| d = qstr % d |
| quoted_depargs.append(d) |
| if compiler.get_id() == 'msvc': |
| output = '' |
| else: |
| output = ' '.join(compiler.get_output_args('$out')) |
| command = " command = %s $ARGS %s %s %s $in\n" % \ |
| (' '.join(compiler.get_exelist()),\ |
| ' '.join(quoted_depargs),\ |
| output,\ |
| ' '.join(compiler.get_compile_only_args())) |
| description = ' description = Precompiling header %s\n' % '$in' |
| if compiler.get_id() == 'msvc': |
| deps = ' deps = msvc\n' |
| else: |
| deps = ' deps = gcc\n' |
| deps += ' depfile = $DEPFILE\n' |
| outfile.write(rule) |
| outfile.write(command) |
| outfile.write(deps) |
| outfile.write(description) |
| outfile.write('\n') |
| |
| def generate_compile_rules(self, outfile): |
| qstr = quote_char + "%s" + quote_char |
| for compiler in self.build.compilers: |
| langname = compiler.get_language() |
| self.generate_compile_rule_for(langname, compiler, qstr, False, outfile) |
| self.generate_pch_rule_for(langname, compiler, qstr, False, outfile) |
| if self.environment.is_cross_build(): |
| for compiler in self.build.cross_compilers: |
| langname = compiler.get_language() |
| self.generate_compile_rule_for(langname, compiler, qstr, True, outfile) |
| self.generate_pch_rule_for(langname, compiler, qstr, True, outfile) |
| outfile.write('\n') |
| |
| def replace_outputs(self, args, private_dir, output_list): |
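| # Replace @OUTPUTn@ placeholders in generator arguments with paths inside the |
| # target's private directory. |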
| newargs = [] |
| regex = re.compile(r'@OUTPUT(\d+)@') |
| for arg in args: |
| m = regex.search(arg) |
| while m is not None: |
| index = int(m.group(1)) |
| src = '@OUTPUT%d@' % index |
| arg = arg.replace(src, os.path.join(private_dir, output_list[index])) |
| m = regex.search(arg) |
| newargs.append(arg) |
| return newargs |
| |
| def generate_custom_generator_rules(self, target, outfile): |
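| # Write build statements for generator() outputs attached to this target, |
| # expanding @INPUT@, @OUTPUT@, @SOURCE_DIR@ and @BUILD_DIR@ in the arguments. |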
| for genlist in target.get_generated_sources(): |
| if isinstance(genlist, build.CustomTarget): |
| continue # CustomTarget has already written its output build rules. |
| generator = genlist.get_generator() |
| exe = generator.get_exe() |
| if self.environment.is_cross_build() and \ |
| isinstance(exe, build.BuildTarget) and exe.is_cross: |
| if 'exe_wrapper' not in self.environment.cross_info: |
| s = 'Cannot use target %s as a generator because it is cross-built\n' |
| s += 'and no exe wrapper is defined. You might want to set it to native instead.' |
| s = s % exe.name |
| raise MesonException(s) |
| infilelist = genlist.get_infilelist() |
| outfilelist = genlist.get_outfilelist() |
| if isinstance(exe, build.BuildTarget): |
| exe_arr = [os.path.join(self.environment.get_build_dir(), self.get_target_filename(exe))] |
| else: |
| exe_arr = exe.get_command() |
| base_args = generator.get_arglist() |
| extra_dependencies = [os.path.join(self.build_to_src, i) for i in genlist.extra_depends] |
| for i in range(len(infilelist)): |
| if len(generator.outputs) == 1: |
| sole_output = os.path.join(self.get_target_private_dir(target), outfilelist[i]) |
| else: |
| sole_output = '' |
| curfile = infilelist[i] |
| infilename = os.path.join(self.environment.get_source_dir(), curfile) |
| outfiles = genlist.get_outputs_for(curfile) |
| outfiles = [os.path.join(self.get_target_private_dir(target), of) for of in outfiles] |
| args = [x.replace("@INPUT@", infilename).replace('@OUTPUT@', sole_output)\ |
| for x in base_args] |
| args = self.replace_outputs(args, self.get_target_private_dir(target), outfilelist) |
| args = [x.replace("@SOURCE_DIR@", self.environment.get_source_dir()).replace("@BUILD_DIR@", self.get_target_private_dir(target)) |
| for x in args] |
| cmdlist = exe_arr + args |
| elem = NinjaBuildElement(outfiles, 'CUSTOM_COMMAND', infilename) |
| if len(extra_dependencies) > 0: |
| elem.add_dep(extra_dependencies) |
| elem.add_item('DESC', 'Generating $out') |
| if isinstance(exe, build.BuildTarget): |
| elem.add_dep(self.get_target_filename(exe)) |
| elem.add_item('COMMAND', cmdlist) |
| elem.write(outfile) |
| |
| def scan_fortran_module_outputs(self, target): |
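| # Scan this target's Fortran sources for 'module X' statements and record which |
| # source file provides each module, for use in dependency generation. |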
| compiler = None |
| for c in self.build.compilers: |
| if c.get_language() == 'fortran': |
| compiler = c |
| break |
| if compiler is None: |
| self.fortran_deps[target.get_basename()] = {} |
| return |
| modre = re.compile(r"\s*module\s+(\w+)", re.IGNORECASE) |
| module_files = {} |
| for s in target.get_sources(): |
| # FIXME, does not work for generated Fortran sources, |
| # but those are really rare. I hope. |
| if not compiler.can_compile(s): |
| continue |
| for line in open(os.path.join(self.environment.get_source_dir(), s.subdir, s.fname)): |
| modmatch = modre.match(line) |
| if modmatch is not None: |
| modname = modmatch.group(1) |
| if modname.lower() == 'procedure': # MODULE PROCEDURE construct |
| continue |
| if modname in module_files: |
| raise InvalidArguments('Namespace collision: module %s defined in two files %s and %s.' % |
| (modname, module_files[modname], s)) |
| module_files[modname] = s |
| self.fortran_deps[target.get_basename()] = module_files |
| |
| def get_fortran_deps(self, compiler, src, target): |
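| # Find the module files this Fortran source requires by matching its 'use X' |
| # statements against the modules recorded by scan_fortran_module_outputs. |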
| mod_files = [] |
| usere = re.compile(r"\s*use\s+(\w+)", re.IGNORECASE) |
| dirname = os.path.join(self.get_target_dir(target), target.get_basename() + '.dir') |
| tdeps = self.fortran_deps[target.get_basename()] |
| for line in open(src): |
| usematch = usere.match(line) |
| if usematch is not None: |
| usename = usematch.group(1) |
| if usename not in tdeps: |
| # The module is not provided by any source file. This is due to |
| # a) missing file/typo/etc |
| # b) using a module provided by the compiler, such as OpenMP |
| # There's no easy way to tell which is which (that I know of) |
| # so just ignore this and go on. Ideally we would print a |
| # warning message to the user but this is a common occurrence, |
| # which would lead to lots of distracting noise. |
| continue |
| mod_source_file = tdeps[usename] |
| # Check if a source uses a module it exports itself. |
| # Potential bug if multiple targets have a file with |
| # the same name. |
| if mod_source_file.fname == os.path.split(src)[1]: |
| continue |
| mod_name = compiler.module_name_to_filename(usematch.group(1)) |
| mod_files.append(os.path.join(dirname, mod_name)) |
| return mod_files |
| |
| def generate_single_compile(self, target, outfile, src, is_generated=False, header_deps=[], order_deps=[]): |
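| # Write the build statement that compiles a single source file into an object, |
| # wiring up include paths, PCH usage, dep files and the Fortran module hack. |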
| if isinstance(src, str) and src.endswith('.h'): |
| raise RuntimeError('Internal error: header file %s passed for compilation.' % src) |
| if isinstance(src, RawFilename) and src.fname.endswith('.h'): |
| raise RuntimeError('Internal error: header file %s passed for compilation.' % src.fname) |
| extra_orderdeps = [] |
| compiler = self.get_compiler_for_source(src) |
| commands = self.generate_basic_compiler_args(target, compiler) |
| commands += compiler.get_include_args(self.get_target_private_dir(target)) |
| curdir = target.get_subdir() |
| tmppath = os.path.normpath(os.path.join(self.build_to_src, curdir)) |
| commands += compiler.get_include_args(tmppath) |
| if curdir == '': |
| curdir = '.' |
| commands += compiler.get_include_args(curdir) |
| for d in target.external_deps: |
| if d.need_threads(): |
| commands += compiler.thread_flags() |
| break |
| if isinstance(src, RawFilename): |
| rel_src = src.fname |
| elif is_generated: |
| if '/' in src: |
| rel_src = src |
| else: |
| rel_src = os.path.join(self.get_target_private_dir(target), src) |
| abs_src = os.path.join(self.environment.get_source_dir(), rel_src) |
| else: |
| if isinstance(src, File): |
| rel_src = src.rel_to_builddir(self.build_to_src) |
| else: |
| raise build.InvalidArguments('Invalid source type.') |
| abs_src = os.path.join(self.environment.get_build_dir(), rel_src) |
| if isinstance(src, RawFilename): |
| src_filename = src.fname |
| elif isinstance(src, File): |
| src_filename = src.fname |
| elif os.path.isabs(src): |
| src_filename = os.path.basename(src) |
| else: |
| src_filename = src |
| obj_basename = src_filename.replace('/', '_').replace('\\', '_') |
| rel_obj = os.path.join(self.get_target_private_dir(target), obj_basename) |
| rel_obj += '.' + self.environment.get_object_suffix() |
| dep_file = rel_obj + '.' + compiler.get_depfile_suffix() |
| if self.environment.coredata.use_pch: |
| pchlist = target.get_pch(compiler.language) |
| else: |
| pchlist = [] |
| if len(pchlist) == 0: |
| pch_dep = [] |
| else: |
| arr = [] |
| i = os.path.join(self.get_target_private_dir(target), compiler.get_pch_name(pchlist[0])) |
| arr.append(i) |
| pch_dep = arr |
| for i in target.get_include_dirs(): |
| basedir = i.get_curdir() |
| for d in i.get_incdirs(): |
| expdir = os.path.join(basedir, d) |
| fulldir = os.path.join(self.environment.get_source_dir(), expdir) |
| bargs = compiler.get_include_args(expdir) |
| sargs = compiler.get_include_args(fulldir) |
| commands += bargs |
| commands += sargs |
| if self.environment.coredata.use_pch: |
| commands += self.get_pch_include_args(compiler, target) |
| crstr = '' |
| if target.is_cross: |
| crstr = '_CROSS' |
| compiler_name = '%s%s_COMPILER' % (compiler.get_language(), crstr) |
| extra_deps = [] |
| if compiler.get_language() == 'fortran': |
| extra_deps += self.get_fortran_deps(compiler, abs_src, target) |
| # Dependency hack. Remove once multiple outputs in Ninja is fixed: |
| # https://groups.google.com/forum/#!topic/ninja-build/j-2RfBIOd_8 |
| for modname, srcfile in self.fortran_deps[target.get_basename()].items(): |
| modfile = os.path.join(self.get_target_dir(target), target.get_basename() + '.dir', |
| compiler.module_name_to_filename(modname)) |
| if srcfile == src: |
| depelem = NinjaBuildElement(modfile, 'FORTRAN_DEP_HACK', rel_obj) |
| depelem.write(outfile) |
| commands += compiler.get_module_outdir_args(os.path.join(self.get_target_dir(target), target.get_basename() + '.dir')) |
| |
| element = NinjaBuildElement(rel_obj, compiler_name, rel_src) |
| for d in header_deps: |
| if isinstance(d, RawFilename): |
| d = d.fname |
| elif not '/' in d: |
| d = os.path.join(self.get_target_private_dir(target), d) |
| element.add_dep(d) |
| for d in extra_deps: |
| element.add_dep(d) |
| for d in order_deps: |
| if isinstance(d, RawFilename): |
| d = d.fname |
| elif not '/' in d : |
| d = os.path.join(self.get_target_private_dir(target), d) |
| element.add_orderdep(d) |
| element.add_orderdep(pch_dep) |
| element.add_orderdep(extra_orderdeps) |
| element.add_item('DEPFILE', dep_file) |
| element.add_item('ARGS', commands) |
| element.write(outfile) |
| return rel_obj |
| |
| def generate_msvc_pch_command(self, target, compiler, pch): |
| if len(pch) != 2: |
| raise RuntimeError('MSVC requires one header and one source to produce precompiled headers.') |
| header = pch[0] |
| source = pch[1] |
| pchname = compiler.get_pch_name(header) |
| dst = os.path.join(self.get_target_private_dir(target), pchname) |
| |
| commands = [] |
| commands += self.generate_basic_compiler_args(target, compiler) |
| just_name = os.path.split(header)[1] |
| (objname, pch_args) = compiler.gen_pch_args(just_name, source, dst) |
| commands += pch_args |
| dep = dst + '.' + compiler.get_depfile_suffix() |
| return (commands, dep, dst, [objname]) |
| |
| def generate_gcc_pch_command(self, target, compiler, pch): |
| commands = [] |
| commands += self.generate_basic_compiler_args(target, compiler) |
| dst = os.path.join(self.get_target_private_dir(target), |
| os.path.split(pch)[-1] + '.' + compiler.get_pch_suffix()) |
| dep = dst + '.' + compiler.get_depfile_suffix() |
| return (commands, dep, dst, []) # Gcc does not create an object file during pch generation. |
| |
| def generate_pch(self, target, outfile): |
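| # Generate precompiled header build statements for C and C++; MSVC and GCC-style |
| # compilers need different commands and produce different outputs. |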
| cstr = '' |
| pch_objects = [] |
| if target.is_cross: |
| cstr = '_CROSS' |
| for lang in ['c', 'cpp']: |
| pch = target.get_pch(lang) |
| if len(pch) == 0: |
| continue |
| if '/' not in pch[0] or '/' not in pch[-1]: |
| raise build.InvalidArguments('Precompiled header of "%s" must not be in the same directory as source, please put it in a subdirectory.' % target.get_basename()) |
| compiler = self.get_compiler_for_lang(lang) |
| if compiler.id == 'msvc': |
| src = os.path.join(self.build_to_src, target.get_source_subdir(), pch[-1]) |
| (commands, dep, dst, objs) = self.generate_msvc_pch_command(target, compiler, pch) |
| extradep = os.path.join(self.build_to_src, target.get_source_subdir(), pch[0]) |
| else: |
| src = os.path.join(self.build_to_src, target.get_source_subdir(), pch[0]) |
| (commands, dep, dst, objs) = self.generate_gcc_pch_command(target, compiler, pch[0]) |
| extradep = None |
| pch_objects += objs |
| rulename = compiler.get_language() + cstr + '_PCH' |
| elem = NinjaBuildElement(dst, rulename, src) |
| if extradep is not None: |
| elem.add_dep(extradep) |
| elem.add_item('ARGS', commands) |
| elem.add_item('DEPFILE', dep) |
| elem.write(outfile) |
| return pch_objects |
| |
| def generate_shsym(self, outfile, target): |
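| # Extract a shared library's exported symbols into a .symbols file; dependents |
| # link against that file, so they only relink when the exported symbols change. |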
| target_name = self.get_target_filename(target) |
| targetdir = self.get_target_private_dir(target) |
| symname = os.path.join(targetdir, target_name + '.symbols') |
| elem = NinjaBuildElement(symname, 'SHSYM', target_name) |
| if self.environment.is_cross_build(): |
| elem.add_item('CROSS', '--cross-host=' + self.environment.cross_info['name']) |
| elem.write(outfile) |
| |
| def generate_link(self, target, outfile, outname, obj_list, linker, extra_args=[]): |
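| # Build the link command line for a target and return the Ninja element; the |
| # caller may still add alias commands before the element is written out. |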
| if isinstance(target, build.StaticLibrary): |
| linker_base = 'STATIC' |
| else: |
| linker_base = linker.get_language() # Fixme. |
| if isinstance(target, build.SharedLibrary): |
| self.generate_shsym(outfile, target) |
| crstr = '' |
| if target.is_cross: |
| crstr = '_CROSS' |
| linker_rule = linker_base + crstr + '_LINKER' |
| abspath = os.path.join(self.environment.get_build_dir(), target.subdir) |
| commands = [] |
| commands += linker.get_linker_always_args() |
| commands += linker.get_buildtype_linker_args(self.environment.coredata.buildtype) |
| if not isinstance(target, build.StaticLibrary): |
| commands += self.environment.coredata.external_link_args[linker.get_language()] |
| if isinstance(target, build.Executable): |
| commands += linker.get_std_exe_link_args() |
| elif isinstance(target, build.SharedLibrary): |
| commands += linker.get_std_shared_lib_link_args() |
| commands += linker.get_pic_args() |
| if hasattr(target, 'soversion'): |
| soversion = target.soversion |
| else: |
| soversion = None |
| commands += linker.get_soname_args(target.name, abspath, soversion) |
| elif isinstance(target, build.StaticLibrary): |
| commands += linker.get_std_link_args() |
| else: |
| raise RuntimeError('Unknown build target type.') |
| # Link arguments of static libraries are not put in the command line of |
| # the library. They are instead appended to the command line where |
| # the static library is used. |
| if linker_base == 'STATIC': |
| dependencies = [] |
| else: |
| dependencies = target.get_dependencies() |
| commands += self.build_target_link_arguments(linker, dependencies) |
| for d in target.external_deps: |
| if d.need_threads(): |
| commands += linker.thread_link_flags() |
| commands += target.link_args |
| # External deps must be last because target link libraries may depend on them. |
| if not isinstance(target, build.StaticLibrary): |
| for dep in target.get_external_deps(): |
| commands += dep.get_link_args() |
| for d in target.get_dependencies(): |
| if isinstance(d, build.StaticLibrary): |
| for dep in d.get_external_deps(): |
| commands += dep.get_link_args() |
| commands += linker.build_rpath_args(self.environment.get_build_dir(),\ |
| target.get_rpaths(), target.install_rpath) |
| if self.environment.coredata.coverage: |
| commands += linker.get_coverage_link_args() |
| commands += extra_args |
| dep_targets = [self.get_dependency_filename(t) for t in dependencies] |
| dep_targets += [os.path.join(self.environment.source_dir, |
| target.subdir, t) for t in target.link_depends] |
| elem = NinjaBuildElement(outname, linker_rule, obj_list) |
| elem.add_dep(dep_targets) |
| elem.add_item('LINK_ARGS', commands) |
| return elem |
| |
| def get_dependency_filename(self, t): |
| if isinstance(t, build.SharedLibrary): |
| return os.path.join(self.get_target_private_dir(t), self.get_target_filename(t) + '.symbols') |
| return self.get_target_filename(t) |
| |
| def generate_shlib_aliases(self, target, outdir, outfile, elem): |
| basename = target.get_filename() |
| aliases = target.get_aliaslist() |
| aliascmd = [] |
| if shutil.which('ln'): |
| for alias in aliases: |
| aliasfile = os.path.join(outdir, alias) |
| cmd = ["&&", 'ln', '-s', '-f', basename, aliasfile] |
| aliascmd += cmd |
| else: |
| mlog.log("Library versioning disabled because host does not support symlinks.") |
| elem.add_item('aliasing', aliascmd) |
| elem.write(outfile) |
| |
| def generate_gcov_clean(self, outfile): |
| gcno_elem = NinjaBuildElement('clean-gcno', 'CUSTOM_COMMAND', 'PHONY') |
| script_root = self.environment.get_script_dir() |
| clean_script = os.path.join(script_root, 'delwithsuffix.py') |
| gcno_elem.add_item('COMMAND', [sys.executable, clean_script, '.', 'gcno']) |
| gcno_elem.add_item('description', 'Deleting gcno files') |
| gcno_elem.write(outfile) |
| |
| gcda_elem = NinjaBuildElement('clean-gcda', 'CUSTOM_COMMAND', 'PHONY') |
| script_root = self.environment.get_script_dir() |
| clean_script = os.path.join(script_root, 'delwithsuffix.py') |
| gcda_elem.add_item('COMMAND', [sys.executable, clean_script, '.', 'gcda']) |
| gcda_elem.add_item('description', 'Deleting gcda files') |
| gcda_elem.write(outfile) |
| |
| def is_compilable_file(self, filename): |
| if filename.endswith('.cpp') or\ |
| filename.endswith('.c') or\ |
| filename.endswith('.cxx') or\ |
| filename.endswith('.cc') or\ |
| filename.endswith('.C'): |
| return True |
| return False |
| |
| def process_dep_gens(self, outfile, target): |
| src_deps = [] |
| other_deps = [] |
| for rule in self.dep_rules.values(): |
| srcs = target.get_original_kwargs().get(rule.src_keyword, []) |
| if isinstance(srcs, str): |
| srcs = [srcs] |
| for src in srcs: |
| plainname = os.path.split(src)[1] |
| basename = plainname.split('.')[0] |
| outname = rule.name_templ.replace('@BASENAME@', basename).replace('@PLAINNAME@', plainname) |
| outfilename = os.path.join(self.get_target_private_dir(target), outname) |
| infilename = os.path.join(self.build_to_src, target.get_source_subdir(), src) |
| elem = NinjaBuildElement(outfilename, rule.name, infilename) |
| elem.write(outfile) |
| if self.is_compilable_file(outfilename): |
| src_deps.append(outfilename) |
| else: |
| other_deps.append(outfilename) |
| return (src_deps, other_deps) |
| |
| def generate_ending(self, outfile): |
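| # Write the trailing rules: the 'all' and 'clean' targets and the build.ninja |
| # regeneration step that reruns Meson when build definition files change. |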
| targetlist = [self.get_target_filename(t) for t in self.build.get_targets().values()\ |
| if not isinstance(t, build.RunTarget)] |
| |
| elem = NinjaBuildElement('all', 'phony', targetlist) |
| elem.write(outfile) |
| |
| default = 'default all\n\n' |
| outfile.write(default) |
| |
| ninja_command = environment.detect_ninja() |
| if ninja_command is None: |
| raise MesonException('Could not detect ninja command') |
| elem = NinjaBuildElement('clean', 'CUSTOM_COMMAND', 'PHONY') |
| elem.add_item('COMMAND', [ninja_command, '-t', 'clean']) |
| elem.add_item('description', 'Cleaning') |
| if self.environment.coredata.coverage: |
| self.generate_gcov_clean(outfile) |
| elem.add_dep('clean-gcda') |
| elem.add_dep('clean-gcno') |
| elem.write(outfile) |
| |
| deps = [os.path.join(self.build_to_src, df) \ |
| for df in self.interpreter.get_build_def_files()] |
| if self.environment.is_cross_build(): |
| deps.append(os.path.join(self.build_to_src, |
| self.environment.coredata.cross_file)) |
| deps.append('meson-private/coredata.dat') |
| if os.path.exists(os.path.join(self.environment.get_source_dir(), 'meson_options.txt')): |
| deps.append(os.path.join(self.build_to_src, 'meson_options.txt')) |
| for sp in self.build.subprojects.keys(): |
| fname = os.path.join(self.environment.get_source_dir(), sp, 'meson_options.txt') |
| if os.path.isfile(fname): |
| deps.append(os.path.join(self.build_to_src, sp, 'meson_options.txt')) |
| elem = NinjaBuildElement('build.ninja', 'REGENERATE_BUILD', deps) |
| elem.add_item('pool', 'console') |
| elem.write(outfile) |
| |
| elem = NinjaBuildElement(deps, 'phony', '') |
| elem.write(outfile) |