| # SPDX-License-Identifier: Apache-2.0 |
| # Copyright © 2021-2024 Intel Corporation |
| |
| """Accumulator for p1689r5 module dependencies. |
| |
| See: https://www.open-std.org/jtc1/sc22/wg21/docs/papers/2022/p1689r5.html |
| """ |
| |
| from __future__ import annotations |
| import json |
| import re |
| import textwrap |
| import typing as T |
| |
| if T.TYPE_CHECKING: |
| from .depscan import Description, Rule |
| |
| # The quoting logic has been copied from the ninjabackend to avoid having to |
| # import half of Meson just to quote outputs, which is a performance problem |
| _QUOTE_PAT = re.compile(r'[$ :\n]') |
| |
| |
| def quote(text: str) -> str: |
| # Fast path for when no quoting is necessary |
| if not _QUOTE_PAT.search(text): |
| return text |
| if '\n' in text: |
| errmsg = textwrap.dedent(f'''\ |
| Ninja does not support newlines in rules. The content was: |
| |
| {text} |
| |
| Please report this error with a test case to the Meson bug tracker.''') |
| raise RuntimeError(errmsg) |
| return _QUOTE_PAT.sub(r'$\g<0>', text) |
| |
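| # Illustrative behaviour of quote(), derived from _QUOTE_PAT above (file
| # names are made up for the example):
| #
| #   quote('foo.o')     == 'foo.o'        # fast path, nothing to escape
| #   quote('foo bar.o') == 'foo$ bar.o'   # spaces are escaped with '$'
| #   quote('c:thing.o') == 'c$:thing.o'   # so are ':' and '$' themselves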
| |
| _PROVIDER_CACHE: T.Dict[str, str] = {} |
| |
| |
| def get_provider(rules: T.List[Rule], name: str) -> T.Optional[str]: |
| """Get the object that a module from another Target provides |
| |
| We must rely on the object file here instead of the module itself, because |
| the object rule is part of the generated build.ninja, while the module is |
| only declared inside a dyndep. This creates for the dyndep generator to |
| depend on previous dyndeps as order deps. Since the module |
| interface file will be generated when the object is generated we can rely on |
| that in proxy and simplify generation. |
| |
| :param rules: The list of rules to check |
| :param name: The logical-name to look for |
| :return: The object file of the rule providing the module, or None if no
|     provider can be found
| """ |
| # Cache the result for performance reasons |
| if name in _PROVIDER_CACHE: |
| return _PROVIDER_CACHE[name] |
| |
| for r in rules: |
| for p in r.get('provides', []): |
| if p['logical-name'] == name: |
| obj = r['primary-output'] |
| _PROVIDER_CACHE[name] = obj |
| return obj |
| return None |
| |
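| # Illustrative use of get_provider(), assuming a rule shaped like the p1689
| # example near the top of this file (names and the '.pcm' extension are
| # hypothetical):
| #
| #   rules = [{'primary-output': 'bar.o',
| #             'provides': [{'logical-name': 'bar',
| #                           'compiled-module-path': 'bar.pcm'}]}]
| #   get_provider(rules, 'bar') == 'bar.o'   # the object file, not 'bar.pcm'
| #   get_provider(rules, 'baz') is None      # no provider found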
| |
| def process_rules(rules: T.List[Rule], |
| extra_rules: T.List[Rule], |
| ) -> T.Iterable[T.Tuple[str, T.Optional[T.List[str]], T.List[str]]]: |
| """Process the rules for this Target |
| |
| :param rules: the rules for this target |
| :param extra_rules: the rules of all the targets this one links with, whose
|     provides may satisfy this target's requires
| :yield: A tuple of the output, the exported modules, and the consumed modules |
| """ |
| for rule in rules: |
| prov: T.Optional[T.List[str]] = None |
| req: T.List[str] = [] |
| if 'provides' in rule: |
| prov = [p['compiled-module-path'] for p in rule['provides']] |
| if 'requires' in rule: |
| for p in rule['requires']: |
| modfile = p.get('compiled-module-path') |
| if modfile is not None: |
| req.append(modfile) |
| else: |
| # We can't error if no provider is found, since the module may be
| # provided by the compiler itself
| found = get_provider(extra_rules, p['logical-name']) |
| if found: |
| req.append(found) |
| yield rule['primary-output'], prov, req |
| |
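| # For a rule that both provides and requires modules, process_rules() yields
| # tuples of the shape (illustrative names, '.pcm' extension assumed):
| #
| #   ('foo.o', ['foo.pcm'], ['bar.pcm'])
| #
| # The second element is None when the rule has no provides entry.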
| |
| def formatter(files: T.Optional[T.List[str]]) -> str: |
| if files: |
| fmt = ' '.join(quote(f) for f in files) |
| return f'| {fmt}' |
| return '' |
| |
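| # formatter() produces the implicit-output / implicit-input part of a ninja
| # build line, e.g. (hypothetical names):
| #
| #   formatter(['foo.pcm', 'bar.pcm']) == '| foo.pcm bar.pcm'
| #   formatter(None) == ''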
| |
| def gen(outfile: str, desc: Description, extra_rules: T.List[Rule]) -> int: |
| with open(outfile, 'w', encoding='utf-8') as f: |
| f.write('ninja_dyndep_version = 1\n\n') |
| |
| for obj, provides, requires in process_rules(desc['rules'], extra_rules): |
| ins = formatter(requires) |
| out = formatter(provides) |
| f.write(f'build {quote(obj)} {out}: dyndep {ins}\n\n') |
| |
| return 0 |
| |
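| # With the illustrative rule above, gen() writes a dyndep file roughly like
| # (file names are hypothetical):
| #
| #   ninja_dyndep_version = 1
| #
| #   build foo.o | foo.pcm: dyndep | bar.pcm
| #
| # i.e. the module interface file becomes an implicit output of the object and
| # the required modules become implicit inputs.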
| |
| def run(args: T.List[str]) -> int: |
| assert len(args) >= 2, 'got wrong number of arguments!' |
| outfile, jsonfile, *jsondeps = args |
| with open(jsonfile, 'r', encoding='utf-8') as f: |
| desc: Description = json.load(f) |
| |
| # All rules, needed to fulfill requires across TU and target boundaries
| rules = desc['rules'].copy() |
| for dep in jsondeps: |
| with open(dep, encoding='utf-8') as f: |
| d: Description = json.load(f) |
| rules.extend(d['rules']) |
| |
| return gen(outfile, desc, rules) |
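|
|
| # Illustrative invocation of run(), matching the argument unpacking above
| # (all file names are hypothetical):
| #
| #   run(['target.dd', 'target-deps.json', 'linked1-deps.json'])
| #
| # where 'target.dd' is the dyndep file to write, 'target-deps.json' is the
| # p1689 description for this target, and any remaining arguments are p1689
| # descriptions from targets this one links with.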