# SPDX-License-Identifier: Apache-2.0
# Copyright 2012-2023 The Meson development team
from __future__ import annotations
"""Mixin classes to be shared between C and C++ compilers.
Without this we'll end up with awful diamond inheritance problems. The goal
of this is to have mixins: classes that are designed *not* to be
standalone and that only work through inheritance.
"""
import collections
import functools
import glob
import itertools
import os
import re
import subprocess
import copy
import typing as T
from pathlib import Path
from ... import arglist
from ... import mesonlib
from ... import mlog
from ...linkers.linkers import GnuLikeDynamicLinkerMixin, SolarisDynamicLinker, CompCertDynamicLinker
from ...mesonlib import LibType
from .. import compilers
from ..compilers import CompileCheckMode
from .visualstudio import VisualStudioLikeCompiler
if T.TYPE_CHECKING:
from ...dependencies import Dependency
from ..._typing import ImmutableListProtocol
from ...environment import Environment
from ...compilers.compilers import Compiler
else:
# This is a bit clever, for mypy we pretend that these mixins descend from
# Compiler, so we get all of the methods and attributes defined for us, but
# for runtime we make them descend from object (which all classes normally
do). This gives us DRYer type checking, with no runtime impact.
Compiler = object
GROUP_FLAGS = re.compile(r'''^(?!-Wl,) .*\.so (?:\.[0-9]+)? (?:\.[0-9]+)? (?:\.[0-9]+)?$ |
^(?:-Wl,)?-l |
\.a$''', re.X)
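# GROUP_FLAGS matches arguments that name libraries on a GNU-style link line:
# paths ending in .so (optionally versioned, e.g. libfoo.so.1.2.3), -l and
# -Wl,-l flags, and paths ending in .a. It is used below to decide where
# -Wl,--start-group/--end-group need to be placed.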
class CLikeCompilerArgs(arglist.CompilerArgs):
prepend_prefixes = ('-I', '-L')
dedup2_prefixes = ('-I', '-isystem', '-L', '-D', '-U')
# NOTE: not thorough. A list of potential corner cases can be found in
# https://github.com/mesonbuild/meson/pull/4593#pullrequestreview-182016038
dedup1_prefixes = ('-l', '-Wl,-l', '-Wl,--export-dynamic')
dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a')
dedup1_args = ('-c', '-S', '-E', '-pipe', '-pthread')
def to_native(self, copy: bool = False) -> T.List[str]:
# This seems to be allowed, but could never work?
assert isinstance(self.compiler, compilers.Compiler), 'How did you get here'
# Check if we need to add --start/end-group for circular dependencies
# between static libraries, and for recursively searching for symbols
# needed by static libraries that are provided by object files or
# shared libraries.
self.flush_pre_post()
if copy:
new = self.copy()
else:
new = self
# This covers all of ld.bfd, ld.gold, and xild on Linux, which
# all act like (or are) gnu ld
# TODO: this could probably be added to the DynamicLinker instead
if isinstance(self.compiler.linker, (GnuLikeDynamicLinkerMixin, SolarisDynamicLinker, CompCertDynamicLinker)):
group_start = -1
group_end = -1
for i, each in enumerate(new):
if not GROUP_FLAGS.search(each):
continue
group_end = i
if group_start < 0:
# First occurrence of a library
group_start = i
# Only add groups if there are multiple libraries.
if group_end > group_start >= 0:
# Last occurrence of a library
new.insert(group_end + 1, '-Wl,--end-group')
new.insert(group_start, '-Wl,--start-group')
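# For example, a hypothetical argument list ['-o', 'prog', 'libfoo.a', '-lbar']
# becomes ['-o', 'prog', '-Wl,--start-group', 'libfoo.a', '-lbar', '-Wl,--end-group'].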
# Remove system/default include paths added with -isystem
default_dirs = self.compiler.get_default_include_dirs()
if default_dirs:
real_default_dirs = [self._cached_realpath(i) for i in default_dirs]
bad_idx_list: T.List[int] = []
for i, each in enumerate(new):
if not each.startswith('-isystem'):
continue
# Remove the -isystem and the path if the path is a default path
if (each == '-isystem' and
i < (len(new) - 1) and
self._cached_realpath(new[i + 1]) in real_default_dirs):
bad_idx_list += [i, i + 1]
elif each.startswith('-isystem=') and self._cached_realpath(each[9:]) in real_default_dirs:
bad_idx_list += [i]
elif self._cached_realpath(each[8:]) in real_default_dirs:
bad_idx_list += [i]
for i in reversed(bad_idx_list):
new.pop(i)
return self.compiler.unix_args_to_native(new._container)
@staticmethod
@functools.lru_cache(maxsize=None)
def _cached_realpath(arg: str) -> str:
return os.path.realpath(arg)
def __repr__(self) -> str:
self.flush_pre_post()
return f'CLikeCompilerArgs({self.compiler!r}, {self._container!r})'
class CLikeCompiler(Compiler):
"""Shared bits for the C and CPP Compilers."""
if T.TYPE_CHECKING:
warn_args: T.Dict[str, T.List[str]] = {}
# TODO: Replace this manual cache with functools.lru_cache
find_library_cache: T.Dict[T.Tuple[T.Tuple[str, ...], str, T.Tuple[str, ...], str, LibType], T.Optional[T.List[str]]] = {}
find_framework_cache: T.Dict[T.Tuple[T.Tuple[str, ...], str, T.Tuple[str, ...], bool], T.Optional[T.List[str]]] = {}
internal_libs = arglist.UNIXY_COMPILER_INTERNAL_LIBS
def __init__(self) -> None:
# If a child ObjC or CPP class has already set it, don't set it ourselves
self.can_compile_suffixes.add('h')
# Lazy initialized in get_preprocessor()
self.preprocessor: T.Optional[Compiler] = None
def compiler_args(self, args: T.Optional[T.Iterable[str]] = None) -> CLikeCompilerArgs:
# This is correct, mypy just doesn't understand co-operative inheritance
return CLikeCompilerArgs(self, args)
def needs_static_linker(self) -> bool:
return True # Needed for compiling static libraries.
def get_always_args(self) -> T.List[str]:
'''
Args that are always-on for all C compilers other than MSVC
'''
return self.get_largefile_args()
def get_no_stdinc_args(self) -> T.List[str]:
return ['-nostdinc']
def get_no_stdlib_link_args(self) -> T.List[str]:
return ['-nostdlib']
def get_warn_args(self, level: str) -> T.List[str]:
# TODO: this should be an enum
return self.warn_args[level]
def get_depfile_suffix(self) -> str:
return 'd'
def get_preprocess_only_args(self) -> T.List[str]:
return ['-E', '-P']
def get_compile_only_args(self) -> T.List[str]:
return ['-c']
def get_no_optimization_args(self) -> T.List[str]:
return ['-O0']
def get_output_args(self, outputname: str) -> T.List[str]:
return ['-o', outputname]
def get_werror_args(self) -> T.List[str]:
return ['-Werror']
def get_include_args(self, path: str, is_system: bool) -> T.List[str]:
if path == '':
path = '.'
if is_system:
return ['-isystem', path]
return ['-I' + path]
def get_compiler_dirs(self, env: 'Environment', name: str) -> T.List[str]:
'''
Get dirs from the compiler, either `libraries:` or `programs:`
'''
return []
@functools.lru_cache()
def _get_library_dirs(self, env: 'Environment',
elf_class: T.Optional[int] = None) -> 'ImmutableListProtocol[str]':
# TODO: replace elf_class with enum
dirs = self.get_compiler_dirs(env, 'libraries')
if elf_class is None or elf_class == 0:
return dirs
# if we do have an elf class for 32-bit or 64-bit, we want to check that
# the directory in question contains libraries of the appropriate class. Since
# system directories aren't mixed, we only need to check one file for each
# directory and go by that. If we can't check the file for some reason, assume
# the compiler knows what it's doing, and accept the directory anyway.
retval: T.List[str] = []
for d in dirs:
files = [f for f in os.listdir(d) if f.endswith('.so') and os.path.isfile(os.path.join(d, f))]
# if no files, accept directory and move on
if not files:
retval.append(d)
continue
for f in files:
file_to_check = os.path.join(d, f)
try:
with open(file_to_check, 'rb') as fd:
header = fd.read(5)
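# The first four bytes of an ELF file are the magic b'\x7fELF' and the
# fifth byte (EI_CLASS) is 1 for 32-bit objects or 2 for 64-bit ones,
# which is what elf_class is compared against below.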
# if file is not an ELF file, it's weird, but accept dir
# if it is elf, and the class matches, accept dir
if header[1:4] != b'ELF' or int(header[4]) == elf_class:
retval.append(d)
# at this point, it's an ELF file which doesn't match the
# appropriate elf_class, so skip this one
# stop scanning after the first successful read
break
except OSError:
# Skip the file if we can't read it
pass
return retval
def get_library_dirs(self, env: 'Environment',
elf_class: T.Optional[int] = None) -> T.List[str]:
"""Wrap the lru_cache so that we return a new copy and don't allow
mutation of the cached value.
"""
return self._get_library_dirs(env, elf_class).copy()
@functools.lru_cache()
def _get_program_dirs(self, env: 'Environment') -> 'ImmutableListProtocol[str]':
'''
Programs used by the compiler. Also where toolchain DLLs such as
libstdc++-6.dll are found with MinGW.
'''
return self.get_compiler_dirs(env, 'programs')
def get_program_dirs(self, env: 'Environment') -> T.List[str]:
return self._get_program_dirs(env).copy()
def get_pic_args(self) -> T.List[str]:
return ['-fPIC']
def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
return ['-include', os.path.basename(header)]
def get_pch_name(self, name: str) -> str:
return os.path.basename(name) + '.' + self.get_pch_suffix()
def get_default_include_dirs(self) -> T.List[str]:
return []
def gen_export_dynamic_link_args(self, env: 'Environment') -> T.List[str]:
return self.linker.export_dynamic_args(env)
def gen_import_library_args(self, implibname: str) -> T.List[str]:
return self.linker.import_library_args(implibname)
def _sanity_check_impl(self, work_dir: str, environment: 'Environment',
sname: str, code: str) -> None:
mlog.debug('Sanity testing ' + self.get_display_language() + ' compiler:', mesonlib.join_args(self.exelist))
mlog.debug(f'Is cross compiler: {self.is_cross!s}.')
source_name = os.path.join(work_dir, sname)
binname = sname.rsplit('.', 1)[0]
mode = CompileCheckMode.LINK
if self.is_cross:
binname += '_cross'
if environment.need_exe_wrapper(self.for_machine) and not environment.has_exe_wrapper():
# Linking cross built C/C++ apps is painful. You can't really
# tell if you should use -nostdlib or not and for example
# on OSX the compiler binary is the same but you need
# a ton of compiler flags to differentiate between
# arm and x86_64. So just compile.
mode = CompileCheckMode.COMPILE
cargs, largs = self._get_basic_compiler_args(environment, mode)
extra_flags = cargs + self.linker_to_compiler_args(largs)
# '.exe' is a valid executable suffix on all toolchains and platforms
binname += '.exe'
# Write binary check source
binary_name = os.path.join(work_dir, binname)
with open(source_name, 'w', encoding='utf-8') as ofile:
ofile.write(code)
# Compile sanity check
# NOTE: extra_flags must be added at the end. On MSVC, it might contain a '/link' argument
# after which all further arguments will be passed directly to the linker
cmdlist = self.exelist + [sname] + self.get_output_args(binname) + extra_flags
pc, stdo, stde = mesonlib.Popen_safe(cmdlist, cwd=work_dir)
mlog.debug('Sanity check compiler command line:', mesonlib.join_args(cmdlist))
mlog.debug('Sanity check compile stdout:')
mlog.debug(stdo)
mlog.debug('-----\nSanity check compile stderr:')
mlog.debug(stde)
mlog.debug('-----')
if pc.returncode != 0:
raise mesonlib.EnvironmentException(f'Compiler {self.name_string()} cannot compile programs.')
# Run sanity check
if environment.need_exe_wrapper(self.for_machine):
if not environment.has_exe_wrapper():
# Can't check if the binaries run so we have to assume they do
return
cmdlist = environment.exe_wrapper.get_command() + [binary_name]
else:
cmdlist = [binary_name]
mlog.debug('Running test binary command: ', mesonlib.join_args(cmdlist))
try:
# fortran code writes to stdout
pe = subprocess.run(cmdlist, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
except Exception as e:
raise mesonlib.EnvironmentException(f'Could not invoke sanity test executable: {e!s}.')
if pe.returncode != 0:
raise mesonlib.EnvironmentException(f'Executables created by {self.language} compiler {self.name_string()} are not runnable.')
def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
code = 'int main(void) { int class=0; return class; }\n'
return self._sanity_check_impl(work_dir, environment, 'sanitycheckc.c', code)
def check_header(self, hname: str, prefix: str, env: 'Environment', *,
extra_args: T.Union[None, T.List[str], T.Callable[['CompileCheckMode'], T.List[str]]] = None,
dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]:
code = f'''{prefix}
#include <{hname}>'''
return self.compiles(code, env, extra_args=extra_args,
dependencies=dependencies)
def has_header(self, hname: str, prefix: str, env: 'Environment', *,
extra_args: T.Union[None, T.List[str], T.Callable[['CompileCheckMode'], T.List[str]]] = None,
dependencies: T.Optional[T.List['Dependency']] = None,
disable_cache: bool = False) -> T.Tuple[bool, bool]:
code = f'''{prefix}
#ifdef __has_include
#if !__has_include("{hname}")
#error "Header '{hname}' could not be found"
#endif
#else
#include <{hname}>
#endif'''
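# When the preprocessor supports __has_include, the header's existence can
# be tested without actually including it; otherwise the #include itself
# serves as the test.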
return self.compiles(code, env, extra_args=extra_args,
dependencies=dependencies, mode=CompileCheckMode.PREPROCESS, disable_cache=disable_cache)
def has_header_symbol(self, hname: str, symbol: str, prefix: str,
env: 'Environment', *,
extra_args: T.Union[None, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]] = None,
dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]:
t = f'''{prefix}
#include <{hname}>
int main(void) {{
/* If it's not defined as a macro, try to use as a symbol */
#ifndef {symbol}
{symbol};
#endif
return 0;
}}'''
return self.compiles(t, env, extra_args=extra_args,
dependencies=dependencies)
def _get_basic_compiler_args(self, env: 'Environment', mode: CompileCheckMode) -> T.Tuple[T.List[str], T.List[str]]:
cargs: T.List[str] = []
largs: T.List[str] = []
if mode is CompileCheckMode.LINK:
# Sometimes we need to manually select the CRT to use with MSVC.
# One example is when trying to do a compiler check that involves
# linking with static libraries since MSVC won't select a CRT for
# us in that case and will error out asking us to pick one.
try:
crt_val = env.coredata.optstore.get_value('b_vscrt')
buildtype = env.coredata.optstore.get_value('buildtype')
cargs += self.get_crt_compile_args(crt_val, buildtype)
except (KeyError, AttributeError):
pass
# Add CFLAGS/CXXFLAGS/OBJCFLAGS/OBJCXXFLAGS and CPPFLAGS from the env
sys_args = env.coredata.get_external_args(self.for_machine, self.language)
if isinstance(sys_args, str):
sys_args = [sys_args]
# Apparently it is a thing to inject linker flags both
# via CFLAGS _and_ LDFLAGS, even though the former are
# also used during linking. These flags can break
# argument checks. Thanks, Autotools.
cleaned_sys_args = self.remove_linkerlike_args(sys_args)
cargs += cleaned_sys_args
if mode is CompileCheckMode.LINK:
ld_value = env.lookup_binary_entry(self.for_machine, self.language + '_ld')
if ld_value is not None:
largs += self.use_linker_args(ld_value[0], self.version)
# Add LDFLAGS from the env
sys_ld_args = env.coredata.get_external_link_args(self.for_machine, self.language)
# CFLAGS and CXXFLAGS go to both linking and compiling, but we want them
# to only appear on the command line once. Remove dupes.
largs += [x for x in sys_ld_args if x not in sys_args]
cargs += self.get_compiler_args_for_mode(mode)
return cargs, largs
def build_wrapper_args(self, env: 'Environment',
extra_args: T.Union[None, arglist.CompilerArgs, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]],
dependencies: T.Optional[T.List['Dependency']],
mode: CompileCheckMode = CompileCheckMode.COMPILE) -> arglist.CompilerArgs:
# TODO: the caller should handle the listing of these arguments
if extra_args is None:
extra_args = []
else:
# TODO: we want to do this in the caller
extra_args = mesonlib.listify(extra_args)
extra_args = mesonlib.listify([e(mode.value) if callable(e) else e for e in extra_args])
if dependencies is None:
dependencies = []
elif not isinstance(dependencies, collections.abc.Iterable):
# TODO: we want to ensure the front end does the listifying here
dependencies = [dependencies]
# Collect compiler arguments
cargs: arglist.CompilerArgs = self.compiler_args()
largs: T.List[str] = []
for d in dependencies:
# Add compile flags needed by dependencies
cargs += d.get_compile_args()
system_incdir = d.get_include_type() == 'system'
for i in d.get_include_dirs():
for idir in i.to_string_list(env.get_source_dir(), env.get_build_dir()):
cargs.extend(self.get_include_args(idir, system_incdir))
if mode is CompileCheckMode.LINK:
# Add link flags needed to find dependencies
largs += d.get_link_args()
ca, la = self._get_basic_compiler_args(env, mode)
cargs += ca
cargs += self.get_compiler_check_args(mode)
# On MSVC, compiler and linker flags must be separated by the '/link' argument.
# At this point '/link' may already be part of extra_args; otherwise it is added here.
largs += [l for l in self.linker_to_compiler_args(la) if l != '/link']
if self.linker_to_compiler_args([]) == ['/link']:
if largs != [] and '/link' not in extra_args:
extra_args += ['/link']
# all linker flags must be converted now, otherwise the arglist
# reordering will apply and -L flags will be reordered into a
# broken order. See arglist._should_prepend
largs = self.unix_args_to_native(largs)
args = cargs + extra_args + largs
return args
def _compile_int(self, expression: str, prefix: str, env: 'Environment',
extra_args: T.Union[None, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]],
dependencies: T.Optional[T.List['Dependency']]) -> bool:
t = f'''{prefix}
#include <stddef.h>
int main(void) {{ static int a[1-2*!({expression})]; a[0]=0; return 0; }}'''
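# The array above has size 1 when the expression is true and -1 (a compile
# error) when it is false, so whether this snippet compiles encodes the
# boolean result without having to run anything.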
return self.compiles(t, env, extra_args=extra_args,
dependencies=dependencies)[0]
def cross_compute_int(self, expression: str, low: T.Optional[int], high: T.Optional[int],
guess: T.Optional[int], prefix: str, env: 'Environment',
extra_args: T.Union[None, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]] = None,
dependencies: T.Optional[T.List['Dependency']] = None) -> int:
# Try user's guess first
if isinstance(guess, int):
if self._compile_int(f'{expression} == {guess}', prefix, env, extra_args, dependencies):
return guess
# If no bounds are given, compute them in the limit of int32
maxint = 0x7fffffff
minint = -0x80000000
if not isinstance(low, int) or not isinstance(high, int):
if self._compile_int(f'{expression} >= 0', prefix, env, extra_args, dependencies):
low = cur = 0
while self._compile_int(f'{expression} > {cur}', prefix, env, extra_args, dependencies):
low = cur + 1
if low > maxint:
raise mesonlib.EnvironmentException('Cross-compile check overflowed')
cur = min(cur * 2 + 1, maxint)
high = cur
else:
high = cur = -1
while self._compile_int(f'{expression} < {cur}', prefix, env, extra_args, dependencies):
high = cur - 1
if high < minint:
raise mesonlib.EnvironmentException('Cross-compile check overflowed')
cur = max(cur * 2, minint)
low = cur
else:
# Sanity check limits given by user
if high < low:
raise mesonlib.EnvironmentException('high limit smaller than low limit')
condition = f'{expression} <= {high} && {expression} >= {low}'
if not self._compile_int(condition, prefix, env, extra_args, dependencies):
raise mesonlib.EnvironmentException('Value out of given range')
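# For example, probing 'sizeof(int)' == 4 with no bounds first brackets the
# value by testing expr > 0, > 1, > 3, > 7 (the last fails), leaving
# low = 4 and high = 7; the binary search below then narrows that to 4
# with two more compile checks (expr <= 5, then expr <= 4).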
# Binary search
while low != high:
cur = low + int((high - low) / 2)
if self._compile_int(f'{expression} <= {cur}', prefix, env, extra_args, dependencies):
high = cur
else:
low = cur + 1
return low
def compute_int(self, expression: str, low: T.Optional[int], high: T.Optional[int],
guess: T.Optional[int], prefix: str, env: 'Environment', *,
extra_args: T.Union[None, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]],
dependencies: T.Optional[T.List['Dependency']] = None) -> int:
if extra_args is None:
extra_args = []
if self.is_cross:
return self.cross_compute_int(expression, low, high, guess, prefix, env, extra_args, dependencies)
t = f'''{prefix}
#include<stddef.h>
#include<stdio.h>
int main(void) {{
printf("%ld\\n", (long)({expression}));
return 0;
}}'''
res = self.run(t, env, extra_args=extra_args,
dependencies=dependencies)
if not res.compiled:
return -1
if res.returncode != 0:
raise mesonlib.EnvironmentException('Could not run compute_int test binary.')
return int(res.stdout)
def cross_sizeof(self, typename: str, prefix: str, env: 'Environment', *,
extra_args: T.Union[None, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]] = None,
dependencies: T.Optional[T.List['Dependency']] = None) -> int:
if extra_args is None:
extra_args = []
t = f'''{prefix}
#include <stddef.h>
int main(void) {{
{typename} something;
return 0;
}}'''
if not self.compiles(t, env, extra_args=extra_args,
dependencies=dependencies)[0]:
return -1
return self.cross_compute_int(f'sizeof({typename})', None, None, None, prefix, env, extra_args, dependencies)
def sizeof(self, typename: str, prefix: str, env: 'Environment', *,
extra_args: T.Union[None, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]] = None,
dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[int, bool]:
if extra_args is None:
extra_args = []
if self.is_cross:
r = self.cross_sizeof(typename, prefix, env, extra_args=extra_args,
dependencies=dependencies)
return r, False
t = f'''{prefix}
#include<stddef.h>
#include<stdio.h>
int main(void) {{
printf("%ld\\n", (long)(sizeof({typename})));
return 0;
}}'''
res = self.cached_run(t, env, extra_args=extra_args,
dependencies=dependencies)
if not res.compiled:
return -1, False
if res.returncode != 0:
raise mesonlib.EnvironmentException('Could not run sizeof test binary.')
return int(res.stdout), res.cached
def cross_alignment(self, typename: str, prefix: str, env: 'Environment', *,
extra_args: T.Optional[T.List[str]] = None,
dependencies: T.Optional[T.List['Dependency']] = None) -> int:
if extra_args is None:
extra_args = []
t = f'''{prefix}
#include <stddef.h>
int main(void) {{
{typename} something;
return 0;
}}'''
if not self.compiles(t, env, extra_args=extra_args,
dependencies=dependencies)[0]:
return -1
t = f'''{prefix}
#include <stddef.h>
struct tmp {{
char c;
{typename} target;
}};'''
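# With a leading char member, the compiler pads the struct so that 'target'
# starts at the next address satisfying its alignment requirement, so on
# common ABIs offsetof(struct tmp, target) equals the alignment of the type
# being probed.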
return self.cross_compute_int('offsetof(struct tmp, target)', None, None, None, t, env, extra_args, dependencies)
def alignment(self, typename: str, prefix: str, env: 'Environment', *,
extra_args: T.Optional[T.List[str]] = None,
dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[int, bool]:
if extra_args is None:
extra_args = []
if self.is_cross:
r = self.cross_alignment(typename, prefix, env, extra_args=extra_args,
dependencies=dependencies)
return r, False
t = f'''{prefix}
#include <stdio.h>
#include <stddef.h>
struct tmp {{
char c;
{typename} target;
}};
int main(void) {{
printf("%d", (int)offsetof(struct tmp, target));
return 0;
}}'''
res = self.cached_run(t, env, extra_args=extra_args,
dependencies=dependencies)
if not res.compiled:
raise mesonlib.EnvironmentException('Could not compile alignment test.')
if res.returncode != 0:
raise mesonlib.EnvironmentException('Could not run alignment test binary.')
align: int
try:
align = int(res.stdout)
except ValueError:
# If we get here, the user is most likely using a script that is
# pretending to be a compiler.
raise mesonlib.EnvironmentException('Could not run alignment test binary.')
if align == 0:
raise mesonlib.EnvironmentException(f'Could not determine alignment of {typename}. Sorry. You might want to file a bug.')
return align, res.cached
def get_define(self, dname: str, prefix: str, env: 'Environment',
extra_args: T.Union[T.List[str], T.Callable[[CompileCheckMode], T.List[str]]],
dependencies: T.Optional[T.List['Dependency']],
disable_cache: bool = False) -> T.Tuple[str, bool]:
delim_start = '"MESON_GET_DEFINE_DELIMITER_START"\n'
delim_end = '\n"MESON_GET_DEFINE_DELIMITER_END"'
sentinel_undef = '"MESON_GET_DEFINE_UNDEFINED_SENTINEL"'
code = f'''
{prefix}
#ifndef {dname}
# define {dname} {sentinel_undef}
#endif
{delim_start}{dname}{delim_end}'''
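# After preprocessing, the define is expanded between the two quoted delimiter
# strings (string literals pass through the preprocessor untouched), so for
# e.g. '#define FOO 42' the output contains the value 42 framed by the
# delimiters, and it is sliced out between them below.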
args = self.build_wrapper_args(env, extra_args, dependencies,
mode=CompileCheckMode.PREPROCESS).to_native()
func = functools.partial(self.cached_compile, code, env.coredata, extra_args=args, mode=CompileCheckMode.PREPROCESS)
if disable_cache:
func = functools.partial(self.compile, code, extra_args=args, mode=CompileCheckMode.PREPROCESS)
with func() as p:
cached = p.cached
if p.returncode != 0:
raise mesonlib.EnvironmentException(f'Could not get define {dname!r}')
# Get the preprocessed value between the delimiters
start_idx = p.stdout.find(delim_start)
end_idx = p.stdout.rfind(delim_end)
if (start_idx == -1) or (end_idx == -1) or (start_idx == end_idx):
raise mesonlib.MesonBugException('Delimiters not found in preprocessor output.')
define_value = p.stdout[start_idx + len(delim_start):end_idx]
if define_value == sentinel_undef:
define_value = None
else:
# Merge string literals
define_value = self._concatenate_string_literals(define_value).strip()
return define_value, cached
def get_return_value(self, fname: str, rtype: str, prefix: str,
env: 'Environment', extra_args: T.Optional[T.List[str]],
dependencies: T.Optional[T.List['Dependency']]) -> T.Union[str, int]:
# TODO: rtype should be an enum.
# TODO: maybe we can use overload to tell mypy when this will return int vs str?
if rtype == 'string':
fmt = '%s'
cast = '(char*)'
elif rtype == 'int':
fmt = '%lli'
cast = '(long long int)'
else:
raise AssertionError(f'BUG: Unknown return type {rtype!r}')
code = f'''{prefix}
#include <stdio.h>
int main(void) {{
printf ("{fmt}", {cast} {fname}());
return 0;
}}'''
res = self.run(code, env, extra_args=extra_args, dependencies=dependencies)
if not res.compiled:
raise mesonlib.EnvironmentException(f'Could not get return value of {fname}()')
if rtype == 'string':
return res.stdout
elif rtype == 'int':
try:
return int(res.stdout.strip())
except ValueError:
raise mesonlib.EnvironmentException(f'Return value of {fname}() is not an int')
assert False, 'Unreachable'
@staticmethod
def _no_prototype_templ() -> T.Tuple[str, str]:
"""
Try to find the function without a prototype from a header by defining
our own dummy prototype and trying to link with the C library (and
whatever else the compiler links in by default). This is very similar
to the check performed by Autoconf for AC_CHECK_FUNCS.
"""
# Define the symbol to something else since it is defined by the
# includes or defines listed by the user or by the compiler. This may
# include, for instance _GNU_SOURCE which must be defined before
# limits.h, which includes features.h
# Then, undef the symbol to get rid of it completely.
head = '''
#define {func} meson_disable_define_of_{func}
{prefix}
#include <limits.h>
#undef {func}
'''
# Override any GCC internal prototype and declare our own definition for
# the symbol. Use char because that's unlikely to be an actual return
# value for a function which ensures that we override the definition.
head += '''
#ifdef __cplusplus
extern "C"
#endif
char {func} (void);
'''
# The actual function call
main = '''
int main(void) {{
return {func} ();
}}'''
return head, main
@staticmethod
def _have_prototype_templ() -> T.Tuple[str, str]:
"""
Returns a header and main() call that uses the headers listed by the
user for the function prototype while checking if a function exists.
"""
# Add the 'prefix', aka defines, includes, etc that the user provides
# This may include, for instance _GNU_SOURCE which must be defined
# before limits.h, which includes features.h
head = '{prefix}\n#include <limits.h>\n'
# We don't know what the function takes or returns, so return it as an int.
# Just taking the address or comparing it to void is not enough because
# compilers are smart enough to optimize it away. The resulting binary
# is not run so we don't care what the return value is.
main = '''\nint main(void) {{
void *a = (void*) &{func};
long long b = (long long) a;
return (int) b;
}}'''
return head, main
def has_function(self, funcname: str, prefix: str, env: 'Environment', *,
extra_args: T.Optional[T.List[str]] = None,
dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]:
"""Determine if a function exists.
First, this function looks for the symbol in the default libraries
provided by the compiler (stdlib + a few others usually). If that
fails, it checks if any of the headers specified in the prefix provide
an implementation of the function, and if that fails, it checks if it's
implemented as a compiler-builtin.
"""
if extra_args is None:
extra_args = []
# Short-circuit if the check is already provided by the cross-info file
varname = 'has function ' + funcname
varname = varname.replace(' ', '_')
if self.is_cross:
val = env.properties.host.get(varname, None)
if val is not None:
if isinstance(val, bool):
return val, False
raise mesonlib.EnvironmentException(f'Cross variable {varname} is not a boolean.')
# TODO: we really need a protocol for this,
#
# class StrProto(typing.Protocol):
# def __str__(self) -> str: ...
fargs: T.Dict[str, T.Union[str, bool, int]] = {'prefix': prefix, 'func': funcname}
# glibc defines functions that are not available on Linux as stubs that
# fail with ENOSYS (such as e.g. lchmod). In this case we want to fail
# instead of detecting the stub as a valid symbol.
# We already included limits.h earlier to ensure that these are defined
# for stub functions.
stubs_fail = '''
#if defined __stub_{func} || defined __stub___{func}
fail fail fail this function is not going to work
#endif
'''
# If we have any includes in the prefix supplied by the user, assume
# that the user wants us to use the symbol prototype defined in those
# includes. If not, then try to do the Autoconf-style check with
# a dummy prototype definition of our own.
# This is needed when the linker determines symbol availability from an
# SDK based on the prototype in the header provided by the SDK.
# Ignoring this prototype would result in the symbol always being
# marked as available.
if '#include' in prefix:
head, main = self._have_prototype_templ()
else:
head, main = self._no_prototype_templ()
templ = head + stubs_fail + main
res, cached = self.links(templ.format(**fargs), env, extra_args=extra_args,
dependencies=dependencies)
if res:
return True, cached
# MSVC does not have compiler __builtin_ functions.
if self.get_id() in {'msvc', 'intel-cl'}:
return False, False
# Detect function as a built-in
#
# Some functions like alloca() are defined as compiler built-ins which
# are inlined by the compiler and you can't take their address, so we
# need to look for them differently. On nice compilers like clang, we
# can just directly use the __has_builtin() macro.
fargs['no_includes'] = '#include' not in prefix
is_builtin = funcname.startswith('__builtin_')
fargs['is_builtin'] = is_builtin
fargs['__builtin_'] = '' if is_builtin else '__builtin_'
t = '''{prefix}
int main(void) {{
/* With some toolchains (MSYS2/mingw for example) the compiler
* provides various builtins which are not really implemented and
* fall back to the stdlib where they aren't provided and fail at
* build/link time. If the user provided a header, the header did not
* lead to the function being defined, and the function we are checking
* is not itself a builtin, we assume the builtin is not functional and
* simply error out. */
#if !{no_includes:d} && !defined({func}) && !{is_builtin:d}
#error "No definition for {__builtin_}{func} found in the prefix"
#endif
#ifdef __has_builtin
#if !__has_builtin({__builtin_}{func})
#error "{__builtin_}{func} not found"
#endif
#elif ! defined({func})
{__builtin_}{func};
#endif
return 0;
}}'''
return self.links(t.format(**fargs), env, extra_args=extra_args,
dependencies=dependencies)
def has_members(self, typename: str, membernames: T.List[str],
prefix: str, env: 'Environment', *,
extra_args: T.Union[None, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]] = None,
dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]:
if extra_args is None:
extra_args = []
# Create code that accesses all members
members = ''.join(f'foo.{member};\n' for member in membernames)
t = f'''{prefix}
void bar(void) {{
{typename} foo;
{members}
}}'''
return self.compiles(t, env, extra_args=extra_args,
dependencies=dependencies)
def has_type(self, typename: str, prefix: str, env: 'Environment',
extra_args: T.Union[T.List[str], T.Callable[[CompileCheckMode], T.List[str]]], *,
dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]:
t = f'''{prefix}
void bar(void) {{
sizeof({typename});
}}'''
return self.compiles(t, env, extra_args=extra_args,
dependencies=dependencies)
def _symbols_have_underscore_prefix_searchbin(self, env: 'Environment') -> bool:
'''
Check if symbols have underscore prefix by compiling a small test binary
and then searching the binary for that string.
'''
symbol_name = b'meson_uscore_prefix'
code = '''#ifdef __cplusplus
extern "C" {
#endif
void ''' + symbol_name.decode() + ''' (void) {}
#ifdef __cplusplus
}
#endif
'''
args = self.get_compiler_check_args(CompileCheckMode.COMPILE)
n = '_symbols_have_underscore_prefix_searchbin'
with self._build_wrapper(code, env, extra_args=args, mode=CompileCheckMode.COMPILE, want_output=True) as p:
if p.returncode != 0:
raise RuntimeError(f'BUG: Unable to compile {n!r} check: {p.stderr}')
if not os.path.isfile(p.output_name):
raise RuntimeError(f'BUG: Can\'t find compiled test code for {n!r} check')
with open(p.output_name, 'rb') as o:
for line in o:
# Check if the underscore form of the symbol is somewhere
# in the output file.
if b'_' + symbol_name in line:
mlog.debug("Underscore prefix check found prefixed function in binary")
return True
# Else, check if the non-underscored form is present
elif symbol_name in line:
mlog.debug("Underscore prefix check found non-prefixed function in binary")
return False
raise RuntimeError(f'BUG: {n!r} check did not find symbol string in binary')
def _symbols_have_underscore_prefix_define(self, env: 'Environment') -> T.Optional[bool]:
'''
Check if symbols have underscore prefix by querying the
__USER_LABEL_PREFIX__ define that most compilers provide
for this. Return True if functions have an underscore prefix, False if
they do not, or None if it was not possible to determine, e.g. when the
compiler does not set the define or the define has an unexpected value.
'''
delim = '"MESON_HAVE_UNDERSCORE_DELIMITER" '
code = f'''
#ifndef __USER_LABEL_PREFIX__
#define MESON_UNDERSCORE_PREFIX unsupported
#else
#define MESON_UNDERSCORE_PREFIX __USER_LABEL_PREFIX__
#endif
{delim}MESON_UNDERSCORE_PREFIX
'''
with self._build_wrapper(code, env, mode=CompileCheckMode.PREPROCESS, want_output=False) as p:
if p.returncode != 0:
raise RuntimeError(f'BUG: Unable to preprocess _symbols_have_underscore_prefix_define check: {p.stdout}')
symbol_prefix = p.stdout.partition(delim)[-1].rstrip()
mlog.debug(f'Queried compiler for function prefix: __USER_LABEL_PREFIX__ is "{symbol_prefix!s}"')
if symbol_prefix == '_':
return True
elif symbol_prefix == '':
return False
else:
return None
def _symbols_have_underscore_prefix_list(self, env: 'Environment') -> T.Optional[bool]:
'''
Check if symbols have underscore prefix by consulting a hardcoded
list of cases where we know the results.
Return True if functions have an underscore prefix, False if they do not, or None if unknown.
'''
m = env.machines[self.for_machine]
# Darwin always uses the underscore prefix, no matter what
if m.is_darwin():
return True
# Windows uses the underscore prefix on x86 (32bit) only
if m.is_windows() or m.is_cygwin():
return m.cpu_family == 'x86'
return None
def symbols_have_underscore_prefix(self, env: 'Environment') -> bool:
'''
Check if the compiler prefixes an underscore to global C symbols
'''
# First, try to query the compiler directly
result = self._symbols_have_underscore_prefix_define(env)
if result is not None:
return result
# Else, try to consult a hardcoded list of cases we know
# absolutely have an underscore prefix
result = self._symbols_have_underscore_prefix_list(env)
if result is not None:
return result
# As a last resort, try search in a compiled binary, which is the
# most unreliable way of checking this, see #5482
return self._symbols_have_underscore_prefix_searchbin(env)
def _get_patterns(self, env: 'Environment', prefixes: T.List[str], suffixes: T.List[str], shared: bool = False) -> T.List[str]:
patterns: T.List[str] = []
for p in prefixes:
for s in suffixes:
patterns.append(p + '{}.' + s)
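# e.g. with prefixes ['lib', ''] and suffixes ['so', 'a'] this yields
# templates such as 'lib{}.so' and '{}.a', which are later formatted
# with the library name.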
if shared and env.machines[self.for_machine].is_openbsd():
# Shared libraries on OpenBSD can be named libfoo.so.X.Y:
# https://www.openbsd.org/faq/ports/specialtopics.html#SharedLibs
#
# This globbing is probably the best matching we can do since regex
# is expensive. It's wrong in many edge cases, but it will match
# correctly-named libraries and hopefully no one on OpenBSD names
# their files libfoo.so.9a.7b.1.0
for p in prefixes:
patterns.append(p + '{}.so.[0-9]*.[0-9]*')
return patterns
def get_library_naming(self, env: 'Environment', libtype: LibType, strict: bool = False) -> T.Tuple[str, ...]:
'''
Get library prefixes and suffixes for the target platform ordered by
priority
'''
stlibext = ['a']
# We've always allowed libname to be both `foo` and `libfoo`, and now
# people depend on it. Also, some people use prebuilt `foo.so` instead
# of `libfoo.so` for unknown reasons, and may also want to create
# `foo.so` by setting name_prefix to ''
if strict and not isinstance(self, VisualStudioLikeCompiler): # lib prefix is not usually used with msvc
prefixes = ['lib']
else:
prefixes = ['lib', '']
# Library suffixes and prefixes
if env.machines[self.for_machine].is_darwin():
shlibext = ['dylib', 'so']
elif env.machines[self.for_machine].is_windows():
# FIXME: .lib files can be import or static so we should read the
# file, figure out which one it is, and reject the wrong kind.
if isinstance(self, VisualStudioLikeCompiler):
shlibext = ['lib']
else:
shlibext = ['dll.a', 'lib', 'dll']
# Yep, static libraries can also be foo.lib
stlibext += ['lib']
elif env.machines[self.for_machine].is_cygwin():
shlibext = ['dll', 'dll.a']
prefixes = ['cyg'] + prefixes
elif self.id.lower() == 'c6000' or self.id.lower() == 'ti':
# TI C6000 compiler can use both extensions for static or dynamic libs.
stlibext = ['a', 'lib']
shlibext = ['dll', 'so']
else:
# Linux/BSDs
shlibext = ['so']
# Search priority
if libtype is LibType.PREFER_SHARED:
patterns = self._get_patterns(env, prefixes, shlibext, True)
patterns.extend([x for x in self._get_patterns(env, prefixes, stlibext, False) if x not in patterns])
elif libtype is LibType.PREFER_STATIC:
patterns = self._get_patterns(env, prefixes, stlibext, False)
patterns.extend([x for x in self._get_patterns(env, prefixes, shlibext, True) if x not in patterns])
elif libtype is LibType.SHARED:
patterns = self._get_patterns(env, prefixes, shlibext, True)
else:
assert libtype is LibType.STATIC
patterns = self._get_patterns(env, prefixes, stlibext, False)
return tuple(patterns)
@staticmethod
def _sort_shlibs_openbsd(libs: T.List[str]) -> T.List[str]:
def tuple_key(x: str) -> T.Tuple[int, ...]:
ver = x.rsplit('.so.', maxsplit=1)[1]
return tuple(int(i) for i in ver.split('.'))
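# e.g. 'libfoo.so.9.0' maps to (9, 0), so sorting with reverse=True
# puts the highest-versioned library first.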
filtered: T.List[str] = []
for lib in libs:
# Validate file as a shared library of type libfoo.so.X.Y
ret = lib.rsplit('.so.', maxsplit=1)
if len(ret) != 2:
continue
try:
tuple(int(i) for i in ret[1].split('.'))
except ValueError:
continue
filtered.append(lib)
return sorted(filtered, key=tuple_key, reverse=True)
@classmethod
def _get_trials_from_pattern(cls, pattern: str, directory: str, libname: str) -> T.List[Path]:
f = Path(directory) / pattern.format(libname)
# Globbing for OpenBSD
if '*' in pattern:
# NOTE: globbing matches directories and broken symlinks
# so we have to do an isfile test on it later
return [Path(x) for x in cls._sort_shlibs_openbsd(glob.glob(str(f)))]
return [f]
@staticmethod
def _get_file_from_list(env: Environment, paths: T.List[Path]) -> T.Optional[Path]:
'''
We just check whether the library exists. We can't do a link check
because the library might have unresolved symbols that require other
libraries. On macOS we check if the library matches our target
architecture.
'''
for p in paths:
if p.is_file():
if env.machines.host.is_darwin() and env.machines.build.is_darwin():
# Run `lipo` and check if the library supports the arch we want
archs = mesonlib.darwin_get_object_archs(str(p))
if not archs or env.machines.host.cpu_family not in archs:
mlog.debug(f'Rejected {p}, supports {archs} but need {env.machines.host.cpu_family}')
continue
return p
return None
@functools.lru_cache()
def output_is_64bit(self, env: 'Environment') -> bool:
'''
returns true if the output produced is 64-bit, false if 32-bit
'''
return self.sizeof('void *', '', env)[0] == 8
def _find_library_real(self, libname: str, env: 'Environment', extra_dirs: T.List[str], code: str, libtype: LibType, lib_prefix_warning: bool) -> T.Optional[T.List[str]]:
# First, try whether we can just add the library with -l.
# GCC and friends seem to prefer built-in lib dirs to -L dirs.
# Only try to find std libs if no extra dirs specified.
# The built-in search procedure will always favour .so and then always
# search for .a. This is only allowed if libtype is LibType.PREFER_SHARED
if ((not extra_dirs and libtype is LibType.PREFER_SHARED) or
libname in self.internal_libs):
cargs = ['-l' + libname]
largs = self.get_linker_always_args() + self.get_allow_undefined_link_args()
extra_args = cargs + self.linker_to_compiler_args(largs)
if self.links(code, env, extra_args=extra_args, disable_cache=True)[0]:
return cargs
# Don't do a manual search for internal libs
if libname in self.internal_libs:
return None
# Not found or we want to use a specific libtype? Try to find the
# library file itself.
patterns = self.get_library_naming(env, libtype)
# try to detect if we are 64-bit or 32-bit. If we can't
# detect, we will just skip path validity checks done in
# get_library_dirs() call
try:
if self.output_is_64bit(env):
elf_class = 2
else:
elf_class = 1
except (mesonlib.MesonException, KeyError): # TODO evaluate if catching KeyError is wanted here
elf_class = 0
# Search in the specified dirs, and then in the system libraries
for d in itertools.chain(extra_dirs, self.get_library_dirs(env, elf_class)):
for p in patterns:
trials = self._get_trials_from_pattern(p, d, libname)
if not trials:
continue
trial = self._get_file_from_list(env, trials)
if not trial:
continue
if libname.startswith('lib') and trial.name.startswith(libname) and lib_prefix_warning:
mlog.warning(f'find_library({libname!r}) starting in "lib" only works by accident and is not portable')
return [trial.as_posix()]
return None
def _find_library_impl(self, libname: str, env: 'Environment', extra_dirs: T.List[str],
code: str, libtype: LibType, lib_prefix_warning: bool) -> T.Optional[T.List[str]]:
# These libraries are either built-in or invalid
if libname in self.ignore_libs:
return []
if isinstance(extra_dirs, str):
extra_dirs = [extra_dirs]
key = (tuple(self.exelist), libname, tuple(extra_dirs), code, libtype)
if key not in self.find_library_cache:
value = self._find_library_real(libname, env, extra_dirs, code, libtype, lib_prefix_warning)
self.find_library_cache[key] = value
else:
value = self.find_library_cache[key]
if value is None:
return None
return value.copy()
def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str],
libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True) -> T.Optional[T.List[str]]:
code = 'int main(void) { return 0; }\n'
return self._find_library_impl(libname, env, extra_dirs, code, libtype, lib_prefix_warning)
def find_framework_paths(self, env: 'Environment') -> T.List[str]:
'''
These are usually /Library/Frameworks and /System/Library/Frameworks,
unless you select a particular macOS SDK with the -isysroot flag.
You can also add to this by setting -F in CFLAGS.
'''
# TODO: this really needs to be *AppleClang*, not just any clang.
if self.id != 'clang':
raise mesonlib.MesonException('Cannot find framework path with non-clang compiler')
# Construct the compiler command-line
commands = self.get_exelist(ccache=False) + ['-v', '-E', '-']
commands += self.get_always_args()
# Add CFLAGS/CXXFLAGS/OBJCFLAGS/OBJCXXFLAGS from the env
commands += env.coredata.get_external_args(self.for_machine, self.language)
mlog.debug('Finding framework path by running: ', ' '.join(commands), '\n')
os_env = os.environ.copy()
os_env['LC_ALL'] = 'C'
_, _, stde = mesonlib.Popen_safe(commands, env=os_env, stdin=subprocess.PIPE)
paths: T.List[str] = []
for line in stde.split('\n'):
if '(framework directory)' not in line:
continue
# line is of the form:
# ` /path/to/framework (framework directory)`
paths.append(line[:-21].strip())
return paths
def _find_framework_real(self, name: str, env: 'Environment', extra_dirs: T.List[str], allow_system: bool) -> T.Optional[T.List[str]]:
code = 'int main(void) { return 0; }'
link_args: T.List[str] = []
for d in extra_dirs:
link_args += ['-F' + d]
# We can pass -Z to disable searching in the system frameworks, but
# then we must also pass -L/usr/lib to pick up libSystem.dylib
extra_args = [] if allow_system else ['-Z', '-L/usr/lib']
link_args += ['-framework', name]
if self.links(code, env, extra_args=(extra_args + link_args), disable_cache=True)[0]:
return link_args
return None
def _find_framework_impl(self, name: str, env: 'Environment', extra_dirs: T.List[str],
allow_system: bool) -> T.Optional[T.List[str]]:
if isinstance(extra_dirs, str):
extra_dirs = [extra_dirs]
key = (tuple(self.exelist), name, tuple(extra_dirs), allow_system)
if key in self.find_framework_cache:
value = self.find_framework_cache[key]
else:
value = self._find_framework_real(name, env, extra_dirs, allow_system)
self.find_framework_cache[key] = value
if value is None:
return None
return value.copy()
def find_framework(self, name: str, env: 'Environment', extra_dirs: T.List[str],
allow_system: bool = True) -> T.Optional[T.List[str]]:
'''
Finds the framework with the specified name and returns the link args
for it, or None when the framework is not found.
'''
# TODO: should probably check for macOS?
return self._find_framework_impl(name, env, extra_dirs, allow_system)
def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]:
# TODO: does this belong here or in GnuLike or maybe PosixLike?
return []
def get_crt_link_args(self, crt_val: str, buildtype: str) -> T.List[str]:
# TODO: does this belong here or in GnuLike or maybe PosixLike?
return []
def thread_flags(self, env: 'Environment') -> T.List[str]:
# TODO: does this belong here or in GnuLike or maybe PosixLike?
host_m = env.machines[self.for_machine]
if host_m.is_haiku() or host_m.is_darwin():
return []
return ['-pthread']
def linker_to_compiler_args(self, args: T.List[str]) -> T.List[str]:
return args.copy()
def has_arguments(self, args: T.List[str], env: 'Environment', code: str,
mode: CompileCheckMode) -> T.Tuple[bool, bool]:
return self.compiles(code, env, extra_args=args, mode=mode)
def _has_multi_arguments(self, args: T.List[str], env: 'Environment', code: str) -> T.Tuple[bool, bool]:
new_args: T.List[str] = []
for arg in args:
# some compilers, e.g. GCC, don't warn for unsupported warning-disable
# flags, so when we are testing a flag like "-Wno-forgotten-towel", also
# check the equivalent enable flag, "-Wforgotten-towel"
if arg.startswith('-Wno-'):
new_args.append('-W' + arg[5:])
if arg.startswith('-Wl,'):
mlog.warning(f'{arg} looks like a linker argument, '
'but has_argument and other similar methods only '
'support checking compiler arguments. Using them '
'to check linker arguments is never supported, '
'and the results are likely to be wrong regardless of '
'the compiler you are using. Use has_link_argument or '
'a similar method instead.')
new_args.append(arg)
return self.has_arguments(new_args, env, code, mode=CompileCheckMode.COMPILE)
def has_multi_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]:
return self._has_multi_arguments(args, env, 'extern int i;\nint i;\n')
def _has_multi_link_arguments(self, args: T.List[str], env: 'Environment', code: str) -> T.Tuple[bool, bool]:
# When checking link flags we first need to add --fatal-warnings,
# otherwise some linker checks could give false positives.
args = self.linker.fatal_warnings() + args
args = self.linker_to_compiler_args(args)
return self.has_arguments(args, env, code, mode=CompileCheckMode.LINK)
def has_multi_link_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]:
return self._has_multi_link_arguments(args, env, 'int main(void) { return 0; }\n')
@staticmethod
def _concatenate_string_literals(s: str) -> str:
pattern = re.compile(r'(?P<pre>.*([^\\]")|^")(?P<str1>([^\\"]|\\.)*)"\s+"(?P<str2>([^\\"]|\\.)*)(?P<post>".*)')
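# e.g. '"foo" "bar"' collapses to '"foobar"'; the loop below repeats the
# substitution until no adjacent literals remain.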
ret = s
m = pattern.match(ret)
while m:
ret = ''.join(m.group('pre', 'str1', 'str2', 'post'))
m = pattern.match(ret)
return ret
def get_has_func_attribute_extra_args(self, name: str) -> T.List[str]:
# Most compilers (such as GCC and Clang) only warn about unknown or
# ignored attributes, so force an error. Overridden in GCC and Clang
# mixins.
return ['-Werror']
def has_func_attribute(self, name: str, env: 'Environment') -> T.Tuple[bool, bool]:
# Just assume that if we're not on Windows, dllimport and dllexport
# don't work
m = env.machines[self.for_machine]
if not (m.is_windows() or m.is_cygwin()):
if name in {'dllimport', 'dllexport'}:
return False, False
return self.compiles(self.attribute_check_func(name), env,
extra_args=self.get_has_func_attribute_extra_args(name))
def get_assert_args(self, disable: bool, env: 'Environment') -> T.List[str]:
if disable:
return ['-DNDEBUG']
return []
@functools.lru_cache(maxsize=None)
def can_compile(self, src: 'mesonlib.FileOrString') -> bool:
# Files we preprocess can be anything, e.g. .in
if self.mode == 'PREPROCESSOR':
return True
return super().can_compile(src)
def get_preprocessor(self) -> Compiler:
if not self.preprocessor:
self.preprocessor = copy.copy(self)
self.preprocessor.exelist = self.exelist + self.get_preprocess_to_file_args()
self.preprocessor.mode = 'PREPROCESSOR'
self.modes.append(self.preprocessor)
return self.preprocessor