|
|
|
# SPDX-License-Identifier: Apache-2.0
|
|
|
|
# Copyright 2014-2016 The Meson development team
|
|
|
|
|
|
|
|
from __future__ import annotations
|
|
|
|
|
|
|
|
"""This is a helper script for IDE developers. It allows you to
|
|
|
|
extract information such as list of targets, files, compiler flags,
|
|
|
|
tests and so on. All output is in JSON for simple parsing.
|
|
|
|
|
|
|
|
Currently only works for the Ninja backend. Others use generated
|
|
|
|
project files and don't need this info."""
|
|
|
|
|
|
|
|
from contextlib import redirect_stdout
|
|
|
|
import collections
|
|
|
|
import dataclasses
|
|
|
|
import json
|
|
|
|
import os
|
|
|
|
from pathlib import Path, PurePath
|
|
|
|
import sys
|
|
|
|
import typing as T
|
|
|
|
|
|
|
|
from . import build, mesonlib, options, coredata as cdata
|
|
|
|
from .ast import IntrospectionInterpreter, BUILD_TARGET_FUNCTIONS, AstConditionLevel, AstIDGenerator, AstIndentationGenerator, AstJSONPrinter
|
|
|
|
from .backend import backends
|
|
|
|
from .dependencies import Dependency
|
|
|
|
from . import environment
|
|
|
|
from .interpreterbase import ObjectHolder
|
|
|
|
from .options import OptionKey
|
|
|
|
from .mparser import FunctionNode, ArrayNode, ArgumentNode, StringNode
|
|
|
|
|
|
|
|
if T.TYPE_CHECKING:
|
|
|
|
import argparse
|
|
|
|
from typing import Any
|
|
|
|
from .options import UserOption
|
|
|
|
|
|
|
|
from .interpreter import Interpreter
|
|
|
|
from .mparser import BaseNode
|
|
|
|
|
|
|
|
def get_meson_info_file(info_dir: str) -> str:
    """Return the path of the top-level meson-info.json inside *info_dir*."""
    info_file = os.path.join(info_dir, 'meson-info.json')
    return info_file
|
|
|
|
|
|
|
|
def get_meson_introspection_version() -> str:
    """Version of the introspection file format produced by this module."""
    return '1.0.0'
|
|
|
|
|
|
|
|
def get_meson_introspection_required_version() -> T.List[str]:
    """Version constraints an existing introspection dump must satisfy to be readable."""
    return ['>=1.0', '<2.0']
|
|
|
|
|
|
|
|
class IntroCommand:
    """Description of a single introspection command.

    Carries the argparse help text plus up to two implementations:
    ``func`` runs against a configured build directory, while ``no_bd``
    runs directly from source via an IntrospectionInterpreter.
    """

    def __init__(self,
                 desc: str,
                 func: T.Optional[T.Callable[[], T.Union[dict, list]]] = None,
                 no_bd: T.Optional[T.Callable[[IntrospectionInterpreter], T.Union[dict, list]]] = None) -> None:
        # Help strings are stored with a trailing period for consistent
        # argparse output.
        self.desc = f'{desc}.'
        self.func = func
        self.no_bd = no_bd
|
|
|
|
|
|
|
|
def get_meson_introspection_types(coredata: T.Optional[cdata.CoreData] = None,
                                  builddata: T.Optional[build.Build] = None,
                                  backend: T.Optional[backends.Backend] = None) -> 'T.Mapping[str, IntroCommand]':
    """Return the table of all introspection commands, keyed by command name.

    When *backend* and *builddata* are both given, the build-dir based
    ``func`` callbacks are usable; otherwise only the ``no_bd``
    (from-source) callbacks may be called.
    """
    if backend and builddata:
        benchmarkdata = backend.create_test_serialisation(builddata.get_benchmarks())
        testdata = backend.create_test_serialisation(builddata.get_tests())
        installdata = backend.create_install_data()
        interpreter = backend.interpreter
    else:
        benchmarkdata = testdata = installdata = None

    # NOTE: `interpreter` is only bound in the backend branch above; the
    # 'buildsystem_files' lambda must therefore only be invoked when a
    # backend/builddata pair was supplied.
    # Enforce key order for argparse
    return collections.OrderedDict([
        ('ast', IntroCommand('Dump the AST of the meson file', no_bd=dump_ast)),
        ('benchmarks', IntroCommand('List all benchmarks', func=lambda: list_benchmarks(benchmarkdata))),
        ('buildoptions', IntroCommand('List all build options', func=lambda: list_buildoptions(coredata), no_bd=list_buildoptions_from_source)),
        ('buildsystem_files', IntroCommand('List files that make up the build system', func=lambda: list_buildsystem_files(builddata, interpreter))),
        ('compilers', IntroCommand('List used compilers', func=lambda: list_compilers(coredata))),
        ('dependencies', IntroCommand('List external dependencies', func=lambda: list_deps(coredata, backend), no_bd=list_deps_from_source)),
        ('scan_dependencies', IntroCommand('Scan for dependencies used in the meson.build file', no_bd=list_deps_from_source)),
        ('installed', IntroCommand('List all installed files and directories', func=lambda: list_installed(installdata))),
        ('install_plan', IntroCommand('List all installed files and directories with their details', func=lambda: list_install_plan(installdata))),
        ('machines', IntroCommand('Information about host, build, and target machines', func=lambda: list_machines(builddata))),
        ('projectinfo', IntroCommand('Information about projects', func=lambda: list_projinfo(builddata), no_bd=list_projinfo_from_source)),
        ('targets', IntroCommand('List top level targets', func=lambda: list_targets(builddata, installdata, backend), no_bd=list_targets_from_source)),
        ('tests', IntroCommand('List all unit tests', func=lambda: list_tests(testdata))),
    ])
|
|
|
|
|
|
|
|
# Note: when adding arguments, please also add them to the completion
|
|
|
|
# scripts in $MESONSRC/data/shell-completions/
|
|
|
|
def add_arguments(parser: argparse.ArgumentParser) -> None:
    """Register all introspection command-line flags on *parser*."""
    # One boolean flag per introspection command (e.g. --targets,
    # --buildsystem-files); underscores become dashes on the CLI.
    for name, cmd in get_meson_introspection_types().items():
        parser.add_argument('--' + name.replace('_', '-'), action='store_true',
                            dest=name, default=False, help=cmd.desc)

    parser.add_argument('--backend', choices=sorted(options.backendlist), dest='backend', default='ninja',
                        help='The backend to use for the --buildoptions introspection.')
    parser.add_argument('-a', '--all', action='store_true', dest='all', default=False,
                        help='Print all available information.')
    parser.add_argument('-i', '--indent', action='store_true', dest='indent', default=False,
                        help='Enable pretty printed JSON.')
    parser.add_argument('-f', '--force-object-output', action='store_true', dest='force_dict', default=False,
                        help='Always use the new JSON format for multiple entries (even for 0 and 1 introspection commands)')
    parser.add_argument('builddir', nargs='?', default='.', help='The build directory')
|
|
|
|
|
|
|
|
def dump_ast(intr: IntrospectionInterpreter) -> T.Dict[str, T.Any]:
    """Serialise the parsed meson.build AST of *intr* into a JSON-friendly dict."""
    json_printer = AstJSONPrinter()
    intr.ast.accept(json_printer)
    return json_printer.result
|
|
|
|
|
|
|
|
def list_installed(installdata: backends.InstallData) -> T.Dict[str, str]:
    """Map every installed file to its final installed location.

    Keys are source/build-tree paths; values are the absolute installed
    paths (with the prefix applied). Returns an empty dict when no
    install data is available.
    """
    res = {}
    if installdata is not None:
        for t in installdata.targets:
            # Build targets: key is the output file inside the build directory.
            res[os.path.join(installdata.build_dir, t.fname)] = \
                os.path.join(installdata.prefix, t.outdir, os.path.basename(t.fname))
        for i in installdata.data:
            res[i.path] = os.path.join(installdata.prefix, i.install_path)
        for i in installdata.headers:
            # For headers install_path is a directory, so append the basename.
            res[i.path] = os.path.join(installdata.prefix, i.install_path, os.path.basename(i.path))
        for i in installdata.man:
            res[i.path] = os.path.join(installdata.prefix, i.install_path)
        for i in installdata.install_subdirs:
            res[i.path] = os.path.join(installdata.prefix, i.install_path)
        for s in installdata.symlinks:
            # Symlinks are keyed by their basename only.
            basename = os.path.basename(s.name)
            res[basename] = os.path.join(installdata.prefix, s.install_path, basename)
    return res
|
|
|
|
|
|
|
|
def list_install_plan(installdata: backends.InstallData) -> T.Dict[str, T.Dict[str, T.Dict[str, T.Optional[str]]]]:
    """Describe every installed file with its destination, tag and subproject.

    The result maps a data type ('targets', 'data', 'man', 'headers', ...)
    to a dict keyed by source path; each entry records destination path,
    install tag and owning subproject (None when unset).
    """
    plan: T.Dict[str, T.Dict[str, T.Dict[str, T.Optional[str]]]] = {
        'targets': {
            os.path.join(installdata.build_dir, target.fname): {
                'destination': target.out_name,
                'tag': target.tag or None,
                'subproject': target.subproject or None,
                'install_rpath': target.install_rpath or None
            }
            for target in installdata.targets
        },
    }
    for key, data_list in {
        'data': installdata.data,
        'man': installdata.man,
        'headers': installdata.headers,
        'install_subdirs': installdata.install_subdirs
    }.items():
        # Mypy doesn't recognize SubdirInstallData as a subclass of InstallDataBase
        for data in data_list: # type: ignore[attr-defined]
            # An entry may override its category via data_type; otherwise it
            # falls under the dict key it was listed in.
            data_type = data.data_type or key
            install_path_name = data.install_path_name
            if key == 'headers': # in the headers, install_path_name is the directory
                install_path_name = os.path.join(install_path_name, os.path.basename(data.path))

            entry = {
                'destination': install_path_name,
                'tag': data.tag or None,
                'subproject': data.subproject or None,
            }

            if key == 'install_subdirs':
                # Subdir installs can carry exclusion lists for files/dirs.
                exclude_files, exclude_dirs = data.exclude or ([], [])
                entry['exclude_dirs'] = list(exclude_dirs)
                entry['exclude_files'] = list(exclude_files)

            plan[data_type] = plan.get(data_type, {})
            plan[data_type][data.path] = entry

    return plan
|
|
|
|
|
|
|
|
def get_target_dir(coredata: cdata.CoreData, subdir: str) -> str:
    """Return the output directory for a target, honouring the 'layout' option."""
    # With the flat layout all outputs land in a single 'meson-out' directory;
    # otherwise the source-tree subdir structure is mirrored.
    is_flat = coredata.get_option(OptionKey('layout')) == 'flat'
    return 'meson-out' if is_flat else subdir
|
|
|
|
|
|
|
|
def list_targets_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]:
    """List build targets by walking the AST only, without a configured build dir.

    Compiler and language details are unknown at this stage, so every
    target gets a single placeholder 'target_sources' entry with
    language 'unknown'.
    """
    tlist: T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]] = []
    root_dir = Path(intr.source_root)

    def nodes_to_paths(node_list: T.List[BaseNode]) -> T.List[Path]:
        # Collect string arguments from the AST nodes and resolve them
        # relative to the current target's subdir. NOTE: this closure reads
        # `i` from the enclosing for-loop below, so it must only be called
        # from inside that loop.
        res: T.List[Path] = []
        for n in node_list:
            args: T.List[BaseNode] = []
            if isinstance(n, FunctionNode):
                args = list(n.args.arguments)
                if n.func_name.value in BUILD_TARGET_FUNCTIONS:
                    # The first argument of a build target call is the target
                    # name, not a source file.
                    args.pop(0)
            elif isinstance(n, ArrayNode):
                args = n.args.arguments
            elif isinstance(n, ArgumentNode):
                args = n.arguments
            for j in args:
                if isinstance(j, StringNode):
                    assert isinstance(j.value, str)
                    res += [Path(j.value)]
                elif isinstance(j, str):
                    res += [Path(j)]
        res = [root_dir / i['subdir'] / x for x in res]
        res = [x.resolve() for x in res]
        return res

    for i in intr.targets:
        sources = nodes_to_paths(i['sources'])
        extra_f = nodes_to_paths(i['extra_files'])
        outdir = get_target_dir(intr.coredata, i['subdir'])

        tlist += [{
            'name': i['name'],
            'id': i['id'],
            'type': i['type'],
            'defined_in': i['defined_in'],
            'filename': [os.path.join(outdir, x) for x in i['outputs']],
            'build_by_default': i['build_by_default'],
            # Placeholder: no compiler information is available from source only.
            'target_sources': [{
                'language': 'unknown',
                'compiler': [],
                'parameters': [],
                'sources': [str(x) for x in sources],
                'generated_sources': []
            }],
            'depends': [],
            'extra_files': [str(x) for x in extra_f],
            'subproject': None, # Subprojects are not supported
            'installed': i['installed']
        }]

    return tlist
|
|
|
|
|
|
|
|
def list_targets(builddata: build.Build, installdata: backends.InstallData, backend: backends.Backend) -> T.List[T.Any]:
    """Return the full description of all top level targets in the build.

    Each entry contains name, id, type, outputs, sources (via the backend's
    introspection data), dependency ids and install information.
    """
    tlist: T.List[T.Any] = []
    build_dir = builddata.environment.get_build_dir()
    src_dir = builddata.environment.get_source_dir()

    # Fast lookup table for installation files
    install_lookuptable = {}
    for i in installdata.targets:
        basename = os.path.basename(i.fname)
        install_lookuptable[basename] = [str(PurePath(installdata.prefix, i.outdir, basename))]
    for s in installdata.symlinks:
        # Symlink's target must already be in the table. They share the same list
        # to support symlinks to symlinks recursively, such as .so -> .so.0 -> .so.1.2.3
        basename = os.path.basename(s.name)
        try:
            install_lookuptable[basename] = install_lookuptable[os.path.basename(s.target)]
            install_lookuptable[basename].append(str(PurePath(installdata.prefix, s.install_path, basename)))
        except KeyError:
            # Dangling symlink: its target is not installed, so skip it.
            pass

    for (idname, target) in builddata.get_targets().items():
        if not isinstance(target, build.Target):
            raise RuntimeError('The target object in `builddata.get_targets()` is not of type `build.Target`. Please file a bug with this error message.')

        outdir = get_target_dir(builddata.environment.coredata, target.subdir)
        t = {
            'name': target.get_basename(),
            'id': idname,
            'type': target.get_typename(),
            'defined_in': os.path.normpath(os.path.join(src_dir, target.subdir, environment.build_filename)),
            'filename': [os.path.join(build_dir, outdir, x) for x in target.get_outputs()],
            'build_by_default': target.build_by_default,
            'target_sources': backend.get_introspection_data(idname, target),
            'extra_files': [os.path.normpath(os.path.join(src_dir, x.subdir, x.fname)) for x in target.extra_files],
            'subproject': target.subproject or None,
            # getattr guards: not every Target subclass carries these attributes.
            'dependencies': [d.name for d in getattr(target, 'external_deps', [])],
            'depends': [lib.get_id() for lib in getattr(target, 'dependencies', [])]
        }

        # Optional, platform-specific fields are only emitted when present.
        vs_module_defs = getattr(target, 'vs_module_defs', None)
        if vs_module_defs is not None:
            t['vs_module_defs'] = vs_module_defs.relative_name()
        win_subsystem = getattr(target, 'win_subsystem', None)
        if win_subsystem is not None:
            t['win_subsystem'] = win_subsystem

        if installdata and target.should_install():
            t['installed'] = True
            ifn = [install_lookuptable.get(x, [None]) for x in target.get_outputs()]
            t['install_filename'] = [x for sublist in ifn for x in sublist] # flatten the list
        else:
            t['installed'] = False
        tlist.append(t)
    return tlist
|
|
|
|
|
|
|
|
def list_buildoptions_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]]:
    """Return build options gathered from source, including per-subproject variants."""
    subproject_names = [sp['name'] for sp in intr.project_data['subprojects']]
    return list_buildoptions(intr.coredata, subproject_names)
|
|
|
|
|
|
|
|
def list_buildoptions(coredata: cdata.CoreData, subprojects: T.Optional[T.List[str]] = None) -> T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]]:
    """Return all build options as JSON-ready dicts, grouped into sections.

    Sections emitted in order: core, backend, base, compiler, directory,
    user (project) and test options. For non-yielding builtin options a
    per-subproject copy is emitted for every name in *subprojects*.
    """
    optlist: T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]] = []
    subprojects = subprojects or []

    dir_option_names = set(options.BUILTIN_DIR_OPTIONS)
    test_option_names = {OptionKey('errorlogs'),
                         OptionKey('stdsplit')}

    dir_options: 'cdata.MutableKeyedOptionDictType' = {}
    test_options: 'cdata.MutableKeyedOptionDictType' = {}
    core_options: 'cdata.MutableKeyedOptionDictType' = {}
    for k, v in coredata.optstore.items():
        if k in dir_option_names:
            dir_options[k] = v
        elif k in test_option_names:
            test_options[k] = v
        elif coredata.optstore.is_builtin_option(k):
            core_options[k] = v
            if not v.yielding:
                # Non-yielding options can differ per subproject, so expose
                # a key for each known subproject as well.
                for s in subprojects:
                    core_options[k.evolve(subproject=s)] = v

    def add_keys(opts: 'T.Union[dict[OptionKey, UserOption[Any]], cdata.KeyedOptionDictType]', section: str) -> None:
        # Append one dict per option to the enclosing optlist, tagging each
        # with its section and deriving the 'type' string from the class.
        for key, opt in sorted(opts.items()):
            optdict = {'name': str(key), 'value': opt.value, 'section': section,
                       'machine': key.machine.get_lower_case_name() if coredata.is_per_machine_option(key) else 'any'}
            if isinstance(opt, options.UserStringOption):
                typestr = 'string'
            elif isinstance(opt, options.UserBooleanOption):
                typestr = 'boolean'
            elif isinstance(opt, options.UserComboOption):
                optdict['choices'] = opt.choices
                typestr = 'combo'
            elif isinstance(opt, options.UserIntegerOption):
                typestr = 'integer'
            elif isinstance(opt, options.UserArrayOption):
                typestr = 'array'
                if opt.choices:
                    optdict['choices'] = opt.choices
            else:
                raise RuntimeError("Unknown option type")
            optdict['type'] = typestr
            optdict['description'] = opt.description
            optlist.append(optdict)

    add_keys(core_options, 'core')
    add_keys({k: v for k, v in coredata.optstore.items() if coredata.optstore.is_backend_option(k)}, 'backend')
    add_keys({k: v for k, v in coredata.optstore.items() if coredata.optstore.is_base_option(k)}, 'base')
    # Compiler options are pre-sorted by machine so host/build variants group together.
    add_keys(
        {k: v for k, v in sorted(coredata.optstore.items(), key=lambda i: i[0].machine) if coredata.optstore.is_compiler_option(k)},
        'compiler',
    )
    add_keys(dir_options, 'directory')
    add_keys({k: v for k, v in coredata.optstore.items() if coredata.optstore.is_project_option(k)}, 'user')
    add_keys(test_options, 'test')
    return optlist
|
|
|
|
|
|
|
|
def find_buildsystem_files_list(src_dir: str) -> T.List[str]:
    """Recursively collect all Meson build definition files under *src_dir*.

    Returns paths relative to *src_dir*. Both the modern ``meson.options``
    and the legacy ``meson_options.txt`` spellings are recognised, in
    addition to ``meson.build`` itself.
    """
    # Defect fixed: commit-message text had been pasted into the function
    # body, making the file a syntax error. The body below is the actual code.
    build_files = frozenset({'meson.build', 'meson.options', 'meson_options.txt'})
    # I feel dirty about this. But only slightly.
    filelist: T.List[str] = []
    for root, _, files in os.walk(src_dir):
        filelist.extend(os.path.relpath(os.path.join(root, f), src_dir)
                        for f in build_files.intersection(files))
    return filelist
|
|
|
|
|
|
|
|
def list_buildsystem_files(builddata: build.Build, interpreter: Interpreter) -> T.List[str]:
    """Return absolute (POSIX-style) paths of every file the build definition reads."""
    source_root = builddata.environment.get_source_dir()
    return [PurePath(source_root, f).as_posix() for f in interpreter.get_build_def_files()]
|
|
|
|
|
|
|
|
def list_compilers(coredata: cdata.CoreData) -> T.Dict[str, T.Dict[str, T.Dict[str, str]]]:
    """Describe the compilers in use, per machine ('host'/'build') and language.

    Each entry records the compiler id, executable lists, suffixes,
    versions and the linker id.
    """
    compilers: T.Dict[str, T.Dict[str, T.Dict[str, str]]] = {}
    for machine in ('host', 'build'):
        compilers[machine] = {}
        for language, compiler in getattr(coredata.compilers, machine).items():
            compilers[machine][language] = {
                'id': compiler.get_id(),
                'exelist': compiler.get_exelist(),
                'linker_exelist': compiler.get_linker_exelist(),
                'file_suffixes': compiler.file_suffixes,
                'default_suffix': compiler.get_default_suffix(),
                'version': compiler.version,
                'full_version': compiler.full_version,
                'linker_id': compiler.get_linker_id(),
            }
    return compilers
|
|
|
|
|
|
|
|
def list_deps_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[str, T.Union[str, bool]]]:
    """Return the dependencies declared in the meson files (no build dir required)."""
    # Only expose a stable subset of the interpreter's dependency records;
    # key order of each record is preserved from the source dict.
    wanted = {'name', 'required', 'version', 'has_fallback', 'conditional'}
    return [{k: v for k, v in dep.items() if k in wanted}
            for dep in intr.dependencies]
|
|
|
|
|
|
|
|
def list_deps(coredata: cdata.CoreData, backend: backends.Backend) -> T.List[T.Dict[str, T.Union[str, T.List[str]]]]:
    """Describe all found external dependencies of the configured build.

    Dependencies found via the coredata cache are merged with dependency
    objects held in meson variables; a dependency reachable through
    several variables lists all of their names in 'meson_variables'.
    """
    result: T.Dict[str, T.Dict[str, T.Union[str, T.List[str]]]] = {}

    def _src_to_str(src_file: T.Union[mesonlib.FileOrString, build.CustomTarget, build.StructuredSources, build.CustomTargetIndex, build.GeneratedList]) -> T.List[str]:
        # Normalise any source representation into a list of path strings.
        if isinstance(src_file, str):
            return [src_file]
        if isinstance(src_file, mesonlib.File):
            return [src_file.absolute_path(backend.source_dir, backend.build_dir)]
        if isinstance(src_file, (build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)):
            return src_file.get_outputs()
        if isinstance(src_file, build.StructuredSources):
            # Recurse into the flattened structured-source entries.
            return [f for s in src_file.as_list() for f in _src_to_str(s)]
        raise mesonlib.MesonBugException(f'Invalid file type {type(src_file)}.')

    def _create_result(d: Dependency, varname: T.Optional[str] = None) -> T.Dict[str, T.Any]:
        # Build the JSON record for one dependency object.
        return {
            'name': d.name,
            'type': d.type_name,
            'version': d.get_version(),
            'compile_args': d.get_compile_args(),
            'link_args': d.get_link_args(),
            'include_directories': [i for idirs in d.get_include_dirs() for i in idirs.to_string_list(backend.source_dir, backend.build_dir)],
            'sources': [f for s in d.get_sources() for f in _src_to_str(s)],
            'extra_files': [f for s in d.get_extra_files() for f in _src_to_str(s)],
            'dependencies': [e.name for e in d.ext_deps],
            'depends': [lib.get_id() for lib in getattr(d, 'libraries', [])],
            'meson_variables': [varname] if varname else [],
        }

    for d in coredata.deps.host.values():
        if d.found():
            result[d.name] = _create_result(d)

    # Also scan interpreter variables so dependencies bound to variables are
    # reported even when they did not go through the coredata cache.
    for varname, holder in backend.interpreter.variables.items():
        if isinstance(holder, ObjectHolder):
            d = holder.held_object
            if isinstance(d, Dependency) and d.found():
                if d.name in result:
                    T.cast('T.List[str]', result[d.name]['meson_variables']).append(varname)
                else:
                    result[d.name] = _create_result(d, varname)

    return list(result.values())
|
|
|
|
|
|
|
|
def get_test_list(testdata: T.List[backends.TestSerialisation]) -> T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]:
    """Convert serialised tests/benchmarks into JSON-ready dicts."""
    result: T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]] = []
    for t in testdata:
        to: T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]] = {}
        # fname may be a single string or a list; normalise to a list so it
        # can be concatenated with the command arguments.
        if isinstance(t.fname, str):
            fname = [t.fname]
        else:
            fname = t.fname
        to['cmd'] = fname + t.cmd_args
        if isinstance(t.env, mesonlib.EnvironmentVariables):
            # Resolve the environment object into a plain dict for JSON.
            to['env'] = t.env.get_env({})
        else:
            to['env'] = t.env
        to['name'] = t.name
        to['workdir'] = t.workdir
        to['timeout'] = t.timeout
        to['suite'] = t.suite
        to['is_parallel'] = t.is_parallel
        to['priority'] = t.priority
        to['protocol'] = str(t.protocol)
        to['depends'] = t.depends
        to['extra_paths'] = t.extra_paths
        result.append(to)
    return result
|
|
|
|
|
|
|
|
def list_tests(testdata: T.List[backends.TestSerialisation]) -> T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]:
    """Return the JSON-ready description of all unit tests."""
    return get_test_list(testdata)
|
|
|
|
|
|
|
|
def list_benchmarks(benchdata: T.List[backends.TestSerialisation]) -> T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]:
    """Return the JSON-ready description of all benchmarks."""
    return get_test_list(benchdata)
|
|
|
|
|
|
|
|
def list_machines(builddata: build.Build) -> T.Dict[str, T.Dict[str, T.Union[str, bool]]]:
    """Return information about the host, build, and target machines."""
    machines: T.Dict[str, T.Dict[str, T.Union[str, bool]]] = {}
    for name in ('host', 'build', 'target'):
        machine = getattr(builddata.environment.machines, name)
        info = dataclasses.asdict(machine)
        # These are derived values rather than dataclass fields, so they
        # must be added to the serialised dict by hand.
        info['is_64_bit'] = machine.is_64_bit
        info['exe_suffix'] = machine.get_exe_suffix()
        info['object_suffix'] = machine.get_object_suffix()
        machines[name] = info
    return machines
|
|
|
|
|
|
|
|
def list_projinfo(builddata: build.Build) -> T.Dict[str, T.Union[str, T.List[str], T.List[T.Dict[str, str]]]]:
    """Return project metadata: name, version, license and subprojects."""
    result: T.Dict[str, T.Union[str, T.List[str], T.List[T.Dict[str, str]]]] = {
        'version': builddata.project_version,
        'descriptive_name': builddata.project_name,
        # License information lives in the dependency manifest of the main project.
        'license': builddata.dep_manifest[builddata.project_name].license,
        'license_files': [f[1].fname for f in builddata.dep_manifest[builddata.project_name].license_files],
        'subproject_dir': builddata.subproject_dir,
    }
    subprojects = []
    for k, v in builddata.subprojects.items():
        c: T.Dict[str, str] = {
            'name': k,
            'version': v,
            'descriptive_name': builddata.projects.get(k),
        }
        subprojects.append(c)
    result['subprojects'] = subprojects
    return result
|
|
|
|
|
|
|
|
def list_projinfo_from_source(intr: IntrospectionInterpreter) -> T.Dict[str, T.Union[str, T.List[T.Dict[str, str]]]]:
    """Return project metadata gathered from source only.

    NOTE: this mutates ``intr.project_data`` in place, attaching the
    buildsystem file lists for the main project and each subproject.
    """
    sourcedir = intr.source_root
    files = find_buildsystem_files_list(sourcedir)
    files = [os.path.normpath(x) for x in files]

    for i in intr.project_data['subprojects']:
        # Files under a subproject's directory belong to that subproject;
        # remove them from the main project's list as we go.
        basedir = os.path.join(intr.subproject_dir, i['name'])
        i['buildsystem_files'] = [x for x in files if x.startswith(basedir)]
        files = [x for x in files if not x.startswith(basedir)]

    intr.project_data['buildsystem_files'] = files
    intr.project_data['subproject_dir'] = intr.subproject_dir
    return intr.project_data
|
|
|
|
|
|
|
|
def print_results(options: argparse.Namespace, results: T.Sequence[T.Tuple[str, T.Union[dict, T.List[T.Any]]]], indent: T.Optional[int]) -> int:
    """Print the collected introspection results as JSON on stdout.

    Returns 0 on success, 1 when nothing was requested. A single result is
    printed bare (legacy format) unless --force-object-output was given.
    """
    if not results and not options.force_dict:
        print('No command specified')
        return 1
    if len(results) == 1 and not options.force_dict:
        # Keep the legacy output format when exactly one command was requested.
        print(json.dumps(results[0][1], indent=indent))
    else:
        print(json.dumps(dict(results), indent=indent))
    return 0
|
|
|
|
|
|
|
|
def get_infodir(builddir: T.Optional[str] = None) -> str:
    """Return the meson-info directory, optionally inside *builddir*."""
    if builddir is None:
        return 'meson-info'
    return os.path.join(builddir, 'meson-info')
|
|
|
|
|
|
|
|
def get_info_file(infodir: str, kind: T.Optional[str] = None) -> str:
    """Path of the intro file for *kind*, or the main meson-info.json when kind is None."""
    filename = f'intro-{kind}.json' if kind else 'meson-info.json'
    return os.path.join(infodir, filename)
|
|
|
|
|
|
|
|
def load_info_file(infodir: str, kind: T.Optional[str] = None) -> T.Any:
    """Load and parse one of the meson-info JSON files.

    Raises FileNotFoundError when the file does not exist.
    """
    path = get_info_file(infodir, kind)
    with open(path, encoding='utf-8') as fp:
        return json.load(fp)
|
|
|
|
|
|
|
|
def run(options: argparse.Namespace) -> int:
    """Entry point for `meson introspect`.

    Either analyses a meson.build from source (when the build dir argument
    points at a meson.build file) or reads the pre-generated intro-*.json
    files from a configured build directory. Returns a process exit code.
    """
    datadir = 'meson-private'
    infodir = get_infodir(options.builddir)
    if options.builddir is not None:
        datadir = os.path.join(options.builddir, datadir)
    indent = 4 if options.indent else None
    results: T.List[T.Tuple[str, T.Union[dict, T.List[T.Any]]]] = []
    # Strip the trailing '/meson.build' (11 chars) to get the source dir;
    # only meaningful in the from-source branch below.
    sourcedir = '.' if options.builddir == 'meson.build' else options.builddir[:-11]
    intro_types = get_meson_introspection_types()

    # From-source mode: the 'builddir' argument is actually a meson.build file.
    if 'meson.build' in [os.path.basename(options.builddir), options.builddir]:
        # Make sure that log entries in other parts of meson don't interfere with the JSON output
        with redirect_stdout(sys.stderr):
            backend = backends.get_backend_from_name(options.backend)
            assert backend is not None
            intr = IntrospectionInterpreter(sourcedir, '', backend.name, visitors = [AstIDGenerator(), AstIndentationGenerator(), AstConditionLevel()])
            intr.analyze()

        for key, val in intro_types.items():
            # Only run commands that were requested and have a from-source
            # implementation.
            if (not options.all and not getattr(options, key, False)) or not val.no_bd:
                continue
            results += [(key, val.no_bd(intr))]
        return print_results(options, results, indent)

    try:
        raw = load_info_file(infodir)
        intro_vers = raw.get('introspection', {}).get('version', {}).get('full', '0.0.0')
    except FileNotFoundError:
        if not os.path.isdir(datadir) or not os.path.isdir(infodir):
            print('Current directory is not a meson build directory.\n'
                  'Please specify a valid build dir or change the working directory to it.')
        else:
            print('Introspection file {} does not exist.\n'
                  'It is also possible that the build directory was generated with an old\n'
                  'meson version. Please regenerate it in this case.'.format(get_info_file(infodir)))
        return 1

    # Reject dumps written by an incompatible introspection format version.
    vers_to_check = get_meson_introspection_required_version()
    for i in vers_to_check:
        if not mesonlib.version_compare(intro_vers, i):
            print('Introspection version {} is not supported. '
                  'The required version is: {}'
                  .format(intro_vers, ' and '.join(vers_to_check)))
            return 1

    # Extract introspection information from JSON
    for i, v in intro_types.items():
        if not v.func:
            continue
        if not options.all and not getattr(options, i, False):
            continue
        try:
            results += [(i, load_info_file(infodir, i))]
        except FileNotFoundError:
            print('Introspection file {} does not exist.'.format(get_info_file(infodir, i)))
            return 1

    return print_results(options, results, indent)
|
|
|
|
|
|
|
|
# Registry of introspection kinds rewritten during the current meson
# invocation; appended to by write_intro_info() and reported in the
# 'updated' flags written by write_meson_info_file().
updated_introspection_files: T.List[str] = []
|
|
|
|
|
|
|
|
def write_intro_info(intro_info: T.Sequence[T.Tuple[str, T.Union[dict, T.List[T.Any]]]], info_dir: str) -> None:
    """Write one intro-<kind>.json file per (kind, data) pair into *info_dir*.

    Each file is written to a temp file first and then moved into place with
    os.replace() so readers never observe a partially written file. Every
    written kind is also recorded in the module-level
    ``updated_introspection_files`` list.
    """
    for kind, data in intro_info:
        out_file = os.path.join(info_dir, f'intro-{kind}.json')
        tmp_file = os.path.join(info_dir, 'tmp_dump.json')
        with open(tmp_file, 'w', encoding='utf-8') as fp:
            json.dump(data, fp, indent=2)
            fp.flush() # Not sure if this is needed
        os.replace(tmp_file, out_file)
        updated_introspection_files.append(kind)
|
|
|
|
|
|
|
|
def generate_introspection_file(builddata: build.Build, backend: backends.Backend) -> None:
    """Generate every build-dir based intro-*.json file for the given build."""
    coredata = builddata.environment.get_coredata()
    intro_types = get_meson_introspection_types(coredata=coredata, builddata=builddata, backend=backend)
    # Only commands that have a build-dir implementation produce a file.
    intro_info: T.List[T.Tuple[str, T.Union[dict, T.List[T.Any]]]] = [
        (key, val.func()) for key, val in intro_types.items() if val.func
    ]
    write_intro_info(intro_info, builddata.environment.info_dir)
|
|
|
|
|
|
|
|
def update_build_options(coredata: cdata.CoreData, info_dir: str) -> None:
    """Regenerate only the intro-buildoptions.json file from current options."""
    write_intro_info([('buildoptions', list_buildoptions(coredata))], info_dir)
|
|
|
|
|
|
|
|
def split_version_string(version: str) -> T.Dict[str, T.Union[str, int]]:
    """Split a dotted version string into full/major/minor/patch fields.

    Missing components default to 0, e.g. '1.2' -> major 1, minor 2, patch 0.
    """
    parts = version.split('.')
    # Pad with zeros so short version strings still produce all three ints.
    major, minor, patch = (int(p) for p in (parts + ['0', '0', '0'])[:3])
    return {
        'full': version,
        'major': major,
        'minor': minor,
        'patch': patch
    }
|
|
|
|
|
|
|
|
def write_meson_info_file(builddata: build.Build, errors: list, build_files_updated: bool = False) -> None:
    """Write the top-level meson-info.json describing this build directory.

    Records meson version, source/build/info directories, the available
    introspection files (with an 'updated' flag from the module-level
    registry), and any configuration errors. The file is written via a
    temp file and os.replace() for atomicity.
    """
    info_dir = builddata.environment.info_dir
    info_file = get_meson_info_file(info_dir)
    intro_types = get_meson_introspection_types()
    intro_info = {}

    for i, v in intro_types.items():
        # Only commands with a build-dir implementation have a file on disk.
        if not v.func:
            continue
        intro_info[i] = {
            'file': f'intro-{i}.json',
            'updated': i in updated_introspection_files
        }

    info_data = {
        'meson_version': split_version_string(cdata.version),
        'directories': {
            'source': builddata.environment.get_source_dir(),
            'build': builddata.environment.get_build_dir(),
            'info': info_dir,
        },
        'introspection': {
            'version': split_version_string(get_meson_introspection_version()),
            'information': intro_info,
        },
        'build_files_updated': build_files_updated,
    }

    if errors:
        info_data['error'] = True
        # Errors may be arbitrary objects; coerce everything to strings.
        info_data['error_list'] = [x if isinstance(x, str) else str(x) for x in errors]
    else:
        info_data['error'] = False

    # Write the data to disc
    tmp_file = os.path.join(info_dir, 'tmp_dump.json')
    with open(tmp_file, 'w', encoding='utf-8') as fp:
        json.dump(info_data, fp)
        fp.flush()
    os.replace(tmp_file, info_file)
|