various manual conversions of percent-formatted strings to f-strings

pull/9517/head
Eli Schwartz 3 years ago
parent bae0fdf64a
commit 038b31e72b
No known key found for this signature in database
GPG Key ID: CEB167EFB5722BD6
  1. mesonbuild/backend/ninjabackend.py (2 changed lines)
  2. mesonbuild/backend/vs2010backend.py (21 changed lines)
  3. mesonbuild/compilers/cpp.py (4 changed lines)
  4. mesonbuild/dependencies/detect.py (4 changed lines)
  5. mesonbuild/dependencies/pkgconfig.py (26 changed lines)
  6. mesonbuild/dependencies/qt.py (2 changed lines)
  7. mesonbuild/interpreter/compiler.py (2 changed lines)
  8. mesonbuild/interpreter/interpreter.py (44 changed lines)
  9. mesonbuild/interpreter/interpreterobjects.py (10 changed lines)
  10. mesonbuild/interpreterbase/interpreterbase.py (10 changed lines)
  11. mesonbuild/mesonlib/universal.py (2 changed lines)
  12. mesonbuild/modules/gnome.py (20 changed lines)
  13. mesonbuild/modules/hotdoc.py (13 changed lines)
  14. mesonbuild/modules/pkgconfig.py (10 changed lines)
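
The change is mechanical throughout: each %-formatted string in the hunks below is replaced by the equivalent f-string, leaving the rendered text unchanged. A minimal standalone sketch of the rewrite (variable names invented for illustration, not taken from the diff):

name, directory = 'zlib', '/usr/lib'

# before: old-style %-formatting
message = 'dependency %s not found in %s' % (name, directory)

# after: the equivalent f-string produces the same text
message = f'dependency {name} not found in {directory}'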

@@ -368,7 +368,7 @@ class NinjaBuildElement:
use_rspfile = self._should_use_rspfile()
if use_rspfile:
rulename = self.rulename + '_RSP'
mlog.debug("Command line for building %s is long, using a response file" % self.outfilenames)
mlog.debug(f'Command line for building {self.outfilenames} is long, using a response file')
else:
rulename = self.rulename
line = f'build {outs}{implicit_outs}: {rulename} {ins}'

@@ -293,7 +293,7 @@ class Vs2010Backend(backends.Backend):
for obj_id, objdep in self.get_obj_target_deps(target.objects):
all_deps[obj_id] = objdep
else:
raise MesonException('Unknown target type for target %s' % target)
raise MesonException(f'Unknown target type for target {target}')
for gendep in target.get_generated_sources():
if isinstance(gendep, build.CustomTarget):
@@ -653,7 +653,7 @@ class Vs2010Backend(backends.Backend):
return 'c'
if ext in compilers.cpp_suffixes:
return 'cpp'
raise MesonException('Could not guess language from source file %s.' % src)
raise MesonException(f'Could not guess language from source file {src}.')
def add_pch(self, pch_sources, lang, inc_cl):
if lang in pch_sources:
@@ -684,13 +684,13 @@ class Vs2010Backend(backends.Backend):
# or be in the same directory as the PCH implementation.
pch_file.text = header
pch_out = ET.SubElement(inc_cl, 'PrecompiledHeaderOutputFile')
pch_out.text = '$(IntDir)$(TargetName)-%s.pch' % lang
pch_out.text = f'$(IntDir)$(TargetName)-{lang}.pch'
# Need to set the name for the pdb, as cl otherwise gives it a static
# name. Which leads to problems when there is more than one pch
# (e.g. for different languages).
pch_pdb = ET.SubElement(inc_cl, 'ProgramDataBaseFileName')
pch_pdb.text = '$(IntDir)$(TargetName)-%s.pdb' % lang
pch_pdb.text = f'$(IntDir)$(TargetName)-{lang}.pdb'
return header
@@ -819,7 +819,7 @@ class Vs2010Backend(backends.Backend):
replace_if_different(ofname, ofname_tmp)
def gen_vcxproj(self, target, ofname, guid):
mlog.debug('Generating vcxproj %s.' % target.name)
mlog.debug(f'Generating vcxproj {target.name}.')
subsystem = 'Windows'
self.handled_target_deps[target.get_id()] = []
if isinstance(target, build.Executable):
@@ -840,7 +840,7 @@ class Vs2010Backend(backends.Backend):
elif isinstance(target, build.RunTarget):
return self.gen_run_target_vcxproj(target, ofname, guid)
else:
raise MesonException('Unknown target type for %s' % target.get_basename())
raise MesonException(f'Unknown target type for {target.get_basename()}')
# Prefix to use to access the build root from the vcxproj dir
down = self.target_to_build_root(target)
# Prefix to use to access the source tree's root from the vcxproj dir
@@ -1261,7 +1261,7 @@ class Vs2010Backend(backends.Backend):
additional_links.append('%(AdditionalDependencies)')
ET.SubElement(link, 'AdditionalDependencies').text = ';'.join(additional_links)
ofile = ET.SubElement(link, 'OutputFile')
ofile.text = '$(OutDir)%s' % target.get_filename()
ofile.text = f'$(OutDir){target.get_filename()}'
subsys = ET.SubElement(link, 'SubSystem')
subsys.text = subsystem
if (isinstance(target, build.SharedLibrary) or isinstance(target, build.Executable)) and target.get_import_filename():
@@ -1275,7 +1275,7 @@ class Vs2010Backend(backends.Backend):
ET.SubElement(link, 'ModuleDefinitionFile').text = relpath
if self.debug:
pdb = ET.SubElement(link, 'ProgramDataBaseFileName')
pdb.text = '$(OutDir)%s.pdb' % target_name
pdb.text = f'$(OutDir){target_name}.pdb'
targetmachine = ET.SubElement(link, 'TargetMachine')
if target.for_machine is MachineChoice.BUILD:
targetplatform = platform
@@ -1474,8 +1474,8 @@ class Vs2010Backend(backends.Backend):
message.text = msg
if not verify_files:
ET.SubElement(custombuild, 'VerifyInputsAndOutputsExist').text = 'false'
cmd_templ = '''setlocal
%s
ET.SubElement(custombuild, 'Command').text = f'''setlocal
{command}
if %%errorlevel%% neq 0 goto :cmEnd
:cmEnd
endlocal & call :cmErrorLevel %%errorlevel%% & goto :cmDone
@@ -1483,7 +1483,6 @@ endlocal & call :cmErrorLevel %%errorlevel%% & goto :cmDone
exit /b %%1
:cmDone
if %%errorlevel%% neq 0 goto :VCEnd'''
ET.SubElement(custombuild, 'Command').text = cmd_templ % command
if not outputs:
# Use a nonexistent file to always consider the target out-of-date.
outputs = [self.nonexistent_file(os.path.join(self.environment.get_scratch_dir(),
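
When converting multi-line templates like the one above, keep in mind that the two styles escape differently: under %-formatting a doubled '%%' renders as a single '%', whereas in an f-string '%' has no special meaning but literal braces must be doubled as '{{' and '}}'. A small standalone illustration (assumed example, not code from the diff):

pct = 50

# %-formatting: '%%' collapses to one '%'
print('progress: %d%%' % pct)      # progress: 50%

# f-string: '%' is already literal, but braces need doubling
print(f'progress: {pct}%')         # progress: 50%
print(f'a set literal: {{1, 2}}')  # a set literal: {1, 2}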

@@ -815,7 +815,7 @@ class CcrxCPPCompiler(CcrxCompiler, CPPCompiler):
return []
def get_output_args(self, target: str) -> T.List[str]:
return ['-output=obj=%s' % target]
return [f'-output=obj={target}']
def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
return []
@@ -848,7 +848,7 @@ class C2000CPPCompiler(C2000Compiler, CPPCompiler):
return []
def get_output_args(self, target: str) -> T.List[str]:
return ['-output=obj=%s' % target]
return [f'-output=obj={target}']
def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
return []

@@ -159,8 +159,8 @@ def find_external_dependency(name: str, env: 'Environment', kwargs: T.Dict[str,
# we have a list of failed ExternalDependency objects, so we can report
# the methods we tried to find the dependency
raise DependencyException('Dependency "%s" not found' % (name) +
(', tried %s' % (tried) if tried else ''))
raise DependencyException(f'Dependency "{name}" not found' +
(f', tried {tried}' if tried else ''))
return NotFoundDependency(name, env)

@@ -46,12 +46,12 @@ class PkgConfigDependency(ExternalDependency):
# Only search for pkg-config for each machine the first time and store
# the result in the class definition
if PkgConfigDependency.class_pkgbin[self.for_machine] is False:
mlog.debug('Pkg-config binary for %s is cached as not found.' % self.for_machine)
mlog.debug(f'Pkg-config binary for {self.for_machine} is cached as not found.')
elif PkgConfigDependency.class_pkgbin[self.for_machine] is not None:
mlog.debug('Pkg-config binary for %s is cached.' % self.for_machine)
mlog.debug(f'Pkg-config binary for {self.for_machine} is cached.')
else:
assert PkgConfigDependency.class_pkgbin[self.for_machine] is None
mlog.debug('Pkg-config binary for %s is not cached.' % self.for_machine)
mlog.debug(f'Pkg-config binary for {self.for_machine} is not cached.')
for potential_pkgbin in find_external_program(
self.env, self.for_machine, 'pkgconfig', 'Pkg-config',
environment.default_pkgconfig, allow_default_for_cross=False):
@@ -60,7 +60,7 @@ class PkgConfigDependency(ExternalDependency):
continue
if not self.silent:
mlog.log('Found pkg-config:', mlog.bold(potential_pkgbin.get_path()),
'(%s)' % version_if_ok)
f'({version_if_ok})')
PkgConfigDependency.class_pkgbin[self.for_machine] = potential_pkgbin
break
else:
@@ -73,7 +73,7 @@ class PkgConfigDependency(ExternalDependency):
self.pkgbin = PkgConfigDependency.class_pkgbin[self.for_machine]
if self.pkgbin is False:
self.pkgbin = None
msg = 'Pkg-config binary for machine %s not found. Giving up.' % self.for_machine
msg = f'Pkg-config binary for machine {self.for_machine} not found. Giving up.'
if self.required:
raise DependencyException(msg)
else:
@@ -202,8 +202,7 @@ class PkgConfigDependency(ExternalDependency):
env['PKG_CONFIG_ALLOW_SYSTEM_CFLAGS'] = '1'
ret, out, err = self._call_pkgbin(['--cflags', self.name], env=env)
if ret != 0:
raise DependencyException('Could not generate cargs for %s:\n%s\n' %
(self.name, err))
raise DependencyException(f'Could not generate cargs for {self.name}:\n{err}\n')
self.compile_args = self._convert_mingw_paths(self._split_args(out))
def _search_libs(self, out: str, out_raw: str) -> T.Tuple[T.List[str], T.List[str]]:
@@ -342,9 +341,9 @@ class PkgConfigDependency(ExternalDependency):
shared_lib = os.path.join(os.path.dirname(lib), ".libs", shared_libname)
if not os.path.exists(shared_lib):
raise DependencyException('Got a libtools specific "%s" dependencies'
raise DependencyException(f'Got a libtools specific "{lib}" dependencies'
'but we could not compute the actual shared'
'library path' % lib)
'library path')
self.is_libtool = True
lib = shared_lib
if lib in link_args:
@@ -372,15 +371,13 @@ class PkgConfigDependency(ExternalDependency):
env['PKG_CONFIG_ALLOW_SYSTEM_LIBS'] = '1'
ret, out, err = self._call_pkgbin(libcmd, env=env)
if ret != 0:
raise DependencyException('Could not generate libs for %s:\n%s\n' %
(self.name, err))
raise DependencyException(f'Could not generate libs for {self.name}:\n{err}\n')
# Also get the 'raw' output without -Lfoo system paths for adding -L
# args with -lfoo when a library can't be found, and also in
# gnome.generate_gir + gnome.gtkdoc which need -L -l arguments.
ret, out_raw, err_raw = self._call_pkgbin(libcmd)
if ret != 0:
raise DependencyException('Could not generate libs for %s:\n\n%s' %
(self.name, out_raw))
raise DependencyException(f'Could not generate libs for {self.name}:\n\n{out_raw}')
self.link_args, self.raw_link_args = self._search_libs(out, out_raw)
def get_pkgconfig_variable(self, variable_name: str, kwargs: T.Dict[str, T.Union[str, T.List[str]]]) -> str:
@@ -400,8 +397,7 @@ class PkgConfigDependency(ExternalDependency):
variable = ''
if ret != 0:
if self.required:
raise DependencyException('dependency %s not found:\n%s\n' %
(self.name, err))
raise DependencyException(f'dependency {self.name} not found:\n{err}\n')
else:
variable = out.strip()

@@ -387,7 +387,7 @@ class Qt4PkgConfigDependency(QtPkgConfigDependency):
applications = ['moc', 'uic', 'rcc', 'lupdate', 'lrelease']
for application in applications:
try:
return os.path.dirname(core.get_pkgconfig_variable('%s_location' % application, {}))
return os.path.dirname(core.get_pkgconfig_variable(f'{application}_location', {}))
except mesonlib.MesonException:
pass
return None

@@ -288,7 +288,7 @@ class CompilerHolder(ObjectHolder['Compiler']):
elif result.returncode == 0:
h = mlog.green('YES')
else:
h = mlog.red('NO (%d)' % result.returncode)
h = mlog.red(f'NO ({result.returncode})')
mlog.log('Checking if', mlog.bold(testname, True), msg, 'runs:', h)
return result

@@ -584,7 +584,7 @@ class Interpreter(InterpreterBase, HoldableObject):
mlog.warning(f'Module {modname} is now stable, please use the {plainname} module instead.')
return mod
except InvalidArguments:
mlog.warning('Module %s has no backwards or forwards compatibility and might not exist in future releases.' % modname, location=node)
mlog.warning(f'Module {modname} has no backwards or forwards compatibility and might not exist in future releases.', location=node)
modname = 'unstable_' + plainname
return self._import_module(modname, required)
@@ -671,8 +671,7 @@ external dependencies (including libraries) must go to "dependencies".''')
def validate_arguments(self, args, argcount, arg_types):
if argcount is not None:
if argcount != len(args):
raise InvalidArguments('Expected %d arguments, got %d.' %
(argcount, len(args)))
raise InvalidArguments(f'Expected {argcount} arguments, got {len(args)}.')
for actual, wanted in zip(args, arg_types):
if wanted is not None:
if not isinstance(actual, wanted):
@@ -798,11 +797,11 @@ external dependencies (including libraries) must go to "dependencies".''')
if subp_name in self.subproject_stack:
fullstack = self.subproject_stack + [subp_name]
incpath = ' => '.join(fullstack)
raise InvalidCode('Recursive include of subprojects: %s.' % incpath)
raise InvalidCode(f'Recursive include of subprojects: {incpath}.')
if subp_name in self.subprojects:
subproject = self.subprojects[subp_name]
if required and not subproject.found():
raise InterpreterException('Subproject "%s" required but not found.' % (subproject.subdir))
raise InterpreterException(f'Subproject "{subproject.subdir}" required but not found.')
return subproject
r = self.environment.wrap_resolver
@@ -968,7 +967,7 @@ external dependencies (including libraries) must go to "dependencies".''')
except KeyError:
pass
raise InterpreterException('Tried to access unknown option "%s".' % optname)
raise InterpreterException(f'Tried to access unknown option {optname!r}.')
@typed_pos_args('get_option', str)
@noKwargs
@@ -1006,7 +1005,7 @@ external dependencies (including libraries) must go to "dependencies".''')
self.backend = backends.get_backend_from_name(backend, self.build, self)
if self.backend is None:
raise InterpreterException('Unknown backend "%s".' % backend)
raise InterpreterException(f'Unknown backend "{backend}".')
if backend != self.backend.name:
if self.backend.name.startswith('vs'):
mlog.log('Auto detected Visual Studio backend:', mlog.bold(self.backend.name))
@@ -1302,7 +1301,7 @@ external dependencies (including libraries) must go to "dependencies".''')
try:
comp = compilers.detect_compiler_for(self.environment, lang, for_machine)
if comp is None:
raise InvalidArguments('Tried to use unknown language "%s".' % lang)
raise InvalidArguments(f'Tried to use unknown language "{lang}".')
if self.should_skip_sanity_check(for_machine):
mlog.log_once('Cross compiler sanity tests disabled via the cross file.')
else:
@@ -1731,8 +1730,8 @@ external dependencies (including libraries) must go to "dependencies".''')
try:
kwargs['input'] = self.source_strings_to_files(extract_as_list(kwargs, 'input'))
except mesonlib.MesonException:
mlog.warning('''Custom target input \'%s\' can\'t be converted to File object(s).
This will become a hard error in the future.''' % kwargs['input'], location=self.current_node)
mlog.warning(f'''Custom target input '{kwargs['input']}' can't be converted to File object(s).
This will become a hard error in the future.''', location=self.current_node)
kwargs['env'] = self.unpack_env_kwarg(kwargs)
if 'command' in kwargs and isinstance(kwargs['command'], list) and kwargs['command']:
if isinstance(kwargs['command'][0], str):
@@ -1955,8 +1954,8 @@ This will become a hard error in the future.''' % kwargs['input'], location=self
symlinkless_dir = os.path.realpath(absdir)
build_file = os.path.join(symlinkless_dir, 'meson.build')
if build_file in self.processed_buildfiles:
raise InvalidArguments('Tried to enter directory "%s", which has already been visited.'
% subdir)
raise InvalidArguments(f'Tried to enter directory "{subdir}", which has already been visited.')
self.processed_buildfiles.add(build_file)
self.subdir = subdir
os.makedirs(os.path.join(self.environment.build_dir, subdir), exist_ok=True)
@@ -2182,9 +2180,8 @@ This will become a hard error in the future.''' % kwargs['input'], location=self
if missing_variables:
var_list = ", ".join(map(repr, sorted(missing_variables)))
mlog.warning(
"The variable(s) %s in the input file '%s' are not "
"present in the given configuration data." % (
var_list, inputs[0]), location=node)
f"The variable(s) {var_list} in the input file '{inputs[0]}' are not "
"present in the given configuration data.", location=node)
if confdata_useless:
ifbase = os.path.basename(inputs_abs[0])
mlog.warning('Got an empty configuration_data() object and found no '
@@ -2209,8 +2206,7 @@ This will become a hard error in the future.''' % kwargs['input'], location=self
mlog.log('Configuring', mlog.bold(output), 'with command')
res = self.run_command_impl(node, cmd, {}, True)
if res.returncode != 0:
raise InterpreterException('Running configure command failed.\n%s\n%s' %
(res.stdout, res.stderr))
raise InterpreterException(f'Running configure command failed.\n{res.stdout}\n{res.stderr}')
if 'capture' in kwargs and kwargs['capture']:
dst_tmp = ofile_abs + '~'
file_encoding = kwargs.setdefault('encoding', 'utf-8')
@@ -2343,7 +2339,7 @@ This will become a hard error in the future.''' % kwargs['input'], location=self
absdir_src = os.path.join(absbase_src, a)
absdir_build = os.path.join(absbase_build, a)
if not os.path.isdir(absdir_src) and not os.path.isdir(absdir_build):
raise InvalidArguments('Include dir %s does not exist.' % a)
raise InvalidArguments(f'Include dir {a} does not exist.')
i = build.IncludeDirs(self.subdir, incdir_strings, is_system)
return i
@@ -2383,8 +2379,8 @@ This will become a hard error in the future.''' % kwargs['input'], location=self
raise InterpreterException('is_default option must be a boolean')
if is_default:
if self.build.test_setup_default_name is not None:
raise InterpreterException('\'%s\' is already set as default. '
'is_default can be set to true only once' % self.build.test_setup_default_name)
raise InterpreterException(f'{self.build.test_setup_default_name!r} is already set as default. '
'is_default can be set to true only once')
self.build.test_setup_default_name = setup_name
exclude_suites = mesonlib.stringlistify(kwargs.get('exclude_suites', []))
env = self.unpack_env_kwarg(kwargs)
@@ -2599,13 +2595,13 @@ Try setting b_lundef to false instead.'''.format(self.coredata.options[OptionKey
raise InvalidArguments("Target names starting with 'meson-' are reserved "
"for Meson's internal use. Please rename.")
if name in coredata.FORBIDDEN_TARGET_NAMES:
raise InvalidArguments("Target name '%s' is reserved for Meson's "
"internal use. Please rename." % name)
raise InvalidArguments(f"Target name '{name}' is reserved for Meson's "
"internal use. Please rename.")
# To permit an executable and a shared library to have the
# same name, such as "foo.exe" and "libfoo.a".
idname = tobj.get_id()
if idname in self.build.targets:
raise InvalidCode('Tried to create target "%s", but a target of that name already exists.' % name)
raise InvalidCode(f'Tried to create target "{name}", but a target of that name already exists.')
self.build.targets[idname] = tobj
if idname not in self.coredata.target_guids:
self.coredata.target_guids[idname] = str(uuid.uuid4()).upper()
@@ -2738,7 +2734,7 @@ This will become a hard error in the future.''', location=self.current_node)
continue # This means a generated source and they always exist.
fname = os.path.join(subdir, s)
if not os.path.isfile(fname):
raise InterpreterException('Tried to add non-existing source file %s.' % s)
raise InterpreterException(f'Tried to add non-existing source file {s}.')
# Only permit object extraction from the same subproject
def validate_extraction(self, buildtarget: mesonlib.HoldableObject) -> None:

@@ -127,7 +127,7 @@ class FeatureOptionHolder(ObjectHolder[coredata.UserFeatureOption]):
@permittedKwargs({'error_message'})
def require_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> coredata.UserFeatureOption:
if len(args) != 1:
raise InvalidArguments('Expected 1 argument, got %d.' % (len(args), ))
raise InvalidArguments(f'Expected 1 argument, got {len(args)}.')
if not isinstance(args[0], bool):
raise InvalidArguments('boolean argument expected.')
error_message = kwargs.pop('error_message', '')
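
As an aside on the hunk above: with %-formatting the arguments are usually passed as a tuple (hence the trailing comma in (len(args), )), while the f-string embeds the expression directly. A standalone illustration with invented values:

args = ['x', 'y']

# before: one-element tuple passed to %-formatting
print('Expected 1 argument, got %d.' % (len(args),))

# after: the expression sits inside the f-string itself
print(f'Expected 1 argument, got {len(args)}.')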
@@ -146,7 +146,7 @@ class FeatureOptionHolder(ObjectHolder[coredata.UserFeatureOption]):
@noKwargs
def disable_auto_if_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> coredata.UserFeatureOption:
if len(args) != 1:
raise InvalidArguments('Expected 1 argument, got %d.' % (len(args), ))
raise InvalidArguments(f'Expected 1 argument, got {len(args)}.')
if not isinstance(args[0], bool):
raise InvalidArguments('boolean argument expected.')
return copy.deepcopy(self.held_object) if self.value != 'auto' or not args[0] else self.as_disabled()
@@ -374,7 +374,7 @@ class ConfigurationDataObject(MutableInterpreterObject, MesonInterpreterObject):
# TODO: Fix this once the deprecation is removed
# assert isinstance(args[1], (int, str, bool))
return T.cast(T.Union[str, int, bool], args[1])
raise InterpreterException('Entry %s not in configuration data.' % name)
raise InterpreterException(f'Entry {name} not in configuration data.')
@FeatureNew('configuration_data.get_unquoted()', '0.44.0')
def get_unquoted_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.Union[str, int, bool]:
@@ -389,7 +389,7 @@ class ConfigurationDataObject(MutableInterpreterObject, MesonInterpreterObject):
assert isinstance(args[1], (str, int, bool))
val = args[1]
else:
raise InterpreterException('Entry %s not in configuration data.' % name)
raise InterpreterException(f'Entry {name} not in configuration data.')
if isinstance(val, str) and val[0] == '"' and val[-1] == '"':
return val[1:-1]
return val
@@ -720,7 +720,7 @@ class SubprojectHolder(MesonInterpreterObject):
if len(args) < 1 or len(args) > 2:
raise InterpreterException('Get_variable takes one or two arguments.')
if isinstance(self.held_object, NullSubprojectInterpreter): # == not self.found()
raise InterpreterException('Subproject "%s" disabled can\'t get_variable on it.' % (self.subdir))
raise InterpreterException(f'Subproject "{self.subdir}" disabled can\'t get_variable on it.')
varname = args[0]
if not isinstance(varname, str):
raise InterpreterException('Get_variable first argument must be a string.')

@@ -98,7 +98,7 @@ class InterpreterBase:
def load_root_meson_file(self) -> None:
mesonfile = os.path.join(self.source_root, self.subdir, environment.build_filename)
if not os.path.isfile(mesonfile):
raise InvalidArguments('Missing Meson file in %s' % mesonfile)
raise InvalidArguments(f'Missing Meson file in {mesonfile}')
with open(mesonfile, encoding='utf-8') as mf:
code = mf.read()
if code.isspace():
@@ -471,7 +471,7 @@ class InterpreterBase:
if is_disabled(args, kwargs):
return Disabler()
if not isinstance(obj, InterpreterObject):
raise InvalidArguments('Variable "%s" is not callable.' % object_name)
raise InvalidArguments(f'Variable "{object_name}" is not callable.')
# TODO: InterpreterBase **really** shouldn't be in charge of checking this
if method_name == 'extract_objects':
if isinstance(obj, ObjectHolder):
@@ -507,7 +507,7 @@ class InterpreterBase:
return [_unholder(x) for x in args], {k: _unholder(v) for k, v in kwargs.items()}
def unknown_function_called(self, func_name: str) -> None:
raise InvalidCode('Unknown function "%s".' % func_name)
raise InvalidCode(f'Unknown function "{func_name}".')
def reduce_arguments(
self,
@@ -584,7 +584,7 @@ class InterpreterBase:
if re.match('[_a-zA-Z][_0-9a-zA-Z]*$', varname) is None:
raise InvalidCode('Invalid variable name: ' + varname)
if varname in self.builtin:
raise InvalidCode('Tried to overwrite internal variable "%s"' % varname)
raise InvalidCode(f'Tried to overwrite internal variable "{varname}"')
self.variables[varname] = variable
def get_variable(self, varname: str) -> InterpreterObject:
@@ -592,7 +592,7 @@ class InterpreterBase:
return self.builtin[varname]
if varname in self.variables:
return self.variables[varname]
raise InvalidCode('Unknown variable "%s".' % varname)
raise InvalidCode(f'Unknown variable "{varname}".')
def validate_extraction(self, buildtarget: mesonlib.HoldableObject) -> None:
raise InterpreterException('validate_extraction is not implemented in this context (please file a bug)')

@@ -395,7 +395,7 @@ class File(HoldableObject):
@lru_cache(maxsize=None)
def from_source_file(source_root: str, subdir: str, fname: str) -> 'File':
if not os.path.isfile(os.path.join(source_root, subdir, fname)):
raise MesonException('File %s does not exist.' % fname)
raise MesonException(f'File {fname} does not exist.')
return File(False, subdir, fname)
@staticmethod

@@ -386,10 +386,10 @@ class GnomeModule(ExtensionModule):
".", resfile)
except MesonException:
raise MesonException(
'Resource "%s" listed in "%s" was not found. If this is a '
'generated file, pass the target that generates it to '
'gnome.compile_resources() using the "dependencies" '
'keyword argument.' % (resfile, input_file))
f'Resource "{resfile}" listed in "{input_file}" was not found. '
'If this is a generated file, pass the target that generates '
'it to gnome.compile_resources() using the "dependencies" '
'keyword argument.')
raw_dep_files.remove(resfile)
dep_files.append(f)
dep_files.extend(raw_dep_files)
@@ -476,14 +476,14 @@ class GnomeModule(ExtensionModule):
# For PkgConfigDependency only:
getattr(dep, 'is_libtool', False)):
lib_dir = os.path.dirname(flag)
external_ldflags.update(["-L%s" % lib_dir])
external_ldflags.update([f'-L{lib_dir}'])
if include_rpath:
external_ldflags.update([f'-Wl,-rpath {lib_dir}'])
libname = os.path.basename(flag)
if libname.startswith("lib"):
libname = libname[3:]
libname = libname.split(".so")[0]
flag = "-l%s" % libname
flag = f"-l{libname}"
# FIXME: Hack to avoid passing some compiler options in
if flag.startswith("-W"):
continue
@@ -608,7 +608,7 @@ class GnomeModule(ExtensionModule):
if 'symbol_prefix' in kwargs:
sym_prefixes = mesonlib.stringlistify(kwargs.pop('symbol_prefix', []))
ret += ['--symbol-prefix=%s' % sym_prefix for sym_prefix in sym_prefixes]
ret += [f'--symbol-prefix={sym_prefix}' for sym_prefix in sym_prefixes]
return ret
@@ -619,7 +619,7 @@ class GnomeModule(ExtensionModule):
identifier_prefix = kwargs.pop('identifier_prefix')
if not isinstance(identifier_prefix, str):
raise MesonException('Gir identifier prefix must be str')
ret += ['--identifier-prefix=%s' % identifier_prefix]
ret += [f'--identifier-prefix={identifier_prefix}']
return ret
@@ -629,9 +629,9 @@ class GnomeModule(ExtensionModule):
if 'export_packages' in kwargs:
pkgs = kwargs.pop('export_packages')
if isinstance(pkgs, str):
ret += ['--pkg-export=%s' % pkgs]
ret += [f'--pkg-export={pkgs}']
elif isinstance(pkgs, list):
ret += ['--pkg-export=%s' % pkg for pkg in pkgs]
ret += [f'--pkg-export={pkg}' for pkg in pkgs]
else:
raise MesonException('Gir export packages must be str or list')

@@ -135,12 +135,11 @@ class HotdocTargetBuilder:
if force_list and not isinstance(value, list):
return [value], uvalue
return value, uvalue
raise MesonException("%s field value %s is not valid,"
" valid types are %s" % (argname, value,
types))
raise MesonException(f"{argname} field value {value} is not valid,"
f" valid types are {types}")
except KeyError:
if mandatory:
raise MesonException("%s mandatory field not found" % argname)
raise MesonException(f"{argname} mandatory field not found")
if default is not None:
return default, default
@@ -279,14 +278,14 @@ class HotdocTargetBuilder:
_dir = os.path.join(self.sourcedir, self.subdir, value)
if not os.path.isdir(_dir):
raise InvalidArguments('"%s" is not a directory.' % _dir)
raise InvalidArguments(f'"{_dir}" is not a directory.')
return os.path.relpath(_dir, os.path.join(self.builddir, self.subdir))
def check_forbidden_args(self):
for arg in ['conf_file']:
if arg in self.kwargs:
raise InvalidArguments('Argument "%s" is forbidden.' % arg)
raise InvalidArguments(f'Argument "{arg}" is forbidden.')
def add_include_path(self, path):
self.include_paths[path] = path
@@ -414,7 +413,7 @@ class HotDocModule(ExtensionModule):
@noKwargs
def has_extensions(self, state, args, kwargs):
return self.hotdoc.run_hotdoc(['--has-extension=%s' % extension for extension in args]) == 0
return self.hotdoc.run_hotdoc([f'--has-extension={extension}' for extension in args]) == 0
def generate_doc(self, state, args, kwargs):
if len(args) != 1:

@@ -355,12 +355,12 @@ class PkgConfigModule(ExtensionModule):
for k, v in unescaped_variables:
ofile.write(f'{k}={v}\n')
ofile.write('\n')
ofile.write('Name: %s\n' % name)
ofile.write(f'Name: {name}\n')
if len(description) > 0:
ofile.write('Description: %s\n' % description)
ofile.write(f'Description: {description}\n')
if len(url) > 0:
ofile.write('URL: %s\n' % url)
ofile.write('Version: %s\n' % version)
ofile.write(f'URL: {url}\n')
ofile.write(f'Version: {version}\n')
reqs_str = deps.format_reqs(deps.pub_reqs)
if len(reqs_str) > 0:
ofile.write(f'Requires: {reqs_str}\n')
@@ -406,7 +406,7 @@ class PkgConfigModule(ExtensionModule):
if not is_custom_target and l.name_suffix_set:
mlog.warning(msg.format(l.name, 'name_suffix', lname, pcfile))
if is_custom_target or 'cs' not in l.compilers:
yield '-l%s' % lname
yield f'-l{lname}'
def get_uninstalled_include_dirs(libs):
result = []
