typing: fully annotate tools

pull/7657/head
Daniel Mensinger 5 years ago
parent 449dd8e72a
commit 0d57e307b2
No known key found for this signature in database
GPG Key ID: 54DD94C131E277D4
1. run_mypy.py (1 changed line)
2. tools/build_website.py (4 changed lines)
3. tools/cmake2meson.py (19 changed lines)
4. tools/dircondenser.py (4 changed lines)
5. tools/regenerate_docs.py (16 changed lines)

run_mypy.py
@@ -36,6 +36,7 @@ strict_modules = [
     'mesonbuild/ast',
     'mesonbuild/wrap',
     'run_mypy.py',
+    'tools',
 ]

 normal_args = ['--follow-imports=skip']
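
The single change here adds 'tools' to the strict module list, which asks run_mypy.py to check that directory with mypy's full --strict bundle rather than the looser default arguments. The sketch below is only an approximation of that idea: the check() helper and the module selection are hypothetical, while --strict and --follow-imports=skip are real mypy options.

# Hypothetical driver sketch; run_mypy.py's real logic is not part of this diff.
import subprocess
import sys
import typing as T

def check(modules: T.List[str], extra_args: T.List[str]) -> int:
    # Run mypy as a module so the active interpreter/virtualenv is used.
    cmd = [sys.executable, '-m', 'mypy'] + extra_args + modules
    return subprocess.run(cmd).returncode

if __name__ == '__main__':
    # Strict entries (now including 'tools') get the full --strict checks;
    # other code keeps the looser defaults such as --follow-imports=skip.
    sys.exit(check(['run_mypy.py', 'tools'], ['--strict']))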

tools/build_website.py
@@ -6,14 +6,14 @@ assert(os.getcwd() == '/home/jpakkane')

 from glob import glob

-def purge(fname):
+def purge(fname: str) -> None:
     if not os.path.exists(fname):
         return
     if os.path.isdir(fname):
         shutil.rmtree(fname)
     os.unlink(fname)

-def update():
+def update() -> None:
     webdir = 'mesonweb'
     repodir = 'mesonwebbuild'
     docdir = os.path.join(repodir, 'docs')
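
The only changes in this file are -> None return annotations on purge() and update(). They matter because mypy's --strict bundle enables --disallow-untyped-defs, so a bare def is rejected once a file is checked strictly, even for trivial helpers. A minimal illustration with hypothetical names, not the real build_website.py functions:

# Under `mypy --strict`, the first definition is reported as
# "Function is missing a type annotation"; the second one passes.

def remove_path(path):
    ...

def remove_path_typed(path: str) -> None:
    ...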

tools/cmake2meson.py
@@ -34,7 +34,7 @@ class Statement:
         self.args = args

 class Lexer:
-    def __init__(self):
+    def __init__(self) -> None:
         self.token_specification = [
             # Need to be sorted longest to shortest.
             ('ignore', re.compile(r'[ \t]')),
@@ -87,11 +87,11 @@ class Lexer:
                 raise ValueError('Lexer got confused line %d column %d' % (lineno, col))

 class Parser:
-    def __init__(self, code: str):
+    def __init__(self, code: str) -> None:
         self.stream = Lexer().lex(code)
         self.getsym()

-    def getsym(self):
+    def getsym(self) -> None:
         try:
             self.current = next(self.stream)
         except StopIteration:
@@ -118,8 +118,8 @@ class Parser:
         self.expect('rparen')
         return Statement(cur.value, args)

-    def arguments(self) -> list:
-        args = []
+    def arguments(self) -> T.List[T.Union[Token, T.Any]]:
+        args = []  # type: T.List[T.Union[Token, T.Any]]
         if self.accept('lparen'):
             args.append(self.arguments())
             self.expect('rparen')
@@ -139,7 +139,7 @@ class Parser:
         while not self.accept('eof'):
             yield(self.statement())

-def token_or_group(arg):
+def token_or_group(arg: T.Union[Token, T.List[Token]]) -> str:
     if isinstance(arg, Token):
         return ' ' + arg.value
     elif isinstance(arg, list):
@@ -148,6 +148,7 @@ def token_or_group(arg):
             line += ' ' + token_or_group(a)
         line += ' )'
         return line
+    raise RuntimeError('Conversion error in token_or_group')

 class Converter:
     ignored_funcs = {'cmake_minimum_required': True,
@@ -183,7 +184,7 @@ class Converter:
                 return res[0]
         return ''

-    def write_entry(self, outfile: T.TextIO, t: Statement):
+    def write_entry(self, outfile: T.TextIO, t: Statement) -> None:
         if t.name in Converter.ignored_funcs:
             return
         preincrement = 0
@@ -274,7 +275,7 @@ class Converter:
             outfile.write('\n')
         self.indent_level += postincrement

-    def convert(self, subdir: Path = None):
+    def convert(self, subdir: Path = None) -> None:
         if not subdir:
             subdir = self.cmake_root
         cfile = Path(subdir).expanduser() / 'CMakeLists.txt'
@@ -297,7 +298,7 @@ class Converter:
         if subdir == self.cmake_root and len(self.options) > 0:
             self.write_options()

-    def write_options(self):
+    def write_options(self) -> None:
         filename = self.cmake_root / 'meson_options.txt'
         with filename.open('w') as optfile:
             for o in self.options:
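
Most of this file is again plain -> None annotations, but two changes carry more weight: Parser.arguments() now declares T.List[T.Union[Token, T.Any]] as its return type (T.Any standing in for nested argument lists), with a matching "# type:" comment on the empty list since mypy cannot infer an element type from a bare [], and token_or_group() gains a trailing raise so that every code path either returns a str or raises. A standalone sketch of the same pattern, with made-up names rather than the real cmake2meson classes:

import typing as T

class Token:
    # Reduced to the bare minimum needed for the example.
    def __init__(self, tid: str, value: str) -> None:
        self.tid = tid
        self.value = value

def render(arg: T.Union[Token, T.List[Token]]) -> str:
    # Every branch returns a str; the final raise tells mypy that control
    # never falls off the end and implicitly returns None.
    if isinstance(arg, Token):
        return ' ' + arg.value
    elif isinstance(arg, list):
        line = ' ('
        for a in arg:
            line += ' ' + a.value
        line += ' )'
        return line
    raise RuntimeError('unexpected argument type')

# The "# type:" comment mirrors the style used for the empty list above.
tokens = []  # type: T.List[T.Union[Token, T.Any]]
tokens.append(Token('id', 'project'))

print(render([Token('id', 'project'), Token('string', "'demo'")]))  # " ( project 'demo' )"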

tools/dircondenser.py
@@ -53,7 +53,7 @@ def get_entries() -> T.List[T.Tuple[int, str]]:
     entries.sort()
     return entries

-def replace_source(sourcefile: str, replacements: T.List[T.Tuple[str, str]]):
+def replace_source(sourcefile: str, replacements: T.List[T.Tuple[str, str]]) -> None:
     with open(sourcefile, 'r') as f:
         contents = f.read()
     for old_name, new_name in replacements:
@@ -61,7 +61,7 @@ def replace_source(sourcefile: str, replacements: T.List[T.Tuple[str, str]]):
     with open(sourcefile, 'w') as f:
         f.write(contents)

-def condense(dirname: str):
+def condense(dirname: str) -> None:
     curdir = os.getcwd()
     os.chdir(dirname)
     entries = get_entries()
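
Here too the functions only gain -> None. As a usage illustration of the annotated replace_source() signature, a self-contained sketch: the signature and the surrounding lines match the hunks above, but the substitution statement sits between the two hunks and is assumed to be a plain str.replace(), and the file name in the commented-out call is made up.

import typing as T

def replace_source(sourcefile: str, replacements: T.List[T.Tuple[str, str]]) -> None:
    with open(sourcefile, 'r') as f:
        contents = f.read()
    for old_name, new_name in replacements:
        contents = contents.replace(old_name, new_name)  # assumed; not visible in the diff
    with open(sourcefile, 'w') as f:
        f.write(contents)

# Hypothetical call: renumber '1 foo' to '2 foo' inside test.json.
# replace_source('test.json', [('1 foo', '2 foo')])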

tools/regenerate_docs.py
@@ -31,21 +31,21 @@ from pathlib import Path
 PathLike = T.Union[Path,str]

-def _get_meson_output(root_dir: Path, args: T.List):
+def _get_meson_output(root_dir: Path, args: T.List) -> str:
     env = os.environ.copy()
     env['COLUMNS'] = '80'
     return subprocess.run([str(sys.executable), str(root_dir/'meson.py')] + args, check=True, capture_output=True, text=True, env=env).stdout.strip()

-def get_commands_data(root_dir: Path):
+def get_commands_data(root_dir: Path) -> T.Dict[str, T.Any]:
     usage_start_pattern = re.compile(r'^usage: ', re.MULTILINE)
     positional_start_pattern = re.compile(r'^positional arguments:[\t ]*[\r\n]+', re.MULTILINE)
     options_start_pattern = re.compile(r'^optional arguments:[\t ]*[\r\n]+', re.MULTILINE)
     commands_start_pattern = re.compile(r'^[A-Za-z ]*[Cc]ommands:[\t ]*[\r\n]+', re.MULTILINE)

-    def get_next_start(iterators, end):
+    def get_next_start(iterators: T.Sequence[T.Any], end: T.Optional[int]) -> int:
         return next((i.start() for i in iterators if i), end)

-    def normalize_text(text):
+    def normalize_text(text: str) -> str:
         # clean up formatting
         out = text
         out = re.sub(r'\r\n', r'\r', out, flags=re.MULTILINE) # replace newlines with a linux EOL
@@ -53,7 +53,7 @@ def get_commands_data(root_dir: Path):
         out = re.sub(r'(?:^\n+|\n+$)', '', out) # remove trailing empty lines
         return out

-    def parse_cmd(cmd):
+    def parse_cmd(cmd: str) -> T.Dict[str, str]:
         cmd_len = len(cmd)
         usage = usage_start_pattern.search(cmd)
         positionals = positional_start_pattern.search(cmd)
@@ -72,7 +72,7 @@ def get_commands_data(root_dir: Path):
             'arguments': normalize_text(cmd[arguments_start:cmd_len]),
         }

-    def clean_dir_arguments(text):
+    def clean_dir_arguments(text: str) -> str:
         # Remove platform specific defaults
         args = [
             'prefix',
@@ -127,7 +127,7 @@ def regenerate_docs(output_dir: PathLike,
                     dummy_output_file: T.Optional[PathLike]) -> None:
     if not output_dir:
         raise ValueError(f'Output directory value is not set')

     output_dir = Path(output_dir).resolve()
     output_dir.mkdir(parents=True, exist_ok=True)
@@ -143,7 +143,7 @@ if __name__ == '__main__':
     parser = argparse.ArgumentParser(description='Generate meson docs')
     parser.add_argument('--output-dir', required=True)
     parser.add_argument('--dummy-output-file', type=str)

     args = parser.parse_args()

     regenerate_docs(output_dir=args.output_dir,
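
The annotations in this file make the shape of the parsed help data explicit: get_commands_data() returns T.Dict[str, T.Any], and its nested parse_cmd() helper returns T.Dict[str, str] with 'usage' and 'arguments' entries. A hedged sketch of that return shape; the two regexes and the dictionary keys come from the hunks above, while the simplified slicing logic and the sample help text are assumptions:

import re
import typing as T

usage_start_pattern = re.compile(r'^usage: ', re.MULTILINE)
positional_start_pattern = re.compile(r'^positional arguments:[\t ]*[\r\n]+', re.MULTILINE)

def parse_cmd(cmd: str) -> T.Dict[str, str]:
    # Split one --help text into the usage block and everything after it.
    usage = usage_start_pattern.search(cmd)
    positionals = positional_start_pattern.search(cmd)
    arguments_start = positionals.start() if positionals else len(cmd)
    assert usage is not None
    return {
        'usage': cmd[usage.start():arguments_start].strip(),
        'arguments': cmd[arguments_start:].strip(),
    }

help_text = 'usage: meson setup [options]\n\npositional arguments:\n  builddir\n'
print(parse_cmd(help_text))
# {'usage': 'usage: meson setup [options]', 'arguments': 'positional arguments:\n  builddir'}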
