yapf tools/buildgen

Branch: pull/13719/head
Author: ncteisen (7 years ago)
Parent: 7fd341746b · Commit: 26d70b1653
Changed files (changed-line counts):
  1. tools/buildgen/build-cleaner.py — 100
  2. tools/buildgen/bunch.py — 53
  3. tools/buildgen/generate_projects.py — 77
  4. tools/buildgen/mako_renderer.py — 278
  5. tools/buildgen/plugins/expand_bin_attrs.py — 37
  6. tools/buildgen/plugins/expand_filegroups.py — 222
  7. tools/buildgen/plugins/expand_version.py — 133
  8. tools/buildgen/plugins/generate_vsprojects.py — 104
  9. tools/buildgen/plugins/list_api.py — 55
  10. tools/buildgen/plugins/list_protos.py — 36
  11. tools/buildgen/plugins/make_fuzzer_tests.py — 54
  12. tools/buildgen/plugins/transitive_dependencies.py — 46
  13. tools/distrib/yapf_code.sh — 1

Note: the diff markers were lost in this rendering; within each hunk below, the affected lines appear as written before yapf, immediately followed by the same lines as reformatted.

tools/buildgen/build-cleaner.py
@@ -22,65 +22,65 @@ import yaml
TEST = (os.environ.get('TEST', 'false') == 'true')
_TOP_LEVEL_KEYS = ['settings', 'proto_deps', 'filegroups', 'libs', 'targets', 'vspackages']
_TOP_LEVEL_KEYS = [
'settings', 'proto_deps', 'filegroups', 'libs', 'targets', 'vspackages'
]
_ELEM_KEYS = [
'name',
'gtest',
'cpu_cost',
'flaky',
'build',
'run',
'language',
'public_headers',
'headers',
'src',
'deps']
'name', 'gtest', 'cpu_cost', 'flaky', 'build', 'run', 'language',
'public_headers', 'headers', 'src', 'deps'
]
def repr_ordered_dict(dumper, odict):
return dumper.represent_mapping(u'tag:yaml.org,2002:map', odict.items())
return dumper.represent_mapping(u'tag:yaml.org,2002:map', odict.items())
yaml.add_representer(collections.OrderedDict, repr_ordered_dict)
def rebuild_as_ordered_dict(indict, special_keys):
outdict = collections.OrderedDict()
for key in sorted(indict.keys()):
if '#' in key:
outdict[key] = indict[key]
for key in special_keys:
if key in indict:
outdict[key] = indict[key]
for key in sorted(indict.keys()):
if key in special_keys: continue
if '#' in key: continue
outdict[key] = indict[key]
return outdict
outdict = collections.OrderedDict()
for key in sorted(indict.keys()):
if '#' in key:
outdict[key] = indict[key]
for key in special_keys:
if key in indict:
outdict[key] = indict[key]
for key in sorted(indict.keys()):
if key in special_keys: continue
if '#' in key: continue
outdict[key] = indict[key]
return outdict
def clean_elem(indict):
for name in ['public_headers', 'headers', 'src']:
if name not in indict: continue
inlist = indict[name]
protos = list(x for x in inlist if os.path.splitext(x)[1] == '.proto')
others = set(x for x in inlist if x not in protos)
indict[name] = protos + sorted(others)
return rebuild_as_ordered_dict(indict, _ELEM_KEYS)
for name in ['public_headers', 'headers', 'src']:
if name not in indict: continue
inlist = indict[name]
protos = list(x for x in inlist if os.path.splitext(x)[1] == '.proto')
others = set(x for x in inlist if x not in protos)
indict[name] = protos + sorted(others)
return rebuild_as_ordered_dict(indict, _ELEM_KEYS)
for filename in sys.argv[1:]:
with open(filename) as f:
js = yaml.load(f)
js = rebuild_as_ordered_dict(js, _TOP_LEVEL_KEYS)
for grp in ['filegroups', 'libs', 'targets']:
if grp not in js: continue
js[grp] = sorted([clean_elem(x) for x in js[grp]],
key=lambda x: (x.get('language', '_'), x['name']))
output = yaml.dump(js, indent=2, width=80, default_flow_style=False)
# massage out trailing whitespace
lines = []
for line in output.splitlines():
lines.append(line.rstrip() + '\n')
output = ''.join(lines)
if TEST:
with open(filename) as f:
assert f.read() == output
else:
with open(filename, 'w') as f:
f.write(output)
js = yaml.load(f)
js = rebuild_as_ordered_dict(js, _TOP_LEVEL_KEYS)
for grp in ['filegroups', 'libs', 'targets']:
if grp not in js: continue
js[grp] = sorted(
[clean_elem(x) for x in js[grp]],
key=lambda x: (x.get('language', '_'), x['name']))
output = yaml.dump(js, indent=2, width=80, default_flow_style=False)
# massage out trailing whitespace
lines = []
for line in output.splitlines():
lines.append(line.rstrip() + '\n')
output = ''.join(lines)
if TEST:
with open(filename) as f:
assert f.read() == output
else:
with open(filename, 'w') as f:
f.write(output)
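
Aside: build-cleaner.py canonicalizes build.yaml by putting '#' comment keys first, then the special keys in _TOP_LEVEL_KEYS/_ELEM_KEYS order, then any remaining keys alphabetically. A minimal sketch of that ordering rule (the helper condenses rebuild_as_ordered_dict above into one loop; the input element is invented):

import collections

def rebuild_as_ordered_dict(indict, special_keys):
    # '#' comment keys first, then special keys in the order given,
    # then everything else alphabetically.
    outdict = collections.OrderedDict()
    for key in sorted(indict.keys()):
        if '#' in key:
            outdict[key] = indict[key]
    for key in special_keys:
        if key in indict:
            outdict[key] = indict[key]
    for key in sorted(indict.keys()):
        if key in special_keys or '#' in key:
            continue
        outdict[key] = indict[key]
    return outdict

elem = {'src': ['a.cc'], 'name': 'foo', 'language': 'c', 'extra': 1}
print(list(rebuild_as_ordered_dict(elem, ['name', 'language', 'src'])))
# -> ['name', 'language', 'src', 'extra']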

tools/buildgen/bunch.py
@@ -11,43 +11,42 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Allows dot-accessible dictionaries."""
class Bunch(dict):
def __init__(self, d):
dict.__init__(self, d)
self.__dict__.update(d)
def __init__(self, d):
dict.__init__(self, d)
self.__dict__.update(d)
# Converts any kind of variable to a Bunch
def to_bunch(var):
if isinstance(var, list):
return [to_bunch(i) for i in var]
if isinstance(var, dict):
ret = {}
for k, v in var.items():
if isinstance(v, (list, dict)):
v = to_bunch(v)
ret[k] = v
return Bunch(ret)
else:
return var
if isinstance(var, list):
return [to_bunch(i) for i in var]
if isinstance(var, dict):
ret = {}
for k, v in var.items():
if isinstance(v, (list, dict)):
v = to_bunch(v)
ret[k] = v
return Bunch(ret)
else:
return var
# Merges JSON 'add' into JSON 'dst'
def merge_json(dst, add):
if isinstance(dst, dict) and isinstance(add, dict):
for k, v in add.items():
if k in dst:
if k == '#': continue
merge_json(dst[k], v)
else:
dst[k] = v
elif isinstance(dst, list) and isinstance(add, list):
dst.extend(add)
else:
raise Exception('Tried to merge incompatible objects %s %s\n\n%r\n\n%r' % (type(dst).__name__, type(add).__name__, dst, add))
if isinstance(dst, dict) and isinstance(add, dict):
for k, v in add.items():
if k in dst:
if k == '#': continue
merge_json(dst[k], v)
else:
dst[k] = v
elif isinstance(dst, list) and isinstance(add, list):
dst.extend(add)
else:
raise Exception('Tried to merge incompatible objects %s %s\n\n%r\n\n%r'
% (type(dst).__name__, type(add).__name__, dst, add))
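
A short usage sketch of the two helpers above (toy data; assumes bunch.py is importable): to_bunch makes nested dicts dot-accessible, and merge_json deep-merges dicts while concatenating lists:

from bunch import merge_json, to_bunch

cfg = to_bunch({'settings': {'version': '1.8.0'}, 'libs': [{'name': 'gpr'}]})
print(cfg.settings.version)  # -> 1.8.0
print(cfg.libs[0].name)      # -> gpr

dst = {'libs': ['gpr'], 'settings': {'version': '1.8.0'}}
merge_json(dst, {'libs': ['grpc'], 'settings': {'tag': 'dev'}})
print(dst['libs'])  # -> ['gpr', 'grpc']; the settings dicts merge key-by-key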

tools/buildgen/generate_projects.py
@@ -21,7 +21,9 @@ import shutil
import sys
import tempfile
import multiprocessing
sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), '..', 'run_tests', 'python_utils'))
sys.path.append(
os.path.join(
os.path.dirname(sys.argv[0]), '..', 'run_tests', 'python_utils'))
assert sys.argv[1:], 'run generate_projects.sh instead of this directly'
@@ -45,57 +47,58 @@ plugins = sorted(glob.glob('tools/buildgen/plugins/*.py'))
templates = args.templates
if not templates:
for root, dirs, files in os.walk('templates'):
for f in files:
templates.append(os.path.join(root, f))
for root, dirs, files in os.walk('templates'):
for f in files:
templates.append(os.path.join(root, f))
pre_jobs = []
base_cmd = ['python2.7', 'tools/buildgen/mako_renderer.py']
cmd = base_cmd[:]
for plugin in plugins:
cmd.append('-p')
cmd.append(plugin)
cmd.append('-p')
cmd.append(plugin)
for js in json:
cmd.append('-d')
cmd.append(js)
cmd.append('-d')
cmd.append(js)
cmd.append('-w')
preprocessed_build = '.preprocessed_build'
cmd.append(preprocessed_build)
if args.output_merged is not None:
cmd.append('-M')
cmd.append(args.output_merged)
pre_jobs.append(jobset.JobSpec(cmd, shortname='preprocess', timeout_seconds=None))
cmd.append('-M')
cmd.append(args.output_merged)
pre_jobs.append(
jobset.JobSpec(cmd, shortname='preprocess', timeout_seconds=None))
jobs = []
for template in reversed(sorted(templates)):
root, f = os.path.split(template)
if os.path.splitext(f)[1] == '.template':
out_dir = args.base + root[len('templates'):]
out = out_dir + '/' + os.path.splitext(f)[0]
if not os.path.exists(out_dir):
os.makedirs(out_dir)
cmd = base_cmd[:]
cmd.append('-P')
cmd.append(preprocessed_build)
cmd.append('-o')
if test is None:
cmd.append(out)
else:
tf = tempfile.mkstemp()
test[out] = tf[1]
os.close(tf[0])
cmd.append(test[out])
cmd.append(args.base + '/' + root + '/' + f)
jobs.append(jobset.JobSpec(cmd, shortname=out, timeout_seconds=None))
root, f = os.path.split(template)
if os.path.splitext(f)[1] == '.template':
out_dir = args.base + root[len('templates'):]
out = out_dir + '/' + os.path.splitext(f)[0]
if not os.path.exists(out_dir):
os.makedirs(out_dir)
cmd = base_cmd[:]
cmd.append('-P')
cmd.append(preprocessed_build)
cmd.append('-o')
if test is None:
cmd.append(out)
else:
tf = tempfile.mkstemp()
test[out] = tf[1]
os.close(tf[0])
cmd.append(test[out])
cmd.append(args.base + '/' + root + '/' + f)
jobs.append(jobset.JobSpec(cmd, shortname=out, timeout_seconds=None))
jobset.run(pre_jobs, maxjobs=args.jobs)
jobset.run(jobs, maxjobs=args.jobs)
if test is not None:
for s, g in test.iteritems():
if os.path.isfile(g):
assert 0 == os.system('diff %s %s' % (s, g)), s
os.unlink(g)
else:
assert 0 == os.system('diff -r %s %s' % (s, g)), s
shutil.rmtree(g, ignore_errors=True)
for s, g in test.iteritems():
if os.path.isfile(g):
assert 0 == os.system('diff %s %s' % (s, g)), s
os.unlink(g)
else:
assert 0 == os.system('diff -r %s %s' % (s, g)), s
shutil.rmtree(g, ignore_errors=True)
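
Net effect of the code above: one preprocessing job that loads every -d YAML file, runs every -p plugin, and pickles the merged dictionary to .preprocessed_build, followed by one render job per template that reads the pickle back with -P. Roughly, the JobSpec command lines look like this (plugin and template paths are illustrative):

preprocess_cmd = [
    'python2.7', 'tools/buildgen/mako_renderer.py',
    '-p', 'tools/buildgen/plugins/expand_filegroups.py',  # one -p per plugin
    '-d', 'build.yaml',                                   # one -d per json input
    '-w', '.preprocessed_build',
]
render_cmd = [
    'python2.7', 'tools/buildgen/mako_renderer.py',
    '-P', '.preprocessed_build',
    '-o', './Makefile',            # out = base + template path minus '.template'
    './templates/Makefile.template',
]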

tools/buildgen/mako_renderer.py
@@ -12,8 +12,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Simple Mako renderer.
Just a wrapper around the mako rendering library.
@@ -27,7 +25,6 @@ import cPickle as pickle
import shutil
import sys
from mako.lookup import TemplateLookup
from mako.runtime import Context
from mako.template import Template
@@ -37,151 +34,158 @@ import yaml
# Imports a plugin
def import_plugin(name):
_, base_ex = os.path.split(name)
base, _ = os.path.splitext(base_ex)
_, base_ex = os.path.split(name)
base, _ = os.path.splitext(base_ex)
with open(name, 'r') as plugin_file:
plugin_code = plugin_file.read()
plugin_module = imp.new_module(base)
exec plugin_code in plugin_module.__dict__
return plugin_module
with open(name, 'r') as plugin_file:
plugin_code = plugin_file.read()
plugin_module = imp.new_module(base)
exec plugin_code in plugin_module.__dict__
return plugin_module
def out(msg):
print >> sys.stderr, msg
print >> sys.stderr, msg
def showhelp():
out('mako-renderer.py [-o out] [-m cache] [-P preprocessed_input] [-d dict] [-d dict...]'
' [-t template] [-w preprocessed_output]')
out('mako-renderer.py [-o out] [-m cache] [-P preprocessed_input] [-d dict] [-d dict...]'
' [-t template] [-w preprocessed_output]')
def main(argv):
got_input = False
module_directory = None
preprocessed_output = None
dictionary = {}
json_dict = {}
got_output = False
plugins = []
output_name = None
got_preprocessed_input = False
output_merged = None
try:
opts, args = getopt.getopt(argv, 'hM:m:d:o:p:t:P:w:')
except getopt.GetoptError:
out('Unknown option')
showhelp()
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
out('Displaying showhelp')
showhelp()
sys.exit()
elif opt == '-o':
if got_output:
out('Got more than one output')
got_input = False
module_directory = None
preprocessed_output = None
dictionary = {}
json_dict = {}
got_output = False
plugins = []
output_name = None
got_preprocessed_input = False
output_merged = None
try:
opts, args = getopt.getopt(argv, 'hM:m:d:o:p:t:P:w:')
except getopt.GetoptError:
out('Unknown option')
showhelp()
sys.exit(3)
got_output = True
output_name = arg
elif opt == '-m':
if module_directory is not None:
out('Got more than one cache directory')
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
out('Displaying showhelp')
showhelp()
sys.exit()
elif opt == '-o':
if got_output:
out('Got more than one output')
showhelp()
sys.exit(3)
got_output = True
output_name = arg
elif opt == '-m':
if module_directory is not None:
out('Got more than one cache directory')
showhelp()
sys.exit(4)
module_directory = arg
elif opt == '-M':
if output_merged is not None:
out('Got more than one output merged path')
showhelp()
sys.exit(5)
output_merged = arg
elif opt == '-P':
assert not got_preprocessed_input
assert json_dict == {}
sys.path.insert(
0,
os.path.abspath(
os.path.join(os.path.dirname(sys.argv[0]), 'plugins')))
with open(arg, 'r') as dict_file:
dictionary = pickle.load(dict_file)
got_preprocessed_input = True
elif opt == '-d':
assert not got_preprocessed_input
with open(arg, 'r') as dict_file:
bunch.merge_json(json_dict, yaml.load(dict_file.read()))
elif opt == '-p':
plugins.append(import_plugin(arg))
elif opt == '-w':
preprocessed_output = arg
if not got_preprocessed_input:
for plugin in plugins:
plugin.mako_plugin(json_dict)
if output_merged:
with open(output_merged, 'w') as yaml_file:
yaml_file.write(yaml.dump(json_dict))
for k, v in json_dict.items():
dictionary[k] = bunch.to_bunch(v)
if preprocessed_output:
with open(preprocessed_output, 'w') as dict_file:
pickle.dump(dictionary, dict_file)
cleared_dir = False
for arg in args:
got_input = True
with open(arg) as f:
srcs = list(yaml.load_all(f.read()))
for src in srcs:
if isinstance(src, basestring):
assert len(srcs) == 1
template = Template(
src,
filename=arg,
module_directory=module_directory,
lookup=TemplateLookup(directories=['.']))
with open(output_name, 'w') as output_file:
template.render_context(Context(output_file, **dictionary))
else:
# we have optional control data: this template represents
# a directory
if not cleared_dir:
if not os.path.exists(output_name):
pass
elif os.path.isfile(output_name):
os.unlink(output_name)
else:
shutil.rmtree(output_name, ignore_errors=True)
cleared_dir = True
items = []
if 'foreach' in src:
for el in dictionary[src['foreach']]:
if 'cond' in src:
args = dict(dictionary)
args['selected'] = el
if not eval(src['cond'], {}, args):
continue
items.append(el)
assert items
else:
items = [None]
for item in items:
args = dict(dictionary)
args['selected'] = item
item_output_name = os.path.join(
output_name,
Template(src['output_name']).render(**args))
if not os.path.exists(os.path.dirname(item_output_name)):
os.makedirs(os.path.dirname(item_output_name))
template = Template(
src['template'],
filename=arg,
module_directory=module_directory,
lookup=TemplateLookup(directories=['.']))
with open(item_output_name, 'w') as output_file:
template.render_context(Context(output_file, **args))
if not got_input and not preprocessed_output:
out('Got nothing to do')
showhelp()
sys.exit(4)
module_directory = arg
elif opt == '-M':
if output_merged is not None:
out('Got more than one output merged path')
showhelp()
sys.exit(5)
output_merged = arg
elif opt == '-P':
assert not got_preprocessed_input
assert json_dict == {}
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), 'plugins')))
with open(arg, 'r') as dict_file:
dictionary = pickle.load(dict_file)
got_preprocessed_input = True
elif opt == '-d':
assert not got_preprocessed_input
with open(arg, 'r') as dict_file:
bunch.merge_json(json_dict, yaml.load(dict_file.read()))
elif opt == '-p':
plugins.append(import_plugin(arg))
elif opt == '-w':
preprocessed_output = arg
if not got_preprocessed_input:
for plugin in plugins:
plugin.mako_plugin(json_dict)
if output_merged:
with open(output_merged, 'w') as yaml_file:
yaml_file.write(yaml.dump(json_dict))
for k, v in json_dict.items():
dictionary[k] = bunch.to_bunch(v)
if preprocessed_output:
with open(preprocessed_output, 'w') as dict_file:
pickle.dump(dictionary, dict_file)
cleared_dir = False
for arg in args:
got_input = True
with open(arg) as f:
srcs = list(yaml.load_all(f.read()))
for src in srcs:
if isinstance(src, basestring):
assert len(srcs) == 1
template = Template(src,
filename=arg,
module_directory=module_directory,
lookup=TemplateLookup(directories=['.']))
with open(output_name, 'w') as output_file:
template.render_context(Context(output_file, **dictionary))
else:
# we have optional control data: this template represents
# a directory
if not cleared_dir:
if not os.path.exists(output_name):
pass
elif os.path.isfile(output_name):
os.unlink(output_name)
else:
shutil.rmtree(output_name, ignore_errors=True)
cleared_dir = True
items = []
if 'foreach' in src:
for el in dictionary[src['foreach']]:
if 'cond' in src:
args = dict(dictionary)
args['selected'] = el
if not eval(src['cond'], {}, args):
continue
items.append(el)
assert items
else:
items = [None]
for item in items:
args = dict(dictionary)
args['selected'] = item
item_output_name = os.path.join(
output_name, Template(src['output_name']).render(**args))
if not os.path.exists(os.path.dirname(item_output_name)):
os.makedirs(os.path.dirname(item_output_name))
template = Template(src['template'],
filename=arg,
module_directory=module_directory,
lookup=TemplateLookup(directories=['.']))
with open(item_output_name, 'w') as output_file:
template.render_context(Context(output_file, **args))
if not got_input and not preprocessed_output:
out('Got nothing to do')
showhelp()
if __name__ == '__main__':
main(sys.argv[1:])
main(sys.argv[1:])
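
Stripped of option parsing, plugins, and the directory/'foreach' mode, the single-template path of main() boils down to this minimal mako usage (template text and data invented):

import sys

from mako.runtime import Context
from mako.template import Template

template = Template('Hello from ${name}!')
# mako writes straight into the buffer wrapped by the Context.
template.render_context(Context(sys.stdout, name='buildgen'))
# -> Hello from buildgen!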

tools/buildgen/plugins/expand_bin_attrs.py
@@ -11,7 +11,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Buildgen expand binary attributes plugin.
This fills in any optional attributes.
@@ -20,7 +19,7 @@ This fills in any optional attributes.
def mako_plugin(dictionary):
"""The exported plugin code for expand_filegroups.
"""The exported plugin code for expand_filegroups.
The list of libs in the build.yaml file can contain "filegroups" tags.
These refer to the filegroups in the root object. We will expand and
@@ -28,20 +27,20 @@ def mako_plugin(dictionary):
"""
targets = dictionary.get('targets')
default_platforms = ['windows', 'posix', 'linux', 'mac']
for tgt in targets:
tgt['flaky'] = tgt.get('flaky', False)
tgt['platforms'] = sorted(tgt.get('platforms', default_platforms))
tgt['ci_platforms'] = sorted(tgt.get('ci_platforms', tgt['platforms']))
tgt['boringssl'] = tgt.get('boringssl', False)
tgt['zlib'] = tgt.get('zlib', False)
tgt['ares'] = tgt.get('ares', False)
tgt['gtest'] = tgt.get('gtest', False)
libs = dictionary.get('libs')
for lib in libs:
lib['boringssl'] = lib.get('boringssl', False)
lib['zlib'] = lib.get('zlib', False)
lib['ares'] = lib.get('ares', False)
targets = dictionary.get('targets')
default_platforms = ['windows', 'posix', 'linux', 'mac']
for tgt in targets:
tgt['flaky'] = tgt.get('flaky', False)
tgt['platforms'] = sorted(tgt.get('platforms', default_platforms))
tgt['ci_platforms'] = sorted(tgt.get('ci_platforms', tgt['platforms']))
tgt['boringssl'] = tgt.get('boringssl', False)
tgt['zlib'] = tgt.get('zlib', False)
tgt['ares'] = tgt.get('ares', False)
tgt['gtest'] = tgt.get('gtest', False)
libs = dictionary.get('libs')
for lib in libs:
lib['boringssl'] = lib.get('boringssl', False)
lib['zlib'] = lib.get('zlib', False)
lib['ares'] = lib.get('ares', False)

tools/buildgen/plugins/expand_filegroups.py
@@ -11,7 +11,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Buildgen expand filegroups plugin.
This takes the list of libs from our yaml dictionary,
@@ -21,132 +20,135 @@ and expands any and all filegroup.
def excluded(filename, exclude_res):
for r in exclude_res:
if r.search(filename):
return True
return False
for r in exclude_res:
if r.search(filename):
return True
return False
def uniquify(lst):
out = []
for el in lst:
if el not in out:
out.append(el)
return out
out = []
for el in lst:
if el not in out:
out.append(el)
return out
FILEGROUP_LISTS = ['src', 'headers', 'public_headers', 'deps']
FILEGROUP_DEFAULTS = {
'language': 'c',
'boringssl': False,
'zlib': False,
'ares': False,
'language': 'c',
'boringssl': False,
'zlib': False,
'ares': False,
}
def mako_plugin(dictionary):
"""The exported plugin code for expand_filegroups.
"""The exported plugin code for expand_filegroups.
The list of libs in the build.yaml file can contain "filegroups" tags.
These refer to the filegroups in the root object. We will expand and
merge filegroups on the src, headers and public_headers properties.
"""
libs = dictionary.get('libs')
targets = dictionary.get('targets')
filegroups_list = dictionary.get('filegroups')
filegroups = {}
for fg in filegroups_list:
for lst in FILEGROUP_LISTS:
fg[lst] = fg.get(lst, [])
fg['own_%s' % lst] = list(fg[lst])
for attr, val in FILEGROUP_DEFAULTS.iteritems():
if attr not in fg:
fg[attr] = val
todo = list(filegroups_list)
skips = 0
while todo:
assert skips != len(todo), "infinite loop in filegroup uses clauses: %r" % [t['name'] for t in todo]
# take the first element of the todo list
cur = todo[0]
todo = todo[1:]
# check all uses filegroups are present (if no, skip and come back later)
skip = False
for uses in cur.get('uses', []):
if uses not in filegroups:
skip = True
if skip:
skips += 1
todo.append(cur)
else:
skips = 0
assert 'plugins' not in cur
plugins = []
for uses in cur.get('uses', []):
for plugin in filegroups[uses]['plugins']:
if plugin not in plugins:
plugins.append(plugin)
libs = dictionary.get('libs')
targets = dictionary.get('targets')
filegroups_list = dictionary.get('filegroups')
filegroups = {}
for fg in filegroups_list:
for lst in FILEGROUP_LISTS:
vals = cur.get(lst, [])
vals.extend(filegroups[uses].get(lst, []))
cur[lst] = vals
cur_plugin_name = cur.get('plugin')
if cur_plugin_name:
plugins.append(cur_plugin_name)
cur['plugins'] = plugins
filegroups[cur['name']] = cur
# build reverse dependency map
things = {}
for thing in dictionary['libs'] + dictionary['targets'] + dictionary['filegroups']:
things[thing['name']] = thing
thing['used_by'] = []
thing_deps = lambda t: t.get('uses', []) + t.get('filegroups', []) + t.get('deps', [])
for thing in things.itervalues():
done = set()
todo = thing_deps(thing)
fg[lst] = fg.get(lst, [])
fg['own_%s' % lst] = list(fg[lst])
for attr, val in FILEGROUP_DEFAULTS.iteritems():
if attr not in fg:
fg[attr] = val
todo = list(filegroups_list)
skips = 0
while todo:
cur = todo[0]
todo = todo[1:]
if cur in done: continue
things[cur]['used_by'].append(thing['name'])
todo.extend(thing_deps(things[cur]))
done.add(cur)
# the above expansion can introduce duplicate filenames: contract them here
for fg in filegroups.itervalues():
for lst in FILEGROUP_LISTS:
fg[lst] = uniquify(fg.get(lst, []))
for tgt in dictionary['targets']:
for lst in FILEGROUP_LISTS:
tgt[lst] = tgt.get(lst, [])
tgt['own_%s' % lst] = list(tgt[lst])
for lib in libs + targets:
assert 'plugins' not in lib
plugins = []
for lst in FILEGROUP_LISTS:
vals = lib.get(lst, [])
lib[lst] = list(vals)
lib['own_%s' % lst] = list(vals)
for fg_name in lib.get('filegroups', []):
fg = filegroups[fg_name]
for plugin in fg['plugins']:
if plugin not in plugins:
plugins.append(plugin)
for lst in FILEGROUP_LISTS:
vals = lib.get(lst, [])
vals.extend(fg.get(lst, []))
lib[lst] = vals
lib['plugins'] = plugins
if lib.get('generate_plugin_registry', False):
lib['src'].append('src/core/plugin_registry/%s_plugin_registry.cc' %
lib['name'])
for lst in FILEGROUP_LISTS:
lib[lst] = uniquify(lib.get(lst, []))
assert skips != len(
todo), "infinite loop in filegroup uses clauses: %r" % [
t['name'] for t in todo
]
# take the first element of the todo list
cur = todo[0]
todo = todo[1:]
# check all uses filegroups are present (if no, skip and come back later)
skip = False
for uses in cur.get('uses', []):
if uses not in filegroups:
skip = True
if skip:
skips += 1
todo.append(cur)
else:
skips = 0
assert 'plugins' not in cur
plugins = []
for uses in cur.get('uses', []):
for plugin in filegroups[uses]['plugins']:
if plugin not in plugins:
plugins.append(plugin)
for lst in FILEGROUP_LISTS:
vals = cur.get(lst, [])
vals.extend(filegroups[uses].get(lst, []))
cur[lst] = vals
cur_plugin_name = cur.get('plugin')
if cur_plugin_name:
plugins.append(cur_plugin_name)
cur['plugins'] = plugins
filegroups[cur['name']] = cur
# build reverse dependency map
things = {}
for thing in dictionary['libs'] + dictionary['targets'] + dictionary[
'filegroups']:
things[thing['name']] = thing
thing['used_by'] = []
thing_deps = lambda t: t.get('uses', []) + t.get('filegroups', []) + t.get('deps', [])
for thing in things.itervalues():
done = set()
todo = thing_deps(thing)
while todo:
cur = todo[0]
todo = todo[1:]
if cur in done: continue
things[cur]['used_by'].append(thing['name'])
todo.extend(thing_deps(things[cur]))
done.add(cur)
# the above expansion can introduce duplicate filenames: contract them here
for fg in filegroups.itervalues():
for lst in FILEGROUP_LISTS:
fg[lst] = uniquify(fg.get(lst, []))
for tgt in dictionary['targets']:
for lst in FILEGROUP_LISTS:
tgt[lst] = tgt.get(lst, [])
tgt['own_%s' % lst] = list(tgt[lst])
for lib in libs + targets:
assert 'plugins' not in lib
plugins = []
for lst in FILEGROUP_LISTS:
vals = lib.get(lst, [])
lib[lst] = list(vals)
lib['own_%s' % lst] = list(vals)
for fg_name in lib.get('filegroups', []):
fg = filegroups[fg_name]
for plugin in fg['plugins']:
if plugin not in plugins:
plugins.append(plugin)
for lst in FILEGROUP_LISTS:
vals = lib.get(lst, [])
vals.extend(fg.get(lst, []))
lib[lst] = vals
lib['plugins'] = plugins
if lib.get('generate_plugin_registry', False):
lib['src'].append('src/core/plugin_registry/%s_plugin_registry.cc' %
lib['name'])
for lst in FILEGROUP_LISTS:
lib[lst] = uniquify(lib.get(lst, []))
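
The 'uses' expansion above is a worklist: a filegroup is pushed back until everything it uses has already been expanded, and the skips counter trips the assert on a dependency cycle. A condensed, runnable version of just that loop (filegroup data invented; only 'src' is merged here):

def expand(filegroups_list):
    filegroups = {}
    todo = list(filegroups_list)
    skips = 0
    while todo:
        assert skips != len(todo), 'infinite loop in filegroup uses clauses'
        cur = todo.pop(0)
        if any(u not in filegroups for u in cur.get('uses', [])):
            # A dependency is not expanded yet: defer and retry later.
            skips += 1
            todo.append(cur)
            continue
        skips = 0
        for u in cur.get('uses', []):
            cur['src'] = cur.get('src', []) + filegroups[u].get('src', [])
        filegroups[cur['name']] = cur
    return filegroups

fgs = [{'name': 'census', 'uses': ['grpc_base'], 'src': ['census.cc']},
       {'name': 'grpc_base', 'src': ['init.cc']}]
print(expand(fgs)['census']['src'])  # -> ['census.cc', 'init.cc']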

tools/buildgen/plugins/expand_version.py
@@ -11,7 +11,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Buildgen package version plugin
This parses the list of targets from the yaml build file, and creates
@@ -19,84 +18,90 @@ a custom version string for each language's package.
"""
import re
LANGUAGES = [
'core',
'cpp',
'csharp',
'objc',
'php',
'python',
'ruby',
]
'core',
'cpp',
'csharp',
'objc',
'php',
'python',
'ruby',
]
class Version:
def __init__(self, s):
self.tag = None
if '-' in s:
s, self.tag = s.split('-')
self.major, self.minor, self.patch = [int(x) for x in s.split('.')]
def __init__(self, s):
self.tag = None
if '-' in s:
s, self.tag = s.split('-')
self.major, self.minor, self.patch = [int(x) for x in s.split('.')]
def __str__(self):
"""Version string in a somewhat idiomatic style for most languages"""
s = '%d.%d.%d' % (self.major, self.minor, self.patch)
if self.tag:
s += '-%s' % self.tag
return s
def __str__(self):
"""Version string in a somewhat idiomatic style for most languages"""
s = '%d.%d.%d' % (self.major, self.minor, self.patch)
if self.tag:
s += '-%s' % self.tag
return s
def pep440(self):
"""Version string in Python PEP440 style"""
s = '%d.%d.%d' % (self.major, self.minor, self.patch)
if self.tag:
# we need to translate from grpc version tags to pep440 version
# tags; this code is likely to be a little ad-hoc
if self.tag == 'dev':
s += '.dev0'
elif len(self.tag) >= 3 and self.tag[0:3] == 'pre':
s += 'rc%d' % int(self.tag[3:])
else:
raise Exception(
'Don\'t know how to translate version tag "%s" to pep440' %
self.tag)
return s
def pep440(self):
"""Version string in Python PEP440 style"""
s = '%d.%d.%d' % (self.major, self.minor, self.patch)
if self.tag:
# we need to translate from grpc version tags to pep440 version
# tags; this code is likely to be a little ad-hoc
if self.tag == 'dev':
s += '.dev0'
elif len(self.tag) >= 3 and self.tag[0:3] == 'pre':
s += 'rc%d' % int(self.tag[3:])
else:
raise Exception('Don\'t know how to translate version tag "%s" to pep440' % self.tag)
return s
def ruby(self):
"""Version string in Ruby style"""
if self.tag:
return '%d.%d.%d.%s' % (self.major, self.minor, self.patch,
self.tag)
else:
return '%d.%d.%d' % (self.major, self.minor, self.patch)
def ruby(self):
"""Version string in Ruby style"""
if self.tag:
return '%d.%d.%d.%s' % (self.major, self.minor, self.patch, self.tag)
else:
return '%d.%d.%d' % (self.major, self.minor, self.patch)
def php(self):
"""Version string for PHP PECL package"""
s = '%d.%d.%d' % (self.major, self.minor, self.patch)
if self.tag:
if self.tag == 'dev':
s += 'dev'
elif len(self.tag) >= 3 and self.tag[0:3] == 'pre':
s += 'RC%d' % int(self.tag[3:])
else:
raise Exception(
'Don\'t know how to translate version tag "%s" to PECL version'
% self.tag)
return s
def php(self):
"""Version string for PHP PECL package"""
s = '%d.%d.%d' % (self.major, self.minor, self.patch)
if self.tag:
if self.tag == 'dev':
s += 'dev'
elif len(self.tag) >= 3 and self.tag[0:3] == 'pre':
s += 'RC%d' % int(self.tag[3:])
else:
raise Exception('Don\'t know how to translate version tag "%s" to PECL version' % self.tag)
return s
def php_composer(self):
"""Version string for PHP Composer package"""
return '%d.%d.%d' % (self.major, self.minor, self.patch)
def php_composer(self):
"""Version string for PHP Composer package"""
return '%d.%d.%d' % (self.major, self.minor, self.patch)
def mako_plugin(dictionary):
"""Expand version numbers:
"""Expand version numbers:
- for each language, ensure there's a language_version tag in
settings (defaulting to the master version tag)
- expand version strings to major, minor, patch, and tag
"""
settings = dictionary['settings']
master_version = Version(settings['version'])
settings['version'] = master_version
for language in LANGUAGES:
version_tag = '%s_version' % language
if version_tag in settings:
settings[version_tag] = Version(settings[version_tag])
else:
settings[version_tag] = master_version
settings = dictionary['settings']
master_version = Version(settings['version'])
settings['version'] = master_version
for language in LANGUAGES:
version_tag = '%s_version' % language
if version_tag in settings:
settings[version_tag] = Version(settings[version_tag])
else:
settings[version_tag] = master_version
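
Concretely, the translations above map gRPC's '-dev' and '-preN' version tags onto each package manager's convention. Assuming the Version class defined above:

v = Version('1.8.0-pre2')
print(str(v))            # -> 1.8.0-pre2
print(v.pep440())        # -> 1.8.0rc2   (PyPI)
print(v.ruby())          # -> 1.8.0.pre2 (RubyGems)
print(v.php())           # -> 1.8.0RC2   (PECL)
print(v.php_composer())  # -> 1.8.0      (Composer: tag dropped)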

tools/buildgen/plugins/generate_vsprojects.py
@@ -11,7 +11,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Buildgen vsprojects plugin.
This parses the list of libraries, and generates globals "vsprojects"
@@ -19,62 +18,67 @@ and "vsproject_dict", to be used by the visual studio generators.
"""
import hashlib
import re
def mako_plugin(dictionary):
"""The exported plugin code for generate_vsprojeccts
"""The exported plugin code for generate_vsprojeccts
We want to help the work of the visual studio generators.
"""
libs = dictionary.get('libs', [])
targets = dictionary.get('targets', [])
for lib in libs:
lib['is_library'] = True
for target in targets:
target['is_library'] = False
projects = []
projects.extend(libs)
projects.extend(targets)
for target in projects:
if 'build' in target and target['build'] == 'test':
default_test_dir = 'test'
else:
default_test_dir = '.'
if 'vs_config_type' not in target:
if 'build' in target and target['build'] == 'test':
target['vs_config_type'] = 'Application'
else:
target['vs_config_type'] = 'StaticLibrary'
if 'vs_packages' not in target:
target['vs_packages'] = []
if 'vs_props' not in target:
target['vs_props'] = []
target['vs_proj_dir'] = target.get('vs_proj_dir', default_test_dir)
if target.get('vs_project_guid', None) is None and 'windows' in target.get('platforms', ['windows']):
name = target['name']
guid = re.sub('(........)(....)(....)(....)(.*)',
r'{\1-\2-\3-\4-\5}',
hashlib.md5(name).hexdigest())
target['vs_project_guid'] = guid.upper()
# Exclude projects without a visual project guid, such as the tests.
projects = [project for project in projects
if project.get('vs_project_guid', None)]
projects = [project for project in projects
if project['language'] != 'c++' or project['build'] == 'all' or project['build'] == 'protoc' or (project['language'] == 'c++' and (project['build'] == 'test' or project['build'] == 'private'))]
project_dict = dict([(p['name'], p) for p in projects])
packages = dictionary.get('vspackages', [])
packages_dict = dict([(p['name'], p) for p in packages])
dictionary['vsprojects'] = projects
dictionary['vsproject_dict'] = project_dict
dictionary['vspackages_dict'] = packages_dict
libs = dictionary.get('libs', [])
targets = dictionary.get('targets', [])
for lib in libs:
lib['is_library'] = True
for target in targets:
target['is_library'] = False
projects = []
projects.extend(libs)
projects.extend(targets)
for target in projects:
if 'build' in target and target['build'] == 'test':
default_test_dir = 'test'
else:
default_test_dir = '.'
if 'vs_config_type' not in target:
if 'build' in target and target['build'] == 'test':
target['vs_config_type'] = 'Application'
else:
target['vs_config_type'] = 'StaticLibrary'
if 'vs_packages' not in target:
target['vs_packages'] = []
if 'vs_props' not in target:
target['vs_props'] = []
target['vs_proj_dir'] = target.get('vs_proj_dir', default_test_dir)
if target.get('vs_project_guid',
None) is None and 'windows' in target.get('platforms',
['windows']):
name = target['name']
guid = re.sub('(........)(....)(....)(....)(.*)',
r'{\1-\2-\3-\4-\5}', hashlib.md5(name).hexdigest())
target['vs_project_guid'] = guid.upper()
# Exclude projects without a visual project guid, such as the tests.
projects = [
project for project in projects if project.get('vs_project_guid', None)
]
projects = [
project for project in projects
if project['language'] != 'c++' or project['build'] == 'all' or project[
'build'] == 'protoc' or (project['language'] == 'c++' and (project[
'build'] == 'test' or project['build'] == 'private'))
]
project_dict = dict([(p['name'], p) for p in projects])
packages = dictionary.get('vspackages', [])
packages_dict = dict([(p['name'], p) for p in packages])
dictionary['vsprojects'] = projects
dictionary['vsproject_dict'] = project_dict
dictionary['vspackages_dict'] = packages_dict
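
The GUID logic above simply reshapes the MD5 digest of the project name into registry format, so the same project always gets the same GUID across regenerations. Standalone sketch (the .encode() call is an addition for Python 3 compatibility; the plugin itself runs under Python 2):

import hashlib
import re

name = 'gpr'
digest = hashlib.md5(name.encode('utf-8')).hexdigest()  # 32 hex chars
guid = re.sub('(........)(....)(....)(....)(.*)',
              r'{\1-\2-\3-\4-\5}', digest)
print(guid.upper())  # stable 8-4-4-4-12 GUID for 'gpr'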

tools/buildgen/plugins/list_api.py
@@ -21,44 +21,47 @@ import re
import sys
import yaml
_RE_API = r'(?:GPRAPI|GRPCAPI|CENSUSAPI)([^;]*);'
def list_c_apis(filenames):
for filename in filenames:
with open(filename, 'r') as f:
text = f.read()
for m in re.finditer(_RE_API, text):
api_declaration = re.sub('[ \r\n\t]+', ' ', m.group(1))
type_and_name, args_and_close = api_declaration.split('(', 1)
args = args_and_close[:args_and_close.rfind(')')].strip()
last_space = type_and_name.rfind(' ')
last_star = type_and_name.rfind('*')
type_end = max(last_space, last_star)
return_type = type_and_name[0:type_end+1].strip()
name = type_and_name[type_end+1:].strip()
yield {'return_type': return_type, 'name': name, 'arguments': args, 'header': filename}
for filename in filenames:
with open(filename, 'r') as f:
text = f.read()
for m in re.finditer(_RE_API, text):
api_declaration = re.sub('[ \r\n\t]+', ' ', m.group(1))
type_and_name, args_and_close = api_declaration.split('(', 1)
args = args_and_close[:args_and_close.rfind(')')].strip()
last_space = type_and_name.rfind(' ')
last_star = type_and_name.rfind('*')
type_end = max(last_space, last_star)
return_type = type_and_name[0:type_end + 1].strip()
name = type_and_name[type_end + 1:].strip()
yield {
'return_type': return_type,
'name': name,
'arguments': args,
'header': filename
}
def headers_under(directory):
for root, dirnames, filenames in os.walk(directory):
for filename in fnmatch.filter(filenames, '*.h'):
yield os.path.join(root, filename)
for root, dirnames, filenames in os.walk(directory):
for filename in fnmatch.filter(filenames, '*.h'):
yield os.path.join(root, filename)
def mako_plugin(dictionary):
apis = []
headers = []
apis = []
headers = []
for lib in dictionary['libs']:
if lib['name'] in ['grpc', 'gpr']:
headers.extend(lib['public_headers'])
for lib in dictionary['libs']:
if lib['name'] in ['grpc', 'gpr']:
headers.extend(lib['public_headers'])
apis.extend(list_c_apis(sorted(set(headers))))
dictionary['c_apis'] = apis
apis.extend(list_c_apis(sorted(set(headers))))
dictionary['c_apis'] = apis
if __name__ == '__main__':
print yaml.dump([api for api in list_c_apis(headers_under('include/grpc'))])
print yaml.dump([api for api in list_c_apis(headers_under('include/grpc'))])
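
A quick trace of the declaration parsing above on a one-line header (the input string is constructed for illustration):

import re

_RE_API = r'(?:GPRAPI|GRPCAPI|CENSUSAPI)([^;]*);'
text = 'GRPCAPI grpc_channel *grpc_insecure_channel_create(const char *target);'
m = re.search(_RE_API, text)
api_declaration = re.sub('[ \r\n\t]+', ' ', m.group(1))
type_and_name, args_and_close = api_declaration.split('(', 1)
args = args_and_close[:args_and_close.rfind(')')].strip()
type_end = max(type_and_name.rfind(' '), type_and_name.rfind('*'))
print(type_and_name[:type_end + 1].strip())  # -> grpc_channel *
print(type_and_name[type_end + 1:].strip())  # -> grpc_insecure_channel_create
print(args)                                  # -> const char *target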

tools/buildgen/plugins/list_protos.py
@@ -11,7 +11,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Buildgen .proto files list plugin.
This parses the list of targets from the yaml build file, and creates
@@ -19,12 +18,11 @@ a list called "protos" that contains all of the proto file names.
"""
import re
def mako_plugin(dictionary):
"""The exported plugin code for list_protos.
"""The exported plugin code for list_protos.
Some projects generators may want to get the full list of unique .proto files
that are being included in a project. This code extracts all files referenced
@@ -33,23 +31,23 @@ def mako_plugin(dictionary):
"""
libs = dictionary.get('libs', [])
targets = dictionary.get('targets', [])
libs = dictionary.get('libs', [])
targets = dictionary.get('targets', [])
proto_re = re.compile('(.*)\\.proto')
proto_re = re.compile('(.*)\\.proto')
protos = set()
for lib in libs:
for src in lib.get('src', []):
m = proto_re.match(src)
if m:
protos.add(m.group(1))
for tgt in targets:
for src in tgt.get('src', []):
m = proto_re.match(src)
if m:
protos.add(m.group(1))
protos = set()
for lib in libs:
for src in lib.get('src', []):
m = proto_re.match(src)
if m:
protos.add(m.group(1))
for tgt in targets:
for src in tgt.get('src', []):
m = proto_re.match(src)
if m:
protos.add(m.group(1))
protos = sorted(protos)
protos = sorted(protos)
dictionary['protos'] = protos
dictionary['protos'] = protos
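
The regex above just strips the .proto extension from matching sources, e.g.:

import re

proto_re = re.compile('(.*)\\.proto')
m = proto_re.match('src/proto/grpc/testing/echo.proto')
print(m.group(1))  # -> src/proto/grpc/testing/echo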

tools/buildgen/plugins/make_fuzzer_tests.py
@@ -11,35 +11,37 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create tests for each fuzzer"""
import copy
import glob
def mako_plugin(dictionary):
targets = dictionary['targets']
tests = dictionary['tests']
for tgt in targets:
if tgt['build'] == 'fuzzer':
new_target = copy.deepcopy(tgt)
new_target['build'] = 'test'
new_target['name'] += '_one_entry'
new_target['run'] = False
new_target['src'].append('test/core/util/one_corpus_entry_fuzzer.cc')
new_target['own_src'].append('test/core/util/one_corpus_entry_fuzzer.cc')
targets.append(new_target)
for corpus in new_target['corpus_dirs']:
for fn in sorted(glob.glob('%s/*' % corpus)):
tests.append({
'name': new_target['name'],
'args': [fn],
'exclude_iomgrs': ['uv'],
'exclude_configs': ['tsan'],
'uses_polling': False,
'platforms': ['mac', 'linux'],
'ci_platforms': ['linux'],
'flaky': False,
'language': 'c',
'cpu_cost': 0.1,
})
targets = dictionary['targets']
tests = dictionary['tests']
for tgt in targets:
if tgt['build'] == 'fuzzer':
new_target = copy.deepcopy(tgt)
new_target['build'] = 'test'
new_target['name'] += '_one_entry'
new_target['run'] = False
new_target['src'].append(
'test/core/util/one_corpus_entry_fuzzer.cc')
new_target['own_src'].append(
'test/core/util/one_corpus_entry_fuzzer.cc')
targets.append(new_target)
for corpus in new_target['corpus_dirs']:
for fn in sorted(glob.glob('%s/*' % corpus)):
tests.append({
'name': new_target['name'],
'args': [fn],
'exclude_iomgrs': ['uv'],
'exclude_configs': ['tsan'],
'uses_polling': False,
'platforms': ['mac', 'linux'],
'ci_platforms': ['linux'],
'flaky': False,
'language': 'c',
'cpu_cost': 0.1,
})

tools/buildgen/plugins/transitive_dependencies.py
@@ -11,7 +11,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Buildgen transitive dependencies
This takes the list of libs, node_modules, and targets from our
@@ -20,35 +19,40 @@ of the list of dependencies.
"""
def get_lib(libs, name):
try:
return next(lib for lib in libs if lib['name']==name)
except StopIteration:
return None
try:
return next(lib for lib in libs if lib['name'] == name)
except StopIteration:
return None
def transitive_deps(lib, libs):
if lib is not None and 'deps' in lib:
# Recursively call transitive_deps on each dependency, and take the union
return set.union(set(lib['deps']),
*[set(transitive_deps(get_lib(libs, dep), libs))
for dep in lib['deps']])
else:
return set()
if lib is not None and 'deps' in lib:
# Recursively call transitive_deps on each dependency, and take the union
return set.union(
set(lib['deps']), *[
set(transitive_deps(get_lib(libs, dep), libs))
for dep in lib['deps']
])
else:
return set()
def mako_plugin(dictionary):
"""The exported plugin code for transitive_dependencies.
"""The exported plugin code for transitive_dependencies.
Iterate over each list and check each item for a deps list. We add a
transitive_deps property to each with the transitive closure of those
dependency lists.
"""
libs = dictionary.get('libs')
libs = dictionary.get('libs')
for target_name, target_list in dictionary.items():
for target in target_list:
if isinstance(target, dict) and 'deps' in target:
target['transitive_deps'] = transitive_deps(target, libs)
for target_name, target_list in dictionary.items():
for target in target_list:
if isinstance(target, dict) and 'deps' in target:
target['transitive_deps'] = transitive_deps(target, libs)
python_dependencies = dictionary.get('python_dependencies')
python_dependencies['transitive_deps'] = (
transitive_deps(python_dependencies, libs))
python_dependencies = dictionary.get('python_dependencies')
python_dependencies['transitive_deps'] = (
transitive_deps(python_dependencies, libs))
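
A toy run of the closure above (library names invented; get_lib and transitive_deps as defined in the plugin):

libs = [{'name': 'a', 'deps': ['b']},
        {'name': 'b', 'deps': ['c']},
        {'name': 'c'}]
print(sorted(transitive_deps(get_lib(libs, 'a'), libs)))  # -> ['b', 'c']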

tools/distrib/yapf_code.sh
@@ -20,6 +20,7 @@ cd "$(dirname "${0}")/../.."
DIRS=(
'src/python'
'tools/buildgen'
)
EXCLUSIONS=(
'grpcio/grpc_*.py'
