yapf tools/distrib

Ref: pull/13719/head
Author: ncteisen, 7 years ago
Parent: 5898847ddf
Commit: 7a2be20a8a
Changed files (17):

  1. templates/tools/distrib/python/grpcio_tools/grpc_version.py.template (2 lines changed)
  2. tools/codegen/core/gen_stats_data.py (10 lines changed)
  3. tools/distrib/c-ish/check_documentation.py (61 lines changed)
  4. tools/distrib/check_copyright.py (194 lines changed)
  5. tools/distrib/check_include_guards.py (245 lines changed)
  6. tools/distrib/python/check_grpcio_tools.py (11 lines changed)
  7. tools/distrib/python/docgen.py (135 lines changed)
  8. tools/distrib/python/grpcio_tools/grpc_tools/__init__.py (1 line changed)
  9. tools/distrib/python/grpcio_tools/grpc_tools/command.py (63 lines changed)
  10. tools/distrib/python/grpcio_tools/grpc_tools/protoc.py (12 lines changed)
  11. tools/distrib/python/grpcio_tools/grpc_version.py (2 lines changed)
  12. tools/distrib/python/grpcio_tools/protoc_lib_deps.py (170 lines changed)
  13. tools/distrib/python/grpcio_tools/setup.py (251 lines changed)
  14. tools/distrib/python/make_grpcio_tools.py (149 lines changed)
  15. tools/distrib/python/submit.py (67 lines changed)
  16. tools/distrib/run_clang_tidy.py (44 lines changed)
  17. tools/distrib/yapf_code.sh (1 line changed)
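
The commit title indicates these files were rewritten mechanically by yapf rather than edited by hand. As a minimal sketch of that kind of pass (not the repository's actual tools/distrib/yapf_code.sh, whose body isn't shown here), assuming yapf is installed and a style file is discoverable from the working directory:

import os
from yapf.yapflib.yapf_api import FormatFile

# Reformat every Python file under tools/distrib in place. With
# in_place=True, FormatFile rewrites the file itself; the third element
# of its return tuple reports whether anything actually changed.
for root, _, files in os.walk('tools/distrib'):
    for name in files:
        if name.endswith('.py'):
            FormatFile(os.path.join(root, name), in_place=True)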

@@ -16,4 +16,4 @@
# AUTO-GENERATED FROM `$REPO_ROOT/templates/tools/distrib/python/grpcio_tools/grpc_version.py.template`!!!
VERSION='${settings.python_version.pep440()}'
VERSION = '${settings.python_version.pep440()}'

@@ -154,8 +154,8 @@ def gen_bucket_code(histogram):
if done_trivial:
first_nontrivial_code = dbl2u64(first_nontrivial)
code_bounds = [dbl2u64(x) - first_nontrivial_code for x in bounds]
shift_data = find_ideal_shift(code_bounds[first_nontrivial:], 256 *
histogram.buckets)
shift_data = find_ideal_shift(code_bounds[first_nontrivial:],
256 * histogram.buckets)
#print first_nontrivial, shift_data, bounds
#if shift_data is not None: print [hex(x >> shift_data[0]) for x in code_bounds[first_nontrivial:]]
code = 'value = GPR_CLAMP(value, 0, %d);\n' % histogram.max
@@ -352,9 +352,9 @@ with open('src/core/lib/debug/stats_data.cc', 'w') as C:
len(inst_map['Histogram']), ','.join('grpc_stats_table_%d' % x
for x in histo_bucket_boundaries))
print >> C, "void (*const grpc_stats_inc_histogram[%d])(int x) = {%s};" % (
len(inst_map['Histogram']),
','.join('grpc_stats_inc_%s' % histogram.name.lower()
for histogram in inst_map['Histogram']))
len(inst_map['Histogram']), ','.join(
'grpc_stats_inc_%s' % histogram.name.lower()
for histogram in inst_map['Histogram']))
# patch qps_test bigquery schema
RECORD_EXPLICIT_PERCENTILES = [50, 95, 99]

@@ -22,24 +22,15 @@ import sys
# where do we run
_TARGET_DIRS = [
'include/grpc',
'include/grpc++',
'src/core',
'src/cpp',
'test/core',
'test/cpp'
'include/grpc', 'include/grpc++', 'src/core', 'src/cpp', 'test/core',
'test/cpp'
]
# which file extensions do we care about
_INTERESTING_EXTENSIONS = [
'.c',
'.h',
'.cc'
]
_INTERESTING_EXTENSIONS = ['.c', '.h', '.cc']
# find our home
_ROOT = os.path.abspath(
os.path.join(os.path.dirname(sys.argv[0]), '../../..'))
_ROOT = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '../../..'))
os.chdir(_ROOT)
errors = 0
@@ -47,30 +38,30 @@ errors = 0
# walk directories, find things
printed_banner = False
for target_dir in _TARGET_DIRS:
for root, dirs, filenames in os.walk(target_dir):
if 'README.md' not in filenames:
if not printed_banner:
print 'Missing README.md'
print '================='
printed_banner = True
print root
errors += 1
for root, dirs, filenames in os.walk(target_dir):
if 'README.md' not in filenames:
if not printed_banner:
print 'Missing README.md'
print '================='
printed_banner = True
print root
errors += 1
if printed_banner: print
printed_banner = False
for target_dir in _TARGET_DIRS:
for root, dirs, filenames in os.walk(target_dir):
for filename in filenames:
if os.path.splitext(filename)[1] not in _INTERESTING_EXTENSIONS:
continue
path = os.path.join(root, filename)
with open(path) as f:
contents = f.read()
if '\\file' not in contents:
if not printed_banner:
print 'Missing \\file comment'
print '======================'
printed_banner = True
print path
errors += 1
for root, dirs, filenames in os.walk(target_dir):
for filename in filenames:
if os.path.splitext(filename)[1] not in _INTERESTING_EXTENSIONS:
continue
path = os.path.join(root, filename)
with open(path) as f:
contents = f.read()
if '\\file' not in contents:
if not printed_banner:
print 'Missing \\file comment'
print '======================'
printed_banner = True
print path
errors += 1
assert errors == 0, 'error count = %d' % errors

@@ -22,149 +22,135 @@ import sys
import subprocess
# find our home
ROOT = os.path.abspath(
os.path.join(os.path.dirname(sys.argv[0]), '../..'))
ROOT = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '../..'))
os.chdir(ROOT)
# parse command line
argp = argparse.ArgumentParser(description='copyright checker')
argp.add_argument('-o', '--output',
default='details',
choices=['list', 'details'])
argp.add_argument('-s', '--skips',
default=0,
action='store_const',
const=1)
argp.add_argument('-a', '--ancient',
default=0,
action='store_const',
const=1)
argp.add_argument('--precommit',
default=False,
action='store_true')
argp.add_argument(
'-o', '--output', default='details', choices=['list', 'details'])
argp.add_argument('-s', '--skips', default=0, action='store_const', const=1)
argp.add_argument('-a', '--ancient', default=0, action='store_const', const=1)
argp.add_argument('--precommit', default=False, action='store_true')
args = argp.parse_args()
# open the license text
with open('NOTICE.txt') as f:
LICENSE_NOTICE = f.read().splitlines()
LICENSE_NOTICE = f.read().splitlines()
# license format by file extension
# key is the file extension, value is a format string
# that given a line of license text, returns what should
# be in the file
LICENSE_PREFIX = {
'.bat': r'@rem\s*',
'.c': r'\s*(?://|\*)\s*',
'.cc': r'\s*(?://|\*)\s*',
'.h': r'\s*(?://|\*)\s*',
'.m': r'\s*\*\s*',
'.php': r'\s*\*\s*',
'.js': r'\s*\*\s*',
'.py': r'#\s*',
'.pyx': r'#\s*',
'.pxd': r'#\s*',
'.pxi': r'#\s*',
'.rb': r'#\s*',
'.sh': r'#\s*',
'.proto': r'//\s*',
'.cs': r'//\s*',
'.mak': r'#\s*',
'Makefile': r'#\s*',
'Dockerfile': r'#\s*',
'BUILD': r'#\s*',
'.bat': r'@rem\s*',
'.c': r'\s*(?://|\*)\s*',
'.cc': r'\s*(?://|\*)\s*',
'.h': r'\s*(?://|\*)\s*',
'.m': r'\s*\*\s*',
'.php': r'\s*\*\s*',
'.js': r'\s*\*\s*',
'.py': r'#\s*',
'.pyx': r'#\s*',
'.pxd': r'#\s*',
'.pxi': r'#\s*',
'.rb': r'#\s*',
'.sh': r'#\s*',
'.proto': r'//\s*',
'.cs': r'//\s*',
'.mak': r'#\s*',
'Makefile': r'#\s*',
'Dockerfile': r'#\s*',
'BUILD': r'#\s*',
}
_EXEMPT = frozenset((
# Generated protocol compiler output.
'examples/python/helloworld/helloworld_pb2.py',
'examples/python/helloworld/helloworld_pb2_grpc.py',
'examples/python/multiplex/helloworld_pb2.py',
'examples/python/multiplex/helloworld_pb2_grpc.py',
'examples/python/multiplex/route_guide_pb2.py',
'examples/python/multiplex/route_guide_pb2_grpc.py',
'examples/python/route_guide/route_guide_pb2.py',
'examples/python/route_guide/route_guide_pb2_grpc.py',
'src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/load_balancer.pb.h',
'src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/load_balancer.pb.c',
'src/cpp/server/health/health.pb.h',
'src/cpp/server/health/health.pb.c',
# An older file originally from outside gRPC.
'src/php/tests/bootstrap.php',
# census.proto copied from github
'tools/grpcz/census.proto',
# status.proto copied from googleapis
'src/proto/grpc/status/status.proto',
))
# Generated protocol compiler output.
'examples/python/helloworld/helloworld_pb2.py',
'examples/python/helloworld/helloworld_pb2_grpc.py',
'examples/python/multiplex/helloworld_pb2.py',
'examples/python/multiplex/helloworld_pb2_grpc.py',
'examples/python/multiplex/route_guide_pb2.py',
'examples/python/multiplex/route_guide_pb2_grpc.py',
'examples/python/route_guide/route_guide_pb2.py',
'examples/python/route_guide/route_guide_pb2_grpc.py',
'src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/load_balancer.pb.h',
'src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/load_balancer.pb.c',
'src/cpp/server/health/health.pb.h',
'src/cpp/server/health/health.pb.c',
# An older file originally from outside gRPC.
'src/php/tests/bootstrap.php',
# census.proto copied from github
'tools/grpcz/census.proto',
# status.proto copied from googleapis
'src/proto/grpc/status/status.proto',))
RE_YEAR = r'Copyright (?P<first_year>[0-9]+\-)?(?P<last_year>[0-9]+) gRPC authors.'
RE_LICENSE = dict(
(k, r'\n'.join(
LICENSE_PREFIX[k] +
(RE_YEAR if re.search(RE_YEAR, line) else re.escape(line))
for line in LICENSE_NOTICE))
for k, v in LICENSE_PREFIX.iteritems())
RE_LICENSE = dict((k, r'\n'.join(
LICENSE_PREFIX[k] + (RE_YEAR
if re.search(RE_YEAR, line) else re.escape(line))
for line in LICENSE_NOTICE)) for k, v in LICENSE_PREFIX.iteritems())
if args.precommit:
FILE_LIST_COMMAND = 'git status -z | grep -Poz \'(?<=^[MARC][MARCD ] )[^\s]+\''
FILE_LIST_COMMAND = 'git status -z | grep -Poz \'(?<=^[MARC][MARCD ] )[^\s]+\''
else:
FILE_LIST_COMMAND = 'git ls-tree -r --name-only -r HEAD | ' \
'grep -v ^third_party/ |' \
'grep -v "\(ares_config.h\|ares_build.h\)"'
FILE_LIST_COMMAND = 'git ls-tree -r --name-only -r HEAD | ' \
'grep -v ^third_party/ |' \
'grep -v "\(ares_config.h\|ares_build.h\)"'
def load(name):
with open(name) as f:
return f.read()
with open(name) as f:
return f.read()
def save(name, text):
with open(name, 'w') as f:
f.write(text)
with open(name, 'w') as f:
f.write(text)
assert(re.search(RE_LICENSE['Makefile'], load('Makefile')))
assert (re.search(RE_LICENSE['Makefile'], load('Makefile')))
def log(cond, why, filename):
if not cond: return
if args.output == 'details':
print '%s: %s' % (why, filename)
else:
print filename
if not cond: return
if args.output == 'details':
print '%s: %s' % (why, filename)
else:
print filename
# scan files, validate the text
ok = True
filename_list = []
try:
filename_list = subprocess.check_output(FILE_LIST_COMMAND,
shell=True).splitlines()
filename_list = subprocess.check_output(
FILE_LIST_COMMAND, shell=True).splitlines()
except subprocess.CalledProcessError:
sys.exit(0)
sys.exit(0)
for filename in filename_list:
if filename in _EXEMPT:
continue
ext = os.path.splitext(filename)[1]
base = os.path.basename(filename)
if ext in RE_LICENSE:
re_license = RE_LICENSE[ext]
elif base in RE_LICENSE:
re_license = RE_LICENSE[base]
else:
log(args.skips, 'skip', filename)
continue
try:
text = load(filename)
except:
continue
m = re.search(re_license, text)
if m:
pass
elif 'DO NOT EDIT' not in text and filename != 'src/boringssl/err_data.c':
log(1, 'copyright missing', filename)
ok = False
if filename in _EXEMPT:
continue
ext = os.path.splitext(filename)[1]
base = os.path.basename(filename)
if ext in RE_LICENSE:
re_license = RE_LICENSE[ext]
elif base in RE_LICENSE:
re_license = RE_LICENSE[base]
else:
log(args.skips, 'skip', filename)
continue
try:
text = load(filename)
except:
continue
m = re.search(re_license, text)
if m:
pass
elif 'DO NOT EDIT' not in text and filename != 'src/boringssl/err_data.c':
log(1, 'copyright missing', filename)
ok = False
sys.exit(0 if ok else 1)

@@ -23,136 +23,136 @@ import subprocess
def build_valid_guard(fpath):
prefix = 'GRPC_' if not fpath.startswith('include/') else ''
return prefix + '_'.join(fpath.replace('++', 'XX').replace('.', '_').upper().split('/')[1:])
prefix = 'GRPC_' if not fpath.startswith('include/') else ''
return prefix + '_'.join(
fpath.replace('++', 'XX').replace('.', '_').upper().split('/')[1:])
def load(fpath):
with open(fpath, 'r') as f:
return f.read()
with open(fpath, 'r') as f:
return f.read()
def save(fpath, contents):
with open(fpath, 'w') as f:
f.write(contents)
with open(fpath, 'w') as f:
f.write(contents)
class GuardValidator(object):
def __init__(self):
self.ifndef_re = re.compile(r'#ifndef ([A-Z][A-Z_1-9]*)')
self.define_re = re.compile(r'#define ([A-Z][A-Z_1-9]*)')
self.endif_c_re = re.compile(r'#endif /\* ([A-Z][A-Z_1-9]*) (?:\\ *\n *)?\*/')
self.endif_cpp_re = re.compile(r'#endif // ([A-Z][A-Z_1-9]*)')
self.failed = False
def fail(self, fpath, regexp, fcontents, match_txt, correct, fix):
cpp_header = 'grpc++' in fpath
self.failed = True
invalid_guards_msg_template = (
'{0}: Missing preprocessor guards (RE {1}). '
'Please wrap your code around the following guards:\n'
'#ifndef {2}\n'
'#define {2}\n'
'...\n'
'... epic code ...\n'
'...\n') + ('#endif // {2}' if cpp_header else '#endif /* {2} */')
if not match_txt:
print invalid_guards_msg_template.format(fpath, regexp.pattern,
build_valid_guard(fpath))
return fcontents
print ('{}: Wrong preprocessor guards (RE {}):'
'\n\tFound {}, expected {}').format(
fpath, regexp.pattern, match_txt, correct)
if fix:
print 'Fixing {}...\n'.format(fpath)
fixed_fcontents = re.sub(match_txt, correct, fcontents)
if fixed_fcontents:
def __init__(self):
self.ifndef_re = re.compile(r'#ifndef ([A-Z][A-Z_1-9]*)')
self.define_re = re.compile(r'#define ([A-Z][A-Z_1-9]*)')
self.endif_c_re = re.compile(
r'#endif /\* ([A-Z][A-Z_1-9]*) (?:\\ *\n *)?\*/')
self.endif_cpp_re = re.compile(r'#endif // ([A-Z][A-Z_1-9]*)')
self.failed = False
return fixed_fcontents
else:
print
return fcontents
def check(self, fpath, fix):
cpp_header = 'grpc++' in fpath
valid_guard = build_valid_guard(fpath)
fcontents = load(fpath)
match = self.ifndef_re.search(fcontents)
if not match:
print 'something drastically wrong with: %s' % fpath
return False # failed
if match.lastindex is None:
# No ifndef. Request manual addition with hints
self.fail(fpath, match.re, match.string, '', '', False)
return False # failed
# Does the guard end with a '_H'?
running_guard = match.group(1)
if not running_guard.endswith('_H'):
fcontents = self.fail(fpath, match.re, match.string, match.group(1),
valid_guard, fix)
if fix: save(fpath, fcontents)
# Is it the expected one based on the file path?
if running_guard != valid_guard:
fcontents = self.fail(fpath, match.re, match.string, match.group(1),
valid_guard, fix)
if fix: save(fpath, fcontents)
# Is there a #define? Is it the same as the #ifndef one?
match = self.define_re.search(fcontents)
if match.lastindex is None:
# No define. Request manual addition with hints
self.fail(fpath, match.re, match.string, '', '', False)
return False # failed
# Is the #define guard the same as the #ifndef guard?
if match.group(1) != running_guard:
fcontents = self.fail(fpath, match.re, match.string, match.group(1),
valid_guard, fix)
if fix: save(fpath, fcontents)
# Is there a properly commented #endif?
endif_re = self.endif_cpp_re if cpp_header else self.endif_c_re
flines = fcontents.rstrip().splitlines()
match = endif_re.search('\n'.join(flines[-2:]))
if not match:
# No endif. Check if we have the last line as just '#endif' and if so
# replace it with a properly commented one.
if flines[-1] == '#endif':
flines[-1] = ('#endif' +
(' // {}\n'.format(valid_guard) if cpp_header
else ' /* {} */\n'.format(valid_guard)))
def fail(self, fpath, regexp, fcontents, match_txt, correct, fix):
cpp_header = 'grpc++' in fpath
self.failed = True
invalid_guards_msg_template = (
'{0}: Missing preprocessor guards (RE {1}). '
'Please wrap your code around the following guards:\n'
'#ifndef {2}\n'
'#define {2}\n'
'...\n'
'... epic code ...\n'
'...\n') + ('#endif // {2}' if cpp_header else '#endif /* {2} */')
if not match_txt:
print invalid_guards_msg_template.format(fpath, regexp.pattern,
build_valid_guard(fpath))
return fcontents
print('{}: Wrong preprocessor guards (RE {}):'
'\n\tFound {}, expected {}').format(fpath, regexp.pattern,
match_txt, correct)
if fix:
fcontents = '\n'.join(flines)
save(fpath, fcontents)
else:
# something else is wrong, bail out
self.fail(fpath, endif_re, flines[-1], '', '', False)
elif match.group(1) != running_guard:
# Is the #endif guard the same as the #ifndef and #define guards?
fcontents = self.fail(fpath, endif_re, fcontents, match.group(1),
valid_guard, fix)
if fix: save(fpath, fcontents)
return not self.failed # Did the check succeed? (ie, not failed)
print 'Fixing {}...\n'.format(fpath)
fixed_fcontents = re.sub(match_txt, correct, fcontents)
if fixed_fcontents:
self.failed = False
return fixed_fcontents
else:
print
return fcontents
def check(self, fpath, fix):
cpp_header = 'grpc++' in fpath
valid_guard = build_valid_guard(fpath)
fcontents = load(fpath)
match = self.ifndef_re.search(fcontents)
if not match:
print 'something drastically wrong with: %s' % fpath
return False # failed
if match.lastindex is None:
# No ifndef. Request manual addition with hints
self.fail(fpath, match.re, match.string, '', '', False)
return False # failed
# Does the guard end with a '_H'?
running_guard = match.group(1)
if not running_guard.endswith('_H'):
fcontents = self.fail(fpath, match.re, match.string,
match.group(1), valid_guard, fix)
if fix: save(fpath, fcontents)
# Is it the expected one based on the file path?
if running_guard != valid_guard:
fcontents = self.fail(fpath, match.re, match.string,
match.group(1), valid_guard, fix)
if fix: save(fpath, fcontents)
# Is there a #define? Is it the same as the #ifndef one?
match = self.define_re.search(fcontents)
if match.lastindex is None:
# No define. Request manual addition with hints
self.fail(fpath, match.re, match.string, '', '', False)
return False # failed
# Is the #define guard the same as the #ifndef guard?
if match.group(1) != running_guard:
fcontents = self.fail(fpath, match.re, match.string,
match.group(1), valid_guard, fix)
if fix: save(fpath, fcontents)
# Is there a properly commented #endif?
endif_re = self.endif_cpp_re if cpp_header else self.endif_c_re
flines = fcontents.rstrip().splitlines()
match = endif_re.search('\n'.join(flines[-2:]))
if not match:
# No endif. Check if we have the last line as just '#endif' and if so
# replace it with a properly commented one.
if flines[-1] == '#endif':
flines[-1] = (
'#endif' +
(' // {}\n'.format(valid_guard)
if cpp_header else ' /* {} */\n'.format(valid_guard)))
if fix:
fcontents = '\n'.join(flines)
save(fpath, fcontents)
else:
# something else is wrong, bail out
self.fail(fpath, endif_re, flines[-1], '', '', False)
elif match.group(1) != running_guard:
# Is the #endif guard the same as the #ifndef and #define guards?
fcontents = self.fail(fpath, endif_re, fcontents,
match.group(1), valid_guard, fix)
if fix: save(fpath, fcontents)
return not self.failed # Did the check succeed? (ie, not failed)
# find our home
ROOT = os.path.abspath(
os.path.join(os.path.dirname(sys.argv[0]), '../..'))
ROOT = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '../..'))
os.chdir(ROOT)
# parse command line
argp = argparse.ArgumentParser(description='include guard checker')
argp.add_argument('-f', '--fix',
default=False,
action='store_true');
argp.add_argument('--precommit',
default=False,
action='store_true')
argp.add_argument('-f', '--fix', default=False, action='store_true')
argp.add_argument('--precommit', default=False, action='store_true')
args = argp.parse_args()
KNOWN_BAD = set([
@@ -161,12 +161,11 @@ KNOWN_BAD = set([
'include/grpc++/ext/reflection.pb.h',
])
grep_filter = r"grep -E '^(include|src/core)/.*\.h$'"
if args.precommit:
git_command = 'git diff --name-only HEAD'
git_command = 'git diff --name-only HEAD'
else:
git_command = 'git ls-tree -r --name-only -r HEAD'
git_command = 'git ls-tree -r --name-only -r HEAD'
FILE_LIST_COMMAND = ' | '.join((git_command, grep_filter))
@@ -174,17 +173,17 @@ FILE_LIST_COMMAND = ' | '.join((git_command, grep_filter))
ok = True
filename_list = []
try:
filename_list = subprocess.check_output(FILE_LIST_COMMAND,
shell=True).splitlines()
# Filter out non-existent files (ie, file removed or renamed)
filename_list = (f for f in filename_list if os.path.isfile(f))
filename_list = subprocess.check_output(
FILE_LIST_COMMAND, shell=True).splitlines()
# Filter out non-existent files (ie, file removed or renamed)
filename_list = (f for f in filename_list if os.path.isfile(f))
except subprocess.CalledProcessError:
sys.exit(0)
sys.exit(0)
validator = GuardValidator()
for filename in filename_list:
if filename in KNOWN_BAD: continue
ok = ok and validator.check(filename, args.fix)
if filename in KNOWN_BAD: continue
ok = ok and validator.check(filename, args.fix)
sys.exit(0 if ok else 1)

@@ -23,12 +23,11 @@ Have you called tools/distrib/python/make_grpcio_tools.py since upgrading protob
submodule_commit_hash = _make.protobuf_submodule_commit_hash()
with open(_make.GRPC_PYTHON_PROTOC_LIB_DEPS, 'r') as _protoc_lib_deps_file:
content = _protoc_lib_deps_file.read().splitlines()
content = _protoc_lib_deps_file.read().splitlines()
testString = (_make.COMMIT_HASH_PREFIX +
submodule_commit_hash +
_make.COMMIT_HASH_SUFFIX)
testString = (
_make.COMMIT_HASH_PREFIX + submodule_commit_hash + _make.COMMIT_HASH_SUFFIX)
if testString not in content:
print(OUT_OF_DATE_MESSAGE.format(_make.GRPC_PYTHON_PROTOC_LIB_DEPS))
raise SystemExit(1)
print(OUT_OF_DATE_MESSAGE.format(_make.GRPC_PYTHON_PROTOC_LIB_DEPS))
raise SystemExit(1)

@@ -24,14 +24,20 @@ import sys
import tempfile
parser = argparse.ArgumentParser()
parser.add_argument('--config', metavar='c', type=str, nargs=1,
help='GRPC/GPR libraries build configuration',
default='opt')
parser.add_argument(
'--config',
metavar='c',
type=str,
nargs=1,
help='GRPC/GPR libraries build configuration',
default='opt')
parser.add_argument('--submit', action='store_true')
parser.add_argument('--gh-user', type=str, help='GitHub user to push as.')
parser.add_argument('--gh-repo-owner', type=str,
help=('Owner of the GitHub repository to be pushed; '
'defaults to --gh-user.'))
parser.add_argument(
'--gh-repo-owner',
type=str,
help=('Owner of the GitHub repository to be pushed; '
'defaults to --gh-user.'))
parser.add_argument('--doc-branch', type=str)
args = parser.parse_args()
@@ -59,60 +65,75 @@ environment.update({
})
subprocess_arguments_list = [
{'args': ['virtualenv', VIRTUALENV_DIR], 'env': environment},
{'args': [VIRTUALENV_PIP_PATH, 'install', '--upgrade', 'pip==9.0.1'],
'env': environment},
{'args': [VIRTUALENV_PIP_PATH, 'install', '-r', REQUIREMENTS_PATH],
'env': environment},
{'args': [VIRTUALENV_PYTHON_PATH, SETUP_PATH, 'build'], 'env': environment},
{'args': [VIRTUALENV_PYTHON_PATH, SETUP_PATH, 'doc'], 'env': environment},
{
'args': ['virtualenv', VIRTUALENV_DIR],
'env': environment
},
{
'args': [VIRTUALENV_PIP_PATH, 'install', '--upgrade', 'pip==9.0.1'],
'env': environment
},
{
'args': [VIRTUALENV_PIP_PATH, 'install', '-r', REQUIREMENTS_PATH],
'env': environment
},
{
'args': [VIRTUALENV_PYTHON_PATH, SETUP_PATH, 'build'],
'env': environment
},
{
'args': [VIRTUALENV_PYTHON_PATH, SETUP_PATH, 'doc'],
'env': environment
},
]
for subprocess_arguments in subprocess_arguments_list:
print('Running command: {}'.format(subprocess_arguments['args']))
subprocess.check_call(**subprocess_arguments)
print('Running command: {}'.format(subprocess_arguments['args']))
subprocess.check_call(**subprocess_arguments)
if args.submit:
assert args.gh_user
assert args.doc_branch
github_user = args.gh_user
github_repository_owner = (
args.gh_repo_owner if args.gh_repo_owner else args.gh_user)
# Create a temporary directory out of tree, checkout gh-pages from the
# specified repository, edit it, and push it. It's up to the user to then go
# onto GitHub and make a PR against grpc/grpc:gh-pages.
repo_parent_dir = tempfile.mkdtemp()
print('Documentation parent directory: {}'.format(repo_parent_dir))
repo_dir = os.path.join(repo_parent_dir, 'grpc')
python_doc_dir = os.path.join(repo_dir, 'python')
doc_branch = args.doc_branch
assert args.gh_user
assert args.doc_branch
github_user = args.gh_user
github_repository_owner = (args.gh_repo_owner
if args.gh_repo_owner else args.gh_user)
# Create a temporary directory out of tree, checkout gh-pages from the
# specified repository, edit it, and push it. It's up to the user to then go
# onto GitHub and make a PR against grpc/grpc:gh-pages.
repo_parent_dir = tempfile.mkdtemp()
print('Documentation parent directory: {}'.format(repo_parent_dir))
repo_dir = os.path.join(repo_parent_dir, 'grpc')
python_doc_dir = os.path.join(repo_dir, 'python')
doc_branch = args.doc_branch
print('Cloning your repository...')
subprocess.check_call([
'git', 'clone', 'https://{}@github.com/{}/grpc'.format(
github_user, github_repository_owner)
], cwd=repo_parent_dir)
subprocess.check_call([
'git', 'remote', 'add', 'upstream', 'https://github.com/grpc/grpc'
], cwd=repo_dir)
subprocess.check_call(['git', 'fetch', 'upstream'], cwd=repo_dir)
subprocess.check_call([
'git', 'checkout', 'upstream/gh-pages', '-b', doc_branch
], cwd=repo_dir)
print('Updating documentation...')
shutil.rmtree(python_doc_dir, ignore_errors=True)
shutil.copytree(DOC_PATH, python_doc_dir)
print('Attempting to push documentation...')
try:
subprocess.check_call(['git', 'add', '--all'], cwd=repo_dir)
subprocess.check_call([
'git', 'commit', '-m', 'Auto-update Python documentation'
], cwd=repo_dir)
subprocess.check_call([
'git', 'push', '--set-upstream', 'origin', doc_branch
], cwd=repo_dir)
except subprocess.CalledProcessError:
print('Failed to push documentation. Examine this directory and push '
'manually: {}'.format(repo_parent_dir))
sys.exit(1)
shutil.rmtree(repo_parent_dir)
print('Cloning your repository...')
subprocess.check_call(
[
'git', 'clone', 'https://{}@github.com/{}/grpc'.format(
github_user, github_repository_owner)
],
cwd=repo_parent_dir)
subprocess.check_call(
['git', 'remote', 'add', 'upstream', 'https://github.com/grpc/grpc'],
cwd=repo_dir)
subprocess.check_call(['git', 'fetch', 'upstream'], cwd=repo_dir)
subprocess.check_call(
['git', 'checkout', 'upstream/gh-pages', '-b', doc_branch],
cwd=repo_dir)
print('Updating documentation...')
shutil.rmtree(python_doc_dir, ignore_errors=True)
shutil.copytree(DOC_PATH, python_doc_dir)
print('Attempting to push documentation...')
try:
subprocess.check_call(['git', 'add', '--all'], cwd=repo_dir)
subprocess.check_call(
['git', 'commit', '-m', 'Auto-update Python documentation'],
cwd=repo_dir)
subprocess.check_call(
['git', 'push', '--set-upstream', 'origin', doc_branch],
cwd=repo_dir)
except subprocess.CalledProcessError:
print('Failed to push documentation. Examine this directory and push '
'manually: {}'.format(repo_parent_dir))
sys.exit(1)
shutil.rmtree(repo_parent_dir)

@@ -11,4 +11,3 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

@@ -22,43 +22,44 @@ from grpc_tools import protoc
def build_package_protos(package_root):
proto_files = []
inclusion_root = os.path.abspath(package_root)
for root, _, files in os.walk(inclusion_root):
for filename in files:
if filename.endswith('.proto'):
proto_files.append(os.path.abspath(os.path.join(root, filename)))
proto_files = []
inclusion_root = os.path.abspath(package_root)
for root, _, files in os.walk(inclusion_root):
for filename in files:
if filename.endswith('.proto'):
proto_files.append(
os.path.abspath(os.path.join(root, filename)))
well_known_protos_include = pkg_resources.resource_filename(
'grpc_tools', '_proto')
well_known_protos_include = pkg_resources.resource_filename('grpc_tools',
'_proto')
for proto_file in proto_files:
command = [
'grpc_tools.protoc',
'--proto_path={}'.format(inclusion_root),
'--proto_path={}'.format(well_known_protos_include),
'--python_out={}'.format(inclusion_root),
'--grpc_python_out={}'.format(inclusion_root),
] + [proto_file]
if protoc.main(command) != 0:
sys.stderr.write('warning: {} failed'.format(command))
for proto_file in proto_files:
command = [
'grpc_tools.protoc',
'--proto_path={}'.format(inclusion_root),
'--proto_path={}'.format(well_known_protos_include),
'--python_out={}'.format(inclusion_root),
'--grpc_python_out={}'.format(inclusion_root),
] + [proto_file]
if protoc.main(command) != 0:
sys.stderr.write('warning: {} failed'.format(command))
class BuildPackageProtos(setuptools.Command):
"""Command to generate project *_pb2.py modules from proto files."""
"""Command to generate project *_pb2.py modules from proto files."""
description = 'build grpc protobuf modules'
user_options = []
description = 'build grpc protobuf modules'
user_options = []
def initialize_options(self):
pass
def initialize_options(self):
pass
def finalize_options(self):
pass
def finalize_options(self):
pass
def run(self):
# due to limitations of the proto generator, we require that only *one*
# directory is provided as an 'include' directory. We assume it's the '' key
# to `self.distribution.package_dir` (and get a key error if it's not
# there).
build_package_protos(self.distribution.package_dir[''])
def run(self):
# due to limitations of the proto generator, we require that only *one*
# directory is provided as an 'include' directory. We assume it's the '' key
# to `self.distribution.package_dir` (and get a key error if it's not
# there).
build_package_protos(self.distribution.package_dir[''])
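
As a hedged usage sketch for build_package_protos above: it can be called directly as well as through the setuptools command. 'my_package' is a hypothetical directory containing .proto files, not something from this commit:

from grpc_tools import command

# Walks my_package/ recursively and, for each .proto found, invokes
# grpc_tools.protoc to generate *_pb2.py and *_pb2_grpc.py next to it.
command.build_package_protos('my_package')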

@@ -19,16 +19,18 @@ import sys
from grpc_tools import _protoc_compiler
def main(command_arguments):
"""Run the protocol buffer compiler with the given command-line arguments.
"""Run the protocol buffer compiler with the given command-line arguments.
Args:
command_arguments: a list of strings representing command line arguments to
`protoc`.
"""
command_arguments = [argument.encode() for argument in command_arguments]
return _protoc_compiler.run_main(command_arguments)
command_arguments = [argument.encode() for argument in command_arguments]
return _protoc_compiler.run_main(command_arguments)
if __name__ == '__main__':
proto_include = pkg_resources.resource_filename('grpc_tools', '_proto')
sys.exit(main(sys.argv + ['-I{}'.format(proto_include)]))
proto_include = pkg_resources.resource_filename('grpc_tools', '_proto')
sys.exit(main(sys.argv + ['-I{}'.format(proto_include)]))
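
A short usage sketch for main() as documented above, assuming a hypothetical helloworld.proto sitting in the working directory:

from grpc_tools import protoc

# The first element stands in for argv[0]; the remaining items are
# ordinary protoc flags: an include path, the two Python output
# directories, and the input file.
exit_code = protoc.main([
    'grpc_tools.protoc',
    '-I.',
    '--python_out=.',
    '--grpc_python_out=.',
    'helloworld.proto',
])
assert exit_code == 0  # nonzero indicates a protoc failure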

@@ -14,4 +14,4 @@
# AUTO-GENERATED FROM `$REPO_ROOT/templates/tools/distrib/python/grpcio_tools/grpc_version.py.template`!!!
VERSION='1.9.0.dev0'
VERSION = '1.9.0.dev0'

Diff for tools/distrib/python/grpcio_tools/protoc_lib_deps.py suppressed because one or more lines are too long.

@@ -66,42 +66,42 @@ BUILD_WITH_CYTHON = os.environ.get('GRPC_PYTHON_BUILD_WITH_CYTHON', False)
EXTRA_ENV_COMPILE_ARGS = os.environ.get('GRPC_PYTHON_CFLAGS', None)
EXTRA_ENV_LINK_ARGS = os.environ.get('GRPC_PYTHON_LDFLAGS', None)
if EXTRA_ENV_COMPILE_ARGS is None:
EXTRA_ENV_COMPILE_ARGS = '-std=c++11'
if 'win32' in sys.platform:
if sys.version_info < (3, 5):
# We use define flags here and don't directly add to DEFINE_MACROS below to
# ensure that the expert user/builder has a way of turning it off (via the
# envvars) without adding yet more GRPC-specific envvars.
# See https://sourceforge.net/p/mingw-w64/bugs/363/
if '32' in platform.architecture()[0]:
EXTRA_ENV_COMPILE_ARGS += ' -D_ftime=_ftime32 -D_timeb=__timeb32 -D_ftime_s=_ftime32_s -D_hypot=hypot'
else:
EXTRA_ENV_COMPILE_ARGS += ' -D_ftime=_ftime64 -D_timeb=__timeb64 -D_hypot=hypot'
else:
# We need to statically link the C++ Runtime, only the C runtime is
# available dynamically
EXTRA_ENV_COMPILE_ARGS += ' /MT'
elif "linux" in sys.platform or "darwin" in sys.platform:
EXTRA_ENV_COMPILE_ARGS += ' -fno-wrapv -frtti'
EXTRA_ENV_COMPILE_ARGS = '-std=c++11'
if 'win32' in sys.platform:
if sys.version_info < (3, 5):
# We use define flags here and don't directly add to DEFINE_MACROS below to
# ensure that the expert user/builder has a way of turning it off (via the
# envvars) without adding yet more GRPC-specific envvars.
# See https://sourceforge.net/p/mingw-w64/bugs/363/
if '32' in platform.architecture()[0]:
EXTRA_ENV_COMPILE_ARGS += ' -D_ftime=_ftime32 -D_timeb=__timeb32 -D_ftime_s=_ftime32_s -D_hypot=hypot'
else:
EXTRA_ENV_COMPILE_ARGS += ' -D_ftime=_ftime64 -D_timeb=__timeb64 -D_hypot=hypot'
else:
# We need to statically link the C++ Runtime, only the C runtime is
# available dynamically
EXTRA_ENV_COMPILE_ARGS += ' /MT'
elif "linux" in sys.platform or "darwin" in sys.platform:
EXTRA_ENV_COMPILE_ARGS += ' -fno-wrapv -frtti'
if EXTRA_ENV_LINK_ARGS is None:
EXTRA_ENV_LINK_ARGS = ''
if "linux" in sys.platform or "darwin" in sys.platform:
EXTRA_ENV_LINK_ARGS += ' -lpthread'
elif "win32" in sys.platform and sys.version_info < (3, 5):
msvcr = cygwinccompiler.get_msvcr()[0]
# TODO(atash) sift through the GCC specs to see if libstdc++ can have any
# influence on the linkage outcome on MinGW for non-C++ programs.
EXTRA_ENV_LINK_ARGS += (
' -static-libgcc -static-libstdc++ -mcrtdll={msvcr} '
'-static'.format(msvcr=msvcr))
EXTRA_ENV_LINK_ARGS = ''
if "linux" in sys.platform or "darwin" in sys.platform:
EXTRA_ENV_LINK_ARGS += ' -lpthread'
elif "win32" in sys.platform and sys.version_info < (3, 5):
msvcr = cygwinccompiler.get_msvcr()[0]
# TODO(atash) sift through the GCC specs to see if libstdc++ can have any
# influence on the linkage outcome on MinGW for non-C++ programs.
EXTRA_ENV_LINK_ARGS += (
' -static-libgcc -static-libstdc++ -mcrtdll={msvcr} '
'-static'.format(msvcr=msvcr))
EXTRA_COMPILE_ARGS = shlex.split(EXTRA_ENV_COMPILE_ARGS)
EXTRA_LINK_ARGS = shlex.split(EXTRA_ENV_LINK_ARGS)
CC_FILES = [
os.path.normpath(cc_file) for cc_file in protoc_lib_deps.CC_FILES]
CC_FILES = [os.path.normpath(cc_file) for cc_file in protoc_lib_deps.CC_FILES]
PROTO_FILES = [
os.path.normpath(proto_file) for proto_file in protoc_lib_deps.PROTO_FILES]
os.path.normpath(proto_file) for proto_file in protoc_lib_deps.PROTO_FILES
]
CC_INCLUDE = os.path.normpath(protoc_lib_deps.CC_INCLUDE)
PROTO_INCLUDE = os.path.normpath(protoc_lib_deps.PROTO_INCLUDE)
@@ -110,107 +110,114 @@ GRPC_PYTHON_PROTO_RESOURCES_NAME = '_proto'
DEFINE_MACROS = ()
if "win32" in sys.platform:
DEFINE_MACROS += (('WIN32_LEAN_AND_MEAN', 1),)
if '64bit' in platform.architecture()[0]:
DEFINE_MACROS += (('MS_WIN64', 1),)
DEFINE_MACROS += (('WIN32_LEAN_AND_MEAN', 1),)
if '64bit' in platform.architecture()[0]:
DEFINE_MACROS += (('MS_WIN64', 1),)
elif "linux" in sys.platform or "darwin" in sys.platform:
DEFINE_MACROS += (('HAVE_PTHREAD', 1),)
DEFINE_MACROS += (('HAVE_PTHREAD', 1),)
# By default, Python3 distutils enforces compatibility of
# c plugins (.so files) with the OSX version Python3 was built with.
# For Python3.4, this is OSX 10.6, but we need Thread Local Support (__thread)
if 'darwin' in sys.platform and PY3:
mac_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
if mac_target and (pkg_resources.parse_version(mac_target) <
pkg_resources.parse_version('10.9.0')):
os.environ['MACOSX_DEPLOYMENT_TARGET'] = '10.9'
os.environ['_PYTHON_HOST_PLATFORM'] = re.sub(
r'macosx-[0-9]+\.[0-9]+-(.+)',
r'macosx-10.9-\1',
util.get_platform())
mac_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
if mac_target and (pkg_resources.parse_version(mac_target) <
pkg_resources.parse_version('10.9.0')):
os.environ['MACOSX_DEPLOYMENT_TARGET'] = '10.9'
os.environ['_PYTHON_HOST_PLATFORM'] = re.sub(
r'macosx-[0-9]+\.[0-9]+-(.+)', r'macosx-10.9-\1',
util.get_platform())
def package_data():
tools_path = GRPC_PYTHON_TOOLS_PACKAGE.replace('.', os.path.sep)
proto_resources_path = os.path.join(tools_path,
GRPC_PYTHON_PROTO_RESOURCES_NAME)
proto_files = []
for proto_file in PROTO_FILES:
source = os.path.join(PROTO_INCLUDE, proto_file)
target = os.path.join(proto_resources_path, proto_file)
relative_target = os.path.join(GRPC_PYTHON_PROTO_RESOURCES_NAME, proto_file)
try:
os.makedirs(os.path.dirname(target))
except OSError as error:
if error.errno == errno.EEXIST:
pass
else:
raise
shutil.copy(source, target)
proto_files.append(relative_target)
return {GRPC_PYTHON_TOOLS_PACKAGE: proto_files}
tools_path = GRPC_PYTHON_TOOLS_PACKAGE.replace('.', os.path.sep)
proto_resources_path = os.path.join(tools_path,
GRPC_PYTHON_PROTO_RESOURCES_NAME)
proto_files = []
for proto_file in PROTO_FILES:
source = os.path.join(PROTO_INCLUDE, proto_file)
target = os.path.join(proto_resources_path, proto_file)
relative_target = os.path.join(GRPC_PYTHON_PROTO_RESOURCES_NAME,
proto_file)
try:
os.makedirs(os.path.dirname(target))
except OSError as error:
if error.errno == errno.EEXIST:
pass
else:
raise
shutil.copy(source, target)
proto_files.append(relative_target)
return {GRPC_PYTHON_TOOLS_PACKAGE: proto_files}
def extension_modules():
if BUILD_WITH_CYTHON:
plugin_sources = [os.path.join('grpc_tools', '_protoc_compiler.pyx')]
else:
plugin_sources = [os.path.join('grpc_tools', '_protoc_compiler.cpp')]
plugin_sources += [
os.path.join('grpc_tools', 'main.cc'),
os.path.join('grpc_root', 'src', 'compiler', 'python_generator.cc')]
#HACK: Substitute the embed.cc, which is a JS to C++
# preprocessor with the generated code.
# The generated code should not be material
# to the parts of protoc we use (it affects
# the JavaScript code generator, supposedly),
# but we need to be cautious about it.
cc_files_clone = list(CC_FILES)
embed_cc_file = os.path.normpath('google/protobuf/compiler/js/embed.cc')
well_known_types_file = os.path.normpath(
'google/protobuf/compiler/js/well_known_types_embed.cc')
if embed_cc_file in cc_files_clone:
cc_files_clone.remove(embed_cc_file)
if well_known_types_file in cc_files_clone:
cc_files_clone.remove(well_known_types_file)
plugin_sources += [os.path.join('grpc_tools', 'protobuf_generated_well_known_types_embed.cc')]
plugin_sources += [os.path.join(CC_INCLUDE, cc_file) for cc_file in cc_files_clone]
plugin_ext = extension.Extension(
name='grpc_tools._protoc_compiler',
sources=plugin_sources,
include_dirs=[
'.',
'grpc_root',
os.path.join('grpc_root', 'include'),
CC_INCLUDE,
],
language='c++',
define_macros=list(DEFINE_MACROS),
extra_compile_args=list(EXTRA_COMPILE_ARGS),
extra_link_args=list(EXTRA_LINK_ARGS),
)
extensions = [plugin_ext]
if BUILD_WITH_CYTHON:
from Cython import Build
return Build.cythonize(extensions)
else:
return extensions
if BUILD_WITH_CYTHON:
plugin_sources = [os.path.join('grpc_tools', '_protoc_compiler.pyx')]
else:
plugin_sources = [os.path.join('grpc_tools', '_protoc_compiler.cpp')]
plugin_sources += [
os.path.join('grpc_tools', 'main.cc'),
os.path.join('grpc_root', 'src', 'compiler', 'python_generator.cc')
]
#HACK: Substitute the embed.cc, which is a JS to C++
# preprocessor with the generated code.
# The generated code should not be material
# to the parts of protoc we use (it affects
# the JavaScript code generator, supposedly),
# but we need to be cautious about it.
cc_files_clone = list(CC_FILES)
embed_cc_file = os.path.normpath('google/protobuf/compiler/js/embed.cc')
well_known_types_file = os.path.normpath(
'google/protobuf/compiler/js/well_known_types_embed.cc')
if embed_cc_file in cc_files_clone:
cc_files_clone.remove(embed_cc_file)
if well_known_types_file in cc_files_clone:
cc_files_clone.remove(well_known_types_file)
plugin_sources += [
os.path.join('grpc_tools',
'protobuf_generated_well_known_types_embed.cc')
]
plugin_sources += [
os.path.join(CC_INCLUDE, cc_file) for cc_file in cc_files_clone
]
plugin_ext = extension.Extension(
name='grpc_tools._protoc_compiler',
sources=plugin_sources,
include_dirs=[
'.',
'grpc_root',
os.path.join('grpc_root', 'include'),
CC_INCLUDE,
],
language='c++',
define_macros=list(DEFINE_MACROS),
extra_compile_args=list(EXTRA_COMPILE_ARGS),
extra_link_args=list(EXTRA_LINK_ARGS),)
extensions = [plugin_ext]
if BUILD_WITH_CYTHON:
from Cython import Build
return Build.cythonize(extensions)
else:
return extensions
setuptools.setup(
name='grpcio-tools',
version=grpc_version.VERSION,
description='Protobuf code generator for gRPC',
author='The gRPC Authors',
author_email='grpc-io@googlegroups.com',
url='https://grpc.io',
license='Apache License 2.0',
classifiers=CLASSIFIERS,
ext_modules=extension_modules(),
packages=setuptools.find_packages('.'),
install_requires=[
'protobuf>=3.5.0.post1',
'grpcio>={version}'.format(version=grpc_version.VERSION),
],
package_data=package_data(),
)
name='grpcio-tools',
version=grpc_version.VERSION,
description='Protobuf code generator for gRPC',
author='The gRPC Authors',
author_email='grpc-io@googlegroups.com',
url='https://grpc.io',
license='Apache License 2.0',
classifiers=CLASSIFIERS,
ext_modules=extension_modules(),
packages=setuptools.find_packages('.'),
install_requires=[
'protobuf>=3.5.0.post1',
'grpcio>={version}'.format(version=grpc_version.VERSION),
],
package_data=package_data(),)

@@ -27,7 +27,7 @@ import sys
import traceback
import uuid
DEPS_FILE_CONTENT="""
DEPS_FILE_CONTENT = """
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -60,15 +60,16 @@ PROTOBUF_CC_PREFIX = '//:src/'
PROTOBUF_PROTO_PREFIX = '//:src/'
GRPC_ROOT = os.path.abspath(
os.path.join(os.path.dirname(os.path.abspath(__file__)),
'..', '..', '..'))
os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..', '..'))
GRPC_PYTHON_ROOT = os.path.join(GRPC_ROOT, 'tools', 'distrib',
'python', 'grpcio_tools')
GRPC_PYTHON_ROOT = os.path.join(GRPC_ROOT, 'tools', 'distrib', 'python',
'grpcio_tools')
GRPC_PYTHON_PROTOBUF_RELATIVE_ROOT = os.path.join('third_party', 'protobuf', 'src')
GRPC_PYTHON_PROTOBUF_RELATIVE_ROOT = os.path.join('third_party', 'protobuf',
'src')
GRPC_PROTOBUF = os.path.join(GRPC_ROOT, GRPC_PYTHON_PROTOBUF_RELATIVE_ROOT)
GRPC_PROTOBUF_SUBMODULE_ROOT = os.path.join(GRPC_ROOT, 'third_party', 'protobuf')
GRPC_PROTOBUF_SUBMODULE_ROOT = os.path.join(GRPC_ROOT, 'third_party',
'protobuf')
GRPC_PROTOC_PLUGINS = os.path.join(GRPC_ROOT, 'src', 'compiler')
GRPC_PYTHON_PROTOBUF = os.path.join(GRPC_PYTHON_ROOT, 'third_party', 'protobuf',
'src')
@@ -80,81 +81,91 @@ GRPC_PYTHON_PROTOC_LIB_DEPS = os.path.join(GRPC_PYTHON_ROOT,
GRPC_INCLUDE = os.path.join(GRPC_ROOT, 'include')
GRPC_PYTHON_INCLUDE = os.path.join(GRPC_PYTHON_ROOT, 'grpc_root', 'include')
BAZEL_DEPS = os.path.join(GRPC_ROOT, 'tools', 'distrib', 'python', 'bazel_deps.sh')
BAZEL_DEPS = os.path.join(GRPC_ROOT, 'tools', 'distrib', 'python',
'bazel_deps.sh')
BAZEL_DEPS_PROTOC_LIB_QUERY = '//:protoc_lib'
BAZEL_DEPS_COMMON_PROTOS_QUERY = '//:well_known_protos'
def protobuf_submodule_commit_hash():
"""Gets the commit hash for the HEAD of the protobuf submodule currently
"""Gets the commit hash for the HEAD of the protobuf submodule currently
checked out."""
cwd = os.getcwd()
os.chdir(GRPC_PROTOBUF_SUBMODULE_ROOT)
output = subprocess.check_output(['git', 'rev-parse', 'HEAD'])
os.chdir(cwd)
return output.splitlines()[0].strip()
cwd = os.getcwd()
os.chdir(GRPC_PROTOBUF_SUBMODULE_ROOT)
output = subprocess.check_output(['git', 'rev-parse', 'HEAD'])
os.chdir(cwd)
return output.splitlines()[0].strip()
def bazel_query(query):
output = subprocess.check_output([BAZEL_DEPS, query])
return output.splitlines()
output = subprocess.check_output([BAZEL_DEPS, query])
return output.splitlines()
def get_deps():
"""Write the result of the bazel query `query` against protobuf to
"""Write the result of the bazel query `query` against protobuf to
`out_file`."""
cc_files_output = bazel_query(BAZEL_DEPS_PROTOC_LIB_QUERY)
cc_files = [
name[len(PROTOBUF_CC_PREFIX):] for name in cc_files_output
if name.endswith('.cc') and name.startswith(PROTOBUF_CC_PREFIX)]
proto_files_output = bazel_query(BAZEL_DEPS_COMMON_PROTOS_QUERY)
proto_files = [
name[len(PROTOBUF_PROTO_PREFIX):] for name in proto_files_output
if name.endswith('.proto') and name.startswith(PROTOBUF_PROTO_PREFIX)]
commit_hash = protobuf_submodule_commit_hash()
deps_file_content = DEPS_FILE_CONTENT.format(
cc_files=cc_files,
proto_files=proto_files,
cc_include=repr(GRPC_PYTHON_PROTOBUF_RELATIVE_ROOT),
proto_include=repr(GRPC_PYTHON_PROTOBUF_RELATIVE_ROOT),
commit_hash=COMMIT_HASH_PREFIX + commit_hash + COMMIT_HASH_SUFFIX)
return deps_file_content
cc_files_output = bazel_query(BAZEL_DEPS_PROTOC_LIB_QUERY)
cc_files = [
name[len(PROTOBUF_CC_PREFIX):] for name in cc_files_output
if name.endswith('.cc') and name.startswith(PROTOBUF_CC_PREFIX)
]
proto_files_output = bazel_query(BAZEL_DEPS_COMMON_PROTOS_QUERY)
proto_files = [
name[len(PROTOBUF_PROTO_PREFIX):] for name in proto_files_output
if name.endswith('.proto') and name.startswith(PROTOBUF_PROTO_PREFIX)
]
commit_hash = protobuf_submodule_commit_hash()
deps_file_content = DEPS_FILE_CONTENT.format(
cc_files=cc_files,
proto_files=proto_files,
cc_include=repr(GRPC_PYTHON_PROTOBUF_RELATIVE_ROOT),
proto_include=repr(GRPC_PYTHON_PROTOBUF_RELATIVE_ROOT),
commit_hash=COMMIT_HASH_PREFIX + commit_hash + COMMIT_HASH_SUFFIX)
return deps_file_content
def long_path(path):
if os.name == 'nt':
return '\\\\?\\' + path
else:
return path
if os.name == 'nt':
return '\\\\?\\' + path
else:
return path
def main():
os.chdir(GRPC_ROOT)
for source, target in [
(GRPC_PROTOBUF, GRPC_PYTHON_PROTOBUF),
(GRPC_PROTOC_PLUGINS, GRPC_PYTHON_PROTOC_PLUGINS),
(GRPC_INCLUDE, GRPC_PYTHON_INCLUDE)]:
for source_dir, _, files in os.walk(source):
target_dir = os.path.abspath(os.path.join(target, os.path.relpath(source_dir, source)))
try:
os.makedirs(target_dir)
except OSError as error:
if error.errno != errno.EEXIST:
raise
for relative_file in files:
source_file = os.path.abspath(os.path.join(source_dir, relative_file))
target_file = os.path.abspath(os.path.join(target_dir, relative_file))
shutil.copyfile(source_file, target_file)
try:
protoc_lib_deps_content = get_deps()
except Exception as error:
# We allow this script to succeed even if we couldn't get the dependencies,
# as then we can assume that even without a successful bazel run the
# dependencies currently in source control are 'good enough'.
sys.stderr.write("Got non-fatal error:\n")
traceback.print_exc(file=sys.stderr)
return
# If we successfully got the dependencies, truncate and rewrite the deps file.
with open(GRPC_PYTHON_PROTOC_LIB_DEPS, 'w') as deps_file:
deps_file.write(protoc_lib_deps_content)
os.chdir(GRPC_ROOT)
for source, target in [(GRPC_PROTOBUF, GRPC_PYTHON_PROTOBUF),
(GRPC_PROTOC_PLUGINS, GRPC_PYTHON_PROTOC_PLUGINS),
(GRPC_INCLUDE, GRPC_PYTHON_INCLUDE)]:
for source_dir, _, files in os.walk(source):
target_dir = os.path.abspath(
os.path.join(target, os.path.relpath(source_dir, source)))
try:
os.makedirs(target_dir)
except OSError as error:
if error.errno != errno.EEXIST:
raise
for relative_file in files:
source_file = os.path.abspath(
os.path.join(source_dir, relative_file))
target_file = os.path.abspath(
os.path.join(target_dir, relative_file))
shutil.copyfile(source_file, target_file)
try:
protoc_lib_deps_content = get_deps()
except Exception as error:
# We allow this script to succeed even if we couldn't get the dependencies,
# as then we can assume that even without a successful bazel run the
# dependencies currently in source control are 'good enough'.
sys.stderr.write("Got non-fatal error:\n")
traceback.print_exc(file=sys.stderr)
return
# If we successfully got the dependencies, truncate and rewrite the deps file.
with open(GRPC_PYTHON_PROTOC_LIB_DEPS, 'w') as deps_file:
deps_file.write(protoc_lib_deps_content)
if __name__ == '__main__':
main()
if __name__ == '__main__':
main()

@@ -21,43 +21,52 @@ import subprocess
parser = argparse.ArgumentParser(
description='Submit the package to a PyPI repository.')
parser.add_argument(
'--repository', '-r', metavar='r', type=str, default='pypi',
'--repository',
'-r',
metavar='r',
type=str,
default='pypi',
help='The repository to push the package to. '
'Ensure the value appears in your .pypirc file. '
'Defaults to "pypi".'
)
'Ensure the value appears in your .pypirc file. '
'Defaults to "pypi".')
parser.add_argument(
'--identity', '-i', metavar='i', type=str,
help='GPG identity to sign the files with.'
)
'--identity',
'-i',
metavar='i',
type=str,
help='GPG identity to sign the files with.')
parser.add_argument(
'--username', '-u', metavar='u', type=str,
'--username',
'-u',
metavar='u',
type=str,
help='Username to authenticate with the repository. Not needed if you have '
'configured your .pypirc to include your username.'
)
'configured your .pypirc to include your username.')
parser.add_argument(
'--password', '-p', metavar='p', type=str,
'--password',
'-p',
metavar='p',
type=str,
help='Password to authenticate with the repository. Not needed if you have '
'configured your .pypirc to include your password.'
)
'configured your .pypirc to include your password.')
parser.add_argument(
'--bdist', '-b', action='store_true',
help='Generate a binary distribution (wheel) for the current OS.'
)
'--bdist',
'-b',
action='store_true',
help='Generate a binary distribution (wheel) for the current OS.')
parser.add_argument(
'--dist-args', type=str,
help='Additional arguments to pass to the *dist setup.py command.'
)
'--dist-args',
type=str,
help='Additional arguments to pass to the *dist setup.py command.')
args = parser.parse_args()
# Move to the root directory of Python GRPC.
pkgdir = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'../../../')
pkgdir = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../../../')
# Remove previous distributions; they somehow confuse twine.
try:
shutil.rmtree(os.path.join(pkgdir, 'dist/'))
shutil.rmtree(os.path.join(pkgdir, 'dist/'))
except:
pass
pass
# Build the Cython C files
build_env = os.environ.copy()
@@ -67,20 +76,20 @@ subprocess.call(cmd, cwd=pkgdir, env=build_env)
# Make the push.
if args.bdist:
cmd = ['python', 'setup.py', 'bdist_wheel']
cmd = ['python', 'setup.py', 'bdist_wheel']
else:
cmd = ['python', 'setup.py', 'sdist']
cmd = ['python', 'setup.py', 'sdist']
if args.dist_args:
cmd += args.dist_args.split()
cmd += args.dist_args.split()
subprocess.call(cmd, cwd=pkgdir)
cmd = ['twine', 'upload', '-r', args.repository]
if args.identity is not None:
cmd.extend(['-i', args.identity])
cmd.extend(['-i', args.identity])
if args.username is not None:
cmd.extend(['-u', args.username])
cmd.extend(['-u', args.username])
if args.password is not None:
cmd.extend(['-p', args.password])
cmd.extend(['-p', args.password])
cmd.append('dist/*')
subprocess.call(cmd, cwd=pkgdir)

@@ -20,51 +20,51 @@ import argparse
import multiprocessing
sys.path.append(
os.path.join(
os.path.dirname(sys.argv[0]), '..', 'run_tests', 'python_utils'))
os.path.join(
os.path.dirname(sys.argv[0]), '..', 'run_tests', 'python_utils'))
import jobset
GRPC_CHECKS = [
'modernize-use-nullptr',
'modernize-use-nullptr',
]
extra_args = [
'-x',
'c++',
'-std=c++11',
'-x',
'c++',
'-std=c++11',
]
with open('.clang_complete') as f:
for line in f:
line = line.strip()
if line.startswith('-I'):
extra_args.append(line)
for line in f:
line = line.strip()
if line.startswith('-I'):
extra_args.append(line)
clang_tidy = os.environ.get('CLANG_TIDY', 'clang-tidy')
argp = argparse.ArgumentParser(description='Run clang-tidy against core')
argp.add_argument('files', nargs='+', help='Files to tidy')
argp.add_argument('--fix', dest='fix', action='store_true')
argp.add_argument('-j', '--jobs', type=int, default=multiprocessing.cpu_count(),
help='Number of CPUs to use')
argp.add_argument(
'-j',
'--jobs',
type=int,
default=multiprocessing.cpu_count(),
help='Number of CPUs to use')
argp.set_defaults(fix=False)
args = argp.parse_args()
cmdline = [
clang_tidy,
'--checks=-*,%s' % ','.join(GRPC_CHECKS),
clang_tidy, '--checks=-*,%s' % ','.join(GRPC_CHECKS),
'--warnings-as-errors=%s' % ','.join(GRPC_CHECKS)
] + [
'--extra-arg-before=%s' % arg
for arg in extra_args
]
] + ['--extra-arg-before=%s' % arg for arg in extra_args]
if args.fix:
cmdline.append('--fix')
cmdline.append('--fix')
jobs = []
for filename in args.files:
jobs.append(jobset.JobSpec(cmdline + [filename],
shortname=filename,
))#verbose_success=True))
jobs.append(jobset.JobSpec(
cmdline + [filename],
shortname=filename,)) #verbose_success=True))
jobset.run(jobs, maxjobs=args.jobs)

@@ -22,6 +22,7 @@ DIRS=(
'src/python'
'tools/buildgen'
'tools/codegen'
'tools/distrib'
)
EXCLUSIONS=(
'grpcio/grpc_*.py'
