commit
cc0bd13cb9
1933 changed files with 84840 additions and 75108 deletions
@ -1,18 +1,18 @@ |
||||
-Wall |
||||
-Wc++-compat |
||||
-Ithird_party/googletest/include |
||||
-Ithird_party/googletest |
||||
-Iinclude |
||||
-Igens |
||||
-I. |
||||
-Ithird_party/boringssl/include |
||||
-Ithird_party/benchmark/include |
||||
-Ithird_party/zlib |
||||
-Ithird_party/protobuf/src |
||||
-Igens |
||||
-Iinclude |
||||
-Isrc/core/ext/upb-generated |
||||
-Ithird_party/abseil-cpp |
||||
-Ithird_party/cares/cares |
||||
-Ithird_party/benchmark/include |
||||
-Ithird_party/boringssl/include |
||||
-Ithird_party/cares |
||||
-Ithird_party/googletest/googletest/include |
||||
-Ithird_party/cares/cares |
||||
-Ithird_party/googletest |
||||
-Ithird_party/googletest/googlemock/include |
||||
-Ithird_party/nanopb |
||||
|
||||
-Ithird_party/googletest/googletest/include |
||||
-Ithird_party/googletest/include |
||||
-Ithird_party/protobuf/src |
||||
-Ithird_party/upb |
||||
-Ithird_party/zlib |
||||
|
@ -0,0 +1,18 @@ |
||||
--- |
||||
name: Request a cleanup |
||||
about: Suggest a cleanup in our repository |
||||
labels: kind/internal cleanup |
||||
assignees: karthikravis |
||||
|
||||
--- |
||||
|
||||
<!-- |
||||
|
||||
This form is for bug reports and feature requests ONLY! |
||||
For general questions and troubleshooting, please ask/look for answers here: |
||||
- grpc.io mailing list: https://groups.google.com/forum/#!forum/grpc-io |
||||
- StackOverflow, with "grpc" tag: https://stackoverflow.com/questions/tagged/grpc |
||||
|
||||
Issues specific to *grpc-java*, *grpc-go*, *grpc-node*, *grpc-dart*, *grpc-web* should be created in the repository they belong to (e.g. https://github.com/grpc/grpc-LANGUAGE/issues/new) |
||||
--> |
||||
|
@ -0,0 +1,29 @@ |
||||
--- |
||||
name: Request a feature |
||||
about: Suggest an idea for this project |
||||
labels: kind/enhancement |
||||
assignees: karthikravis |
||||
|
||||
--- |
||||
|
||||
<!-- |
||||
|
||||
This form is for bug reports and feature requests ONLY! |
||||
For general questions and troubleshooting, please ask/look for answers here: |
||||
- grpc.io mailing list: https://groups.google.com/forum/#!forum/grpc-io |
||||
- StackOverflow, with "grpc" tag: https://stackoverflow.com/questions/tagged/grpc |
||||
|
||||
Issues specific to *grpc-java*, *grpc-go*, *grpc-node*, *grpc-dart*, *grpc-web* should be created in the repository they belong to (e.g. https://github.com/grpc/grpc-LANGUAGE/issues/new) |
||||
--> |
||||
|
||||
### Is your feature request related to a problem? Please describe. |
||||
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] |
||||
|
||||
### Describe the solution you'd like |
||||
A clear and concise description of what you want to happen. |
||||
|
||||
### Describe alternatives you've considered |
||||
A clear and concise description of any alternative solutions or features you've considered. |
||||
|
||||
### Additional context |
||||
Add any other context about the feature request here. |
@ -0,0 +1,11 @@ |
||||
|
||||
|
||||
|
||||
<!-- |
||||
|
||||
Your pull request will be routed to the following person by default for triaging. |
||||
If you know who should review your pull request, please remove the mentioning below. |
||||
|
||||
--> |
||||
|
||||
@karthikravis |
@ -0,0 +1,59 @@ |
||||
# Configuration for probot-stale - https://github.com/probot/stale |
||||
|
||||
# Number of days of inactivity before an Issue or Pull Request becomes stale |
||||
daysUntilStale: 180 |
||||
|
||||
# Number of days of inactivity before an Issue or Pull Request with the stale label is closed. |
||||
# Set to false to disable. If disabled, issues still need to be closed manually, but will remain marked as stale. |
||||
daysUntilClose: 1 |
||||
|
||||
# Only issues or pull requests with all of these labels are check if stale. Defaults to `[]` (disabled) |
||||
onlyLabels: [] |
||||
|
||||
# Issues or Pull Requests with these labels will never be considered stale. Set to `[]` to disable |
||||
exemptLabels: |
||||
- "disposition/never stale" |
||||
|
||||
# Set to true to ignore issues in a project (defaults to false) |
||||
exemptProjects: false |
||||
|
||||
# Set to true to ignore issues in a milestone (defaults to false) |
||||
exemptMilestones: false |
||||
|
||||
# Set to true to ignore issues with an assignee (defaults to false) |
||||
exemptAssignees: false |
||||
|
||||
# Label to use when marking as stale |
||||
staleLabel: "disposition/stale" |
||||
|
||||
# Comment to post when marking as stale. Set to `false` to disable |
||||
markComment: > |
||||
This issue/PR has been automatically marked as stale because it has not had any update (including |
||||
commits, comments, labels, milestones, etc) for 180 days. It will be closed automatically if no |
||||
further update occurs in 1 day. Thank you for your contributions! |
||||
|
||||
# Comment to post when removing the stale label. |
||||
# unmarkComment: > |
||||
# Your comment here. |
||||
|
||||
# Comment to post when closing a stale Issue or Pull Request. |
||||
# closeComment: > |
||||
# Your comment here. |
||||
|
||||
# Limit the number of actions per hour, from 1-30. Default is 30 |
||||
limitPerRun: 30 |
||||
|
||||
# Limit to only `issues` or `pulls` |
||||
# only: issues |
||||
|
||||
# Optionally, specify configuration settings that are specific to just 'issues' or 'pulls': |
||||
# pulls: |
||||
# daysUntilStale: 30 |
||||
# markComment: > |
||||
# This pull request has been automatically marked as stale because it has not had |
||||
# recent activity. It will be closed if no further activity occurs. Thank you |
||||
# for your contributions. |
||||
|
||||
# issues: |
||||
# exemptLabels: |
||||
# - confirmed |
@ -1,3 +1,4 @@ |
||||
Dropbox, Inc. |
||||
Google Inc. |
||||
Skyscanner Ltd. |
||||
WeWork Companies Inc. |
||||
|
File diff suppressed because it is too large
Load Diff
@ -1,6 +1,6 @@ |
||||
set noparent |
||||
@nicolasnoble |
||||
@jtattermusch |
||||
@a11r |
||||
@vjpai |
||||
@veblush |
||||
@gnossen |
||||
|
||||
|
@ -1,71 +1,105 @@ |
||||
"""Generates and compiles C++ grpc stubs from proto_library rules.""" |
||||
|
||||
load("//bazel:generate_cc.bzl", "generate_cc") |
||||
load("//bazel:protobuf.bzl", "well_known_proto_libs") |
||||
|
||||
def cc_grpc_library(name, srcs, deps, proto_only, well_known_protos, generate_mocks = False, use_external = False, **kwargs): |
||||
"""Generates C++ grpc classes from a .proto file. |
||||
def cc_grpc_library( |
||||
name, |
||||
srcs, |
||||
deps, |
||||
proto_only = False, |
||||
well_known_protos = False, |
||||
generate_mocks = False, |
||||
use_external = False, |
||||
grpc_only = False, |
||||
**kwargs): |
||||
"""Generates C++ grpc classes for services defined in a proto file. |
||||
|
||||
Assumes the generated classes will be used in cc_api_version = 2. |
||||
If grpc_only is True, this rule is compatible with proto_library and |
||||
cc_proto_library native rules such that it expects proto_library target |
||||
as srcs argument and generates only grpc library classes, expecting |
||||
protobuf messages classes library (cc_proto_library target) to be passed in |
||||
deps argument. By default grpc_only is False which makes this rule to behave |
||||
in a backwards-compatible mode (trying to generate both proto and grpc |
||||
classes). |
||||
|
||||
Arguments: |
||||
name: name of rule. |
||||
srcs: a single proto_library, which wraps the .proto files with services. |
||||
deps: a list of C++ proto_library (or cc_proto_library) which provides |
||||
the compiled code of any message that the services depend on. |
||||
well_known_protos: Should this library additionally depend on well known |
||||
protos |
||||
use_external: When True the grpc deps are prefixed with //external. This |
||||
allows grpc to be used as a dependency in other bazel projects. |
||||
generate_mocks: When True, Google Mock code for client stub is generated. |
||||
**kwargs: rest of arguments, e.g., compatible_with and visibility. |
||||
""" |
||||
if len(srcs) > 1: |
||||
fail("Only one srcs value supported", "srcs") |
||||
Assumes the generated classes will be used in cc_api_version = 2. |
||||
|
||||
proto_target = "_" + name + "_only" |
||||
codegen_target = "_" + name + "_codegen" |
||||
codegen_grpc_target = "_" + name + "_grpc_codegen" |
||||
proto_deps = ["_" + dep + "_only" for dep in deps if dep.find(':') == -1] |
||||
proto_deps += [dep.split(':')[0] + ':' + "_" + dep.split(':')[1] + "_only" for dep in deps if dep.find(':') != -1] |
||||
Args: |
||||
name (str): Name of rule. |
||||
srcs (list): A single .proto file which contains services definitions, |
||||
or if grpc_only parameter is True, a single proto_library which |
||||
contains services descriptors. |
||||
deps (list): A list of C++ proto_library (or cc_proto_library) which |
||||
provides the compiled code of any message that the services depend on. |
||||
proto_only (bool): If True, create only C++ proto classes library, |
||||
avoid creating C++ grpc classes library (expect it in deps). |
||||
Deprecated, use native cc_proto_library instead. False by default. |
||||
well_known_protos (bool): Should this library additionally depend on |
||||
well known protos. Deprecated, the well known protos should be |
||||
specified as explicit dependencies of the proto_library target |
||||
(passed in srcs parameter) instead. False by default. |
||||
generate_mocks (bool): when True, Google Mock code for client stub is |
||||
generated. False by default. |
||||
use_external (bool): Not used. |
||||
grpc_only (bool): if True, generate only grpc library, expecting |
||||
protobuf messages library (cc_proto_library target) to be passed as |
||||
deps. False by default (will become True by default eventually). |
||||
**kwargs: rest of arguments, e.g., compatible_with and visibility |
||||
""" |
||||
if len(srcs) > 1: |
||||
fail("Only one srcs value supported", "srcs") |
||||
if grpc_only and proto_only: |
||||
fail("A mutualy exclusive configuration is specified: grpc_only = True and proto_only = True") |
||||
|
||||
native.proto_library( |
||||
name = proto_target, |
||||
srcs = srcs, |
||||
deps = proto_deps, |
||||
**kwargs |
||||
) |
||||
extra_deps = [] |
||||
proto_targets = [] |
||||
|
||||
generate_cc( |
||||
name = codegen_target, |
||||
srcs = [proto_target], |
||||
well_known_protos = well_known_protos, |
||||
**kwargs |
||||
) |
||||
if not grpc_only: |
||||
proto_target = "_" + name + "_only" |
||||
cc_proto_target = name if proto_only else "_" + name + "_cc_proto" |
||||
|
||||
if not proto_only: |
||||
plugin = "@com_github_grpc_grpc//:grpc_cpp_plugin" |
||||
generate_cc( |
||||
name = codegen_grpc_target, |
||||
srcs = [proto_target], |
||||
plugin = plugin, |
||||
well_known_protos = well_known_protos, |
||||
generate_mocks = generate_mocks, |
||||
**kwargs |
||||
) |
||||
grpc_deps = ["@com_github_grpc_grpc//:grpc++_codegen_proto", |
||||
"//external:protobuf"] |
||||
native.cc_library( |
||||
name = name, |
||||
srcs = [":" + codegen_grpc_target, ":" + codegen_target], |
||||
hdrs = [":" + codegen_grpc_target, ":" + codegen_target], |
||||
deps = deps + grpc_deps, |
||||
**kwargs |
||||
) |
||||
else: |
||||
native.cc_library( |
||||
name = name, |
||||
srcs = [":" + codegen_target], |
||||
hdrs = [":" + codegen_target], |
||||
deps = deps + ["//external:protobuf"], |
||||
**kwargs |
||||
) |
||||
proto_deps = ["_" + dep + "_only" for dep in deps if dep.find(":") == -1] |
||||
proto_deps += [dep.split(":")[0] + ":" + "_" + dep.split(":")[1] + "_only" for dep in deps if dep.find(":") != -1] |
||||
if well_known_protos: |
||||
proto_deps += well_known_proto_libs() |
||||
|
||||
native.proto_library( |
||||
name = proto_target, |
||||
srcs = srcs, |
||||
deps = proto_deps, |
||||
**kwargs |
||||
) |
||||
|
||||
native.cc_proto_library( |
||||
name = cc_proto_target, |
||||
deps = [":" + proto_target], |
||||
**kwargs |
||||
) |
||||
extra_deps.append(":" + cc_proto_target) |
||||
proto_targets.append(proto_target) |
||||
else: |
||||
if not srcs: |
||||
fail("srcs cannot be empty", "srcs") |
||||
proto_targets += srcs |
||||
|
||||
if not proto_only: |
||||
codegen_grpc_target = "_" + name + "_grpc_codegen" |
||||
generate_cc( |
||||
name = codegen_grpc_target, |
||||
srcs = proto_targets, |
||||
plugin = "@com_github_grpc_grpc//src/compiler:grpc_cpp_plugin", |
||||
well_known_protos = well_known_protos, |
||||
generate_mocks = generate_mocks, |
||||
**kwargs |
||||
) |
||||
|
||||
native.cc_library( |
||||
name = name, |
||||
srcs = [":" + codegen_grpc_target], |
||||
hdrs = [":" + codegen_grpc_target], |
||||
deps = deps + |
||||
extra_deps + |
||||
["@com_github_grpc_grpc//:grpc++_codegen_proto"], |
||||
**kwargs |
||||
) |
||||
|
@ -0,0 +1,215 @@ |
||||
load( |
||||
"//bazel:protobuf.bzl", |
||||
"get_include_protoc_args", |
||||
"get_plugin_args", |
||||
"proto_path_to_generated_filename", |
||||
) |
||||
load(":grpc_util.bzl", "to_upper_camel_with_extension",) |
||||
|
||||
_GRPC_PROTO_HEADER_FMT = "{}.pbrpc.h" |
||||
_GRPC_PROTO_SRC_FMT = "{}.pbrpc.m" |
||||
_PROTO_HEADER_FMT = "{}.pbobjc.h" |
||||
_PROTO_SRC_FMT = "{}.pbobjc.m" |
||||
_GENERATED_PROTOS_DIR = "_generated_protos" |
||||
|
||||
_GENERATE_HDRS = 1 |
||||
_GENERATE_SRCS = 2 |
||||
_GENERATE_NON_ARC_SRCS = 3 |
||||
|
||||
def _generate_objc_impl(ctx): |
||||
"""Implementation of the generate_objc rule.""" |
||||
protos = [ |
||||
f |
||||
for src in ctx.attr.deps |
||||
for f in src[ProtoInfo].transitive_imports.to_list() |
||||
] |
||||
|
||||
target_package = _join_directories([ctx.label.workspace_root, ctx.label.package]) |
||||
|
||||
files_with_rpc = [_label_to_full_file_path(f, target_package) for f in ctx.attr.srcs] |
||||
|
||||
outs = [] |
||||
for proto in protos: |
||||
outs += [_get_output_file_name_from_proto(proto, _PROTO_HEADER_FMT)] |
||||
outs += [_get_output_file_name_from_proto(proto, _PROTO_SRC_FMT)] |
||||
|
||||
file_path = _get_full_path_from_file(proto) |
||||
if file_path in files_with_rpc: |
||||
outs += [_get_output_file_name_from_proto(proto, _GRPC_PROTO_HEADER_FMT)] |
||||
outs += [_get_output_file_name_from_proto(proto, _GRPC_PROTO_SRC_FMT)] |
||||
|
||||
out_files = [ctx.actions.declare_file(out) for out in outs] |
||||
dir_out = _join_directories([ |
||||
str(ctx.genfiles_dir.path), target_package, _GENERATED_PROTOS_DIR |
||||
]) |
||||
|
||||
arguments = [] |
||||
if ctx.executable.plugin: |
||||
arguments += get_plugin_args( |
||||
ctx.executable.plugin, |
||||
[], |
||||
dir_out, |
||||
False, |
||||
) |
||||
tools = [ctx.executable.plugin] |
||||
arguments += ["--objc_out=" + dir_out] |
||||
|
||||
arguments += ["--proto_path=."] |
||||
arguments += get_include_protoc_args(protos) |
||||
# Include the output directory so that protoc puts the generated code in the |
||||
# right directory. |
||||
arguments += ["--proto_path={}".format(dir_out)] |
||||
arguments += ["--proto_path={}".format(_get_directory_from_proto(proto)) for proto in protos] |
||||
arguments += [_get_full_path_from_file(proto) for proto in protos] |
||||
|
||||
# create a list of well known proto files if the argument is non-None |
||||
well_known_proto_files = [] |
||||
if ctx.attr.use_well_known_protos: |
||||
f = ctx.attr.well_known_protos.files.to_list()[0].dirname |
||||
# go two levels up so that #import "google/protobuf/..." is correct |
||||
arguments += ["-I{0}".format(f + "/../..")] |
||||
well_known_proto_files = ctx.attr.well_known_protos.files.to_list() |
||||
ctx.actions.run( |
||||
inputs = protos + well_known_proto_files, |
||||
tools = tools, |
||||
outputs = out_files, |
||||
executable = ctx.executable._protoc, |
||||
arguments = arguments, |
||||
) |
||||
|
||||
return struct(files = depset(out_files)) |
||||
|
||||
def _label_to_full_file_path(src, package): |
||||
if not src.startswith("//"): |
||||
# Relative from current package |
||||
if not src.startswith(":"): |
||||
# "a.proto" -> ":a.proto" |
||||
src = ":" + src |
||||
src = "//" + package + src |
||||
# Converts //path/to/package:File.ext to path/to/package/File.ext. |
||||
src = src.replace("//", "") |
||||
src = src.replace(":", "/") |
||||
if src.startswith("/"): |
||||
# "//:a.proto" -> "/a.proto" so remove the initial slash |
||||
return src[1:] |
||||
else: |
||||
return src |
||||
|
||||
def _get_output_file_name_from_proto(proto, fmt): |
||||
return proto_path_to_generated_filename( |
||||
_GENERATED_PROTOS_DIR + "/" + |
||||
_get_directory_from_proto(proto) + _get_slash_or_null_from_proto(proto) + |
||||
to_upper_camel_with_extension(_get_file_name_from_proto(proto), "proto"), |
||||
fmt, |
||||
) |
||||
|
||||
def _get_file_name_from_proto(proto): |
||||
return proto.path.rpartition("/")[2] |
||||
|
||||
def _get_slash_or_null_from_proto(proto): |
||||
"""Potentially returns empty (if the file is in the root directory)""" |
||||
return proto.path.rpartition("/")[1] |
||||
|
||||
def _get_directory_from_proto(proto): |
||||
return proto.path.rpartition("/")[0] |
||||
|
||||
def _get_full_path_from_file(file): |
||||
gen_dir_length = 0 |
||||
# if file is generated, then prepare to remote its root |
||||
# (including CPU architecture...) |
||||
if not file.is_source: |
||||
gen_dir_length = len(file.root.path) + 1 |
||||
|
||||
return file.path[gen_dir_length:] |
||||
|
||||
def _join_directories(directories): |
||||
massaged_directories = [directory for directory in directories if len(directory) != 0] |
||||
return "/".join(massaged_directories) |
||||
|
||||
|
||||
generate_objc = rule( |
||||
attrs = { |
||||
"deps": attr.label_list( |
||||
mandatory = True, |
||||
allow_empty = False, |
||||
providers = [ProtoInfo], |
||||
), |
||||
"plugin": attr.label( |
||||
default = "@com_github_grpc_grpc//src/compiler:grpc_objective_c_plugin", |
||||
executable = True, |
||||
providers = ["files_to_run"], |
||||
cfg = "host", |
||||
), |
||||
"srcs": attr.string_list( |
||||
mandatory = False, |
||||
allow_empty = True |
||||
), |
||||
"use_well_known_protos": attr.bool( |
||||
mandatory = False, |
||||
default = False |
||||
), |
||||
"well_known_protos": attr.label( |
||||
default = "@com_google_protobuf//:well_known_protos" |
||||
), |
||||
"_protoc": attr.label( |
||||
default = Label("//external:protocol_compiler"), |
||||
executable = True, |
||||
cfg = "host", |
||||
), |
||||
}, |
||||
output_to_genfiles = True, |
||||
implementation = _generate_objc_impl |
||||
) |
||||
|
||||
def _group_objc_files_impl(ctx): |
||||
suffix = "" |
||||
if ctx.attr.gen_mode == _GENERATE_HDRS: |
||||
suffix = "h" |
||||
elif ctx.attr.gen_mode == _GENERATE_SRCS: |
||||
suffix = "pbrpc.m" |
||||
elif ctx.attr.gen_mode == _GENERATE_NON_ARC_SRCS: |
||||
suffix = "pbobjc.m" |
||||
else: |
||||
fail("Undefined gen_mode") |
||||
out_files = [ |
||||
file |
||||
for file in ctx.attr.src.files.to_list() |
||||
if file.basename.endswith(suffix) |
||||
] |
||||
return struct(files = depset(out_files)) |
||||
|
||||
generate_objc_hdrs = rule( |
||||
attrs = { |
||||
"src": attr.label( |
||||
mandatory = True, |
||||
), |
||||
"gen_mode": attr.int( |
||||
default = _GENERATE_HDRS, |
||||
) |
||||
}, |
||||
implementation = _group_objc_files_impl |
||||
) |
||||
|
||||
generate_objc_srcs = rule( |
||||
attrs = { |
||||
"src": attr.label( |
||||
mandatory = True, |
||||
), |
||||
"gen_mode": attr.int( |
||||
default = _GENERATE_SRCS, |
||||
) |
||||
}, |
||||
implementation = _group_objc_files_impl |
||||
) |
||||
|
||||
generate_objc_non_arc_srcs = rule( |
||||
attrs = { |
||||
"src": attr.label( |
||||
mandatory = True, |
||||
), |
||||
"gen_mode": attr.int( |
||||
default = _GENERATE_NON_ARC_SRCS, |
||||
) |
||||
}, |
||||
implementation = _group_objc_files_impl |
||||
) |
@ -1,16 +1,76 @@ |
||||
load("//third_party/py:python_configure.bzl", "python_configure") |
||||
load("@io_bazel_rules_python//python:pip.bzl", "pip_repositories") |
||||
load("@grpc_python_dependencies//:requirements.bzl", "pip_install") |
||||
load("@org_pubref_rules_protobuf//python:rules.bzl", "py_proto_repositories") |
||||
"""Load dependencies needed to compile and test the grpc python library as a 3rd-party consumer.""" |
||||
|
||||
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository") |
||||
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") |
||||
load("@com_github_grpc_grpc//third_party/py:python_configure.bzl", "python_configure") |
||||
|
||||
def grpc_python_deps(): |
||||
# TODO(https://github.com/grpc/grpc/issues/18256): Remove conditional. |
||||
if hasattr(native, "http_archive"): |
||||
python_configure(name = "local_config_python") |
||||
pip_repositories() |
||||
pip_install() |
||||
py_proto_repositories() |
||||
else: |
||||
print("Building Python gRPC with bazel 23.0+ is disabled pending " + |
||||
"resolution of https://github.com/grpc/grpc/issues/18256.") |
||||
native.bind( |
||||
name = "six", |
||||
actual = "@six_archive//:six", |
||||
) |
||||
|
||||
# protobuf binds to the name "six", so we can't use it here. |
||||
# See https://github.com/bazelbuild/bazel/issues/1952 for why bind is |
||||
# horrible. |
||||
if "six_archive" not in native.existing_rules(): |
||||
http_archive( |
||||
name = "six_archive", |
||||
strip_prefix = "six-1.12.0", |
||||
build_file = "@com_github_grpc_grpc//third_party:six.BUILD", |
||||
sha256 = "d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73", |
||||
urls = ["https://files.pythonhosted.org/packages/dd/bf/4138e7bfb757de47d1f4b6994648ec67a51efe58fa907c1e11e350cddfca/six-1.12.0.tar.gz"], |
||||
) |
||||
|
||||
if "enum34" not in native.existing_rules(): |
||||
http_archive( |
||||
name = "enum34", |
||||
build_file = "@com_github_grpc_grpc//third_party:enum34.BUILD", |
||||
strip_prefix = "enum34-1.1.6", |
||||
sha256 = "8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1", |
||||
urls = ["https://files.pythonhosted.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz"], |
||||
) |
||||
|
||||
if "futures" not in native.existing_rules(): |
||||
http_archive( |
||||
name = "futures", |
||||
build_file = "@com_github_grpc_grpc//third_party:futures.BUILD", |
||||
strip_prefix = "futures-3.3.0", |
||||
sha256 = "7e033af76a5e35f58e56da7a91e687706faf4e7bdfb2cbc3f2cca6b9bcda9794", |
||||
urls = ["https://files.pythonhosted.org/packages/47/04/5fc6c74ad114032cd2c544c575bffc17582295e9cd6a851d6026ab4b2c00/futures-3.3.0.tar.gz"], |
||||
) |
||||
|
||||
if "io_bazel_rules_python" not in native.existing_rules(): |
||||
git_repository( |
||||
name = "io_bazel_rules_python", |
||||
commit = "fdbb17a4118a1728d19e638a5291b4c4266ea5b8", |
||||
remote = "https://github.com/bazelbuild/rules_python.git", |
||||
) |
||||
|
||||
|
||||
if "rules_python" not in native.existing_rules(): |
||||
http_archive( |
||||
name = "rules_python", |
||||
url = "https://github.com/bazelbuild/rules_python/archive/9d68f24659e8ce8b736590ba1e4418af06ec2552.zip", |
||||
sha256 = "f7402f11691d657161f871e11968a984e5b48b023321935f5a55d7e56cf4758a", |
||||
strip_prefix = "rules_python-9d68f24659e8ce8b736590ba1e4418af06ec2552", |
||||
) |
||||
|
||||
python_configure(name = "local_config_python") |
||||
|
||||
native.bind( |
||||
name = "python_headers", |
||||
actual = "@local_config_python//:python_headers", |
||||
) |
||||
|
||||
if "cython" not in native.existing_rules(): |
||||
http_archive( |
||||
name = "cython", |
||||
build_file = "@com_github_grpc_grpc//third_party:cython.BUILD", |
||||
sha256 = "d68138a2381afbdd0876c3cb2a22389043fa01c4badede1228ee073032b07a27", |
||||
strip_prefix = "cython-c2b80d87658a8525ce091cbe146cb7eaa29fed5c", |
||||
urls = [ |
||||
"https://github.com/cython/cython/archive/c2b80d87658a8525ce091cbe146cb7eaa29fed5c.tar.gz", |
||||
], |
||||
) |
||||
|
||||
|
@ -0,0 +1,46 @@ |
||||
# Follows convention set in objectivec_helpers.cc in the protobuf ObjC compiler. |
||||
_upper_segments_list = ["url", "http", "https"] |
||||
|
||||
def strip_extension(str): |
||||
return str.rpartition(".")[0] |
||||
|
||||
def capitalize(word): |
||||
if word in _upper_segments_list: |
||||
return word.upper() |
||||
else: |
||||
return word.capitalize() |
||||
|
||||
def lower_underscore_to_upper_camel(str): |
||||
str = strip_extension(str) |
||||
camel_case_str = "" |
||||
word = "" |
||||
for c in str.elems(): # NB: assumes ASCII! |
||||
if c.isalpha(): |
||||
word += c.lower() |
||||
else: |
||||
# Last word is finished. |
||||
if len(word): |
||||
camel_case_str += capitalize(word) |
||||
word = "" |
||||
if c.isdigit(): |
||||
camel_case_str += c |
||||
|
||||
# Otherwise, drop the character. See UnderscoresToCamelCase in: |
||||
# third_party/protobuf/src/google/protobuf/compiler/objectivec/objectivec_helpers.cc |
||||
|
||||
if len(word): |
||||
camel_case_str += capitalize(word) |
||||
return camel_case_str |
||||
|
||||
def file_to_upper_camel(src): |
||||
elements = src.rpartition("/") |
||||
upper_camel = lower_underscore_to_upper_camel(elements[-1]) |
||||
return "".join(list(elements[:-1]) + [upper_camel]) |
||||
|
||||
def file_with_extension(src, ext): |
||||
elements = src.rpartition("/") |
||||
return "".join(list(elements[:-1]) + [elements[-1], "." + ext]) |
||||
|
||||
def to_upper_camel_with_extension(src, ext): |
||||
src = file_to_upper_camel(src) |
||||
return file_with_extension(src, ext) |
@ -0,0 +1,69 @@ |
||||
load( |
||||
"//bazel:generate_objc.bzl", |
||||
"generate_objc", |
||||
"generate_objc_hdrs", |
||||
"generate_objc_srcs", |
||||
"generate_objc_non_arc_srcs" |
||||
) |
||||
load("//bazel:protobuf.bzl", "well_known_proto_libs") |
||||
|
||||
def objc_grpc_library(name, deps, srcs = [], use_well_known_protos = False, **kwargs): |
||||
"""Generates messages and/or service stubs for given proto_library and all transitively dependent proto files |
||||
|
||||
Args: |
||||
name: name of target |
||||
deps: a list of proto_library targets that needs to be compiled |
||||
srcs: a list of labels to proto files with service stubs to be generated, |
||||
labels specified must include service stubs; otherwise Bazel will complain about srcs being empty |
||||
use_well_known_protos: whether to use the well known protos defined in |
||||
@com_google_protobuf//src/google/protobuf, default to false |
||||
**kwargs: other arguments |
||||
""" |
||||
objc_grpc_library_name = "_" + name + "_objc_grpc_library" |
||||
|
||||
generate_objc( |
||||
name = objc_grpc_library_name, |
||||
srcs = srcs, |
||||
deps = deps, |
||||
use_well_known_protos = use_well_known_protos, |
||||
**kwargs |
||||
) |
||||
|
||||
generate_objc_hdrs( |
||||
name = objc_grpc_library_name + "_hdrs", |
||||
src = ":" + objc_grpc_library_name, |
||||
) |
||||
|
||||
generate_objc_non_arc_srcs( |
||||
name = objc_grpc_library_name + "_non_arc_srcs", |
||||
src = ":" + objc_grpc_library_name, |
||||
) |
||||
|
||||
arc_srcs = None |
||||
if len(srcs) > 0: |
||||
generate_objc_srcs( |
||||
name = objc_grpc_library_name + "_srcs", |
||||
src = ":" + objc_grpc_library_name, |
||||
) |
||||
arc_srcs = [":" + objc_grpc_library_name + "_srcs"] |
||||
|
||||
native.objc_library( |
||||
name = name, |
||||
hdrs = [":" + objc_grpc_library_name + "_hdrs"], |
||||
non_arc_srcs = [":" + objc_grpc_library_name + "_non_arc_srcs"], |
||||
srcs = arc_srcs, |
||||
defines = [ |
||||
"GPB_USE_PROTOBUF_FRAMEWORK_IMPORTS=0", |
||||
"GPB_GRPC_FORWARD_DECLARE_MESSAGE_PROTO=0", |
||||
], |
||||
includes = [ |
||||
"_generated_protos", |
||||
"src/objective-c", |
||||
], |
||||
deps = [ |
||||
"@com_github_grpc_grpc//src/objective-c:proto_objc_rpc", |
||||
"@com_google_protobuf//:protobuf_objc", |
||||
], |
||||
**kwargs |
||||
) |
||||
|
@ -0,0 +1,171 @@ |
||||
"""Utility functions for generating protobuf code.""" |
||||
|
||||
_PROTO_EXTENSION = ".proto" |
||||
|
||||
def well_known_proto_libs(): |
||||
return [ |
||||
"@com_google_protobuf//:any_proto", |
||||
"@com_google_protobuf//:api_proto", |
||||
"@com_google_protobuf//:compiler_plugin_proto", |
||||
"@com_google_protobuf//:descriptor_proto", |
||||
"@com_google_protobuf//:duration_proto", |
||||
"@com_google_protobuf//:empty_proto", |
||||
"@com_google_protobuf//:field_mask_proto", |
||||
"@com_google_protobuf//:source_context_proto", |
||||
"@com_google_protobuf//:struct_proto", |
||||
"@com_google_protobuf//:timestamp_proto", |
||||
"@com_google_protobuf//:type_proto", |
||||
"@com_google_protobuf//:wrappers_proto", |
||||
] |
||||
|
||||
def get_proto_root(workspace_root): |
||||
"""Gets the root protobuf directory. |
||||
|
||||
Args: |
||||
workspace_root: context.label.workspace_root |
||||
|
||||
Returns: |
||||
The directory relative to which generated include paths should be. |
||||
""" |
||||
if workspace_root: |
||||
return "/{}".format(workspace_root) |
||||
else: |
||||
return "" |
||||
|
||||
def _strip_proto_extension(proto_filename): |
||||
if not proto_filename.endswith(_PROTO_EXTENSION): |
||||
fail('"{}" does not end with "{}"'.format( |
||||
proto_filename, |
||||
_PROTO_EXTENSION, |
||||
)) |
||||
return proto_filename[:-len(_PROTO_EXTENSION)] |
||||
|
||||
def proto_path_to_generated_filename(proto_path, fmt_str): |
||||
"""Calculates the name of a generated file for a protobuf path. |
||||
|
||||
For example, "examples/protos/helloworld.proto" might map to |
||||
"helloworld.pb.h". |
||||
|
||||
Args: |
||||
proto_path: The path to the .proto file. |
||||
fmt_str: A format string used to calculate the generated filename. For |
||||
example, "{}.pb.h" might be used to calculate a C++ header filename. |
||||
|
||||
Returns: |
||||
The generated filename. |
||||
""" |
||||
return fmt_str.format(_strip_proto_extension(proto_path)) |
||||
|
||||
def _get_include_directory(include): |
||||
directory = include.path |
||||
prefix_len = 0 |
||||
|
||||
virtual_imports = "/_virtual_imports/" |
||||
if not include.is_source and virtual_imports in include.path: |
||||
root, relative = include.path.split(virtual_imports, 2) |
||||
result = root + virtual_imports + relative.split("/", 1)[0] |
||||
return result |
||||
|
||||
if not include.is_source and directory.startswith(include.root.path): |
||||
prefix_len = len(include.root.path) + 1 |
||||
|
||||
if directory.startswith("external", prefix_len): |
||||
external_separator = directory.find("/", prefix_len) |
||||
repository_separator = directory.find("/", external_separator + 1) |
||||
return directory[:repository_separator] |
||||
else: |
||||
return include.root.path if include.root.path else "." |
||||
|
||||
def get_include_protoc_args(includes): |
||||
"""Returns protoc args that imports protos relative to their import root. |
||||
|
||||
Args: |
||||
includes: A list of included proto files. |
||||
|
||||
Returns: |
||||
A list of arguments to be passed to protoc. For example, ["--proto_path=."]. |
||||
""" |
||||
return [ |
||||
"--proto_path={}".format(_get_include_directory(include)) |
||||
for include in includes |
||||
] |
||||
|
||||
def get_plugin_args(plugin, flags, dir_out, generate_mocks): |
||||
"""Returns arguments configuring protoc to use a plugin for a language. |
||||
|
||||
Args: |
||||
plugin: An executable file to run as the protoc plugin. |
||||
flags: The plugin flags to be passed to protoc. |
||||
dir_out: The output directory for the plugin. |
||||
generate_mocks: A bool indicating whether to generate mocks. |
||||
|
||||
Returns: |
||||
A list of protoc arguments configuring the plugin. |
||||
""" |
||||
augmented_flags = list(flags) |
||||
if generate_mocks: |
||||
augmented_flags.append("generate_mock_code=true") |
||||
return [ |
||||
"--plugin=protoc-gen-PLUGIN=" + plugin.path, |
||||
"--PLUGIN_out=" + ",".join(augmented_flags) + ":" + dir_out, |
||||
] |
||||
|
||||
def _get_staged_proto_file(context, source_file):
    """Returns a proto file staged in the rule's own package.

    If the proto already lives in this rule's package it is used as-is;
    otherwise it is copied next to the rule so protoc sees a consistent
    layout.

    Args:
      context: The ctx object for the rule.
      source_file: The proto file to stage.

    Returns:
      The original file, or the declared copy.
    """
    if source_file.dirname == context.label.package:
        return source_file

    staged = context.actions.declare_file(source_file.basename)
    context.actions.run_shell(
        inputs = [source_file],
        outputs = [staged],
        command = "cp {} {}".format(source_file.path, staged.path),
        mnemonic = "CopySourceProto",
    )
    return staged
||||
|
||||
|
||||
def protos_from_context(context):
    """Copies proto files to the appropriate location.

    Args:
      context: The ctx object for the rule.

    Returns:
      A list of the protos.
    """
    # Stage every direct source of every proto_library dependency.
    return [
        _get_staged_proto_file(context, proto_file)
        for dep in context.attr.deps
        for proto_file in dep[ProtoInfo].direct_sources
    ]
||||
|
||||
|
||||
def includes_from_deps(deps):
    """Get includes from rule dependencies."""
    includes = []
    for dep in deps:
        # transitive_imports is a depset; flatten it into the result list.
        includes.extend(dep[ProtoInfo].transitive_imports.to_list())
    return includes
||||
|
||||
def get_proto_arguments(protos, genfiles_dir_path):
    """Get the protoc arguments specifying which protos to compile.

    Paths under the genfiles directory are rewritten relative to it so
    protoc receives workspace-relative names.
    """
    prefix_len = len(genfiles_dir_path) + 1  # +1 drops the path separator.
    return [
        proto.path[prefix_len:] if proto.path.startswith(genfiles_dir_path) else proto.path
        for proto in protos
    ]
||||
|
||||
def declare_out_files(protos, context, generated_file_format):
    """Declares and returns the files to be generated."""
    out_files = []
    for proto in protos:
        # Derive the generated file's name from the proto's basename.
        out_name = proto_path_to_generated_filename(
            proto.basename,
            generated_file_format,
        )
        out_files.append(context.actions.declare_file(out_name))
    return out_files
@ -0,0 +1,225 @@ |
||||
"""Generates and compiles Python gRPC stubs from proto_library rules.""" |
||||
|
||||
load( |
||||
"//bazel:protobuf.bzl", |
||||
"get_include_protoc_args", |
||||
"get_plugin_args", |
||||
"get_proto_root", |
||||
"proto_path_to_generated_filename", |
||||
"protos_from_context", |
||||
"includes_from_deps", |
||||
"get_proto_arguments", |
||||
"declare_out_files", |
||||
) |
||||
|
||||
_GENERATED_PROTO_FORMAT = "{}_pb2.py" |
||||
_GENERATED_GRPC_PROTO_FORMAT = "{}_pb2_grpc.py" |
||||
|
||||
def _generate_py_impl(context):
    """Implementation of _generate_pb2_src: runs protoc to emit *_pb2.py files.

    Args:
      context: The ctx object for the rule.

    Returns:
      A struct whose `files` depset contains the generated _pb2.py files.
    """
    protos = protos_from_context(context)
    includes = includes_from_deps(context.attr.deps)
    out_files = declare_out_files(protos, context, _GENERATED_PROTO_FORMAT)

    tools = [context.executable._protoc]

    # Fix: the genfiles --proto_path used to be emitted once per proto
    # (the comprehension never used its loop variable), producing duplicate
    # flags; a single occurrence is sufficient. The unused `proto_root`
    # local was also dropped.
    arguments = [
        "--python_out={}".format(context.genfiles_dir.path),
    ] + get_include_protoc_args(includes) + [
        "--proto_path={}".format(context.genfiles_dir.path),
    ]
    arguments += get_proto_arguments(protos, context.genfiles_dir.path)

    context.actions.run(
        inputs = protos + includes,
        tools = tools,
        outputs = out_files,
        executable = context.executable._protoc,
        arguments = arguments,
        mnemonic = "ProtocInvocation",
    )
    return struct(files = depset(out_files))
||||
|
||||
# Private rule: invokes protoc to generate <proto>_pb2.py sources for the
# proto_library targets listed in `deps`.
_generate_pb2_src = rule(
    attrs = {
        "deps": attr.label_list(
            mandatory = True,
            allow_empty = False,
            providers = [ProtoInfo],
        ),
        # The protocol compiler; built in the host configuration so it can
        # execute during the build.
        "_protoc": attr.label(
            default = Label("//external:protocol_compiler"),
            providers = ["files_to_run"],
            executable = True,
            cfg = "host",
        ),
    },
    implementation = _generate_py_impl,
)
||||
|
||||
def py_proto_library(
        name,
        deps,
        **kwargs):
    """Generate python code for a protobuf.

    Args:
      name: The name of the target.
      deps: A list of proto_library dependencies. Must contain a single element.
      **kwargs: Additional arguments forwarded to the generated rules.
    """
    # Validate first: only single-proto compilation is supported.
    if len(deps) != 1:
        fail("Can only compile a single proto at a time.")

    codegen_name = "_{}_codegen".format(name)

    _generate_pb2_src(
        name = codegen_name,
        deps = deps,
        **kwargs
    )

    native.py_library(
        name = name,
        srcs = [":{}".format(codegen_name)],
        deps = ["@com_google_protobuf//:protobuf_python"],
        **kwargs
    )
||||
|
||||
def _generate_pb2_grpc_src_impl(context):
    """Implementation of _generate_pb2_grpc_src: runs protoc with the gRPC
    Python plugin to emit *_pb2_grpc.py stubs.

    Args:
      context: The ctx object for the rule.

    Returns:
      A struct whose `files` depset contains the generated _pb2_grpc.py files.
    """
    protos = protos_from_context(context)
    includes = includes_from_deps(context.attr.deps)
    out_files = declare_out_files(protos, context, _GENERATED_GRPC_PROTO_FORMAT)

    # "grpc_2_0" selects the current stub codegen mode; any strip_prefixes
    # are forwarded to the plugin as extra flags.
    plugin_flags = ["grpc_2_0"] + context.attr.strip_prefixes

    tools = [context.executable._protoc, context.executable._plugin]
    arguments = get_plugin_args(
        context.executable._plugin,
        plugin_flags,
        context.genfiles_dir.path,
        False,
    )
    arguments += get_include_protoc_args(includes)

    # Fix: this flag used to be appended once per proto (the comprehension
    # never used its loop variable), producing duplicate flags; one
    # occurrence is sufficient. The unused `proto_root` local was dropped.
    arguments.append("--proto_path={}".format(context.genfiles_dir.path))
    arguments += get_proto_arguments(protos, context.genfiles_dir.path)

    context.actions.run(
        inputs = protos + includes,
        tools = tools,
        outputs = out_files,
        executable = context.executable._protoc,
        arguments = arguments,
        mnemonic = "ProtocInvocation",
    )
    return struct(files = depset(out_files))
||||
|
||||
|
||||
# Private rule: invokes protoc with the gRPC Python plugin to generate
# <proto>_pb2_grpc.py stubs for the proto_library targets in `deps`.
_generate_pb2_grpc_src = rule(
    attrs = {
        "deps": attr.label_list(
            mandatory = True,
            allow_empty = False,
            providers = [ProtoInfo],
        ),
        # Prefixes to strip from foo_pb2 module imports in generated stubs.
        "strip_prefixes": attr.string_list(),
        # The gRPC Python protoc plugin, built for the host so it can run
        # during the build.
        "_plugin": attr.label(
            executable = True,
            providers = ["files_to_run"],
            cfg = "host",
            default = Label("//src/compiler:grpc_python_plugin"),
        ),
        # The protocol compiler itself.
        "_protoc": attr.label(
            executable = True,
            providers = ["files_to_run"],
            cfg = "host",
            default = Label("//external:protocol_compiler"),
        ),
    },
    implementation = _generate_pb2_grpc_src_impl,
)
||||
|
||||
def py_grpc_library(
        name,
        srcs,
        deps,
        strip_prefixes = [],
        **kwargs):
    """Generate python code for gRPC services defined in a protobuf.

    Args:
      name: The name of the target.
      srcs: (List of `labels`) a single proto_library target containing the
        schema of the service.
      deps: (List of `labels`) a single py_proto_library target for the
        proto_library in `srcs`.
      strip_prefixes: (List of `strings`) If provided, this prefix will be
        stripped from the beginning of foo_pb2 modules imported by the
        generated stubs. This is useful in combination with the `imports`
        attribute of the `py_library` rule.
      **kwargs: Additional arguments to be supplied to the invocation of
        py_library.
    """
    # Validate first: both lists must hold exactly one target.
    if len(srcs) != 1:
        fail("Can only compile a single proto at a time.")
    if len(deps) != 1:
        fail("Deps must have length 1.")

    codegen_name = "_{}_grpc_codegen".format(name)

    _generate_pb2_grpc_src(
        name = codegen_name,
        deps = srcs,
        strip_prefixes = strip_prefixes,
        **kwargs
    )

    native.py_library(
        name = name,
        srcs = [":{}".format(codegen_name)],
        deps = [Label("//src/python/grpcio/grpc:grpcio")] + deps,
        **kwargs
    )
||||
|
||||
|
||||
def py2and3_test(name,
                 py_test = native.py_test,
                 **kwargs):
    """Runs a Python test under both Python 2 and Python 3.

    Args:
      name: The name of the test.
      py_test: The rule to use for each test.
      **kwargs: Keyword arguments passed directly to the underlying py_test
        rule.
    """
    if "python_version" in kwargs:
        fail("Cannot specify 'python_version' in py2and3_test.")

    # One concrete test per interpreter version, plus a suite tying them
    # together under the original name.
    cases = [
        (name + ".python2", "PY2"),
        (name + ".python3", "PY3"),
    ]
    for case_name, version in cases:
        py_test(
            name = case_name,
            python_version = version,
            **kwargs
        )

    suite_kwargs = {}
    if "visibility" in kwargs:
        suite_kwargs["visibility"] = kwargs["visibility"]

    native.test_suite(
        name = name,
        tests = [case_name for case_name, _ in cases],
        **suite_kwargs
    )
@ -0,0 +1,2 @@ |
||||
bazel-* |
||||
tools/bazel-* |
@ -0,0 +1,62 @@ |
||||
# gRPC Bazel BUILD file. |
||||
# |
||||
# Copyright 2019 The gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
|
||||
load("@com_github_grpc_grpc//bazel:python_rules.bzl", "py_proto_library", "py_grpc_library") |
||||
|
||||
package(default_testonly = 1) |
||||
|
||||
proto_library( |
||||
name = "helloworld_proto", |
||||
srcs = ["helloworld.proto"], |
||||
deps = [ |
||||
"@com_google_protobuf//:duration_proto", |
||||
"@com_google_protobuf//:timestamp_proto", |
||||
], |
||||
) |
||||
|
||||
py_proto_library( |
||||
name = "helloworld_py_pb2", |
||||
deps = [":helloworld_proto"], |
||||
) |
||||
|
||||
py_grpc_library( |
||||
name = "helloworld_py_pb2_grpc", |
||||
srcs = [":helloworld_proto"], |
||||
deps = [":helloworld_py_pb2"], |
||||
) |
||||
|
||||
py_proto_library( |
||||
name = "duration_py_pb2", |
||||
deps = ["@com_google_protobuf//:duration_proto"], |
||||
) |
||||
|
||||
py_proto_library( |
||||
name = "timestamp_py_pb2", |
||||
deps = ["@com_google_protobuf//:timestamp_proto"], |
||||
) |
||||
|
||||
py_test( |
||||
name = "import_test", |
||||
main = "helloworld.py", |
||||
srcs = ["helloworld.py"], |
||||
deps = [ |
||||
":helloworld_py_pb2", |
||||
":helloworld_py_pb2_grpc", |
||||
":duration_py_pb2", |
||||
":timestamp_py_pb2", |
||||
], |
||||
python_version = "PY3", |
||||
) |
@ -0,0 +1,5 @@ |
||||
## Bazel Workspace Test |
||||
|
||||
This directory houses a test ensuring that downstream projects can use |
||||
`@com_github_grpc_grpc//src/python/grpcio:grpcio`, `py_proto_library`, and |
||||
`py_grpc_library`. |
@ -0,0 +1,17 @@ |
||||
local_repository( |
||||
name = "com_github_grpc_grpc", |
||||
path = "../../..", |
||||
) |
||||
|
||||
load("@com_github_grpc_grpc//bazel:grpc_deps.bzl", "grpc_deps") |
||||
grpc_deps() |
||||
|
||||
# TODO(https://github.com/grpc/grpc/issues/19835): Remove. |
||||
load("@upb//bazel:workspace_deps.bzl", "upb_deps") |
||||
upb_deps() |
||||
|
||||
load("@build_bazel_rules_apple//apple:repositories.bzl", "apple_rules_dependencies") |
||||
apple_rules_dependencies() |
||||
|
||||
load("@build_bazel_apple_support//lib:repositories.bzl", "apple_support_dependencies") |
||||
apple_support_dependencies() |
@ -0,0 +1,43 @@ |
||||
// Copyright 2019 The gRPC authors. |
||||
// |
||||
// Licensed under the Apache License, Version 2.0 (the "License"); |
||||
// you may not use this file except in compliance with the License. |
||||
// You may obtain a copy of the License at |
||||
// |
||||
// http://www.apache.org/licenses/LICENSE-2.0 |
||||
// |
||||
// Unless required by applicable law or agreed to in writing, software |
||||
// distributed under the License is distributed on an "AS IS" BASIS, |
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
// See the License for the specific language governing permissions and |
||||
// limitations under the License. |
||||
|
||||
syntax = "proto3"; |
||||
|
||||
option java_multiple_files = true; |
||||
option java_package = "io.grpc.examples.helloworld"; |
||||
option java_outer_classname = "HelloWorldProto"; |
||||
option objc_class_prefix = "HLW"; |
||||
|
||||
package helloworld; |
||||
|
||||
import "google/protobuf/timestamp.proto"; |
||||
import "google/protobuf/duration.proto"; |
||||
|
||||
// The greeting service definition. |
||||
service Greeter { |
||||
// Sends a greeting |
||||
rpc SayHello (HelloRequest) returns (HelloReply) {} |
||||
} |
||||
|
||||
// The request message containing the user's name. |
||||
message HelloRequest { |
||||
string name = 1; |
||||
google.protobuf.Timestamp request_initiation = 2; |
||||
} |
||||
|
||||
// The response message containing the greetings |
||||
message HelloReply { |
||||
string message = 1; |
||||
google.protobuf.Duration request_duration = 2; |
||||
} |
@ -0,0 +1,73 @@ |
||||
# Copyright 2019 the gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
"""The Python implementation of the GRPC helloworld.Greeter client.""" |
||||
|
||||
from concurrent import futures
import contextlib
import datetime
import logging
import unittest

import grpc

import duration_pb2
import helloworld_pb2
import helloworld_pb2_grpc
import timestamp_pb2
||||
|
||||
_HOST = 'localhost' |
||||
_SERVER_ADDRESS = '{}:0'.format(_HOST) |
||||
|
||||
|
||||
class Greeter(helloworld_pb2_grpc.GreeterServicer):
    """Greeter servicer that reports how long each request was in flight."""

    def SayHello(self, request, context):
        """Returns a greeting along with the observed request latency.

        Args:
            request: A helloworld_pb2.HelloRequest carrying `name` and
                `request_initiation` (a google.protobuf.Timestamp).
            context: The grpc.ServicerContext for this RPC.

        Returns:
            A helloworld_pb2.HelloReply with the greeting and the elapsed
            time between request initiation and handling.
        """
        # Fixes: `datetime` is imported as a module, so the class must be
        # qualified (`datetime.datetime.now()`), and the proto field is
        # `request_initiation` (the original read misspelled
        # `request_initation`, which would raise AttributeError).
        request_in_flight = (datetime.datetime.now() -
                             request.request_initiation.ToDatetime())
        request_duration = duration_pb2.Duration()
        request_duration.FromTimedelta(request_in_flight)
        return helloworld_pb2.HelloReply(
            message='Hello, %s!' % request.name,
            request_duration=request_duration,
        )
||||
|
||||
|
||||
@contextlib.contextmanager
def _listening_server():
    """Context manager yielding the port of a running local Greeter server.

    The server binds an ephemeral port on localhost ('localhost:0') and is
    stopped with no grace period when the context exits.

    Yields:
        The integer port the server is listening on.
    """
    server = grpc.server(futures.ThreadPoolExecutor())
    helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server)
    port = server.add_insecure_port(_SERVER_ADDRESS)
    server.start()
    try:
        yield port
    finally:
        # stop(0) cancels in-flight RPCs immediately; fine for a test.
        server.stop(0)
||||
|
||||
|
||||
class ImportTest(unittest.TestCase):
    """End-to-end check that the generated modules import and interoperate."""

    # Fix: the original defined `def run():` — no `self`, overriding
    # unittest.TestCase.run, and not named test_*, so unittest never ran it
    # (and calling it would raise TypeError). It is now a proper test method.
    def test_import(self):
        with _listening_server() as port:
            with grpc.insecure_channel('{}:{}'.format(_HOST,
                                                      port)) as channel:
                stub = helloworld_pb2_grpc.GreeterStub(channel)
                request_timestamp = timestamp_pb2.Timestamp()
                request_timestamp.GetCurrentTime()
                response = stub.SayHello(
                    helloworld_pb2.HelloRequest(
                        name='you',
                        request_initiation=request_timestamp,
                    ),
                    wait_for_ready=True)
                self.assertEqual(response.message, "Hello, you!")
                self.assertGreater(response.request_duration.microseconds, 0)
||||
|
||||
|
||||
if __name__ == '__main__':
    # Configure default logging handlers before running the test suite.
    logging.basicConfig()
    unittest.main()
@ -0,0 +1 @@ |
||||
../../../../tools/bazel |
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,18 @@ |
||||
# Copyright 2019 gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
|
||||
set(UPB_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR}/third_party/upb) |
||||
|
||||
set(_gRPC_UPB_INCLUDE_DIR "${UPB_ROOT_DIR}") |
||||
set(_gRPC_UPB_GRPC_GENERATED_DIR "${CMAKE_CURRENT_SOURCE_DIR}/src/core/ext/upb-generated") |
@ -0,0 +1,41 @@ |
||||
# gRPC Versioning Guide |
||||
|
||||
## Versioning Overview |
||||
|
||||
All gRPC implementations use a three-part version number (`vX.Y.Z`) and strictly follow [semantic versioning](https://semver.org/), which defines the semantics of major, minor and patch components of the version number. In addition to that, gRPC versions evolve according to these rules: |
||||
- **Major version bumps** only happen on rare occasions. In order to qualify for a major version bump, certain criteria described later in this document need to be met. Most importantly, a major version increase must not break wire compatibility with other gRPC implementations so that existing gRPC libraries remain fully interoperable. |
||||
- **Minor version bumps** happen approx. every 6 weeks as part of the normal release cycle as defined by the gRPC release process. A new release branch (named `vMAJOR.MINOR.PATCH`) is cut every 6 weeks based on the [release schedule](https://github.com/grpc/grpc/blob/master/doc/grpc_release_schedule.md).
||||
- **Patch version bump** corresponds to bugfixes done on release branch. |
||||
|
||||
There are also a few extra rules regarding adding new gRPC implementations (e.g. adding support for a new language) |
||||
- New implementations start at v0.x.y version and until they reach 1.0, they are considered not ready for production workloads. Breaking API changes are allowed in the 0.x releases as the library is not considered stable yet. |
||||
- The "1.0" release has semantics of GA (generally available) and being production ready. Requirements to reach this milestone are at least these |
||||
- basic RPC features are feature complete and tested |
||||
- implementation is tested for interoperability with other languages |
||||
- Public API is declared stable |
||||
- Once a gRPC library reaches 1.0 (or higher version), the normal rules for versioning apply. |
||||
|
||||
## Policy for updating the major version number |
||||
|
||||
To avoid user confusion and simplify reasoning, the gRPC releases in different languages try to stay synchronized in terms of major and minor version (all languages follow the same release schedule). Nevertheless, because we also strictly follow semantic versioning, there are circumstances in which a gRPC implementation needs to break the version synchronicity and do a major version bump independently of other languages. |
||||
|
||||
### Situations when it's ok to do a major version bump |
||||
- **change forced by the language ecosystem:** when the language itself or its standard libraries that we depend on make a breaking change (something which is out of our control), reacting with updating gRPC APIs may be the only adequate response. |
||||
- **voluntary change:** Even in non-forced situations, there might be circumstances in which a breaking API change makes sense and represents a net win, but as a rule of thumb breaking changes are very disruptive for users, cause user fragmentation and incur high maintenance costs. Therefore, breaking API changes should be very rare events that need to be considered with extreme care and the bar for accepting such changes is intentionally set very high. |
||||
Example scenarios where a breaking API change might be adequate: |
||||
- fixing a security problem which requires changes to API (need to consider the non-breaking alternatives first) |
||||
- the change leads to very significant gains to security, usability or development velocity. These gains need to be clearly documented and claims need to be supported by evidence (ideally by numbers). Costs to the ecosystem (impact on users, dev team etc.) need to be taken into account and the change still needs to be a net positive after subtracting the costs. |
||||
|
||||
All proposals to make a breaking change need to be documented as a gRFC document (in the grpc/proposal repository) that covers at least these areas: |
||||
- Description of the proposal including an explanation why the proposed change is one of the very rare events where a breaking change is introduced. |
||||
- Migration costs (= what does it mean for the users to migrate to the new API, what are the costs and risks associated with it) |
||||
- Pros of the change (what is gained and how) |
||||
- Cons of the change (e.g. user confusion, lost users and user trust, work needed, added maintenance costs) |
||||
- Plan for supporting users still using the old major version (in case migration to the new major version is not trivial or not everyone can migrate easily) |
||||
|
||||
Note that while major version bump allows changing APIs used by the users, it must not impact the interoperability of the implementation with other gRPC implementations and the previous major version released. That means that **no backward incompatible protocol changes are allowed**: old clients must continue interoperating correctly with new servers and new servers with old clients. |
||||
|
||||
### Situations that DON'T warrant a major version bump |
||||
- Because other languages do so. This is not a good enough reason because |
||||
doing a major version bump has high potential for disturbing and confusing the users of that language and fragmenting the user base and that is a bigger threat than having language implementations at different major version (provided the state is well documented). Having some languages at different major version seems to be unavoidable anyway (due to forced version bumps), unless we bump some languages artificially. |
||||
- "I don't like this API": In retrospect, some API decisions made in the past necessarily turn out more lucky than others, but without strong reasons that would be in favor of changing the API and without enough supporting evidence (see previous section), other strategy than making a breaking API change needs to be used. Possible options: Expand the API to make it useful again; mark API as deprecated while keeping its functionality and providing a new better API. |
@ -0,0 +1,119 @@ |
||||
# Copyright 2019 gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
|
||||
licenses(["notice"]) # 3-clause BSD |
||||
|
||||
package(default_visibility = ["//visibility:public"]) |
||||
|
||||
load("@com_github_grpc_grpc//bazel:objc_grpc_library.bzl", "objc_grpc_library") |
||||
load("@build_bazel_rules_apple//apple:ios.bzl", "ios_application") |
||||
load("@build_bazel_rules_apple//apple:macos.bzl", "macos_application") |
||||
|
||||
objc_grpc_library( |
||||
name = "HelloWorld_grpc_proto", |
||||
srcs = ["//examples:protos/helloworld.proto"], |
||||
deps = ["//examples:helloworld_proto"], |
||||
tags = ["manual"], |
||||
) |
||||
|
||||
# This one works with import "external/com_github_grpc_grpc/examples/protos/Helloworld.pbrpc.h" |
||||
objc_grpc_library( |
||||
name = "HelloWorld_grpc_proto_external", |
||||
srcs = ["//external/com_github_grpc_grpc/examples:protos/helloworld.proto"], |
||||
deps = ["@com_github_grpc_grpc//examples:helloworld_proto"], |
||||
tags = ["manual"], |
||||
) |
||||
|
||||
objc_library( |
||||
name = "HelloWorld-lib", |
||||
srcs = glob(["helloworld/**/*.m",]), |
||||
hdrs = glob(["helloworld/**/*.h"]), |
||||
data = glob([ |
||||
"helloworld/HelloWorld/Base.lproj/**", |
||||
"helloworld/HelloWorld/Images.xcassets/**", |
||||
]), |
||||
includes = ["helloworld/HelloWorld"], |
||||
deps = [":HelloWorld_grpc_proto"], |
||||
tags = ["manual"], |
||||
) |
||||
|
||||
ios_application( |
||||
name = "HelloWorld", |
||||
bundle_id = "Google.HelloWorld", |
||||
families = [ |
||||
"iphone", |
||||
"ipad", |
||||
], |
||||
minimum_os_version = "8.0", |
||||
infoplists = ["helloworld/HelloWorld/Info.plist"], |
||||
deps = [":HelloWorld-lib"], |
||||
tags = ["manual"], |
||||
) |
||||
|
||||
objc_library( |
||||
name = "HelloWorldMacos-lib", |
||||
srcs = glob(["helloworld_macos/**/*.m",]), |
||||
hdrs = glob(["helloworld_macos/**/*.h"]), |
||||
data = glob([ |
||||
"helloworld_macos/HelloWorld/Base.lproj/**", |
||||
"helloworld_macos/HelloWorld/Images.xcassets/**", |
||||
]), |
||||
includes = ["helloworld_macos/HelloWorld"], |
||||
deps = [":HelloWorld_grpc_proto"], |
||||
tags = ["manual"], |
||||
) |
||||
|
||||
macos_application( |
||||
name = "HelloWorldMacos", |
||||
bundle_id = "io.grpc.HelloWorld", |
||||
minimum_os_version = "10.13", |
||||
entitlements = "helloworld_macos/HelloWorld/Helloworld.entitlements", |
||||
infoplists = ["helloworld_macos/HelloWorld/Info.plist"], |
||||
deps = [":HelloWorldMacos-lib"], |
||||
tags = ["manual"], |
||||
) |
||||
|
||||
objc_grpc_library( |
||||
name = "RouteGuide", |
||||
srcs = ["//examples:protos/route_guide.proto"], |
||||
deps = ["//examples:route_guide_proto"], |
||||
tags = ["manual"], |
||||
) |
||||
|
||||
objc_library( |
||||
name = "RouteGuideClient-lib", |
||||
srcs = glob(["route_guide/**/*.m"]), |
||||
hdrs = glob(["route_guide/**/*.h"]), |
||||
data = glob([ |
||||
"route_guide/Misc/Base.lproj/**", |
||||
"route_guide/Misc/Images.xcassets/**", |
||||
"route_guide/route_guide_db.json", |
||||
]), |
||||
includes = ["route_guide/Misc"], |
||||
deps = [":RouteGuide"], |
||||
tags = ["manual"], |
||||
) |
||||
|
||||
ios_application( |
||||
name = "RouteGuideClient", |
||||
bundle_id = "gRPC.RouteGuideClient", |
||||
families = [ |
||||
"iphone", |
||||
"ipad", |
||||
], |
||||
minimum_os_version = "8.0", |
||||
infoplists = ["route_guide/Misc/Info.plist"], |
||||
deps = [":RouteGuideClient-lib"], |
||||
tags = ["manual"], |
||||
) |
@ -0,0 +1,69 @@ |
||||
# Copyright 2019 The gRPC Authors |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
|
||||
filegroup( |
||||
name = "_credentials_files", |
||||
testonly = 1, |
||||
srcs = [ |
||||
"credentials/localhost.key", |
||||
"credentials/localhost.crt", |
||||
"credentials/root.crt", |
||||
], |
||||
) |
||||
|
||||
py_library( |
||||
name = "_credentials", |
||||
testonly = 1, |
||||
srcs = ["_credentials.py"], |
||||
data = [":_credentials_files"], |
||||
) |
||||
|
||||
py_binary( |
||||
name = "customized_auth_client", |
||||
testonly = 1, |
||||
srcs = ["customized_auth_client.py"], |
||||
deps = [ |
||||
":_credentials", |
||||
"//src/python/grpcio/grpc:grpcio", |
||||
"//examples:helloworld_py_pb2", |
||||
"//examples:helloworld_py_pb2_grpc", |
||||
], |
||||
python_version = "PY3", |
||||
) |
||||
|
||||
py_binary( |
||||
name = "customized_auth_server", |
||||
testonly = 1, |
||||
srcs = ["customized_auth_server.py"], |
||||
deps = [ |
||||
":_credentials", |
||||
"//src/python/grpcio/grpc:grpcio", |
||||
"//examples:helloworld_py_pb2", |
||||
"//examples:helloworld_py_pb2_grpc", |
||||
], |
||||
python_version = "PY3", |
||||
) |
||||
|
||||
py_test( |
||||
name = "_auth_example_test", |
||||
srcs = ["test/_auth_example_test.py"], |
||||
deps = [ |
||||
"//src/python/grpcio/grpc:grpcio", |
||||
"//examples:helloworld_py_pb2", |
||||
":customized_auth_client", |
||||
":customized_auth_server", |
||||
":_credentials", |
||||
], |
||||
python_version = "PY3", |
||||
) |
@ -0,0 +1,112 @@ |
||||
# Authentication Extension Example in gRPC Python |
||||
|
||||
## Check Our Guide First |
||||
|
||||
For most common usage of authentication in gRPC Python, please see our |
||||
[Authentication](https://grpc.io/docs/guides/auth/) guide's Python section. The
guide includes the following scenarios:
||||
|
||||
1. Server SSL credential setup |
||||
2. Client SSL credential setup |
||||
3. Authenticate with Google using a JWT |
||||
4. Authenticate with Google using an Oauth2 token |
||||
|
||||
Also, the guide talks about gRPC specific credential types. |
||||
|
||||
### Channel credentials |
||||
|
||||
Channel credentials are attached to a `Channel` object, the most common use case |
||||
are SSL credentials. |
||||
|
||||
### Call credentials |
||||
|
||||
Call credentials are attached to a `Call` object (corresponding to an RPC). |
||||
Under the hood, a call credential is a function that takes in information
about the RPC and modifies the metadata through a callback.
||||
|
||||
## About This Example |
||||
|
||||
This example focuses on extending gRPC authentication mechanism: |
||||
1) Customize authentication plugin; |
||||
2) Composite client side credentials; |
||||
3) Validation through interceptor on server side. |
||||
|
||||
## AuthMetadataPlugin: Manipulate metadata for each call |
||||
|
||||
Unlike TLS/SSL based authentication, the authentication extension in gRPC Python |
||||
lives at a much higher level of networking. It relies on the transmission of |
||||
metadata (HTTP Header) between client and server, instead of alternating the |
||||
transport protocol. |
||||
|
||||
gRPC Python provides a way to intercept an RPC and append authentication related |
||||
metadata through |
||||
[`AuthMetadataPlugin`](https://grpc.github.io/grpc/python/grpc.html#grpc.AuthMetadataPlugin). |
||||
Those in need of a custom authentication method may simply provide a concrete |
||||
implementation of the following interface: |
||||
|
||||
```Python |
||||
class AuthMetadataPlugin: |
||||
"""A specification for custom authentication.""" |
||||
|
||||
def __call__(self, context, callback): |
||||
"""Implements authentication by passing metadata to a callback. |
||||
|
||||
Implementations of this method must not block. |
||||
|
||||
Args: |
||||
context: An AuthMetadataContext providing information on the RPC that |
||||
the plugin is being called to authenticate. |
||||
callback: An AuthMetadataPluginCallback to be invoked either |
||||
synchronously or asynchronously. |
||||
""" |
||||
``` |
||||
|
||||
Then pass the instance of the concrete implementation to |
||||
`grpc.metadata_call_credentials` function to be converted into a |
||||
`CallCredentials` object. Please NOTE that it is possible to pass a Python |
||||
function object directly, but we recommend to inherit from the base class to |
||||
ensure implementation correctness. |
||||
|
||||
|
||||
```Python |
||||
def metadata_call_credentials(metadata_plugin, name=None): |
||||
"""Construct CallCredentials from an AuthMetadataPlugin. |
||||
|
||||
Args: |
||||
metadata_plugin: An AuthMetadataPlugin to use for authentication. |
||||
name: An optional name for the plugin. |
||||
|
||||
Returns: |
||||
A CallCredentials. |
||||
""" |
||||
``` |
||||
|
||||
The `CallCredentials` object can be passed directly into an RPC like: |
||||
|
||||
```Python |
||||
call_credentials = grpc.metadata_call_credentials(my_foo_plugin) |
||||
stub.FooRpc(request, credentials=call_credentials) |
||||
``` |
||||
|
||||
Or you can use `ChannelCredentials` and `CallCredentials` at the same time by |
||||
combining them: |
||||
|
||||
```Python |
||||
channel_credentials = ... |
||||
call_credentials = ... |
||||
composite_credentials = grpc.composite_channel_credentials( |
||||
channel_credential, |
||||
call_credentials) |
||||
channel = grpc.secure_channel(server_address, composite_credentials) |
||||
``` |
||||
|
||||
It is also possible to apply multiple `CallCredentials` to a single RPC: |
||||
|
||||
```Python |
||||
call_credentials_foo = ... |
||||
call_credentials_bar = ... |
||||
call_credentials = grpc.composite_call_credentials( |
||||
call_credentials_foo, |
||||
call_credentials_bar) |
||||
stub.FooRpc(request, credentials=call_credentials) |
||||
``` |
@ -0,0 +1,31 @@ |
||||
# Copyright 2019 The gRPC Authors |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
"""Loading SSL credentials for gRPC Python authentication example.""" |
||||
|
||||
from __future__ import absolute_import |
||||
from __future__ import division |
||||
from __future__ import print_function |
||||
|
||||
import os |
||||
|
||||
|
||||
def _load_credential_from_file(filepath): |
||||
real_path = os.path.join(os.path.dirname(__file__), filepath) |
||||
with open(real_path, 'rb') as f: |
||||
return f.read() |
||||
|
||||
|
||||
# TLS certificate presented by the example server (used by run_server
# when building its ssl_server_credentials).
SERVER_CERTIFICATE = _load_credential_from_file('credentials/localhost.crt')
# Private key matching SERVER_CERTIFICATE.
SERVER_CERTIFICATE_KEY = _load_credential_from_file('credentials/localhost.key')
# Root (CA) certificate clients pass to ssl_channel_credentials to verify
# the server.
ROOT_CERTIFICATE = _load_credential_from_file('credentials/root.crt')
@ -0,0 +1,19 @@ |
||||
-----BEGIN CERTIFICATE----- |
||||
MIIDFjCCAf4CCQCzrLIhrWa55zANBgkqhkiG9w0BAQsFADBCMQswCQYDVQQGEwJV |
||||
UzETMBEGA1UECAwKQ2FsaWZvcm5pYTEPMA0GA1UECgwGR29vZ2xlMQ0wCwYDVQQL |
||||
DARnUlBDMCAXDTE5MDYyNDIyMjIzM1oYDzIxMTkwNTMxMjIyMjMzWjBWMQswCQYD |
||||
VQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEPMA0GA1UECgwGR29vZ2xlMQ0w |
||||
CwYDVQQLDARnUlBDMRIwEAYDVQQDDAlsb2NhbGhvc3QwggEiMA0GCSqGSIb3DQEB |
||||
AQUAA4IBDwAwggEKAoIBAQCtCW0TjugnIUu8BEVIYvdMP+/2GENQDjZhZ8eKR5C6 |
||||
toDGbgjsDtt/GxISAg4cg70fIvy0XolnGPZodvfHDM4lJ7yHBOdZD8TXQoE6okR7 |
||||
HZuLUJ20M0pXgWqtRewKRUjuYsSDXBnzLiZw1dcv9nGpo+Bqa8NonpiGRRpEkshF |
||||
D6T9KU9Ts/x+wMQBIra2Gj0UMh79jPhUuxcYAQA0JQGivnOtdwuPiumpnUT8j8h6 |
||||
tWg5l01EsCZWJecCF85KnGpJEVYPyPqBqGsy0nGS9plGotOWF87+jyUQt+KD63xA |
||||
aBmTro86mKDDKEK4JvzjVeMGz2UbVcLPiiZnErTFaiXJAgMBAAEwDQYJKoZIhvcN |
||||
AQELBQADggEBAKsDgOPCWp5WCy17vJbRlgfgk05sVNIHZtzrmdswjBmvSg8MUpep |
||||
XqcPNUpsljAXsf9UM5IFEMRdilUsFGWvHjBEtNAW8WUK9UV18WRuU//0w1Mp5HAN |
||||
xUEKb4BoyZr65vlCnTR+AR5c9FfPvLibhr5qHs2RA8Y3GyLOcGqBWed87jhdQLCc |
||||
P1bxB+96le5JeXq0tw215lxonI2/3ZYVK4/ok9gwXrQoWm8YieJqitk/ZQ4S17/4 |
||||
pynHtDfdxLn23EXeGx+UTxJGfpRmhEZdJ+MN7QGYoomzx5qS5XoYKxRNrDlirJpr |
||||
OqXIn8E1it+6d5gOZfuHawcNGhRLplE/pfA= |
||||
-----END CERTIFICATE----- |
@ -0,0 +1,27 @@ |
||||
-----BEGIN RSA PRIVATE KEY----- |
||||
MIIEogIBAAKCAQEArQltE47oJyFLvARFSGL3TD/v9hhDUA42YWfHikeQuraAxm4I |
||||
7A7bfxsSEgIOHIO9HyL8tF6JZxj2aHb3xwzOJSe8hwTnWQ/E10KBOqJEex2bi1Cd |
||||
tDNKV4FqrUXsCkVI7mLEg1wZ8y4mcNXXL/ZxqaPgamvDaJ6YhkUaRJLIRQ+k/SlP |
||||
U7P8fsDEASK2tho9FDIe/Yz4VLsXGAEANCUBor5zrXcLj4rpqZ1E/I/IerVoOZdN |
||||
RLAmViXnAhfOSpxqSRFWD8j6gahrMtJxkvaZRqLTlhfO/o8lELfig+t8QGgZk66P |
||||
OpigwyhCuCb841XjBs9lG1XCz4omZxK0xWolyQIDAQABAoIBADeq/Kh6JT3RfGf0 |
||||
h8WN8TlaqHxnueAbcmtL0+oss+cdp7gu1jf7X6o4r0uT1a5ew40s2Fe+wj2kzkE1 |
||||
ZOlouTlC22gkr7j7Vbxa7PBMG/Pvxoa/XL0IczZLsGImSJXVTG1E4SvRiZeulTdf |
||||
1GbdxhtpWV1jZe5Wd4Na3+SHxF5S7m3PrHiZlYdz1ND+8XZs1NlL9+ej72qSFul9 |
||||
t/QjMWJ9pky/Wad5abnRLRyOsg+BsgnXbkUy2rD89ZxFMLda9pzXo3TPyAlBHonr |
||||
mkEsE4eRMWMpjBM79JbeyDdHn/cs/LjAZrzeDf7ugXr2CHQpKaM5O0PsNHezJII9 |
||||
L5kCfzECgYEA4M/rz1UP1/BJoSqigUlSs0tPAg8a5UlkVsh6Osuq72IPNo8qg/Fw |
||||
oV/IiIS+q+obRcFj1Od3PGdTpCJwW5dzd2fXBQGmGdj0HucnCrs13RtBh91JiF5i |
||||
y/YYI9KfgOG2ZT9gG68T0gTs6jRrS3Qd83npqjrkJqMOd7s00MK9tUcCgYEAxQq7 |
||||
T541oCYHSBRIIb0IrR25krZy9caxzCqPDwOcuuhaCqCiaq+ATvOWlSfgecm4eH0K |
||||
PCH0xlWxG0auPEwm4pA8+/WR/XJwscPZMuoht1EoKy1his4eKx/s7hHNeO6KOF0V |
||||
Y/zqIiuZnEwUoKbn7EqqNFSTT65PJKyGsICJFG8CgYAfaw9yl1myfQNdQb8aQGwN |
||||
YJ33FLNWje427qeeZe5KrDKiFloDvI9YDjHRWnPnRL1w/zj7fSm9yFb5HlMDieP6 |
||||
MQnsyjEzdY2QcA+VwVoiv3dmDHgFVeOKy6bOAtaFxYWfGr9MvygO9t9BT/gawGyb |
||||
JVORlc9i0vDnrMMR1dV7awKBgBpTWLtGc/u1mPt0Wj7HtsUKV6TWY32a0l5owTxM |
||||
S0BdksogtBJ06DukJ9Y9wawD23WdnyRxlPZ6tHLkeprrwbY7dypioOKvy4a0l+xJ |
||||
g7+uRCOgqIuXBkjUtx8HmeAyXp0xMo5tWArAsIFFWOwt4IadYygitJvMuh44PraO |
||||
NcJZAoGADEiV0dheXUCVr8DrtSom8DQMj92/G/FIYjXL8OUhh0+F+YlYP0+F8PEU |
||||
yYIWEqL/S5tVKYshimUXQa537JcRKsTVJBG/ZKD2kuqgOc72zQy3oplimXeJDCXY |
||||
h2eAQ0u8GN6tN9C4t8Kp4a3y6FGsxgu+UTxdnL3YQ+yHAVhtCzo= |
||||
-----END RSA PRIVATE KEY----- |
@ -0,0 +1,20 @@ |
||||
-----BEGIN CERTIFICATE----- |
||||
MIIDWTCCAkGgAwIBAgIJAPOConZMwykwMA0GCSqGSIb3DQEBCwUAMEIxCzAJBgNV |
||||
BAYTAlVTMRMwEQYDVQQIDApDYWxpZm9ybmlhMQ8wDQYDVQQKDAZHb29nbGUxDTAL |
||||
BgNVBAsMBGdSUEMwIBcNMTkwNjI0MjIyMDA3WhgPMjExOTA1MzEyMjIwMDdaMEIx |
||||
CzAJBgNVBAYTAlVTMRMwEQYDVQQIDApDYWxpZm9ybmlhMQ8wDQYDVQQKDAZHb29n |
||||
bGUxDTALBgNVBAsMBGdSUEMwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB |
||||
AQCwqei3TfyLidnQNDJ2lierMYo229K92DuORni7nSjJQ59Jc3dNMsmqGQJjCD8o |
||||
6mTlKM/oCbs27Wpx+OxcOLvT95j2kiDGca1fCvaMdguIod09SWiyMpv/hp0trLv7 |
||||
NJIKHznath6rHYX2Ii3fZ1yCPzyQbEPSAA+GNpoNm1v1ZWmWKke9v7vLlS3inNlW |
||||
Mt9jepK7DrtbNZnVDjeItnppBSbVYRMxIyNHkepFbqXx5TpkCvl4M4XQZw9bfSxQ |
||||
i3WZ3q+T1Tw//OUdPNc+OfMhu0MA0QoMwikskP0NaIC3dbJZ5Ogx0RcnaB4E+9C6 |
||||
O/znUEh3WuKVl5HXBF+UwWoFAgMBAAGjUDBOMB0GA1UdDgQWBBRm3JIgzgK4G97J |
||||
fbMGatWMZc7V3jAfBgNVHSMEGDAWgBRm3JIgzgK4G97JfbMGatWMZc7V3jAMBgNV |
||||
HRMEBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQCNiV8x41if094ry2srS0YucpiN |
||||
3rTPk08FOLsENTMYai524TGXJti1P6ofGr5KXCL0uxTByHE3fEiMMud2TIY5iHQo |
||||
Y4mzDTTcb+Q7yKHwYZMlcp6nO8W+NeY5t+S0JPHhb8deKWepcN2UpXBUYQLw7AiE |
||||
l96T9Gi+vC9h/XE5IVwHFQXTxf5UYzXtW1nfapvrOONg/ms41dgmrRKIi+knWfiJ |
||||
FdHpHX2sfDAoJtnpEISX+nxRGNVTLY64utXWm4yxaZJshvy2s8zWJgRg7rtwAhTT |
||||
Np9E9MnihXLEmDI4Co9XlLPJyZFmqImsbmVuKFeQOCiLAoPJaMI2lbi7fiTo |
||||
-----END CERTIFICATE----- |
@ -0,0 +1,103 @@ |
||||
# Copyright 2019 The gRPC Authors |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
"""Client of the Python example of customizing authentication mechanism.""" |
||||
|
||||
from __future__ import absolute_import |
||||
from __future__ import division |
||||
from __future__ import print_function |
||||
|
||||
import argparse |
||||
import contextlib |
||||
import logging |
||||
|
||||
import grpc |
||||
from examples import helloworld_pb2 |
||||
from examples import helloworld_pb2_grpc |
||||
from examples.python.auth import _credentials |
||||
|
||||
_LOGGER = logging.getLogger(__name__) |
||||
_LOGGER.setLevel(logging.INFO) |
||||
|
||||
_SERVER_ADDR_TEMPLATE = 'localhost:%d' |
||||
_SIGNATURE_HEADER_KEY = 'x-signature' |
||||
|
||||
|
||||
class AuthGateway(grpc.AuthMetadataPlugin):
    """Call-credentials plugin that signs each RPC with its method name.

    The "signature" is simply the RPC's method name reversed; the example
    server's interceptor checks for the same value.
    """

    def __call__(self, context, callback):
        """Attach signature metadata for one RPC via `callback`.

        Implementations of this method must not block.

        Args:
          context: An AuthMetadataContext providing information on the RPC
            that the plugin is being called to authenticate (e.g.
            service_url='https://localhost:50051/helloworld.Greeter',
            method_name='SayHello').
          callback: An AuthMetadataPluginCallback to be invoked either
            synchronously or asynchronously with (metadata, error).
        """
        reversed_method = context.method_name[::-1]
        metadata = ((_SIGNATURE_HEADER_KEY, reversed_method),)
        callback(metadata, None)
||||
|
||||
|
||||
@contextlib.contextmanager
def create_client_channel(addr):
    """Yield a secure channel to `addr` whose RPCs are all signed.

    Combines TLS channel credentials (verified against the example root
    certificate) with AuthGateway call credentials.
    """
    # Invoked once per RPC to attach the signature metadata.
    call_credentials = grpc.metadata_call_credentials(
        AuthGateway(), name='auth gateway')
    # Authenticates the underlying TLS connection for the channel's lifetime.
    channel_credential = grpc.ssl_channel_credentials(
        _credentials.ROOT_CERTIFICATE)
    composite_credentials = grpc.composite_channel_credentials(
        channel_credential, call_credentials)
    yield grpc.secure_channel(addr, composite_credentials)
||||
|
||||
|
||||
def send_rpc(channel):
    """Send one SayHello RPC over `channel`.

    Returns:
      The HelloReply on success, or the grpc.RpcError on failure (the
      error is logged rather than re-raised so callers can inspect it).
    """
    stub = helloworld_pb2_grpc.GreeterStub(channel)
    request = helloworld_pb2.HelloRequest(name='you')
    try:
        response = stub.SayHello(request)
    except grpc.RpcError as rpc_error:
        _LOGGER.error('Received error: %s', rpc_error)
        return rpc_error
    _LOGGER.info('Received message: %s', response)
    return response
||||
|
||||
|
||||
def main():
    """Parse the server port and send a single authenticated RPC."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--port',
                        nargs='?',
                        type=int,
                        default=50051,
                        help='the address of server')
    port = parser.parse_args().port

    with create_client_channel(_SERVER_ADDR_TEMPLATE % port) as channel:
        send_rpc(channel)
||||
|
||||
|
||||
if __name__ == '__main__': |
||||
logging.basicConfig(level=logging.INFO) |
||||
main() |
@ -0,0 +1,103 @@ |
||||
# Copyright 2019 The gRPC Authors |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
"""Server of the Python example of customizing authentication mechanism.""" |
||||
|
||||
from __future__ import absolute_import |
||||
from __future__ import division |
||||
from __future__ import print_function |
||||
|
||||
import argparse |
||||
import contextlib |
||||
import logging |
||||
from concurrent import futures |
||||
|
||||
import grpc |
||||
from examples import helloworld_pb2 |
||||
from examples import helloworld_pb2_grpc |
||||
from examples.python.auth import _credentials |
||||
|
||||
_LOGGER = logging.getLogger(__name__) |
||||
_LOGGER.setLevel(logging.INFO) |
||||
|
||||
_LISTEN_ADDRESS_TEMPLATE = 'localhost:%d' |
||||
_SIGNATURE_HEADER_KEY = 'x-signature' |
||||
|
||||
|
||||
class SignatureValidationInterceptor(grpc.ServerInterceptor):
    """Server interceptor that validates the per-call signature header.

    The client is expected to attach metadata of the form
    (_SIGNATURE_HEADER_KEY, <reversed method name>); any RPC without it
    is aborted with UNAUTHENTICATED.
    """

    def __init__(self):

        def deny(ignored_request, context):
            # Terminal handler installed for unauthenticated calls.
            context.abort(grpc.StatusCode.UNAUTHENTICATED, 'Invalid signature')

        self._abortion = grpc.unary_unary_rpc_method_handler(deny)

    def intercept_service(self, continuation, handler_call_details):
        """Pass the RPC through only if its signature metadata matches."""
        # handler_call_details.method looks like
        # '/helloworld.Greeter/SayHello'; keep only the final component.
        rpc_method = handler_call_details.method.split('/')[-1]
        expected = (_SIGNATURE_HEADER_KEY, rpc_method[::-1])
        if expected not in handler_call_details.invocation_metadata:
            return self._abortion
        return continuation(handler_call_details)
||||
|
||||
|
||||
class SimpleGreeter(helloworld_pb2_grpc.GreeterServicer):
    """Minimal Greeter servicer for the authentication example.

    Authentication is not handled here: run_server installs a
    SignatureValidationInterceptor in front of this servicer, so only
    RPCs carrying a valid signature reach SayHello.
    """

    def SayHello(self, request, unused_context):
        # Greet the caller by the name supplied in the request.
        return helloworld_pb2.HelloReply(message='Hello, %s!' % request.name)
||||
|
||||
|
||||
@contextlib.contextmanager
def run_server(port):
    """Run a TLS server guarded by the signature interceptor.

    Args:
      port: The port to bind; 0 lets the OS pick a free port.

    Yields:
      A (server, actual_port) pair; the server is stopped on exit.
    """
    interceptor = SignatureValidationInterceptor()
    server = grpc.server(
        futures.ThreadPoolExecutor(), interceptors=(interceptor,))
    helloworld_pb2_grpc.add_GreeterServicer_to_server(SimpleGreeter(), server)

    # Load the server's TLS key/certificate pair.
    server_credentials = grpc.ssl_server_credentials(
        ((_credentials.SERVER_CERTIFICATE_KEY,
          _credentials.SERVER_CERTIFICATE),))

    bound_port = server.add_secure_port(_LISTEN_ADDRESS_TEMPLATE % port,
                                        server_credentials)

    server.start()
    try:
        yield server, bound_port
    finally:
        server.stop(0)
||||
|
||||
|
||||
def main():
    """Parse the listening port, run the server, and block until shutdown."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--port', nargs='?', type=int, default=50051, help='the listening port')
    port = parser.parse_args().port

    with run_server(port) as (server, bound_port):
        logging.info('Server is listening at port :%d', bound_port)
        server.wait_for_termination()
||||
|
||||
|
||||
if __name__ == '__main__': |
||||
logging.basicConfig(level=logging.INFO) |
||||
main() |
@ -0,0 +1,56 @@ |
||||
# Copyright 2019 The gRPC Authors |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
"""Test for gRPC Python authentication example.""" |
||||
|
||||
from __future__ import absolute_import |
||||
from __future__ import division |
||||
from __future__ import print_function |
||||
|
||||
import unittest |
||||
|
||||
import grpc |
||||
from examples.python.auth import _credentials |
||||
from examples.python.auth import customized_auth_client |
||||
from examples.python.auth import customized_auth_server |
||||
|
||||
_SERVER_ADDR_TEMPLATE = 'localhost:%d' |
||||
|
||||
|
||||
class AuthExampleTest(unittest.TestCase):
    """End-to-end checks for the customized authentication example."""

    def test_successful_call(self):
        # Full credentials (TLS + call credentials): the RPC completes.
        with customized_auth_server.run_server(0) as (_, port):
            with customized_auth_client.create_client_channel(
                    _SERVER_ADDR_TEMPLATE % port) as channel:
                customized_auth_client.send_rpc(channel)
            # Reaching this point without an exception means success.

    def test_no_channel_credential(self):
        # Plaintext channel against a TLS server: the connection fails.
        with customized_auth_server.run_server(0) as (_, port):
            with grpc.insecure_channel(_SERVER_ADDR_TEMPLATE % port) as channel:
                response = customized_auth_client.send_rpc(channel)
            self.assertEqual(response.code(), grpc.StatusCode.UNAVAILABLE)

    def test_no_call_credential(self):
        # TLS-only channel: the server interceptor rejects the unsigned RPC.
        with customized_auth_server.run_server(0) as (_, port):
            channel_credential = grpc.ssl_channel_credentials(
                _credentials.ROOT_CERTIFICATE)
            with grpc.secure_channel(_SERVER_ADDR_TEMPLATE % port,
                                     channel_credential) as channel:
                response = customized_auth_client.send_rpc(channel)
            self.assertEqual(response.code(), grpc.StatusCode.UNAUTHENTICATED)
||||
|
||||
|
||||
if __name__ == '__main__': |
||||
unittest.main(verbosity=2) |
@ -0,0 +1,84 @@ |
||||
# gRPC Bazel BUILD file. |
||||
# |
||||
# Copyright 2019 The gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
|
||||
load("@grpc_python_dependencies//:requirements.bzl", "requirement") |
||||
load("//bazel:python_rules.bzl", "py_proto_library", "py_grpc_library") |
||||
|
||||
package(default_testonly = 1) |
||||
|
||||
proto_library( |
||||
name = "hash_name_proto", |
||||
srcs = ["hash_name.proto"], |
||||
) |
||||
|
||||
py_proto_library( |
||||
name = "hash_name_py_pb2", |
||||
deps = [":hash_name_proto"], |
||||
) |
||||
|
||||
py_grpc_library( |
||||
name = "hash_name_py_pb2_grpc", |
||||
srcs = [":hash_name_proto"], |
||||
deps = [":hash_name_py_pb2"], |
||||
) |
||||
|
||||
py_binary( |
||||
name = "client", |
||||
srcs = ["client.py"], |
||||
deps = [ |
||||
"//src/python/grpcio/grpc:grpcio", |
||||
":hash_name_py_pb2", |
||||
":hash_name_py_pb2_grpc", |
||||
"//external:six" |
||||
], |
||||
srcs_version = "PY2AND3", |
||||
python_version = "PY3", |
||||
) |
||||
|
||||
py_library( |
||||
name = "search", |
||||
srcs = ["search.py"], |
||||
srcs_version = "PY2AND3", |
||||
deps = [ |
||||
":hash_name_py_pb2", |
||||
], |
||||
) |
||||
|
||||
py_binary( |
||||
name = "server", |
||||
srcs = ["server.py"], |
||||
deps = [ |
||||
"//src/python/grpcio/grpc:grpcio", |
||||
":hash_name_py_pb2", |
||||
":search", |
||||
] + select({ |
||||
"//conditions:default": ["@futures//:futures"], |
||||
"//:python3": [], |
||||
}), |
||||
srcs_version = "PY2AND3", |
||||
python_version = "PY3", |
||||
) |
||||
|
||||
py_test( |
||||
name = "test/_cancellation_example_test", |
||||
srcs = ["test/_cancellation_example_test.py"], |
||||
data = [ |
||||
":client", |
||||
":server" |
||||
], |
||||
size = "small", |
||||
python_version = "PY3", |
||||
) |
@ -0,0 +1,127 @@ |
||||
### Cancellation |
||||
|
||||
In the example, we implement a silly algorithm. We search for bytestrings whose |
||||
hashes are similar to a given search string. For example, say we're looking for |
||||
the string "doctor". Our algorithm may return `JrqhZVkTDoctYrUlXDbL6pfYQHU=` or |
||||
`RC9/7mlM3ldy4TdoctOc6WzYbO4=`. This is a brute force algorithm, so the server |
||||
performing the search must be conscious of the resources it allows to each client |
||||
and each client must be conscientious of the resources it demands of the server. |
||||
|
||||
In particular, we ensure that client processes cancel the stream explicitly |
||||
before terminating and we ensure that server processes cancel RPCs that have gone on longer |
||||
than a certain number of iterations. |
||||
|
||||
#### Cancellation on the Client Side |
||||
|
||||
A client may cancel an RPC for several reasons. Perhaps the data it requested |
||||
has been made irrelevant. Perhaps you, as the client, want to be a good citizen |
||||
of the server and are conserving compute resources. |
||||
|
||||
##### Cancelling a Server-Side Unary RPC from the Client |
||||
|
||||
The default RPC methods on a stub will simply return the result of an RPC. |
||||
|
||||
```python |
||||
>>> stub = hash_name_pb2_grpc.HashFinderStub(channel) |
||||
>>> stub.Find(hash_name_pb2.HashNameRequest(desired_name=name)) |
||||
<hash_name_pb2.HashNameResponse object at 0x7fe2eb8ce2d0> |
||||
``` |
||||
|
||||
But you may use the `future()` method to receive an instance of `grpc.Future`. |
||||
This interface allows you to wait on a response with a timeout, add a callback |
||||
to be executed when the RPC completes, or to cancel the RPC before it has |
||||
completed. |
||||
|
||||
In the example, we use this interface to cancel our in-progress RPC when the |
||||
user interrupts the process with ctrl-c. |
||||
|
||||
```python |
||||
stub = hash_name_pb2_grpc.HashFinderStub(channel) |
||||
future = stub.Find.future(hash_name_pb2.HashNameRequest(desired_name=name)) |
||||
def cancel_request(unused_signum, unused_frame): |
||||
future.cancel() |
||||
sys.exit(0) |
||||
signal.signal(signal.SIGINT, cancel_request) |
||||
|
||||
result = future.result() |
||||
print(result) |
||||
``` |
||||
|
||||
We also call `sys.exit(0)` to terminate the process. If we do not do this, then |
||||
`future.result()` will throw an `RpcError`. Alternatively, you may catch this
||||
exception. |
||||
|
||||
|
||||
##### Cancelling a Server-Side Streaming RPC from the Client |
||||
|
||||
Cancelling a Server-side streaming RPC is even simpler from the perspective of |
||||
the gRPC API. The default stub method is already an instance of `grpc.Future`, |
||||
so the methods outlined above still apply. It is also a generator, so we may |
||||
iterate over it to yield the results of our RPC. |
||||
|
||||
```python |
||||
stub = hash_name_pb2_grpc.HashFinderStub(channel) |
||||
result_generator = stub.FindRange(hash_name_pb2.HashNameRequest(desired_name=name)) |
||||
def cancel_request(unused_signum, unused_frame): |
||||
result_generator.cancel() |
||||
sys.exit(0) |
||||
signal.signal(signal.SIGINT, cancel_request) |
||||
for result in result_generator: |
||||
print(result) |
||||
``` |
||||
|
||||
We also call `sys.exit(0)` here to terminate the process. Alternatively, you may |
||||
catch the `RpcError` raised by the for loop upon cancellation. |
||||
|
||||
|
||||
#### Cancellation on the Server Side |
||||
|
||||
A server is responsible for cancellation in two ways. It must respond in some way
||||
when a client initiates a cancellation, otherwise long-running computations |
||||
could continue indefinitely. |
||||
|
||||
It may also decide to cancel the RPC for its own reasons. In our example, the |
||||
server can be configured to cancel an RPC after a certain number of hashes has |
||||
been computed in order to conserve compute resources. |
||||
|
||||
##### Responding to Cancellations from a Servicer Thread |
||||
|
||||
It's important to remember that a gRPC Python server is backed by a thread pool |
||||
with a fixed size. When an RPC is cancelled, the library does *not* terminate |
||||
your servicer thread. It is your responsibility as the application author to |
||||
ensure that your servicer thread terminates soon after the RPC has been |
||||
cancelled. |
||||
|
||||
In this example, we use the `ServicerContext.add_callback` method to set a |
||||
`threading.Event` object when the RPC is terminated. We pass this `Event` object |
||||
down through our hashing algorithm and ensure to check that the RPC is still |
||||
ongoing before each iteration. |
||||
|
||||
```python |
||||
stop_event = threading.Event() |
||||
def on_rpc_done(): |
||||
# Regain servicer thread. |
||||
stop_event.set() |
||||
context.add_callback(on_rpc_done) |
||||
secret = _find_secret(stop_event) |
||||
``` |
||||
|
||||
##### Initiating a Cancellation on the Server Side |
||||
|
||||
Initiating a cancellation from the server side is simpler. Just call |
||||
`ServicerContext.cancel()`. |
||||
|
||||
In our example, we ensure that no single client is monopolizing the server by |
||||
cancelling after a configurable number of hashes have been checked. |
||||
|
||||
```python |
||||
try: |
||||
for candidate in secret_generator: |
||||
yield candidate |
||||
except ResourceLimitExceededError: |
||||
print("Cancelling RPC due to exhausted resources.") |
||||
context.cancel() |
||||
``` |
||||
|
||||
In this type of situation, you may also consider returning a more specific error |
||||
using the [`grpcio-status`](https://pypi.org/project/grpcio-status/) package. |
@ -0,0 +1,104 @@ |
||||
# Copyright 2019 the gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
"""An example of cancelling requests in gRPC.""" |
||||
|
||||
from __future__ import absolute_import |
||||
from __future__ import division |
||||
from __future__ import print_function |
||||
|
||||
import argparse |
||||
import logging |
||||
import signal |
||||
import sys |
||||
|
||||
import grpc |
||||
|
||||
from examples.python.cancellation import hash_name_pb2 |
||||
from examples.python.cancellation import hash_name_pb2_grpc |
||||
|
||||
_DESCRIPTION = "A client for finding hashes similar to names." |
||||
_LOGGER = logging.getLogger(__name__) |
||||
|
||||
|
||||
def run_unary_client(server_target, name, ideal_distance):
    """Request a single secret via the unary Find RPC; Ctrl-C cancels it."""
    with grpc.insecure_channel(server_target) as channel:
        stub = hash_name_pb2_grpc.HashFinderStub(channel)
        request = hash_name_pb2.HashNameRequest(
            desired_name=name, ideal_hamming_distance=ideal_distance)
        future = stub.Find.future(request, wait_for_ready=True)

        def cancel_request(unused_signum, unused_frame):
            # On SIGINT, withdraw the in-flight RPC before exiting so the
            # server can stop its search.
            future.cancel()
            sys.exit(0)

        signal.signal(signal.SIGINT, cancel_request)
        print(future.result())
||||
|
||||
|
||||
def run_streaming_client(server_target, name, ideal_distance,
                         interesting_distance):
    """Stream candidate secrets via FindRange; Ctrl-C cancels the stream."""
    with grpc.insecure_channel(server_target) as channel:
        stub = hash_name_pb2_grpc.HashFinderStub(channel)
        request = hash_name_pb2.HashNameRequest(
            desired_name=name,
            ideal_hamming_distance=ideal_distance,
            interesting_hamming_distance=interesting_distance)
        result_generator = stub.FindRange(request, wait_for_ready=True)

        def cancel_request(unused_signum, unused_frame):
            # On SIGINT, cancel the stream before exiting.
            result_generator.cancel()
            sys.exit(0)

        signal.signal(signal.SIGINT, cancel_request)
        for candidate in result_generator:
            print(candidate)
||||
|
||||
|
||||
def main():
    """Parse CLI arguments and run the unary or streaming client.

    When --show-inferior is given, the streaming FindRange client is used
    so less-than-ideal candidates are also printed; otherwise the unary
    Find client is used.
    """
    parser = argparse.ArgumentParser(description=_DESCRIPTION)
    # Positional argument: the name whose hash we want to approximate.
    parser.add_argument("name", type=str, help='The desired name.')
    parser.add_argument(
        "--ideal-distance",
        default=0,
        nargs='?',
        type=int,
        help="The desired Hamming distance.")
    parser.add_argument(
        '--server',
        default='localhost:50051',
        type=str,
        nargs='?',
        help='The host-port pair at which to reach the server.')
    parser.add_argument(
        '--show-inferior',
        default=None,
        type=int,
        nargs='?',
        help='Also show candidates with a Hamming distance less than this value.'
    )

    args = parser.parse_args()
    # Streaming mode only when the caller asked to see inferior candidates.
    if args.show_inferior is not None:
        run_streaming_client(args.server, args.name, args.ideal_distance,
                             args.show_inferior)
    else:
        run_unary_client(args.server, args.name, args.ideal_distance)
||||
|
||||
|
||||
if __name__ == "__main__": |
||||
logging.basicConfig() |
||||
main() |
@ -0,0 +1,56 @@ |
||||
// Copyright 2019 the gRPC authors. |
||||
// |
||||
// Licensed under the Apache License, Version 2.0 (the "License"); |
||||
// you may not use this file except in compliance with the License. |
||||
// You may obtain a copy of the License at |
||||
// |
||||
// http://www.apache.org/licenses/LICENSE-2.0 |
||||
// |
||||
// Unless required by applicable law or agreed to in writing, software |
||||
// distributed under the License is distributed on an "AS IS" BASIS, |
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
// See the License for the specific language governing permissions and |
||||
// limitations under the License. |
||||
|
||||
syntax = "proto3"; |
||||
|
||||
package hash_name; |
||||
|
||||
// A request for a single secret whose hash is similar to a desired name. |
||||
message HashNameRequest { |
||||
// The string that is desired in the secret's hash. |
||||
string desired_name = 1; |
||||
|
||||
  // The ideal Hamming distance between desired_name and the secret that will
||||
// be searched for. |
||||
int32 ideal_hamming_distance = 2; |
||||
|
||||
// A Hamming distance greater than the ideal Hamming distance. Search results |
||||
// with a Hamming distance less than this value but greater than the ideal |
||||
// distance will be returned back to the client but will not terminate the |
||||
// search. |
||||
int32 interesting_hamming_distance = 3; |
||||
} |
||||
|
||||
message HashNameResponse { |
||||
// The search result. |
||||
string secret = 1; |
||||
|
||||
// The hash of the search result. A substring of this is of |
||||
// ideal_hamming_distance Hamming distance or less from desired_name. |
||||
string hashed_name = 2; |
||||
|
||||
// The Hamming distance between hashed_name and desired_name. |
||||
int32 hamming_distance = 3; |
||||
} |
||||
|
||||
service HashFinder { |
||||
|
||||
// Search for a single string whose hash is similar to the specified |
||||
// desired_name. interesting_hamming_distance is ignored. |
||||
rpc Find (HashNameRequest) returns (HashNameResponse) {} |
||||
|
||||
// Search for a string whose hash is similar to the specified desired_name, |
||||
// but also stream back less-than-ideal candidates. |
||||
rpc FindRange (HashNameRequest) returns (stream HashNameResponse) {} |
||||
} |
@ -0,0 +1,148 @@ |
||||
# Copyright 2019 the gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
"""A search algorithm over the space of all bytestrings.""" |
||||
|
||||
from __future__ import absolute_import |
||||
from __future__ import division |
||||
from __future__ import print_function |
||||
|
||||
import base64 |
||||
import hashlib |
||||
import itertools |
||||
import logging |
||||
import struct |
||||
|
||||
from examples.python.cancellation import hash_name_pb2 |
||||
|
||||
_LOGGER = logging.getLogger(__name__) |
||||
_BYTE_MAX = 255 |
||||
|
||||
|
||||
def _get_hamming_distance(a, b): |
||||
"""Calculates hamming distance between strings of equal length.""" |
||||
distance = 0 |
||||
for char_a, char_b in zip(a, b): |
||||
if char_a != char_b: |
||||
distance += 1 |
||||
return distance |
||||
|
||||
|
||||
def _get_substring_hamming_distance(candidate, target): |
||||
"""Calculates the minimum hamming distance between between the target |
||||
and any substring of the candidate. |
||||
|
||||
Args: |
||||
candidate: The string whose substrings will be tested. |
||||
target: The target string. |
||||
|
||||
Returns: |
||||
The minimum Hamming distance between candidate and target. |
||||
""" |
||||
min_distance = None |
||||
if len(target) > len(candidate): |
||||
raise ValueError("Candidate must be at least as long as target.") |
||||
for i in range(len(candidate) - len(target) + 1): |
||||
distance = _get_hamming_distance(candidate[i:i + len(target)].lower(), |
||||
target.lower()) |
||||
if min_distance is None or distance < min_distance: |
||||
min_distance = distance |
||||
return min_distance |
||||
|
||||
|
||||
def _get_hash(secret): |
||||
hasher = hashlib.sha1() |
||||
hasher.update(secret) |
||||
return base64.b64encode(hasher.digest()).decode('ascii') |
||||
|
||||
|
||||
class ResourceLimitExceededError(Exception):
    """Raised when a search has consumed more resources than it was allowed."""
||||
|
||||
|
||||
def _bytestrings_of_length(length):
    """Generates a stream containing all bytestrings of a given length.

    Args:
      length: A non-negative integer length.

    Yields:
      All bytestrings of length `length`, in ascending order.
    """
    # The range must include _BYTE_MAX itself: the previous
    # `range(_BYTE_MAX)` stopped at 254, so no bytestring containing the
    # byte 0xFF was ever generated, contradicting this docstring.
    for digits in itertools.product(range(_BYTE_MAX + 1), repeat=length):
        # struct.pack('B', i) keeps this correct on both Python 2 and 3
        # (bytes(digits) would behave differently on Python 2).
        yield b''.join(struct.pack('B', i) for i in digits)
||||
|
||||
|
||||
def _all_bytestrings():
    """Generates a stream containing all possible bytestrings.

    This generator does not terminate.

    Yields:
      All bytestrings in ascending order of length, beginning with the empty
      bytestring (length 0).
    """
    for length in itertools.count():
        for bytestring in _bytestrings_of_length(length):
            yield bytestring
||||
|
||||
|
||||
def search(target,
           ideal_distance,
           stop_event,
           maximum_hashes,
           interesting_hamming_distance=None):
    """Find candidate strings.

    Search through the space of all bytestrings, in order of increasing length,
    until a hash with a Hamming distance of `ideal_distance` or less has been
    found.

    Args:
      target: The search string.
      ideal_distance: The desired Hamming distance.
      stop_event: An event indicating whether the RPC should terminate.
      maximum_hashes: The maximum number of hashes to check before stopping.
      interesting_hamming_distance: If specified, strings with a Hamming
        distance from the target below this value will be yielded.

    Yields:
      Instances of HashNameResponse. The final entry in the stream will be of
        `ideal_distance` Hamming distance or less from the target string,
        while all others will be of less than `interesting_hamming_distance`.

    Raises:
      ResourceLimitExceededError: If the computation exceeds `maximum_hashes`
        iterations.
    """
    hashes_computed = 0
    for secret in _all_bytestrings():
        if stop_event.is_set():
            # PEP 479 (Python 3.7+) turns StopIteration raised inside a
            # generator into a RuntimeError; a bare return ends the stream
            # correctly on both Python 2 and 3.
            return
        candidate_hash = _get_hash(secret)
        distance = _get_substring_hamming_distance(candidate_hash, target)
        if distance <= ideal_distance:
            # Check the ideal distance first: an ideal candidate may also
            # satisfy interesting_hamming_distance, and must still terminate
            # the stream as the docstring promises. (Previously the
            # interesting branch was tested first, so such a candidate never
            # ended the search.)
            yield hash_name_pb2.HashNameResponse(
                secret=base64.b64encode(secret),
                hashed_name=candidate_hash,
                hamming_distance=distance)
            return
        if interesting_hamming_distance is not None and distance <= interesting_hamming_distance:
            # Surface interesting candidates, but don't stop.
            yield hash_name_pb2.HashNameResponse(
                secret=base64.b64encode(secret),
                hashed_name=candidate_hash,
                hamming_distance=distance)
        hashes_computed += 1
        if hashes_computed == maximum_hashes:
            raise ResourceLimitExceededError()
@ -0,0 +1,124 @@ |
||||
# Copyright 2019 the gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
"""An example of cancelling requests in gRPC.""" |
||||
|
||||
from __future__ import absolute_import |
||||
from __future__ import division |
||||
from __future__ import print_function |
||||
|
||||
from concurrent import futures |
||||
import argparse |
||||
import logging |
||||
import threading |
||||
|
||||
import grpc |
||||
import search |
||||
|
||||
from examples.python.cancellation import hash_name_pb2 |
||||
from examples.python.cancellation import hash_name_pb2_grpc |
||||
|
||||
_LOGGER = logging.getLogger(__name__) |
||||
_SERVER_HOST = 'localhost' |
||||
|
||||
_DESCRIPTION = "A server for finding hashes similar to names." |
||||
|
||||
|
||||
class HashFinder(hash_name_pb2_grpc.HashFinderServicer):
    """Servicer implementing the HashFinder service from hash_name.proto."""

    def __init__(self, maximum_hashes):
        super(HashFinder, self).__init__()
        # Upper bound on hashes examined per RPC before cancelling it.
        self._maximum_hashes = maximum_hashes

    def Find(self, request, context):
        """Unary-unary handler: returns only the final (ideal) candidate."""
        stop_event = threading.Event()

        def on_rpc_done():
            _LOGGER.debug("Attempting to regain servicer thread.")
            stop_event.set()

        # When the client cancels (or the RPC otherwise ends), signal the
        # search loop so the servicer thread is released promptly.
        context.add_callback(on_rpc_done)
        candidates = []
        try:
            candidates = list(
                search.search(request.desired_name,
                              request.ideal_hamming_distance, stop_event,
                              self._maximum_hashes))
        except search.ResourceLimitExceededError:
            _LOGGER.info("Cancelling RPC due to exhausted resources.")
            context.cancel()
        _LOGGER.debug("Servicer thread returning.")
        return candidates[-1] if candidates else hash_name_pb2.HashNameResponse()

    def FindRange(self, request, context):
        """Unary-stream handler: streams interesting candidates, then the
        ideal one."""
        stop_event = threading.Event()

        def on_rpc_done():
            _LOGGER.debug("Attempting to regain servicer thread.")
            stop_event.set()

        context.add_callback(on_rpc_done)
        candidate_stream = search.search(
            request.desired_name,
            request.ideal_hamming_distance,
            stop_event,
            self._maximum_hashes,
            interesting_hamming_distance=request.interesting_hamming_distance)
        try:
            for candidate in candidate_stream:
                yield candidate
        except search.ResourceLimitExceededError:
            _LOGGER.info("Cancelling RPC due to exhausted resources.")
            context.cancel()
        _LOGGER.debug("Regained servicer thread.")
||||
|
||||
|
||||
def _running_server(port, maximum_hashes):
    """Start a HashFinder server and return it.

    Args:
      port: The port to listen on, or 0 to let the OS pick one.
      maximum_hashes: The per-RPC hash budget handed to the servicer.

    Returns:
      The started grpc.Server.
    """
    # We use only a single servicer thread here to demonstrate that, if
    # managed carefully, cancelled RPCs need not continue occupying servicer
    # threads.
    server = grpc.server(
        futures.ThreadPoolExecutor(max_workers=1), maximum_concurrent_rpcs=1)
    hash_name_pb2_grpc.add_HashFinderServicer_to_server(
        HashFinder(maximum_hashes), server)
    address = '{}:{}'.format(_SERVER_HOST, port)
    # Report the port that was actually bound (previously this value was
    # assigned but unused, and the printed address was wrong for port 0).
    actual_port = server.add_insecure_port(address)
    server.start()
    print("Server listening at '{}:{}'".format(_SERVER_HOST, actual_port))
    return server
||||
|
||||
|
||||
def main():
    """Parse command-line flags and serve until terminated."""
    arg_parser = argparse.ArgumentParser(description=_DESCRIPTION)
    arg_parser.add_argument(
        '--port',
        type=int,
        default=50051,
        nargs='?',
        help='The port on which the server will listen.')
    arg_parser.add_argument(
        '--maximum-hashes',
        type=int,
        default=1000000,
        nargs='?',
        help='The maximum number of hashes to search before cancelling.')
    parsed_args = arg_parser.parse_args()
    hash_server = _running_server(parsed_args.port, parsed_args.maximum_hashes)
    hash_server.wait_for_termination()
||||
|
||||
|
||||
if __name__ == "__main__":
    # Install a default stderr logging handler before starting the server.
    logging.basicConfig()
    main()
@ -0,0 +1,87 @@ |
||||
# Copyright 2019 the gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
"""Test for cancellation example.""" |
||||
|
||||
import contextlib |
||||
import os |
||||
import signal |
||||
import socket |
||||
import subprocess |
||||
import unittest |
||||
|
||||
_BINARY_DIR = os.path.realpath( |
||||
os.path.join(os.path.dirname(os.path.abspath(__file__)), '..')) |
||||
_SERVER_PATH = os.path.join(_BINARY_DIR, 'server') |
||||
_CLIENT_PATH = os.path.join(_BINARY_DIR, 'client') |
||||
|
||||
|
||||
@contextlib.contextmanager |
||||
def _get_port(): |
||||
sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) |
||||
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) |
||||
if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 0: |
||||
raise RuntimeError("Failed to set SO_REUSEPORT.") |
||||
sock.bind(('', 0)) |
||||
try: |
||||
yield sock.getsockname()[1] |
||||
finally: |
||||
sock.close() |
||||
|
||||
|
||||
def _start_client(server_port,
                  desired_string,
                  ideal_distance,
                  interesting_distance=None):
    """Launch the example client subprocess against a local server."""
    command = [
        _CLIENT_PATH, desired_string, '--server',
        'localhost:{}'.format(server_port), '--ideal-distance',
        str(ideal_distance)
    ]
    if interesting_distance is not None:
        command.extend(('--show-inferior', interesting_distance))
    return subprocess.Popen(command)
||||
|
||||
|
||||
class CancellationExampleTest(unittest.TestCase):
    """End-to-end tests that drive the compiled server and client binaries."""

    def test_successful_run(self):
        """A short search should succeed and leave the server running."""
        with _get_port() as test_port:
            server_process = subprocess.Popen((_SERVER_PATH, '--port',
                                               str(test_port)))
            try:
                client_process = _start_client(test_port, 'aa', 0)
                client_return_code = client_process.wait()
                self.assertEqual(0, client_return_code)
                # The server must still be alive after serving the RPC.
                self.assertIsNone(server_process.poll())
            finally:
                server_process.kill()
                server_process.wait()

    def test_graceful_sigint(self):
        """Interrupting one client must not wedge the servicer thread."""
        with _get_port() as test_port:
            server_process = subprocess.Popen((_SERVER_PATH, '--port',
                                               str(test_port)))
            try:
                # Start a long-running search, then interrupt it mid-flight.
                client_process1 = _start_client(test_port, 'aaaaaaaaaa', 0)
                client_process1.send_signal(signal.SIGINT)
                client_process1.wait()
                # A second, short search should still complete, proving the
                # single servicer thread was reclaimed after cancellation.
                client_process2 = _start_client(test_port, 'aa', 0)
                client_return_code = client_process2.wait()
                self.assertEqual(0, client_return_code)
                self.assertIsNone(server_process.poll())
            finally:
                server_process.kill()
                server_process.wait()
||||
|
||||
|
||||
if __name__ == '__main__':
    # Verbose output makes individual test results visible in CI logs.
    unittest.main(verbosity=2)
@ -0,0 +1,49 @@ |
||||
# Copyright 2019 The gRPC Authors |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
|
||||
# Compression-capable example server binary.
py_binary(
    name = "server",
    srcs = ["server.py"],
    deps = [
        "//src/python/grpcio/grpc:grpcio",
        "//examples:helloworld_py_pb2",
        "//examples:helloworld_py_pb2_grpc",
    ],
    srcs_version = "PY2AND3",
    python_version = "PY3",
)

# Compression-capable example client binary.
py_binary(
    name = "client",
    srcs = ["client.py"],
    deps = [
        "//src/python/grpcio/grpc:grpcio",
        "//examples:helloworld_py_pb2",
        "//examples:helloworld_py_pb2_grpc",
    ],
    srcs_version = "PY2AND3",
    python_version = "PY3",
)

# End-to-end test; it shells out to the :client and :server binaries, so they
# are supplied as runtime data rather than Python deps.
py_test(
    name = "test/compression_example_test",
    srcs = ["test/compression_example_test.py"],
    srcs_version = "PY2AND3",
    data = [
        ":client",
        ":server",
    ],
    size = "small",
    python_version = "PY3",
)
@ -0,0 +1,58 @@ |
||||
## Compression with gRPC Python |
||||
|
||||
gRPC offers lossless compression options in order to decrease the number of bits
transferred over the wire. Three compression options are available:
||||
|
||||
- `grpc.Compression.NoCompression` - No compression is applied to the payload. (default) |
||||
- `grpc.Compression.Deflate` - The "Deflate" algorithm is applied to the payload. |
||||
- `grpc.Compression.Gzip` - The Gzip algorithm is applied to the payload. |
||||
|
||||
The default option on both clients and servers is `grpc.Compression.NoCompression`. |
||||
|
||||
See [the gRPC Compression Spec](https://github.com/grpc/grpc/blob/master/doc/compression.md) |
||||
for more information. |
||||
|
||||
### Client Side Compression |
||||
|
||||
Compression may be set at two levels on the client side. |
||||
|
||||
#### At the channel level |
||||
|
||||
```python |
||||
with grpc.insecure_channel('foo.bar:1234', compression=grpc.Compression.Gzip) as channel: |
||||
use_channel(channel) |
||||
``` |
||||
|
||||
#### At the call level |
||||
|
||||
Setting the compression method at the call level will override any settings on |
||||
the channel level. |
||||
|
||||
```python |
||||
stub = helloworld_pb2_grpc.GreeterStub(channel) |
||||
response = stub.SayHello(helloworld_pb2.HelloRequest(name='you'), |
||||
compression=grpc.Compression.Deflate) |
||||
``` |
||||
|
||||
|
||||
### Server Side Compression |
||||
|
||||
Additionally, compression may be set at two levels on the server side. |
||||
|
||||
#### On the entire server |
||||
|
||||
```python |
||||
server = grpc.server(futures.ThreadPoolExecutor(), |
||||
compression=grpc.Compression.Gzip) |
||||
``` |
||||
|
||||
#### For an individual RPC |
||||
|
||||
```python |
||||
def SayHello(self, request, context): |
||||
context.set_response_compression(grpc.Compression.NoCompression) |
||||
return helloworld_pb2.HelloReply(message='Hello, %s!' % request.name) |
||||
``` |
||||
|
||||
Setting the compression method for an individual RPC will override any setting |
||||
supplied at server creation time. |
@ -0,0 +1,76 @@ |
||||
# Copyright 2019 the gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
"""An example of compression on the client side with gRPC.""" |
||||
|
||||
from __future__ import absolute_import |
||||
from __future__ import division |
||||
from __future__ import print_function |
||||
|
||||
import argparse |
||||
import logging |
||||
import grpc |
||||
|
||||
from examples import helloworld_pb2 |
||||
from examples import helloworld_pb2_grpc |
||||
|
||||
_DESCRIPTION = 'A client capable of compression.' |
||||
_COMPRESSION_OPTIONS = { |
||||
"none": grpc.Compression.NoCompression, |
||||
"deflate": grpc.Compression.Deflate, |
||||
"gzip": grpc.Compression.Gzip, |
||||
} |
||||
|
||||
_LOGGER = logging.getLogger(__name__) |
||||
|
||||
|
||||
def run_client(channel_compression, call_compression, target):
    """Issue a single SayHello RPC with the requested compression settings."""
    request = helloworld_pb2.HelloRequest(name='you')
    with grpc.insecure_channel(
            target, compression=channel_compression) as channel:
        stub = helloworld_pb2_grpc.GreeterStub(channel)
        # Call-level compression overrides the channel-level setting.
        reply = stub.SayHello(
            request, compression=call_compression, wait_for_ready=True)
        print("Response: {}".format(reply))
||||
|
||||
|
||||
def main():
    """Parse command-line flags and run the compression-aware client once."""
    flag_parser = argparse.ArgumentParser(description=_DESCRIPTION)
    flag_parser.add_argument(
        '--channel_compression',
        default='none',
        nargs='?',
        choices=_COMPRESSION_OPTIONS.keys(),
        help='The compression method to use for the channel.')
    flag_parser.add_argument(
        '--call_compression',
        default='none',
        nargs='?',
        choices=_COMPRESSION_OPTIONS.keys(),
        help='The compression method to use for an individual call.')
    flag_parser.add_argument(
        '--server',
        default='localhost:50051',
        type=str,
        nargs='?',
        help='The host-port pair at which to reach the server.')
    parsed = flag_parser.parse_args()
    run_client(_COMPRESSION_OPTIONS[parsed.channel_compression],
               _COMPRESSION_OPTIONS[parsed.call_compression], parsed.server)
||||
|
||||
|
||||
if __name__ == "__main__":
    # Install a default stderr logging handler before running the client.
    logging.basicConfig()
    main()
@ -0,0 +1,103 @@ |
||||
# Copyright 2019 the gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
"""An example of compression on the server side with gRPC.""" |
||||
|
||||
from __future__ import absolute_import |
||||
from __future__ import division |
||||
from __future__ import print_function |
||||
|
||||
from concurrent import futures |
||||
import argparse |
||||
import logging |
||||
import threading |
||||
import grpc |
||||
|
||||
from examples import helloworld_pb2 |
||||
from examples import helloworld_pb2_grpc |
||||
|
||||
_DESCRIPTION = 'A server capable of compression.' |
||||
_COMPRESSION_OPTIONS = { |
||||
"none": grpc.Compression.NoCompression, |
||||
"deflate": grpc.Compression.Deflate, |
||||
"gzip": grpc.Compression.Gzip, |
||||
} |
||||
_LOGGER = logging.getLogger(__name__) |
||||
|
||||
_SERVER_HOST = 'localhost' |
||||
|
||||
|
||||
class Greeter(helloworld_pb2_grpc.GreeterServicer):
    """Greeter servicer that can periodically suppress response compression."""

    def __init__(self, no_compress_every_n):
        super(Greeter, self).__init__()
        # Bug fix: this was previously hard-coded to 0, silently ignoring the
        # constructor argument and making --no_compress_every_n a no-op.
        self._no_compress_every_n = no_compress_every_n
        # Number of requests served so far; guarded by _counter_lock because
        # the server dispatches RPCs from a thread pool.
        self._request_counter = 0
        self._counter_lock = threading.RLock()

    def _should_suppress_compression(self):
        """Return True for every nth request, when so configured.

        Returns False for every request when _no_compress_every_n is 0.
        """
        suppress_compression = False
        with self._counter_lock:
            if self._no_compress_every_n and self._request_counter % self._no_compress_every_n == 0:
                suppress_compression = True
            self._request_counter += 1
        return suppress_compression

    def SayHello(self, request, context):
        """Reply with a greeting, optionally disabling compression for it."""
        if self._should_suppress_compression():
            context.set_response_compression(grpc.Compression.NoCompression)
        return helloworld_pb2.HelloReply(message='Hello, %s!' % request.name)
||||
|
||||
|
||||
def run_server(server_compression, no_compress_every_n, port):
    """Run the compression-capable Greeter server until terminated."""
    executor = futures.ThreadPoolExecutor()
    server = grpc.server(
        executor,
        compression=server_compression,
        options=(('grpc.so_reuseport', 1),))
    helloworld_pb2_grpc.add_GreeterServicer_to_server(
        Greeter(no_compress_every_n), server)
    address = '{}:{}'.format(_SERVER_HOST, port)
    server.add_insecure_port(address)
    server.start()
    print("Server listening at '{}'".format(address))
    server.wait_for_termination()
||||
|
||||
|
||||
def main():
    """Parse command-line flags and start the compression-aware server."""
    flag_parser = argparse.ArgumentParser(description=_DESCRIPTION)
    flag_parser.add_argument(
        '--server_compression',
        default='none',
        nargs='?',
        choices=_COMPRESSION_OPTIONS.keys(),
        help='The default compression method for the server.')
    flag_parser.add_argument(
        '--no_compress_every_n',
        type=int,
        default=0,
        nargs='?',
        help='If set, every nth reply will be uncompressed.')
    flag_parser.add_argument(
        '--port',
        type=int,
        default=50051,
        nargs='?',
        help='The port on which the server will listen.')
    parsed = flag_parser.parse_args()
    run_server(_COMPRESSION_OPTIONS[parsed.server_compression],
               parsed.no_compress_every_n, parsed.port)
||||
|
||||
|
||||
if __name__ == "__main__":
    # Install a default stderr logging handler before starting the server.
    logging.basicConfig()
    main()
@ -0,0 +1,62 @@ |
||||
# Copyright 2019 the gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
"""Test for compression example.""" |
||||
|
||||
import contextlib |
||||
import os |
||||
import socket |
||||
import subprocess |
||||
import unittest |
||||
|
||||
_BINARY_DIR = os.path.realpath( |
||||
os.path.join(os.path.dirname(os.path.abspath(__file__)), '..')) |
||||
_SERVER_PATH = os.path.join(_BINARY_DIR, 'server') |
||||
_CLIENT_PATH = os.path.join(_BINARY_DIR, 'client') |
||||
|
||||
|
||||
@contextlib.contextmanager |
||||
def _get_port(): |
||||
sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) |
||||
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) |
||||
if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 0: |
||||
raise RuntimeError("Failed to set SO_REUSEPORT.") |
||||
sock.bind(('', 0)) |
||||
try: |
||||
yield sock.getsockname()[1] |
||||
finally: |
||||
sock.close() |
||||
|
||||
|
||||
class CompressionExampleTest(unittest.TestCase):
    """End-to-end test that drives the example server and client binaries."""

    def test_compression_example(self):
        """A gzip-compressed round trip should succeed with the server alive."""
        with _get_port() as test_port:
            server_process = subprocess.Popen((_SERVER_PATH, '--port',
                                               str(test_port),
                                               '--server_compression', 'gzip'))
            try:
                server_target = 'localhost:{}'.format(test_port)
                client_process = subprocess.Popen(
                    (_CLIENT_PATH, '--server', server_target,
                     '--channel_compression', 'gzip'))
                client_return_code = client_process.wait()
                self.assertEqual(0, client_return_code)
                # The server must still be alive after serving the RPC.
                self.assertIsNone(server_process.poll())
            finally:
                server_process.kill()
                server_process.wait()
server_process.wait() |
||||
|
||||
|
||||
if __name__ == '__main__': |
||||
unittest.main(verbosity=2) |
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in new issue