mirror of https://github.com/grpc/grpc.git
commit
bcc458fb37
537 changed files with 26497 additions and 23047 deletions
@ -1,18 +1,19 @@ |
||||
-Wall |
||||
-Wc++-compat |
||||
-Ithird_party/googletest/include |
||||
-Ithird_party/googletest |
||||
-Iinclude |
||||
-Igens |
||||
-I. |
||||
-Ithird_party/boringssl/include |
||||
-Ithird_party/benchmark/include |
||||
-Ithird_party/zlib |
||||
-Ithird_party/protobuf/src |
||||
-Igens |
||||
-Iinclude |
||||
-Isrc/core/ext/upb-generated |
||||
-Ithird_party/abseil-cpp |
||||
-Ithird_party/cares/cares |
||||
-Ithird_party/benchmark/include |
||||
-Ithird_party/boringssl/include |
||||
-Ithird_party/cares |
||||
-Ithird_party/googletest/googletest/include |
||||
-Ithird_party/cares/cares |
||||
-Ithird_party/googletest |
||||
-Ithird_party/googletest/googlemock/include |
||||
-Ithird_party/googletest/googletest/include |
||||
-Ithird_party/googletest/include |
||||
-Ithird_party/nanopb |
||||
|
||||
-Ithird_party/protobuf/src |
||||
-Ithird_party/upb |
||||
-Ithird_party/zlib |
||||
|
@ -0,0 +1,18 @@ |
||||
--- |
||||
name: Request a cleanup |
||||
about: Suggest a cleanup in our repository |
||||
labels: kind/internal cleanup |
||||
assignees: AspirinSJL |
||||
|
||||
--- |
||||
|
||||
<!-- |
||||
|
||||
This form is for bug reports and feature requests ONLY! |
||||
For general questions and troubleshooting, please ask/look for answers here: |
||||
- grpc.io mailing list: https://groups.google.com/forum/#!forum/grpc-io |
||||
- StackOverflow, with "grpc" tag: https://stackoverflow.com/questions/tagged/grpc |
||||
|
||||
Issues specific to *grpc-java*, *grpc-go*, *grpc-node*, *grpc-dart*, *grpc-web* should be created in the repository they belong to (e.g. https://github.com/grpc/grpc-LANGUAGE/issues/new) |
||||
--> |
||||
|
@ -0,0 +1,29 @@ |
||||
--- |
||||
name: Request a feature |
||||
about: Suggest an idea for this project |
||||
labels: kind/enhancement |
||||
assignees: AspirinSJL |
||||
|
||||
--- |
||||
|
||||
<!-- |
||||
|
||||
This form is for bug reports and feature requests ONLY! |
||||
For general questions and troubleshooting, please ask/look for answers here: |
||||
- grpc.io mailing list: https://groups.google.com/forum/#!forum/grpc-io |
||||
- StackOverflow, with "grpc" tag: https://stackoverflow.com/questions/tagged/grpc |
||||
|
||||
Issues specific to *grpc-java*, *grpc-go*, *grpc-node*, *grpc-dart*, *grpc-web* should be created in the repository they belong to (e.g. https://github.com/grpc/grpc-LANGUAGE/issues/new) |
||||
--> |
||||
|
||||
### Is your feature request related to a problem? Please describe. |
||||
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] |
||||
|
||||
### Describe the solution you'd like |
||||
A clear and concise description of what you want to happen. |
||||
|
||||
### Describe alternatives you've considered |
||||
A clear and concise description of any alternative solutions or features you've considered. |
||||
|
||||
### Additional context |
||||
Add any other context about the feature request here. |
@ -0,0 +1,5 @@ |
||||
|
||||
|
||||
|
||||
<!-- Your pull request will be routed to the following person. --> |
||||
@AspirinSJL |
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,215 @@ |
||||
load( |
||||
"//bazel:protobuf.bzl", |
||||
"get_include_protoc_args", |
||||
"get_plugin_args", |
||||
"proto_path_to_generated_filename", |
||||
) |
||||
load(":grpc_util.bzl", "to_upper_camel_with_extension",) |
||||
|
||||
_GRPC_PROTO_HEADER_FMT = "{}.pbrpc.h" |
||||
_GRPC_PROTO_SRC_FMT = "{}.pbrpc.m" |
||||
_PROTO_HEADER_FMT = "{}.pbobjc.h" |
||||
_PROTO_SRC_FMT = "{}.pbobjc.m" |
||||
_GENERATED_PROTOS_DIR = "_generated_protos" |
||||
|
||||
_GENERATE_HDRS = 1 |
||||
_GENERATE_SRCS = 2 |
||||
_GENERATE_NON_ARC_SRCS = 3 |
||||
|
||||
def _generate_objc_impl(ctx): |
||||
"""Implementation of the generate_objc rule.""" |
||||
protos = [ |
||||
f |
||||
for src in ctx.attr.deps |
||||
for f in src[ProtoInfo].transitive_imports.to_list() |
||||
] |
||||
|
||||
target_package = _join_directories([ctx.label.workspace_root, ctx.label.package]) |
||||
|
||||
files_with_rpc = [_label_to_full_file_path(f, target_package) for f in ctx.attr.srcs] |
||||
|
||||
outs = [] |
||||
for proto in protos: |
||||
outs += [_get_output_file_name_from_proto(proto, _PROTO_HEADER_FMT)] |
||||
outs += [_get_output_file_name_from_proto(proto, _PROTO_SRC_FMT)] |
||||
|
||||
file_path = _get_full_path_from_file(proto) |
||||
if file_path in files_with_rpc: |
||||
outs += [_get_output_file_name_from_proto(proto, _GRPC_PROTO_HEADER_FMT)] |
||||
outs += [_get_output_file_name_from_proto(proto, _GRPC_PROTO_SRC_FMT)] |
||||
|
||||
out_files = [ctx.actions.declare_file(out) for out in outs] |
||||
dir_out = _join_directories([ |
||||
str(ctx.genfiles_dir.path), target_package, _GENERATED_PROTOS_DIR |
||||
]) |
||||
|
||||
arguments = [] |
||||
if ctx.executable.plugin: |
||||
arguments += get_plugin_args( |
||||
ctx.executable.plugin, |
||||
[], |
||||
dir_out, |
||||
False, |
||||
) |
||||
tools = [ctx.executable.plugin] |
||||
arguments += ["--objc_out=" + dir_out] |
||||
|
||||
arguments += ["--proto_path=."] |
||||
arguments += get_include_protoc_args(protos) |
||||
# Include the output directory so that protoc puts the generated code in the |
||||
# right directory. |
||||
arguments += ["--proto_path={}".format(dir_out)] |
||||
arguments += ["--proto_path={}".format(_get_directory_from_proto(proto)) for proto in protos] |
||||
arguments += [_get_full_path_from_file(proto) for proto in protos] |
||||
|
||||
# create a list of well known proto files if the argument is non-None |
||||
well_known_proto_files = [] |
||||
if ctx.attr.use_well_known_protos: |
||||
f = ctx.attr.well_known_protos.files.to_list()[0].dirname |
||||
# go two levels up so that #import "google/protobuf/..." is correct |
||||
arguments += ["-I{0}".format(f + "/../..")] |
||||
well_known_proto_files = ctx.attr.well_known_protos.files.to_list() |
||||
ctx.actions.run( |
||||
inputs = protos + well_known_proto_files, |
||||
tools = tools, |
||||
outputs = out_files, |
||||
executable = ctx.executable._protoc, |
||||
arguments = arguments, |
||||
) |
||||
|
||||
return struct(files = depset(out_files)) |
||||
|
||||
def _label_to_full_file_path(src, package): |
||||
if not src.startswith("//"): |
||||
# Relative from current package |
||||
if not src.startswith(":"): |
||||
# "a.proto" -> ":a.proto" |
||||
src = ":" + src |
||||
src = "//" + package + src |
||||
# Converts //path/to/package:File.ext to path/to/package/File.ext. |
||||
src = src.replace("//", "") |
||||
src = src.replace(":", "/") |
||||
if src.startswith("/"): |
||||
# "//:a.proto" -> "/a.proto" so remove the initial slash |
||||
return src[1:] |
||||
else: |
||||
return src |
||||
|
||||
def _get_output_file_name_from_proto(proto, fmt): |
||||
return proto_path_to_generated_filename( |
||||
_GENERATED_PROTOS_DIR + "/" + |
||||
_get_directory_from_proto(proto) + _get_slash_or_null_from_proto(proto) + |
||||
to_upper_camel_with_extension(_get_file_name_from_proto(proto), "proto"), |
||||
fmt, |
||||
) |
||||
|
||||
def _get_file_name_from_proto(proto): |
||||
return proto.path.rpartition("/")[2] |
||||
|
||||
def _get_slash_or_null_from_proto(proto): |
||||
"""Potentially returns empty (if the file is in the root directory)""" |
||||
return proto.path.rpartition("/")[1] |
||||
|
||||
def _get_directory_from_proto(proto): |
||||
return proto.path.rpartition("/")[0] |
||||
|
||||
def _get_full_path_from_file(file): |
||||
gen_dir_length = 0 |
||||
# if file is generated, then prepare to remote its root |
||||
# (including CPU architecture...) |
||||
if not file.is_source: |
||||
gen_dir_length = len(file.root.path) + 1 |
||||
|
||||
return file.path[gen_dir_length:] |
||||
|
||||
def _join_directories(directories): |
||||
massaged_directories = [directory for directory in directories if len(directory) != 0] |
||||
return "/".join(massaged_directories) |
||||
|
||||
|
||||
generate_objc = rule( |
||||
attrs = { |
||||
"deps": attr.label_list( |
||||
mandatory = True, |
||||
allow_empty = False, |
||||
providers = [ProtoInfo], |
||||
), |
||||
"plugin": attr.label( |
||||
default = "@com_github_grpc_grpc//src/compiler:grpc_objective_c_plugin", |
||||
executable = True, |
||||
providers = ["files_to_run"], |
||||
cfg = "host", |
||||
), |
||||
"srcs": attr.string_list( |
||||
mandatory = False, |
||||
allow_empty = True |
||||
), |
||||
"use_well_known_protos": attr.bool( |
||||
mandatory = False, |
||||
default = False |
||||
), |
||||
"well_known_protos": attr.label( |
||||
default = "@com_google_protobuf//:well_known_protos" |
||||
), |
||||
"_protoc": attr.label( |
||||
default = Label("//external:protocol_compiler"), |
||||
executable = True, |
||||
cfg = "host", |
||||
), |
||||
}, |
||||
output_to_genfiles = True, |
||||
implementation = _generate_objc_impl |
||||
) |
||||
|
||||
def _group_objc_files_impl(ctx): |
||||
suffix = "" |
||||
if ctx.attr.gen_mode == _GENERATE_HDRS: |
||||
suffix = "h" |
||||
elif ctx.attr.gen_mode == _GENERATE_SRCS: |
||||
suffix = "pbrpc.m" |
||||
elif ctx.attr.gen_mode == _GENERATE_NON_ARC_SRCS: |
||||
suffix = "pbobjc.m" |
||||
else: |
||||
fail("Undefined gen_mode") |
||||
out_files = [ |
||||
file |
||||
for file in ctx.attr.src.files.to_list() |
||||
if file.basename.endswith(suffix) |
||||
] |
||||
return struct(files = depset(out_files)) |
||||
|
||||
generate_objc_hdrs = rule( |
||||
attrs = { |
||||
"src": attr.label( |
||||
mandatory = True, |
||||
), |
||||
"gen_mode": attr.int( |
||||
default = _GENERATE_HDRS, |
||||
) |
||||
}, |
||||
implementation = _group_objc_files_impl |
||||
) |
||||
|
||||
generate_objc_srcs = rule( |
||||
attrs = { |
||||
"src": attr.label( |
||||
mandatory = True, |
||||
), |
||||
"gen_mode": attr.int( |
||||
default = _GENERATE_SRCS, |
||||
) |
||||
}, |
||||
implementation = _group_objc_files_impl |
||||
) |
||||
|
||||
generate_objc_non_arc_srcs = rule( |
||||
attrs = { |
||||
"src": attr.label( |
||||
mandatory = True, |
||||
), |
||||
"gen_mode": attr.int( |
||||
default = _GENERATE_NON_ARC_SRCS, |
||||
) |
||||
}, |
||||
implementation = _group_objc_files_impl |
||||
) |
@ -1,8 +1,67 @@ |
||||
load("//third_party/py:python_configure.bzl", "python_configure") |
||||
load("@io_bazel_rules_python//python:pip.bzl", "pip_repositories") |
||||
load("@grpc_python_dependencies//:requirements.bzl", "pip_install") |
||||
"""Load dependencies needed to compile and test the grpc python library as a 3rd-party consumer.""" |
||||
|
||||
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository") |
||||
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") |
||||
load("@com_github_grpc_grpc//third_party/py:python_configure.bzl", "python_configure") |
||||
|
||||
def grpc_python_deps(): |
||||
native.bind( |
||||
name = "six", |
||||
actual = "@six_archive//:six", |
||||
) |
||||
|
||||
# protobuf binds to the name "six", so we can't use it here. |
||||
# See https://github.com/bazelbuild/bazel/issues/1952 for why bind is |
||||
# horrible. |
||||
if "six_archive" not in native.existing_rules(): |
||||
http_archive( |
||||
name = "six_archive", |
||||
strip_prefix = "six-1.12.0", |
||||
build_file = "@com_github_grpc_grpc//third_party:six.BUILD", |
||||
sha256 = "d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73", |
||||
urls = ["https://files.pythonhosted.org/packages/dd/bf/4138e7bfb757de47d1f4b6994648ec67a51efe58fa907c1e11e350cddfca/six-1.12.0.tar.gz"], |
||||
) |
||||
|
||||
if "enum34" not in native.existing_rules(): |
||||
http_archive( |
||||
name = "enum34", |
||||
build_file = "@com_github_grpc_grpc//third_party:enum34.BUILD", |
||||
strip_prefix = "enum34-1.1.6", |
||||
sha256 = "8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1", |
||||
urls = ["https://files.pythonhosted.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz"], |
||||
) |
||||
|
||||
if "futures" not in native.existing_rules(): |
||||
http_archive( |
||||
name = "futures", |
||||
build_file = "@com_github_grpc_grpc//third_party:futures.BUILD", |
||||
strip_prefix = "futures-3.3.0", |
||||
sha256 = "7e033af76a5e35f58e56da7a91e687706faf4e7bdfb2cbc3f2cca6b9bcda9794", |
||||
urls = ["https://files.pythonhosted.org/packages/47/04/5fc6c74ad114032cd2c544c575bffc17582295e9cd6a851d6026ab4b2c00/futures-3.3.0.tar.gz"], |
||||
) |
||||
|
||||
if "io_bazel_rules_python" not in native.existing_rules(): |
||||
git_repository( |
||||
name = "io_bazel_rules_python", |
||||
commit = "fdbb17a4118a1728d19e638a5291b4c4266ea5b8", |
||||
remote = "https://github.com/bazelbuild/rules_python.git", |
||||
) |
||||
|
||||
python_configure(name = "local_config_python") |
||||
pip_repositories() |
||||
pip_install() |
||||
|
||||
native.bind( |
||||
name = "python_headers", |
||||
actual = "@local_config_python//:python_headers", |
||||
) |
||||
|
||||
if "cython" not in native.existing_rules(): |
||||
http_archive( |
||||
name = "cython", |
||||
build_file = "@com_github_grpc_grpc//third_party:cython.BUILD", |
||||
sha256 = "d68138a2381afbdd0876c3cb2a22389043fa01c4badede1228ee073032b07a27", |
||||
strip_prefix = "cython-c2b80d87658a8525ce091cbe146cb7eaa29fed5c", |
||||
urls = [ |
||||
"https://github.com/cython/cython/archive/c2b80d87658a8525ce091cbe146cb7eaa29fed5c.tar.gz", |
||||
], |
||||
) |
||||
|
||||
|
@ -0,0 +1,46 @@ |
||||
# Follows convention set in objectivec_helpers.cc in the protobuf ObjC compiler. |
||||
_upper_segments_list = ["url", "http", "https"] |
||||
|
||||
def strip_extension(str):
    """Returns `str` with its final ".ext" removed ("a/b.proto" -> "a/b").

    Returns the empty string when `str` contains no dot, matching
    rpartition's behavior.
    """
    base, _, _ = str.rpartition(".")
    return base
||||
|
||||
def capitalize(word): |
||||
if word in _upper_segments_list: |
||||
return word.upper() |
||||
else: |
||||
return word.capitalize() |
||||
|
||||
def lower_underscore_to_upper_camel(str): |
||||
str = strip_extension(str) |
||||
camel_case_str = "" |
||||
word = "" |
||||
for c in str.elems(): # NB: assumes ASCII! |
||||
if c.isalpha(): |
||||
word += c.lower() |
||||
else: |
||||
# Last word is finished. |
||||
if len(word): |
||||
camel_case_str += capitalize(word) |
||||
word = "" |
||||
if c.isdigit(): |
||||
camel_case_str += c |
||||
|
||||
# Otherwise, drop the character. See UnderscoresToCamelCase in: |
||||
# third_party/protobuf/src/google/protobuf/compiler/objectivec/objectivec_helpers.cc |
||||
|
||||
if len(word): |
||||
camel_case_str += capitalize(word) |
||||
return camel_case_str |
||||
|
||||
def file_to_upper_camel(src): |
||||
elements = src.rpartition("/") |
||||
upper_camel = lower_underscore_to_upper_camel(elements[-1]) |
||||
return "".join(list(elements[:-1]) + [upper_camel]) |
||||
|
||||
def file_with_extension(src, ext): |
||||
elements = src.rpartition("/") |
||||
return "".join(list(elements[:-1]) + [elements[-1], "." + ext]) |
||||
|
||||
def to_upper_camel_with_extension(src, ext): |
||||
src = file_to_upper_camel(src) |
||||
return file_with_extension(src, ext) |
@ -0,0 +1,69 @@ |
||||
load( |
||||
"//bazel:generate_objc.bzl", |
||||
"generate_objc", |
||||
"generate_objc_hdrs", |
||||
"generate_objc_srcs", |
||||
"generate_objc_non_arc_srcs" |
||||
) |
||||
load("//bazel:protobuf.bzl", "well_known_proto_libs") |
||||
|
||||
def objc_grpc_library(name, deps, srcs = [], use_well_known_protos = False, **kwargs): |
||||
"""Generates messages and/or service stubs for given proto_library and all transitively dependent proto files |
||||
|
||||
Args: |
||||
name: name of target |
||||
deps: a list of proto_library targets that needs to be compiled |
||||
srcs: a list of labels to proto files with service stubs to be generated, |
||||
labels specified must include service stubs; otherwise Bazel will complain about srcs being empty |
||||
use_well_known_protos: whether to use the well known protos defined in |
||||
@com_google_protobuf//src/google/protobuf, default to false |
||||
**kwargs: other arguments |
||||
""" |
||||
objc_grpc_library_name = "_" + name + "_objc_grpc_library" |
||||
|
||||
generate_objc( |
||||
name = objc_grpc_library_name, |
||||
srcs = srcs, |
||||
deps = deps, |
||||
use_well_known_protos = use_well_known_protos, |
||||
**kwargs |
||||
) |
||||
|
||||
generate_objc_hdrs( |
||||
name = objc_grpc_library_name + "_hdrs", |
||||
src = ":" + objc_grpc_library_name, |
||||
) |
||||
|
||||
generate_objc_non_arc_srcs( |
||||
name = objc_grpc_library_name + "_non_arc_srcs", |
||||
src = ":" + objc_grpc_library_name, |
||||
) |
||||
|
||||
arc_srcs = None |
||||
if len(srcs) > 0: |
||||
generate_objc_srcs( |
||||
name = objc_grpc_library_name + "_srcs", |
||||
src = ":" + objc_grpc_library_name, |
||||
) |
||||
arc_srcs = [":" + objc_grpc_library_name + "_srcs"] |
||||
|
||||
native.objc_library( |
||||
name = name, |
||||
hdrs = [":" + objc_grpc_library_name + "_hdrs"], |
||||
non_arc_srcs = [":" + objc_grpc_library_name + "_non_arc_srcs"], |
||||
srcs = arc_srcs, |
||||
defines = [ |
||||
"GPB_USE_PROTOBUF_FRAMEWORK_IMPORTS=0", |
||||
"GPB_GRPC_FORWARD_DECLARE_MESSAGE_PROTO=0", |
||||
], |
||||
includes = [ |
||||
"_generated_protos", |
||||
"src/objective-c", |
||||
], |
||||
deps = [ |
||||
"@com_github_grpc_grpc//src/objective-c:proto_objc_rpc", |
||||
"@com_google_protobuf//:protobuf_objc", |
||||
], |
||||
**kwargs |
||||
) |
||||
|
@ -0,0 +1,2 @@ |
||||
bazel-* |
||||
tools/bazel-* |
@ -0,0 +1,62 @@ |
||||
# gRPC Bazel BUILD file. |
||||
# |
||||
# Copyright 2019 The gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
|
||||
load("@com_github_grpc_grpc//bazel:python_rules.bzl", "py_proto_library", "py_grpc_library") |
||||
|
||||
package(default_testonly = 1) |
||||
|
||||
proto_library( |
||||
name = "helloworld_proto", |
||||
srcs = ["helloworld.proto"], |
||||
deps = [ |
||||
"@com_google_protobuf//:duration_proto", |
||||
"@com_google_protobuf//:timestamp_proto", |
||||
], |
||||
) |
||||
|
||||
py_proto_library( |
||||
name = "helloworld_py_pb2", |
||||
srcs = [":helloworld_proto"], |
||||
) |
||||
|
||||
py_grpc_library( |
||||
name = "helloworld_py_pb2_grpc", |
||||
srcs = [":helloworld_proto"], |
||||
deps = [":helloworld_py_pb2"], |
||||
) |
||||
|
||||
py_proto_library( |
||||
name = "duration_py_pb2", |
||||
srcs = ["@com_google_protobuf//:duration_proto"], |
||||
) |
||||
|
||||
py_proto_library( |
||||
name = "timestamp_py_pb2", |
||||
srcs = ["@com_google_protobuf//:timestamp_proto"], |
||||
) |
||||
|
||||
py_test( |
||||
name = "import_test", |
||||
main = "helloworld.py", |
||||
srcs = ["helloworld.py"], |
||||
deps = [ |
||||
":helloworld_py_pb2", |
||||
":helloworld_py_pb2_grpc", |
||||
":duration_py_pb2", |
||||
":timestamp_py_pb2", |
||||
], |
||||
python_version = "PY3", |
||||
) |
@ -0,0 +1,5 @@ |
||||
## Bazel Workspace Test |
||||
|
||||
This directory houses a test ensuring that downstream projects can use |
||||
`@com_github_grpc_grpc//src/python/grpcio:grpcio`, `py_proto_library`, and |
||||
`py_grpc_library`. |
@ -0,0 +1,17 @@ |
||||
local_repository( |
||||
name = "com_github_grpc_grpc", |
||||
path = "../../..", |
||||
) |
||||
|
||||
load("@com_github_grpc_grpc//bazel:grpc_deps.bzl", "grpc_deps") |
||||
grpc_deps() |
||||
|
||||
# TODO(https://github.com/grpc/grpc/issues/19835): Remove. |
||||
load("@upb//bazel:workspace_deps.bzl", "upb_deps") |
||||
upb_deps() |
||||
|
||||
load("@build_bazel_rules_apple//apple:repositories.bzl", "apple_rules_dependencies") |
||||
apple_rules_dependencies() |
||||
|
||||
load("@build_bazel_apple_support//lib:repositories.bzl", "apple_support_dependencies") |
||||
apple_support_dependencies() |
@ -0,0 +1,43 @@ |
||||
// Copyright 2019 The gRPC authors. |
||||
// |
||||
// Licensed under the Apache License, Version 2.0 (the "License"); |
||||
// you may not use this file except in compliance with the License. |
||||
// You may obtain a copy of the License at |
||||
// |
||||
// http://www.apache.org/licenses/LICENSE-2.0 |
||||
// |
||||
// Unless required by applicable law or agreed to in writing, software |
||||
// distributed under the License is distributed on an "AS IS" BASIS, |
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
// See the License for the specific language governing permissions and |
||||
// limitations under the License. |
||||
|
||||
syntax = "proto3"; |
||||
|
||||
option java_multiple_files = true; |
||||
option java_package = "io.grpc.examples.helloworld"; |
||||
option java_outer_classname = "HelloWorldProto"; |
||||
option objc_class_prefix = "HLW"; |
||||
|
||||
package helloworld; |
||||
|
||||
import "google/protobuf/timestamp.proto"; |
||||
import "google/protobuf/duration.proto"; |
||||
|
||||
// The greeting service definition. |
||||
service Greeter { |
||||
// Sends a greeting |
||||
rpc SayHello (HelloRequest) returns (HelloReply) {} |
||||
} |
||||
|
||||
// The request message containing the user's name. |
||||
message HelloRequest { |
||||
string name = 1; |
||||
google.protobuf.Timestamp request_initiation = 2; |
||||
} |
||||
|
||||
// The response message containing the greetings |
||||
message HelloReply { |
||||
string message = 1; |
||||
google.protobuf.Duration request_duration = 2; |
||||
} |
@ -0,0 +1,73 @@ |
||||
# Copyright 2019 the gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
"""The Python implementation of the GRPC helloworld.Greeter client.""" |
||||
|
||||
import contextlib |
||||
import datetime |
||||
import logging |
||||
import unittest |
||||
|
||||
import grpc |
||||
|
||||
import duration_pb2 |
||||
import helloworld_pb2 |
||||
import helloworld_pb2_grpc |
||||
|
||||
_HOST = 'localhost' |
||||
_SERVER_ADDRESS = '{}:0'.format(_HOST) |
||||
|
||||
|
||||
class Greeter(helloworld_pb2_grpc.GreeterServicer): |
||||
|
||||
def SayHello(self, request, context): |
||||
request_in_flight = datetime.now() - request.request_initation.ToDatetime() |
||||
request_duration = duration_pb2.Duration() |
||||
request_duration.FromTimedelta(request_in_flight) |
||||
return helloworld_pb2.HelloReply( |
||||
message='Hello, %s!' % request.name, |
||||
request_duration=request_duration, |
||||
) |
||||
|
||||
|
||||
@contextlib.contextmanager |
||||
def _listening_server(): |
||||
server = grpc.server(futures.ThreadPoolExecutor()) |
||||
helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server) |
||||
port = server.add_insecure_port(_SERVER_ADDRESS) |
||||
server.start() |
||||
try: |
||||
yield port |
||||
finally: |
||||
server.stop(0) |
||||
|
||||
|
||||
class ImportTest(unittest.TestCase): |
||||
def run(): |
||||
with _listening_server() as port: |
||||
with grpc.insecure_channel('{}:{}'.format(_HOST, port)) as channel: |
||||
stub = helloworld_pb2_grpc.GreeterStub(channel) |
||||
request_timestamp = timestamp_pb2.Timestamp() |
||||
request_timestamp.GetCurrentTime() |
||||
response = stub.SayHello(helloworld_pb2.HelloRequest( |
||||
name='you', |
||||
request_initiation=request_timestamp, |
||||
), |
||||
wait_for_ready=True) |
||||
self.assertEqual(response.message, "Hello, you!") |
||||
self.assertGreater(response.request_duration.microseconds, 0) |
||||
|
||||
|
||||
if __name__ == '__main__': |
||||
logging.basicConfig() |
||||
unittest.main() |
@ -0,0 +1 @@ |
||||
../../../../tools/bazel |
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,18 @@ |
||||
# Copyright 2019 gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
|
||||
set(UPB_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR}/third_party/upb) |
||||
|
||||
set(_gRPC_UPB_INCLUDE_DIR "${UPB_ROOT_DIR}") |
||||
set(_gRPC_UPB_GRPC_GENERATED_DIR "${CMAKE_CURRENT_SOURCE_DIR}/src/core/ext/upb-generated") |
@ -0,0 +1,119 @@ |
||||
# Copyright 2019 gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
|
||||
licenses(["notice"]) # 3-clause BSD |
||||
|
||||
package(default_visibility = ["//visibility:public"]) |
||||
|
||||
load("@com_github_grpc_grpc//bazel:objc_grpc_library.bzl", "objc_grpc_library") |
||||
load("@build_bazel_rules_apple//apple:ios.bzl", "ios_application") |
||||
load("@build_bazel_rules_apple//apple:macos.bzl", "macos_application") |
||||
|
||||
objc_grpc_library( |
||||
name = "HelloWorld_grpc_proto", |
||||
srcs = ["//examples:protos/helloworld.proto"], |
||||
deps = ["//examples:helloworld_proto"], |
||||
tags = ["manual"], |
||||
) |
||||
|
||||
# This one works with import "external/com_github_grpc_grpc/examples/protos/Helloworld.pbrpc.h" |
||||
objc_grpc_library( |
||||
name = "HelloWorld_grpc_proto_external", |
||||
srcs = ["//external/com_github_grpc_grpc/examples:protos/helloworld.proto"], |
||||
deps = ["@com_github_grpc_grpc//examples:helloworld_proto"], |
||||
tags = ["manual"], |
||||
) |
||||
|
||||
objc_library( |
||||
name = "HelloWorld-lib", |
||||
srcs = glob(["helloworld/**/*.m",]), |
||||
hdrs = glob(["helloworld/**/*.h"]), |
||||
data = glob([ |
||||
"helloworld/HelloWorld/Base.lproj/**", |
||||
"helloworld/HelloWorld/Images.xcassets/**", |
||||
]), |
||||
includes = ["helloworld/HelloWorld"], |
||||
deps = [":HelloWorld_grpc_proto"], |
||||
tags = ["manual"], |
||||
) |
||||
|
||||
ios_application( |
||||
name = "HelloWorld", |
||||
bundle_id = "Google.HelloWorld", |
||||
families = [ |
||||
"iphone", |
||||
"ipad", |
||||
], |
||||
minimum_os_version = "8.0", |
||||
infoplists = ["helloworld/HelloWorld/Info.plist"], |
||||
deps = [":HelloWorld-lib"], |
||||
tags = ["manual"], |
||||
) |
||||
|
||||
objc_library( |
||||
name = "HelloWorldMacos-lib", |
||||
srcs = glob(["helloworld_macos/**/*.m",]), |
||||
hdrs = glob(["helloworld_macos/**/*.h"]), |
||||
data = glob([ |
||||
"helloworld_macos/HelloWorld/Base.lproj/**", |
||||
"helloworld_macos/HelloWorld/Images.xcassets/**", |
||||
]), |
||||
includes = ["helloworld_macos/HelloWorld"], |
||||
deps = [":HelloWorld_grpc_proto"], |
||||
tags = ["manual"], |
||||
) |
||||
|
||||
macos_application( |
||||
name = "HelloWorldMacos", |
||||
bundle_id = "io.grpc.HelloWorld", |
||||
minimum_os_version = "10.13", |
||||
entitlements = "helloworld_macos/HelloWorld/Helloworld.entitlements", |
||||
infoplists = ["helloworld_macos/HelloWorld/Info.plist"], |
||||
deps = [":HelloWorldMacos-lib"], |
||||
tags = ["manual"], |
||||
) |
||||
|
||||
objc_grpc_library( |
||||
name = "RouteGuide", |
||||
srcs = ["//examples:protos/route_guide.proto"], |
||||
deps = ["//examples:route_guide_proto"], |
||||
tags = ["manual"], |
||||
) |
||||
|
||||
objc_library( |
||||
name = "RouteGuideClient-lib", |
||||
srcs = glob(["route_guide/**/*.m"]), |
||||
hdrs = glob(["route_guide/**/*.h"]), |
||||
data = glob([ |
||||
"route_guide/Misc/Base.lproj/**", |
||||
"route_guide/Misc/Images.xcassets/**", |
||||
"route_guide/route_guide_db.json", |
||||
]), |
||||
includes = ["route_guide/Misc"], |
||||
deps = [":RouteGuide"], |
||||
tags = ["manual"], |
||||
) |
||||
|
||||
ios_application( |
||||
name = "RouteGuideClient", |
||||
bundle_id = "gRPC.RouteGuideClient", |
||||
families = [ |
||||
"iphone", |
||||
"ipad", |
||||
], |
||||
minimum_os_version = "8.0", |
||||
infoplists = ["route_guide/Misc/Info.plist"], |
||||
deps = [":RouteGuideClient-lib"], |
||||
tags = ["manual"], |
||||
) |
@ -0,0 +1,81 @@ |
||||
# gRPC Bazel BUILD file. |
||||
# |
||||
# Copyright 2019 The gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
|
||||
load("@grpc_python_dependencies//:requirements.bzl", "requirement") |
||||
load("//bazel:python_rules.bzl", "py_proto_library", "py_grpc_library") |
||||
|
||||
package(default_testonly = 1) |
||||
|
||||
proto_library( |
||||
name = "hash_name_proto", |
||||
srcs = ["hash_name.proto"], |
||||
) |
||||
|
||||
py_proto_library( |
||||
name = "hash_name_py_pb2", |
||||
srcs = [":hash_name_proto"], |
||||
) |
||||
|
||||
py_grpc_library( |
||||
name = "hash_name_py_pb2_grpc", |
||||
srcs = [":hash_name_proto"], |
||||
deps = [":hash_name_py_pb2"], |
||||
) |
||||
|
||||
py_binary( |
||||
name = "client", |
||||
srcs = ["client.py"], |
||||
deps = [ |
||||
"//src/python/grpcio/grpc:grpcio", |
||||
":hash_name_py_pb2", |
||||
":hash_name_py_pb2_grpc", |
||||
"//external:six" |
||||
], |
||||
srcs_version = "PY2AND3", |
||||
) |
||||
|
||||
py_library( |
||||
name = "search", |
||||
srcs = ["search.py"], |
||||
srcs_version = "PY2AND3", |
||||
deps = [ |
||||
":hash_name_py_pb2", |
||||
], |
||||
) |
||||
|
||||
py_binary( |
||||
name = "server", |
||||
srcs = ["server.py"], |
||||
deps = [ |
||||
"//src/python/grpcio/grpc:grpcio", |
||||
":hash_name_py_pb2", |
||||
":search", |
||||
] + select({ |
||||
"//conditions:default": ["@futures//:futures"], |
||||
"//:python3": [], |
||||
}), |
||||
srcs_version = "PY2AND3", |
||||
) |
||||
|
||||
py_test( |
||||
name = "test/_cancellation_example_test", |
||||
srcs = ["test/_cancellation_example_test.py"], |
||||
data = [ |
||||
":client", |
||||
":server" |
||||
], |
||||
size = "small", |
||||
) |
@ -0,0 +1,127 @@ |
||||
### Cancellation |
||||
|
||||
In the example, we implement a silly algorithm. We search for bytestrings whose |
||||
hashes are similar to a given search string. For example, say we're looking for |
||||
the string "doctor". Our algorithm may return `JrqhZVkTDoctYrUlXDbL6pfYQHU=` or |
||||
`RC9/7mlM3ldy4TdoctOc6WzYbO4=`. This is a brute force algorithm, so the server |
||||
performing the search must be conscious of the resources it allows to each client |
||||
and each client must be conscientious of the resources it demands of the server. |
||||
|
||||
In particular, we ensure that client processes cancel the stream explicitly |
||||
before terminating and we ensure that server processes cancel RPCs that have gone on longer |
||||
than a certain number of iterations. |
||||
|
||||
#### Cancellation on the Client Side |
||||
|
||||
A client may cancel an RPC for several reasons. Perhaps the data it requested |
||||
has been made irrelevant. Perhaps you, as the client, want to be a good citizen |
||||
of the server and are conserving compute resources. |
||||
|
||||
##### Cancelling a Server-Side Unary RPC from the Client |
||||
|
||||
The default RPC methods on a stub will simply return the result of an RPC. |
||||
|
||||
```python |
||||
>>> stub = hash_name_pb2_grpc.HashFinderStub(channel) |
||||
>>> stub.Find(hash_name_pb2.HashNameRequest(desired_name=name)) |
||||
<hash_name_pb2.HashNameResponse object at 0x7fe2eb8ce2d0> |
||||
``` |
||||
|
||||
But you may use the `future()` method to receive an instance of `grpc.Future`. |
||||
This interface allows you to wait on a response with a timeout, add a callback |
||||
to be executed when the RPC completes, or to cancel the RPC before it has |
||||
completed. |
||||
|
||||
In the example, we use this interface to cancel our in-progress RPC when the |
||||
user interrupts the process with ctrl-c. |
||||
|
||||
```python |
||||
stub = hash_name_pb2_grpc.HashFinderStub(channel) |
||||
future = stub.Find.future(hash_name_pb2.HashNameRequest(desired_name=name)) |
||||
def cancel_request(unused_signum, unused_frame): |
||||
future.cancel() |
||||
sys.exit(0) |
||||
signal.signal(signal.SIGINT, cancel_request) |
||||
|
||||
result = future.result() |
||||
print(result) |
||||
``` |
||||
|
||||
We also call `sys.exit(0)` to terminate the process. If we do not do this, then |
||||
`future.result()` will throw an `RpcError`. Alternatively, you may catch this
||||
exception. |
||||
|
||||
|
||||
##### Cancelling a Server-Side Streaming RPC from the Client |
||||
|
||||
Cancelling a Server-side streaming RPC is even simpler from the perspective of |
||||
the gRPC API. The default stub method is already an instance of `grpc.Future`, |
||||
so the methods outlined above still apply. It is also a generator, so we may |
||||
iterate over it to yield the results of our RPC. |
||||
|
||||
```python |
||||
stub = hash_name_pb2_grpc.HashFinderStub(channel) |
||||
result_generator = stub.FindRange(hash_name_pb2.HashNameRequest(desired_name=name)) |
||||
def cancel_request(unused_signum, unused_frame): |
||||
result_generator.cancel() |
||||
sys.exit(0) |
||||
signal.signal(signal.SIGINT, cancel_request) |
||||
for result in result_generator: |
||||
print(result) |
||||
``` |
||||
|
||||
We also call `sys.exit(0)` here to terminate the process. Alternatively, you may |
||||
catch the `RpcError` raised by the for loop upon cancellation. |
||||
|
||||
|
||||
#### Cancellation on the Server Side |
||||
|
||||
A server is responsible for cancellation in two ways. It must respond in some way
||||
when a client initiates a cancellation, otherwise long-running computations |
||||
could continue indefinitely. |
||||
|
||||
It may also decide to cancel the RPC for its own reasons. In our example, the |
||||
server can be configured to cancel an RPC after a certain number of hashes has |
||||
been computed in order to conserve compute resources. |
||||
|
||||
##### Responding to Cancellations from a Servicer Thread |
||||
|
||||
It's important to remember that a gRPC Python server is backed by a thread pool |
||||
with a fixed size. When an RPC is cancelled, the library does *not* terminate |
||||
your servicer thread. It is your responsibility as the application author to |
||||
ensure that your servicer thread terminates soon after the RPC has been |
||||
cancelled. |
||||
|
||||
In this example, we use the `ServicerContext.add_callback` method to set a |
||||
`threading.Event` object when the RPC is terminated. We pass this `Event` object |
||||
down through our hashing algorithm and ensure to check that the RPC is still |
||||
ongoing before each iteration. |
||||
|
||||
```python |
||||
stop_event = threading.Event() |
||||
def on_rpc_done(): |
||||
# Regain servicer thread. |
||||
stop_event.set() |
||||
context.add_callback(on_rpc_done) |
||||
secret = _find_secret(stop_event) |
||||
``` |
||||
|
||||
##### Initiating a Cancellation on the Server Side |
||||
|
||||
Initiating a cancellation from the server side is simpler. Just call |
||||
`ServicerContext.cancel()`. |
||||
|
||||
In our example, we ensure that no single client is monopolizing the server by |
||||
cancelling after a configurable number of hashes have been checked. |
||||
|
||||
```python |
||||
try: |
||||
for candidate in secret_generator: |
||||
yield candidate |
||||
except ResourceLimitExceededError: |
||||
print("Cancelling RPC due to exhausted resources.") |
||||
context.cancel() |
||||
``` |
||||
|
||||
In this type of situation, you may also consider returning a more specific error |
||||
using the [`grpcio-status`](https://pypi.org/project/grpcio-status/) package. |
@ -0,0 +1,104 @@ |
||||
# Copyright 2019 the gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
"""An example of cancelling requests in gRPC.""" |
||||
|
||||
from __future__ import absolute_import |
||||
from __future__ import division |
||||
from __future__ import print_function |
||||
|
||||
import argparse |
||||
import logging |
||||
import signal |
||||
import sys |
||||
|
||||
import grpc |
||||
|
||||
from examples.python.cancellation import hash_name_pb2 |
||||
from examples.python.cancellation import hash_name_pb2_grpc |
||||
|
||||
_DESCRIPTION = "A client for finding hashes similar to names." |
||||
_LOGGER = logging.getLogger(__name__) |
||||
|
||||
|
||||
def run_unary_client(server_target, name, ideal_distance):
    """Issues a single unary Find RPC and prints its result.

    Installs a SIGINT handler that cancels the in-flight RPC and exits,
    so ctrl-c does not leave the server computing a dead request.

    Args:
      server_target: The host-port pair at which to reach the server.
      name: The desired name to search for.
      ideal_distance: The desired Hamming distance.
    """
    with grpc.insecure_channel(server_target) as channel:
        stub = hash_name_pb2_grpc.HashFinderStub(channel)
        request = hash_name_pb2.HashNameRequest(
            desired_name=name, ideal_hamming_distance=ideal_distance)
        response_future = stub.Find.future(request, wait_for_ready=True)

        def _on_sigint(unused_signum, unused_frame):
            # Cancel the in-flight RPC before terminating the process.
            response_future.cancel()
            sys.exit(0)

        signal.signal(signal.SIGINT, _on_sigint)
        print(response_future.result())
||||
|
||||
|
||||
def run_streaming_client(server_target, name, ideal_distance,
                         interesting_distance):
    """Issues a streaming FindRange RPC and prints each response.

    Installs a SIGINT handler that cancels the stream and exits.

    Args:
      server_target: The host-port pair at which to reach the server.
      name: The desired name to search for.
      ideal_distance: The desired Hamming distance.
      interesting_distance: Candidates below this Hamming distance are also
        streamed back without ending the search.
    """
    with grpc.insecure_channel(server_target) as channel:
        stub = hash_name_pb2_grpc.HashFinderStub(channel)
        request = hash_name_pb2.HashNameRequest(
            desired_name=name,
            ideal_hamming_distance=ideal_distance,
            interesting_hamming_distance=interesting_distance)
        response_stream = stub.FindRange(request, wait_for_ready=True)

        def _on_sigint(unused_signum, unused_frame):
            # The stream itself is a grpc.Future; cancelling it stops the RPC.
            response_stream.cancel()
            sys.exit(0)

        signal.signal(signal.SIGINT, _on_sigint)
        for response in response_stream:
            print(response)
||||
|
||||
|
||||
def main():
    """Parses command-line arguments and dispatches to the proper client."""
    parser = argparse.ArgumentParser(description=_DESCRIPTION)
    parser.add_argument("name", type=str, help='The desired name.')
    parser.add_argument(
        "--ideal-distance",
        default=0,
        nargs='?',
        type=int,
        help="The desired Hamming distance.")
    parser.add_argument(
        '--server',
        default='localhost:50051',
        type=str,
        nargs='?',
        help='The host-port pair at which to reach the server.')
    parser.add_argument(
        '--show-inferior',
        default=None,
        type=int,
        nargs='?',
        help='Also show candidates with a Hamming distance less than this value.'
    )
    args = parser.parse_args()
    # A streaming RPC is only needed when inferior candidates were requested.
    if args.show_inferior is None:
        run_unary_client(args.server, args.name, args.ideal_distance)
    else:
        run_streaming_client(args.server, args.name, args.ideal_distance,
                             args.show_inferior)
||||
|
||||
|
||||
if __name__ == "__main__": |
||||
logging.basicConfig() |
||||
main() |
@ -0,0 +1,56 @@ |
||||
// Copyright 2019 the gRPC authors. |
||||
// |
||||
// Licensed under the Apache License, Version 2.0 (the "License"); |
||||
// you may not use this file except in compliance with the License. |
||||
// You may obtain a copy of the License at |
||||
// |
||||
// http://www.apache.org/licenses/LICENSE-2.0 |
||||
// |
||||
// Unless required by applicable law or agreed to in writing, software |
||||
// distributed under the License is distributed on an "AS IS" BASIS, |
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
// See the License for the specific language governing permissions and |
||||
// limitations under the License. |
||||
|
||||
syntax = "proto3"; |
||||
|
||||
package hash_name; |
||||
|
||||
// A request for a single secret whose hash is similar to a desired name. |
||||
message HashNameRequest {
  // The string that is desired in the secret's hash.
  string desired_name = 1;

  // The ideal Hamming distance between desired_name and the secret that will
  // be searched for. A match at or below this distance ends the search.
  int32 ideal_hamming_distance = 2;

  // A Hamming distance greater than the ideal Hamming distance. Search results
  // with a Hamming distance less than this value but greater than the ideal
  // distance will be returned back to the client but will not terminate the
  // search. Only meaningful for the streaming FindRange RPC.
  int32 interesting_hamming_distance = 3;
}
||||
|
||||
// A single search result produced by the HashFinder service.
message HashNameResponse {
  // The search result (base64-encoded bytestring whose hash matched).
  string secret = 1;

  // The hash of the search result. A substring of this is of
  // ideal_hamming_distance Hamming distance or less from desired_name.
  string hashed_name = 2;

  // The Hamming distance between hashed_name and desired_name.
  int32 hamming_distance = 3;
}
||||
|
||||
// Brute-force search service for secrets whose hashes resemble a name.
service HashFinder {

  // Search for a single string whose hash is similar to the specified
  // desired_name. interesting_hamming_distance is ignored.
  rpc Find (HashNameRequest) returns (HashNameResponse) {}

  // Search for a string whose hash is similar to the specified desired_name,
  // but also stream back less-than-ideal candidates.
  rpc FindRange (HashNameRequest) returns (stream HashNameResponse) {}
}
@ -0,0 +1,148 @@ |
||||
# Copyright 2019 the gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
"""A search algorithm over the space of all bytestrings.""" |
||||
|
||||
from __future__ import absolute_import |
||||
from __future__ import division |
||||
from __future__ import print_function |
||||
|
||||
import base64 |
||||
import hashlib |
||||
import itertools |
||||
import logging |
||||
import struct |
||||
|
||||
from examples.python.cancellation import hash_name_pb2 |
||||
|
||||
_LOGGER = logging.getLogger(__name__) |
||||
_BYTE_MAX = 255 |
||||
|
||||
|
||||
def _get_hamming_distance(a, b): |
||||
"""Calculates hamming distance between strings of equal length.""" |
||||
distance = 0 |
||||
for char_a, char_b in zip(a, b): |
||||
if char_a != char_b: |
||||
distance += 1 |
||||
return distance |
||||
|
||||
|
||||
def _get_substring_hamming_distance(candidate, target): |
||||
"""Calculates the minimum hamming distance between between the target |
||||
and any substring of the candidate. |
||||
|
||||
Args: |
||||
candidate: The string whose substrings will be tested. |
||||
target: The target string. |
||||
|
||||
Returns: |
||||
The minimum Hamming distance between candidate and target. |
||||
""" |
||||
min_distance = None |
||||
if len(target) > len(candidate): |
||||
raise ValueError("Candidate must be at least as long as target.") |
||||
for i in range(len(candidate) - len(target) + 1): |
||||
distance = _get_hamming_distance(candidate[i:i + len(target)].lower(), |
||||
target.lower()) |
||||
if min_distance is None or distance < min_distance: |
||||
min_distance = distance |
||||
return min_distance |
||||
|
||||
|
||||
def _get_hash(secret): |
||||
hasher = hashlib.sha1() |
||||
hasher.update(secret) |
||||
return base64.b64encode(hasher.digest()).decode('ascii') |
||||
|
||||
|
||||
# Raised by search() once the configured maximum number of hashes has been
# computed without finding an ideal candidate.
class ResourceLimitExceededError(Exception):
    """Signifies the request has exceeded configured limits."""
||||
|
||||
|
||||
def _bytestrings_of_length(length):
    """Generates a stream containing all bytestrings of a given length.

    Args:
      length: A positive integer length.

    Yields:
      All bytestrings of length `length`.
    """
    # range() is half-open, so _BYTE_MAX + 1 is required to include the
    # maximum byte value (0xFF); range(_BYTE_MAX) silently skipped it and
    # the generator did not actually cover all bytestrings.
    for digits in itertools.product(range(_BYTE_MAX + 1), repeat=length):
        yield b''.join(struct.pack('B', i) for i in digits)
||||
|
||||
|
||||
def _all_bytestrings():
    """Generates a stream containing all possible bytestrings.

    This generator does not terminate.

    Yields:
      All bytestrings in ascending order of length.
    """
    length_streams = (_bytestrings_of_length(n) for n in itertools.count())
    for bytestring in itertools.chain.from_iterable(length_streams):
        yield bytestring
||||
|
||||
|
||||
def search(target,
           ideal_distance,
           stop_event,
           maximum_hashes,
           interesting_hamming_distance=None):
    """Find candidate strings.

    Search through the space of all bytestrings, in order of increasing
    length, until a hash with a Hamming distance of `ideal_distance` or less
    from `target` has been found.

    Args:
      target: The search string.
      ideal_distance: The desired Hamming distance.
      stop_event: An event indicating whether the RPC should terminate.
      maximum_hashes: The maximum number of hashes to check before stopping.
      interesting_hamming_distance: If specified, strings with a Hamming
        distance from the target below this value will be yielded.

    Yields:
      Instances of HashNameResponse. The final entry in the stream will be of
      `ideal_distance` Hamming distance or less from the target string, while
      all others will be of less than `interesting_hamming_distance`.

    Raises:
      ResourceLimitExceededError: If the computation exceeds `maximum_hashes`
        iterations.
    """
    hashes_computed = 0
    for secret in _all_bytestrings():
        if stop_event.is_set():
            # The RPC was terminated; end the stream. A bare `return` is the
            # PEP 479-safe way to finish a generator: raising StopIteration
            # here is converted to RuntimeError on Python 3.7+.
            return
        candidate_hash = _get_hash(secret)
        distance = _get_substring_hamming_distance(candidate_hash, target)
        if interesting_hamming_distance is not None and distance <= interesting_hamming_distance:
            # Surface interesting candidates, but don't stop.
            yield hash_name_pb2.HashNameResponse(
                secret=base64.b64encode(secret),
                hashed_name=candidate_hash,
                hamming_distance=distance)
        elif distance <= ideal_distance:
            # Yield the ideal candidate and end the stream (see note above on
            # why this is `return` rather than `raise StopIteration()`).
            yield hash_name_pb2.HashNameResponse(
                secret=base64.b64encode(secret),
                hashed_name=candidate_hash,
                hamming_distance=distance)
            return
        hashes_computed += 1
        # `>=` rather than `==` so a non-positive or mid-loop-adjusted limit
        # cannot be skipped over.
        if hashes_computed >= maximum_hashes:
            raise ResourceLimitExceededError()
@ -0,0 +1,124 @@ |
||||
# Copyright 2019 the gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
"""An example of cancelling requests in gRPC.""" |
||||
|
||||
from __future__ import absolute_import |
||||
from __future__ import division |
||||
from __future__ import print_function |
||||
|
||||
from concurrent import futures |
||||
import argparse |
||||
import logging |
||||
import threading |
||||
|
||||
import grpc |
||||
import search |
||||
|
||||
from examples.python.cancellation import hash_name_pb2 |
||||
from examples.python.cancellation import hash_name_pb2_grpc |
||||
|
||||
_LOGGER = logging.getLogger(__name__) |
||||
_SERVER_HOST = 'localhost' |
||||
|
||||
_DESCRIPTION = "A server for finding hashes similar to names." |
||||
|
||||
|
||||
class HashFinder(hash_name_pb2_grpc.HashFinderServicer):
    """Servicer that brute-force searches for secrets hashing near a name."""

    def __init__(self, maximum_hashes):
        # maximum_hashes caps the work done per RPC before it is cancelled.
        super(HashFinder, self).__init__()
        self._maximum_hashes = maximum_hashes

    def Find(self, request, context):
        """Unary handler: returns the single ideal candidate, if any."""
        rpc_ended = threading.Event()

        def _reclaim_thread():
            # Runs when the RPC terminates for any reason; signals the search
            # loop so the servicer thread is released promptly.
            _LOGGER.debug("Attempting to regain servicer thread.")
            rpc_ended.set()

        context.add_callback(_reclaim_thread)
        matches = []
        try:
            matches = list(
                search.search(request.desired_name,
                              request.ideal_hamming_distance, rpc_ended,
                              self._maximum_hashes))
        except search.ResourceLimitExceededError:
            _LOGGER.info("Cancelling RPC due to exhausted resources.")
            context.cancel()
        _LOGGER.debug("Servicer thread returning.")
        return matches[-1] if matches else hash_name_pb2.HashNameResponse()

    def FindRange(self, request, context):
        """Streaming handler: yields interesting candidates as they appear."""
        rpc_ended = threading.Event()

        def _reclaim_thread():
            _LOGGER.debug("Attempting to regain servicer thread.")
            rpc_ended.set()

        context.add_callback(_reclaim_thread)
        candidates = search.search(
            request.desired_name,
            request.ideal_hamming_distance,
            rpc_ended,
            self._maximum_hashes,
            interesting_hamming_distance=request.interesting_hamming_distance)
        try:
            for response in candidates:
                yield response
        except search.ResourceLimitExceededError:
            _LOGGER.info("Cancelling RPC due to exhausted resources.")
            context.cancel()
        _LOGGER.debug("Regained servicer thread.")
||||
|
||||
|
||||
def _running_server(port, maximum_hashes):
    """Starts the HashFinder server and returns it.

    Args:
      port: The port to bind (0 selects an ephemeral port).
      maximum_hashes: Per-RPC cap on hash computations.

    Returns:
      The started grpc.Server.
    """
    # We use only a single servicer thread here to demonstrate that, if
    # managed carefully, cancelled RPCs need not continue occupying servicer
    # threads.
    server = grpc.server(
        futures.ThreadPoolExecutor(max_workers=1), maximum_concurrent_rpcs=1)
    hash_name_pb2_grpc.add_HashFinderServicer_to_server(
        HashFinder(maximum_hashes), server)
    address = '{}:{}'.format(_SERVER_HOST, port)
    actual_port = server.add_insecure_port(address)
    server.start()
    # Report the port actually bound: when `port` is 0 the requested address
    # differs from the one the server is listening on.
    print("Server listening at '{}:{}'".format(_SERVER_HOST, actual_port))
    return server
||||
|
||||
|
||||
def main():
    """Parses command-line arguments and serves until terminated."""
    arg_parser = argparse.ArgumentParser(description=_DESCRIPTION)
    arg_parser.add_argument(
        '--port',
        type=int,
        default=50051,
        nargs='?',
        help='The port on which the server will listen.')
    arg_parser.add_argument(
        '--maximum-hashes',
        type=int,
        default=1000000,
        nargs='?',
        help='The maximum number of hashes to search before cancelling.')
    args = arg_parser.parse_args()
    _running_server(args.port, args.maximum_hashes).wait_for_termination()
||||
|
||||
|
||||
if __name__ == "__main__": |
||||
logging.basicConfig() |
||||
main() |
@ -0,0 +1,87 @@ |
||||
# Copyright 2019 the gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
"""Test for cancellation example.""" |
||||
|
||||
import contextlib |
||||
import os |
||||
import signal |
||||
import socket |
||||
import subprocess |
||||
import unittest |
||||
|
||||
_BINARY_DIR = os.path.realpath( |
||||
os.path.join(os.path.dirname(os.path.abspath(__file__)), '..')) |
||||
_SERVER_PATH = os.path.join(_BINARY_DIR, 'server') |
||||
_CLIENT_PATH = os.path.join(_BINARY_DIR, 'client') |
||||
|
||||
|
||||
@contextlib.contextmanager
def _get_port():
    """Context manager yielding a free port.

    The port is held open with SO_REUSEPORT for the duration of the context
    so that the server under test can bind it concurrently.
    """
    listener = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
    listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
    if not listener.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT):
        raise RuntimeError("Failed to set SO_REUSEPORT.")
    listener.bind(('', 0))
    try:
        yield listener.getsockname()[1]
    finally:
        listener.close()
||||
|
||||
|
||||
def _start_client(server_port,
                  desired_string,
                  ideal_distance,
                  interesting_distance=None):
    """Launches the example client as a subprocess.

    Args:
      server_port: The port on which the server is listening.
      desired_string: The name the client should search for.
      ideal_distance: The desired Hamming distance.
      interesting_distance: Optional threshold for streaming inferior
        candidates.

    Returns:
      The client subprocess.Popen handle.
    """
    # Popen argv entries must be strings; the int threshold previously raised
    # TypeError whenever interesting_distance was supplied.
    interesting_distance_args = () if interesting_distance is None else (
        '--show-inferior', str(interesting_distance))
    return subprocess.Popen((_CLIENT_PATH, desired_string, '--server',
                             'localhost:{}'.format(server_port),
                             '--ideal-distance',
                             str(ideal_distance)) + interesting_distance_args)
||||
|
||||
|
||||
class CancellationExampleTest(unittest.TestCase):
    """End-to-end tests driving the example client and server binaries."""

    def test_successful_run(self):
        # A short search ('aa', ideal distance 0) should complete, the client
        # should exit 0, and the server process should still be alive.
        with _get_port() as test_port:
            server_process = subprocess.Popen((_SERVER_PATH, '--port',
                                               str(test_port)))
            try:
                client_process = _start_client(test_port, 'aa', 0)
                client_return_code = client_process.wait()
                self.assertEqual(0, client_return_code)
                # poll() is None while the process is still running.
                self.assertIsNone(server_process.poll())
            finally:
                server_process.kill()
                server_process.wait()

    def test_graceful_sigint(self):
        # Interrupt a long-running search with SIGINT, then verify the server
        # survives and can still serve a subsequent short request.
        with _get_port() as test_port:
            server_process = subprocess.Popen((_SERVER_PATH, '--port',
                                               str(test_port)))
            try:
                client_process1 = _start_client(test_port, 'aaaaaaaaaa', 0)
                client_process1.send_signal(signal.SIGINT)
                client_process1.wait()
                client_process2 = _start_client(test_port, 'aa', 0)
                client_return_code = client_process2.wait()
                self.assertEqual(0, client_return_code)
                self.assertIsNone(server_process.poll())
            finally:
                server_process.kill()
                server_process.wait()
||||
|
||||
|
||||
if __name__ == '__main__': |
||||
unittest.main(verbosity=2) |
@ -0,0 +1,87 @@ |
||||
/*
|
||||
* |
||||
* Copyright 2019 gRPC authors. |
||||
* |
||||
* Licensed under the Apache License, Version 2.0 (the "License"); |
||||
* you may not use this file except in compliance with the License. |
||||
* You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
* |
||||
*/ |
||||
|
||||
#ifndef GRPCPP_IMPL_CODEGEN_DELEGATING_CHANNEL_H |
||||
#define GRPCPP_IMPL_CODEGEN_DELEGATING_CHANNEL_H |
||||
|
||||
namespace grpc { |
||||
namespace experimental { |
||||
|
||||
/// A ChannelInterface implementation that forwards every operation to a
/// wrapped ("delegate") channel. Intended as a base class for experimental
/// channel wrappers that want to observe or override a subset of operations
/// while delegating the rest.
class DelegatingChannel : public ::grpc::ChannelInterface {
 public:
  virtual ~DelegatingChannel() {}

  // Takes shared ownership of the channel to which all operations are
  // forwarded. NOTE(review): non-explicit single-argument constructor —
  // presumably intentional for wrapper ergonomics; confirm.
  DelegatingChannel(std::shared_ptr<::grpc::ChannelInterface> delegate_channel)
      : delegate_channel_(delegate_channel) {}

  // Forwards connectivity-state queries to the wrapped channel.
  grpc_connectivity_state GetState(bool try_to_connect) override {
    return delegate_channel()->GetState(try_to_connect);
  }

  // Accessor for the wrapped channel.
  std::shared_ptr<::grpc::ChannelInterface> delegate_channel() {
    return delegate_channel_;
  }

 private:
  // The remaining overrides are pure pass-throughs to the delegate. They are
  // private because they implement the ChannelInterface contract rather than
  // forming part of this class's own public surface.
  internal::Call CreateCall(const internal::RpcMethod& method,
                            ClientContext* context,
                            ::grpc_impl::CompletionQueue* cq) final {
    return delegate_channel()->CreateCall(method, context, cq);
  }

  void PerformOpsOnCall(internal::CallOpSetInterface* ops,
                        internal::Call* call) final {
    delegate_channel()->PerformOpsOnCall(ops, call);
  }

  void* RegisterMethod(const char* method) final {
    return delegate_channel()->RegisterMethod(method);
  }

  void NotifyOnStateChangeImpl(grpc_connectivity_state last_observed,
                               gpr_timespec deadline,
                               ::grpc_impl::CompletionQueue* cq,
                               void* tag) override {
    delegate_channel()->NotifyOnStateChangeImpl(last_observed, deadline, cq,
                                                tag);
  }

  bool WaitForStateChangeImpl(grpc_connectivity_state last_observed,
                              gpr_timespec deadline) override {
    return delegate_channel()->WaitForStateChangeImpl(last_observed, deadline);
  }

  internal::Call CreateCallInternal(const internal::RpcMethod& method,
                                    ClientContext* context,
                                    ::grpc_impl::CompletionQueue* cq,
                                    size_t interceptor_pos) final {
    return delegate_channel()->CreateCallInternal(method, context, cq,
                                                  interceptor_pos);
  }

  ::grpc_impl::CompletionQueue* CallbackCQ() final {
    return delegate_channel()->CallbackCQ();
  }

  // The wrapped channel that receives all forwarded operations.
  std::shared_ptr<::grpc::ChannelInterface> delegate_channel_;
};
||||
|
||||
} // namespace experimental
|
||||
} // namespace grpc
|
||||
|
||||
#endif // GRPCPP_IMPL_CODEGEN_DELEGATING_CHANNEL_H
|
@ -1,23 +0,0 @@ |
||||
/* Automatically generated nanopb constant definitions */ |
||||
/* Generated by nanopb-0.3.7-dev */ |
||||
|
||||
#include "src/core/ext/filters/client_channel/health/health.pb.h" |
||||
/* @@protoc_insertion_point(includes) */ |
||||
#if PB_PROTO_HEADER_VERSION != 30 |
||||
#error Regenerate this file with the current version of nanopb generator. |
||||
#endif |
||||
|
||||
|
||||
|
||||
/* nanopb field table for grpc.health.v1.HealthCheckRequest: one optional,
 * statically-allocated string field `service` (tag 1), terminated by
 * PB_LAST_FIELD. Generated code — do not edit by hand. */
const pb_field_t grpc_health_v1_HealthCheckRequest_fields[2] = {
    PB_FIELD( 1, STRING , OPTIONAL, STATIC , FIRST, grpc_health_v1_HealthCheckRequest, service, service, 0),
    PB_LAST_FIELD
};

/* nanopb field table for grpc.health.v1.HealthCheckResponse: one optional
 * unsigned-enum field `status` (tag 1), terminated by PB_LAST_FIELD. */
const pb_field_t grpc_health_v1_HealthCheckResponse_fields[2] = {
    PB_FIELD( 1, UENUM , OPTIONAL, STATIC , FIRST, grpc_health_v1_HealthCheckResponse, status, status, 0),
    PB_LAST_FIELD
};
||||
|
||||
|
||||
/* @@protoc_insertion_point(eof) */ |
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in new issue