mirror of https://github.com/grpc/grpc.git
commit
0a0ca24a84
1251 changed files with 42470 additions and 54799 deletions
@ -1,18 +1,18 @@ |
||||
-Wall |
||||
-Wc++-compat |
||||
-Ithird_party/googletest/include |
||||
-Ithird_party/googletest |
||||
-Iinclude |
||||
-Igens |
||||
-I. |
||||
-Ithird_party/boringssl/include |
||||
-Ithird_party/benchmark/include |
||||
-Ithird_party/zlib |
||||
-Ithird_party/protobuf/src |
||||
-Igens |
||||
-Iinclude |
||||
-Isrc/core/ext/upb-generated |
||||
-Ithird_party/abseil-cpp |
||||
-Ithird_party/cares/cares |
||||
-Ithird_party/benchmark/include |
||||
-Ithird_party/boringssl/include |
||||
-Ithird_party/cares |
||||
-Ithird_party/googletest/googletest/include |
||||
-Ithird_party/cares/cares |
||||
-Ithird_party/googletest |
||||
-Ithird_party/googletest/googlemock/include |
||||
-Ithird_party/nanopb |
||||
|
||||
-Ithird_party/googletest/googletest/include |
||||
-Ithird_party/googletest/include |
||||
-Ithird_party/protobuf/src |
||||
-Ithird_party/upb |
||||
-Ithird_party/zlib |
||||
|
@ -0,0 +1,18 @@ |
||||
--- |
||||
name: Request a cleanup |
||||
about: Suggest a cleanup in our repository |
||||
labels: kind/internal cleanup |
||||
assignees: veblush |
||||
|
||||
--- |
||||
|
||||
<!-- |
||||
|
||||
This form is for bug reports and feature requests ONLY! |
||||
For general questions and troubleshooting, please ask/look for answers here: |
||||
- grpc.io mailing list: https://groups.google.com/forum/#!forum/grpc-io |
||||
- StackOverflow, with "grpc" tag: https://stackoverflow.com/questions/tagged/grpc |
||||
|
||||
Issues specific to *grpc-java*, *grpc-go*, *grpc-node*, *grpc-dart*, *grpc-web* should be created in the repository they belong to (e.g. https://github.com/grpc/grpc-LANGUAGE/issues/new) |
||||
--> |
||||
|
@ -0,0 +1,29 @@ |
||||
--- |
||||
name: Request a feature |
||||
about: Suggest an idea for this project |
||||
labels: kind/enhancement |
||||
assignees: veblush |
||||
|
||||
--- |
||||
|
||||
<!-- |
||||
|
||||
This form is for bug reports and feature requests ONLY! |
||||
For general questions and troubleshooting, please ask/look for answers here: |
||||
- grpc.io mailing list: https://groups.google.com/forum/#!forum/grpc-io |
||||
- StackOverflow, with "grpc" tag: https://stackoverflow.com/questions/tagged/grpc |
||||
|
||||
Issues specific to *grpc-java*, *grpc-go*, *grpc-node*, *grpc-dart*, *grpc-web* should be created in the repository they belong to (e.g. https://github.com/grpc/grpc-LANGUAGE/issues/new) |
||||
--> |
||||
|
||||
### Is your feature request related to a problem? Please describe. |
||||
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] |
||||
|
||||
### Describe the solution you'd like |
||||
A clear and concise description of what you want to happen. |
||||
|
||||
### Describe alternatives you've considered |
||||
A clear and concise description of any alternative solutions or features you've considered. |
||||
|
||||
### Additional context |
||||
Add any other context about the feature request here. |
@ -0,0 +1,11 @@ |
||||
|
||||
|
||||
|
||||
<!-- |
||||
|
||||
Your pull request will be routed to the following person by default for triaging. |
||||
If you know who should review your pull request, please remove the mentioning below. |
||||
|
||||
--> |
||||
|
||||
@veblush |
@ -0,0 +1,59 @@ |
||||
# Configuration for probot-stale - https://github.com/probot/stale |
||||
|
||||
# Number of days of inactivity before an Issue or Pull Request becomes stale |
||||
daysUntilStale: 180 |
||||
|
||||
# Number of days of inactivity before an Issue or Pull Request with the stale label is closed. |
||||
# Set to false to disable. If disabled, issues still need to be closed manually, but will remain marked as stale. |
||||
daysUntilClose: 1 |
||||
|
||||
# Only issues or pull requests with all of these labels are check if stale. Defaults to `[]` (disabled) |
||||
onlyLabels: [] |
||||
|
||||
# Issues or Pull Requests with these labels will never be considered stale. Set to `[]` to disable |
||||
exemptLabels: |
||||
- "disposition/never stale" |
||||
|
||||
# Set to true to ignore issues in a project (defaults to false) |
||||
exemptProjects: false |
||||
|
||||
# Set to true to ignore issues in a milestone (defaults to false) |
||||
exemptMilestones: false |
||||
|
||||
# Set to true to ignore issues with an assignee (defaults to false) |
||||
exemptAssignees: false |
||||
|
||||
# Label to use when marking as stale |
||||
staleLabel: "disposition/stale" |
||||
|
||||
# Comment to post when marking as stale. Set to `false` to disable |
||||
markComment: > |
||||
This issue/PR has been automatically marked as stale because it has not had any update (including |
||||
commits, comments, labels, milestones, etc) for 180 days. It will be closed automatically if no |
||||
further update occurs in 1 day. Thank you for your contributions! |
||||
|
||||
# Comment to post when removing the stale label. |
||||
# unmarkComment: > |
||||
# Your comment here. |
||||
|
||||
# Comment to post when closing a stale Issue or Pull Request. |
||||
# closeComment: > |
||||
# Your comment here. |
||||
|
||||
# Limit the number of actions per hour, from 1-30. Default is 30 |
||||
limitPerRun: 30 |
||||
|
||||
# Limit to only `issues` or `pulls` |
||||
# only: issues |
||||
|
||||
# Optionally, specify configuration settings that are specific to just 'issues' or 'pulls': |
||||
# pulls: |
||||
# daysUntilStale: 30 |
||||
# markComment: > |
||||
# This pull request has been automatically marked as stale because it has not had |
||||
# recent activity. It will be closed if no further activity occurs. Thank you |
||||
# for your contributions. |
||||
|
||||
# issues: |
||||
# exemptLabels: |
||||
# - confirmed |
@ -1,3 +1,4 @@ |
||||
Dropbox, Inc. |
||||
Google Inc. |
||||
Skyscanner Ltd. |
||||
WeWork Companies Inc. |
||||
|
File diff suppressed because it is too large
Load Diff
@ -1,6 +1,6 @@ |
||||
set noparent |
||||
@nicolasnoble |
||||
@jtattermusch |
||||
@a11r |
||||
@vjpai |
||||
@veblush |
||||
@gnossen |
||||
|
||||
|
@ -0,0 +1,215 @@ |
||||
load( |
||||
"//bazel:protobuf.bzl", |
||||
"get_include_protoc_args", |
||||
"get_plugin_args", |
||||
"proto_path_to_generated_filename", |
||||
) |
||||
load(":grpc_util.bzl", "to_upper_camel_with_extension",) |
||||
|
||||
_GRPC_PROTO_HEADER_FMT = "{}.pbrpc.h" |
||||
_GRPC_PROTO_SRC_FMT = "{}.pbrpc.m" |
||||
_PROTO_HEADER_FMT = "{}.pbobjc.h" |
||||
_PROTO_SRC_FMT = "{}.pbobjc.m" |
||||
_GENERATED_PROTOS_DIR = "_generated_protos" |
||||
|
||||
_GENERATE_HDRS = 1 |
||||
_GENERATE_SRCS = 2 |
||||
_GENERATE_NON_ARC_SRCS = 3 |
||||
|
||||
def _generate_objc_impl(ctx): |
||||
"""Implementation of the generate_objc rule.""" |
||||
protos = [ |
||||
f |
||||
for src in ctx.attr.deps |
||||
for f in src[ProtoInfo].transitive_imports.to_list() |
||||
] |
||||
|
||||
target_package = _join_directories([ctx.label.workspace_root, ctx.label.package]) |
||||
|
||||
files_with_rpc = [_label_to_full_file_path(f, target_package) for f in ctx.attr.srcs] |
||||
|
||||
outs = [] |
||||
for proto in protos: |
||||
outs += [_get_output_file_name_from_proto(proto, _PROTO_HEADER_FMT)] |
||||
outs += [_get_output_file_name_from_proto(proto, _PROTO_SRC_FMT)] |
||||
|
||||
file_path = _get_full_path_from_file(proto) |
||||
if file_path in files_with_rpc: |
||||
outs += [_get_output_file_name_from_proto(proto, _GRPC_PROTO_HEADER_FMT)] |
||||
outs += [_get_output_file_name_from_proto(proto, _GRPC_PROTO_SRC_FMT)] |
||||
|
||||
out_files = [ctx.actions.declare_file(out) for out in outs] |
||||
dir_out = _join_directories([ |
||||
str(ctx.genfiles_dir.path), target_package, _GENERATED_PROTOS_DIR |
||||
]) |
||||
|
||||
arguments = [] |
||||
if ctx.executable.plugin: |
||||
arguments += get_plugin_args( |
||||
ctx.executable.plugin, |
||||
[], |
||||
dir_out, |
||||
False, |
||||
) |
||||
tools = [ctx.executable.plugin] |
||||
arguments += ["--objc_out=" + dir_out] |
||||
|
||||
arguments += ["--proto_path=."] |
||||
arguments += get_include_protoc_args(protos) |
||||
# Include the output directory so that protoc puts the generated code in the |
||||
# right directory. |
||||
arguments += ["--proto_path={}".format(dir_out)] |
||||
arguments += ["--proto_path={}".format(_get_directory_from_proto(proto)) for proto in protos] |
||||
arguments += [_get_full_path_from_file(proto) for proto in protos] |
||||
|
||||
# create a list of well known proto files if the argument is non-None |
||||
well_known_proto_files = [] |
||||
if ctx.attr.use_well_known_protos: |
||||
f = ctx.attr.well_known_protos.files.to_list()[0].dirname |
||||
# go two levels up so that #import "google/protobuf/..." is correct |
||||
arguments += ["-I{0}".format(f + "/../..")] |
||||
well_known_proto_files = ctx.attr.well_known_protos.files.to_list() |
||||
ctx.actions.run( |
||||
inputs = protos + well_known_proto_files, |
||||
tools = tools, |
||||
outputs = out_files, |
||||
executable = ctx.executable._protoc, |
||||
arguments = arguments, |
||||
) |
||||
|
||||
return struct(files = depset(out_files)) |
||||
|
||||
def _label_to_full_file_path(src, package): |
||||
if not src.startswith("//"): |
||||
# Relative from current package |
||||
if not src.startswith(":"): |
||||
# "a.proto" -> ":a.proto" |
||||
src = ":" + src |
||||
src = "//" + package + src |
||||
# Converts //path/to/package:File.ext to path/to/package/File.ext. |
||||
src = src.replace("//", "") |
||||
src = src.replace(":", "/") |
||||
if src.startswith("/"): |
||||
# "//:a.proto" -> "/a.proto" so remove the initial slash |
||||
return src[1:] |
||||
else: |
||||
return src |
||||
|
||||
def _get_output_file_name_from_proto(proto, fmt): |
||||
return proto_path_to_generated_filename( |
||||
_GENERATED_PROTOS_DIR + "/" + |
||||
_get_directory_from_proto(proto) + _get_slash_or_null_from_proto(proto) + |
||||
to_upper_camel_with_extension(_get_file_name_from_proto(proto), "proto"), |
||||
fmt, |
||||
) |
||||
|
||||
def _get_file_name_from_proto(proto): |
||||
return proto.path.rpartition("/")[2] |
||||
|
||||
def _get_slash_or_null_from_proto(proto): |
||||
"""Potentially returns empty (if the file is in the root directory)""" |
||||
return proto.path.rpartition("/")[1] |
||||
|
||||
def _get_directory_from_proto(proto): |
||||
return proto.path.rpartition("/")[0] |
||||
|
||||
def _get_full_path_from_file(file): |
||||
gen_dir_length = 0 |
||||
# if file is generated, then prepare to remote its root |
||||
# (including CPU architecture...) |
||||
if not file.is_source: |
||||
gen_dir_length = len(file.root.path) + 1 |
||||
|
||||
return file.path[gen_dir_length:] |
||||
|
||||
def _join_directories(directories): |
||||
massaged_directories = [directory for directory in directories if len(directory) != 0] |
||||
return "/".join(massaged_directories) |
||||
|
||||
|
||||
generate_objc = rule( |
||||
attrs = { |
||||
"deps": attr.label_list( |
||||
mandatory = True, |
||||
allow_empty = False, |
||||
providers = [ProtoInfo], |
||||
), |
||||
"plugin": attr.label( |
||||
default = "@com_github_grpc_grpc//src/compiler:grpc_objective_c_plugin", |
||||
executable = True, |
||||
providers = ["files_to_run"], |
||||
cfg = "host", |
||||
), |
||||
"srcs": attr.string_list( |
||||
mandatory = False, |
||||
allow_empty = True |
||||
), |
||||
"use_well_known_protos": attr.bool( |
||||
mandatory = False, |
||||
default = False |
||||
), |
||||
"well_known_protos": attr.label( |
||||
default = "@com_google_protobuf//:well_known_protos" |
||||
), |
||||
"_protoc": attr.label( |
||||
default = Label("//external:protocol_compiler"), |
||||
executable = True, |
||||
cfg = "host", |
||||
), |
||||
}, |
||||
output_to_genfiles = True, |
||||
implementation = _generate_objc_impl |
||||
) |
||||
|
||||
def _group_objc_files_impl(ctx): |
||||
suffix = "" |
||||
if ctx.attr.gen_mode == _GENERATE_HDRS: |
||||
suffix = "h" |
||||
elif ctx.attr.gen_mode == _GENERATE_SRCS: |
||||
suffix = "pbrpc.m" |
||||
elif ctx.attr.gen_mode == _GENERATE_NON_ARC_SRCS: |
||||
suffix = "pbobjc.m" |
||||
else: |
||||
fail("Undefined gen_mode") |
||||
out_files = [ |
||||
file |
||||
for file in ctx.attr.src.files.to_list() |
||||
if file.basename.endswith(suffix) |
||||
] |
||||
return struct(files = depset(out_files)) |
||||
|
||||
generate_objc_hdrs = rule( |
||||
attrs = { |
||||
"src": attr.label( |
||||
mandatory = True, |
||||
), |
||||
"gen_mode": attr.int( |
||||
default = _GENERATE_HDRS, |
||||
) |
||||
}, |
||||
implementation = _group_objc_files_impl |
||||
) |
||||
|
||||
generate_objc_srcs = rule( |
||||
attrs = { |
||||
"src": attr.label( |
||||
mandatory = True, |
||||
), |
||||
"gen_mode": attr.int( |
||||
default = _GENERATE_SRCS, |
||||
) |
||||
}, |
||||
implementation = _group_objc_files_impl |
||||
) |
||||
|
||||
generate_objc_non_arc_srcs = rule( |
||||
attrs = { |
||||
"src": attr.label( |
||||
mandatory = True, |
||||
), |
||||
"gen_mode": attr.int( |
||||
default = _GENERATE_NON_ARC_SRCS, |
||||
) |
||||
}, |
||||
implementation = _group_objc_files_impl |
||||
) |
@ -1,8 +1,76 @@ |
||||
load("//third_party/py:python_configure.bzl", "python_configure") |
||||
load("@io_bazel_rules_python//python:pip.bzl", "pip_repositories") |
||||
load("@grpc_python_dependencies//:requirements.bzl", "pip_install") |
||||
"""Load dependencies needed to compile and test the grpc python library as a 3rd-party consumer.""" |
||||
|
||||
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository") |
||||
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") |
||||
load("@com_github_grpc_grpc//third_party/py:python_configure.bzl", "python_configure") |
||||
|
||||
def grpc_python_deps(): |
||||
native.bind( |
||||
name = "six", |
||||
actual = "@six_archive//:six", |
||||
) |
||||
|
||||
# protobuf binds to the name "six", so we can't use it here. |
||||
# See https://github.com/bazelbuild/bazel/issues/1952 for why bind is |
||||
# horrible. |
||||
if "six_archive" not in native.existing_rules(): |
||||
http_archive( |
||||
name = "six_archive", |
||||
strip_prefix = "six-1.12.0", |
||||
build_file = "@com_github_grpc_grpc//third_party:six.BUILD", |
||||
sha256 = "d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73", |
||||
urls = ["https://files.pythonhosted.org/packages/dd/bf/4138e7bfb757de47d1f4b6994648ec67a51efe58fa907c1e11e350cddfca/six-1.12.0.tar.gz"], |
||||
) |
||||
|
||||
if "enum34" not in native.existing_rules(): |
||||
http_archive( |
||||
name = "enum34", |
||||
build_file = "@com_github_grpc_grpc//third_party:enum34.BUILD", |
||||
strip_prefix = "enum34-1.1.6", |
||||
sha256 = "8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1", |
||||
urls = ["https://files.pythonhosted.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz"], |
||||
) |
||||
|
||||
if "futures" not in native.existing_rules(): |
||||
http_archive( |
||||
name = "futures", |
||||
build_file = "@com_github_grpc_grpc//third_party:futures.BUILD", |
||||
strip_prefix = "futures-3.3.0", |
||||
sha256 = "7e033af76a5e35f58e56da7a91e687706faf4e7bdfb2cbc3f2cca6b9bcda9794", |
||||
urls = ["https://files.pythonhosted.org/packages/47/04/5fc6c74ad114032cd2c544c575bffc17582295e9cd6a851d6026ab4b2c00/futures-3.3.0.tar.gz"], |
||||
) |
||||
|
||||
if "io_bazel_rules_python" not in native.existing_rules(): |
||||
git_repository( |
||||
name = "io_bazel_rules_python", |
||||
commit = "fdbb17a4118a1728d19e638a5291b4c4266ea5b8", |
||||
remote = "https://github.com/bazelbuild/rules_python.git", |
||||
) |
||||
|
||||
|
||||
if "rules_python" not in native.existing_rules(): |
||||
http_archive( |
||||
name = "rules_python", |
||||
url = "https://github.com/bazelbuild/rules_python/archive/9d68f24659e8ce8b736590ba1e4418af06ec2552.zip", |
||||
sha256 = "f7402f11691d657161f871e11968a984e5b48b023321935f5a55d7e56cf4758a", |
||||
strip_prefix = "rules_python-9d68f24659e8ce8b736590ba1e4418af06ec2552", |
||||
) |
||||
|
||||
python_configure(name = "local_config_python") |
||||
pip_repositories() |
||||
pip_install() |
||||
|
||||
native.bind( |
||||
name = "python_headers", |
||||
actual = "@local_config_python//:python_headers", |
||||
) |
||||
|
||||
if "cython" not in native.existing_rules(): |
||||
http_archive( |
||||
name = "cython", |
||||
build_file = "@com_github_grpc_grpc//third_party:cython.BUILD", |
||||
sha256 = "d68138a2381afbdd0876c3cb2a22389043fa01c4badede1228ee073032b07a27", |
||||
strip_prefix = "cython-c2b80d87658a8525ce091cbe146cb7eaa29fed5c", |
||||
urls = [ |
||||
"https://github.com/cython/cython/archive/c2b80d87658a8525ce091cbe146cb7eaa29fed5c.tar.gz", |
||||
], |
||||
) |
||||
|
||||
|
@ -0,0 +1,46 @@ |
||||
# Follows convention set in objectivec_helpers.cc in the protobuf ObjC compiler. |
||||
_upper_segments_list = ["url", "http", "https"] |
||||
|
||||
def strip_extension(str): |
||||
return str.rpartition(".")[0] |
||||
|
||||
def capitalize(word): |
||||
if word in _upper_segments_list: |
||||
return word.upper() |
||||
else: |
||||
return word.capitalize() |
||||
|
||||
def lower_underscore_to_upper_camel(str): |
||||
str = strip_extension(str) |
||||
camel_case_str = "" |
||||
word = "" |
||||
for c in str.elems(): # NB: assumes ASCII! |
||||
if c.isalpha(): |
||||
word += c.lower() |
||||
else: |
||||
# Last word is finished. |
||||
if len(word): |
||||
camel_case_str += capitalize(word) |
||||
word = "" |
||||
if c.isdigit(): |
||||
camel_case_str += c |
||||
|
||||
# Otherwise, drop the character. See UnderscoresToCamelCase in: |
||||
# third_party/protobuf/src/google/protobuf/compiler/objectivec/objectivec_helpers.cc |
||||
|
||||
if len(word): |
||||
camel_case_str += capitalize(word) |
||||
return camel_case_str |
||||
|
||||
def file_to_upper_camel(src): |
||||
elements = src.rpartition("/") |
||||
upper_camel = lower_underscore_to_upper_camel(elements[-1]) |
||||
return "".join(list(elements[:-1]) + [upper_camel]) |
||||
|
||||
def file_with_extension(src, ext): |
||||
elements = src.rpartition("/") |
||||
return "".join(list(elements[:-1]) + [elements[-1], "." + ext]) |
||||
|
||||
def to_upper_camel_with_extension(src, ext): |
||||
src = file_to_upper_camel(src) |
||||
return file_with_extension(src, ext) |
@ -0,0 +1,69 @@ |
||||
load( |
||||
"//bazel:generate_objc.bzl", |
||||
"generate_objc", |
||||
"generate_objc_hdrs", |
||||
"generate_objc_srcs", |
||||
"generate_objc_non_arc_srcs" |
||||
) |
||||
load("//bazel:protobuf.bzl", "well_known_proto_libs") |
||||
|
||||
def objc_grpc_library(name, deps, srcs = [], use_well_known_protos = False, **kwargs): |
||||
"""Generates messages and/or service stubs for given proto_library and all transitively dependent proto files |
||||
|
||||
Args: |
||||
name: name of target |
||||
deps: a list of proto_library targets that needs to be compiled |
||||
srcs: a list of labels to proto files with service stubs to be generated, |
||||
labels specified must include service stubs; otherwise Bazel will complain about srcs being empty |
||||
use_well_known_protos: whether to use the well known protos defined in |
||||
@com_google_protobuf//src/google/protobuf, default to false |
||||
**kwargs: other arguments |
||||
""" |
||||
objc_grpc_library_name = "_" + name + "_objc_grpc_library" |
||||
|
||||
generate_objc( |
||||
name = objc_grpc_library_name, |
||||
srcs = srcs, |
||||
deps = deps, |
||||
use_well_known_protos = use_well_known_protos, |
||||
**kwargs |
||||
) |
||||
|
||||
generate_objc_hdrs( |
||||
name = objc_grpc_library_name + "_hdrs", |
||||
src = ":" + objc_grpc_library_name, |
||||
) |
||||
|
||||
generate_objc_non_arc_srcs( |
||||
name = objc_grpc_library_name + "_non_arc_srcs", |
||||
src = ":" + objc_grpc_library_name, |
||||
) |
||||
|
||||
arc_srcs = None |
||||
if len(srcs) > 0: |
||||
generate_objc_srcs( |
||||
name = objc_grpc_library_name + "_srcs", |
||||
src = ":" + objc_grpc_library_name, |
||||
) |
||||
arc_srcs = [":" + objc_grpc_library_name + "_srcs"] |
||||
|
||||
native.objc_library( |
||||
name = name, |
||||
hdrs = [":" + objc_grpc_library_name + "_hdrs"], |
||||
non_arc_srcs = [":" + objc_grpc_library_name + "_non_arc_srcs"], |
||||
srcs = arc_srcs, |
||||
defines = [ |
||||
"GPB_USE_PROTOBUF_FRAMEWORK_IMPORTS=0", |
||||
"GPB_GRPC_FORWARD_DECLARE_MESSAGE_PROTO=0", |
||||
], |
||||
includes = [ |
||||
"_generated_protos", |
||||
"src/objective-c", |
||||
], |
||||
deps = [ |
||||
"@com_github_grpc_grpc//src/objective-c:proto_objc_rpc", |
||||
"@com_google_protobuf//:protobuf_objc", |
||||
], |
||||
**kwargs |
||||
) |
||||
|
@ -0,0 +1,2 @@ |
||||
bazel-* |
||||
tools/bazel-* |
@ -0,0 +1,62 @@ |
||||
# gRPC Bazel BUILD file. |
||||
# |
||||
# Copyright 2019 The gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
|
||||
load("@com_github_grpc_grpc//bazel:python_rules.bzl", "py_proto_library", "py_grpc_library") |
||||
|
||||
package(default_testonly = 1) |
||||
|
||||
proto_library( |
||||
name = "helloworld_proto", |
||||
srcs = ["helloworld.proto"], |
||||
deps = [ |
||||
"@com_google_protobuf//:duration_proto", |
||||
"@com_google_protobuf//:timestamp_proto", |
||||
], |
||||
) |
||||
|
||||
py_proto_library( |
||||
name = "helloworld_py_pb2", |
||||
deps = [":helloworld_proto"], |
||||
) |
||||
|
||||
py_grpc_library( |
||||
name = "helloworld_py_pb2_grpc", |
||||
srcs = [":helloworld_proto"], |
||||
deps = [":helloworld_py_pb2"], |
||||
) |
||||
|
||||
py_proto_library( |
||||
name = "duration_py_pb2", |
||||
deps = ["@com_google_protobuf//:duration_proto"], |
||||
) |
||||
|
||||
py_proto_library( |
||||
name = "timestamp_py_pb2", |
||||
deps = ["@com_google_protobuf//:timestamp_proto"], |
||||
) |
||||
|
||||
py_test( |
||||
name = "import_test", |
||||
main = "helloworld.py", |
||||
srcs = ["helloworld.py"], |
||||
deps = [ |
||||
":helloworld_py_pb2", |
||||
":helloworld_py_pb2_grpc", |
||||
":duration_py_pb2", |
||||
":timestamp_py_pb2", |
||||
], |
||||
python_version = "PY3", |
||||
) |
@ -0,0 +1,5 @@ |
||||
## Bazel Workspace Test |
||||
|
||||
This directory houses a test ensuring that downstream projects can use |
||||
`@com_github_grpc_grpc//src/python/grpcio:grpcio`, `py_proto_library`, and |
||||
`py_grpc_library`. |
@ -0,0 +1,20 @@ |
||||
local_repository( |
||||
name = "com_github_grpc_grpc", |
||||
path = "../../..", |
||||
) |
||||
|
||||
load("@com_github_grpc_grpc//bazel:grpc_deps.bzl", "grpc_deps") |
||||
grpc_deps() |
||||
|
||||
load("@com_google_protobuf//:protobuf_deps.bzl", "protobuf_deps") |
||||
protobuf_deps() |
||||
|
||||
# TODO(https://github.com/grpc/grpc/issues/19835): Remove. |
||||
load("@upb//bazel:workspace_deps.bzl", "upb_deps") |
||||
upb_deps() |
||||
|
||||
load("@build_bazel_rules_apple//apple:repositories.bzl", "apple_rules_dependencies") |
||||
apple_rules_dependencies() |
||||
|
||||
load("@build_bazel_apple_support//lib:repositories.bzl", "apple_support_dependencies") |
||||
apple_support_dependencies() |
@ -0,0 +1,43 @@ |
||||
// Copyright 2019 The gRPC authors. |
||||
// |
||||
// Licensed under the Apache License, Version 2.0 (the "License"); |
||||
// you may not use this file except in compliance with the License. |
||||
// You may obtain a copy of the License at |
||||
// |
||||
// http://www.apache.org/licenses/LICENSE-2.0 |
||||
// |
||||
// Unless required by applicable law or agreed to in writing, software |
||||
// distributed under the License is distributed on an "AS IS" BASIS, |
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
// See the License for the specific language governing permissions and |
||||
// limitations under the License. |
||||
|
||||
syntax = "proto3"; |
||||
|
||||
option java_multiple_files = true; |
||||
option java_package = "io.grpc.examples.helloworld"; |
||||
option java_outer_classname = "HelloWorldProto"; |
||||
option objc_class_prefix = "HLW"; |
||||
|
||||
package helloworld; |
||||
|
||||
import "google/protobuf/timestamp.proto"; |
||||
import "google/protobuf/duration.proto"; |
||||
|
||||
// The greeting service definition. |
||||
service Greeter { |
||||
// Sends a greeting |
||||
rpc SayHello (HelloRequest) returns (HelloReply) {} |
||||
} |
||||
|
||||
// The request message containing the user's name. |
||||
message HelloRequest { |
||||
string name = 1; |
||||
google.protobuf.Timestamp request_initiation = 2; |
||||
} |
||||
|
||||
// The response message containing the greetings |
||||
message HelloReply { |
||||
string message = 1; |
||||
google.protobuf.Duration request_duration = 2; |
||||
} |
@ -0,0 +1,73 @@ |
||||
# Copyright 2019 the gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
"""The Python implementation of the GRPC helloworld.Greeter client.""" |
||||
|
||||
import contextlib |
||||
import datetime |
||||
import logging |
||||
import unittest |
||||
|
||||
import grpc |
||||
|
||||
import duration_pb2 |
||||
import helloworld_pb2 |
||||
import helloworld_pb2_grpc |
||||
|
||||
_HOST = 'localhost' |
||||
_SERVER_ADDRESS = '{}:0'.format(_HOST) |
||||
|
||||
|
||||
class Greeter(helloworld_pb2_grpc.GreeterServicer): |
||||
|
||||
def SayHello(self, request, context): |
||||
request_in_flight = datetime.now() - request.request_initation.ToDatetime() |
||||
request_duration = duration_pb2.Duration() |
||||
request_duration.FromTimedelta(request_in_flight) |
||||
return helloworld_pb2.HelloReply( |
||||
message='Hello, %s!' % request.name, |
||||
request_duration=request_duration, |
||||
) |
||||
|
||||
|
||||
@contextlib.contextmanager |
||||
def _listening_server(): |
||||
server = grpc.server(futures.ThreadPoolExecutor()) |
||||
helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server) |
||||
port = server.add_insecure_port(_SERVER_ADDRESS) |
||||
server.start() |
||||
try: |
||||
yield port |
||||
finally: |
||||
server.stop(0) |
||||
|
||||
|
||||
class ImportTest(unittest.TestCase): |
||||
def run(): |
||||
with _listening_server() as port: |
||||
with grpc.insecure_channel('{}:{}'.format(_HOST, port)) as channel: |
||||
stub = helloworld_pb2_grpc.GreeterStub(channel) |
||||
request_timestamp = timestamp_pb2.Timestamp() |
||||
request_timestamp.GetCurrentTime() |
||||
response = stub.SayHello(helloworld_pb2.HelloRequest( |
||||
name='you', |
||||
request_initiation=request_timestamp, |
||||
), |
||||
wait_for_ready=True) |
||||
self.assertEqual(response.message, "Hello, you!") |
||||
self.assertGreater(response.request_duration.microseconds, 0) |
||||
|
||||
|
||||
if __name__ == '__main__': |
||||
logging.basicConfig() |
||||
unittest.main() |
@ -0,0 +1 @@ |
||||
../../../../tools/bazel |
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,18 @@ |
||||
# Copyright 2019 gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
|
||||
set(UPB_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR}/third_party/upb) |
||||
|
||||
set(_gRPC_UPB_INCLUDE_DIR "${UPB_ROOT_DIR}") |
||||
set(_gRPC_UPB_GRPC_GENERATED_DIR "${CMAKE_CURRENT_SOURCE_DIR}/src/core/ext/upb-generated") |
@ -0,0 +1,119 @@ |
||||
# Copyright 2019 gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
|
||||
licenses(["notice"]) # 3-clause BSD |
||||
|
||||
package(default_visibility = ["//visibility:public"]) |
||||
|
||||
load("@com_github_grpc_grpc//bazel:objc_grpc_library.bzl", "objc_grpc_library") |
||||
load("@build_bazel_rules_apple//apple:ios.bzl", "ios_application") |
||||
load("@build_bazel_rules_apple//apple:macos.bzl", "macos_application") |
||||
|
||||
objc_grpc_library( |
||||
name = "HelloWorld_grpc_proto", |
||||
srcs = ["//examples:protos/helloworld.proto"], |
||||
deps = ["//examples:helloworld_proto"], |
||||
tags = ["manual"], |
||||
) |
||||
|
||||
# This one works with import "external/com_github_grpc_grpc/examples/protos/Helloworld.pbrpc.h" |
||||
objc_grpc_library( |
||||
name = "HelloWorld_grpc_proto_external", |
||||
srcs = ["//external/com_github_grpc_grpc/examples:protos/helloworld.proto"], |
||||
deps = ["@com_github_grpc_grpc//examples:helloworld_proto"], |
||||
tags = ["manual"], |
||||
) |
||||
|
||||
objc_library( |
||||
name = "HelloWorld-lib", |
||||
srcs = glob(["helloworld/**/*.m",]), |
||||
hdrs = glob(["helloworld/**/*.h"]), |
||||
data = glob([ |
||||
"helloworld/HelloWorld/Base.lproj/**", |
||||
"helloworld/HelloWorld/Images.xcassets/**", |
||||
]), |
||||
includes = ["helloworld/HelloWorld"], |
||||
deps = [":HelloWorld_grpc_proto"], |
||||
tags = ["manual"], |
||||
) |
||||
|
||||
ios_application( |
||||
name = "HelloWorld", |
||||
bundle_id = "Google.HelloWorld", |
||||
families = [ |
||||
"iphone", |
||||
"ipad", |
||||
], |
||||
minimum_os_version = "8.0", |
||||
infoplists = ["helloworld/HelloWorld/Info.plist"], |
||||
deps = [":HelloWorld-lib"], |
||||
tags = ["manual"], |
||||
) |
||||
|
||||
objc_library( |
||||
name = "HelloWorldMacos-lib", |
||||
srcs = glob(["helloworld_macos/**/*.m",]), |
||||
hdrs = glob(["helloworld_macos/**/*.h"]), |
||||
data = glob([ |
||||
"helloworld_macos/HelloWorld/Base.lproj/**", |
||||
"helloworld_macos/HelloWorld/Images.xcassets/**", |
||||
]), |
||||
includes = ["helloworld_macos/HelloWorld"], |
||||
deps = [":HelloWorld_grpc_proto"], |
||||
tags = ["manual"], |
||||
) |
||||
|
||||
macos_application( |
||||
name = "HelloWorldMacos", |
||||
bundle_id = "io.grpc.HelloWorld", |
||||
minimum_os_version = "10.13", |
||||
entitlements = "helloworld_macos/HelloWorld/Helloworld.entitlements", |
||||
infoplists = ["helloworld_macos/HelloWorld/Info.plist"], |
||||
deps = [":HelloWorldMacos-lib"], |
||||
tags = ["manual"], |
||||
) |
||||
|
||||
objc_grpc_library( |
||||
name = "RouteGuide", |
||||
srcs = ["//examples:protos/route_guide.proto"], |
||||
deps = ["//examples:route_guide_proto"], |
||||
tags = ["manual"], |
||||
) |
||||
|
||||
objc_library( |
||||
name = "RouteGuideClient-lib", |
||||
srcs = glob(["route_guide/**/*.m"]), |
||||
hdrs = glob(["route_guide/**/*.h"]), |
||||
data = glob([ |
||||
"route_guide/Misc/Base.lproj/**", |
||||
"route_guide/Misc/Images.xcassets/**", |
||||
"route_guide/route_guide_db.json", |
||||
]), |
||||
includes = ["route_guide/Misc"], |
||||
deps = [":RouteGuide"], |
||||
tags = ["manual"], |
||||
) |
||||
|
||||
ios_application( |
||||
name = "RouteGuideClient", |
||||
bundle_id = "gRPC.RouteGuideClient", |
||||
families = [ |
||||
"iphone", |
||||
"ipad", |
||||
], |
||||
minimum_os_version = "8.0", |
||||
infoplists = ["route_guide/Misc/Info.plist"], |
||||
deps = [":RouteGuideClient-lib"], |
||||
tags = ["manual"], |
||||
) |
@ -0,0 +1,84 @@ |
||||
# gRPC Bazel BUILD file. |
||||
# |
||||
# Copyright 2019 The gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
|
||||
load("@grpc_python_dependencies//:requirements.bzl", "requirement") |
||||
load("//bazel:python_rules.bzl", "py_proto_library", "py_grpc_library") |
||||
|
||||
# Everything in this package is an example used only by tests.
package(default_testonly = 1)

# Protobuf messages for the hash-search (cancellation) example.
proto_library(
    name = "hash_name_proto",
    srcs = ["hash_name.proto"],
)

# Python message classes generated from hash_name_proto.
py_proto_library(
    name = "hash_name_py_pb2",
    deps = [":hash_name_proto"],
)

# Python gRPC stub/servicer classes for the HashFinder service.
py_grpc_library(
    name = "hash_name_py_pb2_grpc",
    srcs = [":hash_name_proto"],
    deps = [":hash_name_py_pb2"],
)

# Example client binary demonstrating client-side cancellation.
py_binary(
    name = "client",
    srcs = ["client.py"],
    deps = [
        "//src/python/grpcio/grpc:grpcio",
        ":hash_name_py_pb2",
        ":hash_name_py_pb2_grpc",
        "//external:six"
    ],
    srcs_version = "PY2AND3",
    python_version = "PY3",
)

# Brute-force search algorithm shared by the server.
py_library(
    name = "search",
    srcs = ["search.py"],
    srcs_version = "PY2AND3",
    deps = [
        ":hash_name_py_pb2",
    ],
)

# Example server binary demonstrating server-side cancellation.
# `futures` is only needed on Python 2, hence the select().
py_binary(
    name = "server",
    srcs = ["server.py"],
    deps = [
        "//src/python/grpcio/grpc:grpcio",
        ":hash_name_py_pb2",
        ":search",
    ] + select({
        "//conditions:default": ["@futures//:futures"],
        "//:python3": [],
    }),
    srcs_version = "PY2AND3",
    python_version = "PY3",
)

# End-to-end test driving the client and server binaries as subprocesses.
py_test(
    name = "test/_cancellation_example_test",
    srcs = ["test/_cancellation_example_test.py"],
    data = [
        ":client",
        ":server"
    ],
    size = "small",
    python_version = "PY3",
)
@ -0,0 +1,127 @@ |
||||
### Cancellation |
||||
|
||||
In the example, we implement a silly algorithm. We search for bytestrings whose |
||||
hashes are similar to a given search string. For example, say we're looking for |
||||
the string "doctor". Our algorithm may return `JrqhZVkTDoctYrUlXDbL6pfYQHU=` or |
||||
`RC9/7mlM3ldy4TdoctOc6WzYbO4=`. This is a brute force algorithm, so the server |
||||
performing the search must be conscious of the resources it allows to each client |
||||
and each client must be conscientious of the resources it demands of the server. |
||||
|
||||
In particular, we ensure that client processes cancel the stream explicitly |
||||
before terminating and we ensure that server processes cancel RPCs that have gone on longer |
||||
than a certain number of iterations. |
||||
|
||||
#### Cancellation on the Client Side |
||||
|
||||
A client may cancel an RPC for several reasons. Perhaps the data it requested |
||||
has been made irrelevant. Perhaps you, as the client, want to be a good citizen |
||||
of the server and are conserving compute resources. |
||||
|
||||
##### Cancelling a Server-Side Unary RPC from the Client |
||||
|
||||
The default RPC methods on a stub will simply return the result of an RPC. |
||||
|
||||
```python |
||||
>>> stub = hash_name_pb2_grpc.HashFinderStub(channel) |
||||
>>> stub.Find(hash_name_pb2.HashNameRequest(desired_name=name)) |
||||
<hash_name_pb2.HashNameResponse object at 0x7fe2eb8ce2d0> |
||||
``` |
||||
|
||||
But you may use the `future()` method to receive an instance of `grpc.Future`. |
||||
This interface allows you to wait on a response with a timeout, add a callback |
||||
to be executed when the RPC completes, or to cancel the RPC before it has |
||||
completed. |
||||
|
||||
In the example, we use this interface to cancel our in-progress RPC when the |
||||
user interrupts the process with ctrl-c. |
||||
|
||||
```python |
||||
stub = hash_name_pb2_grpc.HashFinderStub(channel) |
||||
future = stub.Find.future(hash_name_pb2.HashNameRequest(desired_name=name)) |
||||
def cancel_request(unused_signum, unused_frame): |
||||
future.cancel() |
||||
sys.exit(0) |
||||
signal.signal(signal.SIGINT, cancel_request) |
||||
|
||||
result = future.result() |
||||
print(result) |
||||
``` |
||||
|
||||
We also call `sys.exit(0)` to terminate the process. If we do not do this, then |
||||
`future.result()` will throw an `RpcError`. Alternatively, you may catch this
||||
exception. |
||||
|
||||
|
||||
##### Cancelling a Server-Side Streaming RPC from the Client |
||||
|
||||
Cancelling a Server-side streaming RPC is even simpler from the perspective of |
||||
the gRPC API. The default stub method is already an instance of `grpc.Future`, |
||||
so the methods outlined above still apply. It is also a generator, so we may |
||||
iterate over it to yield the results of our RPC. |
||||
|
||||
```python |
||||
stub = hash_name_pb2_grpc.HashFinderStub(channel) |
||||
result_generator = stub.FindRange(hash_name_pb2.HashNameRequest(desired_name=name)) |
||||
def cancel_request(unused_signum, unused_frame): |
||||
result_generator.cancel() |
||||
sys.exit(0) |
||||
signal.signal(signal.SIGINT, cancel_request) |
||||
for result in result_generator: |
||||
print(result) |
||||
``` |
||||
|
||||
We also call `sys.exit(0)` here to terminate the process. Alternatively, you may |
||||
catch the `RpcError` raised by the for loop upon cancellation. |
||||
|
||||
|
||||
#### Cancellation on the Server Side |
||||
|
||||
A server is responsible for cancellation in two ways. It must respond in some way
||||
when a client initiates a cancellation, otherwise long-running computations |
||||
could continue indefinitely. |
||||
|
||||
It may also decide to cancel the RPC for its own reasons. In our example, the |
||||
server can be configured to cancel an RPC after a certain number of hashes has |
||||
been computed in order to conserve compute resources. |
||||
|
||||
##### Responding to Cancellations from a Servicer Thread |
||||
|
||||
It's important to remember that a gRPC Python server is backed by a thread pool |
||||
with a fixed size. When an RPC is cancelled, the library does *not* terminate |
||||
your servicer thread. It is your responsibility as the application author to |
||||
ensure that your servicer thread terminates soon after the RPC has been |
||||
cancelled. |
||||
|
||||
In this example, we use the `ServicerContext.add_callback` method to set a |
||||
`threading.Event` object when the RPC is terminated. We pass this `Event` object |
||||
down through our hashing algorithm and ensure to check that the RPC is still |
||||
ongoing before each iteration. |
||||
|
||||
```python |
||||
stop_event = threading.Event() |
||||
def on_rpc_done(): |
||||
# Regain servicer thread. |
||||
stop_event.set() |
||||
context.add_callback(on_rpc_done) |
||||
secret = _find_secret(stop_event) |
||||
``` |
||||
|
||||
##### Initiating a Cancellation on the Server Side |
||||
|
||||
Initiating a cancellation from the server side is simpler. Just call |
||||
`ServicerContext.cancel()`. |
||||
|
||||
In our example, we ensure that no single client is monopolizing the server by |
||||
cancelling after a configurable number of hashes have been checked. |
||||
|
||||
```python |
||||
try: |
||||
for candidate in secret_generator: |
||||
yield candidate |
||||
except ResourceLimitExceededError: |
||||
print("Cancelling RPC due to exhausted resources.") |
||||
context.cancel() |
||||
``` |
||||
|
||||
In this type of situation, you may also consider returning a more specific error |
||||
using the [`grpcio-status`](https://pypi.org/project/grpcio-status/) package. |
@ -0,0 +1,104 @@ |
||||
# Copyright 2019 the gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
"""An example of cancelling requests in gRPC.""" |
||||
|
||||
from __future__ import absolute_import |
||||
from __future__ import division |
||||
from __future__ import print_function |
||||
|
||||
import argparse |
||||
import logging |
||||
import signal |
||||
import sys |
||||
|
||||
import grpc |
||||
|
||||
from examples.python.cancellation import hash_name_pb2 |
||||
from examples.python.cancellation import hash_name_pb2_grpc |
||||
|
||||
_DESCRIPTION = "A client for finding hashes similar to names." |
||||
_LOGGER = logging.getLogger(__name__) |
||||
|
||||
|
||||
def run_unary_client(server_target, name, ideal_distance):
    """Issue a single Find RPC and print its result.

    Installs a SIGINT handler that cancels the in-flight RPC and exits,
    so interrupting the process does not leave the server searching.
    """
    with grpc.insecure_channel(server_target) as channel:
        stub = hash_name_pb2_grpc.HashFinderStub(channel)
        request = hash_name_pb2.HashNameRequest(
            desired_name=name, ideal_hamming_distance=ideal_distance)
        future = stub.Find.future(request, wait_for_ready=True)

        def cancel_request(unused_signum, unused_frame):
            # Explicitly cancel so the server can reclaim its resources.
            future.cancel()
            sys.exit(0)

        signal.signal(signal.SIGINT, cancel_request)
        print(future.result())
||||
|
||||
|
||||
def run_streaming_client(server_target, name, ideal_distance,
                         interesting_distance):
    """Issue a FindRange RPC and print each streamed candidate.

    Installs a SIGINT handler that cancels the stream and exits, so
    interrupting the process does not leave the server searching.
    """
    with grpc.insecure_channel(server_target) as channel:
        stub = hash_name_pb2_grpc.HashFinderStub(channel)
        request = hash_name_pb2.HashNameRequest(
            desired_name=name,
            ideal_hamming_distance=ideal_distance,
            interesting_hamming_distance=interesting_distance)
        result_generator = stub.FindRange(request, wait_for_ready=True)

        def cancel_request(unused_signum, unused_frame):
            # Explicitly cancel so the server can reclaim its resources.
            result_generator.cancel()
            sys.exit(0)

        signal.signal(signal.SIGINT, cancel_request)
        for result in result_generator:
            print(result)
||||
|
||||
|
||||
def main():
    """Parse command-line arguments and run the appropriate client mode."""
    parser = argparse.ArgumentParser(description=_DESCRIPTION)
    parser.add_argument("name", type=str, help='The desired name.')
    parser.add_argument(
        "--ideal-distance",
        default=0,
        nargs='?',
        type=int,
        help="The desired Hamming distance.")
    parser.add_argument(
        '--server',
        default='localhost:50051',
        type=str,
        nargs='?',
        help='The host-port pair at which to reach the server.')
    parser.add_argument(
        '--show-inferior',
        default=None,
        type=int,
        nargs='?',
        help='Also show candidates with a Hamming distance less than this value.'
    )

    args = parser.parse_args()
    # Requesting inferior candidates implies the streaming interface.
    if args.show_inferior is None:
        run_unary_client(args.server, args.name, args.ideal_distance)
    else:
        run_streaming_client(args.server, args.name, args.ideal_distance,
                             args.show_inferior)
||||
|
||||
|
||||
if __name__ == "__main__": |
||||
logging.basicConfig() |
||||
main() |
@ -0,0 +1,56 @@ |
||||
// Copyright 2019 the gRPC authors. |
||||
// |
||||
// Licensed under the Apache License, Version 2.0 (the "License"); |
||||
// you may not use this file except in compliance with the License. |
||||
// You may obtain a copy of the License at |
||||
// |
||||
// http://www.apache.org/licenses/LICENSE-2.0 |
||||
// |
||||
// Unless required by applicable law or agreed to in writing, software |
||||
// distributed under the License is distributed on an "AS IS" BASIS, |
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
// See the License for the specific language governing permissions and |
||||
// limitations under the License. |
||||
|
||||
syntax = "proto3"; |
||||
|
||||
package hash_name; |
||||
|
||||
// A request for a single secret whose hash is similar to a desired name.
message HashNameRequest {
  // The string that is desired in the secret's hash.
  string desired_name = 1;

  // The ideal Hamming distance between desired_name and the secret that will
  // be searched for.
  int32 ideal_hamming_distance = 2;

  // A Hamming distance greater than the ideal Hamming distance. Search results
  // with a Hamming distance less than this value but greater than the ideal
  // distance will be returned back to the client but will not terminate the
  // search.
  int32 interesting_hamming_distance = 3;
}
||||
|
||||
// A single candidate produced by the hash search.
message HashNameResponse {
  // The search result.
  string secret = 1;

  // The hash of the search result. A substring of this is of
  // ideal_hamming_distance Hamming distance or less from desired_name.
  string hashed_name = 2;

  // The Hamming distance between hashed_name and desired_name.
  int32 hamming_distance = 3;
}
||||
|
||||
// Service that brute-force searches for secrets whose hashes resemble a
// desired name.
service HashFinder {

  // Search for a single string whose hash is similar to the specified
  // desired_name. interesting_hamming_distance is ignored.
  rpc Find (HashNameRequest) returns (HashNameResponse) {}

  // Search for a string whose hash is similar to the specified desired_name,
  // but also stream back less-than-ideal candidates.
  rpc FindRange (HashNameRequest) returns (stream HashNameResponse) {}
}
@ -0,0 +1,148 @@ |
||||
# Copyright 2019 the gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
"""A search algorithm over the space of all bytestrings.""" |
||||
|
||||
from __future__ import absolute_import |
||||
from __future__ import division |
||||
from __future__ import print_function |
||||
|
||||
import base64 |
||||
import hashlib |
||||
import itertools |
||||
import logging |
||||
import struct |
||||
|
||||
from examples.python.cancellation import hash_name_pb2 |
||||
|
||||
_LOGGER = logging.getLogger(__name__) |
||||
_BYTE_MAX = 255 |
||||
|
||||
|
||||
def _get_hamming_distance(a, b): |
||||
"""Calculates hamming distance between strings of equal length.""" |
||||
distance = 0 |
||||
for char_a, char_b in zip(a, b): |
||||
if char_a != char_b: |
||||
distance += 1 |
||||
return distance |
||||
|
||||
|
||||
def _get_substring_hamming_distance(candidate, target): |
||||
"""Calculates the minimum hamming distance between between the target |
||||
and any substring of the candidate. |
||||
|
||||
Args: |
||||
candidate: The string whose substrings will be tested. |
||||
target: The target string. |
||||
|
||||
Returns: |
||||
The minimum Hamming distance between candidate and target. |
||||
""" |
||||
min_distance = None |
||||
if len(target) > len(candidate): |
||||
raise ValueError("Candidate must be at least as long as target.") |
||||
for i in range(len(candidate) - len(target) + 1): |
||||
distance = _get_hamming_distance(candidate[i:i + len(target)].lower(), |
||||
target.lower()) |
||||
if min_distance is None or distance < min_distance: |
||||
min_distance = distance |
||||
return min_distance |
||||
|
||||
|
||||
def _get_hash(secret): |
||||
hasher = hashlib.sha1() |
||||
hasher.update(secret) |
||||
return base64.b64encode(hasher.digest()).decode('ascii') |
||||
|
||||
|
||||
class ResourceLimitExceededError(Exception):
    """Raised when a search exceeds its configured hash budget."""
||||
|
||||
|
||||
def _bytestrings_of_length(length): |
||||
"""Generates a stream containing all bytestrings of a given length. |
||||
|
||||
Args: |
||||
length: A positive integer length. |
||||
|
||||
Yields: |
||||
All bytestrings of length `length`. |
||||
""" |
||||
for digits in itertools.product(range(_BYTE_MAX), repeat=length): |
||||
yield b''.join(struct.pack('B', i) for i in digits) |
||||
|
||||
|
||||
def _all_bytestrings():
    """Generates a stream containing all possible bytestrings.

    This generator does not terminate.

    Yields:
      All bytestrings in ascending order of length.
    """
    # Enumerate lengths 0, 1, 2, ... and emit every bytestring of each
    # length before moving on to the next.
    for length in itertools.count():
        for bytestring in _bytestrings_of_length(length):
            yield bytestring
||||
|
||||
|
||||
def search(target,
           ideal_distance,
           stop_event,
           maximum_hashes,
           interesting_hamming_distance=None):
    """Find candidate strings.

    Search through the space of all bytestrings, in order of increasing
    length, until a hash with a Hamming distance of `ideal_distance` or
    less from `target` has been found.

    Args:
      target: The search string.
      ideal_distance: The desired Hamming distance.
      stop_event: An event indicating whether the RPC should terminate.
      maximum_hashes: The maximum number of hashes to check before stopping.
      interesting_hamming_distance: If specified, strings with a Hamming
        distance from the target below this value will be yielded.

    Yields:
      Instances of HashNameResponse. The final entry in the stream will be of
      `ideal_distance` Hamming distance or less from the target string,
      while all others will be of less than `interesting_hamming_distance`.

    Raises:
      ResourceLimitExceededError: If the computation exceeds `maximum_hashes`
        iterations.
    """
    hashes_computed = 0
    for secret in _all_bytestrings():
        if stop_event.is_set():
            # Under PEP 479 (Python 3.7+), raising StopIteration inside a
            # generator is converted to RuntimeError; `return` is the
            # correct way to end the stream early.
            return
        candidate_hash = _get_hash(secret)
        distance = _get_substring_hamming_distance(candidate_hash, target)
        if interesting_hamming_distance is not None and distance <= interesting_hamming_distance:
            # Surface interesting candidates, but don't stop.
            yield hash_name_pb2.HashNameResponse(
                secret=base64.b64encode(secret),
                hashed_name=candidate_hash,
                hamming_distance=distance)
        elif distance <= ideal_distance:
            # Yield the ideal candidate and end the stream.
            yield hash_name_pb2.HashNameResponse(
                secret=base64.b64encode(secret),
                hashed_name=candidate_hash,
                hamming_distance=distance)
            return
        hashes_computed += 1
        if hashes_computed == maximum_hashes:
            raise ResourceLimitExceededError()
@ -0,0 +1,124 @@ |
||||
# Copyright 2019 the gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
"""An example of cancelling requests in gRPC.""" |
||||
|
||||
from __future__ import absolute_import |
||||
from __future__ import division |
||||
from __future__ import print_function |
||||
|
||||
from concurrent import futures |
||||
import argparse |
||||
import logging |
||||
import threading |
||||
|
||||
import grpc |
||||
import search |
||||
|
||||
from examples.python.cancellation import hash_name_pb2 |
||||
from examples.python.cancellation import hash_name_pb2_grpc |
||||
|
||||
_LOGGER = logging.getLogger(__name__) |
||||
_SERVER_HOST = 'localhost' |
||||
|
||||
_DESCRIPTION = "A server for finding hashes similar to names." |
||||
|
||||
|
||||
class HashFinder(hash_name_pb2_grpc.HashFinderServicer):
    """Servicer that brute-force searches for secrets resembling a name.

    Both handlers register an RPC-termination callback that sets a stop
    event, so a client-side cancellation lets the search loop exit and the
    servicer thread be reclaimed.  They also cancel the RPC themselves when
    the search raises ResourceLimitExceededError.
    """

    def __init__(self, maximum_hashes):
        super(HashFinder, self).__init__()
        # Per-RPC budget: maximum hashes examined before the server cancels.
        self._maximum_hashes = maximum_hashes

    def Find(self, request, context):
        """Unary handler: drain the search and return the last candidate.

        Returns an empty HashNameResponse when no candidate was found
        (including when the search was stopped or cancelled).
        """
        stop_event = threading.Event()

        def on_rpc_done():
            # Invoked when the RPC terminates for any reason; unblocks the
            # search loop so this thread can be reused.
            _LOGGER.debug("Attempting to regain servicer thread.")
            stop_event.set()

        context.add_callback(on_rpc_done)
        candidates = []
        try:
            candidates = list(
                search.search(request.desired_name,
                              request.ideal_hamming_distance, stop_event,
                              self._maximum_hashes))
        except search.ResourceLimitExceededError:
            # Server-initiated cancellation: the hash budget ran out.
            _LOGGER.info("Cancelling RPC due to exhausted resources.")
            context.cancel()
        _LOGGER.debug("Servicer thread returning.")
        if not candidates:
            return hash_name_pb2.HashNameResponse()
        return candidates[-1]

    def FindRange(self, request, context):
        """Streaming handler: yield each candidate as it is found.

        Also streams less-than-ideal candidates below the requested
        interesting_hamming_distance.
        """
        stop_event = threading.Event()

        def on_rpc_done():
            # Invoked when the RPC terminates for any reason; unblocks the
            # search loop so this thread can be reused.
            _LOGGER.debug("Attempting to regain servicer thread.")
            stop_event.set()

        context.add_callback(on_rpc_done)
        secret_generator = search.search(
            request.desired_name,
            request.ideal_hamming_distance,
            stop_event,
            self._maximum_hashes,
            interesting_hamming_distance=request.interesting_hamming_distance)
        try:
            for candidate in secret_generator:
                yield candidate
        except search.ResourceLimitExceededError:
            # Server-initiated cancellation: the hash budget ran out.
            _LOGGER.info("Cancelling RPC due to exhausted resources.")
            context.cancel()
        _LOGGER.debug("Regained servicer thread.")
||||
|
||||
|
||||
def _running_server(port, maximum_hashes):
    """Start the hash-finder gRPC server and return it once listening.

    Args:
      port: The port to bind, or 0 to let the OS choose one.
      maximum_hashes: Per-RPC hash budget passed through to HashFinder.

    Returns:
      The started grpc.Server instance.
    """
    # A single servicer thread with maximum_concurrent_rpcs=1 demonstrates
    # that, if managed carefully, cancelled RPCs need not keep occupying
    # servicer threads.
    server = grpc.server(
        futures.ThreadPoolExecutor(max_workers=1), maximum_concurrent_rpcs=1)
    hash_name_pb2_grpc.add_HashFinderServicer_to_server(
        HashFinder(maximum_hashes), server)
    actual_port = server.add_insecure_port('{}:{}'.format(_SERVER_HOST, port))
    server.start()
    # Report the port actually bound: the previous code printed the
    # requested address and ignored actual_port, which was misleading when
    # `port` was 0 and the OS picked one.
    print("Server listening at '{}'".format('{}:{}'.format(
        _SERVER_HOST, actual_port)))
    return server
||||
|
||||
|
||||
def main():
    """Parse arguments, start the server, and block until termination."""
    parser = argparse.ArgumentParser(description=_DESCRIPTION)
    parser.add_argument(
        '--port',
        type=int,
        default=50051,
        nargs='?',
        help='The port on which the server will listen.')
    parser.add_argument(
        '--maximum-hashes',
        type=int,
        default=1000000,
        nargs='?',
        help='The maximum number of hashes to search before cancelling.')
    args = parser.parse_args()
    _running_server(args.port, args.maximum_hashes).wait_for_termination()
||||
|
||||
|
||||
if __name__ == "__main__": |
||||
logging.basicConfig() |
||||
main() |
@ -0,0 +1,87 @@ |
||||
# Copyright 2019 the gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
"""Test for cancellation example.""" |
||||
|
||||
import contextlib |
||||
import os |
||||
import signal |
||||
import socket |
||||
import subprocess |
||||
import unittest |
||||
|
||||
_BINARY_DIR = os.path.realpath( |
||||
os.path.join(os.path.dirname(os.path.abspath(__file__)), '..')) |
||||
_SERVER_PATH = os.path.join(_BINARY_DIR, 'server') |
||||
_CLIENT_PATH = os.path.join(_BINARY_DIR, 'client') |
||||
|
||||
|
||||
@contextlib.contextmanager |
||||
def _get_port(): |
||||
sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) |
||||
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) |
||||
if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 0: |
||||
raise RuntimeError("Failed to set SO_REUSEPORT.") |
||||
sock.bind(('', 0)) |
||||
try: |
||||
yield sock.getsockname()[1] |
||||
finally: |
||||
sock.close() |
||||
|
||||
|
||||
def _start_client(server_port,
                  desired_string,
                  ideal_distance,
                  interesting_distance=None):
    """Launch the example client as a subprocess.

    Args:
      server_port: Port on which the server under test is listening.
      desired_string: The name the client should search for.
      ideal_distance: The ideal Hamming distance to request.
      interesting_distance: If given, also request inferior candidates below
        this distance (exercises the streaming code path).

    Returns:
      The subprocess.Popen object for the running client.
    """
    interesting_distance_args = () if interesting_distance is None else (
        # Popen requires string arguments; the previous code passed the raw
        # value, which would raise TypeError for an int.
        '--show-inferior', str(interesting_distance))
    return subprocess.Popen((_CLIENT_PATH, desired_string, '--server',
                             'localhost:{}'.format(server_port),
                             '--ideal-distance',
                             str(ideal_distance)) + interesting_distance_args)
||||
|
||||
|
||||
class CancellationExampleTest(unittest.TestCase):
    """End-to-end checks that client cancellation leaves the server healthy."""

    def test_successful_run(self):
        with _get_port() as test_port:
            server_proc = subprocess.Popen((_SERVER_PATH, '--port',
                                            str(test_port)))
            try:
                client_proc = _start_client(test_port, 'aa', 0)
                self.assertEqual(0, client_proc.wait())
                # The server must still be running after serving the client.
                self.assertIsNone(server_proc.poll())
            finally:
                server_proc.kill()
                server_proc.wait()

    def test_graceful_sigint(self):
        with _get_port() as test_port:
            server_proc = subprocess.Popen((_SERVER_PATH, '--port',
                                            str(test_port)))
            try:
                # Interrupt a long-running search mid-flight...
                interrupted_client = _start_client(test_port, 'aaaaaaaaaa', 0)
                interrupted_client.send_signal(signal.SIGINT)
                interrupted_client.wait()
                # ...then verify a fresh client still completes successfully.
                follow_up_client = _start_client(test_port, 'aa', 0)
                self.assertEqual(0, follow_up_client.wait())
                self.assertIsNone(server_proc.poll())
            finally:
                server_proc.kill()
                server_proc.wait()
||||
|
||||
|
||||
if __name__ == '__main__': |
||||
unittest.main(verbosity=2) |
@ -0,0 +1,36 @@ |
||||
## Data transmission demo for using gRPC in Python |
||||
|
||||
在Python中使用gRPC时, 进行数据传输的四种方式 [官方指南](<https://grpc.io/docs/guides/concepts/#unary-rpc>) |
||||
|
||||
- #### 一元模式 |
||||
|
||||
在一次调用中, 客户端只能向服务器传输一次请求数据, 服务器也只能返回一次响应 |
||||
|
||||
`client.py: simple_method` |
||||
|
||||
`server.py: SimpleMethod` |
||||
|
||||
- #### 客户端流模式 |
||||
|
||||
在一次调用中, 客户端可以多次向服务器传输数据, 但是服务器只能返回一次响应 |
||||
|
||||
`client.py: client_streaming_method ` |
||||
|
||||
`server.py: ClientStreamingMethod` |
||||
|
||||
- #### 服务端流模式 |
||||
|
||||
在一次调用中, 客户端只能向服务器传输一次请求数据, 但是服务器可以多次返回响应 |
||||
|
||||
`client.py: server_streaming_method` |
||||
|
||||
`server.py: ServerStreamingMethod` |
||||
|
||||
- #### 双向流模式 |
||||
|
||||
在一次调用中, 客户端和服务器都可以向对方多次收发数据 |
||||
|
||||
`client.py: bidirectional_streaming_method` |
||||
|
||||
`server.py: BidirectionalStreamingMethod` |
||||
|
@ -0,0 +1,37 @@ |
||||
## Data transmission demo for using gRPC in Python |
||||
|
||||
Four ways of data transmission when gRPC is used in Python. [Official Guide](<https://grpc.io/docs/guides/concepts/#unary-rpc>)
||||
|
||||
- #### unary-unary |
||||
|
||||
In a single call, the client can only send request once, and the server can only respond once. |
||||
|
||||
`client.py: simple_method` |
||||
|
||||
`server.py: SimpleMethod` |
||||
|
||||
- #### stream-unary |
||||
|
||||
In a single call, the client can transfer data to the server an arbitrary number of times, but the server can only return a response once. |
||||
|
||||
`client.py: client_streaming_method` |
||||
|
||||
`server.py: ClientStreamingMethod` |
||||
|
||||
- #### unary-stream |
||||
|
||||
In a single call, the client can only transmit data to the server at one time, but the server can return the response many times. |
||||
|
||||
`client.py: server_streaming_method` |
||||
|
||||
`server.py: ServerStreamingMethod` |
||||
|
||||
- #### stream-stream |
||||
|
||||
In a single call, both client and server can send and receive data |
||||
to each other multiple times. |
||||
|
||||
`client.py: bidirectional_streaming_method` |
||||
|
||||
`server.py: BidirectionalStreamingMethod` |
||||
|
@ -0,0 +1,114 @@ |
||||
# Copyright 2019 gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
"""The example of four ways of data transmission using gRPC in Python.""" |
||||
|
||||
import time |
||||
import grpc |
||||
|
||||
import demo_pb2_grpc |
||||
import demo_pb2 |
||||
|
||||
SERVER_ADDRESS = "localhost:23333" |
||||
CLIENT_ID = 1 |
||||
|
||||
|
||||
# unary-unary: in a single call, the client sends exactly one request and
# the server sends back exactly one response.
def simple_method(stub):
    """Demonstrate the unary-unary RPC pattern."""
    print("--------------Call SimpleMethod Begin--------------")
    response = stub.SimpleMethod(
        demo_pb2.Request(
            client_id=CLIENT_ID, request_data="called by Python client"))
    print("resp from server(%d), the message=%s" % (response.server_id,
                                                    response.response_data))
    print("--------------Call SimpleMethod Over---------------")
||||
|
||||
|
||||
# stream-unary: in a single call, the client can send data to the server
# several times, but the server returns only one response.
def client_streaming_method(stub):
    """Demonstrate the stream-unary RPC pattern."""
    print("--------------Call ClientStreamingMethod Begin--------------")

    # Lazily produce the five requests as the library consumes them.
    requests = (demo_pb2.Request(
        client_id=CLIENT_ID,
        request_data=("called by Python client, message:%d" % i))
                for i in range(5))

    response = stub.ClientStreamingMethod(requests)
    print("resp from server(%d), the message=%s" % (response.server_id,
                                                    response.response_data))
    print("--------------Call ClientStreamingMethod Over---------------")
||||
|
||||
|
||||
# unary-stream: in a single call, the client sends exactly one request, but
# the server can return the response many times.
def server_streaming_method(stub):
    """Demonstrate the unary-stream RPC pattern."""
    print("--------------Call ServerStreamingMethod Begin--------------")
    request = demo_pb2.Request(
        client_id=CLIENT_ID, request_data="called by Python client")
    for response in stub.ServerStreamingMethod(request):
        print("recv from server(%d), message=%s" % (response.server_id,
                                                    response.response_data))

    print("--------------Call ServerStreamingMethod Over---------------")
||||
|
||||
|
||||
# stream-stream: both peers may send and receive multiple messages within a
# single call.
def bidirectional_streaming_method(stub):
    """Demonstrate the stream-stream RPC: both sides stream messages."""
    print(
        "--------------Call BidirectionalStreamingMethod Begin---------------")

    # Generator feeding the outgoing stream; it pauses a second after each
    # message so the server's replies interleave visibly with ours.
    def request_messages():
        for seq in range(5):
            yield demo_pb2.Request(
                client_id=CLIENT_ID,
                request_data=("called by Python client, message: %d" % seq))
            time.sleep(1)

    for reply in stub.BidirectionalStreamingMethod(request_messages()):
        print("recv from server(%d), message=%s" %
              (reply.server_id, reply.response_data))

    print("--------------Call BidirectionalStreamingMethod Over---------------")
||||
|
||||
|
||||
def main():
    """Open a channel to the demo server and run all four call patterns."""
    with grpc.insecure_channel(SERVER_ADDRESS) as channel:
        stub = demo_pb2_grpc.GRPCDemoStub(channel)
        simple_method(stub)
        client_streaming_method(stub)
        server_streaming_method(stub)
        bidirectional_streaming_method(stub)


if __name__ == '__main__':
    main()
@ -0,0 +1,69 @@ |
||||
// Copyright 2019 gRPC authors. |
||||
// |
||||
// Licensed under the Apache License, Version 2.0 (the "License"); |
||||
// you may not use this file except in compliance with the License. |
||||
// You may obtain a copy of the License at |
||||
// |
||||
// http://www.apache.org/licenses/LICENSE-2.0 |
||||
// |
||||
// Unless required by applicable law or agreed to in writing, software |
||||
// distributed under the License is distributed on an "AS IS" BASIS, |
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
// See the License for the specific language governing permissions and |
||||
// limitations under the License. |
||||
|
||||
// Syntax version declaration. Must be placed on the first line of non-commentary.
syntax = "proto3";
// The document of proto3: https://developers.google.com/protocol-buffers/docs/proto3

// Package name definition, which can be omitted in Python.
package demo;

/*
`message` is used to define the structure of the data to be transmitted; after
the equal sign is the field number. Each field in the message definition has a
unique number. The overall format is similar to defining a class in Python or
a structure in Golang.
*/
message Request {
    int64 client_id = 1;
    string request_data = 2;
}

message Response {
    int64 server_id = 1;
    string response_data = 2;
}

// `service` is used to define methods for gRPC services in a fixed format,
// similar to defining an interface in Golang.
service GRPCDemo {
    // unary-unary (In a single call, the client can only send the request once,
    // and the server can only respond once.)
    rpc SimpleMethod (Request) returns (Response);

    // stream-unary (In a single call, the client can transfer data to the
    // server several times, but the server can only return a response once.)
    rpc ClientStreamingMethod (stream Request) returns (Response);

    // unary-stream (In a single call, the client can only transmit data to the
    // server at one time, but the server can return the response many times.)
    rpc ServerStreamingMethod (Request) returns (stream Response);

    // stream-stream (In a single call, both client and server can send and
    // receive data to each other multiple times.)
    rpc BidirectionalStreamingMethod (stream Request) returns (stream Response);
}
||||
|
@ -0,0 +1,174 @@ |
||||
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: demo.proto
# NOTE: machine-generated module; regenerate with protoc rather than editing.

import sys
# Py2/Py3 shim: serialized descriptor bytes need latin1-encoding on Python 3.
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


# File-level descriptor carrying the full serialized demo.proto definition.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='demo.proto',
  package='demo',
  syntax='proto3',
  serialized_options=None,
  serialized_pb=_b('\n\ndemo.proto\x12\x04\x64\x65mo\"2\n\x07Request\x12\x11\n\tclient_id\x18\x01 \x01(\x03\x12\x14\n\x0crequest_data\x18\x02 \x01(\t\"4\n\x08Response\x12\x11\n\tserver_id\x18\x01 \x01(\x03\x12\x15\n\rresponse_data\x18\x02 \x01(\t2\xf0\x01\n\x08GRPCDemo\x12-\n\x0cSimpleMethod\x12\r.demo.Request\x1a\x0e.demo.Response\x12\x38\n\x15\x43lientStreamingMethod\x12\r.demo.Request\x1a\x0e.demo.Response(\x01\x12\x38\n\x15ServerStreamingMethod\x12\r.demo.Request\x1a\x0e.demo.Response0\x01\x12\x41\n\x1c\x42idirectionalStreamingMethod\x12\r.demo.Request\x1a\x0e.demo.Response(\x01\x30\x01\x62\x06proto3')
)


# Descriptor for the `demo.Request` message (fields: client_id, request_data).
_REQUEST = _descriptor.Descriptor(
  name='Request',
  full_name='demo.Request',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='client_id', full_name='demo.Request.client_id', index=0,
      number=1, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='request_data', full_name='demo.Request.request_data', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=20,
  serialized_end=70,
)


# Descriptor for the `demo.Response` message (fields: server_id, response_data).
_RESPONSE = _descriptor.Descriptor(
  name='Response',
  full_name='demo.Response',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='server_id', full_name='demo.Response.server_id', index=0,
      number=1, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='response_data', full_name='demo.Response.response_data', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=72,
  serialized_end=124,
)

DESCRIPTOR.message_types_by_name['Request'] = _REQUEST
DESCRIPTOR.message_types_by_name['Response'] = _RESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)

# Concrete message classes are synthesized from the descriptors above.
Request = _reflection.GeneratedProtocolMessageType('Request', (_message.Message,), {
  'DESCRIPTOR' : _REQUEST,
  '__module__' : 'demo_pb2'
  # @@protoc_insertion_point(class_scope:demo.Request)
  })
_sym_db.RegisterMessage(Request)

Response = _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), {
  'DESCRIPTOR' : _RESPONSE,
  '__module__' : 'demo_pb2'
  # @@protoc_insertion_point(class_scope:demo.Response)
  })
_sym_db.RegisterMessage(Response)


# Descriptor for the demo.GRPCDemo service and its four RPC methods.
_GRPCDEMO = _descriptor.ServiceDescriptor(
  name='GRPCDemo',
  full_name='demo.GRPCDemo',
  file=DESCRIPTOR,
  index=0,
  serialized_options=None,
  serialized_start=127,
  serialized_end=367,
  methods=[
  _descriptor.MethodDescriptor(
    name='SimpleMethod',
    full_name='demo.GRPCDemo.SimpleMethod',
    index=0,
    containing_service=None,
    input_type=_REQUEST,
    output_type=_RESPONSE,
    serialized_options=None,
  ),
  _descriptor.MethodDescriptor(
    name='ClientStreamingMethod',
    full_name='demo.GRPCDemo.ClientStreamingMethod',
    index=1,
    containing_service=None,
    input_type=_REQUEST,
    output_type=_RESPONSE,
    serialized_options=None,
  ),
  _descriptor.MethodDescriptor(
    name='ServerStreamingMethod',
    full_name='demo.GRPCDemo.ServerStreamingMethod',
    index=2,
    containing_service=None,
    input_type=_REQUEST,
    output_type=_RESPONSE,
    serialized_options=None,
  ),
  _descriptor.MethodDescriptor(
    name='BidirectionalStreamingMethod',
    full_name='demo.GRPCDemo.BidirectionalStreamingMethod',
    index=3,
    containing_service=None,
    input_type=_REQUEST,
    output_type=_RESPONSE,
    serialized_options=None,
  ),
])
_sym_db.RegisterServiceDescriptor(_GRPCDEMO)

DESCRIPTOR.services_by_name['GRPCDemo'] = _GRPCDEMO

# @@protoc_insertion_point(module_scope)
@ -0,0 +1,106 @@ |
||||
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! |
||||
import grpc |
||||
|
||||
import demo_pb2 as demo__pb2 |
||||
|
||||
|
||||
class GRPCDemoStub(object):
    """Client-side stub for the demo.GRPCDemo service.

    `service` is used to define methods for GRPC services in a fixed format,
    similar to defining an interface in Golang.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # One callable per RPC; the unary/stream cardinality of each side
        # mirrors the `stream` annotations declared in demo.proto.
        self.SimpleMethod = channel.unary_unary(
            '/demo.GRPCDemo/SimpleMethod',
            request_serializer=demo__pb2.Request.SerializeToString,
            response_deserializer=demo__pb2.Response.FromString,
        )
        self.ClientStreamingMethod = channel.stream_unary(
            '/demo.GRPCDemo/ClientStreamingMethod',
            request_serializer=demo__pb2.Request.SerializeToString,
            response_deserializer=demo__pb2.Response.FromString,
        )
        self.ServerStreamingMethod = channel.unary_stream(
            '/demo.GRPCDemo/ServerStreamingMethod',
            request_serializer=demo__pb2.Request.SerializeToString,
            response_deserializer=demo__pb2.Response.FromString,
        )
        self.BidirectionalStreamingMethod = channel.stream_stream(
            '/demo.GRPCDemo/BidirectionalStreamingMethod',
            request_serializer=demo__pb2.Request.SerializeToString,
            response_deserializer=demo__pb2.Response.FromString,
        )
||||
|
||||
|
||||
class GRPCDemoServicer(object):
    """Server-side base class for the demo.GRPCDemo service.

    `service` is used to define methods for GRPC services in a fixed format,
    similar to defining an interface in Golang. Subclass this and override
    the methods below to implement the service.
    """

    def SimpleMethod(self, request, context):
        """unary-unary (In a single call, the client can only send the request
        once, and the server can only respond once.)
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def ClientStreamingMethod(self, request_iterator, context):
        """stream-unary (In a single call, the client can transfer data to the
        server several times, but the server can only return a response once.)
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def ServerStreamingMethod(self, request, context):
        """unary-stream (In a single call, the client can only transmit data
        to the server at one time, but the server can return the response many
        times.)
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def BidirectionalStreamingMethod(self, request_iterator, context):
        """stream-stream (In a single call, both client and server can send
        and receive data to each other multiple times.)
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
||||
|
||||
|
||||
def add_GRPCDemoServicer_to_server(servicer, server):
    """Register *servicer*'s handlers for demo.GRPCDemo on *server*."""
    # Map each RPC name to a handler whose unary/stream cardinality matches
    # the declarations in demo.proto.
    rpc_method_handlers = {
        'SimpleMethod': grpc.unary_unary_rpc_method_handler(
            servicer.SimpleMethod,
            request_deserializer=demo__pb2.Request.FromString,
            response_serializer=demo__pb2.Response.SerializeToString,
        ),
        'ClientStreamingMethod': grpc.stream_unary_rpc_method_handler(
            servicer.ClientStreamingMethod,
            request_deserializer=demo__pb2.Request.FromString,
            response_serializer=demo__pb2.Response.SerializeToString,
        ),
        'ServerStreamingMethod': grpc.unary_stream_rpc_method_handler(
            servicer.ServerStreamingMethod,
            request_deserializer=demo__pb2.Request.FromString,
            response_serializer=demo__pb2.Response.SerializeToString,
        ),
        'BidirectionalStreamingMethod': grpc.stream_stream_rpc_method_handler(
            servicer.BidirectionalStreamingMethod,
            request_deserializer=demo__pb2.Request.FromString,
            response_serializer=demo__pb2.Response.SerializeToString,
        ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
        'demo.GRPCDemo', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
@ -0,0 +1,114 @@ |
||||
# Copyright 2019 gRPC authors. |
||||
# |
||||
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
# you may not use this file except in compliance with the License. |
||||
# You may obtain a copy of the License at |
||||
# |
||||
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
# |
||||
# Unless required by applicable law or agreed to in writing, software |
||||
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
# See the License for the specific language governing permissions and |
||||
# limitations under the License. |
||||
"""The example of four ways of data transmission using gRPC in Python.""" |
||||
|
||||
from threading import Thread |
||||
from concurrent import futures |
||||
|
||||
import grpc |
||||
import demo_pb2_grpc |
||||
import demo_pb2 |
||||
|
||||
# Address to bind the demo server to; must match the client script.
SERVER_ADDRESS = 'localhost:23333'
# Arbitrary server identifier echoed to clients in every response.
SERVER_ID = 1
||||
|
||||
|
||||
class DemoServer(demo_pb2_grpc.GRPCDemoServicer):
    """Servicer implementing all four gRPC data-transmission patterns."""

    def SimpleMethod(self, request, context):
        # unary-unary: one request in, one response out.
        print("SimpleMethod called by client(%d) the message: %s" %
              (request.client_id, request.request_data))
        return demo_pb2.Response(
            server_id=SERVER_ID,
            response_data="Python server SimpleMethod Ok!!!!")

    def ClientStreamingMethod(self, request_iterator, context):
        # stream-unary: drain the client's request stream, then answer once.
        print("ClientStreamingMethod called by client...")
        for request in request_iterator:
            print("recv from client(%d), message= %s" %
                  (request.client_id, request.request_data))
        return demo_pb2.Response(
            server_id=SERVER_ID,
            response_data="Python server ClientStreamingMethod ok")

    def ServerStreamingMethod(self, request, context):
        # unary-stream: one request in, a lazily-produced stream of five
        # responses out.
        print("ServerStreamingMethod called by client(%d), message= %s" %
              (request.client_id, request.request_data))
        return (demo_pb2.Response(
            server_id=SERVER_ID,
            response_data=("send by Python server, message=%d" % seq))
                for seq in range(5))

    def BidirectionalStreamingMethod(self, request_iterator, context):
        # stream-stream: read the inbound stream on a helper thread while this
        # generator concurrently yields the outbound stream.
        print("BidirectionalStreamingMethod called by client...")

        def consume_requests():
            for request in request_iterator:
                print("recv from client(%d), message= %s" %
                      (request.client_id, request.request_data))

        receiver = Thread(target=consume_requests)
        receiver.start()

        for seq in range(5):
            yield demo_pb2.Response(
                server_id=SERVER_ID,
                response_data=("send by Python server, message= %d" % seq))

        receiver.join()
||||
|
||||
|
||||
def main():
    """Start the demo gRPC server and block until it terminates."""
    server = grpc.server(futures.ThreadPoolExecutor())
    demo_pb2_grpc.add_GRPCDemoServicer_to_server(DemoServer(), server)
    server.add_insecure_port(SERVER_ADDRESS)
    print("------------------start Python GRPC server")
    server.start()
    server.wait_for_termination()
    # On old grpcio releases `_Server` lacks wait_for_termination; if you hit
    # that AttributeError, replace the call above with:
    #   import time
    #   while 1:
    #       time.sleep(10)


if __name__ == '__main__':
    main()
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in new issue