Merge pull request #20401 from tanjunchen/clean-up

Fix spelling in comments
Lidi Zheng authored 6 years ago, committed by GitHub
commit f12df974ae
17 changed files (total lines changed per file in parentheses):

1. doc/interop-test-descriptions.md (2)
2. examples/csharp/HelloworldXamarin/iOS/AppDelegate.cs (2)
3. examples/python/cancellation/README.md (2)
4. examples/python/cancellation/hash_name.proto (2)
5. examples/python/data_transmission/README.en.md (2)
6. src/compiler/ruby_generator_string-inl.h (2)
7. test/cpp/util/cli_credentials.cc (4)
8. test/cpp/util/proto_file_parser.h (4)
9. tools/buildgen/plugins/check_attrs.py (2)
10. tools/gce/linux_kokoro_performance_worker_init.sh (2)
11. tools/github/pr_latency.py (2)
12. tools/internal_ci/linux/grpc_publish_packages.sh (2)
13. tools/interop_matrix/create_matrix_images.py (2)
14. tools/interop_matrix/create_testcases.sh (2)
15. tools/profiling/microbenchmarks/bm_diff/bm_main.py (4)
16. tools/run_tests/performance/README.md (2)
17. tools/run_tests/run_tests.py (4)

@@ -1121,7 +1121,7 @@ for the `SimpleRequest.response_type`. If the server does not support the
 Server gets the default SimpleRequest proto as the request. The content of the
 request is ignored. It returns the SimpleResponse proto with the payload set
 to current timestamp. The timestamp is an integer representing current time
-with nanosecond resolution. This integer is formated as ASCII decimal in the
+with nanosecond resolution. This integer is formatted as ASCII decimal in the
 response. The format is not really important as long as the response payload
 is different for each request. In addition it adds
 1. cache control headers such that the response can be cached by proxies in
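The paragraph in this hunk describes the cacheable unary behavior; a minimal sketch of such a handler in Python is shown below. It is not the interop server's actual implementation: the messages_pb2 import path and the cache-control value are assumptions.

```python
import time

# Hypothetical generated module for the interop test messages.
from src.proto.grpc.testing import messages_pb2


def CacheableUnaryCall(request, context):
    """Sketch of a handler whose payload differs for every request.

    The payload is the current time with nanosecond resolution, formatted
    as an ASCII decimal string, so a cached (stale) response can be detected
    by comparing payloads across requests.
    """
    timestamp_ns = time.time_ns()  # integer nanoseconds since the epoch
    # Allow intermediaries to cache the response (header value assumed).
    context.send_initial_metadata((("cache-control", "public, max-age=60"),))
    return messages_pb2.SimpleResponse(
        payload=messages_pb2.Payload(body=str(timestamp_ns).encode("ascii"))
    )
```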

@@ -58,7 +58,7 @@ namespace HelloworldXamarin.iOS
 public override void WillEnterForeground(UIApplication application)
 {
-// Called as part of the transiton from background to active state.
+// Called as part of the transition from background to active state.
 // Here you can undo many of the changes made on entering the background.
 }

@@ -76,7 +76,7 @@ catch the `RpcError` raised by the for loop upon cancellation.
 #### Cancellation on the Server Side
-A server is reponsible for cancellation in two ways. It must respond in some way
+A server is responsible for cancellation in two ways. It must respond in some way
 when a client initiates a cancellation, otherwise long-running computations
 could continue indefinitely.
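The responsibility described in this hunk can be sketched as follows. This is a minimal illustration, not the example's actual code: the servicer name, the generated hash_name_pb2 types and field names, and the try_one_candidate helper are all assumptions.

```python
import threading

import hash_name_pb2  # assumed generated module


def try_one_candidate(desired_name):
    """Placeholder for one unit of the long-running search."""
    return None


class HashFinder:
    """Sketch of a servicer that stops working when the client cancels."""

    def Find(self, request, context):
        stop_event = threading.Event()
        # add_callback fires when the RPC terminates, including when the
        # client cancels it, so the loop below cannot run indefinitely.
        context.add_callback(stop_event.set)

        while not stop_event.is_set():
            secret = try_one_candidate(request.desired_name)
            if secret is not None:
                # Response field name is assumed for illustration.
                return hash_name_pb2.HashNameResponse(secret=secret)
        # Cancelled: whatever is returned here is discarded by gRPC.
        return hash_name_pb2.HashNameResponse()
```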

@@ -21,7 +21,7 @@ message HashNameRequest {
 // The string that is desired in the secret's hash.
 string desired_name = 1;
-// The ideal Hamming distance betwen desired_name and the secret that will
+// The ideal Hamming distance between desired_name and the secret that will
 // be searched for.
 int32 ideal_hamming_distance = 2;
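For readers unfamiliar with the term in this comment: the Hamming distance is the number of positions at which two equal-length strings differ. A tiny standalone helper, purely illustrative and not part of the example, makes that concrete:

```python
def hamming_distance(a: str, b: str) -> int:
    """Number of positions at which two equal-length strings differ."""
    if len(a) != len(b):
        raise ValueError("Hamming distance requires equal-length inputs")
    return sum(x != y for x, y in zip(a, b))


# e.g. hamming_distance("doe", "dof") == 1
```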

@@ -1,6 +1,6 @@
 ## Data transmission demo for using gRPC in Python
-Four ways of data transmission when gRPC is used in Python. [Offical Guide](<https://grpc.io/docs/guides/concepts/#unary-rpc>)
+Four ways of data transmission when gRPC is used in Python. [Official Guide](<https://grpc.io/docs/guides/concepts/#unary-rpc>)
 - #### unary-unary
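The four transmission patterns this README goes on to list map onto the four servicer method shapes in gRPC Python. A schematic sketch follows; method and helper names are illustrative, not the demo's actual code.

```python
class DemoServicer:
    """Schematic signatures for the four gRPC call types in Python."""

    def UnaryUnary(self, request, context):
        # one request in, one response out
        return make_response(request)                  # assumed helper

    def UnaryStream(self, request, context):
        # one request in, a stream of responses out (generator)
        for part in split_into_parts(request):         # assumed helper
            yield part

    def StreamUnary(self, request_iterator, context):
        # a stream of requests in, one response out
        return combine(list(request_iterator))         # assumed helper

    def StreamStream(self, request_iterator, context):
        # a stream of requests in, a stream of responses out
        for request in request_iterator:
            yield make_response(request)               # assumed helper
```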

@@ -106,7 +106,7 @@ inline grpc::string RubyPackage(const grpc::protobuf::FileDescriptor* file) {
 if (file->options().has_ruby_package()) {
 package_name = file->options().ruby_package();
-// If :: is in the package convert the Ruby formated name
+// If :: is in the package convert the Ruby formatted name
 // -> A::B::C
 // to use the dot seperator notation
 // -> A.B.C

@@ -41,11 +41,11 @@ DEFINE_string(
 "validation.");
 DEFINE_string(
 ssl_client_cert, "",
-"If not empty, load this PEM formated client certificate file. Requires "
+"If not empty, load this PEM formatted client certificate file. Requires "
 "use of --ssl_client_key.");
 DEFINE_string(
 ssl_client_key, "",
-"If not empty, load this PEM formated private key. Requires use of "
+"If not empty, load this PEM formatted private key. Requires use of "
 "--ssl_client_cert");
 DEFINE_string(
 channel_creds_type, "",

@@ -63,7 +63,7 @@ class ProtoFileParser {
 /// \param is_json_format if \c true the \c formatted_proto is treated as a
 /// json-formatted proto, otherwise it is treated as a text-formatted
 /// proto
-/// \return the serialised binary proto represenation of \c formatted_proto
+/// \return the serialised binary proto representation of \c formatted_proto
 grpc::string GetSerializedProtoFromMethod(const grpc::string& method,
 const grpc::string& formatted_proto,
 bool is_request,
@@ -72,7 +72,7 @@ class ProtoFileParser {
 /// Converts a text or json string to its proto representation for the given
 /// message type.
 /// \param formatted_proto the text- or json-formatted proto string
-/// \return the serialised binary proto represenation of \c formatted_proto
+/// \return the serialised binary proto representation of \c formatted_proto
 grpc::string GetSerializedProtoFromMessageType(
 const grpc::string& message_type_name,
 const grpc::string& formatted_proto, bool is_json_format);

@@ -112,7 +112,7 @@ def mako_plugin(dictionary):
 This validates that filegroups, libs, and target can have only valid
 attributes. This is mainly for preventing build.yaml from having
-unnecessary and misleading attributes accidently.
+unnecessary and misleading attributes accidentally.
 """
 errors = []

@@ -92,7 +92,7 @@ sudo pypy get-pip.py
 sudo pypy -m pip install tabulate
 sudo pypy -m pip install google-api-python-client oauth2client
 # TODO(jtattermusch): for some reason, we need psutil installed
-# in pypy for kokoro_log_reader.py (strange, because the comand is
+# in pypy for kokoro_log_reader.py (strange, because the command is
 # "python kokoro_log_reader.py" and pypy is not the system default)
 sudo pypy -m pip install psutil

@@ -18,7 +18,7 @@ You'll need a github API token to avoid being rate-limited. See
 https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line/
 This script goes over the most recent 100 pull requests. For PRs with a single
-commit, it uses the PR's creation as the initial time; othewise, it uses the
+commit, it uses the PR's creation as the initial time; otherwise, it uses the
 date of the last commit. This is somewhat fragile, and imposed by the fact that
 GitHub reports a PR's updated timestamp for any event that modifies the PR (e.g.
 comments), not just the addition of new commits.
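The base-time heuristic described in that docstring boils down to something like the following sketch, written against the GitHub REST API's JSON field names rather than the script's actual code; the dateutil dependency is an assumption.

```python
from dateutil import parser as date_parser  # assumed dependency


def base_time(pr, commits):
    """Pick the timestamp latency should be measured from.

    For single-commit PRs the PR creation time is a good proxy for when the
    work landed; otherwise the last commit's date is used, since GitHub's
    'updated_at' moves on unrelated events such as comments.
    """
    if len(commits) == 1:
        return date_parser.parse(pr["created_at"])
    return date_parser.parse(commits[-1]["commit"]["committer"]["date"])
```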

@@ -223,7 +223,7 @@ EOF
 # Upload the current build artifacts
 gsutil -m cp -r "$LOCAL_STAGING_TEMPDIR/${BUILD_RELPATH%%/*}" "$GCS_ARCHIVE_ROOT"
-# Upload directory indicies for subdirectories
+# Upload directory indices for subdirectories
 (
 cd "$LOCAL_BUILD_ROOT"
 find * -type d | while read -r directory

@@ -25,7 +25,7 @@ import subprocess
 import sys
 import tempfile
-# Langauage Runtime Matrix
+# Language Runtime Matrix
 import client_matrix
 python_util_dir = os.path.abspath(

@@ -19,7 +19,7 @@
 # Params:
 # LANG - The language.
 # SKIP_TEST - If set, skip running the test cases for sanity.
-# RELEASE - Create testcase for specific release, defautl to 'master'.
+# RELEASE - Create testcase for specific release, default to 'master'.
 # KEEP_IMAGE - Do not clean local docker image created for the test cases.
 set -e

@@ -66,7 +66,7 @@ def _args():
 '--old',
 default='old',
 type=str,
-help='Name of baseline run to compare to. Ususally just called "old"')
+help='Name of baseline run to compare to. Usually just called "old"')
 argp.add_argument(
 '-r',
 '--regex',
@@ -91,7 +91,7 @@ def _args():
 '--pr_comment_name',
 type=str,
 default="microbenchmarks",
-help='Name that Jenkins will use to commen on the PR')
+help='Name that Jenkins will use to comment on the PR')
 argp.add_argument('--counters', dest='counters', action='store_true')
 argp.add_argument('--no-counters', dest='counters', action='store_false')
 argp.set_defaults(counters=True)

@@ -24,7 +24,7 @@ GCE "worker" machines that are in the same zone.
 * For example, to start the grpc-go benchmark worker:
 [grpc-go worker main.go](https://github.com/grpc/grpc-go/blob/master/benchmark/worker/main.go) --driver_port <driver_port>
-#### Comands to start workers in different languages:
+#### Commands to start workers in different languages:
 * Note that these commands are what the top-level
 [run_performance_test.py](../run_performance_tests.py) script uses to
 build and run different workers through the

@@ -1380,7 +1380,7 @@ def _docker_arch_suffix(arch):
 def runs_per_test_type(arg_str):
-"""Auxilary function to parse the "runs_per_test" flag.
+"""Auxiliary function to parse the "runs_per_test" flag.
 Returns:
 A positive integer or 0, the latter indicating an infinite number of
@@ -1786,7 +1786,7 @@ def _shut_down_legacy_server(legacy_server_port):
 def _calculate_num_runs_failures(list_of_results):
-"""Caculate number of runs and failures for a particular test.
+"""Calculate number of runs and failures for a particular test.
 Args:
 list_of_results: (List) of JobResult object.
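The truncated docstring for runs_per_test_type describes a small argparse type function. A hedged reconstruction of the idea, not the file's actual implementation (the 'inf' spelling in particular is an assumption), might look like:

```python
import argparse


def runs_per_test_type(arg_str):
    """Parse the "runs_per_test" flag.

    Returns a positive integer, or 0 (meaning an infinite number of runs)
    when the argument is 'inf'.
    """
    if arg_str == "inf":
        return 0
    try:
        n = int(arg_str)
        if n <= 0:
            raise ValueError
        return n
    except ValueError:
        raise argparse.ArgumentTypeError(
            "'{}' is not a positive integer or 'inf'".format(arg_str)
        )
```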
