diff --git a/.clang_complete b/.clang_complete deleted file mode 100644 index 2769e899722..00000000000 --- a/.clang_complete +++ /dev/null @@ -1,20 +0,0 @@ --Wall --Wc++-compat --I. --Igens --Iinclude --Isrc/core/ext/upb-generated --Ithird_party/abseil-cpp --Ithird_party/address_sorting/include --Ithird_party/benchmark/include --Ithird_party/boringssl-with-bazel/src/include --Ithird_party/cares --Ithird_party/cares/cares --Ithird_party/googletest --Ithird_party/googletest/googlemock/include --Ithird_party/googletest/googletest/include --Ithird_party/googletest/include --Ithird_party/protobuf/src --Ithird_party/re2 --Ithird_party/upb --Ithird_party/zlib diff --git a/.gitignore b/.gitignore index c0ec88139f2..cff0222ef8e 100644 --- a/.gitignore +++ b/.gitignore @@ -151,3 +151,6 @@ BenchmarkDotNet.Artifacts/ # pyenv config .python-version + +# clang JSON compilation database file +compile_commands.json diff --git a/bazel/grpc_deps.bzl b/bazel/grpc_deps.bzl index ed8786ed684..75d1b4dbbe3 100644 --- a/bazel/grpc_deps.bzl +++ b/bazel/grpc_deps.bzl @@ -269,6 +269,17 @@ def grpc_deps(): sha256 = "97e70364e9249702246c0e9444bccdc4b847bed1eb03c5a3ece4f83dfe6abc44", ) + if "bazel_compdb" not in native.existing_rules(): + http_archive( + name = "bazel_compdb", + sha256 = "bcecfd622c4ef272fd4ba42726a52e140b961c4eac23025f18b346c968a8cfb4", + strip_prefix = "bazel-compilation-database-0.4.5", + urls = [ + "https://storage.googleapis.com/grpc-bazel-mirror/github.com/grailbio/bazel-compilation-database/archive/0.4.5.tar.gz", + "https://github.com/grailbio/bazel-compilation-database/archive/0.4.5.tar.gz", + ], + ) + if "io_opencensus_cpp" not in native.existing_rules(): http_archive( name = "io_opencensus_cpp", diff --git a/templates/tools/dockerfile/grpc_clang_tidy/Dockerfile.template b/templates/tools/dockerfile/grpc_clang_tidy/Dockerfile.template index e56f76d63cb..4d5c83fda04 100644 --- a/templates/tools/dockerfile/grpc_clang_tidy/Dockerfile.template +++ b/templates/tools/dockerfile/grpc_clang_tidy/Dockerfile.template @@ -20,7 +20,7 @@ # This is because clang-tidy 7.0 started treating compiler errors as tidy errors # and there are a couple of files which are not properly compiled via tidy so it # should be using 6.0 version until all compilation errors are addressed. - RUN apt-get update && apt-get install -y clang-tidy-6.0 + RUN apt-get update && apt-get install -y clang-tidy-6.0 jq ENV CLANG_TIDY=clang-tidy-6.0 ADD clang_tidy_all_the_things.sh / diff --git a/templates/tools/dockerfile/test/sanity/Dockerfile.template b/templates/tools/dockerfile/test/sanity/Dockerfile.template index 453505d7c9e..314f3ab625f 100644 --- a/templates/tools/dockerfile/test/sanity/Dockerfile.template +++ b/templates/tools/dockerfile/test/sanity/Dockerfile.template @@ -40,7 +40,7 @@ # This is because clang-tidy 7.0 started treating compiler errors as tidy errors # and there are a couple of files which are not properly compiled via tidy so it # should be using 6.0 version until all compilation errors are addressed. 
- RUN apt-get install -y clang-tidy-6.0 + RUN apt-get install -y clang-tidy-6.0 jq ENV CLANG_TIDY=clang-tidy-6.0 diff --git a/test/cpp/end2end/client_lb_end2end_test.cc b/test/cpp/end2end/client_lb_end2end_test.cc index c6ee552a48e..9140cb4084d 100644 --- a/test/cpp/end2end/client_lb_end2end_test.cc +++ b/test/cpp/end2end/client_lb_end2end_test.cc @@ -412,7 +412,8 @@ class ClientLbEnd2endTest : public ::testing::Test { } bool WaitForChannelState( - Channel* channel, std::function predicate, + Channel* channel, + const std::function& predicate, bool try_to_connect = false, int timeout_seconds = 5) { const gpr_timespec deadline = grpc_timeout_seconds_to_deadline(timeout_seconds); @@ -1759,13 +1760,11 @@ class ClientLbInterceptTrailingMetadataTest : public ClientLbEnd2endTest { self->load_report_->set_rps(backend_metric_data->requests_per_second); for (const auto& p : backend_metric_data->request_cost) { std::string name = std::string(p.first); - (*self->load_report_->mutable_request_cost())[std::move(name)] = - p.second; + (*self->load_report_->mutable_request_cost())[name] = p.second; } for (const auto& p : backend_metric_data->utilization) { std::string name = std::string(p.first); - (*self->load_report_->mutable_utilization())[std::move(name)] = - p.second; + (*self->load_report_->mutable_utilization())[name] = p.second; } } } diff --git a/test/cpp/end2end/message_allocator_end2end_test.cc b/test/cpp/end2end/message_allocator_end2end_test.cc index c15066794bc..2c022940b29 100644 --- a/test/cpp/end2end/message_allocator_end2end_test.cc +++ b/test/cpp/end2end/message_allocator_end2end_test.cc @@ -69,7 +69,7 @@ class CallbackTestServiceImpl std::function mutator) { - allocator_mutator_ = mutator; + allocator_mutator_ = std::move(mutator); } experimental::ServerUnaryReactor* Echo( diff --git a/test/cpp/end2end/xds_end2end_test.cc b/test/cpp/end2end/xds_end2end_test.cc index 4310bd4bf4f..0c1c033de02 100644 --- a/test/cpp/end2end/xds_end2end_test.cc +++ b/test/cpp/end2end/xds_end2end_test.cc @@ -440,7 +440,7 @@ class AdsServiceImpl : public std::enable_shared_from_this { struct EdsResourceArgs { struct Locality { - Locality(const std::string& sub_zone, std::vector ports, + Locality(std::string sub_zone, std::vector ports, int lb_weight = kDefaultLocalityWeight, int priority = kDefaultLocalityPriority, std::vector health_statuses = {}) @@ -1508,7 +1508,7 @@ class XdsEnd2endTest : public ::testing::TestWithParam { RpcOptions& set_metadata( std::vector> rpc_metadata) { - metadata = rpc_metadata; + metadata = std::move(rpc_metadata); return *this; } }; @@ -4811,6 +4811,7 @@ TEST_P(LocalityMapTest, UpdateMap) { const double kTotalLocalityWeight0 = std::accumulate(kLocalityWeights0.begin(), kLocalityWeights0.end(), 0); std::vector locality_weight_rate_0; + locality_weight_rate_0.reserve(kLocalityWeights0.size()); for (int weight : kLocalityWeights0) { locality_weight_rate_0.push_back(weight / kTotalLocalityWeight0); } diff --git a/test/cpp/interop/http2_client.cc b/test/cpp/interop/http2_client.cc index dbe9f7f5620..3d6ac9f9660 100644 --- a/test/cpp/interop/http2_client.cc +++ b/test/cpp/interop/http2_client.cc @@ -43,7 +43,7 @@ const int kLargeResponseSize = 314159; } // namespace Http2Client::ServiceStub::ServiceStub(const std::shared_ptr& channel) - : channel_(std::move(channel)) { + : channel_(channel) { stub_ = TestService::NewStub(channel); } @@ -51,7 +51,7 @@ TestService::Stub* Http2Client::ServiceStub::Get() { return stub_.get(); } Http2Client::Http2Client(const std::shared_ptr& channel) : 
serviceStub_(channel), - channel_(std::move(channel)), + channel_(channel), defaultRequest_(BuildDefaultRequest()) {} bool Http2Client::AssertStatusCode(const Status& s, StatusCode expected_code) { @@ -159,7 +159,7 @@ bool Http2Client::DoMaxStreams() { std::string(kLargeResponseSize, '\0')); std::vector test_threads; - + test_threads.reserve(10); for (int i = 0; i < 10; i++) { test_threads.emplace_back( std::thread(&Http2Client::MaxStreamsWorker, this, channel_)); diff --git a/test/cpp/qps/BUILD b/test/cpp/qps/BUILD index ff4ee9836c0..974733cd2d5 100644 --- a/test/cpp/qps/BUILD +++ b/test/cpp/qps/BUILD @@ -116,7 +116,10 @@ grpc_cc_library( "histogram.h", "stats.h", ], - deps = ["//test/core/util:grpc_test_util"], + deps = [ + "//src/proto/grpc/testing:stats_proto", + "//test/core/util:grpc_test_util", + ], ) grpc_cc_binary( diff --git a/test/cpp/qps/histogram.h b/test/cpp/qps/histogram.h index 6275128f340..55c19844bd9 100644 --- a/test/cpp/qps/histogram.h +++ b/test/cpp/qps/histogram.h @@ -39,7 +39,9 @@ class Histogram { impl_ = grpc_histogram_create(default_resolution(), default_max_possible()); } - Histogram(Histogram&& other) : impl_(other.impl_) { other.impl_ = nullptr; } + Histogram(Histogram&& other) noexcept : impl_(other.impl_) { + other.impl_ = nullptr; + } void Merge(const Histogram& h) { grpc_histogram_merge(impl_, h.impl_); } void Add(double value) { grpc_histogram_add(impl_, value); } diff --git a/test/cpp/util/grpc_tool.cc b/test/cpp/util/grpc_tool.cc index fceee0c82b8..3b52defcfad 100644 --- a/test/cpp/util/grpc_tool.cc +++ b/test/cpp/util/grpc_tool.cc @@ -86,26 +86,26 @@ class GrpcTool { virtual ~GrpcTool() {} bool Help(int argc, const char** argv, const CliCredentials& cred, - GrpcToolOutputCallback callback); + const GrpcToolOutputCallback& callback); bool CallMethod(int argc, const char** argv, const CliCredentials& cred, - GrpcToolOutputCallback callback); + const GrpcToolOutputCallback& callback); bool ListServices(int argc, const char** argv, const CliCredentials& cred, - GrpcToolOutputCallback callback); + const GrpcToolOutputCallback& callback); bool PrintType(int argc, const char** argv, const CliCredentials& cred, - GrpcToolOutputCallback callback); + const GrpcToolOutputCallback& callback); // TODO(zyc): implement the following methods // bool ListServices(int argc, const char** argv, GrpcToolOutputCallback // callback); // bool PrintTypeId(int argc, const char** argv, GrpcToolOutputCallback // callback); bool ParseMessage(int argc, const char** argv, const CliCredentials& cred, - GrpcToolOutputCallback callback); + const GrpcToolOutputCallback& callback); bool ToText(int argc, const char** argv, const CliCredentials& cred, - GrpcToolOutputCallback callback); + const GrpcToolOutputCallback& callback); bool ToJson(int argc, const char** argv, const CliCredentials& cred, - GrpcToolOutputCallback callback); + const GrpcToolOutputCallback& callback); bool ToBinary(int argc, const char** argv, const CliCredentials& cred, - GrpcToolOutputCallback callback); + const GrpcToolOutputCallback& callback); void SetPrintCommandMode(int exit_status) { print_command_usage_ = true; @@ -193,8 +193,8 @@ void PrintMetadata(const T& m, const std::string& message) { } void ReadResponse(CliCall* call, const std::string& method_name, - GrpcToolOutputCallback callback, ProtoFileParser* parser, - gpr_mu* parser_mu, bool print_mode) { + const GrpcToolOutputCallback& callback, + ProtoFileParser* parser, gpr_mu* parser_mu, bool print_mode) { std::string serialized_response_proto; 
std::multimap server_initial_metadata; @@ -287,7 +287,7 @@ const Command* FindCommand(const std::string& name) { } // namespace int GrpcToolMainLib(int argc, const char** argv, const CliCredentials& cred, - GrpcToolOutputCallback callback) { + const GrpcToolOutputCallback& callback) { if (argc < 2) { Usage("No command specified"); } @@ -324,7 +324,7 @@ void GrpcTool::CommandUsage(const std::string& usage) const { } bool GrpcTool::Help(int argc, const char** argv, const CliCredentials& cred, - GrpcToolOutputCallback callback) { + const GrpcToolOutputCallback& callback) { CommandUsage( "Print help\n" " grpc_cli help [subcommand]\n"); @@ -344,7 +344,7 @@ bool GrpcTool::Help(int argc, const char** argv, const CliCredentials& cred, bool GrpcTool::ListServices(int argc, const char** argv, const CliCredentials& cred, - GrpcToolOutputCallback callback) { + const GrpcToolOutputCallback& callback) { CommandUsage( "List services\n" " grpc_cli ls
[[/]]\n" @@ -445,7 +445,7 @@ bool GrpcTool::ListServices(int argc, const char** argv, bool GrpcTool::PrintType(int /*argc*/, const char** argv, const CliCredentials& cred, - GrpcToolOutputCallback callback) { + const GrpcToolOutputCallback& callback) { CommandUsage( "Print type\n" " grpc_cli type
\n" @@ -473,7 +473,7 @@ bool GrpcTool::PrintType(int /*argc*/, const char** argv, bool GrpcTool::CallMethod(int argc, const char** argv, const CliCredentials& cred, - GrpcToolOutputCallback callback) { + const GrpcToolOutputCallback& callback) { CommandUsage( "Call method\n" " grpc_cli call
[.] \n" @@ -836,7 +836,7 @@ bool GrpcTool::CallMethod(int argc, const char** argv, bool GrpcTool::ParseMessage(int argc, const char** argv, const CliCredentials& cred, - GrpcToolOutputCallback callback) { + const GrpcToolOutputCallback& callback) { CommandUsage( "Parse message\n" " grpc_cli parse
[]\n" @@ -927,7 +927,7 @@ bool GrpcTool::ParseMessage(int argc, const char** argv, } bool GrpcTool::ToText(int argc, const char** argv, const CliCredentials& cred, - GrpcToolOutputCallback callback) { + const GrpcToolOutputCallback& callback) { CommandUsage( "Convert binary message to text\n" " grpc_cli totext \n" @@ -945,7 +945,7 @@ bool GrpcTool::ToText(int argc, const char** argv, const CliCredentials& cred, } bool GrpcTool::ToJson(int argc, const char** argv, const CliCredentials& cred, - GrpcToolOutputCallback callback) { + const GrpcToolOutputCallback& callback) { CommandUsage( "Convert binary message to json\n" " grpc_cli tojson \n" @@ -964,7 +964,7 @@ bool GrpcTool::ToJson(int argc, const char** argv, const CliCredentials& cred, } bool GrpcTool::ToBinary(int argc, const char** argv, const CliCredentials& cred, - GrpcToolOutputCallback callback) { + const GrpcToolOutputCallback& callback) { CommandUsage( "Convert text message to binary\n" " grpc_cli tobinary []\n" diff --git a/test/cpp/util/grpc_tool.h b/test/cpp/util/grpc_tool.h index e998807cc61..124ace3f580 100644 --- a/test/cpp/util/grpc_tool.h +++ b/test/cpp/util/grpc_tool.h @@ -31,7 +31,7 @@ namespace testing { typedef std::function GrpcToolOutputCallback; int GrpcToolMainLib(int argc, const char** argv, const CliCredentials& cred, - GrpcToolOutputCallback callback); + const GrpcToolOutputCallback& callback); } // namespace testing } // namespace grpc diff --git a/tools/bazel.rc b/tools/bazel.rc index 02795e434dc..7b687484846 100644 --- a/tools/bazel.rc +++ b/tools/bazel.rc @@ -103,3 +103,6 @@ build:mutrace --copt=-O3 build:mutrace --copt=-fno-omit-frame-pointer build:mutrace --copt=-DNDEBUG build:mutrace --linkopt=-rdynamic + +# Compile database generation config +build:compdb --build_tag_filters=-nocompdb diff --git a/tools/distrib/clang_tidy_code.sh b/tools/distrib/clang_tidy_code.sh index 9262b6bd3e1..dade592954a 100755 --- a/tools/distrib/clang_tidy_code.sh +++ b/tools/distrib/clang_tidy_code.sh @@ -21,6 +21,22 @@ set -ex cd $(dirname $0)/../.. REPO_ROOT=$(pwd) +# grep targets with manual tag, which is not included in a result of bazel build using ... +# let's get a list of them using query command and pass it to gen_compilation_database.py +export MANUAL_TARGETS=$(bazel query 'attr("tags", "manual", tests(//test/cpp/...))' | grep -v _on_ios) + +# generate a clang compilation database for all C/C++ sources in the repo. +tools/distrib/gen_compilation_database.py \ + --include_headers \ + --ignore_system_headers \ + --dedup_targets \ + "//:*" \ + "//src/core/..." \ + "//src/compiler/..." \ + "//test/core/..." \ + "//test/cpp/..." \ + $MANUAL_TARGETS + if [ "$CLANG_TIDY_SKIP_DOCKER" == "" ] then # build clang-tidy docker image @@ -29,7 +45,15 @@ then # run clang-tidy against the checked out codebase # when modifying the checked-out files, the current user will be impersonated # so that the updated files don't end up being owned by "root". 
- docker run -e TEST="$TEST" -e CHANGED_FILES="$CHANGED_FILES" -e CLANG_TIDY_ROOT="/local-code" --rm=true -v "${REPO_ROOT}":/local-code --user "$(id -u):$(id -g)" -t grpc_clang_tidy /clang_tidy_all_the_things.sh "$@" + docker run \ + -e TEST="$TEST" \ + -e CHANGED_FILES="$CHANGED_FILES" \ + -e CLANG_TIDY_ROOT="/local-code" \ + --rm=true \ + -v "${REPO_ROOT}":/local-code \ + -v "${HOME/.cache/bazel}":"${HOME/.cache/bazel}" \ + --user "$(id -u):$(id -g)" \ + -t grpc_clang_tidy /clang_tidy_all_the_things.sh "$@" else CLANG_TIDY_ROOT="${REPO_ROOT}" tools/dockerfile/grpc_clang_tidy/clang_tidy_all_the_things.sh "$@" fi diff --git a/tools/distrib/gen_compilation_database.py b/tools/distrib/gen_compilation_database.py new file mode 100755 index 00000000000..d8d2761b8e4 --- /dev/null +++ b/tools/distrib/gen_compilation_database.py @@ -0,0 +1,135 @@ +#!/usr/bin/env python3 + +# Copyright 2020 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This is based on the script on the Envoy project +# https://github.com/envoyproxy/envoy/blob/master/tools/gen_compilation_database.py + +import argparse +import glob +import json +import logging +import os +import re +import shlex +import subprocess +from pathlib import Path + +RE_INCLUDE_SYSTEM = re.compile("\s*-I\s+/usr/[^ ]+") + + +# This method is equivalent to https://github.com/grailbio/bazel-compilation-database/blob/master/generate.sh +def generateCompilationDatabase(args): + # We need to download all remote outputs for generated source code. + # This option lives here to override those specified in bazelrc. 
+ bazel_options = shlex.split(os.environ.get("BAZEL_BUILD_OPTIONS", "")) + [ + "--config=compdb", + "--remote_download_outputs=all", + ] + + subprocess.check_call(["bazel", "build"] + bazel_options + [ + "--aspects=@bazel_compdb//:aspects.bzl%compilation_database_aspect", + "--output_groups=compdb_files,header_files" + ] + args.bazel_targets) + + execroot = subprocess.check_output(["bazel", "info", "execution_root"] + + bazel_options).decode().strip() + + compdb = [] + for compdb_file in Path(execroot).glob("**/*.compile_commands.json"): + compdb.extend( + json.loads( + "[" + + compdb_file.read_text().replace("__EXEC_ROOT__", execroot) + + "]")) + + if args.dedup_targets: + compdb_map = {target["file"]: target for target in compdb} + compdb = list(compdb_map.values()) + + return compdb + + +def isHeader(filename): + for ext in (".h", ".hh", ".hpp", ".hxx"): + if filename.endswith(ext): + return True + return False + + +def isCompileTarget(target, args): + filename = target["file"] + if not args.include_headers and isHeader(filename): + return False + if not args.include_genfiles: + if filename.startswith("bazel-out/"): + return False + if not args.include_external: + if filename.startswith("external/"): + return False + return True + + +def modifyCompileCommand(target, args): + cc, options = target["command"].split(" ", 1) + + # Workaround for bazel added C++11 options, those doesn't affect build itself but + # clang-tidy will misinterpret them. + options = options.replace("-std=c++0x ", "") + options = options.replace("-std=c++11 ", "") + + if args.vscode: + # Visual Studio Code doesn't seem to like "-iquote". Replace it with + # old-style "-I". + options = options.replace("-iquote ", "-I ") + + if args.ignore_system_headers: + # Remove all include options for /usr/* directories + options = RE_INCLUDE_SYSTEM.sub("", options) + + if isHeader(target["file"]): + options += " -Wno-pragma-once-outside-header -Wno-unused-const-variable" + options += " -Wno-unused-function" + if not target["file"].startswith("external/"): + # *.h file is treated as C header by default while our headers files are all C++11. 
+ options = "-x c++ -std=c++11 -fexceptions " + options + + target["command"] = " ".join([cc, options]) + return target + + +def fixCompilationDatabase(args, db): + db = [ + modifyCompileCommand(target, args) + for target in db + if isCompileTarget(target, args) + ] + + with open("compile_commands.json", "w") as db_file: + json.dump(db, db_file, indent=2) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description='Generate JSON compilation database') + parser.add_argument('--include_external', action='store_true') + parser.add_argument('--include_genfiles', action='store_true') + parser.add_argument('--include_headers', action='store_true') + parser.add_argument('--vscode', action='store_true') + parser.add_argument('--ignore_system_headers', action='store_true') + parser.add_argument('--dedup_targets', action='store_true') + parser.add_argument('bazel_targets', nargs='*', default=["//..."]) + args = parser.parse_args() + fixCompilationDatabase(args, generateCompilationDatabase(args)) diff --git a/tools/distrib/run_clang_tidy.py b/tools/distrib/run_clang_tidy.py index 6e54a7593db..6cdddcc5781 100755 --- a/tools/distrib/run_clang_tidy.py +++ b/tools/distrib/run_clang_tidy.py @@ -24,17 +24,6 @@ sys.path.append( 'python_utils')) import jobset -extra_args = [ - '-x', - 'c++', - '-std=c++11', -] -with open('.clang_complete') as f: - for line in f: - line = line.strip() - if line.startswith('-I'): - extra_args.append(line) - clang_tidy = os.environ.get('CLANG_TIDY', 'clang-tidy') argp = argparse.ArgumentParser(description='Run clang-tidy against core') @@ -50,17 +39,19 @@ args = argp.parse_args() cmdline = [ clang_tidy, -] + ['--extra-arg-before=%s' % arg for arg in extra_args] +] if args.fix: cmdline.append('--fix') jobs = [] for filename in args.files: - jobs.append(jobset.JobSpec( - cmdline + [filename], - shortname=filename, - )) #verbose_success=True)) + jobs.append( + jobset.JobSpec( + cmdline + [filename], + shortname=filename, + timeout_seconds=15 * 60, + )) num_fails, res_set = jobset.run(jobs, maxjobs=args.jobs) sys.exit(num_fails) diff --git a/tools/dockerfile/grpc_clang_tidy/Dockerfile b/tools/dockerfile/grpc_clang_tidy/Dockerfile index 494dc6423a9..54eb6a52e06 100644 --- a/tools/dockerfile/grpc_clang_tidy/Dockerfile +++ b/tools/dockerfile/grpc_clang_tidy/Dockerfile @@ -65,7 +65,7 @@ RUN mkdir /var/local/jenkins # This is because clang-tidy 7.0 started treating compiler errors as tidy errors # and there are a couple of files which are not properly compiled via tidy so it # should be using 6.0 version until all compilation errors are addressed. -RUN apt-get update && apt-get install -y clang-tidy-6.0 +RUN apt-get update && apt-get install -y clang-tidy-6.0 jq ENV CLANG_TIDY=clang-tidy-6.0 ADD clang_tidy_all_the_things.sh / diff --git a/tools/dockerfile/grpc_clang_tidy/clang_tidy_all_the_things.sh b/tools/dockerfile/grpc_clang_tidy/clang_tidy_all_the_things.sh index cad2b2b3732..947ffbafb71 100755 --- a/tools/dockerfile/grpc_clang_tidy/clang_tidy_all_the_things.sh +++ b/tools/dockerfile/grpc_clang_tidy/clang_tidy_all_the_things.sh @@ -20,5 +20,9 @@ CLANG_TIDY=${CLANG_TIDY:-clang-tidy} cd ${CLANG_TIDY_ROOT} -find src/core src/cpp test/core test/cpp ! -path 'src/core/ext/upb-generated/*' ! 
-path 'src/core/ext/upbdefs-generated/*' -name '*.h' -or -name '*.cc' -print0 \ - | xargs -0 tools/distrib/run_clang_tidy.py "$@" +# run clang tidy for all source files +cat compile_commands.json | jq -r '.[].file' \ + | grep -E "(^src/core/|^src/cpp/|^test/core/|^test/cpp/)" \ + | grep -v -E "/upb-generated/|/upbdefs-generated/" \ + | sort \ + | xargs tools/distrib/run_clang_tidy.py "$@" diff --git a/tools/dockerfile/test/sanity/Dockerfile b/tools/dockerfile/test/sanity/Dockerfile index 7a75aba3655..87ce4d0d0f9 100644 --- a/tools/dockerfile/test/sanity/Dockerfile +++ b/tools/dockerfile/test/sanity/Dockerfile @@ -88,7 +88,7 @@ RUN apt-get install -y clang clang-format # This is because clang-tidy 7.0 started treating compiler errors as tidy errors # and there are a couple of files which are not properly compiled via tidy so it # should be using 6.0 version until all compilation errors are addressed. -RUN apt-get install -y clang-tidy-6.0 +RUN apt-get install -y clang-tidy-6.0 jq ENV CLANG_TIDY=clang-tidy-6.0 diff --git a/tools/run_tests/sanity/check_bazel_workspace.py b/tools/run_tests/sanity/check_bazel_workspace.py index 69afd0860e3..b784a15eec9 100755 --- a/tools/run_tests/sanity/check_bazel_workspace.py +++ b/tools/run_tests/sanity/check_bazel_workspace.py @@ -35,6 +35,7 @@ git_submodule_hashes = { _BAZEL_SKYLIB_DEP_NAME = 'bazel_skylib' _BAZEL_TOOLCHAINS_DEP_NAME = 'bazel_toolchains' +_BAZEL_COMPDB_DEP_NAME = 'bazel_compdb' _TWISTED_TWISTED_DEP_NAME = 'com_github_twisted_twisted' _YAML_PYYAML_DEP_NAME = 'com_github_yaml_pyyaml' _TWISTED_INCREMENTAL_DEP_NAME = 'com_github_twisted_incremental' @@ -56,6 +57,7 @@ _GRPC_DEP_NAMES = [ 'envoy_api', _BAZEL_SKYLIB_DEP_NAME, _BAZEL_TOOLCHAINS_DEP_NAME, + _BAZEL_COMPDB_DEP_NAME, _TWISTED_TWISTED_DEP_NAME, _YAML_PYYAML_DEP_NAME, _TWISTED_INCREMENTAL_DEP_NAME, @@ -75,6 +77,7 @@ _GRPC_BAZEL_ONLY_DEPS = [ 'io_opencensus_cpp', _BAZEL_SKYLIB_DEP_NAME, _BAZEL_TOOLCHAINS_DEP_NAME, + _BAZEL_COMPDB_DEP_NAME, _TWISTED_TWISTED_DEP_NAME, _YAML_PYYAML_DEP_NAME, _TWISTED_INCREMENTAL_DEP_NAME, @@ -149,7 +152,10 @@ build_rules = { exec(bazel_file) in build_rules for name in _GRPC_DEP_NAMES: assert name in names_and_urls.keys() -assert len(_GRPC_DEP_NAMES) == len(names_and_urls.keys()) +if len(_GRPC_DEP_NAMES) != len(names_and_urls.keys()): + assert False, "Diff: " + (str(set(_GRPC_DEP_NAMES) - set(names_and_urls)) + + "," + + str(set(names_and_urls) - set(_GRPC_DEP_NAMES))) # There are some "bazel-only" deps that are exceptions to this sanity check, # we don't require that there is a corresponding git module for these. diff --git a/tools/run_tests/sanity/sanity_tests.yaml b/tools/run_tests/sanity/sanity_tests.yaml index b94e7b3a739..9b6d3008233 100644 --- a/tools/run_tests/sanity/sanity_tests.yaml +++ b/tools/run_tests/sanity/sanity_tests.yaml @@ -24,6 +24,9 @@ - script: tools/distrib/check_pytype.sh - script: tools/distrib/clang_format_code.sh - script: tools/distrib/clang_tidy_code.sh + # ClangTidy needs to run exclusively because it uses files under the bazel output + # directory and this will be removed by another bazel invocation. + cpu_cost: 1000 - script: tools/distrib/pylint_code.sh - script: tools/distrib/python/check_grpcio_tools.py - script: tools/distrib/yapf_code.sh --diff
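Note (not part of the patch): the clang_tidy_all_the_things.sh change above depends on the layout of the compile_commands.json file that gen_compilation_database.py writes. The sketch below mirrors the same file-selection logic in Python under that assumption; the example entry shown in the comments is illustrative only, since the real database contents depend on the local Bazel execution root.

#!/usr/bin/env python3
# Minimal sketch of the jq/grep/sort pipeline used by
# clang_tidy_all_the_things.sh; entry contents are hypothetical.
import json
import re

# Each entry in compile_commands.json carries the standard clang fields, e.g.
#   {"file": "src/core/lib/surface/init.cc",
#    "command": "clang -x c++ -std=c++11 ...",
#    "directory": "/path/to/bazel/execution_root"}
with open("compile_commands.json") as f:
    entries = json.load(f)

# Keep only first-party C/C++ sources and skip upb generated code,
# matching the grep filters in the shell pipeline.
wanted = re.compile(r"^(src/core/|src/cpp/|test/core/|test/cpp/)")
skip = re.compile(r"/upb-generated/|/upbdefs-generated/")

files = sorted(
    entry["file"]
    for entry in entries
    if wanted.search(entry["file"]) and not skip.search(entry["file"])
)
for path in files:
    print(path)

Either form yields the same sorted list of first-party sources that the shell script hands to tools/distrib/run_clang_tidy.py via xargs.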