go/conformanceDebuggability: the conformance test runner can now run tests in isolation without producing debugging info/commands. Test names to be run in isolation are passed through the --test flag (which may be repeated), and if the user wishes to debug them, they also set --debug.
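
For illustration, invocations under the new flags might look like this (the runner and testee binary names and the test name are hypothetical placeholders, not taken from this change):

```sh
# Run a single test in isolation, with no debugging output
# (all paths and names here are illustrative).
./conformance_test_runner --test SomeConformanceTest ./testee_binary

# Additionally dump the octal-serialized ConformanceRequest for that test;
# per this change, --debug is only valid together with at least one --test.
./conformance_test_runner --debug --test SomeConformanceTest \
    --output_dir /tmp/conformance ./testee_binary
```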

PiperOrigin-RevId: 660491962
pull/17754/head
Authored by Yamil Morales 8 months ago; committed by Copybara-Service
parent ee26f27493
commit 3ce384ade2
3 changed files:
  conformance/conformance_test.cc         (28 lines changed)
  conformance/conformance_test.h          (44 lines changed)
  conformance/conformance_test_runner.cc  (50 lines changed)

--- a/conformance/conformance_test.cc
+++ b/conformance/conformance_test.cc
@@ -514,7 +514,7 @@ void ConformanceTestSuite::VerifyResponse(
 bool ConformanceTestSuite::RunTest(const std::string& test_name,
                                    const ConformanceRequest& request,
                                    ConformanceResponse* response) {
-  if (test_names_.insert(test_name).second == false) {
+  if (test_names_ran_.insert(test_name).second == false) {
     ABSL_LOG(FATAL) << "Duplicated test name: " << test_name;
   }
@@ -525,8 +525,14 @@ bool ConformanceTestSuite::RunTest(const std::string& test_name,
   uint32_t len = internal::little_endian::FromHost(
       static_cast<uint32_t>(serialized_request.size()));
 
-  if (!debug_) {  // Not in debug mode. Continue.
-  } else if (debug_test_names_->erase(test_name) == 1) {
+  if (isolated_) {
+    if (names_to_test_.erase(test_name) ==
+        0) {  // Tests were asked to be run in isolated mode, but this test was
+              // not asked to be run.
+      expected_to_fail_.erase(test_name);
+      return false;
+    }
+    if (debug_) {
       std::string octal = ProduceOctalSerialized(serialized_request, len);
       std::string full_filename = WriteToFile(octal, output_dir_, test_name);
       if (!full_filename.empty()) {
@@ -539,21 +545,20 @@ bool ConformanceTestSuite::RunTest(const std::string& test_name,
             "serialized request directly to "
             "the "
             "testee run from the root of your workspace:\n  printf $("
-            "<\"%s\") | "
-            "./bazel-bin/google/protobuf/conformance/%s\n\n",
+            "<\"%s\") | %s\n\n",
             full_filename, testee_);
         absl::StrAppendFormat(
             &output_,
-            "  To inspect the wire format of the serialized request run "
-            "(Disclaimer: This may not work properly on non-Linux platforms):\n  "
+            "  To inspect the wire format of the serialized request with "
+            "protoscope run "
+            "(Disclaimer: This may not work properly on non-Linux "
+            "platforms):\n  "
             "  "
             "contents=$(<\"%s\"); sub=$(cut -d \\\\ -f 6- <<< "
             "$contents) ; printf \"\\\\${sub}\" | protoscope \n\n\n",
             full_filename);
       }
-  } else {  // Test is not ran, as it was not asked to be debugged.
-    expected_to_fail_.erase(test_name);
-    return false;
+    }
   }
 
   response->set_protobuf_payload(serialized_request);
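
Expanded, the two commands this hunk teaches the user to run look roughly like the following; the dump-file path is a hypothetical stand-in for whatever WriteToFile produces under --output_dir:

```sh
# Replay the saved request by piping it directly into the testee
# (paths and names are illustrative).
printf $(<"/tmp/conformance/SomeConformanceTest.txt") | ./testee_binary

# Inspect the wire format with protoscope; as the message warns, this may
# not work properly on non-Linux platforms. The cut invocation keeps the
# backslash-separated fields from the sixth onward, which appears to strip
# the serialized request's 4-byte length prefix before decoding.
contents=$(<"/tmp/conformance/SomeConformanceTest.txt")
sub=$(cut -d \\ -f 6- <<< $contents)
printf "\\${sub}" | protoscope
```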
@@ -604,7 +609,7 @@ bool ConformanceTestSuite::RunSuite(ConformanceTestRunner* runner,
   successes_ = 0;
   expected_failures_ = 0;
   skipped_.clear();
-  test_names_.clear();
+  test_names_ran_.clear();
   unexpected_failing_tests_.clear();
   unexpected_succeeding_tests_.clear();
@@ -618,6 +623,7 @@ bool ConformanceTestSuite::RunSuite(ConformanceTestRunner* runner,
   for (const TestStatus& failure : failure_list->test()) {
     AddExpectedFailedTest(failure);
   }
   RunSuiteImpl();
 
+  if (*output_dir_.rbegin() != '/') {

--- a/conformance/conformance_test.h
+++ b/conformance/conformance_test.h
@@ -128,15 +128,7 @@ class ForkPipeRunner : public ConformanceTestRunner {
 //
 class ConformanceTestSuite {
  public:
-  ConformanceTestSuite()
-      : testee_(""),
-        verbose_(false),
-        performance_(false),
-        enforce_recommended_(false),
-        maximum_edition_(Edition::EDITION_PROTO3),
-        failure_list_flag_name_("--failure_list"),
-        debug_test_names_(nullptr),
-        debug_(false) {}
+  ConformanceTestSuite() = default;
   virtual ~ConformanceTestSuite() = default;
 
   void SetPerformance(bool performance) { performance_ = performance; }
@@ -172,9 +164,16 @@ class ConformanceTestSuite {
   // Sets the testee name
   void SetTestee(const std::string& testee) { testee_ = testee; }
 
-  // Sets the debug test names
-  void SetDebugTestNames(absl::flat_hash_set<std::string>& debug_test_names) {
-    debug_test_names_ = &debug_test_names;
+  // Sets the names of tests to ONLY be run isolated from all the others.
+  void SetNamesToTest(absl::flat_hash_set<std::string> names_to_test) {
+    if (!names_to_test.empty()) {
+      isolated_ = true;
+    }
+    names_to_test_ = std::move(names_to_test);
+  }
+
+  absl::flat_hash_set<std::string> GetExpectedTestsNotRun() {
+    return names_to_test_;
   }
 
   // Run all the conformance tests against the given test runner.
@@ -185,8 +184,6 @@ class ConformanceTestSuite {
 
   // The filename here is *only* used to create/format useful error messages for
   // how to update the failure list. We do NOT read this file at all.
-  // "debug_test_names" holds the list of test names that the user requested to
-  // debug. If this is empty, we will run all the tests.
   bool RunSuite(ConformanceTestRunner* runner, std::string* output,
                 const std::string& filename,
                 conformance::FailureSet* failure_list);
@@ -303,16 +300,19 @@ class ConformanceTestSuite {
   std::string testee_;
   int successes_;
   int expected_failures_;
-  bool verbose_;
-  bool performance_;
-  bool enforce_recommended_;
-  Edition maximum_edition_;
+  bool verbose_ = false;
+  bool performance_ = false;
+  bool enforce_recommended_ = false;
+  Edition maximum_edition_ = Edition::EDITION_PROTO3;
   std::string output_;
   std::string output_dir_;
-  std::string failure_list_flag_name_;
+  std::string failure_list_flag_name_ = "--failure_list";
   std::string failure_list_filename_;
-  absl::flat_hash_set<std::string>* debug_test_names_;
-  bool debug_;
+  absl::flat_hash_set<std::string> names_to_test_;
+  bool debug_ = false;
+  // If names were given for names_to_test_, only those tests
+  // will be run and this bool will be set to true.
+  bool isolated_ = false;
 
   // The set of test names that are expected to fail in this run, but haven't
   // failed yet.
@@ -326,7 +326,7 @@ class ConformanceTestSuite {
   // The set of test names that have been run. Used to ensure that there are no
   // duplicate names in the suite.
-  absl::flat_hash_set<std::string> test_names_;
+  absl::flat_hash_set<std::string> test_names_ran_;
   // The set of tests that failed, but weren't expected to: They weren't
   // present in our failure lists.

--- a/conformance/conformance_test_runner.cc
+++ b/conformance/conformance_test_runner.cc
@@ -45,6 +45,7 @@
 #include <cstring>
 #include <fstream>
 #include <future>
+#include <memory>
 #include <string>
 #include <vector>
@@ -155,14 +156,16 @@ void UsageError() {
   fprintf(stderr,
           "  --output_dir <dirname>      Directory to write\n"
           "                              output files.\n\n");
+  fprintf(stderr, "  --test <test_name>          Only run\n");
   fprintf(stderr,
-          "  --debug <test_name1> <test_name2> ... <test_nameN>   Debug the \n");
-  fprintf(stderr, "                              specified tests by running\n");
+          "                              the specified test. Multiple tests\n"
+          "                              can be specified by repeating the\n"
+          "                              flag.\n\n");
   fprintf(stderr,
-          "                              them in isolation and producing\n");
-  fprintf(stderr,
-          "                              serialized request data for piping\n");
-  fprintf(stderr, "                              directly to the testee.\n\n");
+          "  --debug                     Enable debug mode\n"
+          "                              to produce octal serialized\n"
+          "                              ConformanceRequest for the tests\n"
+          "                              passed to --test (required)\n\n");
   fprintf(stderr, "  --performance               Boolean option\n");
   fprintf(stderr, "                              for enabling run of\n");
   fprintf(stderr, "                              performance tests.\n");
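
As the updated usage text notes, --test may be repeated to select several tests; a sketch (binary and test names are again placeholders):

```sh
# Each occurrence of --test adds one name to the set of tests
# that will be run in isolation.
./conformance_test_runner \
    --test SomeConformanceTest \
    --test AnotherConformanceTest \
    ./testee_binary
```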
@@ -228,7 +231,7 @@ int ForkPipeRunner::Run(int argc, char *argv[],
   std::vector<string> program_args;
   bool performance = false;
   bool debug = false;
-  absl::flat_hash_set<string> debug_test_names;
+  absl::flat_hash_set<string> names_to_test;
   bool enforce_recommended = false;
   Edition maximum_edition = EDITION_UNKNOWN;
   std::string output_dir;
@@ -237,6 +240,8 @@ int ForkPipeRunner::Run(int argc, char *argv[],
   for (int arg = 1; arg < argc; ++arg) {
     if (strcmp(argv[arg], "--performance") == 0) {
       performance = true;
+    } else if (strcmp(argv[arg], "--debug") == 0) {
+      debug = true;
     } else if (strcmp(argv[arg], "--verbose") == 0) {
       verbose = true;
     } else if (strcmp(argv[arg], "--enforce_recommended") == 0) {
@@ -253,17 +258,9 @@ int ForkPipeRunner::Run(int argc, char *argv[],
       if (++arg == argc) UsageError();
       output_dir = argv[arg];
-    } else if (strcmp(argv[arg], "--debug") == 0) {
+    } else if (strcmp(argv[arg], "--test") == 0) {
       if (++arg == argc) UsageError();
-      for (int debug_arg = arg; debug_arg < argc; ++debug_arg) {
-        // Stop when we either find another flag or we reach the last arg
-        // (program arg)
-        if (argv[debug_arg][0] == '-' || debug_arg == argc - 1) {
-          arg = debug_arg - 1;
-          break;
-        }
-        debug_test_names.insert(argv[debug_arg]);
-      }
+      names_to_test.insert(argv[arg]);
     } else if (argv[arg][0] == '-') {
       bool recognized_flag = false;
@@ -286,12 +283,8 @@ int ForkPipeRunner::Run(int argc, char *argv[],
     }
   }
 
-  if (!debug_test_names.empty()) {
-    debug = true;
-  }
-
-  auto last_slash = program.find_last_of('/');
-  if (last_slash != string::npos) {
-    testee = program.substr(last_slash + 1);
-  }
+  if (debug && names_to_test.empty()) {
+    UsageError();
+  }
 
   bool all_ok = true;
@@ -311,8 +304,8 @@ int ForkPipeRunner::Run(int argc, char *argv[],
   suite->SetMaximumEdition(maximum_edition);
   suite->SetOutputDir(output_dir);
   suite->SetDebug(debug);
-  suite->SetDebugTestNames(debug_test_names);
-  suite->SetTestee(testee);
+  suite->SetNamesToTest(names_to_test);
+  suite->SetTestee(program);
 
   ForkPipeRunner runner(program, program_args, performance);
@@ -320,14 +313,15 @@ int ForkPipeRunner::Run(int argc, char *argv[],
     all_ok = all_ok && suite->RunSuite(&runner, &output, failure_list_filename,
                                        &failure_list);
+    names_to_test = suite->GetExpectedTestsNotRun();
     fwrite(output.c_str(), 1, output.size(), stderr);
   }
 
-  if (!debug_test_names.empty()) {
+  if (!names_to_test.empty()) {
     fprintf(stderr,
-            "These tests were requested to be debugged, but they do "
+            "These tests were requested to be run in isolation, but they do "
             "not exist. Revise the test names:\n\n");
-    for (const string &test_name : debug_test_names) {
+    for (const string &test_name : names_to_test) {
       fprintf(stderr, "  %s\n", test_name.c_str());
     }
     fprintf(stderr, "\n\n");
