go/conformanceDebuggability can now run tests in isolation without producing debugging info/commands. Test names to be run in isolation are passed through the --test flag, and if the user wishes to debug them, they will set --debug.

PiperOrigin-RevId: 660491962
pull/17754/head
Yamil Morales 8 months ago committed by Copybara-Service
parent ee26f27493
commit 3ce384ade2
  1. 66
      conformance/conformance_test.cc
  2. 44
      conformance/conformance_test.h
  3. 50
      conformance/conformance_test_runner.cc

@ -514,7 +514,7 @@ void ConformanceTestSuite::VerifyResponse(
bool ConformanceTestSuite::RunTest(const std::string& test_name,
const ConformanceRequest& request,
ConformanceResponse* response) {
if (test_names_.insert(test_name).second == false) {
if (test_names_ran_.insert(test_name).second == false) {
ABSL_LOG(FATAL) << "Duplicated test name: " << test_name;
}
@ -525,35 +525,40 @@ bool ConformanceTestSuite::RunTest(const std::string& test_name,
uint32_t len = internal::little_endian::FromHost(
static_cast<uint32_t>(serialized_request.size()));
if (!debug_) { // Not in debug mode. Continue.
} else if (debug_test_names_->erase(test_name) == 1) {
std::string octal = ProduceOctalSerialized(serialized_request, len);
std::string full_filename = WriteToFile(octal, output_dir_, test_name);
if (!full_filename.empty()) {
absl::StrAppendFormat(
&output_, "Produced octal serialized request file for test %s\n",
test_name);
absl::StrAppendFormat(
&output_,
" To pipe the "
"serialized request directly to "
"the "
"testee run from the root of your workspace:\n printf $("
"<\"%s\") | "
"./bazel-bin/google/protobuf/conformance/%s\n\n",
full_filename, testee_);
absl::StrAppendFormat(
&output_,
" To inspect the wire format of the serialized request run "
"(Disclaimer: This may not work properly on non-Linux platforms):\n "
" "
"contents=$(<\"%s\"); sub=$(cut -d \\\\ -f 6- <<< "
"$contents) ; printf \"\\\\${sub}\" | protoscope \n\n\n",
full_filename);
if (isolated_) {
if (names_to_test_.erase(test_name) ==
0) { // Tests were asked to be run in isolated mode, but this test was
// not asked to be run.
expected_to_fail_.erase(test_name);
return false;
}
if (debug_) {
std::string octal = ProduceOctalSerialized(serialized_request, len);
std::string full_filename = WriteToFile(octal, output_dir_, test_name);
if (!full_filename.empty()) {
absl::StrAppendFormat(
&output_, "Produced octal serialized request file for test %s\n",
test_name);
absl::StrAppendFormat(
&output_,
" To pipe the "
"serialized request directly to "
"the "
"testee run from the root of your workspace:\n printf $("
"<\"%s\") | %s\n\n",
full_filename, testee_);
absl::StrAppendFormat(
&output_,
" To inspect the wire format of the serialized request with "
"protoscope run "
"(Disclaimer: This may not work properly on non-Linux "
"platforms):\n "
" "
"contents=$(<\"%s\"); sub=$(cut -d \\\\ -f 6- <<< "
"$contents) ; printf \"\\\\${sub}\" | protoscope \n\n\n",
full_filename);
}
}
} else { // Test is not run, as it was not asked to be debugged.
expected_to_fail_.erase(test_name);
return false;
}
response->set_protobuf_payload(serialized_request);
@ -604,7 +609,7 @@ bool ConformanceTestSuite::RunSuite(ConformanceTestRunner* runner,
successes_ = 0;
expected_failures_ = 0;
skipped_.clear();
test_names_.clear();
test_names_ran_.clear();
unexpected_failing_tests_.clear();
unexpected_succeeding_tests_.clear();
@ -618,6 +623,7 @@ bool ConformanceTestSuite::RunSuite(ConformanceTestRunner* runner,
for (const TestStatus& failure : failure_list->test()) {
AddExpectedFailedTest(failure);
}
RunSuiteImpl();
if (*output_dir_.rbegin() != '/') {

@ -128,15 +128,7 @@ class ForkPipeRunner : public ConformanceTestRunner {
//
class ConformanceTestSuite {
public:
ConformanceTestSuite()
: testee_(""),
verbose_(false),
performance_(false),
enforce_recommended_(false),
maximum_edition_(Edition::EDITION_PROTO3),
failure_list_flag_name_("--failure_list"),
debug_test_names_(nullptr),
debug_(false) {}
ConformanceTestSuite() = default;
virtual ~ConformanceTestSuite() = default;
void SetPerformance(bool performance) { performance_ = performance; }
@ -172,9 +164,16 @@ class ConformanceTestSuite {
// Sets the testee name
void SetTestee(const std::string& testee) { testee_ = testee; }
// Sets the debug test names
void SetDebugTestNames(absl::flat_hash_set<std::string>& debug_test_names) {
debug_test_names_ = &debug_test_names;
// Sets the names of tests to ONLY be run, in isolation from all the others.
void SetNamesToTest(absl::flat_hash_set<std::string> names_to_test) {
if (!names_to_test.empty()) {
isolated_ = true;
}
names_to_test_ = std::move(names_to_test);
}
absl::flat_hash_set<std::string> GetExpectedTestsNotRun() {
return names_to_test_;
}
// Run all the conformance tests against the given test runner.
@ -185,8 +184,6 @@ class ConformanceTestSuite {
// The filename here is *only* used to create/format useful error messages for
// how to update the failure list. We do NOT read this file at all.
// "debug_test_names" holds the list of test names that the user requested to
// debug. If this is empty, we will run all the tests.
bool RunSuite(ConformanceTestRunner* runner, std::string* output,
const std::string& filename,
conformance::FailureSet* failure_list);
@ -303,16 +300,19 @@ class ConformanceTestSuite {
std::string testee_;
int successes_;
int expected_failures_;
bool verbose_;
bool performance_;
bool enforce_recommended_;
Edition maximum_edition_;
bool verbose_ = false;
bool performance_ = false;
bool enforce_recommended_ = false;
Edition maximum_edition_ = Edition::EDITION_PROTO3;
std::string output_;
std::string output_dir_;
std::string failure_list_flag_name_;
std::string failure_list_flag_name_ = "--failure_list";
std::string failure_list_filename_;
absl::flat_hash_set<std::string>* debug_test_names_;
bool debug_;
absl::flat_hash_set<std::string> names_to_test_;
bool debug_ = false;
// If names were given for names_to_test_, only those tests
// will be run and this bool will be set to true.
bool isolated_ = false;
// The set of test names that are expected to fail in this run, but haven't
// failed yet.
@ -326,7 +326,7 @@ class ConformanceTestSuite {
// The set of test names that have been run. Used to ensure that there are no
// duplicate names in the suite.
absl::flat_hash_set<std::string> test_names_;
absl::flat_hash_set<std::string> test_names_ran_;
// The set of tests that failed, but weren't expected to: They weren't
// present in our failure lists.

@ -45,6 +45,7 @@
#include <cstring>
#include <fstream>
#include <future>
#include <memory>
#include <string>
#include <vector>
@ -155,14 +156,16 @@ void UsageError() {
fprintf(stderr,
" --output_dir <dirname> Directory to write\n"
" output files.\n\n");
fprintf(stderr, " --test <test_name> Only run\n");
fprintf(stderr,
" --debug <test_name1> <test_name2> ... <test_nameN> Debug the \n");
fprintf(stderr, " specified tests by running\n");
" the specified test. Multiple tests\n"
" can be specified by repeating the \n"
" flag.\n\n");
fprintf(stderr,
" them in isolation and producing\n");
fprintf(stderr,
" serialized request data for piping\n");
fprintf(stderr, " directly to the testee.\n\n");
" --debug Enable debug mode\n"
" to produce octal serialized\n"
" ConformanceRequest for the tests\n"
" passed to --test (required)\n\n");
fprintf(stderr, " --performance Boolean option\n");
fprintf(stderr, " for enabling run of\n");
fprintf(stderr, " performance tests.\n");
@ -228,7 +231,7 @@ int ForkPipeRunner::Run(int argc, char *argv[],
std::vector<string> program_args;
bool performance = false;
bool debug = false;
absl::flat_hash_set<string> debug_test_names;
absl::flat_hash_set<string> names_to_test;
bool enforce_recommended = false;
Edition maximum_edition = EDITION_UNKNOWN;
std::string output_dir;
@ -237,6 +240,8 @@ int ForkPipeRunner::Run(int argc, char *argv[],
for (int arg = 1; arg < argc; ++arg) {
if (strcmp(argv[arg], "--performance") == 0) {
performance = true;
} else if (strcmp(argv[arg], "--debug") == 0) {
debug = true;
} else if (strcmp(argv[arg], "--verbose") == 0) {
verbose = true;
} else if (strcmp(argv[arg], "--enforce_recommended") == 0) {
@ -253,17 +258,9 @@ int ForkPipeRunner::Run(int argc, char *argv[],
if (++arg == argc) UsageError();
output_dir = argv[arg];
} else if (strcmp(argv[arg], "--debug") == 0) {
} else if (strcmp(argv[arg], "--test") == 0) {
if (++arg == argc) UsageError();
for (int debug_arg = arg; debug_arg < argc; ++debug_arg) {
// Stop when we either find another flag or we reach the last arg
// (program arg)
if (argv[debug_arg][0] == '-' || debug_arg == argc - 1) {
arg = debug_arg - 1;
break;
}
debug_test_names.insert(argv[debug_arg]);
}
names_to_test.insert(argv[arg]);
} else if (argv[arg][0] == '-') {
bool recognized_flag = false;
@ -286,12 +283,8 @@ int ForkPipeRunner::Run(int argc, char *argv[],
}
}
if (!debug_test_names.empty()) {
debug = true;
}
auto last_slash = program.find_last_of('/');
if (last_slash != string::npos) {
testee = program.substr(last_slash + 1);
if (debug && names_to_test.empty()) {
UsageError();
}
bool all_ok = true;
@ -311,8 +304,8 @@ int ForkPipeRunner::Run(int argc, char *argv[],
suite->SetMaximumEdition(maximum_edition);
suite->SetOutputDir(output_dir);
suite->SetDebug(debug);
suite->SetDebugTestNames(debug_test_names);
suite->SetTestee(testee);
suite->SetNamesToTest(names_to_test);
suite->SetTestee(program);
ForkPipeRunner runner(program, program_args, performance);
@ -320,14 +313,15 @@ int ForkPipeRunner::Run(int argc, char *argv[],
all_ok = all_ok && suite->RunSuite(&runner, &output, failure_list_filename,
&failure_list);
names_to_test = suite->GetExpectedTestsNotRun();
fwrite(output.c_str(), 1, output.size(), stderr);
}
if (!debug_test_names.empty()) {
if (!names_to_test.empty()) {
fprintf(stderr,
"These tests were requested to be debugged, but they do "
"These tests were requested to be run in isolation, but they do "
"not exist. Revise the test names:\n\n");
for (const string &test_name : debug_test_names) {
for (const string &test_name : names_to_test) {
fprintf(stderr, " %s\n", test_name.c_str());
}
fprintf(stderr, "\n\n");

Loading…
Cancel
Save