@@ -32,6 +32,7 @@
#include <errno.h>
#include <signal.h>
#include <stdio.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <unistd.h>
@@ -47,9 +48,11 @@
#include <string>
#include <vector>
#include "absl/container/flat_hash_set.h"
#include "absl/log/absl_log.h"
#include "absl/strings/ascii.h"
#include "absl/strings/str_cat.h"
#include "absl/strings/str_format.h"
#include "conformance/conformance.pb.h"
# include "conformance/conformance.pb.h"
# include "conformance_test.h"
@@ -122,7 +125,7 @@ void UsageError() {
  fprintf(stderr,
          " should contain one test name per\n");
  fprintf(stderr,
          " line. Use '#' for comments.\n");
          " line. Use '#' for comments.\n\n");
  fprintf(stderr,
          " --text_format_failure_list <filename> Use to specify list\n");
  fprintf(stderr,
@@ -133,7 +136,7 @@ void UsageError() {
  fprintf(stderr,
          " File should contain one test name\n");
  fprintf(stderr,
          " per line. Use '#' for comments.\n");
          " per line. Use '#' for comments.\n\n");
  fprintf(stderr,
          " --enforce_recommended Enforce that recommended test\n");
@@ -143,19 +146,30 @@ void UsageError() {
          " this flag if you want to be\n");
  fprintf(stderr,
          " strictly conforming to protobuf\n");
  fprintf(stderr, " spec.\n");
  fprintf(stderr, " spec.\n\n");
  fprintf(stderr,
          " --maximum_edition <edition> Only run conformance tests up\n");
          " --maximum_edition <edition> Only run conformance tests up to\n");
  fprintf(stderr,
          " to and including the specified\n");
  fprintf(stderr, " edition.\n");
          " and including the specified\n");
  fprintf(stderr, " edition.\n\n");
  fprintf(stderr,
          " --output_dir <dirname> Directory to write\n"
          " output files.\n");
          " output files.\n\n");
  fprintf(stderr,
          " --debug <test_name1> <test_name2> ... <test_nameN> Debug the\n");
  fprintf(stderr, " specified tests by running\n");
  fprintf(stderr,
          " them in isolation and producing\n");
  fprintf(stderr,
          " serialized request data for piping\n");
  fprintf(stderr, " directly to the testee.\n\n");
  fprintf(stderr, " --performance Boolean option\n");
  fprintf(stderr, " for enabling run of\n");
  fprintf(stderr, " performance tests.\n");
  exit(1);
}
void ForkPipeRunner::RunTest(const std::string &test_name,
void ForkPipeRunner::RunTest(const std::string &test_name, uint32_t len,
                             const std::string &request,
                             std::string *response) {
  if (child_pid_ < 0) {
@@ -163,8 +177,6 @@ void ForkPipeRunner::RunTest(const std::string &test_name,
  }
  current_test_name_ = test_name;
  uint32_t len =
      internal::little_endian::FromHost(static_cast<uint32_t>(request.size()));
  CheckedWrite(write_fd_, &len, sizeof(uint32_t));
  CheckedWrite(write_fd_, request.c_str(), request.size());
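// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the patch): the runner and the testee talk
// over a pipe using length-prefixed framing -- a 4-byte little-endian length
// followed by the serialized conformance request, which is what the
// CheckedWrite() calls above emit. The hypothetical helper below shows the
// same framing with plain POSIX write(); the real code instead uses
// internal::little_endian::FromHost() and CheckedWrite().
#include <unistd.h>
#include <cstdint>
#include <string>

// Hypothetical helper: frame `request` as <uint32 LE length><payload bytes>.
// Returns false on a short or failed write (no retry logic, unlike the
// runner's CheckedWrite()).
inline bool WriteLengthPrefixed(int fd, const std::string &request) {
  uint32_t len = static_cast<uint32_t>(request.size());
  unsigned char prefix[4] = {
      static_cast<unsigned char>(len & 0xff),
      static_cast<unsigned char>((len >> 8) & 0xff),
      static_cast<unsigned char>((len >> 16) & 0xff),
      static_cast<unsigned char>((len >> 24) & 0xff),
  };  // explicit little-endian encoding, independent of host byte order
  if (write(fd, prefix, sizeof(prefix)) !=
      static_cast<ssize_t>(sizeof(prefix))) {
    return false;
  }
  return write(fd, request.data(), request.size()) ==
         static_cast<ssize_t>(request.size());
}
// ---------------------------------------------------------------------------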
@@ -210,57 +222,97 @@ int ForkPipeRunner::Run(int argc, char *argv[],
    fprintf(stderr, "No test suites found.\n");
    return EXIT_FAILURE;
  }
  string program;
  string testee;
  std::vector<string> program_args;
  bool performance = false;
  bool debug = false;
  absl::flat_hash_set<string> debug_test_names;
  bool enforce_recommended = false;
  Edition maximum_edition = EDITION_UNKNOWN;
  std::string output_dir;
  bool verbose = false;
  for (int arg = 1; arg < argc; ++arg) {
    if (strcmp(argv[arg], "--performance") == 0) {
      performance = true;
    } else if (strcmp(argv[arg], "--verbose") == 0) {
      verbose = true;
    } else if (strcmp(argv[arg], "--enforce_recommended") == 0) {
      enforce_recommended = true;
    } else if (strcmp(argv[arg], "--maximum_edition") == 0) {
      if (++arg == argc) UsageError();
      Edition edition = EDITION_UNKNOWN;
      if (!Edition_Parse(absl::StrCat("EDITION_", argv[arg]), &edition)) {
        fprintf(stderr, "Unknown edition: %s\n", argv[arg]);
        UsageError();
      }
      maximum_edition = edition;
    } else if (strcmp(argv[arg], "--output_dir") == 0) {
      if (++arg == argc) UsageError();
      output_dir = argv[arg];
    } else if (strcmp(argv[arg], "--debug") == 0) {
      if (++arg == argc) UsageError();
      for (int debug_arg = arg; debug_arg < argc; ++debug_arg) {
        // Stop when we either find another flag or we reach the last arg
        // (program arg)
        if (argv[debug_arg][0] == '-' || debug_arg == argc - 1) {
          arg = debug_arg - 1;
          break;
        }
        debug_test_names.insert(argv[debug_arg]);
      }
    } else if (argv[arg][0] == '-') {
      bool recognized_flag = false;
      for (ConformanceTestSuite *suite : suites) {
        if (strcmp(argv[arg], suite->GetFailureListFlagName().c_str()) == 0) {
          if (++arg == argc) UsageError();
          recognized_flag = true;
        }
      }
      if (!recognized_flag) {
        fprintf(stderr, "Unknown option: %s\n", argv[arg]);
        UsageError();
      }
    } else {
      program += argv[arg++];
      while (arg < argc) {
        program_args.push_back(argv[arg]);
        arg++;
      }
    }
  }
  if (!debug_test_names.empty()) {
    debug = true;
  }
  auto last_slash = program.find_last_of('/');
  if (last_slash != string::npos) {
    testee = program.substr(last_slash + 1);
  }
  bool all_ok = true;
  for (ConformanceTestSuite *suite : suites) {
    string program;
    std::vector<string> program_args;
    string failure_list_filename;
    conformance::FailureSet failure_list;
    bool performance = false;
    for (int arg = 1; arg < argc; ++arg) {
      if (strcmp(argv[arg], suite->GetFailureListFlagName().c_str()) == 0) {
        if (++arg == argc) UsageError();
        failure_list_filename = argv[arg];
        ParseFailureList(argv[arg], &failure_list);
      } else if (strcmp(argv[arg], "--performance") == 0) {
        performance = true;
        suite->SetPerformance(true);
      } else if (strcmp(argv[arg], "--verbose") == 0) {
        suite->SetVerbose(true);
      } else if (strcmp(argv[arg], "--enforce_recommended") == 0) {
        suite->SetEnforceRecommended(true);
      } else if (strcmp(argv[arg], "--maximum_edition") == 0) {
        if (++arg == argc) UsageError();
        Edition edition = EDITION_UNKNOWN;
        if (!Edition_Parse(absl::StrCat("EDITION_", argv[arg]), &edition)) {
          fprintf(stderr, "Unknown edition: %s\n", argv[arg]);
          UsageError();
        }
        suite->SetMaximumEdition(edition);
      } else if (strcmp(argv[arg], "--output_dir") == 0) {
        if (++arg == argc) UsageError();
        suite->SetOutputDir(argv[arg]);
      } else if (argv[arg][0] == '-') {
        bool recognized_flag = false;
        for (ConformanceTestSuite *suite : suites) {
          if (strcmp(argv[arg], suite->GetFailureListFlagName().c_str()) == 0) {
            if (++arg == argc) UsageError();
            recognized_flag = true;
          }
        }
        if (!recognized_flag) {
          fprintf(stderr, "Unknown option: %s\n", argv[arg]);
          UsageError();
        }
      } else {
        program += argv[arg++];
        while (arg < argc) {
          program_args.push_back(argv[arg]);
          arg++;
        }
      }
    }
    suite->SetPerformance(performance);
    suite->SetVerbose(verbose);
    suite->SetEnforceRecommended(enforce_recommended);
    suite->SetMaximumEdition(maximum_edition);
    suite->SetOutputDir(output_dir);
    suite->SetDebug(debug);
    suite->SetDebugTestNames(debug_test_names);
    suite->SetTestee(testee);
    ForkPipeRunner runner(program, program_args, performance);
@@ -270,6 +322,16 @@ int ForkPipeRunner::Run(int argc, char *argv[],
    fwrite(output.c_str(), 1, output.size(), stderr);
  }
  if (!debug_test_names.empty()) {
    fprintf(stderr,
            "These tests were requested to be debugged, but they do "
            "not exist. Revise the test names:\n\n");
    for (const string &test_name : debug_test_names) {
      fprintf(stderr, "%s\n", test_name.c_str());
    }
    fprintf(stderr, "\n\n");
  }
  return all_ok ? EXIT_SUCCESS : EXIT_FAILURE;
}
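// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the patch): the new --debug option consumes
// every argument that follows it as a test name until it hits another flag or
// the final argument, which is always the testee program; the testee name is
// then the basename of that program path. The standalone toy below (using
// std::set instead of absl::flat_hash_set, and skipping UsageError handling)
// re-implements just that parsing rule for illustration.
#include <cstdio>
#include <cstring>
#include <set>
#include <string>

int main(int argc, char *argv[]) {
  std::set<std::string> debug_test_names;
  std::string program;
  for (int arg = 1; arg < argc; ++arg) {
    if (strcmp(argv[arg], "--debug") == 0) {
      // Collect test names until the next flag or the last (program) arg.
      for (int debug_arg = arg + 1; debug_arg < argc; ++debug_arg) {
        if (argv[debug_arg][0] == '-' || debug_arg == argc - 1) {
          arg = debug_arg - 1;
          break;
        }
        debug_test_names.insert(argv[debug_arg]);
      }
    } else if (argv[arg][0] != '-') {
      program = argv[arg];  // toy version: last non-flag argument wins
    }
  }
  // Testee name = basename of the program path, as in the patch.
  std::string testee = program;
  auto last_slash = program.find_last_of('/');
  if (last_slash != std::string::npos) testee = program.substr(last_slash + 1);
  std::printf("testee: %s, %zu test(s) to debug\n", testee.c_str(),
              debug_test_names.size());
  return 0;
}
// Example: running the toy as
//   ./sketch --debug Some.Test.Name path/to/testee_binary
// reports testee "testee_binary" and one test to debug.
// ---------------------------------------------------------------------------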