@@ -1002,9 +1002,19 @@ bool Tokenizer::ParseInteger(const std::string& text, uint64_t max_value,
 }
 
 double Tokenizer::ParseFloat(const std::string& text) {
+  double result;
+  GOOGLE_LOG_IF(DFATAL,
+                !TryParseFloat(text, &result))
+      << " Tokenizer::ParseFloat() passed text that could not have been"
+         " tokenized as a float: "
+      << absl::CEscape(text);
+  return result;
+}
+
+bool Tokenizer::TryParseFloat(const std::string& text, double* result) {
   const char* start = text.c_str();
   char* end;
-  double result = NoLocaleStrtod(start, &end);
+  *result = NoLocaleStrtod(start, &end);
 
   // "1e" is not a valid float, but if the tokenizer reads it, it will
   // report an error but still return it as a valid token. We need to
@@ -1020,12 +1030,7 @@ double Tokenizer::ParseFloat(const std::string& text) {
     ++end;
   }
 
-  GOOGLE_LOG_IF(DFATAL,
-                static_cast<size_t>(end - start) != text.size() || *start == '-')
-      << " Tokenizer::ParseFloat() passed text that could not have been"
-         " tokenized as a float: "
-      << absl::CEscape(text);
-  return result;
+  return static_cast<size_t>(end - start) == text.size() && *start != '-';
 }
 
 // Helper to append a Unicode code point to a string as UTF8, without bringing
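
For reference, a minimal usage sketch of the API this change introduces (not part of the patch). It assumes TryParseFloat() is declared as a public static member of google::protobuf::io::Tokenizer alongside the existing static ParseFloat(), which is what the definitions above imply.

#include <iostream>

#include <google/protobuf/io/tokenizer.h>

int main() {
  using google::protobuf::io::Tokenizer;

  double value = 0.0;

  // TryParseFloat() reports failure to the caller instead of logging
  // DFATAL, so it can be handed text that may not be a float token.
  if (Tokenizer::TryParseFloat("1.25e3", &value)) {
    std::cout << "parsed: " << value << "\n";  // parsed: 1250
  }

  // A leading '-' is never part of a float token (the tokenizer emits the
  // minus sign separately), so this returns false instead of tripping the
  // DFATAL check that ParseFloat() would hit in debug builds.
  if (!Tokenizer::TryParseFloat("-1.0", &value)) {
    std::cout << "rejected\n";
  }

  // ParseFloat() keeps its old contract: only call it with text that the
  // tokenizer already classified as a TYPE_FLOAT token.
  std::cout << Tokenizer::ParseFloat("2.5") << "\n";  // 2.5
  return 0;
}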