diff --git a/upb/io/tokenizer_test.cc b/upb/io/tokenizer_test.cc
index 70027d356b..68f93f6686 100644
--- a/upb/io/tokenizer_test.cc
+++ b/upb/io/tokenizer_test.cc
@@ -190,7 +190,7 @@ TEST_2D(TokenizerTest, SimpleTokens, kSimpleTokenCases, kBlockSizes) {
   auto input = TestInputStream(kSimpleTokenCases_case.input.data(),
                                kSimpleTokenCases_case.input.size(),
                                kBlockSizes_case, arena.ptr());
-  auto t = upb_Tokenizer_New(NULL, 0, input, 0, arena.ptr());
+  auto t = upb_Tokenizer_New(nullptr, 0, input, 0, arena.ptr());
 
   // Before Next() is called, the initial token should always be TYPE_START.
   EXPECT_EQ(upb_Tokenizer_Type(t), kUpb_TokenType_Start);
@@ -200,7 +200,7 @@ TEST_2D(TokenizerTest, SimpleTokens, kSimpleTokenCases, kBlockSizes) {
   EXPECT_TRUE(StringEquals(upb_Tokenizer_TextData(t), ""));
 
   // Parse the token.
-  EXPECT_TRUE(upb_Tokenizer_Next(t, NULL));
+  EXPECT_TRUE(upb_Tokenizer_Next(t, nullptr));
   // Check that it has the right type.
   EXPECT_EQ(upb_Tokenizer_Type(t), kSimpleTokenCases_case.type);
   // Check that it contains the complete input text.
@@ -236,23 +236,23 @@ TEST_1D(TokenizerTest, FloatSuffix, kBlockSizes) {
   auto input =
       TestInputStream(text, strlen(text), kBlockSizes_case, arena.ptr());
   const int options = kUpb_TokenizerOption_AllowFAfterFloat;
-  auto t = upb_Tokenizer_New(NULL, 0, input, options, arena.ptr());
+  auto t = upb_Tokenizer_New(nullptr, 0, input, options, arena.ptr());
 
   // Advance through tokens and check that they are parsed as expected.
-  EXPECT_TRUE(upb_Tokenizer_Next(t, NULL));
+  EXPECT_TRUE(upb_Tokenizer_Next(t, nullptr));
   EXPECT_EQ(upb_Tokenizer_Type(t), kUpb_TokenType_Float);
   EXPECT_TRUE(StringEquals(upb_Tokenizer_TextData(t), "1f"));
 
-  EXPECT_TRUE(upb_Tokenizer_Next(t, NULL));
+  EXPECT_TRUE(upb_Tokenizer_Next(t, nullptr));
   EXPECT_EQ(upb_Tokenizer_Type(t), kUpb_TokenType_Float);
   EXPECT_TRUE(StringEquals(upb_Tokenizer_TextData(t), "2.5f"));
 
-  EXPECT_TRUE(upb_Tokenizer_Next(t, NULL));
+  EXPECT_TRUE(upb_Tokenizer_Next(t, nullptr));
   EXPECT_EQ(upb_Tokenizer_Type(t), kUpb_TokenType_Float);
   EXPECT_TRUE(StringEquals(upb_Tokenizer_TextData(t), "6e3f"));
 
-  EXPECT_TRUE(upb_Tokenizer_Next(t, NULL));
+  EXPECT_TRUE(upb_Tokenizer_Next(t, nullptr));
   EXPECT_EQ(upb_Tokenizer_Type(t), kUpb_TokenType_Float);
   EXPECT_TRUE(StringEquals(upb_Tokenizer_TextData(t), "7F"));
 
@@ -282,23 +282,23 @@ TEST_2D(TokenizerTest, Whitespace, kWhitespaceTokenCases, kBlockSizes) {
     auto input = TestInputStream(kWhitespaceTokenCases_case.input.data(),
                                  kWhitespaceTokenCases_case.input.size(),
                                  kBlockSizes_case, arena.ptr());
-    auto t = upb_Tokenizer_New(NULL, 0, input, 0, arena.ptr());
+    auto t = upb_Tokenizer_New(nullptr, 0, input, 0, arena.ptr());
 
-    EXPECT_FALSE(upb_Tokenizer_Next(t, NULL));
+    EXPECT_FALSE(upb_Tokenizer_Next(t, nullptr));
   }
   {
     auto input = TestInputStream(kWhitespaceTokenCases_case.input.data(),
                                  kWhitespaceTokenCases_case.input.size(),
                                  kBlockSizes_case, arena.ptr());
     const int options = kUpb_TokenizerOption_ReportNewlines;
-    auto t = upb_Tokenizer_New(NULL, 0, input, options, arena.ptr());
+    auto t = upb_Tokenizer_New(nullptr, 0, input, options, arena.ptr());
 
-    EXPECT_TRUE(upb_Tokenizer_Next(t, NULL));
+    EXPECT_TRUE(upb_Tokenizer_Next(t, nullptr));
     EXPECT_EQ(upb_Tokenizer_Type(t), kWhitespaceTokenCases_case.type);
     EXPECT_TRUE(StringEquals(upb_Tokenizer_TextData(t),
                              kWhitespaceTokenCases_case.input.data()));
-    EXPECT_FALSE(upb_Tokenizer_Next(t, NULL));
+    EXPECT_FALSE(upb_Tokenizer_Next(t, nullptr));
   }
 }
@@ -423,7 +423,7 @@ TEST_2D(TokenizerTest, MultipleTokens, kMultiTokenCases, kBlockSizes) {
   auto input = TestInputStream(kMultiTokenCases_case.input.data(),
                                kMultiTokenCases_case.input.size(),
                                kBlockSizes_case, arena.ptr());
-  auto t = upb_Tokenizer_New(NULL, 0, input, 0, arena.ptr());
+  auto t = upb_Tokenizer_New(nullptr, 0, input, 0, arena.ptr());
 
   // Before Next() is called, the initial token should always be TYPE_START.
   EXPECT_EQ(upb_Tokenizer_Type(t), kUpb_TokenType_Start);
@@ -448,7 +448,7 @@ TEST_2D(TokenizerTest, MultipleTokens, kMultiTokenCases, kBlockSizes) {
       EXPECT_FALSE(upb_Tokenizer_Next(t, &status));
       EXPECT_TRUE(upb_Status_IsOk(&status));
     } else {
-      EXPECT_TRUE(upb_Tokenizer_Next(t, NULL));
+      EXPECT_TRUE(upb_Tokenizer_Next(t, nullptr));
     }
 
     // Check that the token matches the expected one.
@@ -490,7 +490,7 @@ TEST_2D(TokenizerTest, MultipleWhitespaceTokens, kMultiWhitespaceTokenCases,
                                kMultiWhitespaceTokenCases_case.input.size(),
                                kBlockSizes_case, arena.ptr());
   const int options = kUpb_TokenizerOption_ReportNewlines;
-  auto t = upb_Tokenizer_New(NULL, 0, input, options, arena.ptr());
+  auto t = upb_Tokenizer_New(nullptr, 0, input, options, arena.ptr());
 
   // Before Next() is called, the initial token should always be TYPE_START.
   EXPECT_EQ(upb_Tokenizer_Type(t), kUpb_TokenType_Start);
@@ -515,7 +515,7 @@ TEST_2D(TokenizerTest, MultipleWhitespaceTokens, kMultiWhitespaceTokenCases,
       EXPECT_FALSE(upb_Tokenizer_Next(t, &status));
      EXPECT_TRUE(upb_Status_IsOk(&status));
     } else {
-      EXPECT_TRUE(upb_Tokenizer_Next(t, NULL));
+      EXPECT_TRUE(upb_Tokenizer_Next(t, nullptr));
     }
 
     // Check that the token matches the expected one.
@@ -549,11 +549,11 @@ TEST_1D(TokenizerTest, ShCommentStyle, kBlockSizes) {
   auto input =
       TestInputStream(text, strlen(text), kBlockSizes_case, arena.ptr());
   const int options = kUpb_TokenizerOption_CommentStyleShell;
-  auto t = upb_Tokenizer_New(NULL, 0, input, options, arena.ptr());
+  auto t = upb_Tokenizer_New(nullptr, 0, input, options, arena.ptr());
 
   // Advance through tokens and check that they are parsed as expected.
   for (size_t i = 0; i < arraysize(kTokens); i++) {
-    EXPECT_TRUE(upb_Tokenizer_Next(t, NULL));
+    EXPECT_TRUE(upb_Tokenizer_Next(t, nullptr));
     EXPECT_TRUE(StringEquals(upb_Tokenizer_TextData(t), kTokens[i]));
   }
 
@@ -729,7 +729,7 @@ TEST_2D(TokenizerTest, DocComments, kDocCommentCases, kBlockSizes) {
   std::string next_leading_comments;
   tokenizer.NextWithComments(&prev_trailing_comments, &detached_comments,
                              &next_leading_comments);
-  tokenizer2.NextWithComments(NULL, NULL, NULL);
+  tokenizer2.NextWithComments(nullptr, nullptr, nullptr);
 
   EXPECT_EQ("next", tokenizer.current().text);
   EXPECT_EQ("next", tokenizer2.current().text);
@@ -738,12 +738,12 @@ TEST_2D(TokenizerTest, DocComments, kDocCommentCases, kBlockSizes) {
   for (int i = 0; i < detached_comments.size(); i++) {
     EXPECT_LT(i, arraysize(kDocCommentCases));
-    EXPECT_TRUE(kDocCommentCases_case.detached_comments[i] != NULL);
+    EXPECT_TRUE(kDocCommentCases_case.detached_comments[i] != nullptr);
     EXPECT_EQ(kDocCommentCases_case.detached_comments[i],
               detached_comments[i]);
   }
 
   // Verify that we matched all the detached comments.
-  EXPECT_EQ(NULL,
+  EXPECT_EQ(nullptr,
             kDocCommentCases_case.detached_comments[detached_comments.size()]);
   EXPECT_EQ(kDocCommentCases_case.next_leading_comments,
             next_leading_comments);
@@ -1084,7 +1084,7 @@ TEST_2D(TokenizerTest, Errors, kErrorCases, kBlockSizes) {
   auto input = TestInputStream(kErrorCases_case.input.data(),
                                kErrorCases_case.input.size(),
                                kBlockSizes_case, arena.ptr());
-  auto t = upb_Tokenizer_New(NULL, 0, input, 0, arena.ptr());
+  auto t = upb_Tokenizer_New(nullptr, 0, input, 0, arena.ptr());
 
   upb_Status status;
   upb_Status_Clear(&status);
@@ -1104,8 +1104,8 @@ TEST_1D(TokenizerTest, BackUpOnDestruction, kBlockSizes) {
       TestInputStream(text.data(), text.size(), kBlockSizes_case, arena.ptr());
 
   // Create a tokenizer, read one token, then destroy it.
-  auto t = upb_Tokenizer_New(NULL, 0, input, 0, arena.ptr());
-  upb_Tokenizer_Next(t, NULL);
+  auto t = upb_Tokenizer_New(nullptr, 0, input, 0, arena.ptr());
+  upb_Tokenizer_Next(t, nullptr);
   upb_Tokenizer_Fini(t);
 
   // Only "foo" should have been read.
diff --git a/upb/json/encode_test.cc b/upb/json/encode_test.cc
index 710e2bae82..a79c00799c 100644
--- a/upb/json/encode_test.cc
+++ b/upb/json/encode_test.cc
@@ -30,11 +30,15 @@
 #include "upb/json/encode.h"
 
+#include
+#include
+
 #include "google/protobuf/struct.upb.h"
 #include <gtest/gtest.h>
 #include "upb/base/status.hpp"
 #include "upb/json/test.upb.h"
 #include "upb/json/test.upbdefs.h"
+#include "upb/mem/arena.h"
 #include "upb/mem/arena.hpp"
 #include "upb/reflection/def.hpp"
 
@@ -45,8 +49,8 @@ static std::string JsonEncode(const upb_test_Box* msg, int options) {
   upb::MessageDefPtr m(upb_test_Box_getmsgdef(defpool.ptr()));
   EXPECT_TRUE(m.ptr() != nullptr);
 
-  size_t json_size = upb_JsonEncode(msg, m.ptr(), defpool.ptr(), options, NULL,
-                                    0, status.ptr());
+  size_t json_size = upb_JsonEncode(msg, m.ptr(), defpool.ptr(), options,
+                                    nullptr, 0, status.ptr());
   char* json_buf = (char*)upb_Arena_Malloc(a.ptr(), json_size + 1);
 
   size_t size = upb_JsonEncode(msg, m.ptr(), defpool.ptr(), options, json_buf,
diff --git a/upb/lex/atoi_test.cc b/upb/lex/atoi_test.cc
index bf5547afab..edd949fb5c 100644
--- a/upb/lex/atoi_test.cc
+++ b/upb/lex/atoi_test.cc
@@ -7,6 +7,8 @@
 #include "upb/lex/atoi.h"
 
+#include
+
 #include <gtest/gtest.h>
 #include "absl/strings/str_cat.h"
 
@@ -31,7 +33,7 @@ TEST(AtoiTest, Uint64) {
 
   // Integer overflow
   const char* u = "1000000000000000000000000000000";
-  EXPECT_EQ(NULL, upb_BufToUint64(u, u + strlen(u), &val));
+  EXPECT_EQ(nullptr, upb_BufToUint64(u, u + strlen(u), &val));
 
   // Not an integer
   const char* v = "foobar";
@@ -58,19 +60,19 @@ TEST(AtoiTest, Int64) {
   EXPECT_EQ(s + 1, upb_BufToInt64(s, s + 1, &val, &neg));
   EXPECT_EQ(val, 1);
   EXPECT_EQ(neg, false);
-  EXPECT_EQ(s + 4, upb_BufToInt64(s, s + 4, &val, NULL));
+  EXPECT_EQ(s + 4, upb_BufToInt64(s, s + 4, &val, nullptr));
   EXPECT_EQ(val, 1234);
-  EXPECT_EQ(s + 4, upb_BufToInt64(s, s + 5, &val, NULL));
+  EXPECT_EQ(s + 4, upb_BufToInt64(s, s + 5, &val, nullptr));
   EXPECT_EQ(val, 1234);
 
   const char* t = "-42.6";
   EXPECT_EQ(t + 2, upb_BufToInt64(t, t + 2, &val, &neg));
   EXPECT_EQ(val, -4);
   EXPECT_EQ(neg, true);
-  EXPECT_EQ(t + 3, upb_BufToInt64(t, t + 3, &val, NULL));
+  EXPECT_EQ(t + 3, upb_BufToInt64(t, t + 3, &val, nullptr));
   EXPECT_EQ(val, -42);
   EXPECT_EQ(neg, true);
-  EXPECT_EQ(t + 3, upb_BufToInt64(t, t + 5, &val, NULL));
+  EXPECT_EQ(t + 3, upb_BufToInt64(t, t + 5, &val, nullptr));
   EXPECT_EQ(val, -42);
 
   const int64_t values[] = {
@@ -83,7 +85,7 @@ TEST(AtoiTest, Int64) {
     std::string v = absl::StrCat(values[i]);
     const char* ptr = v.c_str();
     const char* end = ptr + strlen(ptr);
-    EXPECT_EQ(end, upb_BufToInt64(ptr, end, &val, NULL));
+    EXPECT_EQ(end, upb_BufToInt64(ptr, end, &val, nullptr));
     EXPECT_EQ(val, values[i]);
   }
 }
diff --git a/upb/message/accessors_test.cc b/upb/message/accessors_test.cc
index 50c3921c32..5f2eccc4db 100644
--- a/upb/message/accessors_test.cc
+++ b/upb/message/accessors_test.cc
@@ -223,7 +223,7 @@ TEST(GeneratedCode, Strings) {
       msg, upb_StringView_FromString(kTestStr1));
   EXPECT_EQ(true, upb_Message_HasField(msg, optional_string_field));
   upb_StringView value = upb_Message_GetString(msg, optional_string_field,
-                                               upb_StringView{NULL, 0});
+                                               upb_StringView{nullptr, 0});
   std::string read_value = std::string(value.data, value.size);
   EXPECT_EQ(kTestStr1, read_value);
   // Clear.
diff --git a/upb/util/required_fields_test.cc b/upb/util/required_fields_test.cc
index 4066e31ede..167fa7e71c 100644
--- a/upb/util/required_fields_test.cc
+++ b/upb/util/required_fields_test.cc
@@ -62,8 +62,8 @@ void CheckRequired(absl::string_view json,
 
   // Verify that we can pass a NULL pointer to entries when we don't care about
   // them.
-  EXPECT_EQ(!missing.empty(),
-            upb_util_HasUnsetRequired(test_msg, m.ptr(), defpool.ptr(), NULL));
+  EXPECT_EQ(!missing.empty(), upb_util_HasUnsetRequired(
+                                  test_msg, m.ptr(), defpool.ptr(), nullptr));
 }
 
 // message HasRequiredField {
diff --git a/upb/wire/eps_copy_input_stream_test.cc b/upb/wire/eps_copy_input_stream_test.cc
index 61445c37d9..7a3caa2df0 100644
--- a/upb/wire/eps_copy_input_stream_test.cc
+++ b/upb/wire/eps_copy_input_stream_test.cc
@@ -21,9 +21,10 @@ namespace {
 
 TEST(EpsCopyInputStreamTest, ZeroSize) {
   upb_EpsCopyInputStream stream;
-  const char* ptr = NULL;
+  const char* ptr = nullptr;
   upb_EpsCopyInputStream_Init(&stream, &ptr, 0, false);
-  EXPECT_TRUE(upb_EpsCopyInputStream_IsDoneWithCallback(&stream, &ptr, NULL));
+  EXPECT_TRUE(
+      upb_EpsCopyInputStream_IsDoneWithCallback(&stream, &ptr, nullptr));
 }
 
 // begin:google_only