Merge tag 'refs/tags/sync-piper' into sync-stage

# Conflicts:
#	src/google/protobuf/compiler/objectivec/helpers.cc
#	src/google/protobuf/compiler/objectivec/text_format_decode_data.cc
#	src/google/protobuf/util/message_differencer.h
pull/10760/head
Mike Kruskal 2 years ago
commit 5019451bdf
  1. 3
      java/core/src/main/java/com/google/protobuf/CanIgnoreReturnValue.java
  2. 1
      java/core/src/test/proto/com/google/protobuf/proto2_message.proto
  3. 1
      java/core/src/test/proto/com/google/protobuf/proto2_message_lite.proto
  4. 17
      protobuf.bzl
  5. 7
      python/BUILD.bazel
  6. 333
      python/google/protobuf/internal/field_mask.py
  7. 400
      python/google/protobuf/internal/field_mask_test.py
  8. 2
      python/google/protobuf/internal/missing_enum_values.proto
  9. 304
      python/google/protobuf/internal/well_known_types.py
  10. 360
      python/google/protobuf/internal/well_known_types_test.py
  11. 2
      src/file_lists.cmake
  12. 1
      src/google/protobuf/any.proto
  13. 1
      src/google/protobuf/api.proto
  14. 1
      src/google/protobuf/arenastring.cc
  15. 6
      src/google/protobuf/compiler/command_line_interface.cc
  16. 1
      src/google/protobuf/compiler/cpp/bootstrap_unittest.cc
  17. 30
      src/google/protobuf/compiler/cpp/cpp_generator.h
  18. 33
      src/google/protobuf/compiler/cpp/parse_function_generator.cc
  19. 82
      src/google/protobuf/compiler/cpp/unittest.inc
  20. 1
      src/google/protobuf/compiler/csharp/csharp_bootstrap_unittest.cc
  21. 30
      src/google/protobuf/compiler/java/java_generator.h
  22. 3
      src/google/protobuf/compiler/objectivec/text_format_decode_data.cc
  23. 1
      src/google/protobuf/compiler/plugin.proto
  24. 30
      src/google/protobuf/compiler/python/python_generator.h
  25. 14
      src/google/protobuf/descriptor.proto
  26. 1
      src/google/protobuf/descriptor_database.cc
  27. 1
      src/google/protobuf/duration.proto
  28. 9
      src/google/protobuf/extension_set_unittest.cc
  29. 2
      src/google/protobuf/generated_message_reflection.cc
  30. 6
      src/google/protobuf/generated_message_tctable_decl.h
  31. 17
      src/google/protobuf/generated_message_tctable_gen.cc
  32. 1
      src/google/protobuf/generated_message_tctable_impl.h
  33. 1
      src/google/protobuf/io/tokenizer.cc
  34. 1
      src/google/protobuf/io/zero_copy_stream_impl.cc
  35. 15
      src/google/protobuf/io/zero_copy_stream_impl_lite.h
  36. 5
      src/google/protobuf/lite_unittest.cc
  37. 10
      src/google/protobuf/map_test.inc
  38. 1
      src/google/protobuf/map_unittest.proto
  39. 1
      src/google/protobuf/message.cc
  40. 1
      src/google/protobuf/repeated_field_unittest.cc
  41. 2
      src/google/protobuf/stubs/BUILD.bazel
  42. 30
      src/google/protobuf/stubs/callback.h
  43. 68
      src/google/protobuf/stubs/stl_util.h
  44. 6
      src/google/protobuf/stubs/strutil.cc
  45. 1
      src/google/protobuf/stubs/strutil_unittest.cc
  46. 1
      src/google/protobuf/text_format.cc
  47. 3
      src/google/protobuf/timestamp.proto
  48. 5
      src/google/protobuf/unittest.proto
  49. 1
      src/google/protobuf/unittest_import.proto
  50. 7
      src/google/protobuf/unknown_field_set_unittest.cc
  51. 2
      src/google/protobuf/util/json_format.proto
  52. 1
      src/google/protobuf/well_known_types_unittest.cc
  53. 7
      src/google/protobuf/wire_format_unittest.inc

@ -31,7 +31,6 @@
package com.google.protobuf;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.ElementType.TYPE;
import static java.lang.annotation.RetentionPolicy.CLASS;
import java.lang.annotation.Documented;
@ -45,6 +44,6 @@ import java.lang.annotation.Target;
* annotated with {@code @CheckReturnValue} to exempt specific methods from the default.
*/
@Documented
@Target({METHOD, TYPE})
@Target(METHOD) // TODO(kak): consider adding CONSTRUCTOR later if necessary
@Retention(CLASS)
@interface CanIgnoreReturnValue {}

@ -36,7 +36,6 @@ package protobuf.experimental;
option java_package = "com.google.protobuf.testing";
option java_outer_classname = "Proto2Testing";
message Proto2SpecialFieldName {
optional double regular_name = 1;
optional int32 cached_size = 2;

@ -36,7 +36,6 @@ package protobuf.experimental.lite;
option java_package = "com.google.protobuf.testing";
option java_outer_classname = "Proto2TestingLite";
message Proto2MessageLite {
enum TestEnum {

@ -40,13 +40,14 @@ def _SourceDir(ctx):
def _ObjcBase(srcs):
return [
"".join([token.capitalize() for token in src[:-len(".proto")].split("_")])
for src in srcs]
for src in srcs
]
def _ObjcHdrs(srcs):
return[src + ".pbobjc.h" for src in _ObjcBase(srcs)]
return [src + ".pbobjc.h" for src in _ObjcBase(srcs)]
def _ObjcSrcs(srcs):
return[src + ".pbobjc.m" for src in _ObjcBase(srcs)]
return [src + ".pbobjc.m" for src in _ObjcBase(srcs)]
def _ObjcOuts(srcs, out_type):
if out_type == "hdrs":
@ -67,7 +68,8 @@ def _RubyOuts(srcs):
def _CsharpOuts(srcs):
return [
"".join([token.capitalize() for token in src[:-len(".proto")].split("_")]) + ".cs"
for src in srcs]
for src in srcs
]
ProtoGenInfo = provider(
fields = ["srcs", "import_flags", "deps"],
@ -96,7 +98,7 @@ def _proto_gen_impl(ctx):
if ctx.attr.includes:
for include in ctx.attr.includes:
import_flags += ["-I"+_GetPath(ctx,include)]
import_flags += ["-I" + _GetPath(ctx, include)]
import_flags = depset(direct = import_flags)
@ -149,6 +151,7 @@ def _proto_gen_impl(ctx):
outs.extend(_PyOuts([src.basename], use_grpc_plugin = use_grpc_plugin))
elif lang == "ruby":
outs.extend(_RubyOuts([src.basename]))
# Otherwise, rely on user-supplied outs.
args += [("--%s_out=" + path_tpl) % (lang, gen_dir)]
@ -261,7 +264,7 @@ _proto_gen = rule(
"langs": attr.string_list(),
"outs": attr.string_list(),
"out_type": attr.string(
default = "all"
default = "all",
),
},
output_to_genfiles = True,
@ -641,7 +644,7 @@ def _source_proto_library(
native.filegroup(
name = name,
srcs = [":%s_genproto"%name],
srcs = [":%s_genproto" % name],
testonly = testonly,
visibility = visibility,
**kwargs

@ -275,6 +275,13 @@ py_test(
deps = [":python_test_lib"],
)
py_test(
name = "field_mask_test",
srcs = ["google/protobuf/internal/field_mask_test.py"],
imports = ["."],
deps = [":python_test_lib"],
)
py_test(
name = "generator_test",
srcs = ["google/protobuf/internal/generator_test.py"],

@ -0,0 +1,333 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Contains FieldMask class."""
from google.protobuf.descriptor import FieldDescriptor
class FieldMask(object):
  """Mixin providing FieldMask well-known-type helpers.

  Mixed into the generated FieldMask message class; relies on the message's
  own `paths` repeated field and `Clear()` method.
  """

  __slots__ = ()

  def ToJsonString(self):
    """Converts FieldMask to string according to proto3 JSON spec."""
    return ','.join(_SnakeCaseToCamelCase(path) for path in self.paths)

  def FromJsonString(self, value):
    """Converts string to FieldMask according to proto3 JSON spec.

    Raises:
      ValueError: If value is not a string or a path is malformed camelCase.
    """
    if not isinstance(value, str):
      raise ValueError('FieldMask JSON value not a string: {!r}'.format(value))
    self.Clear()
    if value:
      for camel_path in value.split(','):
        self.paths.append(_CamelCaseToSnakeCase(camel_path))

  def IsValidForDescriptor(self, message_descriptor):
    """Checks whether the FieldMask is valid for Message Descriptor."""
    return all(
        _IsValidPath(message_descriptor, path) for path in self.paths)

  def AllFieldsFromDescriptor(self, message_descriptor):
    """Replaces paths with the names of all direct fields of the descriptor."""
    self.Clear()
    for field in message_descriptor.fields:
      self.paths.append(field.name)

  def CanonicalFormFromMask(self, mask):
    """Converts a FieldMask to the canonical form.

    Removes paths that are covered by another path ("foo.bar" is covered by
    "foo") and sorts all remaining paths alphabetically.

    Args:
      mask: The original FieldMask to be converted.
    """
    _FieldMaskTree(mask).ToFieldMask(self)

  def Union(self, mask1, mask2):
    """Merges mask1 and mask2 into this FieldMask.

    Raises:
      ValueError: If either argument is not a FieldMask message.
    """
    _CheckFieldMaskMessage(mask1)
    _CheckFieldMaskMessage(mask2)
    union_tree = _FieldMaskTree(mask1)
    union_tree.MergeFromFieldMask(mask2)
    union_tree.ToFieldMask(self)

  def Intersect(self, mask1, mask2):
    """Intersects mask1 and mask2 into this FieldMask.

    Raises:
      ValueError: If either argument is not a FieldMask message.
    """
    _CheckFieldMaskMessage(mask1)
    _CheckFieldMaskMessage(mask2)
    base_tree = _FieldMaskTree(mask1)
    intersection = _FieldMaskTree()
    for path in mask2.paths:
      base_tree.IntersectPath(path, intersection)
    intersection.ToFieldMask(self)

  def MergeMessage(
      self, source, destination,
      replace_message_field=False, replace_repeated_field=False):
    """Merges fields specified in FieldMask from source to destination.

    Args:
      source: Source message.
      destination: The destination message to be merged into.
      replace_message_field: Replace message field if True. Merge message
        field if False.
      replace_repeated_field: Replace repeated field if True. Append
        elements of repeated field if False.
    """
    _FieldMaskTree(self).MergeMessage(
        source, destination, replace_message_field, replace_repeated_field)
def _IsValidPath(message_descriptor, path):
  """Checks whether a dotted field path is valid for a Message Descriptor.

  Every non-terminal segment must name a singular message field; the final
  segment may name any field of the message reached so far.
  """
  segments = path.split('.')
  leaf = segments.pop()
  for segment in segments:
    field = message_descriptor.fields_by_name.get(segment)
    if field is None:
      return False
    # Sub-paths may only descend through singular (non-repeated) messages.
    if field.label == FieldDescriptor.LABEL_REPEATED:
      return False
    if field.type != FieldDescriptor.TYPE_MESSAGE:
      return False
    message_descriptor = field.message_type
  return leaf in message_descriptor.fields_by_name
def _CheckFieldMaskMessage(message):
"""Raises ValueError if message is not a FieldMask."""
message_descriptor = message.DESCRIPTOR
if (message_descriptor.name != 'FieldMask' or
message_descriptor.file.name != 'google/protobuf/field_mask.proto'):
raise ValueError('Message {0} is not a FieldMask.'.format(
message_descriptor.full_name))
def _SnakeCaseToCamelCase(path_name):
"""Converts a path name from snake_case to camelCase."""
result = []
after_underscore = False
for c in path_name:
if c.isupper():
raise ValueError(
'Fail to print FieldMask to Json string: Path name '
'{0} must not contain uppercase letters.'.format(path_name))
if after_underscore:
if c.islower():
result.append(c.upper())
after_underscore = False
else:
raise ValueError(
'Fail to print FieldMask to Json string: The '
'character after a "_" must be a lowercase letter '
'in path name {0}.'.format(path_name))
elif c == '_':
after_underscore = True
else:
result += c
if after_underscore:
raise ValueError('Fail to print FieldMask to Json string: Trailing "_" '
'in path name {0}.'.format(path_name))
return ''.join(result)
def _CamelCaseToSnakeCase(path_name):
"""Converts a field name from camelCase to snake_case."""
result = []
for c in path_name:
if c == '_':
raise ValueError('Fail to parse FieldMask: Path name '
'{0} must not contain "_"s.'.format(path_name))
if c.isupper():
result += '_'
result += c.lower()
else:
result += c
return ''.join(result)
class _FieldMaskTree(object):
"""Represents a FieldMask in a tree structure.
For example, given a FieldMask "foo.bar,foo.baz,bar.baz",
the FieldMaskTree will be:
[_root] -+- foo -+- bar
| |
| +- baz
|
+- bar --- baz
In the tree, each leaf node represents a field path.
"""
__slots__ = ('_root',)
def __init__(self, field_mask=None):
"""Initializes the tree by FieldMask."""
self._root = {}
if field_mask:
self.MergeFromFieldMask(field_mask)
def MergeFromFieldMask(self, field_mask):
"""Merges a FieldMask to the tree."""
for path in field_mask.paths:
self.AddPath(path)
def AddPath(self, path):
"""Adds a field path into the tree.
If the field path to add is a sub-path of an existing field path
in the tree (i.e., a leaf node), it means the tree already matches
the given path so nothing will be added to the tree. If the path
matches an existing non-leaf node in the tree, that non-leaf node
will be turned into a leaf node with all its children removed because
the path matches all the node's children. Otherwise, a new path will
be added.
Args:
path: The field path to add.
"""
node = self._root
for name in path.split('.'):
if name not in node:
node[name] = {}
elif not node[name]:
# Pre-existing empty node implies we already have this entire tree.
return
node = node[name]
# Remove any sub-trees we might have had.
node.clear()
def ToFieldMask(self, field_mask):
"""Converts the tree to a FieldMask."""
field_mask.Clear()
_AddFieldPaths(self._root, '', field_mask)
def IntersectPath(self, path, intersection):
"""Calculates the intersection part of a field path with this tree.
Args:
path: The field path to calculates.
intersection: The out tree to record the intersection part.
"""
node = self._root
for name in path.split('.'):
if name not in node:
return
elif not node[name]:
intersection.AddPath(path)
return
node = node[name]
intersection.AddLeafNodes(path, node)
def AddLeafNodes(self, prefix, node):
"""Adds leaf nodes begin with prefix to this tree."""
if not node:
self.AddPath(prefix)
for name in node:
child_path = prefix + '.' + name
self.AddLeafNodes(child_path, node[name])
def MergeMessage(
self, source, destination,
replace_message, replace_repeated):
"""Merge all fields specified by this tree from source to destination."""
_MergeMessage(
self._root, source, destination, replace_message, replace_repeated)
def _StrConvert(value):
"""Converts value to str if it is not."""
# This file is imported by c extension and some methods like ClearField
# requires string for the field name. py2/py3 has different text
# type and may use unicode.
if not isinstance(value, str):
return value.encode('utf-8')
return value
def _MergeMessage(
node, source, destination, replace_message, replace_repeated):
"""Merge all fields specified by a sub-tree from source to destination."""
source_descriptor = source.DESCRIPTOR
for name in node:
child = node[name]
field = source_descriptor.fields_by_name[name]
if field is None:
raise ValueError('Error: Can\'t find field {0} in message {1}.'.format(
name, source_descriptor.full_name))
if child:
# Sub-paths are only allowed for singular message fields.
if (field.label == FieldDescriptor.LABEL_REPEATED or
field.cpp_type != FieldDescriptor.CPPTYPE_MESSAGE):
raise ValueError('Error: Field {0} in message {1} is not a singular '
'message field and cannot have sub-fields.'.format(
name, source_descriptor.full_name))
if source.HasField(name):
_MergeMessage(
child, getattr(source, name), getattr(destination, name),
replace_message, replace_repeated)
continue
if field.label == FieldDescriptor.LABEL_REPEATED:
if replace_repeated:
destination.ClearField(_StrConvert(name))
repeated_source = getattr(source, name)
repeated_destination = getattr(destination, name)
repeated_destination.MergeFrom(repeated_source)
else:
if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
if replace_message:
destination.ClearField(_StrConvert(name))
if source.HasField(name):
getattr(destination, name).MergeFrom(getattr(source, name))
else:
setattr(destination, name, getattr(source, name))
def _AddFieldPaths(node, prefix, field_mask):
"""Adds the field paths descended from node to field_mask."""
if not node and prefix:
field_mask.paths.append(prefix)
return
for name in sorted(node):
if prefix:
child_path = prefix + '.' + name
else:
child_path = name
_AddFieldPaths(node[name], child_path, field_mask)

@ -0,0 +1,400 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Test for google.protobuf.internal.field_mask."""
import unittest
from google.protobuf import field_mask_pb2
from google.protobuf import map_unittest_pb2
from google.protobuf import unittest_pb2
from google.protobuf.internal import field_mask
from google.protobuf.internal import test_util
from google.protobuf import descriptor
class FieldMaskTest(unittest.TestCase):
# Covers JSON formatting/parsing, descriptor validation, canonicalization,
# union/intersection, and merge behavior of the FieldMask well-known type.
# Requires the generated pb2 modules (field_mask_pb2, unittest_pb2,
# map_unittest_pb2) imported at module scope.
# Round-trips between `paths` and the comma-separated camelCase JSON form.
def testStringFormat(self):
mask = field_mask_pb2.FieldMask()
self.assertEqual('', mask.ToJsonString())
mask.paths.append('foo')
self.assertEqual('foo', mask.ToJsonString())
mask.paths.append('bar')
self.assertEqual('foo,bar', mask.ToJsonString())
mask.FromJsonString('')
self.assertEqual('', mask.ToJsonString())
mask.FromJsonString('foo')
self.assertEqual(['foo'], mask.paths)
mask.FromJsonString('foo,bar')
self.assertEqual(['foo', 'bar'], mask.paths)
# Test camel case
mask.Clear()
mask.paths.append('foo_bar')
self.assertEqual('fooBar', mask.ToJsonString())
mask.paths.append('bar_quz')
self.assertEqual('fooBar,barQuz', mask.ToJsonString())
mask.FromJsonString('')
self.assertEqual('', mask.ToJsonString())
self.assertEqual([], mask.paths)
mask.FromJsonString('fooBar')
self.assertEqual(['foo_bar'], mask.paths)
mask.FromJsonString('fooBar,barQuz')
self.assertEqual(['foo_bar', 'bar_quz'], mask.paths)
# AllFieldsFromDescriptor must list every direct field of TestAllTypes.
def testDescriptorToFieldMask(self):
mask = field_mask_pb2.FieldMask()
msg_descriptor = unittest_pb2.TestAllTypes.DESCRIPTOR
mask.AllFieldsFromDescriptor(msg_descriptor)
# 76 is the number of direct fields in TestAllTypes — TODO confirm if the
# proto changes.
self.assertEqual(76, len(mask.paths))
self.assertTrue(mask.IsValidForDescriptor(msg_descriptor))
for field in msg_descriptor.fields:
self.assertTrue(field.name in mask.paths)
# Valid paths descend only through singular message fields.
def testIsValidForDescriptor(self):
msg_descriptor = unittest_pb2.TestAllTypes.DESCRIPTOR
# Empty mask
mask = field_mask_pb2.FieldMask()
self.assertTrue(mask.IsValidForDescriptor(msg_descriptor))
# All fields from descriptor
mask.AllFieldsFromDescriptor(msg_descriptor)
self.assertTrue(mask.IsValidForDescriptor(msg_descriptor))
# Child under optional message
mask.paths.append('optional_nested_message.bb')
self.assertTrue(mask.IsValidForDescriptor(msg_descriptor))
# Repeated field is only allowed in the last position of path
mask.paths.append('repeated_nested_message.bb')
self.assertFalse(mask.IsValidForDescriptor(msg_descriptor))
# Invalid top level field
mask = field_mask_pb2.FieldMask()
mask.paths.append('xxx')
self.assertFalse(mask.IsValidForDescriptor(msg_descriptor))
# Invalid field in root
mask = field_mask_pb2.FieldMask()
mask.paths.append('xxx.zzz')
self.assertFalse(mask.IsValidForDescriptor(msg_descriptor))
# Invalid field in internal node
mask = field_mask_pb2.FieldMask()
mask.paths.append('optional_nested_message.xxx.zzz')
self.assertFalse(mask.IsValidForDescriptor(msg_descriptor))
# Invalid field in leaf
mask = field_mask_pb2.FieldMask()
mask.paths.append('optional_nested_message.xxx')
self.assertFalse(mask.IsValidForDescriptor(msg_descriptor))
# Canonical form: sorted, deduplicated, covered sub-paths removed.
def testCanonicalFrom(self):
mask = field_mask_pb2.FieldMask()
out_mask = field_mask_pb2.FieldMask()
# Paths will be sorted.
mask.FromJsonString('baz.quz,bar,foo')
out_mask.CanonicalFormFromMask(mask)
self.assertEqual('bar,baz.quz,foo', out_mask.ToJsonString())
# Duplicated paths will be removed.
mask.FromJsonString('foo,bar,foo')
out_mask.CanonicalFormFromMask(mask)
self.assertEqual('bar,foo', out_mask.ToJsonString())
# Sub-paths of other paths will be removed.
mask.FromJsonString('foo.b1,bar.b1,foo.b2,bar')
out_mask.CanonicalFormFromMask(mask)
self.assertEqual('bar,foo.b1,foo.b2', out_mask.ToJsonString())
# Test more deeply nested cases.
mask.FromJsonString(
'foo.bar.baz1,foo.bar.baz2.quz,foo.bar.baz2')
out_mask.CanonicalFormFromMask(mask)
self.assertEqual('foo.bar.baz1,foo.bar.baz2',
out_mask.ToJsonString())
mask.FromJsonString(
'foo.bar.baz1,foo.bar.baz2,foo.bar.baz2.quz')
out_mask.CanonicalFormFromMask(mask)
self.assertEqual('foo.bar.baz1,foo.bar.baz2',
out_mask.ToJsonString())
mask.FromJsonString(
'foo.bar.baz1,foo.bar.baz2,foo.bar.baz2.quz,foo.bar')
out_mask.CanonicalFormFromMask(mask)
self.assertEqual('foo.bar', out_mask.ToJsonString())
mask.FromJsonString(
'foo.bar.baz1,foo.bar.baz2,foo.bar.baz2.quz,foo')
out_mask.CanonicalFormFromMask(mask)
self.assertEqual('foo', out_mask.ToJsonString())
# Union produces the canonical merged mask; non-mask inputs raise.
def testUnion(self):
mask1 = field_mask_pb2.FieldMask()
mask2 = field_mask_pb2.FieldMask()
out_mask = field_mask_pb2.FieldMask()
mask1.FromJsonString('foo,baz')
mask2.FromJsonString('bar,quz')
out_mask.Union(mask1, mask2)
self.assertEqual('bar,baz,foo,quz', out_mask.ToJsonString())
# Overlap with duplicated paths.
mask1.FromJsonString('foo,baz.bb')
mask2.FromJsonString('baz.bb,quz')
out_mask.Union(mask1, mask2)
self.assertEqual('baz.bb,foo,quz', out_mask.ToJsonString())
# Overlap with paths covering some other paths.
mask1.FromJsonString('foo.bar.baz,quz')
mask2.FromJsonString('foo.bar,bar')
out_mask.Union(mask1, mask2)
self.assertEqual('bar,foo.bar,quz', out_mask.ToJsonString())
src = unittest_pb2.TestAllTypes()
with self.assertRaises(ValueError):
out_mask.Union(src, mask2)
# Intersection keeps the more specific of two overlapping paths.
def testIntersect(self):
mask1 = field_mask_pb2.FieldMask()
mask2 = field_mask_pb2.FieldMask()
out_mask = field_mask_pb2.FieldMask()
# Test cases without overlapping.
mask1.FromJsonString('foo,baz')
mask2.FromJsonString('bar,quz')
out_mask.Intersect(mask1, mask2)
self.assertEqual('', out_mask.ToJsonString())
self.assertEqual(len(out_mask.paths), 0)
self.assertEqual(out_mask.paths, [])
# Overlap with duplicated paths.
mask1.FromJsonString('foo,baz.bb')
mask2.FromJsonString('baz.bb,quz')
out_mask.Intersect(mask1, mask2)
self.assertEqual('baz.bb', out_mask.ToJsonString())
# Overlap with paths covering some other paths.
mask1.FromJsonString('foo.bar.baz,quz')
mask2.FromJsonString('foo.bar,bar')
out_mask.Intersect(mask1, mask2)
self.assertEqual('foo.bar.baz', out_mask.ToJsonString())
mask1.FromJsonString('foo.bar,bar')
mask2.FromJsonString('foo.bar.baz,quz')
out_mask.Intersect(mask1, mask2)
self.assertEqual('foo.bar.baz', out_mask.ToJsonString())
# Intersect '' with ''
mask1.Clear()
mask2.Clear()
mask1.paths.append('')
mask2.paths.append('')
self.assertEqual(mask1.paths, [''])
self.assertEqual('', mask1.ToJsonString())
out_mask.Intersect(mask1, mask2)
self.assertEqual(out_mask.paths, [])
# Per-field merging, nested masks, and replace-vs-merge option semantics.
def testMergeMessageWithoutMapFields(self):
# Test merge one field.
src = unittest_pb2.TestAllTypes()
test_util.SetAllFields(src)
for field in src.DESCRIPTOR.fields:
if field.containing_oneof:
continue
field_name = field.name
dst = unittest_pb2.TestAllTypes()
# Only set one path to mask.
mask = field_mask_pb2.FieldMask()
mask.paths.append(field_name)
mask.MergeMessage(src, dst)
# The expected result message.
msg = unittest_pb2.TestAllTypes()
if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
repeated_src = getattr(src, field_name)
repeated_msg = getattr(msg, field_name)
if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
for item in repeated_src:
repeated_msg.add().CopyFrom(item)
else:
repeated_msg.extend(repeated_src)
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
getattr(msg, field_name).CopyFrom(getattr(src, field_name))
else:
setattr(msg, field_name, getattr(src, field_name))
# Only field specified in mask is merged.
self.assertEqual(msg, dst)
# Test merge nested fields.
nested_src = unittest_pb2.NestedTestAllTypes()
nested_dst = unittest_pb2.NestedTestAllTypes()
nested_src.child.payload.optional_int32 = 1234
nested_src.child.child.payload.optional_int32 = 5678
mask = field_mask_pb2.FieldMask()
mask.FromJsonString('child.payload')
mask.MergeMessage(nested_src, nested_dst)
self.assertEqual(1234, nested_dst.child.payload.optional_int32)
self.assertEqual(0, nested_dst.child.child.payload.optional_int32)
mask.FromJsonString('child.child.payload')
mask.MergeMessage(nested_src, nested_dst)
self.assertEqual(1234, nested_dst.child.payload.optional_int32)
self.assertEqual(5678, nested_dst.child.child.payload.optional_int32)
nested_dst.Clear()
mask.FromJsonString('child.child.payload')
mask.MergeMessage(nested_src, nested_dst)
self.assertEqual(0, nested_dst.child.payload.optional_int32)
self.assertEqual(5678, nested_dst.child.child.payload.optional_int32)
nested_dst.Clear()
mask.FromJsonString('child')
mask.MergeMessage(nested_src, nested_dst)
self.assertEqual(1234, nested_dst.child.payload.optional_int32)
self.assertEqual(5678, nested_dst.child.child.payload.optional_int32)
# Test MergeOptions.
nested_dst.Clear()
nested_dst.child.payload.optional_int64 = 4321
# Message fields will be merged by default.
mask.FromJsonString('child.payload')
mask.MergeMessage(nested_src, nested_dst)
self.assertEqual(1234, nested_dst.child.payload.optional_int32)
self.assertEqual(4321, nested_dst.child.payload.optional_int64)
# Change the behavior to replace message fields.
mask.FromJsonString('child.payload')
mask.MergeMessage(nested_src, nested_dst, True, False)
self.assertEqual(1234, nested_dst.child.payload.optional_int32)
self.assertEqual(0, nested_dst.child.payload.optional_int64)
# By default, fields missing in source are not cleared in destination.
nested_dst.payload.optional_int32 = 1234
self.assertTrue(nested_dst.HasField('payload'))
mask.FromJsonString('payload')
mask.MergeMessage(nested_src, nested_dst)
self.assertTrue(nested_dst.HasField('payload'))
# But they are cleared when replacing message fields.
nested_dst.Clear()
nested_dst.payload.optional_int32 = 1234
mask.FromJsonString('payload')
mask.MergeMessage(nested_src, nested_dst, True, False)
self.assertFalse(nested_dst.HasField('payload'))
nested_src.payload.repeated_int32.append(1234)
nested_dst.payload.repeated_int32.append(5678)
# Repeated fields will be appended by default.
mask.FromJsonString('payload.repeatedInt32')
mask.MergeMessage(nested_src, nested_dst)
self.assertEqual(2, len(nested_dst.payload.repeated_int32))
self.assertEqual(5678, nested_dst.payload.repeated_int32[0])
self.assertEqual(1234, nested_dst.payload.repeated_int32[1])
# Change the behavior to replace repeated fields.
mask.FromJsonString('payload.repeatedInt32')
mask.MergeMessage(nested_src, nested_dst, False, True)
self.assertEqual(1, len(nested_dst.payload.repeated_int32))
self.assertEqual(1234, nested_dst.payload.repeated_int32[0])
# Test Merge oneof field.
new_msg = unittest_pb2.TestOneof2()
dst = unittest_pb2.TestOneof2()
dst.foo_message.moo_int = 1
mask = field_mask_pb2.FieldMask()
mask.FromJsonString('fooMessage,fooLazyMessage.mooInt')
mask.MergeMessage(new_msg, dst)
self.assertTrue(dst.HasField('foo_message'))
self.assertFalse(dst.HasField('foo_lazy_message'))
# Map fields are replaced wholesale when selected by the mask.
def testMergeMessageWithMapField(self):
empty_map = map_unittest_pb2.TestRecursiveMapMessage()
src_level_2 = map_unittest_pb2.TestRecursiveMapMessage()
src_level_2.a['src level 2'].CopyFrom(empty_map)
src = map_unittest_pb2.TestRecursiveMapMessage()
src.a['common key'].CopyFrom(src_level_2)
src.a['src level 1'].CopyFrom(src_level_2)
dst_level_2 = map_unittest_pb2.TestRecursiveMapMessage()
dst_level_2.a['dst level 2'].CopyFrom(empty_map)
dst = map_unittest_pb2.TestRecursiveMapMessage()
dst.a['common key'].CopyFrom(dst_level_2)
dst.a['dst level 1'].CopyFrom(empty_map)
mask = field_mask_pb2.FieldMask()
mask.FromJsonString('a')
mask.MergeMessage(src, dst)
# map from dst is replaced with map from src.
self.assertEqual(dst.a['common key'], src_level_2)
self.assertEqual(dst.a['src level 1'], src_level_2)
self.assertEqual(dst.a['dst level 1'], empty_map)
# Sub-fields under a non-message field must raise ValueError.
def testMergeErrors(self):
src = unittest_pb2.TestAllTypes()
dst = unittest_pb2.TestAllTypes()
mask = field_mask_pb2.FieldMask()
test_util.SetAllFields(src)
mask.FromJsonString('optionalInt32.field')
with self.assertRaises(ValueError) as e:
mask.MergeMessage(src, dst)
self.assertEqual('Error: Field optional_int32 in message '
'protobuf_unittest.TestAllTypes is not a singular '
'message field and cannot have sub-fields.',
str(e.exception))
# Direct tests of the private snake->camel conversion helper.
def testSnakeCaseToCamelCase(self):
self.assertEqual('fooBar',
field_mask._SnakeCaseToCamelCase('foo_bar'))
self.assertEqual('FooBar',
field_mask._SnakeCaseToCamelCase('_foo_bar'))
self.assertEqual('foo3Bar',
field_mask._SnakeCaseToCamelCase('foo3_bar'))
# No uppercase letter is allowed.
self.assertRaisesRegex(
ValueError,
'Fail to print FieldMask to Json string: Path name Foo must '
'not contain uppercase letters.',
field_mask._SnakeCaseToCamelCase, 'Foo')
# Any character after a "_" must be a lowercase letter.
# 1. "_" cannot be followed by another "_".
# 2. "_" cannot be followed by a digit.
# 3. "_" cannot appear as the last character.
self.assertRaisesRegex(
ValueError,
'Fail to print FieldMask to Json string: The character after a '
'"_" must be a lowercase letter in path name foo__bar.',
field_mask._SnakeCaseToCamelCase, 'foo__bar')
self.assertRaisesRegex(
ValueError,
'Fail to print FieldMask to Json string: The character after a '
'"_" must be a lowercase letter in path name foo_3bar.',
field_mask._SnakeCaseToCamelCase, 'foo_3bar')
self.assertRaisesRegex(
ValueError,
'Fail to print FieldMask to Json string: Trailing "_" in path '
'name foo_bar_.', field_mask._SnakeCaseToCamelCase, 'foo_bar_')
# Direct tests of the private camel->snake conversion helper.
def testCamelCaseToSnakeCase(self):
self.assertEqual('foo_bar',
field_mask._CamelCaseToSnakeCase('fooBar'))
self.assertEqual('_foo_bar',
field_mask._CamelCaseToSnakeCase('FooBar'))
self.assertEqual('foo3_bar',
field_mask._CamelCaseToSnakeCase('foo3Bar'))
self.assertRaisesRegex(
ValueError,
'Fail to parse FieldMask: Path name foo_bar must not contain "_"s.',
field_mask._CamelCaseToSnakeCase, 'foo_bar')
# Standard unittest entry point so the test file can be run directly.
if __name__ == '__main__':
unittest.main()

@ -30,7 +30,6 @@
syntax = "proto2";
package google.protobuf.python.internal;
message TestEnumValues {
@ -53,4 +52,3 @@ message TestMissingEnumValues {
message JustString {
required string dummy = 1;
}

@ -44,7 +44,9 @@ import calendar
import collections.abc
import datetime
from google.protobuf.descriptor import FieldDescriptor
from google.protobuf.internal import field_mask
FieldMask = field_mask.FieldMask
_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S'
_NANOS_PER_SECOND = 1000000000
@ -430,306 +432,6 @@ def _RoundTowardZero(value, divider):
return result
class FieldMask(object):
  """Class for FieldMask message type."""

  __slots__ = ()

  def ToJsonString(self):
    """Converts FieldMask to string according to proto3 JSON spec."""
    return ','.join(_SnakeCaseToCamelCase(path) for path in self.paths)

  def FromJsonString(self, value):
    """Converts string to FieldMask according to proto3 JSON spec."""
    if not isinstance(value, str):
      raise ValueError('FieldMask JSON value not a string: {!r}'.format(value))
    self.Clear()
    if value:
      self.paths.extend(
          _CamelCaseToSnakeCase(path) for path in value.split(','))

  def IsValidForDescriptor(self, message_descriptor):
    """Checks whether the FieldMask is valid for Message Descriptor."""
    return all(
        _IsValidPath(message_descriptor, path) for path in self.paths)

  def AllFieldsFromDescriptor(self, message_descriptor):
    """Gets all direct fields of Message Descriptor to FieldMask."""
    self.Clear()
    self.paths.extend(field.name for field in message_descriptor.fields)

  def CanonicalFormFromMask(self, mask):
    """Converts a FieldMask to the canonical form.

    Removes paths that are covered by another path. For example,
    "foo.bar" is covered by "foo" and will be removed if "foo"
    is also in the FieldMask. Then sorts all paths in alphabetical order.

    Args:
      mask: The original FieldMask to be converted.
    """
    _FieldMaskTree(mask).ToFieldMask(self)

  def Union(self, mask1, mask2):
    """Merges mask1 and mask2 into this FieldMask."""
    _CheckFieldMaskMessage(mask1)
    _CheckFieldMaskMessage(mask2)
    merged_tree = _FieldMaskTree(mask1)
    merged_tree.MergeFromFieldMask(mask2)
    merged_tree.ToFieldMask(self)

  def Intersect(self, mask1, mask2):
    """Intersects mask1 and mask2 into this FieldMask."""
    _CheckFieldMaskMessage(mask1)
    _CheckFieldMaskMessage(mask2)
    left_tree = _FieldMaskTree(mask1)
    intersection = _FieldMaskTree()
    for path in mask2.paths:
      left_tree.IntersectPath(path, intersection)
    intersection.ToFieldMask(self)

  def MergeMessage(
      self, source, destination,
      replace_message_field=False, replace_repeated_field=False):
    """Merges fields specified in FieldMask from source to destination.

    Args:
      source: Source message.
      destination: The destination message to be merged into.
      replace_message_field: Replace message field if True. Merge message
          field if False.
      replace_repeated_field: Replace repeated field if True. Append
          elements of repeated field if False.
    """
    _FieldMaskTree(self).MergeMessage(
        source, destination, replace_message_field, replace_repeated_field)
def _IsValidPath(message_descriptor, path):
"""Checks whether the path is valid for Message Descriptor."""
parts = path.split('.')
last = parts.pop()
for name in parts:
field = message_descriptor.fields_by_name.get(name)
if (field is None or
field.label == FieldDescriptor.LABEL_REPEATED or
field.type != FieldDescriptor.TYPE_MESSAGE):
return False
message_descriptor = field.message_type
return last in message_descriptor.fields_by_name
def _CheckFieldMaskMessage(message):
"""Raises ValueError if message is not a FieldMask."""
message_descriptor = message.DESCRIPTOR
if (message_descriptor.name != 'FieldMask' or
message_descriptor.file.name != 'google/protobuf/field_mask.proto'):
raise ValueError('Message {0} is not a FieldMask.'.format(
message_descriptor.full_name))
def _SnakeCaseToCamelCase(path_name):
"""Converts a path name from snake_case to camelCase."""
result = []
after_underscore = False
for c in path_name:
if c.isupper():
raise ValueError(
'Fail to print FieldMask to Json string: Path name '
'{0} must not contain uppercase letters.'.format(path_name))
if after_underscore:
if c.islower():
result.append(c.upper())
after_underscore = False
else:
raise ValueError(
'Fail to print FieldMask to Json string: The '
'character after a "_" must be a lowercase letter '
'in path name {0}.'.format(path_name))
elif c == '_':
after_underscore = True
else:
result += c
if after_underscore:
raise ValueError('Fail to print FieldMask to Json string: Trailing "_" '
'in path name {0}.'.format(path_name))
return ''.join(result)
def _CamelCaseToSnakeCase(path_name):
"""Converts a field name from camelCase to snake_case."""
result = []
for c in path_name:
if c == '_':
raise ValueError('Fail to parse FieldMask: Path name '
'{0} must not contain "_"s.'.format(path_name))
if c.isupper():
result += '_'
result += c.lower()
else:
result += c
return ''.join(result)
class _FieldMaskTree(object):
"""Represents a FieldMask in a tree structure.
For example, given a FieldMask "foo.bar,foo.baz,bar.baz",
the FieldMaskTree will be:
[_root] -+- foo -+- bar
| |
| +- baz
|
+- bar --- baz
In the tree, each leaf node represents a field path.
"""
__slots__ = ('_root',)
def __init__(self, field_mask=None):
"""Initializes the tree by FieldMask."""
self._root = {}
if field_mask:
self.MergeFromFieldMask(field_mask)
def MergeFromFieldMask(self, field_mask):
"""Merges a FieldMask to the tree."""
for path in field_mask.paths:
self.AddPath(path)
def AddPath(self, path):
"""Adds a field path into the tree.
If the field path to add is a sub-path of an existing field path
in the tree (i.e., a leaf node), it means the tree already matches
the given path so nothing will be added to the tree. If the path
matches an existing non-leaf node in the tree, that non-leaf node
will be turned into a leaf node with all its children removed because
the path matches all the node's children. Otherwise, a new path will
be added.
Args:
path: The field path to add.
"""
node = self._root
for name in path.split('.'):
if name not in node:
node[name] = {}
elif not node[name]:
# Pre-existing empty node implies we already have this entire tree.
return
node = node[name]
# Remove any sub-trees we might have had.
node.clear()
def ToFieldMask(self, field_mask):
"""Converts the tree to a FieldMask."""
field_mask.Clear()
_AddFieldPaths(self._root, '', field_mask)
def IntersectPath(self, path, intersection):
"""Calculates the intersection part of a field path with this tree.
Args:
path: The field path to calculates.
intersection: The out tree to record the intersection part.
"""
node = self._root
for name in path.split('.'):
if name not in node:
return
elif not node[name]:
intersection.AddPath(path)
return
node = node[name]
intersection.AddLeafNodes(path, node)
def AddLeafNodes(self, prefix, node):
"""Adds leaf nodes begin with prefix to this tree."""
if not node:
self.AddPath(prefix)
for name in node:
child_path = prefix + '.' + name
self.AddLeafNodes(child_path, node[name])
def MergeMessage(
self, source, destination,
replace_message, replace_repeated):
"""Merge all fields specified by this tree from source to destination."""
_MergeMessage(
self._root, source, destination, replace_message, replace_repeated)
def _StrConvert(value):
"""Converts value to str if it is not."""
# This file is imported by c extension and some methods like ClearField
# requires string for the field name. py2/py3 has different text
# type and may use unicode.
if not isinstance(value, str):
return value.encode('utf-8')
return value
def _MergeMessage(
node, source, destination, replace_message, replace_repeated):
"""Merge all fields specified by a sub-tree from source to destination."""
source_descriptor = source.DESCRIPTOR
for name in node:
child = node[name]
field = source_descriptor.fields_by_name[name]
if field is None:
raise ValueError('Error: Can\'t find field {0} in message {1}.'.format(
name, source_descriptor.full_name))
if child:
# Sub-paths are only allowed for singular message fields.
if (field.label == FieldDescriptor.LABEL_REPEATED or
field.cpp_type != FieldDescriptor.CPPTYPE_MESSAGE):
raise ValueError('Error: Field {0} in message {1} is not a singular '
'message field and cannot have sub-fields.'.format(
name, source_descriptor.full_name))
if source.HasField(name):
_MergeMessage(
child, getattr(source, name), getattr(destination, name),
replace_message, replace_repeated)
continue
if field.label == FieldDescriptor.LABEL_REPEATED:
if replace_repeated:
destination.ClearField(_StrConvert(name))
repeated_source = getattr(source, name)
repeated_destination = getattr(destination, name)
repeated_destination.MergeFrom(repeated_source)
else:
if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
if replace_message:
destination.ClearField(_StrConvert(name))
if source.HasField(name):
getattr(destination, name).MergeFrom(getattr(source, name))
else:
setattr(destination, name, getattr(source, name))
def _AddFieldPaths(node, prefix, field_mask):
"""Adds the field paths descended from node to field_mask."""
if not node and prefix:
field_mask.paths.append(prefix)
return
for name in sorted(node):
if prefix:
child_path = prefix + '.' + name
else:
child_path = name
_AddFieldPaths(node[name], child_path, field_mask)
def _SetStructValue(struct_value, value):
if value is None:
struct_value.null_value = 0

@ -38,15 +38,11 @@ import unittest
from google.protobuf import any_pb2
from google.protobuf import duration_pb2
from google.protobuf import field_mask_pb2
from google.protobuf import struct_pb2
from google.protobuf import timestamp_pb2
from google.protobuf import map_unittest_pb2
from google.protobuf import unittest_pb2
from google.protobuf.internal import any_test_pb2
from google.protobuf.internal import test_util
from google.protobuf.internal import well_known_types
from google.protobuf import descriptor
from google.protobuf import text_format
from google.protobuf.internal import _parameterized
@ -390,362 +386,6 @@ class TimeUtilTest(TimeUtilTestBase):
message.ToJsonString)
# Tests for the FieldMask well-known-type helpers (ToJsonString,
# FromJsonString, descriptor validation, canonicalization, Union/Intersect,
# and MergeMessage). NOTE(review): this is the legacy copy that exercises the
# helpers via well_known_types; the logic appears duplicated in
# field_mask_test.py against the field_mask module.
class FieldMaskTest(unittest.TestCase):
# Round-trips paths through ToJsonString/FromJsonString, including the
# snake_case <-> camelCase conversion.
def testStringFormat(self):
mask = field_mask_pb2.FieldMask()
self.assertEqual('', mask.ToJsonString())
mask.paths.append('foo')
self.assertEqual('foo', mask.ToJsonString())
mask.paths.append('bar')
self.assertEqual('foo,bar', mask.ToJsonString())
mask.FromJsonString('')
self.assertEqual('', mask.ToJsonString())
mask.FromJsonString('foo')
self.assertEqual(['foo'], mask.paths)
mask.FromJsonString('foo,bar')
self.assertEqual(['foo', 'bar'], mask.paths)
# Test camel case
mask.Clear()
mask.paths.append('foo_bar')
self.assertEqual('fooBar', mask.ToJsonString())
mask.paths.append('bar_quz')
self.assertEqual('fooBar,barQuz', mask.ToJsonString())
mask.FromJsonString('')
self.assertEqual('', mask.ToJsonString())
self.assertEqual([], mask.paths)
mask.FromJsonString('fooBar')
self.assertEqual(['foo_bar'], mask.paths)
mask.FromJsonString('fooBar,barQuz')
self.assertEqual(['foo_bar', 'bar_quz'], mask.paths)
# AllFieldsFromDescriptor must produce one path per direct field of
# TestAllTypes (76 fields at the time this test was written).
def testDescriptorToFieldMask(self):
mask = field_mask_pb2.FieldMask()
msg_descriptor = unittest_pb2.TestAllTypes.DESCRIPTOR
mask.AllFieldsFromDescriptor(msg_descriptor)
self.assertEqual(76, len(mask.paths))
self.assertTrue(mask.IsValidForDescriptor(msg_descriptor))
for field in msg_descriptor.fields:
self.assertTrue(field.name in mask.paths)
# Valid and invalid path shapes for IsValidForDescriptor.
def testIsValidForDescriptor(self):
msg_descriptor = unittest_pb2.TestAllTypes.DESCRIPTOR
# Empty mask
mask = field_mask_pb2.FieldMask()
self.assertTrue(mask.IsValidForDescriptor(msg_descriptor))
# All fields from descriptor
mask.AllFieldsFromDescriptor(msg_descriptor)
self.assertTrue(mask.IsValidForDescriptor(msg_descriptor))
# Child under optional message
mask.paths.append('optional_nested_message.bb')
self.assertTrue(mask.IsValidForDescriptor(msg_descriptor))
# Repeated field is only allowed in the last position of path
mask.paths.append('repeated_nested_message.bb')
self.assertFalse(mask.IsValidForDescriptor(msg_descriptor))
# Invalid top level field
mask = field_mask_pb2.FieldMask()
mask.paths.append('xxx')
self.assertFalse(mask.IsValidForDescriptor(msg_descriptor))
# Invalid field in root
mask = field_mask_pb2.FieldMask()
mask.paths.append('xxx.zzz')
self.assertFalse(mask.IsValidForDescriptor(msg_descriptor))
# Invalid field in internal node
mask = field_mask_pb2.FieldMask()
mask.paths.append('optional_nested_message.xxx.zzz')
self.assertFalse(mask.IsValidForDescriptor(msg_descriptor))
# Invalid field in leaf
mask = field_mask_pb2.FieldMask()
mask.paths.append('optional_nested_message.xxx')
self.assertFalse(mask.IsValidForDescriptor(msg_descriptor))
# CanonicalFormFromMask: sorting, de-duplication, and removal of paths
# covered by an ancestor path.
def testCanonicalFrom(self):
mask = field_mask_pb2.FieldMask()
out_mask = field_mask_pb2.FieldMask()
# Paths will be sorted.
mask.FromJsonString('baz.quz,bar,foo')
out_mask.CanonicalFormFromMask(mask)
self.assertEqual('bar,baz.quz,foo', out_mask.ToJsonString())
# Duplicated paths will be removed.
mask.FromJsonString('foo,bar,foo')
out_mask.CanonicalFormFromMask(mask)
self.assertEqual('bar,foo', out_mask.ToJsonString())
# Sub-paths of other paths will be removed.
mask.FromJsonString('foo.b1,bar.b1,foo.b2,bar')
out_mask.CanonicalFormFromMask(mask)
self.assertEqual('bar,foo.b1,foo.b2', out_mask.ToJsonString())
# Test more deeply nested cases.
mask.FromJsonString(
'foo.bar.baz1,foo.bar.baz2.quz,foo.bar.baz2')
out_mask.CanonicalFormFromMask(mask)
self.assertEqual('foo.bar.baz1,foo.bar.baz2',
out_mask.ToJsonString())
mask.FromJsonString(
'foo.bar.baz1,foo.bar.baz2,foo.bar.baz2.quz')
out_mask.CanonicalFormFromMask(mask)
self.assertEqual('foo.bar.baz1,foo.bar.baz2',
out_mask.ToJsonString())
mask.FromJsonString(
'foo.bar.baz1,foo.bar.baz2,foo.bar.baz2.quz,foo.bar')
out_mask.CanonicalFormFromMask(mask)
self.assertEqual('foo.bar', out_mask.ToJsonString())
mask.FromJsonString(
'foo.bar.baz1,foo.bar.baz2,foo.bar.baz2.quz,foo')
out_mask.CanonicalFormFromMask(mask)
self.assertEqual('foo', out_mask.ToJsonString())
# Union of two masks, including overlapping and covering paths; a
# non-FieldMask argument must raise ValueError.
def testUnion(self):
mask1 = field_mask_pb2.FieldMask()
mask2 = field_mask_pb2.FieldMask()
out_mask = field_mask_pb2.FieldMask()
mask1.FromJsonString('foo,baz')
mask2.FromJsonString('bar,quz')
out_mask.Union(mask1, mask2)
self.assertEqual('bar,baz,foo,quz', out_mask.ToJsonString())
# Overlap with duplicated paths.
mask1.FromJsonString('foo,baz.bb')
mask2.FromJsonString('baz.bb,quz')
out_mask.Union(mask1, mask2)
self.assertEqual('baz.bb,foo,quz', out_mask.ToJsonString())
# Overlap with paths covering some other paths.
mask1.FromJsonString('foo.bar.baz,quz')
mask2.FromJsonString('foo.bar,bar')
out_mask.Union(mask1, mask2)
self.assertEqual('bar,foo.bar,quz', out_mask.ToJsonString())
src = unittest_pb2.TestAllTypes()
with self.assertRaises(ValueError):
out_mask.Union(src, mask2)
# Intersection of two masks: disjoint, duplicated, covering, and the
# degenerate empty-string path.
def testIntersect(self):
mask1 = field_mask_pb2.FieldMask()
mask2 = field_mask_pb2.FieldMask()
out_mask = field_mask_pb2.FieldMask()
# Test cases without overlapping.
mask1.FromJsonString('foo,baz')
mask2.FromJsonString('bar,quz')
out_mask.Intersect(mask1, mask2)
self.assertEqual('', out_mask.ToJsonString())
self.assertEqual(len(out_mask.paths), 0)
self.assertEqual(out_mask.paths, [])
# Overlap with duplicated paths.
mask1.FromJsonString('foo,baz.bb')
mask2.FromJsonString('baz.bb,quz')
out_mask.Intersect(mask1, mask2)
self.assertEqual('baz.bb', out_mask.ToJsonString())
# Overlap with paths covering some other paths.
mask1.FromJsonString('foo.bar.baz,quz')
mask2.FromJsonString('foo.bar,bar')
out_mask.Intersect(mask1, mask2)
self.assertEqual('foo.bar.baz', out_mask.ToJsonString())
mask1.FromJsonString('foo.bar,bar')
mask2.FromJsonString('foo.bar.baz,quz')
out_mask.Intersect(mask1, mask2)
self.assertEqual('foo.bar.baz', out_mask.ToJsonString())
# Intersect '' with ''
mask1.Clear()
mask2.Clear()
mask1.paths.append('')
mask2.paths.append('')
self.assertEqual(mask1.paths, [''])
self.assertEqual('', mask1.ToJsonString())
out_mask.Intersect(mask1, mask2)
self.assertEqual(out_mask.paths, [])
# MergeMessage over every non-oneof field, nested paths, the
# replace_message/replace_repeated options, and oneof interaction.
def testMergeMessageWithoutMapFields(self):
# Test merge one field.
src = unittest_pb2.TestAllTypes()
test_util.SetAllFields(src)
for field in src.DESCRIPTOR.fields:
if field.containing_oneof:
continue
field_name = field.name
dst = unittest_pb2.TestAllTypes()
# Only set one path to mask.
mask = field_mask_pb2.FieldMask()
mask.paths.append(field_name)
mask.MergeMessage(src, dst)
# The expected result message.
msg = unittest_pb2.TestAllTypes()
if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
repeated_src = getattr(src, field_name)
repeated_msg = getattr(msg, field_name)
if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
for item in repeated_src:
repeated_msg.add().CopyFrom(item)
else:
repeated_msg.extend(repeated_src)
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
getattr(msg, field_name).CopyFrom(getattr(src, field_name))
else:
setattr(msg, field_name, getattr(src, field_name))
# Only field specified in mask is merged.
self.assertEqual(msg, dst)
# Test merge nested fields.
nested_src = unittest_pb2.NestedTestAllTypes()
nested_dst = unittest_pb2.NestedTestAllTypes()
nested_src.child.payload.optional_int32 = 1234
nested_src.child.child.payload.optional_int32 = 5678
mask = field_mask_pb2.FieldMask()
mask.FromJsonString('child.payload')
mask.MergeMessage(nested_src, nested_dst)
self.assertEqual(1234, nested_dst.child.payload.optional_int32)
self.assertEqual(0, nested_dst.child.child.payload.optional_int32)
mask.FromJsonString('child.child.payload')
mask.MergeMessage(nested_src, nested_dst)
self.assertEqual(1234, nested_dst.child.payload.optional_int32)
self.assertEqual(5678, nested_dst.child.child.payload.optional_int32)
nested_dst.Clear()
mask.FromJsonString('child.child.payload')
mask.MergeMessage(nested_src, nested_dst)
self.assertEqual(0, nested_dst.child.payload.optional_int32)
self.assertEqual(5678, nested_dst.child.child.payload.optional_int32)
nested_dst.Clear()
mask.FromJsonString('child')
mask.MergeMessage(nested_src, nested_dst)
self.assertEqual(1234, nested_dst.child.payload.optional_int32)
self.assertEqual(5678, nested_dst.child.child.payload.optional_int32)
# Test MergeOptions.
nested_dst.Clear()
nested_dst.child.payload.optional_int64 = 4321
# Message fields will be merged by default.
mask.FromJsonString('child.payload')
mask.MergeMessage(nested_src, nested_dst)
self.assertEqual(1234, nested_dst.child.payload.optional_int32)
self.assertEqual(4321, nested_dst.child.payload.optional_int64)
# Change the behavior to replace message fields.
mask.FromJsonString('child.payload')
mask.MergeMessage(nested_src, nested_dst, True, False)
self.assertEqual(1234, nested_dst.child.payload.optional_int32)
self.assertEqual(0, nested_dst.child.payload.optional_int64)
# By default, fields missing in source are not cleared in destination.
nested_dst.payload.optional_int32 = 1234
self.assertTrue(nested_dst.HasField('payload'))
mask.FromJsonString('payload')
mask.MergeMessage(nested_src, nested_dst)
self.assertTrue(nested_dst.HasField('payload'))
# But they are cleared when replacing message fields.
nested_dst.Clear()
nested_dst.payload.optional_int32 = 1234
mask.FromJsonString('payload')
mask.MergeMessage(nested_src, nested_dst, True, False)
self.assertFalse(nested_dst.HasField('payload'))
nested_src.payload.repeated_int32.append(1234)
nested_dst.payload.repeated_int32.append(5678)
# Repeated fields will be appended by default.
mask.FromJsonString('payload.repeatedInt32')
mask.MergeMessage(nested_src, nested_dst)
self.assertEqual(2, len(nested_dst.payload.repeated_int32))
self.assertEqual(5678, nested_dst.payload.repeated_int32[0])
self.assertEqual(1234, nested_dst.payload.repeated_int32[1])
# Change the behavior to replace repeated fields.
mask.FromJsonString('payload.repeatedInt32')
mask.MergeMessage(nested_src, nested_dst, False, True)
self.assertEqual(1, len(nested_dst.payload.repeated_int32))
self.assertEqual(1234, nested_dst.payload.repeated_int32[0])
# Test Merge oneof field.
new_msg = unittest_pb2.TestOneof2()
dst = unittest_pb2.TestOneof2()
dst.foo_message.moo_int = 1
mask = field_mask_pb2.FieldMask()
mask.FromJsonString('fooMessage,fooLazyMessage.mooInt')
mask.MergeMessage(new_msg, dst)
self.assertTrue(dst.HasField('foo_message'))
self.assertFalse(dst.HasField('foo_lazy_message'))
# Merging a map field replaces map entries in the destination with the
# source's entries for shared keys, leaving unrelated keys intact.
def testMergeMessageWithMapField(self):
empty_map = map_unittest_pb2.TestRecursiveMapMessage()
src_level_2 = map_unittest_pb2.TestRecursiveMapMessage()
src_level_2.a['src level 2'].CopyFrom(empty_map)
src = map_unittest_pb2.TestRecursiveMapMessage()
src.a['common key'].CopyFrom(src_level_2)
src.a['src level 1'].CopyFrom(src_level_2)
dst_level_2 = map_unittest_pb2.TestRecursiveMapMessage()
dst_level_2.a['dst level 2'].CopyFrom(empty_map)
dst = map_unittest_pb2.TestRecursiveMapMessage()
dst.a['common key'].CopyFrom(dst_level_2)
dst.a['dst level 1'].CopyFrom(empty_map)
mask = field_mask_pb2.FieldMask()
mask.FromJsonString('a')
mask.MergeMessage(src, dst)
# map from dst is replaced with map from src.
self.assertEqual(dst.a['common key'], src_level_2)
self.assertEqual(dst.a['src level 1'], src_level_2)
self.assertEqual(dst.a['dst level 1'], empty_map)
# A sub-path under a non-message (scalar) field must raise ValueError.
def testMergeErrors(self):
src = unittest_pb2.TestAllTypes()
dst = unittest_pb2.TestAllTypes()
mask = field_mask_pb2.FieldMask()
test_util.SetAllFields(src)
mask.FromJsonString('optionalInt32.field')
with self.assertRaises(ValueError) as e:
mask.MergeMessage(src, dst)
self.assertEqual('Error: Field optional_int32 in message '
'protobuf_unittest.TestAllTypes is not a singular '
'message field and cannot have sub-fields.',
str(e.exception))
# Direct tests of the snake_case -> camelCase helper and its error cases.
def testSnakeCaseToCamelCase(self):
self.assertEqual('fooBar',
well_known_types._SnakeCaseToCamelCase('foo_bar'))
self.assertEqual('FooBar',
well_known_types._SnakeCaseToCamelCase('_foo_bar'))
self.assertEqual('foo3Bar',
well_known_types._SnakeCaseToCamelCase('foo3_bar'))
# No uppercase letter is allowed.
self.assertRaisesRegex(
ValueError,
'Fail to print FieldMask to Json string: Path name Foo must '
'not contain uppercase letters.',
well_known_types._SnakeCaseToCamelCase, 'Foo')
# Any character after a "_" must be a lowercase letter.
# 1. "_" cannot be followed by another "_".
# 2. "_" cannot be followed by a digit.
# 3. "_" cannot appear as the last character.
self.assertRaisesRegex(
ValueError,
'Fail to print FieldMask to Json string: The character after a '
'"_" must be a lowercase letter in path name foo__bar.',
well_known_types._SnakeCaseToCamelCase, 'foo__bar')
self.assertRaisesRegex(
ValueError,
'Fail to print FieldMask to Json string: The character after a '
'"_" must be a lowercase letter in path name foo_3bar.',
well_known_types._SnakeCaseToCamelCase, 'foo_3bar')
self.assertRaisesRegex(
ValueError,
'Fail to print FieldMask to Json string: Trailing "_" in path '
'name foo_bar_.', well_known_types._SnakeCaseToCamelCase, 'foo_bar_')
# Direct tests of the camelCase -> snake_case helper and its error case.
def testCamelCaseToSnakeCase(self):
self.assertEqual('foo_bar',
well_known_types._CamelCaseToSnakeCase('fooBar'))
self.assertEqual('_foo_bar',
well_known_types._CamelCaseToSnakeCase('FooBar'))
self.assertEqual('foo3_bar',
well_known_types._CamelCaseToSnakeCase('foo3Bar'))
self.assertRaisesRegex(
ValueError,
'Fail to parse FieldMask: Path name foo_bar must not contain "_"s.',
well_known_types._CamelCaseToSnakeCase, 'foo_bar')
class StructTest(unittest.TestCase):
def testStruct(self):

@ -181,7 +181,6 @@ set(libprotobuf_hdrs
${protobuf_SOURCE_DIR}/src/google/protobuf/stubs/platform_macros.h
${protobuf_SOURCE_DIR}/src/google/protobuf/stubs/port.h
${protobuf_SOURCE_DIR}/src/google/protobuf/stubs/status_macros.h
${protobuf_SOURCE_DIR}/src/google/protobuf/stubs/stl_util.h
${protobuf_SOURCE_DIR}/src/google/protobuf/stubs/strutil.h
${protobuf_SOURCE_DIR}/src/google/protobuf/text_format.h
${protobuf_SOURCE_DIR}/src/google/protobuf/unknown_field_set.h
@ -273,7 +272,6 @@ set(libprotobuf_lite_hdrs
${protobuf_SOURCE_DIR}/src/google/protobuf/stubs/platform_macros.h
${protobuf_SOURCE_DIR}/src/google/protobuf/stubs/port.h
${protobuf_SOURCE_DIR}/src/google/protobuf/stubs/status_macros.h
${protobuf_SOURCE_DIR}/src/google/protobuf/stubs/stl_util.h
${protobuf_SOURCE_DIR}/src/google/protobuf/stubs/strutil.h
${protobuf_SOURCE_DIR}/src/google/protobuf/wire_format_lite.h
)

@ -93,7 +93,6 @@ option objc_class_prefix = "GPB";
// in the type URL, for example "foo.bar.com/x/y.z" will yield type
// name "y.z".
//
//
// JSON
//
// The JSON representation of an `Any` value uses the regular

@ -82,7 +82,6 @@ message Api {
// be omitted. Zero major versions must only be used for
// experimental, non-GA interfaces.
//
//
string version = 4;
// Source context for the protocol buffer service represented by this

@ -39,7 +39,6 @@
#include "absl/synchronization/mutex.h"
#include "google/protobuf/message_lite.h"
#include "google/protobuf/parse_context.h"
#include "google/protobuf/stubs/stl_util.h"
// clang-format off
#include "google/protobuf/port_def.inc"

@ -89,7 +89,6 @@
#include "google/protobuf/io/printer.h"
#include "google/protobuf/io/zero_copy_stream_impl.h"
#include "google/protobuf/text_format.h"
#include "google/protobuf/stubs/stl_util.h"
// Must be included last.
@ -872,7 +871,7 @@ CommandLineInterface::MemoryOutputStream::~MemoryOutputStream() {
// Now copy in the data.
std::string::size_type data_pos = 0;
char* target_ptr = ::google::protobuf::string_as_array(target) + pos;
char* target_ptr = &(*target)[pos];
while (data_pos < data_.size()) {
// Copy indent.
memcpy(target_ptr, indent_.data(), indent_.size());
@ -889,8 +888,7 @@ CommandLineInterface::MemoryOutputStream::~MemoryOutputStream() {
}
UpdateMetadata(data_, pos, data_.size() + indent_size, indent_.size());
GOOGLE_CHECK_EQ(target_ptr,
::google::protobuf::string_as_array(target) + pos + data_.size() + indent_size);
GOOGLE_CHECK_EQ(target_ptr, &(*target)[pos] + data_.size() + indent_size);
}
}
}

@ -56,7 +56,6 @@
#include "absl/container/flat_hash_map.h"
#include "absl/strings/substitute.h"
#include "google/protobuf/compiler/cpp/helpers.h"
#include "google/protobuf/stubs/stl_util.h"
namespace google {
namespace protobuf {

@ -1,3 +1,33 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef GOOGLE_PROTOBUF_COMPILER_CPP_CPP_GENERATOR_H_
#define GOOGLE_PROTOBUF_COMPILER_CPP_CPP_GENERATOR_H_

@ -271,6 +271,12 @@ struct NumToEntryTable {
static NumToEntryTable MakeNumToEntryTable(
const std::vector<const FieldDescriptor*>& field_descriptors);
// Returns the number of bytes of field-name data to emit: zero for an empty
// table, otherwise the data size plus one extra byte for a NUL terminator
// (the terminator makes the generated code nicer).
static int FieldNameDataSize(const std::vector<uint8_t>& data) {
  if (data.empty()) return 0;
  return static_cast<int>(data.size()) + 1;
}
void ParseFunctionGenerator::GenerateDataDecls(io::Printer* printer) {
if (!should_generate_tctable()) {
return;
@ -288,9 +294,7 @@ void ParseFunctionGenerator::GenerateDataDecls(io::Printer* printer) {
"TcParseTable<$1$, $2$, $3$, $4$, $5$> _table_;\n",
tc_table_info_->table_size_log2, ordered_fields_.size(),
tc_table_info_->aux_entries.size(),
// We add a +1 here to allow for a NUL termination character. It makes the
// codegen nicer.
tc_table_info_->field_name_data.size() + 1,
FieldNameDataSize(tc_table_info_->field_name_data),
field_num_to_entry_table.size16());
if (should_generate_guarded_tctable()) {
format.Outdent();
@ -450,7 +454,7 @@ void ParseFunctionGenerator::GenerateTailCallTable(Formatter& format) {
"{\n",
tc_table_info_->table_size_log2, ordered_fields_.size(),
tc_table_info_->aux_entries.size(),
tc_table_info_->field_name_data.size() + 1, // See above for why +1
FieldNameDataSize(tc_table_info_->field_name_data),
field_num_to_entry_table.size16());
{
auto table_scope = format.ScopedIndent();
@ -608,12 +612,12 @@ void ParseFunctionGenerator::GenerateTailCallTable(Formatter& format) {
format("}}, {{\n");
}
} // ordered_fields_.empty()
{
// field_names[]
auto field_name_scope = format.ScopedIndent();
GenerateFieldNames(format);
}
format("}},\n");
{
// field_names[]
auto field_name_scope = format.ScopedIndent();
GenerateFieldNames(format);
}
format("}},\n");
}
format("};\n\n"); // _table_
}
@ -832,6 +836,11 @@ void ParseFunctionGenerator::GenerateFieldEntries(Formatter& format) {
}
void ParseFunctionGenerator::GenerateFieldNames(Formatter& format) {
if (tc_table_info_->field_name_data.empty()) {
// No names to output.
return;
}
// We could just output the bytes directly, but we want it to look better than
// that in the source code. Also, it is more efficient for compilation time to
// have a literal string than an initializer list of chars.
@ -854,8 +863,8 @@ void ParseFunctionGenerator::GenerateFieldNames(Formatter& format) {
format("\"\n");
// Then print each name in a line of its own
for (; sizes < sizes_end && sizes[0] != 0; p += *sizes++) {
format("\"$1$\"\n", std::string(p, p + *sizes));
for (; sizes < sizes_end; p += *sizes++) {
if (*sizes != 0) format("\"$1$\"\n", std::string(p, p + *sizes));
}
}

@ -76,7 +76,6 @@
#include "google/protobuf/io/coded_stream.h"
#include "google/protobuf/io/zero_copy_stream_impl.h"
#include "google/protobuf/test_util2.h"
#include "google/protobuf/stubs/stl_util.h"
// Must be included last.
#include "google/protobuf/port_def.inc"
@ -664,7 +663,7 @@ TEST(GENERATED_MESSAGE_TEST_NAME, SerializationToArray) {
TestUtil::SetAllFields(&message1);
int size = message1.ByteSizeLong();
data.resize(size);
uint8_t* start = reinterpret_cast<uint8_t*>(::google::protobuf::string_as_array(&data));
uint8_t* start = reinterpret_cast<uint8_t*>(&data[0]);
uint8_t* end = message1.SerializeWithCachedSizesToArray(start);
EXPECT_EQ(size, end - start);
EXPECT_TRUE(message2.ParseFromString(data));
@ -678,8 +677,7 @@ TEST(GENERATED_MESSAGE_TEST_NAME, PackedFieldsSerializationToArray) {
TestUtil::SetPackedFields(&packed_message1);
int packed_size = packed_message1.ByteSizeLong();
packed_data.resize(packed_size);
uint8_t* start =
reinterpret_cast<uint8_t*>(::google::protobuf::string_as_array(&packed_data));
uint8_t* start = reinterpret_cast<uint8_t*>(&packed_data[0]);
uint8_t* end = packed_message1.SerializeWithCachedSizesToArray(start);
EXPECT_EQ(packed_size, end - start);
EXPECT_TRUE(packed_message2.ParseFromString(packed_data));
@ -696,7 +694,7 @@ TEST(GENERATED_MESSAGE_TEST_NAME, SerializationToStream) {
data.resize(size);
{
// Allow the output stream to buffer only one byte at a time.
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&data), size, 1);
io::ArrayOutputStream array_stream(&data[0], size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());
@ -715,7 +713,7 @@ TEST(GENERATED_MESSAGE_TEST_NAME, PackedFieldsSerializationToStream) {
data.resize(size);
{
// Allow the output stream to buffer only one byte at a time.
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&data), size, 1);
io::ArrayOutputStream array_stream(&data[0], size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());
@ -1841,7 +1839,7 @@ std::string data;
message1.set_foo_int(123);
int size = message1.ByteSizeLong();
data.resize(size);
uint8_t* start = reinterpret_cast<uint8_t*>(::google::protobuf::string_as_array(&data));
uint8_t* start = reinterpret_cast<uint8_t*>(&data[0]);
uint8_t* end = message1.SerializeWithCachedSizesToArray(start);
EXPECT_EQ(size, end - start);
EXPECT_TRUE(message2.ParseFromString(data));
@ -1855,7 +1853,7 @@ EXPECT_EQ(message2.foo_int(), 123);
message1.set_foo_string("foo");
int size = message1.ByteSizeLong();
data.resize(size);
uint8_t* start = reinterpret_cast<uint8_t*>(::google::protobuf::string_as_array(&data));
uint8_t* start = reinterpret_cast<uint8_t*>(&data[0]);
uint8_t* end = message1.SerializeWithCachedSizesToArray(start);
EXPECT_EQ(size, end - start);
EXPECT_TRUE(message2.ParseFromString(data));
@ -1870,7 +1868,7 @@ EXPECT_EQ(message2.foo_int(), 123);
message1.set_foo_bytes("moo");
int size = message1.ByteSizeLong();
data.resize(size);
uint8_t* start = reinterpret_cast<uint8_t*>(::google::protobuf::string_as_array(&data));
uint8_t* start = reinterpret_cast<uint8_t*>(&data[0]);
uint8_t* end = message1.SerializeWithCachedSizesToArray(start);
EXPECT_EQ(size, end - start);
EXPECT_TRUE(message2.ParseFromString(data));
@ -1884,7 +1882,7 @@ EXPECT_EQ(message2.foo_int(), 123);
message1.set_foo_enum(UNITTEST::TestOneof2::FOO);
int size = message1.ByteSizeLong();
data.resize(size);
uint8_t* start = reinterpret_cast<uint8_t*>(::google::protobuf::string_as_array(&data));
uint8_t* start = reinterpret_cast<uint8_t*>(&data[0]);
uint8_t* end = message1.SerializeWithCachedSizesToArray(start);
EXPECT_EQ(size, end - start);
EXPECT_TRUE(message2.ParseFromString(data));
@ -1898,7 +1896,7 @@ EXPECT_EQ(message2.foo_int(), 123);
message1.mutable_foo_message()->set_moo_int(234);
int size = message1.ByteSizeLong();
data.resize(size);
uint8_t* start = reinterpret_cast<uint8_t*>(::google::protobuf::string_as_array(&data));
uint8_t* start = reinterpret_cast<uint8_t*>(&data[0]);
uint8_t* end = message1.SerializeWithCachedSizesToArray(start);
EXPECT_EQ(size, end - start);
EXPECT_TRUE(message2.ParseFromString(data));
@ -1912,7 +1910,7 @@ EXPECT_EQ(message2.foo_int(), 123);
message1.mutable_foogroup()->set_a(345);
int size = message1.ByteSizeLong();
data.resize(size);
uint8_t* start = reinterpret_cast<uint8_t*>(::google::protobuf::string_as_array(&data));
uint8_t* start = reinterpret_cast<uint8_t*>(&data[0]);
uint8_t* end = message1.SerializeWithCachedSizesToArray(start);
EXPECT_EQ(size, end - start);
EXPECT_TRUE(message2.ParseFromString(data));
@ -1937,11 +1935,11 @@ data.resize(size);
{
// Allow the output stream to buffer only one byte at a time.
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&data), size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());
EXPECT_EQ(size, output_stream.ByteCount());
io::ArrayOutputStream array_stream(&data[0], size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());
EXPECT_EQ(size, output_stream.ByteCount());
}
EXPECT_TRUE(message2.ParseFromString(data));
@ -1958,11 +1956,11 @@ data.resize(size);
{
// Allow the output stream to buffer only one byte at a time.
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&data), size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());
EXPECT_EQ(size, output_stream.ByteCount());
io::ArrayOutputStream array_stream(&data[0], size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());
EXPECT_EQ(size, output_stream.ByteCount());
}
EXPECT_TRUE(message2.ParseFromString(data));
@ -1980,11 +1978,11 @@ data.resize(size);
{
// Allow the output stream to buffer only one byte at a time.
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&data), size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());
EXPECT_EQ(size, output_stream.ByteCount());
io::ArrayOutputStream array_stream(&data[0], size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());
EXPECT_EQ(size, output_stream.ByteCount());
}
EXPECT_TRUE(message2.ParseFromString(data));
@ -2001,11 +1999,11 @@ data.resize(size);
{
// Allow the output stream to buffer only one byte at a time.
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&data), size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());
EXPECT_EQ(size, output_stream.ByteCount());
io::ArrayOutputStream array_stream(&data[0], size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());
EXPECT_EQ(size, output_stream.ByteCount());
}
EXPECT_TRUE(message2.ParseFromString(data));
@ -2022,11 +2020,11 @@ data.resize(size);
{
// Allow the output stream to buffer only one byte at a time.
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&data), size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());
EXPECT_EQ(size, output_stream.ByteCount());
io::ArrayOutputStream array_stream(&data[0], size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());
EXPECT_EQ(size, output_stream.ByteCount());
}
EXPECT_TRUE(message2.ParseFromString(data));
@ -2043,11 +2041,11 @@ data.resize(size);
{
// Allow the output stream to buffer only one byte at a time.
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&data), size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());
EXPECT_EQ(size, output_stream.ByteCount());
io::ArrayOutputStream array_stream(&data[0], size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());
EXPECT_EQ(size, output_stream.ByteCount());
}
EXPECT_TRUE(message2.ParseFromString(data));

@ -51,7 +51,6 @@
#include "google/protobuf/compiler/importer.h"
#include "google/protobuf/descriptor.h"
#include "google/protobuf/io/zero_copy_stream_impl.h"
#include "google/protobuf/stubs/stl_util.h"
namespace google {
namespace protobuf {

@ -1,3 +1,33 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef GOOGLE_PROTOBUF_COMPILER_JAVA_JAVA_GENERATOR_H_
#define GOOGLE_PROTOBUF_COMPILER_JAVA_JAVA_GENERATOR_H_

@ -32,14 +32,13 @@
#include "absl/strings/ascii.h"
#include "absl/strings/escaping.h"
#include "absl/strings/str_split.h"
#include "absl/strings/str_replace.h"
#include "absl/strings/str_split.h"
#include "google/protobuf/compiler/code_generator.h"
#include "google/protobuf/compiler/objectivec/names.h"
#include "google/protobuf/io/coded_stream.h"
#include "google/protobuf/io/printer.h"
#include "google/protobuf/io/zero_copy_stream_impl.h"
#include "google/protobuf/stubs/strutil.h"
// NOTE: src/google/protobuf/compiler/plugin.cc makes use of cerr for some
// error cases, so it seems to be ok to use as a back door for errors.

@ -89,7 +89,6 @@ message CodeGeneratorRequest {
// The version number of protocol compiler.
optional Version compiler_version = 3;
}
// The plugin writes an encoded CodeGeneratorResponse to stdout.

@ -1,3 +1,33 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef GOOGLE_PROTOBUF_COMPILER_PYTHON_PYTHON_GENERATOR_H_
#define GOOGLE_PROTOBUF_COMPILER_PYTHON_PYTHON_GENERATOR_H_

@ -36,7 +36,6 @@
// A valid .proto file can be translated directly to a FileDescriptorProto
// without any other information (e.g. without reading its imports).
syntax = "proto2";
package google.protobuf;
@ -134,7 +133,6 @@ message ExtensionRangeOptions {
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
@ -310,7 +308,6 @@ message MethodDescriptorProto {
optional bool server_streaming = 6 [default = false];
}
// ===================================================================
// Options
@ -351,7 +348,6 @@ message FileOptions {
// domain names.
optional string java_package = 1;
// Controls the name of the wrapper Java class generated for the .proto file.
// That class will always contain the .proto file's getDescriptor() method as
// well as any top-level extensions defined in the .proto file.
@ -378,7 +374,6 @@ message FileOptions {
// This option has no effect on when used with the lite runtime.
optional bool java_string_check_utf8 = 27 [default = false];
// Generated classes can be optimized for speed or code size.
enum OptimizeMode {
SPEED = 1; // Generate complete code for parsing, serialization,
@ -395,9 +390,6 @@ message FileOptions {
// - Otherwise, the basename of the .proto file, without extension.
optional string go_package = 11;
// Should generic services be generated in each language? "Generic" services
// are not specific to any particular RPC system. They are generated by the
// main code generators in each language (without additional plugins).
@ -423,7 +415,6 @@ message FileOptions {
// only to generated classes for C++.
optional bool cc_enable_arenas = 31 [default = true];
// Sets the objective c class prefix which is prepended to all objective c
// generated classes from this .proto. There is no default.
optional string objc_class_prefix = 36;
@ -456,7 +447,6 @@ message FileOptions {
// determining the ruby package.
optional string ruby_package = 45;
// The parser stores options it doesn't recognize here.
// See the documentation for the "Options" section above.
repeated UninterpretedOption uninterpreted_option = 999;
@ -528,7 +518,6 @@ message MessageOptions {
reserved 8; // javalite_serializable
reserved 9; // javanano_as_lite
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
@ -597,7 +586,6 @@ message FieldOptions {
// call from multiple threads concurrently, while non-const methods continue
// to require exclusive access.
//
//
// Note that implementations may choose not to check required fields within
// a lazy sub-message. That is, calling IsInitialized() on the outer message
// may return true even if the inner message has missing required fields.
@ -627,7 +615,6 @@ message FieldOptions {
// For Google-internal migration only. Do not use.
optional bool weak = 10 [default = false];
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
@ -731,7 +718,6 @@ message MethodOptions {
extensions 1000 to max;
}
// A message representing a option the parser does not recognize. This only
// appears in options protos created by the compiler::Parser class.
// DescriptorPool resolves these when building Descriptor objects. Therefore,

@ -42,7 +42,6 @@
#include "absl/strings/match.h"
#include "absl/strings/str_replace.h"
#include "google/protobuf/descriptor.pb.h"
#include "google/protobuf/stubs/stl_util.h"
namespace google {

@ -99,7 +99,6 @@ option objc_class_prefix = "GPB";
// be expressed in JSON format as "3.000000001s", and 3 seconds and 1
// microsecond should be expressed in JSON format as "3.000001s".
//
//
message Duration {
// Signed seconds of the span of time. Must be from -315,576,000,000
// to +315,576,000,000 inclusive. Note: these bounds are computed from:

@ -53,7 +53,6 @@
#include "absl/strings/match.h"
#include "google/protobuf/test_util.h"
#include "google/protobuf/test_util2.h"
#include "google/protobuf/stubs/stl_util.h"
#include "google/protobuf/stubs/strutil.h"
@ -552,7 +551,7 @@ TEST(ExtensionSetTest, SerializationToArray) {
size_t size = source.ByteSizeLong();
std::string data;
data.resize(size);
uint8_t* target = reinterpret_cast<uint8_t*>(::google::protobuf::string_as_array(&data));
uint8_t* target = reinterpret_cast<uint8_t*>(&data[0]);
uint8_t* end = source.SerializeWithCachedSizesToArray(target);
EXPECT_EQ(size, end - target);
EXPECT_TRUE(destination.ParseFromString(data));
@ -574,7 +573,7 @@ TEST(ExtensionSetTest, SerializationToStream) {
std::string data;
data.resize(size);
{
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&data), size, 1);
io::ArrayOutputStream array_stream(&data[0], size, 1);
io::CodedOutputStream output_stream(&array_stream);
source.SerializeWithCachedSizes(&output_stream);
ASSERT_FALSE(output_stream.HadError());
@ -596,7 +595,7 @@ TEST(ExtensionSetTest, PackedSerializationToArray) {
size_t size = source.ByteSizeLong();
std::string data;
data.resize(size);
uint8_t* target = reinterpret_cast<uint8_t*>(::google::protobuf::string_as_array(&data));
uint8_t* target = reinterpret_cast<uint8_t*>(&data[0]);
uint8_t* end = source.SerializeWithCachedSizesToArray(target);
EXPECT_EQ(size, end - target);
EXPECT_TRUE(destination.ParseFromString(data));
@ -618,7 +617,7 @@ TEST(ExtensionSetTest, PackedSerializationToStream) {
std::string data;
data.resize(size);
{
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&data), size, 1);
io::ArrayOutputStream array_stream(&data[0], size, 1);
io::CodedOutputStream output_stream(&array_stream);
source.SerializeWithCachedSizes(&output_stream);
ASSERT_FALSE(output_stream.HadError());

@ -3095,7 +3095,7 @@ const internal::TcParseTableBase* Reflection::CreateTcParseTableForMessageSet()
// Create a dummy table that only exists to make TcParser::ParseLoop jump
// into the reflective parse loop.
using Table = internal::TcParseTable<0, 0, 0, 1, 1>;
using Table = internal::TcParseTable<0, 0, 0, 0, 1>;
// We use `operator new` here because the destruction will be done with
// `operator delete` unconditionally.
void* p = ::operator new(sizeof(Table));

@ -347,7 +347,7 @@ struct TcParseTable {
// Entries for all fields:
std::array<TcParseTableBase::FieldEntry, kNumFieldEntries> field_entries;
std::array<TcParseTableBase::FieldAux, kNumFieldAux> aux_entries;
std::array<char, kNameTableSize> field_names;
std::array<char, kNameTableSize == 0 ? 1 : kNameTableSize> field_names;
};
// Partial specialization: if there are no aux entries, there will be no array.
@ -363,7 +363,7 @@ struct TcParseTable<kFastTableSizeLog2, kNumFieldEntries, 0, kNameTableSize,
fast_entries;
std::array<uint16_t, kFieldLookupSize> field_lookup_table;
std::array<TcParseTableBase::FieldEntry, kNumFieldEntries> field_entries;
std::array<char, kNameTableSize> field_names;
std::array<char, kNameTableSize == 0 ? 1 : kNameTableSize> field_names;
};
// Partial specialization: if there are no fields at all, then we can save space
@ -375,7 +375,7 @@ struct TcParseTable<0, 0, 0, kNameTableSize, kFieldLookupSize> {
// The fast parsing loop will always use this entry, so it must be present.
std::array<TcParseTableBase::FastFieldEntry, 1> fast_entries;
std::array<uint16_t, kFieldLookupSize> field_lookup_table;
std::array<char, kNameTableSize> field_names;
std::array<char, kNameTableSize == 0 ? 1 : kNameTableSize> field_names;
};
static_assert(std::is_standard_layout<TcParseTable<1>>::value,

@ -420,10 +420,9 @@ absl::string_view FieldNameForTable(
case field_layout::kTvUtf8:
case field_layout::kTvUtf8Debug:
return field->name();
break;
}
}
return "?";
return "";
}
std::vector<uint8_t> GenerateFieldNames(
@ -431,6 +430,20 @@ std::vector<uint8_t> GenerateFieldNames(
const std::vector<TailCallTableInfo::FieldEntryInfo>& entries) {
static constexpr int kMaxNameLength = 255;
std::vector<uint8_t> out;
bool found_needed_name = false;
for (const auto& entry : entries) {
if (!FieldNameForTable(entry).empty()) {
found_needed_name = true;
break;
}
}
// No names needed. Omit the whole table.
if (!found_needed_name) {
return out;
}
// First, we output the size of each string, as an unsigned byte. The first
// string is the message name.
int count = 1;

@ -610,6 +610,7 @@ class PROTOBUF_EXPORT TcParser final {
// For FindFieldEntry tests:
friend class FindFieldEntryTest;
friend struct ParseFunctionGeneratorTestPeer;
static constexpr const uint32_t kMtSmallScanSize = 4;
// Mini parsing:

@ -96,7 +96,6 @@
#include "absl/strings/str_format.h"
#include "google/protobuf/io/strtod.h"
#include "google/protobuf/io/zero_copy_stream.h"
#include "google/protobuf/stubs/stl_util.h"
// Must be included last.
#include "google/protobuf/port_def.inc"

@ -47,7 +47,6 @@
#include "google/protobuf/stubs/logging.h"
#include "google/protobuf/io/io_win32.h"
#include "google/protobuf/io/zero_copy_stream_impl.h"
#include "google/protobuf/stubs/stl_util.h"
namespace google {

@ -53,7 +53,6 @@
#include "google/protobuf/stubs/common.h"
#include "google/protobuf/io/zero_copy_stream.h"
#include "google/protobuf/port.h"
#include "google/protobuf/stubs/stl_util.h"
// Must be included last.
@ -387,24 +386,10 @@ class PROTOBUF_EXPORT LimitingInputStream PROTOBUF_FUTURE_FINAL
// ===================================================================
// mutable_string_data() and as_string_data() are workarounds to improve
// the performance of writing new data to an existing string. Unfortunately
// the methods provided by the string class are suboptimal, and using memcpy()
// is mildly annoying because it requires its pointer args to be non-NULL even
// if we ask it to copy 0 bytes. Furthermore, string_as_array() has the
// property that it always returns NULL if its arg is the empty string, exactly
// what we want to avoid if we're using it in conjunction with memcpy()!
// With C++11, the desired memcpy() boils down to memcpy(..., &(*s)[0], size),
// where s is a string*. Without C++11, &(*s)[0] is not guaranteed to be safe,
// so we use string_as_array(), and live with the extra logic that tests whether
// *s is empty.
// Return a pointer to mutable characters underlying the given string. The
// return value is valid until the next time the string is resized. We
// trust the caller to treat the return value as an array of length s->size().
inline char* mutable_string_data(std::string* s) {
// This should be simpler & faster than string_as_array() because the latter
// is guaranteed to return NULL when *s is empty, so it has to check for that.
return &(*s)[0];
}

@ -675,8 +675,7 @@ TEST(Lite, AllLite28) {
MapLiteTestUtil::SetMapFields(&message1);
size_t size = message1.ByteSizeLong();
data.resize(size);
::uint8_t* start =
reinterpret_cast<::uint8_t*>(::google::protobuf::string_as_array(&data));
::uint8_t* start = reinterpret_cast<::uint8_t*>(&data[0]);
::uint8_t* end = message1.SerializeWithCachedSizesToArray(start);
EXPECT_EQ(size, end - start);
EXPECT_TRUE(message2.ParseFromString(data));
@ -696,7 +695,7 @@ TEST(Lite, AllLite29) {
data.resize(size);
{
// Allow the output stream to buffer only one byte at a time.
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&data), size, 1);
io::ArrayOutputStream array_stream(&data[0], size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());

@ -2692,7 +2692,7 @@ TEST(GeneratedMapFieldTest, SerializationToArray) {
MapTestUtil::SetMapFields(&message1);
size_t size = message1.ByteSizeLong();
data.resize(size);
uint8_t* start = reinterpret_cast<uint8_t*>(::google::protobuf::string_as_array(&data));
uint8_t* start = reinterpret_cast<uint8_t*>(&data[0]);
uint8_t* end = message1.SerializeWithCachedSizesToArray(start);
EXPECT_EQ(size, end - start);
EXPECT_TRUE(message2.ParseFromString(data));
@ -2708,7 +2708,7 @@ TEST(GeneratedMapFieldTest, SerializationToStream) {
data.resize(size);
{
// Allow the output stream to buffer only one byte at a time.
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&data), size, 1);
io::ArrayOutputStream array_stream(&data[0], size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());
@ -3739,7 +3739,7 @@ static std::string DeterministicSerializationWithSerializePartialToCodedStream(
const T& t) {
const size_t size = t.ByteSizeLong();
std::string result(size, '\0');
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&result), size);
io::ArrayOutputStream array_stream(&result[0], size);
io::CodedOutputStream output_stream(&array_stream);
output_stream.SetSerializationDeterministic(true);
t.SerializePartialToCodedStream(&output_stream);
@ -3753,7 +3753,7 @@ static std::string DeterministicSerializationWithSerializeToCodedStream(
const T& t) {
const size_t size = t.ByteSizeLong();
std::string result(size, '\0');
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&result), size);
io::ArrayOutputStream array_stream(&result[0], size);
io::CodedOutputStream output_stream(&array_stream);
output_stream.SetSerializationDeterministic(true);
t.SerializeToCodedStream(&output_stream);
@ -3766,7 +3766,7 @@ template <typename T>
static std::string DeterministicSerialization(const T& t) {
const size_t size = t.ByteSizeLong();
std::string result(size, '\0');
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&result), size);
io::ArrayOutputStream array_stream(&result[0], size);
{
io::CodedOutputStream output_stream(&array_stream);
output_stream.SetSerializationDeterministic(true);

@ -76,7 +76,6 @@ message TestSameTypeMap {
map<int32, int32> map2 = 2;
}
enum MapEnum {
MAP_ENUM_FOO = 0;
MAP_ENUM_BAR = 1;

@ -61,7 +61,6 @@
#include "google/protobuf/unknown_field_set.h"
#include "google/protobuf/wire_format.h"
#include "google/protobuf/wire_format_lite.h"
#include "google/protobuf/stubs/stl_util.h"
// Must be included last.

@ -56,7 +56,6 @@
#include "absl/strings/cord.h"
#include "absl/strings/str_cat.h"
#include "google/protobuf/stubs/strutil.h"
#include "google/protobuf/stubs/stl_util.h"
// Must be included last.
#include "google/protobuf/port_def.inc"

@ -26,7 +26,6 @@ cc_library(
"platform_macros.h",
"port.h",
"status_macros.h",
"stl_util.h",
"strutil.h",
],
copts = COPTS,
@ -56,7 +55,6 @@ cc_library(
"platform_macros.h",
"port.h",
"status_macros.h",
"stl_util.h",
"strutil.h",
],
deps = [

@ -1,3 +1,33 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef GOOGLE_PROTOBUF_STUBS_CALLBACK_H_
#define GOOGLE_PROTOBUF_STUBS_CALLBACK_H_

@ -1,68 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// from google3/util/gtl/stl_util.h
#ifndef GOOGLE_PROTOBUF_STUBS_STL_UTIL_H__
#define GOOGLE_PROTOBUF_STUBS_STL_UTIL_H__
#include <algorithm>
#include "google/protobuf/stubs/common.h"
// Must be last.
#include "google/protobuf/port_def.inc" // NOLINT
namespace google {
namespace protobuf {
// Return a mutable char* pointing to a string's internal buffer,
// which may not be null-terminated. Writing through this pointer will
// modify the string.
//
// string_as_array(&str)[i] is valid for 0 <= i < str.size() until the
// next call to a string method that invalidates iterators.
//
// As of 2006-04, there is no standard-blessed way of getting a
// mutable reference to a string's internal buffer. However, issue 530
// (http://www.open-std.org/JTC1/SC22/WG21/docs/lwg-active.html#530)
// proposes this as the method. According to Matt Austern, this should
// already work on all current implementations.
inline char* string_as_array(std::string* str) {
// DO NOT USE const_cast<char*>(str->data())! See the unittest for why.
return str->empty() ? nullptr : &*str->begin();
}
} // namespace protobuf
} // namespace google
#include "google/protobuf/port_undef.inc" // NOLINT
#endif // GOOGLE_PROTOBUF_STUBS_STL_UTIL_H__

@ -44,7 +44,6 @@
#include "absl/strings/ascii.h"
#include "absl/strings/string_view.h"
#include "google/protobuf/stubs/logging.h"
#include "google/protobuf/stubs/stl_util.h"
#ifdef _WIN32
// MSVC has only _snprintf, not snprintf.
@ -671,9 +670,8 @@ void Base64EscapeInternal(const unsigned char *src, int szsrc,
const absl::string_view base64_chars) {
const int calc_escaped_size = CalculateBase64EscapedLen(szsrc, do_padding);
dest->resize(calc_escaped_size);
const int escaped_len =
Base64EscapeInternal(src, szsrc, string_as_array(dest), dest->size(),
base64_chars, do_padding);
const int escaped_len = Base64EscapeInternal(
src, szsrc, &(*dest)[0], dest->size(), base64_chars, do_padding);
GOOGLE_DCHECK_EQ(calc_escaped_size, escaped_len);
dest->erase(escaped_len);
}

@ -35,7 +35,6 @@
#include <gtest/gtest.h>
#include <locale.h>
#include "google/protobuf/stubs/stl_util.h"
#include "google/protobuf/testing/googletest.h"
#ifdef _WIN32

@ -67,7 +67,6 @@
#include "google/protobuf/repeated_field.h"
#include "google/protobuf/unknown_field_set.h"
#include "google/protobuf/wire_format_lite.h"
#include "google/protobuf/stubs/stl_util.h"
// Must be included last.
#include "google/protobuf/port_def.inc"

@ -90,7 +90,6 @@ option objc_class_prefix = "GPB";
// Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
// .setNanos((int) ((millis % 1000) * 1000000)).build();
//
//
// Example 5: Compute Timestamp from Java `Instant.now()`.
//
// Instant now = Instant.now();
@ -99,7 +98,6 @@ option objc_class_prefix = "GPB";
// Timestamp.newBuilder().setSeconds(now.getEpochSecond())
// .setNanos(now.getNano()).build();
//
//
// Example 6: Compute Timestamp from current time in Python.
//
// timestamp = Timestamp()
@ -132,7 +130,6 @@ option objc_class_prefix = "GPB";
// http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D
// ) to obtain a formatter capable of generating timestamps in this format.
//
//
message Timestamp {
// Represents seconds of UTC time since Unix epoch
// 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to

@ -889,7 +889,6 @@ message TestRequiredOneof {
}
}
// Test messages for packed fields
message TestPackedTypes {
@ -1070,7 +1069,6 @@ message TestMessageSize {
optional int64 m6 = 6;
}
// Test that RPC services work.
message FooRequest {}
message FooResponse {}
@ -1083,7 +1081,6 @@ service TestService {
rpc Bar(BarRequest) returns (BarResponse);
}
message BarRequest {}
message BarResponse {}
@ -1437,7 +1434,6 @@ message TestVerifyBigFieldNumberUint32 {
optional Nested optional_nested = 1;
}
// This message contains different kind of enums to exercise the different
// parsers in table-driven.
message EnumParseTester {
@ -1565,4 +1561,3 @@ message StringParseTester {
repeated string repeated_string_midfield = 1002;
repeated string repeated_string_hifield = 1000002;
};

@ -64,7 +64,6 @@ enum ImportEnum {
IMPORT_BAZ = 9;
}
// To use an enum in a map, it must has the first value as 0.
enum ImportEnumForMap {
UNKNOWN = 0;

@ -59,7 +59,6 @@
#include "absl/time/clock.h"
#include "absl/time/time.h"
#include "google/protobuf/test_util.h"
#include "google/protobuf/stubs/stl_util.h"
namespace google {
@ -207,15 +206,13 @@ TEST_F(UnknownFieldSetTest, SerializeFastAndSlowAreEquivalent) {
slow_buffer.resize(size);
fast_buffer.resize(size);
uint8_t* target =
reinterpret_cast<uint8_t*>(::google::protobuf::string_as_array(&fast_buffer));
uint8_t* target = reinterpret_cast<uint8_t*>(&fast_buffer[0]);
uint8_t* result = WireFormat::SerializeUnknownFieldsToArray(
empty_message_.unknown_fields(), target);
EXPECT_EQ(size, result - target);
{
io::ArrayOutputStream raw_stream(::google::protobuf::string_as_array(&slow_buffer), size,
1);
io::ArrayOutputStream raw_stream(&slow_buffer[0], size, 1);
io::CodedOutputStream output_stream(&raw_stream);
WireFormat::SerializeUnknownFields(empty_message_.unknown_fields(),
&output_stream);

@ -41,7 +41,6 @@ package protobuf_unittest;
import "google/protobuf/any.proto";
import "google/protobuf/struct.proto";
message TestFlagsAndStrings {
required int32 A = 1;
repeated group RepeatedGroup = 2 {
@ -96,7 +95,6 @@ message TestNumbers {
optional uint32 f = 6;
}
message TestCamelCase {
optional string normal_field = 1;
optional int32 CAPITAL_FIELD = 2;

@ -30,7 +30,6 @@
#include <gtest/gtest.h>
#include "google/protobuf/stubs/common.h"
#include "google/protobuf/stubs/stl_util.h"
#include "google/protobuf/testing/googletest.h"
#include "google/protobuf/unittest_well_known_types.pb.h"

@ -49,7 +49,6 @@
#include "absl/strings/match.h"
#include "google/protobuf/dynamic_message.h"
#include "google/protobuf/test_util2.h"
#include "google/protobuf/stubs/stl_util.h"
// clang-format off
#include "google/protobuf/port_def.inc"
@ -494,16 +493,14 @@ TEST(WireFormatTest, SerializeMessageSetVariousWaysAreEqual) {
// Serialize to flat array
{
uint8_t* target =
reinterpret_cast<uint8_t*>(::google::protobuf::string_as_array(&flat_data));
uint8_t* target = reinterpret_cast<uint8_t*>(&flat_data[0]);
uint8_t* end = message_set.SerializeWithCachedSizesToArray(target);
EXPECT_EQ(size, end - target);
}
// Serialize to buffer
{
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&stream_data), size,
1);
io::ArrayOutputStream array_stream(&stream_data[0], size, 1);
io::CodedOutputStream output_stream(&array_stream);
message_set.SerializeWithCachedSizes(&output_stream);
ASSERT_FALSE(output_stream.HadError());

Loading…
Cancel
Save