parent 0087da9d47
commit 46e8ff63cb
112 changed files with 3562 additions and 962 deletions
@ -1,32 +1,32 @@ |
||||
# Only test one combination: "Visual Studio 12 + Win64 + Debug + DLL". We can |
||||
# test more combinations but AppVeyor just takes too long to finish (each |
||||
# combination takes ~15mins). |
||||
platform: |
||||
- Win64 |
||||
|
||||
configuration: |
||||
- Debug |
||||
|
||||
environment: |
||||
matrix: |
||||
- language: cpp |
||||
BUILD_DLL: ON |
||||
|
||||
- language: csharp |
||||
|
||||
install: |
||||
- ps: Start-FileDownload https://googlemock.googlecode.com/files/gmock-1.7.0.zip |
||||
- 7z x gmock-1.7.0.zip |
||||
- rename gmock-1.7.0 gmock |
||||
|
||||
before_build: |
||||
- if %platform%==Win32 set generator=Visual Studio 12 |
||||
- if %platform%==Win64 set generator=Visual Studio 12 Win64 |
||||
- if %platform%==Win32 set vcplatform=Win32 |
||||
- if %platform%==Win64 set vcplatform=x64 |
||||
|
||||
build_script: |
||||
- CALL appveyor.bat |
||||
|
||||
skip_commits: |
||||
message: /.*\[skip appveyor\].*/ |
@ -0,0 +1,522 @@ |
||||
#! /usr/bin/env python |
||||
# |
||||
# Protocol Buffers - Google's data interchange format |
||||
# Copyright 2008 Google Inc. All rights reserved. |
||||
# https://developers.google.com/protocol-buffers/ |
||||
# |
||||
# Redistribution and use in source and binary forms, with or without |
||||
# modification, are permitted provided that the following conditions are |
||||
# met: |
||||
# |
||||
# * Redistributions of source code must retain the above copyright |
||||
# notice, this list of conditions and the following disclaimer. |
||||
# * Redistributions in binary form must reproduce the above |
||||
# copyright notice, this list of conditions and the following disclaimer |
||||
# in the documentation and/or other materials provided with the |
||||
# distribution. |
||||
# * Neither the name of Google Inc. nor the names of its |
||||
# contributors may be used to endorse or promote products derived from |
||||
# this software without specific prior written permission. |
||||
# |
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
||||
|
||||
"""Test for google.protobuf.json_format.""" |
||||
|
||||
__author__ = 'jieluo@google.com (Jie Luo)' |
||||
|
||||
import json |
||||
import math |
||||
import sys |
||||
|
||||
import unittest |
||||
from google.protobuf import json_format |
||||
from google.protobuf.util import json_format_proto3_pb2 |
||||
|
||||
|
||||
class JsonFormatBase(unittest.TestCase): |
||||
|
||||
def FillAllFields(self, message): |
||||
message.int32_value = 20 |
||||
message.int64_value = -20 |
||||
message.uint32_value = 3120987654 |
||||
message.uint64_value = 12345678900 |
||||
message.float_value = float('-inf') |
||||
message.double_value = 3.1415 |
||||
message.bool_value = True |
||||
message.string_value = 'foo' |
||||
message.bytes_value = b'bar' |
||||
message.message_value.value = 10 |
||||
message.enum_value = json_format_proto3_pb2.BAR |
||||
# Repeated |
||||
message.repeated_int32_value.append(0x7FFFFFFF) |
||||
message.repeated_int32_value.append(-2147483648) |
||||
message.repeated_int64_value.append(9007199254740992) |
||||
message.repeated_int64_value.append(-9007199254740992) |
||||
message.repeated_uint32_value.append(0xFFFFFFF) |
||||
message.repeated_uint32_value.append(0x7FFFFFF) |
||||
message.repeated_uint64_value.append(9007199254740992) |
||||
message.repeated_uint64_value.append(9007199254740991) |
||||
message.repeated_float_value.append(0) |
||||
|
||||
message.repeated_double_value.append(1E-15) |
||||
message.repeated_double_value.append(float('inf')) |
||||
message.repeated_bool_value.append(True) |
||||
message.repeated_bool_value.append(False) |
||||
message.repeated_string_value.append('Few symbols!#$,;') |
||||
message.repeated_string_value.append('bar') |
||||
message.repeated_bytes_value.append(b'foo') |
||||
message.repeated_bytes_value.append(b'bar') |
||||
message.repeated_message_value.add().value = 10 |
||||
message.repeated_message_value.add().value = 11 |
||||
message.repeated_enum_value.append(json_format_proto3_pb2.FOO) |
||||
message.repeated_enum_value.append(json_format_proto3_pb2.BAR) |
||||
self.message = message |
||||
|
||||
def CheckParseBack(self, message, parsed_message): |
||||
json_format.Parse(json_format.MessageToJson(message), |
||||
parsed_message) |
||||
self.assertEqual(message, parsed_message) |
||||
|
||||
def CheckError(self, text, error_message): |
||||
message = json_format_proto3_pb2.TestMessage() |
||||
self.assertRaisesRegexp( |
||||
json_format.ParseError, |
||||
error_message, |
||||
json_format.Parse, text, message) |
||||
|
||||
|
||||
class JsonFormatTest(JsonFormatBase): |
||||
|
||||
def testEmptyMessageToJson(self): |
||||
message = json_format_proto3_pb2.TestMessage() |
||||
self.assertEqual(json_format.MessageToJson(message), |
||||
'{}') |
||||
parsed_message = json_format_proto3_pb2.TestMessage() |
||||
self.CheckParseBack(message, parsed_message) |
||||
|
||||
def testPartialMessageToJson(self): |
||||
message = json_format_proto3_pb2.TestMessage( |
||||
string_value='test', |
||||
repeated_int32_value=[89, 4]) |
||||
self.assertEqual(json.loads(json_format.MessageToJson(message)), |
||||
json.loads('{"stringValue": "test", ' |
||||
'"repeatedInt32Value": [89, 4]}')) |
||||
parsed_message = json_format_proto3_pb2.TestMessage() |
||||
self.CheckParseBack(message, parsed_message) |
||||
|
||||
def testAllFieldsToJson(self): |
||||
message = json_format_proto3_pb2.TestMessage() |
||||
text = ('{"int32Value": 20, ' |
||||
'"int64Value": "-20", ' |
||||
'"uint32Value": 3120987654,' |
||||
'"uint64Value": "12345678900",' |
||||
'"floatValue": "-Infinity",' |
||||
'"doubleValue": 3.1415,' |
||||
'"boolValue": true,' |
||||
'"stringValue": "foo",' |
||||
'"bytesValue": "YmFy",' |
||||
'"messageValue": {"value": 10},' |
||||
'"enumValue": "BAR",' |
||||
'"repeatedInt32Value": [2147483647, -2147483648],' |
||||
'"repeatedInt64Value": ["9007199254740992", "-9007199254740992"],' |
||||
'"repeatedUint32Value": [268435455, 134217727],' |
||||
'"repeatedUint64Value": ["9007199254740992", "9007199254740991"],' |
||||
'"repeatedFloatValue": [0],' |
||||
'"repeatedDoubleValue": [1e-15, "Infinity"],' |
||||
'"repeatedBoolValue": [true, false],' |
||||
'"repeatedStringValue": ["Few symbols!#$,;", "bar"],' |
||||
'"repeatedBytesValue": ["Zm9v", "YmFy"],' |
||||
'"repeatedMessageValue": [{"value": 10}, {"value": 11}],' |
||||
'"repeatedEnumValue": ["FOO", "BAR"]' |
||||
'}') |
||||
self.FillAllFields(message) |
||||
self.assertEqual( |
||||
json.loads(json_format.MessageToJson(message)), |
||||
json.loads(text)) |
||||
parsed_message = json_format_proto3_pb2.TestMessage() |
||||
json_format.Parse(text, parsed_message) |
||||
self.assertEqual(message, parsed_message) |
||||
|
||||
def testJsonEscapeString(self): |
||||
message = json_format_proto3_pb2.TestMessage() |
||||
if sys.version_info[0] < 3: |
||||
message.string_value = '&\n<\"\r>\b\t\f\\\001/\xe2\x80\xa8\xe2\x80\xa9' |
||||
else: |
||||
message.string_value = '&\n<\"\r>\b\t\f\\\001/' |
||||
message.string_value += (b'\xe2\x80\xa8\xe2\x80\xa9').decode('utf-8') |
||||
self.assertEqual( |
||||
json_format.MessageToJson(message), |
||||
'{\n "stringValue": ' |
||||
'"&\\n<\\\"\\r>\\b\\t\\f\\\\\\u0001/\\u2028\\u2029"\n}') |
||||
parsed_message = json_format_proto3_pb2.TestMessage() |
||||
self.CheckParseBack(message, parsed_message) |
||||
text = u'{"int32Value": "\u0031"}' |
||||
json_format.Parse(text, message) |
||||
self.assertEqual(message.int32_value, 1) |
||||
|
||||
def testAlwaysSerialize(self): |
||||
message = json_format_proto3_pb2.TestMessage( |
||||
string_value='foo') |
||||
self.assertEqual( |
||||
json.loads(json_format.MessageToJson(message, True)), |
||||
json.loads('{' |
||||
'"repeatedStringValue": [],' |
||||
'"stringValue": "foo",' |
||||
'"repeatedBoolValue": [],' |
||||
'"repeatedUint32Value": [],' |
||||
'"repeatedInt32Value": [],' |
||||
'"enumValue": "FOO",' |
||||
'"int32Value": 0,' |
||||
'"floatValue": 0,' |
||||
'"int64Value": "0",' |
||||
'"uint32Value": 0,' |
||||
'"repeatedBytesValue": [],' |
||||
'"repeatedUint64Value": [],' |
||||
'"repeatedDoubleValue": [],' |
||||
'"bytesValue": "",' |
||||
'"boolValue": false,' |
||||
'"repeatedEnumValue": [],' |
||||
'"uint64Value": "0",' |
||||
'"doubleValue": 0,' |
||||
'"repeatedFloatValue": [],' |
||||
'"repeatedInt64Value": [],' |
||||
'"repeatedMessageValue": []}')) |
||||
parsed_message = json_format_proto3_pb2.TestMessage() |
||||
self.CheckParseBack(message, parsed_message) |
||||
|
||||
def testMapFields(self): |
||||
message = json_format_proto3_pb2.TestMap() |
||||
message.bool_map[True] = 1 |
||||
message.bool_map[False] = 2 |
||||
message.int32_map[1] = 2 |
||||
message.int32_map[2] = 3 |
||||
message.int64_map[1] = 2 |
||||
message.int64_map[2] = 3 |
||||
message.uint32_map[1] = 2 |
||||
message.uint32_map[2] = 3 |
||||
message.uint64_map[1] = 2 |
||||
message.uint64_map[2] = 3 |
||||
message.string_map['1'] = 2 |
||||
message.string_map['null'] = 3 |
||||
self.assertEqual( |
||||
json.loads(json_format.MessageToJson(message, True)), |
||||
json.loads('{' |
||||
'"boolMap": {"false": 2, "true": 1},' |
||||
'"int32Map": {"1": 2, "2": 3},' |
||||
'"int64Map": {"1": 2, "2": 3},' |
||||
'"uint32Map": {"1": 2, "2": 3},' |
||||
'"uint64Map": {"1": 2, "2": 3},' |
||||
'"stringMap": {"1": 2, "null": 3}' |
||||
'}')) |
||||
parsed_message = json_format_proto3_pb2.TestMap() |
||||
self.CheckParseBack(message, parsed_message) |
||||
|
||||
def testOneofFields(self): |
||||
message = json_format_proto3_pb2.TestOneof() |
||||
# The always-print option does not affect oneof fields. |
||||
self.assertEqual( |
||||
json_format.MessageToJson(message, True), |
||||
'{}') |
||||
message.oneof_int32_value = 0 |
||||
self.assertEqual( |
||||
json_format.MessageToJson(message, True), |
||||
'{\n' |
||||
' "oneofInt32Value": 0\n' |
||||
'}') |
||||
parsed_message = json_format_proto3_pb2.TestOneof() |
||||
self.CheckParseBack(message, parsed_message) |
||||
|
||||
def testTimestampMessage(self): |
||||
message = json_format_proto3_pb2.TestTimestamp() |
||||
message.value.seconds = 0 |
||||
message.value.nanos = 0 |
||||
message.repeated_value.add().seconds = 20 |
||||
message.repeated_value[0].nanos = 1 |
||||
message.repeated_value.add().seconds = 0 |
||||
message.repeated_value[1].nanos = 10000 |
||||
message.repeated_value.add().seconds = 100000000 |
||||
message.repeated_value[2].nanos = 0 |
||||
# Maximum time |
||||
message.repeated_value.add().seconds = 253402300799 |
||||
message.repeated_value[3].nanos = 999999999 |
||||
# Minimum time |
||||
message.repeated_value.add().seconds = -62135596800 |
||||
message.repeated_value[4].nanos = 0 |
||||
self.assertEqual( |
||||
json.loads(json_format.MessageToJson(message, True)), |
||||
json.loads('{' |
||||
'"value": "1970-01-01T00:00:00Z",' |
||||
'"repeatedValue": [' |
||||
' "1970-01-01T00:00:20.000000001Z",' |
||||
' "1970-01-01T00:00:00.000010Z",' |
||||
' "1973-03-03T09:46:40Z",' |
||||
' "9999-12-31T23:59:59.999999999Z",' |
||||
' "0001-01-01T00:00:00Z"' |
||||
']' |
||||
'}')) |
||||
parsed_message = json_format_proto3_pb2.TestTimestamp() |
||||
self.CheckParseBack(message, parsed_message) |
||||
text = (r'{"value": "1972-01-01T01:00:00.01+08:00",' |
||||
r'"repeatedValue":[' |
||||
r' "1972-01-01T01:00:00.01+08:30",' |
||||
r' "1972-01-01T01:00:00.01-01:23"]}') |
||||
json_format.Parse(text, parsed_message) |
||||
self.assertEqual(parsed_message.value.seconds, 63104400) |
||||
self.assertEqual(parsed_message.value.nanos, 10000000) |
||||
self.assertEqual(parsed_message.repeated_value[0].seconds, 63106200) |
||||
self.assertEqual(parsed_message.repeated_value[1].seconds, 63070620) |
||||
|
||||
def testDurationMessage(self): |
||||
message = json_format_proto3_pb2.TestDuration() |
||||
message.value.seconds = 1 |
||||
message.repeated_value.add().seconds = 0 |
||||
message.repeated_value[0].nanos = 10 |
||||
message.repeated_value.add().seconds = -1 |
||||
message.repeated_value[1].nanos = -1000 |
||||
message.repeated_value.add().seconds = 10 |
||||
message.repeated_value[2].nanos = 11000000 |
||||
message.repeated_value.add().seconds = -315576000000 |
||||
message.repeated_value.add().seconds = 315576000000 |
||||
self.assertEqual( |
||||
json.loads(json_format.MessageToJson(message, True)), |
||||
json.loads('{' |
||||
'"value": "1s",' |
||||
'"repeatedValue": [' |
||||
' "0.000000010s",' |
||||
' "-1.000001s",' |
||||
' "10.011s",' |
||||
' "-315576000000s",' |
||||
' "315576000000s"' |
||||
']' |
||||
'}')) |
||||
parsed_message = json_format_proto3_pb2.TestDuration() |
||||
self.CheckParseBack(message, parsed_message) |
||||
|
||||
def testFieldMaskMessage(self): |
||||
message = json_format_proto3_pb2.TestFieldMask() |
||||
message.value.paths.append('foo.bar') |
||||
message.value.paths.append('bar') |
||||
self.assertEqual( |
||||
json_format.MessageToJson(message, True), |
||||
'{\n' |
||||
' "value": "foo.bar,bar"\n' |
||||
'}') |
||||
parsed_message = json_format_proto3_pb2.TestFieldMask() |
||||
self.CheckParseBack(message, parsed_message) |
||||
|
||||
def testWrapperMessage(self): |
||||
message = json_format_proto3_pb2.TestWrapper() |
||||
message.bool_value.value = False |
||||
message.int32_value.value = 0 |
||||
message.string_value.value = '' |
||||
message.bytes_value.value = b'' |
||||
message.repeated_bool_value.add().value = True |
||||
message.repeated_bool_value.add().value = False |
||||
self.assertEqual( |
||||
json.loads(json_format.MessageToJson(message, True)), |
||||
json.loads('{\n' |
||||
' "int32Value": 0,' |
||||
' "boolValue": false,' |
||||
' "stringValue": "",' |
||||
' "bytesValue": "",' |
||||
' "repeatedBoolValue": [true, false],' |
||||
' "repeatedInt32Value": [],' |
||||
' "repeatedUint32Value": [],' |
||||
' "repeatedFloatValue": [],' |
||||
' "repeatedDoubleValue": [],' |
||||
' "repeatedBytesValue": [],' |
||||
' "repeatedInt64Value": [],' |
||||
' "repeatedUint64Value": [],' |
||||
' "repeatedStringValue": []' |
||||
'}')) |
||||
parsed_message = json_format_proto3_pb2.TestWrapper() |
||||
self.CheckParseBack(message, parsed_message) |
||||
|
||||
def testParseNull(self): |
||||
message = json_format_proto3_pb2.TestMessage() |
||||
message.repeated_int32_value.append(1) |
||||
message.repeated_int32_value.append(2) |
||||
message.repeated_int32_value.append(3) |
||||
parsed_message = json_format_proto3_pb2.TestMessage() |
||||
self.FillAllFields(parsed_message) |
||||
json_format.Parse('{"int32Value": null, ' |
||||
'"int64Value": null, ' |
||||
'"uint32Value": null,' |
||||
'"uint64Value": null,' |
||||
'"floatValue": null,' |
||||
'"doubleValue": null,' |
||||
'"boolValue": null,' |
||||
'"stringValue": null,' |
||||
'"bytesValue": null,' |
||||
'"messageValue": null,' |
||||
'"enumValue": null,' |
||||
'"repeatedInt32Value": [1, 2, null, 3],' |
||||
'"repeatedInt64Value": null,' |
||||
'"repeatedUint32Value": null,' |
||||
'"repeatedUint64Value": null,' |
||||
'"repeatedFloatValue": null,' |
||||
'"repeatedDoubleValue": null,' |
||||
'"repeatedBoolValue": null,' |
||||
'"repeatedStringValue": null,' |
||||
'"repeatedBytesValue": null,' |
||||
'"repeatedMessageValue": null,' |
||||
'"repeatedEnumValue": null' |
||||
'}', |
||||
parsed_message) |
||||
self.assertEqual(message, parsed_message) |
||||
|
||||
def testNanFloat(self): |
||||
message = json_format_proto3_pb2.TestMessage() |
||||
message.float_value = float('nan') |
||||
text = '{\n "floatValue": "NaN"\n}' |
||||
self.assertEqual(json_format.MessageToJson(message), text) |
||||
parsed_message = json_format_proto3_pb2.TestMessage() |
||||
json_format.Parse(text, parsed_message) |
||||
self.assertTrue(math.isnan(parsed_message.float_value)) |
||||
|
||||
def testParseEmptyText(self): |
||||
self.CheckError('', |
||||
r'Failed to load JSON: (Expecting value)|(No JSON)') |
||||
|
||||
def testParseBadEnumValue(self): |
||||
self.CheckError( |
||||
'{"enumValue": 1}', |
||||
'Enum value must be a string literal with double quotes. ' |
||||
'Type "proto3.EnumType" has no value named 1.') |
||||
self.CheckError( |
||||
'{"enumValue": "baz"}', |
||||
'Enum value must be a string literal with double quotes. ' |
||||
'Type "proto3.EnumType" has no value named baz.') |
||||
|
||||
def testParseBadIdentifier(self): |
||||
self.CheckError('{int32Value: 1}', |
||||
(r'Failed to load JSON: Expecting property name enclosed ' |
||||
r'in double quotes: line 1')) |
||||
self.CheckError('{"unknownName": 1}', |
||||
'Message type "proto3.TestMessage" has no field named ' |
||||
'"unknownName".') |
||||
|
||||
def testDuplicateField(self): |
||||
self.CheckError('{"int32Value": 1,\n"int32Value":2}', |
||||
'Failed to load JSON: duplicate key int32Value') |
||||
|
||||
def testInvalidBoolValue(self): |
||||
self.CheckError('{"boolValue": 1}', |
||||
'Failed to parse boolValue field: ' |
||||
'Expected true or false without quotes.') |
||||
self.CheckError('{"boolValue": "true"}', |
||||
'Failed to parse boolValue field: ' |
||||
'Expected true or false without quotes.') |
||||
|
||||
def testInvalidIntegerValue(self): |
||||
message = json_format_proto3_pb2.TestMessage() |
||||
text = '{"int32Value": 0x12345}' |
||||
self.assertRaises(json_format.ParseError, |
||||
json_format.Parse, text, message) |
||||
self.CheckError('{"int32Value": 012345}', |
||||
(r'Failed to load JSON: Expecting \',\' delimiter: ' |
||||
r'line 1')) |
||||
self.CheckError('{"int32Value": 1.0}', |
||||
'Failed to parse int32Value field: ' |
||||
'Couldn\'t parse integer: 1.0') |
||||
self.CheckError('{"int32Value": " 1 "}', |
||||
'Failed to parse int32Value field: ' |
||||
'Couldn\'t parse integer: " 1 "') |
||||
self.CheckError('{"int32Value": 12345678901234567890}', |
||||
'Failed to parse int32Value field: Value out of range: ' |
||||
'12345678901234567890') |
||||
self.CheckError('{"int32Value": 1e5}', |
||||
'Failed to parse int32Value field: ' |
||||
'Couldn\'t parse integer: 100000.0') |
||||
self.CheckError('{"uint32Value": -1}', |
||||
'Failed to parse uint32Value field: Value out of range: -1') |
||||
|
||||
def testInvalidFloatValue(self): |
||||
self.CheckError('{"floatValue": "nan"}', |
||||
'Failed to parse floatValue field: Couldn\'t ' |
||||
'parse float "nan", use "NaN" instead') |
||||
|
||||
def testInvalidBytesValue(self): |
||||
self.CheckError('{"bytesValue": "AQI"}', |
||||
'Failed to parse bytesValue field: Incorrect padding') |
||||
self.CheckError('{"bytesValue": "AQI*"}', |
||||
'Failed to parse bytesValue field: Incorrect padding') |
||||
|
||||
def testInvalidMap(self): |
||||
message = json_format_proto3_pb2.TestMap() |
||||
text = '{"int32Map": {"null": 2, "2": 3}}' |
||||
self.assertRaisesRegexp( |
||||
json_format.ParseError, |
||||
'Failed to parse int32Map field: Couldn\'t parse integer: "null"', |
||||
json_format.Parse, text, message) |
||||
text = '{"int32Map": {1: 2, "2": 3}}' |
||||
self.assertRaisesRegexp( |
||||
json_format.ParseError, |
||||
(r'Failed to load JSON: Expecting property name enclosed ' |
||||
r'in double quotes: line 1'), |
||||
json_format.Parse, text, message) |
||||
text = r'{"stringMap": {"a": 3, "\u0061": 2}}' |
||||
self.assertRaisesRegexp( |
||||
json_format.ParseError, |
||||
'Failed to load JSON: duplicate key a', |
||||
json_format.Parse, text, message) |
||||
text = '{"boolMap": {"null": 1}}' |
||||
self.assertRaisesRegexp( |
||||
json_format.ParseError, |
||||
'Failed to parse boolMap field: Expect "true" or "false", not null.', |
||||
json_format.Parse, text, message) |
||||
|
||||
def testInvalidTimestamp(self): |
||||
message = json_format_proto3_pb2.TestTimestamp() |
||||
text = '{"value": "10000-01-01T00:00:00.00Z"}' |
||||
self.assertRaisesRegexp( |
||||
json_format.ParseError, |
||||
'time data \'10000-01-01T00:00:00\' does not match' |
||||
' format \'%Y-%m-%dT%H:%M:%S\'', |
||||
json_format.Parse, text, message) |
||||
text = '{"value": "1970-01-01T00:00:00.0123456789012Z"}' |
||||
self.assertRaisesRegexp( |
||||
json_format.ParseError, |
||||
'Failed to parse value field: Failed to parse Timestamp: ' |
||||
'nanos 0123456789012 more than 9 fractional digits.', |
||||
json_format.Parse, text, message) |
||||
text = '{"value": "1972-01-01T01:00:00.01+08"}' |
||||
self.assertRaisesRegexp( |
||||
json_format.ParseError, |
||||
(r'Failed to parse value field: Invalid timezone offset value: \+08'), |
||||
json_format.Parse, text, message) |
||||
# Time smaller than minimum time. |
||||
text = '{"value": "0000-01-01T00:00:00Z"}' |
||||
self.assertRaisesRegexp( |
||||
json_format.ParseError, |
||||
'Failed to parse value field: year is out of range', |
||||
json_format.Parse, text, message) |
||||
# Time bigger than maximum time. |
||||
message.value.seconds = 253402300800 |
||||
self.assertRaisesRegexp( |
||||
json_format.SerializeToJsonError, |
||||
'Failed to serialize value field: year is out of range', |
||||
json_format.MessageToJson, message) |
||||
|
||||
def testInvalidOneof(self): |
||||
message = json_format_proto3_pb2.TestOneof() |
||||
text = '{"oneofInt32Value": 1, "oneofStringValue": "2"}' |
||||
self.assertRaisesRegexp( |
||||
json_format.ParseError, |
||||
'Message type "proto3.TestOneof"' |
||||
' should not have multiple "oneof_value" oneof fields.', |
||||
json_format.Parse, text, message) |
||||
|
||||
|
||||
if __name__ == '__main__': |
||||
unittest.main() |
@ -0,0 +1,601 @@ |
||||
# Protocol Buffers - Google's data interchange format |
||||
# Copyright 2008 Google Inc. All rights reserved. |
||||
# https://developers.google.com/protocol-buffers/ |
||||
# |
||||
# Redistribution and use in source and binary forms, with or without |
||||
# modification, are permitted provided that the following conditions are |
||||
# met: |
||||
# |
||||
# * Redistributions of source code must retain the above copyright |
||||
# notice, this list of conditions and the following disclaimer. |
||||
# * Redistributions in binary form must reproduce the above |
||||
# copyright notice, this list of conditions and the following disclaimer |
||||
# in the documentation and/or other materials provided with the |
||||
# distribution. |
||||
# * Neither the name of Google Inc. nor the names of its |
||||
# contributors may be used to endorse or promote products derived from |
||||
# this software without specific prior written permission. |
||||
# |
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
||||
|
||||
"""Contains routines for printing protocol messages in JSON format.""" |
||||
|
||||
__author__ = 'jieluo@google.com (Jie Luo)' |
||||
|
||||
import base64 |
||||
from datetime import datetime |
||||
import json |
||||
import math |
||||
import re |
||||
|
||||
from google.protobuf import descriptor |
||||
|
||||
_TIMESTAMPFORMAT = '%Y-%m-%dT%H:%M:%S' |
||||
_NUMBER = re.compile(u'[0-9+-][0-9e.+-]*') |
||||
_INTEGER = re.compile(u'[0-9+-]') |
||||
_INT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT32, |
||||
descriptor.FieldDescriptor.CPPTYPE_UINT32, |
||||
descriptor.FieldDescriptor.CPPTYPE_INT64, |
||||
descriptor.FieldDescriptor.CPPTYPE_UINT64]) |
||||
_INT64_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT64, |
||||
descriptor.FieldDescriptor.CPPTYPE_UINT64]) |
||||
_FLOAT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_FLOAT, |
||||
descriptor.FieldDescriptor.CPPTYPE_DOUBLE]) |
||||
if str is bytes: |
||||
_UNICODETYPE = unicode |
||||
else: |
||||
_UNICODETYPE = str |
||||
|
||||
|
||||
class SerializeToJsonError(Exception): |
||||
"""Thrown if serialization to JSON fails.""" |
||||
|
||||
|
||||
class ParseError(Exception): |
||||
"""Thrown in case of parsing error.""" |
||||
|
||||
|
||||
def MessageToJson(message, including_default_value_fields=False): |
||||
"""Converts protobuf message to JSON format. |
||||
|
||||
Args: |
||||
message: The protocol buffers message instance to serialize. |
||||
including_default_value_fields: If True, singular primitive fields, |
||||
repeated fields, and map fields will always be serialized. If |
||||
False, only serialize non-empty fields. Singular message fields |
||||
and oneof fields are not affected by this option. |
||||
|
||||
Returns: |
||||
A string containing the JSON formatted protocol buffer message. |
||||
""" |
||||
js = _MessageToJsonObject(message, including_default_value_fields) |
||||
return json.dumps(js, indent=2) |
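A usage sketch of MessageToJson (an illustration only, using the TestMessage proto from the accompanying json_format_proto3_pb2 test module; key order in the output may vary):

from google.protobuf import json_format
from google.protobuf.util import json_format_proto3_pb2

msg = json_format_proto3_pb2.TestMessage(
    string_value='test', repeated_int32_value=[89, 4])
# By default only the fields that are set are emitted.
print(json_format.MessageToJson(msg))
# {
#   "stringValue": "test",
#   "repeatedInt32Value": [89, 4]
# }
# Passing True also emits defaults for singular primitives, repeated fields
# and map fields; singular message fields and oneofs stay unaffected.
print(json_format.MessageToJson(msg, True))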
||||
|
||||
|
||||
def _MessageToJsonObject(message, including_default_value_fields): |
||||
"""Converts message to an object according to Proto3 JSON Specification.""" |
||||
message_descriptor = message.DESCRIPTOR |
||||
if _IsTimestampMessage(message_descriptor): |
||||
return _TimestampMessageToJsonObject(message) |
||||
if _IsDurationMessage(message_descriptor): |
||||
return _DurationMessageToJsonObject(message) |
||||
if _IsFieldMaskMessage(message_descriptor): |
||||
return _FieldMaskMessageToJsonObject(message) |
||||
if _IsWrapperMessage(message_descriptor): |
||||
return _WrapperMessageToJsonObject(message) |
||||
return _RegularMessageToJsonObject(message, including_default_value_fields) |
||||
|
||||
|
||||
def _IsMapEntry(field): |
||||
return (field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and |
||||
field.message_type.has_options and |
||||
field.message_type.GetOptions().map_entry) |
||||
|
||||
|
||||
def _RegularMessageToJsonObject(message, including_default_value_fields): |
||||
"""Converts normal message according to Proto3 JSON Specification.""" |
||||
js = {} |
||||
fields = message.ListFields() |
||||
|
||||
try: |
||||
for field, value in fields: |
||||
name = field.camelcase_name |
||||
if _IsMapEntry(field): |
||||
# Convert a map field. |
||||
js_map = {} |
||||
for key in value: |
||||
js_map[key] = _ConvertFieldToJsonObject( |
||||
field.message_type.fields_by_name['value'], |
||||
value[key], including_default_value_fields) |
||||
js[name] = js_map |
||||
elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: |
||||
# Convert a repeated field. |
||||
repeated = [] |
||||
for element in value: |
||||
repeated.append(_ConvertFieldToJsonObject( |
||||
field, element, including_default_value_fields)) |
||||
js[name] = repeated |
||||
else: |
||||
js[name] = _ConvertFieldToJsonObject( |
||||
field, value, including_default_value_fields) |
||||
|
||||
# Serialize default value if including_default_value_fields is True. |
||||
if including_default_value_fields: |
||||
message_descriptor = message.DESCRIPTOR |
||||
for field in message_descriptor.fields: |
||||
# Singular message fields and oneof fields will not be affected. |
||||
if ((field.label != descriptor.FieldDescriptor.LABEL_REPEATED and |
||||
field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE) or |
||||
field.containing_oneof): |
||||
continue |
||||
name = field.camelcase_name |
||||
if name in js: |
||||
# Skip fields that have already been serialized. |
||||
continue |
||||
if _IsMapEntry(field): |
||||
js[name] = {} |
||||
elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: |
||||
js[name] = [] |
||||
else: |
||||
js[name] = _ConvertFieldToJsonObject(field, field.default_value) |
||||
|
||||
except ValueError as e: |
||||
raise SerializeToJsonError( |
||||
'Failed to serialize {0} field: {1}'.format(field.name, e)) |
||||
|
||||
return js |
||||
|
||||
|
||||
def _ConvertFieldToJsonObject( |
||||
field, value, including_default_value_fields=False): |
||||
"""Converts field value according to Proto3 JSON Specification.""" |
||||
if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: |
||||
return _MessageToJsonObject(value, including_default_value_fields) |
||||
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM: |
||||
enum_value = field.enum_type.values_by_number.get(value, None) |
||||
if enum_value is not None: |
||||
return enum_value.name |
||||
else: |
||||
raise SerializeToJsonError('Enum field contains an integer value ' |
||||
'which cannot be mapped to an enum value.') |
||||
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: |
||||
if field.type == descriptor.FieldDescriptor.TYPE_BYTES: |
||||
# Use base64 data encoding for bytes fields. |
||||
return base64.b64encode(value).decode('utf-8') |
||||
else: |
||||
return value |
||||
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL: |
||||
if value: |
||||
return True |
||||
else: |
||||
return False |
||||
elif field.cpp_type in _INT64_TYPES: |
||||
return str(value) |
||||
elif field.cpp_type in _FLOAT_TYPES: |
||||
if math.isinf(value): |
||||
if value < 0.0: |
||||
return '-Infinity' |
||||
else: |
||||
return 'Infinity' |
||||
if math.isnan(value): |
||||
return 'NaN' |
||||
return value |
||||
|
||||
|
||||
def _IsTimestampMessage(message_descriptor): |
||||
return (message_descriptor.name == 'Timestamp' and |
||||
message_descriptor.file.name == 'google/protobuf/timestamp.proto') |
||||
|
||||
|
||||
def _TimestampMessageToJsonObject(message): |
||||
"""Converts Timestamp message according to Proto3 JSON Specification.""" |
||||
nanos = message.nanos % 1e9 |
||||
dt = datetime.utcfromtimestamp( |
||||
message.seconds + (message.nanos - nanos) / 1e9) |
||||
result = dt.isoformat() |
||||
if (nanos % 1e9) == 0: |
||||
# If there are 0 fractional digits, the fractional |
||||
# point '.' should be omitted when serializing. |
||||
return result + 'Z' |
||||
if (nanos % 1e6) == 0: |
||||
# Serialize 3 fractional digits. |
||||
return result + '.%03dZ' % (nanos / 1e6) |
||||
if (nanos % 1e3) == 0: |
||||
# Serialize 6 fractional digits. |
||||
return result + '.%06dZ' % (nanos / 1e3) |
||||
# Serialize 9 fractional digits. |
||||
return result + '.%09dZ' % nanos |
||||
|
||||
|
||||
def _IsDurationMessage(message_descriptor): |
||||
return (message_descriptor.name == 'Duration' and |
||||
message_descriptor.file.name == 'google/protobuf/duration.proto') |
||||
|
||||
|
||||
def _DurationMessageToJsonObject(message): |
||||
"""Converts Duration message according to Proto3 JSON Specification.""" |
||||
if message.seconds < 0 or message.nanos < 0: |
||||
result = '-' |
||||
seconds = - message.seconds + int((0 - message.nanos) / 1e9) |
||||
nanos = (0 - message.nanos) % 1e9 |
||||
else: |
||||
result = '' |
||||
seconds = message.seconds + int(message.nanos / 1e9) |
||||
nanos = message.nanos % 1e9 |
||||
result += '%d' % seconds |
||||
if (nanos % 1e9) == 0: |
||||
# If there are 0 fractional digits, the fractional |
||||
# point '.' should be omitted when serializing. |
||||
return result + 's' |
||||
if (nanos % 1e6) == 0: |
||||
# Serialize 3 fractional digits. |
||||
return result + '.%03ds' % (nanos / 1e6) |
||||
if (nanos % 1e3) == 0: |
||||
# Serialize 6 fractional digits. |
||||
return result + '.%06ds' % (nanos / 1e3) |
||||
# Serialize 9 fractional digits. |
||||
return result + '.%09ds' % nanos |
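A worked sketch of the sign and fractional-digit handling above, mirroring the arithmetic of _DurationMessageToJsonObject for seconds=-1, nanos=-1000 (the case exercised in the tests):

seconds, nanos = -1, -1000
sign = '-' if seconds < 0 or nanos < 0 else ''
abs_seconds = -seconds + int((0 - nanos) / 1e9)  # 1
abs_nanos = (0 - nanos) % 1e9                    # 1000.0
# 1000 nanos is a whole number of microseconds, so six fractional digits are kept.
text = '%s%d.%06ds' % (sign, abs_seconds, abs_nanos / 1e3)
assert text == '-1.000001s'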
||||
|
||||
|
||||
def _IsFieldMaskMessage(message_descriptor): |
||||
return (message_descriptor.name == 'FieldMask' and |
||||
message_descriptor.file.name == 'google/protobuf/field_mask.proto') |
||||
|
||||
|
||||
def _FieldMaskMessageToJsonObject(message): |
||||
"""Converts FieldMask message according to Proto3 JSON Specification.""" |
||||
result = '' |
||||
first = True |
||||
for path in message.paths: |
||||
if not first: |
||||
result += ',' |
||||
result += path |
||||
first = False |
||||
return result |
||||
|
||||
|
||||
def _IsWrapperMessage(message_descriptor): |
||||
return message_descriptor.file.name == 'google/protobuf/wrappers.proto' |
||||
|
||||
|
||||
def _WrapperMessageToJsonObject(message): |
||||
return _ConvertFieldToJsonObject( |
||||
message.DESCRIPTOR.fields_by_name['value'], message.value) |
||||
|
||||
|
||||
def _DuplicateChecker(js): |
||||
result = {} |
||||
for name, value in js: |
||||
if name in result: |
||||
raise ParseError('Failed to load JSON: duplicate key ' + name) |
||||
result[name] = value |
||||
return result |
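For example, passing this checker as the object_pairs_hook of json.loads (as Parse does below) turns duplicate keys into a ParseError instead of silently keeping the last value; a minimal sketch, run in the context of this module:

try:
  json.loads('{"int32Value": 1, "int32Value": 2}',
             object_pairs_hook=_DuplicateChecker)
except ParseError as e:
  print(e)  # Failed to load JSON: duplicate key int32Value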
||||
|
||||
|
||||
def Parse(text, message): |
||||
"""Parses a JSON representation of a protocol message into a message. |
||||
|
||||
Args: |
||||
text: Message JSON representation. |
||||
message: A protocol buffer message to merge into. |
||||
|
||||
Returns: |
||||
The same message passed as argument. |
||||
|
||||
Raises: |
||||
ParseError: On JSON parsing problems. |
||||
""" |
||||
if not isinstance(text, _UNICODETYPE): text = text.decode('utf-8') |
||||
try: |
||||
js = json.loads(text, object_pairs_hook=_DuplicateChecker) |
||||
except ValueError as e: |
||||
raise ParseError('Failed to load JSON: ' + str(e)) |
||||
_ConvertFieldValuePair(js, message) |
||||
return message |
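A usage sketch of Parse (an illustration only, again using the json_format_proto3_pb2 test protos):

message = json_format_proto3_pb2.TestMessage()
json_format.Parse('{"int32Value": 20, "repeatedStringValue": ["a", "b"]}', message)
assert message.int32_value == 20
assert list(message.repeated_string_value) == ['a', 'b']
# Malformed JSON and unknown field names surface as ParseError.
try:
  json_format.Parse('{"unknownName": 1}', message)
except json_format.ParseError as e:
  print(e)  # Message type "proto3.TestMessage" has no field named "unknownName".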
||||
|
||||
|
||||
def _ConvertFieldValuePair(js, message): |
||||
"""Convert field value pairs into regular message. |
||||
|
||||
Args: |
||||
js: A JSON object containing the field/value pairs to convert. |
||||
message: A regular protocol message to record the data. |
||||
|
||||
Raises: |
||||
ParseError: In case of problems converting. |
||||
""" |
||||
names = [] |
||||
message_descriptor = message.DESCRIPTOR |
||||
for name in js: |
||||
try: |
||||
field = message_descriptor.fields_by_camelcase_name.get(name, None) |
||||
if not field: |
||||
raise ParseError( |
||||
'Message type "{0}" has no field named "{1}".'.format( |
||||
message_descriptor.full_name, name)) |
||||
if name in names: |
||||
raise ParseError( |
||||
'Message type "{0}" should not have multiple "{1}" fields.'.format( |
||||
message.DESCRIPTOR.full_name, name)) |
||||
names.append(name) |
||||
# Check no other oneof field is parsed. |
||||
if field.containing_oneof is not None: |
||||
oneof_name = field.containing_oneof.name |
||||
if oneof_name in names: |
||||
raise ParseError('Message type "{0}" should not have multiple "{1}" ' |
||||
'oneof fields.'.format( |
||||
message.DESCRIPTOR.full_name, oneof_name)) |
||||
names.append(oneof_name) |
||||
|
||||
value = js[name] |
||||
if value is None: |
||||
message.ClearField(field.name) |
||||
continue |
||||
|
||||
# Parse field value. |
||||
if _IsMapEntry(field): |
||||
message.ClearField(field.name) |
||||
_ConvertMapFieldValue(value, message, field) |
||||
elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: |
||||
message.ClearField(field.name) |
||||
if not isinstance(value, list): |
||||
raise ParseError('repeated field {0} must be in [] which is ' |
||||
'{1}'.format(name, value)) |
||||
for item in value: |
||||
if item is None: |
||||
continue |
||||
if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: |
||||
sub_message = getattr(message, field.name).add() |
||||
_ConvertMessage(item, sub_message) |
||||
else: |
||||
getattr(message, field.name).append( |
||||
_ConvertScalarFieldValue(item, field)) |
||||
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: |
||||
sub_message = getattr(message, field.name) |
||||
_ConvertMessage(value, sub_message) |
||||
else: |
||||
setattr(message, field.name, _ConvertScalarFieldValue(value, field)) |
||||
except ParseError as e: |
||||
if field and field.containing_oneof is None: |
||||
raise ParseError('Failed to parse {0} field: {1}'.format(name, e)) |
||||
else: |
||||
raise ParseError(str(e)) |
||||
except ValueError as e: |
||||
raise ParseError('Failed to parse {0} field: {1}'.format(name, e)) |
||||
except TypeError as e: |
||||
raise ParseError('Failed to parse {0} field: {1}'.format(name, e)) |
||||
|
||||
|
||||
def _ConvertMessage(value, message): |
||||
"""Convert a JSON object into a message. |
||||
|
||||
Args: |
||||
value: A JSON object. |
||||
message: A well-known type (WKT) or regular protocol message to record the data. |
||||
|
||||
Raises: |
||||
ParseError: In case of conversion problems. |
||||
""" |
||||
message_descriptor = message.DESCRIPTOR |
||||
if _IsTimestampMessage(message_descriptor): |
||||
_ConvertTimestampMessage(value, message) |
||||
elif _IsDurationMessage(message_descriptor): |
||||
_ConvertDurationMessage(value, message) |
||||
elif _IsFieldMaskMessage(message_descriptor): |
||||
_ConvertFieldMaskMessage(value, message) |
||||
elif _IsWrapperMessage(message_descriptor): |
||||
_ConvertWrapperMessage(value, message) |
||||
else: |
||||
_ConvertFieldValuePair(value, message) |
||||
|
||||
|
||||
def _ConvertTimestampMessage(value, message): |
||||
"""Convert a JSON representation into Timestamp message.""" |
||||
timezone_offset = value.find('Z') |
||||
if timezone_offset == -1: |
||||
timezone_offset = value.find('+') |
||||
if timezone_offset == -1: |
||||
timezone_offset = value.rfind('-') |
||||
if timezone_offset == -1: |
||||
raise ParseError( |
||||
'Failed to parse timestamp: missing valid timezone offset.') |
||||
time_value = value[0:timezone_offset] |
||||
# Parse datetime and nanos |
||||
point_position = time_value.find('.') |
||||
if point_position == -1: |
||||
second_value = time_value |
||||
nano_value = '' |
||||
else: |
||||
second_value = time_value[:point_position] |
||||
nano_value = time_value[point_position + 1:] |
||||
date_object = datetime.strptime(second_value, _TIMESTAMPFORMAT) |
||||
seconds = (date_object - datetime(1970, 1, 1)).total_seconds() |
||||
if len(nano_value) > 9: |
||||
raise ParseError( |
||||
'Failed to parse Timestamp: nanos {0} more than ' |
||||
'9 fractional digits.'.format(nano_value)) |
||||
if nano_value: |
||||
nanos = round(float('0.' + nano_value) * 1e9) |
||||
else: |
||||
nanos = 0 |
||||
# Parse timezone offsets |
||||
if value[timezone_offset] == 'Z': |
||||
if len(value) != timezone_offset + 1: |
||||
raise ParseError( |
||||
'Failed to parse timestamp: invalid trailing data {0}.'.format(value)) |
||||
else: |
||||
timezone = value[timezone_offset:] |
||||
pos = timezone.find(':') |
||||
if pos == -1: |
||||
raise ParseError( |
||||
'Invalid timezone offset value: ' + timezone) |
||||
if timezone[0] == '+': |
||||
seconds += (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60 |
||||
else: |
||||
seconds -= (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60 |
||||
# Set seconds and nanos |
||||
message.seconds = int(seconds) |
||||
message.nanos = int(nanos) |
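For instance, the fractional-second handling above turns "1970-01-01T00:00:00.000010Z" (one of the cases in the tests) into nanos == 10000; a sketch of just that step:

nano_value = '000010'  # digits between the '.' and the timezone offset
nanos = int(round(float('0.' + nano_value) * 1e9))
assert nanos == 10000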
||||
|
||||
|
||||
def _ConvertDurationMessage(value, message): |
||||
"""Convert a JSON representation into Duration message.""" |
||||
if value[-1] != 's': |
||||
raise ParseError( |
||||
'Duration must end with letter "s": ' + value) |
||||
try: |
||||
duration = float(value[:-1]) |
||||
except ValueError: |
||||
raise ParseError( |
||||
'Couldn\'t parse duration: ' + value) |
||||
message.seconds = int(duration) |
||||
message.nanos = int(round((duration - message.seconds) * 1e9)) |
||||
|
||||
|
||||
def _ConvertFieldMaskMessage(value, message): |
||||
"""Convert a JSON representation into FieldMask message.""" |
||||
for path in value.split(','): |
||||
message.paths.append(path) |
||||
|
||||
|
||||
def _ConvertWrapperMessage(value, message): |
||||
"""Convert a JSON representation into Wrapper message.""" |
||||
field = message.DESCRIPTOR.fields_by_name['value'] |
||||
setattr(message, 'value', _ConvertScalarFieldValue(value, field)) |
||||
|
||||
|
||||
def _ConvertMapFieldValue(value, message, field): |
||||
"""Convert map field value for a message map field. |
||||
|
||||
Args: |
||||
value: A JSON object containing the map entries to convert. |
||||
message: A protocol message to record the converted data. |
||||
field: The descriptor of the map field to be converted. |
||||
|
||||
Raises: |
||||
ParseError: In case of convert problems. |
||||
""" |
||||
if not isinstance(value, dict): |
||||
raise ParseError( |
||||
'Map field {0} must be in {{}} which is {1}.'.format(field.name, value)) |
||||
key_field = field.message_type.fields_by_name['key'] |
||||
value_field = field.message_type.fields_by_name['value'] |
||||
for key in value: |
||||
key_value = _ConvertScalarFieldValue(key, key_field, True) |
||||
if value_field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: |
||||
_ConvertMessage(value[key], getattr(message, field.name)[key_value]) |
||||
else: |
||||
getattr(message, field.name)[key_value] = _ConvertScalarFieldValue( |
||||
value[key], value_field) |
||||
|
||||
|
||||
def _ConvertScalarFieldValue(value, field, require_quote=False): |
||||
"""Convert a single scalar field value. |
||||
|
||||
Args: |
||||
value: A scalar value to convert. |
||||
field: The descriptor of the field to convert. |
||||
require_quote: If True, '"' is required for the field value. |
||||
|
||||
Returns: |
||||
The converted scalar field value. |
||||
|
||||
Raises: |
||||
ParseError: In case of convert problems. |
||||
""" |
||||
if field.cpp_type in _INT_TYPES: |
||||
return _ConvertInteger(value) |
||||
elif field.cpp_type in _FLOAT_TYPES: |
||||
return _ConvertFloat(value) |
||||
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL: |
||||
return _ConvertBool(value, require_quote) |
||||
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: |
||||
if field.type == descriptor.FieldDescriptor.TYPE_BYTES: |
||||
return base64.b64decode(value) |
||||
else: |
||||
return value |
||||
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM: |
||||
# Convert an enum value. |
||||
enum_value = field.enum_type.values_by_name.get(value, None) |
||||
if enum_value is None: |
||||
raise ParseError( |
||||
'Enum value must be a string literal with double quotes. ' |
||||
'Type "{0}" has no value named {1}.'.format( |
||||
field.enum_type.full_name, value)) |
||||
return enum_value.number |
||||
|
||||
|
||||
def _ConvertInteger(value): |
||||
"""Convert an integer. |
||||
|
||||
Args: |
||||
value: A scalar value to convert. |
||||
|
||||
Returns: |
||||
The integer value. |
||||
|
||||
Raises: |
||||
ParseError: If an integer couldn't be consumed. |
||||
""" |
||||
if isinstance(value, float): |
||||
raise ParseError('Couldn\'t parse integer: {0}'.format(value)) |
||||
|
||||
if isinstance(value, _UNICODETYPE) and not _INTEGER.match(value): |
||||
raise ParseError('Couldn\'t parse integer: "{0}"'.format(value)) |
||||
|
||||
return int(value) |
||||
|
||||
|
||||
def _ConvertFloat(value): |
||||
"""Convert an floating point number.""" |
||||
if value == 'nan': |
||||
raise ParseError('Couldn\'t parse float "nan", use "NaN" instead') |
||||
try: |
||||
# Assume Python compatible syntax. |
||||
return float(value) |
||||
except ValueError: |
||||
# Check alternative spellings. |
||||
if value == '-Infinity': |
||||
return float('-inf') |
||||
elif value == 'Infinity': |
||||
return float('inf') |
||||
elif value == 'NaN': |
||||
return float('nan') |
||||
else: |
||||
raise ParseError('Couldn\'t parse float: {0}'.format(value)) |
||||
|
||||
|
||||
def _ConvertBool(value, require_quote): |
||||
"""Convert a boolean value. |
||||
|
||||
Args: |
||||
value: A scalar value to convert. |
||||
require_quote: If True, '"' is required for the boolean value. |
||||
|
||||
Returns: |
||||
The bool parsed. |
||||
|
||||
Raises: |
||||
ParseError: If a boolean value couldn't be consumed. |
||||
""" |
||||
if require_quote: |
||||
if value == 'true': |
||||
return True |
||||
elif value == 'false': |
||||
return False |
||||
else: |
||||
raise ParseError('Expect "true" or "false", not {0}.'.format(value)) |
||||
|
||||
if not isinstance(value, bool): |
||||
raise ParseError('Expected true or false without quotes.') |
||||
return value |
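JSON object keys are always strings, so boolean map keys (the boolMap field in the tests) are parsed with require_quote=True, while ordinary bool fields must be unquoted JSON booleans; a sketch in the context of this module:

assert _ConvertBool('true', True) is True
assert _ConvertBool(False, False) is False
# _ConvertBool('true', False) raises ParseError: quoted booleans are only
# accepted where the value had to be a JSON string, i.e. map keys.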
@ -0,0 +1,218 @@ |
||||
#ifndef NET_PROTO2_UTIL_CONVERTER_INTERNAL_DEFAULT_VALUE_OBJECTWRITER_H_ |
||||
#define NET_PROTO2_UTIL_CONVERTER_INTERNAL_DEFAULT_VALUE_OBJECTWRITER_H_ |
||||
|
||||
#include <memory> |
||||
#include <stack> |
||||
#include <vector> |
||||
|
||||
#include "base/macros.h" |
||||
#include "net/proto2/util/converter/internal/type_info.h" |
||||
#include "net/proto2/util/converter/public/datapiece.h" |
||||
#include "net/proto2/util/converter/public/object_writer.h" |
||||
#include "net/proto2/util/converter/public/utility.h" |
||||
#include "net/proto2/util/public/type_resolver.h" |
||||
#include "strings/stringpiece.h" |
||||
|
||||
namespace proto2 { |
||||
namespace util { |
||||
namespace converter { |
||||
|
||||
// An ObjectWriter that renders non-repeated primitive fields of proto messages
|
||||
// with their default values. DefaultValueObjectWriter holds objects, lists and
|
||||
// fields it receives in a tree structure and writes them out to another
|
||||
// ObjectWriter when EndObject() is called on the root object. It also writes
|
||||
// out all non-repeated primitive fields that haven't been explicitly rendered
|
||||
// with their default values (0 for numbers, "" for strings, etc).
|
||||
class DefaultValueObjectWriter : public ObjectWriter { |
||||
public: |
||||
#ifndef PROTO2_OPENSOURCE |
||||
DefaultValueObjectWriter(const TypeInfo& typeinfo, |
||||
const google::protobuf::Type& type, |
||||
ObjectWriter* ow); |
||||
#endif // !PROTO2_OPENSOURCE
|
||||
DefaultValueObjectWriter(TypeResolver* type_resolver, |
||||
const google::protobuf::Type& type, |
||||
ObjectWriter* ow); |
||||
|
||||
virtual ~DefaultValueObjectWriter(); |
||||
|
||||
// ObjectWriter methods.
|
||||
virtual DefaultValueObjectWriter* StartObject(StringPiece name); |
||||
|
||||
virtual DefaultValueObjectWriter* EndObject(); |
||||
|
||||
virtual DefaultValueObjectWriter* StartList(StringPiece name); |
||||
|
||||
virtual DefaultValueObjectWriter* EndList(); |
||||
|
||||
virtual DefaultValueObjectWriter* RenderBool(StringPiece name, bool value); |
||||
|
||||
virtual DefaultValueObjectWriter* RenderInt32(StringPiece name, int32 value); |
||||
|
||||
virtual DefaultValueObjectWriter* RenderUint32(StringPiece name, |
||||
uint32 value); |
||||
|
||||
virtual DefaultValueObjectWriter* RenderInt64(StringPiece name, int64 value); |
||||
|
||||
virtual DefaultValueObjectWriter* RenderUint64(StringPiece name, |
||||
uint64 value); |
||||
|
||||
virtual DefaultValueObjectWriter* RenderDouble(StringPiece name, |
||||
double value); |
||||
|
||||
virtual DefaultValueObjectWriter* RenderFloat(StringPiece name, float value); |
||||
|
||||
virtual DefaultValueObjectWriter* RenderString(StringPiece name, |
||||
StringPiece value); |
||||
#ifdef PROTO2_OPENSOURCE |
||||
virtual DefaultValueObjectWriter* RenderBytes(StringPiece name, |
||||
StringPiece value); |
||||
#else // PROTO2_OPENSOURCE
|
||||
virtual DefaultValueObjectWriter* RenderCord(StringPiece name, |
||||
const Cord& value); |
||||
#endif // !PROTO2_OPENSOURCE
|
||||
|
||||
virtual DefaultValueObjectWriter* RenderNull(StringPiece name); |
||||
|
||||
virtual DefaultValueObjectWriter* DisableCaseNormalizationForNextKey(); |
||||
|
||||
private: |
||||
enum NodeKind { |
||||
PRIMITIVE = 0, |
||||
OBJECT = 1, |
||||
LIST = 2, |
||||
MAP = 3, |
||||
}; |
||||
|
||||
// "Node" represents a node in the tree that holds the input of
|
||||
// DefaultValueObjectWriter.
|
||||
class Node { |
||||
public: |
||||
Node(const string& name, const google::protobuf::Type* type, NodeKind kind, |
||||
const DataPiece& data, bool is_placeholder); |
||||
virtual ~Node() { |
||||
for (int i = 0; i < children_.size(); ++i) { |
||||
delete children_[i]; |
||||
} |
||||
} |
||||
|
||||
// Adds a child to this node. Takes ownership of this child.
|
||||
void AddChild(Node* child) { children_.push_back(child); } |
||||
|
||||
// Finds the child given its name.
|
||||
Node* FindChild(StringPiece name); |
||||
|
||||
// Populates children of this Node based on its type. If there are already
|
||||
// children created, they will be merged to the result. Caller should pass
|
||||
// in TypeInfo for looking up types of the children.
|
||||
void PopulateChildren(const TypeInfo* typeinfo); |
||||
|
||||
// If this node is a leaf (has data), writes the current node to the
|
||||
// ObjectWriter; if not, then recursively writes the children to the
|
||||
// ObjectWriter.
|
||||
void WriteTo(ObjectWriter* ow); |
||||
|
||||
// Accessors
|
||||
const string& name() const { return name_; } |
||||
|
||||
const google::protobuf::Type* type() { return type_; } |
||||
|
||||
void set_type(const google::protobuf::Type* type) { type_ = type; } |
||||
|
||||
NodeKind kind() { return kind_; } |
||||
|
||||
int number_of_children() { return children_.size(); } |
||||
|
||||
void set_data(const DataPiece& data) { data_ = data; } |
||||
|
||||
void set_disable_normalize(bool disable_normalize) { |
||||
disable_normalize_ = disable_normalize; |
||||
} |
||||
|
||||
bool is_any() { return is_any_; } |
||||
|
||||
void set_is_any(bool is_any) { is_any_ = is_any; } |
||||
|
||||
void set_is_placeholder(bool is_placeholder) { |
||||
is_placeholder_ = is_placeholder; |
||||
} |
||||
|
||||
private: |
||||
// Returns the Value Type of a map given the Type of the map entry and a
|
||||
// TypeInfo instance.
|
||||
const google::protobuf::Type* GetMapValueType( |
||||
const google::protobuf::Type& entry_type, const TypeInfo* typeinfo); |
||||
|
||||
// Calls WriteTo() on every child in children_.
|
||||
void WriteChildren(ObjectWriter* ow); |
||||
|
||||
// The name of this node.
|
||||
string name_; |
||||
// google::protobuf::Type of this node. Owned by TypeInfo.
|
||||
const google::protobuf::Type* type_; |
||||
// The kind of this node.
|
||||
NodeKind kind_; |
||||
// Whether to disable case normalization of the name.
|
||||
bool disable_normalize_; |
||||
// Whether this is a node for "Any".
|
||||
bool is_any_; |
||||
// The data of this node when it is a leaf node.
|
||||
DataPiece data_; |
||||
// Children of this node.
|
||||
std::vector<Node*> children_; |
||||
// Whether this node is a placeholder for an object or list automatically
|
||||
// generated when creating the parent node. Should be set to false after
|
||||
// the parent node's StartObject()/StartList() method is called with this
|
||||
// node's name.
|
||||
bool is_placeholder_; |
||||
}; |
||||
|
||||
// Populates children of "node" if it is an "any" Node and its real type has
|
||||
// been given.
|
||||
void MaybePopulateChildrenOfAny(Node* node); |
||||
|
||||
// Writes the root_ node to ow_ and resets the root_ and current_ pointer to
|
||||
// nullptr.
|
||||
void WriteRoot(); |
||||
|
||||
// Creates a DataPiece containing the default value of the type of the field.
|
||||
static DataPiece CreateDefaultDataPieceForField( |
||||
const google::protobuf::Field& field); |
||||
|
||||
// Returns disable_normalize_ and reset it to false.
|
||||
bool GetAndResetDisableNormalize() { |
||||
return disable_normalize_ ? (disable_normalize_ = false, true) : false; |
||||
} |
||||
|
||||
// Adds or replaces the data_ of a primitive child node.
|
||||
void RenderDataPiece(StringPiece name, const DataPiece& data); |
||||
|
||||
// Type information for all the types used in the descriptor. Used to find
|
||||
// google::protobuf::Type of nested messages/enums.
|
||||
const TypeInfo* typeinfo_; |
||||
// Whether the TypeInfo object is owned by this class.
|
||||
bool own_typeinfo_; |
||||
// google::protobuf::Type of the root message type.
|
||||
const google::protobuf::Type& type_; |
||||
// Holds copies of strings passed to RenderString.
|
||||
vector<string*> string_values_; |
||||
|
||||
// Whether to disable case normalization of the next node.
|
||||
bool disable_normalize_; |
||||
// The current Node. Owned by its parents.
|
||||
Node* current_; |
||||
// The root Node.
|
||||
std::unique_ptr<Node> root_; |
||||
// The stack to hold the path of Nodes from current_ to root_;
|
||||
std::stack<Node*> stack_; |
||||
|
||||
ObjectWriter* ow_; |
||||
|
||||
DISALLOW_COPY_AND_ASSIGN(DefaultValueObjectWriter); |
||||
}; |
||||
|
||||
} // namespace converter
|
||||
} // namespace util
|
||||
} // namespace proto2
|
||||
|
||||
#endif // NET_PROTO2_UTIL_CONVERTER_INTERNAL_DEFAULT_VALUE_OBJECTWRITER_H_
|
Some files were not shown because too many files have changed in this diff.