delete all duplicate empty blanks (#5758)

Signed-off-by: Xiang Dai <764524258@qq.com>
branch pull/5762/head
author Xiang Dai, committed by Yilun Chong
parent c691c4cbfa
commit e479410564
  1. benchmarks/Makefile.am (32)
  2. benchmarks/README.md (8)
  3. benchmarks/java/src/main/java/com/google/protobuf/ProtoCaliperBenchmark.java (26)
  4. benchmarks/js/benchmark_suite.js (6)
  5. benchmarks/js/js_benchmark.js (12)
  6. benchmarks/php/PhpBenchmark.php (14)
  7. benchmarks/protobuf.js/protobufjs_benchmark.js (10)
  8. benchmarks/python/py_benchmark.py (8)
  9. benchmarks/util/result_parser.py (2)
  10. benchmarks/util/result_uploader.py (4)
  11. cmake/README.md (14)
  12. cmake/protobuf-config.cmake.in (2)
  13. cmake/version.rc.in (2)
  14. conformance/ConformanceJava.java (12)
  15. conformance/conformance_ruby.rb (2)
  16. conformance/third_party/jsoncpp/json.h (20)
  17. conformance/third_party/jsoncpp/jsoncpp.cpp (20)
  18. csharp/CHANGES.txt (18)
  19. csharp/README.md (26)
  20. csharp/compatibility_tests/v3.0.0/protos/csharp/protos/unittest_issues.proto (10)
  21. csharp/compatibility_tests/v3.0.0/src/Google.Protobuf.Test/Collections/MapFieldTest.cs (4)
  22. csharp/compatibility_tests/v3.0.0/src/Google.Protobuf.Test/Compatibility/TypeExtensionsTest.cs (2)
  23. csharp/compatibility_tests/v3.0.0/src/Google.Protobuf.Test/Google.Protobuf.Test.csproj (4)
  24. csharp/compatibility_tests/v3.0.0/src/Google.Protobuf.Test/JsonTokenizerTest.cs (10)
  25. csharp/compatibility_tests/v3.0.0/src/Google.Protobuf.Test/Reflection/DescriptorsTest.cs (4)
  26. csharp/compatibility_tests/v3.0.0/src/Google.Protobuf.Test/Reflection/FieldAccessTest.cs (2)
  27. csharp/compatibility_tests/v3.0.0/src/Google.Protobuf.Test/SampleEnum.cs (2)
  28. csharp/compatibility_tests/v3.0.0/src/Google.Protobuf.Test/WellKnownTypes/TimestampTest.cs (2)
  29. csharp/compatibility_tests/v3.0.0/src/Google.Protobuf.Test/WellKnownTypes/WrappersTest.cs (4)
  30. csharp/protos/unittest_issues.proto (10)
  31. csharp/protos/unittest_proto3.proto (4)
  32. csharp/src/Google.Protobuf.Conformance/Program.cs (8)
  33. csharp/src/Google.Protobuf.Test/Collections/MapFieldTest.cs (4)
  34. csharp/src/Google.Protobuf.Test/Collections/ProtobufEqualityComparersTest.cs (2)
  35. csharp/src/Google.Protobuf.Test/Compatibility/TypeExtensionsTest.cs (2)
  36. csharp/src/Google.Protobuf.Test/FieldMaskTreeTest.cs (4)
  37. csharp/src/Google.Protobuf.Test/Google.Protobuf.Test.csproj (2)
  38. csharp/src/Google.Protobuf.Test/JsonFormatterTest.cs (2)
  39. csharp/src/Google.Protobuf.Test/JsonTokenizerTest.cs (10)
  40. csharp/src/Google.Protobuf.Test/Reflection/DescriptorsTest.cs (4)
  41. csharp/src/Google.Protobuf.Test/Reflection/FieldAccessTest.cs (2)
  42. csharp/src/Google.Protobuf.Test/SampleEnum.cs (2)
  43. csharp/src/Google.Protobuf.Test/SampleNaNs.cs (2)
  44. csharp/src/Google.Protobuf.Test/WellKnownTypes/TimestampTest.cs (4)
  45. csharp/src/Google.Protobuf.Test/WellKnownTypes/WrappersTest.cs (4)
  46. csharp/src/Google.Protobuf.Test/testprotos.pb (BIN)
  47. csharp/src/Google.Protobuf/FieldCodec.cs (4)
  48. csharp/src/Google.Protobuf/FieldMaskTree.cs (6)
  49. csharp/src/Google.Protobuf/Google.Protobuf.csproj (4)
  50. csharp/src/Google.Protobuf/JsonFormatter.cs (8)
  51. csharp/src/Google.Protobuf/JsonParser.cs (4)
  52. csharp/src/Google.Protobuf/JsonTokenizer.cs (4)
  53. csharp/src/Google.Protobuf/LimitedInputStream.cs (2)
  54. csharp/src/Google.Protobuf/MessageExtensions.cs (2)
  55. csharp/src/Google.Protobuf/MessageParser.cs (4)
  56. csharp/src/Google.Protobuf/Reflection/CustomOptions.cs (6)
  57. csharp/src/Google.Protobuf/Reflection/DescriptorBase.cs (2)
  58. csharp/src/Google.Protobuf/Reflection/EnumValueDescriptor.cs (2)
  59. csharp/src/Google.Protobuf/Reflection/FieldDescriptor.cs (4)
  60. csharp/src/Google.Protobuf/Reflection/IDescriptor.cs (2)
  61. csharp/src/Google.Protobuf/Reflection/MessageDescriptor.cs (2)
  62. csharp/src/Google.Protobuf/Reflection/OneofAccessor.cs (2)
  63. csharp/src/Google.Protobuf/Reflection/OriginalNameAttribute.cs (2)
  64. csharp/src/Google.Protobuf/Reflection/SingleFieldAccessor.cs (2)
  65. csharp/src/Google.Protobuf/UnknownField.cs (4)
  66. csharp/src/Google.Protobuf/WellKnownTypes/AnyPartial.cs (2)
  67. csharp/src/Google.Protobuf/WellKnownTypes/DurationPartial.cs (2)
  68. csharp/src/Google.Protobuf/WellKnownTypes/FieldMaskPartial.cs (2)
  69. docs/options.md (2)
  70. docs/performance.md (8)
  71. docs/third_party.md (2)
  72. examples/list_people_test.go (2)
  73. java/compatibility_tests/v2.5.0/more_protos/src/proto/google/protobuf/unittest_optimize_for.proto (2)
  74. java/compatibility_tests/v2.5.0/protos/src/proto/google/protobuf/unittest_optimize_for.proto (2)
  75. java/compatibility_tests/v2.5.0/tests/src/main/java/com/google/protobuf/test/ByteStringTest.java (6)
  76. java/compatibility_tests/v2.5.0/tests/src/main/java/com/google/protobuf/test/DeprecatedFieldTest.java (12)
  77. java/compatibility_tests/v2.5.0/tests/src/main/java/com/google/protobuf/test/DescriptorsTest.java (8)
  78. java/compatibility_tests/v2.5.0/tests/src/main/java/com/google/protobuf/test/GeneratedMessageTest.java (2)
  79. java/compatibility_tests/v2.5.0/tests/src/main/java/com/google/protobuf/test/LiteralByteStringTest.java (4)
  80. java/compatibility_tests/v2.5.0/tests/src/main/java/com/google/protobuf/test/MessageTest.java (4)
  81. java/compatibility_tests/v2.5.0/tests/src/main/java/com/google/protobuf/test/RopeByteStringSubstringTest.java (2)
  82. java/compatibility_tests/v2.5.0/tests/src/main/java/com/google/protobuf/test/RopeByteStringTest.java (2)
  83. java/core/src/main/java/com/google/protobuf/GeneratedMessage.java (8)
  84. java/core/src/main/java/com/google/protobuf/GeneratedMessageV3.java (2)
  85. java/util/src/test/java/com/google/protobuf/util/FieldMaskTreeTest.java (2)
  86. js/README.md (4)
  87. kokoro/linux/benchmark/continuous.cfg (2)
  88. kokoro/linux/benchmark/run.sh (2)
  89. kokoro/release/ruby/macos/ruby/ruby_build_environment.sh (2)
  90. objectivec/DevTools/full_mac_build.sh (2)
  91. objectivec/GPBRuntimeTypes.h (2)
  92. php/ext/google/protobuf/encode_decode.c (8)
  93. php/ext/google/protobuf/type_check.c (4)
  94. php/ext/google/protobuf/upb.c (408)
  95. php/ext/google/protobuf/upb.h (2)
  96. php/src/Google/Protobuf/Internal/GPBWire.php (4)
  97. python/compatibility_tests/v2.5.0/tests/google/protobuf/internal/service_reflection_test.py (2)
  98. python/google/protobuf/internal/_parameterized.py (2)
  99. python/tox.ini (2)
  100. ruby/README.md (2)

Some files were not shown because too many files have changed in this diff.
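The diff below is mechanical: the commit collapses runs of consecutive blank lines into a single blank line across the files listed above, without touching any other content. Purely as an illustration (this helper is hypothetical and not part of the commit), such a cleanup could be scripted along these lines:

```python
import re
import sys

def collapse_blank_lines(text: str) -> str:
    # Replace any run of two or more blank lines with a single blank line.
    return re.sub(r"\n{3,}", "\n\n", text)

if __name__ == "__main__":
    for path in sys.argv[1:]:
        with open(path, encoding="utf-8") as f:
            original = f.read()
        cleaned = collapse_blank_lines(original)
        if cleaned != original:
            with open(path, "w", encoding="utf-8") as f:
                f.write(cleaned)
```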

@@ -126,7 +126,7 @@ java_benchmark_testing_files = \
java/src/main/java/com/google/protobuf/ProtoCaliperBenchmark.java
javac_middleman: $(java_benchmark_testing_files) protoc_middleman protoc_middleman2
cp -r $(srcdir)/java tmp
mkdir -p tmp/java/lib
cp $(top_srcdir)/java/core/target/*.jar tmp/java/lib/protobuf-java.jar
cd tmp/java && mvn clean compile assembly:single -Dprotobuf.version=$(PACKAGE_VERSION) && cd ../..
@@ -253,7 +253,7 @@ go_protoc_middleman: make_tmp_dir $(top_srcdir)/src/protoc$(EXEEXT) $(benchmarks
oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --go_out=$$oldpwd/tmp $(benchmarks_protoc_inputs_proto2_message4) )
touch go_protoc_middleman
go-benchmark: go_protoc_middleman
@echo "Writing shortcut script go-benchmark..."
@echo '#! /bin/bash' > go-benchmark
@echo 'cd $(srcdir)/go' >> go-benchmark
@@ -265,7 +265,7 @@ go-benchmark: go_protoc_middleman
@echo 'cd ..' >> go-benchmark
@chmod +x go-benchmark
go: go_protoc_middleman go-benchmark
./go-benchmark $(all_data)
############# GO RULES END ##############
@@ -322,9 +322,9 @@ $(cpp_no_group_benchmarks_protoc_outputs_proto2_header): cpp_no_group_protoc_mid
generate_cpp_no_group_benchmark_code:
cp $(srcdir)/cpp/cpp_benchmark.cc gogo/cpp_no_group/cpp_benchmark.cc
sed -i -e "s/\#include \"datasets/\#include \"gogo\/cpp_no_group\/datasets/g" gogo/cpp_no_group/cpp_benchmark.cc
sed -i -e "s/\#include \"benchmarks.pb.h/\#include \"gogo\/cpp_no_group\/benchmarks.pb.h/g" gogo/cpp_no_group/cpp_benchmark.cc
touch generate_cpp_no_group_benchmark_code
bin_PROGRAMS += cpp-no-group-benchmark
cpp_no_group_benchmark_LDADD = $(top_srcdir)/src/libprotobuf.la $(top_srcdir)/third_party/benchmark/src/libbenchmark.a
cpp_no_group_benchmark_SOURCES = gogo/cpp_no_group/cpp_benchmark.cc
@@ -343,7 +343,7 @@ nodist_cpp_no_group_benchmark_SOURCES = \
cpp_no_group: cpp_no_group_protoc_middleman generate_gogo_data cpp-no-group-benchmark
./cpp-no-group-benchmark $(gogo_data)
gogo_proto_middleman: protoc-gen-gogoproto
mkdir -p "tmp/gogo_proto"
oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I$(srcdir) -I$(top_srcdir) --plugin=protoc-gen-gogoproto --gogoproto_out=$$oldpwd/tmp/gogo_proto $(benchmarks_protoc_inputs) $(benchmarks_protoc_inputs_benchmark_wrapper) $(benchmarks_protoc_inputs_proto2) )
@@ -355,7 +355,7 @@ generate_gogo_data: protoc_middleman protoc_middleman2 gogo-data-scrubber
mkdir -p `dirname $(gogo_data)`
./gogo-data-scrubber $(all_data) $(gogo_data)
touch generate_gogo_data
make_tmp_dir_gogo:
mkdir -p tmp/go_no_group/benchmark_code
mkdir -p tmp/gogofast/benchmark_code
@@ -435,10 +435,10 @@ gogo-benchmark:
go_no_group: go_no_group_protoc_middleman generate_gogo_data generate_all_gogo_benchmark_code gogo-benchmark
./gogo-benchmark go_no_group $(gogo_data)
gogofast: gogofast_protoc_middleman generate_gogo_data gogo-benchmark generate_all_gogo_benchmark_code
./gogo-benchmark gogofast $(gogo_data)
gogofaster: gogofaster_protoc_middleman generate_gogo_data gogo-benchmark generate_all_gogo_benchmark_code
./gogo-benchmark gogofaster $(gogo_data)
@@ -448,7 +448,7 @@ gogoslick: gogoslick_protoc_middleman generate_gogo_data gogo-benchmark generat
############# GOGO RULES END ############
############ UTIL RULES BEGIN ############
bin_PROGRAMS += protoc-gen-gogoproto gogo-data-scrubber protoc-gen-proto2_to_proto3 proto3-data-stripper
@@ -481,7 +481,7 @@ nodist_proto3_data_stripper_SOURCES = \
$(benchmarks_protoc_outputs_proto2_header) \
$(benchmarks_protoc_outputs_header)
############ UTIL RULES END ############
############ PROTO3 PREPARATION BEGIN #############
@@ -510,7 +510,7 @@ proto3_middleman_php: proto3_proto_middleman
php-benchmark: proto3_middleman_php generate_proto3_data
mkdir -p "tmp/php/Google/Protobuf/Benchmark" && cp php/PhpBenchmark.php "tmp/php/Google/Protobuf/Benchmark"
cp php/autoload.php "tmp/php"
@echo "Writing shortcut script php-benchmark..."
@echo '#! /bin/bash' > php-benchmark
@echo 'export PROTOBUF_PHP_SRCDIR="$$(cd $(top_srcdir) && pwd)/php/src"' >> php-benchmark
@@ -527,8 +527,8 @@ php_c_extension:
cd $(top_srcdir)/php/ext/google/protobuf && phpize && ./configure CFLAGS='-O3' && make -j8
php-c-benchmark: proto3_middleman_php generate_proto3_data php_c_extension php_c_extension
mkdir -p "tmp/php/Google/Protobuf/Benchmark" && cp php/PhpBenchmark.php "tmp/php/Google/Protobuf/Benchmark"
cp php/autoload.php "tmp/php"
@echo "Writing shortcut script php-c-benchmark..."
@echo '#! /bin/bash' > php-c-benchmark
@echo 'export PROTOBUF_PHP_SRCDIR="$$(cd $(top_srcdir) && pwd)/php/src"' >> php-c-benchmark
@@ -654,4 +654,4 @@ CLEANFILES = \
clean-local:
-rm -rf tmp/*

@@ -3,7 +3,7 @@
This directory contains benchmarking schemas and data sets that you
can use to test a variety of performance scenarios against your
protobuf language runtime. If you are looking for performance
numbers of officially support languages, see [here](
https://github.com/protocolbuffers/protobuf/blob/master/docs/performance.md)
@@ -45,8 +45,8 @@ And you also need to make sure `pkg-config` is installed.
### Go
Go protobufs are maintained at [github.com/golang/protobuf](
http://github.com/golang/protobuf). If not done already, you need to install the
toolchain and the Go protoc-gen-go plugin for protoc.
To install protoc-gen-go, run:
@@ -59,7 +59,7 @@ The first command installs `protoc-gen-go` into the `bin` directory in your loca
The second command adds the `bin` directory to your `PATH` so that `protoc` can locate the plugin later.
### PHP
PHP benchmark's requirement is the same as PHP protobuf's requirements. The benchmark will automaticly
include PHP protobuf's src and build the c extension if required.
### Node.js

@@ -24,8 +24,8 @@ import java.util.ArrayList;
import java.util.List;
// Caliper set CICompilerCount to 1 for making sure compilation doesn't run in parallel with itself,
// This makes TieredCompilation not working. We just disable TieredCompilation by default. In master
// branch this has been disabled by default in caliper:
// https://github.com/google/caliper/blob/master/caliper-runner/src/main/java/com/google/caliper/runner/target/Jvm.java#L38:14
// But this haven't been added into most recent release.
@VmOptions("-XX:-TieredCompilation")
@@ -89,7 +89,7 @@ public class ProtoCaliperBenchmark {
return com.google.protobuf.benchmarks.BenchmarkMessage4.GoogleMessage4.getDefaultInstance();
}
};
abstract ExtensionRegistry getExtensionRegistry();
abstract Message getDefaultInstance();
}
@@ -97,7 +97,7 @@ public class ProtoCaliperBenchmark {
private BenchmarkMessageType benchmarkMessageType;
@Param("")
private String dataFile;
private byte[] inputData;
private BenchmarkDataset benchmarkDataset;
private Message defaultMessage;
@@ -125,7 +125,7 @@ public class ProtoCaliperBenchmark {
+ benchmarkDataset.getMessageName());
}
}
@BeforeExperiment
void setUp() throws IOException {
if (!dataFile.equals("")) {
@@ -145,7 +145,7 @@ public class ProtoCaliperBenchmark {
inputStreamList = new ArrayList<ByteArrayInputStream>();
inputStringList = new ArrayList<ByteString>();
sampleMessageList = new ArrayList<Message>();
for (int i = 0; i < benchmarkDataset.getPayloadCount(); i++) {
byte[] singleInputData = benchmarkDataset.getPayload(i).toByteArray();
inputDataList.add(benchmarkDataset.getPayload(i).toByteArray());
@@ -156,8 +156,8 @@ public class ProtoCaliperBenchmark {
defaultMessage.newBuilderForType().mergeFrom(singleInputData, extensions).build());
}
}
@Benchmark
void serializeToByteArray(int reps) throws IOException {
if (sampleMessageList.size() == 0) {
@@ -165,11 +165,11 @@ public class ProtoCaliperBenchmark {
}
for (int i = 0; i < reps; i++) {
for (int j = 0; j < sampleMessageList.size(); j++) {
sampleMessageList.get(j).toByteArray();
}
}
}
@Benchmark
void serializeToMemoryStream(int reps) throws IOException {
if (sampleMessageList.size() == 0) {
@@ -178,11 +178,11 @@ public class ProtoCaliperBenchmark {
for (int i = 0; i < reps; i++) {
for (int j = 0; j < sampleMessageList.size(); j++) {
ByteArrayOutputStream output = new ByteArrayOutputStream();
sampleMessageList.get(j).writeTo(output);
}
}
}
@Benchmark
void deserializeFromByteArray(int reps) throws IOException {
if (inputDataList.size() == 0) {
@@ -195,7 +195,7 @@ public class ProtoCaliperBenchmark {
}
}
}
@Benchmark
void deserializeFromMemoryStream(int reps) throws IOException {
if (inputStreamList.size() == 0) {

@@ -9,8 +9,8 @@ function newBenchmark(messageName, filename, language) {
})
.on("start", function() {
process.stdout.write(
"benchmarking message " + messageName
+ " of dataset file " + filename
+ "'s performance ..." + "\n\n");
})
.on("cycle", function(event) {
@@ -21,7 +21,7 @@ function newBenchmark(messageName, filename, language) {
return 1 / (bench.stats.mean + bench.stats.moe);
}
benches.forEach(function(val, index) {
benches[index] = getHz(val);
});
}),
benches: benches

@@ -30,7 +30,7 @@ process.argv.forEach(function(filename, index) {
json_file = filename.replace(/^--json_output=/, '');
return;
}
var benchmarkDataset =
proto.benchmarks.BenchmarkDataset.deserializeBinary(fs.readFileSync(filename));
var messageList = [];
@@ -40,7 +40,7 @@ process.argv.forEach(function(filename, index) {
messageList.push(message.deserializeBinary(onePayload));
totalBytes += onePayload.length;
});
var senarios = benchmarkSuite.newBenchmark(
benchmarkDataset.getMessageName(), filename, "js");
senarios.suite
@@ -48,14 +48,14 @@ process.argv.forEach(function(filename, index) {
benchmarkDataset.getPayloadList().forEach(function(onePayload) {
var protoType = getNewPrototype(benchmarkDataset.getMessageName());
protoType.deserializeBinary(onePayload);
});
})
.add("js serialize", function() {
var protoType = getNewPrototype(benchmarkDataset.getMessageName());
messageList.forEach(function(message) {
message.serializeBinary();
});
})
.run({"Async": false});
results.push({
@@ -66,9 +66,9 @@ process.argv.forEach(function(filename, index) {
}
})
console.log("Throughput for deserialize: "
+ senarios.benches[0] * totalBytes / 1024 / 1024 + "MB/s" );
console.log("Throughput for serialize: "
+ senarios.benches[1] * totalBytes / 1024 / 1024 + "MB/s" );
console.log("");
});

@@ -33,7 +33,7 @@ class BenchmarkMethod
(new $args[1]())->mergeFromString($payloads->offsetGet($i));
}
}
// $args: array of message
static function serialize(&$args) {
foreach ($args as &$temp_message) {
@@ -49,7 +49,7 @@ class Benchmark
private $benchmark_time;
private $total_bytes;
private $coefficient;
public function __construct($benchmark_name, $args, $total_bytes,
$benchmark_time = 5.0) {
$this->args = $args;
@@ -58,7 +58,7 @@ class Benchmark
$this->total_bytes = $total_bytes;
$this->coefficient = pow (10, 0) / pow(2, 20);
}
public function runBenchmark() {
$t = $this->runBenchmarkWithTimes(1);
$times = ceil($this->benchmark_time / $t);
@@ -66,7 +66,7 @@ class Benchmark
($times == 1 ? $t : $this->runBenchmarkWithTimes($times)) *
$this->coefficient;
}
private function runBenchmarkWithTimes($times) {
$st = microtime(true);
for ($i = 0; $i < $times; $i++) {
@@ -109,14 +109,14 @@ function runBenchmark($file, $behavior_prefix) {
array_push($message_list, $new_message);
$total_bytes += strlen($payloads->offsetGet($i));
}
$parse_benchmark = new Benchmark(
"\Google\Protobuf\Benchmark\BenchmarkMethod::parse",
array($dataset, $message_name), $total_bytes);
$serialize_benchmark = new Benchmark(
"\Google\Protobuf\Benchmark\BenchmarkMethod::serialize",
$message_list, $total_bytes);
return array(
"filename" => $file,
"benchmarks" => array(
@@ -139,7 +139,7 @@ foreach ($argv as $index => $arg) {
if ($arg == "--json") {
$json_output = true;
} else if (strpos($arg, "--behavior_prefix") == 0) {
$behavior_prefix = str_replace("--behavior_prefix=", "", $arg);
}
}
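For context on the PHP hunks above: Benchmark::runBenchmark times a single pass, scales the iteration count so the run fills roughly $benchmark_time seconds, and uses a coefficient of 1 / 2^20 (the pow(2, 20) line) to convert bytes to MiB. The return expression is only partially visible in this hunk, so the following Python sketch is a rough rendering of the calibrate-then-measure idea with illustrative names, not the commit's code:

```python
import math
import time

def run_benchmark(work, total_bytes, benchmark_time=5.0):
    """Time one pass, then repeat enough passes to fill roughly benchmark_time seconds."""
    def timed(times):
        start = time.monotonic()
        for _ in range(times):
            work()
        return time.monotonic() - start

    t = timed(1)
    times = max(1, math.ceil(benchmark_time / t))
    elapsed = t if times == 1 else timed(times)
    # Bytes processed across all passes, converted to MiB per second.
    return total_bytes * times / elapsed / 2**20
```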

@@ -30,7 +30,7 @@ process.argv.forEach(function(filename, index) {
messageList.push(message.decode(onePayload));
totalBytes += onePayload.length;
});
var senarios = benchmarkSuite.newBenchmark(
benchmarkDataset.messageName, filename, "protobufjs");
senarios.suite
@@ -38,14 +38,14 @@ process.argv.forEach(function(filename, index) {
benchmarkDataset.payload.forEach(function(onePayload) {
var protoType = getNewPrototype(benchmarkDataset.messageName);
protoType.decode(onePayload);
});
})
.add("protobuf.js static encoding", function() {
var protoType = getNewPrototype(benchmarkDataset.messageName);
messageList.forEach(function(message) {
protoType.encode(message).finish();
});
})
.run({"Async": false});
results.push({
@@ -56,9 +56,9 @@ process.argv.forEach(function(filename, index) {
}
})
console.log("Throughput for decoding: "
+ senarios.benches[0] * totalBytes / 1024 / 1024 + "MB/s" );
console.log("Throughput for encoding: "
+ senarios.benches[1] * totalBytes / 1024 / 1024 + "MB/s" );
console.log("");
});

@@ -8,7 +8,7 @@ import fnmatch
import json
parser = argparse.ArgumentParser(description="Python protobuf benchmark")
parser.add_argument("data_files", metavar="dataFile", nargs="+",
help="testing data files.")
parser.add_argument("--json", action="store_const", dest="json",
const="yes", default="no",
@@ -138,14 +138,14 @@ class Benchmark:
t = timeit.timeit(stmt="%s(%s)" % (self.test_method, test_method_args),
setup=self.full_setup_code(setup_method_args),
number=reps);
return self.total_bytes * 1.0 / 2 ** 20 / (1.0 * t / reps * self.full_iteration)
if __name__ == "__main__":
results = []
for file in args.data_files:
results.append(run_one_test(file))
if args.json != "no":
print(json.dumps(results))
else:
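The return statement in the hunk above converts the timeit measurement into a throughput figure: total_bytes / 2**20 is the payload size in MiB, and 1.0 * t / reps * full_iteration is the time attributed to one pass over it, so the quotient is MiB per second. A small, self-contained check of that arithmetic with made-up numbers (not taken from the benchmark):

```python
# Made-up figures, used only to exercise the formula from py_benchmark.py.
total_bytes = 50 * 2**20   # payload size: 50 MiB
t = 2.0                    # total seconds reported by timeit
reps = 100                 # timeit repetitions
full_iteration = 1         # scaling factor used by the benchmark

throughput_mib_s = total_bytes * 1.0 / 2**20 / (1.0 * t / reps * full_iteration)
print(throughput_mib_s)    # 2500.0
```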

@@ -295,6 +295,6 @@ def get_result_from_file(cpp_file="",
if php_file != "":
__parse_php_result(php_file, "php")
if php_c_file != "":
__parse_php_result(php_c_file, "php")
return __results

@@ -60,7 +60,7 @@ def upload_result(result_list, metadata):
new_result["labels"] = labels_string[1:]
new_result["timestamp"] = _INITIAL_TIME
print(labels_string)
bq = big_query_utils.create_big_query()
row = big_query_utils.make_row(str(uuid.uuid4()), new_result)
if not big_query_utils.insert_rows(bq, _PROJECT_ID, _DATASET,
@@ -91,7 +91,7 @@ if __name__ == "__main__":
default="")
parser.add_argument("-php_c", "--php_c_input_file",
help="The php with c ext benchmark result file's name",
default="")
args = parser.parse_args()
metadata = get_metadata()

@@ -130,11 +130,11 @@ It will generate *Visual Studio* solution file *protobuf.sln* in current directo
If the *gmock* directory does not exist, and you do not want to build protobuf unit tests,
you need to add *cmake* command argument `-Dprotobuf_BUILD_TESTS=OFF` to disable testing.
To make a *Visual Studio* file for Visual Studio 15 2017, create the *Visual Studio*
solution file above and edit the CmakeCache file.
C:Path\to\protobuf\cmake\build\solution\CMakeCache
Then create the *Visual Studio* solution file again
Compiling
@@ -177,9 +177,9 @@ You should see output similar to:
Running main() from gmock_main.cc
[==========] Running 1546 tests from 165 test cases.
...
[==========] 1546 tests from 165 test cases ran. (2529 ms total)
[ PASSED ] 1546 tests.
@@ -198,7 +198,7 @@ To run specific tests:
[ RUN ] AnyTest.TestIs
[ OK ] AnyTest.TestIs (0 ms)
[----------] 3 tests from AnyTest (1 ms total)
[----------] Global test environment tear-down
[==========] 3 tests from 1 test case ran. (2 ms total)
[ PASSED ] 3 tests.
@@ -310,7 +310,7 @@ If you already have ZLIB library and headers at some other location on your syst
-DZLIB_INCLUDE_DIR=<path to dir containing zlib headers>
-DZLIB_LIB=<path to dir containing zlib>
Build and testing protobuf as usual.
Notes on Compiler Warnings

@@ -37,7 +37,7 @@ function(protobuf_generate)
if(NOT protobuf_generate_PROTOC_OUT_DIR)
set(protobuf_generate_PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR})
endif()
if(protobuf_generate_EXPORT_MACRO AND protobuf_generate_LANGUAGE STREQUAL cpp)
set(_dll_export_decl "dllexport_decl=${protobuf_generate_EXPORT_MACRO}:")
endif()

@@ -23,7 +23,7 @@ VS_VERSION_INFO VERSIONINFO
FILETYPE VFT_DLL
BEGIN
BLOCK "VarFileInfo"
BEGIN
// English language (0x409) and the Windows Unicode codepage (1200)
VALUE "Translation", 0x409, 1200
END

@@ -57,7 +57,7 @@ class ConformanceJava {
buf[3] = (byte)(val >> 24);
writeToStdout(buf);
}
private enum BinaryDecoderType {
BTYE_STRING_DECODER,
BYTE_ARRAY_DECODER,
@@ -69,11 +69,11 @@ class ConformanceJava {
}
private static class BinaryDecoder <MessageType extends AbstractMessage> {
public MessageType decode (ByteString bytes, BinaryDecoderType type,
Parser <MessageType> parser, ExtensionRegistry extensions)
throws InvalidProtocolBufferException {
switch (type) {
case BTYE_STRING_DECODER:
return parser.parseFrom(bytes, extensions);
case BYTE_ARRAY_DECODER:
return parser.parseFrom(bytes.toByteArray(), extensions);
@@ -94,7 +94,7 @@ class ConformanceJava {
} catch (InvalidProtocolBufferException e) {
throw e;
}
}
case DIRECT_BYTE_BUFFER_DECODER: {
ByteBuffer buffer = ByteBuffer.allocateDirect(bytes.size());
bytes.copyTo(buffer);
@@ -135,7 +135,7 @@ class ConformanceJava {
ArrayList <MessageType> messages = new ArrayList <MessageType> ();
ArrayList <InvalidProtocolBufferException> exceptions =
new ArrayList <InvalidProtocolBufferException>();
for (int i = 0; i < BinaryDecoderType.values().length; i++) {
messages.add(null);
exceptions.add(null);
@@ -273,7 +273,7 @@ class ConformanceJava {
throw new RuntimeException("Unspecified output format.");
case PROTOBUF: {
ByteString MessageString = testMessage.toByteString();
return Conformance.ConformanceResponse.newBuilder().setProtobufPayload(MessageString).build();
}

@@ -54,7 +54,7 @@ def do_test(request)
elsif request.message_type.eql?('protobuf_test_messages.proto2.TestAllTypesProto2')
response.skipped = "Ruby doesn't support proto2"
return response
else
fail "Protobuf request doesn't have specific payload type"
end

@@ -6,28 +6,28 @@
// //////////////////////////////////////////////////////////////////////
/*
The JsonCpp library's source code, including accompanying documentation,
tests and demonstration applications, are licensed under the following
conditions...
The author (Baptiste Lepilleur) explicitly disclaims copyright in all
jurisdictions which recognize such a disclaimer. In such jurisdictions,
this software is released into the Public Domain.
In jurisdictions which do not recognize Public Domain property (e.g. Germany as of
2010), this software is Copyright (c) 2007-2010 by Baptiste Lepilleur, and is
released under the terms of the MIT License (see below).
In jurisdictions which recognize Public Domain property, the user of this
software may choose to accept it either as 1) Public Domain, 2) under the
conditions of the MIT License (see below), or 3) under the terms of dual
Public Domain/MIT License conditions described here, as they choose.
The MIT License is about as close to Public Domain as a license can get, and is
described in clear, concise terms at:
http://en.wikipedia.org/wiki/MIT_License
The full text of the MIT License follows:
========================================================================
@@ -434,7 +434,7 @@ protected:
/** Exceptions which the user cannot easily avoid.
*
* E.g. out-of-memory (when we use malloc), stack-overflow, malicious input
*
* \remark derived from Json::Exception
*/
class JSON_API RuntimeError : public Exception {
@@ -445,7 +445,7 @@ public:
/** Exceptions thrown by JSON_ASSERT/JSON_FAIL macros.
*
* These are precondition-violations (user bugs) and internal errors (our bugs).
*
* \remark derived from Json::Exception
*/
class JSON_API LogicError : public Exception {
@@ -1570,7 +1570,7 @@ public:
- `"rejectDupKeys": false or true`
- If true, `parse()` returns false when a key is duplicated within an object.
- `"allowSpecialFloats": false or true`
- If true, special float values (NaNs and infinities) are allowed
and their values are lossfree restorable.
You can examine 'settings_` yourself

@@ -6,28 +6,28 @@
// //////////////////////////////////////////////////////////////////////
/*
The JsonCpp library's source code, including accompanying documentation,
tests and demonstration applications, are licensed under the following
conditions...
The author (Baptiste Lepilleur) explicitly disclaims copyright in all
jurisdictions which recognize such a disclaimer. In such jurisdictions,
this software is released into the Public Domain.
In jurisdictions which do not recognize Public Domain property (e.g. Germany as of
2010), this software is Copyright (c) 2007-2010 by Baptiste Lepilleur, and is
released under the terms of the MIT License (see below).
In jurisdictions which recognize Public Domain property, the user of this
software may choose to accept it either as 1) Public Domain, 2) under the
conditions of the MIT License (see below), or 3) under the terms of dual
Public Domain/MIT License conditions described here, as they choose.
The MIT License is about as close to Public Domain as a license can get, and is
described in clear, concise terms at:
http://en.wikipedia.org/wiki/MIT_License
The full text of the MIT License follows:
========================================================================
@@ -207,7 +207,7 @@ static inline void fixNumericLocale(char* begin, char* end) {
#include <limits>
#if defined(_MSC_VER)
#if !defined(WINCE) && defined(__STDC_SECURE_LIB__) && _MSC_VER >= 1500 // VC++ 9.0 and above
#define snprintf sprintf_s
#elif _MSC_VER >= 1900 // VC++ 14.0 and above
#define snprintf std::snprintf
@@ -4029,7 +4029,7 @@ Value& Path::make(Value& root) const {
#define snprintf std::snprintf
#endif
#if defined(__BORLANDC__)
#include <float.h>
#define isfinite _finite
#define snprintf _snprintf
@@ -5096,7 +5096,7 @@ StreamWriter* StreamWriterBuilder::newStreamWriter() const
std::string cs_str = settings_["commentStyle"].asString();
bool eyc = settings_["enableYAMLCompatibility"].asBool();
bool dnp = settings_["dropNullPlaceholders"].asBool();
bool usf = settings_["useSpecialFloats"].asBool();
unsigned int pre = settings_["precision"].asUInt();
CommentStyle::Enum cs = CommentStyle::All;
if (cs_str == "All") {

@@ -26,7 +26,7 @@ Changes:
- Optimized enum parsing.
Fixes:
- Fix for bug in limited input stream's Position, Introduced Position on
output stream
- Fix for writing a character to a JSON output overflows allocated buffer
- Optimize FromBase64String to return Empty when presented with empty string.
@@ -47,14 +47,14 @@ Changes:
- Added 'Unsafe' static type in ByteString to allow direct buffer access
Fixes:
- Issue 50: The XML serializer will fail to deserialize a message with empty
child message
- Issue 45: Use of 'item' as a field name causes AmbiguousMatchException
- Issue 49: Generated nested static Types class should be partial
- Issue 38: Disable CLSCompliant warnings (3021)
- Issue 40: proto_path does not work for command-line file names
- Issue 54: should retire all bytes in buffer (bufferSize)
- Issue 43: Fix to correct identical 'umbrella_classname' options from trying
to write to the same filename.
===============================================================================
@@ -66,7 +66,7 @@ Features:
NONE, GENERIC, INTERFACE, or IRPCDISPATCH
- Added interfaces IRpcDispatch and IRpcServerStub to provide for blocking
services and implementations.
- Added ProtoGen.exe command-line argument "--protoc_dir=" to specify the
location of protoc.exe.
- Extracted interfaces for ICodedInputStream and ICodedOutputStream to allow
custom implementation of writers with both speed and size optimizations.
@@ -86,9 +86,9 @@ Fixes:
- Issue 16: Does not integrate well with other tooling
- Issue 19: Support for negative enum values
- Issue 26: AddRange in GeneratedBuilder iterates twice.
- Issue 27: Remove XML documentation output from test projects to clear
warnings/errors.
- Issue 28: Circular message dependencies result in null default values for
Message fields.
- Issue 29: Message classes generated have a public default constructor. You
can disable private ctor generation with the option generate_private_ctor.
@@ -109,14 +109,14 @@ RELEASE NOTES - Version 2.3.0.277
===============================================================================
Features:
- Added cls_compliance option to generate attributes indicating
non-CLS-compliance.
- Added file_extension option to control the generated output file's extension.
- Added umbrella_namespace option to place the umbrella class into a nested
namespace to address issues with proto files having the same name as a
message it contains.
- Added output_directory option to set the output path for the source file(s).
- Added ignore_google_protobuf option to avoid generating code for includes
from the google.protobuf package.
- Added the LITE framework (Google.ProtoBuffersLite.dll) and the ability to
generate code with "option optimize_for = LITE_RUNTIME;".

@@ -10,7 +10,7 @@ You will also want to install the `Google.Protobuf.Tools` NuGet package, which
contains precompiled version of `protoc.exe` and a copy of well known `.proto`
files under the package's `tools` directory.
To generate C# files from your `.proto` files, invoke `protoc` with the
`--csharp_out` option.
Supported platforms
@@ -37,8 +37,8 @@ later.
Although *users* of this project are only expected to have Visual
Studio 2012 or later, *developers* of the library are required to
have Visual Studio 2017 or later, as the library uses C# 6 features
in its implementation, as well as the new Visual Studio 2017 csproj
format. These features have no impact when using the compiled code -
they're only relevant when building the `Google.Protobuf` assembly.
In order to run and debug the AddressBook example in the IDE, you must
@@ -56,19 +56,19 @@ run using the Visual Studio Test Explorer or `dotnet test`.
.NET 3.5
========
We don't officially support .NET 3.5. However, there has been some effort
to make enabling .NET 3.5 support relatively painless in case you require it.
There's no guarantee that this will continue in the future, so rely on .NET
3.5 support at your peril.
To enable .NET 3.5 support, you must edit the `TargetFrameworks` elements of
[src/Google.Protobuf/Google.Protobuf.csproj](src/Google.Protobuf/Google.Protobuf.csproj)
(and [src/Google.Protobuf.Test/Google.Protobuf.Test.csproj](src/Google.Protobuf.Test/Google.Protobuf.Test.csproj)
if you want to run the unit tests):
Open the .csproj file in a text editor and simply add `net35` to the list of
target frameworks, noting that the `TargetFrameworks` element appears twice in
the file (once in the first `PropertyGroup` element, and again in the second
`PropertyGroup` element, i.e., the one with the conditional).
History of C# protobufs

@@ -20,7 +20,7 @@ message Issue307 {
// Old issue 13: http://code.google.com/p/protobuf-csharp-port/issues/detail?id=13
// New issue 309: https://github.com/protocolbuffers/protobuf/issues/309
// message A {
// optional int32 _A = 1;
// }
@@ -101,21 +101,21 @@ message TestJsonFieldOrdering {
// that will require fixing other tests in multiple platforms.
// Alternatively, consider just adding this to
// unittest_proto3.proto if multiple platforms want it.
int32 plain_int32 = 4;
oneof o1 {
string o1_string = 2;
int32 o1_int32 = 5;
}
string plain_string = 1;
oneof o2 {
int32 o2_int32 = 6;
string o2_string = 3;
}
}
message TestJsonName {

@@ -86,7 +86,7 @@ namespace Google.Protobuf.Collections
var map = new MapField<string, ForeignMessage>();
Assert.Throws<ArgumentNullException>(() => map[null] = new ForeignMessage());
}
[Test]
public void AddPreservesInsertionOrder()
{
@@ -471,7 +471,7 @@ namespace Google.Protobuf.Collections
keys.CopyTo(array, 1);
CollectionAssert.AreEqual(new[] { null, "foo", "x", null }, array);
}
// Just test keys - we know the implementation is the same for values
[Test]
public void NonGenericViewCopyTo()

@@ -59,7 +59,7 @@ namespace Google.Protobuf.Compatibility
[TestCase(typeof(string), typeof(int), false)]
[TestCase(typeof(int), typeof(int), true)]
[TestCase(typeof(ValueType), typeof(int), true)]
[TestCase(typeof(long), typeof(int), false)] //
public void IsAssignableFrom(Type target, Type argument, bool expected)
{
Assert.AreEqual(expected, TypeExtensions.IsAssignableFrom(target, argument));

@@ -18,7 +18,7 @@
<PackageReference Include="NUnitLite" Version="3.6.1" />
</ItemGroup>
<!--
- Override target frameworks on non-Windows to just .NET Core
- Doing this conditionally in the initial PropertyGroup confuses
- Visual Studio.
@@ -26,5 +26,5 @@
<PropertyGroup Condition="'$(OS)' != 'Windows_NT'">
<TargetFrameworks>netcoreapp1.0</TargetFrameworks>
</PropertyGroup>
</Project>

@@ -240,7 +240,7 @@ namespace Google.Protobuf
AssertTokens("{'x': 'y'}",
JsonToken.StartObject, JsonToken.Name("x"), JsonToken.Value("y"), JsonToken.EndObject);
}
[Test]
[TestCase("[10, 20", 3)]
[TestCase("[10,", 2)]
@@ -305,7 +305,7 @@ namespace Google.Protobuf
[Test]
public void ObjectMixedType()
{
AssertTokens(@"{'a': 1, 'b': 'bar', 'c': null, 'd': false, 'e': true,
'f': [2], 'g': {'x':'y' }}",
JsonToken.StartObject,
JsonToken.Name("a"),
@@ -349,12 +349,12 @@ namespace Google.Protobuf
Assert.AreEqual(JsonToken.EndDocument, tokenizer.Next());
Assert.Throws<InvalidOperationException>(() => tokenizer.Next());
}
/// <summary>
/// Asserts that the specified JSON is tokenized into the given sequence of tokens.
/// All apostrophes are first converted to double quotes, allowing any tests
/// that don't need to check actual apostrophe handling to use apostrophes in the JSON, avoiding
/// messy string literal escaping. The "end document" token is not specified in the list of
/// expected tokens, but is implicit.
/// </summary>
private static void AssertTokens(string json, params JsonToken[] expectedTokens)
@@ -366,7 +366,7 @@ namespace Google.Protobuf
/// Asserts that the specified JSON is tokenized into the given sequence of tokens.
/// Unlike <see cref="AssertTokens(string, JsonToken[])"/>, this does not perform any character
/// replacement on the specified JSON, and should be used when the text contains apostrophes which
/// are expected to be used *as* apostrophes. The "end document" token is not specified in the list of
/// expected tokens, but is implicit.
/// </summary>
private static void AssertTokensNoReplacement(string json, params JsonToken[] expectedTokens)

@@ -150,7 +150,7 @@ namespace Google.Protobuf.Reflection
Assert.AreEqual(UnittestProto3Reflection.Descriptor, primitiveField.File);
Assert.AreEqual(FieldType.Int32, primitiveField.FieldType);
Assert.IsNull(primitiveField.Proto.Options);
Assert.AreEqual("single_nested_enum", enumField.Name);
Assert.AreEqual(FieldType.Enum, enumField.FieldType);
// Assert.AreEqual(TestAllTypes.Types.NestedEnum.DescriptorProtoFile, enumField.EnumType);
@@ -242,7 +242,7 @@ namespace Google.Protobuf.Reflection
// NestedMessage single_nested_message = 200;
[Test]
public void FieldListOrderings()
{
var fields = TestFieldOrderings.Descriptor.Fields;
Assert.AreEqual(new[] { 11, 1, 101, 200 }, fields.InDeclarationOrder().Select(x => x.FieldNumber));
Assert.AreEqual(new[] { 1, 11, 101, 200 }, fields.InFieldNumberOrder().Select(x => x.FieldNumber));

@@ -213,6 +213,6 @@ namespace Google.Protobuf.Reflection
var descriptor = TestAllTypes.Descriptor;
Assert.Throws<KeyNotFoundException>(() => descriptor.Fields[999999].ToString());
Assert.Throws<KeyNotFoundException>(() => descriptor.Fields["not found"].ToString());
}
}
}

@@ -29,7 +29,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#endregion
namespace Google.Protobuf
{
// Just a sample enum with positive and negative values to be used in tests.

@@ -99,7 +99,7 @@ namespace Google.Protobuf.WellKnownTypes
Duration difference = new Duration { Seconds = 1999, Nanos = Duration.NanosecondsPerSecond - 5000 };
Assert.AreEqual(difference, t1 - t2);
Assert.AreEqual(-difference, t2 - t1);
Assert.AreEqual(t1, t2 + difference);
Assert.AreEqual(t2, t1 - difference);
}

@@ -135,7 +135,7 @@ namespace Google.Protobuf.WellKnownTypes
DoubleField = { 12.5, -1.5, 0d },
FloatField = { 123.25f, -20f, 0f },
Int32Field = { int.MaxValue, int.MinValue, 0 },
Int64Field = { long.MaxValue, long.MinValue, 0L },
StringField = { "First", "Second", "" },
Uint32Field = { uint.MaxValue, uint.MinValue, 0U },
Uint64Field = { ulong.MaxValue, ulong.MinValue, 0UL },
@@ -403,7 +403,7 @@ namespace Google.Protobuf.WellKnownTypes
output.Flush();
stream.Position = 0;
var message = TestWellKnownTypes.Parser.ParseFrom(stream);
Assert.AreEqual(6, message.Int32Field);
}

@@ -19,7 +19,7 @@ message Issue307 {
// Old issue 13: http://code.google.com/p/protobuf-csharp-port/issues/detail?id=13
// New issue 309: https://github.com/protocolbuffers/protobuf/issues/309
// message A {
// optional int32 _A = 1;
// }
@@ -100,21 +100,21 @@ message TestJsonFieldOrdering {
// that will require fixing other tests in multiple platforms.
// Alternatively, consider just adding this to
// unittest_proto3.proto if multiple platforms want it.
int32 plain_int32 = 4;
oneof o1 {
string o1_string = 2;
int32 o1_int32 = 5;
}
string plain_string = 1;
oneof o2 {
int32 o2_int32 = 6;
string o2_string = 3;
}
}
message TestJsonName {

@@ -392,13 +392,13 @@ message CommentMessage {
// Leading nested message field comment
string nested_text = 1;
}
// Leading nested enum comment
enum NestedCommentEnum {
// Zero value comment
ZERO_VALUE = 0;
}
// Leading field comment
string text = 1; // Trailing field comment
}

@@ -93,17 +93,17 @@ namespace Google.Protobuf.Conformance
var parser = new JsonParser(new JsonParser.Settings(20, typeRegistry));
message = parser.Parse<ProtobufTestMessages.Proto3.TestAllTypesProto3>(request.JsonPayload);
break;
case ConformanceRequest.PayloadOneofCase.ProtobufPayload:
{
if (request.MessageType.Equals("protobuf_test_messages.proto3.TestAllTypesProto3"))
{
message = ProtobufTestMessages.Proto3.TestAllTypesProto3.Parser.ParseFrom(request.ProtobufPayload);
}
else if (request.MessageType.Equals("protobuf_test_messages.proto2.TestAllTypesProto2"))
{
return new ConformanceResponse { Skipped = "CSharp doesn't support proto2" };
}
else
{
throw new Exception(" Protobuf request doesn't have specific payload type");
}

@@ -86,7 +86,7 @@ namespace Google.Protobuf.Collections
var map = new MapField<string, ForeignMessage>();
Assert.Throws<ArgumentNullException>(() => map[null] = new ForeignMessage());
}
[Test]
public void AddPreservesInsertionOrder()
{
@@ -471,7 +471,7 @@ namespace Google.Protobuf.Collections
keys.CopyTo(array, 1);
CollectionAssert.AreEqual(new[] { null, "foo", "x", null }, array);
}
// Just test keys - we know the implementation is the same for values
[Test]
public void NonGenericViewCopyTo()

@@ -120,5 +120,5 @@ namespace Google.Protobuf.Collections
}
}
}
}
}

@@ -59,7 +59,7 @@ namespace Google.Protobuf.Compatibility
[TestCase(typeof(string), typeof(int), false)]
[TestCase(typeof(int), typeof(int), true)]
[TestCase(typeof(ValueType), typeof(int), true)]
[TestCase(typeof(long), typeof(int), false)] //
public void IsAssignableFrom(Type target, Type argument, bool expected)
{
Assert.AreEqual(expected, TypeExtensions.IsAssignableFrom(target, argument));

@@ -68,7 +68,7 @@ namespace Google.Protobuf
paths = tree.ToFieldMask().Paths;
Assert.AreEqual(3, paths.Count);
Assert.Contains("bar.baz", paths);
// Redundant sub-path.
tree.AddFieldPath("foo.bar");
paths = tree.ToFieldMask().Paths;
@@ -79,7 +79,7 @@ namespace Google.Protobuf
paths = tree.ToFieldMask().Paths;
Assert.AreEqual(4, paths.Count);
Assert.Contains("bar.quz", paths);
// A path that matches several existing sub-paths.
tree.AddFieldPath("bar");
paths = tree.ToFieldMask().Paths;

@@ -18,7 +18,7 @@
<PackageReference Include="NUnit3TestAdapter" Version="3.9.0" />
</ItemGroup>
<!--
- Override target frameworks on non-Windows to just .NET Core
- Doing this conditionally in the initial PropertyGroup confuses
- Visual Studio.

@@ -243,7 +243,7 @@ namespace Google.Protobuf
[Test]
public void InvalidSurrogatePairsFail()
{
// Note: don't use TestCase for these, as the strings can't be reliably represented
// See http://codeblog.jonskeet.uk/2014/11/07/when-is-a-string-not-a-string/
// Lone low surrogate

@@ -240,7 +240,7 @@ namespace Google.Protobuf
AssertTokens("{'x': 'y'}",
JsonToken.StartObject, JsonToken.Name("x"), JsonToken.Value("y"), JsonToken.EndObject);
}
[Test]
[TestCase("[10, 20", 3)]
[TestCase("[10,", 2)]
@@ -305,7 +305,7 @@ namespace Google.Protobuf
[Test]
public void ObjectMixedType()
{
AssertTokens(@"{'a': 1, 'b': 'bar', 'c': null, 'd': false, 'e': true,
'f': [2], 'g': {'x':'y' }}",
JsonToken.StartObject,
JsonToken.Name("a"),
@@ -365,12 +365,12 @@ namespace Google.Protobuf
tokenizer.SkipValue();
Assert.AreEqual("next", tokenizer.Next().StringValue);
}
/// <summary>
/// Asserts that the specified JSON is tokenized into the given sequence of tokens.
/// All apostrophes are first converted to double quotes, allowing any tests
/// that don't need to check actual apostrophe handling to use apostrophes in the JSON, avoiding
/// messy string literal escaping. The "end document" token is not specified in the list of
/// expected tokens, but is implicit.
/// </summary>
private static void AssertTokens(string json, params JsonToken[] expectedTokens)
@@ -382,7 +382,7 @@ namespace Google.Protobuf
/// Asserts that the specified JSON is tokenized into the given sequence of tokens.
/// Unlike <see cref="AssertTokens(string, JsonToken[])"/>, this does not perform any character
/// replacement on the specified JSON, and should be used when the text contains apostrophes which
/// are expected to be used *as* apostrophes. The "end document" token is not specified in the list of
/// expected tokens, but is implicit.
/// </summary>
private static void AssertTokensNoReplacement(string json, params JsonToken[] expectedTokens)

@@ -256,7 +256,7 @@ namespace Google.Protobuf.Reflection
Assert.AreEqual(unitTestProto3Descriptor, primitiveField.File);
Assert.AreEqual(FieldType.Int32, primitiveField.FieldType);
Assert.IsNull(primitiveField.Proto.Options);
Assert.AreEqual("single_nested_enum", enumField.Name);
Assert.AreEqual(FieldType.Enum, enumField.FieldType);
Assert.AreEqual(testAllTypesDescriptor.EnumTypes[0], enumField.EnumType);
@@ -352,7 +352,7 @@ namespace Google.Protobuf.Reflection
// NestedMessage single_nested_message = 200;
[Test]
public void FieldListOrderings()
{
var fields = TestFieldOrderings.Descriptor.Fields;
Assert.AreEqual(new[] { 11, 1, 101, 200 }, fields.InDeclarationOrder().Select(x => x.FieldNumber));
Assert.AreEqual(new[] { 1, 11, 101, 200 }, fields.InFieldNumberOrder().Select(x => x.FieldNumber));

@@ -213,6 +213,6 @@ namespace Google.Protobuf.Reflection
var descriptor = TestAllTypes.Descriptor;
Assert.Throws<KeyNotFoundException>(() => descriptor.Fields[999999].ToString());
Assert.Throws<KeyNotFoundException>(() => descriptor.Fields["not found"].ToString());
}
}
}

@@ -29,7 +29,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#endregion
namespace Google.Protobuf
{
// Just a sample enum with positive and negative values to be used in tests.

@@ -43,7 +43,7 @@ namespace Google.Protobuf
// Signalling bit is inverted compared with double.NaN. Doesn't really matter
// whether that makes it quiet or signalling - it's different.
public static double SignallingFlipped { get; } =
BitConverter.Int64BitsToDouble(BitConverter.DoubleToInt64Bits(double.NaN) ^ -0x8000_0000_0000_0000L);
// A bit in the middle of the mantissa is flipped; this difference is preserved when casting to float.

@@ -99,7 +99,7 @@ namespace Google.Protobuf.WellKnownTypes
Duration difference = new Duration { Seconds = 1999, Nanos = Duration.NanosecondsPerSecond - 5000 };
Assert.AreEqual(difference, t1 - t2);
Assert.AreEqual(-difference, t2 - t1);
Assert.AreEqual(t1, t2 + difference);
Assert.AreEqual(t2, t1 - difference);
}
@@ -190,7 +190,7 @@ namespace Google.Protobuf.WellKnownTypes
Assert.IsTrue(e > d);
Assert.IsTrue(e == e);
Assert.IsTrue(e == e.Clone());
Assert.IsTrue(b >= a);
Assert.IsTrue(b <= c);
Assert.IsTrue(b <= d);

@@ -135,7 +135,7 @@ namespace Google.Protobuf.WellKnownTypes
DoubleField = { 12.5, -1.5, 0d },
FloatField = { 123.25f, -20f, 0f },
Int32Field = { int.MaxValue, int.MinValue, 0 },
Int64Field = { long.MaxValue, long.MinValue, 0L },
StringField = { "First", "Second", "" },
Uint32Field = { uint.MaxValue, uint.MinValue, 0U },
Uint64Field = { ulong.MaxValue, ulong.MinValue, 0UL },
@@ -403,7 +403,7 @@ namespace Google.Protobuf.WellKnownTypes
output.Flush();
stream.Position = 0;
var message = TestWellKnownTypes.Parser.ParseFrom(stream);
Assert.AreEqual(6, message.Int32Field);
}

@@ -292,7 +292,7 @@ namespace Google.Protobuf
/// <summary>
/// Returns a field codec which effectively wraps a value of type T in a message.
///
/// </summary>
internal static FieldCodec<T> GetCodec<T>()
{
@@ -431,7 +431,7 @@ namespace Google.Protobuf
internal T DefaultValue { get; }
private readonly int tagSize;
internal FieldCodec(
Func<CodedInputStream, T> reader,
Action<CodedOutputStream, T> writer,

@@ -41,7 +41,7 @@ namespace Google.Protobuf
/// <summary>
/// <para>A tree representation of a FieldMask. Each leaf node in this tree represent
/// a field path in the FieldMask.</para>
///
/// <para>For example, FieldMask "foo.bar,foo.baz,bar.baz" as a tree will be:</para>
/// <code>
/// [root] -+- foo -+- bar
@@ -50,7 +50,7 @@ namespace Google.Protobuf
/// |
/// +- bar --- baz
/// </code>
///
/// <para>By representing FieldMasks with this tree structure we can easily convert
/// a FieldMask to a canonical form, merge two FieldMasks, calculate the
/// intersection to two FieldMasks and traverse all fields specified by the
@@ -242,7 +242,7 @@ namespace Google.Protobuf
Merge(root, "", source, destination, options);
}
/// <summary>
/// Merges all fields specified by a sub-tree from <paramref name="source"/> to <paramref name="destination"/>.
/// </summary>
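The doc comment above describes the tree only in prose, and `FieldMaskTree` itself is internal to `Google.Protobuf`, so it cannot be shown via the public API. The stand-alone C# sketch below (all names are illustrative, not the library's) shows how such a tree of path segments collapses redundant sub-paths and yields a canonical path list:

```csharp
using System;
using System.Collections.Generic;
using System.Linq;

// Illustrative only: a tiny tree in the spirit of the FieldMaskTree described
// above. Leaf nodes correspond to paths in the mask.
var root = new MaskNode();
foreach (var p in new[] { "foo.bar", "foo.baz", "bar.baz" })
{
    root.AddPath(p);
}
Console.WriteLine(string.Join(",", root.Paths()));  // bar.baz,foo.bar,foo.baz

root.AddPath("bar");                                // "bar" subsumes "bar.baz"
Console.WriteLine(string.Join(",", root.Paths()));  // bar,foo.bar,foo.baz

class MaskNode
{
    public SortedDictionary<string, MaskNode> Children { get; } = new SortedDictionary<string, MaskNode>();

    public void AddPath(string path)
    {
        var node = this;
        foreach (var segment in path.Split('.'))
        {
            if (node.Children.TryGetValue(segment, out var child))
            {
                // An existing leaf already covers this path, so a longer path is redundant.
                if (child.Children.Count == 0) return;
            }
            else
            {
                child = new MaskNode();
                node.Children[segment] = child;
            }
            node = child;
        }
        // A shorter path subsumes any existing sub-paths beneath it.
        node.Children.Clear();
    }

    public IEnumerable<string> Paths(string prefix = "") =>
        Children.Count == 0
            ? new[] { prefix }
            : Children.SelectMany(kv => kv.Value.Paths(prefix.Length == 0 ? kv.Key : prefix + "." + kv.Key));
}
```

The same collapsing behaviour is what the `FieldMaskTreeTest` hunks earlier in this diff ("Redundant sub-path", "A path that matches several existing sub-paths") exercise.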

@@ -20,7 +20,7 @@
<RepositoryUrl>https://github.com/protocolbuffers/protobuf.git</RepositoryUrl>
</PropertyGroup>
<!--
- Override target frameworks on non-Windows to just .NET Core
- Doing this conditionally in the initial PropertyGroup confuses
- Visual Studio.
@@ -30,7 +30,7 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="SourceLink.Create.CommandLine" PrivateAssets="All" Version="2.7.6"/>
</ItemGroup>
</Project>

@@ -70,7 +70,7 @@ namespace Google.Protobuf
/// </summary>
public static JsonFormatter Default { get; } = new JsonFormatter(Settings.Default);
// A JSON formatter which *only* exists
private static readonly JsonFormatter diagnosticFormatter = new JsonFormatter(Settings.Default);
/// <summary>
@@ -579,7 +579,7 @@ namespace Google.Protobuf
writer.Write(data.ToBase64());
writer.Write('"');
writer.Write(" }");
}
private void WriteStruct(TextWriter writer, IMessage message)
{
@@ -616,7 +616,7 @@ namespace Google.Protobuf
}
object value = specifiedField.Accessor.GetValue(message);
switch (specifiedField.FieldNumber)
{
case Value.BoolValueFieldNumber:
@@ -871,7 +871,7 @@ namespace Google.Protobuf
// the platforms we target have it.
private static readonly Dictionary<System.Type, Dictionary<object, string>> dictionaries
= new Dictionary<System.Type, Dictionary<object, string>>();
internal static string GetOriginalName(object value)
{
var enumType = value.GetType();

@@ -674,7 +674,7 @@ namespace Google.Protobuf
if (value != Math.Floor(value))
{
throw new InvalidProtocolBufferException($"Value not an integer: {value}");
}
}
private static object ParseSingleStringValue(FieldDescriptor field, string text)
@@ -919,7 +919,7 @@ namespace Google.Protobuf
messagePaths.Add(ToSnakeCase(path));
}
}
// Ported from src/google/protobuf/util/internal/utility.cc
private static string ToSnakeCase(string text)
{

@@ -614,7 +614,7 @@ namespace Google.Protobuf
/// where ^ represents the current position within the text stream. The examples all use string values,
/// but could be any value, including nested objects/arrays.
/// The complete state of the tokenizer also includes a stack to indicate the contexts (arrays/objects).
/// Any additional notional state of "AfterValue" indicates that a value has been completed, at which
/// point there's an immediate transition to ExpectedEndOfDocument, ObjectAfterProperty or ArrayAfterValue.
/// </para>
/// <para>
@@ -655,7 +655,7 @@ namespace Google.Protobuf
/// <summary>
/// { "foo" : ^ "bar", "x": "y" }
/// Before any property other than the first in an object.
/// (Equivalently: after any property in an object)
/// Next states:
/// Next states:
/// "AfterValue" (value is simple)
/// ObjectStart (value is object)
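The states named in these doc comments are hard to visualise from fragments alone. The toy walkthrough below is illustrative only (it is not the library's internal `JsonTokenizer`; the pre-split token stream is an assumption, nesting is omitted, and the separate after-comma state is folded into `ObjectStart`), but it traces a flat object through the kind of transitions described above:

```csharp
using System;

// Trace the notional states for {"foo": "bar", "x": "y"} using a pre-lexed token list.
var tokens = new[] { "{", "\"foo\"", ":", "\"bar\"", ",", "\"x\"", ":", "\"y\"", "}" };
var state = State.ExpectedValue;
foreach (var token in tokens)
{
    state = (state, token) switch
    {
        (State.ExpectedValue, "{") => State.ObjectStart,
        (State.ObjectStart, "}") => State.ExpectedEndOfDocument,
        (State.ObjectStart, _) => State.ObjectBeforeColon,          // saw a property name
        (State.ObjectBeforeColon, ":") => State.ObjectAfterColon,
        (State.ObjectAfterColon, _) => State.ObjectAfterProperty,   // simple value: the notional "AfterValue"
        (State.ObjectAfterProperty, ",") => State.ObjectStart,      // expect another property name
        (State.ObjectAfterProperty, "}") => State.ExpectedEndOfDocument,
        _ => throw new InvalidOperationException($"Unexpected {token} in state {state}")
    };
    Console.WriteLine($"{token,-7} -> {state}");
}

enum State { ExpectedValue, ObjectStart, ObjectBeforeColon, ObjectAfterColon, ObjectAfterProperty, ExpectedEndOfDocument }
```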

@@ -29,7 +29,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#endregion
using System;
using System.IO;

@@ -183,7 +183,7 @@ namespace Google.Protobuf
{
return f.Accessor.HasValue(message);
}
else
{
return true;
}

@@ -29,7 +29,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#endregion
using System;
using System.IO;
@@ -210,7 +210,7 @@ namespace Google.Protobuf
// The current implementation avoids a virtual method call and a cast, which *may* be significant in some cases.
// Benchmarking work is required to measure the significance - but it's only a few lines of code in any case.
// The API wouldn't change anyway - just the implementation - so this work can be deferred.
private readonly Func<T> factory;
/// <summary>
/// Creates a new parser.

@@ -63,7 +63,7 @@ namespace Google.Protobuf.Reflection
/// Singleton for all descriptors with an empty set of options.
/// </summary>
internal static readonly CustomOptions Empty = new CustomOptions();
/// <summary>
/// A sequence of values per field. This needs to be per field rather than per tag to allow correct deserialization
/// of repeated fields which could be "int, ByteString, int" - unlikely as that is. The fact that values are boxed
@@ -147,7 +147,7 @@ namespace Google.Protobuf.Reflection
/// <param name="value">The output variable to populate.</param>
/// <returns><c>true</c> if a suitable value for the field was found; <c>false</c> otherwise.</returns>
public bool TryGetSFixed64(int field, out long value) => TryGetInt64(field, out value);
/// <summary>
/// Retrieves a signed 32-bit integer value for the specified option field,
/// assuming a zigzag encoding.
@@ -357,7 +357,7 @@ namespace Google.Protobuf.Reflection
List<FieldValue> valuesForField;
if (!ret.valuesByField.TryGetValue(field, out valuesForField))
{
// Expect almost all
valuesForField = new List<FieldValue>(1);
ret.valuesByField[field] = valuesForField;
}
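For context, the `TryGet*` accessors shown above are used by reading the options collection off a descriptor. A hedged sketch follows; the `CustomOptions` property on `FieldDescriptor` and the extension field number 50001 are assumptions for illustration, not taken from this diff:

```csharp
using System;
using Google.Protobuf.Reflection;

static class CustomOptionExample
{
    // 50001 is a hypothetical extension field number; TryGetInt64 mirrors the
    // TryGetSFixed64 delegation shown in the hunk above.
    public static void PrintCustomOption(FieldDescriptor field)
    {
        if (field.CustomOptions.TryGetInt64(50001, out long value))
        {
            Console.WriteLine($"{field.Name}: option 50001 = {value}");
        }
    }
}
```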

@@ -47,7 +47,7 @@ namespace Google.Protobuf.Reflection
}
/// <value>
/// The index of this descriptor within its parent descriptor.
/// </value>
/// <remarks>
/// This returns the index of this descriptor within its parent, for

@@ -35,7 +35,7 @@ namespace Google.Protobuf.Reflection
/// <summary>
/// Descriptor for a single enum value within an enum in a .proto file.
/// </summary>
public sealed class EnumValueDescriptor : DescriptorBase
{
private readonly EnumDescriptor enumDescriptor;
private readonly EnumValueDescriptorProto proto;

@@ -98,7 +98,7 @@ namespace Google.Protobuf.Reflection
this.propertyName = propertyName;
JsonName = Proto.JsonName == "" ? JsonFormatter.ToJsonName(Proto.Name) : Proto.JsonName;
}
/// <summary>
/// The brief name of the descriptor's target.
@@ -129,7 +129,7 @@ namespace Google.Protobuf.Reflection
/// </para>
/// </remarks>
public IFieldAccessor Accessor => accessor;
/// <summary>
/// Maps a field type as included in the .proto file to a FieldType.
/// </summary>
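The `JsonName` assignment in the first hunk above falls back to the standard proto3 JSON-name derivation when `json_name` is not set explicitly. A minimal sketch of that rule (not the library's internal helper): underscores are dropped and the character that follows each underscore is upper-cased.

```csharp
using System.Text;

static class JsonNameSketch
{
    // "plain_int32" -> "plainInt32", "o1_string" -> "o1String"
    public static string ToJsonName(string fieldName)
    {
        var result = new StringBuilder(fieldName.Length);
        bool capitaliseNext = false;
        foreach (char c in fieldName)
        {
            if (c == '_')
            {
                capitaliseNext = true;
                continue;
            }
            result.Append(capitaliseNext ? char.ToUpperInvariant(c) : c);
            capitaliseNext = false;
        }
        return result.ToString();
    }
}
```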

@@ -51,5 +51,5 @@ namespace Google.Protobuf.Reflection
/// Returns the descriptor for the .proto file that this entity is part of.
/// </summary>
FileDescriptor File { get; }
}
}

@@ -63,7 +63,7 @@ namespace Google.Protobuf.Reflection
private readonly IList<FieldDescriptor> fieldsInDeclarationOrder;
private readonly IList<FieldDescriptor> fieldsInNumberOrder;
private readonly IDictionary<string, FieldDescriptor> jsonFieldMap;
internal MessageDescriptor(DescriptorProto proto, FileDescriptor file, MessageDescriptor parent, int typeIndex, GeneratedClrTypeInfo generatedCodeInfo)
: base(file, file.ComputeFullName(parent, proto.Name), typeIndex)
{

@@ -45,7 +45,7 @@ namespace Google.Protobuf.Reflection
private readonly Action<IMessage> clearDelegate;
private OneofDescriptor descriptor;
internal OneofAccessor(PropertyInfo caseProperty, MethodInfo clearMethod, OneofDescriptor descriptor)
{
if (!caseProperty.CanRead)
{

@@ -60,6 +60,6 @@ namespace Google.Protobuf.Reflection
Name = ProtoPreconditions.CheckNotNull(name, nameof(name));
PreferredAlias = true;
}
}
}

@@ -77,7 +77,7 @@ namespace Google.Protobuf.Reflection
};
var clrType = property.PropertyType;
// TODO: Validate that this is a reasonable single field? (Should be a value type, a message type, or string/ByteString.)
object defaultValue =
descriptor.FieldType == FieldType.Message ? null
: clrType == typeof(string) ? ""

@@ -171,7 +171,7 @@ namespace Google.Protobuf
{
result += CodedOutputStream.ComputeTagSize(fieldNumber) * fixed64List.Count;
result += CodedOutputStream.ComputeFixed64Size(1) * fixed64List.Count;
}
if (lengthDelimitedList != null)
{
result += CodedOutputStream.ComputeTagSize(fieldNumber) * lengthDelimitedList.Count;
@@ -266,7 +266,7 @@ namespace Google.Protobuf
return this;
}
internal UnknownField AddGroup(UnknownFieldSet value)
{
groupList = Add(groupList, value);
return this;
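As a worked example of the size computation in the first hunk of the UnknownField section above (the field number and value count are illustrative), three unknown fixed64 values on field 15 cost one tag byte plus eight payload bytes each:

```csharp
using System;
using Google.Protobuf;

int count = 3;
int size = CodedOutputStream.ComputeTagSize(15) * count      // 1-byte tag per value  -> 3
         + CodedOutputStream.ComputeFixed64Size(1) * count;   // 8 bytes per value     -> 24
Console.WriteLine(size);                                       // 27
```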

@@ -49,7 +49,7 @@ namespace Google.Protobuf.WellKnownTypes
/// </summary>
/// <remarks>
/// <para>
/// This is always just the last part of the URL, after the final slash. No validation of
/// anything before the trailing slash is performed. If the type URL does not include a slash,
/// an empty string is returned rather than an exception being thrown; this won't match any types,
/// and the calling code is probably in a better position to give a meaningful error.
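This remark appears to document Any's type-URL handling; assuming it applies to the `Any.GetTypeName` helper that exposes the same rule, the "last segment after the final slash" behaviour looks like this:

```csharp
using System;
using Google.Protobuf.WellKnownTypes;

Console.WriteLine(Any.GetTypeName("type.googleapis.com/google.protobuf.Duration"));
// -> google.protobuf.Duration
Console.WriteLine(Any.GetTypeName("no-slash-at-all"));
// -> "" (empty string; no exception is thrown)
```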

@@ -156,7 +156,7 @@ namespace Google.Protobuf.WellKnownTypes
return Normalize(lhs.Seconds - rhs.Seconds, lhs.Nanos - rhs.Nanos);
}
}
/// <summary>
/// Creates a duration with the normalized values from the given number of seconds and
/// nanoseconds, conforming with the description in the proto file.
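A quick worked example of the subtraction and normalization described above (the values are illustrative): the raw component-wise difference can leave `Nanos` negative, and normalization brings it back into range with the same sign as `Seconds`.

```csharp
using Google.Protobuf.WellKnownTypes;

var a = new Duration { Seconds = 3, Nanos = 250_000_000 };   // 3.25s
var b = new Duration { Seconds = 1, Nanos = 750_000_000 };   // 1.75s
var diff = a - b;   // raw components would be 2s and -500,000,000ns
// After normalization: diff.Seconds == 1 && diff.Nanos == 500_000_000
```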

@@ -358,7 +358,7 @@ namespace Google.Protobuf.WellKnownTypes
/// field is set in the source, the value is copied to the
/// destination; if the field is unset in the source, the field is cleared
/// from the destination) when merging.
///
/// Default behavior is to always set the value of the source primitive
/// field to the destination primitive field, and if the source field is
/// unset, the default value of the source field is copied to the

@@ -192,7 +192,7 @@ with info about your project (name and website) so we can add an entry for you.
1. CGSN Mooring Project
* Website: https://bitbucket.org/ooicgsn/cgsn-mooring
* Extensions: 1058
1. Container Storage Interface
* Website: https://github.com/container-storage-interface/spec
* Extensions: 1059-1069

@@ -9,16 +9,16 @@ This table contains 3 languages' results:
* **reuse** - This is for reusing the same message instance for parsing.
* **Java** - For Java there are 3 kinds of parsing/serialization ways:
* **byte[]** - This is for parsing from a byte array.
* **ByteString** - This is for parsing from a
com.google.protobuf.ByteString.
* **InputStream** - This is for parsing from an InputStream.
* **Python** - For Python there are 3 kinds of Python protobuf for testing:
* **C++-generated-code** - This is for using cpp generated code of the
proto file as a dynamic linked library.
* **C++-reflection** - This is for using cpp reflection, where there's no
generated code, but still using the cpp protobuf library as a dynamic linked
library.
* **pure-Python** - This is for the pure Python version, which doesn't link with
any cpp protobuf library.
## Parsing performance

@@ -172,4 +172,4 @@ There are miscellaneous other things you may find useful as a Protocol Buffers d
* [Make protoc plugins in NodeJS](https://github.com/konsumer/node-protoc-plugin)
* [ProfaneDB - A Protocol Buffers database](https://profanedb.gitlab.io)
* [Protocol Buffer property-based testing utility and example message generator (Python / Hypothesis)](https://github.com/CurataEng/hypothesis-protobuf)
* [Protolock - CLI utility to prevent backward-incompatible changes to .proto files](https://github.com/nilslice/protolock)

@@ -34,7 +34,7 @@ func TestWritePersonWritesPerson(t *testing.T) {
func TestListPeopleWritesList(t *testing.T) {
buf := new(bytes.Buffer)
-in := pb.AddressBook{People: []*pb.Person {
+in := pb.AddressBook{People: []*pb.Person{
{
Name: "John Doe",
Id: 101,

@@ -55,7 +55,7 @@ message TestOptimizedForSize {
message TestRequiredOptimizedForSize {
required int32 x = 1;
}
message TestOptionalOptimizedForSize {
optional TestRequiredOptimizedForSize o = 1;
}

@@ -55,7 +55,7 @@ message TestOptimizedForSize {
message TestRequiredOptimizedForSize {
required int32 x = 1;
}
message TestOptionalOptimizedForSize {
optional TestRequiredOptimizedForSize o = 1;
}

@@ -342,7 +342,7 @@ public class ByteStringTest extends TestCase {
return -1;
}
}
// A stream which exposes the byte array passed into write(byte[], int, int).
private static class EvilOutputStream extends OutputStream {
public byte[] capturedArray = null;
@@ -454,13 +454,13 @@ public class ByteStringTest extends TestCase {
isArrayRange(bytes, byteString.toByteArray(), 0, bytes.length));
}
}
public void testNewOutputEmpty() throws IOException {
// Make sure newOutput() correctly builds empty byte strings
ByteString byteString = ByteString.newOutput().toByteString();
assertEquals(ByteString.EMPTY, byteString);
}
public void testNewOutput_Mutating() throws IOException {
Output os = ByteString.newOutput(5);
os.write(new byte[] {1, 2, 3, 4, 5});

@@ -39,22 +39,22 @@ import java.lang.reflect.AnnotatedElement;
import java.lang.reflect.Method;
/**
* Test field deprecation
*
* @author birdo@google.com (Roberto Scaramuzzi)
*/
public class DeprecatedFieldTest extends TestCase {
private String[] deprecatedGetterNames = {
"hasDeprecatedInt32",
"getDeprecatedInt32"};
private String[] deprecatedBuilderGetterNames = {
"hasDeprecatedInt32",
"getDeprecatedInt32",
"clearDeprecatedInt32"};
private String[] deprecatedBuilderSetterNames = {
"setDeprecatedInt32"};
public void testDeprecatedField() throws Exception {
Class<?> deprecatedFields = TestDeprecatedFields.class;
Class<?> deprecatedFieldsBuilder = TestDeprecatedFields.Builder.class;
@@ -74,7 +74,7 @@ public class DeprecatedFieldTest extends TestCase {
isDeprecated(method));
}
}
private boolean isDeprecated(AnnotatedElement annotated) {
return annotated.isAnnotationPresent(Deprecated.class);
}

@@ -490,7 +490,7 @@ public class DescriptorsTest extends TestCase {
.build();
// translate and crosslink
FileDescriptor file =
Descriptors.FileDescriptor.buildFrom(fileDescriptorProto,
new FileDescriptor[0]);
// verify resulting descriptors
assertNotNull(file);
@@ -511,7 +511,7 @@ public class DescriptorsTest extends TestCase {
}
assertTrue(barFound);
}
public void testInvalidPublicDependency() throws Exception {
FileDescriptorProto fooProto = FileDescriptorProto.newBuilder()
.setName("foo.proto") .build();
@@ -595,7 +595,7 @@ public class DescriptorsTest extends TestCase {
Descriptors.FileDescriptor.buildFrom(
fooProto, new FileDescriptor[] {forwardFile});
}
/**
* Tests the translate/crosslink for an example with a more complex namespace
* referencing.
@@ -644,6 +644,6 @@ public class DescriptorsTest extends TestCase {
assertTrue(field.getEnumType().getFile().getName().equals("bar.proto"));
assertTrue(field.getEnumType().getFile().getPackage().equals(
"a.b.c.d.bar.shared"));
}
}
}

@@ -901,7 +901,7 @@ public class GeneratedMessageTest extends TestCase {
FieldDescriptor fieldDescriptor =
descriptor.findFieldByName("optional_nested_message");
// Before setting field, builder is initialized by default value.
TestAllTypes.Builder builder = TestAllTypes.newBuilder();
NestedMessage.Builder fieldBuilder =
(NestedMessage.Builder) builder.getFieldBuilder(fieldDescriptor);

@@ -233,7 +233,7 @@ public class LiteralByteStringTest extends TestCase {
assertTrue(classUnderTest + ".writeTo() must give back the same bytes",
Arrays.equals(referenceBytes, roundTripBytes));
}
public void testWriteTo_mutating() throws IOException {
OutputStream os = new OutputStream() {
@Override
@@ -274,7 +274,7 @@ public class LiteralByteStringTest extends TestCase {
assertEquals("Output.reset() resets the output", 0, output.size());
assertEquals("Output.reset() resets the output",
ByteString.EMPTY, output.toByteString());
}
public void testHashCode() {

@@ -313,7 +313,7 @@ public class MessageTest extends TestCase {
assertEquals("Message missing required fields: a, b, c", e.getMessage());
}
}
/** Test reading unset repeated message from DynamicMessage. */
public void testDynamicRepeatedMessageNull() throws Exception {
Descriptors.Descriptor descriptor = TestRequired.getDescriptor();
@@ -327,7 +327,7 @@ public class MessageTest extends TestCase {
assertEquals(result.getRepeatedFieldCount(result.getDescriptorForType()
.findFieldByName("repeated_foreign_message")), 0);
}
/** Test reading repeated message from DynamicMessage. */
public void testDynamicRepeatedMessageNotNull() throws Exception {

@@ -36,7 +36,7 @@ import java.util.Iterator;
/**
* This class tests {@link RopeByteString#substring(int, int)} by inheriting the tests from
* {@link LiteralByteStringTest}. Only a couple of methods are overridden.
*
* @author carlanton@google.com (Carl Haverl)
*/

@@ -38,7 +38,7 @@ import java.util.Iterator;
/**
* This class tests {@link RopeByteString} by inheriting the tests from
* {@link LiteralByteStringTest}. Only a couple of methods are overridden.
*
* <p>A full test of the result of {@link RopeByteString#substring(int, int)} is found in the
* separate class {@link RopeByteStringSubstringTest}.
*

@@ -3014,7 +3014,7 @@ public abstract class GeneratedMessage extends AbstractMessage
return (Extension<MessageType, T>) extension;
}
protected static int computeStringSize(final int fieldNumber, final Object value) {
if (value instanceof String) {
return CodedOutputStream.computeStringSize(fieldNumber, (String) value);
@@ -3022,7 +3022,7 @@ public abstract class GeneratedMessage extends AbstractMessage
return CodedOutputStream.computeBytesSize(fieldNumber, (ByteString) value);
}
}
protected static int computeStringSizeNoTag(final Object value) {
if (value instanceof String) {
return CodedOutputStream.computeStringSizeNoTag((String) value);
@@ -3030,7 +3030,7 @@ public abstract class GeneratedMessage extends AbstractMessage
return CodedOutputStream.computeBytesSizeNoTag((ByteString) value);
}
}
protected static void writeString(
CodedOutputStream output, final int fieldNumber, final Object value) throws IOException {
if (value instanceof String) {
@@ -3039,7 +3039,7 @@ public abstract class GeneratedMessage extends AbstractMessage
output.writeBytes(fieldNumber, (ByteString) value);
}
}
protected static void writeStringNoTag(
CodedOutputStream output, final Object value) throws IOException {
if (value instanceof String) {

@@ -364,7 +364,7 @@ public abstract class GeneratedMessageV3 extends AbstractMessage
throw e.unwrapIOException();
}
}
protected static boolean canUseUnsafe() {
return UnsafeUtil.hasUnsafeArrayOperations() && UnsafeUtil.hasUnsafeByteBufferOperations();
}

@@ -95,7 +95,7 @@ public class FieldMaskTreeTest extends TestCase {
tree.intersectFieldPath("bar", result);
assertEquals("bar.baz,bar.quz,foo", result.toString());
}
public void testMerge() throws Exception {
testMergeImpl(true);
testMergeImpl(false);

@@ -19,8 +19,8 @@ resolve imports at compile time.
To use Protocol Buffers with JavaScript, you need two main components:
1. The protobuf runtime library. You can install this with
`npm install google-protobuf`, or use the files in this directory.
If npm is not being used, as of 3.3.0, the files needed are located in binary subdirectory;
arith.js, constants.js, decoder.js, encoder.js, map.js, message.js, reader.js, utils.js, writer.js
2. The Protocol Compiler `protoc`. This translates `.proto` files
into `.js` files. The compiler is not currently available via

@@ -2,7 +2,7 @@
# Location of the build script in repository
build_file: "protobuf/kokoro/linux/benchmark/build.sh"
timeout_mins: 240
action {
define_artifacts {

@@ -65,7 +65,7 @@ echo "benchmarking cpp..."
env LD_PRELOAD="$oldpwd/gperftools/.libs/libtcmalloc.so" ./cpp-benchmark --benchmark_min_time=5.0 --benchmark_out_format=json --benchmark_out="tmp/cpp_result.json" $datasets
cd $oldpwd
# build go protobuf
export PATH="`pwd`/src:$PATH"
export GOPATH="$HOME/gocode"
mkdir -p "$GOPATH/src/github.com/google"

@@ -34,7 +34,7 @@ patch "$CROSS_RUBY" << EOF
+ '--without-gmp',
'--with-ext='
]
@@ -151,6 +153,7 @@
# make
file "#{USER_HOME}/builds/#{MINGW_HOST}/#{RUBY_CC_VERSION}/ruby.exe" => ["#{USER_HOME}/builds/#{MINGW_HOST}/#{RUBY_CC_VERSION}/Makefile"] do |t|

@@ -341,7 +341,7 @@ if [[ "${DO_XCODE_TVOS_TESTS}" == "yes" ]] ; then
xcodebuild
-project objectivec/ProtocolBuffers_tvOS.xcodeproj
-scheme ProtocolBuffers
# Test on the oldest and current.
-destination "platform=tvOS Simulator,name=Apple TV 1080p,OS=9.0"
-destination "platform=tvOS Simulator,name=Apple TV,OS=latest"
)

@@ -74,7 +74,7 @@ typedef union {
/**
* Enum listing the possible data types that a field can contain.
*
* @note Do not change the order of this enum (or add things to it) without
* thinking about it very carefully. There are several things that depend
* on the order.

@@ -265,7 +265,7 @@ static void* appendstr_handler(void *closure,
(stringfields_parseframe_t*)malloc(sizeof(stringfields_parseframe_t));
frame->closure = closure;
stringsink_init(&frame->sink);
return frame;
}
@@ -376,7 +376,7 @@ static void* str_handler(void *closure,
(stringfields_parseframe_t*)malloc(sizeof(stringfields_parseframe_t));
frame->closure = closure;
stringsink_init(&frame->sink);
return frame;
}
@@ -791,7 +791,7 @@ static void *oneofstr_handler(void *closure,
(stringfields_parseframe_t*)malloc(sizeof(stringfields_parseframe_t));
frame->closure = closure;
stringsink_init(&frame->sink);
return frame;
}
@@ -955,7 +955,7 @@ static void add_handlers_for_mapentry(const upb_msgdef* msgdef, upb_handlers* h,
offsetof(map_parse_frame_data_t,
key_storage));
add_handlers_for_singular_field(h, value_field,
offsetof(map_parse_frame_data_t,
value_storage));
}

@@ -88,7 +88,7 @@ void util_init(TSRMLS_D) {
// Type checking/conversion.
// -----------------------------------------------------------------------------
// This is modified from is_numeric_string in zend_operators.h. The behavior of
// this function is the same as is_numeric_string, except that this takes
// int64_t as input instead of long.
static zend_uchar convert_numeric_string(
@@ -102,7 +102,7 @@ static zend_uchar convert_numeric_string(
return IS_NULL;
}
while (*str == ' ' || *str == '\t' || *str == '\n' ||
*str == '\r' || *str == '\v' || *str == '\f') {
str++;
length--;

@@ -13327,7 +13327,7 @@ static void capture_suspend(upb_json_parser *p, const char **ptr) {
if (multipart_text(p, p->capture, *ptr - p->capture, false)) {
/* We use this as a signal that we were in the middle of capturing, and
* that capturing should resume at the beginning of the next buffer.
*
* We can't use *ptr here, because we have no guarantee that this pointer
* will be valid when we resume (if the underlying memory is freed, then
* using the pointer at all, even to compare to NULL, is likely undefined
@@ -13877,7 +13877,7 @@ static bool end_any_stringval(upb_json_parser *p) {
}
json_parser_any_frame_set_payload_type(p, p->top->any_frame, payload_type);
return true;
} else {
upb_status_seterrf(
@@ -15134,242 +15134,242 @@ static bool is_fieldmask_object(upb_json_parser *p) {
#line 2556 "upb/json/parser.c"
static const char _json_actions[] = {
0, 1, 0, 1, 1, 1, 3, 1,
4, 1, 6, 1, 7, 1, 8, 1,
9, 1, 10, 1, 11, 1, 12, 1,
13, 1, 24, 1, 26, 1, 28, 1,
29, 1, 31, 1, 32, 1, 33, 1,
35, 1, 37, 1, 38, 1, 39, 1,
40, 1, 42, 1, 43, 2, 4, 9,
2, 5, 6, 2, 7, 3, 2, 7,
9, 2, 14, 15, 2, 16, 17, 2,
18, 19, 2, 21, 23, 2, 22, 20,
2, 27, 25, 2, 29, 31, 2, 34,
2, 2, 35, 43, 2, 36, 25, 2,
38, 43, 2, 39, 43, 2, 40, 43,
2, 41, 30, 2, 42, 43, 3, 21,
23, 24, 4, 14, 15, 16, 17
};
static const short _json_key_offsets[] = {
0, 0, 12, 13, 18, 23, 28, 29,
30, 31, 32, 33, 34, 35, 36, 37,
38, 43, 44, 48, 53, 58, 63, 67,
71, 74, 77, 79, 83, 87, 89, 91,
96, 98, 100, 109, 115, 121, 127, 133,
135, 139, 142, 144, 146, 149, 150, 154,
156, 158, 160, 162, 163, 165, 167, 168,
170, 172, 173, 175, 177, 178, 180, 182,
183, 185, 187, 191, 193, 195, 196, 197,
198, 199, 201, 206, 208, 210, 212, 221,
222, 222, 222, 227, 232, 237, 238, 239,
240, 241, 241, 242, 243, 244, 244, 245,
246, 247, 247, 252, 253, 257, 262, 267,
272, 276, 276, 279, 282, 285, 288, 291,
294, 294, 294, 294, 294, 294
};
static const char _json_trans_keys[] = {
32, 34, 45, 91, 102, 110, 116, 123,
9, 13, 48, 57, 34, 32, 93, 125,
9, 13, 32, 44, 93, 9, 13, 32,
93, 125, 9, 13, 97, 108, 115, 101,
117, 108, 108, 114, 117, 101, 32, 34,
125, 9, 13, 34, 32, 58, 9, 13,
32, 93, 125, 9, 13, 32, 44, 125,
9, 13, 32, 44, 125, 9, 13, 32,
34, 9, 13, 45, 48, 49, 57, 48,
49, 57, 46, 69, 101, 48, 57, 69,
101, 48, 57, 43, 45, 48, 57, 48,
57, 48, 57, 46, 69, 101, 48, 57,
34, 92, 34, 92, 34, 47, 92, 98,
102, 110, 114, 116, 117, 48, 57, 65,
70, 97, 102, 48, 57, 65, 70, 97,
102, 48, 57, 65, 70, 97, 102, 48,
57, 65, 70, 97, 102, 34, 92, 45,
48, 49, 57, 48, 49, 57, 46, 115,
48, 57, 115, 48, 57, 34, 46, 115,
48, 57, 48, 57, 48, 57, 48, 57,
48, 57, 45, 48, 57, 48, 57, 45,
48, 57, 48, 57, 84, 48, 57, 48,
57, 58, 48, 57, 48, 57, 58, 48,
57, 48, 57, 43, 45, 46, 90, 48,
57, 48, 57, 58, 48, 48, 34, 48,
57, 43, 45, 90, 48, 57, 34, 44,
34, 44, 34, 44, 34, 45, 91, 102,
110, 116, 123, 48, 57, 34, 32, 93,
125, 9, 13, 32, 44, 93, 9, 13,
32, 93, 125, 9, 13, 97, 108, 115,
101, 117, 108, 108, 114, 117, 101, 32,
34, 125, 9, 13, 34, 32, 58, 9,
13, 32, 93, 125, 9, 13, 32, 44,
125, 9, 13, 32, 44, 125, 9, 13,
32, 34, 9, 13, 32, 9, 13, 32,
9, 13, 32, 9, 13, 32, 9, 13,
32, 9, 13, 32, 9, 13, 0
};
static const char _json_single_lengths[] = {
0, 8, 1, 3, 3, 3, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
3, 1, 2, 3, 3, 3, 2, 2,
1, 3, 0, 2, 2, 0, 0, 3,
2, 2, 9, 0, 0, 0, 0, 2,
2, 1, 2, 0, 1, 1, 2, 0,
0, 0, 0, 1, 0, 0, 1, 0,
0, 1, 0, 0, 1, 0, 0, 1,
0, 0, 4, 0, 0, 1, 1, 1,
1, 0, 3, 2, 2, 2, 7, 1,
0, 0, 3, 3, 3, 1, 1, 1,
1, 0, 1, 1, 1, 0, 1, 1,
1, 0, 3, 1, 2, 3, 3, 3,
2, 0, 1, 1, 1, 1, 1, 1,
0, 0, 0, 0, 0, 0
};
static const char _json_range_lengths[] = {
0, 2, 0, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
1, 0, 1, 1, 1, 1, 1, 1,
1, 0, 1, 1, 1, 1, 1, 1,
0, 0, 0, 3, 3, 3, 3, 0,
1, 1, 0, 1, 1, 0, 1, 1,
1, 1, 1, 0, 1, 1, 0, 1,
1, 0, 1, 1, 0, 1, 1, 0,
1, 1, 0, 1, 1, 0, 0, 0,
0, 1, 1, 0, 0, 0, 1, 0,
0, 0, 1, 1, 1, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 0, 1, 1, 1, 1,
1, 0, 1, 1, 1, 1, 1, 1,
0, 0, 0, 0, 0, 0
};
static const short _json_index_offsets[] = {
0, 0, 11, 13, 18, 23, 28, 30,
32, 34, 36, 38, 40, 42, 44, 46,
48, 53, 55, 59, 64, 69, 74, 78,
82, 85, 89, 91, 95, 99, 101, 103,
108, 111, 114, 124, 128, 132, 136, 140,
143, 147, 150, 153, 155, 158, 160, 164,
166, 168, 170, 172, 174, 176, 178, 180,
182, 184, 186, 188, 190, 192, 194, 196,
198, 200, 202, 207, 209, 211, 213, 215,
217, 219, 221, 226, 229, 232, 235, 244,
246, 247, 248, 253, 258, 263, 265, 267,
269, 271, 272, 274, 276, 278, 279, 281,
283, 285, 286, 291, 293, 297, 302, 307,
312, 316, 317, 320, 323, 326, 329, 332,
335, 336, 337, 338, 339, 340
};
static const unsigned char _json_indicies[] = {
0, 2, 3, 4, 5, 6, 7, 8,
0, 3, 1, 9, 1, 11, 12, 1,
11, 10, 13, 14, 12, 13, 1, 14,
1, 1, 14, 10, 15, 1, 16, 1,
17, 1, 18, 1, 19, 1, 20, 1,
21, 1, 22, 1, 23, 1, 24, 1,
25, 26, 27, 25, 1, 28, 1, 29,
30, 29, 1, 30, 1, 1, 30, 31,
32, 33, 34, 32, 1, 35, 36, 27,
35, 1, 36, 26, 36, 1, 37, 38,
39, 1, 38, 39, 1, 41, 42, 42,
40, 43, 1, 42, 42, 43, 40, 44,
44, 45, 1, 45, 1, 45, 40, 41,
42, 42, 39, 40, 47, 48, 46, 50,
51, 49, 52, 52, 52, 52, 52, 52,
52, 52, 53, 1, 54, 54, 54, 1,
55, 55, 55, 1, 56, 56, 56, 1,
57, 57, 57, 1, 59, 60, 58, 61,
62, 63, 1, 64, 65, 1, 66, 67,
1, 68, 1, 67, 68, 1, 69, 1,
66, 67, 65, 1, 70, 1, 71, 1,
72, 1, 73, 1, 74, 1, 75, 1,
76, 1, 77, 1, 78, 1, 79, 1,
80, 1, 81, 1, 82, 1, 83, 1,
84, 1, 85, 1, 86, 1, 87, 1,
88, 1, 89, 89, 90, 91, 1, 92,
1, 93, 1, 94, 1, 95, 1, 96,
1, 97, 1, 98, 1, 99, 99, 100,
98, 1, 102, 1, 101, 104, 105, 103,
1, 1, 101, 106, 107, 108, 109, 110,
111, 112, 107, 1, 113, 1, 114, 115,
117, 118, 1, 117, 116, 119, 120, 118,
119, 1, 120, 1, 1, 120, 116, 121,
1, 122, 1, 123, 1, 124, 1, 125,
126, 1, 127, 1, 128, 1, 129, 130,
1, 131, 1, 132, 1, 133, 134, 135,
136, 134, 1, 137, 1, 138, 139, 138,
1, 139, 1, 1, 139, 140, 141, 142,
143, 141, 1, 144, 145, 136, 144, 1,
145, 135, 145, 1, 146, 147, 147, 1,
148, 148, 1, 149, 149, 1, 150, 150,
1, 151, 151, 1, 152, 152, 1, 1,
1, 1, 1, 1, 1, 0
};
static const char _json_trans_targs[] = {
1, 0, 2, 107, 3, 6, 10, 13,
16, 106, 4, 3, 106, 4, 5, 7,
8, 9, 108, 11, 12, 109, 14, 15,
110, 16, 17, 111, 18, 18, 19, 20,
21, 22, 111, 21, 22, 24, 25, 31,
112, 26, 28, 27, 29, 30, 33, 113,
34, 33, 113, 34, 32, 35, 36, 37,
38, 39, 33, 113, 34, 41, 42, 46,
42, 46, 43, 45, 44, 114, 48, 49,
50, 51, 52, 53, 54, 55, 56, 57,
58, 59, 60, 61, 62, 63, 64, 65,
66, 67, 73, 72, 68, 69, 70, 71,
72, 115, 74, 67, 72, 76, 116, 76,
116, 77, 79, 81, 82, 85, 90, 94,
98, 80, 117, 117, 83, 82, 80, 83,
84, 86, 87, 88, 89, 117, 91, 92,
93, 117, 95, 96, 97, 117, 98, 99,
105, 100, 100, 101, 102, 103, 104, 105,
103, 104, 117, 106, 106, 106, 106, 106,
106
};
static const char _json_trans_actions[] = {
0, 0, 92, 86, 35, 0, 0, 0,
104, 41, 27, 0, 37, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 80, 33, 29, 0, 0, 27,
31, 31, 83, 0, 0, 0, 0, 0,
3, 0, 0, 0, 0, 0, 5, 15,
0, 0, 53, 7, 13, 0, 56, 9,
9, 9, 59, 62, 11, 17, 17, 17,
0, 0, 0, 19, 0, 21, 23, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 114, 65, 114, 0, 0, 0, 0,
0, 71, 0, 68, 68, 77, 25, 0,
110, 74, 92, 86, 35, 0, 0, 0,
104, 41, 51, 89, 27, 0, 37, 0,
0, 0, 0, 0, 0, 98, 0, 0,
0, 101, 0, 0, 0, 95, 0, 80,
33, 29, 0, 0, 27, 31, 31, 83,
0, 0, 107, 0, 39, 45, 47, 43,
49
};
static const char _json_eof_actions[] = {
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 1, 0, 0, 1, 1,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 39, 45, 47, 43, 49,
0, 0, 0, 0, 0, 0
};
@@ -15406,7 +15406,7 @@ size_t parse(void *closure, const void *hd, const char *buf, size_t size,
capture_resume(parser, buf);
#line 2831 "upb/json/parser.c"
{
int _klen;
@@ -15785,7 +15785,7 @@ static void json_parser_reset(upb_json_parser *p) {
p->top->is_unknown_field = false;
/* Emit Ragel initialization of the parser. */
#line 3210 "upb/json/parser.c"
{
cs = json_start;

@@ -5042,7 +5042,7 @@ struct Func5 : public UnboundFunc {
/* BoundFunc2, BoundFunc3: Like Func2/Func3 except also contains a value that
* shall be bound to the function's second parameter.
*
* Note that the second parameter is a const pointer, but our stored bound value
* is non-const so we can free it when the handlers are destroyed. */
template <class T>

@@ -446,7 +446,7 @@ class GPBWire
if (bccomp($value, 0) < 0 ||
bccomp($value, "9223372036854775807") > 0) {
return 10;
}
if (bccomp($value, 1 << 7) < 0) {
return 1;
}
@@ -475,7 +475,7 @@ class GPBWire
} else {
if ($value < 0) {
return 10;
}
if ($value < (1 << 7)) {
return 1;
}
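The GPBWire context above is walking a size ladder for varint encoding: anything negative or larger than the signed 64-bit maximum costs the full 10 bytes, and otherwise the encoded length grows by one byte for every 7 bits of magnitude. A minimal Python sketch of that rule, purely illustrative (the helper name varint_size is not from the PHP runtime shown in the diff):

# Hypothetical illustration of the varint size ladder seen in the GPBWire hunk above.
def varint_size(value: int) -> int:
    # Negative values and values above 2**63 - 1 always take the maximum 10 bytes.
    if value < 0 or value > 0x7FFFFFFFFFFFFFFF:
        return 10
    # Each encoded byte carries 7 bits of payload, so grow until the value fits.
    size = 1
    while value >= 1 << (7 * size):
        size += 1
    return size

assert varint_size(127) == 1   # fits in a single 7-bit payload
assert varint_size(128) == 2   # needs a second byte
assert varint_size(-1) == 10   # negative values take the full 10 bytes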

@@ -80,7 +80,7 @@ class FooUnitTest(unittest.TestCase):
self.assertEqual('Method Bar not implemented.',
rpc_controller.failure_message)
self.assertEqual(None, self.callback_response)
class MyServiceImpl(unittest_pb2.TestService):
def Foo(self, rpc_controller, request, done):
self.foo_called = True

@@ -263,7 +263,7 @@ def _ModifyClass(class_object, testcases, naming_type):
'Cannot add parameters to %s,'
' which already has parameterized methods.' % (class_object,))
class_object._id_suffix = id_suffix = {}
# We change the size of __dict__ while we iterate over it,
# which Python 3.x will complain about, so use copy().
for name, obj in class_object.__dict__.copy().items():
if (name.startswith(unittest.TestLoader.testMethodPrefix)
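The copy() in the _ModifyClass hunk above is what keeps that loop legal: adding attributes to a class while iterating its live __dict__ raises "RuntimeError: dictionary changed size during iteration" on Python 3, so the loop walks a snapshot instead. A rough standalone sketch of the same pattern, with a made-up Holder class for illustration:

# Hypothetical illustration of iterating a snapshot of __dict__ while mutating the class.
class Holder(object):
    value = 1

for name, obj in Holder.__dict__.copy().items():   # copy() gives a safe snapshot
    if name == 'value':
        setattr(Holder, name + '_twice', obj * 2)   # grows the real __dict__ mid-loop

assert Holder.value_twice == 2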

@@ -4,7 +4,7 @@ envlist =
[testenv]
usedevelop=true
passenv =
CC KOKORO_BUILD_ID KOKORO_BUILD_NUMBER
setenv =
cpp: LD_LIBRARY_PATH={toxinidir}/../src/.libs

@@ -12,7 +12,7 @@ Installation from Gem
In Gemfile (Please check a version of Protocol Buffers you needed [RubyGems](https://rubygems.org/gems/google-protobuf)):
gem 'google-protobuf'
Or for using this pre-packaged gem, simply install it as you would any other gem:
$ gem install [--prerelease] google-protobuf

Some files were not shown because too many files have changed in this diff.
