Remove all autotools usage (#10132)
* Bazelfying conformance tests: adding infrastructure to "Bazelify" languages other than Java and C++
* Delete benchmarks for languages supported by other repositories
* Bazelfying benchmark tests
* Bazelfying Python: use upb's system Python rule instead of branching TensorFlow
* Bazelfying Ruby
* Bazelfying C#
* Bazelfying Objective-C
* Bazelfying Kokoro mac builds
* Bazelfying Kokoro linux builds
* Deleting all deprecated files from the autotools cleanup; this boils down to Makefile.am and tests.sh and all of their remaining references
* Cleanup after PR reorganizing:
  - Enable 32-bit tests
  - Move conformance tests back
  - Use select statements to select alternate runtimes
  - Add internal prefixes to proto library macros
* Updating READMEs to use Bazel instead of autotools
* Bazelfying Kokoro release builds
* First round of review fixes
* Second round of review fixes
* Third round of review fixes
* Filtering out conformance tests from Bazel on Windows (b/241484899)
* Add version metadata that was previously scraped from configure.ac
* Fixing typo from previous fix
* Adding ruby version tests
* Bumping pinned upb version, and adding tests to Python CI
parent 13b3647016
commit ed5c57a574
245 changed files with 3982 additions and 10338 deletions
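As a rough sketch of the workflow this commit changes: the autogen.sh/configure/make entry points removed below are replaced by Bazel invocations. The exact targets and flags here are illustrative assumptions, not taken from this diff:

# Before: autotools flow removed by this commit
./autogen.sh && ./configure && make && make check

# After: Bazel drives builds and tests (target names illustrative)
bazel build :protoc                # build the protocol compiler
bazel test //src/... //java/...    # run a language runtime's unit tests
bazel test //benchmarks/...        # benchmark rules added below are tagged "benchmark"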
File diff suppressed because it is too large
@ -1,44 +0,0 @@
#!/bin/sh

# Run this script to generate the configure script and other files that will
# be included in the distribution. These files are not checked in because they
# are automatically generated.

set -e

if [ ! -z "$@" ]; then
  for argument in "$@"; do
    case $argument in
      # make curl silent
      "-s")
        curlopts="-s"
        ;;
    esac
  done
fi

# Check that we're being run from the right directory.
if test ! -f src/google/protobuf/stubs/common.h; then
  cat >&2 << __EOF__
Could not find source code. Make sure you are running this script from the
root of the distribution tree.
__EOF__
  exit 1
fi

set -ex

# The absence of a m4 directory in googletest causes autoreconf to fail when
# building under the CentOS docker image. It's a warning in regular build on
# Ubuntu/gLinux as well. (This is only needed if git submodules have been
# initialized, which is typically only needed for testing; see the installation
# instructions for details.)
if test -d third_party/googletest; then
  mkdir -p third_party/googletest/m4
fi

# TODO(kenton): Remove the ",no-obsolete" part and fix the resulting warnings.
autoreconf -f -i -Wall,no-obsolete

rm -rf autom4te.cache config.h.in~
exit 0
@ -1,664 +0,0 @@
benchmarks_protoc_inputs_benchmark_wrapper = \
  benchmarks.proto

benchmarks_protoc_inputs = \
  datasets/google_message1/proto3/benchmark_message1_proto3.proto

benchmarks_protoc_inputs_proto2 = \
  datasets/google_message1/proto2/benchmark_message1_proto2.proto \
  datasets/google_message2/benchmark_message2.proto \
  datasets/google_message3/benchmark_message3.proto \
  datasets/google_message3/benchmark_message3_1.proto \
  datasets/google_message3/benchmark_message3_2.proto \
  datasets/google_message3/benchmark_message3_3.proto \
  datasets/google_message3/benchmark_message3_4.proto \
  datasets/google_message3/benchmark_message3_5.proto \
  datasets/google_message3/benchmark_message3_6.proto \
  datasets/google_message3/benchmark_message3_7.proto \
  datasets/google_message3/benchmark_message3_8.proto \
  datasets/google_message4/benchmark_message4.proto \
  datasets/google_message4/benchmark_message4_1.proto \
  datasets/google_message4/benchmark_message4_2.proto \
  datasets/google_message4/benchmark_message4_3.proto

make_tmp_dir:
	mkdir -p 'tmp/java/src/main/java'
	touch make_tmp_dir

# We have to cd to $(srcdir) before executing protoc because $(protoc_inputs) is
# relative to srcdir, which may not be the same as the current directory when
# building out-of-tree.
protoc_middleman: make_tmp_dir $(top_srcdir)/src/protoc$(EXEEXT) $(benchmarks_protoc_inputs) $(well_known_type_protoc_inputs) $(benchmarks_protoc_inputs_benchmark_wrapper)
	oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --cpp_out=$$oldpwd/cpp --java_out=$$oldpwd/tmp/java/src/main/java --python_out=$$oldpwd/tmp $(benchmarks_protoc_inputs) $(benchmarks_protoc_inputs_benchmark_wrapper) )
	touch protoc_middleman

protoc_middleman2: make_tmp_dir $(top_srcdir)/src/protoc$(EXEEXT) $(benchmarks_protoc_inputs_proto2) $(well_known_type_protoc_inputs)
	oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --cpp_out=$$oldpwd/cpp --java_out=$$oldpwd/tmp/java/src/main/java --python_out=$$oldpwd/tmp $(benchmarks_protoc_inputs_proto2) )
	touch protoc_middleman2

all_data = $$(find $$(cd $(srcdir) && pwd) -type f -name "dataset.*.pb" -not -path "$$(cd $(srcdir) && pwd)/tmp/*")

############# CPP RULES ##############

benchmarks_protoc_outputs = \
  cpp/benchmarks.pb.cc \
  cpp/datasets/google_message1/proto3/benchmark_message1_proto3.pb.cc

benchmarks_protoc_outputs_header = \
  cpp/benchmarks.pb.h \
  cpp/datasets/google_message1/proto3/benchmark_message1_proto3.pb.h

benchmarks_protoc_outputs_proto2_header = \
  cpp/datasets/google_message1/proto2/benchmark_message1_proto2.pb.h \
  cpp/datasets/google_message2/benchmark_message2.pb.h \
  cpp/datasets/google_message3/benchmark_message3.pb.h \
  cpp/datasets/google_message3/benchmark_message3_1.pb.h \
  cpp/datasets/google_message3/benchmark_message3_2.pb.h \
  cpp/datasets/google_message3/benchmark_message3_3.pb.h \
  cpp/datasets/google_message3/benchmark_message3_4.pb.h \
  cpp/datasets/google_message3/benchmark_message3_5.pb.h \
  cpp/datasets/google_message3/benchmark_message3_6.pb.h \
  cpp/datasets/google_message3/benchmark_message3_7.pb.h \
  cpp/datasets/google_message3/benchmark_message3_8.pb.h \
  cpp/datasets/google_message4/benchmark_message4.pb.h \
  cpp/datasets/google_message4/benchmark_message4_1.pb.h \
  cpp/datasets/google_message4/benchmark_message4_2.pb.h \
  cpp/datasets/google_message4/benchmark_message4_3.pb.h

benchmarks_protoc_outputs_proto2 = \
  cpp/datasets/google_message1/proto2/benchmark_message1_proto2.pb.cc \
  cpp/datasets/google_message2/benchmark_message2.pb.cc \
  cpp/datasets/google_message3/benchmark_message3.pb.cc \
  cpp/datasets/google_message3/benchmark_message3_1.pb.cc \
  cpp/datasets/google_message3/benchmark_message3_2.pb.cc \
  cpp/datasets/google_message3/benchmark_message3_3.pb.cc \
  cpp/datasets/google_message3/benchmark_message3_4.pb.cc \
  cpp/datasets/google_message3/benchmark_message3_5.pb.cc \
  cpp/datasets/google_message3/benchmark_message3_6.pb.cc \
  cpp/datasets/google_message3/benchmark_message3_7.pb.cc \
  cpp/datasets/google_message3/benchmark_message3_8.pb.cc \
  cpp/datasets/google_message4/benchmark_message4.pb.cc \
  cpp/datasets/google_message4/benchmark_message4_1.pb.cc \
  cpp/datasets/google_message4/benchmark_message4_2.pb.cc \
  cpp/datasets/google_message4/benchmark_message4_3.pb.cc

$(benchmarks_protoc_outputs): protoc_middleman
$(benchmarks_protoc_outputs_header): protoc_middleman
$(benchmarks_protoc_outputs_proto2): protoc_middleman2
$(benchmarks_protoc_outputs_proto2_header): protoc_middleman2

initialize_submodule:
	oldpwd=`pwd`
	cd $(top_srcdir) && git submodule update --init -r third_party/benchmark && \
	cd third_party/benchmark && cmake -DCMAKE_BUILD_TYPE=Release && make
	cd $$oldpwd
	touch initialize_submodule

$(top_srcdir)/third_party/benchmark/src/libbenchmark.a: initialize_submodule

AM_CXXFLAGS = $(NO_OPT_CXXFLAGS) $(PROTOBUF_OPT_FLAG) -Wall -Wwrite-strings -Woverloaded-virtual -Wno-sign-compare

bin_PROGRAMS = cpp-benchmark

cpp_benchmark_LDADD = $(top_srcdir)/src/libprotobuf.la $(top_srcdir)/third_party/benchmark/src/libbenchmark.a
cpp_benchmark_SOURCES = cpp/cpp_benchmark.cc
cpp_benchmark_CPPFLAGS = -I$(top_srcdir)/src -I$(srcdir)/cpp -I$(top_srcdir)/third_party/benchmark/include
# Explicit deps because BUILT_SOURCES are only done before a "make all/check"
# so a direct "make test_cpp" could fail if parallel enough.
# See: https://www.gnu.org/software/automake/manual/html_node/Built-Sources-Example.html#Recording-Dependencies-manually
cpp/cpp_benchmark-cpp_benchmark.$(OBJEXT): $(benchmarks_protoc_outputs) $(benchmarks_protoc_outputs_proto2) $(benchmarks_protoc_outputs_header) $(benchmarks_protoc_outputs_proto2_header) $(top_srcdir)/src/libprotobuf.la $(top_srcdir)/third_party/benchmark/src/libbenchmark.a
cpp/benchmark-cpp_benchmark.$(OBJEXT): $(benchmarks_protoc_outputs) $(benchmarks_protoc_outputs_proto2) $(benchmarks_protoc_outputs_header) $(benchmarks_protoc_outputs_proto2_header) $(top_srcdir)/src/libprotobuf.la $(top_srcdir)/third_party/benchmark/src/libbenchmark.a
nodist_cpp_benchmark_SOURCES = \
  $(benchmarks_protoc_outputs) \
  $(benchmarks_protoc_outputs_proto2) \
  $(benchmarks_protoc_outputs_proto2_header) \
  $(benchmarks_protoc_outputs_header)

cpp: protoc_middleman protoc_middleman2 cpp-benchmark initialize_submodule
	./cpp-benchmark $(all_data)

############ CPP RULES END ############

############# JAVA RULES ##############

java_benchmark_testing_files = \
  java/src/main/java/com/google/protobuf/ProtoCaliperBenchmark.java

javac_middleman: $(java_benchmark_testing_files) protoc_middleman protoc_middleman2
	cp -r $(srcdir)/java tmp
	mkdir -p tmp/java/lib
	cp $(top_srcdir)/java/core/target/*.jar tmp/java/lib/protobuf-java.jar
	cd tmp/java && mvn clean compile assembly:single -Dprotobuf.version=$(PACKAGE_VERSION) && cd ../..
	@touch javac_middleman

java-benchmark: javac_middleman
	@echo "Writing shortcut script java-benchmark..."
	@echo '#! /bin/bash' > java-benchmark
	@echo 'all_data=""' >> java-benchmark
	@echo 'conf=()' >> java-benchmark
	@echo 'data_files=""' >> java-benchmark
	@echo 'for arg in $$@; do if [[ $${arg:0:1} == "-" ]]; then conf+=($$arg); else data_files+="$$arg,"; fi; done' >> java-benchmark
	@echo 'java -cp '\"tmp/java/target/*:$(top_srcdir)/java/core/target/*:$(top_srcdir)/java/util/target/*\"" \\" >>java-benchmark
	@echo ' com.google.caliper.runner.CaliperMain com.google.protobuf.ProtoCaliperBenchmark -i runtime '"\\" >> java-benchmark
	@echo ' -b serializeToByteArray,serializeToMemoryStream,deserializeFromByteArray,deserializeFromMemoryStream '"\\" >> java-benchmark
	@echo ' -DdataFile=$${data_files:0:-1} $${conf[*]}' >> java-benchmark
	@chmod +x java-benchmark

java: protoc_middleman protoc_middleman2 java-benchmark
	./java-benchmark $(all_data)

############# JAVA RULES END ##############


############# PYTHON RULES ##############

python_add_init: protoc_middleman protoc_middleman2
	all_file=`find tmp -type f -regex '.*\.py'` && \
	for file in $${all_file[@]}; do \
	  path="$${file%/*}"; \
	  while true; do \
	    touch "$$path/__init__.py" && chmod +x "$$path/__init__.py"; \
	    if [[ $$path != *"/"* ]]; then break; fi; \
	    path=$${path%/*}; \
	  done \
	done

python_cpp_pkg_flags = `pkg-config --cflags --libs python3`

lib_LTLIBRARIES = libbenchmark_messages.la
libbenchmark_messages_la_SOURCES = python/python_benchmark_messages.cc
libbenchmark_messages_la_LIBADD = $(top_srcdir)/src/.libs/libprotobuf.la
libbenchmark_messages_la_LDFLAGS = -version-info 1:0:0 -export-dynamic
libbenchmark_messages_la_CPPFLAGS = -I$(top_srcdir)/src -I$(srcdir)/cpp $(python_cpp_pkg_flags)
libbenchmark_messages_la-python_benchmark_messages.$(OBJEXT): $(benchmarks_protoc_outputs_header) $(benchmarks_protoc_outputs_proto2_header) $(benchmarks_protoc_outputs) $(benchmarks_protoc_outputs_proto2)
nodist_libbenchmark_messages_la_SOURCES = \
  $(benchmarks_protoc_outputs) \
  $(benchmarks_protoc_outputs_proto2) \
  $(benchmarks_protoc_outputs_proto2_header) \
  $(benchmarks_protoc_outputs_header)

python-pure-python-benchmark: python_add_init
	@echo "Writing shortcut script python-pure-python-benchmark..."
	@echo '#! /bin/bash' > python-pure-python-benchmark
	@echo export LD_LIBRARY_PATH=$(top_srcdir)/src/.libs >> python-pure-python-benchmark
	@echo export DYLD_LIBRARY_PATH=$(top_srcdir)/src/.libs >> python-pure-python-benchmark
	@echo export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=\'python\' >> python-pure-python-benchmark
	@echo cp $(srcdir)/python/py_benchmark.py tmp >> python-pure-python-benchmark
	@echo python3 tmp/py_benchmark.py '$$@' >> python-pure-python-benchmark
	@chmod +x python-pure-python-benchmark

python-cpp-reflection-benchmark: python_add_init
	@echo "Writing shortcut script python-cpp-reflection-benchmark..."
	@echo '#! /bin/bash' > python-cpp-reflection-benchmark
	@echo export LD_LIBRARY_PATH=$(top_srcdir)/src/.libs >> python-cpp-reflection-benchmark
	@echo export DYLD_LIBRARY_PATH=$(top_srcdir)/src/.libs >> python-cpp-reflection-benchmark
	@echo export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=\'cpp\' >> python-cpp-reflection-benchmark
	@echo cp $(srcdir)/python/py_benchmark.py tmp >> python-cpp-reflection-benchmark
	@echo python3 tmp/py_benchmark.py '$$@' >> python-cpp-reflection-benchmark
	@chmod +x python-cpp-reflection-benchmark

python-cpp-generated-code-benchmark: python_add_init libbenchmark_messages.la
	@echo "Writing shortcut script python-cpp-generated-code-benchmark..."
	@echo '#! /bin/bash' > python-cpp-generated-code-benchmark
	@echo export LD_LIBRARY_PATH=$(top_srcdir)/src/.libs >> python-cpp-generated-code-benchmark
	@echo export DYLD_LIBRARY_PATH=$(top_srcdir)/src/.libs >> python-cpp-generated-code-benchmark
	@echo export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=\'cpp\' >> python-cpp-generated-code-benchmark
	@echo cp $(srcdir)/python/py_benchmark.py tmp >> python-cpp-generated-code-benchmark
	@echo python3 tmp/py_benchmark.py --cpp_generated '$$@' >> python-cpp-generated-code-benchmark
	@chmod +x python-cpp-generated-code-benchmark

python-pure-python: python-pure-python-benchmark
	./python-pure-python-benchmark $(all_data)

python-cpp-reflection: python-cpp-reflection-benchmark
	./python-cpp-reflection-benchmark $(all_data)

python-cpp-generated-code: python-cpp-generated-code-benchmark
	./python-cpp-generated-code-benchmark $(all_data)

############# PYTHON RULES END ##############

############# GO RULES BEGIN ##############

benchmarks_protoc_inputs_proto2_message1 = \
  datasets/google_message1/proto2/benchmark_message1_proto2.proto

benchmarks_protoc_inputs_proto2_message2 = \
  datasets/google_message2/benchmark_message2.proto

benchmarks_protoc_inputs_proto2_message3 = \
  datasets/google_message3/benchmark_message3.proto \
  datasets/google_message3/benchmark_message3_1.proto \
  datasets/google_message3/benchmark_message3_2.proto \
  datasets/google_message3/benchmark_message3_3.proto \
  datasets/google_message3/benchmark_message3_4.proto \
  datasets/google_message3/benchmark_message3_5.proto \
  datasets/google_message3/benchmark_message3_6.proto \
  datasets/google_message3/benchmark_message3_7.proto \
  datasets/google_message3/benchmark_message3_8.proto

benchmarks_protoc_inputs_proto2_message4 = \
  datasets/google_message4/benchmark_message4.proto \
  datasets/google_message4/benchmark_message4_1.proto \
  datasets/google_message4/benchmark_message4_2.proto \
  datasets/google_message4/benchmark_message4_3.proto

go_protoc_middleman: make_tmp_dir $(top_srcdir)/src/protoc$(EXEEXT) $(benchmarks_protoc_inputs) $(well_known_type_protoc_inputs) $(benchmarks_protoc_inputs_proto2_message1) $(benchmarks_protoc_inputs_proto2_message2) $(benchmarks_protoc_inputs_proto2_message3) $(benchmarks_protoc_inputs_proto2_message4) $(well_known_type_protoc_inputs)
	oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --go_out=$$oldpwd/tmp $(benchmarks_protoc_inputs) )
	oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --go_out=$$oldpwd/tmp $(benchmarks_protoc_inputs_benchmark_wrapper) )
	oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --go_out=$$oldpwd/tmp $(benchmarks_protoc_inputs_proto2_message1) )
	oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --go_out=$$oldpwd/tmp $(benchmarks_protoc_inputs_proto2_message2) )
	oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --go_out=$$oldpwd/tmp $(benchmarks_protoc_inputs_proto2_message3) )
	oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --go_out=$$oldpwd/tmp $(benchmarks_protoc_inputs_proto2_message4) )
	touch go_protoc_middleman

go-benchmark: go_protoc_middleman
	@echo "Writing shortcut script go-benchmark..."
	@echo '#! /bin/bash' > go-benchmark
	@echo 'cd $(srcdir)/go' >> go-benchmark
	@echo 'all_data=""' >> go-benchmark
	@echo 'conf=()' >> go-benchmark
	@echo 'data_files=()' >> go-benchmark
	@echo 'for arg in $$@; do if [[ $${arg:0:1} == "-" ]]; then conf+=($$arg); else data_files+=("$$arg"); fi; done' >> go-benchmark
	@echo 'go test -bench=. $${conf[*]} -- $${data_files[*]}' >> go-benchmark
	@echo 'cd ..' >> go-benchmark
	@chmod +x go-benchmark

go: go_protoc_middleman go-benchmark
	./go-benchmark $(all_data)

############# GO RULES END ##############

############# GOGO RULES BEGIN ############

cpp_no_group_benchmarks_protoc_outputs_header = \
  gogo/cpp_no_group/benchmarks.pb.h \
  gogo/cpp_no_group/datasets/google_message1/proto3/benchmark_message1_proto3.pb.h

cpp_no_group_benchmarks_protoc_outputs = \
  gogo/cpp_no_group/benchmarks.pb.cc \
  gogo/cpp_no_group/datasets/google_message1/proto3/benchmark_message1_proto3.pb.cc

cpp_no_group_benchmarks_protoc_outputs_proto2_header = \
  gogo/cpp_no_group/datasets/google_message1/proto2/benchmark_message1_proto2.pb.h \
  gogo/cpp_no_group/datasets/google_message2/benchmark_message2.pb.h \
  gogo/cpp_no_group/datasets/google_message3/benchmark_message3.pb.h \
  gogo/cpp_no_group/datasets/google_message3/benchmark_message3_1.pb.h \
  gogo/cpp_no_group/datasets/google_message3/benchmark_message3_2.pb.h \
  gogo/cpp_no_group/datasets/google_message3/benchmark_message3_3.pb.h \
  gogo/cpp_no_group/datasets/google_message3/benchmark_message3_4.pb.h \
  gogo/cpp_no_group/datasets/google_message3/benchmark_message3_5.pb.h \
  gogo/cpp_no_group/datasets/google_message3/benchmark_message3_6.pb.h \
  gogo/cpp_no_group/datasets/google_message3/benchmark_message3_7.pb.h \
  gogo/cpp_no_group/datasets/google_message3/benchmark_message3_8.pb.h \
  gogo/cpp_no_group/datasets/google_message4/benchmark_message4.pb.h \
  gogo/cpp_no_group/datasets/google_message4/benchmark_message4_1.pb.h \
  gogo/cpp_no_group/datasets/google_message4/benchmark_message4_2.pb.h \
  gogo/cpp_no_group/datasets/google_message4/benchmark_message4_3.pb.h

cpp_no_group_benchmarks_protoc_outputs_proto2 = \
  gogo/cpp_no_group/datasets/google_message1/proto2/benchmark_message1_proto2.pb.cc \
  gogo/cpp_no_group/datasets/google_message2/benchmark_message2.pb.cc \
  gogo/cpp_no_group/datasets/google_message3/benchmark_message3.pb.cc \
  gogo/cpp_no_group/datasets/google_message3/benchmark_message3_1.pb.cc \
  gogo/cpp_no_group/datasets/google_message3/benchmark_message3_2.pb.cc \
  gogo/cpp_no_group/datasets/google_message3/benchmark_message3_3.pb.cc \
  gogo/cpp_no_group/datasets/google_message3/benchmark_message3_4.pb.cc \
  gogo/cpp_no_group/datasets/google_message3/benchmark_message3_5.pb.cc \
  gogo/cpp_no_group/datasets/google_message3/benchmark_message3_6.pb.cc \
  gogo/cpp_no_group/datasets/google_message3/benchmark_message3_7.pb.cc \
  gogo/cpp_no_group/datasets/google_message3/benchmark_message3_8.pb.cc \
  gogo/cpp_no_group/datasets/google_message4/benchmark_message4.pb.cc \
  gogo/cpp_no_group/datasets/google_message4/benchmark_message4_1.pb.cc \
  gogo/cpp_no_group/datasets/google_message4/benchmark_message4_2.pb.cc \
  gogo/cpp_no_group/datasets/google_message4/benchmark_message4_3.pb.cc

$(cpp_no_group_benchmarks_protoc_outputs): cpp_no_group_protoc_middleman
$(cpp_no_group_benchmarks_protoc_outputs_header): cpp_no_group_protoc_middleman
$(cpp_no_group_benchmarks_protoc_outputs_proto2): cpp_no_group_protoc_middleman
$(cpp_no_group_benchmarks_protoc_outputs_proto2_header): cpp_no_group_protoc_middleman

generate_cpp_no_group_benchmark_code:
	cp $(srcdir)/cpp/cpp_benchmark.cc gogo/cpp_no_group/cpp_benchmark.cc
	sed -i -e "s/\#include \"datasets/\#include \"gogo\/cpp_no_group\/datasets/g" gogo/cpp_no_group/cpp_benchmark.cc
	sed -i -e "s/\#include \"benchmarks.pb.h/\#include \"gogo\/cpp_no_group\/benchmarks.pb.h/g" gogo/cpp_no_group/cpp_benchmark.cc
	touch generate_cpp_no_group_benchmark_code

bin_PROGRAMS += cpp-no-group-benchmark
cpp_no_group_benchmark_LDADD = $(top_srcdir)/src/libprotobuf.la $(top_srcdir)/third_party/benchmark/src/libbenchmark.a
cpp_no_group_benchmark_SOURCES = gogo/cpp_no_group/cpp_benchmark.cc
cpp_no_group_benchmark_CPPFLAGS = -I$(top_srcdir)/src -I$(srcdir)/gogo/cpp_no_group -I$(top_srcdir)/third_party/benchmark/include
# Explicit deps because BUILT_SOURCES are only done before a "make all/check"
# so a direct "make test_cpp" could fail if parallel enough.
# See: https://www.gnu.org/software/automake/manual/html_node/Built-Sources-Example.html#Recording-Dependencies-manually
gogo/cpp_no_group/cpp_no_group_benchmark-cpp_benchmark.$(OBJEXT): $(cpp_no_group_benchmarks_protoc_outputs) $(cpp_no_group_benchmarks_protoc_outputs_proto2) $(cpp_no_group_benchmarks_protoc_outputs_header) \
  $(cpp_no_group_benchmarks_protoc_outputs_proto2_header) $(top_srcdir)/third_party/benchmark/src/libbenchmark.a generate_cpp_no_group_benchmark_code
gogo/cpp_no_group/cpp_benchmark.cc: generate_cpp_no_group_benchmark_code
nodist_cpp_no_group_benchmark_SOURCES = \
  $(cpp_no_group_benchmarks_protoc_outputs_proto2) \
  $(cpp_no_group_benchmarks_protoc_outputs) \
  $(cpp_no_group_benchmarks_protoc_outputs_header) \
  $(cpp_no_group_benchmarks_protoc_outputs_proto2_header)

cpp_no_group: cpp_no_group_protoc_middleman generate_gogo_data cpp-no-group-benchmark
	./cpp-no-group-benchmark $(gogo_data)

gogo_proto_middleman: protoc-gen-gogoproto
	mkdir -p "tmp/gogo_proto"
	oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I$(srcdir) -I$(top_srcdir) --plugin=protoc-gen-gogoproto --gogoproto_out=$$oldpwd/tmp/gogo_proto $(benchmarks_protoc_inputs) $(benchmarks_protoc_inputs_benchmark_wrapper) $(benchmarks_protoc_inputs_proto2) )
	touch gogo_proto_middleman

gogo_data = $$(for data in $(all_data); do echo "tmp/gogo_data$${data\#$(srcdir)}"; done | xargs)

generate_gogo_data: protoc_middleman protoc_middleman2 gogo-data-scrubber
	mkdir -p `dirname $(gogo_data)`
	./gogo-data-scrubber $(all_data) $(gogo_data)
	touch generate_gogo_data

make_tmp_dir_gogo:
	mkdir -p tmp/go_no_group/benchmark_code
	mkdir -p tmp/gogofast/benchmark_code
	mkdir -p tmp/gogofaster/benchmark_code
	mkdir -p tmp/gogoslick/benchmark_code
	touch make_tmp_dir_gogo

go_no_group_protoc_middleman: make_tmp_dir_gogo $(top_srcdir)/src/protoc$(EXEEXT) gogo_proto_middleman $(well_known_type_protoc_inputs)
	oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --go_out=$$oldpwd/tmp/go_no_group $(benchmarks_protoc_inputs) )
	oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --go_out=$$oldpwd/tmp/go_no_group $(benchmarks_protoc_inputs_benchmark_wrapper) )
	oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --go_out=$$oldpwd/tmp/go_no_group $(benchmarks_protoc_inputs_proto2_message1) )
	oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --go_out=$$oldpwd/tmp/go_no_group $(benchmarks_protoc_inputs_proto2_message2) )
	oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --go_out=$$oldpwd/tmp/go_no_group $(benchmarks_protoc_inputs_proto2_message3) )
	oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --go_out=$$oldpwd/tmp/go_no_group $(benchmarks_protoc_inputs_proto2_message4) )
	touch go_no_group_protoc_middleman

cpp_no_group_protoc_middleman: make_tmp_dir_gogo $(top_srcdir)/src/protoc$(EXEEXT) gogo_proto_middleman $(well_known_type_protoc_inputs)
	oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --cpp_out=$$oldpwd/gogo/cpp_no_group $(benchmarks_protoc_inputs) )
	oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --cpp_out=$$oldpwd/gogo/cpp_no_group $(benchmarks_protoc_inputs_benchmark_wrapper) )
	oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --cpp_out=$$oldpwd/gogo/cpp_no_group $(benchmarks_protoc_inputs_proto2_message1) )
	oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --cpp_out=$$oldpwd/gogo/cpp_no_group $(benchmarks_protoc_inputs_proto2_message2) )
	oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --cpp_out=$$oldpwd/gogo/cpp_no_group $(benchmarks_protoc_inputs_proto2_message3) )
	oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --cpp_out=$$oldpwd/gogo/cpp_no_group $(benchmarks_protoc_inputs_proto2_message4) )
	touch cpp_no_group_protoc_middleman

gogofast_protoc_middleman: make_tmp_dir_gogo $(top_srcdir)/src/protoc$(EXEEXT) gogo_proto_middleman $(well_known_type_protoc_inputs)
	oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogofast_out=$$oldpwd/tmp/gogofast $(benchmarks_protoc_inputs) )
	oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogofast_out=$$oldpwd/tmp/gogofast $(benchmarks_protoc_inputs_benchmark_wrapper) )
	oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogofast_out=$$oldpwd/tmp/gogofast $(benchmarks_protoc_inputs_proto2_message1) )
	oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogofast_out=$$oldpwd/tmp/gogofast $(benchmarks_protoc_inputs_proto2_message2) )
	oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogofast_out=$$oldpwd/tmp/gogofast $(benchmarks_protoc_inputs_proto2_message3) )
	oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogofast_out=$$oldpwd/tmp/gogofast $(benchmarks_protoc_inputs_proto2_message4) )
	touch gogofast_protoc_middleman

gogofaster_protoc_middleman: make_tmp_dir_gogo $(top_srcdir)/src/protoc$(EXEEXT) gogo_proto_middleman $(well_known_type_protoc_inputs)
	oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogofaster_out=$$oldpwd/tmp/gogofaster $(benchmarks_protoc_inputs) )
	oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogofaster_out=$$oldpwd/tmp/gogofaster $(benchmarks_protoc_inputs_benchmark_wrapper) )
	oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogofaster_out=$$oldpwd/tmp/gogofaster $(benchmarks_protoc_inputs_proto2_message1) )
	oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogofaster_out=$$oldpwd/tmp/gogofaster $(benchmarks_protoc_inputs_proto2_message2) )
	oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogofaster_out=$$oldpwd/tmp/gogofaster $(benchmarks_protoc_inputs_proto2_message3) )
	oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogofaster_out=$$oldpwd/tmp/gogofaster $(benchmarks_protoc_inputs_proto2_message4) )
	touch gogofaster_protoc_middleman

gogoslick_protoc_middleman: make_tmp_dir_gogo $(top_srcdir)/src/protoc$(EXEEXT) gogo_proto_middleman $(well_known_type_protoc_inputs)
	oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogoslick_out=$$oldpwd/tmp/gogoslick $(benchmarks_protoc_inputs) )
	oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogoslick_out=$$oldpwd/tmp/gogoslick $(benchmarks_protoc_inputs_benchmark_wrapper) )
	oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogoslick_out=$$oldpwd/tmp/gogoslick $(benchmarks_protoc_inputs_proto2_message1) )
	oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogoslick_out=$$oldpwd/tmp/gogoslick $(benchmarks_protoc_inputs_proto2_message2) )
	oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogoslick_out=$$oldpwd/tmp/gogoslick $(benchmarks_protoc_inputs_proto2_message3) )
	oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogoslick_out=$$oldpwd/tmp/gogoslick $(benchmarks_protoc_inputs_proto2_message4) )
	touch gogoslick_protoc_middleman

generate-gogo-benchmark-code:
	@echo '#! /bin/bash' > generate-gogo-benchmark-code
	@echo 'cp $(srcdir)/go/go_benchmark_test.go tmp/$$1/benchmark_code/$$1_benchmark1_test.go' >> generate-gogo-benchmark-code
	@echo 'sed -i -e "s/\.\.\/tmp/../g" tmp/$$1/benchmark_code/$$1_benchmark1_test.go' >> generate-gogo-benchmark-code
	@echo 'sed -i -e "s/b\.Run(\"\(.*\)\"/b.Run(\"\1\_$$1\"/g" tmp/$$1/benchmark_code/$$1_benchmark1_test.go' >> generate-gogo-benchmark-code
	@echo 'if [[ $$2 == 1 ]]; then sed -i -e "s/github\.com\/golang/github.com\/gogo/g" tmp/$$1/benchmark_code/$$1_benchmark1_test.go; fi ' >> generate-gogo-benchmark-code
	@chmod +x generate-gogo-benchmark-code

generate_all_gogo_benchmark_code: generate-gogo-benchmark-code make_tmp_dir_gogo
	./generate-gogo-benchmark-code go_no_group 0
	./generate-gogo-benchmark-code gogofast 1
	./generate-gogo-benchmark-code gogofaster 1
	./generate-gogo-benchmark-code gogoslick 1

gogo-benchmark:
	@echo "Writing shortcut script gogo-benchmark..."
	@echo '#! /bin/bash' > gogo-benchmark
	@echo 'cd tmp/$$1/benchmark_code' >> gogo-benchmark
	@echo 'shift' >> gogo-benchmark
	@echo 'all_data=""' >> gogo-benchmark
	@echo 'for data_file in $$@; do all_data="$$all_data ../../../$$data_file"; done' >> gogo-benchmark
	@echo 'go test -bench=. -- $$all_data' >> gogo-benchmark
	@echo 'cd ../..' >> gogo-benchmark
	@chmod +x gogo-benchmark

go_no_group: go_no_group_protoc_middleman generate_gogo_data generate_all_gogo_benchmark_code gogo-benchmark
	./gogo-benchmark go_no_group $(gogo_data)

gogofast: gogofast_protoc_middleman generate_gogo_data gogo-benchmark generate_all_gogo_benchmark_code
	./gogo-benchmark gogofast $(gogo_data)

gogofaster: gogofaster_protoc_middleman generate_gogo_data gogo-benchmark generate_all_gogo_benchmark_code
	./gogo-benchmark gogofaster $(gogo_data)

gogoslick: gogoslick_protoc_middleman generate_gogo_data gogo-benchmark generate_all_gogo_benchmark_code
	./gogo-benchmark gogoslick $(gogo_data)


############# GOGO RULES END ############


############ UTIL RULES BEGIN ############

bin_PROGRAMS += protoc-gen-gogoproto gogo-data-scrubber protoc-gen-proto2_to_proto3 proto3-data-stripper

protoc_gen_gogoproto_LDADD = $(top_srcdir)/src/libprotobuf.la $(top_srcdir)/src/libprotoc.la
protoc_gen_gogoproto_SOURCES = util/protoc-gen-gogoproto.cc
protoc_gen_gogoproto_CPPFLAGS = -I$(top_srcdir)/src -I$(srcdir)/cpp -I$(srcdir)/util

gogo_data_scrubber_LDADD = $(top_srcdir)/src/libprotobuf.la
gogo_data_scrubber_SOURCES = util/gogo_data_scrubber.cc
gogo_data_scrubber_CPPFLAGS = -I$(top_srcdir)/src -I$(srcdir)/cpp -I$(srcdir)/util
util/gogo_data_scrubber-gogo_data_scrubber.$(OBJEXT): $(benchmarks_protoc_outputs) $(benchmarks_protoc_outputs_proto2) $(benchmarks_protoc_outputs_header) $(benchmarks_protoc_outputs_proto2_header)
nodist_gogo_data_scrubber_SOURCES = \
  $(benchmarks_protoc_outputs) \
  $(benchmarks_protoc_outputs_proto2) \
  $(benchmarks_protoc_outputs_proto2_header) \
  $(benchmarks_protoc_outputs_header)

protoc_gen_proto2_to_proto3_LDADD = $(top_srcdir)/src/libprotobuf.la $(top_srcdir)/src/libprotoc.la
protoc_gen_proto2_to_proto3_SOURCES = util/protoc-gen-proto2_to_proto3.cc
protoc_gen_proto2_to_proto3_CPPFLAGS = -I$(top_srcdir)/src -I$(srcdir)/cpp -I$(srcdir)/util

proto3_data_stripper_LDADD = $(top_srcdir)/src/libprotobuf.la
proto3_data_stripper_SOURCES = util/proto3_data_stripper.cc
proto3_data_stripper_CPPFLAGS = -I$(top_srcdir)/src -I$(srcdir)/cpp -I$(srcdir)/util
util/proto3_data_stripper-proto3_data_stripper.$(OBJEXT): $(benchmarks_protoc_outputs) $(benchmarks_protoc_outputs_proto2) $(benchmarks_protoc_outputs_header) $(benchmarks_protoc_outputs_proto2_header)
nodist_proto3_data_stripper_SOURCES = \
  $(benchmarks_protoc_outputs) \
  $(benchmarks_protoc_outputs_proto2) \
  $(benchmarks_protoc_outputs_proto2_header) \
  $(benchmarks_protoc_outputs_header)


############ UTIL RULES END ############

############ PROTO3 PREPARATION BEGIN #############

proto3_proto_middleman: protoc-gen-proto2_to_proto3
	mkdir -p "tmp/proto3_proto"
	oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I$(srcdir) -I$(top_srcdir) --plugin=protoc-gen-proto2_to_proto3 --proto2_to_proto3_out=$$oldpwd/tmp/proto3_proto $(benchmarks_protoc_inputs) $(benchmarks_protoc_inputs_benchmark_wrapper) $(benchmarks_protoc_inputs_proto2) )
	touch proto3_proto_middleman

full_srcdir = $$(cd $(srcdir) && pwd)
proto3_data = $$(for data in $(all_data); do echo $(full_srcdir)"/tmp/proto3_data$${data\#$(full_srcdir)}"; done | xargs)

generate_proto3_data: protoc_middleman protoc_middleman2 proto3-data-stripper
	mkdir -p `dirname $(proto3_data)`
	./proto3-data-stripper $(all_data) $(proto3_data)
	touch generate_proto3_data

############ PROTO3 PREPARATION END #############

############ PHP RULES BEGIN #################

proto3_middleman_php: proto3_proto_middleman
	mkdir -p "tmp/php"
	oldpwd=`pwd` && ( cd tmp/proto3_proto && $$oldpwd/../src/protoc$(EXEEXT) -I$(srcdir) -I$(top_srcdir) --php_out=$$oldpwd/tmp/php $(benchmarks_protoc_inputs) $(benchmarks_protoc_inputs_benchmark_wrapper) $(benchmarks_protoc_inputs_proto2) )
	touch proto3_middleman_php

php-benchmark: proto3_middleman_php generate_proto3_data
	mkdir -p "tmp/php/Google/Protobuf/Benchmark" && cp php/PhpBenchmark.php "tmp/php/Google/Protobuf/Benchmark"
	cp php/autoload.php "tmp/php"
	@echo "Writing shortcut script php-benchmark..."
	@echo '#! /bin/bash' > php-benchmark
	@echo 'export PROTOBUF_PHP_SRCDIR="$$(cd $(top_srcdir) && pwd)/php/src"' >> php-benchmark
	@echo 'cd tmp/php' >> php-benchmark
	@echo 'export CURRENT_DIR=$$(pwd)' >> php-benchmark
	@echo 'php -d auto_prepend_file="autoload.php" -d include_path="$$(pwd)" Google/Protobuf/Benchmark/PhpBenchmark.php $$@' >> php-benchmark
	@echo 'cd ../..' >> php-benchmark
	@chmod +x php-benchmark

php: php-benchmark proto3_middleman_php
	./php-benchmark --behavior_prefix="php" $(proto3_data)

php_c_extension:
	cd $(top_srcdir)/php/ext/google/protobuf && phpize && ./configure CFLAGS='-O3' && make -j8

php-c-benchmark: proto3_middleman_php generate_proto3_data php_c_extension php_c_extension
	mkdir -p "tmp/php/Google/Protobuf/Benchmark" && cp php/PhpBenchmark.php "tmp/php/Google/Protobuf/Benchmark"
	cp php/autoload.php "tmp/php"
	@echo "Writing shortcut script php-c-benchmark..."
	@echo '#! /bin/bash' > php-c-benchmark
	@echo 'export PROTOBUF_PHP_SRCDIR="$$(cd $(top_srcdir) && pwd)/php/src"' >> php-c-benchmark
	@echo 'export PROTOBUF_PHP_EXTDIR="$$PROTOBUF_PHP_SRCDIR/../ext/google/protobuf/modules"' >> php-c-benchmark
	@echo 'cd tmp/php' >> php-c-benchmark
	@echo 'export CURRENT_DIR=$$(pwd)' >> php-c-benchmark
	@echo 'php -d auto_prepend_file="autoload.php" -d include_path="$$(pwd)" -d extension="$$PROTOBUF_PHP_EXTDIR/protobuf.so" Google/Protobuf/Benchmark/PhpBenchmark.php $$@' >> php-c-benchmark
	@echo 'cd ../..' >> php-c-benchmark
	@chmod +x php-c-benchmark

php_c: php-c-benchmark proto3_middleman_php
	./php-c-benchmark --behavior_prefix="php_c" $(proto3_data)


############ PHP RULES END #################

############ protobuf.js RULE BEGIN #############

pbjs_preparation:
	mkdir -p tmp/protobuf.js
	cd tmp/protobuf.js && git clone https://github.com/dcodeIO/protobuf.js.git && \
	cd protobuf.js && npm install && npm run build
	cd tmp/protobuf.js && npm install benchmark
	cp protobuf.js/* tmp/protobuf.js
	cp js/benchmark_suite.js tmp/protobuf.js
	touch pbjs_preparation

pbjs_middleman: pbjs_preparation
	export OLDDIR=$$(pwd) && cd tmp/protobuf.js && node generate_pbjs_files.js --target static-module --include_path=$$OLDDIR -o generated_bundle_code.js $(benchmarks_protoc_inputs) $(benchmarks_protoc_inputs_benchmark_wrapper) $(benchmarks_protoc_inputs_proto2)
	touch pbjs_middleman

pbjs-benchmark: pbjs_middleman
	@echo '#! /bin/bash' > pbjs-benchmark
	@echo 'cd tmp/protobuf.js' >> pbjs-benchmark
	@echo 'sed -i "s/protobufjs/.\/protobuf.js/g" generated_bundle_code.js' >> pbjs-benchmark
	@echo 'env NODE_PATH=".:./node_modules:$$NODE_PATH" node protobufjs_benchmark.js $$@' >> pbjs-benchmark
	@chmod +x pbjs-benchmark

pbjs: pbjs-benchmark
	./pbjs-benchmark $(all_data)

############ protobuf.js RULE END #############

############ JS RULE BEGIN #############

js_preparation:
	mkdir -p tmp/js
	oldpwd=$$(pwd) && cd $(top_srcdir)/js && npm install && npm test
	cd tmp/js && npm install benchmark
	cp js/* tmp/js
	touch js_preparation

js_middleman: js_preparation
	oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --js_out=import_style=commonjs,binary:$$oldpwd/tmp/js $(benchmarks_protoc_inputs) $(benchmarks_protoc_inputs_benchmark_wrapper) $(benchmarks_protoc_inputs_proto2))
	touch js_middleman

js-benchmark: js_middleman
	@echo '#! /bin/bash' > js-benchmark
	@echo 'export TOP_JS_SRCDIR=$$(cd $(top_srcdir)/js && pwd)' >> js-benchmark
	@echo 'cd tmp/js' >> js-benchmark
	@echo 'env NODE_PATH="$$TOP_JS_SRCDIR:.:./node_modules:$$NODE_PATH" node --max-old-space-size=4096 js_benchmark.js $$@' >> js-benchmark
	@chmod +x js-benchmark

js: js-benchmark
	./js-benchmark $(all_data)

############ JS RULE END #############

EXTRA_DIST = \
  $(benchmarks_protoc_inputs_benchmark_wrapper) \
  $(benchmarks_protoc_inputs) \
  $(benchmarks_protoc_inputs_proto2) \
  google_size.proto

MAINTAINERCLEANFILES = \
  Makefile.in

CLEANFILES = \
  $(benchmarks_protoc_outputs) \
  $(benchmarks_protoc_outputs_header) \
  $(benchmarks_protoc_outputs_proto2) \
  $(benchmarks_protoc_outputs_proto2_header) \
  initialize_submodule \
  make_tmp_dir \
  protoc_middleman \
  protoc_middleman2 \
  javac_middleman \
  java-benchmark \
  python_cpp_proto_library \
  python-pure-python-benchmark \
  python-cpp-reflection-benchmark \
  python-cpp-generated-code-benchmark \
  go-benchmark \
  go_protoc_middleman \
  make_tmp_dir_gogo \
  gogo_proto_middleman \
  generate_gogo_data \
  go_no_group_protoc_middleman \
  go_no_group \
  go-no-group-benchmark \
  $(cpp_no_group_benchmarks_protoc_outputs_header) \
  $(cpp_no_group_benchmarks_protoc_outputs) \
  $(cpp_no_group_benchmarks_protoc_outputs_proto2_header) \
  $(cpp_no_group_benchmarks_protoc_outputs_proto2) \
  generate_all_gogo_benchmark_code \
  generate-gogo-benchmark-code \
  cpp_no_group_protoc_middleman \
  generate_cpp_no_group_benchmark_code \
  generate_gogo_benchmark_code \
  gogofast_protoc_middleman \
  gogofast \
  gogofaster_protoc_middleman \
  gogofaster \
  gogoslick_protoc_middleman \
  gogoslick \
  gogo-benchmark \
  gogo/cpp_no_group/cpp_benchmark.* \
  proto3_proto_middleman \
  generate_proto3_data \
  php-benchmark \
  php-c-benchmark \
  proto3_middleman_php \
  pbjs_preparation \
  pbjs_middleman \
  pbjs-benchmark \
  js_preparation \
  js_middleman \
  js-benchmark

clean-local:
	-rm -rf tmp/*
@ -1,124 +0,0 @@
package main

import (
	benchmarkWrapper "../tmp"
	googleMessage1Proto2 "../tmp/datasets/google_message1/proto2"
	googleMessage1Proto3 "../tmp/datasets/google_message1/proto3"
	googleMessage2 "../tmp/datasets/google_message2"
	googleMessage3 "../tmp/datasets/google_message3"
	googleMessage4 "../tmp/datasets/google_message4"
	"flag"
	"github.com/golang/protobuf/proto"
	"io/ioutil"
	"testing"
)

// Data is returned by the Load function.
type Dataset struct {
	name        string
	newMessage  func() proto.Message
	marshaled   [][]byte
	unmarshaled []proto.Message
}

var datasets []Dataset

// This is used to getDefaultInstance for a message type.
func generateNewMessageFunction(dataset benchmarkWrapper.BenchmarkDataset) func() proto.Message {
	switch dataset.MessageName {
	case "benchmarks.proto3.GoogleMessage1":
		return func() proto.Message { return new(googleMessage1Proto3.GoogleMessage1) }
	case "benchmarks.proto2.GoogleMessage1":
		return func() proto.Message { return new(googleMessage1Proto2.GoogleMessage1) }
	case "benchmarks.proto2.GoogleMessage2":
		return func() proto.Message { return new(googleMessage2.GoogleMessage2) }
	case "benchmarks.google_message3.GoogleMessage3":
		return func() proto.Message { return new(googleMessage3.GoogleMessage3) }
	case "benchmarks.google_message4.GoogleMessage4":
		return func() proto.Message { return new(googleMessage4.GoogleMessage4) }
	default:
		panic("Unknown message type: " + dataset.MessageName)
	}
}

func init() {
	flag.Parse()
	for _, f := range flag.Args() {
		// Load the benchmark.
		b, err := ioutil.ReadFile(f)
		if err != nil {
			panic(err)
		}

		// Parse the benchmark.
		var dm benchmarkWrapper.BenchmarkDataset
		if err := proto.Unmarshal(b, &dm); err != nil {
			panic(err)
		}

		// Determine the concrete protobuf message type to use.
		var ds Dataset
		ds.newMessage = generateNewMessageFunction(dm)

		// Unmarshal each test message.
		for _, payload := range dm.Payload {
			ds.marshaled = append(ds.marshaled, payload)
			m := ds.newMessage()
			if err := proto.Unmarshal(payload, m); err != nil {
				panic(err)
			}
			ds.unmarshaled = append(ds.unmarshaled, m)
		}
		ds.name = f

		datasets = append(datasets, ds)
	}
}

func Benchmark(b *testing.B) {
	for _, ds := range datasets {
		b.Run(ds.name, func(b *testing.B) {
			b.Run("Unmarshal", func(b *testing.B) {
				for i := 0; i < b.N; i++ {
					for j, payload := range ds.marshaled {
						out := ds.newMessage()
						if err := proto.Unmarshal(payload, out); err != nil {
							b.Fatalf("can't unmarshal message %d %v", j, err)
						}
					}
				}
			})
			b.Run("Marshal", func(b *testing.B) {
				for i := 0; i < b.N; i++ {
					for j, m := range ds.unmarshaled {
						if _, err := proto.Marshal(m); err != nil {
							b.Fatalf("can't marshal message %d %+v: %v", j, m, err)
						}
					}
				}
			})
			b.Run("Size", func(b *testing.B) {
				for i := 0; i < b.N; i++ {
					for _, m := range ds.unmarshaled {
						proto.Size(m)
					}
				}
			})
			b.Run("Clone", func(b *testing.B) {
				for i := 0; i < b.N; i++ {
					for _, m := range ds.unmarshaled {
						proto.Clone(m)
					}
				}
			})
			b.Run("Merge", func(b *testing.B) {
				for i := 0; i < b.N; i++ {
					for _, m := range ds.unmarshaled {
						out := ds.newMessage()
						proto.Merge(out, m)
					}
				}
			})
		})
	}
}
@ -0,0 +1,37 @@
"""Starlark definitions for Protobuf benchmark tests.

PLEASE DO NOT DEPEND ON THE CONTENTS OF THIS FILE, IT IS UNSTABLE.
"""

load("//build_defs:internal_shell.bzl", "inline_sh_binary")

def internal_benchmark_test(
        name,
        binary,
        datasets,
        args = [],
        env_vars = []):
    """Benchmark test runner.

    Args:
      name: the name for the test.
      binary: a benchmark test binary.
      datasets: a set of datasets to benchmark.
      args: optional arguments to pass the binary.
      env_vars: environment variables to set in the test.
    """

    dataset_labels = []
    for dataset in datasets:
        dataset_labels.append("$(rootpaths %s)" % dataset)
    inline_sh_binary(
        name = name,
        srcs = datasets,
        tools = [binary],
        cmd = "%s $(rootpath %s) %s %s" % (
            " ".join(env_vars),
            binary,
            " ".join(args),
            " ".join(dataset_labels)),
        tags = ["benchmark"],
        testonly = 1,
    )
@ -0,0 +1,55 @@
load("@rules_java//java:defs.bzl", "java_library", "java_binary")
load("@rules_pkg//:mappings.bzl", "pkg_files", "strip_prefix")
load("//build_defs:internal_shell.bzl", "inline_sh_binary")
load("//benchmarks:internal.bzl", "internal_benchmark_test")

java_binary(
    name = "java_benchmark_jar",
    srcs = [
        "src/main/java/com/google/protobuf/ProtoCaliperBenchmark.java",
    ],
    main_class = "com.google.caliper.runner.CaliperMain",
    deps = [
        "//:protobuf_java",
        "//benchmarks:benchmarks_java_proto",
        "//benchmarks/datasets:java_protos",
        "@maven//:com_google_caliper_caliper",
        "@maven//:com_google_caliper_caliper_api",
    ],
)

# The benchmark binary which can be run over any dataset.
inline_sh_binary(
    name = "java_benchmark",
    srcs = ["//benchmarks/datasets"],
    tools = [":java_benchmark_jar"],
    cmd = """
data_files=""
conf=()
for arg in "$${@:1}"; do
  if [[ $${arg:0:1} == "-" ]]; then
    conf+=($$arg)
  else
    data_files+="$$arg,"
  fi
done
$(rootpath :java_benchmark_jar) com.google.protobuf.ProtoCaliperBenchmark \
  -i runtime -DdataFile=$${data_files:0:-1} $${conf[*]}
"""
)

# A pre-configured binary using the checked in datasets.
internal_benchmark_test(
    name = "java",
    binary = ":java_benchmark",
    datasets = ["//benchmarks/datasets"],
)

pkg_files(
    name = "dist_files",
    srcs = [
        "BUILD.bazel",
    ] + glob(["**/*.java"]),
    strip_prefix = strip_prefix.from_root(""),
    visibility = ["//benchmarks:__pkg__"],
)
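For context, a sketch of how the Bazel targets above might be invoked. The package path //benchmarks/java and the dataset path are assumptions for illustration; the diff viewer does not show the file's location:

# Run the general-purpose benchmark binary against an arbitrary dataset (path assumed)
bazel run //benchmarks/java:java_benchmark -- path/to/dataset.pb

# Run the pre-configured benchmark over the checked-in datasets
bazel run //benchmarks/java:java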
@ -1,98 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">

  <modelVersion>4.0.0</modelVersion>
  <artifactId>protobuf-java-benchmark</artifactId>
  <groupId>com.google.protobuf</groupId>
  <version>1.0.0</version>
  <name>Protocol Buffers [Benchmark]</name>
  <description>The benchmark tools for Protobuf Java.</description>

  <dependencies>
    <dependency>
      <groupId>com.google.protobuf</groupId>
      <artifactId>protobuf-java</artifactId>
      <version>${protobuf.version}</version>
      <type>jar</type>
      <scope>system</scope>
      <systemPath>${project.basedir}/lib/protobuf-java.jar</systemPath>
    </dependency>
    <dependency>
      <groupId>com.google.caliper</groupId>
      <artifactId>caliper</artifactId>
      <version>1.0-beta-3</version>
    </dependency>
  </dependencies>

  <build>
    <pluginManagement>
      <plugins>
        <plugin>
          <groupId>org.apache.maven.plugins</groupId>
          <artifactId>maven-assembly-plugin</artifactId>
          <version>2.4.1</version>
          <configuration>
            <!-- get all project dependencies -->
            <descriptorRefs>
              <descriptorRef>jar-with-dependencies</descriptorRef>
            </descriptorRefs>
            <!-- MainClass in manifest makes an executable jar -->
            <archive>
              <manifest>
                <mainClass>com.mkyong.core.utils.App</mainClass>
              </manifest>
            </archive>
          </configuration>
          <executions>
            <execution>
              <id>make-assembly</id>
              <!-- bind to the packaging phase -->
              <phase>package</phase>
              <goals>
                <goal>single</goal>
              </goals>
            </execution>
          </executions>
        </plugin>
        <plugin>
          <groupId>org.apache.maven.plugins</groupId>
          <artifactId>maven-compiler-plugin</artifactId>
          <version>3.5.1</version>
          <configuration>
            <source>1.8</source>
            <target>1.8</target>
          </configuration>
        </plugin>
        <plugin>
          <groupId>org.apache.maven.plugins</groupId>
          <artifactId>maven-jar-plugin</artifactId>
          <version>2.5</version>
          <configuration>
            <archive>
              <manifest>
                <addClasspath>true</addClasspath>
                <mainClass>com.google.protocolbuffers.ProtoBench</mainClass>
              </manifest>
            </archive>
          </configuration>
        </plugin>
        <plugin>
          <groupId>org.apache.maven.plugins</groupId>
          <artifactId>maven-source-plugin</artifactId>
          <version>2.4</version>
          <executions>
            <execution>
              <id>attach-sources</id>
              <goals>
                <goal>jar-no-fork</goal>
              </goals>
            </execution>
          </executions>
        </plugin>
      </plugins>
    </pluginManagement>
  </build>
</project>
@ -1,33 +0,0 @@ |
|||||||
var benchmark = require("benchmark"); |
|
||||||
|
|
||||||
function newBenchmark(messageName, filename, language) { |
|
||||||
var benches = []; |
|
||||||
return { |
|
||||||
suite: new benchmark.Suite(messageName + filename + language ) |
|
||||||
.on("add", function(event) { |
|
||||||
benches.push(event.target); |
|
||||||
}) |
|
||||||
.on("start", function() { |
|
||||||
process.stdout.write( |
|
||||||
"benchmarking message " + messageName |
|
||||||
+ " of dataset file " + filename |
|
||||||
+ "'s performance ..." + "\n\n"); |
|
||||||
}) |
|
||||||
.on("cycle", function(event) { |
|
||||||
process.stdout.write(String(event.target) + "\n"); |
|
||||||
}) |
|
||||||
.on("complete", function() { |
|
||||||
var getHz = function(bench) { |
|
||||||
return 1 / (bench.stats.mean + bench.stats.moe); |
|
||||||
} |
|
||||||
benches.forEach(function(val, index) { |
|
||||||
benches[index] = getHz(val); |
|
||||||
}); |
|
||||||
}), |
|
||||||
benches: benches |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
module.exports = { |
|
||||||
newBenchmark: newBenchmark |
|
||||||
} |
|
@ -1,82 +0,0 @@ |
|||||||
require('./datasets/google_message1/proto2/benchmark_message1_proto2_pb.js'); |
|
||||||
require('./datasets/google_message1/proto3/benchmark_message1_proto3_pb.js'); |
|
||||||
require('./datasets/google_message2/benchmark_message2_pb.js'); |
|
||||||
require('./datasets/google_message3/benchmark_message3_pb.js'); |
|
||||||
require('./datasets/google_message4/benchmark_message4_pb.js'); |
|
||||||
require('./benchmarks_pb.js'); |
|
||||||
|
|
||||||
var fs = require('fs'); |
|
||||||
var benchmarkSuite = require("./benchmark_suite.js"); |
|
||||||
|
|
||||||
|
|
||||||
function getNewPrototype(name) { |
|
||||||
var message = eval("proto." + name); |
|
||||||
if (typeof(message) == "undefined") { |
|
||||||
throw "type " + name + " is undefined"; |
|
||||||
} |
|
||||||
return message; |
|
||||||
} |
|
||||||
|
|
||||||
var results = []; |
|
||||||
var json_file = ""; |
|
||||||
|
|
||||||
console.log("#####################################################"); |
|
||||||
console.log("Js Benchmark: "); |
|
||||||
process.argv.forEach(function(filename, index) { |
|
||||||
if (index < 2) { |
|
||||||
return; |
|
||||||
} |
|
||||||
if (filename.indexOf("--json_output") != -1) { |
|
||||||
json_file = filename.replace(/^--json_output=/, ''); |
|
||||||
return; |
|
||||||
} |
|
||||||
|
|
||||||
var benchmarkDataset = |
|
||||||
proto.benchmarks.BenchmarkDataset.deserializeBinary(fs.readFileSync(filename)); |
|
||||||
var messageList = []; |
|
||||||
var totalBytes = 0; |
|
||||||
benchmarkDataset.getPayloadList().forEach(function(onePayload) { |
|
||||||
var message = getNewPrototype(benchmarkDataset.getMessageName()); |
|
||||||
messageList.push(message.deserializeBinary(onePayload)); |
|
||||||
totalBytes += onePayload.length; |
|
||||||
}); |
|
||||||
|
|
||||||
var scenarios = benchmarkSuite.newBenchmark( |
|
||||||
benchmarkDataset.getMessageName(), filename, "js"); |
|
||||||
scenarios.suite |
|
||||||
.add("js deserialize", function() { |
|
||||||
benchmarkDataset.getPayloadList().forEach(function(onePayload) { |
|
||||||
var protoType = getNewPrototype(benchmarkDataset.getMessageName()); |
|
||||||
protoType.deserializeBinary(onePayload); |
|
||||||
}); |
|
||||||
}) |
|
||||||
.add("js serialize", function() { |
|
||||||
var protoType = getNewPrototype(benchmarkDataset.getMessageName()); |
|
||||||
messageList.forEach(function(message) { |
|
||||||
message.serializeBinary(); |
|
||||||
}); |
|
||||||
}) |
|
||||||
.run({"Async": false}); |
|
||||||
|
|
||||||
results.push({ |
|
||||||
filename: filename, |
|
||||||
benchmarks: { |
|
||||||
protobufjs_decoding: scenarios.benches[0] * totalBytes / 1024 / 1024, |
|
||||||
protobufjs_encoding: scenarios.benches[1] * totalBytes / 1024 / 1024 |
|
||||||
} |
|
||||||
}) |
|
||||||
|
|
||||||
console.log("Throughput for deserialize: " |
|
||||||
+ scenarios.benches[0] * totalBytes / 1024 / 1024 + "MB/s" ); |
|
||||||
console.log("Throughput for serialize: " |
|
||||||
+ scenarios.benches[1] * totalBytes / 1024 / 1024 + "MB/s" ); |
|
||||||
console.log(""); |
|
||||||
}); |
|
||||||
console.log("#####################################################"); |
|
||||||
|
|
||||||
if (json_file != "") { |
|
||||||
fs.writeFile(json_file, JSON.stringify(results), (err) => { |
|
||||||
if (err) throw err; |
|
||||||
}); |
|
||||||
} |
|
||||||
|
|
@ -0,0 +1,69 @@ |
|||||||
|
load("//benchmarks:internal.bzl", "internal_benchmark_test") |
||||||
|
load("//build_defs:internal_shell.bzl", "inline_sh_binary") |
||||||
|
load("@rules_pkg//:mappings.bzl", "pkg_files", "strip_prefix") |
||||||
|
|
||||||
|
# The benchmark binary which can be run over any dataset. |
||||||
|
inline_sh_binary( |
||||||
|
name = "php_benchmark", |
||||||
|
srcs = [ |
||||||
|
"PhpBenchmark.php", |
||||||
|
"autoload.php", |
||||||
|
], |
||||||
|
deps = [ |
||||||
|
"//benchmarks:benchmarks_php_proto", |
||||||
|
"//benchmarks/datasets:php_protos", |
||||||
|
"//php:source_files", |
||||||
|
], |
||||||
|
cmd = """ |
||||||
|
php -d include_path=benchmarks:php/src \\ |
||||||
|
-d auto_prepend_file=$(rootpath autoload.php) \\ |
||||||
|
$(rootpath PhpBenchmark.php) $$@ |
||||||
|
""", |
||||||
|
testonly = 1, |
||||||
|
) |
||||||
|
|
||||||
|
# A pre-configured binary using the checked in datasets. |
||||||
|
internal_benchmark_test( |
||||||
|
name = "php", |
||||||
|
binary = ":php_benchmark", |
||||||
|
datasets = ["//benchmarks/datasets:proto3_datasets"], |
||||||
|
env_vars = ["PROTOBUF_PHP_SRCDIR=php/src"], |
||||||
|
args = ["--behavior_prefix='php'"], |
||||||
|
) |
||||||
|
|
||||||
|
# The benchmark binary which can be run over any dataset. |
||||||
|
inline_sh_binary( |
||||||
|
name = "php_c_benchmark", |
||||||
|
srcs = [ |
||||||
|
"PhpBenchmark.php", |
||||||
|
"//php:extension", |
||||||
|
], |
||||||
|
deps = [ |
||||||
|
"//benchmarks:benchmarks_php_proto", |
||||||
|
"//benchmarks/datasets:php_protos", |
||||||
|
], |
||||||
|
cmd = """ |
||||||
|
php -d include_path=benchmarks:php/src \\ |
||||||
|
-dextension=$(rootpath //php:extension) \\ |
||||||
|
$(rootpath PhpBenchmark.php) $$@ |
||||||
|
""", |
||||||
|
testonly = 1, |
||||||
|
) |
||||||
|
|
||||||
|
# A pre-configured binary using the checked in datasets. |
||||||
|
internal_benchmark_test( |
||||||
|
name = "php_c", |
||||||
|
binary = ":php_c_benchmark", |
||||||
|
datasets = ["//benchmarks/datasets:proto3_datasets"], |
||||||
|
env_vars = ["PROTOBUF_PHP_SRCDIR=php/src"], |
||||||
|
args = ["--behavior_prefix='php_c'"], |
||||||
|
) |
||||||
|
|
||||||
|
pkg_files( |
||||||
|
name = "dist_files", |
||||||
|
srcs = glob(["*.php"]) + [ |
||||||
|
"BUILD.bazel", |
||||||
|
], |
||||||
|
strip_prefix = strip_prefix.from_root(""), |
||||||
|
visibility = ["//benchmarks:__pkg__"], |
||||||
|
) |
@ -1,25 +0,0 @@ |
|||||||
var pbjs = require("./protobuf.js/cli").pbjs |
|
||||||
|
|
||||||
var argv = []; |
|
||||||
var protoFiles = []; |
|
||||||
var prefix = ""; |
|
||||||
process.argv.forEach(function(val, index) { |
|
||||||
var arg = val; |
|
||||||
if (arg.length > 6 && arg.substring(arg.length - 6) == ".proto") { |
|
||||||
protoFiles.push(arg); |
|
||||||
} else if (arg.length > 15 && arg.substring(0, 15) == "--include_path=") { |
|
||||||
prefix = arg.substring(15); |
|
||||||
} else if (index >= 2) { |
|
||||||
argv.push(arg); |
|
||||||
} |
|
||||||
}); |
|
||||||
protoFiles.forEach(function(val) { |
|
||||||
argv.push(prefix + "/" + val); |
|
||||||
}); |
|
||||||
|
|
||||||
pbjs.main(argv, function(err, output){ |
|
||||||
if (err) { |
|
||||||
console.log(err); |
|
||||||
} |
|
||||||
}); |
|
||||||
|
|
@ -1,66 +0,0 @@ |
|||||||
var root = require("./generated_bundle_code.js"); |
|
||||||
var fs = require('fs'); |
|
||||||
var benchmark = require("./node_modules/benchmark"); |
|
||||||
var benchmarkSuite = require("./benchmark_suite.js"); |
|
||||||
|
|
||||||
|
|
||||||
function getNewPrototype(name) { |
|
||||||
var message = eval("root." + name); |
|
||||||
if (typeof(message) == "undefined") { |
|
||||||
throw "type " + name + " is undefined"; |
|
||||||
} |
|
||||||
return message; |
|
||||||
} |
|
||||||
|
|
||||||
|
|
||||||
var results = []; |
|
||||||
|
|
||||||
console.log("#####################################################"); |
|
||||||
console.log("ProtobufJs Benchmark: "); |
|
||||||
process.argv.forEach(function(filename, index) { |
|
||||||
if (index < 2) { |
|
||||||
return; |
|
||||||
} |
|
||||||
var benchmarkDataset = |
|
||||||
root.benchmarks.BenchmarkDataset.decode(fs.readFileSync(filename)); |
|
||||||
var messageList = []; |
|
||||||
var totalBytes = 0; |
|
||||||
benchmarkDataset.payload.forEach(function(onePayload) { |
|
||||||
var message = getNewPrototype(benchmarkDataset.messageName); |
|
||||||
messageList.push(message.decode(onePayload)); |
|
||||||
totalBytes += onePayload.length; |
|
||||||
}); |
|
||||||
|
|
||||||
var scenarios = benchmarkSuite.newBenchmark( |
|
||||||
benchmarkDataset.messageName, filename, "protobufjs"); |
|
||||||
scenarios.suite |
|
||||||
.add("protobuf.js static decoding", function() { |
|
||||||
benchmarkDataset.payload.forEach(function(onePayload) { |
|
||||||
var protoType = getNewPrototype(benchmarkDataset.messageName); |
|
||||||
protoType.decode(onePayload); |
|
||||||
}); |
|
||||||
}) |
|
||||||
.add("protobuf.js static encoding", function() { |
|
||||||
var protoType = getNewPrototype(benchmarkDataset.messageName); |
|
||||||
messageList.forEach(function(message) { |
|
||||||
protoType.encode(message).finish(); |
|
||||||
}); |
|
||||||
}) |
|
||||||
.run({"Async": false}); |
|
||||||
|
|
||||||
results.push({ |
|
||||||
filename: filename, |
|
||||||
benchmarks: { |
|
||||||
protobufjs_decoding: scenarios.benches[0] * totalBytes, |
|
||||||
protobufjs_encoding: scenarios.benches[1] * totalBytes |
|
||||||
} |
|
||||||
}) |
|
||||||
|
|
||||||
console.log("Throughput for decoding: " |
|
||||||
+ scenarios.benches[0] * totalBytes / 1024 / 1024 + "MB/s" ); |
|
||||||
console.log("Throughput for encoding: " |
|
||||||
+ scenarios.benches[1] * totalBytes / 1024 / 1024 + "MB/s" ); |
|
||||||
console.log(""); |
|
||||||
}); |
|
||||||
console.log("#####################################################"); |
|
||||||
|
|
@ -0,0 +1,71 @@ |
|||||||
|
load("@rules_cc//cc:defs.bzl", "cc_binary") |
||||||
|
load("@rules_python//python:defs.bzl", "py_library", "py_binary") |
||||||
|
load("@rules_pkg//:mappings.bzl", "pkg_files", "strip_prefix") |
||||||
|
load("//benchmarks:internal.bzl", "internal_benchmark_test") |
||||||
|
|
||||||
|
cc_binary( |
||||||
|
name = "libbenchmark_messages.so", |
||||||
|
srcs = ["python_benchmark_messages.cc"], |
||||||
|
deps = [ |
||||||
|
"@local_config_python//:python_headers", |
||||||
|
"//external:python_headers", |
||||||
|
"//benchmarks:benchmarks_cc_proto", |
||||||
|
"//benchmarks/datasets:cc_protos", |
||||||
|
], |
||||||
|
linkshared = True, |
||||||
|
linkstatic = True, |
||||||
|
visibility = ["//visibility:private"], |
||||||
|
) |
||||||
|
|
||||||
|
# The benchmark binary which can be run over any dataset. |
||||||
|
py_binary( |
||||||
|
name = "python_benchmark", |
||||||
|
srcs = ["py_benchmark.py"], |
||||||
|
main = "py_benchmark.py", |
||||||
|
data = ["libbenchmark_messages.so"], |
||||||
|
deps = [ |
||||||
|
"//:protobuf_python", |
||||||
|
"//benchmarks:benchmarks_py_proto", |
||||||
|
"//benchmarks/datasets:py_protos", |
||||||
|
], |
||||||
|
env = select({ |
||||||
|
"//python:use_fast_cpp_protos": {"PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": "cpp"}, |
||||||
|
"//conditions:default": {"PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": "python"}, |
||||||
|
}), |
||||||
|
) |
||||||
|
|
||||||
|
# Pre-configured binaries using the checked in datasets. |
||||||
|
|
||||||
|
internal_benchmark_test( |
||||||
|
name = "python", |
||||||
|
binary = ":python_benchmark", |
||||||
|
datasets = ["//benchmarks/datasets"], |
||||||
|
env_vars = ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python"], |
||||||
|
) |
||||||
|
|
||||||
|
# Note: this requires --define=use_fast_cpp_protos=true |
||||||
|
internal_benchmark_test( |
||||||
|
name = "cpp_reflection", |
||||||
|
binary = ":python_benchmark", |
||||||
|
datasets = ["//benchmarks/datasets"], |
||||||
|
env_vars = ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=cpp"], |
||||||
|
) |
||||||
|
|
||||||
|
# Note: this requires --define=use_fast_cpp_protos=true |
||||||
|
internal_benchmark_test( |
||||||
|
name = "cpp_generated_code", |
||||||
|
binary = ":python_benchmark", |
||||||
|
datasets = ["//benchmarks/datasets"], |
||||||
|
env_vars = ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=cpp"], |
||||||
|
args = ["--cpp_generated"], |
||||||
|
) |
||||||
|
|
||||||
|
pkg_files( |
||||||
|
name = "dist_files", |
||||||
|
srcs = glob(["*.py"]) + [ |
||||||
|
"BUILD.bazel", |
||||||
|
"python_benchmark_messages.cc", |
||||||
|
], |
||||||
|
strip_prefix = strip_prefix.from_root(""), |
||||||
|
visibility = ["//benchmarks:__pkg__"], |
||||||
|
) |
@ -0,0 +1,50 @@ |
|||||||
|
load("@rules_cc//cc:defs.bzl", "cc_binary") |
||||||
|
load("@rules_pkg//:mappings.bzl", "pkg_files", "strip_prefix") |
||||||
|
load("@rules_python//python:defs.bzl", "py_binary") |
||||||
|
|
||||||
|
package(default_visibility = ["//benchmarks:__subpackages__"]) |
||||||
|
|
||||||
|
cc_binary( |
||||||
|
name = "protoc-gen-proto2_to_proto3", |
||||||
|
srcs = [ |
||||||
|
"schema_proto2_to_proto3_util.h", |
||||||
|
"protoc-gen-proto2_to_proto3.cc", |
||||||
|
], |
||||||
|
deps = [ |
||||||
|
"//:protobuf", |
||||||
|
"//src/google/protobuf/compiler:code_generator", |
||||||
|
"//benchmarks:benchmarks_cc_proto", |
||||||
|
], |
||||||
|
visibility = ["//benchmarks:__subpackages__"], |
||||||
|
) |
||||||
|
|
||||||
|
cc_binary( |
||||||
|
name = "proto3_data_stripper", |
||||||
|
srcs = [ |
||||||
|
"data_proto2_to_proto3_util.h", |
||||||
|
"proto3_data_stripper.cc", |
||||||
|
], |
||||||
|
deps = [ |
||||||
|
"//:protobuf", |
||||||
|
"//benchmarks/datasets:cc_protos", |
||||||
|
"//benchmarks:benchmarks_cc_proto", |
||||||
|
], |
||||||
|
) |
||||||
|
|
||||||
|
py_binary( |
||||||
|
name = "result_parser", |
||||||
|
srcs = ["result_parser.py"], |
||||||
|
deps = [ |
||||||
|
"//benchmarks:benchmarks_py_proto", |
||||||
|
], |
||||||
|
) |
||||||
|
|
||||||
|
################################################################################ |
||||||
|
# Distribution files |
||||||
|
################################################################################ |
||||||
|
|
||||||
|
pkg_files( |
||||||
|
name = "dist_files", |
||||||
|
srcs = glob(["*"]), |
||||||
|
strip_prefix = strip_prefix.from_root(""), |
||||||
|
) |
@ -0,0 +1,103 @@ |
|||||||
|
"""Starlark definitions for converting proto2 to proto3. |
||||||
|
|
||||||
|
PLEASE DO NOT DEPEND ON THE CONTENTS OF THIS FILE, IT IS UNSTABLE. |
||||||
|
""" |
||||||
|
|
||||||
|
load("//:protobuf.bzl", "internal_php_proto_library") |
||||||
|
|
||||||
|
def proto3_from_proto2_data( |
||||||
|
name, |
||||||
|
srcs, |
||||||
|
**kwargs): |
||||||
|
"""Transforms proto2 binary data into a proto3-compatible format, |
||||||
|
|
||||||
|
Args: |
||||||
|
name: the name of the target representing the generated proto files. |
||||||
|
srcs: the source binary protobuf data files. |
||||||
|
**kwargs: standard arguments to forward on |
||||||
|
""" |
||||||
|
outs = [] |
||||||
|
out_files = [] |
||||||
|
src_files = [] |
||||||
|
for src in srcs: |
||||||
|
outs.append("proto3/" + src) |
||||||
|
out_files.append("$(RULEDIR)/proto3/" + src) |
||||||
|
src_files.append("$(rootpath %s)" % src); |
||||||
|
|
||||||
|
native.genrule( |
||||||
|
name = name + "_genrule", |
||||||
|
srcs = srcs, |
||||||
|
exec_tools = [ |
||||||
|
"//benchmarks/util:proto3_data_stripper", |
||||||
|
], |
||||||
|
outs = outs, |
||||||
|
cmd = "$(execpath //benchmarks/util:proto3_data_stripper) %s %s" % ( |
||||||
|
" ".join(src_files), " ".join(out_files)), |
||||||
|
) |
||||||
|
|
||||||
|
native.filegroup( |
||||||
|
name = name, |
||||||
|
srcs = outs, |
||||||
|
**kwargs, |
||||||
|
) |
||||||
|
|
||||||
|
def _proto3_from_proto2_library( |
||||||
|
name, |
||||||
|
srcs, |
||||||
|
**kwargs): |
||||||
|
"""Create a proto3 library from a proto2 source. |
||||||
|
|
||||||
|
Args: |
||||||
|
name: the name of the target representing the generated proto files. |
||||||
|
srcs: the source proto2 files. Note: these must be raw sources. |
||||||
|
**kwargs: standard arguments to forward on |
||||||
|
""" |
||||||
|
outs = [] |
||||||
|
src_files = [] |
||||||
|
for src in srcs: |
||||||
|
outs.append(src + "3") |
||||||
|
src_files.append("$(rootpath %s)" % src); |
||||||
|
|
||||||
|
native.genrule( |
||||||
|
name = name, |
||||||
|
srcs = srcs, |
||||||
|
exec_tools = [ |
||||||
|
"//:protoc", |
||||||
|
"//benchmarks/util:protoc-gen-proto2_to_proto3", |
||||||
|
], |
||||||
|
outs = outs, |
||||||
|
cmd = """ |
||||||
|
$(execpath //:protoc) \ |
||||||
|
--plugin=$(execpath //benchmarks/util:protoc-gen-proto2_to_proto3) \ |
||||||
|
--proto_path=. \ |
||||||
|
--proto_path=$(GENDIR) \ |
||||||
|
--proto2_to_proto3_out=$(GENDIR) \ |
||||||
|
%s |
||||||
|
""" % (" ".join(src_files)), |
||||||
|
**kwargs, |
||||||
|
) |
||||||
|
|
||||||
|
def php_proto3_from_proto2_library( |
||||||
|
name, |
||||||
|
src, |
||||||
|
outs = [], |
||||||
|
**kwargs): |
||||||
|
"""Create a proto3 php library from a proto2 source. |
||||||
|
|
||||||
|
Args: |
||||||
|
name: the name of the target representing the generated proto files. |
||||||
|
src: the source proto2 file. |
||||||
|
outs: the expected php outputs. |
||||||
|
**kwargs: standard arguments to forward on |
||||||
|
""" |
||||||
|
_proto3_from_proto2_library( |
||||||
|
name = name + "_genrule", |
||||||
|
srcs = [src], |
||||||
|
) |
||||||
|
|
||||||
|
internal_php_proto_library( |
||||||
|
name = name, |
||||||
|
srcs = [name + "_genrule"], |
||||||
|
outs = outs, |
||||||
|
**kwargs, |
||||||
|
) |
@ -1,74 +0,0 @@ |
|||||||
#include "benchmarks.pb.h" |
|
||||||
#include "datasets/google_message1/proto2/benchmark_message1_proto2.pb.h" |
|
||||||
#include "datasets/google_message1/proto3/benchmark_message1_proto3.pb.h" |
|
||||||
#include "datasets/google_message2/benchmark_message2.pb.h" |
|
||||||
#include "datasets/google_message3/benchmark_message3.pb.h" |
|
||||||
#include "datasets/google_message4/benchmark_message4.pb.h" |
|
||||||
#include "data_proto2_to_proto3_util.h" |
|
||||||
|
|
||||||
#include <fstream> |
|
||||||
|
|
||||||
using google::protobuf::util::GogoDataStripper; |
|
||||||
|
|
||||||
std::string ReadFile(const std::string& name) { |
|
||||||
std::ifstream file(name.c_str()); |
|
||||||
GOOGLE_CHECK(file.is_open()) << "Couldn't find file '" |
|
||||||
<< name |
|
||||||
<< "', please make sure you are running this command from the benchmarks" |
|
||||||
<< " directory.\n"; |
|
||||||
return std::string((std::istreambuf_iterator<char>(file)), |
|
||||||
std::istreambuf_iterator<char>()); |
|
||||||
} |
|
||||||
|
|
||||||
int main(int argc, char *argv[]) { |
|
||||||
if (argc % 2 == 0 || argc == 1) { |
|
||||||
std::cerr << "Usage: [input_files] [output_file_names] where " << |
|
||||||
"input_files are one to one mapping to output_file_names." << |
|
||||||
std::endl; |
|
||||||
return 1; |
|
||||||
} |
|
||||||
|
|
||||||
for (int i = argc / 2; i > 0; i--) { |
|
||||||
const std::string &input_file = argv[i]; |
|
||||||
const std::string &output_file = argv[i + argc / 2]; |
|
||||||
|
|
||||||
std::cerr << "Generating " << input_file |
|
||||||
<< " to " << output_file << std::endl; |
|
||||||
benchmarks::BenchmarkDataset dataset; |
|
||||||
Message* message; |
|
||||||
std::string dataset_payload = ReadFile(input_file); |
|
||||||
GOOGLE_CHECK(dataset.ParseFromString(dataset_payload)) |
|
||||||
<< "Can' t parse data file " << input_file; |
|
||||||
|
|
||||||
if (dataset.message_name() == "benchmarks.proto3.GoogleMessage1") { |
|
||||||
message = new benchmarks::proto3::GoogleMessage1; |
|
||||||
} else if (dataset.message_name() == "benchmarks.proto2.GoogleMessage1") { |
|
||||||
message = new benchmarks::proto2::GoogleMessage1; |
|
||||||
} else if (dataset.message_name() == "benchmarks.proto2.GoogleMessage2") { |
|
||||||
message = new benchmarks::proto2::GoogleMessage2; |
|
||||||
} else if (dataset.message_name() == |
|
||||||
"benchmarks.google_message3.GoogleMessage3") { |
|
||||||
message = new benchmarks::google_message3::GoogleMessage3; |
|
||||||
} else if (dataset.message_name() == |
|
||||||
"benchmarks.google_message4.GoogleMessage4") { |
|
||||||
message = new benchmarks::google_message4::GoogleMessage4; |
|
||||||
} else { |
|
||||||
std::cerr << "Unknown message type: " << dataset.message_name(); |
|
||||||
exit(1); |
|
||||||
} |
|
||||||
|
|
||||||
for (int i = 0; i < dataset.payload_size(); i++) { |
|
||||||
message->ParseFromString(dataset.payload(i)); |
|
||||||
GogoDataStripper stripper; |
|
||||||
stripper.StripMessage(message); |
|
||||||
dataset.set_payload(i, message->SerializeAsString()); |
|
||||||
} |
|
||||||
|
|
||||||
std::ofstream ofs(output_file); |
|
||||||
ofs << dataset.SerializeAsString(); |
|
||||||
ofs.close(); |
|
||||||
} |
|
||||||
|
|
||||||
|
|
||||||
return 0; |
|
||||||
} |
|
@ -1,103 +0,0 @@ |
|||||||
#include "google/protobuf/compiler/code_generator.h" |
|
||||||
#include "google/protobuf/io/zero_copy_stream.h" |
|
||||||
#include "google/protobuf/io/printer.h" |
|
||||||
#include "google/protobuf/descriptor.h" |
|
||||||
#include "google/protobuf/descriptor.pb.h" |
|
||||||
#include "schema_proto2_to_proto3_util.h" |
|
||||||
|
|
||||||
#include "google/protobuf/compiler/plugin.h" |
|
||||||
|
|
||||||
using google::protobuf::FileDescriptorProto; |
|
||||||
using google::protobuf::FileDescriptor; |
|
||||||
using google::protobuf::DescriptorPool; |
|
||||||
using google::protobuf::io::Printer; |
|
||||||
using google::protobuf::util::SchemaGroupStripper; |
|
||||||
using google::protobuf::util::EnumScrubber; |
|
||||||
|
|
||||||
namespace google { |
|
||||||
namespace protobuf { |
|
||||||
namespace compiler { |
|
||||||
|
|
||||||
namespace { |
|
||||||
|
|
||||||
string StripProto(string filename) { |
|
||||||
if (filename.substr(filename.size() - 11) == ".protodevel") { |
|
||||||
// .protodevel
|
|
||||||
return filename.substr(0, filename.size() - 11); |
|
||||||
} else { |
|
||||||
// .proto
|
|
||||||
return filename.substr(0, filename.size() - 6); |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
DescriptorPool new_pool_; |
|
||||||
|
|
||||||
} // namespace
|
|
||||||
|
|
||||||
class GoGoProtoGenerator : public CodeGenerator { |
|
||||||
public: |
|
||||||
virtual bool GenerateAll(const std::vector<const FileDescriptor*>& files, |
|
||||||
const string& parameter, |
|
||||||
GeneratorContext* context, |
|
||||||
string* error) const { |
|
||||||
for (int i = 0; i < files.size(); i++) { |
|
||||||
for (auto file : files) { |
|
||||||
bool can_generate = |
|
||||||
(new_pool_.FindFileByName(file->name()) == nullptr); |
|
||||||
for (int j = 0; j < file->dependency_count(); j++) { |
|
||||||
can_generate &= (new_pool_.FindFileByName( |
|
||||||
file->dependency(j)->name()) != nullptr); |
|
||||||
} |
|
||||||
for (int j = 0; j < file->public_dependency_count(); j++) { |
|
||||||
can_generate &= (new_pool_.FindFileByName( |
|
||||||
file->public_dependency(j)->name()) != nullptr); |
|
||||||
} |
|
||||||
for (int j = 0; j < file->weak_dependency_count(); j++) { |
|
||||||
can_generate &= (new_pool_.FindFileByName( |
|
||||||
file->weak_dependency(j)->name()) != nullptr); |
|
||||||
} |
|
||||||
if (can_generate) { |
|
||||||
Generate(file, parameter, context, error); |
|
||||||
break; |
|
||||||
} |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
return true; |
|
||||||
} |
|
||||||
|
|
||||||
virtual bool Generate(const FileDescriptor* file, |
|
||||||
const string& parameter, |
|
||||||
GeneratorContext* context, |
|
||||||
string* error) const { |
|
||||||
FileDescriptorProto new_file; |
|
||||||
file->CopyTo(&new_file); |
|
||||||
SchemaGroupStripper::StripFile(file, &new_file); |
|
||||||
|
|
||||||
EnumScrubber enum_scrubber; |
|
||||||
enum_scrubber.ScrubFile(&new_file); |
|
||||||
|
|
||||||
string filename = file->name(); |
|
||||||
string basename = StripProto(filename); |
|
||||||
|
|
||||||
std::vector<std::pair<string,string>> option_pairs; |
|
||||||
ParseGeneratorParameter(parameter, &option_pairs); |
|
||||||
|
|
||||||
std::unique_ptr<google::protobuf::io::ZeroCopyOutputStream> output( |
|
||||||
context->Open(basename + ".proto")); |
|
||||||
string content = new_pool_.BuildFile(new_file)->DebugString(); |
|
||||||
Printer printer(output.get(), '$'); |
|
||||||
printer.WriteRaw(content.c_str(), content.size()); |
|
||||||
|
|
||||||
return true; |
|
||||||
} |
|
||||||
}; |
|
||||||
|
|
||||||
} // namespace compiler
|
|
||||||
} // namespace protobuf
|
|
||||||
} // namespace google
|
|
||||||
|
|
||||||
int main(int argc, char* argv[]) { |
|
||||||
google::protobuf::compiler::GoGoProtoGenerator generator; |
|
||||||
return google::protobuf::compiler::PluginMain(argc, argv, &generator); |
|
||||||
} |
|
@ -0,0 +1,56 @@ |
|||||||
|
"""Generated unittests to verify that a binary is built for the expected architecture.""" |
||||||
|
|
||||||
|
load("//build_defs:internal_shell.bzl", "inline_sh_test") |
||||||
|
|
||||||
|
def _arch_test_impl( |
||||||
|
name, |
||||||
|
platform, |
||||||
|
file_platform, |
||||||
|
bazel_binaries = [], |
||||||
|
system_binaries = [], |
||||||
|
**kwargs): |
||||||
|
""" |
||||||
|
Bazel rule to verify that a Bazel or system binary is built for the |
||||||
|
aarch64 architecture. |
||||||
|
|
||||||
|
Args: |
||||||
|
name: the name of the test. |
||||||
|
platform: a diagnostic name for this architecture. |
||||||
|
file_platform: the expected output of `file`. |
||||||
|
bazel_binaries: a set of binary targets to inspect. |
||||||
|
system_binaries: a set of paths to system executables to inspect. |
||||||
|
**kargs: other keyword arguments that are passed to the test. |
||||||
|
""" |
||||||
|
|
||||||
|
inline_sh_test( |
||||||
|
name = name, |
||||||
|
tools = bazel_binaries, |
||||||
|
cmd = """ |
||||||
|
for binary in "$(rootpaths %s) %s"; do |
||||||
|
(file -L $$binary | grep -q "%s") \ |
||||||
|
|| (echo "Test binary is not an %s binary: "; file -L $$binary; exit 1) |
||||||
|
done |
||||||
|
""" % (" ".join(bazel_binaries), |
||||||
|
" ".join(system_binaries), |
||||||
|
file_platform, |
||||||
|
platform), |
||||||
|
target_compatible_with = select({ |
||||||
|
"//build_defs:"+platform: [], |
||||||
|
"//conditions:default": ["@platforms//:incompatible"], |
||||||
|
}), |
||||||
|
) |
||||||
|
|
||||||
|
|
||||||
|
def aarch64_test(**kwargs): |
||||||
|
_arch_test_impl( |
||||||
|
platform = "aarch64", |
||||||
|
file_platform = "ELF 64-bit LSB executable, ARM aarch64", |
||||||
|
**kwargs, |
||||||
|
) |
||||||
|
|
||||||
|
def x86_64_test(**kwargs): |
||||||
|
_arch_test_impl( |
||||||
|
platform = "x86_64", |
||||||
|
file_platform = "ELF 64-bit LSB executable, ARM x86_64", |
||||||
|
**kwargs, |
||||||
|
) |
@ -0,0 +1,85 @@ |
|||||||
|
def inline_sh_binary( |
||||||
|
name, |
||||||
|
srcs = [], |
||||||
|
tools = [], |
||||||
|
deps = [], |
||||||
|
cmd = "", |
||||||
|
testonly = None, |
||||||
|
**kwargs): |
||||||
|
"""Bazel rule to wrap up an inline bash script in a binary. This is most |
||||||
|
useful as a stop-gap solution for migrating off Autotools. These binaries |
||||||
|
are likely to be non-hermetic, with implicit system dependencies. |
||||||
|
|
||||||
|
NOTE: the rule is only an internal workaround. The interface may change and |
||||||
|
the rule may be removed when everything is properly "Bazelified". |
||||||
|
|
||||||
|
Args: |
||||||
|
name: the name of the inline_sh_binary. |
||||||
|
srcs: the files used directly by the script. |
||||||
|
tools: the executable tools used directly by the script. Any target used |
||||||
|
with rootpath/execpath/location must be declared here or in `srcs`. |
||||||
|
deps: a list of dependency labels that are required to run this binary. |
||||||
|
**kargs: other keyword arguments that are passed to sh_binary. |
||||||
|
testonly: common rule attribute (see: |
||||||
|
https://bazel.build/reference/be/common-definitions#common-attributes) |
||||||
|
""" |
||||||
|
|
||||||
|
|
||||||
|
native.genrule( |
||||||
|
name = name + "_genrule", |
||||||
|
srcs = srcs, |
||||||
|
exec_tools = tools, |
||||||
|
outs = [name + ".sh"], |
||||||
|
cmd = "cat <<'EOF' >$(OUTS)\n#!/bin/bash -exu\n%s\nEOF\n" % cmd, |
||||||
|
testonly = testonly, |
||||||
|
visibility = ["//visibility:private"], |
||||||
|
) |
||||||
|
|
||||||
|
native.sh_binary( |
||||||
|
name = name, |
||||||
|
srcs = [name + "_genrule"], |
||||||
|
data = srcs + tools + deps, |
||||||
|
testonly = testonly, |
||||||
|
**kwargs |
||||||
|
) |
||||||
|
|
||||||
|
def inline_sh_test( |
||||||
|
name, |
||||||
|
srcs = [], |
||||||
|
tools = [], |
||||||
|
deps = [], |
||||||
|
cmd = "", |
||||||
|
**kwargs): |
||||||
|
"""Bazel rule to wrap up an inline bash script in a test. This is most |
||||||
|
useful as a stop-gap solution for migrating off Autotools. These tests |
||||||
|
are likely to be non-hermetic, with implicit system dependencies. |
||||||
|
|
||||||
|
NOTE: the rule is only an internal workaround. The interface may change and |
||||||
|
the rule may be removed when everything is properly "Bazelified". |
||||||
|
|
||||||
|
Args: |
||||||
|
name: the name of the inline_sh_binary. |
||||||
|
srcs: the files used directly by the script. |
||||||
|
tools: the executable tools used directly by the script. Any target used |
||||||
|
with rootpath/execpath/location must be declared here or in `srcs`. |
||||||
|
deps: a list of dependency labels that are required to run this binary. |
||||||
|
**kargs: other keyword arguments that are passed to sh_binary. |
||||||
|
https://bazel.build/reference/be/common-definitions#common-attributes) |
||||||
|
""" |
||||||
|
|
||||||
|
|
||||||
|
native.genrule( |
||||||
|
name = name + "_genrule", |
||||||
|
srcs = srcs, |
||||||
|
exec_tools = tools, |
||||||
|
outs = [name + ".sh"], |
||||||
|
cmd = "cat <<'EOF' >$(OUTS)\n#!/bin/bash -exu\n%s\nEOF\n" % cmd, |
||||||
|
visibility = ["//visibility:private"], |
||||||
|
) |
||||||
|
|
||||||
|
native.sh_test( |
||||||
|
name = name, |
||||||
|
srcs = [name + "_genrule"], |
||||||
|
data = srcs + tools + deps, |
||||||
|
**kwargs |
||||||
|
) |
@ -1,247 +0,0 @@ |
|||||||
## Process this file with autoconf to produce configure. |
|
||||||
## In general, the safest way to proceed is to run ./autogen.sh |
|
||||||
|
|
||||||
AC_PREREQ(2.59) |
|
||||||
|
|
||||||
# Note: If you change the version, you must also update it in: |
|
||||||
# * Protobuf.podspec |
|
||||||
# * csharp/Google.Protobuf.Tools.nuspec |
|
||||||
# * csharp/src/*/AssemblyInfo.cs |
|
||||||
# * csharp/src/Google.Protobuf/Google.Protobuf.nuspec |
|
||||||
# * java/*/pom.xml |
|
||||||
# * python/google/protobuf/__init__.py |
|
||||||
# * protoc-artifacts/pom.xml |
|
||||||
# * src/google/protobuf/stubs/common.h |
|
||||||
# * src/Makefile.am (Update -version-info for LDFLAGS if needed) |
|
||||||
# |
|
||||||
# In the SVN trunk, the version should always be the next anticipated release |
|
||||||
# version with the "-pre" suffix. (We used to use "-SNAPSHOT" but this pushed |
|
||||||
# the size of one file name in the dist tarfile over the 99-char limit.) |
|
||||||
AC_INIT([Protocol Buffers],[3.21.5],[protobuf@googlegroups.com],[protobuf]) |
|
||||||
|
|
||||||
AM_MAINTAINER_MODE([enable]) |
|
||||||
|
|
||||||
AC_CONFIG_SRCDIR(src/google/protobuf/message.cc) |
|
||||||
# The config file is generated but not used by the source code, since we only |
|
||||||
# need very few of them, e.g. HAVE_PTHREAD and HAVE_ZLIB. Those macros are |
|
||||||
# passed down in CXXFLAGS manually in src/Makefile.am |
|
||||||
AC_CONFIG_HEADERS([config.h]) |
|
||||||
AC_CONFIG_MACRO_DIR([m4]) |
|
||||||
|
|
||||||
AC_ARG_VAR(DIST_LANG, [language to include in the distribution package (i.e., make dist)]) |
|
||||||
case "$DIST_LANG" in |
|
||||||
"") DIST_LANG=all ;; |
|
||||||
all | cpp | csharp | java | python | javanano | objectivec | ruby | php) ;; |
|
||||||
*) AC_MSG_FAILURE([unknown language: $DIST_LANG]) ;; |
|
||||||
esac |
|
||||||
AC_SUBST(DIST_LANG) |
|
||||||
|
|
||||||
# autoconf's default CXXFLAGS are usually "-g -O2". These aren't necessarily |
|
||||||
# the best choice for libprotobuf. |
|
||||||
AS_IF([test "x${ac_cv_env_CFLAGS_set}" = "x"], |
|
||||||
[CFLAGS=""]) |
|
||||||
AS_IF([test "x${ac_cv_env_CXXFLAGS_set}" = "x"], |
|
||||||
[CXXFLAGS=""]) |
|
||||||
|
|
||||||
AC_CANONICAL_TARGET |
|
||||||
|
|
||||||
AM_INIT_AUTOMAKE([1.9 tar-ustar subdir-objects]) |
|
||||||
|
|
||||||
# Silent rules enabled: the output is minimal but informative. |
|
||||||
# In particular, the warnings from the compiler stick out very clearly. |
|
||||||
# To see all logs, use the --disable-silent-rules on configure or via make V=1 |
|
||||||
AM_SILENT_RULES([yes]) |
|
||||||
|
|
||||||
AC_ARG_WITH([zlib], |
|
||||||
[AS_HELP_STRING([--with-zlib], |
|
||||||
[include classes for streaming compressed data in and out @<:@default=check@:>@])], |
|
||||||
[],[with_zlib=check]) |
|
||||||
|
|
||||||
AC_ARG_WITH([zlib-include], |
|
||||||
[AS_HELP_STRING([--with-zlib-include=PATH], |
|
||||||
[zlib include directory])], |
|
||||||
[CPPFLAGS="-I$withval $CPPFLAGS"]) |
|
||||||
|
|
||||||
AC_ARG_WITH([zlib-lib], |
|
||||||
[AS_HELP_STRING([--with-zlib-lib=PATH], |
|
||||||
[zlib lib directory])], |
|
||||||
[LDFLAGS="-L$withval $LDFLAGS"]) |
|
||||||
|
|
||||||
AC_ARG_WITH([protoc], |
|
||||||
[AS_HELP_STRING([--with-protoc=COMMAND], |
|
||||||
[use the given protoc command instead of building a new one when building tests (useful for cross-compiling)])], |
|
||||||
[],[with_protoc=no]) |
|
||||||
|
|
||||||
# Checks for programs. |
|
||||||
AC_PROG_CC |
|
||||||
AC_PROG_CXX |
|
||||||
AC_PROG_CXX_FOR_BUILD |
|
||||||
AC_LANG([C++]) |
|
||||||
ACX_USE_SYSTEM_EXTENSIONS |
|
||||||
m4_ifdef([AM_PROG_AR], [AM_PROG_AR]) |
|
||||||
AM_CONDITIONAL(GCC, test "$GCC" = yes) # let the Makefile know if we're gcc |
|
||||||
AS_CASE([$target_os], [darwin*], [AC_PROG_OBJC], [AM_CONDITIONAL([am__fastdepOBJC], [false])]) |
|
||||||
|
|
||||||
# test_util.cc takes forever to compile with GCC and optimization turned on. |
|
||||||
AC_MSG_CHECKING([C++ compiler flags...]) |
|
||||||
AS_IF([test "x${ac_cv_env_CXXFLAGS_set}" = "x"],[ |
|
||||||
AS_IF([test "$GCC" = "yes"],[ |
|
||||||
PROTOBUF_OPT_FLAG="-O2" |
|
||||||
CXXFLAGS="${CXXFLAGS} -g" |
|
||||||
]) |
|
||||||
|
|
||||||
# Protocol Buffers contains several checks that are intended to be used only |
|
||||||
# for debugging and which might hurt performance. Most users are probably |
|
||||||
# end users who don't want these checks, so add -DNDEBUG by default. |
|
||||||
CXXFLAGS="$CXXFLAGS -std=c++11 -DNDEBUG" |
|
||||||
|
|
||||||
AC_MSG_RESULT([use default: $PROTOBUF_OPT_FLAG $CXXFLAGS]) |
|
||||||
],[ |
|
||||||
AC_MSG_RESULT([use user-supplied: $CXXFLAGS]) |
|
||||||
]) |
|
||||||
|
|
||||||
AC_SUBST(PROTOBUF_OPT_FLAG) |
|
||||||
|
|
||||||
ACX_CHECK_SUNCC |
|
||||||
|
|
||||||
# Have to do libtool after SUNCC, other wise it "helpfully" adds Crun Cstd |
|
||||||
# to the link |
|
||||||
AC_PROG_LIBTOOL |
|
||||||
|
|
||||||
# Check whether the linker supports version scripts |
|
||||||
AC_MSG_CHECKING([whether the linker supports version scripts]) |
|
||||||
save_LDFLAGS=$LDFLAGS |
|
||||||
LDFLAGS="$LDFLAGS -Wl,--version-script=conftest.map" |
|
||||||
cat > conftest.map <<EOF |
|
||||||
{ |
|
||||||
global: |
|
||||||
main; |
|
||||||
local: |
|
||||||
*; |
|
||||||
}; |
|
||||||
EOF |
|
||||||
AC_LINK_IFELSE( |
|
||||||
[AC_LANG_SOURCE([int main() { return 0; }])], |
|
||||||
[have_ld_version_script=yes; AC_MSG_RESULT(yes)], |
|
||||||
[have_ld_version_script=no; AC_MSG_RESULT(no)]) |
|
||||||
LDFLAGS=$save_LDFLAGS |
|
||||||
AM_CONDITIONAL([HAVE_LD_VERSION_SCRIPT], [test "$have_ld_version_script" = "yes"]) |
|
||||||
|
|
||||||
# Checks for header files. |
|
||||||
AC_HEADER_STDC |
|
||||||
AC_CHECK_HEADERS([fcntl.h inttypes.h limits.h stdlib.h unistd.h]) |
|
||||||
|
|
||||||
# Checks for library functions. |
|
||||||
AC_FUNC_MEMCMP |
|
||||||
AC_FUNC_STRTOD |
|
||||||
AC_CHECK_FUNCS([ftruncate memset mkdir strchr strerror strtol]) |
|
||||||
|
|
||||||
# Check for zlib. |
|
||||||
HAVE_ZLIB=0 |
|
||||||
AS_IF([test "$with_zlib" != no], [ |
|
||||||
AC_MSG_CHECKING([zlib version]) |
|
||||||
|
|
||||||
# First check the zlib header version. |
|
||||||
AC_COMPILE_IFELSE( |
|
||||||
[AC_LANG_PROGRAM([[ |
|
||||||
#include <zlib.h> |
|
||||||
#if !defined(ZLIB_VERNUM) || (ZLIB_VERNUM < 0x1204) |
|
||||||
# error zlib version too old |
|
||||||
#endif |
|
||||||
]], [])], [ |
|
||||||
AC_MSG_RESULT([ok (1.2.0.4 or later)]) |
|
||||||
|
|
||||||
# Also need to add -lz to the linker flags and make sure this succeeds. |
|
||||||
AC_SEARCH_LIBS([zlibVersion], [z], [ |
|
||||||
AC_DEFINE([HAVE_ZLIB], [1], [Enable classes using zlib compression.]) |
|
||||||
HAVE_ZLIB=1 |
|
||||||
], [ |
|
||||||
AS_IF([test "$with_zlib" != check], [ |
|
||||||
AC_MSG_FAILURE([--with-zlib was given, but no working zlib library was found]) |
|
||||||
]) |
|
||||||
]) |
|
||||||
], [ |
|
||||||
AS_IF([test "$with_zlib" = check], [ |
|
||||||
AC_MSG_RESULT([headers missing or too old (requires 1.2.0.4)]) |
|
||||||
], [ |
|
||||||
AC_MSG_FAILURE([--with-zlib was given, but zlib headers were not present or were too old (requires 1.2.0.4)]) |
|
||||||
]) |
|
||||||
]) |
|
||||||
]) |
|
||||||
AM_CONDITIONAL([HAVE_ZLIB], [test $HAVE_ZLIB = 1]) |
|
||||||
|
|
||||||
# Add -std=c++11 if necesssary. It is important for us to do this before the |
|
||||||
# libatomic check below, since that also depends on C++11. |
|
||||||
AX_CXX_COMPILE_STDCXX([11], [noext], [mandatory]) |
|
||||||
|
|
||||||
dnl On some platforms, std::atomic needs a helper library |
|
||||||
AC_MSG_CHECKING(whether -latomic is needed) |
|
||||||
AC_LINK_IFELSE([AC_LANG_SOURCE([[ |
|
||||||
#include <atomic> |
|
||||||
#include <cstdint> |
|
||||||
std::atomic<std::int64_t> v; |
|
||||||
int main() { |
|
||||||
return v; |
|
||||||
} |
|
||||||
]])], STD_ATOMIC_NEED_LIBATOMIC=no, STD_ATOMIC_NEED_LIBATOMIC=yes) |
|
||||||
AC_MSG_RESULT($STD_ATOMIC_NEED_LIBATOMIC) |
|
||||||
if test "x$STD_ATOMIC_NEED_LIBATOMIC" = xyes; then |
|
||||||
LIBATOMIC_LIBS="-latomic" |
|
||||||
fi |
|
||||||
AC_SUBST([LIBATOMIC_LIBS]) |
|
||||||
|
|
||||||
AS_IF([test "$with_protoc" != "no"], [ |
|
||||||
PROTOC=$with_protoc |
|
||||||
AS_IF([test "$with_protoc" = "yes"], [ |
|
||||||
# No argument given. Use system protoc. |
|
||||||
PROTOC=protoc |
|
||||||
]) |
|
||||||
AS_IF([echo "$PROTOC" | grep -q '^@<:@^/@:>@.*/'], [ |
|
||||||
# Does not start with a slash, but contains a slash. So, it's a relative |
|
||||||
# path (as opposed to an absolute path or an executable in $PATH). |
|
||||||
# Since it will actually be executed from the src directory, prefix with |
|
||||||
# the current directory. We also insert $ac_top_build_prefix in case this |
|
||||||
# is a nested package and --with-protoc was actually given on the outer |
|
||||||
# package's configure script. |
|
||||||
PROTOC=`pwd`/${ac_top_build_prefix}$PROTOC |
|
||||||
]) |
|
||||||
AC_SUBST([PROTOC]) |
|
||||||
]) |
|
||||||
AM_CONDITIONAL([USE_EXTERNAL_PROTOC], [test "$with_protoc" != "no"]) |
|
||||||
|
|
||||||
AX_PTHREAD |
|
||||||
AM_CONDITIONAL([HAVE_PTHREAD], [test "x$ax_pthread_ok" = "xyes"]) |
|
||||||
# We still keep this for improving pbconfig.h for unsupported platforms. |
|
||||||
AC_CXX_STL_HASH |
|
||||||
|
|
||||||
# Enable ObjC support for conformance directory on OS X. |
|
||||||
OBJC_CONFORMANCE_TEST=0 |
|
||||||
case "$target_os" in |
|
||||||
darwin*) |
|
||||||
OBJC_CONFORMANCE_TEST=1 |
|
||||||
;; |
|
||||||
esac |
|
||||||
AM_CONDITIONAL([OBJC_CONFORMANCE_TEST], [test $OBJC_CONFORMANCE_TEST = 1]) |
|
||||||
|
|
||||||
AC_MSG_CHECKING(whether -llog is needed) |
|
||||||
ANDROID_TEST=no |
|
||||||
case "$target_os" in |
|
||||||
*android*) |
|
||||||
ANDROID_TEST=yes |
|
||||||
;; |
|
||||||
esac |
|
||||||
AC_MSG_RESULT($ANDROID_TEST) |
|
||||||
if test "x$ANDROID_TEST" = xyes; then |
|
||||||
LIBLOG_LIBS="-llog" |
|
||||||
fi |
|
||||||
AC_SUBST([LIBLOG_LIBS]) |
|
||||||
|
|
||||||
# HACK: Make gmock's configure script pick up our copy of CFLAGS and CXXFLAGS, |
|
||||||
# since the flags added by ACX_CHECK_SUNCC must be used when compiling gmock |
|
||||||
# too. |
|
||||||
export CFLAGS |
|
||||||
export CXXFLAGS |
|
||||||
AC_CONFIG_SUBDIRS([third_party/googletest]) |
|
||||||
|
|
||||||
AC_CONFIG_FILES([Makefile src/Makefile benchmarks/Makefile conformance/Makefile protobuf.pc protobuf-lite.pc]) |
|
||||||
AC_OUTPUT |
|
@ -1,372 +0,0 @@ |
|||||||
## Process this file with automake to produce Makefile.in
|
|
||||||
|
|
||||||
conformance_protoc_inputs = \
|
|
||||||
conformance.proto \
|
|
||||||
$(top_srcdir)/src/google/protobuf/test_messages_proto3.proto
|
|
||||||
|
|
||||||
# proto2 input files, should be separated with proto3, as we
|
|
||||||
# can't generate proto2 files for php.
|
|
||||||
conformance_proto2_protoc_inputs = \
|
|
||||||
$(top_srcdir)/src/google/protobuf/test_messages_proto2.proto
|
|
||||||
|
|
||||||
well_known_type_protoc_inputs = \
|
|
||||||
$(top_srcdir)/src/google/protobuf/any.proto \
|
|
||||||
$(top_srcdir)/src/google/protobuf/duration.proto \
|
|
||||||
$(top_srcdir)/src/google/protobuf/field_mask.proto \
|
|
||||||
$(top_srcdir)/src/google/protobuf/struct.proto \
|
|
||||||
$(top_srcdir)/src/google/protobuf/timestamp.proto \
|
|
||||||
$(top_srcdir)/src/google/protobuf/wrappers.proto
|
|
||||||
|
|
||||||
|
|
||||||
protoc_outputs = \
|
|
||||||
conformance.pb.cc \
|
|
||||||
conformance.pb.h
|
|
||||||
|
|
||||||
other_language_protoc_outputs = \
|
|
||||||
conformance_pb2.py \
|
|
||||||
Conformance.pbobjc.h \
|
|
||||||
Conformance.pbobjc.m \
|
|
||||||
conformance_pb.js \
|
|
||||||
conformance_pb.rb \
|
|
||||||
com/google/protobuf/Any.java \
|
|
||||||
com/google/protobuf/AnyOrBuilder.java \
|
|
||||||
com/google/protobuf/AnyProto.java \
|
|
||||||
com/google/protobuf/BoolValue.java \
|
|
||||||
com/google/protobuf/BoolValueOrBuilder.java \
|
|
||||||
com/google/protobuf/BytesValue.java \
|
|
||||||
com/google/protobuf/BytesValueOrBuilder.java \
|
|
||||||
com/google/protobuf/conformance/Conformance.java \
|
|
||||||
com/google/protobuf/DoubleValue.java \
|
|
||||||
com/google/protobuf/DoubleValueOrBuilder.java \
|
|
||||||
com/google/protobuf/Duration.java \
|
|
||||||
com/google/protobuf/DurationOrBuilder.java \
|
|
||||||
com/google/protobuf/DurationProto.java \
|
|
||||||
com/google/protobuf/FieldMask.java \
|
|
||||||
com/google/protobuf/FieldMaskOrBuilder.java \
|
|
||||||
com/google/protobuf/FieldMaskProto.java \
|
|
||||||
com/google/protobuf/FloatValue.java \
|
|
||||||
com/google/protobuf/FloatValueOrBuilder.java \
|
|
||||||
com/google/protobuf/Int32Value.java \
|
|
||||||
com/google/protobuf/Int32ValueOrBuilder.java \
|
|
||||||
com/google/protobuf/Int64Value.java \
|
|
||||||
com/google/protobuf/Int64ValueOrBuilder.java \
|
|
||||||
com/google/protobuf/ListValue.java \
|
|
||||||
com/google/protobuf/ListValueOrBuilder.java \
|
|
||||||
com/google/protobuf/NullValue.java \
|
|
||||||
com/google/protobuf/StringValue.java \
|
|
||||||
com/google/protobuf/StringValueOrBuilder.java \
|
|
||||||
com/google/protobuf/Struct.java \
|
|
||||||
com/google/protobuf/StructOrBuilder.java \
|
|
||||||
com/google/protobuf/StructProto.java \
|
|
||||||
com/google/protobuf/Timestamp.java \
|
|
||||||
com/google/protobuf/TimestampOrBuilder.java \
|
|
||||||
com/google/protobuf/TimestampProto.java \
|
|
||||||
com/google/protobuf/UInt32Value.java \
|
|
||||||
com/google/protobuf/UInt32ValueOrBuilder.java \
|
|
||||||
com/google/protobuf/UInt64Value.java \
|
|
||||||
com/google/protobuf/UInt64ValueOrBuilder.java \
|
|
||||||
com/google/protobuf/Value.java \
|
|
||||||
com/google/protobuf/ValueOrBuilder.java \
|
|
||||||
com/google/protobuf/WrappersProto.java \
|
|
||||||
com/google/protobuf_test_messages/proto3/TestMessagesProto3.java \
|
|
||||||
com/google/protobuf_test_messages/proto2/TestMessagesProto2.java \
|
|
||||||
google/protobuf/any.pb.cc \
|
|
||||||
google/protobuf/any.pb.h \
|
|
||||||
google/protobuf/any.rb \
|
|
||||||
google/protobuf/any_pb2.py \
|
|
||||||
google/protobuf/duration.pb.cc \
|
|
||||||
google/protobuf/duration.pb.h \
|
|
||||||
google/protobuf/duration.rb \
|
|
||||||
google/protobuf/duration_pb2.py \
|
|
||||||
google/protobuf/field_mask.pb.cc \
|
|
||||||
google/protobuf/field_mask.pb.h \
|
|
||||||
google/protobuf/field_mask.rb \
|
|
||||||
google/protobuf/field_mask_pb2.py \
|
|
||||||
google/protobuf/struct.pb.cc \
|
|
||||||
google/protobuf/struct.pb.h \
|
|
||||||
google/protobuf/struct.rb \
|
|
||||||
google/protobuf/struct_pb2.py \
|
|
||||||
google/protobuf/TestMessagesProto2.pbobjc.h \
|
|
||||||
google/protobuf/TestMessagesProto2.pbobjc.m \
|
|
||||||
google/protobuf/TestMessagesProto3.pbobjc.h \
|
|
||||||
google/protobuf/TestMessagesProto3.pbobjc.m \
|
|
||||||
google/protobuf/test_messages_proto3.pb.cc \
|
|
||||||
google/protobuf/test_messages_proto3.pb.h \
|
|
||||||
google/protobuf/test_messages_proto2.pb.cc \
|
|
||||||
google/protobuf/test_messages_proto2.pb.h \
|
|
||||||
google/protobuf/test_messages_proto3_pb.rb \
|
|
||||||
google/protobuf/test_messages_proto3_pb2.py \
|
|
||||||
google/protobuf/test_messages_proto2_pb2.py \
|
|
||||||
google/protobuf/timestamp.pb.cc \
|
|
||||||
google/protobuf/timestamp.pb.h \
|
|
||||||
google/protobuf/timestamp.rb \
|
|
||||||
google/protobuf/timestamp_pb2.py \
|
|
||||||
google/protobuf/wrappers.pb.cc \
|
|
||||||
google/protobuf/wrappers.pb.h \
|
|
||||||
google/protobuf/wrappers.rb \
|
|
||||||
google/protobuf/wrappers_pb2.py \
|
|
||||||
Conformance/ConformanceRequest.php \
|
|
||||||
Conformance/ConformanceResponse.php \
|
|
||||||
Conformance/FailureSet.php \
|
|
||||||
Conformance/WireFormat.php \
|
|
||||||
GPBMetadata/Conformance.php \
|
|
||||||
GPBMetadata/Google/Protobuf/Any.php \
|
|
||||||
GPBMetadata/Google/Protobuf/Duration.php \
|
|
||||||
GPBMetadata/Google/Protobuf/FieldMask.php \
|
|
||||||
GPBMetadata/Google/Protobuf/Struct.php \
|
|
||||||
GPBMetadata/Google/Protobuf/TestMessagesProto3.php \
|
|
||||||
GPBMetadata/Google/Protobuf/Timestamp.php \
|
|
||||||
GPBMetadata/Google/Protobuf/Wrappers.php \
|
|
||||||
Google/Protobuf/Any.php \
|
|
||||||
Google/Protobuf/BoolValue.php \
|
|
||||||
Google/Protobuf/BytesValue.php \
|
|
||||||
Google/Protobuf/DoubleValue.php \
|
|
||||||
Google/Protobuf/Duration.php \
|
|
||||||
Google/Protobuf/FieldMask.php \
|
|
||||||
Google/Protobuf/FloatValue.php \
|
|
||||||
Google/Protobuf/Int32Value.php \
|
|
||||||
Google/Protobuf/Int64Value.php \
|
|
||||||
Google/Protobuf/ListValue.php \
|
|
||||||
Google/Protobuf/NullValue.php \
|
|
||||||
Google/Protobuf/StringValue.php \
|
|
||||||
Google/Protobuf/Struct.php \
|
|
||||||
Google/Protobuf/Timestamp.php \
|
|
||||||
Google/Protobuf/UInt32Value.php \
|
|
||||||
Google/Protobuf/UInt64Value.php \
|
|
||||||
Google/Protobuf/Value.php \
|
|
||||||
Protobuf_test_messages/Proto3/ForeignEnum.php \
|
|
||||||
Protobuf_test_messages/Proto3/ForeignMessage.php \
|
|
||||||
Protobuf_test_messages/Proto3/TestAllTypes_NestedEnum.php \
|
|
||||||
Protobuf_test_messages/Proto3/TestAllTypes_NestedMessage.php \
|
|
||||||
Protobuf_test_messages/Proto3/TestAllTypes.php
|
|
||||||
# lite/com/google/protobuf/Any.java \
|
|
||||||
# lite/com/google/protobuf/AnyOrBuilder.java \
|
|
||||||
# lite/com/google/protobuf/AnyProto.java \
|
|
||||||
# lite/com/google/protobuf/BoolValue.java \
|
|
||||||
# lite/com/google/protobuf/BoolValueOrBuilder.java \
|
|
||||||
# lite/com/google/protobuf/BytesValue.java \
|
|
||||||
# lite/com/google/protobuf/BytesValueOrBuilder.java \
|
|
||||||
# lite/com/google/protobuf/conformance/Conformance.java \
|
|
||||||
# lite/com/google/protobuf/DoubleValue.java \
|
|
||||||
# lite/com/google/protobuf/DoubleValueOrBuilder.java \
|
|
||||||
# lite/com/google/protobuf/Duration.java \
|
|
||||||
# lite/com/google/protobuf/DurationOrBuilder.java \
|
|
||||||
# lite/com/google/protobuf/DurationProto.java \
|
|
||||||
# lite/com/google/protobuf/FieldMask.java \
|
|
||||||
# lite/com/google/protobuf/FieldMaskOrBuilder.java \
|
|
||||||
# lite/com/google/protobuf/FieldMaskProto.java \
|
|
||||||
# lite/com/google/protobuf/FloatValue.java \
|
|
||||||
# lite/com/google/protobuf/FloatValueOrBuilder.java \
|
|
||||||
# lite/com/google/protobuf/Int32Value.java \
|
|
||||||
# lite/com/google/protobuf/Int32ValueOrBuilder.java \
|
|
||||||
# lite/com/google/protobuf/Int64Value.java \
|
|
||||||
# lite/com/google/protobuf/Int64ValueOrBuilder.java \
|
|
||||||
# lite/com/google/protobuf/ListValue.java \
|
|
||||||
# lite/com/google/protobuf/ListValueOrBuilder.java \
|
|
||||||
# lite/com/google/protobuf/NullValue.java \
|
|
||||||
# lite/com/google/protobuf/StringValue.java \
|
|
||||||
# lite/com/google/protobuf/StringValueOrBuilder.java \
|
|
||||||
# lite/com/google/protobuf/Struct.java \
|
|
||||||
# lite/com/google/protobuf/StructOrBuilder.java \
|
|
||||||
# lite/com/google/protobuf/StructProto.java \
|
|
||||||
# lite/com/google/protobuf/Timestamp.java \
|
|
||||||
# lite/com/google/protobuf/TimestampOrBuilder.java \
|
|
||||||
# lite/com/google/protobuf/TimestampProto.java \
|
|
||||||
# lite/com/google/protobuf/UInt32Value.java \
|
|
||||||
# lite/com/google/protobuf/UInt32ValueOrBuilder.java \
|
|
||||||
# lite/com/google/protobuf/UInt64Value.java \
|
|
||||||
# lite/com/google/protobuf/UInt64ValueOrBuilder.java \
|
|
||||||
# lite/com/google/protobuf/Value.java \
|
|
||||||
# lite/com/google/protobuf/ValueOrBuilder.java \
|
|
||||||
# lite/com/google/protobuf/WrappersProto.java
|
|
||||||
|
|
||||||
bin_PROGRAMS = conformance-test-runner conformance-cpp
|
|
||||||
|
|
||||||
# All source files excepet C++/Objective-C ones should be explicitly listed
|
|
||||||
# here because the autoconf tools don't include files of other languages
|
|
||||||
# automatically.
|
|
||||||
EXTRA_DIST = \
|
|
||||||
ConformanceJava.java \
|
|
||||||
ConformanceJavaLite.java \
|
|
||||||
README.md \
|
|
||||||
conformance.proto \
|
|
||||||
conformance_python.py \
|
|
||||||
conformance_ruby.rb \
|
|
||||||
conformance_php.php \
|
|
||||||
failure_list_cpp.txt \
|
|
||||||
failure_list_csharp.txt \
|
|
||||||
failure_list_java.txt \
|
|
||||||
failure_list_js.txt \
|
|
||||||
failure_list_objc.txt \
|
|
||||||
failure_list_python.txt \
|
|
||||||
failure_list_python_cpp.txt \
|
|
||||||
failure_list_python-post26.txt \
|
|
||||||
failure_list_ruby.txt \
|
|
||||||
failure_list_php.txt \
|
|
||||||
failure_list_php_c.txt
|
|
||||||
|
|
||||||
conformance_test_runner_LDADD = $(top_srcdir)/src/libprotobuf.la
|
|
||||||
conformance_test_runner_SOURCES = conformance_test.h conformance_test.cc \
|
|
||||||
conformance_test_main.cc \
|
|
||||||
binary_json_conformance_suite.h \
|
|
||||||
binary_json_conformance_suite.cc \
|
|
||||||
text_format_conformance_suite.h \
|
|
||||||
text_format_conformance_suite.cc \
|
|
||||||
conformance_test_runner.cc \
|
|
  third_party/jsoncpp/json.h \
  third_party/jsoncpp/jsoncpp.cpp
nodist_conformance_test_runner_SOURCES = conformance.pb.cc google/protobuf/test_messages_proto3.pb.cc google/protobuf/test_messages_proto2.pb.cc
conformance_test_runner_CPPFLAGS = -I$(top_srcdir)/src -I$(srcdir)
conformance_test_runner_CXXFLAGS = -std=c++11
# Explicit deps because BUILT_SOURCES are only done before a "make all/check"
# so a direct "make test_cpp" could fail if parallel enough.
conformance_test_runner-conformance_test.$(OBJEXT): conformance.pb.h
conformance_test_runner-conformance_test_runner.$(OBJEXT): conformance.pb.h

conformance_cpp_LDADD = $(top_srcdir)/src/libprotobuf.la
conformance_cpp_SOURCES = conformance_cpp.cc
nodist_conformance_cpp_SOURCES = conformance.pb.cc google/protobuf/test_messages_proto3.pb.cc google/protobuf/test_messages_proto2.pb.cc
conformance_cpp_CPPFLAGS = -I$(top_srcdir)/src
# Explicit dep because BUILT_SOURCES are only done before a "make all/check"
# so a direct "make test_cpp" could fail if parallel enough.
conformance_cpp-conformance_cpp.$(OBJEXT): conformance.pb.h

if OBJC_CONFORMANCE_TEST

bin_PROGRAMS += conformance-objc

conformance_objc_SOURCES = conformance_objc.m ../objectivec/GPBProtocolBuffers.m
nodist_conformance_objc_SOURCES = Conformance.pbobjc.m google/protobuf/TestMessagesProto2.pbobjc.m google/protobuf/TestMessagesProto3.pbobjc.m
# On travis, the build fails without the isysroot because whatever system
# headers are being found don't include generics support for
# NSArray/NSDictionary, the only guess is their image at one time had an odd
# setup for Xcode and old frameworks are being found.
conformance_objc_CPPFLAGS = -I$(top_srcdir)/objectivec -isysroot `xcrun --sdk macosx --show-sdk-path`
conformance_objc_LDFLAGS = -framework Foundation
# Explicit dep because BUILT_SOURCES are only done before a "make all/check"
# so a direct "make test_objc" could fail if parallel enough.
conformance_objc-conformance_objc.$(OBJEXT): Conformance.pbobjc.h google/protobuf/TestMessagesProto2.pbobjc.h google/protobuf/TestMessagesProto3.pbobjc.h

endif

# JavaScript well-known types are expected to be in a directory called
# google-protobuf, because they are usually in the google-protobuf npm
# package. But we want to use the sources from our tree, so we recreate
# that directory structure here.
google-protobuf:
	mkdir google-protobuf

if USE_EXTERNAL_PROTOC

# Some implementations include pre-generated versions of well-known types.
protoc_middleman: $(conformance_protoc_inputs) $(conformance_proto2_protoc_inputs) $(well_known_type_protoc_inputs) google-protobuf
	$(PROTOC) -I$(srcdir) -I$(top_srcdir) --cpp_out=. --java_out=. --ruby_out=. --objc_out=. --python_out=. --php_out=. $(conformance_protoc_inputs)
	$(PROTOC) -I$(srcdir) -I$(top_srcdir) --cpp_out=. --java_out=. --ruby_out=. --objc_out=. --python_out=. $(conformance_proto2_protoc_inputs)
	$(PROTOC) -I$(srcdir) -I$(top_srcdir) --cpp_out=. --java_out=. --ruby_out=. --python_out=. $(well_known_type_protoc_inputs)
## 	$(PROTOC) -I$(srcdir) -I$(top_srcdir) --java_out=lite:lite $(conformance_protoc_inputs) $(well_known_type_protoc_inputs)
	touch protoc_middleman

else

# We have to cd to $(srcdir) before executing protoc because $(protoc_inputs) is
# relative to srcdir, which may not be the same as the current directory when
# building out-of-tree.
protoc_middleman: $(top_srcdir)/src/protoc$(EXEEXT) $(conformance_protoc_inputs) $(conformance_proto2_protoc_inputs) $(well_known_type_protoc_inputs) google-protobuf
	oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --cpp_out=$$oldpwd --java_out=$$oldpwd --ruby_out=$$oldpwd --objc_out=$$oldpwd --python_out=$$oldpwd --php_out=$$oldpwd $(conformance_protoc_inputs) )
	oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --cpp_out=$$oldpwd --java_out=$$oldpwd --ruby_out=$$oldpwd --objc_out=$$oldpwd --python_out=$$oldpwd $(conformance_proto2_protoc_inputs) )
	oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --cpp_out=$$oldpwd --java_out=$$oldpwd --ruby_out=$$oldpwd --python_out=$$oldpwd $(well_known_type_protoc_inputs) )
## 	@mkdir -p lite
## 	oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --java_out=lite:$$oldpwd/lite $(conformance_protoc_inputs) $(well_known_type_protoc_inputs) )
	touch protoc_middleman

endif

$(protoc_outputs): protoc_middleman

$(other_language_protoc_outputs): protoc_middleman

CLEANFILES = $(protoc_outputs) protoc_middleman javac_middleman conformance-java javac_middleman_lite conformance-java-lite conformance-csharp conformance-php conformance-php-c $(other_language_protoc_outputs)

MAINTAINERCLEANFILES = \
	Makefile.in

javac_middleman: ConformanceJava.java protoc_middleman $(other_language_protoc_outputs)
	jar=`ls ../java/util/target/*jar-with-dependencies.jar` && javac -classpath ../java/target/classes:$$jar ConformanceJava.java com/google/protobuf/conformance/Conformance.java com/google/protobuf_test_messages/proto3/TestMessagesProto3.java com/google/protobuf_test_messages/proto2/TestMessagesProto2.java
	@touch javac_middleman

conformance-java: javac_middleman
	@echo "Writing shortcut script conformance-java..."
	@echo '#! /bin/sh' > conformance-java
	@jar=`ls ../java/util/target/*jar-with-dependencies.jar` && echo java -classpath .:../java/target/classes:$$jar ConformanceJava '$$@' >> conformance-java
	@chmod +x conformance-java

javac_middleman_lite: ConformanceJavaLite.java protoc_middleman $(other_language_protoc_outputs)
	javac -classpath ../java/lite/target/classes:lite ConformanceJavaLite.java lite/com/google/protobuf/conformance/Conformance.java
	@touch javac_middleman_lite

conformance-java-lite: javac_middleman_lite
	@echo "Writing shortcut script conformance-java-lite..."
	@echo '#! /bin/sh' > conformance-java-lite
	@echo java -classpath .:../java/lite/target/classes:lite ConformanceJavaLite '$$@' >> conformance-java-lite
	@chmod +x conformance-java-lite

# Currently the conformance code is alongside the rest of the C#
# source, as it's easier to maintain there. We assume we've already
# built that, so we just need a script to run it.
conformance-csharp: $(other_language_protoc_outputs)
	@echo "Writing shortcut script conformance-csharp..."
	@echo '#! /bin/sh' > conformance-csharp
	@echo 'dotnet ../csharp/src/Google.Protobuf.Conformance/bin/Release/netcoreapp3.1/Google.Protobuf.Conformance.dll "$$@"' >> conformance-csharp
	@chmod +x conformance-csharp

conformance-php:
	@echo "Writing shortcut script conformance-php..."
	@echo '#! /bin/sh' > conformance-php
	@echo 'php -d auto_prepend_file=autoload.php ./conformance_php.php' >> conformance-php
	@chmod +x conformance-php

conformance-php-c:
	@echo "Writing shortcut script conformance-php-c..."
	@echo '#! /bin/sh' > conformance-php-c
	@echo 'php -dextension=../php/ext/google/protobuf/modules/protobuf.so ./conformance_php.php' >> conformance-php-c
	@chmod +x conformance-php-c

# Targets for actually running tests.
test_cpp: protoc_middleman conformance-test-runner conformance-cpp
	./conformance-test-runner --enforce_recommended --failure_list failure_list_cpp.txt --text_format_failure_list text_format_failure_list_cpp.txt ./conformance-cpp

test_java: protoc_middleman conformance-test-runner conformance-java
	./conformance-test-runner --enforce_recommended --failure_list failure_list_java.txt --text_format_failure_list text_format_failure_list_java.txt ./conformance-java

test_java_lite: protoc_middleman conformance-test-runner conformance-java-lite
	./conformance-test-runner --enforce_recommended ./conformance-java-lite

test_csharp: protoc_middleman conformance-test-runner conformance-csharp
	./conformance-test-runner --enforce_recommended --failure_list failure_list_csharp.txt --text_format_failure_list text_format_failure_list_csharp.txt ./conformance-csharp

test_ruby: protoc_middleman conformance-test-runner $(other_language_protoc_outputs)
	RUBYLIB=../ruby/lib:. ./conformance-test-runner --enforce_recommended --failure_list failure_list_ruby.txt --text_format_failure_list text_format_failure_list_ruby.txt ./conformance_ruby.rb

test_jruby: protoc_middleman conformance-test-runner $(other_language_protoc_outputs)
	RUBYLIB=../ruby/lib:. ./conformance-test-runner --enforce_recommended --failure_list failure_list_jruby.txt --text_format_failure_list text_format_failure_list_jruby.txt ./conformance_ruby.rb

test_php: protoc_middleman conformance-test-runner conformance-php $(other_language_protoc_outputs)
	./conformance-test-runner --enforce_recommended --failure_list failure_list_php.txt --text_format_failure_list text_format_failure_list_php.txt ./conformance-php

test_php_c: protoc_middleman conformance-test-runner conformance-php-c $(other_language_protoc_outputs)
	./conformance-test-runner --enforce_recommended --failure_list failure_list_php_c.txt --text_format_failure_list text_format_failure_list_php.txt ./conformance-php-c

# These depend on library paths being properly set up. The easiest way to
# run them is to just use "tox" from the python dir.
test_python: protoc_middleman conformance-test-runner
	./conformance-test-runner --enforce_recommended --failure_list failure_list_python.txt --text_format_failure_list text_format_failure_list_python.txt ./conformance_python.py

test_python_cpp: protoc_middleman conformance-test-runner
	./conformance-test-runner --enforce_recommended --failure_list failure_list_python_cpp.txt --text_format_failure_list text_format_failure_list_python_cpp.txt ./conformance_python.py

if OBJC_CONFORMANCE_TEST

test_objc: protoc_middleman conformance-test-runner conformance-objc
	./conformance-test-runner --enforce_recommended --failure_list failure_list_objc.txt ./conformance-objc

endif
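The test_* targets above all share one shape: build the per-language testee, then hand it to conformance-test-runner together with that language's failure list. A minimal sketch of the same invocation run by hand from the conformance directory, taken directly from the test_cpp recipe above:

  # Run the C++ testee under the conformance runner (mirrors the test_cpp target).
  ./conformance-test-runner \
    --enforce_recommended \
    --failure_list failure_list_cpp.txt \
    --text_format_failure_list text_format_failure_list_cpp.txt \
    ./conformance-cpp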
@@ -1,189 +0,0 @@
#!/usr/bin/env node
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

var conformance = require('conformance_pb');
var test_messages_proto3 = require('google/protobuf/test_messages_proto3_pb');
var test_messages_proto2 = require('google/protobuf/test_messages_proto2_pb');
var fs = require('fs');

var testCount = 0;

function doTest(request) {
  var testMessage;
  var response = new conformance.ConformanceResponse();

  try {
    if (request.getRequestedOutputFormat() == conformance.WireFormat.JSON) {
      response.setSkipped("JSON not supported.");
      return response;
    }

    if (request.getRequestedOutputFormat() ==
        conformance.WireFormat.TEXT_FORMAT) {
      response.setSkipped('Text format is not supported as output format.');
      return response;
    }

    switch (request.getPayloadCase()) {
      case conformance.ConformanceRequest.PayloadCase.PROTOBUF_PAYLOAD: {
        if (request.getMessageType() == "protobuf_test_messages.proto3.TestAllTypesProto3") {
          try {
            testMessage = test_messages_proto3.TestAllTypesProto3.deserializeBinary(
                request.getProtobufPayload());
          } catch (err) {
            response.setParseError(err.toString());
            return response;
          }
        } else if (request.getMessageType() == "protobuf_test_messages.proto2.TestAllTypesProto2") {
          try {
            testMessage = test_messages_proto2.TestAllTypesProto2.deserializeBinary(
                request.getProtobufPayload());
          } catch (err) {
            response.setParseError(err.toString());
            return response;
          }
        } else {
          throw "Protobuf request doesn't have specific payload type";
        }
      } break;

      case conformance.ConformanceRequest.PayloadCase.JSON_PAYLOAD:
        response.setSkipped("JSON not supported.");
        return response;

      case conformance.ConformanceRequest.PayloadCase.TEXT_PAYLOAD:
        response.setSkipped("Text format not supported.");
        return response;

      case conformance.ConformanceRequest.PayloadCase.PAYLOAD_NOT_SET:
        response.setRuntimeError("Request didn't have payload");
        return response;
    }

    switch (request.getRequestedOutputFormat()) {
      case conformance.WireFormat.UNSPECIFIED:
        response.setRuntimeError("Unspecified output format");
        return response;

      case conformance.WireFormat.PROTOBUF:
        response.setProtobufPayload(testMessage.serializeBinary());
        break;

      case conformance.WireFormat.JSON:
        response.setSkipped("JSON not supported.");
        return response;

      default:
        throw "Request didn't have requested output format";
    }
  } catch (err) {
    response.setRuntimeError(err.toString());
  }

  return response;
}

function onEof(totalRead) {
  if (totalRead == 0) {
    return undefined;
  } else {
    throw "conformance_nodejs: premature EOF on stdin.";
  }
}

// Utility function to read a buffer of N bytes.
function readBuffer(bytes) {
  var buf = new Buffer(bytes);
  var totalRead = 0;
  while (totalRead < bytes) {
    var read = 0;
    try {
      read = fs.readSync(process.stdin.fd, buf, totalRead, bytes - totalRead);
    } catch (e) {
      if (e.code == 'EOF') {
        return onEof(totalRead);
      } else if (e.code == 'EAGAIN') {
      } else {
        throw "conformance_nodejs: Error reading from stdin." + e;
      }
    }

    totalRead += read;
  }

  return buf;
}

function writeBuffer(buffer) {
  var totalWritten = 0;
  while (totalWritten < buffer.length) {
    totalWritten += fs.writeSync(
        process.stdout.fd, buffer, totalWritten, buffer.length - totalWritten);
  }
}

// Returns true if the test ran successfully, false on legitimate EOF.
// If EOF is encountered in an unexpected place, raises IOError.
function doTestIo() {
  var lengthBuf = readBuffer(4);
  if (!lengthBuf) {
    return false;
  }

  var length = lengthBuf.readInt32LE(0);
  var serializedRequest = readBuffer(length);
  if (!serializedRequest) {
    throw "conformance_nodejs: Failed to read request.";
  }

  serializedRequest = new Uint8Array(serializedRequest);
  var request =
      conformance.ConformanceRequest.deserializeBinary(serializedRequest);
  var response = doTest(request);

  var serializedResponse = response.serializeBinary();

  lengthBuf = new Buffer(4);
  lengthBuf.writeInt32LE(serializedResponse.length, 0);
  writeBuffer(lengthBuf);
  writeBuffer(new Buffer(serializedResponse));

  testCount += 1;

  return true;
}

while (true) {
  if (!doTestIo()) {
    console.error('conformance_nodejs: received EOF from test runner ' +
                  "after " + testCount + " tests, exiting");
    break;
  }
}
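doTestIo() above is the whole runner protocol: read a 4-byte little-endian length and a serialized ConformanceRequest from stdin, write a length-prefixed ConformanceResponse to stdout, repeat until EOF. Any executable that speaks that protocol can be handed to the runner as its final argument; a hedged sketch (the NODE_PATH entries are assumptions about where the generated *_pb.js modules would sit, not paths stated in this file):

  # Illustrative only: hook a Node.js testee up to the conformance runner.
  NODE_PATH=.:./google-protobuf ./conformance-test-runner \
    --enforce_recommended \
    ./conformance_nodejs.js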
@@ -1,162 +0,0 @@
Recommended.Proto2.ProtobufInput.ValidDataRepeated.ENUM.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.ENUM.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.ENUM.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.FIXED64.PackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.FIXED64.PackedInput.PackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.FIXED64.PackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.FIXED64.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.FIXED64.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.FIXED64.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT32.PackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT32.PackedInput.PackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT32.PackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT32.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT32.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT32.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT64.PackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT64.PackedInput.PackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT64.PackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT64.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT64.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT64.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.SFIXED64.PackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.SFIXED64.PackedInput.PackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.SFIXED64.PackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.SFIXED64.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.SFIXED64.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.SFIXED64.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.SINT64.PackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.SINT64.PackedInput.PackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.SINT64.PackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.SINT64.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.SINT64.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.SINT64.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT32.PackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT32.PackedInput.PackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT32.PackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT32.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT32.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT32.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT64.PackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT64.PackedInput.PackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT64.PackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT64.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT64.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT64.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataScalarBinary.ENUM[4].ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataScalarBinary.ENUM[5].ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataScalarBinary.FIXED64[2].ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataScalarBinary.INT32[7].ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataScalarBinary.INT64[2].ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataScalarBinary.SFIXED64[2].ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataScalarBinary.SINT64[2].ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataScalarBinary.UINT32[8].ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataScalarBinary.UINT64[2].ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.ENUM.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.ENUM.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.ENUM.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.FIXED64.PackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.FIXED64.PackedInput.PackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.FIXED64.PackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.FIXED64.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.FIXED64.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.FIXED64.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT32.PackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT32.PackedInput.PackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT32.PackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT32.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT32.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT32.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT64.PackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT64.PackedInput.PackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT64.PackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT64.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT64.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT64.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.SFIXED64.PackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.SFIXED64.PackedInput.PackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.SFIXED64.PackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.SFIXED64.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.SFIXED64.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.SFIXED64.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.SINT64.PackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.SINT64.PackedInput.PackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.SINT64.PackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.SINT64.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.SINT64.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.SINT64.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT32.PackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT32.PackedInput.PackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT32.PackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT32.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT32.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT32.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT64.PackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT64.PackedInput.PackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT64.PackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT64.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT64.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT64.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataScalarBinary.ENUM[4].ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataScalarBinary.ENUM[5].ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataScalarBinary.FIXED64[2].ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataScalarBinary.INT32[7].ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataScalarBinary.INT64[2].ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataScalarBinary.SFIXED64[2].ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataScalarBinary.SINT64[2].ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataScalarBinary.UINT32[8].ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataScalarBinary.UINT64[2].ProtobufOutput
Required.Proto2.ProtobufInput.RepeatedScalarSelectsLast.ENUM.ProtobufOutput
Required.Proto2.ProtobufInput.RepeatedScalarSelectsLast.FIXED64.ProtobufOutput
Required.Proto2.ProtobufInput.RepeatedScalarSelectsLast.UINT64.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataRepeated.ENUM.UnpackedInput.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataRepeated.FIXED64.PackedInput.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataRepeated.FIXED64.UnpackedInput.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataRepeated.INT32.PackedInput.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataRepeated.INT32.UnpackedInput.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataRepeated.INT64.PackedInput.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataRepeated.INT64.UnpackedInput.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataRepeated.SFIXED64.PackedInput.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataRepeated.SFIXED64.UnpackedInput.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataRepeated.SINT64.PackedInput.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataRepeated.SINT64.UnpackedInput.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataRepeated.UINT32.PackedInput.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataRepeated.UINT32.UnpackedInput.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataRepeated.UINT64.PackedInput.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataRepeated.UINT64.UnpackedInput.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataScalar.ENUM[4].ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataScalar.ENUM[5].ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataScalar.FIXED64[2].ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataScalar.INT32[7].ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataScalar.INT64[2].ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataScalar.SFIXED64[2].ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataScalar.SINT64[2].ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataScalar.UINT32[8].ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataScalar.UINT64[2].ProtobufOutput
Required.Proto3.ProtobufInput.RepeatedScalarSelectsLast.ENUM.ProtobufOutput
Required.Proto3.ProtobufInput.RepeatedScalarSelectsLast.FIXED64.ProtobufOutput
Required.Proto3.ProtobufInput.RepeatedScalarSelectsLast.UINT64.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataRepeated.ENUM.UnpackedInput.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataRepeated.FIXED64.PackedInput.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataRepeated.FIXED64.UnpackedInput.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataRepeated.INT32.PackedInput.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataRepeated.INT32.UnpackedInput.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataRepeated.INT64.PackedInput.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataRepeated.INT64.UnpackedInput.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataRepeated.SFIXED64.PackedInput.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataRepeated.SFIXED64.UnpackedInput.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataRepeated.SINT64.PackedInput.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataRepeated.SINT64.UnpackedInput.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataRepeated.UINT32.PackedInput.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataRepeated.UINT32.UnpackedInput.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataRepeated.UINT64.PackedInput.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataRepeated.UINT64.UnpackedInput.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataScalar.ENUM[4].ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataScalar.ENUM[5].ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataScalar.FIXED64[2].ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataScalar.INT32[7].ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataScalar.INT64[2].ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataScalar.SFIXED64[2].ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataScalar.SINT64[2].ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataScalar.UINT32[8].ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataScalar.UINT64[2].ProtobufOutput
@@ -0,0 +1,16 @@
load("//build_defs:internal_shell.bzl", "inline_sh_test")

# Run csharp compatibility test between 3.0.0 and the current version.
inline_sh_test(
    name = "tests",
    srcs = ["test.sh"],
    tools = ["//:protoc"],
    deps = glob([
        "src/**/*.cs*",  # .cs and .csproj
        "protos/**/*.proto",
    ]) + [
        "//csharp:srcs",
        "//:well_known_protos",
    ],
    cmd = "$(location test.sh) 3.0.0 $(rootpath //:protoc)",
)
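A hedged sketch of exercising this rule; the package label //csharp/compatibility_tests/v3.0.0 is an assumption about where this BUILD.bazel lives, not something the file itself states:

  # Assumed label; adjust to the package that actually contains this file.
  bazel test //csharp/compatibility_tests/v3.0.0:tests --test_output=errors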
@@ -0,0 +1,97 @@
load("//build_defs:internal_shell.bzl", "inline_sh_binary")
load("@rules_pkg//:mappings.bzl", "pkg_files", "strip_prefix")

inline_sh_binary(
    name = "build_conformance_test",
    srcs = ["Google.Protobuf.Conformance.csproj"],
    cmd = "dotnet build -c Release $(SRCS)",
    visibility = ["//visibility:private"],
)

CONFORMANCE_DEPS = [
    "Google.Protobuf.dll",
    "Google.Protobuf.Conformance.deps.json",
    "Google.Protobuf.Conformance.pdb",
    "Google.Protobuf.Conformance.runtimeconfig.json",
    "Google.Protobuf.Test.dll",
    "Google.Protobuf.Test.TestProtos.dll",
    "Microsoft.TestPlatform.CommunicationUtilities.dll",
    "Microsoft.TestPlatform.CoreUtilities.dll",
    "Microsoft.TestPlatform.CrossPlatEngine.dll",
    "Microsoft.TestPlatform.PlatformAbstractions.dll",
    "Microsoft.TestPlatform.Utilities.dll",
    "Microsoft.VisualStudio.CodeCoverage.Shim.dll",
    "Microsoft.VisualStudio.TestPlatform.Common.dll",
    "Microsoft.VisualStudio.TestPlatform.ObjectModel.dll",
    "NUnit3.TestAdapter.dll",
    "Newtonsoft.Json.dll",
    "NuGet.Frameworks.dll",
    "nunit.engine.api.dll",
    "nunit.engine.core.dll",
    "nunit.engine.dll",
    "nunit.framework.dll",
    "testcentric.engine.metadata.dll",
    "testhost.dll",
]

filegroup(
    name = "srcs",
    srcs = [
        "Conformance.cs",
        "Program.cs",
        "Google.Protobuf.Conformance.csproj",
    ],
    visibility = [
        "//csharp:__subpackages__",
    ],
)

genrule(
    name = "package_conformance_test",
    srcs = [
        "Program.cs",
        "Google.Protobuf.Conformance.csproj",
        "//conformance:conformance_csharp_proto",
        "//csharp:srcs",
    ],
    visibility = ["//visibility:private"],
    tools = [":build_conformance_test"],
    outs = CONFORMANCE_DEPS + ["Google.Protobuf.Conformance.dll"],
    cmd = """
        SRCDIR=$$(dirname $(location :Program.cs))
        cp $(location //conformance:conformance_csharp_proto) $$SRCDIR
        DOTNET_CLI_HOME=/tmp ./$(location :build_conformance_test)
        cp -r $$SRCDIR/bin/Release/netcoreapp3.1/* $(RULEDIR)
    """,
)

filegroup(
    name = "conformance_dll",
    srcs = ["Google.Protobuf.Conformance.dll"],
    data = [":package_conformance_test"],
    visibility = ["//conformance:__subpackages__"],
)

filegroup(
    name = "conformance_runfiles",
    srcs = CONFORMANCE_DEPS,
    data = [":package_conformance_test"],
    visibility = ["//conformance:__subpackages__"],
)

################################################################################
# Distribution files
################################################################################

pkg_files(
    name = "dist_files",
    srcs = [
        "BUILD.bazel",
        "Google.Protobuf.Conformance.csproj",
        "Conformance.cs",
        "Program.cs",
    ],
    strip_prefix = strip_prefix.from_root(""),
    visibility = ["//csharp:__pkg__"],
)
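The genrule runs `dotnet build` through :build_conformance_test and copies the published netcoreapp3.1 output into Bazel's output tree, so conformance rules can consume :conformance_dll and :conformance_runfiles instead of a checked-in binary. A hedged sketch of building it directly (the package path is an assumption, not stated in this file):

  # Assumed label for this package.
  bazel build //csharp/src/Google.Protobuf.Conformance:conformance_dll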
@@ -0,0 +1,22 @@
// Test that Kokoro is using the expected version of Java.
import static com.google.common.truth.Truth.assertWithMessage;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

@RunWith(JUnit4.class)
public class JavaVersionTest {
  @Test
  public void testJavaVersion() throws Exception {
    String exp = System.getenv("KOKORO_JAVA_VERSION");
    if (exp == null || exp.isEmpty()) {
      System.err.println("No kokoro java version found, skipping check");
      return;
    }
    String version = System.getProperty("java.version");
    assertWithMessage("Expected Java " + exp + " but found Java " + version)
        .that(version.startsWith(exp))
        .isTrue();
  }
}
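KOKORO_JAVA_VERSION only reaches this test when Bazel forwards it, which the new kokoro/linux/bazel.sh below does with --test_env. A hedged sketch of reproducing that locally (the target label is an assumption; point it at wherever this test is declared):

  # Illustrative label; the env var makes the version check active instead of skipped.
  KOKORO_JAVA_VERSION=11 bazel test \
    --test_env=KOKORO_JAVA_VERSION \
    //java/core:JavaVersionTest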
@@ -1,55 +0,0 @@
#!/bin/bash
#
# Script to compare a distribution archive for expected files based on git.
#
# Usage:
#   check_missing_dist_files.sh path/to/dist_archive.tar.gz

set -eux
set -o pipefail

# By default, look for a git repo based on this script's path.
: ${SOURCE_DIR:=$(cd $(dirname $0)/../.. ; pwd)}

# Use a temporary directory for intermediate files.
# Note that pipelines below use subshells to avoid multiple trap executions.
_workdir=$(mktemp -d)
function cleanup_workdir() { rm -r ${_workdir}; }
trap cleanup_workdir EXIT

# List all the files in the archive.
(
  tar -atf $1 | \
  cut -d/ -f2- | \
  sort
) > ${_workdir}/archive.lst

# List all files in the git repo that should be in the archive.
(
  git -C ${SOURCE_DIR} ls-files | \
  grep "^\(java\|python\|objectivec\|csharp\|ruby\|php\|cmake\|examples\|src/google/protobuf/.*\.proto\)" |\
  grep -v ".gitignore" | \
  grep -v "java/lite/proguard.pgcfg" | \
  grep -v "python/compatibility_tests" | \
  grep -v "python/docs" | \
  grep -v "python/.repo-metadata.json" | \
  grep -v "python/protobuf_distutils" | \
  grep -v "csharp/compatibility_tests" | \
  sort
) > ${_workdir}/expected.lst

# Check for missing files.
MISSING_FILES=( $(cd ${_workdir} && comm -13 archive.lst expected.lst) )
if (( ${#MISSING_FILES[@]} == 0 )); then
  exit 0
fi

(
  set +x
  echo -e "\n\nMissing files from archive:"
  for (( i=0 ; i < ${#MISSING_FILES[@]} ; i++ )); do
    echo "  ${MISSING_FILES[i]}"
  done
  echo -e "\nAdd them to the 'pkg_files' rule in corresponding BUILD.bazel.\n"
) >&2
exit 1
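Usage follows the header comment: point the script at a freshly built source archive and it diffs the archive listing against what git says should ship. A short sketch (the script location and archive name here are assumptions):

  # Compare a dist archive against the tracked files expected inside it.
  kokoro/common/check_missing_dist_files.sh /tmp/dist/protobuf-all.tar.gz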
@@ -1,16 +0,0 @@
# Shared logic to choose a Python version with pyenv.
#
# This file should be `source`d.

# Requested version of Python can be overridden by env variable.
: ${PYTHON_VERSION:=3.9.5}

if pyenv --version >/dev/null ; then
  eval "$(pyenv init -)"
  if ! pyenv global ${PYTHON_VERSION}; then
    echo "Python ${PYTHON_VERSION} is not available. Versions available:" >&2
    pyenv versions >&2
    exit 1
  fi
fi
echo "Using $(python --version || python3 --version)"
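The helper above is meant to be sourced, not executed, and honors a PYTHON_VERSION override. A minimal sketch, assuming pyenv is installed and the file lives at kokoro/common/pyenv.sh (the path other Kokoro scripts in this change source it from):

  # Select a non-default interpreter for one build; 3.8.12 is just an example value.
  export PYTHON_VERSION=3.8.12
  source kokoro/common/pyenv.sh
  python --version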
@@ -1,143 +0,0 @@
# This Dockerfile specifies the recipe for creating an image for the tests
# to run in.
#
# We install as many test dependencies here as we can, because these setup
# steps can be cached. They do *not* run every time we run the build.
# The Docker image is only rebuilt when the Dockerfile (i.e. this file)
# changes.

# Base Dockerfile for gRPC dev images
FROM 32bit/debian:latest

# Apt source for php
RUN echo "deb http://ppa.launchpad.net/ondrej/php/ubuntu trusty main" | tee /etc/apt/sources.list.d/various-php.list && \
    apt-key adv --keyserver keyserver.ubuntu.com --recv-keys F4FCBB07

# Install dependencies. We start with the basic ones required to build protoc
# and the C++ build
RUN apt-get clean && apt-get update && apt-get install -y --force-yes \
  autoconf \
  autotools-dev \
  build-essential \
  bzip2 \
  ccache \
  curl \
  gcc \
  git \
  libc6 \
  libc6-dbg \
  libc6-dev \
  libgtest-dev \
  libtool \
  make \
  parallel \
  time \
  wget \
  unzip \
  # -- For python --
  python-setuptools \
  python-pip \
  python-dev \
  # -- For C++ benchmarks --
  cmake \
  # -- For PHP --
  php5.5 \
  php5.5-dev \
  php5.5-xml \
  php5.6 \
  php5.6-dev \
  php5.6-xml \
  php7.0 \
  php7.0-dev \
  php7.0-xml \
  phpunit \
  valgrind \
  libxml2-dev \
  && apt-get clean

##################
# PHP dependencies.
RUN wget http://am1.php.net/get/php-5.5.38.tar.bz2/from/this/mirror
RUN mv mirror php-5.5.38.tar.bz2
RUN tar -xvf php-5.5.38.tar.bz2
RUN cd php-5.5.38 && ./configure --enable-maintainer-zts --prefix=/usr/local/php-5.5-zts && \
    make && make install && make clean && cd ..
RUN cd php-5.5.38 && make clean && ./configure --enable-bcmath --prefix=/usr/local/php-5.5 && \
    make && make install && make clean && cd ..

RUN wget http://am1.php.net/get/php-5.6.30.tar.bz2/from/this/mirror
RUN mv mirror php-5.6.30.tar.bz2
RUN tar -xvf php-5.6.30.tar.bz2
RUN cd php-5.6.30 && ./configure --enable-maintainer-zts --prefix=/usr/local/php-5.6-zts && \
    make && make install && cd ..
RUN cd php-5.6.30 && make clean && ./configure --enable-bcmath --prefix=/usr/local/php-5.6 && \
    make && make install && cd ..

RUN wget http://am1.php.net/get/php-7.0.18.tar.bz2/from/this/mirror
RUN mv mirror php-7.0.18.tar.bz2
RUN tar -xvf php-7.0.18.tar.bz2
RUN cd php-7.0.18 && ./configure --enable-maintainer-zts --prefix=/usr/local/php-7.0-zts && \
    make && make install && cd ..
RUN cd php-7.0.18 && make clean && ./configure --enable-bcmath --prefix=/usr/local/php-7.0 && \
    make && make install && cd ..

RUN wget http://am1.php.net/get/php-7.1.4.tar.bz2/from/this/mirror
RUN mv mirror php-7.1.4.tar.bz2
RUN tar -xvf php-7.1.4.tar.bz2
RUN cd php-7.1.4 && ./configure --enable-maintainer-zts --prefix=/usr/local/php-7.1-zts && \
    make && make install && cd ..
RUN cd php-7.1.4 && make clean && ./configure --enable-bcmath --prefix=/usr/local/php-7.1 && \
    make && make install && cd ..

RUN php -r "copy('https://getcomposer.org/installer', 'composer-setup.php');"
RUN php composer-setup.php
RUN mv composer.phar /usr/bin/composer
RUN php -r "unlink('composer-setup.php');"
RUN composer config -g -- disable-tls true
RUN composer config -g -- secure-http false
RUN cd /tmp && \
  git clone https://github.com/google/protobuf.git && \
  cd protobuf/php && \
  git reset --hard 49b44bff2b6257a119f9c6a342d6151c736586b8 && \
  ln -sfn /usr/local/php-5.5/bin/php /usr/bin/php && \
  ln -sfn /usr/local/php-5.5/bin/php-config /usr/bin/php-config && \
  ln -sfn /usr/local/php-5.5/bin/phpize /usr/bin/phpize && \
  composer install && \
  mv vendor /usr/local/vendor-5.5 && \
  ln -sfn /usr/local/php-5.6/bin/php /usr/bin/php && \
  ln -sfn /usr/local/php-5.6/bin/php-config /usr/bin/php-config && \
  ln -sfn /usr/local/php-5.6/bin/phpize /usr/bin/phpize && \
  composer install && \
  mv vendor /usr/local/vendor-5.6 && \
  ln -sfn /usr/local/php-7.0/bin/php /usr/bin/php && \
  ln -sfn /usr/local/php-7.0/bin/php-config /usr/bin/php-config && \
  ln -sfn /usr/local/php-7.0/bin/phpize /usr/bin/phpize && \
  composer install && \
  mv vendor /usr/local/vendor-7.0 && \
  ln -sfn /usr/local/php-7.1/bin/php /usr/bin/php && \
  ln -sfn /usr/local/php-7.1/bin/php-config /usr/bin/php-config && \
  ln -sfn /usr/local/php-7.1/bin/phpize /usr/bin/phpize && \
  composer install && \
  mv vendor /usr/local/vendor-7.1

##################
# Python dependencies

# These packages exist in apt-get, but their versions are too old, so we have
# to get updates from pip.

RUN pip install pip --upgrade
RUN pip install virtualenv tox yattag

##################
# Prepare ccache

RUN ln -s /usr/bin/ccache /usr/local/bin/gcc
RUN ln -s /usr/bin/ccache /usr/local/bin/g++
RUN ln -s /usr/bin/ccache /usr/local/bin/cc
RUN ln -s /usr/bin/ccache /usr/local/bin/c++
RUN ln -s /usr/bin/ccache /usr/local/bin/clang
RUN ln -s /usr/bin/ccache /usr/local/bin/clang++

# Define the default command.
CMD ["bash"]
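This image was only ever driven through build_and_run_docker.sh further down, but it can be exercised by hand; a hedged sketch (the directory and tag names are assumptions, not taken from this file):

  # Build the 32-bit test image locally and drop into a shell inside it.
  docker build -t protobuf-32bit-test path/to/this/dockerfile/dir
  docker run -it -v "$(pwd):/var/local/kokoro/protobuf:ro" protobuf-32bit-test bash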
@@ -0,0 +1,51 @@
#!/bin/bash

set -eux

# Change to repo root
cd $(dirname $0)/../../..

use_php() {
  VERSION=$1
  export PATH=/usr/local/php-${VERSION}/bin:$PATH
}

build_php() {
  use_php $1
  pushd php
  rm -rf vendor
  php -v
  php -m
  composer update
  composer test
  popd
}

test_php_c() {
  pushd php
  rm -rf vendor
  php -v
  php -m
  composer update
  composer test_c
  popd
}

build_php_c() {
  use_php $1
  test_php_c
}

cmake .
cmake --build . --target protoc -- -j20
export PROTOC=$(pwd)/protoc

build_php 7.0
build_php 7.1
build_php 7.4
build_php_c 7.0
build_php_c 7.1
build_php_c 7.4
build_php_c 7.1-zts
build_php_c 7.2-zts
build_php_c 7.5-zts
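use_php only prepends one of the /usr/local/php-<version>/bin prefixes (the ones installed in the test image), so a single version can be checked in isolation. A short sketch of the same steps build_php performs for one interpreter, assuming that prefix exists in the image:

  # Equivalent of `use_php 7.4` followed by `build_php 7.4`.
  export PATH=/usr/local/php-7.4/bin:$PATH
  pushd php && rm -rf vendor && composer update && composer test && popd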
@@ -0,0 +1,44 @@
#!/bin/bash

set -ex

if [[ -z "${CONTAINER_IMAGE}" ]]; then
  CONTAINER_IMAGE=gcr.io/protobuf-build/bazel/linux@sha256:2bfd061284eff8234f2fcca16d71d43c69ccf3a22206628b54c204a6a9aac277
fi

cd $(dirname $0)/../..
GIT_REPO_ROOT=`pwd`
rm -rf $GIT_REPO_ROOT/logs

ENVS=()

# Check for specific versions pinned to the docker image. In these cases we
# want to forward the environment variable to tests, so that they can verify
# that the correct version is being picked up by Bazel.
ENVS+=("--test_env=KOKORO_JAVA_VERSION")
ENVS+=("--test_env=KOKORO_PYTHON_VERSION")
ENVS+=("--test_env=KOKORO_RUBY_VERSION")

if [ -n "$BAZEL_ENV" ]; then
  for env in $BAZEL_ENV; do
    ENVS+=("--action_env=${env}")
  done
fi

tmpfile=$(mktemp -u)

docker run \
  --cidfile $tmpfile \
  -v $GIT_REPO_ROOT:/workspace \
  $CONTAINER_IMAGE \
  test \
  --keep_going \
  --test_output=streamed \
  ${ENVS[@]} \
  $PLATFORM_CONFIG \
  $BAZEL_EXTRA_FLAGS \
  $BAZEL_TARGETS

# Save logs for Kokoro
docker cp \
  `cat $tmpfile`:/workspace/logs $KOKORO_ARTIFACTS_DIR
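Every Linux job now funnels through this one entry point; the *.cfg files below only vary CONTAINER_IMAGE, BAZEL_TARGETS and related variables. A sketch of invoking it by hand with the values a config would normally provide (the exact values are illustrative):

  # Mirror a Kokoro config; KOKORO_ARTIFACTS_DIR must exist for the final log copy.
  export BAZEL_TARGETS="//src/..."
  export BAZEL_EXTRA_FLAGS="--test_output=errors"
  export KOKORO_ARTIFACTS_DIR=/tmp/kokoro-artifacts
  mkdir -p "$KOKORO_ARTIFACTS_DIR"
  ./kokoro/linux/bazel.sh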
@@ -1,44 +0,0 @@
#!/bin/bash
#
# Build file to set up and run tests
set -eu

# Install Bazel 4.0.0.
use_bazel.sh 4.0.0
bazel version

# Change to repo root
cd $(dirname $0)/../../..

# Get kokoro scripts from repo root by default.
: ${SCRIPT_ROOT:=$(pwd)}
source ${SCRIPT_ROOT}/kokoro/common/pyenv.sh

# Disabled for now, re-enable if appropriate.
#  //:build_files_updated_unittest \

bazel_args=(
  test
  --keep_going
  --copt=-Werror
  --host_copt=-Werror
  --test_output=errors
  --
  //...
  -//objectivec/...  # only works on macOS
  @com_google_protobuf_examples//...
)

${SCRIPT_ROOT}/kokoro/common/bazel_wrapper.sh "${bazel_args[@]}"

# Verify that we can build successfully from generated tar files.
(
  pyenv versions
  pyenv shell 2.7.9  # python2 required for old googletest autotools support
  git submodule update --init --recursive
  ./autogen.sh && ./configure && make -j$(nproc) dist
)
DIST=`ls *.tar.gz`
tar -xf $DIST
cd ${DIST//.tar.gz}
${SCRIPT_ROOT}/kokoro/common/bazel_wrapper.sh build //:protobuf //:protobuf_java
@@ -1,9 +0,0 @@
# Common config shared by presubmit and continuous.

bazel_setting: {
  project_id: "protobuf-build"
  bes_backend_address: "buildeventservice.googleapis.com"
  foundry_backend_address: "remotebuildexecution.googleapis.com"
  upsalite_frontend_address: "https://source.cloud.google.com"
  local_execution: true
}
@@ -1,5 +1,16 @@
 # Config file for running tests in Kokoro

 # Location of the build script in repository
-build_file: "protobuf/kokoro/linux/bazel/build.sh"
+build_file: "protobuf/kokoro/linux/bazel.sh"
 timeout_mins: 15
+
+env_vars {
+  key: "BAZEL_TARGETS"
+  value: "//src/..."
+}
+
+action {
+  define_artifacts {
+    regex: "**/sponge_log.*"
+  }
+}
@@ -1,5 +1,16 @@
-# Config file for running tests in Kokoro
+# Config file for running C++ Bazel tests in Kokoro

 # Location of the build script in repository
-build_file: "protobuf/kokoro/linux/bazel/build.sh"
+build_file: "protobuf/kokoro/linux/bazel.sh"
 timeout_mins: 15
+
+env_vars {
+  key: "BAZEL_TARGETS"
+  value: "//src/..."
+}
+
+action {
+  define_artifacts {
+    regex: "**/sponge_log.*"
+  }
+}
@@ -1,18 +0,0 @@
#!/bin/bash
#
# This is the top-level script we give to Kokoro as the entry point for
# running the "pull request" project:
#
# This script selects a specific Dockerfile (for building a Docker image) and
# a script to run inside that image. Then we delegate to the general
# build_and_run_docker.sh script.

# Change to repo root
cd $(dirname $0)/../../..

export DOCKERHUB_ORGANIZATION=protobuftesting
export DOCKERFILE_DIR=kokoro/linux/dockerfile/test/java_stretch
export DOCKER_RUN_SCRIPT=kokoro/linux/pull_request_in_docker.sh
export OUTPUT_DIR=testoutput
export TEST_SET="benchmark"
./kokoro/linux/build_and_run_docker.sh
@@ -1,64 +0,0 @@
#!/bin/bash
#
# Builds docker image and runs a command under it.
# This is a generic script that is configured with the following variables:
#
# DOCKERHUB_ORGANIZATION - The organization on docker hub storing the
#   Dockerfile.
# DOCKERFILE_DIR - Directory in which Dockerfile file is located.
# DOCKER_RUN_SCRIPT - Script to run under docker (relative to protobuf repo root)
# OUTPUT_DIR - Directory that will be copied from inside docker after finishing.
# $@ - Extra args to pass to docker run

set -ex

cd $(dirname $0)/../..
git_root=$(pwd)
cd -

# Use image name based on Dockerfile sha1
if [ -z "$DOCKERHUB_ORGANIZATION" ]
then
  DOCKERHUB_ORGANIZATION=grpctesting/protobuf
  DOCKER_IMAGE_NAME=${DOCKERHUB_ORGANIZATION}_$(sha1sum $DOCKERFILE_DIR/Dockerfile | cut -f1 -d\ )
else
  # TODO(teboring): Remove this when all tests have been migrated to separate
  # docker images.
  DOCKERFILE_PREFIX=$(basename $DOCKERFILE_DIR)
  DOCKER_IMAGE_NAME=${DOCKERHUB_ORGANIZATION}/${DOCKERFILE_PREFIX}_$(sha1sum $DOCKERFILE_DIR/Dockerfile | cut -f1 -d\ )
fi

# Pull dockerimage from Dockerhub. This sometimes fails intermittently, so we
# keep trying until we succeed.
until docker pull $DOCKER_IMAGE_NAME; do sleep 10; done

# Ensure existence of ccache directory
CCACHE_DIR=/tmp/protobuf-ccache
mkdir -p $CCACHE_DIR

# Choose random name for docker container
CONTAINER_NAME="build_and_run_docker_$(uuidgen)"

echo $git_root

# Run command inside docker
docker run \
  "$@" \
  -e CCACHE_DIR=$CCACHE_DIR \
  -e KOKORO_BUILD_NUMBER=$KOKORO_BUILD_NUMBER \
  -e KOKORO_BUILD_ID=$KOKORO_BUILD_ID \
  -e EXTERNAL_GIT_ROOT="/var/local/kokoro/protobuf" \
  -e TEST_SET="$TEST_SET" \
  -v "$git_root:/var/local/kokoro/protobuf:ro" \
  -v $CCACHE_DIR:$CCACHE_DIR \
  -w /var/local/git/protobuf \
  --name=$CONTAINER_NAME \
  $DOCKER_IMAGE_NAME \
  bash -l "/var/local/kokoro/protobuf/$DOCKER_RUN_SCRIPT" || FAILED="true"

# remove the container, possibly killing it first
docker rm -f $CONTAINER_NAME || true

[ -z "$FAILED" ] || {
  exit 1
}
@@ -1,11 +0,0 @@
#!/bin/bash
#
# This is the top-level script we give to Kokoro as the entry point for
# running the "continuous" and "presubmit" jobs.

set -ex

# Change to repo root
cd $(dirname $0)/../../..

kokoro/linux/aarch64/test_cpp_aarch64.sh
@@ -1,5 +1,21 @@
 # Config file for running tests in Kokoro
 
 # Location of the build script in repository
-build_file: "protobuf/kokoro/linux/cpp_aarch64/build.sh"
+build_file: "protobuf/kokoro/linux/bazel.sh"
 timeout_mins: 120
+
+env_vars {
+  key: "CONTAINER_IMAGE"
+  value: "gcr.io/protobuf-build/emulation/linux:aarch64-4e847d7a01c1792471b6dd985ab0bf2677332e6f"
+}
+
+env_vars {
+  key: "BAZEL_TARGETS"
+  value: "//src/..."
+}
+
+action {
+  define_artifacts {
+    regex: "**/sponge_log.*"
+  }
+}
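The rewritten configs no longer reference a per-language build.sh; every job now points at the shared protobuf/kokoro/linux/bazel.sh entry point and passes the container image and Bazel targets through env_vars. That entry point is not part of this hunk, so the following is only a hedged sketch of how such a script could consume these variables; none of the commands below are taken from the real bazel.sh:

    # Hypothetical consumer of CONTAINER_IMAGE / BAZEL_TARGETS (assumed
    # behavior, not the actual bazel.sh): run the requested targets inside
    # the pinned image.
    docker run --rm -v "$PWD:/workspace" -w /workspace \
      "$CONTAINER_IMAGE" \
      bazel test $BAZEL_TARGETS ${BAZEL_EXTRA_FLAGS:-} --test_output=errors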
@@ -1,5 +1,21 @@
 # Config file for running tests in Kokoro
 
 # Location of the build script in repository
-build_file: "protobuf/kokoro/linux/cpp_aarch64/build.sh"
+build_file: "protobuf/kokoro/linux/bazel.sh"
 timeout_mins: 120
+
+env_vars {
+  key: "CONTAINER_IMAGE"
+  value: "gcr.io/protobuf-build/emulation/linux:aarch64-4e847d7a01c1792471b6dd985ab0bf2677332e6f"
+}
+
+env_vars {
+  key: "BAZEL_TARGETS"
+  value: "//src/..."
+}
+
+action {
+  define_artifacts {
+    regex: "**/sponge_log.*"
+  }
+}
@@ -1,13 +0,0 @@
#!/bin/bash
#
# Build file to set up and run tests

# Change to repo root
cd $(dirname $0)/../../..

export DOCKERHUB_ORGANIZATION=protobuftesting
export DOCKERFILE_DIR=kokoro/linux/dockerfile/test/cpp_tcmalloc
export DOCKER_RUN_SCRIPT=kokoro/linux/pull_request_in_docker.sh
export OUTPUT_DIR=testoutput
export TEST_SET="cpp_tcmalloc"
./kokoro/linux/build_and_run_docker.sh
@@ -1,5 +1,21 @@
 # Config file for running tests in Kokoro
 
 # Location of the build script in repository
-build_file: "protobuf/kokoro/linux/cpp_tcmalloc/build.sh"
+build_file: "protobuf/kokoro/linux/bazel.sh"
 timeout_mins: 1440
+
+env_vars {
+  key: "CONTAINER_IMAGE"
+  value: "gcr.io/protobuf-build/tcmalloc/linux:64e8944e4f18d7d6c9649112a8a93be57e693cd8"
+}
+
+env_vars {
+  key: "BAZEL_TARGETS"
+  value: "//src/..."
+}
+
+action {
+  define_artifacts {
+    regex: "**/sponge_log.*"
+  }
+}
@@ -1,5 +1,21 @@
 # Config file for running tests in Kokoro
 
 # Location of the build script in repository
-build_file: "protobuf/kokoro/linux/cpp_tcmalloc/build.sh"
+build_file: "protobuf/kokoro/linux/bazel.sh"
 timeout_mins: 1440
+
+env_vars {
+  key: "CONTAINER_IMAGE"
+  value: "gcr.io/protobuf-build/tcmalloc/linux:64e8944e4f18d7d6c9649112a8a93be57e693cd8"
+}
+
+env_vars {
+  key: "BAZEL_TARGETS"
+  value: "//src/..."
+}
+
+action {
+  define_artifacts {
+    regex: "**/sponge_log.*"
+  }
+}
@@ -1,18 +0,0 @@
#!/bin/bash
#
# This is the top-level script we give to Kokoro as the entry point for
# running the "pull request" project:
#
# This script selects a specific Dockerfile (for building a Docker image) and
# a script to run inside that image. Then we delegate to the general
# build_and_run_docker.sh script.

# Change to repo root
cd $(dirname $0)/../../..

export DOCKERHUB_ORGANIZATION=protobuftesting
export DOCKERFILE_DIR=kokoro/linux/dockerfile/test/csharp
export DOCKER_RUN_SCRIPT=kokoro/linux/pull_request_in_docker.sh
export OUTPUT_DIR=testoutput
export TEST_SET="csharp"
./kokoro/linux/build_and_run_docker.sh
@@ -1,5 +1,27 @@
 # Config file for running tests in Kokoro
 
 # Location of the build script in repository
-build_file: "protobuf/kokoro/linux/csharp/build.sh"
+build_file: "protobuf/kokoro/linux/bazel.sh"
 timeout_mins: 1440
+
+env_vars {
+  key: "CONTAINER_IMAGE"
+  value: "gcr.io/protobuf-build/csharp/linux:3.1.415-6.0.100-6bbe70439ba5b0404bb12662cebc0296909389fa"
+}
+
+env_vars {
+  key: "BAZEL_TARGETS"
+  value: "//csharp/..."
+}
+
+env_vars {
+  key: "BAZEL_EXTRA_FLAGS"
+  value: "--action_env=DOTNET_CLI_TELEMETRY_OPTOUT=1 "
+         "--test_env=DOTNET_CLI_HOME=/home/bazel"
+}
+
+action {
+  define_artifacts {
+    regex: "**/sponge_log.*"
+  }
+}
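One detail worth noting in the C# config: BAZEL_EXTRA_FLAGS is written as two adjacent string literals, which protobuf text format (used by Kokoro configs) concatenates into a single value, "--action_env=DOTNET_CLI_TELEMETRY_OPTOUT=1 --test_env=DOTNET_CLI_HOME=/home/bazel". Assuming the shared bazel.sh simply appends these flags to the Bazel command line, the C# job presumably amounts to something like the sketch below; this is illustrative only and not taken from the actual script:

    # Assumed effective Bazel invocation for the C# job (illustrative only;
    # the real bazel.sh is not shown in this diff).
    bazel test //csharp/... \
      --action_env=DOTNET_CLI_TELEMETRY_OPTOUT=1 \
      --test_env=DOTNET_CLI_HOME=/home/bazel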
Some files were not shown because too many files have changed in this diff.