#!/bin/bash
#
# Change to repo root
cd "$(dirname "$0")/../../.."

set -ex

export OUTPUT_DIR=testoutput
repo_root="$(pwd)"

# TODO(jtattermusch): Add back support for benchmarking with tcmalloc for C++ and python.
# This feature was removed since it used to use tcmalloc from https://github.com/gperftools/gperftools.git
# which is very outdated. See https://github.com/protocolbuffers/protobuf/issues/8725.

# download datasets for benchmark
pushd benchmarks
datasets=$(for file in $(find . -type f -name "dataset.*.pb" -not -path "./tmp/*"); do echo "$(pwd)/$file"; done | xargs)
echo $datasets
popd

# build Python protobuf
./autogen.sh
./configure CXXFLAGS="-fPIC -O2"
make -j8
pushd python
python3 -m venv env
source env/bin/activate
python3 setup.py build --cpp_implementation
pip3 install --install-option="--cpp_implementation" .
popd

# build and run Python benchmark
# We do this before building protobuf C++ since the C++ build
# will rewrite some libraries used by protobuf python.
pushd benchmarks
make python-pure-python-benchmark
make python-cpp-reflection-benchmark
make -j8 python-cpp-generated-code-benchmark
echo "[" > tmp/python_result.json
echo "benchmarking pure python..."
./python-pure-python-benchmark --json --behavior_prefix="pure-python-benchmark" $datasets >> tmp/python_result.json
echo "," >> "tmp/python_result.json"
echo "benchmarking python cpp reflection..."
env LD_LIBRARY_PATH="${repo_root}/src/.libs" ./python-cpp-reflection-benchmark --json --behavior_prefix="cpp-reflection-benchmark" $datasets >> tmp/python_result.json
echo "," >> "tmp/python_result.json"
echo "benchmarking python cpp generated code..."
env LD_LIBRARY_PATH="${repo_root}/src/.libs" ./python-cpp-generated-code-benchmark --json --behavior_prefix="cpp-generated-code-benchmark" $datasets >> tmp/python_result.json
echo "]" >> "tmp/python_result.json"
popd

# build CPP protobuf
./configure
make clean && make -j8

pushd java
mvn package -B -Dmaven.test.skip=true
popd

pushd benchmarks

# build and run C++ benchmark
# "make clean" deletes the contents of the tmp/ directory, so we move python_result.json elsewhere and restore it once the build is done.
# TODO(jtattermusch): find a less clumsy way of protecting python_result.json contents
mv tmp/python_result.json . && make clean && make -j8 cpp-benchmark && mv python_result.json tmp
echo "benchmarking cpp..."
env ./cpp-benchmark --benchmark_min_time=5.0 --benchmark_out_format=json --benchmark_out="tmp/cpp_result.json" $datasets

# TODO(jtattermusch): add benchmarks for https://github.com/protocolbuffers/protobuf-go.
# The original benchmarks for https://github.com/golang/protobuf were removed
# because:
# * they were broken and hadn't been producing results for a long time
# * the https://github.com/golang/protobuf implementation has been superseded by
#   https://github.com/protocolbuffers/protobuf-go

# build and run java benchmark (java 11 is required)
make java-benchmark
echo "benchmarking java..."
./java-benchmark -Cresults.file.options.file="tmp/java_result.json" $datasets

# TODO(jtattermusch): re-enable JS benchmarks once https://github.com/protocolbuffers/protobuf/issues/8747 is fixed.
# build and run js benchmark
# make js-benchmark
# echo "benchmarking js..."
# ./js-benchmark $datasets --json_output=$(pwd)/tmp/node_result.json

# TODO(jtattermusch): add php-c-benchmark. Currently its build is broken.

# persist the raw results in the build job log (for better debuggability)
cat tmp/cpp_result.json
cat tmp/java_result.json
cat tmp/python_result.json

# print the postprocessed results to the build job log
# TODO(jtattermusch): re-enable uploading results to bigquery (it is currently broken)
make python_add_init
env LD_LIBRARY_PATH="${repo_root}/src/.libs" python3 -m util.result_parser \
  -cpp="../tmp/cpp_result.json" -java="../tmp/java_result.json" -python="../tmp/python_result.json"
popd
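
# Note (an optional local check, not part of this CI job): tmp/python_result.json is
# hand-assembled above from "[", ",", "]" and the raw benchmark output, so one quick way
# to confirm it is well-formed JSON would be, for example:
#   python3 -m json.tool benchmarks/tmp/python_result.json > /dev/null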