2 changes: 2 additions & 0 deletions packages/wdl_bench/convert.py
@@ -30,6 +30,8 @@
parse_line.parse_line_xxhash_benchmark(f, sum_c)
elif sys.argv[1] == "container_hash_maps_bench":
parse_line.parse_line_container_hash_maps_bench(f, sum_c)
elif sys.argv[1] == "erasure_code_perf":
parse_line.parse_line_erasure_code_perf(f, sum_c)
else:
parse_line.parse_line(f, sum_c)

25 changes: 21 additions & 4 deletions packages/wdl_bench/install_wdl_bench.sh
@@ -18,6 +18,7 @@ declare -A REPOS=(
['libaegis']='https://github.com/aegis-aead/libaegis.git'
['xxhash']='https://github.com/Cyan4973/xxHash.git'
['glibc']='https://sourceware.org/git/glibc.git'
+ ['isa-l']='https://github.com/intel/isa-l.git'
)

declare -A TAGS=(
@@ -29,6 +30,7 @@ declare -A TAGS=(
['libaegis']='0.4.2'
['xxhash']='136cc1f8fe4d5ea62a7c16c8424d4fa5158f6d68'
['glibc']="glibc-${GLIBC_VERSION}"
+ ['isa-l']='d36de972efc18f2e85ca182a8b6758ecc7da512b'
)

declare -A DATASETS=(
@@ -51,12 +53,12 @@ LINUX_DIST_ID="$(awk -F "=" '/^ID=/ {print $2}' /etc/os-release | tr -d '"')"

if [ "$LINUX_DIST_ID" = "ubuntu" ]; then
apt install -y cmake autoconf automake flex bison \
- nasm clang patch git libssl-dev \
+ nasm clang patch git libssl-dev libc6-dev \
tar unzip perl openssl python3-dev gawk

elif [ "$LINUX_DIST_ID" = "centos" ]; then
dnf install -y cmake autoconf automake flex bison \
- meson nasm clang patch \
+ meson nasm clang patch glibc-static \
git tar unzip perl openssl-devel python3-devel gawk
fi

@@ -71,7 +73,7 @@ fi

##################### BUILD AND INSTALL FUNCTIONS #########################

folly_benchmark_list="concurrency_concurrent_hash_map_bench hash_hash_benchmark container_hash_maps_bench stats_digest_builder_benchmark fibers_fibers_benchmark crypto_lt_hash_benchmark memcpy_benchmark memset_benchmark io_async_event_base_benchmark io_iobuf_benchmark function_benchmark random_benchmark synchronization_small_locks_benchmark synchronization_lifo_sem_bench range_find_benchmark"
folly_benchmark_list="concurrency_concurrent_hash_map_bench hash_hash_benchmark container_hash_maps_bench stats_digest_builder_benchmark fibers_fibers_benchmark crypto_lt_hash_benchmark memcpy_benchmark memset_benchmark io_async_event_base_benchmark io_iobuf_benchmark function_benchmark random_benchmark synchronization_small_locks_benchmark synchronization_lifo_sem_bench range_find_benchmark hash_checksum_benchmark"

fbthrift_benchmark_list="ProtocolBench VarintUtilsBench"

@@ -146,7 +148,7 @@ build_lzbench()
pushd "${WDL_SOURCE}"
clone $lib || echo "Failed to clone $lib"
cd "$lib" || exit
- make -j
+ make BUILD_STATIC=1 -j
cp ./lzbench "${WDL_ROOT}/" || exit

download_dataset 'silesia'
@@ -235,6 +237,20 @@ build_glibc()
popd || exit
}

+ build_isa_l()
+ {
+     lib='isa-l'
+     pushd "${WDL_SOURCE}"
+     clone $lib || echo "Failed to clone $lib"
+     cd "$lib" || exit
+     ./autogen.sh
+     ./configure
+     make perfs -j
+     cp ./erasure_code/erasure_code_perf "${WDL_ROOT}/" || exit
+
+     popd || exit
+ }
+

##################### BUILD AND INSTALL #########################

@@ -248,6 +264,7 @@
build_libaegis
build_xxhash
build_glibc
+ build_isa_l

cp "${BPKGS_WDL_ROOT}/run.sh" ./
cp "${BPKGS_WDL_ROOT}/run_prod.sh" ./
9 changes: 9 additions & 0 deletions packages/wdl_bench/parse_line.py
@@ -190,3 +190,12 @@ def parse_line_container_hash_maps_bench(f, sum_c):
    for k, v in data.items():
        if re.search("^(Find)|(Insert)|(InsertSqBr)|(Erase)|(Iter)", k):
            sum_c[k] = v
+
+
+ def parse_line_erasure_code_perf(f, sum_c):
+     for line in f:
+         elements = line.split()
+         if elements and re.search("warm", elements[0]):
+             name = elements[0]
+             value = float(elements[-2])
+             sum_c[name + ": MB/s"] = value
1 change: 1 addition & 0 deletions packages/wdl_bench/run.sh
@@ -122,6 +122,7 @@ main() {

if [ "$run_type" = "prod" ]; then
bash "${WDL_ROOT}/run_prod.sh"
+ exit 0
fi

set -u # Enable unbound variables check from here onwards
29 changes: 21 additions & 8 deletions packages/wdl_bench/run_prod.sh
@@ -30,27 +30,39 @@ Usage: ${0##*/} [-h] [--type single_core|all_core|multi_thread]
EOF
}

- prod_benchmark_list_mem="memcpy_benchmark bench-memcmp"
+ prod_benchmark_list_mem="memcpy_benchmark bench-memcmp memset_benchmark"
prod_benchmark_list_hash="hash_hash_benchmark xxhash_benchmark"
prod_benchmark_compression="lzbench"
prod_benchmark_crypto="openssl libaegis_benchmark"
- prod_benchmark_checksum="hash_checksum_benchmark"
+ prod_benchmark_checksum="hash_checksum_benchmark erasure_code_perf"
prod_benchmark_rng="random_benchmark"
prod_benchmark_chm="concurrency_concurrent_hash_map_bench"
prod_benchmark_thrift="ProtocolBench VarintUtilsBench"
prod_benchmark_f14="container_hash_maps_bench"
prod_benchmark_lock="synchronization_small_locks_benchmark synchronization_lifo_sem_bench"
prod_benchmark_vdso="vdso_bench"

prod_benchmarks="memcpy_benchmark bench-memcmp hash_hash_benchmark xxhash_benchmark lzbench openssl libaegis_benchmark hash_checksum_benchmark random_benchmark concurrency_concurrent_hash_map_bench ProtocolBench VarintUtilsBench container_hash_maps_bench synchronization_small_locks_benchmark synchronization_lifo_sem_bench vdso_bench"
prod_benchmarks="memcpy_benchmark memset_benchmark bench-memcmp hash_hash_benchmark xxhash_benchmark lzbench openssl libaegis_benchmark hash_checksum_benchmark erasure_code_perf random_benchmark concurrency_concurrent_hash_map_bench ProtocolBench VarintUtilsBench container_hash_maps_bench synchronization_small_locks_benchmark synchronization_lifo_sem_bench vdso_bench"

benchmark_non_json_list=("openssl" "libaegis_benchmark" "lzbench" "vdso_bench" "xxhash_benchmark" "concurrency_concurrent_hash_map_bench" "container_hash_maps_bench")
benchmark_non_json_list=("openssl" "libaegis_benchmark" "lzbench" "vdso_bench" "xxhash_benchmark" "concurrency_concurrent_hash_map_bench" "container_hash_maps_bench" "erasure_code_perf")

+ exec_non_json() {
+     local input="$1"
+     for item in "${benchmark_non_json_list[@]}"; do
+         if [[ "$item" == "$input" ]]; then
+             return 0
+         fi
+     done
+
+     return 1
+ }
+
run_list=""

declare -A prod_benchmark_config=(
['random_benchmark']="--bm_regex=xoshiro --json"
['memcpy_benchmark']="--json"
+ ['memset_benchmark']="--json"
['hash_hash_benchmark']="--bm_regex=RapidHash --json"
['hash_checksum_benchmark']="--json"
['synchronization_lifo_sem_bench']="--bm_min_iters=1000000 --json"
@@ -59,12 +71,13 @@ declare -A prod_benchmark_config=(
['ProtocolBench']="--bm_regex=\"(^Binary)|(^Compact)Protocol\" --json"
['VarintUtilsBench']=" --json"
['concurrency_concurrent_hash_map_bench']=""
['lzbench']="-v -ezstd1,3 ${WDL_DATASETS}/${dataset}"
['lzbench']="-v -ezstd,1,3 ${WDL_DATASETS}/silesia.tar"
['openssl']="speed -seconds 20 -evp aes-256-gcm"
['vdso_bench']="-t 10 -p 20"
['libaegis_benchmark']=""
['xxhash_benchmark']="xxh3"
['bench-memcmp']=""
+ ['erasure_code_perf']=""
)

main() {
@@ -123,12 +136,12 @@ main() {
ldconfig
fi
out_file=""
if [[ " $benchmark_non_json_list{[*]} " =~ " ${benchmark} " ]]; then
if exec_non_json "${benchmark}"; then
out_file="out_${benchmark}.txt"
else
out_file="out_${benchmark}.json"
fi
"./${benchmark}" "${prod_benchmark_config[$benchmark]}" 2>&1 | tee -a "${out_file}"
bash -c "./${benchmark} ${prod_benchmark_config[$benchmark]}" 2>&1 | tee -a "${out_file}"
if [ "$benchmark" = "openssl" ]; then
unset LD_LIBRARY_PATH
ldconfig
@@ -142,7 +155,7 @@
fi

for benchmark in $run_list; do
if [[ " $benchmark_non_json_list{[*]} " =~ " ${benchmark} " ]]; then
if exec_non_json "${benchmark}"; then
python3 ./convert.py "$benchmark"
fi
done