@tabokie · Last active July 21, 2022
Benchmarks on multiple RocksDB instances with db_bench
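# The runs below assume a release-mode db_bench binary in the working directory.
# A minimal build sketch from a RocksDB checkout (the clone URL, -j level, and the
# copy destination are assumptions; adjust to your environment):
#   git clone https://github.com/facebook/rocksdb.git && cd rocksdb
#   make db_bench -j8 DEBUG_LEVEL=0   # DEBUG_LEVEL=0 builds an optimized binary
#   cp db_bench /path/to/benchmark/dir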
export CACHE_SIZE=$((8*1024*1024*1024))
export FILE_SIZE=$((8*1024*1024))
export NUM_DBS=20
export NUM_KEYS=2000000 # ~2GB each DB
# export EXTRAS=--db_write_buffer_size=134217728 # 1/10 of write_buffer_size(64M)*num_dbs
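#   i.e. (64MiB default write_buffer_size * 20 DBs) / 10 = 134217728 bytes (128MiB)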
export SUFFIX=vanilla
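# Load phase: fillrandom over 20 DBs, 20B keys / 400B values, lz4 compression, WAL disabled.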
./db_bench --db=$(pwd)/dbbench --num=${NUM_KEYS} --num_multi_db=${NUM_DBS} --cache_size=${CACHE_SIZE} --key_size=20 --value_size=400 --max_background_jobs=8 --cache_index_and_filter_blocks=true --level_compaction_dynamic_level_bytes=true --target_file_size_base=${FILE_SIZE} --compression_type=lz4 --seed=777 --benchmarks=fillrandom --use_existing_db=0 --sync=0 --threads=${NUM_DBS} --allow_concurrent_memtable_write=false --disable_wal=1 ${EXTRAS} > load.${SUFFIX}.log
du -d1 -h $(pwd)/dbbench >> load.${SUFFIX}.log
cp -rf $(pwd)/dbbench $(pwd)/dbbench.${SUFFIX}.bak
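# Update phase: 900s of random overwrites against the loaded DBs (use_existing_db=1).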
./db_bench --db=$(pwd)/dbbench --num=${NUM_KEYS} --num_multi_db=${NUM_DBS} --cache_size=${CACHE_SIZE} --key_size=20 --value_size=400 --max_background_jobs=8 --cache_index_and_filter_blocks=true --level_compaction_dynamic_level_bytes=true --target_file_size_base=${FILE_SIZE} --compression_type=lz4 --seed=7777 --benchmarks=overwrite --use_existing_db=1 --sync=0 --threads=${NUM_DBS} --allow_concurrent_memtable_write=false --disable_wal=1 --duration=900 ${EXTRAS} > overwrite.${SUFFIX}.log
du -d1 -h $(pwd)/dbbench >> overwrite.${SUFFIX}.log
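# Mixed phase: 900s of readwhilewriting against the existing DBs.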
./db_bench --db=$(pwd)/dbbench --num=${NUM_KEYS} --num_multi_db=${NUM_DBS} --cache_size=${CACHE_SIZE} --key_size=20 --value_size=400 --max_background_jobs=8 --cache_index_and_filter_blocks=true --level_compaction_dynamic_level_bytes=true --target_file_size_base=${FILE_SIZE} --compression_type=lz4 --seed=77 --benchmarks=readwhilewriting --use_existing_db=1 --sync=0 --threads=${NUM_DBS} --allow_concurrent_memtable_write=false --disable_wal=1 --duration=900 ${EXTRAS} > readwhilewriting.${SUFFIX}.log
du -d1 -h $(pwd)/dbbench >> readwhilewriting.${SUFFIX}.log
# Summary
echo "\n[insert] (ops, n_flushes, size)"
cat load.${SUFFIX}.log | grep "^fillrandom"
cat load.${SUFFIX}.log | grep "\[flush\]" -c
cat load.${SUFFIX}.log | grep "dbbench$"
echo "\n[update] (ops, n_flushes, size)"
cat overwrite.${SUFFIX}.log | grep "^overwrite"
cat overwrite.${SUFFIX}.log | grep "\[flush\]" -c
cat overwrite.${SUFFIX}.log | grep "dbbench$"
echo "\n[read write] (ops, n_flushes, size)"
cat readwhilewriting.${SUFFIX}.log | grep "^readwhilewriting"
cat readwhilewriting.${SUFFIX}.log | grep "\[flush\]" -c
cat readwhilewriting.${SUFFIX}.log | grep "dbbench$"
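# Optional: a compact throughput recap (a sketch, not part of the original runs). It
# assumes the standard db_bench report line "<name> : <x> micros/op <y> ops/sec ...",
# where the aggregate ops/sec is the 5th whitespace-separated field.
echo -e "\n[throughput recap] (run, ops/sec)"
for pair in load:fillrandom overwrite:overwrite readwhilewriting:readwhilewriting; do
  log=${pair%%:*}; bench=${pair##*:}
  grep "^${bench} " ${log}.${SUFFIX}.log | awk -v r=${log} '{print r, $5}'
done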