simplebench/bench-backup: add --drop-caches argument

Add an option to drop caches before each test run. It may improve the
reliability of results when testing in cached mode.

Signed-off-by: Vladimir Sementsov-Ogievskiy <vsementsov@virtuozzo.com>

--- a/scripts/simplebench/bench-backup.py
+++ b/scripts/simplebench/bench-backup.py
@@ -156,7 +156,8 @@ def bench(args):
         })
 
     result = simplebench.bench(bench_func, test_envs, test_cases,
-                               count=args.count, initial_run=args.initial_run)
+                               count=args.count, initial_run=args.initial_run,
+                               drop_caches=args.drop_caches)
     with open('results.json', 'w') as f:
         json.dump(result, f, indent=4)
     print(results_to_text(result))
@@ -221,4 +222,7 @@ Number of test runs per table cell''')
 Do additional initial run per cell which doesn't count in result,
 default true''')
+    p.add_argument('--drop-caches', action='store_true', help='''\
+Do "sync; echo 3 > /proc/sys/vm/drop_caches" before each test run''')
 
     bench(p.parse_args())
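Note: the hunks above only show the call site in bench-backup.py; simplebench.bench() itself is not touched by this commit. For the new keyword to reach bench_one() (patched below), bench() presumably forwards extra keyword arguments unchanged. A minimal sketch of such pass-through, assuming a signature that is not shown in this commit (names and the result layout are illustrative only):

    # Sketch only: simplebench.bench() is not part of this diff.  It is
    # assumed here to forward extra keyword arguments (such as drop_caches)
    # straight through to bench_one().
    def bench(test_func, test_envs, test_cases, *args, **kwargs):
        results = {}
        for env in test_envs:
            for case in test_cases:
                # drop_caches=args.drop_caches from bench-backup.py ends up
                # in bench_one() via **kwargs
                results[(env['id'], case['id'])] = bench_one(
                    test_func, env, case, *args, **kwargs)
        return results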

--- a/scripts/simplebench/simplebench.py
+++ b/scripts/simplebench/simplebench.py
@@ -19,11 +19,17 @@
 #
 
 import statistics
+import subprocess
 import time
 
 
+def do_drop_caches():
+    subprocess.run('sync; echo 3 > /proc/sys/vm/drop_caches', shell=True,
+                   check=True)
+
+
 def bench_one(test_func, test_env, test_case, count=5, initial_run=True,
-              slow_limit=100):
+              slow_limit=100, drop_caches=False):
     """Benchmark one test-case
 
     test_func -- benchmarking function with prototype
@@ -40,6 +46,7 @@ def bench_one(test_func, test_env, test_case, count=5, initial_run=True,
     initial_run -- do initial run of test_func, which don't get into result
     slow_limit -- stop at slow run (that exceedes the slow_limit by seconds).
                   (initial run is not measured)
+    drop_caches -- drop caches before each run
 
     Returns dict with the following fields:
         'runs': list of test_func results
@@ -53,6 +60,7 @@ def bench_one(test_func, test_env, test_case, count=5, initial_run=True,
     """
     if initial_run:
         print('  #initial run:')
+        do_drop_caches()
         print('   ', test_func(test_env, test_case))
 
     runs = []
@@ -60,6 +68,7 @@ def bench_one(test_func, test_env, test_case, count=5, initial_run=True,
         t = time.time()
 
         print('  #run {}'.format(i+1))
+        do_drop_caches()
         res = test_func(test_env, test_case)
         print('   ', res)
         runs.append(res)
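Taken together, the two files wire the new --drop-caches CLI flag through to bench_one(), which shells out to sync and /proc/sys/vm/drop_caches before each run. Below is a self-contained sketch of that pattern for reference; it assumes Linux and root privileges, and the drop_caches guard plus the toy workload are illustrative additions, not code from this commit:

    # Standalone sketch of the drop-caches-before-timing pattern.
    # Assumes Linux and root privileges; the guard and the workload below
    # are illustrative and not taken from the commit.
    import subprocess
    import time


    def do_drop_caches():
        # Flush dirty pages, then drop page cache, dentries and inodes.
        subprocess.run('sync; echo 3 > /proc/sys/vm/drop_caches', shell=True,
                       check=True)


    def timed_run(func, drop_caches=False):
        """Run func once, optionally after dropping caches; return seconds."""
        if drop_caches:
            do_drop_caches()
        start = time.time()
        func()
        return time.time() - start


    def workload():
        # Toy workload: re-reading the same file is fast when it is cached.
        with open('/etc/passwd', 'rb') as f:
            f.read()


    if __name__ == '__main__':
        print('cold run:', timed_run(workload, drop_caches=True))
        print('warm run:', timed_run(workload))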