From 22bec79d1aef8fd82c7870e62f77b7817f0575a7 Mon Sep 17 00:00:00 2001
From: Jeff King
Date: Fri, 17 Aug 2018 16:56:37 -0400
Subject: [PATCH] t/perf: add infrastructure for measuring sizes

The main objective of scripts in the perf framework is to run
"test_perf", which measures the time it takes to run some operation.
However, it can also be interesting to see the change in the output
size of certain operations.

This patch introduces test_size, which records a single numeric
output from the test and shows it in the aggregated output (with
pretty printing and relative size comparison).

Signed-off-by: Jeff King
Signed-off-by: Junio C Hamano
---
 t/perf/README         | 25 ++++++++++++++++++++++
 t/perf/aggregate.perl | 48 ++++++++++++++++++++++++++++++++++++++-----
 t/perf/perf-lib.sh    | 13 ++++++++++++
 3 files changed, 81 insertions(+), 5 deletions(-)

diff --git a/t/perf/README b/t/perf/README
index 21321a0f36..be12090c38 100644
--- a/t/perf/README
+++ b/t/perf/README
@@ -168,3 +168,28 @@ that
 While we have tried to make sure that it can cope with embedded
 whitespace and other special characters, it will not work with
 multi-line data.
+
+Rather than tracking the performance by run-time as `test_perf` does, you
+may also track output size by using `test_size`. The stdout of the
+function should be a single numeric value, which will be captured and
+shown in the aggregated output. For example:
+
+	test_perf 'time foo' '
+		./foo >foo.out
+	'
+
+	test_size 'output size' '
+		wc -c <foo.out
+	'
diff --git a/t/perf/aggregate.perl b/t/perf/aggregate.perl
--- a/t/perf/aggregate.perl
+++ b/t/perf/aggregate.perl
@@ -13,10 +13,16 @@ sub get_times {
 	my $line = <$fh>;
 	return undef if not defined $line;
 	close $fh or die "cannot close $name: $!";
-	$line =~ /^(?:(\d+):)?(\d+):(\d+(?:\.\d+)?) (\d+(?:\.\d+)?) (\d+(?:\.\d+)?)$/
-		or die "bad input line: $line";
-	my $rt = ((defined $1 ? $1 : 0.0)*60+$2)*60+$3;
-	return ($rt, $4, $5);
+	# times
+	if ($line =~ /^(?:(\d+):)?(\d+):(\d+(?:\.\d+)?) (\d+(?:\.\d+)?) (\d+(?:\.\d+)?)$/) {
+		my $rt = ((defined $1 ? $1 : 0.0)*60+$2)*60+$3;
+		return ($rt, $4, $5);
+	# size
+	} elsif ($line =~ /^\d+$/) {
+		return $&;
+	} else {
+		die "bad input line: $line";
+	}
 }
 
 sub relative_change {
@@ -32,9 +38,15 @@ sub relative_change {
 
 sub format_times {
 	my ($r, $u, $s, $firstr) = @_;
+	# no value means we did not finish the test
 	if (!defined $r) {
 		return "<missing>";
 	}
+	# a single value means we have a size, not times
+	if (!defined $u) {
+		return format_size($r, $firstr);
+	}
+	# otherwise, we have real/user/system times
 	my $out = sprintf "%.2f(%.2f+%.2f)", $r, $u, $s;
 	$out .= ' ' . relative_change($r, $firstr) if defined $firstr;
 	return $out;
@@ -54,6 +66,25 @@
 EOT
 	exit(1);
 }
 
+sub human_size {
+	my $n = shift;
+	my @units = ('', qw(K M G));
+	while ($n > 900 && @units > 1) {
+		$n /= 1000;
+		shift @units;
+	}
+	return $n unless length $units[0];
+	return sprintf '%.1f%s', $n, $units[0];
+}
+
+sub format_size {
+	my ($size, $first) = @_;
+	# match the width of a time: 0.00(0.00+0.00)
+	my $out = sprintf '%15s', human_size($size);
+	$out .= ' ' . relative_change($size, $first) if defined $first;
+	return $out;
+}
+
 my (@dirs, %dirnames, %dirabbrevs, %prefixes, @tests,
     $codespeed, $sortby, $subsection, $reponame);
@@ -184,7 +215,14 @@ sub print_default_results {
 		my $firstr;
 		for my $i (0..$#dirs) {
 			my $d = $dirs[$i];
-			$times{$prefixes{$d}.$t} = [get_times("$resultsdir/$prefixes{$d}$t.times")];
+			my $base = "$resultsdir/$prefixes{$d}$t";
+			$times{$prefixes{$d}.$t} = [];
+			foreach my $type (qw(times size)) {
+				if (-e "$base.$type") {
+					$times{$prefixes{$d}.$t} = [get_times("$base.$type")];
+					last;
+				}
+			}
 			my ($r,$u,$s) = @{$times{$prefixes{$d}.$t}};
 			my $w = length format_times($r,$u,$s,$firstr);
 			$colwidth[$i] = $w if $w > $colwidth[$i];
diff --git a/t/perf/perf-lib.sh b/t/perf/perf-lib.sh
index a54be09516..11d1922cf5 100644
--- a/t/perf/perf-lib.sh
+++ b/t/perf/perf-lib.sh
@@ -231,6 +231,19 @@ test_perf () {
 	test_wrapper_ test_perf_ "$@"
 }
 
+test_size_ () {
+	say >&3 "running: $2"
+	if test_eval_ "$2" 3>"$base".size; then
+		test_ok_ "$1"
+	else
+		test_failure_ "$@"
+	fi
+}
+
+test_size () {
+	test_wrapper_ test_size_ "$@"
+}
+
 # We extend test_done to print timings at the end (./run disables this
 # and does it after running everything)
 test_at_end_hook_ () {
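
For illustration only, here is a sketch of how a complete perf script might use
the new helper, modeled on the "foo" example in the README hunk above. It is
not part of the patch: the test description, the rev-list invocation, and the
objects.out file are made up for this sketch; test_perf_default_repo,
test_perf, test_size, and test_done are the existing perf-lib.sh entry points.

	#!/bin/sh
	# Hypothetical perf script sketch (not from the patch); the measured
	# command and file names are illustrative, only the framework calls
	# come from t/perf.
	test_description='sketch: time rev-list and measure its output size'
	. ./perf-lib.sh

	test_perf_default_repo

	# Timed as usual; the output file is kept so the next test can
	# measure it.
	test_perf 'rev-list --objects --all' '
		git rev-list --objects --all >objects.out
	'

	# The body must print a single number on stdout; test_size captures
	# it into a ".size" result file next to the usual ".times" files.
	test_size 'size of rev-list output' '
		wc -c <objects.out
	'

	test_done

When aggregate.perl reads such a result, a line containing a bare number is
treated as a size rather than a set of times: it is pretty-printed through
human_size (1234567 would come out as roughly "1.2M"), padded to the same
fifteen-column width as a time, and followed by a relative change against the
first result directory when one is available.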