@@ -11899,49 +11899,41 @@ def check_output_sizes(self, *outputs: str, **metadata):
 
     obtained_results = {}
 
+    def update_and_print_diff(key, actual, expected):
+      obtained_results[key] = actual
+      diff = actual - expected
+      s = f'{key}: size={actual}, expected={expected}'
+      if diff:
+        s += f', delta={diff} ({diff * 100.0 / expected:+.2f}%)'
+      print(s)
+      return diff
+
     total_output_size = 0
     total_expected_size = 0
     total_output_size_gz = 0
     total_expected_size_gz = 0
+
     for f in outputs:
-      f_gz = f + '.gz'
-      expected_size = expected_results.get(f, inf)
-      expected_size_gz = expected_results.get(f_gz, inf)
       contents = read_binary(f)
-      size = len(contents)
-      size_gz = len(gzip.compress(contents))
 
-      obtained_results[f] = size
-      obtained_results[f_gz] = size_gz
-
-      if not common.EMTEST_REBASELINE and size != expected_size and (f.endswith(('.js', '.html'))):
-        print('Contents of ' + f + ': ')
+      size = len(contents)
+      expected_size = expected_results.get(f, inf)
+      if update_and_print_diff(f, size, expected_size) and common.EMTEST_VERBOSE and not common.EMTEST_REBASELINE and f.endswith(('.js', '.html')):
+        print(f'Contents of {f}:')
         print(contents.decode('utf-8', errors='replace'))
-
-      def print_diff(title, actual, expected):
-        diff = actual - expected
-        s = f'{title}={actual}, expected {expected}'
-        if diff > 0:
-          s += f', delta={diff} ({diff * 100.0 / expected:+.2f}%)'
-        print(s)
-
-      print_diff(f'size of {f}', size, expected_size)
-      print_diff(f'size of {f_gz}', size_gz, expected_size_gz)
-
-      # N.B. even though the test code above prints out gzip compressed sizes, regression testing is done against uncompressed sizes
-      # this is because optimizing for compressed sizes can be unpredictable and sometimes counterproductive
       total_output_size += size
       total_expected_size += expected_size
 
+      f_gz = f + '.gz'
+      size_gz = len(gzip.compress(contents))
+      expected_size_gz = expected_results.get(f_gz, inf)
+      update_and_print_diff(f_gz, size_gz, expected_size_gz)
       total_output_size_gz += size_gz
       total_expected_size_gz += expected_size_gz
 
     if len(outputs) > 1:
-      obtained_results['total'] = total_output_size
-      obtained_results['total_gz'] = total_output_size_gz
-
-      print_diff('Total output size', total_output_size, total_expected_size)
-      print_diff('Total output size gzipped', total_output_size_gz, total_expected_size_gz)
+      update_and_print_diff('total', total_output_size, total_expected_size)
+      update_and_print_diff('total_gz', total_output_size_gz, total_expected_size_gz)
 
     obtained_results.update(metadata)
 
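For reference, here is a minimal standalone sketch of how the new update_and_print_diff reporting reads; the file names and byte sizes below are made up for illustration and are not part of the patch:

    obtained_results = {}

    def update_and_print_diff(key, actual, expected):
      # Record the measured size and print it next to the expectation; the
      # returned delta lets the caller decide whether to dump file contents.
      obtained_results[key] = actual
      diff = actual - expected
      s = f'{key}: size={actual}, expected={expected}'
      if diff:
        s += f', delta={diff} ({diff * 100.0 / expected:+.2f}%)'
      print(s)
      return diff

    update_and_print_diff('a.out.js', 52100, 51000)     # hypothetical sizes
    # -> a.out.js: size=52100, expected=51000, delta=1100 (+2.16%)
    update_and_print_diff('a.out.js.gz', 17000, 17000)
    # -> a.out.js.gz: size=17000, expected=17000   (no delta printed when sizes match)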