#!/usr/bin/env vpython3
# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
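"""Tests for binary_size_differ."""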

import copy
import os
import tempfile
from typing import MutableMapping, Optional
import unittest

import binary_size_differ
import binary_sizes

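# Example package blob JSON in the format read by
# binary_sizes.ReadPackageBlobsJson; used as the baseline ("before") snapshot.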
_EXAMPLE_BLOBS_BEFORE = """
{
  "web_engine": [
    {
      "merkle": "77e876447dd2daaaab7048d646e87fe8b6d9fecef6cbfcc4af30b8fbfa50b881",
      "path": "locales/ta.pak",
      "bytes": 17916,
      "is_counted": true,
      "size": 16384
    },
    {
      "merkle": "5f1932b8c9fe954f3c3fdb34ab2089d2af34e5a0cef90cad41a1cd37d92234bf",
      "path": "lib/libEGL.so",
      "bytes": 226960,
      "is_counted": true,
      "size": 90112
    },
    {
      "merkle": "9822fc0dd95cdd1cc46b5c6632a928a6ad19b76ed0157397d82a2f908946fc34",
      "path": "meta.far",
      "bytes": 24576,
      "is_counted": false,
      "size": 16384
    },
    {
      "merkle": "090aed4593c4f7d04a3ad80e9971c0532dd5b1d2bdf4754202cde510a88fd220",
      "path": "locales/ru.pak",
      "bytes": 11903,
      "is_counted": true,
      "size": 16384
    }
  ]
}
"""


class BinarySizeDifferTest(unittest.TestCase):
  def ChangePackageSize(
      self,
      packages: MutableMapping[str, binary_sizes.PackageSizes],
      name: str,
      compressed_increase: int,
      uncompressed_increase: Optional[int] = None):
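    """Replaces packages[name] with a copy adjusted by the given size deltas.

    If uncompressed_increase is omitted, it defaults to compressed_increase.
    """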
    if uncompressed_increase is None:
      uncompressed_increase = compressed_increase
    original_package = packages[name]
    new_package = binary_sizes.PackageSizes(
        compressed=original_package.compressed + compressed_increase,
        uncompressed=original_package.uncompressed + uncompressed_increase)
    packages[name] = new_package

  def testComputePackageDiffs(self):
    # TODO(1309977): Disabled on Windows because Windows doesn't allow opening
    # a NamedTemporaryFile by name.
    if os.name == 'nt':
      return

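    # Status codes expected in the 'status_code' field of the result returned
    # by ComputePackageDiffs.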
    SUCCESS = 0
    FAILURE = 1
    ROLLER_SIZE_WARNING = 2
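    # Convert the example blob list into the package-sizes JSON that
    # ComputePackageDiffs consumes, reusing the same file as the baseline.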
    with tempfile.NamedTemporaryFile(mode='w') as before_file:
      before_file.write(_EXAMPLE_BLOBS_BEFORE)
      before_file.flush()
      blobs = binary_sizes.ReadPackageBlobsJson(before_file.name)
      sizes = binary_sizes.GetPackageSizes(blobs)
      binary_sizes.WritePackageSizesJson(before_file.name, sizes)

      # No change.
      growth = binary_size_differ.ComputePackageDiffs(before_file.name,
                                                      before_file.name)
      self.assertEqual(growth['status_code'], SUCCESS)
      self.assertEqual(growth['compressed']['web_engine'], 0)

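      # Create a named file for the "after" snapshot. It is closed immediately
      # so that WritePackageSizesJson can rewrite it by name, and it is removed
      # in the finally block below.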
      after_file = tempfile.NamedTemporaryFile(mode='w', delete=True)
      after_file.close()
      try:
        # Increase a blob, but below the limit.
        other_sizes = copy.deepcopy(sizes)
        self.ChangePackageSize(other_sizes, 'web_engine', 8 * 1024)
        binary_sizes.WritePackageSizesJson(after_file.name, other_sizes)

        growth = binary_size_differ.ComputePackageDiffs(before_file.name,
                                                        after_file.name)
        self.assertEqual(growth['status_code'], SUCCESS)
        self.assertEqual(growth['compressed']['web_engine'], 8 * 1024)

        # Increase beyond the limit (adds another 8 KiB + 1, for a total of
        # 16 KiB + 1 over the baseline).
        self.ChangePackageSize(other_sizes, 'web_engine', 8 * 1024 + 1)
        binary_sizes.WritePackageSizesJson(after_file.name, other_sizes)
        growth = binary_size_differ.ComputePackageDiffs(before_file.name,
                                                        after_file.name)
        self.assertEqual(growth['status_code'], FAILURE)
        self.assertEqual(growth['compressed']['web_engine'], 16 * 1024 + 1)
        self.assertIn('check failed', growth['summary'])
        self.assertIn(f'web_engine (compressed) grew by {16 * 1024 + 1} bytes',
                      growth['summary'])

        # Increase compressed beyond the limit, but uncompressed does not
        # increase.
        binary_sizes.WritePackageSizesJson(before_file.name, other_sizes)
        self.ChangePackageSize(other_sizes,
                               'web_engine',
                               16 * 1024 + 1,
                               uncompressed_increase=0)
        binary_sizes.WritePackageSizesJson(after_file.name, other_sizes)
        growth = binary_size_differ.ComputePackageDiffs(before_file.name,
                                                        after_file.name)
        self.assertEqual(growth['uncompressed']['web_engine'], 0)
        self.assertEqual(growth['status_code'], SUCCESS)
        self.assertEqual(growth['compressed']['web_engine'], 16 * 1024 + 1)

        # Increase compressed beyond the limit, but uncompressed goes down.
        binary_sizes.WritePackageSizesJson(before_file.name, other_sizes)
        self.ChangePackageSize(other_sizes,
                               'web_engine',
                               16 * 1024 + 1,
                               uncompressed_increase=-4 * 1024)
        binary_sizes.WritePackageSizesJson(after_file.name, other_sizes)
        growth = binary_size_differ.ComputePackageDiffs(before_file.name,
                                                        after_file.name)
        self.assertEqual(growth['status_code'], SUCCESS)
        self.assertEqual(growth['compressed']['web_engine'], 16 * 1024 + 1)

        # Increase beyond the second limit. Fails, regardless of uncompressed.
        binary_sizes.WritePackageSizesJson(before_file.name, other_sizes)
        self.ChangePackageSize(other_sizes,
                               'web_engine',
                               100 * 1024 + 1,
                               uncompressed_increase=-4 * 1024)
        binary_sizes.WritePackageSizesJson(after_file.name, other_sizes)
        growth = binary_size_differ.ComputePackageDiffs(before_file.name,
                                                        after_file.name)
        self.assertEqual(growth['status_code'], FAILURE)
        self.assertEqual(growth['compressed']['web_engine'], 100 * 1024 + 1)

        # Increase beyond the second limit, but the CL was authored by a
        # roller.
        binary_sizes.WritePackageSizesJson(before_file.name, other_sizes)
        self.ChangePackageSize(other_sizes,
                               'web_engine',
                               100 * 1024 + 1,
                               uncompressed_increase=-4 * 1024)
        binary_sizes.WritePackageSizesJson(after_file.name, other_sizes)
        growth = binary_size_differ.ComputePackageDiffs(before_file.name,
                                                        after_file.name,
                                                        author='big-autoroller')
        self.assertEqual(growth['status_code'], ROLLER_SIZE_WARNING)
        self.assertEqual(growth['compressed']['web_engine'], 100 * 1024 + 1)
        self.assertNotIn('check failed', growth['summary'])
        self.assertIn('growth by an autoroller will be ignored',
                      growth['summary'])
        self.assertIn(f'web_engine (compressed) grew by {100 * 1024 + 1} bytes',
                      growth['summary'])
      finally:
        os.remove(after_file.name)


if __name__ == '__main__':
  unittest.main()