# Copyright 2021 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Please see the comment section at the top of `run_performance_tests.py`.

import json
import os
import pathlib
import shutil
import tempfile
import unittest
from unittest import mock

import run_performance_tests
from run_performance_tests import TelemetryCommandGenerator
from telemetry.internal.util import binary_manager

# Protected access is allowed for unittests.
# pylint: disable=protected-access

# The path where the output of a wpt run was written. This is the file that
# gets processed by BaseWptScriptAdapter.
OUTPUT_JSON_FILENAME = 'out.json'


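# These tests exercise TelemetryCommandGenerator along with the shard-map and
# crossbench helpers exposed by run_performance_tests.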
class TelemetryCommandGeneratorTest(unittest.TestCase):

  def setUp(self):
    fake_args = ['./run_benchmark', '--isolated-script-test-output=output.json']
    self._fake_options = run_performance_tests.parse_arguments(fake_args)
    mock.patch.object(binary_manager, 'InitDependencyManager').start()
    # Stop all patchers started with .start() so mocks do not leak between
    # tests.
    self.addCleanup(mock.patch.stopall)

  def testStorySelectionBeginEnd(self):
    story_selection_config = json.loads(
        '{"begin": 11, "end": 21, "abridged": false}')
    generator = TelemetryCommandGenerator('benchmark_name', self._fake_options,
                                          story_selection_config)
    command = generator.generate('output_dir')
    self.assertIn('--story-shard-begin-index=11', command)
    self.assertIn('--story-shard-end-index=21', command)
    self.assertNotIn('--run-abridged-story-set', command)

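  # When the story selection config omits "abridged", the generator defaults
  # to the abridged story set, as asserted below.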
  def testStorySelectionAbridgedDefault(self):
    story_selection_config = json.loads('{"begin": 11, "end": 21}')
    generator = TelemetryCommandGenerator('benchmark_name', self._fake_options,
                                          story_selection_config)
    command = generator.generate('output_dir')
    self.assertIn('--run-abridged-story-set', command)

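  # A section whose "end" is "begin" + 1 covers a single story index, so
  # {"begin": 25, "end": 26} is rendered as the bare index '25'.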
  def testStorySelectionIndexSectionsSingleIndex(self):
    story_selection_config = json.loads(
        '{"sections": [{"begin": 11, "end": 21}, {"begin": 25, "end": 26}]}')
    generator = TelemetryCommandGenerator('benchmark_name', self._fake_options,
                                          story_selection_config)
    command = generator.generate('output_dir')
    self.assertIn('--story-shard-indexes=11-21,25', command)

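  # Sections may omit "begin" or "end"; such sections are rendered as
  # open-ended ranges ('-10' and '20-' below).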
  def testStorySelectionIndexSectionsOpenEnds(self):
    story_selection_config = json.loads(
        '{"sections": [{"end": 10}, {"begin": 15, "end": 16}, {"begin": 20}]}')
    generator = TelemetryCommandGenerator('benchmark_name', self._fake_options,
                                          story_selection_config)
    command = generator.generate('output_dir')
    self.assertIn('--story-shard-indexes=-10,15,20-', command)

  def testStorySelectionIndexSectionsIllegalRange(self):
    with self.assertRaises(ValueError):
      story_selection_config = json.loads(
          '{"sections": [{"begin": 15, "end": 16}, {"foo": "bar"}]}')
      generator = TelemetryCommandGenerator('benchmark_name',
                                            self._fake_options,
                                            story_selection_config)
      generator.generate('output_dir')

  def testStorySelectionIndexSectionsEmpty(self):
    story_selection_config = json.loads('{"sections": []}')
    generator = TelemetryCommandGenerator('benchmark_name', self._fake_options,
                                          story_selection_config)
    command = generator.generate('output_dir')
    self.assertNotIn('--story-shard-indexes=', command)

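  # The load_map_file tests patch builtins.open with mock_open, so the JSON
  # content is served from memory and no real file is read.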
  @mock.patch.object(os.path, 'exists')
  @mock.patch.object(run_performance_tests, 'copy_map_file_to_out_dir')
  @mock.patch('builtins.open',
              new_callable=mock.mock_open,
              read_data='{"foo": 1}')
  def testLoadMapFileSuccess(self, mock_open, mock_copy_map_file_to_out_dir,
                             mock_exists):
    del mock_open, mock_exists
    content = run_performance_tests.load_map_file('file', 'dir')

    self.assertIsInstance(content, dict)
    self.assertEqual(content['foo'], 1)
    mock_copy_map_file_to_out_dir.assert_called_with('file', 'dir')

  @mock.patch.object(os.path, 'exists')
  @mock.patch.object(pathlib.Path, 'exists')
  @mock.patch.object(run_performance_tests, 'copy_map_file_to_out_dir')
  @mock.patch('builtins.open',
              new_callable=mock.mock_open,
              read_data='{"foo": 1}')
  def testLoadMapFileShardMapDirectory(self, mock_open,
                                       mock_copy_map_file_to_out_dir,
                                       mock_pathlib_exists, mock_exists):
    del mock_open
    mock_exists.return_value = False
    mock_pathlib_exists.return_value = True
    expected_file = str(run_performance_tests.SHARD_MAPS_DIR / 'file')

    run_performance_tests.load_map_file('file', 'dir')

    mock_copy_map_file_to_out_dir.assert_called_with(expected_file, 'dir')

  @mock.patch.object(os.path, 'exists')
  @mock.patch.object(run_performance_tests, 'copy_map_file_to_out_dir')
  @mock.patch('builtins.open',
              new_callable=mock.mock_open,
              read_data='{"foo": 1}')
  def testLoadMapFileException(self, mock_open, mock_copy_map_file_to_out_dir,
                               mock_exists):
    del mock_open, mock_copy_map_file_to_out_dir
    mock_exists.side_effect = [False, False]

    with self.assertRaises(Exception):
      run_performance_tests.load_map_file('file', 'dir')

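  # load_map_string parses the JSON string itself; NamedTemporaryFile is
  # patched so any temp file the helper creates for the copy below never
  # touches the real filesystem.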
  @mock.patch.object(run_performance_tests, 'copy_map_file_to_out_dir')
  @mock.patch.object(tempfile, 'NamedTemporaryFile')
  def testLoadMapStringSuccess(self, mock_named_temporary_file,
                               mock_copy_map_file_to_out_dir):
    del mock_named_temporary_file
    content = run_performance_tests.load_map_string('{"foo": 1}', 'dir')

    self.assertIsInstance(content, dict)
    self.assertEqual(content['foo'], 1)
    mock_copy_map_file_to_out_dir.assert_called_with(mock.ANY, 'dir')

  @mock.patch.object(os.path, 'exists')
  @mock.patch.object(shutil, 'copyfile')
  def testCopyMapFileToOutDirSuccess(self, mock_copyfile, mock_exists):
    del mock_exists
    run_performance_tests.copy_map_file_to_out_dir('file', 'dir')

    mock_copyfile.assert_called_with('file', 'dir/benchmarks_shard_map.json')

  @mock.patch.object(run_performance_tests.CrossbenchTest, 'execute_benchmark')
  def testCrossbenchTestBenchmarksArg(self, mock_execute_benchmark):
    fake_args = self._create_crossbench_args()
    options = run_performance_tests.parse_arguments(fake_args)

    run_performance_tests.CrossbenchTest(options, 'dir').execute()

    mock_execute_benchmark.assert_called_with('speedometer_3.0',
                                              'speedometer3.crossbench', [])

  def testCrossbenchTestBenchmarksException(self):
    fake_args = ['./cp.py', '--isolated-script-test-output=output']
    options = run_performance_tests.parse_arguments(fake_args)

    with self.assertRaises(Exception):
      run_performance_tests.CrossbenchTest(options, 'dir').execute()

  def testCrossbenchTestMultiBenchmarksException(self):
    fake_args = [
        './cp.py', '--isolated-script-test-output=output',
        '--benchmarks=speedometer_3.0,speedometer_2.0'
    ]
    options = run_performance_tests.parse_arguments(fake_args)

    with self.assertRaises(Exception):
      run_performance_tests.CrossbenchTest(options, 'dir').execute()

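  # The shard-map tests drive main() directly; the first element of fake_args
  # stands in for the program name, so parse_arguments() sees fake_args[1:].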
  @mock.patch.object(run_performance_tests, '_run_benchmarks_on_shardmap')
  @mock.patch.object(os.path, 'dirname')
  @mock.patch.object(run_performance_tests, 'load_map_file')
  def testCrossbenchTestShardMapFile(self, mock_load_map_file, mock_dirname,
                                     mock_run_benchmarks_on_shardmap):
    mock_load_map_file.return_value = 'map_file'
    mock_dirname.return_value = 'dir'
    fake_args = [
        'skip', 'run_benchmark', '--isolated-script-test-output=output',
        '--test-shard-map-filename=foo'
    ]
    expected_options = run_performance_tests.parse_arguments(fake_args[1:])

    run_performance_tests.main(fake_args)

    mock_load_map_file.assert_called_with('foo', 'dir')
    mock_run_benchmarks_on_shardmap.assert_called_with('map_file',
                                                       expected_options, 'dir',
                                                       [])

  @mock.patch.object(run_performance_tests, '_run_benchmarks_on_shardmap')
  @mock.patch.object(os.path, 'dirname')
  @mock.patch.object(run_performance_tests, 'load_map_string')
  def testCrossbenchTestShardMapString(self, mock_load_map_string,
                                       mock_dirname,
                                       mock_run_benchmarks_on_shardmap):
    mock_load_map_string.return_value = 'map_string'
    mock_dirname.return_value = 'dir'
    fake_args = [
        'skip', 'run_benchmark', '--isolated-script-test-output=output',
        '--use-dynamic-shards', '--dynamic-shardmap=json'
    ]
    expected_options = run_performance_tests.parse_arguments(fake_args[1:])

    run_performance_tests.main(fake_args)

    mock_load_map_string.assert_called_with('json', 'dir')
    mock_run_benchmarks_on_shardmap.assert_called_with('map_string',
                                                       expected_options, 'dir',
                                                       [])

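  # _run_benchmarks_on_shardmap selects its shard via the GTEST_SHARD_INDEX
  # environment variable (patched to '0' here) and runs each benchmark listed
  # under that shard's 'crossbench' entry.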
  @mock.patch.object(run_performance_tests.CrossbenchTest, 'execute_benchmark')
  @mock.patch.dict(os.environ, {'GTEST_SHARD_INDEX': '0'})
  def testCrossbenchTestRunBenchmarkOnShardMap(self, mock_execute_benchmark):
    fake_args = [
        'run_benchmark',
        '--isolated-script-test-output=output',
        '--test-shard-map-filename=foo',
        '--browser=./chrome',
    ]
    options = run_performance_tests.parse_arguments(fake_args)
    shard_map = {
        '0': {
            'crossbench': {
                'my_benchmark': {
                    'display_name': 'my_display',
                    'arguments': []
                }
            }
        }
    }
    mock_execute_benchmark.return_value = 0

    return_code = run_performance_tests._run_benchmarks_on_shardmap(
        shard_map, options, 'dir', [])

    self.assertEqual(return_code, 0)
    mock_execute_benchmark.assert_called_with('my_benchmark', 'my_display', [])

  @mock.patch.object(run_performance_tests.CrossbenchTest, 'execute_benchmark')
  def testCrossbenchTestMissingShardIndex(self, mock_execute_benchmark):
    del mock_execute_benchmark
    fake_args = [
        'run_benchmark', '--isolated-script-test-output=output',
        '--test-shard-map-filename=foo'
    ]
    options = run_performance_tests.parse_arguments(fake_args)
    shard_map = {'0': {'crossbench': {'my_benchmark': []}}}

    with self.assertRaises(Exception):
      run_performance_tests._run_benchmarks_on_shardmap(shard_map, options,
                                                        'dir', [])

  @mock.patch.object(run_performance_tests.CrossbenchTest, 'execute_benchmark')
  @mock.patch.dict(os.environ, {'GTEST_SHARD_INDEX': '0'})
  def testCrossbenchTestMissingBenchmark(self, mock_execute_benchmark):
    fake_args = [
        'run_benchmark',
        '--isolated-script-test-output=output',
        '--test-shard-map-filename=foo',
        '--browser=./chrome',
    ]
    options = run_performance_tests.parse_arguments(fake_args)
    shard_map = {'0': {'crossbench': {}}}

    return_code = run_performance_tests._run_benchmarks_on_shardmap(
        shard_map, options, 'dir', [])
    self.assertEqual(return_code, 0)
    mock_execute_benchmark.assert_not_called()

  @mock.patch.object(run_performance_tests.CrossbenchTest, 'execute_benchmark')
  @mock.patch.dict(os.environ, {'GTEST_SHARD_INDEX': '0'})
  def testCrossbenchTestRunMultiBenchmarkOnShardMap(self,
                                                    mock_execute_benchmark):
    fake_args = [
        'run_benchmark',
        '--isolated-script-test-output=output',
        '--test-shard-map-filename=foo',
        '--browser=./chrome',
    ]
    options = run_performance_tests.parse_arguments(fake_args)
    shard_map = {
        '0': {
            'crossbench': {
                'b1': {
                    'display_name': 'display1',
                    'arguments': []
                },
                'b2': {
                    'display_name': 'display2',
                    'arguments': []
                }
            }
        }
    }
    mock_execute_benchmark.return_value = 1

    return_code = run_performance_tests._run_benchmarks_on_shardmap(
        shard_map, options, 'dir', [])

    self.assertEqual(return_code, 1)
    mock_execute_benchmark.assert_has_calls(
        [mock.call('b1', 'display1', []),
         mock.call('b2', 'display2', [])])

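  # CrossbenchTest.network holds the '--network=...' argument forwarded to
  # crossbench. With --fileserver or --wpr the value is a JSON dict, which
  # the tests below parse back out of the flag and compare.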
  def testCrossbenchGetNetworkArgWithNetwork(self):
    fake_args = self._create_crossbench_args() + ['--network=foo']
    options = run_performance_tests.parse_arguments(fake_args)
    expected_network = ['--network=foo']

    crossbench_test = run_performance_tests.CrossbenchTest(options, 'dir')

    self.assertEqual(crossbench_test.network, expected_network)

  def testCrossbenchGetDefaultFileServer(self):
    fake_args = self._create_crossbench_args() + ['--fileserver']
    options = run_performance_tests.parse_arguments(fake_args)
    src_dir = run_performance_tests.CHROMIUM_SRC_DIR
    local_fileserver = str(src_dir / 'third_party/speedometer/v3.0')
    expected_dict = {
        'type': 'local',
        'path': local_fileserver,
        'url': 'http://localhost:8000'
    }

    crossbench_test = run_performance_tests.CrossbenchTest(options, 'dir')

    network_dict = json.loads(crossbench_test.network[0].split('=', 1)[1])
    self.assertDictEqual(network_dict, expected_dict)

  def testCrossbenchGetTargetFileServer(self):
    fake_args = self._create_crossbench_args() + ['--fileserver=foo']
    options = run_performance_tests.parse_arguments(fake_args)
    src_dir = run_performance_tests.CHROMIUM_SRC_DIR
    local_fileserver = str(src_dir / 'foo')
    expected_dict = {
        'type': 'local',
        'path': local_fileserver,
        'url': 'http://localhost:8000'
    }

    crossbench_test = run_performance_tests.CrossbenchTest(options, 'dir')

    network_dict = json.loads(crossbench_test.network[0].split('=', 1)[1])
    self.assertDictEqual(network_dict, expected_dict)

  @mock.patch.object(binary_manager, 'FetchPath')
  def testCrossbenchGetDefaultWpr(self, mock_fetch_path):
    mock_fetch_path.return_value = 'wpr_go_path'
    fake_args = self._create_crossbench_args() + ['--wpr']
    options = run_performance_tests.parse_arguments(fake_args)
    data_dir = run_performance_tests.PAGE_SETS_DATA
    archive = str(data_dir / 'crossbench_android_speedometer_3.0_000.wprgo')
    expected_dict = {
        'type': 'wpr',
        'path': archive,
        'wpr_go_bin': 'wpr_go_path'
    }

    crossbench_test = run_performance_tests.CrossbenchTest(options, 'dir')

    network_dict = json.loads(crossbench_test.network[0].split('=', 1)[1])
    self.assertDictEqual(network_dict, expected_dict)

  @mock.patch.object(binary_manager, 'FetchPath')
  def testCrossbenchGetTargetWpr(self, mock_fetch_path):
    mock_fetch_path.return_value = 'wpr_go_path'
    fake_args = self._create_crossbench_args() + ['--wpr=foo']
    options = run_performance_tests.parse_arguments(fake_args)
    data_dir = run_performance_tests.PAGE_SETS_DATA
    archive = str(data_dir / 'foo')
    expected_dict = {
        'type': 'wpr',
        'path': archive,
        'wpr_go_bin': 'wpr_go_path'
    }

    crossbench_test = run_performance_tests.CrossbenchTest(options, 'dir')

    network_dict = json.loads(crossbench_test.network[0].split('=', 1)[1])
    self.assertDictEqual(network_dict, expected_dict)

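  # Shared helper producing a minimal crossbench argv; the browser path
  # defaults to './chrome' and can be overridden per test.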
  def _create_crossbench_args(self, browser='./chrome'):
    return [
        './cp.py',
        '--isolated-script-test-output=output',
        '--benchmarks=speedometer_3.0',
        '--benchmark-display-name=speedometer3.crossbench',
        f'--browser={browser}',
    ]

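
# A standard unittest entry point, added on the assumption that this file may
# also be invoked directly as a script.
if __name__ == '__main__':
  unittest.main()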