• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1#!/usr/bin/env vpython3
2# Copyright 2019 The Chromium Authors
3# Use of this source code is governed by a BSD-style license that can be
4# found in the LICENSE file.
5
6import copy
7import json
8import os
9import subprocess
10import sys
11import unittest
12from unittest import mock
13
14import merge_results
15import merge_steps
16import merge_lib as merger
17
18
class MergeProfilesTest(unittest.TestCase):
  """Tests for the code-coverage profile merge scripts.

  Covers the step-level front-end (merge_results), the build-level
  front-end (merge_steps) and the shared merge_lib helpers.
  """

  def __init__(self, *args, **kwargs):
    # Python 3 zero-argument super() — the old
    # `super(MergeProfilesTest, self)` form (and its pylint
    # super-with-arguments pragmas) is no longer needed.
    super().__init__(*args, **kwargs)
    # Expected mock.call objects are large; show full diffs on failure.
    self.maxDiff = None
27
28  def test_merge_script_api_parameters(self):
29    """Test the step-level merge front-end."""
30    build_properties = json.dumps({
31        'some': {
32            'complicated': ['nested', {
33                'json': None,
34                'object': 'thing',
35            }]
36        }
37    })
38    task_output_dir = 'some/task/output/dir'
39    profdata_dir = '/some/different/path/to/profdata/default.profdata'
40    profdata_file = os.path.join(profdata_dir, 'base_unittests.profdata')
41    args = [
42        'script_name', '--output-json', 'output.json', '--build-properties',
43        build_properties, '--summary-json', 'summary.json', '--task-output-dir',
44        task_output_dir, '--profdata-dir', profdata_dir, '--llvm-profdata',
45        'llvm-profdata', 'a.json', 'b.json', 'c.json', '--test-target-name',
46        'base_unittests', '--sparse'
47    ]
48    with mock.patch.object(merger, 'merge_profiles') as mock_merge:
49      mock_merge.return_value = None, None
50      with mock.patch.object(sys, 'argv', args):
51        merge_results.main()
52        self.assertEqual(
53            mock_merge.call_args,
54            mock.call(task_output_dir,
55                      profdata_file,
56                      '.profraw',
57                      'llvm-profdata',
58                      sparse=True,
59                      skip_validation=False), None)
60
61  def test_merge_steps_parameters(self):
62    """Test the build-level merge front-end."""
63    input_dir = 'some/task/output/dir'
64    output_file = '/some/different/path/to/profdata/merged.profdata'
65    args = [
66        'script_name', '--input-dir', input_dir, '--output-file', output_file,
67        '--llvm-profdata', 'llvm-profdata', '--profdata-filename-pattern', '.*'
68    ]
69    with mock.patch.object(merger, 'merge_profiles') as mock_merge:
70      mock_merge.return_value = [], []
71      with mock.patch.object(sys, 'argv', args):
72        merge_steps.main()
73        self.assertEqual(
74            mock_merge.call_args,
75            mock.call(input_dir,
76                      output_file,
77                      '.profdata',
78                      'llvm-profdata',
79                      '.*',
80                      sparse=False,
81                      merge_timeout=3600,
82                      weights={}))
83
84  @mock.patch('builtins.open', new_callable=mock.mock_open())
85  @mock.patch.object(merger, '_validate_and_convert_profraws')
86  def test_merge_profraw(self, mock_validate_and_convert_profraws,
87                         mock_file_open):
88    mock_input_dir_walk = [
89        ('/b/some/path', ['0', '1', '2', '3'], ['summary.json']),
90        ('/b/some/path/0', [],
91         ['output.json', 'default-1.profraw', 'default-2.profraw']),
92        ('/b/some/path/1', [],
93         ['output.json', 'default-1.profraw', 'default-2.profraw']),
94    ]
95
96    mock_validate_and_convert_profraws.return_value = [
97        '/b/some/path/0/default-1.profdata',
98        '/b/some/path/1/default-2.profdata',
99    ], [
100        '/b/some/path/0/default-2.profraw',
101        '/b/some/path/1/default-1.profraw',
102    ], [
103        '/b/some/path/1/default-1.profraw',
104    ]
105
106    with mock.patch.object(os, 'walk') as mock_walk:
107      with mock.patch.object(os, 'remove'):
108        mock_walk.return_value = mock_input_dir_walk
109        with mock.patch.object(subprocess, 'run') as mock_exec_cmd:
110          merger.merge_profiles('/b/some/path',
111                                'output/dir/default.profdata',
112                                '.profraw',
113                                'llvm-profdata',
114                                show_profdata=False)
115          self.assertEqual(
116              mock.call([
117                  'llvm-profdata',
118                  'merge',
119                  '-o',
120                  'output/dir/default.profdata',
121                  '-f',
122                  'output/dir/input-profdata-files.txt',
123              ],
124                        capture_output=True,
125                        check=True,
126                        text=True,
127                        timeout=3600), mock_exec_cmd.call_args)
128          context = mock_file_open()
129          self.assertEqual(context.__enter__().write.call_count, 2)
130          context.__enter__().write.assert_any_call(
131              '/b/some/path/0/default-1.profdata\n')
132          context.__enter__().write.assert_any_call(
133              '/b/some/path/1/default-2.profdata\n')
134
135    self.assertTrue(mock_validate_and_convert_profraws.called)
136
137  @mock.patch('builtins.open', new_callable=mock.mock_open())
138  @mock.patch.object(merger, '_validate_and_convert_profraws')
139  def test_profraw_skip_validation(self, mock_validate_and_convert_profraws,
140                                   mock_file_open):
141    mock_input_dir_walk = [
142        ('/b/some/path', ['0', '1', '2', '3'], ['summary.json']),
143        ('/b/some/path/0', [],
144         ['output.json', 'default-1.profraw', 'default-2.profraw']),
145        ('/b/some/path/1', [],
146         ['output.json', 'default-1.profraw', 'default-2.profraw']),
147    ]
148
149    with mock.patch.object(os, 'walk') as mock_walk:
150      with mock.patch.object(os, 'remove'):
151        mock_walk.return_value = mock_input_dir_walk
152        with mock.patch.object(subprocess, 'run') as mock_exec_cmd:
153          merger.merge_profiles('/b/some/path',
154                                'output/dir/default.profdata',
155                                '.profraw',
156                                'llvm-profdata',
157                                skip_validation=True,
158                                show_profdata=False)
159          self.assertEqual(
160              mock.call([
161                  'llvm-profdata',
162                  'merge',
163                  '-o',
164                  'output/dir/default.profdata',
165                  '-f',
166                  'output/dir/input-profdata-files.txt',
167              ],
168                        capture_output=True,
169                        check=True,
170                        text=True,
171                        timeout=3600), mock_exec_cmd.call_args)
172          context = mock_file_open()
173          self.assertEqual(context.__enter__().write.call_count, 4)
174          context.__enter__().write.assert_any_call(
175              '/b/some/path/0/default-1.profraw\n')
176          context.__enter__().write.assert_any_call(
177              '/b/some/path/0/default-2.profraw\n')
178          context.__enter__().write.assert_any_call(
179              '/b/some/path/1/default-1.profraw\n')
180          context.__enter__().write.assert_any_call(
181              '/b/some/path/1/default-2.profraw\n')
182
183    # Skip validation should've passed all profraw files directly, and
184    # this validate call should not have been invoked.
185    self.assertFalse(mock_validate_and_convert_profraws.called)
186
187  def test_merge_profraw_skip_if_there_is_no_file(self):
188    mock_input_dir_walk = [
189        ('/b/some/path', ['0', '1', '2', '3'], ['summary.json']),
190    ]
191
192    with mock.patch.object(os, 'walk') as mock_walk:
193      mock_walk.return_value = mock_input_dir_walk
194      with mock.patch.object(subprocess, 'check_call') as mock_exec_cmd:
195        merger.merge_profiles('/b/some/path',
196                              'output/dir/default.profdata',
197                              '.profraw',
198                              'llvm-profdata',
199                              show_profdata=False)
200        self.assertFalse(mock_exec_cmd.called)
201
202  @mock.patch('builtins.open', new_callable=mock.mock_open())
203  @mock.patch.object(merger, '_validate_and_convert_profraws')
204  def test_merge_profdata(self, mock_validate_and_convert_profraws,
205                          mock_file_open):
206    mock_input_dir_walk = [
207        ('/b/some/path', ['base_unittests', 'url_unittests'], ['summary.json']),
208        ('/b/some/path/base_unittests', [], ['output.json',
209                                             'default.profdata']),
210        ('/b/some/path/url_unittests', [], ['output.json', 'default.profdata']),
211    ]
212    with mock.patch.object(os, 'walk') as mock_walk:
213      with mock.patch.object(os, 'remove'):
214        mock_walk.return_value = mock_input_dir_walk
215        with mock.patch.object(subprocess, 'run') as mock_exec_cmd:
216          merger.merge_profiles('/b/some/path',
217                                'output/dir/default.profdata',
218                                '.profdata',
219                                'llvm-profdata',
220                                show_profdata=False)
221          self.assertEqual(
222              mock.call([
223                  'llvm-profdata',
224                  'merge',
225                  '-o',
226                  'output/dir/default.profdata',
227                  '-f',
228                  'output/dir/input-profdata-files.txt',
229              ],
230                        capture_output=True,
231                        check=True,
232                        text=True,
233                        timeout=3600), mock_exec_cmd.call_args)
234          context = mock_file_open()
235          self.assertEqual(context.__enter__().write.call_count, 2)
236          context.__enter__().write.assert_any_call(
237              '/b/some/path/base_unittests/default.profdata\n')
238          context.__enter__().write.assert_any_call(
239              '/b/some/path/url_unittests/default.profdata\n')
240
241    # The mock method should only apply when merging .profraw files.
242    self.assertFalse(mock_validate_and_convert_profraws.called)
243
244  @mock.patch('builtins.open', new_callable=mock.mock_open())
245  @mock.patch.object(merger, '_validate_and_convert_profraws')
246  def test_merge_profdata_pattern(self, mock_validate_and_convert_profraws,
247                                  mock_file_open):
248    mock_input_dir_walk = [
249        ('/b/some/path', ['base_unittests', 'url_unittests'], ['summary.json']),
250        ('/b/some/path/base_unittests', [],
251         ['output.json', 'base_unittests.profdata']),
252        (
253            '/b/some/path/url_unittests',
254            [],
255            ['output.json', 'url_unittests.profdata'],
256        ),
257        (
258            '/b/some/path/ios_chrome_smoke_eg2tests',
259            [],
260            ['output.json', 'ios_chrome_smoke_eg2tests.profdata'],
261        ),
262    ]
263    with mock.patch.object(os, 'walk') as mock_walk:
264      with mock.patch.object(os, 'remove'):
265        mock_walk.return_value = mock_input_dir_walk
266        with mock.patch.object(subprocess, 'run') as mock_exec_cmd:
267          input_profdata_filename_pattern = r'.+_unittests\.profdata'
268          merger.merge_profiles('/b/some/path',
269                                'output/dir/default.profdata',
270                                '.profdata',
271                                'llvm-profdata',
272                                input_profdata_filename_pattern,
273                                show_profdata=False)
274          self.assertEqual(
275              mock.call([
276                  'llvm-profdata',
277                  'merge',
278                  '-o',
279                  'output/dir/default.profdata',
280                  '-f',
281                  'output/dir/input-profdata-files.txt',
282              ],
283                        capture_output=True,
284                        check=True,
285                        text=True,
286                        timeout=3600), mock_exec_cmd.call_args)
287          context = mock_file_open()
288          self.assertEqual(context.__enter__().write.call_count, 2)
289          context.__enter__().write.assert_any_call(
290              '/b/some/path/base_unittests/base_unittests.profdata\n')
291          context.__enter__().write.assert_any_call(
292              '/b/some/path/url_unittests/url_unittests.profdata\n')
293
294    # The mock method should only apply when merging .profraw files.
295    self.assertFalse(mock_validate_and_convert_profraws.called)
296
297  @mock.patch('builtins.open', new_callable=mock.mock_open())
298  @mock.patch.object(merger, '_validate_and_convert_profraws')
299  def test_merge_profiles_with_weights(self, mock_validate_and_convert_profraws,
300                                       mock_file_open):
301    mock_input_dir_walk = [
302        ('/b/some/path', ['speedometer_benchmark', 'motionmark_benchmark'], []),
303        ('/b/some/path/speedometer_benchmark', [], ['foo.profdata']),
304        ('/b/some/path/motionmark_benchmark', [], ['foo.profdata']),
305    ]
306    with mock.patch.object(os, 'walk') as mock_walk:
307      with mock.patch.object(os, 'remove'):
308        mock_walk.return_value = mock_input_dir_walk
309        with mock.patch.object(subprocess, 'run') as mock_exec_cmd:
310          merger.merge_profiles(
311              '/b/some/path',
312              'output/dir/default.profdata',
313              '.profdata',
314              'llvm-profdata',
315              '.*',
316              show_profdata=False,
317              weights={'speedometer_benchmark/foo.profdata': '3'})
318          self.assertEqual(
319              mock.call([
320                  'llvm-profdata',
321                  'merge',
322                  '-o',
323                  'output/dir/default.profdata',
324                  '-f',
325                  'output/dir/input-profdata-files.txt',
326              ],
327                        capture_output=True,
328                        check=True,
329                        text=True,
330                        timeout=3600), mock_exec_cmd.call_args)
331          context = mock_file_open()
332          self.assertEqual(context.__enter__().write.call_count, 2)
333          context.__enter__().write.assert_any_call(
334              '3,/b/some/path/speedometer_benchmark/foo.profdata\n')
335          context.__enter__().write.assert_any_call(
336              '/b/some/path/motionmark_benchmark/foo.profdata\n')
337
338    # The mock method should only apply when merging .profraw files.
339    self.assertFalse(mock_validate_and_convert_profraws.called)
340
341  @mock.patch('merge_lib._JAVA_PATH', 'java')
342  def test_merge_java_exec_files(self):
343    mock_input_dir_walk = [
344        ('/b/some/path', ['0', '1', '2', '3'], ['summary.json']),
345        ('/b/some/path/0', [],
346         ['output.json', 'default-1.exec', 'default-2.exec']),
347        ('/b/some/path/1', [],
348         ['output.json', 'default-3.exec', 'default-4.exec']),
349    ]
350
351    with mock.patch.object(os, 'walk') as mock_walk:
352      mock_walk.return_value = mock_input_dir_walk
353      with mock.patch.object(subprocess, 'check_call') as mock_exec_cmd:
354        merger.merge_java_exec_files('/b/some/path', 'output/path',
355                                     'path/to/jacococli.jar')
356        self.assertEqual(
357            mock.call([
358                'java',
359                '-jar',
360                'path/to/jacococli.jar',
361                'merge',
362                '/b/some/path/0/default-1.exec',
363                '/b/some/path/0/default-2.exec',
364                '/b/some/path/1/default-3.exec',
365                '/b/some/path/1/default-4.exec',
366                '--destfile',
367                'output/path',
368            ],
369                      stderr=-2), mock_exec_cmd.call_args)
370
371  def test_merge_java_exec_files_if_there_is_no_file(self):
372    mock_input_dir_walk = [
373        ('/b/some/path', ['0', '1', '2', '3'], ['summary.json']),
374    ]
375
376    with mock.patch.object(os, 'walk') as mock_walk:
377      mock_walk.return_value = mock_input_dir_walk
378      with mock.patch.object(subprocess, 'check_call') as mock_exec_cmd:
379        merger.merge_java_exec_files('/b/some/path', 'output/path',
380                                     'path/to/jacococli.jar')
381        self.assertFalse(mock_exec_cmd.called)
382
383  def test_calls_merge_js_results_script(self):
384    task_output_dir = 'some/task/output/dir'
385    profdata_dir = '/some/different/path/to/profdata/default.profdata'
386
387    args = [
388        'script_name', '--output-json', 'output.json', '--task-output-dir',
389        task_output_dir, '--profdata-dir', profdata_dir, '--llvm-profdata',
390        'llvm-profdata', 'a.json', 'b.json', 'c.json', '--test-target-name',
391        'v8_unittests', '--sparse', '--javascript-coverage-dir',
392        'output/dir/devtools_code_coverage', '--chromium-src-dir',
393        'chromium/src', '--build-dir', 'output/dir'
394    ]
395    with mock.patch.object(merger, 'merge_profiles') as mock_merge:
396      mock_merge.return_value = None, None
397      with mock.patch.object(sys, 'argv', args):
398        with mock.patch.object(subprocess, 'call') as mock_exec_cmd:
399          with mock.patch.object(os.path, 'join') as mock_os_path_join:
400            mock_merge_js_results_path = 'path/to/js/merge_js_results.py'
401            mock_os_path_join.return_value = mock_merge_js_results_path
402            python_exec = sys.executable
403            merge_results.main()
404
405            mock_exec_cmd.assert_called_with([
406                python_exec, mock_merge_js_results_path, '--task-output-dir',
407                task_output_dir, '--javascript-coverage-dir',
408                'output/dir/devtools_code_coverage', '--chromium-src-dir',
409                'chromium/src', '--build-dir', 'output/dir'
410            ])
411
412  def test_argparse_sparse(self):
413    """Ensure that sparse flag defaults to true, and is set to correct value"""
414    # Basic required args
415    build_properties = json.dumps({
416        'some': {
417            'complicated': ['nested', {
418                'json': None,
419                'object': 'thing',
420            }]
421        }
422    })
423    task_output_dir = 'some/task/output/dir'
424    profdata_dir = '/some/different/path/to/profdata/default.profdata'
425    profdata_file = os.path.join(profdata_dir, 'base_unittests.profdata')
426    args = [
427        'script_name', '--output-json', 'output.json', '--build-properties',
428        build_properties, '--summary-json', 'summary.json', '--task-output-dir',
429        task_output_dir, '--profdata-dir', profdata_dir, '--llvm-profdata',
430        'llvm-profdata', 'a.json', 'b.json', 'c.json', '--test-target-name',
431        'base_unittests'
432    ]
433
434    test_scenarios = [
435        {
436            # Base set of args should set --sparse to false by default
437            'args': None,
438            'expected_outcome': False,
439        },
440        {
441            # Sparse should parse True when only --sparse is specified
442            'args': ['--sparse'],
443            'expected_outcome': True,
444        }
445    ]
446
447    for scenario in test_scenarios:
448      args = copy.deepcopy(args)
449      additional_args = scenario['args']
450      if additional_args:
451        args.extend(additional_args)
452      expected_outcome = scenario['expected_outcome']
453
454      with mock.patch.object(merger, 'merge_profiles') as mock_merge:
455        mock_merge.return_value = None, None
456        with mock.patch.object(sys, 'argv', args):
457          merge_results.main()
458          self.assertEqual(
459              mock_merge.call_args,
460              mock.call(task_output_dir,
461                        profdata_file,
462                        '.profraw',
463                        'llvm-profdata',
464                        sparse=expected_outcome,
465                        skip_validation=False), None)
466
467
# Allow running this test file directly (it is normally run by the
# test harness via vpython3).
if __name__ == '__main__':
  unittest.main()
470