• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1# Copyright 2024 The Chromium Authors
2# Use of this source code is governed by a BSD-style license that can be
3# found in the LICENSE file.
4
5from crossbench.benchmarks.loading.page.live import LivePage
6from crossbench.cli.config.probe import ProbeListConfig
7from crossbench.probes.js import JSProbe
8from tests import test_helper
9from tests.crossbench.probes.helper import GenericProbeTestCase
10
11
class TestJSProbe(GenericProbeTestCase):
  """Unit tests for JSProbe: config parsing and an end-to-end loading run."""

  def test_parse_example_config(self):
    """The checked-in example js.config.hjson parses into a single JSProbe."""
    config_file = test_helper.config_dir() / "doc/probe/js.config.hjson"
    # Mirror the real on-disk example config into the fake filesystem (pyfakefs)
    # used by the test case so parse_path can read it.
    self.fs.add_real_file(config_file)
    self.assertTrue(config_file.is_file())
    probes = ProbeListConfig.parse_path(config_file).probes
    self.assertEqual(len(probes), 1)
    probe = probes[0]
    self.assertIsInstance(probe, JSProbe)
    # The example config must define a non-empty metric-extraction script.
    # NOTE: removed a leftover bare `isinstance(probe, JSProbe)` statement whose
    # result was discarded (duplicate of the assertIsInstance above).
    self.assertTrue(probe.metric_js)

  def test_parse_config(self):
    """A minimal dict config maps "setup"/"js" onto setup_js/metric_js."""
    config = {
        "setup": "globalThis.metrics = {};",
        "js": "return globalThis.metrics;",
    }
    probe = JSProbe.config_parser().parse(config)
    self.assertIsInstance(probe, JSProbe)
    self.assertEqual(probe.setup_js, "globalThis.metrics = {};")
    self.assertEqual(probe.metric_js, "return globalThis.metrics;")

  def test_simple_loading_case(self):
    """Full run over two stories with repetitions produces merged JSON results
    at every aggregation level (repetition, story, browser, top-level)."""
    config = {
        "setup": "globalThis.metrics = {};",
        "js": "return globalThis.metrics;",
    }
    probe = JSProbe.config_parser().parse(config)
    stories = [
        LivePage("google", "https://google.com"),
        LivePage("amazon", "https://amazon.com")
    ]
    repetitions = 2
    runner = self.create_runner(
        stories,
        js_side_effects=[
            # Scripted return value for the "setup" JS call:
            None,
            # Scripted return value for the metric-extraction "js" call:
            {
                "metric1": 1.1,
                "metric2": 2.2
            }
        ],
        repetitions=repetitions,
        separate=True,
        throw=True)
    runner.attach_probe(probe)
    runner.run()
    self.assertTrue(runner.is_success)
    js_result_files = list(runner.out_dir.glob(f"**/{probe.name}.json"))
    # One file per story repetition
    result_count = len(self.browsers) * len(stories) * repetitions
    # One merged result per story
    result_count += len(self.browsers) * len(stories)
    # One merged results per browser
    result_count += len(self.browsers)
    # One top-level
    result_count += 1
    self.assertEqual(len(js_result_files), result_count)

    (story_data, repetitions_data, stories_data,
     browsers_data) = self.get_non_empty_json_results(runner, probe)
    self.assertIsInstance(story_data, dict)
    self.assertIsInstance(repetitions_data, dict)
    self.assertIsInstance(stories_data, dict)
    self.assertIsInstance(browsers_data, dict)
    # TODO: check probe result contents
83
# Entry point: delegate to the shared pytest runner when executed directly.
if __name__ == "__main__":
  test_helper.run_pytest(__file__)
86