#!/usr/bin/env python
"""This file generates all telemetry_Benchmarks control files from a master
list.
"""

# This test list is a subset of telemetry benchmark tests. The full list can be
# obtained by executing
# /build/${BOARD}/usr/local/telemetry/src/tools/perf/list_benchmarks

# PLEASE READ THIS:

# PERF_PER_BUILD_TESTS: these tests run on each build (tot, tot-1, tot-2) and
# are expensive to run.

# PERF_DAILY_RUN_TESTS: these tests run nightly on the tot build. If you are
# trying to gain confidence in a new test, adding it to this list is a good
# start.

# To add a new test to any of these lists, please include rohitbm, lafeenstra,
# and haddowk on the change.

PERF_PER_BUILD_TESTS = (
    'jetstream',
    'kraken',
    'octane',
    'smoothness.top_25_smooth',
    'speedometer',
    'startup.cold.blank_page',
)

PERF_DAILY_RUN_TESTS = (
    'dromaeo.domcoreattr',
    'dromaeo.domcoremodify',
    'dromaeo.domcorequery',
    'dromaeo.domcoretraverse',
    'image_decoding.image_decoding_measurement',
    'page_cycler_v2.typical_25',
    'robohornet_pro',
    'smoothness.tough_animation_cases',
    'smoothness.tough_canvas_cases',
    'smoothness.tough_filters_cases',
    'smoothness.tough_pinch_zoom_cases',
    'smoothness.tough_scrolling_cases',
    'smoothness.tough_webgl_cases',
    'sunspider',
    'webrtc',
)

# Tests that get a control file but do not belong to any suite.
PERF_NO_SUITE = (
    'page_cycler.typical_25',
)

ALL_TESTS = PERF_PER_BUILD_TESTS + PERF_DAILY_RUN_TESTS + PERF_NO_SUITE

CONTROLFILE_TEMPLATE = (
"""# Copyright 2014 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Do not edit this file! It was created by generate_controlfiles.py.

from autotest_lib.client.common_lib import utils

AUTHOR = 'sbasi, achuith, rohitbm'
NAME = 'telemetry_Benchmarks.{test}'
{attributes}
TIME = 'LONG'
TEST_CATEGORY = 'Benchmark'
TEST_CLASS = 'performance'
TEST_TYPE = 'server'

DOC = '''
This server side test suite executes the Telemetry Benchmark:
{test}
This is part of Chrome for Chrome OS performance testing.

Pass local=True to run with local telemetry and no AFE server.
'''

def run_benchmark(machine):
    host = hosts.create_host(machine)
    job.run_test('telemetry_Benchmarks', host=host,
                 benchmark='{test}',
                 tag='{test}',
                 args=utils.args_to_dict(args))

parallel_simple(run_benchmark, machines)""")


def _get_suite(test):
    """Returns the ATTRIBUTES line for the suite a test belongs to, if any."""
    if test in PERF_PER_BUILD_TESTS:
        return "ATTRIBUTES = 'suite:crosbolt_perf_perbuild'"
    if test in PERF_DAILY_RUN_TESTS:
        return "ATTRIBUTES = 'suite:crosbolt_perf_nightly'"
    return ''


# Write one control file per benchmark, filling in the test name and its
# suite attributes.
for test in ALL_TESTS:
    filename = 'control.%s' % test
    with open(filename, 'w') as f:
        content = CONTROLFILE_TEMPLATE.format(
            test=test, attributes=_get_suite(test))
        f.write(content)
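
# Example (for illustration only): running this script in the current
# directory writes one control file per entry in ALL_TESTS. For instance,
# 'control.octane' starts with:
#
#   NAME = 'telemetry_Benchmarks.octane'
#   ATTRIBUTES = 'suite:crosbolt_perf_perbuild'
#
# because 'octane' appears in PERF_PER_BUILD_TESTS. Tests in PERF_NO_SUITE
# get no ATTRIBUTES line at all, since _get_suite returns an empty string.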