blob: 4cd4735a167e63596bfad7c8901ca9d05fb2e0d5 [file] [log] [blame]
#!/usr/bin/env python
"""
This file generates all telemetry_Benchmarks control files from a master list.
"""
# This test list is a subset of telemetry benchmark tests. The full list can be
# obtained by executing
# /build/${BOARD}/usr/local/telemetry/src/tools/perf/list_benchmarks
# PLEASE READ THIS:
# PERF_TESTS: these tests run on each build (tot, tot-1, tot-2) and are
# expensive to run.
# PERF_DAILY_RUN_TESTS: these tests run on a nightly build: tot. If you are
# trying to gain confidence for a new test, adding your test in this list is a
# good start.
# To add a new test to any of these lists, please add rohitbm, lafeenstra,
# haddowk as reviewers on the change.
# Benchmarks executed on every build (expensive; see note above).
PERF_TESTS = """
    jetstream
    kraken
    octane
    page_cycler.typical_25
    smoothness.top_25_smooth
    speedometer
    startup.cold.blank_page
""".split()
# Benchmarks executed once per nightly build only.
PERF_DAILY_RUN_TESTS = """
    dromaeo.domcoreattr
    dromaeo.domcoremodify
    dromaeo.domcorequery
    dromaeo.domcoretraverse
    image_decoding.image_decoding_measurement
    memory.top_7_stress
    robohornet_pro
    smoothness.tough_animation_cases
    smoothness.tough_canvas_cases
    smoothness.tough_filters_cases
    smoothness.tough_pinch_zoom_cases
    smoothness.tough_scrolling_cases
    smoothness.tough_webgl_cases
    sunspider
    tab_switching.top_10
    webrtc.webrtc_cases
""".split()
# Template for each generated control file.  Positional format fields:
#   {0} - suite attribute the test is scheduled under
#         ('crosbolt_perf_perbuild' or 'crosbolt_perf_nightly')
#   {1} - telemetry benchmark name (used as test name suffix, tag, and
#         the benchmark argument)
CONTROLFILE_TEMPLATE = (
"""# Copyright 2014 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Do not edit this file! It was created by generate_controlfiles.py.
from autotest_lib.client.common_lib import utils
AUTHOR = 'sbasi, achuith, rohitbm'
NAME = 'telemetry_Benchmarks.{1}'
ATTRIBUTES = 'suite:{0}'
TIME = 'LONG'
TEST_CATEGORY = 'Benchmark'
TEST_CLASS = 'performance'
TEST_TYPE = 'server'
DOC = '''
This server side test suite executes the Telemetry Benchmark:
{1}
This is part of Chrome for Chrome OS performance testing.
Pass local=True to run with local telemetry and no AFE server.
'''
def run_benchmark(machine):
host = hosts.create_host(machine)
job.run_test('telemetry_Benchmarks', host=host,
benchmark='{1}',
tag='{1}',
args=utils.args_to_dict(args))
parallel_simple(run_benchmark, machines)""")
# Emit one control file per benchmark.  Benchmarks in PERF_TESTS go into
# the per-build suite; all others go into the nightly suite.
for test in PERF_TESTS + PERF_DAILY_RUN_TESTS:
    suite = ('crosbolt_perf_perbuild' if test in PERF_TESTS
             else 'crosbolt_perf_nightly')
    # Render the content before opening the file so a formatting error
    # cannot leave a truncated/empty control file on disk.
    content = CONTROLFILE_TEMPLATE.format(suite, test)
    # 'w' (not 'w+'): the file is only written, never read back.
    with open('control.%s' % test, 'w') as f:
        f.write(content)