#!/usr/bin/env python
"""
This file generates all telemetry_Benchmarks control files from a master list.
"""
# This test list is a subset of the telemetry benchmark tests. The full list
# can be obtained by executing
# /build/${BOARD}/usr/local/telemetry/src/tools/perf/list_benchmarks

# PLEASE READ THIS:

# PERF_PER_BUILD_TESTS: these tests run on each build (tot, tot-1, tot-2) and
# are expensive to run.

# PERF_DAILY_RUN_TESTS: these tests run nightly on the tot build. If you are
# trying to gain confidence in a new test, adding your test to this list is a
# good start.

# PERF_WEEKLY_RUN_TESTS: these tests run on a weekly schedule.

# PERF_NO_SUITE: control files are generated for these tests, but they are not
# added to any suite.

# To add a new test to any of these lists, please add rohitbm, lafeenstra and
# haddowk to the change.

PERF_PER_BUILD_TESTS = (
    'cros_ui_smoothness',
    'jetstream',
    'kraken',
    'octane',
    'speedometer',
    'speedometer2',
)

PERF_DAILY_RUN_TESTS = (
    'cros_tab_switching.typical_24',
    'dromaeo.domcoreattr',
    'dromaeo.domcoremodify',
    'dromaeo.domcorequery',
    'dromaeo.domcoretraverse',
    'image_decoding.image_decoding_measurement',
    'memory.desktop',
    'page_cycler_v2.typical_25',
    'robohornet_pro',
    'smoothness.tough_pinch_zoom_cases',
    'sunspider',
    'webrtc',
)

PERF_WEEKLY_RUN_TESTS = (
    'system_health.memory_desktop',
)

PERF_NO_SUITE = (
    'page_cycler.typical_25',
)

ALL_TESTS = (PERF_PER_BUILD_TESTS +
             PERF_DAILY_RUN_TESTS +
             PERF_WEEKLY_RUN_TESTS +
             PERF_NO_SUITE)
CONTROLFILE_TEMPLATE = (
"""# Copyright 2018 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Do not edit this file! It was created by generate_controlfiles.py.
from autotest_lib.client.common_lib import utils
AUTHOR = 'sbasi, achuith, rohitbm'
NAME = 'telemetry_Benchmarks.{test}'
{attributes}
TIME = 'LONG'
TEST_CATEGORY = 'Benchmark'
TEST_CLASS = 'performance'
TEST_TYPE = 'server'
DOC = '''
This server side test suite executes the Telemetry Benchmark:
{test}
This is part of Chrome for Chrome OS performance testing.
Pass local=True to run with local telemetry and no AFE server.
'''
def run_benchmark(machine):
    host = hosts.create_host(machine)
    job.run_test('telemetry_Benchmarks', host=host,
                 benchmark='{test}',
                 tag='{test}',
                 args=utils.args_to_dict(args))

parallel_simple(run_benchmark, machines)""")
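
# The {test} and {attributes} placeholders in CONTROLFILE_TEMPLATE are filled
# in via str.format() in the generation loop at the bottom of this file.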
def _get_suite(test):
    """Returns the ATTRIBUTES line for |test|'s suite, or '' for no suite."""
    if test in PERF_PER_BUILD_TESTS:
        return 'ATTRIBUTES = \'suite:crosbolt_perf_perbuild\''
    elif test in PERF_DAILY_RUN_TESTS:
        return 'ATTRIBUTES = \'suite:crosbolt_perf_nightly\''
    elif test in PERF_WEEKLY_RUN_TESTS:
        return 'ATTRIBUTES = \'suite:crosbolt_perf_weekly\''
    return ''
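
# For illustration only (not executed): with the lists above,
#   _get_suite('octane')     -> "ATTRIBUTES = 'suite:crosbolt_perf_perbuild'"
#   _get_suite('sunspider')  -> "ATTRIBUTES = 'suite:crosbolt_perf_nightly'"
#   _get_suite('page_cycler.typical_25') -> ''  (no suite)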

# Write one control file per benchmark.
for test in ALL_TESTS:
    filename = 'control.%s' % test
    with open(filename, 'w+') as f:
        content = CONTROLFILE_TEMPLATE.format(
            test=test,
            attributes=_get_suite(test))
        f.write(content)
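
# Usage sketch (the working directory is an assumption, not enforced by the
# script): run it from the directory that should hold the generated control
# files, e.g.
#
#   $ ./generate_controlfiles.py
#
# It writes one file per entry in ALL_TESTS, e.g. control.octane and
# control.speedometer2, each containing the rendered CONTROLFILE_TEMPLATE.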