# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import logging, random, re, sgmllib, time, urllib

from autotest_lib.client.common_lib import error
from autotest_lib.server import test
from autotest_lib.server.cros import pyauto_proxy
from autotest_lib.server.cros import stress

SLEEP_DEFAULT_SEED = 1
SLEEP_DEFAULT_SECS = {'on': {'min': 3, 'max': 6},
                      'off': {'min': 10, 'max': 15}}
SLEEP_FAST_SECS = {'on': {'min': 1, 'max': 5},
                   'off': {'min': 1, 'max': 5}}
MAX_TABS = 10
# TODO(tbroch) Investigate removing retries and instead querying network
# status to determine whether it's ok to try loading a new tab.
MAX_TAB_RETRIES = 3


class AlexaParser(sgmllib.SGMLParser):
    """Class to parse Alexa HTML for popular websites.

    Attributes:
        sites: List of hyperlinks (URLs) gathered from the Alexa site.
    """

    def __init__(self, verbose=0):
        sgmllib.SGMLParser.__init__(self, verbose)
        self._sites = []

    def start_a(self, attributes):
        """Harvest URLs of top sites to visit later."""
        for name, value in attributes:
            if name == "href":
                match = re.search(".*/(.+)#keywords", value)
                if match:
                    self._sites.append("http://www." + match.group(1))

    def parse(self, html):
        """Parse the given HTML string."""
        self.feed(html)
        self.close()

    def get_sites(self):
        """Retrieve the list of URLs harvested so far.

        Returns:
            List of URL strings.
        """
        return self._sites
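
# A minimal usage sketch for AlexaParser (not part of the test flow): feed
# the parser the HTML of an Alexa topsites page and read back the harvested
# URLs. The 'topsites.html' filename is hypothetical.
#
#     parser = AlexaParser()
#     parser.parse(open('topsites.html').read())
#     print parser.get_sites()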


class AlexaSites(object):
    """Class to scrape a list of URLs from the Alexa ranking website.

    Attributes:
        url_prefix: String URL prefix, used to assemble the final Alexa URL
            to visit and scrape for top sites.
        url_suffix: String URL suffix, the other component of the final URL.
        parser: AlexaParser instance used to harvest site URLs from the
            fetched HTML.
        num_sites: Number of top-ranked sites to scrape.
    """

    def __init__(self, url_prefix, url_suffix, num_sites):
        self._url_prefix = url_prefix
        self._url_suffix = url_suffix
        self._parser = AlexaParser()
        self._num_sites = num_sites
    def get_sites(self):
        """Generate the list of sites and return it.

        Pages are fetched one at a time until enough sites are gathered or
        a fetch yields no new sites.

        Returns:
            List of URL strings.
        """
        i = 0
        prev_sites = -1
        cur_sites = len(self._parser.get_sites())
        while cur_sites < self._num_sites and cur_sites > prev_sites:
            fd = urllib.urlopen("%s%d%s" % (self._url_prefix, i,
                                            self._url_suffix))
            html = fd.read()
            fd.close()
            self._parser.parse(html)
            i += 1
            prev_sites = cur_sites
            cur_sites = len(self._parser.get_sites())
        return self._parser.get_sites()[0:self._num_sites]
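
# Usage sketch, mirroring the call in run_once() below; the prefix/suffix
# form the same country-ranking URL the test uses, and 25 is an arbitrary
# example count:
#
#     alexa = AlexaSites("http://www.alexa.com/topsites/countries;",
#                        "/US", 25)
#     for url in alexa.get_sites():
#         print url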


def surf(pyauto, sites):
    """Surf to a list of URLs, opening one new tab per site."""
    for cnt, url in enumerate(sites):
        logging.info("site %d of %d is %s", cnt + 1, len(sites), url)
        success = False
        for retry in xrange(MAX_TAB_RETRIES):
            try:
                # Avoid tab bloat by closing old tabs first.
                while pyauto.GetTabCount() > MAX_TABS:
                    pyauto.CloseTab()
                pyauto.AppendTab(url)
                success = True
            except Exception:
                logging.info("retry %d of site %s", retry + 1, url)
            else:
                break
        if not success:
            raise error.TestFail("Unable to browse %s" % url)
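
# Sketch of calling surf() directly with a hand-picked URL list (example
# URLs, not the scraped Alexa set); the pyauto proxy comes from
# pyauto_proxy.create_pyauto_proxy(host) as in initialize() below:
#
#     surf(pyauto, ["http://www.google.com", "http://www.wikipedia.org"])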


class CloseLidRandomly(object):
    """Callable to close and open the lid at random intervals."""

    def __init__(self, servo, sleep_secs=None, sleep_seed=None):
        self._servo = servo
        if not sleep_secs:
            sleep_secs = SLEEP_DEFAULT_SECS
        self._sleep_secs = sleep_secs
        self._robj = random.Random()
        self._robj.seed(sleep_seed if sleep_seed else SLEEP_DEFAULT_SEED)

    def __call__(self):
        # Close the lid, hold it closed for a random 'on' interval, then
        # open it and wait a random 'off' interval before returning.
        self._servo.set_nocheck('lid_open', 'no')
        time.sleep(self._robj.uniform(self._sleep_secs['on']['min'],
                                      self._sleep_secs['on']['max']))
        self._servo.set_nocheck('lid_open', 'yes')
        time.sleep(self._robj.uniform(self._sleep_secs['off']['min'],
                                      self._sleep_secs['off']['max']))
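
# Sketch of driving the lid directly for a fixed number of cycles (here 10,
# an arbitrary count); 'servo' is the host's servo attribute, as used in
# run_once() below:
#
#     close_lid = CloseLidRandomly(servo, SLEEP_FAST_SECS)
#     for _ in xrange(10):
#         close_lid()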


class platform_LidStress(test.test):
    """Uses servo to repeatedly close & open lid while surfing."""
    version = 1

    def initialize(self, host):
        self._pyauto = pyauto_proxy.create_pyauto_proxy(host)
        self._pyauto.LoginToDefaultAccount()

    def cleanup(self):
        self._pyauto.cleanup()

    def run_once(self, host, num_cycles=None):
        if not num_cycles:
            num_cycles = 50
        # Open & close lid frequently and quickly.
        lid_fast = stress.CountedStressor(CloseLidRandomly(host.servo,
                                                           SLEEP_FAST_SECS))
        lid_fast.start(num_cycles)
        lid_fast.wait()
        # Surf & open & close lid less frequently.
        alexa = AlexaSites("http://www.alexa.com/topsites/countries;",
                           "/US", num_cycles)
        lid = stress.ControlledStressor(CloseLidRandomly(host.servo))
        lid.start()
        surf(self._pyauto, alexa.get_sites())
        lid.stop()