cbuildbot: Export tags.json to Cloud Datastore.
Also add a number of additional tags such as build id,
changes, stage statuses, boards, arches, toolchains.
BUG=chromium:653342
TEST=run_tests
Change-Id: If29a29ec8b7e8a0b25fa681a22232203a1f6a412
Reviewed-on: https://chromium-review.googlesource.com/399478
Commit-Ready: David Riley <davidriley@chromium.org>
Tested-by: David Riley <davidriley@chromium.org>
Reviewed-by: David Riley <davidriley@chromium.org>
diff --git a/cbuildbot/stages/generic_stages.py b/cbuildbot/stages/generic_stages.py
index d120317..a4087f0 100644
--- a/cbuildbot/stages/generic_stages.py
+++ b/cbuildbot/stages/generic_stages.py
@@ -12,6 +12,7 @@
import os
import re
import sys
+import tempfile
import time
import traceback
@@ -24,6 +25,7 @@
from chromite.cbuildbot import buildbucket_lib
from chromite.cbuildbot import commands
+from chromite.cbuildbot import topology
from chromite.cbuildbot import repository
from chromite.lib import config_lib
from chromite.lib import constants
@@ -983,7 +985,28 @@
self._HandleExceptionAsWarning(sys.exc_info())
@failures_lib.SetFailureType(failures_lib.InfrastructureFailure)
- def UploadMetadata(self, upload_queue=None, filename=None):
+ def RunExportMetadata(self, filename):
+ """Export JSON file of the builder run's metadata to Cloud Datastore.
+
+ Args:
+ filename: Name of file to export.
+ """
+ creds_file = topology.topology.get(topology.DATASTORE_WRITER_CREDS_KEY)
+ if creds_file is None:
+ logging.warn('No known path to datastore credentials file.')
+ return
+
+ export_cmd = os.path.join(self._build_root, 'chromite', 'bin',
+ 'export_to_gcloud')
+ try:
+ cros_build_lib.RunCommand([export_cmd, creds_file, filename])
+ except cros_build_lib.RunCommandError as e:
+ logging.warn('Unable to export to datastore: %s', e)
+
+
+ @failures_lib.SetFailureType(failures_lib.InfrastructureFailure)
+ def UploadMetadata(self, upload_queue=None, filename=constants.METADATA_JSON,
+ export=False):
"""Create and upload JSON file of the builder run's metadata, and to cidb.
This uses the existing metadata stored in the builder run. The default
@@ -999,9 +1022,8 @@
this queue. If None then upload it directly now.
filename: Name of file to dump metadata to.
Defaults to constants.METADATA_JSON
+      export: If True, constants.METADATA_TAGS will be exported to gcloud.
"""
- filename = filename or constants.METADATA_JSON
-
metadata_json = os.path.join(self.archive_path, filename)
# Stages may run in parallel, so we have to do atomic updates on this.
@@ -1021,5 +1043,14 @@
logging.info('Writing updated metadata to database for build_id %s.',
build_id)
db.UpdateMetadata(build_id, self._run.attrs.metadata)
+ if export:
+ d = self._run.attrs.metadata.GetDict()
+ if constants.METADATA_TAGS in d:
+ with tempfile.NamedTemporaryFile() as f:
+ logging.info('Export tags to gcloud via %s.', f.name)
+ logging.debug('Exporting: %s' % d[constants.METADATA_TAGS])
+ osutils.WriteFile(f.name, json.dumps(d[constants.METADATA_TAGS]),
+ atomic=True, makedirs=True)
+ self.RunExportMetadata(f.name)
else:
logging.info('Skipping database update, no database or build_id.')
diff --git a/cbuildbot/stages/report_stages.py b/cbuildbot/stages/report_stages.py
index 9c5b52e..d316fb2 100644
--- a/cbuildbot/stages/report_stages.py
+++ b/cbuildbot/stages/report_stages.py
@@ -88,28 +88,42 @@
This is a proof of concept for using tags to help find commonality
in failures.
-
- TODO(crbug.com/653342): Refactor to more appropriate locations, and add a lot
- more data. Especially board family, and builder-type.
"""
+ build_id, _ = builder_run.GetCIDBHandle()
+
# Yes, these values match general metadata values, but they are just
# proof of concept, so far.
tags = {
- 'bot-hostname': cros_build_lib.GetHostName(fully_qualified=True),
- 'build-number': builder_run.buildnumber,
- 'builder-name': builder_run.GetBuilderName(),
- 'buildbot-url': os.environ.get('BUILDBOT_BUILDBOTURL', ''),
- 'buildbot-master-name':
+ 'bot_config': builder_run.config['name'],
+ 'bot_hostname': cros_build_lib.GetHostName(fully_qualified=True),
+ 'build_id': build_id,
+ 'build_number': builder_run.buildnumber,
+ 'builder_name': builder_run.GetBuilderName(),
+ 'buildbot_url': os.environ.get('BUILDBOT_BUILDBOTURL', ''),
+ 'buildbot_master_name':
os.environ.get('BUILDBOT_MASTERNAME', ''),
- 'bot-config': builder_run.config['name'],
+ 'id': ('Build', build_id),
'master_build_id': builder_run.options.master_build_id,
+ 'important': builder_run.config['important'],
}
+ # Guess type of bot.
+ tags['bot_type'] = 'unknown'
+ if '.golo.' in tags['bot_hostname']:
+ tags['bot_type'] = 'golo'
+ else:
+ gce_types = ['beefy', 'standard', 'wimpy']
+ for t in gce_types:
+ host_string = 'cros-%s' % t
+ if host_string in tags['bot_hostname']:
+ tags['bot_type'] = 'gce-%s' % t
+ break
+
# Look up the git version.
try:
cmd_result = cros_build_lib.RunCommand(['git', '--version'],
capture_output=True)
- tags['git_version'] = cmd_result.output
+ tags['git_version'] = cmd_result.output.strip()
except cros_build_lib.RunCommandError:
pass # If we fail, just don't include the tag.
@@ -131,7 +145,8 @@
except (cros_build_lib.RunCommandError, IndexError):
pass # If we fail, just don't include the tag.
- builder_run.attrs.metadata.UpdateKeyDictWithDict('testing-tags', tags)
+ builder_run.attrs.metadata.UpdateKeyDictWithDict(constants.METADATA_TAGS,
+ tags)
def GetChildConfigListMetadata(child_configs, config_status_map):
"""Creates a list for the child configs metadata.
@@ -190,12 +205,11 @@
self._run.config['doc'])
WriteBasicMetadata(self._run)
- WriteTagMetadata(self._run)
# This is a heuristic value for |important|, since patches that get applied
# later in the build might change the config. We write it now anyway,
# because in case the build fails before Sync, it is better to have this
- # heuristic value than None. In BuildReexectuionFinishedStage, we re-write
+ # heuristic value than None. In BuildReexecutionFinishedStage, we re-write
# the definitive value.
self._run.attrs.metadata.UpdateWithDict(
{'important': self._run.config['important']})
@@ -249,6 +263,9 @@
master_build_status['build_number'])
logging.PrintBuildbotLink('Link to master build', master_url)
+ # Write the tag metadata last so that a build_id is available.
+ WriteTagMetadata(self._run)
+
def HandleSkip(self):
"""Ensure that re-executions use the same db instance as initial db."""
metadata_dict = self._run.attrs.metadata.GetDict()
@@ -395,20 +412,47 @@
}
if len(config['boards']) == 1:
- toolchains = toolchain.GetToolchainsForBoard(config['boards'][0],
- buildroot=build_root)
- metadata['toolchain-tuple'] = (
- toolchain.FilterToolchains(toolchains, 'default', True).keys() +
- toolchain.FilterToolchains(toolchains, 'default', False).keys())
+ metadata['toolchain-tuple'] = toolchain.GetToolchainTupleForBoard(
+ config['boards'][0], buildroot=build_root)
logging.info('Metadata being written: %s', metadata)
self._run.attrs.metadata.UpdateWithDict(metadata)
+
+ toolchains = set()
+ toolchain_tuples = []
+ primary_toolchains = []
+ for board in config['boards']:
+ toolchain_tuple = toolchain.GetToolchainTupleForBoard(
+ board, buildroot=build_root)
+ toolchains |= set(toolchain_tuple)
+ toolchain_tuples.append(','.join(toolchain_tuple))
+ if len(toolchain_tuple):
+ primary_toolchains.append(toolchain_tuple[0])
+
# Update 'version' separately to avoid overwriting the existing
# entries in it (e.g. PFQ builders may have written the Chrome
# version to uprev).
logging.info("Metadata 'version' being written: %s", version)
self._run.attrs.metadata.UpdateKeyDictWithDict('version', version)
+ tags = {
+ 'boards': config['boards'],
+ 'child_config_names': [cc['name'] for cc in child_configs],
+ 'build_type': config['build_type'],
+ 'important': config['important'],
+
+ # Data for the toolchain used.
+ 'sdk_version': sdk_verinfo.get('SDK_LATEST_VERSION', '<unknown>'),
+ 'toolchain_url': sdk_verinfo.get('TC_PATH', '<unknown>'),
+ 'toolchains': list(toolchains),
+ 'toolchain_tuples': toolchain_tuples,
+ 'primary_toolchains': primary_toolchains,
+ }
+ full_version = self._run.attrs.metadata.GetValue('version')
+ tags.update({'version_%s' % v: full_version[v] for v in full_version})
+ self._run.attrs.metadata.UpdateKeyDictWithDict(constants.METADATA_TAGS,
+ tags)
+
# Ensure that all boards and child config boards have a per-board
# metadata subdict.
for b in config['boards']:
@@ -796,7 +840,7 @@
# Upload metadata, and update the pass/fail streak counter for the main
# run only. These aren't needed for the child builder runs.
- self.UploadMetadata()
+ self.UploadMetadata(export=True)
self._UpdateRunStreak(self._run, final_status)
# Alert if the Pre-CQ has infra failures.
@@ -900,6 +944,32 @@
self.GetReportMetadata(final_status=final_status,
completion_instance=self._completion_instance))
+ # Add tags for the arches and statuses of the build.
+ # arches requires crossdev which isn't available at the early part of the
+ # build.
+ arches = []
+ for board in self._run.config['boards']:
+ toolchains = toolchain.GetToolchainsForBoard(
+ board, buildroot=self._build_root)
+ default = toolchain.FilterToolchains(toolchains, 'default', True).keys()
+ if len(default):
+ try:
+ arches.append(toolchain.GetArchForTarget(default[0]))
+ except cros_build_lib.RunCommandError as e:
+ logging.warning(
+ 'Unable to retrieve arch for board %s default toolchain %s: %s' %
+ (board, default, e))
+ tags = {
+ 'arches': arches,
+ 'status': final_status,
+ }
+ results = self._run.attrs.metadata.GetValue('results')
+ for stage in results:
+ tags['stage_status:%s' % stage['name']] = stage['status']
+ tags['stage_summary:%s' % stage['name']] = stage['summary']
+ self._run.attrs.metadata.UpdateKeyDictWithDict(constants.METADATA_TAGS,
+ tags)
+
# Some operations can only be performed if a valid version is available.
try:
self._run.GetVersionInfo()
diff --git a/cbuildbot/stages/report_stages_unittest.py b/cbuildbot/stages/report_stages_unittest.py
index 2006429..67aa9fc 100644
--- a/cbuildbot/stages/report_stages_unittest.py
+++ b/cbuildbot/stages/report_stages_unittest.py
@@ -7,6 +7,7 @@
from __future__ import print_function
import datetime as dt
+import json
import mock
import os
@@ -19,6 +20,7 @@
from chromite.lib import metadata_lib
from chromite.lib import results_lib
from chromite.cbuildbot import triage_lib
+from chromite.cbuildbot.stages import generic_stages
from chromite.cbuildbot.stages import generic_stages_unittest
from chromite.cbuildbot.stages import report_stages
from chromite.lib import alerts
@@ -55,6 +57,8 @@
self.PatchObject(cbuildbot_run._BuilderRunBase, 'GetVersionInfo',
return_value=fake_versioninfo)
self.PatchObject(toolchain, 'GetToolchainsForBoard')
+ self.PatchObject(toolchain, 'GetToolchainTupleForBoard',
+ return_value=['i686-pc-linux-gnu', 'arm-none-eabi'])
def tearDown(self):
cidb.CIDBConnectionFactory.SetupMockCidb()
@@ -62,6 +66,8 @@
def testPerformStage(self):
"""Test that a normal runs completes without error."""
self.RunStage()
+ tags = self._run.attrs.metadata.GetValue(constants.METADATA_TAGS)
+ self.assertEqual(tags['version_full'], 'R39-4815.0.0-rc1')
def testMasterSlaveVersionMismatch(self):
"""Test that master/slave version mismatch causes failure."""
@@ -146,6 +152,9 @@
'master_build', constants.WATERFALL_EXTERNAL, 1,
'master_build_config', 'bot_hostname')
+ self.PatchObject(toolchain, 'GetToolchainsForBoard')
+ self.PatchObject(toolchain, 'GetArchForTarget', return_value='x86')
+
self._Prepare(build_id=None, master_build_id=master_build_id)
def testUnknownWaterfall(self):
@@ -208,6 +217,9 @@
self.PatchObject(report_stages.ReportStage, '_GetBuildDuration',
return_value=1000)
+ self.PatchObject(generic_stages.ArchivingStageMixin, 'RunExportMetadata')
+ self.PatchObject(toolchain, 'GetToolchainsForBoard')
+ self.PatchObject(toolchain, 'GetArchForTarget', return_value='x86')
# Set up a general purpose cidb mock. Tests with more specific
# mock requirements can replace this with a separate call to
@@ -297,13 +309,13 @@
# Verify build stages timeline contains the stages that were mocked.
self.assertEquals(calls, commands.UploadArchivedFile.call_args_list)
- timeline_content = osutils.WriteFile.call_args_list[1][0][1]
+ timeline_content = osutils.WriteFile.call_args_list[2][0][1]
for s in stages:
self.assertIn('["%s", new Date' % s['name'], timeline_content)
# Verify slaves timeline contains the slaves that were mocked.
self.assertEquals(calls, commands.UploadArchivedFile.call_args_list)
- timeline_content = osutils.WriteFile.call_args_list[2][0][1]
+ timeline_content = osutils.WriteFile.call_args_list[3][0][1]
for s in statuses:
self.assertIn('["%s - %s", new Date' %
(s['build_config'], s['build_number']), timeline_content)
@@ -359,6 +371,26 @@
self.assertTrue(metadata_dict.has_key('builder-name'))
self.assertTrue(metadata_dict.has_key('bot-hostname'))
+ def testWriteTagMetadata(self):
+ """Test that WriteTagMetadata writes expected keys correctly."""
+ self.PatchObject(cros_build_lib, 'GetHostName', return_value='cros-wimpy2')
+ self._SetupUpdateStreakCounter()
+ report_stages.WriteTagMetadata(self._run)
+ tags_dict = self._run.attrs.metadata.GetValue(constants.METADATA_TAGS)
+ self.assertEqual(tags_dict['build_number'],
+ generic_stages_unittest.DEFAULT_BUILD_NUMBER)
+ self.assertTrue(tags_dict.has_key('builder_name'))
+ self.assertTrue(tags_dict.has_key('bot_hostname'))
+ self.RunStage()
+ tags_content = osutils.WriteFile.call_args_list[1][0][1]
+ tags_content_dict = json.loads(tags_content)
+ self.assertEqual(tags_content_dict['build_number'],
+ generic_stages_unittest.DEFAULT_BUILD_NUMBER)
+ self.assertEqual(
+ osutils.WriteFile.call_args_list[1][0][0],
+ (generic_stages.ArchivingStageMixin.RunExportMetadata.
+ call_args_list[0][0][0]))
+
def testGetChildConfigsMetadataList(self):
"""Test that GetChildConfigListMetadata generates child config metadata."""
child_configs = [{'name': 'config1', 'boards': ['board1']},
diff --git a/cbuildbot/stages/sync_stages.py b/cbuildbot/stages/sync_stages.py
index e300b20..a52c635 100644
--- a/cbuildbot/stages/sync_stages.py
+++ b/cbuildbot/stages/sync_stages.py
@@ -459,6 +459,23 @@
x[cros_patch.ATTR_PATCH_NUMBER],
x[cros_patch.ATTR_REMOTE]))
self._run.attrs.metadata.UpdateWithDict({'changes': changes_list})
+ change_ids = []
+ change_gerrit_ids = []
+ change_gerrit_numbers = []
+ for c in changes_list:
+ change_ids.append(c[cros_patch.ATTR_CHANGE_ID])
+ gerrit_number = c[cros_patch.ATTR_GERRIT_NUMBER]
+ gerrit_id = '/'.join([c[cros_patch.ATTR_REMOTE], gerrit_number,
+ c[cros_patch.ATTR_PATCH_NUMBER]])
+ change_gerrit_ids.append(gerrit_id)
+ change_gerrit_numbers.append(gerrit_number)
+ tags = {
+ 'change_ids': change_ids,
+ 'change_gerrit_ids': change_gerrit_ids,
+ 'change_gerrit_numbers': change_gerrit_numbers,
+ }
+ self._run.attrs.metadata.UpdateKeyDictWithDict(constants.METADATA_TAGS,
+ tags)
@failures_lib.SetFailureType(failures_lib.InfrastructureFailure)
def PerformStage(self):
diff --git a/lib/constants.py b/lib/constants.py
index 9b59da4..ae1946f 100644
--- a/lib/constants.py
+++ b/lib/constants.py
@@ -930,6 +930,7 @@
METADATA_JSON = 'metadata.json'
PARTIAL_METADATA_JSON = 'partial-metadata.json'
+METADATA_TAGS = 'tags'
DELTA_SYSROOT_TAR = 'delta_sysroot.tar.xz'
DELTA_SYSROOT_BATCH = 'batch'
diff --git a/lib/metadata_lib.py b/lib/metadata_lib.py
index 377e76b..966430c 100644
--- a/lib/metadata_lib.py
+++ b/lib/metadata_lib.py
@@ -243,9 +243,17 @@
"""
return self._metadata_dict.get(key, default)
- def GetJSON(self):
- """Return a JSON string representation of metadata."""
- return json.dumps(self.GetDict())
+ def GetJSON(self, key=None):
+ """Return a JSON string representation of metadata.
+
+ Args:
+ key: Key to return as JSON representation. If None, returns all
+ metadata. Default: None
+ """
+ if key:
+ return json.dumps(self.GetValue(key))
+ else:
+ return json.dumps(self.GetDict())
def RecordCLAction(self, change, action, timestamp=None, reason=''):
"""Record an action that was taken on a CL, to the metadata.
diff --git a/lib/toolchain.py b/lib/toolchain.py
index c4244c1..3cbde78 100644
--- a/lib/toolchain.py
+++ b/lib/toolchain.py
@@ -64,6 +64,21 @@
return targets
+def GetToolchainTupleForBoard(board, buildroot=constants.SOURCE_ROOT):
+ """Gets a tuple for the default and non-default toolchains for a board.
+
+ Args:
+ board: board name in question (e.g. 'daisy').
+ buildroot: path to buildroot.
+
+ Returns:
+    The tuple of toolchain targets for the board, ordered default then non-default.
+ """
+ toolchains = GetToolchainsForBoard(board, buildroot)
+ return (FilterToolchains(toolchains, 'default', True).keys() +
+ FilterToolchains(toolchains, 'default', False).keys())
+
+
def FilterToolchains(targets, key, value):
"""Filter out targets based on their attributes.