| #!/usr/bin/python |
| |
| # Copyright (c) 2012 The Chromium OS Authors. All rights reserved. |
| # Use of this source code is governed by a BSD-style license that can be |
| # found in the LICENSE file. |
| |
| """Main builder code for Chromium OS. |
| |
| Used by Chromium OS buildbot configuration for all Chromium OS builds including |
| full and pre-flight-queue builds. |
| """ |
| |
| import distutils.version |
| import errno |
| import glob |
| import logging |
| import optparse |
| import os |
| import pprint |
| import sys |
| import time |
| |
| from chromite.buildbot import builderstage as bs |
| from chromite.buildbot import cbuildbot_config |
| from chromite.buildbot import cbuildbot_stages as stages |
| from chromite.buildbot import cbuildbot_results as results_lib |
| from chromite.buildbot import constants |
| from chromite.buildbot import remote_try |
| from chromite.buildbot import repository |
| from chromite.buildbot import tee |
| from chromite.buildbot import trybot_patch_pool |
| |
| from chromite.lib import cgroups |
| from chromite.lib import cleanup |
| from chromite.lib import commandline |
| from chromite.lib import cros_build_lib |
| from chromite.lib import gclient |
| from chromite.lib import gerrit |
| from chromite.lib import git |
| from chromite.lib import osutils |
| from chromite.lib import patch as cros_patch |
| from chromite.lib import parallel |
| from chromite.lib import sudo |
| |
| |
# Force sudo invocations through cros_build_lib's strict-sudo handling.
# NOTE(review): exact enforcement semantics live in chromite.lib.cros_build_lib
# (presumably pairs with the sudo keepalive in chromite.lib.sudo) -- confirm.
cros_build_lib.STRICT_SUDO = True

# Default directory name for storing cbuildbot logs.
_DEFAULT_LOG_DIR = 'cbuildbot_logs'
# Log file name used for cbuildbot output (see chromite.buildbot.tee).
_BUILDBOT_LOG_FILE = 'cbuildbot.log'
# Default buildroot directory names for external/internal trybot builds
# (used by _DetermineDefaultBuildRoot).
_DEFAULT_EXT_BUILDROOT = 'trybot'
_DEFAULT_INT_BUILDROOT = 'trybot-internal'
# Build types that use the distributed (manifest-versioned) builder logic
# when running under --buildbot (see IsDistributedBuilder in
# _RunBuildStagesWrapper).
_DISTRIBUTED_TYPES = [constants.COMMIT_QUEUE_TYPE, constants.PFQ_TYPE,
                      constants.CANARY_TYPE, constants.CHROME_PFQ_TYPE,
                      constants.PALADIN_TYPE]
# Binaries that must be present on the host for buildbot runs.
# NOTE(review): the check presumably happens outside this chunk -- confirm.
_BUILDBOT_REQUIRED_BINARIES = ('pbzip2',)
# Attribute name for the remote trybot API version.
# NOTE(review): appears to pair with CustomOption.api_version -- confirm.
_API_VERSION_ATTR = 'api_version'
| |
| |
| def _PrintValidConfigs(display_all=False): |
| """Print a list of valid buildbot configs. |
| |
| Arguments: |
| display_all: Print all configs. Otherwise, prints only configs with |
| trybot_list=True. |
| """ |
| def _GetSortKey(config_name): |
| config_dict = cbuildbot_config.config[config_name] |
| return (not config_dict['trybot_list'], config_dict['description'], |
| config_name) |
| |
| COLUMN_WIDTH = 45 |
| print 'config'.ljust(COLUMN_WIDTH), 'description' |
| print '------'.ljust(COLUMN_WIDTH), '-----------' |
| config_names = cbuildbot_config.config.keys() |
| config_names.sort(key=_GetSortKey) |
| for name in config_names: |
| if display_all or cbuildbot_config.config[name]['trybot_list']: |
| desc = cbuildbot_config.config[name].get('description') |
| desc = desc if desc else '' |
| print name.ljust(COLUMN_WIDTH), desc |
| |
| |
| def _GetConfig(config_name): |
| """Gets the configuration for the build""" |
| if not cbuildbot_config.config.has_key(config_name): |
| print 'Non-existent configuration %s specified.' % config_name |
| print 'Please specify one of:' |
| _PrintValidConfigs() |
| sys.exit(1) |
| |
| result = cbuildbot_config.config[config_name] |
| |
| return result |
| |
| |
def AcquirePoolFromOptions(options):
  """Generate patch objects from passed in options.

  Args:
    options: The options object generated by optparse.

  Returns:
    trybot_patch_pool.TrybotPatchPool object.

  Raises:
    gerrit.GerritException, cros_patch.PatchException
  """
  gerrit_patches = []
  local_patches = []
  remote_patches = []

  if options.gerrit_patches:
    gerrit_patches = gerrit.GetGerritPatchInfo(options.gerrit_patches)
    # Already-merged changes are harmless but surprising; warn the user.
    for patch in gerrit_patches:
      if patch.IsAlreadyMerged():
        cros_build_lib.Warning('Patch %s has already been merged.' % str(patch))

  if options.local_patches:
    manifest = git.ManifestCheckout.Cached(options.sourceroot)
    local_patches = cros_patch.PrepareLocalPatches(
        manifest, options.local_patches)

  if options.remote_patches:
    remote_patches = cros_patch.PrepareRemotePatches(options.remote_patches)

  return trybot_patch_pool.TrybotPatchPool(
      gerrit_patches, local_patches, remote_patches)
| |
| |
class Builder(object):
  """Parent class for all builder types.

  This class functions as a parent class for various build types. It's intended
  use is builder_instance.Run().

  Vars:
    build_config: The configuration dictionary from cbuildbot_config.
    options: The options provided from optparse in main().
    archive_stages: Dict mapping board name to its ArchiveStage instance.
    archive_urls: Where our artifacts for this builder will be archived.
    release_tag: The associated "chrome os version" of this build.
    patch_pool: TrybotPatchPool of changes to apply during this build.
  """

  def __init__(self, options, build_config):
    """Initializes instance variables. Must be called by all subclasses."""
    self.build_config = build_config
    self.options = options

    # TODO, Remove here and in config after bug chromium-os:14649 is fixed.
    if self.build_config['chromeos_official']:
      os.environ['CHROMEOS_OFFICIAL'] = '1'

    self.archive_stages = {}
    self.archive_urls = {}
    self.release_tag = None
    # Empty pool until _InitializeTrybotPatchPool() populates it in Run().
    self.patch_pool = trybot_patch_pool.TrybotPatchPool()

    bs.BuilderStage.SetManifestBranch(self.options.branch)

  def Initialize(self):
    """Runs through the initialization steps of an actual build."""
    if self.options.resume:
      # Restore stage results from a prior run so completed stages can be
      # skipped.
      results_lib.LoadCheckpoint(self.options.buildroot)

    self._RunStage(stages.CleanUpStage)

  def _GetStageInstance(self, stage, *args, **kwargs):
    """Helper function to get an instance given the args.

    Useful as almost all stages just take in options and build_config.
    """
    # An explicit 'config' kwarg (e.g. a board-specific config) overrides
    # this builder's default build_config.
    config = kwargs.pop('config', self.build_config)
    return stage(self.options, config, *args, **kwargs)

  def _SetReleaseTag(self):
    """Sets the release tag from the manifest_manager.

    Must be run after sync stage as syncing enables us to have a release tag.
    """
    # Extract version we have decided to build into self.release_tag.
    manifest_manager = stages.ManifestVersionedSyncStage.manifest_manager
    if manifest_manager:
      self.release_tag = manifest_manager.current_version

  def _RunStage(self, stage, *args, **kwargs):
    """Wrapper to run a stage."""
    stage_instance = self._GetStageInstance(stage, *args, **kwargs)
    return stage_instance.Run()

  def GetSyncInstance(self):
    """Returns an instance of a SyncStage that should be run.

    Subclasses must override this method.
    """
    raise NotImplementedError()

  def RunStages(self):
    """Subclasses must override this method. Runs the appropriate code."""
    raise NotImplementedError()

  def _ShouldReExecuteInBuildRoot(self):
    """Returns True if this build should be re-executed in the buildroot."""
    # Re-execute unless this copy of cbuildbot is already running from a
    # checkout inside the buildroot.
    abs_buildroot = os.path.abspath(self.options.buildroot)
    return not os.path.abspath(__file__).startswith(abs_buildroot)

  def _ReExecuteInBuildroot(self, sync_instance):
    """Reexecutes self in buildroot and returns True if build succeeds.

    This allows the buildbot code to test itself when changes are patched for
    buildbot-related code. This is a no-op if the buildroot == buildroot
    of the running chromite checkout.

    Args:
      sync_instance: Instance of the sync stage that was run to sync.

    Returns:
      True if the Build succeeded.
    """
    # Checkpoint now so the re-executed cbuildbot (launched with --resume)
    # can skip stages that already completed in this process.
    if not self.options.resume:
      results_lib.WriteCheckpoint(self.options.buildroot)

    args = stages.BootstrapStage.FilterArgsForTargetCbuildbot(
        self.options.buildroot, constants.PATH_TO_CBUILDBOT, self.options)

    # Specify a buildroot explicitly (just in case, for local trybot).
    # Suppress any timeout options given from the commandline in the
    # invoked cbuildbot; our timeout will enforce it instead.
    args += ['--resume', '--timeout', '0', '--notee', '--nocgroups',
             '--buildroot', os.path.abspath(self.options.buildroot)]

    if stages.ManifestVersionedSyncStage.manifest_manager:
      ver = stages.ManifestVersionedSyncStage.manifest_manager.current_version
      args += ['--version', ver]

    if isinstance(sync_instance, stages.CommitQueueSyncStage):
      vp_file = sync_instance.SaveValidationPool()
      args += ['--validation_pool', vp_file]

    # Reset the cache dir so that the child will calculate it automatically.
    if not self.options.cache_dir_specified:
      commandline.BaseParser.ConfigureCacheDir(None)

    # Re-run the command in the buildroot.
    # Finally, be generous and give the invoked cbuildbot 30s to shutdown
    # when something occurs. It should exit quicker, but the sigterm may
    # hit while the system is particularly busy.
    return_obj = cros_build_lib.RunCommand(
        args, cwd=self.options.buildroot, error_code_ok=True, kill_timeout=30)
    return return_obj.returncode == 0

  def _InitializeTrybotPatchPool(self):
    """Generate patch pool from patches specified on the command line.

    Do this only if we need to patch changes later on.
    """
    changes_stage = stages.PatchChangesStage.StageNamePrefix()
    check_func = results_lib.Results.PreviouslyCompletedRecord
    # Skip pool creation when patching already completed in a previous run,
    # unless we are bootstrapping (which needs the pool regardless).
    if not check_func(changes_stage) or self.options.bootstrap:
      self.patch_pool = AcquirePoolFromOptions(self.options)

  def _GetBootstrapStage(self):
    """Constructs and returns the BootStrapStage object.

    We return None when there are no chromite patches to test, and
    --test-bootstrap wasn't passed in.
    """
    stage = None
    chromite_pool = self.patch_pool.Filter(project=constants.CHROMITE_PROJECT)
    manifest_pool = self.patch_pool.FilterManifest()
    chromite_branch = git.GetChromiteTrackingBranch()
    # Bootstrap when chromite or manifest changes are under test, when
    # explicitly requested, or when building a branch other than the one
    # this chromite checkout tracks.
    if (chromite_pool or manifest_pool or self.options.test_bootstrap
        or chromite_branch != self.options.branch):
      stage = stages.BootstrapStage(self.options, self.build_config,
                                    chromite_pool, manifest_pool)
    return stage

  def Run(self):
    """Main runner for this builder class. Runs build and prints summary.

    Returns:
      Whether the build succeeded.
    """
    self._InitializeTrybotPatchPool()

    if self.options.bootstrap:
      bootstrap_stage = self._GetBootstrapStage()
      if bootstrap_stage:
        # BootstrapStage blocks on re-execution of cbuildbot.
        bootstrap_stage.Run()
        return bootstrap_stage.returncode == 0

    print_report = True
    exception_thrown = False
    success = True
    try:
      self.Initialize()
      sync_instance = self.GetSyncInstance()
      sync_instance.Run()
      self._SetReleaseTag()

      # Filter out patches to manifest, since PatchChangesStage can't handle
      # them. Manifest patches are patched in the BootstrapStage.
      non_manifest_patches = self.patch_pool.FilterManifest(negate=True)
      if non_manifest_patches:
        self._RunStage(stages.PatchChangesStage, non_manifest_patches)

      if self._ShouldReExecuteInBuildRoot():
        # The re-executed cbuildbot prints its own report.
        print_report = False
        success = self._ReExecuteInBuildroot(sync_instance)
      else:
        self.RunStages()
    except results_lib.StepFailure:
      # StepFailure exceptions are already recorded in the report, so there
      # is no need to print these tracebacks twice.
      exception_thrown = True
      if not print_report:
        raise
    except Exception:
      exception_thrown = True
      raise
    finally:
      if print_report:
        results_lib.WriteCheckpoint(self.options.buildroot)
        print '\n\n\n@@@BUILD_STEP Report@@@\n'
        results_lib.Results.Report(sys.stdout, self.archive_urls,
                                   self.release_tag)
        # The stage-results record is the source of truth for success; an
        # exception alongside all-green stages is an internal error.
        success = results_lib.Results.BuildSucceededSoFar()
        if exception_thrown and success:
          success = False
          cros_build_lib.PrintBuildbotStepWarnings()
          print """\
Exception thrown, but all stages marked successful. This is an internal error,
because the stage that threw the exception should be marked as failing."""

    return success
| |
| |
class SimpleBuilder(Builder):
  """Builder that performs basic vetting operations."""

  def GetSyncInstance(self):
    """Sync to lkgm or TOT as necessary.

    Returns: the instance of the sync stage that was run.
    """
    if self.build_config['use_lkgm']:
      sync_stage = self._GetStageInstance(stages.LKGMSyncStage)
    elif self.build_config['use_chrome_lkgm']:
      sync_stage = self._GetStageInstance(stages.ChromeLKGMSyncStage)
    else:
      sync_stage = self._GetStageInstance(stages.SyncStage)

    return sync_stage

  def _RunBackgroundStagesForBoard(self, board):
    """Run background board-specific stages for the specified board."""
    archive_stage = self.archive_stages[board]
    # A board-specific config overrides the builder-wide config when present.
    configs = self.build_config['board_specific_configs']
    config = configs.get(board, self.build_config)
    stage_list = [[stages.VMTestStage, board, archive_stage],
                  [stages.SignerTestStage, board, archive_stage],
                  [stages.UnitTestStage, board],
                  [stages.UploadPrebuiltsStage, board, archive_stage],
                  [stages.DevInstallerPrebuiltsStage, board, archive_stage]]

    # We can not run hw tests without archiving the payloads.
    if self.options.archive:
      for suite in config['hw_tests']:
        stage_list.append([stages.HWTestStage, board, archive_stage, suite])

      for suite in config['async_hw_tests']:
        stage_list.append([stages.ASyncHWTestStage, board, archive_stage,
                           suite])

    # Run all of the above stages (plus the archive stage) concurrently.
    steps = [self._GetStageInstance(*x, config=config).Run for x in stage_list]
    parallel.RunParallelSteps(steps + [archive_stage.Run])

  def RunStages(self):
    """Runs through build process."""
    # TODO(sosa): Split these out into classes.
    if self.build_config['build_type'] == constants.CHROOT_BUILDER_TYPE:
      self._RunStage(stages.UprevStage, boards=[], enter_chroot=False)
      self._RunStage(stages.BuildBoardStage, [constants.CHROOT_BUILDER_BOARD])
      self._RunStage(stages.SyncChromeStage)
      self._RunStage(stages.PatchChromeStage)
      self._RunStage(stages.SDKPackageStage)
      self._RunStage(stages.SDKTestStage)
      self._RunStage(stages.UploadPrebuiltsStage,
                     constants.CHROOT_BUILDER_BOARD, None)
    elif self.build_config['build_type'] == constants.REFRESH_PACKAGES_TYPE:
      self._RunStage(stages.BuildBoardStage)
      self._RunStage(stages.RefreshPackageStatusStage)
    else:
      self._RunStage(stages.BuildBoardStage)
      self._RunStage(stages.UprevStage)
      self._RunStage(stages.SyncChromeStage)
      self._RunStage(stages.PatchChromeStage)

      # Create archive stages up front so BuildTargetStage can be handed the
      # right one for each board below.
      configs = self.build_config['board_specific_configs']
      for board in self.build_config['boards']:
        config = configs.get(board, self.build_config)
        archive_stage = self._GetStageInstance(stages.ArchiveStage, board,
                                               config=config)
        self.archive_stages[board] = archive_stage

      # Set up a process pool to run test/archive stages in the background.
      # This process runs task(board) for each board added to the queue.
      task = self._RunBackgroundStagesForBoard
      with parallel.BackgroundTaskRunner(task) as queue:
        for board in self.build_config['boards']:
          # Run BuildTarget in the foreground.
          archive_stage = self.archive_stages[board]
          config = configs.get(board, self.build_config)
          self._RunStage(stages.BuildTargetStage, board, archive_stage,
                         self.release_tag, config=config)
          self.archive_urls[board] = archive_stage.GetDownloadUrl()

          # Kick off task(board) in the background.
          queue.put([board])
| |
| |
class DistributedBuilder(SimpleBuilder):
  """Build class that has special logic to handle distributed builds.

  These builds sync using git/manifest logic in manifest_versions. In general
  they use a non-distributed builder code for the bulk of the work.
  """
  def __init__(self, *args, **kwargs):
    """Initializes a buildbot builder.

    Extra variables:
      completion_stage_class: Stage used to complete a build. Set in the Sync
        stage.
    """
    super(DistributedBuilder, self).__init__(*args, **kwargs)
    self.completion_stage_class = None

  def GetSyncInstance(self):
    """Syncs the tree using one of the distributed sync logic paths.

    Returns: the instance of the sync stage that was run.
    """
    # Determine sync class to use. CQ overrides PFQ bits so should check it
    # first.
    if cbuildbot_config.IsCQType(self.build_config['build_type']):
      sync_stage = self._GetStageInstance(stages.CommitQueueSyncStage)
      self.completion_stage_class = stages.CommitQueueCompletionStage
    elif cbuildbot_config.IsPFQType(self.build_config['build_type']):
      sync_stage = self._GetStageInstance(stages.LKGMCandidateSyncStage)
      self.completion_stage_class = stages.LKGMCandidateSyncCompletionStage
    else:
      sync_stage = self._GetStageInstance(stages.ManifestVersionedSyncStage)
      self.completion_stage_class = stages.ManifestVersionedSyncCompletionStage

    return sync_stage

  def Publish(self, was_build_successful):
    """Completes build by publishing any required information.

    Args:
      was_build_successful: Whether the build (RunStages) succeeded.
    """
    completion_stage = self._GetStageInstance(self.completion_stage_class,
                                              was_build_successful)
    completion_stage.Run()
    name = completion_stage.name
    # Uprev changes are only published by a successful master build whose
    # completion stage itself succeeded.
    if not results_lib.Results.WasStageSuccessful(name):
      should_publish_changes = False
    else:
      should_publish_changes = (self.build_config['master'] and
                                was_build_successful)

    if should_publish_changes:
      self._RunStage(stages.PublishUprevChangesStage)

  def RunStages(self):
    """Runs simple builder logic and publishes information to overlays."""
    was_build_successful = False
    try:
      super(DistributedBuilder, self).RunStages()
      was_build_successful = results_lib.Results.BuildSucceededSoFar()
    except SystemExit as ex:
      # If a stage calls sys.exit(0), it's exiting with success, so that means
      # we should mark ourselves as successful.
      if ex.code == 0:
        was_build_successful = True
      raise
    finally:
      # Always publish the outcome, even when a stage raised.
      self.Publish(was_build_successful)
| |
| |
def _ConfirmBuildRoot(buildroot):
  """Confirm with user the inferred buildroot, and mark it as confirmed."""
  cros_build_lib.Warning('Using default directory %s as buildroot', buildroot)
  accepted = cros_build_lib.BooleanPrompt(default=False)
  if not accepted:
    print('Please specify a different buildroot via the --buildroot option.')
    sys.exit(0)

  # Create the directory if needed, then drop the trybot marker in it.
  if not os.path.exists(buildroot):
    os.mkdir(buildroot)
  repository.CreateTrybotMarker(buildroot)
| |
| |
def _ConfirmRemoteBuildbotRun():
  """Confirm user wants to run with --buildbot --remote.

  Exits the process unless the user explicitly confirms.
  """
  cros_build_lib.Warning(
      'You are about to launch a PRODUCTION job! This is *NOT* a '
      'trybot run! Are you sure?')
  proceed = cros_build_lib.BooleanPrompt(default=False)
  if proceed:
    return
  print('Please specify --pass-through="--debug".')
  sys.exit(0)
| |
| |
def _DetermineDefaultBuildRoot(sourceroot, internal_build):
  """Default buildroot to be under the directory that contains current checkout.

  Arguments:
    sourceroot: Use specified sourceroot.
    internal_build: Whether the build is an internal build.

  Returns:
    Path of the default trybot buildroot.
  """
  if not repository.IsARepoRoot(sourceroot):
    cros_build_lib.Die(
        'Could not find root of local checkout at %s. Please specify '
        'using the --sourceroot option.' % sourceroot)

  # Place trybot buildroot under the directory containing current checkout.
  top_level = os.path.dirname(os.path.realpath(sourceroot))
  subdir = _DEFAULT_INT_BUILDROOT if internal_build else _DEFAULT_EXT_BUILDROOT
  return os.path.join(top_level, subdir)
| |
| |
def _DisableYamaHardLinkChecks():
  """Disable Yama kernel hardlink security checks.

  The security module disables hardlinking to files you do not have
  write access to which causes some of our build scripts problems.
  Disable it so we don't have to worry about it.
  """
  PROC_PATH = '/proc/sys/kernel/yama/protected_nonaccess_hardlinks'
  # Convert '/proc/sys/kernel/yama/...' into 'kernel.yama....' sysctl form.
  SYSCTL_PATH = '.'.join(PROC_PATH.split('/')[3:])

  if not os.path.exists(PROC_PATH):
    # Yama not available in this system -- nothing to do.
    return

  if osutils.ReadFile(PROC_PATH).strip() == '0':
    # Already disabled -- nothing to do.
    return

  # Probe: create a hardlink in a tempdir; EPERM means Yama blocked it.
  with osutils.TempDirContextManager() as tempdir:
    try:
      os.link('/bin/sh', os.path.join(tempdir, 'sh'))
    except OSError as e:
      if e.errno == errno.EPERM:
        cros_build_lib.Warning('Disabling Yama hardlink security')
        cros_build_lib.SudoRunCommand(['sysctl', '%s=0' % SYSCTL_PATH])
| |
| |
| def _BackupPreviousLog(log_file, backup_limit=25): |
| """Rename previous log. |
| |
| Args: |
| log_file: The absolute path to the previous log. |
| """ |
| if os.path.exists(log_file): |
| old_logs = sorted(glob.glob(log_file + '.*'), |
| key=distutils.version.LooseVersion) |
| |
| if len(old_logs) >= backup_limit: |
| os.remove(old_logs[0]) |
| |
| last = 0 |
| if old_logs: |
| last = int(old_logs.pop().rpartition('.')[2]) |
| |
| os.rename(log_file, log_file + '.' + str(last + 1)) |
| |
| |
def _RunBuildStagesWrapper(options, build_config):
  """Helper function that wraps RunBuildStages()."""
  def IsDistributedBuilder():
    """Determines whether the build_config should be a DistributedBuilder."""
    # NOTE: closes over chrome_rev, which is assigned below before this
    # function is invoked.
    if not options.buildbot:
      return False
    elif build_config['build_type'] in _DISTRIBUTED_TYPES:
      # We don't do distributed logic to TOT Chrome PFQ's, nor local
      # chrome roots (e.g. chrome try bots)
      if chrome_rev not in [constants.CHROME_REV_TOT,
                            constants.CHROME_REV_LOCAL,
                            constants.CHROME_REV_SPEC]:
        return True

    return False

  cros_build_lib.Info("cbuildbot executed with args %s"
                      % ' '.join(map(repr, sys.argv)))

  # A chrome_rev from the command line takes precedence over the config's.
  chrome_rev = build_config['chrome_rev']
  if options.chrome_rev:
    chrome_rev = options.chrome_rev
  if chrome_rev == constants.CHROME_REV_TOT:
    # Build the TOT Chrome revision.
    svn_url = gclient.GetBaseURLs()[0]
    options.chrome_version = gclient.GetTipOfTrunkSvnRevision(svn_url)
    options.chrome_rev = constants.CHROME_REV_SPEC

  # If it's likely we'll need to build Chrome, fetch the source.
  if build_config['sync_chrome'] is None:
    options.managed_chrome = (chrome_rev != constants.CHROME_REV_LOCAL and
        (not build_config['usepkg_build_packages'] or chrome_rev or
         build_config['useflags'] or build_config['profile'] or
         options.rietveld_patches))
  else:
    options.managed_chrome = build_config['sync_chrome']

  if options.managed_chrome:
    # Tell Chrome to fetch the source locally.
    internal = constants.USE_CHROME_INTERNAL in (build_config['useflags'] or [])
    chrome_src = 'chrome-src-internal' if internal else 'chrome-src'
    options.chrome_root = os.path.join(options.cache_dir, 'distfiles', 'target',
                                       chrome_src)
  elif options.rietveld_patches:
    cros_build_lib.Die('This builder does not support Rietveld patches.')

  target = DistributedBuilder if IsDistributedBuilder() else SimpleBuilder
  buildbot = target(options, build_config)
  if not buildbot.Run():
    sys.exit(1)
| |
| |
| # Parser related functions |
def _CheckLocalPatches(sourceroot, local_patches):
  """Do an early quick check of the passed-in patches.

  If the branch of a project is not specified we append the current branch the
  project is on.

  Args:
    sourceroot: The checkout where patches are coming from.
    local_patches: List of 'project[:branch]' strings to validate.

  Returns:
    List of patches normalized to 'project:branch' form.
  """
  manifest = git.ManifestCheckout.Cached(sourceroot)
  verified_patches = []
  for patch in local_patches:
    components = patch.split(':')
    if len(components) > 2:
      cros_build_lib.Die(
          'Specify local patches in project[:branch] format. Got %s' % patch)

    # validate project
    project = components[0]
    try:
      project_dir = manifest.GetProjectPath(project, True)
    except KeyError:
      cros_build_lib.Die('Project %s does not exist.' % project)

    if len(components) == 2:
      branch = components[1]
      if not git.DoesLocalBranchExist(project_dir, branch):
        cros_build_lib.Die('Project %s does not have branch %s'
                           % (project, branch))
    else:
      # If no branch was specified, we use the project's current branch.
      branch = git.GetCurrentBranch(project_dir)
      if not branch:
        cros_build_lib.Die('Project %s is not on a branch!' % project)

    verified_patches.append('%s:%s' % (project, branch))

  return verified_patches
| |
| |
def _CheckChromeVersionOption(_option, _opt_str, value, parser):
  """Upgrade other options based on chrome_version being passed."""
  chrome_version = value.strip()

  # An explicit version implies SPEC revision logic unless the user already
  # chose a chrome_rev.
  values = parser.values
  if chrome_version and values.chrome_rev is None:
    values.chrome_rev = constants.CHROME_REV_SPEC

  values.chrome_version = chrome_version
| |
| |
def _CheckChromeRootOption(_option, _opt_str, value, parser):
  """Validate and convert chrome_root to full-path form."""
  values = parser.values
  # A local Chrome checkout implies LOCAL revision logic unless overridden.
  if values.chrome_rev is None:
    values.chrome_rev = constants.CHROME_REV_LOCAL
  values.chrome_root = value
| |
| |
def _CheckChromeRevOption(_option, _opt_str, value, parser):
  """Validate the chrome_rev option."""
  rev = value.strip()
  if rev not in constants.VALID_CHROME_REVISIONS:
    raise optparse.OptionValueError('Invalid chrome rev specified')
  parser.values.chrome_rev = rev
| |
| |
def FindCacheDir(_parser, _options):
  """Cache-dir hook handed to CustomParser via caching=FindCacheDir.

  Always returns None; NOTE(review): presumably this defers cache-dir
  selection to the parser's default logic or a later ConfigureCacheDir
  call -- confirm against commandline.FilteringParser.
  """
  return None
| |
| |
class CustomGroup(optparse.OptionGroup):
  """Option group whose options can be marked for remote pass-through."""

  def add_remote_option(self, *args, **kwargs):
    """For arguments that are passed-through to remote trybot."""
    return optparse.OptionGroup.add_option(self, *args,
                                           remote_pass_through=True,
                                           **kwargs)
| |
| |
class CustomOption(commandline.FilteringOption):
  """Subclass FilteringOption class to implement pass-through and api."""

  # Register the custom 'extend' action (accumulates space-separated values)
  # alongside the actions inherited from FilteringOption.
  ACTIONS = commandline.FilteringOption.ACTIONS + ('extend',)
  STORE_ACTIONS = commandline.FilteringOption.STORE_ACTIONS + ('extend',)
  TYPED_ACTIONS = commandline.FilteringOption.TYPED_ACTIONS + ('extend',)
  ALWAYS_TYPED_ACTIONS = (commandline.FilteringOption.ALWAYS_TYPED_ACTIONS +
                          ('extend',))

  def __init__(self, *args, **kwargs):
    """Initializes the option, consuming the custom keyword arguments."""
    # The remote_pass_through argument specifies whether we should directly
    # pass the argument (with its value) onto the remote trybot.
    self.pass_through = kwargs.pop('remote_pass_through', False)
    # NOTE(review): 'api' appears to be the remote trybot API version this
    # option requires (0 = any) -- confirm against remote_try usage.
    self.api_version = int(kwargs.pop('api', '0'))
    commandline.FilteringOption.__init__(self, *args, **kwargs)

  def take_action(self, action, dest, opt, value, values, parser):
    """Processes the option, handling the custom 'extend' action."""
    if action == 'extend':
      # If there is extra spaces between each argument, we get '' which later
      # code barfs on, so skip those. e.g. We see this with the forms:
      #   cbuildbot -p 'proj:branch ' ...
      #   cbuildbot -p ' proj:branch' ...
      #   cbuildbot -p 'proj:branch  proj2:branch' ...
      lvalue = value.split()
      values.ensure_value(dest, []).extend(lvalue)

    commandline.FilteringOption.take_action(
        self, action, dest, opt, value, values, parser)
| |
| |
class CustomParser(commandline.FilteringParser):
  """Option parser that creates CustomOption options (remote pass-through)."""

  DEFAULT_OPTION_CLASS = CustomOption

  def add_remote_option(self, *args, **kwargs):
    """For arguments that are passed-through to remote trybot."""
    return self.add_option(*args, remote_pass_through=True, **kwargs)
| |
| |
| def _CreateParser(): |
| """Generate and return the parser with all the options.""" |
| # Parse options |
| usage = "usage: %prog [options] buildbot_config" |
| parser = CustomParser(usage=usage, caching=FindCacheDir) |
| |
| # Main options |
| # The remote_pass_through parameter to add_option is implemented by the |
| # CustomOption class. See CustomOption for more information. |
| parser.add_option('-a', '--all', action='store_true', dest='print_all', |
| default=False, |
| help=('List all of the buildbot configs available. Use ' |
| 'with the --list option')) |
| parser.add_remote_option('-b', '--branch', |
| help='The manifest branch to test. The branch to ' |
| 'check the buildroot out to.') |
| parser.add_option('-r', '--buildroot', dest='buildroot', type='path', |
| help='Root directory where source is checked out to, and ' |
| 'where the build occurs. For external build configs, ' |
| "defaults to 'trybot' directory at top level of your " |
| 'repo-managed checkout.') |
| parser.add_remote_option('--chrome_rev', default=None, type='string', |
| action='callback', dest='chrome_rev', |
| callback=_CheckChromeRevOption, |
| help=('Revision of Chrome to use, of type [%s]' |
| % '|'.join(constants.VALID_CHROME_REVISIONS))) |
| parser.add_remote_option('-g', '--gerrit-patches', action='extend', |
| default=[], type='string', |
| metavar="'Id1 *int_Id2...IdN'", |
| help=("Space-separated list of short-form Gerrit " |
| "Change-Id's or change numbers to patch. " |
| "Please prepend '*' to internal Change-Id's")) |
| parser.add_remote_option('-G', '--rietveld-patches', action='extend', |
| default=[], type='string', |
| metavar="'id1[:subdir1]...idN[:subdirN]'", |
| help=("Space-separated list of short-form Rietveld " |
| "issue numbers to patch. If no subdir is " |
| "specified, the src directory is used.")) |
| parser.add_option('-l', '--list', action='store_true', dest='list', |
| default=False, |
| help=('List the suggested trybot configs to use. Use ' |
| '--all to list all of the available configs.')) |
| parser.add_option('--local', default=False, action='store_true', |
| help=('Specifies that this tryjob should be run locally.')) |
| parser.add_option('-p', '--local-patches', action='extend', default=[], |
| metavar="'<project1>[:<branch1>]...<projectN>[:<branchN>]'", |
| help=('Space-separated list of project branches with ' |
| 'patches to apply. Projects are specified by name. ' |
| 'If no branch is specified the current branch of the ' |
| 'project will be used.')) |
| parser.add_remote_option('--profile', default=None, type='string', |
| action='store', dest='profile', |
| help='Name of profile to sub-specify board variant.') |
| parser.add_option('--remote', default=False, action='store_true', |
| help=('Specifies that this tryjob should be run remotely.')) |
| parser.add_option('--remote-description', default=None, |
| help=('Attach an optional description to a --remote run ' |
| 'to make it easier to identify the results when it ' |
| 'finishes.')) |
| |
| # |
| # Advanced options |
| # |
| |
| group = CustomGroup( |
| parser, |
| 'Advanced Options', |
| 'Caution: use these options at your own risk.') |
| |
| group.add_remote_option('--buildbot', dest='buildbot', action='store_true', |
| default=False, help='This is running on a buildbot') |
| group.add_remote_option('--buildnumber', help='build number', type='int', |
| default=0) |
| group.add_option('--chrome_root', default=None, type='path', |
| action='callback', callback=_CheckChromeRootOption, |
| dest='chrome_root', help='Local checkout of Chrome to use.') |
| group.add_remote_option('--chrome_version', default=None, type='string', |
| action='callback', dest='chrome_version', |
| callback=_CheckChromeVersionOption, |
| help='Used with SPEC logic to force a particular SVN ' |
| 'revision of chrome rather than the latest.') |
| group.add_remote_option('--clobber', action='store_true', dest='clobber', |
| default=False, |
| help='Clears an old checkout before syncing') |
| group.add_remote_option('--hwtest', dest='hwtest', action='store_true', |
| default=False, |
| help='This adds HW test for remote trybot') |
| parser.add_option('--log_dir', dest='log_dir', type='path', |
| help=('Directory where logs are stored.')) |
| group.add_remote_option('--maxarchives', dest='max_archive_builds', |
| default=3, type='int', |
| help="Change the local saved build count limit.") |
| parser.add_remote_option('--manifest-repo-url', |
| help=('Overrides the default manifest repo url.')) |
| group.add_remote_option('--noarchive', action='store_false', dest='archive', |
| default=True, help="Don't run archive stage.") |
| group.add_remote_option('--nobootstrap', action='store_false', |
| dest='bootstrap', default=True, |
| help="Don't checkout and run from a standalone " |
| "chromite repo.") |
| group.add_remote_option('--nobuild', action='store_false', dest='build', |
| default=True, |
| help="Don't actually build (for cbuildbot dev)") |
| group.add_remote_option('--noclean', action='store_false', dest='clean', |
| default=True, help="Don't clean the buildroot") |
| group.add_remote_option('--nocgroups', action='store_false', dest='cgroups', |
| default=True, |
| help='Disable cbuildbots usage of cgroups.') |
| group.add_remote_option('--noprebuilts', action='store_false', |
| dest='prebuilts', default=True, |
| help="Don't upload prebuilts.") |
| group.add_remote_option('--nosync', action='store_false', dest='sync', |
| default=True, help="Don't sync before building.") |
| group.add_remote_option('--notests', action='store_false', dest='tests', |
| default=True, |
| help='Override values from buildconfig and run no ' |
| 'tests.') |
| group.add_remote_option('--nouprev', action='store_false', dest='uprev', |
| default=True, |
| help='Override values from buildconfig and never ' |
| 'uprev.') |
| group.add_option('--reference-repo', action='store', default=None, |
| dest='reference_repo', |
| help='Reuse git data stored in an existing repo ' |
| 'checkout. This can drastically reduce the network ' |
| 'time spent setting up the trybot checkout. By ' |
| "default, if this option isn't given but cbuildbot " |
| 'is invoked from a repo checkout, cbuildbot will ' |
| 'use the repo root.') |
| group.add_option('--resume', action='store_true', default=False, |
| help='Skip stages already successfully completed.') |
| group.add_remote_option('--timeout', action='store', type='int', default=0, |
| help='Specify the maximum amount of time this job ' |
| 'can run for, at which point the build will be ' |
| 'aborted. If set to zero, then there is no ' |
| 'timeout.') |
| group.add_option('--test-tryjob', action='store_true', |
| default=False, |
| help='Submit a tryjob to the test repository. Will not ' |
| 'show up on the production trybot waterfall.') |
| group.add_remote_option('--validation_pool', default=None, |
| help='Path to a pickled validation pool. Intended ' |
| 'for use only with the commit queue.') |
| group.add_remote_option('--version', dest='force_version', default=None, |
| help='Used with manifest logic. Forces use of this ' |
| 'version rather than create or get latest.') |
| group.add_remote_option('--cq-gerrit-query', dest='cq_gerrit_override', |
| default=None, |
| help= |
| "If given, this gerrit query will be used to find what patches to test, " |
| "rather than the normal 'CommitQueue=1 AND Verified=1 AND CodeReview=2' " |
| "query it defaults to. Use with care- note additionally this setting " |
| "only has an effect if the buildbot target is a cq target, and we're " |
| "in buildbot mode.") |
| |
| parser.add_option_group(group) |
| |
| # |
| # Hidden options. |
| # |
| |
| # The base GS URL (gs://<bucket_name>/<path>) to archive artifacts to. |
| parser.add_remote_option('--archive-base', type='gs_path', |
| help=optparse.SUPPRESS_HELP) |
| # bootstrap-args are not verified by the bootstrap code. It gets passed |
| # direcly to the bootstrap re-execution. |
| parser.add_remote_option('--bootstrap-args', action='append', |
| default=[], help=optparse.SUPPRESS_HELP) |
| parser.add_option('--pass-through', dest='pass_through_args', action='append', |
| type='string', default=[], help=optparse.SUPPRESS_HELP) |
| # Used for handling forwards/backwards compatibility for --resume and |
| # --bootstrap. |
| parser.add_option('--reexec-api-version', dest='output_api_version', |
| action='store_true', default=False, |
| help=optparse.SUPPRESS_HELP) |
| # Indicates this is running on a remote trybot machine. |
| parser.add_option('--remote-trybot', dest='remote_trybot', |
| action='store_true', default=False, |
| help=optparse.SUPPRESS_HELP) |
| # Patches uploaded by trybot client when run using the -p option. |
| parser.add_remote_option('--remote-patches', action='extend', default=[], |
| help=optparse.SUPPRESS_HELP) |
| # Specify specific remote tryslaves to run on. |
| parser.add_option('--slaves', action='extend', default=[], |
| help=optparse.SUPPRESS_HELP) |
| parser.add_option('--sourceroot', type='path', default=constants.SOURCE_ROOT, |
| help=optparse.SUPPRESS_HELP) |
| # Causes cbuildbot to bootstrap itself twice, in the sequence A->B->C. |
| # A(unpatched) patches and bootstraps B. B patches and bootstraps C. |
| parser.add_remote_option('--test-bootstrap', action='store_true', |
| default=False, help=optparse.SUPPRESS_HELP) |
| # Note the default here needs to be hardcoded to 3; that is the last version |
| # that lacked this functionality. |
| # This is used so that cbuildbot when processing tryjobs from |
| # older chromite instances, we can use it for handling compatibility. |
| parser.add_option('--remote-version', default=3, type=int, action='store', |
| help=optparse.SUPPRESS_HELP) |
| |
| # |
| # Debug options |
| # |
| # Temporary hack; in place till --dry-run replaces --debug. |
| # pylint: disable=W0212 |
| group = parser.debug_group |
| debug = [x for x in group.option_list if x._long_opts == ['--debug']][0] |
| debug.help += " Currently functions as --dry-run in addition." |
| debug.pass_through = True |
| group.add_option('--dump_config', action='store_true', dest='dump_config', |
| default=False, |
| help='Dump out build config options, and exit.') |
| group.add_option('--notee', action='store_false', dest='tee', default=True, |
| help="Disable logging and internal tee process. Primarily " |
| "used for debugging cbuildbot itself.") |
| return parser |
| |
| |
def _FinishParsing(options, args):
  """Perform some parsing tasks that need to take place after optparse.

  This function needs to be easily testable! Keep it free of
  environment-dependent code. Put more detailed usage validation in
  _PostParseCheck().

  Args:
    options, args: The options/args object returned by optparse
  """
  # Collect every parsed argument whose option was flagged pass_through so
  # it can be replayed on a re-invocation of cbuildbot.
  accepted, _ = commandline.FilteringParser.FilterArgs(
      options.parsed_args, lambda x: x.opt_inst.pass_through)
  options.pass_through_args.extend(accepted)

  # --chrome_root and a chrome_rev of LOCAL are only valid together.
  root_given = bool(options.chrome_root)
  rev_is_local = (options.chrome_rev == constants.CHROME_REV_LOCAL)
  if root_given and not rev_is_local:
    cros_build_lib.Die('Chrome rev must be %s if chrome_root is set.' %
                       constants.CHROME_REV_LOCAL)
  if rev_is_local and not root_given:
    cros_build_lib.Die('Chrome root must be set if chrome_rev is %s.' %
                       constants.CHROME_REV_LOCAL)

  # Likewise --chrome_version and a chrome_rev of SPEC go hand in hand.
  version_given = bool(options.chrome_version)
  rev_is_spec = (options.chrome_rev == constants.CHROME_REV_SPEC)
  if version_given and not rev_is_spec:
    cros_build_lib.Die('Chrome rev must be %s if chrome_version is set.' %
                       constants.CHROME_REV_SPEC)
  if rev_is_spec and not version_given:
    cros_build_lib.Die(
        'Chrome rev must not be %s if chrome_version is not set.'
        % constants.CHROME_REV_SPEC)

  # True if any kind of patch (gerrit, local, or rietveld) was requested.
  patches = any((options.gerrit_patches, options.local_patches,
                 options.rietveld_patches))

  if options.remote:
    if options.local:
      cros_build_lib.Die('Cannot specify both --remote and --local')

    # A patchless remote build of ToT is usually a mistake; double-check
    # interactively before proceeding.
    if not options.buildbot and not patches:
      if not cros_build_lib.BooleanPrompt(
          prompt="No patches were provided; are you sure you want to just "
                 "run a remote build of ToT?", default=False):
        cros_build_lib.Die('Must provide patches when running with --remote.')

    # --debug needs to be explicitly passed through for remote invocations.
    debug_requested = '--debug' in options.pass_through_args
  else:
    if len(args) > 1:
      cros_build_lib.Die('Multiple configs not supported if not running with '
                         '--remote. Got %r', args)

    if options.slaves:
      cros_build_lib.Die('Cannot use --slaves if not running with --remote.')

    debug_requested = bool(options.debug)

  # When running in release mode, make sure we are running with checked-in
  # code. We want checked-in cbuildbot/scripts to prevent errors, and we want
  # to build a release image with checked-in code for CrOS packages.
  release_mode_with_patches = (options.buildbot and patches and
                               not debug_requested)
  if release_mode_with_patches:
    cros_build_lib.Die(
        'Cannot provide patches when running with --buildbot!')

  if options.buildbot and options.remote_trybot:
    cros_build_lib.Die(
        '--buildbot and --remote-trybot cannot be used together.')

  # Record whether --debug was set explicitly vs. it was inferred.
  options.debug_forced = bool(options.debug)
  if not options.debug:
    # We don't set debug by default for
    # 1. --buildbot invocations.
    # 2. --remote invocations, because it needs to push changes to the tryjob
    #    repo.
    options.debug = not options.buildbot and not options.remote

  # Record the configs targeted.
  options.build_targets = list(args)
| |
| |
# pylint: disable=W0613
def _PostParseCheck(parser, options, args):
  """Perform some usage validation after we've parsed the arguments

  Args:
    options/args: The options/args object returned by optparse
  """
  # Default the branch to whatever branch chromite itself is tracking.
  if not options.branch:
    options.branch = git.GetChromiteTrackingBranch()

  # Local patches can only be resolved against a full repo checkout.
  if not repository.IsARepoRoot(options.sourceroot) and options.local_patches:
    raise Exception('Could not find repo checkout at %s!'
                    % options.sourceroot)

  # Because the default cache dir depends on other options, FindCacheDir
  # always returns None, and we setup the default here.
  if options.cache_dir is None:
    # Note, options.sourceroot is set regardless of the path
    # actually existing.
    if options.buildroot is not None:
      cache_dir = os.path.join(options.buildroot, '.cache')
    elif os.path.exists(options.sourceroot):
      cache_dir = os.path.join(options.sourceroot, '.cache')
    else:
      cache_dir = parser.FindCacheDir(parser, options)
    options.cache_dir = os.path.abspath(cache_dir)
    parser.ConfigureCacheDir(options.cache_dir)

  osutils.SafeMakedirs(options.cache_dir)

  # Normalize the user-supplied local patch specs.
  if options.local_patches:
    options.local_patches = _CheckLocalPatches(
        options.sourceroot, options.local_patches)

  # Honor CBUILDBOT_DEFAULT_MODE only when no mode flag was given explicitly.
  default = os.environ.get('CBUILDBOT_DEFAULT_MODE')
  explicit_mode = any([options.local, options.buildbot,
                       options.remote, options.remote_trybot])
  if default and not explicit_mode:
    cros_build_lib.Info("CBUILDBOT_DEFAULT_MODE=%s env var detected, using it."
                        % default)
    default = default.lower()
    # Map the env var value onto the corresponding options attribute.
    mode_attr = {'local': 'local', 'remote': 'remote',
                 'buildbot': 'buildbot'}.get(default)
    if mode_attr is None:
      cros_build_lib.Die("CBUILDBOT_DEFAULT_MODE value %s isn't supported. "
                         % default)
    setattr(options, mode_attr, True)
| |
| |
| def _ParseCommandLine(parser, argv): |
| """Completely parse the commandline arguments""" |
| (options, args) = parser.parse_args(argv) |
| |
| if options.output_api_version: |
| print constants.REEXEC_API_VERSION |
| sys.exit(0) |
| |
| if options.list: |
| _PrintValidConfigs(options.print_all) |
| sys.exit(0) |
| |
| # Strip out null arguments. |
| # TODO(rcui): Remove when buildbot is fixed |
| args = [arg for arg in args if arg] |
| if not args: |
| parser.error('Invalid usage. Use -h to see usage. Use -l to list ' |
| 'supported configs.') |
| |
| _FinishParsing(options, args) |
| return options, args |
| |
| |
def main(argv):
  """Entry point: parse arguments, then run a remote or local build.

  Args:
    argv: Command line arguments, excluding the program name.
  """
  # Set umask to 022 so files created by buildbot are readable.
  os.umask(022)

  parser = _CreateParser()
  (options, args) = _ParseCommandLine(parser, argv)

  _PostParseCheck(parser, options, args)

  # cbuildbot manages chroot entry itself, so it must start outside one.
  cros_build_lib.AssertOutsideChroot()

  if options.remote:
    # Remote submission path: quiet the logger, validate, submit, and exit.
    cros_build_lib.logger.setLevel(logging.WARNING)

    # Verify configs are valid.
    # If hwtest flag is enabled, verify that config board is in whitelist.
    for bot in args:
      build_config = _GetConfig(bot)
      if options.hwtest:
        if not set(build_config['boards']).issubset(
            set(constants.HWTEST_BOARD_WHITELIST)):
          cros_build_lib.Die('The test lab is unable to run hwtest tryjobs '
                             'with the given board(s). The currently '
                             'supported boards are %s. If you are root '
                             'causing a critical bug and need temporary '
                             'support please contact the lab '
                             'team.' % constants.HWTEST_BOARD_WHITELIST)

    # Verify gerrit patches are valid.
    print 'Verifying patches...'
    patch_pool = AcquirePoolFromOptions(options)

    # --debug need to be explicitly passed through for remote invocations.
    if options.buildbot and '--debug' not in options.pass_through_args:
      _ConfirmRemoteBuildbotRun()

    print 'Submitting tryjob...'
    tryjob = remote_try.RemoteTryJob(options, args, patch_pool.local_patches)
    tryjob.Submit(testjob=options.test_tryjob, dryrun=False)
    print 'Tryjob submitted!'
    print ('Go to %s to view the status of your job.'
           % tryjob.GetTrybotWaterfallLink())
    sys.exit(0)
  elif (not options.buildbot and not options.remote_trybot
        and not options.resume and not options.local):
    # Implicit local-trybot mode: warn (with a pause so it's seen) that
    # --local will eventually be required.
    cros_build_lib.Warning(
        'Running in LOCAL TRYBOT mode! Use --remote to submit REMOTE '
        'tryjobs. Use --local to suppress this message.')
    cros_build_lib.Warning(
        'In the future, --local will be required to run the local '
        'trybot.')
    time.sleep(5)

  # Only expecting one config
  bot_id = args[-1]
  build_config = _GetConfig(bot_id)

  if options.reference_repo is None:
    repo_path = os.path.join(options.sourceroot, '.repo')
    # If we're being run from a repo checkout, reuse the repo's git pool to
    # cut down on sync time.
    if os.path.exists(repo_path):
      options.reference_repo = options.sourceroot
  elif options.reference_repo:
    # An explicit reference repo must exist and be a repo checkout root.
    if not os.path.exists(options.reference_repo):
      parser.error('Reference path %s does not exist'
                   % (options.reference_repo,))
    elif not os.path.exists(os.path.join(options.reference_repo, '.repo')):
      parser.error('Reference path %s does not look to be the base of a '
                   'repo checkout; no .repo exists in the root.'
                   % (options.reference_repo,))

  if (options.buildbot or options.remote_trybot) and not options.resume:
    # Buildbot/remote-trybot runs require cgroup support and certain
    # binaries; fail fast here rather than mid-build.
    if not options.cgroups:
      parser.error('Options --buildbot/--remote-trybot and --nocgroups cannot '
                   'be used together. Cgroup support is required for '
                   'buildbot/remote-trybot mode.')
    if not cgroups.Cgroup.IsSupported():
      parser.error('Option --buildbot/--remote-trybot was given, but this '
                   'system does not support cgroups. Failing.')

    missing = osutils.FindMissingBinaries(_BUILDBOT_REQUIRED_BINARIES)
    if missing:
      parser.error("Option --buildbot/--remote-trybot requires the following "
                   "binaries which couldn't be found in $PATH: %s"
                   % (', '.join(missing)))

  if options.reference_repo:
    options.reference_repo = os.path.abspath(options.reference_repo)

  if options.dump_config:
    # This works, but option ordering is bad...
    print 'Configuration %s:' % bot_id
    pretty_printer = pprint.PrettyPrinter(indent=2)
    pretty_printer.pprint(build_config)
    sys.exit(0)

  if not options.buildroot:
    if options.buildbot:
      parser.error('Please specify a buildroot with the --buildbot option.')

    options.buildroot = _DetermineDefaultBuildRoot(options.sourceroot,
                                                   build_config['internal'])
    # We use a marker file in the buildroot to indicate the user has
    # consented to using this directory.
    if not os.path.exists(repository.GetTrybotMarkerPath(options.buildroot)):
      _ConfirmBuildRoot(options.buildroot)

  # Sanity check of buildroot- specifically that it's not pointing into the
  # midst of an existing repo since git-repo doesn't support nesting.
  if (not repository.IsARepoRoot(options.buildroot) and
      repository.InARepoRepository(options.buildroot)):
    parser.error('Configured buildroot %s points into a repository checkout, '
                 'rather than the root of it. This is not supported.'
                 % options.buildroot)

  if not options.log_dir:
    options.log_dir = os.path.join(options.buildroot, _DEFAULT_LOG_DIR)

  log_file = None
  if options.tee:
    # Mirror all build output into a log file under the log dir; the old
    # log (if any) is rotated aside first.
    log_file = os.path.join(options.log_dir, _BUILDBOT_LOG_FILE)
    osutils.SafeMakedirs(options.log_dir)
    _BackupPreviousLog(log_file)

  with cros_build_lib.ContextManagerStack() as stack:
    # The cleanup section is registered first so that everything added to
    # the stack after it can be rolled back by its watchdog (see
    # ForkWatchdog below).
    critical_section = stack.Add(cleanup.EnforcedCleanupSection)
    stack.Add(sudo.SudoKeepAlive)

    if not options.resume:
      # If we're in resume mode, use our parents tempdir rather than
      # nesting another layer.
      stack.Add(osutils.TempDirContextManager, prefix='cbuildbot-tmp')
      logging.debug("Cbuildbot tempdir is %r.", os.environ.get('TMP'))

    # TODO(ferringb): update this once https://gerrit.chromium.org/gerrit/25359
    # is landed- it's sensitive to the manifest-versions cache path.
    options.preserve_paths = set(['manifest-versions', '.cache',
                                  'manifest-versions-internal'])
    if log_file is not None:
      stack.Add(tee.Tee, log_file)
      options.preserve_paths.add(_DEFAULT_LOG_DIR)

    if options.cgroups:
      stack.Add(cgroups.SimpleContainChildren, 'cbuildbot')

    # Mark everything between EnforcedCleanupSection and here as having to
    # be rolled back via the contextmanager cleanup handlers. This
    # ensures that sudo bits cannot outlive cbuildbot, that anything
    # cgroups would kill gets killed, etc.
    critical_section.ForkWatchdog()

    if options.timeout > 0:
      stack.Add(cros_build_lib.Timeout, options.timeout)

    if not options.buildbot:
      # Non-buildbot runs get the trybot-adjusted version of the config.
      build_config = cbuildbot_config.OverrideConfigForTrybot(
          build_config,
          options.remote_trybot)

    if options.buildbot or options.remote_trybot:
      _DisableYamaHardLinkChecks()

    _RunBuildStagesWrapper(options, build_config)