| #!/usr/bin/env python |
| # Copyright (c) 2013 The Chromium Authors. All rights reserved. |
| # Use of this source code is governed by a BSD-style license that can be |
| # found in the LICENSE file. |
| |
| # Copyright (C) 2008 Evan Martin <martine@danga.com> |
| |
| """A git-command for integrating reviews on Gerrit.""" |
| |
| from __future__ import print_function |
| |
| from distutils.version import LooseVersion |
| from multiprocessing.pool import ThreadPool |
| import base64 |
| import collections |
| import contextlib |
| import datetime |
| import httplib |
| import itertools |
| import json |
| import logging |
| import multiprocessing |
| import optparse |
| import os |
| import re |
| import shutil |
| import stat |
| import sys |
| import tempfile |
| import textwrap |
| import time |
| import urllib |
| import urllib2 |
| import urlparse |
| import uuid |
| import webbrowser |
| import zlib |
| |
| from third_party import colorama |
| from third_party import httplib2 |
| import auth |
| import clang_format |
| import dart_format |
| import fix_encoding |
| import gclient_utils |
| import gerrit_util |
| import git_common |
| import git_footers |
| import metrics |
| import metrics_utils |
| import owners |
| import owners_finder |
| import presubmit_support |
| import scm |
| import setup_color |
| import split_cl |
| import subcommand |
| import subprocess2 |
| import watchlists |
| |
| __version__ = '2.0' |
| |
| # Traces for git push will be stored in a traces directory inside the |
| # depot_tools checkout. |
| DEPOT_TOOLS = os.path.dirname(os.path.abspath(__file__)) |
| TRACES_DIR = os.path.join(DEPOT_TOOLS, 'traces') |
| |
| # When collecting traces, Git hashes will be reduced to 6 characters to reduce |
| # the size after compression. |
| GIT_HASH_RE = re.compile(r'\b([a-f0-9]{6})[a-f0-9]{34}\b', flags=re.I) |
| # Used to redact the cookies from the gitcookies file. |
| GITCOOKIES_REDACT_RE = re.compile(r'1/.*') |
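| # For illustration (assumed inputs; the real call sites may differ):
| #   GIT_HASH_RE.sub(r'\1', log_line)            # 40-char hash -> first 6
| #   GITCOOKIES_REDACT_RE.sub('REDACTED', line)  # hides '1/<token>' secrets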
| |
| # The maximum number of traces we will keep. Multiplied by 3 since we store |
| # 3 files per trace. |
| MAX_TRACES = 3 * 10 |
| # Message to be displayed to the user to inform where to find the traces for a |
| # git-cl upload execution. |
| TRACES_MESSAGE = ( |
| '\n' |
| 'The traces of this git-cl execution have been recorded at:\n' |
| ' %(trace_name)s-traces.zip\n' |
| 'Copies of your gitcookies file and git config have been recorded at:\n' |
| ' %(trace_name)s-git-info.zip\n') |
| # Format of the message to be stored as part of the traces to give developers a |
| # better context when they go through traces. |
| TRACES_README_FORMAT = ( |
| 'Date: %(now)s\n' |
| '\n' |
| 'Change: https://%(gerrit_host)s/q/%(change_id)s\n' |
| 'Title: %(title)s\n' |
| '\n' |
| '%(description)s\n' |
| '\n' |
| 'Execution time: %(execution_time)s\n' |
| 'Exit code: %(exit_code)s\n') + TRACES_MESSAGE |
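| # A minimal sketch of how these %(name)s templates are filled (the value is
| # illustrative):
| #   TRACES_MESSAGE % {'trace_name': os.path.join(TRACES_DIR, '20190101T000000')}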
| |
| COMMIT_BOT_EMAIL = 'commit-bot@chromium.org' |
| POSTUPSTREAM_HOOK = '.git/hooks/post-cl-land' |
| DESCRIPTION_BACKUP_FILE = '~/.git_cl_description_backup' |
| REFS_THAT_ALIAS_TO_OTHER_REFS = { |
| 'refs/remotes/origin/lkgr': 'refs/remotes/origin/master', |
| 'refs/remotes/origin/lkcr': 'refs/remotes/origin/master', |
| } |
| |
| # Valid extensions for files we want to lint.
| DEFAULT_LINT_REGEX = r'(.*\.cpp|.*\.cc|.*\.h)'
| # A regex that matches no file name, so that by default nothing is ignored.
| DEFAULT_LINT_IGNORE_REGEX = r'$^'
| |
| # File name for yapf style config files. |
| YAPF_CONFIG_FILENAME = '.style.yapf' |
| |
| # Buildbucket master name prefix for Buildbot masters. |
| MASTER_PREFIX = 'master.' |
| |
| # Shortcut since it quickly becomes repetitive. |
| Fore = colorama.Fore |
| |
| # Initialized in main() |
| settings = None |
| |
| # Used by tests/git_cl_test.py to add extra logging. |
| # Inside the weirdly failing test, add this: |
| # >>> self.mock(git_cl, '_IS_BEING_TESTED', True) |
| # And scroll up to see the stack trace printed. |
| _IS_BEING_TESTED = False |
| |
| |
| def DieWithError(message, change_desc=None): |
| if change_desc: |
| SaveDescriptionBackup(change_desc) |
| |
| print(message, file=sys.stderr) |
| sys.exit(1) |
| |
| |
| def SaveDescriptionBackup(change_desc): |
| backup_path = os.path.expanduser(DESCRIPTION_BACKUP_FILE) |
| print('\nsaving CL description to %s\n' % backup_path) |
| with open(backup_path, 'w') as backup_file:
| backup_file.write(change_desc.description)
| |
| |
| def GetNoGitPagerEnv(): |
| env = os.environ.copy() |
| # 'cat' is a magical git string that disables pagers on all platforms. |
| env['GIT_PAGER'] = 'cat' |
| return env |
| |
| |
| def RunCommand(args, error_ok=False, error_message=None, shell=False, **kwargs): |
| try: |
| return subprocess2.check_output(args, shell=shell, **kwargs) |
| except subprocess2.CalledProcessError as e: |
| logging.debug('Failed running %s', args) |
| if not error_ok: |
| DieWithError( |
| 'Command "%s" failed.\n%s' % ( |
| ' '.join(args), error_message or e.stdout or '')) |
| return e.stdout |
| |
| |
| def RunGit(args, **kwargs): |
| """Returns stdout.""" |
| return RunCommand(['git'] + args, **kwargs) |
| |
| |
| def RunGitWithCode(args, suppress_stderr=False): |
| """Returns return code and stdout.""" |
| if suppress_stderr: |
| stderr = subprocess2.VOID |
| else: |
| stderr = sys.stderr |
| try: |
| (out, _), code = subprocess2.communicate(['git'] + args, |
| env=GetNoGitPagerEnv(), |
| stdout=subprocess2.PIPE, |
| stderr=stderr) |
| return code, out |
| except subprocess2.CalledProcessError as e: |
| logging.debug('Failed running %s', ['git'] + args) |
| return e.returncode, e.stdout |
| |
| |
| def RunGitSilent(args): |
| """Returns stdout, suppresses stderr and ignores the return code.""" |
| return RunGitWithCode(args, suppress_stderr=True)[1] |
| |
| |
| def IsGitVersionAtLeast(min_version): |
| prefix = 'git version ' |
| version = RunGit(['--version']).strip() |
| return (version.startswith(prefix) and |
| LooseVersion(version[len(prefix):]) >= LooseVersion(min_version)) |
| |
| |
| def BranchExists(branch): |
| """Return True if specified branch exists.""" |
| code, _ = RunGitWithCode(['rev-parse', '--verify', branch], |
| suppress_stderr=True) |
| return not code |
| |
| |
| def time_sleep(seconds): |
| # Use this so that it can be mocked in tests without interfering with python |
| # system machinery. |
| return time.sleep(seconds) |
| |
| |
| def time_time(): |
| # Use this so that it can be mocked in tests without interfering with python |
| # system machinery. |
| return time.time() |
| |
| |
| def datetime_now(): |
| # Use this so that it can be mocked in tests without interfering with python |
| # system machinery. |
| return datetime.datetime.now() |
| |
| |
| def ask_for_data(prompt): |
| try: |
| return raw_input(prompt) |
| except KeyboardInterrupt: |
| # Hide the exception. |
| sys.exit(1) |
| |
| |
| def confirm_or_exit(prefix='', action='confirm'): |
| """Asks user to press enter to continue or press Ctrl+C to abort.""" |
| if not prefix or prefix.endswith('\n'): |
| mid = 'Press' |
| elif prefix.endswith('.') or prefix.endswith('?'): |
| mid = ' Press' |
| elif prefix.endswith(' '): |
| mid = 'press' |
| else: |
| mid = ' press' |
| ask_for_data('%s%s Enter to %s, or Ctrl+C to abort' % (prefix, mid, action)) |
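| # For example, confirm_or_exit('Branch is dirty.', action='continue') prompts:
| #   'Branch is dirty. Press Enter to continue, or Ctrl+C to abort'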
| |
| |
| def ask_for_explicit_yes(prompt): |
| """Returns whether user typed 'y' or 'yes' to confirm the given prompt.""" |
| result = ask_for_data(prompt + ' [Yes/No]: ').lower() |
| while True: |
| # Guard against empty input; 'yes'.startswith('') would treat it as a yes.
| if result and 'yes'.startswith(result):
| return True
| if result and 'no'.startswith(result):
| return False
| result = ask_for_data('Please, type yes or no: ').lower() |
| |
| |
| def _git_branch_config_key(branch, key): |
| """Helper method to return Git config key for a branch.""" |
| assert branch, 'branch name is required to set git config for it' |
| return 'branch.%s.%s' % (branch, key) |
| |
| |
| def _git_get_branch_config_value(key, default=None, value_type=str, |
| branch=False): |
| """Returns git config value of given or current branch if any. |
| |
| Returns default in all other cases. |
| """ |
| assert value_type in (int, str, bool) |
| if branch is False: # Distinguishing default arg value from None. |
| branch = GetCurrentBranch() |
| |
| if not branch: |
| return default |
| |
| args = ['config'] |
| if value_type == bool: |
| args.append('--bool') |
| # `git config` also has --int, but apparently git config suffers from integer |
| # overflows (http://crbug.com/640115), so don't use it. |
| args.append(_git_branch_config_key(branch, key)) |
| code, out = RunGitWithCode(args) |
| if code == 0: |
| value = out.strip() |
| if value_type == int: |
| return int(value) |
| if value_type == bool: |
| return bool(value.lower() == 'true') |
| return value |
| return default |
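| # For example (assuming a branch named 'feature-x'):
| #   _git_get_branch_config_value('merge', branch='feature-x')
| # reads `git config branch.feature-x.merge` and returns its value, or the
| # default if the key is unset.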
| |
| |
| def _git_set_branch_config_value(key, value, branch=None, **kwargs): |
| """Sets or unsets the git branch config value. |
| |
| If value is None, the key will be unset, otherwise it will be set. |
| If no branch is given, the currently checked out branch is used. |
| """ |
| if not branch: |
| branch = GetCurrentBranch() |
| assert branch, 'a branch name OR currently checked out branch is required' |
| args = ['config'] |
| # Check for boolean first, because bool is int, but int is not bool. |
| if value is None: |
| args.append('--unset') |
| elif isinstance(value, bool): |
| args.append('--bool') |
| value = str(value).lower() |
| else: |
| # `git config` also has --int, but apparently git config suffers from |
| # integer overflows (http://crbug.com/640115), so don't use it. |
| value = str(value) |
| args.append(_git_branch_config_key(branch, key)) |
| if value is not None: |
| args.append(value) |
| RunGit(args, **kwargs) |
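| # E.g. _git_set_branch_config_value('last-upload-hash', 'deadbeef',
| # branch='feature-x') runs `git config branch.feature-x.last-upload-hash
| # deadbeef` (names illustrative), while passing value=None unsets the key
| # via --unset.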
| |
| |
| def _get_committer_timestamp(commit): |
| """Returns Unix timestamp as integer of a committer in a commit. |
| |
| Commit can be whatever git show would recognize, such as HEAD, sha1 or ref. |
| """ |
| # Git also stores timezone offset, but it only affects visual display; |
| # actual point in time is defined by this timestamp only. |
| return int(RunGit(['show', '-s', '--format=%ct', commit]).strip()) |
| |
| |
| def _git_amend_head(message, committer_timestamp): |
| """Amends commit with new message and desired committer_timestamp. |
| |
| Sets committer timezone to UTC. |
| """ |
| env = os.environ.copy() |
| env['GIT_COMMITTER_DATE'] = '%d+0000' % committer_timestamp |
| return RunGit(['commit', '--amend', '-m', message], env=env) |
| |
| |
| def _get_properties_from_options(options): |
| properties = dict(x.split('=', 1) for x in options.properties) |
| for key, val in properties.iteritems(): |
| try: |
| properties[key] = json.loads(val) |
| except ValueError: |
| pass # If a value couldn't be evaluated, treat it as a string. |
| return properties |
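| # For example, options.properties == ['foo=bar', 'count=2'] yields
| # {'foo': 'bar', 'count': 2}: 'count' parses as JSON, while 'bar' is not
| # valid JSON and is kept as a string.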
| |
| |
| def _prefix_master(master): |
| """Convert user-specified master name to full master name. |
| |
| Buildbucket uses the full master name (master.tryserver.chromium.linux) as
| the bucket name, while developers use the shortened master name
| (tryserver.chromium.linux) with the 'master.' prefix stripped. This
| function does the conversion for the Buildbucket migration.
| """ |
| if master.startswith(MASTER_PREFIX): |
| return master |
| return '%s%s' % (MASTER_PREFIX, master) |
| |
| |
| def _unprefix_master(bucket): |
| """Convert bucket name to shortened master name. |
| |
| Buildbucket uses the full master name (master.tryserver.chromium.linux) as
| the bucket name, while developers use the shortened master name
| (tryserver.chromium.linux) with the 'master.' prefix stripped. This
| function does the conversion for the Buildbucket migration.
| """ |
| if bucket.startswith(MASTER_PREFIX): |
| return bucket[len(MASTER_PREFIX):] |
| return bucket |
| |
| |
| def _buildbucket_retry(operation_name, http, *args, **kwargs): |
| """Retries requests to buildbucket service and returns parsed json content.""" |
| try_count = 0 |
| while True: |
| response, content = http.request(*args, **kwargs) |
| try: |
| content_json = json.loads(content) |
| except ValueError: |
| content_json = None |
| |
| # Buildbucket could return an error even if status==200. |
| if content_json and content_json.get('error'): |
| error = content_json.get('error') |
| if error.get('code') == 403: |
| raise BuildbucketResponseException( |
| 'Access denied: %s' % error.get('message', '')) |
| msg = 'Error in response. Reason: %s. Message: %s.' % ( |
| error.get('reason', ''), error.get('message', '')) |
| raise BuildbucketResponseException(msg) |
| |
| if response.status == 200: |
| if content_json is None: |
| raise BuildbucketResponseException( |
| 'Buildbucket returned invalid JSON content: %s.\n' |
| 'Please file bugs at http://crbug.com, ' |
| 'component "Infra>Platform>Buildbucket".' % |
| content) |
| return content_json |
| if response.status < 500 or try_count >= 2: |
| raise httplib2.HttpLib2Error(content) |
| |
| # status >= 500 means transient failures. |
| logging.debug('Transient errors when %s. Will retry.', operation_name) |
| time_sleep(0.5 + (1.5 * try_count)) |
| try_count += 1 |
| assert False, 'unreachable' |
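| # Derived retry schedule for transient (>=500) responses: request, sleep
| # 0.5s, request, sleep 2.0s, request, then raise. Non-transient (<500)
| # errors raise immediately.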
| |
| |
| def _get_bucket_map(changelist, options, option_parser): |
| """Returns a dict mapping bucket names to builders and tests, |
| for triggering tryjobs. |
| """ |
| # If no bots are listed, we try to get a set of builders and tests based |
| # on GetPreferredTryMasters functions in PRESUBMIT.py files. |
| if not options.bot: |
| change = changelist.GetChange( |
| changelist.GetCommonAncestorWithUpstream(), None) |
| # Get try masters from PRESUBMIT.py files. |
| masters = presubmit_support.DoGetTryMasters( |
| change=change, |
| changed_files=change.LocalPaths(), |
| repository_root=settings.GetRoot(), |
| default_presubmit=None, |
| project=None, |
| verbose=options.verbose, |
| output_stream=sys.stdout) |
| if masters is None: |
| return None |
| return {_prefix_master(m): b for m, b in masters.iteritems()} |
| |
| if options.bucket: |
| return {options.bucket: {b: [] for b in options.bot}} |
| option_parser.error( |
| 'Please specify the bucket, e.g. "-B luci.chromium.try".') |
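| # The returned map has the shape (bucket and builder names illustrative):
| #   {'luci.chromium.try': {'linux_rel': []}}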
| |
| |
| def _trigger_try_jobs(auth_config, changelist, buckets, options, patchset): |
| """Sends a request to Buildbucket to trigger tryjobs for a changelist. |
| |
| Args: |
| auth_config: AuthConfig for Buildbucket. |
| changelist: Changelist that the tryjobs are associated with. |
| buckets: A nested dict mapping bucket names to builders to tests. |
| options: Command-line options.
| patchset: Patchset number to trigger the tryjobs against; if None, the
| most recent patchset is used.
| """
| assert changelist.GetIssue(), 'CL must be uploaded first' |
| codereview_url = changelist.GetCodereviewServer() |
| assert codereview_url, 'CL must be uploaded first' |
| patchset = patchset or changelist.GetMostRecentPatchset() |
| assert patchset, 'CL must be uploaded first' |
| |
| codereview_host = urlparse.urlparse(codereview_url).hostname |
| # Cache the buildbucket credentials under the codereview host key, so that |
| # users can use different credentials for different buckets. |
| authenticator = auth.get_authenticator_for_host(codereview_host, auth_config) |
| http = authenticator.authorize(httplib2.Http()) |
| http.force_exception_to_status_code = True |
| |
| buildbucket_put_url = ( |
| 'https://{hostname}/_ah/api/buildbucket/v1/builds/batch'.format( |
| hostname=options.buildbucket_host)) |
| buildset = 'patch/gerrit/{hostname}/{issue}/{patch}'.format( |
| hostname=codereview_host, |
| issue=changelist.GetIssue(), |
| patch=patchset) |
| |
| shared_parameters_properties = changelist.GetTryJobProperties(patchset) |
| shared_parameters_properties['category'] = options.category |
| if options.clobber: |
| shared_parameters_properties['clobber'] = True |
| extra_properties = _get_properties_from_options(options) |
| if extra_properties: |
| shared_parameters_properties.update(extra_properties) |
| |
| batch_req_body = {'builds': []} |
| print_text = [] |
| print_text.append('Tried jobs on:') |
| for bucket, builders_and_tests in sorted(buckets.iteritems()): |
| print_text.append('Bucket: %s' % bucket) |
| master = None |
| if bucket.startswith(MASTER_PREFIX): |
| master = _unprefix_master(bucket) |
| for builder, tests in sorted(builders_and_tests.iteritems()): |
| print_text.append(' %s: %s' % (builder, tests)) |
| parameters = { |
| 'builder_name': builder, |
| 'changes': [{ |
| 'author': {'email': changelist.GetIssueOwner()}, |
| 'revision': options.revision, |
| }], |
| 'properties': shared_parameters_properties.copy(), |
| } |
| if 'presubmit' in builder.lower(): |
| parameters['properties']['dry_run'] = 'true' |
| if tests: |
| parameters['properties']['testfilter'] = tests |
| |
| tags = [ |
| 'builder:%s' % builder, |
| 'buildset:%s' % buildset, |
| 'user_agent:git_cl_try', |
| ] |
| if master: |
| parameters['properties']['master'] = master |
| tags.append('master:%s' % master) |
| |
| batch_req_body['builds'].append( |
| { |
| 'bucket': bucket, |
| 'parameters_json': json.dumps(parameters), |
| 'client_operation_id': str(uuid.uuid4()), |
| 'tags': tags, |
| } |
| ) |
| |
| _buildbucket_retry( |
| 'triggering tryjobs', |
| http, |
| buildbucket_put_url, |
| 'PUT', |
| body=json.dumps(batch_req_body), |
| headers={'Content-Type': 'application/json'} |
| ) |
| print_text.append('To see results here, run: git cl try-results') |
| print_text.append('To see results in browser, run: git cl web') |
| print('\n'.join(print_text)) |
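| # For reference, each entry appended to batch_req_body['builds'] above looks
| # roughly like (values illustrative):
| #   {'bucket': 'luci.chromium.try',
| #    'parameters_json': '{"builder_name": "linux_rel", ...}',
| #    'client_operation_id': '<uuid4>',
| #    'tags': ['builder:linux_rel', 'buildset:patch/gerrit/...',
| #             'user_agent:git_cl_try']}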
| |
| |
| def fetch_try_jobs(auth_config, changelist, buildbucket_host, |
| patchset=None): |
| """Fetches tryjobs from buildbucket. |
| |
| Returns a map from build id to build info as a dictionary. |
| """ |
| assert buildbucket_host |
| assert changelist.GetIssue(), 'CL must be uploaded first' |
| assert changelist.GetCodereviewServer(), 'CL must be uploaded first' |
| patchset = patchset or changelist.GetMostRecentPatchset() |
| assert patchset, 'CL must be uploaded first' |
| |
| codereview_url = changelist.GetCodereviewServer() |
| codereview_host = urlparse.urlparse(codereview_url).hostname |
| authenticator = auth.get_authenticator_for_host(codereview_host, auth_config) |
| if authenticator.has_cached_credentials(): |
| http = authenticator.authorize(httplib2.Http()) |
| else: |
| print('Warning: Some results might be missing because %s' % |
| # Get the message on how to login. |
| (auth.LoginRequiredError(codereview_host).message,)) |
| http = httplib2.Http() |
| |
| http.force_exception_to_status_code = True |
| |
| buildset = 'patch/gerrit/{hostname}/{issue}/{patch}'.format( |
| hostname=codereview_host, |
| issue=changelist.GetIssue(), |
| patch=patchset) |
| params = {'tag': 'buildset:%s' % buildset} |
| |
| builds = {} |
| while True: |
| url = 'https://{hostname}/_ah/api/buildbucket/v1/search?{params}'.format( |
| hostname=buildbucket_host, |
| params=urllib.urlencode(params)) |
| content = _buildbucket_retry('fetching tryjobs', http, url, 'GET') |
| for build in content.get('builds', []): |
| builds[build['id']] = build |
| if 'next_cursor' in content: |
| params['start_cursor'] = content['next_cursor'] |
| else: |
| break |
| return builds |
| |
| |
| def print_try_jobs(options, builds): |
| """Prints nicely result of fetch_try_jobs.""" |
| if not builds: |
| print('No tryjobs scheduled.') |
| return |
| |
| # Make a copy, because we'll be modifying builds dictionary. |
| builds = builds.copy() |
| builder_names_cache = {} |
| |
| def get_builder(b): |
| try: |
| return builder_names_cache[b['id']] |
| except KeyError: |
| try: |
| parameters = json.loads(b['parameters_json']) |
| name = parameters['builder_name'] |
| except (ValueError, KeyError) as error: |
| print('WARNING: Failed to get builder name for build %s: %s' % ( |
| b['id'], error)) |
| name = None |
| builder_names_cache[b['id']] = name |
| return name |
| |
| def get_bucket(b): |
| bucket = b['bucket'] |
| if bucket.startswith(MASTER_PREFIX):
| return bucket[len(MASTER_PREFIX):]
| return bucket |
| |
| if options.print_master: |
| name_fmt = '%%-%ds %%-%ds' % ( |
| max(len(str(get_bucket(b))) for b in builds.itervalues()), |
| max(len(str(get_builder(b))) for b in builds.itervalues())) |
| def get_name(b): |
| return name_fmt % (get_bucket(b), get_builder(b)) |
| else: |
| name_fmt = '%%-%ds' % ( |
| max(len(str(get_builder(b))) for b in builds.itervalues())) |
| def get_name(b): |
| return name_fmt % get_builder(b) |
| |
| def sort_key(b): |
| return b['status'], b.get('result'), get_name(b), b.get('url') |
| |
| def pop(title, f, color=None, **kwargs): |
| """Pop matching builds from `builds` dict and print them.""" |
| |
| if not options.color or color is None: |
| colorize = str |
| else: |
| colorize = lambda x: '%s%s%s' % (color, x, Fore.RESET) |
| |
| result = [] |
| for b in builds.values(): |
| if all(b.get(k) == v for k, v in kwargs.iteritems()): |
| builds.pop(b['id']) |
| result.append(b) |
| if result: |
| print(colorize(title)) |
| for b in sorted(result, key=sort_key): |
| print(' ', colorize('\t'.join(map(str, f(b))))) |
| |
| total = len(builds) |
| pop(status='COMPLETED', result='SUCCESS', |
| title='Successes:', color=Fore.GREEN, |
| f=lambda b: (get_name(b), b.get('url'))) |
| pop(status='COMPLETED', result='FAILURE', failure_reason='INFRA_FAILURE', |
| title='Infra Failures:', color=Fore.MAGENTA, |
| f=lambda b: (get_name(b), b.get('url'))) |
| pop(status='COMPLETED', result='FAILURE', failure_reason='BUILD_FAILURE', |
| title='Failures:', color=Fore.RED, |
| f=lambda b: (get_name(b), b.get('url'))) |
| pop(status='COMPLETED', result='CANCELED', |
| title='Canceled:', color=Fore.MAGENTA, |
| f=lambda b: (get_name(b),)) |
| pop(status='COMPLETED', result='FAILURE', |
| failure_reason='INVALID_BUILD_DEFINITION', |
| title='Wrong master/builder name:', color=Fore.MAGENTA, |
| f=lambda b: (get_name(b),)) |
| pop(status='COMPLETED', result='FAILURE', |
| title='Other failures:', |
| f=lambda b: (get_name(b), b.get('failure_reason'), b.get('url'))) |
| pop(status='COMPLETED', |
| title='Other finished:', |
| f=lambda b: (get_name(b), b.get('result'), b.get('url'))) |
| pop(status='STARTED', |
| title='Started:', color=Fore.YELLOW, |
| f=lambda b: (get_name(b), b.get('url'))) |
| pop(status='SCHEDULED', |
| title='Scheduled:', |
| f=lambda b: (get_name(b), 'id=%s' % b['id'])) |
| # The last section is just in case buildbucket API changes OR there is a bug. |
| pop(title='Other:', |
| f=lambda b: (get_name(b), 'id=%s' % b['id'])) |
| assert len(builds) == 0 |
| print('Total: %d tryjobs' % total) |
| |
| |
| def _ComputeDiffLineRanges(files, upstream_commit): |
| """Gets the changed line ranges for each file since upstream_commit. |
| |
| Parses a git diff on provided files and returns a dict that maps a file name |
| to an ordered list of range tuples in the form (start_line, count). |
| Ranges are in the same format as a git diff. |
| """ |
| # If files is empty then diff_output will be a full diff. |
| if len(files) == 0: |
| return {} |
| |
| # Take the git diff and find the line ranges where there are changes. |
| diff_cmd = BuildGitDiffCmd('-U0', upstream_commit, files, allow_prefix=True) |
| diff_output = RunGit(diff_cmd) |
| |
| pattern = r'(?:^diff --git a/(?:.*) b/(.*))|(?:^@@.*\+(.*) @@)' |
| # 2 capture groups |
| # 0 == fname of diff file |
| # 1 == 'diff_start,diff_count' or 'diff_start' |
| # will match each of |
| # diff --git a/foo.foo b/foo.py |
| # @@ -12,2 +14,3 @@ |
| # @@ -12,2 +17 @@ |
| # running re.findall on the above string with pattern will give |
| # [('foo.py', ''), ('', '14,3'), ('', '17')] |
| |
| curr_file = None |
| line_diffs = {} |
| for match in re.findall(pattern, diff_output, flags=re.MULTILINE): |
| if match[0] != '': |
| # Will match the second filename in diff --git a/a.py b/b.py. |
| curr_file = match[0] |
| line_diffs[curr_file] = [] |
| else: |
| # Matches +14,3 |
| if ',' in match[1]: |
| diff_start, diff_count = match[1].split(',') |
| else: |
| # Single line changes are of the form +12 instead of +12,1. |
| diff_start = match[1] |
| diff_count = 1 |
| |
| diff_start = int(diff_start) |
| diff_count = int(diff_count) |
| |
| # If diff_count == 0 this is a pure removal, which we skip: there are no
| # new lines on the + side of the diff to process.
| if diff_count == 0:
| continue
| line_diffs[curr_file].append((diff_start, diff_count))
| |
| return line_diffs |
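| # E.g. for the sample diff documented above, the return value is
| #   {'foo.py': [(14, 3), (17, 1)]}.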
| |
| |
| def _FindYapfConfigFile(fpath, yapf_config_cache, top_dir=None): |
| """Checks if a yapf file is in any parent directory of fpath until top_dir. |
| |
| Recursively checks parent directories to find yapf file and if no yapf file |
| is found returns None. Uses yapf_config_cache as a cache for previously found |
| configs. |
| """ |
| fpath = os.path.abspath(fpath) |
| # Return result if we've already computed it. |
| if fpath in yapf_config_cache: |
| return yapf_config_cache[fpath] |
| |
| parent_dir = os.path.dirname(fpath) |
| if os.path.isfile(fpath): |
| ret = _FindYapfConfigFile(parent_dir, yapf_config_cache, top_dir) |
| else: |
| # Otherwise fpath is a directory |
| yapf_file = os.path.join(fpath, YAPF_CONFIG_FILENAME) |
| if os.path.isfile(yapf_file): |
| ret = yapf_file |
| elif fpath == top_dir or parent_dir == fpath: |
| # If we're at the top level directory, or if we're at root |
| # there is no provided style. |
| ret = None |
| else: |
| # Otherwise recurse on the current directory. |
| ret = _FindYapfConfigFile(parent_dir, yapf_config_cache, top_dir) |
| yapf_config_cache[fpath] = ret |
| return ret |
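| # For example, for fpath 'src/a/b/c.py' this checks src/a/b/.style.yapf,
| # then src/a/.style.yapf, and so on, stopping at top_dir or the filesystem
| # root (paths illustrative).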
| |
| |
| def write_try_results_json(output_file, builds): |
| """Writes a subset of the data from fetch_try_jobs to a file as JSON. |
| |
| The input |builds| dict is assumed to be generated by Buildbucket. |
| Buildbucket documentation: http://goo.gl/G0s101 |
| """ |
| |
| def convert_build_dict(build): |
| """Extracts some of the information from one build dict.""" |
| parameters = json.loads(build.get('parameters_json', '{}')) or {} |
| return { |
| 'buildbucket_id': build.get('id'), |
| 'bucket': build.get('bucket'), |
| 'builder_name': parameters.get('builder_name'), |
| 'created_ts': build.get('created_ts'), |
| 'experimental': build.get('experimental'), |
| 'failure_reason': build.get('failure_reason'), |
| 'result': build.get('result'), |
| 'status': build.get('status'), |
| 'tags': build.get('tags'), |
| 'url': build.get('url'), |
| } |
| |
| converted = [] |
| for _, build in sorted(builds.items()): |
| converted.append(convert_build_dict(build)) |
| write_json(output_file, converted) |
| |
| |
| def print_stats(args): |
| """Prints statistics about the change to the user.""" |
| # --no-ext-diff is broken in some versions of Git, so try to work around |
| # this by overriding the environment (but there is still a problem if the |
| # git config key "diff.external" is used). |
| env = GetNoGitPagerEnv() |
| if 'GIT_EXTERNAL_DIFF' in env: |
| del env['GIT_EXTERNAL_DIFF'] |
| |
| try: |
| stdout = sys.stdout.fileno() |
| except AttributeError: |
| stdout = None |
| return subprocess2.call( |
| ['git', 'diff', '--no-ext-diff', '--stat', '-l100000', '-C50'] + args, |
| stdout=stdout, env=env) |
| |
| |
| class BuildbucketResponseException(Exception): |
| pass |
| |
| |
| class Settings(object): |
| def __init__(self): |
| self.cc = None |
| self.root = None |
| self.tree_status_url = None |
| self.viewvc_url = None |
| self.updated = False |
| self.is_gerrit = None |
| self.squash_gerrit_uploads = None |
| self.gerrit_skip_ensure_authenticated = None |
| self.git_editor = None |
| |
| def LazyUpdateIfNeeded(self): |
| """Updates the settings from a codereview.settings file, if available.""" |
| if not self.updated: |
| # The only value that actually changes the behavior is |
| # autoupdate = "false". Everything else means "true". |
| autoupdate = RunGit(['config', 'rietveld.autoupdate'], |
| error_ok=True |
| ).strip().lower() |
| |
| cr_settings_file = FindCodereviewSettingsFile() |
| if autoupdate != 'false' and cr_settings_file: |
| LoadCodereviewSettingsFromFile(cr_settings_file) |
| self.updated = True |
| |
| @staticmethod |
| def GetRelativeRoot(): |
| return RunGit(['rev-parse', '--show-cdup']).strip() |
| |
| def GetRoot(self): |
| if self.root is None: |
| self.root = os.path.abspath(self.GetRelativeRoot()) |
| return self.root |
| |
| def GetTreeStatusUrl(self, error_ok=False): |
| if not self.tree_status_url: |
| error_message = ('You must configure your tree status URL by running ' |
| '"git cl config".') |
| self.tree_status_url = self._GetConfig( |
| 'rietveld.tree-status-url', error_ok=error_ok, |
| error_message=error_message) |
| return self.tree_status_url |
| |
| def GetViewVCUrl(self): |
| if not self.viewvc_url: |
| self.viewvc_url = self._GetConfig('rietveld.viewvc-url', error_ok=True) |
| return self.viewvc_url |
| |
| def GetBugPrefix(self): |
| return self._GetConfig('rietveld.bug-prefix', error_ok=True) |
| |
| def GetRunPostUploadHook(self): |
| run_post_upload_hook = self._GetConfig( |
| 'rietveld.run-post-upload-hook', error_ok=True) |
| return run_post_upload_hook == "True" |
| |
| def GetDefaultCCList(self): |
| return self._GetConfig('rietveld.cc', error_ok=True) |
| |
| def GetIsGerrit(self): |
| """Returns True if this repo is associated with Gerrit.""" |
| if self.is_gerrit is None: |
| self.is_gerrit = ( |
| self._GetConfig('gerrit.host', error_ok=True).lower() == 'true') |
| return self.is_gerrit |
| |
| def GetSquashGerritUploads(self): |
| """Returns True if uploads to Gerrit should be squashed by default.""" |
| if self.squash_gerrit_uploads is None: |
| self.squash_gerrit_uploads = self.GetSquashGerritUploadsOverride() |
| if self.squash_gerrit_uploads is None: |
| # Default is squash now (http://crbug.com/611892#c23). |
| self.squash_gerrit_uploads = not ( |
| RunGit(['config', '--bool', 'gerrit.squash-uploads'], |
| error_ok=True).strip() == 'false') |
| return self.squash_gerrit_uploads |
| |
| def GetSquashGerritUploadsOverride(self): |
| """Return True or False if codereview.settings should be overridden. |
| |
| Returns None if no override has been defined. |
| """ |
| # See also http://crbug.com/611892#c23 |
| result = RunGit(['config', '--bool', 'gerrit.override-squash-uploads'], |
| error_ok=True).strip() |
| if result == 'true': |
| return True |
| if result == 'false': |
| return False |
| return None |
| |
| def GetGerritSkipEnsureAuthenticated(self): |
| """Return True if EnsureAuthenticated should not be done for Gerrit |
| uploads.""" |
| if self.gerrit_skip_ensure_authenticated is None: |
| self.gerrit_skip_ensure_authenticated = ( |
| RunGit(['config', '--bool', 'gerrit.skip-ensure-authenticated'], |
| error_ok=True).strip() == 'true') |
| return self.gerrit_skip_ensure_authenticated |
| |
| def GetGitEditor(self): |
| """Returns the editor specified in the git config, or None if none is.""" |
| if self.git_editor is None: |
| # Git requires single quotes for paths with spaces. We need to replace |
| # them with double quotes for Windows to treat such paths as a single |
| # path. |
| self.git_editor = self._GetConfig( |
| 'core.editor', error_ok=True).replace('\'', '"') |
| return self.git_editor or None |
| |
| def GetLintRegex(self): |
| return (self._GetConfig('rietveld.cpplint-regex', error_ok=True) or |
| DEFAULT_LINT_REGEX) |
| |
| def GetLintIgnoreRegex(self): |
| return (self._GetConfig('rietveld.cpplint-ignore-regex', error_ok=True) or |
| DEFAULT_LINT_IGNORE_REGEX) |
| |
| def _GetConfig(self, param, **kwargs): |
| self.LazyUpdateIfNeeded() |
| return RunGit(['config', param], **kwargs).strip() |
| |
| |
| @contextlib.contextmanager |
| def _get_gerrit_project_config_file(remote_url): |
| """Context manager to fetch and store Gerrit's project.config from |
| refs/meta/config branch and store it in temp file. |
| |
| Provides a temporary filename or None if there was error. |
| """ |
| error, _ = RunGitWithCode([ |
| 'fetch', remote_url, |
| '+refs/meta/config:refs/git_cl/meta/config']) |
| if error: |
| # Ref doesn't exist or isn't accessible to current user. |
| print('WARNING: Failed to fetch project config for %s: %s' % |
| (remote_url, error)) |
| yield None |
| return |
| |
| error, project_config_data = RunGitWithCode( |
| ['show', 'refs/git_cl/meta/config:project.config']) |
| if error: |
| print('WARNING: project.config file not found') |
| yield None |
| return |
| |
| with gclient_utils.temporary_directory() as tempdir: |
| project_config_file = os.path.join(tempdir, 'project.config') |
| gclient_utils.FileWrite(project_config_file, project_config_data) |
| yield project_config_file |
| |
| |
| def ShortBranchName(branch): |
| """Convert a name like 'refs/heads/foo' to just 'foo'.""" |
| return branch.replace('refs/heads/', '', 1) |
| |
| |
| def GetCurrentBranchRef(): |
| """Returns branch ref (e.g., refs/heads/master) or None.""" |
| return RunGit(['symbolic-ref', 'HEAD'], |
| stderr=subprocess2.VOID, error_ok=True).strip() or None |
| |
| |
| def GetCurrentBranch(): |
| """Returns current branch or None. |
| |
| For refs/heads/* branches, returns just last part. For others, full ref. |
| """ |
| branchref = GetCurrentBranchRef() |
| if branchref: |
| return ShortBranchName(branchref) |
| return None |
| |
| |
| class _CQState(object): |
| """Enum for states of CL with respect to CQ.""" |
| NONE = 'none' |
| DRY_RUN = 'dry_run' |
| COMMIT = 'commit' |
| |
| ALL_STATES = [NONE, DRY_RUN, COMMIT] |
| |
| |
| class _ParsedIssueNumberArgument(object): |
| def __init__(self, issue=None, patchset=None, hostname=None): |
| self.issue = issue |
| self.patchset = patchset |
| self.hostname = hostname |
| |
| @property |
| def valid(self): |
| return self.issue is not None |
| |
| |
| def ParseIssueNumberArgument(arg): |
| """Parses the issue argument and returns _ParsedIssueNumberArgument.""" |
| fail_result = _ParsedIssueNumberArgument() |
| |
| if arg.isdigit(): |
| return _ParsedIssueNumberArgument(issue=int(arg)) |
| if not arg.startswith('http'): |
| return fail_result |
| |
| url = gclient_utils.UpgradeToHttps(arg) |
| try: |
| parsed_url = urlparse.urlparse(url) |
| except ValueError: |
| return fail_result |
| |
| return _GerritChangelistImpl.ParseIssueURL(parsed_url) or fail_result |
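| # For example, ParseIssueNumberArgument('123456') yields issue=123456, while
| # a URL argument is upgraded to https and delegated to
| # _GerritChangelistImpl.ParseIssueURL, which may also extract the patchset.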
| |
| |
| def _create_description_from_log(args): |
| """Pulls out the commit log to use as a base for the CL description.""" |
| log_args = [] |
| if len(args) == 1 and not args[0].endswith('.'): |
| log_args = [args[0] + '..'] |
| elif len(args) == 1 and args[0].endswith('...'): |
| log_args = [args[0][:-1]] |
| elif len(args) == 2: |
| log_args = [args[0] + '..' + args[1]] |
| else: |
| log_args = args[:] # Hope for the best! |
| return RunGit(['log', '--pretty=format:%s\n\n%b'] + log_args) |
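| # E.g. args == ['origin/master'] produces `git log origin/master..`, and
| # args == ['foo', 'bar'] produces `git log foo..bar`.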
| |
| |
| class GerritChangeNotExists(Exception): |
| def __init__(self, issue, url): |
| self.issue = issue |
| self.url = url |
| super(GerritChangeNotExists, self).__init__() |
| |
| def __str__(self): |
| return 'change %s at %s does not exist or you have no access to it' % ( |
| self.issue, self.url) |
| |
| |
| _CommentSummary = collections.namedtuple( |
| '_CommentSummary', ['date', 'message', 'sender', 'autogenerated', |
| # TODO(tandrii): these two aren't known in Gerrit. |
| 'approval', 'disapproval']) |
| |
| |
| class Changelist(object): |
| """Changelist works with one changelist in local branch. |
| |
| Notes: |
| * Not safe for concurrent multi-{thread,process} use. |
| * Caches values from current branch. Therefore, re-use after branch change |
| with great care. |
| """ |
| |
| def __init__(self, branchref=None, issue=None, **kwargs): |
| """Create a new ChangeList instance. |
| |
| **kwargs will be passed directly to Gerrit implementation. |
| """ |
| # Poke settings so we get the "configure your server" message if necessary. |
| global settings |
| if not settings: |
| # Happens when git_cl.py is used as a utility library. |
| settings = Settings() |
| |
| self.branchref = branchref |
| if self.branchref: |
| assert branchref.startswith('refs/heads/') |
| self.branch = ShortBranchName(self.branchref) |
| else: |
| self.branch = None |
| self.upstream_branch = None |
| self.lookedup_issue = False |
| self.issue = issue or None |
| self.has_description = False |
| self.description = None |
| self.lookedup_patchset = False |
| self.patchset = None |
| self.cc = None |
| self.more_cc = [] |
| self._remote = None |
| self._cached_remote_url = (False, None) # (is_cached, value) |
| |
| self._codereview_impl = _GerritChangelistImpl(self, **kwargs) |
| |
| def GetCCList(self): |
| """Returns the users cc'd on this CL. |
| |
| The return value is a string suitable for passing to git cl with the --cc |
| flag. |
| """ |
| if self.cc is None: |
| base_cc = settings.GetDefaultCCList() |
| more_cc = ','.join(self.more_cc) |
| self.cc = ','.join(filter(None, (base_cc, more_cc))) or '' |
| return self.cc |
| |
| def GetCCListWithoutDefault(self): |
| """Return the users cc'd on this CL excluding default ones.""" |
| if self.cc is None: |
| self.cc = ','.join(self.more_cc) |
| return self.cc |
| |
| def ExtendCC(self, more_cc): |
| """Extends the list of users to cc on this CL based on the changed files.""" |
| self.more_cc.extend(more_cc) |
| |
| def GetBranch(self): |
| """Returns the short branch name, e.g. 'master'.""" |
| if not self.branch: |
| branchref = GetCurrentBranchRef() |
| if not branchref: |
| return None |
| self.branchref = branchref |
| self.branch = ShortBranchName(self.branchref) |
| return self.branch |
| |
| def GetBranchRef(self): |
| """Returns the full branch name, e.g. 'refs/heads/master'.""" |
| self.GetBranch() # Poke the lazy loader. |
| return self.branchref |
| |
| def ClearBranch(self): |
| """Clears cached branch data of this object.""" |
| self.branch = self.branchref = None |
| |
| def _GitGetBranchConfigValue(self, key, default=None, **kwargs): |
| assert 'branch' not in kwargs, 'this CL branch is used automatically' |
| kwargs['branch'] = self.GetBranch() |
| return _git_get_branch_config_value(key, default, **kwargs) |
| |
| def _GitSetBranchConfigValue(self, key, value, **kwargs): |
| assert 'branch' not in kwargs, 'this CL branch is used automatically' |
| assert self.GetBranch(), ( |
| 'this CL must have an associated branch to %sset %s%s' % |
| ('un' if value is None else '', |
| key, |
| '' if value is None else ' to %r' % value)) |
| kwargs['branch'] = self.GetBranch() |
| return _git_set_branch_config_value(key, value, **kwargs) |
| |
| @staticmethod |
| def FetchUpstreamTuple(branch): |
| """Returns a tuple containing remote and remote ref, |
| e.g. 'origin', 'refs/heads/master' |
| """ |
| remote = '.' |
| upstream_branch = _git_get_branch_config_value('merge', branch=branch) |
| |
| if upstream_branch: |
| remote = _git_get_branch_config_value('remote', branch=branch) |
| else: |
| upstream_branch = RunGit(['config', 'rietveld.upstream-branch'], |
| error_ok=True).strip() |
| if upstream_branch: |
| remote = RunGit(['config', 'rietveld.upstream-remote']).strip() |
| else: |
| # Else, try to guess the origin remote. |
| remote_branches = RunGit(['branch', '-r']).split() |
| if 'origin/master' in remote_branches: |
| # Fall back on origin/master if it exists.
| remote = 'origin' |
| upstream_branch = 'refs/heads/master' |
| else: |
| DieWithError( |
| 'Unable to determine default branch to diff against.\n' |
| 'Either pass complete "git diff"-style arguments, like\n' |
| ' git cl upload origin/master\n' |
| 'or verify this branch is set up to track another \n' |
| '(via the --track argument to "git checkout -b ...").') |
| |
| return remote, upstream_branch |
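| # For example, for a branch created with
| # `git checkout -b feature -t origin/master`, FetchUpstreamTuple('feature')
| # returns ('origin', 'refs/heads/master').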
| |
| def GetCommonAncestorWithUpstream(self): |
| upstream_branch = self.GetUpstreamBranch() |
| if not BranchExists(upstream_branch): |
| DieWithError('The upstream for the current branch (%s) does not exist ' |
| 'anymore.\nPlease fix it and try again.' % self.GetBranch()) |
| return git_common.get_or_create_merge_base(self.GetBranch(), |
| upstream_branch) |
| |
| def GetUpstreamBranch(self): |
| if self.upstream_branch is None: |
| remote, upstream_branch = self.FetchUpstreamTuple(self.GetBranch()) |
| if remote != '.': |
| upstream_branch = upstream_branch.replace('refs/heads/', |
| 'refs/remotes/%s/' % remote) |
| upstream_branch = upstream_branch.replace('refs/branch-heads/', |
| 'refs/remotes/branch-heads/') |
| self.upstream_branch = upstream_branch |
| return self.upstream_branch |
| |
| def GetRemoteBranch(self): |
| if not self._remote: |
| remote, branch = None, self.GetBranch() |
| seen_branches = set() |
| while branch not in seen_branches: |
| seen_branches.add(branch) |
| remote, branch = self.FetchUpstreamTuple(branch) |
| branch = ShortBranchName(branch) |
| if remote != '.' or branch.startswith('refs/remotes'): |
| break |
| else: |
| remotes = RunGit(['remote'], error_ok=True).split() |
| if len(remotes) == 1: |
| remote, = remotes |
| elif 'origin' in remotes: |
| remote = 'origin' |
| logging.warn('Could not determine which remote this change is '
| 'associated with, so defaulting to "%s".', remote)
| else: |
| logging.warn('Could not determine which remote this change is ' |
| 'associated with.') |
| branch = 'HEAD' |
| if branch.startswith('refs/remotes'): |
| self._remote = (remote, branch) |
| elif branch.startswith('refs/branch-heads/'): |
| self._remote = (remote, branch.replace('refs/', 'refs/remotes/')) |
| else: |
| self._remote = (remote, 'refs/remotes/%s/%s' % (remote, branch)) |
| return self._remote |
| |
| def GitSanityChecks(self, upstream_git_obj): |
| """Checks git repo status and ensures diff is from local commits.""" |
| |
| if upstream_git_obj is None: |
| if self.GetBranch() is None: |
| print('ERROR: Unable to determine current branch (detached HEAD?)', |
| file=sys.stderr) |
| else: |
| print('ERROR: No upstream branch.', file=sys.stderr) |
| return False |
| |
| # Verify the commit we're diffing against is in our current branch. |
| upstream_sha = RunGit(['rev-parse', '--verify', upstream_git_obj]).strip() |
| common_ancestor = RunGit(['merge-base', upstream_sha, 'HEAD']).strip() |
| if upstream_sha != common_ancestor: |
| print('ERROR: %s is not in the current branch. You may need to rebase ' |
| 'your tracking branch' % upstream_sha, file=sys.stderr) |
| return False |
| |
| # List the commits inside the diff, and verify they are all local. |
| commits_in_diff = RunGit( |
| ['rev-list', '^%s' % upstream_sha, 'HEAD']).splitlines() |
| code, remote_branch = RunGitWithCode(['config', 'gitcl.remotebranch']) |
| remote_branch = remote_branch.strip() |
| if code != 0: |
| _, remote_branch = self.GetRemoteBranch() |
| |
| commits_in_remote = RunGit( |
| ['rev-list', '^%s' % upstream_sha, remote_branch]).splitlines() |
| |
| common_commits = set(commits_in_diff) & set(commits_in_remote) |
| if common_commits: |
| print('ERROR: Your diff contains %d commits already in %s.\n' |
| 'Run "git log --oneline %s..HEAD" to get a list of commits in ' |
| 'the diff. If you are using a custom git flow, you can override' |
| ' the reference used for this check with "git config ' |
| 'gitcl.remotebranch <git-ref>".' % ( |
| len(common_commits), remote_branch, upstream_git_obj), |
| file=sys.stderr) |
| return False |
| return True |
| |
| def GetGitBaseUrlFromConfig(self): |
| """Return the configured base URL from branch.<branchname>.baseurl. |
| |
| Returns None if it is not set. |
| """ |
| return self._GitGetBranchConfigValue('base-url') |
| |
| def GetRemoteUrl(self): |
| """Return the configured remote URL, e.g. 'git://example.org/foo.git/'. |
| |
| Returns None if there is no remote. |
| """ |
| is_cached, value = self._cached_remote_url |
| if is_cached: |
| return value |
| |
| remote, _ = self.GetRemoteBranch() |
| url = RunGit(['config', 'remote.%s.url' % remote], error_ok=True).strip() |
| |
| # Check if the remote url can be parsed as a URL.
| host = urlparse.urlparse(url).netloc |
| if host: |
| self._cached_remote_url = (True, url) |
| return url |
| |
| # If it cannot be parsed as a URL, assume it is a local directory,
| # probably a git cache.
| logging.warning('"%s" doesn\'t appear to point to a git host. ' |
| 'Interpreting it as a local directory.', url) |
| if not os.path.isdir(url): |
| logging.error( |
| 'Remote "%s" for branch "%s" points to "%s", but it doesn\'t exist.', |
| remote, url, self.GetBranch()) |
| return None |
| |
| cache_path = url |
| url = RunGit(['config', 'remote.%s.url' % remote], |
| error_ok=True, |
| cwd=url).strip() |
| |
| host = urlparse.urlparse(url).netloc |
| if not host: |
| logging.error( |
| 'Remote "%(remote)s" for branch "%(branch)s" points to ' |
| '"%(cache_path)s", but it is misconfigured.\n' |
| '"%(cache_path)s" must be a git repo and must have a remote named ' |
| '"%(remote)s" pointing to the git host.', { |
| 'remote': remote, |
| 'cache_path': cache_path, |
| 'branch': self.GetBranch()}) |
| return None |
| |
| self._cached_remote_url = (True, url) |
| return url |
| |
| def GetIssue(self): |
| """Returns the issue number as a int or None if not set.""" |
| if self.issue is None and not self.lookedup_issue: |
| self.issue = self._GitGetBranchConfigValue( |
| self._codereview_impl.IssueConfigKey(), value_type=int) |
| self.lookedup_issue = True |
| return self.issue |
| |
| def GetIssueURL(self): |
| """Get the URL for a particular issue.""" |
| issue = self.GetIssue() |
| if not issue: |
| return None |
| return '%s/%s' % (self._codereview_impl.GetCodereviewServer(), issue) |
| |
| def GetDescription(self, pretty=False, force=False): |
| if not self.has_description or force: |
| if self.GetIssue(): |
| self.description = self._codereview_impl.FetchDescription(force=force) |
| self.has_description = True |
| if pretty: |
| # Set width to 72 columns + 2 space indent. |
| wrapper = textwrap.TextWrapper(width=74, replace_whitespace=True) |
| wrapper.initial_indent = wrapper.subsequent_indent = ' ' |
| lines = self.description.splitlines() |
| return '\n'.join([wrapper.fill(line) for line in lines]) |
| return self.description |
| |
| def GetDescriptionFooters(self): |
| """Returns (non_footer_lines, footers) for the commit message. |
| |
| Returns: |
| non_footer_lines (list(str)) - Simple list of description lines without |
| any footer. The lines do not contain newlines, nor does the list contain |
| the empty line between the message and the footers. |
| footers (list(tuple(KEY, VALUE))) - List of parsed footers, e.g. |
| [("Change-Id", "Ideadbeef...."), ...] |
| """ |
| raw_description = self.GetDescription() |
| msg_lines, _, footers = git_footers.split_footers(raw_description) |
| if footers: |
| msg_lines = msg_lines[:len(msg_lines) - 1] |
| return msg_lines, footers |
| |
| def GetPatchset(self): |
| """Returns the patchset number as a int or None if not set.""" |
| if self.patchset is None and not self.lookedup_patchset: |
| self.patchset = self._GitGetBranchConfigValue( |
| self._codereview_impl.PatchsetConfigKey(), value_type=int) |
| self.lookedup_patchset = True |
| return self.patchset |
| |
| def SetPatchset(self, patchset): |
| """Set this branch's patchset. If patchset=0, clears the patchset.""" |
| assert self.GetBranch() |
| if not patchset: |
| self.patchset = None |
| else: |
| self.patchset = int(patchset) |
| self._GitSetBranchConfigValue( |
| self._codereview_impl.PatchsetConfigKey(), self.patchset) |
| |
| def SetIssue(self, issue=None): |
| """Set this branch's issue. If issue isn't given, clears the issue.""" |
| assert self.GetBranch() |
| if issue: |
| issue = int(issue) |
| self._GitSetBranchConfigValue( |
| self._codereview_impl.IssueConfigKey(), issue) |
| self.issue = issue |
| codereview_server = self._codereview_impl.GetCodereviewServer() |
| if codereview_server: |
| self._GitSetBranchConfigValue( |
| self._codereview_impl.CodereviewServerConfigKey(), |
| codereview_server) |
| else: |
| # Reset all of these just to be clean. |
| reset_suffixes = [ |
| 'last-upload-hash', |
| self._codereview_impl.IssueConfigKey(), |
| self._codereview_impl.PatchsetConfigKey(), |
| self._codereview_impl.CodereviewServerConfigKey(), |
| ] + self._PostUnsetIssueProperties() |
| for prop in reset_suffixes: |
| self._GitSetBranchConfigValue(prop, None, error_ok=True) |
| msg = RunGit(['log', '-1', '--format=%B']).strip() |
| if msg and git_footers.get_footer_change_id(msg): |
| print('WARNING: The change patched into this branch has a Change-Id. ' |
| 'Removing it.') |
| RunGit(['commit', '--amend', '-m', |
| git_footers.remove_footer(msg, 'Change-Id')]) |
| self.lookedup_issue = True |
| self.issue = None |
| self.patchset = None |
| |
| def GetChange(self, upstream_branch, author, local_description=False): |
| if not self.GitSanityChecks(upstream_branch): |
| DieWithError('\nGit sanity check failure') |
| |
| root = settings.GetRelativeRoot() |
| if not root: |
| root = '.' |
| absroot = os.path.abspath(root) |
| |
| # We use the sha1 of HEAD as the name of this change.
| name = RunGitWithCode(['rev-parse', 'HEAD'])[1].strip() |
| # Need to pass a relative path for msysgit. |
| try: |
| files = scm.GIT.CaptureStatus([root], '.', upstream_branch) |
| except subprocess2.CalledProcessError: |
| DieWithError( |
| ('\nFailed to diff against upstream branch %s\n\n' |
| 'This branch probably doesn\'t exist anymore. To reset the\n' |
| 'tracking branch, please run\n' |
| ' git branch --set-upstream-to origin/master %s\n' |
| 'or replace origin/master with the relevant branch') % |
| (upstream_branch, self.GetBranch())) |
| |
| issue = self.GetIssue() |
| patchset = self.GetPatchset() |
| if issue and not local_description: |
| description = self.GetDescription() |
| else: |
| # If the change was never uploaded, use the log messages of all commits |
| # up to the branch point, as git cl upload will prefill the description |
| # with these log messages. |
| args = ['log', '--pretty=format:%s%n%n%b', '%s...' % (upstream_branch)] |
| description = RunGitWithCode(args)[1].strip() |
| |
| if not author: |
| author = RunGit(['config', 'user.email']).strip() or None |
| return presubmit_support.GitChange( |
| name, |
| description, |
| absroot, |
| files, |
| issue, |
| patchset, |
| author, |
| upstream=upstream_branch) |
| |
| def UpdateDescription(self, description, force=False): |
| self._codereview_impl.UpdateDescriptionRemote(description, force=force) |
| self.description = description |
| self.has_description = True |
| |
| def UpdateDescriptionFooters(self, description_lines, footers, force=False): |
| """Sets the description for this CL remotely. |
| |
| You can get description_lines and footers with GetDescriptionFooters. |
| |
| Args: |
| description_lines (list(str)) - List of CL description lines without |
| newline characters. |
| footers (list(tuple(KEY, VALUE))) - List of footers, as returned by |
| GetDescriptionFooters. Key must conform to the git footers format (i.e. |
| `List-Of-Tokens`). It will be case-normalized so that each token is |
| title-cased. |
| """ |
| new_description = '\n'.join(description_lines) |
| if footers: |
| new_description += '\n' |
| for k, v in footers: |
| foot = '%s: %s' % (git_footers.normalize_name(k), v) |
| if not git_footers.FOOTER_PATTERN.match(foot): |
| raise ValueError('Invalid footer %r' % foot) |
| new_description += foot + '\n' |
| self.UpdateDescription(new_description, force) |
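| # For example, footers == [('bug', '123')] is written back as 'Bug: 123',
| # since git_footers.normalize_name title-cases each key token.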
| |
| def RunHook(self, committing, may_prompt, verbose, change, parallel): |
| """Calls sys.exit() if the hook fails; returns a HookResults otherwise.""" |
| try: |
| start = time_time() |
| result = presubmit_support.DoPresubmitChecks(change, committing, |
| verbose=verbose, output_stream=sys.stdout, input_stream=sys.stdin, |
| default_presubmit=None, may_prompt=may_prompt, |
| gerrit_obj=self._codereview_impl.GetGerritObjForPresubmit(), |
| parallel=parallel) |
| metrics.collector.add_repeated('sub_commands', { |
| 'command': 'presubmit', |
| 'execution_time': time_time() - start, |
| 'exit_code': 0 if result.should_continue() else 1, |
| }) |
| return result |
| except presubmit_support.PresubmitFailure as e: |
| DieWithError('%s\nMaybe your depot_tools is out of date?' % e) |
| |
| def CMDPatchIssue(self, issue_arg, nocommit): |
| """Fetches and applies the issue patch from codereview to local branch.""" |
| if isinstance(issue_arg, (int, long)) or issue_arg.isdigit(): |
| parsed_issue_arg = _ParsedIssueNumberArgument(int(issue_arg)) |
| else: |
| # Assume url. |
| parsed_issue_arg = self._codereview_impl.ParseIssueURL( |
| urlparse.urlparse(issue_arg)) |
| if not parsed_issue_arg or not parsed_issue_arg.valid: |
| DieWithError('Failed to parse issue argument "%s". ' |
| 'Must be an issue number or a valid URL.' % issue_arg) |
| return self._codereview_impl.CMDPatchWithParsedIssue( |
| parsed_issue_arg, nocommit, False) |
| |
| def CMDUpload(self, options, git_diff_args, orig_args): |
| """Uploads a change to codereview.""" |
| custom_cl_base = None |
| if git_diff_args: |
| custom_cl_base = base_branch = git_diff_args[0] |
| else: |
| if self.GetBranch() is None: |
| DieWithError('Can\'t upload from detached HEAD state. Get on a branch!') |
| |
| # Default to diffing against common ancestor of upstream branch |
| base_branch = self.GetCommonAncestorWithUpstream() |
| git_diff_args = [base_branch, 'HEAD'] |
| |
| # Fast best-effort checks to abort before running potentially expensive |
| # hooks if uploading is likely to fail anyway. Passing these checks does |
| # not guarantee that uploading will not fail. |
| self._codereview_impl.EnsureAuthenticated(force=options.force) |
| self._codereview_impl.EnsureCanUploadPatchset(force=options.force) |
| |
| # Apply watchlists on upload. |
| change = self.GetChange(base_branch, None) |
| watchlist = watchlists.Watchlists(change.RepositoryRoot()) |
| files = [f.LocalPath() for f in change.AffectedFiles()] |
| if not options.bypass_watchlists: |
| self.ExtendCC(watchlist.GetWatchersForPaths(files)) |
| |
| if not options.bypass_hooks: |
| if options.reviewers or options.tbrs or options.add_owners_to: |
| # Set the reviewer list now so that presubmit checks can access it. |
| change_description = ChangeDescription(change.FullDescriptionText()) |
| change_description.update_reviewers(options.reviewers, |
| options.tbrs, |
| options.add_owners_to, |
| change) |
| change.SetDescriptionText(change_description.description) |
| hook_results = self.RunHook(committing=False, |
| may_prompt=not options.force, |
| verbose=options.verbose, |
| change=change, parallel=options.parallel) |
| if not hook_results.should_continue(): |
| return 1 |
| if not options.reviewers and hook_results.reviewers: |
| options.reviewers = hook_results.reviewers.split(',') |
| self.ExtendCC(hook_results.more_cc) |
| |
| print_stats(git_diff_args) |
| ret = self.CMDUploadChange(options, git_diff_args, custom_cl_base, change) |
| if not ret: |
| _git_set_branch_config_value('last-upload-hash', |
| RunGit(['rev-parse', 'HEAD']).strip()) |
| # Run post upload hooks, if specified. |
| if settings.GetRunPostUploadHook(): |
| presubmit_support.DoPostUploadExecuter( |
| change, |
| self, |
| settings.GetRoot(), |
| options.verbose, |
| sys.stdout) |
| |
| # Upload all dependencies if specified. |
| if options.dependencies: |
| print() |
| print('--dependencies has been specified.') |
| print('All dependent local branches will be re-uploaded.') |
| print() |
| # Remove the dependencies flag from args so that we do not end up in a |
| # loop. |
| orig_args.remove('--dependencies') |
| ret = upload_branch_deps(self, orig_args) |
| return ret |
| |
| def SetCQState(self, new_state): |
| """Updates the CQ state for the latest patchset. |
| |
| Issue must have been already uploaded and known. |
| """ |
| assert new_state in _CQState.ALL_STATES |
| assert self.GetIssue() |
| try: |
| self._codereview_impl.SetCQState(new_state) |
| return 0 |
| except KeyboardInterrupt: |
| raise |
| except: |
| print('WARNING: Failed to %s.\n' |
| 'Either:\n' |
| ' * Your project has no CQ,\n' |
| ' * You don\'t have permission to change the CQ state,\n' |
| ' * There\'s a bug in this code (see stack trace below).\n' |
| 'Consider specifying which bots to trigger manually or asking your ' |
| 'project owners for permissions or contacting Chrome Infra at:\n' |
| 'https://www.chromium.org/infra\n\n' % |
| ('cancel CQ' if new_state == _CQState.NONE else 'trigger CQ')) |
| # Still raise exception so that stack trace is printed. |
| raise |
| |
| # Forward methods to codereview specific implementation. |
| |
| def AddComment(self, message, publish=None): |
| return self._codereview_impl.AddComment(message, publish=publish) |
| |
| def GetCommentsSummary(self, readable=True): |
| """Returns list of _CommentSummary for each comment. |
| |
| Args:
| readable: determines whether the output is designed for a human or a
| machine.
| """ |
| return self._codereview_impl.GetCommentsSummary(readable) |
| |
| def CloseIssue(self): |
| return self._codereview_impl.CloseIssue() |
| |
| def GetStatus(self): |
| return self._codereview_impl.GetStatus() |
| |
| def GetCodereviewServer(self): |
| return self._codereview_impl.GetCodereviewServer() |
| |
| def GetIssueOwner(self): |
| """Get owner from codereview, which may differ from this checkout.""" |
| return self._codereview_impl.GetIssueOwner() |
| |
| def GetReviewers(self): |
| return self._codereview_impl.GetReviewers() |
| |
| def GetMostRecentPatchset(self): |
| return self._codereview_impl.GetMostRecentPatchset() |
| |
| def CannotTriggerTryJobReason(self): |
| """Returns reason (str) if unable trigger tryjobs on this CL or None.""" |
| return self._codereview_impl.CannotTriggerTryJobReason() |
| |
| def GetTryJobProperties(self, patchset=None): |
| """Returns dictionary of properties to launch tryjob.""" |
| return self._codereview_impl.GetTryJobProperties(patchset=patchset) |
| |
| def __getattr__(self, attr): |
    # Lots of untested code accesses Rietveld-specific stuff directly, and
    # it's hard to fix for sure, so just let it work and fix it on a
    # case-by-case basis.
    # Note that the child class defines __getattr__ as well, and forwards it
    # here, because _RietveldChangelistImpl is not cleaned up yet. Given the
    # deprecation of Rietveld, it should probably just be removed.
    # Until then, avoid infinite recursion by bypassing __getattr__
    # of the implementation class.
| return self._codereview_impl.__getattribute__(attr) |
| |
| |
| class _ChangelistCodereviewBase(object): |
| """Abstract base class encapsulating codereview specifics of a changelist.""" |
| def __init__(self, changelist): |
| self._changelist = changelist # instance of Changelist |
| |
| def __getattr__(self, attr): |
| # Forward methods to changelist. |
| # TODO(tandrii): maybe clean up _GerritChangelistImpl and |
| # _RietveldChangelistImpl to avoid this hack? |
| return getattr(self._changelist, attr) |
| |
| def GetStatus(self): |
| """Apply a rough heuristic to give a simple summary of an issue's review |
| or CQ status, assuming adherence to a common workflow. |
| |
| Returns None if no issue for this branch, or specific string keywords. |
| """ |
| raise NotImplementedError() |
| |
| def GetCodereviewServer(self): |
| """Returns server URL without end slash, like "https://codereview.com".""" |
| raise NotImplementedError() |
| |
| def FetchDescription(self, force=False): |
| """Fetches and returns description from the codereview server.""" |
| raise NotImplementedError() |
| |
| @classmethod |
| def IssueConfigKey(cls): |
| """Returns branch setting storing issue number.""" |
| raise NotImplementedError() |
| |
| @classmethod |
| def PatchsetConfigKey(cls): |
| """Returns branch setting storing patchset number.""" |
| raise NotImplementedError() |
| |
| @classmethod |
| def CodereviewServerConfigKey(cls): |
| """Returns branch setting storing codereview server.""" |
| raise NotImplementedError() |
| |
| def _PostUnsetIssueProperties(self): |
| """Which branch-specific properties to erase when unsetting issue.""" |
| return [] |
| |
| def GetGerritObjForPresubmit(self): |
    # None is a valid return value; otherwise this returns a
    # presubmit_support.GerritAccessor.
| return None |
| |
| def UpdateDescriptionRemote(self, description, force=False): |
| """Update the description on codereview site.""" |
| raise NotImplementedError() |
| |
| def AddComment(self, message, publish=None): |
| """Posts a comment to the codereview site.""" |
| raise NotImplementedError() |
| |
| def GetCommentsSummary(self, readable=True): |
| raise NotImplementedError() |
| |
| def CloseIssue(self): |
| """Closes the issue.""" |
| raise NotImplementedError() |
| |
| def GetMostRecentPatchset(self): |
| """Returns the most recent patchset number from the codereview site.""" |
| raise NotImplementedError() |
| |
| def CMDPatchWithParsedIssue(self, parsed_issue_arg, nocommit, force): |
| """Fetches and applies the issue. |
| |
| Arguments: |
| parsed_issue_arg: instance of _ParsedIssueNumberArgument. |
      nocommit: do not commit the patch, thus leaving the tree dirty.
      force: reset the checkout to the fetched commit, discarding local
          changes.
| """ |
| raise NotImplementedError() |
| |
| @staticmethod |
| def ParseIssueURL(parsed_url): |
| """Parses url and returns instance of _ParsedIssueNumberArgument or None if |
| failed.""" |
| raise NotImplementedError() |
| |
| def EnsureAuthenticated(self, force, refresh=False): |
| """Best effort check that user is authenticated with codereview server. |
| |
| Arguments: |
| force: whether to skip confirmation questions. |
| refresh: whether to attempt to refresh credentials. Ignored if not |
| applicable. |
| """ |
| raise NotImplementedError() |
| |
| def EnsureCanUploadPatchset(self, force): |
| """Best effort check that uploading isn't supposed to fail for predictable |
| reasons. |
| |
| This method should raise informative exception if uploading shouldn't |
| proceed. |
| |
| Arguments: |
| force: whether to skip confirmation questions. |
| """ |
| raise NotImplementedError() |
| |
| def CMDUploadChange(self, options, git_diff_args, custom_cl_base, change): |
| """Uploads a change to codereview.""" |
| raise NotImplementedError() |
| |
| def SetCQState(self, new_state): |
| """Updates the CQ state for the latest patchset. |
| |
    The issue must have already been uploaded and known.
| """ |
| raise NotImplementedError() |
| |
| def CannotTriggerTryJobReason(self): |
| """Returns reason (str) if unable trigger tryjobs on this CL or None.""" |
| raise NotImplementedError() |
| |
| def GetIssueOwner(self): |
| raise NotImplementedError() |
| |
| def GetReviewers(self): |
| raise NotImplementedError() |
| |
| def GetTryJobProperties(self, patchset=None): |
| raise NotImplementedError() |
| |
| |
| class _GerritChangelistImpl(_ChangelistCodereviewBase): |
| def __init__(self, changelist, auth_config=None, codereview_host=None): |
    # auth_config is a Rietveld thing, kept here only to preserve the
    # interface.
| super(_GerritChangelistImpl, self).__init__(changelist) |
| self._change_id = None |
| # Lazily cached values. |
| self._gerrit_host = None # e.g. chromium-review.googlesource.com |
| self._gerrit_server = None # e.g. https://chromium-review.googlesource.com |
| # Map from change number (issue) to its detail cache. |
| self._detail_cache = {} |
| |
| if codereview_host is not None: |
| assert not codereview_host.startswith('https://'), codereview_host |
| self._gerrit_host = codereview_host |
| self._gerrit_server = 'https://%s' % codereview_host |
| |
| def _GetGerritHost(self): |
| # Lazy load of configs. |
| self.GetCodereviewServer() |
| if self._gerrit_host and '.' not in self._gerrit_host: |
| # Abbreviated domain like "chromium" instead of chromium.googlesource.com. |
| # This happens for internal stuff http://crbug.com/614312. |
| parsed = urlparse.urlparse(self.GetRemoteUrl()) |
| if parsed.scheme == 'sso': |
| print('WARNING: using non-https URLs for remote is likely broken\n' |
| ' Your current remote is: %s' % self.GetRemoteUrl()) |
| self._gerrit_host = '%s.googlesource.com' % self._gerrit_host |
| self._gerrit_server = 'https://%s' % self._gerrit_host |
| return self._gerrit_host |
| |
| def _GetGitHost(self): |
| """Returns git host to be used when uploading change to Gerrit.""" |
| remote_url = self.GetRemoteUrl() |
| if not remote_url: |
| return None |
| return urlparse.urlparse(remote_url).netloc |
| |
| def GetCodereviewServer(self): |
| if not self._gerrit_server: |
| # If we're on a branch then get the server potentially associated |
| # with that branch. |
| if self.GetIssue(): |
| self._gerrit_server = self._GitGetBranchConfigValue( |
| self.CodereviewServerConfigKey()) |
| if self._gerrit_server: |
| self._gerrit_host = urlparse.urlparse(self._gerrit_server).netloc |
| if not self._gerrit_server: |
        # We assume the repo is hosted on Gerrit, and hence the Gerrit server
        # has a "-review" suffix on its lowest-level subdomain.
| parts = self._GetGitHost().split('.') |
| parts[0] = parts[0] + '-review' |
| self._gerrit_host = '.'.join(parts) |
| self._gerrit_server = 'https://%s' % self._gerrit_host |
| return self._gerrit_server |
| |
| def _GetGerritProject(self): |
| """Returns Gerrit project name based on remote git URL.""" |
| remote_url = self.GetRemoteUrl() |
| if remote_url is None: |
      logging.warning('can\'t detect Gerrit project.')
| return None |
| project = urlparse.urlparse(remote_url).path.strip('/') |
| if project.endswith('.git'): |
| project = project[:-len('.git')] |
| # *.googlesource.com hosts ensure that Git/Gerrit projects don't start with |
| # 'a/' prefix, because 'a/' prefix is used to force authentication in |
| # gitiles/git-over-https protocol. E.g., |
| # https://chromium.googlesource.com/a/v8/v8 refers to the same repo/project |
| # as |
| # https://chromium.googlesource.com/v8/v8 |
| if project.startswith('a/'): |
| project = project[len('a/'):] |
| return project |
| |
| def _GerritChangeIdentifier(self): |
| """Handy method for gerrit_util.ChangeIdentifier for a given CL. |
| |
| Not to be confused by value of "Change-Id:" footer. |
| If Gerrit project can be determined, this will speed up Gerrit HTTP API RPC. |
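
    For example (illustrative values): with a detected project this returns
    gerrit_util.ChangeIdentifier('chromium/src', 123456); without one it
    falls back to the bare change number '123456'.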
| """ |
| project = self._GetGerritProject() |
| if project: |
| return gerrit_util.ChangeIdentifier(project, self.GetIssue()) |
    # Fall back on the change number, which is still unique but less
    # efficient.
| return str(self.GetIssue()) |
| |
| @classmethod |
| def IssueConfigKey(cls): |
| return 'gerritissue' |
| |
| @classmethod |
| def PatchsetConfigKey(cls): |
| return 'gerritpatchset' |
| |
| @classmethod |
| def CodereviewServerConfigKey(cls): |
| return 'gerritserver' |
| |
| def EnsureAuthenticated(self, force, refresh=None): |
| """Best effort check that user is authenticated with Gerrit server.""" |
| if settings.GetGerritSkipEnsureAuthenticated(): |
| # For projects with unusual authentication schemes. |
| # See http://crbug.com/603378. |
| return |
| |
| # Check presence of cookies only if using cookies-based auth method. |
| cookie_auth = gerrit_util.Authenticator.get() |
| if not isinstance(cookie_auth, gerrit_util.CookiesAuthenticator): |
| return |
| |
| if urlparse.urlparse(self.GetRemoteUrl()).scheme != 'https': |
| print('WARNING: Ignoring branch %s with non-https remote %s' % |
| (self._changelist.branch, self.GetRemoteUrl())) |
| return |
| |
| # Lazy-loader to identify Gerrit and Git hosts. |
| self.GetCodereviewServer() |
| git_host = self._GetGitHost() |
| assert self._gerrit_server and self._gerrit_host and git_host |
| |
| gerrit_auth = cookie_auth.get_auth_header(self._gerrit_host) |
| git_auth = cookie_auth.get_auth_header(git_host) |
| if gerrit_auth and git_auth: |
| if gerrit_auth == git_auth: |
| return |
| all_gsrc = cookie_auth.get_auth_header('d0esN0tEx1st.googlesource.com') |
| print( |
| 'WARNING: You have different credentials for Gerrit and git hosts:\n' |
| ' %s\n' |
| ' %s\n' |
| ' Consider running the following command:\n' |
| ' git cl creds-check\n' |
| ' %s\n' |
| ' %s' % |
| (git_host, self._gerrit_host, |
| ('Hint: delete creds for .googlesource.com' if all_gsrc else ''), |
| cookie_auth.get_new_password_message(git_host))) |
| if not force: |
| confirm_or_exit('If you know what you are doing', action='continue') |
| return |
| else: |
| missing = ( |
| ([] if gerrit_auth else [self._gerrit_host]) + |
| ([] if git_auth else [git_host])) |
| DieWithError('Credentials for the following hosts are required:\n' |
| ' %s\n' |
| 'These are read from %s (or legacy %s)\n' |
| '%s' % ( |
| '\n '.join(missing), |
| cookie_auth.get_gitcookies_path(), |
| cookie_auth.get_netrc_path(), |
| cookie_auth.get_new_password_message(git_host))) |
| |
| def EnsureCanUploadPatchset(self, force): |
| if not self.GetIssue(): |
| return |
| |
| # Warm change details cache now to avoid RPCs later, reducing latency for |
| # developers. |
| self._GetChangeDetail( |
| ['DETAILED_ACCOUNTS', 'CURRENT_REVISION', 'CURRENT_COMMIT', 'LABELS']) |
| |
| status = self._GetChangeDetail()['status'] |
| if status in ('MERGED', 'ABANDONED'): |
| DieWithError('Change %s has been %s, new uploads are not allowed' % |
| (self.GetIssueURL(), |
| 'submitted' if status == 'MERGED' else 'abandoned')) |
| |
| # TODO(vadimsh): For some reason the chunk of code below was skipped if |
| # 'is_gce' is True. I'm just refactoring it to be 'skip if not cookies'. |
| # Apparently this check is not very important? Otherwise get_auth_email |
| # could have been added to other implementations of Authenticator. |
| cookies_auth = gerrit_util.Authenticator.get() |
| if not isinstance(cookies_auth, gerrit_util.CookiesAuthenticator): |
| return |
| |
| cookies_user = cookies_auth.get_auth_email(self._GetGerritHost()) |
| if self.GetIssueOwner() == cookies_user: |
| return |
| logging.debug('change %s owner is %s, cookies user is %s', |
| self.GetIssue(), self.GetIssueOwner(), cookies_user) |
    # Maybe the user has linked accounts or something like that,
    # so ask Gerrit what it thinks of this user.
| details = gerrit_util.GetAccountDetails(self._GetGerritHost(), 'self') |
| if details['email'] == self.GetIssueOwner(): |
| return |
| if not force: |
| print('WARNING: Change %s is owned by %s, but you authenticate to Gerrit ' |
| 'as %s.\n' |
| 'Uploading may fail due to lack of permissions.' % |
| (self.GetIssue(), self.GetIssueOwner(), details['email'])) |
| confirm_or_exit(action='upload') |
| |
| def _PostUnsetIssueProperties(self): |
| """Which branch-specific properties to erase when unsetting issue.""" |
| return ['gerritsquashhash'] |
| |
| def GetGerritObjForPresubmit(self): |
| return presubmit_support.GerritAccessor(self._GetGerritHost()) |
| |
| def GetStatus(self): |
| """Applies a rough heuristic to give a simple summary of an issue's review |
| or CQ status, assuming adherence to a common workflow. |
| |
| Returns None if no issue for this branch, or one of the following keywords: |
| * 'error' - error from review tool (including deleted issues) |
| * 'unsent' - no reviewers added |
| * 'waiting' - waiting for review |
| * 'reply' - waiting for uploader to reply to review |
| * 'lgtm' - Code-Review label has been set |
| * 'dry-run' - dry-running in the CQ |
| * 'commit' - in the CQ |
| * 'closed' - successfully submitted or abandoned |
| """ |
| if not self.GetIssue(): |
| return None |
| |
| try: |
| data = self._GetChangeDetail([ |
| 'DETAILED_LABELS', 'CURRENT_REVISION', 'SUBMITTABLE']) |
| except (httplib.HTTPException, GerritChangeNotExists): |
| return 'error' |
| |
| if data['status'] in ('ABANDONED', 'MERGED'): |
| return 'closed' |
| |
| cq_label = data['labels'].get('Commit-Queue', {}) |
| max_cq_vote = 0 |
| for vote in cq_label.get('all', []): |
| max_cq_vote = max(max_cq_vote, vote.get('value', 0)) |
| if max_cq_vote == 2: |
| return 'commit' |
| if max_cq_vote == 1: |
| return 'dry-run' |
| |
| if data['labels'].get('Code-Review', {}).get('approved'): |
| return 'lgtm' |
| |
| if not data.get('reviewers', {}).get('REVIEWER', []): |
| return 'unsent' |
| |
    owner = data['owner'].get('_account_id')
    messages = sorted(data.get('messages', []), key=lambda m: m.get('updated'))
    while messages:
      last_message_author = messages.pop().get('author', {})
      if not last_message_author:
        break
      if last_message_author.get('email') == COMMIT_BOT_EMAIL:
        # Ignore replies from the CQ.
        continue
      if last_message_author.get('_account_id') == owner:
        # The most recent message was by the owner.
        return 'waiting'
      # Some reply from a non-owner.
      return 'reply'
| |
| # Somehow there are no messages even though there are reviewers. |
| return 'unsent' |
| |
| def GetMostRecentPatchset(self): |
| data = self._GetChangeDetail(['CURRENT_REVISION']) |
| patchset = data['revisions'][data['current_revision']]['_number'] |
| self.SetPatchset(patchset) |
| return patchset |
| |
| def FetchDescription(self, force=False): |
| data = self._GetChangeDetail(['CURRENT_REVISION', 'CURRENT_COMMIT'], |
| no_cache=force) |
| current_rev = data['current_revision'] |
| return data['revisions'][current_rev]['commit']['message'].encode( |
| 'utf-8', 'ignore') |
| |
| def UpdateDescriptionRemote(self, description, force=False): |
| if gerrit_util.HasPendingChangeEdit( |
| self._GetGerritHost(), self._GerritChangeIdentifier()): |
| if not force: |
| confirm_or_exit( |
| 'The description cannot be modified while the issue has a pending ' |
| 'unpublished edit. Either publish the edit in the Gerrit web UI ' |
| 'or delete it.\n\n', action='delete the unpublished edit') |
| |
| gerrit_util.DeletePendingChangeEdit( |
| self._GetGerritHost(), self._GerritChangeIdentifier()) |
| gerrit_util.SetCommitMessage( |
| self._GetGerritHost(), self._GerritChangeIdentifier(), |
| description, notify='NONE') |
| |
| def AddComment(self, message, publish=None): |
| gerrit_util.SetReview( |
| self._GetGerritHost(), self._GerritChangeIdentifier(), |
| msg=message, ready=publish) |
| |
| def GetCommentsSummary(self, readable=True): |
    # DETAILED_ACCOUNTS is used to get the emails in the accounts.
    # CURRENT_REVISION is included to get the latest patchset, so that
    # only the robot comments from the latest patchset are shown.
| messages = self._GetChangeDetail( |
| options=['MESSAGES', 'DETAILED_ACCOUNTS', |
| 'CURRENT_REVISION']).get('messages', []) |
| file_comments = gerrit_util.GetChangeComments( |
| self._GetGerritHost(), self._GerritChangeIdentifier()) |
| robot_file_comments = gerrit_util.GetChangeRobotComments( |
| self._GetGerritHost(), self._GerritChangeIdentifier()) |
| |
    # Add the robot comments onto the list of comments, but only
    # keep those that are from the latest patchset.
| latest_patch_set = self.GetMostRecentPatchset() |
| for path, robot_comments in robot_file_comments.iteritems(): |
| line_comments = file_comments.setdefault(path, []) |
| line_comments.extend( |
| [c for c in robot_comments if c['patch_set'] == latest_patch_set]) |
| |
| # Build dictionary of file comments for easy access and sorting later. |
| # {author+date: {path: {patchset: {line: url+message}}}} |
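    # e.g. (illustrative):
    # {('reviewer@example.com', '2019-01-01 00:00:00.000000000'):
    #     {'src/foo.cc': {'PS2': {42: ('https://host/c/1/2/src/foo.cc#42',
    #                                  'nit: rename this variable')}}}}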
| comments = collections.defaultdict( |
| lambda: collections.defaultdict(lambda: collections.defaultdict(dict))) |
| for path, line_comments in file_comments.iteritems(): |
| for comment in line_comments: |
| tag = comment.get('tag', '') |
| if tag.startswith('autogenerated') and 'robot_id' not in comment: |
| continue |
| key = (comment['author']['email'], comment['updated']) |
| if comment.get('side', 'REVISION') == 'PARENT': |
| patchset = 'Base' |
| else: |
| patchset = 'PS%d' % comment['patch_set'] |
| line = comment.get('line', 0) |
| url = ('https://%s/c/%s/%s/%s#%s%s' % |
| (self._GetGerritHost(), self.GetIssue(), comment['patch_set'], path, |
| 'b' if comment.get('side') == 'PARENT' else '', |
| str(line) if line else '')) |
| comments[key][path][patchset][line] = (url, comment['message']) |
| |
| summaries = [] |
| for msg in messages: |
| summary = self._BuildCommentSummary(msg, comments, readable) |
| if summary: |
| summaries.append(summary) |
| return summaries |
| |
| @staticmethod |
| def _BuildCommentSummary(msg, comments, readable): |
| key = (msg['author']['email'], msg['date']) |
    # Don't bother showing autogenerated messages that don't have associated
    # file or line comments. This will filter out most autogenerated
    # messages, but will keep robot comments like those from Tricium.
| is_autogenerated = msg.get('tag', '').startswith('autogenerated') |
| if is_autogenerated and not comments.get(key): |
| return None |
| message = msg['message'] |
| # Gerrit spits out nanoseconds. |
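    # e.g. (illustrative) '2017-03-04 05:06:07.000000000'.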
| assert len(msg['date'].split('.')[-1]) == 9 |
| date = datetime.datetime.strptime(msg['date'][:-3], |
| '%Y-%m-%d %H:%M:%S.%f') |
| if key in comments: |
| message += '\n' |
| for path, patchsets in sorted(comments.get(key, {}).items()): |
| if readable: |
| message += '\n%s' % path |
| for patchset, lines in sorted(patchsets.items()): |
| for line, (url, content) in sorted(lines.items()): |
| if line: |
| line_str = 'Line %d' % line |
| path_str = '%s:%d:' % (path, line) |
| else: |
| line_str = 'File comment' |
| path_str = '%s:0:' % path |
| if readable: |
| message += '\n %s, %s: %s' % (patchset, line_str, url) |
| message += '\n %s\n' % content |
| else: |
| message += '\n%s ' % path_str |
| message += '\n%s\n' % content |
| |
| return _CommentSummary( |
| date=date, |
| message=message, |
| sender=msg['author']['email'], |
| autogenerated=is_autogenerated, |
| # These could be inferred from the text messages and correlated with |
| # Code-Review label maximum, however this is not reliable. |
| # Leaving as is until the need arises. |
| approval=False, |
| disapproval=False, |
| ) |
| |
| def CloseIssue(self): |
| gerrit_util.AbandonChange( |
| self._GetGerritHost(), self._GerritChangeIdentifier(), msg='') |
| |
| def SubmitIssue(self, wait_for_merge=True): |
| gerrit_util.SubmitChange( |
| self._GetGerritHost(), self._GerritChangeIdentifier(), |
| wait_for_merge=wait_for_merge) |
| |
| def _GetChangeDetail(self, options=None, no_cache=False): |
| """Returns details of associated Gerrit change and caching results. |
| |
| If fresh data is needed, set no_cache=True which will clear cache and |
| thus new data will be fetched from Gerrit. |
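
    A minimal usage sketch (illustrative):
      detail = self._GetChangeDetail(['CURRENT_REVISION'])
      # A later call requesting a subset of previously fetched options is
      # served from the cache without another RPC:
      detail = self._GetChangeDetail()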
| """ |
| options = options or [] |
| assert self.GetIssue(), 'issue is required to query Gerrit' |
| |
| # Optimization to avoid multiple RPCs: |
| if (('CURRENT_REVISION' in options or 'ALL_REVISIONS' in options) and |
| 'CURRENT_COMMIT' not in options): |
| options.append('CURRENT_COMMIT') |
| |
| # Normalize issue and options for consistent keys in cache. |
| cache_key = str(self.GetIssue()) |
| options = [o.upper() for o in options] |
| |
| # Check in cache first unless no_cache is True. |
| if no_cache: |
| self._detail_cache.pop(cache_key, None) |
| else: |
| options_set = frozenset(options) |
| for cached_options_set, data in self._detail_cache.get(cache_key, []): |
        # Assumption: data previously fetched with a superset of options is
        # suitable to return for a request with a subset of those options.
        # For example, if we cached data for
        #     options=[CURRENT_REVISION, DETAILED_FOOTERS],
        # and the request is for options=[CURRENT_REVISION],
        # then we can return the prior cached data.
| if options_set.issubset(cached_options_set): |
| return data |
| |
| try: |
| data = gerrit_util.GetChangeDetail( |
| self._GetGerritHost(), self._GerritChangeIdentifier(), options) |
| except gerrit_util.GerritError as e: |
| if e.http_status == 404: |
| raise GerritChangeNotExists(self.GetIssue(), self.GetCodereviewServer()) |
| raise |
| |
| self._detail_cache.setdefault(cache_key, []).append( |
| (frozenset(options), data)) |
| return data |
| |
| def _GetChangeCommit(self): |
| assert self.GetIssue(), 'issue must be set to query Gerrit' |
| try: |
| data = gerrit_util.GetChangeCommit( |
| self._GetGerritHost(), self._GerritChangeIdentifier()) |
| except gerrit_util.GerritError as e: |
| if e.http_status == 404: |
| raise GerritChangeNotExists(self.GetIssue(), self.GetCodereviewServer()) |
| raise |
| return data |
| |
| def _IsCqConfigured(self): |
| detail = self._GetChangeDetail(['LABELS']) |
| if u'Commit-Queue' not in detail.get('labels', {}): |
| return False |
| # TODO(crbug/753213): Remove temporary hack |
| if ('https://chromium.googlesource.com/chromium/src' == |
| self._changelist.GetRemoteUrl() and |
| detail['branch'].startswith('refs/branch-heads/')): |
| return False |
| return True |
| |
| def CMDLand(self, force, bypass_hooks, verbose, parallel): |
| if git_common.is_dirty_git_tree('land'): |
| return 1 |
| |
| detail = self._GetChangeDetail(['CURRENT_REVISION', 'LABELS']) |
| if not force and self._IsCqConfigured(): |
| confirm_or_exit('\nIt seems this repository has a CQ, ' |
| 'which can test and land changes for you. ' |
| 'Are you sure you wish to bypass it?\n', |
| action='bypass CQ') |
| differs = True |
| last_upload = self._GitGetBranchConfigValue('gerritsquashhash') |
| # Note: git diff outputs nothing if there is no diff. |
| if not last_upload or RunGit(['diff', last_upload]).strip(): |
| print('WARNING: Some changes from local branch haven\'t been uploaded.') |
| else: |
| if detail['current_revision'] == last_upload: |
| differs = False |
| else: |
| print('WARNING: Local branch contents differ from latest uploaded ' |
| 'patchset.') |
| if differs: |
| if not force: |
| confirm_or_exit( |
| 'Do you want to submit latest Gerrit patchset and bypass hooks?\n', |
| action='submit') |
| print('WARNING: Bypassing hooks and submitting latest uploaded patchset.') |
| elif not bypass_hooks: |
| hook_results = self.RunHook( |
| committing=True, |
| may_prompt=not force, |
| verbose=verbose, |
| change=self.GetChange(self.GetCommonAncestorWithUpstream(), None), |
| parallel=parallel) |
| if not hook_results.should_continue(): |
| return 1 |
| |
| self.SubmitIssue(wait_for_merge=True) |
| print('Issue %s has been submitted.' % self.GetIssueURL()) |
| links = self._GetChangeCommit().get('web_links', []) |
| for link in links: |
| if link.get('name') == 'gitiles' and link.get('url'): |
| print('Landed as: %s' % link.get('url')) |
| break |
| return 0 |
| |
| def CMDPatchWithParsedIssue(self, parsed_issue_arg, nocommit, force): |
| assert parsed_issue_arg.valid |
| |
| self._changelist.issue = parsed_issue_arg.issue |
| |
| if parsed_issue_arg.hostname: |
| self._gerrit_host = parsed_issue_arg.hostname |
| self._gerrit_server = 'https://%s' % self._gerrit_host |
| |
| try: |
| detail = self._GetChangeDetail(['ALL_REVISIONS']) |
| except GerritChangeNotExists as e: |
| DieWithError(str(e)) |
| |
| if not parsed_issue_arg.patchset: |
| # Use current revision by default. |
| revision_info = detail['revisions'][detail['current_revision']] |
| patchset = int(revision_info['_number']) |
| else: |
| patchset = parsed_issue_arg.patchset |
| for revision_info in detail['revisions'].itervalues(): |
| if int(revision_info['_number']) == parsed_issue_arg.patchset: |
| break |
| else: |
| DieWithError('Couldn\'t find patchset %i in change %i' % |
| (parsed_issue_arg.patchset, self.GetIssue())) |
| |
| remote_url = self._changelist.GetRemoteUrl() |
| if remote_url.endswith('.git'): |
| remote_url = remote_url[:-len('.git')] |
| remote_url = remote_url.rstrip('/') |
| |
| fetch_info = revision_info['fetch']['http'] |
| fetch_info['url'] = fetch_info['url'].rstrip('/') |
| |
| if remote_url != fetch_info['url']: |
| DieWithError('Trying to patch a change from %s but this repo appears ' |
| 'to be %s.' % (fetch_info['url'], remote_url)) |
| |
| RunGit(['fetch', fetch_info['url'], fetch_info['ref']]) |
| |
| if force: |
| RunGit(['reset', '--hard', 'FETCH_HEAD']) |
| print('Checked out commit for change %i patchset %i locally' % |
| (parsed_issue_arg.issue, patchset)) |
| elif nocommit: |
| RunGit(['cherry-pick', '--no-commit', 'FETCH_HEAD']) |
| print('Patch applied to index.') |
| else: |
| RunGit(['cherry-pick', 'FETCH_HEAD']) |
| print('Committed patch for change %i patchset %i locally.' % |
| (parsed_issue_arg.issue, patchset)) |
| print('Note: this created a local commit which does not have ' |
| 'the same hash as the one uploaded for review. This will make ' |
| 'uploading changes based on top of this branch difficult.\n' |
| 'If you want to do that, use "git cl patch --force" instead.') |
| |
| if self.GetBranch(): |
| self.SetIssue(parsed_issue_arg.issue) |
| self.SetPatchset(patchset) |
| fetched_hash = RunGit(['rev-parse', 'FETCH_HEAD']).strip() |
| self._GitSetBranchConfigValue('last-upload-hash', fetched_hash) |
| self._GitSetBranchConfigValue('gerritsquashhash', fetched_hash) |
| else: |
| print('WARNING: You are in detached HEAD state.\n' |
| 'The patch has been applied to your checkout, but you will not be ' |
| 'able to upload a new patch set to the gerrit issue.\n' |
| 'Try using the \'-b\' option if you would like to work on a ' |
| 'branch and/or upload a new patch set.') |
| |
| return 0 |
| |
| @staticmethod |
| def ParseIssueURL(parsed_url): |
| if not parsed_url.scheme or not parsed_url.scheme.startswith('http'): |
| return None |
    # Gerrit's new UI is https://domain/c/project/+/<issue_number>[/[patchset]]
    # but the old GWT UI is
    # https://domain/#/c/project/+/<issue_number>[/[patchset]].
    # Short URLs like https://domain/<issue_number> can also be used, but
    # Gerrit doesn't support specifying a patchset on them (you'd 404); we
    # accept one here anyway.
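    # For example (illustrative):
    #   https://chromium-review.googlesource.com/c/chromium/src/+/123456/3
    #     -> issue=123456, patchset=3
    #   https://chromium-review.googlesource.com/123456
    #     -> issue=123456, patchset=None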
| if parsed_url.path == '/': |
| part = parsed_url.fragment |
| else: |
| part = parsed_url.path |
| match = re.match(r'(/c(/.*/\+)?)?/(\d+)(/(\d+)?/?)?$', part) |
| if match: |
| return _ParsedIssueNumberArgument( |
| issue=int(match.group(3)), |
| patchset=int(match.group(5)) if match.group(5) else None, |
| hostname=parsed_url.netloc) |
| return None |
| |
| def _GerritCommitMsgHookCheck(self, offer_removal): |
| hook = os.path.join(settings.GetRoot(), '.git', 'hooks', 'commit-msg') |
| if not os.path.exists(hook): |
| return |
    # Crude attempt to distinguish the Gerrit Code Review hook from a
    # potentially custom, developer-made one.
    data = gclient_utils.FileRead(hook)
    if not ('From Gerrit Code Review' in data and 'add_ChangeId()' in data):
| return |
| print('WARNING: You have Gerrit commit-msg hook installed.\n' |
| 'It is not necessary for uploading with git cl in squash mode, ' |
| 'and may interfere with it in subtle ways.\n' |
| 'We recommend you remove the commit-msg hook.') |
| if offer_removal: |
| if ask_for_explicit_yes('Do you want to remove it now?'): |
| gclient_utils.rm_file_or_tree(hook) |
| print('Gerrit commit-msg hook removed.') |
| else: |
| print('OK, will keep Gerrit commit-msg hook in place.') |
| |
| def _CleanUpOldTraces(self): |
| """Keep only the last |MAX_TRACES| traces.""" |
| try: |
| traces = sorted([ |
| os.path.join(TRACES_DIR, f) |
| for f in os.listdir(TRACES_DIR) |
| if (os.path.isfile(os.path.join(TRACES_DIR, f)) |
| and not f.startswith('tmp')) |
| ]) |
| traces_to_delete = traces[:-MAX_TRACES] |
| for trace in traces_to_delete: |
| os.remove(trace) |
| except OSError: |
      print('WARNING: Failed to remove old git traces from\n'
            '  %s\n'
            'Consider removing them manually.' % TRACES_DIR)
| |
| def _WriteGitPushTraces(self, trace_name, traces_dir, git_push_metadata): |
| """Zip and write the git push traces stored in traces_dir.""" |
| gclient_utils.safe_makedirs(TRACES_DIR) |
| traces_zip = trace_name + '-traces' |
| traces_readme = trace_name + '-README' |
| # Create a temporary dir to store git config and gitcookies in. It will be |
| # compressed and stored next to the traces. |
| git_info_dir = tempfile.mkdtemp() |
| git_info_zip = trace_name + '-git-info' |
| |
| git_push_metadata['now'] = datetime_now().strftime('%c') |
| if sys.stdin.encoding and sys.stdin.encoding != 'utf-8': |
| git_push_metadata['now'] = git_push_metadata['now'].decode( |
| sys.stdin.encoding) |
| |
| git_push_metadata['trace_name'] = trace_name |
| gclient_utils.FileWrite( |
| traces_readme, TRACES_README_FORMAT % git_push_metadata) |
| |
| # Keep only the first 6 characters of the git hashes on the packet |
| # trace. This greatly decreases size after compression. |
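    # e.g. (illustrative) a full 40-character hash 'f00dcafe...' is stored as
    # just 'f00dca'.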
| packet_traces = os.path.join(traces_dir, 'trace-packet') |
| if os.path.isfile(packet_traces): |
| contents = gclient_utils.FileRead(packet_traces) |
| gclient_utils.FileWrite( |
| packet_traces, GIT_HASH_RE.sub(r'\1', contents)) |
| shutil.make_archive(traces_zip, 'zip', traces_dir) |
| |
| # Collect and compress the git config and gitcookies. |
| git_config = RunGit(['config', '-l']) |
| gclient_utils.FileWrite( |
| os.path.join(git_info_dir, 'git-config'), |
| git_config) |
| |
| cookie_auth = gerrit_util.Authenticator.get() |
| if isinstance(cookie_auth, gerrit_util.CookiesAuthenticator): |
| gitcookies_path = cookie_auth.get_gitcookies_path() |
| if os.path.isfile(gitcookies_path): |
| gitcookies = gclient_utils.FileRead(gitcookies_path) |
| gclient_utils.FileWrite( |
| os.path.join(git_info_dir, 'gitcookies'), |
| GITCOOKIES_REDACT_RE.sub('REDACTED', gitcookies)) |
| shutil.make_archive(git_info_zip, 'zip', git_info_dir) |
| |
| gclient_utils.rmtree(git_info_dir) |
| |
| def _RunGitPushWithTraces( |
| self, change_desc, refspec, refspec_opts, git_push_metadata): |
| """Run git push and collect the traces resulting from the execution.""" |
| # Create a temporary directory to store traces in. Traces will be compressed |
| # and stored in a 'traces' dir inside depot_tools. |
| traces_dir = tempfile.mkdtemp() |
| trace_name = os.path.join( |
| TRACES_DIR, datetime_now().strftime('%Y%m%dT%H%M%S.%f')) |
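    # e.g. (illustrative) <depot_tools>/traces/20190501T123456.000123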
| |
| env = os.environ.copy() |
| env['GIT_REDACT_COOKIES'] = 'o,SSO,GSSO_Uberproxy' |
| env['GIT_TR2_EVENT'] = os.path.join(traces_dir, 'tr2-event') |
| env['GIT_TRACE2_EVENT'] = os.path.join(traces_dir, 'tr2-event') |
| env['GIT_TRACE_CURL'] = os.path.join(traces_dir, 'trace-curl') |
| env['GIT_TRACE_CURL_NO_DATA'] = '1' |
| env['GIT_TRACE_PACKET'] = os.path.join(traces_dir, 'trace-packet') |
| |
| try: |
| push_returncode = 0 |
| remote_url = self.GetRemoteUrl() |
| before_push = time_time() |
| push_stdout = gclient_utils.CheckCallAndFilter( |
| ['git', 'push', remote_url, refspec], |
| env=env, |
| print_stdout=True, |
          # Flush after every line: useful for seeing progress when running
          # as a recipe.
| filter_fn=lambda _: sys.stdout.flush()) |
| except subprocess2.CalledProcessError as e: |
| push_returncode = e.returncode |
| DieWithError('Failed to create a change. Please examine output above ' |
| 'for the reason of the failure.\n' |
| 'Hint: run command below to diagnose common Git/Gerrit ' |
| 'credential problems:\n' |
| ' git cl creds-check\n' |
| '\n' |
| 'If git-cl is not working correctly, file a bug under the ' |
| 'Infra>SDK component including the files below.\n' |
| 'Review the files before upload, since they might contain ' |
| 'sensitive information.\n' |
| 'Set the Restrict-View-Google label so that they are not ' |
| 'publicly accessible.\n' |
| + TRACES_MESSAGE % {'trace_name': trace_name}, |
| change_desc) |
| finally: |
| execution_time = time_time() - before_push |
| metrics.collector.add_repeated('sub_commands', { |
| 'command': 'git push', |
| 'execution_time': execution_time, |
| 'exit_code': push_returncode, |
| 'arguments': metrics_utils.extract_known_subcommand_args(refspec_opts), |
| }) |
| |
| git_push_metadata['execution_time'] = execution_time |
| git_push_metadata['exit_code'] = push_returncode |
| self._WriteGitPushTraces(trace_name, traces_dir, git_push_metadata) |
| |
| self._CleanUpOldTraces() |
| gclient_utils.rmtree(traces_dir) |
| |
| return push_stdout |
| |
| def CMDUploadChange(self, options, git_diff_args, custom_cl_base, change): |
| """Upload the current branch to Gerrit.""" |
| if options.squash is None: |
      # Load the default: user config, then repo config, then squash=True, in
      # that order.
| options.squash = settings.GetSquashGerritUploads() |
| |
| remote, remote_branch = self.GetRemoteBranch() |
| branch = GetTargetRef(remote, remote_branch, options.target_branch) |
| # This may be None; default fallback value is determined in logic below. |
| title = options.title |
| |
| # Extract bug number from branch name. |
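    # e.g. (illustrative) branches named 'bug-123456' or 'fix_123456' yield
    # bug='123456'.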
| bug = options.bug |
| match = re.match(r'(?:bug|fix)[_-]?(\d+)', self.GetBranch()) |
| if not bug and match: |
| bug = match.group(1) |
| |
| if options.squash: |
| self._GerritCommitMsgHookCheck(offer_removal=not options.force) |
| if self.GetIssue(): |
| # Try to get the message from a previous upload. |
| message = self.GetDescription() |
| if not message: |
| DieWithError( |
| 'failed to fetch description from current Gerrit change %d\n' |
| '%s' % (self.GetIssue(), self.GetIssueURL())) |
| if not title: |
| if options.message: |
| # When uploading a subsequent patchset, -m|--message is taken |
| # as the patchset title if --title was not provided. |
| title = options.message.strip() |
| else: |
| default_title = RunGit( |
| ['show', '-s', '--format=%s', 'HEAD']).strip() |
| if options.force: |
| title = default_title |
| else: |
| title = ask_for_data( |
| 'Title for patchset [%s]: ' % default_title) or default_title |
| change_id = self._GetChangeDetail()['change_id'] |
| while True: |
| footer_change_ids = git_footers.get_footer_change_id(message) |
| if footer_change_ids == [change_id]: |
| break |
| if not footer_change_ids: |
| message = git_footers.add_footer_change_id(message, change_id) |
| print('WARNING: appended missing Change-Id to change description.') |
| continue |
          # There is already a valid footer, but with a different id or
          # several ids. Doing this automatically is non-trivial, as we don't
          # want to lose other existing footers, yet we want to append just
          # one desired Change-Id. Thus, just create a new footer, but let the
          # user verify the new description.
| message = '%s\n\nChange-Id: %s' % (message, change_id) |
| print( |
| 'WARNING: change %s has Change-Id footer(s):\n' |
| ' %s\n' |
| 'but change has Change-Id %s, according to Gerrit.\n' |
              'Please check the proposed correction to the description, and '
              'edit it if necessary, but keep the "Change-Id: %s" footer\n'
| % (self.GetIssue(), '\n '.join(footer_change_ids), change_id, |
| change_id)) |
| confirm_or_exit(action='edit') |
| if not options.force: |
| change_desc = ChangeDescription(message) |
| change_desc.prompt(bug=bug) |
| message = change_desc.description |
| if not message: |
| DieWithError("Description is empty. Aborting...") |
| # Continue the while loop. |
        # Sanity check: we should end up with exactly one properly formed
        # Change-Id footer in the message.
| assert [change_id] == git_footers.get_footer_change_id(message) |
| change_desc = ChangeDescription(message) |
| else: # if not self.GetIssue() |
| if options.message: |
| message = options.message |
| else: |
| message = _create_description_from_log(git_diff_args) |
| if options.title: |
| message = options.title + '\n\n' + message |
| change_desc = ChangeDescription(message) |
| |
| if not options.force: |
| change_desc.prompt(bug=bug) |
| # On first upload, patchset title is always this string, while |
| # --title flag gets converted to first line of message. |
| title = 'Initial upload' |
| if not change_desc.description: |
| DieWithError("Description is empty. Aborting...") |
| change_ids = git_footers.get_footer_change_id(change_desc.description) |
| if len(change_ids) > 1: |
| DieWithError('too many Change-Id footers, at most 1 allowed.') |
| if not change_ids: |
| # Generate the Change-Id automatically. |
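        # The generated ID is an 'I' followed by 40 hex digits, e.g.
        # (illustrative) 'If1234567890abcdef1234567890abcdef12345678'.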
| change_desc.set_description(git_footers.add_footer_change_id( |
| change_desc.description, |
| GenerateGerritChangeId(change_desc.description))) |
| change_ids = git_footers.get_footer_change_id(change_desc.description) |
| assert len(change_ids) == 1 |
| change_id = change_ids[0] |
| |
| if options.reviewers or options.tbrs or options.add_owners_to: |
| change_desc.update_reviewers(options.reviewers, options.tbrs, |
| options.add_owners_to, change) |
| if options.preserve_tryjobs: |
| change_desc.set_preserve_tryjobs() |
| |
| remote, upstream_branch = self.FetchUpstreamTuple(self.GetBranch()) |
| parent = self._ComputeParent(remote, upstream_branch, custom_cl_base, |
| options.force, change_desc) |
| tree = RunGit(['rev-parse', 'HEAD:']).strip() |
| with tempfile.NamedTemporaryFile(delete=False) as desc_tempfile: |
| desc_tempfile.write(change_desc.description) |
| desc_tempfile.close() |
| ref_to_push = RunGit(['commit-tree', tree, '-p', parent, |
| '-F', desc_tempfile.name]).strip() |
| os.remove(desc_tempfile.name) |
| else: |
| change_desc = ChangeDescription( |
| options.message or _create_description_from_log(git_diff_args)) |
| if not change_desc.description: |
| DieWithError("Description is empty. Aborting...") |
| |
| if not git_footers.get_footer_change_id(change_desc.description): |
| DownloadGerritHook(False) |
| change_desc.set_description( |
| self._AddChangeIdToCommitMessage(options, git_diff_args)) |
| if options.reviewers or options.tbrs or options.add_owners_to: |
| change_desc.update_reviewers(options.reviewers, options.tbrs, |
| options.add_owners_to, change) |
| ref_to_push = 'HEAD' |
| # For no-squash mode, we assume the remote called "origin" is the one we |
| # want. It is not worthwhile to support different workflows for |
| # no-squash mode. |
| parent = 'origin/%s' % branch |
| change_id = git_footers.get_footer_change_id(change_desc.description)[0] |
| |
| assert change_desc |
| SaveDescriptionBackup(change_desc) |
| commits = RunGitSilent(['rev-list', '%s..%s' % (parent, |
| ref_to_push)]).splitlines() |
| if len(commits) > 1: |
| print('WARNING: This will upload %d commits. Run the following command ' |
| 'to see which commits will be uploaded: ' % len(commits)) |
| print('git log %s..%s' % (parent, ref_to_push)) |
| print('You can also use `git squash-branch` to squash these into a ' |
| 'single commit.') |
| confirm_or_exit(action='upload') |
| |
| if options.reviewers or options.tbrs or options.add_owners_to: |
| change_desc.update_reviewers(options.reviewers, options.tbrs, |
| options.add_owners_to, change) |
| |
| reviewers = sorted(change_desc.get_reviewers()) |
| # Add cc's from the CC_LIST and --cc flag (if any). |
| if not options.private and not options.no_autocc: |
| cc = self.GetCCList().split(',') |
| else: |
| cc = [] |
| if options.cc: |
| cc.extend(options.cc) |
| cc = filter(None, [email.strip() for email in cc]) |
| if change_desc.get_cced(): |
| cc.extend(change_desc.get_cced()) |
| if self._GetGerritHost() == 'chromium-review.googlesource.com': |
| valid_accounts = set(reviewers + cc) |
| # TODO(crbug/877717): relax this for all hosts. |
| else: |
| valid_accounts = gerrit_util.ValidAccounts( |
| self._GetGerritHost(), reviewers + cc) |
| logging.info('accounts %s are recognized, %s invalid', |
| sorted(valid_accounts), |
| set(reviewers + cc).difference(set(valid_accounts))) |
| |
| # Extra options that can be specified at push time. Doc: |
| # https://gerrit-review.googlesource.com/Documentation/user-upload.html |
| refspec_opts = [] |
| |
| # By default, new changes are started in WIP mode, and subsequent patchsets |
| # don't send email. At any time, passing --send-mail will mark the change |
| # ready and send email for that particular patch. |
| if options.send_mail: |
| refspec_opts.append('ready') |
| refspec_opts.append('notify=ALL') |
| elif not self.GetIssue() and options.squash: |
| refspec_opts.append('wip') |
| else: |
| refspec_opts.append('notify=NONE') |
| |
| # TODO(tandrii): options.message should be posted as a comment |
| # if --send-mail is set on non-initial upload as Rietveld used to do it. |
| |
| if title: |
| # Punctuation and whitespace in |title| must be percent-encoded. |
| refspec_opts.append('m=' + gerrit_util.PercentEncodeForGitRef(title)) |
| |
| if options.private: |
| refspec_opts.append('private') |
| |
| for r in sorted(reviewers): |
| if r in valid_accounts: |
| refspec_opts.append('r=%s' % r) |
| reviewers.remove(r) |
| else: |
| # TODO(tandrii): this should probably be a hard failure. |
| print('WARNING: reviewer %s doesn\'t have a Gerrit account, skipping' |
| % r) |
| for c in sorted(cc): |
      # The refspec option will be rejected if the cc doesn't correspond to an
      # account, even though a REST call to add such an arbitrary cc may
      # succeed.
| if c in valid_accounts: |
| refspec_opts.append('cc=%s' % c) |
| cc.remove(c) |
| |
| if options.topic: |
| # Documentation on Gerrit topics is here: |
| # https://gerrit-review.googlesource.com/Documentation/user-upload.html#topic |
| refspec_opts.append('topic=%s' % options.topic) |
| |
| if options.enable_auto_submit: |
| refspec_opts.append('l=Auto-Submit+1') |
| if options.use_commit_queue: |
| refspec_opts.append('l=Commit-Queue+2') |
| elif options.cq_dry_run: |
| refspec_opts.append('l=Commit-Queue+1') |
| |
| if change_desc.get_reviewers(tbr_only=True): |
| score = gerrit_util.GetCodeReviewTbrScore( |
| self._GetGerritHost(), |
| self._GetGerritProject()) |
| refspec_opts.append('l=Code-Review+%s' % score) |
| |
| # Gerrit sorts hashtags, so order is not important. |
| hashtags = {change_desc.sanitize_hash_tag(t) for t in options.hashtags} |
| if not self.GetIssue(): |
| hashtags.update(change_desc.get_hash_tags()) |
| refspec_opts += ['hashtag=%s' % t for t in sorted(hashtags)] |
| |
| refspec_suffix = '' |
| if refspec_opts: |
| refspec_suffix = '%' + ','.join(refspec_opts) |
| assert ' ' not in refspec_suffix, ( |
| 'spaces not allowed in refspec: "%s"' % refspec_suffix) |
| refspec = '%s:refs/for/%s%s' % (ref_to_push, branch, refspec_suffix) |
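    # The final refspec looks like, e.g. (illustrative):
    #   deadbeef12:refs/for/refs/heads/master%notify=NONE,m=Initial_upload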
| |
| git_push_metadata = { |
| 'gerrit_host': self._GetGerritHost(), |
| 'title': title or '<untitled>', |
| 'change_id': change_id, |
| 'description': change_desc.description, |
| } |
| push_stdout = self._RunGitPushWithTraces( |
| change_desc, refspec, refspec_opts, git_push_metadata) |
| |
| if options.squash: |
| regex = re.compile(r'remote:\s+https?://[\w\-\.\+\/#]*/(\d+)\s.*') |
| change_numbers = [m.group(1) |
| for m in map(regex.match, push_stdout.splitlines()) |
| if m] |
| if len(change_numbers) != 1: |
| DieWithError( |
| ('Created|Updated %d issues on Gerrit, but only 1 expected.\n' |
| 'Change-Id: %s') % (len(change_numbers), change_id), change_desc) |
| self.SetIssue(change_numbers[0]) |
| self._GitSetBranchConfigValue('gerritsquashhash', ref_to_push) |
| |
| if self.GetIssue() and (reviewers or cc): |
| # GetIssue() is not set in case of non-squash uploads according to tests. |
| # TODO(agable): non-squash uploads in git cl should be removed. |
| gerrit_util.AddReviewers( |
| self._GetGerritHost(), |
| self._GerritChangeIdentifier(), |
| reviewers, cc, |
| notify=bool(options.send_mail)) |
| |
| return 0 |
| |
| def _ComputeParent(self, remote, upstream_branch, custom_cl_base, force, |
| change_desc): |
| """Computes parent of the generated commit to be uploaded to Gerrit. |
| |
| Returns revision or a ref name. |
| """ |
| if custom_cl_base: |
      # Try to avoid creating additional unintended CLs when uploading, unless
      # the user wants to take this risk.
| local_ref_of_target_remote = self.GetRemoteBranch()[1] |
| code, _ = RunGitWithCode(['merge-base', '--is-ancestor', custom_cl_base, |
| local_ref_of_target_remote]) |
| if code == 1: |
| print('\nWARNING: Manually specified base of this CL `%s` ' |
| 'doesn\'t seem to belong to target remote branch `%s`.\n\n' |
| 'If you proceed with upload, more than 1 CL may be created by ' |
| 'Gerrit as a result, in turn confusing or crashing git cl.\n\n' |
| 'If you are certain that specified base `%s` has already been ' |
| 'uploaded to Gerrit as another CL, you may proceed.\n' % |
| (custom_cl_base, local_ref_of_target_remote, custom_cl_base)) |
| if not force: |
| confirm_or_exit( |
| 'Do you take responsibility for cleaning up potential mess ' |
| 'resulting from proceeding with upload?', |
| action='upload') |
| return custom_cl_base |
| |
| if remote != '.': |
| return self.GetCommonAncestorWithUpstream() |
| |
| # If our upstream branch is local, we base our squashed commit on its |
| # squashed version. |
| upstream_branch_name = scm.GIT.ShortBranchName(upstream_branch) |
| |
| if upstream_branch_name == 'master': |
| return self.GetCommonAncestorWithUpstream() |
| |
| # Check the squashed hash of the parent. |
    # TODO(tandrii): consider checking parent change in Gerrit and using its
    # hash if the tree hash of the latest parent revision (patchset) in Gerrit
    # matches the tree hash of the parent branch. The upside is fewer likely
    # bogus requests to reupload the parent change just because its uploadhash
    # is missing; yet the downside likely exists, too (albeit unknown to me
    # yet).
| parent = RunGit(['config', |
| 'branch.%s.gerritsquashhash' % upstream_branch_name], |
| error_ok=True).strip() |
| # Verify that the upstream branch has been uploaded too, otherwise |
| # Gerrit will create additional CLs when uploading. |
| if not parent or (RunGitSilent(['rev-parse', upstream_branch + ':']) != |
| RunGitSilent(['rev-parse', parent + ':'])): |
| DieWithError( |
| '\nUpload upstream branch %s first.\n' |
| 'It is likely that this branch has been rebased since its last ' |
| 'upload, so you just need to upload it again.\n' |
| '(If you uploaded it with --no-squash, then branch dependencies ' |
| 'are not supported, and you should reupload with --squash.)' |
| % upstream_branch_name, |
| change_desc) |
| return parent |
| |
| def _AddChangeIdToCommitMessage(self, options, args): |
| """Re-commits using the current message, assumes the commit hook is in |
| place. |
| """ |
| log_desc = options.message or _create_description_from_log(args) |
| git_command = ['commit', '--amend', '-m', log_desc] |
| RunGit(git_command) |
| new_log_desc = _create_description_from_log(args) |
| if git_footers.get_footer_change_id(new_log_desc): |
| print('git-cl: Added Change-Id to commit message.') |
| return new_log_desc |
| else: |
| DieWithError('ERROR: Gerrit commit-msg hook not installed.') |
| |
| def SetCQState(self, new_state): |
| """Sets the Commit-Queue label assuming canonical CQ config for Gerrit.""" |
| vote_map = { |
| _CQState.NONE: 0, |
| _CQState.DRY_RUN: 1, |
| _CQState.COMMIT: 2, |
| } |
|