| #!/usr/bin/env vpython |
| # Copyright (c) 2013 The Chromium Authors. All rights reserved. |
| # Use of this source code is governed by a BSD-style license that can be |
| # found in the LICENSE file. |
| |
| # Copyright (C) 2008 Evan Martin <martine@danga.com> |
| |
| """A git-command for integrating reviews on Gerrit.""" |
| |
| from __future__ import print_function |
| |
| import base64 |
| import collections |
| import datetime |
| import fnmatch |
| import httplib2 |
| import itertools |
| import json |
| import logging |
| import multiprocessing |
| import optparse |
| import os |
| import re |
| import shutil |
| import stat |
| import sys |
| import tempfile |
| import textwrap |
| import time |
| import uuid |
| import webbrowser |
| import zlib |
| |
| from third_party import colorama |
| import auth |
| import clang_format |
| import fix_encoding |
| import gclient_utils |
| import gerrit_util |
| import git_common |
| import git_footers |
| import git_new_branch |
| import metrics |
| import metrics_utils |
| import owners |
| import owners_client |
| import owners_finder |
| import presubmit_canned_checks |
| import presubmit_support |
| import scm |
| import setup_color |
| import split_cl |
| import subcommand |
| import subprocess2 |
| import watchlists |
| |
| from third_party import six |
| from six.moves import urllib |
| |
| |
| if sys.version_info.major == 3: |
| basestring = (str,) # pylint: disable=redefined-builtin |
| |
| |
| __version__ = '2.0' |
| |
| # Traces for git push will be stored in a traces directory inside the |
| # depot_tools checkout. |
| DEPOT_TOOLS = os.path.dirname(os.path.abspath(__file__)) |
| TRACES_DIR = os.path.join(DEPOT_TOOLS, 'traces') |
| PRESUBMIT_SUPPORT = os.path.join(DEPOT_TOOLS, 'presubmit_support.py') |
| |
| # When collecting traces, Git hashes will be reduced to 6 characters to reduce |
| # the size after compression. |
| GIT_HASH_RE = re.compile(r'\b([a-f0-9]{6})[a-f0-9]{34}\b', flags=re.I) |
| # Used to redact the cookies from the gitcookies file. |
| GITCOOKIES_REDACT_RE = re.compile(r'1/.*') |
| |
| MAX_ATTEMPTS = 3 |
| |
| # The maximum number of traces we will keep. Multiplied by 3 since we store |
| # 3 files per trace. |
| MAX_TRACES = 3 * 10 |
# Message to be displayed to the user to inform them where to find the traces
# for a git-cl upload execution.
| TRACES_MESSAGE = ( |
| '\n' |
| 'The traces of this git-cl execution have been recorded at:\n' |
| ' %(trace_name)s-traces.zip\n' |
| 'Copies of your gitcookies file and git config have been recorded at:\n' |
| ' %(trace_name)s-git-info.zip\n') |
| # Format of the message to be stored as part of the traces to give developers a |
| # better context when they go through traces. |
| TRACES_README_FORMAT = ( |
| 'Date: %(now)s\n' |
| '\n' |
| 'Change: https://%(gerrit_host)s/q/%(change_id)s\n' |
| 'Title: %(title)s\n' |
| '\n' |
| '%(description)s\n' |
| '\n' |
| 'Execution time: %(execution_time)s\n' |
| 'Exit code: %(exit_code)s\n') + TRACES_MESSAGE |
| |
| POSTUPSTREAM_HOOK = '.git/hooks/post-cl-land' |
| DESCRIPTION_BACKUP_FILE = '.git_cl_description_backup' |
| REFS_THAT_ALIAS_TO_OTHER_REFS = { |
| 'refs/remotes/origin/lkgr': 'refs/remotes/origin/master', |
| 'refs/remotes/origin/lkcr': 'refs/remotes/origin/master', |
| } |
| |
| DEFAULT_OLD_BRANCH = 'refs/remotes/origin/master' |
| DEFAULT_NEW_BRANCH = 'refs/remotes/origin/main' |
| |
| # Valid extensions for files we want to lint. |
| DEFAULT_LINT_REGEX = r"(.*\.cpp|.*\.cc|.*\.h)" |
| DEFAULT_LINT_IGNORE_REGEX = r"$^" |
| |
| # File name for yapf style config files. |
| YAPF_CONFIG_FILENAME = '.style.yapf' |
| |
# The issue, patchset and codereview server are stored in git config for each
# branch under branch.<branch-name>.<config-key>.
| ISSUE_CONFIG_KEY = 'gerritissue' |
| PATCHSET_CONFIG_KEY = 'gerritpatchset' |
| CODEREVIEW_SERVER_CONFIG_KEY = 'gerritserver' |
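# For illustration, a branch "feature" whose CL is issue 123456 at patchset 7
# would carry git config entries like:
#   branch.feature.gerritissue = 123456
#   branch.feature.gerritpatchset = 7
#   branch.feature.gerritserver = https://chromium-review.googlesource.com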
| |
| # Shortcut since it quickly becomes repetitive. |
| Fore = colorama.Fore |
| |
| # Initialized in main() |
| settings = None |
| |
| # Used by tests/git_cl_test.py to add extra logging. |
| # Inside the weirdly failing test, add this: |
| # >>> self.mock(git_cl, '_IS_BEING_TESTED', True) |
| # And scroll up to see the stack trace printed. |
| _IS_BEING_TESTED = False |
| |
| |
| _KNOWN_GERRIT_TO_SHORT_URLS = { |
| 'https://chrome-internal-review.googlesource.com': 'https://crrev.com/i', |
| 'https://chromium-review.googlesource.com': 'https://crrev.com/c', |
| } |
| assert len(_KNOWN_GERRIT_TO_SHORT_URLS) == len( |
| set(_KNOWN_GERRIT_TO_SHORT_URLS.values())), 'must have unique values' |
| |
| |
| class GitPushError(Exception): |
| pass |
| |
| |
| def DieWithError(message, change_desc=None): |
| if change_desc: |
| SaveDescriptionBackup(change_desc) |
| print('\n ** Content of CL description **\n' + |
| '='*72 + '\n' + |
| change_desc.description + '\n' + |
| '='*72 + '\n') |
| |
| print(message, file=sys.stderr) |
| sys.exit(1) |
| |
| |
| def SaveDescriptionBackup(change_desc): |
| backup_path = os.path.join(DEPOT_TOOLS, DESCRIPTION_BACKUP_FILE) |
| print('\nsaving CL description to %s\n' % backup_path) |
| with open(backup_path, 'w') as backup_file: |
| backup_file.write(change_desc.description) |
| |
| |
| def GetNoGitPagerEnv(): |
| env = os.environ.copy() |
| # 'cat' is a magical git string that disables pagers on all platforms. |
| env['GIT_PAGER'] = 'cat' |
| return env |
| |
| |
| def RunCommand(args, error_ok=False, error_message=None, shell=False, **kwargs): |
| try: |
| stdout = subprocess2.check_output(args, shell=shell, **kwargs) |
| return stdout.decode('utf-8', 'replace') |
| except subprocess2.CalledProcessError as e: |
| logging.debug('Failed running %s', args) |
| if not error_ok: |
| message = error_message or e.stdout.decode('utf-8', 'replace') or '' |
| DieWithError('Command "%s" failed.\n%s' % (' '.join(args), message)) |
| return e.stdout.decode('utf-8', 'replace') |
| |
| |
| def RunGit(args, **kwargs): |
| """Returns stdout.""" |
| return RunCommand(['git'] + args, **kwargs) |
| |
| |
| def RunGitWithCode(args, suppress_stderr=False): |
| """Returns return code and stdout.""" |
| if suppress_stderr: |
| stderr = subprocess2.VOID |
| else: |
| stderr = sys.stderr |
| try: |
| (out, _), code = subprocess2.communicate(['git'] + args, |
| env=GetNoGitPagerEnv(), |
| stdout=subprocess2.PIPE, |
| stderr=stderr) |
| return code, out.decode('utf-8', 'replace') |
| except subprocess2.CalledProcessError as e: |
| logging.debug('Failed running %s', ['git'] + args) |
| return e.returncode, e.stdout.decode('utf-8', 'replace') |
| |
| |
| def RunGitSilent(args): |
| """Returns stdout, suppresses stderr and ignores the return code.""" |
| return RunGitWithCode(args, suppress_stderr=True)[1] |
| |
| |
| def time_sleep(seconds): |
| # Use this so that it can be mocked in tests without interfering with python |
| # system machinery. |
| return time.sleep(seconds) |
| |
| |
| def time_time(): |
| # Use this so that it can be mocked in tests without interfering with python |
| # system machinery. |
| return time.time() |
| |
| |
| def datetime_now(): |
| # Use this so that it can be mocked in tests without interfering with python |
| # system machinery. |
| return datetime.datetime.now() |
| |
| |
| def confirm_or_exit(prefix='', action='confirm'): |
| """Asks user to press enter to continue or press Ctrl+C to abort.""" |
| if not prefix or prefix.endswith('\n'): |
| mid = 'Press' |
| elif prefix.endswith('.') or prefix.endswith('?'): |
| mid = ' Press' |
| elif prefix.endswith(' '): |
| mid = 'press' |
| else: |
| mid = ' press' |
| gclient_utils.AskForData( |
| '%s%s Enter to %s, or Ctrl+C to abort' % (prefix, mid, action)) |
| |
| |
| def ask_for_explicit_yes(prompt): |
| """Returns whether user typed 'y' or 'yes' to confirm the given prompt.""" |
| result = gclient_utils.AskForData(prompt + ' [Yes/No]: ').lower() |
  while True:
    # An empty answer is ambiguous, so re-prompt instead of treating it as yes.
    if result and 'yes'.startswith(result):
      return True
    if result and 'no'.startswith(result):
      return False
    result = gclient_utils.AskForData('Please, type yes or no: ').lower()
| |
| |
| def _get_properties_from_options(options): |
| prop_list = getattr(options, 'properties', []) |
| properties = dict(x.split('=', 1) for x in prop_list) |
| for key, val in properties.items(): |
| try: |
| properties[key] = json.loads(val) |
| except ValueError: |
| pass # If a value couldn't be evaluated, treat it as a string. |
| return properties |
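
# For illustration (not executed): with options.properties set to
# ['foo=bar', 'count=2', 'enabled=true'], this returns
#   {'foo': 'bar', 'count': 2, 'enabled': True}
# since values that parse as JSON are decoded and the rest stay strings.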
| |
| |
| def _call_buildbucket(http, buildbucket_host, method, request): |
| """Calls a buildbucket v2 method and returns the parsed json response.""" |
| headers = { |
| 'Accept': 'application/json', |
| 'Content-Type': 'application/json', |
| } |
| request = json.dumps(request) |
| url = 'https://%s/prpc/buildbucket.v2.Builds/%s' % (buildbucket_host, method) |
| |
| logging.info('POST %s with %s' % (url, request)) |
| |
| attempts = 1 |
| time_to_sleep = 1 |
| while True: |
| response, content = http.request(url, 'POST', body=request, headers=headers) |
    if response.status == 200:
      # Strip the anti-XSSI prefix ()]}') before parsing the JSON response.
      return json.loads(content[4:])
| if attempts >= MAX_ATTEMPTS or 400 <= response.status < 500: |
| msg = '%s error when calling POST %s with %s: %s' % ( |
| response.status, url, request, content) |
| raise BuildbucketResponseException(msg) |
| logging.debug( |
| '%s error when calling POST %s with %s. ' |
| 'Sleeping for %d seconds and retrying...' % ( |
| response.status, url, request, time_to_sleep)) |
| time.sleep(time_to_sleep) |
| time_to_sleep *= 2 |
| attempts += 1 |
| |
| assert False, 'unreachable' |
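
# For illustration: a transient 5xx response is retried with exponential
# backoff (sleeping 1s, then 2s), while a 4xx response or exhausting
# MAX_ATTEMPTS (3 tries total) raises BuildbucketResponseException.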
| |
| |
| def _parse_bucket(raw_bucket): |
| legacy = True |
| project = bucket = None |
| if '/' in raw_bucket: |
| legacy = False |
| project, bucket = raw_bucket.split('/', 1) |
| # Assume luci.<project>.<bucket>. |
| elif raw_bucket.startswith('luci.'): |
| project, bucket = raw_bucket[len('luci.'):].split('.', 1) |
| # Otherwise, assume prefix is also the project name. |
| elif '.' in raw_bucket: |
| project = raw_bucket.split('.')[0] |
| bucket = raw_bucket |
| # Legacy buckets. |
| if legacy and project and bucket: |
| print('WARNING Please use %s/%s to specify the bucket.' % (project, bucket)) |
| return project, bucket |
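
# For illustration (not executed), _parse_bucket normalizes all accepted
# forms to a (project, bucket) tuple:
#   _parse_bucket('chromium/try')      == ('chromium', 'try')
#   _parse_bucket('luci.chromium.try') == ('chromium', 'try')  # legacy
#   _parse_bucket('tryserver.chromium.linux')
#       == ('tryserver', 'tryserver.chromium.linux')           # legacy
# Both legacy forms print a warning asking for the 'project/bucket' form.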
| |
| |
| def _trigger_tryjobs(changelist, jobs, options, patchset): |
| """Sends a request to Buildbucket to trigger tryjobs for a changelist. |
| |
| Args: |
| changelist: Changelist that the tryjobs are associated with. |
| jobs: A list of (project, bucket, builder). |
| options: Command-line options. |
| """ |
| print('Scheduling jobs on:') |
| for project, bucket, builder in jobs: |
| print(' %s/%s: %s' % (project, bucket, builder)) |
| print('To see results here, run: git cl try-results') |
| print('To see results in browser, run: git cl web') |
| |
| requests = _make_tryjob_schedule_requests(changelist, jobs, options, patchset) |
| if not requests: |
| return |
| |
| http = auth.Authenticator().authorize(httplib2.Http()) |
| http.force_exception_to_status_code = True |
| |
| batch_request = {'requests': requests} |
| batch_response = _call_buildbucket( |
| http, options.buildbucket_host, 'Batch', batch_request) |
| |
| errors = [ |
| ' ' + response['error']['message'] |
| for response in batch_response.get('responses', []) |
| if 'error' in response |
| ] |
| if errors: |
| raise BuildbucketResponseException( |
| 'Failed to schedule builds for some bots:\n%s' % '\n'.join(errors)) |
| |
| |
| def _make_tryjob_schedule_requests(changelist, jobs, options, patchset): |
| """Constructs requests for Buildbucket to trigger tryjobs.""" |
| gerrit_changes = [changelist.GetGerritChange(patchset)] |
| shared_properties = { |
| 'category': options.ensure_value('category', 'git_cl_try') |
| } |
| if options.ensure_value('clobber', False): |
| shared_properties['clobber'] = True |
| shared_properties.update(_get_properties_from_options(options) or {}) |
| |
| shared_tags = [{'key': 'user_agent', 'value': 'git_cl_try'}] |
| if options.ensure_value('retry_failed', False): |
| shared_tags.append({'key': 'retry_failed', |
| 'value': '1'}) |
| |
| requests = [] |
| for (project, bucket, builder) in jobs: |
| properties = shared_properties.copy() |
| if 'presubmit' in builder.lower(): |
| properties['dry_run'] = 'true' |
| |
| requests.append({ |
| 'scheduleBuild': { |
| 'requestId': str(uuid.uuid4()), |
| 'builder': { |
| 'project': getattr(options, 'project', None) or project, |
| 'bucket': bucket, |
| 'builder': builder, |
| }, |
| 'gerritChanges': gerrit_changes, |
| 'properties': properties, |
| 'tags': [ |
| {'key': 'builder', 'value': builder}, |
| ] + shared_tags, |
| } |
| }) |
| |
| if options.ensure_value('revision', None): |
| requests[-1]['scheduleBuild']['gitilesCommit'] = { |
| 'host': gerrit_changes[0]['host'], |
| 'project': gerrit_changes[0]['project'], |
| 'id': options.revision |
| } |
| |
| return requests |
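
# For illustration (not executed), a hypothetical job
# ('chromium', 'try', 'linux-rel') yields a request shaped like:
#   {'scheduleBuild': {
#       'requestId': '<uuid4>',
#       'builder': {'project': 'chromium', 'bucket': 'try',
#                   'builder': 'linux-rel'},
#       'gerritChanges': [<change>],
#       'properties': {'category': 'git_cl_try', ...},
#       'tags': [{'key': 'builder', 'value': 'linux-rel'},
#                {'key': 'user_agent', 'value': 'git_cl_try'}]}}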
| |
| |
| def _fetch_tryjobs(changelist, buildbucket_host, patchset=None): |
| """Fetches tryjobs from buildbucket. |
| |
| Returns list of buildbucket.v2.Build with the try jobs for the changelist. |
| """ |
| fields = ['id', 'builder', 'status', 'createTime', 'tags'] |
| request = { |
| 'predicate': { |
| 'gerritChanges': [changelist.GetGerritChange(patchset)], |
| }, |
| 'fields': ','.join('builds.*.' + field for field in fields), |
| } |
| |
| authenticator = auth.Authenticator() |
| if authenticator.has_cached_credentials(): |
| http = authenticator.authorize(httplib2.Http()) |
| else: |
| print('Warning: Some results might be missing because %s' % |
| # Get the message on how to login. |
| (auth.LoginRequiredError().message,)) |
| http = httplib2.Http() |
| http.force_exception_to_status_code = True |
| |
| response = _call_buildbucket(http, buildbucket_host, 'SearchBuilds', request) |
| return response.get('builds', []) |
| |
| |
| def _fetch_latest_builds(changelist, buildbucket_host, latest_patchset=None): |
| """Fetches builds from the latest patchset that has builds (within |
| the last few patchsets). |
| |
| Args: |
| changelist (Changelist): The CL to fetch builds for |
| buildbucket_host (str): Buildbucket host, e.g. "cr-buildbucket.appspot.com" |
    latest_patchset (int|NoneType): the patchset to start fetching builds from.
| If None (default), starts with the latest available patchset. |
| Returns: |
| A tuple (builds, patchset) where builds is a list of buildbucket.v2.Build, |
| and patchset is the patchset number where those builds came from. |
| """ |
| assert buildbucket_host |
| assert changelist.GetIssue(), 'CL must be uploaded first' |
| assert changelist.GetCodereviewServer(), 'CL must be uploaded first' |
| if latest_patchset is None: |
| assert changelist.GetMostRecentPatchset() |
| ps = changelist.GetMostRecentPatchset() |
| else: |
| assert latest_patchset > 0, latest_patchset |
| ps = latest_patchset |
| |
| min_ps = max(1, ps - 5) |
| while ps >= min_ps: |
| builds = _fetch_tryjobs(changelist, buildbucket_host, patchset=ps) |
    if builds:
| return builds, ps |
| ps -= 1 |
| return [], 0 |
| |
| |
| def _filter_failed_for_retry(all_builds): |
| """Returns a list of buckets/builders that are worth retrying. |
| |
| Args: |
| all_builds (list): Builds, in the format returned by _fetch_tryjobs, |
| i.e. a list of buildbucket.v2.Builds which includes status and builder |
| info. |
| |
| Returns: |
    A sorted list of (project, bucket, builder) tuples, which is the same
    format accepted by _trigger_tryjobs.
| """ |
| grouped = {} |
| for build in all_builds: |
| builder = build['builder'] |
| key = (builder['project'], builder['bucket'], builder['builder']) |
| grouped.setdefault(key, []).append(build) |
| |
| jobs = [] |
| for (project, bucket, builder), builds in grouped.items(): |
| if 'triggered' in builder: |
      print('WARNING: Not scheduling %s. Triggered bots require an initial job '
            'from a parent. Please schedule a manual job for the parent '
            'instead.' % builder)
| continue |
| if any(b['status'] in ('STARTED', 'SCHEDULED') for b in builds): |
| # Don't retry if any are running. |
| continue |
    # If the builder had several builds, retry only if the last one failed.
    # This is a bit different from CQ, which would re-use *any* SUCCESS-full
    # build, but when explicitly retrying failed jobs, retrying a flaky one
    # makes sense.
| builds = sorted(builds, key=lambda b: b['createTime']) |
| if builds[-1]['status'] not in ('FAILURE', 'INFRA_FAILURE'): |
| continue |
| # Don't retry experimental build previously triggered by CQ. |
| if any(t['key'] == 'cq_experimental' and t['value'] == 'true' |
| for t in builds[-1]['tags']): |
| continue |
| jobs.append((project, bucket, builder)) |
| |
| # Sort the jobs to make testing easier. |
| return sorted(jobs) |
| |
| |
| def _print_tryjobs(options, builds): |
| """Prints nicely result of _fetch_tryjobs.""" |
| if not builds: |
| print('No tryjobs scheduled.') |
| return |
| |
| longest_builder = max(len(b['builder']['builder']) for b in builds) |
| name_fmt = '{builder:<%d}' % longest_builder |
| if options.print_master: |
| longest_bucket = max(len(b['builder']['bucket']) for b in builds) |
| name_fmt = ('{bucket:>%d} ' % longest_bucket) + name_fmt |
| |
| builds_by_status = {} |
| for b in builds: |
| builds_by_status.setdefault(b['status'], []).append({ |
| 'id': b['id'], |
| 'name': name_fmt.format( |
| builder=b['builder']['builder'], bucket=b['builder']['bucket']), |
| }) |
| |
| sort_key = lambda b: (b['name'], b['id']) |
| |
| def print_builds(title, builds, fmt=None, color=None): |
| """Pop matching builds from `builds` dict and print them.""" |
| if not builds: |
| return |
| |
| fmt = fmt or '{name} https://ci.chromium.org/b/{id}' |
| if not options.color or color is None: |
| colorize = lambda x: x |
| else: |
| colorize = lambda x: '%s%s%s' % (color, x, Fore.RESET) |
| |
| print(colorize(title)) |
| for b in sorted(builds, key=sort_key): |
| print(' ', colorize(fmt.format(**b))) |
| |
| total = len(builds) |
| print_builds( |
| 'Successes:', builds_by_status.pop('SUCCESS', []), color=Fore.GREEN) |
| print_builds( |
| 'Infra Failures:', builds_by_status.pop('INFRA_FAILURE', []), |
| color=Fore.MAGENTA) |
| print_builds('Failures:', builds_by_status.pop('FAILURE', []), color=Fore.RED) |
| print_builds('Canceled:', builds_by_status.pop('CANCELED', []), fmt='{name}', |
| color=Fore.MAGENTA) |
| print_builds('Started:', builds_by_status.pop('STARTED', []), |
| color=Fore.YELLOW) |
| print_builds( |
| 'Scheduled:', builds_by_status.pop('SCHEDULED', []), fmt='{name} id={id}') |
| # The last section is just in case buildbucket API changes OR there is a bug. |
| print_builds( |
| 'Other:', sum(builds_by_status.values(), []), fmt='{name} id={id}') |
| print('Total: %d tryjobs' % total) |
| |
| |
| def _ComputeDiffLineRanges(files, upstream_commit): |
| """Gets the changed line ranges for each file since upstream_commit. |
| |
| Parses a git diff on provided files and returns a dict that maps a file name |
| to an ordered list of range tuples in the form (start_line, count). |
| Ranges are in the same format as a git diff. |
| """ |
| # If files is empty then diff_output will be a full diff. |
| if len(files) == 0: |
| return {} |
| |
| # Take the git diff and find the line ranges where there are changes. |
| diff_cmd = BuildGitDiffCmd('-U0', upstream_commit, files, allow_prefix=True) |
| diff_output = RunGit(diff_cmd) |
| |
| pattern = r'(?:^diff --git a/(?:.*) b/(.*))|(?:^@@.*\+(.*) @@)' |
| # 2 capture groups |
| # 0 == fname of diff file |
| # 1 == 'diff_start,diff_count' or 'diff_start' |
| # will match each of |
| # diff --git a/foo.foo b/foo.py |
| # @@ -12,2 +14,3 @@ |
| # @@ -12,2 +17 @@ |
| # running re.findall on the above string with pattern will give |
| # [('foo.py', ''), ('', '14,3'), ('', '17')] |
| |
| curr_file = None |
| line_diffs = {} |
| for match in re.findall(pattern, diff_output, flags=re.MULTILINE): |
| if match[0] != '': |
| # Will match the second filename in diff --git a/a.py b/b.py. |
| curr_file = match[0] |
| line_diffs[curr_file] = [] |
| else: |
| # Matches +14,3 |
| if ',' in match[1]: |
| diff_start, diff_count = match[1].split(',') |
| else: |
| # Single line changes are of the form +12 instead of +12,1. |
| diff_start = match[1] |
| diff_count = 1 |
| |
| diff_start = int(diff_start) |
| diff_count = int(diff_count) |
| |
| # If diff_count == 0 this is a removal we can ignore. |
| line_diffs[curr_file].append((diff_start, diff_count)) |
| |
| return line_diffs |
| |
| |
| def _FindYapfConfigFile(fpath, yapf_config_cache, top_dir=None): |
| """Checks if a yapf file is in any parent directory of fpath until top_dir. |
| |
  Recursively checks parent directories for a yapf config file, returning None
  if none is found. Uses yapf_config_cache as a cache of previously found
  configs.
| """ |
| fpath = os.path.abspath(fpath) |
| # Return result if we've already computed it. |
| if fpath in yapf_config_cache: |
| return yapf_config_cache[fpath] |
| |
| parent_dir = os.path.dirname(fpath) |
| if os.path.isfile(fpath): |
| ret = _FindYapfConfigFile(parent_dir, yapf_config_cache, top_dir) |
| else: |
| # Otherwise fpath is a directory |
| yapf_file = os.path.join(fpath, YAPF_CONFIG_FILENAME) |
| if os.path.isfile(yapf_file): |
| ret = yapf_file |
| elif fpath == top_dir or parent_dir == fpath: |
| # If we're at the top level directory, or if we're at root |
| # there is no provided style. |
| ret = None |
| else: |
| # Otherwise recurse on the current directory. |
| ret = _FindYapfConfigFile(parent_dir, yapf_config_cache, top_dir) |
| yapf_config_cache[fpath] = ret |
| return ret |
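
# For illustration: with top_dir /repo and fpath /repo/a/b/c.py, the lookup
# checks /repo/a/b/.style.yapf, then /repo/a/.style.yapf, then
# /repo/.style.yapf, caching the result (possibly None) for every path
# visited along the way.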
| |
| |
| def _GetYapfIgnorePatterns(top_dir): |
| """Returns all patterns in the .yapfignore file. |
| |
| yapf is supposed to handle the ignoring of files listed in .yapfignore itself, |
| but this functionality appears to break when explicitly passing files to |
| yapf for formatting. According to |
| https://github.com/google/yapf/blob/HEAD/README.rst#excluding-files-from-formatting-yapfignore, |
| the .yapfignore file should be in the directory that yapf is invoked from, |
| which we assume to be the top level directory in this case. |
| |
| Args: |
| top_dir: The top level directory for the repository being formatted. |
| |
| Returns: |
| A set of all fnmatch patterns to be ignored. |
| """ |
| yapfignore_file = os.path.join(top_dir, '.yapfignore') |
| ignore_patterns = set() |
| if not os.path.exists(yapfignore_file): |
| return ignore_patterns |
| |
| with open(yapfignore_file) as f: |
| for line in f.readlines(): |
| stripped_line = line.strip() |
| # Comments and blank lines should be ignored. |
| if stripped_line.startswith('#') or stripped_line == '': |
| continue |
| ignore_patterns.add(stripped_line) |
| return ignore_patterns |
| |
| |
| def _FilterYapfIgnoredFiles(filepaths, patterns): |
| """Filters out any filepaths that match any of the given patterns. |
| |
| Args: |
| filepaths: An iterable of strings containing filepaths to filter. |
| patterns: An iterable of strings containing fnmatch patterns to filter on. |
| |
| Returns: |
| A list of strings containing all the elements of |filepaths| that did not |
| match any of the patterns in |patterns|. |
| """ |
| # Not inlined so that tests can use the same implementation. |
| return [f for f in filepaths |
| if not any(fnmatch.fnmatch(f, p) for p in patterns)] |
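
# For illustration (not executed):
#   _FilterYapfIgnoredFiles(['a.py', 'gen/b.py'], {'gen/*'}) == ['a.py']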
| |
| |
| def print_stats(args): |
| """Prints statistics about the change to the user.""" |
| # --no-ext-diff is broken in some versions of Git, so try to work around |
| # this by overriding the environment (but there is still a problem if the |
| # git config key "diff.external" is used). |
| env = GetNoGitPagerEnv() |
| if 'GIT_EXTERNAL_DIFF' in env: |
| del env['GIT_EXTERNAL_DIFF'] |
| |
| return subprocess2.call( |
| ['git', 'diff', '--no-ext-diff', '--stat', '-l100000', '-C50'] + args, |
| env=env) |
| |
| |
| class BuildbucketResponseException(Exception): |
| pass |
| |
| |
| class Settings(object): |
| def __init__(self): |
| self.cc = None |
| self.root = None |
| self.tree_status_url = None |
| self.viewvc_url = None |
| self.updated = False |
| self.is_gerrit = None |
| self.squash_gerrit_uploads = None |
| self.gerrit_skip_ensure_authenticated = None |
| self.git_editor = None |
| self.format_full_by_default = None |
| |
| def _LazyUpdateIfNeeded(self): |
| """Updates the settings from a codereview.settings file, if available.""" |
| if self.updated: |
| return |
| |
| # The only value that actually changes the behavior is |
| # autoupdate = "false". Everything else means "true". |
| autoupdate = ( |
| scm.GIT.GetConfig(self.GetRoot(), 'rietveld.autoupdate', '').lower()) |
| |
| cr_settings_file = FindCodereviewSettingsFile() |
| if autoupdate != 'false' and cr_settings_file: |
| LoadCodereviewSettingsFromFile(cr_settings_file) |
| cr_settings_file.close() |
| |
| self.updated = True |
| |
| @staticmethod |
| def GetRelativeRoot(): |
| return scm.GIT.GetCheckoutRoot('.') |
| |
| def GetRoot(self): |
| if self.root is None: |
| self.root = os.path.abspath(self.GetRelativeRoot()) |
| return self.root |
| |
| def GetTreeStatusUrl(self, error_ok=False): |
| if not self.tree_status_url: |
| self.tree_status_url = self._GetConfig('rietveld.tree-status-url') |
| if self.tree_status_url is None and not error_ok: |
| DieWithError( |
| 'You must configure your tree status URL by running ' |
| '"git cl config".') |
| return self.tree_status_url |
| |
| def GetViewVCUrl(self): |
| if not self.viewvc_url: |
| self.viewvc_url = self._GetConfig('rietveld.viewvc-url') |
| return self.viewvc_url |
| |
| def GetBugPrefix(self): |
| return self._GetConfig('rietveld.bug-prefix') |
| |
| def GetRunPostUploadHook(self): |
| run_post_upload_hook = self._GetConfig( |
| 'rietveld.run-post-upload-hook') |
| return run_post_upload_hook == "True" |
| |
| def GetDefaultCCList(self): |
| return self._GetConfig('rietveld.cc') |
| |
| def GetSquashGerritUploads(self): |
| """Returns True if uploads to Gerrit should be squashed by default.""" |
| if self.squash_gerrit_uploads is None: |
| self.squash_gerrit_uploads = self.GetSquashGerritUploadsOverride() |
| if self.squash_gerrit_uploads is None: |
| # Default is squash now (http://crbug.com/611892#c23). |
| self.squash_gerrit_uploads = self._GetConfig( |
| 'gerrit.squash-uploads').lower() != 'false' |
| return self.squash_gerrit_uploads |
| |
| def GetSquashGerritUploadsOverride(self): |
| """Return True or False if codereview.settings should be overridden. |
| |
| Returns None if no override has been defined. |
| """ |
| # See also http://crbug.com/611892#c23 |
| result = self._GetConfig('gerrit.override-squash-uploads').lower() |
| if result == 'true': |
| return True |
| if result == 'false': |
| return False |
| return None |
| |
| def GetGerritSkipEnsureAuthenticated(self): |
| """Return True if EnsureAuthenticated should not be done for Gerrit |
| uploads.""" |
| if self.gerrit_skip_ensure_authenticated is None: |
| self.gerrit_skip_ensure_authenticated = self._GetConfig( |
| 'gerrit.skip-ensure-authenticated').lower() == 'true' |
| return self.gerrit_skip_ensure_authenticated |
| |
| def GetGitEditor(self): |
| """Returns the editor specified in the git config, or None if none is.""" |
| if self.git_editor is None: |
| # Git requires single quotes for paths with spaces. We need to replace |
| # them with double quotes for Windows to treat such paths as a single |
| # path. |
| self.git_editor = self._GetConfig('core.editor').replace('\'', '"') |
| return self.git_editor or None |
| |
| def GetLintRegex(self): |
| return self._GetConfig('rietveld.cpplint-regex', DEFAULT_LINT_REGEX) |
| |
| def GetLintIgnoreRegex(self): |
| return self._GetConfig( |
| 'rietveld.cpplint-ignore-regex', DEFAULT_LINT_IGNORE_REGEX) |
| |
| def GetFormatFullByDefault(self): |
| if self.format_full_by_default is None: |
| result = ( |
| RunGit(['config', '--bool', 'rietveld.format-full-by-default'], |
| error_ok=True).strip()) |
| self.format_full_by_default = (result == 'true') |
| return self.format_full_by_default |
| |
| def _GetConfig(self, key, default=''): |
| self._LazyUpdateIfNeeded() |
| return scm.GIT.GetConfig(self.GetRoot(), key, default) |
| |
| |
| class _CQState(object): |
| """Enum for states of CL with respect to CQ.""" |
| NONE = 'none' |
| DRY_RUN = 'dry_run' |
| COMMIT = 'commit' |
| |
| ALL_STATES = [NONE, DRY_RUN, COMMIT] |
| |
| |
| class _ParsedIssueNumberArgument(object): |
| def __init__(self, issue=None, patchset=None, hostname=None): |
| self.issue = issue |
| self.patchset = patchset |
| self.hostname = hostname |
| |
| @property |
| def valid(self): |
| return self.issue is not None |
| |
| |
| def ParseIssueNumberArgument(arg): |
| """Parses the issue argument and returns _ParsedIssueNumberArgument.""" |
| fail_result = _ParsedIssueNumberArgument() |
| |
| if isinstance(arg, int): |
| return _ParsedIssueNumberArgument(issue=arg) |
| if not isinstance(arg, basestring): |
| return fail_result |
| |
| if arg.isdigit(): |
| return _ParsedIssueNumberArgument(issue=int(arg)) |
| if not arg.startswith('http'): |
| return fail_result |
| |
| url = gclient_utils.UpgradeToHttps(arg) |
| for gerrit_url, short_url in _KNOWN_GERRIT_TO_SHORT_URLS.items(): |
| if url.startswith(short_url): |
| url = gerrit_url + url[len(short_url):] |
| break |
| |
| try: |
| parsed_url = urllib.parse.urlparse(url) |
| except ValueError: |
| return fail_result |
| |
| # Gerrit's new UI is https://domain/c/project/+/<issue_number>[/[patchset]] |
| # But old GWT UI is https://domain/#/c/project/+/<issue_number>[/[patchset]] |
  # Short urls like https://domain/<issue_number> can be used, but don't allow
  # specifying the patchset on Gerrit itself (you'd 404); we allow it here.
| if parsed_url.path == '/': |
| part = parsed_url.fragment |
| else: |
| part = parsed_url.path |
| |
| match = re.match( |
| r'(/c(/.*/\+)?)?/(?P<issue>\d+)(/(?P<patchset>\d+)?/?)?$', part) |
| if not match: |
| return fail_result |
| |
| issue = int(match.group('issue')) |
| patchset = match.group('patchset') |
| return _ParsedIssueNumberArgument( |
| issue=issue, |
| patchset=int(patchset) if patchset else None, |
| hostname=parsed_url.netloc) |
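
# For illustration (not executed), all of the following parse to issue 123456
# (the first two with hostname=None, the URL forms with
# hostname='chromium-review.googlesource.com'):
#   ParseIssueNumberArgument(123456)
#   ParseIssueNumberArgument('123456')
#   ParseIssueNumberArgument(
#       'https://chromium-review.googlesource.com/c/chromium/src/+/123456')
#   ParseIssueNumberArgument('https://crrev.com/c/123456/3')  # patchset 3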
| |
| |
| def _create_description_from_log(args): |
| """Pulls out the commit log to use as a base for the CL description.""" |
| log_args = [] |
| if len(args) == 1 and not args[0].endswith('.'): |
| log_args = [args[0] + '..'] |
| elif len(args) == 1 and args[0].endswith('...'): |
| log_args = [args[0][:-1]] |
| elif len(args) == 2: |
| log_args = [args[0] + '..' + args[1]] |
| else: |
| log_args = args[:] # Hope for the best! |
| return RunGit(['log', '--pretty=format:%B%n'] + log_args) |
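
# For illustration (not executed), the args become "git log" ranges:
#   ['origin/main']         -> git log origin/main..
#   ['origin/main...']      -> git log origin/main..
#   ['origin/main', 'HEAD'] -> git log origin/main..HEAD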
| |
| |
| class GerritChangeNotExists(Exception): |
| def __init__(self, issue, url): |
| self.issue = issue |
| self.url = url |
| super(GerritChangeNotExists, self).__init__() |
| |
| def __str__(self): |
| return 'change %s at %s does not exist or you have no access to it' % ( |
| self.issue, self.url) |
| |
| |
| _CommentSummary = collections.namedtuple( |
| '_CommentSummary', ['date', 'message', 'sender', 'autogenerated', |
| # TODO(tandrii): these two aren't known in Gerrit. |
| 'approval', 'disapproval']) |
| |
| |
| class Changelist(object): |
| """Changelist works with one changelist in local branch. |
| |
| Notes: |
| * Not safe for concurrent multi-{thread,process} use. |
  * Caches values from the current branch. Therefore, re-use after a branch
    change with great care.
| """ |
| |
| def __init__(self, |
| branchref=None, |
| issue=None, |
| codereview_host=None, |
| commit_date=None): |
| """Create a new ChangeList instance. |
| |
| **kwargs will be passed directly to Gerrit implementation. |
| """ |
| # Poke settings so we get the "configure your server" message if necessary. |
| global settings |
| if not settings: |
| # Happens when git_cl.py is used as a utility library. |
| settings = Settings() |
| |
| self.branchref = branchref |
| if self.branchref: |
| assert branchref.startswith('refs/heads/') |
| self.branch = scm.GIT.ShortBranchName(self.branchref) |
| else: |
| self.branch = None |
| self.commit_date = commit_date |
| self.upstream_branch = None |
| self.lookedup_issue = False |
| self.issue = issue or None |
| self.description = None |
| self.lookedup_patchset = False |
| self.patchset = None |
| self.cc = None |
| self.more_cc = [] |
| self._remote = None |
| self._cached_remote_url = (False, None) # (is_cached, value) |
| |
| # Lazily cached values. |
| self._gerrit_host = None # e.g. chromium-review.googlesource.com |
| self._gerrit_server = None # e.g. https://chromium-review.googlesource.com |
| # Map from change number (issue) to its detail cache. |
| self._detail_cache = {} |
| |
| if codereview_host is not None: |
| assert not codereview_host.startswith('https://'), codereview_host |
| self._gerrit_host = codereview_host |
| self._gerrit_server = 'https://%s' % codereview_host |
| |
| def GetCCList(self): |
| """Returns the users cc'd on this CL. |
| |
| The return value is a string suitable for passing to git cl with the --cc |
| flag. |
| """ |
| if self.cc is None: |
| base_cc = settings.GetDefaultCCList() |
| more_cc = ','.join(self.more_cc) |
| self.cc = ','.join(filter(None, (base_cc, more_cc))) or '' |
| return self.cc |
| |
| def ExtendCC(self, more_cc): |
| """Extends the list of users to cc on this CL based on the changed files.""" |
| self.more_cc.extend(more_cc) |
| |
| def GetCommitDate(self): |
| """Returns the commit date as provided in the constructor""" |
| return self.commit_date |
| |
| def GetBranch(self): |
| """Returns the short branch name, e.g. 'main'.""" |
| if not self.branch: |
| branchref = scm.GIT.GetBranchRef(settings.GetRoot()) |
| if not branchref: |
| return None |
| self.branchref = branchref |
| self.branch = scm.GIT.ShortBranchName(self.branchref) |
| return self.branch |
| |
| def GetBranchRef(self): |
| """Returns the full branch name, e.g. 'refs/heads/main'.""" |
| self.GetBranch() # Poke the lazy loader. |
| return self.branchref |
| |
| def _GitGetBranchConfigValue(self, key, default=None): |
| return scm.GIT.GetBranchConfig( |
| settings.GetRoot(), self.GetBranch(), key, default) |
| |
| def _GitSetBranchConfigValue(self, key, value): |
| action = 'set %s to %r' % (key, value) |
| if not value: |
| action = 'unset %s' % key |
| assert self.GetBranch(), 'a branch is needed to ' + action |
| return scm.GIT.SetBranchConfig( |
| settings.GetRoot(), self.GetBranch(), key, value) |
| |
| @staticmethod |
| def FetchUpstreamTuple(branch): |
| """Returns a tuple containing remote and remote ref, |
| e.g. 'origin', 'refs/heads/main' |
| """ |
| remote, upstream_branch = scm.GIT.FetchUpstreamTuple( |
| settings.GetRoot(), branch) |
| if not remote or not upstream_branch: |
| DieWithError( |
| 'Unable to determine default branch to diff against.\n' |
| 'Either pass complete "git diff"-style arguments, like\n' |
| ' git cl upload origin/main\n' |
| 'or verify this branch is set up to track another \n' |
| '(via the --track argument to "git checkout -b ...").') |
| |
| return remote, upstream_branch |
| |
| def GetCommonAncestorWithUpstream(self): |
| upstream_branch = self.GetUpstreamBranch() |
| if not scm.GIT.IsValidRevision(settings.GetRoot(), upstream_branch): |
| DieWithError('The upstream for the current branch (%s) does not exist ' |
| 'anymore.\nPlease fix it and try again.' % self.GetBranch()) |
| return git_common.get_or_create_merge_base(self.GetBranch(), |
| upstream_branch) |
| |
| def GetUpstreamBranch(self): |
| if self.upstream_branch is None: |
| remote, upstream_branch = self.FetchUpstreamTuple(self.GetBranch()) |
| if remote != '.': |
| upstream_branch = upstream_branch.replace('refs/heads/', |
| 'refs/remotes/%s/' % remote) |
| upstream_branch = upstream_branch.replace('refs/branch-heads/', |
| 'refs/remotes/branch-heads/') |
| self.upstream_branch = upstream_branch |
| return self.upstream_branch |
| |
| def GetRemoteBranch(self): |
| if not self._remote: |
| remote, branch = None, self.GetBranch() |
| seen_branches = set() |
| while branch not in seen_branches: |
| seen_branches.add(branch) |
| remote, branch = self.FetchUpstreamTuple(branch) |
| branch = scm.GIT.ShortBranchName(branch) |
| if remote != '.' or branch.startswith('refs/remotes'): |
| break |
| else: |
| remotes = RunGit(['remote'], error_ok=True).split() |
| if len(remotes) == 1: |
| remote, = remotes |
| elif 'origin' in remotes: |
| remote = 'origin' |
          logging.warning('Could not determine which remote this change is '
                          'associated with, so defaulting to "%s".' % remote)
        else:
          logging.warning('Could not determine which remote this change is '
                          'associated with.')
| branch = 'HEAD' |
| if branch.startswith('refs/remotes'): |
| self._remote = (remote, branch) |
| elif branch.startswith('refs/branch-heads/'): |
| self._remote = (remote, branch.replace('refs/', 'refs/remotes/')) |
| else: |
| self._remote = (remote, 'refs/remotes/%s/%s' % (remote, branch)) |
| return self._remote |
| |
| def GetRemoteUrl(self): |
| """Return the configured remote URL, e.g. 'git://example.org/foo.git/'. |
| |
| Returns None if there is no remote. |
| """ |
| is_cached, value = self._cached_remote_url |
| if is_cached: |
| return value |
| |
| remote, _ = self.GetRemoteBranch() |
| url = scm.GIT.GetConfig(settings.GetRoot(), 'remote.%s.url' % remote, '') |
| |
    # Check if the remote url can be parsed as a URL.
| host = urllib.parse.urlparse(url).netloc |
| if host: |
| self._cached_remote_url = (True, url) |
| return url |
| |
    # If it cannot be parsed as a URL, assume it is a local directory,
| # probably a git cache. |
| logging.warning('"%s" doesn\'t appear to point to a git host. ' |
| 'Interpreting it as a local directory.', url) |
| if not os.path.isdir(url): |
| logging.error( |
| 'Remote "%(remote)s" for branch "%(branch)s" points to "%(url)s", ' |
| 'but it doesn\'t exist.', |
| {'remote': remote, 'branch': self.GetBranch(), 'url': url}) |
| return None |
| |
| cache_path = url |
| url = scm.GIT.GetConfig(url, 'remote.%s.url' % remote, '') |
| |
| host = urllib.parse.urlparse(url).netloc |
| if not host: |
| logging.error( |
| 'Remote "%(remote)s" for branch "%(branch)s" points to ' |
| '"%(cache_path)s", but it is misconfigured.\n' |
| '"%(cache_path)s" must be a git repo and must have a remote named ' |
| '"%(remote)s" pointing to the git host.', { |
| 'remote': remote, |
| 'cache_path': cache_path, |
| 'branch': self.GetBranch()}) |
| return None |
| |
| self._cached_remote_url = (True, url) |
| return url |
| |
| def GetIssue(self): |
| """Returns the issue number as a int or None if not set.""" |
| if self.issue is None and not self.lookedup_issue: |
| self.issue = self._GitGetBranchConfigValue(ISSUE_CONFIG_KEY) |
| if self.issue is not None: |
| self.issue = int(self.issue) |
| self.lookedup_issue = True |
| return self.issue |
| |
| def GetIssueURL(self, short=False): |
| """Get the URL for a particular issue.""" |
| issue = self.GetIssue() |
| if not issue: |
| return None |
| server = self.GetCodereviewServer() |
| if short: |
| server = _KNOWN_GERRIT_TO_SHORT_URLS.get(server, server) |
| return '%s/%s' % (server, issue) |
| |
| def FetchDescription(self, pretty=False): |
| assert self.GetIssue(), 'issue is required to query Gerrit' |
| |
| if self.description is None: |
| data = self._GetChangeDetail(['CURRENT_REVISION', 'CURRENT_COMMIT']) |
| current_rev = data['current_revision'] |
| self.description = data['revisions'][current_rev]['commit']['message'] |
| |
| if not pretty: |
| return self.description |
| |
| # Set width to 72 columns + 2 space indent. |
| wrapper = textwrap.TextWrapper(width=74, replace_whitespace=True) |
| wrapper.initial_indent = wrapper.subsequent_indent = ' ' |
| lines = self.description.splitlines() |
| return '\n'.join([wrapper.fill(line) for line in lines]) |
| |
| def GetPatchset(self): |
| """Returns the patchset number as a int or None if not set.""" |
| if self.patchset is None and not self.lookedup_patchset: |
| self.patchset = self._GitGetBranchConfigValue(PATCHSET_CONFIG_KEY) |
| if self.patchset is not None: |
| self.patchset = int(self.patchset) |
| self.lookedup_patchset = True |
| return self.patchset |
| |
| def GetAuthor(self): |
| return scm.GIT.GetConfig(settings.GetRoot(), 'user.email') |
| |
| def SetPatchset(self, patchset): |
| """Set this branch's patchset. If patchset=0, clears the patchset.""" |
| assert self.GetBranch() |
| if not patchset: |
| self.patchset = None |
| else: |
| self.patchset = int(patchset) |
| self._GitSetBranchConfigValue(PATCHSET_CONFIG_KEY, str(self.patchset)) |
| |
| def SetIssue(self, issue=None): |
| """Set this branch's issue. If issue isn't given, clears the issue.""" |
| assert self.GetBranch() |
| if issue: |
| issue = int(issue) |
| self._GitSetBranchConfigValue(ISSUE_CONFIG_KEY, str(issue)) |
| self.issue = issue |
| codereview_server = self.GetCodereviewServer() |
| if codereview_server: |
| self._GitSetBranchConfigValue( |
| CODEREVIEW_SERVER_CONFIG_KEY, codereview_server) |
| else: |
| # Reset all of these just to be clean. |
| reset_suffixes = [ |
| 'last-upload-hash', |
| ISSUE_CONFIG_KEY, |
| PATCHSET_CONFIG_KEY, |
| CODEREVIEW_SERVER_CONFIG_KEY, |
| 'gerritsquashhash', |
| ] |
| for prop in reset_suffixes: |
| try: |
| self._GitSetBranchConfigValue(prop, None) |
| except subprocess2.CalledProcessError: |
| pass |
| msg = RunGit(['log', '-1', '--format=%B']).strip() |
| if msg and git_footers.get_footer_change_id(msg): |
| print('WARNING: The change patched into this branch has a Change-Id. ' |
| 'Removing it.') |
| RunGit(['commit', '--amend', '-m', |
| git_footers.remove_footer(msg, 'Change-Id')]) |
| self.lookedup_issue = True |
| self.issue = None |
| self.patchset = None |
| |
| def GetAffectedFiles(self, upstream): |
| try: |
| return [f for _, f in scm.GIT.CaptureStatus(settings.GetRoot(), upstream)] |
| except subprocess2.CalledProcessError: |
| DieWithError( |
| ('\nFailed to diff against upstream branch %s\n\n' |
| 'This branch probably doesn\'t exist anymore. To reset the\n' |
| 'tracking branch, please run\n' |
| ' git branch --set-upstream-to origin/main %s\n' |
| 'or replace origin/main with the relevant branch') % |
| (upstream, self.GetBranch())) |
| |
| def UpdateDescription(self, description, force=False): |
| assert self.GetIssue(), 'issue is required to update description' |
| |
| if gerrit_util.HasPendingChangeEdit( |
| self.GetGerritHost(), self._GerritChangeIdentifier()): |
| if not force: |
| confirm_or_exit( |
| 'The description cannot be modified while the issue has a pending ' |
| 'unpublished edit. Either publish the edit in the Gerrit web UI ' |
| 'or delete it.\n\n', action='delete the unpublished edit') |
| |
| gerrit_util.DeletePendingChangeEdit( |
| self.GetGerritHost(), self._GerritChangeIdentifier()) |
| gerrit_util.SetCommitMessage( |
| self.GetGerritHost(), self._GerritChangeIdentifier(), |
| description, notify='NONE') |
| |
| self.description = description |
| |
| def _GetCommonPresubmitArgs(self, verbose, upstream): |
| args = [ |
| '--root', settings.GetRoot(), |
| '--upstream', upstream, |
| ] |
| |
| args.extend(['--verbose'] * verbose) |
| |
| author = self.GetAuthor() |
| gerrit_url = self.GetCodereviewServer() |
| issue = self.GetIssue() |
| patchset = self.GetPatchset() |
| if author: |
| args.extend(['--author', author]) |
| if gerrit_url: |
| args.extend(['--gerrit_url', gerrit_url]) |
| if issue: |
| args.extend(['--issue', str(issue)]) |
| if patchset: |
| args.extend(['--patchset', str(patchset)]) |
| |
| return args |
| |
| def RunHook(self, committing, may_prompt, verbose, parallel, upstream, |
| description, all_files, resultdb=False, realm=None): |
| """Calls sys.exit() if the hook fails; returns a HookResults otherwise.""" |
| args = self._GetCommonPresubmitArgs(verbose, upstream) |
| args.append('--commit' if committing else '--upload') |
| if may_prompt: |
| args.append('--may_prompt') |
| if parallel: |
| args.append('--parallel') |
| if all_files: |
| args.append('--all_files') |
| |
| with gclient_utils.temporary_file() as description_file: |
| with gclient_utils.temporary_file() as json_output: |
| |
| gclient_utils.FileWrite(description_file, description) |
| args.extend(['--json_output', json_output]) |
| args.extend(['--description_file', description_file]) |
| args.extend(['--gerrit_project', self.GetGerritProject()]) |
| |
| start = time_time() |
| cmd = ['vpython', PRESUBMIT_SUPPORT] + args |
| if resultdb and realm: |
| cmd = ['rdb', 'stream', '-new', '-realm', realm, '--'] + cmd |
| elif resultdb: |
| # TODO (crbug.com/1113463): store realm somewhere and look it up so |
| # it is not required to pass the realm flag |
| print('Note: ResultDB reporting will NOT be performed because --realm' |
| ' was not specified. To enable ResultDB, please run the command' |
| ' again with the --realm argument to specify the LUCI realm.') |
| |
| p = subprocess2.Popen(cmd) |
| exit_code = p.wait() |
| |
| metrics.collector.add_repeated('sub_commands', { |
| 'command': 'presubmit', |
| 'execution_time': time_time() - start, |
| 'exit_code': exit_code, |
| }) |
| |
| if exit_code: |
| sys.exit(exit_code) |
| |
| json_results = gclient_utils.FileRead(json_output) |
| return json.loads(json_results) |
| |
| def RunPostUploadHook(self, verbose, upstream, description): |
| args = self._GetCommonPresubmitArgs(verbose, upstream) |
| args.append('--post_upload') |
| |
| with gclient_utils.temporary_file() as description_file: |
| gclient_utils.FileWrite(description_file, description) |
| args.extend(['--description_file', description_file]) |
| p = subprocess2.Popen(['vpython', PRESUBMIT_SUPPORT] + args) |
| p.wait() |
| |
| def _GetDescriptionForUpload(self, options, git_diff_args, files): |
| # Get description message for upload. |
| if self.GetIssue(): |
| description = self.FetchDescription() |
| elif options.message: |
| description = options.message |
| else: |
| description = _create_description_from_log(git_diff_args) |
| if options.title and options.squash: |
| description = options.title + '\n\n' + description |
| |
| # Extract bug number from branch name. |
| bug = options.bug |
| fixed = options.fixed |
| match = re.match(r'(?P<type>bug|fix(?:e[sd])?)[_-]?(?P<bugnum>\d+)', |
| self.GetBranch()) |
| if not bug and not fixed and match: |
| if match.group('type') == 'bug': |
| bug = match.group('bugnum') |
| else: |
| fixed = match.group('bugnum') |
| |
| change_description = ChangeDescription(description, bug, fixed) |
| |
| # Set the reviewer list now so that presubmit checks can access it. |
| if options.reviewers or options.tbrs or options.add_owners_to: |
| change_description.update_reviewers( |
| options.reviewers, options.tbrs, options.add_owners_to, files, |
| self.GetAuthor()) |
| |
| return change_description |
| |
| def _GetTitleForUpload(self, options): |
| # When not squashing, just return options.title. |
| if not options.squash: |
| return options.title |
| |
    # On first upload, the patchset title is always this string, while
    # options.title gets converted to the first line of the message.
| if not self.GetIssue(): |
| return 'Initial upload' |
| |
| # When uploading subsequent patchsets, options.message is taken as the title |
| # if options.title is not provided. |
| if options.title: |
| return options.title |
| if options.message: |
| return options.message.strip() |
| |
| # Use the subject of the last commit as title by default. |
| title = RunGit(['show', '-s', '--format=%s', 'HEAD']).strip() |
| if options.force or options.skip_title: |
| return title |
| user_title = gclient_utils.AskForData('Title for patchset [%s]: ' % title) |
| return user_title or title |
| |
| def CMDUpload(self, options, git_diff_args, orig_args): |
| """Uploads a change to codereview.""" |
| custom_cl_base = None |
| if git_diff_args: |
| custom_cl_base = base_branch = git_diff_args[0] |
| else: |
| if self.GetBranch() is None: |
| DieWithError('Can\'t upload from detached HEAD state. Get on a branch!') |
| |
| # Default to diffing against common ancestor of upstream branch |
| base_branch = self.GetCommonAncestorWithUpstream() |
| git_diff_args = [base_branch, 'HEAD'] |
| |
| # Fast best-effort checks to abort before running potentially expensive |
| # hooks if uploading is likely to fail anyway. Passing these checks does |
| # not guarantee that uploading will not fail. |
| self.EnsureAuthenticated(force=options.force) |
| self.EnsureCanUploadPatchset(force=options.force) |
| |
| # Apply watchlists on upload. |
| watchlist = watchlists.Watchlists(settings.GetRoot()) |
| files = self.GetAffectedFiles(base_branch) |
| if not options.bypass_watchlists: |
| self.ExtendCC(watchlist.GetWatchersForPaths(files)) |
| |
| change_desc = self._GetDescriptionForUpload(options, git_diff_args, files) |
| if not options.bypass_hooks: |
| hook_results = self.RunHook( |
| committing=False, |
| may_prompt=not options.force, |
| verbose=options.verbose, |
| parallel=options.parallel, |
| upstream=base_branch, |
| description=change_desc.description, |
| all_files=False, |
| resultdb=options.resultdb, |
| realm=options.realm) |
| self.ExtendCC(hook_results['more_cc']) |
| |
| print_stats(git_diff_args) |
| ret = self.CMDUploadChange( |
| options, git_diff_args, custom_cl_base, change_desc) |
| if not ret: |
| self._GitSetBranchConfigValue( |
| 'last-upload-hash', scm.GIT.ResolveCommit(settings.GetRoot(), 'HEAD')) |
| # Run post upload hooks, if specified. |
| if settings.GetRunPostUploadHook(): |
| self.RunPostUploadHook( |
| options.verbose, base_branch, change_desc.description) |
| |
| # Upload all dependencies if specified. |
| if options.dependencies: |
| print() |
| print('--dependencies has been specified.') |
| print('All dependent local branches will be re-uploaded.') |
| print() |
| # Remove the dependencies flag from args so that we do not end up in a |
| # loop. |
| orig_args.remove('--dependencies') |
| ret = upload_branch_deps(self, orig_args, options.force) |
| return ret |
| |
| def SetCQState(self, new_state): |
| """Updates the CQ state for the latest patchset. |
| |
| Issue must have been already uploaded and known. |
| """ |
| assert new_state in _CQState.ALL_STATES |
| assert self.GetIssue() |
| try: |
| vote_map = { |
| _CQState.NONE: 0, |
| _CQState.DRY_RUN: 1, |
| _CQState.COMMIT: 2, |
| } |
| labels = {'Commit-Queue': vote_map[new_state]} |
| notify = False if new_state == _CQState.DRY_RUN else None |
| gerrit_util.SetReview( |
| self.GetGerritHost(), self._GerritChangeIdentifier(), |
| labels=labels, notify=notify) |
| return 0 |
| except KeyboardInterrupt: |
| raise |
| except: |
| print('WARNING: Failed to %s.\n' |
| 'Either:\n' |
| ' * Your project has no CQ,\n' |
| ' * You don\'t have permission to change the CQ state,\n' |
| ' * There\'s a bug in this code (see stack trace below).\n' |
| 'Consider specifying which bots to trigger manually or asking your ' |
| 'project owners for permissions or contacting Chrome Infra at:\n' |
| 'https://www.chromium.org/infra\n\n' % |
| ('cancel CQ' if new_state == _CQState.NONE else 'trigger CQ')) |
| # Still raise exception so that stack trace is printed. |
| raise |
| |
| def GetGerritHost(self): |
| # Lazy load of configs. |
| self.GetCodereviewServer() |
| if self._gerrit_host and '.' not in self._gerrit_host: |
| # Abbreviated domain like "chromium" instead of chromium.googlesource.com. |
| # This happens for internal stuff http://crbug.com/614312. |
| parsed = urllib.parse.urlparse(self.GetRemoteUrl()) |
| if parsed.scheme == 'sso': |
| print('WARNING: using non-https URLs for remote is likely broken\n' |
| ' Your current remote is: %s' % self.GetRemoteUrl()) |
| self._gerrit_host = '%s.googlesource.com' % self._gerrit_host |
| self._gerrit_server = 'https://%s' % self._gerrit_host |
| return self._gerrit_host |
| |
| def _GetGitHost(self): |
| """Returns git host to be used when uploading change to Gerrit.""" |
| remote_url = self.GetRemoteUrl() |
| if not remote_url: |
| return None |
| return urllib.parse.urlparse(remote_url).netloc |
| |
| def GetCodereviewServer(self): |
| if not self._gerrit_server: |
| # If we're on a branch then get the server potentially associated |
| # with that branch. |
| if self.GetIssue() and self.GetBranch(): |
| self._gerrit_server = self._GitGetBranchConfigValue( |
| CODEREVIEW_SERVER_CONFIG_KEY) |
| if self._gerrit_server: |
| self._gerrit_host = urllib.parse.urlparse(self._gerrit_server).netloc |
| if not self._gerrit_server: |
      # We assume the repo is hosted on Gerrit, and hence the Gerrit server
      # has a "-review" suffix on the lowest-level subdomain.
| parts = self._GetGitHost().split('.') |
| parts[0] = parts[0] + '-review' |
| self._gerrit_host = '.'.join(parts) |
| self._gerrit_server = 'https://%s' % self._gerrit_host |
| return self._gerrit_server |
| |
| def GetGerritProject(self): |
| """Returns Gerrit project name based on remote git URL.""" |
| remote_url = self.GetRemoteUrl() |
| if remote_url is None: |
| logging.warning('can\'t detect Gerrit project.') |
| return None |
| project = urllib.parse.urlparse(remote_url).path.strip('/') |
| if project.endswith('.git'): |
| project = project[:-len('.git')] |
| # *.googlesource.com hosts ensure that Git/Gerrit projects don't start with |
| # 'a/' prefix, because 'a/' prefix is used to force authentication in |
| # gitiles/git-over-https protocol. E.g., |
| # https://chromium.googlesource.com/a/v8/v8 refers to the same repo/project |
| # as |
| # https://chromium.googlesource.com/v8/v8 |
| if project.startswith('a/'): |
| project = project[len('a/'):] |
| return project |
| |
| def _GerritChangeIdentifier(self): |
| """Handy method for gerrit_util.ChangeIdentifier for a given CL. |
| |
    Not to be confused with the value of the "Change-Id:" footer.
    If the Gerrit project can be determined, this will speed up Gerrit HTTP
    API RPCs.
| """ |
| project = self.GetGerritProject() |
| if project: |
| return gerrit_util.ChangeIdentifier(project, self.GetIssue()) |
| # Fall back on still unique, but less efficient change number. |
| return str(self.GetIssue()) |
| |
| def EnsureAuthenticated(self, force, refresh=None): |
| """Best effort check that user is authenticated with Gerrit server.""" |
| if settings.GetGerritSkipEnsureAuthenticated(): |
| # For projects with unusual authentication schemes. |
| # See http://crbug.com/603378. |
| return |
| |
| # Check presence of cookies only if using cookies-based auth method. |
| cookie_auth = gerrit_util.Authenticator.get() |
| if not isinstance(cookie_auth, gerrit_util.CookiesAuthenticator): |
| return |
| |
| remote_url = self.GetRemoteUrl() |
| if remote_url is None: |
| logging.warning('invalid remote') |
| return |
| if urllib.parse.urlparse(remote_url).scheme != 'https': |
| logging.warning('Ignoring branch %(branch)s with non-https remote ' |
| '%(remote)s', { |
| 'branch': self.branch, |
| 'remote': self.GetRemoteUrl() |
| }) |
| return |
| |
| # Lazy-loader to identify Gerrit and Git hosts. |
| self.GetCodereviewServer() |
| git_host = self._GetGitHost() |
| assert self._gerrit_server and self._gerrit_host and git_host |
| |
| gerrit_auth = cookie_auth.get_auth_header(self._gerrit_host) |
| git_auth = cookie_auth.get_auth_header(git_host) |
| if gerrit_auth and git_auth: |
| if gerrit_auth == git_auth: |
| return |
| all_gsrc = cookie_auth.get_auth_header('d0esN0tEx1st.googlesource.com') |
| print( |
| 'WARNING: You have different credentials for Gerrit and git hosts:\n' |
| ' %s\n' |
| ' %s\n' |
| ' Consider running the following command:\n' |
| ' git cl creds-check\n' |
| ' %s\n' |
| ' %s' % |
| (git_host, self._gerrit_host, |
| ('Hint: delete creds for .googlesource.com' if all_gsrc else ''), |
| cookie_auth.get_new_password_message(git_host))) |
| if not force: |
| confirm_or_exit('If you know what you are doing', action='continue') |
| return |
| else: |
| missing = ( |
| ([] if gerrit_auth else [self._gerrit_host]) + |
| ([] if git_auth else [git_host])) |
| DieWithError('Credentials for the following hosts are required:\n' |
| ' %s\n' |
| 'These are read from %s (or legacy %s)\n' |
| '%s' % ( |
| '\n '.join(missing), |
| cookie_auth.get_gitcookies_path(), |
| cookie_auth.get_netrc_path(), |
| cookie_auth.get_new_password_message(git_host))) |
| |
| def EnsureCanUploadPatchset(self, force): |
| if not self.GetIssue(): |
| return |
| |
| status = self._GetChangeDetail()['status'] |
| if status in ('MERGED', 'ABANDONED'): |
| DieWithError('Change %s has been %s, new uploads are not allowed' % |
| (self.GetIssueURL(), |
| 'submitted' if status == 'MERGED' else 'abandoned')) |
| |
| # TODO(vadimsh): For some reason the chunk of code below was skipped if |
| # 'is_gce' is True. I'm just refactoring it to be 'skip if not cookies'. |
| # Apparently this check is not very important? Otherwise get_auth_email |
| # could have been added to other implementations of Authenticator. |
| cookies_auth = gerrit_util.Authenticator.get() |
| if not isinstance(cookies_auth, gerrit_util.CookiesAuthenticator): |
| return |
| |
| cookies_user = cookies_auth.get_auth_email(self.GetGerritHost()) |
| if self.GetIssueOwner() == cookies_user: |
| return |
| logging.debug('change %s owner is %s, cookies user is %s', |
| self.GetIssue(), self.GetIssueOwner(), cookies_user) |
| # Maybe user has linked accounts or something like that, |
| # so ask what Gerrit thinks of this user. |
| details = gerrit_util.GetAccountDetails(self.GetGerritHost(), 'self') |
| if details['email'] == self.GetIssueOwner(): |
| return |
| if not force: |
| print('WARNING: Change %s is owned by %s, but you authenticate to Gerrit ' |
| 'as %s.\n' |
| 'Uploading may fail due to lack of permissions.' % |
| (self.GetIssue(), self.GetIssueOwner(), details['email'])) |
| confirm_or_exit(action='upload') |
| |
| def GetStatus(self): |
| """Applies a rough heuristic to give a simple summary of an issue's review |
| or CQ status, assuming adherence to a common workflow. |
| |
| Returns None if no issue for this branch, or one of the following keywords: |
| * 'error' - error from review tool (including deleted issues) |
| * 'unsent' - no reviewers added |
| * 'waiting' - waiting for review |
| * 'reply' - waiting for uploader to reply to review |
| * 'lgtm' - Code-Review label has been set |
| * 'dry-run' - dry-running in the CQ |
| * 'commit' - in the CQ |
| * 'closed' - successfully submitted or abandoned |
| """ |
| if not self.GetIssue(): |
| return None |
| |
| try: |
| data = self._GetChangeDetail([ |
| 'DETAILED_LABELS', 'CURRENT_REVISION', 'SUBMITTABLE']) |
| except GerritChangeNotExists: |
| return 'error' |
| |
| if data['status'] in ('ABANDONED', 'MERGED'): |
| return 'closed' |
| |
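| # By convention, a Commit-Queue vote of +2 requests a full CQ run and +1 |
| # requests a dry run, matching the 'commit' and 'dry-run' states below. |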
| cq_label = data['labels'].get('Commit-Queue', {}) |
| max_cq_vote = 0 |
| for vote in cq_label.get('all', []): |
| max_cq_vote = max(max_cq_vote, vote.get('value', 0)) |
| if max_cq_vote == 2: |
| return 'commit' |
| if max_cq_vote == 1: |
| return 'dry-run' |
| |
| if data['labels'].get('Code-Review', {}).get('approved'): |
| return 'lgtm' |
| |
| if not data.get('reviewers', {}).get('REVIEWER', []): |
| return 'unsent' |
| |
| owner = data['owner'].get('_account_id') |
| messages = sorted(data.get('messages', []), key=lambda m: m.get('date')) |
| while messages: |
| m = messages.pop() |
| if m.get('tag', '').startswith('autogenerated:cq:'): |
| # Ignore replies from CQ. |
| continue |
| if m.get('author', {}).get('_account_id') == owner: |
| # Most recent message was by owner. |
| return 'waiting' |
| else: |
| # Some reply from non-owner. |
| return 'reply' |
| |
| # Somehow there are no messages even though there are reviewers. |
| return 'unsent' |
| |
| def GetMostRecentPatchset(self): |
| if not self.GetIssue(): |
| return None |
| |
| data = self._GetChangeDetail(['CURRENT_REVISION']) |
| patchset = data['revisions'][data['current_revision']]['_number'] |
| self.SetPatchset(patchset) |
| return patchset |
| |
| def GetMostRecentDryRunPatchset(self): |
| """Get patchsets equivalent to the most recent patchset and return |
| the patchset with the latest dry run. If none have been dry run, return |
| the latest patchset.""" |
| if not self.GetIssue(): |
| return None |
| |
| data = self._GetChangeDetail(['ALL_REVISIONS']) |
| patchset = data['revisions'][data['current_revision']]['_number'] |
| dry_run = {int(m['_revision_number']) |
| for m in data.get('messages', []) |
| if m.get('tag', '').endswith('dry-run')} |
| |
| for revision_info in sorted(data.get('revisions', {}).values(), |
| key=lambda c: c['_number'], reverse=True): |
| if revision_info['_number'] in dry_run: |
| patchset = revision_info['_number'] |
| break |
| if revision_info.get('kind', '') not in \ |
| ('NO_CHANGE', 'NO_CODE_CHANGE', 'TRIVIAL_REBASE'): |
| break |
| self.SetPatchset(patchset) |
| return patchset |
| |
| def AddComment(self, message, publish=None): |
| gerrit_util.SetReview( |
| self.GetGerritHost(), self._GerritChangeIdentifier(), |
| msg=message, ready=publish) |
| |
| def GetCommentsSummary(self, readable=True): |
| # DETAILED_ACCOUNTS is needed to get the email addresses of accounts. |
| # CURRENT_REVISION is included to get the latest patchset so that |
| # only the robot comments from the latest patchset can be shown. |
| messages = self._GetChangeDetail( |
| options=['MESSAGES', 'DETAILED_ACCOUNTS', |
| 'CURRENT_REVISION']).get('messages', []) |
| file_comments = gerrit_util.GetChangeComments( |
| self.GetGerritHost(), self._GerritChangeIdentifier()) |
| robot_file_comments = gerrit_util.GetChangeRobotComments( |
| self.GetGerritHost(), self._GerritChangeIdentifier()) |
| |
| # Add the robot comments onto the list of comments, but only |
| # keep those that are from the latest patchset. |
| latest_patch_set = self.GetMostRecentPatchset() |
| for path, robot_comments in robot_file_comments.items(): |
| line_comments = file_comments.setdefault(path, []) |
| line_comments.extend( |
| [c for c in robot_comments if c['patch_set'] == latest_patch_set]) |
| |
| # Build dictionary of file comments for easy access and sorting later. |
| # {author+date: {path: {patchset: {line: url+message}}}} |
| comments = collections.defaultdict( |
| lambda: collections.defaultdict(lambda: collections.defaultdict(dict))) |
| |
| server = self.GetCodereviewServer() |
| if server in _KNOWN_GERRIT_TO_SHORT_URLS: |
| # /c/ is automatically added by the short URL server. |
| url_prefix = '%s/%s' % (_KNOWN_GERRIT_TO_SHORT_URLS[server], |
| self.GetIssue()) |
| else: |
| url_prefix = '%s/c/%s' % (server, self.GetIssue()) |
| |
| for path, line_comments in file_comments.items(): |
| for comment in line_comments: |
| tag = comment.get('tag', '') |
| if tag.startswith('autogenerated') and 'robot_id' not in comment: |
| continue |
| key = (comment['author']['email'], comment['updated']) |
| if comment.get('side', 'REVISION') == 'PARENT': |
| patchset = 'Base' |
| else: |
| patchset = 'PS%d' % comment['patch_set'] |
| line = comment.get('line', 0) |
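| # Build a comment link such as '<url_prefix>/3/foo.cc#12', with a 'b' |
| # prefix on the line anchor (e.g. '#b12') when the comment is on the |
| # parent ("base") side. File name and numbers here are illustrative. |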
| url = ('%s/%s/%s#%s%s' % |
| (url_prefix, comment['patch_set'], path, |
| 'b' if comment.get('side') == 'PARENT' else '', |
| str(line) if line else '')) |
| comments[key][path][patchset][line] = (url, comment['message']) |
| |
| summaries = [] |
| for msg in messages: |
| summary = self._BuildCommentSummary(msg, comments, readable) |
| if summary: |
| summaries.append(summary) |
| return summaries |
| |
| @staticmethod |
| def _BuildCommentSummary(msg, comments, readable): |
| key = (msg['author']['email'], msg['date']) |
| # Don't bother showing autogenerated messages that don't have associated |
| # file or line comments. This will filter out most autogenerated |
| # messages, but will keep robot comments like those from Tricium. |
| is_autogenerated = msg.get('tag', '').startswith('autogenerated') |
| if is_autogenerated and not comments.get(key): |
| return None |
| message = msg['message'] |
| # Gerrit spits out nanoseconds. |
| assert len(msg['date'].split('.')[-1]) == 9 |
| date = datetime.datetime.strptime(msg['date'][:-3], |
| '%Y-%m-%d %H:%M:%S.%f') |
| if key in comments: |
| message += '\n' |
| for path, patchsets in sorted(comments.get(key, {}).items()): |
| if readable: |
| message += '\n%s' % path |
| for patchset, lines in sorted(patchsets.items()): |
| for line, (url, content) in sorted(lines.items()): |
| if line: |
| line_str = 'Line %d' % line |
| path_str = '%s:%d:' % (path, line) |
| else: |
| line_str = 'File comment' |
| path_str = '%s:0:' % path |
| if readable: |
| message += '\n %s, %s: %s' % (patchset, line_str, url) |
| message += '\n %s\n' % content |
| else: |
| message += '\n%s ' % path_str |
| message += '\n%s\n' % content |
| |
| return _CommentSummary( |
| date=date, |
| message=message, |
| sender=msg['author']['email'], |
| autogenerated=is_autogenerated, |
| # These could be inferred from the text messages and correlated with the |
| # Code-Review label maximum; however, this is not reliable. |
| # Leaving as is until the need arises. |
| approval=False, |
| disapproval=False, |
| ) |
| |
| def CloseIssue(self): |
| gerrit_util.AbandonChange( |
| self.GetGerritHost(), self._GerritChangeIdentifier(), msg='') |
| |
| def SubmitIssue(self, wait_for_merge=True): |
| gerrit_util.SubmitChange( |
| self.GetGerritHost(), self._GerritChangeIdentifier(), |
| wait_for_merge=wait_for_merge) |
| |
| def _GetChangeDetail(self, options=None): |
| """Returns details of associated Gerrit change and caching results.""" |
| options = options or [] |
| assert self.GetIssue(), 'issue is required to query Gerrit' |
| |
| # Optimization to avoid multiple RPCs: |
| if 'CURRENT_REVISION' in options or 'ALL_REVISIONS' in options: |
| options.append('CURRENT_COMMIT') |
| |
| # Normalize issue and options for consistent keys in cache. |
| cache_key = str(self.GetIssue()) |
| options_set = frozenset(o.upper() for o in options) |
| |
| for cached_options_set, data in self._detail_cache.get(cache_key, []): |
| # Assumption: data fetched before with extra options is suitable |
| # for return for a smaller set of options. |
| # For example, if we cached data for |
| # options=[CURRENT_REVISION, DETAILED_FOOTERS] |
| # and request is for options=[CURRENT_REVISION], |
| # THEN we can return prior cached data. |
| if options_set.issubset(cached_options_set): |
| return data |
| |
| try: |
| data = gerrit_util.GetChangeDetail( |
| self.GetGerritHost(), self._GerritChangeIdentifier(), options_set) |
| except gerrit_util.GerritError as e: |
| if e.http_status == 404: |
| raise GerritChangeNotExists(self.GetIssue(), self.GetCodereviewServer()) |
| raise |
| |
| self._detail_cache.setdefault(cache_key, []).append((options_set, data)) |
| return data |
| |
| def _GetChangeCommit(self): |
| assert self.GetIssue(), 'issue must be set to query Gerrit' |
| try: |
| data = gerrit_util.GetChangeCommit( |
| self.GetGerritHost(), self._GerritChangeIdentifier()) |
| except gerrit_util.GerritError as e: |
| if e.http_status == 404: |
| raise GerritChangeNotExists(self.GetIssue(), self.GetCodereviewServer()) |
| raise |
| return data |
| |
| def _IsCqConfigured(self): |
| detail = self._GetChangeDetail(['LABELS']) |
| return u'Commit-Queue' in detail.get('labels', {}) |
| |
| def CMDLand(self, force, bypass_hooks, verbose, parallel, resultdb, realm): |
| if git_common.is_dirty_git_tree('land'): |
| return 1 |
| |
| detail = self._GetChangeDetail(['CURRENT_REVISION', 'LABELS']) |
| if not force and self._IsCqConfigured(): |
| confirm_or_exit('\nIt seems this repository has a CQ, ' |
| 'which can test and land changes for you. ' |
| 'Are you sure you wish to bypass it?\n', |
| action='bypass CQ') |
| differs = True |
| last_upload = self._GitGetBranchConfigValue('gerritsquashhash') |
| # Note: git diff outputs nothing if there is no diff. |
| if not last_upload or RunGit(['diff', last_upload]).strip(): |
| print('WARNING: Some changes from the local branch haven\'t been uploaded.') |
| else: |
| if detail['current_revision'] == last_upload: |
| differs = False |
| else: |
| print('WARNING: Local branch contents differ from latest uploaded ' |
| 'patchset.') |
| if differs: |
| if not force: |
| confirm_or_exit( |
| 'Do you want to submit latest Gerrit patchset and bypass hooks?\n', |
| action='submit') |
| print('WARNING: Bypassing hooks and submitting latest uploaded patchset.') |
| elif not bypass_hooks: |
| upstream = self.GetCommonAncestorWithUpstream() |
| if self.GetIssue(): |
| description = self.FetchDescription() |
| else: |
| description = _create_description_from_log([upstream]) |
| self.RunHook( |
| committing=True, |
| may_prompt=not force, |
| verbose=verbose, |
| parallel=parallel, |
| upstream=upstream, |
| description=description, |
| all_files=False, |
| resultdb=resultdb, |
| realm=realm) |
| |
| self.SubmitIssue(wait_for_merge=True) |
| print('Issue %s has been submitted.' % self.GetIssueURL()) |
| links = self._GetChangeCommit().get('web_links', []) |
| for link in links: |
| if link.get('name') == 'gitiles' and link.get('url'): |
| print('Landed as: %s' % link.get('url')) |
| break |
| return 0 |
| |
| def CMDPatchWithParsedIssue(self, parsed_issue_arg, nocommit, force): |
| assert parsed_issue_arg.valid |
| |
| self.issue = parsed_issue_arg.issue |
| |
| if parsed_issue_arg.hostname: |
| self._gerrit_host = parsed_issue_arg.hostname |
| self._gerrit_server = 'https://%s' % self._gerrit_host |
| |
| try: |
| detail = self._GetChangeDetail(['ALL_REVISIONS']) |
| except GerritChangeNotExists as e: |
| DieWithError(str(e)) |
| |
| if not parsed_issue_arg.patchset: |
| # Use current revision by default. |
| revision_info = detail['revisions'][detail['current_revision']] |
| patchset = int(revision_info['_number']) |
| else: |
| patchset = parsed_issue_arg.patchset |
| for revision_info in detail['revisions'].values(): |
| if int(revision_info['_number']) == parsed_issue_arg.patchset: |
| break |
| else: |
| DieWithError('Couldn\'t find patchset %i in change %i' % |
| (parsed_issue_arg.patchset, self.GetIssue())) |
| |
| remote_url = self.GetRemoteUrl() |
| if remote_url.endswith('.git'): |
| remote_url = remote_url[:-len('.git')] |
| remote_url = remote_url.rstrip('/') |
| |
| fetch_info = revision_info['fetch']['http'] |
| fetch_info['url'] = fetch_info['url'].rstrip('/') |
| |
| if remote_url != fetch_info['url']: |
| DieWithError('Trying to patch a change from %s but this repo appears ' |
| 'to be %s.' % (fetch_info['url'], remote_url)) |
| |
| RunGit(['fetch', fetch_info['url'], fetch_info['ref']]) |
| |
| if force: |
| RunGit(['reset', '--hard', 'FETCH_HEAD']) |
| print('Checked out commit for change %i patchset %i locally' % |
| (parsed_issue_arg.issue, patchset)) |
| elif nocommit: |
| RunGit(['cherry-pick', '--no-commit', 'FETCH_HEAD']) |
| print('Patch applied to index.') |
| else: |
| RunGit(['cherry-pick', 'FETCH_HEAD']) |
| print('Committed patch for change %i patchset %i locally.' % |
| (parsed_issue_arg.issue, patchset)) |
| print('Note: this created a local commit which does not have ' |
| 'the same hash as the one uploaded for review. This will make ' |
| 'uploading changes on top of this branch difficult.\n' |
| 'If you want to do that, use "git cl patch --force" instead.') |
| |
| if self.GetBranch(): |
| self.SetIssue(parsed_issue_arg.issue) |
| self.SetPatchset(patchset) |
| fetched_hash = scm.GIT.ResolveCommit(settings.GetRoot(), 'FETCH_HEAD') |
| self._GitSetBranchConfigValue('last-upload-hash', fetched_hash) |
| self._GitSetBranchConfigValue('gerritsquashhash', fetched_hash) |
| else: |
| print('WARNING: You are in detached HEAD state.\n' |
| 'The patch has been applied to your checkout, but you will not be ' |
| 'able to upload a new patch set to the Gerrit issue.\n' |
| 'Try using the \'-b\' option if you would like to work on a ' |
| 'branch and/or upload a new patch set.') |
| |
| return 0 |
| |
| def _GerritCommitMsgHookCheck(self, offer_removal): |
| hook = os.path.join(settings.GetRoot(), '.git', 'hooks', 'commit-msg') |
| if not os.path.exists(hook): |
| return |
| # Crude attempt to distinguish the Gerrit Code Review hook from a |
| # potentially custom, developer-made one. |
| data = gclient_utils.FileRead(hook) |
| if not ('From Gerrit Code Review' in data and 'add_ChangeId()' in data): |
| return |
| print('WARNING: You have the Gerrit commit-msg hook installed.\n' |
| 'It is not necessary for uploading with git cl in squash mode, ' |
| 'and may interfere with it in subtle ways.\n' |
| 'We recommend you remove the commit-msg hook.') |
| if offer_removal: |
| if ask_for_explicit_yes('Do you want to remove it now?'): |
| gclient_utils.rm_file_or_tree(hook) |
| print('Gerrit commit-msg hook removed.') |
| else: |
| print('OK, will keep Gerrit commit-msg hook in place.') |
| |
| def _CleanUpOldTraces(self): |
| """Keep only the last |MAX_TRACES| traces.""" |
| try: |
| traces = sorted([ |
| os.path.join(TRACES_DIR, f) |
| for f in os.listdir(TRACES_DIR) |
| if (os.path.isfile(os.path.join(TRACES_DIR, f)) |
| and not f.startswith('tmp')) |
| ]) |
| traces_to_delete = traces[:-MAX_TRACES] |
| for trace in traces_to_delete: |
| os.remove(trace) |
| except OSError: |
| print('WARNING: Failed to remove old git traces from\n' |
| ' %s\n' |
| 'Consider removing them manually.' % TRACES_DIR) |
| |
| def _WriteGitPushTraces(self, trace_name, traces_dir, git_push_metadata): |
| """Zip and write the git push traces stored in traces_dir.""" |
| gclient_utils.safe_makedirs(TRACES_DIR) |
| traces_zip = trace_name + '-traces' |
| traces_readme = trace_name + '-README' |
| # Create a temporary dir to store git config and gitcookies in. It will be |
| # compressed and stored next to the traces. |
| git_info_dir = tempfile.mkdtemp() |
| git_info_zip = trace_name + '-git-info' |
| |
| git_push_metadata['now'] = datetime_now().strftime('%Y-%m-%dT%H:%M:%S.%f') |
| |
| git_push_metadata['trace_name'] = trace_name |
| gclient_utils.FileWrite( |
| traces_readme, TRACES_README_FORMAT % git_push_metadata) |
| |
| # Keep only the first 6 characters of the git hashes on the packet |
| # trace. This greatly decreases size after compression. |
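| # (Illustrative: a full 40-hex-character hash is rewritten to just its |
| # first 6 characters.) |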
| packet_traces = os.path.join(traces_dir, 'trace-packet') |
| if os.path.isfile(packet_traces): |
| contents = gclient_utils.FileRead(packet_traces) |
| gclient_utils.FileWrite( |
| packet_traces, GIT_HASH_RE.sub(r'\1', contents)) |
| shutil.make_archive(traces_zip, 'zip', traces_dir) |
| |
| # Collect and compress the git config and gitcookies. |
| git_config = RunGit(['config', '-l']) |
| gclient_utils.FileWrite( |
| os.path.join(git_info_dir, 'git-config'), |
| git_config) |
| |
| cookie_auth = gerrit_util.Authenticator.get() |
| if isinstance(cookie_auth, gerrit_util.CookiesAuthenticator): |
| gitcookies_path = cookie_auth.get_gitcookies_path() |
| if os.path.isfile(gitcookies_path): |
| gitcookies = gclient_utils.FileRead(gitcookies_path) |
| gclient_utils.FileWrite( |
| os.path.join(git_info_dir, 'gitcookies'), |
| GITCOOKIES_REDACT_RE.sub('REDACTED', gitcookies)) |
| shutil.make_archive(git_info_zip, 'zip', git_info_dir) |
| |
| gclient_utils.rmtree(git_info_dir) |
| |
| def _RunGitPushWithTraces(self, refspec, refspec_opts, git_push_metadata): |
| """Run git push and collect the traces resulting from the execution.""" |
| # Create a temporary directory to store traces in. Traces will be compressed |
| # and stored in a 'traces' dir inside depot_tools. |
| traces_dir = tempfile.mkdtemp() |
| trace_name = os.path.join( |
| TRACES_DIR, datetime_now().strftime('%Y%m%dT%H%M%S.%f')) |
| |
| env = os.environ.copy() |
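| # Have git write packet, curl, and trace2 event traces into traces_dir. |
| # GIT_REDACT_COOKIES keeps the listed cookie values out of the curl trace; |
| # GIT_TR2_EVENT is presumably the older spelling of GIT_TRACE2_EVENT, set |
| # for compatibility with older git versions. |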
| env['GIT_REDACT_COOKIES'] = 'o,SSO,GSSO_Uberproxy' |
| env['GIT_TR2_EVENT'] = os.path.join(traces_dir, 'tr2-event') |
| env['GIT_TRACE2_EVENT'] = os.path.join(traces_dir, 'tr2-event') |
| env['GIT_TRACE_CURL'] = os.path.join(traces_dir, 'trace-curl') |
| env['GIT_TRACE_CURL_NO_DATA'] = '1' |
| env['GIT_TRACE_PACKET'] = os.path.join(traces_dir, 'trace-packet') |
| |
| try: |
| push_returncode = 0 |
| remote_url = self.GetRemoteUrl() |
| before_push = time_time() |
| push_stdout = gclient_utils.CheckCallAndFilter( |
| ['git', 'push', remote_url, refspec], |
| env=env, |
| print_stdout=True, |
| # Flush after every line: useful for seeing progress when running as a |
| # recipe. |
| filter_fn=lambda _: sys.stdout.flush()) |
| push_stdout = push_stdout.decode('utf-8', 'replace') |
| except subprocess2.CalledProcessError as e: |
| push_returncode = e.returncode |
| raise GitPushError( |
| 'Failed to create a change. Please examine the output above for the ' |
| 'reason for the failure.\n' |
| 'Hint: run the command below to diagnose common Git/Gerrit ' |
| 'credential problems:\n' |
| ' git cl creds-check\n' |
| '\n' |
| 'If git-cl is not working correctly, file a bug under the Infra>SDK ' |
| 'component including the files below.\n' |
| 'Review the files before upload, since they might contain sensitive ' |
| 'information.\n' |
| 'Set the Restrict-View-Google label so that they are not publicly ' |
| 'accessible.\n' + TRACES_MESSAGE % {'trace_name': trace_name}) |
| finally: |
| execution_time = time_time() - before_push |
| metrics.collector.add_repeated('sub_commands', { |
| 'command': 'git push', |
| 'execution_time': execution_time, |
| 'exit_code': push_returncode, |
| 'arguments': metrics_utils.extract_known_subcommand_args(refspec_opts), |
| }) |
| |
| git_push_metadata['execution_time'] = execution_time |
| git_push_metadata['exit_code'] = push_returncode |
| self._WriteGitPushTraces(trace_name, traces_dir, git_push_metadata) |
| |
| self._CleanUpOldTraces() |
| gclient_utils.rmtree(traces_dir) |
| |
| return push_stdout |
| |
| def CMDUploadChange(self, options, git_diff_args, custom_cl_base, |
| change_desc): |
| """Upload the current branch to Gerrit, retry if new remote HEAD is |
| found. options and change_desc may be mutated.""" |
| try: |
| return self._CMDUploadChange(options, git_diff_args, custom_cl_base, |
| change_desc) |
| except GitPushError as e: |
| remote, remote_branch = self.GetRemoteBranch() |
| should_retry = remote_branch == DEFAULT_OLD_BRANCH and \ |
| gerrit_util.GetProjectHead( |
| self._gerrit_host, self.GetGerritProject()) == 'refs/heads/main' |
| if not should_retry: |
| DieWithError(str(e), change_desc) |
| |
| print("WARNING: Detected HEAD change in upstream, fetching remote state") |
| RunGit(['fetch', remote]) |
| options.edit_description = False |
| options.force = True |
| try: |
| self._CMDUploadChange(options, git_diff_args, custom_cl_base, change_desc) |
| except GitPushError as e: |
| DieWithError(str(e), change_desc) |
| |
| def _CMDUploadChange(self, options, git_diff_args, custom_cl_base, |
| change_desc): |
| """Upload the current branch to Gerrit.""" |
| remote, remote_branch = self.GetRemoteBranch() |
| branch = GetTargetRef(remote, remote_branch, options.target_branch) |
| |
| if options.squash: |
| self._GerritCommitMsgHookCheck(offer_removal=not options.force) |
| if self.GetIssue(): |
| # User requested to change description |
| if options.edit_description: |
| change_desc.prompt() |
| change_id = self._GetChangeDetail()['change_id'] |
| change_desc.ensure_change_id(change_id) |
| else: # if not self.GetIssue() |
| if not options.force: |
| change_desc.prompt() |
| change_ids = git_footers.get_footer_change_id(change_desc.description) |
| if len(change_ids) == 1: |
| change_id = change_ids[0] |
| else: |
| change_id = GenerateGerritChangeId(change_desc.description) |
| change_desc.ensure_change_id(change_id) |
| |
| if options.preserve_tryjobs: |
| change_desc.set_preserve_tryjobs() |
| |
| remote, upstream_branch = self.FetchUpstreamTuple(self.GetBranch()) |
| parent = self._ComputeParent( |
| remote, upstream_branch, custom_cl_base, options.force, change_desc) |
| tree = RunGit(['rev-parse', 'HEAD:']).strip() |
| with gclient_utils.temporary_file() as desc_tempfile: |
| gclient_utils.FileWrite(desc_tempfile, change_desc.description) |
| ref_to_push = RunGit( |
| ['commit-tree', tree, '-p', parent, '-F', desc_tempfile]).strip() |
| else: # if not options.squash |
| if not git_footers.get_footer_change_id(change_desc.description): |
| DownloadGerritHook(False) |
| change_desc.set_description( |
| self._AddChangeIdToCommitMessage( |
| change_desc.description, git_diff_args)) |
| ref_to_push = 'HEAD' |
| # For no-squash mode, we assume the remote called "origin" is the one we |
| # want. It is not worthwhile to support different workflows for |
| # no-squash mode. |
| parent = 'origin/%s' % branch |
| change_id = git_footers.get_footer_change_id(change_desc.description)[0] |
| |
| SaveDescriptionBackup(change_desc) |
| commits = RunGitSilent(['rev-list', '%s..%s' % (parent, |
| ref_to_push)]).splitlines() |
| if len(commits) > 1: |
| print('WARNING: This will upload %d commits. Run the following command ' |
| 'to see which commits will be uploaded: ' % len(commits)) |
| print('git log %s..%s' % (parent, ref_to_push)) |
| print('You can also use `git squash-branch` to squash these into a ' |
| 'single commit.') |
| confirm_or_exit(action='upload') |
| |
| reviewers = sorted(change_desc.get_reviewers()) |
| cc = [] |
| # Add CCs from WATCHLISTS and the rietveld.cc git config, but only on the |
| # initial upload, and not if the CL is private or auto-CCing has been |
| # disabled. |
| if not (self.GetIssue() or options.private or options.no_autocc): |
| cc = self.GetCCList().split(',') |
| # Add cc's from the --cc flag. |
| if options.cc: |
| cc.extend(options.cc) |
| cc = [email.strip() for email in cc if email.strip()] |
| if change_desc.get_cced(): |
| cc.extend(change_desc.get_cced()) |
| if self.GetGerritHost() == 'chromium-review.googlesource.com': |
| valid_accounts = set(reviewers + cc) |
| # TODO(crbug/877717): relax this for all hosts. |
| else: |
| valid_accounts = gerrit_util.ValidAccounts( |
| self.GetGerritHost(), reviewers + cc) |
| logging.info('accounts %s are recognized, %s invalid', |
| sorted(valid_accounts), |
| set(reviewers + cc).difference(set(valid_accounts))) |
| |
| # Extra options that can be specified at push time. Doc: |
| # https://gerrit-review.googlesource.com/Documentation/user-upload.html |
| refspec_opts = [] |
| |
| # By default, new changes are started in WIP mode, and subsequent patchsets |
| # don't send email. At any time, passing --send-mail will mark the change |
| # ready and send email for that particular patch. |
| if options.send_mail: |
| refspec_opts.append('ready') |
| refspec_opts.append('notify=ALL') |
| elif not self.GetIssue() and options.squash: |
| refspec_opts.append('wip') |
| else: |
| refspec_opts.append('notify=NONE') |
| |
| # TODO(tandrii): options.message should be posted as a comment |
| # if --send-mail is set on non-initial upload as Rietveld used to do it. |
| |
| # Set options.title in case user was prompted in _GetTitleForUpload and |
| # _CMDUploadChange needs to be called again. |
| options.title = self._GetTitleForUpload(options) |
| if options.title: |
| # Punctuation and whitespace in |title| must be percent-encoded. |
| refspec_opts.append( |
| 'm=' + gerrit_util.PercentEncodeForGitRef(options.title)) |
| |
| if options.private: |
| refspec_opts.append('private') |
| |
| for r in sorted(reviewers): |
| if r in valid_accounts: |
| refspec_opts.append('r=%s' % r) |
| reviewers.remove(r) |
| else: |
| # TODO(tandrii): this should probably be a hard failure. |
| print('WARNING: reviewer %s doesn\'t have a Gerrit account, skipping' |
| % r) |
| for c in sorted(cc): |
| # The refspec option will be rejected if the cc doesn't correspond to an |
| # account, even though the REST call to add such an arbitrary cc may |
| # succeed. |
| if c in valid_accounts: |
| refspec_opts.append('cc=%s' % c) |
| cc.remove(c) |
| |
| if options.topic: |
| # Documentation on Gerrit topics is here: |
| # https://gerrit-review.googlesource.com/Documentation/user-upload.html#topic |
| refspec_opts.append('topic=%s' % options.topic) |
| |
| if options.enable_auto_submit: |
| refspec_opts.append('l=Auto-Submit+1') |
| if options.use_commit_queue: |
| refspec_opts.append('l=Commit-Queue+2') |
| elif options.cq_dry_run: |
| refspec_opts.append('l=Commit-Queue+1') |
| |
| if change_desc.get_reviewers(tbr_only=True): |
| score = gerrit_util.GetCodeReviewTbrScore( |
| self.GetGerritHost(), |
| self.GetGerritProject()) |
| refspec_opts.append('l=Code-Review+%s' % score) |
| |
| # Gerrit sorts hashtags, so order is not important. |
| hashtags = {change_desc.sanitize_hash_tag(t) for t in options.hashtags} |
| if not self.GetIssue(): |
| hashtags.update(change_desc.get_hash_tags()) |
| refspec_opts += ['hashtag=%s' % t for t in sorted(hashtags)] |
| |
| refspec_suffix = '' |
| if refspec_opts: |
| refspec_suffix = '%' + ','.join(refspec_opts) |
| assert ' ' not in refspec_suffix, ( |
| 'spaces not allowed in refspec: "%s"' % refspec_suffix) |
| refspec = '%s:refs/for/%s%s' % (ref_to_push, branch, refspec_suffix) |
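| # The refspec built above looks like, e.g. (values are illustrative): |
| #   'c0ffee12:refs/for/refs/heads/main%ready,notify=ALL,topic=my-topic' |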
| |
| git_push_metadata = { |
| 'gerrit_host': self.GetGerritHost(), |
| 'title': options.title or '<untitled>', |
| 'change_id': change_id, |
| 'description': change_desc.description, |
| } |
| push_stdout = self._RunGitPushWithTraces(refspec, refspec_opts, |
| git_push_metadata) |
| |
| if options.squash: |
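| # Gerrit prints the URL of the created/updated change on a 'remote:' |
| # line, e.g. (illustrative): |
| #   remote: https://host-review.googlesource.com/c/proj/+/123456 Title |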
| regex = re.compile(r'remote:\s+https?://[\w\-\.\+\/#]*/(\d+)\s.*') |
| change_numbers = [m.group(1) |
| for m in map(regex.match, push_stdout.splitlines()) |
| if m] |
| if len(change_numbers) != 1: |
| DieWithError( |
| ('Created|Updated %d issues on Gerrit, but only 1 expected.\n' |
| 'Change-Id: %s') % (len(change_numbers), change_id), change_desc) |
| self.SetIssue(change_numbers[0]) |
| self._GitSetBranchConfigValue('gerritsquashhash', ref_to_push) |
| |
| if self.GetIssue() and (reviewers or cc): |
| # According to tests, GetIssue() is not set for non-squash uploads. |
| # TODO(crbug.com/751901): non-squash uploads in git cl should be removed. |
| gerrit_util.AddReviewers( |
| self.GetGerritHost(), |
| self._GerritChangeIdentifier(), |
| reviewers, cc, |
| notify=bool(options.send_mail)) |
| |
| return 0 |
| |
| def _ComputeParent(self, remote, upstream_branch, custom_cl_base, force, |
| change_desc): |
| """Computes parent of the generated commit to be uploaded to Gerrit. |
| |
| Returns revision or a ref name. |
| """ |
| if custom_cl_base: |
| # Try to avoid creating additional unintended CLs when uploading, unless |
| # the user wants to take this risk. |
| local_ref_of_target_remote = self.GetRemoteBranch()[1] |
| code, _ = RunGitWithCode(['merge-base', '--is-ancestor', custom_cl_base, |
| local_ref_of_target_remote]) |
| if code == 1: |
| print('\nWARNING: Manually specified base of this CL `%s` ' |
| 'doesn\'t seem to belong to target remote branch `%s`.\n\n' |
| 'If you proceed with upload, more than 1 CL may be created by ' |
| 'Gerrit as a result, in turn confusing or crashing git cl.\n\n' |
| 'If you are certain that specified base `%s` has already been ' |
| 'uploaded to Gerrit as another CL, you may proceed.\n' % |
| (custom_cl_base, local_ref_of_target_remote, custom_cl_base)) |
| if not force: |
| confirm_or_exit( |
| 'Do you take responsibility for cleaning up potential mess ' |
| 'resulting from proceeding with upload?', |
| action='upload') |
| return custom_cl_base |
| |
| if remote != '.': |
| return self.GetCommonAncestorWithUpstream() |
| |
| # If our upstream branch is local, we base our squashed commit on its |
| # squashed version. |
| upstream_branch_name = scm.GIT.ShortBranchName(upstream_branch) |
| |
| if upstream_branch_name == 'master': |
| return self.GetCommonAncestorWithUpstream() |
| if upstream_branch_name == 'main': |
| return self.GetCommonAncestorWithUpstream() |
| |
| # Check the squashed hash of the parent. |
| # TODO(tandrii): consider checking the parent change in Gerrit and using |
| # its hash if the tree hash of the latest parent revision (patchset) in |
| # Gerrit matches the tree hash of the parent branch. The upside is fewer |
| # bogus requests to reupload the parent change just because its upload |
| # hash is missing; the downside likely exists, too (albeit unknown yet). |
| parent = scm.GIT.GetBranchConfig( |
| settings.GetRoot(), upstream_branch_name, 'gerritsquashhash') |
| # Verify that the upstream branch has been uploaded too, otherwise |
| # Gerrit will create additional CLs when uploading. |
| if not parent or (RunGitSilent(['rev-parse', upstream_branch + ':']) != |
| RunGitSilent(['rev-parse', parent + ':'])): |
| DieWithError( |
| '\nUpload upstream branch %s first.\n' |
| 'It is likely that this branch has been rebased since its last ' |
| 'upload, so you just need to upload it again.\n' |
| '(If you uploaded it with --no-squash, then branch dependencies ' |
| 'are not supported, and you should reupload with --squash.)' |
| % upstream_branch_name, |
| change_desc) |
| return parent |
| |
| def _AddChangeIdToCommitMessage(self, log_desc, args): |
| """Re-commits using the current message, assumes the commit hook is in |
| place. |
| """ |
| RunGit(['commit', '--amend', '-m', log_desc]) |
| new_log_desc = _create_description_from_log(args) |
| if git_footers.get_footer_change_id(new_log_desc): |
| print('git-cl: Added Change-Id to commit message.') |
| return new_log_desc |
| else: |
| DieWithError('ERROR: Gerrit commit-msg hook not installed.') |
| |
| def CannotTriggerTryJobReason(self): |
| try: |
| data = self._GetChangeDetail() |
| except GerritChangeNotExists: |
| return 'Gerrit doesn\'t know about your change %s' % self.GetIssue() |
| |
| if data['status'] in ('ABANDONED', 'MERGED'): |
| return 'CL %s is closed' % self.GetIssue() |
| |
| def GetGerritChange(self, patchset=None): |
| """Returns a buildbucket.v2.GerritChange message for the current issue.""" |
| host = urllib.parse.urlparse(self.GetCodereviewServer()).hostname |
| issue = self.GetIssue() |
| patchset = int(patchset or self.GetPatchset()) |
| data = self._GetChangeDetail(['ALL_REVISIONS']) |
| |
| assert host and issue and patchset, 'CL must be uploaded first' |
| |
| has_patchset = any( |
| int(revision_data['_number']) == patchset |
| for revision_data in data['revisions'].values()) |
| if not has_patchset: |
| raise Exception('Patchset %d is not known in Gerrit change %d' % |
| (patchset, self.GetIssue())) |
| |
| return { |
| 'host': host, |
| 'change': issue, |
| 'project': data['project'], |
| 'patchset': patchset, |
| } |
| |
| def GetIssueOwner(self): |
| return self._GetChangeDetail(['DETAILED_ACCOUNTS'])['owner']['email'] |
| |
| def GetReviewers(self): |
| details = self._GetChangeDetail(['DETAILED_ACCOUNTS']) |
| return [r['email'] for r in details['reviewers'].get('REVIEWER', [])] |
| |
| |
| def _get_bug_line_values(default_project_prefix, bugs): |
| """Given default_project_prefix and comma separated list of bugs, yields bug |
| line values. |
| |
| Each bug can be either: |
| * a number, which is combined with default_project_prefix |
| * a string, which is left as is. |
| |
| This function may produce more than one line, because bugdroid expects one |
| project per line. |
| |
| >>> list(_get_bug_line_values('v8:', '123,chromium:789')) |
| ['v8:123', 'chromium:789'] |
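| |
| A numeric-only bug with a prefix lacking a colon gets one appended: |
| |
| >>> list(_get_bug_line_values('v8', '123')) |
| ['v8:123'] |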
| """ |
| default_bugs = [] |
| others = [] |
| for bug in bugs.split(','): |
| bug = bug.strip() |
| if bug: |
| try: |
| default_bugs.append(int(bug)) |
| except ValueError: |
| others.append(bug) |
| |
| if default_bugs: |
| default_bugs = ','.join(map(str, default_bugs)) |
| if default_project_prefix: |
| if not default_project_prefix.endswith(':'): |
| default_project_prefix += ':' |
| yield '%s%s' % (default_project_prefix, default_bugs) |
| else: |
| yield default_bugs |
| for other in sorted(others): |
| # Don't bother finding common prefixes; CLs with more than 2 bugs are very rare. |
| yield other |
| |
| |
| class ChangeDescription(object): |
| """Contains a parsed form of the change description.""" |
| R_LINE = r'^[ \t]*(TBR|R)[ \t]*=[ \t]*(.*?)[ \t]*$' |
| CC_LINE = r'^[ \t]*(CC)[ \t]*=[ \t]*(.*?)[ \t]*$' |
| BUG_LINE = r'^[ \t]*(?:(BUG)[ \t]*=|Bug:)[ \t]*(.*?)[ \t]*$' |
| FIXED_LINE = r'^[ \t]*Fixed[ \t]*:[ \t]*(.*?)[ \t]*$' |
| CHERRY_PICK_LINE = r'^\(cherry picked from commit [a-fA-F0-9]{40}\)$' |
| STRIP_HASH_TAG_PREFIX = r'^(\s*(revert|reland)( "|:)?\s*)*' |
| BRACKET_HASH_TAG = r'\s*\[([^\[\]]+)\]' |
| COLON_SEPARATED_HASH_TAG = r'^([a-zA-Z0-9_\- ]+):($|[^:])' |
| BAD_HASH_TAG_CHUNK = r'[^a-zA-Z0-9]+' |
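| # E.g. both subjects '[gpu] Fix crash' and 'gpu: Fix crash' yield the |
| # hash tag 'gpu' (subjects are illustrative). |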
| |
| def __init__(self, description, bug=None, fixed=None): |
| self._description_lines = (description or '').strip().splitlines() |
| if bug: |
| regexp = re.compile(self.BUG_LINE) |
| prefix = settings.GetBugPrefix() |
| if not any((regexp.match(line) for line in self._description_lines)): |
| values = list(_get_bug_line_values(prefix, bug)) |
| self.append_footer('Bug: %s' % ', '.join(values)) |
| if fixed: |
| regexp = re.compile(self.FIXED_LINE) |
| prefix = settings.GetBugPrefix() |
| if not any((regexp.match(line) for line in self._description_lines)): |
| values = list(_get_bug_line_values(prefix, fixed)) |
| self.append_footer('Fixed: %s' % ', '.join(values)) |
| |
| @property # www.logilab.org/ticket/89786 |
| def description(self): # pylint: disable=method-hidden |
| return '\n'.join(self._description_lines) |
| |
| def set_description(self, desc): |
| if isinstance(desc, basestring): |
| lines = desc.splitlines() |
| else: |
| lines = [line.rstrip() for line in desc] |
| while lines and not lines[0]: |
| lines.pop(0) |
| while lines and not lines[-1]: |
| lines.pop(-1) |
| self._description_lines = lines |
| |
| def ensure_change_id(self, change_id): |
| description = self.description |
| footer_change_ids = git_footers.get_footer_change_id(description) |
| # Make sure that the Change-Id in the description matches the given one. |
| if footer_change_ids != [change_id]: |
| if footer_change_ids: |
| # Remove any existing Change-Id footers since they don't match the |
| # expected change_id footer. |
| description = git_footers.remove_footer(description, 'Change-Id') |
| print('WARNING: Change-Id has been set to %s. Use `git cl issue 0` ' |
| 'if you want to set a new one.' % change_id) |
| # Add the expected Change-Id footer. |
| description = git_footers.add_footer_change_id(description, change_id) |
| self.set_description(description) |
| |
| def update_reviewers( |
| self, reviewers, tbrs, add_owners_to, affected_files, author_email): |
| """Rewrites the R=/TBR= line(s) as a single line each. |
| |
| Args: |
| reviewers (list(str)) - list of additional emails to use for reviewers. |
| tbrs (list(str)) - list of additional emails to use for TBRs. |
| add_owners_to (None|'R'|'TBR') - Pass to do an OWNERS lookup for files in |
| the change that are missing OWNER coverage. If this is not None, you |
| must also pass a value for `affected_files`. |
| affected_files (list(str)) - The files affected by the change; used for |
| OWNERS lookups. |
|