From c427ebf244c57f10d904b5437b70aae190a59ea8 Mon Sep 17 00:00:00 2001
From: Mike Pennisi
Date: Mon, 14 Oct 2019 22:06:09 -0400
Subject: [PATCH 1/7] [infra] Integrate with external PR preview system

Introduce a GitHub Action to monitor Pull Requests, storing relevant
information in the project's git repository (thus allowing the external
wptpr.live system to publish previews) and creating GitHub Deployments (thus
alerting contributors to the status of the preview). This Action is triggered
on a regular interval.

Introduce a second GitHub Action to monitor the state of the preview system
and communicate the relevant status to contributors via the Pull Request UI.
This Action is triggered for every GitHub Deployment created by the
previously-described Action.

For example, if three Pull Requests are updated, the first GitHub Action will
inspect them all. It will create GitHub Deployments only for the "trusted"
Pull Requests. The second GitHub Action will run for each of the Deployments,
polling the preview website until either the preview is available or a
timeout is reached. This Action will update the deployment accordingly so
that the author of each Pull Request is aware of the status of the preview
site.

The following flow chart visually describes the same sequence:

                           sync
    gh-101 (trusted) --->   |
    gh-102 (untrusted) ->   |
    gh-103 (trusted) --->   |
                          .----.
                          |sync|--------+---------------------.
                          '----'        |                     |
                                 .-------------.       .-------------.
                                 |deploy gh-101|       |deploy gh-103|
                                 '-------------'       '-------------'
                                        |                     |
                                 poll for preview       poll for preview
                                        |                     |
    gh-101 <------ success ----- preview available      poll for preview
                                                               |
    gh-103 <------- error --------------------------------- timeout
---
 .../workflows/detect_pull_request_preview.yml |  24 +
 .github/workflows/pull_request_previews.yml   |  27 +
 tools/ci/pr_preview.py                        | 402 ++++++++++
 tools/ci/tests/test_pr_preview.py             | 720 ++++++++++++++++++
 tools/tox.ini                                 |   2 +
 5 files changed, 1175 insertions(+)
 create mode 100644 .github/workflows/detect_pull_request_preview.yml
 create mode 100644 .github/workflows/pull_request_previews.yml
 create mode 100755 tools/ci/pr_preview.py
 create mode 100644 tools/ci/tests/test_pr_preview.py

diff --git a/.github/workflows/detect_pull_request_preview.yml b/.github/workflows/detect_pull_request_preview.yml
new file mode 100644
index 00000000000000..f86f6ec9a940a5
--- /dev/null
+++ b/.github/workflows/detect_pull_request_preview.yml
@@ -0,0 +1,24 @@
+name: pr-preview-detect
+on: deployment
+jobs:
+  detect-deployment:
+    runs-on: ubuntu-18.04
+    steps:
+    - uses: actions/checkout@v1
+      # By default, the "checkout" Action will attempt to check out the
+      # revision to be deployed. Because it does not fetch GitHub Pull Request
+      # branches, this will fail.
+      with:
+        ref: refs/heads/master
+    - name: Install dependency
+      run: pip install requests
+    - name: Detect deployment
+      run:
+          ./tools/ci/pr_preview.py
+            --host https://api.github.com
+            --github-project web-platform-tests/wpt
+            detect
+            --target https://wptpr.live
+            --timeout 600
+      env:
+        GITHUB_TOKEN: ${{ secrets.DEPLOY_TOKEN }}
diff --git a/.github/workflows/pull_request_previews.yml b/.github/workflows/pull_request_previews.yml
new file mode 100644
index 00000000000000..300b6649c6efcb
--- /dev/null
+++ b/.github/workflows/pull_request_previews.yml
@@ -0,0 +1,27 @@
+name: pr-preview-sync
+on:
+  schedule:
+    - cron: '*/5 * * * *'
+jobs:
+  update-pr-preview:
+    runs-on: ubuntu-18.04
+    steps:
+    - uses: actions/checkout@v1
+    - name: Install dependency
+      run: pip install requests
+    - name: Synchronize state
+      run:
+          ./tools/ci/pr_preview.py
+            --host https://api.github.com
+            --github-project web-platform-tests/wpt
+            synchronize
+            --window 480
+      env:
+        # This Workflow must trigger further workflows. The GitHub-provided
+        # `GITHUB_TOKEN` secret is incapable of doing this [1], so a
+        # user-generated token must be specified instead. This token requires
+        # the "repo" scope, and it should be stored as a Secret named
+        # "DEPLOY_TOKEN" in this GitHub project.
+        #
+        # [1] https://help.github.com/en/github/automating-your-workflow-with-github-actions/events-that-trigger-workflows
+        GITHUB_TOKEN: ${{ secrets.DEPLOY_TOKEN }}
diff --git a/tools/ci/pr_preview.py b/tools/ci/pr_preview.py
new file mode 100755
index 00000000000000..42833099df8f95
--- /dev/null
+++ b/tools/ci/pr_preview.py
@@ -0,0 +1,402 @@
+#!/usr/bin/env python
+
+# The service provided by this script is not critical, but it shares a GitHub
+# API request quota with critical services. For this reason, all requests to
+# the GitHub API are preceded by a "guard" which verifies that the subsequent
+# request will not deplete the shared quota.
+#
+# In effect, this script will fail rather than interfere with the operation of
+# critical services.
+
+import argparse
+import json
+import logging
+import os
+import subprocess
+import time
+
+import requests
+
+# The ratio of "requests remaining" to "total request quota" below which this
+# script should refuse to interact with the GitHub.com API
+API_RATE_LIMIT_THRESHOLD = 0.2
+# The GitHub Pull Request label which indicates that a Pull Request is expected
+# to be actively mirrored by the preview server
+LABEL = 'safelisted-for-preview'
+# The number of seconds to wait between attempts to verify that a submission
+# preview is available on the Pull Request preview server
+POLLING_PERIOD = 5
+# Pull Requests from authors with the following associations to the project
+# should automatically receive previews
+#
+# https://developer.github.com/v4/enum/commentauthorassociation/ (equivalent
+# documentation for the REST API was not available at the time of writing)
+TRUSTED_AUTHOR_ASSOCIATIONS = ('COLLABORATOR', 'MEMBER', 'OWNER')
+# These GitHub accounts are not associated with individuals, and the Pull
+# Requests they submit rarely require a preview.
+AUTOMATION_GITHUB_USERS = ( + 'chromium-wpt-export-bot', 'moz-wptsync-bot', 'servo-wpt-sync' +) + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +def gh_request(method_name, url, body=None, media_type=None): + github_token = os.environ.get('GITHUB_TOKEN') + + kwargs = { + 'headers': { + 'Authorization': 'token {}'.format(github_token), + 'Accept': media_type or 'application/vnd.github.v3+json' + } + } + method = getattr(requests, method_name.lower()) + + if body is not None: + kwargs['json'] = body + + logger.info('Issuing request: %s %s', method_name.upper(), url) + + resp = method(url, **kwargs) + + resp.raise_for_status() + + logger.info('Response status code: %s', resp.status_code) + + return resp.json() + +def guard(resource): + '''Decorate a `Project` instance method which interacts with the GitHub + API, ensuring that the subsequent request will not deplete the relevant + allowance. This verification does not itself influence rate limiting: + + > Accessing this endpoint does not count against your REST API rate limit. + + https://developer.github.com/v3/rate_limit/ + ''' + def guard_decorator(func): + def wrapped(self, *args, **kwargs): + limits = gh_request('GET', '{}/rate_limit'.format(self._host)) + + values = limits['resources'].get(resource) + + remaining = values['remaining'] + limit = values['limit'] + + logger.info( + 'Limit for "%s" resource: %s/%s', resource, remaining, limit + ) + + if limit and float(remaining) / limit < API_RATE_LIMIT_THRESHOLD: + raise Exception( + 'Exiting to avoid GitHub.com API request throttling.' + ) + + return func(self, *args, **kwargs) + return wrapped + return guard_decorator + +class Project(object): + def __init__(self, host, github_project): + self._host = host + self._github_project = github_project + + @guard('search') + def get_pull_requests(self, updated_since): + window_start = time.strftime('%Y-%m-%dT%H:%M:%SZ', updated_since) + url = '{}/search/issues?q=repo:{}+is:pr+updated:>{}'.format( + self._host, self._github_project, window_start + ) + + logger.info( + 'Searching for Pull Requests updated since %s', window_start + ) + + data = gh_request('GET', url) + + logger.info('Found %d Pull Requests', len(data['items'])) + + if data['incomplete_results']: + raise Exception('Incomplete results') + + return data['items'] + + @guard('core') + def create_ref(self, refspec, revision): + url = '{}/repos/{}/git/refs'.format(self._host, self._github_project) + + logger.info('Creating ref "%s" (%s)', refspec, revision) + + gh_request('POST', url, { + 'ref': 'refs/{}'.format(refspec), + 'sha': revision + }) + + @guard('core') + def update_ref(self, refspec, revision): + url = '{}/repos/{}/git/refs/{}'.format( + self._host, self._github_project, refspec + ) + + logger.info('Updating ref "%s" (%s)', refspec, revision) + + gh_request('PATCH', url, {'sha': revision}) + + @guard('core') + def create_deployment(self, pull_request, revision): + url = '{}/repos/{}/deployments'.format( + self._host, self._github_project + ) + # The Pull Request preview system only exposes one Deployment for a + # given Pull Request. 
Identifying the Deployment by the Pull Request
+        # number ensures that GitHub.com automatically responds to new
+        # Deployments by designating prior Deployments as "inactive"
+        environment = 'gh-{}'.format(pull_request['number'])
+
+        logger.info('Creating Deployment for "%s"', revision)
+
+        return gh_request('POST', url, {
+            'ref': revision,
+            'environment': environment,
+            'auto_merge': False,
+            # Pull Request previews are created regardless of GitHub Commit
+            # Status Checks, so Status Checks should be ignored when creating
+            # GitHub Deployments.
+            'required_contexts': []
+        }, 'application/vnd.github.ant-man-preview+json')
+
+    @guard('core')
+    def get_deployment(self, revision):
+        url = '{}/repos/{}/deployments?sha={}'.format(
+            self._host, self._github_project, revision
+        )
+
+        deployments = gh_request('GET', url)
+
+        return deployments.pop() if len(deployments) else None
+
+    @guard('core')
+    def update_deployment(self, target, deployment, state, description=''):
+        if state in ('pending', 'success'):
+            environment_url = '{}/submissions/{}'.format(
+                target, deployment['environment']
+            )
+        else:
+            environment_url = None
+        url = '{}/repos/{}/deployments/{}/statuses'.format(
+            self._host, self._github_project, deployment['id']
+        )
+
+        gh_request('POST', url, {
+            'state': state,
+            'description': description,
+            'environment_url': environment_url
+        }, 'application/vnd.github.ant-man-preview+json')
+
+class Remote(object):
+    def __init__(self, github_project):
+        # The repository in the GitHub Actions environment is configured with
+        # a remote whose URL uses unauthenticated HTTPS, making it unsuitable
+        # for pushing changes.
+        self._token = os.environ.get('GITHUB_TOKEN')
+
+    def get_revision(self, refspec):
+        output = subprocess.check_output([
+            'git',
+            '-c',
+            'credential.username={}'.format(self._token),
+            '-c',
+            'core.askPass=true',
+            'ls-remote',
+            'origin',
+            'refs/{}'.format(refspec)
+        ])
+
+        if not output:
+            return None
+
+        return output.decode('utf-8').split()[0]
+
+    def delete_ref(self, refspec):
+        full_ref = 'refs/{}'.format(refspec)
+
+        logger.info('Deleting ref "%s"', refspec)
+
+        subprocess.check_call([
+            'git',
+            '-c',
+            'credential.username={}'.format(self._token),
+            '-c',
+            'core.askPass=true',
+            'push',
+            'origin',
+            '--delete',
+            full_ref
+        ])
+
+def is_open(pull_request):
+    return not pull_request['closed_at']
+
+def has_label(pull_request):
+    for label in pull_request['labels']:
+        if label['name'] == LABEL:
+            return True
+
+    return False
+
+def should_be_mirrored(pull_request):
+    return (
+        is_open(pull_request) and
+        pull_request['user']['login'] not in AUTOMATION_GITHUB_USERS and (
+            pull_request['author_association'] in TRUSTED_AUTHOR_ASSOCIATIONS or
+            has_label(pull_request)
+        )
+    )
+
+def is_deployed(host, deployment):
+    response = requests.get(
+        '{}/.git/worktrees/{}/HEAD'.format(host, deployment['environment'])
+    )
+
+    if response.status_code != 200:
+        return False
+
+    return response.text.strip() == deployment['sha']
+
+def synchronize(host, github_project, window):
+    '''Inspect all Pull Requests which have been modified in a given window of
+    time. Add or remove the "preview" label and update or delete the relevant
+    git refs according to the status of each Pull Request.'''
+
+    project = Project(host, github_project)
+    remote = Remote(github_project)
+
+    pull_requests = project.get_pull_requests(
+        time.gmtime(time.time() - window)
+    )
+
+    for pull_request in pull_requests:
+        logger.info('Processing Pull Request #%(number)d', pull_request)
+
+        refspec_trusted = 'prs-trusted-for-preview/{number}'.format(
+            **pull_request
+        )
+        refspec_open = 'prs-open/{number}'.format(**pull_request)
+        revision_latest = remote.get_revision(
+            'pull/{number}/head'.format(**pull_request)
+        )
+        revision_trusted = remote.get_revision(refspec_trusted)
+        revision_open = remote.get_revision(refspec_open)
+
+        if should_be_mirrored(pull_request):
+            logger.info('Pull Request should be mirrored')
+
+            if revision_trusted is None:
+                project.create_ref(refspec_trusted, revision_latest)
+            elif revision_trusted != revision_latest:
+                project.update_ref(refspec_trusted, revision_latest)
+
+            if revision_open is None:
+                project.create_ref(refspec_open, revision_latest)
+            elif revision_open != revision_latest:
+                project.update_ref(refspec_open, revision_latest)
+
+            if project.get_deployment(revision_latest) is None:
+                project.create_deployment(
+                    pull_request, revision_latest
+                )
+        else:
+            logger.info('Pull Request should not be mirrored')
+
+            if not has_label(pull_request) and revision_trusted is not None:
+                remote.delete_ref(refspec_trusted)
+
+            if revision_open is not None and not is_open(pull_request):
+                remote.delete_ref(refspec_open)
+
+def detect(host, github_project, target, timeout):
+    '''Manage the status of a GitHub Deployment by polling the Pull Request
+    preview website until the Deployment is complete or a timeout is
+    reached.'''
+
+    project = Project(host, github_project)
+
+    with open(os.environ['GITHUB_EVENT_PATH']) as handle:
+        data = json.loads(handle.read())
+
+    logger.info('Event data: %s', json.dumps(data, indent=2))
+
+    deployment = data['deployment']
+
+    if not deployment['environment'].startswith('gh-'):
+        logger.info('Deployment environment is unrecognized. Exiting.')
+        return
+
+    message = 'Waiting up to {} seconds for Deployment {} to be available on {}'.format(
+        timeout, deployment['environment'], target
+    )
+    logger.info(message)
+    project.update_deployment(target, deployment, 'pending', message)
+
+    start = time.time()
+
+    while not is_deployed(target, deployment):
+        if time.time() - start > timeout:
+            message = 'Deployment did not become available after {} seconds'.format(timeout)
+            project.update_deployment(target, deployment, 'error', message)
+            raise Exception(message)
+
+        time.sleep(POLLING_PERIOD)
+
+    result = project.update_deployment(target, deployment, 'success')
+    logger.info(json.dumps(result, indent=2))
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(
+        description='''Synchronize the state of a GitHub.com project with the
+            underlying git repository in order to support an externally-hosted
+            Pull Request preview system. Communicate the state of that system
+            via GitHub Deployments associated with each Pull Request.'''
+    )
+    parser.add_argument(
+        '--host', required=True, help='the location of the GitHub API server'
+    )
+    parser.add_argument(
+        '--github-project',
+        required=True,
+        help='''the GitHub organization and GitHub project name, separated by
+            a forward slash (e.g. 
"web-platform-tests/wpt")''' + ) + subparsers = parser.add_subparsers(title='subcommands') + + parser_sync = subparsers.add_parser( + 'synchronize', help=synchronize.__doc__ + ) + parser_sync.add_argument( + '--window', + type=int, + required=True, + help='''the number of seconds prior to the current moment within which + to search for GitHub Pull Requests. Any Pull Requests updated in + this time frame will be considered for synchronization.''' + ) + parser_sync.set_defaults(func=synchronize) + + parser_detect = subparsers.add_parser('detect', help=detect.__doc__) + parser_detect.add_argument( + '--target', + required=True, + help='''the URL of the website to which submission previews are + expected to become available''' + ) + parser_detect.add_argument( + '--timeout', + type=int, + required=True, + help='''the number of seconds to wait for a submission preview to + become available before reporting a GitHub Deployment failure''' + ) + parser_detect.set_defaults(func=detect) + + values = dict(vars(parser.parse_args())) + values.pop('func')(**values) diff --git a/tools/ci/tests/test_pr_preview.py b/tools/ci/tests/test_pr_preview.py new file mode 100644 index 00000000000000..d8ab467bb6d6d5 --- /dev/null +++ b/tools/ci/tests/test_pr_preview.py @@ -0,0 +1,720 @@ +try: + from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer +except ImportError: + # Python 3 case + from http.server import BaseHTTPRequestHandler, HTTPServer +import contextlib +import errno +import json +import os +import shutil +import stat +import subprocess +import tempfile +import threading + +subject = os.path.join( + os.path.dirname(os.path.abspath(__file__)), '..', 'pr_preview.py' +) +test_host = 'localhost' + + +def same_members(a, b): + if len(a) != len(b): + return False + a_copy = list(a) + for elem in b: + try: + a_copy.remove(elem) + except ValueError: + return False + + return len(a_copy) == 0 + + +# When these tests are executed in Windows, files in the temporary git +# repositories may be marked as "read only" at the moment they are intended to +# be deleted. The following handler for `shutil.rmtree` accounts for this by +# making the files writable and attempting to delete them a second time. 
+# +# Source: +# https://stackoverflow.com/questions/1213706/what-user-do-python-scripts-run-as-in-windows +def handle_remove_readonly(func, path, exc): + excvalue = exc[1] + candidates = (os.rmdir, os.remove, os.unlink) + if func in candidates and excvalue.errno == errno.EACCES: + os.chmod(path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) # 0777 + func(path) + else: + raise + + +class MockHandler(BaseHTTPRequestHandler, object): + def do_all(self): + path = self.path.split('?')[0] + request_body = None + + if 'Content-Length' in self.headers: + request_body = self.rfile.read( + int(self.headers['Content-Length']) + ).decode('utf-8') + + if self.headers.get('Content-Type') == 'application/json': + request_body = json.loads(request_body) + + for request, response in self.server.expected_traffic: + if request[0] != self.command: + continue + if request[1] != path: + continue + body_matches = True + for key in request[2]: + body_matches &= request[2][key] == request_body.get(key) + if not body_matches: + continue + break + else: + request = (self.command, path, request_body) + response = (400, {}) + + self.server.actual_traffic.append((request, response)) + self.send_response(response[0]) + self.end_headers() + self.wfile.write(json.dumps(response[1]).encode('utf-8')) + + def do_DELETE(self): + return self.do_all() + + def do_GET(self): + return self.do_all() + + def do_PATCH(self): + return self.do_all() + + def do_POST(self): + return self.do_all() + + +class MockServer(HTTPServer, object): + '''HTTP server that responds to all requests with status code 200 and body + '{}' unless an alternative status code and body are specified for the given + method and path in the `responses` parameter.''' + def __init__(self, address, expected_traffic): + super(MockServer, self).__init__(address, MockHandler) + self.expected_traffic = expected_traffic + self.actual_traffic = [] + + def __enter__(self): + threading.Thread(target=lambda: self.serve_forever()).start() + return self + + def __exit__(self, *args): + self.shutdown() + + +class Requests(object): + get_rate = ('GET', '/rate_limit', {}) + search = ('GET', '/search/issues', {}) + ref_create_open = ( + 'POST', '/repos/test-org/test-repo/git/refs', {'ref':'refs/prs-open/23'} + ) + ref_create_trusted = ( + 'POST', + '/repos/test-org/test-repo/git/refs', + {'ref':'refs/prs-trusted-for-preview/23'} + ) + ref_update_open = ( + 'PATCH', '/repos/test-org/test-repo/git/refs/prs-open/23', {} + ) + ref_update_trusted = ( + 'PATCH', '/repos/test-org/test-repo/git/refs/prs-trusted-for-preview/23', {} + ) + deployment_get = ('GET', '/repos/test-org/test-repo/deployments', {}) + deployment_create = ('POST', '/repos/test-org/test-repo/deployments', {}) + deployment_status_create_pending = ( + 'POST', + '/repos/test-org/test-repo/deployments/24601/statuses', + {'state':'pending'} + ) + deployment_status_create_error = ( + 'POST', + '/repos/test-org/test-repo/deployments/24601/statuses', + {'state':'error'} + ) + deployment_status_create_success = ( + 'POST', + '/repos/test-org/test-repo/deployments/24601/statuses', + {'state':'success'} + ) + preview = ('GET', '/.git/worktrees/gh-45/HEAD', {}) + + +class Responses(object): + no_limit = (200, { + 'resources': { + 'search': { + 'remaining': 100, + 'limit': 100 + }, + 'core': { + 'remaining': 100, + 'limit': 100 + } + } + }) + + +@contextlib.contextmanager +def temp_repo(): + directory = tempfile.mkdtemp() + + try: + subprocess.check_call(['git', 'init'], cwd=directory) + subprocess.check_call( + ['git', 'config', 
'user.name', 'example'], + cwd=directory + ) + subprocess.check_call( + ['git', 'config', 'user.email', 'example@example.com'], + cwd=directory + ) + subprocess.check_call( + ['git', 'commit', '--allow-empty', '-m', 'first'], + cwd=directory + ) + + yield directory + finally: + shutil.rmtree( + directory, ignore_errors=False, onerror=handle_remove_readonly + ) + +def synchronize(expected_traffic, refs={}): + env = { + 'GITHUB_TOKEN': 'c0ffee' + } + env.update(os.environ) + server = MockServer((test_host, 0), expected_traffic) + test_port = server.server_address[1] + remote_refs = {} + + with temp_repo() as local_repo, temp_repo() as remote_repo, server: + subprocess.check_call( + ['git', 'commit', '--allow-empty', '-m', 'first'], + cwd=remote_repo + ) + subprocess.check_call( + ['git', 'commit', '--allow-empty', '-m', 'second'], + cwd=remote_repo + ) + subprocess.check_call( + ['git', 'remote', 'add', 'origin', remote_repo], cwd=local_repo + ) + + for name, value in refs.items(): + subprocess.check_call( + ['git', 'update-ref', name, value], + cwd=remote_repo + ) + + child = subprocess.Popen( + [ + 'python', + subject, + '--host', + 'http://{}:{}'.format(test_host, test_port), + '--github-project', + 'test-org/test-repo', + 'synchronize', + '--window', + '3000' + ], + cwd=local_repo, + env=env + ) + + child.communicate() + lines = subprocess.check_output( + ['git', 'ls-remote', 'origin'], cwd=local_repo + ) + for line in lines.decode('utf-8').strip().split('\n'): + revision, ref = line.split() + + if not ref or ref in ('HEAD', 'refs/heads/master'): + continue + + remote_refs[ref] = revision + + return child.returncode, server.actual_traffic, remote_refs + + +def detect(event, expected_github_traffic, expected_preview_traffic): + env = { + 'GITHUB_TOKEN': 'c0ffee' + } + env.update(os.environ) + github_server = MockServer((test_host, 0), expected_github_traffic) + github_port = github_server.server_address[1] + preview_server = MockServer((test_host, 0), expected_preview_traffic) + preview_port = preview_server.server_address[1] + + with temp_repo() as repo, github_server, preview_server: + env['GITHUB_EVENT_PATH'] = repo + '/event.json' + + with open(env['GITHUB_EVENT_PATH'], 'w') as handle: + handle.write(json.dumps(event)) + + child = subprocess.Popen( + [ + 'python', + subject, + '--host', + 'http://{}:{}'.format(test_host, github_port), + '--github-project', + 'test-org/test-repo', + 'detect', + '--target', + 'http://{}:{}'.format(test_host, preview_port), + '--timeout', + '1' + ], + cwd=repo, + env=env + ) + child.communicate() + + return ( + child.returncode, + github_server.actual_traffic, + preview_server.actual_traffic + ) + + +def test_synchronize_zero_results(): + expected_traffic = [ + (Requests.get_rate, Responses.no_limit), + (Requests.search, ( + 200, + { + 'items': [], + 'incomplete_results': False + } + )) + ] + + returncode, actual_traffic, remote_refs = synchronize(expected_traffic) + + assert returncode == 0 + assert same_members(expected_traffic, actual_traffic) + +def test_synchronize_fail_search_throttled(): + expected_traffic = [ + (Requests.get_rate, ( + 200, + { + 'resources': { + 'search': { + 'remaining': 1, + 'limit': 10 + } + } + } + )) + ] + + returncode, actual_traffic, remote_refs = synchronize(expected_traffic) + + assert returncode != 0 + assert same_members(expected_traffic, actual_traffic) + +def test_synchronize_fail_incomplete_results(): + expected_traffic = [ + (Requests.get_rate, Responses.no_limit), + (Requests.search, ( + 200, + { + 'items': [], + 
'incomplete_results': True + } + )) + ] + + returncode, actual_traffic, remove_refs = synchronize(expected_traffic) + + assert returncode != 0 + assert same_members(expected_traffic, actual_traffic) + +def test_synchronize_ignore_closed(): + expected_traffic = [ + (Requests.get_rate, Responses.no_limit), + (Requests.search, ( + 200, + { + 'items': [ + { + 'number': 23, + 'labels': [], + 'closed_at': '2019-10-28', + 'user': {'login': 'grace'}, + 'author_association': 'COLLABORATOR' + } + ], + 'incomplete_results': False + } + )) + ] + + returncode, actual_traffic, remote_refs = synchronize(expected_traffic) + + assert returncode == 0 + assert same_members(expected_traffic, actual_traffic) + +def test_synchronize_sync_collaborator(): + expected_traffic = [ + (Requests.get_rate, Responses.no_limit), + (Requests.get_rate, Responses.no_limit), + (Requests.get_rate, Responses.no_limit), + (Requests.get_rate, Responses.no_limit), + (Requests.get_rate, Responses.no_limit), + (Requests.search, ( + 200, + { + 'items': [ + { + 'number': 23, + 'labels': [], + 'closed_at': None, + 'user': {'login': 'grace'}, + 'author_association': 'COLLABORATOR' + } + ], + 'incomplete_results': False + } + )), + (Requests.ref_create_open, (200, {})), + (Requests.ref_create_trusted, (200, {})), + (Requests.deployment_get, (200, {})), + (Requests.deployment_create, (200, {})) + ] + + returncode, actual_traffic, remote_refs = synchronize(expected_traffic) + + assert returncode == 0 + assert same_members(expected_traffic, actual_traffic) + +def test_synchronize_ignore_collaborator_bot(): + expected_traffic = [ + (Requests.get_rate, Responses.no_limit), + (Requests.search, ( + 200, + { + 'items': [ + { + 'number': 23, + 'labels': [], + 'closed_at': None, + 'user': {'login': 'chromium-wpt-export-bot'}, + 'author_association': 'COLLABORATOR' + } + ], + 'incomplete_results': False + } + )) + ] + + returncode, actual_traffic, remote_refs = synchronize(expected_traffic) + + assert returncode == 0 + assert same_members(expected_traffic, actual_traffic) + +def test_synchronize_ignore_untrusted_contributor(): + expected_traffic = [ + (Requests.get_rate, Responses.no_limit), + (Requests.search, ( + 200, + { + 'items': [ + { + 'number': 23, + 'labels': [], + 'closed_at': None, + 'user': {'login': 'grace'}, + 'author_association': 'CONTRIBUTOR' + } + ], + 'incomplete_results': False + } + )) + ] + + returncode, actual_traffic, remote_refs = synchronize(expected_traffic) + + assert returncode == 0 + assert same_members(expected_traffic, actual_traffic) + +def test_synchronize_sync_trusted_contributor(): + expected_traffic = [ + (Requests.get_rate, Responses.no_limit), + (Requests.get_rate, Responses.no_limit), + (Requests.get_rate, Responses.no_limit), + (Requests.get_rate, Responses.no_limit), + (Requests.get_rate, Responses.no_limit), + (Requests.search, ( + 200, + { + 'items': [ + { + 'number': 23, + 'labels': [{'name': 'safelisted-for-preview'}], + 'closed_at': None, + 'user': {'login': 'Hexcles'}, + 'author_association': 'CONTRIBUTOR' + } + ], + 'incomplete_results': False + } + )), + (Requests.ref_create_open, (200, {})), + (Requests.ref_create_trusted, (200, {})), + (Requests.deployment_get, (200, [])), + (Requests.deployment_create, (200, {})) + ] + + returncode, actual_traffic, remote_refs = synchronize(expected_traffic) + + assert returncode == 0 + assert same_members(expected_traffic, actual_traffic) + +def test_synchronize_update_collaborator(): + expected_traffic = [ + (Requests.get_rate, Responses.no_limit), + 
(Requests.get_rate, Responses.no_limit), + (Requests.get_rate, Responses.no_limit), + (Requests.get_rate, Responses.no_limit), + (Requests.get_rate, Responses.no_limit), + (Requests.search, (200, + { + 'items': [ + { + 'number': 23, + 'labels': [], + 'closed_at': None, + 'user': {'login': 'grace'}, + 'author_association': 'COLLABORATOR' + } + ], + 'incomplete_results': False + } + )), + (Requests.deployment_get, (200, [])), + (Requests.ref_update_open, (200, {})), + (Requests.ref_update_trusted, (200, {})), + (Requests.deployment_create, (200, {})) + ] + refs = { + 'refs/pull/23/head': 'HEAD', + 'refs/prs-open/23': 'HEAD~', + 'refs/prs-trusted-for-preview/23': 'HEAD~' + } + + returncode, actual_traffic, remote_refs = synchronize(expected_traffic, refs) + + assert returncode == 0 + assert same_members(expected_traffic, actual_traffic) + +def test_synchronize_update_member(): + expected_traffic = [ + (Requests.get_rate, Responses.no_limit), + (Requests.get_rate, Responses.no_limit), + (Requests.get_rate, Responses.no_limit), + (Requests.get_rate, Responses.no_limit), + (Requests.search, (200, + { + 'items': [ + { + 'number': 23, + 'labels': [], + 'closed_at': None, + 'user': {'login': 'grace'}, + 'author_association': 'MEMBER' + } + ], + 'incomplete_results': False + } + )), + (Requests.deployment_get, (200, [{'some': 'deployment'}])), + (Requests.ref_update_open, (200, {})), + (Requests.ref_update_trusted, (200, {})) + ] + refs = { + 'refs/pull/23/head': 'HEAD', + 'refs/prs-open/23': 'HEAD~', + 'refs/prs-trusted-for-preview/23': 'HEAD~' + } + + returncode, actual_traffic, remote_refs = synchronize(expected_traffic, refs) + + assert returncode == 0 + assert same_members(expected_traffic, actual_traffic) + +def test_synchronize_delete_collaborator(): + expected_traffic = [ + (Requests.get_rate, Responses.no_limit), + (Requests.search, (200, + { + 'items': [ + { + 'number': 23, + 'labels': [], + 'closed_at': '2019-10-30', + 'user': {'login': 'grace'}, + 'author_association': 'COLLABORATOR' + } + ], + 'incomplete_results': False + } + )) + ] + refs = { + 'refs/pull/23/head': 'HEAD', + 'refs/prs-open/23': 'HEAD~', + 'refs/prs-trusted-for-preview/23': 'HEAD~' + } + + returncode, actual_traffic, remote_refs = synchronize(expected_traffic, refs) + + assert returncode == 0 + assert same_members(expected_traffic, actual_traffic) + assert list(remote_refs) == ['refs/pull/23/head'] + +def test_detect_ignore_unknown_env(): + expected_github_traffic = [] + expected_preview_traffic = [] + event = { + 'deployment': { + 'id': 24601, + 'environment': 'ghosts', + 'sha': '3232' + } + } + + returncode, actual_github_traffic, actual_preview_traffic = detect( + event, expected_github_traffic, expected_preview_traffic + ) + + assert returncode == 0 + assert len(actual_github_traffic) == 0 + assert len(actual_preview_traffic) == 0 + +def test_detect_fail_search_throttled(): + expected_github_traffic = [ + (Requests.get_rate, ( + 200, + { + 'resources': { + 'core': { + 'remaining': 1, + 'limit': 10 + } + } + } + )) + ] + expected_preview_traffic = [] + event = { + 'deployment': { + 'id': 24601, + 'environment': 'gh-45', + 'sha': '3232' + } + } + + returncode, actual_github_traffic, actual_preview_traffic = detect( + event, expected_github_traffic, expected_preview_traffic + ) + + assert returncode == 1 + assert actual_github_traffic == expected_github_traffic + assert actual_preview_traffic == expected_preview_traffic + +def test_detect_success(): + expected_github_traffic = [ + (Requests.get_rate, 
Responses.no_limit), + (Requests.deployment_status_create_pending, (200, {})), + (Requests.get_rate, Responses.no_limit), + (Requests.deployment_status_create_success, (200, {})) + ] + expected_preview_traffic = [ + (Requests.preview, (200, 3232)) + ] + event = { + 'deployment': { + 'id': 24601, + 'environment': 'gh-45', + 'sha': '3232' + } + } + + returncode, actual_github_traffic, actual_preview_traffic = detect( + event, expected_github_traffic, expected_preview_traffic + ) + + assert returncode == 0 + assert actual_github_traffic == expected_github_traffic + assert actual_preview_traffic == expected_preview_traffic + +def test_detect_timeout_missing(): + expected_github_traffic = [ + (Requests.get_rate, Responses.no_limit), + (Requests.deployment_status_create_pending, (200, {})), + (Requests.get_rate, Responses.no_limit), + (Requests.deployment_status_create_error, (200, {})) + ] + expected_preview_traffic = [ + (Requests.preview, (404, {})) + ] + event = { + 'deployment': { + 'id': 24601, + 'environment': 'gh-45', + 'sha': '3232' + } + } + + returncode, actual_github_traffic, actual_preview_traffic = detect( + event, expected_github_traffic, expected_preview_traffic + ) + + assert returncode == 1 + assert expected_github_traffic == actual_github_traffic + ping_count = len(actual_preview_traffic) + assert ping_count > 0 + assert actual_preview_traffic == expected_preview_traffic * ping_count + +def test_detect_timeout_wrong_revision(): + expected_github_traffic = [ + (Requests.get_rate, Responses.no_limit), + (Requests.deployment_status_create_pending, (200, {})), + (Requests.get_rate, Responses.no_limit), + (Requests.deployment_status_create_error, (200, {})) + ] + expected_preview_traffic = [ + (Requests.preview, (200, 1234)) + ] + event = { + 'deployment': { + 'id': 24601, + 'environment': 'gh-45', + 'sha': '3232' + } + } + + returncode, actual_github_traffic, actual_preview_traffic = detect( + event, expected_github_traffic, expected_preview_traffic + ) + + assert returncode == 1 + assert expected_github_traffic == actual_github_traffic + ping_count = len(actual_preview_traffic) + assert ping_count > 0 + assert actual_preview_traffic == expected_preview_traffic * ping_count diff --git a/tools/tox.ini b/tools/tox.ini index a330d0523486ca..7a5c1eb648a97a 100644 --- a/tools/tox.ini +++ b/tools/tox.ini @@ -8,6 +8,8 @@ deps = pytest-cov mock hypothesis + # `requests` is required by `pr_preview.py` + requests commands = pytest {posargs} From 854be5aae597e5e60c2ec7dd7b34b551df62cb10 Mon Sep 17 00:00:00 2001 From: Mike Pennisi Date: Mon, 11 Nov 2019 20:48:39 -0500 Subject: [PATCH 2/7] fixup! 
[infra] Integrate with external PR preview system --- tools/ci/tests/test_pr_preview.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/tools/ci/tests/test_pr_preview.py b/tools/ci/tests/test_pr_preview.py index d8ab467bb6d6d5..a9baf85a08ae97 100644 --- a/tools/ci/tests/test_pr_preview.py +++ b/tools/ci/tests/test_pr_preview.py @@ -40,13 +40,13 @@ def same_members(a, b): # Source: # https://stackoverflow.com/questions/1213706/what-user-do-python-scripts-run-as-in-windows def handle_remove_readonly(func, path, exc): - excvalue = exc[1] - candidates = (os.rmdir, os.remove, os.unlink) - if func in candidates and excvalue.errno == errno.EACCES: - os.chmod(path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) # 0777 - func(path) - else: - raise + excvalue = exc[1] + candidates = (os.rmdir, os.remove, os.unlink) + if func in candidates and excvalue.errno == errno.EACCES: + os.chmod(path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) # 0777 + func(path) + else: + raise class MockHandler(BaseHTTPRequestHandler, object): From f5c7cb9afa3878c5fed296455c4ac5f7121f6c2b Mon Sep 17 00:00:00 2001 From: Mike Pennisi Date: Thu, 14 Nov 2019 16:14:24 -0500 Subject: [PATCH 3/7] fixup! [infra] Integrate with external PR preview system --- .github/workflows/pull_request_previews.yml | 2 ++ tools/ci/pr_preview.py | 5 +++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pull_request_previews.yml b/.github/workflows/pull_request_previews.yml index 300b6649c6efcb..3694fe7938478a 100644 --- a/.github/workflows/pull_request_previews.yml +++ b/.github/workflows/pull_request_previews.yml @@ -7,6 +7,8 @@ jobs: runs-on: ubuntu-18.04 steps: - uses: actions/checkout@v1 + with: + fetch-depth: 1 - name: Install dependency run: pip install requests - name: Synchronize state diff --git a/tools/ci/pr_preview.py b/tools/ci/pr_preview.py index 42833099df8f95..31b86881c55a43 100755 --- a/tools/ci/pr_preview.py +++ b/tools/ci/pr_preview.py @@ -22,7 +22,7 @@ API_RATE_LIMIT_THRESHOLD = 0.2 # The GitHub Pull Request label which indicates that a Pull Request is expected # to be actively mirrored by the preview server -LABEL = 'safelisted-for-preview' +LABEL = 'safe for preview' # The number of seconds to wait between attempts to verify that a submission # preview is available on the Pull Request preview server POLLING_PERIOD = 5 @@ -35,7 +35,8 @@ # These GitHub accounts are not associated with individuals, and the Pull # Requests they submit rarely require a preview. AUTOMATION_GITHUB_USERS = ( - 'chromium-wpt-export-bot', 'moz-wptsync-bot', 'servo-wpt-sync' + 'autofoolip', 'chromium-wpt-export-bot', 'moz-wptsync-bot', + 'servo-wpt-sync' ) logging.basicConfig(level=logging.INFO) From 4988be19fd653b23240687395b35c7501cc8b71b Mon Sep 17 00:00:00 2001 From: Mike Pennisi Date: Thu, 14 Nov 2019 16:27:42 -0500 Subject: [PATCH 4/7] fixup! 
[infra] Integrate with external PR preview system --- tools/ci/tests/test_pr_preview.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/ci/tests/test_pr_preview.py b/tools/ci/tests/test_pr_preview.py index a9baf85a08ae97..3afee3b7ef45d5 100644 --- a/tools/ci/tests/test_pr_preview.py +++ b/tools/ci/tests/test_pr_preview.py @@ -465,7 +465,7 @@ def test_synchronize_sync_trusted_contributor(): 'items': [ { 'number': 23, - 'labels': [{'name': 'safelisted-for-preview'}], + 'labels': [{'name': 'safe for preview'}], 'closed_at': None, 'user': {'login': 'Hexcles'}, 'author_association': 'CONTRIBUTOR' From b7f86cbcacd7918ff6e297a9724e8cedb4b38f40 Mon Sep 17 00:00:00 2001 From: Mike Pennisi Date: Tue, 19 Nov 2019 19:16:04 -0500 Subject: [PATCH 5/7] fixup! [infra] Integrate with external PR preview system --- tools/ci/pr_preview.py | 25 +++++++++++++++---------- tools/ci/tests/test_pr_preview.py | 10 +++++----- 2 files changed, 20 insertions(+), 15 deletions(-) diff --git a/tools/ci/pr_preview.py b/tools/ci/pr_preview.py index 31b86881c55a43..3be37bf3d7a85b 100755 --- a/tools/ci/pr_preview.py +++ b/tools/ci/pr_preview.py @@ -38,6 +38,7 @@ 'autofoolip', 'chromium-wpt-export-bot', 'moz-wptsync-bot', 'servo-wpt-sync' ) +DEPLOYMENT_PREFIX = 'wpt-preview-' logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) @@ -60,10 +61,10 @@ def gh_request(method_name, url, body=None, media_type=None): resp = method(url, **kwargs) - resp.raise_for_status() - logger.info('Response status code: %s', resp.status_code) + resp.raise_for_status() + return resp.json() def guard(resource): @@ -152,9 +153,9 @@ def create_deployment(self, pull_request, revision): # given Pull Request. Identifying the Deployment by the Pull Request # number ensures that GitHub.com automatically responds to new # Deployments by designating prior Deployments as "inactive" - environment = 'gh-{}'.format(pull_request['number']) + environment = DEPLOYMENT_PREFIX + str(pull_request['number']) - logger.info('Creating Deployment for "%s"', revision) + logger.info('Creating Deployment "%s" for "%s"', environment, revision) return gh_request('POST', url, { 'ref': revision, @@ -238,7 +239,7 @@ def delete_ref(self, refspec): def is_open(pull_request): return not pull_request['closed_at'] -def has_label(pull_request): +def has_mirroring_label(pull_request): for label in pull_request['labels']: if label['name'] == LABEL: return True @@ -250,13 +251,14 @@ def should_be_mirrored(pull_request): is_open(pull_request) and pull_request['user']['login'] not in AUTOMATION_GITHUB_USERS and ( pull_request['author_association'] in TRUSTED_AUTHOR_ASSOCIATIONS or - has_label(pull_request) + has_mirroring_label(pull_request) ) ) def is_deployed(host, deployment): + worktree_name = deployment['environment'][len(DEPLOYMENT_PREFIX):] response = requests.get( - '{}/.git/worktrees/{}/HEAD'.format(host, deployment['environment']) + '{}/.git/worktrees/{}/HEAD'.format(host, worktree_name) ) if response.status_code != 200: @@ -309,7 +311,7 @@ def synchronize(host, github_project, window): else: logger.info('Pull Request should not be mirrored') - if not has_label(pull_request) and revision_trusted is not None: + if not has_mirroring_label(pull_request) and revision_trusted is not None: remote.delete_ref(refspec_trusted) if revision_open is not None and not is_open(pull_request): @@ -329,8 +331,11 @@ def detect(host, github_project, target, timeout): deployment = data['deployment'] - if not deployment['environment'].startswith('gh-'): - 
logger.info('Deployment environment is unrecognized. Exiting.') + if not deployment['environment'].startswith(DEPLOYMENT_PREFIX): + logger.info( + 'Deployment environment "%s" is unrecognized. Exiting.', + deployment['environment'] + ) return message = 'Waiting up to {} seconds for Deployment {} to be available on {}'.format( diff --git a/tools/ci/tests/test_pr_preview.py b/tools/ci/tests/test_pr_preview.py index 3afee3b7ef45d5..8c42f24b02902c 100644 --- a/tools/ci/tests/test_pr_preview.py +++ b/tools/ci/tests/test_pr_preview.py @@ -146,7 +146,7 @@ class Requests(object): '/repos/test-org/test-repo/deployments/24601/statuses', {'state':'success'} ) - preview = ('GET', '/.git/worktrees/gh-45/HEAD', {}) + preview = ('GET', '/.git/worktrees/45/HEAD', {}) class Responses(object): @@ -624,7 +624,7 @@ def test_detect_fail_search_throttled(): event = { 'deployment': { 'id': 24601, - 'environment': 'gh-45', + 'environment': 'wpt-preview-45', 'sha': '3232' } } @@ -650,7 +650,7 @@ def test_detect_success(): event = { 'deployment': { 'id': 24601, - 'environment': 'gh-45', + 'environment': 'wpt-preview-45', 'sha': '3232' } } @@ -676,7 +676,7 @@ def test_detect_timeout_missing(): event = { 'deployment': { 'id': 24601, - 'environment': 'gh-45', + 'environment': 'wpt-preview-45', 'sha': '3232' } } @@ -704,7 +704,7 @@ def test_detect_timeout_wrong_revision(): event = { 'deployment': { 'id': 24601, - 'environment': 'gh-45', + 'environment': 'wpt-preview-45', 'sha': '3232' } } From 2775f47a9dfe289e4344e704bee0ac68350f3d0c Mon Sep 17 00:00:00 2001 From: Mike Pennisi Date: Tue, 19 Nov 2019 20:24:09 -0500 Subject: [PATCH 6/7] fixup! [infra] Integrate with external PR preview system From 8bbdd76fde48120b6bfd557f624db9e2e19d655a Mon Sep 17 00:00:00 2001 From: Mike Pennisi Date: Tue, 19 Nov 2019 22:18:26 -0500 Subject: [PATCH 7/7] fixup! [infra] Integrate with external PR preview system
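
To exercise these entry points locally (outside of GitHub Actions), the
following sketch mirrors the invocations defined by the two workflow files
above; the token value and the event payload path are placeholders. The
`synchronize` subcommand needs a token with the "repo" scope in the
GITHUB_TOKEN environment variable, while `detect` additionally reads
GITHUB_EVENT_PATH, which must point at a JSON file containing a `deployment`
object (as in the test cases above).

    # Mirrors pull_request_previews.yml (placeholder token value)
    export GITHUB_TOKEN="<token-with-repo-scope>"
    ./tools/ci/pr_preview.py \
        --host https://api.github.com \
        --github-project web-platform-tests/wpt \
        synchronize \
        --window 480

    # Mirrors detect_pull_request_preview.yml (placeholder event payload path)
    export GITHUB_EVENT_PATH=/tmp/deployment-event.json
    ./tools/ci/pr_preview.py \
        --host https://api.github.com \
        --github-project web-platform-tests/wpt \
        detect \
        --target https://wptpr.live \
        --timeout 600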