diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 48b9d62bb9b7..971053006b0e 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -36,61 +36,8 @@ concurrency: jobs: MacOS: - runs-on: macOS-latest + runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - with: - submodules: 'recursive' - - name: Set up environment - uses: ./.github/actions/setup - - name: Conda Build - shell: bash -l {0} - run: >- - conda build --output-folder=conda/pkg conda/recipe && - conda install tvm -c ./conda/pkg - - name: Build iOS RPC + - name: test run: | - IOS_VERSION="14.0" - CMAKE_FLAGS="-DCMAKE_BUILD_TYPE=Release \ - -DCMAKE_SYSTEM_NAME=iOS \ - -DCMAKE_SYSTEM_VERSION=${IOS_VERSION} \ - -DCMAKE_OSX_SYSROOT=iphonesimulator \ - -DCMAKE_OSX_ARCHITECTURES=x86_64 \ - -DCMAKE_OSX_DEPLOYMENT_TARGET=14.0 \ - -DCMAKE_BUILD_WITH_INSTALL_NAME_DIR=ON \ - -DUSE_IOS_RPC=ON" - - mkdir build-ios-simulator - cd build-ios-simulator - cmake .. ${CMAKE_FLAGS} - cmake --build . --target ios_rpc - - name: Test - shell: bash -l {0} - run: >- - python -m pytest -v tests/python/all-platform-minimal-test - - name: Test iOS RPC - shell: bash -l {0} - run: >- - python -m pip install tornado psutil cloudpickle && - export PYTHONPATH=tests/python/contrib:${PYTHONPATH} && - export BUNDLE_ID=org.apache.tvmrpc && - export BUNDLE_PATH=build-ios-simulator/apps/ios_rpc/ios_rpc/src/ios_rpc-build/Release-iphonesimulator/tvmrpc.app && - python -m pytest -v tests/python/contrib/test_rpc_server_device.py - - Windows: - runs-on: windows-2019 - steps: - - uses: actions/checkout@v2 - with: - submodules: 'recursive' - - name: Set up environment - uses: ./.github/actions/setup - - name: Conda Build - shell: cmd /C call {0} - run: >- - conda build --output-folder=conda/pkg conda/recipe && - conda install tvm -c ./conda/pkg - - name: Test - shell: cmd /C call {0} - run: >- - python -m pytest -v tests/python/all-platform-minimal-test + echo hi diff --git a/.github/workflows/merge.yml b/.github/workflows/merge.yml new file mode 100644 index 000000000000..de2fba707fa9 --- /dev/null +++ b/.github/workflows/merge.yml @@ -0,0 +1,28 @@ + +name: Merge +on: + status: + pull_request_review: + types: + - submitted + issue_comment: + +concurrency: + group: merge-${{ github.event.pull_request.number }}-${{ github.event.issue.number }} + cancel-in-progress: true + +jobs: + maybe-merge: + if: github.repository == 'driazati/tvm' + # if: github.repository == 'apache/tvm' # TODO: uncomment after testing + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v2 + - name: Merge if requested and possible + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + PR_NUMBER: ${{ github.event.issue.number }} + RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + run: | + set -eux + python tests/scripts/github_mergebot.py --pr "$PR_NUMBER" --run-url "$RUN_URL" diff --git a/.gitignore b/.gitignore index 887231895383..184ff17ab25e 100644 --- a/.gitignore +++ b/.gitignore @@ -263,3 +263,5 @@ tvm-site/ # Generated docs files gallery/how_to/work_with_microtvm/micro_tvmc.py +# Test sample data files +!tests/python/ci/sample_prs/*.json diff --git a/CONTRIBUTORS.md b/CONTRIBUTORS.md index e7b726b6a006..d47dd0e8925a 100644 --- a/CONTRIBUTORS.md +++ b/CONTRIBUTORS.md @@ -181,3 +181,4 @@ TVM is now a top-level Apache project. 
During our Incubator phase, we were fortu - Henry Saputra @hsaputra - Timothy Chen @tnachen - Furkan KAMACI @kamaci +test diff --git a/tests/python/ci/sample_prs/pr10786-badci.json b/tests/python/ci/sample_prs/pr10786-badci.json new file mode 100644 index 000000000000..274d452dc12d --- /dev/null +++ b/tests/python/ci/sample_prs/pr10786-badci.json @@ -0,0 +1,123 @@ +{ + "title": "[Hexagon] 2-d allocation cleanup", + "body": "- Added device validity check in allocation. HexagonDeviceAPI should only be called for CPU/Hexagon types.\r\n\r\n- Check for \"global.vtcm\" scope instead of \"vtcm\". The ccope of N-d allocations produced by `LowerVtcmAlloc` should be `\"global.vtcm\"`. The previous check allowed unsupported scope such as `\"local.vtcm\"`.\r\n\r\n- Remove `vtcmallocs` entry after calling free. Previously, the vtcm allocation map kept dangling pointers to `HexagonBuffer` objects after they had been freed.\r\n\r\n- Rename N-d alloc and free packed functions. Since most of the similar device functions use snake case, renaming `*.AllocND` to `*.alloc_nd` and `*.FreeND` to `*.free_nd`.\r\n\r\nCo-authored-by: Adam Straw ", + "state": "OPEN", + "comments": { + "pageInfo": { + "hasPreviousPage": false + }, + "nodes": [] + }, + "authorCommits": { + "nodes": [ + { + "commit": { + "authors": { + "nodes": [ + { + "name": "Eric Lunderberg", + "email": "elunderberg@octoml.ai" + }, + { + "name": "Adam Straw", + "email": "astraw@octoml.ai" + } + ] + } + } + } + ] + }, + "commits": { + "nodes": [ + { + "commit": { + "oid": "6f04bcf57d07f915a98fd91178f04d9e92a09fcd", + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "hasNextPage": false + }, + "nodes": [ + { + "name": "MacOS", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "CI" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945392" + }, + { + "name": "cc-reviewers", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "PR" + } + } + }, + "status": "COMPLETED", + "conclusion": "FAILED", + "url": "https://github.com/apache/tvm/runs/5694945029" + }, + { + "name": "tag-teams", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "Teams" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945030" + }, + { + "name": "Windows", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "CI" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945524" + }, + { + "state": "SUCCESS", + "context": "tvm-ci/pr-merge", + "targetUrl": "https://ci.tlcpack.ai/job/tvm/job/PR-10786/1/display/redirect" + } + ] + } + } + } + } + ] + }, + "reviewDecision": "APPROVED", + "reviews": { + "pageInfo": { + "hasPreviousPage": false + }, + "nodes": [ + { + "body": "@tvm-bot merge", + "updatedAt": "2022-03-25T22:13:50Z", + "authorCanPushToRepository": true, + "commit": { + "oid": "6f04bcf57d07f915a98fd91178f04d9e92a09fcd" + }, + "state": "APPROVED" + } + ] + } +} \ No newline at end of file diff --git a/tests/python/ci/sample_prs/pr10786-merges.json b/tests/python/ci/sample_prs/pr10786-merges.json new file mode 100644 index 000000000000..6ee9864466ac --- /dev/null +++ b/tests/python/ci/sample_prs/pr10786-merges.json @@ -0,0 +1,123 @@ +{ + "title": "[Hexagon] 2-d allocation cleanup", + "body": "- Added device validity check in allocation. 
HexagonDeviceAPI should only be called for CPU/Hexagon types.\r\n\r\n- Check for \"global.vtcm\" scope instead of \"vtcm\". The ccope of N-d allocations produced by `LowerVtcmAlloc` should be `\"global.vtcm\"`. The previous check allowed unsupported scope such as `\"local.vtcm\"`.\r\n\r\n- Remove `vtcmallocs` entry after calling free. Previously, the vtcm allocation map kept dangling pointers to `HexagonBuffer` objects after they had been freed.\r\n\r\n- Rename N-d alloc and free packed functions. Since most of the similar device functions use snake case, renaming `*.AllocND` to `*.alloc_nd` and `*.FreeND` to `*.free_nd`.\r\n\r\nCo-authored-by: Adam Straw ", + "state": "OPEN", + "comments": { + "pageInfo": { + "hasPreviousPage": false + }, + "nodes": [] + }, + "authorCommits": { + "nodes": [ + { + "commit": { + "authors": { + "nodes": [ + { + "name": "Eric Lunderberg", + "email": "elunderberg@octoml.ai" + }, + { + "name": "Adam Straw", + "email": "astraw@octoml.ai" + } + ] + } + } + } + ] + }, + "commits": { + "nodes": [ + { + "commit": { + "oid": "6f04bcf57d07f915a98fd91178f04d9e92a09fcd", + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "hasNextPage": false + }, + "nodes": [ + { + "name": "MacOS", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "CI" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945392" + }, + { + "name": "cc-reviewers", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "PR" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945029" + }, + { + "name": "tag-teams", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "Teams" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945030" + }, + { + "name": "Windows", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "CI" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945524" + }, + { + "state": "SUCCESS", + "context": "tvm-ci/pr-merge", + "targetUrl": "https://ci.tlcpack.ai/job/tvm/job/PR-10786/1/display/redirect" + } + ] + } + } + } + } + ] + }, + "reviewDecision": "APPROVED", + "reviews": { + "pageInfo": { + "hasPreviousPage": false + }, + "nodes": [ + { + "body": "@tvm-bot merge", + "updatedAt": "2022-03-25T22:13:50Z", + "authorCanPushToRepository": true, + "commit": { + "oid": "6f04bcf57d07f915a98fd91178f04d9e92a09fcd" + }, + "state": "APPROVED" + } + ] + } +} \ No newline at end of file diff --git a/tests/python/ci/sample_prs/pr10786-nottriggered.json b/tests/python/ci/sample_prs/pr10786-nottriggered.json new file mode 100644 index 000000000000..be14c6d1d9a5 --- /dev/null +++ b/tests/python/ci/sample_prs/pr10786-nottriggered.json @@ -0,0 +1,123 @@ +{ + "title": "[Hexagon] 2-d allocation cleanup", + "body": "- Added device validity check in allocation. HexagonDeviceAPI should only be called for CPU/Hexagon types.\r\n\r\n- Check for \"global.vtcm\" scope instead of \"vtcm\". The ccope of N-d allocations produced by `LowerVtcmAlloc` should be `\"global.vtcm\"`. The previous check allowed unsupported scope such as `\"local.vtcm\"`.\r\n\r\n- Remove `vtcmallocs` entry after calling free. Previously, the vtcm allocation map kept dangling pointers to `HexagonBuffer` objects after they had been freed.\r\n\r\n- Rename N-d alloc and free packed functions. 
Since most of the similar device functions use snake case, renaming `*.AllocND` to `*.alloc_nd` and `*.FreeND` to `*.free_nd`.\r\n\r\nCo-authored-by: Adam Straw ", + "state": "OPEN", + "comments": { + "pageInfo": { + "hasPreviousPage": false + }, + "nodes": [] + }, + "authorCommits": { + "nodes": [ + { + "commit": { + "authors": { + "nodes": [ + { + "name": "Eric Lunderberg", + "email": "elunderberg@octoml.ai" + }, + { + "name": "Adam Straw", + "email": "astraw@octoml.ai" + } + ] + } + } + } + ] + }, + "commits": { + "nodes": [ + { + "commit": { + "oid": "6f04bcf57d07f915a98fd91178f04d9e92a09fcd", + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "hasNextPage": false + }, + "nodes": [ + { + "name": "MacOS", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "CI" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945392" + }, + { + "name": "cc-reviewers", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "PR" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945029" + }, + { + "name": "tag-teams", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "Teams" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945030" + }, + { + "name": "Windows", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "CI" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945524" + }, + { + "state": "SUCCESS", + "context": "tvm-ci/pr-merge", + "targetUrl": "https://ci.tlcpack.ai/job/tvm/job/PR-10786/1/display/redirect" + } + ] + } + } + } + } + ] + }, + "reviewDecision": "APPROVED", + "reviews": { + "pageInfo": { + "hasPreviousPage": false + }, + "nodes": [ + { + "body": "", + "updatedAt": "2022-03-25T22:13:50Z", + "authorCanPushToRepository": true, + "commit": { + "oid": "6f04bcf57d07f915a98fd91178f04d9e92a09fcd" + }, + "state": "APPROVED" + } + ] + } +} \ No newline at end of file diff --git a/tests/python/ci/sample_prs/pr10786-oldreview.json b/tests/python/ci/sample_prs/pr10786-oldreview.json new file mode 100644 index 000000000000..a5e226f6403e --- /dev/null +++ b/tests/python/ci/sample_prs/pr10786-oldreview.json @@ -0,0 +1,123 @@ +{ + "title": "[Hexagon] 2-d allocation cleanup", + "body": "- Added device validity check in allocation. HexagonDeviceAPI should only be called for CPU/Hexagon types.\r\n\r\n- Check for \"global.vtcm\" scope instead of \"vtcm\". The ccope of N-d allocations produced by `LowerVtcmAlloc` should be `\"global.vtcm\"`. The previous check allowed unsupported scope such as `\"local.vtcm\"`.\r\n\r\n- Remove `vtcmallocs` entry after calling free. Previously, the vtcm allocation map kept dangling pointers to `HexagonBuffer` objects after they had been freed.\r\n\r\n- Rename N-d alloc and free packed functions. 
Since most of the similar device functions use snake case, renaming `*.AllocND` to `*.alloc_nd` and `*.FreeND` to `*.free_nd`.\r\n\r\nCo-authored-by: Adam Straw ", + "state": "OPEN", + "comments": { + "pageInfo": { + "hasPreviousPage": false + }, + "nodes": [] + }, + "authorCommits": { + "nodes": [ + { + "commit": { + "authors": { + "nodes": [ + { + "name": "Eric Lunderberg", + "email": "elunderberg@octoml.ai" + }, + { + "name": "Adam Straw", + "email": "astraw@octoml.ai" + } + ] + } + } + } + ] + }, + "commits": { + "nodes": [ + { + "commit": { + "oid": "6f04bcf57d07f915a98fd91178f04d9e92a09fcd", + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "hasNextPage": false + }, + "nodes": [ + { + "name": "MacOS", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "CI" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945392" + }, + { + "name": "cc-reviewers", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "PR" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945029" + }, + { + "name": "tag-teams", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "Teams" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945030" + }, + { + "name": "Windows", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "CI" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945524" + }, + { + "state": "SUCCESS", + "context": "tvm-ci/pr-merge", + "targetUrl": "https://ci.tlcpack.ai/job/tvm/job/PR-10786/1/display/redirect" + } + ] + } + } + } + } + ] + }, + "reviewDecision": "APPROVED", + "reviews": { + "pageInfo": { + "hasPreviousPage": false + }, + "nodes": [ + { + "body": "@tvm-bot merge", + "updatedAt": "2022-03-25T22:13:50Z", + "authorCanPushToRepository": true, + "commit": { + "oid": "6f24bcf57d07f915a98fd91178f04d9e92a09fcd" + }, + "state": "APPROVED" + } + ] + } +} \ No newline at end of file diff --git a/tests/python/ci/test_mergebot.py b/tests/python/ci/test_mergebot.py new file mode 100644 index 000000000000..34a721ee1473 --- /dev/null +++ b/tests/python/ci/test_mergebot.py @@ -0,0 +1,93 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+
+import subprocess
+import json
+import sys
+import pytest
+
+from pathlib import Path
+
+from test_utils import REPO_ROOT
+
+
+class TempGit:
+    def __init__(self, cwd):
+        self.cwd = cwd
+
+    def run(self, *args):
+        proc = subprocess.run(["git"] + list(args), cwd=self.cwd)
+        if proc.returncode != 0:
+            raise RuntimeError(f"git command failed: '{args}'")
+
+
+def test_mergebot(tmpdir_factory):
+    mergebot_script = REPO_ROOT / "tests" / "scripts" / "github_mergebot.py"
+    test_json_dir = Path(__file__).resolve().parent / "sample_prs"
+
+    def run(number, filename, expected):
+        git = TempGit(tmpdir_factory.mktemp("tmp_git_dir"))
+        git.run("init")
+        git.run("checkout", "-b", "main")
+        git.run("remote", "add", "origin", "https://github.com/apache/tvm.git")
+        with open(test_json_dir / filename) as f:
+            test_data = json.load(f)
+
+        proc = subprocess.run(
+            [
+                str(mergebot_script),
+                "--pr",
+                str(number),
+                "--dry-run",
+                "--testing-pr-json",
+                json.dumps(test_data),
+            ],
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE,
+            encoding="utf-8",
+            cwd=git.cwd,
+        )
+        if proc.returncode != 0:
+            raise RuntimeError(f"Process failed:\nstdout:\n{proc.stdout}\n\nstderr:\n{proc.stderr}")
+
+        if expected not in proc.stderr:
+            raise RuntimeError(f"{proc.stderr}\ndid not contain\n{expected}")
+
+    run(
+        number=10786,
+        filename="pr10786-merges.json",
+        expected="Dry run, would have merged with url=pulls/10786/merge",
+    )
+    run(
+        number=10786,
+        filename="pr10786-nottriggered.json",
+        expected="No merge requested, exiting",
+    )
+    run(
+        number=10786,
+        filename="pr10786-badci.json",
+        expected="Cannot merge, these CI jobs are not successful on",
+    )
+    run(
+        number=10786,
+        filename="pr10786-oldreview.json",
+        expected="Cannot merge, did not find any approving reviews",
+    )
+
+
+if __name__ == "__main__":
+    sys.exit(pytest.main([__file__] + sys.argv[1:]))
diff --git a/tests/scripts/git_utils.py b/tests/scripts/git_utils.py
index 8e8cbfb1e261..51de25057a4f 100644
--- a/tests/scripts/git_utils.py
+++ b/tests/scripts/git_utils.py
@@ -38,17 +38,19 @@ def headers(self):
     def graphql(self, query: str, variables: Optional[Dict[str, str]] = None) -> Dict[str, Any]:
         if variables is None:
             variables = {}
-        response = self._post(
-            "https://api.github.com/graphql", {"query": query, "variables": variables}
+        response = self._request(
+            "https://api.github.com/graphql",
+            {"query": query, "variables": variables},
+            method="POST",
         )
         if "data" not in response:
             msg = f"Error fetching data with query:\n{query}\n\nvariables:\n{variables}\n\nerror:\n{json.dumps(response, indent=2)}"
             raise RuntimeError(msg)
         return response

-    def _post(self, full_url: str, body: Dict[str, Any]) -> Dict[str, Any]:
-        print("Requesting POST to", full_url, "with", body)
-        req = request.Request(full_url, headers=self.headers(), method="POST")
+    def _request(self, full_url: str, body: Dict[str, Any], method: str) -> Dict[str, Any]:
+        print(f"Requesting {method} to", full_url, "with", body)
+        req = request.Request(full_url, headers=self.headers(), method=method.upper())
         req.add_header("Content-Type", "application/json; charset=utf-8")
         data = json.dumps(body)
         data = data.encode("utf-8")
@@ -58,8 +60,11 @@ def _post(self, full_url: str, body: Dict[str, Any]) -> Dict[str, Any]:
         response = json.loads(response.read())
         return response

+    def put(self, url: str, data: Dict[str, Any]) -> Dict[str, Any]:
+        return self._request(self.base + url, data, method="PUT")
+
     def post(self, url: str, data: Dict[str, Any]) -> Dict[str, Any]:
-        return self._post(self.base + url, data)
+
return self._request(self.base + url, data, method="POST") def get(self, url: str) -> Dict[str, Any]: url = self.base + url diff --git a/tests/scripts/github_mergebot.py b/tests/scripts/github_mergebot.py new file mode 100755 index 000000000000..a4ce7876195d --- /dev/null +++ b/tests/scripts/github_mergebot.py @@ -0,0 +1,501 @@ +#!/usr/bin/env python3 +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import os +import json +import argparse +import warnings +import logging +import traceback +from typing import Dict, Any, List, Optional +from pathlib import Path + +from git_utils import git, GitHubRepo, parse_remote +from cmd_utils import init_log + + +Review = Dict[str, Any] +CIJob = Dict[str, Any] + + +def js(obj: Any) -> str: + return json.dumps(obj, indent=2) + + +PR_QUERY = """ + query ($owner: String!, $name: String!, $number: Int!) { + repository(owner: $owner, name: $name) { + pullRequest(number: $number) { + title + body + state + comments(last: 100) { + pageInfo { + hasPreviousPage + } + nodes { + author { + login + } + updatedAt + body + } + } + authorCommits:commits(last:100) { + nodes { + commit { + authors(first:100) { + nodes { + name + email + } + } + } + } + } + commits(last: 1) { + nodes { + commit { + oid + statusCheckRollup { + contexts(first: 100) { + pageInfo { + hasNextPage + } + nodes { + ... on CheckRun { + name + checkSuite { + workflowRun { + workflow { + name + } + } + } + status + conclusion + url + } + ... 
on StatusContext { + state + context + targetUrl + } + } + } + } + } + } + } + reviewDecision + reviews(last: 100) { + pageInfo { + hasPreviousPage + } + nodes { + body + updatedAt + authorCanPushToRepository + commit { + oid + } + state + } + } + } + } + } + """ + + +def walk(obj, visitor, parent_key=None): + """ + Recursively call 'visitor' on all the children of a dictionary + """ + visitor(obj, parent_key) + if isinstance(obj, dict): + for k, v in obj.items(): + walk(v, visitor, parent_key=k) + elif isinstance(obj, list): + for v in obj: + walk(v, visitor) + + +class PR: + def __init__( + self, + number: int, + owner: str, + repo: str, + dry_run: bool = False, + raw_data: Dict[str, Any] = None, + ): + self.owner = owner + self.number = number + self.repo_name = repo + self.dry_run = dry_run + + self.github = GitHubRepo(user=owner, repo=repo, token=os.environ["GITHUB_TOKEN"]) + + if dry_run and raw_data: + # In test mode there is no need to fetch anything + self.raw = raw_data + else: + if os.getenv("DEBUG", "0") == "1": + # For local runs fill in the requested data but cache it for + # later use + cached_path = Path("pr.json") + if not cached_path.exists(): + self.raw = self.fetch_data() + with open(cached_path, "w") as f: + json.dump(self.raw, f, indent=2) + else: + with open(cached_path) as f: + self.raw = json.load(f) + else: + # Usual path, fetch the PR's data based on the number from + # GitHub + self.raw = self.fetch_data() + + def checker(obj, parent_key): + """ + Verify that any paged results don't have extra data (if so the bot + may still work since most relevant comments will be more recent) + """ + if parent_key == "pageInfo": + if obj.get("hasPreviousPage", False): + warnings.warn(f"Found {obj} with a previous page, bot may be missing data") + if obj.get("hasNextPage", False): + warnings.warn(f"Found {obj} with a next page, bot may be missing data") + + walk(self.raw, checker) + + logging.info(f"Verified data, running with PR {js(self.raw)}") + + def __repr__(self): + return json.dumps(self.raw, indent=2) + + def head_commit(self): + return self.raw["commits"]["nodes"][0]["commit"] + + def co_authors(self) -> List[str]: + authors = [] + for commit in self.raw["authorCommits"]["nodes"]: + # Co-authors always come after the main author according to the + # GitHub docs, so ignore the first item + for author in commit["commit"]["authors"]["nodes"][1:]: + name = author["name"] + email = author["email"] + authors.append(f"{name} <{email}>") + + return list(set(authors)) + + def head_oid(self): + return self.head_commit()["oid"] + + def ci_jobs(self) -> List[CIJob]: + """ + Get a list of all CI jobs (GitHub Actions and other) in a unified format + """ + jobs = [] + for item in self.head_commit()["statusCheckRollup"]["contexts"]["nodes"]: + if "checkSuite" in item: + # GitHub Actions job, parse separately + status = item["conclusion"] + if status is None: + # If the 'conclusion' isn't filled out the job hasn't + # finished yet + status = "PENDING" + jobs.append( + { + "name": item["checkSuite"]["workflowRun"]["workflow"]["name"] + + " / " + + item["name"], + "url": item["url"], + "status": status.upper(), + } + ) + else: + # GitHub Status (e.g. 
from Jenkins) + jobs.append( + { + "name": item["context"], + "url": item["targetUrl"], + "status": item["state"].upper(), + } + ) + + logging.info(f"Found CI jobs for {self.head_commit()['oid']} {js(jobs)}") + return jobs + + def reviews(self) -> List[Review]: + return self.raw["reviews"]["nodes"] + + def head_commit_reviews(self) -> List[Review]: + """ + Find reviews associated with the head commit + """ + commits_to_review_status: Dict[str, List[Review]] = {} + + for review in self.reviews(): + if not review["authorCanPushToRepository"]: + # ignore reviews from non-committers + continue + + oid = review["commit"]["oid"] + if oid in commits_to_review_status: + commits_to_review_status[oid].append(review) + else: + commits_to_review_status[oid] = [review] + + # Only use the data for the head commit of the PR + head_reviews = commits_to_review_status.get(self.head_oid(), []) + return head_reviews + + def fetch_data(self): + """ + Fetch the data for this PR from GitHub + """ + return self.github.graphql( + query=PR_QUERY, + variables={ + "owner": self.owner, + "name": self.repo_name, + "number": self.number, + }, + )["data"]["repository"]["pullRequest"] + + def comment(self, text: str) -> None: + """ + Leave the comment 'text' on this PR + """ + logging.info(f"Commenting:\n{text}") + # TODO: Update latest comment in-place if there has been no activity + data = {"body": text} + url = f"issues/{self.number}/comments" + if self.dry_run: + logging.info( + f"Dry run, would have commented on url={url} commenting with data={js(data)}" + ) + return + + self.github.post(url, data=data) + + def state(self) -> str: + """ + PR state (OPEN, CLOSED, MERGED, etc) + """ + return self.raw["state"] + + def lint_commit_message(self, subject: str, body: str) -> bool: + # TODO: NYI (Add rules as decided in https://discuss.tvm.apache.org/t/commit-message-guideline/12334) + return True + + def processed_body(self) -> str: + body = self.raw["body"].strip().replace("\r", "") + body = body.replace( + "Thanks for contributing to TVM! Please refer to guideline https://tvm.apache.org/docs/contribute/ for useful information and tips. 
After the pull request is submitted, please request code reviews from [Reviewers](https://github.com/apache/incubator-tvm/blob/master/CONTRIBUTORS.md#reviewers) by @ them in the pull request thread.",
+            "",
+        )
+        return body
+
+    def body_with_co_authors(self) -> str:
+        """
+        Add 'Co-authored-by' strings to the PR body based on the prior commits
+        in the PR
+        """
+        body = self.processed_body()
+        author_lines = self.co_authors()
+        logging.info(f"Found co-authors: author_lines={author_lines}")
+        for author_line in author_lines:
+            if author_line not in body:
+                # If the line isn't already in the PR body (it could have been
+                # added manually), put it in
+                body = f"{body}\n\nCo-authored-by: {author_line}"
+
+        return body
+
+    def merge(self) -> None:
+        """
+        Request a merge of this PR via the GitHub API
+        """
+        url = f"pulls/{self.number}/merge"
+
+        title = self.raw["title"]
+        body = self.body_with_co_authors()
+        self.lint_commit_message(title, body)
+        logging.info(f"Full commit:\n{title}\n\n{body}")
+
+        data = {
+            "commit_title": title,
+            "commit_message": body,
+            # The SHA is necessary in case there was an update right when this
+            # script ran, GitHub will sort out who won
+            "sha": self.head_oid(),
+            "merge_method": "squash",
+        }
+        if self.dry_run:
+            logging.info(f"Dry run, would have merged with url={url} and data={js(data)}")
+            return
+
+        self.github.put(url, data=data)
+
+    def merge_requested(self) -> bool:
+        """
+        Check if this PR has had a merge requested
+        """
+        merge_commands = [
+            "merge",
+            "merge this",
+            "merge this pr",
+        ]
+        cancel_commands = [
+            "cancel",
+            "cancel merge",
+            "cancel the merge",
+            "stop",
+            "stop merge",
+            "stop the merge",
+        ]
+
+        def parse_action(s: str) -> Optional[str]:
+            if any(f"@tvm-bot {c}" in s for c in merge_commands):
+                return "merge"
+
+            if any(f"@tvm-bot {c}" in s for c in cancel_commands):
+                return "cancel"
+
+            return None
+
+        # Check regular comments
+        all_comments = []
+        for comment in self.raw["comments"]["nodes"]:
+            all_comments.append((comment["updatedAt"], comment["body"]))
+
+        # Check top-level review comments
+        for review in self.reviews():
+            all_comments.append((review["updatedAt"], review["body"]))
+
+        all_comments = sorted(all_comments, key=lambda x: x[0])
+        actions = [parse_action(body) for _, body in all_comments]
+        logging.info(f"Found these tvm-bot actions: {actions}")
+        actions = [a for a in actions if a is not None]
+
+        if len(actions) == 0:
+            return False
+
+        return actions[-1] == "merge"
+
+    def merge_if_passed_checks(self):
+        # NEUTRAL is GitHub Action's way of saying cancelled
+        failed_ci_jobs = [
+            job
+            for job in self.ci_jobs()
+            if job["status"] not in {"SUCCESS", "SUCCESSFUL", "NEUTRAL", "SKIPPED"}
+        ]
+        all_ci_passed = False
+        has_one_approval = False
+        if len(failed_ci_jobs) > 0:
+            failed_jobs_msg = "\n".join(
+                [f" * [{job['name']} (`{job['status']}`)]({job['url']})" for job in failed_ci_jobs]
+            )
+            self.comment(
+                f"Cannot merge, these CI jobs are not successful on {self.head_oid()}:\n{failed_jobs_msg}"
+            )
+            return
+        else:
+            all_ci_passed = True
+
+        head_commit_reviews = self.head_commit_reviews()
+        for review in head_commit_reviews:
+            if review["state"] == "CHANGES_REQUESTED":
+                # The reviews query above does not fetch review URLs, so
+                # identify the blocking review by the commit it targets
+                self.comment(
+                    f"Cannot merge, found a changes requested review on {self.head_oid()}"
+                )
+                return
+
+            if review["state"] == "APPROVED":
+                has_one_approval = True
+                logging.info(f"Found approving review: {js(review)}")
+
+        if has_one_approval and all_ci_passed:
+            self.merge()
+        elif not has_one_approval:
+
self.comment(
+                f"Cannot merge, did not find any approving reviews from users with write access on {self.head_oid()}"
+            )
+            return
+        elif not all_ci_passed:
+            self.comment(f"Cannot merge, CI did not pass on {self.head_oid()}")
+            return
+
+
+if __name__ == "__main__":
+    help = "Automatically merge a PR when requested via a '@tvm-bot merge' comment"
+    parser = argparse.ArgumentParser(description=help)
+    parser.add_argument("--remote", default="origin", help="ssh remote to parse")
+    parser.add_argument("--pr", required=True, help="pr number to check")
+    parser.add_argument("--run-url", required=True, help="workflow run URL")
+    parser.add_argument("--testing-pr-json", help="(testing only) manual data for testing")
+    parser.add_argument(
+        "--dry-run",
+        action="store_true",
+        default=False,
+        help="run but don't send any request to GitHub",
+    )
+    args = parser.parse_args()
+    init_log()
+
+    remote = git(["config", "--get", f"remote.{args.remote}.url"])
+    logging.info(f"Using remote remote={remote}")
+    owner, repo = parse_remote(remote)
+
+    if args.pr.strip() == "":
+        logging.info("No PR number passed")
+        exit(0)
+
+    logging.info(f"Checking owner={owner} repo={repo}")
+    if args.testing_pr_json:
+        pr = PR(
+            number=int(args.pr),
+            owner=owner,
+            repo=repo,
+            dry_run=args.dry_run,
+            raw_data=json.loads(args.testing_pr_json),
+        )
+    else:
+        pr = PR(number=int(args.pr), owner=owner, repo=repo, dry_run=args.dry_run)
+
+    state = pr.state()
+
+    if state != "OPEN":
+        logging.info(f"Ignoring event on PR, state was not OPEN, instead was state={state}")
+        exit(0)
+
+    if pr.merge_requested():
+        try:
+            pr.merge_if_passed_checks()
+        except Exception as e:
+            if not args.dry_run:
+                msg = traceback.format_exc()
+                pr.comment(
+                    f"Failed to process merge request in {args.run_url}\n\n<details>\n\n```\n{msg}\n```\n\n</details>"
+                )
+            raise e
+    else:
+        logging.info("No merge requested, exiting")