Merge pull request #1391 from colobot/dev-gh-actions
Fix uploading linter results from forks to GitHub
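In short, the diff below splits linting into two workflows: the lint job itself only generates annotations and writes them to a JSON artifact, and a new "Linter upload results" workflow, triggered when the Linter workflow completes, downloads that artifact and sends the annotations to the Checks API with the base repository's read-write token (runs from forks only get a read-only token). A minimal sketch of the new trigger, using the names that appear in the diff:

on:
  workflow_run:
    workflows: ["Linter"]
    types:
      - completed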
commit fbbe25b25e

@@ -15,59 +15,20 @@ jobs:
run: sudo apt-get update && sudo apt-get install -y --no-install-recommends build-essential cmake libsdl2-dev libsdl2-image-dev libsdl2-ttf-dev libsndfile1-dev libvorbis-dev libogg-dev libpng-dev libglew-dev libopenal-dev libboost-dev libboost-system-dev libboost-filesystem-dev libboost-regex-dev libphysfs-dev gettext git po4a vorbis-tools librsvg2-bin xmlstarlet
- name: Download colobot-lint dependencies
run: sudo apt-get install -y --no-install-recommends clang-3.6 libtinyxml2.6.2v5
- run: pip install requests
- run: mkdir -p /tmp/colobot-lint
- name: Download colobot-lint
working-directory: /tmp/colobot-lint
shell: python
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
REPO_NAME: colobot/colobot-lint
BRANCH_NAME: master
ARTIFACT_NAME: colobot-lint
run: |
import os
import requests

# How can there be no builtin action to download the latest artifact from another repo?!

s = requests.Session()
s.headers.update({
'Authorization': 'token ' + os.environ['GITHUB_TOKEN'],
'Accept': 'application/vnd.github.v3+json'
})

r = s.get("https://api.github.com/repos/" + os.environ['REPO_NAME'] + "/actions/runs", params={'branch': os.environ['BRANCH_NAME'], 'event': 'push', 'status': 'success'})
r.raise_for_status()

# Querying for "dev" returns all branches that have "dev" anywhere in the name... is that a GitHub bug or intended behaviour?
runs = list(filter(lambda x: x['head_branch'] == os.environ['BRANCH_NAME'], r.json()['workflow_runs']))
if len(runs) == 0:
raise Exception('No valid run found')
run = runs[0]
print("Using colobot-lint from run #{} ({}) for commit {}".format(run['run_number'], run['id'], run['head_sha']))

r = s.get(run['artifacts_url'])
r.raise_for_status()
artifacts = list(filter(lambda x: x['name'] == os.environ['ARTIFACT_NAME'], r.json()['artifacts']))
if len(artifacts) != 1:
raise Exception('Artifact not found')
artifact = artifacts[0]
print(artifact['archive_download_url'])

r = s.get(artifact['archive_download_url'], stream=True)
r.raise_for_status()
with open(os.environ['ARTIFACT_NAME'] + '.zip', 'wb') as f:
for block in r.iter_content(1024):
f.write(block)
print("Download finished")
uses: dawidd6/action-download-artifact@v2
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
repo: colobot/colobot-lint
branch: master
workflow: build.yml
workflow_conclusion: success
name: colobot-lint
path: /tmp/colobot-lint/archive
- name: Unpack colobot-lint
working-directory: /tmp/colobot-lint
run: |
# Unzip the archive
mkdir archive; cd archive
unzip ../colobot-lint.zip
cd ..
# Workaround for Clang not finding system headers
mkdir ./bin
mv ./archive/build/colobot-lint ./bin/
@@ -118,15 +79,12 @@ jobs:
with:
name: HTML results
path: build/html_report
- name: Send linter results to GitHub
- name: Generate GitHub annotations JSON and process check result
shell: python
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
ACTUALLY_SEND: ${{ github.event.type != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository }}
run: |
import os
import sys
import requests
import json
import xml.etree.ElementTree as ET

OVERALL_STABLE_RULES=[
@@ -179,18 +137,6 @@ jobs:
"whitespace",
]

# None of the available actions seem to do what I want, they all do stupid things like adding another check... let's just do it manually
# GitHub also doesn't seem to provide you with the check suite or check run ID, so we have to get it from the action ID via the API
s = requests.Session()
s.headers.update({
'Authorization': 'token ' + os.environ['GITHUB_TOKEN'],
'Accept': 'application/vnd.github.antiope-preview+json' # Annotations are still technically a preview feature of the API
})
action_run = s.get(os.environ['GITHUB_API_URL'] + "/repos/" + os.environ['GITHUB_REPOSITORY'] + "/actions/runs/" + os.environ['GITHUB_RUN_ID']).json()
check_suite = s.get(action_run['check_suite_url']).json()
check_suite_runs = s.get(check_suite['check_runs_url']).json()
check_run = check_suite_runs['check_runs'][0] # NOTE: This assumes that the 'lint' job is the first one in the workflow. You could find it by name if you really wanted.

def we_care_about(file_name, type):
if 'CBot' in file_name:
return type in OVERALL_STABLE_RULES
@@ -199,6 +145,7 @@ jobs:

results = ET.parse('build/colobot_lint_report.xml')
annotations = []
stable_annotations = []
for error in results.find('errors').findall('error'):
location = error.find('location')
file_name = os.path.relpath(location.get('file'), os.environ['GITHUB_WORKSPACE'])
@@ -213,42 +160,35 @@ jobs:
elif severity == 'information':
gh_severity = 'notice'

if not we_care_about(file_name, type):
# don't send the unstable rules to github at all as there are way too many of them and it would overload the API rate limit
continue

print('{}:{}: [{}] {}'.format(file_name, line_num, type, msg))

annotations.append({
annotation = {
'path': file_name,
'start_line': line_num,
'end_line': line_num,
'annotation_level': gh_severity,
'title': type,
'message': msg
})
}
annotations.append(annotation)

summary = 'colobot-lint found {} issues'.format(len(annotations))
all_ok = len(annotations) == 0
if we_care_about(file_name, type):
# don't send the unstable rules to github at all as there are way too many of them and it would overload the API rate limit
stable_annotations.append(annotation)
print('{}:{}: [{}] {}'.format(file_name, line_num, type, msg))

summary = 'colobot-lint found {} issues'.format(len(stable_annotations))
all_ok = len(stable_annotations) == 0
print('Conclusion: {}'.format(summary))

if os.environ['ACTUALLY_SEND'] != "true":
print('Skip uploading the results as annotations because tokens from forks are readonly and there seems to be no way to do it. Blame GitHub Actions devs.')
else:
# Annotations have to be sent in batches of 50
first = True
while first or len(annotations) > 0:
first = False
to_send = annotations[:50]
annotations = annotations[50:]
data = {
'output': {
'title': summary,
'summary': summary,
'annotations': to_send
}
}
r = s.patch(check_run['url'], json=data)
r.raise_for_status()

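# These two files become the 'JSON results' artifact uploaded below; the separate
# "Linter upload results" workflow added in this PR reads stable_annotations.json from
# that artifact and sends the annotations to the Checks API with a read-write token.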
with open("build/annotations.json", "w") as f:
json.dump(annotations, f, indent=4)
with open("build/stable_annotations.json", "w") as f:
json.dump(stable_annotations, f, indent=4)
sys.exit(0 if all_ok else 1)
- name: Upload results (JSON)
uses: actions/upload-artifact@v2
with:
name: JSON results
path: |
build/annotations.json
build/stable_annotations.json
if: ${{ always() }}
@@ -0,0 +1,67 @@
name: Linter upload results

# Upload linter results after successful linter run
# This is done in a separate workflow to safely use the read-write GitHub token
# See https://securitylab.github.com/research/github-actions-preventing-pwn-requests
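# (Background: a workflow_run workflow executes in the context of the base repository,
# so its GITHUB_TOKEN has write access even when the triggering run came from a fork;
# the fork's own pull_request run only gets a read-only token.)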

on:
workflow_run:
workflows: ["Linter"]
types:
- completed

jobs:
lint_upload:
runs-on: ubuntu-16.04
steps:
- run: pip install requests
- name: Download linter results
uses: dawidd6/action-download-artifact@v2
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
workflow: lint.yml
run_id: ${{ github.event.workflow_run.id }}
name: JSON results
path: results
- name: Send linter results to GitHub
shell: python
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
RUN_ID: ${{ github.event.workflow_run.id }}
run: |
import os
import json
import requests

# Load the results from the lint job artifact
with open("results/stable_annotations.json", "r") as f:
annotations = json.load(f)
summary = 'colobot-lint found {} issues'.format(len(annotations))

# None of the available actions seem to do what I want, they all do stupid things like adding another check... let's just do it manually
# GitHub also doesn't seem to provide you with the check suite or check run ID, so we have to get it from the action ID via the API
s = requests.Session()
s.headers.update({
'Authorization': 'token ' + os.environ['GITHUB_TOKEN'],
'Accept': 'application/vnd.github.antiope-preview+json' # Annotations are still technically a preview feature of the API
})
action_run = s.get(os.environ['GITHUB_API_URL'] + "/repos/" + os.environ['GITHUB_REPOSITORY'] + "/actions/runs/" + os.environ['RUN_ID']).json()
check_suite = s.get(action_run['check_suite_url']).json()
check_suite_runs = s.get(check_suite['check_runs_url']).json()
check_run = check_suite_runs['check_runs'][0] # NOTE: This assumes that the 'lint' job is the first one in the workflow. You could find it by name if you really wanted.
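# (Illustrative only: the "by name" lookup mentioned above could look like
#  next(cr for cr in check_suite_runs['check_runs'] if cr['name'] == 'lint'),
#  where 'lint' as the check run name is an assumption, not taken from this diff.)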

# Annotations have to be sent in batches of 50
first = True
while first or len(annotations) > 0:
first = False
to_send = annotations[:50]
annotations = annotations[50:]
data = {
'output': {
'title': summary,
'summary': summary,
'annotations': to_send
}
}
r = s.patch(check_run['url'], json=data)
r.raise_for_status()
@@ -1,14 +1,21 @@
name: Verify pull request target

on: [pull_request]
on: [pull_request_target]

jobs:
check_pr_target:
runs-on: ubuntu-latest
steps:
- name: Wrong pull request target
run: echo "This pull request targets the master branch. Please edit the pull request to target dev." && exit 1
- name: Send comment if wrong pull request target
if: github.base_ref == 'master'
uses: peter-evans/create-or-update-comment@v1
with:
issue-number: ${{ github.event.number }}
body: |
Hey! This pull request targets the `master` branch. You should probably target `dev` instead. Make sure to read the [contributing guidelines](https://github.com/colobot/colobot/blob/master/CONTRIBUTING.md#submitting-pull-requests) and [edit the target branch if necessary](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/changing-the-base-branch-of-a-pull-request).
- name: Wrong pull request target
if: github.base_ref == 'master'
run: echo "This pull request targets the master branch. Please edit the pull request to target dev." && exit 1
- name: Correct pull request target
if: github.base_ref != 'master'
run: echo "This pull request targets the correct branch." && exit 0
if: github.base_ref != 'master'