more automated uploads (#1301)

This commit is contained in:
bnnm 2023-01-29 21:12:45 +01:00 committed by GitHub
parent f71d37e7f6
commit 4f857d702c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
6 changed files with 308 additions and 46 deletions

157
.github/changelog.py vendored Normal file
View File

@ -0,0 +1,157 @@
# generates changelog
# NOTE: this is just a quick fix, feel free to handle releases more cleanly
import subprocess, urllib.request, json
USE_GIT = True
GIT_MAX_MERGES = 10 #maybe should use max date
JSON_MAX_MERGES = 10
JSON_LOCAL = True
def convert_git(stdout):
    """Parse `git log --format=medium` text into a list of commit dicts.

    Returns a list of {'date': str, 'message': str} items, one per commit.
    'date' is taken from the "Date:" line with surrounding quotes stripped;
    'message' is everything from the first blank line of the commit onward
    (so it starts with '\\n'). Commits with no blank line yield a partial dict.
    """
    lines = stdout.split('\n')
    # split into groups of commit/author/date/message lines
    groups = []
    curr = -1
    for idx, line in enumerate(lines):
        if line.startswith('commit '):
            if curr >= 0:
                groups.append(lines[curr:idx])
            curr = idx
    # fix: slice to the end — the original `lines[curr:idx]` stopped at the
    # last *index* and dropped the final line; the guard also avoids emitting
    # a bogus empty group when stdout contains no commits at all
    if curr >= 0:
        groups.append(lines[curr:])
    # assumed to use the default (medium) format
    items = []
    for group in groups:
        item = {}
        # header lines are not consistent (may include 'Merge:' lines)
        for idx, line in enumerate(group):
            lw = line.lower()
            if lw.startswith('date'):
                # 'Date:' is 5 chars; the --date format wraps values in quotes
                item['date'] = line[5:].strip().replace('"', '')
            if not line:
                # blank line marks the start of the commit message body
                item['message'] = '\n'.join(group[idx:])
                break
        items.append(item)
    return items
# get latest commits; command examples:
#   git log --max-count 5 --merges
#   git --no-pager log --after="2020-02-01" --format=medium
def load_git():
    """Fetch recent merge commits via the git CLI and parse them.

    Raises ValueError when git is disabled by config or the command fails.
    """
    if not USE_GIT:
        raise ValueError("git disabled")
    cmd = [
        'git', '--no-pager', 'log',
        '--merges',
        '--max-count', str(GIT_MAX_MERGES),
        '--date=format:"%Y-%m-%d %H:%M:%S"',
    ]
    result = subprocess.run(cmd, capture_output=True)
    if result.returncode != 0:
        raise ValueError("git exception")
    return convert_git(result.stdout.decode('utf-8'))
def convert_json(data, max_merges=None):
    """Convert GitHub commits-API JSON into a list of commit dicts.

    Only keeps commits whose message starts with 'merge' (case-insensitive),
    since merge messages (usually) carry useful, formatted info.

    Args:
        data: decoded JSON list from /repos/OWNER/REPO/commits.
        max_merges: cap on kept items; defaults to JSON_MAX_MERGES.

    Returns a list of {'date': str, 'message': str} items.
    """
    if max_merges is None:
        max_merges = JSON_MAX_MERGES
    merges = 0
    items = []
    for data_elem in data:
        commit = data_elem['commit']
        message = commit['message']
        if not message.lower().strip().startswith('merge'):
            continue
        # ISO timestamp like 2023-01-01T00:00:00Z -> '2023-01-01 00:00:00'
        date = commit['author']['date'].replace('T', ' ').replace('Z', '')
        items.append({
            'date': date,
            'message': message,
        })
        merges += 1
        # fix: was `merges > max`, which collected one item too many
        # (11 with JSON_MAX_MERGES=10, inconsistent with git's --max-count 10)
        if merges >= max_merges:
            break
    return items
def load_json():
    """Load commit data from a local commits.json or the GitHub commits API.

    Controlled by JSON_LOCAL: read ./commits.json when set, otherwise call
    the public API. Returns parsed items via convert_json.
    """
    # see https://docs.github.com/en/rest/commits/commits
    # for reference (needs to be logged in to get artifacts = useless)
    # https://api.github.com/repos/OWNER/REPO/actions/artifacts
    # https://api.github.com/repos/OWNER/REPO/actions/artifacts/ARTIFACT_ID
    # https://api.github.com/repos/OWNER/REPO/actions/workflows/release.yml/runs
    if JSON_LOCAL:
        with open('commits.json', 'r', encoding='utf-8') as f:
            data = json.load(f)
    else:
        raw = urllib.request.urlopen("https://api.github.com/repos/vgmstream/vgmstream/commits?per_page=100").read()
        # sanity cap on response size
        if len(raw) > 10000000:
            raise ValueError("bad call")
        data = json.loads(raw)
    return convert_json(data)
def convert_items(items, lines):
    """Format parsed commit items into markdown changelog lines.

    Appends to `lines` (mutated in place): a '#### <date>' header per item,
    then one '- ' bullet per non-empty, non-merge message line, then a blank
    line. Items with no usable lines get a '- (not described)' placeholder.
    """
    for item in items:
        message = item['message']
        date = item['date']
        header = "#### %s" % (date.replace('T', ' ').replace('Z', ''))
        subs = []
        for msg in (raw.strip() for raw in message.split('\n')):
            if msg.lower().startswith('merge'):
                continue
            if not msg:  # blank separator lines (usually the first)
                continue
            # fix: normalize '* item' bullets BEFORE prepending '- '; the
            # original prepended first, turning '* foo' into '- * foo' and
            # making the '* ' check unreachable
            if msg.startswith('* '):
                msg = '- %s' % (msg[2:])
            elif not msg.startswith('-'):
                msg = '- %s' % (msg)
            subs.append(msg)
        if not subs:
            subs.append('- (not described)')
        lines.append(header)
        lines.extend(subs)
        lines.append('')
def write(lines):
    """Write the changelog lines, newline-joined, to changelog.txt in the work dir."""
    text = '\n'.join(lines)
    with open('changelog.txt', 'w', encoding="utf-8") as out:
        out.write(text)
def get_lines():
    """Build the changelog lines, preferring git and falling back to the JSON API.

    Never raises: on total failure a placeholder line is appended instead,
    so the caller can always write something.
    """
    lines = ['### CHANGELOG (latest changes)', '']
    try:
        try:
            items = load_git()
        except Exception as git_err:
            print("error when generating git, using json:", git_err)
            items = load_json()
        convert_items(items, lines)
    except Exception as err:
        print("err", err)
        lines.append("(couldn't generate changelog)")
    return lines
def main():
    """Entry point: build the changelog, persist it, and return its lines."""
    changelog_lines = get_lines()
    write(changelog_lines)
    return changelog_lines


if __name__ == "__main__":
    main()

97
.github/uploader.py vendored
View File

@ -1,4 +1,5 @@
# uploads artifacts to nightly releases
# NOTE: this is just a quick fix, feel free to handle releases more cleanly
import urllib.request, json, argparse, glob, subprocess, os
# to handle nightly releases:
@ -29,14 +30,37 @@ URL_RELEASE = 'https://api.github.com/repos/vgmstream/vgmstream-releases/release
URL_DELETE = 'https://api.github.com/repos/vgmstream/vgmstream-releases/releases/assets/%s'
# allows uploading a single asset
URL_UPLOAD = 'https://uploads.github.com/repos/vgmstream/vgmstream-releases/releases/%s/assets?name=%s'
# change release info
URL_UPDATE = 'https://api.github.com/repos/vgmstream/vgmstream-releases/releases/%s'
#------------------------------------------------------------------------------
def get_release():
contents = urllib.request.urlopen(URL_RELEASE).read()
data = json.loads(contents)
return data
def delete_asset(release, file, token, debug):
def update_release(release, token, debug, body):
    """Replace the release's body text via the GitHub REST API (shelled-out curl).

    `body` must already be JSON-escaped by the caller, since it is spliced
    verbatim into the '{"body":"%s"}' payload. When `debug` is set the curl
    command is only printed, not executed.
    """
    release_id = release['id']
    curl_cmd = [
        'curl',
        '-X', 'PATCH',
        '-H', 'Accept: application/vnd.github+json',
        '-H', 'Authorization: Bearer %s' % (token),
        '-H', 'X-GitHub-Api-Version: 2022-11-28',
        URL_UPDATE % (release_id),
        '-d', '{"body":"%s"}' % (body),
    ]
    # full payload reference:
    #-d '{"tag_name":"v1.0.0","target_commitish":"master","name":"v1.0.0","body":"...","draft":false,"prerelease":false}'
    print("* updating release text")
    if debug:
        print(' '.join(curl_cmd))
    else:
        subprocess.run(curl_cmd, check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
def delete_asset(release, token, debug, file):
basename = os.path.basename(file)
asset_id = None
@ -54,7 +78,7 @@ def delete_asset(release, file, token, debug):
'-H', 'Accept: application/vnd.github+json',
'-H', 'Authorization: Bearer %s' % (token),
'-H', 'X-GitHub-Api-Version: 2022-11-28',
URL_DELETE % (asset_id)
URL_DELETE % (asset_id),
]
print("* deleting old asset %s (%s)" % (file, asset_id))
@ -63,7 +87,7 @@ def delete_asset(release, file, token, debug):
else:
subprocess.run(args, check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
def upload_asset(release, file, token, debug):
def upload_asset(release, token, debug, file):
basename = os.path.basename(file)
release_id = release['id']
@ -71,7 +95,6 @@ def upload_asset(release, file, token, debug):
'curl',
'-X', 'POST',
'-H', 'Accept: application/vnd.github+json',
'-H', 'Access-Control-Allow-Origin: *',
'-H', 'Authorization: Bearer %s' % (token),
'-H', 'X-GitHub-Api-Version: 2022-11-28',
'-H', 'Content-Type: application/octet-stream',
@ -85,16 +108,58 @@ def upload_asset(release, file, token, debug):
else:
subprocess.run(args, check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
#------------------------------------------------------------------------------
def generate_changelog(release, token, debug):
    """Regenerate changelog.txt, set it as the release body, and upload it as an asset.

    Best-effort: any failure is printed and swallowed so the rest of the
    release job still runs.
    """
    print("* generating changelog")
    try:
        import changelog
        # writes changelog.txt in the work dir and returns the lines
        lines = changelog.main()
        current_tag = 'r1810' #TODO get from some API maybe
        body = [
            'Automated releases ([full diffs here](https://github.com/vgmstream/vgmstream/compare/%s...master)).' % (current_tag),
            '',
            '',
            # fix: missing trailing comma here silently concatenated
            # '<details>' with the '<summary>' literal into one element
            '<details>',
            '<summary>Recent changes</summary>',
            '', #important, collapsable doesn't work otherwise
        ]
        body.extend(lines)
        body.append('</details>')
        # the body is spliced into the '{"body":"%s"}' JSON payload by
        # update_release, so newlines and quotes must be escaped here
        body_text = '\\n'.join(body)
        # fix: the original used replace('"', '\"'), which is a no-op
        # ('\"' == '"'); quotes in commit messages broke the JSON payload
        body_text = body_text.replace('"', '\\"')
        file = "changelog.txt"
        update_release(release, token, debug, body_text)
        delete_asset(release, token, debug, file)
        upload_asset(release, token, debug, file)
    except Exception as e:
        print("couldn't generate changelog", e)
def main(args):
print("staring asset uploader")
print("starting asset uploader")
files = []
for file_glob in args.files:
files += glob.glob(file_glob)
if not files:
raise ValueError("no files found")
# allow for changelog only
#if not files:
# raise ValueError("no files found")
# this token usually only exists in env on merges, but allow passing for tests
token = args.token
if not token:
token = os.environ.get('UPLOADER_GITHUB_TOKEN')
@ -103,21 +168,25 @@ def main(args):
release = get_release()
for file in files:
delete_asset(release, file, token, args.debug)
upload_asset(release, file, token, args.debug)
delete_asset(release, token, args.debug, file)
upload_asset(release, token, args.debug, file)
# this should be invoked separately so release doesn't change per artifact
if args.changelog:
generate_changelog(release, token, args.debug)
print("done")
def parse_args():
description = (
"uploads artifacts to releases"
)
epilog = (
"-"
)
epilog = None
ap = argparse.ArgumentParser(description=description, epilog=epilog, formatter_class=argparse.RawTextHelpFormatter)
ap.add_argument("files", help="files to upload", nargs='+')
ap.add_argument("files", help="files to upload", nargs='*')
ap.add_argument("-t","--token", help="security token")
ap.add_argument("-c","--changelog", help="update changelog as well", action="store_true")
ap.add_argument("-x","--debug", help="no actions", action="store_true")
args = ap.parse_args()

View File

@ -128,24 +128,38 @@ jobs:
# path: ${{runner.workspace}}/build/audacious/vgmstream.so
# name: vgmstream-audacious
- name: Upload artifacts to S3
if: github.event_name != 'pull_request'
working-directory: ${{runner.workspace}}/build
# uploads current assets to vgmstream-releases (token only works on merges)
- name: Upload artifacts to nightly
if: github.event_name != 'pull_request'
working-directory: ${{runner.workspace}}
shell: bash
env:
AWS_DEFAULT_REGION: us-west-1
AWS_ACCESS_KEY_ID: ${{secrets.AWS_ACCESS_KEY_ID}}
AWS_SECRET_ACCESS_KEY: ${{secrets.AWS_SECRET_ACCESS_KEY}}
# TODO: Add vgmstream123 to the archive and upload the Audacious plugin when it is supported
UPLOADER_GITHUB_TOKEN: ${{ secrets.UPLOADER_GITHUB_TOKEN }}
run: |
cd cli
tar cvfz vgmstream-linux-cli.tar.gz vgmstream-cli
cd ..
aws s3 cp cli/vgmstream-linux-cli.tar.gz s3://vgmstream-builds/${{github.sha}}/linux/vgmstream-linux-cli.tar.gz --acl public-read
cd ${{runner.workspace}}
echo ${{github.sha}} | tee latest_id_lx
aws s3 cp latest_id_lx s3://vgmstream-builds/ --acl public-read
# cd audacious
# tar cvfz vgmstream-audacious.tar.gz vgmstream.so
# cd ..
# aws s3 cp audacious/vgmstream-audacious.tar.gz s3://vgmstream-builds/${{github.sha}}/linux/vgmstream-audacious.tar.gz --acl public-read
tar cvfz vgmstream-linux-cli.tar.gz -C ./build/cli vgmstream-cli
# tar cvfz vgmstream-linux-123.tar.gz -C ./build/cli vgmstream123
# tar cvfz vgmstream-linux-audacious.tar.gz -C ./build/audacious vgmstream.so
python .github/uploader.py vgmstream-linux-cli.tar.gz
#- name: Upload artifacts to S3
# if: github.event_name != 'pull_request'
# working-directory: ${{runner.workspace}}/build
# shell: bash
# env:
# AWS_DEFAULT_REGION: us-west-1
# AWS_ACCESS_KEY_ID: ${{secrets.AWS_ACCESS_KEY_ID}}
# AWS_SECRET_ACCESS_KEY: ${{secrets.AWS_SECRET_ACCESS_KEY}}
# # TODO: Add vgmstream123 to the archive and upload the Audacious plugin when it is supported
# run: |
# cd cli
# tar cvfz vgmstream-linux-cli.tar.gz vgmstream-cli
# cd ..
# aws s3 cp cli/vgmstream-linux-cli.tar.gz s3://vgmstream-builds/${{github.sha}}/linux/vgmstream-linux-cli.tar.gz --acl public-read
# cd ${{runner.workspace}}
# echo ${{github.sha}} | tee latest_id_lx
# aws s3 cp latest_id_lx s3://vgmstream-builds/ --acl public-read
# # cd audacious
# # tar cvfz vgmstream-audacious.tar.gz vgmstream.so
# # cd ..
# # aws s3 cp audacious/vgmstream-audacious.tar.gz s3://vgmstream-builds/${{github.sha}}/linux/vgmstream-audacious.tar.gz --acl public-read

View File

@ -51,3 +51,16 @@ jobs:
with:
path: ${{ github.workspace }}/build/cli/vgmstream-cli
name: vgmstream-mac
# uploads current assets to vgmstream-releases (token only works on merges)
- name: Upload artifacts to vgmstream-releases
if: github.event_name != 'pull_request'
working-directory: ${{github.workspace}}
shell: bash
env:
UPLOADER_GITHUB_TOKEN: ${{ secrets.UPLOADER_GITHUB_TOKEN }}
run: |
tar cvfz vgmstream-mac-cli.tar.gz -C ./build/cli vgmstream-cli
# tar cvfz vgmstream-linux-123.tar.gz -C ./build/cli vgmstream123
# tar cvfz vgmstream-linux-audacious.tar.gz -C ./build/audacious vgmstream.so
python .github/uploader.py vgmstream-mac-cli.tar.gz

View File

@ -118,16 +118,25 @@ jobs:
${{runner.workspace}}/embuild/cli/vgmstream-cli.wasm
name: vgmstream-wasm
- name: Upload artifacts to S3
if: github.event_name != 'pull_request'
working-directory: ${{runner.workspace}}/embuild/cli
shell: bash
# uploads current assets to vgmstream-releases (token only works on merges)
- name: Upload artifacts to vgmstream-releases
if: github.event_name != 'pull_request'
working-directory: ${{github.workspace}}
env:
AWS_DEFAULT_REGION: us-west-1
AWS_ACCESS_KEY_ID: ${{secrets.AWS_ACCESS_KEY_ID}}
AWS_SECRET_ACCESS_KEY: ${{secrets.AWS_SECRET_ACCESS_KEY}}
UPLOADER_GITHUB_TOKEN: ${{ secrets.UPLOADER_GITHUB_TOKEN }}
run: |
echo ${{github.sha}} | tee latest_id
aws s3 cp ${{runner.workspace}}/embuild/cli/vgmstream-cli.js s3://vgmstream-builds/js/vgmstream-cli.js --acl public-read
aws s3 cp ${{runner.workspace}}/embuild/cli/vgmstream-cli.wasm s3://vgmstream-builds/js/vgmstream-cli.wasm --acl public-read
aws s3 cp latest_id s3://vgmstream-builds/js/version --acl public-read
python .github/uploader.py ${{runner.workspace}}/embuild/cli/vgmstream-cli.js ${{runner.workspace}}/embuild/cli/vgmstream-cli.wasm
#- name: Upload artifacts to S3
# if: github.event_name != 'pull_request'
# working-directory: ${{runner.workspace}}/embuild/cli
# shell: bash
# env:
# AWS_DEFAULT_REGION: us-west-1
# AWS_ACCESS_KEY_ID: ${{secrets.AWS_ACCESS_KEY_ID}}
# AWS_SECRET_ACCESS_KEY: ${{secrets.AWS_SECRET_ACCESS_KEY}}
# run: |
# echo ${{github.sha}} | tee latest_id
# aws s3 cp ${{runner.workspace}}/embuild/cli/vgmstream-cli.js s3://vgmstream-builds/js/vgmstream-cli.js --acl public-read
# aws s3 cp ${{runner.workspace}}/embuild/cli/vgmstream-cli.wasm s3://vgmstream-builds/js/vgmstream-cli.wasm --acl public-read
# aws s3 cp latest_id s3://vgmstream-builds/js/version --acl public-read

View File

@ -61,14 +61,15 @@ jobs:
name: vgmstream-win.pdb
path: ${{github.workspace}}\tmp\cli-p
# uploads current assets to vgmstream-releases
- name: Upload artifacts to nightly
# uploads current assets to vgmstream-releases (token only works on merges)
- name: Upload artifacts to vgmstream-releases
if: github.event_name != 'pull_request'
working-directory: ${{github.workspace}}
env:
UPLOADER_GITHUB_TOKEN: ${{ secrets.UPLOADER_GITHUB_TOKEN }}
run: |
python .github/uploader.py bin/vgmstream-win.zip bin/foo_input_vgmstream.fb2k-component
python .github/uploader.py --changelog
#- name: Upload artifacts to S3
# if: github.event_name != 'pull_request'
@ -87,4 +88,3 @@ jobs:
# git describe --always | tee latest_ver_win
# aws s3 cp latest_id_win s3://vgmstream-builds/ --acl public-read
# aws s3 cp latest_ver_win s3://vgmstream-builds/ --acl public-read