author     Mukilan Thiyagarajan <me@mukilan.in>    2023-04-09 10:14:45 +0530
committer  Mukilan Thiyagarajan <me@mukilan.in>    2023-04-12 21:02:07 +0530
commit     f1ba708cf77f7ee7dd7c718f48c4f0532bba1845 (patch)
tree       c92c0431aebbc947e487d702e0b973e2d87101f8 /python/servo/package_commands.py
parent     1f3837dd43b9c10f083dc401f0901a6a900ba2cb (diff)
download   servo-f1ba708cf77f7ee7dd7c718f48c4f0532bba1845.tar.gz
           servo-f1ba708cf77f7ee7dd7c718f48c4f0532bba1845.zip
Upload nightly builds to GitHub Releases
This change extends the `mach upload-nightly` command to publish the nightly builds for all platforms as GH Release assets. The GH releases are made on a separate repository so that we can persist older nightly builds without having to accumulate git tags for them.

Some design tradeoffs in this approach are:

1. To allow the 'latest' link from servo.org to remain stable, the release assets are named 'servo-latest.{ext}' instead of containing the release tag/date.

2. The release is created as a draft and published atomically once all platforms have been built successfully. This allows us to link to the 'latest' alias from servo.org while guaranteeing that it contains builds for all platforms. The other option here would be to have code in the servo.org UI that uses the GH API to find the most recent release with a successful build for a given platform.

3. The tags in the nightly repo are all based on the same commit, which has no relation to the Servo code base.

Signed-off-by: Mukilan Thiyagarajan <me@mukilan.in>
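The diff below only implements the asset-upload side: `upload-nightly` receives an already-created release via `--github-release-id`. A minimal sketch of the draft-then-publish flow described in point 2, using PyGithub; the repository name, tag scheme, and the "all platforms succeeded" step are assumptions and are not part of this commit:

# Sketch only: create a draft release on the separate nightly repository and
# publish it once every platform job has uploaded its assets.
import os
from datetime import datetime

from github import Github

g = Github(os.environ['NIGHTLY_REPO_TOKEN'])
# NIGHTLY_REPO would be e.g. 'servo/servo-nightly-builds' -- a hypothetical name.
nightly_repo = g.get_repo(os.environ['NIGHTLY_REPO'])

# 1. Create the release as a draft so the stable 'latest' alias is not updated
#    until every platform build has finished. All tags point at the same
#    commit, which has no relation to the Servo code base.
tag = datetime.utcnow().strftime('%Y-%m-%d')
release = nightly_repo.create_git_release(tag, f'Nightly {tag}', '', draft=True)

# 2. Each platform job runs `./mach upload-nightly ... --github-release-id <id>`,
#    which uploads its package and .sha256 asset to this draft (see diff below).
print(release.id)

# 3. After all platform jobs succeed, publish the draft atomically.
release.update_release(release.title, release.body or '', draft=False)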
Diffstat (limited to 'python/servo/package_commands.py')
-rw-r--r--  python/servo/package_commands.py  |  54
1 file changed, 40 insertions(+), 14 deletions(-)
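Because the asset names stay constant ('servo-latest.{ext}' plus a '.sha256' sidecar, per point 1 above), a consumer such as servo.org can always fetch the newest published build through GitHub's stable releases/latest/download redirect. A small verification sketch; the repository name is a placeholder, since the real one comes from the NIGHTLY_REPO environment variable:

# Sketch only: fetch the latest Linux nightly and verify it against the
# uploaded .sha256 asset. 'servo/servo-nightly' is a hypothetical repo name.
import hashlib
import urllib.request

BASE = 'https://github.com/servo/servo-nightly/releases/latest/download'
ASSET = 'servo-latest.tar.gz'

urllib.request.urlretrieve(f'{BASE}/{ASSET}', ASSET)
expected = urllib.request.urlopen(f'{BASE}/{ASSET}.sha256').read().decode('utf-8').strip()

sha256 = hashlib.sha256()
with open(ASSET, 'rb') as f:
    for chunk in iter(lambda: f.read(1 << 20), b''):  # 1 MiB chunks, as in the diff
        sha256.update(chunk)

assert sha256.hexdigest() == expected, 'hash mismatch'
print('verified', ASSET)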
diff --git a/python/servo/package_commands.py b/python/servo/package_commands.py
index 53910dcd644..405bdfb8c3e 100644
--- a/python/servo/package_commands.py
+++ b/python/servo/package_commands.py
@@ -10,6 +10,8 @@
from __future__ import absolute_import, print_function, unicode_literals
from datetime import datetime
+from github import Github
+
import base64
import hashlib
import io
@@ -594,7 +596,11 @@ class PackageCommands(CommandBase):
@CommandArgument('--secret-from-environment',
action='store_true',
help='Retrieve the appropriate secrets from the environment.')
- def upload_nightly(self, platform, secret_from_environment):
+ @CommandArgument('--github-release-id',
+ default=None,
+ type=int,
+ help='The github release to upload the nightly builds.')
+ def upload_nightly(self, platform, secret_from_environment, github_release_id):
import boto3
def get_s3_secret():
@@ -612,7 +618,24 @@ class PackageCommands(CommandBase):
path.basename(package)
)
- def upload_to_s3(platform, package, timestamp):
+ def upload_to_github_release(platform, package, package_hash_fileobj):
+ if not github_release_id:
+ return
+ extension = path.basename(package).partition('.')[2]
+ g = Github(os.environ['NIGHTLY_REPO_TOKEN'])
+ nightly_repo = g.get_repo(os.environ['NIGHTLY_REPO'])
+ release = nightly_repo.get_release(github_release_id)
+ if '2020' in platform:
+ asset_name = f'servo-latest-layout-2020.{extension}'
+ else:
+ asset_name = f'servo-latest.{extension}'
+ release.upload_asset(package, name=asset_name)
+ release.upload_asset_from_memory(
+ package_hash_fileobj,
+ package_hash_fileobj.getbuffer().nbytes,
+ name=f'{asset_name}.sha256')
+
+ def upload_to_s3(platform, package, package_hash_fileobj, timestamp):
(aws_access_key, aws_secret_access_key) = get_s3_secret()
s3 = boto3.client(
's3',
@@ -635,17 +658,6 @@ class PackageCommands(CommandBase):
extension = path.basename(package).partition('.')[2]
latest_upload_key = '{}/servo-latest.{}'.format(nightly_dir, extension)
- # Compute the hash
- SHA_BUF_SIZE = 1048576 # read in 1 MiB chunks
- sha256_digest = hashlib.sha256()
- with open(package, 'rb') as package_file:
- while True:
- data = package_file.read(SHA_BUF_SIZE)
- if not data:
- break
- sha256_digest.update(data)
- package_hash = sha256_digest.hexdigest()
- package_hash_fileobj = io.BytesIO(package_hash.encode('utf-8'))
latest_hash_upload_key = f'{latest_upload_key}.sha256'
s3.upload_file(package, BUCKET, package_upload_key)
@@ -763,7 +775,21 @@ class PackageCommands(CommandBase):
package
), file=sys.stderr)
return 1
- upload_to_s3(platform, package, timestamp)
+
+ # Compute the hash
+ SHA_BUF_SIZE = 1048576 # read in 1 MiB chunks
+ sha256_digest = hashlib.sha256()
+ with open(package, 'rb') as package_file:
+ while True:
+ data = package_file.read(SHA_BUF_SIZE)
+ if not data:
+ break
+ sha256_digest.update(data)
+ package_hash = sha256_digest.hexdigest()
+ package_hash_fileobj = io.BytesIO(package_hash.encode('utf-8'))
+
+ upload_to_s3(platform, package, package_hash_fileobj, timestamp)
+ upload_to_github_release(platform, package, package_hash_fileobj)
if platform == 'maven':
for package in PACKAGES[platform]: