aboutsummaryrefslogtreecommitdiffstats
path: root/python/servo/package_commands.py
diff options
context:
space:
mode:
authorbors-servo <infra@servo.org>2023-04-12 18:16:59 +0200
committerGitHub <noreply@github.com>2023-04-12 18:16:59 +0200
commit4a9b80382c387e9ca099b669827de09e21cd61b3 (patch)
tree2b2bfdfac037ce5dcf84735ba58fafcb10642c48 /python/servo/package_commands.py
parentbdc3bc53fc201d5d542df967b3534cadb3757075 (diff)
parentf1ba708cf77f7ee7dd7c718f48c4f0532bba1845 (diff)
downloadservo-4a9b80382c387e9ca099b669827de09e21cd61b3.tar.gz
servo-4a9b80382c387e9ca099b669827de09e21cd61b3.zip
Auto merge of #29621 - mukilan:upload-nightly-to-gh-releases, r=mrobinson
Upload nightly builds to Github Releases This change extends the `mach upload-nightly` command to publish the nightly builds for all platforms as GH Release assets. The GH releases are made on a separate repository so that we can persist older nightly builds without having to accumulate git tags for them. Example releases on a fork can be seen here: https://github.com/mukilan/servo-nightly-builds/releases/ Assets of the *latest* nightly releases will be available at a stable url, e.g. for linux `https://github.com/mukilan/servo-nightly-builds/releases/latest/download/servo-latest.tar.gz` Some design tradeoffs in this approach are: 1. To allow the 'latest' link from servo.org to remain stable, the release assets are named 'servo-latest.{ext}' instead of containing the release tag/date. 2. The release is created as a draft and published atomically when all platforms have been built successfully. This allows us to link to the 'latest' alias from servo.org while guaranteeing that it contains builds for all platforms. The other option here would be to have code in the servo.org UI that uses the GH API to find the most recent release with a successful build for a given platform. 3. The tags in the nightly repo are all based on the same commit, which has no relation to the servo code base. --- - [x] `./mach build -d` does not report any errors - [x] `./mach test-tidy` does not report any errors - [ ] These changes fix #___ (GitHub issue number if applicable) <!-- Either: --> - [ ] There are tests for these changes OR - [x] These changes do not require tests because it extends the nightly CI job
Diffstat (limited to 'python/servo/package_commands.py')
-rw-r--r--python/servo/package_commands.py54
1 file changed, 40 insertions, 14 deletions
diff --git a/python/servo/package_commands.py b/python/servo/package_commands.py
index 53910dcd644..405bdfb8c3e 100644
--- a/python/servo/package_commands.py
+++ b/python/servo/package_commands.py
@@ -10,6 +10,8 @@
from __future__ import absolute_import, print_function, unicode_literals
from datetime import datetime
+from github import Github
+
import base64
import hashlib
import io
@@ -594,7 +596,11 @@ class PackageCommands(CommandBase):
@CommandArgument('--secret-from-environment',
action='store_true',
help='Retrieve the appropriate secrets from the environment.')
- def upload_nightly(self, platform, secret_from_environment):
+ @CommandArgument('--github-release-id',
+ default=None,
+ type=int,
+ help='The github release to upload the nightly builds.')
+ def upload_nightly(self, platform, secret_from_environment, github_release_id):
import boto3
def get_s3_secret():
@@ -612,7 +618,24 @@ class PackageCommands(CommandBase):
path.basename(package)
)
- def upload_to_s3(platform, package, timestamp):
+ def upload_to_github_release(platform, package, package_hash_fileobj):
+ if not github_release_id:
+ return
+ extension = path.basename(package).partition('.')[2]
+ g = Github(os.environ['NIGHTLY_REPO_TOKEN'])
+ nightly_repo = g.get_repo(os.environ['NIGHTLY_REPO'])
+ release = nightly_repo.get_release(github_release_id)
+ if '2020' in platform:
+ asset_name = f'servo-latest-layout-2020.{extension}'
+ else:
+ asset_name = f'servo-latest.{extension}'
+ release.upload_asset(package, name=asset_name)
+ release.upload_asset_from_memory(
+ package_hash_fileobj,
+ package_hash_fileobj.getbuffer().nbytes,
+ name=f'{asset_name}.sha256')
+
+ def upload_to_s3(platform, package, package_hash_fileobj, timestamp):
(aws_access_key, aws_secret_access_key) = get_s3_secret()
s3 = boto3.client(
's3',
@@ -635,17 +658,6 @@ class PackageCommands(CommandBase):
extension = path.basename(package).partition('.')[2]
latest_upload_key = '{}/servo-latest.{}'.format(nightly_dir, extension)
- # Compute the hash
- SHA_BUF_SIZE = 1048576 # read in 1 MiB chunks
- sha256_digest = hashlib.sha256()
- with open(package, 'rb') as package_file:
- while True:
- data = package_file.read(SHA_BUF_SIZE)
- if not data:
- break
- sha256_digest.update(data)
- package_hash = sha256_digest.hexdigest()
- package_hash_fileobj = io.BytesIO(package_hash.encode('utf-8'))
latest_hash_upload_key = f'{latest_upload_key}.sha256'
s3.upload_file(package, BUCKET, package_upload_key)
@@ -763,7 +775,21 @@ class PackageCommands(CommandBase):
package
), file=sys.stderr)
return 1
- upload_to_s3(platform, package, timestamp)
+
+ # Compute the hash
+ SHA_BUF_SIZE = 1048576 # read in 1 MiB chunks
+ sha256_digest = hashlib.sha256()
+ with open(package, 'rb') as package_file:
+ while True:
+ data = package_file.read(SHA_BUF_SIZE)
+ if not data:
+ break
+ sha256_digest.update(data)
+ package_hash = sha256_digest.hexdigest()
+ package_hash_fileobj = io.BytesIO(package_hash.encode('utf-8'))
+
+ upload_to_s3(platform, package, package_hash_fileobj, timestamp)
+ upload_to_github_release(platform, package, package_hash_fileobj)
if platform == 'maven':
for package in PACKAGES[platform]: