aboutsummaryrefslogtreecommitdiffstats
path: root/python/servo/package_commands.py
diff options
context:
space:
mode:
authorMukilan Thiyagarajan <me@mukilan.in>2023-04-13 12:31:42 +0530
committerMukilan Thiyagarajan <me@mukilan.in>2023-04-13 12:31:42 +0530
commit76a5a1081bdd5bda86815a8014f16f080e6eb0ae (patch)
tree821c186cfb6318486e110000dbb5d798c004cd5d /python/servo/package_commands.py
parent4a9b80382c387e9ca099b669827de09e21cd61b3 (diff)
downloadservo-76a5a1081bdd5bda86815a8014f16f080e6eb0ae.tar.gz
servo-76a5a1081bdd5bda86815a8014f16f080e6eb0ae.zip
Fix nightly upload to GH release logic.
The boto3 S3 client automatically closes the given fileobj after the transfer is complete. This prevents us from reusing the package_hash_fileobj between s3 and github upload methods. This PR fixes the issue by creating fresh instances of io.BytesIO within the upload_to_* methods. Signed-off-by: Mukilan Thiyagarajan <me@mukilan.in>
Diffstat (limited to 'python/servo/package_commands.py')
-rw-r--r--python/servo/package_commands.py14
1 files changed, 9 insertions, 5 deletions
diff --git a/python/servo/package_commands.py b/python/servo/package_commands.py
index 405bdfb8c3e..c5b9fb69726 100644
--- a/python/servo/package_commands.py
+++ b/python/servo/package_commands.py
@@ -618,24 +618,28 @@ class PackageCommands(CommandBase):
path.basename(package)
)
- def upload_to_github_release(platform, package, package_hash_fileobj):
+ def upload_to_github_release(platform, package, package_hash):
if not github_release_id:
return
+
extension = path.basename(package).partition('.')[2]
g = Github(os.environ['NIGHTLY_REPO_TOKEN'])
nightly_repo = g.get_repo(os.environ['NIGHTLY_REPO'])
release = nightly_repo.get_release(github_release_id)
+ package_hash_fileobj = io.BytesIO(package_hash.encode('utf-8'))
+
if '2020' in platform:
asset_name = f'servo-latest-layout-2020.{extension}'
else:
asset_name = f'servo-latest.{extension}'
+
release.upload_asset(package, name=asset_name)
release.upload_asset_from_memory(
package_hash_fileobj,
package_hash_fileobj.getbuffer().nbytes,
name=f'{asset_name}.sha256')
- def upload_to_s3(platform, package, package_hash_fileobj, timestamp):
+ def upload_to_s3(platform, package, package_hash, timestamp):
(aws_access_key, aws_secret_access_key) = get_s3_secret()
s3 = boto3.client(
's3',
@@ -658,6 +662,7 @@ class PackageCommands(CommandBase):
extension = path.basename(package).partition('.')[2]
latest_upload_key = '{}/servo-latest.{}'.format(nightly_dir, extension)
+ package_hash_fileobj = io.BytesIO(package_hash.encode('utf-8'))
latest_hash_upload_key = f'{latest_upload_key}.sha256'
s3.upload_file(package, BUCKET, package_upload_key)
@@ -786,10 +791,9 @@ class PackageCommands(CommandBase):
break
sha256_digest.update(data)
package_hash = sha256_digest.hexdigest()
- package_hash_fileobj = io.BytesIO(package_hash.encode('utf-8'))
- upload_to_s3(platform, package, package_hash_fileobj, timestamp)
- upload_to_github_release(platform, package, package_hash_fileobj)
+ upload_to_s3(platform, package, package_hash, timestamp)
+ upload_to_github_release(platform, package, package_hash)
if platform == 'maven':
for package in PACKAGES[platform]: