about summary refs log tree commit diff stats
path: root/python/servo
diff options
context:
space:
mode:
authorcamelid <camelidcamel@gmail.com>2020-05-12 14:08:36 -0700
committercamelid <camelidcamel@gmail.com>2020-05-13 10:03:02 -0700
commitf1e37e2dd2b73b0e128d27a1518545ea6b24de8b (patch)
tree34e2bdbb178e55281e02f548b1ddac486d554302 /python/servo
parent500a21c6c74721f39068dd721481b108164ca75b (diff)
downloadservo-f1e37e2dd2b73b0e128d27a1518545ea6b24de8b.tar.gz
servo-f1e37e2dd2b73b0e128d27a1518545ea6b24de8b.zip
Add integrity hash for nightly builds
Diffstat (limited to 'python/servo')
-rw-r--r--python/servo/package_commands.py22
1 file changed, 22 insertions, 0 deletions
diff --git a/python/servo/package_commands.py b/python/servo/package_commands.py
index 5f27a6a6391..7f52a59735e 100644
--- a/python/servo/package_commands.py
+++ b/python/servo/package_commands.py
@@ -12,6 +12,7 @@ from __future__ import absolute_import, print_function, unicode_literals
from datetime import datetime
import base64
import hashlib
+import io
import json
import os
import os.path as path
@@ -630,12 +631,33 @@ class PackageCommands(CommandBase):
extension = path.splitext(path.basename(package))[1]
latest_upload_key = '{}/servo-latest{}'.format(nightly_dir, extension)
+ # Compute the hash
+ SHA_BUF_SIZE = 1048576 # read in 1 MiB chunks
+ sha256_digest = hashlib.sha256()
+ with open(package, 'rb') as package_file:
+ while True:
+ data = package_file.read(SHA_BUF_SIZE)
+ if not data:
+ break
+ sha256_digest.update(data)
+ package_hash = sha256_digest.hexdigest()
+ package_hash_fileobj = io.BytesIO(package_hash)
+ package_hash_upload_key = '{}/{}.sha256'.format(nightly_dir, filename)
+ latest_hash_upload_key = '{}/servo-latest{}.sha256'.format(nightly_dir, extension)
+
s3.upload_file(package, BUCKET, package_upload_key)
+ s3.upload_fileobj(package_hash_fileobj, BUCKET, package_hash_upload_key)
+
copy_source = {
'Bucket': BUCKET,
'Key': package_upload_key,
}
+ copy_source_hash = {
+ 'Bucket': BUCKET,
+ 'Key': package_hash_upload_key,
+ }
s3.copy(copy_source, BUCKET, latest_upload_key)
+ s3.copy(copy_source_hash, BUCKET, latest_hash_upload_key)
def update_maven(directory):
(aws_access_key, aws_secret_access_key) = get_s3_secret()