aboutsummaryrefslogtreecommitdiffstats
path: root/tests/wpt/web-platform-tests/compression/compression-stream.any.js
diff options
context:
space:
mode:
authorWPT Sync Bot <josh+wptsync@joshmatthews.net>2019-10-11 10:24:47 +0000
committerWPT Sync Bot <josh+wptsync@joshmatthews.net>2019-10-11 15:44:29 +0000
commite10932a8f789cf1f0de0aa1e44c49b7f36121baf (patch)
treed3f978cb8960358498d9ece8daa052aabe701161 /tests/wpt/web-platform-tests/compression/compression-stream.any.js
parent0e503a0e0c942b3535724674ba332ad838da2352 (diff)
downloadservo-e10932a8f789cf1f0de0aa1e44c49b7f36121baf.tar.gz
servo-e10932a8f789cf1f0de0aa1e44c49b7f36121baf.zip
Update web-platform-tests to revision 33fa44546cbd74796eebfd7e8a33d6fc2f4e020a
Diffstat (limited to 'tests/wpt/web-platform-tests/compression/compression-stream.any.js')
-rw-r--r--tests/wpt/web-platform-tests/compression/compression-stream.any.js91
1 file changed, 91 insertions, 0 deletions
diff --git a/tests/wpt/web-platform-tests/compression/compression-stream.any.js b/tests/wpt/web-platform-tests/compression/compression-stream.any.js
new file mode 100644
index 00000000000..47df70f7cd3
--- /dev/null
+++ b/tests/wpt/web-platform-tests/compression/compression-stream.any.js
@@ -0,0 +1,91 @@
+// META: global=worker
+// META: script=pako/pako_inflate.min.js
+// META: timeout=long
+
+'use strict';
+
// Test fixtures served by the WPT server: a small text file and a larger
// binary media file, used to exercise compression on different input sizes.
const SMALL_FILE = "/media/foo.vtt";
const LARGE_FILE = "/media/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.webm";
+
/**
 * Pipe `input` through a CompressionStream and collect the compressed output.
 *
 * @param {Uint8Array} input - bytes to compress (written as a single chunk).
 * @param {string} format - compression format, e.g. "deflate" or "gzip".
 * @returns {Promise<Uint8Array>} the concatenated compressed bytes.
 */
async function compressArrayBuffer(input, format) {
  const cs = new CompressionStream(format);
  const writer = cs.writable.getWriter();
  // Keep the write promise: the original dropped it on the floor, so a
  // rejected write would surface as an unhandled rejection instead of
  // failing this function.
  const writePromise = writer.write(input);
  const closePromise = writer.close();
  const chunks = [];
  const reader = cs.readable.getReader();
  let totalSize = 0;
  // Drain the readable side fully before awaiting the writer promises;
  // awaiting them first could deadlock if the stream applies backpressure.
  while (true) {
    const { value, done } = await reader.read();
    if (done)
      break;
    chunks.push(value);
    totalSize += value.byteLength;
  }
  await writePromise;
  await closePromise;
  // Concatenate all output chunks into one contiguous Uint8Array.
  const concatenated = new Uint8Array(totalSize);
  let offset = 0;
  for (const chunk of chunks) {
    concatenated.set(chunk, offset);
    offset += chunk.byteLength;
  }
  return concatenated;
}
+
test(() => {
  // An unrecognized format name must reject synchronously with a TypeError.
  assert_throws(
      new TypeError(),
      () => new CompressionStream("nonvalid"),
      "non supported format should throw");
}, "CompressionStream constructor should throw on invalid format");
+
promise_test(async () => {
  // Compress a zero-length buffer with deflate and round-trip it via pako.
  const bufferView = new Uint8Array(new ArrayBuffer(0));
  const compressedData = await compressArrayBuffer(bufferView, "deflate");
  // decompress with pako, and check that we got the same result as our original string
  assert_array_equals(bufferView, pako.inflate(compressedData));
}, "deflated empty data should be reinflated back to its origin");
+
promise_test(async () => {
  // Fetch a small fixture, deflate-compress it, and verify pako reinflates
  // it to the original bytes.
  const response = await fetch(SMALL_FILE);  // semicolon added: avoid ASI reliance
  const buffer = await response.arrayBuffer();
  const bufferView = new Uint8Array(buffer);
  const compressedData = await compressArrayBuffer(bufferView, "deflate");
  // decompress with pako, and check that we got the same result as our original string
  assert_array_equals(bufferView, pako.inflate(compressedData));
}, "deflated small amount data should be reinflated back to its origin");
+
promise_test(async () => {
  // Fetch a large fixture, deflate-compress it, and verify pako reinflates
  // it to the original bytes.
  const response = await fetch(LARGE_FILE);  // semicolon added: avoid ASI reliance
  const buffer = await response.arrayBuffer();
  const bufferView = new Uint8Array(buffer);
  const compressedData = await compressArrayBuffer(bufferView, "deflate");
  // decompress with pako, and check that we got the same result as our original string
  assert_array_equals(bufferView, pako.inflate(compressedData));
}, "deflated large amount data should be reinflated back to its origin");
+
promise_test(async () => {
  // Compress a zero-length buffer with gzip and round-trip it via pako.
  const bufferView = new Uint8Array(new ArrayBuffer(0));
  const compressedData = await compressArrayBuffer(bufferView, "gzip");
  // decompress with pako, and check that we got the same result as our original string
  assert_array_equals(bufferView, pako.inflate(compressedData));
}, "gzipped empty data should be reinflated back to its origin");
+
promise_test(async () => {
  // Fetch a small fixture, gzip-compress it, and verify pako reinflates
  // it to the original bytes (pako.inflate auto-detects gzip headers).
  const response = await fetch(SMALL_FILE);  // semicolon added: avoid ASI reliance
  const buffer = await response.arrayBuffer();
  const bufferView = new Uint8Array(buffer);
  const compressedData = await compressArrayBuffer(bufferView, "gzip");
  // decompress with pako, and check that we got the same result as our original string
  assert_array_equals(bufferView, pako.inflate(compressedData));
}, "gzipped small amount data should be reinflated back to its origin");
+
promise_test(async () => {
  // Fetch a large fixture, gzip-compress it, and verify pako reinflates
  // it to the original bytes (pako.inflate auto-detects gzip headers).
  const response = await fetch(LARGE_FILE);  // semicolon added: avoid ASI reliance
  const buffer = await response.arrayBuffer();
  const bufferView = new Uint8Array(buffer);
  const compressedData = await compressArrayBuffer(bufferView, "gzip");
  // decompress with pako, and check that we got the same result as our original string
  assert_array_equals(bufferView, pako.inflate(compressedData));
}, "gzipped large amount data should be reinflated back to its origin");