Diffstat (limited to 'components/script/dom/blob.rs')
-rw-r--r--  components/script/dom/blob.rs  510
1 file changed, 211 insertions(+), 299 deletions(-)
diff --git a/components/script/dom/blob.rs b/components/script/dom/blob.rs
index c076282dcc1..44011df9494 100644
--- a/components/script/dom/blob.rs
+++ b/components/script/dom/blob.rs
@@ -1,348 +1,203 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-use dom::bindings::cell::DOMRefCell;
-use dom::bindings::codegen::Bindings::BlobBinding;
-use dom::bindings::codegen::Bindings::BlobBinding::BlobMethods;
-use dom::bindings::codegen::UnionTypes::BlobOrString;
-use dom::bindings::error::{Error, Fallible};
-use dom::bindings::js::{JS, Root};
-use dom::bindings::reflector::{DomObject, Reflector, reflect_dom_object};
-use dom::bindings::str::DOMString;
-use dom::globalscope::GlobalScope;
+ * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
+
+use crate::body::{run_array_buffer_data_algorithm, FetchedData};
+use crate::dom::bindings::codegen::Bindings::BlobBinding;
+use crate::dom::bindings::codegen::Bindings::BlobBinding::BlobMethods;
+use crate::dom::bindings::codegen::UnionTypes::ArrayBufferOrArrayBufferViewOrBlobOrString;
+use crate::dom::bindings::error::{Error, Fallible};
+use crate::dom::bindings::reflector::{reflect_dom_object, DomObject, Reflector};
+use crate::dom::bindings::root::DomRoot;
+use crate::dom::bindings::serializable::{Serializable, StorageKey};
+use crate::dom::bindings::str::DOMString;
+use crate::dom::bindings::structuredclone::StructuredDataHolder;
+use crate::dom::globalscope::GlobalScope;
+use crate::dom::promise::Promise;
+use crate::dom::readablestream::ReadableStream;
+use crate::realms::{AlreadyInRealm, InRealm};
+use crate::script_runtime::JSContext;
use dom_struct::dom_struct;
-use encoding::all::UTF_8;
-use encoding::types::{EncoderTrap, Encoding};
-use ipc_channel::ipc;
-use net_traits::{CoreResourceMsg, IpcSend};
-use net_traits::blob_url_store::{BlobBuf, get_blob_origin};
-use net_traits::filemanager_thread::{FileManagerThreadMsg, ReadFileProgress, RelativePos};
-use std::mem;
-use std::ops::Index;
-use std::path::PathBuf;
+use encoding_rs::UTF_8;
+use js::jsapi::JSObject;
+use msg::constellation_msg::{BlobId, BlobIndex, PipelineNamespaceId};
+use net_traits::filemanager_thread::RelativePos;
+use script_traits::serializable::BlobImpl;
+use std::collections::HashMap;
+use std::num::NonZeroU32;
+use std::ptr::NonNull;
+use std::rc::Rc;
use uuid::Uuid;
-/// File-based blob
-#[derive(JSTraceable)]
-pub struct FileBlob {
- id: Uuid,
- name: Option<PathBuf>,
- cache: DOMRefCell<Option<Vec<u8>>>,
- size: u64,
-}
-
-
-/// Different backends of Blob
-#[must_root]
-#[derive(JSTraceable)]
-pub enum BlobImpl {
- /// File-based blob, whose content lives in the net process
- File(FileBlob),
- /// Memory-based blob, whose content lives in the script process
- Memory(Vec<u8>),
- /// Sliced blob, including parent blob reference and
- /// relative positions of current slicing range,
- /// IMPORTANT: The depth of tree is only two, i.e. the parent Blob must be
- /// either File-based or Memory-based
- Sliced(JS<Blob>, RelativePos),
-}
-
-impl BlobImpl {
- /// Construct memory-backed BlobImpl
- #[allow(unrooted_must_root)]
- pub fn new_from_bytes(bytes: Vec<u8>) -> BlobImpl {
- BlobImpl::Memory(bytes)
- }
-
- /// Construct file-backed BlobImpl from File ID
- pub fn new_from_file(file_id: Uuid, name: PathBuf, size: u64) -> BlobImpl {
- BlobImpl::File(FileBlob {
- id: file_id,
- name: Some(name),
- cache: DOMRefCell::new(None),
- size: size,
- })
- }
-}
-
// https://w3c.github.io/FileAPI/#blob
#[dom_struct]
pub struct Blob {
reflector_: Reflector,
- #[ignore_heap_size_of = "No clear owner"]
- blob_impl: DOMRefCell<BlobImpl>,
- /// content-type string
- type_string: String,
+ blob_id: BlobId,
}
impl Blob {
- #[allow(unrooted_must_root)]
- pub fn new(
- global: &GlobalScope, blob_impl: BlobImpl, typeString: String)
- -> Root<Blob> {
- let boxed_blob = box Blob::new_inherited(blob_impl, typeString);
- reflect_dom_object(boxed_blob, global, BlobBinding::Wrap)
+ pub fn new(global: &GlobalScope, blob_impl: BlobImpl) -> DomRoot<Blob> {
+ let dom_blob = reflect_dom_object(Box::new(Blob::new_inherited(&blob_impl)), global);
+ global.track_blob(&dom_blob, blob_impl);
+ dom_blob
}
#[allow(unrooted_must_root)]
- pub fn new_inherited(blob_impl: BlobImpl, type_string: String) -> Blob {
+ pub fn new_inherited(blob_impl: &BlobImpl) -> Blob {
Blob {
reflector_: Reflector::new(),
- blob_impl: DOMRefCell::new(blob_impl),
- // NOTE: Guarding the format correctness here,
- // https://w3c.github.io/FileAPI/#dfn-type
- type_string: normalize_type_string(&type_string),
+ blob_id: blob_impl.blob_id(),
}
}
- #[allow(unrooted_must_root)]
- fn new_sliced(parent: &Blob, rel_pos: RelativePos,
- relative_content_type: DOMString) -> Root<Blob> {
- let blob_impl = match *parent.blob_impl.borrow() {
- BlobImpl::File(_) => {
- // Create new parent node
- BlobImpl::Sliced(JS::from_ref(parent), rel_pos)
- }
- BlobImpl::Memory(_) => {
- // Create new parent node
- BlobImpl::Sliced(JS::from_ref(parent), rel_pos)
- }
- BlobImpl::Sliced(ref grandparent, ref old_rel_pos) => {
- // Adjust the slicing position, using same parent
- BlobImpl::Sliced(grandparent.clone(), old_rel_pos.slice_inner(&rel_pos))
- }
- };
-
- Blob::new(&parent.global(), blob_impl, relative_content_type.into())
- }
-
// https://w3c.github.io/FileAPI/#constructorBlob
- pub fn Constructor(global: &GlobalScope,
- blobParts: Option<Vec<BlobOrString>>,
- blobPropertyBag: &BlobBinding::BlobPropertyBag)
- -> Fallible<Root<Blob>> {
- // TODO: accept other blobParts types - ArrayBuffer or ArrayBufferView
+ #[allow(non_snake_case)]
+ pub fn Constructor(
+ global: &GlobalScope,
+ blobParts: Option<Vec<ArrayBufferOrArrayBufferViewOrBlobOrString>>,
+ blobPropertyBag: &BlobBinding::BlobPropertyBag,
+ ) -> Fallible<DomRoot<Blob>> {
let bytes: Vec<u8> = match blobParts {
None => Vec::new(),
Some(blobparts) => match blob_parts_to_bytes(blobparts) {
Ok(bytes) => bytes,
Err(_) => return Err(Error::InvalidCharacter),
- }
+ },
};
- Ok(Blob::new(global, BlobImpl::new_from_bytes(bytes), blobPropertyBag.type_.to_string()))
+ let type_string = normalize_type_string(&blobPropertyBag.type_.to_string());
+ let blob_impl = BlobImpl::new_from_bytes(bytes, type_string);
+
+ Ok(Blob::new(global, blob_impl))
}
/// Get a slice to inner data, this might incur synchronous read and caching
pub fn get_bytes(&self) -> Result<Vec<u8>, ()> {
- match *self.blob_impl.borrow() {
- BlobImpl::File(ref f) => {
- let (buffer, is_new_buffer) = match *f.cache.borrow() {
- Some(ref bytes) => (bytes.clone(), false),
- None => {
- let bytes = read_file(&self.global(), f.id.clone())?;
- (bytes, true)
- }
- };
-
- // Cache
- if is_new_buffer {
- *f.cache.borrow_mut() = Some(buffer.clone());
- }
-
- Ok(buffer)
- }
- BlobImpl::Memory(ref s) => Ok(s.clone()),
- BlobImpl::Sliced(ref parent, ref rel_pos) => {
- parent.get_bytes().map(|v| {
- let range = rel_pos.to_abs_range(v.len());
- v.index(range).to_vec()
- })
- }
- }
+ self.global().get_blob_bytes(&self.blob_id)
}
/// Get a copy of the type_string
pub fn type_string(&self) -> String {
- self.type_string.clone()
+ self.global().get_blob_type_string(&self.blob_id)
}
/// Get a FileID representing the Blob content,
/// used by URL.createObjectURL
pub fn get_blob_url_id(&self) -> Uuid {
- let opt_sliced_parent = match *self.blob_impl.borrow() {
- BlobImpl::Sliced(ref parent, ref rel_pos) => {
- Some((parent.promote(/* set_valid is */ false), rel_pos.clone(), parent.Size()))
- }
- _ => None
- };
+ self.global().get_blob_url_id(&self.blob_id)
+ }
- match opt_sliced_parent {
- Some((parent_id, rel_pos, size)) => self.create_sliced_url_id(&parent_id, &rel_pos, size),
- None => self.promote(/* set_valid is */ true),
- }
+ /// <https://w3c.github.io/FileAPI/#blob-get-stream>
+ pub fn get_stream(&self) -> DomRoot<ReadableStream> {
+ self.global().get_blob_stream(&self.blob_id)
}
+}
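
The impl block above captures the core of this change: a Blob DOM object now holds only a blob_id, and bytes, type strings, URL ids, and streams are all fetched from the GlobalScope. Below is a minimal, self-contained sketch of that ownership pattern; BlobRegistry and the u64 key are hypothetical stand-ins, since the real bookkeeping lives in GlobalScope and is keyed by BlobId.

    use std::collections::HashMap;

    // Hypothetical stand-in for the per-global bookkeeping implied by
    // track_blob / get_blob_bytes / get_blob_type_string above.
    struct BlobRegistry {
        // id -> (bytes, type string); Servo keys the real map by BlobId.
        blobs: HashMap<u64, (Vec<u8>, String)>,
    }

    impl BlobRegistry {
        fn track_blob(&mut self, id: u64, bytes: Vec<u8>, type_string: String) {
            self.blobs.insert(id, (bytes, type_string));
        }

        fn get_blob_bytes(&self, id: &u64) -> Result<Vec<u8>, ()> {
            self.blobs.get(id).map(|(bytes, _)| bytes.clone()).ok_or(())
        }

        fn get_blob_type_string(&self, id: &u64) -> Option<String> {
            self.blobs.get(id).map(|(_, type_string)| type_string.clone())
        }
    }

This id-plus-registry split is also what the Serializable impl below relies on: serialization clones the BlobImpl under a fresh id rather than touching the DOM object itself.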
- /// Promote non-Slice blob:
- /// 1. Memory-based: The bytes in data slice will be transferred to file manager thread.
- /// 2. File-based: If set_valid, then activate the FileID so it can serve as URL
- /// Depending on set_valid, the returned FileID can be part of
- /// valid or invalid Blob URL.
- fn promote(&self, set_valid: bool) -> Uuid {
- let mut bytes = vec![];
- let global_url = self.global().get_url();
-
- match *self.blob_impl.borrow_mut() {
- BlobImpl::Sliced(_, _) => {
- debug!("Sliced can't have a sliced parent");
- // Return dummy id
- return Uuid::new_v4();
- }
- BlobImpl::File(ref f) => {
- if set_valid {
- let origin = get_blob_origin(&global_url);
- let (tx, rx) = ipc::channel().unwrap();
-
- let msg = FileManagerThreadMsg::ActivateBlobURL(f.id.clone(), tx, origin.clone());
- self.send_to_file_manager(msg);
-
- match rx.recv().unwrap() {
- Ok(_) => return f.id.clone(),
- // Return a dummy id on error
- Err(_) => return Uuid::new_v4(),
- }
- } else {
- // no need to activate
- return f.id.clone();
- }
- }
- BlobImpl::Memory(ref mut bytes_in) => mem::swap(bytes_in, &mut bytes),
+impl Serializable for Blob {
+ /// <https://w3c.github.io/FileAPI/#ref-for-serialization-steps>
+ fn serialize(&self, sc_holder: &mut StructuredDataHolder) -> Result<StorageKey, ()> {
+ let blob_impls = match sc_holder {
+ StructuredDataHolder::Write { blobs, .. } => blobs,
+ _ => panic!("Unexpected variant of StructuredDataHolder"),
};
- let origin = get_blob_origin(&global_url);
+ let blob_id = self.blob_id.clone();
- let blob_buf = BlobBuf {
- filename: None,
- type_string: self.type_string.clone(),
- size: bytes.len() as u64,
- bytes: bytes.to_vec(),
- };
+ // 1. Get a clone of the blob impl.
+ let blob_impl = self.global().serialize_blob(&blob_id);
- let (tx, rx) = ipc::channel().unwrap();
- let msg = FileManagerThreadMsg::PromoteMemory(blob_buf, set_valid, tx, origin.clone());
- self.send_to_file_manager(msg);
-
- match rx.recv().unwrap() {
- Ok(id) => {
- *self.blob_impl.borrow_mut() = BlobImpl::File(FileBlob {
- id: id.clone(),
- name: None,
- cache: DOMRefCell::new(Some(bytes.to_vec())),
- size: bytes.len() as u64,
- });
- id
- }
- // Dummy id
- Err(_) => Uuid::new_v4(),
- }
- }
+ // We clone the data, but the clone gets its own Id.
+ let new_blob_id = blob_impl.blob_id();
- /// Get a FileID representing sliced parent-blob content
- fn create_sliced_url_id(&self, parent_id: &Uuid,
- rel_pos: &RelativePos, parent_len: u64) -> Uuid {
- let origin = get_blob_origin(&self.global().get_url());
-
- let (tx, rx) = ipc::channel().unwrap();
- let msg = FileManagerThreadMsg::AddSlicedURLEntry(parent_id.clone(),
- rel_pos.clone(),
- tx, origin.clone());
- self.send_to_file_manager(msg);
- match rx.recv().expect("File manager thread is down") {
- Ok(new_id) => {
- *self.blob_impl.borrow_mut() = BlobImpl::File(FileBlob {
- id: new_id.clone(),
- name: None,
- cache: DOMRefCell::new(None),
- size: rel_pos.to_abs_range(parent_len as usize).len() as u64,
- });
-
- // Return the indirect id reference
- new_id
- }
- Err(_) => {
- // Return dummy id
- Uuid::new_v4()
- }
- }
- }
+ // 2. Store the object at a given key.
+ let blobs = blob_impls.get_or_insert_with(|| HashMap::new());
+ blobs.insert(new_blob_id.clone(), blob_impl);
- /// Cleanups at the time of destruction/closing
- fn clean_up_file_resource(&self) {
- if let BlobImpl::File(ref f) = *self.blob_impl.borrow() {
- let origin = get_blob_origin(&self.global().get_url());
+ let PipelineNamespaceId(name_space) = new_blob_id.namespace_id;
+ let BlobIndex(index) = new_blob_id.index;
+ let index = index.get();
- let (tx, rx) = ipc::channel().unwrap();
+ let name_space = name_space.to_ne_bytes();
+ let index = index.to_ne_bytes();
- let msg = FileManagerThreadMsg::DecRef(f.id.clone(), origin, tx);
- self.send_to_file_manager(msg);
- let _ = rx.recv().unwrap();
- }
- }
+ let storage_key = StorageKey {
+ index: u32::from_ne_bytes(index),
+ name_space: u32::from_ne_bytes(name_space),
+ };
- fn send_to_file_manager(&self, msg: FileManagerThreadMsg) {
- let global = self.global();
- let resource_threads = global.resource_threads();
- let _ = resource_threads.send(CoreResourceMsg::ToFileManager(msg));
+ // 3. Return the storage key.
+ Ok(storage_key)
}
-}
-impl Drop for Blob {
- fn drop(&mut self) {
- self.clean_up_file_resource();
- }
-}
+ /// <https://w3c.github.io/FileAPI/#ref-for-deserialization-steps>
+ fn deserialize(
+ owner: &GlobalScope,
+ sc_holder: &mut StructuredDataHolder,
+ storage_key: StorageKey,
+ ) -> Result<(), ()> {
+ // 1. Re-build the key for the storage location
+ // of the serialized object.
+ let namespace_id = PipelineNamespaceId(storage_key.name_space.clone());
+ let index = BlobIndex(
+ NonZeroU32::new(storage_key.index.clone()).expect("Deserialized blob index is zero"),
+ );
+
+ let id = BlobId {
+ namespace_id,
+ index,
+ };
-fn read_file(global: &GlobalScope, id: Uuid) -> Result<Vec<u8>, ()> {
- let resource_threads = global.resource_threads();
- let (chan, recv) = ipc::channel().map_err(|_|())?;
- let origin = get_blob_origin(&global.get_url());
- let check_url_validity = false;
- let msg = FileManagerThreadMsg::ReadFile(chan, id, check_url_validity, origin);
- let _ = resource_threads.send(CoreResourceMsg::ToFileManager(msg));
-
- let mut bytes = vec![];
-
- loop {
- match recv.recv().unwrap() {
- Ok(ReadFileProgress::Meta(mut blob_buf)) => {
- bytes.append(&mut blob_buf.bytes);
- }
- Ok(ReadFileProgress::Partial(mut bytes_in)) => {
- bytes.append(&mut bytes_in);
- }
- Ok(ReadFileProgress::EOF) => {
- return Ok(bytes);
- }
- Err(_) => return Err(()),
+ let (blobs, blob_impls) = match sc_holder {
+ StructuredDataHolder::Read {
+ blobs, blob_impls, ..
+ } => (blobs, blob_impls),
+ _ => panic!("Unexpected variant of StructuredDataHolder"),
+ };
+
+ // 2. Get the transferred object from its storage, using the key.
+ let blob_impls_map = blob_impls
+ .as_mut()
+ .expect("The SC holder does not have any blob impls");
+ let blob_impl = blob_impls_map
+ .remove(&id)
+ .expect("No blob to be deserialized found.");
+ if blob_impls_map.is_empty() {
+ *blob_impls = None;
}
+
+ let deserialized_blob = Blob::new(&*owner, blob_impl);
+
+ let blobs = blobs.get_or_insert_with(|| HashMap::new());
+ blobs.insert(storage_key, deserialized_blob);
+
+ Ok(())
}
}
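
The Serializable impl above packs a BlobId (a pipeline namespace id plus a non-zero index) into the two u32 fields of a StorageKey using native-endian byte round-trips, and rebuilds the id on deserialization. A self-contained sketch of that round-trip, with local stand-ins assumed for the msg::constellation_msg types:

    use std::num::NonZeroU32;

    // Local stand-ins; the real BlobId, BlobIndex and PipelineNamespaceId
    // come from msg::constellation_msg and may differ in detail.
    #[derive(Clone, Copy, Debug, PartialEq)]
    struct PipelineNamespaceId(u32);
    #[derive(Clone, Copy, Debug, PartialEq)]
    struct BlobIndex(NonZeroU32);
    #[derive(Clone, Copy, Debug, PartialEq)]
    struct BlobId {
        namespace_id: PipelineNamespaceId,
        index: BlobIndex,
    }
    #[derive(Clone, Copy)]
    struct StorageKey {
        index: u32,
        name_space: u32,
    }

    // Mirrors the byte shuffling in Blob::serialize above.
    fn to_storage_key(id: BlobId) -> StorageKey {
        StorageKey {
            index: u32::from_ne_bytes(id.index.0.get().to_ne_bytes()),
            name_space: u32::from_ne_bytes(id.namespace_id.0.to_ne_bytes()),
        }
    }

    // Mirrors the reconstruction in Blob::deserialize above.
    fn from_storage_key(key: StorageKey) -> BlobId {
        BlobId {
            namespace_id: PipelineNamespaceId(key.name_space),
            index: BlobIndex(NonZeroU32::new(key.index).expect("blob index is zero")),
        }
    }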
/// Extract bytes from BlobParts, used by Blob and File constructor
-/// https://w3c.github.io/FileAPI/#constructorBlob
-pub fn blob_parts_to_bytes(blobparts: Vec<BlobOrString>) -> Result<Vec<u8>, ()> {
+/// <https://w3c.github.io/FileAPI/#constructorBlob>
+#[allow(unsafe_code)]
+pub fn blob_parts_to_bytes(
+ mut blobparts: Vec<ArrayBufferOrArrayBufferViewOrBlobOrString>,
+) -> Result<Vec<u8>, ()> {
let mut ret = vec![];
-
- for blobpart in &blobparts {
+ for blobpart in &mut blobparts {
match blobpart {
- &BlobOrString::String(ref s) => {
- let mut bytes = UTF_8.encode(s, EncoderTrap::Replace).map_err(|_|())?;
- ret.append(&mut bytes);
+ &mut ArrayBufferOrArrayBufferViewOrBlobOrString::String(ref s) => {
+ ret.extend(s.as_bytes());
+ },
+ &mut ArrayBufferOrArrayBufferViewOrBlobOrString::Blob(ref b) => {
+ let bytes = b.get_bytes().unwrap_or(vec![]);
+ ret.extend(bytes);
+ },
+ &mut ArrayBufferOrArrayBufferViewOrBlobOrString::ArrayBuffer(ref mut a) => unsafe {
+ let bytes = a.as_slice();
+ ret.extend(bytes);
},
- &BlobOrString::Blob(ref b) => {
- let mut bytes = b.get_bytes().unwrap_or(vec![]);
- ret.append(&mut bytes);
+ &mut ArrayBufferOrArrayBufferViewOrBlobOrString::ArrayBufferView(ref mut a) => unsafe {
+ let bytes = a.as_slice();
+ ret.extend(bytes);
},
}
}
@@ -353,41 +208,98 @@ pub fn blob_parts_to_bytes(blobparts: Vec<BlobOrString>) -> Result<Vec<u8>, ()>
impl BlobMethods for Blob {
// https://w3c.github.io/FileAPI/#dfn-size
fn Size(&self) -> u64 {
- match *self.blob_impl.borrow() {
- BlobImpl::File(ref f) => f.size,
- BlobImpl::Memory(ref v) => v.len() as u64,
- BlobImpl::Sliced(ref parent, ref rel_pos) =>
- rel_pos.to_abs_range(parent.Size() as usize).len() as u64,
- }
+ self.global().get_blob_size(&self.blob_id)
}
// https://w3c.github.io/FileAPI/#dfn-type
fn Type(&self) -> DOMString {
- DOMString::from(self.type_string.clone())
+ DOMString::from(self.type_string())
+ }
+
+ // <https://w3c.github.io/FileAPI/#blob-get-stream>
+ fn Stream(&self, _cx: JSContext) -> NonNull<JSObject> {
+ self.get_stream().get_js_stream()
}
// https://w3c.github.io/FileAPI/#slice-method-algo
- fn Slice(&self,
- start: Option<i64>,
- end: Option<i64>,
- content_type: Option<DOMString>)
- -> Root<Blob> {
+ fn Slice(
+ &self,
+ start: Option<i64>,
+ end: Option<i64>,
+ content_type: Option<DOMString>,
+ ) -> DomRoot<Blob> {
+ let type_string =
+ normalize_type_string(&content_type.unwrap_or(DOMString::from("")).to_string());
let rel_pos = RelativePos::from_opts(start, end);
- Blob::new_sliced(self, rel_pos, content_type.unwrap_or(DOMString::from("")))
+ let blob_impl = BlobImpl::new_sliced(rel_pos, self.blob_id.clone(), type_string);
+ Blob::new(&*self.global(), blob_impl)
+ }
+
+ // https://w3c.github.io/FileAPI/#text-method-algo
+ fn Text(&self) -> Rc<Promise> {
+ let global = self.global();
+ let in_realm_proof = AlreadyInRealm::assert(&global);
+ let p = Promise::new_in_current_realm(&global, InRealm::Already(&in_realm_proof));
+ let id = self.get_blob_url_id();
+ global.read_file_async(
+ id,
+ p.clone(),
+ Box::new(|promise, bytes| match bytes {
+ Ok(b) => {
+ let (text, _, _) = UTF_8.decode(&b);
+ let text = DOMString::from(text);
+ promise.resolve_native(&text);
+ },
+ Err(e) => {
+ promise.reject_error(e);
+ },
+ }),
+ );
+ p
+ }
+
+ // https://w3c.github.io/FileAPI/#arraybuffer-method-algo
+ fn ArrayBuffer(&self) -> Rc<Promise> {
+ let global = self.global();
+ let in_realm_proof = AlreadyInRealm::assert(&global);
+ let p = Promise::new_in_current_realm(&global, InRealm::Already(&in_realm_proof));
+
+ let id = self.get_blob_url_id();
+
+ global.read_file_async(
+ id,
+ p.clone(),
+ Box::new(|promise, bytes| {
+ match bytes {
+ Ok(b) => {
+ let cx = promise.global().get_cx();
+ let result = run_array_buffer_data_algorithm(cx, b);
+
+ match result {
+ Ok(FetchedData::ArrayBuffer(a)) => promise.resolve_native(&a),
+ Err(e) => promise.reject_error(e),
+ _ => panic!("Unexpected result from run_array_buffer_data_algorithm"),
+ }
+ },
+ Err(e) => promise.reject_error(e),
+ };
+ }),
+ );
+ p
}
}
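
Blob::Text above resolves its promise with a lossy UTF-8 decode of the blob bytes via encoding_rs. A minimal sketch of just that decode step, outside the DOM and promise machinery:

    use encoding_rs::UTF_8;

    // Lossy UTF-8 decode, as used when resolving the Text() promise above;
    // invalid byte sequences become U+FFFD instead of failing.
    fn decode_blob_text(bytes: &[u8]) -> String {
        let (text, _encoding_used, _had_errors) = UTF_8.decode(bytes);
        text.into_owned()
    }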
/// Get the normalized, MIME-parsable type string
-/// https://w3c.github.io/FileAPI/#dfn-type
+/// <https://w3c.github.io/FileAPI/#dfn-type>
/// XXX: We will relax the restriction here,
/// since the spec has some problem over this part.
/// see https://github.com/w3c/FileAPI/issues/43
-fn normalize_type_string(s: &str) -> String {
+pub fn normalize_type_string(s: &str) -> String {
if is_ascii_printable(s) {
- let s_lower = s.to_lowercase();
+ let s_lower = s.to_ascii_lowercase();
// match s_lower.parse() as Result<Mime, ()> {
- // Ok(_) => s_lower,
- // Err(_) => "".to_string()
+ // Ok(_) => s_lower,
+ // Err(_) => "".to_string()
s_lower
} else {
"".to_string()