author     Zhen Zhang <izgzhen@gmail.com>    2016-07-15 01:02:21 +0800
committer  Zhen Zhang <izgzhen@gmail.com>    2016-07-15 20:33:51 +0800
commit     fdc3a8e3ac7ea5512e6fa09be892cd67ff6e8657 (patch)
tree       51410a9abbf6d58aee8df62bd84d9737952c75dd /components/net
parent     4b78b9adab916cc4fdde6248e785030b79f406da (diff)
Put Blob URL online
Diffstat (limited to 'components/net')
-rw-r--r--  components/net/blob_loader.rs         115
-rw-r--r--  components/net/filemanager_thread.rs   82
-rw-r--r--  components/net/resource_thread.rs      21
3 files changed, 119 insertions, 99 deletions
diff --git a/components/net/blob_loader.rs b/components/net/blob_loader.rs
index df0c7468d42..0ded019553f 100644
--- a/components/net/blob_loader.rs
+++ b/components/net/blob_loader.rs
@@ -5,53 +5,92 @@
use hyper::header::{DispositionType, ContentDisposition, DispositionParam};
use hyper::header::{Headers, ContentType, ContentLength, Charset};
use hyper::http::RawStatus;
+use ipc_channel::ipc::{self, IpcSender};
use mime::{Mime, Attr};
use mime_classifier::MimeClassifier;
-use net_traits::ProgressMsg::Done;
-use net_traits::blob_url_store::BlobBuf;
+use net_traits::ProgressMsg::{Payload, Done};
+use net_traits::blob_url_store::parse_blob_url;
+use net_traits::filemanager_thread::{FileManagerThreadMsg, SelectedFileId};
use net_traits::response::HttpsState;
-use net_traits::{LoadConsumer, LoadData, Metadata};
-use resource_thread::start_sending_sniffed_opt;
+use net_traits::{LoadConsumer, LoadData, Metadata, NetworkError};
+use resource_thread::CancellationListener;
+use resource_thread::{start_sending_sniffed_opt, send_error};
+use std::boxed::FnBox;
use std::sync::Arc;
+use util::thread::spawn_named;
// TODO: Check on GET
// https://w3c.github.io/FileAPI/#requestResponseModel
-pub fn load_blob(load_data: LoadData, start_chan: LoadConsumer,
- classifier: Arc<MimeClassifier>, blob_buf: BlobBuf) {
- let content_type: Mime = blob_buf.type_string.parse().unwrap_or(mime!(Text / Plain));
- let charset = content_type.get_param(Attr::Charset);
-
- let mut headers = Headers::new();
-
- if let Some(name) = blob_buf.filename {
- let charset = charset.and_then(|c| c.as_str().parse().ok());
- headers.set(ContentDisposition {
- disposition: DispositionType::Inline,
- parameters: vec![
- DispositionParam::Filename(charset.unwrap_or(Charset::Us_Ascii),
- None, name.as_bytes().to_vec())
- ]
- });
+pub fn factory(filemanager_chan: IpcSender<FileManagerThreadMsg>)
+ -> Box<FnBox(LoadData,
+ LoadConsumer,
+ Arc<MimeClassifier>,
+ CancellationListener) + Send> {
+ box move |load_data: LoadData, start_chan, classifier, _cancel_listener| {
+ spawn_named(format!("blob loader for {}", load_data.url), move || {
+ load_blob(load_data, start_chan, classifier, filemanager_chan);
+ })
}
+}
+
+fn load_blob(load_data: LoadData, start_chan: LoadConsumer,
+ classifier: Arc<MimeClassifier>,
+ filemanager_chan: IpcSender<FileManagerThreadMsg>) {
+ let (chan, recv) = ipc::channel().unwrap();
+ if let Ok((id, origin, _fragment)) = parse_blob_url(&load_data.url.clone()) {
+ let id = SelectedFileId(id.simple().to_string());
+ let check_url_validity = true;
+ let msg = FileManagerThreadMsg::ReadFile(chan, id, check_url_validity, origin);
+ let _ = filemanager_chan.send(msg);
+
+ match recv.recv().unwrap() {
+ Ok(blob_buf) => {
+ let content_type: Mime = blob_buf.type_string.parse().unwrap_or(mime!(Text / Plain));
+ let charset = content_type.get_param(Attr::Charset);
+
+ let mut headers = Headers::new();
+
+ if let Some(name) = blob_buf.filename {
+ let charset = charset.and_then(|c| c.as_str().parse().ok());
+ headers.set(ContentDisposition {
+ disposition: DispositionType::Inline,
+ parameters: vec![
+ DispositionParam::Filename(charset.unwrap_or(Charset::Us_Ascii),
+ None, name.as_bytes().to_vec())
+ ]
+ });
+ }
+
+ headers.set(ContentType(content_type.clone()));
+ headers.set(ContentLength(blob_buf.size as u64));
+
+ let metadata = Metadata {
+ final_url: load_data.url.clone(),
+ content_type: Some(ContentType(content_type.clone())),
+ charset: charset.map(|c| c.as_str().to_string()),
+ headers: Some(headers),
+ // https://w3c.github.io/FileAPI/#TwoHundredOK
+ status: Some(RawStatus(200, "OK".into())),
+ https_state: HttpsState::None,
+ referrer: None,
+ };
- headers.set(ContentType(content_type.clone()));
- headers.set(ContentLength(blob_buf.size as u64));
-
- let metadata = Metadata {
- final_url: load_data.url.clone(),
- content_type: Some(ContentType(content_type.clone())),
- charset: charset.map(|c| c.as_str().to_string()),
- headers: Some(headers),
- // https://w3c.github.io/FileAPI/#TwoHundredOK
- status: Some(RawStatus(200, "OK".into())),
- https_state: HttpsState::None,
- referrer: None
- };
-
- if let Ok(chan) =
- start_sending_sniffed_opt(start_chan, metadata, classifier,
- &blob_buf.bytes, load_data.context.clone()) {
- let _ = chan.send(Done(Ok(())));
+ if let Ok(chan) =
+ start_sending_sniffed_opt(start_chan, metadata, classifier,
+ &blob_buf.bytes, load_data.context.clone()) {
+ let _ = chan.send(Payload(blob_buf.bytes));
+ let _ = chan.send(Done(Ok(())));
+ }
+ }
+ Err(e) => {
+ let err = NetworkError::Internal(format!("{:?}", e));
+ send_error(load_data.url, err, start_chan);
+ }
+ }
+ } else {
+ let e = format!("Invalid blob URL format {:?}", load_data.url);
+ let format_err = NetworkError::Internal(e);
+ send_error(load_data.url.clone(), format_err, start_chan);
}
}
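
The blob scheme now has a real loader: factory captures the file manager channel once, and each request spawns a thread that asks the file manager for the blob's bytes and streams them to the consumer. A minimal sketch of that IPC round trip, assuming filemanager_chan, id, and origin are already in hand as in load_blob above (illustrative only, with the header/metadata construction elided):

    // Ask the file manager for the blob's contents over IPC.
    let (chan, recv) = ipc::channel().unwrap();
    // `true` makes the store reject ids whose blob URL has been revoked.
    let msg = FileManagerThreadMsg::ReadFile(chan, id, /* check_url_validity */ true, origin);
    let _ = filemanager_chan.send(msg);
    match recv.recv().unwrap() {
        Ok(blob_buf) => {
            // Build Content-Type/Content-Length headers and Metadata from blob_buf,
            // then stream the bytes: Payload(blob_buf.bytes) followed by Done(Ok(())).
        }
        Err(e) => {
            // Surface the store error as NetworkError::Internal(format!("{:?}", e)).
        }
    }
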
diff --git a/components/net/filemanager_thread.rs b/components/net/filemanager_thread.rs
index 3f19949f0e3..175d1ca3fe1 100644
--- a/components/net/filemanager_thread.rs
+++ b/components/net/filemanager_thread.rs
@@ -2,15 +2,11 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-use blob_loader::load_blob;
use ipc_channel::ipc::{self, IpcReceiver, IpcSender};
-use mime_classifier::MimeClassifier;
use mime_guess::guess_mime_type_opt;
-use net_traits::blob_url_store::{BlobBuf, BlobURLStoreError, parse_blob_url};
+use net_traits::blob_url_store::{BlobBuf, BlobURLStoreError};
use net_traits::filemanager_thread::{FileManagerThreadMsg, FileManagerResult, FilterPattern, FileOrigin};
use net_traits::filemanager_thread::{SelectedFile, RelativePos, FileManagerThreadError, SelectedFileId};
-use net_traits::{LoadConsumer, LoadData, NetworkError};
-use resource_thread::send_error;
use std::collections::HashMap;
use std::fs::File;
use std::io::{Read, Seek, SeekFrom};
@@ -133,7 +129,6 @@ enum FileImpl {
struct FileManager<UI: 'static + UIProvider> {
receiver: IpcReceiver<FileManagerThreadMsg>,
store: Arc<FileManagerStore<UI>>,
- classifier: Arc<MimeClassifier>,
}
impl<UI: 'static + UIProvider> FileManager<UI> {
@@ -141,7 +136,6 @@ impl<UI: 'static + UIProvider> FileManager<UI> {
FileManager {
receiver: recv,
store: Arc::new(FileManagerStore::new(ui)),
- classifier: Arc::new(MimeClassifier::new()),
}
}
@@ -160,9 +154,9 @@ impl<UI: 'static + UIProvider> FileManager<UI> {
store.select_files(filter, sender, origin, opt_test_paths);
})
}
- FileManagerThreadMsg::ReadFile(sender, id, origin) => {
+ FileManagerThreadMsg::ReadFile(sender, id, check_url_validity, origin) => {
spawn_named("read file".to_owned(), move || {
- match store.try_read_file(id, origin) {
+ match store.try_read_file(id, check_url_validity, origin) {
Ok(buffer) => { let _ = sender.send(Ok(buffer)); }
Err(e) => {
let _ = sender.send(Err(FileManagerThreadError::BlobURLStoreError(e)));
@@ -170,9 +164,9 @@ impl<UI: 'static + UIProvider> FileManager<UI> {
}
})
}
- FileManagerThreadMsg::PromoteMemory(blob_buf, sender, origin) => {
+ FileManagerThreadMsg::PromoteMemory(blob_buf, set_valid, sender, origin) => {
spawn_named("transfer memory".to_owned(), move || {
- store.promote_memory(blob_buf, sender, origin);
+ store.promote_memory(blob_buf, set_valid, sender, origin);
})
}
FileManagerThreadMsg::AddSlicedURLEntry(id, rel_pos, sender, origin) =>{
@@ -180,18 +174,6 @@ impl<UI: 'static + UIProvider> FileManager<UI> {
store.add_sliced_url_entry(id, rel_pos, sender, origin);
})
}
- FileManagerThreadMsg::LoadBlob(load_data, consumer) => {
- match parse_blob_url(&load_data.url.clone()) {
- None => {
- let e = format!("Invalid blob URL format {:?}", load_data.url);
- let format_err = NetworkError::Internal(e);
- send_error(load_data.url.clone(), format_err, consumer);
- }
- Some((id, _fragment)) => {
- self.process_request(load_data, consumer, id);
- }
- }
- },
FileManagerThreadMsg::RevokeBlobURL(id, origin, sender) => {
if let Ok(id) = Uuid::parse_str(&id.0) {
spawn_named("revoke blob url".to_owned(), move || {
@@ -233,18 +215,6 @@ impl<UI: 'static + UIProvider> FileManager<UI> {
};
}
}
-
- fn process_request(&self, load_data: LoadData, consumer: LoadConsumer, id: Uuid) {
- let origin_in = load_data.url.origin().unicode_serialization();
- // check_url_validity is true since content is requested by this URL
- match self.store.get_blob_buf(&id, &origin_in, RelativePos::full_range(), true) {
- Ok(blob_buf) => {
- let classifier = self.classifier.clone();
- spawn_named("load blob".to_owned(), move || load_blob(load_data, consumer, classifier, blob_buf));
- }
- Err(e) => send_error(load_data.url.clone(), NetworkError::Internal(format!("{:?}", e)), consumer),
- }
- }
}
/// File manager's data store. It maintains a thread-safe mapping
@@ -271,7 +241,8 @@ impl <UI: 'static + UIProvider> FileManagerStore<UI> {
if *origin_in != *entry.origin {
Err(BlobURLStoreError::InvalidOrigin)
} else {
- if check_url_validity && !entry.is_valid_url.load(Ordering::Acquire) {
+ let is_valid = entry.is_valid_url.load(Ordering::Acquire);
+ if check_url_validity && !is_valid {
Err(BlobURLStoreError::InvalidFileID)
} else {
Ok(entry.file_impl.clone())
@@ -497,44 +468,47 @@ impl <UI: 'static + UIProvider> FileManagerStore<UI> {
}
}
- fn try_read_file(&self, id: SelectedFileId, origin_in: FileOrigin) -> Result<Vec<u8>, BlobURLStoreError> {
+    // Convenience wrapper over get_blob_buf
+ fn try_read_file(&self, id: SelectedFileId, check_url_validity: bool,
+ origin_in: FileOrigin) -> Result<BlobBuf, BlobURLStoreError> {
let id = try!(Uuid::parse_str(&id.0).map_err(|_| BlobURLStoreError::InvalidFileID));
-
- // No need to check URL validity in reading a file by FileReader
- let blob_buf = try!(self.get_blob_buf(&id, &origin_in, RelativePos::full_range(), false));
-
- Ok(blob_buf.bytes)
+ self.get_blob_buf(&id, &origin_in, RelativePos::full_range(), check_url_validity)
}
fn dec_ref(&self, id: &Uuid, origin_in: &FileOrigin,
unset_url_validity: bool) -> Result<(), BlobURLStoreError> {
- let (is_last_ref, opt_parent_id) = match self.entries.read().unwrap().get(id) {
+ let (do_remove, opt_parent_id) = match self.entries.read().unwrap().get(id) {
Some(entry) => {
if *entry.origin == *origin_in {
let old_refs = entry.refs.fetch_sub(1, Ordering::Release);
- if old_refs > 1 {
- if unset_url_validity {
- entry.is_valid_url.store(false, Ordering::Release);
- }
+ if unset_url_validity {
+ entry.is_valid_url.store(false, Ordering::Release);
+ }
+ if old_refs > 1 {
+ // not the last reference, no need to touch parent
(false, None)
} else {
+                        // last reference; if it has a reference to a parent id,
+                        // dec_ref on the parent later if necessary
+ let is_valid = entry.is_valid_url.load(Ordering::Acquire);
if let FileImpl::Sliced(ref parent_id, _) = entry.file_impl {
- // if it has a reference to parent id, dec_ref on parent later
- (true, Some(parent_id.clone()))
+ (!is_valid, Some(parent_id.clone()))
} else {
- (true, None)
+ (!is_valid, None)
}
}
- } else { // Invalid origin
+ } else {
return Err(BlobURLStoreError::InvalidOrigin);
}
}
None => return Err(BlobURLStoreError::InvalidFileID),
};
- if is_last_ref {
+        // Trigger removal if the last reference is gone and it is
+        // not part of a valid Blob URL
+ if do_remove {
atomic::fence(Ordering::Acquire);
self.remove(id);
@@ -548,7 +522,7 @@ impl <UI: 'static + UIProvider> FileManagerStore<UI> {
Ok(())
}
- fn promote_memory(&self, blob_buf: BlobBuf,
+ fn promote_memory(&self, blob_buf: BlobBuf, set_valid: bool,
sender: IpcSender<Result<SelectedFileId, BlobURLStoreError>>, origin: FileOrigin) {
match Url::parse(&origin) { // parse to check sanity
Ok(_) => {
@@ -558,7 +532,7 @@ impl <UI: 'static + UIProvider> FileManagerStore<UI> {
file_impl: FileImpl::Memory(blob_buf),
refs: AtomicUsize::new(1),
// Valid here since PromoteMemory implies URL creation
- is_valid_url: AtomicBool::new(true),
+ is_valid_url: AtomicBool::new(set_valid),
});
let _ = sender.send(Ok(SelectedFileId(id.simple().to_string())));
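
Two behavioural changes matter in this file: PromoteMemory now takes a set_valid flag, so a blob can be moved into the store without implying a live blob URL, and dec_ref only removes an entry once its last reference is gone and its URL is no longer valid. A condensed sketch of the new removal decision, using the same fields as the diff (locking, parent handling, and the actual removal are elided):

    // Decide whether the entry can be dropped after releasing one reference.
    let old_refs = entry.refs.fetch_sub(1, Ordering::Release);
    if unset_url_validity {
        entry.is_valid_url.store(false, Ordering::Release);
    }
    let is_valid = entry.is_valid_url.load(Ordering::Acquire);
    // Remove only when this was the last reference *and* no valid blob URL still points at it.
    let do_remove = old_refs <= 1 && !is_valid;
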
diff --git a/components/net/resource_thread.rs b/components/net/resource_thread.rs
index 56caf7ce886..bffd40fed6e 100644
--- a/components/net/resource_thread.rs
+++ b/components/net/resource_thread.rs
@@ -4,6 +4,7 @@
//! A thread that takes a URL and streams back the binary data.
use about_loader;
+use blob_loader;
use chrome_loader;
use connector::{Connector, create_http_connector};
use content_blocker::BLOCKED_CONTENT_RULES;
@@ -173,18 +174,20 @@ fn start_sending_opt(start_chan: LoadConsumer, metadata: Metadata,
pub fn new_resource_threads(user_agent: String,
devtools_chan: Option<Sender<DevtoolsControlMsg>>,
profiler_chan: ProfilerChan) -> (ResourceThreads, ResourceThreads) {
- let (public_core, private_core) = new_core_resource_thread(user_agent, devtools_chan, profiler_chan);
+ let filemanager_chan: IpcSender<FileManagerThreadMsg> = FileManagerThreadFactory::new(TFD_PROVIDER);
+ let (public_core, private_core) = new_core_resource_thread(user_agent, devtools_chan,
+ profiler_chan, filemanager_chan.clone());
let storage: IpcSender<StorageThreadMsg> = StorageThreadFactory::new();
- let filemanager: IpcSender<FileManagerThreadMsg> = FileManagerThreadFactory::new(TFD_PROVIDER);
- (ResourceThreads::new(public_core, storage.clone(), filemanager.clone()),
- ResourceThreads::new(private_core, storage, filemanager))
+ (ResourceThreads::new(public_core, storage.clone(), filemanager_chan.clone()),
+ ResourceThreads::new(private_core, storage, filemanager_chan))
}
/// Create a CoreResourceThread
pub fn new_core_resource_thread(user_agent: String,
devtools_chan: Option<Sender<DevtoolsControlMsg>>,
- profiler_chan: ProfilerChan)
+ profiler_chan: ProfilerChan,
+ filemanager_chan: IpcSender<FileManagerThreadMsg>)
-> (CoreResourceThread, CoreResourceThread) {
let (public_setup_chan, public_setup_port) = ipc::channel().unwrap();
let (private_setup_chan, private_setup_port) = ipc::channel().unwrap();
@@ -192,7 +195,7 @@ pub fn new_core_resource_thread(user_agent: String,
let private_setup_chan_clone = private_setup_chan.clone();
spawn_named("ResourceManager".to_owned(), move || {
let resource_manager = CoreResourceManager::new(
- user_agent, devtools_chan, profiler_chan
+ user_agent, devtools_chan, profiler_chan, filemanager_chan
);
let mut channel_manager = ResourceChannelManager {
@@ -462,6 +465,7 @@ pub struct CoreResourceManager {
mime_classifier: Arc<MimeClassifier>,
devtools_chan: Option<Sender<DevtoolsControlMsg>>,
profiler_chan: ProfilerChan,
+ filemanager_chan: IpcSender<FileManagerThreadMsg>,
cancel_load_map: HashMap<ResourceId, Sender<()>>,
next_resource_id: ResourceId,
}
@@ -469,12 +473,14 @@ pub struct CoreResourceManager {
impl CoreResourceManager {
pub fn new(user_agent: String,
devtools_channel: Option<Sender<DevtoolsControlMsg>>,
- profiler_chan: ProfilerChan) -> CoreResourceManager {
+ profiler_chan: ProfilerChan,
+ filemanager_chan: IpcSender<FileManagerThreadMsg>) -> CoreResourceManager {
CoreResourceManager {
user_agent: user_agent,
mime_classifier: Arc::new(MimeClassifier::new()),
devtools_chan: devtools_channel,
profiler_chan: profiler_chan,
+ filemanager_chan: filemanager_chan,
cancel_load_map: HashMap::new(),
next_resource_id: ResourceId(0),
}
@@ -548,6 +554,7 @@ impl CoreResourceManager {
},
"data" => from_factory(data_loader::factory),
"about" => from_factory(about_loader::factory),
+ "blob" => blob_loader::factory(self.filemanager_chan.clone()),
_ => {
debug!("resource_thread: no loader for scheme {}", load_data.url.scheme());
send_error(load_data.url, NetworkError::Internal("no loader for scheme".to_owned()), consumer);
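
With the file manager channel threaded through CoreResourceManager, blob URLs are dispatched like any other scheme. A rough sketch of the shape of that dispatch, assuming the surrounding match on the request's scheme shown above (the other arms and from_factory are as in the existing code; the outer binding here is illustrative):

    // The "blob" arm builds its loader from the shared file manager channel,
    // unlike the stateless loaders that go through from_factory.
    let loader = match load_data.url.scheme() {
        "data" => from_factory(data_loader::factory),
        "about" => from_factory(about_loader::factory),
        "blob" => blob_loader::factory(self.filemanager_chan.clone()),
        _ => {
            send_error(load_data.url, NetworkError::Internal("no loader for scheme".to_owned()), consumer);
            return;
        }
    };
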