author     ms2300 <matt.sewall@gmail.com>                  2018-09-23 21:12:51 -0700
committer  Fernando Jiménez Moreno <ferjmoreno@gmail.com>  2018-11-26 09:33:34 +0100
commit     e84e1d607bd158dde576d2def8ebadfdd63630e1 (patch)
tree       be5bc4fd07626c14191532ef6f2b46cdd5c9a124 /components/net
parent     7c65505df3fff47f43062da20088113631ed9ae0 (diff)
Initial implementation of asynchronous blob url fetching
Diffstat (limited to 'components/net')
 components/net/blob_loader.rs        |  87
 components/net/fetch/methods.rs      |  20
 components/net/filemanager_thread.rs | 154
 3 files changed, 171 insertions(+), 90 deletions(-)
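
In outline, the commit swaps the blocking blob entry point for one that returns immediately and streams the body through the fetch layer's done channel. A side-by-side of the two signatures, copied from the blob_loader.rs hunk below:

    // Before: blocks until the whole blob is read, returns buffered headers and body.
    pub fn load_blob_sync(
        url: ServoUrl,
        filemanager: FileManager,
    ) -> Result<(HeaderMap, Vec<u8>), NetworkError>;

    // After: sets up the transfer and returns; chunks arrive via done_chan while
    // response.body moves from ResponseBody::Receiving to ResponseBody::Done.
    pub fn load_blob_async(
        url: ServoUrl,
        filemanager: FileManager,
        response: &Response,
        done_chan: &mut DoneChannel,
    ) -> Result<(), NetworkError>;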
diff --git a/components/net/blob_loader.rs b/components/net/blob_loader.rs
index e6d64acc16e..50c5455808d 100644
--- a/components/net/blob_loader.rs
+++ b/components/net/blob_loader.rs
@@ -3,6 +3,7 @@
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::filemanager_thread::FileManager;
+use crate::fetch::methods::DoneChannel;
use headers_core::HeaderMapExt;
use headers_ext::{ContentLength, ContentType};
use http::header::{self, HeaderValue};
@@ -11,18 +12,21 @@ use ipc_channel::ipc;
use mime::{self, Mime};
use net_traits::blob_url_store::parse_blob_url;
use net_traits::filemanager_thread::ReadFileProgress;
+use net_traits::response::{Response, ResponseBody};
use net_traits::{http_percent_encode, NetworkError};
use servo_url::ServoUrl;
+use std::sync::mpsc::channel;
// TODO: Check on GET
// https://w3c.github.io/FileAPI/#requestResponseModel
/// https://fetch.spec.whatwg.org/#concept-basic-fetch (partial)
-// TODO: make async.
-pub fn load_blob_sync(
+pub fn load_blob_async(
url: ServoUrl,
filemanager: FileManager,
-) -> Result<(HeaderMap, Vec<u8>), NetworkError> {
+ response: &Response,
+    done_chan: &mut DoneChannel,
+) -> Result<(), NetworkError> {
let (id, origin) = match parse_blob_url(&url) {
Ok((id, origin)) => (id, origin),
Err(()) => {
@@ -31,78 +35,11 @@ pub fn load_blob_sync(
},
};
- let (sender, receiver) = ipc::channel().unwrap();
+ let (sender, receiver) = channel();
+ *done_chan = Some((sender.clone(), receiver));
+ *response.body.lock().unwrap() = ResponseBody::Receiving(vec![]);
let check_url_validity = true;
- filemanager.read_file(sender, id, check_url_validity, origin);
+ filemanager.fetch_file(sender, id, check_url_validity, origin, response);
- let blob_buf = match receiver.recv().unwrap() {
- Ok(ReadFileProgress::Meta(blob_buf)) => blob_buf,
- Ok(_) => {
- return Err(NetworkError::Internal(
- "Invalid filemanager reply".to_string(),
- ));
- },
- Err(e) => {
- return Err(NetworkError::Internal(format!("{:?}", e)));
- },
- };
-
- let content_type: Mime = blob_buf.type_string.parse().unwrap_or(mime::TEXT_PLAIN);
- let charset = content_type.get_param(mime::CHARSET);
-
- let mut headers = HeaderMap::new();
-
- if let Some(name) = blob_buf.filename {
- let charset = charset
- .map(|c| c.as_ref().into())
- .unwrap_or("us-ascii".to_owned());
- // TODO(eijebong): Replace this once the typed header is there
- headers.insert(
- header::CONTENT_DISPOSITION,
- HeaderValue::from_bytes(
- format!(
- "inline; {}",
- if charset.to_lowercase() == "utf-8" {
- format!(
- "filename=\"{}\"",
- String::from_utf8(name.as_bytes().into()).unwrap()
- )
- } else {
- format!(
- "filename*=\"{}\"''{}",
- charset,
- http_percent_encode(name.as_bytes())
- )
- }
- )
- .as_bytes(),
- )
- .unwrap(),
- );
- }
-
- // Basic fetch, Step 4.
- headers.typed_insert(ContentLength(blob_buf.size as u64));
- // Basic fetch, Step 5.
- headers.typed_insert(ContentType::from(content_type.clone()));
-
- let mut bytes = blob_buf.bytes;
- loop {
- match receiver.recv().unwrap() {
- Ok(ReadFileProgress::Partial(ref mut new_bytes)) => {
- bytes.append(new_bytes);
- },
- Ok(ReadFileProgress::EOF) => {
- return Ok((headers, bytes));
- },
- Ok(_) => {
- return Err(NetworkError::Internal(
- "Invalid filemanager reply".to_string(),
- ));
- },
- Err(e) => {
- return Err(NetworkError::Internal(format!("{:?}", e)));
- },
- }
- }
+ Ok(())
}
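
For orientation, here is the producer/consumer protocol load_blob_async now relies on, as a self-contained sketch. Data and DoneChannel are real types from fetch/methods.rs, but the stand-ins below model only the Payload/Done sequence used in this diff, and the consumer loop is illustrative rather than Servo's actual fetch driver:

    use std::sync::mpsc::{channel, Receiver, Sender};
    use std::thread;

    // Assumed shapes for the fetch/methods.rs types referenced above.
    enum Data {
        Payload(Vec<u8>),
        Done,
    }
    type DoneChannel = Option<(Sender<Data>, Receiver<Data>)>;

    fn main() {
        // Producer setup, as in load_blob_async: create the channel, stash both
        // ends in done_chan, and hand the Sender to the file manager thread.
        let (sender, receiver) = channel();
        let mut done_chan: DoneChannel = Some((sender.clone(), receiver));

        // Stands in for FileManager::fetch_file streaming the blob.
        thread::spawn(move || {
            sender.send(Data::Payload(b"blob ".to_vec())).unwrap();
            sender.send(Data::Payload(b"bytes".to_vec())).unwrap();
            sender.send(Data::Done).unwrap();
        });

        // Consumer side: drain the receiver until Done, as the fetch loop would.
        let mut body = Vec::new();
        if let Some((_sender, receiver)) = done_chan.take() {
            loop {
                match receiver.recv() {
                    Ok(Data::Payload(mut chunk)) => body.append(&mut chunk),
                    Ok(Data::Done) | Err(_) => break,
                }
            }
        }
        assert_eq!(body, b"blob bytes");
    }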
diff --git a/components/net/fetch/methods.rs b/components/net/fetch/methods.rs
index 70dcf4d7983..67a02775369 100644
--- a/components/net/fetch/methods.rs
+++ b/components/net/fetch/methods.rs
@@ -2,7 +2,7 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
-use crate::blob_loader::load_blob_sync;
+use crate::blob_loader::load_blob_async;
use crate::data_loader::decode;
use crate::fetch::cors_cache::CorsCache;
use crate::filemanager_thread::FileManager;
@@ -657,19 +657,11 @@ fn scheme_fetch(
));
}
- match load_blob_sync(url.clone(), context.filemanager.clone()) {
- Ok((headers, bytes)) => {
- let mut response =
- Response::new(url, ResourceFetchTiming::new(request.timing_type()));
- response.headers = headers;
- *response.body.lock().unwrap() = ResponseBody::Done(bytes);
- response
- },
- Err(e) => {
- debug!("Failed to load {}: {:?}", url, e);
- Response::network_error(e)
- },
- }
+            let response =
+                Response::new(url.clone(), ResourceFetchTiming::new(request.timing_type()));
+
+            if let Err(e) = load_blob_async(url.clone(), context.filemanager.clone(), &response, done_chan) {
+                debug!("Failed to load {}: {:?}", url, e);
+                return Response::network_error(e);
+            }
+
+            response
},
"ftp" => {
diff --git a/components/net/filemanager_thread.rs b/components/net/filemanager_thread.rs
index 8f0fb05b229..0be8de815b4 100644
--- a/components/net/filemanager_thread.rs
+++ b/components/net/filemanager_thread.rs
@@ -4,12 +4,18 @@
use embedder_traits::{EmbedderMsg, EmbedderProxy, FilterPattern};
use ipc_channel::ipc::{self, IpcSender};
+use crate::fetch::methods::Data;
+use headers_core::HeaderMapExt;
+use headers_ext::{ContentLength, ContentType};
+use http::header::{self, HeaderMap, HeaderValue};
+use mime::{self, Mime};
use mime_guess::guess_mime_type_opt;
+use net_traits::{http_percent_encode, NetworkError};
use net_traits::blob_url_store::{BlobBuf, BlobURLStoreError};
use net_traits::filemanager_thread::{FileManagerResult, FileManagerThreadMsg, FileOrigin};
use net_traits::filemanager_thread::{
FileManagerThreadError, ReadFileProgress, RelativePos, SelectedFile,
};
+use net_traits::response::{Response, ResponseBody};
use servo_config::prefs::PREFS;
use std::collections::HashMap;
use std::fs::File;
@@ -17,7 +23,7 @@ use std::io::{Read, Seek, SeekFrom};
use std::ops::Index;
use std::path::{Path, PathBuf};
use std::sync::atomic::{self, AtomicBool, AtomicUsize, Ordering};
-use std::sync::{Arc, RwLock};
+use std::sync::{Arc, RwLock, Mutex, mpsc};
use std::thread;
use url::Url;
use uuid::Uuid;
@@ -91,6 +97,19 @@ impl FileManager {
.expect("Thread spawning failed");
}
+ pub fn fetch_file(&self,
+ sender: mpsc::Sender<Data>,
+ id: Uuid,
+ check_url_validity: bool,
+ origin: FileOrigin,
+ response: &Response) {
+ let store = self.store.clone();
+        let res_body = response.body.clone();
+ thread::Builder::new().name("read file".to_owned()).spawn(move || {
+ store.try_fetch_file(&sender, id, check_url_validity, origin, response, res_body)
+ }).expect("Thread spawning failed");
+ }
+
pub fn promote_memory(
&self,
blob_buf: BlobBuf,
@@ -489,6 +508,94 @@ impl FileManagerStore {
)
}
+ fn fetch_blob_buf(&self, sender: &mpsc::Sender<Data>,
+ id: &Uuid, origin_in: &FileOrigin, rel_pos: RelativePos,
+ check_url_validity: bool, response: &Response, res_body: Arc<Mutex<ResponseBody>>) -> Result<(), BlobURLStoreError> {
+ let mut bytes = vec![];
+ let file_impl = self.get_impl(id, origin_in, check_url_validity)?;
+ match file_impl {
+ FileImpl::Memory(buf) => {
+ let range = rel_pos.to_abs_range(buf.size as usize);
+ let blob_buf = BlobBuf {
+ filename: None,
+ type_string: buf.type_string,
+ size: range.len() as u64,
+ bytes: buf.bytes.index(range).to_vec(),
+ };
+
+                let content_type: Mime = blob_buf.type_string.parse().unwrap_or(mime::TEXT_PLAIN);
+                let charset = content_type.get_param(mime::CHARSET);
+                let mut headers = HeaderMap::new();
+
+                if let Some(name) = blob_buf.filename {
+                    let charset = charset.map(|c| c.as_ref().into()).unwrap_or("us-ascii".to_owned());
+                    // TODO(eijebong): Replace this once the typed header is there
+                    headers.insert(
+                        header::CONTENT_DISPOSITION,
+                        HeaderValue::from_bytes(
+                            format!(
+                                "inline; {}",
+                                if charset.to_lowercase() == "utf-8" {
+                                    format!("filename=\"{}\"", String::from_utf8(name.as_bytes().into()).unwrap())
+                                } else {
+                                    format!("filename*=\"{}\"''{}", charset, http_percent_encode(name.as_bytes()))
+                                }
+                            ).as_bytes(),
+                        ).unwrap(),
+                    );
+                }
+
+                headers.typed_insert(ContentLength(blob_buf.size as u64));
+                headers.typed_insert(ContentType::from(content_type.clone()));
+
+ bytes.extend_from_slice(&blob_buf.bytes);
+
+ response.headers = headers;
+ *res_body.lock().unwrap() = ResponseBody::Done(bytes);
+ let _ = sender.send(Data::Done);
+ Ok(())
+ }
+ FileImpl::MetaDataOnly(metadata) => {
+                /* XXX: Snapshot state check (optional), see
+                   https://w3c.github.io/FileAPI/#snapshot-state.
+                   Concretely, we open the file again here, and it might not have
+                   the same underlying state (meta-info plus content) as it did
+                   when create_entry was called. */
+
+ let opt_filename = metadata.path.file_name()
+ .and_then(|osstr| osstr.to_str())
+ .map(|s| s.to_string());
+
+ let mime = guess_mime_type_opt(metadata.path.clone());
+ let range = rel_pos.to_abs_range(metadata.size as usize);
+
+ let mut file = File::open(&metadata.path)
+ .map_err(|e| BlobURLStoreError::External(e.to_string()))?;
+ let seeked_start = file.seek(SeekFrom::Start(range.start as u64))
+ .map_err(|e| BlobURLStoreError::External(e.to_string()))?;
+
+ if seeked_start == (range.start as u64) {
+ let type_string = match mime {
+ Some(x) => format!("{}", x),
+ None => "".to_string(),
+ };
+
+ chunked_fetch(sender, &mut file, range.len(), opt_filename,
+ type_string, response, res_body, &mut bytes);
+ Ok(())
+ } else {
+ Err(BlobURLStoreError::InvalidEntry)
+ }
+ }
+ FileImpl::Sliced(parent_id, inner_rel_pos) => {
+                // No need to check validity again: that was already done for
+                // the requesting URL where necessary.
+ self.fetch_blob_buf(sender, &parent_id, origin_in,
+ rel_pos.slice_inner(&inner_rel_pos), false, response, res_body)
+ }
+ }
+ }
+
+ fn try_fetch_file(&self, sender: &mpsc::Sender<Data>, id: Uuid, check_url_validity: bool,
+ origin_in: FileOrigin, response: &Response, res_body: Arc<Mutex<ResponseBody>>)
+ -> Result<(), BlobURLStoreError> {
+ self.fetch_blob_buf(sender, &id, &origin_in, RelativePos::full_range(), check_url_validity,
+ response, res_body)
+ }
+
fn dec_ref(&self, id: &Uuid, origin_in: &FileOrigin) -> Result<(), BlobURLStoreError> {
let (do_remove, opt_parent_id) = match self.entries.read().unwrap().get(id) {
Some(entry) => {
@@ -656,3 +763,48 @@ fn chunked_read(
}
}
}
+
+fn chunked_fetch(sender: &mpsc::Sender<Data>,
+ file: &mut File, size: usize, opt_filename: Option<String>,
+ type_string: String, response: &Response, res_body: Arc<Mutex<ResponseBody>>, bytes: &mut Vec<u8>) {
+ // First chunk
+ let mut buf = vec![0; CHUNK_SIZE];
+ match file.read(&mut buf) {
+ Ok(n) => {
+ buf.truncate(n);
+ let blob_buf = BlobBuf {
+ filename: opt_filename,
+ type_string: type_string,
+ size: size as u64,
+ bytes: buf,
+ };
+ bytes.extend_from_slice(&blob_buf.bytes);
+ let _ = sender.send(Data::Payload(blob_buf.bytes));
+ }
+ Err(_) => {
+            *response = Response::network_error(NetworkError::Internal("Reading file failed".into()));
+ return;
+ }
+ }
+
+ // Send the remaining chunks
+ loop {
+ let mut buf = vec![0; CHUNK_SIZE];
+ match file.read(&mut buf) {
+ Ok(0) => {
+ *res_body.lock().unwrap() = ResponseBody::Done(bytes.to_vec());
+ let _ = sender.send(Data::Done);
+ return;
+ }
+ Ok(n) => {
+ buf.truncate(n);
+ bytes.extend_from_slice(&buf);
+ let _ = sender.send(Data::Payload(buf));
+ }
+            Err(_) => {
+                *response = Response::network_error(NetworkError::Internal("Reading file failed".into()));
+ return;
+ }
+ }
+ }
+}
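
The chunked streaming in chunked_fetch, reduced to a runnable stand-alone sketch. The CHUNK_SIZE value is assumed (filemanager_thread.rs defines its own constant), and a plain Option<Vec<u8>> plays the role of Data::Payload/Data::Done:

    use std::fs::File;
    use std::io::{Read, Write};
    use std::sync::mpsc::channel;
    use std::thread;

    const CHUNK_SIZE: usize = 8192; // assumed; Servo's constant may differ

    fn main() -> std::io::Result<()> {
        // A file to stream, standing in for the FileImpl::MetaDataOnly path.
        let path = std::env::temp_dir().join("chunked_fetch_demo.bin");
        File::create(&path)?.write_all(&vec![42u8; 3 * CHUNK_SIZE + 100])?;
        let mut file = File::open(&path)?;

        let (sender, receiver) = channel::<Option<Vec<u8>>>();

        // Producer, mirroring chunked_fetch: fixed-size reads, truncate to the
        // bytes actually read, send each chunk; None plays the role of Data::Done.
        thread::spawn(move || loop {
            let mut buf = vec![0; CHUNK_SIZE];
            match file.read(&mut buf) {
                Ok(0) | Err(_) => {
                    let _ = sender.send(None);
                    return;
                },
                Ok(n) => {
                    buf.truncate(n);
                    let _ = sender.send(Some(buf));
                },
            }
        });

        // Consumer, mirroring the response side: accumulate until the end marker.
        let mut bytes = Vec::new();
        while let Ok(Some(chunk)) = receiver.recv() {
            bytes.extend_from_slice(&chunk);
        }
        assert_eq!(bytes.len(), 3 * CHUNK_SIZE + 100);
        Ok(())
    }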