path: root/components/net/blob_loader.rs
author    Zhen Zhang <izgzhen@gmail.com>    2016-06-18 18:14:40 +0800
committer Zhen Zhang <izgzhen@gmail.com>    2016-07-04 23:02:03 +0800
commit    14d68968edc936fe67a226840af4c10ff0aea350 (patch)
tree      2d4e1fd6f7e1d4beeaca0f6697b26631d89e8e3b /components/net/blob_loader.rs
parent    212aa4437e06af72ed3a215a1b49a4b1121f6398 (diff)
Integration and improvements of File API backends
1. More complete origin check in FileManagerThreadMsg
2. Add reference-counting logic to the file manager store and the script API
3. Integrate support for slicing (see the sketch below)
Diffstat (limited to 'components/net/blob_loader.rs')
-rw-r--r--  components/net/blob_loader.rs  54
1 file changed, 14 insertions, 40 deletions
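The slicing support in point 3 passes a relative position (start/end offsets in the style of Blob.slice()) down to the blob loader, which resolves it against the entry's total size via RelativePos::to_abs_range. The snippet below is a minimal, self-contained sketch of that resolution step, assuming a RelativePos-like type with an i64 start and an optional i64 end where negative offsets count back from the end of the blob; the RelPos type here is hypothetical and is not the actual net_traits::filemanager_thread implementation.

use std::ops::Range;

// Hypothetical stand-in for net_traits::filemanager_thread::RelativePos,
// used only to illustrate how a relative slice resolves to absolute bytes.
#[derive(Clone, Copy)]
struct RelPos {
    start: i64,        // negative values count back from the end of the blob
    end: Option<i64>,  // None means "up to the end of the blob"
}

impl RelPos {
    fn to_abs_range(&self, size: usize) -> Range<usize> {
        // Clamp an offset into [0, size]; a negative offset means size + offset,
        // in the spirit of Blob.slice().
        let resolve = |offset: i64| -> usize {
            if offset < 0 {
                size.saturating_sub(offset.unsigned_abs() as usize)
            } else {
                (offset as usize).min(size)
            }
        };
        let start = resolve(self.start);
        let end = self.end.map_or(size, resolve).max(start);
        start..end
    }
}

fn main() {
    // blob.slice(-4) on a 10-byte blob: the last four bytes.
    assert_eq!(RelPos { start: -4, end: None }.to_abs_range(10), 6..10);
    // blob.slice(2, 5) on the same blob: bytes [2, 5).
    assert_eq!(RelPos { start: 2, end: Some(5) }.to_abs_range(10), 2..5);
}

With an absolute range such as 6..10 in hand, the loader changed below can report an accurate Content-Length and send only the requested bytes.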
diff --git a/components/net/blob_loader.rs b/components/net/blob_loader.rs
index 37665da86e2..1008b405e3e 100644
--- a/components/net/blob_loader.rs
+++ b/components/net/blob_loader.rs
@@ -2,60 +2,32 @@
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-use filemanager_thread::BlobURLStore;
use hyper::header::{DispositionType, ContentDisposition, DispositionParam};
use hyper::header::{Headers, ContentType, ContentLength, Charset};
use hyper::http::RawStatus;
use mime::{Mime, Attr};
use mime_classifier::MimeClassifier;
use net_traits::ProgressMsg::Done;
-use net_traits::blob_url_store::{parse_blob_url, BlobURLStoreEntry, BlobURLStoreError};
+use net_traits::blob_url_store::BlobURLStoreEntry;
+use net_traits::filemanager_thread::RelativePos;
use net_traits::response::HttpsState;
-use net_traits::{LoadConsumer, LoadData, Metadata, NetworkError};
-use resource_thread::{send_error, start_sending_sniffed_opt};
-use std::str;
-use std::sync::{Arc, RwLock};
-
+use net_traits::{LoadConsumer, LoadData, Metadata};
+use resource_thread::start_sending_sniffed_opt;
+use std::ops::Index;
+use std::sync::Arc;
// TODO: Check on GET
// https://w3c.github.io/FileAPI/#requestResponseModel
-pub fn load(load_data: LoadData, consumer: LoadConsumer,
-            blob_url_store: Arc<RwLock<BlobURLStore>>,
-            classifier: Arc<MimeClassifier>) { // XXX: Move it into net process later
-
-    match parse_blob_url(&load_data.url) {
-        None => {
-            let format_err = NetworkError::Internal(format!("Invalid blob URL format {:?}", load_data.url));
-            send_error(load_data.url.clone(), format_err, consumer);
-        }
-        Some((uuid, _fragment)) => {
-            match blob_url_store.read().unwrap().request(uuid, &load_data.url.origin()) {
-                Ok(entry) => load_blob(&load_data, consumer, classifier, entry),
-                Err(e) => {
-                    let err = match e {
-                        BlobURLStoreError::InvalidKey =>
-                            format!("Invalid blob URL key {:?}", uuid.simple().to_string()),
-                        BlobURLStoreError::InvalidOrigin =>
-                            format!("Invalid blob URL origin {:?}", load_data.url.origin()),
-                    };
-                    send_error(load_data.url.clone(), NetworkError::Internal(err), consumer);
-                }
-            }
-        }
-    }
-}
-
-fn load_blob(load_data: &LoadData,
-             start_chan: LoadConsumer,
-             classifier: Arc<MimeClassifier>,
-             entry: &BlobURLStoreEntry) {
+pub fn load_blob(load_data: &LoadData, start_chan: LoadConsumer,
+                 classifier: Arc<MimeClassifier>, opt_filename: Option<String>,
+                 rel_pos: &RelativePos, entry: &BlobURLStoreEntry) {
    let content_type: Mime = entry.type_string.parse().unwrap_or(mime!(Text / Plain));
    let charset = content_type.get_param(Attr::Charset);
    let mut headers = Headers::new();
-    if let Some(ref name) = entry.filename {
+    if let Some(name) = opt_filename {
        let charset = charset.and_then(|c| c.as_str().parse().ok());
        headers.set(ContentDisposition {
            disposition: DispositionType::Inline,
@@ -66,8 +38,10 @@ fn load_blob(load_data: &LoadData,
        });
    }
+    let range = rel_pos.to_abs_range(entry.size as usize);
+
    headers.set(ContentType(content_type.clone()));
-    headers.set(ContentLength(entry.size));
+    headers.set(ContentLength(range.len() as u64));
    let metadata = Metadata {
        final_url: load_data.url.clone(),
@@ -81,7 +55,7 @@ fn load_blob(load_data: &LoadData,
    if let Ok(chan) =
        start_sending_sniffed_opt(start_chan, metadata, classifier,
-                                  &entry.bytes, load_data.context.clone()) {
+                                  &entry.bytes.index(range), load_data.context.clone()) {
        let _ = chan.send(Done(Ok(())));
    }
}
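On the loader side, the resolved range then drives both the Content-Length header and the body handed to start_sending_sniffed_opt, as seen in the two hunks above. The standalone sketch below (with made-up byte values) shows only the standard-library behaviour the patched code relies on: Range::len gives the sliced length, and indexing a Vec<u8> with a Range<usize> through std::ops::Index yields the corresponding &[u8] sub-slice.

use std::ops::{Index, Range};

fn main() {
    // Stand-ins for entry.bytes and for the range produced by to_abs_range.
    let bytes: Vec<u8> = b"hello, blob!".to_vec();
    let range: Range<usize> = 7..11;

    // Content-Length now reflects the sliced range rather than entry.size.
    let content_length = range.len() as u64;
    assert_eq!(content_length, 4);

    // Index<Range<usize>> on Vec<u8> returns the &[u8] sub-slice that is
    // handed to the consumer in the patched loader.
    let body: &[u8] = bytes.index(range);
    assert_eq!(body, &b"blob"[..]);
}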