author | Ms2ger <ms2ger@gmail.com> | 2014-12-06 11:39:53 +0100
---|---|---
committer | Ms2ger <ms2ger@gmail.com> | 2014-12-08 12:44:49 +0100
commit | c7c7dc03ee944ca03748f236dd08601eab5ceb24 (patch) |
tree | b3e66de39f9f81ac4ee95b49a2e4c11e61e4a046 /components/script/parse |
parent | de318ae8f13fcbe82ad545bb73b2c0a690e0ea15 (diff) |
download | servo-c7c7dc03ee944ca03748f236dd08601eab5ceb24.tar.gz servo-c7c7dc03ee944ca03748f236dd08601eab5ceb24.zip |
Move the networking code in parse_html into ScriptTask::load.
The networking code doesn't really have anything to do with parsing HTML, and fits
in better with the rest of the loading logic in ScriptTask::load. In particular,
all changes to a Page's url now go through ScriptTask methods.
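
As a rough illustration of the new division of labour, here is a self-contained sketch with simplified stand-in types and names (`LoadResponse`, `Page`, `fetch`, `script_task_load`); it is not the actual Servo code, whose ScriptTask side lies outside this diffstat. The caller performs the network load, records the final URL on the Page, and hands the prefetched response to the parser, which no longer touches the resource task:

```rust
// Hypothetical, simplified stand-ins for Servo's types; this models only the
// control flow of the refactor, not the real ScriptTask / resource_task code.

#[allow(dead_code)]
enum HTMLInput {
    InputString(String),
    InputUrl(String),
}

#[allow(dead_code)]
struct LoadResponse {
    // In Servo this carries response metadata (including the final URL after
    // redirects) and the body; a String and a byte vector stand in for that.
    final_url: String,
    body: Vec<u8>,
}

struct Page {
    url: Option<String>,
}

// After the refactor the parser only consumes what it is handed; it no longer
// talks to the network and never mutates the Page.
fn parse_html(input: HTMLInput, base_url: Option<String>, load_response: Option<LoadResponse>) {
    let _ = (input, base_url, load_response);
    // ... feed the input / response chunks into the HTML parser ...
}

// Stand-in for sending Load(LoadData { .. }) to the resource task and waiting
// for the LoadResponse on a channel.
fn fetch(url: &str) -> LoadResponse {
    LoadResponse { final_url: url.to_string(), body: Vec::new() }
}

// Stand-in for ScriptTask::load: networking now happens here, before parsing.
fn script_task_load(page: &mut Page, url: String) {
    let response = fetch(&url);
    let base_url = response.final_url.clone();

    // Store the final URL before parsing so relative URLs resolve correctly;
    // every change to the Page's url now goes through the loading path.
    page.url = Some(base_url.clone());

    parse_html(HTMLInput::InputUrl(url), Some(base_url), Some(response));
}

fn main() {
    let mut page = Page { url: None };
    script_task_load(&mut page, "https://example.com/".to_string());
    assert_eq!(page.url.as_deref(), Some("https://example.com/"));
}
```

The design point is visible in the new signature: parse_html receives an already-resolved base_url and an optional load_response, so the parser is a pure consumer of data and the Page's url is only ever mutated on the loading path.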
Diffstat (limited to 'components/script/parse')
-rw-r--r-- | components/script/parse/html.rs | 96 |
1 file changed, 4 insertions(+), 92 deletions(-)
```diff
diff --git a/components/script/parse/html.rs b/components/script/parse/html.rs
index 1bef0f21c91..5f34c87d3da 100644
--- a/components/script/parse/html.rs
+++ b/components/script/parse/html.rs
@@ -16,72 +16,26 @@
 use dom::node::{Node, NodeHelpers, TrustedNodeAddress};
 use dom::servohtmlparser;
 use dom::servohtmlparser::ServoHTMLParser;
 use dom::text::Text;
-use page::Page;
 use parse::Parser;
 
 use encoding::all::UTF_8;
 use encoding::types::{Encoding, DecodeReplace};
-use servo_net::resource_task::{Load, LoadData, Payload, Done, ResourceTask};
-use servo_msg::constellation_msg::LoadData as MsgLoadData;
+use servo_net::resource_task::{Payload, Done, LoadResponse};
 use servo_util::task_state;
 use servo_util::task_state::IN_HTML_PARSER;
 use std::ascii::AsciiExt;
-use std::comm::channel;
-use std::fmt::{mod, Show};
 use std::str::MaybeOwned;
 use url::Url;
-use time::{Tm, strptime};
 use html5ever::Attribute;
 use html5ever::tree_builder::{TreeSink, QuirksMode, NodeOrText, AppendNode, AppendText};
 use string_cache::QualName;
-use hyper::header::{Header, HeaderFormat};
-use hyper::header::common::util as header_util;
 
 pub enum HTMLInput {
     InputString(String),
     InputUrl(Url),
 }
 
-//FIXME(seanmonstar): uplift to Hyper
-#[deriving(Clone)]
-struct LastModified(pub Tm);
-
-impl Header for LastModified {
-    #[inline]
-    fn header_name(_: Option<LastModified>) -> &'static str {
-        "Last-Modified"
-    }
-
-    // Parses an RFC 2616 compliant date/time string,
-    fn parse_header(raw: &[Vec<u8>]) -> Option<LastModified> {
-        header_util::from_one_raw_str(raw).and_then(|s: String| {
-            let s = s.as_slice();
-            strptime(s, "%a, %d %b %Y %T %Z").or_else(|_| {
-                strptime(s, "%A, %d-%b-%y %T %Z")
-            }).or_else(|_| {
-                strptime(s, "%c")
-            }).ok().map(|tm| LastModified(tm))
-        })
-    }
-}
-
-impl HeaderFormat for LastModified {
-    // a localized date/time string in a format suitable
-    // for document.lastModified.
-    fn fmt_header(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        let LastModified(ref tm) = *self;
-        match tm.tm_gmtoff {
-            0 => tm.rfc822().fmt(f),
-            _ => tm.to_utc().rfc822().fmt(f)
-        }
-    }
-}
-
-fn dom_last_modified(tm: &Tm) -> String {
-    tm.to_local().strftime("%m/%d/%Y %H:%M:%S").unwrap()
-}
-
 trait SinkHelpers {
     fn get_or_create(&self, child: NodeOrText<TrustedNodeAddress>) -> Temporary<Node>;
 }
@@ -207,52 +161,10 @@ impl<'a> TreeSink<TrustedNodeAddress> for servohtmlparser::Sink {
     }
 }
 
-// The url from msg_load_data is ignored here
-pub fn parse_html(page: &Page,
-                  document: JSRef<Document>,
+pub fn parse_html(document: JSRef<Document>,
                   input: HTMLInput,
-                  resource_task: ResourceTask,
-                  msg_load_data: MsgLoadData) {
-    let (base_url, load_response) = match input {
-        InputUrl(ref url) => {
-            // Wait for the LoadResponse so that the parser knows the final URL.
-            let (input_chan, input_port) = channel();
-            resource_task.send(Load(LoadData {
-                url: url.clone(),
-                method: msg_load_data.method,
-                headers: msg_load_data.headers,
-                data: msg_load_data.data,
-                cors: None,
-                consumer: input_chan,
-            }));
-
-            let load_response = input_port.recv();
-
-            load_response.metadata.headers.as_ref().map(|headers| {
-                headers.get().map(|&LastModified(ref tm)| {
-                    document.set_last_modified(dom_last_modified(tm));
-                });
-            });
-
-            let base_url = load_response.metadata.final_url.clone();
-
-            {
-                // Store the final URL before we start parsing, so that DOM routines
-                // (e.g. HTMLImageElement::update_image) can resolve relative URLs
-                // correctly.
-                *page.mut_url() = Some((base_url.clone(), true));
-            }
-
-            (Some(base_url), Some(load_response))
-        },
-        InputString(_) => {
-            match *page.url() {
-                Some((ref page_url, _)) => (Some(page_url.clone()), None),
-                None => (None, None),
-            }
-        },
-    };
-
+                  base_url: Option<Url>,
+                  load_response: Option<LoadResponse>) {
     let parser = ServoHTMLParser::new(base_url.clone(), document).root();
     let parser: JSRef<ServoHTMLParser> = *parser;
```
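
The largest removed block is the hand-rolled LastModified header support that parse_html used to populate document.lastModified. For orientation only, here is a small sketch of roughly what that logic did, re-expressed with the chrono crate (an assumption; the original used time::strptime and the old Hyper Header/HeaderFormat traits, and also converted between UTC and local time, which this sketch skips):

```rust
// Illustration only: roughly what the removed LastModified code did, using the
// `chrono` crate as a stand-in for the old `time` crate. Timezone handling
// (tm_gmtoff / to_utc / to_local in the original) is deliberately omitted.
use chrono::NaiveDateTime;

// Try the RFC 1123 and RFC 850 date formats accepted for Last-Modified.
// (The original also fell back to the asctime "%c" format.)
fn parse_last_modified(raw: &str) -> Option<NaiveDateTime> {
    NaiveDateTime::parse_from_str(raw, "%a, %d %b %Y %H:%M:%S GMT")
        .or_else(|_| NaiveDateTime::parse_from_str(raw, "%A, %d-%b-%y %H:%M:%S GMT"))
        .ok()
}

// document.lastModified expects an MM/DD/YYYY HH:MM:SS string.
fn dom_last_modified(tm: &NaiveDateTime) -> String {
    tm.format("%m/%d/%Y %H:%M:%S").to_string()
}

fn main() {
    let tm = parse_last_modified("Mon, 08 Dec 2014 12:44:49 GMT").unwrap();
    assert_eq!(dom_last_modified(&tm), "12/08/2014 12:44:49");
}
```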