Diffstat:
-rw-r--r--  components/script/dom/window.rs                                          |  25
-rw-r--r--  components/script/html/hubbub_html_parser.rs                             | 131
-rw-r--r--  components/script/script_task.rs                                         |  48
-rw-r--r--  tests/wpt/metadata/XMLHttpRequest/open-url-javascript-window-2.htm.ini   |   2
-rw-r--r--  tests/wpt/metadata/XMLHttpRequest/open-url-javascript-window.htm.ini     |   2

5 files changed, 152 insertions, 56 deletions
diff --git a/components/script/dom/window.rs b/components/script/dom/window.rs
index 23b6c71e029..ce60f3cdb19 100644
--- a/components/script/dom/window.rs
+++ b/components/script/dom/window.rs
@@ -29,14 +29,15 @@ use servo_net::image_cache_task::ImageCacheTask;
use servo_util::str::{DOMString,HTML_SPACE_CHARACTERS};
use servo_util::task::{spawn_named};
-use js::jsapi::JS_CallFunctionValue;
+use js::jsapi::{JS_CallFunctionValue, JS_EvaluateUCScript};
use js::jsapi::JSContext;
use js::jsapi::{JS_GC, JS_GetRuntime};
use js::jsval::JSVal;
-use js::jsval::NullValue;
+use js::jsval::{UndefinedValue, NullValue};
use js::rust::with_compartment;
use url::{Url, UrlParser};
+use libc;
use serialize::base64::{FromBase64, ToBase64, STANDARD};
use std::collections::hashmap::HashMap;
use std::cell::{Cell, RefCell};
@@ -358,6 +359,7 @@ pub trait WindowHelpers {
fn init_browser_context(&self, doc: &JSRef<Document>);
fn load_url(&self, href: DOMString);
fn handle_fire_timer(&self, timer_id: TimerId, cx: *mut JSContext);
+ fn evaluate_js_with_result(&self, code: &str) -> JSVal;
}
trait PrivateWindowHelpers {
@@ -365,6 +367,25 @@ trait PrivateWindowHelpers {
}
impl<'a> WindowHelpers for JSRef<'a, Window> {
+ fn evaluate_js_with_result(&self, code: &str) -> JSVal {
+ let global = self.reflector().get_jsobject();
+ let code: Vec<u16> = code.as_slice().utf16_units().collect();
+ let mut rval = UndefinedValue();
+ let filename = "".to_c_str();
+ let cx = self.get_cx();
+
+ with_compartment(cx, global, || {
+ unsafe {
+ if JS_EvaluateUCScript(cx, global, code.as_ptr(),
+ code.len() as libc::c_uint,
+ filename.as_ptr(), 1, &mut rval) == 0 {
+ debug!("error evaluating JS string");
+ }
+ rval
+ }
+ })
+ }
+
fn damage_and_reflow(&self, damage: DocumentDamageLevel) {
// FIXME This should probably be ReflowForQuery, not Display. All queries currently
// currently rely on the display list, which means we can't destroy it by
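
The new evaluate_js_with_result helper hands SpiderMonkey a UTF-16 buffer: the Rust string is collected into UTF-16 code units before JS_EvaluateUCScript is called, and the result lands in rval (left as undefined, with a debug log, if evaluation fails). A minimal sketch of just that conversion step, in modern Rust for illustration (str::encode_utf16 is roughly the modern equivalent of the old utf16_units iterator; the JS engine call itself is omitted):

    // Sketch: prepare a script string for a UTF-16 ("UC") evaluation entry point,
    // as evaluate_js_with_result does before calling JS_EvaluateUCScript.
    // Modern Rust for illustration; the patch itself uses pre-1.0 APIs.
    fn to_utf16_units(code: &str) -> Vec<u16> {
        // encode_utf16 yields the same code units the old utf16_units() did.
        code.encode_utf16().collect()
    }

    fn main() {
        let code = "1 + 1";
        let units = to_utf16_units(code);
        // JS_EvaluateUCScript would receive units.as_ptr() and units.len().
        assert_eq!(units.len(), code.len()); // ASCII input: one unit per byte here
        println!("{} code units ready for evaluation", units.len());
    }
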
diff --git a/components/script/html/hubbub_html_parser.rs b/components/script/html/hubbub_html_parser.rs
index 89d817b9380..4f9bd3cfb96 100644
--- a/components/script/html/hubbub_html_parser.rs
+++ b/components/script/html/hubbub_html_parser.rs
@@ -55,11 +55,16 @@ macro_rules! handle_element(
pub struct JSFile {
pub data: String,
- pub url: Url
+ pub url: Option<Url>,
}
pub type JSResult = Vec<JSFile>;
+pub enum HTMLInput {
+ InputString(String),
+ InputUrl(Url),
+}
+
enum CSSMessage {
CSSTaskNewFile(StylesheetProvenance),
CSSTaskExit
@@ -67,7 +72,7 @@ enum CSSMessage {
enum JSMessage {
JSTaskNewFile(Url),
- JSTaskNewInlineScript(String, Url),
+ JSTaskNewInlineScript(String, Option<Url>),
JSTaskExit
}
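
JSFile::url and JSTaskNewInlineScript now carry Option<Url>, so a script can be recorded without a source URL: the string-input path introduced below may have no base URL at all, and the script_task.rs hunk further down falls back to an empty filename when evaluating such a script. A minimal sketch of that fallback, with a plain string standing in for Servo's Url type (names here are illustrative only):

    // Sketch of the filename fallback applied when a script has no source URL.
    fn script_filename(url: Option<&str>) -> String {
        match url {
            Some(u) => u.to_string(), // script with a known source or document URL
            None => String::new(),    // e.g. script from a javascript: document: empty filename
        }
    }

    fn main() {
        assert_eq!(script_filename(Some("https://example.org/app.js")), "https://example.org/app.js");
        assert_eq!(script_filename(None), "");
    }
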
@@ -148,7 +153,7 @@ fn js_script_listener(to_parent: Sender<HtmlDiscoveryMessage>,
let decoded = UTF_8.decode(bytes.as_slice(), DecodeReplace).unwrap();
result_vec.push(JSFile {
data: decoded.to_string(),
- url: metadata.final_url,
+ url: Some(metadata.final_url),
});
}
}
@@ -326,10 +331,10 @@ pub fn build_element_from_tag(tag: DOMString, ns: Namespace, document: &JSRef<Do
pub fn parse_html(page: &Page,
document: &JSRef<Document>,
- url: Url,
+ input: HTMLInput,
resource_task: ResourceTask)
-> HtmlParserResult {
- debug!("Hubbub: parsing {:?}", url);
+ debug!("Hubbub: parsing {:?}", input);
// Spawn a CSS parser to receive links to CSS style sheets.
let (discovery_chan, discovery_port) = channel();
@@ -347,33 +352,45 @@ pub fn parse_html(page: &Page,
js_script_listener(js_result_chan, js_msg_port, resource_task2.clone());
});
- // Wait for the LoadResponse so that the parser knows the final URL.
- let (input_chan, input_port) = channel();
- resource_task.send(Load(LoadData::new(url.clone()), input_chan));
- let load_response = input_port.recv();
+ let (base_url, load_response) = match input {
+ InputUrl(ref url) => {
+ // Wait for the LoadResponse so that the parser knows the final URL.
+ let (input_chan, input_port) = channel();
+ resource_task.send(Load(LoadData::new(url.clone()), input_chan));
+ let load_response = input_port.recv();
- debug!("Fetched page; metadata is {:?}", load_response.metadata);
+ debug!("Fetched page; metadata is {:?}", load_response.metadata);
- load_response.metadata.headers.map(|headers| {
- let header = headers.iter().find(|h|
- h.header_name().as_slice().to_ascii_lower() == "last-modified".to_string()
- );
+ load_response.metadata.headers.as_ref().map(|headers| {
+ let header = headers.iter().find(|h|
+ h.header_name().as_slice().to_ascii_lower() == "last-modified".to_string()
+ );
- match header {
- Some(h) => document.set_last_modified(
- parse_last_modified(h.header_value().as_slice())),
- None => {},
- };
- });
+ match header {
+ Some(h) => document.set_last_modified(
+ parse_last_modified(h.header_value().as_slice())),
+ None => {},
+ };
+ });
- let base_url = &load_response.metadata.final_url;
+ let base_url = load_response.metadata.final_url.clone();
- {
- // Store the final URL before we start parsing, so that DOM routines
- // (e.g. HTMLImageElement::update_image) can resolve relative URLs
- // correctly.
- *page.mut_url() = Some((base_url.clone(), true));
- }
+ {
+ // Store the final URL before we start parsing, so that DOM routines
+ // (e.g. HTMLImageElement::update_image) can resolve relative URLs
+ // correctly.
+ *page.mut_url() = Some((base_url.clone(), true));
+ }
+
+ (Some(base_url), Some(load_response))
+ },
+ InputString(_) => {
+ match *page.url() {
+ Some((ref page_url, _)) => (Some(page_url.clone()), None),
+ None => (None, None),
+ }
+ },
+ };
let mut parser = build_parser(unsafe { document.to_hubbub_node() });
debug!("created parser");
@@ -457,7 +474,15 @@ pub fn parse_html(page: &Page,
s.as_slice().eq_ignore_ascii_case("stylesheet")
}) {
debug!("found CSS stylesheet: {:s}", *href);
- match UrlParser::new().base_url(base_url).parse(href.as_slice()) {
+ let mut url_parser = UrlParser::new();
+ match base_url {
+ None => (),
+ Some(ref base_url) => {
+ url_parser.base_url(base_url);
+ }
+ }
+
+ match url_parser.parse(href.as_slice()) {
Ok(url) => css_chan2.send(CSSTaskNewFile(
UrlProvenance(url, resource_task.clone()))),
Err(e) => debug!("Parsing url {:s} failed: {:?}", *href, e)
@@ -550,8 +575,14 @@ pub fn parse_html(page: &Page,
match script_element.get_attribute(Null, "src").root() {
Some(src) => {
debug!("found script: {:s}", src.deref().Value());
- match UrlParser::new().base_url(base_url)
- .parse(src.deref().value().as_slice()) {
+ let mut url_parser = UrlParser::new();
+ match base_url {
+ None => (),
+ Some(ref base_url) => {
+ url_parser.base_url(base_url);
+ }
+ };
+ match url_parser.parse(src.deref().value().as_slice()) {
Ok(new_url) => js_chan2.send(JSTaskNewFile(new_url)),
Err(e) => debug!("Parsing url {:s} failed: {:?}", src.deref().Value(), e)
};
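
The two hunks above repeat the same pattern for stylesheet hrefs and script srcs: build a UrlParser, attach a base URL only if one exists, then parse the attribute value. With the modern url crate (rust-url, assumed available as a dependency) the same optional-base resolution looks roughly like this:

    // Sketch of href/src resolution against an optional base URL, using the
    // modern `url` crate rather than the 2014-era UrlParser in the patch.
    use url::Url;

    fn resolve(href: &str, base: Option<&Url>) -> Option<Url> {
        match base {
            // With a base URL, relative and absolute hrefs both resolve.
            Some(b) => b.join(href).ok(),
            // Without one (e.g. a javascript: document with no page URL),
            // only absolute hrefs can be parsed.
            None => Url::parse(href).ok(),
        }
    }

    fn main() {
        let base = Url::parse("https://example.org/dir/page.html").ok();
        let resolved = resolve("style.css", base.as_ref());
        assert_eq!(resolved.unwrap().as_str(), "https://example.org/dir/style.css");
        assert!(resolve("style.css", None).is_none()); // relative href, no base
    }
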
@@ -580,25 +611,33 @@ pub fn parse_html(page: &Page,
parser.set_tree_handler(&mut tree_handler);
debug!("set tree handler");
debug!("loaded page");
- match load_response.metadata.content_type {
- Some((ref t, _)) if t.as_slice().eq_ignore_ascii_case("image") => {
- let page = format!("<html><body><img src='{:s}' /></body></html>", base_url.serialize());
- parser.parse_chunk(page.into_bytes().as_slice());
+ match input {
+ InputString(s) => {
+ parser.parse_chunk(s.into_bytes().as_slice());
},
- _ => loop {
- match load_response.progress_port.recv() {
- Payload(data) => {
- debug!("received data");
- parser.parse_chunk(data.as_slice());
- }
- Done(Err(err)) => {
- fail!("Failed to load page URL {:s}, error: {:s}", url.serialize(), err);
- }
- Done(..) => {
- break;
+ InputUrl(url) => {
+ let load_response = load_response.unwrap();
+ match load_response.metadata.content_type {
+ Some((ref t, _)) if t.as_slice().eq_ignore_ascii_case("image") => {
+ let page = format!("<html><body><img src='{:s}' /></body></html>", base_url.get_ref().serialize());
+ parser.parse_chunk(page.into_bytes().as_slice());
+ },
+ _ => loop {
+ match load_response.progress_port.recv() {
+ Payload(data) => {
+ debug!("received data");
+ parser.parse_chunk(data.as_slice());
+ }
+ Done(Err(err)) => {
+ fail!("Failed to load page URL {:s}, error: {:s}", url.serialize(), err);
+ }
+ Done(..) => {
+ break;
+ }
+ }
}
}
- }
+ },
}
debug!("finished parsing");
diff --git a/components/script/script_task.rs b/components/script/script_task.rs
index 88e2f42415a..a705398d4c1 100644
--- a/components/script/script_task.rs
+++ b/components/script/script_task.rs
@@ -6,6 +6,7 @@
//! and layout tasks.
use dom::bindings::codegen::InheritTypes::{EventTargetCast, NodeCast, EventCast};
+use dom::bindings::conversions::{FromJSValConvertible, Empty};
use dom::bindings::global::Window;
use dom::bindings::js::{JS, JSRef, RootCollection, Temporary, OptionalSettable};
use dom::bindings::js::OptionalRootable;
@@ -22,7 +23,7 @@ use dom::node::{ElementNodeTypeId, Node, NodeHelpers};
use dom::window::{TimerId, Window, WindowHelpers};
use dom::worker::{Worker, TrustedWorkerAddress};
use dom::xmlhttprequest::{TrustedXHRAddress, XMLHttpRequest, XHRProgress};
-use html::hubbub_html_parser::HtmlParserResult;
+use html::hubbub_html_parser::{InputString, InputUrl, HtmlParserResult};
use html::hubbub_html_parser::{HtmlDiscoveredStyle, HtmlDiscoveredScript};
use html::hubbub_html_parser;
use layout_interface::AddStylesheetMsg;
@@ -590,7 +591,7 @@ impl ScriptTask {
/// The entry point to document loading. Defines bindings, sets up the window and document
/// objects, parses HTML and CSS, and kicks off initial layout.
fn load(&self, pipeline_id: PipelineId, url: Url) {
- debug!("ScriptTask: loading {:?} on page {:?}", url, pipeline_id);
+ debug!("ScriptTask: loading {} on page {:?}", url, pipeline_id);
let mut page = self.page.borrow_mut();
let page = page.find(pipeline_id).expect("ScriptTask: received a load
@@ -610,6 +611,9 @@ impl ScriptTask {
_ => (),
}
+ let is_javascript = url.scheme.as_slice() == "javascript";
+ let last_url = last_loaded_url.map(|(ref loaded, _)| loaded.clone());
+
let cx = self.js_context.borrow();
let cx = cx.get_ref();
// Create the window and document objects.
@@ -619,17 +623,39 @@ impl ScriptTask {
self.control_chan.clone(),
self.compositor.dup(),
self.image_cache_task.clone()).root();
- let document = Document::new(&*window, Some(url.clone()), HTMLDocument, None).root();
+ let doc_url = if is_javascript {
+ let doc_url = match last_url {
+ Some(url) => Some(url.clone()),
+ None => Url::parse("about:blank").ok(),
+ };
+ *page.mut_url() = Some((doc_url.get_ref().clone(), true));
+ doc_url
+ } else {
+ Some(url.clone())
+ };
+ let document = Document::new(&*window, doc_url, HTMLDocument, None).root();
+
window.deref().init_browser_context(&*document);
self.compositor.set_ready_state(pipeline_id, Loading);
+
+ let parser_input = if !is_javascript {
+ InputUrl(url.clone())
+ } else {
+ let evalstr = url.non_relative_scheme_data().unwrap();
+ let jsval = window.evaluate_js_with_result(evalstr);
+ let strval = FromJSValConvertible::from_jsval(self.get_cx(), jsval, Empty);
+ InputString(strval.unwrap_or("".to_string()))
+ };
+
// Parse HTML.
//
// Note: We can parse the next document in parallel with any previous documents.
- let html_parsing_result = hubbub_html_parser::parse_html(&*page,
- &*document,
- url.clone(),
- self.resource_task.clone());
+ let html_parsing_result =
+ hubbub_html_parser::parse_html(&*page,
+ &*document,
+ parser_input,
+ self.resource_task.clone());
let HtmlParserResult {
discovery_port
@@ -665,6 +691,7 @@ impl ScriptTask {
}
// Kick off the initial reflow of the page.
+ debug!("kicking off initial reflow of {}", url);
document.deref().content_changed();
let fragment = url.fragment.as_ref().map(|ref fragment| fragment.to_string());
@@ -684,9 +711,14 @@ impl ScriptTask {
// Evaluate every script in the document.
for file in js_scripts.iter() {
let global_obj = window.reflector().get_jsobject();
+ let filename = match file.url {
+ None => String::new(),
+ Some(ref url) => url.serialize(),
+ };
+
//FIXME: this should have some kind of error handling, or explicitly
// drop an exception on the floor.
- match cx.evaluate_script(global_obj, file.data.clone(), file.url.serialize(), 1) {
+ match cx.evaluate_script(global_obj, file.data.clone(), filename, 1) {
Ok(_) => (),
Err(_) => println!("evaluate_script failed")
}
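
The script_task.rs changes wire everything together: when the URL's scheme is javascript, the document URL falls back to the previously loaded URL (or about:blank), the script body after the colon is evaluated with evaluate_js_with_result, the resulting JSVal is converted back to a string, and that string is fed to the parser as an InputString; any other scheme still takes the InputUrl path. A rough sketch of that branch, with a hypothetical evaluate_js closure standing in for the JS engine call plus the JSVal-to-string conversion, and plain strings standing in for URLs:

    // Sketch of the javascript: branch in ScriptTask::load.
    enum ParserInput {
        InputString(String),
        InputUrl(String),
    }

    fn choose_parser_input(url: &str, evaluate_js: impl Fn(&str) -> Option<String>) -> ParserInput {
        match url.strip_prefix("javascript:") {
            // Not a javascript: URL: load and parse it normally.
            None => ParserInput::InputUrl(url.to_string()),
            // javascript: URL: run the script body and parse its string result
            // (or an empty document if the result cannot be converted).
            Some(body) => ParserInput::InputString(evaluate_js(body).unwrap_or_default()),
        }
    }

    fn main() {
        let fake_eval = |body: &str| Some(format!("<p>{body}</p>")); // pretend evaluation
        match choose_parser_input("javascript:'hi'", fake_eval) {
            ParserInput::InputString(s) => println!("parsing generated markup: {s}"),
            ParserInput::InputUrl(u) => println!("fetching {u}"),
        }
    }
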
diff --git a/tests/wpt/metadata/XMLHttpRequest/open-url-javascript-window-2.htm.ini b/tests/wpt/metadata/XMLHttpRequest/open-url-javascript-window-2.htm.ini
index 96352459d95..d535d0b584a 100644
--- a/tests/wpt/metadata/XMLHttpRequest/open-url-javascript-window-2.htm.ini
+++ b/tests/wpt/metadata/XMLHttpRequest/open-url-javascript-window-2.htm.ini
@@ -1,3 +1,5 @@
[open-url-javascript-window-2.htm]
type: testharness
expected: TIMEOUT
+ [XMLHttpRequest: open() - resolving URLs (javascript: ]
+ expected: TIMEOUT
diff --git a/tests/wpt/metadata/XMLHttpRequest/open-url-javascript-window.htm.ini b/tests/wpt/metadata/XMLHttpRequest/open-url-javascript-window.htm.ini
index a31220abbac..7f9301c48d1 100644
--- a/tests/wpt/metadata/XMLHttpRequest/open-url-javascript-window.htm.ini
+++ b/tests/wpt/metadata/XMLHttpRequest/open-url-javascript-window.htm.ini
@@ -1,3 +1,5 @@
[open-url-javascript-window.htm]
type: testharness
expected: TIMEOUT
+ [XMLHttpRequest: open() - resolving URLs (javascript: ]
+ expected: TIMEOUT