Diffstat (limited to 'components/script/dom')
-rw-r--r--  components/script/dom/baseaudiocontext.rs                 5
-rw-r--r--  components/script/dom/bindings/trace.rs                   10
-rw-r--r--  components/script/dom/htmlmediaelement.rs                 359
-rw-r--r--  components/script/dom/htmlvideoelement.rs                 36
-rw-r--r--  components/script/dom/node.rs                             14
-rw-r--r--  components/script/dom/webidls/HTMLMediaElement.webidl     2
-rw-r--r--  components/script/dom/webidls/HTMLVideoElement.webidl     4
-rw-r--r--  components/script/dom/window.rs                           11
8 files changed, 378 insertions(+), 63 deletions(-)
diff --git a/components/script/dom/baseaudiocontext.rs b/components/script/dom/baseaudiocontext.rs
index b892049a8ec..0cb60a2fd80 100644
--- a/components/script/dom/baseaudiocontext.rs
+++ b/components/script/dom/baseaudiocontext.rs
@@ -461,7 +461,7 @@ impl BaseAudioContextMethods for BaseAudioContext {
}),
&canceller,
);
- }).error(move || {
+ }).error(move |error| {
let _ = task_source_.queue_with_canceller(
task!(audio_decode_eos: move || {
let this = this_.root();
@@ -473,7 +473,8 @@ impl BaseAudioContextMethods for BaseAudioContext {
&DOMException::new(&this.global(), DOMErrorName::DataCloneError),
ExceptionHandling::Report);
}
- resolver.promise.reject_error(Error::Type("Audio decode error".to_owned()));
+ let error = format!("Audio decode error {:?}", error);
+ resolver.promise.reject_error(Error::Type(error));
}),
&canceller_,
);
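The change above means decodeAudioData rejections now carry the underlying servo-media decode error instead of a fixed message. A minimal sketch of the pattern, using hypothetical error and promise types rather than servo-media's or Servo's actual APIs:

#[derive(Debug)]
enum DecodeError {
    UnsupportedFormat,
    Backend(String),
}

struct Promise;

impl Promise {
    fn reject_error(&self, message: String) {
        println!("rejected with TypeError: {}", message);
    }
}

fn on_decode_error(promise: Promise) -> impl FnOnce(DecodeError) {
    // The closure captures the promise and receives the error by value,
    // mirroring `.error(move |error| { ... })` in the patch.
    move |error: DecodeError| {
        let message = format!("Audio decode error {:?}", error);
        promise.reject_error(message);
    }
}

fn main() {
    let reject = on_decode_error(Promise);
    reject(DecodeError::Backend("demuxer failed".to_owned()));
    let _ = DecodeError::UnsupportedFormat; // other failure modes format the same way
}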
diff --git a/components/script/dom/bindings/trace.rs b/components/script/dom/bindings/trace.rs
index a3c4c7814ea..8e720f28e25 100644
--- a/components/script/dom/bindings/trace.rs
+++ b/components/script/dom/bindings/trace.rs
@@ -48,6 +48,7 @@ use dom::bindings::str::{DOMString, USVString};
use dom::bindings::utils::WindowProxyHandler;
use dom::document::PendingRestyle;
use dom::htmlimageelement::SourceSet;
+use dom::htmlmediaelement::MediaFrameRenderer;
use encoding_rs::{Decoder, Encoding};
use euclid::{Transform2D, Transform3D, Point2D, Vector2D, Rect, TypedSize2D, TypedScale};
use euclid::Length as EuclidLength;
@@ -90,12 +91,14 @@ use servo_arc::Arc as ServoArc;
use servo_atoms::Atom;
use servo_channel::{Receiver, Sender};
use servo_media::Backend;
+use servo_media::Error as ServoMediaError;
use servo_media::audio::analyser_node::AnalysisEngine;
use servo_media::audio::buffer_source_node::AudioBuffer;
use servo_media::audio::context::AudioContext;
use servo_media::audio::graph::NodeId;
use servo_media::audio::panner_node::{DistanceModel, PanningModel};
use servo_media::audio::param::ParamType;
+use servo_media::player::Player;
use servo_url::{ImmutableOrigin, MutableOrigin, ServoUrl};
use smallvec::SmallVec;
use std::cell::{Cell, RefCell, UnsafeCell};
@@ -104,7 +107,7 @@ use std::hash::{BuildHasher, Hash};
use std::ops::{Deref, DerefMut};
use std::path::PathBuf;
use std::rc::Rc;
-use std::sync::Arc;
+use std::sync::{Arc, Mutex};
use std::sync::atomic::{AtomicBool, AtomicUsize};
use std::time::{SystemTime, Instant};
use style::attr::{AttrIdentifier, AttrValue, LengthOrPercentageOrAuto};
@@ -121,7 +124,7 @@ use style::stylesheets::keyframes_rule::Keyframe;
use style::values::specified::Length;
use time::Duration;
use uuid::Uuid;
-use webrender_api::{DocumentId, ImageKey};
+use webrender_api::{DocumentId, ImageKey, RenderApiSender};
use webvr_traits::WebVRGamepadHand;
/// A trait to allow tracing (only) DOM objects.
@@ -454,6 +457,9 @@ unsafe_no_jsmanaged_fields!(AudioBuffer);
unsafe_no_jsmanaged_fields!(AudioContext<Backend>);
unsafe_no_jsmanaged_fields!(NodeId);
unsafe_no_jsmanaged_fields!(AnalysisEngine, DistanceModel, PanningModel, ParamType);
+unsafe_no_jsmanaged_fields!(Player<Error=ServoMediaError>);
+unsafe_no_jsmanaged_fields!(Mutex<MediaFrameRenderer>);
+unsafe_no_jsmanaged_fields!(RenderApiSender);
unsafe impl<'a> JSTraceable for &'a str {
#[inline]
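The three new unsafe_no_jsmanaged_fields! lines assert that Player<Error=ServoMediaError>, Mutex<MediaFrameRenderer>, and RenderApiSender contain no JS-managed objects, so the garbage collector's tracer may skip them. Roughly, the macro expands to a no-op JSTraceable implementation; the following is a simplified, self-contained sketch with a mock tracer type, not Servo's real bindings:

struct JSTracer; // stand-in for SpiderMonkey's JSTracer

unsafe trait JSTraceable {
    unsafe fn trace(&self, tracer: *mut JSTracer);
}

macro_rules! unsafe_no_jsmanaged_fields {
    ($($ty:ty),+) => {
        $(
            unsafe impl JSTraceable for $ty {
                #[inline]
                unsafe fn trace(&self, _: *mut JSTracer) {
                    // No JS-managed fields inside, so there is nothing to trace.
                }
            }
        )+
    };
}

struct RenderApiSenderStub; // stand-in for webrender_api::RenderApiSender

unsafe_no_jsmanaged_fields!(RenderApiSenderStub, u64);

fn main() {
    let sender = RenderApiSenderStub;
    // Tracing is a no-op for types declared this way.
    unsafe { sender.trace(std::ptr::null_mut()) };
}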
diff --git a/components/script/dom/htmlmediaelement.rs b/components/script/dom/htmlmediaelement.rs
index ad2a487a16f..84380c8d11f 100644
--- a/components/script/dom/htmlmediaelement.rs
+++ b/components/script/dom/htmlmediaelement.rs
@@ -2,7 +2,6 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-use audio_video_metadata;
use document_loader::{LoadBlocker, LoadType};
use dom::attr::Attr;
use dom::bindings::cell::DomRefCell;
@@ -18,7 +17,7 @@ use dom::bindings::error::{Error, ErrorResult};
use dom::bindings::inheritance::Castable;
use dom::bindings::refcounted::Trusted;
use dom::bindings::reflector::DomObject;
-use dom::bindings::root::{DomRoot, MutNullableDom};
+use dom::bindings::root::{DomRoot, LayoutDom, MutNullableDom};
use dom::bindings::str::DOMString;
use dom::blob::Blob;
use dom::document::Document;
@@ -26,28 +25,106 @@ use dom::element::{Element, AttributeMutation};
use dom::eventtarget::EventTarget;
use dom::htmlelement::HTMLElement;
use dom::htmlsourceelement::HTMLSourceElement;
+use dom::htmlvideoelement::HTMLVideoElement;
use dom::mediaerror::MediaError;
-use dom::node::{window_from_node, document_from_node, Node, UnbindContext};
+use dom::node::{document_from_node, window_from_node, Node, NodeDamage, UnbindContext};
use dom::promise::Promise;
use dom::virtualmethods::VirtualMethods;
use dom_struct::dom_struct;
+use fetch::FetchCanceller;
use html5ever::{LocalName, Prefix};
+use hyper::header::ContentLength;
use ipc_channel::ipc;
use ipc_channel::router::ROUTER;
use microtask::{Microtask, MicrotaskRunnable};
use mime::{Mime, SubLevel, TopLevel};
-use net_traits::{FetchResponseListener, FetchMetadata, Metadata, NetworkError};
+use net_traits::{CoreResourceMsg, FetchChannels, FetchResponseListener, FetchMetadata, Metadata};
+use net_traits::NetworkError;
use net_traits::request::{CredentialsMode, Destination, RequestInit};
use network_listener::{NetworkListener, PreInvoke};
+use script_layout_interface::HTMLMediaData;
use script_thread::ScriptThread;
+use servo_media::Error as ServoMediaError;
+use servo_media::ServoMedia;
+use servo_media::player::{PlaybackState, Player, PlayerEvent};
+use servo_media::player::frame::{Frame, FrameRenderer};
use servo_url::ServoUrl;
use std::cell::Cell;
use std::collections::VecDeque;
+use std::f64;
use std::mem;
use std::rc::Rc;
use std::sync::{Arc, Mutex};
use task_source::{TaskSource, TaskSourceName};
use time::{self, Timespec, Duration};
+use webrender_api::{ImageData, ImageDescriptor, ImageFormat, ImageKey, RenderApi};
+use webrender_api::{RenderApiSender, Transaction};
+
+pub struct MediaFrameRenderer {
+ api: RenderApi,
+ current_frame: Option<(ImageKey, i32, i32)>,
+ old_frame: Option<ImageKey>,
+ very_old_frame: Option<ImageKey>,
+}
+
+impl MediaFrameRenderer {
+ fn new(render_api_sender: RenderApiSender) -> Self {
+ Self {
+ api: render_api_sender.create_api(),
+ current_frame: None,
+ old_frame: None,
+ very_old_frame: None,
+ }
+ }
+}
+
+impl FrameRenderer for MediaFrameRenderer {
+ fn render(&mut self, frame: Frame) {
+ let descriptor = ImageDescriptor::new(
+ frame.get_width() as u32,
+ frame.get_height() as u32,
+ ImageFormat::BGRA8,
+ false,
+ false,
+ );
+
+ let mut txn = Transaction::new();
+
+ let image_data = ImageData::Raw(frame.get_data().clone());
+
+ if let Some(old_image_key) = mem::replace(&mut self.very_old_frame, self.old_frame.take()) {
+ txn.delete_image(old_image_key);
+ }
+
+ match self.current_frame {
+ Some((ref image_key, ref mut width, ref mut height))
+ if *width == frame.get_width() && *height == frame.get_height() =>
+ {
+ txn.update_image(*image_key, descriptor, image_data, None);
+
+ if let Some(old_image_key) = self.old_frame.take() {
+ txn.delete_image(old_image_key);
+ }
+ }
+ Some((ref mut image_key, ref mut width, ref mut height)) => {
+ self.old_frame = Some(*image_key);
+
+ let new_image_key = self.api.generate_image_key();
+ txn.add_image(new_image_key, descriptor, image_data, None);
+ *image_key = new_image_key;
+ *width = frame.get_width();
+ *height = frame.get_height();
+ },
+ None => {
+ let image_key = self.api.generate_image_key();
+ txn.add_image(image_key, descriptor, image_data, None);
+ self.current_frame = Some((image_key, frame.get_width(), frame.get_height()));
+ },
+ }
+
+ self.api.update_resources(txn.resource_updates);
+ }
+}
#[dom_struct]
// FIXME(nox): A lot of tasks queued for this element should probably be in the
@@ -82,6 +159,15 @@ pub struct HTMLMediaElement {
/// Play promises which are soon to be fulfilled by a queued task.
#[ignore_malloc_size_of = "promises are hard"]
in_flight_play_promises_queue: DomRefCell<VecDeque<(Box<[Rc<Promise>]>, ErrorResult)>>,
+ #[ignore_malloc_size_of = "servo_media"]
+ player: Box<Player<Error=ServoMediaError>>,
+ #[ignore_malloc_size_of = "Arc"]
+ frame_renderer: Arc<Mutex<MediaFrameRenderer>>,
+ fetch_canceller: DomRefCell<FetchCanceller>,
+ /// https://html.spec.whatwg.org/multipage/#show-poster-flag
+ show_poster: Cell<bool>,
+ /// https://html.spec.whatwg.org/multipage/#dom-media-duration
+ duration: Cell<f64>,
}
/// <https://html.spec.whatwg.org/multipage/#dom-media-networkstate>
@@ -95,9 +181,9 @@ pub enum NetworkState {
}
/// <https://html.spec.whatwg.org/multipage/#dom-media-readystate>
-#[derive(Clone, Copy, JSTraceable, MallocSizeOf, PartialEq, PartialOrd)]
+#[derive(Clone, Copy, Debug, JSTraceable, MallocSizeOf, PartialEq, PartialOrd)]
#[repr(u8)]
-enum ReadyState {
+pub enum ReadyState {
HaveNothing = HTMLMediaElementConstants::HAVE_NOTHING as u8,
HaveMetadata = HTMLMediaElementConstants::HAVE_METADATA as u8,
HaveCurrentData = HTMLMediaElementConstants::HAVE_CURRENT_DATA as u8,
@@ -122,9 +208,20 @@ impl HTMLMediaElement {
delaying_the_load_event_flag: Default::default(),
pending_play_promises: Default::default(),
in_flight_play_promises_queue: Default::default(),
+ player: ServoMedia::get().unwrap().create_player(),
+ frame_renderer: Arc::new(Mutex::new(MediaFrameRenderer::new(
+ document.window().get_webrender_api_sender(),
+ ))),
+ fetch_canceller: DomRefCell::new(Default::default()),
+ show_poster: Cell::new(true),
+ duration: Cell::new(f64::NAN),
}
}
+ pub fn get_ready_state(&self) -> ReadyState {
+ self.ready_state.get()
+ }
+
fn media_type_id(&self) -> HTMLMediaElementTypeId {
match self.upcast::<Node>().type_id() {
NodeTypeId::Element(ElementTypeId::HTMLElement(
@@ -187,8 +284,10 @@ impl HTMLMediaElement {
self.paused.set(false);
// Step 6.2.
- // FIXME(nox): Set show poster flag to false and run time marches on
- // steps if show poster flag is true.
+ if self.show_poster.get() {
+ self.show_poster.set(false);
+ self.time_marches_on();
+ }
// Step 6.3.
task_source.queue_simple_event(self.upcast(), atom!("play"), &window);
@@ -217,10 +316,15 @@ impl HTMLMediaElement {
return;
}
- this.fulfill_in_flight_play_promises(|| ());
+ this.fulfill_in_flight_play_promises(|| {
+ if let Err(e) = this.player.play() {
+ eprintln!("Could not play media {:?}", e);
+ }
+ });
}),
window.upcast(),
- ).unwrap();
+ )
+ .unwrap();
}
// Step 8.
@@ -230,6 +334,11 @@ impl HTMLMediaElement {
// Not applicable here, the promise is returned from Play.
}
+ /// https://html.spec.whatwg.org/multipage/#time-marches-on
+ fn time_marches_on(&self) {
+ // TODO: implement this.
+ }
+
/// <https://html.spec.whatwg.org/multipage/#internal-pause-steps>
fn internal_pause_steps(&self) {
// Step 1.
@@ -263,6 +372,10 @@ impl HTMLMediaElement {
// Step 2.3.2.
this.upcast::<EventTarget>().fire_event(atom!("pause"));
+ if let Err(e) = this.player.pause() {
+ eprintln!("Could not pause player {:?}", e);
+ }
+
// Step 2.3.3.
// Done after running this closure in
// `fulfill_in_flight_play_promises`.
@@ -298,6 +411,9 @@ impl HTMLMediaElement {
this.fulfill_in_flight_play_promises(|| {
// Step 2.1.
this.upcast::<EventTarget>().fire_event(atom!("playing"));
+ if let Err(e) = this.player.play() {
+ eprintln!("Could not play media {:?}", e);
+ }
// Step 2.2.
// Done after running this closure in
@@ -377,7 +493,11 @@ impl HTMLMediaElement {
if self.autoplaying.get() && self.Paused() && self.Autoplay() {
// Step 1
self.paused.set(false);
- // TODO step 2: show poster
+ // Step 2
+ if self.show_poster.get() {
+ self.show_poster.set(false);
+ self.time_marches_on();
+ }
// Step 3
task_source.queue_simple_event(self.upcast(), atom!("play"), &window);
// Step 4
@@ -398,7 +518,7 @@ impl HTMLMediaElement {
self.network_state.set(NetworkState::NoSource);
// Step 2.
- // FIXME(nox): Set show poster flag to true.
+ self.show_poster.set(true);
// Step 3.
self.delay_load_event(true);
@@ -416,7 +536,7 @@ impl HTMLMediaElement {
base_url: doc.base_url(),
};
- // FIXME(nox): This will later call the resource_selection_algorith_sync
+ // FIXME(nox): This will later call the resource_selection_algorithm_sync
// method from below, if microtasks were trait objects, we would be able
// to put the code directly in this method, without the boilerplate
// indirections.
@@ -426,6 +546,8 @@ impl HTMLMediaElement {
// https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm
fn resource_selection_algorithm_sync(&self, base_url: ServoUrl) {
// Step 5.
+ // FIXME(ferjm): Implement blocked_on_parser logic
+ // https://html.spec.whatwg.org/multipage/#blocked-on-parser
// FIXME(nox): Maybe populate the list of pending text tracks.
// Step 6.
@@ -515,6 +637,7 @@ impl HTMLMediaElement {
},
Mode::Children(_source) => {
// Step 9.children.
+ // FIXME: https://github.com/servo/servo/issues/21481
self.queue_dedicated_media_source_failure_steps()
},
}
@@ -522,6 +645,11 @@ impl HTMLMediaElement {
// https://html.spec.whatwg.org/multipage/#concept-media-load-resource
fn resource_fetch_algorithm(&self, resource: Resource) {
+ if let Err(e) = self.setup_media_player() {
+ eprintln!("Setup media player error {:?}", e);
+ self.queue_dedicated_media_source_failure_steps();
+ return;
+ }
// Steps 1-2.
// Unapplicable, the `resource` variable already conveys which mode
// is in use.
@@ -554,7 +682,8 @@ impl HTMLMediaElement {
this.root().delay_load_event(false);
}),
window.upcast(),
- ).unwrap();
+ )
+ .unwrap();
// Steps 4.remote.1.4.
// FIXME(nox): Somehow we should wait for the task from previous
@@ -600,9 +729,15 @@ impl HTMLMediaElement {
listener.notify_fetch(message.to().unwrap());
}),
);
- document
- .loader_mut()
- .fetch_async_background(request, action_sender);
+ let cancel_receiver = self.fetch_canceller.borrow_mut().initialize();
+ let global = self.global();
+ global
+ .core_resource_thread()
+ .send(CoreResourceMsg::Fetch(
+ request,
+ FetchChannels::ResponseMsg(action_sender, Some(cancel_receiver)),
+ ))
+ .unwrap();
},
Resource::Object => {
// FIXME(nox): Actually do something with the object.
@@ -642,11 +777,15 @@ impl HTMLMediaElement {
this.network_state.set(NetworkState::NoSource);
// Step 4.
- // FIXME(nox): Set show poster flag to true.
+ this.show_poster.set(true);
// Step 5.
this.upcast::<EventTarget>().fire_event(atom!("error"));
+ if let Err(e) = this.player.stop() {
+ eprintln!("Could not stop player {:?}", e);
+ }
+
// Step 6.
// Done after running this closure in
// `fulfill_in_flight_play_promises`.
@@ -688,7 +827,7 @@ impl HTMLMediaElement {
task_source.queue_simple_event(self.upcast(), atom!("emptied"), &window);
// Step 6.2.
- // FIXME(nox): Abort in-progress fetching process.
+ self.fetch_canceller.borrow_mut().cancel();
// Step 6.3.
// FIXME(nox): Detach MediaSource media provider object.
@@ -722,7 +861,7 @@ impl HTMLMediaElement {
// FIXME(nox): Set timeline offset to NaN.
// Step 6.10.
- // FIXME(nox): Set duration to NaN.
+ self.duration.set(f64::NAN);
}
// Step 7.
@@ -805,6 +944,104 @@ impl HTMLMediaElement {
}
self.media_element_load_algorithm();
}
+
+ // Set up the servo-media player and register the frame renderer.
+ fn setup_media_player(&self) -> Result<(), ServoMediaError> {
+ let (action_sender, action_receiver) = ipc::channel().unwrap();
+
+ self.player.register_event_handler(action_sender)?;
+ self.player
+ .register_frame_renderer(self.frame_renderer.clone())?;
+
+ let trusted_node = Trusted::new(self);
+ let window = window_from_node(self);
+ let task_source = window.dom_manipulation_task_source();
+ let task_canceller = window.task_canceller(TaskSourceName::DOMManipulation);
+ ROUTER.add_route(
+ action_receiver.to_opaque(),
+ Box::new(move |message| {
+ let event: PlayerEvent = message.to().unwrap();
+ let this = trusted_node.clone();
+ task_source
+ .queue_with_canceller(
+ task!(handle_player_event: move || {
+ this.root().handle_player_event(&event);
+ }),
+ &task_canceller,
+ )
+ .unwrap();
+ }),
+ );
+
+ Ok(())
+ }
+
+ fn handle_player_event(&self, event: &PlayerEvent) {
+ match *event {
+ PlayerEvent::MetadataUpdated(ref metadata) => {
+ // https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list
+ // => "Once enough of the media data has been fetched to determine the duration..."
+ // Step 1.
+ // servo-media owns the media timeline.
+
+ // Step 2.
+ // XXX(ferjm) Update the timeline offset.
+
+ // Step 3.
+ // XXX(ferjm) Set the current and official playback positions
+ // to the earliest possible position.
+
+ // Step 4.
+ if let Some(duration) = metadata.duration {
+ self.duration.set(duration.as_secs() as f64);
+ } else {
+ self.duration.set(f64::INFINITY);
+ }
+ let window = window_from_node(self);
+ let task_source = window.dom_manipulation_task_source();
+ task_source.queue_simple_event(self.upcast(), atom!("durationchange"), &window);
+
+ // Step 5.
+ if self.is::<HTMLVideoElement>() {
+ let video_elem = self.downcast::<HTMLVideoElement>().unwrap();
+ video_elem.set_video_width(metadata.width);
+ video_elem.set_video_height(metadata.height);
+ task_source.queue_simple_event(self.upcast(), atom!("resize"), &window);
+ }
+
+ // Step 6.
+ self.change_ready_state(ReadyState::HaveMetadata);
+
+ // XXX(ferjm) Steps 7 to 13.
+ },
+ PlayerEvent::StateChanged(ref state) => match *state {
+ PlaybackState::Paused => {
+ if self.ready_state.get() == ReadyState::HaveMetadata {
+ self.change_ready_state(ReadyState::HaveEnoughData);
+ }
+ },
+ _ => {},
+ },
+ PlayerEvent::EndOfStream => {
+ // https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list
+ // => "If the media data can be fetched but is found by inspection to be in
+ // an unsupported format, or can otherwise not be rendered at all"
+ if self.ready_state.get() < ReadyState::HaveMetadata {
+ self.queue_dedicated_media_source_failure_steps();
+ }
+ },
+ PlayerEvent::FrameUpdated => {
+ self.upcast::<Node>().dirty(NodeDamage::OtherNodeDamage);
+ },
+ PlayerEvent::Error => {
+ self.error.set(Some(&*MediaError::new(
+ &*window_from_node(self),
+ MEDIA_ERR_DECODE,
+ )));
+ self.upcast::<EventTarget>().fire_event(atom!("error"));
+ },
+ }
+ }
}
impl HTMLMediaElementMethods for HTMLMediaElement {
@@ -894,6 +1131,11 @@ impl HTMLMediaElementMethods for HTMLMediaElement {
fn Paused(&self) -> bool {
self.paused.get()
}
+
+ // https://html.spec.whatwg.org/multipage/#dom-media-duration
+ fn Duration(&self) -> f64 {
+ self.duration.get()
+ }
}
impl VirtualMethods for HTMLMediaElement {
@@ -927,6 +1169,20 @@ impl VirtualMethods for HTMLMediaElement {
}
}
+pub trait LayoutHTMLMediaElementHelpers {
+ fn data(&self) -> HTMLMediaData;
+}
+
+impl LayoutHTMLMediaElementHelpers for LayoutDom<HTMLMediaElement> {
+ #[allow(unsafe_code)]
+ fn data(&self) -> HTMLMediaData {
+ let media = unsafe { &*self.unsafe_get() };
+ HTMLMediaData {
+ current_frame: media.frame_renderer.lock().unwrap().current_frame.clone(),
+ }
+ }
+}
+
#[derive(JSTraceable, MallocSizeOf)]
pub enum MediaElementMicrotask {
ResourceSelectionTask {
@@ -968,16 +1224,12 @@ enum Resource {
struct HTMLMediaElementContext {
/// The element that initiated the request.
elem: Trusted<HTMLMediaElement>,
- /// The response body received to date.
- data: Vec<u8>,
/// The response metadata received to date.
metadata: Option<Metadata>,
/// The generation of the media element when this fetch started.
generation_id: u32,
/// Time of last progress notification.
next_progress_event: Timespec,
- /// Whether the media metadata has been completely received.
- have_metadata: bool,
/// True if this response is invalid and should be ignored.
ignore_response: bool,
}
@@ -994,6 +1246,16 @@ impl FetchResponseListener for HTMLMediaElementContext {
FetchMetadata::Filtered { unsafe_, .. } => unsafe_,
});
+ if let Some(metadata) = self.metadata.as_ref() {
+ if let Some(headers) = metadata.headers.as_ref() {
+ if let Some(content_length) = headers.get::<ContentLength>() {
+ if let Err(e) = self.elem.root().player.set_input_size(**content_length) {
+ eprintln!("Could not set player input size {:?}", e);
+ }
+ }
+ }
+ }
+
let status_is_ok = self
.metadata
.as_ref()
@@ -1005,28 +1267,24 @@ impl FetchResponseListener for HTMLMediaElementContext {
// Ensure that the element doesn't receive any further notifications
// of the aborted fetch.
self.ignore_response = true;
- self.elem
- .root()
- .queue_dedicated_media_source_failure_steps();
+ let elem = self.elem.root();
+ elem.fetch_canceller.borrow_mut().cancel();
+ elem.queue_dedicated_media_source_failure_steps();
}
}
- fn process_response_chunk(&mut self, mut payload: Vec<u8>) {
+ fn process_response_chunk(&mut self, payload: Vec<u8>) {
if self.ignore_response {
// An error was received previously, skip processing the payload.
return;
}
- self.data.append(&mut payload);
-
let elem = self.elem.root();
- // https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list
- // => "Once enough of the media data has been fetched to determine the duration..."
- if !self.have_metadata {
- self.check_metadata(&elem);
- } else {
- elem.change_ready_state(ReadyState::HaveCurrentData);
+ // Push input data into the player.
+ if let Err(e) = elem.player.push_data(payload) {
+ eprintln!("Could not push input data to player {:?}", e);
+ return;
}
// https://html.spec.whatwg.org/multipage/#concept-media-load-resource step 4,
@@ -1050,13 +1308,17 @@ impl FetchResponseListener for HTMLMediaElementContext {
}
let elem = self.elem.root();
- // => "If the media data can be fetched but is found by inspection to be in an unsupported
- // format, or can otherwise not be rendered at all"
- if !self.have_metadata {
- elem.queue_dedicated_media_source_failure_steps();
+ // Signal the eos to player.
+ if let Err(e) = elem.player.end_of_stream() {
+ eprintln!("Could not signal EOS to player {:?}", e);
}
- // => "Once the entire media resource has been fetched..."
- else if status.is_ok() {
+
+ if status.is_ok() {
+ if elem.ready_state.get() == ReadyState::HaveNothing {
+ // Make sure that we don't skip the HaveMetadata and HaveCurrentData
+ // states for short streams.
+ elem.change_ready_state(ReadyState::HaveMetadata);
+ }
elem.change_ready_state(ReadyState::HaveEnoughData);
elem.upcast::<EventTarget>().fire_event(atom!("progress"));
@@ -1067,6 +1329,9 @@ impl FetchResponseListener for HTMLMediaElementContext {
}
// => "If the connection is interrupted after some media data has been received..."
else if elem.ready_state.get() != ReadyState::HaveNothing {
+ // Step 1
+ elem.fetch_canceller.borrow_mut().cancel();
+
// Step 2
elem.error.set(Some(&*MediaError::new(
&*window_from_node(&*elem),
@@ -1099,20 +1364,10 @@ impl HTMLMediaElementContext {
fn new(elem: &HTMLMediaElement) -> HTMLMediaElementContext {
HTMLMediaElementContext {
elem: Trusted::new(elem),
- data: vec![],
metadata: None,
generation_id: elem.generation_id.get(),
next_progress_event: time::get_time() + Duration::milliseconds(350),
- have_metadata: false,
ignore_response: false,
}
}
-
- fn check_metadata(&mut self, elem: &HTMLMediaElement) {
- if audio_video_metadata::get_format_from_slice(&self.data).is_ok() {
- // Step 6.
- elem.change_ready_state(ReadyState::HaveMetadata);
- self.have_metadata = true;
- }
- }
}
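The subtlest part of the new MediaFrameRenderer is how it retires WebRender image keys: the key currently on screen cannot be deleted the moment a new frame arrives, because the compositor may still be sampling it, so the renderer parks the replaced key in old_frame, promotes it to very_old_frame on the next update, and only then deletes it. A self-contained sketch of that rotation, with mock types standing in for webrender's ImageKey and Transaction:

use std::mem;

type ImageKey = u64; // stand-in for webrender_api::ImageKey

#[derive(Default)]
struct Renderer {
    next_key: ImageKey,
    current_frame: Option<(ImageKey, i32, i32)>, // (key, width, height)
    old_frame: Option<ImageKey>,
    very_old_frame: Option<ImageKey>,
    deleted: Vec<ImageKey>, // stands in for Transaction::delete_image calls
}

impl Renderer {
    fn generate_image_key(&mut self) -> ImageKey {
        self.next_key += 1;
        self.next_key
    }

    fn render(&mut self, width: i32, height: i32) {
        // Whatever was two updates old can now be deleted safely; the key that
        // was one update old is promoted to very_old_frame.
        if let Some(key) = mem::replace(&mut self.very_old_frame, self.old_frame.take()) {
            self.deleted.push(key);
        }
        match self.current_frame {
            // Same dimensions: the image is updated in place under its key.
            Some((_, w, h)) if w == width && h == height => {
                if let Some(key) = self.old_frame.take() {
                    self.deleted.push(key);
                }
            },
            // Dimensions changed: retire the current key and allocate a new one.
            Some((key, _, _)) => {
                self.old_frame = Some(key);
                let new_key = self.generate_image_key();
                self.current_frame = Some((new_key, width, height));
            },
            // First frame: allocate the initial key.
            None => {
                let key = self.generate_image_key();
                self.current_frame = Some((key, width, height));
            },
        }
    }
}

fn main() {
    let mut r = Renderer::default();
    r.render(320, 240); // key 1 allocated
    r.render(640, 480); // size change: key 2 allocated, key 1 parked in old_frame
    r.render(640, 480); // key 1 promoted to very_old_frame, still alive
    r.render(640, 480); // two updates after being replaced, key 1 is deleted
    assert_eq!(r.deleted, vec![1]);
}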
diff --git a/components/script/dom/htmlvideoelement.rs b/components/script/dom/htmlvideoelement.rs
index 8561aa051e1..59c93bfea6e 100644
--- a/components/script/dom/htmlvideoelement.rs
+++ b/components/script/dom/htmlvideoelement.rs
@@ -3,16 +3,22 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLVideoElementBinding;
+use dom::bindings::codegen::Bindings::HTMLVideoElementBinding::HTMLVideoElementMethods;
use dom::bindings::root::DomRoot;
use dom::document::Document;
-use dom::htmlmediaelement::HTMLMediaElement;
+use dom::htmlmediaelement::{HTMLMediaElement, ReadyState};
use dom::node::Node;
use dom_struct::dom_struct;
use html5ever::{LocalName, Prefix};
+use std::cell::Cell;
#[dom_struct]
pub struct HTMLVideoElement {
htmlmediaelement: HTMLMediaElement,
+ /// https://html.spec.whatwg.org/multipage/#dom-video-videowidth
+ video_width: Cell<u32>,
+ /// https://html.spec.whatwg.org/multipage/#dom-video-videoheight
+ video_height: Cell<u32>,
}
impl HTMLVideoElement {
@@ -23,6 +29,8 @@ impl HTMLVideoElement {
) -> HTMLVideoElement {
HTMLVideoElement {
htmlmediaelement: HTMLMediaElement::new_inherited(local_name, prefix, document),
+ video_width: Cell::new(0),
+ video_height: Cell::new(0),
}
}
@@ -40,4 +48,30 @@ impl HTMLVideoElement {
HTMLVideoElementBinding::Wrap,
)
}
+
+ pub fn set_video_width(&self, width: u32) {
+ self.video_width.set(width);
+ }
+
+ pub fn set_video_height(&self, height: u32) {
+ self.video_height.set(height);
+ }
+}
+
+impl HTMLVideoElementMethods for HTMLVideoElement {
+ // https://html.spec.whatwg.org/multipage/#dom-video-videowidth
+ fn VideoWidth(&self) -> u32 {
+ if self.htmlmediaelement.get_ready_state() == ReadyState::HaveNothing {
+ return 0;
+ }
+ self.video_width.get()
+ }
+
+ // https://html.spec.whatwg.org/multipage/#dom-video-videoheight
+ fn VideoHeight(&self) -> u32 {
+ if self.htmlmediaelement.get_ready_state() == ReadyState::HaveNothing {
+ return 0;
+ }
+ self.video_height.get()
+ }
}
diff --git a/components/script/dom/node.rs b/components/script/dom/node.rs
index 8fa494767f6..7899e494341 100644
--- a/components/script/dom/node.rs
+++ b/components/script/dom/node.rs
@@ -43,6 +43,7 @@ use dom::htmliframeelement::{HTMLIFrameElement, HTMLIFrameElementLayoutMethods};
use dom::htmlimageelement::{HTMLImageElement, LayoutHTMLImageElementHelpers};
use dom::htmlinputelement::{HTMLInputElement, LayoutHTMLInputElementHelpers};
use dom::htmllinkelement::HTMLLinkElement;
+use dom::htmlmediaelement::{HTMLMediaElement, LayoutHTMLMediaElementHelpers};
use dom::htmlmetaelement::HTMLMetaElement;
use dom::htmlstyleelement::HTMLStyleElement;
use dom::htmltextareaelement::{HTMLTextAreaElement, LayoutHTMLTextAreaElementHelpers};
@@ -62,8 +63,8 @@ use libc::{self, c_void, uintptr_t};
use malloc_size_of::{MallocSizeOf, MallocSizeOfOps};
use msg::constellation_msg::{BrowsingContextId, PipelineId};
use ref_slice::ref_slice;
-use script_layout_interface::{HTMLCanvasData, OpaqueStyleAndLayoutData, SVGSVGData};
-use script_layout_interface::{LayoutElementType, LayoutNodeType, TrustedNodeAddress};
+use script_layout_interface::{HTMLCanvasData, HTMLMediaData, LayoutElementType, LayoutNodeType};
+use script_layout_interface::{OpaqueStyleAndLayoutData, SVGSVGData, TrustedNodeAddress};
use script_layout_interface::message::Msg;
use script_thread::ScriptThread;
use script_traits::DocumentActivity;
@@ -1086,6 +1087,7 @@ pub trait LayoutNodeHelpers {
fn image_url(&self) -> Option<ServoUrl>;
fn image_density(&self) -> Option<f64>;
fn canvas_data(&self) -> Option<HTMLCanvasData>;
+ fn media_data(&self) -> Option<HTMLMediaData>;
fn svg_data(&self) -> Option<SVGSVGData>;
fn iframe_browsing_context_id(&self) -> Option<BrowsingContextId>;
fn iframe_pipeline_id(&self) -> Option<PipelineId>;
@@ -1245,6 +1247,11 @@ impl LayoutNodeHelpers for LayoutDom<Node> {
.map(|canvas| canvas.data())
}
+ fn media_data(&self) -> Option<HTMLMediaData> {
+ self.downcast::<HTMLMediaElement>()
+ .map(|media| media.data())
+ }
+
fn svg_data(&self) -> Option<SVGSVGData> {
self.downcast::<SVGSVGElement>().map(|svg| svg.data())
}
@@ -2911,6 +2918,9 @@ impl Into<LayoutElementType> for ElementTypeId {
ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLImageElement) => {
LayoutElementType::HTMLImageElement
},
+ ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLMediaElement(_)) => {
+ LayoutElementType::HTMLMediaElement
+ },
ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLInputElement) => {
LayoutElementType::HTMLInputElement
},
diff --git a/components/script/dom/webidls/HTMLMediaElement.webidl b/components/script/dom/webidls/HTMLMediaElement.webidl
index b67a5e3c1fe..e00b1de7c4f 100644
--- a/components/script/dom/webidls/HTMLMediaElement.webidl
+++ b/components/script/dom/webidls/HTMLMediaElement.webidl
@@ -39,7 +39,7 @@ interface HTMLMediaElement : HTMLElement {
// playback state
// attribute double currentTime;
// void fastSeek(double time);
- // readonly attribute unrestricted double duration;
+ readonly attribute unrestricted double duration;
// Date getStartDate();
readonly attribute boolean paused;
// attribute double defaultPlaybackRate;
diff --git a/components/script/dom/webidls/HTMLVideoElement.webidl b/components/script/dom/webidls/HTMLVideoElement.webidl
index bfd1be006ea..a3a34e7c47e 100644
--- a/components/script/dom/webidls/HTMLVideoElement.webidl
+++ b/components/script/dom/webidls/HTMLVideoElement.webidl
@@ -9,8 +9,8 @@ interface HTMLVideoElement : HTMLMediaElement {
// attribute unsigned long width;
// [CEReactions]
// attribute unsigned long height;
- // readonly attribute unsigned long videoWidth;
- // readonly attribute unsigned long videoHeight;
+ readonly attribute unsigned long videoWidth;
+ readonly attribute unsigned long videoHeight;
// [CEReactions]
// attribute DOMString poster;
};
diff --git a/components/script/dom/window.rs b/components/script/dom/window.rs
index ff905ab55b5..c50aae0aec7 100644
--- a/components/script/dom/window.rs
+++ b/components/script/dom/window.rs
@@ -132,7 +132,7 @@ use time;
use timers::{IsInterval, TimerCallback};
use url::Position;
use webdriver_handlers::jsval_to_webdriver;
-use webrender_api::{ExternalScrollId, DeviceIntPoint, DeviceUintSize, DocumentId};
+use webrender_api::{DeviceIntPoint, DeviceUintSize, DocumentId, ExternalScrollId, RenderApiSender};
use webvr_traits::WebVRMsg;
/// Current state of the window object
@@ -308,6 +308,9 @@ pub struct Window {
/// Flag to identify whether mutation observers are present(true)/absent(false)
exists_mut_observer: Cell<bool>,
+ /// Webrender API Sender
+ #[ignore_malloc_size_of = "defined in webrender_api"]
+ webrender_api_sender: RenderApiSender,
}
impl Window {
@@ -483,6 +486,10 @@ impl Window {
}
self.add_pending_reflow();
}
+
+ pub fn get_webrender_api_sender(&self) -> RenderApiSender {
+ self.webrender_api_sender.clone()
+ }
}
// https://html.spec.whatwg.org/multipage/#atob
@@ -2083,6 +2090,7 @@ impl Window {
webvr_chan: Option<IpcSender<WebVRMsg>>,
microtask_queue: Rc<MicrotaskQueue>,
webrender_document: DocumentId,
+ webrender_api_sender: RenderApiSender,
) -> DomRoot<Self> {
let layout_rpc: Box<LayoutRPC + Send> = {
let (rpc_send, rpc_recv) = channel();
@@ -2161,6 +2169,7 @@ impl Window {
paint_worklet: Default::default(),
webrender_document,
exists_mut_observer: Cell::new(false),
+ webrender_api_sender,
});
unsafe { WindowBinding::Wrap(runtime.cx(), win) }
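The window.rs side of the patch threads a RenderApiSender into every Window so media elements can build their own WebRender API handle; the diff shows MediaFrameRenderer::new calling create_api() on the sender it receives from Window::get_webrender_api_sender(). A small sketch of that fan-out, with mock types in place of webrender_api::RenderApiSender and RenderApi:

#[derive(Clone)]
struct RenderApiSender; // stand-in for webrender_api::RenderApiSender

struct RenderApi; // stand-in for webrender_api::RenderApi

impl RenderApiSender {
    fn create_api(&self) -> RenderApi {
        RenderApi
    }
}

struct Window {
    webrender_api_sender: RenderApiSender,
}

impl Window {
    // Mirrors Window::get_webrender_api_sender() in the patch.
    fn get_webrender_api_sender(&self) -> RenderApiSender {
        self.webrender_api_sender.clone()
    }
}

struct MediaFrameRenderer {
    _api: RenderApi, // each renderer owns its own API handle
}

impl MediaFrameRenderer {
    fn new(sender: RenderApiSender) -> Self {
        Self { _api: sender.create_api() }
    }
}

fn main() {
    let window = Window { webrender_api_sender: RenderApiSender };
    // Every media element builds its own RenderApi from a cloned sender.
    let _first_video_renderer = MediaFrameRenderer::new(window.get_webrender_api_sender());
    let _second_video_renderer = MediaFrameRenderer::new(window.get_webrender_api_sender());
}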