Diffstat (limited to 'components/script/dom')
-rw-r--r--   components/script/dom/audiobuffer.rs               | 123
-rw-r--r--   components/script/dom/audiobuffersourcenode.rs     |  12
-rw-r--r--   components/script/dom/audioscheduledsourcenode.rs  |  20
-rw-r--r--   components/script/dom/htmlmediaelement.rs          |  25
-rw-r--r--   components/script/dom/offlineaudiocontext.rs       |   6
-rw-r--r--   components/script/dom/window.rs                    |   9
6 files changed, 110 insertions(+), 85 deletions(-)
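The main change in audiobuffer.rs below is that shared_channels becomes an Option that is only populated when another API implementation actually needs the channel data: get_channels() lazily runs the "acquire the content" steps, caches the result, and hands out a borrow instead of a clone. The following is an illustrative sketch of that caching pattern only, not code from this patch; Channels stands in for ServoMediaAudioBuffer and the JS-array handling is stubbed out.

// Illustrative sketch (simplified stand-in types, not Servo code).
use std::cell::{Ref, RefCell};

type Channels = Vec<Vec<f32>>;

struct Buffer {
    // None while the data still lives in the JS-side arrays;
    // Some once it has been acquired (the JS arrays would be detached).
    shared_channels: RefCell<Option<Channels>>,
}

impl Buffer {
    // Stand-in for AudioBuffer::acquire_contents: in the real code this
    // copies the data out of the Float32Arrays and detaches them.
    fn acquire_contents(&self) -> Option<Channels> {
        Some(vec![vec![0.0; 4]; 2])
    }

    // Mirrors the shape of the new AudioBuffer::get_channels: acquire
    // lazily, cache the result, and return a borrow rather than a clone.
    fn get_channels(&self) -> Ref<Option<Channels>> {
        if self.shared_channels.borrow().is_none() {
            let channels = self.acquire_contents();
            if channels.is_some() {
                *self.shared_channels.borrow_mut() = channels;
            }
        }
        self.shared_channels.borrow()
    }
}

fn main() {
    let buffer = Buffer { shared_channels: RefCell::new(None) };
    // Callers clone only when they actually need an owned copy.
    if let Some(ref channels) = *buffer.get_channels() {
        println!("{} channels acquired", channels.len());
    }
}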
diff --git a/components/script/dom/audiobuffer.rs b/components/script/dom/audiobuffer.rs
index a7b7467f14c..92040be9bbf 100644
--- a/components/script/dom/audiobuffer.rs
+++ b/components/script/dom/audiobuffer.rs
@@ -17,25 +17,41 @@ use js::rust::CustomAutoRooterGuard;
 use js::rust::wrappers::JS_DetachArrayBuffer;
 use js::typedarray::{CreateWith, Float32Array};
 use servo_media::audio::buffer_source_node::AudioBuffer as ServoMediaAudioBuffer;
+use std::cell::Ref;
 use std::cmp::min;
 use std::ptr::{self, NonNull};
 
-// This range is defined by the spec.
+// Spec mandates at least [8000, 96000], we use [8000, 192000] to match Firefox
 // https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createbuffer
 pub const MIN_SAMPLE_RATE: f32 = 8000.;
-pub const MAX_SAMPLE_RATE: f32 = 96000.;
+pub const MAX_SAMPLE_RATE: f32 = 192000.;
 
 type JSAudioChannel = Heap<*mut JSObject>;
 
+/// The AudioBuffer keeps its data either in js_channels
+/// or in shared_channels if js_channels buffers are detached.
+///
+/// js_channels buffers are (re)attached right before calling GetChannelData
+/// and remain attached until its contents are needed by some other API
+/// implementation. Follow https://webaudio.github.io/web-audio-api/#acquire-the-content
+/// to know in which situations js_channels buffers must be detached.
+///
 #[dom_struct]
 pub struct AudioBuffer {
     reflector_: Reflector,
+    /// Float32Arrays returned by calls to GetChannelData.
     js_channels: DomRefCell<Vec<JSAudioChannel>>,
+    /// Aggregates the data from js_channels.
+    /// This is Some<T> iff the buffers in js_channels are detached.
     #[ignore_malloc_size_of = "servo_media"]
-    shared_channels: DomRefCell<ServoMediaAudioBuffer>,
+    shared_channels: DomRefCell<Option<ServoMediaAudioBuffer>>,
+    /// https://webaudio.github.io/web-audio-api/#dom-audiobuffer-samplerate
     sample_rate: f32,
+    /// https://webaudio.github.io/web-audio-api/#dom-audiobuffer-length
     length: u32,
+    /// https://webaudio.github.io/web-audio-api/#dom-audiobuffer-duration
     duration: f64,
+    /// https://webaudio.github.io/web-audio-api/#dom-audiobuffer-numberofchannels
     number_of_channels: u32,
 }
 
@@ -47,10 +63,7 @@ impl AudioBuffer {
         AudioBuffer {
             reflector_: Reflector::new(),
             js_channels: DomRefCell::new(vec),
-            shared_channels: DomRefCell::new(ServoMediaAudioBuffer::new(
-                number_of_channels as u8,
-                length as usize,
-            )),
+            shared_channels: DomRefCell::new(None),
             sample_rate,
             length,
             duration: length as f64 / sample_rate as f64,
@@ -68,7 +81,7 @@ impl AudioBuffer {
     ) -> DomRoot<AudioBuffer> {
         let buffer = AudioBuffer::new_inherited(number_of_channels, length, sample_rate);
         let buffer = reflect_dom_object(Box::new(buffer), global, AudioBufferBinding::Wrap);
-        buffer.set_channels(initial_data);
+        buffer.set_initial_data(initial_data);
         buffer
     }
 
@@ -96,18 +109,18 @@ impl AudioBuffer {
     // Initialize the underlying channels data with initial data provided by
     // the user or silence otherwise.
-    #[allow(unsafe_code)]
-    pub fn set_channels(&self, initial_data: Option<&[Vec<f32>]>) {
+    fn set_initial_data(&self, initial_data: Option<&[Vec<f32>]>) {
+        let mut channels = ServoMediaAudioBuffer::new(
+            self.number_of_channels as u8,
+            self.length as usize,
+        );
         for channel in 0..self.number_of_channels {
-            (*self.shared_channels.borrow_mut()).buffers[channel as usize] = match initial_data {
+            channels.buffers[channel as usize] = match initial_data {
                 Some(data) => data[channel as usize].clone(),
                 None => vec![0.; self.length as usize],
             };
         }
-    }
-
-    pub fn get_channels(&self) -> ServoMediaAudioBuffer {
-        self.shared_channels.borrow().clone()
+        *self.shared_channels.borrow_mut() = Some(channels);
     }
 
     #[allow(unsafe_code)]
@@ -117,35 +130,39 @@ impl AudioBuffer {
         for (i, channel) in self.js_channels.borrow_mut().iter().enumerate() {
             if !channel.get().is_null() {
                 // Already have data in JS array.
-                // We may have called GetChannelData, and web content may have modified
-                // js_channels. So make sure that shared_channels contains the same data as
-                // js_channels.
-                typedarray!(in(cx) let array: Float32Array = channel.get());
-                if let Ok(array) = array {
-                    (*self.shared_channels.borrow_mut()).buffers[i] = array.to_vec();
-                }
                 continue;
             }
 
-            // Copy the channel data from shared_channels to js_channels.
             rooted!(in (cx) let mut array = ptr::null_mut::<JSObject>());
-            if Float32Array::create(
-                cx,
-                CreateWith::Slice(&(*self.shared_channels.borrow_mut()).buffers[i]),
-                array.handle_mut(),
-            ).is_err()
-            {
-                return false;
+            if let Some(ref shared_channels) = *self.shared_channels.borrow() {
+                // Step 4. of
+                // https://webaudio.github.io/web-audio-api/#acquire-the-content
+                // "Attach ArrayBuffers containing copies of the data to the AudioBuffer,
+                // to be returned by the next call to getChannelData()".
+                if Float32Array::create(
+                    cx,
+                    CreateWith::Slice(&shared_channels.buffers[i]),
+                    array.handle_mut(),
+                ).is_err()
+                {
+                    return false;
+                }
             }
             channel.set(array.get());
         }
 
+        *self.shared_channels.borrow_mut() = None;
+
         true
     }
 
     // https://webaudio.github.io/web-audio-api/#acquire-the-content
     #[allow(unsafe_code)]
-    pub fn acquire_contents(&self) -> Option<ServoMediaAudioBuffer> {
+    fn acquire_contents(&self) -> Option<ServoMediaAudioBuffer> {
+        let mut result = ServoMediaAudioBuffer::new(
+            self.number_of_channels as u8,
+            self.length as usize,
+        );
         let cx = self.global().get_cx();
         for (i, channel) in self.js_channels.borrow_mut().iter().enumerate() {
             // Step 1.
@@ -173,13 +190,20 @@ impl AudioBuffer {
             channel.set(ptr::null_mut());
 
             // Step 3.
-            (*self.shared_channels.borrow_mut()).buffers[i] = channel_data;
-
-            // Step 4 will complete turning shared_channels
-            // data into js_channels ArrayBuffers in restore_js_channel_data.
+            result.buffers[i] = channel_data;
         }
 
-        Some((*self.shared_channels.borrow()).clone())
+        Some(result)
+    }
+
+    pub fn get_channels(&self) -> Ref<Option<ServoMediaAudioBuffer>> {
+        if self.shared_channels.borrow().is_none() {
+            let channels = self.acquire_contents();
+            if channels.is_some() {
+                *self.shared_channels.borrow_mut() = channels;
+            }
+        }
+        return self.shared_channels.borrow()
     }
 }
 
@@ -254,10 +278,10 @@ impl AudioBufferMethods for AudioBuffer {
                 let data = unsafe { array.as_slice() };
                 dest.extend_from_slice(&data[offset..offset + bytes_to_copy]);
             }
-        } else if let Some(shared_channel) =
-            self.shared_channels.borrow().buffers.get(channel_number)
-        {
-            dest.extend_from_slice(&shared_channel.as_slice()[offset..offset + bytes_to_copy]);
+        } else if let Some(ref shared_channels) = *self.shared_channels.borrow() {
+            if let Some(shared_channel) = shared_channels.buffers.get(channel_number) {
+                dest.extend_from_slice(&shared_channel.as_slice()[offset..offset + bytes_to_copy]);
+            }
         }
 
         unsafe {
@@ -297,21 +321,12 @@ impl AudioBufferMethods for AudioBuffer {
         typedarray!(in(cx) let js_channel: Float32Array = js_channel);
         if let Ok(mut js_channel) = js_channel {
             let bytes_to_copy = min(self.length - start_in_channel, source.len() as u32) as usize;
+            let mut js_channel_data = unsafe { js_channel.as_mut_slice() };
+            let (_, mut js_channel_data) =
+                js_channel_data.split_at_mut(start_in_channel as usize);
             unsafe {
-                let data = &source.as_slice()[0..bytes_to_copy];
-                // Update shared channel.
-                {
-                    let mut shared_channels = self.shared_channels.borrow_mut();
-                    let shared_channel = shared_channels.data_chan_mut(channel_number as u8);
-                    let (_, mut shared_channel) =
-                        shared_channel.split_at_mut(start_in_channel as usize);
-                    shared_channel[0..bytes_to_copy].copy_from_slice(data);
-                }
-                // Update js channel.
-                js_channel.update(
-                    self.shared_channels.borrow().buffers[channel_number as usize].as_slice(),
-                );
-            }
+                js_channel_data[0..bytes_to_copy].copy_from_slice(&source.as_slice()[0..bytes_to_copy])
+            };
         } else {
             return Err(Error::IndexSize);
         }
diff --git a/components/script/dom/audiobuffersourcenode.rs b/components/script/dom/audiobuffersourcenode.rs
index 7a2817e1204..da5329350fc 100644
--- a/components/script/dom/audiobuffersourcenode.rs
+++ b/components/script/dom/audiobuffersourcenode.rs
@@ -137,14 +137,14 @@ impl AudioBufferSourceNodeMethods for AudioBufferSourceNode {
         self.buffer.set(new_buffer);
 
         // Step 5.
-        if self.source_node.started() {
+        if self.source_node.has_start() {
             if let Some(buffer) = self.buffer.get() {
-                let buffer = buffer.acquire_contents();
+                let buffer = buffer.get_channels();
                 if buffer.is_some() {
                     self.source_node
                         .node()
                         .message(AudioNodeMessage::AudioBufferSourceNode(
-                            AudioBufferSourceNodeMessage::SetBuffer(buffer),
+                            AudioBufferSourceNodeMessage::SetBuffer((*buffer).clone()),
                         ));
                 }
             }
@@ -215,12 +215,12 @@ impl AudioBufferSourceNodeMethods for AudioBufferSourceNode {
         }
 
         if let Some(buffer) = self.buffer.get() {
-            let buffer = buffer.acquire_contents();
+            let buffer = buffer.get_channels();
             if buffer.is_some() {
                 self.source_node
                     .node()
                     .message(AudioNodeMessage::AudioBufferSourceNode(
-                        AudioBufferSourceNodeMessage::SetBuffer(buffer),
+                        AudioBufferSourceNodeMessage::SetBuffer((*buffer).clone()),
                     ));
             }
         }
@@ -235,7 +235,7 @@ impl<'a> From<&'a AudioBufferSourceOptions> for AudioBufferSourceNodeOptions {
         Self {
             buffer: if let Some(ref buffer) = options.buffer {
                 if let Some(ref buffer) = buffer {
-                    Some(buffer.get_channels())
+                    (*buffer.get_channels()).clone()
                 } else {
                     None
                 }
diff --git a/components/script/dom/audioscheduledsourcenode.rs b/components/script/dom/audioscheduledsourcenode.rs
index 6e15148993c..2614a0aa40a 100644
--- a/components/script/dom/audioscheduledsourcenode.rs
+++ b/components/script/dom/audioscheduledsourcenode.rs
@@ -18,8 +18,8 @@ use task_source::{TaskSource, TaskSourceName};
 #[dom_struct]
 pub struct AudioScheduledSourceNode {
     node: AudioNode,
-    started: Cell<bool>,
-    stopped: Cell<bool>,
+    has_start: Cell<bool>,
+    has_stop: Cell<bool>,
 }
 
@@ -39,8 +39,8 @@ impl AudioScheduledSourceNode {
                 number_of_inputs,
                 number_of_outputs,
             )?,
-            started: Cell::new(false),
-            stopped: Cell::new(false),
+            has_start: Cell::new(false),
+            has_stop: Cell::new(false),
         })
     }
 
@@ -48,8 +48,8 @@ impl AudioScheduledSourceNode {
         &self.node
     }
 
-    pub fn started(&self) -> bool {
-        self.started.get()
+    pub fn has_start(&self) -> bool {
+        self.has_start.get()
     }
 }
 
@@ -63,7 +63,7 @@ impl AudioScheduledSourceNodeMethods for AudioScheduledSourceNode {
             return Err(Error::Range("'when' must be a positive value".to_owned()));
         }
 
-        if self.started.get() || self.stopped.get() {
+        if self.has_start.get() || self.has_stop.get() {
             return Err(Error::InvalidState);
         }
 
@@ -93,7 +93,7 @@ impl AudioScheduledSourceNodeMethods for AudioScheduledSourceNode {
                 AudioScheduledSourceNodeMessage::RegisterOnEndedCallback(callback),
             ));
 
-        self.started.set(true);
+        self.has_start.set(true);
         self.node
             .message(AudioNodeMessage::AudioScheduledSourceNode(
                 AudioScheduledSourceNodeMessage::Start(*when),
@@ -107,10 +107,10 @@ impl AudioScheduledSourceNodeMethods for AudioScheduledSourceNode {
             return Err(Error::Range("'when' must be a positive value".to_owned()));
         }
 
-        if !self.started.get() {
+        if !self.has_start.get() {
             return Err(Error::InvalidState);
         }
-        self.stopped.set(true);
+        self.has_stop.set(true);
         self.node
             .message(AudioNodeMessage::AudioScheduledSourceNode(
                 AudioScheduledSourceNodeMessage::Stop(*when),
diff --git a/components/script/dom/htmlmediaelement.rs b/components/script/dom/htmlmediaelement.rs
index 84380c8d11f..3a1d439168c 100644
--- a/components/script/dom/htmlmediaelement.rs
+++ b/components/script/dom/htmlmediaelement.rs
@@ -278,7 +278,8 @@ impl HTMLMediaElement {
         let state = self.ready_state.get();
 
         let window = window_from_node(self);
-        let task_source = window.dom_manipulation_task_source();
+        // FIXME(nox): Why are errors silenced here?
+        let task_source = window.media_element_task_source();
         if self.Paused() {
             // Step 6.1.
             self.paused.set(false);
@@ -356,9 +357,7 @@ impl HTMLMediaElement {
         let window = window_from_node(self);
         let this = Trusted::new(self);
         let generation_id = self.generation_id.get();
-        // FIXME(nox): Why are errors silenced here?
-        // FIXME(nox): Media element event task source should be used here.
-        let _ = window.dom_manipulation_task_source().queue(
+        let _ = window.media_element_task_source().queue(
             task!(internal_pause_steps: move || {
                 let this = this.root();
                 if generation_id != this.generation_id.get() {
@@ -400,8 +399,7 @@ impl HTMLMediaElement {
         let this = Trusted::new(self);
         let generation_id = self.generation_id.get();
         // FIXME(nox): Why are errors silenced here?
-        // FIXME(nox): Media element event task source should be used here.
-        let _ = window.dom_manipulation_task_source().queue(
+        let _ = window.media_element_task_source().queue(
             task!(notify_about_playing: move || {
                 let this = this.root();
                 if generation_id != this.generation_id.get() {
@@ -435,7 +433,7 @@ impl HTMLMediaElement {
         }
 
         let window = window_from_node(self);
-        let task_source = window.dom_manipulation_task_source();
+        let task_source = window.media_element_task_source();
 
         // Step 1.
         match (old_ready_state, ready_state) {
@@ -590,7 +588,7 @@ impl HTMLMediaElement {
 
         // Step 8.
         let window = window_from_node(self);
-        window.dom_manipulation_task_source().queue_simple_event(
+        window.media_element_task_source().queue_simple_event(
             self.upcast(),
             atom!("loadstart"),
             &window,
@@ -667,7 +665,7 @@ impl HTMLMediaElement {
 
                 // Step 4.remote.1.2.
                 let window = window_from_node(self);
-                window.dom_manipulation_task_source().queue_simple_event(
+                window.media_element_task_source().queue_simple_event(
                     self.upcast(),
                     atom!("suspend"),
                     &window,
@@ -676,7 +674,7 @@ impl HTMLMediaElement {
                 // Step 4.remote.1.3.
                 let this = Trusted::new(self);
                 window
-                    .dom_manipulation_task_source()
+                    .media_element_task_source()
                     .queue(
                         task!(set_media_delay_load_event_flag_to_false: move || {
                             this.root().delay_load_event(false);
@@ -755,8 +753,7 @@ impl HTMLMediaElement {
         let generation_id = self.generation_id.get();
         self.take_pending_play_promises(Err(Error::NotSupported));
         // FIXME(nox): Why are errors silenced here?
-        // FIXME(nox): Media element event task source should be used here.
-        let _ = window.dom_manipulation_task_source().queue(
+        let _ = window.media_element_task_source().queue(
             task!(dedicated_media_source_failure_steps: move || {
                 let this = this.root();
                 if generation_id != this.generation_id.get() {
@@ -813,7 +810,7 @@ impl HTMLMediaElement {
         }
 
         let window = window_from_node(self);
-        let task_source = window.dom_manipulation_task_source();
+        let task_source = window.media_element_task_source();
 
         // Step 5.
         let network_state = self.network_state.get();
@@ -1291,7 +1288,7 @@ impl FetchResponseListener for HTMLMediaElementContext {
             // => "If mode is remote" step 2
             if time::get_time() > self.next_progress_event {
                 let window = window_from_node(&*elem);
-                window.dom_manipulation_task_source().queue_simple_event(
+                window.media_element_task_source().queue_simple_event(
                     elem.upcast(),
                     atom!("progress"),
                     &window,
diff --git a/components/script/dom/offlineaudiocontext.rs b/components/script/dom/offlineaudiocontext.rs
index 32aaa926ec9..7e967d0078a 100644
--- a/components/script/dom/offlineaudiocontext.rs
+++ b/components/script/dom/offlineaudiocontext.rs
@@ -151,10 +151,14 @@ impl OfflineAudioContextMethods for OfflineAudioContext {
             task!(resolve: move || {
                 let this = this.root();
                 let processed_audio = processed_audio.lock().unwrap();
-                let processed_audio: Vec<_> = processed_audio
+                let mut processed_audio: Vec<_> = processed_audio
                     .chunks(this.length as usize)
                     .map(|channel| channel.to_vec())
                     .collect();
+                // it can end up being empty if the task failed
+                if processed_audio.len() != this.length as usize {
+                    processed_audio.resize(this.length as usize, Vec::new())
+                }
                 let buffer = AudioBuffer::new(
                     &this.global().as_window(),
                     this.channel_count,
diff --git a/components/script/dom/window.rs b/components/script/dom/window.rs
index c50aae0aec7..e3ae0634b93 100644
--- a/components/script/dom/window.rs
+++ b/components/script/dom/window.rs
@@ -123,6 +123,7 @@ use task_source::TaskSourceName;
 use task_source::dom_manipulation::DOMManipulationTaskSource;
 use task_source::file_reading::FileReadingTaskSource;
 use task_source::history_traversal::HistoryTraversalTaskSource;
+use task_source::media_element::MediaElementTaskSource;
 use task_source::networking::NetworkingTaskSource;
 use task_source::performance_timeline::PerformanceTimelineTaskSource;
 use task_source::remote_event::RemoteEventTaskSource;
@@ -175,6 +176,8 @@ pub struct Window {
     #[ignore_malloc_size_of = "task sources are hard"]
     dom_manipulation_task_source: DOMManipulationTaskSource,
     #[ignore_malloc_size_of = "task sources are hard"]
+    media_element_task_source: MediaElementTaskSource,
+    #[ignore_malloc_size_of = "task sources are hard"]
     user_interaction_task_source: UserInteractionTaskSource,
     #[ignore_malloc_size_of = "task sources are hard"]
     networking_task_source: NetworkingTaskSource,
@@ -359,6 +362,10 @@ impl Window {
         self.dom_manipulation_task_source.clone()
     }
 
+    pub fn media_element_task_source(&self) -> MediaElementTaskSource {
+        self.media_element_task_source.clone()
+    }
+
     pub fn user_interaction_task_source(&self) -> UserInteractionTaskSource {
         self.user_interaction_task_source.clone()
     }
@@ -2061,6 +2068,7 @@ impl Window {
         runtime: Rc<Runtime>,
         script_chan: MainThreadScriptChan,
         dom_manipulation_task_source: DOMManipulationTaskSource,
+        media_element_task_source: MediaElementTaskSource,
         user_interaction_task_source: UserInteractionTaskSource,
         networking_task_source: NetworkingTaskSource,
         history_traversal_task_source: HistoryTraversalTaskSource,
@@ -2116,6 +2124,7 @@ impl Window {
             ),
             script_chan,
             dom_manipulation_task_source,
+            media_element_task_source,
            user_interaction_task_source,
             networking_task_source,
             history_traversal_task_source,