WebKit Bugzilla
Attachment 368521 Details for Bug 197391: AudioContext methods on a stopped AudioContext should exit early
Description: Patch
Filename: bug-197391-20190429172535.patch
MIME Type: text/plain
Creator: youenn fablet
Created: 2019-04-29 17:25:37 PDT
Size: 21.62 KB
Flags: patch, obsolete
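
A minimal sketch of the behavior this patch enforces, for readers skimming the diff. It assumes "context" is an AudioContext created in a document that has since been stopped (for example, inside an iframe that was later removed, as the layout test below does); the snippet is illustrative and is not part of the patch itself.

// Assumption: "context" belongs to a document that has been stopped
// (e.g. an iframe that was removed, as in audiocontext-stopped.html below).
// With this patch, the synchronous factory methods throw:
try {
    context.createGain();
} catch (e) {
    console.assert(e.name === "InvalidStateError");
}
// ...and the promise-returning methods reject with the same error:
context.suspend().catch(e => console.assert(e.name === "InvalidStateError"));
context.close().catch(e => console.assert(e.name === "InvalidStateError"));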
Subversion Revision: 244694
diff --git a/Source/WebCore/ChangeLog b/Source/WebCore/ChangeLog
index 83a5570adea3d55df64ea767b4b5aa71bb39ed4e..9ea348440047ea88c91810e01d7e0b333e93f679 100644
--- a/Source/WebCore/ChangeLog
+++ b/Source/WebCore/ChangeLog
@@ -1,3 +1,42 @@
+2019-04-29  Youenn Fablet  <youenn@apple.com>
+
+        Reject/throw when calling AudioContext methods on a stopped AudioContext
+        https://bugs.webkit.org/show_bug.cgi?id=197391
+
+        Reviewed by NOBODY (OOPS!).
+
+        Return InvalidStateError in that case.
+        ASSERT that we do not call lazyInitialize after being stopped
+        since this would mean we are doing unneeded processing.
+
+        Test: http/wpt/webaudio/audiocontext-stopped.html
+
+        * Modules/webaudio/AudioContext.cpp:
+        (WebCore::AudioContext::lazyInitialize):
+        (WebCore::AudioContext::createBufferSource):
+        (WebCore::AudioContext::createMediaElementSource):
+        (WebCore::AudioContext::createMediaStreamSource):
+        (WebCore::AudioContext::createMediaStreamDestination):
+        (WebCore::AudioContext::createScriptProcessor):
+        (WebCore::AudioContext::createBiquadFilter):
+        (WebCore::AudioContext::createWaveShaper):
+        (WebCore::AudioContext::createPanner):
+        (WebCore::AudioContext::createConvolver):
+        (WebCore::AudioContext::createDynamicsCompressor):
+        (WebCore::AudioContext::createAnalyser):
+        (WebCore::AudioContext::createGain):
+        (WebCore::AudioContext::createDelay):
+        (WebCore::AudioContext::createChannelSplitter):
+        (WebCore::AudioContext::createChannelMerger):
+        (WebCore::AudioContext::createOscillator):
+        (WebCore::AudioContext::createPeriodicWave):
+        (WebCore::AudioContext::startRendering):
+        (WebCore::AudioContext::suspend):
+        (WebCore::AudioContext::resume):
+        (WebCore::AudioContext::close):
+        * Modules/webaudio/AudioContext.h:
+        * Modules/webaudio/AudioContext.idl:
+
 2019-04-29  Youenn Fablet  <youenn@apple.com>
 
         Make Document audio producers use WeakPtr
diff --git a/Source/WebCore/Modules/webaudio/AudioContext.cpp b/Source/WebCore/Modules/webaudio/AudioContext.cpp
index 2e769e511964f96530ad690f3accf068d6788d97..48b0e78c310e2a31d23d8664e337e644c20703b0 100644
--- a/Source/WebCore/Modules/webaudio/AudioContext.cpp
+++ b/Source/WebCore/Modules/webaudio/AudioContext.cpp
@@ -206,6 +206,8 @@ AudioContext::~AudioContext()
 
 void AudioContext::lazyInitialize()
 {
+    ASSERT(!m_isStopScheduled);
+
     if (m_isInitialized)
         return;
 
@@ -426,11 +428,15 @@ void AudioContext::decodeAudioData(Ref<ArrayBuffer>&& audioData, RefPtr<AudioBuf
     m_audioDecoder.decodeAsync(WTFMove(audioData), sampleRate(), WTFMove(successCallback), WTFMove(errorCallback));
 }
 
-Ref<AudioBufferSourceNode> AudioContext::createBufferSource()
+ExceptionOr<Ref<AudioBufferSourceNode>> AudioContext::createBufferSource()
 {
     ALWAYS_LOG(LOGIDENTIFIER);
-
+
     ASSERT(isMainThread());
+
+    if (m_isStopScheduled)
+        return Exception { InvalidStateError };
+
     lazyInitialize();
     Ref<AudioBufferSourceNode> node = AudioBufferSourceNode::create(*this, m_destinationNode->sampleRate());
 
@@ -446,13 +452,14 @@ Ref<AudioBufferSourceNode> AudioContext::createBufferSource()
 ExceptionOr<Ref<MediaElementAudioSourceNode>> AudioContext::createMediaElementSource(HTMLMediaElement& mediaElement)
 {
     ALWAYS_LOG(LOGIDENTIFIER);
-
+
     ASSERT(isMainThread());
-    lazyInitialize();
-
-    if (mediaElement.audioSourceNode())
+
+    if (m_isStopScheduled || mediaElement.audioSourceNode())
         return Exception { InvalidStateError };
 
+    lazyInitialize();
+
     auto node = MediaElementAudioSourceNode::create(*this, mediaElement);
 
     mediaElement.setAudioSourceNode(node.ptr());
@@ -471,6 +478,9 @@ ExceptionOr<Ref<MediaStreamAudioSourceNode>> AudioContext::createMediaStreamSour
 
     ASSERT(isMainThread());
 
+    if (m_isStopScheduled)
+        return Exception { InvalidStateError };
+
     auto audioTracks = mediaStream.getAudioTracks();
     if (audioTracks.isEmpty())
         return Exception { InvalidStateError };
@@ -494,8 +504,11 @@ ExceptionOr<Ref<MediaStreamAudioSourceNode>> AudioContext::createMediaStreamSour
     return node;
 }
 
-Ref<MediaStreamAudioDestinationNode> AudioContext::createMediaStreamDestination()
+ExceptionOr<Ref<MediaStreamAudioDestinationNode>> AudioContext::createMediaStreamDestination()
 {
+    if (m_isStopScheduled)
+        return Exception { InvalidStateError };
+
     // FIXME: Add support for an optional argument which specifies the number of channels.
     // FIXME: The default should probably be stereo instead of mono.
     return MediaStreamAudioDestinationNode::create(*this, 1);
@@ -508,6 +521,10 @@ ExceptionOr<Ref<ScriptProcessorNode>> AudioContext::createScriptProcessor(size_t
     ALWAYS_LOG(LOGIDENTIFIER);
 
     ASSERT(isMainThread());
+
+    if (m_isStopScheduled)
+        return Exception { InvalidStateError };
+
     lazyInitialize();
 
     // W3C Editor's Draft 06 June 2017
@@ -563,65 +580,87 @@ ExceptionOr<Ref<ScriptProcessorNode>> AudioContext::createScriptProcessor(size_t
     return node;
 }
 
-Ref<BiquadFilterNode> AudioContext::createBiquadFilter()
+ExceptionOr<Ref<BiquadFilterNode>> AudioContext::createBiquadFilter()
 {
     ALWAYS_LOG(LOGIDENTIFIER);
 
     ASSERT(isMainThread());
+    if (m_isStopScheduled)
+        return Exception { InvalidStateError };
+
     lazyInitialize();
+
     return BiquadFilterNode::create(*this, m_destinationNode->sampleRate());
 }
 
-Ref<WaveShaperNode> AudioContext::createWaveShaper()
+ExceptionOr<Ref<WaveShaperNode>> AudioContext::createWaveShaper()
 {
     ALWAYS_LOG(LOGIDENTIFIER);
 
     ASSERT(isMainThread());
+    if (m_isStopScheduled)
+        return Exception { InvalidStateError };
+
     lazyInitialize();
     return WaveShaperNode::create(*this);
 }
 
-Ref<PannerNode> AudioContext::createPanner()
+ExceptionOr<Ref<PannerNode>> AudioContext::createPanner()
 {
     ALWAYS_LOG(LOGIDENTIFIER);
 
     ASSERT(isMainThread());
+    if (m_isStopScheduled)
+        return Exception { InvalidStateError };
+
     lazyInitialize();
     return PannerNode::create(*this, m_destinationNode->sampleRate());
 }
 
-Ref<ConvolverNode> AudioContext::createConvolver()
+ExceptionOr<Ref<ConvolverNode>> AudioContext::createConvolver()
 {
     ALWAYS_LOG(LOGIDENTIFIER);
 
     ASSERT(isMainThread());
+    if (m_isStopScheduled)
+        return Exception { InvalidStateError };
+
     lazyInitialize();
     return ConvolverNode::create(*this, m_destinationNode->sampleRate());
 }
 
-Ref<DynamicsCompressorNode> AudioContext::createDynamicsCompressor()
+ExceptionOr<Ref<DynamicsCompressorNode>> AudioContext::createDynamicsCompressor()
 {
     ALWAYS_LOG(LOGIDENTIFIER);
 
     ASSERT(isMainThread());
+    if (m_isStopScheduled)
+        return Exception { InvalidStateError };
+
     lazyInitialize();
     return DynamicsCompressorNode::create(*this, m_destinationNode->sampleRate());
 }
 
-Ref<AnalyserNode> AudioContext::createAnalyser()
+ExceptionOr<Ref<AnalyserNode>> AudioContext::createAnalyser()
 {
     ALWAYS_LOG(LOGIDENTIFIER);
 
     ASSERT(isMainThread());
+    if (m_isStopScheduled)
+        return Exception { InvalidStateError };
+
     lazyInitialize();
     return AnalyserNode::create(*this, m_destinationNode->sampleRate());
 }
 
-Ref<GainNode> AudioContext::createGain()
+ExceptionOr<Ref<GainNode>> AudioContext::createGain()
 {
     ALWAYS_LOG(LOGIDENTIFIER);
 
     ASSERT(isMainThread());
+    if (m_isStopScheduled)
+        return Exception { InvalidStateError };
+
     lazyInitialize();
     return GainNode::create(*this, m_destinationNode->sampleRate());
 }
@@ -631,6 +670,9 @@ ExceptionOr<Ref<DelayNode>> AudioContext::createDelay(double maxDelayTime)
     ALWAYS_LOG(LOGIDENTIFIER);
 
     ASSERT(isMainThread());
+    if (m_isStopScheduled)
+        return Exception { InvalidStateError };
+
     lazyInitialize();
     return DelayNode::create(*this, m_destinationNode->sampleRate(), maxDelayTime);
 }
@@ -640,6 +682,9 @@ ExceptionOr<Ref<ChannelSplitterNode>> AudioContext::createChannelSplitter(size_t
     ALWAYS_LOG(LOGIDENTIFIER);
 
     ASSERT(isMainThread());
+    if (m_isStopScheduled)
+        return Exception { InvalidStateError };
+
     lazyInitialize();
     auto node = ChannelSplitterNode::create(*this, m_destinationNode->sampleRate(), numberOfOutputs);
     if (!node)
@@ -652,6 +697,9 @@ ExceptionOr<Ref<ChannelMergerNode>> AudioContext::createChannelMerger(size_t num
     ALWAYS_LOG(LOGIDENTIFIER);
 
     ASSERT(isMainThread());
+    if (m_isStopScheduled)
+        return Exception { InvalidStateError };
+
     lazyInitialize();
     auto node = ChannelMergerNode::create(*this, m_destinationNode->sampleRate(), numberOfInputs);
     if (!node)
@@ -659,11 +707,14 @@ ExceptionOr<Ref<ChannelMergerNode>> AudioContext::createChannelMerger(size_t num
     return node.releaseNonNull();
 }
 
-Ref<OscillatorNode> AudioContext::createOscillator()
+ExceptionOr<Ref<OscillatorNode>> AudioContext::createOscillator()
 {
     ALWAYS_LOG(LOGIDENTIFIER);
 
     ASSERT(isMainThread());
+    if (m_isStopScheduled)
+        return Exception { InvalidStateError };
+
     lazyInitialize();
 
     Ref<OscillatorNode> node = OscillatorNode::create(*this, m_destinationNode->sampleRate());
@@ -680,6 +731,9 @@ ExceptionOr<Ref<PeriodicWave>> AudioContext::createPeriodicWave(Float32Array& re
     ALWAYS_LOG(LOGIDENTIFIER);
 
     ASSERT(isMainThread());
+    if (m_isStopScheduled)
+        return Exception { InvalidStateError };
+
     if (real.length() != imaginary.length() || (real.length() > MaxPeriodicWaveLength) || !real.length())
         return Exception { IndexSizeError };
     lazyInitialize();
@@ -1074,7 +1128,7 @@ bool AudioContext::willPausePlayback()
 void AudioContext::startRendering()
 {
     ALWAYS_LOG(LOGIDENTIFIER);
-    if (!willBeginPlayback())
+    if (m_isStopScheduled || !willBeginPlayback())
         return;
 
     destination()->startRendering();
@@ -1146,7 +1200,7 @@ void AudioContext::decrementActiveSourceCount()
 
 void AudioContext::suspend(DOMPromiseDeferred<void>&& promise)
 {
-    if (isOfflineContext()) {
+    if (isOfflineContext() || m_isStopScheduled) {
         promise.reject(InvalidStateError);
         return;
     }
@@ -1175,7 +1229,7 @@ void AudioContext::suspend(DOMPromiseDeferred<void>&& promise)
 
 void AudioContext::resume(DOMPromiseDeferred<void>&& promise)
 {
-    if (isOfflineContext()) {
+    if (isOfflineContext() || m_isStopScheduled) {
         promise.reject(InvalidStateError);
         return;
     }
@@ -1204,7 +1258,7 @@ void AudioContext::resume(DOMPromiseDeferred<void>&& promise)
 
 void AudioContext::close(DOMPromiseDeferred<void>&& promise)
 {
-    if (isOfflineContext()) {
+    if (isOfflineContext() || m_isStopScheduled) {
         promise.reject(InvalidStateError);
         return;
     }
diff --git a/Source/WebCore/Modules/webaudio/AudioContext.h b/Source/WebCore/Modules/webaudio/AudioContext.h
index 86c57c3df6aa670019d0addc1345705ae4f2b095..3705c4953eac66052d6bd310fbe4fa8038ae1bc5 100644
--- a/Source/WebCore/Modules/webaudio/AudioContext.h
+++ b/Source/WebCore/Modules/webaudio/AudioContext.h
@@ -134,26 +134,26 @@ public:
     bool wouldTaintOrigin(const URL&) const;
 
     // The AudioNode create methods are called on the main thread (from JavaScript).
-    Ref<AudioBufferSourceNode> createBufferSource();
+    ExceptionOr<Ref<AudioBufferSourceNode>> createBufferSource();
 #if ENABLE(VIDEO)
     ExceptionOr<Ref<MediaElementAudioSourceNode>> createMediaElementSource(HTMLMediaElement&);
 #endif
 #if ENABLE(MEDIA_STREAM)
     ExceptionOr<Ref<MediaStreamAudioSourceNode>> createMediaStreamSource(MediaStream&);
-    Ref<MediaStreamAudioDestinationNode> createMediaStreamDestination();
+    ExceptionOr<Ref<MediaStreamAudioDestinationNode>> createMediaStreamDestination();
 #endif
-    Ref<GainNode> createGain();
-    Ref<BiquadFilterNode> createBiquadFilter();
-    Ref<WaveShaperNode> createWaveShaper();
+    ExceptionOr<Ref<GainNode>> createGain();
+    ExceptionOr<Ref<BiquadFilterNode>> createBiquadFilter();
+    ExceptionOr<Ref<WaveShaperNode>> createWaveShaper();
     ExceptionOr<Ref<DelayNode>> createDelay(double maxDelayTime);
-    Ref<PannerNode> createPanner();
-    Ref<ConvolverNode> createConvolver();
-    Ref<DynamicsCompressorNode> createDynamicsCompressor();
-    Ref<AnalyserNode> createAnalyser();
+    ExceptionOr<Ref<PannerNode>> createPanner();
+    ExceptionOr<Ref<ConvolverNode>> createConvolver();
+    ExceptionOr<Ref<DynamicsCompressorNode>> createDynamicsCompressor();
+    ExceptionOr<Ref<AnalyserNode>> createAnalyser();
     ExceptionOr<Ref<ScriptProcessorNode>> createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, size_t numberOfOutputChannels);
     ExceptionOr<Ref<ChannelSplitterNode>> createChannelSplitter(size_t numberOfOutputs);
     ExceptionOr<Ref<ChannelMergerNode>> createChannelMerger(size_t numberOfInputs);
-    Ref<OscillatorNode> createOscillator();
+    ExceptionOr<Ref<OscillatorNode>> createOscillator();
     ExceptionOr<Ref<PeriodicWave>> createPeriodicWave(Float32Array& real, Float32Array& imaginary);
 
     // When a source node has no more processing to do (has finished playing), then it tells the context to dereference it.
diff --git a/Source/WebCore/Modules/webaudio/AudioContext.idl b/Source/WebCore/Modules/webaudio/AudioContext.idl
index a114202b01f8b3378ff66be73a9f0b963e3527ac..bf7a4ff3565ddee62c2136549ade1c0dbc13cef1 100644
--- a/Source/WebCore/Modules/webaudio/AudioContext.idl
+++ b/Source/WebCore/Modules/webaudio/AudioContext.idl
@@ -69,24 +69,24 @@ enum AudioContextState {
     void decodeAudioData(ArrayBuffer audioData, AudioBufferCallback? successCallback, optional AudioBufferCallback? errorCallback);
 
     // Sources
-    AudioBufferSourceNode createBufferSource();
+    [MayThrowException] AudioBufferSourceNode createBufferSource();
 
     [Conditional=VIDEO, MayThrowException] MediaElementAudioSourceNode createMediaElementSource(HTMLMediaElement mediaElement);
 
     [Conditional=MEDIA_STREAM, MayThrowException] MediaStreamAudioSourceNode createMediaStreamSource(MediaStream mediaStream);
-    [Conditional=MEDIA_STREAM] MediaStreamAudioDestinationNode createMediaStreamDestination();
+    [Conditional=MEDIA_STREAM, MayThrowException] MediaStreamAudioDestinationNode createMediaStreamDestination();
 
     // Processing nodes
-    GainNode createGain();
+    [MayThrowException] GainNode createGain();
     [MayThrowException] DelayNode createDelay(optional unrestricted double maxDelayTime = 1);
-    BiquadFilterNode createBiquadFilter();
-    WaveShaperNode createWaveShaper();
-    PannerNode createPanner();
-    ConvolverNode createConvolver();
-    DynamicsCompressorNode createDynamicsCompressor();
-    AnalyserNode createAnalyser();
+    [MayThrowException] BiquadFilterNode createBiquadFilter();
+    [MayThrowException] WaveShaperNode createWaveShaper();
+    [MayThrowException] PannerNode createPanner();
+    [MayThrowException] ConvolverNode createConvolver();
+    [MayThrowException] DynamicsCompressorNode createDynamicsCompressor();
+    [MayThrowException] AnalyserNode createAnalyser();
     [MayThrowException] ScriptProcessorNode createScriptProcessor(optional unsigned long bufferSize = 0, optional unsigned long numberOfInputChannels = 2, optional unsigned long numberOfOutputChannels = 2);
-    OscillatorNode createOscillator();
+    [MayThrowException] OscillatorNode createOscillator();
     [MayThrowException] PeriodicWave createPeriodicWave(Float32Array real, Float32Array imag);
 
     // Channel splitting and merging
diff --git a/LayoutTests/ChangeLog b/LayoutTests/ChangeLog
index 3264a8078bbd295b40f1de40250ae78849a9b1c6..928b431b77cdc86a94716ea0ab6796abf3f314bb 100644
--- a/LayoutTests/ChangeLog
+++ b/LayoutTests/ChangeLog
@@ -1,3 +1,15 @@
+2019-04-29  Youenn Fablet  <youenn@apple.com>
+
+        Reject/throw when calling AudioContext methods on a stopped AudioContext
+        https://bugs.webkit.org/show_bug.cgi?id=197391
+
+        Reviewed by NOBODY (OOPS!).
+
+        * http/wpt/webaudio/audiocontext-stopped-expected.txt: Added.
+        * http/wpt/webaudio/audiocontext-stopped.html: Added.
+        * http/wpt/webaudio/resources/audiocontext-stopped-iframe.html: Added.
+        * platform/win/TestExpectations: Skip test for win.
+
 2019-04-28  Youenn Fablet  <youenn@apple.com>
 
         getDisplayMedia should be called on user gesture
diff --git a/LayoutTests/http/wpt/webaudio/audiocontext-stopped-expected.txt b/LayoutTests/http/wpt/webaudio/audiocontext-stopped-expected.txt
new file mode 100644
index 0000000000000000000000000000000000000000..2cffd4593172b283105af3e512afeadfe9185c7f
--- /dev/null
+++ b/LayoutTests/http/wpt/webaudio/audiocontext-stopped-expected.txt
@@ -0,0 +1,21 @@
+
+
+PASS Load test iframe
+PASS createBufferSource
+PASS createMediaElementSource
+PASS createMediaStreamDestination
+PASS createGain
+PASS createDelay
+PASS createBiquadFilter
+PASS createWaveShapper
+PASS createPanner
+PASS createConvolver
+PASS createDynamicsCompressor
+PASS createAnalyser
+PASS createScriptProcessor
+PASS createOscillator
+PASS createPeriodicWave
+PASS suspend
+PASS resume
+PASS close
+
diff --git a/LayoutTests/http/wpt/webaudio/audiocontext-stopped.html b/LayoutTests/http/wpt/webaudio/audiocontext-stopped.html
new file mode 100644
index 0000000000000000000000000000000000000000..09670899b3b6a712f0714d03f3f4f2e5867c9d91
--- /dev/null
+++ b/LayoutTests/http/wpt/webaudio/audiocontext-stopped.html
@@ -0,0 +1,100 @@
+<!DOCTYPE html>
+<html>
+<head>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+</head>
+<body>
+<video id="video"></video>
+<script>
+function with_iframe(url) {
+    return new Promise(function(resolve) {
+        var frame = document.createElement('iframe');
+        frame.className = 'test-iframe';
+        frame.src = url;
+        frame.onload = function() { resolve(frame); };
+        document.body.appendChild(frame);
+    });
+}
+
+var context;
+promise_test(async () => {
+    const iframe = await with_iframe("resources/audiocontext-stopped-iframe.html");
+    context = iframe.contentWindow.audioContext;
+    iframe.remove();
+
+    runTests();
+}, "Load test iframe");
+
+function runTests()
+{
+    test(() => {
+        assert_throws('InvalidStateError', () => context.createBufferSource());
+    }, "createBufferSource");
+    test(() => {
+        assert_throws('InvalidStateError', () => context.createMediaElementSource(video));
+    }, "createMediaElementSource");
+
+    test(() => {
+        assert_throws('InvalidStateError', () => context.createMediaStreamDestination());
+    }, "createMediaStreamDestination");
+
+    test(() => {
+        assert_throws('InvalidStateError', () => context.createGain());
+    }, "createGain");
+
+    test(() => {
+        assert_throws('InvalidStateError', () => context.createDelay());
+    }, "createDelay");
+
+    test(() => {
+        assert_throws('InvalidStateError', () => context.createBiquadFilter());
+    }, "createBiquadFilter");
+
+    test(() => {
+        assert_throws('InvalidStateError', () => context.createWaveShaper());
+    }, "createWaveShapper");
+
+    test(() => {
+        assert_throws('InvalidStateError', () => context.createPanner());
+    }, "createPanner");
+
+    test(() => {
+        assert_throws('InvalidStateError', () => context.createConvolver());
+    }, "createConvolver");
+
+    test(() => {
+        assert_throws('InvalidStateError', () => context.createDynamicsCompressor());
+    }, "createDynamicsCompressor");
+
+    test(() => {
+        assert_throws('InvalidStateError', () => context.createAnalyser());
+    }, "createAnalyser");
+
+    test(() => {
+        assert_throws('InvalidStateError', () => context.createScriptProcessor());
+    }, "createScriptProcessor");
+
+    test(() => {
+        assert_throws('InvalidStateError', () => context.createOscillator());
+    }, "createOscillator");
+
+    test(() => {
+        assert_throws('InvalidStateError', () => context.createPeriodicWave(new Float32Array(1), new Float32Array(1)));
+    }, "createPeriodicWave");
+
+    promise_test((test) => {
+        return promise_rejects(test, 'InvalidStateError', context.suspend());
+    }, "suspend");
+
+    promise_test((test) => {
+        return promise_rejects(test, 'InvalidStateError', context.resume());
+    }, "resume");
+
+    promise_test((test) => {
+        return promise_rejects(test, 'InvalidStateError', context.close());
+    }, "close");
+}
+</script>
+</body>
+</html>
diff --git a/LayoutTests/http/wpt/webaudio/resources/audiocontext-stopped-iframe.html b/LayoutTests/http/wpt/webaudio/resources/audiocontext-stopped-iframe.html
new file mode 100644
index 0000000000000000000000000000000000000000..8b1dca0a631aef958b74ad11301dbec6ab4de6bc
--- /dev/null
+++ b/LayoutTests/http/wpt/webaudio/resources/audiocontext-stopped-iframe.html
@@ -0,0 +1,3 @@
+<script>
+window.audioContext = new webkitAudioContext();
+</script>
diff --git a/LayoutTests/platform/win/TestExpectations b/LayoutTests/platform/win/TestExpectations
index 450805b64508e9db49857ef927564547201acbad..ac87428f0ff381c4f705aa5f37abbf2e45e392b7 100644
--- a/LayoutTests/platform/win/TestExpectations
+++ b/LayoutTests/platform/win/TestExpectations
@@ -494,6 +494,7 @@ webkit.org/b/61540 http/tests/security/isolatedWorld/cross-origin-xhr.html [ Ski
 
 # TODO For now, Web Audio tests are disabled
 webkit.org/b/86914 webaudio/ [ Skip ]
+webkit.org/b/86914 http/wpt/webaudio/ [ Skip ]
 webkit.org/b/86914 fast/history/page-cache-closed-audiocontext.html [ Skip ]
 webkit.org/b/86914 fast/history/page-cache-running-audiocontext.html [ Skip ]
 webkit.org/b/86914 fast/history/page-cache-suspended-audiocontext.html [ Skip ]